Python - Subprocess - Recording and Dumping Measurements - python

In python3 with Ubuntu 16.04LTS, I have a subprocess that I created from my main script to record measurements from a device connected to my local machine. I would like to know how to send a message to this subprocess when I want to finish data recording, and switch to dumping the measurements to a csv file. Shown below is a stripped-down version of what I have tried so far, but the code hangs and I am unable to dump the measurements I record. In fact, I only record 1 measurement. I am not sure about how to asynchronously check for stdin inputs while recording data. May I please get some help?
Main.py
# start subprocess
# NOTE(review): stdin is NOT set to subprocess.PIPE here, so the
# communicate(input=...) call below has no pipe to deliver the message
# through -- confirm against the full script.
p_1 = subprocess.Popen(["./ekg.py", saveFilename_ekg], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
# do other stuff
...
# send message to quit
message = str("1")
encMsg = message.encode()
print("Message:", encMsg.decode())
# communicate() blocks until the child exits; ekg.py's blocking
# sys.stdin.read() never returns, so this is where the hang described
# in the question occurs.
p_stdout = p_1.communicate(input=encMsg)[0]
# print "Done" from subprocess
print(p_stdout.decode('utf-8').strip())
# kill subprocess
p_1.kill()
ekg.py
def dumpLiveData(outputFile):
    """Record EKG measurements, then write them to *outputFile* as CSV.

    NOTE(review): sys.stdin.read() below blocks until EOF, which is why
    only one measurement is recorded before the process stalls -- a
    non-blocking check (select/poll) or a signal is needed instead.
    """
    ekg = ekgClass()
    dataMeasurements = []
    for liveData in ekg.getLiveData():
        # monitor stdin for message
        # NOTE(review): blocks forever waiting for EOF; also raises
        # ValueError if the parent sends a non-integer string.
        if int(sys.stdin.read()) == 1:
            break
        else:
            meas = [liveData.time, liveData.pulseWaveform]
            dataMeasurements.append(meas)
    #print ("Dumping data")
    # NOTE(review): on Python 3 csv.writer needs a text-mode file opened
    # with newline='' -- 'wb' makes writerow raise TypeError. TODO
    # confirm target Python version.
    with open(outputFile, 'wb') as csvfile:
        writer = csv.writer(csvfile, quoting=csv.QUOTE_NONNUMERIC)
        #print ("Created text file")
        header = ["Time", "Waveform value"]
        writer.writerow(header)
        for idx, val in enumerate(dataMeasurements):
            writer.writerow(dataMeasurements[idx])
    print("Done")


if __name__== "__main__":
    # get parameters
    parser = argparse.ArgumentParser(description="ekg.py")
    parser.add_argument("outputFile", help="Output CSV file.")
    # parse
    args = parser.parse_args()
    # record and dump measurements
    dumpLiveData(args.outputFile)

Solved it by sending a control + C event to the subprocess. An try-except-else block caught the keyboard interrupt, processed it, and then I gracefully exit the block. After exiting, I write the data recorded to a csv file.
main.py
import subprocess, signal

# start subprocess (saveFilename_ekg is defined earlier in the real script)
p_1 = subprocess.Popen(["./ekg.py", saveFilename_ekg],
                       stdout=subprocess.PIPE, stderr=subprocess.PIPE)
# do other stuff
...
# send control + C event; ekg.py catches the resulting KeyboardInterrupt
p_1.send_signal(signal.SIGINT)
# communicate() waits for the child to exit and returns (stdout, stderr).
# The original `stdout, stderr = p_1.communicate(input=encMsg)[0]` indexed
# the tuple before unpacking (ValueError at runtime) and passed an
# `encMsg` that is never defined in this version.
stdout, stderr = p_1.communicate()
# print output from subprocess
print(stdout.decode('utf-8').strip())
# communicate() already waited for the child to exit (and thus to finish
# writing its file); wait()/kill() remain only as harmless safety nets
p_1.wait()
p_1.kill()
ekg.py
def dumpLiveData(outputFile):
    """Record measurements until SIGINT (Ctrl+C), then dump them to CSV.

    Parameters:
        outputFile: path of the CSV file to create.

    The parent process signals the end of recording with SIGINT; the
    resulting KeyboardInterrupt simply ends the acquisition loop.
    """
    ekg = ekgClass()
    dataMeasurements = []
    try:
        for liveData in ekg.getLiveData():
            meas = [liveData.time, liveData.pulseWaveform]
            dataMeasurements.append(meas)
    except KeyboardInterrupt:
        # SIGINT ends the recording; fall through to the dump below.
        # (The original also kept an `exception_found` flag checked
        # inside the try -- dead code, since the interrupt already
        # leaves the loop.)
        pass
    print("Dumping data")
    # Python 3's csv module requires a TEXT-mode file opened with
    # newline='' -- the original 'wb' made writerow raise TypeError.
    with open(outputFile, 'w', newline='') as csvfile:
        writer = csv.writer(csvfile, quoting=csv.QUOTE_NONNUMERIC)
        print("Created text file")
        writer.writerow(["Time", "Waveform value"])
        writer.writerows(dataMeasurements)
    print("Done")

Related

sub script stdin prompt text display after user input content when using subprocess popen

I am trying to write one script to read the output of another py file. and because the calling py file may have some input. So in main python file, I would handle the stdin.
I got one strange phenomenon, the calling script's prompt ('please input something') is displayed after user input content. Any ideas?
see the picture
eating.py (main py)
# coding: utf-8
import subprocess
from queue import Queue
from threading import Thread
class TextStreaming(Thread):
    """Pump lines from a stream into a queue on a background thread.

    Works for both binary pipes (lines are bytes) and text streams.
    """

    def __init__(self, stream, queue):
        super().__init__()
        self._streaming = stream   # readable stream (e.g. Popen.stdout)
        self.queue = queue         # queue.Queue receiving each line

    def run(self) -> None:
        # readline() returns b'' (bytes pipe) or '' (text stream) at EOF.
        # The original used iter(readline, '') -- that sentinel never
        # matches b'', so on a bytes pipe the thread spun forever,
        # queueing empty chunks after the child died.  Testing
        # truthiness handles both stream types.
        while True:
            line = self._streaming.readline()
            if not line:
                break
            self.queue.put(line)
# NOTE(review): a plain string command without shell=True only works on
# Windows; on POSIX prefer the list form ['python', 'working.py'].
cmd = 'python working.py'
# NOTE(review): the child's stdout is a block-buffered pipe, so the
# prompt from input('please input something\n') sits in the child's
# buffer and only reaches us after later output flushes it -- this is
# why the prompt appears AFTER the user's input.  Running the child
# with `python -u` (or flushing in the child) fixes the ordering.
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, stdin=subprocess.PIPE)
queue = Queue()
mytest = TextStreaming(p.stdout, queue)
# setDaemon() is deprecated; `mytest.daemon = True` is the modern form.
mytest.setDaemon(True)
mytest.start()
is_reading = True
# NOTE(review): is_reading is never set to False, so only the reader
# thread dying ends this loop.
while is_reading and mytest.is_alive():
    while is_reading and not queue.empty():
        line = queue.get()
        # lines arrive as bytes from the pipe
        line = line.decode('utf-8')
        if line:
            print(line)
        if 'counting' in line:
            # forward one line of user input to the child's stdin
            data = input()
            data = data.encode('utf-8')
            p.stdin.write(data)
            p.stdin.write(b'\n')
            p.stdin.flush()
working.py (calling script)
# coding: utf-8
import time

# Child script: prints a heartbeat once per second for up to 30 seconds,
# and on the 5th tick asks its parent for a line of input.
timeout = 30
deadtime = time.time() + timeout
count = 0
while time.time() < deadtime:
    count = count + 1
    print(f'{time.time()} I am working')
    time.sleep(1)
    if count == 5:
        # sentinel line the parent watches for before sending input
        print('counting came\n')
        data = input('please input something\n')
        print(f'{data} inputted')

Can't write to file from child process

I can't wrap my head around this... I have the following code:
def launch(command):
    """Fork; the child starts *command* and records its pid in a .pid
    file, while the parent polls for that file and reads the pid back.

    Raises TimeoutError if the child has not produced the pid file
    within 30 seconds.
    """
    pf = os.path.join(working_directory, '.pid')
    pid = os.fork()
    if pid == 0:
        # child: redirect the command's stdout to a log file
        # (path elided with ... in the question)
        stdout = os.open(..., os.O_WRONLY | os.O_CREAT)
        try:
            proc = Popen(command, shell=False, stdout=stdout, cwd=workdir)
            print(proc.pid)
            # The file is opened in BINARY mode, so the pid must be
            # encoded -- the original `p.write(proc.pid)` passed a bare
            # int, which raises TypeError.
            with open(pf, 'wb') as p:
                p.write(str(proc.pid).encode())
            print("Hello World")
        except OSError as proc_error:
            ...
        finally:
            # original had os._exit(o) -- a NameError; must be the int 0
            os._exit(0)  # socketserver catches SystemExit exception (flask)
    else:
        # parent: wait (max 30 s) for the child to write the pid file
        start = time.time()
        while not os.path.isfile(pf):
            if time.time() - start >= 30:
                raise TimeoutError("timed out waiting for pid file")
            time.sleep(5)
        pid = int(open(pf, 'rb').read())
Here's the output:
$pid
TimeoutError occurred
The script seems to be hanging at opening pf for writing. I verified: the file is not created, and "Hello World" never gets printed.
Why is this happening, and how can fix it?
Thanks!
I have reduced your code to this (removing everything I could not reproduce given your code):
import os
import time

s = "foo"  # pid file: written by the child, polled by the parent
pid = os.fork()
from subprocess import Popen
if pid == 0:
    # child: start a short-lived process and record its pid
    proc = Popen(["sleep", "3"])
    with open(s, "w") as p:
        p.write(str(proc.pid))  # <- Only real error I could see
    os._exit(0)
else:
    # parent: poll for the pid file, give up after 30 seconds
    start = time.time()
    while not os.path.isfile(s):
        if time.time() - start >= 30:
            raise TimeoutError("Command took to long")
        time.sleep(5)
    print("Read from file: " + open(s, 'r').read())
However, it works just fine, it prints Read from file: 12075. So the issue is not in the part that can be reproduced, given your code.
To read/write the process id to the binary file I successfully used the pickle module:
pickle.dump(proc.pid,p) # write to file
pickle.load(open(s, "rb")) #read from file

Understanding named Pipes (FIFO) in Python

I am running Python 2.7 on a Unix environment (tested on Ubuntu and OSX)
I have the following programs:
With os.open():
[SCRIPT 1]
import os

pipe_1_name = "pipe_1"
pipe_2_name = "pipe_2"
pipe_3_name = "pipe_3"


def set_connection():
    # (Re)create the three named pipes from scratch.
    pipe_names = [pipe_1_name, pipe_2_name, pipe_3_name]
    for pipe_name in pipe_names:
        if os.path.exists(pipe_name):
            os.remove(pipe_name)
            os.mkfifo(pipe_name)
        else:
            os.mkfifo(pipe_name)
    # os.open() gives an unbuffered fd, so os.write() reaches the
    # reader immediately (this is why this variant works).
    pipe_1 = os.open(pipe_1_name, os.O_WRONLY)
    os.write(pipe_1, "server_message_0\n")
    # open(..., 'r') on a FIFO blocks until the peer opens the write end.
    pipe_2 = open(pipe_2_name, 'r')
    received = pipe_2.readline()[:-1]
    print "[0] Now processing if received is correct: " + received
    pipe_3 = open(pipe_3_name, 'r')
    received = pipe_3.readline()[:-1]
    print "[1] Now processing if received is correct: " + received
    print "Connection established."
    return pipe_1,pipe_2,pipe_3


def main():
    pipe_1, pipe_2, pipe_3 = set_connection()
    print str(pipe_1)
    print str(pipe_2)
    print str(pipe_3)


if __name__ == "__main__":
    main()
[SCRIPT 2]
import os

pipe_1_name = "pipe_1"
pipe_2_name = "pipe_2"
pipe_3_name = "pipe_3"


def get_connection():
    # The server (Script 1) must already have created the FIFOs.
    pipe_names = [pipe_1_name, pipe_2_name, pipe_3_name]
    for pipe_name in pipe_names:
        if not os.path.exists(pipe_name):
            raise Exception("Pipe "+pipe_name+" does not exist!")
    # open(..., 'r') blocks until the server opens its write end.
    pipe_1 = open(pipe_1_name, 'r')
    received = pipe_1.readline()[:-1]
    print "[0] Now processing if received is correct: " + received
    # os.open() fds are unbuffered: os.write() reaches the reader at once.
    pipe_2 = os.open(pipe_2_name, os.O_WRONLY)
    os.write(pipe_2, "client_message_0\n")
    pipe_3 = os.open(pipe_3_name, os.O_WRONLY)
    os.write(pipe_3, "client_message_1\n")
    print "Connection established."
    return pipe_1,pipe_2,pipe_3


def main():
    pipe_1, pipe_2, pipe_3 = get_connection()
    print str(pipe_1)
    print str(pipe_2)
    print str(pipe_3)


if __name__ == "__main__":
    main()
The logic is simple:
[Pipe 1]
1. Script 1 opens a write pipe to Script 2.
2. Script 2 reads from the pipe.
[Pipe 2]
3. Script 2 open a write pipe to Script 1.
4. Script 1 reads from the pipe.
[Pipe 3]
5. Script 2 open a write pipe to Script 1.
6. Script 1 reads from the pipe.
Works exactly as expected.
Here is the problem. I don't want to use os.open(). I would like to receive a file object and use it to interface with the pipe. Clearly, it is not impossible, since I can read from a pipe with a file object. However, the following script does not work.
Without os.open()
[Script 1]
import os

pipe_1_name = "pipe_1"
pipe_2_name = "pipe_2"
pipe_3_name = "pipe_3"


def set_connection():
    # (Re)create the three named pipes from scratch.
    pipe_names = [pipe_1_name, pipe_2_name, pipe_3_name]
    for pipe_name in pipe_names:
        if os.path.exists(pipe_name):
            os.remove(pipe_name)
            os.mkfifo(pipe_name)
        else:
            os.mkfifo(pipe_name)
    # NOTE(review): open(..., 'w') is block-buffered, so the write below
    # stays in the user-space buffer and the peer's readline() blocks --
    # use open(pipe_1_name, 'w', 1) for line buffering, as the accepted
    # answer explains.
    pipe_1 = open(pipe_1_name, 'w')
    pipe_1.write("server_message_0\n")
    pipe_2 = open(pipe_2_name, 'r')
    received = pipe_2.readline()[:-1]
    print "[0] Now processing if received is correct: " + received
    pipe_3 = open(pipe_3_name, 'r')
    received = pipe_3.readline()[:-1]
    print "[1] Now processing if received is correct: " + received
    print "Connection established."
    return pipe_1,pipe_2,pipe_3


def main():
    pipe_1, pipe_2, pipe_3 = set_connection()
    print str(pipe_1)
    print str(pipe_2)
    print str(pipe_3)


if __name__ == "__main__":
    main()
[Script 2]
import os

pipe_1_name = "pipe_1"
pipe_2_name = "pipe_2"
pipe_3_name = "pipe_3"


def get_connection():
    # The server (Script 1) must already have created the FIFOs.
    pipe_names = [pipe_1_name, pipe_2_name, pipe_3_name]
    for pipe_name in pipe_names:
        if not os.path.exists(pipe_name):
            raise Exception("Pipe "+pipe_name+" does not exist!")
    # NOTE(review): this readline() blocks forever because the server's
    # buffered write never reaches the pipe (see the answer: the fix is
    # line buffering on the writer side).
    pipe_1 = open(pipe_1_name, 'r')
    received = pipe_1.readline()[:-1]
    print "[0] Now processing if received is correct: " + received
    # These writes have the same buffering issue in the other direction.
    pipe_2 = open(pipe_2_name, 'w')
    pipe_2.write("client_message_0\n")
    pipe_3 = open(pipe_3_name, 'w')
    pipe_3.write("client_message_1\n")
    print "Connection established."
    return pipe_1,pipe_2,pipe_3


def main():
    pipe_1, pipe_2, pipe_3 = get_connection()
    print str(pipe_1)
    print str(pipe_2)
    print str(pipe_3)


if __name__ == "__main__":
    main()
They look the same, don't they? The only difference is how I open the fifo. Instead of os.open(pipe_name,os.O_WRONLY) I use pipe = open(pipe_name, 'w').
What happens in the second set of scripts, the ones that don't use os.open(), Script 1 blocks at pipe_2 = open(pipe_2_name, 'r') while Script 2 blocks at pipe_2 = open(pipe_2_name, 'w').
Why is this happening?
Sorry for the wall of text. I am really confused about this issue.
What happens in the second set of scripts, the ones that don't use
os.open(), Script 1 blocks at pipe_2 = open(pipe_2_name, 'r') while Script 2 blocks at pipe_2 = open(pipe_2_name, 'w').
No, Script 2 blocks at received = pipe_1.readline()[:-1].
Why is this happening?
It's because Script 1's open(pipe_1_name, 'w') causes the written message to be buffered in fixed-size chunks (typically 4096 or 8192 bytes), so the pipe_1.write("server_message_0\n") does not yet write anything to the pipe, but only to the buffer, and Script 2 doesn't get anything to read. See open() and also How often does python flush to a file?
To cure this, since your messages are complete lines, it suffices to use line buffering, e. g.
pipe_1 = open(pipe_1_name, 'w', 1)
(as well for the other write pipes).

Python threading - Stopping a thread from another function

I've started playing with threading in python, after looking at lots of tutorials I finally have threading working in my project - however I've not found any examples where the thread is stopped from another function. So I can start the thread, it works fine, CSV is written correctly but I can't stop it. I get the following error from the stop function:
File "./blackboxv1.1.py", line 162, in stop_csv_logging
csv_thread.join()
NameError: global name 'csv_thread' is not defined
I've attached my code, it's only a snippet from much larger bit of code but everything is there I think should be relevant. The code essentially writes GPS data to a CSV file for logging GPS and accelerometer data, its called from a menu which calls the start and stop functions.
This bit in each function catches the key presses from the buttons on the LCD board and work fine.
sleep(0.5)
while 1:
if lcd.is_pressed(0):
break
else:
I'm fairly new to Python so this is a steep curve ..
class WriteCSV:
    """Log GPS and accelerometer data to a timestamped CSV file.

    run() appends one row per second until terminate() clears the
    _running flag from another thread.
    """

    def __init__(self):
        self._running = True

    def terminate(self):
        # Called from another thread; run()'s loop polls this flag.
        self._running = False

    def run(self):
        # create unique filename based on date/time
        new_file = time.strftime("%Y%m%d-%H%M%S.csv")
        file_path = "/home/pi/scripts/gps/log_data/"
        new_filename = file_path + new_file
        # Set headers for csv
        headers = ['TimeStamp', 'Speed', 'Lat', 'Lon', 'Alt', 'Climb', 'Acc_X', 'Accl_Y']
        # create file, write headers, close (with-block handles closing)
        with open(new_filename, 'a+') as outfile:
            csv.writer(outfile).writerow(headers)
        while self._running:
            # Read FRESH accelerations every pass -- the original sampled
            # lsm once before the loop, so every row logged the same
            # stale reading (merely re-rounded each iteration).
            accel = lsm.readAccelerationsG()
            # reopen/close per row so logged data survives a power cut
            with open(new_filename, 'a+') as outfile:
                writer = csv.writer(outfile)
                # unique timestamp needed for each row
                time_stamp = time.strftime("%Y%m%d-%H%M%S")
                # round the g-force numbers to one decimal place
                row = [time_stamp, gpsd.fix.speed, gpsd.fix.latitude,
                       gpsd.fix.longitude, gpsd.fix.altitude, gpsd.fix.climb,
                       round(accel.x, 1), round(accel.y, 1)]
                writer.writerow(row)
            # set sleep of 1 sec
            sleep(1)
#========================================================
# Start CSV Logging
#========================================================
def start_csv_logging():
    """Create a WriteCSV instance and run it on a background thread.

    Both the instance AND the thread are stored as module globals so
    stop_csv_logging() can reach them.  The original declared only
    csv_log global and left csv_thread a local, which is exactly what
    caused "NameError: global name 'csv_thread' is not defined" in the
    stop function.
    """
    # debounce the LCD button press
    sleep(0.5)
    while 1:
        if lcd.is_pressed(0):
            break
        else:
            global csv_log, csv_thread
            csv_log = WriteCSV()
            # create the thread and start run()
            csv_thread = threading.Thread(target=csv_log.run)
            # start it up
            csv_thread.start()
            lcd.clear()
            lcd.message("Starting Log ..")
            sleep(2)
            lcd.clear()
            lcd.message("Logging!")
            break
#========================================================
# Stop CSV Logging
#========================================================
def stop_csv_logging():
    # debounce the LCD button press
    sleep(0.5)
    while 1:
        if lcd.is_pressed(0):
            break
        else:
            # Signal termination
            # NOTE(review): csv_log and csv_thread must be module-level
            # globals assigned by start_csv_logging(); if csv_thread was
            # created as a local there, this join() raises NameError --
            # confirm the start function declares it global.
            csv_log.terminate()
            csv_thread.join()
            # Wait for termination
            lcd.clear()
            lcd.message("Stopping ....")
            sleep(2)
            lcd.clear()
            lcd.message("Log Stopped!")
            sleep(2)
            break
#========================================================

how two process communicate with each other

I'm very new with python.
I started implementing two daemon processes that will send messages to each other.
right now i have just 2 daemons that are running.
I don't understand how to build something that they can communicate through..
I read that there are pipe, or queue ...
Still, I could not understand how to build a pipe or a queue such that the two ends are the two processes.
import multiprocessing
import time
import sys
def daemon():
p = multiprocessing.current_process()
print 'Starting:', p.name, p.pid
sys.stdout.flush()
while (1):
time.sleep(1)
print 'Exiting :', p.name, p.pid
sys.stdout.flush()
def machine_func():
p = multiprocessing.current_process()
print 'Starting:', p.name, p.pid
sys.stdout.flush()
while (1):
time.sleep(1)
print 'Exiting :', p.name, p.pid
sys.stdout.flush()
cs = multiprocessing.Process(name='control_service', target=control_service_func)
cs.daemon = True
m = multiprocessing.Process(name='machine', target=machine_func)
m.daemon = True
cs.start()
m.start()
You can find very good examples here: Communication Between Processes
you can communicate with daemons via text files like this:
from multiprocessing import Process
from ast import literal_eval as literal
from random import random
import time
def clock():
    """Return the current local time as 'HH:MM:SS' (24-hour clock)."""
    # time.ctime() looks like 'Sun Jun 20 23:21:05 1993';
    # characters 11..18 are the HH:MM:SS field.
    return time.ctime()[11:19]
def sub_a():
    """Daemon body: roughly every second, write {'a': counter} to
    a_test.txt, retrying while the reader holds the file."""
    a = 0
    while 1:
        data = {'a': a}
        opened = 0
        while not opened:
            try:
                with open('a_test.txt', 'w+') as file:
                    file.write(str(data))
                opened = 1
            except OSError:
                # was a bare `except` (hides real bugs) and printed the
                # WRONG filename ('b_test.txt') for this writer
                print ('a_test.txt in use, try WRITE again...')
        a += 1
        time.sleep(random()*2)
def sub_b():
    """Daemon body: roughly every two seconds, write {'b': counter} to
    b_test.txt, retrying while the reader holds the file."""
    b = 0
    while 1:
        data = {'b': b}
        opened = 0
        while not opened:
            try:
                with open('b_test.txt', 'w+') as file:
                    file.write(str(data))
                opened = 1
            except OSError:
                # was a bare `except`; narrowed to the file-access error
                # this retry loop is actually guarding against
                print ('b_test.txt in use, try WRITE again...')
        b += 1
        time.sleep(random()*4)
# clear communication lines
with open('a_test.txt', 'w+') as file:
    file.write('')
with open('b_test.txt', 'w+') as file:
    file.write('')

# begin daemons
sa = Process(target=sub_a)
sa.daemon = True
sb = Process(target=sub_b)
sb.daemon = True
sa.start()
sb.start()

begin = time.time()
m = 0
while 1:
    m += 1
    time.sleep(1)
    elapsed = int(time.time()-begin)
    # fetch data from daemons, retrying on contention.  The bare
    # `except` clauses are narrowed: OSError covers file-in-use, while
    # ValueError/SyntaxError cover literal_eval() on an empty or
    # half-written payload (the files above start out empty).
    opened = 0
    while not opened:
        try:
            with open('a_test.txt', 'r') as f:
                a = literal(f.read())
            opened = 1
        except (OSError, ValueError, SyntaxError):
            print ('a_test.txt in use, try READ again...')
    opened = 0
    while not opened:
        try:
            with open('b_test.txt', 'r') as f:
                b = literal(f.read())
            opened = 1
        except (OSError, ValueError, SyntaxError):
            print ('READ b_test.txt in use, try READ again...')
    print(clock(), '========', elapsed, b['b'], a['a'])
in this manner you can make object (like a dict) into string, write() to file, then:
ast.literal_eval
to get it back out on the other side when you read()
while not opened try
method prevents race condition so daemons and main process have time needed to not clash while they open/process/close the file
with open as file
method ensures file is opened and closed efficiently
added bonus is you can open the text file in an editor to check its state in real time.

Categories

Resources