Python multiprocessing queue not sending data to parent process

So I've got a Bluetooth connection from an Arduino reading a joystick and sending the axis readout to my Raspberry Pi (4B running Ubuntu 20.10). I've confirmed the Pi is receiving this data.
Now I'm trying to run this Bluetooth communication in a separate process using the Python multiprocessing module. To access the data from the Arduino, I pass the function a queue from the parent (main) process to put the data into. Then in the main function I continuously try to read from this queue and process the data there.
However, the queue in the parent process always remains empty, so I can't process the data any further.
How can I get the data from the bluetooth process back to the main process?
main.py
#!/usr/bin/env python3
import time
import logging
import multiprocessing as mp

import bluetoothlib

logging.basicConfig(level=logging.DEBUG)

logging.info("creating queue")
global q
q = mp.Queue()

def main():
    try:
        logging.info("starting bluetooth process")
        p = mp.Process(target=bluetoothlib.serlistener, args=(q,))
        p.start()
    except:
        logging.error("unable to start bluetooth listener")
    logging.info("start reading from queue")
    while True:
        #logging.info(q.qsize())
        if not q.empty():
            mss = q.get()
            logging.info(mss)
            #do something with data
        elif q.empty():
            logging.info("queue empty")
            time.sleep(1)

main()
bluetoothlib.py
#!/usr/bin/env python3
import os
import serial
import io

def serlistener(q):
    print("creating connection")
    btConn = serial.Serial("/dev/rfcomm0", 57600, timeout=1)
    btConn.flushInput()
    sio = io.TextIOWrapper(io.BufferedRWPair(btConn, btConn, 1), encoding="utf-8")
    print("connection created, starting listening")
    while btConn.is_open:
        try:
            mss = sio.readline()
            q.put(mss)
        except:
            print("error")
            break

At thelizardking34's suggestion, I took another look at the global variables I'd been messing with, and after correcting them, the code as now given in the question works.
Thanks to thelizardking34!
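For reference, the same structure can be written without the module-level global by creating the queue under the __main__ guard and passing it into main() explicitly. A minimal sketch of that pattern (not necessarily the exact change the asker made):
# sketch of main.py without a module-level global queue
import time
import logging
import multiprocessing as mp

import bluetoothlib

logging.basicConfig(level=logging.DEBUG)

def main(q):
    # the listener gets the queue passed in, same as before
    p = mp.Process(target=bluetoothlib.serlistener, args=(q,))
    p.start()
    while True:
        if not q.empty():
            logging.info(q.get())
        else:
            logging.info("queue empty")
            time.sleep(1)

if __name__ == "__main__":
    main(mp.Queue())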

Related

How to pass arguments to subprocess python instance

I'm opening multiple Python instances using the multiprocessing package and subprocess objects: basically 10 different Python instances, each with two sockets that serve as a client socket and a server socket.
Here is an example of how I launch two Python instances with two different files:
from time import sleep
from multiprocessing import Process
import subprocess

def task1():
    print('This is task1')
    subprocess.Popen(['python', 'server_client_pair1.py'])
    sleep(1)

def task2():
    # block for a moment
    sleep(1)
    # display a message
    print('This is task2')
    p1 = subprocess.Popen(['python', 'server_client_pair2.py'])
    sleep(1)

if __name__ == '__main__':
    # create a process
    process1 = Process(target=task1)
    sleep(.5)
    process2 = Process(target=task2)
    sleep(.5)
    # run the process
    process1.start()
    sleep(.5)
    process2.start()
    sleep(.5)
    # wait for the process to finish
    print('Waiting for the process...')
    process1.join()
    process2.join()
I need to pass an argument that changes the PORT variable (the port number) in the file ('server_client_pair.py'), incrementing it by 1 on every loop iteration.
Right now I have working code that uses 10 different server_client_pair.py files (server_client_pair1.py, server_client_pair2.py, server_client_pair3.py, etc.).
I'm wondering how to do this with just one file. Any help would be welcome.
*edited the post for more info
First you need to add argument handling to your server_client_pair.py file; then something like this will work for you:
def task1():
    for i in range(10):
        subprocess.Popen(['python', 'server_client_pair.py', str(i)])
        sleep(1)
Check here to learn how to pass arguments to your Python files:
https://www.tutorialspoint.com/python/python_command_line_arguments.htm
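On the receiving side, server_client_pair.py can read its index from sys.argv and derive the port from a base value. A minimal sketch, where BASE_PORT and the socket setup are illustrative placeholders rather than anything from the original post:
# server_client_pair.py -- sketch of reading the port offset from argv
import sys
import socket

BASE_PORT = 5000                       # hypothetical base port
offset = int(sys.argv[1]) if len(sys.argv) > 1 else 0
PORT = BASE_PORT + offset              # PORT, PORT+1, PORT+2, ... per launched instance

server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server.bind(('localhost', PORT))
server.listen(1)
print('Instance %d listening on port %d' % (offset, PORT))
# accept/connect logic omitted; the point is deriving PORT from the argument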

PyQt5 - Wait until data is written to QSharedMemory

I have 2 separate processes using QSharedMemory for IPC. One of them should have a thread waiting for the other process to write to this shared memory segment and, once that happens, perform an action (all of this without blocking the main thread).
The file consumer.py:
import time
import threading
from PyQt5.QtCore import QSystemSemaphore, QSharedMemory

sem = QSystemSemaphore("test-sem")
shmem = QSharedMemory("test-shmem")
shmem.create(64, mode=QSharedMemory.ReadWrite)

def wait_for_release():
    while True:
        sem.acquire()
        shmem.lock()
        data = shmem.data()
        data_str = str(data, encoding="utf-8")
        data_str = data_str.rstrip("\x00")
        shmem.unlock()
        print(f"Hello {data_str}!")

threading.Thread(target=wait_for_release).start()

# This should be running while the other thread waits for the semaphore to be released
while True:
    print("Waiting...")
    time.sleep(1)
producer.py
from PyQt5.QtCore import QSystemSemaphore, QSharedMemory, QByteArray

sem = QSystemSemaphore("test-sem")
shmem = QSharedMemory("test-shmem")
if shmem.attach(QSharedMemory.ReadWrite):
    str_bytes = QByteArray("World".encode("ascii"))
    shmem.lock()
    shmem.data()[:str_bytes.size()] = str_bytes
    # Release so the other process can print "Hello World!"
    sem.release()
What I expected when running consumer.py was the main thread printing "Waiting..." every second and, once I run producer.py, "Hello World!" being printed. Instead, the main thread blocks while the semaphore acquire (which runs in a separate thread) blocks. Does QSystemSemaphore block the whole process? Is there another alternative?
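One way to keep the main thread responsive without relying on a blocking acquire() is to poll the shared segment from the worker thread instead. This is only a workaround sketch, assuming an all-zero buffer means "no data yet"; it does not settle whether QSystemSemaphore.acquire() releases the GIL:
# consumer_polling.py -- workaround sketch: poll the shared segment instead of
# blocking on QSystemSemaphore.acquire() in the thread
import time
import threading
from PyQt5.QtCore import QSharedMemory

shmem = QSharedMemory("test-shmem")
shmem.create(64, mode=QSharedMemory.ReadWrite)

def poll_for_data():
    while True:
        shmem.lock()
        data_str = str(shmem.data(), encoding="utf-8").rstrip("\x00")
        shmem.unlock()
        if data_str:
            print(f"Hello {data_str}!")
            break
        time.sleep(0.1)  # short sleep instead of a blocking system call

threading.Thread(target=poll_for_data, daemon=True).start()

while True:
    print("Waiting...")
    time.sleep(1)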

Multiprocessing callback message

I have a long-running process, and I want to keep track of which state it is currently in. There are N processes running at the same time, hence the multiprocessing issue.
I pass a Queue into the process to report messages about its state, and this Queue is then read (if not empty) in a thread every couple of seconds.
I'm using Spyder on Windows as my environment, and the behavior described below is from its console. I did not try it in a different environment.
from multiprocessing import Process, Queue, Lock
from tqdm import tqdm  # needed for tqdm.write below (missing in the original snippet)
import time

def test(process_msg: Queue):
    try:
        process_msg.put('Inside process message')
        # process...
        return  # to have exitstate = 0
    except Exception as e:
        process_msg.put(e)

callback_msg = Queue()

if __name__ == '__main__':
    p = Process(target=test,
                args=(callback_msg,))
    p.start()
    time.sleep(5)
    print(p)
    while not callback_msg.empty():
        msg = callback_msg.get()
        if type(msg) != Exception:
            tqdm.write(str(msg))
        else:
            raise msg
The problem is that whatever I do with the code, it never reads what is inside the Queue (also because nothing is ever put into it). It only works when I switch to the dummy version, which runs similarly to threading on only 1 CPU: from multiprocessing.dummy import Process, Queue, Lock
Apparently the test function has to be in a separate file.
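A minimal sketch of that arrangement (the worker.py file name is just an example): the target function lives in its own importable module, and the main script imports it, so the spawned child process on Windows can find it by name.
worker.py
# worker.py -- the target function in its own importable module
from multiprocessing import Queue

def test(process_msg: Queue):
    try:
        process_msg.put('Inside process message')
        # process...
        return
    except Exception as e:
        process_msg.put(e)
main script
# main script -- imports the worker instead of defining it inline
from multiprocessing import Queue, Process
import time
from worker import test

if __name__ == '__main__':
    callback_msg = Queue()
    p = Process(target=test, args=(callback_msg,))
    p.start()
    time.sleep(5)
    while not callback_msg.empty():
        msg = callback_msg.get()
        if isinstance(msg, Exception):
            raise msg
        print(msg)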

wrpcap not creating pcap file, when running in separate thread

I'm trying to capture network traffic using scapy, by running sniff in a thread rather than in the main thread itself, so as to avoid blocking the app. But the problem I'm facing is that wrpcap is not creating a file, or if it does, it creates a 0 KB file.
The other reason I'm using a separate thread is that I want to be able to stop it when the user wishes to end the capture.
from scapy.all import *
from time import gmtime, strftime
import threading
import time

def bomber(stop_event):
    data = []
    pkts = []
    while not stop_event.isSet():
        pkt100 = sniff(count=100)
        data.append(pkt100)
    for pkt in data:
        for x in range(100):
            pkts.append(pkt[x])
    pktsRoll = PacketList(pkts)
    savename = "F:\\%s.pcap" % strftime("%Y-%m-%d,%H:%M", gmtime())
    wrpcap(savename, pktsRoll)
    print " its done "

def main():
    stop_event = threading.Event()
    c_thread = threading.Thread(target=bomber, args=(stop_event,))
    c_thread.start()
    time.sleep(20)
    stop_event.set()
This is the code that I'm trying out. I'm using Python 2.5.
Thanks in advance.
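A hedged guess at one thing to check: the strftime pattern %H:%M puts ':' characters into the file name, which Windows does not allow, so wrpcap may never be able to create the file on F:\. The variant below only changes the file-name format; everything else mirrors the original, and the cause is an assumption, not a confirmed fix:
# sketch of bomber() with a Windows-safe file name (no ':' characters)
from scapy.all import sniff, wrpcap, PacketList
from time import gmtime, strftime

def bomber(stop_event):
    pkts = []
    while not stop_event.isSet():
        pkts.extend(sniff(count=100))  # collect the captured packets directly
    # use '-' instead of ':' so the name is valid on Windows
    savename = "F:\\%s.pcap" % strftime("%Y-%m-%d_%H-%M", gmtime())
    wrpcap(savename, PacketList(pkts))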

python interprocess querying/control

I have this Python-based service daemon which is doing a lot of multiplexed IO (select).
From another script (also Python) I want to query this service daemon about status/information and/or control the processing (e.g. pause it, shut it down, change some parameters, etc.).
What is the best way to send control messages ("from now on you process like this!") and query processed data ("what was the result of that?") using Python?
I read somewhere that named pipes might work, but I don't know much about named pipes, especially in Python, or whether there are any better alternatives.
Both the background service daemon AND the frontend will be programmed by me, so all options are open :)
I am using Linux.
Pipes and named pipes are a good solution for communicating between different processes.
Pipes work like a shared memory buffer, but with an interface that mimics a simple file on each of its two ends. One process writes data at one end of the pipe, and another reads that data at the other end.
Named pipes are similar, except that the pipe is actually associated with a real file on your computer.
More details at
http://www.softpanorama.org/Scripting/pipes.shtml
In Python, named pipe files are created with the os.mkfifo call:
os.mkfifo(filename)
In the child and parent, open this pipe as a file (note that "in" is a reserved word in Python, so use different names):
fifo_out = os.open(filename, os.O_WRONLY)
fifo_in = open(filename, 'r')
To write:
os.write(fifo_out, 'xxxx')
To read:
lines = fifo_in.readline()
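Putting those pieces together, a minimal self-contained sketch (the /tmp path and message are illustrative; fork keeps the example in one file):
# fifo_demo.py -- minimal named-pipe round trip in one script
import os

path = "/tmp/demo_fifo"  # illustrative FIFO path
if not os.path.exists(path):
    os.mkfifo(path)

if os.fork() == 0:
    # child: reads one line from the pipe
    with open(path, "r") as fifo_in:
        print("child got:", fifo_in.readline().strip())
    os._exit(0)
else:
    # parent: writes one line into the pipe
    fifo_out = os.open(path, os.O_WRONLY)
    os.write(fifo_out, b"hello over the fifo\n")
    os.close(fifo_out)
    os.wait()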
Edit: Adding links from SO
Create a temporary FIFO (named pipe) in Python?
https://stackoverflow.com/search?q=python+named+pipes
You may want to read more on "IPC and Python"
http://www.freenetpages.co.uk/hp/alan.gauld/tutipc.htm
The best way to do IPC is using a message queue in Python, as below.
The server process, server.py (run this before running client.py and interact.py):
from multiprocessing.managers import BaseManager
import Queue

queue1 = Queue.Queue()
queue2 = Queue.Queue()

class QueueManager(BaseManager): pass

QueueManager.register('get_queue1', callable=lambda: queue1)
QueueManager.register('get_queue2', callable=lambda: queue2)

m = QueueManager(address=('', 50000), authkey='abracadabra')
s = m.get_server()
s.serve_forever()
The interactor, which handles the I/O, interact.py:
from multiprocessing.managers import BaseManager
import threading
import sys

class QueueManager(BaseManager): pass

QueueManager.register('get_queue1')
QueueManager.register('get_queue2')

m = QueueManager(address=('localhost', 50000), authkey='abracadabra')
m.connect()
queue1 = m.get_queue1()
queue2 = m.get_queue2()

def read():
    while True:
        sys.stdout.write(queue2.get())

def write():
    while True:
        queue1.put(sys.stdin.readline())

threads = []
threadr = threading.Thread(target=read)
threadr.start()
threads.append(threadr)
threadw = threading.Thread(target=write)
threadw.start()
threads.append(threadw)
for thread in threads:
    thread.join()
The client program, Client.py:
from multiprocessing.managers import BaseManager
import sys
import string
import os

class QueueManager(BaseManager): pass

QueueManager.register('get_queue1')
QueueManager.register('get_queue2')

m = QueueManager(address=('localhost', 50000), authkey='abracadabra')
m.connect()
queue1 = m.get_queue1()
queue2 = m.get_queue2()

class RedirectOutput:
    def __init__(self, stdout):
        self.stdout = stdout
    def write(self, s):
        queue2.put(s)

class RedirectInput:
    def __init__(self, stdin):
        self.stdin = stdin
    def readline(self):
        return queue1.get()

# redirect standard output and input
sys.stdout = RedirectOutput(sys.stdout)
sys.stdin = RedirectInput(sys.stdin)

# The test program which will take input and produce output
Text = raw_input("Enter Text:")
print "you have entered:", Text

def x():
    while True:
        x = raw_input("Enter 'exit' to end and some thing else to continue")
        print x
        if 'exit' in x:
            break

x()
This can be used to communicate between two processes over the network or on the same machine.
Remember that the interactor and server processes will not terminate until you manually kill them.
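The code above is written for Python 2. Under Python 3 the same server looks roughly like this, as a sketch: the Queue module is renamed to queue and authkey must be bytes.
# server3.py -- the BaseManager server as a Python 3 sketch
from multiprocessing.managers import BaseManager
import queue

queue1 = queue.Queue()
queue2 = queue.Queue()

class QueueManager(BaseManager): pass

QueueManager.register('get_queue1', callable=lambda: queue1)
QueueManager.register('get_queue2', callable=lambda: queue2)

m = QueueManager(address=('', 50000), authkey=b'abracadabra')  # authkey is bytes in Python 3
s = m.get_server()
s.serve_forever()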
