How to exchange data via Pipe in Python? [duplicate] - python

Does Windows support multiprocessing.Pipe()? If yes, then what is wrong with this code? Do I need to be using reduction? The code hangs on p2.recv() and I get a RuntimeError when run from the command line.
import multiprocessing

def ProcessCreator(pipe):
    """Child-process target: send a greeting back through the pipe."""
    pipe.send("hello from other process")

# NOTE(review): this is the asker's buggy version. The Pipe/Process code
# runs at module top level, so on Windows (spawn start method) the child
# process re-imports this module and re-executes these lines, which is
# what raises the RuntimeError the asker describes.
p1, p2 = multiprocessing.Pipe()
proc = multiprocessing.Process(target=ProcessCreator, args=(p2,))
proc.start()
print(p1.recv())

if __name__ == "__main__":
    multiprocessing.freeze_support()

You need to put the pipe code into the if __name__ == '__main__' part. (Why?) And change p2.recv to p1.recv.
import multiprocessing

def ProcessCreator(pipe):
    """Child-process target: send a greeting back through the pipe."""
    pipe.send("hello from other process")

if __name__ == "__main__":
    # On Windows the child process re-imports this module, so everything
    # that spawns processes must be guarded by the __main__ check.
    multiprocessing.freeze_support()
    p1, p2 = multiprocessing.Pipe()
    proc = multiprocessing.Process(target=ProcessCreator, args=(p2,))
    proc.start()
    print(p1.recv())  # read from the parent end of the pipe

Related

How do I get function outputs from multiprocessing in Python?

I have two functions and need the return values to proceed with the further part of the script... but currently my code gives only the output of the first function...
import multiprocessing

def gm(name):
    h = "Good Morning" + str(name)  # NOTE(review): missing space before the name
    qout.put(h)  # relies on the global `qout`; NameError under the spawn start method

def sal(name):
    k = "Hi " + str(name)
    qout.put(k)

if __name__ == '__main__':
    qout = multiprocessing.Queue()
    p1 = multiprocessing.Process(target=gm, args=("ashin",))
    p2 = multiprocessing.Process(target=sal, args=("ashin",))
    p1.start()
    p2.start()
    p1.join()
    p2.join()
    result = qout.get()  # gets only ONE queued item -- this is the asker's problem
    #output - "Good Morning ashin"
    #required output - "Good Morning ashin" & "Hi ashin"
Appreciate your help......
qout.get() gets you the first element from the queue. I do not know the bigger picture of what you are trying to achieve, but you can get all elements from the queue like in the following.
from multiprocessing import Process, Queue

def gm(name):
    h = "Good Morning" + str(name)
    qout.put(h)  # NOTE(review): global `qout` -- works with fork, fails under spawn

def sal(name):
    k = "Hi " + str(name)
    qout.put(k)

if __name__ == '__main__':
    qout = Queue()
    p1 = Process(target=gm, args=("ashin",))
    p2 = Process(target=sal, args=("ashin",))
    p1.start()
    p2.start()
    p1.join()
    p2.join()
    # Drain the queue: a single qout.get() returns only one element, so
    # loop until the queue reports empty to collect every result.
    list1 = []
    while not qout.empty():
        list1.append(qout.get())
    temp = list(map(str, list1))
    print(" & ".join(temp))
output
Hi ashin & Good Morningashin
Instead of managing your own output queue, just use the latest Python 3 concurrency features:
from concurrent.futures import as_completed, ProcessPoolExecutor

def gm(name):
    """Return a morning greeting for *name*."""
    return f'Good Morning {name}'

def sal(name):
    """Return a plain greeting for *name*."""
    return f'Hi {name}'

if __name__ == '__main__':
    # Let the executor manage the worker processes and result passing;
    # futures complete (and print) in arbitrary order.
    with ProcessPoolExecutor() as exe:
        futures = [exe.submit(x, 'ashin') for x in (gm, sal)]
        for future in as_completed(futures):
            print(future.result())

Python; unable to run multiple processes

I'm trying to run 2 separate processes in my python application. So I have code like this:
from multiprocessing import Process

def f1():
    while 1:
        print('Hello')

def f2():
    while 1:
        print('Goodbye')

def main():
    p1 = Process(target=f1, args=())
    p1.start()
    # NOTE(review): p1.join() blocks forever on f1's infinite loop,
    # so p2 below is never started -- this is the asker's problem.
    p1.join()
    p2 = Process(target=f2, args=())
    p2.start()
    p2.join()

if __name__ == '__main__':
    main()
This code does nothing on my machine, it doesn't produce any output. I thought initially that maybe it was an IDE-related problem, but it's the same on both my IDEs, PyScripter and IDLE.
Any ideas, why this doesn't print anything?
How about using Queue?
from multiprocessing import Process, Queue

def f1(q):
    while 1:
        q.put('Hello')

def f2(q):
    while 1:
        q.put('Goodbye')

def main():
    # Route both workers' output through a queue so the parent process
    # owns stdout and the two messages cannot interleave mid-line.
    q = Queue()
    p1 = Process(target=f1, args=(q,))
    p1.start()
    p2 = Process(target=f2, args=(q,))
    p2.start()
    while True:
        try:
            print(q.get())
        # NOTE(review): bare except kept from the original answer; q.get()
        # blocks rather than raising, so in practice this loop never ends.
        except:
            break

if __name__ == '__main__':
    main()
You should save it and run outside the IDE:
C:\> python multi.py
then it infinitely prints out Hello. You should change your main to see both Hello and Goodbye:
def main():
    # Start both processes before joining either; joining p1 first would
    # block forever on its infinite loop and p2 would never run.
    p1 = Process(target=f1, args=())
    p2 = Process(target=f2, args=())
    p1.start()
    p2.start()
    p1.join()
    p2.join()
Then you have a little happy race condition that constantly prints out GHoodbyeello because both processes use the same stdout resource concurrently.

Python: passing a variable from one thread to the other (multithreading)

How do I pass a variable from _While.py to __Scheduler.py in this multithreading scenario?
I tried so much stuff but nothing seems to be working.
Thread.py
from multiprocessing import Process

import _While
import _Scheduler

if __name__ == '__main__':
    # Run the busy loop and the scheduler as two independent processes.
    p1 = Process(target=_While.main)
    p1.start()
    p2 = Process(target=_Scheduler.main)
    p2.start()
_While.py
import time

def main():
    # Busy loop: print a heartbeat twice a second; the quoted strings
    # below are the asker's placeholders for the real button logic.
    while True:
        print("while")
        time.sleep(0.5)
        """getting button status"""
        """giving button status to _Scheudler._scheduler"""
__Scheduler.py
import logging
import time
from apscheduler.scheduler import Scheduler
from _While import """button status"""
def _scheduler():
print "scheduler"
while """button status"""==True:
print "Button is pressed"
time.sleep(0.5)
def main():
logging.basicConfig()
scheduler = Scheduler(standalone=True)
scheduler.add_interval_job(_scheduler, seconds=2)
scheduler.start()
if __name__ == '__main__':
main()
Solution:
Thread.py
from multiprocessing import Process, Value, Array
import time

import _While
import _Scheduler

if __name__ == '__main__':
    # Shared-memory objects handed to both children so they can exchange state.
    num = Value('d', 0.0)
    arr = Array('i', range(10))
    p1 = Process(target=_While.main, args=(num, arr))
    p1.start()
    p2 = Process(target=_Scheduler.main, args=(num, arr))
    p2.start()
    p1.join()
    p2.join()
    print(num.value)
_While
import time

def main(num, arr):
    # Producer side: raise the shared flag every 10 seconds; the quoted
    # strings below are the asker's placeholders for real button logic.
    while True:
        print("while")
        num.value = 1  # signal "button pressed" to the scheduler process
        time.sleep(10)
        """getting button status"""
        """giving button status to _Scheudler._scheduler"""
__Scheduler.py
import logging

from apscheduler.scheduler import Scheduler

def _scheduler(num, arr):
    # Consumer side: poll the shared flag, then consume it (reset to 0).
    while True:
        print(num.value)
        if num.value == 1:
            print("mongo")
            num.value = 0
            break

def main(num, arr):
    logging.basicConfig()
    scheduler = Scheduler(standalone=True)
    scheduler.add_interval_job(_scheduler, args=(num, arr), seconds=2)
    scheduler.start()

if __name__ == '__main__':
    # NOTE(review): main() requires (num, arr); calling it bare as below
    # raises TypeError -- this file is meant to be started via Thread.py.
    main()
The only problem left is that I can't use Value without using Array
Create an instance of multiprocessing.Value in Thread.py just before you create p1 and p2, then pass the instance of Value as args to both p1 and p2 and change the main() method of _While.py and _Scheduler.py to accept the new Value parameter.
Similar to how it is done here http://docs.python.org/2/library/multiprocessing.html#sharing-state-between-processes
You could also use Queues or Pipes as suggested by Euegene C. an example can be found here http://docs.python.org/2/library/multiprocessing.html#exchanging-objects-between-processes

Multiprocessing pipes on windows with python

Does Windows support multiprocessing.Pipe()? If yes, then what is wrong with this code? Do I need to be using reduction? The code hangs on p2.recv() and I get a RuntimeError when run from the command line.
import multiprocessing

def ProcessCreator(pipe):
    """Child-process target: send a greeting back through the pipe."""
    pipe.send("hello from other process")

# NOTE(review): this is the asker's buggy version. The Pipe/Process code
# runs at module top level, so on Windows (spawn start method) the child
# process re-imports this module and re-executes these lines, which is
# what raises the RuntimeError the asker describes.
p1, p2 = multiprocessing.Pipe()
proc = multiprocessing.Process(target=ProcessCreator, args=(p2,))
proc.start()
print(p1.recv())

if __name__ == "__main__":
    multiprocessing.freeze_support()
You need to put the pipe code into the if __name__ == '__main__' part. (Why?) And change p2.recv to p1.recv.
import multiprocessing

def ProcessCreator(pipe):
    """Child-process target: send a greeting back through the pipe."""
    pipe.send("hello from other process")

if __name__ == "__main__":
    # On Windows the child process re-imports this module, so everything
    # that spawns processes must be guarded by the __main__ check.
    multiprocessing.freeze_support()
    p1, p2 = multiprocessing.Pipe()
    proc = multiprocessing.Process(target=ProcessCreator, args=(p2,))
    proc.start()
    print(p1.recv())  # read from the parent end of the pipe

In the multiprocessing module of Python, how to find which worker process has executed the job

Is there any way to find out which worker process among the Pool has executed a specific job?
For example,
def start_exe():
#execute some bunch of statements
if __name__ == '__main__':
p = Pool(5)
result = p.apply.async(start_exe)
print result.get()
I don't see any API for that, but you can embed the name of the process that did the job in the result:
from multiprocessing import Pool, current_process

def start_exe():
    # Embed the worker's identity in the result so the caller can tell
    # which pool process ran the job.
    return 'done', current_process().name

if __name__ == '__main__':
    p = Pool(5)
    result = p.apply_async(start_exe)
    print(result.get())
Example output:
('done', 'PoolWorker-4')

Categories

Resources