How do I get function outputs from multiprocessing in Python? - python

I have two functions and need their return values to proceed with the rest of the script, but currently my code gives only the output of the first function.
import multiprocessing

def gm(name):
    h = "Good Morning" + str(name)
    qout.put(h)

def sal(name):
    k = "Hi " + str(name)
    qout.put(k)

if __name__ == '__main__':
    qout = multiprocessing.Queue()
    p1 = multiprocessing.Process(target=gm, args=("ashin",))
    p2 = multiprocessing.Process(target=sal, args=("ashin",))
    p1.start()
    p2.start()
    p1.join()
    p2.join()
    result = qout.get()
    # output - "Good Morning ashin"
    # required output - "Good Morning ashin" & "Hi ashin"
Appreciate your help.

qout.get() returns only the first element from the queue. I don't know the bigger picture of what you're trying to achieve, but you can drain all elements from the queue as in the following.
from multiprocessing import Process, Queue

def gm(name, qout):
    h = "Good Morning" + str(name)
    qout.put(h)

def sal(name, qout):
    k = "Hi " + str(name)
    qout.put(k)

if __name__ == '__main__':
    qout = Queue()
    # Pass the queue explicitly so the workers also see it under the
    # "spawn" start method (e.g. on Windows), not only under "fork".
    p1 = Process(target=gm, args=("ashin", qout))
    p2 = Process(target=sal, args=("ashin", qout))
    p1.start()
    p2.start()
    p1.join()
    p2.join()

    list1 = []
    while not qout.empty():
        list1.append(qout.get())

    temp = list(map(str, list1))
    print(" & ".join(temp))
Output (the order depends on which worker puts its result first):
Hi ashin & Good Morningashin
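One caveat: the docs note that Queue.empty() is not reliable, since results can still be in flight when it is checked. Here both workers have already been joined, so it happens to be safe, but when the number of results is known in advance, a fixed number of get() calls is more robust. A minimal sketch of that pattern, reusing the same gm/sal functions as above:

from multiprocessing import Process, Queue

def gm(name, qout):
    qout.put("Good Morning " + str(name))

def sal(name, qout):
    qout.put("Hi " + str(name))

if __name__ == '__main__':
    qout = Queue()
    procs = [Process(target=f, args=("ashin", qout)) for f in (gm, sal)]
    for p in procs:
        p.start()
    # Each worker puts exactly one result, so exactly len(procs)
    # get() calls are expected; get() blocks until a result arrives.
    results = [qout.get() for _ in procs]
    for p in procs:
        p.join()
    print(" & ".join(results))

Draining the queue before join() also sidesteps the documented deadlock that can occur when you join a process that still has unflushed items sitting in a queue.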

Instead of managing your own output queue, you can use the concurrent.futures module from the Python 3 standard library:
from concurrent.futures import as_completed, ProcessPoolExecutor

def gm(name):
    return f'Good Morning {name}'

def sal(name):
    return f'Hi {name}'

if __name__ == '__main__':
    with ProcessPoolExecutor() as exe:
        futures = [exe.submit(x, 'ashin') for x in (gm, sal)]
        for future in as_completed(futures):
            print(future.result())
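Note that as_completed yields futures in the order they finish, not the order they were submitted. If you want results in submission order, iterate the futures list directly; a small variation on the snippet above:

from concurrent.futures import ProcessPoolExecutor

def gm(name):
    return f'Good Morning {name}'

def sal(name):
    return f'Hi {name}'

if __name__ == '__main__':
    with ProcessPoolExecutor() as exe:
        futures = [exe.submit(x, 'ashin') for x in (gm, sal)]
        # Iterating the list preserves submission order;
        # result() blocks until that particular future is done.
        for future in futures:
            print(future.result())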

Related

multiprocessing in Python: can I use variables as globals?

Hello, please help me with something.
I want to share a variable that is updated inside a loop.
My actual code has many variables and functions that handle them, and I have to use multiprocessing for other reasons, but it doesn't do what I want.
Below is a simplified version of the code; please help me.
from multiprocessing import Process, Manager
import time

def a(final_list):
    c = 0
    while True:
        c += 1
        final_list.append(c)
        time.sleep(1)
        print(final_list)

def b(final_list):
    while True:
        print(final_list[-1])
        time.sleep(1)

if __name__ == '__main__':
    manager = Manager()
    final_list = []
    final_list = manager.list()
    #print(a)
    p1 = Process(target=a, args=(final_list,))
    p2 = Process(target=b, args=(final_list,))
    p1.start()
    time.sleep(3)
    p2.start()
I think you forgot to call join() on the processes. Without join(), the main process reaches the end of the script, and the Manager it created is shut down on exit, so the children lose access to the shared list. Try this:
from multiprocessing import Process, Manager
import time

def a(final_list):
    c = 0
    while True:
        c += 1
        final_list.append(c)
        time.sleep(1)
        print(final_list)

def b(final_list):
    while True:
        print(final_list[-1])
        time.sleep(1)

if __name__ == '__main__':
    with Manager() as manager:
        final_list = manager.list()
        p1 = Process(target=a, args=(final_list,))
        p2 = Process(target=b, args=(final_list,))
        p1.start()
        time.sleep(3)
        p2.start()
        p1.join()
        p2.join()

EOFError when communicating between processes using SharedMemoryManager

The following code shares a list between processes using SharedMemoryManager, but I don't know why it raises EOFError: Ran out of input.
from multiprocessing.managers import SharedMemoryManager
from multiprocessing import Process
import time

def train(sl, process_name):
    for i in range(100):
        sl[0] = i
        time.sleep(0.2)

def debug3d(cls, process_name):
    for i in range(100):
        print("from", process_name, cls)
        time.sleep(0.2)

class Cls:
    def __init__(self, smm):
        self.smm = smm

if __name__ == "__main__":
    with SharedMemoryManager() as smm:
        sl = smm.ShareableList([1])
        p1 = Process(target=train, args=(sl, "process1"))
        instance = Cls(smm)
        p2 = Process(target=debug3d, args=(instance, "process2"))
        p1.start()
        p2.start()
        p1.join()
        p2.join()
However, when I pass the ShareableList as a parameter of debug3d, it works without an error:
from multiprocessing.managers import SharedMemoryManager
from multiprocessing import Process
import time

def train(sl, process_name):
    for i in range(100):
        sl[0] = i
        time.sleep(0.2)

def debug3d(sl, process_name):
    user_cls = Cls(sl)
    for i in range(100):
        print("from", process_name, user_cls.smm)
        time.sleep(0.2)

class Cls:
    def __init__(self, smm):
        self.smm = smm

if __name__ == "__main__":
    with SharedMemoryManager() as smm:
        sl = smm.ShareableList([1])
        p1 = Process(target=train, args=(sl, "process1"))
        p2 = Process(target=debug3d, args=(sl, "process2"))  # I changed here
        p1.start()
        p2.start()
        p1.join()
        p2.join()
I can't figure out the difference between the two.
Thanks for your help in advance.
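A hedged observation, since the question is left open here: a ShareableList is designed to be handed to other processes (it pickles by shared-memory name and reattaches in the child), while the manager object itself holds a live server process handle and an authkey that refuse to pickle; a pickle that fails partway in the parent is one plausible source of the truncated stream the child reports as EOFError: Ran out of input. A quick sketch to see the difference:

import pickle
from multiprocessing.managers import SharedMemoryManager

if __name__ == "__main__":
    with SharedMemoryManager() as smm:
        sl = smm.ShareableList([1])
        data = pickle.dumps(sl)   # fine: reconstructed by name in the child
        print("ShareableList pickled to", len(data), "bytes")
        try:
            pickle.dumps(smm)     # the manager itself is not picklable
        except Exception as e:
            print("manager refused to pickle:", e)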

Multiprocessing and variable return?

I have been banging my head against Multiprocessing in Python for the better part of the day now, and I've managed to make very little progress - I apologize if my question is a duplicate or my ignorance is apparent - I couldn't find it asked anywhere else in this way.
I'm looking for a way to run functions in parallel, and return some arbitrary thing they've produced back to the main script.
The question is: Can a Process() started from Multiprocessing return a list or some other arbitrary variable type?
For example, I would like to:
import multiprocessing

def thirty_second_function():
    # pretend this takes 30 seconds to run
    return ["mango", "habanero", "salsa"]

def five_second_function():
    # pretend this takes 5 seconds to run
    return {"beans": "8 oz", "tomato paste": "16 oz"}

p1 = multiprocessing.Process(target=thirty_second_function)
p1.start()
p2 = multiprocessing.Process(target=five_second_function)
p2.start()

# Somehow retrieve the list and the dictionary here. p1.returned??
And then somehow access the list from thirty_second_function and the dictionary from five_second_function. Is this possible? Am I going about this the wrong way?
Process itself does not provide a way to get a return value. To exchange data between processes, you need to use a queue, a pipe, shared memory, etc.:
import multiprocessing

def thirty_second_function(q):
    q.put(["mango", "habanero", "salsa"])

def five_second_function(q):
    q.put({"beans": "8 oz", "tomato paste": "16 oz"})

if __name__ == '__main__':
    q1 = multiprocessing.Queue()
    p1 = multiprocessing.Process(target=thirty_second_function, args=(q1,))
    p1.start()

    q2 = multiprocessing.Queue()
    p2 = multiprocessing.Process(target=five_second_function, args=(q2,))
    p2.start()

    print(q1.get())
    print(q2.get())
Alternative using multiprocessing.pool.Pool:
import multiprocessing.pool

def thirty_second_function():
    return ["mango", "habanero", "salsa"]

def five_second_function():
    return {"beans": "8 oz", "tomato paste": "16 oz"}

if __name__ == '__main__':
    p = multiprocessing.pool.Pool()
    p1 = p.apply_async(thirty_second_function)
    p2 = p.apply_async(five_second_function)
    print(p1.get())
    print(p2.get())
Or using the concurrent.futures module (in the standard library since Python 3.2):
from concurrent.futures import ProcessPoolExecutor

def thirty_second_function():
    return ["mango", "habanero", "salsa"]

def five_second_function():
    return {"beans": "8 oz", "tomato paste": "16 oz"}

if __name__ == '__main__':
    with ProcessPoolExecutor() as e:
        p1 = e.submit(thirty_second_function)
        p2 = e.submit(five_second_function)
        print(p1.result())
        print(p2.result())

Python: unable to run multiple processes

I'm trying to run two separate processes in my Python application, so I have code like this:
from multiprocessing import Process

def f1():
    while 1:
        print('Hello')

def f2():
    while 1:
        print('Goodbye')

def main():
    p1 = Process(target=f1, args=())
    p1.start()
    p1.join()
    p2 = Process(target=f2, args=())
    p2.start()
    p2.join()

if __name__ == '__main__':
    main()
This code does nothing on my machine; it doesn't produce any output. I thought initially that maybe it was an IDE-related problem, but it's the same on both my IDEs, PyScripter and IDLE.
Any ideas, why this doesn't print anything?
How about using a Queue?
from multiprocessing import Process, Queue

def f1(q):
    while 1:
        q.put('Hello')

def f2(q):
    while 1:
        q.put('Goodbye')

def main():
    q = Queue()
    p1 = Process(target=f1, args=(q,))
    p1.start()
    p2 = Process(target=f2, args=(q,))
    p2.start()
    while True:
        try:
            print(q.get())
        except KeyboardInterrupt:
            break

if __name__ == '__main__':
    main()
You should save the script and run it outside the IDE:
C:\> python multi.py
It then prints Hello forever: p1.join() blocks until f1 returns, and f1 never does, so p2 is never started. To see both Hello and Goodbye, start both processes before joining either:
def main():
    p1 = Process(target=f1, args=())
    p2 = Process(target=f2, args=())
    p1.start()
    p2.start()
    p1.join()
    p2.join()
Then you have a happy little race condition that constantly prints out GHoodbyeello, because both processes use the same stdout resource concurrently.
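If the interleaving matters, one common fix is to serialize writes with a shared multiprocessing.Lock. A minimal sketch of that idea (the lock plumbing is an addition, not part of the original answer; like the original, it loops forever):

from multiprocessing import Process, Lock

def f1(lock):
    while 1:
        with lock:                       # one writer at a time
            print('Hello', flush=True)

def f2(lock):
    while 1:
        with lock:
            print('Goodbye', flush=True)

def main():
    lock = Lock()
    p1 = Process(target=f1, args=(lock,))
    p2 = Process(target=f2, args=(lock,))
    p1.start()
    p2.start()
    p1.join()
    p2.join()

if __name__ == '__main__':
    main()

The flush=True matters here: without it, a worker could release the lock while its text still sits in a userspace buffer.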

Python: passing a variable from one process to another (multiprocessing)

How can I pass a variable from _While.py to _Scheduler.py in this multiprocessing scenario?
I've tried a lot of things, but nothing seems to work.
Thread.py
from multiprocessing import Process
import _While
import _Scheduler

if __name__ == '__main__':
    p1 = Process(target=_While.main)
    p1.start()
    p2 = Process(target=_Scheduler.main)
    p2.start()
_While.py
import time

def main():
    while True:
        print "while"
        time.sleep(0.5)
        """getting button status"""
        """giving button status to _Scheduler._scheduler"""
_Scheduler.py
import logging
import time
from apscheduler.scheduler import Scheduler
from _While import """button status"""

def _scheduler():
    print "scheduler"
    while """button status""" == True:
        print "Button is pressed"
        time.sleep(0.5)

def main():
    logging.basicConfig()
    scheduler = Scheduler(standalone=True)
    scheduler.add_interval_job(_scheduler, seconds=2)
    scheduler.start()

if __name__ == '__main__':
    main()
Solution:
Thread.py
from multiprocessing import Process, Value, Array
import time
import _While
import _Scheduler

if __name__ == '__main__':
    num = Value('d', 0.0)
    arr = Array('i', range(10))
    p1 = Process(target=_While.main, args=(num, arr))
    p1.start()
    p2 = Process(target=_Scheduler.main, args=(num, arr))
    p2.start()
    p1.join()
    p2.join()
    print num.value
_While.py
import time

def main(num, arr):
    while True:
        print "while"
        num.value = 1
        time.sleep(10)
        """getting button status"""
        """giving button status to _Scheduler._scheduler"""
_Scheduler.py
import logging
from apscheduler.scheduler import Scheduler

def _scheduler(num, arr):
    while True:
        print num.value
        if num.value == 1:
            print "mongo"
            num.value = 0
            break

def main(num, arr):
    logging.basicConfig()
    scheduler = Scheduler(standalone=True)
    scheduler.add_interval_job(_scheduler, args=(num, arr), seconds=2)
    scheduler.start()

if __name__ == '__main__':
    main()
The only problem left is that I can't seem to use Value without also using Array.
Create an instance of multiprocessing.Value in Thread.py just before you create p1 and p2, then pass that instance in args to both p1 and p2, and change the main() functions of _While.py and _Scheduler.py to accept the new Value parameter.
This is similar to how it is done here: http://docs.python.org/2/library/multiprocessing.html#sharing-state-between-processes
You could also use queues or pipes, as suggested by Eugene C.; an example can be found here: http://docs.python.org/2/library/multiprocessing.html#exchanging-objects-between-processes
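A minimal self-contained sketch of that suggestion, in Python 3 syntax; the producer/consumer names are illustrative stand-ins for _While.main and _Scheduler._scheduler, and it also shows that a Value works fine without an Array:

from multiprocessing import Process, Value
import time

def producer(button_status):
    # Stand-in for _While.main: sets the shared flag.
    for _ in range(3):
        button_status.value = 1
        time.sleep(1)

def consumer(button_status):
    # Stand-in for _Scheduler._scheduler: reads and resets the flag.
    for _ in range(6):
        if button_status.value == 1:
            print("Button is pressed")
            button_status.value = 0
        time.sleep(0.5)

if __name__ == '__main__':
    button_status = Value('i', 0)   # a shared int; no Array required
    p1 = Process(target=producer, args=(button_status,))
    p2 = Process(target=consumer, args=(button_status,))
    p1.start()
    p2.start()
    p1.join()
    p2.join()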
