I have a program:
from multiprocessing import Process
import time


def plus():
    """Print "+" once per second, ten times."""
    for i in range(10):
        print("+")
        time.sleep(1)


def star():
    """Print "&" once per second, ten times."""
    for i in range(10):
        # Was print("%"): the sample output below shows "&", and the
        # class-based Star worker later in the text also prints "&",
        # so "%" was a transcription typo.
        print("&")
        time.sleep(1)


if __name__ == '__main__':
    p1 = Process(target=plus)
    p1.start()
    p2 = Process(target=star)
    p2.start()
    # Both processes are started before either join, so they run
    # concurrently and their output interleaves.
    p1.join()
    p2.join()
    print('FINISH')
the output is more or less the same:
+&+&+&+&+&+&+&+&+&+& FINISH
I am trying to achieve the same thing by passing an object's method into Process, as follows:
import time


class Plus:
    """Worker whose run() prints "+" ten times, pausing one second each time."""

    def run(self):
        for _ in range(10):
            print("+")
            time.sleep(1)
import time


class Star:
    """Worker whose run() prints "&" ten times, pausing one second each time."""

    def run(self):
        for _ in range(10):
            print("&")
            time.sleep(1)
from Plus import Plus
from Star import Star
from multiprocessing import Process


class ThreadsCreation:
    """Create one process per worker object and run them concurrently."""

    def run(self):
        if __name__ == '__main__':
            p = Plus()
            g = Star()
            # Pass the bound method itself, do NOT call it:
            # `target=p.run()` executes run() right here in the parent
            # (printing all "+" sequentially) and hands Process its return
            # value (None) as the target — which is why the original
            # produced "++++++++++&&&&&&&&&&" instead of interleaving.
            process1 = Process(target=p.run)
            process2 = Process(target=g.run)
            # Start BOTH processes before joining either; start/join/start
            # would serialize them even with correct targets.
            process1.start()
            process2.start()
            process1.join()
            process2.join()
            print('FINISH')


ThreadsCreation().run()
In the second approach the output is always:
++++++++++&&&&&&&&&&FINISH
How to achieve the output of the first example with the object approach from the second one?
Related
Hello, I could use some help.
I want to read, from one process, a variable that is being updated inside a loop in another process.
My actual code has many variables and many functions that manipulate them,
and I have to use multiprocessing for other reasons, but it does not behave the way I want.
Below is a simplified example;
please help me.
from multiprocessing import Process, Manager
import time

# Producer: appends 1, 2, 3, ... to the shared list once per second, forever.
def a(final_list):
    c=0
    while True:
        c += 1
        final_list.append(c)
        time.sleep(1)
        print(final_list)

# Consumer: prints the newest element once per second, forever.
# NOTE(review): raises IndexError if it ever runs before the first append.
def b(final_list):
    while True:
        print(final_list[-1])
        time.sleep(1)

if __name__ == '__main__':
    manager = Manager()
    final_list = []  # NOTE(review): dead assignment, immediately replaced below
    final_list = manager.list()
    #print(a)
    p1 = Process(target=a, args=(final_list,))
    p2 = Process(target=b, args=(final_list,))
    p1.start()
    time.sleep(3)  # give the producer a head start so the list is non-empty
    p2.start()
    # NOTE(review): no p1.join()/p2.join() here -- the answer below adds them,
    # keeping the Manager alive while the children use its proxy list.
I think you forgot to use join() for the processes. try this:
from multiprocessing import Process, Manager
import time


def a(final_list):
    """Producer: append successive integers once per second, forever."""
    counter = 1
    while True:
        final_list.append(counter)
        time.sleep(1)
        print(final_list)
        counter += 1


def b(final_list):
    """Consumer: print the most recent element once per second, forever."""
    while True:
        newest = final_list[-1]
        print(newest)
        time.sleep(1)


if __name__ == '__main__':
    with Manager() as manager:
        shared = manager.list()
        producer = Process(target=a, args=(shared,))
        consumer = Process(target=b, args=(shared,))
        producer.start()
        time.sleep(3)  # let the producer publish a first element
        consumer.start()
        producer.join()  # both workers loop forever, so these joins block
        consumer.join()
The following code communicates a list between processes by using SharedMemoryManager, but I don't know why it causes EOFError: Ran out of input
from multiprocessing.managers import SharedMemoryManager
from multiprocessing import Process
import time


def train(sl, process_name):
    """Writer: store 0..99 into slot 0 of the shared list, 5 times per second."""
    for i in range(100):
        sl[0] = i
        time.sleep(0.2)


def debug3d(sl, process_name):
    """Reader: wrap the shared list in Cls inside the child and print it."""
    # Build the Cls wrapper HERE, in the child process.  The ShareableList
    # pickles cleanly, but a Cls instance holding the SharedMemoryManager
    # itself does not: a running manager cannot be pickled, which is the
    # likely cause of the reported "EOFError: Ran out of input" when the
    # child tried to unpickle its arguments.
    user_cls = Cls(sl)
    for i in range(100):
        print("from", process_name, user_cls.smm)
        time.sleep(0.2)


class Cls:
    """Thin holder for whatever shared object is passed in."""

    def __init__(self, smm):
        self.smm = smm


if __name__ == "__main__":
    with SharedMemoryManager() as smm:
        sl = smm.ShareableList([1])
        p1 = Process(target=train, args=(sl, "process1"))
        # Pass the picklable ShareableList, NOT Cls(smm): see debug3d above.
        p2 = Process(target=debug3d, args=(sl, "process2"))
        p1.start()
        p2.start()
        p1.join()
        p2.join()
However, when I pass the ShareableList itself as the parameter of debug3d, it works without an error:
from multiprocessing.managers import SharedMemoryManager
from multiprocessing import Process
import time


def train(sl, process_name):
    """Writer: store 0..99 into slot 0 of the shared list, 5 times per second."""
    for step in range(100):
        sl[0] = step
        time.sleep(0.2)


def debug3d(sl, process_name):
    """Reader: wrap the shared list in Cls and print it repeatedly."""
    user_cls = Cls(sl)
    for _ in range(100):
        print("from", process_name, user_cls.smm)
        time.sleep(0.2)


class Cls:
    """Thin holder for whatever shared object is passed in."""

    def __init__(self, smm):
        self.smm = smm


if __name__ == "__main__":
    with SharedMemoryManager() as smm:
        sl = smm.ShareableList([1])
        writer = Process(target=train, args=(sl, "process1"))
        # The change described in the text: hand over the list, not Cls(smm).
        reader = Process(target=debug3d, args=(sl, "process2"))
        writer.start()
        reader.start()
        writer.join()
        reader.join()
I can't figure out the difference between two.
Thanks for your help in advance.
I'm using a multiprocessing library, running two functions at the same time. I want to at some point finish one of the two functions and when that function finishes the other function should also end.
Example:
from multiprocessing import Process

# Worker that stops on its own: prints 'a' once, then the break ends the loop.
def a():
    while True:
        print('a')
        break

# Worker that never stops on its own: prints 'b' forever.
def b():
    while True:
        print('b')

if __name__ == '__main__':
    # NOTE(review): the processes are only constructed here; the example
    # never calls start()/join() -- it is a minimal sketch for the question.
    pA = Process(target=a)
    pB = Process(target=b)
In the code above, how would I arrange it so that when function a stops, function b (which repeats forever) is consequently stopped as well?
Thanks in advance
Use a multiprocessing.Event to communicate to b() that a() ended.
from multiprocessing import Process, Event


def a(stopEvent):
    """Do the finite work (a single 'a'), then signal that it is finished."""
    while True:
        print('a')
        break
    stopEvent.set()  # tell b() that a() has ended


def b(stopEvent):
    """Print 'b' repeatedly until a() raises the stop event."""
    while True:
        print('b')
        if stopEvent.is_set():
            break


if __name__ == '__main__':
    done_flag = Event()
    worker_a = Process(target=a, args=(done_flag,))
    worker_b = Process(target=b, args=(done_flag,))
    worker_b.start()
    worker_a.start()
    worker_a.join()
    worker_b.join()
You can use a Queue from one Process to the other:
# soMultiprocessingOneProcessEndsAnother
from multiprocessing import Process, Queue
import time


def a(q):
    """Finite worker: print 'a' once, then drop a sentinel on the queue."""
    while True:
        print('a')
        break
    q.put(None)  # sentinel telling b() to stop polling
    print('a Done')


def b(q):
    """Poll until a() posts its sentinel, printing 'b' in the meantime."""
    while q.empty():
        print('b')
        time.sleep(0.01)
    print('b Done')


if __name__ == '__main__':
    signal_queue = Queue()
    finite_proc = Process(target=a, args=(signal_queue,))
    endless_proc = Process(target=b, args=(signal_queue,))
    endless_proc.start()
    time.sleep(0.1)  # let b() print a few lines before a() runs
    finite_proc.start()
    finite_proc.join()
    endless_proc.join()
    print('m Done')
Sample Output:
b
b
b
b
b
b
b
a
a Done
b Done
m Done
Why is the while loop in work1 being ignored? I would like to update the value in a loop inside work1 and print that value from the work2 process. I also tried a Queue, but the problem is that I have only one variable, which I want to update in work1 and read in work2.
from multiprocessing import Process, Manager, Value
from ctypes import c_char_p
import time

# Writer: stores a single value into the shared Value, then returns.
def work1(string):
    i = 2
    string.value = i
    # while True:
    #     print("work1")
    #     string.value = i + 1
    #     time.sleep(2)

# Reader: reports the shared value every 2 seconds, forever.
def work2(string):
    while True:
        print("Value set in work1 " + str(string.value))
        time.sleep(2)

if __name__ == '__main__':
    manager = Manager()
    string = manager.Value(int, 0);
    p1=Process(target=work1, args=(string,))
    p1.start()
    # NOTE(review): p1 is joined BEFORE p2 is even created, so the two
    # processes run strictly one after the other, never in parallel --
    # this is what the answer below changes.
    p1.join()
    p2=Process(target=work2, args=(string,))
    p2.start()
    # NOTE(review): work2 loops forever, so this join never returns.
    p2.join()
That is because you didn't make your program parallel with two processes, but instead, two processes run in tandem. What you need to do is to start both process before any join. Like my modification below:
from multiprocessing import Process, Manager, Value
from ctypes import c_char_p
import time


def work1(string):
    """Writer: publish 2, then keep incrementing the shared value every 2 s."""
    current = 2
    string.value = current
    while True:
        current += 1
        string.value = current
        print("work1 set value to "+str(string.value))
        time.sleep(2)


def work2(string):
    """Reader: report the shared value every 2 seconds, forever."""
    while True:
        snapshot = str(string.value)
        print("Value set in work1 " + snapshot)
        time.sleep(2)


if __name__ == '__main__':
    manager = Manager()
    string = manager.Value(int, 0, lock=False)
    writer = Process(target=work1, args=(string,))
    reader = Process(target=work2, args=(string,))
    writer.start()
    reader.start()
    # Both workers loop forever, so neither join ever returns (see the
    # remark that follows this snippet).
    reader.join()
    writer.join()
Indeed, if you write the code in this way, the join never happened due to the infinite while loop.
How can I pass a variable from _While.py to _Scheduler.py in this multiprocessing scenario?
I tried so much stuff but nothing seems to be working.
Thread.py
from multiprocessing import Process
import _While
import _Scheduler

# Launcher: run the busy-wait loop and the scheduler as separate processes.
if __name__ == '__main__':
    p1 = Process(target=_While.main)
    p1.start()
    p2 = Process(target=_Scheduler.main)
    p2.start()
    # NOTE(review): no shared state is passed to the targets and there are
    # no join() calls -- the "Solution" version below adds both.
_While.py
import time

# Poll loop (Python 2 syntax): prints "while" twice per second, forever.
def main():
    while True:
        print "while"
        time.sleep(0.5)
        # The bare string literals below are the author's placeholders for
        # code still to be written; they are no-op expression statements.
        """getting button status"""
        """giving button status to _Scheudler._scheduler"""
__Scheduler.py
import logging
import time
from apscheduler.scheduler import Scheduler
# NOTE(review): the line below is a placeholder, not valid Python --
# """button status""" stands for whatever name would actually be imported.
from _While import """button status"""

# Job body: meant to poll the (placeholder) button status twice per second.
def _scheduler():
    print "scheduler"
    while """button status"""==True:
        print "Button is pressed"
        time.sleep(0.5)

# Run _scheduler every 2 seconds via APScheduler in standalone mode.
def main():
    logging.basicConfig()
    scheduler = Scheduler(standalone=True)
    scheduler.add_interval_job(_scheduler, seconds=2)
    scheduler.start()

if __name__ == '__main__':
    main()
Solution:
Thread.py
from multiprocessing import Process, Value, Array
import time
import _While
import _Scheduler

if __name__ == '__main__':
    # Shared state handed to both child processes.
    num = Value('d', 0.0)        # 'd': double-precision float
    arr = Array('i', range(10))  # 'i': signed ints; passed but never used below
    p1 = Process(target=_While.main, args=(num, arr))
    p1.start()
    p2 = Process(target=_Scheduler.main, args=(num, arr))
    p2.start()
    p1.join()
    p2.join()
    # Python 2 print statement.
    print num.value
_While
import time

# Busy loop (Python 2): raise the shared flag every 10 seconds.
def main(num, arr):
    while True:
        print "while"
        num.value = 1  # signals _Scheduler._scheduler, which checks for == 1
        time.sleep(10)
        # Author's placeholders (no-op bare string statements):
        """getting button status"""
        """giving button status to _Scheudler._scheduler"""
__Scheduler.py
import logging
from apscheduler.scheduler import Scheduler

# Job body: spin until _While sets the shared flag to 1, then report,
# reset the flag, and end this job run.
def _scheduler(num, arr):
    while True:
        print num.value
        if num.value == 1:
            print "mongo"
            num.value = 0
            break

# Run _scheduler every 2 seconds, forwarding the shared objects.
def main(num, arr):
    logging.basicConfig()
    scheduler = Scheduler(standalone=True)
    scheduler.add_interval_job(_scheduler, args=(num, arr), seconds=2)
    scheduler.start()

if __name__ == '__main__':
    # NOTE(review): main() requires (num, arr); this bare call passes no
    # arguments and would raise TypeError if this file were run directly.
    # It only works when launched through Thread.py instead.
    main()
The only problem left is that I can't use Value without using Array
Create an instance of multiprocessing.Value in Threads.py just before you create p1 and p2, then pass the instance of Value as args to both p1 and p2 and change the main() method of _While.py and _Scheduler.py to accept the new Value parameter.
Similar to how it is done here http://docs.python.org/2/library/multiprocessing.html#sharing-state-between-processes
You could also use Queues or Pipes as suggested by Euegene C. an example can be found here http://docs.python.org/2/library/multiprocessing.html#exchanging-objects-between-processes