I came across something surprising while writing logic to save a value during process termination. Here is a toy program that reproduces the problem:
import multiprocessing
import time

class A(multiprocessing.Process):
    def __init__(self):
        self.till = 0
        super(A, self).__init__()

    def run(self):
        i = 0
        while True:
            print(i)
            i += 1
            self.till = i
            time.sleep(1)

    def terminate(self):
        print("Terminating : {}".format(self.till))
        super(A, self).terminate()

if __name__ == "__main__":
    obj = A()
    obj.start()
    time.sleep(5)
    obj.terminate()
The output of the above program is:
0
1
2
3
4
Terminating : 0
Why does terminate() not print 4? Am I missing something?
What you are doing is actually running terminate() in the main process. Look at this code:
import multiprocessing
import os
import time

class A(multiprocessing.Process):
    def __init__(self):
        self.till = 0
        super(A, self).__init__()

    def run(self):
        i = 0
        print(os.getpid())
        while True:
            print(i)
            i += 1
            self.till = i
            time.sleep(1)

    def terminate(self):
        print("Terminating : {}".format(self.till))
        print(os.getpid())
        super(A, self).terminate()

if __name__ == "__main__":
    print("parent pid:")
    print(os.getpid())
    print("child pid:")
    obj = A()
    obj.start()
    time.sleep(3)
    obj.terminate()
This will lead to the output:
parent pid:
12111
child pid:
12222
0
1
2
Terminating : 0
12111
In terminate() you are sending SIGTERM to the child process, but the call itself executes in the parent process, so it reads the parent's memory, where self.till was never incremented.
The __init__ and terminate methods run in the main process, which is why terminate() prints 0; run() increments the counter only in the subprocess's copy of the object. You can confirm this with os.getpid().
Edit: the separation is even more visible on Windows, which has no fork() system call like Linux/Unix systems and instead starts the whole module from the beginning in the child. But on any platform the parent and child have separate memory once the child is running, so the parent's copy of self.till stays 0.
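If the goal is for the parent to see the child's progress, one option is to put the counter in shared memory. A minimal sketch (my variation, not the original code) using multiprocessing.Value:

import multiprocessing
import time

class A(multiprocessing.Process):
    def __init__(self):
        # 'i' = signed int; lives in shared memory, visible to both processes
        self.till = multiprocessing.Value('i', 0)
        super(A, self).__init__()

    def run(self):
        i = 0
        while True:
            print(i)
            i += 1
            self.till.value = i  # update the shared counter
            time.sleep(1)

    def terminate(self):
        # reads the child's latest value, even though this runs in the parent
        print("Terminating : {}".format(self.till.value))
        super(A, self).terminate()

if __name__ == "__main__":
    obj = A()
    obj.start()
    time.sleep(5)
    obj.terminate()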
Related
With some help I could run a process in Python. Now I want to share a value between the two tasks. I can set the value inside __init__, but I can't change it inside the run method.
And by the way: how do I kill the process when the main process stops?
from multiprocessing import Process, Value
import time

class P(Process):
    def __init__(self, num):
        num.value = 15
        super(P, self).__init__()

    def run(self):
        while True:
            num.value = num.value + 1
            print("run simple process")
            time.sleep(0.5)

def main():
    while True:
        print("run main")
        print(num.value)
        time.sleep(2.5)

if __name__ == "__main__":
    num = Value('d', 0.0)
    p = P(num)
    p.start()
    #p.join()
    main()
In your simplified case you only used num at initialization time.
To be able to access that value in the process's other methods, store it as state on the process object:
class P(Process):
    def __init__(self, num):
        self.num = num
        self.num.value = 15
        super(P, self).__init__()

    def run(self):
        while True:
            self.num.value += 1
            print("run simple process")
            time.sleep(0.5)
For more "serious" cases, consider using Managers and synchronization primitives.
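For illustration, a rough sketch of the Manager approach (my example, not from the question): a manager process holds the state and hands out proxies that work across processes. It is slower than Value, but more flexible:

from multiprocessing import Process, Manager
import time

class P(Process):
    def __init__(self, shared):
        self.shared = shared       # a manager proxy, usable across processes
        self.shared['num'] = 15
        super(P, self).__init__()

    def run(self):
        while True:
            self.shared['num'] += 1  # read-modify-write; not atomic without a lock
            print("run simple process")
            time.sleep(0.5)

if __name__ == "__main__":
    manager = Manager()
    shared = manager.dict()
    p = P(shared)
    p.start()
    for _ in range(3):
        print("run main")
        print(shared['num'])
        time.sleep(2.5)
    p.terminate()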
I'm working on a toy multiprocessing problem, and event signalling is not working as expected. The multiprocessing documentation refers to the threading documentation for the details of Event(), and the methods described there are precisely what I'm trying to use. I want worker processes (subclassed from multiprocessing.Process) spawned by a parent class to wait for a start signal from the parent, do their thing, then terminate. What seems to be happening, however, is that the first process, once running, blocks the others. What's going on here, and how do I fix it?
from multiprocessing import Process, Event, cpu_count
from time import time, sleep

class Worker(Process):
    def __init__(self, my_id, caller):
        Process.__init__(self)
        self.caller = caller
        self.my_id = my_id

    def run(self):
        print("%i started" % self.my_id)
        self.caller.start_flag.wait()
        print("%i sleeping" % self.my_id)
        sleep(2000)

class ParentProcess(object):
    def __init__(self, num_procs):
        self.procs = []
        self.start_flag = Event()
        for i in range(num_procs):
            self.procs.append(Worker(i, self))

    def run(self):
        for proc in self.procs:
            proc.run()
        self.start_flag.set()
        for proc in self.procs:
            proc.join()
            print("%i done" % proc.my_id)

if __name__ == '__main__':
    cpus = cpu_count()
    world = ParentProcess(cpus)
    start = time()
    world.run()
    end = time()
    runtime = end - start
    print("Runtime: %3.6f" % runtime)
This only outputs "0 started", then hangs. It seems Event.wait() is blocking all the other workers, even the caller. The documentation implies this should not happen.
Here is a working version of the code. When you subclass Process, you implement the run method to define what should run in that process, but you must call start() (proc.start()) to actually launch it. Calling proc.run() directly just executes the method in the calling process, so the first worker's start_flag.wait() blocks the parent before set() is ever reached, which is the hang you are seeing.
from multiprocessing import Process, Event
from time import time, sleep

class Worker(Process):
    def __init__(self, my_id, caller):
        Process.__init__(self)
        self.caller = caller
        self.my_id = my_id

    def run(self):
        print("%i started" % self.my_id)
        self.caller.start_flag.wait()
        print("%i sleeping" % self.my_id)
        sleep(5)

class ParentProcess(object):
    def __init__(self, num_procs):
        self.procs = []
        self.start_flag = Event()
        for i in range(num_procs):
            self.procs.append(Worker(i, self))

    def run(self):
        for proc in self.procs:
            proc.start()
        self.start_flag.set()
        for proc in self.procs:
            proc.join()
            print("%i done" % proc.my_id)

if __name__ == '__main__':
    cpus = 4
    world = ParentProcess(cpus)
    start = time()
    world.run()
    end = time()
    runtime = end - start
    print(runtime)
Outputs:
0 started
1 started
2 started
2 sleeping
0 sleeping
1 sleeping
3 started
3 sleeping
0 done
1 done
2 done
3 done
5.01037812233
It's my second day with Python. I find it a really cool language and I want to try different things in it.
Is it possible to call an object and create a daemon from one of that object's methods which would change the object's attributes?
from multiprocessing import Process
import time

class Foo(object):
    def __init__(self):
        self.number = 1
        # this attribute...

    def loop(self):
        while 1:
            print self.number
            # ...is changed here
            self.number += 1
            time.sleep(1)

if __name__ == '__main__':
    f = Foo()
    p = Process(target=f.loop)
    p.daemon = True  # this makes it work in the background
    p.start()
    # proceed with the main loop...
    while 1:
        time.sleep(1)
        print f.number * 10
The result:
1
10
2
10
3
10
4
10
...
Why doesn't f.loop() change f.number? Both loops refer to the same Foo() instance.
What can I change to receive this output:
1
10
2
20
3
30
4
40
...
Edit 1:
I tried this, with the same result (why?):
import time
from multiprocessing import Process

class Foo(Process):
    def __init__(self):
        super(Foo, self).__init__()
        self.daemon = True          # is daemon
        self.number = 1
        self._target = self.loop    # on start() it will run loop()

    def loop(self):
        while 1:
            print self.number
            self.number += 1
            time.sleep(1)

if __name__ == '__main__':
    f = Foo()    # is now a Process
    f.start()    # runs f.loop()
    while 1:
        time.sleep(1)
        print f.number * 10
Same output as before.
You're using multiprocessing. The short (and somewhat simplified) answer is that processes do not share memory by default. Try using threading instead.
If you're hell-bent on experimenting with shared memory and processes, then look at sharing state in the documentation on multiprocessing.
Also, daemon doesn't do what you think it does. If a process creates children, then it will attempt to kill all of its daemonic children when it exits. All Processes work in the background; you just need to start them.
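For example, a minimal sketch of the threading variant of the first snippet (same Foo class, only the Process swapped for a Thread): threads share the interpreter's memory, so f.number is the same attribute in both loops:

import threading
import time

class Foo(object):
    def __init__(self):
        self.number = 1

    def loop(self):
        while 1:
            print self.number
            self.number += 1
            time.sleep(1)

if __name__ == '__main__':
    f = Foo()
    t = threading.Thread(target=f.loop)
    t.daemon = True   # dies together with the main thread
    t.start()
    while 1:
        time.sleep(1)
        print f.number * 10   # now sees the increments: 10, 20, 30, ...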
I created two threads, each running a different function.
What I tried to achieve is that if the first thread ends, the second should also end (I tried to achieve this using a global variable).
Once both threads end, the same procedure should repeat.
The script is not working as expected.
I am using Linux (CentOS) and Python 2.7.
#!/usr/bin/python
import threading
import time
import subprocess
import datetime
import os
import thread

command = "strace -o /root/Desktop/a.txt -c ./server"
final_dir = "/root/Desktop/"
exitflag = 0

# Define a function for the thread
def print_time(*args):
    os.chdir(final_dir)
    print "IN first thread"
    proc = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    proc.wait(70)
    exitflag = 1

def print_time1(*args):
    print "In second thread"
    global exitflag
    while exitflag:
        thread.exit()
    #proc = subprocess.Popen(command1, shell=True, stdout=subprocess.PIPE, sterr=subprocess.PIPE)

# Create two threads as follows
while (1):
    t1 = threading.Thread(target=print_time)
    t1.start()
    t2 = threading.Thread(target=print_time1)
    t2 = start()
    time.sleep(80)
    z = t1.isAlive()
    z1 = t2.isAlive()
    if z:
        z.exit()
    if z1:
        z1.exit()
    threading.Thread(target=print_time1).start()
    threading.Thread(target=print_time1).start()
    print "In try"
Where am I going wrong?
You could create an object to share state, and have the dependent thread check that state. Something like:
import threading
import time
import datetime

class Worker1(threading.Thread):
    def __init__(self, state):
        super(Worker1, self).__init__()
        self.state = state

    def run(self):
        print_time_helper("Worker1 Start")
        time.sleep(4)
        print_time_helper("Worker1 End")
        self.state.keepOnRunning = False

class Worker2(threading.Thread):
    def __init__(self, state):
        super(Worker2, self).__init__()
        self.state = state

    def run(self):
        while self.state.keepOnRunning:
            print_time_helper("Worker2")
            time.sleep(1)

class State(object):
    def __init__(self):
        self.keepOnRunning = True

def print_time_helper(name):
    print "{0}: {1}".format(name, datetime.datetime.now().time().strftime("%S"))

def main():
    state = State()
    thread1 = Worker1(state)
    thread2 = Worker2(state)
    thread1.start()
    thread2.start()
    thread1.join()
    thread2.join()

if __name__ == "__main__":
    main()
which will output something like this (numbers show current time seconds):
Worker1 Start: 39
Worker2: 39
Worker2: 40
Worker2: 41
Worker2: 42
Worker1 End: 43
However, this is a bit simplistic for most situations. You might be better off using message queues - this is a good intro.
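To sketch the message-queue idea (a toy example, not your strace code): the producer pushes a sentinel value when it finishes, and the consumer stops when it sees it, which replaces the global flag entirely:

import threading
import Queue   # "queue" in Python 3

q = Queue.Queue()

def producer():
    for i in range(3):
        q.put(i)       # hand work to the consumer
    q.put(None)        # sentinel: tells the consumer to stop

def consumer():
    while True:
        item = q.get()
        if item is None:   # producer finished, so we end too
            break
        print "got", item

t1 = threading.Thread(target=producer)
t2 = threading.Thread(target=consumer)
t1.start()
t2.start()
t1.join()
t2.join()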
Use a threading.Event instead of an int and wait for it to be set.
Also, your logic in print_time1 appears to be wrong: the while loop never runs because exitflag is initially 0, and even if it were 1 the thread would just exit immediately. It never actually waits for anything.
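A minimal sketch of what that could look like (the subprocess call is elided, since only the signalling changes):

import threading

first_done = threading.Event()

def print_time():
    print "IN first thread"
    # ... run the subprocess and wait for it here ...
    first_done.set()      # wake up anyone waiting on the event

def print_time1():
    print "In second thread"
    first_done.wait()     # blocks until print_time() calls set()
    print "first thread ended, second thread exiting"

t1 = threading.Thread(target=print_time)
t2 = threading.Thread(target=print_time1)
t1.start()
t2.start()
t1.join()
t2.join()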
I have a class with a long-running method.
I create a thread for that method.
How can I kill/terminate this thread?
The main problem is that I can't check a threading.Event in the thread's run() method, because the method doesn't contain a loop.
The code is similar to this:
import time
import threading

class LongAction:
    def time_consuming_action(self):
        tmax = 600
        for i in range(tmax):
            print i
            time.sleep(1)
        time.sleep(tmax)
        self.tmax = tmax
        return "Slept well"

class LongActionThread(threading.Thread):
    def __init__(self, la_object):
        self.la = la_object
        threading.Thread.__init__(self)

    def run(self):
        self.la.time_consuming_action()

la = LongAction()
la_thread = LongActionThread(la)
la_thread.start()

# After 5 sec I've changed my mind and am trying to kill LongActionThread
time.sleep(5)
print "Trying to kill LongActionThread"
la_thread.kill()
The code below works fine, but the output has to be flushed explicitly; I haven't found a way to make the prints show up without flushing.
import time
import sys
from multiprocessing import Process

class LongAction:
    def time_consuming_action(self):
        tmax = 600
        for i in range(tmax):
            print i
            time.sleep(1)
            sys.stdout.flush()
        time.sleep(tmax)
        self.tmax = tmax
        sys.stdout.flush()
        return "Slept well"

class LongActionThread(Process):
    def __init__(self, la_object):
        self.la = la_object
        Process.__init__(self)

    def run(self):
        self.la.time_consuming_action()

if __name__ == "__main__":
    la = LongAction()
    la_thread = LongActionThread(la)
    la_thread.start()

    # After 5 sec I've changed my mind and am trying to kill LongActionThread
    time.sleep(5)
    print "Trying to kill LongActionThread"
    la_thread.terminate()
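As an aside, one way to avoid sprinkling flush() calls (assuming Python 2, as in the snippets above) is to rebind stdout unbuffered at the top of the program, or to run the interpreter with python -u:

import os
import sys

# Reopen stdout with buffer size 0, so every print is written immediately
sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)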
While it is not a good idea to kill a thread, if you really must do it, the easiest solution is to implement a stop flag, divide your time-consuming method into sub-methods, and check the flag between the sub-methods.
Code partly copied from this SO question:
import threading

class StoppableThread(threading.Thread):
    """Thread class with a stop() method. The thread itself has to check
    regularly for the stopped() condition."""

    def __init__(self, la_object):
        super(StoppableThread, self).__init__()
        self.la = la_object
        self._stop = threading.Event()

    def stop(self):
        self._stop.set()

    def stopped(self):
        return self._stop.isSet()

    def run(self):
        self.la.time_consuming_action(self.stopped)

class La:
    def __init__(self):
        pass  # init here

    def time_consuming_action(self, thread_stop_callback):
        sub_work1()
        if thread_stop_callback():
            raise RuntimeError('Thread killed!')
        sub_work2()
        if thread_stop_callback():
            raise RuntimeError('Thread killed!')
        sub_work3()
        # etc...
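A hypothetical usage sketch for the classes above (assuming sub_work1/2/3 exist and each takes a while):

la = La()
worker = StoppableThread(la)
worker.start()

# Changed my mind: ask the thread to stop at its next checkpoint
worker.stop()
worker.join()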