How to kill all the multiprocessing? - python

I have a Python function that calls another function in a separate process started with multiprocessing. How can I kill all of these processes from Python?
This is the outer call, which itself runs as a separate process:
p = Process(target=api.queue_processor, args=(process_queue_in, process_queue_out, process_lock,
                                              command_queue_in, command_queue_out, api.OBJECTIVE_0_5NA,
                                              quit_event, log_file))
p.start()
This is the function that gets called:
def queue_processor(process_queue_in, process_queue_out, process_lock, command_queue_in,
                    command_queue_out, objective_type_NA, quit_event, log_file=None):
    slide, roi_coordinates = process_obj[0]
    logger.info("Received for imaging {} with ROI {} and type {}".format(slide.tile_dir,
                                                                         roi_coordinates,
                                                                         slide.slide_type))
    p = Process(target=iad.image_slide,
                args=(slide.tile_dir, slide.slide_type, roi_coordinates, exception_queue,
                      objective_type_NA, logger, clog_path))
    p.start()  # process to kill
I want to kill this second process (marked with the # process to kill comment).

You can kill a process immediately with its terminate() method. The example below uses it to terminate a child process before it has finished its work:
import multiprocessing
import time

def Child_process():
    print('Starting function')
    time.sleep(5)
    print('Finished function')

if __name__ == '__main__':
    P = multiprocessing.Process(target=Child_process)
    P.start()
    print("Terminating Child Process")
    P.terminate()   # ask the OS to kill the child immediately
    P.join()        # reap the child so it does not linger as a zombie
    print("Child Process successfully terminated")

Related

How to wait until the user closes the process in Python?

I want to create a program that starts a new process and then waits until that process is killed by the user or by another program.
But I can't understand why the process created with subprocess finishes after a fraction of a second instead of when the user closes it.
import subprocess
import psutil

old_process = {p.pid for p in psutil.process_iter()}
calc_process = subprocess.Popen(r'C:\Windows\System32\calc')
new_process = {p.pid for p in psutil.process_iter()}
process = list(new_process - old_process)[0]

while psutil.pid_exists(process):
    print('Process running')
print('Done')
Popen has this built in via the poll() method: poll() returns None while the process is still running, and wait() simply blocks until it exits.
from subprocess import Popen
import time

process = Popen(r'C:\Windows\System32\calc')

while process.poll() is None:   # None means the process has not exited yet
    print('Process running')
    time.sleep(1)
print('Done')

# Equivalent, without the polling loop:
# process.wait()
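If the process you want to wait on is not your own child (which is why the question diffs psutil.process_iter() to find its PID), psutil can also block on an arbitrary PID. A minimal sketch, assuming process holds the PID found above:

import psutil

def wait_for_pid(pid):
    # Block until the process with the given PID exits; psutil.Process.wait()
    # also works for processes this script did not spawn itself.
    try:
        psutil.Process(pid).wait()
    except psutil.NoSuchProcess:
        pass  # the process is already gone

# usage with the PID computed in the question:
# wait_for_pid(process)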

Multiprocess messaging queue between functions or processes in Python

I'm trying to understand how processes send messages to each other; see the example below.
I use the second function to do my main job, and the queue occasionally feeds the first function so it can do its own work, regardless of when it finishes. I've looked at many examples and tried different approaches without success. Can anyone explain, using my example, how I can do this?
from multiprocessing import Process, Queue, Manager
import time

def first(a, b):
    q.get()
    print a + b
    time.sleep(3)

def second():
    for i in xrange(10):
        print "seconf func"
        k += 1
        q.put=(i,k)

if __name__ == "__main__":
    processes = []
    q = Queue()
    manager = Manager()
    p = Process(target=first, args=(a, b))
    p.start()
    processes.append(p)
    p2 = Process(target=second)
    p2.start()
    processes.append(p2)
    try:
        for process in processes:
            process.join()
    except KeyboardInterrupt:
        print "Interupt"

Kill the sub-processes by signal, but does the handler's position affect it?

I use the signal module to kill all sub-processes in a multi-process program. The code is shown below, saved as a file named mul_process.py:
import time
import os
import signal
from multiprocessing import Process

processes = []

def fun(x):
    print 'current sub-process pid is %s' % os.getpid()
    while True:
        print 'args is %s' % x
        time.sleep(100)

def term(sig_num, frame):
    print 'terminate process %d' % os.getpid()
    for p in processes:
        print p.pid
    try:
        for p in processes:
            print 'process %d terminate' % p.pid
            p.terminate()
            p.join()
    except Exception as e:
        print str(e)

if __name__ == '__main__':
    print 'current main-process pid is %s' % os.getpid()
    for i in range(3):
        t = Process(target=fun, args=(str(i),))
        t.start()
        processes.append(t)
    signal.signal(signal.SIGTERM, term)
    try:
        for p in processes:
            p.join()
    except Exception as e:
        print str(e)
I launch the program with 'python mul_process.py' on Ubuntu 10.04.4 with Python 2.6. While it is running, I use kill -15 in another tab with the main process pid to send SIGTERM; the main process receives the signal and exits after terminating all sub-processes. But when I use kill -15 with a sub-process pid, it does not work: the program stays alive and keeps running as before, and the message defined in term() is never printed, so it seems the sub-process doesn't receive SIGTERM. As far as I know, a sub-process should inherit the signal handler, but it doesn't work here. That is the first question.
Then I move the line 'signal.signal(signal.SIGTERM, term)' to the position right after 'if __name__ == '__main__':', like this:
if __name__ == '__main__':
    signal.signal(signal.SIGTERM, term)
    print 'current main-process pid is %s' % os.getpid()
    for i in range(3):
        t = Process(target=fun, args=(str(i),))
        t.start()
        processes.append(t)
    try:
        for p in processes:
            p.join()
    except Exception as e:
        print str(e)
Launching the program and using kill -15 with the main process pid to send SIGTERM, the program receives the signal and calls term(), but it doesn't kill any of the sub-processes and doesn't exit itself. This is the second question.
A few problems with your program: I agree that the sub-processes inherit the signal handler in your second code snippet, but the global "processes" list is not shared between processes. The full list is only available in the main process; inside each sub-process, "processes" is an empty list.
You could use a queue or pipe kind of mechanism to pass the list of processes to the sub-processes, but that brings another problem: you terminate process 1, and the handler in process 1 tries to terminate processes 2 to 4. Process 2 has the same handler, so process 2's handler in turn tries to terminate all the other processes, which pushes your program into an infinite loop.
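One way to sidestep both issues, sketched under my own assumptions rather than taken from the answer: install the terminating handler only in the parent, and have each child reset SIGTERM to the default action, so a child that is killed directly simply dies instead of trying to terminate its siblings:

import os
import signal
import time
from multiprocessing import Process

processes = []

def fun(x):
    # child: restore the default SIGTERM action, so `kill -15 <child pid>`
    # simply kills this child instead of running the inherited handler
    signal.signal(signal.SIGTERM, signal.SIG_DFL)
    while True:
        print('child %s, pid %d' % (x, os.getpid()))
        time.sleep(5)

def term(sig_num, frame):
    # runs only in the parent: terminate every child; once they are gone,
    # the join() calls in __main__ return and the program exits normally
    for p in processes:
        p.terminate()

if __name__ == '__main__':
    signal.signal(signal.SIGTERM, term)   # children reset this in fun()
    for i in range(3):
        p = Process(target=fun, args=(str(i),))
        p.start()
        processes.append(p)
    for p in processes:
        p.join()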

Not able to exchange an object / time out a child process using multiprocessing.Process() in Python

From the main process I am spawning a new process using multiprocessing.Process.
My aim is to run a heavy CPU-intensive task in the child process; if the task takes too long to finish (controlled by the timeout_in variable), terminate it and return a response, otherwise compute the result in the child process and pass it back.
I am able to terminate the child when it takes too long, but I am not able to get the object (result) back when the child process is not forcibly terminated.
from multiprocessing import Process, Queue

def do_threading(function, argument, timeout_in=1):
    # Making a queue for data exchange
    q = Queue()
    # Start function as a process
    p = Process(target=function, args=(argument, q,))
    p.start()
    # Wait for 10 seconds or until process finishes
    p.join(timeout_in)
    # If thread is still active
    if p.is_alive():
        print("running... let's kill it...")
        # print(q.get())
        # Terminate
        p.terminate()
        p.join()

def do_big_job(argument, q):
    # Do something with passed argument
    print(argument)
    # heavy computation
    result = 2**1234567
    # print("in child thread ", result)
    # Putting result in the queue for exchange
    q.put(result)

def main_2():
    print("Main thread starting...")
    do_threading(do_big_job, "Child thread starting...", timeout_in=10)

if __name__ == '__main__':
    main_2()
I think the problem comes from the fact that you create the Queue inside do_threading, so when your calculation finishes normally (no timeout), the function returns and the queue disappears with it.
Here is an alternative that works when there is no timeout:
from multiprocessing import Process, Queue

def do_threading(q, function, argument, timeout_in=1):
    # Start function as a process
    p = Process(target=function, args=(argument, q,))
    p.start()
    # Wait for timeout_in seconds or until process finishes
    p.join(timeout_in)
    print("time out")
    # If thread is still active
    if p.is_alive():
        print("running... let's kill it...")
        # print(q.get())
        # Terminate
        p.terminate()
        print("terminate")
        p.join()

def do_big_job(argument, q):
    # Do something with passed argument
    print(argument)
    # heavy computation
    result = 2**123
    # print("in child thread ", result)
    # Putting result in the queue for exchange
    q.put(result)

if __name__ == '__main__':
    q = Queue()  # Creating the queue in the main allows you to access it anytime
    print("Main thread starting...")
    do_threading(q, do_big_job, "Child thread starting...", timeout_in=10)
    if q.empty():
        pass
    else:
        print(q.get())  # get your result here.
Try to catch the timeout exception on the queue instead of on the process, for example:
...
from multiprocessing.queues import Empty   # the same Empty raised by queue.Queue
...

def do_threading(q, function, argument, timeout_in=1):
    # Start function as a process
    p = Process(target=function, args=(argument, q,))
    p.start()
    try:
        # q.get blocks for at most timeout_in seconds and raises Empty if
        # nothing arrived, so the timeout and the result retrieval happen in one call
        print(q.get(True, timeout_in))
    except Empty:
        print("time out")
        p.terminate()
    p.join()
Or you can get the result in an else branch in your original code:
...
# If thread is still active
if p.is_alive():
    print("running... let's kill it...")
    # Terminate
    p.terminate()
else:
    print(q.get())

How to kill a running subprocess when another subprocess has completed (in multiprocessing)

I came across a problem with multiprocessing:
from multiprocessing import Process

class PythonHelper(object):
    @staticmethod
    def run_in_parallel(*functions):
        processes = list()
        for function in functions:
            process = Process(target=function)
            process.start()
            processes.append(process)
        for process in processes:
            process.join()
I use the above static method to run several functions simultaneously (each in its own process). Everything was fine until I needed to force the remaining processes to terminate once one of the 'subprocesses' had finished.
For example:
from PythonHelper import PythonHelper as ph
from Recorder import Recorder

class Logger(object):
    def run_recorder_proc(self):
        rec = Recorder()
        rec.record_video()

    def run_printer_proc(self):
        # hypothetical function: execution takes a long time
        for i in range(9000000):
            print("number: {}".format(i))

    def run_logger(self):
        ph.run_in_parallel(self.run_printer_proc, self.run_recorder_proc)
self.run_printer_proc and self.run_recorder_proc are my subprocesses. How do I 'kill' the remaining subprocess once one of them has completed?
Edit:
Full source code:
import os
from multiprocessing import Process

class PythonHelper(object):
    @staticmethod
    # with your fix
    def run_in_parallel(*functions):
        processes = {}
        for function in functions:
            process = Process(target=function)
            process.start()
            processes[process.pid] = process
        # wait for any process to complete
        pid, status = os.waitpid(-1, 0)
        # one process terminated
        # join it
        processes[pid].join()
        del processes[pid]
        # terminate the rest
        for process in processes.values():
            process.terminate()
        for process in processes.values():
            process.join()

class Logger(object):
    def run_numbers_1(self):
        for i in range(900000):
            print("number: {}".format(i))

    def run_numbers_2(self):
        for i in range(100000):
            print("number: {}".format(i))

    def run_logger(self):
        PythonHelper.run_in_parallel(self.run_numbers_1, self.run_numbers_2)

if __name__ == "__main__":
    logger = Logger()
    logger.run_logger()
Based on the above example, I would like to force termination of run_numbers_1 once run_numbers_2 has completed.
You may achieve that by changing run_in_parallel() slightly:
def run_in_parallel(*functions):
    processes = {}
    for function in functions:
        process = Process(target=function)
        process.start()
        processes[process.pid] = process
    # wait for any process to complete
    pid, status = os.waitpid(-1, 0)
    # one process terminated
    # join it
    processes[pid].join()
    del processes[pid]
    # terminate the rest
    for process in processes.itervalues():
        process.terminate()
    for process in processes.itervalues():
        process.join()
[Update]
Based on your complete code, here is a working example. Instead of the race-prone os.waitpid() it uses an Event object, which each process sets when it completes:
from multiprocessing import Process, Event

class MyProcess(Process):
    def __init__(self, event, *args, **kwargs):
        self.event = event
        Process.__init__(self, *args, **kwargs)

    def run(self):
        Process.run(self)
        self.event.set()

class PythonHelper(object):
    @staticmethod
    # with your fix
    def run_in_parallel(*functions):
        event = Event()
        processes = []
        for function in functions:
            process = MyProcess(event, target=function)
            process.start()
            processes.append(process)
        # wait for any process to complete
        event.wait()
        # one process completed
        # terminate all child processes
        for process in processes:
            process.terminate()
        for process in processes:
            process.join()

class Logger(object):
    def run_numbers_1(self):
        for i in range(90000):
            print("1 number: {}".format(i))

    def run_numbers_2(self):
        for i in range(10000):
            print("2 number: {}".format(i))

    def run_logger(self):
        PythonHelper.run_in_parallel(self.run_numbers_1, self.run_numbers_2)

if __name__ == "__main__":
    logger = Logger()
    logger.run_logger()
