python multiprocessing process count

#coding:utf-8
import sys
import time
import os
import multiprocessing

class Worker(object):
    def __init__(self):
        self.progress = 0
        self.task_info = None

    def init(self):
        pass

    def status(self):
        pass

    def set_task_info(self, task_info):
        self.task_info = task_info

    def run(self, worker_status_meta_dict):
        print multiprocessing.current_process()
        print "progress is %d" % self.progress
        while self.progress < 5:
            self.progress = self.progress + 1
            worker_status_meta_dict['state'] = 0
            worker_status_meta_dict['status'] = "running"
            time.sleep(2)
        worker_status_meta_dict['state'] = 1
        worker_status_meta_dict['status'] = "succeeded"
        print "bavscan worker finished..."

if __name__ == "__main__":
    worker = Worker()
    worker_process_dict = multiprocessing.Manager().dict()
    process = multiprocessing.Process(target=Worker.run, args=(worker, worker_process_dict))
    process.start()
    time.sleep(60)
This is a simple demo of Python multiprocessing. The main process invokes the Worker.run method in a subprocess via multiprocessing.Process.
When run on Windows 7, the main process launches two subprocesses.

I found the problem in "Python27\Lib\multiprocessing\__init__.py":
def Manager():
    '''
    Returns a manager associated with a running server process

    The managers methods such as `Lock()`, `Condition()` and `Queue()`
    can be used to create shared objects.
    '''
    from multiprocessing.managers import SyncManager
    m = SyncManager()
    m.start()
    return m
m.start() will launch a subprocess to run the manager, which is where the second child process comes from.
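If you want to confirm this (a quick check of my own, not from the original post), multiprocessing.active_children() lists the live child processes; with a Manager() plus an explicit Process you should see two:

import time
import multiprocessing

if __name__ == "__main__":
    manager = multiprocessing.Manager()  # starts the manager's server subprocess
    worker = multiprocessing.Process(target=time.sleep, args=(2,))
    worker.start()
    # Expect two entries: the SyncManager's server process and our worker.
    print(multiprocessing.active_children())
    worker.join()
    manager.shutdown()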

Related

Python multiprocessing exits without exception when accessing shared value. Is this normal?

I am trying to share a string between processes with multiprocessing.Value(c_wchar_p, ""). The program exits unexpectedly, without an exception, with code 0xC0000005. I am guessing that the problem is somehow caused by the main process trying to access memory of the child process. The issue occurs only when the ctypes c_char_p/c_wchar_p types are used; a Thread works fine. Is this expected behavior?
Steps to reproduce:
import multiprocessing
import threading
import ctypes
import time

class ProcessExample:
    def __init__(self, proc: bool):
        self.shared_bool = multiprocessing.Value(ctypes.c_bool, False)
        self.shared_string = multiprocessing.Value(ctypes.c_wchar_p, "one")
        self.shared_byte = multiprocessing.Value(ctypes.c_char_p, b"one")
        if proc:
            self.th_proc = multiprocessing.Process(target=self.target, args=(self.shared_bool, self.shared_string, self.shared_byte))
        else:
            self.th_proc = threading.Thread(target=self.target, args=(self.shared_bool, self.shared_string, self.shared_byte))
        self.th_proc.start()

    @staticmethod
    def target(shared_bool, shared_string, shared_byte):
        shared_bool.value = True
        shared_string.value = "two"
        shared_byte.value = b"two"

    def print_values(self):
        self.th_proc.join()
        print(self.shared_bool.value)
        print(self.shared_string.value)  # Exits
        print(self.shared_byte.value)  # Exits

if __name__ == "__main__":
    # example = ProcessExample(False)  # Works
    example = ProcessExample(True)
    time.sleep(1)
    example.print_values()
Output
True
Process finished with exit code -1073741819 (0xC0000005)
I think the problem is in multiprocessing.Value(ctypes.c_wchar_p, "one") and multiprocessing.Value(ctypes.c_char_p, b"one"): with the pointer types, only the raw pointer lives in shared memory, so assigning a string in the child stores an address that is valid only in the child's address space, and dereferencing it from the parent reads invalid memory (hence 0xC0000005). To have the string/bytes shared correctly, use multiprocessing.Array, which stores the characters themselves in shared memory:
import multiprocessing
import threading
import ctypes
import time

class ProcessExample:
    def __init__(self, proc: bool):
        self.shared_bool = multiprocessing.Value(ctypes.c_bool, False)
        self.shared_string = multiprocessing.Array(ctypes.c_wchar, "one")
        self.shared_byte = multiprocessing.Array(ctypes.c_char, b"one")
        if proc:
            self.th_proc = multiprocessing.Process(
                target=self.target,
                args=(self.shared_bool, self.shared_string, self.shared_byte),
            )
        else:
            self.th_proc = threading.Thread(
                target=self.target,
                args=(self.shared_bool, self.shared_string, self.shared_byte),
            )
        self.th_proc.start()

    @staticmethod
    def target(shared_bool, shared_string, shared_byte):
        shared_bool.value = True
        shared_string[:] = "two"
        shared_byte[:] = b"two"

    def print_values(self):
        self.th_proc.join()
        print(self.shared_bool.value)
        print(self.shared_string[:])
        print(self.shared_byte[:])

if __name__ == "__main__":
    example = ProcessExample(True)
    time.sleep(1)
    example.print_values()
Prints:
True
two
b'two'
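One caveat worth noting (my addition, not from the original answer): multiprocessing.Array sizes the buffer to its initializer, so shared_string[:] = "two" only works because "two" has the same length as "one"; a longer string raises ValueError. Allocating a larger buffer up front avoids this. The synchronized wrapper for c_wchar has no .value property, but the underlying ctypes array reached via get_obj() does:

import ctypes
import multiprocessing

buf = multiprocessing.Array(ctypes.c_wchar, 32)  # 32 wide chars, zero-filled
buf.get_obj().value = "one"                      # NUL-terminated write
buf.get_obj().value = "a longer string"          # still fits in the buffer
print(buf.get_obj().value)                       # -> 'a longer string'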

Python multiprocessing Event with process pool and custom manager

The following code works fine:
import time
from concurrent.futures import Future, as_completed
from concurrent.futures.process import ProcessPoolExecutor
import multiprocessing as mp
from multiprocessing.managers import BaseManager

class Progress(object):
    _target: int = 0
    _progress: int = 0

    def __init__(self):
        self._target = 0
        self._progress = 0

    def completed(self, n):
        self._progress += n

    def progress(self):
        return (self._progress / self._target) * 100

    def set_target(self, n):
        self._target = n

class ObjectManager(BaseManager):
    pass

def dummy_worker(progress: Progress, cancel: mp.Event = None):
    print("--> Worker started")
    for i in range(10):
        time.sleep(1)
        progress.completed(1)
    return 1

if __name__ == "__main__":
    ObjectManager.register('Progress', Progress)
    print('Starting manager')
    with ObjectManager() as manager:
        print('Manager started')
        progress = manager.Progress()
        progress.set_target(10)
        with ProcessPoolExecutor() as pool:
            f = pool.submit(dummy_worker, progress)
            futures = [f]
            for f in as_completed(futures):
                print(f.result())
        print(f'Progress: {progress.progress()}')
while the following gives me `Condition objects should only be shared between processes through inheritance`. I am not sure how to use the Event in dummy_worker. The main goal is to pass some shared object and also have a way to cancel the worker. Sorry for the long post; I wanted to give the full code for clarity.
import time
from concurrent.futures import Future, as_completed
from concurrent.futures.process import ProcessPoolExecutor
import multiprocessing as mp
from multiprocessing.managers import BaseManager

class Progress(object):
    _target: int = 0
    _progress: int = 0

    def __init__(self):
        self._target = 0
        self._progress = 0

    def completed(self, n):
        self._progress += n

    def progress(self):
        return (self._progress / self._target) * 100

    def set_target(self, n):
        self._target = n

class ObjectManager(BaseManager):
    pass

def dummy_worker(progress: Progress, cancel: mp.Event = None):
    print("--> Worker started")
    for i in range(10):
        time.sleep(1)
        progress.completed(1)
    return 1

if __name__ == "__main__":
    ObjectManager.register('Progress', Progress)
    cancel = mp.Event()
    print('Starting manager')
    with ObjectManager() as manager:
        print('Manager started')
        progress = manager.Progress()
        progress.set_target(10)
        with ProcessPoolExecutor() as pool:
            f = pool.submit(dummy_worker, progress, cancel)
            futures = [f]
            for f in as_completed(futures):
                print(f.result())
        print(f'Progress: {progress.progress()}')
Assume I am using Python 3.5+.
Try the following changes:
from multiprocessing.managers import SyncManager

class ObjectManager(SyncManager):
    pass

# use an Event() created by the ObjectManager instance: cancel = manager.Event()

if __name__ == "__main__":
    ObjectManager.register('Progress', Progress)
    # cancel = mp.Event()  # not this
    print('Starting manager')
    with ObjectManager() as manager:
        print('Manager started')
        progress = manager.Progress()
        cancel = manager.Event()  # but rather this
        progress.set_target(10)
        with ProcessPoolExecutor() as pool:
            f = pool.submit(dummy_worker, progress, cancel)
            futures = [f]
            for f in as_completed(futures):
                print(f.result())
        print(f'Progress: {progress.progress()}')
Prints:
Starting manager
Manager started
--> Worker started
1
Progress: 100.0
Also, remove or update the remaining references to mp.Event (such as the type annotation on dummy_worker).
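To actually cancel the worker (a sketch of my own, not part of the original answer), dummy_worker can poll the manager-backed event on each iteration and bail out once it is set; the main process triggers it with cancel.set():

def dummy_worker(progress, cancel=None):
    print("--> Worker started")
    for i in range(10):
        # stop early if the main process has set the manager-backed event
        if cancel is not None and cancel.is_set():
            print("--> Worker cancelled")
            return 0
        time.sleep(1)
        progress.completed(1)
    return 1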

shared memory between process and main task in python

With some help I was able to run a process in Python. Now I want to share a value between the two tasks. I can set the value inside __init__, but I can't change it inside the run method.
And by the way: how do I kill the process when the main process stops?
from multiprocessing import Process, Value
import serial
import time

class P(Process):
    def __init__(self, num):
        num.value = 15
        super(P, self).__init__()

    def run(self):
        while True:
            num.value = num.value + 1
            print("run simple process")
            time.sleep(0.5)

def main():
    while True:
        print("run main")
        print(num.value)
        time.sleep(2.5)

if __name__ == "__main__":
    num = Value('d', 0.0)
    p = P(num)
    p.start()
    # p.join()
    main()
In your simplified case you only used the num value at initialization time.
To be able to access that value in the process's other methods, store it as state on the process object:
class P(Process):
    def __init__(self, num):
        self.num = num
        self.num.value = 15
        super(P, self).__init__()

    def run(self):
        while True:
            self.num.value += 1
            print("run simple process")
            time.sleep(0.5)
For more serious cases, consider using managers and synchronization primitives.
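As for the side question (how to kill the process when the main process stops), which the answer above does not address: the usual approach, sketched here with the P, Value and main from the code above, is to mark the child as a daemon before starting it, since daemonic children are terminated automatically when the parent exits.

if __name__ == "__main__":
    num = Value('d', 0.0)
    p = P(num)
    p.daemon = True  # daemonic children are terminated when the parent exits
    p.start()
    main()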

Checking a value of an object in a running process

In this simple example, how can I access the value of a counter object while the process is still running?
import multiprocessing
import time

class Counter(object):
    def __init__(self):
        self.value = 0

    def update(self):
        self.value += 1

def job(counter):
    while True:
        counter.update()

if __name__ == '__main__':
    counter = Counter()
    p = multiprocessing.Process(target=job, args=(counter,))
    p.start()
    time.sleep(10)
    # I want to check the value of the counter object here
    p.terminate()
You have to use multiprocessing.Queue() or multiprocessing.Pipe() to communicate between processes.
multiprocessing.Pipe() creates two endpoints, conn_1 and conn_2; use one of them in the main process and the other in the subprocess.
Use poll() to check whether there is something in the pipe before calling recv() to receive data (calling recv() directly blocks the program until something is sent into the pipe). The subprocess can then use send() to send back the result.
Here conn_2 is used in job():
import multiprocessing
import time

class Counter(object):
    def __init__(self):
        self.value = 0

    def update(self):
        self.value += 1

def job(counter, conn):
    while True:
        counter.update()
        if conn.poll():
            print('job:', conn.recv())
            conn.send(counter.value)

if __name__ == '__main__':
    conn_1, conn_2 = multiprocessing.Pipe()
    counter = Counter()
    p = multiprocessing.Process(target=job, args=(counter, conn_2))
    p.start()
    time.sleep(2)
    # I want to check the value of the counter object here
    conn_1.send('give me result')
    print('result:', conn_1.recv())
    p.terminate()
Here conn_2 is used directly in the class:
import multiprocessing
import time

class Counter(object):
    def __init__(self, conn):
        self.conn = conn
        self.value = 0

    def update(self):
        self.value += 1
        if self.conn.poll():  # if message from main process
            print('Counter:', self.conn.recv())
            self.conn.send(self.value)

def job(counter):
    while True:
        counter.update()

if __name__ == '__main__':
    conn_1, conn_2 = multiprocessing.Pipe()
    counter = Counter(conn_2)
    p = multiprocessing.Process(target=job, args=(counter,))
    p.start()
    time.sleep(2)
    conn_1.send('give me result')
    print('result:', conn_1.recv())
    p.terminate()
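The answer mentions multiprocessing.Queue() as the other option but only demonstrates Pipe. For completeness, here is a rough sketch of my own (the snapshot interval of 100000 is arbitrary): the worker pushes occasional snapshots of the counter and the main process drains the queue to read the newest one.

import multiprocessing
import time

class Counter(object):
    def __init__(self):
        self.value = 0

    def update(self):
        self.value += 1

def job(counter, queue):
    while True:
        counter.update()
        if counter.value % 100000 == 0:
            queue.put(counter.value)  # push an occasional snapshot

if __name__ == '__main__':
    queue = multiprocessing.Queue()
    counter = Counter()
    p = multiprocessing.Process(target=job, args=(counter, queue))
    p.start()
    time.sleep(2)
    latest = None
    while not queue.empty():  # drain queued snapshots to reach the newest one
        latest = queue.get()
    print('latest snapshot:', latest)
    p.terminate()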
You could also consider attaching a debugger (such as the PyDev debugger, GDB or others) to the running process. You can then freeze the process with a breakpoint and inspect its state.

Python class object Sharing between worker processes

Here is my code. I do not understand why it raises the error AttributeError: 'AutoProxy[MySharedClass]' object has no attribute 'dict_'.
import time
import multiprocessing
from multiprocessing import Process, Manager
import multiprocessing.managers as manager

class MySharedClass(object):
    def __init__(self):
        self.dict_ = {}
        self.dict_['one'] = 1
        self.dict_['two'] = 2

class MyManager(manager.BaseManager):
    pass

def test_process(param_object):
    print "Child Process:"
    print param_object.dict_
    param_object.dict_['three'] = 3
    print param_object.dict_
    return param_object

def main_process():
    MyManager.register("MySharedClass", MySharedClass)
    my_manager = MyManager()
    my_manager.start()
    param_object = my_manager.MySharedClass()
    print "Main process:"
    print param_object.dict_
    time.sleep(.2)
    proc = Process(target=test_process, args=(param_object,))
    proc.start()
    print "Process created with id %s" % proc.pid
    time.sleep(10)
    print "Main process End:"
    print param_object.dict_

main_process()
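The thread ends without an answer, but the likely cause (my note, so treat it as an assumption) is that a BaseManager proxy such as AutoProxy[MySharedClass] only forwards public method calls to the managed object; plain attribute access like param_object.dict_ is not proxied, hence the AttributeError. A common workaround is to expose the dictionary through accessor methods:

class MySharedClass(object):
    def __init__(self):
        self.dict_ = {'one': 1, 'two': 2}

    def get_dict(self):
        # proxies forward method calls, so read the dict through a method
        return self.dict_

    def set_item(self, key, value):
        # mutate the managed object inside the manager's server process
        self.dict_[key] = value

The child process would then call param_object.set_item('three', 3) and print param_object.get_dict() instead of touching dict_ directly.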
