Python Class Object Sharing between Processes Created Using the multiprocessing Module

How do I create a shared Python object of my class that can be modified by worker processes? I created the worker processes using multiprocessing.Process. I have some knowledge of the multiprocessing.Manager module. Can anyone explain, with an example, how to register my class with a Manager, start the manager, and create a shared object of my class?

Here is an example:
from multiprocessing import Pool
from multiprocessing.managers import BaseManager

class MySharedClass(object):
    stored_value = 0

    def get(self):
        return self.stored_value

    def set(self, new_value):
        self.stored_value = new_value
        return self.stored_value

class MyManager(BaseManager):
    pass

MyManager.register('MySharedClass', MySharedClass)

def worker(proxy_object, i):
    proxy_object.set(proxy_object.get() + i)
    print("id %d, sum %d" % (i, proxy_object.get()))
    return proxy_object

if __name__ == '__main__':
    manager = MyManager()
    manager.start()
    shared = manager.MySharedClass()
    pool = Pool(5)
    for i in range(33):
        pool.apply(func=worker, args=(shared, i))
    pool.close()
    pool.join()
    print("result: %d" % shared.get())
id 0, sum 0
id 1, sum 1
id 2, sum 3
...
id 31, sum 496
id 32, sum 528
result: 528
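Note that pool.apply blocks until the worker returns, so the calls above run strictly one at a time and the read-modify-write in worker cannot interleave. With apply_async the separate get() and set() calls could race and lose updates. Here is a minimal sketch of one way to keep an async variant safe; the safe_worker name and the extra SyncManager lock are my additions, reusing MySharedClass and MyManager from above:

from multiprocessing import Pool, Manager

def safe_worker(proxy_object, lock, i):
    with lock:  # serialize the read-modify-write on the shared object
        proxy_object.set(proxy_object.get() + i)

if __name__ == '__main__':
    manager = MyManager()
    manager.start()
    shared = manager.MySharedClass()
    sync_manager = Manager()    # keep a reference so its server process stays alive
    lock = sync_manager.Lock()  # a picklable lock proxy
    with Pool(5) as pool:
        pool.starmap(safe_worker, [(shared, lock, i) for i in range(33)])
    print("result: %d" % shared.get())  # still 528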
Another variant (I have never used it in a real project):
from multiprocessing import Pool
from multiprocessing.managers import BaseManager, NamespaceProxy

class MySharedClass(object):
    def __init__(self):
        self.stored_value = 0

    def get(self):
        return self.stored_value

    def set(self, new_value):
        self.stored_value = new_value
        return self.stored_value

class MyManager(BaseManager):
    pass

class MyProxy(NamespaceProxy):
    _exposed_ = ('__getattribute__', '__setattr__', '__delattr__')  # add 'get' to use get
    # def get(self):
    #     callmethod = object.__getattribute__(self, '_callmethod')
    #     return callmethod('get')

MyManager.register('MySharedClass', MySharedClass, MyProxy)

def worker(proxy_object, i):
    proxy_object.stored_value = proxy_object.stored_value + i
    print("id %d, sum %d" % (i, proxy_object.stored_value))
    return proxy_object

if __name__ == '__main__':
    manager = MyManager()
    manager.start()
    shared = manager.MySharedClass()
    print(shared.stored_value)
    pool = Pool(5)
    for i in range(33):
        pool.apply(func=worker, args=(shared, i))
    pool.close()
    pool.join()
    print("result: %d" % shared.stored_value)

Related

Shared memory in multiprocessing.pool

Code:
import multiprocessing

class A:
    name = "A name"
    arr_b = []

    def __init__(self, num):
        for i in range(5):
            self.arr_b.append(B())

class B:
    name = "B name"

    def __init__(self):
        pass

def func(num):
    return A(num)

if __name__ == '__main__':
    pool = multiprocessing.Pool()
    result = pool.map(func, range(5))
    for res in result:
        print(res.name)
        print(res.arr_b)
Result:
A name
[]
A name
[]
A name
[]
A name
[]
A name
[]
How can I share the array of B class objects correctly?
I tried using Manager and BaseManager, but they only let me use an object created in the main process, whereas I need to create the object in func and return it to main.
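One likely cause, assuming the code above is representative: arr_b is a class attribute, and pickling an A instance on the way back from the worker only carries instance attributes, so the list filled inside the worker never reaches the parent process. A minimal sketch of the fix, making arr_b an instance attribute:

import multiprocessing

class B:
    name = "B name"

class A:
    name = "A name"

    def __init__(self, num):
        # instance attribute: it is pickled together with the object,
        # so the B instances survive the trip back from the worker
        self.arr_b = [B() for _ in range(5)]

def func(num):
    return A(num)

if __name__ == '__main__':
    with multiprocessing.Pool() as pool:
        for res in pool.map(func, range(5)):
            print(res.name, res.arr_b)  # arr_b now holds five B objects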

Python multiprocessing, share class instance does not work

I want to send tasks to the pool inside the shared class based on some conditions. But I got some unexpected results, shown below.
• Why is len(self.map) 0, not 100?
• Do I have to restructure my code to achieve this goal?
from multiprocessing import Pool
from multiprocessing.managers import BaseManager

pool = None

def doSomething(obj, *args):
    obj.doSomething(*args)

class SharedClass:
    def __init__(self):
        global pool
        self.map = set()
        pool = Pool(4)

    def someCondition(self):
        # the condition relies on the instance; this is just an example
        return True

    def go(self, n):
        global pool
        for i in range(n):
            if self.someCondition():
                # pass the shared class to another process
                pool.apply_async(doSomething, (self, i))
        pool.close()
        pool.join()
        # got AssertionError here
        # why is the len of self.map 0?
        assert len(self.map) == 100

    def doSomething(self, n):
        # this should change the same SharedClass instance?
        self.map.add(n)

class MyManager(BaseManager):
    pass

MyManager.register("SharedClass", SharedClass)

def main():
    manager = MyManager()
    manager.start()
    obj = manager.SharedClass()
    obj.go(100)

if __name__ == "__main__":
    main()
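The unexpected result follows from where the code runs: go() executes on the real SharedClass instance inside the manager's server process, but the self handed to pool.apply_async is pickled into each pool worker, so doSomething mutates a throwaway copy and the updates never travel back. A sketch of one way around this, assuming it is acceptable to drive the pool from the main process and hand the workers the manager proxy instead of the raw object:

from multiprocessing import Pool
from multiprocessing.managers import BaseManager

class SharedClass:
    def __init__(self):
        self.map = set()

    def add(self, n):
        self.map.add(n)

    def size(self):
        return len(self.map)

class MyManager(BaseManager):
    pass

MyManager.register("SharedClass", SharedClass)

def worker(proxy, i):
    # the proxy forwards this call to the single real object
    # living in the manager's server process
    proxy.add(i)

if __name__ == "__main__":
    manager = MyManager()
    manager.start()
    obj = manager.SharedClass()  # a proxy, safe to pickle into workers
    with Pool(4) as pool:
        pool.starmap(worker, [(obj, i) for i in range(100)])
    assert obj.size() == 100  # all updates hit the same instance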

Python multiprocessing Event with process pool and custom manager

The following code works fine:
import time
from concurrent.futures import Future, as_completed
from concurrent.futures.process import ProcessPoolExecutor
import multiprocessing as mp
from multiprocessing.managers import BaseManager

class Progress(object):
    _target: int = 0
    _progress: int = 0

    def __init__(self):
        self._target = 0
        self._progress = 0

    def completed(self, n):
        self._progress += n

    def progress(self):
        return (self._progress / self._target) * 100

    def set_target(self, n):
        self._target = n

class ObjectManager(BaseManager):
    pass

def dummy_worker(progress: Progress, cancel: mp.Event = None):
    print("--> Worker started")
    for i in range(10):
        time.sleep(1)
        progress.completed(1)
    return 1

if __name__ == "__main__":
    ObjectManager.register('Progress', Progress)
    print('Starting manager')
    with ObjectManager() as manager:
        print('Manager started')
        progress = manager.Progress()
        progress.set_target(10)
        with ProcessPoolExecutor() as pool:
            f = pool.submit(dummy_worker, progress)
            futures = [f]
            for f in as_completed(futures):
                print(f.result())
        print(f'Progress: {progress.progress()}')
The following, however, gives me "Condition objects should only be shared between processes through inheritance". I am not sure how to use an Event in dummy_worker. The main goal is to pass some shared object and also have a way to cancel the worker. Sorry for the long post; I wanted to give the full code for clarity.
import time
from concurrent.futures import Future, as_completed
from concurrent.futures.process import ProcessPoolExecutor
import multiprocessing as mp
from multiprocessing.managers import BaseManager

class Progress(object):
    _target: int = 0
    _progress: int = 0

    def __init__(self):
        self._target = 0
        self._progress = 0

    def completed(self, n):
        self._progress += n

    def progress(self):
        return (self._progress / self._target) * 100

    def set_target(self, n):
        self._target = n

class ObjectManager(BaseManager):
    pass

def dummy_worker(progress: Progress, cancel: mp.Event = None):
    print("--> Worker started")
    for i in range(10):
        time.sleep(1)
        progress.completed(1)
    return 1

if __name__ == "__main__":
    ObjectManager.register('Progress', Progress)
    cancel = mp.Event()
    print('Starting manager')
    with ObjectManager() as manager:
        print('Manager started')
        progress = manager.Progress()
        progress.set_target(10)
        with ProcessPoolExecutor() as pool:
            f = pool.submit(dummy_worker, progress, cancel)
            futures = [f]
            for f in as_completed(futures):
                print(f.result())
        print(f'Progress: {progress.progress()}')
Assume I am using Python 3.5+.
Try the following changes:
from multiprocessing.managers import SyncManager

class ObjectManager(SyncManager):
    pass

# use an Event() created by the ObjectManager instance: cancel = manager.Event()
if __name__ == "__main__":
    ObjectManager.register('Progress', Progress)
    # cancel = mp.Event()  # not this
    print('Starting manager')
    with ObjectManager() as manager:
        print('Manager started')
        progress = manager.Progress()
        cancel = manager.Event()  # but rather this
        progress.set_target(10)
        with ProcessPoolExecutor() as pool:
            f = pool.submit(dummy_worker, progress, cancel)
            futures = [f]
            for f in as_completed(futures):
                print(f.result())
        print(f'Progress: {progress.progress()}')
Prints:
Starting manager
Manager started
--> Worker started
1
Progress: 100.0
Also, get rid of (or change) the remaining references to mp.Event.
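The event now reaches the worker, but dummy_worker as written never looks at it. A minimal sketch of a cancellation-aware worker, assuming it replaces the dummy_worker above (the -1 return value is my own sentinel, not from the original):

def dummy_worker(progress, cancel=None):
    print("--> Worker started")
    for i in range(10):
        if cancel is not None and cancel.is_set():
            print("--> Worker cancelled")
            return -1  # hypothetical sentinel meaning "cancelled"
        time.sleep(1)
        progress.completed(1)
    return 1

Calling cancel.set() from the main process then stops the worker at its next loop iteration.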

Python class object sharing between worker processes

Here is my code. I do not understand why it gives this error: AttributeError: 'AutoProxy[MySharedClass]' object has no attribute 'dict_'
import time
from multiprocessing import Process
import multiprocessing.managers as manager

class MySharedClass(object):
    def __init__(self):
        self.dict_ = {}
        self.dict_['one'] = 1
        self.dict_['two'] = 2

class MyManager(manager.BaseManager):
    pass

def test_process(param_object):
    print("Child Process:")
    print(param_object.dict_)
    param_object.dict_['three'] = 3
    print(param_object.dict_)
    return param_object

def main_process():
    MyManager.register("MySharedClass", MySharedClass)
    my_manager = MyManager()
    my_manager.start()
    param_object = my_manager.MySharedClass()
    print("Main process:")
    print(param_object.dict_)
    time.sleep(.2)
    proc = Process(target=test_process, args=(param_object,))
    proc.start()
    print("Process created with id %s" % proc.pid)
    time.sleep(10)
    print("Main process End:")
    print(param_object.dict_)

if __name__ == "__main__":
    main_process()
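The AutoProxy that register() generates by default exposes only the referent's public methods, not its attributes, so param_object.dict_ raises AttributeError in any process. A minimal sketch of one workaround, assuming method-based access is acceptable:

class MySharedClass(object):
    def __init__(self):
        self.dict_ = {'one': 1, 'two': 2}

    # AutoProxy forwards method calls, not attribute access,
    # so wrap the dict in methods the proxy can reach
    def get_dict(self):
        return self.dict_

    def set_item(self, key, value):
        self.dict_[key] = value

After registering this version, the child would call param_object.set_item('three', 3) and param_object.get_dict() instead of touching dict_ directly.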

Python multiprocessing process count

#coding:utf-8
import sys
import time
import os
import multiprocessing

class Worker(object):
    def __init__(self):
        self.progress = 0
        self.task_info = None

    def init(self):
        pass

    def status(self):
        pass

    def set_task_info(self, task_info):
        self.task_info = task_info

    def run(self, worker_status_meta_dict):
        print multiprocessing.current_process()
        print "process is %d" % self.progress
        while self.progress < 5:
            self.progress = self.progress + 1
            worker_status_meta_dict['state'] = 0
            worker_status_meta_dict['status'] = "running"
            time.sleep(2)
        worker_status_meta_dict['state'] = 1
        worker_status_meta_dict['status'] = "succeeded"
        print "bavscan worker finished..."

if __name__ == "__main__":
    worker = Worker()
    worker_process_dict = multiprocessing.Manager().dict()
    process = multiprocessing.Process(target=Worker.run, args=(worker, worker_process_dict))
    process.start()
    time.sleep(60)
This is a simple demo of Python multiprocessing.
The main process invokes the Worker.run method in a subprocess via multiprocessing.Process.
When I run it on Windows 7, the main process launches two subprocesses.
I found the reason in "Python27\Lib\multiprocessing\__init__.py":
def Manager():
    '''
    Returns a manager associated with a running server process

    The managers methods such as `Lock()`, `Condition()` and `Queue()`
    can be used to create shared objects.
    '''
    from multiprocessing.managers import SyncManager
    m = SyncManager()
    m.start()
    return m
m.start() launches a subprocess that runs the manager's server, and that server is the second child process you see.
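So the two children are the Worker process started explicitly and the manager's server process started by Manager(). A quick way to observe this, as a sketch (Python 3 syntax here, unlike the 2.7 code above):

import multiprocessing
import time

def work():
    time.sleep(1)  # keep the child alive long enough to observe it

if __name__ == "__main__":
    manager = multiprocessing.Manager()  # child 1: the manager's server process
    p = multiprocessing.Process(target=work)
    p.start()                            # child 2: the worker
    for child in multiprocessing.active_children():
        print(child.name, child.pid)     # expect two entries
    p.join()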
