Python multiprocessing on Windows using shared values

I am trying to gain some insight into using multiprocessing with Python. I have an example of using shared values that works on Unix, but I cannot get a simple educational example to work on Windows 10.
The code below runs OK on Windows, but only with the call that updates the shared value commented out in foo().
What is my problem, please?
import multiprocessing as mp

def foo(q):
    #global shared_num
    q.put('hello')
    #shared_num.value = 777

if __name__ == '__main__':
    global shared_num
    mp.set_start_method('spawn')
    shared_num = mp.Value('d', 0)
    lock = mp.Lock()
    q = mp.Queue()
    p = mp.Process(target=foo, args=(q,))
    p.start()
    p.join()
    print(q.get(), " ", shared_num.value)
    #print(q.get(), " ")
If I run the code above with the line in foo() that sets the shared value uncommented, I get:
Traceback (most recent call last):
  File "C:\Program Files\WindowsApps\PythonSoftwareFoundation.Python.3.8_3.8.2032.0_x64__qbz5n2kfra8p0\lib\multiprocessing\process.py", line 315, in _bootstrap
    self.run()
  File "C:\Program Files\WindowsApps\PythonSoftwareFoundation.Python.3.8_3.8.2032.0_x64__qbz5n2kfra8p0\lib\multiprocessing\process.py", line 108, in run
    self._target(*self._args, **self._kwargs)
  File "C:\Users\ken38\OneDrive\Projects\Python\GH_Pi\python\ms_mp.py", line 6, in foo
    shared_num.value = 777
NameError: name 'shared_num' is not defined
hello 0.0

Michael Butscher actually answered this in his comment, but as I could not flag a comment as the answer, I thought I would show the corrected code as an answer instead.
The underlying cause: with the 'spawn' start method (the only one available on Windows), child processes do not inherit the parent's globals, so shared objects must be passed explicitly as arguments to the child.
This does highlight a difference to watch for when testing simple examples on Windows: Linux-based examples may not work on Windows.
This code worked on both Windows and Debian (RPi).
import multiprocessing as mp

def foo(q, shared_num, lock):
    q.put('hello')
    with lock:
        shared_num.value = 777

if __name__ == '__main__':
    mp.set_start_method('spawn')
    shared_num = mp.Value('d', 0)
    lock = mp.Lock()
    q = mp.Queue()
    p = mp.Process(target=foo, args=(q, shared_num, lock))
    p.start()
    p.join()
    print(q.get(), " ", shared_num.value)
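For contrast, here is a minimal sketch (my own illustration, not from the original answer) of why the global-variable pattern works on Unix: the 'fork' start method copies the parent's memory, globals included, into the child. It is Unix-only, so this raises an error on Windows.
import multiprocessing as mp

def foo():
    # Under 'fork' the child inherits the parent's globals at fork time,
    # so shared_num is visible here. Under 'spawn' this raises NameError.
    print(shared_num.value)

if __name__ == '__main__':
    mp.set_start_method('fork')  # Unix-only; not available on Windows
    shared_num = mp.Value('d', 777)
    p = mp.Process(target=foo)
    p.start()
    p.join()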

Related

An error while using locks in multiprocessing in Python

I'm a beginner learning Python and ran into some issues while using locks during multiprocessing.
I get exit code 0 and the right answer, but I still get some sort of error message which I don't fully understand. Here's the code I've written:
import time
import multiprocessing

def deposit(balance):
    for i in range(100):
        time.sleep(0.01)
        lck.acquire()
        balance.value += 1
        lck.release()

def withdraw(balance):
    for i in range(100):
        time.sleep(0.01)
        lck.acquire()
        balance.value -= 1
        lck.release()

if __name__ == '__main__':
    balance = multiprocessing.Value('i', 200)
    lck = multiprocessing.Lock()
    d = multiprocessing.Process(target=deposit, args=(balance,))
    w = multiprocessing.Process(target=withdraw, args=(balance,))
    d.start()
    w.start()
    d.join()
    w.join()
    print(balance.value)
and here's the error I get:
Process Process-1:
Traceback (most recent call last):
  File "C:\Users\rahul\AppData\Local\Programs\Python\Python39\lib\multiprocessing\process.py", line 315, in _bootstrap
    self.run()
  File "C:\Users\rahul\AppData\Local\Programs\Python\Python39\lib\multiprocessing\process.py", line 108, in run
    self._target(*self._args, **self._kwargs)
  File "C:\Users\rahul\PycharmProjects\pythonProject\LearningPython.py", line 10, in deposit
    lck.acquire()
NameError: name 'lck' is not defined
Process Process-2:
Traceback (most recent call last):
  File "C:\Users\rahul\AppData\Local\Programs\Python\Python39\lib\multiprocessing\process.py", line 315, in _bootstrap
    self.run()
  File "C:\Users\rahul\AppData\Local\Programs\Python\Python39\lib\multiprocessing\process.py", line 108, in run
    self._target(*self._args, **self._kwargs)
  File "C:\Users\rahul\PycharmProjects\pythonProject\LearningPython.py", line 17, in withdraw
    lck.acquire()
NameError: name 'lck' is not defined
200
Process finished with exit code 0
The problem here is that lck is out of scope of your child processes. Global variables aren't shared across processes. Try passing the lock into the processes.
Alternatively use threads as suggested in kahn's answer. They are much friendlier and still work fine in this case.
import time
import multiprocessing

def deposit(balance, lck):
    for i in range(100):
        time.sleep(0.01)
        lck.acquire()
        balance.value += 1
        lck.release()

def withdraw(balance, lck):
    for i in range(100):
        time.sleep(0.01)
        lck.acquire()
        balance.value -= 1
        lck.release()

if __name__ == '__main__':
    balance = multiprocessing.Value('i', 200)
    lck = multiprocessing.Lock()
    d = multiprocessing.Process(target=deposit, args=(balance, lck))
    w = multiprocessing.Process(target=withdraw, args=(balance, lck))
    d.start()
    w.start()
    d.join()
    w.join()
    print(balance.value)
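As a side note, the acquire/release pairs can be written more compactly with the lock's context-manager form, which also releases the lock if the body raises. A sketch of deposit rewritten this way, relying on the imports above:
def deposit(balance, lck):
    for i in range(100):
        time.sleep(0.01)
        with lck:  # acquires here, releases automatically on exit
            balance.value += 1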
Because deposit and withdraw run in other processes. In those processes __name__ is not '__main__', so the if block is not executed and lck is never defined.
Try running:
import os
import multiprocessing

def deposit(balance):
    print(os.getpid(), __name__)

def withdraw(balance):
    print(os.getpid(), __name__)

if __name__ == '__main__':
    print(os.getpid(), __name__)
    balance = multiprocessing.Value('i', 200)
    lck = multiprocessing.Lock()
    d = multiprocessing.Process(target=deposit, args=(balance,))
    w = multiprocessing.Process(target=withdraw, args=(balance,))
    d.start()
    w.start()
    d.join()
    w.join()
In my case, it shows:
19604 __main__
33320 __mp_main__
45584 __mp_main__
Your code can run if you put lck = multiprocessing.Lock() outside the if, but I'm sure that's not what you want.
You should use threading instead of multiprocessing in this case; have a look at the difference between multithreading and multiprocessing.
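To illustrate that suggestion, here is a minimal sketch (my own, assuming a plain int and a threading.Lock are enough once everything lives in one process): threads share the module's globals, so nothing needs to be passed explicitly.
import time
import threading

balance = 200
lck = threading.Lock()

def deposit():
    global balance
    for i in range(100):
        time.sleep(0.01)
        with lck:
            balance += 1

def withdraw():
    global balance
    for i in range(100):
        time.sleep(0.01)
        with lck:
            balance -= 1

d = threading.Thread(target=deposit)
w = threading.Thread(target=withdraw)
d.start()
w.start()
d.join()
w.join()
print(balance)  # 200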

'tuple' object is not callable in pool.apply multiprocessing in Python [duplicate]

I am trying to execute the following script using multiprocessing and queues:
from googlefinance import getQuotes
from yahoo_finance import Share
import multiprocessing

class Stock:
    def __init__(self, symbol, q):
        self.symbol = symbol
        self.q = q

    def current_value(self):
        current_price = self.q.put(float(getQuotes(self.symbol)[0]['LastTradeWithCurrency']))
        return current_price

    def fundems(self):
        marketcap = self.q.put(Share(self.symbol).get_market_cap())
        bookvalue = self.q.put(Share(self.symbol).get_book_value())
        dividend = self.q.put(Share(self.symbol).get_dividend_share())
        dividend_paydate = self.q.put(Share(self.symbol).get_dividend_pay_date())
        dividend_yield = self.q.put(Share(self.symbol).get_dividend_yield())
        return marketcap, bookvalue, dividend, dividend_paydate, dividend_yield

def main():
    q = multiprocessing.Queue()
    Stock1 = Stock('aapl', q)
    p1 = multiprocessing.Process(target = Stock1.current_value(), args = (q,))
    p2 = multiprocessing.Process(target = Stock1.fundems(), args = (q,))
    p1.start()
    p2.start()
    p1.join()
    p2.join()
    while q.empty() is False:
        print q.get()

if __name__ == '__main__':
    main()
I am getting the following output:
Process Process-2:
Traceback (most recent call last):
  File "/usr/lib/python2.7/multiprocessing/process.py", line 258, in _bootstrap
    self.run()
  File "/usr/lib/python2.7/multiprocessing/process.py", line 114, in run
    self._target(*self._args, **self._kwargs)
TypeError: 'tuple' object is not callable
139.52
732.00B
25.19
2.28
2/16/2017
1.63
Here I see that I am able to get the output I wanted, but there was an error before it, and that is confusing me.
Can anyone please help me understand the concept here.
Thanks in advance.
The target should be an uncalled function; you're calling the function in the parent process and trying to launch a Process with the result of the call as the target. Change:
p1 = multiprocessing.Process(target = Stock1.current_value(), args = (q,))
p2 = multiprocessing.Process(target = Stock1.fundems(), args = (q,))
to:
p1 = multiprocessing.Process(target=Stock1.current_value)
p2 = multiprocessing.Process(target=Stock1.fundems)
q is removed as an argument because the object was constructed with q and uses its own state to access it; it doesn't receive it as an argument to each method.
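A stripped-down sketch of the distinction (with a hypothetical work function, not from the original question): passing work hands the function itself to the child, while passing work() runs it in the parent and hands over its return value instead.
import multiprocessing

def work():
    print('running in the child process')

if __name__ == '__main__':
    # Wrong: work() executes here in the parent; its return value (None,
    # or a tuple if it returned one) becomes the target.
    # p = multiprocessing.Process(target=work())
    # Right: pass the function object uncalled.
    p = multiprocessing.Process(target=work)
    p.start()
    p.join()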

python multiprocessing process pool fails to find asynced function

Pretty simple multiprocessing example. Goals:
Create a pool of process workers using mp.Pool
Do some sort of transformation (here a simple string operation on line)
Push the transformed line to mp.Queue
Further process data from that mp.Queue in the main program afterwards
So let's do this:
import multiprocessing as mp
Init async processes with an mp.Queue:
def process_pool_init_per_process(q):
    global mp_queue
    mp_queue = q
Really init the mp_pool:
no_of_processes = 4
q = mp.Queue()
mp_pool = mp.Pool(no_of_processes, process_pool_init_per_process, (q,))
This is getting called for every line to be processed async:
def process_async_main(line):
    print(line)
    q.put(line + '_asynced')
And now let's start it using apply_async:
line = "Hi, this is a test to test mp_queues with mp process pools"
handler = mp_pool.apply_async(process_async_main, (line))
mp_resp = handler.get()
And read from the queue:
while not q.empty():
    print(q.get())  # This should be the initial line
Fails with:
python3 mp_process_example.py
Process ForkPoolWorker-1:
Traceback (most recent call last):
  File "/usr/lib/python3.6/multiprocessing/process.py", line 258, in _bootstrap
    self.run()
  File "/usr/lib/python3.6/multiprocessing/process.py", line 93, in run
    self._target(*self._args, **self._kwargs)
  File "/usr/lib/python3.6/multiprocessing/pool.py", line 108, in worker
    task = get()
  File "/usr/lib/python3.6/multiprocessing/queues.py", line 337, in get
    return _ForkingPickler.loads(res)
AttributeError: Can't get attribute 'process_async_main' on <module '__main__' from 'mp_process_example.py'>
The question is: why can multiprocessing not find the function in the main module?
Complete code to reproduce:
import multiprocessing as mp

##### Init async processes
def process_pool_init_per_process(q):
    global mp_queue
    mp_queue = q

# Really init the mp_pool
no_of_processes = 4
q = mp.Queue()
mp_pool = mp.Pool(no_of_processes, process_pool_init_per_process, (q,))

# This is getting called for every line to be processed async
def process_async_main(line):
    print(line)
    q.put(line + '_asynced')

line = "Hi, this is a test to test mp_queues with mp process pools"
handler = mp_pool.apply_async(process_async_main, (line))
mp_resp = handler.get()

while not q.empty():
    print(q.get())  # This should be the initial line
OK... I've got it. The pool's worker processes are forked the moment mp.Pool(...) is called, so a function defined after that point in the same file never exists inside the workers; that is why the unpickling in the worker cannot find process_async_main. Moving the function into its own module, imported before the pool is created, fixes it.
Writing the code like this:
asynced.py:
##### Init async processes
def process_pool_init_per_process(q):
    global mp_queue
    mp_queue = q

##### Function to be asynced
def process_async_main(line):
    print(line)
    mp_queue.put(line + '_asynced')
And then mp_process_example.py:
import multiprocessing as mp
from asynced import process_async_main, process_pool_init_per_process

# Really init the mp_pool
no_of_processes = 4
q = mp.Queue()
mp_pool = mp.Pool(no_of_processes, process_pool_init_per_process, (q,))

line = "Hi, this is a test to test mp_queues with mp process pools"
handler = mp_pool.apply_async(process_async_main, (line,))
mp_resp = handler.get()

while not q.empty():
    print(q.get())  # This should be the initial line + "_asynced"
Works as expected:
$ python3 mp_process_example.py
Hi, this is a test to test mp_queues with mp process pools
Hi, this is a test to test mp_queues with mp process pools_asynced
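An alternative fix, sketched under the same fork-start-method assumption: keep everything in one file but define the worker function before creating the pool, so the forked workers already have it. The __main__ guard also keeps this pattern safe under spawn, where children re-import the module.
import multiprocessing as mp

def process_pool_init_per_process(q):
    global mp_queue
    mp_queue = q

# Defined BEFORE the pool is created, so it exists in the workers
def process_async_main(line):
    mp_queue.put(line + '_asynced')

if __name__ == '__main__':
    q = mp.Queue()
    mp_pool = mp.Pool(4, process_pool_init_per_process, (q,))
    handler = mp_pool.apply_async(process_async_main, ("test line",))
    handler.get()
    while not q.empty():
        print(q.get())  # "test line_asynced"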

Python Multiprocess issues process not starting

I wrote this code. I want a main thread that starts multiple subprocesses, each of which spawns a listener thread to wait for a kill message. subprocess works, but testprocess does not run, and there are no errors. Any ideas?
from multiprocessing import Process, Pipe
from threading import Thread
import time

Alive = True

def listener_thread(conn):  # listens for kill from main
    global Alive
    while True:
        data = conn.recv()
        if data == "kill":
            Alive = False  # value for kill
            break

def subprocess(conn):
    t = Thread(target=listener_thread, args=(conn,))
    count = 0
    t.start()
    while Alive:
        print "Run number = %d" % count
        count = count + 1

def testprocess(conn):
    t = Thread(target=listner_thread, args=(conn,))
    count = 0
    t.start()
    while Alive:
        print "This is a different thread run = %d" % count
        count = count + 1

parent_conn, child_conn = Pipe()
p = Process(target=subprocess, args=(child_conn,))
p2 = Process(target=testprocess, args=(child_conn,))
runNum = int(raw_input("Enter a number: "))
p.start()
p2.start()
time.sleep(runNum)
parent_conn.send("kill")  # sends kill to listener thread to tell them when to stop
p.join()
p2.join()
A typo in testprocess makes that process die early: listner_thread should be listener_thread.
If you comment out the subprocess-related code and run it, you will see the following error:
Process Process-1:
Traceback (most recent call last):
  File "/usr/lib/python2.7/multiprocessing/process.py", line 258, in _bootstrap
    self.run()
  File "/usr/lib/python2.7/multiprocessing/process.py", line 114, in run
    self._target(*self._args, **self._kwargs)
  File "t.py", line 25, in testprocess
    t = Thread(target=listner_thread, args=(conn,))
NameError: global name 'listner_thread' is not defined

Can't reference a global dictionary entry

I am aware that global variables are not always the best way to deal with things, but in this case they are fine for what I am doing. I am not going to be doing heavy reads/writes, mostly just reads.
alive = {'subAlive': True, 'testAlive': True};

def sub_listener(conn):  # listens for kill from main
    global alive
    while True:
        data = conn.recv()
        if data == "kill":
            alive['subAlive'] = False;  # value for kill
            break

def subprocess(conn, threadNum):
    t = Thread(target=sub_listener, args=(conn,))
    count = 0
    threadVal = threadNum
    t.start()
    run = alive[subAlive];
    while run:
        print "Thread %d Run number = %d" % (threadVal, count)
        count = count + 1

sub_parent, sub_child = Pipe()
runNum = int(raw_input("Enter a number: "))
threadNum = int(raw_input("Enter number of threads: "))
print "Starting threads"
for i in range(threadNum):
    p = Process(target=subprocess, args=(sub_child, i))
    p.start()
print "Starting run"
time.sleep(runNum)
print "Terminating Subprocess run"
for i in range(threadNum):
    sub_parent.send("kill")  # sends kill to listener
p.join()
I get this error:
NameError: global name 'testAlive' is not defined
Traceback (most recent call last):
  File "/usr/lib/python2.7/multiprocessing/process.py", line 258, in _bootstrap
    self.run()
  File "/usr/lib/python2.7/multiprocessing/process.py", line 114, in run
    self._target(*self._args, **self._kwargs)
  File "multiprocessDemo.py", line 38, in subprocess
    run = alive[subAlive];
NameError: global name 'subAlive' is not defined
I have tried accessing the dictionary a few different ways and I can't seem to find out what is wrong via Google. If I use separate variables it does work, but that won't scale well dynamically.
Put quotes around subAlive:
run = alive['subAlive']
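With the quoting fix in place, one further caveat worth noting (my observation, beyond the original answer): the loop copies the flag into run once, before the loop starts, so the kill message would never stop it. Re-checking the dictionary on each iteration avoids that; a sketch of the function rewritten this way:
def subprocess(conn, threadNum):
    t = Thread(target=sub_listener, args=(conn,))
    count = 0
    t.start()
    # Look the flag up every pass; a one-time `run = alive['subAlive']`
    # snapshot would never see the listener set it to False.
    while alive['subAlive']:
        print "Thread %d Run number = %d" % (threadNum, count)
        count = count + 1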
