Can't reference a global dictionary entry - Python

I am aware that global variables are not always the best way to deal with things; in this case they are fine for what I am doing. I am not going to be doing heavy reads/writes — mostly just reads.
# Shared flag table read by the worker loops; the listener thread flips
# an entry to False when the parent pipe delivers a "kill" message.
alive = {'subAlive': True, 'testAlive': True}


def sub_listener(conn):
    """Block on *conn* until a "kill" message arrives, then clear the
    'subAlive' flag in the global ``alive`` table and return."""
    global alive
    while True:
        message = conn.recv()
        if message == "kill":
            alive['subAlive'] = False  # signal the worker loop to stop
            break
def subprocess(conn, threadNum):
t = Thread(target=sub_listener, args=(conn,))
count = 0
threadVal = threadNum
t.start()
run = alive[subAlive];
while run:
print "Thread %d Run number = %d" % (threadVal, count)
count = count + 1
# ---- driver (Python 2) -------------------------------------------------
# One end of the pipe stays in the parent; the child end goes to every
# worker process.
sub_parent, sub_child = Pipe()
runNum = int(raw_input("Enter a number: "))  # seconds to let the run last
threadNum = int(raw_input("Enter number of threads: "))
print "Starting threads"
for i in range(threadNum):
    p = Process(target=subprocess, args=(sub_child, i))
    p.start()
print "Starting run"
time.sleep(runNum)
print "Terminating Subprocess run"
for i in range(threadNum):
    sub_parent.send("kill") #sends kill to listener
# NOTE(review): only the last Process created is joined here -- p is
# rebound on every loop iteration, so earlier workers are never joined.
p.join()
I get this error
NameError: global name 'testAlive' is not defined
Traceback (most recent call last):
File "/usr/lib/python2.7/multiprocessing/process.py", line 258, in _bootstrap
self.run()
File "/usr/lib/python2.7/multiprocessing/process.py", line 114, in run
self._target(*self._args, **self._kwargs)
File "multiprocessDemo.py", line 38, in subprocess
run = alive[subAlive];
NameError: global name 'subAlive' is not defined
I have tried accessing the dictionary a few different ways and I can't seem to find out what is wrong on Google. If I use separate variables it does work, but that won't scale well dynamically.

Put quotes around subAlive:
run = alive['subAlive']

Related

Python multiprocessing on Windows using shared values

I am trying to gain an insight into using multiprocessing with python. I have an example of using shared values for Unix but I cannot get a simple educational example to work on Windows 10.
I have the code below running ok on Windows but with the call updating the shared value commented out in foo().
What is my problem please?
import multiprocessing as mp

def foo(q):
    # Child-process entry point.  The commented-out lines demonstrate the
    # bug being asked about: shared_num does not exist in the child.
    #global shared_num
    q.put('hello')
    #shared_num.value = 777

if __name__ == '__main__':
    # NOTE(review): with the 'spawn' start method the child re-imports
    # this module, and shared_num is assigned only inside this guard, so
    # the child never defines it -- uncommenting the line in foo() raises
    # exactly the NameError quoted below.  Passing the Value to
    # Process(args=...) is the fix shown in the corrected code further on.
    global shared_num  # no-op: module scope is already global
    mp.set_start_method('spawn')
    shared_num = mp.Value('d', 0)
    lock = mp.Lock()
    q = mp.Ueue() if False else mp.Queue()  # original: q = mp.Queue()
    p = mp.Process(target=foo, args=(q,))
    p.start()
    p.join()
    print(q.get(), " ",shared_num.value)
    #print(q.get(), " ")
If I run the code below with the foo() setting the shared value I get:
Traceback (most recent call last):
File "C:\Program Files\WindowsApps\PythonSoftwareFoundation.Python.3.8_3.8.2032.0_x64__qbz5n2kfra8p0\lib\multiprocessing\process.py", line 315, in _bootstrap
self.run()
File "C:\Program Files\WindowsApps\PythonSoftwareFoundation.Python.3.8_3.8.2032.0_x64__qbz5n2kfra8p0\lib\multiprocessing\process.py", line 108, in run
self._target(*self._args, **self._kwargs)
File "C:\Users\ken38\OneDrive\Projects\Python\GH_Pi\python\ms_mp.py", line 6, in foo
shared_num.value = 777
NameError: name 'shared_num' is not defined
hello 0.0
Michael Butscher actually answered this with his comment. But as I could not flag that as an answer I thought I would show the corrected code as an answer.
This does highlight a difference when you test simple examples on Windows: Linux-based examples may not work on Windows.
This code worked on both Windows and Debian (Rpi).
import multiprocessing as mp


def foo(q, shared_num, lock):
    """Child-process entry point: report readiness on *q* and update the
    shared counter under *lock*.  Works with the 'spawn' start method
    because every object it needs arrives as an argument."""
    q.put('hello')
    with lock:
        shared_num.value = 777


if __name__ == '__main__':
    global shared_num
    mp.set_start_method('spawn')
    shared_num = mp.Value('d', 0)
    lock = mp.Lock()
    q = mp.Queue()
    worker = mp.Process(target=foo, args=(q, shared_num, lock,))
    worker.start()
    worker.join()
    print(q.get(), " ",shared_num.value)

Python multithreading and strange TypeError: takes exactly 1 argument (2 given)

def crawler(id):
    # Echo the id, fetch one course page, then pause between requests.
    # NOTE(review): `crawer` is presumably a module imported elsewhere in
    # the original script -- confirm it is not a typo, since this
    # function itself is named `crawler`.
    print id
    crawer.getCourseFromUrl("http://www.imooc.com/view/"+id)
    time.sleep(3)
def main():
print '*** Starting crawler ***'
try:
for id in xrange(100):
threads = []
for i in range(10):
t = threading.Thread(target = crawler,args = str(i+1))
threads.append(t)
for t in threads:
t.start()
for t in threads:
t.join()
for t in threads:
t.close()
except:
pass
print '*** crawler End ***'
Above is my code, and when args is 1 to 9, it works well, but when it comes to 10 and larger it comes to the error:
Exception in thread Thread-10:
Traceback (most recent call last):
File "/System/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/threading.py", line 810, in __bootstrap_inner
self.run()
File "/System/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/threading.py", line 763, in run
self.__target(*self.__args, **self.__kwargs)
TypeError: crawler() takes exactly 1 argument (2 given)
I could not figure out what is wrong.
Thread gets args and use it with function this way:
crawler(args[0], args[1], ...)
When str(i+1) has only one char it gives
crawler(args[0])
When str(i+1) has two chars it gives
crawler(args[0], args[1])
but your functions expects only one argument.
You have to use list or tuple in Thread
Thread(target = crawler,args = [ str(i+1) ] )
Thread(target = crawler,args = ( str(i+1), ) )
and then args[0] will get all str(i+1) as one element.
Try to pass args as a tuple:
t = threading.Thread(target = crawler, args = (str(i+1),))
As documented here https://docs.python.org/2/library/threading.html#threading.Thread

Local variable referenced before assignment, using multi-threading

Code to add and delete values in a list are operations performed in different threads.
using these global variables in multi-threading:
from threading import Thread
import time
a=[]   # shared buffer: val_in appends, val_out pops
i = 0  # index of the next slot val_in will report
j = 0  # index val_out deletes from (never changed, so always 0)
function for thread1:
def val_in():
while 1:
a.append(raw_input())
print "%s value at %d: %d added" % ( time.ctime(time.time()), i ,int(a[i])) // line 14
i+=1
function for thread 2:
def val_out():
while 1:
time.sleep(5)
try:
print "%s value at %d: %d deleted" % (time.ctime(time.time()), j, int(a.pop(j)))
i-=1
except:
print"no values lefts"
time.sleep(2)
defining and starting threads:
# Start the producer and consumer threads (Python 2; raw_input feeds val_in).
t = Thread(target = val_in)
t1 = Thread(target= val_out)
t.start()
t1.start()
Now when input is given as 1, it should be added to the list a, but there is an error:
Error: Exception in thread Thread-1:
Traceback (most recent call last):
File "/System/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/threading.py", line 810, in __bootstrap_inner
self.run()
File "/System/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/threading.py", line 763, in run
self.__target(*self.__args, **self.__kwargs)
File "/Users/dhiraj.agarwal/Documents/workspace/try3/multithread.py", line 14, in val_in
UnboundLocalError: local variable 'i' referenced before assignment
You should tell python that i is global:
def val_in():
global i
...
def val_out():
global i
...
This is an issue with variable scope. You may have used a local variable in the thread for different methods. If that is the case, you have to make the variable global.
def val_in():
global i # add this line
while 1:
a.append(raw_input())
print "%s value at %d: %d added" % ( time.ctime(time.time()), i ,int(a[i]))
i+=1
def val_out():
global i # add this line
while 1:
time.sleep(5)
try:
print "%s value at %d: %d deleted" % (time.ctime(time.time()), j, int(a.pop(j)))
i-=1
except:
print"no values lefts"
time.sleep(2)

Memory errors in Python multiprocessing application

I am making a script to run multiple instances of threads.
I keep getting errors when running more than 3 concurrent sets of threads, they are mostly to do with the pipes. How can I kill multiple individual processes? Is there a better way to do this?
from multiprocessing import Process, Pipe
from threading import Thread
import time
# Kill-switch flags, one per worker flavour; control_listener flips the
# entry named by its threadAlive argument to False.
alive = {'subAlive': True, 'testAlive': True};
def control_listener(conn, threadAlive): #listens for kill from main
global alive
while True:
data = conn.recv()
if data == "kill":
print "Killing"
alive[threadAlive] = False; #value for kill
print "testListner alive %s" % threadAlive, alive[threadAlive];
break
def subprocess(conn, threadNum, threadAlive):
t = Thread(target=control_listener, args=(conn, threadAlive))
count = 0
threadVal = threadNum
t.start()
run = alive['subAlive'];
while run == True:
print "Thread %d Run number = %d" % (threadVal, count), alive['subAlive'];
count = count + 1
run = alive['subAlive'];
def testprocess(conn, threadNum, threadAlive):
t = Thread(target=control_listener, args=(conn, threadAlive))
count = 0
threadVal = threadNum
t.start()
run = alive['testAlive'];
while run == True:
print "This is a different thread %d Run = %d" % (threadVal, count)
count = count + 1
run = alive['testAlive'];
# ---- driver (Python 2): two pipes, N workers of each flavour ----------
sub_parent, sub_child = Pipe()
test_parent, test_child = Pipe()
runNum = int(raw_input("Enter a number: "))  # run duration in seconds
threadNum = int(raw_input("Enter number of threads: "))
print "Starting threads"
for i in range(threadNum):
    p = Process(target=subprocess, args=(sub_child, i, 'subAlive'))
    p.start()
print "Subprocess started"
for i in range(threadNum):
    p2 = Process(target=testprocess, args=(test_child, i, 'testAlive'))
    p2.start()
print "Testproccess started"
print "Starting run"
time.sleep(runNum)
print "Terminating Subprocess run"
for i in range(threadNum):
    sub_parent.send("kill") #sends kill to listener
print "Testprocess termination alive", alive['subAlive'];
print "Terminating Testprocess run"
for i in range(threadNum):
    test_parent.send("kill") #sends kill to listener
print "Testprocess termination alive", alive['subAlive'];
# NOTE(review): only the last worker of each flavour is joined -- p and
# p2 are rebound on every loop iteration, so the rest are never joined.
# Indentation of the bare print statements relative to the loops is an
# assumption (the original paste lost all indentation) -- TODO confirm.
p.join()
p2.join()
If I run it with more than 2 threads I get random errors like
Exception in thread Thread-1:^M
Traceback (most recent call last):^M
File "/usr/lib/python2.7/threading.py", line 551, in __bootstrap_inner^M
self.run()^M
File "/usr/lib/python2.7/threading.py", line 504, in run^M
self.__target(*self.__args, **self.__kwargs)^M
File "multiprocessDemo.py", line 28, in control_listener^M
data = conn.recv()^M
EOFError
Or this
Traceback (most recent call last):^M
File "/usr/lib/python2.7/threading.py", line 551, in __bootstrap_inner^M
self.run()^M
File "/usr/lib/python2.7/threading.py", line 504, in run^M
self.__target(*self.__args, **self.__kwargs)^M
File "multiprocessDemo.py", line 28, in control_listener^M
data = conn.recv()^M
MemoryError
They occur sporadically when a message would get passed then one of the two threads will stop but the other will keep going.
I want to be able to run this with multiple things going on, say 16 concurrent threads total of one of a few different types. All I really need this bit to do is stop and start them reliably. I do not need to sync jobs nor do I need complex inter-process communication. Any suggestions? Examples I can look at?

Python Multiprocess issues process not starting

I wrote this code. I want to have a main thread that starts multiple subprocesses, each of which spawns a listener thread to wait for a kill message. subprocess works but testprocess does not run; there are no errors. Any ideas?
from multiprocessing import Process, Pipe
from threading import Thread
import time
# Process-local run flag; the listener thread flips it off on "kill".
Alive = True


def listener_thread(conn):
    """Block on *conn* until a "kill" message arrives, then set the
    module-level Alive flag False and return."""
    global Alive
    while True:
        if conn.recv() == "kill":
            Alive = False #value for kill
            break
def subprocess(conn):
t = Thread(target=listener_thread, args=(conn,))
count = 0
t.start()
while Alive:
print "Run number = %d" % count
count = count + 1
def testprocess(conn):
t = Thread(target=listner_thread, args=(conn,))
count = 0
t.start()
while Alive:
print "This is a different thread run = %d" % count
count = count + 1
# ---- driver: both workers share one child end of a single pipe --------
parent_conn, child_conn = Pipe()
p = Process(target=subprocess, args=(child_conn,))
p2 = Process(target=testprocess, args=(child_conn,))
runNum = int(raw_input("Enter a number: "))  # seconds to let the run last
p.start()
p2.start()
time.sleep(runNum)
# NOTE(review): a single "kill" sent down a pipe end shared by two
# processes is received by only one of them -- the other worker's
# listener never sees it and that loop keeps running.
parent_conn.send("kill") #sends kill to listener thread to tell them when to stop
p.join()
p2.join()
A typo in testprocess makes the function quit early.
listner_thread should be listener_thread.
If you comment out subprocess related code and run the code, you will see following error:
Process Process-1:
Traceback (most recent call last):
File "/usr/lib/python2.7/multiprocessing/process.py", line 258, in _bootstrap
self.run()
File "/usr/lib/python2.7/multiprocessing/process.py", line 114, in run
self._target(*self._args, **self._kwargs)
File "t.py", line 25, in testprocess
t = Thread(target=listner_thread, args=(conn,))
NameError: global name 'listner_thread' is not defined

Categories

Resources