I have this script and it does work. It has two separate processes that spawn listener threads, which kill their process when "kill" is sent to the listener via a pipe.
from multiprocessing import Process, Pipe
from threading import Thread
import time

subAlive = True
testAlive = True

def sub_listener(conn): #listens for kill from main
    global subAlive
    while True:
        data = conn.recv()
        if data == "kill":
            subAlive = False #value for kill
            break

def test_listener(conn): #listens for kill from main
    global testAlive
    while True:
        data = conn.recv()
        if data == "kill":
            testAlive = False #value for kill
            break

def subprocess(conn, threadNum):
    t = Thread(target=sub_listener, args=(conn,))
    count = 0
    threadVal = threadNum
    t.start()
    while subAlive:
        print "Thread %d Run number = %d" % (threadVal, count)
        count = count + 1

def testprocess(conn, threadNum):
    t = Thread(target=test_listener, args=(conn,))
    count = 0
    threadVal = threadNum
    t.start()
    while testAlive:
        print "This is a different thread %d Run = %d" % (threadVal, count)
        count = count + 1

sub_parent, sub_child = Pipe()
test_parent, test_child = Pipe()

runNum = int(raw_input("Enter a number: "))
threadNum = int(raw_input("Enter number of threads: "))

print "Starting threads"
for i in range(threadNum):
    p = Process(target=subprocess, args=(sub_child, i))
    p.start()
    print "Subprocess started"
for i in range(threadNum):
    p2 = Process(target=testprocess, args=(test_child, i))
    p2.start()
    print "Testprocess started"

print "Starting run"
time.sleep(runNum)

print "Terminating Subprocess run"
for i in range(threadNum):
    sub_parent.send("kill") #sends kill to listener
print "Terminating Testprocess run"
for i in range(threadNum):
    test_parent.send("kill") #sends kill to listener

p.join()
p2.join()
I'd like not to need a separate listener function hard-coded for every process I call. I was thinking about passing global variables in when the thread is spawned, since the global variables are really the only difference between the listener functions. Thanks guys!
You can access globals through the globals() dictionary.
>>> foo = 'value'
>>> def change(name):
...     globals()[name] = 'changed'
...
>>> change('foo')
>>> foo
'changed'
but I would suggest:
alive = {}

def sub_listener(conn, key): #listens for kill from main
    while True:
        data = conn.recv()
        if data == "kill":
            alive[key] = False #value for kill
            break
e.g.
from multiprocessing import Process, Pipe
from threading import Thread
import time

alive = {
    'sub': 1,
    'test': 1,
}

def listener_factory(key):
    def listener(conn): #listens for kill from main
        while True:
            data = conn.recv()
            if data == "kill":
                alive[key] = False #value for kill
                break
    return listener

def process_factory(key):
    listener = listener_factory(key)
    def subprocess(conn, threadNum):
        t = Thread(target=listener, args=(conn,))
        count = 0
        threadVal = threadNum
        t.start()
        while alive[key]:
            print "Thread[%s] %d Run number = %d" % (key, threadVal, count)
            count = count + 1
    return subprocess

def main():
    sub_parent, sub_child = Pipe()
    test_parent, test_child = Pipe()

    runNum = int(raw_input("Enter a number: "))
    threadNum = int(raw_input("Enter number of threads: "))

    print "Starting threads"
    for i in range(threadNum):
        p = Process(target=process_factory('sub'), args=(sub_child, i))
        p.start()
        print "Subprocess started"
    for i in range(threadNum):
        p2 = Process(target=process_factory('test'), args=(test_child, i))
        p2.start()
        print "Testprocess started"

    print "Starting run"
    time.sleep(runNum)

    print "Terminating Subprocess run"
    for i in range(threadNum):
        sub_parent.send("kill") #sends kill to listener
    print "Terminating Testprocess run"
    for i in range(threadNum):
        test_parent.send("kill") #sends kill to listener

    p.join()
    p2.join()

if __name__ == '__main__':
    main()
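A note on the design: because listener_factory and process_factory close over key, adding another process type only needs a new entry in the alive dict and another factory call; no extra hard-coded listener function has to be written. As a rough illustration only (the 'worker' key and pipe names below are made up, not part of the original answer), extending the alive dict and main() above might look like:

alive['worker'] = 1                       # new flag alongside 'sub' and 'test'
worker_parent, worker_child = Pipe()
p3 = Process(target=process_factory('worker'), args=(worker_child, 0))
p3.start()
# ... after the run ...
worker_parent.send("kill")                # the keyed listener flips alive['worker']
p3.join()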
Related
Here is the example code for my question:
import multiprocessing, time

def nopr():
    i=0
    while 1:
        i = i+1
        print i
        time.sleep(1)

p = multiprocessing.Process(target = nopr)
print "process started"
p.start()
time.sleep(04)
print "process ended"
p.terminate()
time.sleep(1)
p.start()
No, you cannot start a terminated process; you have to recreate it:
import multiprocessing, time

def nopr():
    i=0
    while 1:
        i = i+1
        print i
        time.sleep(1)

p = multiprocessing.Process(target = nopr)
print "process started"
p.start()
time.sleep(04)
print "process ended"
p.terminate()
time.sleep(1)
p = multiprocessing.Process(target = nopr) # recreate
p.start()
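If the worker needs to be stopped and restarted more than once, a small helper that builds a fresh Process each time keeps the recreation explicit. This is only a sketch of the idea above, not part of the original answer (make_worker is a made-up name, and the 4-second run time just mirrors the example):

import multiprocessing, time

def nopr():
    i = 0
    while 1:
        i = i + 1
        print i
        time.sleep(1)

def make_worker():
    # a fresh Process object must be created for every run;
    # a terminated (or finished) Process cannot be start()ed again
    return multiprocessing.Process(target=nopr)

if __name__ == '__main__':
    for run in range(2):
        p = make_worker()
        p.start()
        time.sleep(4)
        p.terminate()
        p.join()  # wait for the child to actually go away
        print "run %d ended" % run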
I have a Python script that uses threading; here is my code:
def main():
    ip_list = get_ip_list() #['ip1', 'ip2'..., 'ipn'] thousands
    for x in ip_list:
        q.put(x)
    threads = []
    threads_num = 50
    for x in range(threads_num):
        w = Worker(q, stantard_config_to_dict, id_dict, logger, ip_position)
        threads.append(w)
        w.start()
    for w in threads:
        w.join()
    logger.debug('End of main thread, exit')
    print 'End of main thread, exit'

def run(self):
    while 1:
        try:
            ip = self.queue.get(False)
            self.logger.debug('%s get %s from queue, left %s in queue' % (self.getName(), ip, self.queue.qsize()))
            self.get_inter_conf(ip)
        except Queue.Empty:
            self.logger.debug('queue is empty, exit')
            break
    self.logger.info("%s finished" % (self.getName()))
The string 'End of main thread' doesn't get printed right now; it only gets printed when I change the number of items in ip_list.
Why does this happen?
Thanks to @user5402 for the previous solution.
I am trying to handle multiple messages that are queued up. Here is the code:
import sys
import socket
from multiprocessing import Process, Queue

UDP_ADDR = ("", 13000)

def send(m):
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    sock.sendto(m, UDP_ADDR)

def receive(q):
    buf = 1024
    Sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    Sock.bind(UDP_ADDR)
    while True:
        (data, addr) = Sock.recvfrom(buf)
        q.put(data)
In the client function I want to handle multiple messages that have knock-on effects.
def client():
    q = Queue()
    r = Process(target = receive, args=(q,))
    r.start()
    print "client loop started"
    while True:
        m = q.get()
        print "got:", m
        while m == "start":
            print "started"
            z = q.get()
            if z == "stop":
                return
    print "loop ended"
    r.terminate()
So when "start" is sent, the client goes into a while loop that prints "started" indefinitely while waiting for the stop message to come through. The above client code does not work.
Here is the code to start the function:
if __name__ == '__main__':
    args = sys.argv
    if len(args) > 1:
        send(args[1])
    else:
        client()
You can write the client loop like this:
print "client loop started"
while True:
m = q.get()
print "waiting for start, got:", m
if m == "start":
while True:
try:
m = q.get(False)
except:
m = None
print "waiting for stop, got:", m
if m == "stop":
break
Based on your comments, this would be a better approach:
import sys
import socket
import Queue as Q
import time
from multiprocessing import Process, Queue

UDP_ADDR = ("", 13000)

def send(m):
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    sock.sendto(m, UDP_ADDR)

def receive(q):
    buf = 1024
    Sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    Sock.bind(UDP_ADDR)
    while True:
        (data, addr) = Sock.recvfrom(buf)
        q.put(data)

def doit():
    # ... what the processing thread will do ...
    while True:
        print "sleeping..."
        time.sleep(3)

def client():
    q = Queue()
    r = Process(target = receive, args=(q,))
    r.start()
    print "client loop started"
    t = None # the processing thread
    while True:
        m = q.get()
        if m == "start":
            if t:
                print "processing thread already started"
            else:
                t = Process(target = doit)
                t.start()
                print "processing thread started"
        elif m == "stop":
            if t:
                t.terminate()
                t = None
                print "processing thread stopped"
            else:
                print "processing thread not running"
        elif m == "quit":
            print "shutting down"
            if t:
                t.terminate()
                t = None # play it safe
            break
        else:
            print "huh?"
    r.terminate()

if __name__ == '__main__':
    args = sys.argv
    if len(args) > 1:
        send(args[1])
    else:
        client()
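Assuming the script above is saved as, say, udp_control.py (the filename is only for illustration), the client runs with no arguments and the control messages are sent from another terminal:

python udp_control.py          # no argument: runs client() and waits for messages
python udp_control.py start    # sends "start" over UDP, spawning the doit() process
python udp_control.py stop     # sends "stop", terminating the doit() process
python udp_control.py quit     # sends "quit", shutting the client down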
I am having issues with this multiprocessing script. I modeled it after the one I found here:
http://broadcast.oreilly.com/2009/04/pymotw-multiprocessing-part-2.html
class test_imports:#Test classes remove
    def import_1(self, control_queue, thread_number):
        print ("Import_1 number %d started") % thread_number
        run = True
        count = 1
        while run:
            alive = control_queue.get()
            if alive == 't1kill':
                print ("Killing thread type 1 number %d") % thread_number
                run = False
                break
            print ("Thread type 1 number %d run count %d") % (thread_number, count)
            count = count + 1

def worker_generator(control_queue, threadName, runNum):
    if threadName == 'one':
        print ("Starting import_1 number %d") % runNum
        p = Process(target=test_import.import_1, args=(control_queue, runNum))
        p.start()

if __name__ == '__main__':
    # Establish communication queues
    control = multiprocessing.Queue()

    runNum = int(raw_input("Enter a number: "))
    threadNum = int(raw_input("Enter number of threads: "))
    threadName = raw_input("Enter number: ")
    thread_Count = 0

    print ("Starting threads")
    for i in range(threadNum):
        worker_generator(control, threadName, i)
        thread_Count = thread_Count + 1

    time.sleep(runNum)#let threads do their thing

    print ("Terminating threads")
    for i in range(thread_Count):
        control.put("t1kill")
        control.put("t2kill")
This is the error I get when I run it:
Traceback (most recent call last):
  File "multiQueue.py", line 62, in <module>
    worker_generator(control, threadName, i)
  File "multiQueue.py", line 34, in worker_generator
    p = Process(target=test_import.import_1, args=(control_queue, runNum))
NameError: global name 'Process' is not defined
I know where the error is, but I took that Process call from known good code, so I don't think it is a syntax error. Any help?
You probably did import multiprocessing, which is fine, because elsewhere in your code you actually wrote:
multiprocessing.Queue()
But when calling Process(), you forgot to put multiprocessing. in front of it.
However, you can also fix this by importing the classes directly:
from multiprocessing import Queue, Process
But then you would have to change multiprocessing.Queue() to just Queue().
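For example, under the first option the failing line from the traceback would become the following (the second option instead keeps Process(...) as written and relies on from multiprocessing import Process at the top of the file):

p = multiprocessing.Process(target=test_import.import_1, args=(control_queue, runNum))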
Normally this is due to a missing module import.
Did you import multiprocessing?
The code I have is:
import multiprocessing
import time

class test_imports:#Test classes remove
    def import_1(self, control_queue, thread_number):
        print ("Import_1 number %d started") % thread_number
        run = True
        count = 1
        while run:
            alive = control_queue.get()
            if alive == 't1kill':
                print ("Killing thread type 1 number %d") % thread_number
                run = False
                break
            print ("Thread type 1 number %d run count %d") % (thread_number, count)
            count = count + 1

def worker_generator(control_queue, threadName, runNum):
    if threadName == 'one':
        print ("Starting import_1 number %d") % runNum
        p = multiprocessing.Process(target=test_imports.import_1, args=(control_queue, runNum))
        p.start()

if __name__ == '__main__':
    # Establish communication queues
    control = multiprocessing.Queue()

    runNum = int(raw_input("Enter a number: "))
    threadNum = int(raw_input("Enter number of threads: "))
    threadName = raw_input("Enter name: ")
    thread_Count = 0

    print ("Starting threads")
    for i in range(threadNum):
        worker_generator(control, threadName, i)
        thread_Count = thread_Count + 1

    time.sleep(runNum)#let threads do their thing

    print ("Terminating threads")
    for i in range(thread_Count):
        control.put("t1kill")
        control.put("t2kill")
This code is meant to start and stop multiple copies of several different thread types. I will preface this by saying that I tried using pipes to control the threads, but I keep getting random memory errors to do with the pipes. This is a prototype of a factory to produce several copies of multiple thread types, so a queue-based method didn't seem practical to me either, though if there is something I am missing I am all ears. So my newest plan is to use a global dictionary with a separate entry for each thread's alive flag and kill condition, e.g. alive['subAlive'] and alive['subKill'] = "kill".
Yet for some reason the control_listener thread spawned by the processes won't trigger the kill condition and won't read the global variable.
from multiprocessing import Process, Pipe
from threading import Thread
import time

alive = {'subAlive': True, 'subKill': "Alive", 'testAlive': True, 'testKill': "Alive"}

def control_listener(aliveFlag, threadAlive): #listens for kill from main
    global alive
    while True:
        data = alive[aliveFlag]
        print "Thread", alive[threadAlive]
        print "Thread status", alive[aliveFlag]
        if data == "kill":
            print "Killing"
            alive[threadAlive] = False #value for kill
            print "testListener alive %s" % threadAlive, alive[threadAlive]
            print "deactivating %s" % threadAlive, alive['aliveFlag']
            break

def subprocess(aliveFlag, threadNum, threadAlive):
    t = Thread(target=control_listener, args=(aliveFlag, threadAlive))
    count = 0
    threadVal = threadNum
    t.start()
    run = alive['subAlive']
    while run == True:
        print "Thread alive %s" % alive['aliveFlag']
        print "Thread %d Run number = %d" % (threadVal, count), alive['subAlive']
        count = count + 1
        run = alive['subAlive']

def testprocess(aliveFlag, threadNum, threadAlive):
    t = Thread(target=control_listener, args=(aliveFlag, threadAlive))
    count = 0
    threadVal = threadNum
    t.start()
    run = alive['testAlive']
    while run == True:
        print "This is a different thread %d Run = %d" % (threadVal, count)
        count = count + 1
        run = alive['testAlive']

runNum = int(raw_input("Enter a number: "))
threadNum = int(raw_input("Enter number of threads: "))

print "Starting threads"
for i in range(threadNum):
    p = Process(target=subprocess, args=('subKill', i, 'subAlive'))
    p.start()
    print "Subprocess started"
for i in range(threadNum):
    p2 = Process(target=testprocess, args=('subKill', i, 'testAlive'))
    p2.start()
    print "Testprocess started"

print "Starting run"
time.sleep(runNum)

print "Terminating Subprocess run"
for i in range(threadNum):
    alive['subKill'] = "kill"
    print "Subkill = %s" % alive['subKill']
    print "Testprocess termination alive", alive['subAlive']
print "Terminating Testprocess run"
for i in range(threadNum):
    alive['subKill'] = "kill"
    print "Testprocess termination alive", alive['subAlive']

p.join()
p2.join()
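For what it's worth, assignments such as alive['subKill'] = "kill" made in the parent are not visible inside the child processes, because each Process works on its own copy of the module globals. A minimal sketch of one way to share such flags across processes uses multiprocessing.Manager; the key names below mirror the question's code, but the rest is only illustrative, not the original author's solution:

from multiprocessing import Process, Manager
from threading import Thread
import time

def control_listener(alive, killKey, aliveKey):
    # poll the shared dict for the kill message from the parent
    while True:
        if alive[killKey] == "kill":
            alive[aliveKey] = False  # value for kill
            break
        time.sleep(0.1)

def worker(alive, killKey, aliveKey, threadNum):
    t = Thread(target=control_listener, args=(alive, killKey, aliveKey))
    t.start()
    count = 0
    while alive[aliveKey]:
        print "Thread %d Run number = %d" % (threadNum, count)
        count = count + 1
    t.join()

if __name__ == '__main__':
    manager = Manager()
    # the Manager dict lives in a separate server process, so updates made
    # by the parent are visible to every child that holds the proxy
    alive = manager.dict({'subAlive': True, 'subKill': "Alive"})

    p = Process(target=worker, args=(alive, 'subKill', 'subAlive', 0))
    p.start()
    time.sleep(2)
    alive['subKill'] = "kill"  # now visible inside the child process
    p.join()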