How can the target= and args= be obtained from variables? - python

I am new to both Stack Overflow and Python, so please bear with me.
When I run this test program, it doesn't seem like the threads start the function.
How can the target= and args= be obtained from variables?
import queue
import random
import threading
import time

def start_threads(count, func, args):
    threads = []
    for _ in range(count):
        thread = threading.Thread(target=func, args=args)
        thread.start
        threads.append(thread)
    return threads

def function(a, b):
    print("Start function")
    time.sleep(random.randint(a, b))
    print("Stop function")

if __name__ == "__main__":
    num_threads = 5
    func_name = "function"
    min_wait = 3
    max_wait = 7
    threads = start_threads(num_threads, func_name, (min_wait, max_wait))
    print(f"Active threads {threading.active_count()}")

You cannot pass a string as the target argument of Thread(...); you must provide the function object itself. Note also that thread.start in your loop is missing its parentheses, so the threads are never actually started.
Here is a working solution:
import random
import time
from threading import Thread

def function(a, b):
    print("START FUNCTION")
    time.sleep(random.randint(a, b))
    print("STOP FUNCTION")

def create_threads(count, func, times):
    threads = []
    for _ in range(count):
        thread = Thread(target=func, args=[times[0], times[1]])
        threads.append(thread)
    return threads

def run_threads(threads):
    for thread in threads:
        thread.start()

NUM_THREADS = 5
FUNC_NAME = function
MIN_WAIT = 3
MAX_WAIT = 7

threads = create_threads(NUM_THREADS, FUNC_NAME, (MIN_WAIT, MAX_WAIT))
run_threads(threads)
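The question title also asks how target= can be obtained from a variable. If you really do need to start from the function's name as a string, you can look the function object up by name; a minimal sketch, assuming the function is defined in the current module:

import threading

def function(a, b):
    print(a, b)

func_name = "function"
func = globals()[func_name]  # map the string name to the function object
# getattr(some_module, func_name) works the same way for imported modules
thread = threading.Thread(target=func, args=(3, 7))
thread.start()
thread.join()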

Related

How do you run a process that spawns multiple threads? How do you serialize a thread object?

I'm trying to create multiple processes in which each process takes in a group of threads that it will start. I keep getting this error:
TypeError: cannot pickle '_thread.lock' object
Here is a basic example of what I am trying to achieve:
import time
import threading
import multiprocessing

def threading_func(i):
    print(f'Starting Function {i}')
    time.sleep(1)
    print(f'Ending Function {i}')

def process_func(threads):
    for thread in threads:
        thread.start()
    for thread in threads:
        thread.join()

thread_list = [threading.Thread(target=threading_func, args=(i,)) for i in range(1, 9)]
processes = [multiprocessing.Process(target=process_func, args=([thread],)) for thread in thread_list]

for process in processes:
    process.start()
for process in processes:
    process.join()
I am aware that the arguments passed into a Process instance must be serializable. The real question, then, is how can I make a Thread object serializable?
There is no simple way of getting a Thread instance to be serializable. An alternative is to pass each process the arguments it needs to create the threads in its own address space. This can be done rather painlessly by creating a class called MyThread that behaves somewhat like the Thread class, although this version is meant to be used with the target argument since it does not have a run method:
import time
import threading
import multiprocessing

class MyThread:
    def __init__(self, *args, **kwargs):
        # Store the Thread constructor arguments; unlike a Thread, these are picklable.
        self.args = args
        self.kwargs = kwargs
        self.thread = None

    def start(self):
        # The real Thread is only created here, in the child process.
        self.thread = threading.Thread(*self.args, **self.kwargs)
        self.thread.start()

    def join(self):
        self.thread.join()

def threading_func(i):
    print(f'Starting Function {i}')
    time.sleep(1)
    print(f'Ending Function {i}')

def process_func(threads):
    for thread in threads:
        thread.start()
    for thread in threads:
        thread.join()

# Required for Windows:
if __name__ == '__main__':
    thread_list = [MyThread(target=threading_func, args=(i,)) for i in range(1, 9)]
    processes = [multiprocessing.Process(target=process_func, args=([thread],)) for thread in thread_list]
    for process in processes:
        process.start()
    for process in processes:
        process.join()
A version using a more generic MyThread class:
import time
import threading
import multiprocessing

class MyThread:
    def __init__(self, *args, **kwargs):
        self.args = args
        self.kwargs = kwargs
        self.thread = None

    def start(self):
        # If the caller supplied a target, use it; otherwise fall back to self.run.
        if 'target' in self.kwargs:
            self.thread = threading.Thread(*self.args, **self.kwargs)
        else:
            self.thread = threading.Thread(*self.args, **self.kwargs, target=self.run)
        self.thread.start()

    def join(self):
        self.thread.join()

    def run(self, *args, **kwargs):
        """
        This method should be overridden if this class is not initialized
        with the `target` keyword and you want it to do something useful.
        """
        pass

class T(MyThread):
    def run(self, i):
        print(f'Starting Function {i}')
        time.sleep(1)
        print(f'Ending Function {i}')

def process_func(threads):
    for thread in threads:
        thread.start()
    for thread in threads:
        thread.join()

# Required for Windows:
if __name__ == '__main__':
    thread_list = [T(args=(i,)) for i in range(1, 9)]
    processes = [multiprocessing.Process(target=process_func, args=([thread],)) for thread in thread_list]
    for process in processes:
        process.start()
    for process in processes:
        process.join()
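For comparison, the same idea works without a wrapper class: a module-level function and a tuple of arguments are both picklable, so you can ship (func, args) pairs to each process and let it build the threads itself. A minimal sketch (process_func_specs is a hypothetical name; threading_func is the function from above):

def process_func_specs(specs):
    # Build the threads inside the child process from picklable (func, args) pairs.
    threads = [threading.Thread(target=func, args=args) for func, args in specs]
    for thread in threads:
        thread.start()
    for thread in threads:
        thread.join()

if __name__ == '__main__':
    processes = [multiprocessing.Process(target=process_func_specs, args=([(threading_func, (i,))],))
                 for i in range(1, 9)]
    for process in processes:
        process.start()
    for process in processes:
        process.join()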

Is there a way to have threads communicate with each other?

Hi, I am trying to make it so two threads can affect each other, but I can't figure it out. This is an example of what I have:
import time
import threading

s = 0

def thing1():
    time.sleep(1)
    s += 1

def thing2():
    print(s)

t = threading.Thread(target=thing1)
t.start()
t2 = threading.Thread(target=thing2)
t2.start()
When they run, thing2 prints 0, not the updated count. In my real code the threads run later; this is just the minimal code necessary.
As written, thing1 actually raises UnboundLocalError, because s += 1 without a global declaration makes s a local variable. Any two threads can read and write the same variable s by declaring it global in each function. You should also guard access with a semaphore (or a lock) so the threads do not access the variable at the same time:
import threading
import time

s = 0
sem = threading.Semaphore()

def thing1():
    global s
    for _ in range(3):
        time.sleep(1)
        sem.acquire()
        s += 1
        sem.release()

def thing2():
    global s
    for _ in range(3):
        time.sleep(1)
        sem.acquire()
        print(s)
        sem.release()

t = threading.Thread(target=thing1)
t.start()
t2 = threading.Thread(target=thing2)
t2.start()
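For passing values between threads (rather than just guarding a shared counter), the standard library's queue.Queue is often simpler, since its put() and get() are already thread-safe. A minimal sketch along the same lines as the example above:

import queue
import threading
import time

q = queue.Queue()

def thing1():
    for i in range(3):
        time.sleep(1)
        q.put(i + 1)  # hand each new value to the other thread

def thing2():
    for _ in range(3):
        print(q.get())  # blocks until thing1 puts a value

t = threading.Thread(target=thing1)
t.start()
t2 = threading.Thread(target=thing2)
t2.start()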

Python multiprocessing Event with process pool and custom manager

The following code works fine:
import time
from concurrent.futures import Future, as_completed
from concurrent.futures.process import ProcessPoolExecutor
import multiprocessing as mp
from multiprocessing.managers import BaseManager

class Progress(object):
    _target: int = 0
    _progress: int = 0

    def __init__(self):
        self._target = 0
        self._progress = 0

    def completed(self, n):
        self._progress += n

    def progress(self):
        return (self._progress / self._target) * 100

    def set_target(self, n):
        self._target = n

class ObjectManager(BaseManager):
    pass

def dummy_worker(progress: Progress, cancel: mp.Event = None):
    print("--> Worker started")
    for i in range(10):
        time.sleep(1)
        progress.completed(1)
    return 1

if __name__ == "__main__":
    ObjectManager.register('Progress', Progress)
    print('Starting manager')
    with ObjectManager() as manager:
        print('Manager started')
        progress = manager.Progress()
        progress.set_target(10)
        with ProcessPoolExecutor() as pool:
            f = pool.submit(dummy_worker, progress)
            futures = [f]
            for f in as_completed(futures):
                print(f.result())
        print(f'Progress: {progress.progress()}')
The following, however, gives me Condition objects should only be shared between processes through inheritance. I am not sure how to use an Event in dummy_worker. The main goal is to pass some shared object and also have a way to cancel the worker. Sorry for the long post; I wanted to give the full code for clarity.
import time
from concurrent.futures import Future, as_completed
from concurrent.futures.process import ProcessPoolExecutor
import multiprocessing as mp
from multiprocessing.managers import BaseManager

class Progress(object):
    _target: int = 0
    _progress: int = 0

    def __init__(self):
        self._target = 0
        self._progress = 0

    def completed(self, n):
        self._progress += n

    def progress(self):
        return (self._progress / self._target) * 100

    def set_target(self, n):
        self._target = n

class ObjectManager(BaseManager):
    pass

def dummy_worker(progress: Progress, cancel: mp.Event = None):
    print("--> Worker started")
    for i in range(10):
        time.sleep(1)
        progress.completed(1)
    return 1

if __name__ == "__main__":
    ObjectManager.register('Progress', Progress)
    cancel = mp.Event()
    print('Starting manager')
    with ObjectManager() as manager:
        print('Manager started')
        progress = manager.Progress()
        progress.set_target(10)
        with ProcessPoolExecutor() as pool:
            f = pool.submit(dummy_worker, progress, cancel)
            futures = [f]
            for f in as_completed(futures):
                print(f.result())
        print(f'Progress: {progress.progress()}')
Assume I am using Python 3.5+.
Try the following changes:
from multiprocessing.managers import SyncManager

class ObjectManager(SyncManager):
    pass

# Use an Event() created by the ObjectManager instance: cancel = manager.Event()
if __name__ == "__main__":
    ObjectManager.register('Progress', Progress)
    #cancel = mp.Event() # not this
    print('Starting manager')
    with ObjectManager() as manager:
        print('Manager started')
        progress = manager.Progress()
        cancel = manager.Event()  # but rather this
        progress.set_target(10)
        with ProcessPoolExecutor() as pool:
            f = pool.submit(dummy_worker, progress, cancel)
            futures = [f]
            for f in as_completed(futures):
                print(f.result())
        print(f'Progress: {progress.progress()}')
Prints:
Starting manager
Manager started
--> Worker started
1
Progress: 100.0
Also, get rid of or update the remaining references to mp.Event, such as the cancel: mp.Event = None annotation on dummy_worker (mp.Event is a factory function, not a type).
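To actually cancel the worker, dummy_worker can poll the event between steps; a minimal sketch, where the -1 return value is just an illustrative convention:

def dummy_worker(progress: Progress, cancel=None):
    print("--> Worker started")
    for i in range(10):
        if cancel is not None and cancel.is_set():
            return -1  # the caller set the event, so stop early
        time.sleep(1)
        progress.completed(1)
    return 1

The caller can then request cancellation with cancel.set() before or while the worker runs.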

shared memory between process and main task in python

With some help I could run a process in Python. Now I want to share a value between the two tasks. I can set the value inside __init__, but I can't change it inside the run method.
And by the way: how do I kill the process when the main process stops?
from multiprocessing import Process, Value
import serial
import time

class P(Process):
    def __init__(self, num):
        num.value = 15
        super(P, self).__init__()

    def run(self):
        while True:
            num.value = num.value + 1
            print("run simple process")
            time.sleep(0.5)

def main():
    while True:
        print("run main")
        print(num.value)
        time.sleep(2.5)

if __name__ == "__main__":
    num = Value('d', 0.0)
    p = P(num)
    p.start()
    #p.join()
    main()
In your simplified case you only used num at initialization time. To be able to access that value in the process's other methods, store it as an attribute of the process object:
class P(Process):
    def __init__(self, num):
        self.num = num
        self.num.value = 15
        super(P, self).__init__()

    def run(self):
        while True:
            self.num.value += 1
            print("run simple process")
            time.sleep(0.5)
For more serious cases, consider using Managers and synchronization primitives.
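As for killing the process when the main process stops: marking the process as a daemon makes multiprocessing terminate it automatically when the main process exits. A minimal sketch against the question's code:

if __name__ == "__main__":
    num = Value('d', 0.0)
    p = P(num)
    p.daemon = True  # daemon processes are terminated when the main process exits
    p.start()
    main()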

Class Decorators Singleton?

So for example, I'm making an async decorator and wanted to limit the number of concurrent threads:
from multiprocessing import cpu_count
from threading import Thread

class async:
    def __init__(self, function):
        self.func = function
        self.max_threads = cpu_count()
        self.current_threads = []

    def __call__(self, *args, **kwargs):
        func_thread = Thread(target=self.func, args=args, kwargs=kwargs)
        func_thread.start()
        self.current_threads.append(func_thread)
        while len(self.current_threads) > self.max_threads:
            self.current_threads = [t for t in self.current_threads if t.isAlive()]

from time import sleep

@async
def printA():
    sleep(1)
    print "A"

@async
def printB():
    sleep(1)
    print "B"
Is this going to limit the total number of concurrent threads? I.e., if I had 8 cores, would the current code end up with 16+ threads because two separate async objects exist?
If so, how would I fix that?
Thanks!
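The suspicion in the question is correct: each decorated function gets its own async instance, so each one tracks its own thread list and the limit is per-function, not global. One way to share the limit across all instances is to move it into class-level state; a minimal sketch using a shared semaphore (async_limited is a hypothetical name, chosen because async is a reserved word in Python 3):

from multiprocessing import cpu_count
from threading import Semaphore, Thread

class async_limited:
    _slots = Semaphore(cpu_count())  # class attribute, shared by every decorated function

    def __init__(self, function):
        self.func = function

    def __call__(self, *args, **kwargs):
        def wrapper():
            try:
                self.func(*args, **kwargs)
            finally:
                async_limited._slots.release()  # free the slot when the call finishes
        async_limited._slots.acquire()  # blocks the caller while all slots are busy
        Thread(target=wrapper).start()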
