Print message while a function is running in Python with AsyncIO

I'm trying to create a print function that keeps printing, for example, 'Hello World', while another function (called miner) is running in parallel, and both functions must end at the same time.
This is for a real-time cost measurer for bitcoin mining that I'm working on.
I found that Python has asyncio and tried to use it; however, I couldn't get the print function to stop at the same time the miner function ends. The timer function printed once and then waited for the miner function.
import asyncio
import time
from datetime import datetime

class Test2:
    async def miner(self):
        await asyncio.sleep(5)
        return 0

    async def timer(self):
        while True:
            print("\n Hello World \n")
            time.sleep(1)

t2 = Test2()
loop = asyncio.get_event_loop()
loop.run_until_complete(asyncio.gather(t2.miner(), t2.timer()))
I tried concurrent tasks, but the two functions (timer and miner) do not run in parallel.
Could you show me a way to solve this?
Thank you!

time.sleep() is not an asynchronous function, so the timer coroutine blocks everything else until it ends (and, unfortunately, it never ends).
You can add a shared flag to stop the timer when the miner completes:
import asyncio

class Test2:
    is_ended = False

    async def miner(self):
        await asyncio.sleep(5)
        self.is_ended = True
        print("\n I'm done \n")
        return 0

    async def timer(self):
        while True:
            if self.is_ended:
                print('\n Bye Bye \n')
                break
            print("\n Hello World \n")
            await asyncio.sleep(1)

t2 = Test2()
loop = asyncio.get_event_loop()
loop.run_until_complete(asyncio.gather(t2.miner(), t2.timer()))
Hello World
Hello World
Hello World
Hello World
Hello World
I'm done
Bye Bye
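Another way, sketched here rather than taken from the answer above, is to cancel the timer task as soon as the miner returns, so no shared flag is needed:
import asyncio

class Test2:
    async def miner(self):
        await asyncio.sleep(5)
        return 0

    async def timer(self):
        while True:
            print("\n Hello World \n")
            await asyncio.sleep(1)

async def main():
    t2 = Test2()
    timer_task = asyncio.ensure_future(t2.timer())  # runs alongside the miner
    result = await t2.miner()                       # wait only for the miner
    timer_task.cancel()                             # stop the timer at the same moment
    try:
        await timer_task
    except asyncio.CancelledError:
        pass
    print("miner returned", result)

loop = asyncio.get_event_loop()
loop.run_until_complete(main())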

You can also use threading:
from threading import Thread

def func1():
    print('Working')

def func2():
    print("Working")

if __name__ == '__main__':
    Thread(target=func1).start()
    Thread(target=func2).start()
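To map this onto the original miner/timer question (both threads ending together), a rough sketch using a threading.Event could look like the following; the miner body is only a placeholder sleep:
import time
from threading import Thread, Event

done = Event()

def miner():
    time.sleep(5)              # stand-in for the real mining work
    done.set()                 # signal the timer thread to stop

def timer():
    while not done.is_set():
        print("Hello World")
        time.sleep(1)

if __name__ == '__main__':
    t1 = Thread(target=miner)
    t2 = Thread(target=timer)
    t1.start()
    t2.start()
    t1.join()
    t2.join()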

You can use multiprocessing or threading
from multiprocessing import Process

index = 0

def func1():
    global index
    print('start func1')
    while index < 20:
        index += 1
    print('end func1')

def func2():
    global index
    print('start func2')
    while index < 10:
        index += 1
    print('end func2')

if __name__ == '__main__':
    p1 = Process(target=func1)
    p1.start()
    p2 = Process(target=func2)
    p2.start()
For threading:
import threading

x = threading.Thread(target=func1)
y = threading.Thread(target=func2)
x.start()
y.start()
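Note that with Process each child works on its own copy of index, so the global above is not actually shared between processes. If shared state is needed, a rough sketch (not part of the answer above) would use multiprocessing.Value:
from multiprocessing import Process, Value

def worker(index):
    print('start worker')
    while index.value < 20:
        with index.get_lock():     # avoid lost updates between processes
            index.value += 1
    print('end worker')

if __name__ == '__main__':
    index = Value('i', 0)          # shared integer, initialized to 0
    p = Process(target=worker, args=(index,))
    p.start()
    p.join()
    print('final value:', index.value)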

Related

Python Thread counter

I'm trying to make a thread counter, but I'm stuck. I've got the code below:
import threading

def Hello():
    global t
    print("Hello!")
    t = threading.Timer(1, Hello)
    t.start()

Hello()
How can I make the loop stop, say, after 5 'Hellos'?
Not sure what you're trying to accomplish, but this code will work:
import threading

count = 0

def hello():
    global count
    print("Hello!")
    count += 1
    if count < 5:
        t = threading.Timer(1, function=hello)
        t.start()

hello()

How to exit a while loop in a multiprocessing pool process from the main script in python

EDIT 3: See last example at the end.
I need a while loop doing continuous send and return operations over a USB connection.
During this continuous operation I need (amongst other stuff in my main script) a few identical and isolated send/return operations on that same USB connection.
This seems to require multiprocessing and some tweaking.
I want to use the following workaround with the multiprocessing library:
Put the continuous send/return operation on a different thread with a pool (apply_async).
Put this process on "hold" when I perform the isolated send/return operation (using clear()).
Immediately after the isolated send/return operation resume the continuous send/return (using set()).
Stop the continuous send/return when I reach the end of the main script (here I have no solution yet; it should be x.stop() or something like that, since terminate() won't do).
Get some return value from the stopped process (use get()).
I tried a couple of things already, but I just can't exit the while loop via a command from main.
import multiprocessing
import time

def setup(event):
    global unpaused
    unpaused = event

class func:
    def __init__(self):
        self.finished = False

    def stop(self):
        self.finished = True

    def myFunction(self, arg):
        i = 0
        s = []
        while self.finished == False:
            unpaused.wait()
            print(arg + i)
            s.append(arg + i)
            i = i + 1
            time.sleep(1)
        return s

if __name__ == "__main__":
    x = func()
    event = multiprocessing.Event()  # initially unset, so workers will be paused at first
    pool = multiprocessing.Pool(1, setup, (event,))
    result = pool.apply_async(x.myFunction, (10,))
    print('We unpause for 2 sec')
    event.set()  # unpause
    time.sleep(2)
    print('We pause for 2 sec')
    event.clear()  # pause
    time.sleep(2)
    print('We unpause for 2 sec')
    event.set()  # unpause
    time.sleep(2)
    print('Now we try to terminate in 2 sec')
    time.sleep(2)
    x.stop()
    return_val = result.get()
    print('get worked with ' + str(return_val))
Can someone point me in the right direction? As you can see, this won't stop with x.stop().
Global values also do not work.
Thanks in advance.
EDIT:
As suggested, I tried to put the multiprocessing in a separate object.
Is this done by putting the functions in a class, like my example below?
import multiprocessing
import time

class func(object):
    def __init__(self):
        self.event = multiprocessing.Event()  # initially unset, so workers will be paused at first
        self.pool = multiprocessing.Pool(1, self.setup, (self.event,))

    def setup(self):
        global unpaused
        unpaused = self.event

    def stop(self):
        self.finished = True

    def resume(self):
        self.event.set()  # unpause

    def hold(self):
        self.event.clear()  # pause

    def run(self, arg):
        self.pool.apply_async(self.myFunction, (arg,))

    def myFunction(self, arg):
        i = 0
        s = []
        self.finished = False
        while self.finished == False:
            unpaused.wait()
            print(arg + i)
            s.append(arg + i)
            i = i + 1
            time.sleep(1)
        return s

if __name__ == "__main__":
    x = func()
    result = x.run(10)
    print('We unpause for 2 sec')
    x.resume()  # unpause
    time.sleep(2)
    print('We pause for 2 sec')
    x.hold()  # pause
    time.sleep(2)
    print('We unpause for 2 sec')
    x.resume()  # unpause
    time.sleep(2)
    print('Now we try to terminate in 2 sec')
    time.sleep(2)
    x.stop()
    return_val = result.get()
    print('get worked with ' + str(return_val))
I added a hold and a resume function and put the setup function inside the class.
But the lower example won't even run the function anymore.
What a complex little problem. I am puzzled by this.
EDIT 2:
I tried a workaround with what I have found so far.
Big trouble came in while using the multiprocessing.pool library.
It is not straightforward to use it with the USB connection...
I produced a mediocre workaround below:
from multiprocessing.pool import ThreadPool
import time

class switch:
    state = 1

s1 = switch()

def myFunction(arg):
    i = 0
    while s1.state == 1 or s1.state == 2 or s1.state == 3:
        if s1.state == 1:
            print(arg + i)
            s.append(arg + i)
            i = i + 1
            time.sleep(1)
        elif s1.state == 2:
            print('we entered snippet mode (state 2)')
            time.sleep(1)
            x = s
            return x
            pool.close()
            pool.join()
        elif s1.state == 3:
            while s1.state == 3:
                time.sleep(1)
                print('holding (state 3)')
    return s

if __name__ == "__main__":
    global s
    s = []
    print('we set the state in the class on top to ' + str(s1.state))
    pool = ThreadPool(processes=1)
    async_result = pool.apply_async(myFunction, (10,))
    print('in 5 sec we switch mode sir, buckle up')
    time.sleep(5)
    s1.state = 2
    print('we switched for a snippet which is')
    snippet = async_result.get()
    print(str(snippet[-1]) + ' this snippet comes from main')
    time.sleep(1)
    print('now we return to see the full list in the end')
    s1.state = 1
    async_result = pool.apply_async(myFunction, (10,))
    print('in 5 sec we hold it')
    time.sleep(5)
    s1.state = 3
    print('in 5 sec we exit')
    time.sleep(5)
    s1.state = 0
    return_val = async_result.get()
    print('Succsses if you see a list of numbers ' + str(return_val))
EDIT 3:
from multiprocessing.pool import ThreadPool
import time

class switch:
    state = 1

s1 = switch()

def myFunction(arg):
    i = 0
    while s1.state == 1 or s1.state == 2:
        if s1.state == 1:
            print(arg + i)
            s.append(arg + i)
            i = i + 1
            time.sleep(1)
        elif s1.state == 2:
            print('we entered snippet mode (state 2)')
            time.sleep(1)
            x = s
            return x
            pool.close()  # These are not relevant i guess.
            pool.join()   # These are not relevant i guess.
    return s

if __name__ == "__main__":
    global s
    s = []
    print('we set the state in the class on top to ' + str(s1.state))
    pool = ThreadPool(processes=1)
    async_result = pool.apply_async(myFunction, (10,))
    print('in 5 sec we switch mode sir, buckle up')
    time.sleep(5)
    s1.state = 2
    snippet = async_result.get()
    print(str(snippet[-1]) + ' this snippet comes from main')
    time.sleep(1)
    print('now we return to see the full list in the end')
    s1.state = 1
    async_result = pool.apply_async(myFunction, (10,))
    print('in 5 sec we exit')
    time.sleep(5)
    s1.state = 0
    return_val = async_result.get()
    print('Succsses if you see a list of numbers ' + str(return_val))
Well, this is what I have come up with...
Not great, not terrible. Maybe a bit more on the terrible side (:
I hate that I have to call pool.apply_async(myFunction, (10,)) again after I grab a single piece of data.
Currently only ThreadPool works with no further code changes in my actual script!
In a situation where I need a process to run continuously while occasionally doing other things, I like to use asyncio. This is a rough draft of how I would approach this:
import asyncio

class MyObject:
    def __init__(self):
        self.mydatastructure = []
        self.finished = False
        self.loop = None

    async def main_loop(self):
        while not self.finished:
            new_result = self.get_data()
            self.mydatastructure.append(new_result)
            await asyncio.sleep(0)

    async def timed_loop(self):
        while not self.finished:
            await asyncio.sleep(2)
            self.dotimedtask(self.mydatastructure)

    async def run(self):
        await asyncio.gather(self.main_loop(), self.timed_loop())

asyncio.run(MyObject().run())
Only one coroutine will be running at a time, with the timed one being scheduled once every 2 seconds. It always gets the data produced by the most recent continuous iteration. You could do things like keep a connection open on the object as well. Depending on your requirements (is it a 2-second interval, or once every other second no matter how long the task takes), there are library packages to make the scheduling a bit more elegant.
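To tie this draft back to the requirement that both loops stop at the same time, here is a rough runnable variant; get_data and dotimedtask are placeholder implementations I have assumed, and the stop condition is arbitrary:
import asyncio

class MyObject:
    def __init__(self):
        self.mydatastructure = []
        self.finished = False

    def get_data(self):
        return len(self.mydatastructure)          # placeholder for real data collection

    def dotimedtask(self, data):
        print("periodic report:", data[-3:])      # placeholder for the periodic work

    async def main_loop(self):
        while not self.finished:
            self.mydatastructure.append(self.get_data())
            if len(self.mydatastructure) >= 10:   # arbitrary stop condition
                self.finished = True              # both loops see this and exit
            await asyncio.sleep(0.2)

    async def timed_loop(self):
        while not self.finished:
            await asyncio.sleep(2)
            self.dotimedtask(self.mydatastructure)

    async def run(self):
        await asyncio.gather(self.main_loop(), self.timed_loop())

asyncio.run(MyObject().run())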

How to handle data returned from thread functions in main function in Python

I have a python script where I have created a thread. Below is the code snippet
stop_thread = False
get_data = False
status = ""
subject_id = ""

def get_data_function():
    global stop_thread
    global get_data
    global status
    global subject_id
    while stop_thread is False:
        if get_data:
            # SOME CODE
            # SOME MORE CODE
            (status, subject_id) = tive.extract_data()
            get_data = False
        time.sleep(1)

def main():
    global stop_thread
    global get_data
    global status
    global subject_id
    thread = Thread(target=get_data_function)
    thread.start()
    res_dict = dict()
    while True:
        # SOME CODE
        # SOME MORE CODE
        if some_condition:
            get_data = True
            res_dict['status'] = status
            res_dict['subject_id'] = subject_id
        # SOME CODE
        # SOME MORE CODE
In the above code I have defined a thread and its function get_data_function(). This function calls tive.extract_data(), which returns status and subject_id. I have defined these variables as global so that once we have their values, I can use them in the main function.
In the main function, after some_condition is True we need the values of status and subject_id, so I set the global variable get_data to True, which enables the main code of get_data_function and returns the data. The problem is that tive.extract_data() takes 2-3 seconds to respond, and during those 2-3 seconds res_dict['status'] = status and res_dict['subject_id'] = subject_id in the main function give errors; after that it starts working fine.
Is there any other way of handling these variables, in an optimized way, so that we don't get errors while we don't yet have values for them? Please help. Thanks.
I would define a thread class; I have had bad experiences using globals.
# ==================================================================================
class ThreadGetData(Thread):
    def __init__(self):
        super().__init__()
        self.stop_thread = False
        self.get_data = False
        self.status = ""
        self.subject_id = ""
        self.data_present = False

    # ------------------------------------------------------------------------------
    def get_data_function(self):
        if self.get_data:
            # SOME CODE
            # SOME MORE CODE
            (self.status, self.subject_id) = tive.extract_data()
            self.data_present = True
            self.get_data = False
        time.sleep(1)

    # ------------------------------------------------------------------------------
    def run(self):
        while not self.stop_thread:
            self.get_data_function()

# ==================================================================================
def main():
    thread = ThreadGetData()
    thread.start()
    res_dict = dict()
    while True:
        # SOME CODE
        # SOME MORE CODE
        if some_condition:
            thread.get_data = True
        if thread.data_present:
            res_dict['status'] = thread.status
            res_dict['subject_id'] = thread.subject_id
            thread.data_present = False
        # SOME CODE
        # SOME MORE CODE
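If you would rather block until the data is actually ready than poll data_present, a variation of the same class (sketched here under the same assumptions about tive as the question) can use a threading.Event:
import time
from threading import Thread, Event

class ThreadGetData(Thread):
    def __init__(self):
        super().__init__()
        self.stop_thread = False
        self.get_data = False
        self.data_ready = Event()   # set once status/subject_id have been filled in
        self.status = ""
        self.subject_id = ""

    def run(self):
        while not self.stop_thread:
            if self.get_data:
                (self.status, self.subject_id) = tive.extract_data()  # tive comes from the question's code
                self.get_data = False
                self.data_ready.set()
            time.sleep(1)

# In main(), instead of checking thread.data_present:
#     thread.get_data = True
#     thread.data_ready.wait()      # blocks only until extract_data() has finished
#     res_dict['status'] = thread.status
#     res_dict['subject_id'] = thread.subject_id
#     thread.data_ready.clear()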
I would use Python 3's asyncio module and a coroutine instead of a thread. You can then move all of the work that does not depend on the first coroutine's return value into a second coroutine, and use asyncio.gather() to run both coroutines concurrently, waiting until both are finished.
Here is a simple example which demonstrates this pattern:
import asyncio
import datetime

async def main():
    print("Starting at " + str(datetime.datetime.now()))
    values = await asyncio.gather(get_value(3), do_other_stuff())
    print("Received: " + str(list(values)[0]))
    print("Finished at " + str(datetime.datetime.now()))

async def get_value(n):
    await asyncio.sleep(3)
    return n*10

async def do_other_stuff():
    print("doing things 1")
    await asyncio.sleep(1)
    print("doing things 2")
    await asyncio.sleep(2)
    print("doing things 3")

if __name__ == "__main__":
    loop = asyncio.get_event_loop()
    loop.run_until_complete(main())

Cancel timer in Python

I am working on a timer class in Python and wrote a simple test for it. My goal is to print the "hello world" message 10 times and then cancel the timer once the iterations are done. The problem is that I am unable to cancel the timer, and the code seems to print "hello world" indefinitely.
Below is my code:
from threading import Timer

class myclass():
    iteration_count = 0
    heartbeat = 1

    def printMsg(self):
        print "hello world!"

    def start_job(self):
        self.printMsg()
        self.iteration_count = self.iteration_count + 1
        if self.iteration_count == 10:
            Timer(self.heartbeat, self.start_job, ()).cancel()
        Timer(self.heartbeat, self.start_job, ()).start()

m = myclass()
m.start_job()
I am using Python 2.7
Any help would be highly appreciated.
Your problem is that you create another Timer() in the if condition and call .cancel() on that new timer, not on the one that is already running. The following code solves your problem:
from threading import Timer

class MyClass(object):
    def __init__(self):
        self.iteration_count = 0
        self.heartbeat = 1

    @staticmethod
    def print_msg():
        print "hello world!"

    def start_job(self):
        self.print_msg()
        self.iteration_count += 1
        timer = Timer(
            interval=self.heartbeat,
            function=self.start_job,
        )
        timer.start()
        if self.iteration_count >= 10:
            timer.cancel()

MyClass().start_job()
It seems like you start your timer again right after you cancelled it.
If you change your code to return from start_job() when your end condition is reached, it should work:
if self.iteration_count == 10:
    Timer(self.heartbeat, self.start_job, ()).cancel()
    return
Actually, you don't even have to cancel the timer this way; you just don't start a new one if the condition is reached.
The cancel method is used to stop a timer before its action has begun, so simply returning is enough:
if self.iteration_count == 10:
    return
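Put together, the corrected start_job from the question only needs the early return (a sketch; everything else is unchanged):
def start_job(self):
    self.printMsg()
    self.iteration_count = self.iteration_count + 1
    if self.iteration_count == 10:
        return  # no new timer is scheduled, so the chain stops here
    Timer(self.heartbeat, self.start_job, ()).start()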
See Timer Objects
The timer can be stopped (before its action has begun) by calling the cancel() method.
def hello():
    print "hello, world"

t = Timer(30.0, hello)
t.start()  # will print "hello, world" after 30 seconds
t.cancel() # stop it printing "hello, world"

Semaphores on Python

I started programming in Python a few weeks ago and was trying to use semaphores to synchronize two simple threads, for learning purposes. Here is what I've got:
import threading

sem = threading.Semaphore()

def fun1():
    while True:
        sem.acquire()
        print(1)
        sem.release()

def fun2():
    while True:
        sem.acquire()
        print(2)
        sem.release()

t = threading.Thread(target = fun1)
t.start()
t2 = threading.Thread(target = fun2)
t2.start()
But it keeps printing just 1's. How can I interleave the prints?
It is working fine; it's just printing too fast for you to see. Try putting a time.sleep() (for a small amount of time) in both functions to sleep the thread for that long, so you can actually see both 1 and 2.
Example:
import threading
import time

sem = threading.Semaphore()

def fun1():
    while True:
        sem.acquire()
        print(1)
        sem.release()
        time.sleep(0.25)

def fun2():
    while True:
        sem.acquire()
        print(2)
        sem.release()
        time.sleep(0.25)

t = threading.Thread(target = fun1)
t.start()
t2 = threading.Thread(target = fun2)
t2.start()
Also, you can use the Lock/mutex method as follows:
import threading
import time

mutex = threading.Lock()  # is equal to threading.Semaphore(1)

def fun1():
    while True:
        mutex.acquire()
        print(1)
        mutex.release()
        time.sleep(.5)

def fun2():
    while True:
        mutex.acquire()
        print(2)
        mutex.release()
        time.sleep(.5)

t1 = threading.Thread(target=fun1).start()
t2 = threading.Thread(target=fun2).start()
Simpler style using "with":
import threading
import time

mutex = threading.Lock()  # is equal to threading.Semaphore(1)

def fun1():
    while True:
        with mutex:
            print(1)
        time.sleep(.5)

def fun2():
    while True:
        with mutex:
            print(2)
        time.sleep(.5)

t1 = threading.Thread(target=fun1).start()
t2 = threading.Thread(target=fun2).start()
[NOTE]:
The difference between mutex, semaphore, and lock
In fact, I was looking for asyncio.Semaphore, not threading.Semaphore,
and I believe someone else may want it too.
So I decided to share the asyncio.Semaphore version; I hope you don't mind.
from asyncio import (
    Task,
    Semaphore,
)
import asyncio
from typing import List

async def shopping(sem: Semaphore):
    while True:
        async with sem:
            print(shopping.__name__)
        await asyncio.sleep(0.25)  # Transfer control to the loop, and it will assign another job (if idle) to run.

async def coding(sem: Semaphore):
    while True:
        async with sem:
            print(coding.__name__)
        await asyncio.sleep(0.25)

async def main():
    sem = Semaphore(value=1)
    list_task: List[Task] = [asyncio.create_task(_coroutine(sem)) for _coroutine in (shopping, coding)]
    """
    # Normally, we would wait until all the tasks are done, but that is impossible in your case.
    for task in list_task:
        await task
    """
    await asyncio.sleep(2)  # So, I let the main loop wait for 2 seconds, then close the program.

asyncio.run(main())
Output:
shopping
coding
shopping
coding
shopping
coding
shopping
coding
shopping
coding
shopping
coding
shopping
coding
shopping
coding
Each coroutine runs 8 iterations of 0.25 s (8 * 0.25 = 2 s); with the two coroutines running concurrently, that gives the 16 lines printed during the 2-second wait.
I used this code to demonstrate how one thread can use a semaphore while the other thread waits (non-blocking) until the semaphore is available.
This was written using Python 3.6; not tested on any other version.
This will only work if the synchronization is being done from within the same process; IPC between separate processes will fail using this mechanism.
import threading
from time import sleep

sem = threading.Semaphore()

def fun1():
    print("fun1 starting")
    sem.acquire()
    for loop in range(1, 5):
        print("Fun1 Working {}".format(loop))
        sleep(1)
    sem.release()
    print("fun1 finished")

def fun2():
    print("fun2 starting")
    while not sem.acquire(blocking=False):
        print("Fun2 No Semaphore available")
        sleep(1)
    else:
        print("Got Semaphore")
        for loop in range(1, 5):
            print("Fun2 Working {}".format(loop))
            sleep(1)
        sem.release()

t1 = threading.Thread(target = fun1)
t2 = threading.Thread(target = fun2)
t1.start()
t2.start()
t1.join()
t2.join()
print("All Threads done Exiting")
When I run this, I get the following output:
fun1 starting
Fun1 Working 1
fun2 starting
Fun2 No Semaphore available
Fun1 Working 2
Fun2 No Semaphore available
Fun1 Working 3
Fun2 No Semaphore available
Fun1 Working 4
Fun2 No Semaphore available
fun1 finished
Got Semaphore
Fun2 Working 1
Fun2 Working 2
Fun2 Working 3
Fun2 Working 4
All Threads done Exiting
Existing answers are wastefully sleeping
I noticed that almost all answers use some form of time.sleep or asyncio.sleep, which blocks the thread. This should be avoided in real software, because blocking your thread for 0.25, 0.5 or 1 second is unnecessary and wasteful - you could be doing more processing, especially if your application is IO bound: it already blocks when it does IO, and you are introducing arbitrary delays (latency) into your processing time. If all your threads are sleeping, your app isn't doing anything. Also, these sleep values are quite arbitrary, which is why each answer sleeps (blocks the thread) for a different amount of time.
The answers use sleeping as a way to get Python's bytecode interpreter to pre-empt the thread after each print line, so that it alternates deterministically between running the 2 threads. By default, the interpreter pre-empts a thread every 5 ms (sys.getswitchinterval() returns 0.005), and remember that these threads never run in parallel because of Python's GIL.
Solution to problem
How can I interleave the prints?
So my answer would be, you do not want to use semaphores to print (or process) something in a certain order reliably, because you cannot rely on thread prioritization in Python. See Controlling scheduling priority of python threads? for more. time.sleep(arbitrarilyLargeEnoughNumber) doesn't really work when you have more than 2 concurrent pieces of code, since you don't know which one will run next - see * below. If the order matters, use a queue, and worker threads:
from threading import Thread
import queue

q = queue.Queue()

def enqueue():
    while True:
        q.put(1)
        q.put(2)

def reader():
    while True:
        value = q.get()
        print(value)

enqueuer_thread = Thread(target = enqueue)
reader_thread_1 = Thread(target = reader)
reader_thread_2 = Thread(target = reader)
reader_thread_3 = Thread(target = reader)
enqueuer_thread.start()
reader_thread_1.start()
reader_thread_2.start()
reader_thread_3.start()
...
Unfortunately in this problem, you don't get to use Semaphore.
*An extra check for you
If you try a modification of the top voted answer but with an extra function/thread to print(3), you'll get:
1
2
3
1
3
2
1
3
...
Within a few prints, the ordering is broken - it's 1-3-2.
You need to use 2 semaphores to do what you want to do, and you need to initialize them at 0.
import threading

SEM_FUN1 = threading.Semaphore(0)
SEM_FUN2 = threading.Semaphore(0)

def fun1() -> None:
    for _ in range(5):
        SEM_FUN1.acquire()
        print(1)
        SEM_FUN2.release()

def fun2() -> None:
    for _ in range(5):
        SEM_FUN2.acquire()
        print(2)
        SEM_FUN1.release()

threading.Thread(target=fun1).start()
threading.Thread(target=fun2).start()
SEM_FUN1.release()  # Trigger fun1
Output:
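The two semaphores enforce strict alternation, five rounds each:
1
2
1
2
1
2
1
2
1
2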
