Problem with ainput (async input in Python)

I have a queue and an input call. If the queue has something in it, I have to force the input call to stop. I used the asyncio module to do that. As an example, consider the following code:
import asyncio
import multiprocessing
import time

from aioconsole import ainput


def my_function(queue):
    time.sleep(3)
    queue.put(5)


async def my_loop(queue):
    while True:
        await asyncio.sleep(0.1)
        if not queue.empty():
            break


async def main():
    queue = multiprocessing.Queue()
    p = multiprocessing.Process(target=my_function, args=(queue,))
    p.start()

    task1 = asyncio.create_task(ainput("Enter text:"))
    task2 = asyncio.create_task(my_loop(queue))
    await asyncio.wait([task1, task2], return_when='FIRST_COMPLETED')
    try:
        text = task1.result()
        q = ""
    except asyncio.exceptions.InvalidStateError:
        text = ""
        q = queue.get()
    print('Doing stuff with input %s/%s...' % (text, q))
This works perfectly: the program ends as soon as the queue has something in it or the user enters some text. My real program has several input calls like this in a row, something like this:
import asyncio
import multiprocessing
import time

from aioconsole import ainput


def my_function(queue):
    time.sleep(3)
    queue.put(5)


async def my_loop(queue):
    while True:
        await asyncio.sleep(0.1)
        if not queue.empty():
            break


async def main():
    queue = multiprocessing.Queue()
    p = multiprocessing.Process(target=my_function, args=(queue,))
    p.start()

    task1 = asyncio.create_task(ainput("Enter text:"))
    task2 = asyncio.create_task(my_loop(queue))
    await asyncio.wait([task1, task2], return_when='FIRST_COMPLETED')
    try:
        text = task1.result()
        q = ""
    except asyncio.exceptions.InvalidStateError:
        text = ""
        q = queue.get()
    print('Doing stuff with input %s/%s...' % (text, q))

    task1 = asyncio.create_task(ainput("Next: "))
    task2 = asyncio.create_task(my_loop(queue))
    await asyncio.wait([task1, task2], return_when='FIRST_COMPLETED')
    try:
        text = task1.result()
        q = ""
    except asyncio.exceptions.InvalidStateError:
        text = ""
        q = queue.get()
    print('Doing stuff with input %s/%s...' % (text, q))


if __name__ == '__main__':
    asyncio.run(main())
The problem is that, if I wait for the queue the first time, then the second time I have to enter the input twice, as if the first input were still waiting. Do you know how I can definitively "break" the first input if the user doesn't type anything? Thanks in advance.

If I understand you correctly, you can use task.cancel() between the steps to cancel all pending tasks:
import asyncio
import multiprocessing
import time

from aioconsole import ainput


def my_function(queue):
    time.sleep(3)
    queue.put(5)


async def my_loop(queue):
    while True:
        await asyncio.sleep(0.1)
        if not queue.empty():
            break


async def main():
    queue = multiprocessing.Queue()
    p = multiprocessing.Process(target=my_function, args=(queue,))
    p.start()

    task1 = asyncio.create_task(ainput("Enter text:"))
    task2 = asyncio.create_task(my_loop(queue))
    tasks = [task1, task2]
    await asyncio.wait(tasks, return_when="FIRST_COMPLETED")
    try:
        text = task1.result()
        q = ""
    except asyncio.exceptions.InvalidStateError:
        text = ""
        q = queue.get()
    print()
    print("1. Doing stuff with input %s/%s..." % (text, q))

    # cancel all tasks:
    for t in [task1, task2]:
        t.cancel()
    # Wait until all worker tasks are cancelled:
    await asyncio.gather(*tasks, return_exceptions=True)

    task1 = asyncio.create_task(ainput("Next: "))
    task2 = asyncio.create_task(my_loop(queue))
    tasks = [task1, task2]
    await asyncio.wait(tasks, return_when="FIRST_COMPLETED")
    try:
        text = task1.result()
        q = ""
    except asyncio.exceptions.InvalidStateError:
        text = ""
        q = queue.get()
    print()
    print("2. Doing stuff with input %s/%s..." % (text, q))


if __name__ == "__main__":
    asyncio.run(main())
Prints (for example):
Enter text:
1. Doing stuff with input /5...
Next: Hello World
2. Doing stuff with input Hello World/...
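
Since the wait/cancel/collect pattern repeats for every prompt, it can also be factored into a small helper coroutine. A minimal sketch (the helper name prompt_or_queue is mine; it reuses my_loop from above):

async def prompt_or_queue(prompt, queue):
    # Returns (text, item); exactly one of the two is non-empty.
    input_task = asyncio.create_task(ainput(prompt))
    watch_task = asyncio.create_task(my_loop(queue))
    done, pending = await asyncio.wait(
        {input_task, watch_task}, return_when=asyncio.FIRST_COMPLETED
    )
    # Cancel whichever task did not finish and wait for the cancellation to settle.
    for t in pending:
        t.cancel()
    await asyncio.gather(*pending, return_exceptions=True)
    if input_task in done:
        return input_task.result(), ""
    return "", queue.get()

Each prompt in main() then becomes a single call, e.g. text, q = await prompt_or_queue("Next: ", queue).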


How can I put 'keyboard module' function into async code?

In my case, I want to hook the keyboard keys that are pressed, and when I press "0", I want to kill the terminal. Both functions should work at the same time, but my code doesn't work. What is wrong with this code?
import keyboard
import asyncio
import random  # used by sleep() below


def sleep(a):
    rand1 = random.uniform(0, 0.009)
    rand2 = random.uniform(0.01, 0.02)
    result = random.uniform(rand1, rand2)
    asyncio.sleep(a + result)


async def record_start():
    while True:
        k = keyboard.read_key()
        k = keyboard.read_key()
        print(k)


async def record_stop():
    while True:
        if keyboard.is_pressed('0'):
            print('stop')
            sleep(1)
            exit()


async def main():
    await asyncio.gather(
        record_stop(),
        record_start(),
    )


asyncio.run(main())
I tried using other modules, and I assume the problem is either the modules or the way I use "while".
Your record_start function never gives any other async code a chance to run.
Introduce an awaiting call in it, such as await asyncio.sleep(0.01) (it could be sleep(0), but I'd advise a larger interval), and things should work.
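Note that keyboard.read_key() is itself a blocking call, so even with an awaited sleep in the loop the event loop can still stall while waiting for a key. One way around that (my own sketch, not part of the answer above; assumes Python 3.9+ for asyncio.to_thread) is to push the blocking read into a worker thread:

import asyncio

import keyboard


async def record_start():
    while True:
        # read_key() blocks, so run it off the event loop in a thread
        k = await asyncio.to_thread(keyboard.read_key)
        print(k)


async def record_stop():
    while True:
        if keyboard.is_pressed('0'):
            print('stop')
            break
        # yield to the event loop so record_start can run
        await asyncio.sleep(0.01)


async def main():
    stop = asyncio.create_task(record_stop())
    start = asyncio.create_task(record_start())
    await stop          # returns once '0' is pressed
    start.cancel()
    # the worker thread may still be blocked in read_key(), so one extra
    # key press can be needed before the process actually exits


asyncio.run(main())
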
I just solved this issue. It doesn't need the 'asyncio' module; instead, I run this code with the 'threading' module.
from threading import Thread
import os
import time

import keyboard


def thread_1():
    while True:
        start_time = time.time()
        k = keyboard.read_key()
        k = keyboard.read_key()
        print("sleep(%s)" % round(time.time() - start_time, 3))
        print("press('%s')" % k)


def thread_2():
    while True:
        if keyboard.is_pressed('0'):
            print('stop')
            pid = os.getpid()
            os.kill(pid, 2)  # send SIGINT (signal 2) to this process


if __name__ == "__main__":
    t1 = Thread(target=thread_1)
    t2 = Thread(target=thread_2)
    print('start')
    t1.start()
    t2.start()
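Threading works here where the plain asyncio version did not because keyboard.read_key() blocks: OS threads are preempted, so thread_2 keeps polling even while thread_1 is stuck inside read_key(), whereas coroutines only hand over control at an await.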

How to stop threads using Queue()

I have a program (Python 3.9.10) that has a read queue and a write queue. One thread reads and, once read, sends to the write queue; another thread writes.
All works fine unless there is an error. If there is, the threads do not stop.
In the following code I am simulating an error being detected in the read thread and trying to stop the threads from reading/writing so the program exits; however, the program/threads stay active and the program never finishes. If I remove the error-simulation code, the threads stop and the program finishes.
I wish to handle the errors WITHIN the threads and, if need be, stop the threads/program without throwing an error up.
What am I doing wrong? Thanks.
Here is a working example of my issue:
import pandas as pd
import datetime
import traceback
from queue import Queue
from threading import Thread
import time

dlQueue = Queue()
writeQueue = Queue()
dlQDone = False
errorStop = False


def log(text):
    text = datetime.datetime.now().strftime("%Y/%m/%d, %H:%M:%S ") + text
    print(text)


def errorBreak():
    global dlQueue
    global writeQueue
    global errorStop
    global dlQDone
    dlQueue = Queue()
    writeQueue = Queue()
    errorStop = True
    dlQDone = True


def downloadTable(t, q):
    global dlQDone
    global errorStop
    while True:
        if errorStop:
            return
        nextQ = q.get()
        log("READING: " + nextQ)
        writeQueue.put("Writing " + nextQ)
        log("DONE READING: " + nextQ)
        ####simulating an error and need to exit threads###
        if nextQ == "Read 7":
            log("Breaking Read")
            errorBreak()
            return
        ###################################################
        q.task_done()
        if q.qsize() == 0:
            log("Download QUEUE finished")
            dlQDone = True
            return


def writeTable(t, q):
    global errorStop
    global dlQDone
    while True:
        if errorStop:
            log("Error Stop return")
            return
        nextQ = q.get()
        log("WRITING: " + nextQ)
        log("DONE WRITING: " + nextQ)
        q.task_done()
        if dlQDone:
            if q.qsize() == 0:
                log("Writing QUEUE finished")
                return


try:
    log("PROCESS STARTING!!")
    for i in range(10):
        dlQueue.put("Read " + str(i))
    startTime = time.time()
    log("Starting threaded pull....")
    dlWorker = Thread(
        target=downloadTable,
        args=(
            "DL",
            dlQueue,
        ),
    )
    dlWorker.start()
    writeWorker = Thread(
        target=writeTable,
        args=(
            "Write",
            writeQueue,
        ),
    )
    writeWorker.start()
    dlQueue.join()
    writeQueue.join()
    log(f"Finished thread in {str(time.time() - startTime)} seconds")  # CANNOT GET HERE
    log("Threads: " + str(dlWorker.is_alive()) + str(writeWorker.is_alive()))
except Exception as error:
    log(error)
    log(traceback.format_exc())
If I understood you correctly, you want to stop both threads in case there's an error that warrants it. You can do that with a threading.Event, and by changing your queue reads to use a timeout.
import datetime
import time
import queue
import threading

dlQueue = queue.Queue()
writeQueue = queue.Queue()
stop_event = threading.Event()


def log(text):
    text = datetime.datetime.now().strftime("%Y/%m/%d, %H:%M:%S ") + text
    print(text)


def downloadTable(t: str, q: queue.Queue):
    while not stop_event.is_set():
        try:
            nextQ = q.get(timeout=1)
        except queue.Empty:
            continue
        log("READING: " + nextQ)
        writeQueue.put("Writing " + nextQ)
        log("DONE READING: " + nextQ)
        if nextQ == "7":
            log("Breaking Read")
            stop_event.set()
            break
        q.task_done()
    log("Download thread exiting")


def writeTable(t, q):
    while not stop_event.is_set():
        try:
            nextQ = q.get(timeout=1)
        except queue.Empty:
            continue
        log("WRITING: " + nextQ)
        log("DONE WRITING: " + nextQ)
        q.task_done()
    log("Write thread exiting")


def main():
    log("PROCESS STARTING!!")
    for i in range(10):
        dlQueue.put(f"{i}")
    log("Starting threaded pull....")
    dlWorker = threading.Thread(
        target=downloadTable,
        args=(
            "DL",
            dlQueue,
        ),
    )
    dlWorker.start()
    writeWorker = threading.Thread(
        target=writeTable,
        args=(
            "Write",
            writeQueue,
        ),
    )
    writeWorker.start()
    dlWorker.join()
    writeWorker.join()


if __name__ == "__main__":
    main()
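The timeout on q.get() is what keeps the workers responsive: without it, a thread blocked in get() would never loop back to check stop_event, so it could hang forever once the other side stops producing. The one-second value is an arbitrary trade-off between shutdown latency and busy-looping.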

`asyncio.run()` does not wait for coroutine to finish

I'm running this code in Python 3.7.3
import asyncio


async def fun(time):
    print(f"will wait for {time}")
    await asyncio.sleep(time)
    print(f"done waiting for {time}")


async def async_cenas():
    t1 = asyncio.create_task(fun(1))
    print("after 1")
    t2 = asyncio.create_task(fun(2))
    print("after 2")


def main():
    t1 = asyncio.run(async_cenas())
    print("ok main")
    print(t1)


if __name__ == '__main__':
    main()
    print("finished __name__")
And getting this output:
after 1
after 2
will wait for 1
will wait for 2
ok main
None
finished __name__
I was expecting to see also:
done waiting for 1
done waiting for 2
I.e., I was expecting asyncio.run(X) to wait for the coroutines to complete before proceeding.
If you want to wait for the completion of all tasks spawned by create_task, you need to do it explicitly, for example by awaiting them in turn or by using asyncio facilities like gather or wait (the difference is described here). Otherwise, they will be cancelled by asyncio.run when the main coroutine passed to it exits.
Example:
import asyncio


async def fun(time):
    print(f"will wait for {time}")
    await asyncio.sleep(time)
    print(f"done waiting for {time}")


async def async_cenas():
    t1 = asyncio.create_task(fun(1))
    print("after 1")
    t2 = asyncio.create_task(fun(2))
    print("after 2")
    await asyncio.wait({t1, t2}, return_when=asyncio.ALL_COMPLETED)
    # or just
    # await t1
    # await t2


def main():
    t1 = asyncio.run(async_cenas())
    print("ok main")
    print(t1)


if __name__ == '__main__':
    main()
    print("finished __name__")
after 1
after 2
will wait for 1
will wait for 2
done waiting for 1
done waiting for 2
ok main
None
finished __name__
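If you prefer gather, the explicit wait can be replaced by awaiting both tasks in one call; gather also re-raises any exception a task produced. A minimal variant of async_cenas:

async def async_cenas():
    t1 = asyncio.create_task(fun(1))
    print("after 1")
    t2 = asyncio.create_task(fun(2))
    print("after 2")
    # waits for both tasks and propagates their exceptions, if any
    await asyncio.gather(t1, t2)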

Non-blocking 'while True' using asyncio

Using the code below, I'm attempting to start two infinite loops using asyncio:
import asyncio


async def do_job_1():
    while True:
        print('do_job_1')
        await asyncio.sleep(5)


async def do_job_2():
    while True:
        print('do_job_2')
        await asyncio.sleep(5)


if __name__ == '__main__':
    asyncio.run(do_job_1())
    asyncio.run(do_job_2())
do_job_1 blocks do_job_2: 'do_job_2' is never printed. What mistake have I made?
Ultimately I'm trying to convert kafka consumer code:
from confluent_kafka import Consumer, KafkaError

settings = {
    'bootstrap.servers': 'localhost:9092',
    'group.id': 'mygroup',
    'client.id': 'client-1',
    'enable.auto.commit': True,
    'session.timeout.ms': 6000,
    'default.topic.config': {'auto.offset.reset': 'smallest'}
}

c = Consumer(settings)
c.subscribe(['mytopic'])

try:
    while True:
        msg = c.poll(0.1)
        if msg is None:
            continue
        elif not msg.error():
            print('Received message: {0}'.format(msg.value()))
        elif msg.error().code() == KafkaError._PARTITION_EOF:
            print('End of partition reached {0}/{1}'
                  .format(msg.topic(), msg.partition()))
        else:
            print('Error occured: {0}'.format(msg.error().str()))
except KeyboardInterrupt:
    pass
finally:
    c.close()
taken from https://www.confluent.io/blog/introduction-to-apache-kafka-for-python-programmers to be concurrent so I can parallelize processing of Kafka messages.
From help(asyncio.run):
It should be used as a main entry point for asyncio programs, and should ideally only be called once.
But you can use asyncio.gather to join the tasks:
import asyncio


async def do_job_1():
    while True:
        print('do_job_1')
        await asyncio.sleep(5)


async def do_job_2():
    while True:
        print('do_job_2')
        await asyncio.sleep(5)


async def main():
    await asyncio.gather(do_job_1(), do_job_2())


if __name__ == '__main__':
    asyncio.run(main())
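For the Kafka case specifically, Consumer.poll() is a blocking call, so it cannot simply be dropped into a coroutine as-is. A rough sketch of one way to combine it with other coroutines (my own illustration, not from the answer; it assumes Python 3.9+ for asyncio.to_thread and reuses the settings dict and the do_job_* coroutines defined above):

import asyncio

from confluent_kafka import Consumer


async def consume(c):
    while True:
        # run the blocking poll() in a thread so the event loop stays free
        msg = await asyncio.to_thread(c.poll, 0.1)
        if msg is None:
            continue
        if msg.error():
            print('Error: {0}'.format(msg.error().str()))
            continue
        print('Received message: {0}'.format(msg.value()))


async def main():
    c = Consumer(settings)      # 'settings' as defined in the question
    c.subscribe(['mytopic'])
    try:
        await asyncio.gather(consume(c), do_job_1(), do_job_2())
    finally:
        c.close()


asyncio.run(main())

Per-message handling can then be fanned out to separate tasks (or a process pool, if it is CPU-bound) instead of being done inline in consume().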

How to prevent print() from lifting up input text while the user is typing

Basically, I have an async worker doing stuff, but I can catch a user command-line input at any time.
The problem is just visual: when the user is typing and the code prints something, the text the user was typing is lifted up together with the printed text. How can I make the last line "isolated" from the rest of the console output?
This is an example code:
import queue
import threading
import time
import heapq


def worker():
    while True:
        item = q.get()
        if item is None:
            break
        do_work(item)
        # q.task_done()
        time.sleep(2)
        q.put(item)


def do_work(item):
    print(item, end='')
    print(time.time(), end='')


q = queue.PriorityQueue()
num_worker_threads = 1

threads = []
for i in range(num_worker_threads):
    t = threading.Thread(target=worker)
    t.start()
    threads.append(t)

fruits = [(1, "apple"), (2, "banana"), (3, "cherry")]
for x in fruits:
    q.put(x)

gameFinished = 0

# block until all tasks are done
# q.join()
while not gameFinished:
    q.put((-1, input()))

# stop workers
for i in range(num_worker_threads):
    q.put(None)
for t in threads:
    t.join()

input("Press enter to exit ;)")
