Python Bleak scan for advertisements and exit event loop

I've inherited some code that utilizes Python Bleak to scan for advertisements emitted from a certain device. Whenever an advertisement from the Bluetooth MAC address and service ID we're looking for is detected, and a certain condition on the extracted payload information is true, we want to terminate and return. In the attached code, I've masked the Bluetooth address and service IDs.
Not being too familiar with the event loop: is there a way to exit before the timer runs out? I suppose there's probably a better way to approach this problem.
Sample code:
import asyncio
import struct

from bleak import BleakScanner

timeout_seconds = 10
address_to_look_for = 'masked'
service_id_to_look_for = 'masked'


def detection_callback(device, advertisement_data):
    if device.address == address_to_look_for:
        byte_data = advertisement_data.service_data.get(service_id_to_look_for)
        # unpack_from returns a tuple, so unpack the single value
        num_to_test, = struct.unpack_from('<I', byte_data, 0)
        if num_to_test == 1:
            print('here we want to terminate')


async def run():
    scanner = BleakScanner()
    scanner.register_detection_callback(detection_callback)
    await scanner.start()
    await asyncio.sleep(timeout_seconds)
    await scanner.stop()


if __name__ == '__main__':
    loop = asyncio.get_event_loop()
    loop.run_until_complete(run())

I'm sure there are many ways this can be done. A small modification to your code: rather than sleeping for the full period before you stop the scan, you could have a while loop that ends on time elapsed or on a device-found event.
For example:
import asyncio
import struct

from bleak import BleakScanner

timeout_seconds = 20
address_to_look_for = 'F1:D9:3B:39:4D:A2'
service_id_to_look_for = '0000feaa-0000-1000-8000-00805f9b34fb'


class MyScanner:
    def __init__(self):
        self._scanner = BleakScanner()
        self._scanner.register_detection_callback(self.detection_callback)
        self.scanning = asyncio.Event()

    def detection_callback(self, device, advertisement_data):
        # Looking for:
        # AdvertisementData(service_data={
        #     '0000feaa-0000-1000-8000-00805f9b34fb': b'\x00\xf6\x00\x00\x00Jupiter\x00\x00\x00\x00\x00\x0b'},
        #     service_uuids=['0000feaa-0000-1000-8000-00805f9b34fb'])
        if device.address == address_to_look_for:
            byte_data = advertisement_data.service_data.get(service_id_to_look_for)
            num_to_test, = struct.unpack_from('<I', byte_data, 0)
            if num_to_test == 62976:
                print('\t\tDevice found so we terminate')
                self.scanning.clear()

    async def run(self):
        await self._scanner.start()
        self.scanning.set()
        # `loop` is the module-level event loop created in the __main__ block below
        end_time = loop.time() + timeout_seconds
        while self.scanning.is_set():
            if loop.time() > end_time:
                self.scanning.clear()
                print('\t\tScan has timed out so we terminate')
            await asyncio.sleep(0.1)
        await self._scanner.stop()


if __name__ == '__main__':
    my_scanner = MyScanner()
    loop = asyncio.get_event_loop()
    loop.run_until_complete(my_scanner.run())
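If you'd rather not poll every 0.1 seconds, a variant (a minimal sketch, not from the original answer) is to invert the event, setting it when the device is found, and await it with asyncio.wait_for, which returns as soon as the callback fires or raises asyncio.TimeoutError when the timer runs out:

async def scan_until_found():
    found = asyncio.Event()

    def detection_callback(device, advertisement_data):
        # Same matching logic as above, but set the event on a hit.
        if device.address == address_to_look_for:
            byte_data = advertisement_data.service_data.get(service_id_to_look_for)
            num_to_test, = struct.unpack_from('<I', byte_data, 0)
            if num_to_test == 62976:
                found.set()

    scanner = BleakScanner()
    scanner.register_detection_callback(detection_callback)
    await scanner.start()
    try:
        # Returns as soon as the callback sets the event, or raises on timeout.
        await asyncio.wait_for(found.wait(), timeout=timeout_seconds)
        print('Device found so we terminate')
    except asyncio.TimeoutError:
        print('Scan has timed out so we terminate')
    finally:
        await scanner.stop()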

Related

RuntimeError: Timeout context manager should be used inside a task when using threading and asyncio in aiogram

I tried to create a Telegram bot in aiogram to send messages when a particular share price is reached. I'm using threading in order to run this in the background while the bot takes user inputs. Basically, it takes a dict, uses each key as a ticker for yfinance, gets the current price, compares it with the given condition, and if the condition is true it sends an alert to a chat. This is the code:
# bot info
import yfinance as yf
import logging
logging.basicConfig(level=logging.INFO)
from aiogram import Bot, Dispatcher, executor
import asyncio
import threading

bot = Bot(token='bot api key')
dp = Dispatcher(bot)

sh = {}
jk = {}

# reads a dict from a text file
with open('shares.txt', 'r') as f:
    a = f.read()
    b = a.rstrip('}').lstrip('{').split(',')
    for i in b:
        try:
            c = i.split(':')
            com_name = c[0].strip().strip(''' ' ''').strip()
            com_price = float(c[1])
            sh[com_name] = com_price
        except:
            sh = {}
            break


async def send_price(pp: str):
    await bot.send_message(chat_id=USER_CHAT_ID, text=str(pp))  # chat id masked in the question


# price updater
def pp():
    while True:
        pl = sh.copy()
        index = ''
        for keys, values in list(pl.items()):
            if keys in jk and pl[keys] == jk[keys]:
                del pl[keys]
        print(pl)
        kj = pl
        if len(kj) == 0:
            break
        for i in kj:
            if kj[i] == 0:
                break
            s = yf.Ticker(i)
            a = (s.info)['currentPrice']
            # print(s, a)
            if kj[i] == 0:
                continue
            # print('kj', kj[i])
            if kj[i] <= a:
                index += f'{i} is currently up rn with current price being {str(a)} \n'
                jk[i] = kj[i]
        # print(index)
        if len(index) != 0:
            asyncio.run(send_price(index))


threading.Thread(target=pp).start()

if __name__ == '__main__':
    executor.start_polling(dp, skip_updates=True)
When I run the code I get this error:
RuntimeError: Timeout context manager should be used inside a task
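The usual cause of this error is that aiogram's Bot binds its aiohttp session and timeout contexts to the event loop it was created on, while asyncio.run() inside the worker thread spins up a fresh loop. A minimal, hedged sketch of the common fix, scheduling the coroutine back onto the main loop with asyncio.run_coroutine_threadsafe (the same pattern the bleak/tkinter answer further down uses); main_loop and the reworked pp() are illustrative names, not from the original code:

# In the main thread, before starting the worker thread:
main_loop = asyncio.get_event_loop()

def pp():
    while True:
        # ... same price-checking logic as above ...
        if len(index) != 0:
            # Hand the coroutine to the bot's own loop instead of
            # creating a new loop with asyncio.run() in this thread.
            future = asyncio.run_coroutine_threadsafe(send_price(index), main_loop)
            future.result()  # optionally wait until the message is sent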

Timeout and count control with asyncio

I'm trying to use an asynchronous script for parsing with asyncio. I found a similar question and took this answer as a pattern for my tasks. I added latency for requests (1st part of the linked answer) and also tried to add a counter of active requests (2nd part). But this code launches just 5 requests and then simply waits.
I didn't find a good explanation of how asyncio.Event works, so I would like to ask you to help me improve my code. Thank you in advance.
import json
from bs4 import BeautifulSoup
import asyncio
import aiohttp

active_calls = 0
MAX_CALLS = 5


def write_to_txt_file(text, name):
    f = open(f'{PATH}{name}.txt', 'w')
    f.write(text)
    f.close()


async def getData(item, session, next_delay, event):
    global active_calls, next_delay
    await event.wait()
    if active_calls > MAX_CALLS - 1:
        event.clear()
        next_delay = 0.1
    print('start', active_calls)
    active_calls += 1
    next_delay += DELAY
    await asyncio.sleep(next_delay)
    try:
        async with session.get(url=item['Link']) as response:
            soup = BeautifulSoup(await response.text(), 'html.parser')
            name = str(item["ID"]) + '. ' + item["Title"][:100]
            text = soup.find(id="begin").get_text()
            write_to_txt_file(text, name)
    finally:
        active_calls -= 1
        if active_calls == 0:
            event.set()


async def parseFromJson():
    with open('./data2/csvjson.json') as data_file:  # take links from JSON
        data = json.load(data_file)

    async with aiohttp.ClientSession() as session:
        tasks = []
        event = asyncio.Event()
        event.set()
        next_delay = 0.1
        DELAY = 0.3
        for item in data:
            task = asyncio.create_task(getData(item, session, next_delay, event))
            next_delay += DELAY
            tasks.append(task)
        await asyncio.gather(*tasks)


def main():
    asyncio.run(parseFromJson())


if __name__ == "__main__":
    main()
UPD: As I understand it, this code can only stop requests, never start them again?
UPD: I have changed my code and now it works like this:
1. All getData() functions are launched at one time.
2. The script launches 5 requests and stops the rest of them at the 'await event.wait()' line.
3. All 5 requests finish, and then 'event.set()' runs.
4. After this, all the remaining functions continue their work and the rest of the requests start (without the limitation of 5 requests).
How to fix it?
So I just added this loop, so that every function re-checks the event each time. It doesn't seem correct, but it helped me:

while active_calls > MAX_CALLS:
    print(1)
    await event.wait()
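As an aside, the standard asyncio tool for capping in-flight requests is asyncio.Semaphore rather than a hand-rolled counter plus Event; a minimal sketch under that substitution (keeping the item/session names from the question):

import asyncio

import aiohttp

MAX_CALLS = 5
semaphore = asyncio.Semaphore(MAX_CALLS)

async def getData(item, session):
    # At most MAX_CALLS coroutines run the body at once; the rest
    # suspend here and resume one by one as slots free up.
    async with semaphore:
        async with session.get(url=item['Link']) as response:
            text = await response.text()
            # ... parse with BeautifulSoup and save, as in the question ...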

Run bleak (python library) in background with asyncio

I want to use the bleak library in Python to receive data from a Bluetooth Low Energy device. This part is working. My problem now is that I don't know how to run this code in the background or in parallel.
Eventually, I want to build a tiny Python app which processes the data from the Bluetooth device. So bleak is looping all the time, fetching data from the Bluetooth device and sending it to the main process, where it is processed and displayed.
For some reason, bleak does not run in a thread. Is it possible to use asyncio for this (since it is already used by bleak, maybe that's a good way to go)?
I checked out threads and multiprocessing, but somehow I only found examples without processes which loop infinitely and send data. I'm totally new to the topic of parallelization and/or asynchronous processes. Maybe one of you can give a hint where to look for a proper solution for this case.
Below is my code so far (for now I just loop and print data).
import asyncio  # needed for the event loop at the bottom
import json
import time

from bleak import BleakClient

current_index = 0
time_array = [0] * 20


def TicTocGenerator():
    # Generator that returns time differences
    ti = 0  # initial time
    tf = time.time()  # final time
    while True:
        ti = tf
        tf = time.time()
        yield tf - ti  # returns the time difference


TicToc = TicTocGenerator()  # create an instance of the TicTocGen generator


# This will be the main function through which we define both tic() and toc()
def toc(tempBool=True):
    # Prints the time difference yielded by generator instance TicToc
    tempTimeInterval = next(TicToc)
    global current_index
    if tempBool:
        # print("Elapsed time: %f seconds.\n" % tempTimeInterval)
        time_array[current_index] = tempTimeInterval
        if current_index == 19:
            current_index = 0
        else:
            current_index += 1


def tic():
    # Records a time in TicToc, marks the beginning of a time interval
    toc(False)


def Average(lst):
    return sum(lst) / len(lst)


# address = "30:ae:a4:5d:bc:ba"
address = "CCA9907B-10EA-411E-9816-A5E247DCA0C7"
MODEL_NBR_UUID = "beb5483e-36e1-4688-b7f5-ea07361b26a8"


async def run(address, loop):
    async with BleakClient(address, loop=loop) as client:
        while True:
            tic()
            model_number = await client.read_gatt_char(MODEL_NBR_UUID)
            toc()
            json_payload = json.loads(model_number)
            print()
            print(json_payload)
            print("Temp [°C]: " + "{:.2f}".format(json_payload["Temp"]))
            print("Volt [V]: " + "{:.2f}".format(json_payload["Volt"]))
            print("AngX: " + str(json_payload["AngX"]))
            print("AngY: " + str(json_payload["AngY"]))
            print("AngZ: " + str(json_payload["AngZ"]))
            # print("Millis: {0}".format("".join(map(chr, model_number))))
            print("Average [ms]: {:.1f}".format(Average(time_array) * 1000))


loop = asyncio.get_event_loop()
loop.run_until_complete(run(address, loop))
I had to make a GUI for an app that automates FUOTA on multiple BLE devices, so my solution was to put the bleak loop in a separate thread in order to be able to use the tkinter mainloop in the main thread. You need to use asyncio.run_coroutine_threadsafe to schedule a new task from the main thread.
import asyncio
from threading import Thread

import tkinter as tk
from bleak import BleakScanner


async def scan():
    devices = await BleakScanner.discover()
    for device in devices:
        print(device)


def startScan():
    # call startScan() from the main thread, e.g. from a button handler
    asyncio.run_coroutine_threadsafe(scan(), loop)


if __name__ == "__main__":
    window = tk.Tk()
    # ...
    loop = asyncio.get_event_loop()

    def bleak_thread(loop):
        asyncio.set_event_loop(loop)
        loop.run_forever()

    t = Thread(target=bleak_thread, args=(loop,))
    t.start()

    window.mainloop()
    loop.call_soon_threadsafe(loop.stop)
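asyncio.run_coroutine_threadsafe returns a concurrent.futures.Future, so the GUI thread can also get data back from the bleak loop. A small hedged extension of the sketch above, assuming scan() is changed to return the discovered devices:

def startScanAndGetDevices():
    future = asyncio.run_coroutine_threadsafe(scan(), loop)
    # result() blocks the calling (GUI) thread, so in a real app you'd
    # poll future.done() from a tkinter after() callback instead;
    # the timeout keeps a hung scan from blocking forever.
    return future.result(timeout=10)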

Python: Correct way of using proxybroker lib with asyncio

I want to use the ProxyBroker lib in my Python program to generate a list/queue of 10 working proxies.
Unfortunately, I was not able to find anything similar on the lib's examples page.
This is what I have right now, but it feels like I'm using asyncio the wrong way to accomplish this task, especially the gather function in combination with the collect(proxies) call.
def get_proxies(self, limit=10):
    async def collect(proxies):
        p = []
        while True:
            proxy = await proxies.get()
            if proxy is None:
                break
            p.append(proxy)
        return p

    proxies = asyncio.Queue()
    broker = Broker(proxies)
    tasks = asyncio.gather(
        broker.find(types=['HTTP', 'HTTPS'], limit=10),
        collect(proxies))
    loop = asyncio.get_event_loop()
    proxy_list = loop.run_until_complete(tasks)
    loop.close()
    return proxy_list
What would be the preferred/correct way of generating the proxy list?
You can do it like this:
"""Find and show 10 working HTTP(S) proxies."""
import asyncio
from proxybroker import Broker
async def show(proxies):
while True:
proxy = await proxies.get()
if proxy is None: break
print('Found proxy: %s' % proxy)
proxies = asyncio.Queue()
broker = Broker(proxies)
tasks = asyncio.gather(
broker.find(types=['HTTP', 'HTTPS'], limit=10),
show(proxies))
loop = asyncio.get_event_loop()
loop.run_until_complete(tasks)
Or, if you want to save the proxies to a file:
import asyncio
from proxybroker import Broker


async def save(proxies, filename):
    """Save proxies to a file."""
    with open(filename, 'w') as f:
        while True:
            proxy = await proxies.get()
            if proxy is None:
                break
            proto = 'https' if 'HTTPS' in proxy.types else 'http'
            row = '%s://%s:%d\n' % (proto, proxy.host, proxy.port)
            f.write(row)


def main():
    proxies = asyncio.Queue()
    broker = Broker(proxies)
    tasks = asyncio.gather(broker.find(types=['HTTP', 'HTTPS'], limit=10),
                           save(proxies, filename='proxies.txt'))
    loop = asyncio.get_event_loop()
    loop.run_until_complete(tasks)


if __name__ == '__main__':
    main()
That's all! Good luck!
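Tying this back to the question's get_proxies: gather returns one result per awaitable, and broker.find itself produces no return value (it pushes the proxies, then a None sentinel, into the queue), so with the question's collect coroutine in place of show the collected list is the second element of the result. A minimal sketch of that final step:

tasks = asyncio.gather(
    broker.find(types=['HTTP', 'HTTPS'], limit=10),
    collect(proxies))
loop = asyncio.get_event_loop()
# gather -> [result of find (None), result of collect (the list)]
_, proxy_list = loop.run_until_complete(tasks)
print(proxy_list)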

How do I use concurrency in faust?

I'm working with faust and would like to leverage the concurrency feature.
The example listed doesn't quite demonstrate the use of concurrency.
What I would like to do is read from a kafka producer and unnest the JSON. Then the shipments are sent on to a process that calculates billing etc. I want to send 10 shipments at a time to the function which does the calculation; for this I'm using concurrency, so 10 shipments can be calculated concurrently.
import faust
import time
import json
from typing import List
import asyncio


class Items(faust.Record):
    name: str
    billing_unit: str
    billing_qty: int


class Shipments(faust.Record, serializer="json"):
    shipments: List[Items]
    ship_type: str
    shipping_service: str
    shipped_at: str


app = faust.App('ships_app', broker='kafka://localhost:9092')
ship_topic = app.topic('test_shipments', value_type=Shipments)


@app.agent(value_type=str, concurrency=10)
async def mytask(records):
    # task that does some other activity
    async for record in records:
        print(f'received....{record}')
        time.sleep(5)


@app.agent(ship_topic)
async def process_shipments(shipments):
    # async for ships in stream.take(100, within=10):
    async for ships in shipments:
        data = ships.shipments  # the record field is named 'shipments'
        uid = faust.uuid()
        for item in data:
            item_uuid = faust.uuid()
            print(f'{uid}, {item_uuid}, {ships.ship_type}, {ships.shipping_service}, '
                  f'{ships.shipped_at}, {item.name}, {item.billing_unit}, {item.billing_qty}')
            await mytask.send(value="{} -- {}".format(uid, item_uuid))
            # time.sleep(2)
        # time.sleep(10)


if __name__ == '__main__':
    app.main()
OK, I figured out how it works. The problem with the example you gave was actually the time.sleep bit, not the concurrency bit. Below are two silly examples that show how an agent would work with and without concurrency.
import faust
import asyncio

app = faust.App(
    'example_app',
    broker="kafka://localhost:9092",
    value_serializer='raw',
)
t = app.topic('topic_1')


# @app.agent(t, concurrency=1)
# async def my_task(tasks):
#     async for my_task in tasks:
#         val = my_task.decode('utf-8')
#         if val == "Meher":
#             # This will print out second because there is only one thread.
#             # It'll take 5ish seconds and print out right after Waldo.
#             print("Meher's a jerk.")
#         else:
#             await asyncio.sleep(5)
#             # Since there's only one thread running this will effectively
#             # block the agent.
#             print(f"Where did {val} go?")


@app.agent(t, concurrency=2)
async def my_task2(tasks):
    async for my_task in tasks:
        val = my_task.decode('utf-8')
        if val == "Meher":
            # This will print out first even though the Meher message is
            # received second.
            print("Meher's a jerk.")
        else:
            await asyncio.sleep(5)
            # Because this will be sleeping and there are two threads available.
            print(f"Where did {val} go?")


# ===============================
# In another process run:
from kafka import KafkaProducer

p = KafkaProducer()
p.send('topic_1', b'Waldo'); p.send('topic_1', b'Meher')
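Since the question also asked about handling 10 shipments at a time, it may be worth noting that faust has a built-in batching helper, stream.take(max_, within=...), already hinted at in the commented-out line in the question. A minimal sketch using the question's topic and record names:

@app.agent(ship_topic)
async def process_shipments_batched(stream):
    # Yields lists of up to 10 shipments, or whatever arrived
    # within 5 seconds, whichever comes first.
    async for batch in stream.take(10, within=5):
        for ships in batch:
            print(f'billing {ships.ship_type} via {ships.shipping_service}')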
