Binance Multithread Sockets - functions not called concurrently - python

I have code that receives data from Binance about current prices:
import asyncio
from binance import AsyncClient, BinanceSocketManager
import time
from datetime import datetime

def analyze(res):
    kline = res['k']
    if kline['x']:  # candle is completed
        print('{} start_sleeping {} {}'.format(
            datetime.now(),
            kline['s'],
            datetime.fromtimestamp(kline['t'] / 1000),
        ))
        time.sleep(5)
        print('{} finish_sleeping {}'.format(datetime.now(), kline['s']))

async def open_binance_stream(symbol):
    client = await AsyncClient.create()
    bm = BinanceSocketManager(client)
    ts = bm.kline_socket(symbol)
    async with ts as tscm:
        while True:
            res = await tscm.recv()
            analyze(res)
    await client.close_connection()

async def main():
    t1 = asyncio.create_task(open_binance_stream('ETHBTC'))
    t2 = asyncio.create_task(open_binance_stream('XRPBTC'))
    await asyncio.gather(*[t1, t2])

if __name__ == "__main__":
    asyncio.run(main())
How can I make the analyze function be called concurrently?
Binance sends data on both streams (ETHBTC and XRPBTC) at the same time,
but analyze is only called once the previous analyze call (with its sleep) has completed.
I would like analyze to be called immediately and independently for each stream.

Have you tried putting analyze in a thread? I think it will achieve what you want.
import asyncio
from binance import AsyncClient, BinanceSocketManager
import time
from datetime import datetime
from threading import Thread

def analyze(res):
    kline = res['k']
    if kline['x']:  # candle is completed
        print('{} start_sleeping {} {}'.format(
            datetime.now(),
            kline['s'],
            datetime.fromtimestamp(kline['t'] / 1000),
        ))
        time.sleep(5)
        print('{} finish_sleeping {}'.format(datetime.now(), kline['s']))

async def open_binance_stream(symbol):
    client = await AsyncClient.create()
    bm = BinanceSocketManager(client)
    ts = bm.kline_socket(symbol)
    async with ts as tscm:
        while True:
            res = await tscm.recv()
            Thread(target=analyze, args=(res,)).start()  # note: args must be a tuple
    await client.close_connection()

async def main():
    t1 = asyncio.create_task(open_binance_stream('ETHBTC'))
    t2 = asyncio.create_task(open_binance_stream('XRPBTC'))
    await asyncio.gather(*[t1, t2])

if __name__ == "__main__":
    asyncio.run(main())
This should work as expected.
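If you are on Python 3.9+, an alternative to spawning a raw Thread per message is asyncio.to_thread, which runs the blocking analyze in the default thread pool while the event loop keeps receiving. A minimal sketch of just the receive loop, assuming the same analyze function and imports as above; wrapping the call in a task means recv() is never delayed:
import asyncio
from binance import AsyncClient, BinanceSocketManager

# analyze() is the same blocking function defined above.

async def open_binance_stream(symbol):
    client = await AsyncClient.create()
    bm = BinanceSocketManager(client)
    ts = bm.kline_socket(symbol)
    async with ts as tscm:
        while True:
            res = await tscm.recv()
            # Run blocking analyze() in a worker thread; wrapping the awaitable in a
            # task means the next recv() is not delayed while analyze() sleeps.
            # (In production, keep references to these tasks so they are not
            # garbage-collected before they finish.)
            asyncio.create_task(asyncio.to_thread(analyze, res))
    await client.close_connection()
The default thread pool size also gives you a natural upper bound on how many analyze calls can be sleeping at the same time.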

Related

BinanceSocketManager doesn't show futures_user_socket()

I'm using the Binance UM Futures testnet and want to build a stream for user data (order updates).
The keys are fine, and other methods like klines work perfectly as well. But .futures_user_socket() shows nothing when I place or cancel orders. Could you please advise what I am doing wrong?
from binance import AsyncClient, BinanceSocketManager
import asyncio

api_key_testnet = '<--->'
secret_key_testnet = '<--->'

async def user_data_listener(client):
    bm = BinanceSocketManager(client)
    async with bm.futures_user_socket() as stream:
        while True:
            res = await stream.recv()
            print(res)

async def main():
    client = await AsyncClient.create(api_key_testnet, secret_key_testnet, testnet=True)
    await user_data_listener(client)

if __name__ == "__main__":
    asyncio.run(main())

manipulating websocket values

I'm trying to handle websockets with Python for the first time.
I created two clients and I want to do some calculations with the two results
(which are real-time crypto prices).
Is it possible to add or multiply the numbers from these results?
import websockets
import asyncio
import json
from binance import AsyncClient, BinanceSocketManager

async def upbit_ws_client():
    uri = "wss://api.upbit.com/websocket/v1"
    async with websockets.connect(uri) as websocket:
        subscribe_fmt = [
            {"ticket": "test"},
            {
                "type": "ticker",
                "codes": ["KRW-BTC"],
                "isOnlyRealtime": True
            },
            {"format": "SIMPLE"}
        ]
        subscribe_data = json.dumps(subscribe_fmt)
        await websocket.send(subscribe_data)
        while True:
            data = await websocket.recv()
            data = json.loads(data)
            print(data['cd'], data['hp'])

async def binance_ws_client():
    client = await AsyncClient.create()
    bm = BinanceSocketManager(client)
    ts = bm.symbol_book_ticker_socket("BTCUSDT")
    async with ts as tscm:
        while True:
            res = await tscm.recv()
            print(res['b'], res['a'])

if __name__ == '__main__':
    my_loop = asyncio.get_event_loop()
    my_loop.run_until_complete(asyncio.gather(*[upbit_ws_client(), binance_ws_client()]))
    my_loop.close()
The results are strings, so you first need to convert them to floats in order to have a numeric type you can calculate with:
a = float(res['a'])
b = float(res['b'])
Then you can subtract b from a:
result = a - b
print(result)
which you can add to your code like below:
import websockets
import asyncio
import json
from binance import AsyncClient, BinanceSocketManager

async def upbit_ws_client():
    uri = "wss://api.upbit.com/websocket/v1"
    async with websockets.connect(uri) as websocket:
        subscribe_fmt = [
            {"ticket": "test"},
            {
                "type": "ticker",
                "codes": ["KRW-BTC"],
                "isOnlyRealtime": True
            },
            {"format": "SIMPLE"}
        ]
        subscribe_data = json.dumps(subscribe_fmt)
        await websocket.send(subscribe_data)
        while True:
            data = await websocket.recv()
            data = json.loads(data)
            print(data['cd'], data['hp'])

async def binance_ws_client():
    client = await AsyncClient.create()
    bm = BinanceSocketManager(client)
    ts = bm.symbol_book_ticker_socket("BTCUSDT")
    async with ts as tscm:
        while True:
            res = await tscm.recv()
            print(res['b'], res['a'])
            a = float(res['a'])
            b = float(res['b'])
            result = a - b
            print(result)

if __name__ == '__main__':
    my_loop = asyncio.get_event_loop()
    my_loop.run_until_complete(asyncio.gather(*[upbit_ws_client(), binance_ws_client()]))
    my_loop.close()
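If the goal is to combine values coming from the two different streams (as the question asks), the two coroutines need to share state. Below is a minimal sketch, not from the original answer, in which a shared dict holds the latest value from each feed and a helper recomputes whenever either side updates; the names latest and compute_diff, the choice of Upbit's 'hp' field and Binance's 'b' field, and the subtraction itself are purely illustrative:
import asyncio
import json

import websockets
from binance import AsyncClient, BinanceSocketManager

latest = {'upbit': None, 'binance': None}  # most recent value seen on each feed

def compute_diff():
    # Only compute once both feeds have delivered at least one value.
    if latest['upbit'] is not None and latest['binance'] is not None:
        print('difference:', latest['binance'] - latest['upbit'])

async def upbit_ws_client():
    uri = "wss://api.upbit.com/websocket/v1"
    async with websockets.connect(uri) as websocket:
        subscribe_fmt = [
            {"ticket": "test"},
            {"type": "ticker", "codes": ["KRW-BTC"], "isOnlyRealtime": True},
            {"format": "SIMPLE"}
        ]
        await websocket.send(json.dumps(subscribe_fmt))
        while True:
            data = json.loads(await websocket.recv())
            latest['upbit'] = float(data['hp'])   # same field the original code prints
            compute_diff()

async def binance_ws_client():
    client = await AsyncClient.create()
    bm = BinanceSocketManager(client)
    async with bm.symbol_book_ticker_socket("BTCUSDT") as tscm:
        while True:
            res = await tscm.recv()
            latest['binance'] = float(res['b'])   # best bid
            compute_diff()

async def main():
    await asyncio.gather(upbit_ws_client(), binance_ws_client())

if __name__ == '__main__':
    asyncio.run(main())
Note that the two symbols are quoted in different currencies (KRW vs. USDT), so the arithmetic here only demonstrates the mechanics of sharing values between the coroutines, not a meaningful price comparison.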

How can I limit 300 API calls per minute using asyncio

I make API calls with asyncio, but I have a limit of 300 calls per minute. How can I pause or limit asyncio? I tried a semaphore with no success. How can I do this?
from asyncio.tasks import sleep
from aiohttp import ClientSession
import asyncio
import time
import aiohttp

semaphore = asyncio.Semaphore(2)

async def get_sites(sites):
    tasks = [asyncio.create_task(fetch_site(s)) for s in sites]
    return await asyncio.gather(*tasks)

NCAVStock = []

async def fetch_site(url):
    async with ClientSession() as session:
        async with session.get(url) as resp:
            data = await resp.json()
            data = data['symbol']
            NCAVStock.append(data)
            print(NCAVStock)
    return data

if __name__ == '__main__':
    List_Not_China = ['SRDX', 'AQB', 'CDTI', 'VRSN', 'MEC', 'NFG', 'KTOS', 'PRO', 'BEAT', 'HRB', 'JBLU', 'SRTS', 'PCRX', 'RVLV', 'CTSH', 'DHCNL', 'SYX', 'FARM', 'BAM', 'CALX', 'VTIQW', 'LKQ', 'ISR', 'GLDW', 'WORK', 'UTI', 'MXL', 'MTOR', 'CRWS', 'CHWY', 'GKOS', 'MDEX', 'AGI', 'LH', 'IDIV', 'CVEO', 'URI', 'FIX', 'RICK', 'ITW', 'STRT', 'SGLBW', 'EIX', 'AWX', 'ADSK', 'INS', 'MLHR', 'IIIV']
    sites = [
        f'http://financialmodelingprep.com/api/v3/financials/balance-sheet-statement/{company}?period=quarter&apikey=APIKEY' for company in List_Not_China
    ]
    data = asyncio.run(get_sites(sites))
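A semaphore on its own only caps how many requests run at once, not how many start per minute. One way to get a per-minute cap (a sketch, not from the original post; RateLimiter and its fields are illustrative names) is a small sliding-window limiter that every request awaits before hitting the API:
import asyncio
import time

from aiohttp import ClientSession

class RateLimiter:
    """Allow at most max_calls acquisitions per period seconds (sliding window)."""
    def __init__(self, max_calls: int, period: float):
        self.max_calls = max_calls
        self.period = period
        self.calls = []                 # monotonic timestamps of recent calls
        self.lock = asyncio.Lock()

    async def acquire(self):
        async with self.lock:
            while True:
                now = time.monotonic()
                # Drop timestamps that have left the window.
                self.calls = [t for t in self.calls if now - t < self.period]
                if len(self.calls) < self.max_calls:
                    self.calls.append(now)
                    return
                # Wait until the oldest call ages out of the window.
                await asyncio.sleep(self.period - (now - self.calls[0]))

async def fetch_site(session: ClientSession, limiter: RateLimiter, url: str):
    await limiter.acquire()             # blocks only once the quota is exhausted
    async with session.get(url) as resp:
        return await resp.json()

async def get_sites(sites):
    limiter = RateLimiter(max_calls=300, period=60.0)   # create inside the running loop
    async with ClientSession() as session:
        return await asyncio.gather(*(fetch_site(session, limiter, s) for s in sites))
Reusing one ClientSession for all requests (instead of opening one per call, as in the code above) also avoids creating a new connection pool for every request.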

Python Bleak scan for advertisements and exit event loop

I've inherited some code that utilizes Python Bleak to scan for advertisements emitted from a certain device. Whenever an advertisement from the Bluetooth MAC address and service ID we're looking for is detected and a certain condition on the extracted payload is true, we want to terminate and return. In the attached code, I've masked the Bluetooth address and service ID.
Not being too familiar with the event loop, is there a way to exit before the timer runs out? I suppose there's probably a better way to approach this problem.
Sample code:
import asyncio
import struct
from bleak import BleakScanner

timeout_seconds = 10
address_to_look_for = 'masked'
service_id_to_look_for = 'masked'

def detection_callback(device, advertisement_data):
    if device.address == address_to_look_for:
        byte_data = advertisement_data.service_data.get(service_id_to_look_for)
        num_to_test = struct.unpack_from('<I', byte_data, 0)
        if num_to_test == 1:
            print('here we want to terminate')

async def run():
    scanner = BleakScanner()
    scanner.register_detection_callback(detection_callback)
    await scanner.start()
    await asyncio.sleep(timeout_seconds)
    await scanner.stop()

if __name__ == '__main__':
    loop = asyncio.get_event_loop()
    loop.run_until_complete(run())
I'm sure there are many ways this can be done. A small mod to your code would be, rather than having asyncio.sleep wait out the full period before you stop the scan, to have a while loop that ends when the time has elapsed or the device has been found.
For example:
import asyncio
import struct
from bleak import BleakScanner

timeout_seconds = 20
address_to_look_for = 'F1:D9:3B:39:4D:A2'
service_id_to_look_for = '0000feaa-0000-1000-8000-00805f9b34fb'

class MyScanner:
    def __init__(self):
        self._scanner = BleakScanner()
        self._scanner.register_detection_callback(self.detection_callback)
        self.scanning = asyncio.Event()

    def detection_callback(self, device, advertisement_data):
        # Looking for:
        # AdvertisementData(service_data={
        #     '0000feaa-0000-1000-8000-00805f9b34fb': b'\x00\xf6\x00\x00\x00Jupiter\x00\x00\x00\x00\x00\x0b'},
        #     service_uuids=['0000feaa-0000-1000-8000-00805f9b34fb'])
        if device.address == address_to_look_for:
            byte_data = advertisement_data.service_data.get(service_id_to_look_for)
            num_to_test, = struct.unpack_from('<I', byte_data, 0)
            if num_to_test == 62976:
                print('\t\tDevice found so we terminate')
                self.scanning.clear()

    async def run(self):
        await self._scanner.start()
        self.scanning.set()
        end_time = loop.time() + timeout_seconds
        while self.scanning.is_set():
            if loop.time() > end_time:
                self.scanning.clear()
                print('\t\tScan has timed out so we terminate')
            await asyncio.sleep(0.1)
        await self._scanner.stop()

if __name__ == '__main__':
    my_scanner = MyScanner()
    loop = asyncio.get_event_loop()
    loop.run_until_complete(my_scanner.run())
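Another option, not from the original answer, is to wait on an asyncio.Event with a timeout instead of polling every 0.1 s: asyncio.wait_for returns as soon as the callback sets the event and raises TimeoutError when the scan window expires. A sketch under that assumption (the address, service ID, and the 62976 test value are taken from the answer above):
import asyncio
import struct
from bleak import BleakScanner

timeout_seconds = 20
address_to_look_for = 'F1:D9:3B:39:4D:A2'
service_id_to_look_for = '0000feaa-0000-1000-8000-00805f9b34fb'

async def run():
    found = asyncio.Event()   # created inside the running loop

    def detection_callback(device, advertisement_data):
        if device.address == address_to_look_for:
            byte_data = advertisement_data.service_data.get(service_id_to_look_for)
            if byte_data:
                num_to_test, = struct.unpack_from('<I', byte_data, 0)
                if num_to_test == 62976:
                    found.set()   # wakes the waiter below immediately

    scanner = BleakScanner()
    scanner.register_detection_callback(detection_callback)
    await scanner.start()
    try:
        await asyncio.wait_for(found.wait(), timeout=timeout_seconds)
        print('Device found so we terminate')
    except asyncio.TimeoutError:
        print('Scan has timed out so we terminate')
    finally:
        await scanner.stop()

if __name__ == '__main__':
    asyncio.run(run())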

Interprocess communication using asyncio?

I have a set of CPU-intensive processes that once in a while depend on each other to proceed. So something like
def run():
    while True:
        do stuff
        wake up some other process
        wait for some other process to wake me up
        do stuff
Within each process I'd like to use async, so that I can always have an instance of run running while others are waiting to be woken up. Looking at the asyncio docs, the only IPC option in the "High-level APIs" section that I see uses sockets. I'd much rather use a pipe, which it looks like I can perhaps do with the low-level API, but that documentation is chock full of warnings that if you're just writing an application then it's a mistake to be using it. Can someone weigh in on the idiomatic thing to do here? (And also, speed is an important factor, so if there's some less-idiomatic-but-more-performant thing I'd like to know about that option as well.)
I would like to mention the aioprocessing library, as I successfully used it in one of my projects. It provides an async interface to the multiprocessing primitives, including IPC ones such as Process, Pipe, Lock, and Queue. It uses a thread pool to do this:
...
@staticmethod
def coro_maker(func):
    def coro_func(self, *args, loop=None, **kwargs):
        return self.run_in_executor(
            getattr(self, func), *args, loop=loop, **kwargs
        )
    return coro_func
But to be honest, a lot depends on the problem being solved and on which tasks are performed concurrently, since intensive IPC within the async approach is less efficient than the synchronous approach because of the overhead of the event loop, the thread pool, and so on. Sometimes it is better to make all IPC operations synchronous and put them in a separate thread. Again, it all depends on the problem and the environment. Below is a benchmark that is far from comprehensive, but it gives an approximate picture for the workload it models (an intensive exchange of buffers).
note: I wrote about the difference between a Queue and SimpleQueue here
Sync SimpleQueue: 1.4309470653533936
AioSimpleQueue: 12.32670259475708
AioQueue: 14.342737436294556
AioPipe: 11.747064590454102
subprocess pipe stream: 7.344956159591675
socket stream: 4.360717058181763
# main.py
import sys
import time
import asyncio
import aioprocessing as ap
import multiprocessing as mp
import proc

count = 5*10**4
data = b'*'*100

async def sync_simple_queue_func():
    out_ = mp.SimpleQueue()
    in_ = mp.SimpleQueue()
    p = ap.AioProcess(target=proc.start_sync_queue_func, args=(out_, in_))
    p.start()
    begin_ts = time.time()
    for i in range(count):
        out_.put(data)
        res = in_.get()
    print('Sync SimpleQueue: ', time.time() - begin_ts)
    out_.put(None)

async def simple_queue_func():
    out_ = ap.AioSimpleQueue()
    in_ = ap.AioSimpleQueue()
    p = ap.AioProcess(target=proc.start_queue_func, args=(out_, in_))
    p.start()
    begin_ts = time.time()
    for i in range(count):
        await out_.coro_put(data)
        res = await in_.coro_get()
    print('AioSimpleQueue: ', time.time() - begin_ts)
    await out_.coro_put(None)

async def queue_func():
    out_ = ap.AioQueue()
    in_ = ap.AioQueue()
    p = ap.AioProcess(target=proc.start_queue_func, args=(out_, in_))
    p.start()
    begin_ts = time.time()
    for i in range(count):
        await out_.coro_put(data)
        res = await in_.coro_get()
    print('AioQueue: ', time.time() - begin_ts)
    await out_.coro_put(None)

async def pipe_func():
    main_, child_ = ap.AioPipe()
    p = ap.AioProcess(target=proc.start_pipe_func, args=(child_,))
    p.start()
    begin_ts = time.time()
    for i in range(count):
        await main_.coro_send(data)
        res = await main_.coro_recv()
    print('AioPipe: ', time.time() - begin_ts)
    await main_.coro_send(None)
    await p.coro_join()

server = None

async def handle_child(reader, writer):
    begin_ts = time.time()
    for i in range(count):
        writer.write(data)
        res = await reader.read(len(data))
    print('socket stream: ', time.time() - begin_ts)
    writer.close()

async def socket_func():
    global server
    addr = ('127.0.0.1', 8888)
    server = await asyncio.start_server(handle_child, *addr)
    p = ap.AioProcess(target=proc.start_socket_func, args=(addr,))
    p.start()
    async with server:
        await server.serve_forever()

async def subprocess_func():
    prog = await asyncio.create_subprocess_shell(
        'python proc.py',
        stdin=asyncio.subprocess.PIPE,
        stdout=asyncio.subprocess.PIPE)
    begin_ts = time.time()
    for i in range(count):
        prog.stdin.write(data)
        res = await prog.stdout.read(len(data))
    print('subprocess pipe stream: ', time.time() - begin_ts)
    prog.stdin.close()

async def main():
    await sync_simple_queue_func()
    await simple_queue_func()
    await queue_func()
    await pipe_func()
    await subprocess_func()
    await socket_func()

asyncio.run(main())
# proc.py
import asyncio
import sys
import aioprocessing as ap

async def sync_queue_func(in_, out_):
    while True:
        n = in_.get()
        if n is None:
            return
        out_.put(n)

async def queue_func(in_, out_):
    while True:
        n = await in_.coro_get()
        if n is None:
            return
        await out_.coro_put(n)

async def pipe_func(child):
    while True:
        n = await child.coro_recv()
        if n is None:
            return
        await child.coro_send(n)

data = b'*' * 100

async def socket_func(addr):
    reader, writer = await asyncio.open_connection(*addr)
    while True:
        n = await reader.read(len(data))
        if not n:
            break
        writer.write(n)

def start_sync_queue_func(in_, out_):
    asyncio.run(sync_queue_func(in_, out_))

def start_queue_func(in_, out_):
    asyncio.run(queue_func(in_, out_))

def start_pipe_func(child):
    asyncio.run(pipe_func(child))

def start_socket_func(addr):
    asyncio.run(socket_func(addr))

async def connect_stdin_stdout():
    loop = asyncio.get_event_loop()
    reader = asyncio.StreamReader()
    protocol = asyncio.StreamReaderProtocol(reader)
    dummy = asyncio.Protocol()
    await loop.connect_read_pipe(lambda: protocol, sys.stdin)  # sets read_transport
    w_transport, _ = await loop.connect_write_pipe(lambda: dummy, sys.stdout)
    writer = asyncio.StreamWriter(w_transport, protocol, reader, loop)
    return reader, writer

async def main():
    reader, writer = await connect_stdin_stdout()
    while True:
        res = await reader.read(len(data))
        if not res:
            break
        writer.write(res)

if __name__ == "__main__":
    asyncio.run(main())
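Since the question specifically asks for a pipe-like channel while staying on the high-level API, one further option on POSIX systems (a sketch, not part of the original answer; the socket path and the message framing are illustrative) is a Unix domain socket with asyncio's stream helpers, which behaves much like a named pipe but works directly with asyncio.start_unix_server and asyncio.open_unix_connection:
import asyncio

SOCKET_PATH = '/tmp/ipc_demo.sock'   # illustrative path

# --- process A: serves requests over the Unix socket ---
async def handle_peer(reader, writer):
    while True:
        msg = await reader.readline()          # newline-delimited messages
        if not msg:
            break
        writer.write(b'ack:' + msg)            # "do stuff", then reply to wake the peer
        await writer.drain()
    writer.close()

async def serve():
    server = await asyncio.start_unix_server(handle_peer, path=SOCKET_PATH)
    async with server:
        await server.serve_forever()

# --- process B: connects and exchanges messages ---
async def client():
    reader, writer = await asyncio.open_unix_connection(SOCKET_PATH)
    for i in range(3):
        writer.write(f'work {i}\n'.encode())
        await writer.drain()
        reply = await reader.readline()        # wait to be "woken up" by the peer
        print(reply.decode().strip())
    writer.close()
    await writer.wait_closed()

# Run serve() in one process and client() in the other, e.g.:
#   asyncio.run(serve())      # process A
#   asyncio.run(client())     # process B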
