async def get_event(self, address):
    infura_ws_url = 'wss://ropsten.infura.io/ws/v3/7c074579719748599c087f6090c413e2'
    address_checksumed = self.w3.toChecksumAddress(address)
    async with connect(infura_ws_url) as ws:
        await ws.send('{"jsonrpc": "2.0", "id": 1, "method": "eth_subscribe", "params": ["newPendingTransactions"]}')
        subscription_response = await ws.recv()
        print(subscription_response)
        while True:
            try:
                message = await asyncio.wait_for(ws.recv(), timeout=15)
                response = json.loads(message)
                txHash = response['params']['result']
                tx = self.w3.eth.get_transaction(txHash)
                if tx.to == address_checksumed:
                    print("Pending transaction incoming:")
                    print({
                        "hash": txHash,
                        "from": tx["from"],
                        "value": self.w3.fromWei(tx["value"], 'ether')
                    })
                    transaction_receipt_json = {
                        "transaction_hash": txHash,
                        "from": tx["from"],
                        "value": self.w3.fromWei(tx["value"], 'ether')
                    }
                    return transaction_receipt_json
                    # return Response(transaction_receipt_json)
            except Exception as e:
                print("Exception")
                print(e.args)
                print(str(e))
@action(detail=False, methods=['GET'], url_path='subscribe-deposit')
def subscribe_deposit_address(self, request):
    address = self.request.query_params.get('address')
    # get_event.delay(address,)
    # return Response('Address subscribed')
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    while True:
        resp = loop.run_until_complete(self.get_event(address))
        return Response(resp)
When I call this API from Postman, it subscribes to pending transactions and returns as soon as a pending transaction has my address as its destination. But immediately after the response is returned, my request ends. I don't want the request to end: I want it to stay open and return every pending transaction whose destination is my address as it arrives.
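One way to keep the request open is to stream results instead of returning once. Below is only a sketch under assumptions: it swaps DRF's Response for Django's StreamingHttpResponse and re-runs get_event inside a generator; the event_stream helper and the NDJSON content type are illustrative, not part of the original code.

import asyncio
import json
from django.http import StreamingHttpResponse

@action(detail=False, methods=['GET'], url_path='subscribe-deposit')
def subscribe_deposit_address(self, request):
    address = self.request.query_params.get('address')

    def event_stream():
        # Hypothetical helper: each get_event() call returns one matching
        # pending tx; keep looping and yield each match as a JSON line
        # instead of ending the request after the first one.
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
        while True:
            tx = loop.run_until_complete(self.get_event(address))
            yield json.dumps(tx, default=str) + "\n"

    return StreamingHttpResponse(event_stream(), content_type="application/x-ndjson")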
I have a bot that stores a user ID and a streamer name; when the streamer starts a stream, the user who entered the command is notified.
How can I correctly store the IDs of all subscribed users, so that this works for everyone and not just for one user?
import requests
import pymongo
from apscheduler.schedulers.asyncio import AsyncIOScheduler
from aiogram import Bot, types
from aiogram.dispatcher import Dispatcher
from aiogram.utils import executor

TOKEN = ''
bot = Bot(token=TOKEN)
dp = Dispatcher(bot)
scheduler = AsyncIOScheduler(timezone="Europe/Kiev")

client = pymongo.MongoClient('')
db = client['Users']
collection = db['twitch']

def add_user(streamer_name, chat_id):
    collection.update_one({
        "_id": streamer_name
    }, {"$set": {
        'online': '-',
        'chat_id': chat_id
    }}, upsert=True)

def set_online(streamers):
    collection.update_one({
        '_id': streamers
    }, {'$set': {
        'online': 'True'
    }})

def set_offline(streamers):
    collection.update_one({
        '_id': streamers
    }, {'$set': {
        'online': 'False'
    }})

async def check(streamer_name, chat_id):
    client_id = ''
    client_secret = ''
    body = {
        'client_id': client_id,
        'client_secret': client_secret,
        "grant_type": 'client_credentials'
    }
    r = requests.post('https://id.twitch.tv/oauth2/token', body)
    keys = r.json()
    headers = {
        'Client-ID': client_id,
        'Authorization': 'Bearer ' + keys['access_token']
    }
    all_records = collection.find()
    users = list(all_records)
    for i in users:
        streamers = i['_id']
        send_users = i['chat_id']
        online = i['online']
        stream = requests.get('https://api.twitch.tv/helix/streams?user_login=' + streamers, headers=headers)
        stream_data = stream.json()
        if len(stream_data['data']) == 1:
            live = (streamers + ' is live: ' + stream_data['data'][0]['title'])
            if online == 'False':
                await bot.send_message(send_users, live)
                set_online(streamers)
            if online == 'True':
                print('streamer online')
        else:
            set_offline(streamers)
    scheduler.add_job(check, "interval", seconds=5, args=(streamer_name, chat_id))

@dp.message_handler(commands='check')
async def check_stream(message: types.Message):
    streamer_name = message.text[7:]
    chat_id = message.chat.id
    add_user(streamer_name, chat_id)
    await check(streamer_name, chat_id)

if __name__ == "__main__":
    scheduler.start()
    executor.start_polling(dp, skip_updates=True)
Also, when the streamer starts the stream, many messages arrive instead of just one.
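A minimal sketch of one way to make this work for every subscriber, assuming the same pymongo collection: keep each streamer's subscribers in an array field ($addToSet avoids duplicates; the chat_ids field name is illustrative) and notify every subscriber on the offline-to-online transition. Note also that check() re-registers itself with scheduler.add_job on every run, so jobs pile up and produce the duplicate messages; scheduling one recurring job at startup avoids that.

def add_user(streamer_name, chat_id):
    # Store all subscribers in an array; $addToSet never adds the same
    # chat_id twice, and $setOnInsert only initializes 'online' on creation.
    collection.update_one(
        {"_id": streamer_name},
        {
            "$setOnInsert": {"online": "-"},
            "$addToSet": {"chat_ids": chat_id},
        },
        upsert=True,
    )

# Inside check(), notify every subscriber instead of a single chat_id:
#     for chat_id in i.get("chat_ids", []):
#         await bot.send_message(chat_id, live)

# Schedule the periodic check once, at startup, rather than inside check().
# This assumes check() is refactored to take no arguments, which is safe
# because it already scans every record in the collection:
if __name__ == "__main__":
    scheduler.add_job(check, "interval", seconds=5)
    scheduler.start()
    executor.start_polling(dp, skip_updates=True)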
Example Code:
# Here is a minimal reproducible example
import json
from starlette.datastructures import MutableHeaders
from starlette.types import ASGIApp, Receive, Scope, Send, Message
import datetime
import socket
import uvicorn
from fastapi import FastAPI
class MetaDataAdderMiddleware:
    application_generic_urls = ['/openapi.json', '/docs', '/docs/oauth2-redirect', '/redoc']

    def __init__(self, app: ASGIApp) -> None:
        self.app = app

    async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
        start_time = datetime.datetime.utcnow()

        async def send_wrapper(message: Message) -> None:
            if message["type"] == "http.response.body" and len(message["body"]) and not any([scope["path"].startswith(endpoint) for endpoint in MetaDataAdderMiddleware.application_generic_urls]):
                response_body = json.loads(message["body"].decode())
                end_time = datetime.datetime.utcnow()
                response_processing_time_seconds = end_time - start_time
                data = {}
                data["data"] = response_body
                data['metadata'] = {
                    'request_timestamp_utc': start_time,
                    'response_timestamp_utc': end_time,
                    'processing_time_seconds': response_processing_time_seconds,
                    'service_host': socket.gethostname()
                }
                data_to_be_sent_to_user = json.dumps(data, default=str).encode("utf-8")
                message["body"] = data_to_be_sent_to_user
            await send(message)

        await self.app(scope, receive, send_wrapper)

app = FastAPI(
    title="MY DUMMY APP",
)
app.add_middleware(MetaDataAdderMiddleware)

@app.get("/")
async def root():
    return {"message": "Hello World"}
Description:
So here is my use case: whatever response my FastAPI endpoints send, I need to wrap it with some metadata. Let's say some endpoint returns this: {"data_key": "data_value"}. The users should see this as the final output:
{
    "data": {"data_key": "data_value"},
    "metadata": {
        "request_timestamp_utc": "somevalue",
        ...and so on
    }
}
I have a big application with numerous routers. So far we have implemented Request ID, authentication, and authorization by writing middlewares.
However, when I hit my app's APIs after adding the above MetaDataAdderMiddleware, I am greeted with the following error:
ERROR: Exception in ASGI application
Traceback (most recent call last):
File "<MY PYTHON PATH>/lib/python3.6/site-packages/uvicorn/protocols/http/httptools_impl.py", line 521, in send
raise RuntimeError("Response content longer than Content-Length")
RuntimeError: Response content longer than Content-Length
This error is logical enough, since I have modified the response body but not updated the content-length header.
Here is a snapshot of the message and scope bodies at the send_wrapper level, as well as the header values I printed from the httptools_impl.py level:
(I have edited out some fields, to mask org-specific things)
send_wrapper called
message: {'type': 'http.response.start', 'status': 200, 'headers': [(b'content-length', b'58'), (b'content-type', b'application/json')]}
scope: {'type': 'http', 'asgi': {'version': '3.0', 'spec_version': '2.1'}, 'http_version': '1.1', 'scheme': 'http', 'method': 'POST', 'root_path': '', 'query_string': b'', 'headers': [(b'content-type', b'application/json'), (b'accept', b'*/*'), (b'cache-control', b'no-cache'), (b'accept-encoding', b'gzip, deflate'), (b'content-length', b'238'), (b'connection', b'keep-alive')], 'app': <fastapi.applications.FastAPI object at >, 'fastapi_astack': <contextlib2.AsyncExitStack object at >, 'router': <fastapi.routing.APIRouter object at >, 'endpoint': <function initiate_playbook_execution at >, 'path_params': {}, 'route': <fastapi.routing.APIRoute object at >}
INFO: - "POST /MYAPI" 200 OK
INSIDE httptools_impl
name: b'content-length' | value: b'58'
self.expected_content_length: 58
send_wrapper called
message: {'type': 'http.response.body', 'body': b'{"status":true,"stdout":null,"stderr":null,"message":null}'}
scope: {'type': 'http', 'asgi': {'version': '3.0', 'spec_version': '2.1'}, 'http_version': '1.1', 'scheme': 'http', 'method': 'POST', 'root_path': '', 'query_string': b'', 'headers': [(b'content-type', b'application/json'), (b'accept', b'*/*'), (b'cache-control', b'no-cache'), (b'accept-encoding', b'gzip, deflate'), (b'content-length', b'238'), (b'connection', b'keep-alive')], 'app': <fastapi.applications.FastAPI object at >, 'fastapi_astack': <contextlib2.AsyncExitStack object at >, 'router': <fastapi.routing.APIRouter object at >, 'endpoint': <function initiate_playbook_execution at >, 'path_params': {}, 'route': <fastapi.routing.APIRoute object at >}
INSIDE httptools_impl
body: b'{"data": {"status": true, "stdout": null, "stderr": null, "message": null}, "metadata": {"request_timestamp_utc": "BLAH", "response_timestamp_utc": "BLAH", "processing_time_seconds": "0:00:00.469472", "some_field": "some_value"}}'
num_bytes: 286
Here are the attempts that I have made to update the content-length:
In the send_wrapper function, just after updating the response body, I tried the following:
data_to_be_sent_to_user = json.dumps(data, default=str).encode("utf-8")
message["body"] = data_to_be_sent_to_user
headers = MutableHeaders(scope=scope)
headers["content-length"] = str(len(data_to_be_sent_to_user))
# But this hasn't worked, no change in situation!
How can I proceed forward?
Thanks to @MatsLindh's comment, I referred to Starlette's GZipMiddleware codebase here: https://github.com/encode/starlette/blob/fcc4c705ff69182ebd663bc686cb55c242d32683/starlette/middleware/gzip.py#L60
So the idea is: the problematic content-length value lives in the headers of the http.response.start message. GZipMiddleware simply does not send that first http.response.start message immediately. Instead, it also captures http.response.body, modifies the response, computes the new length, updates the length in the http.response.start message, and only then sends both messages in the correct order.
The working implementation that I was able to write, borrowing heavily from GZipMiddleware, is here:
from starlette.types import ASGIApp, Receive, Scope, Send, Message
from starlette.requests import Request
import json
from starlette.datastructures import MutableHeaders

class MetaDataAdderMiddleware:
    application_generic_urls = ['/openapi.json', '/docs', '/docs/oauth2-redirect', '/redoc']

    def __init__(self, app: ASGIApp) -> None:
        self.app = app

    async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
        if scope["type"] == "http" and not any([scope["path"].startswith(endpoint) for endpoint in MetaDataAdderMiddleware.application_generic_urls]):
            responder = MetaDataAdderMiddlewareResponder(self.app)
            await responder(scope, receive, send)
            return
        await self.app(scope, receive, send)

class MetaDataAdderMiddlewareResponder:
    def __init__(self, app: ASGIApp) -> None:
        self.app = app
        self.initial_message: Message = {}

    async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
        self.send = send
        await self.app(scope, receive, self.send_with_meta_response)

    async def send_with_meta_response(self, message: Message):
        message_type = message["type"]
        if message_type == "http.response.start":
            # Don't send the initial message until we've determined how to
            # modify the outgoing headers correctly.
            self.initial_message = message
        elif message_type == "http.response.body":
            response_body = json.loads(message["body"].decode())
            data = {}
            data["data"] = response_body
            data['metadata'] = {
                'field_1': 'value_1',
                'field_2': 'value_2'
            }
            data_to_be_sent_to_user = json.dumps(data, default=str).encode("utf-8")
            headers = MutableHeaders(raw=self.initial_message["headers"])
            headers["Content-Length"] = str(len(data_to_be_sent_to_user))
            message["body"] = data_to_be_sent_to_user
            await self.send(self.initial_message)
            await self.send(message)
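Wiring it up is unchanged from the question's example; a short usage sketch:

from fastapi import FastAPI

app = FastAPI(title="MY DUMMY APP")
app.add_middleware(MetaDataAdderMiddleware)

@app.get("/")
async def root():
    return {"message": "Hello World"}

# GET / now returns:
# {"data": {"message": "Hello World"}, "metadata": {"field_1": "value_1", "field_2": "value_2"}}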
It shows the error {"success":false,"error":"Missing parameter market"}
import time
import hmac
from requests import Request
import requests
import json

api_key = ''
api_secret = ''

payload = json.dumps({
    "market": 'XRP/USDT',
    "side": 'BUY',
    "price": 0.7,
    "size": 1,
    "type": "limit",
    "reduceOnly": False,
    "ioc": False,
    "postOnly": False,
    "clientId": None
})

ts = int(time.time() * 1000)
request = Request('POST', 'https://ftx.com/api/orders')
prepared = request.prepare()
signature_payload = f'{ts}{prepared.method}{prepared.path_url}{payload}'.encode()
print(signature_payload)
signature = hmac.new(api_secret.encode(), signature_payload, 'sha256').hexdigest()
prepared.headers['FTX-KEY'] = api_key
prepared.headers['FTX-SIGN'] = signature
prepared.headers['FTX-TS'] = str(ts)

url = 'https://ftx.com/api/orders'
response = requests.request("POST", url, headers=prepared.headers, data=payload)
print(response.text)
Can you please recommend how to fix this? I have tried many ways but it doesn't work.
You may try
prepared.headers['Content-Type'] = 'application/json'
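For context, that header would sit alongside the other FTX headers before the request is sent; a sketch against the question's code:

prepared.headers['FTX-KEY'] = api_key
prepared.headers['FTX-SIGN'] = signature
prepared.headers['FTX-TS'] = str(ts)
prepared.headers['Content-Type'] = 'application/json'  # tell the API the body is JSON

response = requests.request("POST", url, headers=prepared.headers, data=payload)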
Hello, I know this question is a bit older, but maybe someone else needs an answer.
I sent the prepared request directly and it worked for me.
from requests import Request, Session
import hmac
import json
import time
from pprint import pprint

API_KEY = ""
API_SECRET = ""
SUBACCOUNT = ""

def place_order(market, side, price, size, order_type, reduceOnly, postOnly, ioc=False):
    endpoint = "https://ftx.com/api/orders"
    ts = int(time.time() * 1000)
    s = Session()
    data = json.dumps({
        "market": market,
        "side": side,
        "price": price,
        "type": order_type,
        "size": size,
        "reduceOnly": reduceOnly,
        "ioc": ioc,
        "postOnly": postOnly
    })
    request = Request("POST", endpoint, data=data)
    prepared = request.prepare()
    signature_payload = f"{ts}{prepared.method}{prepared.path_url}{data}".encode()
    signature = hmac.new(API_SECRET.encode(), signature_payload, "sha256").hexdigest()
    prepared.headers["FTX-KEY"] = API_KEY
    prepared.headers["FTX-SIGN"] = signature
    prepared.headers["FTX-TS"] = str(ts)
    prepared.headers["FTX-SUBACCOUNT"] = SUBACCOUNT
    response = s.send(prepared)
    data = response.json()
    pprint(data)
    return data
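A call mirroring the order from the question might look like this (market, price, and size are just example values):

if __name__ == "__main__":
    # Place the same XRP/USDT limit order as in the question
    place_order(
        market="XRP/USDT",
        side="BUY",
        price=0.7,
        size=1,
        order_type="limit",
        reduceOnly=False,
        postOnly=False,
    )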
I'm doing a simple Kafka produce/consume test on the local machine using Docker.
docker-compose file: https://github.com/confluentinc/cp-all-in-one/blob/6.2.1-post/cp-all-in-one/docker-compose.yml
And I've written a simple python code like below:
import json
import random
import asyncio
from collections import namedtuple
from confluent_kafka.schema_registry.avro import AvroSerializer, AvroDeserializer
from confluent_kafka.schema_registry import SchemaRegistryClient
from confluent_kafka.schema_registry import Schema
from confluent_kafka import SerializingProducer, DeserializingConsumer
from faker import Faker
from dataclasses import dataclass, field, asdict

faker = Faker()
registry_client = SchemaRegistryClient({"url": "http://localhost:8081"})

@dataclass
class CIS:
    user_id: str = field(default_factory=faker.user_name)
    question_id: int = field(default_factory=lambda: random.randint(1, 20000))
    is_correct: bool = field(default_factory=lambda: random.choice([True, False]))

async def produce(topic_name, serializer):
    p = SerializingProducer({
        "bootstrap.servers": "PLAINTEXT://localhost:9092",
        "value.serializer": serializer
    })
    while True:
        p.produce(
            topic=topic_name,
            value=CIS(),
        )
        print("put!")
        await asyncio.sleep(1)

async def consume(topic_name, deserializer):
    c = DeserializingConsumer(
        {
            'bootstrap.servers': "PLAINTEXT://localhost:9092",
            # 'key.deserializer': string_deserializer,
            'value.deserializer': deserializer,
            'group.id': "123",
            'auto.offset.reset': "latest"
        }
    )
    c.subscribe([topic_name])
    while True:
        message = c.poll(0.1)
        if message is None:
            print(message)
            continue
        else:
            print(message.value())
            await asyncio.sleep(1)

if __name__ == "__main__":
    topic_name = "my_topic"
    schema_str = json.dumps(
        {
            "type": "record",
            "name": "cis",
            "namespace": "interaction",
            "fields": [
                {"name": "user_id", "type": "string"},
                {"name": "question_id", "type": "int"},
                {"name": "is_correct", "type": "boolean"}
            ]
        }
    )

    def to_dict(obj, ctx):
        return asdict(obj)

    def to_obj(obj, ctx):
        return CIS(
            user_id=obj["user_id"],
            question_id=obj["question_id"],
            is_correct=obj["is_correct"],
        )

    avro_serializer = AvroSerializer(registry_client, schema_str, to_dict)
    avro_deserializer = AvroDeserializer(registry_client, schema_str, to_obj)

    loop = asyncio.get_event_loop()
    t1 = loop.create_task(produce(topic_name, avro_serializer))
    t2 = loop.create_task(consume(topic_name, avro_deserializer))
    results = loop.run_until_complete(asyncio.gather(t1, t2))
When I run this code, the output is:
>>>
put!
None
None
None
None
...
I don't see why produce() only runs its loop body the first time.
confluent_kafka is not asyncio-compatible; it uses blocking calls.
For asyncio code I can suggest aiokafka. The project's README has code snippets that illustrate how to write an async producer and consumer.
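For illustration, a minimal sketch of the same produce/consume loop rewritten with aiokafka. This sends plain JSON to an assumed broker at localhost:9092 and leaves out the Avro/Schema Registry wiring from the question:

import asyncio
import json
from aiokafka import AIOKafkaProducer, AIOKafkaConsumer

async def produce(topic_name):
    producer = AIOKafkaProducer(bootstrap_servers="localhost:9092")
    await producer.start()
    try:
        while True:
            # Static payload for the sketch; the question generates CIS records
            payload = json.dumps({"user_id": "demo", "question_id": 1,
                                  "is_correct": True}).encode()
            await producer.send_and_wait(topic_name, payload)
            print("put!")
            await asyncio.sleep(1)
    finally:
        await producer.stop()

async def consume(topic_name):
    consumer = AIOKafkaConsumer(topic_name,
                                bootstrap_servers="localhost:9092",
                                group_id="123",
                                auto_offset_reset="latest")
    await consumer.start()
    try:
        async for message in consumer:  # awaits without blocking the event loop
            print(json.loads(message.value))
    finally:
        await consumer.stop()

async def main():
    topic_name = "my_topic"
    await asyncio.gather(produce(topic_name), consume(topic_name))

if __name__ == "__main__":
    asyncio.run(main())

Because aiokafka's send and fetch calls are awaitable, both coroutines make progress on one event loop, which is exactly what the blocking confluent_kafka poll() prevents.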