So I am trying to get a part of the URL, like http://example/home?code=123456789. This 123456789 changes every time since it is OAuth, so I am trying to get it.
This is the .py file:
from aiohttp import web, web_urldispatcher
import discord
from discord.ext import commands
import aiohttp_jinja2
import jinja2
from pathlib import Path
from oauth import Ouath
@aiohttp_jinja2.template('home.html')
async def start(request):
    raise web.HTTPSeeOther(location=Ouath.discord_login_url)

@aiohttp_jinja2.template('home.html')
async def login(request):
    return

app = web.Application(loop=self.client.loop)
aiohttp_jinja2.setup(app, loader=jinja2.FileSystemLoader(str(here)))
app.router.add_get('/', start)
app.router.add_get('/home', login)
runner = web.AppRunner(app)
await runner.setup()
self.site = web.TCPSite(runner, '127.0.0.1', 5000)
await self.client.wait_until_ready()
await self.site.start()
I want to print it in the HTML file, but I don't know how to get that part.
Note: I edited the code box.
Since you're using web from aiohttp, you can add a route that accepts a parameter:

routes = web.RouteTableDef()

@routes.get('/guild/{guild}')
async def guild(request):
    gid = request.match_info['guild']
    return web.Response(text=gid)

app.router.add_routes(routes)

The URL would then be http://localhost:PORT/guild/123456.
Once you've fetched the required details, you're free to render a template or return a response.
After digging in the source code of aiohttp_jinja2 and aiohttp, it seems you can get it with request.query.get('code'):
@aiohttp_jinja2.template('home.html')
async def login(request):
    #print('code:', request.query.get('code'))
    return {'code': request.query.get('code')}
If there is no ?code=... in the URL then it gives None, but you can set a different default value using request.query.get('code', some_default_value).
aiohttp docs: web.BaseRequest.query
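As for printing it in home.html: with the @aiohttp_jinja2.template decorator the dict returned by the handler becomes the template context, so the template can print the value with the Jinja expression {{ code }}.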
If you have the URL as a string then you can use
URL = 'http://example/home?code=123456789'
code = URL.split('?code=')[-1]
or, if the number is always at the end and always has the same length,
URL = 'http://example/home?code=123456789'
code = URL[-9:]
But there is also urllib.parse:

import urllib.parse

URL = 'http://example/home?code=123456789'
data = urllib.parse.parse_qs(urllib.parse.urlsplit(URL).query)
which gives a dictionary of lists

{'code': ['123456789']}

and you can do

code = data.get('code')

which gives the list ['123456789'] (so the value itself is code[0]), or None if there was no ?code=... in the URL.
EDIT: Probably you have to use request.url (converted to a string, since aiohttp gives a yarl.URL object):

@aiohttp_jinja2.template('home.html')
async def login(request):
    data = urllib.parse.parse_qs(urllib.parse.urlsplit(str(request.url)).query)
    code = data.get('code')
    return {'code': code}
Because data is a dictionary that already contains "code", you could also use return data:

@aiohttp_jinja2.template('home.html')
async def login(request):
    data = urllib.parse.parse_qs(urllib.parse.urlsplit(str(request.url)).query)
    return data
@aiohttp_jinja2.template('home.html')
async def login(request):
    code = urllib.parse.parse_qs(urllib.parse.urlsplit(request).query)
    return {'code': code}

like this ?
Below is my code.
#app.post("/")
async def main(user: schemas.UserCreate, db: Session = Depends(get_db)):
db_user = crud.get_user_by_email(db, email=user.email)
if db_user:
return RedirectResponse(url = '/exist',status_code=302)
crud.create_user(db=db,user=user)
return RedirectResponse(url='/', status_code=302)
#app.get('/exist')
async def exist():
return {'message' : "already exist"}
I checked the network tab and the RedirectResponse itself works fine. However, the URL on the page remains the same as the current page. I want to change the URL to "/exist"; how can I do this?
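A hedged note on why this can happen: if the POST is sent with fetch/axios, the HTTP client follows the redirect internally and the browser's address bar never changes; only a regular (non-JavaScript) form submission or an explicit client-side navigation updates the URL. For a plain form submission, returning 303 See Other tells the browser to issue a GET to the new location. A sketch reusing the names from the question:

from fastapi import status
from fastapi.responses import RedirectResponse

@app.post("/")
async def main(user: schemas.UserCreate, db: Session = Depends(get_db)):
    db_user = crud.get_user_by_email(db, email=user.email)
    if db_user:
        # 303 turns the follow-up request into GET /exist, so the address bar changes
        return RedirectResponse(url='/exist', status_code=status.HTTP_303_SEE_OTHER)
    crud.create_user(db=db, user=user)
    return RedirectResponse(url='/', status_code=status.HTTP_303_SEE_OTHER)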
Is it possible to get the cookies when someone hits the API? I need to read the cookies for each request.
#app.get("/")
async def root(text: str, sessionKey: str = Header(None)):
print(sessionKey)
return {"message": text+" returned"}
if __name__ == "__main__":
uvicorn.run("main:app", host="0.0.0.0", port=5001 ,reload=True)
You can do it in the same way you are accessing the headers in your example (see docs):
from fastapi import Cookie

@app.get("/")
async def root(text: str, sessionKey: str = Header(None), cookie_param: int | None = Cookie(None)):
    print(cookie_param)
    return {"message": f"{text} returned"}
Option 1
Use the Request object to get the cookie you wish, as described in Starlette documentation.
from fastapi import Request

@app.get('/')
async def root(request: Request):
    print(request.cookies.get('sessionKey'))
    return 'OK'
Option 2
Use the Cookie parameter, as described in FastAPI documentation.
from fastapi import Cookie

@app.get('/')
async def root(sessionKey: str = Cookie(None)):
    print(sessionKey)
    return 'OK'
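Either option can be verified quickly by sending a request that carries the cookie, for instance with FastAPI's TestClient (a small sketch; the cookie name sessionKey comes from the question):

from fastapi.testclient import TestClient

client = TestClient(app)
response = client.get('/', cookies={'sessionKey': 'abc123'})
# the server-side print above should output: abc123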
I'm in a situation where I need to retry an async request even when the request returns a 200 response. For some specific cases, I need to check if there's a certain key in the output; if so, we need to retry. The following sample code (which can be executed in a Jupyter notebook) retries whenever the request fails (non-200). How can I tweak this to cater to this particular need?
P.S. Ideally, the response should've been non-200 but this is the option I'm left with.
# load required libraries
import json
import asyncio
import aiohttp
from async_retrying import retry

base_url = "http://localhost:1050/hello?rid="

# async ginger call
@retry(attempts=3)
async def async_ginger_call():
    connector = aiohttp.TCPConnector(limit=3)
    async with aiohttp.ClientSession(connector=connector) as session:
        async with session.post(url, raise_for_status=True, timeout=300) as response:
            result = await response.text()
            # condition here; if key in result then retry
            return json.loads(result)
reqs = 2
tasks = []
connector = aiohttp.TCPConnector(limit=reqs)
async with aiohttp.ClientSession(connector=connector) as session:
    for i in range(reqs):
        url = base_url + str(i)
        # encode sentence
        tasks.append(async_ginger_call())

results = await asyncio.gather(*tasks, return_exceptions=True)
Sample flask server code
# sample api
import time
import json
import datetime
from flask import Flask, request
from flask import Response

app = Flask(__name__)

@app.route('/hello', methods=['GET', 'POST'])
def welcome():
    rid = request.args.get('rid', default=3, type=int)
    valid_response = json.dumps({
        "Result": {
            "Warnings": [
                {
                    "Code": 1100,
                    "Message": "A technical error occurred during execution."
                }
            ]
        }
    })
    # testing for failure
    if rid == 2:
        # pass
        # return valid_response
        return Response("{'Result': ''}", status=500, mimetype='application/json')
    return valid_response

if __name__ == '__main__':
    app.run(host='0.0.0.0', port=1050)
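One way to force a retry on a 200 response that still contains the unwanted key is to raise from inside the decorated coroutine so the retry decorator fires again. A minimal sketch, assuming async_retrying retries whenever the wrapped coroutine raises; the "Warnings" key is taken from the sample server payload and the explicit url parameter is added for illustration:

class RetryablePayload(Exception):
    """Raised to trigger another attempt even though the HTTP status was 200."""
    pass

@retry(attempts=3)
async def async_ginger_call(url):
    connector = aiohttp.TCPConnector(limit=3)
    async with aiohttp.ClientSession(connector=connector) as session:
        async with session.post(url, raise_for_status=True, timeout=300) as response:
            result = json.loads(await response.text())
            # retry when the payload carries the key we want to avoid
            if "Warnings" in result.get("Result", {}):
                raise RetryablePayload("retryable key found in response")
            return result

# each task then gets its own URL:
# tasks.append(async_ginger_call(base_url + str(i)))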
Support for async/await was introduced in Flask 2.0 (https://flask.palletsprojects.com/en/2.0.x/async-await/).
I am using Flask-RestX, so is it possible to use async/await in RestX request handlers?
Something like:
@api.route('/try-async')
class MyResource(Resource):
    @api.expect(some_schema)
    async def get(self):
        result = await async_function()
        return result
is not working, and when I try to reach this endpoint I get the error:
TypeError: Object of type coroutine is not JSON serializable
Is there any info on that?
Package versions:
flask==2.0.1
flask-restx==0.4.0
and I've also installed flask[async] as the documentation suggests.
I've gotten around this by using an internal redirect:

@api.route('/try-async')
class MyResource(Resource):
    @api.expect(some_schema)
    def get(self):
        return redirect(url_for('.hidden_async'), code=307)

@app.route('/hidden-async', methods=['GET'])
async def hidden_async():
    result = await async_function()
    return result
Redirecting with code=307 will ensure any method and body are unchanged after the redirect (Link). So passing data to the async function is possible as well.
@api.route('/try-async')
class MyResource(Resource):
    @api.expect(some_schema)
    def post(self):
        return redirect(url_for('.hidden_async'), code=307)

@app.route('/hidden-async', methods=['POST'])
async def hidden_async():
    data = request.get_json()
    tasks = [async_function(d) for d in data]
    result = await asyncio.gather(*tasks)
    return result
I currently have this class for making requests to an API and caching the JSON response:
import os
import pathlib
import json
import hashlib
import time
import requests


class NoJSONResponseError(Exception):
    pass


class JSONRequestCacher(object):
    """Manage a JSON object through the cache.

    Download the associated resource from the provided URL
    when need be and retrieve the JSON from a cached file
    if possible.
    """
    def __init__(self, duration=86400, cachedir=None):
        self.duration = duration
        self.cachedir = self._get_cachedir(cachedir)
        self._validate_cache()

    def _get_cachedir(self, cachedir):
        if cachedir is None:
            cachedir = os.environ.get(
                'CUSTOM_CACHEDIR',
                pathlib.Path(pathlib.Path.home(), '.custom_cache/')
            )
        return pathlib.Path(cachedir)  # ensure a Path even when the env var supplies a string

    def _validate_cache(self):
        """Create the cache directory if it doesn't exist"""
        self.cachedir.mkdir(parents=True, exist_ok=True)

    def _request(self, url):
        """Perform the retrieval of the requested JSON data"""
        return requests.get(url)

    def save(self, raw, cachefile):
        """Save the provided raw JSON data into the cached file"""
        with open(cachefile, 'w') as out:
            json.dump(raw, out)

    def load(self, cachefile):
        """Retrieve the saved JSON data from the cached file"""
        with open(cachefile) as cached:
            return json.load(cached)

    def cache_is_valid(self, cachefile):
        """Check if cache exists and is more recent than the cutoff"""
        if cachefile.is_file():
            cache_age = time.time() - cachefile.stat().st_mtime
            return cache_age < self.duration
        return False

    def request(self, url, refresh=False):
        """The JSON data associated to the given URL.

        Either read from the cache or fetch from the web.
        """
        urlhash = hashlib.md5(url.encode()).hexdigest()
        cachefile = self.cachedir.joinpath(urlhash)
        start = time.time()
        if not refresh and self.cache_is_valid(cachefile):
            return self.load(cachefile), True, time.time() - start
        resp = self._request(url)
        resp.raise_for_status()
        try:
            raw = resp.json()
        except ValueError:
            raise NoJSONResponseError()
        self.save(raw, cachefile)
        return raw, False, resp.elapsed.total_seconds()
I then have other classes and code which call the request method of this code like so:
class APIHelper():
    def __init__(self):
        self.cache = JSONRequestCacher()

    def fetch(self, val):
        url = 'my/url/{}'.format(val)
        return self.cache.request(url)

    def fetchall(self, vals):
        responses = []
        for val in vals:
            responses.append(self.fetch(val))
        return responses
For a small number of vals this is fine, and it's really no big deal to wait 10 minutes. However, I am now looking at making 30,000+ hits to this endpoint. In the past I have used thread pools (multiprocessing.dummy.Pool) to achieve some parallelism; however, from my reading it seems like async/await with aiohttp is a better way to go. Unfortunately, try as I might, I cannot wrap my head around how to translate that to this code. I am using Python 3.8.
EDIT
I tried making this change:
class JSONRequestCacher():
    def __init__(self):
        self.http = aiohttp.ClientSession()

    async def _request(self, url):
        async with self.http.get(url) as response:
            return await response.read()
Got the error AttributeError: 'coroutine' object has no attribute 'json' from my raw = resp.json() line.
I then tried adding resp = await self._request(url), but that gives SyntaxError: 'await' outside async function. If I then make request an async function, calling it just seems to return a coroutine object that doesn't give me the expected response.
And this is just trying to make the _request call async. I can't even begin to understand how I am meant to make multiple calls to it via another class (APIHelper).
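For what it's worth, here is a minimal sketch (not a drop-in implementation) of how the request path and the fan-out in APIHelper could look with aiohttp and asyncio.gather, reusing the existing cache helpers. The class and method names mirror the code above; the timing value falls back to wall-clock time because aiohttp responses have no requests-style elapsed attribute, and the per-call ClientSession keeps the example short (for 30,000+ URLs you would normally share one session and bound concurrency, e.g. with a semaphore).

import asyncio
import hashlib
import time

import aiohttp


class AsyncJSONRequestCacher(JSONRequestCacher):
    """Async variant: same caching logic, but the HTTP call is awaited."""

    async def request(self, url, refresh=False):
        urlhash = hashlib.md5(url.encode()).hexdigest()
        cachefile = self.cachedir.joinpath(urlhash)
        start = time.time()
        if not refresh and self.cache_is_valid(cachefile):
            return self.load(cachefile), True, time.time() - start
        async with aiohttp.ClientSession() as session:
            async with session.get(url, raise_for_status=True) as resp:
                try:
                    raw = await resp.json()
                except (aiohttp.ContentTypeError, ValueError):
                    raise NoJSONResponseError()
        self.save(raw, cachefile)
        return raw, False, time.time() - start


class APIHelper():
    def __init__(self):
        self.cache = AsyncJSONRequestCacher()

    async def fetch(self, val):
        url = 'my/url/{}'.format(val)
        return await self.cache.request(url)

    async def fetchall(self, vals):
        # schedule all requests concurrently and wait for them all
        return await asyncio.gather(*(self.fetch(v) for v in vals))

Calling code then runs it once with asyncio.run(APIHelper().fetchall(vals)), or awaits it directly inside an existing event loop.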