Make the Google Drive API client multi-threaded — Python

I'm trying to build a new wrapper to make my Google Drive API client thread-safe and usable from multiple threads.
So I have this code, 2 files:
APIConnector.py:
from __future__ import annotations

import logging
from dataclasses import dataclass, field
from typing import Any, Callable, List, Optional

import google.auth
import httplib2
from google_auth_httplib2 import AuthorizedHttp
from googleapiclient.http import HttpRequest

import APIConnector
@dataclass
class APIConnector:
    """Pool of AuthorizedHttp transports so Google API calls can run from
    multiple threads.

    googleapiclient request objects must not share a single httplib2.Http
    transport across threads, so each in-flight request borrows its own
    transport from the pool (or builds a fresh one) and returns it when done.
    """

    # Zero-argument callable that builds a new authorized transport.
    factory: Callable[[], AuthorizedHttp]
    # Idle transports available for reuse.
    # BUG FIX: default_factory must be a callable (``list``), not a list
    # instance (``[]``) — the original raised at class-creation time.
    pool: List[AuthorizedHttp] = field(default_factory=list)

    @classmethod
    def new(
        cls,
        credentials: google.auth.Credentials,
        initial_size: int = 5,
        timeout_seconds: int = 10,
    ) -> APIConnector:
        """Build a connector pre-warmed with ``initial_size`` transports.

        Args:
            credentials: google-auth credentials used for every transport.
            initial_size: number of transports created up front.
            timeout_seconds: per-request socket timeout for each transport.
        """
        def factory() -> AuthorizedHttp:
            return AuthorizedHttp(
                credentials,
                http=httplib2.Http(timeout=timeout_seconds),
            )

        pool = [factory() for _ in range(initial_size)]
        return cls(factory, pool=pool)

    def execute(self, req: HttpRequest) -> Any:
        """Execute ``req`` on a pooled transport and return its result.

        The transport is returned to the pool even if the request raises.
        """
        http: Optional[AuthorizedHttp] = None
        try:
            http = self._provision_http()
            return req.execute(http=http)
        finally:
            if http:
                self.pool.append(http)

    def _provision_http(self) -> AuthorizedHttp:
        """Pop an idle transport, or create a new one if the pool is empty.

        Safe to call from several threads at once: ``list.pop()`` and
        ``list.append()`` are atomic under CPython's GIL, and the EAFP
        ``try``/``except`` avoids a check-then-pop race.
        """
        try:
            return self.pool.pop()
        except IndexError:
            logging.info("Pool exhausted. Creating new transport")
            return self.factory()
and GSuiteUserManager.py
from __future__ import annotations
from dataclasses import dataclass
import googleapiclient
from googleapiclient.discovery import build
from google.auth.credentials import Credentials
import GSuiteUserManager
from ngus_crawlers.googledrive.APIConnector import APIConnector
@dataclass
class GSuiteUserManager:
    """Thin wrapper over the Admin SDK Directory API ``users`` collection,
    routing request execution through the thread-safe APIConnector pool."""

    api: APIConnector
    users: googleapiclient.discovery.Resource
    domain: str

    @classmethod
    def new(cls, domain, credentials) -> GSuiteUserManager:
        """Build a manager for ``domain`` authenticated with ``credentials``.

        BUG FIX: the original passed the imported ``Credentials`` *class*
        into ``APIConnector.new`` instead of the ``credentials`` argument.
        """
        api = APIConnector.new(credentials)
        service = googleapiclient.discovery.build(
            "admin",
            "directory_v1",
            credentials=credentials,
            cache_discovery=False,
        )
        users = service.users()
        return cls(api=api, users=users, domain=domain)

    def list(self) -> dict:
        """List the users of the managed domain via the connector pool."""
        return self.api.execute(
            self.users.list(domain=self.domain)
        )

    def get(self, email: str) -> dict:
        """Fetch one user by primary email (the original left this a stub).

        Mirrors ``list``: build the request, execute it through the pool.
        """
        return self.api.execute(
            self.users.get(userKey=email)
        )
My question is:
how do I make this function use the new code:
def connect_logic(config: dict[str]) -> googleapiclient.discovery.Resource:
    """Authenticate with stored user tokens and return a Drive v3 service."""
    logging.info("Connecting to google drive...")
    drive_config = config[GD_CONFIG]
    credentials: Credentials = Credentials.from_authorized_user_info(
        info=drive_config[TOKEN_CONFIG],
        scopes=drive_config[SCOPES],
    )
    service: googleapiclient.discovery.Resource = build("drive", "v3", credentials=credentials)
    logging.info("Successfully logged into google drive")
    return service
or even, how do I make a line like this use the new code:
raw_data = service.files().get(fileId=file_id, fields='*').execute()
Huge thanks to everyone who helps!

Related

Making asynchronous requests to a Vertex AI endpoint (Google cloud platform)

I deployed a model to the model registry on Vertex AI. I added an endpoint too, and I am able to make inferences. Below is the code that I wrote (using Python 3.9.12):
from google.cloud import aiplatform
from google.oauth2 import service_account
# settings is a Pydantic BaseSettings subclass object
credentials_json = json.loads(settings.GCP_VERTEX_SERVICE_ACC)
credentials = service_account.Credentials.from_service_account_info(
info=credentials_json
)
aiplatform.init(project=settings.GCLOUD_PROJECT_NUMBER,
location=settings.GCLOUD_LOCATION,
credentials=credentials)
endpoint = aiplatform.Endpoint(settings.GCLOUD_SBERT_ENDPOINT_ID)
...
async def do_inference(list_strs: List[str]):
    """Call the module-level Vertex AI endpoint and return its predictions.

    Note: ``endpoint.predict`` is a synchronous SDK call, so despite the
    ``async def`` this blocks the event loop while the request runs.
    """
    prediction_result = endpoint.predict(instances=list_strs)
    return prediction_result.predictions
Right now I'm not able to make asynchronous requests. Is there a way around this? For instance, would using the aiplatform_v1beta1.PredictionServiceAsyncClient library be a solution? Thanks in advance!
---- EDIT -----
Below is the piece of code that did it for me in case someone else is struggling with the same thing.
import asyncio
from google.cloud import aiplatform_v1beta1
from google.oauth2 import service_account
from google.protobuf import json_format
from google.protobuf.struct_pb2 import Value
# settings is a Pydantic BaseSettings subclass object
credentials_json = json.loads(settings.GCP_VERTEX_SERVICE_ACC)
credentials = service_account.Credentials.from_service_account_info(
info=credentials_json
)
client_options = {"api_endpoint": f"{settings.GCLOUD_LOCATION}-aiplatform.googleapis.com"}
client = aiplatform_v1beta1.PredictionServiceAsyncClient(credentials=credentials, client_options=client_options)
...
async def do_inference(list_strs: List[str]):
    """Run a truly asynchronous prediction via PredictionServiceAsyncClient."""
    # Build the request against the module-level endpoint, then attach
    # every input string as an instance.
    predict_request = aiplatform_v1beta1.PredictRequest(endpoint=endpoint)
    predict_request.instances.extend(list_strs)
    # Awaiting here yields control to the event loop while the RPC runs.
    response = await client.predict(predict_request)
    return response.predictions
asyncio.get_event_loop().run_until_complete(do_inference())
This code owes a lot to @milad_raesi's answer!

Solana SPL token transfer with python

So after having read a couple of articles I'm yet to understand how to create a Transaction and send custom SPL tokens across the Solana blockchain.
I've attached my code below.
I truly don't understand what each part of the transaction is supposed to be.
So I figured that owner is the account/wallet that is sending and paying for the transaction. And I'm assuming that dest is where I wish to send the tokens to.
This is the token (on devnet) that I wish to send, But I don't seem to be able.
from spl.token.constants import TOKEN_PROGRAM_ID
from spl.token.instructions import transfer_checked, TransferCheckedParams
from solana.rpc.commitment import Confirmed
from solana.rpc.api import Client
from solana.rpc.types import TxOpts
from solana.keypair import Keypair
from solana.publickey import PublicKey
from solana.transaction import Transaction
import os
from dotenv import load_dotenv
class TransferService:
    """Builds and submits SPL token transfers through a solana-py Client."""

    def __init__(self, client: Client, service: SolanaService, token) -> None:
        # `token` is an opaque handle that SolanaService resolves to a
        # Keypair — presumably a serialized secret key read from the
        # environment; TODO confirm against SolanaService.get_keypair.
        self.client = client
        self.service = service
        self.keypair = self.service.get_keypair(token)

    def make_transaction(self, source, mint, dest, owner, amount=1, decimals=0) -> Transaction:
        """Build a one-instruction transaction moving `amount` base units of `mint`.

        NOTE(review): `source` and `dest` must be *token account* addresses,
        not wallet addresses, for transfer_checked to succeed (a wallet
        address as `dest` fails until an associated token account exists).
        """
        transaction = Transaction()
        transaction.add(transfer_checked(
            TransferCheckedParams(
                program_id=TOKEN_PROGRAM_ID,
                mint=PublicKey(mint),
                source=PublicKey(source),
                dest=PublicKey(dest),
                owner=owner,  # pays for and authorizes the transfer
                amount=amount,
                decimals=decimals,  # must match the mint's configured decimals
                signers=[]
            )))
        return transaction

    def send_transaction(self, transaction) -> None:
        """Sign with self.keypair and submit; waits for confirmation
        (skip_confirmation=False) using the Confirmed commitment level."""
        self.client.send_transaction(
            transaction,
            self.keypair,
            opts=TxOpts(skip_confirmation=False, preflight_commitment=Confirmed)
        )
# Load VAULT/TOKEN/KEYPAIR variables from a local .env file.
load_dotenv()

if __name__ == "__main__":
    token = os.getenv('TOKEN')
    client = Client('https://api.devnet.solana.com')  # devnet RPC endpoint
    service = SolanaService(client)
    # NOTE(review): `token` is immediately overwritten here — the TOKEN value
    # read above is never used. Likely one of the two reads should go to a
    # different variable; confirm which env var TransferService expects.
    token = os.getenv('KEYPAIR')
    transfer = TransferService(client, service, token)
    a = client.get_account_info(transfer.keypair.public_key)
    transaction = transfer.make_transaction(
        source='CtURxXpzn9aredXse2KNtyDMeVW627tL3p7DCucdv8bc',
        mint='DCzbhHu3YGnc8Vhez4YEMznQ38ad6WYGVYqeB4Wn3mie',
        dest='sPkypr2LBtF5Go87zYSn5fBtWxCDEcobWeQQxXHpxJR',
        owner=transfer.keypair.public_key,
        amount=1,
        decimals=9
    )
    transfer.send_transaction(transaction)
The destination sPkypr2LBtF5Go87zYSn5fBtWxCDEcobWeQQxXHpxJR is incorrect.
When you send SPL tokens, the source and dest must be addresses of token accounts, and in this case, sPkypr2LBtF5Go87zYSn5fBtWxCDEcobWeQQxXHpxJR is a wallet, so you'll need to create a recipient account for this wallet.
The preferred standard is to use an associated token account, created using something like create_associated_token_account from the SPL part of solana-py client: https://github.com/michaelhly/solana-py/blob/2c45353cb510bfeb7259fa19dacbaefe6b9ae3d1/src/spl/token/client.py#L173
For reference, the most important part is creating the instruction to create the associated token account:
def create_associated_token_account(payer: PublicKey, owner: PublicKey, mint: PublicKey) -> TransactionInstruction:
    """Build the instruction creating `owner`'s associated token account for `mint`."""
    # Derive the deterministic associated-token-account address from
    # (owner, token program, mint).
    # NOTE(review): PublicKey.find_program_address conventionally returns a
    # (address, nonce) tuple; if so, the address is element [0] and passing
    # the whole tuple into AccountMeta below would fail — confirm.
    associated_token_address = PublicKey.find_program_address(
        seeds=[bytes(owner), bytes(TOKEN_PROGRAM_ID), bytes(mint)], program_id=ASSOCIATED_TOKEN_PROGRAM_ID
    )
    return TransactionInstruction(
        keys=[
            # payer funds the new account's rent and must sign.
            AccountMeta(pubkey=payer, is_signer=True, is_writable=True),
            # the account being created.
            AccountMeta(pubkey=associated_token_address, is_signer=False, is_writable=True),
            AccountMeta(pubkey=owner, is_signer=False, is_writable=False),
            AccountMeta(pubkey=mint, is_signer=False, is_writable=False),
            # programs and sysvars consulted during account creation.
            AccountMeta(pubkey=SYS_PROGRAM_ID, is_signer=False, is_writable=False),
            AccountMeta(pubkey=TOKEN_PROGRAM_ID, is_signer=False, is_writable=False),
            AccountMeta(pubkey=SYSVAR_RENT_PUBKEY, is_signer=False, is_writable=False),
        ],
        program_id=ASSOCIATED_TOKEN_PROGRAM_ID,
    )
More background information about associated token accounts at https://spl.solana.com/associated-token-account

FastAPI - Supporting multiple authentication dependencies

Problem
I currently have JWT dependency named jwt which makes sure it passes JWT authentication stage before hitting the endpoint like this:
sample_endpoint.py:
from fastapi import APIRouter, Depends, Request
from JWTBearer import JWTBearer
from jwt import jwks
router = APIRouter()
jwt = JWTBearer(jwks)
#router.get("/test_jwt", dependencies=[Depends(jwt)])
async def test_endpoint(request: Request):
return True
Below is the JWT dependency which authenticate users using JWT (source: https://medium.com/datadriveninvestor/jwt-authentication-with-fastapi-and-aws-cognito-1333f7f2729e):
JWTBearer.py
from typing import Dict, Optional, List
from fastapi import HTTPException
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
from jose import jwt, jwk, JWTError
from jose.utils import base64url_decode
from pydantic import BaseModel
from starlette.requests import Request
from starlette.status import HTTP_403_FORBIDDEN
# A single JSON Web Key, as it appears in the provider's jwks.json document.
JWK = Dict[str, str]


class JWKS(BaseModel):
    """The JSON Web Key Set: signing keys published by the token issuer."""

    keys: List[JWK]  # each entry must at least carry a "kid" (see JWTBearer)


class JWTAuthorizationCredentials(BaseModel):
    """A parsed-but-not-yet-verified bearer token, split into its JWT parts."""

    jwt_token: str          # the full compact-serialized token
    header: Dict[str, str]  # unverified JOSE header; carries the "kid"
    claims: Dict[str, str]  # unverified payload claims
    signature: str          # base64url-encoded signature segment
    message: str            # header + "." + payload — the bytes that were signed
class JWTBearer(HTTPBearer):
    """FastAPI security dependency that verifies signed bearer JWTs against a
    pre-fetched JWKS (e.g. an AWS Cognito user pool's jwks.json)."""

    def __init__(self, jwks: JWKS, auto_error: bool = True):
        super().__init__(auto_error=auto_error)
        # Index the key set by key id so verification is a single dict lookup.
        self.kid_to_jwk = {jwk["kid"]: jwk for jwk in jwks.keys}

    def verify_jwk_token(self, jwt_credentials: JWTAuthorizationCredentials) -> bool:
        """Return True iff the token's signature matches the key named by its kid."""
        try:
            public_key = self.kid_to_jwk[jwt_credentials.header["kid"]]
        except KeyError:
            # The token claims a key id we never fetched — reject outright.
            raise HTTPException(
                status_code=HTTP_403_FORBIDDEN, detail="JWK public key not found"
            )
        key = jwk.construct(public_key)
        decoded_signature = base64url_decode(jwt_credentials.signature.encode())
        # Verify the signed bytes (header.payload) against the signature.
        return key.verify(jwt_credentials.message.encode(), decoded_signature)

    async def __call__(self, request: Request) -> Optional[JWTAuthorizationCredentials]:
        """Extract, parse and verify the Authorization header; 403 on failure.

        Returns None when no credentials are present (with auto_error=False).
        """
        credentials: HTTPAuthorizationCredentials = await super().__call__(request)
        if credentials:
            if not credentials.scheme == "Bearer":
                raise HTTPException(
                    status_code=HTTP_403_FORBIDDEN, detail="Wrong authentication method"
                )
            jwt_token = credentials.credentials
            # Split off the signature; `message` is exactly what was signed.
            message, signature = jwt_token.rsplit(".", 1)
            try:
                jwt_credentials = JWTAuthorizationCredentials(
                    jwt_token=jwt_token,
                    header=jwt.get_unverified_header(jwt_token),
                    claims=jwt.get_unverified_claims(jwt_token),
                    signature=signature,
                    message=message,
                )
            except JWTError:
                # Malformed token: could not even be parsed.
                raise HTTPException(status_code=HTTP_403_FORBIDDEN, detail="JWK invalid")
            if not self.verify_jwk_token(jwt_credentials):
                # Well-formed token but the signature does not check out.
                raise HTTPException(status_code=HTTP_403_FORBIDDEN, detail="JWK invalid")
            return jwt_credentials
jwt.py:
import os
import requests
from dotenv import load_dotenv
from fastapi import Depends, HTTPException
from starlette.status import HTTP_403_FORBIDDEN
from app.JWTBearer import JWKS, JWTBearer, JWTAuthorizationCredentials
load_dotenv() # Automatically load environment variables from a '.env' file.
jwks = JWKS.parse_obj(
requests.get(
f"https://cognito-idp.{os.environ.get('COGNITO_REGION')}.amazonaws.com/"
f"{os.environ.get('COGNITO_POOL_ID')}/.well-known/jwks.json"
).json()
)
jwt = JWTBearer(jwks)
async def get_current_user(
    # BUG FIX: the original depended on an undefined name `auth`; the bearer
    # instance created above in this module is `jwt`.
    credentials: JWTAuthorizationCredentials = Depends(jwt)
) -> str:
    """Return the authenticated user's name from verified JWT claims.

    Raises:
        HTTPException: 403 when the token carries no "username" claim.
    """
    try:
        return credentials.claims["username"]
    except KeyError:
        # BUG FIX: the original constructed the HTTPException but never
        # raised it, so the request silently proceeded with a None user.
        raise HTTPException(status_code=HTTP_403_FORBIDDEN, detail="Username missing")
api_key_dependency.py (very simplified right now, it will be changed):
from fastapi import Security, FastAPI, HTTPException
from fastapi.security.api_key import APIKeyHeader
from starlette.status import HTTP_403_FORBIDDEN
async def get_api_key(
    # NOTE(review): `api_key_header` is used as its own Security scheme here;
    # an APIKeyHeader(name=...) instance of that name must exist elsewhere
    # in the real module — confirm.
    api_key_header: str = Security(api_key_header)
):
    """Pass iff the request's API-key header matches the configured key."""
    API_KEY = ... getting API KEY logic ...
    if api_key_header == API_KEY:
        return True
    else:
        raise HTTPException(
            status_code=HTTP_403_FORBIDDEN, detail="Could not validate credentials"
        )
Question
Depending on the situation, I would like to first check if it has API Key in the header, and if its present, use that to authenticate. Otherwise, I would like to use jwt dependency for authentication. I want to make sure that if either api-key authentication or jwt authentication passes, the user is authenticated. Would this be possible in FastAPI (i.e. having multiple dependencies and if one of them passes, authentication passed). Thank you!
Sorry, got lost with things to do
The endpoint has a unique dependency, call it check from the file check_auth
ENDPOINT
from fastapi import APIRouter, Depends, Request
from check_auth import check
from JWTBearer import JWTBearer
from jwt import jwks
router = APIRouter()
jwt = JWTBearer(jwks)
#router.get("/test_jwt", dependencies=[Depends(check)])
async def test_endpoint(request: Request):
return True
The function check will depend on two separate dependencies, one for api-key and one for JWT. If both or one of these passes, the authentication passes. Otherwise, we raise exception as shown below.
DEPENDENCY
def key_auth(api_key=Header(None)):
    # Soft check: absent header means "this scheme was not attempted".
    if not api_key:
        return None
    ... verification logic goes here ...

def jwt(authorization=Header(None)):
    # Soft check: absent Authorization header means "not attempted".
    if not authorization:
        return None
    ... verification logic goes here ...

# Passes when at least one of the two schemes succeeded.
# NOTE(review): the parameter names are crossed — key_result receives the
# JWT dependency and jwt_result the key dependency (harmless for an OR, but
# misleading). Also `jwt_auth` is not defined in this sketch; presumably it
# should be the `jwt` function above — confirm.
async def check(key_result=Depends(jwt_auth), jwt_result=Depends(key_auth)):
    if not (key_result or jwt_result):
        raise Exception
This worked for me (JWT or APIkey Auth). If both or one of the authentication method passes, the authentication passes.
def jwt_auth(auth: HTTPAuthorizationCredentials = Depends(HTTPBearer(auto_error=False))):
    """Soft JWT check: None when no bearer token was sent, True when valid."""
    if not auth:
        return None
    ## validation logic
    return True

def key_auth(apikey_header=Depends(APIKeyHeader(name='X-API-Key', auto_error=False))):
    """Soft API-key check: None when X-API-Key is absent, True when valid."""
    if not apikey_header:
        return None
    ## validation logic
    return True

async def jwt_or_key_auth(jwt_result=Depends(jwt_auth), key_result=Depends(key_auth)):
    """Reject with 401 unless at least one of the soft checks succeeded."""
    if not (key_result or jwt_result):
        raise HTTPException(status_code=401, detail="Not authenticated")

# NOTE: the '@' of the route decorator was mangled to '#' by the paste.
#app.get("/", dependencies=[Depends(jwt_or_key_auth)])
async def root():
    return {"message": "Hello World"}

How to make sure that the object gets updated from module to the downstream?

I'm not sure if my title is misleading but I'm a python noob. I'm doing an interface for Vault. What it's doing is just renewing the keys every 30s so the client doesn't have to do it. Otherwise the secret will expire or change and the client using the key will get an error. I'm using python threading to renew the keys. Here's my code.
from __future__ import print_function
import hvac
import time
import threading
import os
VAULT_URL = os.environ['VAULT_ADDR']
VAULT_TOKEN = os.environ['VAULT_TOKEN']
class Client:
    """Vault wrapper that renews its token and re-reads the configured keys
    every 30 seconds on a background timer thread."""

    def __init__(self, *keys):
        # Vault paths to keep fresh, e.g. 'secret/key'.
        self.keys = keys
        # Mutated in place by read(); callers that keep a reference to this
        # dict observe the refreshed values.
        self.data_dict = {}
        self.client = hvac.Client(
            url=VAULT_URL,
            token=VAULT_TOKEN)
        # Kick off the first renew/read and schedule the recurring ones.
        self.__renew()

    def read(self):
        """Fetch every configured key and return the (shared) result dict."""
        for key in self.keys:
            self.data_dict[key] = self.client.read(key)
        return self.data_dict

    def __renew(self):
        # Renew the Vault token, then re-schedule this method in 30s.
        self.client.renew_token()
        # NOTE(review): Timer threads are non-daemon by default and are never
        # cancelled, so this keeps the process alive indefinitely — confirm
        # that is intended.
        threading.Timer(30, self.__renew).start()
        self.read()
And this is how it's being used.
from cnvault.cnvault import Client
data_dict = Client('secret/key').read()
// This is for web.py just to test
class ping:
def GET(self):
print(data_dict)
return 'pong'
Now if I change the data in Vault using Vault CLI and I call /ping, I'm still seeing old data.

Tornado Auth (Twittermixin) issue

I am currently trying to use tornado to display my twitter streams. Below is my code:
#!/usr/bin/env python
import time
import logging
from tornado.auth import TwitterMixin
from tornado.escape import json_decode, json_encode
from tornado.ioloop import IOLoop
from tornado import gen
from tornado.options import define, options, parse_command_line, parse_config_file
from tornado.web import Application, RequestHandler, authenticated, HTTPError
define('port', default=8080, help="port to listen on")
define('config_file', default='secrets.cfg',
help='filename for additional configuration')
define('debug', default=True, group='application',
help="run in debug mode (with automatic reloading)")
# The following settings should probably be defined in secrets.cfg
define('twitter_consumer_key', type=str, group='application')
define('twitter_consumer_secret', type=str, group='application')
define('cookie_secret', type=str, group='application',
default='this is a string',
help="signing key for secure cookies")
class BaseHandler(RequestHandler):
    """Shared base: resolves the current user from a signed (secure) cookie."""

    COOKIE_NAME = "uuser"

    def get_current_user(self):
        # Tornado calls this to populate self.current_user.
        user_json = self.get_secure_cookie(self.COOKIE_NAME)
        if not user_json:
            print(" No user_json")
            return None
        print(" Yes user_json")
        return json_decode(user_json)


class MainHandler(BaseHandler, TwitterMixin):
    # NOTE(review): the '#authenticated' / '#gen.coroutine' lines below are
    # '@' decorators mangled to '#' by the paste; without them this handler
    # neither enforces login nor runs as a coroutine.
    #authenticated
    #gen.coroutine
    def get(self):
        # Fetch the home timeline using the OAuth token stored at login.
        timeline = yield self.twitter_request(
            '/statuses/home_timeline',
            access_token = self.current_user['access_token'])
        self.render('home.html', timeline=timeline)


class LoginHandler(BaseHandler, TwitterMixin):
    #gen.coroutine
    def get(self):
        if self.get_argument('oauth_token', None):
            # Second OAuth leg: exchange the callback token for a user dict.
            user = yield self.get_authenticated_user()
            print(' user:', type(user))
            # Presumably dropped because its content breaks cookie encoding
            # — confirm.
            del user["description"]
            self.set_secure_cookie(self.COOKIE_NAME, json_encode(user))
            # NOTE(review): get_secure_cookie() reads the *request* cookies,
            # so immediately after set_secure_cookie() it returns None; the
            # cookie only appears on the next request.
            print(' get_secure_cookie:', self.get_secure_cookie(self.COOKIE_NAME) )
            self.redirect(self.get_argument('next', '/'))
        else:
            # First OAuth leg: send the user to Twitter, returning here.
            print(" Authorize_redirecting...")
            yield self.authorize_redirect(callback_uri=self.request.full_url())


class LogoutHandler(BaseHandler):
    def get(self):
        # NOTE(review): clears cookie "user", but the login is stored under
        # COOKIE_NAME ("uuser"), so logout never removes the session cookie.
        self.clear_cookie("user")


def main():
    """Parse options/config, wire up routes, and start the IOLoop."""
    parse_command_line(final=False)
    parse_config_file(options.config_file)
    app = Application(
        [
            (r'/', MainHandler),
            (r'/login', LoginHandler),
            (r'/logout', LogoutHandler),
        ],
        login_url='/login',
        **options.group_dict('application'))
    app.listen(options.port)
    logging.info('Listening on http://localhost:%d' % options.port)
    IOLoop.current().start()

if __name__ == '__main__':
    main()
So my understanding of the flow is as follows:
1.) Visit '/' - MainHandler: the @authenticated decorator will redirect to login_url if the user is not logged in.
2.) Visit '/login' - LoginHandler, self.authorize_redirect(callback_uri=self.request.full_url()) will append oauth_token argument at the end of url, and re-visit '/login'
3.) Visit'/login' - LoginHandler, obtain user from self.get_authenticated_user(), and set_secure_cookie(self.COOKIE_NAME, json_encode(user))
And here is the problem I think, I can't seem to set the cookie. When I try to access it immediately by self.get_secure_cookie(self.COOKIE_NAME), it returns None, and hence it keeps on re-visiting '/login'
Can anybody offer some help to my problem? Maybe it is something very obvious I am not seeing. Thanks
I have also set http://127.0.0.1:8080/ as the callback url on my twitter app setting, not sure if this has any contribution to the problem.
Final solution!!
#!/usr/bin/env python
import time
import uuid
import logging
from tornado.auth import TwitterMixin
from tornado.escape import json_decode, json_encode, url_escape, url_unescape
from tornado.ioloop import IOLoop
from tornado import gen
from tornado.options import define, options, parse_command_line, parse_config_file
from tornado.web import Application, RequestHandler, authenticated, HTTPError
from urllib.parse import quote
import re
define('port', default=8080, help="port to listen on")
define('config_file', default='secrets.cfg',
help='filename for additional configuration')
define('debug', default=True, group='application',
help="run in debug mode (with automatic reloading)")
# The following settings should probably be defined in secrets.cfg
define('twitter_consumer_key', type=str, group='application')
define('twitter_consumer_secret', type=str, group='application')
# define('cookie_secret', type=str, group='application',
# default='thisisastring',
# help="signing key for secure cookies")
class BaseHandler(RequestHandler):
    """Shared base: resolves the current user's token from a plain cookie."""

    COOKIE_NAME = "user"

    def get_current_user(self):
        # Plain (unsigned) cookie this time, hence get_cookie().
        user_json = self.get_cookie(self.COOKIE_NAME)
        if not user_json:
            print("\n - Cannot obtain cookie from client browser")
            return None
        print("\n - Cookie obtained from client browser")
        return json_decode(user_json)


class MainHandler(BaseHandler, TwitterMixin):
    # NOTE: '#authenticated' / '#gen.coroutine' are '@' decorators mangled
    # to '#' by the paste.
    #authenticated
    #gen.coroutine
    def get(self):
        print("\n - Obtaining timeline from twitter")
        # current_user here *is* the decoded access token (see LoginHandler).
        timeline = yield self.twitter_request(
            '/statuses/home_timeline',
            access_token = self.current_user)
        self.render('home.html', timeline=timeline)


class LoginHandler(BaseHandler, TwitterMixin):
    #gen.coroutine
    def get(self):
        if self.get_argument('oauth_token', None):
            print("\n - Authenticating with oauth_token...")
            user = yield self.get_authenticated_user()
            # Store only the access token, JSON-encoded.
            encoded_token = json_encode(user['access_token'])
            # remove certain ascii symbols which are rejected
            # by self.set_cookie() function...
            encoded_token = re.sub(r"[\x00-\x20]", '', encoded_token)
            # save encoded token as cookie
            self.set_cookie(name=self.COOKIE_NAME, value=encoded_token)
            self.redirect(self.get_argument('next', '/'))
        else:
            print("\n - Authorize_redirecting...")
            yield self.authorize_redirect(callback_uri=self.request.full_url())


class LogoutHandler(BaseHandler):
    def get(self):
        # Clears the same cookie the login path sets.
        self.clear_cookie(self.COOKIE_NAME)


def main():
    """Parse options/config, wire up routes, and start the IOLoop."""
    parse_command_line(final=False)
    parse_config_file(options.config_file)
    app = Application(
        [
            (r'/', MainHandler),
            (r'/login', LoginHandler),
            (r'/logout', LogoutHandler),
        ],
        login_url='/login',
        # NOTE(review): a fresh random secret on every start invalidates any
        # previously issued *secure* cookies; harmless here only because the
        # handlers use plain cookies.
        cookie_secret=str(uuid.uuid4().bytes),
        **options.group_dict('application'))
    app.listen(options.port)
    logging.info('Listening on http://localhost:%d' % options.port)
    IOLoop.current().start()

if __name__ == '__main__':
    main()

Categories

Resources