Creating a reusable Datastore client for a large Flask application - Python

I want to create a database connection from my Python Flask application to my Datastore instance on GCP. I have a file services/db.py:
from google.cloud import datastore
from google.auth import compute_engine
from config import env
import os
import logging
import warnings

namespace = env.str('ENV')

class Datastore_Client():
    def context_local(self):
        datastore_client = datastore.Client(namespace=namespace)
        return datastore_client

    def context_test(self):
        project = env.str("GOOGLE_CLOUD_PROJECT")
        credentials = compute_engine.Credentials()
        datastore_client = datastore.Client(credentials=credentials, project=project, namespace='test')
        return datastore_client

    def context_live(self):
        datastore_client = datastore.Client(namespace=namespace)
        return datastore_client

    def get_datastore_client(self):
        contexts = {
            'local': self.context_local,
            'test': self.context_test,
            'appengine': self.context_live
        }
        context_func = contexts.get(env.str("CONTEXT"), self.context_local)
        return context_func()

    builtin_list = list

    def from_datastore(self, entity):
        if not entity:
            return None
        if isinstance(entity, list):
            entity = entity.pop()
        return entity
In my model files I would reference the datastore client like so:
from ..services.db import Datastore_Client
client = Datastore_Client().get_datastore_client()
but when I provide this reference in every file that needs it, my application appears to spin up a separate client for each import, whereas I want a single application-wide connection.
I have looked at the application context, but the examples use SQLite and talk about tearing the connection down afterwards, so I am not sure whether the same approach can be used for Datastore.

You are creating a new object every time you call Datastore_Client(). It sounds like you want a singleton Datastore_Client that you create once and reuse in your model files.
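For illustration, here is a minimal sketch of that idea (a module-level singleton, reusing the names from the question; the context-switching logic is elided for brevity):

# services/db.py -- minimal singleton sketch
from google.cloud import datastore
from config import env

namespace = env.str('ENV')

_client = None  # created on first use, then shared by every importer

def get_datastore_client():
    """Return the one shared Datastore client for this process."""
    global _client
    if _client is None:
        _client = datastore.Client(namespace=namespace)
    return _client

Because Python caches imported modules, every from services.db import get_datastore_client sees the same _client, so the connection is built once per process rather than once per model file.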

Related

Using pytest to monkeypatch an initialized Redis connection in a Flask application

I've been struggling with this for a while now. I have a Flask app that is executed in my app.py file. In this file I have a bunch of endpoints that call different functions from other files. In another file, extensions.py, I've instantiated a class that contains a Redis connection. See the file structure below.
# app.py
from flask import Flask
from extensions import redis_obj

app = Flask(__name__)

@app.route('/flush-cache', methods=['POST'])
def flush_redis():
    result = redis_obj.flush_redis_cache()
    return result

# extensions.py
from redis_class import CloudRedis

redis_obj = CloudRedis()

# redis_class.py
import redis

class CloudRedis:
    def __init__(self):
        self.conn = redis.Redis(connection_pool=redis.ConnectionPool.from_url('REDIS_URL',
                                                                              ssl_cert_reqs=None))

    def flush_redis_cache(self):
        try:
            self.conn.flushdb()
            return 'OK'
        except:
            return 'redis flush failed'
I've been attempting to use monkeypatching in a test to patch flush_redis_cache, so that when I run flush_redis() the call to redis_obj.flush_redis_cache() will just return 'OK', since I've already tested the CloudRedis class in other pytest tests. However, no matter what I've tried, I haven't been able to successfully patch this. This is what I have below.
import pytest

from extensions import redis_obj
from app import app

@pytest.fixture()
def client():
    yield app.test_client()

def test_flush_redis_when_redis_flushed(client, monkeypatch):
    # setup
    def get_mock_flush_redis_cache():
        return 'OK'

    monkeypatch.setattr(cloud_reids, 'flush_redis_cache', get_mock_flush_redis_cache)
    cloud_redis.flush_redis = get_mock_flush_redis_cache
    # act
    res = client.post('/flush-cache')
    result = flush_redis()
Does anyone have any ideas on how this can be done?
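No accepted answer is recorded here, but one common pattern is sketched below: patch the method on the CloudRedis class itself, so the redis_obj instance that app.py already imported resolves to the mock. This is only a sketch assembled from the names in the question, not a verified answer:

import pytest

from app import app
from redis_class import CloudRedis

@pytest.fixture()
def client():
    yield app.test_client()

def test_flush_redis_when_redis_flushed(client, monkeypatch):
    # The mock must accept `self`, because it is looked up on the class
    # and called as a bound method of redis_obj.
    def mock_flush_redis_cache(self):
        return 'OK'

    monkeypatch.setattr(CloudRedis, 'flush_redis_cache', mock_flush_redis_cache)

    res = client.post('/flush-cache')
    assert res.get_data(as_text=True) == 'OK'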

How to test python tornado application that use Mongodb with Motor Client

I want to test my Tornado Python application with pytest.
For that purpose, I want to have a mock DB for Mongo and to use a "fake" Motor client to simulate the calls to MongoDB.
I found a lot of solutions for pymongo, but not for Motor.
Any ideas?
I do not clearly understand your problem. Why not just have hard-coded JSON data?
If you just want to have a class that would mock the following:
from motor.motor_tornado import MotorClient
client = MotorClient(MONGODB_URL)
my_db = client.my_db
result = await my_db['my_collection'].insert_one(my_json_load)
So I recommend creating a class:
class Collection():
    database = []

    async def insert_one(self, data):
        self.database.append(data)
        data['_id'] = "5063114bd386d8fadbd6b004"  ## You may make it random or sequential
        ...
        ## Also, you may save the 'database' list to a pickle on disk to preserve data between runs
        return data

    async def find_one(self, data):
        ## Search in the list
        return data

    async def delete_one(self, data_id):
        ## Remove from the list; report one deleted document
        result = type('DeleteResult', (), {'deleted_count': 1})()
        return result

## Then create a collection:
my_db = {}
my_db['my_collection'] = Collection()
### The following is part of 'views.py'
from tornado.web import RequestHandler, authenticated
from tornado.escape import xhtml_escape

class UserHandler(RequestHandler):
    async def post(self, name):
        getusername = xhtml_escape(self.get_argument("user_variable"))
        my_json_load = {'username': getusername}
        result = await my_db['my_collection'].insert_one(my_json_load)
        ...
        return self.write(result)
If you would clarify your question, I will develop my answer.
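As an aside, a test could drive the fake collection roughly like this (a sketch that assumes the pytest-asyncio plugin is installed; Collection is the class defined above):

import pytest

@pytest.mark.asyncio
async def test_insert_one_assigns_id():
    my_db = {'my_collection': Collection()}
    result = await my_db['my_collection'].insert_one({'username': 'alice'})
    assert result['_id'] == "5063114bd386d8fadbd6b004"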

Connecting to a MongoDB server via a separate class

I am using Flask to create a simple API. The API simply returns values from MongoDB. Everything works great if I make the connection within the same function. I am not making the connection at the start of the file because I am using uwsgi and an nginx server on Ubuntu, and doing it there causes a problem with forking.
However, I have to use this connection with other APIs too, so I thought I'd make a separate class for the connection that each API can simply call. I'm doing this to keep the code manageable. However, when I try the code below it always shows an internal server error. I tried making the function static too, but the error persists.
Note: I have replaced the MongoDB address with xxx, as I am using a MongoDB Atlas account here.
from flask import Flask
from flask import request, jsonify
from flask_pymongo import pymongo
from pymongo import MongoClient
from flask_restful import Resource, Api, reqparse

app = Flask(__name__)
api = Api(app)

#client = MongoClient("xxx")
#db = client.get_database('restdb')
#records = db.stars

class dbConnect():
    def connect(self):
        client = MongoClient("xxx")
        db = client.get_database('restdb')
        records = db.stars
        return records

class Order(Resource):
    def get(self):
        #client = MongoClient("xxx")
        #db = client.get_database('restdb')
        #records = db.stars
        #star = records
        star = dbConnect.connect
        output = []
        for s in star.find():
            output.append({'name': s['name'], 'distance': s['distance']})
        return jsonify({'result': output})

api.add_resource(Order, '/')

if __name__ == "__main__":
    app.run(host='0.0.0.0')
ERROR {"message": "Internal Server Error"}
Preliminary investigation suggests that you haven't instantiated your dbConnect class. Also, you haven't called the method connect properly.
class Order(Resource):
    def get(self):
        db = dbConnect()  # This was missing
        star = db.connect()  # This is how you call the method properly.
        output = []
        for s in star.find():
            output.append({'name': s['name'], 'distance': s['distance']})
        return jsonify({'result': output})
Also, class dbConnect() should be declared as class dbConnect:.
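Since the question mentions the uwsgi fork problem, one further refinement (a sketch, not the only option, reusing the names from the question) is to create the MongoClient lazily and cache it, so each worker opens a single connection after the fork instead of one per request:

from pymongo import MongoClient

class dbConnect:
    _records = None  # cached once per worker process

    def connect(self):
        if dbConnect._records is None:
            # Created on the first request, i.e. after uwsgi has forked
            client = MongoClient("xxx")  # "xxx" is the redacted Atlas URI
            dbConnect._records = client.get_database('restdb').stars
        return dbConnect._records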

Python ml engine predict: How can I make a googleapiclient.discovery.build persistent?

I need to make online predictions from a model that is deployed in Cloud ML Engine. My code in Python is similar to the one found in the docs (https://cloud.google.com/ml-engine/docs/tensorflow/online-predict):
service = googleapiclient.discovery.build('ml', 'v1')
name = 'projects/{}/models/{}'.format(project, model)

if version is not None:
    name += '/versions/{}'.format(version)

response = service.projects().predict(
    name=name,
    body={'instances': instances}
).execute()
However, I receive the "instances" data from outside the script, and I wonder if there is a way to run this script without executing service = googleapiclient.discovery.build('ml', 'v1') before every request, since it takes time.
PS: this is my very first project on GCP. Thank you.
Something like this will work. You'll want to initialize your service globally, then use that service instance to make your calls.
import os
import googleapiclient.discovery

AI_SERVICE = None

def ai_platform_init():
    global AI_SERVICE
    # Set GCP authentication
    credentials = os.environ.get('GOOGLE_APPLICATION_CREDENTIALS')
    # Path to your credentials
    credentials_path = os.path.join(os.path.dirname(__file__), 'ai-platform-credentials.json')
    if credentials is None and os.path.exists(credentials_path):
        os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = credentials_path
    # Create the AI Platform service (MemoryCache is defined further down,
    # so it must appear above this point in a real module)
    if os.path.exists(credentials_path):
        AI_SERVICE = googleapiclient.discovery.build('ml', 'v1', cache=MemoryCache())

# Initialize AI Platform on load.
ai_platform_init()
then later on, you can do something like this:
def call_ai_platform():
    response = AI_SERVICE.projects().predict(name=name,
                                             body={'instances': instances}).execute()
Bonus! In case you were curious about the MemoryCache class in the googleapiclient.discovery call, it was borrowed from another SO answer:
class MemoryCache():
    """A workaround for cache warnings from Google.
    Check out: https://github.com/googleapis/google-api-python-client/issues/325#issuecomment-274349841
    """
    _CACHE = {}

    def get(self, url):
        return MemoryCache._CACHE.get(url)

    def set(self, url, content):
        MemoryCache._CACHE[url] = content
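As written, call_ai_platform assumes name and instances are defined at module level. A slightly more self-contained variant that still reuses the global service might look like this (a sketch; the commented-out call uses hypothetical project and model names):

def predict(project, model, instances, version=None):
    """Build the resource name and reuse the cached AI_SERVICE for the call."""
    name = 'projects/{}/models/{}'.format(project, model)
    if version is not None:
        name += '/versions/{}'.format(version)
    return AI_SERVICE.projects().predict(
        name=name,
        body={'instances': instances}
    ).execute()

# Hypothetical usage:
# response = predict('my-project', 'my-model', [{'input': [1.0, 2.0]}])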

Use database to store session instead of Cookie with Flask

I have a Python project with Flask.
I'm using SQLAlchemy (according to this page of the documentation: http://flask.pocoo.org/docs/0.10/patterns/sqlalche) to handle my database actions.
I'm using Flask's session to store the user's information (authentication status, preferences, ...).
Flask's default session behaviour is to store sessions in the user's cookie and to sign this cookie with secret_key so users can't alter it, but they can still read it.
I don't like that my users are able to "see" the session's content. Does Flask offer a built-in way to store the session's content in the ORM (SQLAlchemy), or do I have to implement that myself?
Thanks!
This was adapted from http://flask.pocoo.org/snippets/75/.
If you need to store a lot of session data it makes sense to move the data from the cookie to the server. In that case you might want to use redis as the storage backend for the actual session data.
The following code implements a session backend using redis. It allows you to either pass in a redis client or will connect to the redis instance on localhost. All the keys are prefixed with a specified prefix which defaults to session:.
import pickle
from datetime import timedelta
from uuid import uuid4
from redis import Redis
from werkzeug.datastructures import CallbackDict
from flask.sessions import SessionInterface, SessionMixin

class RedisSession(CallbackDict, SessionMixin):
    def __init__(self, initial=None, sid=None, new=False):
        def on_update(self):
            self.modified = True
        CallbackDict.__init__(self, initial, on_update)
        self.sid = sid
        self.new = new
        self.modified = False

class RedisSessionInterface(SessionInterface):
    serializer = pickle
    session_class = RedisSession

    def __init__(self, redis=None, prefix='session:'):
        if redis is None:
            redis = Redis()
        self.redis = redis
        self.prefix = prefix

    def generate_sid(self):
        return str(uuid4())

    def get_redis_expiration_time(self, app, session):
        if session.permanent:
            return app.permanent_session_lifetime
        return timedelta(days=1)

    def open_session(self, app, request):
        sid = request.cookies.get(app.session_cookie_name)
        if not sid:
            sid = self.generate_sid()
            return self.session_class(sid=sid, new=True)
        val = self.redis.get(self.prefix + sid)
        if val is not None:
            data = self.serializer.loads(val)
            return self.session_class(data, sid=sid)
        return self.session_class(sid=sid, new=True)

    def save_session(self, app, session, response):
        domain = self.get_cookie_domain(app)
        if not session:
            self.redis.delete(self.prefix + session.sid)
            if session.modified:
                response.delete_cookie(app.session_cookie_name,
                                       domain=domain)
            return
        redis_exp = self.get_redis_expiration_time(app, session)
        cookie_exp = self.get_expiration_time(app, session)
        val = self.serializer.dumps(dict(session))
        # Note: redis-py 3.x expects setex(name, time, value),
        # so the expiry comes before the serialized payload
        self.redis.setex(self.prefix + session.sid,
                         int(redis_exp.total_seconds()), val)
        response.set_cookie(app.session_cookie_name, session.sid,
                            expires=cookie_exp, httponly=True,
                            domain=domain)
Here is how to enable it:
app = Flask(__name__)
app.session_interface = RedisSessionInterface()
If you get an attribute error that total_seconds is missing, it means you're using a version of Python older than 2.7. In this case you can use this function as a replacement for the total_seconds method:
def total_seconds(td):
return td.days * 60 * 60 * 24 + td.seconds
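To address the original SQLAlchemy question more directly: Flask itself has no built-in server-side session store, but the Flask-Session extension can keep session data in a SQLAlchemy-managed database. A minimal sketch, assuming the flask-session and flask-sqlalchemy packages are installed (the sqlite URI is just an example):

from flask import Flask
from flask_session import Session
from flask_sqlalchemy import SQLAlchemy

app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///sessions.db'
app.config['SESSION_TYPE'] = 'sqlalchemy'

db = SQLAlchemy(app)
app.config['SESSION_SQLALCHEMY'] = db

Session(app)  # cookies now carry only a session id; the data lives in the database
# Run db.create_all() once so the sessions table exists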
