I am running a Flask server which fetches data from MongoDB.
from flask import Flask
from flask import render_template
from pymongo import Connection
import json
from bson import json_util
from bson.json_util import dumps
app = Flask(__name__)
MONGODB_HOST = 'localhost'
MONGODB_PORT = 27017
DBS_NAME = 'donorschoose'
COLLECTION_NAME = 'projects'
FIELDS = {'school_state': True, 'resource_type': True, 'poverty_level': True, 'date_posted': True, 'total_donations': True, '_id': False}
#app.route("/")
def index():
return render_template("index.html")
#app.route("/donorschoose/projects")
def donorschoose_projects():
connection = Connection(MONGODB_HOST, MONGODB_PORT)
collection = connection[DBS_NAME][COLLECTION_NAME]
projects = collection.find(fields=FIELDS)
json_projects = []
for project in projects:
json_projects.append(project)
json_projects = json.dumps(json_projects, default=json_util.default)
connection.disconnect()
return json_projects
if __name__ == "__main__":
app.run(host='0.0.0.0',port=5000,debug=True)
I got this code from the net and implemented it successfully, and I am feeding data to D3 apps via this app. My question is: is it possible to modify the data right here in the Flask environment using Python (in the code I have pasted above)? I only ask because Python would allow a greater deal of flexibility than D3, as my expertise in D3 is limited. To the problem: the 'poverty_level' column will have 4 fixed values, i.e. low, medium, high, unknown.
My aim is to calculate the percentage of projects with a high poverty level,
i.e. for the column 'poverty_level' -> count(val = high) / count(all rows).
Essentially I need just one column to display my metric, and I had a tough time doing this in D3. Any D3 or Python level help will be much appreciated :)
Thank you.
First you need to completely iterate the Cursor returned by find():
projects = list(collection.find(fields=FIELDS))
Then calculate the total number and the number of high-poverty projects:
high_poverty_count = sum(1 for p in projects if p['poverty_level'] == 'high')
high_poverty_ratio = float(high_poverty_count) / len(projects)
Then I'd combine this with the list of all projects into a single document:
result = {'high_poverty_ratio': high_poverty_ratio,
'projects': projects}
return json.dumps(result, default=json_util.default)
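If you only need the counts rather than the documents themselves, an alternative sketch (assuming a reasonably recent PyMongo, where count_documents() is available) is to let MongoDB do the counting:
total = collection.count_documents({})
high = collection.count_documents({'poverty_level': 'high'})
# Guard against an empty collection before dividing.
high_poverty_ratio = float(high) / total if total else 0.0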
Also, note that your application has two severe problems:
First, you use "Connection", which is obsolete. Do this:
from pymongo import MongoClient
client = MongoClient(MONGODB_HOST, MONGODB_PORT)
Second, you create a new client and disconnect it for each request. This is extremely slow. Instead, create the client when your application begins, and never disconnect it:
client = MongoClient(MONGODB_HOST, MONGODB_PORT)
#app.route("/donorschoose/projects")
def donorschoose_projects():
collection = client[DBS_NAME][COLLECTION_NAME]
# ... etc ....
return json.dumps(result, default=json_util.default)
I want to know how I am supposed to test my code and see whether it works properly. I want to make sure that it stores the received data to the database. Can you please tell me how I am supposed to do that? While I was searching the forum I found this post, but I did not really understand what is going on. Here is the code I want to test.
client = MongoClient(os.environ.get("MONGODB_URI"))
app.db = client.securify
app.secret_key = str(os.environ.get("APP_SECRET"))
#app.route("/", methods=["GET", "POST"])
def home():
if request.method == "POST":
ip_address = request.remote_addr
entry_content = request.form.get("content")
formatted_date = datetime.datetime.today().strftime("%Y-%m-%d/%H:%M")
app.db.entries.insert({"content": entry_content, "date": formatted_date, "IP": ip_address})
return render_template("home.html")
and here is the mock test I wrote:
import os
from unittest import TestCase
from app import app
class AppTest(TestCase):
    # executed prior to each test
    def setUp(self):
        # you can change your application configuration
        app.config['TESTING'] = True
        # you can recover a "test client" of your defined application
        self.app = app.test_client()

    # then in your test method you can use self.app.[get, post, etc.] to make the request
    def test_home(self):
        url_path = '/'
        response = self.app.get(url_path)
        self.assertEqual(response.status_code, 200)

    def test_post(self):
        url_path = '/'
        response = self.app.post(url_path, data={"content": "this is a test"})
        self.assertEqual(response.status_code, 200)
The test_post gets stuck and after a few seconds gives an error when it reaches the app.db.entries.insert({"content": entry_content, "date": formatted_date, "IP": ip_address}) part. Please also tell me how I can retrieve the saved data in order to make sure it is saved in the expected way.
This is what I do using NodeJS; it is not tested at all in Python, but the idea is the same.
First of all, find an in-memory DB; there are options like pymongo-inmemory or mongomock.
Then in your code you have to do the connection according to your environment (production/development/whatever).
Something like this:
env = os.environ.get("ENV")
if env == "TESTING":
# connect to mock db
elif env == "DEVELOMPENT":
# for example if you want to test against a real DB but not the production one
# then do the connection here
else:
# connect to production DB
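As a concrete illustration, here is a minimal sketch of the TESTING branch using mongomock (the database and collection names are just placeholders borrowed from the question above):
import mongomock

# Hypothetical in-memory client for tests; no real MongoDB server is needed.
client = mongomock.MongoClient()
db = client.securify
db.entries.insert_one({"content": "this is a test"})
assert db.entries.find_one({"content": "this is a test"}) is not None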
I don't know if it is the proper way to do it, but I found a solution. After creating a test client with self.app = app.test_client() the db gets set to localhost:27017, so I changed it manually as follows and it worked:
self.app = app.test_client()
client = MongoClient(os.environ.get("MONGODB_URI"))
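To also check that the POST actually stored the data, you can query the collection directly in the test after the request. A sketch (assuming the test client and app.db point at the same database):
def test_post_saves_entry(self):
    response = self.app.post('/', data={"content": "this is a test"})
    self.assertEqual(response.status_code, 200)
    # Look the document up in the collection the view writes to.
    saved = app.db.entries.find_one({"content": "this is a test"})
    self.assertIsNotNone(saved)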
I am using Flask to create a simple API. The API simply returns values from MongoDB. Everything works great if I do the connection within the same function. I am not doing the connection at the start of the file because I am using uWSGI and an nginx server on Ubuntu; if I do that, there is a forking problem.
However, I have to use this connection with another API, so I thought I would make a separate class for the connection that each API simply calls. I am doing this to keep the code manageable. However, when I try the code below it always shows an internal server error. I tried making the function static too, and the error still exists.
Note - I have replaced the MongoDB address with xxx as I am using a MongoDB Atlas account here.
from flask import Flask
from flask import request, jsonify
from flask_pymongo import pymongo
from pymongo import MongoClient
from flask_restful import Resource, Api, reqparse
app = Flask(__name__)
api = Api(app)
#client = MongoClient("xxx")
#db = client.get_database('restdb')
#records = db.stars
class dbConnect():
    def connect(self):
        client = MongoClient("xxx")
        db = client.get_database('restdb')
        records = db.stars
        return records

class Order(Resource):
    def get(self):
        #client = MongoClient("xxx")
        #db = client.get_database('restdb')
        #records = db.stars
        #star = records
        star = dbConnect.connect
        output = []
        for s in star.find():
            output.append({'name' : s['name'], 'distance' : s['distance']})
        return jsonify({'result' : output})

api.add_resource(Order, '/')

if __name__ == "__main__":
    app.run(host='0.0.0.0')
ERROR {"message": "Internal Server Error"}
Preliminary investigation suggests that you haven't instantiated your dbConnect class. Also, you haven't called the method connect properly.
class Order(Resource):
    def get(self):
        db = dbConnect()  # This was missing
        star = db.connect()  # This is how you call the method properly.
        output = []
        for s in star.find():
            output.append({'name' : s['name'], 'distance' : s['distance']})
        return jsonify({'result' : output})
Also class dbConnect() should be declared as class dbConnect:.
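Putting both fixes together, the connection class could look like this (just a sketch; the Atlas URI stays as xxx):
class dbConnect:
    def connect(self):
        # Create the client and return the collection handle.
        client = MongoClient("xxx")
        db = client.get_database('restdb')
        return db.stars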
Hi there, I'm new to Python.
I would like to implement a listener on my Firebase DB.
When I change one or more parameters in the DB, my Python code has to do something.
How can I do it?
Thanks a lot.
My DB is a simple list of data from 001 to 200:
"remote-controller"
001 -> 000
002 -> 020
003 -> 230
my code is:
from firebase import firebase
firebase = firebase.FirebaseApplication('https://remote-controller.firebaseio.com/', None)
result = firebase.get('003', None)
print result
It looks like this is supported now (October 2018): although it's not documented in the 'Retrieving Data' guide, you can find the needed functionality in the API reference. I tested it and it works like this:
def listener(event):
    print(event.event_type)  # can be 'put' or 'patch'
    print(event.path)  # relative to the reference, it seems
    print(event.data)  # new data at /reference/event.path. None if deleted

firebase_admin.db.reference('my/data/path').listen(listener)
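Each call to listen() starts its own listener thread (as noted in a later answer below); as far as I can tell it also returns a ListenerRegistration that you can close when you no longer want updates:
registration = firebase_admin.db.reference('my/data/path').listen(listener)
# ... later, stop receiving events:
registration.close()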
As Peter Haddad suggested, you should use Pyrebase to achieve something like that, given that the Python SDK still does not support realtime event listeners.
import pyrebase
config = {
    "apiKey": "apiKey",
    "authDomain": "projectId.firebaseapp.com",
    "databaseURL": "https://databaseName.firebaseio.com",
    "storageBucket": "projectId.appspot.com"
}

firebase = pyrebase.initialize_app(config)
db = firebase.database()

def stream_handler(message):
    print(message["event"])  # put
    print(message["path"])   # /-K7yGTTEp7O549EzTYtI
    print(message["data"])   # {'title': 'Pyrebase', "body": "etc..."}

my_stream = db.child("posts").stream(stream_handler)
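The stream runs on its own thread; per the Pyrebase README, you can stop it when you are done with it:
my_stream.close()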
If anybody wants to create multiple listeners using the same listener function and wants more information about the triggered node, you can do it like this.
A normal listener function will get an Event object that carries only the data, the node name, and the event type. If you add multiple listeners and want to differentiate between the data changes, you can write your own class and add some extra info to it when creating the object.
class ListenerClass:
    def __init__(self, appname):
        self.appname = appname

    def listener(self, event):
        print(event.event_type)  # can be 'put' or 'patch'
        print(event.path)  # relative to the reference, it seems
        print(event.data)  # new data at /reference/event.path. None if deleted
        print(self.appname)  # Extra data related to the change; add your own member variables
Creating Objects:
listenerObject = ListenerClass(my_app_name + '1')
db.reference('PatientMonitoring', app= obj).listen(listenerObject.listener)
listenerObject = ListenerClass(my_app_name + '2')
db.reference('SomeOtherPath', app= obj).listen(listenerObject.listener)
Full Code:
import firebase_admin
from firebase_admin import credentials
from firebase_admin import db
# Initialising Database with credentials
json_path = r'E:\Projectz\FYP\FreshOnes\Python\PastLocations\fyp-healthapp-project-firebase-adminsdk-40qfo-f8fc938674.json'
my_app_name = 'fyp-healthapp-project'
xyz = {'databaseURL': 'https://{}.firebaseio.com'.format(my_app_name),'storageBucket': '{}.appspot.com'.format(my_app_name)}
cred = credentials.Certificate(json_path)
obj = firebase_admin.initialize_app(cred,xyz , name=my_app_name)
# Create objects here. You can use loops to create many listeners, but each listener creates its own thread, so don't create irrelevant listeners. It won't work if you are running on a machine with thread constraints.
listenerObject = ListenerClass(my_app_name + '1') # Decide your own parameters, How you want to differentiate. Depends on you
db.reference('PatientMonitoring', app= obj).listen(listenerObject.listener)
listenerObject = ListenerClass(my_app_name + '2')
db.reference('SomeOtherPath', app= obj).listen(listenerObject.listener)
As you can see on the per-language feature chart on the Firebase Admin SDK home page, Python and Go currently don't have realtime event listeners. If you need that on your backend, you'll have to use the node.js or Java SDKs.
You can use Pyrebase, which is a python wrapper for the Firebase API.
more info here:
https://github.com/thisbejim/Pyrebase
To retrieve data you need to use val(), example:
users = db.child("users").get()
print(users.val())
Python Firebase realtime listener, full code:
import firebase_admin
from firebase_admin import credentials
from firebase_admin import db
def listener(event):
    print(event.event_type)  # can be 'put' or 'patch'
    print(event.path)  # relative to the reference, it seems
    print(event.data)  # new data at /reference/event.path. None if deleted
json_path = r'E:\Projectz\FYP\FreshOnes\Python\PastLocations\fyp-healthapp-project-firebase-adminsdk-40qfo-f8fc938674.json'
my_app_name = 'fyp-healthapp-project'
xyz = {'databaseURL': 'https://{}.firebaseio.com'.format(my_app_name),'storageBucket': '{}.appspot.com'.format(my_app_name)}
cred = credentials.Certificate(json_path)
obj = firebase_admin.initialize_app(cred,xyz , name=my_app_name)
db.reference('PatientMonitoring', app= obj).listen(listener)
Output:
put
/
{'n0': '40', 'n1': '71'} # for first time its gonna fetch the data from path whether data is changed or not
put # On data changed
/n1
725
put # On data changed
/n0
401
I'm writing a simple internal REST API for our solution using Flask, serving JSON objects through GET calls (including authentication). We have multiple backends to fetch data from. From what I understand, these should be connected to in a function decorated with @app.before_request and assigned to the g global for use in the specific route being requested. It's not a pattern I'm used to.
Here is a toy example of what I'm doing:
@app.before_request
def before_request():
    g.some_conn_a = create_connection('a')
    g.some_conn_b = create_connection('b')
    g.some_client = create_client()

@app.route('/get_some_data')
@requires_auth
def get_some_data():
    # Fetch something from all connections in g
    payload = ...  # Construct payload using above connections
    return jsonify(payload)

@app.route('/get_some_other_data')
@requires_auth
def get_some_other_data():
    # Fetch something from maybe just g.some_conn_b
    payload = ...  # Construct payload using g.some_conn_b
    return jsonify(payload)
This seems wasteful to me if the user makes a request for data residing in only one or two of these connections/clients, like in the get_some_other_data route example.
I'm considering just making the connections/clients in the route functions instead, or loading them lazily. What's the "correct" way? I hope it isn't to make a new module; that seems extreme for what I'm doing.
Riffing on the Flask docs Database Connections example, you could modify get_db() to accept an argument for each of your multiple connections.
def get_db(conn):
    """Open the specified connection if none yet for the current app context."""
    if conn == 'some_conn_a':
        if not hasattr(g, 'some_conn_a'):
            g.some_conn_a = create_connection('a')
        db = g.some_conn_a
    elif conn == 'some_conn_b':
        if not hasattr(g, 'some_conn_b'):
            g.some_conn_b = create_connection('b')
        db = g.some_conn_b
    elif conn == 'some_client':
        if not hasattr(g, 'some_client'):
            g.some_client = create_client()
        db = g.some_client
    else:
        raise Exception("Unknown connection: %s" % conn)
    return db
@app.teardown_appcontext
def close_db(error):
    """Closes the db connections."""
    if hasattr(g, 'some_conn_a'):
        g.some_conn_a.close()
    if hasattr(g, 'some_conn_b'):
        g.some_conn_b.close()
    if hasattr(g, 'some_client'):
        g.some_client.close()
Then you could query each connection as needed:
@app.route('/get_some_data')
def get_some_data():
    data_a = get_db('some_conn_a').query().something()
    data_b = get_db('some_conn_b').query().something()
    data_c = get_db('some_client').query().something()
    payload = {'a': data_a, 'b': data_b, 'c': data_c}
    return jsonify(payload)
The get_db() pattern is preferred over the before_request pattern for lazy loading database connections. The docs examples for Flask 0.11 and up utilize the get_db() pattern to a larger extent.
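If the if/elif chain starts to grow, one possible variation (just a sketch, reusing the create_connection/create_client helpers from the question) is to keep a small factory mapping and still create each connection lazily on g:
CONNECTION_FACTORIES = {
    'some_conn_a': lambda: create_connection('a'),
    'some_conn_b': lambda: create_connection('b'),
    'some_client': create_client,
}

def get_db(conn):
    """Open the specified connection lazily for the current app context."""
    if conn not in CONNECTION_FACTORIES:
        raise Exception("Unknown connection: %s" % conn)
    if not hasattr(g, conn):
        setattr(g, conn, CONNECTION_FACTORIES[conn]())
    return getattr(g, conn)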
I'm currently implementing a webapp in Flask. It's an app that does a visualization of gathered data. Each page or section will always have a GET call, and each call returns a JSON response which is then processed into displayed data.
The current problem is that some calculation is needed before the function can return a JSON response. This causes some responses to arrive more slowly than others, making the page load a bit slow. How do I properly deal with this? I have read about caching in Flask and wonder whether that is what the app needs right now. I have also researched implementing a Redis queue. I'm not really sure which is the correct method.
Any help or insights would be appreciated. Thanks in advance
Here are some ideas:
If the source data that you use for your calculations is not likely to change often then you can run the calculations once and save the results. Then you can serve the results directly for as long as the source data remains the same.
You can save the results back to your database, or as you suggest, you can save them in a faster storage such as Redis. Based on your description I suspect the big performance gain will be in not doing calculations so often, the difference between storing in a regular database vs. Redis or similar is probably not significant in comparison.
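For illustration, a minimal sketch of that idea with Redis (assuming app is your Flask application; the cache key, the 5-minute TTL and the compute_results() helper are assumptions, not part of your app):
import json
from redis import Redis

redis_client = Redis(host='localhost', port=6379)

@app.route('/get_expensive_data')
def get_expensive_data():
    cached = redis_client.get('expensive_data')  # hypothetical cache key
    if cached is not None:
        return app.response_class(cached, mimetype='application/json')
    result = compute_results()  # hypothetical slow calculation
    payload = json.dumps(result)
    redis_client.set('expensive_data', payload, ex=300)  # cache for 5 minutes
    return app.response_class(payload, mimetype='application/json')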
If the data changes often then you will still need to do calculations frequently. For such a case an option that you have is to push the calculations to the client. Your Flask app can just return the source data in JSON format and then the browser can do the processing on the user's computer.
I hope this helps.
You can use copy_current_request_context together with Redis and a Thread.
It is helpful when it takes a long time to build the JSON response.
The first request may be slow, but the next requests will be faster.
Example
import json
from datetime import timedelta, datetime
from threading import Thread

from . import dbb, redis_client
from flask import Blueprint, request, jsonify, flash, after_this_request, copy_current_request_context, \
    current_app, send_from_directory
from .models import Shop, Customers

def save_customer_json_to_redis(request):
    response_json = {
        "have_customer": False,
        "status": False,
        "anythingelse": None,
        "message": "False, you have to check..."
    }
    #print(request.data)
    headers = request.headers
    Authorization = headers['Authorization']
    token = Authorization.replace("Bearer", "")
    phone = request.args.get('phone')
    if phone is not None and phone != "":
        print('token', token, "phone", phone)
        now = datetime.utcnow() + timedelta(hours=7)
        shop = Shop.query.filter(Shop.private_token == token, Shop.ended_date > now, Shop.shop_active == True).first()
        customer = Customers.query.filter_by(shop_id=shop.id, phone=phone).first()
        if customer:
            redis_name = f'{shop.id}_api_v2_customer_phone_{phone}_customer_id_{customer.id}'
            print(redis_name)
            response_json["anythingelse"] = ...  # do what you want; this may take a long time
            response_json["status"] = True
            response_json["message"] = "Successful"
            redis_client.set(redis_name, json.dumps(response_json))  # Save JSON to Redis
@app.route('/api/v2/customer', methods=['GET'])
def api_customer():
    @copy_current_request_context
    def do_update_customer_to_redis():
        # this function saves the JSON you want to return next time to Redis
        save_customer_json_to_redis(request)

    Thread(target=do_update_customer_to_redis).start()

    response_json = {
        "have_customer": False,
        "status": False,
        "anythingelse": {},
        "message": "False, you have to check..."
    }
    #print(request.data)
    headers = request.headers
    Authorization = headers['Authorization']
    token = Authorization.replace("Bearer", "")
    phone = request.args.get('phone')
    if phone is not None and phone != "":
        print('token', token, "phone", phone)
        now = datetime.utcnow() + timedelta(hours=7)
        shop = Shop.query.filter(Shop.private_token == token, Shop.ended_date > now, Shop.shop_active == True).first()
        customer = Customers.query.filter_by(shop_id=shop.id, phone=phone).first()
        if customer:
            redis_name = f'{shop.id}_api_v2_customer_phone_{phone}_customer_id_{customer.id}'
            print(redis_name)
            try:
                response_json = json.loads(redis_client.get(redis_name))  # use the JSON saved by a previous request
                print("json.loads(redis_client.get(redis_name))")
            except Exception as e:
                print("json.loads(redis_client.get(redis_name))", e)
                # otherwise build the response JSON now
                response_json["anythingelse"] = ...  # do what you want; this may take a long time
                response_json["message"] = ...  # do what you want
                #redis_client.set(redis_name, json.dumps(response_json))
                response_json["status"] = True
                response_json["message"] = "Successful"
    return jsonify(response_json)
In the __init__.py:
from flask import Flask
from flask_cors import CORS
from flask_mail import Mail
from flask_sqlalchemy import SQLAlchemy
from redis import Redis
# init SQLAlchemy so we can use it later in our models
dbb = SQLAlchemy(session_options={"autoflush": False})
redis_client = Redis(
    host='localhost',
    port='6379',
    password='your_redis_password'
)

def create_app():
    app = Flask(__name__)
    ...........