Flask API facing InterfaceError with PostgreSQL - python

I have a Flask API based on the Flask-RESTPlus extension, hosted on Google App Engine. The API does a basic job of fetching data from a Google Cloud SQL PostgreSQL instance. It works fine otherwise, but sometimes it starts returning InterfaceError: cursor already closed.
Strangely, when I do a gcloud app deploy, the API starts working fine again.
Here's a basic format of the API:
import simplejson as json
import psycopg2
from flask import Flask, jsonify
from flask_restplus import Api, Resource, fields
from psycopg2.extras import RealDictCursor

app = Flask(__name__)
app.config['SWAGGER_UI_JSONEDITOR'] = True

api = Api(app=app,
          doc='/docs',
          version="1.0",
          title="Title",
          description="description")
app.config['SWAGGER_UI_JSONEDITOR'] = True

ns_pricing = api.namespace('cropPricing')

db_user = "xxxx"
db_pass = "xxxx"
db_name = "xxxxx"
cloud_sql_connection_name = "xxxxxx"

conn = psycopg2.connect(user=db_user,
                        password=db_pass,
                        host='xxxxx',
                        dbname=db_name)


@ns_pricing.route('/list')
class States(Resource):
    def get(self):
        """
        list all the states for which data is available
        """
        cur = conn.cursor(cursor_factory=RealDictCursor)
        query = """
                SELECT
                    DISTINCT state
                FROM
                    db.table
                """
        conn.commit()
        cur.execute(query)
        states = json.loads(json.dumps(cur.fetchall()))
        if len(states) == 0:
            return jsonify(data=[],
                           status="Error",
                           message="Requested data not found")
        else:
            return jsonify(status="Success",
                           message="Successfully retrieved states",
                           data=states)
What should I fix to not see the error anymore?

It would be good to use an ORM such as SQLAlchemy / Flask-SQLAlchemy, which would handle establishing and re-establishing the connection for you.
If you stick with plain psycopg2, though, you can use try/except to catch the exception and re-establish the connection:
try:
    cur.execute(query)
except psycopg2.InterfaceError as err:
    print(err)
    conn = psycopg2.connect(....)
    cur = conn.cursor()
    cur.execute(query)
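If you go the SQLAlchemy route suggested above, a minimal sketch of the same endpoint as a plain Flask view might look like the following (the connection URL and the db.table name are placeholders taken from the question). pool_pre_ping makes the pool test each connection before handing it out and replace stale ones, so a connection dropped by Cloud SQL no longer surfaces as InterfaceError:

import sqlalchemy
from flask import Flask, jsonify

app = Flask(__name__)

engine = sqlalchemy.create_engine(
    "postgresql+psycopg2://xxxx:xxxx@xxxxx/xxxxx",  # placeholder URL
    pool_pre_ping=True,   # test each pooled connection before use
    pool_recycle=1800,    # recycle connections before the server drops them
)


@app.route('/cropPricing/list')
def list_states():
    # A fresh connection is checked out from the pool for every request
    # and returned to it when the with-block exits.
    with engine.connect() as conn:
        rows = conn.execute(sqlalchemy.text("SELECT DISTINCT state FROM db.table"))
        states = [row[0] for row in rows]
    if not states:
        return jsonify(data=[], status="Error", message="Requested data not found")
    return jsonify(data=states, status="Success", message="Successfully retrieved states")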

Related

creating a webservice in flask that queries a Mysql database and returns json

Please help:
I am trying to create a webservice in Flask (this is my first time) that takes a string, queries an external MySQL database, and returns one row as JSON.
I am sure there are other issues with the code below (all suggestions much appreciated), but I cannot even see them yet, because when I try to access example.com/webservice/vin/ I get "Internal Server Error
The server encountered an internal error and was unable to complete your request. Either the server is overloaded or there is an error in the application." from nginx.
Please can someone guide me where I am going wrong?
example.com returns "Hello from app_name running in docker...yayy". It's the other route that isn't working.
import json
import os

from flask import Flask, jsonify, make_response
from flask.ext.mysqldb import MySQL

app = Flask(__name__)
app.config['MYSQL_HOST'] = 'xxx'
app.config['MYSQL_USER'] = 'xxx'
app.config['MYSQL_PASSWORD'] = 'xxx'
app.config['MYSQL_DB'] = 'xxx'
mysql = MySQL(app)


@app.route("/")
def index():
    # Use os.getenv("key") to get environment variables
    app_name = os.getenv("APP_NAME")
    if app_name:
        return f"Hello from {app_name} running in a Docker container behind Nginx!"
    return "Hello from Flask"


@app.route('/webservice/vin/<vin>', methods=['GET'])
def get_vehicle(vin):
    sql = "SELECT * FROM `table` where column = '(%s )';" % (vin)
    cur = mysql.connection.cursor()
    cur.execute(sql)
    row_headers = [x[0] for x in cur.description]  # this will extract row headers
    rv = cur.fetchall()
    json_data = []
    for result in rv:
        json_data.append(dict(zip(row_headers, result)))
    return json.dumps(json_data)


@app.errorhandler(404)
def not_found(error):
    return make_response(jsonify({'error': 'Not found'}), 404)
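Separately from whatever causes the 500, the query in get_vehicle() is built with string interpolation, which breaks on quotes in the input and is open to SQL injection. A minimal sketch of the same handler using a parameterized query (the `table` and `column` names are the placeholders from the question, not real names):

@app.route('/webservice/vin/<vin>', methods=['GET'])
def get_vehicle(vin):
    cur = mysql.connection.cursor()
    # Let the driver escape the value instead of formatting it into the SQL string.
    cur.execute("SELECT * FROM `table` WHERE `column` = %s", (vin,))
    row_headers = [x[0] for x in cur.description]
    rows = cur.fetchall()
    cur.close()
    return jsonify([dict(zip(row_headers, row)) for row in rows])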

How to return postgreSQL query result in cloud function using python? [duplicate]

This question already has answers here:
Flask view raises TypeError: 'bool' object is not callable
(1 answer)
return SQL table as JSON in python
(15 answers)
Python converting mysql query result to json
(6 answers)
Closed 3 years ago.
I am writing a Flask API with a GET method using Python 3.6 which creates a connection to GCP PostgreSQL and executes a simple select query. I want my function to return the result of the PostgreSQL query (the rows). Below is my code:
from sqlalchemy.engine import url
import pg8000
import logging
import os
import json
from flask import Flask, Response, request, render_template
import sqlalchemy

app = Flask(__name__)
logger = logging.getLogger()

# Environment specific parameter values to establish DB connection with postgreSQL
db_user = os.environ.get("DB_USER")
db_pass = os.environ.get("DB_PASS")
db_name = os.environ.get("DB_NAME")
cloud_sql_connection_name = os.environ.get("CLOUD_SQL_CONNECTION_NAME")

# Creating a connection with postgresql DB model.
db = sqlalchemy.create_engine(
    url.URL(
        drivername='postgres+pg8000',
        username=db_user,
        password=db_pass,
        database=db_name,
        query={
            'unix_sock': '/cloudsql/{}/.s.PGSQL.5432'.format(
                cloud_sql_connection_name)
        }
    ),
    pool_size=5,
    max_overflow=2,
    pool_timeout=30,
    pool_recycle=1800,
)


@app.route('/', methods=['GET'])
def main(request):
    with db.connect() as conn:
        # Execute the query and fetch all usernames
        users = conn.execute("select username from user_table").fetchall()
        return users


if __name__ == '__main__':
    app.run(host='127.0.0.1', port=8080, debug=True)
I tried your code and received this error:
TypeError: The view function did not return a valid response. The return type must be a string, dict, tuple, Response instance, or WSGI callable, but it was a list
So try returning str(users) or converting the data to a dict first.
fetchall() returns a list of row objects (see the docs: https://docs.sqlalchemy.org/en/13/core/connections.html#sqlalchemy.engine.ResultProxy.fetchall). You can't return that object as a response; you need to convert it to one of the allowed response types. The code below might help you understand the same.
@app.route('/', methods=['GET'])
def main(request):
    users = []
    with db.connect() as conn:
        # Execute the query and fetch all usernames
        db_users = conn.execute("select username from user_table").fetchall()
        # Pull the plain values out of the row objects so json.dumps can serialize them
        users = [row[0] for row in db_users]
    data = {'users': users}
    resp = Response(json.dumps(data), status=200, mimetype="application/json")
    return resp
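A slightly shorter variant of the same idea, assuming the same db engine and user_table from the question, is to let flask.jsonify build the response:

from flask import jsonify


@app.route('/users', methods=['GET'])  # hypothetical route, just for illustration
def list_users():
    with db.connect() as conn:
        rows = conn.execute("select username from user_table").fetchall()
    # jsonify serializes the dict and sets the application/json mimetype
    return jsonify(users=[row[0] for row in rows])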

Strange error in SQLAlchemy. Bug?

I tried to use Flask-RESTful + SQLAlchemy (with Automap) + a MySQL DBMS, but I don't understand why this error occurs in my code.
I send the request to my controller and it works normally, but after about 10 seconds an error is raised about losing the connection to the DBMS.
itens = session.query(estados).filter(estados.ativo == True)
Stranger still, if I use raw SQL syntax the problem does not occur.
itens = engine.execute("SELECT `TBEstados`.`id`, `TBEstados`.`nome`, `TBEstados`.`ativo` FROM `intbr_webapp`.`TBEstados`;")
I'm using SQLAlchemy 1.2, but I also tried version 1.1. I also tried pool_pre_ping=True without success.
Does anyone know anything about this? I don't understand why the ORM query doesn't work but the raw SQL does. The connection is the same, but the result is not.
My code is below:
estado.py
from flask import jsonify
from flask_restful import Resource
from json import dumps
from resources.database import Base, session, engine
# from resources.dataEncoder import JsonModel
from models.TBEstados import TBEstadosSchema


class Estados(Resource):
    def get(self):
        estados = Base.classes.TBEstados
        itens = session.query(estados).filter(estados.ativo == True)
        result = TBEstadosSchema(many=True).dump(itens)
        return jsonify(result.data)
database.py (imported in EstadoModel)
from flask import Flask, g
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy import create_engine
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import sessionmaker
from flask_marshmallow import Marshmallow

app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = my conn string
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
app.config['SQLALCHEMY_POOL_RECYCLE'] = 3600
app.config['SQLALCHEMY_POOL_TIMEOUT'] = 30

Base = automap_base()
engine = create_engine(app.config['SQLALCHEMY_DATABASE_URI'], pool_pre_ping=True)

# reflect the tables
Base.prepare(engine, reflect=True)

Session = sessionmaker(bind=engine)
session = Session()

ma = Marshmallow()
run.py
from flask import Flask, g
from flask_restful import Resource, Api
import resources.database
from controllers.Estados import Estados

app = Flask(__name__)
api = Api(app)

api.add_resource(Estados, '/estados')

if __name__ == '__main__':
    app.run(debug=True, port=9002)
The exact error:
sqlalchemy.exc.OperationalError
sqlalchemy.exc.OperationalError: (pymysql.err.OperationalError) (2013, 'Lost connection to MySQL server during query') [SQL: 'SELECT TBEstados.id AS TBEstados_id, TBEstados.nome AS TBEstados_nome, TBEstados.ativo AS TBEstados_ativo \nFROM TBEstados \nWHERE TBEstados.ativo = true']
I solved my problem!
I changed the value of the MySQL variable wait_timeout from 10s to 100s (to test) and the error in SQLAlchemy with the PyMySQL driver did not occur again.
https://dev.mysql.com/doc/refman/5.7/en/server-system-variables.html#sysvar_wait_timeout
Thanks for the help!
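Raising wait_timeout works around the symptom; the pool can also be told to discard connections before the server does. Note that SQLALCHEMY_POOL_RECYCLE in app.config is only read by Flask-SQLAlchemy, not by a plain create_engine() call, so in database.py above it never takes effect. A minimal sketch (the connection string is a placeholder) that passes the pool options to the engine directly:

from sqlalchemy import create_engine

# Placeholder connection string; the point is passing the pool options
# straight to create_engine(), since it does not read the SQLALCHEMY_*
# keys from app.config.
engine = create_engine(
    "mysql+pymysql://user:password@host/intbr_webapp",
    pool_pre_ping=True,   # test a connection before handing it out
    pool_recycle=280,     # recycle connections before the server's wait_timeout hits
)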

Flask RESTful API: Issue with db connection pool

I'm trying to create REST API endpoints using flask framework. This is my fully working script:
from flask import Flask, jsonify
from flask_restful import Resource, Api
from flask_restful import reqparse
from sqlalchemy import create_engine
from flask.ext.httpauth import HTTPBasicAuth
from flask.ext.cors import CORS

conn_string = "mssql+pyodbc://x:x@x:1433/x?driver=SQL Server"

auth = HTTPBasicAuth()


@auth.get_password
def get_password(username):
    if username == 'x':
        return 'x'
    return None


app = Flask(__name__)
cors = CORS(app)
api = Api(app)


class Report(Resource):
    decorators = [auth.login_required]

    def get(self):
        parser = reqparse.RequestParser()
        parser.add_argument('start', type=str)
        parser.add_argument('end', type=str)
        args = parser.parse_args()

        e = create_engine(conn_string)
        conn = e.connect()
        stat = """
            select x from report
            """
        query = conn.execute(stat)
        json_dict = []
        for i in query.cursor.fetchall():
            res = {'x': i[0], 'xx': i[1]}
            json_dict.append(res)
        conn.close()
        e.dispose()
        return jsonify(results=json_dict)


api.add_resource(Report, '/report')

if __name__ == '__main__':
    app.run(host='0.0.0.0')
The issue is that I get results from this API only for a day or so, after which I stop getting results unless I restart my script (or sometimes even my VM), after which I get results again. I reckon there is some issue with the database connection pool, but I'm closing the connection and disposing of the engine as well. I have no idea why the API gives me results only for a while, and because of this I have to restart my VM every single day. Any ideas?
In my experience, the issue is caused by calling create_engine(conn_string) inside the Report class, so that a DB connection pool is created and destroyed for every REST request. That is not the correct way to use SQLAlchemy and it leads to I/O resource clashes on the DB connections; see the description of the engine.dispose() function at http://docs.sqlalchemy.org/en/rel_1_0/core/connections.html#sqlalchemy.engine.Engine.
To resolve the issue, you just need to move e = create_engine(conn_string) to directly below the line conn_string = "mssql+pyodbc://x:x@x:1433/x?driver=SQL Server" and remove the e.dispose() call from the Report class, see below.
conn_string = "mssql+pyodbc://x:x@x:1433/x?driver=SQL Server"
e = create_engine(conn_string)  # To here
In the def get(self) function:
args = parser.parse_args()
# Move: e = create_engine(conn_string)
conn = e.connect()
and
conn.close()
# Remove: e.dispose()
return jsonify(results=json_dict)
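Put together, the relevant part of the script then looks roughly like this (credentials are still the x placeholders from the question; only the engine creation moves to module level, and the per-request connect/close stays):

conn_string = "mssql+pyodbc://x:x@x:1433/x?driver=SQL Server"
e = create_engine(conn_string)  # one engine (and pool) for the whole process


class Report(Resource):
    decorators = [auth.login_required]

    def get(self):
        parser = reqparse.RequestParser()
        parser.add_argument('start', type=str)
        parser.add_argument('end', type=str)
        args = parser.parse_args()

        conn = e.connect()   # borrow a connection from the shared pool
        query = conn.execute("select x from report")
        json_dict = [{'x': i[0], 'xx': i[1]} for i in query.cursor.fetchall()]
        conn.close()         # return the connection to the pool
        return jsonify(results=json_dict)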

mySQL UPDATE doesn't work for GAE when called from browser

I am writing an email address verification Python file for Google App Engine. (Ya, I know Django has stuff for this, but I wanted to write my own because that is how I learn.)
Below is the Python code. It returns "Email Account Verified", which suggests to me that the queries worked. However, when I look at the "active" column in the database, it is still 0.
If I run the query string that logging.info("%s", db_query) logs directly against the database, it works and the column is updated to 1.
All my other Python code (with UPDATEs) works fine; the only difference is that those files are called from my iOS app and this one is called from a browser.
# Make the libs folder with 3rd party libraries and common methods
import sys
sys.path.insert(0, 'libs')

# Imports
import logging
import webapp2
from django.utils.html import strip_tags
import common
import MySQLdb
import json

VERIFIED_HTML = """\
<html>
  <body>
    <h1>Email Account Verified</h1>
  </body>
</html>
"""

ERROR_HTML = """\
<html>
  <body>
    <h1>ERROR</h1>
  </body>
</html>
"""


class VerifyEmail(webapp2.RequestHandler):
    def get(self):
        user_email = strip_tags(self.request.get('user_email').lower().strip())
        user_activation_hash = strip_tags(self.request.get('user_activation_hash').strip())
        logging.info("User Email = %s", user_email)
        logging.info("User Activation Hash = %s", user_activation_hash)

        # Insert the information into the users table
        # Get the database connection to Google Cloud SQL
        db = common.connect_to_google_cloud_sql()
        db_cursor = db.cursor(MySQLdb.cursors.DictCursor)

        # Check to see if user already exists
        # Query for user
        db_query = """SELECT \
            email, activation_hash \
            FROM users WHERE email='%s' AND activation_hash='%s'""" % (user_email, user_activation_hash)
        db_cursor.execute(db_query)

        # If there is one record containing the username check password
        if(db_cursor.rowcount == 1):
            db_query = """UPDATE users SET active=%s WHERE email='%s';""" % (1, user_email)
            logging.info("%s" % db_query)
            if(db_cursor.execute(db_query)):
                self.response.write(VERIFIED_HTML)
            else:
                self.response.write(ERROR_HTML)
        else:  # either no user, or activation_hash doesn't match
            self.response.write(ERROR_HTML)
Connect to Google Cloud SQL
def connect_to_google_cloud_sql():
    # hostname = DEV_DB_HOSTNAME
    # hostname = PROD_DB_HOSTNAME
    db_username = 'dummy_user'  # not real
    db_password = 'dummypassword'  # not real

    # If PROD or Deployed Testing, use unix_socket
    if(os.getenv('SERVER_SOFTWARE') and os.getenv('SERVER_SOFTWARE').startswith('Google App Engine/')):
        db = MySQLdb.connect(unix_socket='/cloudsql/' + _DATABASE_HOSTNAME, db='dummydbname', user=db_username, passwd=db_password)
    else:  # Local Testing uses host
        db = MySQLdb.connect(host=_DATABASE_HOSTNAME, port=3306, db='dummydbname', user=db_username, passwd=db_password)

    logging.info("Got DB Connection")
    return db
Any suggestions? Is it a GAE Cloud SQL privileges issue?
Maybe it is because I was using my browser with the local App Engine dev server running on my local IP?
You need to call .commit() on the MySQLdb connection after executing queries that modify data. This is why your update is failing: the UPDATE runs inside a transaction, but when your code ends without committing that transaction, the changes to the DB are rolled back, despite having told the user the update succeeded.
You can also enable autocommit on the connection with db.autocommit(True) so that every statement is committed automatically.
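Concretely, a sketch of the fix inside VerifyEmail.get() from the question (only the db.commit() line is new):

db_query = """UPDATE users SET active=%s WHERE email='%s';""" % (1, user_email)
logging.info("%s" % db_query)
if db_cursor.execute(db_query):
    db.commit()  # persist the UPDATE; without this the transaction is rolled back
    self.response.write(VERIFIED_HTML)
else:
    self.response.write(ERROR_HTML)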
