Python Flask Only Run Code Once

I am trying to set a global variable as in:
# -*- coding: utf8 -*-
from flask import Flask, render_template, redirect, flash, url_for, session, request
from sqlalchemy.sql import text
from sqlalchemy.orm import aliased, Query
from sqlalchemy import exc
from flask_bootstrap import Bootstrap
from .forms import *
import zlib
#from sqlalchemy.orm.query import limit, order_by
from dbmodel import *

app = Flask(__name__)
app.secret_key = 'A0Zr98j/3yX R~XHH!jmN]LWX/,?RT'
Bootstrap(app)

MAX_ITEMS = 50
MAX_ZIPS = 5
MAX_ORDERS = 20

clientMaxSQL = "SELECT * FROM CLIENT WHERE 1"

engine, dbsession = dbconnect()

@app.route("/clientRegister", methods=['GET', 'POST'])
def clientRegister():
    form = ClientQuery()
    global clientMaxSQL
    flash('maxSql at start %s ' % (clientMaxSQL))

    if form.reset.data == "True":
        flash('RESET TABLE FILTERS')
        clientMaxSQL = "SELECT * FROM CLIENT WHERE 1"

    sql = clientMaxSQL
    if form.validate_on_submit():
        if len(form.compareVal.data.strip()) > 0 and (any(c.isalpha() for c in form.compareVal.data) or any(c.isdigit() for c in form.compareVal.data)):
            sql = sql + " AND " + form.columnSelection.data + form.comparisonSelection.data + "'" + form.compareVal.data + "'"

    start = int(request.args.get('start', '0'))
    prev = start - MAX_ITEMS
    nextStart = start + MAX_ITEMS

    tmpSQL = clientMaxSQL
    clientMaxSQL = sql
    sql = sql + " LIMIT " + str(start) + "," + str(nextStart)
    try:
        clients = engine.execute(sql)
        maxStart = engine.execute(clientMaxSQL).rowcount
        flash('Attempting Query: %s' % (sql))
    except:
        flash('Invalid Comparison of %s' % (form.compareVal.data))
        sql = tmpSQL + " LIMIT " + str(start) + "," + str(nextStart)
        clientMaxSQL = tmpSQL
        clients = engine.execute(sql)
        maxStart = engine.execute(clientMaxSQL)

    flash('maxSql at end %s ' % (clientMaxSQL))
    return render_template("clientRegister.html", form=form, maxStart=maxStart, clients=clients, start=start, prev=prev, nextStart=nextStart)
I am getting some strange results. It seems as if this statement (declared above all of my function definitions) is executing every once in a while on its own. Is this possible? It is imperative to my web app that this only runs once, otherwise I will lose the current state of my query.
I've included all the code in the program that ever changes the variable, yet it is still resetting on its own.

Use the @app.before_first_request decorator with the function to make it run only once, as in:

@app.before_first_request
def function_to_run_only_once():
    # your statement(s)

This should make sure that your statements execute only once in the entire lifetime of your program.
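For reference, a minimal runnable sketch of that pattern (assuming a Flask version before 2.3, where before_first_request still exists; the state dict here is a made-up stand-in for your query string):

from flask import Flask

app = Flask(__name__)
state = {"clientMaxSQL": None}  # hypothetical module-level state

@app.before_first_request
def init_once():
    # Runs once per worker process, right before the first request is handled.
    state["clientMaxSQL"] = "SELECT * FROM CLIENT WHERE 1"

@app.route("/")
def index():
    return state["clientMaxSQL"]

One caveat: "once" means once per process. If your server runs several workers, or the debug reloader spawns a child process, each process gets its own copy of module-level globals, which is a common reason a global appears to reset on its own.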

Related

Mocking sqlobject function call for test db

I am trying to mock sqlbuilder.func for test cases with pytest.
I successfully mocked sqlbuilder.func.TO_BASE64 with correct output, but when I tried mocking sqlbuilder.func.FROM_UNIXTIME I didn't get any error, yet the resulting output in the generated query is incorrect. Below is a minimal working example of the problem.
models.py
from sqlobject import (
    sqlbuilder,
    sqlhub,
    SQLObject,
    StringCol,
    BLOBCol,
    TimestampCol,
)

class Store(SQLObject):
    name = StringCol()
    sample = BLOBCol()
    createdAt = TimestampCol()

DATE_FORMAT = "%Y-%m-%d"

def retrieve(name):
    query = sqlbuilder.Select(
        [sqlbuilder.func.TO_BASE64(Store.q.sample)],
        sqlbuilder.AND(
            Store.q.name == name,
            sqlbuilder.func.FROM_UNIXTIME(Store.q.createdAt, DATE_FORMAT) >= sqlbuilder.func.FROM_UNIXTIME("2018-10-12", DATE_FORMAT)
        )
    )
    connection = sqlhub.getConnection()
    query = connection.sqlrepr(query)
    print(query)
    queryResult = connection.queryAll(query)
    return queryResult
conftest.py
import pytest
from models import Store
from sqlobject import sqlhub
from sqlobject.sqlite import sqliteconnection

@pytest.fixture(autouse=True, scope="session")
def sqlite_db_session(tmpdir_factory):
    file = tmpdir_factory.mktemp("db").join("sqlite.db")
    conn = sqliteconnection.SQLiteConnection(str(file))
    sqlhub.processConnection = conn
    init_tables()
    yield conn
    conn.close()

def init_tables():
    Store.createTable(ifNotExists=True)
test_ex1.py
import pytest
from sqlobject import sqlbuilder
from models import retrieve
try:
    import mock
    from mock import MagicMock
except ImportError:
    from unittest import mock
    from unittest.mock import MagicMock

def TO_BASE64(x):
    return x

def FROM_UNIXTIME(x, y):
    return 'strftime("%Y%m%d", datetime({},"unixepoch", "localtime"))'.format(x)

# @mock.patch("sqlobject.sqlbuilder.func.TO_BASE64")
# @mock.patch("sqlobject.sqlbuilder.func.TO_BASE64", MagicMock(side_effect=lambda x: x))
# @mock.patch("sqlobject.sqlbuilder.func.TO_BASE64", new_callable=MagicMock(side_effect=lambda x: x))
@mock.patch("sqlobject.sqlbuilder.func.TO_BASE64", TO_BASE64)
@mock.patch("sqlobject.sqlbuilder.func.FROM_UNIXTIME", FROM_UNIXTIME)
def test_retrieve():
    result = retrieve('Some')
    assert result == []
Current SQL:
SELECT store.sample FROM store WHERE (((store.name) = ('Some')) AND (1))
Expected SQL:
SELECT
store.sample
FROM
store
WHERE
store.name = 'Some'
AND
strftime(
'%Y%m%d',
datetime(store.created_at, 'unixepoch', 'localtime')
) >= strftime(
'%Y%m%d',
datetime('2018-10-12', 'unixepoch', 'localtime')
)
Edit: standalone example
#! /usr/bin/env python
from sqlobject import *

__connection__ = "sqlite:/:memory:?debug=1&debugOutput=1"

try:
    import mock
    from mock import MagicMock
except ImportError:
    from unittest import mock
    from unittest.mock import MagicMock

class Store(SQLObject):
    name = StringCol()
    sample = BLOBCol()
    createdAt = TimestampCol()

Store.createTable()

DATE_FORMAT = "%Y-%m-%d"

def retrieve(name):
    query = sqlbuilder.Select(
        [sqlbuilder.func.TO_BASE64(Store.q.sample)],
        sqlbuilder.AND(
            Store.q.name == name,
            sqlbuilder.func.FROM_UNIXTIME(Store.q.createdAt, DATE_FORMAT) >= sqlbuilder.func.FROM_UNIXTIME("2018-10-12", DATE_FORMAT)
        )
    )
    connection = Store._connection
    query = connection.sqlrepr(query)
    queryResult = connection.queryAll(query)
    return queryResult

def TO_BASE64(x):
    return x

def FROM_UNIXTIME(x, y):
    return 'strftime("%Y%m%d", datetime({},"unixepoch", "localtime"))'.format(x)

for p in [
    mock.patch("sqlobject.sqlbuilder.func.TO_BASE64", TO_BASE64),
    mock.patch("sqlobject.sqlbuilder.func.FROM_UNIXTIME", FROM_UNIXTIME),
]:
    p.start()

retrieve('Some')

mock.patch.stopall()
By default, sqlbuilder.func is an SQLExpression that passes its attributes (sqlbuilder.func.datetime, for example) to the SQL backend as constants (sqlbuilder.func is actually an alias for sqlbuilder.ConstantSpace). See the docs about SQLExpression, the FAQ and the code for func.
When you mock an attribute in the func namespace, it is evaluated by SQLObject and passed to the backend in reduced form. If you want to return a string literal from the mocking function, you need to tell SQLObject it is a value that must be passed to the backend as is, unevaluated. The way to do that is to wrap the literal in SQLConstant, like this:
def FROM_UNIXTIME(x, y):
    return sqlbuilder.SQLConstant('strftime("%Y%m%d", datetime({},"unixepoch", "localtime"))'.format(x))
See SQLConstant.
The entire test script now looks like this:
#! /usr/bin/env python3.7
from sqlobject import *

__connection__ = "sqlite:/:memory:?debug=1&debugOutput=1"

try:
    import mock
    from mock import MagicMock
except ImportError:
    from unittest import mock
    from unittest.mock import MagicMock

class Store(SQLObject):
    name = StringCol()
    sample = BLOBCol()
    createdAt = TimestampCol()

Store.createTable()

DATE_FORMAT = "%Y-%m-%d"

def retrieve(name):
    query = sqlbuilder.Select(
        [sqlbuilder.func.TO_BASE64(Store.q.sample)],
        sqlbuilder.AND(
            Store.q.name == name,
            sqlbuilder.func.FROM_UNIXTIME(Store.q.createdAt, DATE_FORMAT) >= sqlbuilder.func.FROM_UNIXTIME("2018-10-12", DATE_FORMAT)
        )
    )
    connection = Store._connection
    query = connection.sqlrepr(query)
    queryResult = connection.queryAll(query)
    return queryResult

def TO_BASE64(x):
    return x

def FROM_UNIXTIME(x, y):
    return sqlbuilder.SQLConstant('strftime("%Y%m%d", datetime({},"unixepoch", "localtime"))'.format(x))

for p in [
    mock.patch("sqlobject.sqlbuilder.func.TO_BASE64", TO_BASE64),
    mock.patch("sqlobject.sqlbuilder.func.FROM_UNIXTIME", FROM_UNIXTIME),
]:
    p.start()

retrieve('Some')

mock.patch.stopall()
The output is:
1/Query : CREATE TABLE store (
id INTEGER PRIMARY KEY AUTOINCREMENT,
name TEXT,
sample TEXT,
created_at TIMESTAMP
)
1/QueryR : CREATE TABLE store (
id INTEGER PRIMARY KEY AUTOINCREMENT,
name TEXT,
sample TEXT,
created_at TIMESTAMP
)
2/QueryAll: SELECT store.sample FROM store WHERE (((store.name) = ('Some')) AND ((strftime("%Y%m%d", datetime(store.created_at,"unixepoch", "localtime"))) >= (strftime("%Y%m%d", datetime(2018-10-12,"unixepoch", "localtime")))))
2/QueryR : SELECT store.sample FROM store WHERE (((store.name) = ('Some')) AND ((strftime("%Y%m%d", datetime(store.created_at,"unixepoch", "localtime"))) >= (strftime("%Y%m%d", datetime(2018-10-12,"unixepoch", "localtime")))))
2/QueryAll-> []
PS. Full disclosure: I'm the current maintainer of SQLObject.
As @phd pointed out, SQLObject evaluates the expression before passing it to the backend in reduced form.
So instead of passing a string literal, we can also pass an expression directly, which SQLObject will evaluate:
def FROM_UNIXTIME(x, y):
    return sqlbuilder.func.strftime("%Y%m%d", sqlbuilder.func.datetime(x, "unixepoch", "localtime"))
Output:
SELECT store.sample FROM store WHERE (((store.name) = ('Some')) AND ((strftime("%Y%m%d", datetime(store.created_at,"unixepoch", "localtime"))) >= (strftime("%Y%m%d", datetime(2018-10-12,"unixepoch", "localtime")))))

datetime.combine() gets my Flask app and PC blocked

I am using a Flask app to get info for all daily hours from SQLAlchemy.
If SQLAlchemy's query.all() doesn't return every hour of the day, because some hours don't exist in the target table, I use a for loop to insert the missing hours before returning them with jsonify to the route.
I have a route like this:
import os
from flask import Flask, request, jsonify, render_template
from flask_sqlalchemy import SQLAlchemy
import datetime as dt

@app.route("/getall")
def get_all():
    try:
        ttt = TruckTurnaroundTime.query.filter(TruckTurnaroundTime.hour != None).all()
        for i, o in enumerate(ttt):
            if i == 0:
                if o.hour != dt.time(0, 0):
                    ttt.insert(i, TruckTurnaroundTime(dt.time(0, 0), None))
                old_o = o
                continue
            current_hour = (dt.datetime.combine(dt.date(1, 1, 1), old_o.hour) + dt.timedelta(hours=1)).time()
            if o.hour != current_hour:
                ttt.insert(i, TruckTurnaroundTime(current_hour, None))
            old_o = o
        return jsonify([e.serialize() for e in ttt])
    except Exception as e:
        return str(e)
The problem is that when I go to the route (via browser) my computer simply freezes and the "/getall" route never finishes loading.
If I get rid of these lines:

if o.hour != current_hour:
    ttt.insert(i, TruckTurnaroundTime(current_hour, None))

the problem disappears.
I think the problem is somehow related to dt.datetime.combine(), but I don't know.
This is my models.py:
from app import db

class TruckTurnaroundTime(db.Model):
    __tablename__ = 'truck_turnaround_time'
    id = db.Column(db.Integer, primary_key=True)
    hour = db.Column(db.Time())
    truck_turnaround_time = db.Column(db.Float)

    def __init__(self, hour, truck_turnaround_time):
        self.hour = hour
        self.truck_turnaround_time = truck_turnaround_time

    def __repr__(self):
        return '<id {}>'.format(self.id)

    def serialize(self):
        return {
            'hour': str(self.hour),
            'truck_turnaround_time': self.truck_turnaround_time
        }
EDIT: Python version 3.8.5
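For what it's worth, a minimal standalone sketch (plain integers standing in for the TruckTurnaroundTime rows; the guard is added only so the demo terminates) suggesting the hang comes from inserting into ttt while enumerate() is iterating it, not from dt.datetime.combine() itself: inserting at index i shifts the current element to index i + 1, so it is visited again, while old_o has already jumped past the value just inserted.

hours = [0, 2]                    # "hour 1" is missing
prev = None
for i, h in enumerate(hours):
    if i == 0:
        prev = h
        continue
    expected = prev + 1
    if h != expected:
        hours.insert(i, expected) # shifts h forward: it will be revisited
    prev = h                      # prev skips past the inserted value
    if len(hours) > 10:           # guard: the real loop has none, so it never ends
        break
print(hours)                      # keeps growing: [0, 1, 3, 3, 3, ..., 2]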

How to get execution time of postgres cursor (python) [duplicate]

I'm trying to get performance statistics on queries executed by psycopg2, but the documentation / examples still seem fuzzier than they could be.
I've at least got debugging working through the logger.
What would I need to do to access the performance data for the query? I want the number for query execution time.
Is there a method I can access, or something else I need to initialize, to output the query execution time?
Here's a pieced together extract of what I have so far:
import psycopg2
import psycopg2.extensions
from psycopg2.extras import LoggingConnection
import logging

logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__name__)

# set higher up in script
db_settings = {
    "user": user,
    "password": password,
    "host": host,
    "database": dbname,
}

query_txt = "[query_txt_from file]"

conn = psycopg2.connect(connection_factory=LoggingConnection, **db_settings)
conn.initialize(logger)
cur = conn.cursor()
cur.execute(query_txt)
and I get
DEBUG:__main__: [the query executed]
It's easy enough to set a timestamp at the start of execution and calculate the duration at the end. You'll need your own simple subclasses of LoggingConnection and LoggingCursor; see my example code below.
This is based on the source of MinTimeLoggingConnection, which you can find in psycopg2/extras.py.
import time
import psycopg2
import psycopg2.extensions
from psycopg2.extras import LoggingConnection, LoggingCursor
import logging

logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__name__)

# MyLoggingCursor simply sets self.timestamp at start of each query
class MyLoggingCursor(LoggingCursor):
    def execute(self, query, vars=None):
        self.timestamp = time.time()
        return super(MyLoggingCursor, self).execute(query, vars)

    def callproc(self, procname, vars=None):
        self.timestamp = time.time()
        return super(MyLoggingCursor, self).callproc(procname, vars)

# MyLoggingConnection:
#   a) uses MyLoggingCursor rather than the default
#   b) adds the resulting execution (+ transport) time via filter()
class MyLoggingConnection(LoggingConnection):
    def filter(self, msg, curs):
        return msg + "   %d ms" % int((time.time() - curs.timestamp) * 1000)

    def cursor(self, *args, **kwargs):
        kwargs.setdefault('cursor_factory', MyLoggingCursor)
        return LoggingConnection.cursor(self, *args, **kwargs)

db_settings = {
    ....
}

query_txt = "[query_text_from file]"

conn = psycopg2.connect(connection_factory=MyLoggingConnection, **db_settings)
conn.initialize(logger)
cur = conn.cursor()
cur.execute(query_text)
and you'll get:
DEBUG: __main__:[query] 3 ms
Within your filter() you can change the formatting, or choose not to display the message if the duration is below some threshold.
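For example, a sketch of a threshold filter in the spirit of psycopg2.extras.MinTimeLoggingConnection (LoggingConnection skips logging when filter() returns None; the 10 ms cutoff here is an arbitrary choice):

class SlowQueryLoggingConnection(MyLoggingConnection):
    def filter(self, msg, curs):
        # Only report queries slower than 10 ms; returning None suppresses the log line.
        duration_ms = int((time.time() - curs.timestamp) * 1000)
        if duration_ms < 10:
            return None
        return msg + "   %d ms" % duration_ms

# usage: psycopg2.connect(connection_factory=SlowQueryLoggingConnection, **db_settings)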

How to escape table names in SqlAlchemy

I'm working on a SQLAlchemy dialect for Apache Drill and I've run into an issue that I can't quite seem to figure out.
The basic problem is that SQLAlchemy is generating a query like the one below:
SELECT `field1`, `field2`
FROM dfs.test.data.csv LIMIT 100
which fails because data.csv needs backticks around it as shown below:
SELECT `field1`, `field2`
FROM dfs.test.`data.csv` LIMIT 100
I've defined the various visit_() functions in the dialect's compiler but these seem to have no effect.
This took some time to figure out, and I thought I'd post the result so that if anyone else runs into this issue, they'll have a point of reference as to how to solve it.
Here is the final working code:
https://github.com/JohnOmernik/sqlalchemy-drill/blob/master/sqlalchemy_drill/base.py
Here is what ultimately solved the issue:
def __init__(self, dialect):
    super(DrillIdentifierPreparer, self).__init__(dialect, initial_quote='`', final_quote='`')

def format_drill_table(self, schema, isFile=True):
    formatted_schema = ""
    num_dots = schema.count(".")
    schema = schema.replace('`', '')

    # For a file, the last section will be the file extension
    schema_parts = schema.split('.')

    if isFile and num_dots == 3:
        # Case for File + Workspace
        plugin = schema_parts[0]
        workspace = schema_parts[1]
        table = schema_parts[2] + "." + schema_parts[3]
        formatted_schema = plugin + ".`" + workspace + "`.`" + table + "`"
    elif isFile and num_dots == 2:
        # Case for file and no workspace
        plugin = schema_parts[0]
        formatted_schema = plugin + "." + schema_parts[1] + ".`" + schema_parts[2] + "`"
    else:
        # Case for non-file plugins or incomplete schema parts
        for part in schema_parts:
            quoted_part = "`" + part + "`"
            if len(formatted_schema) > 0:
                formatted_schema += "." + quoted_part
            else:
                formatted_schema = quoted_part
    return formatted_schema
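As a quick usage sketch (hypothetical: it assumes you have a constructed Drill dialect instance to hand to the preparer):

preparer = DrillIdentifierPreparer(dialect)
print(preparer.format_drill_table("dfs.test.data.csv"))
# -> dfs.`test`.`data.csv`
# Drill also accepts the workspace quoted, so this should be equivalent to the
# dfs.test.`data.csv` form shown in the question.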

