I am using a Flask app to get info for all daily hours from SQLAlchemy.
If the SQLAlchemy query.all() doesn't return every hour of the day, because some hours don't exist in the target table, I use a for loop to insert the missing hours before returning them with jsonify from the route.
I have a route like this:
import os
from flask import Flask, request, jsonify, render_template
from flask_sqlalchemy import SQLAlchemy
import datetime as dt

@app.route("/getall")
def get_all():
    try:
        ttt = TruckTurnaroundTime.query.filter(TruckTurnaroundTime.hour != None).all()
        for i, o in enumerate(ttt):
            if i == 0:
                if o.hour != dt.time(0, 0):
                    ttt.insert(i, TruckTurnaroundTime(dt.time(0, 0), None))
                old_o = o
                continue
            current_hour = (dt.datetime.combine(dt.date(1, 1, 1), old_o.hour) + dt.timedelta(hours=1)).time()
            if o.hour != current_hour:
                ttt.insert(i, TruckTurnaroundTime(current_hour, None))
            old_o = o
        return jsonify([e.serialize() for e in ttt])
    except Exception as e:
        return str(e)
The problem is that when I go to the route (via browser), the request simply blocks and the "/getall" route never loads.
If I get rid of these lines:
            if o.hour != current_hour:
                ttt.insert(i, TruckTurnaroundTime(current_hour, None))
the problem disappears.
I think the problem is somehow related to dt.datetime.combine(), but I don't know.
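For comparison, here is a minimal sketch that builds a separate result list instead of inserting into ttt while enumerate(ttt) is iterating over it (inserting at index i shifts the current element to i + 1, so the loop revisits it and keeps inserting). It assumes the same model and serialize() method; the route and function names are just for illustration:

@app.route("/getall_filled")  # hypothetical route, for illustration only
def get_all_filled():
    rows = TruckTurnaroundTime.query.filter(TruckTurnaroundTime.hour != None).all()
    # index the stored rows by their hour
    by_hour = {r.hour: r for r in rows}
    filled = []
    for h in range(24):
        hour = dt.time(h, 0)
        # reuse the stored row when it exists, otherwise add an empty placeholder
        filled.append(by_hour.get(hour, TruckTurnaroundTime(hour, None)))
    return jsonify([e.serialize() for e in filled])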
This is my models.py:
from app import db

class TruckTurnaroundTime(db.Model):
    __tablename__ = 'truck_turnaround_time'

    id = db.Column(db.Integer, primary_key=True)
    hour = db.Column(db.Time())
    truck_turnaround_time = db.Column(db.Float)

    def __init__(self, hour, truck_turnaround_time):
        self.hour = hour
        self.truck_turnaround_time = truck_turnaround_time

    def __repr__(self):
        return '<id {}>'.format(self.id)

    def serialize(self):
        return {
            'hour': str(self.hour),
            'truck_turnaround_time': self.truck_turnaround_time
        }
EDIT: Python version 3.8.5
I am trying to mock sqlbuilder.func for test cases with pytest.
I successfully mocked sqlbuilder.func.TO_BASE64 with the correct output, but when I mocked sqlbuilder.func.FROM_UNIXTIME I didn't get any error, yet the generated query is incorrect. Below is a minimal working example of the problem.
models.py
from sqlobject import (
    sqlbuilder,
    sqlhub,
    SQLObject,
    StringCol,
    BLOBCol,
    TimestampCol,
)

class Store(SQLObject):
    name = StringCol()
    sample = BLOBCol()
    createdAt = TimestampCol()

DATE_FORMAT = "%Y-%m-%d"

def retrieve(name):
    query = sqlbuilder.Select([
            sqlbuilder.func.TO_BASE64(Store.q.sample),
        ],
        sqlbuilder.AND(
            Store.q.name == name,
            sqlbuilder.func.FROM_UNIXTIME(Store.q.createdAt, DATE_FORMAT) >= sqlbuilder.func.FROM_UNIXTIME("2018-10-12", DATE_FORMAT)
        )
    )
    connection = sqlhub.getConnection()
    query = connection.sqlrepr(query)
    print(query)
    queryResult = connection.queryAll(query)
    return queryResult
conftest.py
import pytest
from models import Store
from sqlobject import sqlhub
from sqlobject.sqlite import sqliteconnection

@pytest.fixture(autouse=True, scope="session")
def sqlite_db_session(tmpdir_factory):
    file = tmpdir_factory.mktemp("db").join("sqlite.db")
    conn = sqliteconnection.SQLiteConnection(str(file))
    sqlhub.processConnection = conn
    init_tables()
    yield conn
    conn.close()

def init_tables():
    Store.createTable(ifNotExists=True)
test_ex1.py
import pytest
from sqlobject import sqlbuilder
from models import retrieve
try:
    import mock
    from mock import MagicMock
except ImportError:
    from unittest import mock
    from unittest.mock import MagicMock

def TO_BASE64(x):
    return x

def FROM_UNIXTIME(x, y):
    return 'strftime("%Y%m%d", datetime({},"unixepoch", "localtime"))'.format(x)

# @mock.patch("sqlobject.sqlbuilder.func.TO_BASE64")
# @mock.patch("sqlobject.sqlbuilder.func.TO_BASE64", MagicMock(side_effect=lambda x: x))
# @mock.patch("sqlobject.sqlbuilder.func.TO_BASE64", new_callable=MagicMock(side_effect=lambda x: x))
@mock.patch("sqlobject.sqlbuilder.func.TO_BASE64", TO_BASE64)
@mock.patch("sqlobject.sqlbuilder.func.FROM_UNIXTIME", FROM_UNIXTIME)
def test_retrieve():
    result = retrieve('Some')
    assert result == []
Current SQL:
SELECT store.sample FROM store WHERE (((store.name) = ('Some')) AND (1))
Expected SQL:
SELECT
store.sample
FROM
store
WHERE
store.name = 'Some'
AND
strftime(
'%Y%m%d',
datetime(store.created_at, 'unixepoch', 'localtime')
) >= strftime(
'%Y%m%d',
datetime('2018-10-12', 'unixepoch', 'localtime')
)
Edit Example
#! /usr/bin/env python

from sqlobject import *

__connection__ = "sqlite:/:memory:?debug=1&debugOutput=1"

try:
    import mock
    from mock import MagicMock
except ImportError:
    from unittest import mock
    from unittest.mock import MagicMock

class Store(SQLObject):
    name = StringCol()
    sample = BLOBCol()
    createdAt = TimestampCol()

Store.createTable()

DATE_FORMAT = "%Y-%m-%d"

def retrieve(name):
    query = sqlbuilder.Select([
            sqlbuilder.func.TO_BASE64(Store.q.sample),
        ],
        sqlbuilder.AND(
            Store.q.name == name,
            sqlbuilder.func.FROM_UNIXTIME(Store.q.createdAt, DATE_FORMAT) >= sqlbuilder.func.FROM_UNIXTIME("2018-10-12", DATE_FORMAT)
        )
    )
    connection = Store._connection
    query = connection.sqlrepr(query)
    queryResult = connection.queryAll(query)
    return queryResult

def TO_BASE64(x):
    return x

def FROM_UNIXTIME(x, y):
    return 'strftime("%Y%m%d", datetime({},"unixepoch", "localtime"))'.format(x)

for p in [
    mock.patch("sqlobject.sqlbuilder.func.TO_BASE64", TO_BASE64),
    mock.patch("sqlobject.sqlbuilder.func.FROM_UNIXTIME", FROM_UNIXTIME),
]:
    p.start()

retrieve('Some')

mock.patch.stopall()
By default, sqlbuilder.func is an SQLExpression that passes its attributes (sqlbuilder.func.datetime, for example) to the SQL backend as constants (sqlbuilder.func is actually an alias for sqlbuilder.ConstantSpace). See the docs on SQLExpression, the FAQ, and the code for func.
When you mock an attribute in the func namespace, it's evaluated by SQLObject and passed to the backend in reduced form. If you want to return a string literal from the mocking function, you need to tell SQLObject it's a value that has to be passed to the backend as is, unevaluated. The way to do that is to wrap the literal in SQLConstant, like this:
def FROM_UNIXTIME(x, y):
    return sqlbuilder.SQLConstant('strftime("%Y%m%d", datetime({},"unixepoch", "localtime"))'.format(x))
See SQLConstant.
The entire test script now looks like this:
#! /usr/bin/env python3.7

from sqlobject import *

__connection__ = "sqlite:/:memory:?debug=1&debugOutput=1"

try:
    import mock
    from mock import MagicMock
except ImportError:
    from unittest import mock
    from unittest.mock import MagicMock

class Store(SQLObject):
    name = StringCol()
    sample = BLOBCol()
    createdAt = TimestampCol()

Store.createTable()

DATE_FORMAT = "%Y-%m-%d"

def retrieve(name):
    query = sqlbuilder.Select([
            sqlbuilder.func.TO_BASE64(Store.q.sample),
        ],
        sqlbuilder.AND(
            Store.q.name == name,
            sqlbuilder.func.FROM_UNIXTIME(Store.q.createdAt, DATE_FORMAT) >= sqlbuilder.func.FROM_UNIXTIME("2018-10-12", DATE_FORMAT)
        )
    )
    connection = Store._connection
    query = connection.sqlrepr(query)
    queryResult = connection.queryAll(query)
    return queryResult

def TO_BASE64(x):
    return x

def FROM_UNIXTIME(x, y):
    return sqlbuilder.SQLConstant('strftime("%Y%m%d", datetime({},"unixepoch", "localtime"))'.format(x))

for p in [
    mock.patch("sqlobject.sqlbuilder.func.TO_BASE64", TO_BASE64),
    mock.patch("sqlobject.sqlbuilder.func.FROM_UNIXTIME", FROM_UNIXTIME),
]:
    p.start()

retrieve('Some')

mock.patch.stopall()
The output is:
1/Query : CREATE TABLE store (
id INTEGER PRIMARY KEY AUTOINCREMENT,
name TEXT,
sample TEXT,
created_at TIMESTAMP
)
1/QueryR : CREATE TABLE store (
id INTEGER PRIMARY KEY AUTOINCREMENT,
name TEXT,
sample TEXT,
created_at TIMESTAMP
)
2/QueryAll: SELECT store.sample FROM store WHERE (((store.name) = ('Some')) AND ((strftime("%Y%m%d", datetime(store.created_at,"unixepoch", "localtime"))) >= (strftime("%Y%m%d", datetime(2018-10-12,"unixepoch", "localtime")))))
2/QueryR : SELECT store.sample FROM store WHERE (((store.name) = ('Some')) AND ((strftime("%Y%m%d", datetime(store.created_at,"unixepoch", "localtime"))) >= (strftime("%Y%m%d", datetime(2018-10-12,"unixepoch", "localtime")))))
2/QueryAll-> []
PS. Full disclosure: I'm the current maintainer of SQLObject.
As @phd pointed out, SQLObject evaluates the expression before passing it to the backend in reduced form.
We can therefore also pass an expression directly, which SQLObject will evaluate, so instead of returning a string literal we can do the following:
def FROM_UNIXTIME(x, y):
    return sqlbuilder.func.strftime("%Y%m%d", sqlbuilder.func.datetime(x, "unixepoch", "localtime"))
Output:
SELECT store.sample FROM store WHERE (((store.name) = ('Some')) AND ((strftime("%Y%m%d", datetime(store.created_at,"unixepoch", "localtime"))) >= (strftime("%Y%m%d", datetime(2018-10-12,"unixepoch", "localtime")))))
I have a Python function deployed on Azure Functions. I use a TXT file to hold a timestamp that I need again on the next run.
Locally, this function runs without problems, but I receive the error below when it runs on Azure:
Result: Failure Exception: OSError: [Errno 30] Read-only file system: 'date_time.txt' Stack:
In my search I found solutions such as TempFile, but I need the value of date_time.txt for the next run.
My code:
import os
import psycopg2
import datetime
import logging
import tempfile
import pandas as pd
import json
import sqlalchemy
from azure.servicebus import ServiceBusClient, ServiceBusMessage
import azure.functions as func

connstr = "****"
topic_name = "***"
subscription_name = "***"

def get_engine(database='portal_rms', username='***', password='**', host='***', port=5432):
    engine_string = f"postgresql+psycopg2://{username}:{password}@{host}:{port}/{database}"
    engine = sqlalchemy.create_engine(engine_string)
    return engine

def read_from_db(table_name, date_time):
    acceleration_array = []
    engine = get_engine()
    connection = engine.connect()
    metadata = sqlalchemy.MetaData()
    pinconnector_attacheddevicelogdata = sqlalchemy.Table(
        table_name, metadata, autoload=True, autoload_with=engine)
    query = sqlalchemy.select([pinconnector_attacheddevicelogdata]).where(pinconnector_attacheddevicelogdata.columns.capture_time > date_time)\
        .order_by(pinconnector_attacheddevicelogdata.columns.capture_time).limit(100)
    ResultProxy = connection.execute(query)
    ResultSet = ResultProxy.fetchall()
    engine.dispose()
    return ResultSet

def get_date_time(date_time_string):
    [date, time] = date_time_string.split(" ")
    [year, month, day] = date.split("-")
    [hour, minute, second] = time.split(":")
    [second, microsecond] = second.split(".")
    [year, month, day, hour, minute, second, microsecond] = list(
        map(lambda x: int(x), [year, month, day, hour, minute, second, microsecond]))
    return datetime.datetime(year, month, day, hour, minute, second, microsecond, tzinfo=psycopg2.tz.FixedOffsetTimezone(offset=0, name=None))

def write_date_time(date_time, address="date_time.txt"):
    file1 = open(address, "w+")
    file1.write((str(date_time)).split("+")[0])
    file1.close()

def read_date_time(address="date_time.txt"):
    file1 = open(address, "r")
    date_time = file1.readline()
    date_time = get_date_time(date_time)
    file1.close()
    return date_time

def get_query_latest_date_time(db_query, date_time):
    for item in db_query:
        if item["capture_time"] > date_time:
            date_time = item["capture_time"]
    return date_time

def transform(query_row):
    query_row = dict(query_row)
    for item in ['capture_time', 'received_time', 'read_time']:
        query_row[item] = str(query_row[item]).split("+")[0]
    return query_row

date_time = read_date_time()
db_query = read_from_db("pinconnector_attacheddevicelogdata", date_time)
write_date_time(db_query[-1]["capture_time"])
# print(db_query)
# data_send = json.dumps(db_query)
data_send = json.dumps(list(map(lambda x: transform(x), db_query)))

with ServiceBusClient.from_connection_string(connstr) as client:
    with client.get_topic_sender(topic_name) as sender:
        sender.send_messages(ServiceBusMessage(data_send))
The reason could be an incompatible OS version; please consider using a custom container in this case.
You may also refer to this similar issue: https://learn.microsoft.com/en-us/answers/questions/67126/failure-exception-oserror-errno-30-read-only-file.html
Flask-Dance cannot add more than one entry. This also causes people to log into other users' accounts. This is what the db looks like all the time:
We are using SQLAlchemy to cache user logins.
dbsl = SQLAlchemy()

d_oauth = discord_oauth.make_discord_blueprint(
    client_id="id",
    client_secret="secret",
    scope=["guilds", "identify"],
    redirect_to="index",
    login_url="/",
    authorized_url="/authorized"
)

class OAuth(OAuthConsumerMixin, dbsl.Model):
    pass

d_oauth.storage = SQLAlchemyStorage(OAuth, dbsl.session)
dbsl.create_all()
This is our Flask app:
def create_app():
    app_db = Flask(__name__)
    app_db.config["SECRET_KEY"] = "please-work"
    app_db.config['SQLALCHEMY_DATABASE_URI'] = "sqlite:///configs/cache.sqlite3"
    app_db.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
    app_db.config["SESSION_TYPE"] = "sqlalchemy"
    app_db.config["SESSION_SQLALCHEMY_TABLE"] = "flask_dance_oauth"
    app_db.config.from_object(__name__)
    # sess = Session(app_db)
    # sess.init_app(app_db)
    dbsl.init_app(app_db)
    app_db.app_context().push()
    return app_db
We really don't know how to fix this. Can anyone help?
So I've been stuck on this problem for a while now. One file I have works and allows me to use the Flask framework, with this code:
from flask import Flask, render_template
from flask import request
from flask import *
from datetime import datetime
from functools import wraps
import time
import csv

app = Flask(__name__)
app.secret_key = 'lukey'

# displays index page
@app.route('/')
def home():
    return render_template('index.html')

# displays welcome page
@app.route('/welcome')
def welcome():
    return render_template('welcome.html')

# allows user to login
@app.route('/log', methods=['GET', 'POST'])
def log():
    error = None
    if request.method == "POST":
        if request.form['user'] != 'admin' or request.form['pass'] != 'admin':
            error = "Invalid credentials"
        else:
            session['logged_in'] = True
            return redirect(url_for('welcome'))
    return render_template('log.html', error=error)

# allows user to logout
@app.route('/logout')
def logout():
    session.pop('logged_in', None)
    flash('you were logged out')
    return redirect(url_for('log'))

# function to check if admin is logged in
def login_required(test):
    @wraps(test)
    def wrap(*args, **kwargs):
        if 'logged_in' in session:
            return test(*args, **kwargs)
        else:
            flash('you need to login before using admin tools')
            return redirect(url_for('log'))
    return wrap

# Displays map
@app.route('/map')
def map():
    return render_template('map.html')

# Displays gallery
@app.route('/gallery')
def gallery():
    return render_template('gallery.html')

# Allows users to view previous bookings
@app.route('/bookings', methods=['GET'])
def bookings():
    bookingsFile = 'static\\bookings.csv'
    data = readFile(bookingsFile)
    return render_template('bookings.html', data=data)

# Allows user to request a booking
@app.route('/addBookings', methods=['POST'])
def addBookings():
    bookingsFile = 'static\\bookings.csv'
    data = readFile(bookingsFile)
    bookingName = request.form['name']
    bookingEmail = request.form['email']
    bookingDate = request.form['date']
    # Converts the date string to unix timestamp
    bookingDateUnix = time.mktime(datetime.strptime(request.form['date'], "%Y-%m-%d").timetuple())
    numberOfDays = request.form['days']
    # calculates the end date in unix form
    endDateUnix = int(numberOfDays) * 24 * 60 * 60 + int(bookingDateUnix)
    # converts the unix form end date to string
    newDate = datetime.fromtimestamp(int(endDateUnix)).strftime('%Y-%m-%d')
    # Calculates the price of the user's stay
    price = int(numberOfDays) * 200
    # Will be changed by admin to confirm bookings
    hasBeenBooked = 'Awaiting confirmation'
    bookingsFile = 'static\\bookings.csv'
    for row in data:
        prevBookingDateUnix = row[7]
        prevEndDateUnix = row[8]
        # Testing no double bookings
        if row[2] == bookingDate or row[6] == newDate:
            flash('This time has already been allocated')
            return redirect(url_for('bookings'))
        # Testing there are no crossover points
        elif float(prevBookingDateUnix) < bookingDateUnix and float(prevEndDateUnix) < bookingDateUnix and bookingDateUnix < endDateUnix:
            flash('valid input')
        else:
            flash('invalid input')
            return redirect(url_for('bookings'))
    # parameters parsed from input
    newEntry = [bookingName, bookingEmail, bookingDate, numberOfDays, hasBeenBooked, price, newDate, bookingDateUnix, endDateUnix]
    data.append(newEntry)
    writeFile(data, bookingsFile)
    return render_template('bookings.html', data=data)

# allows viewing of comments in csv file
@app.route('/comments', methods=['GET'])
def comments():
    commentsFile = 'static\\comments.csv'
    data = readFile(commentsFile)
    return render_template('comments.html', data=data)

# adding comments to csv file
@app.route('/addComments', methods=['POST'])
def addComments():
    # add an entry to the data
    # read the data from file
    commentsFile = 'static\\comments.csv'
    data = readFile(commentsFile)
    # add the new entry
    commentorsName = request.form['commentorsName']
    comment = request.form['comment']
    commentDate = datetime.now().strftime("%Y-%m-%d / %H:%M")
    newEntry = [commentorsName, comment, commentDate]
    data.append(newEntry)
    # save the data to the file
    writeFile(data, commentsFile)
    return render_template('comments.html', data=data)

# Ensures the administrator is logged in before comments are deleted
@app.route('/deleteComments', methods=['POST'])
@login_required
def deleteComments():
    f = open('static\\comments.csv', 'w')
    f.truncate()
    f.close()
    return render_template('comments.html')

# Ensures the administrator is logged in before bookings are deleted
@app.route('/deleteBookings', methods=['POST'])
@login_required
def deleteBookings():
    f = open('static\\bookings.csv', 'w')
    f.truncate()
    f.close()
    return render_template('bookings.html')

def readFile(aFile):
    # read in 'aFile'
    with open(aFile, 'r') as inFile:
        reader = csv.reader(inFile)
        data = [row for row in reader]
    return data

def writeFile(aList, aFile):
    # write 'aList' to 'aFile'
    with open(aFile, 'w', newline='') as outFile:
        writer = csv.writer(outFile)
        writer.writerows(aList)
    return

if __name__ == '__main__':
    app.run(debug=True)
But this code throws the error: No module named 'flask'
#!/usr/bin/python3.4
#
# Small script to show PostgreSQL and Psycopg together
#
from flask import Flask, render_template
from flask import request
from flask import *
from datetime import datetime
from functools import wraps
import time
import csv
import psycopg2

app = Flask(__name__)
app.secret_key = 'lukey'

def getConn():
    connStr = ("dbname='test' user='lukey' password='lukey'")
    conn = psycopg2.connect(connStr)
    return conn

@app.route('/')
def home():
    return render_template('index.html')

@app.route('/displayStudent', methods=['GET'])
def displayStudent():
    residence = request.args['residence']
    try:
        conn = None
        conn = getConn()
        cur = conn.cursor()
        cur.execute('SET search_path to public')
        cur.execute('SELECT stu_id,student.name,course.name,home_town FROM student,\
            course WHERE course = course_id AND student.residence = %s', [residence])
        rows = cur.fetchall()
        if rows:
            return render_template('stu.html', rows=rows, residence=residence)
        else:
            return render_template('index.html', msg1='no data found')
    except Exception as e:
        return render_template('index.html', msg1='No data found', error1=e)
    finally:
        if conn:
            conn.close()

# @app.route('/addStudent', methods=['GET', 'POST'])
# def addStudent():

if __name__ == '__main__':
    app.run(debug=True)
I feel like the problem has something to do with the versions of Python/Flask/pip I'm using. Any ideas? Thank you.
Your Python version is 2.X.
Take a look at this question and its answers.
Your best bet is to use virtualenv, as it makes handling package versions very simple. The accepted answer includes the proper command prompt commands if you want to use Python 3 for this app:
virtualenv -p C:\Python34\python.exe py3env
py3env\Scripts\activate
pip install package-name
I would recommend using Anaconda. Download, install, then run:
conda install flask
And you're done.
I am trying to set a global variable as in:
# -*- coding: utf8 -*-
from flask import Flask, render_template, redirect, flash, url_for, session, request
from sqlalchemy.sql import text
from sqlalchemy.orm import aliased, Query
from sqlalchemy import exc
from flask_bootstrap import Bootstrap
from .forms import *
import zlib
# from sqlalchemy.orm.query import limit, order_by
from dbmodel import *

app = Flask(__name__)
app.secret_key = 'A0Zr98j/3yX R~XHH!jmN]LWX/,?RT'
Bootstrap(app)

MAX_ITEMS = 50
MAX_ZIPS = 5
MAX_ORDERS = 20
clientMaxSQL = "SELECT * FROM CLIENT WHERE 1"

engine, dbsession = dbconnect()

@app.route("/clientRegister", methods=['GET', 'POST'])
def clientRegister():
    form = ClientQuery()
    global clientMaxSQL
    flash('maxSql at start %s ' % (clientMaxSQL))
    if form.reset.data == "True":
        flash('RESET TABLE FILTERS')
        clientMaxSQL = "SELECT * FROM CLIENT WHERE 1"
    sql = clientMaxSQL
    if form.validate_on_submit():
        if len(form.compareVal.data.strip()) > 0 and (any(c.isalpha() for c in form.compareVal.data) or any(c.isdigit() for c in form.compareVal.data)):
            sql = sql + " AND " + form.columnSelection.data + form.comparisonSelection.data + "'" + form.compareVal.data + "'"
    start = int(request.args.get('start', '0'))
    prev = start - MAX_ITEMS
    nextStart = start + MAX_ITEMS
    tmpSQL = clientMaxSQL
    clientMaxSQL = sql
    sql = sql + " LIMIT " + str(start) + "," + str(nextStart)
    try:
        clients = engine.execute(sql)
        maxStart = engine.execute(clientMaxSQL).rowcount
        flash('Attempting Query: %s' % (sql))
    except:
        flash('Invalid Comparison of %s' % (form.compareVal.data))
        sql = tmpSQL + " LIMIT " + str(start) + "," + str(nextStart)
        clientMaxSQL = tmpSQL
        clients = engine.execute(sql)
        maxStart = engine.execute(clientMaxSQL)
    flash('maxSql at end %s ' % (clientMaxSQL))
    return render_template("clientRegister.html", form=form, maxStart=maxStart, clients=clients, start=start, prev=prev, nextStart=nextStart)
I am getting some strange results. It seems as if this statement (declared above all of my function definitions) executes every once in a while on its own. Is this possible? It is imperative to my web app that this runs only once, otherwise I will lose the current state of my query.
I have added all the code in the program that ever changes the variable; it is still resetting on its own.
Use the @app.before_first_request decorator on the function to make it run only once.
As in:
@app.before_first_request
def function_to_run_only_once():
    # your statement(s)
    pass
This should make sure that those statements execute only once in the entire lifetime of your program.
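Applied to the code in the question, a minimal sketch might look like the following. It is only an illustration under the assumption that clientMaxSQL is the state that needs to be initialized once; init_client_filter is a hypothetical name:

clientMaxSQL = None  # set once before the first request is handled

@app.before_first_request
def init_client_filter():
    # runs a single time per worker process, just before the first request
    global clientMaxSQL
    clientMaxSQL = "SELECT * FROM CLIENT WHERE 1"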