Why is the database not refreshing after delete? - python

In the DB I have two events:
{1: {'title': 'APPLE'}, 2: {'title': 'BANANA'}}
As you can see below, I am calling the deletefromDB function on refresh with the ID of the second event, and I am printing out the data (the JSON above). My issue is that the data only changes after I restart my Flask server; until then it's the same JSON output. Can somebody explain what I am doing wrong?
this is my code:
import sqlite3
from flask import Flask, render_template, request

app = Flask(__name__)

data = {}
conn = sqlite3.connect('events.db')
cursor = conn.cursor()

# INIT
def initDB():
    with conn:
        cursor.execute("SELECT * FROM Events")
        rows = cursor.fetchall()
        for row in rows:
            temp = {}
            temp["title"] = row[1]
            data[row[0]] = temp
    print(data)

initDB()

def deletefromDB(eventID):
    query = 'DELETE FROM Events WHERE EventId = {}'.format(eventID)
    with conn:
        cursor.execute(query)
    print(query)
    initDB()

@app.route('/')
def index():
    deletefromDB(2)
    return 'Index Page'

if __name__ == "__main__":
    app.run()

You are caching the table's result in the data dict when the database is initialized. Then you delete a row from the table without reflecting that change in the data dict; note that initDB() only adds entries, it never removes stale ones. Either rebuild the entire data dict after deletion (clearing it first) or remove the entry directly from it:
def deletefromDB(eventID):
    query = 'DELETE FROM Events WHERE EventId = {}'.format(eventID)
    with conn:
        cursor.execute(query)
        del data[eventID]
    print(query)
    initDB()
Be careful: you build a formatted SQL string without escaping the parameter (SQL injection alert). Let cursor.execute substitute the variables instead.
query = 'DELETE FROM Events WHERE EventId = ?'
# ...
cursor.execute(query, (eventID,))
Do it whenever you substitute something into a SQL query!
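Putting both fixes together, a minimal sketch of the delete helper (reusing the Events table and the data cache from the question) could look like this:
def deletefromDB(eventID):
    # let sqlite3 bind the value instead of formatting it into the string
    with conn:
        cursor.execute('DELETE FROM Events WHERE EventId = ?', (eventID,))
    # keep the in-memory cache in sync with the table
    data.pop(eventID, None)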

Related

using :memory: returns 'sqlite3.OperationalError: no such table:'

I am working on a messaging RESTful app using Flask in Python.
I need to implement an in-memory solution, but none of the solutions I found online are working.
Find the code below:
# Helper
def _get_message(id=None):
    """Return a list of message objects (as dicts)"""
    with sqlite3.connect(':memory:') as conn:
        c = conn.cursor()
        if id:
            q = "SELECT * FROM messages WHERE id=? ORDER BY dt DESC"
            rows = c.execute(q, (id,))
        else:
            q = "SELECT * FROM messages ORDER BY dt DESC"
            rows = c.execute(q)
        return [{'id': r[0], 'dt': r[1], 'message': r[2], 'sender': r[3]} for r in rows]

def _add_message(message, sender):
    with sqlite3.connect(':memory:') as conn:
        c = conn.cursor()
        id = str(uuid.uuid4().hex)
        q = "INSERT INTO messages VALUES (?, datetime('now'),?,?)"
        c.execute(q, (id, message, sender))
        conn.commit()
        return c.lastrowid
..and so on with other functions..
Then I am running the app and initialising the database (I found this online, though it doesn't use an in-memory solution). I'd like it to use the schema from my db_init.sql:
if __name__ == '__main__':
    # Test whether the database exists; if not, create it and create the table
    if not os.path.exists(':memory:'):
        try:
            conn = sqlite3.connect(':memory:')
            # Absolute path needed for testing environment
            # sql_path = os.path.join(app.config['APP_ROOT'], 'db_init.sql')
            sql_path = os.path.join('db_init.sql')
            cmd = open(sql_path, 'r').read()
            c = conn.cursor()
            c.execute(cmd)
            conn.commit()
            conn.close()
        except IOError:
            print("Couldn't initialize the database, exiting...")
            raise
        except sqlite3.OperationalError:
            print("Couldn't execute the SQL, exiting...")
            raise
    app.run(host='0.0.0.0')
db_init.sql:
CREATE TABLE IF NOT EXISTS messages (
    id TEXT NOT NULL,
    dt TEXT NOT NULL,
    message TEXT NOT NULL,
    sender TEXT NOT NULL
);
But I get this error: 'sqlite3.OperationalError: no such table:'
I have also tried "file::memory:", "DataSource:memory:", and "file::memory:?cache=shared" (also with uri=True).
How can I use an in-memory database together with my db_init.sql initialisation?
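For context, every sqlite3.connect(':memory:') call opens a brand-new, empty database, which is why the table created at startup no longer exists when the helper functions run. A minimal sketch of one workaround, using the shared-cache URI mentioned above (the key detail is keeping one master connection open for the lifetime of the process):
import sqlite3

DB_URI = 'file::memory:?cache=shared'

# keep this connection open for the app's lifetime; the shared in-memory
# database is destroyed once the last connection to it closes
master_conn = sqlite3.connect(DB_URI, uri=True)
with open('db_init.sql') as f:
    master_conn.executescript(f.read())

def _get_connection():
    # helpers open short-lived connections to the same shared database
    return sqlite3.connect(DB_URI, uri=True)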

How to add a list of items into sqlite3 with Flask?

I've recently started with Flask blueprints.
There is a list of dicts => [{'title':'ajsdhkd', 'author': 'askjdh qweqwqw'}, {'title':'ajsdhkd', 'author': 'askjdh qweqwqw'}] that I need to add to SQL.
I want to initialize my database with a table that will contain these dicts, so that I can later query it from a Flask blueprint route.
But I don't understand how it can be done.
In db.py I have created :
import sqlite3
import click
from flask import current_app, g
from flask.cli import with_appcontext
def get_db():
if 'db' not in g:
g.db = sqlite3.connect(
current_app.config['DATABASE'],
detect_types=sqlite3.PARSE_DECLTYPES
)
g.db.row_factory = sqlite3.Row
return g.db
def query_db(query, args=(), one=False):
cur = get_db().execute(query, args)
rv = cur.fetchall()
cur.close()
return (rv[0] if rv else None) if one else rv
def close_db(e=None):
db = g.pop('db', None)
if db is not None:
db.close()
def init_db():
db = get_db()
with current_app.open_resource('schema.sql') as f:
db.executescript(f.read().decode('utf8'))
#click.command('init-db')
#with_appcontext
def init_db_command():
init_db()
click.echo('Initialized the database.')
def init_app(app):
app.teardown_appcontext(close_db)
app.cli.add_command(init_db_command)
There is a schema.sql file with:
DROP TABLE IF EXISTS books;
CREATE TABLE books (
    title varchar(50),
    author varchar(100)
);
.....
Using plain sqlite3, the function below should create a SQLite table named "books" and then add the list of dicts you describe to that table. You may want to substitute current_app.config['DATABASE'] for 'test.db'.
import sqlite3

def init_db():
    data = [{'title':'ajsdhkd', 'author': 'askjdh qweqwqw'}, {'title':'ajsdhkd', 'author': 'askjdh qweqwqw'}]
    conn = sqlite3.connect('test.db', detect_types=sqlite3.PARSE_DECLTYPES)
    cursor = conn.cursor()
    cursor.execute("CREATE TABLE 'books' (title TEXT, author TEXT);")
    sql = "INSERT INTO 'books' (title, author) VALUES (?, ?);"
    for book in data:
        cursor.execute(sql, (book['title'], book['author']))
    conn.commit()
    conn.close()
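As a side note, sqlite3's executemany can replace the insert loop; it binds each tuple from an iterable against the same statement:
rows = [(book['title'], book['author']) for book in data]
cursor.executemany("INSERT INTO books (title, author) VALUES (?, ?);", rows)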
You may also consider this alternative that uses my "easy_db" library to simplify building the "books" table and adding data to it. This function does the same thing as the previous function.
import easy_db

def init_db2():
    data = [{'title':'ajsdhkd', 'author': 'askjdh qweqwqw'}, {'title':'ajsdhkd', 'author': 'askjdh qweqwqw'}]
    db = easy_db.DataBase('test.db')
    db.append('books', data)
The single db.append('books', data) line both creates the "books" table and adds the data list of dicts to that table. The "title" and "author" columns are automatically created with the TEXT type based on the input data.

Storing dataframe in a sqlite database using Flask API

So I am fairly new to Flask, and I am currently trying to create a Flask API for a project I am working on. However, there are a couple of issues I am facing.
For my first issue, I can't get the dataframe from the first function to work in my second function. I am just wondering how I can access data_1 in the second function.
Code:
from flask import Flask
from sqlalchemy import create_engine
import sqlite3 as sql
import pandas as pd
import datetime
import os

app = Flask(__name__)

@app.route('/', methods=['GET'])
def get_data():
    ...
    data_1 = ...
    #print(data_1.head(n=10))
    return "hello"

@app.route('/table1', methods=['GET'])
def store_table1_data_df():
    db_path = os.path.join(os.path.dirname(__file__), 'table1.db')
    engine = create_engine('sqlite:///{}'.format(db_path), echo=True)
    sqlite_connection = engine.connect()
    sqlite_table = 'table1'
    data_1.to_sql(sqlite_table, sqlite_connection, if_exists='append')
    sqlite_connection.close()
    return "table1"
For my second issue: is there a better way of storing a dataframe within a Flask API, using SQLAlchemy or sqlite3?
More context on what data_1 is: it only ever holds the past 15 days/records, e.g. 6/15/2021-6/30/2021. However, if I fetch the newest data_1 tomorrow, it will contain 6/16/2021-7/01/2021. How can I append just 07/01/2021 to the old data without creating duplicate records from 06/16/2021, and without creating two more functions and an extra db file?
@app.route('/table1', methods=['GET'])
def store_table1_data_df():
    db_path = os.path.join(os.path.dirname(__file__), 'table1.db')
    engine = create_engine('sqlite:///{}'.format(db_path), echo=True)
    sqlite_connection = engine.connect()
    sqlite_table = 'table1'
    data_1.to_sql(sqlite_table, sqlite_connection, if_exists='append')
    sqlite_connection.close()
    return "table1"

@app.route('/table2', methods=['GET'])
def store_table2_data_df():
    db_path2 = os.path.join(os.path.dirname(__file__), 'table2.db')
    engine2 = create_engine('sqlite:///{}'.format(db_path2), echo=True)
    sqlite_connection2 = engine2.connect()
    sqlite_table2 = 'table2'
    data_1.to_sql(sqlite_table2, sqlite_connection2, if_exists='append')
    sqlite_connection2.close()
    return "table2"

# What I probably have down below is not the correct way to solve this problem
@app.route('/table1', methods=['GET'])
def merge_tables():  # the original function name was lost in the post
    conn = sql.connect("table1.db")
    cur = conn.cursor()
    cur.execute("ATTACH 'table2.db' as 'table2' ")
    conn.commit()
    table_3 = pd.read_sql_query("SELECT DISTINCT date, value FROM table1 UNION SELECT DISTINCT date, value FROM table2 ORDER BY date", conn)
    cur.execute("SELECT DISTINCT date, value FROM table1 UNION SELECT DISTINCT date, value FROM table2 ORDER BY date")
    conn.commit()
    results3 = cur.fetchall()
    sqlite_table = 'table1'
    table_3.to_sql(sqlite_table, conn, if_exists='replace')
    cur.close()
    conn.close()
    return "work"
Any help is greatly appreciated.
For your first problem, you may do any of these:
1. If data_1 is small (less than 200 KB), you may use flask-session to store the data and access it across routes.
2. Create a function that returns data_1, and call that function in any route you want. Hint:
def getdata1(val1, val2):
    # calculation here
    return data_1
Just call this wherever you need data_1.
3. Store the dataframe in a DB and fetch it.
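To make the second option concrete, here is a sketch of a route calling such a helper (assuming a zero-argument variant of getdata1 for brevity):
@app.route('/table1', methods=['GET'])
def store_table1_data_df():
    # fetch the dataframe on demand instead of relying on a global
    data_1 = getdata1()
    db_path = os.path.join(os.path.dirname(__file__), 'table1.db')
    engine = create_engine('sqlite:///{}'.format(db_path), echo=True)
    data_1.to_sql('table1', engine, if_exists='append')
    return "table1"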
For the second part, a simple for loop will work. Hint on that:
sql_table = ["Fetch your SQL table here as a dataframe, with the dates in one column"]
data_1 = ["Your dataframe"]
for i in data_1['Dates']:
    if i not in sql_table['dates']:
        pass  # insert this key:value pair into the SQL table
If your dataframe and SQL table are loaded in date order, even better: you just need to compare the last elements of each.
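Building on that hint, a minimal sketch of a duplicate-safe append (assuming data_1 has a 'date' column and the table is named 'table1'):
import pandas as pd
from sqlalchemy import create_engine

def append_new_rows(data_1, db_path='table1.db', table='table1'):
    engine = create_engine('sqlite:///{}'.format(db_path))
    try:
        existing = pd.read_sql_query('SELECT DISTINCT date FROM {}'.format(table), engine)
        # keep only the rows whose date is not already stored
        new_rows = data_1[~data_1['date'].isin(existing['date'])]
    except Exception:
        # table does not exist yet, so everything is new
        new_rows = data_1
    new_rows.to_sql(table, engine, if_exists='append', index=False)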

Python response JSON to MySQLDB comes back empty

My goal is to fetch the API via pagination, store the result as a JSON feed, and then send it off to the MySQL DB. Once stored, I want to check whether any new rows have been added; if so, empty the table and insert all the new rows (maybe not the best approach?). However, for some strange reason nothing is being stored in MySQL anymore, and my prints aren't working. Any thoughts on what I messed up?
PYTHON
import requests
import json
import sys
import MySQLdb

def dbconnect():
    try:
        db = MySQLdb.connect(
            host='localhost',
            user='root',
            passwd='',
            db='watch',
        )
    except Exception as e:
        sys.exit("Can't connect to database")
    return db

# init db
db = dbconnect()
cursor = db.cursor()

# Start getting all entries
def get_all_cracked_entries():
    # results will be appended to this list
    all_time_entries = []
    # loop through all pages and return JSON object
    for page in range(1, 4):
        url = "https://api.watch.com/api?page=" + str(page)
        response = requests.get(url=url).json()
        all_time_entries.append(response)
        page += 1
        for product in response:
            print("id:", product["_id"])
            print("title:", product["title"])
            print("slug:", product["slug"])
            print("releaseDate:", product["releaseDate"])
            cursor.execute("INSERT INTO jsondump (id, title, slug, releaseDate) VALUES (%s,%s,%s,%s)", (product["_id"], product["title"], product["slug"], product["releaseDate"]))
            db.commit()

    # Check row count
    cursor.execute("SELECT * FROM `jsondump`")
    cursor.fetchall()
    rc = cursor.rowcount
    print("%d" % rc)
    if rc > rc+1:
        rs = cursor.fetchall()
    else:
        cursor.execute("TRUNCATE TABLE jsondump")
        for product in response:
            print("id:", product["_id"])
            print("title:", product["title"])
            print("slug:", product["slug"])
            print("releaseDate:", product["releaseDate"])
            print('---')
            db = dbconnect()
            cursor = db.cursor()
            cursor.execute("INSERT INTO jsondump (id, title, slug, releaseDate) VALUES (%s,%s,%s)", (product["_id"], product["title"], product["slug"], product["releaseDate"]]))
            db.commit()
    cursor.close()

    # prettify JSON
    data = json.dumps(all_time_entries, sort_keys=True, indent=0)
    return data
SAMPLE JSON
[{
    "_id": "xxxxxxx",
    "releaseDate": "2020-02-13T21:00:00-03:00",
    "slug": "table-manners",
    "title": "Table Manners"
}]
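For what it's worth, the second INSERT in the code above passes four values to only three %s placeholders and has a stray ] in the parameter tuple; the latter is a syntax error, which would explain why nothing runs and none of the prints fire. A sketch of that statement with matching placeholders (same jsondump columns as above):
cursor.execute(
    "INSERT INTO jsondump (id, title, slug, releaseDate) VALUES (%s, %s, %s, %s)",
    (product["_id"], product["title"], product["slug"], product["releaseDate"]),
)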

How to fix data binding issue with SqlAlchemy

I am using Python 3.6 with Flask. I am trying to connect to an Amazon Redshift database using SQLAlchemy. The query has an IN operation in the WHERE clause, and the values for it are bound to the query. For some reason it does not work: it does not error out, but it does not fetch any results either. If I hard-code the values in the query, it works fine.
I have tried a few suggested options, but no luck:
1. binding the values as a list or just as a comma-separated string
2. removing the brackets in the query
3. changing the code to:
stmt = text(file.read())
stmt = stmt.bindparams(search=ids)
df = pd.read_sql_query(stmt, connection)
dtv_script.txt
Select * from tbl1 where id IN (:search)
def get_dt(id_list):
    engine = create_engine('postgresql://xxxxxxxxxx')
    connection = engine.connect()
    ids = list(id_list.split(","))
    # dtv_script.txt has the sql
    file = open('dtv_script.txt')
    sql = text(file.read())
    df = pd.read_sql_query(sql, connection, params={'search': ids})
    connection.close()
    return df
The IDs are posted from a form on index.html. Sample ids = 2011592,2021593,2033591.
The Flask route captures them, passes them to the get_dt() function, and returns the dataframe to the results.html page for display:
@app.route('/result', methods=['POST'])
def result():
    if request.method == 'POST':
        id_list = request.form.get('ids')
        df_dt = dofri.get_dt(id_list)
        return render_template('result.html', **locals())
    else:
        flash('There was some error. Check the logs')
        return index()
Below is the solution. Make sure to:
- add from sqlalchemy import bindparam
- remove the brackets from the query
- add expanding=True
dtv_script.txt
Select * from tbl1 where id IN :search
def get_dt(id_list):
    engine = create_engine('postgresql://xxxxxxxxxx')
    connection = engine.connect()
    ids = list(id_list.split(","))
    # dtv_script.txt has the sql
    file = open('dtv_script.txt')
    sql = file.read()
    t = text(sql)
    t = t.bindparams(bindparam('search', expanding=True))
    df = pd.read_sql_query(t, connection, params={'search': ids})
    connection.close()
    return df
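For context, an expanding bind parameter tells SQLAlchemy to rewrite the bound list into a comma-separated set of individual placeholders at execution time, which is exactly what IN needs. A minimal sketch of the same idea in isolation:
from sqlalchemy import text, bindparam

stmt = text("SELECT * FROM tbl1 WHERE id IN :search")
stmt = stmt.bindparams(bindparam('search', expanding=True))
# at execution, params={'search': ['2011592', '2021593']} renders as IN (?, ?)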
