Python response JSON to MySQLdb comes back empty

My goal is to parse the API via pagination, store the result as a JSON feed, and then send it off to the MySQL DB. Once stored, I want to check whether any new rows have been added; if so, wipe the table and insert all the new rows (maybe not the best approach?). However, for some strange reason nothing is being stored in MySQLdb anymore, and my prints aren't working. Any thoughts on what I messed up?
PYTHON
import requests
import json
import MySQLdb
import sys

def dbconnect():
    try:
        db = MySQLdb.connect(
            host='localhost',
            user='root',
            passwd='',
            db='watch',
        )
    except Exception as e:
        sys.exit("Can't connect to database")
    return db

# init db
db = dbconnect()
cursor = db.cursor()

# Start getting all entries
def get_all_cracked_entries():
    # results will be appended to this list
    all_time_entries = []
    # loop through all pages and return JSON object
    for page in range(1, 4):
        url = "https://api.watch.com/api?page=" + str(page)
        response = requests.get(url=url).json()
        all_time_entries.append(response)
        page += 1
        for product in response:
            print("id:", product["_id"])
            print("title:", product["title"])
            print("slug:", product["slug"])
            print("releaseDate:", product["releaseDate"])
            cursor.execute("INSERT INTO jsondump (id, title, slug, releaseDate) VALUES (%s,%s,%s,%s)", (product["_id"], product["title"], product["slug"], product["releaseDate"]))
            db.commit()

    # Check row count
    cursor.execute("SELECT * FROM `jsondump`")
    cursor.fetchall()
    rc = cursor.rowcount
    print("%d" % rc)
    if rc > rc+1:
        rs = cursor.fetchall()
    else:
        cursor.execute("TRUNCATE TABLE jsondump")
        for product in response:
            print("id:", product["_id"])
            print("title:", product["title"])
            print("slug:", product["slug"])
            print("releaseDate:", product["releaseDate"])
            print('---')
            db = dbconnect()
            cursor = db.cursor()
            cursor.execute("INSERT INTO jsondump (id, title, slug, releaseDate) VALUES (%s,%s,%s)", (product["_id"], product["title"], product["slug"], product["releaseDate"]]))
            db.commit()
    cursor.close()

    # prettify JSON
    data = json.dumps(all_time_entries, sort_keys=True, indent=0)
    return data
SAMPLE JSON
[{
    "_id": "xxxxxxx",
    "releaseDate": "2020-02-13T21:00:00-03:00",
    "slug": "table-manners",
    "title": "Table Manners"
}]
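For what it's worth, a few things stand out in the code above: the second INSERT has four values but only three %s placeholders plus a stray closing bracket, which is a SyntaxError, and since Python refuses to run a file that doesn't parse, that alone would explain why nothing is stored and no prints fire. The condition rc > rc+1 is also never true, so the truncate branch always runs. A minimal sketch of the refresh step with those problems fixed (table and column names taken from the question; comparing the row count against the fetched page is an assumed reading of "check if any new rows"):
def refresh_table(response):
    # Count rows without pulling the whole table across the wire.
    cursor.execute("SELECT COUNT(*) FROM jsondump")
    (row_count,) = cursor.fetchone()
    # Assumption: "new rows" means the API page size differs from the table.
    if row_count != len(response):
        cursor.execute("TRUNCATE TABLE jsondump")
        for product in response:
            cursor.execute(
                "INSERT INTO jsondump (id, title, slug, releaseDate) "
                "VALUES (%s, %s, %s, %s)",  # four placeholders for four values
                (product["_id"], product["title"], product["slug"], product["releaseDate"]),
            )
        db.commit()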

Related

using :memory: returns 'sqlite3.OperationalError: no such table:'

I am working on a messaging RESTful app using Flask in Python.
I need to implement an in-memory solution, but none of the solutions I found online are working.
Find the code below:
# Helper
def _get_message(id=None):
    """Return a list of message objects (as dicts)"""
    with sqlite3.connect(':memory:') as conn:
        c = conn.cursor()
        if id:
            q = "SELECT * FROM messages WHERE id=? ORDER BY dt DESC"
            rows = c.execute(q, (id,))
        else:
            q = "SELECT * FROM messages ORDER BY dt DESC"
            rows = c.execute(q)
        return [{'id': r[0], 'dt': r[1], 'message': r[2], 'sender': r[3]} for r in rows]

def _add_message(message, sender):
    with sqlite3.connect(':memory:') as conn:
        c = conn.cursor()
        id = str(uuid.uuid4().hex)
        q = "INSERT INTO messages VALUES (?, datetime('now'),?,?)"
        c.execute(q, (id, message, sender))
        conn.commit()
        return c.lastrowid
..and so on with other functions..
Then I run the app and initialise the database (I found this online, though it doesn't use an in-memory solution). I'd like it to take the schema from my db_init.sql:
if __name__ == '__main__':
    # Test whether the database exists; if not, create it and create the table
    if not os.path.exists(':memory:'):
        try:
            conn = sqlite3.connect(':memory:')
            # Absolute path needed for testing environment
            # sql_path = os.path.join(app.config['APP_ROOT'], 'db_init.sql')
            sql_path = os.path.join('db_init.sql')
            cmd = open(sql_path, 'r').read()
            c = conn.cursor()
            c.execute(cmd)
            conn.commit()
            conn.close()
        except IOError:
            print("Couldn't initialize the database, exiting...")
            raise
        except sqlite3.OperationalError:
            print("Couldn't execute the SQL, exiting...")
            raise
    app.run(host='0.0.0.0')
db_init.sql:
CREATE TABLE IF NOT EXISTS messages (
    id TEXT NOT NULL,
    dt TEXT NOT NULL,
    message TEXT NOT NULL,
    sender TEXT NOT NULL
);
But I get this error: 'sqlite3.OperationalError: no such table:'
I have also tried "file::memory:", "DataSource:memory:", and "file::memory:?cache=shared" with uri=True.
How can I use an in-memory database initialised from my db_init.sql?
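For what it's worth, the underlying issue is that every plain sqlite3.connect(':memory:') call opens a brand-new, empty database, so the table created at startup no longer exists when the helpers connect again (os.path.exists(':memory:') is also always false, since that is not a file path). A minimal sketch of one way around this, using the shared-cache URI the question already tried, under the assumption that everything runs in a single process:
import sqlite3
import uuid

DB_URI = 'file::memory:?cache=shared'

# Hold one connection open for the app's lifetime: a shared in-memory
# database is destroyed once its last connection closes.
_keepalive = sqlite3.connect(DB_URI, uri=True)

def init_db():
    # Run db_init.sql against the shared in-memory database.
    with open('db_init.sql') as f:
        _keepalive.executescript(f.read())

def _add_message(message, sender):
    msg_id = uuid.uuid4().hex
    with sqlite3.connect(DB_URI, uri=True) as conn:
        # This connection sees the same database as _keepalive.
        conn.execute("INSERT INTO messages VALUES (?, datetime('now'), ?, ?)",
                     (msg_id, message, sender))
    return msg_id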

Storing a pickled dictionary in Postgresql (Psycopg2)

I am trying to store a pickled nested dictionary in Postgresql (I am aware that this is a quick & dirty method and that I won't be able to access the dictionary contents from Postgresql; usually bad practice).
# boilerplate, preamble and upstream work.
import psycopg2
import pickle

''' Inputs: nd = dictionary to be pickled '''
pickled = pickle.dumps(nd)

connection = psycopg2.connect(user="-----",
                              password="----",
                              host="----",
                              port="----",
                              database="----")
name = 'database1'
print('Connected...')
cursor = connection.cursor()
print(connection.get_dsn_parameters(), "\n")
cursor.execute("CREATE TABLE thetable (name TEXT, ablob BYTEA)")
print('Created Table...')
cursor.execute("INSERT INTO thetable VALUES(%s)", (psycopg2.Binary(pickled),))
connection.commit()
print('Added Data...')
cursor.close()
connection.close()
print('Connection closed...')
When I come to data retrieval, I am having many issues importing the data from Postgres; essentially the data is to be read back, unpickled into the dictionary, and visualised. I have tried:
import psycopg2
import pickle
from io import BytesIO

connection = psycopg2.connect(user="----",
                              password="----",
                              host="----",
                              port="----",
                              database="----")
cursor = connection.cursor()
cursor.execute("SELECT ablob FROM thetable")
result, = cursor.fetchone()
cursor.close()
connection.rollback()
result = BytesIO(result)
print(pickle.load(result))
I followed this link: https://www.oreilly.com/library/view/python-cookbook/0596001673/ch08s08.html, and consulted "Insert an image in postgresql database" and "saving python object in postgres table with pickle", but I have been unable to get the pickled dictionary back.
Any advice on achieving this is greatly appreciated!
When your CREATE TABLE defines two columns, your INSERT has to either list the columns you want to fill or supply values for all of them:
import psycopg2
import pickle

dict = {
    "foo": "bar"
}
p = pickle.dumps(dict)

connection = psycopg2.connect(database="test")
cursor = connection.cursor()
cursor.execute("CREATE TABLE thetable (name TEXT, ablob BYTEA)")
cursor.execute("INSERT INTO thetable VALUES(%s,%s)", ('test', p))
connection.commit()
cursor.close()
connection.close()
and reading
import psycopg2
import pickle

connection = psycopg2.connect(database="test")
cursor = connection.cursor()
cursor.execute("SELECT ablob FROM thetable WHERE name='test';")
result = cursor.fetchone()
print(pickle.loads(result[0]))
cursor.close()
connection.close()

Python SQL query on a PostgreSQL DB hosted on ElephantSQL

I have an issue running my SQL queries on a PostgreSQL database hosted at ElephantSQL.
This is my code to connect (the DB name, user, and password are replaced by YYYY/ZZZZ placeholders):
DATABASE_URL = 'postgres://YYYY:ZZZZ@drona.db.elephantsql.com:5432/YYYY'

# ---------------------------- CONNECT ELEPHANT DB
def ElephantConnect():
    up.uses_netloc.append("postgres")
    url = up.urlparse(DATABASE_URL)
    conn = psycopg2.connect(dbname='YYYY',
                            user='YYYY',
                            password='ZZZZ',
                            host='drona.db.elephantsql.com',
                            port='5432'
                            )
    cursor = conn.cursor()
    # cursor.execute("CREATE TABLE notes(id integer primary key, body text, title text);")
    # conn.commit()
    # conn.close()
    return conn
This code seems to connect to the db fine.
My issue is when I want to empty a table:
def update(df, table_name, deleteYes='Yes'):
    conn = ElephantConnect()
    db = create_engine(DATABASE_URL)
    cursor = conn.cursor()
    if deleteYes == 'Yes':  # delete
        queryCount = "SELECT count(*) FROM {};".format(table_name)
        queryDelete = "DELETE FROM {};".format(table_name)
        count = db.execute(queryCount)
        rows_before = count.fetchone()[0]
        try:
            db.execute(queryDelete)
            logging.info('Deleted {} rows from table {}'.format(rows_before, table_name))
        except:
            logging.info('Delete error on table {}'.format(table_name))
    else:
        pass
It seems that when I run db.execute(queryDelete), it goes to the exception. I get no error message, but the count query works...
Thanks
I think the reason for the error is that there are foreign keys against the table. To be sure, assign the exception to a variable and print it:
except Exception as ex:
    print(ex)
By the way, if you want to quickly delete all of the rows from a table, it is much more efficient to truncate the table instead of deleting the rows one by one:
truncate table table_name
DELETE is more useful when you only want to remove rows that match some condition:
delete from table_name where ...
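Putting the two suggestions together, a sketch of the delete branch (names as in the question; whether TRUNCATE is usable still depends on the foreign keys, which it also refuses to cut through without CASCADE):
try:
    # format() is kept from the question; this assumes table_name is trusted.
    db.execute("TRUNCATE TABLE {};".format(table_name))
    logging.info('Truncated table %s (%s rows removed)', table_name, rows_before)
except Exception as ex:
    # Surface the real database error instead of swallowing it;
    # a foreign-key violation would be visible here.
    logging.error('Delete failed on table %s: %s', table_name, ex)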

Insert API response into many rows at once - psycopg2

I want to INSERT the dict resulting from an API call into my db; so far I can only insert one item at a time.
This is my code:
import json
import requests
import psycopg2

def my_func():
    response = requests.get("https://path/to/api/")
    data = response.json()
    while data['next'] is not None:
        response = requests.get(data['next'])
        data = response.json()
        for item in data['results']:
            try:
                connection = psycopg2.connect(user="user",
                                              password="user",
                                              host="127.0.0.1",
                                              port="5432",
                                              database="mydb")
                cursor = connection.cursor()
                postgres_insert_query = """ INSERT INTO table_items (NAME) VALUES (%s)"""
                record_to_insert = item['name']
                cursor.execute(postgres_insert_query, (record_to_insert,))
                connection.commit()
                count = cursor.rowcount
                print(count, "success")
            except (Exception, psycopg2.Error) as error:
                if connection:
                    print("error", error)
            finally:
                if connection:
                    cursor.close()
                    connection.close()

my_func()
So this one is working, but if, for example, I want to insert into table_items not just the name field but name, address, weight, and cost_per_unit, then I change these lines of code:
postgres_insert_query = 'INSERT INTO table_items (NAME, ADDRESS, WEIGHT, COST_PER_UNIT) VALUES (%s,%s,%s,%s)'
record_to_insert = (item['name']['address']['weight']['cost_per_unit'])
Then it throws:
Failed to insert record into table_items table string indices must be integers
PostgreSQL connection is closed
I mean, the first version with just one field works perfectly, but I need to insert the other three fields every time. Any ideas?
You have to fix the syntax you use to read the item fields when defining the parameters, and also change the object you pass to the parameterized query, since record_to_insert is already a tuple:
postgres_insert_query = """ INSERT INTO table_items
    (NAME, ADDRESS, WEIGHT, COST_PER_UNIT) VALUES (%s,%s,%s,%s)"""
record_to_insert = (item['name'],
                    item['address'],
                    item['weight'],
                    item['cost_per_unit'])
cursor.execute(postgres_insert_query, record_to_insert)  # you can pass the tuple directly
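Since the title asks about inserting many rows at once: psycopg2 also provides a batch helper, psycopg2.extras.execute_values, which sends a whole page of results in one statement instead of one round trip per item. A sketch with the same column names (connection setup elided):
from psycopg2.extras import execute_values

rows = [(item['name'], item['address'], item['weight'], item['cost_per_unit'])
        for item in data['results']]
execute_values(
    cursor,
    "INSERT INTO table_items (NAME, ADDRESS, WEIGHT, COST_PER_UNIT) VALUES %s",
    rows,  # execute_values expands VALUES %s into one multi-row statement
)
connection.commit()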

Why is the database not refreshing after a delete?

In the DB I have two events:
{1: {'title': 'APPLE'}, 2: {'title': 'BANANA'}}
As you can see below, I am calling the deletefromDB function on refresh with the ID of the second event, and I am printing out the data (the JSON above). My issue is that the data only changes after I restart my Flask server; until then it's the same JSON output. Can somebody explain what I am doing wrong?
This is my code:
import sqlite3
from flask import Flask, render_template, request

app = Flask(__name__)
data = {}
conn = sqlite3.connect('events.db')
cursor = conn.cursor()

# INIT
def initDB():
    with conn:
        cursor.execute("SELECT * FROM Events")
        rows = cursor.fetchall()
        for row in rows:
            temp = {}
            temp["title"] = row[1]
            data[row[0]] = temp
    print(data)

initDB()

def deletefromDB(eventID):
    query = 'DELETE FROM Events WHERE EventId = {}'.format(eventID)
    with conn:
        cursor.execute(query)
    print(query)
    initDB()

@app.route('/')
def index():
    deletefromDB(2)
    return 'Index Page'

if __name__ == "__main__":
    app.run()
You are caching the table's rows into the data dict on database initialization. Then you delete a row from the table without applying the change to the data dict. Either reload the entire data dict after the deletion or remove the entry from it directly:
def deletefromDB(eventID):
    query = 'DELETE FROM Events WHERE EventId = {}'.format(eventID)
    with conn:
        cursor.execute(query)
    del data[eventID]
    print(query)
    initDB()
Be careful: you build a formatted SQL string without escaping the parameter (SQL injection alert). Let cursor.execute substitute the variables instead; note that sqlite3 uses ? placeholders:
query = 'DELETE FROM Events WHERE EventId = ?'
# ...
cursor.execute(query, (eventID,))
Do this whenever you substitute something into an SQL query!
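Combining the cache fix and the parameterized query, the whole function (names as in the question) might end up looking like this sketch:
def deletefromDB(eventID):
    # Let sqlite3 substitute the value; ? placeholders avoid SQL injection.
    with conn:
        cursor.execute('DELETE FROM Events WHERE EventId = ?', (eventID,))
    # Keep the cached dict in sync instead of waiting for a server restart.
    data.pop(eventID, None)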
