I'm trying to insert the data from a JSON file called output.json, and I'm getting this error:
MySQLdb._exceptions.ProgrammingError: not all arguments converted during bytes formatting
In MySQL I insert it like INSERT INTO t1 VALUES ({JSONFILE}). Here is my code:
from flask import Flask
from flask_mysqldb import MySQL
import json

app = Flask(__name__)
app.config['MYSQL_HOST'] = 'localhost'
app.config['MYSQL_USER'] = 'root'
app.config['MYSQL_PASSWORD'] = '*****'
app.config['MYSQL_DB'] = '*****'
mysql = MySQL(app)

@app.route("/sendf", methods=['POST'])
def sendfilet():
    cursor = mysql.connection.cursor()
    file = open('output.json', 'r')
    file_content = file.read()
    file.close()
    sql = "INSERT INTO t1 (tablename) VALUES (%s)"
    val = (json.dumps(file_content))
    cursor.execute(sql, val)
    db.commit()
    db.close()
    return 200

if __name__ == '__main__':
    app.run(debug=True, port=5050)
My JSON is like this:
{"_id":{"$oid":"60f458945d77cb5ec7872b61"},"insertionDate":{"$date":"2021-07-18T16:36:36.193Z"},"sessionData":{"time":["1364","1374","1384"],"yaw":["0.15","0.3","0.45"],"pitch":["0.36","0.76","1.08"],"roll":["-0.13","-0.25","-0.35"],"ax":["-0.42","-0.41","-0.41"],"ay":["-0.15","-0.13","-0.1"],"az":["0.9","0.91","1"],"gx":["0","0","0"],"gy":["-0.01","0","-0.01"],"gz":["0.02","0.02","0.02"],"mx":["0.26","0.26","0.26"],"my":["0.01","0.01","0.01"],"mz":["-0.04","-0.04","-0.07"]},"metaData":{"userId":123456,"gender":"M","ageGroup":"SENIOR","weightKg":70,"heightCm":175,"poolSizeM":50}}
To store your JSON data in MySQL from Python, you can create a MySQLUtil helper and use it to insert the data:

mysql = MySQLUtil()

After this, make a connection to the database, store the JSON data in a variable, escape it, and use MySQLUtil.execSql() to insert it:

json_data = pymysql.escape_string(file_content)  # your JSON string
sql = "INSERT INTO t1 (tablename) VALUES ('" + json_data + "')"
mysql.execSql(sql)

You can read more here: Python JSON encoder and decoder.
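Note that the error in the question comes from passing a bare string as the second argument of cursor.execute(): MySQLdb then treats each byte of the string as a separate parameter, hence "not all arguments converted". A minimal sketch of the parameterized fix, keeping the question's table and route (no manual escaping needed, since the driver does the quoting):

@app.route("/sendf", methods=['POST'])
def sendfilet():
    cursor = mysql.connection.cursor()
    with open('output.json', 'r') as f:
        file_content = f.read()
    sql = "INSERT INTO t1 (tablename) VALUES (%s)"
    # the trailing comma makes (file_content,) a one-element tuple,
    # matching the single %s placeholder
    cursor.execute(sql, (file_content,))
    mysql.connection.commit()
    return "OK", 200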
To test the Python code, first try this:
from pathlib import Path
from . import mysql  # I use a blueprint, so I import it from __init__.py; you can skip this.

@app.route("/sendf", methods=['GET'])
def sendfilet():
    try:
        print("File Path:", Path(__file__).absolute())
        print("Directory Path:", Path().absolute())  # the current working directory, not this file's directory
        # file_path = str(Path().absolute()) + '/output.json'
        parent_path = Path(__file__).parent.absolute()
        file_path = f"{str(parent_path)}/output.json"
        print("file_path:", file_path)
        file = open(file_path, 'r')
        file_content = file.read()
        file.close()
        # print(file_content)
        try:
            cursor = mysql.connection.cursor()
            cursor.execute("SELECT COUNT(*) FROM test_json")
            property_count = cursor.fetchone()[0]  # current row count, used to derive the next id
            sql = """INSERT INTO test_json (id, json_content)
                     VALUES (%s, %s)"""
            # sql = "INSERT INTO test (json_content) VALUES (%s)"
            row_id = property_count + 1  # make a new id
            val = (row_id, file_content)
            cursor.execute(sql, val)
            mysql.connection.commit()
        except Exception as e:
            print(e)
    except Exception as e:
        print(e)
    return "OK", 200
If the table doesn't have an id column with a primary key, you will get an error.
My test table is named "test_json".
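The table itself isn't shown here; a schema along these lines would match the code above (an assumed reconstruction, not the answerer's exact DDL):

CREATE TABLE test_json (
    id INT NOT NULL,        -- assumed type; any integer key works
    json_content LONGTEXT,  -- holds the raw JSON string
    PRIMARY KEY (id)
);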
I think you should use SQLAlchemy instead of flask_mysqldb; it makes an auto-incrementing id easy:
id = db.Column(db.BigInteger, autoincrement=True, primary_key=True)
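For instance, a minimal Flask-SQLAlchemy version of the same table might look like this (a sketch; the model name and the database URI are assumptions):

from flask import Flask
from flask_sqlalchemy import SQLAlchemy

app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'mysql://root:*****@localhost/yourdb'
db = SQLAlchemy(app)

class TestJson(db.Model):
    __tablename__ = 'test_json'
    id = db.Column(db.BigInteger, autoincrement=True, primary_key=True)
    json_content = db.Column(db.Text)

# the database assigns the id, so no COUNT(*) bookkeeping is needed
with open('output.json') as f:
    db.session.add(TestJson(json_content=f.read()))
db.session.commit()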
I am working on a messaging RESTful app using Flask in Python. I need to implement an in-memory database, but none of the solutions I found online are working. Find the code below:
# Helper
def _get_message(id=None):
    """Return a list of message objects (as dicts)"""
    with sqlite3.connect(':memory:') as conn:
        c = conn.cursor()
        if id:
            q = "SELECT * FROM messages WHERE id=? ORDER BY dt DESC"
            rows = c.execute(q, (id,))
        else:
            q = "SELECT * FROM messages ORDER BY dt DESC"
            rows = c.execute(q)
        return [{'id': r[0], 'dt': r[1], 'message': r[2], 'sender': r[3]} for r in rows]

def _add_message(message, sender):
    with sqlite3.connect(':memory:') as conn:
        c = conn.cursor()
        id = str(uuid.uuid4().hex)
        q = "INSERT INTO messages VALUES (?, datetime('now'), ?, ?)"
        c.execute(q, (id, message, sender))
        conn.commit()
        return c.lastrowid
...and so on with other functions.
Then I run the app and initialise the database (I found this online, though it doesn't use an in-memory solution). I'd like it to load the schema from my db_init.sql:
if __name__ == '__main__':
    # Test whether the database exists; if not, create it and create the table
    if not os.path.exists(':memory:'):
        try:
            conn = sqlite3.connect(':memory:')
            # Absolute path needed for testing environment
            # sql_path = os.path.join(app.config['APP_ROOT'], 'db_init.sql')
            sql_path = os.path.join('db_init.sql')
            cmd = open(sql_path, 'r').read()
            c = conn.cursor()
            c.execute(cmd)
            conn.commit()
            conn.close()
        except IOError:
            print("Couldn't initialize the database, exiting...")
            raise
        except sqlite3.OperationalError:
            print("Couldn't execute the SQL, exiting...")
            raise
    app.run(host='0.0.0.0')
db_init.sql:
CREATE TABLE IF NOT EXISTS messages (
id TEXT NOT NULL,
dt TEXT NOT NULL,
message TEXT NOT NULL,
sender TEXT NOT NULL
);
But I get this error: 'sqlite3.OperationalError: no such table:'
I have also tried "file::memory:", "DataSource:memory:", and "file::memory:?cache=shared" with uri=True.
How can I use in memory with my initialised db_init.sql?
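The usual cause of this error is that every call to sqlite3.connect(':memory:') creates a brand-new, empty database, so the table created at startup no longer exists when the helper functions connect. One common fix is a shared-cache URI, so that all connections in the process see the same in-memory database for as long as at least one connection stays open. A minimal sketch (the keep-alive connection and the rewritten helper are assumptions, following the question's code):

import sqlite3
import uuid

DB_URI = 'file::memory:?cache=shared'

# Hold one connection open for the lifetime of the app; the shared
# in-memory database is destroyed when its last connection closes.
_keepalive = sqlite3.connect(DB_URI, uri=True)

# executescript() runs the whole file even if db_init.sql grows to
# several statements (cursor.execute() only accepts one).
with open('db_init.sql') as f:
    _keepalive.executescript(f.read())

def _add_message(message, sender):
    # uri=True is required for the file: form to be interpreted as a URI
    with sqlite3.connect(DB_URI, uri=True) as conn:
        msg_id = uuid.uuid4().hex
        conn.execute("INSERT INTO messages VALUES (?, datetime('now'), ?, ?)",
                     (msg_id, message, sender))
        return msg_id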
I'm new to JSON. I'm working with MySQL and JSON in my Python project, and I have a JSON file (data.json). I want to store some of its contents in a SQL database, but I get the error TypeError: not all arguments converted during string formatting when I run the code. Here's the piece of code:
def create_db(db_name, table_name):
    # try:
    db = dbconnect()
    cursor = db.cursor()
    cursor.execute("SET sql_notes = 0;")
    cursor.execute("CREATE DATABASE IF NOT EXISTS {}".format(db_name))
    cursor.execute("SET sql_notes = 0;")
    cursor.execute(
        """CREATE TABLE IF NOT EXISTS {}.{}(waktu varchar(150), plate varchar(20), region varchar(150), score varchar(20), filename varchar(50));""".format(db_name, table_name))
    cursor.execute("SET sql_notes = 1;")
    with open('data.json') as f:
        data = json.load(f)
    for i in data:
        cursor.execute(
            """INSERT INTO {}.{}(waktu, plate, region, score, filename) VALUES(%s,%s)""".format(db_name, table_name),
            (i['timestamp'], i['results'][0]['plate'], i['results'][0]['region']['code'], i['results'][0]['score'], i['filename']))
    db.commit()
    db.close()
    # except Exception as e:
    #     print(e)

create_db(db_name="plate_recognizer", table_name="kendaraan")
How do I solve this? Any help will be appreciated, thank you.
The problem is here:
"""INSERT INTO {}.{}(waktu, plate, region, score, filename) VALUES(%s,%s)""".format
(db_name, table_name),
(i['timestamp'], i['results'][0]['plate'], i['results'][0]['region']['code'], i['results'][0]['score'], i['filename']))
You provided five arguments ((i['timestamp'], i['results'][0]['plate'], i['results'][0]['region']['code'], i['results'][0]['score'], i['filename'])) but only two placeholders (VALUES(%s,%s)). You should use one placeholder per argument: VALUES(%s, %s, %s, %s, %s).
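Applied to the loop above, the corrected call would be (same names as in the question):

for i in data:
    cursor.execute(
        """INSERT INTO {}.{}(waktu, plate, region, score, filename)
           VALUES (%s, %s, %s, %s, %s)""".format(db_name, table_name),
        (i['timestamp'], i['results'][0]['plate'],
         i['results'][0]['region']['code'], i['results'][0]['score'],
         i['filename']))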
I am trying to store a pickled nested dictionary in PostgreSQL. (I am aware that this is a quick and dirty method and that I won't be able to access the dictionary contents from PostgreSQL; it's usually bad practice.)
# boilerplate, preamble and upstream work.
import pickle
import psycopg2

''' Inputs: nd = dictionary to be pickled '''
pickled = pickle.dumps(nd)

connection = psycopg2.connect(user="-----",
                              password="----",
                              host="----",
                              port="----",
                              database="----")
name = 'database1'
print('Connected...')
cursor = connection.cursor()
print(connection.get_dsn_parameters(), "\n")
cursor.execute("CREATE TABLE thetable (name TEXT, ablob BYTEA)")
print('Created Table...')
cursor.execute("INSERT INTO thetable VALUES(%s)", (psycopg2.Binary(pickled),))
connection.commit()
print('Added Data...')
cursor.close()
connection.close()
print('Connection closed...')
When I come to data retrieval, I am having many issues importing the data from Postgres: essentially the data needs to be read back, unpickled into the dictionary, and visualised. I have tried:
import pickle
import psycopg2
from io import BytesIO

connection = psycopg2.connect(user="----",
                              password="----",
                              host="----",
                              port="----",
                              database="----")
cursor = connection.cursor()
cursor.execute("SELECT ablob FROM thetable")
result, = cursor.fetchone()
cursor.close()
connection.rollback()
result = BytesIO(result)
print(pickle.load(result))
This is as per this link: https://www.oreilly.com/library/view/python-cookbook/0596001673/ch08s08.html; I also consulted Insert an image in postgresql database and saving python object in postgres table with pickle, but I have been unable to get the pickled dictionary back.
Any advice in achieving this is greatly appreciated!
When your CREATE TABLE lists two fields, you have to state in the INSERT which ones you want to fill, unless you fill them all.
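For example, to fill only the blob column, name it explicitly (a one-line sketch against the same table; name is then left NULL):

cursor.execute("INSERT INTO thetable (ablob) VALUES (%s)", (psycopg2.Binary(pickled),))

Here is a complete example that fills both columns: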
import psycopg2
import pickle

data = {
    "foo": "bar"
}
p = pickle.dumps(data)
connection = psycopg2.connect(database="test")
cursor = connection.cursor()
cursor.execute("CREATE TABLE thetable (name TEXT, ablob BYTEA)")
cursor.execute("INSERT INTO thetable VALUES(%s,%s)", ('test', p))
connection.commit()
cursor.close()
connection.close()
and reading:
import psycopg2
import pickle

connection = psycopg2.connect(database="test")
cursor = connection.cursor()
cursor.execute("SELECT ablob FROM thetable WHERE name='test';")
result = cursor.fetchone()
print(pickle.loads(result[0]))
cursor.close()
connection.close()
My goal is to parse the API via pagination, store the result as a JSON feed, and then send it off to the MySQL DB. Once stored, I want to check whether any new rows have been added; if so, delete the table's contents and add all the new rows (maybe not the best approach?). However, for some strange reason nothing is being stored in the MySQL DB anymore, and my prints aren't working. Any thoughts on what I messed up?
PYTHON
import requests
import json
import sys
import MySQLdb

def dbconnect():
    try:
        db = MySQLdb.connect(
            host='localhost',
            user='root',
            passwd='',
            db='watch',
        )
    except Exception as e:
        sys.exit("Can't connect to database")
    return db

# init db
db = dbconnect()
cursor = db.cursor()

# Start getting all entries
def get_all_cracked_entries():
    # results will be appended to this list
    all_time_entries = []
    # loop through all pages and return JSON object
    for page in range(1, 4):
        url = "https://api.watch.com/api?page=" + str(page)
        response = requests.get(url=url).json()
        all_time_entries.append(response)
        page += 1
        for product in response:
            print("id:", product["_id"])
            print("title:", product["title"])
            print("slug:", product["slug"])
            print("releaseDate:", product["releaseDate"])
            cursor.execute("INSERT INTO jsondump (id, title, slug, releaseDate) VALUES (%s,%s,%s,%s)", (product["_id"], product["title"], product["slug"], product["releaseDate"]))
            db.commit()

    # Check Row Count
    cursor.execute("SELECT * FROM `jsondump`")
    cursor.fetchall()
    rc = cursor.rowcount
    print("%d" % rc)
    if rc > rc+1:
        rs = cursor.fetchall()
    else:
        cursor.execute("TRUNCATE TABLE jsondump")
        for product in response:
            print("id:", product["_id"])
            print("title:", product["title"])
            print("slug:", product["slug"])
            print("releaseDate:", product["releaseDate"])
            print('---')
            db = dbconnect()
            cursor = db.cursor()
            cursor.execute("INSERT INTO jsondump (id, title, slug, releaseDate) VALUES (%s,%s,%s)", (product["_id"], product["title"], product["slug"], product["releaseDate"]))
            db.commit()
            cursor.close()

    # prettify JSON
    data = json.dumps(all_time_entries, sort_keys=True, indent=0)
    return data
SAMPLE JSON
[{
    "_id": "xxxxxxx",
    "releaseDate": "2020-02-13T21:00:00-03:00",
    "slug": "table-manners",
    "title": "Table Manners"
}]
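Two things stand out in the code above: the second INSERT passes four values to only three placeholders (VALUES (%s,%s,%s)), which raises "not all arguments converted during string formatting" and stops anything from being stored, and the check if rc > rc+1: can never be true, so the TRUNCATE branch always runs. A minimal sketch of the reload step with both fixed (table and column names as in the question; the "has the feed grown?" test is an assumption about the intent):

def reload_entries(db, entries):
    cursor = db.cursor()
    cursor.execute("SELECT COUNT(*) FROM jsondump")
    existing = cursor.fetchone()[0]
    # only rebuild when the feed has more rows than the table
    if len(entries) > existing:
        cursor.execute("TRUNCATE TABLE jsondump")
        for product in entries:
            cursor.execute(
                "INSERT INTO jsondump (id, title, slug, releaseDate) "
                "VALUES (%s, %s, %s, %s)",  # four placeholders for four values
                (product["_id"], product["title"], product["slug"],
                 product["releaseDate"]))
        db.commit()
    cursor.close()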
I have an issue running my SQL queries on a hosted ElephantSQL Postgres database.
This is my code to connect (the database name, user, and password are replaced by placeholders):
import psycopg2
import urllib.parse as up

DATABASE_URL = 'postgres://YYYY:ZZZZ@drona.db.elephantsql.com:5432/YYYY'

# ---------------------------- CONNECT ELEPHANT DB
def ElephantConnect():
    up.uses_netloc.append("postgres")
    url = up.urlparse(DATABASE_URL)
    conn = psycopg2.connect(dbname='YYYY',
                            user='YYYY',
                            password='ZZZZ',
                            host='drona.db.elephantsql.com',
                            port='5432')
    cursor = conn.cursor()
    # cursor.execute("CREATE TABLE notes(id integer primary key, body text, title text);")
    # conn.commit()
    # conn.close()
    return conn
This code seems to connect to the db fine.
My issue is when I want to delete the contents of a table:
from sqlalchemy import create_engine
import logging

def update(df, table_name, deleteYes='Yes'):
    conn = ElephantConnect()
    db = create_engine(DATABASE_URL)
    cursor = conn.cursor()
    if deleteYes == 'Yes':  # delete
        queryCount = "SELECT count(*) FROM {};".format(table_name)
        queryDelete = "DELETE FROM {};".format(table_name)
        count = db.execute(queryCount)
        rows_before = count.fetchone()[0]
        try:
            db.execute(queryDelete)
            logging.info('Deleted {} rows from table {}'.format(rows_before, table_name))
        except:
            logging.info('Delete error in table {}'.format(table_name))
    else:
        pass
It seems that when I run db.execute(queryDelete), it goes to the exception.
I get no error message, but the query that counts the rows works...
Thanks.
I think the reason for the error is that there are foreign keys against the table. To be sure, assign the exception to a variable and print it:
except Exception as ex:
    print(ex)
By the way, if you want to quickly delete all of the rows from a table, it is much more efficient to truncate it instead of deleting the rows:
truncate table table_name
DELETE is more useful when you only want to remove rows that match some condition:
delete from table_name where ...
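Putting the two suggestions together, the delete branch of update() might look like this (a sketch; note that CASCADE also empties any tables that reference table_name through foreign keys, so only use it if that is acceptable):

try:
    with db.begin() as connection:
        # CASCADE truncates referencing tables too, avoiding the FK error
        connection.execute("TRUNCATE TABLE {} CASCADE".format(table_name))
    logging.info('Truncated table {}'.format(table_name))
except Exception as ex:
    # log the real error instead of swallowing it
    logging.error('Truncate failed for table {}: {}'.format(table_name, ex))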