I have a JSON file and I need to load the JSON data into my PostgreSQL database.
So far I have tried this:
import psycopg2
import json

with open('new.json') as f:
    data = f.read()
dd = json.loads(data)

conn = psycopg2.connect(database="newdb", user="postgres", password="postgres",
                        host="127.0.0.1", port="5432")
print("Opened database successfully")

cur = conn.cursor()
cur.execute('''CREATE TABLE jsontable(SUM INT NOT NULL,
                                      APP CHAR(30) NOT NULL,
                                      ID INT NOT NULL,
                                      DOMAINNAME TEXT NOT NULL,
                                      DOMAINID INT NOT NULL);''')
print("Table created successfully")

cur.execute('''INSERT INTO jsontable(SUM, APP, ID, DOMAINNAME, DOMAINID)
               VALUES (%s, %s, %s, %s, %s)''',
            (dd['sum'], dd['app'], dd['id'], dd['Domain_name'], dd['Domain_Id']))
print("Data entered successfully")

conn.commit()
conn.close()
Please provide some examples of how to pass the JSON file data to the database.
Personally I like asyncpg since it's fully async; on Python 3.x, essentially all you need is an async function and await in front of the otherwise synchronous calls.
import asyncio
import asyncpg
import json

async def main():
    with open('new.json') as f:
        data = f.read()
    dd = json.loads(data)

    conn = await asyncpg.connect(database="newdb", user="postgres", password="postgres",
                                 host="127.0.0.1", port=5432)
    print("Opened database successfully")

    await conn.execute('''CREATE TABLE jsontable(SUM INT NOT NULL,
                                                 APP CHAR(30) NOT NULL,
                                                 ID INT NOT NULL,
                                                 DOMAINNAME TEXT NOT NULL,
                                                 DOMAINID INT NOT NULL);''')
    print("Table created successfully")

    # asyncpg is strict about parameter types, so cast the numeric fields to int
    await conn.execute('''INSERT INTO jsontable(SUM, APP, ID, DOMAINNAME, DOMAINID)
                          VALUES($1, $2, $3, $4, $5)''',
                       int(dd['sum']), dd['app'], int(dd['id']),
                       dd['Domain_name'], int(dd['Domain_Id']))
    print("Data entered successfully")

    # each statement runs in its own implicit transaction, so no explicit commit is needed
    await conn.close()

asyncio.run(main())
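If you would rather store the whole document instead of mapping individual fields to columns, here is a minimal psycopg2 sketch that keeps the entire JSON in a single jsonb column (the rawjson table name and doc column are just placeholders I made up for illustration):

import json
import psycopg2
from psycopg2.extras import Json  # adapts Python dicts to PostgreSQL json/jsonb

with open('new.json') as f:
    dd = json.load(f)

conn = psycopg2.connect(database="newdb", user="postgres", password="postgres",
                        host="127.0.0.1", port="5432")
cur = conn.cursor()
# hypothetical one-column table holding the whole document
cur.execute("CREATE TABLE IF NOT EXISTS rawjson (doc JSONB NOT NULL);")
cur.execute("INSERT INTO rawjson (doc) VALUES (%s);", (Json(dd),))
conn.commit()
conn.close()

Individual fields can then still be queried in SQL with the -> and ->> operators.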
How do I convert this code into proper functions? Creating the table should be a separate function, making the connection should be separate, and so should inserting and viewing records. I am trying but getting an error; can anybody help me?
import psycopg2
import pandas as pd

df = pd.read_csv('dataframe.csv')

# creating connection
conn = psycopg2.connect(database = "postgres",
                        user = "postgres",
                        password = "12345",
                        host = "127.0.0.1",
                        port = "5432")
print("Opened database successfully")

# Creating table stockmarketforecasting
cur = conn.cursor()
cur.execute('''CREATE TABLE STOCK_MARKET_FORECASTING
               (date VARCHAR(30) NOT NULL,
                open float NOT NULL,
                high float NOT NULL,
                low float NOT NULL,
                close float NOT NULL);''')
print("Table created successfully")

# Inserting records in database
for i in range(0, len(df)):
    values = (df['date'][i], df['open'][i], df['high'][i], df['low'][i], df['close'][i])
    cur.execute("INSERT INTO STOCK_MARKET_FORECASTING (date, open, high, low, close) VALUES (%s, %s, %s, %s, %s)",
                values)
conn.commit()
print("Records created successfully")

# View the records
cur.execute("SELECT * from STOCK_MARKET_FORECASTING")
rows = cur.fetchall()
for row in rows:
    print(row)
print("Operation done successfully")

conn.close()
Below is the code I am trying to convert into functions, but I am getting an error. Please tell me what I am doing wrong.
My code:
import psycopg2
import pandas as pd

df = pd.read_csv('2_months_dataframe.csv')

# creating connection
def create_connection():
    conn = psycopg2.connect(database = "postgres",
                            user = "postgres",
                            password = "12345",
                            host = "127.0.0.1",
                            port = "5432")
    print("Opened database successfully")
    return conn

conn = create_connection()

# Creating table stockmarketforecasting
def create_table(conn):
    conn = create_connection()
    cur = conn.cursor(conn)
    cur.execute('''CREATE TABLE STOCK_MARKET_FORECASTING
                   (date VARCHAR(30) NOT NULL,
                    open float NOT NULL,
                    high float NOT NULL,
                    low float NOT NULL,
                    close float NOT NULL);''')
    print("Table created successfully")
    conn.commit()
    return conn, cur

conn, cur = create_table(conn)

# Inserting records in database
def insering_records(df, conn, cur):
    for i in range(0, len(df)):
        values = (df['date'][i], df['open'][i], df['high'][i], df['low'][i], df['close'][i])
        cur.execute
        ("INSERT INTO STOCK_MARKET_FORECASTING (date, open, high, low, close) VALUES (%s, %s, %s, %s, %s)",
         values)
    conn.commit()
    print("Records created successfully")
    return cur, conn

conn, cur = insering_records(df, conn, cur)

# View the records
def viewing_records(conn, cur):
    cur.execute("SELECT * from STOCK_MARKET_FORECASTING")
    rows = cur.fetchall()
    for row in rows:
        print(row)
    print("Operation done successfully")
    return rows

conn.close()
rows = viewing_records(conn, cur)
I've made some corrections to your code, and it should work in this form. Let me know if you have any issues.
import psycopg2 as ps
import pandas as pd

# Here you can put your database credentials
DB_HOST = ''
DB_NAME = ''
DB_USER = ''
DB_PASS = ''

df = pd.read_csv('dataframe.csv')

# Using the connection as a context manager commits the transaction automatically;
# the connection itself is closed when the script exits
with ps.connect(dbname=DB_NAME, user=DB_USER, password=DB_PASS, host=DB_HOST) as conn:
    with conn.cursor() as cur:
        print("Opened database successfully")

        # Creating table stockmarketforecasting
        cur.execute('''CREATE TABLE STOCK_MARKET_FORECASTING
                       (date VARCHAR(30) NOT NULL,
                        open float NOT NULL,
                        high float NOT NULL,
                        low float NOT NULL,
                        close float NOT NULL);''')
        print("Table created successfully")

        # Inserting records in database
        for i in range(0, len(df)):
            values = (df['date'][i], df['open'][i], df['high'][i], df['low'][i], df['close'][i])
            cur.execute("INSERT INTO STOCK_MARKET_FORECASTING (date, open, high, low, close) VALUES (%s, %s, %s, %s, %s)",
                        values)
        conn.commit()
        print("Records created successfully")

        # View the records
        cur.execute("SELECT * from STOCK_MARKET_FORECASTING")
        rows = cur.fetchall()
        for row in rows:
            print(row)
        print("Operation done successfully")
I'm trying to insert the data from a JSON file called output.json, but I'm getting this error:
MySQLdb._exceptions.ProgrammingError: not all arguments converted during bytes formatting
In MySQL I insert it like insert into t1 values ({JSONFILE}). This is my code:
from flask import Flask
from flask_mysqldb import MySQL

app = Flask(__name__)
app.config['MYSQL_HOST'] = 'localhost'
app.config['MYSQL_USER'] = 'root'
app.config['MYSQL_PASSWORD'] = '*****'
app.config['MYSQL_DB'] = '*****'
mysql = MySQL(app)

@app.route("/sendf", methods=['POST'])
def sendfilet():
    cursor = mysql.connection.cursor()
    file = open('output.json', 'r')
    file_content = file.read()
    file.close()
    sql = "INSERT INTO t1 (tablename) VALUES (%s)"
    val = (json.dumps(file_content))
    cursor.execute(sql, val)
    db.commit()
    db.close()
    return 200

if __name__ == '__main__':
    app.run(debug=True, port=5050)
My JSON looks like this:
{"_id":{"$oid":"60f458945d77cb5ec7872b61"},"insertionDate":{"$date":"2021-07-18T16:36:36.193Z"},"sessionData":{"time":["1364","1374","1384"],"yaw":["0.15","0.3","0.45"],"pitch":["0.36","0.76","1.08"],"roll":["-0.13","-0.25","-0.35"],"ax":["-0.42","-0.41","-0.41"],"ay":["-0.15","-0.13","-0.1"],"az":["0.9","0.91","1"],"gx":["0","0","0"],"gy":["-0.01","0","-0.01"],"gz":["0.02","0.02","0.02"],"mx":["0.26","0.26","0.26"],"my":["0.01","0.01","0.01"],"mz":["-0.04","-0.04","-0.07"]},"metaData":{"userId":123456,"gender":"M","ageGroup":"SENIOR","weightKg":70,"heightCm":175,"poolSizeM":50}}
In order to store your JSON data in MySQL from Python, you can create a MySQLUtil helper and use it to insert your JSON data into MySQL.
You can do mysql = MySQLUtil()
After this, make a connection to the database, store the JSON data in a variable, and simply use MySQLUtil.execSql() to insert the data:
json_data = pymysql.escape_string(your_json_data)
sql = "INSERT INTO t1 (tablename) VALUES ('" + json_data + "')"
mysql.execSql(sql)
You can read more here: Python JSON encoder and decoder
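If you'd rather avoid building the SQL by string concatenation, here is a minimal sketch using a parameterized query instead (it assumes the t1 table and tablename column from the question, and plain pymysql for the connection):

import pymysql

# assumed connection details; replace with your own
conn = pymysql.connect(host='localhost', user='root', password='*****', database='*****')

with open('output.json', 'r') as f:
    file_content = f.read()  # the raw JSON text from the file

with conn.cursor() as cur:
    # the %s placeholder lets the driver escape the JSON text,
    # so no manual escaping or concatenation is needed
    cur.execute("INSERT INTO t1 (tablename) VALUES (%s)", (file_content,))

conn.commit()
conn.close()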
To test the Python code, first try this:
from pathlib import Path
from . import mysql  # because I use a blueprint, I import mysql from __init__.py; you can skip this

@app.route("/sendf", methods=['GET'])
def sendfilet():
    try:
        print("File Path:", Path(__file__).absolute())
        print("Directory Path:", Path().absolute())  # current working directory, not this file's directory
        # file_path = str(Path().absolute()) + '/output.json'
        parent_path = Path(__file__).parent.absolute()
        file_path = f"{str(parent_path)}/output.json"
        print(file_path, file_path)
        file = open(file_path, 'r')
        file_content = file.read()
        file.close()
        # print(file_content)
        try:
            cursor = mysql.connection.cursor()
            cursor.execute("SELECT COUNT(*) FROM test_json")
            property_count = cursor.fetchone()[0]  # to get the current row count
            sql = """INSERT INTO test_json (id, json_content)
                     VALUES (%s, %s)"""
            # sql = "INSERT INTO test (json_content) VALUES (%s)"
            row_id = property_count + 1  # make a new id
            val = (row_id, file_content)
            cursor.execute(sql, val)
            mysql.connection.commit()
        except Exception as e:
            print(e)
    except Exception as e:
        print(e)
    return "OK", 200
If you don't have an ID column with a primary key in the table, an error will occur.
My test table is named "test_json".
I think you should use SQLAlchemy; it is nicer than flask_mysqldb and makes it easy to autoincrement the id:
id = db.Column(db.BigInteger, autoincrement=True, primary_key=True)
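For example, a minimal Flask-SQLAlchemy sketch along those lines might look like this (the connection URI, model name and column names are only placeholders):

from flask import Flask
from flask_sqlalchemy import SQLAlchemy

app = Flask(__name__)
# assumed connection URI; adjust user, password and database name
app.config['SQLALCHEMY_DATABASE_URI'] = 'mysql+pymysql://root:*****@localhost/*****'
db = SQLAlchemy(app)

class TestJson(db.Model):
    __tablename__ = 'test_json'
    id = db.Column(db.BigInteger, autoincrement=True, primary_key=True)
    json_content = db.Column(db.Text, nullable=False)

with app.app_context():
    db.create_all()
    # the id is generated by the database, so only the JSON text is supplied
    db.session.add(TestJson(json_content='{"hello": "world"}'))
    db.session.commit()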
I am working on a messaging RESTful app using Flask in Python.
I need to implement an in-memory solution, but all the solutions I found online are not working.
Find the code below:
# Helper
def _get_message(id=None):
    """Return a list of message objects (as dicts)"""
    with sqlite3.connect(':memory:') as conn:
        c = conn.cursor()
        if id:
            q = "SELECT * FROM messages WHERE id=? ORDER BY dt DESC"
            rows = c.execute(q, (id,))
        else:
            q = "SELECT * FROM messages ORDER BY dt DESC"
            rows = c.execute(q)
        return [{'id': r[0], 'dt': r[1], 'message': r[2], 'sender': r[3]} for r in rows]

def _add_message(message, sender):
    with sqlite3.connect(':memory:') as conn:
        c = conn.cursor()
        id = str(uuid.uuid4().hex)
        q = "INSERT INTO messages VALUES (?, datetime('now'),?,?)"
        c.execute(q, (id, message, sender))
        conn.commit()
        return c.lastrowid
..and so on with other functions..
Then I run the app and initialise the database (I found this online, though it doesn't use an in-memory solution). I'd like it to load the schema from my db_init.sql:
if __name__ == '__main__':
    # Test whether the database exists; if not, create it and create the table
    if not os.path.exists(':memory:'):
        try:
            conn = sqlite3.connect(':memory:')
            # Absolute path needed for testing environment
            # sql_path = os.path.join(app.config['APP_ROOT'], 'db_init.sql')
            sql_path = os.path.join('db_init.sql')
            cmd = open(sql_path, 'r').read()
            c = conn.cursor()
            c.execute(cmd)
            conn.commit()
            conn.close()
        except IOError:
            print("Couldn't initialize the database, exiting...")
            raise
        except sqlite3.OperationalError:
            print("Couldn't execute the SQL, exiting...")
            raise
    app.run(host='0.0.0.0')
db_init.sql:
CREATE TABLE IF NOT EXISTS messages (
id TEXT NOT NULL,
dt TEXT NOT NULL,
message TEXT NOT NULL,
sender TEXT NOT NULL
);
But I get this error: 'sqlite3.OperationalError: no such table:'
I have also tried "file::memory:", "DataSource:memory:" and "file::memory:?cache=shared" (with uri=True).
How can I use an in-memory database with my db_init.sql initialisation?
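For what it's worth, each sqlite3.connect(':memory:') call opens a brand-new, empty database, which is why the table created at startup is never visible to the helpers. A minimal sketch of one way around that is a shared-cache URI plus one connection kept open for the process lifetime (the _keepalive name is just illustrative):

import sqlite3
import uuid

DB_URI = 'file::memory:?cache=shared'

# Keep one connection open for the whole process: the shared in-memory
# database only lives as long as at least one connection to it is open.
_keepalive = sqlite3.connect(DB_URI, uri=True)
_keepalive.executescript(open('db_init.sql').read())

def _add_message(message, sender):
    # Any new connection using the same URI sees the same in-memory database
    with sqlite3.connect(DB_URI, uri=True) as conn:
        c = conn.cursor()
        c.execute("INSERT INTO messages VALUES (?, datetime('now'), ?, ?)",
                  (uuid.uuid4().hex, message, sender))
        conn.commit()

The simpler alternative is to create a single module-level connection at startup and reuse it in every helper instead of reconnecting each time.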
I want to export the text file data into a MySQL database.
import MySQLdb
import re
conn = MySQLdb.connect (host = "127.0.0.1", user = "root", passwd = "123456")
mycursor =conn.cursor()
mycursor.execute("CREATE DATABASE IF NOT EXISTS EMP")
mycursor.execute("USE EMP")
mycursor.execute("CREATE TABLE IF NOT EXISTS emp_details (Id VARCHAR(255) , Firstname VARCHAR(255),Lastname VARCHAR(255),department VARCHAR(255),salary VARCHAR(255)) ")
f = open("new.txt", "rb")
print (f.next())
for x in f:
    mycursor.execute("INSERT INTO emp_details VALUES (%s,%s,%s,%s,%s)", x)
conn.commit()
print(mycursor.rowcount, "record inserted.")
I am getting this error:
query = query % args
TypeError: not all arguments converted during string formatting
This is what my text file data looks like (comma-separated fields):
Split each line on commas, since you must provide 5 arguments for that execute call. Also, you should explicitly name the columns:
mycursor.execute("INSERT INTO emp_details (Id, Firstname, Lastname, department, salary) VALUES (%s,%s,%s,%s,%s)", x.split(","))
When I create the database CURRENT_users.db:
import sqlite3
conn = sqlite3.connect('CURRENT_users.db')
cursor = conn.cursor()
cursor.execute("""
CREATE TABLE IF NOT EXISTS users (
id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
nome TEXT NOT NULL,
email TEXT NOT NULL,
created_in DATE NOT NULL,
password TEXT NOT NULL
)
""")
print("Success! DATABASE created with success!")
conn.close()
import UserLoginUI_Part2_Test1
And I insert the DATA:
import sqlite3
conn = sqlite3.connect("CURRENT_users.db")
cursor = conn.cursor()
cursor.execute("""
INSERT INTO users (id, nome, email, created_in, password)
VALUES (001, "Renatinho", "renato.lenon@Outlook.com", '2005-4-21', "Plugxyvj9");
""")
conn.commit()
print("A new user has been incremented! Now,have fun!!!")
conn.close()
import UserInterface
In "UserInterface", I type "Renatinho" (that's my NOME data),it seems like that "IF" doesn't work!!
import sqlite3

conn = sqlite3.connect("CURRENT_users.db")
cursor = conn.cursor()

user_INFO = cursor.execute(""" SELECT nome FROM users; """)
user_in_SCRIPT = str(input("Your credentials: USERNAME: \n>>>"))
logged_in = False;

if user_in_SCRIPT == user_INFO:
    print("You are logged in! Enjoy your new account...")
    logged_in = True;
else:
    print("Error: Not a valid user or USERNAME!!")

conn.close()
And it always shows me the ELSE block..
Please, who can help me?
Thanks for everything...
You've called SQL SELECT but you need to fetch the data.
cursor.execute("SELECT nome FROM users")
user_INFO = cursor.fetchone()
This would return a tuple, so to get the string inside, take the zero index:
if user_in_SCRIPT == user_INFO[0]:
print("You are logged in! Enjoy your new account...")
logged_in = True
BTW, you're in Python, not JavaScript. You don't need to end statements with semicolons. :-)
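As a side note, you could also let the database do the lookup with a parameterized WHERE clause, which keeps working once the table has more than one user; a minimal sketch (same table and column names as in the question) would be:

import sqlite3

conn = sqlite3.connect("CURRENT_users.db")
cursor = conn.cursor()

user_in_SCRIPT = input("Your credentials: USERNAME: \n>>>")

# Let the database do the matching instead of comparing in Python
cursor.execute("SELECT nome FROM users WHERE nome = ?", (user_in_SCRIPT,))
row = cursor.fetchone()

logged_in = row is not None
if logged_in:
    print("You are logged in! Enjoy your new account...")
else:
    print("Error: Not a valid user or USERNAME!!")

conn.close()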