I'm a bit at a loss for how to just push my JSON data to MySQL. I have the cursor. Do I need to create a table first? Is there a method to simply push my JSON data straight in?
import json
import os

import mysql.connector

with open("trades.json") as f:
    trades_data = json.load(f)

trades_data = reconfigure_data(trades_data)  # my own reshaping helper

db = mysql.connector.connect(
    host="localhost",
    user=os.environ.get('MYSQL_DB_USER'),
    password=os.environ.get('MYSQL_DB_USER_PASS'),
    database='trades'
)
cursor = db.cursor()
I found the syntax that works:
cursor = db.cursor()

# create the table on the database first
sql = "CREATE TABLE IF NOT EXISTS `test` (`id` INT(11) UNSIGNED NOT NULL AUTO_INCREMENT, `objectid` VARCHAR(100), `type` VARCHAR(5), `ownerid` VARCHAR(100), `color` VARCHAR(15), `shape` VARCHAR(15), `size` VARCHAR(15), PRIMARY KEY (`id`));"
print(sql)
cursor.execute(sql)

with open("trades.json") as f:
    trades_data = json.load(f)

# one INSERT per "right", flattening the nested JSON
for trade in trades_data:
    for right in trade['rights']:
        sql = (f"INSERT INTO test (objectid, type, ownerid, color, shape, size) "
               f"VALUES ('{trade['objectid']}', '{trade['type']}', '{trade['ownerid']}', "
               f"'{right['color']}', '{right['shape']}', '{right['size']}')")
        cursor.execute(sql)

db.commit()
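Note that the f-string interpolation above breaks as soon as a value contains a quote. A safer variant, as a minimal sketch assuming the same table and JSON shape, lets the driver do the escaping and batches the rows with executemany():

rows = [
    (trade['objectid'], trade['type'], trade['ownerid'],
     right['color'], right['shape'], right['size'])
    for trade in trades_data
    for right in trade['rights']
]
sql = ("INSERT INTO test (objectid, type, ownerid, color, shape, size) "
       "VALUES (%s, %s, %s, %s, %s, %s)")
cursor.executemany(sql, rows)  # the driver quotes each value safely
db.commit()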
class Setup:
    def __init__(self, mydb, User):
        self._mydb = mydb
        self._User = User

    def DataBaseSetup(self):
        mycursor = self._mydb.cursor(buffered=True)
        mycursor.execute("CREATE DATABASE IF NOT EXISTS StaffDB;")
        mycursor.execute("CREATE TABLE IF NOT EXISTS StaffDB.Staff(UserName VarChar(40) PRIMARY KEY, Privlage Int Not Null, FirstName VARCHAR(20), LastName VARCHAR(20), ContactNo VarChar(11), jobTitle VARCHAR(40), startDate DATE, salary INT, DateOfBirth DATE, Address VARCHAR(120))")
        mycursor.execute("CREATE TABLE IF NOT EXISTS StaffDB.Classes (RoomNO INT AUTO_INCREMENT PRIMARY KEY, Teacher int, Department VARCHAR(255), ITAccess boolean, ClassCapacity Int)")
        mycursor.execute("CREATE TABLE IF NOT EXISTS StaffDB.Pupils (PupilID INT AUTO_INCREMENT PRIMARY KEY, RegClass int, FirstName VARCHAR(20), LastName VARCHAR(20), ContactNo VARCHAR(11), Address VARCHAR(120))")
        sql = "INSERT IGNORE INTO StaffDB.Staff (Username, Privlage, FirstName, LastName, ContactNo, jobTitle, startDate, salary, DateOfBirth, Address) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
        val = [
            ("root", "0", "Fname", "LName", "07783812342", "Admin", "2012-12-22", "1540000", "1997-01-01", "Black address")
        ]
        mycursor.executemany(sql, val)
        self._mydb.commit()
        print(mycursor.rowcount, "record was inserted.")
        mycursor.execute("CREATE USER 'Tam'@'localhost' IDENTIFIED BY 'Tam';GRANT ALL ON staffdb.* TO 'Tam'@'localhost';FLUSH PRIVILEGES;", multi=True)

    def GetPriv(self):
        mycursor = self._mydb.cursor(buffered=True)
        mycursor.execute(f"SELECT Privlage FROM StaffDB.Staff WHERE USERNAME LIKE '{self._User}'")
        myresult = mycursor.fetchall()
        Priv = myresult[0]
        return Priv[0]
The line mycursor.execute("CREATE USER 'Tam'@'localhost' IDENTIFIED BY 'Tam';GRANT ALL ON staffdb.* TO 'Tam'@'localhost';FLUSH PRIVILEGES;", multi=True) runs without errors but has no effect on the database. When I run the same SQL directly against the database, it works perfectly fine.
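With mysql-connector-python, execute(..., multi=True) returns a generator, and the individual statements are only sent to the server as that generator is consumed; calling execute() and discarding the result does nothing, which matches the symptom above. A minimal sketch of the fix, assuming the same cursor and statements:

statements = ("CREATE USER 'Tam'@'localhost' IDENTIFIED BY 'Tam';"
              "GRANT ALL ON staffdb.* TO 'Tam'@'localhost';"
              "FLUSH PRIVILEGES;")
# iterate the generator so each statement actually executes
for result in mycursor.execute(statements, multi=True):
    print("ran:", result.statement)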
I'm new to PostgreSQL and psycopg2, and I've run into a problem.
import psycopg2

def create_tables(whichone):
    in_str_station = "CREATE TABLE station (id SMALLINT NOT NULL PRIMARY KEY, name VARCHAR(40) NOT NULL," \
                     " country VARCHAR(3) NOT NULL, latitude VARCHAR(10), " \
                     "longitude VARCHAR(10), height SMALLINT);"
    in_str_dailyData = "CREATE TABLE daily_data (id BIGSERIAL NOT NULL PRIMARY KEY," \
                       "s_id SMALLINT NOT NULL REFERENCES station(id)," \
                       "d_date DATE NOT NULL, d_mean NUMERIC(6, 1), quality SMALLINT);"
    int_str_monthlyMean = "CREATE TABLE monthly_mean (id BIGSERIAL NOT NULL PRIMARY KEY," \
                          "s_id SMALLINT NOT NULL REFERENCES station(id)," \
                          "m_date DATE NOT NULL, m_mean NUMERIC(9, 3)," \
                          "var NUMERIC(9, 3), std NUMERIC(9, 3));"
    in_str_yearlymean = "CREATE TABLE yearly_mean (id BIGSERIAL NOT NULL PRIMARY KEY, " \
                        "s_id SMALLINT NOT NULL REFERENCES station(id)," \
                        "y_date DATE NOT NULL, y_mean NUMERIC(9, 3), var NUMERIC(9, 3)," \
                        "std NUMERIC(9, 3), var_m NUMERIC(9, 3), std_m NUMERIC(9, 3));"
    database_list = {'station': in_str_station, 'monthly_mean': int_str_monthlyMean,
                     'daily_data': in_str_dailyData, 'yearly_mean': in_str_yearlymean}
    conn = None
    try:
        conn = psycopg2.connect(
            host="localhost",
            database="climate",
            user="postgres",
            password="1")
        cur = conn.cursor()
        in_str = database_list.get(whichone)
        cur.execute(in_str)
        output_ = cur.fetchall()
        print(output_)
        cur.close()
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
    finally:
        if conn is not None:
            conn.close()
After I run the script, no matter which of the in_str_ statements I choose, the table is not created. I have checked: when I copy the contents of the in_str that cur.execute() ran and paste it into the PostgreSQL shell, everything works.
Where did I make the mistake?
Call conn.commit() after cur.execute(), but before conn.close(). Transactions are not implicitly committed with psycopg2:
If the connection is closed (using the close() method) or destroyed (using del or by letting it fall out of scope) while a transaction is in progress, the server will discard the transaction.
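So the minimal fix is to commit before the connection closes. A sketch of the corrected tail of create_tables() (note also that cur.fetchall() after a CREATE TABLE raises "no results to fetch", which the except block then swallows, so drop that line):

cur = conn.cursor()
in_str = database_list.get(whichone)
cur.execute(in_str)  # CREATE TABLE returns no rows, so don't fetchall()
conn.commit()        # commit before close, or the table is discarded
cur.close()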
I don't know if it's just how you pasted it here, but the indentation is wrong: the body needs to be indented under the function definition.
def foo(a):
    pass
I'm trying to use executemany() to insert four (4) records at a time into a MySQL database table, using the MySQL Connector driver.
The problem is that executemany() is consistently "skipping" the first 3 records without throwing an error, and I can't understand what's wrong.
I'm trying to insert the values in a list:
my_records = [(334, 20533, 387.5, 'Label1'), (335, 20534, 387.5, 'Label2'), (336, 108659, 387.5, 'Label3'), (337, 108660, 387.5, 'Label4')]
And then here's my code:
try:
    mydb = mysql.connector.connect(
        host=os.getenv('DBM_HOST', 'x.x.x.x'),
        user=os.getenv('DBM_USER', 'username'),
        passwd=os.getenv('DBM_PASSWORD', 'password'),
        database=os.getenv('DBM_NAME', 'my_database')
    )
    mycursor = mydb.cursor()
    sql = """
        INSERT INTO my_table (
            batch_id, user_id, assessment, label
        ) VALUES (
            %s, %s, %s, %s
        )
    """
    mycursor.executemany(sql, my_records)
    mydb.commit()
    mycursor.close()
    mydb.close()
    return True
except Exception as exception:
    print(exception)
    return None
Does anyone know what's happening, or why the first 3 records aren't being inserted?
Here's the table structure:
Field       Type                    Null  Key  Default  Extra
id          int(10) unsigned        NO    PRI           auto_increment
batch_id    int(10) unsigned        NO    MUL
user_id     int(10) unsigned        NO
assessment  decimal(10,2) unsigned  NO
label       varchar(250)            YES
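One quick sanity check, assuming the same connection and table (a sketch, not part of the original code): print rowcount right after the call and re-count the rows from a fresh cursor, which tells you whether the rows were never sent or are disappearing later:

mycursor.executemany(sql, my_records)
print(mycursor.rowcount, "rows affected")   # should print 4
mydb.commit()

check = mydb.cursor()
check.execute("SELECT COUNT(*) FROM my_table")
print(check.fetchone()[0])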
I am trying to insert data from a Python dictionary into a MySQL DB, but I don't understand what is wrong with my SQL query.
I am getting this error:
pymysql.err.ProgrammingError: (1064, u"You have an error in your SQL
syntax; check the manual that corresponds to your MySQL server version
for the right syntax to use near ''DiedIn' ('name', 'city') VALUES
('\'Ethel_Merman\'', '\'New_York_City\\n\'')' at line 1")
This is my code:
import pymysql.cursors

wasBornIn = {}
with open("wasBornIn.txt") as f:
    for line in f:
        (key, val) = line.split(':')
        wasBornIn[key] = val

diedIn = {}
with open("diedIn.txt") as f:
    for line in f:
        (key, val) = line.split(':')
        diedIn[key] = val

isLocatedIn = {}
with open("isLocatedIn.txt") as f:
    for line in f:
        (key, val) = line.split(':')
        isLocatedIn[key] = val
connection = pymysql.connect(host='********', user='******', password='******', db='*******',
                             charset='utf8mb4', cursorclass=pymysql.cursors.DictCursor)

try:
    with connection.cursor() as cursor:
        # drop the old tables
        sql = "DROP TABLE DiedIn"
        cursor.execute(sql)
    with connection.cursor() as cursor:
        sql = "DROP TABLE isLocatedIn"
        cursor.execute(sql)
    with connection.cursor() as cursor:
        sql = "DROP TABLE BornIn"
        cursor.execute(sql)
    with connection.cursor() as cursor:
        sql = "CREATE TABLE `DiedIn`(`name` varchar(100) COLLATE utf8_bin NOT NULL, `city` varchar(50) COLLATE utf8_bin NOT NULL, " \
              "PRIMARY KEY(`name`)) ENGINE = InnoDB DEFAULT CHARSET = utf8" \
              " COLLATE = utf8_bin;"
        cursor.execute(sql)
    with connection.cursor() as cursor:
        sql = "CREATE TABLE `isLocatedIn`(`name` varchar(150) COLLATE utf8_bin NOT NULL, `location` varchar(50) COLLATE utf8_bin NOT NULL, " \
              "PRIMARY KEY(`name`)) ENGINE = InnoDB DEFAULT CHARSET = utf8" \
              " COLLATE = utf8_bin;"
        cursor.execute(sql)
    with connection.cursor() as cursor:
        sql = "CREATE TABLE `BornIn`(`name` varchar(100) COLLATE utf8_bin NOT NULL, `city` varchar(50) COLLATE utf8_bin NOT NULL, " \
              "PRIMARY KEY(`name`)) ENGINE = InnoDB DEFAULT CHARSET = utf8" \
              " COLLATE = utf8_bin;"
        cursor.execute(sql)
    with connection.cursor() as cursor:
        for key, value in diedIn.iteritems():
            strKey = repr(key)
            strValue = repr(value)
            sql = "INSERT INTO 'DiedIn' ('name', 'city') VALUES (%s, %s);"
            cursor.execute(sql, (strKey, strValue))
    # connection is not autocommit by default, so you must commit to save
    # your changes.
    connection.commit()
finally:
    connection.close()
Thanks for the help.
Try backticks (or no quotes at all) around the identifiers; single-quoted names are string literals in MySQL, not table or column names, which is exactly what the syntax error is pointing at:
sql = "INSERT INTO `DiedIn` (`name`, `city`) VALUES (%s, %s);"
I would like to dump only one table, but by the looks of it there is no parameter for that.
I found this example of a dump, but it covers all the tables in the DB:
# Convert file existing_db.db to SQL dump file dump.sql
import sqlite3, os

con = sqlite3.connect('existing_db.db')
with open('dump.sql', 'w') as f:
    for line in con.iterdump():
        f.write('%s\n' % line)
You can copy just the single table into an in-memory DB:
import sqlite3

def getTableDump(db_file, table_to_dump):
    conn = sqlite3.connect(':memory:')
    cu = conn.cursor()
    cu.execute("attach database '" + db_file + "' as attached_db")
    cu.execute("select sql from attached_db.sqlite_master "
               "where type='table' and name='" + table_to_dump + "'")
    sql_create_table = cu.fetchone()[0]
    cu.execute(sql_create_table)
    cu.execute("insert into " + table_to_dump +
               " select * from attached_db." + table_to_dump)
    conn.commit()
    cu.execute("detach database attached_db")
    return "\n".join(conn.iterdump())

TABLE_TO_DUMP = 'table_to_dump'
DB_FILE = 'db_file'

print(getTableDump(DB_FILE, TABLE_TO_DUMP))
Pro:
Simplicity and reliability: you don't have to re-write any library method, and you are more assured that the code is compatible with future versions of the sqlite3 module.
Con:
You need to load the whole table in memory, which may or may not be a big deal depending on how big the table is, and how much memory is available.
The dump implementation lives at http://coverage.livinglogic.de/Lib/sqlite3/dump.py.html (local path: PythonPath/Lib/sqlite3/dump.py).
You can modify it a little:
# Mimic the sqlite3 console shell's .dump command
# Author: Paul Kippes <kippesp@gmail.com>
def _iterdump(connection, table_name):
    """
    Returns an iterator to the dump of the database in an SQL text format.

    Used to produce an SQL dump of the database. Useful to save an in-memory
    database for later restoration. This function should not be called
    directly but instead called from the Connection method, iterdump().
    """
    cu = connection.cursor()
    yield('BEGIN TRANSACTION;')

    # sqlite_master table contains the SQL CREATE statements for the database.
    q = """
        SELECT name, type, sql
        FROM sqlite_master
        WHERE sql NOT NULL AND
              type == 'table' AND
              name == :table_name
        """
    schema_res = cu.execute(q, {'table_name': table_name})
    for table_name, type, sql in schema_res.fetchall():
        if table_name == 'sqlite_sequence':
            yield('DELETE FROM sqlite_sequence;')
        elif table_name == 'sqlite_stat1':
            yield('ANALYZE sqlite_master;')
        elif table_name.startswith('sqlite_'):
            continue
        else:
            yield('%s;' % sql)

        # Build the insert statement for each row of the current table
        res = cu.execute("PRAGMA table_info('%s')" % table_name)
        column_names = [str(table_info[1]) for table_info in res.fetchall()]
        q = "SELECT 'INSERT INTO \"%(tbl_name)s\" VALUES("
        q += ",".join(["'||quote(" + col + ")||'" for col in column_names])
        q += ")' FROM '%(tbl_name)s'"
        query_res = cu.execute(q % {'tbl_name': table_name})
        for row in query_res:
            yield("%s;" % row[0])

    # Now when the type is 'index', 'trigger', or 'view':
    #q = """
    #    SELECT name, type, sql
    #    FROM sqlite_master
    #    WHERE sql NOT NULL AND
    #          type IN ('index', 'trigger', 'view')
    #    """
    #schema_res = cu.execute(q)
    #for name, type, sql in schema_res.fetchall():
    #    yield('%s;' % sql)

    yield('COMMIT;')
Now it accepts a table name as its second argument. You can use it like this:
with open('dump.sql', 'w') as f:
    for line in _iterdump(con, 'GTS_vehicle'):
        f.write('%s\n' % line)
You'll get something like:
BEGIN TRANSACTION;
CREATE TABLE "GTS_vehicle" ("id" integer NOT NULL PRIMARY KEY, "name" varchar(20) NOT NULL, "company_id" integer NULL, "license_plate" varchar(20) NULL, "icon" varchar(100) NOT NULL DEFAULT 'baseicon.png', "car_brand" varchar(30) NULL, "content_type_id" integer NULL, "modemID" varchar(100) NULL, "distance" integer NULL, "max_speed" integer NULL DEFAULT 100, "max_rpm" integer NULL DEFAULT 4000, "fuel_tank_volume" integer NULL DEFAULT 70, "max_battery_voltage" integer NULL, "creation_date" datetime NOT NULL, "last_RFID" text NULL);
INSERT INTO "GTS_vehicle" VALUES(1,'lan1_op1_car1',1,'03115','baseicon.png','UFP',16,'lan_op1_car1',NULL,100,4000,70,12,'2011-06-23 11:54:32.395000',NULL);
INSERT INTO "GTS_vehicle" VALUES(2,'lang_op1_car2',1,'03','baseicon.png','ыва',16,'lan_op1_car2',NULL,100,4000,70,12,'2011-06-23 11:55:02.372000',NULL);
INSERT INTO "GTS_vehicle" VALUES(3,'lang_sup_car1',1,'0000','baseicon.png','Fiat',16,'lan_sup_car1',NULL,100,4000,70,12,'2011-06-23 12:32:09.017000',NULL);
INSERT INTO "GTS_vehicle" VALUES(4,'lang_sup_car2',1,'123','baseicon.png','ЗАЗ',16,'lan_sup_car2',NULL,100,4000,70,12,'2011-06-23 12:31:38.108000',NULL);
INSERT INTO "GTS_vehicle" VALUES(9,'lang_op2_car1',1,'','baseicon.png','',16,'1233211234',NULL,100,4000,70,12,'2011-07-05 13:32:09.865000',NULL);
INSERT INTO "GTS_vehicle" VALUES(11,'Big RIder',1,'','baseicon.png','0311523',16,'111',NULL,100,4000,70,20,'2011-07-07 12:12:40.358000',NULL);
COMMIT;
With iterdump(), all the information is emitted like this:
INSERT INTO "name" VALUES(1, 'John')
INSERT INTO "name" VALUES(2, 'Jane')
INSERT INTO "phone" VALUES(1, '111000')
INSERT INTO "phone" VALUES(2, '111001')
An easy way is to filter on certain keywords with the str.startswith() method.
For example, if the table name is 'phone':
# Convert file existing_db.db to SQL dump file dump.sql
import sqlite3, os

con = sqlite3.connect('existing_db.db')
with open('dump.sql', 'w') as f:
    for line in con.iterdump():
        if line.startswith('INSERT INTO "phone"'):
            f.write('%s\n' % line)
Not very smart, but it may fit your objective.
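Note that this keeps only the INSERT statements. If you also want the schema and a transaction around the restore, here is a small extension of the same idea (str.startswith() accepts a tuple of prefixes; whether the stored CREATE TABLE text quotes the name depends on how the table was originally created, so adjust the prefix to match your dump):

wanted = ('INSERT INTO "phone"', 'CREATE TABLE "phone"')
with open('dump.sql', 'w') as f:
    f.write('BEGIN TRANSACTION;\n')
    for line in con.iterdump():
        if line.startswith(wanted):
            f.write('%s\n' % line)
    f.write('COMMIT;\n')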