I have executed this code to insert a dictionary into my table in database,
d = {'err': '0', 'tst': '0', 'Type o': 'FTP', 'recip': 'ADMIN', 'id': '101', 'origin': 'REPORT', 'Type recip': 'SMTP', 'date': '2010-01-10 18:47:52'}
db = MySQLdb.connect("localhost","admin","password","database")
cursor = db.cursor()
cursor.execute("""INSERT INTO mytable(ID, ERR, TST, DATE, ORIGIN, TYPE_O, RECIP, TYPE_RECIP) VALUES (%(id)s, %(err)s, %(tst)s, %(date)s, %(origin)s, %(Type o)s, %(recip)s, %(Type recip)s)""", d)
db.commit()
db.close()
Create statement of my table:
CREATE TABLE mytable (
`ID` tinyint unsigned NOT NULL,
`ERR` tinyint NOT NULL,
`TST` tinyint unsigned NOT NULL,
`DATE` datetime NOT NULL,
`ORIGIN` varchar(30) NOT NULL,
`TYPE_O` varchar(10) NOT NULL,
`RECIP` varchar(30) NOT NULL,
`TYPE_RECIP` varchar(10) NOT NULL,
PRIMARY KEY (`ID`,`DATE`)
) ENGINE = InnoDB;
But i have an error, it says:
1064, "you have an error in your SQL syntax; check the manual that
corresponds to you MySQL server version... )
Be aware of SQL injections and use the second argument to execute for inserting your query parameters:
cursor.execute("""
INSERT INTO
table
(name, age, origin, date)
VALUES
(%(name)s, %(age)s, %(origin)s, %(date)s)
""", d)
Related
I'm trying to implement a few simple SQL insert statements with python-mariadb-connector but cannot figure out what I'm doing wrong.
The database looks like this:
SET FOREIGN_KEY_CHECKS = false;
CREATE OR REPLACE TABLE `forums` (
`id` int(10) unsigned NOT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
CREATE OR REPLACE TABLE `accounts` (
`id` int(10) unsigned NOT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
CREATE OR REPLACE TABLE `posts` (
`id` int(10) unsigned NOT NULL,
`forum_id` int(10) unsigned NOT NULL,
`account_id` int(10) unsigned NOT NULL,
PRIMARY KEY (`id`),
KEY `posts_forum_fk` (`forum_id`),
KEY `posts_account_fk` (`account_id`),
CONSTRAINT `posts_account_fk` FOREIGN KEY (`account_id`) REFERENCES `accounts` (`id`),
CONSTRAINT `posts_forum_fk` FOREIGN KEY (`forum_id`) REFERENCES `forums` (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
CREATE OR REPLACE TABLE `comments` (
`id` int(10) unsigned NOT NULL,
`post_id` int(10) unsigned NOT NULL,
`account_id` int(10) unsigned NOT NULL,
`parent_id` int(10) unsigned DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `comments_post_fk` (`post_id`),
KEY `comments_account_fk` (`account_id`),
-- KEY `comments_comments_fk` (`parent_id`),
-- CONSTRAINT `comments_comments_fk` FOREIGN KEY (`parent_id`) REFERENCES `comments` (`id`),
CONSTRAINT `comments_account_fk` FOREIGN KEY (`account_id`) REFERENCES `accounts` (`id`),
CONSTRAINT `comments_post_fk` FOREIGN KEY (`post_id`) REFERENCES `posts` (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
SET FOREIGN_KEY_CHECKS = true;
The code for inserting data looks like this:
import mariadb
config = {
"user": "db_user_name",
"password": "db_passwd",
"host": "db_host",
"port": 3306,
"database": "db_name"
}
if __name__ == '__main__':
with mariadb.connect(**config) as conn:
cur = conn.cursor()
cur.executemany(
"INSERT INTO `forums` (`id`) VALUES (?)",
[(1,), (2,), (3,)]
)
cur.executemany(
"INSERT INTO `accounts` (`id`) VALUES (?)",
[(1,), (2,), (3,), (4,)]
)
cur.executemany(
"INSERT INTO `posts` (`id`, `forum_id`, `account_id`) VALUES (?, ?, ?)",
[(6, 3, 1)]
)
cur.executemany(
"INSERT INTO `comments` (`id`, `post_id`, `account_id`, `parent_id`) VALUES (?, ?, ?, ?)",
[(1, 6, 1, None), (2, 6, 2, 1)]
) # exception happens here
When executing this, I get the following error:
Traceback (most recent call last):
File ".../db_test.py", line 28, in <module>
cur.executemany(
mariadb.DatabaseError.DataError: Invalid parameter type at row 2, column 4
Im not sure how executemany is implemented but I think it should do something similar to the following SQL-query:
INSERT INTO `forums` (`id`) VALUES (1), (2), (3);
INSERT INTO `accounts` (`id`) VALUES (1), (2), (3), (4);
INSERT INTO `posts` (`id`, `forum_id`, `account_id`) VALUES (6, 3, 1);
INSERT INTO `comments` (`id`, `post_id`, `account_id`, `parent_id`)
VALUES (1, 6, 1, NULL), (2, 6, 2, 1);
which works just fine for me...
Is it a bug or am I doing something wrong here?
It took me a while
cur.executemany(
"INSERT INTO `comments` (`id`, `post_id`, `account_id`, `parent_id`) VALUES (?, ?, ?, ?)",
[(1, 6, 1, None), (2, 6, 2, 1)]
)
But in your comment table, you have this constraint
CONSTRAINT `comments_comments_fk` FOREIGN KEY (`parent_id`)
REFERENCES `comments` (`id`),
Before you can enter (2, 6, 2, 1) the tuple (1, 6, 1, None) has to already commit in the database and in a bulk insert the commit is made after all inserts are in the database, but the first tuple isn't at the time not there
so if you make this, both rows will appear in the database(i also committed all other tables after bulk insert):
MariaDB
mycursor.execute(
"INSERT INTO `comments` (`id`, `post_id`, `account_id`,
`parent_id`) VALUES (?, ?, ?, ?)",
(1, 6, 1,None ))
mydb.commit()
mycursor.executemany(
"INSERT INTO `comments` (`id`, `post_id`, `account_id`, `parent_id`) VALUES (?, ?, ?, ?)",
[ (2, 6, 2, 1)])
mydb.commit()
MySQL
sql = """INSERT INTO forums (id) VALUES (%s)"""
val = [("1",), ("2",), ("3",)]
mycursor.executemany(sql,val)
mydb.commit()
mycursor.executemany(
"INSERT INTO accounts (id) VALUES (%s)",
[(1,), (2,), (3,), (4,)]
)
mydb.commit()
mycursor.executemany(
"INSERT INTO posts (id, forum_id, account_id) VALUES (%s, %s, %s)",
[(6, 3, 1)]
)
mydb.commit()
mycursor.execute(
"INSERT INTO `comments` (`id`, `post_id`, `account_id`, `parent_id`) VALUES (%s, %s, %s, %s)",
(1, 6, 1,None ))
mydb.commit()
mycursor.executemany(
"INSERT INTO `comments` (`id`, `post_id`, `account_id`, `parent_id`) VALUES (%s, %s, %s, %s)",
[ (2, 6, 2, 1)])
mydb.commit()
Python: 2.7.12
pyodbc: 4.0.24
OS: Ubuntu 16.4
DB: MySQL 8
driver: MySQL 8
Expected behaviour: resultset should have numbers in columns with datatype int
Actual Behaviour: All of the columns with int data type have 0's (If parameterised query is used)
Here's the queries -
1.
cursor.execute("SELECT * FROM TABLE where id =7")
Result set:
[(7, 1, None, 1, u'An', u'Zed', None, u'Ms', datetime.datetime(2016, 12, 20, 0, 0), u'F', u'Not To Be Disclosed', None, None, u'SPRING', None, u'4000', datetime.datetime(2009, 5, 20, 18, 55), datetime.datetime(2019, 1, 4, 14, 25, 58, 763000), 0, None, None, None, bytearray(b'\x00\x00\x00\x00\x01(n\xba'))]
2.
cursor.execute("SELECT * FROM patients where patient_id=?", [7])`
or
cursor.execute("SELECT * FROM patients where patient_id=?", ['7'])
or
cursor.execute("SELECT * FROM patients where patient_id IN ", [7])
Result set:
[(0, 0, None, 0, u'An', u'Zed', None, u'Ms', datetime.datetime(2016, 12, 20, 0, 0), u'F', u'Not To Be Disclosed', None, None, u'SPRING', None, u'4000', datetime.datetime(2009, 5, 20, 18, 55), datetime.datetime(2019, 1, 4, 14, 25, 58, 763000), 0, None, None, None, bytearray(b'\x00\x00\x00\x00\x01(n\xba'))]
Rest of the result set is okay except for the columns with int data type that all have 0's if paramterized query is used.
It seems like it should have worked without issues. Can I get some help here.
Edit : Here's the schema of the table:
CREATE TABLE `patient
`lastname` varchar(30) DEFAULT NULL,
`known_as` varchar(30) DEFAULT NULL,
`title` varchar(50) DEFAULT NULL,
`dob` datetime DEFAULT NULL,
`sex` char(1) DEFAULT NULL,
`address1` varchar(30) DEFAULT NULL,
`address2` varchar(30) DEFAULT NULL,
`address3` varchar(30) DEFAULT NULL,
`city` varchar(30) DEFAULT NULL,
`state` varchar(16) DEFAULT NULL,
`postcode` char(4) DEFAULT NULL,
`datecreated` datetime NOT NULL,
`dateupdated` datetime(6) DEFAULT NULL,
`isrep` tinyint(1) DEFAULT NULL,
`photo` longblob,
`foreign_images_imported` tinyint(1) DEFAULT NULL,
`ismerged` tinyint(1) DEFAULT NULL,
`rowversion` varbinary(8) DEFAULT NULL,
PRIMARY KEY (`patient_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
You have encountered this bug in MySQL Connector/ODBC.
EDIT: The bug has now been fixed.
The following (Python 3) test code verifies that MySQL Connector/ODBC returns zero (incorrect), while mysqlclient returns the correct value:
import MySQLdb # from mysqlclient
import pyodbc
host = 'localhost'
user = 'root'
passwd = 'whatever'
db = 'mydb'
port = 3307
charset = 'utf8mb4'
use_odbc = False # or True
print(f'{"" if use_odbc else "not "}using ODBC ...')
if use_odbc:
connection_string = (
f'DRIVER=MySQL ODBC 8.0 ANSI Driver;'
f'SERVER={host};UID={user};PWD={passwd};DATABASE={db};PORT={port};'
f'charset={charset};'
)
cnxn = pyodbc.connect(connection_string)
print(f'{cnxn.getinfo(pyodbc.SQL_DRIVER_NAME)}, version {cnxn.getinfo(pyodbc.SQL_DRIVER_VER)}')
else:
cnxn = MySQLdb.connect(
host=host, user=user, passwd=passwd, db=db, port=port, charset=charset
)
int_value = 123
crsr = cnxn.cursor()
crsr.execute("CREATE TEMPORARY TABLE foo (id varchar(10) PRIMARY KEY, intcol int, othercol longblob)")
crsr.execute(f"INSERT INTO foo (id, intcol) VALUES ('Alfa', {int_value})")
sql = f"SELECT intcol, othercol FROM foo WHERE id = {'?' if use_odbc else '%s'}"
crsr.execute(sql, ('Alfa',))
result = crsr.fetchone()[0]
print(f'{"pass" if result == int_value else "FAIL"} -- expected: {repr(int_value)} ; actual: {repr(result)}')
Console output with use_odbc = True:
using ODBC ...
myodbc8a.dll, version 08.00.0018
FAIL -- expected: 123 ; actual: 0
Console output with use_odbc = False:
not using ODBC ...
pass -- expected: 123 ; actual: 123
FWIW i just posted a question where i was seeing this in version 3.1.14 of the ODBC connector but NOT in version 3.1.10.
I have a tuple as below
data = ({'weather station name': 'Substation', 'wind': '6 km/h', 'barometer': '1010.3hPa', 'humidity': '42%', 'temperature': '34.5 C', 'place_id': '001D0A00B36E', 'date': '2016-05-10 09:48:58'})
I am trying to push the values from the above tuple to the postgres table using the code below:
try:
con = psycopg2.connect("dbname='WeatherForecast' user='postgres' host='localhost' password='****'")
cur = con.cursor()
cur.executemany("""INSERT INTO weather_data(temperature,humidity,wind,barometer,updated_on,place_id) VALUES (%(temperature)f, %(humidity)f, %(wind)f, %(barometer)f, %(date)s, %(place_id)d)""", final_weather_data)
ver = cur.fetchone()
print(ver)
except psycopg2.DatabaseError as e:
print('Error {}'.format(e))
sys.exit(1)
finally:
if con:
con.close()
Where datatype of each field in the DB is as below:
id serial NOT NULL,
temperature double precision NOT NULL,
humidity double precision NOT NULL,
wind double precision NOT NULL,
barometer double precision NOT NULL,
updated_on timestamp with time zone NOT NULL,
place_id integer NOT NULL,
When i run the code to push the data into postgres table using psycopg2, it is raising an error "ValueError: unsupported format character 'f'"
I hope the issue is in formatting. Am using Python3.4
Have a look at the documentation:
The variables placeholder must always be a %s, even if a different placeholder (such as a %d for integers or %f for floats) may look more appropriate:
>>> cur.execute("INSERT INTO numbers VALUES (%d)", (42,)) # WRONG
>>> cur.execute("INSERT INTO numbers VALUES (%s)", (42,)) # correct
While, your SQL query contains all type of placeholders:
"""INSERT INTO weather_data(temperature,humidity,wind,barometer,updated_on,place_id)
VALUES (%(temperature)f, %(humidity)f, %(wind)f, %(barometer)f, %(date)s, %(place_id)d)"""
I'm trying to import a simple CSV file that I downloaded from Quandl into a MySQL table with the odo python package
t = odo('C:\ProgramData\MySQL\MySQL Server 5.6\Uploads\WIKI_20160725.partial.csv', 'mysql+pymysql://' + self._sql._user+':'
+ self._sql._password +'#localhost/testDB::QUANDL_DATA_WIKI')
The first row looks like this in the CSV:
A 7/25/2016 46.49 46.52 45.92 46.14 1719772 0 1 46.49 46.52 45.92 46.14 1719772
The MySQL table is defined as follows:
Ticker varchar(255) NOT NULL,
Date date NOT NULL,
Open numeric(15,2) NULL,
High numeric(15,2) NULL,
Low numeric(15,2) NULL,
Close numeric(15,2) NULL,
Volume bigint NULL,
ExDividend numeric(15,2),
SplitRatio int NULL,
OpenAdj numeric(15,2) NULL,
HighAdj numeric(15,2) NULL,
LowAdj numeric(15,2) NULL,
CloseAdj numeric(15,2) NULL,
VolumeAdj bigint NULL,
PRIMARY KEY(Ticker,Date)
It throws an exception 1292 with the following info:
sqlalchemy.exc.InternalError: (pymysql.err.InternalError) (1292, "Incorrect date value: '7/25/2016' for column 'Date' at row 1") [SQL: 'LOAD DATA INFILE %(path)s\n INTO TABLE QUANDL_DATA_WIKI\n CHARACTER SET %(encoding)s\n FIELDS\n TERMINATED BY %(delimiter)s\n ENCLOSED BY %(quotechar)s\n ESCAPED BY %(escapechar)s\n LINES TERMINATED BY %(lineterminator)s\n IGNORE %(skiprows)s LINES\n '] [parameters: {'path': 'C:\ProgramData\MySQL\MySQL Server 5.6\Uploads\WIKI_20160725.partial.csv', 'quotechar': '"', 'skiprows': 0, 'lineterminator': '\r\n', 'escapechar': '\', 'delimiter': ',', 'encoding': 'utf8'}]
Does anyone have an idea what is wrong with the date in the first row? It doesn't seem to match it to the MySql database
mysql has problems with date conversions. I noticed when I defined the date field as a varchar
Date varchar(255) NOT NULL
then the csv file was read properly
in my SQL the conversion of the string to date format then looks like this:
STR_TO_DATE(Date, "%m/%d/%Y")
This is my code. It will read a bunch of files with SQL commands in identical formats (i.e. comments prefaced with -, along with some blank lines, hence the if condition in my loop)
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import csv,sqlite3,os
conn = sqlite3.connect('db_all.db')
c = conn.cursor()
files = os.listdir('C:\\Users\\ghb\\Desktop\\database_schema')
for file in files:
string = ''
with open(file, 'rb') as read:
for line in read.readlines():
if line[0]!='-' and len(line)!=0: string = string + line.rstrip() #error also occurs if I skip .rstrip
print string #for debugging purposes
c.executescript(string)
string=''
conn.close()
Error:
Traceback (most recent call last):
File "C:/Users/ghb/Desktop/database_schema/database_schema.py", line 16, in <module>
c.executescript(string)
sqlite3.OperationalError: near "SET": syntax error
For fear of clutter, here is the output of the string variable (without the INSERT INTO data, that is confidential):
SET SQL_MODE="NO_AUTO_VALUE_ON_ZERO";
CREATE TABLE IF NOT EXISTS `career` (
`carKey` int(11) NOT NULL AUTO_INCREMENT,
`persID` bigint(10) unsigned zerofill NOT NULL,
`persKey` int(6) unsigned NOT NULL,
`wcKey` int(2) unsigned NOT NULL,
`wtKey` int(2) unsigned DEFAULT NULL,
`pwtKey` int(2) unsigned DEFAULT NULL,
`dptId` bigint(10) unsigned NOT NULL,
`dptNr` int(4) unsigned NOT NULL,
`dptalias` varchar(10) COLLATE utf8_icelandic_ci NOT NULL,
`class` enum('A','B') COLLATE utf8_icelandic_ci NOT NULL,
`getfilm` enum('yes','no') COLLATE utf8_icelandic_ci NOT NULL DEFAULT 'yes',
`finished` enum('true','false') COLLATE utf8_icelandic_ci NOT NULL DEFAULT 'false',
`startDate` date NOT NULL,
`endDate` date DEFAULT NULL,
`regDate` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
`user` tinyint(4) NOT NULL,
`status` set('LÉST','BRFL','BRFD','BRNN') COLLATE utf8_icelandic_ci DEFAULT NULL,
`descr` text COLLATE utf8_icelandic_ci,
PRIMARY KEY (`carKey`),
KEY `pwtKey` (`pwtKey`),
KEY `wtKey` (`wtKey`),
KEY `dptId` (`dptId`),
KEY `user` (`user`),
KEY `persID` (`persID`),
KEY `persKey` (`persKey`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8 COLLATE=utf8_icelandic_ci AUTO_INCREMENT=2686 ;
Input sample:
INSERT INTO `career` (`carKey`, `persID`, `persKey`, `wcKey`, `wtKey`, `pwtKey`, `dptId`, `dptNr`, `dptalias`, `class`, `getfilm`, `finished`, `startDate`, `endDate`, `regDate`, `user`,
(5, 34536, 346, 22, 44, 34, 3454356, 33, 'asdasd', 'ASDASD', 'ASDSD', 'true', '1991-02-04', '2010-05-02', '2009-05-02 00:01:02', 1, NULL, 'HH:
'),