Python MySQL Connector not inserting

So here is my code. I don't know why the insert isn't working. A SELECT statement works, and it doesn't fail the try/except either, leading me to believe the query is executing. Entering the insert query manually into MySQL Workbench also works fine.
import datetime
import json
from urllib.request import urlopen

import mysql.connector
from mysql.connector import Error

def runQuery(query):
    try:
        conn = mysql.connector.connect(host='localhost',
                                       database='optionsdata',
                                       user='python',
                                       passwd='python')
        cursor = conn.cursor()
        cursor.execute(query)
        conn.close()
        cursor.close()
        print(query)
    except Error as e:
        print("Error", e)

def convertDate(date_str):
    date_object = datetime.datetime.strptime(date_str, '%m/%d/%Y').date()
    return date_object

ticker = "MSFT"
html = urlopen("https://api.nasdaq.com/api/quote/" + ticker + "/option-chain?assetclass=stocks&todate=2020-05-08&fromdate=2020-04-07&limit=0").read().decode('utf-8')
optionsData = json.loads(html)
rows = optionsData["data"]["optionChainList"]["rows"]

for row in rows:
    call = row["call"]
    expiryDate = convertDate(call["expiryDate"])
    query = "INSERT INTO `optionsdata`.`call` (`ticker`, `symbol`, `last`, `change`, `bid`, `ask`, `volume`, `openinterest`, `strike`, `expiryDate`, `grabTime`) VALUES ('{0}', '{1}', '{2}', '{3}', '{4}', '{5}', '{6}', '{7}', '{8}', '{9}', '{10}');".format(ticker, call["symbol"], call["last"], call["change"], call["bid"], call["ask"], call["volume"], call["openinterest"], call["strike"], expiryDate, datetime.datetime.now())
    runQuery(query)
Here is a sample of what an insert query looks like:
INSERT INTO `optionsdata`.`call` (`ticker`, `symbol`, `last`, `change`, `bid`, `ask`, `volume`, `openinterest`, `strike`, `expiryDate`, `grabTime`) VALUES ('MSFT', '#MSFT 200508C00175000', '3.21', '-0.29', '2.80', '4.25', '54', '228', '175.00', '2020-05-08', '2020-04-09 19:39:22.554538');

This is a great question! I spent hours trying to figure this out a few weeks ago. It's tricky because after executing the query, you have to call
conn.commit()
to actually update the data. So change your runQuery function like this:
def runQuery(query):
    try:
        conn = mysql.connector.connect(host='localhost',
                                       database='optionsdata',
                                       user='python',
                                       passwd='python')
        cursor = conn.cursor()
        cursor.execute(query)
        conn.commit()  # Added commit line
        conn.close()
        cursor.close()
        print(query)
    except Error as e:
        print("Error", e)
See the MySQL Connector/Python documentation on commit() for more info.
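On a related note, building the INSERT with str.format means every value is quoted by hand, and any value containing a quote will break the statement. You can let the connector do the quoting instead by passing the values separately. A minimal sketch, keeping the same table and columns (the query/params lines would go inside the for loop in place of the .format() call):

def runQuery(query, params):
    try:
        conn = mysql.connector.connect(host='localhost',
                                       database='optionsdata',
                                       user='python',
                                       passwd='python')
        cursor = conn.cursor()
        cursor.execute(query, params)  # the driver quotes each value
        conn.commit()
        cursor.close()
        conn.close()
    except Error as e:
        print("Error", e)

# inside the for loop, in place of the .format() call:
query = ("INSERT INTO `optionsdata`.`call` "
         "(`ticker`, `symbol`, `last`, `change`, `bid`, `ask`, `volume`, "
         "`openinterest`, `strike`, `expiryDate`, `grabTime`) "
         "VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)")
params = (ticker, call["symbol"], call["last"], call["change"], call["bid"],
          call["ask"], call["volume"], call["openinterest"], call["strike"],
          expiryDate, datetime.datetime.now())
runQuery(query, params)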

Related

I don't know how to solve this.

error:
You have an error in your SQL syntax; check the manual that corresponds to your MySQL server version for the right syntax to use near '%s)' at line 1
upi = upi_entry.get()
mysqldb = mysql.connector.connect(
    host="localhost",
    user="root",
    password="deol9646",
    database="train_login",
)
mycursor = mysqldb.cursor()
try:
    mycursor.execute(
        """create table if not exists upi_data(upi text)"""
    )
    sql = "INSERT INTO UPI_DATA (UPI) VALUES (%s)"
    val = upi
    mycursor.execute(sql, val)
    mysqldb.commit()
    lastid = mycursor.lastrowid
    messagebox.showinfo("information", "upi inserted successfully...")
    upi_entry.delete(0, END)
    upi_entry.focus_set()
except Exception as e:
    print(e)
    mysqldb.rollback()
mysqldb.close()
The parameters need to be a tuple; you're passing in val as a single value, so the MySQL driver doesn't turn %s into anything and that ends up a syntax error.
Add a comma to turn the parenthesized expression (upi), which is just upi, into a 1-tuple: (upi,)
sql = "INSERT INTO UPI_DATA (UPI) VALUES (%s)"
val = (upi,)
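If you prefer named placeholders, mysql.connector also accepts a dict of parameters; a small sketch of the same insert:

sql = "INSERT INTO UPI_DATA (UPI) VALUES (%(upi)s)"
mycursor.execute(sql, {"upi": upi})
mysqldb.commit()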

python MySQL insert big data

Using Python, I am looping through a CSV file to read data, then I make some modifications to each row and call a save function to insert the modified data into MySQL.
def save(Id, modifiedData):
    try:
        mydb = mysql.connector.connect(host="localhost",user="use",password="pass",database="data")
        sql = "INSERT INTO data (Id, modifiedData) VALUES (%s, %s)"
        recordTuple = (Id, modifiedData)
        mycursor = mydb.cursor()
        mycursor.execute(sql,recordTuple)
        mydb.commit()
        print("Record inserted successfully into table")
    except mysql.connector.Error as error:
        print("Failed to insert into MySQL table {}".format(error))

def main():
    for row in csv:
        # modify row
        # create Id
        save(Id, modifiedData)
But I don't think opening a MySQL connection and inserting data on each iteration is a good solution; it will be time- and resource-consuming, especially when I move to a real server in production.
How can I improve my solution?
Ideally, connections should be managed by a connection pool and rows should be committed in bulk. For the amount of data in a typical CSV, though, you don't need to worry too much. If you don't want to deal with that yourself, I recommend using an ORM like SQLAlchemy.
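For reference, mysql-connector-python ships a simple pool; a minimal sketch, reusing the host/user/database settings from the question:

from mysql.connector import pooling

# a small pool reused across inserts; the settings are placeholders
pool = pooling.MySQLConnectionPool(pool_name="csv_pool",
                                   pool_size=5,
                                   host="localhost",
                                   user="use",
                                   password="pass",
                                   database="data")

def save(Id, modifiedData):
    conn = pool.get_connection()  # borrow a connection from the pool
    try:
        cursor = conn.cursor()
        cursor.execute("INSERT INTO data (Id, modifiedData) VALUES (%s, %s)",
                       (Id, modifiedData))
        conn.commit()
    finally:
        conn.close()              # returns the connection to the pool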
You only need to create the connection once, and that should be in function main, which then passes the connection to function save as follows:
def save(mydb, Id, modifiedData):
    try:
        sql = "INSERT INTO data (Id, modifiedData) VALUES (%s, %s)"
        recordTuple = (Id, modifiedData)
        mycursor = mydb.cursor()
        mycursor.execute(sql,recordTuple)
        mydb.commit()
        print("Record inserted successfully into table")
    except mysql.connector.Error as error:
        print("Failed to insert into MySQL table {}".format(error))

def main():
    try:
        mydb = mysql.connector.connect(host="localhost",user="use",password="pass",database="data")
    except mysql.connector.Error as error:
        print("Failed to create connection: {}".format(error))
        return
    for row in csv:
        # modify row
        # create Id
        save(mydb, Id, modifiedData)
For perhaps even greater performance you can try executemany:
def save(mydb, modified_records):
    try:
        sql = "INSERT INTO data (Id, modifiedData) VALUES (%s, %s)"
        mycursor = mydb.cursor()
        mycursor.executemany(sql, modified_records)
        mydb.commit()
        print("Records inserted successfully into table")
    except mysql.connector.Error as error:
        print("Failed to insert into MySQL table {}".format(error))

def main():
    try:
        mydb = mysql.connector.connect(host="localhost",user="use",password="pass",database="data")
    except mysql.connector.Error as error:
        print("Failed to create connection: {}".format(error))
        return
    modified_records = []
    for row in csv:
        # modify row
        # create Id
        modified_records.append([Id, modifiedData])
    save(mydb, modified_records)
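If the CSV is very large you may not want to build one giant batch in memory either. A minimal sketch of committing in fixed-size chunks, reusing the save(mydb, modified_records) function above (the chunk size is an arbitrary choice):

CHUNK_SIZE = 1000  # arbitrary batch size

def main():
    try:
        mydb = mysql.connector.connect(host="localhost", user="use",
                                       password="pass", database="data")
    except mysql.connector.Error as error:
        print("Failed to create connection: {}".format(error))
        return
    batch = []
    for row in csv:
        # modify row, create Id
        batch.append((Id, modifiedData))
        if len(batch) >= CHUNK_SIZE:
            save(mydb, batch)  # insert and commit this chunk
            batch = []
    if batch:                  # flush the remainder
        save(mydb, batch)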

Export Python Dataframe to SQL Table

I'm trying to export a python dataframe to a SQL Server table.
Is there a better way to do this? I'm getting errors.
Dataframe - results_out
Output SQL table - FraudCheckOutput
cnn_out = pyodbc.connect('driver={SQL Server};server=XYZ;database=BulkLog;uid=sa;pwd=test')
results_out.to_sql(con=cnn_out, name='FraudCheckOutput', if_exists='replace', flavor='sqlite_master')
Thanks.
OK, this should get the job done:
import pypyodbc

def database_insert(query, params=()):
    conn_params = 'driver={SQL Server};server=XYZ;database=BulkLog;uid=sa;pwd=test'
    conn = None
    try:
        conn = pypyodbc.connect(conn_params)
    except pypyodbc.Error as e:
        print(str(e))
    else:
        if conn.connected:
            db = conn.cursor()
            db.execute(query, params).commit()
    finally:
        if conn:
            conn.close()

SQL_INSERT_QUERY = """
    INSERT INTO table_name (
        [field_name1],
        [field_name2]
    )
    VALUES (?, ?)
"""

database_insert(SQL_INSERT_QUERY, (1, 'value string'))
In pyodbc the usage is very similar:
cnxn = pyodbc.connect('DRIVER={SQL Server};SERVER=localhost;DATABASE=testdb;UID=me;PWD=pass')
cursor = cnxn.cursor()
cursor.execute("insert into products(id, name) values ('pyodbc', 'awesome library')")
cnxn.commit()
more on http://code.google.com/p/pyodbc/wiki/GettingStarted
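Since the question actually calls DataFrame.to_sql, another option is to hand it a SQLAlchemy engine built on top of pyodbc rather than a raw connection. A minimal sketch, assuming pandas and sqlalchemy are installed and reusing the connection string and table name from the question:

import urllib.parse
from sqlalchemy import create_engine

# build an engine from the same ODBC connection string used above
params = urllib.parse.quote_plus(
    'driver={SQL Server};server=XYZ;database=BulkLog;uid=sa;pwd=test')
engine = create_engine('mssql+pyodbc:///?odbc_connect=' + params)

# write the dataframe; if_exists='replace' drops and recreates the table
results_out.to_sql('FraudCheckOutput', con=engine,
                   if_exists='replace', index=False)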

Issue with inserting parameter values into MySQLdb.execute()

My code executes a query and then for each row in the result set tries to execute another query using values from that row.
import MySQLdb as mdb

try:
    con = mdb.connect('localhost', 'root', '', 'cccorder_uk')
    with con:
        cur = con.cursor()
        cur.execute("SELECT code, name, box_size, commodity_code, country_of_origin FROM cccorder_uk.stocks")
        rows = cur.fetchall()
        for row in rows:
            # split the code and take colour and size
            code = row[0].split('-')
            product_code = code[0]
            sql = """SELECT stock_groups.name FROM stock_groups_styles_map, stock_groups WHERE stock_groups_styles_map.style='%s'""" % (product_code,)
            cur.execute(sql)
            results = cur.fetchall()
            print results
except mdb.Error, e:
    print "Error %d: %s" % (e.args[0],e.args[1])
    sys.exit(1)
finally:
    if con:
        con.close()
When I print results I get an empty tuple, but if I hard code the product_code, for example sql = """SELECT stock_groups.name FROM stock_groups_styles_map, stock_groups WHERE stock_groups_styles_map.style='EP22'""", this returns the results I expect.
Why is my code printing an empty tuple?
Python's string-format operator % isn't smart enough to quote args for MySQL -- pass args to the database execute function, which will pass the args to MySQL correctly.
Example:
cur.execute("SELECT stock_groups.name FROM stock_groups_styles_map, stock_groups WHERE stock_groups_styles_map.style=%s", (product_code,))
See: How can I format strings to query with mysqldb in Python?
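Inside the loop from the question that would look something like this (a sketch only; the rest of the query is kept as written):

for row in rows:
    product_code = row[0].split('-')[0]
    cur.execute(
        "SELECT stock_groups.name "
        "FROM stock_groups_styles_map, stock_groups "
        "WHERE stock_groups_styles_map.style = %s",
        (product_code,))   # args passed as a tuple, quoted by the driver
    results = cur.fetchall()
    print(results)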

How do I make a return statement from this loop

I have this code and I need to get the lastrowid as a return value. How can I fix it?
def main():
    while True:
        # code here
        for item in name2:  # break
            conn = sqlite3.connect("foods.db")
            cursor = conn.cursor()
            cursor.execute("INSERT INTO INPUT33 (NAME) VALUES (?);", (name2,))
            cursor.execute("select MAX(rowid) from [input33];")
            conn.commit()
            conn.close()
            for rowid in cursor: break
            for elem in rowid:
                return rowid  # this is not working

print(m)
You closed the database, so the cursor no longer has access to the data. Retrieve the data before closing. I am assuming you have a reason to re-open the database inside the loop.
def main():
    while True:
        for item in name2:
            conn = sqlite3.connect("foods.db")
            cursor = conn.cursor()
            with conn:
                cursor.execute("INSERT INTO INPUT33 (NAME) VALUES (?);", (name2,))
                cursor.execute("select MAX(rowid) from [input33];")
                rowid = cursor.fetchone()[0]
            conn.close()
            return rowid
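As a side note, sqlite3 cursors expose lastrowid, so the MAX(rowid) query isn't strictly needed; a minimal sketch (insert_name is a hypothetical helper, reusing the INPUT33 table from the question):

import sqlite3

def insert_name(name):
    conn = sqlite3.connect("foods.db")
    try:
        cursor = conn.cursor()
        with conn:  # commits the INSERT on success
            cursor.execute("INSERT INTO INPUT33 (NAME) VALUES (?);", (name,))
        return cursor.lastrowid  # rowid of the row just inserted
    finally:
        conn.close()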
