Not able to insert CSV data into MySQL table - Python

I have a CSV file with many fields, but I am only interested in 5 of them. I picked them up using pandas and now want to insert them into a MySQL table.
This is the logic for that.
Table structure:
Field                  Type    Null
order_execution_time   text    YES
symbol                 text    YES
trade_type             text    YES
price                  double  YES
quantity               int     YES
Code:
import pandas as pd
from mysql.connector import Error
from src.config.python_mysql_config import get_connection

df = pd.read_csv("C:\\Users\\91720\\Downloads\\tradebook-VT0986.csv", usecols=['order_execution_time', 'symbol', 'trade_type', 'price', 'quantity'])

try:
    conn = get_connection()
    if conn.is_connected():
        cursor = conn.cursor()
        cursor.execute("select database();")
        record = cursor.fetchone()
        print("You're connected to database: ", record)
        # loop through the data frame
        for i, row in df.iterrows():
            # here %s means string values
            sql = "INSERT INTO trades (symbol, order_execution_time, trade_type, price, quantity) VALUES (%s,%s,%s,%d,%f)"
            cursor.execute(sql, tuple(row))
            print("Record inserted")
        # the connection is not auto committed by default, so we must commit to save our changes
        conn.commit()
except Error as e:
    print("Error while connecting to MySQL", e)
It is throwing this error:
Error while connecting to MySQL Not all parameters were used in the SQL statement
How do I resolve it?
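One way to resolve it (a minimal sketch, assuming the MySQL Connector/Python driver, which only recognizes %s placeholders whatever the column type, and assuming the CSV stores order_execution_time before symbol so that tuple(row) lines up with the column list):

# Sketch: replace %d/%f with %s -- mysql.connector binds every parameter through %s
# and converts the Python values to the column types itself.
insert_sql = ("INSERT INTO trades (order_execution_time, symbol, trade_type, price, quantity) "
              "VALUES (%s, %s, %s, %s, %s)")
for _, row in df.iterrows():
    cursor.execute(insert_sql, tuple(row))
conn.commit()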

Related

How can I add only the new rows into a MySQL database table from a DataFrame?

I wrote a Python script that requests an API and collects the data returned by it into a DataFrame named df2.
I created a MySQL database named speed_data.
The code below allowed me to add the data in my DataFrame to my SQL database:
try:
    conn = msql.connect(host='localhost', database='speed_data', user='root',
                        password='')
    if conn.is_connected():
        cursor = conn.cursor()
        cursor.execute("select database();")
        record = cursor.fetchone()
        print("You're connected to database: ", record)
        cursor.execute("DROP TABLE IF EXISTS speed_cord;")
        print('creating table.....')
        cursor.execute("CREATE TABLE speed_cord(id varchar(20),vitesse_med varchar(20),vitesse_max varchar(20),distance varchar(20),temps timestamp,PRIMARY KEY(id)) ")
        print("table is created....")
        for i, row in df2.iterrows():
            sql = "INSERT INTO speed_data.speed_cord VALUES (%s,%s,%s,%s,%s)"
            cursor.execute(sql, tuple(row))
            print("Record inserted")
        # the connection is not auto committed by default, so we must commit to save our changes
        conn.commit()
except Error as e:
    print("Error while connecting to MySQL", e)
Now, every time I request the API it returns new rows in my DataFrame df2, and I need to add only these new rows to my table speed_cord without dropping the earlier data.
How can I do that?
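One possible approach, sketched below under the assumption that the API keeps returning a stable, unique id per row: create the table only once (CREATE TABLE IF NOT EXISTS instead of DROP TABLE), and use INSERT IGNORE so that rows whose id is already present in speed_cord are silently skipped.

try:
    conn = msql.connect(host='localhost', database='speed_data', user='root', password='')
    if conn.is_connected():
        cursor = conn.cursor()
        # Create the table only if it is missing -- never drop the existing data.
        cursor.execute(
            "CREATE TABLE IF NOT EXISTS speed_cord("
            "id varchar(20), vitesse_med varchar(20), vitesse_max varchar(20), "
            "distance varchar(20), temps timestamp, PRIMARY KEY(id))"
        )
        # INSERT IGNORE skips any row whose primary key already exists in the table.
        sql = "INSERT IGNORE INTO speed_data.speed_cord VALUES (%s, %s, %s, %s, %s)"
        for _, row in df2.iterrows():
            cursor.execute(sql, tuple(row))
        conn.commit()
        print("New rows appended without touching existing data")
except Error as e:
    print("Error while connecting to MySQL", e)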

Python Error: Not all parameters were used in the SQL statement

Trying to read a CSV and save the information to a MySQL database.
I get several errors:
ProgrammingError: Not all parameters were used in the SQL statement
AttributeError: 'MySQLCursor' object has no attribute 'rollback'
Which I think is connected to:
cursor.executemany(sql_insert,records)
cursor.commit();
cursor.rollback()
import mysql.connector as sql_db
import pypyodbc as odbc
import pyodbc
import pandas as pd
import csv

df = pd.read_csv(r'C:\Users\xxx\Documents\Python Scripts\Databases\Testfiles\test.csv', sep=";")
columns = ['Id', 'Name', 'Url', 'ImageUrl', 'MaxNumberOfRooms', 'MinNumberOfRooms', 'MaxArea', 'MaxPrice']
df_data = df[columns]
records = df_data.values.tolist()

mydb = sql_db.connect(
    host="127.0.0.1",
    user="Admin",
    password="AdminPassword",
    database="dbTest"
)

sql_insert = """
INSERT INTO TestTable
VALUES (%s,%s,%s,%s,%s,%s,%s,%s)
"""

try:
    cursor = mydb.cursor()
    cursor.executemany(sql_insert, records)
    mydb.commit()
except Exception as e:
    mydb.rollback()
    print(str(e[1]))
finally:
    cursor.close()
    mydb.close()
Try -
VALUES (?,?,?,?,?,?,?,?,GETDATE(), Name, Url, ImageUrl, MaxNumberOfRooms, MinNumberOfRooms, MaxArea, MaxPrice) Name, Url etc. being your variables.
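A hedged aside on that suggestion, since the question mixes two drivers: the ? placeholder style belongs to pyodbc/pypyodbc, while mysql.connector (which the code actually connects with) only understands %s, and commit()/rollback() are methods of the connection, not the cursor. A minimal sketch of the mysql.connector variant, assuming TestTable has exactly the eight columns listed above:

# Sketch assuming mysql.connector and that TestTable's columns match the CSV columns.
sql_insert = """
INSERT INTO TestTable (Id, Name, Url, ImageUrl, MaxNumberOfRooms, MinNumberOfRooms, MaxArea, MaxPrice)
VALUES (%s, %s, %s, %s, %s, %s, %s, %s)
"""
try:
    cursor = mydb.cursor()
    cursor.executemany(sql_insert, records)
    mydb.commit()      # commit on the connection ...
except Exception as e:
    mydb.rollback()    # ... and roll back on the connection, not the cursor
    print(e)
finally:
    cursor.close()
    mydb.close()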

MySQL table name getting unwanted quotes, resulting in a "table does not exist" error

import mysql.connector

def add_features_to_db(stockname, timeframe, date, feature):
    try:
        conn = mysql.connector.connect(
            user='root', password='', host='localhost', database='fx003')
        cursor = conn.cursor()
        dbtable = stockname + timeframe
        mySql_insert_query = """INSERT INTO `%s` (date, trend) VALUES ( `%s`, `%s` )"""
        record = (dbtable, date, feature)
        cursor.execute(mySql_insert_query, record)
        conn.commit()
        print("Record inserted successfully")
    except mysql.connector.Error as error:
        print("Failed to insert into MySQL table {}".format(error))
    finally:
        if conn.is_connected():
            cursor.close()
            conn.close()
            print("MySQL connection is closed")

add_features_to_db("aud-cad", "_30mins", "2021-09-24 21:00:00", "Short")
I have the code above and it is giving me the error below:
Failed to insert into MySQL table 1146 (42S02): Table 'fx003.'aud-cad_30mins'' doesn't exist
The aud-cad_30mins table does exist, and an insert query like the one below does its job:
mySql_insert_query = """INSERT INTO aud-cad_30mins (date, trend) VALUES ( "2021-09-24 21:00:00","Short" )"""
So when I try to use variables in the query, it gives the error. Why is the table name getting unwanted quotes? I checked several tutorials but couldn't find a solution. Any ideas?
The table name should be hardcoded in the query string instead of being passed as a %s placeholder, which is meant only for the values to be inserted. So if you have the table name in a variable, you can substitute it via format() before calling cursor.execute():
dbtable = stockname + timeframe
mySql_insert_query = """INSERT INTO {} (date, trend) VALUES ( %s, %s )""".format(dbtable)
see the examples in the docs
Edit: as Bill mentioned in the comments, don't add backticks around the %s placeholders.
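Putting the two points together, a minimal sketch of the corrected insert (backticks go around the interpolated table name because aud-cad_30mins contains a hyphen, while the values keep plain %s placeholders):

dbtable = stockname + timeframe   # e.g. "aud-cad_30mins"
# The table name is interpolated into the SQL text; only the values are bound via %s.
mySql_insert_query = "INSERT INTO `{}` (date, trend) VALUES (%s, %s)".format(dbtable)
cursor.execute(mySql_insert_query, (date, feature))
conn.commit()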

Failed inserting record into consumer table: not all arguments converted during string formatting

I'm trying to insert data that I got from a CSV file into tables that I previously created with SQLAlchemy in Python. However, when I run the following code I get an error message saying that not all arguments were converted during string formatting.
Could you help me identify what my error is and how I can fix it?
# Importing the csv input file
df = pd.read_csv('APAN5310_HW6_DATA.csv')
print(df)
print(df.columns)
df.dtypes

# Splitting the data for the first table
first_name = df['first_name']
last_name = df['last_name']
email = df['email']
df[['cell_phone', 'home_phone']] = df.cell_and_home_phones.str.split(";", expand=True)
cell_phone = df['cell_phone']
home_phone = df['home_phone']
consumer_list = [first_name, last_name, email, cell_phone, home_phone]

import psycopg2

def bulkInsert(records):
    try:
        connection = psycopg2.connect(user="postgres",
                                      password="123",
                                      host="localhost",
                                      port="5432",
                                      database="Pharmacy")
        cursor = connection.cursor()
        sql_insert_query = """ INSERT INTO consumer (consumer_list)
                               VALUES (%s,%s,%s,%s,%s) """
        # executemany() to insert multiple rows
        result = cursor.executemany(sql_insert_query, records)
        connection.commit()
        print(cursor.rowcount, "Record inserted successfully into consumer table")
    except (Exception, psycopg2.Error) as error:
        print("Failed inserting record into consumer table {}".format(error))
    finally:
        # closing database connection.
        if connection:
            cursor.close()
            connection.close()
            print("PostgreSQL connection is closed")

records = consumer_list
bulkInsert(records)
The error message I get is:
"Failed inserting record into consumer table not all arguments converted during string formatting
PostgreSQL connection is closed"
@Javier, you have to spell out the actual columns, as @mechanical_meat said:
""" INSERT INTO consumer (column1, column2, column3, column4, column5)
VALUES (%s, %s, %s, %s, %s) """
But I suppose you figured that out within a year.
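For completeness, a sketch of the remaining fix, assuming the consumer table really has columns named first_name, last_name, email, cell_phone and home_phone: records has to be a list of row tuples rather than a list of pandas Series, and the INSERT has to name those columns.

# Build one tuple per row instead of one Series per column (column names assumed from the CSV).
records = list(zip(first_name, last_name, email, cell_phone, home_phone))

sql_insert_query = """ INSERT INTO consumer (first_name, last_name, email, cell_phone, home_phone)
                       VALUES (%s, %s, %s, %s, %s) """
cursor.executemany(sql_insert_query, records)
connection.commit()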

Not all parameters were used in the SQL statement when using Python and MySQL

Hi, I am doing a Python + MySQL project. I initialize the database and try to create the table record, but it seems the data cannot be loaded into the table. Can anyone here help me out with this?
import mysql.connector
mydb = mysql.connector.connect( host="localhost",user="root",password="asd619248636",database="mydatabase")
mycursor = mydb.cursor()
mycursor.excute=("CREATE TABLE record (temperature FLOAT(20) , humidity FLOAT(20))")
sql = "INSERT INTO record (temperature,humidity) VALUES (%d, %d)"
val = (2.3,4.5)
mycursor.execute(sql,val)
mydb.commit()
print(mycursor.rowcount, "record inserted.")
and the error shows:
mysql.connector.errors.ProgrammingError: Not all parameters were used in the SQL statement
Changing the following should fix your problem:
sql = "INSERT INTO record (temperature,humidity) VALUES (%s, %s)"
val = ("2.3","4.5") # You can also use (2.3, 4.5)
mycursor.execute(sql,val)
MySQL Connector/Python only recognizes %s as a placeholder and handles the conversion to the appropriate database type itself. Your code throws the error because %d and %f are not valid placeholders, so the parameters you pass are never bound.
For more info on this you can look here
Simply change the insert statement to
sql = "INSERT INTO record (temperature,humidity) VALUES (%s, %s)"
then it works fine
This works for me.
# Insert from dataframe to table in SQL Server
import time
import pandas as pd
import pyodbc
from sqlalchemy import create_engine

# create timer
start_time = time.time()

df = pd.read_csv("C:\\your_path_here\\CSV1.csv")

conn_str = (
    r'DRIVER={SQL Server Native Client 11.0};'
    r'SERVER=Excel-Your_Server_Name;'
    r'DATABASE=NORTHWND;'
    r'Trusted_Connection=yes;'
)
cnxn = pyodbc.connect(conn_str)
cursor = cnxn.cursor()

for index, row in df.iterrows():
    cursor.execute('INSERT INTO dbo.Table_1([Name],[Address],[Age],[Work]) values (?,?,?,?)',
                   row['Name'],
                   row['Address'],
                   row['Age'],
                   row['Work'])
cnxn.commit()
cursor.close()
cnxn.close()
