How to use variables in PostgreSQL in Python

My Sounds table has 7 columns: Start_Time, End_Time, Salience_Label, Class_label, Bitrate, Bitdepth, Samplerate.
I want to insert some values into this table with the command
cursor.execute("INSERT INTO Sounds VALUES (%s, %s, %s, %s, %s, %s, %s)",(start_time, end_time, salience_label, class_label, samplerate, bitrate, bitdepth))
try:
    conn = psycopg2.connect(conn_string)
    cursor = conn.cursor()
    # ... doing stuff to get values for my variables ...
    cursor.execute("INSERT INTO Sounds VALUES (%s, %s, %s, %s, %s, %s, %s)", (start_time, end_time, salience_label, class_label, samplerate, bitrate, bitdepth))
    print("Connected!\n")
except:
    print("I am unable to connect to the database")
cursor.close()
conn.close()
print('Close connection')

While testing, do not catch exceptions. Pass the parameters as a single tuple, which Psycopg will adapt to a record. Use mogrify() to check what is being sent to the server:
conn = psycopg2.connect(conn_string)
cursor = conn.cursor()
insert = "insert into Sounds values %s"
parameters = (
    start_time, end_time, salience_label,
    class_label, samplerate, bitrate, bitdepth
)
print(cursor.mogrify(insert, (parameters,)))
cursor.execute(insert, (parameters,))
conn.commit()
cursor.close()
conn.close()
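For reference, mogrify() returns the query exactly as it would be sent to the server after argument binding (as bytes under Python 3), so you can inspect the adapted values before execute() sends the same statement.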
Incidentally, good practice is to name the columns that will receive the data, as in:
insert into t (col_a, col_b) values (1, 'a')
That avoids surprises if the table's column order or definition changes.
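Applied to the question's table, the insert might look like the sketch below. Note it also makes the value order explicit, which matters here: the original call passed samplerate before bitrate and bitdepth, even though the table declares Bitrate, Bitdepth, Samplerate.
cursor.execute(
    "INSERT INTO Sounds (Start_Time, End_Time, Salience_Label, Class_label, Bitrate, Bitdepth, Samplerate) "
    "VALUES (%s, %s, %s, %s, %s, %s, %s)",
    (start_time, end_time, salience_label, class_label, bitrate, bitdepth, samplerate),
)
conn.commit()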

Related

TypeError: can only concatenate str (not "numpy.float64") to str Python/MySQL

I'm writing a stock tracker app for myself with a Python backend. I needed to add an UPDATE statement, and when I do, the code no longer functions; I get the error "TypeError: can only concatenate str (not "numpy.float64") to str."
The INSERT statements work fine, and if I remove the update everything works again. Any ideas where I am going wrong with this?
mycursor = mydb.cursor()
mycursor.execute("UPDATE stock_mains.user_stock SET user_stock1_price = '" + stock_live_price + "' WHERE user_id = 'da_mike' AND user_stock1 is NOT NULL")
mydb.commit()
if market_status == "REGULAR":
    mycursor = mydb.cursor()
    sql = "INSERT INTO stock_live (Stock_symbol, Stock_live_price, stock_datetime, stock_volume, stock_marketcap, stock_dayrange, stock_open, stock_previousclose) VALUES (%s, %s, %s, %s, %s, %s, %s, %s)"
    val = (arg, stock_live_price, timestamp, volume, marketcap, dayrange, openprice, previousclose)
    mycursor.execute(sql, val)
    mydb.commit()
    stock_live_int = float(stock_live_price)
    print(round(stock_live_int, 2))
elif market_status == "PRE":
    stock_premarket_price = si.get_premarket_price(arg)
    mycursor = mydb.cursor()
    sql = "INSERT INTO stock_live (Stock_symbol, stock_premarket_price, stock_datetime, stock_volume, stock_marketcap, stock_dayrange, stock_open, stock_previousclose) VALUES (%s, %s, %s, %s, %s, %s, %s, %s)"
    val = (arg, stock_premarket_price, timestamp, volume, marketcap, dayrange, openprice, previousclose)
    mycursor.execute(sql, val)
    mydb.commit()
    print(stock_premarket_price)
    print(mycursor.rowcount, "record inserted.")
elif market_status == "POST":
    stock_postmarket_price = si.get_postmarket_price(arg)
    mycursor = mydb.cursor()
    sql = "INSERT INTO stock_live (Stock_symbol, stock_aftermarket_price, stock_datetime, stock_volume, stock_marketcap, stock_dayrange, stock_open, stock_previousclose) VALUES (%s, %s, %s, %s, %s, %s, %s, %s)"
    val = (arg, stock_postmarket_price, timestamp, volume, marketcap, dayrange, openprice, previousclose)
    mycursor.execute(sql, val)
    mydb.commit()
    print(stock_postmarket_price)
    print(mycursor.rowcount, "record inserted.")
elif market_status == "CLOSED":
    mycursor = mydb.cursor()
    mycursor.execute("UPDATE stock_mains.user_stock SET user_stock1_price = '" + stock_live_price + "' WHERE user_id = 'da_mike' AND user_stock1 is NOT NULL")
    mydb.commit()
    print("Market is Closed")
    stock_live_int = float(stock_live_price)
    print(round(stock_live_int, 2))
time.sleep(0.1)
First of all, you should use parameterized queries to avoid SQL injection. You can change your query to bind the value as a parameter, converting the float value to str:
mycursor = mydb.cursor()
query = "UPDATE stock_mains.user_stock SET user_stock1_price = %s WHERE user_id = 'da_mike' AND user_stock1 is NOT NULL"
mycursor.execute(query, (str(stock_live_price),))
mydb.commit()
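An alternative worth noting (a sketch, not part of the original answer): converting with float() instead of str() keeps the value numeric. The original TypeError came from concatenating a numpy.float64 onto a str, and mysql-connector cannot bind numpy types directly either, so an explicit conversion is needed in both cases.
mycursor = mydb.cursor()
query = "UPDATE stock_mains.user_stock SET user_stock1_price = %s WHERE user_id = 'da_mike' AND user_stock1 is NOT NULL"
# float() turns the numpy.float64 into a plain Python float the connector can adapt
mycursor.execute(query, (float(stock_live_price),))
mydb.commit()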

importing values of a python dictionary as data to an existing mysql table

I have a problem storing the values of a Python dictionary as data in an existing MySQL table.
I tried the code below, but it's not working.
db = mysql.connect(
    host="localhost",
    user="root",
    passwd="123456",
    database="tgdb"
)
cursor = db.cursor()
val = ', '.join("'" + str(x) + "'" for x in dict.values())
sql = ("INSERT INTO tgdb.channel(user_name, image_url, name, "
       "number_of_members, description, channel_url) "
       "VALUES (%s, %s, %s, %s, %s, %s)")
cursor.execute(sql, val)
db.commit()
print(cursor.rowcount, "record inserted.")
"you have an error in your SQL syntax"
As #Torxed wrote, you shouldn't convert the dict to a string; you can just write:
cursor.execute(sql, list(dict.values()))
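A more defensive variant, as a sketch (the key names are assumptions taken from the column list, since the question never shows the dictionary): index the dict by key so the parameter order always matches the column list, instead of relying on the dict's insertion order. Shadowing the built-in name dict is also worth avoiding, so channel is used here.
sql = ("INSERT INTO tgdb.channel (user_name, image_url, name, "
       "number_of_members, description, channel_url) "
       "VALUES (%s, %s, %s, %s, %s, %s)")
params = (channel["user_name"], channel["image_url"], channel["name"],
          channel["number_of_members"], channel["description"], channel["channel_url"])
cursor.execute(sql, params)
db.commit()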

Insert Data to SQL Server Table using pymssql

I am trying to write a data frame into a SQL Server table. My code:
conn = pymssql.connect(host="Dev02", database="DEVDb")
cur = conn.cursor()
query = "INSERT INTO dbo.SCORE_TABLE VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
cur.executemany(query, df_sql)
conn.commit()
cur.close()
conn.close()
The dimension of df_sql is (5860, 20), i.e. the number of columns in the data frame is the same as the number of columns in the SQL Server table. Still, I am getting the following error:
ValueError: more placeholders in sql than params available
UPDATED BELOW
As per one of the comments, I tried using turbodbc as below:
conn = turbodbc.connect(driver="{SQL Server}", server="Dev02", Database="DEVDb")
conn.use_async_io = True
cur = conn.cursor()
query = "INSERT INTO dbo.STG_CONTACTABILITY_SCORE VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)"
cur.executemany(query, df_sql.values)
cur.commit()
cur.close()
conn.close()
I am getting the following error:
ValueError: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()
I don't get it. What is wrong here? I look at df_sql.values and I don't find anything wrong.
The first row of ndarray is as below:
[nan 'DUSTIN HOPKINS' 'SOUTHEAST MISSOURI STATE UNIVERSITY' 13.0
'5736512217' None None 'Monday' '8:00AM' '9:00AM' 'Summer' None None None
None '2017-12-22 10:39:30.626331' 'Completed' None '1-11KUFFZ'
'Central Time Zone']
I think you just need to specify each column name, and don't forget the table must have an id column to hold the data frame index (note that pymssql uses %s placeholders throughout):
conn = pymssql.connect(host="Dev02", database="DEVDb")
cur = conn.cursor()
query = """INSERT INTO dbo.SCORE_TABLE(index, column1, column2, ..., column20)
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s,
%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"""
cur.executemany(query, df_sql)
conn.commit()
cur.close()
conn.close()
OK, I have been using pandas, and I exported the last data frame to CSV like:
df.to_csv('new_file_name.csv', sep=',', encoding='utf-8')
Then I just used pyodbc and the BULK INSERT Transact-SQL statement like:
import pyodbc
conn = pyodbc.connect(DRIVER='{SQL Server}', Server='server_name', Database='Database_name', trusted_connection='yes')
cur = conn.cursor()
cur.execute("""BULK INSERT table_name
FROM 'C:\\Users\\folders path\\new_file_name.csv'
WITH
(
CODEPAGE = 'ACP',
FIRSTROW = 2,
FIELDTERMINATOR = ',',
ROWTERMINATOR = '\n'
)""")
conn.commit()
cur.close()
conn.close()
It took about a second to load 15,314 rows into SQL Server. I hope this gives you an idea.
If I understand correctly, you want to use the DataFrame.to_sql() method. Note that to_sql() expects a SQLAlchemy engine (or a SQLite connection) rather than a raw pymssql connection, and the schema is passed as its own parameter:
df_sql.to_sql('SCORE_TABLE', engine, schema='dbo', index=False, if_exists='append')
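A minimal sketch of the engine setup, assuming SQLAlchemy is installed; the connection URL is guessed from the question's host and database names, with credentials omitted:
from sqlalchemy import create_engine

# URL format: mssql+pymssql://<user>:<password>@<host>/<database>
engine = create_engine("mssql+pymssql://Dev02/DEVDb")
df_sql.to_sql("SCORE_TABLE", engine, schema="dbo", index=False, if_exists="append")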
Possibly executemany treats each row of the ndarray from your df.values call as one item, since there are no comma separators between values. Hence the placeholders outnumber the actual bound values and you receive the mismatch error.
Consider converting the array to a tuple of tuples (or a list of lists, a tuple of lists, or a list of tuples) and then passing that object to executemany:
query = "INTO dbo.SCORE_TABLE VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
sql_data = tuple(map(tuple, df.values))
cur.executemany(query, sql_data)
cur.commit()
This works for me:
insert_query = """INSERT INTO dbo.temptable(CHECK_TIME, DEVICE, METRIC, VALUE, TOWER, LOCATION, ANOMALY, ANOMALY_SCORE, ANOMALY_SEVERITY)
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s)"""
write_data = tuple(map(tuple, data_frame.values))
cursor.executemany(insert_query, write_data)
con.commit()
cursor.close()
con.close()

mysql.connector in python / cursor in for-loop only working for first row of table

I have a table, and I want to translate the columns 'topic' and 'review' of each row and store the entire table, with the translations, in a new table. It seems that the for-loop doesn't iterate over all rows of the input table; only the first row is stored in the new table. Why?
database = mysql.connector.connect(user='root', password='root', host='localhost', database='test')
DBcursor = database.cursor(buffered=True)
query = "SELECT * FROM test_de"
DBcursor.execute(query)
for (id, user_name, date, country, version, score, topic, review, url) in DBcursor:
    topic_trans = translate(topic, 'en')
    review_trans = translate(review, 'en')
    add_translation = ("INSERT INTO test_de_en (id, user_name, date, country, version, score, topic, review, url) "
                       "VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s)")
    translation_data = (id, user_name, date, country, version, score, topic_trans, review_trans, url)
    DBcursor.execute(add_translation, translation_data)
database.commit()
DBcursor.close()
database.close()
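A likely cause, offered here as a sketch since the thread shows no accepted answer: calling execute() on DBcursor inside the loop discards the result set the loop is still iterating over, so iteration stops after the first row. Using a separate cursor for the inserts (or fetching all rows first with fetchall()) avoids this:
DBcursor = database.cursor(buffered=True)
insert_cursor = database.cursor()  # second cursor, so the SELECT results survive the inserts
DBcursor.execute("SELECT * FROM test_de")
for (id, user_name, date, country, version, score, topic, review, url) in DBcursor:
    topic_trans = translate(topic, 'en')
    review_trans = translate(review, 'en')
    insert_cursor.execute(
        "INSERT INTO test_de_en (id, user_name, date, country, version, score, topic, review, url) "
        "VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s)",
        (id, user_name, date, country, version, score, topic_trans, review_trans, url),
    )
database.commit()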

Postgresql insert data error when using python

I am trying to insert data into a table that was created earlier by a Python script. Here is the code I am trying to execute. I want the inserted row to include today's date.
date_today = dt.date.today()
conn = psycopg2.connect(host = serverip, port = port, database = database, user = uid, password = pwd)
cursor = conn.cursor()
cursor.execute("INSERT INTO My_TABLE (Date, Class, Total_students, failed_students, Percent_passed_students) VALUES (date_today, 'Class Name', int1, int2, int3)")
print "Data Inserted successfully"
conn.commit()
conn.close()
Here is the error I see from my job. What am I missing here?
psycopg2.ProgrammingError: column "date_today" does not exist
I created the table using a different job with the following query:
cursor.execute("""CREATE TABLE MY_TABL(Date date, Lob varchar(30), Total_Students int, failed_students int, Percent_passed_students int)""")
And the table is created with above five columns.
This line:
cursor.execute("INSERT INTO My_TABLE (Date, Class, Total_students, failed_students, Percent_passed_students) VALUES (date_today, 'Class Name', int1, int2, int3)")
is the incorrect way to insert values into a database dynamically: the Python variable names end up inside the SQL text itself, which is why PostgreSQL complains that column "date_today" does not exist.
Here's a functional and correct example:
cursor.execute("INSERT INTO table VALUES (%s, %s, %s)", (var1, var2, var3))
And applying it in your case...
cursor.execute("INSERT INTO My_TABLE VALUES (%s, %s, %s, %s, %s)", (date_today, 'Class Name', int1, int2, int3))
