Could not process parameters tkinter and MySQL [duplicate] - python

I'm trying to loop through an array and insert each element into a table. As far as I can see my syntax is correct, and I took this code straight from Microsoft Azure's documentation.
try:
    conn = mysql.connector.connect(**config)
    print("Connection established")
except mysql.connector.Error as err:
    if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:
        print("Something is wrong with the user name or password")
    elif err.errno == errorcode.ER_BAD_DB_ERROR:
        print("Database does not exist")
    else:
        print(err)
else:
    cursor = conn.cursor()
    data = ['1','2','3','4','5']
    for x in data:
        cursor.execute("INSERT INTO test (serial) VALUES (%s)",(x))
        print("Inserted",cursor.rowcount,"row(s) of data.")
    conn.commit()
    cursor.close()
    conn.close()
    print("Done.")
When I run this, it gets to cursor.execute(...) and then fails. Here is the stack trace.
Traceback (most recent call last):
File "test.py", line 29, in
cursor.execute("INSERT INTO test (serial) VALUES (%s)",("test"))
File "C:\Users\AlexJ\AppData\Local\Programs\Python\Python37\lib\site-packages\mysql\connector\cursor_cext.py", line 248, in execute
prepared = self._cnx.prepare_for_mysql(params)
File "C:\Users\AlexJ\AppData\Local\Programs\Python\Python37\lib\site-packages\mysql\connector\connection_cext.py", line 538, in prepare_for_mysql
raise ValueError("Could not process parameters")
ValueError: Could not process parameters

Try this:
for x in data:
    value = "test"
    query = "INSERT INTO test (serial) VALUES (%s)"
    cursor.execute(query,(value,))
    print("Inserted",cursor.rowcount,"row(s) of data.")
Since you are using the mysql.connector module, cursor.execute requires an SQL query and the parameters as a tuple (a list or dict also works).
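For clarity, here is a minimal sketch of the difference (the variable name is just for illustration):
x = "1"
print(type((x)))   # <class 'str'>: parentheses alone do not create a tuple
print(type((x,)))  # <class 'tuple'>: the trailing comma does

# Either of these forms is accepted by cursor.execute:
cursor.execute("INSERT INTO test (serial) VALUES (%s)", (x,))  # one-element tuple
cursor.execute("INSERT INTO test (serial) VALUES (%s)", [x])   # one-element list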

Nice answer from #lucas, but maybe this will help others, since I think it is a bit cleaner:
sql = "INSERT INTO your_db (your_table) VALUES (%s)"
val = [("data could be array")]
cursor = cnx.cursor()
cursor.execute(sql, val)
print("Inserted",cursor.rowcount,"row(s) of data.")
cnx.commit()
cnx.close()
This was useful for my purpose, which is inserting multiple rows of data.
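If the goal is to insert several rows in one go, cursor.executemany is another option; a minimal sketch using the same placeholder table and column names as above:
sql = "INSERT INTO your_table (your_column) VALUES (%s)"
vals = [("1",), ("2",), ("3",)]  # one tuple per row

cursor = cnx.cursor()
cursor.executemany(sql, vals)    # runs the statement once per tuple
cnx.commit()
print("Inserted", cursor.rowcount, "row(s) of data.")
cnx.close()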

I'm facing the same issue, but instead of an array I'm looping through a set and inserting each item into a MySQL db, and I got this error: mysql.connector.errors.ProgrammingError: Could not process parameters: str(Data_Tokens), it must be of type list, tuple or dict.
The uniqueTokenSet contains strings, but as the error says, the parameters must be a list, tuple or dict. Converting each item to a one-element list, [(item)], worked for me.
uniqueTokenSet = set()
for item in uniqueTokenSet:
    tokenSql = "insert into tokens(token) values (%s)"
    data = [(item)]
    mycursor.execute(tokenSql, data)
    print('data inserted')
mydb.commit()
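Note that [(item)] is exactly the same as [item]: the inner parentheses add nothing, so what gets passed is a one-element list, which the connector accepts just like a one-element tuple. A quick check:
item = "abc123"
print([(item)] == [item])  # True: the parentheses are redundant
print(type((item,)))       # <class 'tuple'>: this is the one-element tuple form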

Related

I don't know how to solve it.

error:
You have an error in your SQL syntax; check the manual that corresponds to your MySQL server version for the right syntax to use near '%s)' at line 1
upi = upi_entry.get()
mysqldb = mysql.connector.connect(
    host="localhost",
    user="root",
    password="deol9646",
    database="train_login",
)
mycursor = mysqldb.cursor()
try:
    mycursor.execute(
        """create table if not exists upi_data(upi text)"""
    )
    sql = "INSERT INTO UPI_DATA (UPI) VALUES (%s)"
    val = upi
    mycursor.execute(sql, val)
    mysqldb.commit()
    lastid = mycursor.lastrowid
    messagebox.showinfo("information", "upi inserted successfully...")
    upi_entry.delete(0, END)
    upi_entry.focus_set()
except Exception as e:
    print(e)
    mysqldb.rollback()
    mysqldb.close()
The parameters need to be a tuple; you're passing in val as a single value, so the MySQL driver doesn't turn %s into anything and that ends up a syntax error.
Add a comma to turn the parenthesized expression (upi) into a 1-tuple: (upi,)
sql = "INSERT INTO UPI_DATA (UPI) VALUES (%s)"
val = (upi,)
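As an alternative (a sketch, not part of the original answer), MySQL Connector/Python also accepts named placeholders with a dict, which sidesteps the one-element-tuple pitfall entirely:
sql = "INSERT INTO UPI_DATA (UPI) VALUES (%(upi)s)"
mycursor.execute(sql, {"upi": upi})  # dict key matches the %(name)s placeholder
mysqldb.commit()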

python MySQL insert big data

Using Python, I am looping through a CSV file to read data, then doing some modifications on each row that is read, and calling a save function to insert the modified data into MySQL.
def save(Id, modifiedData):
    try:
        mydb = mysql.connector.connect(host="localhost",user="use",password="pass",database="data")
        sql = "INSERT INTO data (Id, modifiedData) VALUES (%s, %s)"
        recordTuple = (Id, modifiedData)
        mycursor = mydb.cursor()
        mycursor.execute(sql,recordTuple)
        mydb.commit()
        print("Record inserted successfully into table")
    except mysql.connector.Error as error:
        print("Failed to insert into MySQL table {}".format(error))

def main():
    for row in csv:
        # modify row
        # create Id
        save(Id, modifiedData)
But I don't think this is a good solution: opening a MySQL connection and inserting data on every iteration will be time- and resource-consuming, especially when I move to a real server in production.
How can I improve my solution?
Ideally, connections should be managed by a connection pool and rows should be committed in bulk. For the amount of data in a typical CSV file it may not matter much, but if you don't want to deal with that yourself, I recommend using an ORM like SQLAlchemy.
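If you do want a pool without pulling in an ORM, mysql.connector ships one; a minimal sketch reusing the connection settings from the question (pool name and size are illustrative):
from mysql.connector import pooling

pool = pooling.MySQLConnectionPool(
    pool_name="csv_loader", pool_size=5,
    host="localhost", user="use", password="pass", database="data")

conn = pool.get_connection()  # borrow a connection from the pool
try:
    cur = conn.cursor()
    cur.execute("SELECT 1")
    cur.fetchall()
finally:
    conn.close()              # returns the connection to the pool instead of disconnecting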
You only need to create the connection once, and that should be done in function main, which then passes the connection to function save as follows:
def save(mydb, Id, modifiedData):
    try:
        sql = "INSERT INTO data (Id, modifiedData) VALUES (%s, %s)"
        recordTuple = (Id, modifiedData)
        mycursor = mydb.cursor()
        mycursor.execute(sql,recordTuple)
        mydb.commit()
        print("Record inserted successfully into table")
    except mysql.connector.Error as error:
        print("Failed to insert into MySQL table {}".format(error))

def main():
    try:
        mydb = mysql.connector.connect(host="localhost",user="use",password="pass",database="data")
    except mysql.connector.Error as error:
        print("Failed to create connection: {}".format(error))
        return
    for row in csv:
        # modify row
        # create Id
        save(mydb, Id, modifiedData)
For perhaps even greater performance you can try executemany:
def save(mydb, modified_records):
    try:
        sql = "INSERT INTO data (Id, modifiedData) VALUES (%s, %s)"
        mycursor = mydb.cursor()
        mycursor.executemany(sql, modified_records)
        mydb.commit()
        print("Records inserted successfully into table")
    except mysql.connector.Error as error:
        print("Failed to insert into MySQL table {}".format(error))

def main():
    try:
        mydb = mysql.connector.connect(host="localhost",user="use",password="pass",database="data")
    except mysql.connector.Error as error:
        print("Failed to create connection: {}".format(error))
        return
    modified_records = []
    for row in csv:
        # modify row
        # create Id
        modified_records.append([Id, modifiedData])
    save(mydb, modified_records)
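If the CSV is very large, you can also commit in chunks instead of building one huge list in memory; a sketch on top of the executemany version above (the batch size is arbitrary):
BATCH_SIZE = 1000  # illustrative value

modified_records = []
for row in csv:
    # modify row
    # create Id
    modified_records.append([Id, modifiedData])
    if len(modified_records) >= BATCH_SIZE:
        save(mydb, modified_records)  # insert and commit this chunk
        modified_records = []
if modified_records:                  # flush the remainder
    save(mydb, modified_records)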

Getting keyerror while fetching JSON data from API using python

I am getting a KeyError while printing one of the JSON fields fetched from an API using Python.
Error:
Except for nagios_service, I am able to print the other fields.
Traceback (most recent call last):
File "<ipython-input-55-3a1eadbbe594>", line 1, in <module>
runfile('Y:/_Temp/MEIPE/python/20190104_Script_Jason_APIv3.py', wdir='Y:/_Temp/MEIPE/python')
File "C:\Users\MEIPE\AppData\Local\Continuum\anaconda2\lib\site-packages\spyder_kernels\customize\spydercustomize.py", line 704, in runfile
execfile(filename, namespace)
File "C:\Users\MEIPE\AppData\Local\Continuum\anaconda2\lib\site-packages\spyder_kernels\customize\spydercustomize.py", line 93, in execfile
exec(compile(scripttext, filename, 'exec'), glob, loc)
File "Y:/_Temp/MEIPE/python/20190104_Script_Jason_APIv3.py", line 68, in <module>
print data[i]["_source"]["nagios_service"]
KeyError: 'nagios_service'
My code:
url1 = "http://nagiosdatagateway.vestas.net/esq/ITE1452552/logstash-
2018.12.16/2/desc"
response = urllib.urlopen(url1)
data = json.loads(response.read())
#define db connection
cnxn = pyodbc.connect("Driver={SQL Server Native Client 11.0};"
"Server=DKCDCVDCP42\DPA;"
"Database=VPDC;"
"Trusted_Connection=yes;")
cursor = cnxn.cursor()
sql="SELECT count(*) as count_of_rows FROM [VPDC].[pa].
[ROC_Nagios_Reporting_RawData]"
cursor.execute(sql)
for row in cursor.fetchall():
k = row.count_of_rows
i = 0
j = len(data)#find length of data set
#print j
for i in range(0,j): #loop to insert date into SQL Server
print data[i]["_source"]["nagios_service"]
print data[i]["_source"]["nagios_host"]
print data[i]["_source"]["nagios_author"]
print data[i]["_source"]["nagios_severity_label"]
print data[i]["_source"]["nagios_external_command"]
print data[i]["_source"]["#timestamp"]
cnxn.commit() #commit transaction
cursor.close()
cnxn.close()
I need help fixing this KeyError on nagios_service so that all of the data is printed.
We might be able to provide a better answer if you showed us the data or explained what the purpose of this was, but for now if you want to run this code without getting exceptions, you need to allow for the possibility that not all the items contain this key. One way would be to use get() calls instead of __getitem__ calls (using square brackets) - the dict.get(key, default) method returns default if key is not in the dict, or None if you don't provide default. So a basic solution would be:
for i in range(0,j):  # loop to insert data into SQL Server
    source_data = data[i]["_source"]
    print source_data.get("nagios_service")
    print source_data.get("nagios_host")
    print source_data.get("nagios_author")
    print source_data.get("nagios_severity_label")
    print source_data.get("nagios_external_command")
    print source_data.get("#timestamp")
A slightly better version that will tell you which key is missing:
for i in range(0,j):  # loop to insert data into SQL Server
    source_data = data[i]["_source"]
    keys = ['nagios_service', 'nagios_host', 'nagios_author',
            'nagios_severity_label', 'nagios_external_command', '#timestamp']
    for key in keys:
        print source_data.get(key, "Missing key: '%s'" % key)
After searching SO a little more, I tried using try: and except KeyError: in my code and was able to insert the JSON data into the SQL table without any errors.
url1 = "http://nagiosdatagateway.vestas.net/esq/ITE1452552/logstash-" + ysday1
#print url1 #test
#url = "http://nagiosdatagateway.vestas.net/esq/ITE1452552/logstash-
2018.12.16/2/desc"
response = urllib.urlopen(url1)
data = json.loads(response.read())
#define db connection
cnxn = pyodbc.connect("Driver={SQL Server Native Client 11.0};"
"Server=DKCDCVDCP42\DPA;"
"Database=VPDC;"
"Trusted_Connection=yes;")
cursor = cnxn.cursor()
sql= "SELECT count(*) as count_of_rows FROM [VPDC].[pa].
[ROC_Nagios_Reporting_RawData]"
cursor.execute(sql)
for row in cursor.fetchall():
k = row.count_of_rows
i = 0
j = len(data)#find length of data set
#print j
#for each in data:
for i in range(0,j): #loop to insert date into SQL Server
try:
print data[i]["_source"]["nagios_author"]
print data[i]["_source"]["nagios_service"]
cursor.execute("insert into [VPDC].[pa].[ROC_Nagios_Reporting_RawData]
(Nagios_Author,Nagios_service,Nagios_host,Nagios_comment) values
(?,?,?,?)",(data[i]["_source"]["nagios_author"],data[i]["_source"]
["nagios_service"],data[i]["_source"]["nagios_host"],data[i]["_source"]
["nagios_comment"] ))
except KeyError:
pass
cnxn.commit() #commit transaction
cursor.close()
cnxn.close() #close connection
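A variation (a sketch combining this with the .get() suggestion from the earlier answer) is to fall back to None for missing keys, so the row is still inserted with NULL in that column instead of being skipped:
for entry in data:
    src = entry["_source"]
    cursor.execute("insert into [VPDC].[pa].[ROC_Nagios_Reporting_RawData] "
                   "(Nagios_Author,Nagios_service,Nagios_host,Nagios_comment) values (?,?,?,?)",
                   (src.get("nagios_author"), src.get("nagios_service"),
                    src.get("nagios_host"), src.get("nagios_comment")))  # missing keys become NULL
cnxn.commit()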

Why is this query not working from python to SQL?

I am new to coding and databases. I can get the query to work if I write it out long hand, but I have a lot of these to carry out so I want it in a function, and there I cannot get it to work; it returns a parameters error.
import mysql.connector

def connection_check_1(query, value):
    mydb = mysql.connector.connect(
        host="******",
        user="*****",
        passwd="*****",
        database="****"
    )
    mycursor = mydb.cursor()
    mycursor.execute(query, (value))
    myresult = mycursor.fetchall()
    mydb.close()
    return myresult

value = "sheep"
query = 'select inlicence from licence where animal = %s'
myresult = connection_check_1(query, value)
print(myresult)
Here is the SQL table I have
create table licence
(
animal varchar (20) primary key,
inlicence int (1)
);
This is the error I get
Traceback (most recent call last):
File "*******************", line 20, in
myresult = connection_check_1(query, value)
File "********************", line 13, in connection_check_1
mycursor.execute(query, (value))
File "********************************************88", line 246, in execute
prepared = self._cnx.prepare_for_mysql(params)
File "/home/kev/PycharmProjects/test bed/venv/lib/python3.5/site-packages/mysql/connector/connection_cext.py", line 535, in prepare_for_mysql
raise ValueError("Could not process parameters")
ValueError: Could not process parameters
I have tried changing the way the query is written, changing it to fetchall().
Wrapping a value with () doesn't turn it into a tuple. You probably meant to add a comma there:
mycursor.execute(query, (value,))
# Creates a one-element tuple-^
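With that one-character change the function from the question works; a condensed sketch (credentials elided as in the original):
def connection_check_1(query, value):
    mydb = mysql.connector.connect(host="******", user="*****", passwd="*****", database="****")
    mycursor = mydb.cursor()
    mycursor.execute(query, (value,))  # (value,) is a one-element tuple
    myresult = mycursor.fetchall()
    mydb.close()
    return myresult

print(connection_check_1('select inlicence from licence where animal = %s', "sheep"))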

TypeError: not enough arguments for format string when using %s and a python dictionary

I've printed the output of my "payload" which I want to save to the MySQL database:
('B01MTOV8IP', '40462', '23.95', 'n/a', 'Usually ships in 24 hours',
'https://www.amazon.com/reviews/iframe?akid=AKIAIDCPAFSAQICDTFNQ&alinkCode=xm2&asin=B01MTOV8IP&atag=reakenture-20&exp=2017-08-25T17%3A27%3A37Z&v=2&sig=3zbBXVo4cQAJueFeVeo%252F%252FejvaUOmvuwAtfB4EfMyDiU%253D', 'CHG-GSTWL')
There seems to be something wrong with the way I am formatting it before I pass it to connect.
try:
    selling_price = product.price_and_currency
    selling_price_v = selling_price[0]  # type
    print selling_price_v
except Exception as e:
    selling_price = "n/a"

conn = MySQLdb.connect(host="clabadmin.cfcudy1fdz8o.us-east-1.rds.amazonaws.com", user="", passwd="", db="")

payload = [
    asin,
    bsr,
    str(selling_price_v),
    str(listing_price_v),
    # availability_type,
    availability,
    reviews,
    sku]
print payload

# conn = sqlite3.connect('skubsr.db')
c = conn.cursor()
c.execute("""UPDATE webservice_bsr
    SET
    AISN = %s,
    Best_Sellers_Rank = %s,
    selling_price = %s,
    price = %s,
    # availability_type = %s,
    availability = %s,
    reviews = %s
    WHERE ItemSKU = %s""", payload)
conn.commit()
I get the following error:
Traceback (most recent call last):
File "/home/trackstarz/clabReportScraper/bsrimport.py", line 907, in <module>
WHERE ItemSKU = %s""", payload)
File "/usr/local/lib/python2.7/dist-packages/MySQLdb/cursors.py", line 187, in execute
query = query % tuple([db.literal(item) for item in args])
TypeError: not enough arguments for format string
[Finished in 3.1s with exit code 1]
# only marks a comment in Python code. Here it is inside the query string, so Python does not treat it as a comment; it is sent to MySQL as part of the query, and the %s on that line still counts as a placeholder.
That leaves 8 %s placeholders but only 7 items inside payload, which is why the string formatting complains. Remove the whole availability_type line (or put the availability_type value back into payload) so the counts match.
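A sketch of the corrected statement, with that line removed so the seven %s placeholders match the seven items in payload:
c.execute("""UPDATE webservice_bsr
    SET
    AISN = %s,
    Best_Sellers_Rank = %s,
    selling_price = %s,
    price = %s,
    availability = %s,
    reviews = %s
    WHERE ItemSKU = %s""", payload)
conn.commit()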
I believe the problem is that the number of %s placeholders in your execute string doesn't match the number of items in the list you are passing, so the string formatting runs out of arguments.
Try using some of the suggestions in this post to get your desired effect.
Using Python String Formatting with Lists
