I am new to coding and databases. I can get the query to work if I write it out long hand, but I have a lot of queries to run and want to wrap them in a function. When I do, it returns a parameters error.
import mysql.connector

def connection_check_1(query, value):
    mydb = mysql.connector.connect(
        host="******",
        user="*****",
        passwd="*****",
        database="****"
    )
    mycursor = mydb.cursor()
    mycursor.execute(query, (value))
    myresult = mycursor.fetchall()
    mydb.close()
    return myresult

value = "sheep"
query = 'select inlicence from licence where animal = %s'
myresult = connection_check_1(query, value)
print(myresult)
Here is the SQL table I have
create table licence
(
animal varchar (20) primary key,
inlicence int (1)
);
This is the error I get
Traceback (most recent call last):
File "*******************", line 20, in
myresult = connection_check_1(query, value)
File "********************", line 13, in connection_check_1
mycursor.execute(query, (value))
File "********************************************88", line 246, in execute
prepared = self._cnx.prepare_for_mysql(params)
File "/home/kev/PycharmProjects/test bed/venv/lib/python3.5/site-packages/mysql/connector/connection_cext.py", line 535, in prepare_for_mysql
raise ValueError("Could not process parameters")
ValueError: Could not process parameters
I have tried changing the way the query is written and switching to fetchall(), with no luck.
Wrapping a value with () doesn't turn it into a tuple. You probably meant to add a comma there:
mycursor.execute(query, (value,))
# Creates a one-element tuple-^
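For completeness, a minimal sketch of the fixed function together with the call from the question (connection details are the placeholders from the post); only the execute line changes:
def connection_check_1(query, value):
    mydb = mysql.connector.connect(host="******", user="*****",
                                   passwd="*****", database="****")
    mycursor = mydb.cursor()
    mycursor.execute(query, (value,))   # (value,) is a one-element tuple
    myresult = mycursor.fetchall()      # fetchall() returns a list of tuples
    mydb.close()
    return myresult

query = 'select inlicence from licence where animal = %s'
print(connection_check_1(query, "sheep"))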
I'm trying to loop through an array and insert each element into a table. As far as I can see my syntax is correct and I took this code straight from Microsoft Azure's documentation.
try:
    conn = mysql.connector.connect(**config)
    print("Connection established")
except mysql.connector.Error as err:
    if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:
        print("Something is wrong with the user name or password")
    elif err.errno == errorcode.ER_BAD_DB_ERROR:
        print("Database does not exist")
    else:
        print(err)
else:
    cursor = conn.cursor()
    data = ['1','2','3','4','5']
    for x in data:
        cursor.execute("INSERT INTO test (serial) VALUES (%s)", (x))
        print("Inserted", cursor.rowcount, "row(s) of data.")
    conn.commit()
    cursor.close()
    conn.close()
    print("Done.")
When I run this it gets to cursor.execute(...) and then fails. Here is the stack trace.
Traceback (most recent call last):
File "test.py", line 29, in
cursor.execute("INSERT INTO test (serial) VALUES (%s)",("test"))
File "C:\Users\AlexJ\AppData\Local\Programs\Python\Python37\lib\site-packages\mysql\connector\cursor_cext.py", line 248, in execute
prepared = self._cnx.prepare_for_mysql(params)
File "C:\Users\AlexJ\AppData\Local\Programs\Python\Python37\lib\site-packages\mysql\connector\connection_cext.py", line 538, in prepare_for_mysql
raise ValueError("Could not process parameters")
ValueError: Could not process parameters
Try this:
for x in data:
    value = "test"
    query = "INSERT INTO test (serial) VALUES (%s)"
    cursor.execute(query, (value,))
    print("Inserted", cursor.rowcount, "row(s) of data.")
Since you are using the mysql.connector module, cursor.execute requires an SQL query and a tuple (or other sequence) of parameters.
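As a design note, if the goal is simply to insert every element of data, executemany can replace the loop; a minimal sketch, assuming the same test table, cursor and conn from the question:
query = "INSERT INTO test (serial) VALUES (%s)"
params = [(x,) for x in data]   # one one-element tuple per row
cursor.executemany(query, params)
print("Inserted", cursor.rowcount, "row(s) of data.")
conn.commit()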
Nice answer from @lucas, but maybe this helps others, since I think it is a bit cleaner:
sql = "INSERT INTO your_table (your_column) VALUES (%s)"
val = [("data could be array")]   # a one-element list works as the parameter sequence
cursor = cnx.cursor()
cursor.execute(sql, val)
print("Inserted", cursor.rowcount, "row(s) of data.")
cnx.commit()
cnx.close()
This was useful for my purpose of inserting multiple rows of data.
I was facing the same issue, but instead of an array I was looping through a set and inserting each item into a MySQL db, and got this error: mysql.connector.errors.ProgrammingError: Could not process parameters: str(Data_Tokens), it must be of type list, tuple or dict.
The uniqueTokenSet contains strings, but as the error shows, the parameters must be a list, tuple or dict. Wrapping the item in a list, [(item)] (which is really just [item], since (item) without a comma is not a tuple), worked for me.
uniqueTokenSet = set()
for item in uniqueTokenSet:
    tokenSql = "insert into tokens(token) values (%s)"
    data = [(item)]
    mycursor.execute(tokenSql, data)
    print('data inserted')
mydb.commit()
I am getting a KeyError while printing one of the JSON fields fetched from an API using Python.
Error:
Except for nagios_service, I am able to print the other fields.
Traceback (most recent call last):
File "<ipython-input-55-3a1eadbbe594>", line 1, in <module>
runfile('Y:/_Temp/MEIPE/python/20190104_Script_Jason_APIv3.py', wdir='Y:/_Temp/MEIPE/python')
File "C:\Users\MEIPE\AppData\Local\Continuum\anaconda2\lib\site-packages\spyder_kernels\customize\spydercustomize.py", line 704, in runfile
execfile(filename, namespace)
File "C:\Users\MEIPE\AppData\Local\Continuum\anaconda2\lib\site-packages\spyder_kernels\customize\spydercustomize.py", line 93, in execfile
exec(compile(scripttext, filename, 'exec'), glob, loc)
File "Y:/_Temp/MEIPE/python/20190104_Script_Jason_APIv3.py", line 68, in <module>
print data[i]["_source"]["nagios_service"]
KeyError: 'nagios_service'
My code:
url1 = "http://nagiosdatagateway.vestas.net/esq/ITE1452552/logstash-
2018.12.16/2/desc"
response = urllib.urlopen(url1)
data = json.loads(response.read())
#define db connection
cnxn = pyodbc.connect("Driver={SQL Server Native Client 11.0};"
"Server=DKCDCVDCP42\DPA;"
"Database=VPDC;"
"Trusted_Connection=yes;")
cursor = cnxn.cursor()
sql="SELECT count(*) as count_of_rows FROM [VPDC].[pa].
[ROC_Nagios_Reporting_RawData]"
cursor.execute(sql)
for row in cursor.fetchall():
k = row.count_of_rows
i = 0
j = len(data)#find length of data set
#print j
for i in range(0,j): #loop to insert date into SQL Server
print data[i]["_source"]["nagios_service"]
print data[i]["_source"]["nagios_host"]
print data[i]["_source"]["nagios_author"]
print data[i]["_source"]["nagios_severity_label"]
print data[i]["_source"]["nagios_external_command"]
print data[i]["_source"]["#timestamp"]
cnxn.commit() #commit transaction
cursor.close()
cnxn.close()
I need help fixing this KeyError on nagios_service so that all of the data gets printed.
We might be able to provide a better answer if you showed us the data or explained what the purpose of this was, but for now if you want to run this code without getting exceptions, you need to allow for the possibility that not all the items contain this key. One way would be to use get() calls instead of __getitem__ calls (using square brackets) - the dict.get(key, default) method returns default if key is not in the dict, or None if you don't provide default. So a basic solution would be:
for i in range(0,j):  #loop to insert date into SQL Server
    source_data = data[i]["_source"]
    print source_data.get("nagios_service")
    print source_data.get("nagios_host")
    print source_data.get("nagios_author")
    print source_data.get("nagios_severity_label")
    print source_data.get("nagios_external_command")
    print source_data.get("#timestamp")
A slightly better version that will tell you which key is missing:
for i in range(0,j):  #loop to insert date into SQL Server
    source_data = data[i]["_source"]
    keys = ['nagios_service', 'nagios_host', 'nagios_author',
            'nagios_severity_label', 'nagios_external_command', '#timestamp']
    for key in keys:
        print source_data.get(key, "Missing key: '%s'" % key)
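Since the end goal in the question is to insert these fields into SQL Server, the same get() approach can feed the parameterized insert so rows with a missing key are not skipped; a minimal sketch, assuming the cursor and the table used later in the question:
for i in range(0, j):
    source_data = data[i]["_source"]
    cursor.execute(
        "insert into [VPDC].[pa].[ROC_Nagios_Reporting_RawData] "
        "(Nagios_Author,Nagios_service,Nagios_host,Nagios_comment) values (?,?,?,?)",
        (source_data.get("nagios_author"),
         source_data.get("nagios_service"),
         source_data.get("nagios_host"),
         source_data.get("nagios_comment")))   # missing keys become NULL
cnxn.commit()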
I tried using try: and except KeyError: in my code after searching SO a little more, and was able to insert the JSON data into the SQL table without any errors.
url1 = "http://nagiosdatagateway.vestas.net/esq/ITE1452552/logstash-" + ysday1
#print url1 #test
#url = "http://nagiosdatagateway.vestas.net/esq/ITE1452552/logstash-
2018.12.16/2/desc"
response = urllib.urlopen(url1)
data = json.loads(response.read())
#define db connection
cnxn = pyodbc.connect("Driver={SQL Server Native Client 11.0};"
"Server=DKCDCVDCP42\DPA;"
"Database=VPDC;"
"Trusted_Connection=yes;")
cursor = cnxn.cursor()
sql= "SELECT count(*) as count_of_rows FROM [VPDC].[pa].
[ROC_Nagios_Reporting_RawData]"
cursor.execute(sql)
for row in cursor.fetchall():
k = row.count_of_rows
i = 0
j = len(data)#find length of data set
#print j
#for each in data:
for i in range(0,j): #loop to insert date into SQL Server
try:
print data[i]["_source"]["nagios_author"]
print data[i]["_source"]["nagios_service"]
cursor.execute("insert into [VPDC].[pa].[ROC_Nagios_Reporting_RawData]
(Nagios_Author,Nagios_service,Nagios_host,Nagios_comment) values
(?,?,?,?)",(data[i]["_source"]["nagios_author"],data[i]["_source"]
["nagios_service"],data[i]["_source"]["nagios_host"],data[i]["_source"]
["nagios_comment"] ))
except KeyError:
pass
cnxn.commit() #commit transaction
cursor.close()
cnxn.close() #close connection
I've printed the output of my "payload" which I want to save to the MySQL database:
('B01MTOV8IP', '40462', '23.95', 'n/a', 'Usually ships in 24 hours',
'https://www.amazon.com/reviews/iframe?akid=AKIAIDCPAFSAQICDTFNQ&alinkCode=xm2&asin=B01MTOV8IP&atag=reakenture-20&exp=2017-08-25T17%3A27%3A37Z&v=2&sig=3zbBXVo4cQAJueFeVeo%252F%252FejvaUOmvuwAtfB4EfMyDiU%253D', 'CHG-GSTWL')
There seems to be something wrong with the way I am formatting it before I pass it to connect.
try:
    selling_price = product.price_and_currency
    selling_price_v = selling_price[0]  #type
    print selling_price_v
except Exception as e:
    selling_price = "n/a"

conn = MySQLdb.connect(host="clabadmin.cfcudy1fdz8o.us-east-1.rds.amazonaws.com", user="", passwd="", db="")
payload = [
    asin,
    bsr,
    str(selling_price_v),
    str(listing_price_v),
    # availability_type,
    availability,
    reviews,
    sku]
print payload

# conn = sqlite3.connect('skubsr.db')
c = conn.cursor()
c.execute("""UPDATE webservice_bsr
    SET
    AISN = %s,
    Best_Sellers_Rank = %s,
    selling_price = %s,
    price = %s,
    # availability_type = %s,
    availability = %s,
    reviews = %s
    WHERE ItemSKU = %s""", payload)
conn.commit()
I get the following error:
Traceback (most recent call last):
File "/home/trackstarz/clabReportScraper/bsrimport.py", line 907, in <module>
WHERE ItemSKU = %s""", payload)
File "/usr/local/lib/python2.7/dist-packages/MySQLdb/cursors.py", line 187, in execute
query = query % tuple([db.literal(item) for item in args])
TypeError: not enough arguments for format string
[Finished in 3.1s with exit code 1]
# only indicates a comment when it appears in Python code. In your query it sits inside the query string, so it is not parsed as a comment marker but as part of the query.
That leaves 8 %s placeholders in the query but only 7 items inside payload (availability_type was commented out of the list), hence the "not enough arguments" error. Delete the whole "# availability_type = %s," line from the query, or add the matching value back to payload, so the counts match.
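For illustration, a minimal sketch of the query with the stray line removed, using the payload exactly as shown in the question (7 placeholders for 7 items):
c.execute("""UPDATE webservice_bsr
    SET
    AISN = %s,
    Best_Sellers_Rank = %s,
    selling_price = %s,
    price = %s,
    availability = %s,
    reviews = %s
    WHERE ItemSKU = %s""", payload)
conn.commit()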
I believe the problem is that you have multiple %s placeholders in your execute string but are only giving it a single item (in this case a list), which it doesn't know to break down into multiple values.
Try some of the suggestions in this post to get your desired effect:
Using Python String Formatting with Lists
I'm trying to move data in the same row from one field to another.
This is my code, but it doesn't work with the update statement:
def update_ondemanddrama(Name):
    with sqlite3.connect("sky_ondemand.db") as db:
        cursor = db.cursor()
        sql = "update TVshowsDrama set SecLastEp=LastEp where Name=?"
        cursor.execute(sql, Name)
        db.commit()
These work:
def insert_ondemanddrama(values):
    with sqlite3.connect("sky_ondemand.db") as db:
        cursor = db.cursor()
        sql = "update TVshowsDrama set Name=?, LastEp=? where Name=?"
        cursor.execute(sql, values)
        db.commit()

def insert_ondemanddoc(values):
    with sqlite3.connect("sky_ondemand.db") as db:
        cursor = db.cursor()
        sql = "update TVshowsDoc set Name=?, LastEp=? where Name=?"
        cursor.execute(sql, values)
        db.commit()
Type = int(input("Doc (1) or Drama (2)"))
Name = input("Enter name of Show")
LastEp = input("Enter Last episode aired (ex. s1e4)")

if Type == 1:
    if __name__ == "__main__":
        show = (Name, LastEp, Name)
        insert_ondemanddoc(show)
elif Type == 2:
    if __name__ == "__main__":
        show = (Name, LastEp, Name)
        update_ondemanddrama(Name)
        insert_ondemanddrama(show)
elif Type >= 3:
    print("Incorrect entry")
The error I get running this in python is:
Traceback (most recent call last):
File "C:\Users\ict\Downloads\skyondemandv1.py", line 65, in <module>
update_ondemanddrama(Name)
File "C:\Users\ict\Downloads\skyondemandv1.py", line 34, in update_ondemanddrama
cursor.execute(sql, Name)
sqlite3.ProgrammingError: Incorrect number of bindings supplied.
The current statement uses 1, and there are 5 supplied.
cursor.execute expects an iterable of parameters. When you give it a string, execute treats it as an iterable of its characters (here a 5-item iterable, since the name has 5 characters), which is why the error reports 5 bindings supplied.
Change the execute line to
cursor.execute(sql, (Name,))
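Putting that into the function from the question, a minimal sketch of the corrected version:
def update_ondemanddrama(Name):
    with sqlite3.connect("sky_ondemand.db") as db:
        cursor = db.cursor()
        sql = "update TVshowsDrama set SecLastEp=LastEp where Name=?"
        cursor.execute(sql, (Name,))   # one-element tuple for the single ? placeholder
        db.commit()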
New to SO and fairly new to coding, so doing my best to follow the appropriate protocols.
In my python script, I'm creating a new table and populating column names from a list, named 'dups'.
dups = ['Id', 'Name', 'Price', 'Rating']
I'm inputting this list as columns for the new table, called "SuperTable", via a for loop. See code below:
with new_db:
    cur = new_db.cursor()
    cur.execute("DROP TABLE IF EXISTS SuperTable")
    for i in dups:
        if i == dups[0]:
            new_col = i.replace("'","")
            cur.execute("CREATE TABLE SuperTable(%s)" % (new_col))
        else:
            cur.execute("ALTER TABLE SuperTable ADD COLUMN %s" % i)
I've looked around a lot and can't seem to identify what I'm doing wrong. This approach worked with SQLite, but I keep getting this same error with MySQLdb:
Traceback (most recent call last):
File "MySQL_SuperTable.py", line 125, in <module>
cur.execute("CREATE TABLE Super(%s)" % (new_col))
File "/opt/local/Library/Frameworks/Python.framework/Versions/2.6/lib/python2.6/site-packages/MySQLdb/cursors.py", line 174, in execute
self.errorhandler(self, exc, value)
File "/opt/local/Library/Frameworks/Python.framework/Versions/2.6/lib/python2.6/site-packages/MySQLdb/connections.py", line 36, in defaulterrorhandler
raise errorclass, errorvalue
_mysql_exceptions.ProgrammingError: (1064, "You have an error in your SQL syntax; check the manual that corresponds to your MySQL server version for the right syntax to use near ')' at line 1")
Thanks to eggyal! He pointed out that MySQL columns require a datatype. This is what the code looks like now (I created a list of tuples to input the datatypes + column names via a for loop):
with new_db:
    cur = new_db.cursor()
    cur.execute("DROP TABLE IF EXISTS SuperTable")
    for i in col_namestypes:
        if i == col_namestypes[0]:
            cur.execute("CREATE TABLE SuperTable(%s %s)" % (i))
        else:
            cur.execute("ALTER TABLE SuperTable ADD COLUMN %s %s" % i)

    for i in new_table:
        count = len(i)
        question_marks = []
        a = 0   # placeholder counter
        while a < count:
            question_marks.append('%s')
            a += 1
        quests = ','.join(question_marks)
    cur.executemany("INSERT INTO SuperTable VALUES(%s)" % quests, new_table)
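As a design note, the whole table can also be created in a single statement by joining the (name, type) pairs, which avoids the repeated ALTER TABLE calls; a minimal sketch, assuming col_namestypes holds tuples like ('Id', 'INT'):
with new_db:
    cur = new_db.cursor()
    cur.execute("DROP TABLE IF EXISTS SuperTable")
    # join "name type" pairs, e.g. "Id INT, Name VARCHAR(100), ..."
    columns = ", ".join("%s %s" % (name, dtype) for name, dtype in col_namestypes)
    cur.execute("CREATE TABLE SuperTable(%s)" % columns)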