I was told that writing directly to a cursor is a serious SQL vulnerability and that anyone could easily dump my DB. How can I do my SQL queries securely?
import psycopg2
import web

urls = (
    "/", "Index",
    "/questlist", "Questlist"
)

web.config.debug = True
app = web.application(urls, globals())
render = web.template.render("templates/", base="layout")

con = psycopg2.connect(
    database = "postgres",
    user = "postgres",
    password = "balloons",
    port = "55210502147432"
)

class Index(object):
    def __init__(self):
        pass

    def GET(self):
        return render.index()

class Questlist(object):
    def __init__(self):
        pass

    def GET(self):
        try:
            c = con.cursor()
            c.execute("SELECT quest_title, quest_difficulty, quest_post FROM quest_list")
            questlist = c.fetchall()
            return render.quest(Quests = questlist)
        except psycopg2.InternalError as e:
            con.rollback()
            print e
            return "Session error"
        return "wtf did u do,? u really busted her"

    def POST(self):
        form = web.input(quest_title="", quest_difficulty="", quest_post="")
        if len(form.quest_title) + len(form.quest_difficulty) + len(form.quest_post) > 50:
            return "Too many characters submitted"
        try:
            c = con.cursor()
            c.execute("INSERT INTO quest_list (quest_title, quest_difficulty, quest_post) \
                VALUES (%s, %s, %s)", (form.quest_title, form.quest_difficulty, form.quest_post))
            con.commit()
        except psycopg2.InternalError as e:
            con.rollback()
            print e
        except psycopg2.DataError as e:
            con.rollback()
            print e
            return "invalid data, you turkey"
        return render.index()

if __name__ == "__main__":
    app.run()
Here's the SQL I'm worried about:
c.execute("INSERT INTO quest_list (quest_title, quest_difficulty, quest_post) \
VALUES (%s, %s, %s)", (form.quest_title, form.quest_difficulty, form.quest_post))
Here's the site I'm currently running this on:
http://rpg.jeffk.org/questlist
Feel free to try and break it.
c.execute("INSERT INTO quest_list (quest_title, quest_difficulty, quest_post) \
VALUES (%s, %s, %s)", (form.quest_title, form.quest_difficulty, form.quest_post))
This is fine: you are passing the values through the parameter placeholders built into Python's database libraries, so the driver escapes them for you and injection is avoided.
c.execute("INSERT INTO quest_list(quest_title, quest_difficulty, quest_post)\
VALUES (%s, %s, %s)"%(form.quest_title, form.quest_difficulty, form.quest_post))
would be a potential security flaw, because it builds the query with ordinary Python string formatting instead of the driver's parameter substitution.
To see why, consider the following user input when standard string formatting is used:
form.quest_post = "1);SELECT * FROM USERS;//"
This would allow them to dump your whole user table, because the statement would be passed to the database as:
c.execute("INSERT INTO quest_list(quest_title,quest_dificulty,quest_post)\
VALUES (something_benign,something_else,1);SELECT * FROM USERS;//)")
which hopefully you can recognize as a problematic statement; they could just as easily change your admin password or anything else.
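If you want to see what the driver actually sends, here is a small sketch (reusing the con connection and the table from the question, with a hypothetical malicious value) that uses psycopg2's mogrify to inspect the parameter binding:

c = con.cursor()
malicious = "1);SELECT * FROM USERS;--"
print(c.mogrify(
    "INSERT INTO quest_list (quest_title, quest_difficulty, quest_post) VALUES (%s, %s, %s)",
    ("something_benign", "something_else", malicious)))
# The third value is bound as a single quoted string literal,
# so the injected SELECT never executes as SQL.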
Related
I'm getting this error:
You have an error in your SQL syntax; check the manual that corresponds to your MySQL server version for the right syntax to use near '%s)' at line 1
upi = upi_entry.get()

mysqldb = mysql.connector.connect(
    host="localhost",
    user="root",
    password="deol9646",
    database="train_login",
)
mycursor = mysqldb.cursor()

try:
    mycursor.execute(
        """create table if not exists upi_data(upi text)"""
    )
    sql = "INSERT INTO UPI_DATA (UPI) VALUES (%s)"
    val = upi
    mycursor.execute(sql, val)
    mysqldb.commit()
    lastid = mycursor.lastrowid
    messagebox.showinfo("information", "upi inserted successfully...")
    upi_entry.delete(0, END)
    upi_entry.focus_set()
except Exception as e:
    print(e)
    mysqldb.rollback()
    mysqldb.close()
The parameters need to be a tuple; you're passing in val as a single value, so the MySQL driver doesn't turn %s into anything and that ends up a syntax error.
Add a trailing comma so the parenthesized expression (upi) becomes a 1-tuple, (upi,):
sql = "INSERT INTO UPI_DATA (UPI) VALUES (%s)"
val = (upi,)
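For completeness, a minimal sketch of the corrected call (same names as in the question); the connector accepts any sequence of parameters, so a one-element list works just as well as a 1-tuple:

sql = "INSERT INTO UPI_DATA (UPI) VALUES (%s)"
mycursor.execute(sql, (upi,))   # 1-tuple
# or equivalently:
mycursor.execute(sql, [upi])    # one-element list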
I have several CSV files that I'm trying to upload to a PostgreSQL database.
My current file/function setup works perfectly fine for files without NULLs, but it's when I have null values that I hit an issue.
In the CSV, those nulls are currently empty cells. The function I'm using to upload them to the database looks like this:
insert_query_string = ("""
INSERT INTO sample_table (
primaryKey,
color,
date,
place,
numeric1,
numeric2,
numeric3)
VALUES (%s, %s, %s, %s, %s, %s, %s)
""")
def loadData(cur, conn, query):
"""
Loads data blahblahblah
"""
try:
data_csv = csv.reader(open(data_path + 'data.csv'))
header = next(data_csv)
for row in data_csv:
print(row)
cur.execute(query, row)
print(' - data loaded.')
conn.commit()
except FileNotFoundError:
print("CSV file not found.")
def main():
conn = None
try:
# connect
print("Connecting ... ")
conn = pg.connect(**nfl_params)
cur = conn.cursor()
# load
print("Loading data ... ")
loadData(cur, conn, insert_query_string)
except (Exception, pg.DatabaseError) as error:
print(error)
finally:
if conn is not None:
conn.close()
print("Database Connection closed.")
if __name__ == "__main__":
main()
Some of the numeric columns are null, and yet I can't seem to figure out how to get them into the database. As is, I'm getting invalid input syntax for type numeric: "", so it seems the empty cell is being passed as a string.
This Stack Overflow question seems to say to replace the empty cells with 'null' or 'NULL', but that doesn't work either.
My problem, as far as I can tell, is that CSV files have no way to represent null values other than an empty cell, which isn't working for me. For now I'm thinking of replacing the null values with -1 or something, but that doesn't seem like the cleanest way to do it.
Thanks in advance!
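A minimal sketch of one way to handle this (same query, names, and loop shape as in the question; data_path comes from the question's setup): convert empty strings to None before binding the row, since psycopg2 sends Python None to PostgreSQL as NULL.

import csv

def loadData(cur, conn, query):
    data_csv = csv.reader(open(data_path + 'data.csv'))
    header = next(data_csv)
    for row in data_csv:
        # Empty CSV cells arrive as ''; psycopg2 maps None to SQL NULL,
        # so turn '' into None before executing.
        cleaned = [value if value != '' else None for value in row]
        cur.execute(query, cleaned)
    conn.commit()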
Using Python, I am looping through a CSV file to read data, then doing some modifications on each row and calling a save function to insert the modified data into MySQL.
def save(Id, modifiedData):
    try:
        mydb = mysql.connector.connect(host="localhost", user="use", password="pass", database="data")
        sql = "INSERT INTO data (Id, modifiedData) VALUES (%s, %s)"
        recordTuple = (Id, modifiedData)
        mycursor = mydb.cursor()
        mycursor.execute(sql, recordTuple)
        mydb.commit()
        print("Record inserted successfully into table")
    except mysql.connector.Error as error:
        print("Failed to insert into MySQL table {}".format(error))

def main():
    for row in csv:
        #modify row
        #creat Id
        save(Id, modifiedData)
But I don't think it is a good solution to open a MySQL connection and insert data on every iteration; it will be time- and resource-consuming, especially when I move to a real server in production.
How can I improve my solution?
Ideally, connections should be managed by a connection pool and rows should be committed in bulk. For the amount of data in a typical CSV, though, it may not matter much. If you don't want to deal with that yourself, I recommend using an ORM such as SQLAlchemy.
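For reference, a minimal sketch of a connection pool with mysql.connector (connection settings copied from the question; the pool size is an arbitrary choice for this sketch):

import mysql.connector.pooling

# Connections are borrowed from the pool and reused instead of re-opened per insert.
pool = mysql.connector.pooling.MySQLConnectionPool(
    pool_name="csv_pool",
    pool_size=5,  # arbitrary
    host="localhost", user="use", password="pass", database="data",
)

def save(Id, modifiedData):
    mydb = pool.get_connection()   # borrow a connection from the pool
    try:
        mycursor = mydb.cursor()
        mycursor.execute("INSERT INTO data (Id, modifiedData) VALUES (%s, %s)",
                         (Id, modifiedData))
        mydb.commit()
    finally:
        mydb.close()               # returns the connection to the pool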
You only need to create the connection once, and that should be in function main, which then passes the connection to function save as follows:
def save(mydb, Id, modifiedData):
    try:
        sql = "INSERT INTO data (Id, modifiedData) VALUES (%s, %s)"
        recordTuple = (Id, modifiedData)
        mycursor = mydb.cursor()
        mycursor.execute(sql, recordTuple)
        mydb.commit()
        print("Record inserted successfully into table")
    except mysql.connector.Error as error:
        print("Failed to insert into MySQL table {}".format(error))

def main():
    try:
        mydb = mysql.connector.connect(host="localhost", user="use", password="pass", database="data")
    except mysql.connector.Error as error:
        print("Failed to create connection: {}".format(error))
        return
    for row in csv:
        #modify row
        #creat Id
        save(mydb, Id, modifiedData)
For perhaps even greater performance you can try executemany:
def save(mydb, modified_records):
    try:
        sql = "INSERT INTO data (Id, modifiedData) VALUES (%s, %s)"
        mycursor = mydb.cursor()
        mycursor.executemany(sql, modified_records)
        mydb.commit()
        print("Records inserted successfully into table")
    except mysql.connector.Error as error:
        print("Failed to insert into MySQL table {}".format(error))

def main():
    try:
        mydb = mysql.connector.connect(host="localhost", user="use", password="pass", database="data")
    except mysql.connector.Error as error:
        print("Failed to create connection: {}".format(error))
        return
    modified_records = []
    for row in csv:
        #modify row
        #creat Id
        modified_records.append([Id, modifiedData])
    save(mydb, modified_records)
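If the CSV is very large, a further variation (a sketch only; the batch size is an arbitrary assumption) is to flush in batches instead of holding every record in memory:

BATCH_SIZE = 1000  # arbitrary; tune for your data

def save(mydb, batch):
    mycursor = mydb.cursor()
    mycursor.executemany("INSERT INTO data (Id, modifiedData) VALUES (%s, %s)", batch)
    mydb.commit()

def main():
    mydb = mysql.connector.connect(host="localhost", user="use", password="pass", database="data")
    batch = []
    for row in csv:            # same placeholder loop as in the question
        #modify row
        #creat Id
        batch.append((Id, modifiedData))
        if len(batch) >= BATCH_SIZE:
            save(mydb, batch)
            batch = []
    if batch:                  # flush the remainder
        save(mydb, batch)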
I'm trying to schedule a task every 5 seconds; here is what I did:
conn = connect('mydatabase.db')
c = conn.cursor()
c.execute('CREATE TABLE IF NOT EXISTS RSSEntries (entry_id INTEGER PRIMARY KEY AUTOINCREMENT, title , url , date );')

def checkLink(linko):
    c.execute("SELECT entry_id FROM RSSEntries WHERE url = ?", (linko,))
    datas = c.fetchall()
    if len(datas) == 0:
        return True
    else:
        return False

def storeData():
    data = feedparser.parse("http://www...")
    for i in range(len(data['entries'])):
        if checkLink(data.entries[i].link) is True:
            print "doesn't exist"
            c.execute("insert into RSSEntries VALUES\
                (NULL,'%s', '%s', '%s')" % (data.entries[i].title, data.entries[i].link, data.feed.updated))
        else:
            print "exist"

schedule.every(5).seconds.do(storeData)
conn.commit()
But the storeData function is never reached.
If I run storeData() directly instead of schedule.every(5).seconds.do(storeData), the code works perfectly. What am I doing wrong?
Any suggestions or other ways to do this task are welcome.
I think you are missing the scheduler loop at the end of your script:
while True:
    schedule.run_pending()
    time.sleep(1)
https://pypi.python.org/pypi/schedule
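For context, a sketch of how the end of the script might look with that loop in place (this assumes import schedule and import time at the top of the file):

schedule.every(5).seconds.do(storeData)

while True:
    schedule.run_pending()   # runs storeData once 5 seconds have elapsed
    time.sleep(1)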
I have the following problem:
I want to insert the temperature of my RPi into SQLite3 using Python.
The Python script that I want to use:
import subprocess
import os
import sqlite3 as lite
import datetime
import sys
import time

def get_temperature():
    "Returns the temperature in degrees C"
    try:
        s = subprocess.check_output(["cat", "/sys/class/thermal/thermal_zone0/temp"])
        return s[:-1]
    except:
        return 0

try:
    con = lite.connect('/www/auslastung.s3db')
    cur = con.cursor()
    temp = int(get_temperature())
    zeit = time.strftime('%Y-%m-%d %H:%M:%S')
    cur.execute("INSERT INTO 'temperatur' ('Wert', 'Zeit') VALUES (%s, %s)", (temp, zeit))
    con.commit()
except lite.Error, e:
    if con:
        con.rollback()
    print "Error %s" % e.args[0]
    sys.exit(1)
finally:
    if con:
        con.close()
Every time I run this, I just get the error:
Error near "%": syntax error
What should I do to solve this?
Replace
cur.execute("INSERT INTO 'temperatur' ('Wert', 'Zeit') VALUES (%s, %s)", (temp, zeit))
with
cur.execute("INSERT INTO 'temperatur' ('Wert', 'Zeit') VALUES (?, ?)", (temp, zeit))
There is also a problem with your finally clause. It will fail with the error NameError: name 'con' is not defined if con is never assigned to in the first place (e.g., if the directory /www/ does not exist, so con = lite.connect('/www/auslastung.s3db') fails). You could do the following to avoid this issue:
con = None
try:
    # ...
except lite.Error, e:
    if con is not None:
        con.rollback()
    # ...
finally:
    if con is not None:
        con.close()
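If you would rather not track con by hand, here is a hedged alternative sketch: contextlib.closing closes the connection for you, and the connection's own context manager commits or rolls back the transaction (the placeholder values below stand in for the ones computed in the script above):

import time
import sqlite3 as lite
from contextlib import closing

temp = 47000                                   # placeholder reading for this sketch
zeit = time.strftime('%Y-%m-%d %H:%M:%S')

with closing(lite.connect('/www/auslastung.s3db')) as con:
    with con:  # commits on success, rolls back on error; does not close the connection
        con.execute("INSERT INTO 'temperatur' ('Wert', 'Zeit') VALUES (?, ?)", (temp, zeit))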
You can also replace:
cur.execute("INSERT INTO 'temperatur' ('Wert', 'Zeit') VALUES (%s, %s)", (temp, zeit))
with plain Python string formatting:
cur.execute("INSERT INTO 'temperatur' ('Wert', 'Zeit') VALUES ('%s', '%s')" % (temp, zeit))
Note, however, that you then have to quote the string values yourself (the unquoted timestamp would otherwise cause another syntax error), and building SQL by string formatting reopens the injection problem discussed in the first question, so #nwk's ? placeholder version is the safer choice.