import MySQLdb

def Network():
    dict = {'CISCO': 'NEXUS', 'JUNIPER': 'JUNO', 'Alcatel': 'Alc'}
    #len(dict)
    return (dict)

def db_store(variable):
    con = MySQLdb.connect("localhost", "root", "root", "fs_company")
    cur = con.cursor()
    for key, value in variable.items():
        cur.execute('''INSERT into google (name, company) values (%s, %s)''', (key, value))
    con.commit()
    cur.execute("""SELECT * FROM google""")
    variable = cur.fetchall()
    #print variable

variable = Network()
db_store(variable)
I have the above code to store the data in a MySQL database. I want to retrieve the data from the database in dictionary format. I need help with the same.
You are missing only one line. You can convert variable into a dict like this:
cur.execute("""SELECT * FROM google""")
out = cur.fetchall()
variable = {key:val for key,val in out}
print(variable)
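As an alternative sketch (not part of the original answer): MySQLdb can also hand back each row as a dict keyed by column name via its DictCursor, which skips the comprehension entirely. Assuming the same connection parameters and google table as above:

import MySQLdb
import MySQLdb.cursors

con = MySQLdb.connect("localhost", "root", "root", "fs_company",
                      cursorclass=MySQLdb.cursors.DictCursor)
cur = con.cursor()
cur.execute("SELECT * FROM google")
for row in cur.fetchall():
    print(row)  # each row arrives as e.g. {'name': 'CISCO', 'company': 'NEXUS'}

Note this gives one dict per row rather than a single {name: company} mapping, so pick whichever shape you need.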
How to insert data from the code below?
I have this code:
import numpy as np

latitude1 = -6.208470935786019
longitude1 = 106.81796891087399
new_data = [[latitude1, longitude1]]
preds = model.predict(new_data)
preds

arr = [latitude1, longitude1]
arrcon = np.concatenate((arr, preds))
print(arrcon)  # [-6.208470935786019 106.81796891087399 'Not Categorized']

listarcon = arrcon.tolist()
print(listarcon)  # [-6.208470935786019, 106.81796891087399, 'Not Categorized']

# make the list into a multi list
singlearcon = np.array(listarcon).reshape(1, 3)
print(singlearcon)  # [['-6.208470935786019' '106.81796891087399' 'Not Categorized']]
This is the insert-into-database code:
mycursor = conn.cursor()
sql = "INSERT INTO traveldata (Latitude,Longitude,Wisata) VALUES (%s, %s, %s)"
val = (listarcon[0],listarcon[1],listarcon[2])
mycursor.execute(sql, val)
How do I insert it into the database? The data doesn't seem to get into the database.
After executing a statement with mycursor.execute(sql, val), you need to commit the change with conn.commit(); note that commit() is a method of the connection object, not the cursor.
Reference for the commit method: https://dev.mysql.com/doc/connector-python/en/connector-python-api-mysqlconnection-commit.html
An example of insert code: https://dev.mysql.com/doc/connector-python/en/connector-python-example-cursor-transaction.html
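Putting it together, a minimal sketch (assuming conn is the open connection from the question):

mycursor = conn.cursor()
sql = "INSERT INTO traveldata (Latitude, Longitude, Wisata) VALUES (%s, %s, %s)"
val = (listarcon[0], listarcon[1], listarcon[2])
mycursor.execute(sql, val)
conn.commit()  # persists the INSERT; without this the row never reaches the table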
I am trying to write the data in a dictionary back to a SQL Server database table with pymssql.
But I am getting this error:
ValueError: more placeholders in sql than params available
Here is my code:
cursor = conn.cursor()
for key in dictW:
    x = dictW[key]
    sql = 'UPDATE tablename SET col = %s WHERE %s = #url '
    cursor.executemany(sql, (key, x))
conn.commit()
conn.close()
What am I doing wrong here?
You are attempting to execute your queries one by one but are using executemany(). You should consider using a simple execute() instead:
cursor = conn.cursor()
for key in dictW:
    x = dictW[key]
    sql = 'UPDATE tablename SET col = %s WHERE %s = #url '
    cursor.execute(sql, (key, x))
conn.commit()
conn.close()
If you want to use executemany(), you should make a list of tuples like this:
cursor = conn.cursor()
params = [(k, v) for k, v in dictW.items()]
sql = 'UPDATE tablename SET col = %s WHERE %s = #url '
cursor.executemany(sql, params)
conn.commit()
conn.close()
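One further caveat (my addition, not part of the original answer): %s placeholders can only bind values, never column names, so WHERE %s = #url ends up comparing two literals instead of filtering on a column. Assuming the intent is to match rows on a url column, with the dict key as the url and the dict value as the new column value, the column name has to be spelled out in the SQL itself:

cursor = conn.cursor()
sql = 'UPDATE tablename SET col = %s WHERE url = %s'   # column names are literal SQL, values are bound
params = [(v, k) for k, v in dictW.items()]            # (new value, url) pairs
cursor.executemany(sql, params)
conn.commit()
conn.close()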
I obtained a dictionary 'p' from the following code, but I am not able to insert it into the MySQL database. Please help me insert the data into the database.
The dictionary is: [('Casssandraw', 'Cooking'), ('Archanea', 'Playing'), ('Adarshan', 'Programming'), ('Leelal', 'Baking')]
It should be stored in the Names and Hobby fields:
Name Hobby
Cassandraw Cooking
Archanea Playing
... ...
Program:
import MySQLdb
import re

db = MySQLdb.connect(host="localhost",  # your host, usually localhost
                     user="root",       # your username
                     passwd="mysql",    # your password
                     db="sakila")

with open('qwer2.txt', 'r') as file, db as cursor:
    f = open('qwer2.txt', 'r')
    lines = f.readlines()
    for x in lines:
        p = re.findall(r'(?:name is|me)\s+(\w+).*?(?:interest|hobby)\s+is\s+(\w+)', x, re.I)
        print p
        cursor.execute(
            '''INSERT INTO Details (Names, Hobby)
            VALUES (%s, %s)''',
            (name, hobby))  # <- do not know what to provide
    db.commit()
It looks like you have a list of tuples containing name/hobby pairs, not a dict.
You can unpack the two and insert:
for name, hobby in p:  # presuming p is the list you posted in your question
    cursor.execute(
        '''INSERT INTO Details (Names, Hobby)
        VALUES (%s, %s)''',
        (name, hobby))
for name, hobby in p:
    print name, hobby
Casssandraw Cooking
Archanea Playing
Adarshan Programming
Leelal Baking
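Since p is already a list of (name, hobby) tuples, executemany() can also do the whole batch in one call; a minimal sketch, assuming db is the connection from the question:

cursor.executemany(
    '''INSERT INTO Details (Names, Hobby)
    VALUES (%s, %s)''',
    p)          # p is the list of (name, hobby) tuples
db.commit()     # don't forget to commit, or the inserts are discarded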
I am trying to store the following dictionary in a MySQL DB by converting the dictionary into a string and then inserting it, but I am getting the error below. How can this be solved, or is there any other way to store a dictionary in a MySQL DB?
dic = {'office': {'component_office': ['Word2010SP0', 'PowerPoint2010SP0']}}
d = str(dic)
# Sql query
sql = "INSERT INTO ep_soft(ip_address, soft_data) VALUES ('%s', '%s')" % ("192.xxx.xx.xx", d )
soft_data is a VARCHAR(500)
Error:
execution exception (1064, "You have an error in your SQL syntax; check the manual that corresponds to your MySQL server version for the right syntax to
use near 'office': {'component_office': ['Word2010SP0', 'PowerPoint2010SP0' at line 1")
Any suggestions or help please?
First of all, don't ever construct raw SQL queries like that. Never ever. This is what parametrized queries are for. You're asking for an SQL injection attack.
If you want to store arbitrary data such as Python dictionaries, you should serialize that data. JSON would be a good choice for the format.
Overall your code should look like this:
import MySQLdb
import json

db = MySQLdb.connect(...)
cursor = db.cursor()

dic = {'office': {'component_office': ['Word2010SP0', 'PowerPoint2010SP0']}}
sql = "INSERT INTO ep_soft(ip_address, soft_data) VALUES (%s, %s)"

cursor.execute(sql, ("192.xxx.xx.xx", json.dumps(dic)))
db.commit()  # commit() belongs to the connection, not the cursor
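Reading the value back is the mirror image; a short sketch, assuming a row was stored as above:

cursor.execute("SELECT soft_data FROM ep_soft WHERE ip_address = %s", ("192.xxx.xx.xx",))
row = cursor.fetchone()
restored = json.loads(row[0])  # deserializes the stored string back into a Python dict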
Change your code as below:
dic = {'office': {'component_office': ['Word2010SP0', 'PowerPoint2010SP0']}}
d = str(dic)
# Sql query
sql = """INSERT INTO ep_soft(ip_address, soft_data) VALUES (%r, %r)""" % ("192.xxx.xx.xx", d )
Try this:
dic = { 'office': {'component_office': ['Word2010SP0', 'PowerPoint2010SP0'] } }
"INSERT INTO `db`.`table`(`ip_address`, `soft_data`) VALUES (`{}`, `{}`)".format("192.xxx.xx.xx", str(dic))
Change db and table to the values you need.
It is a good idea to sanitize your inputs, and .format is useful when you need to use the same variable multiple times within a query. (Not that you need to for this example.)
dic = {'office': {'component_office': ['Word2010SP0', 'PowerPoint2010SP0']}}
ip = '192.xxx.xx.xx'

with conn.cursor() as cur:
    cur.execute("INSERT INTO `ep_soft`(`ip_address`, `soft_data`) VALUES ({0}, '{1}')".format(cur.escape(ip), json.dumps(dic)))
    conn.commit()
If you do not use cur.escape(variable), you will need to enclose the placeholder {} in quotes.
This answer has some pseudocode for the connection object, and the flavor of MySQL is MemSQL, but other than that it should be straightforward to follow.
import json
#... do something
a_big_dict = getAHugeDict() #build a huge python dict
conn = getMeAConnection(...)
serialized_dict = json.dumps(a_big_dict) #serialize dict to string
#Something like this to hold the serialization...
qry_create = """
CREATE TABLE TABLE_OF_BIG_DICTS (
ROWID BIGINT NOT NULL AUTO_INCREMENT,
SERIALIZED_DICT BLOB NOT NULL,
UPLOAD_DT TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
KEY (`ROWID`) USING CLUSTERED COLUMNSTORE
);
"""
conn.execute(qry_create)
#Something like this to hold em'
qry_insert = """
INSERT INTO TABLE_OF_BIG_DICTS (SERIALIZED_DICT)
SELECT '{SERIALIZED_DICT}' as SERIALIZED_DICT;
"""
#Send it to db
conn.execute(qry_insert.format(SERIALIZED_DICT=serialized_dict))
#grab the latest
qry_read = """
SELECT a.SERIALIZED_DICT
from TABLE_OF_BIG_DICTS a
JOIN
(
SELECT MAX(UPLOAD_DT) AS MAX_UPLOAD_DT
FROM TABLE_OF_BIG_DICTS
) b
ON a.UPLOAD_DT = b.MAX_UPLOAD_DT
LIMIT 1
"""
#something like this to read the latest dict...
df_dict = conn.sql_to_dataframe(qry_read)
dict_str = df_dict.iloc[df_dict.index.min()][0]
#dicts never die they just get rebuilt
dict_better = json.loads(dict_str)
I am trying to find a fast way (fast to run and not a lot of code) to get CSV data into a Postgres database. I am reading into Python using csv.DictReader, which works fine. Then I need to generate code somehow that takes the dicts and puts them into a table. I want to do this automatically, as my tables often have hundreds of variables. (I don't want to read directly into Postgres because in many cases I must transform the data, and Python is good for that.)
This is some of what I have got:
import psycopg2
import psycopg2.extras
import psycopg2.extensions
import sys
import csv
import itertools

csvReader = csv.DictReader(open('/home/matthew/Downloads/us_gis_data/statesp020.csv', "rb"), delimiter=',')
#close.cursor()
x = 0
ConnectionString = "host='localhost' dbname='mydb' user='postgres' password='######"
try:
    connection = psycopg2.extras.DictConnection(ConnectionString)
    print "connecting"
except:
    print "did not work"

# Create a test table with some data
dict_cur = connection.cursor()
#dict_cur.execute("CREATE TABLE test (id serial PRIMARY KEY, num integer, data varchar);")
for i in range(1, 50):
    x = x + 1
    print x
    dict_cur.execute("INSERT INTO test (num, data) VALUES(%s, %s)", (x, 3.6))  #"abc'def"))

### how do I create the table and insert values using the DictReader?
dict_cur.execute("SELECT * FROM test")
for k in range(0, x + 1):
    rec = dict_cur.fetchone()
    print rec['num'], rec['data']
Say you have a list of field names (presumably you can get this from the header of your csv file):
fieldnames = ['Name', 'Address', 'City', 'State']
Assuming they're all VARCHARs, you can create the table "TableName":
sql_table = 'CREATE TABLE TableName (%s)' % ','.join('%s VARCHAR(50)' % name for name in fieldnames)
cursor.execute(sql_table)
You can insert the rows from a dictionary "dict":
sql_insert = ('INSERT INTO TableName (%s) VALUES (%s)' %
              (','.join(fieldnames),
               ','.join('%%(%s)s' % name for name in fieldnames)))
cursor.execute(sql_insert, dict)
Or do it in one go, given a list dictionaries:
dictlist = [dict1, dict2, ...]
cursor.executemany(sql_insert, dictlist)
You can adapt this as necessary based on the type of your fields and the use of DictReader.
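Tying it back to the question, here is a minimal end-to-end sketch. It assumes the CSV has a header row, the header names are valid SQL identifiers, and every column can live in a VARCHAR(50); cursor and conn are hypothetical names for an open psycopg2 cursor and connection:

import csv
import psycopg2

conn = psycopg2.connect("host='localhost' dbname='mydb' user='postgres'")
cursor = conn.cursor()

with open('/home/matthew/Downloads/us_gis_data/statesp020.csv') as f:
    reader = csv.DictReader(f, delimiter=',')
    fieldnames = reader.fieldnames
    # build CREATE TABLE and INSERT statements from the header
    sql_table = 'CREATE TABLE TableName (%s)' % ','.join('%s VARCHAR(50)' % name for name in fieldnames)
    cursor.execute(sql_table)
    sql_insert = ('INSERT INTO TableName (%s) VALUES (%s)' %
                  (','.join(fieldnames),
                   ','.join('%%(%s)s' % name for name in fieldnames)))
    cursor.executemany(sql_insert, list(reader))  # DictReader yields one dict per row

conn.commit()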
I am a novice, but this worked for me. I used pgAdmin to create the testCSV table.
import csv
import psycopg2 as dbapi

con = dbapi.connect(database="testpg", user="postgres", password="secret")
cur = con.cursor()

csvObject = csv.reader(open(r'C:\testcsv.csv', 'r'), dialect='excel', delimiter=',')
passData = "INSERT INTO testCSV (param1, param2, param3, param4, param5) VALUES (%s,%s,%s,%s,%s);"
for row in csvObject:
    csvLine = row
    cur.execute(passData, csvLine)
con.commit()
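Since the question asks for speed: for bulk loads, psycopg2 can also hand the file straight to Postgres's COPY command, which is much faster than row-by-row INSERTs. A sketch, assuming the target table testCSV already exists and the file has a header row:

import psycopg2

con = psycopg2.connect(database="testpg", user="postgres", password="secret")
cur = con.cursor()
with open(r'C:\testcsv.csv', 'r') as f:
    # stream the whole file through COPY instead of issuing one INSERT per row
    cur.copy_expert("COPY testCSV FROM STDIN WITH (FORMAT csv, HEADER true)", f)
con.commit()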