Inserting an array of points into Postgres from Python

I have a table, region, defined as:
place_id    | integer |
pts         | point[] |
region_name | text    |
region_id   | integer | not null default nextval('region_region_id_seq'::regclass)
and would like to insert a row from Python.
I have an array literal, pgon:
'{{-171.01155,-11.05869},{-171.01116,-11.06022},{-171.01138,-11.07814},{-171.01708,-11.08953},{-171.01963,-11.09463},{-171.03217,-11.10509},{-171.04886,-11.10929},{-171.07441,-11.11013},{-171.09834,-11.10787},{-171.10588,-11.10552},{-171.12063,-11.10091},{-171.13384,-11.09055},{-171.13385,-11.09055},{-171.13386,-11.09054},{-171.13387,-11.09053},{-171.13386,-11.09053},{-171.13384,-11.09053},{-171.13383,-11.09053},{-171.13529,-11.08719},{-171.13966,-11.07717},{-171.14112,-11.07383},{-171.14113,-11.07382},{-171.14114,-11.07382},{-171.14115,-11.07382},{-171.14191,-11.06076},{-171.14118,-11.04661},{-171.14116,-11.04622},{-171.13745,-11.03009},{-171.12577,-11.0156},{-171.1114,-11.00379},{-171.09488,-10.9978},{-171.08485,-10.9975},{-171.07463,-10.9972},{-171.05771,-10.99903},{-171.05163,-11.00014},{-171.03634,-11.00291},{-171.03358,-11.00409},{-171.0279,-11.00652},{-171.01928,-11.01225},{-171.01876,-11.01339},{-171.01407,-11.0237},{-171.01238,-11.04712}}'
place_id = 6, name = "American Samoa"
My python command line is:
cur.execute("insert into region (place_id,pts,region_name) values (%,%,%)",(place_id, pgon, name))
But I get
ValueError: "unsupported format character ',' (0x2c) at index 55"
I have done inserts before, but not with point[] in the target. Can someone tell me how I should be doing this? Thanks.
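A note on the immediate ValueError before the point[] question itself: psycopg2 placeholders are %s, not bare %, so the format string is what raises the error. A minimal sketch of the corrected call, assuming pgon stays the literal string shown above and is cast to point[] on the server side:

# Hedged sketch: %s placeholders plus an explicit cast of the array literal string.
cur.execute(
    "insert into region (place_id, pts, region_name) "
    "values (%s, %s::point[], %s)",
    (place_id, pgon, name))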

create table t (p point[]);
Using the adapter example from the documentation:
import psycopg2
from psycopg2.extensions import adapt, register_adapter, AsIs

class Point(object):
    def __init__(self, x, y):
        self.x = x
        self.y = y

def adapt_point(point):
    x = adapt(point.x).getquoted()
    y = adapt(point.y).getquoted()
    return AsIs("'(%s, %s)'" % (x, y))

register_adapter(Point, adapt_point)
my_tuples_list = [(-171.01155,-11.05869),(-171.01116,-11.06022),(-171.01138,-11.07814),(-171.01708,-11.08953),(-171.01963,-11.09463),(-171.03217,-11.10509),(-171.04886,-11.10929),(-171.07441,-11.11013),(-171.09834,-11.10787),(-171.10588,-11.10552),(-171.12063,-11.10091),(-171.13384,-11.09055),(-171.13385,-11.09055),(-171.13386,-11.09054),(-171.13387,-11.09053),(-171.13386,-11.09053),(-171.13384,-11.09053),(-171.13383,-11.09053),(-171.13529,-11.08719),(-171.13966,-11.07717),(-171.14112,-11.07383),(-171.14113,-11.07382),(-171.14114,-11.07382),(-171.14115,-11.07382),(-171.14191,-11.06076),(-171.14118,-11.04661),(-171.14116,-11.04622),(-171.13745,-11.03009),(-171.12577,-11.0156),(-171.1114,-11.00379),(-171.09488,-10.9978),(-171.08485,-10.9975),(-171.07463,-10.9972),(-171.05771,-10.99903),(-171.05163,-11.00014),(-171.03634,-11.00291),(-171.03358,-11.00409),(-171.0279,-11.00652),(-171.01928,-11.01225),(-171.01876,-11.01339),(-171.01407,-11.0237),(-171.01238,-11.04712)]
my_point_list = [Point(p[0], p[1]) for p in my_tuples_list]
conn = psycopg2.connect(database='cpn')
cursor = conn.cursor()
insert = "insert into t (p) values (%s::point[])"
cursor.execute(insert, (my_point_list,))
conn.commit()
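As far as I can tell, psycopg2 adapts a Python list by adapting each element and wrapping the results in an ARRAY[...] construct, so registering the adapter for Point is enough to cover a list of points as well; the explicit %s::point[] cast is what makes Postgres read the quoted '(x, y)' strings as points rather than text.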

I got this running; the code is below. The point[] gets to PG. Unfortunately, cur.fetchall() now throws an exception after cur.execute("select * from test;").
#!/usr/bin/python
import psycopg2
from config import *

dbname = "test"

try:
    dbconn = psycopg2.connect(dbname=dbname, host=dbHost, port=5432,
                              user=dbUser, password=dbPassword)
    cur = dbconn.cursor()
except:
    print "Error : Unable to open database\n"

poly = "-94.26958,33.56679 -94.26926,33.56763 -94.23197,33.55209"
poly = poly.replace(" ", ")\",\"(")
poly = "{\"(" + poly + ")\"}"

try:
    cur.execute("drop table if exists test;")
    cur.execute("CREATE TABLE test (id serial PRIMARY KEY, num integer, anum integer[], pt point, apt point[], nm text);")
    cur.execute("insert into test (num,apt,nm) values (%s,%s,%s) returning *", [1, poly, "fred"])
    dbconn.commit()
    cur.execute("select * from test;")
    print cur.fetchall()
except psycopg2.Error as e:
    print e.pgerror
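On the read side, one way to get Python tuples back from a point or point[] column instead of their raw string form is to register a typecaster. A sketch, not tested against this exact schema, that looks the OIDs up at runtime instead of hard-coding them (new_array_type needs psycopg2 >= 2.4.3):

import psycopg2
import psycopg2.extensions

def cast_point(value, cur):
    # value arrives as a string like '(x,y)'; NULLs arrive as None
    if value is None:
        return None
    x, y = value.strip('()').split(',')
    return (float(x), float(y))

conn = psycopg2.connect(dbname="test")
cur = conn.cursor()

# Ask the server for the OIDs rather than hard-coding them.
cur.execute("SELECT NULL::point, NULL::point[]")
point_oid = cur.description[0][1]
point_array_oid = cur.description[1][1]

POINT = psycopg2.extensions.new_type((point_oid,), "POINT", cast_point)
psycopg2.extensions.register_type(POINT)

# The array caster splits the '{...}' string and hands each element to POINT.
POINTARRAY = psycopg2.extensions.new_array_type((point_array_oid,), "POINT[]", POINT)
psycopg2.extensions.register_type(POINTARRAY)

# From here on, selecting pt or apt columns returns tuples / lists of tuples.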

import psycopg2 as pg
from psycopg2.extensions import adapt, register_adapter, AsIs

targetConn = pg.connect(dbname='', user='', host='127.0.0.1', password='root')
targetCursor = targetConn.cursor()

class Point(object):
    def __init__(self, x, y):
        self.x = x
        self.y = y

def adapt_point(point):
    x = adapt(point.x)
    y = adapt(point.y)
    return AsIs("'(%s, %s)'" % (x, y))

register_adapter(Point, adapt_point)

targetCursor.execute("INSERT INTO point (point) VALUES (%s)",
                     (Point(1.23, 4.56),))
targetConn.commit()
targetConn.close()

Related

Incorrect number of bindings supplied. The current statement uses 1, and there are 6 supplied

In the line:
for row in cursor.execute("SELECT classCapacity FROM tblClass1 WHERE classID = ?", [classID]):
I am getting a binding error saying the statement uses one parameter and six have been supplied. I have been stuck trying to fix it for hours now; any suggestions are welcome.
def addBooking():
    def CostCalculation():
        Cost = 0
        Validation = False
        sqlite3.connect("TestProject.db")
        cursor = connection.cursor()
        for row in cursor.execute("SELECT classCost from tblClass1 WHERE classID =?", cClassID):
            Validation = True
            cost = row[0]
            Cost = (int(cost))
        return Cost, Validation

    connection = sqlite3.connect("TestProject.db")
    curosr = connection.cursor()
    cClientID = getID(clientID.get())
    cClassID = classID.get()
    cDate = bookingDate.get()
    cCost, costValidation = CostCalculation()
    bookingRec = [cClientID, cClassID, cDate, cCost]
    for row in cursor.execute("SELECT classCapacity FROM tblClass1 WHERE classID = ?", [classID]):
        maxCap = row[0]
    capacity = 0
    for row in cursor.execute("SELECT * FROM tblBooking WHERE classID = ? and bookingDate =?", [cClassID, cDate]):
        capacity = capacity + 1
    if capacity < maxCap:
        cursor.execute("INSERT INTO tblBooking VALUES(Null,?,?,?,?)", bookingRec)
        connection.commit()
    connection.close()
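This error usually means the parameters argument was a plain string rather than a sequence: sqlite3 iterates over whatever it is given, so a six-character class ID supplies six bindings. In the code above, the call inside CostCalculation passes cClassID bare, and the later query passes the widget classID instead of its value cClassID. A small hedged sketch (the class code 'CLS001' is a made-up example, and TestProject.db with tblClass1 is assumed to exist as in the question):

import sqlite3

connection = sqlite3.connect("TestProject.db")
cursor = connection.cursor()
cClassID = "CLS001"   # assumed example value from classID.get()

# Wrong: a bare string is treated as a sequence of single-character bindings.
# cursor.execute("SELECT classCapacity FROM tblClass1 WHERE classID = ?", cClassID)

# Right: wrap the single value in a one-element tuple (or list).
cursor.execute("SELECT classCapacity FROM tblClass1 WHERE classID = ?", (cClassID,))
row = cursor.fetchone()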

Update Values in Mysql

from dataa import *

dataman = mysql.connector.connect(host='localhost', user='root', passwd='12345678', database='basictest')
databoy = dataman.cursor()

with open('data.txt') as g:
    data = g.read().split('\n')

vl = ['']
vm = ['SELECT Name FROM Patient', 'SELECT DOB FROM Patient', 'SELECT Email FROM Patient',
      'SELECT Username FROM Patient', 'SELECT Password FROM Patient', 'SELECT Mobile FROM Patient']
x = 0
for j, i in zip(data, vm):
    if  # (condition missing in the original post)
        vl[0] = j
        x += 1

databax = ("INSERT INTO Patient(Name) VALUES(%s)")
d = [('a')]
databoy.execute(databax, d)
dataman.commit()
print(x)
I want to append each value of j to a separate column. The only thing I can do right now is append everything to the same column; when I add another column, it throws an error. What can I do? Can I select each column separately and update it each time I submit the form? This is the registration form of an application. Solutions without classes would be appreciated.
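If the goal is one registration row with each value in its own column, a single parameterized INSERT naming all the columns is usually simpler than per-column updates. A minimal sketch, assuming data.txt holds the six values in the order implied by the SELECT list above (Name, DOB, Email, Username, Password, Mobile):

import mysql.connector

dataman = mysql.connector.connect(host='localhost', user='root',
                                  password='12345678', database='basictest')
databoy = dataman.cursor()

with open('data.txt') as g:
    data = g.read().split('\n')

insert = ("INSERT INTO Patient (Name, DOB, Email, Username, Password, Mobile) "
          "VALUES (%s, %s, %s, %s, %s, %s)")
databoy.execute(insert, tuple(data[:6]))   # one placeholder per column
dataman.commit()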

How to use multiple variables in pymysql

How can I use multiple variables in my query:
import pymysql

def get_firstname(y):
    db = pymysql.connect(host="xxx.xxx.xxx.xxx",
                         user="xxxx",
                         passwd="xxxxx",
                         db="XXXXXXX")
    cur = db.cursor()
    query = "SELECT first_name FROM information WHERE x = y;"
    cur.execute(query, (y))
    result = str(cur.fetchone()[0])
    return(result)

x = user_id
y = 1
print(get_firstname(y))
I would like to be able to change x to different variables depending on my choice, like user_id, last_name, email, etc. (all columns in my database). y is another variable which represents the value of the column in my database.
Ex1: if x is "user_id" and y is "1", then the result is the first name of the person with user_id 1.
Ex2: if x is "email" and y is "person#person.com", then the result is the first name of the person with the email person#person.com.
Use the below code:
import pymysql

def get_firstname(x, y):
    db = pymysql.connect(host='xxx.xxx.xxx.xxx',
                         port=3306,
                         user='xxxx',
                         password='xxxxxxxx',
                         db='test_db')
    cur = db.cursor()
    query = "SELECT first_name FROM `information` WHERE %s" % (x)
    final_query = query + "=%s"
    #print(final_query)
    cur.execute(final_query, (y,))
    result = cur.fetchall()
    return(result)

x = 'user_id'
y = 1
output = get_firstname(x, y)
if output:
    for x in output:
        print(x[0])
Sample Inputs:
x = 'email_id'
y = 'xx#gmail.com'
or
x = 'user_id'
y = 1  # not enclosed in single quotes
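One caveat about the approach above: only the value y is bound as a parameter; the column name x is spliced in with plain string formatting, so it should come from trusted code. A small hedged sketch of one way to guard it (ALLOWED_COLUMNS is an assumption about the schema, not something from the original post):

# Hypothetical whitelist of column names allowed in the WHERE clause.
ALLOWED_COLUMNS = {'user_id', 'email_id', 'last_name'}

def build_query(column):
    # Column names cannot be bound with %s, so validate them against a
    # fixed whitelist before formatting them into the statement.
    if column not in ALLOWED_COLUMNS:
        raise ValueError("unexpected column name: %r" % column)
    return "SELECT first_name FROM `information` WHERE `%s` = %%s" % column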

Python does not commit on insert query

I am trying to bulk insert locations into WordPress. I have defined functions to check and add terms and taxonomies:
def checkTerm(term, con):
    cur = con.cursor()
    query = "SELECT term_id FROM wp_terms as t WHERE t.name = '%s'" % term
    print query
    cur.execute(query)
    rows = cur.fetchall()
    if rows: return rows[0][0]
    else: return None

def addTerm(term, slug, con):
    cur = con.cursor()
    try:
        query = "INSERT INTO `wp_terms` (`name`,`slug`,`term_group`) VALUES ('%s','%s',0)" % (term, slug)
        print query
        cur.execute(query)
        con.commit()
        rows = checkTerm(term, con)
        if rows: return rows[0][0]
        else: return None
    except:
        return None

def checkTaxonomy(term_id, con):
    cur = con.cursor()
    query = "SELECT tt.term_taxonomy_id,tt.parent FROM wp_term_taxonomy AS tt INNER JOIN wp_terms AS t ON tt.term_id = t.term_id WHERE tt.taxonomy = 'project_location' AND t.term_id = '%s'" % term_id
    print query
    cur.execute(query)
    rows = cur.fetchall()
    if rows: return rows
    else: return None

def addTaxonomy(term_id, taxonomy, description, parent, count, con):
    cur = con.cursor()
    query = "INSERT INTO `wp_term_taxonomy` (`term_id`,`taxonomy`,`description`,`parent`,`count`) VALUES ('%s','%s','%s','%s','%s')" % (term_id, taxonomy, description, parent, count)
    print query
    cur.execute(query)
    con.commit()
    rows = checkTaxonomy(term_id, con)
    if rows: return rows
    else: return None
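A side note on these helpers: they build their SQL with % string formatting. Letting the driver bind the values is safer and also copes with names containing apostrophes. A sketch of checkTerm written that way, behaviour otherwise unchanged (both MySQLdb and mysql.connector use %s-style parameters):

def checkTerm(term, con):
    cur = con.cursor()
    # Let the driver quote the value instead of formatting it into the SQL.
    cur.execute("SELECT term_id FROM wp_terms AS t WHERE t.name = %s", (term,))
    rows = cur.fetchall()
    return rows[0][0] if rows else None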
I store the cities in a dictionary of dictionaries:
df = pd.read_table('./Argentina.csv', sep='\t', header=None, engine='python')
for line in xrange(len(df)):
    stringa = str(df[17][line])
    location = str(df[1][line])
    population = int(df[14][line])
    if population < limit_pop: continue
    string_state = stringa.split("/")
    country = string_state[1]
    state = string_state[2]
    if not country in states:
        states[country] = {}
    if not state in states[country]:
        states[country][state] = [location]
    else:
        states[country][state].append(location)
Then I try to insert the terms and taxonomies into the WordPress db:
con = mdb.connect('localhost', 'root', 'mypassword', 'Wordpress')

for country in states:
    country_id = checkTerm(country.replace("_", " "), con)
    if not country_id:
        country_id = addTerm(country.replace("_", " "), country, con)
    taxonomy = checkTaxonomy(country_id, con)
    if not taxonomy:
        taxonomy = addTaxonomy(country_id, 'project_location', '', '0', '0', con)
    parent = dict((y, x) for x, y in taxonomy)
    if not 0 in parent:
        taxonomy = addTaxonomy(country_id, 'project_location', '', '0', '0', con)
    for state in states[country]:
        state_id = checkTerm(state.replace("_", " "), con)
        if not state_id:
            state_id = addTerm(state.replace("_", " "), state, con)
        taxonomy = checkTaxonomy(state_id, con)
        if not taxonomy:
            taxonomy = addTaxonomy(state_id, 'project_location', '', country_id, '0', con)
        parent = dict((y, x) for x, y in taxonomy)
        if not country_id in parent:
            taxonomy = addTaxonomy(state_id, 'project_location', '', country_id, '0', con)
        for location in states[country][state]:
            location_id = checkTerm(location.replace("_", " "), con)
            if not location_id:
                location_id = addTerm(location.replace("_", " "), location, con)
            taxonomy = checkTaxonomy(location_id, con)
            if not taxonomy:
                taxonomy = addTaxonomy(location_id, 'project_location', '', state_id, '0', con)
            parent = dict((y, x) for x, y in taxonomy)
            if not state_id in parent:
                taxonomy = addTaxonomy(location_id, 'project_location', '', state_id, '0', con)
When I execute the script I see the following behaviour:
SELECT term_id FROM wp_terms as t WHERE t.name = 'Argentina'
INSERT INTO `wp_terms` (`name`,`slug`,`term_group`) VALUES ('Argentina','Argentina',0)
SELECT term_id FROM wp_terms as t WHERE t.name = 'Argentina'
SELECT tt.term_taxonomy_id,tt.parent FROM wp_term_taxonomy AS tt INNER JOIN wp_terms AS t ON tt.term_id = t.term_id WHERE tt.taxonomy = 'project_location' AND t.term_id = 'None'
INSERT INTO `wp_term_taxonomy` (`term_id`,`taxonomy`,`description`,`parent`,`count`) VALUES ('None','project_location','','0','0')
SELECT tt.term_taxonomy_id,tt.parent FROM wp_term_taxonomy AS tt INNER JOIN wp_terms AS t ON tt.term_id = t.term_id WHERE tt.taxonomy = 'project_location' AND t.term_id = 'None'
And the script stops with the following error:
./import.py:59: Warning: Truncated incorrect DOUBLE value: 'None'
cur.execute(query)
./import.py:69: Warning: Incorrect integer value: 'None' for column 'term_id' at row 1
cur.execute(query)
Traceback (most recent call last):
File "./import.py", line 115, in <module>
parent = dict((y, x) for x, y in taxonomy)
TypeError: 'NoneType' object is not iterable
This means that the insert statements are not being executed. I don't understand: I call con.commit() after the query, but it is still not executed. Where is the problem?
Solution:
I changed
import MySQLdb as mdb
to
import mysql.connector
and
con = mdb.connect(host='localhost', user='root', password='password', database='Wordpress')
to
con = mysql.connector.connect(host='localhost', user='root', password='password', database='Wordpress', autocommit=False, buffered=False)

Getting different results with mysql when using python multiprocessing

I can't figure out what I'm doing wrong (or how to correct it). It might be easier to show some code (it's a bit simplified from what I'm doing, but it proves my point):
from multiprocessing import Pool
import MySQLdb
import sys

# sql connection
try:
    conn = MySQLdb.connect(host="127.0.0.1", user="user", passwd="password", db="mydb")
except MySQLdb.Error, e:
    print "Error %d: %s" % (e.args[0], e.args[1])
    sys.exit(1)

# with database
cursor = conn.cursor()
cursor.execute("DROP TABLE IF EXISTS data_table")
cursor.execute("""
    CREATE TABLE data_table(
        value CHAR(80)
    ) ENGINE=MyISAM
    """)
cursor.execute("""INSERT INTO data_table (value) VALUES ('%s')""" % [0, 0])  # need to insert basecase
conn.commit()

def build_table(i, x):  # i is index, x is data[i]
    conn = MySQLdb.connect(host="127.0.0.1", user="user", passwd="password", db="mydb")
    cursor = conn.cursor()
    #print i,x
    target_sum = 100
    for s in range(target_sum + 1):
        for c in range(target_sum + 1):
            #print c, i
            cursor.execute("""
                INSERT INTO data_table (value)
                SELECT '%s'
                FROM dual
                WHERE ( SELECT COUNT(*) FROM data_table WHERE value='%s' )
                      = 1
                AND NOT EXISTS
                    ( SELECT * FROM data_table WHERE value='%s' )
                """ % ([s, i+1], [s - c * x, i], [s, i+1]))
            conn.commit()
    conn.close()

data = [2, 5, 8]
pool = Pool(processes=4)
for i, x in enumerate(data):
    build_table(i, x)  # creates 250 records
    #pool.apply_async(build_table, (i, x))
pool.close()
pool.join()

print 'completed'
It basically creates a table in MySQL. The code above creates 250 entries (which is correct), but if you comment out build_table(i,x) in the for loop and uncomment pool.apply_async(build_table, (i, x)), it creates only 52 records. Why is there a difference when multiprocessing the same function, and is there anything I can do to fix it so the results are the same? (I thought quickly committing updates would fix it, but no luck.)
If I play around with pool = Pool(processes=4) and change it to 1, it works, but I guess that's expected because it isn't really multiprocessing at that point. Also, if it helps, I'm using InnoDB.
UPDATE: when I change to MyISAM I get 240 results being updated (not quite the 250 I need, but much better than 52).
UPDATE2: the mysql command was combined into a single command, and the results seem to vary. Sometimes I get 248 results in the database, sometimes 240 or fewer. Maybe multiprocessing is causing this divergence between expected and actual results?
I would try to combine the 2 Selects and the Insert in one Insert statement:
#print c, i
cursor.execute("""SELECT value FROM data_table WHERE value='%s'""" % ([s - c * x, i]))
if cursor.rowcount == 1:
    cursor.execute("""SELECT value FROM data_table WHERE value='%s'""" % [s, i+1])
    if cursor.rowcount == 0:
        cursor.execute("""INSERT INTO data_table (value) VALUES ('%s')""" % [s, i+1])
Into something like:
#print c, i
cursor.execute("""
    INSERT INTO data_table (value)
    SELECT '%s'
    FROM dual
    WHERE ( SELECT COUNT(*) FROM data_table WHERE value='%s' )
          = 1
    AND NOT EXISTS
        ( SELECT * FROM data_table WHERE value='%s' )
    """ % ([s, i+1], [s - c * x, i], [s, i+1]))
Not sure about the syntax in the last line. You'll need to pass 3 parameters.
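A further thought: even the combined statement is still a check-then-insert against data shared between processes, so two workers can pass the NOT EXISTS test at the same moment. Making value a UNIQUE key and using INSERT IGNORE pushes the duplicate check into the storage engine, where it is atomic. A hedged sketch of what the inner statement could look like under that assumption (the UNIQUE KEY on value is not in the original schema):

def insert_if_predecessor_exists(conn, s, c, x, i):
    # Assumes data_table was created with UNIQUE KEY (value). Duplicates are
    # then impossible, so the COUNT(*) = 1 test collapses to EXISTS and the
    # NOT EXISTS test is handled by INSERT IGNORE itself.
    cur = conn.cursor()
    cur.execute(
        "INSERT IGNORE INTO data_table (value) "
        "SELECT %s FROM dual "
        "WHERE EXISTS (SELECT 1 FROM data_table WHERE value = %s)",
        (str([s, i + 1]), str([s - c * x, i])))
    conn.commit()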
