import psycopg2
import time
import threading
def initiate():
conn = psycopg2.connect(host='localhost', user='postgres', password='password', port='5432', database='test')
conn.set_isolation_level(1)
conn.autocommit = False
cursor1 = conn.cursor()
cursor2 = conn.cursor()
cursor3 = conn.cursor()
t1 = threading.Thread(target=test1, args=(cursor1, conn))
t2 = threading.Thread(target=test2, args=(cursor2, conn))
t3 = threading.Thread(target=test3, args=(cursor3, conn))
t1.start()
t2.start()
t3.start()
def test1(cursor, conn):
cursor.execute("INSERT INTO test_sch.tb_for_test1(col_for_t12, col_for_t13, col_for_t14) VALUES ('test_col1', 'test_col1', 1)")
time.sleep(10)
conn.commit()
cursor.close()
print("completed test1")
def test2(cursor, conn):
cursor.execute("INSERT INTO test_sch.tb_for_test1(col_for_t12, col_for_t13, col_for_t14) VALUES ('test_col2', 'test_col2', 2)")
time.sleep(5)
conn.commit()
cursor.close()
print("completed test2")
def test3(cursor, conn):
cursor.execute("INSERT INTO test_sch.tb_for_test1(col_for_t12, col_for_t13, col_for_t14) VALUES ('test_col3', 'test_col3', 3)")
time.sleep(15)
conn.commit()
cursor.close()
print("completed test3")
initiate()
In the above code I have tried to insert three records to database using three different threads running three different methods. When method test2 gets completed it is committing all three records instead waiting for transaction at method test1 and test3. I can understand that psycopg2 transaction is per connection and not per cursor. So any suggestions on how it can be moved to per cursor basis ?. One of my limitation is not to open too many connections
Related
i'm studying about mysql connection with python(pycharm)
i have question about curs.execute()
when it work and when it not work...
in my code i write remarks about not working point
import pymysql
try:
conn = pymysql.connect(host='localhost', user='root', password='1234', db='university')
conn.set_charset('utf8')
curs = conn.cursor(pymysql.cursors.DictCursor) #Dictionary cursor 생성
# curs = conn.cursor()
print("Connected to MySQL")
sql = "SELECT sno, midterm, final from db_score where midterm >= 20 and final >= 20 order by sno"
# sql = "select* from db_score"
curs.execute(sql)
#this point not work :(
except Exception as e:
print(str(e))
finally:
if conn:
curs.close()
conn.close()
print("MySql connection is closed")
and fetchall() didnt work :(\
import pandas as pd
import pymysql
xl_file = 'db_score.xlsx'
df = pd.read_excel(xl_file)
tp = list(df.itertuples(index=False, name=None))
# ('sno', 'attendance', 'homework', 'discussion', 'midterm', 'final', 'score', 'grade')
try:
conn = pymysql.connect(host='localhost', user='root', password='1234', db='university')
conn.set_charset('utf8')
#curs = conn.cursor(pymysql.cursors.DictCursor)
curs = conn.cursor()
print("Connected to MySQL")
sql = "INSERT INTO db_score VALUES (%s, %s, %s, %s, %s, %s, %s, %s)"
for i in range(0, len(df.index)):
# print('hi')
curs.execute(sql, tp[i])
#why work i dont know because other part is not working
# sql2 = "SELECT* from db_score"
# curs.execute(sql2)
# try execute, but not work
records = curs.fetchall()
for row in records:
print("why didn't work")
print(row)
#print not work :(
conn.commit()
except Exception as e:
print(str(e))
conn.rollback()
finally:
if conn:
curs.close()
conn.close()
print("MySql connection is closed")
please comment why work and why not work please...
thanks for watching
db connection is so hard:(
I'm trying to transform my standard database functions into aiomysql async functions (for a bot) but I don't really understand how does the async functions work...
Here's my actual code that I want to transform :
def connection(Database):
conn = mysql.connector.connect(host=Database[0],
user=Database[1],
password=Database[2],
database=Database[3])
c = conn.cursor()
return c, conn
def insertToTable(Database, insert, data):
c, conn = connection(Database)
try:
pass
c.execute(insert, data)
conn.commit()
except mysql.connector.IntegrityError as e:
#cnx.close()
def deleteFromTable(Database, query):
c, conn = connection(Database)
try:
c.execute(query)
c.commit()
except:
pass
def getMax(Database, columnName, tableName):
c, conn = connection(Database)
c.execute("SELECT MAX(" + columnName + ") FROM " + tableName)
result = c.fetchall()
for i in result:
if i[0] is None:
return 0
else:
return i[0]
My projects is separed in multiples files, I got some others basics requests that I didn't transform in function :
c, conn = dbconnect.connection(DB)
c.execute("update Tar SET va= (%s) WHERE x=1",(va_num))
conn.commit()
and some select fetchall/fetchone etc
I wrote that after reading the documentations and finding some (rare) examples :
import asyncio
import aiomysql
import setup as setup
loop = asyncio.get_event_loop()
#asyncio.coroutine
def exec_sql(insert, data):
pool = yield from aiomysql.create_pool(host=setup.DB_local[0], port=3306,
user=setup.DB_local[1], password=setup.DB_local[2],
db=setup.DB_local[3], loop=loop, autocommit=False)
with (yield from pool) as conn:
cur = yield from conn.cursor()
yield from cur.execute(insert, data)
yield from conn.commit()
conn.close
#pool.close()
#yield from pool.wait_closed()
insert = ("INSERT into discord_rights (discord_id, discord_name, discord_role, is_admin, is_caster, is_player)""VALUES (%s, %s, %s, %s, %s, %s)")
data = (10, "lea", 0, 0, 1, 1)
sql = loop.run_until_complete(exec_sql(insert, data))
#asyncio.coroutine
def get_one_sql(sql):
pool = yield from aiomysql.create_pool(host=setup.DB_local[0], port=3306,
user=setup.DB_local[1], password=setup.DB_local[2],
db=setup.DB_local[3], loop=loop, autocommit=False)
with (yield from pool) as conn:
cur = yield from conn.cursor()
yield from cur.execute(sql)
r = yield from cur.fetchone()
conn.close
return r
#pool.close()
#yield from pool.wait_closed()
sql = loop.run_until_complete(get_one_sql("SELECT * from discord_rights WHERE discord_id = 124545xxxxxxxx"))
print(sql)
But I'm not sure if this is a good way cause I create a new pool for every request, right ?
Can someone help me to build on of the function (importing the pool created in an another part of the code) or something better if I'm still wrong ?
Thx for your help and sorry for the long message, I prefered to show you my codes instead of nothing !
In a classical "Threading/Queue"-application. I need to do further calculations in my "consumer"-function. After Queue is empty no further code is executed after urls.task_done().
I am importing market data from an JSON api and import it into my MariaDB database.
On the API every item that i want to fetch has an own url, so I am creating a queue for all available urls in a function.
A "consumer"-function processes the queue importing a new set of data or updating an existent entry depending on the already existing data in my database. I already tried to wrap the actual while True loop into its own function but it didn't work for me.
def create_url():
try:
mariadb_connection = mariadb.connect(host='host
database='db',
user='user',
password='pw')
cursor = mariadb_connection.cursor()
cursor.execute('SELECT type_id from tbl_items')
item_list = cursor.fetchall()
print("Create URL - Record retrieved successfully")
for row in item_list:
url = 'https://someinternet.com/type_id=' + \
str(row[0])
urls.put(url)
return urls
except mariadb.Error as error:
mariadb_connection.rollback()
print("Failed retrieving itemtypes from tbl_items table
{}".format(error))
finally:
if mariadb_connection.is_connected():
cursor.close()
mariadb_connection.close()
def import(urls):
list_mo_esi = []
try:
mariadb_connection = mariadb.connect(host='host',
database='db',
user='user',
password='pw')
cursor = mariadb_connection.cursor()
while True:
s = requests.Session()
retries = Retry(total=5, backoff_factor=1, status_forcelist=[502, 503, 504])
s.mount('https://', HTTPAdapter(max_retries=retries))
jsonraw = s.get(urls.get())
jsondata = ujson.loads(jsonraw.text)
for row in jsondata:
cursor.execute('SELECT order_id from tbl_mo WHERE order_id = %s',
(row['order_id'], ))
exists_mo = cursor.fetchall()
list_mo_esi.append(row['order_id'])
if len(exists_mo) != 0:
print("updating order#", row['order_id'])
cursor.execute('UPDATE tbl_mo SET volume = %s, price = %s WHERE order_id = %s',
(row['volume_remain'], row['price'], row['order_id'], ))
mariadb_connection.commit()
else:
cursor.execute('INSERT INTO tbl_mo (type_id, order_id, ordertype,volume, price) VALUES (%s,%s,%s,%s,%s)',
(row['type_id'], row['order_id'], row['is_buy_order'], row['volume_remain'], row['price'], ))
mariadb_connection.commit()
urls.task_done()
except mariadb.Error as error:
mariadb_connection.rollback()
print("Failed retrieving itemtypes from tbl_items table {}".format(error))
The following finally part of my function is not executed, but should.
finally:
list_mo_purge = list(set(list_mo_sql)-set(list_mo_esi))
cursor.execute('SELECT order_id FROM tbl_mo')
list_mo_sql = cursor.fetchall()
print(len(list_mo_esi))
print(len(list_mo_sql))
if mariadb_connection.is_connected():
cursor.close()
mariadb_connection.close()
main thread
for i in range(num_threads):
worker = Thread(target=import_mo, args=(urls,))
worker.setDaemon(True)
worker.start()
create_url()
urls.join()
After all tasks are completed my worker stop executing code right after urls.task_done().
However, i have some more code after the function urls.task_done() i need to be executed for closing database connection and cleaning up my database from old entries. How can I make this "finally"-part run?
You are not breaking from the while.
You should do the following:
if urls.empty():
break
Most likely your import thread gets blocked at urls.get()
I would like to produce the following set up in Python 3.4, SQLite3 v.3.8.11:
(1) Create an in-memory shared-cache SQLite3 database:
(2) Create one connection that only writes to this DB from one thread
(3) Create multiple connections that concurrently read from this DB from various other threads
This is what I have created to test this:
import time
import zmq
import random
from threading import Thread
import sqlite3
def producer(context):
zmq_socket = context.socket(zmq.PUB)
zmq_socket.bind("inproc://test_pub")
while True:
msg = random.random()
zmq_socket.send(str(msg).encode())
wait_time = random.uniform(0, 0.05)
time.sleep(wait_time)
def subscriber_writer(context):
# Create database connection for writing to memory
write_con = sqlite3.connect('file::memory:?cache=shared', uri=True)
cursor = write_con.cursor()
zmq_socket = context.socket(zmq.SUB)
zmq_socket.connect("inproc://test_pub")
zmq_socket.setsockopt(zmq.SUBSCRIBE, b'')
while True:
msg = float(zmq_socket.recv().decode())
cursor.execute('UPDATE TEST SET Value=? WHERE Key="Val"', [msg])
write_con.commit()
def consumer(context):
# Create database connection for reading from memory in read-only mode
read_con = sqlite3.connect('file::memory:?cache=shared&mode=ro', uri=True)
cursor = read_con.cursor()
while True:
cursor.execute('SELECT Value FROM TEST WHERE Key="Val"')
row = cursor.fetchone()
result = row[0]
print(str(result))
wait_time = random.uniform(0, 0.05)
time.sleep(wait_time)
def main():
# Create context
context = zmq.Context()
# Create database
con = sqlite3.connect('file::memory:?cache=shared', uri=True)
# Create db table
cursor = con.cursor()
cursor.execute('CREATE TABLE TEST(Key TEXT, Value NUMERIC)')
cursor.execute('INSERT INTO TEST VALUES (?,?)', ["Val", 0.00])
con.commit()
Thread(target=subscriber_writer, args=(context,)).start()
Thread(target=producer, args=(context,)).start()
Thread(target=consumer, args=(context,)).start()
if __name__ == '__main__':
main()
This works for a while .....but then I get the following error:
...
0.2504188310554989
0.2504188310554989
0.8038719720740617
0.42408896748682956
0.21361498908206744
0.3404497358396832
0.010459475861968603
0.5070540941748318
0.5070540941748318
0.23151535812095037
0.636881359928549
0.4184038089576615
0.9920311052908629
Exception in thread Thread-3:
Traceback (most recent call last):
File "E:\Python34-64\lib\threading.py", line 911, in _bootstrap_inner
self.run()
File "E:\Python34-64\lib\threading.py", line 859, in run
self._target(*self._args, **self._kwargs)
File "test.py", line 43, in consumer
cursor.execute('SELECT Value FROM TEST WHERE Key="Val"')
sqlite3.OperationalError: database table is locked: TEST
How can I make this work?
As a side note, CREATING ONLY CONNECTION WITH check_same_thread=False and sharing this across the whole process works even when the wait times are eliminated....is this advisable to do instead? See below:
import time
import zmq
import random
from threading import Thread
import sqlite3
def producer(context):
zmq_socket = context.socket(zmq.PUB)
zmq_socket.bind("inproc://test_pub")
while True:
msg = random.random()
zmq_socket.send(str(msg).encode())
# wait_time = random.uniform(0, 0.05)
# time.sleep(wait_time)
def subscriber_writer(context, con):
zmq_socket = context.socket(zmq.SUB)
zmq_socket.connect("inproc://test_pub")
zmq_socket.setsockopt(zmq.SUBSCRIBE, b'')
cursor = con.cursor()
while True:
msg = float(zmq_socket.recv().decode())
cursor.execute('UPDATE TEST SET Value=? WHERE Key="Val"', [msg])
def consumer(context, con):
cursor = con.cursor()
while True:
cursor.execute('SELECT Value FROM TEST WHERE Key="Val"')
row = cursor.fetchone()
result = row[0]
print(str(result))
# wait_time = random.uniform(0, 0.05)
# time.sleep(wait_time)
def main():
# Create context
context = zmq.Context()
# Create database
con = sqlite3.connect('file::memory:?cache=shared', uri=True, isolation_level=None, check_same_thread=False)
# Create db table
cursor = con.cursor()
cursor.execute('CREATE TABLE TEST(Key TEXT, Value NUMERIC)')
cursor.execute('INSERT INTO TEST VALUES (?,?)', ["Val", 0.00])
Thread(target=subscriber_writer, args=(context, con)).start()
Thread(target=producer, args=(context,)).start()
Thread(target=consumer, args=(context, con)).start()
if __name__ == '__main__':
main()
What is wrong with my code? Thank you
import os
import os.path
import time
global item_count
#-*- coding: cp936 -*-
import MySQLdb
import MySQLdb.cursors
import threading
import multiprocessing
from time import sleep,ctime
def qucun():
#connect to mysql
conn=MySQLdb.connect(host="localhost",user="root",passwd="caihong")
cursor=conn.cursor()
try:
cursor.execute("""create database if not exists quad""")
except:
print 'Quad is exist'
conn.select_db('quad')
conn=MySQLdb.connect(host="localhost",user="root",passwd="caihong",db="quad")
#get cursor
cursor=conn.cursor()
try:
cursor.execute("""create table if not exists record(fn1 varchar(100),
fn2 varchar(100),fn3 varchar(100),fn4 varchar(100),
fn5 varchar(100),fn6 varchar(100),fn7 varchar(100),fn8 varchar(100))""")
except:
print 'Table record is exist'
loops=['2013071818_1.txt','2013071818_2.txt','2013071818_3.txt','2013071818_4.txt','2013071818_5.txt']
def loop(nloop,filename):
print 'This loop%s start at:'%nloop,ctime()
#connect to quad
conn=MySQLdb.connect(host="localhost",user="root",passwd="caihong",db="quad")
conn.select_db('quad')
#get cursor
cursor=conn.cursor()
newitem=open('C:\\Python27\\caihong\\%s'%filename,'r')
data=[line.strip() for line in newitem.readlines()]
print data
##put data into value
values=['%s'%data[0],'%s'%data[1],'%s'%data[2],'%s'%data[3],'%s'%data[4],
'%s'%data[5],'%s'%data[6],'%s'%data[7]]
cursor.execute("""insert into record values(%s,%s,%s,%s,%s,%s,%s,%s)""",values);
conn.commit()
cursor.close()
sleep(2)
print 'This loop done at',ctime()
if __name__=='__main__':
print 'starting at:',ctime()
threads=[]
nloops=range(len(loops))
pool=multiprocessing.Pool(processes=2)
for i in nloops:
t=pool.apply_async(loop,(i,loops[i]))
pool.close()
pool.join()
if t.successful():
print 'successful'
print 'all Done at:',ctime()
os.system("pause")
qucun()
You are attempting to call locally defined function in async.
You are trying to share an open connection between processes.
First is tricky to implement in 2.7 and second is impossible in any multiprocessing
You have to use separate connection for each process in process pool.
import os
import os.path
import time
global item_count
#-*- coding: cp936 -*-
import MySQLdb
import MySQLdb.cursors
import threading
import multiprocessing
from time import sleep,ctime
CONNECTION = None
def close_connection():
CONNECTION.close()
def get_connection():
global CONNECTION
#If this process pool member launched for a first time - create connection
if CONNECTION is None:
conn = MySQLdb.connect( host="localhost",
user="root",
passwd="caihong")
cursor = conn.cursor()
try:
cursor.execute("""create database if not exists quad""")
except:
print 'Quad is exist'
conn.select_db('quad')
CONNECTION = MySQLdb.connect(host="localhost",
user="root",
passwd="caihong",
db="quad")
cursor = CONNECTION.cursor()
try:
cursor.execute("""create table if not exists record(fn1 varchar(100),
fn2 varchar(100),fn3 varchar(100),fn4 varchar(100),
fn5 varchar(100),fn6 varchar(100),fn7 varchar(100),fn8 varchar(100))""")
except:
print 'Table record is exist'
# we dont need to close connection after each insert.
# insted - register a finalizer once
# so it will be called right before Pool.close()
multiprocessing.util.Finalize(CONNECTION, close_connection, exitpriority=1)
#use existing connection
return CONNECTION
def loop(nloop, filename):
conn = get_connection()
cursor = conn.cursor()
print 'This loop %s start at: %s'%(nloop, ctime())
with open('C:\\Python27\\caihong\\%s'%filename, 'r') as newitem:
data = [line.strip() for line in newitem.readlines()]
# values=['%s'%data[0],'%s'%data[1],'%s'%data[2],'%s'%data[3],'%s'%data[4],
# '%s'%data[5],'%s'%data[6],'%s'%data[7]]
# ^^^ Thats a bad way to stringify list
cursor.execute('insert into record values(%s)', ','.join(data));
conn.commit()
# we dont need to close connection after each insert.
# cursor.close()
print 'This loop done at', ctime()
LOOPS = ['2013071818_1.txt', '2013071818_2.txt', '2013071818_3.txt', '2013071818_4.txt', '2013071818_5.txt']
if __name__=='__main__':
pool = multiprocessing.Pool(processes=2)
results = []
for i, loopfile in enumerate(LOOPS):
results.apply(pool.apply_async(loop, (i, loopfile)))
pool.close()
pool.join()
if all((res.successful() for res in results)):
print 'successful'
print 'all Done at:', ctime()
os.system('pause')