Troubles using the async library aiomysql (Python bot)

I'm trying to transform my standard database functions into aiomysql async functions (for a bot), but I don't really understand how async functions work...
Here's my current code that I want to transform:
import mysql.connector

def connection(Database):
    conn = mysql.connector.connect(host=Database[0],
                                   user=Database[1],
                                   password=Database[2],
                                   database=Database[3])
    c = conn.cursor()
    return c, conn

def insertToTable(Database, insert, data):
    c, conn = connection(Database)
    try:
        c.execute(insert, data)
        conn.commit()
    except mysql.connector.IntegrityError:
        pass  # duplicate key: ignore the insert
        #cnx.close()

def deleteFromTable(Database, query):
    c, conn = connection(Database)
    try:
        c.execute(query)
        conn.commit()  # commit() belongs to the connection, not the cursor
    except:
        pass

def getMax(Database, columnName, tableName):
    c, conn = connection(Database)
    c.execute("SELECT MAX(" + columnName + ") FROM " + tableName)
    result = c.fetchall()
    for i in result:
        if i[0] is None:
            return 0
        else:
            return i[0]
My project is split across multiple files, and I have some other basic queries that I didn't wrap in functions:
c, conn = dbconnect.connection(DB)
c.execute("update Tar SET va = (%s) WHERE x = 1", (va_num,))  # note the trailing comma: params must be a tuple
conn.commit()
and some SELECTs using fetchall/fetchone, etc.
I wrote the following after reading the documentation and finding some (rare) examples:
import asyncio
import aiomysql
import setup as setup

loop = asyncio.get_event_loop()

@asyncio.coroutine
def exec_sql(insert, data):
    pool = yield from aiomysql.create_pool(host=setup.DB_local[0], port=3306,
                                           user=setup.DB_local[1], password=setup.DB_local[2],
                                           db=setup.DB_local[3], loop=loop, autocommit=False)
    with (yield from pool) as conn:
        cur = yield from conn.cursor()
        yield from cur.execute(insert, data)
        yield from conn.commit()
    conn.close()  # close() is a call; a bare conn.close does nothing
    #pool.close()
    #yield from pool.wait_closed()

insert = ("INSERT INTO discord_rights (discord_id, discord_name, discord_role, is_admin, is_caster, is_player) "
          "VALUES (%s, %s, %s, %s, %s, %s)")
data = (10, "lea", 0, 0, 1, 1)
sql = loop.run_until_complete(exec_sql(insert, data))

@asyncio.coroutine
def get_one_sql(sql):
    pool = yield from aiomysql.create_pool(host=setup.DB_local[0], port=3306,
                                           user=setup.DB_local[1], password=setup.DB_local[2],
                                           db=setup.DB_local[3], loop=loop, autocommit=False)
    with (yield from pool) as conn:
        cur = yield from conn.cursor()
        yield from cur.execute(sql)
        r = yield from cur.fetchone()
    conn.close()
    return r
    #pool.close()
    #yield from pool.wait_closed()

sql = loop.run_until_complete(get_one_sql("SELECT * FROM discord_rights WHERE discord_id = 124545xxxxxxxx"))
print(sql)
But I'm not sure this is a good way to do it, since I create a new pool for every request, right?
Can someone help me build one of these functions so that it imports a pool created in another part of the code, or suggest something better if I'm still on the wrong track?
Thanks for your help, and sorry for the long message; I preferred to show you my code rather than nothing!
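For reference, the usual fix is exactly what the question suspects: create the pool once at startup and let every query function borrow connections from it. A minimal sketch under that assumption, keeping the question's pre-async/await coroutine style; the dbpool module name and the init_pool helper are invented for illustration:

# dbpool.py -- hypothetical module owning the single shared pool
import asyncio
import aiomysql
import setup

pool = None

@asyncio.coroutine
def init_pool(loop):
    # called once at startup, before any query runs
    global pool
    pool = yield from aiomysql.create_pool(
        host=setup.DB_local[0], port=3306,
        user=setup.DB_local[1], password=setup.DB_local[2],
        db=setup.DB_local[3], loop=loop, autocommit=False)

@asyncio.coroutine
def exec_sql(insert, data):
    # borrow a connection from the shared pool instead of building a new pool
    with (yield from pool) as conn:
        cur = yield from conn.cursor()
        yield from cur.execute(insert, data)
        yield from conn.commit()

Other files then import dbpool and call dbpool.exec_sql(...); the connection-setup cost is paid once instead of per request. On current Python and aiomysql versions, the same sketch would be written with async def and await.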

Related

What should I put for the first parameter when calling the "get_Tables_byName" function?

I'm writing Python code to read from a MySQL database:
import pymysql
import pymysql.cursors

def create_server_connection(host, user, password):
    connection = None
    try:
        connection = pymysql.connect(host='localhost',
                                     user='root',
                                     password='pwd',
                                     database='raw_data',
                                     cursorclass=pymysql.cursors.DictCursor)
        print("MySQL Database connection successful")
    except pymysql.MySQLError as error:
        print(f"Error: '{error}'")
    return connection

def read_query(connection, query):
    cur = connection.cursor()
    result = None
    try:
        cur.execute(query)
        result = cur.fetchall()
        return result
    except pymysql.MySQLError as error:
        print(f"Error: '{error}'")
    return cur

def get_Tables_byName(cursor, tableName):
    q1 = f'''
    SELECT table_name FROM raw_data.tables
    WHERE table_name LIKE '{tableName}'; '''
    res = []
    cursor.execute(q1)
    for row in cursor:
        res.append(row[0])
    return res

get_Tables_byName(cursor, 'data_31942010201')
If I want to call the get_Tables_byName function, what should I put as the first parameter? If I pass cursor, I get NameError: name 'cursor' is not defined.
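A minimal sketch of one way to call it, wiring together the helpers above (the host/user/password values are the ones hard-coded in the question):

connection = create_server_connection('localhost', 'root', 'pwd')
cursor = connection.cursor()
# note: the connection uses DictCursor, so each row is a dict and
# row[0] inside get_Tables_byName would need to be row['table_name']
tables = get_Tables_byName(cursor, 'data_31942010201')
print(tables)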

Syntax error in MariaDB that I cannot find

I am writing code to write my GPS variables to my database, but I get this error every time I run it:
You have an error in your SQL syntax; check the manual that corresponds to your MariaDB server version for the right syntax to use near 'Long, Status) VALUES (1, 50.821139333333335, 3.2815086666666664, 'Eind')' at line 1
The database has an id column that is auto-incremented, followed by the values I'm trying to add. I have no clue what's going wrong; maybe you have an idea.
database.py:
@staticmethod
def execute_sql(sqlQuery, params=None):
    result = None
    db, cursor = Database.__open_connection()
    try:
        cursor.execute(sqlQuery, params)
        db.commit()
        result = cursor.lastrowid
    except connector.Error as error:
        db.rollback()
        result = None
        print(f"Error: Data not stored.{error.msg}")
    finally:
        cursor.close()
        db.close()
    return result
datarepository.py:
@staticmethod
def create_gps(RouteID, Lat, Long, Status):
    sql = "INSERT INTO gps (RouteID, Lat, Long, Status) VALUES (%s, %s, %s, %s)"
    params = [RouteID, Lat, Long, Status]
    return Database.execute_sql(sql, params)
app.py:
@socketio.on('F2B_GPS')
def gps_aan(toggle, status):
    while toggle != 1:
        port = "/dev/serial0"
        ser = serial.Serial(port, baudrate=9600, timeout=0.5)
        dataout = pynmea2.NMEAStreamReader()
        newdata = ser.readline()
        if sys.version_info[0] == 3:
            newdata = newdata.decode("utf-8", "ignore")
        if newdata[0:6] == "$GPRMC":
            newmsg = pynmea2.parse(newdata)
            lat = newmsg.latitude
            lng = newmsg.longitude
            gps = "Latitude=" + str(lat) + " and Longitude=" + str(lng)
            print(gps)
            DataRepository.create_gps(1, lat, lng, status)
            print("added")
            toggle = 1
Thanks to anyone replying :)
OK, I found it: you can't use Long as a column name, since it's a reserved word in MySQL/MariaDB, so I had to change it to longitude.
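For reference, the other standard fix (a sketch, not from the thread) is to keep the column name and quote the reserved word with backticks in the statement:

sql = "INSERT INTO gps (RouteID, Lat, `Long`, Status) VALUES (%s, %s, %s, %s)"

LONG is a reserved word in MySQL/MariaDB, so it can only appear as an identifier when backtick-quoted.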

Running multithreaded code after Queue.task_done()

In a classic Threading/Queue application, I need to do further calculations in my "consumer" function. After the queue is empty, no further code after urls.task_done() is executed.
I am importing market data from a JSON API into my MariaDB database.
On the API, every item that I want to fetch has its own URL, so I create a queue of all available URLs in one function.
A "consumer" function processes the queue, importing a new set of data or updating an existing entry, depending on the data already in my database. I already tried to wrap the actual while True loop in its own function, but it didn't work for me.
def create_url():
    try:
        mariadb_connection = mariadb.connect(host='host',
                                             database='db',
                                             user='user',
                                             password='pw')
        cursor = mariadb_connection.cursor()
        cursor.execute('SELECT type_id from tbl_items')
        item_list = cursor.fetchall()
        print("Create URL - Record retrieved successfully")
        for row in item_list:
            url = 'https://someinternet.com/type_id=' + str(row[0])
            urls.put(url)
        return urls
    except mariadb.Error as error:
        mariadb_connection.rollback()
        print("Failed retrieving itemtypes from tbl_items table {}".format(error))
    finally:
        if mariadb_connection.is_connected():
            cursor.close()
            mariadb_connection.close()
def import_mo(urls):  # 'import' is a Python keyword; the main thread targets import_mo
    list_mo_esi = []
    try:
        mariadb_connection = mariadb.connect(host='host',
                                             database='db',
                                             user='user',
                                             password='pw')
        cursor = mariadb_connection.cursor()
        while True:
            s = requests.Session()
            retries = Retry(total=5, backoff_factor=1, status_forcelist=[502, 503, 504])
            s.mount('https://', HTTPAdapter(max_retries=retries))
            jsonraw = s.get(urls.get())
            jsondata = ujson.loads(jsonraw.text)
            for row in jsondata:
                cursor.execute('SELECT order_id from tbl_mo WHERE order_id = %s',
                               (row['order_id'], ))
                exists_mo = cursor.fetchall()
                list_mo_esi.append(row['order_id'])
                if len(exists_mo) != 0:
                    print("updating order#", row['order_id'])
                    cursor.execute('UPDATE tbl_mo SET volume = %s, price = %s WHERE order_id = %s',
                                   (row['volume_remain'], row['price'], row['order_id'], ))
                    mariadb_connection.commit()
                else:
                    cursor.execute('INSERT INTO tbl_mo (type_id, order_id, ordertype, volume, price) VALUES (%s, %s, %s, %s, %s)',
                                   (row['type_id'], row['order_id'], row['is_buy_order'], row['volume_remain'], row['price'], ))
                    mariadb_connection.commit()
            urls.task_done()
    except mariadb.Error as error:
        mariadb_connection.rollback()
        print("Failed retrieving itemtypes from tbl_items table {}".format(error))
The following finally part of my function is not executed, but it should be:
    finally:
        # fetch the existing ids first, then compute the purge list
        cursor.execute('SELECT order_id FROM tbl_mo')
        list_mo_sql = cursor.fetchall()
        list_mo_purge = list(set(list_mo_sql) - set(list_mo_esi))
        print(len(list_mo_esi))
        print(len(list_mo_sql))
        if mariadb_connection.is_connected():
            cursor.close()
            mariadb_connection.close()
Main thread:

for i in range(num_threads):
    worker = Thread(target=import_mo, args=(urls,))
    worker.setDaemon(True)
    worker.start()

create_url()
urls.join()
After all tasks are completed, my workers stop executing code right after urls.task_done(). However, I have some more code after urls.task_done() that needs to run, to close the database connection and clean old entries out of my database. How can I make this "finally" part run?
You are not breaking out of the while loop. You should do the following:

if urls.empty():
    break

Most likely your import thread gets blocked at urls.get().
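A minimal sketch of where that check could sit in the consumer loop, using the names from the question (note that with several worker threads the empty()/get() pair is racy, and urls.get(timeout=...) wrapped in a queue.Empty handler is the more robust variant):

while True:
    if urls.empty():  # queue drained: leave the loop so the finally block can run
        break
    url = urls.get()
    # ... fetch url and insert/update rows as in the question ...
    urls.task_done()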

PyMySQL not Inserting Data

I have a piece of code that takes Windows logs and inserts various pieces of information into a MySQL database. The code runs with no errors, but it does not actually put the data into the table; the table remains blank. I pulled my MySQL syntax from an example with some modifications, so I'm not entirely sure what is going wrong. I have a feeling it has to do either with the data types or with some changes I made to the syntax.
import sys
import pymysql
import pymysql.cursors
import win32evtlog  # requires pywin32 pre-installed
import win32evtlogutil
import time

server = 'localhost'  # name of the target computer to get event logs
logtype = 'System'  # 'Application' # 'Security'
hand = win32evtlog.OpenEventLog(server, logtype)
flags = win32evtlog.EVENTLOG_BACKWARDS_READ | win32evtlog.EVENTLOG_SEQUENTIAL_READ
events = win32evtlog.ReadEventLog(hand, flags, 0)

while True:
    for event in events:
        evt_tp = event.EventType
        if evt_tp != (1 or 2 or 8):
            eve_cat = str(('Event Category:', event.EventCategory))
            eve_timegen = str(('Time Generated:', event.TimeGenerated))
            eve_srcnm = str(('Source Name:', event.SourceName))
            eve_id = str(('Event ID:', event.EventID))
            eve_typ = str(('Event Type:', event.EventType))
            data = event.StringInserts
            if data:
                print('Event Data:')
                for msg in data:
                    print(msg)
            print(type(eve_cat))
            print(type(eve_timegen))
            print(type(eve_srcnm))
            print(type(eve_id))
            print(type(eve_typ))
            print(type(data))
            time.sleep(10)
        else:
            eve_cat = ('Event Category:', event.EventCategory)
            eve_timegen = ('Time Generated:', event.TimeGenerated)
            eve_srcnm = ('Source Name:', event.SourceName)
            eve_id = ('Event ID:', event.EventID)
            eve_typ = ('Event Type:', event.EventType)
            data = event.StringInserts
            print('There were no errors found')
            print(eve_cat)
            print(eve_timegen)
            print(eve_srcnm)
            print(eve_id)
            print(eve_typ)
            print(data)
            time.sleep(10)

# Connect to the database
connection = pymysql.connect(host='localhost',
                             user='root',
                             password='',
                             db='ptest',
                             charset='utf8mb4',
                             cursorclass=pymysql.cursors.DictCursor)
try:
    with connection.cursor() as cursor:
        # Create a new record
        sql = "INSERT INTO `win_logs` (`Category`, `TimeGenerated`, 'SourceName', 'EventID', 'Type') VALUES (%s, %s, %s, %s, %s)"
        cursor.execute(sql, (eve_cat, eve_timegen, eve_srcnm, eve_id, eve_typ))
    # connection is not autocommit by default. So you must commit to save
    # your changes.
    connection.commit()
    with connection.cursor() as cursor:
        # Read a single record
        sql = "SELECT `id`, `Type` FROM `win_logs` WHERE `Category`=%s"
        cursor.execute(sql, ('webmaster@python.org',))
        result = cursor.fetchone()
        print(result)
finally:
    connection.close()
I could be very wrong, but this is Python, and indentation matters. Try just:
try:
    with connection.cursor() as cursor:
        # Create a new record
        sql = "INSERT INTO `win_logs` (`Category`, `TimeGenerated`, `SourceName`, `EventID`, `Type`) VALUES (%s, %s, %s, %s, %s)"
        cursor.execute(sql, (eve_cat, eve_timegen, eve_srcnm, eve_id, eve_typ))
I guess your cursor is out of the with block's scope.
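Beyond indentation, two other things in the question's listing stand out: the INSERT quotes three column names with straight quotes instead of backticks (a SQL syntax error on its own), and the whole database block sits after a while True loop that never breaks, so it is never reached. A minimal sketch of a working shape, reusing the question's connection parameters and committing per event:

connection = pymysql.connect(host='localhost', user='root', password='',
                             db='ptest', charset='utf8mb4',
                             cursorclass=pymysql.cursors.DictCursor)

sql = "INSERT INTO `win_logs` (`Category`, `TimeGenerated`, `SourceName`, `EventID`, `Type`) VALUES (%s, %s, %s, %s, %s)"

for event in events:  # insert inside the loop instead of after it
    with connection.cursor() as cursor:
        cursor.execute(sql, (str(event.EventCategory), str(event.TimeGenerated),
                             str(event.SourceName), str(event.EventID),
                             str(event.EventType)))
    connection.commit()  # PyMySQL does not autocommit by default

connection.close()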

PostgreSQL query taking too long via Python

#!/usr/bin/env python
# (Python 2 code: print statement and the 'hex' codec)
import pika

def doQuery(conn, i):
    cur = conn.cursor()
    cur.execute("SELECT * FROM table OFFSET %s LIMIT 100000", (i,))
    return cur.fetchall()

print "Using psycopg2"
import psycopg2
myConnection = psycopg2.connect(host=hostname, user=username,
                                password=password, dbname=database)

connection = pika.BlockingConnection(pika.ConnectionParameters(host='localhost'))
channel = connection.channel()
channel.queue_declare(queue='task_queue2')

endloop = False
i = 1
while True:
    results = doQuery(myConnection, i)
    j = 0
    while j < 10000:
        try:
            results[j][-1]
        except:
            endloop = True
            break
        message = str(results[j][-1]).encode("hex")
        channel.basic_publish(exchange='',
                              routing_key='task_queue2',
                              body=message
                              # properties=pika.BasicProperties(
                              #     delivery_mode=2,  # make message persistent
                              # )
                              )
        j = j + 1
        # if i % 10000 == 0:
        #     print i
    if endloop == False:
        break
    i = i + 10000
The SQL query takes too long to execute once i gets to 100,000,000, and I have about two billion entries I need to put into the queue. Does anyone know a more efficient SQL query I can run to get all two billion into the queue faster?
psycopg2 supports server-side cursors, that is, a cursor that is managed on the database server rather than in the client. The full result set is not transferred all at once to the client, rather it is fed to it as required via the cursor interface.
This will allow you to perform the query without paging (as LIMIT/OFFSET implements) and will simplify your code. To use a server-side cursor, pass the name parameter when creating the cursor.
import pika
import psycopg2

with psycopg2.connect(host=hostname, user=username, password=password, dbname=database) as conn:
    with conn.cursor(name='my_cursor') as cur:  # create a named server-side cursor
        cur.execute('select * from table')
        connection = pika.BlockingConnection(pika.ConnectionParameters(host='localhost'))
        channel = connection.channel()
        channel.queue_declare(queue='task_queue2')
        for row in cur:
            message = str(row[-1]).encode('hex')
            channel.basic_publish(exchange='', routing_key='task_queue2', body=message)
You might want to tweak cur.itersize to improve performance if necessary.
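For scale, psycopg2 fetches named-cursor rows from the server in batches of 2000 by default; setting a larger itersize before iterating trades client memory for fewer server round trips:

cur.itersize = 10000  # fetch 10,000 rows per round trip instead of the default 2,000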
