I'm trying to read data from a MySQL database into an OPC UA server. I tested it with the following code against a sample database and it works. However, I'm not sure it will hold up in a real-time environment, since the actual database has 40+ tables with 30+ columns each, recording data every minute. Can someone please suggest the optimal way to do this?
from opcua import ua, uamethod, Server
from time import sleep
import logging
import mysql.connector

# Connect to the sample database
mydb = mysql.connector.connect(
    host="127.0.0.1",
    port=3306,
    user="root",
    password="root",
    database="classicmodels")

mycursor = mydb.cursor(buffered=True, dictionary=True)

# Fetch one row from each table of interest
sql = "SELECT * FROM classicmodels.customers"
mycursor.execute(sql)
myresult = mycursor.fetchone()

sql1 = "SELECT * FROM classicmodels.employees"
mycursor.execute(sql1)
myresult1 = mycursor.fetchone()
if __name__ == "__main__":
    """
    OPC-UA-Server Setup
    """
    server = Server()
    endpoint = "opc.tcp://127.0.0.1:4848"
    server.set_endpoint(endpoint)
    servername = "Python-OPC-UA-Server"
    server.set_server_name(servername)

    """
    OPC-UA-Modeling
    """
    root_node = server.get_root_node()
    object_node = server.get_objects_node()
    idx = server.register_namespace("OPCUA_SERVER")
    myobj = object_node.add_object(idx, "DA_UA")
    myobj1 = object_node.add_object(idx, "D_U")

    """
    OPC-UA-Server Add Variable
    """
    # One OPC UA variable per column of the fetched row
    for key, value in myresult.items():
        myobj.add_variable(idx, key, str(value))
    for key, value in myresult1.items():
        myobj1.add_variable(idx, key, str(value))

    """
    OPC-UA-Server Start
    """
    server.start()
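For context, here is roughly how I plan to keep the values fresh after server.start() (just a sketch: I would change the add_variable loops to keep the returned node objects, and the 60-second poll interval is my assumption to match the 1-minute recording rate):

    # Sketch: assumes the add_variable loop above was changed to keep
    # the node objects, e.g.
    #   nodes = {key: myobj.add_variable(idx, key, str(value))
    #            for key, value in myresult.items()}
    try:
        while True:
            sleep(60)  # poll once per minute (assumed interval)
            mycursor.execute(sql)
            row = mycursor.fetchone()
            for key, value in row.items():
                nodes[key].set_value(str(value))  # refresh the existing node
    finally:
        server.stop()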
Related:
#!/usr/bin/env python
import pika
import psycopg2

def doQuery(conn, i):
    cur = conn.cursor()
    cur.execute("SELECT * FROM table OFFSET %s LIMIT 100000", (i,))
    return cur.fetchall()

print "Using psycopg2"

myConnection = psycopg2.connect(host=hostname, user=username,
                                password=password, dbname=database)

connection = pika.BlockingConnection(pika.ConnectionParameters(host='localhost'))
channel = connection.channel()
channel.queue_declare(queue='task_queue2')
endloop = False
i = 1
while True:
    results = doQuery(myConnection, i)
    j = 0
    while j < 100000:  # one batch per query (matches the LIMIT above)
        try:
            results[j][-1]
        except IndexError:
            endloop = True
            break
        message = str(results[j][-1]).encode("hex")
        channel.basic_publish(exchange='',
                              routing_key='task_queue2',
                              body=message
                              #properties=pika.BasicProperties(
                              #    delivery_mode=2,  # make message persistent
                              )#)
        j = j + 1
    # if i % 10000 == 0:
    #     print i
    if endloop:
        break
    i = i + 100000
The SQL query takes too long to execute once i gets to 100,000,000, but I have about two billion rows I need to put into the queue. Does anyone know a more efficient SQL query I can run so that I can get all two billion rows into the queue faster?
psycopg2 supports server-side cursors, that is, cursors managed on the database server rather than in the client. The full result set is not transferred to the client all at once; instead, it is fed to the client as required via the cursor interface.
This allows you to perform the query without paging (as LIMIT/OFFSET implements) and simplifies your code. To use a server-side cursor, pass the name parameter when creating the cursor:
import pika
import psycopg2

with psycopg2.connect(host=hostname, user=username, password=password, dbname=database) as conn:
    with conn.cursor(name='my_cursor') as cur:  # create a named server-side cursor
        cur.execute('select * from table')
        connection = pika.BlockingConnection(pika.ConnectionParameters(host='localhost'))
        channel = connection.channel()
        channel.queue_declare(queue='task_queue2')
        for row in cur:
            message = str(row[-1]).encode('hex')
            channel.basic_publish(exchange='', routing_key='task_queue2', body=message)
You might want to tweak cur.itersize to improve performance if necessary.
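For example (a sketch; psycopg2's default itersize is 2000 rows per network round trip, and 10000 here is just an illustrative value):

    cur.itersize = 10000  # fetch 10000 rows from the server per round trip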
Iterating over each record and printing by key displays nothing
#!/usr/bin/env python
# Print necessary headers.
print("Content-Type: text/html")
print()

import pymysql.cursors
# Connect to the database.
import pymysql
conn = pymysql.connect(db='candidate', user='root', passwd='123', host='localhost')
print("connect successful!!")

c = conn.cursor()
c.execute("SELECT can_id,can_name,state,total_votes FROM voting")
result_set = c.fetchall()
for row in result_set:
    print(row["can_id"])
print(row) works fine, though.
By default PyMySQL returns each row as a tuple, so indexing by column name fails. Request a DictCursor instead:

c = conn.cursor(pymysql.cursors.DictCursor)

or

conn = pymysql.connect(db='candidate', user='root', passwd='123', host='localhost', cursorclass=pymysql.cursors.DictCursor)
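With a DictCursor, each fetched row is a dict keyed by column name, so the original loop works unchanged (a small sketch against the same table):

    c = conn.cursor(pymysql.cursors.DictCursor)
    c.execute("SELECT can_id, can_name, state, total_votes FROM voting")
    for row in c.fetchall():
        print(row["can_id"])  # row is a dict, e.g. {'can_id': 1, 'can_name': ..., ...}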
I propose some changes using the PyMySQL documentation as reference:
#!/usr/bin/env python
# -*- coding: utf-8 -*-

# Print necessary headers.
print("Content-Type: text/html")
print()

import pymysql.cursors

# Connect to the database.
conn = pymysql.connect(
    host='localhost',
    user='root',
    password='123',
    db='candidate',
    cursorclass=pymysql.cursors.DictCursor  # <-- declare the cursor class
)
print("connect successful!!")

try:
    with conn.cursor() as c:
        query = "SELECT can_id, can_name, state, total_votes FROM voting;"
        c.execute(query)
        for row in c.fetchall():
            print("Name: %s" % row['can_name'])
finally:
    conn.close()
Mind that I am new to Flask, and to Python for that matter; I appreciate any help anyone gives. I'm looking to access one of the fields of my JSON response (just the field, not the entire response). How should I go about parsing the response? An image of the response is attached below. Thanks.
This is my main thread:
from flask import Flask, render_template, request
from Qhandler import Qhandler
from MakePlayer import MakePlayer

app = Flask(__name__)

@app.route('/createplayer', methods=['GET', 'POST'])
def showCreatePlayer():
    if request.method == 'POST':
        MakePlayer(request.form['playername'], request.form['playerteam'],
                   request.form['playerrole'], request.form['playerpos'])
        return "created player: <br>" + request.form['playername'] + " " + request.form['playerteam'] + " " + request.form['playerrole'] + " " + request.form['playerpos']
    return render_template("createPlayer.html")

@app.route('/sucess')
def success():
    return "success"

@app.route('/showplayers')
def showPlayers():
    Q = Qhandler()
    return Q.displayQuery(""" select * from Player""")

if __name__ == '__main__':
    app.run(debug=True)
This is my query handler:
from flask import Flask, jsonify, json
from flaskext.mysql import MySQL

class Qhandler(object):
    #global mysql
    global cursor
    global connection
    global mysql

    # database connection
    app = Flask(__name__)
    mysql = MySQL()
    app.config['MYSQL_DATABASE_USER'] = 'root'
    app.config['MYSQL_DATABASE_PASSWORD'] = 'root'
    app.config['MYSQL_DATABASE_DB'] = 'Optimizer'
    app.config['MYSQL_DATABASE_HOST'] = 'localhost'
    mysql.init_app(app)

    def ins(self, query):
        try:
            connection = mysql.connect()
            cursor = connection.cursor()
            cursor.execute(query)
            connection.commit()
        except:
            print "error running query"
        finally:
            #cursor.close()
            connection.close()

    def displayQuery(self, query):
        try:
            connection = mysql.connect()
            cursor = connection.cursor()
            cursor.execute(query)
            fetchedData = cursor.fetchall()
            fetchedData = jsonify(fetchedData)
            #fetchedData = json.dumps(fetchedData)
            #record = json.loads(fetchedData)
            #print "the response is here:"
            return fetchedData
        except:
            print "error running query"
        finally:
            #cursor.close()
            connection.close()
The current response is shown in the attached screenshot.
Use "fetchedData = json.dumps(fetchedData)" instead of "fetchedData = jsonify(fetchedData)" then create a json decoder and parse the response, refer to below :
def displayQuery(self, query):
    try:
        connection = mysql.connect()
        cursor = connection.cursor()
        cursor.execute(query)
        fetchedData = cursor.fetchall()
        fetchedData = json.dumps(fetchedData)
        # create a json decoder
        d = json.JSONDecoder()
        fieldPlayerName = d.decode(fetchedData)
        # parse the json that is returned (fieldPlayerName[0][1])
        print "should print the field with the player name", fieldPlayerName[0][1]
        return fieldPlayerName[0][1]
    finally:
        connection.close()
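For what it's worth, json.loads is the usual shorthand for creating a decoder and calling decode, so those two lines could be collapsed (a minor stylistic note, not required):

    fieldPlayerName = json.loads(fetchedData)  # same as json.JSONDecoder().decode(fetchedData)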
I would like to produce the following setup in Python 3.4, SQLite3 v3.8.11:
(1) Create an in-memory shared-cache SQLite3 database
(2) Create one connection that only writes to this DB, from one thread
(3) Create multiple connections that concurrently read from this DB, from various other threads
This is what I have created to test this:
import time
import zmq
import random
from threading import Thread
import sqlite3

def producer(context):
    zmq_socket = context.socket(zmq.PUB)
    zmq_socket.bind("inproc://test_pub")
    while True:
        msg = random.random()
        zmq_socket.send(str(msg).encode())
        wait_time = random.uniform(0, 0.05)
        time.sleep(wait_time)

def subscriber_writer(context):
    # Create database connection for writing to memory
    write_con = sqlite3.connect('file::memory:?cache=shared', uri=True)
    cursor = write_con.cursor()
    zmq_socket = context.socket(zmq.SUB)
    zmq_socket.connect("inproc://test_pub")
    zmq_socket.setsockopt(zmq.SUBSCRIBE, b'')
    while True:
        msg = float(zmq_socket.recv().decode())
        cursor.execute('UPDATE TEST SET Value=? WHERE Key="Val"', [msg])
        write_con.commit()

def consumer(context):
    # Create database connection for reading from memory in read-only mode
    read_con = sqlite3.connect('file::memory:?cache=shared&mode=ro', uri=True)
    cursor = read_con.cursor()
    while True:
        cursor.execute('SELECT Value FROM TEST WHERE Key="Val"')
        row = cursor.fetchone()
        result = row[0]
        print(str(result))
        wait_time = random.uniform(0, 0.05)
        time.sleep(wait_time)

def main():
    # Create context
    context = zmq.Context()
    # Create database
    con = sqlite3.connect('file::memory:?cache=shared', uri=True)
    # Create db table
    cursor = con.cursor()
    cursor.execute('CREATE TABLE TEST(Key TEXT, Value NUMERIC)')
    cursor.execute('INSERT INTO TEST VALUES (?,?)', ["Val", 0.00])
    con.commit()
    Thread(target=subscriber_writer, args=(context,)).start()
    Thread(target=producer, args=(context,)).start()
    Thread(target=consumer, args=(context,)).start()

if __name__ == '__main__':
    main()
This works for a while, but then I get the following error:
...
0.2504188310554989
0.2504188310554989
0.8038719720740617
0.42408896748682956
0.21361498908206744
0.3404497358396832
0.010459475861968603
0.5070540941748318
0.5070540941748318
0.23151535812095037
0.636881359928549
0.4184038089576615
0.9920311052908629
Exception in thread Thread-3:
Traceback (most recent call last):
File "E:\Python34-64\lib\threading.py", line 911, in _bootstrap_inner
self.run()
File "E:\Python34-64\lib\threading.py", line 859, in run
self._target(*self._args, **self._kwargs)
File "test.py", line 43, in consumer
cursor.execute('SELECT Value FROM TEST WHERE Key="Val"')
sqlite3.OperationalError: database table is locked: TEST
How can I make this work?
As a side note, creating only ONE connection with check_same_thread=False and sharing it across the whole process works even when the wait times are eliminated. Is it advisable to do this instead? See below:
import time
import zmq
import random
from threading import Thread
import sqlite3

def producer(context):
    zmq_socket = context.socket(zmq.PUB)
    zmq_socket.bind("inproc://test_pub")
    while True:
        msg = random.random()
        zmq_socket.send(str(msg).encode())
        # wait_time = random.uniform(0, 0.05)
        # time.sleep(wait_time)

def subscriber_writer(context, con):
    zmq_socket = context.socket(zmq.SUB)
    zmq_socket.connect("inproc://test_pub")
    zmq_socket.setsockopt(zmq.SUBSCRIBE, b'')
    cursor = con.cursor()
    while True:
        msg = float(zmq_socket.recv().decode())
        cursor.execute('UPDATE TEST SET Value=? WHERE Key="Val"', [msg])

def consumer(context, con):
    cursor = con.cursor()
    while True:
        cursor.execute('SELECT Value FROM TEST WHERE Key="Val"')
        row = cursor.fetchone()
        result = row[0]
        print(str(result))
        # wait_time = random.uniform(0, 0.05)
        # time.sleep(wait_time)

def main():
    # Create context
    context = zmq.Context()
    # Create database; isolation_level=None puts the connection in autocommit mode
    con = sqlite3.connect('file::memory:?cache=shared', uri=True,
                          isolation_level=None, check_same_thread=False)
    # Create db table
    cursor = con.cursor()
    cursor.execute('CREATE TABLE TEST(Key TEXT, Value NUMERIC)')
    cursor.execute('INSERT INTO TEST VALUES (?,?)', ["Val", 0.00])
    Thread(target=subscriber_writer, args=(context, con)).start()
    Thread(target=producer, args=(context,)).start()
    Thread(target=consumer, args=(context, con)).start()

if __name__ == '__main__':
    main()
I'm a newbie in Python, so this is essentially my first project in the language.
Every time I run my script, I get a different response from the MySQL server.
The most frequent is OperationalError: (2006, 'MySQL server has gone away').
Sometimes I get the output Thread: 11 commited (see code below).
And sometimes a crash (translated; my console output is in Russian).
Even when the output is full of commited messages, the records in the table remain unchanged.
import MySQLdb
import pyping
import socket, struct
from threading import Thread

def ip2int(addr):
    """Convert ip to integer"""
    return struct.unpack("!I", socket.inet_aton(addr))[0]

def int2ip(addr):
    """Convert integer to ip"""
    return socket.inet_ntoa(struct.pack("!I", addr))

def ping(ip):
    """Pinging client"""
    request = pyping.ping(ip, timeout=100, count=1)
    return int(request.max_rtt)

class UpdateThread(Thread):
    def __init__(self, records, name):
        Thread.__init__(self)
        self.database = MySQLdb.connect(host="***", port=3306, user="root", passwd="***", db="dns")
        self.cursor = database.cursor()
        self.name = name
        self.records = records

    def run(self):
        print(self.name)
        for r in self.records:
            #latency = ping(int2ip(r[1])) what the hell :x
            #ip = str(int2ip(r[1]))
            id = str(r[0])
            self.cursor.execute("""update clients set has_subn=%s where id=%s""" % (id, id))
            self.database.commit()
        print(self.name + " commited")

#start
database = MySQLdb.connect(host="***", port=3306, user="root", passwd="***", db="dns")
cursor = database.cursor()
cursor.execute("""select * from clients""")
data = cursor.fetchall()  # All records from the database
count = len(data)
threads_counter = 10  # We are creating 10 threads for all records
th_count = count / threads_counter  # Count of records for each thread
last_thread = count % threads_counter  # Remaining records
threads = []
i = 0
while i < (count - last_thread):
    temp_list = data[i:(i + th_count)]
    #print(temp_list)
    threads.append(UpdateThread(records=temp_list, name="Thread: " + str((i / 3) + 1)).start())
    i += th_count
threads.append(UpdateThread(records=data[i:count], name="Thread: 11").start())
P.S.
The other answers I found here did not help me.
UPD:
I found that some thread (a different one each time) prints
OperationalError: (2013, 'Lost connection to MySQL server during query'), and all subsequent threads then print the same error.
You need to close your DB connections when you're done with them, or else the DB server becomes overwhelmed and expires your connections. For your program, I would change the code so that you have only one DB connection. You can pass a reference to it to your UpdateThread instances and close it when you're done:
database.close()
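A minimal sketch of that suggestion (my assumptions: the threads share the one connection passed in, and a threading.Lock serializes access, since MySQLdb connections are not safe to use from multiple threads concurrently; chunks stands for the per-thread record slices built above):

    import MySQLdb
    from threading import Thread, Lock

    db_lock = Lock()  # MySQLdb connections must not be used concurrently

    class UpdateThread(Thread):
        def __init__(self, records, name, database):
            Thread.__init__(self)
            self.database = database  # shared connection, passed in
            self.records = records
            self.name = name

        def run(self):
            for r in self.records:
                id = str(r[0])
                with db_lock:  # serialize access to the shared connection
                    cursor = self.database.cursor()
                    cursor.execute("update clients set has_subn=%s where id=%s", (id, id))
                    self.database.commit()
                    cursor.close()
            print(self.name + " commited")

    database = MySQLdb.connect(host="***", port=3306, user="root", passwd="***", db="dns")
    threads = [UpdateThread(records=chunk, name="Thread: %d" % n, database=database)
               for n, chunk in enumerate(chunks)]  # chunks: hypothetical list of record slices
    for t in threads:
        t.start()
    for t in threads:
        t.join()
    database.close()  # single connection, closed once all threads are done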