I need to call my class CotarMoedas from another Python file, but I need to get the result of the live stream returned to the caller.
#https://www.youtube.com/watch?v=pxyaENuoZUM&list=PL9ATnizYJ7f8_opOpLnekEZNsNVUVbCZN&index=47
import websocket
import json
import pandas as pd
from sqlalchemy import create_engine
from threading import Thread

engine = create_engine('sqlite:///COINS.db')


class CotarMoedas:
    def __init__(self):
        self.endpoint = 'wss://stream.binance.com:9443/ws/!miniTicker@arr'
        tr = Thread(target=self.call_ws)
        tr.start()

    def df_import(self, data):
        # Keep only the pairs quoted against USDT
        df_ = pd.DataFrame(data)
        df_ = df_[df_['s'].str.endswith('USDT')]
        df_.c = df_.c.astype(float)
        final = df_[['s', 'E', 'c']]
        for i in range(len(final)):
            row_ = final[i:i + 1]
            # print(row_)
            row_[['E', 'c']].to_sql(row_.s.values[0], engine, index=False, if_exists='append')

    def best_conin(self):
        engine = create_engine('sqlite:///COINS.db')
        dfs = pd.read_sql("""SELECT name FROM sqlite_master WHERE type='table'""", engine)
        tables = dfs.name.to_list()
        returns = []
        for table in tables:
            df__ = pd.read_sql(table, engine)
            # ret_ = (df__.tail(20).c.pct_change() + 1).prod() - 1  # only the 20 most recent rows
            ret_ = (df__.c.pct_change() + 1).prod() - 1
            returns.append(ret_)
        rst = pd.Series(returns).nlargest(10)
        rst.sort_values()
        bv = rst.to_dict()
        ls = []
        for k, v in bv.items():
            ls.append([tables[k], v])
            print(tables[k], v)
        return ls

    def on_close(self, ws):
        print('Connection closed!')

    def on_message(self, ws, message):
        out = json.loads(message)
        # print(out)
        self.df_import(out)
        print('============')
        self.best_conin()

    def call_ws(self):
        ws = websocket.WebSocketApp(self.endpoint, on_message=self.on_message, on_close=self.on_close)
        ws.run_forever()
But I am trying to call it like this:
from videos_algovibes.cotacao import CotarMoedas

rst = CotarMoedas()
vai = rst.best_conin()

if __name__ == "__main__":
    print(vai)
I'm simplifying the code:
#https://www.youtube.com/watch?v=pxyaENuoZUM&list=PL9ATnizYJ7f8_opOpLnekEZNsNVUVbCZN&index=47
import websocket
import json
import pandas as pd
from sqlalchemy import create_engine
from threading import Thread
import threading


class CotarMoedas:
    def __init__(self):
        self.endpoint = 'wss://stream.binance.com:9443/ws/!miniTicker@arr'
        # tr = Thread(target=self.call_ws)
        # tr.start()
        self.call_ws()

    def best_conin(self):
        print("Best coin")

    def on_close(self, ws):
        print('Connection closed!')

    def on_message(self, ws, message):
        def run(*args):
            print(message)
            print("Message received...")
        threading.Thread(target=run).start()

    def call_ws(self):
        def _call_ws():
            ws = websocket.WebSocketApp(self.endpoint, on_message=self.on_message, on_close=self.on_close)
            ws.run_forever()
        threading.Thread(target=_call_ws).start()
https://youtu.be/pDslZbvO7Hs
As you can see in the video, threading can be handled this way.
Good luck with SQLAlchemy, Sessions, etc.
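If the goal is to read the stream from another file rather than just print it, one option (a minimal sketch of my own, not the video's code) is to push each parsed message onto a queue that the calling module consumes:

import json
import queue
import threading
import websocket

class CotarMoedas:
    def __init__(self):
        self.endpoint = 'wss://stream.binance.com:9443/ws/!miniTicker@arr'
        self.results = queue.Queue()          # filled by on_message, read by the caller
        threading.Thread(target=self._run, daemon=True).start()

    def _run(self):
        ws = websocket.WebSocketApp(self.endpoint, on_message=self._on_message)
        ws.run_forever()

    def _on_message(self, ws, message):
        self.results.put(json.loads(message))

# In the other file:
# cot = CotarMoedas()
# tickers = cot.results.get()   # blocks until the next miniTicker batch arrives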
I'm trying to edit this project in Python to have an HP iLO exporter for Prometheus. So far I have read a few articles here on Stack Overflow and tried to implement some functionality, and eventually I came up with a partially working script, but the hostname does not change after the first request. Is there a way to dump the collector?
I have tried it with try/except, but it just does not work.
The goal is to use curl like this:
curl localhost:9116/metrics?hostname=ip
And what will happen if there are 10 requests at the same time with different hostnames? Should it create some kind of a queue?
Can someone help me? Thanks
Original project: https://github.com/JackWindows/ilo-exporter
My code:
#!/usr/bin/env python
import collections
import os
import time
import flask
import redfish
import waitress
from flask import Flask
from prometheus_client import make_wsgi_app
from prometheus_client.core import GaugeMetricFamily, REGISTRY
from werkzeug.middleware.dispatcher import DispatcherMiddleware
from flask import request
from time import sleep
from flask import Flask, Response, request
import traceback
from werkzeug.wsgi import ClosingIterator


class AfterResponse:
    def __init__(self, app=None):
        self.callbacks = []
        if app:
            self.init_app(app)

    def __call__(self, callback):
        self.callbacks.append(callback)
        return callback

    def init_app(self, app):
        # install extension
        app.after_response = self
        # install middleware
        app.wsgi_app = AfterResponseMiddleware(app.wsgi_app, self)

    def flush(self):
        for fn in self.callbacks:
            try:
                fn()
            except Exception:
                traceback.print_exc()


class AfterResponseMiddleware:
    def __init__(self, application, after_response_ext):
        self.application = application
        self.after_response_ext = after_response_ext

    def __call__(self, environ, start_response):
        iterator = self.application(environ, start_response)
        try:
            return ClosingIterator(iterator, [self.after_response_ext.flush])
        except Exception:
            traceback.print_exc()
            return iterator


class ILOCollector(object):
    def __init__(self, hostname: str, port: int = 443, user: str = 'admin', password: str = 'password') -> None:
        self.ilo = redfish.LegacyRestClient(base_url=hostname, username=user, password=password)
        self.ilo.login()

        system = self.ilo.get('/redfish/v1/Systems/1/').obj
        self.label_names = ('hostname', 'product_name', 'sn')
        self.label_values = (hostname, system.Model, system.SerialNumber.strip())

    def collect(self):
        embedded_media = self.ilo.get('/redfish/v1/Managers/1/EmbeddedMedia/').obj
        smart_storage = self.ilo.get('/redfish/v1/Systems/1/SmartStorage/').obj
        thermal = self.ilo.get('/redfish/v1/Chassis/1/Thermal/').obj
        power = self.ilo.get('/redfish/v1/Chassis/1/Power/').obj

        g = GaugeMetricFamily('hpilo_health',
                              'iLO health status, -1: Unknown, 0: OK, 1: Degraded, 2: Failed.',
                              labels=self.label_names + ('component',))

        def status_to_code(status: str) -> int:
            status = status.lower()
            ret = -1
            if status == 'ok':
                ret = 0
            elif status == 'warning':
                ret = 1
            elif status == 'failed':
                ret = 2
            return ret

        g.add_metric(self.label_values + ('embedded_media',), status_to_code(embedded_media.Controller.Status.Health))
        g.add_metric(self.label_values + ('smart_storage',), status_to_code(smart_storage.Status.Health))
        for fan in thermal.Fans:
            g.add_metric(self.label_values + (fan.FanName,), status_to_code(fan.Status.Health))
        yield g

        g = GaugeMetricFamily('hpilo_fan_speed', 'Fan speed in percentage.',
                              labels=self.label_names + ('fan',), unit='percentage')
        for fan in thermal.Fans:
            g.add_metric(self.label_values + (fan.FanName,), fan.CurrentReading)
        yield g

        sensors_by_unit = collections.defaultdict(list)
        for sensor in thermal.Temperatures:
            if sensor.Status.State.lower() != 'enabled':
                continue
            reading = sensor.CurrentReading
            unit = sensor.Units
            sensors_by_unit[unit].append((sensor.Name, reading))
        for unit in sensors_by_unit:
            g = GaugeMetricFamily('hpilo_temperature', 'Temperature sensors reading.',
                                  labels=self.label_names + ('sensor',), unit=unit.lower())
            for sensor_name, sensor_reading in sensors_by_unit[unit]:
                g.add_metric(self.label_values + (sensor_name,), sensor_reading)
            yield g

        g = GaugeMetricFamily('hpilo_power_current', 'Current power consumption in Watts.', labels=self.label_names,
                              unit='watts')
        g.add_metric(self.label_values, power.PowerConsumedWatts)
        yield g

        label_values = self.label_values + (str(power.PowerMetrics.IntervalInMin),)
        g = GaugeMetricFamily('hpilo_power_average', 'Average power consumption in Watts.',
                              labels=self.label_names + ('IntervalInMin',), unit='watts')
        g.add_metric(label_values, power.PowerMetrics.AverageConsumedWatts)
        yield g

        g = GaugeMetricFamily('hpilo_power_min', 'Min power consumption in Watts.',
                              labels=self.label_names + ('IntervalInMin',), unit='watts')
        g.add_metric(label_values, power.PowerMetrics.MinConsumedWatts)
        yield g

        g = GaugeMetricFamily('hpilo_power_max', 'Max power consumption in Watts.',
                              labels=self.label_names + ('IntervalInMin',), unit='watts')
        g.add_metric(label_values, power.PowerMetrics.MaxConsumedWatts)
        yield g


# Create Flask app
app = Flask('iLO Exporter')


@app.route('/')
def root():
    return '''<html>
    <head><title>iLO Exporter</title></head>
    <body>
    <h1>iLO Exporter</h1>
    <p><a href='/metrics'>Metrics</a></p>
    </body>
    </html>'''


AfterResponse(app)


@app.after_response
def say_hi():
    print("hi")


@app.route("/metrics")
def home():
    try:
        REGISTRY.unregister(collector)
    except Exception:
        print("An exception occurred")

    port = int(os.getenv('ILO_PORT', 443))
    user = os.getenv('ILO_USER', 'admin')
    password = os.getenv('ILO_PASSWORD', 'password')
    hostname = request.args.get('hostname')

    app.wsgi_app = DispatcherMiddleware(app.wsgi_app, {
        '/metrics': make_wsgi_app()
    })

    collector = ILOCollector(hostname, port, user, password)
    REGISTRY.register(collector)


if __name__ == '__main__':
    exporter_port = int(os.getenv('LISTEN_PORT', 9116))
    waitress.serve(app, host='0.0.0.0', port=exporter_port)
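For the changing-hostname and concurrency questions, one common pattern (a hedged sketch, not the original project's code; it reuses the ILOCollector class, the Flask app, and the imports above) is to skip the global REGISTRY and the DispatcherMiddleware entirely and build a throw-away CollectorRegistry per request, so simultaneous requests with different hostnames don't interfere with each other:

from prometheus_client import CollectorRegistry, generate_latest, CONTENT_TYPE_LATEST

@app.route('/metrics')
def metrics():
    hostname = request.args.get('hostname')
    port = int(os.getenv('ILO_PORT', 443))
    user = os.getenv('ILO_USER', 'admin')
    password = os.getenv('ILO_PASSWORD', 'password')
    registry = CollectorRegistry()                       # private registry for this request only
    registry.register(ILOCollector(hostname, port, user, password))
    return Response(generate_latest(registry), mimetype=CONTENT_TYPE_LATEST)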
I have a Python program that uses mysql.connector to log in to a server, execute a query, and save the SQL result to an Excel file. I use PyQt for my GUI, and when I press the button the GUI freezes. I have created the QThread and worker class, but when I import the executor function and press the button, the program runs and the GUI starts to freeze. What am I doing wrong?
How can I implement the multithreading?
from cmath import inf
from multiprocessing import connection
from operator import index
import os
from sqlite3 import Cursor
from unittest.result import failfast
from datetime import datetime

import mysql.connector
from mysql.connector import errorcode
import pandas as pd

# importing Queries
from Queries import *
# Location IPs
from locations import locs
from locations import usr, passwd, db

startDate = '2022-04-01'
endDate = '2022-04-15'
logfailedls = 'logfailed.txt'
logsuccessls = 'logsuccess.txt'


class bcolors:
    HEADER = '\033[95m'
    OKBLUE = '\033[94m'
    OKCYAN = '\033[96m'
    OKGREEN = '\033[92m'
    WARNING = '\033[93m'
    FAIL = '\033[91m'
    ENDC = '\033[0m'
    BOLD = '\033[1m'
    UNDERLINE = '\033[4m'


def executor(QUERY):
    alldf = None
    for type, info in locs.items():
        if not os.path.isdir('%s/' % (type)):
            os.makedirs('%s/' % (type))
        ls = os.listdir('{}/'.format(type))
        if logfailedls in ls:
            os.remove('{}/{}'.format(type, logfailedls))
        if logsuccessls in ls:
            os.remove('{}/{}'.format(type, logsuccessls))
        for ip, locName in info.items():
            try:
                cnx = mysql.connector.connect(user=usr, password=passwd, host=ip, database=db)
                if cnx.is_connected():
                    print("Connection successful to {}".format(locName))
                    logsuccess = open('{}/logsuccess.txt'.format(type), 'a')
                    logsuccess.write('{} : {}-{}\n'.format(datetime.now().strftime("%Y-%m-%d %H:%M:%S"), ip, locName))
                    logsuccess.close()
                    location = cnx.cursor(buffered=True)
                    location.execute("SELECT loccod FROM docparameters d limit 1")
                    loc = location.fetchone()[0]
                    cursor = cnx.cursor()
                    cursor.execute(QUERY)
                    df = pd.DataFrame(cursor.fetchall())
                    if alldf is not None:
                        alldf = alldf.append(df)
                    else:
                        alldf = df
                    print(df)
                    field_names = [i[0] for i in cursor.description]
                    print(field_names)
                    xlswriter = pd.ExcelWriter('{}/{}.xls'.format(type, loc), engine='openpyxl')
                    df.to_excel(xlswriter)
                    xlswriter.save()
                    cnx.close()
            except mysql.connector.Error as err:
                if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:
                    print("Something wrong with your username or password")
                elif err.errno == errorcode.ER_BAD_DB_ERROR:
                    print("DATABASE does not exist")
                else:
                    print(err)
                print("Connection failed to %s" % (loc))
                logfailed = open("{}/logfailed.txt".format(type), 'a')
                logfailed.write('{} : {}-{}\n'.format(datetime.now().strftime("%Y-%m-%d %H:%M:%S"), ip, locName))
                logfailed.close()
            else:
                cnx.close()
    return [alldf, field_names]


def saveToExcel(query, filename):
    xlswriter = pd.ExcelWriter("%s.xls" % (filename), engine='openpyxl')
    queryDatas = executor(query)
    export = queryDatas[0]
    export.columns = queryDatas[1]
    export.to_excel(xlswriter, index=False)
    xlswriter.save()


# saveToExcel(union, 'UNION 15%')
# saveToExcel(hnb, 'HBC 25%')
# saveToExcel(SCB, 'SCB')
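One small aside on combining the per-location results (a hedged suggestion, not part of the original question): DataFrame.append is deprecated in newer pandas, so the usual replacement is to collect the frames in a list and concatenate once at the end.

import pandas as pd

# Stand-in frames for the per-location query results collected inside executor().
frames = [pd.DataFrame({'v': [1, 2]}), pd.DataFrame({'v': [3]})]
alldf = pd.concat(frames, ignore_index=True) if frames else None
print(alldf)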
This is my PyQt GUI program:
from concurrent.futures import Executor
import traceback
from unittest import result

from PyQt5.QtGui import *
from PyQt5.QtWidgets import *
from PyQt5.QtCore import *
import sys
import time
import copy

from connector import *


class WorkerSignals(QObject):
    finished = pyqtSignal()
    error = pyqtSignal(tuple)
    result = pyqtSignal(object)


class Worker(QThread):
    def __init__(self, fn, *args, **kwargs):
        super(Worker, self).__init__()
        self.fn = fn
        self.args = args
        self.kwargs = kwargs
        self.signals = WorkerSignals()

    @pyqtSlot()
    def run(self):
        try:
            result = self.fn(*self.args, **self.kwargs)
        except:
            traceback.print_exc()
            exctype, value = sys.exc_info()[:2]
            self.signals.error.emit((exctype, value, traceback.format_exc()))
        else:
            self.signals.result.emit(result)
        finally:
            self.signals.finished.emit()


class MainWindow(QMainWindow):
    def __init__(self, *args, **kwargs):
        super(MainWindow, self).__init__(*args, **kwargs)

        self.threadpool = QThreadPool()
        print("multithreading with maximum %d threads" % self.threadpool.maxThreadCount())

        self.counter = 0

        layout = QVBoxLayout()
        self.l = QLabel("Start")
        b = QPushButton("DANGER!")
        b.pressed.connect(self.oh_no)
        c = QPushButton("?")
        c.pressed.connect(self.change_message)

        layout.addWidget(self.l)
        layout.addWidget(b)
        layout.addWidget(c)

        w = QWidget()
        w.setLayout(layout)
        self.setCentralWidget(w)
        self.show()

    def change_message(self):
        self.message = "OH NO"

    def print_output(self, s):
        print(s)

    def thread_complete(self):
        print("THREAD COMPLETE!")

    def oh_no(self):
        worker = Worker(executor(SCB))
        worker.signals.result.connect(self.print_output)
        worker.signals.finished.connect(self.thread_complete)
        self.threadpool.start(worker)


app = QApplication([])
window = MainWindow()
app.exec_()
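A hedged sketch of the usual fix (the standard QRunnable/QThreadPool pattern, reusing the WorkerSignals class defined above): QThreadPool.start() expects a QRunnable, and the function must be handed over uncalled so that executor runs inside the worker thread instead of the GUI thread.

import sys
import traceback
from PyQt5.QtCore import QRunnable, pyqtSlot

class Worker(QRunnable):
    def __init__(self, fn, *args, **kwargs):
        super().__init__()
        self.fn = fn                      # store the callable itself, not its result
        self.args = args
        self.kwargs = kwargs
        self.signals = WorkerSignals()

    @pyqtSlot()
    def run(self):
        try:
            result = self.fn(*self.args, **self.kwargs)
        except Exception:
            traceback.print_exc()
            exctype, value = sys.exc_info()[:2]
            self.signals.error.emit((exctype, value, traceback.format_exc()))
        else:
            self.signals.result.emit(result)
        finally:
            self.signals.finished.emit()

# In MainWindow.oh_no, pass the callable and its argument separately:
#     worker = Worker(executor, SCB)      # not Worker(executor(SCB)), which runs the query in the GUI thread
#     self.threadpool.start(worker)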
I am trying to run code that loads a dataframe into SQL Server using PyQt5, while showing a "Please wait" pop-up while the data is being loaded into the DB. I am using QThread for this. If there is an error, a QMessageBox shows it to the user. But my program crashes, with the message box appearing blank. I am attaching both the code and the error snippet.
from functools import partial
import os

from PyQt5 import QtCore, QtGui, QtWidgets
import configparser
import pyodbc


def ConfigSection(section, Config):
    dict1 = {}
    options = Config.options(section)
    for option in options:
        try:
            dict1[option] = Config.get(section, option)
        except:
            dict1[option] = None
    return dict1


class DatabaseWorker(QtCore.QObject):
    started = QtCore.pyqtSignal()
    finished = QtCore.pyqtSignal()
    message = QtCore.pyqtSignal(object)
    msg = ""

    @QtCore.pyqtSlot()
    def writeToDatabase(self, final_pivot):
        self.started.emit()
        Config = configparser.ConfigParser(interpolation=None)
        try:
            config_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'config.ini')
            Config.read_string(open(config_file).read())
            config_values = ConfigSection("Global", Config)
            driver = config_values.get('driver')
            server = config_values.get('server')
            database = config_values.get('database')
            username = config_values.get('username')
            password = config_values.get('password')
            con_str = ("Driver={" + driver + "};"
                       "Server=" + server + ";"
                       "Database=" + database + ";"
                       "UID=" + username + ";"
                       "PWD=" + password + ";")
            cnxn = pyodbc.connect(con_str)
            cursor = cnxn.cursor()
            for i, row in final_pivot.iterrows():
                sql = "INSERT INTO dbo.[FPP_Cleansed_MarketData] (ID,Region,Geography,Category,Company,Year,Adjusted_Sales,Sales_Units,Unit_Type) VALUES (" + "?," * (
                    len(row) - 1) + "?)"
                cursor.execute(sql, tuple(row))
            cnxn.commit()
        except pyodbc.Error as ex:
            sqlstate = ex.args[0]
            if sqlstate == '28000':
                msg = "Error: Error while logging in. Please check login credentials"
                self.message.emit(msg)
                return -1
            elif sqlstate == '23000':
                msg = "Error: Trying to insert duplicate ID"
                self.message.emit(msg)
                return -1
            else:
                msg = "Error: Error encountered while inserting records into Data Warehouse"
                self.message.emit(msg)
                return -1
        finally:
            self.finished.emit()


class Windows_GUI(QtWidgets.QMainWindow):
    def __init__(self, df):
        QtWidgets.QMainWindow.__init__(self)
        self.__threads = []
        self.thread = QtCore.QThread(parent=self)
        self.__threads.append(self.thread)
        self.thread.start()

        self.m_database_worker = DatabaseWorker()
        self.m_database_worker.moveToThread(self.thread)
        self.m_database_worker.started.connect(self.start_animation)
        self.m_database_worker.message.connect(self.show_message)

        wrapper = partial(self.m_database_worker.writeToDatabase, df)
        QtCore.QTimer.singleShot(0, wrapper)

    @QtCore.pyqtSlot()
    def start_animation(self):
        gif_path = "loading.gif"
        self.loading_window = QtWidgets.QDialog()
        self.loading_window.setWindowFlags(QtCore.Qt.SplashScreen)
        self.loading_window.setAttribute(QtCore.Qt.WA_DeleteOnClose)

        movie = QtGui.QMovie(gif_path, cacheMode=QtGui.QMovie.CacheAll)
        movie_label = QtWidgets.QLabel(alignment=QtCore.Qt.AlignCenter)
        movie_label.setFixedSize(100, 75)
        movie_label.setMovie(movie)

        text_label = QtWidgets.QLabel('Data is getting loaded into Data Warehouse. Please wait...')
        text_label.setFont(QtGui.QFont('Arial', 11))

        vbox = QtWidgets.QHBoxLayout(self.loading_window)
        vbox.addWidget(movie_label)
        vbox.addWidget(text_label)

        self.m_database_worker.finished.connect(self.close_window)
        movie.start()
        self.setVisible(False)
        self.loading_window.show()

    def show_message(self, msg):
        try:
            self.msg_box = QtWidgets.QMessageBox()
            self.msg_box.setWindowTitle('Error')
            self.msg_box.setIcon(QtWidgets.QMessageBox.Critical)
            self.msg_box.information(self, 'Message', msg)
            # self.msg_box.setText("Hello Rock the Brahma Bull")
            self.msg_box.move(self.frameGeometry().center())
            self.msg_box.exec_()
        except Exception as e:
            print(e)

    def close_window(self):
        for thread in self.__threads:
            thread.quit()
            thread.wait()
        self.close()


if __name__ == "__main__":
    import sys
    import pandas as pd

    app = QtWidgets.QApplication(sys.argv)
    df = pd.read_excel('test.xlsx')
    df['ID'] = ['Project_id_' + str(i) for i in range(len(df))]
    df = df[['ID', 'Regions', 'Geographies', 'Categories', 'Companies', 'Year', 'Sales', 'Sales_Unit', 'Currency Conversion']]
    w = Windows_GUI(df)
    w.show()
    sys.exit(app.exec_())
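On the blank box: QMessageBox.information() is a static convenience call that shows its own dialog, so combining it with a separately constructed box and a second exec_() produces two dialogs, one of which has no text set and therefore appears blank. A minimal sketch (assuming the existing signal wiring, so this still runs in the GUI thread) that builds and shows a single box:

@QtCore.pyqtSlot(object)
def show_message(self, msg):
    box = QtWidgets.QMessageBox(self)
    box.setWindowTitle('Error')
    box.setIcon(QtWidgets.QMessageBox.Critical)
    box.setText(str(msg))        # the text the static information() call used to carry
    box.exec_()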
I am trying to develop a websocket server with Python and Tornado. This websocket server streams a large database result to the client for visualization.
The problem I am facing is that no client can connect until the long process (send_data) is finished. It is as if only one client can connect at a time.
Is the websocket handler already asynchronous, or should I implement an async process myself?
The following is my code:
import time
import random
import json
import datetime
import os
import sys
import cx_Oracle
import string
import re
import subprocess
import asyncio

from tornado import websocket, web, ioloop, escape
from datetime import timedelta
from random import randint
from pprint import pprint
from tornado.web import RequestHandler

os.environ['ORACLE_HOME'] = 'pathToOracleHome'
os.environ['LD_LIBRARY_PATH'] = "$ORACLE_HOME/lib"


def is_hex(a):
    printable = set(string.printable) - set("\x0b\x0c")
    return any(c not in printable for c in a)


def json_print(d):
    print(json.dumps(d, indent=4))


def printf(format, *args):
    sys.stdout.write(format % args)


def db(database_name='localhost/database'):
    return cx_Oracle.connect('user', 'pwd', database_name)


def query_db(query, args=(), one=False):
    cur = db().cursor()
    cur.arraysize = 1500
    cur.execute(query, args)
    return cur


class SummaryWebSocketHandler(websocket.WebSocketHandler):
    clients = []

    def check_origin(self, origin):
        return True

    def on_message(self, message):
        print('message received')

    def closeDbConn(self, cur):
        cur.connection.close()

    def query(self, sql):
        cursor = query_db(sql)
        self.send_data(cursor)

    ### THIS IS THE LONG PROCESS ###
    def send_data(self, cur):
        results = {}
        columns = [column[0] for column in cur.description]
        total = 0
        while True:
            Res = []
            rows = cur.fetchmany()
            if rows == []:
                print('no more rows')
                break
            for row in rows:
                results = {}
                for i, value in enumerate(row):
                    if value == None:
                        value = '-'
                    results[cur.description[i][0]] = value
                Res.append(results)
            self.write_message(json.dumps(Res))
            total = total + len(rows)
            print('total rows sent', total)
        self.write_message("finished sending all data")
        self.on_close(cur)

    def open(self, table):
        print('Connection established. \n')
        print('Query string ' + table + '\n')
        p = re.compile(r'fields=')
        m = p.match(table)
        matches = table.split("&")
        print(matches)
        param_string = ''
        params = []
        if matches:
            for m in matches:
                print('m', m)
                param = ''
                items = m.split('=')
                if items[1] != '':
                    param = '--' + items[0] + ' ' + items[1]
                params.append(param)
            param_string = " ".join(params)
        script = "php getStmt.php " + param_string
        print(script)
        proc = subprocess.Popen(script, shell=True, stdout=subprocess.PIPE)
        sql = proc.stdout.read()
        print(sql)
        self.query(sql)

    def on_close(self, cursor):
        print('Connection closed.')
        cursor.close()


settings = {'auto_reload': True, 'debug': True}

if __name__ == "__main__":
    print("Starting websocket server program. Awaiting client requests to open websocket ...")
    application = web.Application([(r"/\/table\/(.*)", SummaryWebSocketHandler),
                                   ],
                                  **settings)
    application.listen(3001)
    ioloop.IOLoop.instance().start()
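To the async question: Tornado's handlers all share one event loop, so a long synchronous send_data blocks every other connection even though the transport is a websocket. A hedged sketch of one approach (assuming Tornado >= 5.1, where open() may be a coroutine; build_sql is a hypothetical helper standing in for the getStmt.php step; query_db, json and websocket come from the code above) that pushes the blocking Oracle fetches to a thread pool so new clients can still connect while rows stream out:

from concurrent.futures import ThreadPoolExecutor
from tornado.ioloop import IOLoop

blocking_pool = ThreadPoolExecutor(max_workers=4)

class AsyncSummaryHandler(websocket.WebSocketHandler):
    async def open(self, table):
        sql = build_sql(table)              # hypothetical helper wrapping the getStmt.php call
        cur = await IOLoop.current().run_in_executor(blocking_pool, query_db, sql)
        columns = [col[0] for col in cur.description]
        while True:
            # fetchmany runs in a worker thread, so the event loop stays free for other clients
            rows = await IOLoop.current().run_in_executor(blocking_pool, cur.fetchmany)
            if not rows:
                break
            await self.write_message(json.dumps([dict(zip(columns, row)) for row in rows]))
        await self.write_message("finished sending all data")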
I'm using multiprocessing to run two tasks. Process 1 is an async TCP server which receives commands and has to pass them to process 2 (a while-true loop).
Since I'm using multiprocessing, the processes don't share global variables, so I have to use a queue. But process 1 is a simple asyncore TCP server, and I don't know how to pass the queue object to the handle_read function.
Anyone have an idea? Thanks a lot!
The code I'm trying:
#!/usr/bin/env python3
# import the required modules
import time
import collections
from datetime import datetime
from datetime import timedelta
from threading import Timer
import os
import sys
from smbus import SMBus
from struct import pack, unpack
import threading
from multiprocessing import Process, Queue
import asyncore
import socket

bstatus = 0
lastdata = {}
# checksum byte used for confirmation
chksum = 15


# helper functions
def millis():
    dt = datetime.now() - start_time
    ms = (dt.days * 24 * 60 * 60 + dt.seconds) * 1000 + dt.microseconds / 1000.0
    return ms


def getbit(data, index):
    return (data & (1 << index) != 0)


def parseData(data):
    mydata = {}
    if data[8] == 27:
        mydata['Temp1'] = data[0]
        mydata['Temp2'] = data[1]
        mydata['Temp3'] = data[2]
        mydata['Temp4'] = data[3]
        mydata['HotFlow'] = data[4]
        mydata['ColdFlow'] = data[5]
        mydata['PumpSpeed'] = data[6]
        mydata['PumpStatus'] = getbit(data[7], 0)
        mydata['HeaterStatus'] = getbit(data[7], 1)
        mydata['ArduinoMode'] = getbit(data[7], 2)
        mydata['TimeStamp'] = timezone.now().strftime('%Y-%m-%d %H:%M:%S.%f')[:-3]
        # get the Arduino mode
        arduino_mode = mydata['ArduinoMode']
        parseStatus = True
    else:
        parseStatus = False
    return parseStatus, mydata


# classes implementing the asynchronous server
class dataHandler(asyncore.dispatcher_with_send):
    # this function isn't working
    def __init__(self, sock, queue):
        self.queue = queue
        self.sock = sock

    def handle_read(self):
        data = self.sock.recv(50)
        '''Interpret the commands:
        Operation: turn the pump on/off, turn the heater on/off, change the pump speed
        Mode: switch between automatic and remote mode
        Storage: enable or disable data storage for the trend
        '''
        if (data == b'7'):
            operation_mode = 1
            queue.put(data)
            print(data)
        elif (data == b'8'):
            operation_mode = 0
            queue.put(data)
            print(data)
        try:
            bytescommand = pack('=cb', data, chksum)
            bus.write_block_data(arduinoAddress, ord(data), list(bytescommand))
        except Exception as err:
            print(str(err))
        finally:
            pass
        # print(data)


class Server(asyncore.dispatcher):
    def __init__(self, host, port, queue):
        asyncore.dispatcher.__init__(self)
        self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
        self.bind((host, port))
        self.listen(1)
        self.queue = queue

    def handle_accept(self):
        pair = self.accept()
        if pair is None:
            return
        else:
            sock, addr = pair
            handler = dataHandler(sock, self.queue)  # doesn't work


# functions implementing the main program
def tcpserver(queue):
    server = Server('localhost', 8080, queue)
    asyncore.loop()


def mainloop(stime, ftime, queue):
    prevmillis = stime
    prevmillis2 = ftime
    operation_mode = 1
    while True:
        try:
            currentmillis2 = millis()
            if (queue.empty):
                pass
            else:
                print(queue.get())
            if (currentmillis2 - prevmillis2 > readinterval):
                # do some stuff


# main program
if __name__ == '__main__':
    prevmillis = millis()     # timer for requesting data from the Arduino
    prevmillis2 = prevmillis  # timer for sending to the database
    # create Queue
    queue = Queue()
    p1 = Process(target=tcpserver, args=(queue,))
    p1.start()
    p2 = Process(target=mainloop, args=(prevmillis, prevmillis2, queue,))
    p2.start()
    strstatus = 'Server running'
    print(strstatus)
In mainloop you don't test the return value of queue.empty, you test the method object itself. That object is always truthy, so the queue looks permanently empty. Change it to a function call:
def mainloop(stime, ftime, queue):
    prevmillis = stime
    prevmillis2 = ftime
    operation_mode = 1
    while True:
        try:
            currentmillis2 = millis()
            if (queue.empty()):  # Added ()
                pass
            else:
                print(queue.get())
            if (currentmillis2 - prevmillis2 > readinterval):
                # do some stuff
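Separately from the queue.empty() fix, the reason the handler "doesn't work" is that dataHandler.__init__ overrides dispatcher_with_send.__init__ without calling it, so the socket never gets registered with asyncore. A minimal sketch of that side, keeping the queue on self so handle_read can reach it:

class dataHandler(asyncore.dispatcher_with_send):
    def __init__(self, sock, queue):
        asyncore.dispatcher_with_send.__init__(self, sock)   # let asyncore set up the socket and its map
        self.queue = queue

    def handle_read(self):
        data = self.recv(50)
        if data in (b'7', b'8'):
            self.queue.put(data)     # hand the command to the other process
            print(data)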