How to run thread with Django how to send data inside views file
from django.shortcuts import render
from django.http.response import HttpResponse
from datetime import datetime
from .pnet import Worker1
global value
value = False
def home(request):
global value
if value == False:
Worker1.start()
print(a)
value = True
today = datetime.today()
return render(request, "home.html")`
pnet.py
import threading
class Worker1(threading.Thread):
def run(self):
a = 10`
from django.shortcuts import render
from django.http.response import HttpResponse
from .models import WorkAccident,Absenteeism,OverTime,Person,Breakdown,MTTR,MTBF,PredictiveMaintenance,Budget
from datetime import datetime
import asyncio
import threading
import time
import snap7
IP = '192.168.75.220'
RACK = 0
SLOT = 1
DB_NUMBER_1 = 1
DB_NUMBER_2 = 2
START_ADDRESS = 0
SIZE_1 = 2
SIZE_2 = 28
System = False
try:
plc = snap7.client.Client()
plc.connect(IP, RACK, SLOT)
System = True
print("Baglı")
except:
print("Baglanmadı")
global value
value = False
async def background_task():
# Do some long-running task
db1 = plc.db_read(DB_NUMBER_1, START_ADDRESS, SIZE_1)
product_status = bytes(db1[0:2])
binary_string = "{:016b}".format(int(product_status.hex(),16))
print(f'PRODUCT BİT: {binary_string}')
await asyncio.sleep(1)
return 'Task completed'
class MyThread(threading.Thread):
def run(self):
result = asyncio.run(background_task())
# Do something with the result, such as saving it to a database
# Create your views here.
async def home(request):
##u = WorkAccident.objects.all()
##for val in u :
## print(val.date)
asyncio.create_task(background_task())
today = datetime.today()
##fault = list(filter(lambda item: item is not None, u.values()))
data = {
"workaccident": WorkAccident.objects.filter(date__month=today.month, date__year=today.year),
"absenteeism" : Absenteeism.objects.filter(date__month=today.month, date__year=today.year),
"overtime" : OverTime.objects.filter(date__month=today.month, date__year=today.year)
}
return render(request, "home.html", data)
Related
I need to call my class >> CotarMoedas another python file.
But I need to get return of cons live stream as result.
#https://www.youtube.com/watch?v=pxyaENuoZUM&list=PL9ATnizYJ7f8_opOpLnekEZNsNVUVbCZN&index=47
import websocket
import json
import pandas as pd
from sqlalchemy import create_engine
from threading import Thread
engine = create_engine('sqlite:///COINS.db')
class CotarMoedas:
def __init__(self):
self.endpoint = 'wss://stream.binance.com:9443/ws/!miniTicker#arr'
tr = Thread(target=self.call_ws)
tr.start()
def df_import(self, data):
#Pegar os valores dos pares casados com USDT
df_ = pd.DataFrame(data)
df_ = df_[df_['s'].str.endswith('USDT')]
df_.c = df_.c.astype(float)
final = df_[['s','E','c']]
for i in range(len(final)):
row_ = final[i:i+1]
#print(row_)
row_[['E','c']].to_sql(row_.s.values[0], engine, index=False, if_exists='append')
def best_conin(self):
engine = create_engine('sqlite:///COINS.db')
dfs = pd.read_sql("""SELECT name FROM sqlite_master WHERE type='table'""", engine)
tables = dfs.name.to_list()
reutrns = []
for table in tables:
df__ = pd.read_sql(table,engine)
# ret_ = (df__.tail(20).c.pct_change() + 1).prod() - 1 # Os 20 mais recentes
ret_ = (df__.c.pct_change() + 1).prod() - 1
reutrns.append(ret_)
rst = pd.Series(reutrns).nlargest(10)
rst.sort_values()
bv = rst.to_dict()
ls = []
for k, v in bv.items():
ls.append([tables[k],v])
tt = print(tables[k],v)
return tt
def on_close(self,ws):
print('Conexão encerrada!')
def on_message(self, ws, message):
out = json.loads(message)
#print(out)
df_import(out)
print('============')
best_conin()
def call_ws(self):
ws = websocket.WebSocketApp(self.endpoint, on_message=self.on_message, on_close=self.on_close)
ws.run_forever()
But I traying call it like this
from videos_algovibes.cotacao import CotarMoedas
rst = CotarMoedas()
vai = rst.best_conin()
if __name__ == "__main__":
print(vai)
I'm simplifying the code :
#https://www.youtube.com/watch?v=pxyaENuoZUM&list=PL9ATnizYJ7f8_opOpLnekEZNsNVUVbCZN&index=47
import websocket
import json
import pandas as pd
from sqlalchemy import create_engine
from threading import Thread
import threading
class CotarMoedas:
def __init__(self):
self.endpoint = 'wss://stream.binance.com:9443/ws/!miniTicker#arr'
#tr = Thread(target=self.call_ws)
#tr.start()
self.call_ws()
def best_conin(self):
print("Best coin")
def on_close(self,ws):
print('Conexão encerrada!')
def on_message(self, ws, message):
def run(*args):
print(message)
print("Message received...")
threading.Thread(target=run).start()
def call_ws(self):
def _call_ws():
ws = websocket.WebSocketApp(self.endpoint, on_message=self.on_message, on_close=self.on_close)
ws.run_forever()
threading.Thread(target=_call_ws).start()
https://youtu.be/pDslZbvO7Hs
As you can see in the video.
Threading could be handled this way.
Good luck with SQLAlchemy, Sessions, etc.
Im trying to edit this project in Python to have HP ILO exporter for Prometheus, so far I read a few articles here on stackoverflow and tried to implement some functionalities, eventually I came up to partialy working script but the hostname is not changing after first request, is there a way to dump collector?
I have tried it with try&except but it just does not work.
The goal is to use curl like this
curl localhost:9116/metrics?hostname=ip
And what will happen if there will be 10 requests at the same time with different hostname? Should it create somekind of a queue?
Can someone help me? Thanks
Original Project : https://github.com/JackWindows/ilo-exporter
My code :
#!/usr/bin/env python
import collections
import os
import time
import flask
import redfish
import waitress
from flask import Flask
from prometheus_client import make_wsgi_app
from prometheus_client.core import GaugeMetricFamily, REGISTRY
from werkzeug.middleware.dispatcher import DispatcherMiddleware
from flask import request
from time import sleep
from flask import Flask, Response, request
import traceback
from werkzeug.wsgi import ClosingIterator
class AfterResponse:
def __init__(self, app=None):
self.callbacks = []
if app:
self.init_app(app)
def __call__(self, callback):
self.callbacks.append(callback)
return callback
def init_app(self, app):
# install extension
app.after_response = self
# install middleware
app.wsgi_app = AfterResponseMiddleware(app.wsgi_app, self)
def flush(self):
for fn in self.callbacks:
try:
fn()
except Exception:
traceback.print_exc()
class AfterResponseMiddleware:
def __init__(self, application, after_response_ext):
self.application = application
self.after_response_ext = after_response_ext
def __call__(self, environ, after_response):
iterator = self.application(environ, after_response)
try:
return ClosingIterator(iterator, [self.after_response_ext.flush])
except Exception:
traceback.print_exc()
return iterator
class ILOCollector(object):
def __init__(self, hostname: str, port: int = 443, user: str = 'admin', password: str = 'password') -> None:
self.ilo = redfish.LegacyRestClient(base_url=hostname, username=user, password=password)
self.ilo.login()
system = self.ilo.get('/redfish/v1/Systems/1/').obj
self.label_names = ('hostname', 'product_name', 'sn')
self.label_values = (hostname, system.Model, system.SerialNumber.strip())
def collect(self):
embedded_media = self.ilo.get('/redfish/v1/Managers/1/EmbeddedMedia/').obj
smart_storage = self.ilo.get('/redfish/v1/Systems/1/SmartStorage/').obj
thermal = self.ilo.get('/redfish/v1/Chassis/1/Thermal/').obj
power = self.ilo.get('/redfish/v1/Chassis/1/Power/').obj
g = GaugeMetricFamily('hpilo_health',
'iLO health status, -1: Unknown, 0: OK, 1: Degraded, 2: Failed.',
labels=self.label_names + ('component',))
def status_to_code(status: str) -> int:
status = status.lower()
ret = -1
if status == 'ok':
ret = 0
elif status == 'warning':
ret = 1
elif status == 'failed':
ret = 2
return ret
g.add_metric(self.label_values + ('embedded_media',), status_to_code(embedded_media.Controller.Status.Health))
g.add_metric(self.label_values + ('smart_storage',), status_to_code(smart_storage.Status.Health))
for fan in thermal.Fans:
g.add_metric(self.label_values + (fan.FanName,), status_to_code(fan.Status.Health))
yield g
g = GaugeMetricFamily('hpilo_fan_speed', 'Fan speed in percentage.',
labels=self.label_names + ('fan',), unit='percentage')
for fan in thermal.Fans:
g.add_metric(self.label_values + (fan.FanName,), fan.CurrentReading)
yield g
sensors_by_unit = collections.defaultdict(list)
for sensor in thermal.Temperatures:
if sensor.Status.State.lower() != 'enabled':
continue
reading = sensor.CurrentReading
unit = sensor.Units
sensors_by_unit[unit].append((sensor.Name, reading))
for unit in sensors_by_unit:
g = GaugeMetricFamily('hpilo_temperature', 'Temperature sensors reading.',
labels=self.label_names + ('sensor',), unit=unit.lower())
for sensor_name, sensor_reading in sensors_by_unit[unit]:
g.add_metric(self.label_values + (sensor_name,), sensor_reading)
yield g
g = GaugeMetricFamily('hpilo_power_current', 'Current power consumption in Watts.', labels=self.label_names,
unit='watts')
g.add_metric(self.label_values, power.PowerConsumedWatts)
yield g
label_values = self.label_values + (str(power.PowerMetrics.IntervalInMin),)
g = GaugeMetricFamily('hpilo_power_average', 'Average power consumption in Watts.',
labels=self.label_names + ('IntervalInMin',), unit='watts')
g.add_metric(label_values, power.PowerMetrics.AverageConsumedWatts)
yield g
g = GaugeMetricFamily('hpilo_power_min', 'Min power consumption in Watts.',
labels=self.label_names + ('IntervalInMin',), unit='watts')
g.add_metric(label_values, power.PowerMetrics.MinConsumedWatts)
yield g
g = GaugeMetricFamily('hpilo_power_max', 'Max power consumption in Watts.',
labels=self.label_names + ('IntervalInMin',), unit='watts')
g.add_metric(label_values, power.PowerMetrics.MaxConsumedWatts)
yield g
# Create Flask app
app = Flask('iLO Exporter')
#app.route('/')
def root():
return '''<html>
<head><title>iLO Exporter</title></head>
<body>
<h1>iLO Exporter</h1>
<p><a href='/metrics'>Metrics</a></p>
</body>
</html>'''
AfterResponse(app)
#app.after_response
def say_hi():
print("hi")
#app.route("/metrics")
def home():
try:
REGISTRY.unregister(collector)
except:
print("An exception occurred")
pass
port = int(os.getenv('ILO_PORT', 443))
user = os.getenv('ILO_USER', 'admin')
password = os.getenv('ILO_PASSWORD', 'password')
hostname = request.args.get('hostname')
app.wsgi_app = DispatcherMiddleware(app.wsgi_app, {
'/metrics': make_wsgi_app()
})
collector = ILOCollector(hostname, port, user, password)
REGISTRY.register(collector)
if __name__ == '__main__':
exporter_port = int(os.getenv('LISTEN_PORT', 9116))
waitress.serve(app, host='0.0.0.0', port=exporter_port)
I need a way to make my code loop through the onMessage() function once a new message is detected by the server. Currently, it will send a response once then nothing will happen.
If I was to loop back through the onMessage() function manually it would just use the same stored response as the first time and repeatedly output a similar response.
PLEASE IGNORE RANDOM VARIABLES IN CODE: I removed the long logic that calculated the response since it should be irrelevant here.
from fbchat import Client, log
from fbchat.models import *
import fileinput
import nltk
from nltk.stem.lancaster import LancasterStemmer
stemmer = LancasterStemmer()
import numpy
import tflearn
import tensorflow
import random
import json
import pickle
class chatbot(Client):
def onMessage(self, author_id=None, message_object=None, thread_id=None, thread_type=ThreadType.USER, **kwargs):
toggle = client.fetchThreadMessages(thread_id=client.uid, limit=3) # client.uid means its our own acc
for message in toggle:
pText=message.text.lower()
print("test1")
self.markAsRead(author_id)
log.info("Message {} from {} in {}".format(message_object, thread_id, thread_type))
msgText = message_object.text.lower()
print("test2")
def getResponse(self, message, model, words, labels, data, thread_id, thread_type, author_id):
print("test9")
while True:
results = model.predict([self.bag_of_words(message, words)])
results_index = numpy.argmax(results)
tag = labels[results_index]
for tg in data["intents"]:
if tg['tag'] == tag:
responses = tg['responses']
finalResponse = random.choice(responses)
run_once = 0
while 1:
if run_once == 0:
self.sendMessage(finalResponse, thread_id, thread_type, author_id)
run_once = 1
def sendMessage(self, response, thread_id, thread_type, author_id):
print("test10")
if 1 == 1:# (author_id!=self.uid):
self.send(Message(text=response), thread_id=thread_id, thread_type=thread_type)
self.markAsDelivered(author_id, thread_id)
client = chatbot("********", "****************")
client.listen()
Issue solved by adding changing to bottom of the code to:
while True:
if run_once == 0:
if author_id != 'YOUR USER ID':
self.sendMessage(finalResponse, thread_id, thread_type, author_id)
run_once = 1
print("testA")
return False
return False
return False
def sendMessage(self, response, thread_id, thread_type, author_id):
print("test10")
# (author_id!=self.uid):
print(response)
self.send(Message(text=response), thread_id=thread_id, thread_type=thread_type)
self.markAsDelivered(author_id, thread_id)
client = chatbot("username", "password")
while True:
client.listen()
I am trying to develop a websocket server with Python and tornado. This websocket server streams a large database result to the client for some visualization.
The problem that I am facing is that no client can connect until the long process (send_data) is finished. It is as if only one client can connect at a time.
Is websocket already an async process or should I implement an async process?
The following is my code:
import time
import random
import json
import datetime
import os
import sys
import cx_Oracle
import string
import re
import subprocess
import asyncio
from tornado import websocket, web, ioloop, escape
from datetime import timedelta
from random import randint
from pprint import pprint
from tornado.web import RequestHandler
os.environ['ORACLE_HOME'] = 'pathToOracleHome'
os.environ['LD_LIBRARY_PATH'] = "$ORACLE_HOME/lib"
def is_hex(a):
printable = set(string.printable) - set("\x0b\x0c")
return any(c not in printable for c in a)
def json_print(d):
print(json.dumps(d, indent=4))
def printf (format,*args):
sys.stdout.write (format % args)
def db(database_name='localhost/database'):
return cx_Oracle.connect('user', 'pwd', database_name)
def query_db(query, args=(), one=False):
cur = db().cursor()
cur.arraysize = 1500
cur.execute(query, args)
return cur
class SummaryWebSocketHandler(websocket.WebSocketHandler):
clients = []
def check_origin(self, origin):
return True
def on_message(self, message):
print ('message received')
def closeDbConn(self,cur):
cur.connection.close()
def query(self, sql):
cursor = query_db(sql)
self.send_data(cursor)
### THIS IS THE LONG PROCESS ###
def send_data(self, cur):
results = {}
columns = [column[0] for column in cur.description]
total = 0
while True:
Res = []
rows = cur.fetchmany()
if rows == []:
print('no more rows')
break;
for row in rows:
results = {}
for i, value in enumerate(row):
if value == None:
value = '-'
results[cur.description[i][0]] = value
Res.append(results)
self.write_message(json.dumps(Res))
total = total + len(rows)
print('total rows send', total)
self.write_message("finished sending all data")
self.on_close(cur)
def open(self, table):
print ('Connection established. \n')
print ('Query string '+table+'\n')
p = re.compile(r'fields=')
m = p.match(table)
matches = table.split("&")
print (matches)
param_string = ''
params = []
if matches:
for m in matches:
print('m', m);
param = ''
items = m.split('=')
if items[1] != '':
param = '--'+items[0]+' '+items[1]
params.append(param)
param_string = " ".join(params)
script = "php getStmt.php "+param_string
print (script)
proc = subprocess.Popen(script, shell=True,stdout=subprocess.PIPE)
sql = proc.stdout.read()
print (sql)
self.query(sql)
def on_close(self, cursor):
print ('Connection closed.')
cursor.close()
settings = {'auto_reload': True, 'debug': True}
if __name__ == "__main__":
print ("Starting websocket server program. Awaiting client requests to open websocket ...")
application = web.Application([(r"/\/table\/(.*)",SummaryWebSocketHandler),
]
,**settings)
application.listen(3001)
ioloop.IOLoop.instance().start()
New to Tornado, and Redis
I find someone had this same problem here , tornado-redis: RPOP works but BRPOP doesn't?
but I still do not understand why, and how to resove my problem
code blow work fine
#coding:utf-8
import random
import time
import tornado.web
import tornado.httpserver
import tornado.ioloop
import tornado.options
from uuid import uuid4
# import redis
from tornado.escape import json_encode
import tornado.gen
import tornadoredis
class noticePush(tornado.web.RequestHandler):
def initialize(self):
print 'initialize'
#tornado.web.asynchronous
#tornado.gen.engine
def get(self):
print 'go here'
try:
**uid = self.get_argument('uid')
# key = u'test_comet%s'%uid
key = 'test_comet1'
c = tornadoredis.Client(host='127.0.0.1', port=6379,password='psw')
print key
res = yield tornado.gen.Task(c.blpop, key, 0)**
print res
if res :
self.finish(json_encode(res))
else :
self.finish('None')
except Exception, e :
print e
pass
class Application(tornado.web.Application):
def __init__(self):
handlers = [
(r'/', noticePush)
]
settings = {
'template_path': 'templates',
'static_path': 'static',
'debug': True
}
tornado.web.Application.__init__(self, handlers, **settings)
if __name__ == '__main__':
tornado.options.parse_command_line()
app = Application()
server = tornado.httpserver.HTTPServer(app)
server.listen(8000)
tornado.ioloop.IOLoop.instance().start()
But , I try to use get_argument for the key, blpop never return any data
**uid = self.get_argument('uid')
key = 'test_comet' + uid
c = tornadoredis.Client(host='127.0.0.1', port=6379, password='psw')
print key
res = yield tornado.gen.Task(c.blpop, key, 0)**
print res
if res :
self.finish(json_encode(res))
else :
self.finish('None')
I try to read the tornadoredis code, find the blpop def and I find the reason
def blpop(self, keys, timeout=0, callback=None):
tokens = to_list(keys)
tokens.append(timeout)
self.execute_command('BLPOP', *tokens, callback=callback)
def to_list(source):
if isinstance(source, str):
return [source]
else:
return list(source)
important is
str_key = 'test_comet', type (key) -> str
unicode_key = 'test_comet' + uid , type (key) -> unicode
when I encode the unicode_key.encode('utf-8'), code worked!