Access Kafka data in instance variable from Flask endpoints - python

I have written a Kafka consumer in which I capture some metrics inside an instance variable; below is the consumer code.
import json
import uuid
from threading import Thread
from concurrent.futures import ThreadPoolExecutor

from kafka import KafkaConsumer


class Consumer(Thread):
    def __init__(self, kafka_topic, kafka_brokers, group_id):
        Thread.__init__(self)
        self.consumer = KafkaConsumer(kafka_topic, bootstrap_servers=kafka_brokers, group_id=group_id)
        self.metrics = Metrics()

    def process_message(self, message):
        msg_data = json.loads(message.value)
        meeting_id = str(uuid.uuid1())
        metric_response = MetricResponse(meeting_id)
        # perform some task and update the metric response object
        self.metrics.add_metric_response(metric_response)
        print('\nFinished......')

    def get_metrics(self):
        return self.metrics

    def run(self):
        print('Consumer started...')
        with ThreadPoolExecutor(max_workers=10) as executor:
            for message in self.consumer:
                executor.map(self.process_message, (message,))
Now I have written a Flask API to retrieve this metrics variable; I also start the consumer there, but I am getting an empty metrics object.
Below is the API code:
from flask import Flask, Response

app = Flask(__name__)
consumer = Consumer('my-topic', ['localhost:9092'], 'my-group')
consumer.start()

@app.route('/metrics')
def get_metrics():
    metrics = consumer.get_metrics()
    metrics_response = metrics.toJSON()
    return Response(metrics_response, 200, mimetype='application/json')

if __name__ == '__main__':
    app.run(debug=True)
I need help accessing the metrics object from the consumer class. I am new to Python.
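One thing worth checking (hedged, since the rest of the application isn't shown): app.run(debug=True) starts Flask's reloader, which re-imports the module in a second process, so two consumers end up in the same consumer group and the one actually receiving messages may not be in the process that serves /metrics. Below is a minimal sketch of one way to wire this up, reusing the Consumer class above and assuming MetricResponse instances are plain objects that can be serialized with vars():

import json
import threading

from flask import Flask, Response

class Metrics:
    """Shared, lock-protected container written by the consumer thread and read by Flask."""
    def __init__(self):
        self._lock = threading.Lock()
        self._responses = []

    def add_metric_response(self, metric_response):
        with self._lock:
            self._responses.append(metric_response)

    def toJSON(self):
        with self._lock:
            # assumption: MetricResponse instances are serializable via vars()
            return json.dumps([vars(r) for r in self._responses])

app = Flask(__name__)
consumer = Consumer('my-topic', ['localhost:9092'], 'my-group')
consumer.start()

@app.route('/metrics')
def get_metrics():
    return Response(consumer.get_metrics().toJSON(), 200, mimetype='application/json')

if __name__ == '__main__':
    # use_reloader=False keeps the module from being re-imported in a child process,
    # which would otherwise start a second consumer in the same group and leave the
    # serving process looking at a Metrics object that never receives updates.
    app.run(debug=True, use_reloader=False)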

Related

Return value from a pubsub callback function

I'm trying to get a value from a pubsub callback function.
If I print message.data in the callback function I can see the data. I've tried using streaming_pull_future and making a Python class, but with no success.
project = 'project_id'
topic = 'topic'
subscription = "sub"
timeout = 10.0

subscriber = pubsub_v1.SubscriberClient()
subscription_path = subscriber.subscription_path(project, subscription)

def test_callback():
    callback_class = CallBackMethod()
    streaming_pull_future = subscriber.subscribe(
        subscription_path, callback=callback_class.callback()
    )
    time.sleep(30)
    print("streaming", streaming_pull_future.result())
    print("streaming_timeout", streaming_pull_future.result(timeout=timeout))
    decoded_string = streaming_pull_future.decode('utf-8')
    print("decoded_string", decoded_string)
    # other stuff with string
# other stuff with string
class CallBackMethod:
    def __init__(self, data=None):
        self.data = data

    @classmethod
    def callback(cls, message=None):
        info("Got message {}".format(message))
        if message is None:
            return
        return cls(message.data)
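For reference, a common pattern here is to have the callback push each message's payload onto a queue and consume that queue from the calling code, instead of trying to return a value from the callback. A minimal sketch, assuming the google-cloud-pubsub client and that acknowledging each message as soon as its payload is captured is acceptable:

import queue

from google.cloud import pubsub_v1

project = 'project_id'
subscription = 'sub'

subscriber = pubsub_v1.SubscriberClient()
subscription_path = subscriber.subscription_path(project, subscription)

received = queue.Queue()

def callback(message):
    # Runs on the subscriber's worker threads; hand the payload to the main thread.
    received.put(message.data.decode('utf-8'))
    message.ack()

streaming_pull_future = subscriber.subscribe(subscription_path, callback=callback)
try:
    # Block the main thread until one payload arrives (or give up after 30 s).
    decoded_string = received.get(timeout=30)
    print("decoded_string", decoded_string)
finally:
    streaming_pull_future.cancel()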

How to get a return value of a function in python

import gnsq

class something():
    def __init__(self, pb=None, pk=None, address=None):
        self.pb = pb
        self.pk = pk
        self.address = address

    def connect(self):
        consumer = gnsq.Consumer(self.pb, 'ch', self.address)

        @consumer.on_message.connect
        def response_handler(consumer, msg):
            return msg.body

        consumer.start()
How would I get the return value of response_handler so that, in turn, I could pass it to the parent function connect()? That way, when I call connect(), it would return the value of message.body from the inner function.
I would think something like the following:
import gnsq

class something():
    def __init__(self, pb=None, pk=None, address=None):
        self.pb = pb
        self.pk = pk
        self.address = address

    def connect(self):
        consumer = gnsq.Consumer(self.pb, 'ch', self.address)

        @consumer.on_message.connect
        def response_handler(consumer, msg):
            return msg.body

        consumer.start()
        return response_handler

nsq = something('pb', 'pk', 'address')
# should print whatever message.body is
print(nsq.connect())
but it's not working. Note: consumer.start() is blocking.
What you're asking doesn't make sense in the context of what the Consumer() really is.
In your connect() method, you set up a consumer, set up a response handler and start the consumer with consumer.start(). From that point onward, whenever there is a message to consume, the consumer will call the handler with that message. Not just once, but again and again.
Your handler may be called many times, and unless the consumer is closed you never know when it will be done, so there's no way your connect() method could return the complete result.
What you could do is have the connect method return a reference to a collection that will at any time contain all the messages collected so far. It would be empty at first, but after some time, could contain all the received messages.
Something like:
import gnsq

class Collector():
    def __init__(self, topic, address):
        self.topic = topic
        self.address = address
        self.messages = []

    def connect(self):
        self.messages = []
        consumer = gnsq.Consumer(self.topic, 'ch', self.address)

        @consumer.on_message.connect
        def response_handler(consumer, msg):
            self.messages.append(msg)

        # note: start() blocks, so this return is only reached once the consumer stops
        consumer.start()
        return self.messages
I don't think this is really how you want to be using this; it would only really make sense if you provide more context on why and how you want to use this output.
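As a rough usage sketch of the Collector above (my addition, not part of the original answer): since consumer.start() blocks, you would run connect() in a background thread and read the shared messages list from the caller, rather than waiting for a return value:

import time
import threading

collector = Collector('topic', 'address')

# connect() blocks inside consumer.start(), so run it in a background thread
# and observe collector.messages from the main thread instead.
worker = threading.Thread(target=collector.connect, daemon=True)
worker.start()

while True:
    time.sleep(5)
    print('collected so far:', [msg.body for msg in collector.messages])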

Python: Asyncio NATS.io blocking

I am having trouble making Python asyncio NATS.io run sequentially. I have two classes: Account and Bridge.
Account holds the application logic and communicates through Bridge with an external service via NATS.io.
Main file:
loop = asyncio.get_event_loop()
account = Account(loop, options)
asyncio.async(account.start())
loop.run_forever()
Account class:
class Account:
    bridge = Bridge()

    def connect(self):
        result = self.bridge.connect(self.id)
        return result
Bridge class:
def connect(self, account_id):
    data = None
    try:
        response = yield from self.nc.timed_request("bank.account.connect",
                                                    BankRequest(
                                                        method="connect",
                                                        data={...}
                                                    ), 10)
        data = json.loads(response.data.decode())
    except ErrTimeout:
        status = Messages.REQUEST_TIMED_OUT
    return Result(data=data)
I need to call account.connect() from anywhere inside the Account class and get the result of the connection (sequentially). Right now I'm getting a generator object.
Your connect() methods should probably be coroutines:
class Account:
    bridge = Bridge()  # you probably want to put this in `def __init__(self)`!

    @asyncio.coroutine
    def connect(self):
        result = yield from self.bridge.connect(self.id)
        return result

class Bridge:
    @asyncio.coroutine
    def connect(self, account_id):
        data = None
        try:
            response = yield from self.nc.timed_request("bank.account.connect",
                                                        BankRequest(
                                                            method="connect",
                                                            data={...}
                                                        ), 10)
            data = json.loads(response.data.decode())
        except ErrTimeout:
            status = Messages.REQUEST_TIMED_OUT
        return Result(data=data)
and:
resp = yield from account.connect()
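That yield from call itself has to sit inside another coroutine scheduled on the event loop. A minimal sketch of how the main file could drive it, assuming the same Account(loop, options) constructor as in the question:

import asyncio

loop = asyncio.get_event_loop()
account = Account(loop, options)

@asyncio.coroutine
def main():
    # the caller is a coroutine, so the connect() result is awaited sequentially
    resp = yield from account.connect()
    print(resp)

loop.run_until_complete(main())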

Tornado websocket + Redis

I would like to create a notification system. When the server saves data into the database (Notification model only), it should be sent via a Tornado websocket to the client (browser).
So far I have configured the websocket, but I don't know how to send data to the client.
class WebSocketHandler(tornado.websocket.WebSocketHandler):
    def __init__(self, *args, **kwargs):
        self.id = None
        self.client = None
        super(WebSocketHandler, self).__init__(*args, **kwargs)

    def open(self, *args):
        self.id = self.get_argument("Id")
        self.stream.set_nodelay(True)
        clients[self.id] = {"id": self.id, "object": self}

    def on_message(self, message):
        message = json.loads(message)
        print("Client %s received a message : %s" % (self.id, message))
        self.write_message("message: " + str(message['body']))

    def on_close(self):
        print('closed?')
        if self.id in clients:
            del clients[self.id]

    def check_origin(self, origin):
        return True

    def _connect_to_redis(self):
        logging.info('connect to redis...')
        self._redis_client = tornadoredis.Client(host='localhost', port=6379)
        self._redis_client.connect()

app = tornado.web.Application([
    (r'/socket', WebSocketHandler),
])

parse_command_line()
app.listen(8888)
tornado.ioloop.IOLoop.instance().start()
I guess I need to plug Redis into this. Can someone help me with that?
In case someone needs it, I did it this way:
import tornado.ioloop
import tornado.web
import tornado.websocket
from tornado.options import parse_command_line
from tornado import gen
import logging
import tornadoredis
import json
from urllib.parse import urlparse
from django.core.management.base import NoArgsCommand
from django.conf import settings

logging = logging.getLogger('base.tornado')

# store clients in dictionary..
clients = dict()

# REDIS_URL = 'redis://localhost:6379/'
# REDIS_UPDATES_CHANNEL = 'django_bus'

class WebSocketHandler(tornado.websocket.WebSocketHandler):
    def __init__(self, *args, **kwargs):
        self.client_id = None
        self._redis_client = None
        super(WebSocketHandler, self).__init__(*args, **kwargs)
        self._connect_to_redis()
        self._listen()

    def open(self, *args):
        self.client_id = self.get_argument("Id")
        self.stream.set_nodelay(True)
        clients[self.client_id] = self

    def on_message(self, message):
        """
        :param message (str, not-parsed JSON): data from client (web browser)
        """
        print("on message")

    @gen.coroutine
    def _on_update(self, message):
        """
        Receive a message from Redis when data gets published and send it to the selected client.
        :param message (Redis Message): data from redis
        """
        body = json.loads(message.body)
        if self.client_id == body['client_id']:
            self.write_message(message.body)

    @tornado.gen.engine
    def _listen(self):
        """
        Listen on the channel 'REDIS_UPDATES_CHANNEL'.
        """
        yield tornado.gen.Task(self._redis_client.subscribe, settings.REDIS_UPDATES_CHANNEL)
        self._redis_client.listen(self._on_update)

    def on_close(self):
        """
        When the client disconnects (closes the web browser), shut down the connection for that client.
        """
        if self.client_id in clients:
            del clients[self.client_id]
        self._redis_client.unsubscribe(settings.REDIS_UPDATES_CHANNEL)
        self._redis_client.disconnect()

    def check_origin(self, origin):
        """
        Check if the incoming connection is from a supported domain.
        :param origin (str): Origin/Domain of connection
        """
        return True

    def _connect_to_redis(self):
        """
        Extracts connection parameters from the settings variable 'REDIS_URL' and
        connects the stored client to the Redis server.
        """
        redis_url = settings.REDIS_URL
        parsed = urlparse(redis_url)
        self._redis_client = tornadoredis.Client(host=parsed.hostname, port=parsed.port)
        self._redis_client.connect()

app = tornado.web.Application([
    (r'/socket', WebSocketHandler),
])

class Command(NoArgsCommand):
    def handle_noargs(self, **kwargs):
        logging.info('Started Tornado')
        parse_command_line()
        app.listen(8888)
        tornado.ioloop.IOLoop.instance().start()
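The code above only covers the subscriber side. For completeness, a minimal sketch of the publishing side: on save of the Notification model, publish a payload containing client_id to the same Redis channel that the websocket handler listens on. The app path myapp.models, the plain redis package, and the body field on Notification are assumptions here, not part of the original answer:

import json

import redis
from django.conf import settings
from django.db.models.signals import post_save
from django.dispatch import receiver

from myapp.models import Notification  # hypothetical app/model import path

redis_client = redis.StrictRedis.from_url(settings.REDIS_URL)

@receiver(post_save, sender=Notification)
def publish_notification(sender, instance, created, **kwargs):
    if not created:
        return
    # The websocket handler matches on 'client_id', so include it in the payload.
    payload = json.dumps({
        'client_id': str(instance.client_id),
        'body': instance.body,  # hypothetical field on the Notification model
    })
    redis_client.publish(settings.REDIS_UPDATES_CHANNEL, payload)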

How to pass variable to the Flask function while using threading?

I want to create multiple threads, and each of them should create a Flask app.
I am not sure how to do it, but this is what I have:
import threading
from threading import Thread

from flask import Flask, request

app = Flask(__name__)
app.url_map.strict_slashes = False

@app.route('/api/v1/something/<string:FirstArgument>/<string:SecondArgument>/', methods=['POST'])
def do_it(FirstArgument, SecondArgument):
    request_str = request.get_data().decode('utf-8').strip()
    response = somefunction(mydata.state, request_str)
    return response, 200

def run_app(this_port, mydata):
    currentThread = threading.current_thread()
    mydata.state = some_function_that_returns_6GB_of_data()
    app.run(host='0.0.0.0', port=this_port)

if __name__ == '__main__':
    mydata = threading.local()
    thread1 = Thread(target=run_app, args=(4100, mydata,))
    # thread2 = Thread(target=run_app, args=(4101, mydata,))
    thread1.start()
    # thread2.start()
For now I want to test only one thread, and I don't know how to pass mydata.state to do_it. If I add a new argument (def do_it(FirstArgument, SecondArgument, mydata.state)), Flask complains that it expects this variable to come from the app.route. How can I pass this data to the do_it function?
One more question: will this program pass N instances of state to N threads on N ports?
Or should I do something like this:
def do_it(FirstArgument, SecondArgument):
    request_str = request.get_data().decode('utf-8').strip()
    response = somefunction(mydata.state[threading.get_ident()], request_str)
    return response, 200

def run_app(this_port, mydata):
    currentThread = threading.current_thread()
    mydata.state[threading.get_ident()] = some_function_that_returns_6GB_of_data()
    app.run(host='0.0.0.0', port=this_port)
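One way to sidestep the thread-local problem entirely is to give each thread its own Flask app via a factory and let the route closure capture that thread's state. A minimal sketch, assuming somefunction and some_function_that_returns_6GB_of_data are defined as in the question:

from threading import Thread

from flask import Flask, request

def create_app(state):
    # Each thread builds its own app; the route closure captures that thread's state.
    app = Flask(__name__)
    app.url_map.strict_slashes = False

    @app.route('/api/v1/something/<string:FirstArgument>/<string:SecondArgument>/', methods=['POST'])
    def do_it(FirstArgument, SecondArgument):
        request_str = request.get_data().decode('utf-8').strip()
        return somefunction(state, request_str), 200

    return app

def run_app(this_port):
    state = some_function_that_returns_6GB_of_data()
    create_app(state).run(host='0.0.0.0', port=this_port)

if __name__ == '__main__':
    for port in (4100, 4101):
        Thread(target=run_app, args=(port,)).start()

With this shape, each thread loads its own copy of the state, so serving N ports means N copies of the data in memory.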
