Run a function in background using thread in Flask - python

I am trying to implement a limit order book using Flask and I am currently working on the backend. I am new to Flask and not very familiar with how a trading backend works, but I am trying to learn through this small project.
I have created three endpoints in my application, which add an order, remove an order, and return the order status; all three work fine when tested with Postman. Now I am trying to run a background function that continuously checks for new buy/sell orders in a JSON file where all new orders are saved. It picks them up one by one and looks for a match on price: if one user's buy order matches another user's sell order, it processes the trade and stores it in a dict, and I want to return (or otherwise report) every successful order to the user.
Here is my code for the class I have created:
import json
import bisect
import random
import os

class Process(object):
    def __init__(self):
        self.trade_book = []
        self.bid_prices = []
        self.ask_prices = []
        self.ask_book = {}
        self.bid_book = {}
        self.confirm_traded = []
        self.orders_history = {}
        self.traded = False
        self.counter = 0

    def save_userdata(self, order, newId):
        orderid = order['order']['trader'] + "_" + str(newId)
        newJson = {
            "orders": [
                {orderid: order['order']}
            ]
        }
        with open('data_user.json', 'a+') as jsonFile:
            with open('data_user.json', 'r') as readableJson:
                try:
                    jsonObj = json.load(readableJson)
                except Exception:
                    jsonObj = {}
            if jsonObj == {}:
                json.dump(newJson, jsonFile)
            else:
                with open('data_user.json', 'w+') as writeFile:
                    exists = False
                    for item in jsonObj['orders']:
                        if item.get(orderid, None) is not None:
                            item[orderid] = order['order']
                            exists = True
                            break
                    if not exists:
                        jsonObj['orders'].append(newJson['orders'][0])
                    json.dump(jsonObj, writeFile)
        return orderid
    def get_userdata(self):
        with open('data_user.json', 'r') as readableJson:
            return json.load(readableJson)

    def removeOrder(self, orderid):
        order_id = list(orderid.values())[0]
        with open('data_user.json') as data_file:
            data = json.load(data_file)
        newData = []
        for item in data['orders']:
            if item.get(order_id, None) is not None:
                del item[order_id]
            else:
                newData.append(item)
        data['orders'] = newData
        with open('data_user.json', 'w') as data_file:
            json.dump(data, data_file)
        return order_id
    def add_order_to_book(self, order):
        index = list(order.keys())[0]
        book_order = order[index]
        print(index)
        if order[index]['side'] == 'buy':
            book_prices = self.bid_prices
            book = self.bid_book
        else:  # order[index]['side'] == 'sell'
            book_prices = self.ask_prices
            book = self.ask_book
        if order[index]['price'] in book_prices:
            book[order[index]['price']]['num_orders'] += 1
            book[order[index]['price']]['size'] += order[index]['quantity']
            book[order[index]['price']]['order_ids'].append(index)
            book[order[index]['price']]['orders'][index] = book_order
        else:
            bisect.insort(book_prices, order[index]['price'])
            book[order[index]['price']] = {'num_orders': 1,
                                           'size': order[index]['quantity'],
                                           'order_ids': [index],
                                           'orders': {index: book_order}}

    def confirm_trade(self, order_id, timestamp, order_quantity, order_price, order_side):
        trader = order_id.partition('_')[0]
        self.confirm_traded.append({'trader': trader,
                                    'quantity': order_quantity,
                                    'side': order_side,
                                    'price': order_price,
                                    'status': 'Successful'})
        return self.confirm_traded
    def process_trade_orders(self, order):
        self.traded = False
        index = list(order.keys())[0]
        if order[index]['side'] == 'buy':
            book = self.ask_book
            if order[index]['price'] in self.ask_prices:
                remainder = order[index]['quantity']
                while remainder > 0:
                    book_order_id = book[order[index]['price']]['order_ids'][0]
                    book_order = book[order[index]['price']]['orders'][book_order_id]
                    if remainder >= book_order['quantity']:
                        self.trade_book.append({'order_id': book_order_id,
                                                'timestamp': order[index]['timestamp'],
                                                'price': order[index]['price'],
                                                'quantity': order[index]['quantity'],
                                                'side': book_order['side']})
                        self.confirm_trade(index, order[index]['timestamp'], order[index]['quantity'],
                                           order[index]['price'], order[index]['side'])
                        self.traded = True
                        remainder = remainder - book_order['quantity']
                        self.save_historty_orders(index, order[index])
                        break
                    else:
                        self.traded = True
                        self.trade_book.append({'order_id': index,
                                                'timestamp': order[index]['timestamp'],
                                                'price': order[index]['price'],
                                                'quantity': order[index]['quantity'],
                                                'side': order[index]['side']})
                        self.confirm_trade(index, order[index]['timestamp'], order[index]['quantity'],
                                           order[index]['price'], order[index]['side'])
                        self.save_historty_orders(index, order[index])
                        break
            else:
                self.add_order_to_book(order)
                self.save_historty_orders(index, order[index])
        else:  # order[index]['side'] == 'sell'
            book = self.bid_book
            if order[index]['price'] in self.bid_prices:
                remainder = order[index]['quantity']
                while remainder > 0:
                    book_order_id = book[order[index]['price']]['order_ids'][0]
                    book_order = book[order[index]['price']]['orders'][book_order_id]
                    if remainder >= book_order['quantity']:
                        self.trade_book.append({'order_id': book_order_id,
                                                'timestamp': order[index]['timestamp'],
                                                'price': order[index]['price'],
                                                'quantity': order[index]['quantity'],
                                                'side': order[index]['side']})
                        self.traded = True
                        self.confirm_trade(index, order[index]['timestamp'], order[index]['quantity'],
                                           order[index]['price'], order[index]['side'])
                        remainder = remainder - book_order['quantity']
                        self.save_historty_orders(index, order[index])
                        break
                    else:
                        self.traded = True
                        self.trade_book.append({'order_id': book_order_id,
                                                'timestamp': order[index]['timestamp'],
                                                'price': order[index]['price'],
                                                'quantity': order[index]['quantity'],
                                                'side': order[index]['side']})
                        self.confirm_trade(index, order[index]['timestamp'], order[index]['quantity'],
                                           order[index]['price'], order[index]['side'])
                        self.save_historty_orders(index, order[index])
                        break
            else:
                self.add_order_to_book(order)
                self.save_historty_orders(index, order[index])
In app.py I create an object of this Process class and call process_trade_orders inside a function processing():
from flask import Flask

app = Flask(__name__)
app.config['DEBUG'] = True

newUser = Process()
succorder = Success()  # Success is another class of mine, defined elsewhere
# sched = BackgroundScheduler()

def generate_orderid():
    num = 0
    while num < 1000:
        yield num
        num = num + 1

genid = generate_orderid()
proc = Process()
sucorder = Success()

# Processing orders to find if they have a match
def processing():
    get_orders_data = proc.get_userdata()
    print(get_orders_data)
    print("\n")
    for data in get_orders_data['orders']:
        index = list(data.keys())[0]
        if data[index]['status'] == 'Successful':
            sucorder.add_trader_orders(data[index], index)
        else:
            proc.process_trade_orders(data)

# sched = BackgroundScheduler()
# sched.add_job(func=processing, trigger="interval", seconds=2)
# sched.start()
I did use APScheduler's BackgroundScheduler for this, but I want to use a thread instead. I was thinking of running a worker thread as a daemon in an infinite loop that calls processing() in app.py every few seconds; whenever it finds successful orders, it would hand the list of dicts back to the main thread, and for every new match I could return a response (or notify the user some other way) about the successful order getting matched.
Note that this will run at short intervals, about 5 seconds, while multiple new orders keep being added, so the checks run continuously and asynchronously, and I am not sure how I will return those values. I am just confused, so I would be grateful for any help.

If you want a function to run in the background on its own thread, just use the threading module, like this:
from threading import Thread

def bg_func():
    doSomething()

t = Thread(target=bg_func)
t.start()  # starts the function and continues, even if the function still runs

doSomethingelseAtSameTime()  # runs alongside bg_func
You can also have multiple background threads.
Check the documentation for more info.
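For the polling use case in the question, a minimal sketch could look like this (it assumes the processing() function from app.py above; the queue.Queue is one possible way, an assumption on my part, to hand matched orders back to request handlers):

import threading
import time
import queue

matched_orders = queue.Queue()  # processing()/confirm_trade could push matches here

def poll_orders(interval=5):
    # daemon loop: re-check the order file every few seconds
    while True:
        processing()
        time.sleep(interval)

worker = threading.Thread(target=poll_orders, daemon=True)
worker.start()  # daemon=True means the thread dies with the Flask process

A Flask endpoint can then drain matched_orders (queue.Queue is thread-safe) and return the confirmed trades to the user. If you run the Flask dev server with the debug reloader enabled, guard the thread start so it does not run twice.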

Related

Multiprocessing and relationship traversal?

I am trying to implement multiprocessing to speed up traversing a relationship graph. I want to capture items whose total is less than 1000. If a parent is over 1000, its children are processed until there are no more to check.
I've mocked up an illustration showing that ThreadPoolExecutor only processes the initial items provided to the class, even while the class.search_queue_list list is still populated. I also tried using a Queue instead of a list, with similar results. Synchronous processing works as expected for both the list and the Queue. Is there a way to make multiprocessing work here when the initial array of items can change?
from concurrent.futures import ThreadPoolExecutor
from queue import Queue
from time import sleep

dummy_data = {
    'id1': {'total': 1001, 'children': ['id101', 'id102']},  # over 1000, children will be processed
    'id2': {'total': 999, 'children': ['id201', 'id202']},   # under 1000, children won't be processed
    'id101': {'total': 501, 'children': ['more_children']},
    'id102': {'total': 500, 'children': ['more_children']},
    'id201': {'total': 499, 'children': ['more_children']},
    'id202': {'total': 500, 'children': ['more_children']},
}

class SearchDummy(object):
    def __init__(self, start_list):
        # with list
        self.search_queue_list = start_list
        # with Queue
        self.search_queue_queue = Queue()
        for item in self.search_queue_list:
            self.search_queue_queue.put(item)
        self.good_ids = []

    def get_total(self, search_id):
        # artificial delay
        sleep(0.5)
        return dummy_data[search_id]['total']

    def get_children(self, search_id):
        # artificial delay
        sleep(0.5)
        return dummy_data[search_id]['children']
    # START LIST
    def current_check_list(self):
        # get first element in search_queue_list
        current_id = self.search_queue_list.pop(0)
        # check if current_id's total is over 1000
        if self.get_total(current_id) <= 1000:
            self.good_ids.append(current_id)
        else:
            # append children to search_queue_list
            self.search_queue_list.extend(self.get_children(current_id))

    def search_list(self):
        while self.search_queue_list:
            self.current_check_list()

    def multi_search_list(self):
        with ThreadPoolExecutor() as e:
            while self.search_queue_list:
                e.submit(self.current_check_list)
    # END LIST

    # START QUEUE
    def current_check_queue(self):
        # get item from search_queue_queue
        current_id = self.search_queue_queue.get()
        # check if current_id's total is over 1000
        if self.get_total(current_id) <= 1000:
            self.good_ids.append(current_id)
        else:
            # put children in search_queue_queue
            for child in self.get_children(current_id):
                self.search_queue_queue.put(child)

    def search_queue(self):
        while not self.search_queue_queue.empty():
            self.current_check_queue()

    def multi_search_queue(self):
        with ThreadPoolExecutor() as e:
            while not self.search_queue_queue.empty():
                e.submit(self.current_check_queue)
    # END QUEUE
# synchronous list
s = SearchDummy(['id1', 'id2'])
s.search_list()
print('List output', s.good_ids)  # returns ['id101', 'id102', 'id2']
print('Remaining list size', len(s.search_queue_list))  # returns 0

# synchronous queue
s = SearchDummy(['id1', 'id2'])
s.search_queue()
print('Queue output', s.good_ids)  # returns ['id101', 'id102', 'id2']
print('Remaining queue size', s.search_queue_queue.qsize())  # returns 0

# multiprocessing list
s = SearchDummy(['id1', 'id2'])
s.multi_search_list()
print('Multi list output', s.good_ids)  # returns ['id2']
print('Multi list remaining', s.search_queue_list)  # returns ['id101', 'id102']

# multiprocessing queue
s = SearchDummy(['id1', 'id2'])
s.multi_search_queue()
print('Multi queue output', s.good_ids)  # returns ['id2']
print('Multi queue remaining', list(s.search_queue_queue.queue))  # returns ['id101', 'id102']
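One way to keep the pool fed as the work list grows (a sketch against the SearchDummy class above; check() and multi_search() are names introduced here, not part of the original) is to track the futures themselves and resubmit children as they are discovered, instead of racing the executor against the shared list:

from concurrent.futures import ThreadPoolExecutor, FIRST_COMPLETED, wait

def check(s, search_id):
    # returns (is_good, payload): either the id itself or its children
    if s.get_total(search_id) <= 1000:
        return True, search_id
    return False, s.get_children(search_id)

def multi_search(s, start_ids):
    good_ids = []
    with ThreadPoolExecutor() as e:
        pending = {e.submit(check, s, i) for i in start_ids}
        while pending:  # loop until no futures are outstanding
            done, pending = wait(pending, return_when=FIRST_COMPLETED)
            for f in done:
                is_good, payload = f.result()
                if is_good:
                    good_ids.append(payload)
                else:
                    # resubmit children as soon as they are discovered
                    pending |= {e.submit(check, s, child) for child in payload}
    return good_ids

print(multi_search(SearchDummy(['id1', 'id2']), ['id1', 'id2']))

The while self.search_queue_list loop in multi_search_list exits as soon as the workers have popped the initial two items, before any of them has had a chance to append children; tracking the pending futures avoids that race.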

Python concurrent.future not running in Parallel

I am writing code to query CMDB data from ServiceNow through REST API calls using Python. To get results faster I am using the concurrent.futures module to run the queries in parallel.
Below is the code and output:
import requests
import os
import base64
import threading
import concurrent.futures
import datetime

class ServiceNowAPI:
    __ServiceNowUserName = os.environ.get('ServiceNow_USERNAME')
    __ServiceNowPassword = None
    __ServiceNowPasswordProd = None
    __BASE_URL = None
    __WSI_URL = 'https://esiocecr.contoso.com:9443/rsrv_servicenow-outbound/'
    __ServiceNow_Cert_Path = r'C:\Certificates\OSMCert.pem'
    # ServiceNow API URL paths
    __GET_CI_DETAILS = "/api/now/table/cmdb_ci_circuit?sysparm_query=u_port_circuit_idLIKE__CINAME__^ORnameLIKE__CINAME__&sysparam_fields=*"

    def __init__(self, ServiceNowEnvironment):
        self.ServiceNowEnvironment = ServiceNowEnvironment
        IsProd = ServiceNowEnvironment.lower() in ("prod", "production")
        self.__ServiceNowPassword = os.environ.get('ServiceNow_PROD_PWD') if IsProd else os.environ.get('ServiceNow_EAGLE_PWD')
        self.__BASE_URL = "https://contososervicenow.service-now.com" if IsProd else "https://contosoeagle.service-now.com"
        CredsPair = "{0}:{1}".format(self.__ServiceNowUserName, self.__ServiceNowPassword)
        CredBytes = CredsPair.encode('ascii')
        Base64Creds = base64.b64encode(CredBytes).decode('utf-8')
        self.__Authorization = "Basic {0}".format(Base64Creds)

    def _GetRouterName(self, RouterLink):
        RouterName = ''
        with requests.Session() as ServiceNowCall:
            ServiceNowCall.headers.update({'Authorization': self.__Authorization,
                                           'content-type': 'application/json',
                                           'DP_EXTERNAL_URL': RouterLink})
            ServiceNowCall.cert = self.__ServiceNow_Cert_Path
            ServiceNowCall.verify = self.__ServiceNow_Cert_Path
            with ServiceNowCall.get(self.__WSI_URL) as ResponseObject:
                ResponseJSON = ResponseObject.json()
                Results = ResponseJSON['result']
                RouterName = Results['name']
        return RouterName
    def _GetCircuitCIDetails(self, CircuitID):
        print('Started for ' + CircuitID + ' at ' + datetime.datetime.now().strftime("%d-%b-%Y %H:%M:%S.%f"))
        URL = "{0}{1}".format(self.__BASE_URL, self.__GET_CI_DETAILS.replace('__CINAME__', CircuitID))
        with requests.Session() as ServiceNowCall:
            ServiceNowCall.headers.update({'Authorization': self.__Authorization,
                                           'content-type': 'application/json',
                                           'DP_EXTERNAL_URL': URL})
            ServiceNowCall.cert = self.__ServiceNow_Cert_Path
            ServiceNowCall.verify = self.__ServiceNow_Cert_Path
            with ServiceNowCall.get(self.__WSI_URL) as ResponseObject:
                ResponseJSON = ResponseObject.json()
                AllRecords = list(ResponseJSON['result'])
                ActiveRecord = [rec for rec in AllRecords if rec['u_lifecycle_status'] != 'end of life'][0]
                Router_Name = self._GetRouterName(ActiveRecord['u_router_name']['link'])
                Results = {
                    'Name': ActiveRecord['name'],
                    'CarrierName': ActiveRecord['u_carrier_name'],
                    'NetworkType': ActiveRecord['u_network_type'],
                    'NetworkSubType': ActiveRecord['u_network_sub_type'],
                    'RouterName': Router_Name,
                    'PortCircuitID': ActiveRecord['name'],
                    'AccessCircuitID': ActiveRecord['u_port_circuit_id']
                }
                print('Finished ' + CircuitID + ' at ' + datetime.datetime.now().strftime("%d-%b-%Y %H:%M:%S.%f"))
                yield Results

    def GetCIDetails(self, CICSV):
        CircuitIDList = [Circuit.strip() for Circuit in CICSV.split(',')]
        CircuitDetails = []
        with concurrent.futures.ThreadPoolExecutor() as executor:
            CIDetailsResult = {executor.submit(self._GetCircuitCIDetails, CircuitID): CircuitID
                               for CircuitID in CircuitIDList}
            for future in concurrent.futures.as_completed(CIDetailsResult):
                CircuitCI = CIDetailsResult[future]
                try:
                    CurrentResult = future.result()
                except Exception:
                    ErrorResult = dict({'Name': CircuitCI, 'CarrierName': 'NA', 'NetworkType': 'NA',
                                        'NetworkSubType': 'NA', 'RouterName': 'NA',
                                        'PortCircuitID': 'Error', 'AccessCircuitID': 'Error'})
                    CircuitDetails.extend(ErrorResult)
                else:
                    CircuitDetails.extend(CurrentResult)
        return CircuitDetails

if __name__ == "__main__":
    ServiceNowAPIClass = ServiceNowAPI('NONPROD')
    CIDetails = ServiceNowAPIClass.GetCIDetails('Circuit1,Circuit2')
    print(CIDetails)
Output:
Started for Circuit1 at 30-Apr-2022 13:40:06.784841
Finished Circuit1 at 30-Apr-2022 13:40:09.749164
Started for Circuit2 at 30-Apr-2022 13:40:09.751166
Finished Circuit2 at 30-Apr-2022 13:40:12.479171
[{'Name': 'Circuit1', 'CarrierName': 'CenturyLink', 'NetworkType': 'EU-BTM', 'NetworkSubType': 'N/A', 'RouterName': 'RT1234ABCD03', 'PortCircuitID': 'Circuit1', 'AccessCircuitID': 'Circuit1'}, {'Name': 'Circuit2', 'CarrierName': 'Verizon', 'NetworkType': 'DPS-NA', 'NetworkSubType': 'N/A', 'RouterName': 'RT12345678ABC', 'PortCircuitID': 'Circuit2', 'AccessCircuitID': 'Circuit2'}]
However, as you can see, the executions are not happening in parallel: the queries for each circuit finish one after another.
How can I fix this so that _GetCircuitCIDetails(self, CircuitID) runs against all CircuitIDs in parallel?
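A likely cause worth checking (an observation from the posted code, not a verified fix): _GetCircuitCIDetails contains a yield, which makes it a generator function. The worker thread therefore only creates a generator object and finishes immediately; the actual HTTP request runs when the generator is consumed by CircuitDetails.extend(CurrentResult) in the main thread, one future at a time. A minimal demonstration of the effect:

import time
from concurrent.futures import ThreadPoolExecutor, as_completed

def gen_task(name):
    time.sleep(1)  # simulated request; deferred until the generator is consumed
    yield {'Name': name}

with ThreadPoolExecutor() as ex:
    futures = [ex.submit(gen_task, n) for n in ('Circuit1', 'Circuit2')]
    for f in as_completed(futures):
        print(list(f.result()))  # both sleeps happen here, serially, in the main thread

If that is the issue, replacing yield Results with return Results (and appending rather than extending the returned dict) should let the requests run inside the worker threads.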

NoRegionError Boto3

Is there a way to run the boto3 module without explicitly defining a region? I am trying to run a script to confirm that tags have been added to CloudFormation AWS stacks. I want it to run against whatever region is currently selected in my default connection. I'm not sure if I need to write something for this, or whether the region has to be stated explicitly. If it does, is it possible to check ALL regions without slowing things down?
import boto3

cf = boto3.client('cloudformation')

def validate_stack(stack_id):
    rsp = cf.describe_stacks(StackName=stack_id)
    for stack in rsp['Stacks']:
        has_product = False
        has_service = False
        has_team = False
        has_owner = False
        for tag in stack['Tags']:
            if tag['Key'] == 'Product':
                has_product = True
            if tag['Key'] == 'Service':
                has_service = True
            if tag['Key'] == 'Team':
                has_team = True
            if tag['Key'] == 'Owner':
                has_owner = True
        last_time = stack.get('LastUpdatedTime', stack['CreationTime'])
        if not (has_product and has_service and has_team and has_owner):
            print('last updated: {5}, has_product={1}, has_service={2}, has_team={3}, has_owner={4} {0}'.format(
                stack_id, has_product, has_service, has_team, has_owner, last_time))

def check_cloudformation(deployment_id):
    list_rsp = cf.list_stacks(
        StackStatusFilter=[
            'CREATE_COMPLETE',
            'UPDATE_COMPLETE'
        ]
    )
    deployment_substring = '-{0}-'.format(deployment_id)
    while True:
        for summary in list_rsp['StackSummaries']:
            if deployment_substring in summary['StackName']:
                validate_stack(summary['StackId'])
        next_token = list_rsp.get('NextToken')
        if next_token is None:
            break
        list_rsp = cf.list_stacks(
            StackStatusFilter=[
                'CREATE_COMPLETE',
                'UPDATE_COMPLETE'
            ],
            NextToken=next_token
        )
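A sketch of the usual options (not tested against this account; the region name is an example fallback): boto3 raises NoRegionError when neither the environment, the AWS config file, nor the client call specifies a region, so you can fall back explicitly, or enumerate every region at the cost of repeating the stack listing once per region:

import boto3

# use the default/profile region when one is configured, else fall back
session = boto3.session.Session()
region = session.region_name or 'us-east-1'  # example fallback
cf = boto3.client('cloudformation', region_name=region)

# to check ALL regions (slower: one round of API calls per region)
ec2 = boto3.client('ec2', region_name=region)
for r in ec2.describe_regions()['Regions']:
    regional_cf = boto3.client('cloudformation', region_name=r['RegionName'])
    # ... run the check_cloudformation logic with regional_cf ...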

Exclude positionAmt values of dict [jsonArray] object with python

I get the following JSON array with Python. I want to exclude every object whose positionAmt value is zero or greater, i.e. if positionAmt is zero or positive, delete the row it is on. How can I do that?
def webhook(request):
    webhook_received_json = json.loads(request.body)
    while True:
        get_active_position()

def get_active_position():
    print("get_active position")
    sleep(3)
    futures_api_key = "asd"
    futures_secret_key = "fdg"
    client = Client(futures_api_key, futures_secret_key)
    data = client.futures_position_information()
    # data = list
    for key, value in data.items():
        if "positionAmt" >= "0.0000":
            del data[key]
            break
    return get_active_position

[
    {'symbol': 'BCHUSDT', 'positionAmt': '0.00000', 'entryPrice': '262.39000'},
    {'symbol': 'BCHUSDT', 'positionAmt': '-0.001', 'entryPrice': '262.39000'},
    {'symbol': 'ETHUSDT', 'positionAmt': '-0.001', 'entryPrice': '386.60000'}
]
The value is a string; you need to convert it to float or decimal.Decimal in order to compare properly. Also, you are iterating over a list, not a dict.
def get_active_position():
    print("get_active position")
    sleep(3)
    futures_api_key = "asd"
    futures_secret_key = "fdg"
    client = Client(futures_api_key, futures_secret_key)
    data = client.futures_position_information()
    return [item for item in data if float(item.get('positionAmt', '0')) < 0]
Note, the code is not tested.
EDIT: I edited the code so that it also excludes 0.
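Applied to the sample data from the question, the comprehension keeps only the negative positions:

positions = [
    {'symbol': 'BCHUSDT', 'positionAmt': '0.00000', 'entryPrice': '262.39000'},
    {'symbol': 'BCHUSDT', 'positionAmt': '-0.001', 'entryPrice': '262.39000'},
    {'symbol': 'ETHUSDT', 'positionAmt': '-0.001', 'entryPrice': '386.60000'},
]
active = [p for p in positions if float(p.get('positionAmt', '0')) < 0]
print(active)  # the 0.00000 BCHUSDT row is dropped, the two -0.001 rows remain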

Python: How to Speed Up API Requests?

Problem: I am trying to extract data through an API service. A single request can take anywhere from 3 to 10 seconds, and there are roughly 20,000 rows of data from a Pandas DataFrame to feed into the API calls. I have managed to speed it up a bit through multiprocessing, but it's still running very slowly. Any suggestions?
Code:
import json
import time
import multiprocessing
from multiprocessing import Pool

import numpy as np
import pandas as pd
import requests

def scored_card_features2(source, n_batches):
    """Multiprocessing version of the scored card features function.
    Returns reason for rating.
    """
    # read in source data and convert to list of lists for inputs
    data = pd.read_excel(source)
    data = data[['primary_bank_report_id', 'primary_tu_credit_report_id', 'purpose']]
    inputs = data.values.tolist()

    def scored_card_map(i):
        """Form request to scored card service and retrieve values."""
        url = "url/FourthGen?bank_report_id=%s&credit_report_id=%s&" \
              "&loan_purpose=%s" % (i[0], i[1], i[2].replace(" ", "%20"))
        r = requests.get(url)
        try:
            d = json.loads(r.text)
            l = [d['probability_of_default'],
                 d['condition'],
                 d['purpose_of_loan'],
                 d['rating'],
                 d['bank_report_id'],
                 d['reason_for_rating'],
                 d['credit_report_id']]
            return l
        except:
            l = [np.nan] * 7
            return l

    # initiate multiprocessing
    with Pool(n_batches) as p:
        vals = p.map(scored_card_map, inputs)

    result = pd.DataFrame(vals, columns=['Probability of Default', 'Condition', 'Purpose of Loan',
                                         'Rating', 'Bank Report ID', 'Reason for Rating',
                                         'Credit Report ID'])
    result = result.dropna(how='all')
    return result

if __name__ == '__main__':
    # model features
    start = time.time()
    df = scored_card_features2('BankCreditPortalIDsPurpose.xlsx', multiprocessing.cpu_count() - 1)
    df.to_csv('scored_card_features.csv', index=False)
    end = time.time()
    print(end - start)
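Since the job is I/O-bound (each call spends 3 to 10 seconds waiting on the network), threads usually scale better than one process per CPU core. A sketch (same URL scheme and inputs layout as above; max_workers=50 is a guess to tune, not a figure from the original):

import requests
from concurrent.futures import ThreadPoolExecutor

def fetch(i):
    # i = [bank_report_id, credit_report_id, purpose], as in scored_card_map
    url = "url/FourthGen?bank_report_id=%s&credit_report_id=%s" \
          "&loan_purpose=%s" % (i[0], i[1], i[2].replace(" ", "%20"))
    try:
        return requests.get(url, timeout=30).json()
    except Exception:
        return None

with ThreadPoolExecutor(max_workers=50) as ex:  # threads mostly wait on the network
    vals = list(ex.map(fetch, inputs))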
