NoRegionError Boto3 - python

Is there a way to run the boto3 module without explicitly defining a region? I am trying to run a script that confirms tags have been added to AWS CloudFormation stacks. I want it to run against whatever region is currently selected in my default connection. I'm not sure if I need to write something for this, and I'm also wondering whether the region has to be stated explicitly. If that's the case, is it possible to check ALL regions without it slowing the script down?
import boto3

cf = boto3.client('cloudformation')

def validate_stack(stack_id):
    rsp = cf.describe_stacks(StackName=stack_id)
    for stack in rsp['Stacks']:
        has_product = False
        has_service = False
        has_team = False
        has_owner = False
        for tag in stack['Tags']:
            if tag['Key'] == 'Product':
                has_product = True
            if tag['Key'] == 'Service':
                has_service = True
            if tag['Key'] == 'Team':
                has_team = True
            if tag['Key'] == 'Owner':
                has_owner = True
        last_time = stack.get('LastUpdatedTime', stack['CreationTime'])
        if not (has_product and has_service and has_team and has_owner):
            print('last updated: {5}, has_product={1}, has_service={2}, has_team={3}, has_owner={4} {0}'.format(
                stack_id, has_product, has_service, has_team, has_owner, last_time))

def check_cloudformation(deployment_id):
    list_rsp = cf.list_stacks(
        StackStatusFilter=[
            'CREATE_COMPLETE',
            'UPDATE_COMPLETE'
        ]
    )
    deployment_substring = '-{0}-'.format(deployment_id)
    while True:
        for summary in list_rsp['StackSummaries']:
            if deployment_substring in summary['StackName']:
                validate_stack(summary['StackId'])
        next_token = list_rsp.get('NextToken')
        if next_token is None:
            break
        list_rsp = cf.list_stacks(
            StackStatusFilter=[
                'CREATE_COMPLETE',
                'UPDATE_COMPLETE'
            ],
            NextToken=next_token
        )
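For reference, a region can either be pinned explicitly when the client is created or the script can be repeated once per region; this is a hedged sketch (the us-east-1 choice is just an example), not part of the original script:

import boto3

# Option 1: pin a region explicitly (instead of relying on AWS_DEFAULT_REGION / ~/.aws/config)
cf = boto3.client('cloudformation', region_name='us-east-1')

# Option 2: iterate over every available region and reuse the existing validate_stack logic
ec2 = boto3.client('ec2', region_name='us-east-1')
for region in ec2.describe_regions()['Regions']:
    cf = boto3.client('cloudformation', region_name=region['RegionName'])
    # ... call list_stacks / validate_stack here, one region at a time

Checking all regions means one set of API calls per region, so it is inherently slower than a single-region run; running the per-region loops concurrently (for example with a thread pool) is one way to keep the wall-clock time down.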

Related

Python, SqlAlchemy. Or_ expected 2 arguments, got 3

I have this problem with my code: when I insert three or more parameters in the request body, I get the error "POST Error: or_ expected 2 arguments, got 3."
I can only pass one or two parameters in the body; in that case it works fine. But I don't understand where the mistake is. Can someone help me?
def read_uptime(logid, filteredData, dateStart, dateEnd, timeStart, timeEnd, elementsForPage, currentPage, filterUptime):
    log.info(f"{logid} read_uptime: Started")
    try:
        # Check Timeframe Correct
        startDateTime, endDateTime = _checkDataInput(timeStart, timeEnd, dateStart, dateEnd)
        # Create Filter
        filters = _createFilter(filteredData, startDateTime, endDateTime, filterUptime)
        # Query
        dataFiltered = uptime_model_db.query.with_entities(
            uptime_model_db.projectId.label('projectId'),
            uptime_model_db.url.label('url'),
            uptime_model_db.timeStamp.label('timeStamp'),
            uptime_model_db.uptime.label('uptime'),
            uptime_model_db.latency.label('latency')
        ).filter(*filters).paginate(per_page=int(elementsForPage + 1), page=int(currentPage), error_out=True)
        # Checking more pages
        nextPage = {
            "currentPage": currentPage,
            "totalElements": len(dataFiltered.items)
        }
        if (len(dataFiltered.items) > elementsForPage):
            nextPage["nextPage"] = True
        else:
            nextPage["nextPage"] = False
        # Format and return JSON
        return _createJson(dataFiltered.items, nextPage)
    except Exception as e:
        log.error(f"{logid} read_uptime: function read_uptime returned {e}")
        raise e
I get the error in this code, at the line arrayFilter.append(and_(uptime_model_db.projectId == projectId, or_(*arrayUrl))):
def filterAppend(arrayFilter, urls, projectId, arrayUrl):
    if (len(urls) == 1):
        arrayFilter.append(and_(uptime_model_db.projectId == projectId, uptime_model_db.url == urls[0]))
    if (len(urls) > 1):
        for url in urls:
            arrayUrl.append(uptime_model_db.url == url)
        arrayFilter.append(and_(uptime_model_db.projectId == projectId, or_(*arrayUrl)))
and in this code, at the line filters.append(or_(*arrayFilter)):
def _createFilter(filteredData, startDateTime, endDateTime, filterUptime):
    filters = []
    if filteredData is not None:
        arrayFilter = []
        for data in filteredData:
            projectId = data["projectId"]
            urls = data["uptimeUrls"]
            arrayUrl = []
            if (len(filteredData) == 1):
                filterAppend(filters, urls, projectId, arrayUrl)
            else:
                filterAppend(arrayFilter, urls, projectId, arrayUrl)
        if (len(filteredData) > 1 or len(arrayFilter) > 1):
            filters.append(or_(*arrayFilter))
    if startDateTime is not None:
        filters.append(str(startDateTime) <= uptime_model_db.timeStamp)
    if startDateTime is not None:
        filters.append(str(endDateTime) >= uptime_model_db.timeStamp)
    if filterUptime == "True":
        filters.append(uptime_model_db.uptime < 100)
    return filters
Import or_ from sqlalchemy instead of from the operators module:
from sqlalchemy import or_
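For context, the "expected 2 arguments" message appears to come from the binary or_ in Python's operator module (which sqlalchemy.sql.operators re-exports), while sqlalchemy.or_ accepts any number of clauses. Below is a minimal, self-contained sketch assuming SQLAlchemy 1.4+; the Uptime model is an illustrative stand-in for uptime_model_db, not the real model:

from sqlalchemy import Column, Integer, String, and_, or_, select
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class Uptime(Base):  # illustrative stand-in for uptime_model_db
    __tablename__ = 'uptime'
    id = Column(Integer, primary_key=True)
    projectId = Column(String)
    url = Column(String)

urls = ['a.example', 'b.example', 'c.example']
url_clauses = [Uptime.url == u for u in urls]   # any number of clauses is fine here
stmt = select(Uptime).where(and_(Uptime.projectId == 'p1', or_(*url_clauses)))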

Run a function in background using thread in Flask

I am trying to implement a limit order book using Flask and I am working on the backend part right now. I am new to Flask, so I am still learning, and I am not very familiar with how the backend of trading works, but I am trying to learn via this small project.
I have created 3 endpoints in my application which add an order, remove an order, and return the order status; these three endpoints are working fine (checked with Postman). Now I am trying to run a function in the background which continuously checks new orders (buy/sell) from a JSON file that stores all new orders. It picks them up one by one and finds a match based on price: if a user's buy order matches a different user's sell order, it is processed and stored in a dict, which I want to return, or otherwise store all those successful orders for the user.
Here is my code for the class I have created:
import json
import bisect
import random
import os


class Process(object):
    def __init__(self):
        self.trade_book = []
        self.bid_prices = []
        self.ask_prices = []
        self.ask_book = {}
        self.bid_book = {}
        self.confirm_traded = []
        self.orders_history = {}
        self.traded = False
        self.counter = 0

    def save_userdata(self, order, newId):
        orderid = order['order']['trader'] + "_" + str(newId)
        user_list = order
        newJson = {
            "orders": [
                {orderid: order['order']}
            ]
        }
        with open('data_user.json', 'a+') as jsonFile:
            with open('data_user.json', 'r') as readableJson:
                try:
                    jsonObj = json.load(readableJson)
                except Exception as e:
                    jsonObj = {}
            if jsonObj == {}:
                json.dump(newJson, jsonFile)
            else:
                with open('data_user.json', 'w+') as writeFile:
                    exists = False
                    for item in jsonObj['orders']:
                        if item.get(orderid, None) is not None:
                            item[orderid] = order['order']
                            exists = True
                            break
                    if not exists:
                        jsonObj['orders'].append(newJson['orders'][0])
                    json.dump(jsonObj, writeFile)
        return orderid

    def get_userdata(self):
        with open('data_user.json', 'r') as readableJson:
            return json.load(readableJson)

    def removeOrder(self, orderid):
        order_id = list(orderid.values())[0]
        with open('data_user.json') as data_file:
            data = json.load(data_file)
        newData = []
        for item in data['orders']:
            if item.get(order_id, None) is not None:
                del item[order_id]
            else:
                newData.append(item)
        data['orders'] = newData
        with open('data_user.json', 'w') as data_file:
            data = json.dump(data, data_file)
        return order_id

    def add_order_to_book(self, order):
        index = list(order.keys())[0]
        book_order = order[index]
        print(index)
        if order[index]['side'] == 'buy':
            book_prices = self.bid_prices
            book = self.bid_book
        else:  # order[index]['side'] == 'sell'
            book_prices = self.ask_prices
            book = self.ask_book
        if order[index]['price'] in book_prices:
            book[order[index]['price']]['num_orders'] += 1
            book[order[index]['price']]['size'] += order[index]['quantity']
            book[order[index]['price']]['order_ids'].append(index)
            book[order[index]['price']]['orders'][index] = book_order
        else:
            bisect.insort(book_prices, order[index]['price'])
            book[order[index]['price']] = {'num_orders': 1, 'size': order[index]['quantity'],
                                           'order_ids': [index],
                                           'orders': {index: book_order}}

    def confirm_trade(self, order_id, timestamp, order_quantity, order_price, order_side):
        trader = order_id.partition('_')[0]
        self.confirm_traded.append({'trader': trader, 'quantity': order_quantity, 'side': order_side,
                                    'price': order_price,
                                    'status': 'Successful'})
        return self.confirm_traded

    def process_trade_orders(self, order):
        self.traded = False
        index = list(order.keys())[0]
        if order[index]['side'] == 'buy':
            book = self.ask_book
            if order[index]['price'] in self.ask_prices:
                remainder = order[index]['quantity']
                while remainder > 0:
                    book_order_id = book[order[index]['price']]['order_ids'][0]
                    book_order = book[order[index]['price']]['orders'][book_order_id]
                    if remainder >= book_order['quantity']:
                        self.trade_book.append({'order_id': book_order_id, 'timestamp': order[index]['timestamp'],
                                                'price': order[index]['price'],
                                                'quantity': order[index]['quantity'], 'side': book_order['side']})
                        self.confirm_trade(index, order[index]['timestamp'], order[index]['quantity'], order[index]['price'], order[index]['side'])
                        self.traded = True
                        remainder = remainder - book_order['quantity']
                        self.save_historty_orders(index, order[index])
                        break
                    else:
                        self.traded = True
                        self.trade_book.append({'order_id': index, 'timestamp': order[index]['timestamp'],
                                                'price': order[index]['price'],
                                                'quantity': order[index]['quantity'], 'side': order[index]['side']})
                        self.confirm_trade(index, order[index]['timestamp'], order[index]['quantity'], order[index]['price'], order[index]['side'])
                        self.save_historty_orders(index, order[index])
                        break
            else:
                self.add_order_to_book(order)
                self.save_historty_orders(index, order[index])
        else:  # order['side'] == 'sell'
            book = self.bid_book
            if order[index]['price'] in self.bid_prices:
                remainder = order[index]['quantity']
                while remainder > 0:
                    book_order_id = book[order[index]['price']]['order_ids'][0]
                    book_order = book[order[index]['price']]['orders'][book_order_id]
                    if remainder >= book_order['quantity']:
                        self.trade_book.append({'order_id': book_order_id, 'timestamp': order[index]['timestamp'],
                                                'price': order[index]['price'],
                                                'quantity': order[index]['quantity'], 'side': order[index]['side']})
                        self.traded = True
                        self.confirm_trade(index, order[index]['timestamp'], order[index]['quantity'], order[index]['price'], order[index]['side'])
                        remainder = remainder - book_order['quantity']
                        self.save_historty_orders(index, order[index])
                        break
                    else:
                        self.traded = True
                        self.trade_book.append({'order_id': book_order_id, 'timestamp': order[index]['timestamp'],
                                                'price': order[index]['price'],
                                                'quantity': order[index]['quantity'], 'side': order[index]['side']})
                        self.confirm_trade(index, order[index]['timestamp'], order[index]['quantity'], order[index]['price'], order[index]['side'])
                        self.save_historty_orders(index, order[index])
                        break
            else:
                self.add_order_to_book(order)
                self.save_historty_orders(index, order[index])
I create an object of this Process class in my app.py and call process_trade_orders on it inside a function processing():
app = Flask(__name__)
app.config['DEBUG'] = True

newUser = Process()
succorder = Success()
# sched = BackgroundScheduler()


def generate_orderid():
    num = 0
    while num < 1000:
        yield num
        num = num + 1


genid = generate_orderid()
proc = Process()
sucorder = Success()


# Processing orders to find if they have a match
def processing():
    get_orders_data = proc.get_userdata()
    print(get_orders_data)
    print("\n")
    for data in get_orders_data['orders']:
        index = list(data.keys())[0]
        if data[index]['status'] == 'Successful':
            sucorder.add_trader_orders(data[index], index)
        else:
            proc.process_trade_orders(data)

# sched = BackgroundScheduler()
# sched.add_job(func=processing, trigger="interval", seconds=2)
# sched.start()
I did use APScheduler's BackgroundScheduler for this, but I want to use a thread instead. I was thinking of running a main thread in an infinite loop as a daemon and using a worker thread to run this processing() function in app.py, called every few seconds, to check if there are any successful orders; it would return the values to the main thread, and for each new entry in that list of dicts I could return a response, or notify the user in some other way that the successful order was matched.
Note that this will run at short intervals, like 5 seconds, while multiple add orders keep coming in, with the checks running continuously and asynchronously, so I am not sure how I will return those values. I am just confused, so if anyone can help me I will be grateful.
If you want to make a threaded function that runs in the background, just use the threading module, like this:
from threading import Thread

def bg_func():
    do_something()  # placeholder for the actual background work

t = Thread(target=bg_func)
t.start()  # starts the function and returns immediately, even while bg_func is still running

do_something_else_at_same_time()  # placeholder; runs concurrently with bg_func
You can also have multiple background threads.
Check the documentation for more info.
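To run processing() on an interval with plain threads, one hedged option is a daemon thread that loops and sleeps; the 5-second interval, the queue used to hand matched orders back to the Flask views, and the use of proc.confirm_traded as the result list are assumptions, not part of the original code:

import queue
import time
from threading import Thread

results = queue.Queue()  # matched orders land here; a Flask view can drain it

def poll_orders(interval=5):
    while True:
        processing()                        # existing function from app.py
        for trade in proc.confirm_traded:   # assumed result list on the Process object
            results.put(trade)
        proc.confirm_traded.clear()
        time.sleep(interval)

worker = Thread(target=poll_orders, daemon=True)
worker.start()  # Flask keeps serving requests while the worker loops

Because the worker is a daemon thread, it dies with the main process; if the matching work ever becomes CPU-heavy or must survive restarts, a task queue such as Celery is the usual next step.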

dict values pass into a function

I have the dictionary below:
a={
'set1': {'fileName': 'filename1',
'moduleName': 'modulename1',
'connection1.name': 'connection1',
'connection.test':'connectiontest1',
'connection2.name': 'connection2',
'connection.test':'connectiontest2',
'queue1.name': 'queue1',
'queue1.test':'queuetest1',
'topic1.name':'topic1',
'topic1.test':'topic1test',
'topic2.name':'topic2',
'topic2.test':'topic2test',
'ServerName': 'serverone',
'DeploymentName': 'deployment1'
},
'set2':{'fileName': 'filename2',
'moduleName': 'modulename2',
'connection1.name': 'connection1',
'connection.test':'connectiontest1',
'queue1.name': 'queue1',
'queue1.test':'queuetest1',
'topic1.name':'topic1',
'topic1.test':'topic1test',
'topic2.name':'topic2',
'topic2.test':'topic2test',
'ServerName': 'servertwo',
'DeploymentName': 'deployment2'
}}
For each set, I need to pass the values to a function that creates a server from the dictionary above.
def create_server_config(fileName, moduleName, connection, connectiontest, topic, topictest, queue, queuetest, servername, deploymentname):
    create_queue(queue, queuetest)
    create_topic(topic, topictest)
    create_connection(connection, connectiontest)
I want to pass each set into the function as arguments so that it creates a server per set. The problem is that queue, topic, and connection have multiple numbered entries in each set, so can anyone help me with how to pass them to the function? For example, to create a queue, create_server_config needs to receive all of the queue and queuetest values belonging to the set1 dict.
I have a few questions for you. Based on that, the answer might change.
The code below assumes the following:
In Set1 & Set2, there are two connection.test keys. This may be a typo. I am assuming the first one is connection1.test and second one is connection2.test
connection1.name, queue1.name, and topic1.name will ALWAYS be present in each set
If topic2.name exists but connection2.name and queue2.name does not exist, then it is OK to pass connection1.name and queue1.name values to create server config.
Similarly, if any combination of connection2.name, connection2.test, queue2.name, queue2.test, topic2.name, topic2.test exists but the others do not exist, then the value from xxx1.name or xxx1.test will be used in its place where xxx is connection or queue or topic.
This code will call the create server config function for each value of the dict:
a={
'set1':{'fileName': 'filename1',
'moduleName': 'modulename1',
'connection1.name': 'connection1',
'connection1.test':'connectiontest1',
'connection2.name': 'connection2',
'connection2.test':'connectiontest2',
'queue1.name': 'queue1',
'queue1.test':'queuetest1',
'topic1.name':'topic1',
'topic1.test':'topic1test',
'topic2.name':'topic2',
'topic2.test':'topic2test',
'ServerName': 'serverone',
'DeploymentName': 'deployment1'
},
'set2':{'fileName': 'filename2',
'moduleName': 'modulename2',
'connection1.name': 'connection1',
'connection1.test':'connectiontest1',
'queue1.name': 'queue1',
'queue1.test':'queuetest1',
'topic1.name':'topic1',
'topic1.test':'topic1test',
'topic2.name':'topic2',
'topic2.test':'topic2test',
'ServerName': 'servertwo',
'DeploymentName': 'deployment2'
}
}
for s, setkey in a.items():
    fileName = setkey['fileName']
    moduleName = setkey['moduleName']
    connection = setkey['connection1.name']
    connectiontest = setkey['connection1.test']
    topic = setkey['topic1.name']
    topictest = setkey['topic1.test']
    queue = setkey['queue1.name']
    queuetest = setkey['queue1.test']
    servername = setkey['ServerName']
    deploymentname = setkey['DeploymentName']
    print(fileName, moduleName,
          connection, connectiontest,
          topic, topictest, queue,
          queuetest, servername, deploymentname)
    create_server_config(fileName, moduleName,
                         connection, connectiontest,
                         topic, topictest, queue,
                         queuetest, servername, deploymentname)
    flag = False
    if 'connection2.name' in setkey:
        flag = True
        connection = setkey['connection2.name']
    if 'connection2.test' in setkey:
        flag = True
        connectiontest = setkey['connection2.test']
    if 'queue2.name' in setkey:
        flag = True
        queue = setkey['queue2.name']
    if 'queue2.test' in setkey:
        flag = True
        queuetest = setkey['queue2.test']
    if 'topic2.name' in setkey:
        flag = True
        topic = setkey['topic2.name']
    if 'topic2.test' in setkey:
        flag = True
        topictest = setkey['topic2.test']
    if flag:
        print(fileName, moduleName,
              connection, connectiontest,
              topic, topictest, queue,
              queuetest, servername, deploymentname)
        create_server_config(fileName, moduleName,
                             connection, connectiontest,
                             topic, topictest, queue,
                             queuetest, servername, deploymentname)
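If the numbered keys can grow beyond 2 (connection3.name, queue4.test, and so on), a hedged alternative is to group them by prefix before creating anything; the helper group_numbered_keys and its regex are illustrative assumptions, while create_queue, create_topic, and create_connection are the functions from the question:

import re

def group_numbered_keys(setkey, prefix):
    """Collect (name, test) pairs for keys like 'queue1.name'/'queue1.test' in numeric order."""
    pattern = re.compile(r'^{0}(\d+)\.name$'.format(prefix))
    pairs = []
    for key in sorted(setkey):
        match = pattern.match(key)
        if match:
            n = match.group(1)
            pairs.append((setkey[key], setkey.get('{0}{1}.test'.format(prefix, n))))
    return pairs

for s, setkey in a.items():
    for name, test in group_numbered_keys(setkey, 'queue'):        # e.g. [('queue1', 'queuetest1')]
        create_queue(name, test)
    for name, test in group_numbered_keys(setkey, 'topic'):
        create_topic(name, test)
    for name, test in group_numbered_keys(setkey, 'connection'):
        create_connection(name, test)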

Python: assign a list to a function

Is there a way to assign a list based on a variable env and pass the result to a function? I am passing a variable called env, which could be UPE, DEV, or PRD, for example. Based on its value, I want to pass the corresponding list to the functions below. What would be the best approach?
UPE = ['SERVER1', 'SERVER2', 'SERVER3', 'SERVER4']
DEV = ['ServerA', 'ServerB']
PRD = ['SERVER1', 'SERVER2', 'SERVER3', 'SERVER4']

if os.path.isfile('/myfile/' + configFile):
    config_parser = ConfigParser()
    config_parser.read('/myfile/' + configFile)
    if actions == "start":
        startOVD('start', UPE[3])  # I want to pass the result of the env variable here
        # ans = raw_input("Would you like to start OVD, MSAS, OAG : y | n : ")
        if env == 'UPE':
            startMSAS('start', UPE[0])
            startOAG('start', UPE[1])
            startOHS('start', UPE[2])
        for section_name in sorted(config_parser.sections(), reverse=True):
            adminURL = config_parser.get(section_name, 'ADMIN_URL')
            adminUsername = config_parser.get(section_name, 'ADMIN_USER')
            adminPassword = config_parser.get(section_name, 'ADMIN_PASS')
            adminHost = config_parser.get(section_name, 'NM_HOST')
            domainName = config_parser.get(section_name, 'DOMAIN_NAME')
            domainDir = config_parser.get(section_name, 'DOMAIN_DIR')
            admPort = adminURL[-4:]
            printHeader('Initiating Starting Sequence')
            startAdmin('start', adminHost, domainDir, domainName, admPort)
            showServerStatus('start', adminUsername, adminPassword, adminURL)
    if actions == "stop":
        for section_name in (config_parser.sections()):
            adminURL = config_parser.get(section_name, 'ADMIN_URL')
            adminUsername = config_parser.get(section_name, 'ADMIN_USER')
            adminPassword = config_parser.get(section_name, 'ADMIN_PASS')
            adminHost = config_parser.get(section_name, 'NM_HOST')
            domainName = config_parser.get(section_name, 'DOMAIN_NAME')
            domainDir = config_parser.get(section_name, 'DOMAIN_DIR')
            admPort = adminURL[-4:]
            printHeader('Initiating Stopping Sequence')
            showServerStatus('stop', adminUsername, adminPassword, adminURL)
            stopAdmin(adminHost, domainDir, domainName, admPort)
        if env == 'UPE':
            stopMSAS('stop', UPE[0])
            stopOAG('stop', UPE[1])
            stopOHS('stop', UPE[2])
            stopOVD('stop', UPE[3])
I would organize this by setting up a list of callbacks.
from functools import partial

start_funcs = [partial(startOVD, 'start', UPE[3])]
if env == 'UPE':
    start_funcs.extend([partial(startMSAS, 'start', UPE[0]),
                        partial(startOAG, 'start', UPE[1])])
    start_funcs.append(partial(startOHS, 'start', UPE[2]))
Add similar logic for the cases when env has a different value. In the end, you'll just iterate over start_funcs and call each function in order.
if actions == "start":
    for f in start_funcs:
        f()
# ...
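If the goal is simply to select the right server list from the env string, a plain dict lookup is an even simpler hedged sketch (the list names mirror the question; the indices assume each list has at least four entries):

env_servers = {'UPE': UPE, 'DEV': DEV, 'PRD': PRD}
servers = env_servers[env]            # raises KeyError for an unknown env value

if actions == "start":
    startOVD('start', servers[3])
    startMSAS('start', servers[0])
    startOAG('start', servers[1])
    startOHS('start', servers[2])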

Prevent Double (or more) post in Django

I'm working on a site built in Django, but I'm having a little trouble: in multiple places I'm getting double (sometimes 3 or 4) POSTs. For example, when submitting a form that saves a contact, after the page reloads I'm getting 2, 3, or 4 copies of the same contact. The difference in time between them is about 0.5 seconds.
I tried to code a little solution like this:
def not_double_post(request):
    session_data = request.session.get('post_data', None)
    session_new_data = '-'.join([str(request.user.username),
                                 str(int(time.time()))])
    if session_data is None:
        request.session['post_data'] = session_new_data
        return True
    else:
        current_user = request.user.username
        current_time = int(time.time())
        session_post_user = session_data.split('-')[0]
        session_post_time = session_data.split('-')[1]
        if current_user == session_post_user:
            time_difference = current_time - int(session_post_time)
            if time_difference > 10:
                request.session.pop('post_data')
                request.session['post_data'] = ''.join([str(current_user),
                                                        str(int(time.time()))])
                return True
            else:
                return False
        else:
            return True
On each request.method == 'POST' I call this function: it stores in the session the user and the time the operation was made, and if the user is the same and the time difference is less than 10 seconds, the post is treated as invalid.
(I'm also setting the submit button to disabled when the user clicks.)
But I'm still getting double posts with this. I don't know if it really works the way I expect, but it would be good to get another point of view (or another recommendation to achieve this).
Thanks!
==== UPDATE views.py =====
def cancel_invoice(request, pk):
    if not_double_post(request):
        order = PaymentOrder.objects.get(pk=pk)
        order.cancelled_date = datetime.date.today()
        folio = order.invoice_sat.comp_folio
        serie = order.invoice_sat.comp_serie
        rfc_receptor = order.client.rfc
        folio = folio
        soap_doc = '<soapenv:Envelope ' \
                   'xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/" ' \
                   'xmlns:xsaf="http://xsafilereceiver.webservices.manager.xsa.tralix.com">' \
                   '<soapenv:Header/>' \
                   '<soapenv:Body>' \
                   '<xsaf:cancelaCFD>' \
                   '<xsaf:in0>' + serie + '</xsaf:in0>' \
                   '<xsaf:in1>' + folio + '</xsaf:in1>' \
                   '<xsaf:in2>' + settings.IENTC_RFC + '</xsaf:in2>' \
                   '<xsaf:in3>' + settings.API_KEY + '</xsaf:in3>' \
                   '</xsaf:cancelaCFD>' \
                   '</soapenv:Body>' \
                   '</soapenv:Envelope>'
        factura_client = ServiceClient(settings.API_CANCEL_URL)
        invoice_status = factura_client.service.cancelaCFD(__inject={'msg': soap_doc})
        if invoice_status:
            order.invoice_sat.invoice_status = 0
            order.amount_to_pay = None
            order.invoice_sat.save()
            order.save()
            if serie == settings.INVOICE_SERIE:
                order.client.balance = order.client.balance + order.total
            else:
                order.client.balance = order.client.balance - order.total
            order.client.save()
            comment = request.POST.get('comment', None)
            if comment.strip() != '' and comment is not None:
                cancelled_folio = folio
                CANCEL_EVENT = 'Administración'
                CANCEL_INVOICE_TITLE = 'Cancelación de Factura #%s' % \
                    str(cancelled_folio)
                cancel_type, created = EventType.objects.get_or_create(
                    name=CANCEL_EVENT)
                Event(
                    client=order.client,
                    register_date=datetime.datetime.now(),
                    event_type=cancel_type,
                    event=CANCEL_INVOICE_TITLE,
                    additional_comments=comment.strip(),
                    created_by=request.user
                ).save()
                mandrill_client = mandrill.Mandrill(settings.MADRILL_API_KEY)
                message = dict()
                message['subject'] = 'Cancelación de Factura'
                message['from_email'] = 'crm#ientc.com'
                message['from_name'] = 'CRM IENTC'
                message['text'] = CANCEL_INVOICE_TITLE
                message['to'] = [
                    {
                        'email': 'conta#ientc.com',
                        'name': 'Contabilidad IENTC',
                        'type': 'to'
                    }
                ]
                result = mandrill_client.messages.send(
                    message=message, async=False)
                print result
            return response_json("OK", 200)
        else:
            return response_json("ERROR CANCELANDO FACTURA", 400)
    else:
        return response_json("OK", 200)
In this particular POST I'm making a SOAP call, but I'm also having trouble on other POSTs with simpler code. Thanks!
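One common way to harden this, offered here only as a hedged sketch rather than a drop-in fix, is the Post/Redirect/Get pattern combined with a one-time token stored in the session. The URL name contact_done, the session key form_token, and the helper save_contact are illustrative assumptions:

import uuid

from django.shortcuts import redirect, render


def contact_form(request):
    if request.method == 'POST':
        token = request.POST.get('form_token')
        if token and token == request.session.pop('form_token', None):
            save_contact(request.POST)  # hypothetical helper standing in for the real save
        # Redirect after POST so a refresh or double-click cannot resubmit the same form
        return redirect('contact_done')
    # GET: issue a fresh one-time token for the next submission
    request.session['form_token'] = uuid.uuid4().hex
    return render(request, 'contact.html', {'form_token': request.session['form_token']})

The template then renders form_token into a hidden input; a repeated POST arrives with a token that has already been popped from the session, so the duplicate save is skipped while the user still gets a normal redirect.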
