I am trying to build a trading bot, and for that I am sending the following POST request:
def binanceOrder(symbol, amount, price, buyorsell, orderType):
    # curl -H "X-MBX-APIKEY: vmPUZE6mv9SD5VNHk4HlWFsOr6aKE2zvsw0MuIgwCIPy6utIco14y7Ju91duEh8A" -X POST 'https://api.binance.com/api/v3/order?symbol=LTCBTC&side=BUY&type=LIMIT&timeInForce=GTC&quantity=1&price=0.1&recvWindow=5000&timestamp=1499827319559&signature=c8db56825ae71d6d79447849e617115f4a920fa2acdcab2b053c4b2838bd6b71'
    body = {
        "timestamp": int(time.mktime(datetime.datetime.today().timetuple())),
        "symbol": symbol,  # 'BTCUSD'
        "quantity": amount,
        "price": price,  # Use random number for market orders.
        # exchange: 'bitfinex',
        "side": buyorsell,
        "type": orderType,  # LIMIT
        # ocoorder: 'false'
        "timeInForce": "GTC",
        "recvWindow": 10000,
        "signature": BINANCE_SIGNATURE
    }
    headers = {
        'Content-Type': 'multipart/form-data',
        "X-MBX-APIKEY": BINANCE_API_KEY
    }
    http_client.fetch("https://api.binance.com/api/v3/order", binanceOrderResponse, method='POST', headers=urllib.urlencode(headers), body=urllib.urlencode(body))

if __name__ == "__main__":
    binanceOrder("LTCUSD", 1, 0, "buy", "MARKET")
For this I am getting the following error:
Traceback (most recent call last):
File "asyncbot.py", line 263, in <module>
binanceOrder("LTCUSD", 1, 0, "buy", "MARKET")
File "asyncbot.py", line 67, in binanceOrder
http_client.fetch("https://api.binance.com/api/v3/order", binanceOrderResponse, method='POST', headers=urllib.urlencode(headers), body=urllib.urlencode(body))
File "/Users/adityasista/anaconda/envs/trading-bot/lib/python2.7/site-packages/tornado/httpclient.py", line 236, in fetch
request.headers = httputil.HTTPHeaders(request.headers)
File "/Users/adityasista/anaconda/envs/trading-bot/lib/python2.7/site-packages/tornado/httputil.py", line 145, in __init__
self.update(*args, **kwargs)
File "/Users/adityasista/anaconda/envs/trading-bot/lib/python2.7/_abcoll.py", line 571, in update
for key, value in other:
ValueError: need more than 1 value to unpack
What am I doing wrong here?
Tornado's HTTP clients expect either a dict or a tornado.httputil.HTTPHeaders object as the headers argument. Do not try to encode the headers yourself.
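For example, a minimal sketch of the corrected call (the rest of the function unchanged; note also that a urlencoded body pairs with a Content-Type of application/x-www-form-urlencoded rather than multipart/form-data):
headers = {
    "Content-Type": "application/x-www-form-urlencoded",  # matches the urlencoded body
    "X-MBX-APIKEY": BINANCE_API_KEY,
}
http_client.fetch(
    "https://api.binance.com/api/v3/order",
    binanceOrderResponse,
    method="POST",
    headers=headers,              # pass the dict itself, not a urlencoded string
    body=urllib.urlencode(body),  # only the body gets urlencoded
)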
I have been tasked with writing tests for an S3 upload function which uses S3.Object.wait_until_exists to wait for the upload to complete, then gets and returns the content length of the upload.
But so far I am failing to stub head_object for the waiter.
I have explored and found that the waiter has two acceptors:
if HTTP code == 200, accept
if HTTP code == 404, retry
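From what I can tell, botocore's definition of this waiter looks roughly like the following (paraphrased as a Python dict from S3's waiters-2.json; exact values may differ between botocore versions), which also explains the roughly 5-second gap between retries:
# Paraphrased from botocore's bundled waiter config; the waiter polls
# head_object and matches on the HTTP status code of each response.
OBJECT_EXISTS_WAITER = {
    "delay": 5,
    "operation": "HeadObject",
    "maxAttempts": 20,
    "acceptors": [
        {"expected": 200, "matcher": "status", "state": "success"},
        {"expected": 404, "matcher": "status", "state": "retry"},
    ],
}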
I don't know how to explain it further in text, so instead here is an MRE.
from datetime import datetime
from io import BytesIO

import boto3
import botocore
import botocore.stub

testing_bucket = "bucket"
testing_key = "key/of/object"
testing_data = b"data"

s3 = boto3.resource("s3")

def put():
    try:
        o = s3.Object(testing_bucket, testing_key)
        o.load()  # head_object * 1
    except botocore.exceptions.ClientError as e:
        if e.response["Error"]["Code"] == "NoSuchKey":
            etag = ""
        else:
            raise e
    else:
        etag = o.e_tag
    try:
        o.upload_fileobj(BytesIO(testing_data))  # put_object * 1
    except botocore.exceptions.ClientError as e:
        raise e
    else:
        o.wait_until_exists(IfNoneMatch=etag)  # head_object * n until accepted
        return o.content_length  # not sure if calling head_object again

with botocore.stub.Stubber(s3.meta.client) as s3_stub:
    s3_stub.add_response(
        method="head_object",
        service_response={
            "ETag": "fffffffe",
            "ContentLength": 0,
        },
        expected_params={
            "Bucket": testing_bucket,
            "Key": testing_key,
        },
    )
    s3_stub.add_response(
        method="put_object",
        service_response={},
        expected_params={
            "Bucket": testing_bucket,
            "Key": testing_key,
            "Body": botocore.stub.ANY,
        },
    )
    s3_stub.add_response(  # cause time to increase by 5 seconds per response
        method="head_object",
        service_response={
            "ETag": "ffffffff",
            "AcceptRanges": "bytes",
            "ContentLength": len(testing_data),
            "LastModified": datetime.now(),
            "Metadata": {},
            "VersionId": "null",
        },
        expected_params={
            "Bucket": testing_bucket,
            "Key": testing_key,
            "IfNoneMatch": "fffffffe",
        },
    )
    print(put())  # should print 4
And running the above gives:
time python mre.py
Traceback (most recent call last):
File "/tmp/mre.py", line 72, in <module>
put()
File "/tmp/mre.py", line 30, in put
o.wait_until_exists(IfNoneMatch=etag) # head_object * 1
File "/tmp/.tox/py310/lib/python3.10/site-packages/boto3/resources/factory.py", line 413, in do_waiter
waiter(self, *args, **kwargs)
File "/tmp/.tox/py310/lib/python3.10/site-packages/boto3/resources/action.py", line 215, in __call__
response = waiter.wait(**params)
File "/tmp/.tox/py310/lib/python3.10/site-packages/botocore/waiter.py", line 55, in wait
Waiter.wait(self, **kwargs)
File "/tmp/.tox/py310/lib/python3.10/site-packages/botocore/waiter.py", line 343, in wait
response = self._operation_method(**kwargs)
File "/tmp/.tox/py310/lib/python3.10/site-packages/botocore/waiter.py", line 93, in __call__
return self._client_method(**kwargs)
File "/tmp/.tox/py310/lib/python3.10/site-packages/botocore/client.py", line 508, in _api_call
return self._make_api_call(operation_name, kwargs)
File "/tmp/.tox/py310/lib/python3.10/site-packages/botocore/client.py", line 878, in _make_api_call
request_dict = self._convert_to_request_dict(
File "/tmp/.tox/py310/lib/python3.10/site-packages/botocore/client.py", line 936, in _convert_to_request_dict
api_params = self._emit_api_params(
File "/tmp/.tox/py310/lib/python3.10/site-packages/botocore/client.py", line 969, in _emit_api_params
self.meta.events.emit(
File "/tmp/.tox/py310/lib/python3.10/site-packages/botocore/hooks.py", line 412, in emit
return self._emitter.emit(aliased_event_name, **kwargs)
File "/tmp/.tox/py310/lib/python3.10/site-packages/botocore/hooks.py", line 256, in emit
return self._emit(event_name, kwargs)
File "/tmp/.tox/py310/lib/python3.10/site-packages/botocore/hooks.py", line 239, in _emit
response = handler(**kwargs)
File "/tmp/.tox/py310/lib/python3.10/site-packages/botocore/stub.py", line 376, in _assert_expected_params
self._assert_expected_call_order(model, params)
File "/tmp/.tox/py310/lib/python3.10/site-packages/botocore/stub.py", line 352, in _assert_expected_call_order
raise UnStubbedResponseError(
botocore.exceptions.UnStubbedResponseError: Error getting response stub for operation HeadObject: Unexpected API Call: A call was made but no additional calls expected. Either the API Call was not stubbed or it was called multiple times.
python mre.py 0.39s user 0.19s system 9% cpu 5.859 total
Or with two stubbed head_object responses for the waiter, the same thing, just slower: python mre.py 0.40s user 0.20s system 5% cpu 10.742 total.
I found a solution for this: as highlighted above, the waiter accepts on a 200 status code, and adding it to the stubbed response like the following works:
s3_stub.add_response(
    method="head_object",
    service_response={
        "ETag": "ffffffff",
        "AcceptRanges": "bytes",
        "ContentLength": len(testing_data),
        "LastModified": datetime.now(),
        "Metadata": {},
        "VersionId": "null",
        "ResponseMetadata": {"HTTPStatusCode": 200},
    },
    expected_params={
        "Bucket": testing_bucket,
        "Key": testing_key,
        "IfNoneMatch": "fffffffe",
    },
)
I am trying to train a model using the HTTP API and JSON data. Below is the code.
import requests
import json

data = {
    "config": "language: en\npipeline:\n- name: WhitespaceTokenizer\n- name: RegexFeaturizer\n- name: LexicalSyntacticFeaturizer\n- name: CountVectorsFeaturizer\n- name: CountVectorsFeaturizer\nanalyzer: \"char_wb\"\nmin_ngram: 1\nmax_ngram: 4\n- name: DIETClassifier\nepochs: 100\n- name: EntitySynonymMapper\n- name: ResponseSelector\nepochs: 100",
    "nlu": json.dumps({
        "rasa_nlu_data": {
            "regex_features": [],
            "entity_synonyms": [],
            "common_examples": [
                {
                    "text": "i m looking for a place to eat",
                    "intent": "restaurant_search",
                    "entities": []
                },
                {
                    "text": "I want to grab lunch",
                    "intent": "restaurant_search",
                    "entities": []
                },
                {
                    "text": "I am searching for a dinner spot",
                    "intent": "restaurant_search",
                    "entities": []
                },
            ]
        }
    }),
    "force": False,
    "save_to_default_model_directory": True
}

r = requests.post('http://localhost:5005/model/train', json=data)
It gives me a 500 error. Below is the error log:
2020-09-30 07:40:37,511 [DEBUG] Traceback (most recent call last):
File "/home/Documents/practice/rasa/test1/venv/lib/python3.6/site-packages/rasa/server.py", line 810, in train
None, functools.partial(train_model, **info)
File "/usr/lib/python3.6/concurrent/futures/thread.py", line 56, in run
result = self.fn(*self.args, **self.kwargs)
File "/home/Documents/practice/rasa/test1/venv/lib/python3.6/site-packages/rasa/train.py", line 50, in train
additional_arguments=additional_arguments,
File "uvloop/loop.pyx", line 1456, in uvloop.loop.Loop.run_until_complete
File "/home/Documents/practice/rasa/test1/venv/lib/python3.6/site-packages/rasa/train.py", line 83, in train_async
config, domain, training_files
File "/home/Documents/practice/rasa/test1/venv/lib/python3.6/site-packages/rasa/importers/importer.py", line 79, in load_from_config
config = io_utils.read_config_file(config_path)
File "/home/Documents/practice/rasa/test1/venv/lib/python3.6/site-packages/rasa/utils/io.py", line 188, in read_config_file
content = read_yaml(read_file(filename))
File "/home/Documents/practice/rasa/test1/venv/lib/python3.6/site-packages/rasa/utils/io.py", line 124, in read_yaml
return yaml_parser.load(content) or {}
File "/home/Documents/practice/rasa/test1/venv/lib/python3.6/site-packages/ruamel/yaml/main.py", line 343, in load
return constructor.get_single_data()
File "/home/Documents/practice/rasa/test1/venv/lib/python3.6/site-packages/ruamel/yaml/constructor.py", line 111, in get_single_data
node = self.composer.get_single_node()
File "_ruamel_yaml.pyx", line 706, in _ruamel_yaml.CParser.get_single_node
File "_ruamel_yaml.pyx", line 724, in _ruamel_yaml.CParser._compose_document
File "_ruamel_yaml.pyx", line 775, in _ruamel_yaml.CParser._compose_node
File "_ruamel_yaml.pyx", line 891, in _ruamel_yaml.CParser._compose_mapping_node
File "_ruamel_yaml.pyx", line 904, in _ruamel_yaml.CParser._parse_next_event
ruamel.yaml.parser.ParserError: while parsing a block mapping
in "<unicode string>", line 1, column 1
did not find expected key
in "<unicode string>", line 11, column 1
When I train the model using terminal commands and a JSON file, it trains successfully. I think I am missing some formatting required for the /model/train API. Can someone tell me where I am going wrong?
I am using Rasa version 1.10.14.
Thank you in advance.
Turns out that the string in config was not proper. It was giving an error when training the model due to the double quotes used with escape characters. I made some tweaks to the config and it trained the model successfully.
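For anyone hitting the same ParserError: the parser complains about line 11 of the YAML, which is where the nested component parameters (analyzer, min_ngram, max_ngram, epochs) sit at the top level instead of being indented under their pipeline entries. A sketch (untested) of what a working config string could look like, with the parameters indented and char_wb left unquoted, which also does away with the escaped double quotes:
# Sketch: nested parameters indented under their pipeline components
# so the string parses as valid YAML.
config = (
    "language: en\n"
    "pipeline:\n"
    "- name: WhitespaceTokenizer\n"
    "- name: RegexFeaturizer\n"
    "- name: LexicalSyntacticFeaturizer\n"
    "- name: CountVectorsFeaturizer\n"
    "- name: CountVectorsFeaturizer\n"
    "  analyzer: char_wb\n"
    "  min_ngram: 1\n"
    "  max_ngram: 4\n"
    "- name: DIETClassifier\n"
    "  epochs: 100\n"
    "- name: EntitySynonymMapper\n"
    "- name: ResponseSelector\n"
    "  epochs: 100"
)
data["config"] = config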
I threw this together earlier today:
import stripe

stripe.api_key = "sk_test_key"

stripe.Token.create(
    card={
        "number": '4242424242424242',
        "exp_month": 12,
        "exp_year": 2018,
        "cvc": '123'
    },
)

stripe.Customer.create(
    description="test test test",
    source="tok_visa"
)

stripe.Customer.list(limit=1)

stripe.Subscription.create(
    customer="id",
    items=[
        {
            "plan": "1",
        },
    ],
)
Running this returns the following:
Traceback (most recent call last):
  File "main.py", line 28, in <module>
    "plan": "1",
  File "/tmp/stripe/api_resources/subscription.py", line 33, in create
    return super(Subscription, cls).create(**params)
  File "/tmp/stripe/api_resources/abstract/createable_api_resource.py", line 17, in create
    response, api_key = requestor.request('post', url, params, headers)
  File "/tmp/stripe/api_requestor.py", line 152, in request
    resp = self.interpret_response(rbody, rcode, rheaders)
  File "/tmp/stripe/api_requestor.py", line 359, in interpret_response
    self.handle_error_response(rbody, rcode, resp, rheaders)
  File "/tmp/stripe/api_requestor.py", line 177, in handle_error_response
    raise err
stripe.error.InvalidRequestError: Request req_ZmLk0oWGjmrkut: No such customer: id
This seems to be an error, or I'm just putting the wrong value in "customer".
Any ideas on what to do?
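The Subscription.create call passes the literal string "id" as the customer, which is exactly what the error message complains about. A minimal sketch of a fix (untested, and assuming a plan with ID "1" exists in the test account) is to capture the customer object returned by Customer.create and use its id:
# Sketch: reuse the id Stripe assigns to the created customer
# instead of the placeholder string "id".
customer = stripe.Customer.create(
    description="test test test",
    source="tok_visa",
)

stripe.Subscription.create(
    customer=customer.id,  # e.g. "cus_...", from the create call above
    items=[{"plan": "1"}],
)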
I have a huge number of keywords written in a file. I put them into an array and ran a query for each item in the array to retrieve the documents that contain any of the keywords. It shows me the number of returned documents for only 50 of them, and at the end I get a couple of errors.
Here is my code:
A subset of keywords:
C0001396 SYNDROME STOKES-ADAMS
C0001396 Syndrome, Adams-Stokes
C0001396 Syndrome, Stokes-Adams
C0002962 3-12 ANGINAL SYNDROMES
C0002962 ANGINA
The code:
from elasticsearch import Elasticsearch
import json

count = 0
keyword_array = []
es = Elasticsearch(['http://IP:9200/'])

with open('localDrive\\C0577785C.txt') as my_keywordfile:
    for keyword in my_keywordfile.readlines():
        keyword_ex = keyword[9:]
        print(keyword_ex)
        keyword_array.append(keyword_ex.strip().strip("'"))

with open('localDrive\\out.txt', 'wb') as f:
    for x in keyword_array:
        doc = {
            "from": 0, "size": 1000000,
            "query": {
                "query_string": {
                    "fields": ["description", "title"],
                    "query": x
                }
            }
        }
        res = es.search(index='INDEXED_REPO', body=doc)
        print("Got %d Hits:" % res['hits']['total'])
        count += 1
        print(count)
        f.write(json.dumps(res).encode("utf-8"))
        f.flush()
f.close()
Errors:
GET http://INDEX_REPO/indexed/_search [status:400 request:0.012s]
Traceback (most recent call last):
  File "localDrive/PycharmProjects/extract_keywords/elastic_serach5.py", line 32, in <module>
    res = es.search(index='INDEXED_REPO', body=doc)
  File "......\Local\Programs\Python\Python36-32\lib\site-packages\elasticsearch\client\utils.py", line 73, in _wrapped
    return func(*args, params=params, **kwargs)
  File "....\AppData\Local\Programs\Python\Python36-32\lib\site-packages\elasticsearch\client\__init__.py", line 623, in search
    doc_type, '_search'), params=params, body=body)
  File "......\AppData\Local\Programs\Python\Python36-32\lib\site-packages\elasticsearch\transport.py", line 312, in perform_request
    status, headers, data = connection.perform_request(method, url, params, body, ignore=ignore, timeout=timeout)
  File "......\AppData\Local\Programs\Python\Python36-32\lib\site-packages\elasticsearch\connection\http_urllib3.py", line 128, in perform_request
    self._raise_error(response.status, raw_data)
  File "......\AppData\Local\Programs\Python\Python36-32\lib\site-packages\elasticsearch\connection\base.py", line 125, in _raise_error
    raise HTTP_EXCEPTIONS.get(status_code, TransportError)(status_code, error_message, additional_info)
elasticsearch.exceptions.RequestError: <exception str() failed>
Any idea why this is happening?
Thanks,
The Elasticsearch query was not in the right format. I changed it to the following and it worked:
doc = {
    "query": {
        "multi_match": {
            "query": x,
            "type": "phrase",
            "fields": ["title", "description"],
            "operator": "and"
        }
    }
}
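The difference matters because query_string runs each keyword through the Lucene query parser, so keywords containing reserved syntax characters come back as 400 parse errors like the one above, while multi_match treats the input as plain text. For completeness, a sketch of the search loop with the corrected body (same file handling as in the question):
# Sketch: the question's loop with the multi_match body swapped in.
with open('localDrive\\out.txt', 'wb') as f:
    for x in keyword_array:
        doc = {
            "query": {
                "multi_match": {
                    "query": x,
                    "type": "phrase",
                    "fields": ["title", "description"],
                    "operator": "and"
                }
            }
        }
        res = es.search(index='INDEXED_REPO', body=doc)
        print("Got %d Hits:" % res['hits']['total'])
        f.write(json.dumps(res).encode("utf-8"))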
I am using a package called lob to standardize a dataset of addresses I have. I have been receiving a 504 gateway error after running through a few thousand addresses. The response error from Lob is the following:
.......Traceback (most recent call last):
File "verify_modified_v2.py", line 82, in <module>
zip_code=row['zip_code'],
File "C:\Users\******\Anaconda2\lib\site-packages\lob\resource.py", line 123, in create
response = requestor.request('post', cls.endpoint, params)
File "C:\Users\******\Anaconda2\lib\site-packages\lob\api_requestor.py", line 84, in request
requests.post(lob.api_base + url, auth=(self.api_key, ''), data=data, files=files, headers=headers)
File "C:\Users\******\Anaconda2\lib\site-packages\lob\api_requestor.py", line 27, in parse_response
resp.content, resp.status_code, resp)
lob.error.APIConnectionError: {
  "error": {
    "message": "GATEWAY_TIMEOUT",
    "status_code": 504
  }
}
I have tried to except this error so that my code can repeatedly contact lob until it can get through without the gateway error:
for idx, row in enumerate(input_csv):
    try:
        verifiedAddress = lob.USVerification.create(
            primary_line=row['primary_line'],
            secondary_line=row['secondary_line'],
            city=row['city'],
            state=row['state'],
            zip_code=row['zip_code'],
        )
        if verifiedAddress.deliverability in success_deliverabilities:
            success_csv.writerow({
                'primary_line': verifiedAddress.primary_line,
                'secondary_line': verifiedAddress.secondary_line,
                'urbanization': verifiedAddress.urbanization,
                'last_line': verifiedAddress.last_line,
                'deliverability': verifiedAddress.deliverability,
                'identifier': row['identifier'],
                'status': row['2']
            })
        else:
            failure_csv.writerow({
                'primary_line': row['primary_line'],
                'secondary_line': row['secondary_line'],
                'city': row['city'],
                'state': row['state'],
                'zip_code': row['zip_code'],
                'deliverability': verifiedAddress.deliverability,
                'identifier': row['identifier'],
                'status': row['2']
            })
        # Print success
        sys.stdout.write('.')
        sys.stdout.flush()
        # New lines for larger csv's
        if idx % 10 is 9:
            sys.stdout.write('\n')
            sys.stdout.flush()
    except lob.error.APIConnectionError:
        print "caught error"
It does not seem that the gateway error is able to be "excepted"; does anyone have any thoughts on a way around this?
My end goal is this:
Bypass the error.
Log the error.
Continue with the next row of the csv file.
Thanks.
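One pattern that should cover all three goals (a sketch, not tested against Lob's API, and assuming lob.error.APIConnectionError is raised exactly as in the traceback above) is to move the create call into a small retry helper that logs each failure and gives up on the row after a few attempts:
import time

def verify_with_retry(row, retries=5, delay=2):
    # Retry the Lob call on gateway timeouts; return None if it never succeeds.
    for attempt in range(1, retries + 1):
        try:
            return lob.USVerification.create(
                primary_line=row['primary_line'],
                secondary_line=row['secondary_line'],
                city=row['city'],
                state=row['state'],
                zip_code=row['zip_code'],
            )
        except lob.error.APIConnectionError as e:
            print('attempt %d failed: %s' % (attempt, e))  # log the error
            time.sleep(delay * attempt)  # back off a little longer each time
    return None

for idx, row in enumerate(input_csv):
    verifiedAddress = verify_with_retry(row)
    if verifiedAddress is None:
        continue  # bypass the error and move on to the next row
    # ... success_csv / failure_csv handling as before ...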