For about the past week I've been trying to wrap my head around the concept of a signed HMAC sha256 request.
In this example I'm just trying to get a list of all current orders.
I thought I'd figured it out but for some reason this still won't work.
The API keys are new...I've tried both Read and Write versions, and my IP is whitelisted.
I'm getting {'code': -1022, 'msg': 'Signature for this request is not valid.'}
My code...
import hmac
import hashlib
import json
import requests
import time
import Credentials
class Private:
def GetAllOrders(pair,orderid='',start='',finish='',limit='',window=''):
# Credentials #
ApiKey = Credentials.Binance.ReadAPIKey
SecretKey = Credentials.Binance.ReadSecretKey
# Base #
BaseURL = 'https://api.binance.com'
EndPoint = '/api/v3/allOrders'
# Required #
Pair = '?symbol='+str(pair)
Time = '&timestamp='+str(int(time.time()*1000))
# Optional #
if orderid != '':
OrderID = '&orderId='+str(orderid)
else:
OrderID = orderid
if start != '':
Start = '&startTime='+str(start*1000)
else:
Start = start
if finish != '':
Finish = '&endTime='+str(finish*1000)
else:
Finish = finish
if limit != '':
Limit = '&limit='+str(limit)
else:
Limit = limit
if window != '':
Window = '&recvWindow='+str(window)
else:
Window = window
# HMAC #
HMAC = hmac.new(bytes(SecretKey.encode('utf-8')),
(Pair+OrderID+Start+Finish+Limit+Window+Time).encode('utf-8'),
hashlib.sha256).hexdigest()
# Signature #
Signature = '&signature='+str(HMAC)
# Headers #
Headers = {'X-MBX-APIKEY': ApiKey}
# Request #
JSON = requests.get(BaseURL+EndPoint+Pair+OrderID+Start+Finish+Limit+Window+Time+Signature,headers=Headers).json()
return JSON
print(Private.GetAllOrders(pair='BTCUSDT'))
Any help would be appreciated...
I figured it out...
The '?' must not be part of the query string that gets signed (the HMAC payload), whereas the request URL does need it.
The following lines should look like this...
# Required #
Pair = 'symbol='+str(pair)
# Request #
JSON = requests.get(BaseURL+EndPoint+'?'+Pair+OrderID+Start+Finish+Limit+Window+Time+Signature,headers=Headers).json()
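For what it's worth, building the query string with urllib.parse.urlencode sidesteps this kind of mistake, because the exact string that gets signed is also the one appended to the URL. A minimal sketch along those lines, reusing the Credentials module from the question:
import hmac
import hashlib
import time
import requests
from urllib.parse import urlencode
import Credentials

def get_all_orders(symbol):
    # Build the query string once; note there is no leading '?' here,
    # so this is exactly the string that gets signed.
    params = {'symbol': symbol, 'timestamp': int(time.time() * 1000)}
    query = urlencode(params)
    signature = hmac.new(Credentials.Binance.ReadSecretKey.encode('utf-8'),
                         query.encode('utf-8'), hashlib.sha256).hexdigest()
    url = 'https://api.binance.com/api/v3/allOrders?' + query + '&signature=' + signature
    return requests.get(url, headers={'X-MBX-APIKEY': Credentials.Binance.ReadAPIKey}).json()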
Related
I'm trying to validate WebApp data but the result is not what I wanted.
Telegram documentation:
data_check_string = ...
secret_key = HMAC_SHA256(<bot_token>, "WebAppData")
if (hex(HMAC_SHA256(data_check_string, secret_key)) == hash) {
// data is from Telegram
}
My code:
BOT_TOKEN = '5139539316:AAGVhDje2A3mB9yA_7l8-TV8xikC7KcudNk'
data_check_string = 'query_id=AAGcqlFKAAAAAJyqUUp6-Y62&user=%7B%22id%22%3A1246866076%2C%22first_name%22%3A%22Dante%22%2C%22last_name%22%3A%22%22%2C%22username%22%3A%22S_User%22%2C%22language_code%22%3A%22en%22%7D&auth_date=1651689536&hash=de7f6b26aadbd667a36d76d91969ecf6ffec70ffaa40b3e98d20555e2406bfbb'
data_check_arr = data_check_string.split('&')
needle = 'hash='
hash_item = ''
telegram_hash = ''
for item in data_check_arr:
if item[0:len(needle)] == needle:
telegram_hash = item[len(needle):]
hash_item = item
data_check_arr.remove(hash_item)
data_check_arr.sort()
data_check_string = "\n".join(data_check_arr)
secret_key = hmac.new("WebAppData".encode(), BOT_TOKEN.encode(), hashlib.sha256).digest()
calculated_hash = hmac.new(data_check_string.encode(), secret_key, hashlib.sha256).hexdigest()
print(calculated_hash == telegram_hash) # print False
I'm trying to validate the web app data in Python, but my code doesn't give the intended result.
The hash my code produces is different from the one Telegram sends.
UPDATE: valid data added, and bot-token has been changed.
You need to unquote data_check_string
from urllib.parse import unquote
data_check_string = unquote('query_id=AAGcqlFKAAAAAJyqUUp6-Y62&user=%7B%22id%22%3A1246866076%2C%22first_name%22%3A%22Dante%22%2C%22last_name%22%3A%22%22%2C%22username%22%3A%22S_User%22%2C%22language_code%22%3A%22en%22%7D&auth_date=1651689536&hash=de7f6b26aadbd667a36d76d91969ecf6ffec70ffaa40b3e98d20555e2406bfbb')
And swap the arguments; hmac.new expects the key first, then the message:
calculated_hash = hmac.new(secret_key, data_check_string.encode(), hashlib.sha256).hexdigest()
You can replace the for-loops with a couple of lines (already incorporates kurdyukovpv's suggestion to unquote the query string):
data_check_string = sorted([ chunk.split("=") for chunk in unquote(data_check_string).split("&")
if chunk[:len("hash=")]!="hash="],
key=lambda x: x[0])
data_check_string = "\n".join([f"{rec[0]}={rec[1]}" for rec in data_check_string])
EDIT: Figured I might as well just post the entire working function I got out of this thread :)
import hmac
import hashlib
from urllib.parse import unquote
def validate(hash_str, init_data, token, c_str="WebAppData"):
"""
Validates the data received from the Telegram web app, using the
method documented here:
https://core.telegram.org/bots/webapps#validating-data-received-via-the-web-app
hash_str - the hash string passed by the webapp
init_data - the query string passed by the webapp
token - Telegram bot's token
c_str - constant string (default = "WebAppData")
"""
init_data = sorted([ chunk.split("=")
for chunk in unquote(init_data).split("&")
if chunk[:len("hash=")]!="hash="],
key=lambda x: x[0])
init_data = "\n".join([f"{rec[0]}={rec[1]}" for rec in init_data])
secret_key = hmac.new(c_str.encode(), token.encode(),
hashlib.sha256 ).digest()
data_check = hmac.new( secret_key, init_data.encode(),
hashlib.sha256)
return data_check.hexdigest() == hash_str
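A quick usage sketch (placeholder values, not real Telegram data): pull the hash parameter out of the raw query string and pass everything to validate:
from urllib.parse import parse_qs

init_data = "query_id=AAA&user=%7B%22id%22%3A1%7D&auth_date=1651689536&hash=abc123"  # placeholder query string
received_hash = parse_qs(init_data)["hash"][0]  # the hash Telegram appended
print(validate(received_hash, init_data, "<bot_token>"))  # True when the data really came from Telegram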
I have this loop in my app.py. For some reason it extends the load time by over 3 seconds. Are there any solutions?
import dateutil.parser as dp
# Converts date from ISO-8601 string to formatted string and returns it
def dateConvert(date):
return dp.parse(date).strftime("%H:%M # %e/%b/%y")
def nameFromID(userID):
if userID is None:
return 'Unknown'
else:
response = requests.get("https://example2.org/" + str(userID), headers=headers)
return response.json()['firstName'] + ' ' + response.json()['lastName']
logs = []
response = requests.get("https://example.org", headers=headers)
for response in response.json():
logs.append([nameFromID(response['member']), dateConvert(response['createdAt'])])
It extends the load time by over 3 seconds because it does a lot of unnecessary work, that's why.
You're not using requests Sessions. Each request will require creating and tearing down an HTTPS connection. That's slow.
You're doing another HTTPS request for each name conversion. (See above.)
You're parsing the JSON you get in that function twice.
Whatever dp.parse() is (dateutil?), it's probably doing a lot of extra work parsing from a free-form string. If you know the input format, use strptime.
Here's a rework that should be significantly faster. Please see the TODO points first, of course.
Also, if you can assume the member id -> name mapping doesn't change, you can make name_cache a suitably named global variable too (but remember it may persist between requests).
import datetime
import requests
INPUT_DATE_FORMAT = "TODO_FILL_ME_IN" # TODO: FILL ME IN.
def dateConvert(date: str):
return datetime.datetime.strptime(date, INPUT_DATE_FORMAT).strftime(
"%H:%M # %e/%b/%y"
)
def nameFromID(sess: requests.Session, userID):
if userID is None:
return "Unknown"
response = sess.get(f"https://example2.org/{userID}")
response.raise_for_status()
data = response.json()
return "{firstName} {lastName}".format_map(data)
def do_thing():
headers = {} # TODO: fill me in
name_cache = {}
with requests.Session() as sess:
sess.headers.update(headers)
logs = []
response = sess.get("https://example.org")
for response in response.json():
member_id = response["member"]
name = name_cache.get(member_id)
if not name:
name = name_cache[member_id] = nameFromID(sess, member_id)
logs.append([name, dateConvert(response["createdAt"])])
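Since the question says the dates are ISO-8601 strings, INPUT_DATE_FORMAT would be something along these lines (an assumption about the exact shape your API returns; adjust to match):
INPUT_DATE_FORMAT = "%Y-%m-%dT%H:%M:%S.%fZ"  # e.g. "2022-05-04T18:32:10.123Z"
On Python 3.11+ you could instead use datetime.datetime.fromisoformat, which accepts most ISO-8601 strings, including a trailing "Z".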
An inefficient version of what I'm trying to do is this:
while(True):
dynamic_variable = http(request) # where http request is subject to change values
method(dynamic_variable)
Where method(dynamic_variable) isn't guaranteed to finish, and if http(request) returns a different value, then method(dynamic_variable) becomes a useless function.
It seems like I should be able to change dynamic_variable more efficiently by having it "automatically" update whenever http(request) changes value.
I think what I want to do is called the "observer pattern" but I'm not quite fluent enough in code to know if that's the correct pattern or how to implement it.
A simple example would be much appreciated!
Edit:
from web3 import Web3
import json
from hexbytes import HexBytes
import numpy as np
import os
import time
INFURA_ROPSTEN_URL = "https://ropsten.infura.io/v3/<api_key>"
# metamask account information
PUBLIC_KEY = "0x3FaD9AccC3A39aDbd9887E82F94602cEA6c7F86f"
PRIVATE_KEY = "myprivatekey"
UNITS_ADDRESS = "units_address"
# from truffle build. For ABI
JSON_PATH = "../truffle/build/contracts/Units.json"
def set_up_web3():
web3 = Web3(Web3.HTTPProvider(INFURA_ROPSTEN_URL))
web3.eth.defaultAccount = PUBLIC_KEY
return web3
def get_units_contract_object():
with open(JSON_PATH, 'r') as json_file:
abi = json.load(json_file)['abi']
return web3.eth.contract(address=UNITS_ADDRESS,abi=abi)
def solve(web3, units):
nonce = np.random.randint(0,1e10)
while True:
challenge_number_hex = HexBytes(units.functions.getChallengeNumber().call()).hex()
my_digest_hex = web3.solidityKeccak(
['bytes32','address','uint256'],
[challenge_number_hex, PUBLIC_KEY, nonce]).hex()
my_digest_number = int(my_digest_hex,0)
target = units.functions.getMiningTarget().call()
if my_digest_number < target:
return (nonce, my_digest_hex)
else:
nonce += 1
def build_transaction(units, nonce, digest_hex, txn_count):
return units.functions.mint(
nonce,
digest_hex
).buildTransaction({
"nonce" : txn_count,
})
if __name__ == "__main__":
web3 = set_up_web3()
txn_count = web3.eth.getTransactionCount(PUBLIC_KEY)
units = get_units_contract_object()
_cycle_goal = 20
_prev_finish = time.time()
_wait_time = 0
while True:
target = units.functions.getMiningTarget().call()
nonce, digest_hex = solve(web3, units)
mint_txn = build_transaction(units, nonce,digest_hex, txn_count)
signed_txn = web3.eth.account.sign_transaction(mint_txn, private_key=PRIVATE_KEY)
txn_address = web3.eth.sendRawTransaction(signed_txn.rawTransaction)
txn_count += 1
print(f"Solution found! nonce={nonce}, digest_hex={digest_hex}")
_finished = time.time()
_elapsed = _finished - _prev_finish
_additional_wait = _cycle_goal - _elapsed
_wait_time += _additional_wait
print(f"Waiting {_wait_time}")
_prev_finish = _finished
time.sleep(_wait_time)
challenge_number_hex is the variable I want to update.
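For reference, here's a minimal sketch of the polling/observer idea described above. The names (PolledValue, fetch, subscribe) are made up for illustration and it isn't tied to the web3 code: a small object polls a callable and only notifies subscribers when the returned value actually changes.
import time

class PolledValue:
    """Polls `fetch` periodically and calls subscribers when the value changes."""
    def __init__(self, fetch, interval=1.0):
        self.fetch = fetch            # e.g. a function wrapping the HTTP request
        self.interval = interval
        self.subscribers = []
        self.value = None

    def subscribe(self, callback):
        self.subscribers.append(callback)

    def run(self):
        while True:
            new_value = self.fetch()
            if new_value != self.value:     # only react to actual changes
                self.value = new_value
                for callback in self.subscribers:
                    callback(new_value)     # e.g. restart work with the new value
            time.sleep(self.interval)

# usage sketch:
# watcher = PolledValue(lambda: units.functions.getChallengeNumber().call(), interval=5)
# watcher.subscribe(lambda v: print("challenge changed:", v))
# watcher.run()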
I'm putting together a Python script to make trades on Poloniex with the API, and so far I've got it to make trades when certain conditions are met, but I still need it to NOT place any more trades for the rest of that day (I have the entire script looping every 60 seconds).
So far I have this script:
import requests
import urllib.request
import urllib.parse
import http.client
import hashlib
import hmac
import time
import json
from urllib.request import urlopen
The_Currency_Pair = input('Which Currency Pair?\nPAIRS TO CHOOSE FROM:\nUSDT_BTC\nUSDT_XRP\nUSDT_ETH\nUSDT_BCH\nUSDT_STR\nUSDT_LTC\nUSDT_ETC\nUSDT_XMR\n')
api = 'https://poloniex.com/tradingApi'
key = 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
secret = 'XXXXXXXXXXXXXXXXXXXXXXXXX'
def main():
poloniexPrices = urlopen('https://poloniex.com/public?command=returnTicker').read()
poloniexjson = json.loads(poloniexPrices)
poloniexlastP = poloniexjson[The_Currency_Pair]['last']
poloniexOCHL = urlopen('https://poloniex.com/public?command=returnChartData&currencyPair=USDT_BCH&start=1538352000&period=86400').read()
poloniexOCHLjson = json.loads(poloniexOCHL)
poloniexlasthigh = poloniexOCHLjson[-2]['high']
print ('Last Price')
print (poloniexlastP)
print ('----------------------------------------')
print ('Last Day High')
print (poloniexlasthigh)
print ('----------------------------------------')
data = {
'command': 'returnBalances',
'nonce' : int(time.time() * 1000)
}
data = urllib.parse.urlencode(data).encode()
signature = hmac.new(secret.encode(), data, hashlib.sha512)
headers = {
'Key' : key,
'Sign': signature.hexdigest()
}
request = urllib.request.Request(
url=api, data=data, headers=headers, method='POST'
)
text = urllib.request.urlopen(request).read().decode()
print ('MY ACCOUNT BALANCE')
try:
print(json.loads(text)['USDT'])
except:
print(text)
print ('-----------------------------------------')
if float(poloniexlastP) > 0:
print ('PLACING TRADE')
print ('-----------------------------------------------')
parms = {"command":"buy",
"currencyPair":The_Currency_Pair,
"rate":100,
"immediateOrCancel":1,
"amount":0.01,
"nonce":int(time.time() * 1000)}
parms = urllib.parse.urlencode(parms).encode()
signature = hmac.new(secret.encode(), parms, hashlib.sha512)
headers = {'Key' : key,
'Sign': signature.hexdigest()}
request = urllib.request.Request(
url=api, data=parms, headers=headers, method='POST')
text = urllib.request.urlopen(request).read().decode()
ordernumber = (json.loads(text)['orderNumber'])
print ('Order Number:')
print (ordernumber)
while True:
main()
time.sleep(60)
Anyway, after a trade has been placed, I need it to make sure that after the 60 second sleep, it doesn't make a second trade unless it is a new day/the day after the trade was made. (Could I use poloniex server time for this?)
So, if it has got as far as print (ordernumber), that means it has placed a trade. But how do I mark that a trade has been placed for the day, and use that in the if float(poloniexlastP) > 0: check on the next loop, so it doesn't place another one?
You probably want a flag of some sort that resets. Maybe something along these lines:
print ('Order Number:')
print (ordernumber)
return True # You've made a trade
return False # You've didn't make a trade
have_traded = False
while True:
if not have_traded:
have_traded = main()
time.sleep(60)
if new_date_function():
have_traded = False
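A concrete way to write that new_date_function check is to remember the date of the last trade and compare it to today's date (a sketch using the local clock; swapping in Poloniex server time would work the same way):
import datetime
import time

last_trade_date = None

while True:
    today = datetime.date.today()
    if last_trade_date != today:        # no trade placed yet today
        if main():                      # main() returns True when it places a trade (see above)
            last_trade_date = today
    time.sleep(60)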
If you are using an application server like Flask, you could easily set up a limiter:
from flask_limiter import Limiter
app = Flask(__name__)
limiter = Limiter(app, global_limits=["100 per hour", "20 per minute"])
and then you can decorate each endpoint with the limiter defined:
#app.route("/slow")
#limiter.limit("1 per day")
def slow():
return "24"
#app.route("/fast")
def fast():
return "42"
#app.route("/ping")
#limiter.exempt
def ping():
return 'PONG'
More detail here: https://github.com/alisaifee/flask-limiter
I have a CSV with keywords in one column and the number of impressions in a second column.
I'd like to provide the keywords in a url (while looping) and for the Google language api to return what type of language was the keyword in.
I have it working manually. If I enter (with the correct api key):
http://ajax.googleapis.com/ajax/services/language/detect?v=1.0&key=myapikey&q=merde
I get:
{"responseData": {"language":"fr","isReliable":false,"confidence":6.213709E-4}, "responseDetails": null, "responseStatus": 200}
which is correct, 'merde' is French.
So far I have this code, but I keep getting server unreachable errors:
import time
import csv
from operator import itemgetter
import sys
import fileinput
import urllib2
import json
E_OPERATION_ERROR = 1
E_INVALID_PARAMS = 2
#not working
def parse_result(result):
"""Parse a JSONP result string and return a list of terms"""
# Deserialize JSON to Python objects
result_object = json.loads(result)
#Get the rows in the table, then get the second column's value
# for each row
return row in result_object
#not working
def retrieve_terms(seedterm):
print(seedterm)
"""Retrieves and parses data and returns a list of terms"""
url_template = 'http://ajax.googleapis.com/ajax/services/language/detect?v=1.0&key=myapikey&q=%(seed)s'
url = url_template % {"seed": seedterm}
try:
with urllib2.urlopen(url) as data:
data = perform_request(seedterm)
result = data.read()
except:
sys.stderr.write('%s\n' % 'Could not request data from server')
exit(E_OPERATION_ERROR)
#terms = parse_result(result)
#print terms
print result
def main(argv):
filename = argv[1]
csvfile = open(filename, 'r')
csvreader = csv.DictReader(csvfile)
rows = []
for row in csvreader:
rows.append(row)
sortedrows = sorted(rows, key=itemgetter('impressions'), reverse = True)
keys = sortedrows[0].keys()
for item in sortedrows:
retrieve_terms(item['keywords'])
try:
outputfile = open('Output_%s.csv' % (filename),'w')
except IOError:
print("The file is active in another program - close it first!")
sys.exit()
dict_writer = csv.DictWriter(outputfile, keys, lineterminator='\n')
dict_writer.writer.writerow(keys)
dict_writer.writerows(sortedrows)
outputfile.close()
print("File is Done!! Check your folder")
if __name__ == '__main__':
start_time = time.clock()
main(sys.argv)
print("\n")
print time.clock() - start_time, "seconds for script time"
Any idea how to finish the code so that it will work? Thank you!
Try to add referrer, userip as described in the docs:
An area to pay special attention to relates to correctly identifying yourself in your requests. Applications MUST always include a valid and accurate http referer header in their requests. In addition, we ask, but do not require, that each request contains a valid API Key. By providing a key, your application provides us with a secondary identification mechanism that is useful should we need to contact you in order to correct any problems. Read more about the usefulness of having an API key.

Developers are also encouraged to make use of the userip parameter (see below) to supply the IP address of the end-user on whose behalf you are making the API request. Doing so will help distinguish this legitimate server-side traffic from traffic which doesn't come from an end-user.
Here's an example based on the answer to the question "access to google with python":
#!/usr/bin/python
# -*- coding: utf-8 -*-
import json
import urllib, urllib2
from pprint import pprint
api_key, userip = None, None
query = {'q' : 'матрёшка'}
referrer = "https://stackoverflow.com/q/4309599/4279"
if userip:
query.update(userip=userip)
if api_key:
query.update(key=api_key)
url = 'http://ajax.googleapis.com/ajax/services/language/detect?v=1.0&%s' %(
urllib.urlencode(query))
request = urllib2.Request(url, headers=dict(Referer=referrer))
json_data = json.load(urllib2.urlopen(request))
pprint(json_data['responseData'])
Output
{u'confidence': 0.070496580000000003, u'isReliable': False, u'language': u'ru'}
Another issue might be that seedterm is not properly quoted:
if isinstance(seedterm, unicode):
value = seedterm
else: # bytes
value = seedterm.decode(put_encoding_here)
url = 'http://...q=%s' % urllib.quote_plus(value.encode('utf-8'))