When I want to send a request to the CoinEx futures API, I get "authorization fail". Can you send me the correct code for Python 3.9?
CoinEx docs:
Authorization
The authorization process is as follows
The input parameter string of the http message is as follows:
market=BTCUSD&type=buy&price=680&amount=1.0&timestamp=1550743431000
Paste the secret_key to the end of the above string as:
market=BTCUSD&type=buy&price=680&amount=1.0&timestamp=1550743431000&secret_key=B51068CF10B34E7789C374AB932696A05E0A629BE7BFC62F
Note: secret_key parameter is not required to send the http message body, this step is just for calculating the sha256 signature.
Perform SHA-256 on the above string, convert it to lowercase hexadecimal (64 characters), and then add this signature to the HTTP header as follows:
Authorization: a174066d9ccbeb33803c2a84e20792d31bed5a6e3da8fca23e38fc8dbb917a13
Add AccessId in the http header, and the server will look for the corresponding user information according to AccessId: 4DA36FFC61334695A66F8D29020EB589
After receiving the http message, the server finds the user’s secret key according to the AccessId, and performs the same operation as above to determine whether the received signature is equal to the signature calculated by itself. If they are equal, the authorization succeeds, otherwise it fails.
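Taken on their own, the documented steps come down to a few lines; this is a minimal sketch using the example values quoted above (placeholders from the docs, not real credentials):
import hashlib

# Example values from the docs above (placeholders, not real credentials)
query = "market=BTCUSD&type=buy&price=680&amount=1.0&timestamp=1550743431000"
secret_key = "B51068CF10B34E7789C374AB932696A05E0A629BE7BFC62F"

# 1. Append the secret key; it is only used for signing and is never sent in the body
to_sign = query + "&secret_key=" + secret_key

# 2. SHA-256, lowercase hex (64 characters); this is the signature
signature = hashlib.sha256(to_sign.encode()).hexdigest()

# 3. This value goes into the Authorization header, alongside AccessId
print(signature)
My code is below: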
import time
import hashlib
import requests

access_id = '5#######################8'
secret_key = 'C##########################################7'
base_url = 'https://api.coinex.com/perpetual/v1'

def get_sign(params, secret_key):
    # Join the params as key=value pairs, append the secret key, then hash with SHA-256
    data = []
    for item in params:
        data.append(item + '=' + str(params[item]))
    str_params = "{0}&secret_key={1}".format('&'.join(data), secret_key)
    token = hashlib.sha256(str_params.encode()).hexdigest().lower()
    return token
def Adjust_Leverage():
    header = {
        'Content-Type': 'application/json; charset=utf-8',
        'Accept': 'application/json',
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.90 Safari/537.36'
    }
    timestamp = int(time.time() * 1000)
    params = {
        'market': 'BTCUSDT',
        'leverage': '10',
        'position_type': 1,
        'timestamp': timestamp}
    header['Authorization'] = get_sign(params, secret_key)
    header['AccessId'] = access_id
    res = requests.post(
        url=f'{base_url}/market/adjust_leverage',
        headers=header,
        json=params
    )
    return res.text
def Market_Order():
    header = {
        'Content-Type': 'application/json; charset=utf-8',
        'Accept': 'application/json',
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.90 Safari/537.36'
    }
    timestamp = int(time.time() * 1000)
    params = {
        'market': 'BTCUSDT',
        'side': 1,
        'amount': '10',
        'timestamp': timestamp}
    header['Authorization'] = get_sign(params, secret_key)
    header['AccessId'] = access_id
    res = requests.post(
        url=f'{base_url}/order/put_market',
        headers=header,
        json=params
    )
    return res.text
print(Adjust_Leverage())
I used the code below, but I got "authorization fail" again:
def get_sign(params, secret_key):
    data = ['='.join([str(k), str(v)]) for k, v in params.items()]
    str_params = "{0}&secret_key={1}".format(
        '&'.join(data), secret_key).encode()
    token = hashlib.sha256(str_params).hexdigest()
    return token
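Both versions of get_sign implement the documented hashing, so one common source of trouble is a mismatch between what gets signed and what gets sent. A hedged debugging sketch (the endpoint and parameters are the ones from the code above; the credentials are placeholders): print the exact string being hashed next to the JSON body, and confirm the timestamp is in milliseconds and that both Authorization and AccessId headers are present.
import json
import time
import hashlib
import requests

base_url = 'https://api.coinex.com/perpetual/v1'
access_id = 'YOUR_ACCESS_ID'      # placeholder
secret_key = 'YOUR_SECRET_KEY'    # placeholder

def sign(params, secret_key):
    # Sign the params in the same order they were inserted into the dict
    query = '&'.join(f'{k}={v}' for k, v in params.items())
    return hashlib.sha256(f'{query}&secret_key={secret_key}'.encode()).hexdigest()

params = {
    'market': 'BTCUSDT',
    'leverage': '10',
    'position_type': 1,
    'timestamp': int(time.time() * 1000),  # milliseconds, not seconds
}
headers = {
    'Content-Type': 'application/json; charset=utf-8',
    'Accept': 'application/json',
    'Authorization': sign(params, secret_key),
    'AccessId': access_id,
}

# If these two lines disagree (order, types, formatting), the server computes a different hash
print('signed string:', '&'.join(f'{k}={v}' for k, v in params.items()))
print('request body :', json.dumps(params))

res = requests.post(f'{base_url}/market/adjust_leverage', headers=headers, json=params)
print(res.status_code, res.text)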
Related
I want to crawl the data from this website: 'http://www.stcn.com/article/search.html?search_type=all&page_time=1'. The website needs cookies from the homepage first, so I first get the cookies it needs from 'http://www.stcn.com/article/search.html' and set them in the request, but it doesn't work after many attempts.
My code looks like this:
import requests

headers = {
    'User-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4664.93 Safari/537.36',
    'Host': 'www.stcn.com'}

def _getStcnCookie(keyWords='all'):
    url = "http://www.stcn.com/article/search.html"
    data = {'keyword': keyWords}
    r = requests.get(url, data, headers=headers, timeout=10)
    if r.status_code != 200:
        return None
    return requests.utils.dict_from_cookiejar(r.cookies)

def searchStcnData(url, keyWords):
    myHeader = dict.copy(headers)
    myHeader['X-Requested-With'] = 'XMLHttpRequest'
    cookies = _getStcnCookie(keyWords=keyWords)
    print(cookies)
    jar = requests.cookies.cookiejar_from_dict(cookies)
    data = {'keyword': 'Paxlovid', 'page_time': 1, 'search_type': 'all'}
    # Option one
    s = requests.Session()
    response = s.post(url, data, headers=myHeader, timeout=5, cookies=cookies)
    print(response.text)
    # Option two
    # myHeader['Cookie'] = 'advanced-stcn_web=potef1789mm5nqgmd6jc1rcih3; path=/; HttpOnly;'+cookiesStr
    # Option three
    r = requests.post(url, data, headers=myHeader, timeout=5, cookies=cookies)
    print(r.json())
    return r.json()

searchStcnData('http://www.stcn.com/article/search.html?search_type=all&page_time=1', 'Paxlovid')
I've tried options 1, 2, and 3 to no avail.
I set cookies in Postman, and setting only 'advanced-stcn_web=5sdfitvu42qggmnjvop4dearj4' gets the data, like this:
{
"state": 1,
"msg": "操作成功",
"data": "<li class=\"\">\n <div class=\"content\">\n <div class=\"tt\">\n <a href=\"/article/detail/769123.html\" target=\"_blank\">\n ......
"page_time": 2
}
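One variant worth trying (a sketch, not a verified fix) is to run both requests through a single requests.Session, so the cookies set by the first GET are reused automatically instead of being copied into a dict by hand; whether the site accepts the request still depends on which cookie it actually checks.
import requests

def search_stcn(keyword):
    s = requests.Session()
    s.headers.update({
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4664.93 Safari/537.36',
        'X-Requested-With': 'XMLHttpRequest',
    })
    # First request: lets the server set its session cookie on this Session's cookie jar
    s.get('http://www.stcn.com/article/search.html', timeout=10)
    # Second request: the stored cookies are sent back automatically
    data = {'keyword': keyword, 'page_time': 1, 'search_type': 'all'}
    r = s.post('http://www.stcn.com/article/search.html?search_type=all&page_time=1',
               data=data, timeout=5)
    return r.json()

print(search_stcn('Paxlovid'))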
This is my code. I don't really know what the issue is; it gives me a 404 every time I run it, even though the URL does exist.
Any help is appreciated.
By the way, I'm pretty new to requests.
The site doesn't have much protection as far as I know. It uses Cloudflare, but not heavily to the point where it checks the browser.
import requests
import randominfo
from requests.models import Response
import random
import string
import time
lenght = 15
lower = string.ascii_lowercase
upper = string.ascii_uppercase
num = string.digits
sym = string.punctuation
all = lower + upper + num + sym
temp = random.sample(all,lenght)
password = "".join(temp)
# NOTE: 'path' is not defined in this snippet; it should point at the config file that holds the catchall value
with open(path, 'r') as configFile:
    catchall = configFile.read()
login = randominfo.get_first_name() + '.' + randominfo.get_last_name() + catchall
passwordInput = password
passwordVeryfication = password
URLGet = 'https://de.afew-store.com'
url = 'https://de.afew-store.com/account/register'
RequestHeaders = {
    'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36',
    'referer': 'https://de.afew-store.com/',
    'content-language': 'de',
    'method': 'POST',
    'server': 'cloudflare',
    'x-content-type-options': 'nosniff',
    'path': 'account',
    'scheme': 'https',
}
getHeaders = {
    'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36',
    'location': 'https://de.afew-store.com/',
    'server': 'cloudflare',
    'x-content-type-options': 'nosniff',
    'path': 'account',
    'scheme': 'https',
}
firstName = randominfo.get_first_name()
lastName = randominfo.get_last_name()
customerEmail = firstName + lastName + catchall
s = requests.session()
signup = s.get(URLGet, headers=getHeaders)
payload = {
    'form_type': 'create_customer',
    'utf8': '✓',
    'customer[tags]': 'lang:en',
    'customer[first_name]': firstName,
    'customer[last_name]': lastName,
    'customer[email]': customerEmail,
    'customer[password]': password,
}
login_info = s.post(url, headers=RequestHeaders, data=payload)
print(login_info.status_code)
The host https://de.afew-store.com is unknown. Maybe you mistyped it:
ping https://de.afew-store.com
ping: cannot resolve https://de.afew-store.com: Unknown host
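Note that ping (and DNS lookups in general) takes a bare hostname, not a URL with a scheme. To check from Python whether the host resolves at all, a small sketch with the standard library ('de.afew-store.com' is simply the host from the question):
import socket

host = 'de.afew-store.com'  # hostname only, no 'https://'
try:
    print(host, '->', socket.gethostbyname(host))
except socket.gaierror as exc:
    print('cannot resolve', host, '-', exc)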
I am trying to harness a Russian language spellcheck API, Yandex.Speller.
The request seems to work fine in my browser. However, when I use a python script, the response is empty.
I am stumped as to what I am doing wrong.
Here is my code:
import urllib
from urllib.request import urlopen
import json

def main():
    api(text_preproc())

def text_preproc():
    """ Takes misspelled word/phrase,
    “t”, and prepares it for
    API request
    """
    t = "синхрафазатрон в дубне"
    text = t.replace(" ", "+")
    return text

def diff_api(text):
    my_url = "https://speller.yandex.net/services/spellservice.json/checkText?text="
    my_headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.105 Safari/537.36'}
    my_data = {
        "text": text,
        "lang": "ru",
        "format": "plain"}
    my_uedata = urllib.parse.urlencode(my_data)
    my_edata = my_uedata.encode('ascii')
    req = urllib.request.Request(url=my_url, data=my_edata, headers=my_headers)
    response = urlopen(req)
    data = json.load(response)
    print(data)
The response is always an empty array, no matter how I tinker with my request.
Any insight into what I might be doing wrong?
my_uedata has to be a part of the URL you send the request to.
Also, in:
def main():
    api(text_preproc())
You call api() but the function is not defined. I've used diff_api().
Try this:
import json
import urllib
from urllib.request import urlopen

def main():
    diff_api(text_preproc("синхрафазатрон в дубне"))

def text_preproc(phrase):
    """ Takes misspelled word/phrase,
    “t”, and prepares it for
    API request
    """
    return phrase.replace(" ", "+")

def diff_api(text):
    my_url = "https://speller.yandex.net/services/spellservice.json/checkText?text="
    my_headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.105 Safari/537.36'}
    my_data = {
        "text": text,
        "lang": "ru",
        "format": "plain"}
    my_uedata = urllib.parse.urlencode(my_data)
    req = urllib.request.Request(url=my_url + my_uedata, headers=my_headers)
    data = json.load(urlopen(req))
    print(data)

main()
Output:
[{'code': 1, 'pos': 5, 'row': 0, 'col': 5, 'len': 14, 'word': 'синхрафазатрон', 's': ['синхрофазотрон', 'синхрофазатрон', 'синхрофазотрона']}]
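For comparison, the same call can be made with requests, which builds and encodes the query string itself; a minimal sketch using the same checkText endpoint and parameters as above:
import requests

resp = requests.get(
    "https://speller.yandex.net/services/spellservice.json/checkText",
    params={"text": "синхрафазатрон в дубне", "lang": "ru", "format": "plain"},
    timeout=10,
)
print(resp.json())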
I'm sending the request below to a URL and getting the response from it:
import requests

url = "http://localhost/dat.txt"
payload = {}
headers = {
    'Upgrade-Insecure-Requests': '1',
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.149 Safari/537.36',
    'Sec-Fetch-Dest': 'document',
    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9'
}
response = requests.request("GET", url, headers=headers, data=payload)
print(response.text.encode('utf8'))
Below is the response data that I get -
mohame4|nameon#example.com|passsd!##$4|head,customer|manager,devlop
mohame3|nameon3#example.com|passsd!##$4|head,customer|manager,devlop
I do this with the data:
for i in response.text:
    try:
        i = i.strip().split('|')
        userna = i[0]
        emaill = i[1]
        passd = i[2]
        rol1 = i[3]
        rol2 = i[4]
    except:
        pass
How can I turn rol1, which is currently the string head,customer, into rol1 = ['head', 'customer']?
Simply split the string you're getting:
rol1 = i[3].split(',')
You could do this more... gracefully, though, using iterable unpacking:
username, email, password, rol1, rol2 = i.strip().split('|')
rol1 = rol1.split(',')
Thanks to all the helpers, especially @ForceBru.
import requests

url = "http://localhost/dat.txt"
response = requests.request("GET", url)
print(response.text)
dat = str(response.text).split('\n')
for i in dat:
    i = i.strip().split('|')
    print(i[3].split(","))
# TODO: write code...
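The same idea in a slightly more structured form (a sketch; the field names are guesses based on the sample rows above): parse each non-empty line into a dict and split both role columns.
sample = (
    "mohame4|nameon#example.com|passsd!##$4|head,customer|manager,devlop\n"
    "mohame3|nameon3#example.com|passsd!##$4|head,customer|manager,devlop"
)

records = []
for line in sample.splitlines():
    if not line.strip():
        continue  # skip blank lines
    username, email, password, rol1, rol2 = line.strip().split('|')
    records.append({
        'username': username,
        'email': email,
        'password': password,
        'rol1': rol1.split(','),  # -> ['head', 'customer']
        'rol2': rol2.split(','),
    })
print(records)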
Below is my code, which retrieves data from the database and puts it into a variable in CSV format, which I then try to append to a GET request URL. However, the GET request returns null because the GET request URL contains an ampersand (&).
The question is: how do I get rid of it?
This is the URL; note the ampersand (&):
https://demo-api.ig.com/gateway/deal/clientsentiment?marketIds=&JGB,BCHUSD,AT20,
import requests
import json
import time
import datetime
import csv
import pandas as pd
import psycopg2
conn_string = "host=' dbname='' user='' password=''"
conn = psycopg2.connect(conn_string)
cursor=conn.cursor()
# Query to source marketIds
postgreSQL_select_Query = "SELECT DISTINCT () FROM static WHERE TYPE!='' AND marketId!='None'"
cursor.execute(postgreSQL_select_Query)
#print("Selecting marketId from table using cursor.fetchall")
instrument_static_marketId = cursor.fetchall()
cursor.execute(postgreSQL_select_Query )
#This puts the sql result into nice CSV format
y=','.join([y[0] for y in cursor.fetchall() ])
print(y)
# closing database connection.
conn.close ()
def main():
    headers = {
        'Connection': 'keep-alive',
        'Origin': 'https://.com',
        'X-IG-API-KEY': '',
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.108 Safari/537.36',
        'Content-Type': 'application/json; charset=UTF-8',
        'Accept': 'application/json; charset=UTF-8',
        'X-SECURITY-TOKEN': '',
        'CST': '',
        'Sec-Fetch-Site': 'same-site',
        'Sec-Fetch-Mode': 'cors',
        'Referer': 'https://',
        'Accept-Encoding': 'gzip, deflate, br',
        'Accept-Language': 'en-GB,en-US;q=0.9,en;q=0.8',
    }
    response = requests.get('https://demo-api.ig.com/gateway/deal/clientsentiment?marketIds=', params=y, headers=headers)
    print(response.url)
    result = response.json()
    print(result)

if __name__ == '__main__':
    main()
You've included part of a parameter in your URL which is incorrect and confused requests.
Leave that off, and pass a dictionary for params, just like you're already doing with headers:
y = 'JGB,BCHUSD,AT20'
params = {
    'marketIds': y,
}
url = 'https://demo-api.ig.com/gateway/deal/clientsentiment'
response = requests.get(url, params=params, headers=headers)
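Printing response.url afterwards shows how requests encoded the query string; the comma-separated IDs stay inside a single marketIds parameter (the commas may show up percent-encoded as %2C, which servers normally treat the same way). With the comma-joined string built from the database query above, the call looks like this (a sketch; headers is the dict already defined in main()):
# y is the comma-joined marketIds string built from the database query, e.g. 'JGB,BCHUSD,AT20'
response = requests.get(
    'https://demo-api.ig.com/gateway/deal/clientsentiment',
    params={'marketIds': y},
    headers=headers,
)
print(response.url)   # e.g. ...clientsentiment?marketIds=JGB%2CBCHUSD%2CAT20
print(response.json())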