Amazon MWS SubmitFeed: Incorrect Signature - python

I've been working on this for a few days now and I'm kind of lost at this point, so I was hoping for a little help to get over this speed bump. I'm pretty new to programming and Python, but I've created a few requests for Amazon's Product API and figured that SubmitFeed can't be that different. I've run the same setup in the MWS Scratchpad and get a different signature than what my code creates. I'm not sure what I'm doing wrong; I've gone over the payload several times and nothing has stood out. Any help would be great.
import urllib.parse
import hmac
import hashlib
import base64
import requests


def get_data():
    global SECRET_KEY
    request_description = '&'.join(['%s=%s' % (k, urllib.parse.quote(payload[k], safe='-_.~')) for k in sorted(payload)])
    # print(request_description)
    sig_data = method + "\n" + domain.replace('https://', '').lower() + "\n" + URI + "\n" + request_description
    sig_data = bytes(sig_data, 'utf-8')
    SECRET_KEY1 = bytes(SECRET_KEY, 'utf-8')
    sig_data = hmac.new(SECRET_KEY1, sig_data, digestmod=hashlib.sha256)
    print(sig_data.hexdigest())
    calc_signature = urllib.parse.quote(base64.b64encode(sig_data.digest()), safe='-_.~')
    sig = calc_signature
    print(sig)
    url = '%s%s?%s&Signature=%s' % (domain, URI, request_description, sig)
    r = requests.post(url)
    file = open(r'C:\Python34\Python34\CompetitorsPrices.xml', 'w+')
    print(r.text, end="", file=file)
    file.close()
SECRET_KEY = '**'
AWS_ACCESS_KEY = '**'
SELLER_ID = '**'
MARKETPLACE_ID = '**'

Action = 'SubmitFeed'
SignatureMethod = 'HmacSHA256'
SignatureVersion = '2'
Timestamp = '2017-02-17T18:51:08Z'
Version = '2009-01-01'
URI = '/'
domain = 'https://mws.amazonservices.com'
method = 'POST'

payload = {'AWSAccessKeyId': AWS_ACCESS_KEY,
           'Action': Action,
           'Merchant': SELLER_ID,
           'SignatureVersion': SignatureVersion,
           'Timestamp': Timestamp,
           'Version': Version,
           'SignatureMethod': SignatureMethod,
           'MarketplaceIdList.Id.1': MARKETPLACE_ID,
           'FeedType': '_POST_FLAT_FILE_PRICEANDQUANTITYONLY_UPDATE_DATA_',
           'PurgeAndReplace': 'false'}

get_data()
print('Press Enter to Exit')

Related

How do I set the python docusign_esign ApiClient to use a proxy?

I am using the following examples from the docusign site.
I have a set of python scripts that works well on my PC.
I have to move the code to a server behind a proxy.
I could not find any example or settings to configure a proxy.
I tried setting it in the underlying urllib3 code, but it is overwritten each time the API creates an instance of ApiClient().
How do I set the python docusign_esign ApiClient to use a proxy?
Below is the portion of the code.
from docusign_esign import ApiClient
from docusign_esign import EnvelopesApi
from jwt_helper import get_jwt_token, get_private_key

# this one has all the connection parameters
from jwt_config import DS_JWT

import datetime
from datetime import timedelta
import os
import zipfile
import urllib3

proxy = urllib3.ProxyManager('http://<id>:<pwd>#<proxy_server>:3128/', maxsize=10)

# used by docusign to decide what you have access to
SCOPES = ["signature", "impersonation"]


# Call the envelope status change method to list the envelopes changed in the last 10 days
def worker(args):
    api_client = ApiClient()
    api_client.host = args['base_path']
    api_client.set_default_header("Authorization", "Bearer " + args['access_token'])
    envelope_api = EnvelopesApi(api_client)

    # The Envelopes::listStatusChanges method has many options
    # The list status changes call requires at least a from_date OR
    # a set of envelopeIds. Here we filter using a from_date.
    # Here we set the from_date to filter envelopes for the last month
    # Use ISO 8601 date format
    from_date = (datetime.datetime.utcnow() - timedelta(days=120)).isoformat()
    results = envelope_api.list_status_changes(args['account_id'], from_date=from_date)
    return results, envelope_api


# Call request_jwt_user_token method
def get_token(private_key, api_client):
    token_response = get_jwt_token(private_key, SCOPES, DS_JWT["authorization_server"],
                                   DS_JWT["ds_client_id"], DS_JWT["ds_impersonated_user_id"])
    access_token = token_response.access_token

    # Save API account ID
    user_info = api_client.get_user_info(access_token)
    accounts = user_info.get_accounts()
    api_account_id = accounts[0].account_id
    base_path = accounts[0].base_uri + "/restapi"
    return {"access_token": access_token, "api_account_id": api_account_id, "base_path": base_path}


# bucket to keep track of token info
def get_args(api_account_id, access_token, base_path):
    args = {
        "account_id": api_account_id,
        "base_path": base_path,
        "access_token": access_token
    }
    return args


# start the actual code here: create and then set up the object
api_client = ApiClient()
api_client.set_base_path(DS_JWT["authorization_server"])
api_client.set_oauth_host_name(DS_JWT["authorization_server"])
api_client.rest_client.pool_manager.proxy = proxy
api_client.rest_client.pool_manager.proxy.scheme = "http"

private_key = get_private_key(DS_JWT["private_key_file"]).encode("ascii").decode("utf-8")
jwt_values = get_token(private_key, api_client)
args = get_args(jwt_values["api_account_id"], jwt_values["access_token"], jwt_values["base_path"])
account_id = args["account_id"]

# return the envelope list and api_client object created to get it
results, envelope_api = worker(args)
print("We found " + str(results.result_set_size) + " sets of files")

for envelope in results.envelopes:
    envelope_id = envelope.envelope_id
    print("Extracting " + envelope_id)

    # The SDK always stores the received file as a temp file; you can not set the path for this
    # Call the envelope get method
    temp_file = envelope_api.get_document(account_id=account_id, document_id="archive",
                                          envelope_id=envelope_id)
    if temp_file:
        print("File is here " + temp_file)
        with zipfile.ZipFile(temp_file, 'r') as zip_ref:
            zip_ref.extractall(extract_dir + envelope_id + "\\")  # extract_dir is defined elsewhere in the script
            zip_ref.close()
        print("Done extracting " + envelope_id + " deleting zip file")
        os.remove(temp_file)
        print("Deleted file here " + temp_file)
    else:
        print("Failed to get data for " + envelope_id)

HP ALM results attachment and status update using python

Challenge: attach screenshots to tests in the Test Lab and update the status as PASS/FAIL step-wise (currently, updating the pass status is enough).
I am expected to write a script in Python to attach test results to the test cases present in the Test Lab, then for each test step set the Expected result to "As Expected" and pass the TC step by step.
That is, when performing this manually, we select the case, click Run, enter "As Expected" in the expected output area, pass that step, and repeat this for all test steps in the test case. This needs to be automated. I have a folder of screenshots (named similarly to the TC names), so the script should upload the screenshots and update the status.
What I have tried so far:
I was able to connect to ALM and, with a partial test case name, pull the full test case name from the Test Plan, but unfortunately I am still struggling to achieve the final goal.
My code so far:
import win32com
from win32com.client import Dispatch
import codecs
import re
import json

# Login Credentials
qcServer = "https://almurl.saas.microfocus.com/qcbin/"
qcUser = "my_username"
qcPassword = "pwd"
qcDomain = "domain"
testList = []
testdict = {}
project = "Crew_Management"

# Do the actual login
td = win32com.client.Dispatch("TDApiOle80.TDConnection.1")
td.InitConnectionEx(qcServer)
td.Login(qcUser, qcPassword)
td.Connect(qcDomain, project)

if td.Connected == True:
    print("System: Logged in to " + project)
else:
    print("Connect failed to " + project)

mg = td.TreeManager  # Tree manager

name = ['TC001', 'TC002', 'TC003', 'TC003', 'TC004', 'TC005', 'TC006', 'TC007', 'TC008', 'TC009', 'TC010', 'TC011', 'TC012', 'TC013', 'TC014']
folder = mg.NodeByPath('Subject\\Test Factory\\MPG\\MPG Regression Test_Yearly Request\\GUI')

for x in name:
    testList = folder.FindTests(x)
    # print(type(testList))
    print(testList[0].Name)
    print(testList[0].DesStepsNum)

td.Disconnect()
td.Logout()
Any help or guidance is much appreciated!
Assuming that you have working experience in Python, here are all the different functions needed to complete your task.
Reference: https://admhelp.microfocus.com/alm/api_refs/REST_TECH_PREVIEW/ALM_REST_API_TP.html
Global Variables
import re
import json
import datetime
import time
import sys
import os, fnmatch
from os import listdir
from os.path import isfile, join
from xml.etree.ElementTree import Element, SubElement, tostring, parse
import glob
from requests.auth import HTTPBasicAuth
import requests
ALM_USER_NAME = ""
ALM_PASSWORD = ""
ALM_DOMAIN = ""
ALM_URL = ""
AUTH_END_POINT = ALM_URL + "authentication-point/authenticate"
QC_SESSION_END_POINT = ALM_URL + "rest/site-session"
QC_LOGOUT_END_POINT = ALM_URL + "authentication-point/logout"
ALM_MIDPOINT = "rest/domains/" + ALM_DOMAIN + "/projects/"
PATH_SEP = os.path.sep
Login Function
def alm_login(self):
    """
    Function    : alm_login
    Description : Authenticate user
    Parameters  : global parameters
        alm_username - ALM User
        alm_password - ALM Password
    """
    response = self.alm_session.post(AUTH_END_POINT,
                                     auth=HTTPBasicAuth(ALM_USER_NAME, ALM_PASSWORD))
    if response.status_code == 200:
        response = self.alm_session.post(QC_SESSION_END_POINT)
        if response.status_code == 200 or response.status_code == 201:
            print "ALM Authentication successful"
        else:
            print "Error: ", response.status_code
    else:
        print "Error: ", response.status_code
    self.alm_session.headers.update({'Accept': 'application/json',
                                     'Content-Type': 'application/xml'})
    return
Logout Function
After the logout method succeeds, the cookie should expire.
def alm_logout(self):
    '''
    Function    : alm_logout
    Description : terminate user session
    Parameters  : No Parameters
    '''
    response = self.alm_session.post(QC_LOGOUT_END_POINT)
    print "Logout successful", response.headers.get('Expires'), response.status_code
    return
Get Test Set Folder
If the test cases span across multiple test suites then it is better to get the test set folder first and find the necessary test suite.
def find_test_set_folder(self):
    '''
    Function    : find_test_set_folder
    Description : Find the id of the test set folder from the test set path
    Parameters  : 1 Parameter
        test_set_path - ALM test set path
    '''
    json_str = json.loads(self.find_folder_id(self.test_set_path.split("\\"), "test-set-folders",
                                              0, "id"))
    if 'entities' in json_str:
        return create_key_value(json_str['entities'][0]['Fields'])['id']
    else:
        return create_key_value(json_str['Fields'])['id']
Get Folder Id
This method will help you find the Test Suite Folder ID or Test Plan Folder Id.
def find_folder_id(self, arrfolder, str_api, parent_id, fields):
    '''
    Function    : find_folder_id
    Description : Walk a folder path and return the id of the last folder,
                  creating any folder that does not yet exist
    Parameters  : 4 Parameters
        arrfolder - folder path split into a list
        str_api   - end point name (e.g. "test-set-folders")
        parent_id - id of the folder to start from
        fields    - fields to return
    '''
    for foldername in arrfolder:
        payload = {"query": "{name['" + foldername + "'];parent-id[" + str(parent_id) + "]}",
                   "fields": fields}
        response = self.alm_session.get(ALM_URL + ALM_MIDPOINT + "/" + str_api, params=payload)
        obj = json.loads(response.text)
        if obj["TotalResults"] >= 1:
            parent_id = get_field_value(obj['entities'][0]['Fields'], "id")
            # print("folder id of " + foldername + " is " + str(parent_id))
        else:
            # print("Folder " + foldername + " does not exist")
            inputdata = dict()
            inputdata['Type'] = str_api[0:len(str_api) - 1]
            inputdata['name'] = foldername
            inputdata['parent-id'] = str(parent_id)
            data = generate_xml_data(inputdata)
            response = self.alm_session.post(ALM_URL + ALM_MIDPOINT + "/" + str_api, data=data)
            obj = json.loads(response.text)
            if response.status_code == 200 or response.status_code == 201:
                parent_id = get_field_value(obj['Fields'], "id")
                # print("folder id of " + foldername + " is " + str(parent_id))
    return response.text
Create Run Instance
Before updating the testing status, we must create a run instance for the test.
def create_run_instance(self, test_set_id, test_map):
    '''
    Function    : create_run_instance
    Description : Create new run instances
    Parameters  : Test Set Id
    '''
    str_api = "test-instances"
    fields = "id,test-id,test-config-id,cycle-id"
    payload = {"query": "{cycle-id['" + test_set_id + "']}", "fields": fields,
               "page-size": 5000}
    response = self.alm_session.get(ALM_URL + ALM_MIDPOINT + "/" + str_api, params=payload)
    obj = json.loads(response.text)

    run_instance_post = "<Entities>"
    for entity in obj["entities"]:
        run_name = re.sub('[-:]', '_',
                          'automation_' + datetime.datetime.fromtimestamp(time.time()).strftime(
                              '%Y-%m-%d %H:%M:%S'))
        temp_map = create_key_value(entity["Fields"])
        _test_id = int(temp_map['test-id'])
        self.parser_temp_dic[_test_id]['testcycl-id'] = temp_map['id']
        self.parser_temp_dic[_test_id]['test-config-id'] = temp_map['test-config-id']
        self.parser_temp_dic[_test_id]['test-id'] = temp_map['test-id']
        self.parser_temp_dic[_test_id]['cycle-id'] = temp_map['cycle-id']
        # parser_temp_dic[int(temp_map['test-id'])]['status'].sort()
        status = "Passed"
        if 'Failed' in self.parser_temp_dic[int(temp_map['test-id'])]['status']:
            status = 'Failed'
        self.parser_temp_dic[int(temp_map['test-id'])]['final-status'] = status

        inputdata = dict()
        inputdata['Type'] = 'run'
        inputdata['name'] = run_name
        inputdata['owner'] = ALM_USER_NAME
        inputdata['test-instance'] = str(1)
        inputdata['testcycl-id'] = str(temp_map['id'])
        inputdata['cycle-id'] = str(temp_map['cycle-id'])
        inputdata['status'] = 'Not Completed'
        inputdata['test-id'] = temp_map['test-id']
        inputdata['subtype-id'] = 'hp.qc.run.MANUAL'
        data = generate_xml_data(inputdata)
        run_instance_post = run_instance_post + data

    self.bulk_operation("runs", run_instance_post + "</Entities>", True, "POST")
    return
Update Run Instance
def update_run_instance(self, test_set_id):
    '''
    Function    : update_run_instance
    Description : Update the test status in run instances
    Parameters  : No input parameter
    '''
    fields = "id,test-id"
    payload = {"query": "{cycle-id['" + test_set_id + "']}", "fields": fields,
               "page-size": 5000}
    response = self.alm_session.get(ALM_URL + ALM_MIDPOINT + "/runs", params=payload)
    obj = json.loads(response.text)

    run_instance_put = "<Entities>"
    for entity in obj["entities"]:
        if len(entity["Fields"]) != 1:
            temp_map = create_key_value(entity["Fields"])
            self.parser_temp_dic[int(temp_map['test-id'])]['run-id'] = temp_map['id']
            inputdata = dict()
            inputdata['Type'] = 'run'
            inputdata['id'] = str(temp_map['id'])
            intermediate_ = self.parser_temp_dic[int(temp_map['test-id'])]['testcycl-id']
            inputdata['testcycl-id'] = str(intermediate_)
            inputdata['status'] = self.parser_temp_dic[int(temp_map['test-id'])]['final-status']
            data = generate_xml_data(inputdata)
            run_instance_put = run_instance_put + data

    self.bulk_operation("runs", run_instance_put + "</Entities>", True, "PUT")
    return
Upload Result File
Uploading file to any object in ALM
def upload_result_file(self, test_set_id, report_file):
    '''
    Function    : upload_result_file
    Description : Upload test result to ALM
    '''
    payload = open(report_file, 'rb')
    headers = {}
    headers['Content-Type'] = "application/octet-stream"
    headers['slug'] = "test-results" + report_file[report_file.rfind(".") + 1:]
    response = self.alm_session.post(ALM_URL + ALM_MIDPOINT + "/test-sets/" +
                                     str(test_set_id) + "/attachments/",
                                     headers=headers, data=payload)
    if not (response.status_code == 200 or response.status_code == 201):
        print "Attachment step failed!", response.text, response.url, response.status_code
    return
Bulk Operation
This is a helper that allows us to POST an array of data.
def bulk_operation(self, str_api, data, isbulk, request_type):
    '''
    Function    : Post Test Case / Test Instance
    Description : Generic function to post multiple entities.
    Parameters  : 4 parameters
        str_api      - End point name
        data         - Actual data to post
        isbulk       - True or False
        request_type - POST or PUT
    '''
    response = None
    headers = {}
    try:
        if isbulk:
            headers['Content-Type'] = "application/xml;type = collection"
        if request_type == 'POST':
            response = self.alm_session.post(ALM_URL + ALM_MIDPOINT + "/" + str_api, data=data,
                                             headers=headers)
        elif request_type == 'PUT':
            response = self.alm_session.put(ALM_URL + ALM_MIDPOINT + "/" + str_api, data=data,
                                            headers=headers)
    except Exception as err:
        print err
    if response.status_code == 200 or response.status_code == 201:
        return response.text
    return response
You can refer to this code:
https://github.com/arunprabusamy/Python-Libraries/blob/main/alm_RestAPI/almLib.py
You need to send only three values: Test Set ID (Cycle ID), ALM Test ID, and Execution Status. The library automatically builds the JSON payload, creates a test run, and updates the result.
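Purely as an illustration of that three-value interface, a call might look roughly like the sketch below; the function name and signature here are hypothetical, so check almLib.py in the linked repository for the real ones.
# Hypothetical sketch only -- the actual helper name and signature live in almLib.py
import almLib

test_set_id = 501      # Cycle ID of the test set in the Test Lab
alm_test_id = 2001     # ID of the ALM test to update
status = "Passed"      # execution status to record

almLib.update_execution_status(test_set_id, alm_test_id, status)  # hypothetical function name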

Nested JSON Values cause "TypeError: Object of type 'int64' is not JSON serializable"

Would love some help here. For full context, this is my first "purposeful" Python script. Prior to this I've only dabbled a bit, and I'm honestly still learning, so maybe I jumped in a bit too early here.
Long story short, I've been running all over fixing various type mismatches and general indentation issues (dear lord, Python isn't forgiving on this).
I think I'm about finished but have a few last issues, and most of them seem to come from the same section. This script is just meant to take a CSV file that has 3 columns and use it to send requests based on the first column (either iOS or Android). The problem is when I'm creating the body to send...
Here's the code (a few tokens omitted for postability):
#!/usr/bin/python
# -*- coding: utf-8 -*-

import requests
import json
import pandas as pd
from tqdm import tqdm
from datetime import *
import uuid
import warnings
from math import isnan
import time


## throttling based on AF's 80 request per 2 minute rule
def throttle():
    i = 0
    while i <= 3:
        print("PAUSED FOR THROTTLING!" + "\n" + str(3 - i) + " minutes remaining")
        time.sleep(60)
        i = i + 1
        print(i)
    return 0


## function for reformating the dates
def date():
    d = datetime.utcnow()  # <-- get time in UTC
    d = d.isoformat('T') + 'Z'
    t = d.split('.')
    t = t[0] + 'Z'
    return str(t)


## function for dealing with Android requests
def android_request(madv_id, mtime, muuid, android_app, token, endpoint):
    headers = {'Content-Type': 'application/json', 'Accept': 'application/json'}
    params = {'api_token': token}
    subject_identities = {
        "identity_format": "raw",
        "identity_type": "android_advertising_id",
        "identity_value": madv_id
    }
    body = {
        'subject_request_id': muuid,
        'subject_request_type': 'erasure',
        'submitted_time': mtime,
        'subject_identities': dict(subject_identities),
        'property_id': android_app
    }
    body = json.dumps(body)
    res = requests.request('POST', endpoint, headers=headers,
                           data=body, params=params)
    print("android " + res.text)


## function for dealing with iOS requests
def ios_request(midfa, mtime, muuid, ios_app, token, endpoint):
    headers = {'Content-Type': 'application/json',
               'Accept': 'application/json'}
    params = {'api_token': token}
    subject_identities = {
        'identity_format': 'raw',
        'identity_type': 'ios_advertising_id',
        'identity_value': midfa,
    }
    body = {
        'subject_request_id': muuid,
        'subject_request_type': 'erasure',
        'submitted_time': mtime,
        'subject_identities': list(subject_identities),
        'property_id': ios_app,
    }
    body = json.dumps(body)
    res = requests.request('POST', endpoint, headers=headers, data=body, params=params)
    print("ios " + res.text)


## main run function. Determines whether it is iOS or Android request and sends if not LAT-user
def run(output, mdf, is_test):
    # assigning variables to the columns I need from file
    print('Sending requests! Stand by...')
    platform = mdf.platform
    device = mdf.device_id
    if is_test == "y":
        ios = 'id000000000'
        android = 'com.tacos.okay'
        token = 'OMMITTED_FOR_STACKOVERFLOW_Q'
        endpoint = 'https://hq1.appsflyer.com/gdpr/stub'
    else:
        ios = 'id000000000'
        android = 'com.tacos.best'
        token = 'OMMITTED_FOR_STACKOVERFLOW_Q'
        endpoint = 'https://hq1.appsflyer.com/gdpr/opengdpr_requests'
    for position in tqdm(range(len(device))):
        if position % 80 == 0 and position != 0:
            throttle()
        else:
            req_id = str(uuid.uuid4())
            timestamp = str(date())
            if platform[position] == 'android' and device[position] != '':
                android_request(device[position], timestamp, req_id, android, token, endpoint)
                mdf['subject_request_id'][position] = req_id
            if platform[position] == 'ios' and device[position] != '':
                ios_request(device[position], timestamp, req_id, ios, token, endpoint)
                mdf['subject_request_id'][position] = req_id
            if 'LAT' in platform[position]:
                mdf['subject_request_id'][position] = 'null'
                mdf['error status'][position] = 'Limit Ad Tracking Users Unsupported. Device ID Required'
    mdf.to_csv(output, sep=',', index=False, header=True)
    # mdf.close()
    print('\nDONE. Please see ' + output
          + ' for the subject_request_id and/or error messages\n')


## takes the CSV given by the user and makes a copy of it for us to use
def read(mname):
    orig_csv = pd.read_csv(mname)
    mdf = orig_csv.copy()
    # Check that both dataframes are actually the same
    # print(pd.DataFrame.equals(orig_csv, mdf))
    return mdf


## just used to create the renamed file with _LOGS.csv
def rename(mname):
    msuffix = '_LOG.csv'
    i = mname.split('.')
    i = i[0] + msuffix
    return i


## adds relevant columns to the log file
def logs_csv(out, df):
    mdf = df
    mdf['subject_request_id'] = ''
    mdf['error status'] = ''
    mdf['device_id'].fillna('')
    mdf.to_csv(out, sep=',', index=None, header=True)
    return mdf


## solely for reading in the file name from the user. creates string out of filename
def readin_name():
    mprefix = input('FILE NAME: ')
    msuffix = '.csv'
    mname = str(mprefix + msuffix)
    print('\n' + 'Reading in file: ' + mname)
    return mname


def start():
    print('\nWelcome to GDPR STREAMLINE')
    # blue = OpenFile()
    testing = input('Is this a test? (y/n) : ')
    # return a CSV
    name = readin_name()
    import_csv = read(name)
    output_name = rename(name)
    output_file = logs_csv(output_name, import_csv)
    run(output_name, output_file, testing)
    # print("FILE PATH:" + blue)


## to disable all warnings in console logs
warnings.filterwarnings('ignore')
start()
And here's the error stacktrace:
Reading in file: test.csv
Sending requests! Stand by...
0%| | 0/384 [00:00<?, ?it/s]
Traceback (most recent call last):
File "a_GDPR_delete.py", line 199, in <module>
start()
File "a_GDPR_delete.py", line 191, in start
run( output_name, output_file, testing)
File "a_GDPR_delete.py", line 114, in run
android_request(device[position], timestamp, req_id, android, token, endpoint)
File "a_GDPR_delete.py", line 57, in android_request
body = json.dumps(body)
File "/Users/joseph/anaconda3/lib/python3.6/json/__init__.py", line 231, in dumps
return _default_encoder.encode(obj)
File "/Users/joseph/anaconda3/lib/python3.6/json/encoder.py", line 199, in encode
chunks = self.iterencode(o, _one_shot=True)
File "/Users/joseph/anaconda3/lib/python3.6/json/encoder.py", line 257, in iterencode
return _iterencode(o, 0)
File "/Users/joseph/anaconda3/lib/python3.6/json/encoder.py", line 180, in default
o.__class__.__name__)
TypeError: Object of type 'int64' is not JSON serializable
TL;DR:
Getting a TypeError when calling json.dumps on a body dict that contains another nested dict. I've confirmed that the nested part is the problem, because if I remove the "subject_identities" section it serializes and works... but the API I'm using NEEDS those values, so this doesn't actually do anything without that section.
Here's the relevant code again (and in the version I first used that WAS working previously):
def android(madv_id, mtime, muuid):
    headers = {
        "Content-Type": "application/json",
        "Accept": "application/json"
    }
    params = {
        "api_token": "OMMITTED_FOR_STACKOVERFLOW_Q"
    }
    body = {
        "subject_request_id": muuid,  # muuid,
        "subject_request_type": "erasure",
        "submitted_time": mtime,
        "subject_identities": [
            {"identity_type": "android_advertising_id",
             "identity_value": madv_id,
             "identity_format": "raw"}
        ],
        "property_id": "com.tacos.best"
    }
    body = json.dumps(body)
    res = requests.request("POST",
                           "https://hq1.appsflyer.com/gdpr/opengdpr_requests",
                           headers=headers, data=body, params=params)
I get the feeling I'm close to having this working. I had a much simpler version early on that worked, but I rewrote it to be more dynamic and use fewer hard-coded values (so that I can eventually apply it to any app I'm working with, not only the two it was made for).
Please be nice, I'm entirely new to Python and also just rusty on coding in general (thus trying to do projects like this one).
You can check for numpy dtypes like so:
if hasattr(obj, 'dtype'):
    obj = obj.item()
This will convert it to the closest equivalent data type.
EDIT:
Apparently np.nan is JSON serializable so I've removed that catch from my answer
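That check plugs neatly into json.dumps as a default hook, so any stray NumPy scalar gets unwrapped at serialization time; a minimal sketch:
import json
import numpy as np

def to_builtin(obj):
    # json.dumps calls this only for objects it cannot serialize itself;
    # NumPy scalars (int64, float64, ...) expose .item() to return the native Python value.
    if hasattr(obj, 'dtype'):
        return obj.item()
    raise TypeError('Object of type %s is not JSON serializable' % type(obj).__name__)

body = {'property_id': np.int64(42)}
print(json.dumps(body, default=to_builtin))  # -> {"property_id": 42}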
Thanks to everyone for helping so quickly here. Apparently I was deceived by the error message, as the fix from #juanpa.arrivillaga did the job with one adjustment.
The corrected code was in these parts:
android_request(str(device[position]), timestamp, req_id, android, token, endpoint)
and here:
ios_request(str(device[position]), timestamp, req_id, ios, token, endpoint)
I had to cast to string, apparently, even though these values are not originally integers and tend to look like this instead: ab12ab12-12ab-34cd-56ef-1234abcd5678

401: Authorization Required when retrieving Request Token from Twitter API

I'm working on a Django application which retrieves some data from Twitter, and I need to work with the requests manually since it's for an exam whose main focus is for us to learn how to work with network protocols such as HTTP. So please don't suggest that I use a wrapper.
My issue is that whenever I try to request the initial request token (with a POST request that is referenced in this doc and also here), I receive a 401: Authorization Required, and I have no clue what could be causing it, since I'm including the Authorization header in exactly the format required by the docs I've linked. It may be that while working with urllib I don't manage to make the POST request as I intended, or perhaps there's something wrong in how I built the signature or where I've placed it in the header, even though in my opinion it should be alright.
Here's my code to handle the request to "sign in with Twitter":
def sign(request):
    url = 'https://api.twitter.com/oauth/request_token'
    # I store consumer key, consumer secret and callback url in a txt
    dir_path = path.dirname(path.realpath(__file__))
    with open(path.join(dir_path, 'credentials.txt'), 'r') as TweetCred:
        creds = TweetCred.read().split(linesep)
        oauth_consumer_key = creds[0].split()[1]
        oauth_callback = creds[1].split()[1]
        consumer_secret = creds[2].split()[1]
    oauth_nonce = ''.join(SystemRandom().choice(string.ascii_letters + string.digits) for _ in range(32))
    # Desperate to understand what could be causing the error, I tried to use an integer timestamp since it was an integer in the examples
    oauth_timestamp = int(time.time())
    parameter_string = ('oauth_nonce=' + urllib.parse.quote(oauth_nonce, safe='') +
                        '&oauth_callback=' + urllib.parse.quote(oauth_callback, safe='') +
                        '&oauth_signature_method=' + urllib.parse.quote('HMAC-SHA1', safe='') +
                        '&oauth_timestamp=' + urllib.parse.quote(str(oauth_timestamp), safe='') +
                        '&oauth_consumer_key=' + urllib.parse.quote(oauth_consumer_key, safe='') +
                        '&oauth_version=' + urllib.parse.quote('1.0', safe='')
                        )
    sigbase = 'POST' + '&' + urllib.parse.quote(url, safe='') + '&' + urllib.parse.quote(parameter_string, safe='')
    signkey = urllib.parse.quote(consumer_secret, safe='') + '&'
    key = bytes(signkey, 'UTF-8')
    base = bytes(sigbase, 'UTF-8')
    digester = hmac.new(key, base, hashlib.sha1)
    binsignature = digester.digest()
    signature = urllib.parse.quote(base64.urlsafe_b64encode(binsignature), safe='')
    oauth_header = ('Oauth ' + 'oauth_nonce="%s", ' % urllib.parse.quote(oauth_nonce, safe='') +
                    'oauth_callback="%s", ' % urllib.parse.quote(oauth_callback, safe='') +
                    'oauth_signature_method="%s", ' % urllib.parse.quote('HMAC-SHA1', safe='') +
                    'oauth_timestamp="%s", ' % urllib.parse.quote(str(oauth_timestamp), safe='') +
                    'oauth_consumer_key="%s", ' % urllib.parse.quote(oauth_consumer_key, safe='') +
                    'oauth_version="%s", ' % urllib.parse.quote('1.0', safe='') +
                    'oauth_signature="%s"' % signature
                    )
    headers = {'Authorization': oauth_header}
    # urllib wants a body to know that it's a POST, at least that's what the docs said, so I'm providing an empty one
    values = {}
    data = urllib.parse.urlencode(values).encode('ascii')
    TokenRequest = urllib.request.Request(url, data, headers)
    try:
        print('opening request token url...')
        with urllib.request.urlopen(TokenRequest) as response:
            if response.getcode() != 200:
                print('Response is %s' % response.getcode())
                return HttpResponse('Error in getting the token from Twitter, please try again...')
            body = loads(response.read())
            if body['oauth_callback_confirmed'] != 'true':
                print('oauth_callback not confirmed')
                return HttpResponse('Error in getting the token from Twitter, please try again...')
            oauth_token = body['oauth_token']
            oauth_token_secret = body['oauth_token_secret']
    except urllib.error.HTTPError as err:
        # my program always ends up here, catches the exception and returns the line below
        return HttpResponse('Error %s in getting the token from Twitter, please try again...' % err)
    print('Successfully retrieved request token! Redirecting...')
    loggee = User.objects.Create()
    loggee.oauth_token = oauth_token
    loggee.oauth_token_secret = oauth_token_secret
    return HttpResponseRedirect('https://api.twitter.com/oauth/authenticate?oauth_token=' + oauth_token)
Any help will be appreciated! I'm quite in a hurry and I really can't wrap my head around this!
Your signature base string needs to be sorted lexicographically (which means alphabetically, for all intents and purposes) before you calculate your signature (ref: Creating a signature).
Therefore your parameter string needs to be:
parameter_string = (
    'oauth_callback=' + urllib.parse.quote(oauth_callback, safe='') +
    '&oauth_consumer_key=' + urllib.parse.quote(oauth_consumer_key, safe='') +
    '&oauth_nonce=' + urllib.parse.quote(oauth_nonce, safe='') +
    '&oauth_signature_method=' + urllib.parse.quote('HMAC-SHA1', safe='') +
    '&oauth_timestamp=' + urllib.parse.quote(str(oauth_timestamp), safe='') +
    '&oauth_version=' + urllib.parse.quote('1.0', safe='')
)
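Rather than hand-ordering the concatenation, a small helper can percent-encode and sort the parameters in one place, so the base string stays correct as parameters are added; a sketch using only the standard library:
import urllib.parse

def build_parameter_string(oauth_params):
    # Percent-encode keys and values, then join them in lexicographic key order,
    # as the OAuth 1.0a signature base string requires.
    encoded = {
        urllib.parse.quote(str(k), safe=''): urllib.parse.quote(str(v), safe='')
        for k, v in oauth_params.items()
    }
    return '&'.join('%s=%s' % (k, encoded[k]) for k in sorted(encoded))

# placeholder values, just to show the ordering that comes out
print(build_parameter_string({
    'oauth_nonce': 'abc123',
    'oauth_callback': 'http://example.com/callback',
    'oauth_version': '1.0',
}))
# oauth_callback=http%3A%2F%2Fexample.com%2Fcallback&oauth_nonce=abc123&oauth_version=1.0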

Oauth1.0 API issue with Python

I'm trying to get magiccardmarket.eu API authentication to work in Python, but no matter whether I'm using rauth or requests_oauthlib, I get 403.
My code is:
#!/usr/bin/python

import logging
import rauth
import requests_oauthlib

logging.basicConfig(level=logging.DEBUG)

mkm_app_token = 'B7VI9Qg2xh855WtR'
mkm_app_secret = '<cut>'
mkm_access_token = 'LQj2rUwOFUJsmuJvCTlny1UzGZSXzHjo'
mkm_token_secret = '<cut>'

url = 'https://sandbox.mkmapi.eu/ws/v1.1/account'

# session = rauth.OAuth1Session(
#     consumer_key=mkm_app_token,
#     consumer_secret=mkm_app_secret,
#     access_token=mkm_access_token,
#     access_token_secret=mkm_token_secret,
# )

session = requests_oauthlib.OAuth1Session(
    mkm_app_token,
    client_secret=mkm_app_secret,
    resource_owner_key=mkm_access_token,
    resource_owner_secret=mkm_token_secret,
)

r = session.get(url)
print(r)
When I look at debugging info, everything seems fine (of course besides 403 response):
DEBUG:requests_oauthlib.oauth1_auth:Signing request <PreparedRequest [GET]> using client <Client nonce=None, signature_method=HMAC-SHA1, realm=None, encoding=utf-8, timestamp=None, resource_owner_secret=****, decoding=utf-8, verifier=None, signature_type=AUTH_HEADER, rsa_key=None, resource_owner_key=LQj2rUwOFUJsmuJvCTlny1UzGZSXzHjo, client_secret=****, callback_uri=None, client_key=B7VI9Qg2xh855WtR>
DEBUG:requests_oauthlib.oauth1_auth:Including body in call to sign: False
DEBUG:oauthlib.oauth1.rfc5849:Collected params: [(u'oauth_nonce', u'87129670621454425921416648590'), (u'oauth_timestamp', u'1416648590'), (u'oauth_consumer_key', u'B7VI9Qg2xh855WtR'), (u'oauth_signature_method', u'HMAC-SHA1'), (u'oauth_version', u'1.0'), (u'oauth_token', u'LQj2rUwOFUJsmuJvCTlny1UzGZSXzHjo')]
DEBUG:oauthlib.oauth1.rfc5849:Normalized params: oauth_consumer_key=B7VI9Qg2xh855WtR&oauth_nonce=87129670621454425921416648590&oauth_signature_method=HMAC-SHA1&oauth_timestamp=1416648590&oauth_token=LQj2rUwOFUJsmuJvCTlny1UzGZSXzHjo&oauth_version=1.0
DEBUG:oauthlib.oauth1.rfc5849:Normalized URI: https://sandbox.mkmapi.eu/ws/v1.1/account
DEBUG:oauthlib.oauth1.rfc5849:Base signing string: GET&https%3A%2F%2Fsandbox.mkmapi.eu%2Fws%2Fv1.1%2Faccount&oauth_consumer_key%3DB7VI9Qg2xh855WtR%26oauth_nonce%3D87129670621454425921416648590%26oauth_signature_method%3DHMAC-SHA1%26oauth_timestamp%3D1416648590%26oauth_token%3DLQj2rUwOFUJsmuJvCTlny1UzGZSXzHjo%26oauth_version%3D1.0
DEBUG:oauthlib.oauth1.rfc5849:Signature: 291LTesHZR6W4bjZ1NqSW5hEgoM=
DEBUG:oauthlib.oauth1.rfc5849:Encoding URI, headers and body to utf-8.
DEBUG:requests_oauthlib.oauth1_auth:Updated url: https://sandbox.mkmapi.eu/ws/v1.1/account
DEBUG:requests_oauthlib.oauth1_auth:Updated headers: {'Accept': '*/*', 'Connection': 'keep-alive', 'Accept-Encoding': 'gzip, deflate', 'Authorization': 'OAuth oauth_nonce="87129670621454425921416648590", oauth_timestamp="1416648590", oauth_version="1.0", oauth_signature_method="HMAC-SHA1", oauth_consumer_key="B7VI9Qg2xh855WtR", oauth_token="LQj2rUwOFUJsmuJvCTlny1UzGZSXzHjo", oauth_signature="291LTesHZR6W4bjZ1NqSW5hEgoM%3D"', 'User-Agent': 'python-requests/2.4.3 CPython/2.7.8 Darwin/14.0.0'}
DEBUG:requests_oauthlib.oauth1_auth:Updated body: None
INFO:requests.packages.urllib3.connectionpool:Starting new HTTPS connection (1): sandbox.mkmapi.eu
DEBUG:requests.packages.urllib3.connectionpool:"GET /ws/v1.1/account HTTP/1.1" 403 None
This is not an issue with the authentication details, which are provided on the account profile page when you request dedicated application API access, since those details work fine with the PHP example provided by the site: https://www.mkmapi.eu/ws/documentation/API:Auth_libcurl
When I go through the site's documentation, nothing seems out of the ordinary: https://www.mkmapi.eu/ws/documentation/API:Auth_Overview
I honestly don't know where to go from here...
I realized that the code above with requests_oauthlib didn't build the header the way it is laid out in the documentation, so I ended up reinventing the wheel and building the header myself, following the steps outlined here: https://www.mkmapi.eu/ws/documentation/API:Auth_OAuthHeader
The following script is not very beautiful, but it does its job.
import requests
from urllib import quote_plus as rawurlencode
import time
import string
import random
import operator
from hashlib import sha1
from hmac import new as hmac


def id_generator(size=6, chars=string.ascii_uppercase + string.digits):
    return ''.join(random.choice(chars) for _ in range(size))


# personal Info - taken from https://www.mkmapi.eu/ws/documentation/API:Auth_Overview
mkmAppToken = 'bfaD9xOU0SXBhtBP'
mkmAppSecret = 'pChvrpp6AEOEwxBIIUBOvWcRG3X9xL4Y'
mkmAccessToken = 'lBY1xptUJ7ZJSK01x4fNwzw8kAe5b10Q'
mkmAccessSecret = 'hc1wJAOX02pGGJK2uAv1ZOiwS7I9Tpoe'

# Url to access on mkm
# note that this deviates from the example in the header documentation (https://www.mkmapi.eu/ws/documentation/API:Auth_OAuthHeader) which uses
# accessUrl = 'https://www.mkmapi.eu/ws/v1.1/account'
accessUrl = 'https://www.mkmapi.eu/ws/v1.1/output.json/account'

# Method for access
MyMethod = "GET"

baseString = MyMethod + "&" + rawurlencode(accessUrl) + "&"

# create a random string
# the documentation in https://www.mkmapi.eu/ws/documentation/API:Auth_OAuthHeader uses
# nonce = 53eb1f44909d6
nonce = id_generator(8)

# what time is it?
# the documentation in https://www.mkmapi.eu/ws/documentation/API:Auth_OAuthHeader uses
# now = 1407917892
now = str(int(time.time()))

MyOauthmethod = "HMAC-SHA1"
MyOauthver = "1.0"

# define Parameters and values, order doesn't matter
paramDict = {"oauth_consumer_key": mkmAppToken,
             "oauth_token": mkmAccessToken,
             "oauth_nonce": nonce,
             "oauth_timestamp": now,
             "oauth_signature_method": MyOauthmethod,
             "oauth_version": MyOauthver}

# sorting of parameters is done here
sorted_paramDict = sorted(paramDict.items(), key=operator.itemgetter(0))

# collect the full parameters string
paramStr = ''
for kv in sorted_paramDict:
    paramStr = paramStr + kv[0] + "=" + kv[1] + "&"
# and get rid of the trailing ampersand
paramStr = paramStr[:-1]

# concatenate request and oauth parameters
baseString = baseString + rawurlencode(paramStr)

# concatenate both keys
signingKey = rawurlencode(mkmAppSecret) + "&" + rawurlencode(mkmAccessSecret)

# and create a hashed signature with the key and the baseString
Signature = hmac(signingKey, baseString, sha1).digest().encode('base64')[:-1]

# construct the header from the parameters and the URL and the signature
MyHeader = 'OAuth ' + 'realm="' + accessUrl + '", '
for kv in sorted_paramDict:
    MyHeader += kv[0] + '="' + kv[1] + '",'
MyHeader += 'oauth_signature="' + Signature + '"'

headers = {'Authorization': MyHeader}

# and now requests can do its magic (pun intended)
r = requests.get(accessUrl, headers=headers)
outjson = r.json()
You need to provide the realm as an argument to the OAuth1Session, like so:
session = requests_oauthlib.OAuth1Session(
    mkm_app_token,
    client_secret=mkm_app_secret,
    resource_owner_key=mkm_access_token,
    resource_owner_secret=mkm_token_secret,
    realm=url
)
Other things I have run into in the past include the fact that the mkm api doesn't (or at least didn't) accept URI-escaped parameters, so you may need to unescape them.
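If you hit that, a quick way to strip the escaping before a value goes into the request is the standard library's unquote; a minimal sketch (Python 3):
from urllib.parse import unquote

raw = 'Birds%20of%20Paradise'   # value that arrived already URI-escaped
print(unquote(raw))             # Birds of Paradise -- send this unescaped form instead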
For anyone who's reading in 2020: there's no need to reinvent the wheel, just pass the OAuth header and the parameters to requests. Here's an example with metaproducts/find:
import requests
from requests_oauthlib import OAuth1
import json
import passwords

card_name = 'Tarmogoyf'
output = 'output.json'

base_url = 'https://api.cardmarket.com/ws/v2.0/' + output + '/'
url = base_url + 'metaproducts/find'
params = {'search': card_name}

headeroauth = OAuth1(
    realm=url,
    client_key=passwords.mkm_app_token,
    client_secret=passwords.mkm_app_secret,
    resource_owner_key=passwords.mkm_access_token,
    resource_owner_secret=passwords.mkm_token_secret,
)

response = requests.get(
    url,
    params,
    auth=headeroauth
)

if response.ok == True:
    json_response = response.json()
    print(json.dumps(json_response, indent=4, sort_keys=True))
else:
    print(str(response.status_code) + " " + response.reason)
    exit()
The /output.json/ part of the string makes the output JSON instead of XML
