This is code to send an invoice via the MyFatoorah API, but I cannot add a for loop to it to fill in each product's name, price, and quantity. How do I deal with this so I can add the products and the other data? I tried to add a for loop, but it didn't work.
###########Send Payment###########
import requests

baseURL = "https://apitest.myfatoorah.com"
token = 'rLtt6JWvbUHDDhsZnfpAhpYk4dxYDQkbcPTyGaKp2TYqQgG7FGZ5Th_WD53Oq8Ebz6A53njUoo1w3pjU1D4vs_ZMqFiz_j0urb_BH9Oq9VZoKFoJEDAbRZepGcQanImyYrry7Kt6MnMdgfG5jn4HngWoRdKduNNyP4kzcp3mRv7x00ahkm9LAK7ZRieg7k1PDAnBIOG3EyVSJ5kK4WLMvYr7sCwHbHcu4A5WwelxYK0GMJy37bNAarSJDFQsJ2ZvJjvMDmfWwDVFEVe_5tOomfVNt6bOg9mexbGjMrnHBnKnZR1vQbBtQieDlQepzTZMuQrSuKn-t5XZM7V6fCW7oP-uXGX-sMOajeX65JOf6XVpk29DP6ro8WTAflCDANC193yof8-f5_EYY-3hXhJj7RBXmizDpneEQDSaSz5sFk0sV5qPcARJ9zGG73vuGFyenjPPmtDtXtpx35A-BVcOSBYVIWe9kndG3nclfefjKEuZ3m4jL9Gg1h2JBvmXSMYiZtp9MR5I6pvbvylU_PP5xJFSjVTIz7IQSjcVGO41npnwIxRXNRxFOdIUHn0tjQ-7LwvEcTXyPsHXcMD8WtgBh-wxR8aKX7WPSsT1O8d8reb2aR7K3rkV3K82K_0OgawImEpwSvp9MNKynEAJQS6ZHe_J_l77652xwPNxMRTMASk1ZsJL'
def send_payment():
url = baseURL + "/v2/SendPayment"
payload = "{\"CustomerName\": \"Ahmed\",\"NotificationOption\": \"ALL\",\"MobileCountryCode\": \"+965\"," \
"\"CustomerMobile\": \"12345678\",\"CustomerEmail\": \"xx#yy.com\",\"InvoiceValue\": 100," \
"\"DisplayCurrencyIso\": \"KWD\",\"CallBackUrl\": \"https://google.com\",\"ErrorUrl\": " \
"\"https://google.com\",\"Language\": \"en\",\"CustomerReference\": \"ref 1\",\"CustomerCivilId\": " \
"12345678,\"UserDefinedField\": \"Custom field\",\"ExpireDate\": \"\",\"CustomerAddress\": {\"Block\": " \
"\"\",\"Street\": \"\",\"HouseBuildingNo\": \"\",\"Address\": \"\",\"AddressInstructions\": \"\"}," \
"\"InvoiceItems\": [{\"ItemName\": \"Product 01\",\"Quantity\": 1,\"UnitPrice\": 100}]} "
headers = {'Content-Type': "application/json", 'Authorization': "Bearer " + token}
response = requests.request("POST", url, data=payload, headers=headers)
print("Send Payment Response:\n" + response.text)
I tried this and made a for loop, but it didn't work:
url = baseURL + "/v2/SendPayment"
sss={'ItemName': 'product 01',
'Quantity': 30,
'UnitPrice': 10,},
payload={
"CustomerName": "name", # Mandatory Field ("string")
"NotificationOption": "SMS", # Mandatory Field ("LNK", "SMS", "EML", or "ALL")
"InvoiceValue": 300, # Mandatory Field (Number)
# Optional Fields
"MobileCountryCode": "+966",
"CustomerMobile": "12345678", #Mandatory if the NotificationOption = SMS or ALL
# "CustomerEmail": "mail#company.com", #Mandatory if the NotificationOption = EML or ALL
"DisplayCurrencyIso": "kwd",
"CallBackUrl": "https://yoursite.com/success",
"ErrorUrl": "https://yoursite.com/error",
"Language": "ar",
# "CustomerReference": "noshipping-nosupplier",
# "CustomerAddress": {
# "Block": "string",
# "Street": "string",
# "HouseBuildingNo": "string",
# "Address": "address",
# "AddressInstructions": "string"
# },
"InvoiceItems": [
sss
]
}
payload=str(payload)
print(f"this is pyload: {payload}")
print(f"this is sss: {sss}")
headers = {'Content-Type': "application/json", 'Authorization': "Bearer " + token}
response = requests.request("POST", url, data=payload2, headers=headers)
print("Send Payment Response:\n" + response.text)
Don't convert payload into a string with str(); keep it as a dict. Either pass it to requests with the json= parameter, which serializes the dict and sets the application/json Content-Type for you, or serialize it yourself with data=json.dumps(payload). Also note that the final call posts payload2, which is never defined; it should use the payload you built.
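For example, here is a minimal sketch of building InvoiceItems in a loop and letting requests handle the JSON encoding. The products list and its keys are made up for illustration, baseURL and token are the ones defined above, and InvoiceValue is computed from the items (the API generally expects the invoice total and the items to match):

import requests

# Illustrative product data -- replace with however you store your products.
products = [
    {"name": "Product 01", "price": 10, "qty": 3},
    {"name": "Product 02", "price": 25, "qty": 1},
]

# Build the InvoiceItems list with a loop instead of hard-coding one item.
invoice_items = []
for p in products:
    invoice_items.append({
        "ItemName": p["name"],
        "Quantity": p["qty"],
        "UnitPrice": p["price"],
    })

payload = {
    "CustomerName": "name",
    "NotificationOption": "SMS",
    # Sum of Quantity * UnitPrice over all items.
    "InvoiceValue": sum(i["Quantity"] * i["UnitPrice"] for i in invoice_items),
    "MobileCountryCode": "+966",
    "CustomerMobile": "12345678",
    "DisplayCurrencyIso": "KWD",
    "CallBackUrl": "https://yoursite.com/success",
    "ErrorUrl": "https://yoursite.com/error",
    "Language": "ar",
    "InvoiceItems": invoice_items,
}

headers = {"Authorization": "Bearer " + token}
# json= serializes the dict and sets the application/json Content-Type for you.
response = requests.post(baseURL + "/v2/SendPayment", json=payload, headers=headers)
print("Send Payment Response:\n" + response.text)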
I have a Python script which makes a call to the Jira API to create a Jira issue.
The API call itself works fine: if I test it with Postman I am able to create tickets. However, the same URL does not work from Python, and I do not understand why I am not able to create a secure connection to Jira.
Executed [createIssue] action via OEC[integrationId: f457bfd9-5fe0-4fc5-89a9-ee007e85cf1b integrationType: Jira] with errors. Reason: Err: exit status 1, Stderr:
Traceback (most recent call last):
  File "/home/opsgenie/oec_test/scripts/actionExecutor.py", line 279, in <module>
    main()
  File "/home/opsgenie/oec_test/scripts/actionExecutor.py", line 233, in main
    timeout=timeout)
  File "/usr/lib/python2.7/dist-packages/requests/api.py", line 112, in post
    return request('post', url, data=data, json=json, **kwargs)
  File "/usr/lib/python2.7/dist-packages/requests/api.py", line 58, in request
    return session.request(method=method, url=url, **kwargs)
  File "/usr/lib/python2.7/dist-packages/requests/sessions.py", line 520, in request
    resp = self.send(prep, **send_kwargs)
  File "/usr/lib/python2.7/dist-packages/requests/sessions.py", line 630, in send
    r = adapter.send(request, **kwargs)
  File "/usr/lib/python2.7/dist-packages/requests/adapters.py", line 508, in send
    raise ConnectionError(e, request=request)
requests.exceptions.ConnectionError: HTTPSConnectionPool(host='jiratest.gk.gk-software.com', port=443): Max retries exceeded with url: /rest/api/2/issue (Caused by NewConnectionError('<urllib3.connection.VerifiedHTTPSConnection object at 0x7ffa264aa1d0>: Failed to establish a new connection: [Errno -2] Name or service not known',))
The complete code looks like this:
import argparse
import json
import logging
import sys
reload(sys)
sys.setdefaultencoding('utf8')
import re
import requests
from requests.auth import HTTPBasicAuth
parser = argparse.ArgumentParser()
parser.add_argument('-payload', '--queuePayload', help='Payload from queue', required=True)
parser.add_argument('-apiKey', '--apiKey', help='The apiKey of the integration', required=True)
parser.add_argument('-opsgenieUrl', '--opsgenieUrl', help='The url', required=True)
parser.add_argument('-logLevel', '--logLevel', help='Level of log', required=True)
parser.add_argument('-username', '--username', help='Username', required=False)
parser.add_argument('-password', '--password', help='Password', required=False)
parser.add_argument('-url', '--url', help='URL', required=False)
parser.add_argument('-projectKey', '--projectKey', help='Project Key', required=False)
parser.add_argument('-issueTypeName', '--issueTypeName', help='Issue Type', required=False)
args = vars(parser.parse_args())
logging.basicConfig(stream=sys.stdout, level=args['logLevel'])
def parse_field(key, mandatory):
variable = queue_message.get(key)
if not variable:
variable = args.get(key)
if mandatory and not variable:
logging.error(LOG_PREFIX + " Skipping action, Mandatory conf item '" + key +
"' is missing. Check your configuration file.")
raise ValueError(LOG_PREFIX + " Skipping action, Mandatory conf item '" + key +
"' is missing. Check your configuration file.")
return variable
def parse_timeout():
parsed_timeout = args.get('http.timeout')
if not parsed_timeout:
return 30000
return int(parsed_timeout)
def get_alert_details(alertId):
alert_api_url = args['opsgenieUrl'] + "/v2/alerts/" + alertId
headers = {
"Content-Type": "application/json",
"Accept-Language": "application/json",
"Authorization": "GenieKey " + args['apiKey']
}
req = requests.get(alert_api_url, headers=headers)
alert = req.json()
return alert["data"]
def get_transition_id(request_headers, jira_url, transition_name, token):
transition_id = str()
response = requests.get(jira_url, None, headers=request_headers, auth=token, timeout=timeout)
try:
body = response.json()
if body and response.status_code < 299:
transition_list = body["transitions"]
for transition in transition_list:
to = transition['to']
if transition_name == to['name']:
transition_id = transition['id']
logging.info(LOG_PREFIX + " Successfully executed at Jira")
logging.debug(LOG_PREFIX + " Jira response: " + str(response.status_code) + " " + str(response.content))
else:
logging.error(
LOG_PREFIX + " Could not execute at Jira; response: " + str(response.content) + " status code: " + str(
response.status_code))
if not transition_id:
logging.debug(LOG_PREFIX + " Transition id is empty")
return transition_id
except ValueError:
logging.error("The response body is not a valid json object!")
def get_comp():
jira_comp = re.search(r"topic:\s\'(.*)[']", str(queue_message.get("description")))
if jira_comp:
jira_comp = jira_comp.group(1)
return (jira_comp)
else:
jira_comp = "Dummy"
return (jira_comp)
def get_prio():
severity = re.search(r"severity:\s\'(.*)[']",queue_message.get("description"))
if severity:
jira_prio = severity.group(1)
if jira_prio == "critical":
jira_prio = "Very High"
return (jira_prio)
if jira_prio == "Very High":
jira_prio = "High"
return (jira_prio)
else:
severity = "High"
return (severity)
def get_context():
context = re.search(r"context:\s\'(.*)[']", str(queue_message.get("description")))
if context:
context = context.group(1)
return (context)
else:
context = ""
return (context)
def main():
global LOG_PREFIX
global queue_message
global timeout
global to_customfield_20500; to_customfield_20500=[]
global project_to_customfield_20500
global cluster_to_customfield_20500
queue_message_string = args['queuePayload']
queue_message_string = queue_message_string.strip()
queue_message = json.loads(queue_message_string)
alert_id = queue_message["alertId"]
mapped_action = queue_message["mappedActionV2"]["name"]
alert_details = get_alert_details(alert_id)
LOG_PREFIX = "[" + mapped_action + "]"
logging.info("Will execute " + mapped_action + " for alertId " + alert_id)
timeout = parse_timeout()
url = parse_field('url', True)
username = parse_field('username', True)
password = parse_field('password', True)
project_key = parse_field('projectKey', False)
issue_type_name = parse_field('issueTypeName', False)
issue_key = queue_message.get("key")
logging.debug("Url: " + str(url))
logging.debug("Username: " + str(username))
logging.debug("Project Key: " + str(project_key))
logging.debug("Issue Type: " + str(issue_type_name))
logging.debug("Issue Key: " + str(issue_key))
content_params = dict()
token = HTTPBasicAuth(username, password)
headers = {
"Content-Type": "application/json",
"Accept-Language": "application/json",
}
result_url = url + "/rest/api/2/issue"
if mapped_action == "addCommentToIssue":
content_params = {
"body": queue_message.get('body')
}
result_url += "/" + issue_key + "/comment"
elif mapped_action == "createIssue":
getcontext = get_context()
getcomp = get_comp()
priority = get_prio()
content_params = {
"fields": {
"project": {"key": project_key},
"issuetype": {"name": issue_type_name},
"summary": queue_message.get("summary"),
"description": queue_message.get("description"),
"customfield_20500": [{"value": "DE - Germany"}],
"customfield_13604": "tbd",
"components": [{"name": getcomp}],
"versions": [{"name": "tbd"}],
"customfield_15000": [getcontext],
"priority": {"name": priority},
"assignee": {"name": "#cloudoperations"}
}
}
elif mapped_action == "resolveIssue":
result_url += "/" + issue_key + "/transitions"
content_params = {
"transition": {
"id": get_transition_id(headers, result_url, "Resolved", token)
},
"fields": {
"resolution": {
"name": "Done"
}
}
}
elif mapped_action == "closeIssue":
result_url += "/" + issue_key + "/transitions"
content_params = {
"transition": {
"id": get_transition_id(headers, result_url, "Closed", token)
},
"fields": {
"resolution": {
"name": "Done"
}
}
}
elif mapped_action == "issueDone":
result_url += "/" + issue_key + "/transitions"
content_params = {
"transition": {
"id": get_transition_id(headers, result_url, "Done", token)
}
}
elif mapped_action == "inProgressIssue":
result_url += "/" + issue_key + "/transitions"
content_params = {
"transition": {
"id": get_transition_id(headers, result_url, "In Progress", token)
}
}
logging.debug(str(content_params))
response = requests.post(result_url, data=json.dumps(content_params), headers=headers, auth=token,
timeout=timeout)
if response.status_code < 299:
logging.info("Successfully executed at Jira")
if mapped_action == "createIssue":
try:
response_body = response.json()
if response_body:
issue_key_from_response = response_body['key']
if issue_key_from_response:
alert_api_url = args['opsgenieUrl'] + "/v2/alerts/" + alert_id + "/details"
content = {
"details":
{
"issueKey": issue_key_from_response
}
}
headers = {
"Content-Type": "application/json",
"Accept-Language": "application/json",
"Authorization": "GenieKey " + args['apiKey']
}
logging.debug(str(alert_api_url) + str(content) + str(headers))
alert_response = requests.post(alert_api_url,
data=json.dumps(content), headers=headers,
timeout=timeout)
if alert_response.status_code < 299:
logging.info(LOG_PREFIX + " Successfully sent to Opsgenie")
logging.debug(
LOG_PREFIX + " Jira response: " + str(alert_response.content) + " " + str(
alert_response.status_code))
else:
logging.warning(
LOG_PREFIX + " Could not execute at Opsgenie; response: " + str(
alert_response.content) + " status code: " + str(alert_response.status_code))
else:
logging.warning(
LOG_PREFIX + " Jira response is empty")
except ValueError:
logging.error(ValueError)
else:
logging.warning(
LOG_PREFIX + " Could not execute at Jira; response: " + str(response.content) + " status code: " + str(
response.status_code))
if __name__ == '__main__':
main()
I'm not sure either, but I suggest using the Jira Python library (the jira package), which in turn uses the requests library.
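For reference, a minimal sketch of creating an issue with that library; the server URL, credentials, project key, and issue type below are placeholders:

from jira import JIRA  # pip install jira

# Placeholder connection details -- substitute your own server and credentials.
jira = JIRA(server="https://jiratest.example.com", basic_auth=("username", "password"))

# Placeholder project key and issue type.
issue = jira.create_issue(
    project="PROJ",
    summary="Alert from Opsgenie",
    description="Created by the OEC action script",
    issuetype={"name": "Bug"},
)
print(issue.key)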
I found out it was a connectivity issue: the Jira URL was not reachable from the VM where the Python script is executed.
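Since the traceback ends in "Name or service not known" (a DNS failure), a quick check like the sketch below, run from the VM itself, would have shown that; the hostname is the one from the error message:

import socket
import requests

host = "jiratest.gk.gk-software.com"   # hostname taken from the error message

try:
    print(socket.gethostbyname(host))               # does DNS resolve on this machine?
    r = requests.get("https://" + host, timeout=10)
    print(r.status_code)                            # is the server reachable at all?
except (socket.gaierror, requests.exceptions.ConnectionError) as exc:
    print("Not reachable from this machine:", exc)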
I have the exact same question as the one below:
Convert a column of json strings into columns of data
I followed the suggested solutions and I'm getting the error below. What am I doing incorrectly? I'm learning to use Python and to make API requests.
df['json'] = df['json'].map(address',': dict(eval(address)))
                                   ^
SyntaxError: invalid syntax
Below is my code
import requests
import json
import pandas as pd
import dictionary as dict
Base_url = 'MY_URL'
TOKEN_EndPoint = Base_url + 'token'
Account_EndPoint = Base_url + 'MY_URL'
data = {
'username': 'MY_USERNAME',
'password': 'MY_PASSWORD',
'grant_type': 'MY_GRANT_TYPE'
}
def main():
results = requests.post(url=TOKEN_EndPoint, data=data)
MyToken = results.json()['access_token']
print(MyToken)
MyInputs = GetSourceAddress()
callData = {
'ClientKey': 'MY_CLIENTKEY',
'StreetName': MyInputs['StreetName'],
'CityName' : MyInputs['CityName'],
# 'StateCode' : MyInputs['StateCode'],
'PostalCode' : MyInputs['PostalCode'],
'ManagerVersion': '2'
}
PostFields = json.dumps(callData)
MyHeader = {'Authorization': 'Bearer ' + MyToken,
'content-type': 'application/json'}
results = requests.post(url = Account_EndPoint, data = PostFields,
headers = MyHeader)
address = results.json()
# results.json() = pd.DataFrame(address)
df = pd.DataFrame(['address'], columns=['json'])
df['json'] = df['json'].map(address',': dict(eval(address)))
address = df['json'].apply(pd.Series)
for address in address:
print(address)
def GetSourceAddress():
MyInputs = {
'StreetName': 'MY_STREETNAME',
'CityName': 'MY_CITYNAME',
# 'StateCode': 'MY_STATE',
'PostalCode': 'MY_ZIPCODE',
# 'Unit': 'UNIT #'
}
return MyInputs
def GetAddressFrom****(result):
inputs = {
'StreetName': result['ShippingStreet'],
'CityName': result['BillingCity'],
'StateCode': result['BillingState'],
'PostalCode': result['BillingPostalCode'],
# 'Unit': ''
}
return inputs
main()
Thanks
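For what it's worth, a minimal sketch of one way to expand a column of JSON strings into columns, assuming each cell holds one JSON object as a string; the sample addresses are made up for illustration:

import json
import pandas as pd

# Illustrative data: one JSON string per row.
df = pd.DataFrame({
    'json': [
        '{"StreetName": "Main St", "CityName": "Springfield", "PostalCode": "12345"}',
        '{"StreetName": "Oak Ave", "CityName": "Shelbyville", "PostalCode": "67890"}',
    ]
})

df['json'] = df['json'].map(json.loads)     # parse each string into a dict (safer than eval)
address_df = df['json'].apply(pd.Series)    # expand each dict into its own columns
print(address_df)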
I'm connecting to an API which has a 500-row limit per call.
This is my code for a single API call (works great):
def getdata(data):
auth_token = access_token
hed = {'Authorization': 'Bearer ' + auth_token, 'Accept': 'application/json'}
urlApi = 'https://..../orders?Offset=0&Limit=499'
datar = requests.get(urlApi, data=data, headers=hed, verify=True)
return datar
Now I want to scale it up so it will get me all the records.
This is what I tried to do:
In order to make sure that I have all the rows, I must iterate until there is no more data:
get 1st page
get 2nd page
merge
get 3rd page
merge
etc...
each page is an API call.
This is what I'm trying to do:
def getData(data):
auth_token = access_token
value_offset = 0
hed = {'Authorization': 'Bearer ' + auth_token, 'Accept': 'application/json'}
datarALL = None
while True:
urlApi = 'https://..../orders?Offset=' + value_offset + '&Limit=499'
responsedata = requests.get(urlApi, data=data, headers=hed, verify=True)
if responsedata.ok:
value_offset = value_offset + 499
#to do: merge the result of the get request
datarALL= datarALL+ responsedata (?)
# to do: check if response is empty then break out.
return datarALL
I couldn't find information about how to merge the results of the API calls, nor about how to check whether I can break out of the loop.
Edit:
To clarify what I'm after:
I can see the results of the API call using:
logger.debug('response is : {0}'.format(datar.json()))
What I want to be able to do:
logger.debug('response is : {0}'.format(datarALL.json()))
and it will show all results from all calls. This requires generating API calls until there is no more data to get.
This is a sample of what the API call returns:
"offset": 0,
"limit": 0,
"total": 0,
"results": [
{
"field1": 0,
"field2": "string",
"field3": "string",
"field4": "string"
}
]
}
In this case, you are almost correct with the idea.
import json   # needed for json.loads below

datarALL = []       # start with an empty list so the pages can be concatenated
is_valid = True
while is_valid:
    is_valid = False
    ...
    ...
    responsedata = requests.get(urlApi, data=data, headers=hed, verify=True)
    if responsedata.status_code == 200:  # use the status code to check the request; 200 means a successful call
        responsedata = responsedata.text
        value_offset = value_offset + 499
        # merge the result of the get request
        jsondata = json.loads(responsedata)
        if "results" in jsondata:
            if jsondata["results"]:
                is_valid = True
        if is_valid:
            # concatenate the arrays with the + operator
            datarALL = datarALL + jsondata["results"]
Since I don't know whether "results" still exists when the data runs out, I checked at both levels.
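Putting the pieces together, here is a sketch of a full paging loop that merges every page's "results" into one list. The URL placeholder, the 499 page size, and the header shape are taken from the snippets above; the function name is illustrative:

import requests

def get_all_orders(data, access_token):
    hed = {'Authorization': 'Bearer ' + access_token, 'Accept': 'application/json'}
    all_results = []                    # accumulates the "results" array of every page
    value_offset = 0
    page_size = 499

    while True:
        urlApi = 'https://..../orders?Offset={0}&Limit={1}'.format(value_offset, page_size)
        responsedata = requests.get(urlApi, data=data, headers=hed, verify=True)
        if responsedata.status_code != 200:
            break                                        # stop on an error response
        page = responsedata.json().get("results") or []
        if not page:
            break                                        # empty page: no more data
        all_results = all_results + page                 # merge this page into the total
        value_offset = value_offset + page_size

    return all_results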
I am using the Face++ API. I need to get an attribute from the first request (json_resp) and add it to the second one (json_resp2).
import requests
json_resp = requests.post( 'https://api-us.faceplusplus.com/facepp/v3/detect',
data = { 'api_key' : 'api key' ,
'api_secret' : 'api secret',
'image_url' : 'http://www.pick-health.com/wp-content/uploads/2013/08/happy-person.jpg' } )
print("Response : ", json_resp.text)
This request outputs:
Response : {"image_id": "0UqxdZ6b58TaAFxBiujyMA==", "request_id": "1523139597,9f47c376-481b-446f-9fa3-fb49e404437c", "time_used": 327, "faces": [{"face_rectangle": {"width": 126, "top": 130, "left": 261, "height": 126}, "face_token": "2da210ada488fb10b58cdd2cd9eb3801"}]}
I need to access the face_token to pass it to the second request:
json_resp2 = requests.post( 'https://api-us.faceplusplus.com/facepp/v3/face/analyze',
data = { 'api_key' : 'api key' ,
'api_secret' : 'api secret',
'face_tokens' : 'json_resp.face_tokens',
'return_landmark':0,
'return_attributes':'emotion'} )
print("Response2 : ", json_resp2.text)
How can I do this, please?
To get the text string from the response object, you can use json_resp.text. You can then use the json library to convert this into a dict, and then extract the field you want:
json_resp = requests.post(...) ## Your post request, as written above
node = json.loads(json_resp.text)
face_token = node['faces'][0]['face_token']
Here is the full code (using your snippets above):
import requests
import json
api_key = 'your api key'
api_secret = 'your api secret'
json_resp = requests.post(
'https://api-us.faceplusplus.com/facepp/v3/detect',
data = {
'api_key' : api_key,
'api_secret' : api_secret,
'image_url' : 'http://www.pick-health.com/wp-content/uploads/2013/08/happy-person.jpg'
}
)
node = json.loads(json_resp.text)
face_token = node['faces'][0]['face_token']
json_resp2 = requests.post(
'https://api-us.faceplusplus.com/facepp/v3/face/analyze',
data = {
'api_key' : api_key,
'api_secret' : api_secret,
'face_tokens' : face_token,
'return_landmark' : 0,
'return_attributes' : 'emotion'
}
)
print("Response2 : ", json_resp2.text)
PS: It's a bad idea to post API keys online, since people can run your bill up by using your services.
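As a small follow-up: response objects also have a .json() helper, and it is worth guarding against an empty faces list before indexing into it; a sketch based on the response shape shown above:

node = json_resp.json()            # equivalent to json.loads(json_resp.text)
faces = node.get("faces", [])
if faces:
    face_token = faces[0]["face_token"]
else:
    raise ValueError("No face detected: " + json_resp.text)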
I have a Python script where I am sending a POST request for data to a server. I am expecting a particular response which indicates there is data in the response. If I do not receive this response, how can I restart my script / go back to the beginning of it? The script is wrapped in a function, which allows it to run every minute.
I would like to return to the beginning of my function if my response isn't as expected.
Script:
import sched, time, requests, jsonpickle, arcpy, json, datetime
s = sched.scheduler(time.time, time.sleep)
def do_something(sc):
data2 = jsonpickle.decode((f2.read()))
Start = datetime.datetime.now()
# Start = datetime.datetime.strftime(data2['QueryRequest']['LastUpdatedDate'])
DD = datetime.timedelta(minutes=5)
earlier = Start - DD
earlier_str = earlier.strftime('X%m/%d/%Y %H:%M:%S').replace('X0','X').replace('X','')
data2["QueryRequest"]['LastUpdatedDate'] = str(earlier_str)
data2 = jsonpickle.encode(data2)
BulkyItemInfo = " "
spatial_ref = arcpy.SpatialReference(4326)
lastpage = 'false'
startrow = 0
newquery = 'new'
pagesize = 100
url2 = "URL"
headers2 = {'Content-type': 'text/plain', 'Accept': '/'}
while lastpage == 'false':
r2 = requests.post(url2, data=data2, headers=headers2)
print r2.text
decoded2 = json.loads(r2.text)
f2 =open('C:\Users\GeoffreyWest\Desktop\Request.json')
data2 = jsonpickle.decode((f2.read()))
if decoded2['Response']['LastPage'] == 'false':
data2['QueryRequest']['PageSize'] = pagesize
startrow = startrow + data2['QueryRequest']['PageSize']
data2['QueryRequest']['StartRowNum'] = startrow
data2['QueryRequest']['NewQuery'] = 'false'
data2 = jsonpickle.encode(data2)
print startrow
else:
lastpage = 'true'
print json.dumps(decoded2, sort_keys=True, indent=4)
items = []
for sr in decoded2['Response']['ListOfServiceRequest']['ServiceRequest']:#Where response is successful or fails
Output for successful response:
{
"status": {
"code": 311,
"message": "Service Request Successfully Queried.",
"cause": ""
},
"Response": {
"LastPage": "false",
"NumOutputObjects": "100",
"ListOfServiceRequest": {
"ServiceRequest": [
{
Output for unsuccessful response:
{"status":{"code":311,"message":"Service Request Successfully Queried.","cause":""},"Response":{"LastPage":"true","NumOutputObjects":"0","ListOfServiceRequest":{}}}