I'm using locust to load test APIs, but every time I'm testing API which need parameter other than authorization to be inputted like this:
It always fail at 100% which says 'BadStatusCode('endpoint',)', already google what does it mean and search locust error documentation but I still haven't found any clue. Every other API (mainly API with method other than get) I test with locust which only need parameter authorization like this:
run perfectly fine. I already google various locust code on how to input parameters to endpoint and I think my code already correct.
Here's my code (with 100% failures):
import os
from dotenv import load_dotenv
from locust import TaskSet, task, between, HttpLocust
from locust.contrib.fasthttp import FastHttpLocust
import resource
from dotenv import dotenv_values
load_dotenv()
resource.setrlimit(resource.RLIMIT_NOFILE, (65536, 999999))
host_p = os.getenv("HOST_P")
header = {
'authorization': 'Bearer ' + os.getenv('TOKEN')
}
values = {
"amount": 100
}
def payment(self):
self.client.post("/pay", headers=header, json=values)
class ProcessPost(TaskSet):
tasks={payment:2}
class ApiPost(FastHttpLocust):
host = host_payment
task_set = ProcessPost
wait_time = between(5.0, 9.0)
and here's my other code (run perfectly fine):
import os
from dotenv import load_dotenv
from locust import TaskSet, task, between, HttpLocust
from locust.contrib.fasthttp import FastHttpLocust
import resource
import datetime as d
from dotenv import dotenv_values
import json
load_dotenv()
resource.setrlimit(resource.RLIMIT_NOFILE, (65536, 999999))
host_p = os.getenv("HOST_P")
header = {
'authorization': 'Bearer ' + os.getenv('TOKEN')
}
def payment(self):
self.client.get("/pay", headers=header)
class ProcessPost(TaskSet):
tasks={payment:2}
class ApiPost(FastHttpLocust):
host = host_payment
task_set = ProcessPost
wait_time = between(5.0, 9.0)
I guess it is probably you are sending data as body and not adding content type header , if you use json instead it adds content-type header itself, but you need to add it yourself if you use data to pass values.
headers['Content-Type'] = "application/json"
Related
I deployed a model to the model registry on Vertex AI. I added an endpoint too, and I am able to make inferences. Below is the code that I wrote (using Python 3.9.12):
from google.cloud import aiplatform
from google.oauth2 import service_account
# settings is a Pydantic BaseSettings subclass object
credentials_json = json.loads(settings.GCP_VERTEX_SERVICE_ACC)
credentials = service_account.Credentials.from_service_account_info(
info=credentials_json
)
aiplatform.init(project=settings.GCLOUD_PROJECT_NUMBER,
location=settings.GCLOUD_LOCATION,
credentials=credentials)
endpoint = aiplatform.Endpoint(settings.GCLOUD_SBERT_ENDPOINT_ID)
...
async def do_inference(list_strs: List[str]):
result = endpoint.predict(instances=list_strs)
return result.predictions
Right now I'm not able to make asynchronous requests. Is there a way around this? For instance, would using the aiplatform_v1beta1.PredictionServiceAsyncClient library be a solution? Thanks in advance!
---- EDIT -----
Below is the piece of code that did it for me in case someone else is struggling with the same thing.
import asyncio
from google.cloud import aiplatform_v1beta1
from google.oauth2 import service_account
from google.protobuf import json_format
from google.protobuf.struct_pb2 import Value
# settings is a Pydantic BaseSettings subclass object
credentials_json = json.loads(settings.GCP_VERTEX_SERVICE_ACC)
credentials = service_account.Credentials.from_service_account_info(
info=credentials_json
)
client_options = {"api_endpoint": f"{settings.GCLOUD_LOCATION}-aiplatform.googleapis.com"}
client = aiplatform_v1beta1.PredictionServiceAsyncClient(credentials=credentials, client_options=client_options)
...
async def do_inference(list_strs: List[str]):
request = aiplatform_v1beta1.PredictRequest(endpoint=endpoint)
request.instances.extend(list_strs)
response = await client.predict(request)
predictions = response.predictions
return predictions
asyncio.get_event_loop().run_until_complete(do_inference())
This code owes a lot to #milad_raesi's answer!
I'm making an API call to Hubspot, and everything works fine until I try to replace the UNIX date with a variable.
First, I get the dates I'm looking for into UNIX format, which Hubspot expects. This works fine:
value = (begin_last_wk_raw - datetime(1970,1,1)).total_seconds()
highValue = (end_last_wk_raw - datetime(1970,1,1)).total_seconds()
print(value)
print(highValue)
To which I get the following response (which seems great):
1665930401.451494
1666535201.451494
I then run the following code:
url = 'https://api.hubapi.com/crm/v3/objects/contacts/search'
headers = {
'Content-Type': 'application/json',
'Authorization': 'Bearer ' + access_token
}
json_data = {
"filterGroups":[
{
"filters":[
{
"propertyName":"createdate",
"operator":"GTE",
"highValue": highValue,
"value": value
}
]
}
]
}
response = requests.post(url, headers=headers, data=json.dumps(json_data))
After which I get the following error:
'{"status":"error","message":"There was a problem with the request.","correlationId":"0a9e1fe0-0e80-4571-b610-e0ad05645baa"}'
My code is pretty bloated, but here are the necessary packages I've imported:
import logging
import requests
import pandas as pd
import json
from dotenv import load_dotenv
import datetime
from datetime import date
from datetime import datetime, timedelta
import gspread
from gspread_dataframe import set_with_dataframe
from hubspot import HubSpot
from hubspot.crm.contacts import ApiException
import os
load_dotenv()
I'm expecting to get data based on the new date range. I've verified that the date range contained in the variables does have data (by doing the query within Hubspot).
Again, when I enter the dates in UNIX format into the JSON section (i.e. "highValue" and "value"), I get error-free results. It's only when I try to replace the values with the variables "highValue" and "value" do I see a problem.
I tried searching Google for this error, but it didn't return much in the way of results, and I'm not sure if the error message is very specific anyway.
I'm using Cloud Composer, and I have a DAG that has one task that calls an HTTPS-triggered cloud function that sends out an email (due to restrictions on the project I'm working on, I had to do it this way).
The most simple form of this works. I can trigger the cloud function, and the emails are being sent successfully. However, I want to pass some variables I'm defining in the DAG to the Cloud Function, and this is where something is failing. I was using the usual way to pass parameters to the request URL.
This was the way I was defining the DAG:
# --------------------------------------------------------------------------------
# Import Libraries
# --------------------------------------------------------------------------------
import datetime
from airflow.models import DAG
from airflow.operators.dummy_operator import DummyOperator
from airflow.operators.python_operator import PythonOperator
from airflow.contrib.operators.bigquery_operator import BigQueryOperator
from airflow.providers.google.cloud.operators.bigquery import BigQueryInsertJobOperator,BigQueryExecuteQueryOperator
from airflow.providers.google.common.utils import id_token_credentials as id_token_credential_utils
import google.auth.transport.requests
from google.auth.transport.requests import AuthorizedSession
# --------------------------------------------------------------------------------
# Set variables
# --------------------------------------------------------------------------------
(...)
report_name_url = "report_name_url"
end_user = "end_user#email.com"
# --------------------------------------------------------------------------------
# Functions
# --------------------------------------------------------------------------------
def invoke_cloud_function():
url = "https://<trigger_url>?report_name_url={}&end_user={}".format(report_name_url, end_user) #I'M ADDING THE STRINGS TO THE URL AFTER THE ?, TO PASS WHAT I WANT AS ARGUMENTS TO THE CLOUD FUNCTION
request = google.auth.transport.requests.Request() #this is a request for obtaining the the credentials
id_token_credentials = id_token_credential_utils.get_default_id_token_credentials(url, request=request) # If your cloud function url has query parameters, remove them before passing to the audience
resp = AuthorizedSession(id_token_credentials).request("GET", url=url) # the authorized session object is used to access the Cloud Function
print(resp.status_code) # should return 200
print(resp.content) # the body of the HTTP response
# --------------------------------------------------------------------------------
# Define DAG
# --------------------------------------------------------------------------------
with DAG(
dag_id,
schedule_interval= '0 13 05 * *', # DAG Cron scheduler
default_args = default_args) as dag:
(...)
send_email = PythonOperator(
task_id="send_email",
python_callable=invoke_cloud_function
)
start >> run_stored_procedure >> composer_logging >> send_email >> end
This is what I have as far as the DAG goes. From the perspective of the cloud function, I have the following:
def send_email(request):
import ssl
from email.message import EmailMessage
import smtplib
import os
report_name_url = request.args.get('report_name_url')
report_name = report_name_url.replace("_", " ")
end_user = request.args.get('end_user')
(...)
context = ssl.create_default_context()
with smtplib.SMTP_SSL('smtp.gmail.com', 465, context=context) as smtp:
smtp.login(sender_email, password)
smtp.sendmail(sender_email, receiver_email, em.as_string())
Can someone point me toward a solution for my use-case?
Thank you very much.
Edit for added context:
I'm getting the following information from the logs:
"(...)Unauthorized</h1>\n<h2>Your client does not have permission to the requested URL <code>...</code>.</h2>\n<h2></h2>\n</body></html>\n'"
This is odd because I think this was the error I was getting BEFORE giving permissions to my service account to invoke the cloud function.
Now, that permission is in place.
The only problem I foresee is that I'm not exactly calling the original URL that triggers the cloud function, since I'm adding parameters. Could this be the problem?
EDIT:
After a lot of digging around, I found a way to do this. First and foremost, I had to switch from GET to POST. This way, I was able to pass the URL that indeed is meant to trigger the Cloud Function.
The final solution came down to this:
This was the function in the DAG:
def invoke_cloud_function_success():
url = "<trigger url>" #the url is also the target audience.
request = google.auth.transport.requests.Request() #this is a request for obtaining the the credentials
id_token_credentials =
id_token_credential_utils.get_default_id_token_credentials(url, request=request) # If your cloud function url has query parameters, remove them before passing to the audience
headers = {"Content-Type": "application/json"}
body = {"report_name":report_name, "end_user":end_user, "datastudio_link":datastudio_link}
resp = AuthorizedSession(id_token_credentials).post(url=url, json=body, headers=headers) # the authorized session object is used to access the Cloud Function
print(resp.status_code) # should return 200
print(resp.content) # the body of the HTTP response
In the final Cloud Function I had to put:
request_json = request.get_json()
report_name = list(request_json['report_name'])
datastudio_link = list(request_json['datastudio_link'])
end_user = list(request_json['end_user'])
I'm trying to run a script to get some data from our Git API but when I try to use the API filter "projects?search=defects" it only returns: {"Git2":[]}. I'm suspecting it may be related to the "?" symbol.
Below is the code which I'm using. The "Git" function works fine, but the "Git2" function doesn't. What I'm doing wrong?
import requests
from flask import Flask
import os
import json
uri = "https://gitlab.local.com/api/v4/"
#this code work
#app.route('/git')
def Git():
uri2 = "projects"
uri3 = uri + uri2
response = requests.get(uri3)
return({"Git" : response.json()})
#this code doesnt work
#app.route('/git2')
def Git2():
uri2 = "projects?search=my-defects"
uri3 = uri + uri2
response = requests.get(uri3)
return({"Git2" : response.json()})
If you are going to use flask you should consider using custom flask functions.
import requests
from flask import Flask, jsonify, request
import os
uri = "https://gitlab.local.com/api/v4/"
#this code work
#app.route('/git')
def Git():
uri2 = "projects"
uri3 = uri + uri2
response = requests.get(uri3)
return jsonify({"Git" : response})
The requests library takes in query parameters as well, so instead of adding the query parameters directly to your url just add it as an argument to the get request.
#this code works too
#app.route('/git2')
def Git2():
params = {"search":"my-defects"}
response = requests.get(uri3, params=params)
return jsonify({"Git2" : response})
Hope this helps
I've been trying to use the designate client to create DNS entries, but even before creating them, I tried listing all the entries.
Started with listing zones first and it doesn't seem to work.
#!/usr/bin/env python3
import os
import json
from designateclient.v2 import client
from keystoneauth1.identity import generic
from keystoneauth1 import session as keystone_session
auth = generic.Password(
auth_url='url',
username='username',
password=os.environ['password'],
project_name='domain name',
project_domain_id='default',
user_domain_id='default')
session = keystone_session.Session(auth=auth)
client = client.Client(session=session)
print(client.zones.list())
Any help how I could get this data? Thank you in advance :)
So I got over the issue of authentication, by using the following code:
import os
from keystoneclient.auth.identity import v3
from keystoneclient import session
from keystoneclient.v3 import client
from designateclient.v2 import client as d_client
v3_auth = v3.Password(auth_url='auth_url',
username='username',
password=os.environ['pass'],
project_name='project_name',
project_domain_name="project_domain_name",
user_domain_name="user_domain_name")
v3_ses = session.Session(auth=v3_auth)
auth_token = v3_ses.get_token()
session = session.Session(auth=v3_auth, timeout=10)
desig_client = d_client.Client(session=session)
print(desig_client)
list_of_zones = desig_client.zones.list()
The next question is how do I create a new domain?