I need to write a test case for a function that fetches data from an API. In it I used httpx.AsyncClient() as a context manager, but I don't understand how to write a test case for that function.
async def make_dropbox_request(url, payload, dropbox_token):
    async with httpx.AsyncClient(timeout=None, follow_redirects=True) as client:
        headers = {
            'Content-Type': 'application/json',
            'authorization': 'Bearer ' + dropbox_token
        }
        # make the api call
        response = await client.post(url, headers=headers, json=payload)

        if response.status_code not in [200]:
            print('Dropbox Status Code: ' + str(response.status_code))

        if response.status_code in [200, 202, 303]:
            return json.loads(response.text)
        elif response.status_code == 401:
            raise DropboxAuthenticationError()
        elif response.status_code == 429:
            sleep_time = int(response.headers['Retry-After'])
            if sleep_time < 1 * 60:
                await asyncio.sleep(sleep_time)
                raise DropboxMaxRateLimitError()
            raise DropboxMaxDailyRateLimitError()

        raise DropboxHTTPError()
I need to write test cases without calling the API, so I believe I need to mock client.post(), but I do not understand how to do that. If anyone can help me figure this out, that would be really helpful.
TL;DR: use return_value.__aenter__.return_value to mock the async context.
Assuming you are using pytest and pytest-mock, you can use the mocker fixture to mock httpx.AsyncClient.
Since the post function is async, you will need to use an AsyncMock.
Finally, since you use an async context manager, you will also need return_value.__aenter__.return_value to properly mock the returned context. Note that for a synchronous context manager you would simply use __enter__ instead of __aenter__.
from unittest.mock import AsyncMock, Mock

import pytest
from httpx import Response
from pytest_mock import MockerFixture

TESTED_MODULE = "path.to.module.under.test"  # replace with the dotted path of the module that imports AsyncClient


@pytest.fixture
def mock_AsyncClient(mocker: MockerFixture) -> Mock:
    mocked_AsyncClient = mocker.patch(f"{TESTED_MODULE}.AsyncClient")
    mocked_async_client = Mock()
    response = Response(status_code=200)
    mocked_async_client.post = AsyncMock(return_value=response)
    mocked_AsyncClient.return_value.__aenter__.return_value = mocked_async_client
    return mocked_async_client
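A test can then request the fixture and, if needed, override the canned response. This is a minimal sketch, assuming pytest-asyncio and that make_dropbox_request is importable from the module referenced by TESTED_MODULE; the URL, payload, and token are placeholders and never reach the network:

@pytest.mark.asyncio
async def test_make_dropbox_request_success(mock_AsyncClient: Mock):
    # give the canned response a JSON body so json.loads(response.text) succeeds
    mock_AsyncClient.post.return_value = Response(status_code=200, text='{"ok": true}')

    # placeholder arguments; the mocked client never performs a real request
    result = await make_dropbox_request("https://example.com/endpoint", {"a": 1}, "token")

    assert result == {"ok": True}
    mock_AsyncClient.post.assert_awaited_once()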
I also faced the same issue and handled it with the patch decorator. I'm sharing my code so that it might help others.
from unittest.mock import patch

import pytest
import httpx

from app.services import your_service


@pytest.mark.anyio
@patch(
    'app.services.your_service.httpx.AsyncClient.post',
    return_value=httpx.Response(200, json={'id': '9ed7dasdasd-08ff-4ae1-8952-37e3a323eb08'})
)
async def test_get_id(mocker):
    result = await your_service.get_id()
    assert result == '9ed7dasdasd-08ff-4ae1-8952-37e3a323eb08'
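For context, the function under test in that example could look roughly like this; the module path and body below are an assumed sketch based on the test above, not the answerer's actual code:

# app/services/your_service.py (assumed sketch)
import httpx


async def get_id() -> str:
    # the endpoint URL and payload are placeholders
    async with httpx.AsyncClient() as client:
        response = await client.post('https://example.com/some-endpoint', json={})
    return response.json()['id']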
You can try out the RESPX mocking library to test and mock your HTTPX clients.
In your case, something like this should do it:
async def make_dropbox_request(url, payload, dropbox_token):
    ...
    response = await client.post(url, headers=headers, json=payload)
    ...
    return response.json()


@respx.mock
async def test_dropbox_endpoint():
    url = "https://dropbox-api/some-endpoint/"
    endpoint = respx.post(url).respond(json={"some": "data"})

    result = await make_dropbox_request(url, ..., ...)

    assert endpoint.called
    assert result == {"some": "data"}
To stay DRY and not repeat the mocking in each test, you can set up your own pytest fixture, or respx instance, globally that pre-mocks all Dropbox API endpoints. Then, in each test, just alter the response or error depending on the scenario to get full test coverage of make_dropbox_request.
@pytest.fixture()
async def dropbox_mock():
    async with respx.mock() as dropbox:
        # default endpoints and their responses
        dropbox.post("some-endpoint", name="foo").respond(404)
        dropbox.post("some-other-endpoint", name="bar").respond(404)
        # ^ name routes for access in tests
        yield dropbox


async def test_some_case(dropbox_mock):
    dropbox_mock["foo"].respond(json={})
    ...
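For instance, to cover the 429 branch of make_dropbox_request, you can have respx return a rate-limit response. This is a sketch, assuming pytest-asyncio and that make_dropbox_request and the exception classes from the question are importable from your module:

import pytest
import respx

# from your_module import make_dropbox_request, DropboxMaxRateLimitError  # adjust to your module


@respx.mock
@pytest.mark.asyncio
async def test_dropbox_rate_limit():
    url = "https://dropbox-api/some-endpoint/"
    respx.post(url).respond(status_code=429, headers={"Retry-After": "0"})

    with pytest.raises(DropboxMaxRateLimitError):
        await make_dropbox_request(url, {}, "token")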
So, I have a server running FastAPI which will make an API call to a remote API upon request.
I am developing unit tests for this application, but here comes the question:
Can I, for the purpose of the test, replace a legitimate remote API server response with a predefined response?
Example of the tests run:
from fastapi.testclient import TestClient
from web_api import app

client = TestClient(app)


def test_get_root():
    response = client.get('/')
    assert response.status_code == 200
    assert response.json() == {"running": True}
And my server:
from fastapi import FastAPI

app = FastAPI()


@app.get("/")
def home():
    return {"running": True}
This is a simple example, but other endpoints of my API would call an external remote API:
def call_api(self, endpoint: str, params: dict):
    url = self.BASEURL + urllib.parse.quote(endpoint)
    try:
        response = requests.get(url, params=params)
        response.raise_for_status()
    except requests.exceptions.HTTPError as error:
        print(error)
    return response
Because I want to test the response of MY API, I would like to replace the remote API with a predefined response.
Also, one user request can end up in multiple background API requests with transformed pieces of data.
Edit
Here are some more details on the structure of the application:
#app.get("/stuff/.......",
# lots of params
)
def get_stuff_from_things(stuff:list, params):
api = API(api_key=...)
# Do some stuff with the params
things = generate_things_list(params)
api.search_things(params)
# Check the result
# do some other stuff
return some_response
class API:
    BASE_URL = 'https://api.example.com/'

    def search_things(self, params):
        # Do some stuff
        # like putting stuff in the params
        for s in stuff:
            s.update(self.get_thing(params))  # -> get_thing()
        # Do some more stuff
        return stuff

    # get_thing <- search_things
    def get_thing(self, params...):
        # Some stuff
        results = self.call_api('something', params)  # -> call_api()
        json = results.json()
        # Some more stuff
        things = []
        for thing in json['things']:
            t = Thing(thing)
            things.append(t)
        return things

    # call_api <- get_thing
    def call_api(self, endpoint: str, params: dict):
        url = self.BASEURL + urllib.parse.quote(endpoint)
        try:
            response = requests.get(url, params=params)
            response.raise_for_status()
        except requests.exceptions.HTTPError as error:
            print(error)
        self.last_response = response
        return response
N.B. That is pseudo-code; I simplified the functions by removing the parameters, etc.
I hope it is clear, thanks for your help.
A complex API method might look like this (please pay attention to the Depends mechanism, it is crucial):
import urllib.parse

import requests
from fastapi import FastAPI, Depends

app = FastAPI()


# this can be in a different file
class RemoteCallWrapper:
    def call_api(self, baseurl: str, endpoint: str, params: dict):
        url = baseurl + urllib.parse.quote(endpoint)
        try:
            response = requests.get(url, params=params)
            response.raise_for_status()
        except requests.exceptions.HTTPError as error:
            print(error)
        return response


@app.get("/complex_api")
def calls_other_api(remote_call_wrapper=Depends(RemoteCallWrapper)):
    response = remote_call_wrapper.call_api("https://jsonplaceholder.typicode.com",
                                            "/todos/1", None)
    return {"result": response.json()}
Now, we wish to replace the remote call class. I wrote a helper library that simplifies the replacement for tests - pytest-fastapi-deps:
from fastapi.testclient import TestClient
from mock.mock import Mock
from requests import Response

from web_api import app, RemoteCallWrapper

client = TestClient(app)


class MyRemoteCallWrapper:
    def call_api(self, baseurl: str, endpoint: str, params: dict):
        the_response = Mock(spec=Response)
        the_response.json.return_value = {"my": "response"}
        return the_response


def test_get_root(fastapi_dep):
    with fastapi_dep(app).override({RemoteCallWrapper: MyRemoteCallWrapper}):
        response = client.get('/complex_api')
        assert response.status_code == 200
        assert response.json() == {"result": {"my": "response"}}
You override the RemoteCallWrapper with your MyRemoteCallWrapper implementation for the test, which has the same spec.
As asserted, the response changed to our predefined response.
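If you would rather not add a plugin, FastAPI's built-in app.dependency_overrides achieves the same thing; a sketch using the app, client, and wrapper classes from above:

def test_get_root_with_builtin_override():
    app.dependency_overrides[RemoteCallWrapper] = MyRemoteCallWrapper
    try:
        response = client.get('/complex_api')
        assert response.status_code == 200
        assert response.json() == {"result": {"my": "response"}}
    finally:
        # always clean up so other tests see the real dependency again
        app.dependency_overrides.clear()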
It sounds like you'd want to mock your call_api() function.
With a small modification to call_api() (returning the result of .json()), you can easily mock the whole function while calling the endpoint in your tests.
I'll use two files, app.py and test_app.py, to demonstrate how I would do this:
# app.py
import urllib.parse

import requests
from fastapi import FastAPI

app = FastAPI()

BASEURL = "https://api.example.com/"  # placeholder for the external API's base URL


# call_api is a module-level function here (no `self`), so it can be patched on the app module.
def call_api(endpoint: str, params: dict):
    url = BASEURL + urllib.parse.quote(endpoint)
    try:
        response = requests.get(url, params=params)
        response.raise_for_status()
    except requests.exceptions.HTTPError as error:
        print(error)
    return response.json()  # <-- This is the only change. Makes it easier to test things.


@app.get("/")
def home():
    return {"running": True}


@app.get("/call-api")
def make_call_to_external_api():
    # `endpoint` and `params` could be anything here and could be different
    # depending on the query parameters when calling this endpoint.
    response = call_api(endpoint="something", params={})
    # Do something with the response...
    result = response["some_parameter"]
    return result
# test_app.py
from unittest import mock

from fastapi import status
from fastapi.testclient import TestClient

import app as app_module
from app import app


def test_call_api_endpoint():
    test_response = {
        "some_parameter": "some_value",
        "another_parameter": "another_value",
    }

    # The line below will "replace" the result of `call_api()` with whatever
    # is given in `return_value`. The original function is never executed.
    with mock.patch.object(app_module, "call_api", return_value=test_response) as mock_call:
        with TestClient(app) as client:
            res = client.get("/call-api")

    assert res.status_code == status.HTTP_200_OK
    assert res.json() == "some_value"

    # Make sure the function has been called with the right parameters.
    # This could be dynamic based on how the endpoint has been called.
    mock_call.assert_called_once_with(endpoint="something", params={})
If app.py and test_app.py are in the same directory you can run the tests simply by running pytest inside that directory.
I have implemented a test function in pytest which loads data from files, casts it into Python objects, and provides a new object for each test.
Each of these objects contains a request I need to make to the server and the expected responses. The function looks like this:
@pytest.mark.asyncio
@pytest.mark.parametrize('test', TestLoader.load(JSONTest, 'json_tests'))
async def test_json(test: JSONTest, groups: Set[TestGroup], client: httpx.AsyncClient):
    skip_if_not_in_groups(test, groups)

    request = Request(url=test.url, body=test.body.dict())
    response = await client.post(request.url, json=request.body)

    # Assertions down here...
Many times I send requests to the same HTTP endpoint with the same body, so the response is the same, but I'm testing for different things in the response.
Because of that I thought of implementing an in-memory cache so that within a single test run the same request won't be sent twice.
What I've tried to do is create a request object with its own __hash__ implementation and use @asyncstdlib.lru_cache on the function, but it didn't seem to work.
# Does not work...
@asyncstdlib.lru_cache
async def send_request(request: Request, client: httpx.AsyncClient):
    return await client.post(request.url, json=request.body)


@pytest.mark.asyncio
@pytest.mark.parametrize('test', TestLoader.load(JSONTest, 'json_tests'))
async def test_json(test: JSONTest, groups: Set[TestGroup], client: httpx.AsyncClient):
    skip_if_not_in_groups(test, groups)

    request = Request(url=test.url, body=test.body.dict())
    response = await send_request(request)
The client I'm using, httpx.AsyncClient, also implements __hash__. It comes from a pytest fixture in conftest.py with a scope of 'session':
# conftest.py
@pytest.fixture(scope='session')
def event_loop(request):
    loop = asyncio.get_event_loop_policy().new_event_loop()
    yield loop
    loop.close()


@pytest.fixture(scope='session')
async def client() -> httpx.AsyncClient:
    async with httpx.AsyncClient() as client:
        yield client
Just let go of the opaque third-party cache and cache yourself.
Since you don't require cleaning up the cache during a single execution, a plain dictionary will work:
_cache = {}


async def send_request(request: Request, client: httpx.AsyncClient):
    if request.url not in _cache:
        _cache[request.url] = await client.post(request.url, json=request.body)
    return _cache[request.url]
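If the same endpoint can also be hit with different bodies, you may want to key the cache on both the URL and the serialized body rather than the URL alone; a sketch under the assumption that request.body is JSON-serializable, as in the question:

import json

_cache = {}


async def send_request(request: Request, client: httpx.AsyncClient):
    # key on URL plus a canonical serialization of the body
    key = (request.url, json.dumps(request.body, sort_keys=True))
    if key not in _cache:
        _cache[key] = await client.post(request.url, json=request.body)
    return _cache[key]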
I am using a Quart app.
I am calling a service in my before_serving (app initialization) function and I do not want to call that in pytest. Actually, I want to disable my before_serving function, or mock it.
import pytest


@pytest.mark.asyncio
async def test_my_api_call(test_app: Pint, headers: dict):
    test_client = test_app.test_client()
    response = await test_client.get("/get_user", headers=headers)
    assert response.status_code == 200
This is my test_app fixture:
#pytest.fixture(name="test_app", scope="function")
async def _test_app(s3_client, tmp_path, async_mongodb):
os.environ["BLOB_STORE"] = str(tmp_path)
db_config['db'] = async_mongodb
async with app.test_app() as test_app:
yield test_app
Your fixture will run the before-serving startup functions because it uses the test_app:
async with app.test_app() as test_app:
As you don't wish to run these, you can change your fixture to:
#pytest.fixture(name="test_app", scope="function")
async def _test_app(s3_client, tmp_path, async_mongodb):
os.environ["BLOB_STORE"] = str(tmp_path)
db_config['db'] = async_mongodb
return app
I have the HTTP get method mocked so as to get the response for a URL without actually sending the request:
def get(url, retries=None, back_off_factor=None, max_back_off=None, timeout=None, response_encoding=None,
        retry_on_timeout=None, retry_codes=None, **kwargs):
    return _make_request("GET", url,
                         retries=retries, back_off_factor=back_off_factor,
                         max_back_off=max_back_off,
                         timeout=timeout,
                         response_encoding=response_encoding,
                         retry_on_timeout=retry_on_timeout,
                         retry_codes=retry_codes,
                         **kwargs)
@patch('lib.httputil.get')
def test_harvest(self, mock_get):
    articles = json.load(json_file)

    # Configure the mock to return a response with an OK status code. Also, the mock should have
    # a `json()` method that returns a list of todos.
    mock_get.return_value = Mock(ok=True)
    mock_get.return_value.json.return_value = articles
    mock_get.return_value.status_code = 200

    the_rest_of_the_test()
But I realized I need to mock it only for a specific URL. I know I could use the new keyword and do:
def mock_get(self, url):
    if url == MY_SPECIFIC_URL:
        {...}
    else:
        self.old_get(url)

{...}

with mock.patch('portality.lib.httputil.get', new=self.mock_get):
    the_rest_of_the_test()
but I don't really know how to mock the Response object so that it returns the correct status code and gives the correct result to the .json() method.
How can I use both of these approaches together, so that on one hand I can use the conditional, but on the other mock the Response in an easy way?
I suggest that you use the requests library along with responses, which is specifically meant for returning the desired HTTP responses.
You can mock specific URLs:
import responses
import requests


@responses.activate
def test_simple():
    responses.add(responses.GET, 'http://twitter.com/api/1/foobar',
                  json={'error': 'not found'}, status=404)

    resp = requests.get('http://twitter.com/api/1/foobar')

    assert resp.json() == {"error": "not found"}
    assert len(responses.calls) == 1
    assert responses.calls[0].request.url == 'http://twitter.com/api/1/foobar'
    assert responses.calls[0].response.text == '{"error": "not found"}'
And you can exclude other URLs:
responses.add_passthru(re.compile('https://percy.io/\\w+'))
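If you prefer to stay with unittest.mock instead, you can combine the conditional with a mocked Response by giving the patch a side_effect that only fakes the specific URL. This is a sketch reusing the names from the question (MY_SPECIFIC_URL, lib.httputil, the_rest_of_the_test), with a hypothetical JSON payload:

from unittest import mock
from unittest.mock import Mock

import lib.httputil  # module path taken from the question's patch target

real_get = lib.httputil.get  # keep a reference to the original implementation


def fake_get(url, *args, **kwargs):
    if url == MY_SPECIFIC_URL:  # MY_SPECIFIC_URL as in the question
        response = Mock(ok=True, status_code=200)
        response.json.return_value = {"some": "payload"}  # hypothetical body
        return response
    # any other URL falls through to the real implementation
    return real_get(url, *args, **kwargs)


def test_harvest_specific_url():
    with mock.patch('lib.httputil.get', side_effect=fake_get):
        the_rest_of_the_test()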
I want to attach a middleware to a specific handler and, if the client is not authorized, return an error response. However, with the following code:
async def middleware_factory(app, handler):
    async def auth_handler(request):
        if request.headers.get('Authorization') == 'Basic test1234':
            return await handler(request)
        return web.Response(text='Forbidden', status='403')
    return auth_handler
I am getting this exception:
AssertionError: Handler <function AbstractRoute.__init__.<locals>.handler_wrapper at 0x10da56bf8> should return response instance, got <class 'NoneType'> [middlewares [<function middleware_factory at 0x1104cb268>]]
The documentation states that I should return a response object, which I am doing. Still I get this error. Where am I going wrong?
You can look at the example in the official documentation.
But the main point is that if you want a middleware factory, it needs to be a plain function, not a coroutine. Also, I recommend using the @web.middleware decorator for that.
from aiohttp import web


def middleware_factory(text):
    @web.middleware
    async def sample_middleware(request, handler):
        resp = await handler(request)
        resp.text = resp.text + text
        return resp
    return sample_middleware
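Applied to the authorization check from the question, the factory pattern might look like this (a sketch; the token value is the one from the question):

from aiohttp import web


def auth_middleware_factory(token):
    @web.middleware
    async def auth_middleware(request, handler):
        # reject unauthorized clients, otherwise pass through to the handler
        if request.headers.get('Authorization') == token:
            return await handler(request)
        return web.Response(text='Forbidden', status=403)
    return auth_middleware


app = web.Application(middlewares=[auth_middleware_factory('Basic test1234')])

Note that aiohttp middlewares apply application-wide, so to restrict the check to a specific handler you would either inspect request.path inside the middleware or mount that handler on a sub-application with its own middleware list.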