I'm writing unit tests for my Flask API. Using Flask.test_client() is easy locally, e.g.:
app = Flask(__name__)
client = app.test_client()
However, I want to still be able to do things such as
self.client.post('{0}/account/password'.format(self.base_path),
data=json.dumps(passwordu), headers=self.get_headers(),
content_type=self.content_type)
But I want the client to talk to a remote host.
I've seen that I can pass environ_overrides as a parameter to the client.post(...) method. It seems to me there must be a simpler way to get a client that does the same sending of requests and decoding of responses that test_client() does. However, subclassing the client still requires me to use a Flask instance.
Ok, firstly, thanks for the suggestion Anarci. Here's what I came up with.
I wouldn't call it nice OO design, but it works and is lazy.
class RemoteApiTest(AppTest, ApiTestMixin):
    """Integration-test base that drives a REMOTE API with ``requests``,
    reusing the same mixin tests that run against the local Flask test client.

    The ``requests`` module is bound as ``cls.client`` because it exposes
    ``get``/``post``/``delete`` with a signature close enough to Flask's
    test client for the shared tests to work.
    """

    @classmethod
    def setUpClass(cls):
        """Read remote-client settings from config and set up the DB schema."""
        super(RemoteApiTest, cls).setUpClass()
        cls.kwargs = config.FLASK_CLIENT_CONFIG
        # NOTE(review): these class attributes shadow (and replace) the
        # `base_url` / `content_type` properties defined below, so the
        # properties are effectively dead once setUpClass has run.
        cls.base_url = cls.kwargs.get('base_url')
        cls.content_type = cls.kwargs.get('content_type')
        cls.client = requests
        cls.db = db
        cls.db.create_all()

    @classmethod
    def tearDownClass(cls):
        """Drop the schema created in setUpClass."""
        super(RemoteApiTest, cls).tearDownClass()
        cls.db.session.remove()
        cls.db.drop_all()
        # cls.db.get_engine(cls.app).dispose()

    @property
    def base_url(self):
        return self.__class__.base_url

    @property
    def content_type(self):
        return self.__class__.content_type

    def get_json(self, r):
        # requests responses expose .json() as a method (Flask's test client
        # differs), hence this small adapter.
        return r.json()

    @property
    def headers(self):
        """Default request headers; includes the API key once registered."""
        headers = {'content-type': self.content_type}
        if self.api_key:
            headers.update({'X-consumer-apiKey': self.api_key})
        return headers

    def basic_checks(self, rv):
        """Assert a 200 JSON response carrying a correlationID; return the body."""
        eq_(rv.status_code, 200)
        r = rv.json()
        assert rv is not None
        assert r is not None
        assert r.get('correlationID') is not None
        return r

    def setUp(self):
        """Register a fresh account against the remote API and keep its key."""
        super(RemoteApiTest, self).setUp()
        self.api_key = None
        r = self.client.post('{0}/account/register'.format(self.base_url),
                             data=json.dumps(account_register),
                             headers=self.headers)
        j = r.json()
        self.api_key = j['apiKey']

    def tearDown(self):
        """Delete the account created in setUp."""
        rv = self.client.delete('{0}/account'.format(self.base_url),
                                headers=self.headers)
        super(RemoteApiTest, self).tearDown()
With that I can config the remote server etc in my same config, and I can reuse code like this:
class ApiTestMixin:
    """Tests written only in terms of self.client / self.base_url / self.headers,
    so they run unchanged against either the local Flask test client or a
    remote ``requests``-based client."""

    def test_account_get(self):
        """GET /account returns the fullName the fixture registered with."""
        rv = self.client.get(self.base_url + '/account', headers=self.headers)
        r = self.basic_checks(rv)
        account = r.get('account')
        eq_(account.get('fullName'), account_register.get('fullName'))
        # Already asserted inside basic_checks; kept for explicitness.
        assert r.get('correlationID') is not None
Definitely open to criticism, but that's how I made it work.
Related
I have a function that looks like this
async def get_cluster_fee(self, request: web.Request):
    """aiohttp handler: return the fee for the cluster type in the query string.

    Raises KeyError (-> 500) if the `type` query parameter is missing.
    """
    cluster_type = request.rel_url.query[constants.CLUSTER_TYPE]
    response = ClusterFeeModel().get_cluster_fee(cluster_type)
    return web.json_response(response)
To test it I have implemented the test
async def test_get_cluster_fee_success(self, loop, app_client):
    """Happy-path test for the cluster-fee endpoint with the model stubbed.

    NOTE(review): ``patch`` must target the module where ClusterFeeModel is
    *looked up* by the handler (e.g. "src.api.v2.cpq.clusterfee.ClusterFeeModel"
    if the handler module imported the name), not the module where it is
    defined — otherwise the real class is still instantiated. Verify the
    handler module's import and adjust the dotted path accordingly.
    """
    client, app = app_client
    path = "/bm-shop-bff/v2/cluster-fee?type=CLS1"
    with patch("src.api.v2.cpq.clusterfee.model.ClusterFeeModel", new=ClusterFeeModelStub):
        response = await client.session.get(url=f"http://{client.host}:{client.port}{path}")
        assert 200 == response.status
        response_data = await response.json()
        assert response_data["cluster_fee"] and response_data["cluster_type"]
I want to mock ClusterFeeModel to return a specific get_cluster_fee, but I don't want to instantiate ClusterFeeModel object during execution (as it tries to connect to external systems in constructor).
I have created a stub class for that
class ClusterFeeModelStub:
    """Drop-in stand-in for ClusterFeeModel whose constructor performs no
    external connections."""

    def get_cluster_fee(self, cluster_type):
        # Fixed payload; deliberately ignores cluster_type.
        return {"cluster_type": "CLS1", "cluster_fee": 1.1}
However with my patch it still tries to instantiate ClusterFeeModel object. What is the problem?
I have a basic Flask API running:
@app.route('/helloworld', methods=['GET'])
def first_api():
    """Return the concatenated greeting.

    A Flask view must *return* its response; the original printed it (and
    misspelled ``lhanme``), so the view returned None and produced a 500.
    """
    hname = "hello"
    lhname = "world"
    return hname + lhname
Now I need to add some unit tests to it, and here is my unit test file:
import json
def test_index(app, client):
    """Smoke-test /helloworld via the pytest client fixture."""
    res = client.get('/helloworld')
    assert res.status_code == 200
    # res.data is bytes on Python 3; comparing against a str raises TypeError,
    # so the needle must be a bytes literal.
    assert b"hello" in res.data
How can I pass value for variables hname and lhname from this unit test?
Here is my conf file for pytest:
import pytest
from app import app as flask_app
@pytest.fixture
def app():
    """Expose the Flask application object to tests."""
    return flask_app


@pytest.fixture
def client(app):
    """A Werkzeug test client bound to the app fixture."""
    return app.test_client()
You have a little mistake in your endpoint. You want it to return the string instead of printing it. Please consider the following example:
from flask import Flask, request

flask_app = Flask(__name__)
app = flask_app


@app.route('/helloworld', methods=['GET'])
def first_api():
    """Read hname/lname from the query string and return their concatenation.

    NOTE(review): the query keys are ``hname`` and ``lname`` while the local
    variable is spelled ``lhname`` — consistent but easy to trip over.
    """
    hname = request.args.get("hname")
    lhname = request.args.get("lname")
    print(hname)
    return hname + lhname


def test_index(app):
    """Drive the endpoint through a test client with explicit query params."""
    client = app.test_client()
    hname = "hello"
    lname = "world"
    res = client.get('/helloworld?hname={}&lname={}'.format(hname, lname))
    assert res.status_code == 200
    assert "hello" in res.data.decode("UTF-8")


if __name__ == "__main__":
    test_index(app)
I'm trying to use aiohttp to make a sort of advanced reverse proxy.
I want to get the content of an HTTP request and pass it to a new HTTP request without pulling it into memory. While there is only one upstream, the task is fairly easy: the aiohttp server returns the request content as a StreamReader, and the aiohttp client can accept a StreamReader as a request body.
The problem is that I want to send origin request to several upstreams or, for example, simultaneously send content to upstream and write it on disk.
Are there any tools to broadcast the content of a StreamReader?
I've tried to make some naive broadcaster but it fails on large objects. What do I do wrong?
class StreamBroadcast:
    """Naively fan one StreamReader out to several sink StreamReaders.

    NOTE(review): there is no flow control here — chunks are pushed into the
    sinks with ``feed_data`` regardless of the sinks' buffer limits, which is
    the likely reason this fails on large payloads. A ReadTransport-based
    pump that honours pause/resume is needed for unbounded data.
    """

    def __init__(self, source: StreamReader, sinks_count: int, n: int = -1):
        self.__source = source
        self.__n = n  # chunk size for source.read(); -1 reads to EOF in one go
        self.__sinks = [StreamReader() for i in range(sinks_count)]
        self.__task = asyncio.create_task(self.__do_broadcast())

    async def __do_broadcast(self):
        """Copy chunks from the source into every sink until EOF."""
        while True:
            chunk = await self.__source.read(self.__n)
            if not chunk:
                break
            for output in self.__sinks:
                output.feed_data(chunk)
        for output in self.__sinks:
            output.feed_eof()

    @property
    def sinks(self) -> Iterable[StreamReader]:
        return self.__sinks

    @property
    def ready(self) -> Task:
        # Await this task to know the broadcast has finished.
        return self.__task
Well, I've looked through asyncio sources and discovered that I should use Transport to pump data over a stream. Here is my solution.
import asyncio
from asyncio import StreamReader, StreamWriter, ReadTransport, StreamReaderProtocol
from typing import Iterable
class _BroadcastReadTransport(ReadTransport):
    """
    Internal class, not meant to be instantiated manually.

    Pumps chunks from ``source`` into every sink via StreamReaderProtocol and
    honours the sinks' pause/resume flow control: reading stops while any
    sink has paused this transport and resumes once all are ready again.
    """

    def __init__(self, source: StreamReader, sinks: Iterable[StreamReader]):
        super().__init__()
        self.__source = source
        self.__sinks = tuple(StreamReaderProtocol(s) for s in sinks)
        for sink in sinks:
            # Each sink reader calls pause_reading/resume_reading on us.
            sink.set_transport(self)
        # Count of sinks currently willing to receive data.
        self.__waiting_for_data = len(self.__sinks)
        asyncio.create_task(self.__broadcast_next_chunk(),
                            name='initial-chunk-broadcast')

    def is_reading(self):
        # Keep pumping only while every sink is ready for more data.
        return self.__waiting_for_data == len(self.__sinks)

    def pause_reading(self):
        self.__waiting_for_data -= 1

    def resume_reading(self):
        self.__waiting_for_data += 1
        if self.__waiting_for_data == len(self.__sinks):
            asyncio.create_task(self.__broadcast_next_chunk(),
                                name='chunk-broadcast')

    async def __broadcast_next_chunk(self):
        # NOTE(review): read() with no size waits for EOF and returns the
        # whole payload at once; a bounded read (e.g. read(64 * 1024)) would
        # stream incrementally — confirm intent.
        data = await self.__source.read()
        if data:
            for sink in self.__sinks:
                sink.data_received(data)
            if self.is_reading():
                asyncio.create_task(self.__broadcast_next_chunk())
        else:
            for sink in self.__sinks:
                sink.eof_received()

    @property
    def is_completed(self):
        return self.__source.at_eof()


class StreamBroadcast:
    """Fan one StreamReader out to ``sinks_count`` independent StreamReaders,
    driven by a flow-controlled read transport."""

    def __init__(self, source: StreamReader, sinks_count: int):
        self.__source = source
        self.__sinks = tuple(StreamReader() for _ in range(sinks_count))
        self.__transport = _BroadcastReadTransport(self.__source, self.__sinks)

    @property
    def sinks(self) -> Iterable[StreamReader]:
        return self.__sinks

    @property
    def is_completed(self):
        return self.__transport.is_completed
I hope to package it as a pip module at some point.
I am having trouble making Python asyncio NATS.io run sequentially. I have two classes: Account and Bridge.
Account holds the logic of the application, and it communicates through Bridge with an external service via NATS.io.
Main file:
loop = asyncio.get_event_loop()
account = Account(loop, options)
# asyncio.async() no longer exists (``async`` has been a keyword since 3.7);
# ensure_future is the direct replacement for scheduling the coroutine.
asyncio.ensure_future(account.start())
loop.run_forever()
Account class:
class Account:
    # NOTE(review): a class attribute is shared by every Account instance;
    # per-instance state belongs in __init__.
    bridge = Bridge()

    async def connect(self):
        """Connect this account via the bridge and return its Result.

        Calling a coroutine without awaiting it only produces a coroutine/
        generator object — which is the bug the original code exhibited —
        so this must itself be a coroutine that awaits the bridge.
        """
        result = await self.bridge.connect(self.id)
        return result
Bridge class:
async def connect(self, account_id):
    """Ask the bank service to connect ``account_id``.

    Returns a Result whose ``data`` is None when the request times out.
    (The original used ``yield from`` inside a plain ``def``, which makes a
    bare generator; a native coroutine is the fix.)
    """
    data = None
    try:
        response = await self.nc.timed_request("bank.account.connect",
                                               BankRequest(
                                                   method="connect",
                                                   data={...}
                                               ), 10)
        data = json.loads(response.data.decode())
    except ErrTimeout:
        # NOTE(review): `status` is assigned but never used or returned.
        status = Messages.REQUEST_TIMED_OUT
    return Result(data=data)
I need to call account.connect() from anywhere inside the Account class and get the result of the connection (sequentially). Right now I'm getting a generator object.
your connect() methods should probably be coroutines:
class Account:
    bridge = Bridge()  # you probably want to put this in `def __init__(self)`!

    async def connect(self):
        """Await the bridge's connect coroutine and hand back its Result.

        Modernized from ``@asyncio.coroutine`` + ``yield from`` (deprecated
        since 3.8, removed in 3.11) to a native coroutine.
        """
        result = await self.bridge.connect(self.id)
        return result
class Bridge:
    async def connect(self, account_id):
        """Issue a timed NATS request for the account connection.

        Returns a Result whose ``data`` is None if the request timed out.
        Modernized from ``@asyncio.coroutine`` + ``yield from`` (removed in
        Python 3.11) to a native coroutine.
        """
        data = None
        try:
            response = await self.nc.timed_request("bank.account.connect",
                                                   BankRequest(
                                                       method="connect",
                                                       data={...}
                                                   ), 10)
            data = json.loads(response.data.decode())
        except ErrTimeout:
            # NOTE(review): `status` is assigned but never used or returned.
            status = Messages.REQUEST_TIMED_OUT
        return Result(data=data)
and:
resp = yield from account.connect()
I want to create multiple threads and every one of them should create flask app.
I am not sure how to do it, but that's what I have:
app = Flask(__name__)
app.url_map.strict_slashes = False


@app.route('/api/v1/something/<string:FirstArgument>/<string:SecondArgument>/',
           methods=['POST'])
def do_it(FirstArgument, SecondArgument):
    """Handle a POST: feed the request body plus the preloaded state to somefunction."""
    request_str = request.get_data().decode('utf-8').strip()
    # NOTE(review): data stored in a threading.local from run_app's thread is
    # not visible from the thread handling this request — presumably why
    # mydata.state is missing here. A plain module-level mapping (keyed by
    # port or app) is the usual fix; verify against your server's threading.
    response = somefunction(mydata.state, request_str)
    return response, 200


def run_app(this_port, mydata):
    """Load the (large) state, then serve the Flask app on the given port."""
    currentThread = threading.current_thread()
    mydata.state = some_function_that_returns_6GB_of_data()
    app.run(host='0.0.0.0', port=this_port)


if __name__ == '__main__':
    mydata = threading.local()
    thread1 = Thread(target=run_app, args=(4100, mydata,))
    # thread2 = Thread(target=run_app, args=(4101, mydata,))
    thread1.start()
    # thread2.start()
For now I want to test only one thread, and I don't know how to pass mydata.state to do_it. If I add a new argument (def do_it(FirstArgument, SecondArgument, state)), then Flask says that it wants to get this variable from the app.route. How can I pass this data to the do_it function?
And one more question: will this program pass N instances of state to N threads on N ports?
Or I should do something like this:
def do_it(FirstArgument, SecondArgument):
    """Variant that keys the shared state by a thread identifier."""
    request_str = request.get_data().decode('utf-8').strip()
    # NOTE(review): get_ident() here is the identity of the request-handler
    # thread, not the thread that ran run_app, so this lookup will likely
    # miss — TODO confirm against the server's threading model.
    response = somefunction(mydata.state[threading.get_ident()], request_str)
    return response, 200


def run_app(this_port, mydata):
    """Load the state under this thread's ident, then serve on the given port."""
    currentThread = threading.current_thread()
    mydata.state[threading.get_ident()] = some_function_that_returns_6GB_of_data()
    app.run(host='0.0.0.0', port=this_port)