I'm trying to test my endpoint with pytest.
main.py:
from fastapi import FastAPI, status, HTTPException, Depends
from sqlalchemy.ext.asyncio import AsyncSession

from .schema import ClientIn, ClientOut, ClientInWithID, Client, ValidationErrorSchema
from . import clients
from .database import SessionLocal

app = FastAPI()


async def get_db() -> AsyncSession:
    # Create one session lazily and cache it on the function object,
    # so every request reuses the same AsyncSession.
    if hasattr(get_db, "db"):
        db: AsyncSession = get_db.db
        return db
    db = SessionLocal()
    setattr(get_db, "db", db)
    return db


@app.post("/client/",
          response_model=ClientOut,
          tags=["client"],
          responses={422: {"model": ValidationErrorSchema}}
          )
async def create_client(client_in: ClientIn, db: AsyncSession = Depends(get_db)) -> Client:
    client = await clients.create_client(db, client_in)
    return client


@app.put("/client/",
         response_model=ClientOut | None,
         tags=["client"],
         responses={422: {"model": ValidationErrorSchema}, 404: {}}
         )
async def update_client(client: ClientInWithID, db: AsyncSession = Depends(get_db)) -> Client | None:
    db_client = await clients.get_client_by_id(db, client.id)
    if not db_client:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)
    return await clients.update_client(db, client)
test_main.py:
import pytest
from httpx import AsyncClient

from app import main


@pytest.mark.asyncio
async def test_create_client():
    data = {
        "phone_number": "+79009999999",
        "phone_operator_code": 900,
        "timezone": "Europe/Amsterdam",
        "tag": {
            "text": "Any text"
        }
    }
    async with AsyncClient(app=main.app, base_url="http://localhost:8000") as client:
        response = await client.post(url="client/", json=data)
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_update_client():
    data = {
        "id": 1,
        "phone_number": "+79009900000",
        "phone_operator_code": 900,
        "timezone": "Europe/Amsterdam",
        "tag": {
            "text": "Some other text"
        }
    }
    async with AsyncClient(app=main.app, base_url="http://localhost:8000") as client:
        response = await client.put(url="client/", json=data)
        assert response.status_code == 200
I use SQLAlchemy, and it connects to Postgres with asyncpg. Because of asyncpg I get this error:
venv/lib/python3.11/site-packages/asyncpg/connection.py:565: in prepare
return await self._prepare(
venv/lib/python3.11/site-packages/asyncpg/connection.py:583: in _prepare
stmt = await self._get_statement(
venv/lib/python3.11/site-packages/asyncpg/connection.py:397: in _get_statement
statement = await self._protocol.prepare(
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
> ???
E RuntimeError: Task <Task pending name='Task-3' coro=<test_update_client() running at /home/kryseyt/code/python/BackendTask1/tests/test_main.py:38> cb=[_run_until_complete_cb() at /home/kryseyt/.python3.11/lib/python3.11/asyncio/base_events.py:180]> got Future <Future pending cb=[Protocol._on_waiter_completed()]> attached to a different loop
asyncpg/protocol/protocol.pyx:168: RuntimeError
================================================ short test summary info =========================================
FAILED tests/test_main.py::test_update_client - RuntimeError: Task <Task pending name='Task-3' coro=<test_update_client() running at /home/kryseyt/code/python/BackendTask1/tests/test_main.py:38> cb=[_run_until_complet...
======================================================================== 1 failed, 1 passed in 5.82s =========================================================================
I think this happens because another event loop is created for working with the database, but what can I do about that?
Can I do something about this without mocking my database CRUD?
Try adding the "poolclass=NullPool" argument to the engine constructor:
from sqlalchemy.ext.asyncio import create_async_engine
from sqlalchemy.pool import NullPool

engine = create_async_engine(
    "postgresql+asyncpg://user:pass@host/dbname",
    poolclass=NullPool,  # don't reuse pooled connections across event loops
)
Sources:
https://docs.sqlalchemy.org/en/14/orm/extensions/asyncio.html#using-multiple-asyncio-event-loops
RuntimeError: Task ___ running at ___ at got Future <Future pending cb=[Protocol._on_waiter_completed()]> attached to a different loop
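If NullPool alone doesn't help, a related approach is to make sure the session is created on the event loop the test is running in. Here is a minimal sketch (not from the original post) that overrides the get_db dependency per test; it assumes SessionLocal in app.database is an async sessionmaker:
import pytest
from httpx import AsyncClient
from app import main
from app.database import SessionLocal  # assumed to be an async sessionmaker


@pytest.fixture  # with newer pytest-asyncio this may need @pytest_asyncio.fixture
async def client():
    async def override_get_db():
        # The session is created inside the currently running test's event loop.
        async with SessionLocal() as session:
            yield session

    main.app.dependency_overrides[main.get_db] = override_get_db
    async with AsyncClient(app=main.app, base_url="http://localhost:8000") as c:
        yield c
    main.app.dependency_overrides.clear()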
I am using the code below for running test cases locally, but it is asking for AWS credentials, which I'm not supposed to provide. It's like mock/moto is failing somehow.
As I am new to Python and its libraries, I am unable to understand this any deeper.
import sys, os
import pprint

myPath = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, myPath + '/../')

from botocore.exceptions import ClientError
pprint.pprint(sys.path)
import pytest
from services.testrun_result_details import *
import json
from moto import *
from mock import MagicMock, patch
import boto3


@mock_dynamodb2
@patch('boto3.resource')
def test_success_response(dynamodb_test_testrun_results):
    create_dynamodb_tables()
    response = run_result_details_handler(EVENT_JSON, None)
    assert response["statusCode"] == 200
---------------------------Error-------------------------
/usr/local/lib/python3.7/site-packages/botocore/signers.py:90: in handler
return self.sign(operation_name, request)
/usr/local/lib/python3.7/site-packages/botocore/signers.py:162: in sign
auth.add_auth(request)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <botocore.auth.SigV4Auth object at 0x7fab7fc2ee50>
request = <botocore.awsrequest.AWSRequest object at 0x7fab7fc48d90>
def add_auth(self, request):
if self.credentials is None:
> raise NoCredentialsError()
E botocore.exceptions.NoCredentialsError: Unable to locate credentials
/usr/local/lib/python3.7/site-packages/botocore/auth.py:373: NoCredentialsError
Can anyone suggest how to resolve this issue?
I found the issue to be that my ~/.aws/credentials file did not have a default profile.
I added this, which fixed the issue:
[default]
aws_access_key_id = 123
aws_secret_access_key = ABC
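Alternatively, here is a sketch that avoids depending on ~/.aws/credentials at all: export fake credentials from an autouse fixture before any boto3 client is created (a pattern the moto documentation recommends; the exact values don't matter as long as they exist):
import os
import pytest


@pytest.fixture(autouse=True)
def aws_credentials():
    # Fake credentials so botocore never goes looking for real ones.
    os.environ["AWS_ACCESS_KEY_ID"] = "testing"
    os.environ["AWS_SECRET_ACCESS_KEY"] = "testing"
    os.environ["AWS_SECURITY_TOKEN"] = "testing"
    os.environ["AWS_SESSION_TOKEN"] = "testing"
    os.environ["AWS_DEFAULT_REGION"] = "us-east-1"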
I've got something really weird going on with Tornado.
from tornado.web import Application
from tornado.testing import AsyncHTTPTestCase
import requests
import tornado
from tornado.ioloop import IOLoop


class MyTest(AsyncHTTPTestCase):
    def get_app(self):
        return Application()

    @tornado.testing.gen_test(timeout=30)
    async def test_pass(self):
        response = await IOLoop.current().run_in_executor(None, requests.get, "http://127.0.0.1:" + str(self.get_http_port()) + "/foo")
        print(response)

    @tornado.testing.gen_test(timeout=30)
    async def test_fail(self):
        response = await IOLoop.current().run_in_executor(None, requests.get, "http://192.168.2.1:" + str(self.get_http_port()) + "/foo")
        print(response)
The first test passes while the second one fails with
_____________________________________________________________ MyTest.test_fail _____________________________________________________________
self = <urllib3.connection.HTTPConnection object at 0x7fea40367e50>
def _new_conn(self):
"""Establish a socket connection and set nodelay settings on it.
:return: New socket connection.
"""
extra_kw = {}
if self.source_address:
extra_kw["source_address"] = self.source_address
if self.socket_options:
extra_kw["socket_options"] = self.socket_options
try:
conn = connection.create_connection(
> (self._dns_host, self.port), self.timeout, **extra_kw
)
../.virtualenvs/creative-preview/lib/python3.7/site-packages/urllib3/connection.py:170:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
address = ('192.168.2.1', 41809), timeout = None, source_address = None, socket_options = [(6, 1, 1)]
def create_connection(
address,
timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
source_address=None,
socket_options=None,
):
"""Connect to *address* and return the socket object.
Convenience function. Connect to *address* (a 2-tuple ``(host,
port)``) and return the socket object. Passing the optional
*timeout* parameter will set the timeout on the socket instance
before attempting to connect. If no *timeout* is supplied, the
global default timeout setting returned by :func:`socket.getdefaulttimeout`
is used. If *source_address* is set it must be a tuple of (host, port)
for the socket to bind as a source address before making the connection.
An host of '' or port 0 tells the OS to use the default.
"""
host, port = address
if host.startswith("["):
host = host.strip("[]")
err = None
# Using the value from allowed_gai_family() in the context of getaddrinfo lets
# us select whether to work with IPv4 DNS records, IPv6 records, or both.
# The original create_connection function always returns all records.
family = allowed_gai_family()
try:
host.encode("idna")
except UnicodeError:
return six.raise_from(
LocationParseError(u"'%s', label empty or too long" % host), None
)
for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM):
af, socktype, proto, canonname, sa = res
sock = None
try:
sock = socket.socket(af, socktype, proto)
# If provided, set socket level options before connecting.
_set_socket_options(sock, socket_options)
if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT:
sock.settimeout(timeout)
if source_address:
sock.bind(source_address)
sock.connect(sa)
return sock
except socket.error as e:
err = e
if sock is not None:
sock.close()
sock = None
if err is not None:
> raise err
../.virtualenvs/creative-preview/lib/python3.7/site-packages/urllib3/util/connection.py:96:
...
During handling of the above exception, another exception occurred:
self = <my_test.MyTest testMethod=test_fail>
@tornado.testing.gen_test(timeout=30)
async def test_fail(self):
> response = await IOLoop.current().run_in_executor(None, requests.get,"http://192.168.2.1:" + str(self.get_http_port()) + "/foo")
my_test.py:18:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/usr/lib/python3.7/concurrent/futures/thread.py:57: in run
result = self.fn(*self.args, **self.kwargs)
../.virtualenvs/creative-preview/lib/python3.7/site-packages/requests/api.py:76: in get
return request('get', url, params=params, **kwargs)
../.virtualenvs/creative-preview/lib/python3.7/site-packages/requests/api.py:61: in request
return session.request(method=method, url=url, **kwargs)
../.virtualenvs/creative-preview/lib/python3.7/site-packages/requests/sessions.py:542: in request
resp = self.send(prep, **send_kwargs)
../.virtualenvs/creative-preview/lib/python3.7/site-packages/requests/sessions.py:655: in send
r = adapter.send(request, **kwargs)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <requests.adapters.HTTPAdapter object at 0x7fea4034dbd0>, request = <PreparedRequest [GET]>, stream = False
timeout = Timeout(connect=None, read=None, total=None), verify = True, cert = None, proxies = OrderedDict()
def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None):
"""Sends PreparedRequest object. Returns Response object.
:param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
:param stream: (optional) Whether to stream the request content.
:param timeout: (optional) How long to wait for the server to send
data before giving up, as a float, or a :ref:`(connect timeout,
read timeout) <timeouts>` tuple.
:type timeout: float or tuple or urllib3 Timeout object
:param verify: (optional) Either a boolean, in which case it controls whether
we verify the server's TLS certificate, or a string, in which case it
must be a path to a CA bundle to use
:param cert: (optional) Any user-provided SSL certificate to be trusted.
:param proxies: (optional) The proxies dictionary to apply to the request.
:rtype: requests.Response
"""
try:
conn = self.get_connection(request.url, proxies)
except LocationValueError as e:
raise InvalidURL(e, request=request)
self.cert_verify(conn, request.url, verify, cert)
url = self.request_url(request, proxies)
self.add_headers(request, stream=stream, timeout=timeout, verify=verify, cert=cert, proxies=proxies)
chunked = not (request.body is None or 'Content-Length' in request.headers)
if isinstance(timeout, tuple):
try:
connect, read = timeout
timeout = TimeoutSauce(connect=connect, read=read)
except ValueError as e:
# this may raise a string formatting error.
err = ("Invalid timeout {}. Pass a (connect, read) "
"timeout tuple, or a single float to set "
"both timeouts to the same value".format(timeout))
raise ValueError(err)
elif isinstance(timeout, TimeoutSauce):
pass
else:
timeout = TimeoutSauce(connect=timeout, read=timeout)
try:
if not chunked:
resp = conn.urlopen(
method=request.method,
url=url,
body=request.body,
headers=request.headers,
redirect=False,
assert_same_host=False,
preload_content=False,
decode_content=False,
retries=self.max_retries,
timeout=timeout
)
# Send the request.
else:
if hasattr(conn, 'proxy_pool'):
conn = conn.proxy_pool
low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT)
try:
low_conn.putrequest(request.method,
url,
skip_accept_encoding=True)
for header, value in request.headers.items():
low_conn.putheader(header, value)
low_conn.endheaders()
for i in request.body:
low_conn.send(hex(len(i))[2:].encode('utf-8'))
low_conn.send(b'\r\n')
low_conn.send(i)
low_conn.send(b'\r\n')
low_conn.send(b'0\r\n\r\n')
# Receive the response from the server
try:
# For Python 2.7, use buffering of HTTP responses
r = low_conn.getresponse(buffering=True)
except TypeError:
# For compatibility with Python 3.3+
r = low_conn.getresponse()
resp = HTTPResponse.from_httplib(
r,
pool=conn,
connection=low_conn,
preload_content=False,
decode_content=False
)
except:
# If we hit any problems here, clean up the connection.
# Then, reraise so that we can handle the actual exception.
low_conn.close()
raise
except (ProtocolError, socket.error) as err:
raise ConnectionError(err, request=request)
except MaxRetryError as e:
if isinstance(e.reason, ConnectTimeoutError):
# TODO: Remove this in 3.0.0: see #2811
if not isinstance(e.reason, NewConnectionError):
raise ConnectTimeout(e, request=request)
if isinstance(e.reason, ResponseError):
raise RetryError(e, request=request)
if isinstance(e.reason, _ProxyError):
raise ProxyError(e, request=request)
if isinstance(e.reason, _SSLError):
# This branch is for urllib3 v1.22 and later.
raise SSLError(e, request=request)
> raise ConnectionError(e, request=request)
E requests.exceptions.ConnectionError: HTTPConnectionPool(host='192.168.2.1', port=41809): Max retries exceeded with url: /foo (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7fea40367e50>: Failed to establish a new connection: [Errno 111] Connection refused'))
../.virtualenvs/creative-preview/lib/python3.7/site-packages/requests/adapters.py:516: ConnectionError
==================================================== 1 failed, 1 passed in 0.22 seconds ==================================================
192.168.2.1 is localhost as well (through the docker0 interface). The weird thing is that if I run a simple HTTP server with python -m http.server, then curl 127.0.0.1:8000 and curl 192.168.2.1:8000 both work, so it doesn't seem to be a Docker-related issue. So I really don't know what's going on.
Can you try curl -v 192.168.2.1:8000 (verbose) and check the output?
Maybe you're being redirected to localhost there, and that redirect doesn't happen in your code.
What host is your app listening on: localhost or 0.0.0.0?
If you want to be able to connect from anything other than the loopback interface, you need to make sure it's listening on 0.0.0.0.
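For what it's worth, AsyncHTTPTestCase binds its test server to localhost only, which would explain why 127.0.0.1 works while 192.168.2.1 is refused, even though curl against a separately started server works on both. Below is a minimal sketch of a standalone Tornado app that listens on all interfaces; the port 8888 and the /foo handler are just placeholders:
import tornado.ioloop
import tornado.web


class FooHandler(tornado.web.RequestHandler):
    def get(self):
        self.write("ok")


app = tornado.web.Application([(r"/foo", FooHandler)])
# address="0.0.0.0" listens on every interface, not just 127.0.0.1
app.listen(8888, address="0.0.0.0")
tornado.ioloop.IOLoop.current().start()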
What I'm Doing
I'm learning aiohttp by building a REST API, which I'm testing with pytest (and the pytest-asyncio and pytest-aiohttp plugins).
For my first test (I'm going with TDD from the outset) I have the following code:
@pytest.mark.asyncio
async def test_handle_user_create(
    aiohttp_client, init_test_app, create_test_user_table
):
    payload = {
        "email": "tintin@gmail.com",
        "username": "Tintin",
        "password": "y0u != n00b1e",
    }
    client = await aiohttp_client(init_test_app)
    resp = await client.post("/users/", json=payload)
    ...
aiohttp_client is the client fixture from pytest-aiohttp
init_test_app is a fixture which essentially mirrors the app I'm going to build
create_test_user_table is my fixture for creating a table for users in the test database
What's Going Wrong With It
My first test is throwing the following runtime error at the last line in the code block above:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
env36\lib\site-packages\aiohttp\test_utils.py:295: in request
method, self.make_url(path), **kwargs
env36\lib\site-packages\aiohttp\client.py:417: in _request
with timer:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <aiohttp.helpers.TimerContext object at 0x0000015DE23A3B38>
def __enter__(self) -> BaseTimerContext:
task = current_task(loop=self._loop)
if task is None:
> raise RuntimeError(
'Timeout context manager should be used '
'inside a task'
)
E RuntimeError: Timeout context manager should
be used inside a task
env36\lib\site-packages\aiohttp\helpers.py:568: RuntimeError
From the error message, I gather that the client is trying to use an async timeout context manager, but this fails because I'm not calling it inside a task.
I don't know whether my deduction is correct.
Also, I'm not comfortable enough with asyncio to know how to resolve this.
I will be grateful if someone shows me the way out.
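To illustrate that deduction (a hypothetical sketch for Python 3.7+, not aiohttp's actual code): aiohttp's timer context checks for a current task on its loop, as the traceback shows, and the check only succeeds when the coroutine is running as a task on that same loop.
import asyncio


async def inside_a_task():
    # aiohttp performs a similar current_task() check before entering its
    # timeout context manager; if it sees None, it raises the RuntimeError above.
    print(asyncio.current_task() is not None)


asyncio.run(inside_a_task())  # prints True: asyncio.run wraps the coroutine in a task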
More Info
Here's the source code of my test file:
import asyncio
import sqlite3
from pathlib import Path

import pytest
from aiohttp import web

from app import router


@pytest.fixture(name="event_loop", scope="session")
def fixture_event_loop():
    """
    Mock session-scoped event loop.

    The default event loop is function scoped and won't work with
    otherwise-scoped fixtures. Hence the need for this override.
    """
    loop = asyncio.get_event_loop()
    yield loop
    loop.close()


@pytest.fixture(name="test_db_path", scope="session")
async def fixture_test_db_path():
    return Path(__file__).absolute().parent.joinpath("test_db.sqlite")


@pytest.fixture(name="init_test_db_conn", scope="session")
async def fixture_init_test_db_conn(test_db_path):
    """
    Mock initiator of the test database connection.
    """
    async def _init_test_db_conn(test_app):
        with sqlite3.connect(str(test_db_path)) as conn:
            test_app["DB_CONN"] = conn
            yield

    return _init_test_db_conn


@pytest.fixture(name="init_test_app", scope="session")
async def fixture_init_test_app(init_test_db_conn):
    """
    Mock app for testing.

    Substitute the test db for the development db for testing and
    undo the substitution after all tests have been run.
    """
    app = web.Application()
    app.add_routes(router)
    app.cleanup_ctx.append(init_test_db_conn)
    return app


@pytest.fixture(name="create_test_user_table")
def fixture_create_test_user_table(test_db_path):
    """
    Mock user table for tests. Scoped at function level.

    Drop the table at the end of each test.
    """
    conn = sqlite3.connect(str(test_db_path))
    conn.execute(
        """CREATE TABLE test_users (
               id INTEGER PRIMARY KEY,
               email TEXT NOT NULL UNIQUE,
               username TEXT NOT NULL UNIQUE,
               pwd_hash TEXT NOT NULL,
               active INTEGER,
               joined TEXT NOT NULL);
        """
    )
    yield
    conn.execute("""DROP TABLE test_users;""")


@pytest.mark.asyncio
async def test_handle_user_create(
    aiohttp_client, init_test_app, create_test_user_table
):
    payload = {
        "email": "tintin@gmail.com",
        "username": "Tintin",
        "password": "y0u != n00b1e",
    }
    client = await aiohttp_client(init_test_app)
    resp = await client.post("/users/", json=payload)

    assert resp.status == 200
    resp_json = await resp.json()
    assert resp_json["email"] == payload["email"]
    assert resp_json["username"] == payload["username"]
    assert resp_json["pwd_hash"] != payload["password"]  # hash must differ from the plaintext password
    assert resp_json["active"] == 0

    await client.close()
And here's a full trace of the runtime error (alongside deprecation warnings, which I'd appreciate getting help on as well :) )
$ pytest
============================= test session starts =============================
platform win32 -- Python 3.6.8, pytest-5.3.5, py-1.8.1, pluggy-0.13.1
rootdir: C:\Users\Mfonism\Codeville\AIOHttp\curious_me
plugins: aiohttp-0.3.0, asyncio-0.10.0
collected 1 item
test_app.py F [100%]
================================== FAILURES ===================================
_______________________ test_handle_user_create[pyloop] _______________________
aiohttp_client = <function aiohttp_client.<locals>.go at 0x0000015DE239AD08>
init_test_app = <Application 0x15de23a0d30>, create_test_user_table = None
@pytest.mark.asyncio
async def test_handle_user_create(
aiohttp_client, init_test_app, create_test_user_table
):
payload = {
"email": "tintin#gmail.com",
"username": "Tintin",
"password": "y0u != n00b1e",
}
client = await aiohttp_client(init_test_app)
> resp = await client.post("/users/", json=payload)
test_app.py:89:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
env36\lib\site-packages\aiohttp\test_utils.py:295: in request
method, self.make_url(path), **kwargs
env36\lib\site-packages\aiohttp\client.py:417: in _request
with timer:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <aiohttp.helpers.TimerContext object at 0x0000015DE23A3B38>
def __enter__(self) -> BaseTimerContext:
task = current_task(loop=self._loop)
if task is None:
> raise RuntimeError('Timeout context manager should be used '
'inside a task')
E RuntimeError: Timeout context manager should be used inside a task
env36\lib\site-packages\aiohttp\helpers.py:568: RuntimeError
============================== warnings summary ===============================
test_app.py::test_handle_user_create[pyloop]
c:\users\mfonism\codeville\aiohttp\curious_me\env36\lib\site-packages\aiohttp\cookiejar.py:55: DeprecationWarning: The object should be created from async function
super().__init__(loop=loop)
test_app.py::test_handle_user_create[pyloop]
c:\users\mfonism\codeville\aiohttp\curious_me\env36\lib\site-packages\aiohttp\test_utils.py:247: DeprecationWarning: The object should be created from async function
**kwargs)
test_app.py::test_handle_user_create[pyloop]
c:\users\mfonism\codeville\aiohttp\curious_me\env36\lib\site-packages\aiohttp\connector.py:730: DeprecationWarning: The object should be created from async function
loop=loop)
test_app.py::test_handle_user_create[pyloop]
c:\users\mfonism\codeville\aiohttp\curious_me\env36\lib\site-packages\aiohttp\connector.py:735: DeprecationWarning: The object should be created from async function
resolver = DefaultResolver(loop=self._loop)
-- Docs: https://docs.pytest.org/en/latest/warnings.html
====== 1 failed, 4 warnings in 0.78s ======
Following the instructions at: https://docs.gitlab.com/ce/ci/services/redis.html
I've added
services:
  - redis:latest
to my .gitlab-ci.yml file, and changed the redis connect call to:
redis.StrictRedis(host='redis', port=6379, db=0)
The error I'm getting is:
/usr/local/lib/python2.7/site-packages/redis/client.py:772: in execute_command
connection = pool.get_connection(command_name, **options)
/usr/local/lib/python2.7/site-packages/redis/connection.py:994: in get_connection
connection.connect()
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = Connection<host=redis,port=6379,db=0>
def connect(self):
"Connects to the Redis server if not already connected"
if self._sock:
return
try:
sock = self._connect()
except socket.timeout:
raise TimeoutError("Timeout connecting to server")
except socket.error:
e = sys.exc_info()[1]
> raise ConnectionError(self._error_message(e))
E ConnectionError: Error -2 connecting to redis:6379. Name or service not known.
Turns out it was as easy as putting the services declaration within every job, e.g.:
py27:
  stage: run
  image: python:2.7
  script:
    - mkdir -p build/coverage
    - pip install -r requirements.txt
    - pytest --color=no --verbose --ignore=almanac/migrations --cov=almanac --cov-config=.coveragerc tests
    - cp .coverage build/coverage/py27.coverage
  services: # <===== here ========
    - redis:latest
  artifacts:
    paths:
      - build/coverage/*.coverage
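Not from the original posts, but a small Python-side sketch that lets the same test code run both in CI (where the service hostname is redis) and locally, by reading the host from an assumed REDIS_HOST environment variable:
import os
import redis

# Set REDIS_HOST=redis in the CI job's variables; fall back to localhost elsewhere.
REDIS_HOST = os.environ.get("REDIS_HOST", "localhost")
r = redis.StrictRedis(host=REDIS_HOST, port=6379, db=0)
r.ping()  # raises a ConnectionError if the service isn't reachable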