How can I pass a ctx (Context) to CliRunner? - python

CliRunner lists no parameter to provide a context in its documentation.
The following should qualify as a minimum working example.
The real problem is a bit different.
It could be solved by moving the click decorated function into its own function for test coverage. Then the click function would be rendered almost useless.
import click
from click.testing import CliRunner


class Config():
    """Simple container holding a single test value."""

    def __init__(self):
        self.value = 651


@click.command()
@click.pass_context
def print_numberinfo(ctx):
    """Echo the value stored on the context object."""
    # Fall back to a fresh Config if the caller did not supply one.
    if not hasattr(ctx.obj, 'value'):
        ctx.obj = Config()
    click.echo(ctx.obj.value)


def test_print_numberinfo():
    ctx = click.Context(print_numberinfo, obj=Config())
    ctx.obj.value = 777
    runner = CliRunner()
    # how do I pass ctx to runner.invoke?
    result = runner.invoke(print_numberinfo)
    assert result.output == str(ctx.obj.value) + '\n'

You would directly pass your Config instance as keyword argument obj to runner.invoke:
import click
from click.testing import CliRunner


class Config():
    """Simple container holding a single test value."""

    def __init__(self):
        self.value = 651


@click.command()
@click.pass_obj
def print_numberinfo(obj):
    """Echo the value stored on the passed-in object."""
    if not hasattr(obj, 'value'):
        obj = Config()
    click.echo(obj.value)


def test_print_numberinfo():
    obj = Config()
    obj.value = 777
    runner = CliRunner()
    # Pass the Config instance directly as the obj keyword argument;
    # CliRunner.invoke forwards it to the command's context.
    result = runner.invoke(print_numberinfo, obj=obj)
    assert result.output == str(obj.value) + '\n'

For someone who just wants to make context.obj work the way it does when invoked from the command line:
CliRunner().invoke(commands.cli, ['sayhello'], catch_exceptions=False)
The first argument should be the root group of click; then you can pass the command you want to call as the second argument (that is, sayhello).
How commands.py like:
#!/usr/bin/env python
# coding: utf-8
import click


@click.group()
@click.pass_context
def cli(ctx):
    """Root group: seed the shared context object for subcommands."""
    ctx.obj = {
        'foo': 'bar'
    }


@cli.command()
@click.pass_context
def sayhello(ctx):
    # Bug fixes: the command was named `sayehello` while being invoked
    # as 'sayhello', and concatenating the dict itself to a str would
    # raise TypeError — echo one of its values instead.
    click.echo('hello!' + ctx.obj['foo'])
Thanks to geowurster for providing the solution.

Related

Intercept function call with mock without pytest/unittest/etc

I want to avoid using a test runner such as pytest because of slow test discovery and inconvenient logging output in lieu of setting up my test and calling it as a script however I am having trouble intercepting a function call within my test objects and amending its behavior. I have been able to do this in the past with pytest but I can't seem to get access to the function the way I have things setup. Side note: In part, this is an attempt to mock out a call to a RESTful server which should explain why I am interested in getting this to work.
Here is my sample application structure:
# entry_object.py
import time

import inner_object


class EntryObject():
    """Thin wrapper that owns an InnerObject built from a client."""

    def __init__(self, client):
        self.inner_obj = inner_object.InnerObject(client)

    def time_injector(self, posix_time: int):
        """Forward a single timestamp to the inner object."""
        self.inner_obj.doit(posix_time)


def the_time_injector(length_in_secs, client) -> None:
    """Feed one timestamp per second of the requested span into the API."""
    entryobj = EntryObject(client)
    posix_time = time.time()
    # range() replaces the manual counter loop; behavior is identical.
    for i in range(length_in_secs):
        entryobj.time_injector(posix_time + i)
# inner_object.py
import pretendapi


class InnerObject():
    """Owns the PretendApi instance and forwards calls to it."""

    def __init__(self, client):
        self.pretend_api_object = pretendapi.PretendApi(client)

    def doit(self, posix_time: int) -> None:
        # NOTE(review): the return value is captured but never used.
        logmessage = self.pretend_api_object.call_api_endpoint(posix_time=posix_time)
# pretendapi.py
import bravado


class PretendApi():
    """Facade over a bravado SwaggerClient; stands in for a real REST API."""

    def __init__(self, client=None):
        print("Created PretendApi")
        # Allow a mock client to be injected for testing; only build the
        # real SwaggerClient when none is supplied.
        self.client = bravado.client.SwaggerClient(None) if client is None else client
        self.ex = "pretendapi"

    def extract_and_verify_apiresult(self, posix_time: int, apiresult: object) -> (bool, object):
        """Split the raw API result into (success flag, payload)."""
        return (True, apiresult[0])

    def call_api_endpoint(self, posix_time: int) -> object:
        """Call the endpoint and return whether the command succeeded."""
        future = self.client.Api.ApiEndpoint()
        (is_cmd_success, apiresult) = self.extract_and_verify_apiresult(
            posix_time, future.result())
        if not is_cmd_success:
            # print(f"is_cmd_success={dollup(is_cmd_success)}")
            return is_cmd_success
        # print(f"{apiresult}")
        return is_cmd_success
# test_entry_object.py
from bravado.testing.response_mocks import BravadoResponseMock
import mock

import entry_object
import pretendapi


def amend_extract_and_verify_apiresult(original_func):
    """Wrap original_func so each call logs its input before delegating."""
    def amended_func(self, posix_time: int, apiresult: object) -> (bool, object):
        print(f"{posix_time} {apiresult[0]['ret_msg']}")
        # Bug fix: forward the actual apiresult, not the builtin `object`.
        return original_func(self, posix_time, apiresult)
    # Bug fix: the wrapper must be returned, otherwise the decoration
    # silently produces None and the patch never intercepts anything.
    return amended_func


def test_time_injector():
    # Mock the necessary objects.
    mock_client = mock.Mock(name='mock SwaggerClient')
    mock_client.Api.ApiEndpoint.return_value = \
        BravadoResponseMock(result=lambda:
            ({'ret_code': 0, 'ret_msg': 'OK', 'ext_code': '', 'ext_info': ''},
             '<bravado.requests_client.RequestsResponseAdapter object at 0x0000028385176518>')
        )
    # Bug fix: mock.patch.object has no effect until it is started; using
    # it as a context manager both activates and cleanly reverts it.
    with mock.patch.object(pretendapi.PretendApi, 'extract_and_verify_apiresult',
                           side_effect=amend_extract_and_verify_apiresult(
                               pretendapi.PretendApi.extract_and_verify_apiresult),
                           autospec=True):  # autospec needed for the self argument
        entry_object.the_time_injector(10, mock_client)


if __name__ == "__main__":
    test_time_injector()
Does anybody understand why I can't seem to decorate the behavior of extract_and_verify_apiresult?

Creation of Classes?

I am new in python and trying to figure out how to modularize my functions. My project is a unit testing framework for Restful APIs. For brevity I've simplified the code.
type_parser.py
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--type', help='a or b')
args = parser.parse_args()


def A(func):
    """Decorator: run func only when --type is "b".

    NOTE(review): the condition looks inverted relative to the name —
    confirm against the intended semantics.
    """
    def return_func():
        if args.type == "b":
            return func()
        else:
            pass
    return return_func


def B(func):
    """Decorator: run func only when --type is "a".

    NOTE(review): same apparent inversion as A — confirm.
    """
    def return_func():
        if args.type == "a":
            return func()
        else:
            pass
    return return_func
api_funcs.py
from type_parser import *


class ApiFunctions:
    # BUG (the subject of the question): these methods are missing the
    # `self` parameter, and the decorators' inner return_func takes no
    # arguments, so calling them on an instance raises TypeError.
    @A
    def login():
        print("cool")

    @B
    def logout():
        print("not cool")
main.py
from api_funcs import *

api = ApiFunctions()


def __main__():
    # Despite the name, this is an ordinary function — the dunder name
    # has no special meaning here; it is simply defined and called below.
    api.login()
    api.logout()


__main__()
CLI
python main.py --type=a
Outcome
Expected:
cool
Actual:
TypeError: return_func() takes no arguments
It works if I take api functions out of a class and call it straight up, but I would want to make it more abstract since there will be 3 sets of APIs
Update - I figured out the answer
class ApiFunctions:
    @A
    def login(self):
        print("cool")

    @B
    def logout(self):
        print("not cool")


def A(func):
    """Decorator whose wrapper accepts self, so it can wrap instance methods."""
    def return_func(self):
        if args.type == "b":
            return func(self)
        else:
            pass
    return return_func
In Python, the object itself has to be explicitly part of the method signature.
Thus you need to write:
def login(self):
Writing self.login() is roughly equivalent(*) to writing login(self). Since login() takes no arguments, you get an error.
(*) only roughly equivalent — don't actually write it that way
api_funcs.py
from type_parser import *


class ApiFunctions:
    @A
    def login(self):
        print("cool")

    @B
    def logout(self):
        print("not cool")

Shared options and flags between commands

Say my CLI utility has three commands: cmd1, cmd2, cmd3
And I want cmd3 to have same options and flags as cmd1 and cmd2. Like some sort of inheritance.
@click.command()
@click.option("--verbose")
def cmd1():
    pass


@click.command()
@click.option("--directory")
def cmd2():
    pass


@click.command()
@click.inherit(cmd1, cmd2)  # HYPOTHETICAL — no such API exists in click
def cmd3():
    pass
So cmd3 will have flag --verbose and option --directory. Is it possible to make this with Click? Maybe I just have overlooked something in the documentation...
EDIT: I know that I can do this with click.group(). But then all the group's options must be specified before group's command. I want to have all the options normally after command.
cli.py --verbose --directory /tmp cmd3 -> cli.py cmd3 --verbose --directory /tmp
I have found a simple solution! I slightly edited the snippet from https://github.com/pallets/click/issues/108 :
import click

# Reusable option groups; apply them to commands with add_options() below.
_cmd1_options = [
    click.option('--cmd1-opt')
]
_cmd2_options = [
    click.option('--cmd2-opt')
]
def add_options(options):
    """Build a decorator that applies every decorator in *options* to a function.

    The list is applied in reverse so the options end up in declared order,
    matching how stacked decorators read top-to-bottom.
    """
    def _add_options(func):
        for option in reversed(options):
            func = option(func)
        return func
    return _add_options
@click.group()
def group(**kwargs):
    pass


@group.command()
@add_options(_cmd1_options)
def cmd1(**kwargs):
    print(kwargs)


@group.command()
@add_options(_cmd2_options)
def cmd2(**kwargs):
    print(kwargs)


@group.command()
@add_options(_cmd1_options)
@add_options(_cmd2_options)
@click.option("--cmd3-opt")
def cmd3(**kwargs):
    print(kwargs)


if __name__ == '__main__':
    group()
Define a class with common parameters
class StdCommand(click.core.Command):
    """Command subclass that injects a shared option into every command."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Prepend so the shared option is listed first in --help output.
        self.params.insert(0, click.core.Option(
            ('--default-option',), help='Every command should have one'))
Then pass the class to decorator when defining the command function
@click.command(cls=StdCommand)
@click.option('--other')
def main(default_option, other):
    ...
You could also have another decorator for shared options. I found this solution here
def common_params(func):
    """Decorator bundling the options shared by several commands."""
    @click.option('--foo')
    @click.option('--bar')
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        return func(*args, **kwargs)
    return wrapper


@click.command()
@common_params
@click.option('--baz')
def cli(foo, bar, baz):
    print(foo, bar, baz)
This code extracts all the options from it's arguments
def extract_params(*args):
    """Collect the distinct option strings of every click Command passed in."""
    from click import Command
    if len(args) == 0:
        return ['']
    if any(not isinstance(a, Command) for a in args):
        raise TypeError('Handles only Command instances')
    # Bug fixes: Parameter.opts is a list attribute, not a callable, and a
    # set of lists is unhashable — flatten the per-parameter option lists
    # into individual strings before deduplicating.
    params = [opt for cmd_inst in args for p in cmd_inst.params for opt in p.opts]
    return list(set(params))
now you can use it:
@click.command()
# The option strings must be unpacked — click.option expects individual
# parameter declarations, not a single list argument.
@click.option(*extract_params(cmd1, cmd2))
def cmd3():
    pass
This code extracts only the parameters and none of their default values, you can improve it if needed.
A slight improvement on @jirinovo's solution.
this version support an unlimited number of click options.
One thing worth mentioning: the order in which you pass the options is important.
import click

# Option groups shared across subcommands; combined via add_options() below.
_global_options = [click.option('--foo', '-f')]
_local_options = [click.option('--bar', '-b', required=True)]
_local_options2 = [click.option('--foofoo', required=True)]
def add_options(*args):
    """Merge any number of option lists into a single decorator.

    The flattened list is applied in reverse so the options end up in the
    order they were passed — the order matters.
    """
    def _add_options(func):
        options = [x for n in args for x in n]
        for option in reversed(options):
            func = option(func)
        return func
    return _add_options
@click.group()
def cli():
    pass


@cli.group()
def subcommand():
    pass


@subcommand.command()
@add_options(_global_options, _local_options)
def echo(foo, bar):
    print(foo, bar, sep='\n')


@subcommand.command()
@add_options(_global_options)
def echo2(foo):
    print(foo)


@subcommand.command()
@add_options(_global_options, _local_options2)
def echo3(foo, foofoo):
    print(foo, foofoo, sep='\n')


@subcommand.command()
@add_options(_global_options, _local_options, _local_options2)
def echo4(foo, bar, foofoo):
    print(foo, bar, foofoo, sep='\n')


if __name__ == '__main__':
    cli()

How to access the py.test capsys from inside a test?

The py.test documentation says that I should add a capsys parameter to my test methods, but in my case this doesn't seem to be possible.
class testAll(unittest.TestCase):
    def setUp(self):
        # Run every test from the directory containing this file.
        self.cwd = os.path.abspath(os.path.split(inspect.getfile(inspect.currentframe()))[0])
        os.chdir(self.cwd)

    def execute(self, cmd, result=0):
        """
        Helper method used by many other tests, that would prevent replicating too much code.
        """
        # cmd = "%s > /dev/null 2>&1" % cmd
        ret = os.system(cmd) >> 8
        # NOTE(review): OUTPUT is not defined anywhere in this snippet —
        # capturing it is exactly what the question is asking about.
        self.assertEqual(ret, result, "`%s` returned %s instead of %s (cws=%s)\n\t%s" % (cmd, ret, result, os.getcwd(), OUTPUT))  # << how to access the output from here

    def test_1(self):
        self.execute("do someting", 0)
You could define a helper function in the class that inherits the capsys fixture:
@pytest.fixture(autouse=True)
def capsys(self, capsys):
    # Stash the capsys fixture on the instance so plain (non-fixture)
    # test methods can reach it via self.capsys.
    self.capsys = capsys
Then call this function inside the test:
out,err = self.capsys.readouterr()
assert out == 'foobar'
Kudos to MichaƂ Krassowski for his workaround which helped me work through a similar problem.
https://github.com/pytest-dev/pytest/issues/2504#issuecomment-309475790
Thomas Wright's answer is perfect. I'm just sticking this code block here for my own reference as my search led me here and I'll likely forget this in future! [doing a few things in this so useful reference for me]. If anyone is looking and sees where it can be improved - suggest away!
import os
import pytest
from _pytest.monkeypatch import MonkeyPatch
from unittest import TestCase
# -----------------------------------------------------------------------------
def foo_under_test(inp1):
    """Example of a Method under test: doubles inp1, then delegates to ClassCalled."""
    do_some_calcs_here = inp1 * 2
    get_a_return = ClassCalled.foo_called(do_some_calcs_here)
    return get_a_return
# -----------------------------------------------------------------------------
class ClassUnderTest():
    """Example of a Class contained Method under test"""

    def __init__(self):
        """Instantiate the class"""
        self.var1 = "TestVar"

    def foo_under_test2(self, inp11):
        """The class method under test"""
        return self.var1 + self.foo_called2(inp11)

    def foo_called2(self, inp12):
        """Nominal sub-foo to foo_under_test2"""
        return str(inp12 * 5)
# -----------------------------------------------------------------------------
class ClassCalled:
    """Example of a class that could be called by foo_under_test"""

    # NOTE(review): declared without self — always invoked through the
    # class (ClassCalled.foo_called(x)), which works in Python 3.
    def foo_called(inp2):
        """Sub-foo to foo_under_test"""
        return inp2 * 2
# -----------------------------------------------------------------------------
class MockResponses:
    """Class for holding the mock responses"""

    def foo_called(inp2):
        """**Mock of foo_called**"""
        return inp2 * 3

    def foo_called2(inp12):
        """**Mock of foo_called2**"""
        return str(inp12 * 4)
# -----------------------------------------------------------------------------
class Test_foo_under_test(TestCase):
    """Test class - means of grouping up tests for a target function
    This one is addressing the individual function (not within a class)
    """

    # ---------------------------------------------------------------
    @pytest.fixture(autouse=True)
    def capsys(self, capsys):
        """Capsys hook into this class"""
        self.capsys = capsys

    def print_to_console(self, strOut):
        """Print strOut to console (even within a pyTest execution)"""
        with self.capsys.disabled():
            print(strOut)

    def setUp(self):
        """Ran by pyTest before running any test_*() functions"""
        self.monkeypatch = MonkeyPatch()

    # ---------------------------------------------------------------
    def test_1(self):
        """**Test case**"""
        def mock_foo_called(inp2):
            return MockResponses.foo_called(inp2)

        mockedFoo = ClassCalled.foo_called  # Need to get this handle here
        self.monkeypatch.setattr(ClassCalled, "foo_called", mock_foo_called)
        x = foo_under_test(1)
        self.print_to_console("\n")
        strOut = "Rtn from foo: " + str(x)
        self.print_to_console(strOut)
        assert x == 6
        # Manually clear the monkey patch
        self.monkeypatch.setattr(
            ClassCalled, "foo_called", mockedFoo)
        """I've noticed with me having monkeypatch inside the
        class, the damn thing persists across functions.
        This is the only workaround I've found so far"""
# -----------------------------------------------------------------------------
class Test_ClassUnderTest_foo_under_test(TestCase):
    """Test class - means of grouping up tests for a target function
    This one is addressing the function within a class
    """

    # ---------------------------------------------------------------
    @pytest.fixture(autouse=True)
    def capsys(self, capsys):
        """Capsys hook into this class"""
        self.capsys = capsys

    def print_to_console(self, strOut):
        """Print strOut to console (even within a pyTest execution)"""
        with self.capsys.disabled():
            print(strOut)

    def setUp(self):
        """Ran by pyTest before running any test_*() functions"""
        self.monkeypatch = MonkeyPatch()

    # ---------------------------------------------------------------
    def test_1(self):
        """**Test case**"""
        def mock_foo_called2(self, inp2):
            """
            Mock function
            Defining a mock function, note this can be dealt with directly
            here, or if its more comprehensible, put it in a separate class
            (i.e. MockResponses)
            """
            # return MockResponses.foo_called2(inp2)  # Delegated approach
            return str(inp2 * 4)  # Direct approach

        """Note that the existence of self within this test class forces
        a wrapper around calling a MockClass - so we have to go through
        both the line below and the function mock_foo_called2() above to
        properly invoke MockResponses.foo_called2()
        """
        mockedFoo = ClassUnderTest.foo_called2
        self.monkeypatch.setattr(
            ClassUnderTest, "foo_called2", mock_foo_called2)
        x = ClassUnderTest().foo_under_test2(1)
        strOut = "Rtn from foo: " + str(x)
        self.print_to_console("\n")
        self.print_to_console(strOut)
        assert x == "TestVar" + str(4)
        self.monkeypatch.setattr(
            ClassUnderTest, "foo_called2", mockedFoo)
# -----------------------------------------------------------------------------
# ---- Main
if __name__ == "__main__":
    #
    # Setup for pytest
    outFileName = os.path.basename(__file__)[:-3]  # Remove the .py from end
    currScript = os.path.basename(__file__)
    # -------------------------------------------------------------------------
    # PyTest execution
    pytest.main([currScript, "--html", outFileName + "_report.html"])
    # After pytest finishes, the monkeypatches are gone; these direct calls
    # show the unstubbed behavior for comparison.
    rtnA = foo_under_test(1)
    print(rtnA == 4)
    # This should output 4, demonstrating effect of stub (which produced 6)
    rtnB = ClassUnderTest().foo_under_test2(1)
    print(rtnB == "TestVar" + str(5))
    # This should output "TestVar5", demonstrating effect of stub
# conftest.py
# conftest.py
class TTY:
    def communicate(self):
        # trace is attached to the class by the set_capsys fixture below.
        with self.trace():
            print('wow!')


@pytest.fixture(autouse=True)
def set_capsys(capsys):
    TTY.trace = capsys.disabled


@pytest.fixture
def get_tty():
    # Bug fix: the inner factory was missing the `def` keyword.
    def _get_tty():
        return TTY()
    return _get_tty


# test_wow.py
def test_wow(get_tty):
    get_tty().communicate()

cherrypy handle all request with one function or class

I'd like to use CherryPy, but I don't want to use the normal dispatcher; I'd like to have a function that catches all the requests and then runs my code. I think I have to implement my own dispatcher, but I can't find any valid example. Can you help me by posting some code or a link?
Thanks
make a default function:
import cherrypy


class server(object):
    # default() receives every request path that has no dedicated handler,
    # so exposing it catches all requests.
    @cherrypy.expose
    def default(self, *args, **kwargs):
        return "It works!"


cherrypy.quickstart(server())
What you ask can be done with routes and defining a custom dispatcher
http://tools.cherrypy.org/wiki/RoutesUrlGeneration
Something like the following. Note the class instantiation assigned to a variable that is used as the controller for all routes, otherwise you will get multiple instances of your class. This differs from the example in the link, but I think is more what you want.
class Root:
    def index(self):
        # <cherrypy stuff>
        return some_variable


# Single shared controller instance — connecting the class itself would
# create multiple instances, one per route.
root = Root()


def setup_routes():
    """Wire all routes to the single shared Root instance."""
    d = cherrypy.dispatch.RoutesDispatcher()
    d.connect('blog', 'myblog/:entry_id/:action', controller=root)
    d.connect('main', ':action', controller=root)
    # Bug fix: the original assigned to a local `dispatcher` variable that
    # shadowed a dead module-level global; returning d directly suffices.
    return d


conf = {'/': {'request.dispatch': setup_routes()}}
Hope that helps : )
Here's a quick example for CherryPy 3.2:
from cherrypy._cpdispatch import LateParamPageHandler


# NOTE(review): assumes `cherrypy` itself is imported by the enclosing
# module — this snippet references cherrypy.serving / cherrypy.config.
class SingletonDispatcher(object):
    """Dispatcher that routes every request to one catch-all function."""

    def __init__(self, func):
        self.func = func

    def set_config(self, path_info):
        # Get config for the root object/path.
        request = cherrypy.serving.request
        request.config = base = cherrypy.config.copy()
        curpath = ""

        def merge(nodeconf):
            if 'tools.staticdir.dir' in nodeconf:
                nodeconf['tools.staticdir.section'] = curpath or "/"
            base.update(nodeconf)

        # Mix in values from app.config.
        app = request.app
        if "/" in app.config:
            merge(app.config["/"])
        for segment in path_info.split("/")[:-1]:
            curpath = "/".join((curpath, segment))
            if curpath in app.config:
                merge(app.config[curpath])

    def __call__(self, path_info):
        """Set handler and config for the current request."""
        self.set_config(path_info)
        # Decode any leftover %2F in the virtual_path atoms.
        vpath = [x.replace("%2F", "/") for x in path_info.split("/") if x]
        cherrypy.request.handler = LateParamPageHandler(self.func, *vpath)
Then just set it in config for the paths you intend:
[/single]
request.dispatch = myapp.SingletonDispatcher(myapp.dispatch_func)
...where "dispatch_func" is your "function that catches all the requests". It will be passed any path segments as positional arguments, and any querystring as keyword arguments.

Categories

Resources