How to use pytest fixtures with Unittest methods - python

import unittest
import pytest


class MyTestCase(unittest.TestCase):
    def setUp(self):
        self.something = True

    @pytest.fixture(autouse=True)
    def MyTestMethod(self, frozentime):
        fn(self.something)  # self.something is NOT defined
If I use @pytest.fixture(autouse=True) I end up with some strange behavior from pytest. Instead of calling my setUp method before the test method, pytest skips setUp and calls MyTestMethod as if it were a plain pytest test function, which of course does not work very well.
How do I get MyTestMethod to use the frozentime fixture without ignoring the setUp method that should be called first?
class MyTestCase(unittest.TestCase):
    def setUp(self):
        self.something = True

    # @pytest.fixture(autouse=True)
    def MyTestMethod(self, frozentime):  # Fails on call, because it needs too many arguments.
        fn(self.something)

That's because the autouse fixtures are executed before the setUp/tearDown methods:
Note
Due to architectural differences between the two frameworks, setup and teardown for unittest-based tests is performed during the call phase of testing instead of in pytest's standard setup and teardown stages. This can be important to understand in some situations, particularly when reasoning about errors. For example, if a unittest-based suite exhibits errors during setup, pytest will report no errors during its setup phase and will instead raise the error during call.
Source
There's nothing you can do to change this ordering; you can only restructure the code. You can either move the fixture-relevant code out of the setUp/tearDown methods and into fixtures of their own - for example, if self.flag is used in other fixtures, you can replace
class Tests(unittest.TestCase):
    def setUp(self):
        self.flag = True

    def tearDown(self):
        self.flag = False

    @pytest.fixture(autouse=True)
    def myfixture(self):
        print(self.flag)
with
class Tests(unittest.TestCase):
    @pytest.fixture(autouse=True)
    def prepare_flag(self):
        self.flag = True
        yield
        self.flag = False

    @pytest.fixture(autouse=True)
    def myfixture(self, prepare_flag):
        print(self.flag)
Or you can move all the relevant code out of the fixtures and into setUp:
class Tests(unittest.TestCase):
    def setUp(self):
        self.flag = True

    @pytest.fixture(autouse=True)
    def myfixture(self, somearg):
        fn(self.flag, somearg)
becomes
class Tests(unittest.TestCase):
    def setUp(self):
        self.flag = True
        fn(self.flag, self._somearg)

    @pytest.fixture(autouse=True)
    def assign_stuff(self, somearg):
        self._somearg = somearg
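Applied back to the original frozentime question, the second refactoring gives a working pattern: instead of decorating the test method, let an autouse fixture receive frozentime and stash it on self, so setUp still runs and the test stays a plain unittest method. A minimal sketch (frozentime is assumed to be a fixture defined elsewhere, e.g. in conftest.py; fn is the question's placeholder):

import unittest
import pytest


class MyTestCase(unittest.TestCase):
    def setUp(self):
        self.something = True

    @pytest.fixture(autouse=True)
    def _inject_frozentime(self, frozentime):
        # runs for every test and stores the fixture value on the instance
        self.frozentime = frozentime

    def test_my_method(self):
        # setUp has run by now, and the fixture value is available too
        fn(self.something, self.frozentime)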

As @hoefling mentioned, the two lifecycles are incompatible... but that can be hacked around if you're not aiming for drop-in compatibility.
import pytest
from pytestqt.plugin import QtBot
from unittest import TestCase

from myproject import custom_widget


@pytest.fixture(scope="class")
def qtbot_adapter(qapp, request):
    """Adapt qtbot fixture for usefixtures and unittest.TestCase"""
    request.cls.qtbot = QtBot(request)


def with_updown(function):
    """Wrapper to bodge setUp/tearDown into fixtures+TestCase"""
    def test_wrapper(self, *args, **kwargs):
        __tracebackhide__ = True
        if callable(getattr(self, 'up', None)):
            self.up()
        try:
            function(self, *args, **kwargs)
        finally:
            if callable(getattr(self, 'down', None)):
                self.down()
    test_wrapper.__doc__ = function.__doc__
    return test_wrapper


@pytest.mark.usefixtures("qtbot_adapter")
class MyTestCase(TestCase):
    def up(self):
        self.widget = custom_widget.CustomWidget()
        self.widget.show()

    @with_updown
    def test_some_property(self):
        with self.qtbot.waitSignal(self.widget.my_signal, timeout=300):
            self.widget.do_thing()
        self.assertEqual(self.widget.get_thing(), 'foo')

Related

How can I import a testclass properly to inherit from, without it being run as a test

Context
I have a test class that all my tests inherit from. It can't run by itself as it really doesn't contain any setup info.
I wanted to add a test which is executed by ALL tests (adding it to the base class seems logical).
But now I notice that the base test class (=> Foo) which I import is being detected as a test itself, runs, and is visible in the reports.
Code
the base class in base.py
from unittest import TestCase


class Foo(TestCase):
    @classmethod
    def setUpClass(cls):
        # prepare the generic setup stuff based on what is defined in the child class
        print("setupclass Foo done")

    def test_run_in_all_inherited_tests(self):
        print("fooBar")
        assert True
the real test in test_something.py
from base import Foo  # <= This is being detected as a testclass object and thus will be executed


class TestFoo(Foo):
    @classmethod
    def setUpClass(cls):
        # define specific test setup
        super().setUpClass()
        print("setup TestFoo done")

    def test_pass(self):
        pass

    def test_assert(self):
        assert False
This triggers a test run of the imported Foo.
The Question
How can I import Foo without it being detected as a 'test'?
If I remove the test that runs in all tests, all is fine.
Adding the @nottest decorator to Foo won't work, since then all inherited classes are marked nottest as well.
It needs to run on the nose, pytest and unittest test runners.
I noticed that if I change the import statement as below, it also works, but that would mean adjusting a few hundred test files in different repos (I'd like to avoid that).
import base
class TestFoo(base.Foo):
The key to the answer seems to be that each test has an attribute __test__ which is set to True when it is a test.
Setting it to False when the class should not be a test will then let the test collector ignore this class.
The answer assumes changes can only be made in base.py.
In Python 3.9 the classmethod and property decorators can be combined, so I wrote a separate answer for that.
answer for < py3.9
the base class in base.py
from unittest import TestCase


class MetaFoo(type):
    @property
    def __test__(cls):
        return cls != Foo


class Foo(TestCase, metaclass=MetaFoo):
    @classmethod
    def setUpClass(cls):
        # prepare the generic setup stuff based on what is defined in the child class
        print("setupclass Foo done")

    def test_run_in_all_inherited_tests(self):
        print("fooBar")
        assert True
answer for >= py3.9
the base class in base.py
from unittest import TestCase


class Foo(TestCase):
    @classmethod
    @property
    def __test__(cls):
        return cls != Foo

    @classmethod
    def setUpClass(cls):
        # prepare the generic setup stuff based on what is defined in the child class
        print("setupclass Foo done")

    def test_run_in_all_inherited_tests(self):
        print("fooBar")
        assert True
the actual test
test_something.py
from base import Foo  # <= This will not be detected as a test anymore as __test__ returns False


class TestFoo(Foo):
    @classmethod
    def setUpClass(cls):
        # define specific test setup
        super().setUpClass()
        print("setup TestFoo done")

    def test_pass(self):
        pass

    def test_assert(self):
        assert False
This doesn't trigger a test run of the imported Foo anymore.
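For context, the property is used rather than a plain class attribute because __test__ is inherited: setting __test__ = False directly on Foo would hide every subclass from collection as well, whereas the property form evaluates cls != Foo per class. A small sketch of the pitfall (illustrative only):

from unittest import TestCase


class Foo(TestCase):
    # NOT a solution: this attribute is inherited, so subclasses are
    # skipped by the collector as well
    __test__ = False

    def test_run_in_all_inherited_tests(self):
        assert True


class TestFoo(Foo):
    def test_pass(self):
        pass  # never collected, because TestFoo.__test__ is also False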

Python unittesting - How to assert inside of setUpClass?

In my setUpClass I would like to create a resource in the database one time, which is then used for all of the tests in the class.
After I create the resource in setUpClass, I would like to perform assertions on it right then and there. However, I'm not sure how to call assertions in setUpClass, given that all of the assertion functions are instance methods, not class methods.
import unittest


class TestFoo(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        cls.foo = cls.make_foo(name='bar')
        # How would I assert that cls.foo['name'] == 'bar'?
        # The following does not work, as the assertEquals function is not a class method
        # cls.assertEquals(cls.foo['name'], 'bar')

    @classmethod
    def make_foo(cls, name):
        # Calls a POST API to create a resource in the database
        return {'name': name}

    def test_foo_0(self):
        # Do something with cls.foo
        pass

    def test_foo_1(self):
        # do something else with cls.foo
        pass
The only alternative I can think of is to raise an exception in setUpClass:
@classmethod
def setUpClass(cls):
    cls.foo = cls.make_foo(name='bar')
    if cls.foo['name'] != 'bar':
        raise Exception("Failed to create resource, cannot do the tests")
Of course, I do not want to call the assertions from each test, as this will just duplicate the code.
Edit: I don't think this workaround is good, because the failure message will point to the self.assertFalse(self.flag) line, instead of the if cls.foo['name'] != 'bar' line. In addition, if you created multiple resources, this would need multiple flags to disambiguate.
flag = False

@classmethod
def setUpClass(cls):
    cls.foo = cls.make_foo(name='bar')
    if cls.foo['name'] != 'bar':
        cls.flag = True

def setUp(self):
    self.assertFalse(self.flag)
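One detail worth keeping in mind here: any exception raised in setUpClass is already reported as a class-level error, and none of the tests in that class run, so the exception route above behaves much like an assertion as long as the message is descriptive. A minimal sketch along those lines (make_foo stands in for the real POST call):

import unittest


class TestFoo(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        cls.foo = cls.make_foo(name='bar')
        if cls.foo['name'] != 'bar':
            # reported as an error for the whole class; no tests in it will run
            raise AssertionError(
                f"expected resource name 'bar', got {cls.foo['name']!r}")

    @classmethod
    def make_foo(cls, name):
        # stand-in for the POST API call
        return {'name': name}

    def test_foo_0(self):
        self.assertEqual(self.foo['name'], 'bar')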

using the pytest.fixture scope function on setup_class

I have two test classes, TestClassA and TestClassB. In test_1_that_needs_resource_a in TestClassA, I am using a fixture so I can call resource_a and pass parameters before executing the test case. How can I use the same fixture on setup_class in TestClassB, so that the fixture is called one time before all test cases in TestClassB?
import pytest


@pytest.fixture(scope='function')
def resource(request):
    print('resources_a_setup()')
    return request.param


class TestClassA:
    @classmethod
    def setup_class(cls):
        print('\nsetup_class()')

    @classmethod
    def teardown_class(cls):
        print('\nteardown_class()')

    @pytest.mark.parametrize('resource', [dict(), dict(name=1)], indirect=['resource'])
    def test_1_that_needs_resource_a(self, resource):
        assert resource == dict()
        print(f'\ntest_1_that_needs_resource_a(), {resource}')


class TestClassB:
    # I would like to call the resource fixture with some parameters on setup_class here
    @classmethod
    def setup_class(cls):
        print('\nsetup_class()')

    @classmethod
    def teardown_class(cls):
        print('\nteardown_class()')

    def test_1_that_needs_resource_a(self):
        print('\ntest_1_that_needs_resource_a()')
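A sketch of one possible approach (not taken from the thread above), assuming the resource fixture can be given class scope: a class-scoped fixture runs once per class, and a second class-scoped fixture attaches its result to request.cls so ordinary test methods can reach it as self.resource, which effectively replaces setup_class:

import pytest


@pytest.fixture(scope='class')
def class_resource():
    # hypothetical class-scoped variant of the resource fixture:
    # set up once, shared by every test in the class
    print('resources_a_setup()')
    return dict(name=1)


@pytest.fixture(scope='class')
def attach_resource(request, class_resource):
    # stand-in for setup_class: expose the resource to the test methods
    request.cls.resource = class_resource


@pytest.mark.usefixtures('attach_resource')
class TestClassB:
    def test_1_that_needs_resource_a(self):
        print(f'\ntest_1_that_needs_resource_a(), {self.resource}')
        assert self.resource == dict(name=1)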

Django test global setup

I have some files for unit tests with Django:
test1.py

class Test1(unittest.TestCase):
    def setUp(self):
        ...

    def tearDown(self):
        ...

test2.py

class Test2(unittest.TestCase):
    def setUp(self):
        ...

    def tearDown(self):
        ...

testn.py

class Testn(unittest.TestCase):
    def setUp(self):
        ...

    def tearDown(self):
        ...
I want to create a global setup to make some configuration for all the tests, something like:
some_file.py

class GlobalSetUpTest(SomeClass):
    def setup(self):  # or any function name
        global_stuff = "whatever"

Is that possible? If so, how? Thanks in advance.
You could just create a parent class with your custom global setUp method and then have all of your other test classes extend that:
class MyTestCase(unittest.TestCase):
    def setUp(self):
        self.global_stuff = "whatever"


class TestOne(MyTestCase):
    def test_one(self):
        a = self.global_stuff


class TestTwo(MyTestCase):
    def setUp(self):
        # Other setUp operations here
        super(TestTwo, self).setUp()  # this will call MyTestCase.setUp to ensure self.global_stuff is assigned

    def test_two(self):
        a = self.global_stuff
Obviously you could use the same technique for a 'global' tearDown method.
If you want to have it only run once for all tests, you can override the test management command by placing a management/commands/test.py in one of your apps:
from django.core.management.commands import test


class Command(test.Command):
    def handle(self, *args, **options):
        # Do your magic here
        super(Command, self).handle(*args, **options)
Unfortunately this does not work well with PyCharm.
In PyCharm you can use a "Before launch" task instead.
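Another place for run-wide, one-time setup is a custom test runner (a sketch, not from the answers above; the module path and runner name are illustrative):

# myproject/test_runner.py, enabled with
# TEST_RUNNER = "myproject.test_runner.MyTestRunner" in settings.py
from django.test.runner import DiscoverRunner


class MyTestRunner(DiscoverRunner):
    def setup_test_environment(self, **kwargs):
        super().setup_test_environment(**kwargs)
        # one-time global configuration for the whole test run goes here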

Using a class decorator, how to override a method without redefining the class?

For unit tests (using the unittest module) that use the App Engine testbed, I need setUp and tearDown methods to activate and deactivate the testbed, respectively (slightly simplified):
class SomeTest(unittest.TestCase):
    def setUp(self):
        self.testbed = testbed.Testbed()
        self.testbed.activate()

    def tearDown(self):
        self.testbed.deactivate()

    def testSomething(self):
        ...
This quickly becomes a burden to write. I could write a base class TestCaseWithTestbed, but then I'd have to remember to call the superclass method each time I need a custom setUp in one of the test cases.
I thought it would be more elegant to solve this with a class decorator instead. So I'd like to write:
@WithTestbed
class SomeTest(unittest.TestCase):
    def testSomething(self):
        ...
With this decorator applied, the testbed should just be activated magically. So... how to implement the WithTestbed decorator? I currently have the following:
def WithTestbed(cls):
    class ClsWithTestbed(cls):
        def setUp(self):
            self.testbed = testbed.Testbed()
            self.testbed.activate()
            cls.setUp(self)

        def tearDown(self):
            cls.tearDown(self)
            self.testbed.deactivate()
    return ClsWithTestbed
This works for simple cases, but has some serious problems:
The name of the test class becomes ClsWithTestbed and this shows up in the test output.
Concrete test classes calling super(SomeTestClass, self).setUp() end up in an infinite recursion, because the name SomeTestClass is now bound to ClsWithTestbed.
I'm a bit hazy on Python's runtime type manipulation. So, how to do this the Right Way?
This appears to work and solve the problems:
def WithTestbed(cls):
    def DoNothing(self):
        pass
    orig_setUp = getattr(cls, 'setUp', DoNothing)
    orig_tearDown = getattr(cls, 'tearDown', DoNothing)

    def setUp(self):
        self.testbed = testbed.Testbed()
        self.testbed.activate()
        orig_setUp(self)

    def tearDown(self):
        orig_tearDown(self)
        self.testbed.deactivate()

    cls.setUp = setUp
    cls.tearDown = tearDown
    return cls
Does anyone see any problems with this approach?
Here's a simple way to do what you're asking with subclassing instead of a decorator:
class TestCaseWithTestBed(unittest.TestCase):
    def setUp(self):
        self.testbed = testbed.Testbed()
        self.testbed.activate()
        self.mySetUp()

    def tearDown(self):
        self.myTearDown()
        self.testbed.deactivate()

    def mySetUp(self): pass
    def myTearDown(self): pass


class SomeTest(TestCaseWithTestBed):
    def mySetUp(self):
        "Insert custom setup here"
All you have to do is define mySetUp and myTearDown in your test cases instead of setUp and tearDown.
Something like this would work:
def WithTestbed(cls):
    cls._post_testbed_setUp = getattr(cls, 'setUp', lambda self: None)
    cls._post_testbed_tearDown = getattr(cls, 'tearDown', lambda self: None)

    def setUp(self):
        self.testbed = testbed.Testbed()
        self.testbed.activate()
        self._post_testbed_setUp()

    def tearDown(self):
        self.testbed.deactivate()
        self._post_testbed_tearDown()

    cls.setUp = setUp
    cls.tearDown = tearDown
    return cls


@WithTestbed
class SomeTest(object):
    ...
