Decorator factory on a unittest method - Python

def register_processor2(processor_name='SomeProcessor'):
    def decorator(func):
        class SomeProcessor(GenericPaymentProcessor, TriggeredProcessorMixin):
            name = processor_name
            transaction_class = Transaction

            @staticmethod
            def setup(data=None):
                pass

        @wraps(func)
        def func_wrapper(*args, **kwargs):
            PaymentProcessorManager.register(SomeProcessor)
            result = func(*args, **kwargs)
            PaymentProcessorManager.unregister(SomeProcessor)
            return result
        return func_wrapper
    return decorator
def register_processor(func):
    class SomeProcessor(GenericPaymentProcessor, TriggeredProcessorMixin):
        name = 'SomeProcessor'
        transaction_class = Transaction

        @staticmethod
        def setup(data=None):
            pass

    @wraps(func)
    def func_wrapper(*args, **kwargs):
        PaymentProcessorManager.register(SomeProcessor)
        result = func(*args, **kwargs)
        PaymentProcessorManager.unregister(SomeProcessor)
        return result
    return func_wrapper
class TestPaymentMethodEndpoints(APITestCase):
    @register_processor
    def test_put_detail_cannot_change_processor(self):
        self.assertEqual(True, False)
OK, so the decorator register_processor works as expected and the test fails, but I want to make the name of the inner class customizable, so I went for a decorator factory implementation instead.
The thing is when running the test decorated with register_processor2 I get the following:
AttributeError: 'TestPaymentMethodEndpoints' object has no attribute '__name__'
This comes from @wraps(func); my question is why func here is an instance of TestPaymentMethodEndpoints and not the bound method.
Also, if I remove the @wraps decorator, the test runs and passes.
I'd expect the test not to be discovered, since func_wrapper does not start with test_*, and even if it is discovered it should fail.
Any insight on what is happening and how I'd go about doing this?
EDIT
So I figured it out: even if the decorator factory's arguments all have default values, you still need to write () when applying it.
But would still love to hear an explanation of what happened in case of the tests passing / getting discovered in the first place.
class TestPaymentMethodEndpoints(APITestCase):
    @register_processor()
    def test_put_detail_cannot_change_processor(self):
        self.assertEqual(True, False)
Makes sense now that I think about it :D, gosh you learn something new each day!
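For what it's worth, here is a minimal sketch (with made-up names: factory, Demo, test_demo) of what happens when the parentheses are left off: the test method itself is passed as processor_name, and the returned decorator function replaces the test method. When unittest invokes it, the TestCase instance arrives as func, so @wraps(func) tries to copy func.__name__ from the instance, which is where the AttributeError comes from. Without @wraps, decorator simply builds and returns func_wrapper, nothing raises, and the test is reported as passing.
def factory(name='SomeProcessor'):
    def decorator(func):
        def func_wrapper(*args, **kwargs):
            return func(*args, **kwargs)
        return func_wrapper
    return decorator

class Demo:
    @factory                      # note: no parentheses
    def test_demo(self):
        raise AssertionError("never runs")

Demo().test_demo()                # actually calls decorator(<Demo instance>),
                                  # which just returns func_wrapper -- no error raised,
                                  # so a test written like this "passes"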

I think you're now asking "how come the unittest module can find test cases that have been wrapped in functions with names that don't start with test?"
The answer to that is because unittest doesn't use the names of the functions to find the methods to run, it uses the attribute names of the test case classes to find them.
So try running the following code:
from unittest import TestCase

def apply_fixture(func):
    def wrap_with_fixture(self):
        print('setting up fixture...')
        try:
            func(self)
        finally:
            print('tearing down fixture')
    return wrap_with_fixture

class MyTestCase(TestCase):
    @apply_fixture
    def test_something(self):
        print('run test')

print('Attributes of MyTestCase: %s' % dir(MyTestCase))
print('test_something method: %s' % MyTestCase.test_something)

mtc = MyTestCase()
mtc.test_something()
You will see that the output from dir contains the name test_something:
Attributes of MyTestCase: ['__call__', ...lots of things..., 'test_something']
but that the value of that attribute is the wrapping function wrap_with_fixture:
test_something method: <function apply_fixture.<locals>.wrap_with_fixture at 0x10d90aea0>
This makes sense when you consider that defining a function both creates a function object with the name provided and binds a local variable of the same name to it, and that the decorator @ syntax is just syntactic sugar. So the following would have been an equally valid, albeit longer-winded, way of creating your test case class:
class MyTestCase(TestCase):
    def test_something(self):
        print('run test')

    # Overwrite existing 'local' (or 'class' variable in this context)
    # with a new value. We haven't deleted the test_something function,
    # which still exists but is now owned by the function we've created.
    test_something = apply_fixture(test_something)
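A quick way to see this discovery behaviour directly (a small follow-up sketch, assuming the MyTestCase class above) is to ask the default loader for the test names: it reports the attribute name, while the attribute's value still carries the wrapper's __name__.
import unittest

print(unittest.defaultTestLoader.getTestCaseNames(MyTestCase))  # ['test_something']
print(MyTestCase.test_something.__name__)                       # 'wrap_with_fixture'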


Python unittest patch mock entire class

I have a class that I want to patch in my unittests.
class OriginalClass():
    def method_a(self):
        # do something
        pass

    def method_b(self):
        # do another thing
        pass
Now I created another class to patch it with, so the code for patching it looks like this:
class MockClass(OriginalClass):
    def method_a(self):
        # This will override the original method and return a custom response for testing.
        pass
patcher = patch('OriginalClass', new=MockClass)
mock_instance = patcher.start()
This works exactly as I want it to, and I can return whatever responses are required for my unittests.
Now the issue is that I want to verify that a method is called with the right parameters in the unittests.
I tried
mock_instance.method_a.assert_called_once()
But it fails with the error AttributeError: 'function' object has no attribute 'assert_called_once'.
How can I test the method calls here?
AttributeError: 'function' object has no attribute 'assert_called_once'.
Once the mock object is created, no call to method_a has been recorded yet, so you have to call m.method_a() once before the assert:
from unittest import mock

m = mock.create_autospec(OriginalClass)
m.method_a()
m.method_a.assert_called_once()
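As a self-contained sketch (the Service class and its fetch method are made up here, not from the question), the same autospec approach also lets you check the arguments of a call:
from unittest import mock

class Service:
    def fetch(self, key):
        pass

m = mock.create_autospec(Service)
m.fetch('user-42')
m.fetch.assert_called_once_with('user-42')  # argument check works because the spec records calls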
patch mock entire class
I took this to mean mocking the whole class and all its methods, so I'll take an example from here:
https://docs.python.org/3.3/library/unittest.mock-examples.html
(Applying the same patch to every test method.) Here is my example: patch the entire Primary class as MockPrimary for every method and every test. setUp or setUpClass could be added for the methods that need it; the whole class is mocked even though not every method is used in the tests.
import unittest
from unittest import mock

from tests.lib.primary_secondary import Secondary


@mock.patch('tests.lib.primary_secondary.Primary')
class TestSecondaryMockPrimary(unittest.TestCase):
    def test_method_d(self, MockPrimary):
        MockPrimary().process()
        MockPrimary().process.return_value = 1

        oc = Secondary()
        self.assertEqual(oc.method_d(), 1)

        import tests
        self.assertIs(tests.lib.primary_secondary.Primary, MockPrimary)
Primary is needed by Secondary for this test:
class Primary(object):
    def __init__(self, param):
        self._param = param

    def process(self):
        if self._param == 1:
            self._do_intermediate_process()
        self._do_process()


class Secondary(object):
    def __init__(self):
        self.scl = Primary(1)

    def method_d(self):
        return self.scl.process()
I think wraps can be useful here:
from unittest.mock import patch

class Person:
    name = "Bob"

    def age(self):
        return 35

class Double(Person):
    def age(self):
        return 5

with patch('__main__.Person', wraps=Double()) as mock:
    print(mock.name)   # mocks data
    print(mock.age())  # runs real methods, but still spies their calls
    mock.age.assert_not_called()
Output:
<MagicMock name='Person.name' id='139815250247536'>
5
...
raise AssertionError(msg)
AssertionError: Expected 'age' to not have been called. Called 1 times.
Calls: [call()].

Give function scope access to the self where it is called?

I have a class "Wrapper". Wrapper inherits from object, and its primary responsibility is to hook method calls to Selenium during a test. Its __init__ method signature is
__init__(self, object_to_wrap, actions={}, **kwargs)
Part of its functionality is to take a dictionary as an argument in the __init__ method and to expose its keys as attributes on the object_to_wrap object by defining the __getattr__ magic method
(signature: __getattr__(self, item))
and checking self.actions' keys for item. If it is found, the method is invoked.
In the test code, the initialization would look similar to:
def navigate(scoped_self, to=''):
    self.driver.switch_to_default_content()
    self.driver.switch_to.frame(to)
    scoped_self.navigations.append(to)
    # the navigate method is defined inside an
    # instance method of the test class, so it has access to self
So, my question is, how can I make the scope, or self in the above method, be the scope of my Wrapper class, and not the test class?
For clarity, if the solution I am looking for in this question was found, the navigate implementation would change to be:
def navigate(scoped_self, to=''):
    self.object_to_wrap.switch_to_default_content()
    self.object_to_wrap.switch_to.frame(to)
Also for clarity, I'm pretty sure what I'm looking for is exactly what JavaScript's Function.prototype.bind accomplishes.
UPDATE: Defining the methods, such as navigate, inside the Wrapper class is not an option, as the Wrapper class cannot have test-specific logic. The test suite runs against n > 1 DOMs that are completely unrelated. For example, one of the tests requires the method "alert_handlers" (to overwrite the window.alert function and return the alerts presented as a string), another might require the navigate method, and a third might require both.
UPDATE #2: Thanks to the answer below from c17r, I realized that without the __getattr__ hooking code included in my example, it would appear as if I were asking for something we had already accomplished. What I am looking for is for scoped_self, in the above navigate method, to be the instance of Wrapper.
Furthermore, I am specifically looking for a way to "dynamically" pass scoped_self to the functions (the way that Function.prototype.bind "dynamically" sets this, as in myEventListener.bind(window); if you're unfamiliar with JavaScript, make an event listener on the body and console.log this without binding, and then with binding, to see the difference).
Furthermore, I figured it might help to give an example implementation without the solution I am looking for. This is currently working as expected:
class Wrapper(object):
    def __init__(self, wrapped, actions={}):
        self.wrapped = wrapped
        self.actions = actions
        self.navigations = []  # EXAMPLE, SEE THE TEST CLASS CODE

    def __getattr__(self, item):
        if item in self.actions:
            return self.actions[item]
        # do other fancy stuff here
        # UPDATE #2: added for clarity. this is the current implementation
        orig_attr = self.wrapped.__getattribute__(item)
        if callable(orig_attr):
            def hooked(*args, **kwargs):
                self.pre(item, *args, **kwargs)
                self.err = False
                try:
                    result = orig_attr(*args, **kwargs)
                except Exception as e:
                    # logs
                    self.post(*args, **kwargs)
                    raise
                if type(self.wrapped) == type(result):
                    return self
                return result
            return hooked
        else:
            return orig_attr


class SomeTest():
    # blah blah init stuff, set self.driver = selenium.webdriver.Phantomjs

    def spawn_actions(self):
        def navigate(scoped_self, to=''):
            self.driver.switch_to_default_content()
            self.driver.switch_to.frame(to)
            scoped_self.navigations.append(to)  # <--- appended to wrapper.navigations
        return {'navigate': navigate}

    def run(self):
        driver = Wrapper(self.driver, self.spawn_actions())
        driver.get(url)
        driver.navigate('New Request')
        # fun tests!
If I understand you correctly, there are three things going on here:
1. Wrapper needs to pass any unknown functions down to the wrapped item, otherwise driver.get() won't work properly.
2. The navigate function needs self as the first parameter, like it would if it were actually defined on Wrapper.
3. Wrapper needs to pass self into the dict-based function. This is a little tricky since __getattr__ doesn't actually call the function, so you need to return a function that calls the underlying function properly, similar to how decorators work.
Like this:
class Driver(object):
    def get(self, url):
        print('get')
        print(repr(self))
        print(repr(url))
        print('--')


class Wrapper(object):
    def __init__(self, wrapped, actions={}):
        self.wrapped = wrapped
        self.actions = actions

    def __getattr__(self, item):
        if item in self.actions:
            def unwrap(*args, **kwargs):
                return self.actions[item](self, *args, **kwargs)
            return unwrap
        else:
            return getattr(self.wrapped, item)


class Test(object):
    def __init__(self):
        self.driver = Driver()

    def spawn_actions(self):
        def navigate(self, to=''):
            print('navigate')
            print(repr(self))
            print(repr(to))
            print(repr(self.wrapped))
            print('--')
        return {
            'navigate': navigate
        }

    def run(self):
        driver = Wrapper(self.driver, self.spawn_actions())
        driver.get('url')
        driver.navigate('thing')
Now calling:
t = Test()
t.run()
Outputs:
get
<__main__.Driver object at 0x104008630>
'url'
--
navigate
<__main__.Wrapper object at 0x104008ba8>
'thing'
<__main__.Driver object at 0x104008630>
--
EDIT
You can also dynamically bind the methods to the instance instead of __getattr__ returning the unwrap function:
import types

class Wrapper(object):
    def __init__(self, wrapped, actions={}):
        self.wrapped = wrapped
        for name, func in actions.items():
            setattr(self, name, types.MethodType(func, self))

    def __getattr__(self, item):
        return getattr(self.wrapped, item)
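As a quick usage check (a sketch reusing the Test class from above, not new API), this variant behaves the same way: navigate ends up bound to the Wrapper instance, and unknown attributes still fall through to the wrapped driver.
t = Test()
t.run()   # produces the same output as the __getattr__-based version above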

Decorator for a class method that caches return value after first access

My problem, and why
I'm trying to write a decorator for a class method, @cachedproperty. I want it to behave so that when the method is first called, the method is replaced with its return value. I also want it to behave like @property so that it doesn't need to be explicitly called. Basically, it should be indistinguishable from @property except that it's faster, because it only calculates the value once and then stores it. My idea is that this would not slow down instantiation the way defining the value in __init__ would. That's why I want to do this.
What I tried
First, I tried to override the fget method of the property, but it's read-only.
Next, I figured I'd try to implement a decorator that does need to be called the first time but then caches the value. This isn't my final goal of a property-type decorator that never needs to be called, but I thought it would be a simpler problem to tackle first. In other words, this is a not-working solution to a slightly simpler problem.
I tried:
def cachedproperty(func):
    """ Used on methods to convert them to methods that replace themselves
        with their return value once they are called. """
    def cache(*args):
        self = args[0]  # Reference to the class who owns the method
        funcname = inspect.stack()[0][3]  # Name of the function, so that it can be overridden.
        setattr(self, funcname, func())  # Replace the function with its value
        return func()  # Return the result of the function
    return cache
However, this doesn't seem to work. I tested this with:
>>> class Test:
...     @cachedproperty
...     def test(self):
...         print "Execute"
...         return "Return"
...
>>> Test.test
<unbound method Test.cache>
>>> Test.test()
but I get an error about how the class didn't pass itself to the method:
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
TypeError: unbound method cache() must be called with Test instance as first argument (got nothing instead)
At this point, me and my limited knowledge of deep Python methods are very confused, and I have no idea where my code went wrong or how to fix it. (I've never tried to write a decorator before)
The question
How can I write a decorator that will return the result of calling a class method the first time it's accessed (like @property does), and be replaced with a cached value for all subsequent queries?
I hope this question isn't too confusing, I tried to explain it as well as I could.
If you don't mind alternative solutions, I'd recommend lru_cache
for example
from functools import lru_cache

class Test:
    @property
    @lru_cache(maxsize=None)
    def calc(self):
        print("Calculating")
        return 1
Expected output
In [2]: t = Test()
In [3]: t.calc
Calculating
Out[3]: 1
In [4]: t.calc
Out[4]: 1
First of all, Test should be instantiated:
test = Test()
Second, there is no need for inspect because we can get the property name from func.__name__.
And third, we return property(cache) to make Python do all the magic:
def cachedproperty(func):
    """ Used on methods to convert them to methods that replace themselves
        with their return value once they are called. """
    def cache(*args):
        self = args[0]  # Reference to the class who owns the method
        funcname = func.__name__
        ret_value = func(self)
        setattr(self, funcname, ret_value)  # Replace the function with its value
        return ret_value  # Return the result of the function
    return property(cache)
class Test:
    @cachedproperty
    def test(self):
        print "Execute"
        return "Return"
>>> test = Test()
>>> test.test
Execute
'Return'
>>> test.test
'Return'
>>>
"""
With Python 3.8 or later you can use functools.cached_property().
It works similarly to the previously proposed lru_cache solution.
Example usage:
import functools

class Test:
    @functools.cached_property
    def calc(self):
        print("Calculating")
        return 1
Test output:
In [2]: t = Test()
In [3]: t.calc
Calculating
Out[3]: 1
In [4]: t.calc
Out[4]: 1
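One detail worth knowing (a small follow-up sketch, not from the original answer): functools.cached_property stores the computed value in the instance's __dict__, so deleting the attribute clears the cache and the next access recomputes it.
t = Test()
t.calc      # prints "Calculating", evaluates to 1
del t.calc  # drops the cached value from t.__dict__
t.calc      # prints "Calculating" again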
I think you're better off with a custom descriptor, since this is exactly the kind of thing descriptors are for. Like so:
class CachedProperty:
    def __init__(self, name, get_the_value):
        self.name = name
        self.get_the_value = get_the_value

    def __get__(self, obj, typ):
        name = self.name
        while True:
            try:
                return getattr(obj, name)
            except AttributeError:
                get_the_value = self.get_the_value
                try:
                    # get_the_value can be a string which is the name of an obj method
                    value = getattr(obj, get_the_value)()
                except AttributeError:
                    # or it can be another external function
                    value = get_the_value()
                setattr(obj, name, value)
                continue
            break

class Mine:
    cached_property = CachedProperty("_cached_property", get_cached_property_value)

# OR:

class Mine:
    cached_property = CachedProperty("_cached_property", "get_cached_property_value")

    def get_cached_property_value(self):
        return "the_value"
EDIT: By the way, you don't even actually need a custom descriptor. You could just cache the value inside of your property function. E.g.:
@property
def test(self):
    while True:
        try:
            return self._test
        except AttributeError:
            self._test = get_initial_value()
That's all there is to it.
However, many would consider this a bit of an abuse of property, and to be an unexpected way of using it. And unexpected usually means you should do it another, more explicit way. A custom CachedProperty descriptor is very explicit, so for that reason I would prefer it to the property approach, though it requires more code.
Django's version of this decorator does exactly what you describe and is simple, so besides my comment I'll just copy it here:
class cached_property(object):
    """
    Decorator that converts a method with a single self argument into a
    property cached on the instance.

    Optional ``name`` argument allows you to make cached properties of other
    methods. (e.g. url = cached_property(get_absolute_url, name='url') )
    """
    def __init__(self, func, name=None):
        self.func = func
        self.__doc__ = getattr(func, '__doc__')
        self.name = name or func.__name__

    def __get__(self, instance, type=None):
        if instance is None:
            return self
        res = instance.__dict__[self.name] = self.func(instance)
        return res
(source).
As you can see, it uses func.__name__ to determine the name of the function (no need to fiddle with inspect.stack) and it replaces the method with its result by mutating instance.__dict__. So subsequent "calls" are just an attribute lookup and there is no need for any caches, et cetera.
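A short usage sketch for the descriptor above (the Report class and total method are made up for illustration):
class Report:
    @cached_property
    def total(self):
        print("computing")
        return 42

r = Report()
r.total  # prints "computing"; stores 42 in r.__dict__['total']
r.total  # plain attribute lookup now, no recomputation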
You can use something like this:
# Assumed Django imports for the settings flag and cache backend used below.
from django.conf import settings
from django.core.cache import cache

def cached(timeout=None):
    def decorator(func):
        def wrapper(self, *args, **kwargs):
            value = None
            key = '_'.join([type(self).__name__,
                            str(self.id) if hasattr(self, 'id') else '',
                            func.__name__])
            if settings.CACHING_ENABLED:
                value = cache.get(key)
            if value is None:
                value = func(self, *args, **kwargs)
                if settings.CACHING_ENABLED:
                    # if timeout=None Django cache reads a global value from settings
                    cache.set(key, value, timeout=timeout)
            return value
        return wrapper
    return decorator
When adding to the cache it generates keys based on the convention class_id_function, in case you are caching entities and the property could return a different value for each one.
It also checks a settings key CACHING_ENABLED in case you want to turn it off temporarily when doing benchmarks.
But it does not encapsulate the standard property decorator, so you should still call it like a function, or you can use it like this (why restrict it to properties only):
@cached
@property
def total_sales(self):
    # Some calculations here...
    pass
Also, it may be worth noting that if you are caching a result from lazy foreign key relationships, there are times, depending on your data, where it would be faster to simply run an aggregate function in your select query and fetch everything at once than to visit the cache for every record in your result set. So use a tool like django-debug-toolbar for your framework to compare what performs best in your scenario.
@functools.lru_cache()
def func(....):
    ....
Reference: @functools.lru_cache() | Python
Have you tried Django's built-in:
from django.utils.functional import cached_property
Please don't use lru_cache as suggested by multiple people, as it opens up a host of possible memory leak issues.

How can I use method decorators before the constructor is called?

I have some code that allows you to define what happens on Christmas without knowing anything about the underlying implementation or how the method is called, e.g.:
# main.py
import lib.person

person = lib.person.Person()

@person.onchristmas()
def christmas():
    print "It's Christmas"
The implementation of the class is something like this:
# lib.person.py
class Person():
    def onchristmas(self):
        def decorator(f):
            self.christmas_handler = f
            return f
        return decorator

    def is_christmas(self):
        # called from somewhere else:
        self.christmas_handler()
The problem is that I can't import main.py without constructing a person. Similarly I can't move the constructor to be:
person = None

def init():
    person = lib.person.Person()
    return person
because then person would be None and the decorators won't work. What is the correct way to factor this code so that:
1. I can still use the decorator to let people implement their own Christmas action without editing lib.person.py, and
2. I can construct person explicitly with init() instead of it happening on import?
EDIT: FURTHER DETAIL FROM COMMENTS
In actual fact there are many different things that can happen, not just Christmas, and there isn't just one handler per action; there might be several, and all must execute:
So:
def onchristmas(self):
    def decorator(f):
        self.christmas_handler.append(f)
        return f
    return decorator

def is_christmas(self):
    # called from somewhere else:
    for h in self.christmas_handler:
        h()
Usage: I would like others to be able to specify the behavior of one or more actions without having to know how or when they will be called, and ideally, further down the line, to be able to de-register handlers.
Also, I should mention that there will only ever be one instance of Person; I'm not too familiar with static methods and singletons in Python though. Thanks for the help!
The issue is the mixing of your decorator and your state. Why not break your decorator out of the class and allow your users to supply their own function, so that the only state the decorator relies on is what is supplied to it? Something similar to this:
def christmas_config(user_defined_func):
    def inner_config(func):
        def f(*args, **kwargs):
            print 'Hey Yo'
            return user_defined_func(func, *args, **kwargs)
        return f
    return inner_config

def test(func, *args, **kwargs):
    print 'This is hairy'
    return func(*args, **kwargs)

@christmas_config(test)
def my_func():
    print 'test'

my_func()
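For reference, tracing the snippet above: the wrapper f created by christmas_config runs first, then the user-supplied test hook, then the original my_func body, so the call prints three lines in that order.
# my_func() above prints, in order:
#   Hey Yo          (from f, the wrapper created by christmas_config)
#   This is hairy   (from the user-supplied test hook)
#   test            (from the original my_func body)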

Python decorator makes function forget that it belongs to a class

I am trying to write a decorator to do logging:
def logger(myFunc):
    def new(*args, **keyargs):
        print 'Entering %s.%s' % (myFunc.im_class.__name__, myFunc.__name__)
        return myFunc(*args, **keyargs)
    return new

class C(object):
    @logger
    def f(self):
        pass

C().f()
I would like this to print:
Entering C.f
but instead I get this error message:
AttributeError: 'function' object has no attribute 'im_class'
Presumably this is something to do with the scope of 'myFunc' inside 'logger', but I've no idea what.
Claudiu's answer is correct, but you can also cheat by getting the class name off of the self argument. This will give misleading log statements in cases of inheritance, but will tell you the class of the object whose method is being called. For example:
from functools import wraps  # use this to preserve function signatures and docstrings

def logger(func):
    @wraps(func)
    def with_logging(*args, **kwargs):
        print "Entering %s.%s" % (args[0].__class__.__name__, func.__name__)
        return func(*args, **kwargs)
    return with_logging

class C(object):
    @logger
    def f(self):
        pass

C().f()
As I said, this won't work properly in cases where you've inherited a function from a parent class; in this case you might say
class B(C):
    pass

b = B()
b.f()
and get the message Entering B.f where you actually want to get the message Entering C.f since that's the correct class. On the other hand, this might be acceptable, in which case I'd recommend this approach over Claudiu's suggestion.
Functions only become methods at runtime. That is, when you get C.f you get a bound function (and C.f.im_class is C). At the time your function is defined it is just a plain function; it is not bound to any class. This unbound and disassociated function is what is decorated by logger.
self.__class__.__name__ will give you the name of the class, but you can also use descriptors to accomplish this in a somewhat more general way. This pattern is described in a blog post on Decorators and Descriptors, and an implementation of your logger decorator in particular would look like:
class logger(object):
    def __init__(self, func):
        self.func = func

    def __get__(self, obj, type=None):
        return self.__class__(self.func.__get__(obj, type))

    def __call__(self, *args, **kw):
        print 'Entering %s' % self.func
        return self.func(*args, **kw)

class C(object):
    @logger
    def f(self, x, y):
        return x + y

C().f(1, 2)
# => Entering <bound method C.f of <__main__.C object at 0x...>>
Obviously the output can be improved (by using, for example, getattr(self.func, 'im_class', None)), but this general pattern will work for both methods and functions. However it will not work for old-style classes (but just don't use those ;)
Ideas proposed here are excellent, but have some disadvantages:
1. inspect.getouterframes and args[0].__class__.__name__ are not suitable for plain functions and static methods.
2. __get__ must be in a class, and that is rejected by @wraps.
3. @wraps itself should hide decoration traces better.
So, I've combined some ideas from this page, links, docs and my own head, and finally found a solution that avoids all three disadvantages above.
As a result, method_decorator:
1. Knows the class the decorated method is bound to.
2. Hides decorator traces by answering system attribute lookups more correctly than functools.wraps() does.
3. Is covered with unit tests for bound and unbound instance methods, class methods, static methods, and plain functions.
Usage:
pip install method_decorator

from method_decorator import method_decorator

class my_decorator(method_decorator):
    # ...
See full unit-tests for usage details.
And here is just the code of the method_decorator class:
class method_decorator(object):

    def __init__(self, func, obj=None, cls=None, method_type='function'):
        # These defaults are OK for plain functions
        # and will be changed by __get__() for methods once a method is dot-referenced.
        self.func, self.obj, self.cls, self.method_type = func, obj, cls, method_type

    def __get__(self, obj=None, cls=None):
        # It is executed when the decorated func is referenced as a method: cls.func or obj.func.
        if self.obj == obj and self.cls == cls:
            return self  # Use the same instance that is already processed by a previous call to this __get__().

        method_type = (
            'staticmethod' if isinstance(self.func, staticmethod) else
            'classmethod' if isinstance(self.func, classmethod) else
            'instancemethod'
            # No branch for plain function - the correct method_type for it is already set in __init__() defaults.
        )

        # Use a specialized method_decorator (or descendant) instance; don't change the current
        # instance's attributes - that leads to conflicts.
        return object.__getattribute__(self, '__class__')(
            self.func.__get__(obj, cls), obj, cls, method_type)  # Use the bound or unbound method with this underlying func.

    def __call__(self, *args, **kwargs):
        return self.func(*args, **kwargs)

    def __getattribute__(self, attr_name):  # Hiding traces of decoration.
        # Our known names. '__class__' is not included because it is used only with explicit object.__getattribute__().
        if attr_name in ('__init__', '__get__', '__call__', '__getattribute__', 'func', 'obj', 'cls', 'method_type'):
            return object.__getattribute__(self, attr_name)  # Stopping recursion.
        # All other attr_names, including those auto-defined by the system on self, are searched in the decorated
        # self.func, e.g.: __module__, __class__, __name__, __doc__, im_*, func_*, etc.
        return getattr(self.func, attr_name)  # Raises the correct AttributeError if the name is not found in the decorated self.func.

    def __repr__(self):  # Special case: __repr__ ignores __getattribute__.
        return self.func.__repr__()
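As an illustration of the subclassing usage mentioned above (the log_calls name and its logging behaviour are my own sketch, not part of the package), a descendant can read self.cls and self.func once the method has been dot-referenced:
class log_calls(method_decorator):
    def __call__(self, *args, **kwargs):
        # self.cls and self.func were filled in by __get__ when the method was accessed
        print('Entering %s.%s' % (self.cls.__name__, self.func.__name__))
        return method_decorator.__call__(self, *args, **kwargs)

class C(object):
    @log_calls
    def f(self):
        pass

C().f()  # prints: Entering C.f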
It seems that while the class is being created, Python creates regular function objects. They only get turned into unbound method objects afterwards. Knowing that, this is the only way I could find to do what you want:
def logger(myFunc):
    def new(*args, **keyargs):
        print 'Entering %s.%s' % (myFunc.im_class.__name__, myFunc.__name__)
        return myFunc(*args, **keyargs)
    return new

class C(object):
    def f(self):
        pass

C.f = logger(C.f)
C().f()
This outputs the desired result.
If you want to wrap all the methods in a class, then you probably want to create a wrapClass function, which you could then use like this:
C = wrapClass(C)
Class functions should always take self as their first argument, so you can use that instead of im_class.
def logger(myFunc):
    def new(self, *args, **keyargs):
        print 'Entering %s.%s' % (self.__class__.__name__, myFunc.__name__)
        return myFunc(self, *args, **keyargs)
    return new

class C(object):
    @logger
    def f(self):
        pass

C().f()
At first I wanted to use self.__name__, but that doesn't work because the instance has no name. You must use self.__class__.__name__ to get the name of the class.
I found another solution to a very similar problem using the inspect library. When the decorator is called, even though the function is not yet bound to the class, you can inspect the stack and discover which class is calling the decorator. You can at least get the string name of the class, if that is all you need (probably can't reference it yet since it is being created). Then you do not need to call anything after the class has been created.
import inspect

def logger(myFunc):
    classname = inspect.getouterframes(inspect.currentframe())[1][3]
    def new(*args, **keyargs):
        print 'Entering %s.%s' % (classname, myFunc.__name__)
        return myFunc(*args, **keyargs)
    return new

class C(object):
    @logger
    def f(self):
        pass

C().f()
While this is not necessarily better than the others, it is the only way I can figure out to discover the class name of the future method during the call to the decorator. Note the warning in the inspect library documentation about not keeping references to frame objects around.
As shown in Asa Ayers' answer, you don't need to access the class object. It may be worth knowing that since Python 3.3, you can also use __qualname__, which gives you the fully qualified name:
>>> def logger(myFunc):
...     def new(*args, **keyargs):
...         print('Entering %s' % myFunc.__qualname__)
...         return myFunc(*args, **keyargs)
...
...     return new
...
>>> class C(object):
...     @logger
...     def f(self):
...         pass
...
>>> C().f()
Entering C.f
This has the added advantage of working also in the case of nested classes, as shown in this example taken from PEP 3155:
>>> class C:
...     def f(): pass
...     class D:
...         def g(): pass
...
>>> C.__qualname__
'C'
>>> C.f.__qualname__
'C.f'
>>> C.D.__qualname__
'C.D'
>>> C.D.g.__qualname__
'C.D.g'
Notice also that in Python 3 the im_class attribute is gone, therefore if you really wish to access the class in a decorator, you need another method. The approach I currently use involves object.__set_name__ and is detailed in my answer to "Can a Python decorator of an instance method access the class?"
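For completeness, here is a minimal sketch of that __set_name__ approach (Python 3.6+; the details are my own illustration rather than a quote from the linked answer): the descriptor is told its owning class when the class body is executed.
class logger:
    def __init__(self, func):
        self.func = func

    def __set_name__(self, owner, name):
        # called at class-creation time with the owning class
        print('Decorating %s.%s' % (owner.__qualname__, name))
        self.owner = owner

    def __get__(self, obj, objtype=None):
        # delegate to the wrapped function's normal binding behaviour
        return self.func.__get__(obj, objtype)

class C:
    @logger
    def f(self):
        pass
# prints: Decorating C.f

C().f()  # behaves like a normal method afterwards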
You can also use new.instancemethod() to create an instance method (either bound or unbound) from a function.
Instead of injecting decorating code at definition time, when the function doesn't know its class, delay running this code until the function is accessed/called. A descriptor object makes it possible to inject your own code late, at access/call time:
class decorated(object):
    def __init__(self, func, type_=None):
        self.func = func
        self.type = type_

    def __get__(self, obj, type_=None):
        return self.__class__(self.func.__get__(obj, type_), type_)

    def __call__(self, *args, **kwargs):
        name = '%s.%s' % (self.type.__name__, self.func.__name__)
        print('called %s with args=%s kwargs=%s' % (name, args, kwargs))
        return self.func(*args, **kwargs)

class Foo(object):
    @decorated
    def foo(self, a, b):
        pass
Now we can inspect the class both at access time (__get__) and at call time (__call__). This mechanism works for plain methods as well as static and class methods:
>>> Foo().foo(1, b=2)
called Foo.foo with args=(1,) kwargs={'b': 2}
Full example at: https://github.com/aurzenligl/study/blob/master/python-robotwrap/Example4.py
