I'm trying to write a decorator that can be added to instance methods and non-instance methods alike. I have reduced my code to a minimal example that demonstrates the problem:
def call(fn):
    def _impl(*args, **kwargs):
        return fn(*args, **kwargs)
    fn.call = _impl
    return fn

class Foo(object):
    @call
    def bar(self):
        pass

Foo().bar.call()
This gives the beautiful error
Traceback (most recent call last):
File "/tmp/511749370/main.py", line 14, in <module>
Foo().bar.call()
File "/tmp/511749370/main.py", line 3, in _impl
return fn(*args, **kwargs)
TypeError: bar() missing 1 required positional argument: 'self'
Is it possible to do something like this without resorting to
Foo.bar.call(Foo())
Or is that my only option?
You have to implement your decorator as a class and implement the descriptor protocol. Basically, the descriptor __get__ function is what's responsible for creating bound methods. By overriding this function, you get access to self and can create a bound copy of the call function.
The following implementation does exactly that. The Foo instance is saved in the __self__ attribute. The decorator has a __call__ method which calls the decorated function, and a call method which does the same thing.
import inspect
import functools
from copy import copy

class call:
    def __init__(self, func):
        self.func = func
        self.__self__ = None  # "__self__" is also used by bound methods

    def __call__(self, *args, **kwargs):
        # if bound to an object, pass it as the first argument
        if self.__self__ is not None:
            args = (self.__self__,) + args
        return self.func(*args, **kwargs)

    def call(self, *args, **kwargs):
        return self(*args, **kwargs)

    def __get__(self, obj, cls):
        if obj is None:
            return self

        # create a bound copy of the decorator
        bound = copy(self)
        bound.__self__ = obj

        # update __doc__ and similar attributes
        functools.wraps(bound.func)(bound)
        bound.__signature__ = inspect.signature(bound.func)

        # add the bound instance to the object's dict so that
        # __get__ won't be called a 2nd time
        setattr(obj, self.func.__name__, bound)
        return bound
Test:
class Foo(object):
    @call
    def bar(self):
        print('bar')

@call
def foo():
    print('foo')

Foo().bar.call()  # output: bar
foo()             # output: foo
Related: How can I decorate an instance method with a decorator class?
I thought the following would work as a decorator
class D:
    def __init__(self, func):
        self.func = func

    def __call__(self, *args, **kwargs):
        return self.func(*args, **kwargs)

class A:
    @D
    def f(self, x):
        pass

a = A()
a.f(1)
but I get TypeError: f() missing 1 required positional argument: 'x'
What's going on, and is there a way I can use a class as a decorator like this?
The thing is that besides the decorator mechanism, there is the mechanism Python uses so that functions inside class bodies behave as instance methods: the "descriptor protocol". It is actually simple: all function objects have a __get__ method (but no __set__ or __delete__), which makes them "non-data descriptors". When Python retrieves the attribute from an instance, __get__ is called with the instance as a parameter - the __get__ method then has to return a callable that will work as the method, and that callable has to know which instance it was retrieved from:
# example only - DO NOT DO THIS but for learning purposes,
# due to concurrency problems:

class D:
    def __init__(self, func):
        self.func = func

    def __call__(self, *args, **kwargs):
        return self.func(self.instance, *args, **kwargs)

    def __get__(self, instance, owner):
        self.instance = instance
        return self

class A:
    @D
    def f(self, x):
        print(self, x)

a = A()
a.f(1)
This will print "<__main__.A object at 0x...> 1".
However, as is easy to see, this only allows the decorated method to be used on one instance at a time - even non-parallel code that holds more than one instance of "A" could end up having the method called with the wrong instance in mind. That is, this sequence:
In [127]: a1 = A()
In [128]: a2 = A()
In [129]: f1 = a1.f
In [130]: f2 = a2.f
In [131]: f1()
will end up calling "f" with a2 as the instance, not a1, because self.instance was overwritten when a2.f was looked up.
To avoid this, __get__ has to return a callable that doesn't need to fetch the instance from an attribute stored on the (shared) decorator object. One way to do that is to create a partial callable that carries the instance with it - however, note that since this is a necessary step, there is no need for the decorator class itself to have the "run wrapper + original code" function in the __call__ method - it could have any name:
from functools import partial

class D:
    def __init__(self, func):
        self.func = func

    def __call__(self, *args, _instance=None, **kwargs):
        if _instance is not None:
            return self.func(_instance, *args, **kwargs)
        else:
            return self.func(*args, **kwargs)

    def __get__(self, instance, owner):
        return partial(self.__call__, _instance=instance)

class A:
    @D
    def f(self, x):
        print(self, x)

a = A()
a.f(1)
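As a quick sanity check (my addition, not from the original answer), the earlier two-instance problem goes away, because each __get__ call builds a fresh partial that carries its own instance:

a1 = A()
a2 = A()
f1 = a1.f
f2 = a2.f
f1(1)  # prints the a1 instance and 1
f2(2)  # prints the a2 instance and 2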
Related: Decorating class methods - how to pass the instance to the decorator?
NOTE:
I've got a related question here:
How to access variables from a Class Decorator from within the method it's applied on?
I'm planning to write a fairly complicated decorator. Therefore, the decorator itself should be a class of its own. I know this is possible in Python (Python 3.8):
import functools

class MyDecoratorClass:
    def __init__(self, func):
        functools.update_wrapper(self, func)
        self.func = func

    def __call__(self, *args, **kwargs):
        # do stuff before
        retval = self.func(*args, **kwargs)
        # do stuff after
        return retval

@MyDecoratorClass
def foo():
    print("foo")
Now my problem starts when I try to apply the decorator on a method instead of just a function - especially if it's a method from another class. Let me show you what I've tried:
1. Trial one: identity loss
The decorator MyDecoratorClass below doesn't (or shouldn't) do anything. It's just boilerplate code, ready to be put to use later on. The method foo() from class Foobar prints the object it is called on:
import functools

class MyDecoratorClass:
    def __init__(self, method):
        functools.update_wrapper(self, method)
        self.method = method

    def __call__(self, *args, **kwargs):
        # do stuff before
        retval = self.method(self, *args, **kwargs)
        # do stuff after
        return retval

class Foobar:
    def __init__(self):
        # initialize stuff
        pass

    @MyDecoratorClass
    def foo(self):
        print(f"foo() called on object {self}")
        return
Now what you observe here is that the self in the foo() method gets swapped. It's no longer a Foobar() instance, but a MyDecoratorClass() instance instead:
>>> foobar = Foobar()
>>> foobar.foo()
foo() called on object <__main__.MyDecoratorClass object at 0x000002DAE0B77A60>
In other words, the method foo() loses its original identity. That brings us to the next trial.
2. Trial two: keep identity, but crash
I attempt to preserve the original identity of the foo() method:
import functools

class MyDecoratorClass:
    def __init__(self, method):
        functools.update_wrapper(self, method)
        self.method = method

    def __call__(self, *args, **kwargs):
        # do stuff before
        retval = self.method(self.method.__self__, *args, **kwargs)
        # do stuff after
        return retval

class Foobar:
    def __init__(self):
        # initialize stuff
        pass

    @MyDecoratorClass
    def foo(self):
        print(f"foo() called on object {self}")
        return
Now let's test:
>>> foobar = Foobar()
>>> foobar.foo()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "<stdin>", line 7, in __call__
AttributeError: 'function' object has no attribute '__self__'
Yikes!
EDIT
Thank you @AlexHall and @juanpa.arrivillaga for your solutions. They both work. However, there is a subtle difference between them.
Let's first take a look at this one:
def __get__(self, obj, objtype) -> object:
    temp = type(self)(self.method.__get__(obj, objtype))
    print(temp)
    return temp
I've introduced a temporary variable, just to print what __get__() returns. Each time you access the method foo(), this __get__() function returns a new MyDecoratorClass() instance:
>>> f = Foobar()
>>> func1 = f.foo
>>> func2 = f.foo
>>> print(func1 == func2)
>>> print(func1 is func2)
<__main__.MyDecoratorClass object at 0x000001B7E974D3A0>
<__main__.MyDecoratorClass object at 0x000001B7E96C5520>
False
False
The second approach (from @juanpa.arrivillaga) is different:
def __get__(self, obj, objtype) -> object:
    temp = types.MethodType(self, obj)
    print(temp)
    return temp
The output:
>>> f = Foobar()
>>> func1 = f.foo
>>> func2 = f.foo
>>> print(func1 == func2)
>>> print(func1 is func2)
<bound method Foobar.foo of <__main__.Foobar object at 0x000002824BBEF4C0>>
<bound method Foobar.foo of <__main__.Foobar object at 0x000002824BBEF4C0>>
True
False
There is a subtle difference, but I'm not sure why.
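(A short check, added here for clarity rather than taken from the original post: bound methods compare equal when they share the same __func__ and __self__, while two separate MyDecoratorClass instances fall back to identity comparison, which is why == is False in the first approach.)

# Hypothetical session: why the types.MethodType variant gives == True.
f = Foobar()
m1 = f.foo   # types.MethodType(<decorator instance>, f)
m2 = f.foo
print(m1.__func__ is m2.__func__)  # True: the same MyDecoratorClass instance
print(m1.__self__ is m2.__self__)  # True: the same Foobar instance
print(m1 == m2, m1 is m2)          # True False: equal, but two distinct method objects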
Functions are descriptors and that's what allows them to auto-bind self. The easiest way to deal with this is to implement decorators using functions so that this is handled for you. Otherwise you need to explicitly invoke the descriptor. Here's one way:
import functools

class MyDecoratorClass:
    def __init__(self, method):
        functools.update_wrapper(self, method)
        self.method = method

    def __get__(self, instance, owner):
        return type(self)(self.method.__get__(instance, owner))

    def __call__(self, *args, **kwargs):
        # do stuff before
        retval = self.method(*args, **kwargs)
        # do stuff after
        return retval

class Foobar:
    def __init__(self):
        # initialize stuff
        pass

    @MyDecoratorClass
    def foo(self, x, y):
        print(f"{[self, x, y]=}")

@MyDecoratorClass
def bar(spam):
    print(f"{[spam]=}")

Foobar().foo(1, 2)
bar(3)
Here the __get__ method creates a new instance of MyDecoratorClass with the bound method (previously self.method was just a function since no instance existed yet). Also note that __call__ just calls self.method(*args, **kwargs) - if self.method is now a bound method, the self of Foobar is already implied.
You can implement the descriptor protocol; an example of how functions do it (but in pure Python) is available in the Descriptor HOWTO. Translated to your case:
import functools
import types

class MyDecoratorClass:
    def __init__(self, func):
        functools.update_wrapper(self, func)
        self.func = func

    def __call__(self, *args, **kwargs):
        # do stuff before
        retval = self.func(*args, **kwargs)
        # do stuff after
        return retval

    def __get__(self, obj, objtype=None):
        if obj is None:
            return self
        return types.MethodType(self, obj)
Note, return types.MethodType(self, obj) is essentially equivalent to
return lambda *args, **kwargs : self.func(obj, *args, **kwargs)
Note from Kristof
Could it be that you meant this:
return types.MethodType(self, obj) is essentially equivalent to
return lambda *args, **kwargs : self(obj, *args, **kwargs)
Note that I replaced self.func(..) with self(..). I tried it, and only this way do the statements at # do stuff before and # do stuff after actually run.
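A small usage sketch of this descriptor-based version (my addition; Foobar reuses the question's setup and baz is just an illustrative name):

class Foobar:
    @MyDecoratorClass
    def foo(self):
        print(f"foo() called on object {self}")

@MyDecoratorClass
def baz():
    print("baz() called")

Foobar().foo()   # self is the Foobar instance, as expected
baz()            # plain function: __get__ never comes into play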
I am trying to implement a metaclass that initializes class variables when the first instance of a class is created. I want to add a new magic method __load__ that should be called as a classmethod (like __new__). So I implemented it like this:
class StaticLoad(type):
    __loaded_classes = set()

    def __call__(cls, *args, **kwargs):
        if cls not in cls.__loaded_classes:
            if hasattr(cls, '__load__'):
                cls.__load__()
            cls.__loaded_classes.add(cls)
        return super().__call__(*args, **kwargs)

class BaseClass(metaclass=StaticLoad):
    s = 0

class MyClass(BaseClass):
    @classmethod
    def __load__(cls):
        print("Loading", cls.__name__, "...")
        cls.s += 1

obj1 = MyClass()
obj2 = MyClass()
print(MyClass.s)
It works fine and gives the correct result:
Loading MyClass ...
1
Now I want to implement the method __load__ as a classmethod by default like __new__ (without the need to type @classmethod above it each time). I tried this:
class StaticLoad(type):
    __loaded_classes = set()

    def __call__(cls, *args, **kwargs):
        if cls not in cls.__loaded_classes:
            if hasattr(cls, '__load__'):
                # I try to apply the classmethod routine to make
                # cls.__load__ a classmethod
                classmethod(cls.__load__)()
            cls.__loaded_classes.add(cls)
        return super().__call__(*args, **kwargs)

class BaseClass(metaclass=StaticLoad):
    s = 0

class MyClass(BaseClass):
    # @classmethod line was deleted
    def __load__(cls):
        print("Loading", cls.__name__, "...")
        cls.s += 1

obj1 = MyClass()
obj2 = MyClass()
print(MyClass.s)
I got the error:
Traceback (most recent call last):
File "example.py", line 22, in <module>
obj1 = MyClass()
File "example.py", line 7, in __call__
classmethod(cls.__load__)()
TypeError: 'classmethod' object is not callable
It looks like the classmethod wrapper only works as intended inside a class definition - a classmethod object is a descriptor, not something you can call directly.
How should I improve my metaclass to make it work fine? I would like to keep the content of classes BaseClass and MyClass as I wrote above, placing all magic into StaticLoad.
With the help of @AnttiHaapala the solution is simple. Instead of calling
classmethod(cls.__load__)()
I had to call
cls.__load__(cls)
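Putting the fix into the metaclass, a minimal sketch of the corrected __call__ (my own consolidation of the fix, not code posted verbatim in the answer):

class StaticLoad(type):
    __loaded_classes = set()

    def __call__(cls, *args, **kwargs):
        if cls not in cls.__loaded_classes:
            if hasattr(cls, '__load__'):
                # pass the class explicitly instead of trying to call a classmethod object
                cls.__load__(cls)
            cls.__loaded_classes.add(cls)
        return super().__call__(*args, **kwargs)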
If you want to perform transforms on certain methods and attributes at class creation time, you do that in the metaclass's __new__ method.
Since you already have a metaclass, all you have to do is implement its __new__ method to convert any __load__ method into a classmethod:
class StaticLoad(type):
    __loaded_classes = set()

    def __new__(metacls, name, bases, namespace):
        if "__load__" in namespace and not isinstance(namespace["__load__"], classmethod):
            namespace["__load__"] = classmethod(namespace["__load__"])
        return super().__new__(metacls, name, bases, namespace)

    def __call__(cls, *args, **kwargs):
        if cls not in cls.__class__.__loaded_classes:
            if hasattr(cls, '__load__'):
                cls.__load__()
            type(cls).__loaded_classes.add(cls)
        return super().__call__(*args, **kwargs)
(The other change I made was to make explicit that "__loaded_classes" should be accessed on the metaclass, not on the class itself.)
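A quick usage sketch under the assumption that BaseClass and MyClass stay exactly as in the question (no @classmethod on __load__):

class BaseClass(metaclass=StaticLoad):
    s = 0

class MyClass(BaseClass):
    def __load__(cls):
        print("Loading", cls.__name__, "...")
        cls.s += 1

obj1 = MyClass()   # prints "Loading MyClass ..."
obj2 = MyClass()   # __load__ is not called again
print(MyClass.s)   # 1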
This is a question similar to How to call a method implicitly after every method call? but for Python.
Say I have a crawler class with some attributes (e.g. self.db), a method crawl_1(self, *args, **kwargs), and another method save_to_db(self, *args, **kwargs) which saves the crawling results to a database (self.db).
I want somehow to have save_to_db run after every crawl_1, crawl_2, etc. call. I've tried making this a "global" util decorator, but I don't like the result since it involves passing around self as an argument.
If you want to implicitly run a method after all of your crawl_* methods, the simplest solution may be to set up a metaclass that will programmatically wrap the methods for you. Start with this, a simple wrapper function:
import functools

def wrapit(func):
    @functools.wraps(func)
    def _(self, *args, **kwargs):
        result = func(self, *args, **kwargs)
        self.save_to_db()
        return result
    return _
That's a basic decorator that wraps func, calling
self.save_to_db() after calling func. Now, we set up a metaclass
that will programmatically apply this to specific methods:
class Wrapper(type):
    def __new__(mcls, name, bases, nmspc):
        for attrname, attrval in nmspc.items():
            if callable(attrval) and attrname.startswith('crawl_'):
                nmspc[attrname] = wrapit(attrval)
        return super(Wrapper, mcls).__new__(mcls, name, bases, nmspc)
This will iterate over the methods in the wrapped class, looking for
method names that start with crawl_ and wrapping them with our
decorator function.
Finally, the wrapped class itself, which declares Wrapper as a
metaclass:
class Wrapped(object):
    __metaclass__ = Wrapper

    def crawl_1(self):
        print 'this is crawl 1'

    def crawl_2(self):
        print 'this is crawl 2'

    def this_is_not_wrapped(self):
        print 'this is not wrapped'

    def save_to_db(self):
        print 'saving to database'
Given the above, we get the following behavior:
>>> W = Wrapped()
>>> W.crawl_1()
this is crawl 1
saving to database
>>> W.crawl_2()
this is crawl 2
saving to database
>>> W.this_is_not_wrapped()
this is not wrapped
>>>
You can see that our save_to_db method is being called after each of crawl_1 and crawl_2 (but not after this_is_not_wrapped).
The above works in Python 2. In Python 3, replace this:
class Wrapped(object):
    __metaclass__ = Wrapper
With:
class Wrapped (object, metaclass=Wrapper):
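For reference, a minimal Python 3 sketch of the whole example (my own adaptation of the above: print() as a function and the metaclass passed as a class keyword):

import functools

def wrapit(func):
    @functools.wraps(func)
    def _(self, *args, **kwargs):
        result = func(self, *args, **kwargs)
        self.save_to_db()
        return result
    return _

class Wrapper(type):
    def __new__(mcls, name, bases, nmspc):
        for attrname, attrval in list(nmspc.items()):
            if callable(attrval) and attrname.startswith('crawl_'):
                nmspc[attrname] = wrapit(attrval)
        return super().__new__(mcls, name, bases, nmspc)

class Wrapped(metaclass=Wrapper):
    def crawl_1(self):
        print('this is crawl 1')

    def save_to_db(self):
        print('saving to database')

Wrapped().crawl_1()   # prints "this is crawl 1" then "saving to database"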
Something like this:
from functools import wraps

def my_decorator(f):
    @wraps(f)
    def wrapper(*args, **kwargs):
        print('Calling decorated function')
        res = f(*args, **kwargs)
        obj = args[0] if len(args) > 0 else None
        if obj and hasattr(obj, "bar"):
            obj.bar()
        return res
    return wrapper

class MyClass(object):
    @my_decorator
    def foo(self, *args, **kwargs):
        print("Calling foo")

    def bar(self, *args, **kwargs):
        print("Calling bar")

@my_decorator
def example():
    print('Called example function')

example()

obj = MyClass()
obj.foo()
It will give you the following output:
Calling decorated function
Called example function
Calling decorated function
Calling foo
Calling bar
A decorator in Python looks like this: it's a function that takes a single function as an argument and returns another wrapper function that will be called instead of the decorated one. Usually the wrapper "wraps" the decorated function, i.e. calls it before/after performing some other actions.
Example:
# define a decorator method:
def save_db_decorator(fn):
    # The wrapper method which will get called instead of the decorated method:
    def wrapper(self, *args, **kwargs):
        fn(self, *args, **kwargs)                  # call the decorated method
        MyTest.save_to_db(self, *args, **kwargs)   # call the additional method
    return wrapper  # return the wrapper method
Now learn how to use it:
class MyTest:
    # The additional method called by the decorator:
    def save_to_db(self, *args, **kwargs):
        print("Saver")

    # The decorated methods:
    @save_db_decorator
    def crawl_1(self, *args, **kwargs):
        print("Crawler 1")

    @save_db_decorator
    def crawl_2(self, *args, **kwargs):
        print("Crawler 2")

# Calling the decorated methods:
my_test = MyTest()
print("Starting Crawler 1")
my_test.crawl_1()
print("Starting Crawler 2")
my_test.crawl_2()
This would output the following:
Starting Crawler 1
Crawler 1
Saver
Starting Crawler 2
Crawler 2
Saver
See this code running on ideone.com
When defining a decorator using a class, how do I automatically transfer over __name__, __module__ and __doc__? Normally, I would use the @wraps decorator from functools. Here's what I did instead for a class (this is not entirely my code):
import functools

class memoized:
    """Decorator that caches a function's return value each time it is called.
    If called later with the same arguments, the cached value is returned, and
    not re-evaluated.
    """
    def __init__(self, func):
        super().__init__()
        self.func = func
        self.cache = {}

    def __call__(self, *args):
        try:
            return self.cache[args]
        except KeyError:
            value = self.func(*args)
            self.cache[args] = value
            return value
        except TypeError:
            # uncacheable -- for instance, passing a list as an argument.
            # Better to not cache than to blow up entirely.
            return self.func(*args)

    def __repr__(self):
        return self.func.__repr__()

    def __get__(self, obj, objtype):
        return functools.partial(self.__call__, obj)

    __doc__ = property(lambda self: self.func.__doc__)
    __module__ = property(lambda self: self.func.__module__)
    __name__ = property(lambda self: self.func.__name__)
Is there a standard decorator to automate the creation of __name__, __module__ and __doc__? Also, is there a way to automate the __get__ method (I assume that's for creating bound methods)? Are there any missing methods?
Everyone seems to have missed the obvious solution. Using functools.update_wrapper:
>>> import functools
>>> class memoized(object):
        """Decorator that caches a function's return value each time it is called.
        If called later with the same arguments, the cached value is returned, and
        not re-evaluated.
        """
        def __init__(self, func):
            self.func = func
            self.cache = {}
            functools.update_wrapper(self, func)  ## TA-DA! ##
        def __call__(self, *args):
            pass  # Not needed for this demo.

>>> @memoized
    def fibonacci(n):
        """fibonacci docstring"""
        pass  # Not needed for this demo.

>>> fibonacci
<__main__.memoized object at 0x0156DE30>
>>> fibonacci.__name__
'fibonacci'
>>> fibonacci.__doc__
'fibonacci docstring'
I'm not aware of such things in stdlib, but we can create our own if we need to.
Something like this can work:
from functools import WRAPPER_ASSIGNMENTS

def class_wraps(cls):
    """Update a wrapper class `cls` to look like the wrapped."""

    class Wrapper(cls):
        """New wrapper that will extend the wrapper `cls` to make it look like `wrapped`.

        wrapped: Original function or class that is being decorated.
        assigned: A list of attributes to assign to the wrapper, by default they are:
                  ['__doc__', '__name__', '__module__', '__annotations__'].
        """

        def __init__(self, wrapped, assigned=WRAPPER_ASSIGNMENTS):
            self.__wrapped = wrapped
            for attr in assigned:
                setattr(self, attr, getattr(wrapped, attr))
            super().__init__(wrapped)

        def __repr__(self):
            return repr(self.__wrapped)

    return Wrapper
Usage:
import functools

@class_wraps
class memoized:
    """Decorator that caches a function's return value each time it is called.
    If called later with the same arguments, the cached value is returned, and
    not re-evaluated.
    """

    def __init__(self, func):
        super().__init__()
        self.func = func
        self.cache = {}

    def __call__(self, *args):
        try:
            return self.cache[args]
        except KeyError:
            value = self.func(*args)
            self.cache[args] = value
            return value
        except TypeError:
            # uncacheable -- for instance, passing a list as an argument.
            # Better to not cache than to blow up entirely.
            return self.func(*args)

    def __get__(self, obj, objtype):
        return functools.partial(self.__call__, obj)

@memoized
def fibonacci(n):
    """fibonacci docstring"""
    if n in (0, 1):
        return n
    return fibonacci(n-1) + fibonacci(n-2)

print(fibonacci)
print("__doc__: ", fibonacci.__doc__)
print("__name__: ", fibonacci.__name__)
Output:
<function fibonacci at 0x14627c0>
__doc__: fibonacci docstring
__name__: fibonacci
EDIT:
And if you are wondering why this wasn't included in the stdlib, it's because you can wrap your class decorator in a function decorator and use functools.wraps like this:
def wrapper(f):
    memoize = memoized(f)

    @functools.wraps(f)
    def helper(*args, **kws):
        return memoize(*args, **kws)

    return helper

@wrapper
def fibonacci(n):
    """fibonacci docstring"""
    if n <= 1:
        return n
    return fibonacci(n-1) + fibonacci(n-2)
Turns out there's a straightforward solution using functools.wraps itself:
import functools

def dec(cls):
    @functools.wraps(cls, updated=())
    class D(cls):
        decorated = 1
    return D

@dec
class C:
    """doc"""

print(f'{C.__name__=} {C.__doc__=} {C.__wrapped__=}')
$ python3 t.py
C.__name__='C' C.__doc__='doc' C.__wrapped__=<class '__main__.C'>
Note that updated=() is needed to prevent an attempt to update the class's __dict__ (this output is without updated=()):
$ python t.py
Traceback (most recent call last):
File "t.py", line 26, in <module>
class C:
File "t.py", line 20, in dec
class D(cls):
File "/usr/lib/python3.8/functools.py", line 57, in update_wrapper
getattr(wrapper, attr).update(getattr(wrapped, attr, {}))
AttributeError: 'mappingproxy' object has no attribute 'update'
I needed something that would wrap both classes and functions and wrote this:
import functools
import inspect

_MISSING = object()  # sentinel for "attribute not present" (its definition is not shown in the original snippet)

def wrap_is_timeout(base):
    '''Adds `.is_timeout=True` attribute to objects returned by `base()`.

    When `base` is a class, it returns a subclass with the same name and adds a read-only property.
    Otherwise, it returns a function that sets the `.is_timeout` attribute on the result of the `base()` call.

    Wrappers make a best effort to be transparent.
    '''
    if inspect.isclass(base):
        class wrapped(base):
            is_timeout = property(lambda _: True)

        for k in functools.WRAPPER_ASSIGNMENTS:
            v = getattr(base, k, _MISSING)
            if v is not _MISSING:
                try:
                    setattr(wrapped, k, v)
                except AttributeError:
                    pass
        return wrapped

    @functools.wraps(base)
    def fun(*args, **kwargs):
        ex = base(*args, **kwargs)
        ex.is_timeout = True
        return ex
    return fun
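A usage sketch (my own, with a hypothetical SlowOperation exception class and make_error factory) showing both branches:

# Hypothetical usage, not from the original answer.
class SlowOperation(Exception):
    """Raised when an operation takes too long."""

def make_error(msg):
    return SlowOperation(msg)

SlowTimeout = wrap_is_timeout(SlowOperation)        # class branch: subclass with a read-only property
make_timeout_error = wrap_is_timeout(make_error)    # function branch: attribute set on the result

print(SlowTimeout("db query").is_timeout)           # True
print(make_timeout_error("db query").is_timeout)    # True
print(SlowTimeout.__name__)                         # 'SlowOperation' (copied over)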
All we really need to do is modify the behavior of the decorator so that it is "hygienic", i.e. it is attribute-preserving.
#!/usr/bin/python3

def hygienic(decorator):
    def new_decorator(original):
        wrapped = decorator(original)
        wrapped.__name__ = original.__name__
        wrapped.__doc__ = original.__doc__
        wrapped.__module__ = original.__module__
        return wrapped
    return new_decorator
This is ALL you need. In general. It doesn't preserve the signature, but if you really want that you can use a library to do it. I also went ahead and rewrote the memoization code so that it works on keyword arguments as well. There was also a bug in the original where arguments that couldn't be converted to a hashable key would break it in some cases.
Demo of the rewritten memoized decorator with @hygienic modifying its behavior. memoized is now a function that wraps the original class, though you can (like the other answer) write a wrapping class instead, or even better, something which detects if it's a class and if so wraps the __init__ method.
@hygienic
class memoized:
    def __init__(self, func):
        self.func = func
        self.cache = {}

    def __call__(self, *args, **kw):
        try:
            key = (tuple(args), frozenset(kw.items()))
            if key not in self.cache:
                self.cache[key] = self.func(*args, **kw)
            return self.cache[key]
        except TypeError:
            # uncacheable -- for instance, passing a list as an argument.
            # Better to not cache than to blow up entirely.
            return self.func(*args, **kw)
In action:
@memoized
def f(a, b=5, *args, keyword=10):
    """Intact docstring!"""
    print('f was called!')
    return {'a': a, 'b': b, 'args': args, 'keyword': 10}

x = f(0)
# OUTPUT: f was called!
print(x)
# OUTPUT: {'a': 0, 'b': 5, 'keyword': 10, 'args': ()}
y = f(0)
# NO OUTPUT - MEANS MEMOIZATION IS WORKING
print(y)
# OUTPUT: {'a': 0, 'b': 5, 'keyword': 10, 'args': ()}
print(f.__name__)
# OUTPUT: 'f'
print(f.__doc__)
# OUTPUT: 'Intact docstring!'
Another solution using inheritance:
import functools
import types

class CallableClassDecorator:
    """Base class that extracts attributes and assigns them to self.

    By default the extracted attributes are:
    ['__doc__', '__name__', '__module__'].
    """

    def __init__(self, wrapped, assigned=functools.WRAPPER_ASSIGNMENTS):
        for attr in assigned:
            setattr(self, attr, getattr(wrapped, attr))
        super().__init__()

    def __get__(self, obj, objtype):
        return types.MethodType(self.__call__, obj)
And, usage:
class memoized(CallableClassDecorator):
    """Decorator that caches a function's return value each time it is called.
    If called later with the same arguments, the cached value is returned, and
    not re-evaluated.
    """
    def __init__(self, function):
        super().__init__(function)
        self.function = function
        self.cache = {}

    def __call__(self, *args):
        try:
            return self.cache[args]
        except KeyError:
            value = self.function(*args)
            self.cache[args] = value
            return value
        except TypeError:
            # uncacheable -- for instance, passing a list as an argument.
            # Better to not cache than to blow up entirely.
            return self.function(*args)
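A short usage sketch (my addition; Worker is just an illustrative class) showing that both the metadata copying and the inherited __get__ work:

@memoized
def fibonacci(n):
    """fibonacci docstring"""
    if n <= 1:
        return n
    return fibonacci(n - 1) + fibonacci(n - 2)

print(fibonacci(10))        # 55
print(fibonacci.__name__)   # 'fibonacci'
print(fibonacci.__doc__)    # 'fibonacci docstring'

class Worker:
    @memoized
    def double(self, x):
        return 2 * x

print(Worker().double(21))  # 42 -- bound via CallableClassDecorator.__get__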