Repeat methods for objects of the same class - Python

I would like to create two objects such that certain methods called on one of them are repeated on the other. I tried this:
def backup(method):
    def wrapper(self, *args, **kwargs):
        method(self, *args, **kwargs)
        getattr(self.another_tester, method.__name__)(*args, **kwargs)
    return wrapper
class Tester():
    def __init__(self, name):
        self.name = name

    def select_backup(self, tester):
        self.another_tester = tester

    @backup
    def foo1(self, stop=False):
        print("I am tester {}, do method foo1".format(self.name))

    @backup
    def foo2(self, stop=False):
        print("I am tester {}, do method foo2".format(self.name))

    @backup
    def foo3(self, stop=False):
        print("I am tester {}, do method foo3".format(self.name))
tester1 = Tester("A")
tester2 = Tester("B")
tester1.select_backup(tester2)
tester2.select_backup(tester1)
tester1.foo1()
tester2.foo2()
tester1.foo3()
I get RuntimeError: maximum recursion depth exceeded while calling a Python object for this code. The Tester class has a lot of different methods (foo1, foo2, foo3, ...), and I would like to back up (repeat) each of them, so I use a decorator.
I can change the decorator:
def backup(method):
    def wrapper(self, *args, **kwargs):
        method(self, *args, **kwargs)
        try:
            kwargs["stop"]
        except KeyError:
            getattr(self.another_tester, method.__name__)(stop=True, *args, **kwargs)
    return wrapper
It works, but I think there is a more Pythonic way to do it. Can anyone suggest one?

If your pair of objects are always instances of the same class (or at least, never have different implementations of the decorated method), you can change the decorator so that it directly calls the original method on the other instance, rather than getting the decorated version via getattr:
def backup(method):
    def wrapper(self, *args, **kwargs):
        method(self, *args, **kwargs)
        method(self.another_tester, *args, **kwargs)
    return wrapper
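For example, calling a method now runs the original implementation once on each tester, with no recursion (expected output shown as comments):

tester1 = Tester("A")
tester2 = Tester("B")
tester1.select_backup(tester2)
tester2.select_backup(tester1)

tester1.foo1()
# I am tester A, do method foo1
# I am tester B, do method foo1   <- the undecorated method runs on the backup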
If you want to be able to support pairs of objects of different classes (and different method implementations), things need to be a bit more complicated. You can save a reference to the original undecorated method as an attribute on the wrapper function, then look it up when you need it:
def backup(method):
    def wrapper(self, *args, **kwargs):
        method(self, *args, **kwargs)
        other_method = getattr(self.another_tester, method.__name__).orig_method
        other_method(self.another_tester, *args, **kwargs)
    wrapper.orig_method = method
    return wrapper
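For example, here is a minimal sketch with two hypothetical classes that implement foo1 differently; each call runs its own body first and then the other object's original, undecorated body via orig_method:

class LoudTester:
    def select_backup(self, tester):
        self.another_tester = tester

    @backup
    def foo1(self):
        print("LOUD foo1")

class QuietTester:
    def select_backup(self, tester):
        self.another_tester = tester

    @backup
    def foo1(self):
        print("quiet foo1")

a, b = LoudTester(), QuietTester()
a.select_backup(b)
b.select_backup(a)
a.foo1()  # prints "LOUD foo1", then "quiet foo1"; each class keeps its own implementation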

Related

Custom property decorator that behaves like @property?

I'd like to create a custom Python decorator that will 1) check the value of a class attribute my_attribute before running the function, and 2) turn the class method into a property. I can achieve this as follows:
def my_decorator(f):
    def wrapper(self, *args, **kwargs):
        if self.my_attribute is None:
            raise ValueError('No location set to model.')
        return f(self, *args, **kwargs)
    return wrapper

class MyClass:
    my_attribute = None

    @property
    @my_decorator
    def my_method(self):
        return self.my_attribute
I'd like to know how I can edit the definition of my_decorator so that it makes the wrapped method a property. Essentially I'd like to avoid the use of @property and @my_decorator for each method, letting me write the class definition as
class MyClass:
    my_attribute = None

    @my_new_decorator
    def my_method(self):
        return self.my_attribute
I've looked up the declaration of the builtin @property decorator, but it's defined as a class and wasn't much help.
Any suggestions?
What if you changed your decorator definition to look like this?
def my_decorator(f):
    @property
    def wrapper(self, *args, **kwargs):
        if self.my_attribute is None:
            raise ValueError('No location set to model.')
        return f(self, *args, **kwargs)
    return wrapper
That should make the returned function behave like a property, and keep your custom error handling.
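A quick usage sketch with the decorator rewritten as above (class and attribute names taken from the question):

class MyClass:
    my_attribute = None

    @my_decorator
    def my_method(self):
        return self.my_attribute

obj = MyClass()
obj.my_attribute = 'somewhere'
print(obj.my_method)   # 'somewhere', accessed like an attribute, no parentheses

obj.my_attribute = None
# obj.my_method would now raise ValueError('No location set to model.')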
If one also wants to keep/use the original my_decorator, one could create a my_decorator_property as follows:
def my_decorator_property(f):
    @property
    def wrapper(self, *args, **kwargs):
        return my_decorator(f)(self, *args, **kwargs)
    return wrapper

How to reference an object instance in callable decorator object with python?

Context:
I'd like to be able to decorate functions so that I can track their stats. Using this post as a reference I went about trying to make my own callable decorator objects.
Here is what I ended up with:
def Stats(fn):
    class StatsObject(object):
        def __init__(self, fn):
            self.fn = fn
            self.stats = {}

        def __call__(self, obj, *args, **kwargs):
            self.stats['times_called'] = self.stats.get('times_called', 0) + 1
            return self.fn(obj, *args, **kwargs)

    function = StatsObject(fn)
    def wrapper(self, *args, **kwargs):
        return function(self, *args, **kwargs)
    return wrapper

class MockClass(object):
    @Stats
    def mock_fn(self, *args, **kwargs):
        pass  # do things
Problem:
This actually calls the mock_fn function correctly but I don't have a reference to the stats object outside the wrapper function. i.e. I can't do:
mc = MockClass()
mc.mock_fn()
mc.mock_fn.stats
# AttributeError: 'function' object has no attribute 'stats'
Then I tried changing the following code recognizing that it was a scoping issue:
From:
function = StatsObject(fn)
def wrapper(self, *args, **kwargs):
    return function(self, *args, **kwargs)
return wrapper
To:
function = StatsObject(fn)
return function
But of course I then lose the self reference: the StatsObject instance is not a descriptor, so self inside __call__ is the StatsObject, obj becomes whatever first argument I pass explicitly, and the MockClass instance is never passed in at all.
So I understand why the first issue is happening, but not the second. Is there any way that I can pass the self reference of MockClass to the StatsObject __call__ function?
Functions can actually themselves have attributes in Python.
def Stats(fn):
    class StatsObject(object):
        def __init__(self, fn):
            self.fn = fn
            self.stats = {}

        def __call__(self, obj, *args, **kwargs):
            self.stats['times_called'] = self.stats.get('times_called', 0) + 1
            return self.fn(obj, *args, **kwargs)

    function = StatsObject(fn)
    def wrapper(self, *args, **kwargs):
        return function(self, *args, **kwargs)

    # KEY LINE BELOW: make the StatsObject available outside as "stats_fn"
    wrapper.stats_fn = function
    return wrapper

class MockClass(object):
    @Stats
    def mock_fn(self, *args, **kwargs):
        pass  # do things
The key line is assigning the StatsObject instance (which you've, perhaps misleadingly, locally named function) as an attribute of the function which you return from the decorator.
Once you do this, self.mock_fn.stats_fn.stats (not self.mock_fn()! The attribute is on the function, not its return value) will work within an instance of MockClass, and MockClass.mock_fn.stats_fn.stats will be available outside. The statistics will be global across all instances of MockClass (since the decorator is called once, not once per instance), which may or may not be what you want.
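Continuing the example, the counters can then be read like this:

mc = MockClass()
mc.mock_fn()
mc.mock_fn()
print(mc.mock_fn.stats_fn.stats)         # {'times_called': 2}
print(MockClass.mock_fn.stats_fn.stats)  # same dict object; stats are shared class-wide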

Conditionally disable caching decorator based on instance variable

I want to cache certain methods of a class - but only if a read_only flag is set on the instance of the class.
So, in the object below, I want the get() method to be cacheable, but only if self.read_only is True.
I can't conditionally apply the decorator, because it's applied when the class is defined, not when the instance is created.
from beaker.cache import cache_regions, cache_region

cache_regions.update({
    'long_term': {
        'expire': 86400,
        'type': 'dbm',
        'data_dir': '/tmp',
        'key_length': 32,
    }
})

class Foo(object):
    def __init__(self, read_only=True):
        self.read_only = read_only

    @cache_region('long_term')
    def get(self, arg):
        return arg + 1
You can use a decorator that checks the desired attribute and dispatches to the right (cached or uncached) function:
def conditional(decorator):
def conditional_decorator(fn):
dec = decorator(fn)
def wrapper(self, *args, **kw):
if self.read_only:
return dec(self, *args, **kw)
return fn(self, *args, **kw)
return wrapper
return conditional_decorator
Use like this:
@conditional(cache_region('long_term'))
def get(self, arg):
    return arg + 1
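The same pattern works with other caching decorators. As a self-contained sanity check, here is a variation that swaps beaker for functools.lru_cache from the standard library (an assumption of this sketch: the cached arguments, including self, are hashable):

import functools

class Foo(object):
    def __init__(self, read_only=True):
        self.read_only = read_only

    # 'conditional' is the helper defined above
    @conditional(functools.lru_cache(maxsize=None))
    def get(self, arg):
        print("computing", arg)
        return arg + 1

cached = Foo(read_only=True)
cached.get(1)    # prints "computing 1"
cached.get(1)    # served from the cache, nothing printed

uncached = Foo(read_only=False)
uncached.get(1)  # prints "computing 1"
uncached.get(1)  # prints "computing 1" again; caching is bypassed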

Python: shortcut for writing decorators which accept arguments?

Does the Python standard library have a shortcut for writing decorators which accept arguments?
For example, if I want to write a decorator like with_timeout(timeout):
@with_timeout(10.0)
def cook_eggs(eggs):
    while not eggs.are_done():
        eggs.cook()
I have to write something like:
def with_timeout(timeout):
    _func = [None]
    def with_timeout_helper(*args, **kwargs):
        with Timeout(timeout):
            return _func[0](*args, **kwargs)
    def with_timeout_return(f):
        _func[0] = f
        return functools.wraps(f)(with_timeout_helper)
    return with_timeout_return
But that's awfully verbose. Is there a shortcut which makes decorators which accept arguments easier to write?
Note: I realize that it's also possible to use three nested functions to implement decorators with arguments… But that feels just a bit suboptimal too.
For example, possibly something like a @decorator_with_arguments function:
@decorator_with_arguments
def timeout(f, timeout):
    @functools.wraps(f)
    def timeout_helper(*args, **kwargs):
        with Timeout(timeout):
            return f(*args, **kwargs)
    return timeout_helper
I tend to write my decorators as classes to be honest
class TestWithArgs(object):
    def __init__(self, *deco_args, **deco_kwargs):
        self.deco_args = deco_args
        self.deco_kwargs = deco_kwargs

    def __call__(self, func):
        def _wrap(*args, **kwargs):
            print("Blah blah blah")
            return func(*args, **kwargs)
        return _wrap
It's nothing if not slightly clearer.
I know you said it feels suboptimal, but I still feel that using three nested functions is the cleanest solution. The inner two functions are just the 'normal' way of defining a decorator for a function that takes arguments (see the example in Python's docs for @wraps). The outer one is really just a function that takes an argument and returns a decorator.
from functools import wraps

def with_timeout(timeout):
    def decorator(f):
        @wraps(f)
        def wrapper(*args, **kwargs):
            with Timeout(timeout):
                return f(*args, **kwargs)
        return wrapper
    return decorator
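Timeout itself is not shown in the question, so to make the sketch runnable here is a minimal stand-in context manager (an assumption: Unix signals, main thread only):

import signal
from contextlib import contextmanager

@contextmanager
def Timeout(seconds):
    # Hypothetical stand-in for the Timeout used above.
    def _alarm(signum, frame):
        raise TimeoutError("timed out after {} seconds".format(seconds))
    previous = signal.signal(signal.SIGALRM, _alarm)
    signal.alarm(int(seconds))
    try:
        yield
    finally:
        signal.alarm(0)
        signal.signal(signal.SIGALRM, previous)

@with_timeout(2.0)
def spin_forever():
    while True:
        pass

# spin_forever()  # raises TimeoutError after roughly two seconds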
Based on Jakob's suggestion, I've implemented a small Decorator class, which I feel does a fairly decent job:
import functools

class Decorator(object):
    def __call__(self, f):
        self.f = f
        return functools.wraps(f)(lambda *a, **kw: self.wrap(*a, **kw))

    def wrap(self, *args, **kwargs):
        raise NotImplementedError("Subclasses of Decorator must implement 'wrap'")

class with_timeout(Decorator):
    def __init__(self, timeout):
        self.timeout = timeout

    def wrap(self, *args, **kwargs):
        with Timeout(self.timeout):
            return self.f(*args, **kwargs)
First, we can define a little meta-decorator:
def decorator_with_arguments(wrapper):
    return lambda *args, **kwargs: lambda func: wrapper(func, *args, **kwargs)
That allows us to create decorators that accept arguments like so:
@decorator_with_arguments
def my_wrapper(func, *decorator_args, **decorator_kwargs):
    def wrapped(*call_args, **call_kwargs):
        print("from decorator:", decorator_args, decorator_kwargs)
        func(*call_args, **call_kwargs)
    return wrapped
Which can then be used normally:
@my_wrapper(1, 2, 3)
def test(*args, **kwargs):
    print("passed directly:", args, kwargs)

test(4, 5, 6)
Adding functools.wraps decoration is left as an exercise :)
Another take, without using lambdas:
def decorator_with_arguments(f):
    @functools.wraps(f)
    def with_arguments_helper(*args, **kwargs):
        def decorator(g):
            return f(g, *args, **kwargs)
        return decorator
    return with_arguments_helper
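A quick sanity check of this version with a trivial, hypothetical repeat decorator:

import functools

@decorator_with_arguments
def repeat(f, times):
    @functools.wraps(f)
    def repeat_helper(*args, **kwargs):
        result = None
        for _ in range(times):
            result = f(*args, **kwargs)
        return result
    return repeat_helper

@repeat(3)
def greet(name):
    print("hello", name)

greet("world")  # prints "hello world" three times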

Programmatically generate methods for a class

I have about 20 methods to redirect to a wrapper method that takes the original method, and the rest of the arguments:
class my_socket(parent):
    def _in(self, method, *args, **kwargs):
        # do funky stuff

    def recv(self, *args, **kwargs):
        return self._in(super().recv, *args, **kwargs)

    def recv_into(self, *args, **kwargs):
        return self._in(super().recv_into, *args, **kwargs)

    # and so on...
How can I add more of these methods programmatically? This is about as far as I get before everything starts to look wrong:
for method in 'recv', 'recvfrom', 'recvfrom_into', 'recv_into', ...:
    setattr(my_socket, method, ???)
Can I do this by assigning within the class definition, or something else that feels more natural?
class my_socket(parent):
    def makes_recv_methods(name):
        # wraps call to name
    def recv_meh = makes_recv_methods('recv_meh')
I'd prefer to use __get__ and friends when possible over magic functions from types.
I'd do it by running some code to generate the methods from a list after the class is defined - you could put this into a decorator.
import functools

def wrap_method(cls, name):
    # This unbound method will be pulled from the superclass.
    wrapped = getattr(cls, name)

    @functools.wraps(wrapped)
    def wrapper(self, *args, **kwargs):
        return self._in(wrapped.__get__(self, cls), *args, **kwargs)
    return wrapper

def wrap_methods(cls):
    for name in cls.WRAP_ATTRS:
        setattr(cls, name, wrap_method(cls, name))
    return cls

@wrap_methods
class my_socket(parent_class):
    WRAP_ATTRS = ['recv', 'recvfrom']  # ... + more method names

    def _in(self, method, *args, **kwargs):
        # do funky stuff
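As a small, self-contained illustration of wrap_methods in action, with a hypothetical Parent standing in for the real socket superclass:

class Parent:
    # Hypothetical stand-in for the real superclass (e.g. socket.socket).
    def recv(self, n):
        return "first {} bytes".format(n)

@wrap_methods
class LoggingSocket(Parent):
    WRAP_ATTRS = ['recv']

    def _in(self, method, *args, **kwargs):
        print("calling", method.__name__, "with", args)
        return method(*args, **kwargs)

print(LoggingSocket().recv(4))  # logs the call, then returns "first 4 bytes"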
wilberforce's proposal works, but there is a simpler way using only OOP:
def wrap_method(wrapped):
    @functools.wraps(wrapped)
    def wrapper(self, *args, **kwargs):
        return self._in(wrapped.__get__(self, type(self)), *args, **kwargs)
    return wrapper

class Parent:
    def _in(self, method, *args, **kwargs):
        return method(*args, **kwargs)

    @wrap_method
    def recv(self, *args, **kwargs):
        return  # whatever

    @wrap_method
    def recv_into(self, *args, **kwargs):
        return  # whatever

class MySocket(Parent):
    def _in(self, method, *args, **kwargs):
        # do funky stuff
I'd like to expand on the accepted answer. I wanted to potentially have a very long list of decorator methods applied to a very long list of methods.
import functools

def wrap_method(cls, name, wrapper_method_name):
    # This unbound method will be pulled from the superclass.
    wrapped = getattr(cls, name)

    @functools.wraps(wrapped)
    def wrapper(self, *args, **kwargs):
        wrapper_method = getattr(self, wrapper_method_name)
        return wrapper_method(wrapped.__get__(self, cls), *args, **kwargs)
    return wrapper

def wrap_methods(cls):
    for wrapper_method_name in cls.WRAPPER_METHOD_NAMES:
        for name in cls.WRAPPED_METHODS:
            setattr(cls, name, wrap_method(cls, name, wrapper_method_name))
    return cls
And here is the class that wraps the original
@wrap_methods
class WrappedConnection(BaseConnection):
    """
    This class adds some quality-of-life improvements to the BaseConnection class.
    - WRAPPED_METHODS are wrapped by WRAPPER_METHOD_NAMES
    - wrappers can be toggled on and off.

    example:
        connection = WrappedConnection(show_messages=True, log_errors=False, keep_authenticated=False)
    default:
        connection = WrappedConnection(show_messages=False, log_errors=True, keep_authenticated=True)
    """
    WRAPPER_METHOD_NAMES = ['log_errors', 'keep_authenticated', 'show_messages']
    WRAPPED_METHODS = ['a_method', 'b_method', 'c_method', 'd_method']
    MESSAGE_OVERRIDE_MAP = {"a_method": "a_method_message_override_attribute",
                            "b_method": "b_method_message_override_attribute"}

    def keep_authenticated(self, method, *args, **kwargs):
        """
        If the session has expired, the session is re-authenticated. The incident is logged by the default logger.
        This option can be turned off by setting keep_authenticated during initialization of a WrappedConnection object.
        - connection = WrappedConnection(keep_authenticated=False)  # why would you ever do this

        :param method: (method) method to be wrapped
        :param args: (args) passed args
        :param kwargs: (kwargs) passed kwargs
        :return: (method) method wrapped by @keep_authenticated
        """
        response, expired_session = method(*args, **kwargs), None
        if response["errors"] and self._keep_authenticated:
            expired_session = list(filter(lambda x: 'expired session' in x, response["errors"]))
        if expired_session:
            self.__init__()
            logging.info('Session has been re-authenticated.')
            response = method(*args, **kwargs)
        return response

    def log_errors(self, method, *args, **kwargs):
        """
        If there is an error the incident is logged. This option can be turned off by setting log_errors
        during initialization of a WrappedConnection object.
        - connection = WrappedConnection(log_errors=False)

        :param method: (method) method to be wrapped
        :param args: (args) passed args
        :param kwargs: (kwargs) passed kwargs
        :return: (method) method wrapped by @log_errors
        """
        response = method(*args, **kwargs)
        if response["errors"] and self._log_errors:
            errors = response["errors"]
            logging.error(errors)
        return response

    def show_messages(self, method, *args, **kwargs):
        """
        Shows the xml that is sent during the request. This option can be turned on by setting show_messages during
        initialization of a WrappedConnection object.
        - connection = WrappedConnection(show_messages=True)

        :param method: (method) method to be wrapped
        :param args: (args) passed args
        :param kwargs: (kwargs) passed kwargs
        :return: (method) method wrapped by @show_messages
        """
        response = method(*args, **kwargs)
        if self._show_messages:
            message_override_attr = WrappedConnection.MESSAGE_OVERRIDE_MAP.get(method.__name__)
            if message_override_attr:
                message_override = getattr(self, message_override_attr)
                print(BeautifulSoup(message_override, "xml").prettify())
            else:
                self._show_message(method.__name__, *args, **kwargs)
        return response

    def __init__(self, *args, keep_authenticated=True, log_errors=True, show_messages=False, **kwargs):
        super(WrappedConnection, self).__init__(*args, **kwargs)
        self._keep_authenticated = keep_authenticated
        self._log_errors = log_errors
        self._show_messages = show_messages
You could use cog.
class MySocket(Parent):
    """[[[cog
    import cog
    l = ['in', 'out']
    for item in l:
        cog.outl("def _{0}(self, method, *args, **kwargs):".format(item))
    ]]]"""
    #[[[end]]]
This has the added advantages of being easy to update, of not touching your code outside of the end comment, and of letting you tweak the generated code if necessary.
I've successfully used cog for generating boilerplate on another project, mixed in with the non generated code. It started out reading an input file of instructions into a dictionary. Then for each section of boilerplate it used that piece of the dictionary to know what to write.
I edit the instruction file in one spot, instead of twenty different places in the boilerplate.
