I'm trying to add a wrapper to each method in a class by subclassing it and reassigning the methods in the constructor of the new class. However, I'm getting the same reference for all subclassed methods. How is this possible?
class A:
    def foo(self):
        print("foo")

    def bar(self):
        print("bar")

class B(A):
    def __init__(self):
        super().__init__()
        methods = [
            (method_name, getattr(self, method_name))
            for method_name in dir(self) if not method_name.startswith('_')
        ]
        for (method_name, f) in methods:
            def wrapper(*args, **kwargs):
                print('wrapped')
                return f(*args, **kwargs)
            setattr(self, method_name, wrapper)
b = B()
b.foo()
>>> wrapped
>>> foo
b.bar()
>>> wrapped
>>> foo
This is a spin on a common Python gotcha: late-binding closures.
What is happening is that the last value of f is bound to all of your wrapped methods.
A common workaround is binding your changing variable to a keyword argument or using functools.partial.
For your example, you can use a keyword argument.
class A:
    def foo(self, baz='foo'):
        print(baz)

    def bar(self, baz='bar'):
        print(baz)

class B(A):
    def __init__(self):
        super().__init__()
        methods = [
            (method_name, getattr(self, method_name))
            for method_name in dir(self) if not method_name.startswith('_')
        ]
        for (method_name, f) in methods:
            # here you can use an implied private keyword argument
            # to minimize the chance of conflicts
            def wrapper(*args, _f=f, **kwargs):
                print('wrapped')
                return _f(*args, **kwargs)
            setattr(self, method_name, wrapper)

b = B()
b.foo()
b.foo('baz')
b.foo(baz='baz')
b.bar()
I added a few more calls to your methods to demonstrate that the wrapper still works with different call forms.
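The functools.partial workaround mentioned above would look roughly like this, a minimal sketch using the same class A from the question; partial freezes the current value of f at loop time, so no default-argument trick is needed:

import functools

class B(A):
    def __init__(self):
        super().__init__()
        methods = [
            (method_name, getattr(self, method_name))
            for method_name in dir(self) if not method_name.startswith('_')
        ]

        def wrapper(f, *args, **kwargs):
            print('wrapped')
            return f(*args, **kwargs)

        for (method_name, f) in methods:
            # partial binds this loop iteration's f, avoiding the late-binding problem
            setattr(self, method_name, functools.partial(wrapper, f))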
Related
I would like to override a method of an instance of class A with a method from class B, but in such a way that all references to the old method of the instance (made before overriding it) then 'link' to the new one. In code:
import types

class A:
    def foo(self):
        print('A')

class B:
    def foo(self):
        print('B')

class C:
    def __init__(self, a):
        self.func = a.foo

    def do_something(self):
        self.func()

a = A()
c = C(a)

method_name = 'foo'  # it has to be dynamic
new_method = getattr(B, method_name)
setattr(a, method_name, types.MethodType(new_method, a))

c.do_something()  # still prints A, I want it to print B now
I want c.func to hold the new method from class B after the attribute of a has been set (without doing anything with the c object).
Is there a way to set the attribute of the instance a so that all previously made references then refer to the new method?
Sorry if this question is kind of stupid, I am not that much into this.
You could do it like this, for example:
...
def retain(foo):
    return lambda *args, **kwargs: getattr(foo.__self__, foo.__name__)(*args, **kwargs)

class C:
    def __init__(self, a):
        self.func = retain(a.foo)
...
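With that change, rebinding a.foo is picked up automatically, because the stored callable looks the method up by name on every call. A quick check, assuming the rest of the question's snippet (including import types and classes A and B) is unchanged:

a = A()
c = C(a)
setattr(a, 'foo', types.MethodType(B.foo, a))
c.do_something()  # now prints B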
Just adding to Alex's answer.
In my case, the described dynamic partly comes from a need for serialization and deserialization. To serialize certain method references, I used to rely on func.__name__.
However, c.func.__name__ would only return <lambda> with Alex's approach. I worked around this by creating a callable class that uses the retain function but stores the method's name separately, which in my case is enough because it is only some specific references that I need to serialize.
import types

def retain(foo):
    return lambda *args, **kwargs: getattr(foo.__self__, foo.__name__)(*args, **kwargs)

class M:
    def __init__(self, method):
        self.method_name = method.__name__
        self.method = retain(method)

    def __call__(self, *args, **kwargs):
        self.method(*args, **kwargs)

class A:
    def foo(self):
        print('A')

class B:
    def foo(self):
        print('B')

class C:
    def __init__(self, method):
        self.func = M(method)

    def do_something(self):
        self.func()

a = A()
c = C(a.foo)

method_name = 'foo'  # dynamic, as in the question
setattr(a, method_name, types.MethodType(getattr(B, 'foo'), a))
c.do_something()  # now prints B

# when serializing
method_name = c.func.method_name
Is it possible to create a "constructor", or rather an "initializer", for each function, instead of having to manually write it at the top of each function in the class?
So, each time a function in a class is called, another assigned function (unknown to the caller) is always called first (called pre_check in the example below).
Here is an example using super(), but then I have to manually copy the call inside each function.
class Helper():
    def pre_check(self):
        print("Helper fcn")

class Parent(Helper):
    def __init__(self):
        print("Initializer")

    def foo(self):
        super().pre_check()  # <---- new code
        # ... existing code here ...

    def bar(self):
        super().pre_check()  # <---- new code
        # ... existing code here ...

    def many_more_functions(self):
        super().pre_check()  # <---- new code
        # ... existing code here ...

m = Parent()
m.foo()
m.bar()
Note how __init__ in Parent is not supposed to run pre_check.
You can use a decorator for the class that will in turn decorate all public methods defined in the class:
import inspect

def addhelper(helpmethod):
    def deco(cls):
        def decomethod(method):
            def inner(self, *args, **kwargs):
                helpmethod(self)
                return method(self, *args, **kwargs)
            # copy signature, doc and names from the original method
            inner.__signature__ = inspect.signature(method)
            inner.__doc__ = method.__doc__
            inner.__name__ = method.__name__
            inner.__qualname__ = method.__qualname__
            return inner
        # search all methods declared in cls with a name not starting with _
        for name, meth in inspect.getmembers(
                cls, lambda x: inspect.isfunction(x)
                and not x.__name__.startswith('_')
                and x.__qualname__.startswith(cls.__name__)):
            # replace each method with its decoration
            setattr(cls, name, decomethod(meth))
        return cls
    return deco

class Helper():
    def pre_check(self):
        print("Helper fcn")

@addhelper(Helper.pre_check)
class Parent(Helper):
    def __init__(self):
        print("Initializer")

    def foo(self):
        # super().pre_check() # <----
        print('in foo')

    def bar(self):
        # super().pre_check() # <----
        print('in bar')

    def many_more_functions(self):
        # super().pre_check() # <----
        print('in many_more_functions')
We can now use it:
>>> p = Parent()
Initializer
>>> p.foo()
Helper fcn
in foo
>>> p.bar()
Helper fcn
in bar
>>> p.many_more_functions()
Helper fcn
in many_more_functions
Use __init_subclass__ to change subclasses as they are created. You can wrap the methods of subclasses:
import functools
import inspect

class Helper():
    def __init_subclass__(cls):
        for field, value in cls.__dict__.items():
            # add additional checks as desired, e.g. exclude __special_methods__
            if inspect.isfunction(value) and not getattr(value, 'checked', False):
                setattr(cls, field, cls._check(value))  # wrap method

    @classmethod
    def _check(cls, fcn):
        """Create a wrapper to inspect the arguments passed to methods"""
        @functools.wraps(fcn)
        def checked_fcn(*args, **kwargs):
            print(fcn, "got", args, kwargs)
            return fcn(*args, **kwargs)
        return checked_fcn

class Parent(Helper):
    def __init__(self):
        print("Initializer")

    def foo(self):
        print("Foo")
Note that this will wrap all methods, including special methods such as __init__:
>>> Parent().foo()
<function Parent.__init__ at 0x1029b2378> got (<__main__.Parent object at 0x102c09080>,) {}
Initializer
<function Parent.foo at 0x1029b2158> got (<__main__.Parent object at 0x102c09080>,) {}
Foo
You can extend the check in __init_subclass__ with arbitrary rules to filter out functions. For example, field[:2] == field[-2:] == "__" excludes special methods.
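For instance, a minimal sketch of the __init_subclass__ check extended with that dunder filter, assuming the same imports and _check classmethod as above:

class Helper:
    def __init_subclass__(cls):
        for field, value in cls.__dict__.items():
            # skip special methods such as __init__, and anything already wrapped
            if field[:2] == field[-2:] == "__":
                continue
            if inspect.isfunction(value) and not getattr(value, 'checked', False):
                setattr(cls, field, cls._check(value))  # wrap method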
You can use a metaclass and apply a decorator to each method of the classes created with that metaclass.
Code:
def decorate(f):
    def do_something(self, a):
        if f(self, a) > 18:
            return "Eligible to vote"
        else:
            return "Not eligible to vote"
    return do_something

class Meta(type):
    def __new__(cls, name, bases, namespace, **kwds):
        namespace = {k: v if k.startswith('__') else decorate(v)
                     for k, v in namespace.items()}
        return type.__new__(cls, name, bases, namespace)

class MetaInstance(metaclass=Meta):
    def foo1(self, val):
        return val + 15

    def foo2(self, val):
        return val + 9

obj1 = MetaInstance()
print(obj1.foo1(5))
print(obj1.foo2(2))
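With these definitions, obj1.foo1(5) computes 5 + 15 = 20, which is greater than 18, so the first print shows "Eligible to vote"; obj1.foo2(2) computes 11, so the second print shows "Not eligible to vote".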
I have seen cls().__init__() used in a classmethod, but it seems that the code could have used a simple cls() instead. As in:
class SomeCrazyClass:
    @classmethod
    def newclass(cls):
        return cls().__init__()

    @classmethod
    def newclass2(cls):
        return cls()
Is this just a poor coding style choice or is there a practical use of cls().__init__() in some situation?
The difference between cls().__init__() and cls() is that the former calls __init__ on the instance twice and returns None (the return value of __init__), while the latter returns the actual instance.
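A quick illustration of that difference, using a hypothetical Widget class:

class Widget:
    def __init__(self):
        print('init called')

print(Widget())             # 'init called' once, then the Widget repr
print(Widget().__init__())  # 'init called' twice, then None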
But one imaginable scenario for calling __init__ again is lazy initialization of a class, and perhaps some other use cases as well.
For example, in the code below the instance variables are loaded only on the first access of an attribute:
def init(cls, real_init):
    def wrapped(self, *args, **kwargs):
        # swap the real __init__ back in; it only runs later, when called explicitly
        cls.__init__ = real_init
    return wrapped

class A(object):
    def __new__(cls, *args, **kwargs):
        cls.__init__ = init(cls, cls.__init__)
        instance = object.__new__(cls)
        return instance

    def __getattr__(self, attr):
        expected_attrs = ('a', 'b')
        if attr in expected_attrs:
            self.__init__(range(10000), range(1000))
        return object.__getattribute__(self, attr)

    def __init__(self, a, b):
        print('inside __init__')
        self.a = sum(a)
        self.b = sum(b)
Demo:
>>> a = A()
>>> a.__dict__
{}
>>> a.a, a.b
inside __init__
(49995000, 499500)
>>> a.__dict__
{'a': 49995000, 'b': 499500}
>>> a = A()
>>> a.__init__(range(10**5), range(10**4))
inside __init__
>>> a.a, a.b
(4999950000, 49995000)
We can now also return a value from __init__, which is usually not possible.
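A minimal sketch of that point, with a hypothetical Lazy class: a non-None return from __init__ raises a TypeError during normal construction, but it is visible when __init__ is invoked explicitly as an ordinary method call:

class Lazy:
    def __init__(self, data):
        self.total = sum(data)
        return self.total  # rejected by Lazy(...), but usable on an explicit call

obj = object.__new__(Lazy)     # bypass the normal construction path
print(obj.__init__(range(5)))  # prints 10
# Lazy(range(5)) would raise TypeError: __init__() should return None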
In A.__init__ I call self.func(argument):
class A(object):
    def __init__(self, argument, key=0):
        self.func(argument)

    def func(self, argument):
        # some code here
        pass
I want to change the signature of A.func in B. B.func gets called in B.__init__ through A.__init__:
class B(A):
    def __init__(self, argument1, argument2, key=0):
        super(B, self).__init__(argument1, key)  # calls A.__init__

    def func(self, argument1, argument2):
        # some code here
        pass
Clearly, this doesn't work because the signature of B.func expects two arguments while A.__init__ calls it with one argument. How do I work around this? Or is there something incorrect with the way I have designed my classes?
key is a default argument to A.__init__. argument2 is not intended for key; it is an extra argument that B takes but A does not. B also takes key and has a default value for it.
Another constraint is that I would prefer not to change the signature of A.__init__; key will usually be 0, so I want users to be able to write A(arg) rather than A(arg, key=0).
Generally speaking, changing the signature of a method between subclasses breaks the expectation that the methods on subclasses implement the same API as those on the parent.
However, you could re-tool your A.__init__ to allow for arbitrary extra arguments, passing those on to self.func():
class A(object):
    def __init__(self, argument, *extra, **kwargs):
        key = kwargs.get('key', 0)
        self.func(argument, *extra)
        # ...

class B(A):
    def __init__(self, argument1, argument2, key=0):
        super(B, self).__init__(argument1, argument2, key=key)
        # ...
The second argument passed to super(B, self).__init__() is then captured in the extra tuple, and applied to self.func() in addition to argument.
In Python 2, however, to make it possible to use *extra you need to switch to using **kwargs, otherwise key would always capture the second positional argument. Make sure to pass on key from B with key=key.
In Python 3, you are not bound by this restriction; put *args before key=0 and only ever use key as a keyword argument in calls:
class A(object):
    def __init__(self, argument, *extra, key=0):
        self.func(argument, *extra)
I'd give func() an *extra parameter too, so that its interface essentially remains unchanged between A and B; it just ignores anything beyond the first parameter passed in for A, and beyond the first two for B:
class A(object):
    # ...
    def func(self, argument, *extra):
        # ...
        pass

class B(A):
    # ...
    def func(self, argument1, argument2, *extra):
        # ...
        pass
Python 2 demo:
>>> class A(object):
...     def __init__(self, argument, *extra, **kwargs):
...         key = kwargs.get('key', 0)
...         self.func(argument, *extra)
...     def func(self, argument, *extra):
...         print('func({!r}, *{!r}) called'.format(argument, extra))
...
>>> class B(A):
...     def __init__(self, argument1, argument2, key=0):
...         super(B, self).__init__(argument1, argument2, key=key)
...     def func(self, argument1, argument2, *extra):
...         print('func({!r}, {!r}, *{!r}) called'.format(argument1, argument2, extra))
...
>>> A('foo')
func('foo', *()) called
<__main__.A object at 0x105f602d0>
>>> B('foo', 'bar')
func('foo', 'bar', *()) called
<__main__.B object at 0x105f4fa50>
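For completeness, a rough Python 3 counterpart of that demo, with key as a keyword-only argument as described above (a sketch, not taken verbatim from the answer):

class A:
    def __init__(self, argument, *extra, key=0):
        self.func(argument, *extra)

    def func(self, argument, *extra):
        print('func({!r}, *{!r}) called'.format(argument, extra))

class B(A):
    def __init__(self, argument1, argument2, key=0):
        super().__init__(argument1, argument2, key=key)

    def func(self, argument1, argument2, *extra):
        print('func({!r}, {!r}, *{!r}) called'.format(argument1, argument2, extra))

A('foo')         # func('foo', *()) called
B('foo', 'bar')  # func('foo', 'bar', *()) called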
It seems that there is a problem in your design. The following might fix your particular case, but it seems to perpetuate the bad design even further; notice that Parent.method is being called directly.
>>> class Parent:
        def __init__(self, a, b=None):
            Parent.method(self, a)
            self.b = b
        def method(self, a):
            self.location = id(a)

>>> class Child(Parent):
        def __init__(self, a):
            super().__init__(a, object())
        def method(self, a, b):
            self.location = id(a), id(b)

>>> test = Child(object())
Please consider giving the second parameter of the method you are overriding a default argument. Otherwise, design your class and call structure differently; reorganization might eliminate the problem.
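A hedged sketch of that first suggestion: give Child.method a default for its extra parameter, so the single-argument call made from Parent.__init__ remains valid:

class Parent:
    def __init__(self, a, b=None):
        self.method(a)  # no need to call Parent.method directly any more
        self.b = b

    def method(self, a):
        self.location = id(a)

class Child(Parent):
    def __init__(self, a):
        super().__init__(a, object())

    def method(self, a, b=None):
        # the default lets the one-argument call from Parent.__init__ succeed
        self.location = id(a), id(b)

test = Child(object())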
Actually, I would resort to putting an extra boolean argument in A's __init__ to control the call to func, and just pass False from B's __init__:
class A(object):
    def __init__(self, argument, key=0, call_func=True):
        if call_func:
            self.func(argument)

class B(A):
    def __init__(self, argument):
        argument1, argument2 = argument, 'something else'
        super(B, self).__init__(argument1, argument2, call_func=False)
I want to wrap an object of type A. Is there a way to programmatically wrap a class object?
Given
class A(object):
    def __init__(self):
        ## ..
        pass

    def f0(self, a):
        ## ...
        pass

    def f1(self, a, b):
        ## ..
        pass
I want another class that wraps an A, such as
class B(object):
    def __init__(self):
        self.a = A()

    def f0(self, a):
        try:
            self.a.f0(a)
        except Exception, ex:
            ## ...
            pass

    def f1(self, a, b):
        try:
            self.a.f1(a, b)
        except Exception, ex:
            ## ...
            pass
Is there a way to create B.f0 and B.f1 by reflection/inspection of class A?
If you want to create class B by calling a function on a predefined class A, you can simply do B = wrap_class(A) with a function wrap_class that looks like this:
import copy
import types

def wrap_class(cls):
    'Wraps a class so that exceptions in its methods are caught.'
    # The copy is necessary so that mutable class attributes are not
    # shared between the old class cls and the new class:
    new_cls = copy.deepcopy(cls)
    # vars() is used instead of dir() so that the attributes of base classes
    # are not modified, but one might want to use dir() instead:
    for (attr_name, value) in vars(cls).items():
        if isinstance(value, types.FunctionType):
            setattr(new_cls, attr_name, func_wrapper(value))  # func_wrapper is defined below
    return new_cls

B = wrap_class(A)
As Jürgen pointed out, this creates a copy of the class; this is only needed, however, if you really want to keep your original class A around (as suggested in the original question). If you don't care about A, you can simply decorate it with a wrapper that does not perform any copy, like so:
def wrap_class(cls):
    'Wraps a class so that exceptions in its methods are caught.'
    # vars() is used instead of dir() so that the attributes of base classes
    # are not modified, but one might want to use dir() instead:
    for (attr_name, value) in vars(cls).items():
        if isinstance(value, types.FunctionType):
            setattr(cls, attr_name, func_wrapper(value))
    return cls

@wrap_class
class A(object):
    …  # Original A class, with methods that are not wrapped with exception catching
The decorated class A catches exceptions.
The metaclass version is heavier, but its principle is similar:
import types

def func_wrapper(f):
    'Returns a version of function f that prints an error message if an exception is raised.'
    def wrapped_f(*args, **kwargs):
        try:
            return f(*args, **kwargs)
        except Exception, ex:
            print "Function", f, "raised", ex
    return wrapped_f

class ExceptionCatcher(type):
    'Metaclass that wraps methods with func_wrapper().'
    def __new__(meta, cname, bases, cdict):
        # cdict contains the attributes of class cname:
        for (attr_name, value) in cdict.items():
            if isinstance(value, types.FunctionType):  # Various attribute types can be wrapped differently
                cdict[attr_name] = func_wrapper(value)
        return super(ExceptionCatcher, meta).__new__(meta, cname, bases, cdict)

class B(object):
    __metaclass__ = ExceptionCatcher  # ExceptionCatcher will be used for creating class B
    class_attr = 42  # Will not be wrapped

    def __init__(self):
        pass

    def f0(self, a):
        return a*10

    def f1(self, a, b):
        1/0  # Raises a division by zero exception!

# Test:
b = B()
print b.f0(3.14)
print b.class_attr
print b.f1(2, 3)
This prints:
31.4
42
Function <function f1 at 0x107812d70> raised integer division or modulo by zero
None
What you want to do is in fact typically done by a metaclass, which is a class whose instances are classes: this is a way of building the B class dynamically based on its parsed Python code (the code for class A, in the question). More information on this can be found in the nice, short description of metaclasses given in Chris's Wiki (in part 1 and parts 2-4).
Metaclasses are an option, but they are generally hard to understand, as is too much reflection when it is not needed in simple cases, because it is easy to catch too many (internal) functions. If the wrapped functions are a stable, known set, and B might gain other functions, you can delegate explicitly function by function and still keep your error-handling code in one place:
class B(object):
    def __init__(self):
        a = A()
        self.f0 = errorHandler(a.f0)
        self.f1 = errorHandler(a.f1)
You might do the assignments in a loop if there are many of them, using getattr/setattr (see the sketch after the errorHandler definition below).
The errorHandler function needs to return a function which wraps its argument with error-handling code.
def errorHandler(f):
    def wrapped(*args, **kw):
        try:
            return f(*args, **kw)
        except:
            # log or something
            pass
    return wrapped
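As mentioned above, if there are many methods, the explicit assignments can be replaced by a getattr/setattr loop; a rough sketch, reusing errorHandler and an explicit list of method names:

class B(object):
    def __init__(self):
        a = A()
        for name in ('f0', 'f1'):  # the stable, known set of methods to delegate
            setattr(self, name, errorHandler(getattr(a, name)))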
You can also use errorHandler as a decorator on new functions that do not delegate to the A instance:
class B(A):
    ...

    @errorHandler
    def f_new(self):
        ...
This solution keeps B simple and it is quite explicit what's going on.
You could try it old-school with __getattr__:
class B(object):
    def __init__(self):
        self.a = A()

    def __getattr__(self, name):
        a_method = getattr(self.a, name, None)
        if not callable(a_method):
            raise AttributeError("Unknown attribute %r" % name)
        def wrapper(*args, **kwargs):
            try:
                return a_method(*args, **kwargs)
            except Exception, ex:
                # ...
                pass
        return wrapper
Or with updating B's dict:
class B(object):
    def __init__(self):
        a = A()

        def make_wrapper(attr):
            # closing over attr in a factory avoids the late-binding gotcha
            # discussed at the top of this page
            def wrapper(*args, **kwargs):
                try:
                    return attr(*args, **kwargs)
                except Exception, ex:
                    # ...
                    pass
            return wrapper

        for attr_name in dir(a):
            attr = getattr(a, attr_name)
            if callable(attr):
                setattr(self, attr_name, make_wrapper(attr))  # or try self.__dict__[x] = y
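Note that without the make_wrapper factory (or an equivalent default-argument or functools.partial trick), this loop would hit exactly the late-binding closure gotcha discussed in the first answer on this page: every wrapped attribute would end up calling the last attr seen in the loop.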