Specifying an exit function for an abstract function - python

I need a way to call a function after an abstract method has run, e.g.:
import abc

class A(object):
    @abc.abstractmethod
    def method1(self):
        raise Exception("Unimplemented method")

    def method2(self):
        print "method1 finished"

class B(A):
    def method1(self):
        print "executing method1 from class B"
I need a way to automatically call method2 of class A after method1 has been executed (it should be done on the class A side, independently of the inheriting classes).
Is there a nice way of doing this?

You could use a metaclass, which wraps method1 at the time the class is created:
import abc
from functools import wraps

class MetaA(type):
    def __new__(meta, name, bases, attr):
        method1 = attr['method1']
        # Only wrap concrete implementations; leave the abstract stub alone.
        if not getattr(method1, '__isabstractmethod__', False):
            @wraps(method1)
            def wrapper(self, *args, **kw):
                res = method1(self, *args, **kw)
                self.method2()
                return res
            attr['method1'] = wrapper
        return super(MetaA, meta).__new__(meta, name, bases, attr)

class A(object):
    __metaclass__ = MetaA

    @abc.abstractmethod
    def method1(self):
        raise Exception("Unimplemented method")

    def method2(self):
        print "method1 finished"
This applies what is basically a decorator to a specific method whenever a (sub)class is created.
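For reference, on Python 3 the metaclass is attached with the metaclass keyword argument rather than the __metaclass__ attribute; a minimal sketch, assuming the same MetaA as above:

import abc

class A(metaclass=MetaA):
    @abc.abstractmethod
    def method1(self):
        raise Exception("Unimplemented method")

    def method2(self):
        print("method1 finished")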
Another approach, somewhat hackish but workable, is to intercept the method access. You'd implement a __getattribute__ hook on A that adds the wrapper:
import abc
from functools import wraps

class A(object):
    @abc.abstractmethod
    def method1(self):
        raise Exception("Unimplemented method")

    def method2(self):
        print "method1 finished"

    def __getattribute__(self, name):
        obj = super(A, self).__getattribute__(name)
        if name == 'method1':
            @wraps(obj)
            def wrapper(*args, **kw):
                res = obj(*args, **kw)
                self.method2()
                return res
            return wrapper
        return obj
Either approach results in:
>>> B().method1()
executing method1 from class B
method1 finished
By using the @functools.wraps() decorator, the wrapper preserves several important attributes of the wrapped method, such as its name and docstring.
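For example, with @wraps applied the wrapper keeps the wrapped method's name (hypothetical session, given the classes above):

>>> B.method1.__name__
'method1'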

This looks like a job for the Template method pattern, for example:
import abc

class A(object):
    def method1(self):
        # do something before
        try:
            self.method1_impl()
        finally:
            # do something after, for example:
            self.method2()

    @abc.abstractmethod
    def method1_impl(self):
        pass

    def method2(self):
        print "method1 finished"

class B(A):
    def method1_impl(self):
        print "executing method1 from class B"
While I'm not a proponent of this style (it tends to become hard to follow as the code grows and becomes more complex), it is occasionally used and has a right to exist.

When this kind of situation happens, it can usually be solved by overriding "deeper" functionality.
Instead of having class B override method1, make it override method1_subfunction, and call both method1_subfunction and method2 from method1:
import abc

class A(object):
    def method1(self):
        self.method1_subfunction()
        self.method2()

    @abc.abstractmethod
    def method1_subfunction(self):
        raise Exception("Unimplemented method")

    def method2(self):
        print "method1 finished"

class B(A):
    def method1_subfunction(self):
        print "executing method1 from class B"

Related

Do something before and after method execution

I was trying to perform an action before and after the execution of some methods in my class.
First I thought about using a decorator to extend the functionality, something like this:
def decorator(f):
    def wrap(*args, **kwargs):
        print("before")
        f(*args, **kwargs)
        print("after")
    return wrap

class Foo(object):
    @decorator
    def do(self):
        print("This is do")

a = Foo()
a.do()
This outputs what I want:
before
This is do
after
But then I realized that if I wanted to inherit from Foo to extend do that wouldn't work, at least the way I'm doing it:
class Bar(Foo):
    def do(self):
        super(Bar, self).do()
        print("This is the new implementation of do")

b = Bar()
b.do()
This will be the output and it's not okay. "after" should be the last thing to be printed:
before
This is do
after
This is the new implementation of do
Maybe there is another way to decorate do in Bar so it does what I want, but I don't know how (if there is, I would like to know it), and honestly, decorating do every time doesn't seem like a good way to go.
So finally I came up with what I think is a nice solution: defining __getattribute__ so it returns a wrapper for do:
class Foo(object):
    def __getattribute__(self, name):
        attribute = super(Foo, self).__getattribute__(name)
        if name == "do":
            def wrap(*args, **kwargs):
                print("before")
                attribute(*args, **kwargs)
                print("after")
            return wrap
        else:
            return attribute

    def do(self):
        print("This is do")

class Bar(Foo):
    def __init__(self):
        super(Bar, self).__init__()

    def do(self):
        super(Bar, self).do()
        print("This is the new implementation of do")

a = Bar()
a.do()
Is this a good solution? Any downsides? I'm missing something that could create a problem in the future? Other workarounds?
Thx!!
You can access the wrapped function in Python 3 with __wrapped__; since it's a custom decorator, you need a slight modification using the functools.wraps decorator.
from functools import wraps

def decorator(f):
    @wraps(f)
    def wrap(*args, **kwargs):
        print("before")
        f(*args, **kwargs)
        print("after")
    return wrap
Reapply the decorator to the new do() and strip it from the old one:
class Bar(Foo):
    @decorator
    def do(self):
        # __wrapped__ is the undecorated function, so pass self explicitly
        super(Bar, self).do.__wrapped__(self)
        print("This is the new implementation of do")
you get:
before
This is do
This is the new implementation of do
after
You could place the implementation of do() in a separate method that is the one to override in subclasses, and keep the main do() method, with pre/post processing only, in the base class:
class Foo:
    def _do(self):
        # do your stuff
        ...

    def do(self):
        self.preProcessing()
        self._do()
        self.postProcessing()

class Bar(Foo):
    def _do(self):
        super()._do()
        # do more stuff
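For illustration, a hedged sketch of the same idea filled in end to end; the preProcessing/postProcessing bodies below are invented placeholders, not part of the original answer:

class Foo:
    def _do(self):
        print("This is do")

    def do(self):
        self.preProcessing()
        self._do()
        self.postProcessing()

    def preProcessing(self):
        print("before")   # placeholder pre-hook

    def postProcessing(self):
        print("after")    # placeholder post-hook

class Bar(Foo):
    def _do(self):
        super()._do()
        print("This is the new implementation of do")

Bar().do()  # prints: before, This is do, This is the new implementation of do, after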

Decorators to class methods called without class instantiation?

In the code below I created a decorator for the methods of my class Class. I noticed that this decorator is called even without creating a class instance!
And without calling these methods in the class!
Is there any explanation for that?
The decorator:

def deco(class_name):
    def inner_function(method):
        print("method is = {} and class is: {}".format(method.__name__, class_name.__name__))
        return method
    return inner_function
class_deco:

class class_deco:
    def __init__(self):
        pass
Class:

class Class:
    def __init__(self):
        pass

    @deco(class_deco)
    def f1(self):
        pass

    @deco(class_deco)
    def f2(self):
        pass
When I run the script:

if __name__ == "__main__":
    pass
I get this result:
method is = f1 and class is: class_deco
method is = f2 and class is: class_deco
Decorators are just syntactic sugar for the following:
@deco(class_deco)
def f1(self):
    pass

is the same as:

f1 = deco(class_deco)(f1)
So this code runs as soon as the module is imported, just like any other name declaration would, and the name f1 is rebound to the decorated f1 as above.
As already explained, the @decorator syntax is only syntactic sugar, so this:
@somedecorator
def foo():
    pass

is strictly equivalent to:

def foo():
    pass

foo = somedecorator(foo)
In your case, you ARE explicitly calling the decorator function:
@deco(class_deco)
def f1(self):
    pass

which is equivalent to:

def f1(self):
    pass

_effective_decorator = deco(class_deco)
f1 = _effective_decorator(f1)
which is why your inner_function is indeed executed at import time.
Decorators that take additional parameters need one more level of nesting, so technically your decorator should look like:
def deco(cls):
    def real_deco(func):
        def inner_function(*args, **kw):
            print("method is = {} and class is: {}".format(func.__name__, cls.__name__))
            return func(*args, **kw)
        return inner_function
    return real_deco
BUT if the point is to get the name of the class the method really belongs to, this is still broken - you should get the class from the instance on which the method is called, not try to hard-code it in the decorator call (which will never work as intended since the real class doesn't exist when you're applying the decorator to the function). So the proper implementation would look something like:
def deco(func):
    # we're only supposed to use this on methods...
    def wrapper(self, *args, **kw):
        print("class {} - method {}".format(type(self).__name__, func.__name__))
        return func(self, *args, **kw)
    return wrapper

class Class:
    @deco
    def f1(self):
        pass
NB: this won't handle classmethods nor staticmethods, of course.
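With the wrapper above (which calls the wrapped function and reads the class off the instance), a quick hypothetical session:

>>> Class().f1()
class Class - method f1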
Here is a demo showing just two possible ways a decorator could be constructed:
def Deco(*deco_params):
    print('In Deco', deco_params)
    def deco(func):
        print('In deco(func)')
        def inner(*args, **kwargs):
            print('In inner(*args, **kwargs)')
            return func(*args, **kwargs)
        return inner
    return deco

def deco(method):
    print('In deco(method)')
    def inner_function(*args, **kwargs):
        print("method is = {} called".format(method.__name__))
        return method(*args, **kwargs)
    return inner_function

class Class:
    def __init__(self):
        pass

    @deco
    def f1(self):
        pass

    @Deco(42)
    def f2(self):
        pass

if __name__ == "__main__":
    print('Now in Main')
    c = Class()
    c.f1()
    c.f2()
Output:
In deco(method)
In Deco (42,)
In deco(func)
Now in Main
method is = f1 called
In inner(*args, **kwargs)

Delegation design pattern with abstract methods in python

I have the following classes implementing a "Delegation Design Pattern" with an additional DelegatorParent class:
class DelegatorParent():
    def __init__(self):
        self.a = 'whatever'

class ConcreteDelegatee():
    def myMethod(self):
        return 'myMethod'

class Delegator(DelegatorParent):
    def __init__(self):
        self.delegatee = ConcreteDelegatee()
        DelegatorParent.__init__(self)

    def __getattr__(self, attrname):
        return getattr(self.delegatee, attrname)

a = Delegator()
result = a.myMethod()
Everything looks fine.
Now I would like to put an abstract method in DelegatorParent, to ensure that "myMethod" is always defined.
from abc import ABCMeta, abstractmethod

class DelegatorParent():
    __metaclass__ = ABCMeta

    @abstractmethod
    def myMethod(self):
        pass

    def __init__(self):
        self.a = 'whatever'

class ConcreteDelegatee():
    def myMethod(self):
        return 'myMethod'

class Delegator(DelegatorParent):
    def __init__(self):
        self.delegatee = ConcreteDelegatee()
        DelegatorParent.__init__(self)

    def __getattr__(self, attrname):
        return getattr(self.delegatee, attrname)

    # This method seems unnecessary, but if I erase it an exception is
    # raised because the abstract method's restriction is violated
    def myMethod(self):
        return self.delegatee.myMethod()

a = Delegator()
result = a.myMethod()
Can you help me find an "elegant" way to remove "myMethod" from "Delegator"? Intuition tells me that it is somehow redundant, considering that a custom __getattr__ method is defined.
And more importantly, notice that with this implementation, if I forget to define myMethod in ConcreteDelegatee, the program loads fine, but it may crash at runtime if I call Delegator.myMethod(), which is exactly what I wanted to avoid by using abstract methods in DelegatorParent.
Obviously a simple solution would be to move @abstractmethod to the Delegator class, but I want to avoid doing that because in my program DelegatorParent is a very important class (and Delegator is just an auxiliary class).
You can decide to automatically implement the abstract methods that are delegated to ConcreteDelegatee.
For each abstract method, check whether its name exists on the ConcreteDelegatee class and implement the method as a delegate to that class's method.
from abc import ABCMeta, abstractmethod

class DelegatorParent(object):
    __metaclass__ = ABCMeta

    def __init__(self):
        self.a = 'whatever'

    @abstractmethod
    def myMethod(self):
        pass

class Delegatee(object):
    pass

class ConcreteDelegatee(Delegatee):
    def myMethod(self):
        return 'myMethod'

    def myMethod2(self):
        return 'myMethod2'

class Delegator(DelegatorParent):
    def __new__(cls, *args, **kwargs):
        implemented = set()
        for name in cls.__abstractmethods__:
            if hasattr(ConcreteDelegatee, name):
                def make_delegated(name):
                    # Bind `name` now; a plain closure would late-bind and
                    # leave every method delegating to the last name in the loop.
                    def delegated(this, *a, **kw):
                        meth = getattr(this.delegatee, name)
                        return meth(*a, **kw)
                    return delegated
                setattr(cls, name, make_delegated(name))
                implemented.add(name)
        cls.__abstractmethods__ = frozenset(cls.__abstractmethods__ - implemented)
        obj = super(Delegator, cls).__new__(cls, *args, **kwargs)
        obj.delegatee = ConcreteDelegatee()
        return obj

    def __getattr__(self, attrname):
        # Called only for attributes not defined by this class (or its bases).
        # Retrieve the attribute from the delegatee instance.
        return getattr(self.delegatee, attrname)

# All abstract methods are delegated to ConcreteDelegatee
a = Delegator()
print(a.myMethod())   # correctly prints 'myMethod'
print(a.myMethod2())  # correctly prints 'myMethod2'
This solves the main problem (preventing ConcreteDelegatee from forgetting to define myMethod). The other abstract methods are still checked if you forget to implement them.
The __new__ method is in charge of the delegation, which frees your __init__ from having to do it.
Since you use ABCMeta, you must define the abstract methods. One could remove your method from the __abstractmethods__ set, but it is a frozenset; either way, it involves listing all the abstract methods.
So, instead of playing with __getattr__, you can use a simple descriptor.
For instance:
class Delegated(object):
    def __init__(self, attrname=None):
        self.attrname = attrname

    def __get__(self, instance, owner):
        if instance is None:
            return self
        delegatee = instance.delegatee
        return getattr(delegatee, self.attrname)

class Delegator(DelegatorParent):
    def __init__(self):
        self.delegatee = ConcreteDelegatee()
        DelegatorParent.__init__(self)

    myMethod = Delegated('myMethod')
An advantage here: the developer has the explicit information that "myMethod" is delegated.
If you try:
a = Delegator()
result = a.myMethod()
It works! But if you forget to implement myMethod in the Delegator class, you get the classic error:
Traceback (most recent call last):
  File "script.py", line 40, in <module>
    a = Delegator()
TypeError: Can't instantiate abstract class Delegator with abstract methods myMethod
Edit
This implementation can be generalized as follows:
from abc import ABCMeta, abstractmethod

class DelegatorParent():
    __metaclass__ = ABCMeta

    @abstractmethod
    def myMethod1(self):
        pass

    @abstractmethod
    def myMethod2(self):
        pass

    def __init__(self):
        self.a = 'whatever'

class ConcreteDelegatee1():
    def myMethod1(self):
        return 'myMethod1'

class ConcreteDelegatee2():
    def myMethod2(self):
        return 'myMethod2'

class DelegatedTo(object):
    def __init__(self, attrname):
        self.delegatee_name, self.attrname = attrname.split('.')

    def __get__(self, instance, owner):
        if instance is None:
            return self
        delegatee = getattr(instance, self.delegatee_name)
        return getattr(delegatee, self.attrname)

class Delegator(DelegatorParent):
    def __init__(self):
        self.delegatee1 = ConcreteDelegatee1()
        self.delegatee2 = ConcreteDelegatee2()
        DelegatorParent.__init__(self)

    myMethod1 = DelegatedTo('delegatee1.myMethod1')
    myMethod2 = DelegatedTo('delegatee2.myMethod2')

a = Delegator()
result = a.myMethod2()
Here, we can specify the delegatee name and delegatee method.
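For example, given the classes above:

>>> a = Delegator()
>>> a.myMethod1()
'myMethod1'
>>> a.myMethod2()
'myMethod2'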
Here is my current solution. It solves the main problem (preventing ConcreteDelegatee from forgetting to define myMethod), but I'm still not convinced, because I still need to define myMethod inside Delegator, which seems redundant:
from abc import ABCMeta, abstractmethod

class DelegatorParent(object):
    __metaclass__ = ABCMeta

    def __init__(self):
        self.a = 'whatever'

    @abstractmethod
    def myMethod(self):
        pass

class Delegatee(object):
    def checkExistence(self, attrname):
        if not callable(getattr(self, attrname, None)):
            error_msg = "Can't instantiate " + str(self.__class__.__name__) + " without abstract method " + attrname
            raise NotImplementedError(error_msg)

class ConcreteDelegatee(Delegatee):
    def myMethod(self):
        return 'myMethod'

    def myMethod2(self):
        return 'myMethod2'

class Delegator(DelegatorParent):
    def __init__(self):
        self.delegatee = ConcreteDelegatee()
        DelegatorParent.__init__(self)
        for method in DelegatorParent.__abstractmethods__:
            self.delegatee.checkExistence(method)

    def myMethod(self, *args, **kw):
        return self.delegatee.myMethod(*args, **kw)

    def __getattr__(self, attrname):
        # Called only for attributes not defined by this class (or its bases).
        # Retrieve the attribute from the delegatee instance.
        return getattr(self.delegatee, attrname)

# If I forget to implement myMethod inside ConcreteDelegatee,
# the following line will correctly raise an exception saying
# that 'myMethod' is missing inside 'ConcreteDelegatee'.
a = Delegator()
print a.myMethod()   # correctly prints 'myMethod'
print a.myMethod2()  # correctly prints 'myMethod2'

How can I add a delay to every method in a Python sub-class when I don't want to replicate every method in the parent class

My apologies if this question has already been answered somewhere, but if it has I have not been able to locate the answer.
I would like to create a sub-class of a parent class in such a way that there will be a delay (e.g. time.sleep()) before each call to the corresponding parent class method. I would like to do this in such a way that I do not need to replicate each parent class method in the child class. In fact, I would like to have a generic method that would work with virtually any parent class -- so that I do not even need to know all the parent class methods.
The delay would be specified when instantiating the sub-class.
For example:
class Parent():
    ...
    def method1(self):
        ...

    def method2(self):
        ...

class Child(Parent):
    def __init__(self, delay):
        self.delay = delay
        ...

child = Child(1)
A call to child.method1() would result in a 1 second delay before Parent.method1() is called.
I think the previously given answers have not really addressed your specific need to delay ALL methods from the parent class without decorating each of them. You said you do NOT want to have to replicate each parent class method in the child class just to delay them. This answer uses the same delay wrapper as S.Lott's, but also uses a metaclass (http://www.voidspace.org.uk/python/articles/metaclasses.shtml):
#!/usr/bin/env python
from types import FunctionType
import time

def MetaClassFactory(function):
    class MetaClass(type):
        def __new__(meta, classname, bases, classDict):
            newClassDict = {}
            for attributeName, attribute in classDict.items():
                if type(attribute) == FunctionType:
                    attribute = function(attribute)
                newClassDict[attributeName] = attribute
            return type.__new__(meta, classname, bases, newClassDict)
    return MetaClass

def delayed(func):
    def wrapped(*args, **kwargs):
        time.sleep(2)
        func(*args, **kwargs)
    return wrapped

Delayed = MetaClassFactory(delayed)

class MyClass(object):
    __metaclass__ = Delayed

    def a(self):
        print 'foo'

    def b(self):
        print 'bar'
The MetaClassFactory wraps every function in the delayed decorator. If you wanted to make sure certain built-ins like the init function were not delayed, you could just check for that name in the MetaClassFactory and ignore it.
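For instance, a minimal sketch of that name check, assuming the same delayed wrapper as above; it skips dunder names such as __init__ so they are not delayed:

from types import FunctionType

def MetaClassFactory(function):
    class MetaClass(type):
        def __new__(meta, classname, bases, classDict):
            newClassDict = {}
            for attributeName, attribute in classDict.items():
                # leave special methods like __init__ untouched
                if type(attribute) == FunctionType and not attributeName.startswith('__'):
                    attribute = function(attribute)
                newClassDict[attributeName] = attribute
            return type.__new__(meta, classname, bases, newClassDict)
    return MetaClass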
Really, what you have here is a design that involves a Strategy object.
Your best approach is to fix the parent class to include a call to a "delay object". A default delay object does nothing.
This violates the "so that I do not even need to know all the parent class methods" hoped-for feature set.
Method lookup doesn't have a handy __getmethod__ that corresponds to __getattribute__; this gap makes it difficult to tap into Python's internals for method invocation.
import time

class ZeroDelay(object):
    def __call__(self):
        pass

class ShortDelay(ZeroDelay):
    def __init__(self, duration=1.0):
        self.duration = duration

    def __call__(self):
        time.sleep(self.duration)

class Parent(object):
    delay = ZeroDelay()

    def method1(self):
        self.delay()
        ...

    def method2(self):
        self.delay()
        ...

class Child(Parent):
    delay = ShortDelay(1)
EDIT: Of course, you can decorate each method, also.
def delayed(delayer):
    def wrap(a_method):
        def do_delay(*args, **kw):
            delayer()
            return a_method(*args, **kw)
        return do_delay
    return wrap

class Parent(object):
    delay = ZeroDelay()

    # Note: the decorator captures the delay object available at class
    # definition time (Parent.delay), so a subclass overriding `delay`
    # will not change the decorated methods' behaviour.
    @delayed(delay)
    def method1(self):
        ...

    @delayed(delay)
    def method2(self):
        ...
...
S.Lott's solution is a good one. If you need more granularity (i.e. to delay only certain methods, not all of them), you can go with a decorator:
from time import sleep

def delayed(func):
    '''This is the decorator'''
    def wrapped(*args, **kwargs):
        sleep(2)
        func(*args, **kwargs)
    return wrapped

class Example(object):
    @delayed
    def method(self, str):
        print str

e = Example()
print "Brace! I'm delaying!"
e.method("I'm done!")
The idea is that you add @delayed before the definition of the methods you want to delay.
EDIT: Even more granularity: setting an arbitrary delay:
from time import sleep

def set_delay(seconds):
    def delayed(func):
        '''This is the decorator'''
        def wrapped(*args, **kwargs):
            sleep(seconds)
            func(*args, **kwargs)
        return wrapped
    return delayed

class Example(object):
    @set_delay(1)
    def method(self, str):
        print str

    @set_delay(2)
    def method_2(self, str):
        print str

e = Example()
print "Brace! I'm delaying!"
e.method("I'm done!")
e.method_2("I'm also done!")
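Running this, you would expect output along these lines (timings approximate):

Brace! I'm delaying!
I'm done!          <- after roughly 1 second
I'm also done!     <- after roughly 2 more seconds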
You can achieve what you want by using the __getattribute__ method:
import time

class Child(Parent):
    def __init__(self, delay):
        self.delay = delay

    def __getattribute__(self, name):
        attr = object.__getattribute__(self, name)
        if hasattr(attr, '__call__'):
            def proxFct(*args, **kwargs):
                time.sleep(object.__getattribute__(self, "delay"))
                return attr(*args, **kwargs)
            return proxFct
        else:
            return attr
Update: updated according to delnan's comment.
Update 2: updated according to delnan's second comment.
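A hedged usage sketch of the Child class above; the Parent body here is an invented placeholder, not from the question (define Parent before Child):

import time

class Parent(object):
    def method1(self):
        print("Parent.method1 called")  # placeholder body

# Child as defined above
child = Child(1)
child.method1()  # sleeps ~1 second, then prints "Parent.method1 called"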

Inheritance in Python Such That All Base Functions Are Called

Basically, what I want is to do this:
class B:
def fn(self):
print 'B'
class A:
def fn(self):
print 'A'
#extendInherit
class C(A,B):
pass
c=C()
c.fn()
And have the output be
A
B
How would I implement the extendInherit decorator?
This is not a job for decorators. You want to completely change the normal behaviour of a class, so this is actually a job for a metaclass.
import types

class CallAll(type):
    """ MetaClass that adds methods to call all superclass implementations """
    def __new__(meta, clsname, bases, attrs):
        ## collect a list of functions defined on superclasses
        funcs = {}
        for base in bases:
            for name, val in vars(base).iteritems():
                if type(val) is types.FunctionType:
                    if name in funcs:
                        funcs[name].append(val)
                    else:
                        funcs[name] = [val]
        ## now we have all methods, so decorate each of them
        for name in funcs:
            def make_caller(name):
                # bind `name` now; a plain closure would late-bind and make
                # every generated method call the last name in the loop
                def caller(self, *args, **kwargs):
                    """ calls all baseclass implementations """
                    for func in funcs[name]:
                        func(self, *args, **kwargs)
                return caller
            attrs[name] = make_caller(name)
        return type.__new__(meta, clsname, bases, attrs)

class B:
    def fn(self):
        print 'B'

class A:
    def fn(self):
        print 'A'

class C(A, B, object):
    __metaclass__ = CallAll

c = C()
c.fn()
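Expected output (A's implementation runs first because A is listed first in C's bases):

A
B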
A metaclass is a possible solution, but somewhat complex. super can do it very simply (with new-style classes, of course: there's no reason to use legacy classes in new code!):
class B(object):
    def fn(self):
        print 'B'
        try: super(B, self).fn()
        except AttributeError: pass

class A(object):
    def fn(self):
        print 'A'
        try: super(A, self).fn()
        except AttributeError: pass

class C(A, B): pass

c = C()
c.fn()
You need the try/except to support any order of single or multiple inheritance (since at some point there will be no further base along the method-resolution-order, MRO, defining a method named fn, you need to catch and ignore the resulting AttributeError). But as you see, differently from what you appear to think based on your comment to a different answer, you don't necessarily need to override fn in your leafmost class unless you need to do something specific to that class in such an override -- super works fine on purely inherited (not overridden) methods, too!
I personally wouldn't try doing this with a decorator, since with new-style classes and super() the following can be achieved:
>>> class A(object):
... def __init__(self):
... super(A, self).__init__()
... print "A"
...
>>> class B(object):
... def __init__(self):
... super(B, self).__init__()
... print "B"
...
>>> class C(A, B):
... def __init__(self):
... super(C, self).__init__()
...
>>> foo = C()
B
A
I'd imagine method invocations would work the same way.
