Should I be using abstract methods in this Python scenario? - python

I'm not sure my approach is good design and I'm hoping I can get a tip. I'm thinking somewhere along the lines of an abstract method, but in this case I want the method to be optional. This is how I'm doing it now...
from pymel.core import *
class A(object):
def __init__(self, *args, **kwargs):
if callable(self.createDrivers):
self._drivers = self.createDrivers(*args, **kwargs)
select(self._drivers)
class B(A):
def createDrivers(self, *args, **kwargs):
c1 = circle(sweep=270)[0]
c2 = circle(sweep=180)[0]
return c1, c2
b = B()
In the above example, I'm just creating 2 circle arcs in PyMEL for Maya, but I fully intend on creating more subclasses that may or may not have a createDrivers method at all! So I want it to be optional and I'm wondering if my approach is—well, if my approach could be improved?

You still have a problem, when you will inherit your class B, and this will call A.__init__ and if you don't implement createDrivers in the subclass this line callable(self.createDrivers) will throw an error as that createDrivers doesn't exist (AttributeError) i think if i were you i will do it like so:
class A(object):
def __init__(self, *args, **kwargs):
try:
self._drivers = self.createDrivers(*args, **kwargs)
select(self._drivers)
except NotImplementedError:
pass
def createDrivers(self, *args, **kwargs):
raise NotImplementedError("This class wasn't implemented")
class B(A):
def createDrivers(self, *args, **kwargs):
c1 = circle(sweep=270)[0]
c2 = circle(sweep=180)[0]
return c1, c2
class C(A):
pass
Another way is to replace callable(self.createDrivers) by hasattr(self, 'createDrivers').

I would do this:
class A(object):
def __init__(self, *args, **kwargs):
self.createDrivers(*args, **kwargs)
def createDrivers(self, *args, **kwargs):
"Override"
pass
class B(A):
def createDrivers(self, *args, **kwargs):
self._drivers = blabla

If you want createDrivers to be optional but still always there, the best is not an abstract method, but do implement it in the base class as a noop.
class A(object):
def __init__(self, *args, **kwargs):
self._drivers = self.createDrivers(*args, **kwargs)
select(self._drivers)
def createDrivers(self, *args, **kwargs):
"""This should be overridden by subclasses if they need custom drivers"""
pass

Related

Adding hooks to functions in subclassed methods

Given the following simplified code:
from abc import ABC, abstractmethod
class Parent(ABC):
def __init__(self,*args,**kwargs):
self.parent_name = 'SuperClass'
# global hook to run before each subclass run()
def global_pre_run_hook(self):
pass
#abstractmethod
def run(self, *args, **kwargs):
raise NotImplementedError()
class Child(Parent):
def __init__(self,*args,**kwargs):
super().__init__(*args,**kwargs)
self.name = 'ChildClass'
def run(self):
print(f'my parent name is {self.parent_name}')
print(f'my name is {self.name}')
return 22
obj = Child()
result = obj.run()
Is there a way to add functionality so that when the child class run() method is called directly, it first triggers a hook function from the parent class? Assume there is a parent class and a lot of classes that subclass it - would I need to manually add a call global hook() at the beginning of each run() definition for each class that subclasses Parent()? Is there a pythonic way to accomplish this?
There might be a way to do this with a proper decorator, but for the time being I think the cleanest solution you might come up with would be to create your own 'decorator' and manually apply it in the course of Parent.__init__(), which would make sure it takes effect so long as the child class invokes the parent __init__():
from abc import ABC, abstractmethod
def create_hook(func, hook):
def wrapper(*args, **kwargs):
hook()
return func(*args, **kwargs)
return wrapper
class Parent(ABC):
def __init__(self, *args, **kwargs):
self.parent_name = 'SuperClass'
self.run = create_hook(self.run, self.global_pre_run_hook)
# global hook to run before each subclass run()
def global_pre_run_hook(self):
print("Hooked")
#abstractmethod
def run(self, *args, **kwargs):
raise NotImplementedError()
class Child(Parent):
def __init__(self,*args,**kwargs):
super().__init__(*args,**kwargs)
self.name = 'ChildClass'
def run(self):
print(f'my parent name is {self.parent_name}')
print(f'my name is {self.name}')
return 22
obj = Child()
result = obj.run()
# this prints:
# Hooked
# my parent name is SuperClass
# my name is ChildClass
The answer by Green Cloak Guy works, but cannot be pickled! To fix this, we need to move the hook creation into __new__. Also, it's a good idea to make use of functools.wraps in the hook creator.
import pickle
from abc import ABC, abstractmethod
from functools import wraps
def create_hook(func, hook):
#wraps(func)
def wrapper(*args, **kwargs):
hook(*args, **kwargs)
return func(*args, **kwargs)
return wrapper
class Parent(ABC):
def __new__(cls, *args, **kwargs):
cls.run = create_hook(cls.run, cls.global_pre_run_hook)
return super().__new__(cls, *args, **kwargs)
# global hook to run before each subclass run()
def global_pre_run_hook(self, *args, **kwargs):
print("Hooked")
#abstractmethod
def run(self, *args, **kwargs):
raise NotImplementedError()
class Child(Parent):
def run(self):
print(f"my parents are {self.__class__.__mro__}")
print(f"my name is {self.__class__.__name__}")
return 22
obj = Child()
result = obj.run()
pickle.dumps(obj)

Can an __init__ function in a class be added to or altered without overwriting it?

In Python 3.x with the given class hierarchy, where SubClassZero and SubClassOne inherit from SuperClass;
class SuperClass(object):
def __init__(self, *args, **kwargs):
self.thing_zero = "Inherited string."
class SubClassZero(SuperClass):
def __init__(self, *args, **kwargs):
SuperClass.__init__(self, *args, **kwargs)
self.thing_one = "SubClassZero's string."
class SubClassOne(SuperClass):
def __init__(self, *args, **kwargs):
SuperClass.__init__(self, *args, **kwargs)
self.thing_one = "SubClassOne's string."
scz = SubClassZero()
sco = SubClassOne()
If we want to give the subclasses the ability set a title from a kwarg then the __init__ function of SuperClass can be redefined using the the built setattr function like so;
def __init__(self, *args, **kwargs):
self.thing_zero = "Inherited string."
if 'title' in kwargs:
self.title = kwargs['title']
else:
self.title = ""
setattr(SuperClass, '__init__', __init__)
But to do that one has to know the previous structure of __init__ to maintain full functionality. Is there a way to somehow add to the __init__ function of SuperClass without completely overwriting it?

Best python way to return the initializing value of class if of that same class

I have a class that I want it to accept an instance of that same class as initialization; in such case, it will simply return that instance.
The reason is that I want this class to accept a myriad of initialization values and then the proceeding code can use this as an object with known properties, independent on how it was initialized.
I have thought of something like:
class c(object):
def __new__(cls, *args, **kwargs):
if isinstance(args[0], c):
return args[0]
else:
return super(c, cls).__new__(cls, *args, **kwargs)
The problem is that I don't want __init__() to be called when initialized in this manner. Is there any other way?
Thanks!
You probably want to use a factory (f.e. see this question for details or google).
Or just use a class method for what you want, f.e.:
class C(object):
#classmethod
def new(cls, *args, **kwargs):
if isinstance(args[0], cls):
return args[0]
else:
return cls(*args, **kwargs)
obj = C.new()
obj2 = C.new(obj)
The standard way to do this is to simply not do your initialization in __init__. Do it in __new__.
You can use a metaclass
class InstanceReturnMeta(type): # You should probably think of a better name
def __call__(cls, *args, **kwargs):
if args and isinstance(args[0], cls):
return args[0]
instance = cls.__new__(cls, *args, **kwargs)
instance.__init__(*args, **kwargs)
return instance
class Test(object):
__metaclass__ = InstanceReturnMeta
def __init__(self, value):
self.value = value
Let's test it
In [3]: instance1 = Test(0)
In [4]: instance2 = Test(instance1)
In [5]: print id(instance1) == id(instance2)
Out[5]: True
The ids are identical, hence both variables reference the same instance.
P.S. I assume you are on Python 2, since your class explicitly inherits from object.

Do additional operations on object instantation, based on where the object has been instantiated

I have an API class (it will be extended and used by other classes):
class A(object):
def __init__(self, **kwargs):
self.kwargs = kwargs
#classmethod
def create(cls, **kwargs):
return cls(**kwargs)
It is extended by:
class B(A):
pass
Now, this is what I want: if I instantiate class B like B(arg1=1, arg2=2) I would like to do some additional operations when initializing it, like validate the kwargs arguments (and this code should reside in class A, not in B). Then, if I do B.create(arg1=1, arg2=2), that validation should not occur.
In short, I would like to do extra operations when initializing an object only from the outside of the class it was defined; initalizing an object from a classmethod inside its class should not trigger any extra operation.
You can use additional argument to distinguish method of class initialisation:
class A(object):
def __init__(self, __a_validate=True, **kwargs):
self.kwargs = kwargs
if __a_validate:
print 'do validation'
#classmethod
def create(cls, **kwargs):
return cls(__a_validate=False, **kwargs)
Demo:
>>> B.create(arg1=1, arg2=2)
<__main__.B object at 0x9b82f4c>
>>> B(arg1=1, arg2=2)
do validation
<__main__.B object at 0x9b7bbcc>
update for comments:
This is another solution. You can modify class attribute in create method, for example set validate function to None, call constructor and then set validate back to original state:
class A(object):
def __init__(self, **kwargs):
self.kwargs = kwargs
if self.validate:
self.validate(kwargs)
def validate(self, kwargs):
print 'do validation'
#classmethod
def create(cls, **kwargs):
tmp = cls.validate
cls.validate = None
instance = cls(**kwargs)
cls.validate = tmp
return instance
If your "extra operations" are in the constructor then I'm afraid they'll always get run, regardless of whether you instantiate directly or through a factory. You could consider only allowing object creation through a set of factory methods (some with validation and some without).
class A(object):
def __init__(self, **kwargs):
self.kwargs = kwargs
#no extra stuff
#classmethod
def create_with_extra_stuff(cls, **kwargs):
c = cls(**kwargs)
c.extra_stuff()
return c
#classmethod
def create_withuot_extra_stuff(cls, **kwargs):
return cls(**kwargs)
#classmethod
def create(cls, with_extra_stuff = False, **kwargs):
if with_extra_stuff:
return cls.create_with_extra_stuff(**kwargs)
else:
return cls.create_without_extra_stuff(**kwargs)
Of course, I don't know your full use case but factory patterns are pretty much designed for this sort of thing.
This section was added after the comments were made:
class A(object):
def __init__(self, do_extra_stuff = True, **kwargs):
if do_extra_stuff:
self.do_extra_stuff(**kwargs)
self.kwargs = kwargs
#classmethod
def create(cls, **kwargs):
return cls(do_extra_stuff = False, **kwargs)
Further edit showing alternative:
class A(object):
def __init__(self, **kwargs):
#lightweight constructor
self.kwargs = kwargs
def validate(self):
#Does not alter class variables. That would be bad in this case
#Do stuff with self.kwargs
#classmethod
def create(cls, **kwargs):
return cls(**kwargs)
main():
a = A({...})
a.validate()
b = A.create({...})
#b.validate() not called
The point is that the constructor will be called on construction. The trick then is to decide what additional stuff needs to be selective called.

Is this correct usage of super in python?

class SessionWizardView(WizardView):
#classonlymethod
def as_view(cls, *args, **kwargs):
#...snipped..
pass
class ParentWizard(SessionWizardView):
#classonlymethod
def as_view(cls, *args, **kwargs):
return super(SessionWizardView, cls).as_view( ... )
class ChildWizard(ParentWizard):
#classonlymethod
def as_view(cls, *args, **kwargs):
return super(SessionWizardView, cls).as_view( ... )
In ChildWizard, is it legal to pass in a grandparent class (SessionWizardView) into the first parameter of super ? pylint is vomiting this error message:
Method should have "self" as first argument
Bad first argument 'SessionWizardView' given to super class
The usual usage of super for classmethods is to put cls first and the name of the current class as the second argument:
class SessionWizardView(WizardView):
#classonlymethod
def as_view(cls, *args, **kwargs):
#...snipped..
pass
class ParentWizard(SessionWizardView):
#classonlymethod
def as_view(cls, *args, **kwargs):
return super(cls, ParentWizard).as_view( ... )
class ChildWizard(ParentWizard):
#classonlymethod
def as_view(cls, *args, **kwargs):
return super(cls, ChildWizard).as_view( ... )
If you build working code that passes tests, I wouldn't worry to much about how pylint reports the naming of the first argument. pylint knows that a standard Python classmethod will use cls as the first argument, but it doesn't know anything about classonlymethod which has the same pattern.
References:
http://docs.python.org/library/functions.html#super
http://blogs.gnome.org/jamesh/2005/06/23/overriding-class-methods-in-python/
http://pylint-messages.wikidot.com/messages:c0202

Categories

Resources