Base class with abstract method _conn()
from cached_property import cached_property
class Base:
    def __init__(self, conn_id):
        """Store secrets details"""
        self.conn_id = conn_id

    @cached_property
    def _conn(self):
        """Fetch secrets and generate authenticated client"""
        raise NotImplementedError

    def get_conn(self):
        """Return authenticated client"""
        return self._conn
Parent classes
from cached_property import cached_property
class Parent(Base):
    @cached_property
    def _conn(self):
        """Generate authenticated client specific to Parent"""
        client = ...
        return client
Child classes
from typing import List, Optional
from pydantic.dataclasses import dataclass
@dataclass
class Toy:
    name: str
    type: str

    def get_color(self) -> str:
        color = self.get_conn().get_toy_color(...)
        return color


@dataclass
class Child:
    first_name: str
    last_name: str
    ...

    def list_all_toys(self) -> List[Toy]:
        all_toys = self.get_conn().fetch_all_toys(...)
        return [Toy(name=x.toy_name, type=x.toy_type) for x in all_toys]

    def get_favorite_toy(self) -> Optional[Toy]:
        favorite_toy = self.get_conn().fetch_favorite_toy(...)
        if not favorite_toy:
            return None
        return Toy(name=favorite_toy.toy_name, type=favorite_toy.toy_type)
(Ideal) Usage
parent = Parent(conn_id='my-secret-connection-details')
child_1 = parent.Child(first_name='John', last_name='Doe')
for each_toy in child_1.list_all_toys():
print(f"{child_1.first_name}'s toy {each_toy.name} is a {each_toy.get_color()} {each_toy.type}.")
# John's toy Teddy is a white stuffed bear.
Important notes
The parent class can be an ordinary Python class, but the child classes should be Python/Pydantic dataclasses.
The goal of using the same get_conn() method is to reduce the number of attempts to fetch credentials and to authenticate.
I've thought about solving this with a @classmethod on the child classes that returns an authenticated instance. It seemed promising until I realized that dataclasses don't allow you to modify their __init__ method. For example:
from typing import Callable, Optional
from pydantic.dataclasses import dataclass
@dataclass
class Toy:
    ...

    @classmethod
    def generate_with_connection(cls, connection: Callable, *args, **kwargs):
        return cls(*args, **kwargs, connection=connection)  # Requires logic in __init__ to handle `connection`.


@dataclass
class Child:
    ...

    def get_favorite_toy(self) -> Optional[Toy]:
        favorite_toy = self.get_conn().fetch_favorite_toy(...)
        if not favorite_toy:
            return None
        return Toy.generate_with_connection(
            connection=self.get_conn,
            name=favorite_toy.toy_name,
            type=favorite_toy.toy_type,
        )
Questions
How do you link the parent class and multiple child classes to ensure that each child class can access the same get_conn() method of the parent class? My first guess would be inheritance, but I don't think it solves the next question. Are there other ways, perhaps using the inspect/traceback modules?
How do we ensure that the methods of each child class can return instances of other child classes that also have access to the same get_conn() method of the parent class? For example: Child.get_favorite_toy() should return an instance of Toy which can successfully run Toy.get_color() using the same get_conn() method.
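To make the second question concrete, here is a rough, hypothetical sketch of the behaviour I'm after, written with standard-library dataclasses and an explicit get_conn field purely for illustration (the child() factory and the get_conn field are made up for this sketch and reuse Base and cached_property from the code above; none of this is my real code):
from dataclasses import dataclass, field
from typing import Callable, Optional

@dataclass
class Toy:
    name: str
    type: str
    # Hypothetical: carry a reference to the parent's get_conn so get_color() can authenticate.
    get_conn: Optional[Callable] = field(default=None, repr=False, compare=False)

    def get_color(self) -> str:
        return self.get_conn().get_toy_color(...)

@dataclass
class Child:
    first_name: str
    last_name: str
    get_conn: Optional[Callable] = field(default=None, repr=False, compare=False)

    def get_favorite_toy(self) -> Optional[Toy]:
        favorite_toy = self.get_conn().fetch_favorite_toy(...)
        if not favorite_toy:
            return None
        # Hand the same get_conn to the returned Toy so it reuses the same authenticated client.
        return Toy(name=favorite_toy.toy_name, type=favorite_toy.toy_type, get_conn=self.get_conn)

class Parent(Base):
    @cached_property
    def _conn(self):
        """Generate authenticated client specific to Parent"""
        client = ...
        return client

    def child(self, **kwargs) -> Child:
        # Hypothetical factory: inject this parent's get_conn into every Child it creates.
        return Child(get_conn=self.get_conn, **kwargs)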
Related
I want to create instances of several classes at runtime, based on configuration xml files which contain the specific class type and other stuff. Let's start with a simple example:
class ParentClass:
    @staticmethod
    def create_subclass(class_type: str, xml):
        if class_type == 'childA':
            return ChildClassA(xml)
        elif class_type == 'childB':
            return ChildClassB(xml)
        else:
            return ParentClass(xml)

    def __init__(self, xml):
        print('\nParent stuff done.')

class ChildClassA(ParentClass):
    def __init__(self, xml):
        super().__init__(xml)
        print('Class A stuff done.')

class ChildClassB(ParentClass):
    def __init__(self, xml):
        super().__init__(xml)
        print('Class B stuff done.')

ParentClass.create_subclass('childA', None)
ParentClass.create_subclass('childB', None)
ParentClass.create_subclass('unknown', None)
The output is as expected:
Parent stuff done.
Class A stuff done.
Parent stuff done.
Class B stuff done.
Parent stuff done.
As the hierarchy grows larger I want to have the parent class in a separate module and each child class in a separate module. Obviously, I need to import the parent module into the child-class modules. Because of the create_subclass method, the parent-class module also needs to know about the child classes, and importing those into the parent module would cause a circular import, which is considered bad design.
What would be a good solution to solve this issue? Are there better ways to create the subclass instances?
You can use some form of Registry:
from typing import Dict
class Registry:
    _instance: 'Registry' = None

    def __init__(self):
        self._classes: Dict[str, type] = {}
        self._default: type = object

    @classmethod
    def get_instance(cls) -> 'Registry':
        if cls._instance is None:
            cls._instance = Registry()
        return cls._instance

    def register(self, name: str, cls: type) -> None:
        self._classes[name] = cls

    def set_default(self, cls: type) -> None:
        self._default = cls

    def access(self, name: str) -> type:
        cls = self._classes.get(name)
        if cls is None:
            return self._default
        return cls
then you can set the parent class as its default class.
from .registry import Registry
class ParentClass:
    def __init__(self, xml):
        print('\nParent stuff done.')
registry = Registry.get_instance()
registry.set_default(ParentClass)
and submit child classes to your registry after defining them.
from .registry import Registry
from .parent import ParentClass  # adjust the import path to wherever ParentClass is defined

class ChildClassA(ParentClass):
    def __init__(self, xml):
        super().__init__(xml)
        print('Class A stuff done.')
registry = Registry.get_instance()
registry.register('childA', ChildClassA)
from .registry import Registry
from .parent import ParentClass  # adjust the import path to wherever ParentClass is defined

class ChildClassB(ParentClass):
    def __init__(self, xml):
        super().__init__(xml)
        print('Class B stuff done.')
registry = Registry.get_instance()
registry.register('childB', ChildClassB)
Because your registry doesn't depend on your classes, you can use it without worrying about circular imports. Notice that the access method returns a type, so you must call the constructor yourself.
from .registry import Registry
registry = Registry.get_instance()
registry.access('childA')(xml=None)
# Parent stuff done.
# Class A stuff done.
registry.access('childB')(xml=None)
# Parent stuff done.
# Class B stuff done.
registry.access('unknown')(xml=None)
# Parent stuff done.
You can also access subclasses and their names dynamically (as answered here), but since you didn't use subclass names as your keys when creating subclasses, you probably need some mapping between names and subclasses. If you want your subclasses to tell your parent class the name they wish to be called by, you end up implementing the Registry in your parent class again.
accessing subclass names:
print([cls.__name__ for cls in ParentClass.__subclasses__()])
# ['ChildClassA', 'ChildClassB']
accessing subclasses themselves:
print(ParentClass.__subclasses__())
# [<class '__main__.ChildClassA'>, <class '__main__.ChildClassB'>]
I would like to implement an abstract method from my base class through a second inherited class.
Since I can imagine that my question, or even this sentence, is confusing, here is an example of what I am trying to do:
Imagine the following BaseClass containing two abstract methods: process and validate_input.
from abc import ABC, abstractmethod

class BaseClass(ABC):
    """
    Base class for all services
    """

    def __init__(self) -> None:
        pass

    @abstractmethod
    def validate_input(self, payload: dict) -> None:
        """
        validates the payload
        """
        pass

    @abstractmethod
    def process(self, payload: dict) -> dict:
        """
        processes the payload
        """
        pass
As you can tell, there are two methods in the BaseClass that need to be defined by the concrete process classes. I now want to define the process method in the Process1 class, which will inherit from the BaseClass. However, I also need to define the validate_input method, which can be the same for different processes. Therefore I thought of solving this using multiple inheritance.
In my case I would like to create a second Validation1 class that contains a certain validation method, for example to check if there is a key in the payload.
import typing
from abc import ABC

# ValidationException is defined elsewhere in the project.

class Validation1(ABC):
    """
    Validates if the payload is valid given a certain method, e.g. if the payload contains a certain key
    """

    def validate_input(self, payload_dict: typing.Dict[str, typing.Any]) -> None:
        """
        check if dict contains a certain key
        """
        if not payload_dict:
            raise ValidationException("payload is empty")
        if not payload_dict.get("key"):
            raise ValidationException("payload does not contain key")
So now I would like to inherit from both to define my Process1.
class Process1(BaseClass, Validation1):
    """
    Base class for processing the payload
    """

    def process(self, payload: typing.Dict) -> typing.Dict:
        """
        Process the payload
        """
        processed_payload = payload
        return processed_payload
I am, however, quite unsure whether my approach is even a good way to solve this problem. Furthermore, Pylint already shows the following warning:
E0110: Abstract class 'Process1' with abstract methods instantiated (abstract-class-instantiated)
Appreciate any solution.
Your example works if you reverse the order of your base classes:
class Process1(Validation1, BaseClass):
    ...
The reason has to do with Python's method resolution order: with the base-class order you defined, Process1.validate_input() resolves to the abstract BaseClass.validate_input() rather than to the concrete implementation from Validation1, so Process1 is still considered abstract.
I would further remove the ABC parent from Validation1, as I consider that class a mixin. Here is a fully working minimal example:
from abc import ABC, abstractmethod
class BaseClass(ABC):
    def __init__(self):
        pass

    @abstractmethod
    def validate(self):
        pass

    @abstractmethod
    def process(self):
        pass

class ValidationMixin:
    def validate(self):
        pass

class Processor(ValidationMixin, BaseClass):
    def process(self):
        pass
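With that order Processor implements both abstract methods and can be instantiated; a quick check (sketch) of the MRO shows why the mixin's validate() is the one that gets picked up:
print(Processor.__mro__)
# Processor -> ValidationMixin -> BaseClass -> ABC -> object

processor = Processor()  # no TypeError: both abstract methods are satisfied
processor.validate()
processor.process()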
I have a situation where I have two convenient methods for finding and initialising classes from an entry point, and I want to distinguish what they do from one another.
The following is a simplified version of my problem
class ToBeExtended:
    """ Abstract base class that is to be extended """
    pass

def find(classname: str) -> ToBeExtended:
    """ Find the class definition of the class named `classname` from the entry points """
    ...  # Get the class
    return ClassDefinition

def connect(classname: str, *args, **kwargs) -> ToBeExtended:
    """ Find and initialise the class with the passed arguments and return """
    return find(classname)(*args, **kwargs)
find in this example returns a class object, not an instance. Is there a way to express that in the signature so a linter or user gets that context? There doesn't seem to be anything in typing for it, and I'd like the returned class to remain identifiable, so nothing as loose as -> type:
You can annotate the return type with typing.Type to say that find returns the class itself rather than an instance:
import typing

class ToBeExtended:
    """ Abstract base class that is to be extended """
    pass

def find(classname: str) -> typing.Type[ToBeExtended]:
    """ Find the class definition of the class named `classname` from the entry points """
    ...  # Get the class
    return ClassDefinition

def connect(classname: str, *args, **kwargs) -> ToBeExtended:
    """ Find and initialise the class with the passed arguments and return """
    return find(classname)(*args, **kwargs)
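With those annotations a type checker can tell the two apart; for example (MyService is just an illustrative name for one of the registered classes):
service_cls = find("MyService")   # checker infers Type[ToBeExtended]: a class you can call
service = connect("MyService")    # checker infers ToBeExtended: an instance

# On Python 3.9+ the built-in generic is equivalent:
# def find(classname: str) -> type[ToBeExtended]: ...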
I want to create a class that has some nested class that defines some contract in Python. A tenable example is a typed config object. My attempt at this is below:
from typing import Mapping
from abc import ABCMeta, abstractmethod
class BaseClass(metaclass=ABCMeta):
    # If you want to implement BaseClass, you must also implement BaseConfig
    class BaseConfig(metaclass=ABCMeta):
        @abstractmethod
        def to_dict(self) -> Mapping:
            """Converts the config to a dictionary"""
But unfortunately I can instantiate a subclass of BaseClass without implementing BaseConfig:
class Foo(BaseClass):
    pass

if __name__ == "__main__":
    foo = Foo()
Is there some way to enforce that a subclass must implement an inner class, too?
It doesn't seem like this is currently possible. The closest thing is to create two abstract classes (corresponding to outer and inner classes) and to force the implementer to provide the cls constructor for the concrete inner class; e.g.:
from abc import ABCMeta, abstractmethod
class Inner(metaclass=ABCMeta):
    @abstractmethod
    def __str__(self):
        pass

class Outer(metaclass=ABCMeta):
    inner_cls = Inner

    def shout(self, *args, **kwargs):
        inner = self.inner_cls(*args, **kwargs)
        print(f"My inner is {inner}!!!")

class FooInner(Inner):
    def __str__(self):
        return "FooInner"

class FooOuter(Outer):
    inner_cls = FooInner
This requires Inner to have at least one abstractmethod; otherwise it can be instantiated as a default inner_cls implementation.
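A quick runtime check of the sketch above:
FooOuter().shout()
# My inner is FooInner!!!

Outer().shout()
# TypeError: Can't instantiate abstract class Inner ...
# (Outer itself can be constructed; the contract only bites when the default inner_cls is instantiated.)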
When I define a class, I like to include type checking (using assert) of the input variables. I am now defining a 'specialized' class Rule which inherits from an abstract base class (ABC) BaseRule, similar to the following:
import abc
class BaseRule(object):
    __metaclass__ = abc.ABCMeta

    @abc.abstractproperty
    def resources(self):
        pass

class Rule(BaseRule):
    def __init__(self, resources):
        assert all(isinstance(resource, Resource) for resource in resources)  # type checking
        self._resources = resources

    @property
    def resources(self):
        return self._resources

class Resource(object):
    def __init__(self, domain):
        self.domain = domain

if __name__ == "__main__":
    resources = [Resource("facebook.com")]
    rule = Rule(resources)
The assert statement in the __init__ function of the Rule class ensures that the resources input is a list (or other iterable) of Resource objects. However, this would also be the case for other classes which inherit from BaseRule, so I would like to incorporate this assertion in the abstractproperty somehow. How might I go about this?
See the mypy documentation on abstract base classes and type annotations: https://mypy.readthedocs.io/en/latest/class_basics.html#abstract-base-classes-and-multiple-inheritance
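A minimal sketch of that static-typing route (the Sequence annotation is just one reasonable choice): annotate the abstract property and let mypy, rather than a runtime assert, flag callers that pass anything other than Resource objects.
import abc
from typing import Sequence

class BaseRule(abc.ABC):
    @property
    @abc.abstractmethod
    def resources(self) -> Sequence["Resource"]:
        ...

class Rule(BaseRule):
    def __init__(self, resources: Sequence["Resource"]) -> None:
        self._resources = resources

    @property
    def resources(self) -> Sequence["Resource"]:
        return self._resources

class Resource:
    def __init__(self, domain: str) -> None:
        self.domain = domain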
Make your base class have a non-abstract property that calls separate abstract getter and setter methods. The property can do the validation you want before calling the setter. Other code (such as the __init__ method of a derived class) that wants to trigger the validation can do so by doing its assignment via the property:
import abc

class BaseRule(object):
    __metaclass__ = abc.ABCMeta

    @property
    def resources(self):  # this property isn't abstract and shouldn't be overridden
        return self._get_resources()

    @resources.setter
    def resources(self, value):
        assert all(isinstance(resource, Resource) for resource in value)
        self._set_resources(value)

    @abc.abstractmethod
    def _get_resources(self):  # these methods should be, instead
        pass

    @abc.abstractmethod
    def _set_resources(self, value):
        pass

class Rule(BaseRule):
    def __init__(self, resources):
        self.resources = resources  # assign via the property to get type-checking!

    def _get_resources(self):
        return self._resources

    def _set_resources(self, value):
        self._resources = value
You might even consider moving the __init__ method from Rule into the BaseRule class, since it doesn't need any knowledge about Rule's concrete implementation.
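That variation might look roughly like this (a sketch of the same pattern, with the shared __init__ hoisted into BaseRule):
import abc

class BaseRule(object):
    __metaclass__ = abc.ABCMeta

    def __init__(self, resources):
        self.resources = resources  # validated through the property below

    @property
    def resources(self):
        return self._get_resources()

    @resources.setter
    def resources(self, value):
        assert all(isinstance(resource, Resource) for resource in value)
        self._set_resources(value)

    @abc.abstractmethod
    def _get_resources(self):
        pass

    @abc.abstractmethod
    def _set_resources(self, value):
        pass

class Rule(BaseRule):
    # No __init__ needed any more; BaseRule handles validation and assignment.
    def _get_resources(self):
        return self._resources

    def _set_resources(self, value):
        self._resources = value

class Resource(object):
    def __init__(self, domain):
        self.domain = domain

rule = Rule([Resource("facebook.com")])  # passes the assert in the shared __init__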