Python nested init in inheritance

What is the disadvantage of doing it this way in Python?
I want to reduce boilerplate for attributes that are unique to each class but set up by the same line in every class. For example, a logger:
import abc
import logging

def get_logger(name: str) -> logging.Logger:
    return logging.getLogger("myapp.{}".format(name))

class AbstractService(abc.ABC):
    def __init__(self, *args, **kwargs):
        self._logger = get_logger(self.__class__.__name__)
        self._init(*args, **kwargs)

    def _init(self, *args, **kwargs):
        pass
Then we have the actual class, which extends AbstractService. Here _init is called by AbstractService.__init__:
class UserService(AbstractService):
    def _init(self, user_repository):
        self._user_repository = user_repository
A call like self._logger.info(...) in UserService now logs under the correct name, "myapp.UserService".
This helps me a lot with shared dependencies such as a db, queue, etc.
This way the _init arguments can focus on the dependencies the service actually needs.
I use dependency-injector (https://python-dependency-injector.ets-labs.org/) for class instantiation.
But what are the disadvantages? I haven't tested this with deeply nested classes or diamond inheritance.
Is there a better way to call nested inits without writing
def __init__(self, *args, **kwargs):
    super().__init__(*args, **kwargs)

on every class?
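For context, a quick usage sketch of the pattern described above (SomeUserRepository is a hypothetical stand-in for a real dependency):

class SomeUserRepository:  # hypothetical dependency, for illustration only
    pass

service = UserService(user_repository=SomeUserRepository())
service._logger.info("user service ready")  # emitted under "myapp.UserService"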

Related

How to mock a @property method with "self" as parameter?

I'm trying to mock a property on only a specific instance of a class. To simplify the example, I have a Thing which gets initialised with a name, and based on this name the class will load a configuration file in a pre-defined location /conf/{name}_conf.yaml.
When testing, a couple of instances of Thing are created and I just want to override the configuration for one of these.
In the code below I've commented out the initial code that I had to change to make this work:
class Thing():
    def __init__(self, name):
        self.name = name
        # I wasn't able to mock this:
        # self.configuration_name = f'/configuration/{self.name}_configuration.yaml'

    # @property <- nor this
    def configuration_filename(self):
        return f'/configuration/{self.name}_configuration.yaml'
And in my tests, the mock should be able to take as parameter a different configuration file (specific to the test), but only be applied to the instance of Thing named test_thing.
I got it working with the above implementation like this:
configuration_filename_orig = Thing.configuration_filename

def my_patched_configuration_filename(self, configuration_filename, *args, **kwargs):
    # only override for the instance named 'test_thing'
    if self.name == 'test_thing':
        return configuration_filename
    else:
        return configuration_filename_orig(self, *args, **kwargs)
Then I can "inject" a custom test configuration file for each test class like this:
from functools import partial
from test.utils import my_patched_configuration_filename
...

@patch.object(Thing, 'configuration_filename', autospec=True,
              side_effect=partial(my_patched_configuration_filename,
                                  configuration_filename='testdata/test_1.yaml'))
class ConfigurationTests(TestCase):
    def test_1(self, mocked_conf):
        # test something
        pass

    def test_2(self, mocked_conf):
        # test something else
        pass

@patch.object(Thing, 'configuration_filename', autospec=True,
              side_effect=partial(my_patched_configuration_filename,
                                  configuration_filename='testdata/test_2.yaml'))
class ConfigurationTestsAdvanced(TestCase):
    def test_1(self, mocked_conf):
        # test something
        pass

    def test_2(self, mocked_conf):
        # test something else
        pass
Now... this works, but I wonder if there's a way to do something similar with a real property on the Thing class (either with the @property decorator or with the property initialised in the __init__ method).
I've spent a couple of hours trying different things... but the main issue seems to be that using return_value doesn't pass the self argument to the mock, so I can't use it.
Any idea?
OK, there might be a better way, but I got this working as follows:
I can use the @property decorator on my class; that's what I want to mock:
class Thing():
    def __init__(self, name):
        self.name = name

    @property
    def configuration_filename(self):
        return f'/configuration/{self.name}_configuration.yaml'
I create a new mock class, based on Mock:
from unittest.mock import Mock

# first copy the original getter
configuration_filename_orig = Thing.configuration_filename.__get__

class ConfigurationPropertyMock(Mock):
    # and here, add the `self` to the args
    def __get__(self, obj, obj_type=None, *args, **kwargs):
        return self(obj, obj_type, *args, **kwargs)

def patched_filename(self, *args, **kwargs):
    configuration_filename = kwargs.pop('configuration_filename')
    # only override for the instance named 'test_thing'
    if self.name == 'test_thing' and configuration_filename:
        return configuration_filename
    else:
        return configuration_filename_orig(self, *args, **kwargs)
And I patch the test class where I can pass a custom configuration_filename:
from functools import partial
from unittest.mock import patch
from tests.utils import ConfigurationPropertyMock, patched_filename
...

@patch('somewhere.Thing.configuration_filename',
       new_callable=ConfigurationPropertyMock,
       side_effect=partial(patched_filename, configuration_filename='test_conf.yaml'))
class YAMLApiConfigurationTests(TestCase):
    def test_api_configuration_document(self, mocked_conf):
        # test here, the test configuration is loaded
        pass
Voilà :)
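For reference, the reason a custom mock class is needed at all: the stock unittest.mock.PropertyMock discards the instance in its descriptor protocol, so a side_effect can never see which Thing it was accessed on. Roughly (a simplified sketch, not the actual source):

from unittest.mock import Mock

class SimplifiedPropertyMock(Mock):
    def __get__(self, obj, obj_type=None):
        return self()  # `obj` is dropped here, so no per-instance dispatch is possible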

How to use super() in this complex multiple inheritance situation?

I'm writing a library that provides subclasses of each of two existing base classes with extra functionality.
Rather than explain the arrangement in words, here's a diagram (not reproduced here) and minimal code:
class Base0:
    pass

class Base1(Base0):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.foo = something()

class Base2(Base0):
    pass

class Mixin:
    def __init__(self, bar):
        self.bar = bar
    # More code

class Child1(Base1, Mixin):
    def __init__(self, *args, **kwargs):
        Base1.__init__(self, *args, **kwargs)
        Mixin.__init__(self, some_function_of(self.foo))

class Child2(Base2, Mixin):
    def __init__(self, *args, **kwargs):
        Base2.__init__(self, *args, **kwargs)
        Mixin.__init__(self, something_else())
The Base classes are outside my control. I wrote the Mixin and Child classes. Users of my library will subclass the Child classes, so it's very important that the inheritance be sane and correct.
What I'd like to do is use super().__init__ in the Child classes rather than explicitly invoking the Base and Mixin initializers. The reason this is nontrivial is that in Child1, the value passed to the Mixin initializer can't be determined until after the Base1 initializer has run.
What is the simplest/sanest way to set this up?
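For what it's worth, inspecting the MRO of the minimal code above shows why a plain cooperative super() chain is awkward here (a sketch against the classes as defined):

print([c.__name__ for c in Child1.__mro__])
# ['Child1', 'Base1', 'Base0', 'Mixin', 'object']
# Mixin sits after Base0 in the linearization, so a super().__init__ chain
# started from Child1 would reach Mixin.__init__ with Base1's leftover
# arguments, and before Child1 could compute some_function_of(self.foo) --
# which is why the explicit Base1.__init__ / Mixin.__init__ calls are there.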

How to defer execution of a function?

I am implementing the classes in Python 2.7 as below:
class MyClass(object):
    def do_something(self):
        """
        do something here
        """

class MyClassManager(object):
    def get_my_class_obj(self):
        """read my_class_obj from db"""
        return instance  # instance has type MyClass

class MyClassUser(object):
    my_class_obj = MyClass()  # this is a class variable
In main:
MyClassUser.my_class_obj = MyClassManager().get_my_class_obj()
"""
do a lot of different things else in main
"""
From somewhere else:
"""only when some condition happens"""
MyClassUser.my_class_obj.do_something()
Is there a way I can defer the read (from the db, inside get_my_class_obj) in MyClassManager until obj.do_something is actually invoked? I do have to call MyClassManager.get_my_class_obj for some setup at the beginning. Suppose this is in the context of a web server: do_something will only be invoked when some request arrives, but I need to set things up first.
A very quick & dirty, dumbed-down possible solution (and certainly not how I'd design this, but anyway). This example assumes your object only has an id (db primary key) and a name (stored in db).
import functools

class MyClassManager(object):
    def get(self, object_id):
        return MyClass(object_id)

    def load(self, obj):
        obj._name = self.get_from_db(obj.id)  # get_from_db left undefined here
        obj._loaded = True

def autoload(func):
    @functools.wraps(func)
    def wrapper(self, *args, **kw):
        self._load()
        return func(self, *args, **kw)
    return wrapper

class MyClass(object):
    def __init__(self, id, name=None):
        self.id = id
        self._name = name
        self._loaded = name is not None

    def _load(self):
        if self._loaded:
            return
        MyClassManager().load(self)

    @property
    @autoload
    def name(self):
        return self._name

    @autoload
    def do_something(self):
        # your code here
        pass
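A hypothetical usage sketch of the above (it assumes get_from_db exists on the manager, which the snippet leaves undefined):

obj = MyClassManager().get(42)  # cheap: no db read happens yet
obj.do_something()              # first use triggers _load() before running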
But really, don't do this if you're a beginner (and you wouldn't ask if you were not); use an existing, working ORM instead. Unless it's for educational purposes of course, in which case, well, you might either learn a lot or give up in despair, or both ;-)

Python multiple class hierarchy __init__ not being executed

I'm using django, but this is rather a generic python question.
I have defined a class that I intend to use to extend the ModelForm and Form classes, from django.forms.
The code looks like this:
class FormMixin(object):
    def __init__(self, *args, **kwargs):
        """ every method occurrence must call super """
        super(FormMixin, self).__init__(*args, **kwargs)
        self.new_attr = 'This is an attribute'

class ModelFormAdapter(forms.ModelForm):
    """ I use this class so __init__ signatures match """
    def __init__(self, *args, **kwargs):
        """ every method occurrence must call super """
        super(ModelFormAdapter, self).__init__(*args, **kwargs)

class BaseModelForm(ModelFormAdapter, FormMixin):
    def __init__(self, *args, **kwargs):
        """ BaseModelForm never gets the attribute new_attr """
        super(BaseModelForm, self).__init__(*args, **kwargs)
I have even debugged this, and FormMixin's __init__ method is never called. What am I doing wrong? What I want to achieve is to add some attributes to the form and preprocess field labels and CSS classes.
That's because one of ModelFormAdapter's ancestors (BaseForm) doesn't call super, so the chain breaks before it reaches FormMixin. Put FormMixin first in the parent list.
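A minimal sketch of that fix, using the same classes as above:

class BaseModelForm(FormMixin, ModelFormAdapter):
    def __init__(self, *args, **kwargs):
        # MRO is now BaseModelForm -> FormMixin -> ModelFormAdapter -> ...,
        # so FormMixin.__init__ runs (and its super() call keeps the chain
        # going) before the non-cooperative Django base can break it.
        super(BaseModelForm, self).__init__(*args, **kwargs)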

Injecting function call after __init__ with decorator

I'm trying to find the best way to create a class decorator that does the following:
Injects a few functions into the decorated class
Forces a call to one of these functions AFTER the decorated class' __init__ is called
Currently, I'm just saving off a reference to the 'original' __init__ method and replacing it with my __init__ that calls the original and my additional function. It looks similar to this:
orig_init = cls.__init__

def new_init(self, *args, **kwargs):
    """
    'Extend' wrapped class' __init__ so we can attach to all signals
    automatically
    """
    orig_init(self, *args, **kwargs)
    self._debugSignals()

cls.__init__ = new_init
Is there a better way to 'augment' the original __init__ or inject my call somewhere else? All I really need is for my self._debugSignals() to be called sometime after the object is created. I also want it to happen automatically, which is why I thought after __init__ was a good place.
Extra misc. decorator notes
It might be worth mentioning some background on this decorator. You can find the full code here. The point of the decorator is to automatically attach to any PyQt signals and print when they are emitted. The decorator works fine when I decorate my own subclasses of QtCore.QObject, however I've been recently trying to automatically decorate all QObject children.
I'd like to have a 'debug' mode in the application where I can automatically print ALL signals just to make sure things are doing what I expect. I'm sure this will result in TONS of debug, but I'd still like to see what's happening.
The problem is my current version of the decorator is causing a segfault when replacing QtCore.QObject.__init__. I've tried to debug this, but the code is all SIP generated, which I don't have much experience with.
So, I was wondering if there was a safer, more pythonic way to inject a function call AFTER the __init__ and hopefully avoid the segfault.
Based on this post and this answer, an alternative way to do this is through a custom metaclass. This would work as follows (tested in Python 2.7):
# define a new metaclass which overrides the "__call__" function
class NewInitCaller(type):
    def __call__(cls, *args, **kwargs):
        """Called when you call MyNewClass() """
        obj = type.__call__(cls, *args, **kwargs)
        obj.new_init()
        return obj

# then create a new class with the __metaclass__ set as our custom metaclass
class MyNewClass(object):
    __metaclass__ = NewInitCaller

    def __init__(self):
        print "Init class"

    def new_init(self):
        print "New init!!"

# when you create an instance
a = MyNewClass()
>>> Init class
>>> New init!!
The basic idea is that:
- when you call MyNewClass(), Python looks up the metaclass and finds NewInitCaller;
- the metaclass's __call__ function is invoked;
- it creates the MyNewClass instance using type;
- the instance runs its own __init__ (printing "Init class");
- the metaclass then calls the new_init function of the instance.
Here is the solution for Python 3.x, based on this post's accepted answer. Also see PEP 3115 for reference, I think the rationale is an interesting read.
Changes in the example above are shown with comments; the only real change is the way the metaclass is defined, all other are trivial 2to3 modifications.
# define a new metaclass which overrides the "__call__" function
class NewInitCaller(type):
    def __call__(cls, *args, **kwargs):
        """Called when you call MyNewClass() """
        obj = type.__call__(cls, *args, **kwargs)
        obj.new_init()
        return obj

# then create a new class with the metaclass passed as an argument
class MyNewClass(object, metaclass=NewInitCaller):  # added argument
    # the __metaclass__ attribute is removed; in Python 3 it would have no effect

    def __init__(self):
        print("Init class")  # function, not statement

    def new_init(self):
        print("New init!!")  # function, not statement

# when you create an instance
a = MyNewClass()
>>> Init class
>>> New init!!
Here's a generalized form of jake77's example which implements __post_init__ on a non-dataclass. This enables a subclass's configure() to be automatically invoked in correct sequence after the base & subclass __init__s have completed.
# define a new metaclass which overrides the "__call__" function
class PostInitCaller(type):
    def __call__(cls, *args, **kwargs):
        """Called when you call BaseClass() """
        print(f"{__class__.__name__}.__call__({args}, {kwargs})")
        obj = type.__call__(cls, *args, **kwargs)
        obj.__post_init__(*args, **kwargs)
        return obj

# then create a new class with the metaclass passed as an argument
class BaseClass(object, metaclass=PostInitCaller):
    def __init__(self, *args, **kwargs):
        print(f"{__class__.__name__}.__init__({args}, {kwargs})")
        super().__init__()

    def __post_init__(self, *args, **kwargs):
        print(f"{__class__.__name__}.__post_init__({args}, {kwargs})")
        self.configure(*args, **kwargs)

    def configure(self, *args, **kwargs):
        print(f"{__class__.__name__}.configure({args}, {kwargs})")

class SubClass(BaseClass):
    def __init__(self, *args, **kwargs):
        print(f"{__class__.__name__}.__init__({args}, {kwargs})")
        super().__init__(*args, **kwargs)

    def configure(self, *args, **kwargs):
        print(f"{__class__.__name__}.configure({args}, {kwargs})")
        super().configure(*args, **kwargs)

# when you create an instance
a = SubClass('a', b='b')
running gives:
PostInitCaller.__call__(('a',), {'b': 'b'})
SubClass.__init__(('a',), {'b': 'b'})
BaseClass.__init__(('a',), {'b': 'b'})
BaseClass.__post_init__(('a',), {'b': 'b'})
SubClass.configure(('a',), {'b': 'b'})
BaseClass.configure(('a',), {'b': 'b'})
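Incidentally, this post-init hook is exactly what dataclasses provide natively: if a dataclass defines __post_init__, the generated __init__ calls it automatically. A minimal illustration:

from dataclasses import dataclass

@dataclass
class Point:
    x: int
    y: int

    def __post_init__(self):
        # called automatically at the end of the generated __init__
        print("post-init for ({}, {})".format(self.x, self.y))

Point(1, 2)  # prints: post-init for (1, 2)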
I know that the metaclass approach is the pro way, but here's a more readable and simpler proposal using @staticmethod:
class Invites(TimestampModel, db.Model):
    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    invitee_email = db.Column(db.String(128), nullable=False)

    def __init__(self, invitee_email):
        self.invitee_email = invitee_email

    @staticmethod
    def create_invitation(invitee_email):
        """
        Create an invitation,
        save it and fetch it back, because the id
        is being generated in the DB
        """
        invitation = Invites(invitee_email)
        db.session.add(invitation)
        db.session.commit()
        return Invites.query.filter(
            Invites.invitee_email == invitee_email
        ).one_or_none()
So I could use it this way:
invitation = Invites.create_invitation("jim@mail.com")
print(invitation.id, invitation.invitee_email)
>>>> 1 jim@mail.com
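A small design note: the same factory could also be written as a @classmethod, which uses cls instead of hard-coding the class name and so behaves sensibly under subclassing (a sketch, under the same assumptions as the snippet above):

    # inside the Invites class from above
    @classmethod
    def create_invitation(cls, invitee_email):
        invitation = cls(invitee_email)
        db.session.add(invitation)
        db.session.commit()
        return cls.query.filter(
            cls.invitee_email == invitee_email
        ).one_or_none()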
