Property setter not working in Python class

I am using a class (MainClass) over which I have no control. I want to base my class on MainClass but add extra functionality. I have added an attribute (index) to my class (SuperClass), but when I try to convert index to a property, the @index.setter seems to be ignored. What is wrong here?
class MainClass(object):
    def __init__(self):
        self.name = 'abc'


class SuperClass(object):
    def __init__(self, main, *args, **kwargs):
        super(SuperClass, self).__init__(*args, **kwargs)
        self.__main = main
        self._index = 0

    def __getattr__(self, attr):
        return getattr(self.__main, attr)

    def __setattr__(self, attr, val):
        if attr == '_SuperClass__main':
            object.__setattr__(self, attr, val)
        return setattr(self.__main, attr, val)

    @property
    def index(self):
        return self._index

    @index.setter
    def index(self, value):
        self._index = value


main_object = MainClass()
super_object = SuperClass(main_object)

print('x', super_object.index, super_object.name)
super_object.index = 3
print('y', super_object.index)
super_object.index += 2
print('z', super_object.index)

__getattr__ is only used when the normal lookup mechanism fails. __setattr__, however, is called for every attempt to set an attribute. This means your current definition creates an attribute named index on the MainClass instance rather than invoking the property's setter:
>>> super_object._SuperClass__main.index
2
Because __setattr__ always ends by calling setattr(self.__main, attr, val), the write goes to the MainClass instance while the property getter keeps returning the untouched _index, so += effectively behaves like a plain assignment.
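A minimal illustration of the lookup asymmetry described above (my own example, not from the original answer):
class Demo(object):
    def __getattr__(self, attr):
        # only called when normal lookup fails
        print('__getattr__', attr)
        return 42

    def __setattr__(self, attr, val):
        # called for every assignment, even in __init__
        print('__setattr__', attr)
        object.__setattr__(self, attr, val)

d = Demo()
d.x = 1       # prints: __setattr__ x
print(d.x)    # 1 -- found normally, so __getattr__ is not called
print(d.y)    # prints: __getattr__ y, then 42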
__setattr__ has to handle three cases:
1. the attribute _SuperClass__main itself, for when you assign to self.__main in __init__;
2. assignments to attributes that exist on self.__main;
3. assignments to attributes specific to SuperClass.
With that in mind, try
def __setattr__(self, attr, val):
    if attr == '_SuperClass__main':
        super().__setattr__(attr, val)
    elif hasattr(self.__main, attr):
        setattr(self.__main, attr, val)
    else:
        super().__setattr__(attr, val)
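With this version, assignments to names that SuperClass itself defines (such as the index property) fall through to object.__setattr__, so the property setter finally runs; the script from the question should then print x 0 abc, y 3 and z 5.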

The __setattr__ method you have defined is taking precedence over the @index.setter.
Simplify the code and it should work:
class MainClass(object):
    def __init__(self):
        self.name = 'abc'


class SuperClass(object):
    def __init__(self, main, *args, **kwargs):
        super(SuperClass, self).__init__(*args, **kwargs)
        self.__main = main
        self._index = 0

    @property
    def name(self):
        return self.__main.name

    @name.setter
    def name(self, value):
        self.__main.name = value

    @property
    def index(self):
        return self._index

    @index.setter
    def index(self, value):
        self._index = value


main_object = MainClass()
super_object = SuperClass(main_object)

print('x', super_object.index, super_object.name)
super_object.index = 3
print('y', super_object.index)
super_object.index += 2
print('z', super_object.index)
Output:
x 0 abc
y 3
z 5

I would also suggest the simpler option of just inheriting from MainClass instead of using composition and delegation:
class SuperClass(MainClass):
    def __init__(self):
        super().__init__()
        self._index = 0

    @property
    def index(self):
        return self._index

    @index.setter
    def index(self, value):
        self._index = value
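A quick check of the inheritance-based version (my own example, reusing MainClass from the question; note the constructor no longer wraps an existing instance):
obj = SuperClass()
print(obj.name)     # 'abc', inherited from MainClass
obj.index = 3
obj.index += 2
print(obj.index)    # 5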

Related

How to cache an object using pickle in the __new__ method?

I'd like to cache an object in the __new__ method so that the cache can be consulted whenever a new object is constructed, but the following code raises an exception:
RecursionError: maximum recursion depth exceeded while calling a Python object
I have no idea how to break the recursion.
import pickle

class Cache:
    def __init__(self):
        self.d = {}

    def __setitem__(self, obj, val):
        self.d[obj] = pickle.dumps(val)

    def __getitem__(self, obj):
        return pickle.loads(self.d[obj])


class Car:
    cache = Cache()

    def __reduce__(self):
        return (self.__class__, (self.name,))

    def __new__(cls, name):
        try:
            return cls.cache[name]
        except KeyError:
            return cls.new(name)

    @classmethod
    def new(cls, name):
        car = object.__new__(cls)
        car.init(name)
        cls.cache[name] = car
        return car

    def init(self, name):
        self.name = name

    def __repr__(self):
        return self.name


a = Car('audi')
b = Car('audi')
Have a try. This may fix it, though it may not be the proper solution; if anyone has a better idea, feel free to leave a comment.
The recursion comes from __reduce__: unpickling a cached Car calls Car(name) again, whose __new__ looks the name up in the cache, which unpickles again, and so on.
So remove the __reduce__ method, and instead implement __getnewargs__ and __getnewargs_ex__ so that unpickling calls __new__ with _FORCE_CREATE=True and skips the cache lookup:
import pickle

class Cache:
    def __init__(self):
        self.d = {}

    def __setitem__(self, obj, val):
        self.d[obj] = pickle.dumps(val)

    def __getitem__(self, obj):
        return pickle.loads(self.d[obj])

    def __contains__(self, x):
        return x in self.d


class Car:
    cache = Cache()

    def __new__(cls, name, extra=None, _FORCE_CREATE=False):
        if _FORCE_CREATE or name not in cls.cache:
            car = object.__new__(cls)
            car.init(name)
            car.extra = extra
            cls.cache[name] = car
            return car
        else:
            return cls.cache[name]

    def init(self, name):
        self.name = name

    def __repr__(self):
        return self.name

    def __getnewargs__(self):
        return (self.name, None, True)

    def __getnewargs_ex__(self):
        # override __getnewargs_ex__ and __getnewargs__ to provide args for __new__
        return (self.name,), {"_FORCE_CREATE": True}


a = Car('audi', extra="extra_attr")
b = Car('audi')
print(id(a), a.extra)  # 1921399938016 extra_attr
print(id(b), b.extra)  # 1921399937728 extra_attr

generating function based off class field?

I have a class whose fields should all be properties with pass-through getters and setters that validate values in a certain way, following this pattern:
import numpy as np
import typing

def validate_field(value, dtype: typing.Type):
    limits = np.iinfo(dtype)
    assert limits.min < value < limits.max, \
        "value should be in range: {} < {} < {}".format(limits.min, value, limits.max)
    return value


class Foo:
    def __init__(self, a, b, c):
        self._a = a
        self._b = b
        self._c = c

    @property
    def a(self):
        return self._a

    @property
    def b(self):
        return self._b

    @property
    def c(self):
        return self._c

    @a.setter
    def a(self, value):
        self._a = validate_field(value, self._a.dtype)

    @b.setter
    def b(self, value):
        self._b = validate_field(value, self._b.dtype)

    @c.setter
    def c(self, value):
        self._c = validate_field(value, self._c.dtype)
I want to eliminate having to type a separate property and setter decorator for each method.
I thought about using properties manually via
self._a = a

def set_a(self, value):
    self._a = validate_field(value, self._a.dtype)

self.a = property(lambda self: self._a, set_a)
...
However, it seemed I would still have to manually define a function that accessed the required member for both setter and getter, so I wasn't really saving much work.
If there were a way to automatically generate such functions by naming the parameter, e.g.:
def generate_function(self, parameter):
    def temp(self, value):
        self.parameter = validate_field(value, self.parameter.dtype)
    return temp
then I wouldn't have any issues, but right now I don't see how to accomplish this.
Is there a way to generate these functions with a single decorator per field, or to automate the property generation in __init__?
You can use getattr() and setattr(), or direct dictionary access via self.__dict__, to parametrize the attribute name:
def validated_property(name):
    def getter(self):
        return getattr(self, name)

    def setter(self, value):
        dtype = getter(self).dtype
        setattr(self, name, validate_field(value, dtype))

    return property(getter, setter)
then use this as
class Foo:
    # ...
    a = validated_property('_a')
    b = validated_property('_b')
    c = validated_property('_c')
etc.
If you are using Python 3.6 or newer, you can avoid repeating the attribute name: implement your own descriptor object and derive the storage name from the name under which the descriptor is assigned (by prefixing it with _, for example), which is passed to the descriptor.__set_name__() method:
class ValidatedProperty:
    _name = None

    def __set_name__(self, owner, name):
        self._name = '_' + name

    def __get__(self, instance, owner):
        if instance is None:
            return self
        return getattr(instance, self._name)

    def __set__(self, instance, value):
        dtype = self.__get__(instance, type(instance)).dtype
        setattr(instance, self._name, validate_field(value, dtype))
then use it like this:
class Foo:
    # ...
    a = ValidatedProperty()
    b = ValidatedProperty()
    c = ValidatedProperty()
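A small sanity check of the descriptor version (my own example; it assumes Foo.__init__ stores the initial numpy values in _a, _b and _c, as in the question):
import numpy as np

class Foo:
    a = ValidatedProperty()
    b = ValidatedProperty()
    c = ValidatedProperty()

    def __init__(self, a, b, c):
        self._a = a
        self._b = b
        self._c = c

f = Foo(np.int8(1), np.int8(2), np.int8(3))
f.a = np.int8(100)   # passes validation against the int8 limits
print(f.a)           # 100
# f.a = np.int16(300) would raise AssertionError from validate_field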

How do I define setter, getter for dynamically added attributes

I have a class as follows:
class A:
    def __init__(self):
        pass

    def add_attr(self, name):
        setattr(self, name, 'something')
How do I define a custom setter and getter for self.name? I cannot simply use __setattr__ and __getattribute__, because that would change the behaviour of add_attr too.
EDIT: the users of this class will add an arbitrary number of attributes with arbitrary names:
a = A()
a.add_attr('attr1')
a.add_attr('attr2')
I want custom behavior only for these user-added attributes.
Building off @Devesh Kumar Singh's answer, I would implement it something like this:
class A:
    def __init__(self):
        self.attrs = {}

    def __setattr__(self, key, value):
        if key in self.__dict__.get('attrs', {}):
            self.set_attr(key, value)
        else:
            object.__setattr__(self, key, value)

    def __getattribute__(self, key):
        attrs = object.__getattribute__(self, '__dict__').get('attrs', {})
        if key in attrs:
            return object.__getattribute__(self, 'get_attr')(key)
        return object.__getattribute__(self, key)

    def get_attr(self, key):
        r = self.attrs[key]
        # custom logic here
        return r

    def set_attr(self, key, value):
        # custom logic here
        self.attrs[key] = value

    def add_attr(self, key, value=None):
        self.attrs[key] = value
add_attr is only used to initialise the variable the first time. You could also edit __setattr__ to store all new attributes in self.attrs rather than self.__dict__.
Custom getter and setter logic? That's what a property is made for. Usually these are used to magically mask function calls and make them look like attribute access:
class MyDoubler(object):
    def __init__(self, x):
        self._x = x

    @property
    def x(self):
        return self._x * 2

    @x.setter
    def x(self, value):
        self._x = value

>>> md = MyDoubler(10)
>>> md.x
20
>>> md.x = 20
>>> md.x
40
>>> md._x
20
But there's no rule saying you can't abuse that power to add custom behavior to your getters and setters.
class A(object):
    def __init__(self):
        pass

    @staticmethod
    def default_getter_factory(name):
        def default_getter(self):
            return getattr(self, name)
        return default_getter

    @staticmethod
    def default_setter_factory(name):
        def default_setter(self, value):
            setattr(self, name, value)
        return default_setter

    def add_attr(self, name, getterfactory=None, setterfactory=None):
        private_name = f"_{name}"

        if getterfactory is None:
            getterfactory = self.__class__.default_getter_factory
        if setterfactory is None:
            setterfactory = self.__class__.default_setter_factory

        getter, setter = getterfactory(private_name), setterfactory(private_name)
        getter = property(getter)
        setattr(self.__class__, name, getter)
        setattr(self.__class__, name, getter.setter(setter))
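A brief usage sketch of the factory approach (my own example; the attribute name speed is hypothetical):
a = A()
a.add_attr('speed')    # installs a `speed` property on the class A itself
a.speed = 88
print(a.speed)         # 88, stored on the instance as _speed
print(a._speed)        # 88
Note that add_attr mutates self.__class__, so every instance of A gains the new property, not just the instance you called it on.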
That said this is all a bit silly, and chances are that whatever it is you're trying to do is a thing that shouldn't be done. Dynamic programming is all well and good, but if I were to review code that did this, I would think very long and hard about alternative solutions before approving it. This reeks of technical debt to me.
One possibility I could think of is to have a dictionary of dynamic attributes, and set and get the dynamic attributes using the dictionary
class A:
    def __init__(self):
        # dictionary of dynamic attributes
        self.attrs = {}

    # set attribute
    def set_attr(self, name):
        self.attrs[name] = 'something'

    # get attribute
    def get_attr(self, name):
        return self.attrs.get(name)

a = A()
a.set_attr('var')
print(a.get_attr('var'))
The output will be: something
An alternative is to use property to add attributes explicitly outside the class, as described here:
class A:
    def __init__(self):
        pass

a = A()

# add attributes via property
a.attr_1 = property(lambda self: self.attr_1)
a.attr_2 = property(lambda self: self.attr_2)

# assign them values and print them
a.attr_1 = 4
a.attr_2 = 6
print(a.attr_1, a.attr_2)
The output will be 4 6
I am going to answer my own question just for reference. This is based on the other answers here. The idea is to use the default __setattr__ and __getattribute__ for attributes not added through add_attr.
class A:
    def __init__(self):
        self.attrs = {}

    def add_attr(self, name):
        self.attrs[name] = 'something'

    def __getattribute__(self, name):
        try:
            value = object.__getattribute__(self, 'attrs')[name]  # valid only if added by user
            # custom logic, then return
            return value
        except (KeyError, AttributeError):
            return object.__getattribute__(self, name)

    def __setattr__(self, name, val):
        # similar to __getattribute__
        try:
            object.__getattribute__(self, 'attrs')[name]  # valid only if added by user
            # custom logic, then store in self.attrs
            self.attrs[name] = val
        except (KeyError, AttributeError):
            object.__setattr__(self, name, val)

python - bind property attribute of one instance to another class

I'd like to bind a specific property/descriptor on an instance of one class to an another attribute on an instance of another class (dynamic property?). Something like:
class Base(object):
    def __init__(self, value=None):
        self._value = value

    @property
    def value(self):
        return self._value

    @value.setter
    def value(self, value):
        self._value = value


class Child(Base):
    pass


class Parent(Base):
    def __init__(self, *args, **kwargs):
        super(Parent, self).__init__(*args, **kwargs)
        self.children = []

    def add_attrs(self, attr_child):
        for attr, child in attr_child:
            self.children.append(child)
            setattr(self, attr, child.value)


av = [("id_", Child(123)), ("name", Child("test"))]
p = Parent()
p.add_attrs(av)

assert p.name == p.children[-1].value
# at this point p.name == "test"

p.name = "abc"
# would like the above to also set child.value to "abc"
assert p.name == p.children[-1].value
Ultimately I could do p.name.value if instead I set name to the Child instance, but I was wondering whether this could be done, as I think it's a bit nicer. I tried something like:
def get_dict_attr(obj, attr):
    for obj in [obj] + obj.__class__.mro():
        if attr in obj.__dict__:
            return obj.__dict__[attr]
    raise AttributeError


class Parent(Base):
    def __init__(self, *args, **kwargs):
        super(Parent, self).__init__(*args, **kwargs)
        self.children = []

    def add_attrs(self, attr_child):
        for attr, child in attr_child:
            self.children.append(child)
            val = get_dict_attr(child, "value")
            setattr(self, attr, val)
but then p.name is just <property object at ... >. I'm wondering whether something like this is possible; I haven't come across anything suggesting it is so far. Thanks!
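One possible way to get this behaviour (my own sketch, not from the original thread) is to install a forwarding property on the Parent class inside add_attrs, so that reading or writing p.name goes through child.value:
class Parent(Base):
    def __init__(self, *args, **kwargs):
        super(Parent, self).__init__(*args, **kwargs)
        self.children = []

    def add_attrs(self, attr_child):
        for attr, child in attr_child:
            self.children.append(child)
            # bind the child via the lambdas' default arguments so each
            # property forwards to its own Child instance
            setattr(type(self), attr, property(
                lambda self, c=child: c.value,
                lambda self, value, c=child: setattr(c, 'value', value),
            ))

av = [("id_", Child(123)), ("name", Child("test"))]
p = Parent()
p.add_attrs(av)
p.name = "abc"
assert p.children[-1].value == "abc"
The caveat is that this mutates the Parent class, so the properties are shared by every Parent instance; if that matters, a __getattr__/__setattr__ pair that consults a per-instance mapping of children avoids it.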

Overriding __setattr__ at runtime

I am trying to override the __setattr__ method of a Python class, since I want to call another function each time an instance attribute changes its value. However, I don't want this behaviour in the __init__ method, because during this initialization I set some attributes which are going to be used later:
So far I have this solution, without overriding __setattr__ at runtime:
class Foo(object):
    def __init__(self, a, b):
        object.__setattr__(self, 'a', a)
        object.__setattr__(self, 'b', b)
        result = self.process(a)
        for key, value in result.items():
            object.__setattr__(self, key, value)

    def __setattr__(self, name, value):
        print(self.b)  # call to a function using self.b
        object.__setattr__(self, name, value)
However, I would like to avoid these object.__setattr__(...) calls and override __setattr__ at the end of the __init__ method:
class Foo(object):
    def __init__(self, a, b):
        self.a = a
        self.b = b
        result = self.process(a)
        for key, value in result.items():
            setattr(self, key, value)
        # override self.__setattr__ here

    def aux(self, name, value):
        print(self.b)
        object.__setattr__(self, name, value)
I have tried self.__dict__['__setattr__'] = self.aux and object.__setattr__(self, '__setattr__', self.aux), but neither attempt has any effect. I have read this section of the data model reference, but it looks like assigning your own __setattr__ is a bit tricky.
How could I override __setattr__ at the end of __init__, or at least get a Pythonic solution where the default __setattr__ is used only in the constructor?
Unfortunately, there's no way to "override after __init__" Python's special methods; that is a side effect of how their lookup works. The crux of the problem is that Python doesn't look at the instance at all, except to get its class, before it starts looking up the special method, so there's no way for the object's state to affect which method is found.
If you don't like the special behavior in __init__, you could refactor your code to put the special knowledge in __setattr__ instead. Something like:
class Foo(object):
    __initialized = False

    def __init__(self, a, b):
        try:
            self.a = a
            self.b = b
            # ...
        finally:
            self.__initialized = True

    def __setattr__(self, attr, value):
        if self.__initialized:
            print(self.b)
        super(Foo, self).__setattr__(attr, value)
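A quick check of the flag-based approach (my own example, with the "# ..." part of __init__ left out):
f = Foo(1, 2)   # no print(self.b) while __init__ is still running
f.a = 10        # prints 2, then performs the assignment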
Edit: Actually, there is a way to change which special method is looked up, so long as you change its class after it has been initialized. This approach will send you far into the weeds of metaclasses, so without further explanation, here's how that looks:
class AssignableSetattr(type):
    def __new__(mcls, name, bases, attrs):
        def __setattr__(self, attr, value):
            object.__setattr__(self, attr, value)

        init_attrs = dict(attrs)
        init_attrs['__setattr__'] = __setattr__

        init_cls = super(AssignableSetattr, mcls).__new__(mcls, name, bases, init_attrs)
        real_cls = super(AssignableSetattr, mcls).__new__(mcls, name, (init_cls,), attrs)
        init_cls.__real_cls = real_cls

        return init_cls

    def __call__(cls, *args, **kwargs):
        self = super(AssignableSetattr, cls).__call__(*args, **kwargs)
        print "Created", self
        real_cls = cls.__real_cls
        self.__class__ = real_cls
        return self


class Foo(object):
    __metaclass__ = AssignableSetattr

    def __init__(self, a, b):
        self.a = a
        self.b = b
        for key, value in process(a).items():
            setattr(self, key, value)

    def __setattr__(self, attr, value):
        frob(self.b)
        super(Foo, self).__setattr__(attr, value)


def process(a):
    print "processing"
    return {'c': 3 * a}


def frob(x):
    print "frobbing", x


myfoo = Foo(1, 2)
myfoo.d = myfoo.c + 1
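Run under Python 2 (the __metaclass__ attribute and print statements require it), this should print roughly: "processing" from inside __init__, a "Created <Foo ...>" line from the metaclass __call__, and "frobbing 2" only for the myfoo.d assignment made after construction, which is the desired behaviour.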
@SingleNegationElimination's answer is great, but it cannot work with inheritance, since the child class's __mro__ stores the original class of the superclass. Inspired by that answer, with a small change:
The idea is simple: switch __setattr__ before __init__ runs, and restore it after __init__ has completed.
class CleanSetAttrMeta(type):
    def __call__(cls, *args, **kwargs):
        real_setattr = cls.__setattr__
        cls.__setattr__ = object.__setattr__
        self = super(CleanSetAttrMeta, cls).__call__(*args, **kwargs)
        cls.__setattr__ = real_setattr
        return self


class Foo(object):
    __metaclass__ = CleanSetAttrMeta

    def __init__(self):
        super(Foo, self).__init__()
        self.a = 1
        self.b = 2

    def __setattr__(self, key, value):
        print 'after __init__', self.b
        super(Foo, self).__setattr__(key, value)


class Bar(Foo):
    def __init__(self):
        super(Bar, self).__init__()
        self.c = 3


>>> f = Foo()
>>> f.a = 10
after __init__ 2
>>>
>>> b = Bar()
>>> b.c = 30
after __init__ 2
