Having an instance c of a class C,
I would like to make c immutable, but other instances of C don't have to be.
Is there an easy way to achieve this in python?
You can't make Python classes fully immutable. You can however imitate it:
class C:
    """Class whose instances can be frozen individually via ``_immutable``."""

    # Class-level default: every instance starts out mutable.
    _immutable = False

    def __setattr__(self, name, value):
        # Allow the write while the instance is still mutable; once the
        # flag has been flipped, every further assignment is rejected.
        if not self._immutable:
            super().__setattr__(name, value)
            return
        raise TypeError(f"Can't set attribute, {self!r} is immutable.")
Example:
>>> c = C()
>>> c.hello = 123
>>> c.hello
123
>>> c._immutable = True
>>> c.hello = 456
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "<stdin>", line 5, in __setattr__
TypeError: Can't set attribute, <__main__.C object at 0x000002087C679D20> is immutable.
If you wish to set it at initialization, you can add an __init__ like so:
class C:
    """Freezable instances; immutability can be requested at construction."""

    # Fallback read by __setattr__ before __init__ has stored the flag.
    _immutable = False

    def __init__(self, immutable=False):
        # This assignment itself passes through __setattr__, which still
        # sees the class-level False at that point, so it always succeeds.
        self._immutable = immutable

    def __setattr__(self, name, value):
        if not self._immutable:
            super().__setattr__(name, value)
            return
        raise TypeError(f"Can't set attribute, {self!r} is immutable.")
Keep in mind you can still bypass it by accessing and modifying the __dict__ of the instance directly:
>>> c = C(immutable=True)
>>> c.__dict__["hello"] = 123
>>> c.hello
123
You may attempt to block it like so:
class C:
    """Freezable instances that also refuse direct ``__dict__`` access."""

    # Fallback read by __setattr__ before __init__ has stored the flag.
    _immutable = False

    def __init__(self, immutable=False):
        self._immutable = immutable

    def __getattribute__(self, name):
        # Close the most obvious bypass route; object.__getattribute__
        # can still reach the dict, as the surrounding text demonstrates.
        if name == "__dict__":
            raise TypeError("Can't access class dict.")
        return super().__getattribute__(name)

    def __setattr__(self, name, value):
        if not self._immutable:
            super().__setattr__(name, value)
            return
        raise TypeError(f"Can't set attribute, {self!r} is immutable.")
But even then it's possible to bypass:
>>> c = C(immutable=True)
>>> object.__getattribute__(c, "__dict__")["hello"] = 123
>>> c.hello
123
Related
I'm currently working through the O'Reilly Python Cookbook, but I'm confused about the following code:
class Descriptor:
    """Data descriptor that writes values into the owner instance's dict.

    The storage key is ``self.name``, which may differ from the attribute
    the descriptor was bound to in the owning class body.
    """

    def __init__(self, name=None, **opts):
        self.name = name
        # Fold any extra configuration options straight into attributes.
        for key, val in opts.items():
            setattr(self, key, val)

    def __set__(self, instance, value):
        instance.__dict__[self.name] = value
# Descriptor for enforcing types.
class Typed(Descriptor):
    """Descriptor that validates isinstance(value, expected_type) on set."""

    # Subclasses override this with the concrete type they accept.
    expected_type = type(None)

    def __set__(self, instance, value):
        if isinstance(value, self.expected_type):
            super().__set__(instance, value)
        else:
            raise TypeError('expected ' + str(self.expected_type))
class Integer(Typed):
    # Concrete Typed descriptor: only int values are accepted by __set__.
    expected_type = int
I get how the instance attribute gets set.
I'm confused, though, about the following line:
instance.__dict__[self.name] = value
Why do this? As it means the instance attribute can be given a name different to the one originally set when declaring the class.
For ex:
class A:
    # NOTE: the descriptor is bound to class attribute "x", but it was
    # constructed with name='d', so __set__ writes instance __dict__['d'].
    x = Integer('d')
b=A()
b.x=5
b.x
<__main__.Integer at 0x1188d1390>
b.d
5
We have effectively stored the attribute under the name d instead of x. Why allow this?
Also, I'm confused about the following in the __init__ function:
self.__dict__.update(opts)
Why might we want to populate a type checking descriptor with additional attributes?
What purpose might these added attributes serve?
The cookbook was last updated in 2013, three years before the release of Python 3.6, which added a __set_name__ method to the descriptor protocol. This method is automatically called when the descriptor is created, so writing
x = Integer()
would let the descriptor see that it was assigned to a name x and set its own name attribute directly.
class Descriptor:
    """Data descriptor; learns its attribute name via __set_name__ (3.6+)."""

    def __init__(self, **opts):
        # Absorb arbitrary configuration options as attributes.
        for key, val in opts.items():
            setattr(self, key, val)

    # I don't *think* we need to know which class the descriptor is being
    # added to for this use case, but I might be overlooking something.
    def __set_name__(self, owner, name):
        self.name = name

    def __set__(self, instance, value):
        instance.__dict__[self.name] = value
Additional attributes would let you define a "restricted" type. For instance, you could restrict the value to a range between 1 and 10:
class RestrictedInteger(Integer):
    """Integer descriptor additionally restricted to the range [low, high]."""

    def __set__(self, instance, value):
        in_range = self.low <= value <= self.high
        if not in_range:
            raise ValueError(f"{value} not in range {self.low}-{self.high}")
        super().__set__(instance, value)
class A:
    # The range bounds travel through **opts and end up as attributes
    # on the descriptor itself (see Descriptor.__init__).
    x = RestrictedInteger(low=1, high=10)
Then
>>> b = A()
>>> b.x = 5
>>> b.x
5
>>> b.x = 11
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "tmp.py", line 29, in __set__
raise ValueError(f"{value} not in range {self.low}-{self.high}")
ValueError: 11 not in range 1-10
>>> b.x = 0
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "tmp.py", line 29, in __set__
raise ValueError(f"{value} not in range {self.low}-{self.high}")
ValueError: 0 not in range 1-10
This may appear as a very basic question, but I couldn't find anything helpful on SO or elsewhere...
If you take built-in classes, such as int or list, there is no way to create additional class attributes for them (which is obviously a desirable behavior) :
>>> int.x = 0
Traceback (most recent call last):
File "<pyshell#16>", line 1, in <module>
int.x = 0
TypeError: can't set attributes of built-in/extension type 'int'
but if you create your own custom class, this restriction is not active by default, so anybody may create additional class attributes in it:
# Ordinary user-defined classes accept new class attributes at any time
# (e.g. foo.c = 3 below), unlike built-in/extension types.
class foo(object):
    a = 1
    b = 2
>>> foo.c = 3
>>> print(foo.a, foo.b, foo.c)
1 2 3
I know that the __slots__ class attribute is one solution (among others) to forbid creation of unwanted instance attributes, but what is the process to forbid unwanted class attributes, as done in the built-in classes ?
I think you should play with metaclasses. It can define the behavior of your class instead of its instances.
The comment from Patrick Haugh refers to another SO answer with the following code snippet:
class FrozenMeta(type):
    """Metaclass that blocks creation of new class attributes once the
    class body has executed (existing attributes may still be rebound)."""

    def __new__(cls, name, bases, dct):
        # Seed the (mangled) flag as False so the assignment below is
        # still allowed to pass through our own __setattr__.
        namespace = {"_FrozenMeta__frozen": False, **dct}
        inst = super().__new__(cls, name, bases, namespace)
        inst.__frozen = True  # from here on, only existing names may be set
        return inst

    def __setattr__(self, key, value):
        adding_new_name = not hasattr(self, key)
        if self.__frozen and adding_new_name:
            raise TypeError("I am frozen")
        super().__setattr__(key, value)
# Demo: rebinding an existing attribute works; adding a new one is blocked.
class A(metaclass=FrozenMeta):
    a = 1
    b = 2
A.a = 2
A.c = 1 # TypeError: I am frozen
#AlexisBRENON's answer works but if you want to emulate the behavior of a built-in class, where subclasses are allowed to override attributes, you can set the __frozen attribute to True only when the bases argument is empty:
class FrozenMeta(type):
    """Subclass-friendly variant: only classes defined with an empty bases
    tuple are frozen, mirroring how built-in types allow subclass overrides."""

    def __new__(cls, name, bases, dct):
        inst = super().__new__(
            cls, name, bases, {"_FrozenMeta__frozen": False, **dct}
        )
        # Root classes freeze; derived classes (non-empty bases) stay open.
        inst.__frozen = not bases
        return inst

    def __setattr__(self, key, value):
        if not self.__frozen or hasattr(self, key):
            super().__setattr__(key, value)
        else:
            raise TypeError("I am frozen")
# Demo for the subclass-friendly FrozenMeta: the derived class B stays open.
class A(metaclass=FrozenMeta):
    a = 1
    b = 2
class B(A):
    pass
B.a = 2
B.c = 1 # this is OK
A.c = 1 # TypeError: I am frozen
Whenever you see built-in/extension type you are dealing with an object that was not created in Python. The built-in types of CPython were created with C, for example, and so the extra behavior of assigning new attributes was simply not written in.
You see similar behavior with __slots__:
>>> class Huh:
... __slots__ = ('a', 'b')
>>> class Hah(Huh):
... pass
>>> Huh().c = 5 # traceback
>>> Hah().c = 5 # works
As far as making Python classes immutable, or at least unable to have new attributes defined, a metaclass is the route to go -- although anything written in pure Python will be modifiable, it's just a matter of how much effort it will take:
>>> class A(metaclass=FrozenMeta):
... a = 1
... b = 2
>>> type.__setattr__(A, 'c', 9)
>>> A.c
9
A more complete metaclass:
class Locked(type):
    "support various levels of immutability"
    #
    # Metaclass guarding class-attribute creation, modification and deletion.
    # The per-class flags live under the mangled names
    # _Locked__create / _Locked__change / _Locked__delete.
    #
    def __new__(metacls, cls_name, bases, clsdict, create=False, change=False, delete=False):
        # FIX: write the flags directly into the class namespace instead of
        # assigning them afterwards.  The original seeded them as True and
        # then assigned `cls.__change = change` etc., but those assignments
        # are routed through our own __setattr__ — so once __change became
        # False, the very next assignment (__delete) raised, and no class
        # with change=False could ever be created.
        cls = super().__new__(metacls, cls_name, bases, {
            **clsdict,
            "_Locked__create": create,
            "_Locked__change": change,
            "_Locked__delete": delete,
        })
        return cls
    #
    def __setattr__(cls, name, value):
        # Existing attributes may be rebound only when change=True;
        # brand-new attributes may be added only when create=True.
        if hasattr(cls, name):
            if cls.__change:
                super().__setattr__(name, value)
            else:
                raise TypeError('%s: cannot modify %r' % (cls.__name__, name))
        elif cls.__create:
            super().__setattr__(name, value)
        else:
            raise TypeError('%s: cannot create %r' % (cls.__name__, name))
    #
    def __delattr__(cls, name):
        if not hasattr(cls, name):
            raise AttributeError('%s: %r does not exist' % (cls.__name__, name))
        # The flag attributes themselves are never deletable.
        # FIX: the original listed '_Locked_delete' (single underscore),
        # so the __delete flag itself could actually be deleted.
        if not cls.__delete or name in (
            '_Locked__create', '_Locked__change', '_Locked__delete',
        ):
            raise TypeError('%s: cannot delete %r' % (cls.__name__, name))
        super().__delattr__(name)
and in use:
>>> class Changable(metaclass=Locked, change=True):
... a = 1
... b = 2
...
>>> Changable.a = 9
>>> Changable.c = 7
Traceback (most recent call last):
...
TypeError: Changable: cannot create 'c'
>>> del Changable.b
Traceback (most recent call last):
...
TypeError: Changable: cannot delete 'b'
In this example, I would like to avoid the # Oops eventuality.
def foo():
    # Stand-in free function; bound onto MyClass instances in __init__ below.
    return "foo"
class MyClass(object):
    """Demo class: 'foo' is a plain instance attribute, so a later plain
    assignment (x.foo = 2) silently clobbers the callable."""

    def __init__(self):
        # Equivalent to setattr(self, 'foo', foo): just an instance attribute.
        self.foo = foo

    def bar(self):
        return "bar"
-
>>> x = MyClass()
>>> x.foo()
>>> x.foo = 2 # Oops
>>> x.foo()
TypeError: 'int' object is not callable
How can I prevent my methods from being overwritten by mistake?
Make x.foo a property, without specifying a setter. However it is quite tricky to do it dynamically:
def add_property(inst, name, method):
    '''
    Adds a read-only property to a class instance.

    Properties live on classes, never on instances, so the instance is
    re-classed into a fresh single-use subclass the first time; the marker
    attribute '__perinstance' prevents creating a new subclass per call.
    '''
    cls = type(inst)
    if not hasattr(cls, '__perinstance'):
        cls = type(cls.__name__, (cls,), {})
        cls.__perinstance = True
        inst.__class__ = cls
    setattr(cls, name, property(method))
And then instead of just doing setattr do it like this:
class MyClass(object):
    def __init__(self):
        # 'foo' becomes a read-only property on a per-instance subclass,
        # so plain assignment (o.foo = 3) now raises AttributeError.
        add_property(self, 'foo', lambda _ : 2)
(for more realistic use, replace the lambda function with the
function or method returning the value for foo)
Output:
>>> o=MyClass()
>>> o.foo
2
>>> o.foo=3
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: can't set attribute
>>>
You can check if passed to setattr attribute name is already exists in class (and instance if needed) __dict__ and do not rewrite it in that case:
class MyClass(object):
    """Silently ignores assignments to names that already exist on the
    class or on this instance — i.e. the first write wins."""

    def __setattr__(self, name, value):
        already_defined = (name in self.__class__.__dict__
                           or name in self.__dict__)
        if not already_defined:
            super(MyClass, self).__setattr__(name, value)
test:
>>> x = MyClass()
>>> x.foo = foo # equal to your __init__ setattr call
>>> x.foo()
'foo'
>>> x.foo = 2
>>> x.foo()
'foo'
I know the questions about: copy properties, or dynamic creation of properties has already been posted and also been answered (here, here and here). You could also find an excellent description, how the property function works here.
But I think, that my question is a bit more specific. I do not only want to copy the property from one class to another. No, I also want the specific getter, setter and deleter functions to be copied to the destination class. After a whole day of searching for an answer, I decided to create an new post for this question.
So let me go into a bit more detail. I have an attribute class, which is more of a class group, that stores property-classes:
class AttrContainer():
    """Namespace grouping small property-holder classes; the property
    objects defined here are meant to be copied onto item "wrapper"
    classes elsewhere.

    FIX: the decorators were posted as '#property' / '#a.setter' etc.
    (markdown mangling of '@'), which made them plain methods.  They are
    restored to real decorators here — the surrounding code reads
    prop.fget / prop.fset / prop.fdel, which only exist on properties.
    """
    class a():
        ATTR = 1
        @property
        def a(self):
            return self.ATTR
        @a.setter
        def a(self, n):
            self.ATTR = n + 3.021
    class b():
        ATTR = None
        @property
        def b(self):
            return "Something"
    class c():
        ATTR = None
        @property
        def c(self):
            return 3
        @c.setter
        def c(self, n):
            self.ATTR = n - 8.5201
As you can see, I have different getter, setter (not in the example: deleter) definitions of each property.
I want to use those properties with my item "wrapper" objects. But not all item objects need all properties; that's why I want to copy them dynamically into my wrapper classes.
So, this is how my item "wrapper" classes looks like:
class Item01Object():
    # Names of the AttrContainer properties this wrapper should receive.
    properties = ["a","c"]
    ATTR = None
    #[...]
class Item02Object():
    # Names of the AttrContainer properties this wrapper should receive.
    properties = ["b","c"]
    ATTR = None
    #[...]
#[...]
Because I can't set the properties dynamically while the item class is being instantiated, I have to set them before I instantiate the class:
def SetProperties( ItemObject ):
    """Copy the wanted property accessors from AttrContainer onto the
    (old-style, Python 2) ItemObject class, then return a fresh instance.

    Direct __dict__ assignment only works for old-style classes; new-style
    classes expose a read-only dictproxy (see the answer below).
    """
    for propName, holder in AttrContainer.__dict__.iteritems():
        if propName not in ItemObject.properties:
            continue
        prop = holder.__dict__[propName]
        # Rebuild the property from its accessors; each of fget/fset/fdel
        # is either a function or None already.
        ItemObject.__dict__[propName] = property(prop.fget, prop.fset, prop.fdel)
    return ItemObject()
In the end, i would instance my ItemObjects like this:
item = SetProperties(Item01Object)
I would expect, that this will work...
>>> print item
<__builtin__.Item01Object instance at 0x0000000003270F88>
>>> print item.a
None
This result is right, because I do not update my property ATTR.
Lets change the property:
>>> item.a = 20
>>> print item.a
20
But this result is wrong: it should be 23.021 and NOT 20. It looks like my properties are not using the setter functions from their classes.
Why? What am I doing wrong in my code?
Edit: Sorry, I forgot to remove the inherited object of the ItemObject classes.. Now the code works.
For properties with setters and deleters to work properly, your classes need to inherit from object: Why does #foo.setter in Python not work for me?
You can just copy the property object itself over to the new class. It'll hold references to the getter, setter and deleter functions and there is no need to copy those across.
For new-style classes, your code is not working; you cannot assign to a class __dict__ attribute:
>>> item = SetProperties(Item01Object)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "<stdin>", line 4, in SetProperties
TypeError: 'dictproxy' object does not support item assignment
Use setattr() instead to set attributes on new-style classes:
def SetProperties( ItemObject ):
    """Attach each matching property object wholesale via setattr(); this
    works for new-style classes, whose __dict__ proxy is read-only."""
    for propName, holder in AttrContainer.__dict__.iteritems():
        if propName in ItemObject.properties:
            setattr(ItemObject, propName, holder.__dict__[propName])
    return ItemObject()
Note that the property object is copied across wholesale.
Demo:
>>> class Item01Object(object):
... properties = ["a","c"]
... ATTR = None
...
>>> def SetProperties( ItemObject ):
... for propName, cls in AttrContainer.__dict__.iteritems():
... if propName in ItemObject.properties:
... setattr(ItemObject, propName, cls.__dict__[propName])
... return ItemObject()
...
>>> item = SetProperties(Item01Object)
>>> item
<__main__.Item01Object object at 0x108205850>
>>> item.a
>>> item.a = 20
>>> item.a
23.021
You only have to copy across property objects to the target class once though; that your function returns an instance implies you are planning to use it for all instances created.
I'd make it a decorator instead:
def set_properties(cls):
    """Class decorator: copy the property objects named in cls.properties
    from AttrContainer onto cls, once, at class-definition time."""
    wanted = cls.properties
    for name, propcls in vars(AttrContainer).iteritems():
        if name not in wanted:
            continue
        setattr(cls, name, vars(propcls)[name])
    return cls
then use this on each of your Item*Object classes:
# FIX: the posted '#set_properties' is a markdown-mangled '@set_properties';
# restored so the decorator actually runs at class-definition time.
@set_properties
class Item01Object(object):
    properties = ["a","c"]
    ATTR = None
# FIX: the posted '#set_properties' is a markdown-mangled '@set_properties';
# restored so the decorator actually runs at class-definition time.
@set_properties
class Item02Object(object):
    properties = ["b","c"]
    ATTR = None
Demo:
>>> def set_properties(cls):
... for name, propcls in vars(AttrContainer).iteritems():
... if name in cls.properties:
... setattr(cls, name, vars(propcls)[name])
... return cls
...
>>> #set_properties
... class Item01Object(object):
... properties = ["a","c"]
... ATTR = None
...
>>> #set_properties
... class Item02Object(object):
... properties = ["b","c"]
... ATTR = None
...
>>> item01 = Item01Object()
>>> item01.c = 20
>>> item01.c
3
>>> item02 = Item02Object()
>>> item02.b = 42
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: can't set attribute
>>> item02.b
'Something'
Here's the behavior I'm looking for:
>>> o = SomeClass()
>>> # Works:
>>> o.foo.bar = 'bar'
>>> print o.foo.bar
'bar'
>>> # The in-between object would be of type SomeClass as well:
>>> print o.foo
>>> <__main__.SomeClass object at 0x7fea2f0ef810>
>>> # I want referencing an unassigned attribute to fail:
>>> print o.baz
Traceback (most recent call last):
File "<stdin>", line 5, in <module>
print o.baz
AttributeError: 'SomeClass' object has no attribute 'baz'
In other words, I want to override __getattr__ and __setattr__ (and possibly __getattribute__) in such a way that work similarly to defaultdict, allowing assignment to arbitrary attributes, but if an attribute is just referenced but not assigned to, that it throws an AttributeError as it normally would.
Is this possible?
This is impossible in Python.
What you're asking is for this:
>>> o = SomeClass()
>>> o.foo.bar = 'bar'
>>> print o.foo.bar
'bar'
>>> a = o.baz
raises AttributeError
This can't be done. There's no way to distinguish
>>> o.foo.bar = 'bar'
from
>>> temp = o.foo
>>> temp.bar = 'bar'
They're logically equivalent, and under the hood Python is doing the same thing in both cases. You can't differentiate them in order to raise an exception in the latter case but not the former.
I'm not sure what you mean. The language features already let you do that:
>>> class MyClass(object):
... pass
...
>>> f = MyClass()
>>> f.foo = 5
>>> print f.foo
5
>>> f.baz
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: 'MyClass' object has no attribute 'baz'
>>>
How about:
class AutoVivifier(object):
    """Auto-creates missing attributes as fresh AutoVivifier instances,
    so arbitrary attribute chains can be assigned to."""

    def __getattr__(self, key):
        # Only reached when normal lookup fails: materialize a child node,
        # cache it on the instance, and hand it back.
        child = type(self)()
        object.__setattr__(self, key, child)
        return child
o=AutoVivifier()
o.foo.bar='baz'
print(o.foo.bar)
# baz
print(o.foo.baz)
# <__main__.AutoVivifier object at 0xb776bb0c>
o.foo.baz='bing'
print(o.foo.baz)
# bing
This doesn't raise any AttributeErrors, but it is easy to tell when an attribute chain has no previously assigned value -- the expression will be an instance of Autovivifier. That is, isinstance(o.foo.baz,AutoVivifier) is True.
I think the implementation is cleaner this way, than if you defined all sorts of special methods like __str__ and __eq__ to raise AttributeErrors.
I'm still not clear on why you need to raise AttributeErrors in the first place, but perhaps using AutoVivifier you can write functions or methods that achieve your goals, with isinstance(...,AutoVivifier) tests replacing try...except AttributeError blocks.
[~/.src/pyusb-1.0.0-a1]
|4>class SomeClass: pass
...:
[~/.src/pyusb-1.0.0-a1]
|5>o = SomeClass()
[~/.src/pyusb-1.0.0-a1]
|6>o.foo='bar'
[~/.src/pyusb-1.0.0-a1]
|7>print o.foo
bar
[~/.src/pyusb-1.0.0-a1]
|8>print o.baz
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
AttributeError: SomeClass instance has no attribute 'baz'
[~/.src/pyusb-1.0.0-a1]
|9>
This is really hacky, but perhaps a start at what you want:
class SomeClass(object):
    """Auto-vivifying node: attribute access lazily creates child nodes,
    and str() raises AttributeError for chains never assigned a value."""

    def __init__(self):
        # Bypass our own __setattr__ so these bookkeeping slots don't
        # mark the node as "having a value".
        object.__setattr__(self, "_SomeClass__children", {})
        object.__setattr__(self, "_SomeClass__empty", True)

    def __getattr__(self, k):
        # Only reached when normal lookup fails; children are kept in a
        # side dict, never as real attributes.
        children = self.__children
        if k not in children:
            children[k] = SomeClass()
        return children[k]

    def __setattr__(self, k, v):
        object.__setattr__(self, "_SomeClass__empty", False)
        object.__setattr__(self, k, v)

    def __str__(self):
        if self.__hasvalue():
            return object.__str__(self)
        raise AttributeError("Never truly existed")

    def __hasvalue(self):
        # A node has a value if it was assigned to directly, or if any
        # descendant was (Python 2 code: itervalues).
        return (not self.__empty) or any(
            child.__hasvalue() for child in self.__children.itervalues()
        )
o = SomeClass()
o.foo.bar = 'bar'
print o.foo.bar
print o.foo
print o.baz
And output:
bar
<__main__.SomeClass object at 0x7f2431404c90>
Traceback (most recent call last):
File "spam.py", line 29, in <module>
print o.baz
File "spam.py", line 17, in __str__
raise AttributeError("Never truly existed")
AttributeError: Never truly existed
Here's what I've got so far:
def raise_wrapper(wrapped_method=None):
    """Build a stand-in method that always raises AttributeError naming the
    parent object and key, optionally keeping the wrapped method's docstring."""
    def method(tmp_instance, *args, **kwargs):
        raise AttributeError("'%s' object has no attribute '%s'" % (
            type(tmp_instance._parent).__name__, tmp_instance._key))
    if wrapped_method is not None:
        method.__doc__ = wrapped_method.__doc__
    return method
class TemporaryValue(object):
    """Placeholder returned for a not-yet-assigned attribute chain.

    Assigning any attribute on it promotes the placeholder into a real
    ObjectLike stored on its parent; most other operations raise
    AttributeError via raise_wrapper.  Python 2 code: __cmp__,
    __nonzero__ and __unicode__ are Python-2-only hooks.
    """
    def __init__(self, parent, key):
        self._parent = parent
        self._key = key
    def __setattr__(self, key, value):
        # Bookkeeping slots are stored normally; object.__setattr__ avoids
        # recursing into the promotion logic below.
        if key in ('_parent', '_key'):
            return object.__setattr__(self, key, value)
        # First real assignment: materialize an ObjectLike on the parent
        # under our key, then store the attribute on that new object.
        newval = ObjectLike()
        object.__setattr__(self._parent, self._key, newval)
        return object.__setattr__(newval, key, value)
    __eq__ = raise_wrapper(object.__eq__)  # NOTE(review): assigned again below
    # __del__ = raise_wrapper()
    # __repr__ = raise_wrapper(object.__repr__)
    __str__ = raise_wrapper(object.__str__)
    __lt__ = raise_wrapper(object.__lt__)
    __le__ = raise_wrapper(object.__le__)
    __eq__ = raise_wrapper(object.__eq__)  # duplicate of the earlier __eq__
    __ne__ = raise_wrapper(object.__ne__)
    __cmp__ = raise_wrapper()
    __hash__ = raise_wrapper(object.__hash__)
    __nonzero__ = raise_wrapper()
    __unicode__ = raise_wrapper()
    __delattr__ = raise_wrapper(object.__delattr__)
    __call__ = raise_wrapper(object.__call__)
class ObjectLike(object):
    """Attribute bag: reading a missing attribute yields a TemporaryValue
    stub that only becomes a real value once something is assigned on it."""

    def __init__(self):
        pass

    def __getattr__(self, key):
        # Cache the stub so repeated reads hand back the same object.
        stub = TemporaryValue(self, key)
        object.__setattr__(self, key, stub)
        return stub

    def __str__(self):
        return str(self.__dict__)
o = ObjectLike()
o.foo.bar = 'baz'
print o.foo.bar
print o.not_set_yet
print o.some_function()
if o.unset > 3:
print "yes"
else:
print "no"