In the example enumeration code given in this question, reproduced below, why does TOKEN contain the implementations of __contains__ and __repr__ from the metaclass EnumerationType?
from ctypes import *
class EnumerationType(type(c_uint)):
def __new__(metacls, name, bases, dict):
if not "_members_" in dict:
_members_ = {}
for key,value in dict.items():
if not key.startswith("_"):
_members_[key] = value
dict["_members_"] = _members_
cls = type(c_uint).__new__(metacls, name, bases, dict)
for key,value in cls._members_.items():
globals()[key] = value
return cls
def __contains__(self, value):
return value in self._members_.values()
def __repr__(self):
return "<Enumeration %s>" % self.__name__
class Enumeration(c_uint):
__metaclass__ = EnumerationType
_members_ = {}
def __init__(self, value):
for k,v in self._members_.items():
if v == value:
self.name = k
break
else:
raise ValueError("No enumeration member with value %r" % value)
c_uint.__init__(self, value)
@classmethod
def from_param(cls, param):
if isinstance(param, Enumeration):
if param.__class__ != cls:
raise ValueError("Cannot mix enumeration members")
else:
return param
else:
return cls(param)
def __repr__(self):
return "<member %s=%d of %r>" % (self.name, self.value, self.__class__)
class TOKEN(Enumeration):
_members_ = {'T_UNDEF':0, 'T_NAME':1, 'T_NUMBER':2, 'T_STRING':3, 'T_OPERATOR':4, 'T_VARIABLE':5, 'T_FUNCTION':6}
I would expect the following code to throw an exception to the effect that __contains__ is not implemented; instead, however, it prints True False.
print 2 in TOKEN, 7 in TOKEN
Both Enumeration and TOKEN are instances of EnumerationType:
>>> isinstance(Enumeration, EnumerationType)
True
>>> isinstance(TOKEN, EnumerationType)
True
And special methods on instances of new-style classes are looked up on the type, not on the instance itself, e.g. repr(TOKEN) is equivalent to type(TOKEN).__repr__(TOKEN), which is EnumerationType.__repr__(TOKEN).
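To make the lookup concrete, here is a short illustration (assuming the TOKEN class defined above, run under Python 2 where the __metaclass__ attribute is honoured):
print 2 in TOKEN                              # True
print EnumerationType.__contains__(TOKEN, 2)  # the same lookup, spelled out explicitly
print repr(TOKEN)                             # <Enumeration TOKEN>
print type(TOKEN).__repr__(TOKEN)             # identical output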
Related
Let's take the following example of class decorators (source: http://www.informit.com/articles/article.aspx?p=1309289&seqNum=4):
class GenericDescriptor:
def __init__(self, getter, setter):
self.getter = getter
self.setter = setter
def __get__(self, instance, owner=None):
if instance is None:
return self
return self.getter(instance)
def __set__(self, instance, value):
return self.setter(instance, value)
def valid_string(attr_name, empty_allowed=True, regex=None,
acceptable=None):
def decorator(cls):
name = "__" + attr_name
def getter(self):
return getattr(self, name)
def setter(self, value):
assert isinstance(value, str), (attr_name +
" must be a string")
if not empty_allowed and not value:
raise ValueError("{0} may not be empty".format(
attr_name))
if ((acceptable is not None and value not in acceptable) or
(regex is not None and not regex.match(value))):
raise ValueError("{attr_name} cannot be set to "
"{value}".format(**locals()))
setattr(self, name, value)
setattr(cls, attr_name, GenericDescriptor(getter, setter))
return cls
return decorator
@valid_string("name", empty_allowed=False)
class StockItem:
name = None
def __init__(self, **kwargs):
if kwargs.get('second_call'):
pass
# proceed normally without calling @valid_string
self.name = kwargs.get('name', None)
self.price = kwargs.get('price', None)
self.quantity = kwargs.get('quantity', None)
if __name__ == "__main__":
import doctest
doctest.testmod()
# valid value for name
cameras1 = StockItem(name="Camera", price=45.99, quantity=2)
# invalid value for name according to @valid_string, but I need it to be accepted as well when 'second_call' is set
cameras2 = StockItem(name=67, price=45.99, quantity=2, second_call=True)
The StockItem class constructor is invoked twice, and on the second call I want the @valid_string decorator to be bypassed somehow (I don't want the name attribute's value to be validated or altered any more).
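One possible approach (a sketch, not the only option; it relies on the detail that the decorator above stores the real value under the private name "__" + attr_name, i.e. "__name" here): on the second call, write that slot directly and skip the GenericDescriptor's setter entirely:
@valid_string("name", empty_allowed=False)
class StockItem:
    def __init__(self, **kwargs):
        if kwargs.get('second_call'):
            # bypass the descriptor: write the private slot the getter reads,
            # so no type/empty validation happens
            self.__dict__["__name"] = kwargs.get('name', None)
        else:
            self.name = kwargs.get('name', None)  # goes through the descriptor's setter
        self.price = kwargs.get('price', None)
        self.quantity = kwargs.get('quantity', None)
This works because the descriptor is installed under the attribute name "name", while the value itself lives under "__name" in the instance __dict__; later reads of self.name still go through the getter and return whatever was stored.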
I read the Python Cookbook and saw descriptors, in particular the example for enforcing types on class attributes. I am writing a few classes where that would be useful, but I would also like to enforce immutability. How can I do it? Here is the type-checking descriptor, adapted from the book:
class Descriptor(object):
def __init__(self, name=None, **kwargs):
self.name = name
for key, value in kwargs.items():
setattr(self, key, value)
def __set__(self, instance, value):
instance.__dict__[self.name] = value
# by default allows None
class Typed(Descriptor):
def __init__(self, expected_types=None, **kwargs):
self.expected_types = expected_types
super().__init__(**kwargs)
def __set__(self, instance, value):
if value is not None and not isinstance(value, self.expected_types):
raise TypeError('Expected: {}'.format(str(self.expected_types)))
super(Typed, self).__set__(instance, value)
class T(object):
v = Typed(int)
def __init__(self, v):
self.v = v
Attempt #1: add a self.is_set attribute to Typed
# by default allows None
class ImmutableTyped(Descriptor):
def __init__(self, expected_types=None, **kwargs):
self.expected_types = expected_types
self.is_set = False
super().__init__(**kwargs)
def __set__(self, instance, value):
if self.is_set:
raise ImmutableException(...)
if value is not None and not isinstance(value, self.expected_types):
raise TypeError('Expected: {}'.format(str(self.expected_types)))
self.is_set = True
super(ImmutableTyped, self).__set__(instance, value)
Wrong, because in the following, the ImmutableTyped instance is 'global' in the sense that it is a single object shared by all instances of the class. By the time t2 is instantiated, is_set is already True from the previous object.
class T(object):
v = ImmutableTyped(int)
def __init__(self, v):
self.v = v
t1 = T(1)
t2 = T(2) # fails when instantiating, because is_set is already True
Attempt #2: I thought instance in __set__ referred to the class containing the attribute, so I tried to check whether instance.__dict__[self.name] was still a Typed. That is also wrong.
Idea #3: Make Typed be used more like @property by accepting an 'fget' method that returns the __dict__ of T instances. This would require the definition of a function in T similar to:
@Typed
def v(self):
return self.__dict__
which seems wrong.
How to implement immutability AND type checking as a descriptor?
Now this is my approach to the problem:
class ImmutableTyped:
def __set_name__(self, owner, name):
self.name = name
def __init__(self, *, immutable=False, types=None):
self.immutable = immutable
self.types = types if types else []
def __get__(self, instance, owner):
return instance.__dict__[self.name]
def __set__(self, instance, value):
if self.immutable is True:
raise TypeError('read-only attribute')
elif not any(isinstance(value, cls)
for cls in self.types):
raise TypeError('invalid argument type')
else:
instance.__dict__[self.name] = value
Side note: __set_name__ can be used to allow you to not specify the attribute name in initialisation. This means you can just do:
class Foo:
bar = ImmutableTyped()
and the ImmutableTyped instance will automatically have its name attribute set to 'bar', because __set_name__ takes care of that.
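For example, a quick check (assuming the Foo class just defined and Python 3.6+, where __set_name__ is invoked automatically at class creation time):
print(Foo.__dict__['bar'].name)  # 'bar', filled in by __set_name__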
I could not succeed in making such a descriptor; perhaps it is also unnecessarily complicated. The following combination of a checking function and a property suffices.
# this also allows None to go through
def check_type(data, expected_types):
if data is not None and not isinstance(data, expected_types):
raise TypeError('Expected: {}'.format(str(expected_types)))
return data
class A():
def __init__(self, value=None):
self._value = check_type(value, (str, bytes))
@property
def value(self):
return self._value
foo = A()
print(foo.value) # None
foo.value = 'bla' # AttributeError
bar = A('goosfraba')
print(bar.value) # goosfraba
bar.value = 'bla' # AttributeError
class ImmutableTyped(object):
def __set_name__(self, owner, name):
self.name = name
def __init__(self, *, types=None):
self.types = tuple(types or [])
self.instances = {}
return None
def __get__(self, instance, owner):
return instance.__dict__[self.name]
def __set__(self, instance, value):
is_set = self.instances.setdefault(id(instance), False)
if is_set:
raise AttributeError("read-only attribute '%s'" % (self.name))
if self.types:
if not isinstance(value, self.types):
raise TypeError("invalid argument type '%s' for '%s'" % (type(value), self.name))
self.instances[id(instance)] = True
instance.__dict__[self.name] = value
return None
Examples:
class Something(object):
prop1 = ImmutableTyped(types=[int])
something = Something()
something.prop1 = "1"
Will give:
TypeError: invalid argument type '<class 'str'>' for 'prop1'
And:
something = Something()
something.prop1 = 1
something.prop1 = 2
Will give:
AttributeError: read-only attribute 'prop1'
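A note on the bookkeeping (my own observation, not part of the answer above): keying the instances dict on id(instance) keeps an entry for every object ever seen, and CPython may reuse an id once an object is garbage-collected. Since __set__ already stores the value in instance.__dict__, the "has this been set yet?" question can be answered from the instance itself. A sketch of that variant:
class ImmutableTyped(object):
    def __set_name__(self, owner, name):
        self.name = name
    def __init__(self, *, types=None):
        self.types = tuple(types or [])
    def __get__(self, instance, owner):
        if instance is None:
            return self
        return instance.__dict__[self.name]
    def __set__(self, instance, value):
        # immutable once the name is already present in the instance dict
        if self.name in instance.__dict__:
            raise AttributeError("read-only attribute '%s'" % self.name)
        if self.types and not isinstance(value, self.types):
            raise TypeError("invalid argument type '%s' for '%s'" % (type(value), self.name))
        instance.__dict__[self.name] = value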
I am playing around a little with Python metaprogramming.
import json

class FormMetaClass(type):
def __new__(cls, clsname, bases, methods):
# Attach attribute names to the descriptors
for key, value in methods.items():
if isinstance(value, FieldDescriptor):
value.name = key
return type.__new__(cls, clsname, bases, methods)
class Form(metaclass=FormMetaClass):
@classmethod
def from_json(cls, incoming):
instance = cls()
data = json.loads(incoming)
for k, v in data.items():
if (not hasattr(instance, k)):
raise KeyError("Attribute not found")
instance.__setattr__(k, v)
return cls
class MyForm(Form):
first_name = String()
last_name = String()
age = Integer()
def __repr__(self):
return "{} {}".format(self.first_name, self.last_name)
def main():
data = json.dumps({'first_name': 'Thomas',
'last_name': 'Junk'})
form = MyForm.from_json(data)
print(form)
if __name__ == "__main__":
main()
class FieldDescriptor:
def __init__(self, name=None, **opts):
self.name = name
for key, value in opts.items():
setattr(self, key, value)
def __set__(self, instance, value):
instance.__dict__[self.name] = value
class Typechecked(FieldDescriptor):
expected_type = type(None)
def __set__(self, instance, value):
if not isinstance(value, self.expected_type):
raise TypeError('expected ' + str(self.expected_type))
super().__set__(instance, value)
class Integer(Typechecked):
expected_type = int
class String(Typechecked):
expected_type = str
I have a Form which has a metaclass FormMetaClass.
To have an alternative constructor I am using a @classmethod.
I create an instance, which seems to work so far.
What doesn't work is calling __repr__ (or __str__, interchangeably).
When I create an instance via MyForm() everything is fine.
When I create an instance via the @classmethod, some "default" implementation is used instead.
I expected Thomas Junk, but I get <class '__main__.MyForm'>
Could you give me a hint as to what I am overlooking?
You are returning the class, not the newly created instance:
return cls
So you return MyForm, not the new instance MyForm() you just set all the attributes on. And you indeed see the repr() output for the class:
>>> form is MyForm
True
>>> print(MyForm)
<class '__main__.MyForm'>
The fix is simple, return instance instead:
return instance
or, as a full method:
@classmethod
def from_json(cls, incoming):
instance = cls()
data = json.loads(incoming)
for k, v in data.items():
if (not hasattr(instance, k)):
raise KeyError("Attribute not found")
instance.__setattr__(k, v)
return instance
at which point the method returns an instance and everything works:
>>> isinstance(form, MyForm)
True
>>> print(form)
Thomas Junk
I'm using a metaclass to create properties for new classes, like this:
class Property(object):
def __init__(self, internal_name, type_, default_value):
self._internal_name = internal_name
self._type = type_
self._default_value = default_value
def generate_property(self):
def getter(object_):
return getattr(object_, self._internal_name)
def setter(object_, value):
if not isinstance(value, self._type):
raise TypeError("Expect type {0}, got {1}.".format(self._type, type(value)))
else:
setattr(object_, self._internal_name, value)
return property(getter, setter)
class AutoPropertyMeta(type):
def __new__(cls, name, bases, attributes):
for name, value in attributes.iteritems():
if isinstance(value, Property):
attributes[name] = value.generate_property()
return super(AutoPropertyMeta, cls).__new__(cls, name, bases, attributes)
In this way I can write code like this:
class SomeClassWithALotAttributes(object):
__metaclass__ = AutoPropertyMeta
attribute_a = Property("_attribute_a", int, 0)
...
attribute_z = Property("_attribute_z", float, 1.0)
instead of:
class SomeClassWithALotAttributes(object):
def __init__(self):
self._attribute_a = 0
...
self._attribute_z = 1.0
def get_attribute_a(self):
return self._attribute_a
def set_attribute_a(self, value):
if not isinstance(value, int):
raise TypeError("Expect type {0}, got {1}.".format(self._type, type(value))
else:
self._attribute_a = value
attribute_a = property(get_attribute_a, set_attribute_a)
...
It works great, as long as you always set an attribute's value before getting it, since AutoPropertyMeta only generates the getter and setter methods. The actual instance attribute is only created the first time you set the value. So I want to know whether there is a way for the metaclass to create the instance attributes as well.
Here is a workaround I'm using now, but I always wonder if there is a better way:
class Property(object):
def __init__(self, internal_name, type_, default_value):
self._internal_name = internal_name
self._type = type_
self._default_value = default_value
def generate_property(self):
def getter(object_):
return getattr(object_, self._internal_name)
def setter(object_, value):
if not isinstance(value, self._type):
raise TypeError("Expect type {0}, got {1}.".format(self._type, type(value)))
else:
setattr(object_, self._internal_name, value)
return property(getter, setter)
def generate_attribute(self, object_):
setattr(object_, self._internal_name, self._default_value)
class AutoPropertyMeta(type):
def __new__(cls, name, bases, attributes):
property_list = []
for name, value in attributes.iteritems():
if isinstance(value, Property):
attributes[name] = value.generate_property()
property_list.append(value)
attributes["_property_list"] = property_list
return super(AutoPropertyMeta, cls).__new__(cls, name, bases, attributes)
class AutoPropertyClass(object):
__metaclass__ = AutoPropertyMeta
def __init__(self):
for property_ in self._property_list:
property_.generate_attribute(self)
class SomeClassWithALotAttributes(AutoPropertyClass):
attribute_a = Property("_attribute_a", int, 0)
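As an aside (a sketch that is not from the original post): since instances are produced by the metaclass's __call__, another option is to let AutoPropertyMeta set the default attributes itself, right before __init__ runs, instead of relying on an AutoPropertyClass base class. Reusing the Property class from the workaround above:
class AutoPropertyMeta(type):
    def __new__(cls, name, bases, attributes):
        property_list = []
        for key, value in attributes.items():
            if isinstance(value, Property):
                attributes[key] = value.generate_property()
                property_list.append(value)
        attributes["_property_list"] = property_list
        return super(AutoPropertyMeta, cls).__new__(cls, name, bases, attributes)
    def __call__(cls, *args, **kwargs):
        instance = cls.__new__(cls)
        # fill in the defaults first, so the class's own __init__ can still override them
        for property_ in cls._property_list:
            property_.generate_attribute(instance)
        if isinstance(instance, cls):
            cls.__init__(instance, *args, **kwargs)
        return instance
As with the workaround above, a mutable default value would be shared between instances unless generate_attribute copies it.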
Here's an example of what I meant about injecting a new __init__. Please be advised this is just for fun and you shouldn't do it.
import copy

class Property(object):
def __init__(self, type_, default_value):
self._type = type_
self._default_value = default_value
def generate_property(self, name):
self._internal_name = '_' + name
def getter(object_):
return getattr(object_, self._internal_name)
def setter(object_, value):
if not isinstance(value, self._type):
raise TypeError("Expect type {0}, got {1}.".format(self._type, type(value)))
else:
setattr(object_, self._internal_name, value)
return property(getter, setter)
class AutoPropertyMeta(type):
def __new__(meta, name, bases, attributes):
defaults = {}
for name, value in attributes.iteritems():
if isinstance(value, Property):
attributes[name] = value.generate_property(name)
defaults[name] = value._default_value
# create __init__ to inject into the class
# our __init__ sets up our secret attributes
if '__init__' in attributes:
realInit = attributes['__init__']
# we do a deepcopy in case default is mutable
# but beware, this might not always work
def injectedInit(self, *args, **kwargs):
for name, value in defaults.iteritems():
setattr(self, '_' + name, copy.deepcopy(value))
# call the "real" __init__ that we hid with our injected one
realInit(self, *args, **kwargs)
else:
def injectedInit(self, *args, **kwargs):
for name, value in defaults.iteritems():
setattr(self, '_' + name, copy.deepcopy(value))
# inject it
attributes['__init__'] = injectedInit
return super(AutoPropertyMeta, meta).__new__(meta, name, bases, attributes)
Then:
class SomeClassWithALotAttributes(object):
__metaclass__ = AutoPropertyMeta
attribute_a = Property(int, 0)
attribute_z = Property(list, [1, 2, 3])
def __init__(self):
print("This __init__ is still called")
>>> x = SomeClassWithALotAttributes()
This __init__ is still called
>>> y = SomeClassWithALotAttributes()
This __init__ is still called
>>> x.attribute_a
0
>>> y.attribute_a
0
>>> x.attribute_a = 88
>>> x.attribute_a
88
>>> y.attribute_a
0
>>> x.attribute_z.append(88)
>>> x.attribute_z
[1, 2, 3, 88]
>>> y.attribute_z
[1, 2, 3]
>>> x.attribute_z = 88
Traceback (most recent call last):
File "<pyshell#76>", line 1, in <module>
x.attribute_z = 88
File "<pyshell#41>", line 12, in setter
raise TypeError("Expect type {0}, got {1}.".format(self._type, type(value)))
TypeError: Expect type <type 'list'>, got <type 'int'>.
The idea is to write your own __init__ that does the initialization of the secret attributes. You then inject it into the class namespace before creating the class, but store a reference to the original __init__ (if any) so you can call it when needed.
A read-only data descriptor is a descriptor that defines both __get__ and __set__, but __set__ raises AttributeError when called.
An example is a simple read-only property:
class Test():
_i = 1
@property
def i(self):
return self._i
assert hasattr(Test.i, '__get__')
assert hasattr(Test.i, '__set__')
t = Test()
t.i # 1
t.i = 2 # ERROR
If I have an instance of a class, I can determine if the instance attribute is a read-only data descriptor this way (although I don't like this at all):
def is_ro_data_descriptor_from_instance(instance, attr):
temp = getattr(instance, attr)
try:
setattr(instance, attr, None)
except AttributeError:
return True
else:
setattr(instance, attr, temp)
return False
If I know the class doesn't require any arguments to be instantiated, I can determine if its class attribute is a read-only data descriptor similar to the above:
def is_ro_data_descriptor_from_klass(klass, attr):
try:
setattr(klass(), attr, None)
except AttributeError:
return True
else:
return False
However, if I don't know the signature of the class ahead of time, and I try to instantiate a temporary object in this way, I could get an error:
class MyClass():
i = 1
def __init__(self, a, b, c):
'''a, b, and c are required!'''
pass
is_ro_data_descriptor_from_klass(MyClass, 'i') # Error
What can be done to determine if a class attribute is a read-only data descriptor?
EDIT: Adding more information.
Below is the code I am trying to get working:
class StaticVarsMeta(type):
'''A metaclass that will emulate the "static variable" behavior of
other languages. For example:
class Test(metaclass = StaticVarsMeta):
_i = 1
@property
def i(self):
return self._i
t = Test()
assert t.i == Test.i'''
statics = {}
def __new__(meta, name, bases, dct):
klass = super().__new__(meta, name, bases, dct)
meta.statics[klass] = {}
for key, value in dct.items():
if "_" + key in dct:
meta.statics[klass][key] = set()
if hasattr(value, '__get__'):
meta.statics[klass][key].add('__get__')
if hasattr(value, '__set__'):
try:
value.__set__(None, None)
except AttributeError:
continue
else:
meta.statics[klass][key].add('__set__')
return klass
def __getattribute__(klass, attr):
if attr not in StaticVarsMeta.statics[klass]:
return super().__getattribute__(attr)
elif '__get__' not in StaticVarsMeta.statics[klass][attr]:
return super().__getattribute__(attr)
else:
return getattr(klass, '_' + attr)
def __setattr__(klass, attr, value):
if attr not in StaticVarsMeta.statics[klass]:
super().__setattr__(attr, value)
elif '__set__' not in StaticVarsMeta.statics[klass][attr]:
super().__setattr__(attr, value)
else:
setattr(klass, '_' + attr, value)
class Test(metaclass = StaticVarsMeta):
_i = 1
def get_i(self):
return self._i
i = property(get_i)
Note the following:
type(Test.i) # int
type(Test.__dict__['i']) # property
Test().i = 2 # ERROR, as expected
Test.i = 2 # NO ERROR - should produce an error
It seems super-awkward, but here's how you could implement it based on my comment:
class StaticVarsMeta(type):
statics = {}
def __new__(meta, name, bases, dct):
cls = super().__new__(meta, name, bases, dct)
meta.statics[cls] = {}
for key, val in dct.items():
if hasattr(val, '__get__') and hasattr(val, '__set__'):
meta.statics[cls][key] = {'__get__'}
try:
val.__set__(None, None)
except AttributeError as err:
if "can't set attribute" in err.args:
continue
meta.statics[cls][key].add('__set__')
return cls
In use:
>>> class ReadOnly(metaclass=StaticVarsMeta):
@property
def foo(self):
return None
>>> class ReadWrite(metaclass=StaticVarsMeta):
@property
def bar(self):
return None
@bar.setter
def bar(self, val):
pass
>>> StaticVarsMeta.statics
{<class '__main__.ReadOnly'>: {'foo': {'__get__'}},
<class '__main__.ReadWrite'>: {'bar': {'__get__', '__set__'}}}
This is more of a "starter for 10"; there must be a better way to do it...
Your first solution can be made simpler and slightly more robust by attempting to assign the value the attribute already has. This way, no undoing is required (still, this isn't thread-safe).
def is_ro_data_descriptor_from_instance(instance, attr):
temp = getattr(instance, attr)
try:
setattr(instance, attr, temp)
except AttributeError:
return True
else:
return False
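For instance, with the Test class from the top of this question (a read-only property i) and a hypothetical plain-attribute class for comparison:
t = Test()
print(is_ro_data_descriptor_from_instance(t, 'i'))   # True: the property has no setter

class Plain:
    x = 1

p = Plain()
print(is_ro_data_descriptor_from_instance(p, 'x'))   # False: an ordinary attribute is writable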