I had to write a class that overrides __getattribute__.
Basically my class is a container, which saves every user-added property to self._meta, which is a dictionary.
from collections import OrderedDict

class Container(object):
    def __init__(self, **kwargs):
        super(Container, self).__setattr__('_meta', OrderedDict())
        #self._meta = OrderedDict()
        super(Container, self).__setattr__('_hasattr', lambda key: key in self._meta)
        for attr, value in kwargs.iteritems():
            self._meta[attr] = value

    def __getattribute__(self, key):
        try:
            return super(Container, self).__getattribute__(key)
        except:
            if key in self._meta:
                return self._meta[key]
            else:
                raise AttributeError(key)

    def __setattr__(self, key, value):
        self._meta[key] = value
#usage:
>>> a = Container()
>>> a
<__main__.Container object at 0x0000000002B2DA58>
>>> a.abc = 1 #set an attribute
>>> a._meta
OrderedDict([('abc', 1)]) #attribute is in ._meta dictionary
I have some classes which inherit the Container base class, and some of their methods have the @property decorator.
class Response(Container):
    @property
    def rawtext(self):
        if self._hasattr("value") and self.value is not None:
            _raw = self.__repr__()
            _raw += "|%s" % (self.value.encode("utf-8"))
            return _raw
The problem is that .rawtext isn't accessible (I get an AttributeError). Every key in ._meta is accessible, and every attribute added by __setattr__ of the object base class is accessible, but methods turned into properties by the @property decorator are not. I think it has to do with the way I override __getattribute__ in the Container base class. What should I do to make properties created with @property accessible?
I think you should probably look at __getattr__ instead of __getattribute__ here. The difference is this: __getattribute__ is called unconditionally if it exists, while __getattr__ is only called if Python can't find the attribute via other means.
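For illustration, here is a minimal, hypothetical Demo class (not from the question) showing the difference: __getattr__ only fires after normal lookup fails, so ordinary attributes and properties keep working.

class Demo(object):
    @property
    def prop(self):
        return "computed"

    def __getattr__(self, name):
        # Only reached after normal lookup (instance dict, class attributes,
        # properties) has already failed.
        return "fallback for %r" % name

d = Demo()
print(d.prop)     # computed -- found normally, __getattr__ is never called
print(d.missing)  # fallback for 'missing' -- normal lookup failed first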
I completely agree with mgilson. If you want sample code that should be equivalent to yours but works well with properties, you can try:
class Container(object):
    def __init__(self, **kwargs):
        self._meta = OrderedDict()
        #self._hasattr = lambda key: key in self._meta  #???
        for attr, value in kwargs.iteritems():
            self._meta[attr] = value

    def __getattr__(self, key):
        try:
            return self._meta[key]
        except KeyError:
            raise AttributeError(key)

    def __setattr__(self, key, value):
        if key in ('_meta', '_hasattr'):
            super(Container, self).__setattr__(key, value)
        else:
            self._meta[key] = value
I really do not understand your _hasattr attribute. You put it in as an attribute, but it's actually a function that has access to self... shouldn't it be a method?
Actually, I think you should simply use the built-in function hasattr:
class Response(Container):
    @property
    def rawtext(self):
        if hasattr(self, 'value') and self.value is not None:
            _raw = self.__repr__()
            _raw += "|%s" % (self.value.encode("utf-8"))
            return _raw
Note that hasattr(container, attr) will also return True for _meta.
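As a rough usage sketch, assuming the Container above plus the hasattr-based Response, the property is now reachable because __getattr__ only kicks in after the normal lookup, which finds rawtext on the class:

r = Response(value=u"hello")
print(r.rawtext)            # something like "<__main__.Response object at 0x...>|hello"
print(hasattr(r, '_meta'))  # True, as noted above
print(hasattr(r, 'nope'))   # False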
Another thing that puzzles me is why you use an OrderedDict. You iterate over kwargs, and that iteration has arbitrary order since kwargs is a normal dict, and then add the items to the OrderedDict, so _meta ends up holding the values in arbitrary order anyway.
If you aren't sure whether you need a specific order or not, simply use a dict and eventually swap to OrderedDict later.
By the way: never, ever use a bare try: ... except: without specifying the exception to catch. In your code you actually wanted to catch only AttributeError, so you should have done:
try:
    return super(Container, self).__getattribute__(key)
except AttributeError:
    # stuff
I have a class with a bunch of properties. I want to override an arbitrary number of them with a dict parsed from a yaml file. I've tried a few approaches, including __getattribute__ and setting the instance __dict__ with the new variable.
The yaml would look like
property_a: 1
property_b: 2
The first approach I tried with __getattribute__ results in a recursion error because I'm trying to access self.yamlsettings over and over again
import yaml

class Properties(object):
    def __init__(self):
        with open("config/staging/kjh.yaml") as f:
            yamlsettings = yaml.load(f)
        self.yamlsettings = yamlsettings

    def __getattribute__(self, attr):
        try:
            return self.yamlsettings[attr]
        except KeyError:
            return object.__getattribute__(self, attr)

    @property
    def property_a(self):
        return "a"

    @property
    def property_b(self):
        return "b"

    @property
    def property_c(self):
        return "c"
The second approach I tried was setting the instance's __dict__ to the key-value pairs in the yaml file.
The problem is that when I try to access the attribute, it calls the property rather than the attribute.
import yaml

class Properties(object):
    def __init__(self):
        with open("config/staging/kjh.yaml") as f:
            yamlsettings = yaml.load(f)
        for k, v in yamlsettings.items():
            self.__dict__[k] = v

    @property
    def property_a(self):
        return "a"

    @property
    def property_b(self):
        return "b"

    @property
    def property_c(self):
        return "c"
prop = Properties()
prop.__dict__
>> {'property_a': 1, 'property_b': 2}
prop.property_a
>> 'a'
Can anyone point me in the right direction? I think I might be able to achieve this through a getter but it seems extremely verbose because I have so many properties.
Thanks!
To avoid the recursion error, use the superclass (object) method to access self.yamlsettings:
...
def __getattribute__(self, attr):
    try:
        return object.__getattribute__(
            self, 'yamlsettings'
        )[attr]
    except KeyError:
        return object.__getattribute__(self, attr)
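As a hypothetical session, assuming kjh.yaml contains the two keys from the question, yaml keys shadow the properties and missing keys fall through to the normal property lookup:

prop = Properties()
print(prop.property_a)  # 1 -- found in yamlsettings, overrides the property
print(prop.property_c)  # c -- KeyError, falls back to the real property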
Using py3, I have an object that uses the @property decorator:
class O(object):
    def __init__(self):
        self._a = None

    @property
    def a(self):
        return 1
Accessing the attribute a via __dict__ (with _a) doesn't seem to return the property-decorated value but the initialized value None:
o = O()
print(o.a, o.__dict__['_a'])
>>> 1, None
Is there a generic way to make this work? I mostly need this for
def __str__(self):
    return ' '.join('{}: {}'.format(key, val) for key, val in self.__dict__.items())
Of course self.__dict__["_a"] will return self._a (well, actually it's the other way round: self._a will return self.__dict__["_a"], but anyway), not self.a. The only thing the property is doing here is to automatically invoke its getter (your a(self) function) so you don't have to type the parens; otherwise it's just a plain method call.
If you want something that works with properties too, you'll have to get those manually from dir(self.__class__) and getattr(self.__class__, name), i.e.:
def __str__(self):
    # py2
    attribs = self.__dict__.items()
    # py3
    # attribs = list(self.__dict__.items())
    for name in dir(self.__class__):
        obj = getattr(self.__class__, name)
        if isinstance(obj, property):
            val = obj.__get__(self, self.__class__)
            attribs.append((name, val))
    return ' '.join('{}: {}'.format(key, val) for key, val in attribs)
Note that this won't prevent _a from appearing in attribs; if you want to avoid this you'll also have to filter out protected names from the attribs list (all protected names, since you asked for something generic):
def __str__(self):
    attribs = [(k, v) for k, v in self.__dict__.items() if not k.startswith("_")]
    for name in dir(self.__class__):
        # a protected property is somewhat uncommon but
        # let's stay consistent with plain attribs
        if name.startswith("_"):
            continue
        obj = getattr(self.__class__, name)
        if isinstance(obj, property):
            val = obj.__get__(self, self.__class__)
            attribs.append((name, val))
    return ' '.join('{}: {}'.format(key, val) for key, val in attribs)
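For example, with the O class from the question, this version would print only the public property, since the protected _a is filtered out:

o = O()
print(str(o))  # a: 1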
Also note that this won't handle other computed attributes (property is just one generic implementation of the descriptor protocol). At this point, your best bet for something that's still as generic as possible but that can be customised if needed is to implement the above as a mixin class with a couple hooks for specialization:
class PropStrMixin(object):
    # add other descriptor types you want to include in the
    # attribs list
    _COMPUTED_ATTRIBUTES_CLASSES = [property,]

    def _get_attr_list(self):
        attribs = [(k, v) for k, v in self.__dict__.items() if not k.startswith("_")]
        for name in dir(self.__class__):
            # a protected property is somewhat uncommon but
            # let's stay consistent with plain attribs
            if name.startswith("_"):
                continue
            obj = getattr(self.__class__, name)
            if isinstance(obj, tuple(self._COMPUTED_ATTRIBUTES_CLASSES)):
                val = obj.__get__(self, self.__class__)
                attribs.append((name, val))
        return attribs

    def __str__(self):
        attribs = self._get_attr_list()
        return ' '.join('{}: {}'.format(key, val) for key, val in attribs)

class YourClass(SomeParent, PropStrMixin):
    # here you can add to _COMPUTED_ATTRIBUTES_CLASSES
    _COMPUTED_ATTRIBUTES_CLASSES = PropStrMixin._COMPUTED_ATTRIBUTES_CLASSES + [SomeCustomDescriptor]
Property is basically a "computed attribute". In general, the property's value is not stored anywhere, it is computed on demand. That's why you cannot find it in the __dict__.
The @property decorator replaces the class method with a descriptor object which then calls the original method as its getter. This happens at the class level.
The lookup for o.a starts at the instance. It does not exist there, so the class is checked next. O.a exists and is a descriptor (because it has special methods for the descriptor protocol), so the descriptor's getter is called and the returned value is used.
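A quick way to see this with the O class from the question: the property object lives in the class __dict__, not in the instance __dict__, and o.a is the result of calling its __get__:

o = O()
print('a' in o.__dict__)              # False -- nothing stored on the instance
print(type(O.__dict__['a']))          # <class 'property'> -- descriptor on the class
print(O.__dict__['a'].__get__(o, O))  # 1 -- this is what o.a does under the hood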
(EDITED)
There is not a general way to dump the name:value pairs for the descriptors. Classes including the bases must be inspected, this part is not difficult. However retrieving the values is equivalent to a function call and may have unexpected and undesirable side-effects. For a different perspective I'd like to quote a comment by bruno desthuilliers here: "property get should not have unwanted side effects (if it does then there's an obvious design error)".
You can also update self._a in the getter, since the getter's return value should always reflect what is stored in self._a:
class O(object):
    def __init__(self):
        self._a = self.a

    @property
    def a(self):
        self._a = 1
        return self._a
A bit redundant, maybe, but setting self._a = None initially is useless in this case.
In case you need a setter
This would also be compatible, provided you remove the first line of the getter:
@a.setter
def a(self, value):
    self._a = value
I am a total noob in Python and just couldn't help myself but did it again and dreamed of something I just couldn't achieve.
I wanted to have a class, which can be instantiated as such:
my_class = MyClass(**params)
and be consumed as such, in Flask:
jsonify(my_class)
The expected outcome would be a JSON:
{ "key" : "value", ... }
Now, the implementation of MyClass is:
class MyClass(NamedMutableSequence, Document):
    __slots__ = (
        'key_1',
        'key_2',
        'key_3'
    )

    def __init__(self, **params):
        NamedMutableSequence.__init__(self, **params)
        Document.__init__(self, 'myclass')

    def save(self):
        self._db_col.update({'key_1': self.key_1}, {'key_2': self.key_2, 'key_3': self.key_3})
By now, you are wondering what NamedMutableSequence and Document are...
from collections.abc import Sequence

class NamedMutableSequence(Sequence):
    __slots__ = ()

    def __init__(self, *positional_values, **keyword_values):
        subclass_properties = self.__slots__
        for key in subclass_properties:
            setattr(self, key, keyword_values.get(key))
        if positional_values:
            for key, value in zip(subclass_properties, positional_values):
                setattr(self, key, value)

    def __str__(self):
        values = ', '.join('%s=%r' % (key, getattr(self, key)) for key in self.__slots__)
        return '%s(%s)' % (type(self).__name__, values)

    __repr__ = __str__

    def __getitem__(self, item):
        return getattr(self, item)

    def __setitem__(self, item, value):
        return setattr(self, item, value)

    def __len__(self):
        return len(self.__slots__)
Admittedly, I just copied someone's solution to a mutable namedtuple for this base class and fixed __getitem__ & __setitem__ to allow my_class.key_1 = 'some value'
class Document():
    __slots__ = ('_db_col',)

    def __init__(self, collection):
        self._db_col = mongo_db[collection]
This is just what I spewed out in an attempt at a base class that I will be using throughout my model classes for the db connection.
This is, in my opinion, where I got ahead of myself and just created a mess, because no matter what I try, I can't stop raising TypeError: {string value of my_class} is not JSON serializable.
To make matters worse, when I try to dict(my_class), I get a shiny attributes must be string error raised on getattr().
I would still like to keep the base classes and I still need to make it JSON serializable.
How can I save myself?
I finally found an answer; the solution came from another Stack Overflow post (How can I convert python class with slots to dictionary?).
What I did was just to add another method to the NamedMutableSequence, as such:
def json(self):
    return {key: getattr(self, key, None) for key in self.__slots__}
and just call it when I need a JSON parsable dictionary, as such:
my_class = MyClass(**params)
jsonify(my_class.json())
I have a method for automatically creating Python classes that wrap database tables, with class members that have the same name as the fields in the table. The class files look like this:
class CpsaUpldBuildChrgResultSet(Recordset):
    def __init__(self, connection):
        super().__init__(connection)
        self.DefaultTableName = 'cpsa_upld_build_chrg_result'
        self._keyFields.append('j_trans_seq')
        self._keyFields.append('j_index')

    @property
    def j_trans_seq(self):
        return self.GetValue('j_trans_seq')

    @j_trans_seq.setter
    def j_trans_seq(self, value):
        self.SetKeyValue('j_trans_seq', value)

    @property
    def j_index(self):
        return self.GetValue('j_index')

    @j_index.setter
    def j_index(self, value):
        self.SetKeyValue('j_index', value)
I just found that if I try to set a value for a non-existent class member, such as J_TRANS_SEQ, no exception is thrown. Is there something I can add to this class so that an attempt to access a non-existent member would raise an exception?
You can add a __setattr__ method to your class that raises an AttributeError whenever an invalid attribute is assigned to. I'm not sure exactly how you'd want to determine which attributes are valid and which are not, but one approach might be something like this:
def __setattr__(self, name, value):
    if hasattr(self, name):
        super().__setattr__(name, value)
    else:
        raise AttributeError("{} object has no attribute {!r}".format(type(self), name))
This assumes that any attribute that can be looked up is also valid to be assigned to. It might break if your property's getters don't work unless the setter is called before the getter. It might also be too permissive, since it would allow setting of instance attributes that override class attributes (such as __init__). Another approach might be to check the name against a white-list of known attributes (but be sure to include the attributes that you need for the inherited class machinery, like DefaultTableName and _keyFields).
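For completeness, here is a minimal sketch of the white-list approach, using a hypothetical _valid_fields set; the listed field names and machinery names are assumptions to adapt to your real class:

class CpsaUpldBuildChrgResultSet(Recordset):
    # hypothetical white-list of assignable names
    _valid_fields = frozenset(
        ['j_trans_seq', 'j_index', 'DefaultTableName', '_keyFields'])

    def __setattr__(self, name, value):
        if name in self._valid_fields:
            super().__setattr__(name, value)  # property setters still run normally
        else:
            raise AttributeError(
                "{} object has no attribute {!r}".format(type(self), name))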
I think @Blckknght has the right idea, but left out some important details in his answer, such as how attributes (class members) are set the first time, when they don't preexist, for example in the typical scenario when the class's __init__() method executes. Here's a more fully fleshed-out answer that works in Python 3 and addresses that deficiency.
It also shows how to minimize the coding of a bunch of repetitive properties.
class Recordset(object):
    def __init__(self, connection):
        print('Recordset.__init__({!r}) called'.format(connection))

    def SetKeyValue(self, name, value):
        print('SetKeyValue({!r}, {!r}) called'.format(name, value))

    def GetValue(self, name):
        print('GetValue({!r}) called'.format(name))

def fieldname_property(name):
    storage_name = '_' + name

    @property
    def prop(self):
        return self.GetValue(storage_name)

    @prop.setter
    def prop(self, value):
        self.SetKeyValue(storage_name, value)

    return prop

class CpsaUpldBuildChrgResultSet(Recordset):
    # define properties for valid fieldnames
    j_trans_seq = fieldname_property('j_trans_seq')
    j_index = fieldname_property('j_index')

    def __init__(self, connection):
        super().__init__(connection)
        self._setter('DefaultTableName', 'cpsa_upld_build_chrg_result')

    def __setattr__(self, name, value):
        if hasattr(self, name):
            self._setter(name, value)
        else:
            raise AttributeError("No field named %r" % name)

    def _setter(self, name, value):
        """Provides way to intentionally bypass overloaded __setattr__."""
        super().__setattr__(name, value)
print('start')
db_table = CpsaUpldBuildChrgResultSet('SomeConnection')
print('assigning attributes...')
db_table.j_trans_seq = 42 # OK
db_table.j_index = 13 # OK
db_table.J_TRANS_SEQ = 99 # -> AttributeError: No field named 'J_TRANS_SEQ'
print('done')
I would like to have a special obj that does the following:
obj.newly_created_attribute = some_value
Obviously, all objects will allow this. But I would like the previous code to automatically call a method when newly_created_attribute is not yet an attribute of obj. In my particular case, I wish to set up a custom get and set method for obj.newly_created_attribute (a property now).
Is there any way to do this? Some way to specify a callback that will be run whenever a new attribute is added to an object?
You can accomplish this by overriding __setattr__:
class SomeClass(object):
    def __setattr__(self, name, value):
        if not hasattr(self, name):
            print "new attribute", name
            # do stuff here
        return object.__setattr__(self, name, value)
__setattr__ will help you there:
Called when an attribute assignment is attempted. This is called instead of the normal mechanism (i.e. store the value in the instance dictionary). name is the attribute name, value is the value to be assigned to it.
#!/usr/bin/env python

class Klass(object):
    def __setattr__(self, name, value):
        if not hasattr(self, name):
            self.on_first_setattr()
        return object.__setattr__(self, name, value)

    def on_first_setattr(self):
        print "I am just a callback and my story's seldom told."
obj = Klass()
obj.some_attr = 1 # will call callback
obj.some_attr = 2 # no output
Overload __setattr__. Example:
class Foo(object):
    def __setattr__(self, attr, val):
        print "setattr"
        if attr not in self.__dict__:
            print "new attr:", attr
            self.__dict__[attr] = val
        else:
            print "extant attr:", attr
            self.__dict__[attr] = val
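A brief usage example of the Foo class above (Python 2, like the snippet):

f = Foo()
f.x = 1   # prints: setattr / new attr: x
f.x = 2   # prints: setattr / extant attr: x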