Why can't I remove a method with delattr? - python

According to the Python 2.7 docs, this shouldn't raise an error:
class C(object):
    def __init__(self):
        self.d = {}
    def set_key(self, key, val):
        self.d[key] = val
    def get_key(self, key):
        return self.d[key]

c = C()
delattr(c, 'set_key')
However, it raises:
AttributeError: set_key
I can do delattr on a class object. Can I remove a bound function from an instance?

set_key is an attribute of the class, not the instance. delattr(C, 'set_key') works as expected.
If you want the method to be unavailable on just that one instance, you can try c.set_key = None.
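To illustrate the difference, here is a minimal sketch (reusing the C class from the question) showing that the method lives on the class, that delattr works on the class, and that an instance can only shadow the class attribute:

c = C()
print('set_key' in C.__dict__)   # True: the method is stored on the class
print('set_key' in c.__dict__)   # False: the instance has no such entry

delattr(C, 'set_key')            # works; removes the method for every instance

c2 = C()
c2.get_key = None                # shadows the class attribute on c2 only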

I am not quite sure why Python (2.7 at least) isn't allowing bound methods to be deleted in this manner.
That said, here is one way to simulate your desired behaviour:
def delete_method(obj, method_name):
    def stub(*args, **kwargs):
        raise AttributeError("'%s' object has no attribute '%s'" % (
            obj.__class__.__name__, method_name))
    setattr(obj, method_name, stub)

c = C()
delete_method(c, 'set_key')
c.set_key('the_answer', 42)
When run, this gives
AttributeError: 'C' object has no attribute 'set_key'
Note that this technique isn't universal: for example, you won't be able to use it to remove a method from an instance of the dict class.
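A small sketch of that limitation (using the delete_method helper above): plain dict instances have no per-instance __dict__, so setattr cannot attach the stub and raises instead.

d = {}
try:
    delete_method(d, 'pop')   # tries setattr(d, 'pop', stub)
except AttributeError as e:
    # built-in dict instances have no per-instance __dict__,
    # so the stub cannot be attached
    print(e)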

There is also a way to hide methods (rather than deleting them), so that you can pass a restricted interface to another place:
def safe_interface(obj, enabled):
    "Returns an interface object blocking functions which are not in enabled."
    def assert_name(name):
        if name not in enabled:
            raise KeyError('No attribute %s.' % name)
    class Caller(object):
        def __getattribute__(self, name):
            assert_name(name)
            return getattr(obj, name)
        def __setattr__(self, name, val):
            assert_name(name)
            setattr(obj, name, val)
    c = Caller()
    return c

if __name__ == '__main__':
    class C(object):
        def __init__(self):
            self.d = {}
        def set_key(self, key, val):
            self.d[key] = val
        def get_key(self, key):
            return self.d[key]

    c = C()
    d = safe_interface(c, ['set_key'])
    key = 'c'
    val = 'm'
    d.set_key(key, val)
    print c.get_key(key)
    print d.get_key(key)  # error, this method is not allowed

Related

How do I define setter, getter for dynamically added attributes

I have a class as follows:
class A:
    def __init__(self):
        pass
    def add_attr(self, name):
        setattr(self, name, 'something')
How do I define a custom setter and getter for the dynamically added attribute name? I cannot use __setattr__ or __getattribute__ directly, because that would change the behaviour of add_attr too.
EDIT: the users of this class will add an arbitrary number of attributes with arbitrary names:
a = A()
a.add_attr('attr1')
a.add_attr('attr2')
I want custom behavior for only these user added attributes.
Building off @Devesh Kumar Singh's answer, I would implement it in some way like this:
class A:
    def __init__(self):
        self.attrs = {}
    def __setattr__(self, key, value):
        if key in self.__dict__.get('attrs', {}):
            self.set_attr(key, value)
        else:
            object.__setattr__(self, key, value)
    def __getattribute__(self, key):
        # look attrs up via object.__getattribute__ to avoid infinite recursion
        attrs = object.__getattribute__(self, '__dict__').get('attrs', {})
        if key in attrs:
            return object.__getattribute__(self, 'get_attr')(key)
        return object.__getattribute__(self, key)
    def get_attr(self, key):
        r = self.attrs[key]
        # logic
        return r
    def set_attr(self, key, value):
        # logic
        self.attrs[key] = value
    def add_attr(self, key, value=None):
        self.attrs[key] = value
add_attr is only used to initialise the variable the first time. You could also edit __setattr__ to put all new attributes in self.attrs rather than self.__dict__.
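A quick usage sketch of the class above (the attribute names are just examples):

a = A()
a.add_attr('attr1')   # registers attr1 in a.attrs
a.attr1 = 42          # routed through set_attr
print(a.attr1)        # routed through get_attr, prints 42
a.plain = 'x'         # not registered, stored normally in a.__dict__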
Custom getter and setter logic? That's what a property is made for. Usually these are used to magically mask function calls and make them look like attribute access
class MyDoubler(object):
    def __init__(self, x):
        self._x = x
    @property
    def x(self):
        return self._x * 2
    @x.setter
    def x(self, value):
        self._x = value

>>> md = MyDoubler(10)
>>> md.x
20
>>> md.x = 20
>>> md.x
40
>>> md._x
20
But there's no rule saying you can't abuse that power to add custom behavior to your getters and setters.
class A(object):
    def __init__(self):
        pass

    @staticmethod
    def default_getter_factory(name):
        def default_getter(self):
            return getattr(self, name)
        return default_getter

    @staticmethod
    def default_setter_factory(name):
        def default_setter(self, value):
            setattr(self, name, value)
        return default_setter

    def add_attr(self, name, getterfactory=None, setterfactory=None):
        private_name = f"_{name}"
        if getterfactory is None:
            getterfactory = self.__class__.default_getter_factory
        if setterfactory is None:
            setterfactory = self.__class__.default_setter_factory
        getter, setter = getterfactory(private_name), setterfactory(private_name)
        getter = property(getter)
        setattr(self.__class__, name, getter)
        setattr(self.__class__, name, getter.setter(setter))
That said this is all a bit silly, and chances are that whatever it is you're trying to do is a thing that shouldn't be done. Dynamic programming is all well and good, but if I were to review code that did this, I would think very long and hard about alternative solutions before approving it. This reeks of technical debt to me.
One possibility I can think of is to keep a dictionary of dynamic attributes, and set and get the dynamic attributes through that dictionary:
class A:
    def __init__(self):
        # Dictionary of attributes
        self.attrs = {}

    # Set attribute
    def set_attr(self, name):
        self.attrs[name] = 'something'

    # Get attribute
    def get_attr(self, name):
        return self.attrs.get(name)

a = A()
a.set_attr('var')
print(a.get_attr('var'))
The output will be something, the value stored by set_attr.
Or an alternative is to use property to add attributes explicitly outside the class, as described here:
class A:
    def __init__(self):
        pass

a = A()

# Add attributes via property
a.attr_1 = property(lambda self: self.attr_1)
a.attr_2 = property(lambda self: self.attr_2)

# Assign them values and print them
a.attr_1 = 4
a.attr_2 = 6
print(a.attr_1, a.attr_2)
The output will be 4 6
I am gonna answer my own question just for reference. This is based on others' answers here. The idea is to use default __setattr__ and __getattribute__ on attributes not added through add_attr.
class A:
    def __init__(self):
        self.attrs = {}

    def add_attr(self, name):
        self.attrs[name] = 'something'

    def __getattribute__(self, name):
        try:
            object.__getattribute__(self, 'attrs')[name]  # valid only if added by user
            # custom logic and return
        except (KeyError, AttributeError):
            return object.__getattribute__(self, name)

    def __setattr__(self, name, val):
        # similar to __getattribute__
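The __setattr__ body is left as a sketch above; one possible completion, mirroring the __getattribute__ pattern (the custom logic is left as comments), could look like this:

class A:
    def __init__(self):
        self.attrs = {}

    def add_attr(self, name):
        self.attrs[name] = 'something'

    def __getattribute__(self, name):
        attrs = object.__getattribute__(self, '__dict__').get('attrs', {})
        if name in attrs:  # valid only if added by user
            # custom get logic here
            return attrs[name]
        return object.__getattribute__(self, name)

    def __setattr__(self, name, val):
        attrs = object.__getattribute__(self, '__dict__').get('attrs', {})
        if name in attrs:  # valid only if added by user
            # custom set logic here
            attrs[name] = val
        else:
            object.__setattr__(self, name, val)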

python nested attributes encapsulation

I have a question about encapsulating nested attributes in Python. Let's assume a few classes:
Here we have a main class (DataWrapper) that includes two more classes: InnerWrapper1 and InnerWrapper2. Both inner wrappers include two attributes.
class DataWrapper(object):
    @property
    def inner_wrapper1(self):
        return self.__inner_wrapper1
    @inner_wrapper1.setter
    def inner_wrapper1(self, value):
        self.__inner_wrapper1 = value
    @property
    def inner_wrapper2(self):
        return self.__inner_wrapper2
    @inner_wrapper2.setter
    def inner_wrapper2(self, value):
        self.__inner_wrapper2 = value

class InnerWrapper1(object):
    @property
    def property1(self):
        return self.__property1
    @property1.setter
    def property1(self, value):
        self.__property1 = value
    @property
    def property2(self):
        return self.__property2
    @property2.setter
    def property2(self, value):
        self.__property2 = value

class InnerWrapper2(object):
    @property
    def property3(self):
        return self.__property3
    @property3.setter
    def property3(self, value):
        self.__property3 = value
    @property
    def property4(self):
        return self.__property4
    @property4.setter
    def property4(self, value):
        self.__property4 = value
Is it possible to somehow override __getattr__ and __setattr__ to make the encapsulation below possible? What I want to achieve is to have access to those nested attributes from the top class, DataWrapper.
data_wrapper = DataWrapper()
data_wrapper.property1 = "abc"
...
var = data_wrapper.property2
...
The first thing that came to my mind was to call hasattr inside __getattr__, but that gave a maximum recursion depth error...
Here's a complete code:
class DataWrapper(object):
    def __init__(self):
        self.inner_wrapper1 = InnerWrapper1()
        self.inner_wrapper2 = InnerWrapper2()

    @property
    def inner_wrapper1(self):
        return self.__inner_wrapper1
    @inner_wrapper1.setter
    def inner_wrapper1(self, value):
        self.__inner_wrapper1 = value

    @property
    def inner_wrapper2(self):
        return self.__inner_wrapper2
    @inner_wrapper2.setter
    def inner_wrapper2(self, value):
        self.__inner_wrapper2 = value

    def __setattr__(self, attribute, value):
        #if attribute in {'innerwrapper1', 'innerwrapper2'}:
        if attribute in ['inner_wrapper1', 'inner_wrapper2']:
            return super(DataWrapper, self).__setattr__(attribute, value)
        if hasattr(self.inner_wrapper1, attribute):
            return setattr(self.inner_wrapper1, attribute, value)
        elif hasattr(self.inner_wrapper2, attribute):
            return setattr(self.inner_wrapper2, attribute, value)

    def __getattr__(self, attribute):
        try:
            return getattr(self.inner_wrapper1, attribute)
        except AttributeError: pass
        try:
            return getattr(self.inner_wrapper2, attribute)
        except AttributeError: pass

class InnerWrapper1(object):
    @property
    def property1(self):
        return self.__property1
    @property1.setter
    def property1(self, value):
        self.__property1 = value
    @property
    def property2(self):
        return self.__property2
    @property2.setter
    def property2(self, value):
        self.__property2 = value

class InnerWrapper2(object):
    @property
    def property3(self):
        return self.__property3
    @property3.setter
    def property3(self, value):
        self.__property3 = value
    @property
    def property4(self):
        return self.__property4
    @property4.setter
    def property4(self, value):
        self.__property4 = value

def main():
    data_wrapper = DataWrapper()
    data_wrapper.property1 = "abc"

if __name__ == "__main__":
    main()
You get an infinite recursion error because you forgot to take into account setting the inner_wrapper1 and inner_wrapper2 attributes in your __init__ method.
When you do this:
self.inner_wrapper1 = InnerWrapper1()
Python will also use your __setattr__ method. This then tries to use self.inner_wrapper1 which doesn't yet exist so __getattr__ is called, which tries to use self.inner_wrapper1 which doesn't yet exist, and you enter into an infinite recursion loop.
In __setattr__ delegate attribute setting to the superclass:
def __setattr__(self, attribute, value):
    if attribute in {'inner_wrapper1', 'inner_wrapper2'}:
        return super(DataWrapper, self).__setattr__(attribute, value)
    if hasattr(self.inner_wrapper1, attribute):
        return setattr(self.inner_wrapper1, attribute, value)
    elif hasattr(self.inner_wrapper2, attribute):
        return setattr(self.inner_wrapper2, attribute, value)
If you used a single leading underscore for 'private' attributes (so _innerwrapper1 and _innerwrapper2) you could just test for that:
def __setattr__(self, attribute, value):
    if attribute[0] == '_':  # private attribute
        return super(DataWrapper, self).__setattr__(attribute, value)
so you don't have to hardcode a whole set of names.
Since your updated full script uses __inner_wrapper1 and __inner_wrapper2 as the actual attribute names, and you are using properties, you'll have to adjust your __setattr__ test to look for those names. Because you are using double-underscore names you need to adjust for the name mangling of such attributes:
def __setattr__(self, attribute, value):
    if attribute in {
            'inner_wrapper1', 'inner_wrapper2',
            '_DataWrapper__inner_wrapper1', '_DataWrapper__inner_wrapper2'}:
        return super(DataWrapper, self).__setattr__(attribute, value)
Unless you are going to subclass DataWrapper and must protect your attributes from accidental overriding, I'd avoid using double-underscored names altogether, however. In Pythonic code, you don't worry about other code accessing attributes, there is no concept of truly private attributes.
Using properties is also overkill here; properties don't buy you encapsulation, in Python you'd only use those to simplify the API (replacing a method call with attribute access).
Note that the hasattr() tests for the InnerWrapper* property* attributes will fail because you don't have default values:
>>> inner = InnerWrapper1()
>>> hasattr(inner, 'property1')
False
hasattr() doesn't test for properties, it simply tries to access an attribute and if any exception is raised it returns False:
>>> inner = InnerWrapper1()
>>> hasattr(inner, 'property1')
False
>>> inner.property1
Traceback (most recent call last):
  File "<stdin>", line 1, in <module>
  File "<string>", line 43, in property1
AttributeError: 'InnerWrapper1' object has no attribute '_InnerWrapper1__property1'
>>> inner.property1 = 'foo'
>>> inner.property1
'foo'
>>> hasattr(inner, 'property1')
True
By removing all the @property objects you can simplify this greatly:
class DataWrapper(object):
    def __init__(self):
        self._inner_wrapper1 = InnerWrapper1()
        self._inner_wrapper2 = InnerWrapper2()

    def __setattr__(self, attribute, value):
        if attribute[0] == '_':
            return super(DataWrapper, self).__setattr__(attribute, value)
        if hasattr(self._inner_wrapper1, attribute):
            return setattr(self._inner_wrapper1, attribute, value)
        elif hasattr(self._inner_wrapper2, attribute):
            return setattr(self._inner_wrapper2, attribute, value)

    def __getattr__(self, attribute):
        try:
            return getattr(self._inner_wrapper1, attribute)
        except AttributeError: pass
        return getattr(self._inner_wrapper2, attribute)

class InnerWrapper1(object):
    property1 = None
    property2 = None

class InnerWrapper2(object):
    property3 = None
    property4 = None
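A quick usage sketch of the simplified version (the values are just examples):

data_wrapper = DataWrapper()
data_wrapper.property1 = "abc"   # forwarded to _inner_wrapper1
data_wrapper.property4 = 42      # forwarded to _inner_wrapper2
print(data_wrapper.property1)    # "abc", fetched via __getattr__
print(data_wrapper._inner_wrapper2.property4)  # 42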

Determine if class attribute is a read-only data descriptor

A read-only data descriptor is a descriptor that defines both __get__ and __set__, but __set__ raises AttributeError when called.
An example is a simple read-only property:
class Test():
    _i = 1
    @property
    def i(self):
        return self._i

assert hasattr(Test.i, '__get__')
assert hasattr(Test.i, '__set__')

t = Test()
t.i      # 1
t.i = 2  # ERROR
If I have an instance of a class, I can determine if the instance attribute is a read-only data descriptor this way (although I don't like this at all):
def is_ro_data_descriptor_from_instance(instance, attr):
    temp = getattr(instance, attr)
    try:
        setattr(instance, attr, None)
    except AttributeError:
        return True
    else:
        setattr(instance, attr, temp)
        return False
If I know the class doesn't require any arguments to be instantiated, I can determine if its class attribute is a read-only data descriptor similar to the above:
def is_ro_data_descriptor_from_klass(klass, attr):
    try:
        setattr(klass(), attr, None)
    except AttributeError:
        return True
    else:
        return False
However, if I don't know the signature of the class ahead of time, and I try to instantiate a temporary object in this way, I could get an error:
class MyClass():
    i = 1
    def __init__(self, a, b, c):
        '''a, b, and c are required!'''
        pass

is_ro_data_descriptor_from_klass(MyClass, 'i')  # Error
What can be done to determine if a class attribute is a read-only data descriptor?
EDIT: Adding more information.
Below is the code I am trying to get working:
class StaticVarsMeta(type):
    '''A metaclass that will emulate the "static variable" behavior of
    other languages. For example:

        class Test(metaclass = StaticVarsMeta):
            _i = 1
            @property
            def i(self):
                return self._i
        t = Test()
        assert t.i == Test.i'''
    statics = {}

    def __new__(meta, name, bases, dct):
        klass = super().__new__(meta, name, bases, dct)
        meta.statics[klass] = {}
        for key, value in dct.items():
            if "_" + key in dct:
                meta.statics[klass][key] = set()
                if hasattr(value, '__get__'):
                    meta.statics[klass][key].add('__get__')
                if hasattr(value, '__set__'):
                    try:
                        value.__set__(None, None)
                    except AttributeError:
                        continue
                    else:
                        meta.statics[klass][key].add('__set__')
        return klass

    def __getattribute__(klass, attr):
        if attr not in StaticVarsMeta.statics[klass]:
            return super().__getattribute__(attr)
        elif '__get__' not in StaticVarsMeta.statics[klass][attr]:
            return super().__getattribute__(attr)
        else:
            return getattr(klass, '_' + attr)

    def __setattr__(klass, attr, value):
        if attr not in StaticVarsMeta.statics[klass]:
            super().__setattr__(attr, value)
        elif '__set__' not in StaticVarsMeta.statics[klass][attr]:
            super().__setattr__(attr, value)
        else:
            setattr(klass, '_' + attr, value)

class Test(metaclass = StaticVarsMeta):
    _i = 1
    def get_i(self):
        return self._i
    i = property(get_i)
Note the following:
type(Test.i) # int
type(Test.__dict__['i']) # property
Test().i = 2 # ERROR, as expected
Test.i = 2 # NO ERROR - should produce an error
It seems super-awkward, but here's how you could implement it based on my comment:
class StaticVarsMeta(type):
    statics = {}
    def __new__(meta, name, bases, dct):
        cls = super().__new__(meta, name, bases, dct)
        meta.statics[cls] = {}
        for key, val in dct.items():
            if hasattr(val, '__get__') and hasattr(val, '__set__'):
                meta.statics[cls][key] = {'__get__'}
                try:
                    val.__set__(None, None)
                except AttributeError as err:
                    if "can't set attribute" in err.args:
                        continue
                meta.statics[cls][key].add('__set__')
        return cls
In use:
>>> class ReadOnly(metaclass=StaticVarsMeta):
        @property
        def foo(self):
            return None
>>> class ReadWrite(metaclass=StaticVarsMeta):
        @property
        def bar(self):
            return None
        @bar.setter
        def bar(self, val):
            pass
>>> StaticVarsMeta.statics
{<class '__main__.ReadOnly'>: {'foo': {'__get__'}},
 <class '__main__.ReadWrite'>: {'bar': {'__get__', '__set__'}}}
This is more of a "starter for 10", there must be a better way to do it...
Your first solution can be made simpler and slightly more robust by attempting to assign the value it already has. This way, no undoing is required (still, this isn't thread-safe).
def is_ro_data_descriptor_from_instance(instance, attr):
    temp = getattr(instance, attr)
    try:
        setattr(instance, attr, temp)
    except AttributeError:
        return True
    else:
        return False
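If the descriptor you care about is an ordinary property (as in the examples here), one narrower check that avoids instantiating the class at all is to look up the raw descriptor in the class dict and see whether a setter was defined. This is only a sketch for properties defined directly on the class, not a general read-only data descriptor test:

def is_readonly_property(klass, attr):
    # look up the raw descriptor on the class, bypassing __get__
    descriptor = vars(klass).get(attr)
    return isinstance(descriptor, property) and descriptor.fset is None

class Test:
    _i = 1
    @property
    def i(self):
        return self._i

print(is_readonly_property(Test, 'i'))  # True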

How to clear instance data without setattr?

I wanted to make a class in Python whose instances can be stored (in a DB, in a single file, or just as a string). I wrote this (shortened):
from json import JSONDecoder, JSONEncoder

def json_decode(object): return JSONDecoder().decode(object)
def json_encode(object): return JSONEncoder().encode(object)

class Storage:
    __separator__ = 'ANY OF ANYS'
    __keys__ = []
    __vals__ = []
    __slots__ = ('__keys__', '__vals__', '__separator__')

    def __getattr__(self, key):
        try:
            return self.__vals__[self.__keys__.index(key)]
        except IndexError:
            raise AttributeError

    def __setattr__(self, key, val):
        self.__keys__.append(key)
        self.__vals__.append(val)

    def store(self):
        return (json_encode(self.__keys__) + self.__separator__ +
                json_encode(self.__vals__))

    def restore(self, stored):
        stored = stored.split(self.__separator__)
        for (key, val) in zip(json_decode(stored[0]), json_decode(stored[1])):
            setattr(self, key, val)
And yes, that works, but... when I make more instances, they all behave like a singleton (they share the same data).
So, how do I set an attribute on an instance without going through __setattr__?
PS. I had an idea: special-case __keys__/__vals__ in __setattr__/__getattr__, but that would make a mess.
Your __separator__, __keys__, __vals__ and __slots__ are attributes of the Storage class object itself, so they are shared by every instance. I don't know if it's exactly the same thing, but I'd call them static variables of the class.
If you want each instance of Storage to have its own values, define them in your __init__ function:
class Storage(object):
    __slots__ = ('__keys__', '__vals__', '__separator__')

    def __init__(self):
        super(Storage, self).__setattr__('__separator__', "ANY OF ANYS")
        super(Storage, self).__setattr__('__keys__', [])
        super(Storage, self).__setattr__('__vals__', [])

    def __getattr__(self, key):
        try:
            vals = getattr(self, '__vals__')
            keys = getattr(self, '__keys__')
            return vals[keys.index(key)]
        except (IndexError, ValueError):  # .index raises ValueError for unknown keys
            raise AttributeError

    def __setattr__(self, key, val):
        vals = getattr(self, '__vals__')
        keys = getattr(self, '__keys__')
        vals.append(val)
        keys.append(key)
edited so getattr and setattr works
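A quick sketch showing that instances no longer share data after this change:

s1 = Storage()
s2 = Storage()
s1.foo = 1
s2.foo = 2
print(s1.foo)  # 1
print(s2.foo)  # 2, each instance now has its own __keys__/__vals__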
I ran into that problem 2 days ago. I don't know if it's exactly your problem, but you said "it's like I have a singleton".
You could make your Storage class a subclass of a special base class like this:
class Singleton(object):
    def __new__(cls, *args, **kwargs):
        if '_inst' not in vars(cls):
            cls._inst = super(Singleton, cls).__new__(cls)
        return cls._inst

class Storage(Singleton):
    ....
As long as you don't override __new__() in your subclass, all subsequent calls to create new instances after the first will return the one first created.
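A minimal sketch of the effect (assuming Storage subclasses Singleton as above):

class Storage(Singleton):
    pass

a = Storage()
b = Storage()
print(a is b)  # True: the second call returns the first instance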

python koans: class proxy

I'm working through the Python koans.
I didn't have any real problems until the 34th.
this is the problem:
Project: Create a Proxy Class
In this assignment, create a proxy class (one is started for you
below). You should be able to initialize the proxy object with any
object. Any attributes called on the proxy object should be forwarded
to the target object. As each attribute call is sent, the proxy
should record the name of the attribute sent.
The proxy class is started for you. You will need to add a method
missing handler and any other supporting methods. The specification
of the Proxy class is given in the AboutProxyObjectProject koan.
Note: This is a bit trickier than its Ruby Koans counterpart, but you can do it!
can do it!
and this is my solution until now:
class Proxy(object):
    def __init__(self, target_object):
        self._count = {}
        # initialize '_obj' attribute last. Trust me on this!
        self._obj = target_object

    def __setattr__(self, name, value): pass

    def __getattr__(self, attr):
        if attr in self._count:
            self._count[attr] += 1
        else:
            self._count[attr] = 1
        return getattr(self._obj, attr)

    def messages(self):
        return self._count.keys()

    def was_called(self, attr):
        if attr in self._count:
            return True
        else:
            return False

    def number_of_times_called(self, attr):
        if attr in self._count:
            return self._count[attr]
        else:
            return False
It works until this test:
def test_proxy_records_messages_sent_to_tv(self):
    tv = Proxy(Television())
    tv.power()
    tv.channel = 10
    self.assertEqual(['power', 'channel='], tv.messages())
where tv.messages() is ['power'], because tv.channel = 10 is handled by the proxy object instead of being forwarded to the television object.
I've tried to manipulate the __setattr__ method, but I always end up in an unlimited loop.
edit 1:
I'm trying this:
def __setattr__(self, name, value):
    if hasattr(self, name):
        object.__setattr__(self, name, value)
    else:
        object.__setattr__(self._obj, name, value)
But then I get this error in a loop on the last entry:
RuntimeError: maximum recursion depth exceeded while calling a Python object
File "/home/kurojishi/programmi/python_koans/python 2/koans/about_proxy_object_project.py", line 60, in test_proxy_method_returns_wrapped_object
tv = Proxy(Television())
File "/home/kurojishi/programmi/python_koans/python 2/koans/about_proxy_object_project.py", line 25, in __init__
self._count = {}
File "/home/kurojishi/programmi/python_koans/python 2/koans/about_proxy_object_project.py", line 33, in __setattr__
object.__setattr__(self._obj, name, value)
File "/home/kurojishi/programmi/python_koans/python 2/koans/about_proxy_object_project.py", line 36, in __getattr__
if attr in self._count:
The loop is in __getattr__.
You are using hasattr in __setattr__ to decide whether you should write to the local or proxied object. This works well for all but one case.
In your __init__ you have the following line:
self._count = {}
This calls __setattr__ with '_count', which does not exist at that point, so hasattr returns False and the assignment is forwarded to the proxied object.
If you want to use your approach you have to write your __init__ like this:
def __init__(self, target_object):
object.__setattr__(self, '_count', {})
#initialize '_obj' attribute last. Trust me on this!
object.__setattr__(self, '_obj', target_object)
As I understand it, your problem may be related to the recursive call that happens when you set an attribute value. From the docs:
If __setattr__() wants to assign to an instance attribute, it should not simply execute "self.name = value" -- this would cause a recursive call to itself. Instead, it should insert the value in the dictionary of instance attributes, e.g., "self.__dict__[name] = value". For new-style classes, rather than accessing the instance dictionary, it should call the base class method with the same name, for example, "object.__setattr__(self, name, value)".
__setattr__ is called on all assignments. It's more like __getattribute__ than __getattr__. This also affects code in the __init__ method.
This means that the first branch of this code will almost always fail, only attributes inherited from object will pass the test:
def __setattr__(self, name, value):
    if hasattr(self, name):
        object.__setattr__(self, name, value)
    else:
        object.__setattr__(self._obj, name, value)
Instead, we can assume that assignments are meant for the Proxy unless it has an _obj attribute. Hence the comment in __init__: we set up our proxy's attributes, then add the target object, and all future assignments get sent to it.
def __setattr__(self, name, value):
    if hasattr(self, '_obj'):
        object.__setattr__(self._obj, name, value)
    else:
        object.__setattr__(self, name, value)
But by using hasattr we would also need to alter __getattr__ to check for _obj to prevent recursion:
def __getattr__(self, attr):
    if attr == '_obj':
        raise AttributeError
    if attr in self._count:
        self._count[attr] += 1
    else:
        self._count[attr] = 1
    return getattr(self._obj, attr)
An alternative would be to inspect the proxy's __dict__ attribute directly in the __setattr__ method:
def __setattr__(self, name, value):
    if '_obj' in self.__dict__:
        ...
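Putting those pieces together, a minimal sketch of a Proxy along these lines (the messages/was_called/number_of_times_called names come from the koan, and the '=' suffix for logged assignments is assumed from the expected ['power', 'channel='] output; the exact test assertions may require further tweaks):

class Proxy(object):
    def __init__(self, target_object):
        # bypass our own __setattr__ while setting up the proxy's state
        object.__setattr__(self, '_messages', [])
        object.__setattr__(self, '_obj', target_object)

    def __getattr__(self, name):
        # only called when the proxy itself doesn't have the attribute
        self._messages.append(name)
        return getattr(self._obj, name)

    def __setattr__(self, name, value):
        # assignments go to the target object and are logged with a '=' suffix
        self._messages.append(name + '=')
        setattr(self._obj, name, value)

    def messages(self):
        return self._messages

    def was_called(self, message):
        return message in self._messages

    def number_of_times_called(self, message):
        return self._messages.count(message)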
From the test, the proxy is required to log all attribute calls made through it, and the proxy has only a few built-in methods, which are excluded because they are used for the logging, so my answer was:
class Proxy(object):
    def __init__(self, target_object):
        self.logs = []
        self._obj = target_object

    def __getattribute__(self, attrname):
        if attrname in ['_obj', 'logs', 'messages', 'was_called', 'number_of_times_called']:
            return object.__getattribute__(self, attrname)
        else:
            self.logs.append(attrname)
            return object.__getattribute__(object.__getattribute__(self, '_obj'), attrname)

    def __setattr__(self, name, value):
        if hasattr(self, '_obj'):
            self.logs.append(name)
            object.__setattr__(object.__getattribute__(self, '_obj'), name, value)
        else:
            object.__setattr__(self, name, value)
After this it is quite easy to implement other methods ('messages', 'was_called', ... )
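For example, a sketch of those helper methods on top of self.logs (using the names from the koan):

def messages(self):
    return self.logs

def was_called(self, attrname):
    return attrname in self.logs

def number_of_times_called(self, attrname):
    return self.logs.count(attrname)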
Sorry for necro'ing old question.
I also found out that __getattribute__ can be changed: just check whether the attribute exists on the target object.
def __getattribute__(self, attrname):
    if attrname not in dir(object.__getattribute__(self, '_obj')):
        return object.__getattribute__(self, attrname)
    else:
        self.logs.append(attrname)
        return object.__getattribute__(object.__getattribute__(self, '_obj'), attrname)
from collections import Counter

class Proxy(object):
    """Proxy class wraps any other class, and adds functionality to remember and report all messages called.
    Limitations include that proxy blocks all direct subclass calls to:
    messages, number_of_times_called, was_called, _obj, and _message_counts.
    These calls must be made directly like my_proxy_instance._obj.messages.
    """
    def __init__(self, target_object):
        print 'initializing a proxy for ' + target_object.__class__.__name__
        # WRITE CODE HERE
        self._message_counts = Counter()
        # initialize '_obj' attribute last. Trust me on this!
        self._obj = target_object
        # WRITE CODE HERE

    def __getattr__(self, attr_name):
        print 'getting an attribute: "' + attr_name + '" from "' + self._obj.__class__.__name__ + '"'
        self._message_counts[attr_name] += 1
        print self._message_counts
        return object.__getattribute__(self._obj, attr_name)

    #def __getattribute__(self, attr_name):
    #    print "intercepted!~ " + attr_name
    #    object.__getattribute__(self, attr_name)

    def __setattr__(self, attr_name, value):
        if (attr_name == '_obj') | (attr_name == '_message_counts'):  # special proxy attributes.
            print 'setting the PROXY attribute: "' + attr_name + '"'
            object.__setattr__(self, attr_name, value)
        else:
            print 'setting the REAL attribute: "' + attr_name + '"'
            self._message_counts[attr_name + "="] += 1
            object.__setattr__(self._obj, attr_name, value)

    def messages(self):
        return self._message_counts.keys()

    def number_of_times_called(self, attr_name):
        return self._message_counts[attr_name]

    def was_called(self, attr_name):
        return attr_name in self._message_counts
What I did was take all the calls to attributes in the proxy and call them via object.__getattribute__ to avoid recursion.
That did not work for methods, so I wrapped the calls in a try..except AttributeError to try them first on the proxy, and then, if they raise an error, on the child object.
If anyone has a more elegant solution would love to see it.
from runner.koan import *
from collections import Counter

class Proxy(object):
    def __init__(self, target_object):
        self._messages = []
        self._obj = target_object

    def messages(self):
        return self._messages

    def was_called(self, message):
        return message in self._messages

    def number_of_times_called(self, message):
        _count = Counter(self._messages).get(message)
        if _count:
            return _count
        else:  # catch None
            return 0

    def __getattribute__(self, attr_name):
        try:  # call on self
            retval = object.__getattribute__(self, attr_name)
        except AttributeError:  # call on child object
            retval = self._obj.__getattribute__(attr_name)
            object.__getattribute__(self, '_messages').append(attr_name)
        return retval

    def __setattr__(self, attr_name, attr_value):
        if hasattr(self, '_obj'):  # call child object and log message
            self._obj.__setattr__(attr_name, attr_value)
            attr_name += "="
            object.__getattribute__(self, '_messages').append(attr_name)
        else:  # use this before _obj is set in __init__
            object.__setattr__(self, attr_name, attr_value)

    def messages(self):
        return self._messages
Why not use method_missing?
My answer:
class Proxy
  def initialize(target_object)
    @object = target_object
    # ADD MORE CODE HERE
    @messages = []
  end

  # WRITE CODE HERE
  def method_missing(method_name, *args, &block)
    @messages.push method_name unless method_name == :messages
    @object.send method_name, *args, &block
  end

  def messages
    @messages
  end

  def called? target
    @messages.include? target
  end

  def number_of_times_called target
    result = 0
    @messages.each do |t|
      result += 1 if t == target
    end
    result
  end
end
