How to clear instance data without setattr? - python

I wanted to make a class in Python that can be stored and restored (in a DB, a single file, or just as a string). I wrote this (shortened):
from json import JSONDecoder, JSONEncoder

def json_decode(object): return JSONDecoder().decode(object)

def json_encode(object): return JSONEncoder().encode(object)

class Storage:
    __separator__ = 'ANY OF ANYS'
    __keys__ = []
    __vals__ = []
    __slots__ = ('__keys__', '__vals__', '__separator__')

    def __getattr__(self, key):
        try:
            return self.__vals__[self.__keys__.index(key)]
        except IndexError:
            raise AttributeError

    def __setattr__(self, key, val):
        self.__keys__.append(key)
        self.__vals__.append(val)

    def store(self):
        return (json_encode(self.__keys__) + self.__separator__ +
                json_encode(self.__vals__))

    def restore(self, stored):
        stored = stored.split(self.__separator__)
        for (key, val) in zip(json_decode(stored[0]), json_decode(stored[1])):
            setattr(self, key, val)
And yes, that works, but... when I make more instances, they all behave like a singleton.
So how do I set an attribute on an instance without __setattr__?
PS. I had an idea: add a special case in __setattr__/__getattr__ for the keys/vals lists, but that would make a mess.

Your __separator__, __keys__, __vals__ and __slots__ are attributes of the class object Storage itself. I don't know if it's exactly the same, but I'd call them static variables of the class.
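A minimal illustration of why mutable class attributes act as shared state (Shared is a made-up name):

class Shared:
    items = []              # one list object, attached to the class itself

a = Shared()
b = Shared()
a.items.append(1)           # mutates the class-level list
print(b.items)              # [1] -- every instance sees the same list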
If you want to have different values for each instance of Storage, define each of these variables in your __init__ function:
class Storage(object):
    __slots__ = ('__keys__', '__vals__', '__separator__')

    def __init__(self):
        super(Storage, self).__setattr__('__separator__', "ANY OF ANYS")
        super(Storage, self).__setattr__('__keys__', [])
        super(Storage, self).__setattr__('__vals__', [])

    def __getattr__(self, key):
        try:
            vals = getattr(self, '__vals__')
            keys = getattr(self, '__keys__')
            return vals[keys.index(key)]
        except (IndexError, ValueError):  # list.index raises ValueError for a missing key
            raise AttributeError

    def __setattr__(self, key, val):
        vals = getattr(self, '__vals__')
        keys = getattr(self, '__keys__')
        vals.append(val)
        keys.append(key)
Edited so that __getattr__ and __setattr__ work.
I ran into that problem two days ago. I don't know if it's exactly your problem, but you said "it's like I have a singleton".

You could make your Storage class a subclass of a special base class like this:
class Singleton(object):
    def __new__(cls, *args, **kwargs):
        if '_inst' not in vars(cls):
            cls._inst = super(Singleton, cls).__new__(cls)
        return cls._inst

class Storage(Singleton):
    ....
As long as you don't override __new__() in your subclass, all subsequent calls to create new instances after the first will return the one first created.
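A quick check of the intended behaviour (a sketch; the Storage body is omitted):

class Storage(Singleton):
    pass

a = Storage()
b = Storage()
print(a is b)   # True: every call returns the first instance created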

Related

How do I define setter, getter for dynamically added attributes

I have a class as follows:
class A:
    def __init__(self):
        pass

    def add_attr(self, name):
        setattr(self, name, 'something')
How do I define a custom setter and getter for self.name? I cannot use __setattr__ or __getattribute__ because that would change the behaviour of add_attr too.
EDIT: the users of this class will add an arbitrary number of attributes with arbitrary names:
a = A()
a.add_attr('attr1')
a.add_attr('attr2')
I want custom behavior for only these user added attributes.
Building off Devesh Kumar Singh's answer, I would implement it in some way like this:
class A:
    def __init__(self):
        self.attrs = {}

    def __setattr__(self, key, value):
        if key in self.__dict__.get('attrs', {}):
            self.set_attr(key, value)
        else:
            object.__setattr__(self, key, value)

    def __getattribute__(self, key):
        if key in object.__getattribute__(self, '__dict__').get('attrs', {}):
            return object.__getattribute__(self, 'get_attr')(key)
        return object.__getattribute__(self, key)

    def get_attr(self, key):
        r = self.attrs[key]
        # logic
        return r

    def set_attr(self, key, value):
        # logic
        self.attrs[key] = value

    def add_attr(self, key, value=None):
        self.attrs[key] = value
add_attr is only used to initialise the variable the first time. You could also edit __setattr__ to store all new attributes in self.attrs rather than self.__dict__.
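As a rough sketch of that variant (an illustration, not part of the original answer; B is a made-up class name), everything except the backing dict itself could be routed into self.attrs:

class B:
    def __init__(self):
        object.__setattr__(self, 'attrs', {})   # bypass __setattr__ for the backing dict

    def __setattr__(self, key, value):
        # custom set logic could go here
        self.attrs[key] = value

    def __getattr__(self, key):
        # only called when normal lookup fails, i.e. for dynamic attributes
        try:
            # custom get logic could go here
            return self.attrs[key]
        except KeyError:
            raise AttributeError(key)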
Custom getter and setter logic? That's what a property is made for. Usually these are used to magically mask function calls and make them look like attribute access.
class MyDoubler(object):
    def __init__(self, x):
        self._x = x

    @property
    def x(self):
        return self._x * 2

    @x.setter
    def x(self, value):
        self._x = value

>>> md = MyDoubler(10)
>>> md.x
20
>>> md.x = 20
>>> md.x
40
>>> md._x
20
But there's no rule saying you can't abuse that power to add custom behavior to your getters and setters.
class A(object):
    def __init__(self):
        pass

    @staticmethod
    def default_getter_factory(name):
        def default_getter(self):
            return getattr(self, name)
        return default_getter

    @staticmethod
    def default_setter_factory(name):
        def default_setter(self, value):
            setattr(self, name, value)
        return default_setter

    def add_attr(self, name, getterfactory=None, setterfactory=None):
        private_name = f"_{name}"
        if getterfactory is None:
            getterfactory = self.__class__.default_getter_factory
        if setterfactory is None:
            setterfactory = self.__class__.default_setter_factory
        getter, setter = getterfactory(private_name), setterfactory(private_name)
        getter = property(getter)
        setattr(self.__class__, name, getter)
        setattr(self.__class__, name, getter.setter(setter))
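A quick check of how this might be used (a sketch based on the snippet above; note that the property is installed on the class, so it affects all instances):

a = A()
a.add_attr('foo')      # installs a 'foo' property on class A, backed by '_foo'
a.foo = 5              # goes through the generated setter
print(a.foo)           # 5, via the generated getter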
That said this is all a bit silly, and chances are that whatever it is you're trying to do is a thing that shouldn't be done. Dynamic programming is all well and good, but if I were to review code that did this, I would think very long and hard about alternative solutions before approving it. This reeks of technical debt to me.
One possibility I could think of is to have a dictionary of dynamic attributes, and to set and get the dynamic attributes using that dictionary:
class A:
    def __init__(self):
        # Dictionary of attributes
        self.attrs = {}

    # Set attribute
    def set_attr(self, name):
        self.attrs[name] = 'something'

    # Get attribute
    def get_attr(self, name):
        return self.attrs.get(name)

a = A()
a.set_attr('var')
print(a.get_attr('var'))
The output will be something.
Or an alternative is to use property to add attributes explicitly outside the class, as described here:
class A:
    def __init__(self):
        pass

a = A()

# Add attributes via property
a.attr_1 = property(lambda self: self.attr_1)
a.attr_2 = property(lambda self: self.attr_2)

# Assign them values and print them
a.attr_1 = 4
a.attr_2 = 6
print(a.attr_1, a.attr_2)
The output will be 4 6
I am going to answer my own question, just for reference. This is based on the other answers here. The idea is to use the default __setattr__ and __getattribute__ on attributes not added through add_attr.
class A:
    def __init__(self):
        self.attrs = {}

    def add_attr(self, name):
        self.attrs[name] = 'something'

    def __getattribute__(self, name):
        try:
            object.__getattribute__(self, 'attrs')[name]  # valid only if added by user
            # custom logic and return
        except (KeyError, AttributeError):
            return object.__getattribute__(self, name)

    def __setattr__(self, name, val):
        # similar to __getattribute__
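The elided __setattr__ could be filled in along the same lines; a sketch of what that method (inside class A) might look like, with the comment standing in for whatever custom write behaviour is wanted:

def __setattr__(self, name, val):
    try:
        object.__getattribute__(self, 'attrs')[name]  # valid only if added by user
        # custom logic, then store the new value
        object.__getattribute__(self, 'attrs')[name] = val
    except (KeyError, AttributeError):
        object.__setattr__(self, name, val)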

Overwrite several methods of a class with a loop

I would like to know if there is an easy way to make identical edits to several methods of a class. An example:
class Dog():
    def __init__(self):
        self.name = 'abc'
        self.age = 1

    def setName(self, newValue):
        self.name = newValue

    def setAge(self, newValue):
        self.age = newValue

class TalkingDog(Dog):
    def __init__(self):
        super().__init__()
        # The end is in pseudo code:
        for method in TalkingDog.allMethods:
            method = method + "print('I have been edited !')"
I know that I could also override each method, but in a situation with tens of methods that would be a little boring...
So I tried this :
class TalkingDog(Dog):
    def __init__(self):
        super().__init__()
        for method in self.__dir__():
            if method.startswith('set'):
                oldMethod = getattr(self, method)
                def _newMethod(newValue):
                    oldMethod(newValue)
                    print('I have been edited !')
                setattr(self, method, _newMethod)

a = TalkingDog()
print(a.setName)  # >>> <function TalkingDog.__init__.<locals>._newMethod at 0x0000000002C350D0>
That almost works, but setName is no longer a method; it's an attribute which contains a function. I completely understand why, but I'm trying to get a cleaner result. With that result, I risk having problems later. For example, I can't use the pickle library with that object (I get the error _pickle.PicklingError: Can't pickle <function TalkingDog.__init__.<locals>._newMethod at 0x00000000003DCBF8>: attribute lookup _newMethod on __main__ failed).
The Pythonic way to do this is probably to use the descriptor protocol, which is also what properties use:
class VocalAttribute:
    def __init__(self, name, feedback):
        """Called when you first create the descriptor."""
        self.name = name          # the name of the attribute 'behind' the property
        self.feedback = feedback  # the feedback to show when the value changes

    def __get__(self, obj, objtype=None):
        """Called when you get the descriptor value."""
        return getattr(obj, self.name)

    def __set__(self, obj, value):
        """Called when you set the descriptor value."""
        prev = getattr(obj, self.name, None)
        if value != prev:
            setattr(obj, self.name, value)
            print(self.feedback)

    def __delete__(self, obj):
        """Called when you delete the descriptor value."""
        delattr(obj, self.name)

class Foo:
    bar = VocalAttribute('_bar', 'I have been edited!')

foo = Foo()
print('1.')
foo.bar = 'hello'
print('2.')
foo.bar = 'hello'
print('3.')
foo.bar = 'world'
Output:
1.
I have been edited!
2.
3.
I have been edited!
Note that this only gives feedback when the new value is different to the old one - you can tweak the behaviour as needed in __set__. It also means you can directly read from and assign to foo.bar, rather than needing to call getters and setters (what is this, Java?)
Since a decorator can also be called explicitly, here is a way to use one:
import inspect

def updater(obj, call_back, call_back_args=(), call_back_kw=None, replace=False):
    # ability to be called on the fly with different args and kw for the callback
    # now it returns the updated obj (instance or class)
    # but it could be a factory returning a new obj; in that case make a copy of obj,
    # update the copy and return it
    call_back_kw = call_back_kw or {}

    def update_function(fn):
        def wrapper(*args, **kw):
            if replace:
                # call only the callback
                res = call_back(*call_back_args, **call_back_kw)
            else:
                res = fn(*args, **kw)
                call_back(*call_back_args, **call_back_kw)
            return res
        return wrapper

    # get all methods of the obj
    # and apply update_function (a decorator) to all methods
    for name, m in inspect.getmembers(
            obj, predicate=lambda x: inspect.isfunction(x) or inspect.ismethod(x)):
        # make the selection here
        # could be made on the name, for instance
        if not name.startswith('_'):
            new_m = update_function(m)
            setattr(obj, name, new_m)
    return obj

# declare a callback
def call_back(*args, **kw):
    # simple callback
    print("I have been edited and called with %r args and %r kw " % (args, kw))

a = Dog()

# could be called on an instance or a class
# apply the callback on all "public" methods
updater(
    a,
    call_back,
    call_back_args=(2, 3, 4),
    call_back_kw={"kw1": "v_1"}
)

python nested attributes encapsulation

I have a question about encapsulating nested attributes in Python. Let's assume a few classes:
Here we have a main class (DataWrapper) that contains two more classes: InnerWrapper1 and InnerWrapper2. Both inner wrappers contain two attributes.
class DataWrapper(object):
    @property
    def inner_wrapper1(self):
        return self.__inner_wrapper1

    @inner_wrapper1.setter
    def inner_wrapper1(self, value):
        self.__inner_wrapper1 = value

    @property
    def inner_wrapper2(self):
        return self.__inner_wrapper2

    @inner_wrapper2.setter
    def inner_wrapper2(self, value):
        self.__inner_wrapper2 = value

class InnerWrapper1(object):
    @property
    def property1(self):
        return self.__property1

    @property1.setter
    def property1(self, value):
        self.__property1 = value

    @property
    def property2(self):
        return self.__property2

    @property2.setter
    def property2(self, value):
        self.__property2 = value

class InnerWrapper2(object):
    @property
    def property3(self):
        return self.__property3

    @property3.setter
    def property3(self, value):
        self.__property3 = value

    @property
    def property4(self):
        return self.__property4

    @property4.setter
    def property4(self, value):
        self.__property4 = value
Is it possible to somehow override the __getattr__ and __setattr__ methods to make the encapsulation below possible? What I want to achieve is to have access to those nested attributes from the top class, DataWrapper.
data_wrapper = DataWrapper()
data_wrapper.property1 = "abc"
...
var = data_wrapper.property2
...
The first thing that came to my mind was to call hasattr in __getattr__, but that gave a maximum recursion depth error...
Here's the complete code:
class DataWrapper(object):
    def __init__(self):
        self.inner_wrapper1 = InnerWrapper1()
        self.inner_wrapper2 = InnerWrapper2()

    @property
    def inner_wrapper1(self):
        return self.__inner_wrapper1

    @inner_wrapper1.setter
    def inner_wrapper1(self, value):
        self.__inner_wrapper1 = value

    @property
    def inner_wrapper2(self):
        return self.__inner_wrapper2

    @inner_wrapper2.setter
    def inner_wrapper2(self, value):
        self.__inner_wrapper2 = value

    def __setattr__(self, attribute, value):
        #if attribute in {'innerwrapper1', 'innerwrapper2'}:
        if attribute in ['inner_wrapper1', 'inner_wrapper2']:
            return super(DataWrapper, self).__setattr__(attribute, value)
        if hasattr(self.inner_wrapper1, attribute):
            return setattr(self.inner_wrapper1, attribute, value)
        elif hasattr(self.inner_wrapper2, attribute):
            return setattr(self.inner_wrapper2, attribute, value)

    def __getattr__(self, attribute):
        try:
            return getattr(self.inner_wrapper1, attribute)
        except AttributeError: pass
        try:
            return getattr(self.inner_wrapper2, attribute)
        except AttributeError: pass

class InnerWrapper1(object):
    @property
    def property1(self):
        return self.__property1

    @property1.setter
    def property1(self, value):
        self.__property1 = value

    @property
    def property2(self):
        return self.__property2

    @property2.setter
    def property2(self, value):
        self.__property2 = value

class InnerWrapper2(object):
    @property
    def property3(self):
        return self.__property3

    @property3.setter
    def property3(self, value):
        self.__property3 = value

    @property
    def property4(self):
        return self.__property4

    @property4.setter
    def property4(self, value):
        self.__property4 = value

def main():
    data_wrapper = DataWrapper()
    data_wrapper.property1 = "abc"

if __name__ == "__main__":
    main()
You get an infinite recursion error because you forgot to take into account setting the inner_wrapper1 and inner_wrapper2 attributes in your __init__ method.
When you do this:
self.inner_wrapper1 = InnerWrapper1()
Python will also use your __setattr__ method. This then tries to use self.inner_wrapper1 which doesn't yet exist so __getattr__ is called, which tries to use self.inner_wrapper1 which doesn't yet exist, and you enter into an infinite recursion loop.
In __setattr__ delegate attribute setting to the superclass:
def __setattr__(self, attribute, value):
    if attribute in {'inner_wrapper1', 'inner_wrapper2'}:
        return super(DataWrapper, self).__setattr__(attribute, value)
    if hasattr(self.inner_wrapper1, attribute):
        return setattr(self.inner_wrapper1, attribute, value)
    elif hasattr(self.inner_wrapper2, attribute):
        return setattr(self.inner_wrapper2, attribute, value)
If you used a single leading underscore for 'private' attributes (so _innerwrapper1 and _innerwrapper2) you could just test for that:
def __setattr__(self, attribute, value):
    if attribute[0] == '_':  # private attribute
        return super(DataWrapper, self).__setattr__(attribute, value)
so you don't have to hardcode a whole set of names.
Since your updated full script uses __inner_wrapper1 and __inner_wrapper2 as the actual attribute names, and you are using properties, you'll have to adjust your __setattr__ test to look for those names. Because you are using double-underscore names you need to adjust for the name mangling of such attributes:
def __setattr__(self, attribute, value):
    if attribute in {
            'inner_wrapper1', 'inner_wrapper2',
            '_DataWrapper__inner_wrapper1', '_DataWrapper__inner_wrapper2'}:
        return super(DataWrapper, self).__setattr__(attribute, value)
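For reference, a tiny illustration of the name mangling being accounted for above (Example is a made-up class name):

class Example(object):
    def __init__(self):
        self.__secret = 42      # stored under the mangled name

e = Example()
print('_Example__secret' in e.__dict__)   # True
print(e._Example__secret)                 # 42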
Unless you are going to subclass DataWrapper and must protect your attributes from accidental overriding, I'd avoid using double-underscored names altogether, however. In Pythonic code, you don't worry about other code accessing attributes, there is no concept of truly private attributes.
Using properties is also overkill here; properties don't buy you encapsulation. In Python you'd only use them to simplify the API (replacing a method call with attribute access).
Note that the hasattr() tests for the InnerWrapper* property* attributes will fail because you don't have default values:
>>> inner = InnerWrapper1()
>>> hasattr(inner, 'property1')
False
hasattr() doesn't test for properties, it simply tries to access an attribute and if any exception is raised it returns False:
>>> inner = InnerWrapper1()
>>> hasattr(inner, 'property1')
False
>>> inner.property1
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "<string>", line 43, in property1
AttributeError: 'InnerWrapper1' object has no attribute '_InnerWrapper1__property1'
>>> inner.property1 = 'foo'
>>> inner.property1
'foo'
>>> hasattr(inner, 'property1')
True
By removing all the @property objects you can simplify this greatly:
class DataWrapper(object):
    def __init__(self):
        self._inner_wrapper1 = InnerWrapper1()
        self._inner_wrapper2 = InnerWrapper2()

    def __setattr__(self, attribute, value):
        if attribute[0] == '_':
            return super(DataWrapper, self).__setattr__(attribute, value)
        if hasattr(self._inner_wrapper1, attribute):
            return setattr(self._inner_wrapper1, attribute, value)
        elif hasattr(self._inner_wrapper2, attribute):
            return setattr(self._inner_wrapper2, attribute, value)

    def __getattr__(self, attribute):
        try:
            return getattr(self._inner_wrapper1, attribute)
        except AttributeError: pass
        return getattr(self._inner_wrapper2, attribute)

class InnerWrapper1(object):
    property1 = None
    property2 = None

class InnerWrapper2(object):
    property3 = None
    property4 = None
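A quick usage check of the simplified version (a sketch based on the classes above):

data_wrapper = DataWrapper()
data_wrapper.property1 = "abc"                    # routed to _inner_wrapper1 by __setattr__
print(data_wrapper.property1)                     # abc, found via __getattr__
print(data_wrapper._inner_wrapper1.property1)     # abc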

Writing a setter method for a list property

In a class I am writing, one of the member properties is a list:
@property
def address_list(self):
    return self._address_list

@address_list.setter
def address_list(self, addr_list):
    if type(addr_list) is not list:
        return
    self._address_list = addr_list
I want to be able to write a property so that if someone appends something onto the list, it calls something like another setter function, but for adding onto the list:
Object.address_list.append(value)
would call something like
@property.appender  # I made this line up
def address_list.append(self, value):
    if value >= 0 and value <= 127:
        self._address_list.append(value)
so I could safely append values to my private list. Is something like this possible without having to create a new type of list object?
EDIT: address_list returns a standard Python list.
You would need to create a new AddressList class to handle this. Something like:
class Wrapper(object):
    """Wrapper class that provides proxy access to an instance of some
    internal instance."""
    __wraps__ = None
    __ignore__ = "class mro new init setattr getattr getattribute"

    def __init__(self, obj):
        if self.__wraps__ is None:
            raise TypeError("base class Wrapper may not be instantiated")
        elif isinstance(obj, self.__wraps__):
            self._obj = obj
        else:
            raise ValueError("wrapped object must be of %s" % self.__wraps__)

    # provide proxy access to regular attributes of wrapped object
    def __getattr__(self, name):
        return getattr(self._obj, name)

    # create proxies for wrapped object's double-underscore attributes
    class __metaclass__(type):
        def __init__(cls, name, bases, dct):
            def make_proxy(name):
                def proxy(self, *args):
                    return getattr(self._obj, name)
                return proxy
            type.__init__(cls, name, bases, dct)
            if cls.__wraps__:
                ignore = set("__%s__" % n for n in cls.__ignore__.split())
                for name in dir(cls.__wraps__):
                    if name.startswith("__"):
                        if name not in ignore and name not in dct:
                            setattr(cls, name, property(make_proxy(name)))

class AddressList(Wrapper):
    __wraps__ = list

    def append(self, x):
        if 0 <= x <= 127:
            self._obj.append(x)
        else:
            raise ValueError("Cannot append %r" % x)

class MyContainer:
    def __init__(self):
        self.address_list = AddressList([])

x = MyContainer()
x.address_list.append(1)
x.address_list.append(7)
x.address_list.append(-1)
print x.address_list
Note that this answer borrows heavily from https://stackoverflow.com/a/9059858/541038.

python koans: class proxy

I'm solving the Python koans.
I hadn't had any real problems until the 34th.
This is the problem:
Project: Create a Proxy Class
In this assignment, create a proxy class (one is started for you
below). You should be able to initialize the proxy object with any
object. Any attributes called on the proxy object should be forwarded
to the target object. As each attribute call is sent, the proxy
should record the name of the attribute sent.
The proxy class is started for you. You will need to add a method
missing handler and any other supporting methods. The specification
of the Proxy class is given in the AboutProxyObjectProject koan.
Note: This is a bit trickier than its Ruby Koans counterpart, but you
can do it!
And this is my solution so far:
class Proxy(object):
    def __init__(self, target_object):
        self._count = {}
        # initialize '_obj' attribute last. Trust me on this!
        self._obj = target_object

    def __setattr__(self, name, value): pass

    def __getattr__(self, attr):
        if attr in self._count:
            self._count[attr] += 1
        else:
            self._count[attr] = 1
        return getattr(self._obj, attr)

    def messages(self):
        return self._count.keys()

    def was_called(self, attr):
        if attr in self._count:
            return True
        else: return False

    def number_of_times_called(self, attr):
        if attr in self._count:
            return self._count[attr]
        else: return False
It works until this test:
def test_proxy_records_messages_sent_to_tv(self):
    tv = Proxy(Television())
    tv.power()
    tv.channel = 10
    self.assertEqual(['power', 'channel='], tv.messages())
where tv.messages() is ['power'], because tv.channel = 10 is handled by the proxy object and not by the television object.
I've tried to manipulate the __setattr__ method, but I always end up in an infinite loop.
Edit 1:
I'm trying this:
def __setattr__(self, name, value):
    if hasattr(self, name):
        object.__setattr__(self, name, value)
    else:
        object.__setattr__(self._obj, name, value)
But then I get this error in a loop on the last entry:
RuntimeError: maximum recursion depth exceeded while calling a Python object
File "/home/kurojishi/programmi/python_koans/python 2/koans/about_proxy_object_project.py", line 60, in test_proxy_method_returns_wrapped_object
tv = Proxy(Television())
File "/home/kurojishi/programmi/python_koans/python 2/koans/about_proxy_object_project.py", line 25, in __init__
self._count = {}
File "/home/kurojishi/programmi/python_koans/python 2/koans/about_proxy_object_project.py", line 33, in __setattr__
object.__setattr__(self._obj, name, value)
File "/home/kurojishi/programmi/python_koans/python 2/koans/about_proxy_object_project.py", line 36, in __getattr__
if attr in self._count:
The loop is in __getattr__.
You are using hasattr in __setattr__ to decide whether you should write to the local or proxied object. This works well for all but one case.
In your __init__ you have the following line:
self._count = {}
This calls __setattr__ with '_count', which does not exist at that point, so hasattr returns False and the assignment is forwarded to the proxied object.
If you want to use your approach you have to write your __init__ like this:
def __init__(self, target_object):
    object.__setattr__(self, '_count', {})
    # initialize '_obj' attribute last. Trust me on this!
    object.__setattr__(self, '_obj', target_object)
As I understand it, your problem may be related to the recursive call that happens when you set an attribute value. From the docs:
If __setattr__() wants to assign to an instance attribute, it should not simply execute "self.name = value" -- this would cause a recursive call to itself. Instead, it should insert the value in the dictionary of instance attributes, e.g., "self.__dict__[name] = value". For new-style classes, rather than accessing the instance dictionary, it should call the base class method with the same name, for example, "object.__setattr__(self, name, value)".
__setattr__ is called on all assignments. In that sense it's more like __getattribute__ than __getattr__. This also affects code in the __init__ method.
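A minimal illustration of that point (Demo is a made-up class):

class Demo(object):
    def __setattr__(self, name, value):
        print('__setattr__ called for', name)
        object.__setattr__(self, name, value)

    def __init__(self):
        self.x = 1      # already routed through __setattr__ above

Demo()                  # prints: __setattr__ called for x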
This means that the first branch of this code will almost always fail, only attributes inherited from object will pass the test:
def __setattr__(self, name, value):
    if hasattr(self, name):
        object.__setattr__(self, name, value)
    else:
        object.__setattr__(self._obj, name, value)
Instead we can assume that assignments are meant for the Proxy unless it has an _obj attribute. Hence the comment in __init__: we set up our proxy's attributes, then add the target object, and all future assignments get sent to it.
def __setattr__(self, name, value):
    if hasattr(self, '_obj'):
        object.__setattr__(self._obj, name, value)
    else:
        object.__setattr__(self, name, value)
But by using hasattr we would also need to alter __getattr__ to check for _obj to prevent recursion:
def __getattr__(self, attr):
    if attr == '_obj':
        raise AttributeError
    if attr in self._count:
        self._count[attr] += 1
    else:
        self._count[attr] = 1
    return getattr(self._obj, attr)
An alternative would be to inspect the proxy's __dict__ attribute directly in the __setattr__ method:
def __setattr__(self, name, value):
    if '_obj' in self.__dict__:
        ...
From the test, the proxy is required to log all attribute calls made through it, and the proxy has only a few built-in methods which are used exclusively for logging, so my answer was:
class Proxy(object):
    def __init__(self, target_object):
        self.logs = []
        self._obj = target_object

    def __getattribute__(self, attrname):
        if attrname in ['_obj', 'logs', 'messages', 'was_called', 'number_of_times_called']:
            return object.__getattribute__(self, attrname)
        else:
            self.logs.append(attrname)
            return object.__getattribute__((object.__getattribute__(self, '_obj')), attrname)

    def __setattr__(self, name, value):
        if hasattr(self, '_obj'):
            self.logs.append(name)
            object.__setattr__(object.__getattribute__(self, '_obj'), name, value)
        else:
            object.__setattr__(self, name, value)
After this it is quite easy to implement the other methods ('messages', 'was_called', ...); see the sketch below.
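For example, the remaining helpers might look like this (a sketch, assuming the self.logs list defined above):

def messages(self):
    return self.logs

def was_called(self, attrname):
    return attrname in self.logs

def number_of_times_called(self, attrname):
    return self.logs.count(attrname)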
Sorry for necroing an old question.
I also found out that __getattribute__ can be changed: just check whether the attribute is in the target object.
def __getattribute__(self, attrname):
    if attrname not in dir(object.__getattribute__(self, '_obj')):
        return object.__getattribute__(self, attrname)
    else:
        self.logs.append(attrname)
        return object.__getattribute__((object.__getattribute__(self, '_obj')), attrname)
from collections import Counter

class Proxy(object):
    """Proxy class wraps any other class, and adds functionality to remember and report all messages called.
    Limitations include that proxy blocks all direct subclass calls to:
    messages, number_of_times_called, was_called, _obj, and _message_counts.
    These calls must be made directly like my_proxy_instance._obj.messages.
    """
    def __init__(self, target_object):
        print 'initializing a proxy for ' + target_object.__class__.__name__
        # WRITE CODE HERE
        self._message_counts = Counter()
        # initialize '_obj' attribute last. Trust me on this!
        self._obj = target_object
        # WRITE CODE HERE

    def __getattr__(self, attr_name):
        print 'getting an attribute: "' + attr_name + '" from "' + self._obj.__class__.__name__ + '"'
        self._message_counts[attr_name] += 1
        print self._message_counts
        return object.__getattribute__(self._obj, attr_name)

    #def __getattribute__(self, attr_name):
    #    print "intercepted!~ " + attr_name
    #    object.__getattribute__(self, attr_name)

    def __setattr__(self, attr_name, value):
        if (attr_name == '_obj') or (attr_name == '_message_counts'):  # special proxy attributes.
            print 'setting the PROXY attribute: "' + attr_name + '"'
            object.__setattr__(self, attr_name, value)
        else:
            print 'setting the REAL attribute: "' + attr_name + '"'
            self._message_counts[attr_name + "="] += 1
            object.__setattr__(self._obj, attr_name, value)

    def messages(self):
        return self._message_counts.keys()

    def number_of_times_called(self, attr_name):
        return self._message_counts[attr_name]

    def was_called(self, attr_name):
        return attr_name in self._message_counts
What I did was take all the calls to attributes in the proxy and call them via object.__getattribute__ to avoid recursion.
That did not work for methods, so I wrapped the method calls in a try..except AttributeError to try them first on the proxy, and then, if they raise an error, try them on the child object.
If anyone has a more elegant solution would love to see it.
from runner.koan import *
from collections import Counter

class Proxy(object):
    def __init__(self, target_object):
        self._messages = []
        self._obj = target_object

    def messages(self):
        return self._messages

    def was_called(self, message):
        return message in self._messages

    def number_of_times_called(self, message):
        _count = Counter(self._messages).get(message)
        if _count:
            return _count
        else:  # catch None
            return 0

    def __getattribute__(self, attr_name):
        try:  # call on self
            retval = object.__getattribute__(self, attr_name)
        except AttributeError:  # call on child object
            retval = self._obj.__getattribute__(attr_name)
            object.__getattribute__(self, '_messages').append(attr_name)
        return retval

    def __setattr__(self, attr_name, attr_value):
        if hasattr(self, '_obj'):  # call child object and log message
            self._obj.__setattr__(attr_name, attr_value)
            attr_name += "="
            object.__getattribute__(self, '_messages').append(attr_name)
        else:  # use this before _obj is set in __init__
            object.__setattr__(self, attr_name, attr_value)
Why not use method_missing?
My answer:
class Proxy
  def initialize(target_object)
    @object = target_object
    # ADD MORE CODE HERE
    @messages = []
  end

  # WRITE CODE HERE
  def method_missing(method_name, *args, &block)
    @messages.push method_name unless method_name == :messages
    @object.send method_name, *args, &block
  end

  def messages
    @messages
  end

  def called? target
    @messages.include? target
  end

  def number_of_times_called target
    result = 0
    @messages.each do |t|
      result += 1 if t == target
    end
    result
  end
end
