Overwrite several methods of a class with a loop - python

I would like to know if there is an easy way to apply the same edit to several methods of a class. An example:
class Dog():
    def __init__(self):
        self.name = 'abc'
        self.age = 1
    def setName(self, newValue):
        self.name = newValue
    def setAge(self, newValue):
        self.age = newValue

class TalkingDog(Dog):
    def __init__(self):
        super().__init__()
        # The end is in pseudo code:
        for method in TalkingDog.allMethods:
            method = method + "print('I have been edited !')"
I know that I could also override each method by hand, but with tens of methods that gets a bit tedious...
So I tried this:
class TalkingDog(Dog):
    def __init__(self):
        super().__init__()
        for method in self.__dir__():
            if method.startswith('set'):
                oldMethod = getattr(self, method)
                # bind oldMethod as a default argument so each wrapper keeps
                # its own method instead of the last one assigned in the loop
                def _newMethod(newValue, oldMethod=oldMethod):
                    oldMethod(newValue)
                    print('I have been edited !')
                setattr(self, method, _newMethod)
a = TalkingDog()
print(a.setName)
# <function TalkingDog.__init__.<locals>._newMethod at 0x0000000002C350D0>
That almost works, but setName is no longer a method: it's an instance attribute that happens to contain a function. I completely understand why, but I'm trying to get a cleaner result. With that result I risk having problems later. For example, I can't use the pickle library with that object (I get the error _pickle.PicklingError: Can't pickle <function TalkingDog.__init__.<locals>._newMethod at 0x00000000003DCBF8>: attribute lookup _newMethod on __main__ failed).

The Pythonic way to do this is probably to use the descriptor protocol, which is also what properties use:
class VocalAttribute:
    def __init__(self, name, feedback):
        """Called when you first create the descriptor."""
        self.name = name          # the name of the attribute 'behind' the property
        self.feedback = feedback  # the feedback to show when the value changes
    def __get__(self, obj, objtype=None):
        """Called when you get the descriptor value."""
        return getattr(obj, self.name)
    def __set__(self, obj, value):
        """Called when you set the descriptor value."""
        prev = getattr(obj, self.name, None)
        if value != prev:
            setattr(obj, self.name, value)
            print(self.feedback)
    def __delete__(self, obj):
        """Called when you delete the descriptor value."""
        delattr(obj, self.name)

class Foo:
    bar = VocalAttribute('_bar', 'I have been edited!')
foo = Foo()
print('1.')
foo.bar = 'hello'
print('2.')
foo.bar = 'hello'
print('3.')
foo.bar = 'world'
Output:
1.
I have been edited!
2.
3.
I have been edited!
Note that this only gives feedback when the new value is different from the old one - you can tweak the behaviour as needed in __set__. It also means you can read from and assign to foo.bar directly, rather than needing to call getters and setters (what is this, Java?).
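If you do want to keep real setter methods rather than replace them with attributes, another option (a minimal sketch of my own, not from the original answers) is to wrap the inherited methods on the class itself after it is defined; the wrappers then go through the normal function-to-method binding, and instances stay picklable because nothing is stored per instance:

import functools

class TalkingDog(Dog):          # Dog as defined in the question
    pass

def _add_feedback(method):
    @functools.wraps(method)
    def wrapper(self, *args, **kwargs):
        result = method(self, *args, **kwargs)
        print('I have been edited !')
        return result
    return wrapper

# patch the class once, not every instance
for _name, _attr in vars(Dog).items():
    if _name.startswith('set'):
        setattr(TalkingDog, _name, _add_feedback(_attr))

a = TalkingDog()
a.setName('Rex')        # prints: I have been edited !
print(type(a.setName))  # <class 'method'> - still a real bound method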

Since a decorator can be applied explicitly here, here is a way to use one:
import inspect

def updater(obj, call_back, call_back_args=(), call_back_kw=None, replace=False):
    # can be called on the fly with different args and kw for the callback;
    # it returns the updated obj (instance or class), but it could also be a
    # factory returning a new obj - in that case make a copy of obj, update
    # the copy and return it
    call_back_kw = call_back_kw or {}

    def update_function(fn):
        def wrapper(*args, **kw):
            if replace:
                # call only the callback
                res = call_back(*call_back_args, **call_back_kw)
            else:
                res = fn(*args, **kw)
                call_back(*call_back_args, **call_back_kw)
            return res
        return wrapper

    # get all methods of the obj
    # and apply update_function (a decorator) to all of them
    for name, m in inspect.getmembers(
            obj, predicate=lambda x: inspect.isfunction(x) or inspect.ismethod(x)):
        # make the selection here
        # it could be based on the name, for instance
        if not name.startswith('_'):
            new_m = update_function(m)
            setattr(obj, name, new_m)
    return obj

# declare a callback
def call_back(*args, **kw):
    # simple callback
    print("I have been edited and called with %r args and %r kw " % (args, kw))
a = Dog()
# could be called on instance or class
# apply the callback on all "public" methods
updater(
    a,
    call_back,
    call_back_args=(2, 3, 4),
    call_back_kw={"kw1": "v_1"},
)
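For completeness, a short usage check (my addition) with the Dog instance from above:

a.setName('Rex')
# I have been edited and called with (2, 3, 4) args and {'kw1': 'v_1'} kw
print(a.name)
# Rex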

Related

How does Python turn a function into a method?

I know that functions are just descriptors, like this:
def func(self):
    print(self.name)

class C:
    def __init__(self, name):
        self.name = name

C.func = func
c = C("foo")
c.func()
At first I thought that c.func is equivalent to C.func.__get__(c), and indeed C.func.__get__(c) returns a bound method. But when I set func.__get__ to None, c.func still returns a bound method.
def func(self):
    print(self.name)

class C:
    def __init__(self, name):
        self.name = name

func.__get__ = None
C.func = func
c = C("foo")
c.func
output:
<bound method func of <__main__.C object at 0x0000027EB23BF088>>
So I'm confused. Moreover, I found that when accessing a function from an instance, Python actually calls the class's __getattribute__ method, which returns a bound method.
def func(self):
    print(self.name)

func.__get__ = None

class C:
    def __getattribute__(self, name):
        r = super().__getattribute__(name)
        print(r)  # r is a bound method already
        return r
    def __init__(self, name):
        self.name = name

C.func = func
c = C("foo")
c.func
output:
<bound method func of <__main__.C object at 0x0000027EB243D1C8>>
func.__get__ doesn't seem to have any effect. So what happened inside __getattribute__? How does Python turn a function into a method? I've Googled and done some research, but I still can't find the answer.
Maybe I'm overcomplicating things. In my understanding a function is itself a descriptor, but as in the code below, even after I set func.__get__ to None it still works normally:
class C:
    def func(self):
        print('hello world')
    func.__get__ = None

c = C()
c.func()
but if I do the same with my own descriptor class, it raises a TypeError:
class C:
    class D:
        def __get__(self, inst, cls):
            if inst is None:
                return self
            return 'hello world'
    D.__get__ = None
    func = D()

c = C()
c.func
Well, here is what I found, if I understand it correctly. (I didn't know about descriptors before - that's exactly why I wanted to help; I'm still learning.)
First, let's look at __getattr__ and __getattribute__.
Let's have an empty class A
class A:
    pass
If I create an instance and try to access an attribute, we get an AttributeError, because there is no such attribute at the moment.
a = A()
a.some_property
The following occurs: __getattribute__ is called first for every attribute access; only if it raises an AttributeError does Python fall back to __getattr__. A simple check of the flow:
class FlowDemo:
    def __init__(self):
        self.inited_property = True

    def __getattribute__(self, item):
        # Skip a couple of noisy attributes to reduce spam; you can remove
        # this condition if you want.
        if item not in ('__class__', '__len__'):
            print('Get Attribute', item)
        # Call default behavior
        return super().__getattribute__(item)

    def __getattr__(self, item):
        print('Get Attr', item)
        if item == 'some_magic_name':
            return "It's magic!"
        raise AttributeError
fd = FlowDemo()
fd.inited_property
# Get Attribute inited_property
# True
fd.some_magic_name
# Get Attribute some_magic_name
# Get Attr some_magic_name
# "It's magic!"
fd.some_property
# Get Attribute some_property
# Get Attr some_property
# Traceback (most recent call last):
# File "<input>", line 1, in <module>
# File "stack-class-property-and-descriptors.py", line 67, in # __getattr__
# raise AttributeError
# AttributeError
This is probably understandable, including where it is used, but to be sure I'll give an example: this logic can serve as a dynamic representation of results from a database (mapping attributes onto an ordinary dict, list, etc.).
But it can also simply be logic around attribute access, such as an access counter or validation (for writes, that applies to __setattr__).
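A tiny sketch of that database-row idea (my own illustration, with hypothetical names):

class Row:
    def __init__(self, data):
        self._data = data          # e.g. a row fetched from a database

    def __getattr__(self, name):   # only called when normal lookup fails
        try:
            return self._data[name]
        except KeyError:
            raise AttributeError(name)

row = Row({'id': 1, 'name': 'abc'})
print(row.name)  # 'abc', looked up in the underlying dict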
And what about descriptors?
First let's look at data descriptors; they are easier for me to understand.
A data descriptor is a class that defines __get__ together with one or both of __set__ and __delete__.
Once such a descriptor is assigned as a class attribute, reading that attribute does not return the descriptor object but the value it produces through __get__, and assigning to the attribute does not overwrite the descriptor but goes through its __set__.
Example:
class WeekDayDescriptor:
    def __init__(self):
        self.__week_day = 0

    def __get__(self, instance, owner=None):
        return self.__week_day

    def __set__(self, instance, value):
        if not isinstance(value, int):
            raise TypeError('Value must be int')
        if not (0 <= value <= 6):
            raise ValueError('Value must be in range 0 - 6')
        self.__week_day = value

class Calendar:
    week_day = WeekDayDescriptor()

    def __init__(self, week_day):
        self.week_day = week_day
Demo:
c = Calendar(9)
# ValueError: Value must be in range 0-6
c = Calendar('6')
# TypeError: Value must be int
c = Calendar(3)
c.week_day = 6
c.week_day = 10
# ValueError: Value must be in range 0-6
c.week_day = 'monday'
# TypeError: Value must be int
And the same thing using the property decorator:
class Calendar:
    @property
    def week_day(self):
        return self.__week_day

    @week_day.setter
    def week_day(self, week_day):
        if not isinstance(week_day, int):
            raise TypeError('Value must be int')
        if not (0 <= week_day <= 6):
            raise ValueError('Value must be in range 0 - 6')
        self.__week_day = week_day

    def __init__(self, week_day):
        self.week_day = week_day
And now for non-data descriptors...
A non-data descriptor is one that defines only __get__.
As I understand it, every function is automatically a descriptor; that is how a function used as a method gets the reference to the object, self.
We can write our own descriptor for a function / method, but it's not completely straightforward - we have to help ourselves and work around it a bit.
def function_as_method(self, value):
    print(self, value)

class HelperDescriptor:
    def __get__(self, instance, owner):
        def wrapper(*args, **kwargs):
            return function_as_method(instance, *args, **kwargs)
        return wrapper

class Foo:
    baz = HelperDescriptor()
>>> bar = Foo()
>>> bar.baz(1)
<__main__.Foo object at 0x7f64f7768b70> 1
Source of the last code block (in Czech).
And finally, the problem you mentioned: we set __get__ to None and still get a bound method.
It's simple: Python doesn't directly distinguish between primitive data types and functions - everything is a variable (or attribute / property) that has a value. Whether that value is plain data or callable is a separate matter.
def f(): return True
print(type(f), f())
# <class 'function'> True
f = 123
print(type(f), f)
# <class 'int'> 123
Therefore, when we ask for obj.func or call obj.func() directly, the two magic methods shown above are invoked first - __getattribute__ and, if needed, __getattr__.
Only after we have obtained the reference to the function in memory is the function actually called.
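As a side note (my addition, not part of the original answer): this is also why assigning func.__get__ = None has no visible effect - the binding machinery looks up __get__ on the function's type, not on the function object itself:

def func(self):
    print(self.name)

func.__get__ = None        # stored in func.__dict__, never consulted
print(type(func).__get__)  # the real descriptor hook lives on the type

class C:
    name = 'foo'

C.func = func
print(C().func)            # still a bound method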
Again a simple example:
def func(self, value):
    print('Printing:', value)

class PrintDescriptor:
    def __init__(self, name):
        self.name = name

    def __get__(self, instance, owner):
        def wrapper(*args, **kwargs):
            print(f"Calling '{self.name}' method")
            return func(instance, *args, **kwargs)
        return wrapper

class B:
    foo = PrintDescriptor('foo')
    bar = PrintDescriptor('bar')

    def __getattribute__(self, item):
        if item not in ('__len__', '__class__', '__dict__'):
            print('Get Attribute', item)
        return super().__getattribute__(item)
Demo:
b = B()
b.foo
# Get Attribute foo
# <function PrintDescriptor.__get__.<locals>.wrapper at 0x7f774a782ee0>
b.foo(2)
# Get Attribute foo
# Calling 'foo' method
# Printing: 2
b.bar(4)
# Get Attribute bar
# Calling 'bar' method
# Printing: 4
Sources:
https://www.datacamp.com/community/tutorials/python-descriptors#above1
https://blog.milde.cz/post/319-pokrocile-techniky-v-pythonu-deskriptory/
Python docs: __get__
Python docs: __getattribute__
Python docs: __getattr__

How to make a "switch" between functions, depending on attribute access type (using class or instance)?

Disclaimer:
This article is more a recipe than a question, but I found the subject quite interesting, with almost no references on the Web.
If there is a better place on StackOverflow to publish this kind of article, please let me know.
Subject:
How can I force Python to invoke a different function depending on the type of attribute access (through the class or through an instance) - e.g. force Python to invoke different methods for MyClass.my_method() and MyClass().my_method()?
Usecase:
Let's say we have a custom Enum implementation (based on the Python 3.6 Enum, but with some customization). As a user of this Enum, we want to create a CustomEnum that inherits not just from Enum but also from str: class MyEnum(str, Enum). We also want to add an encoding and decoding feature. Our idea is to use MyEnum.encode to encode any object that includes our enum members, but to leave the original str.encode in force for instances of our enum class.
In short: MyEnum.encode invokes our custom encoding function, which makes perfect sense from this point of view. MyEnum() is a string, so MyEnum().encode should invoke the encode function inherited from str.
Solution:
Write a descriptor, which will work as a switch.
Full answer in my first post.
Solution:
As far as I know, descriptors are the only objects that can distinguish whether they are being invoked from the class or from an instance, thanks to the __get__ signature: __get__(self, instance, instance_type). This property lets us build a switch on top of it.
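Before the full implementation, a minimal sketch of that idea (my own simplification, not the code from the answer): __get__ receives instance=None for class access and the instance itself for instance access, so it can pick the function accordingly.

class switch:
    def __init__(self, cls_func, inst_func):
        self.cls_func = cls_func
        self.inst_func = inst_func

    def __get__(self, instance, owner):
        if instance is None:
            return lambda *a, **kw: self.cls_func(owner, *a, **kw)
        return lambda *a, **kw: self.inst_func(instance, *a, **kw)

class Walkmen:
    start = switch(
        lambda cls: 'class bound',
        lambda self: 'instance bound',
    )

print(Walkmen.start())    # class bound
print(Walkmen().start())  # instance bound

The full implementation follows: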
from types import FunctionType

class boundmethod(object):
    def __init__(self, cls_method=None, instance_method=None, doc=None):
        self._cls_method = cls_method
        self._instance_method = instance_method
        if cls_method:
            self._method_name = cls_method.__name__
        elif instance_method:
            self._method_name = instance_method.__name__
        if doc is None and cls_method is not None:
            doc = cls_method.__doc__
        self.__doc__ = doc
        self._method = None
        self._object = None

    def _find_method(self, instance, instance_type, method_name):
        # _is_descriptor is assumed to be a helper like the one in the enum
        # module (it checks for __get__/__set__/__delete__)
        for base in instance_type.mro()[1:]:
            method = getattr(base, method_name, None)
            if _is_descriptor(method):
                method = method.__get__(instance, base)
            if method and method is not self:
                try:
                    return method.__func__
                except AttributeError:
                    return method

    def __get__(self, instance, instance_type):
        if instance is None:
            self._method = self._cls_method or self._find_method(instance, instance_type, self._method_name)
            self._object = instance_type
        else:
            self._method = self._instance_method or self._find_method(instance, instance_type, self._method_name)
            self._object = instance
        return self

    @staticmethod
    def cls_method(obj=None):
        def constructor(cls_method):
            if obj is None:
                return boundmethod(cls_method, None, cls_method.__doc__)
            else:
                return type(obj)(cls_method, obj._instance_method, obj.__doc__)
        if isinstance(obj, FunctionType):
            return boundmethod(obj, None, obj.__doc__)
        else:
            return constructor

    @staticmethod
    def instance_method(obj=None):
        def constructor(instance_method):
            if obj is None:
                return boundmethod(None, instance_method, instance_method.__doc__)
            else:
                return type(obj)(obj._cls_method, instance_method, obj.__doc__)
        if isinstance(obj, FunctionType):
            return boundmethod(None, obj, obj.__doc__)
        else:
            return constructor

    def __call__(self, *args, **kwargs):
        if self._method:
            try:
                return self._method(self._object, *args, **kwargs)
            except TypeError:
                return self._method(*args, **kwargs)
        return None
Example:
>>> class Walkmen(object):
... #boundmethod.cls_method
... def start(self):
... return 'Walkmen start class bound method'
... #boundmethod.instance_method(start)
... def start(self):
... return 'Walkmen start instance bound method'
>>> print Walkmen.start()
Walkmen start class bound method
>>> print Walkmen().start()
Walkmen start instance bound method
I hope it will help some of you guys.
Best.
I actually just asked this question (Python descriptors and inheritance - I hadn't seen this question). My solution uses descriptors and a metaclass for inheritance.
From my answer:
import types

class dynamicmethod:
    '''
    Descriptor to allow dynamic dispatch on calls to class.Method vs obj.Method

    Fragile when used with inheritance; to inherit and then overwrite or extend
    a dynamicmethod, a class must have dynamicmethod_meta as its metaclass
    '''
    def __init__(self, f=None, m=None):
        self.f = f
        self.m = m

    def __get__(self, obj, objtype=None):
        if obj is not None and self.f is not None:
            return types.MethodType(self.f, obj)
        elif objtype is not None and self.m is not None:
            return types.MethodType(self.m, objtype)
        else:
            raise AttributeError('No associated method')

    def method(self, f):
        return type(self)(f, self.m)

    def classmethod(self, m):
        return type(self)(self.f, m)

def make_dynamicmethod_meta(meta):
    class _dynamicmethod_meta(meta):
        def __prepare__(name, bases, **kwargs):
            d = meta.__prepare__(name, bases, **kwargs)
            for base in bases:
                for k, v in base.__dict__.items():
                    if isinstance(v, dynamicmethod):
                        if k in d:
                            raise ValueError('Multiple base classes define the same dynamicmethod')
                        d[k] = v
            return d
    return _dynamicmethod_meta

dynamicmethod_meta = make_dynamicmethod_meta(type)

class A(metaclass=dynamicmethod_meta):
    @dynamicmethod
    def a(self):
        print('Called from obj {} defined in A'.format(self))

    @a.classmethod
    def a(cls):
        print('Called from class {} defined in A'.format(cls))

class B(A):
    @a.method
    def a(self):
        print('Called from obj {} defined in B'.format(self))
A.a()
A().a()
B.a()
B().a()
results in:
Called from class <class 'A'> defined in A
Called from obj <A object at ...> defined in A
Called from class <class 'B'> defined in A
Called from obj <B object at ...> defined in B

in Python remember last method called/get

I'm trying to do something like this:
inst = AnyClass()
remember_last_method(inst)
inst.foo()
inst.bar()
print inst.last_method # print bar
inst.foo
print inst.last_method # print foo
inst.remember_last_method = False
inst.bar()
print inst.last_method # print foo
inst.remember_last_method = True
Any suggestion to write the remember_last_method function?
First edit:
It seems that the votes are negative...
Here is the code I started to write, if it helps clarify the question:
def remember_last_method_get(cls):
    """
    Subclass cls in a way that remembers the last method got by instances

    >>> @remember_last_method_get
    ... class Foo(object):
    ...     def bar(self):
    ...         pass
    ...     def baz(self):
    ...         pass
    >>> foo = Foo()
    >>> foo.bar()
    >>> foo.baz()
    >>> print foo.last_method_get
    baz
    >>> m = foo.bar  # get a method without calling it
    >>> print foo.last_method_get
    bar
    """
    class clsRememberLastMethodGet(cls):
        def __getattribute__(self, name):
            attr = cls.__getattribute__(self, name)
            if callable(attr):
                self.last_method_get = name
            return attr
    return clsRememberLastMethodGet

if __name__ == '__main__':
    import doctest
    doctest.testmod()
This works on instances and not on classes as I wanted, and it doesn't have the remember_last_method=True/False switch.
Second edit:
Here is a metaclass that does the job (only for methods called, not got, which is better):
class RememberLastMethod(type):
    def __init__(self, name, bases, d):
        type.__init__(self, name, bases, d)
        for name, attr in d.iteritems():
            if not callable(attr) or name.startswith('_'):
                continue
            def create_new_attr(name, attr):
                def new_attr(self, *args, **kwargs):
                    if self.remember_last_method:
                        self.last_method = name
                    return attr(self, *args, **kwargs)
                return new_attr
            setattr(self, name, create_new_attr(name, attr))
        orig__init__ = self.__init__
        def new__init__(self, *args, **kwargs):
            self.remember_last_method = True
            self.last_method = None
            orig__init__(self)
        self.__init__ = new__init__

class AnyClass(object):
    __metaclass__ = RememberLastMethod
    def foo(self):
        pass
    def bar(self):
        pass
# Call two method, inst.last_method is the last
inst = AnyClass()
inst.foo()
inst.bar()
assert inst.last_method == "bar"
# Call a new method, changes inst.last_method.
inst.foo()
assert inst.last_method == "foo"
# Stop changing inst.last_method.
inst.remember_last_method = False
inst.bar()
assert inst.last_method == "foo"
# Changing last_method again.
inst.remember_last_method = True
inst.bar()
assert inst.last_method == "bar"
# Work with reference to method as well
method = inst.foo
inst.remember_last_method = False
method()
assert inst.last_method == "bar"
inst.remember_last_method = True
method()
assert inst.last_method == "foo"
Third edit:
Here is a function that takes an instance as argument and does the same job as the metaclass:
def remember_last_method(inst):
    inst.remember_last_method = True
    cls = inst.__class__
    for name in dir(inst):
        if name.startswith('_'):
            continue
        attr = getattr(inst, name)
        if not callable(attr):
            continue
        def create_new_attr(name, attr):
            def new_attr(self, *args, **kwargs):
                if self.remember_last_method:
                    self.last_method = name
                return attr(*args, **kwargs)
            return new_attr
        setattr(cls, name, create_new_attr(name, attr))

class AnyClass(object):
    def foo(self):
        pass
    def bar(self):
        pass
inst = AnyClass()
remember_last_method(inst)
# Call two method, inst.last_method is the last
inst.foo()
inst.bar()
assert inst.last_method == "bar"
# Call a new method, changes inst.last_method.
inst.foo()
assert inst.last_method == "foo"
# Stop changing inst.last_method.
inst.remember_last_method = False
inst.bar()
assert inst.last_method == "foo"
# Changing last_method again.
inst.remember_last_method = True
inst.bar()
assert inst.last_method == "bar"
# Work with reference to method as well
method = inst.foo
inst.remember_last_method = False
method()
assert inst.last_method == "bar"
inst.remember_last_method = True
method()
assert inst.last_method == "foo"
You can implement this yourself for each method:
class X(object):
    def __init__(self):
        self.last_method = None
        self.should_store_last_method = True

    def set_last_method(self, meth):
        if self.should_store_last_method:
            self.last_method = meth

    def store_last_method(self, should_store):
        self.should_store_last_method = should_store

    def one(self):
        self.set_last_method(self.one)
        print("ONE")

    def two(self):
        self.set_last_method(self.two)
        print("TWO")
to use it:
x = X()
x.one()
# ONE
print x.last_method
# <bound method X.one of <__main__.X object at 0x1035f8210>>
x.last_method()
# ONE
x.two()
# TWO
print x.last_method
# <bound method X.two of <__main__.X object at 0x1035f8210>>
x.last_method()
# TWO
x.store_last_method(False)
x.one()
# ONE
print x.last_method
# <bound method X.one of <__main__.X object at 0x1035f8210>>
gives:
ONE
<bound method X.one of <__main__.X object at 0x1035f8210>>
ONE
TWO
<bound method X.two of <__main__.X object at 0x1035f8210>>
TWO
ONE
<bound method X.one of <__main__.X object at 0x1035f8210>>
A metaclass is definitely the way to go. Your metaclass implementation is a good one, but it falls over in a couple of edge cases: there are several things that are callable but will not be turned into instance methods when they live on a class. For instance, you might have a staticmethod or a classmethod, or define a class within the parent class, or most simply have an attribute that is an instance of a class with a __call__ method.
Your function/property implementation avoids these problems, but with the downside of recording those calls too. Those callables do not receive the object on which they are found, so do you really want to be recording them?
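To illustrate those edge cases (my addition): in the class dictionary, staticmethod and classmethod objects and nested classes are not plain functions, which is why the implementation below filters on FunctionType rather than a bare callable() check:

from types import FunctionType

class Example:
    def method(self):
        pass

    @staticmethod
    def static():
        pass

    @classmethod
    def cls_method(cls):
        pass

    class Nested:
        pass

for name, value in vars(Example).items():
    if not name.startswith('__'):
        print(name, type(value).__name__, isinstance(value, FunctionType))
# method function True
# static staticmethod False
# cls_method classmethod False
# Nested type False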
I've provided an implementation below. This metaclass only works with Python 3. To convert to Python 2 you must remove the name arg from the __init__ and __new__ methods on MethodLoggerMetaclass. You will also have to use the __metaclass__ name instead of providing it as an argument on the class declaration line.
from types import FunctionType, MethodType
from collections import Callable
import builtins
class MethodLoggerMetaclass(type):
    """Records the last method that was called on a class as the base function
    (called an unbound method in python 2.x)

    By default _last_method is used to record which method was last called.
    Use `class MyClass(metaclass=MethodLoggerMetaclass, name="new_name")' to
    change this behaviour.

    Set record_superclass to True to also record the methods on superclasses.
    Set record_hidden to True to also record attributes beginning with a
    single underscore.
    Set record_callable to True to also record callable objects that are *not*
    instance methods, eg. static methods and class methods."""
    def __init__(self, classname, parents, attributes, name="_last_method",
                 record_superclass=False, record_hidden=False,
                 record_callable=False):
        type.__init__(self, classname, parents, attributes)
        method_logger_names[self] = name
        if record_superclass:
            for attr, value, needs_self in get_superclass_functions(
                    self, record_hidden, record_callable):
                type.__setattr__(self, attr, wrap(value, name, needs_self))

    def __new__(metaclass, classname, parents, attributes,
                name="_last_method", record_superclass=False,
                record_hidden=False, record_callable=False):
        types = FunctionType if not record_callable else Callable
        for attr, value in attributes.items():
            if record(attr, value, record_hidden, types):
                attributes[attr] = wrap(value, name,
                                        isinstance(value, FunctionType))
        attributes[name] = MethodLoggerProperty()
        return type.__new__(metaclass, classname, parents, attributes)

    def __setattr__(self, attr, value):
        """Used to wrap functions that are added to the class after class
        creation."""
        if isinstance(value, FunctionType):
            type.__setattr__(self, attr,
                             wrap(value, method_logger_names[self], True))
        else:
            type.__setattr__(self, attr, value)
class MethodLogger:
    """Used to record the last method called on an instance. The method is
    stored as the base function. Has convenience properties for getting just
    the name, the bound method, or the unwrapped function.

    Calling the provided function or method will also record the call if record
    is set to True."""
    _function = None
    record = True

    def __init__(self, instance):
        self.instance = instance

    @property
    def function(self):
        # needs_self=True assumed here: the stored function is an instance
        # method and the rewrapped version is used via MethodType below
        return wrap(self._function, method_logger_names[type(self.instance)],
                    True)

    @function.setter
    def function(self, function):
        if self.record:
            self._function = function

    @property
    def unwrapped_function(self):
        return self._function

    @property
    def method(self):
        if self._function:
            return MethodType(self.function, self.instance)

    @property
    def name(self):
        if self._function:
            return self._function.__name__

    def __str__(self):
        return "MethodLogger(instance={}, function={}, record={})".format(
            self.instance, self._function, self.record)
    __repr__ = __str__
class MethodLoggerProperty:
    """Provides initial MethodLogger for new instances of a class"""
    def __get__(self, instance, cls=None):
        method_logger = MethodLogger(instance)
        setattr(instance, method_logger_names[cls], method_logger)
        return method_logger
def wrap(function, method_logger_name, needs_self):
    """Wraps a function in a logging function, and makes the wrapper
    appear as the original function."""
    if needs_self:
        def wrapper(self, *args, **kwargs):
            if getattr(self, method_logger_name).record:
                getattr(self, method_logger_name).function = function
            return function(self, *args, **kwargs)
    else:
        def wrapper(self, *args, **kwargs):
            if getattr(self, method_logger_name).record:
                getattr(self, method_logger_name).function = function
            return function(*args, **kwargs)
    wrapper.__name__ = getattr(function, "__name__", str(function))
    wrapper.__doc__ = function.__doc__
    return wrapper
# used to store the name under which the method logger is stored for each class
method_logger_names = {}

def record(attr, value, record_hidden, types=FunctionType):
    """Returns whether an attribute is a method and should be logged.
    Never returns true for "dunder" attributes (names beginning with __)"""
    return isinstance(value, types) and not attr.startswith("__") and \
        (record_hidden or not attr.startswith("_"))

def get_superclass_functions(cls, include_hidden, include_callable):
    """Finds all functions derived from the superclasses of a class. Gives
    the name under which the function was found and the function itself.

    Returns tuples of (attribute name, function, whether the function needs an
    object instance). If `include_callable' is False then the function
    always needs an object instance."""
    base_types = FunctionType if not include_callable else Callable
    attrs = set(vars(cls).keys())
    for superclass in cls.mro()[1:-1]:  # exclude actual class and object
        types = (base_types if not hasattr(builtins, superclass.__name__) else
                 Callable)
        for attr, value in vars(superclass).items():
            if attr not in attrs and record(attr, value, include_hidden, types):
                attrs.add(attr)
                yield attr, value, (isinstance(value, FunctionType) or
                                    hasattr(builtins, superclass.__name__))
Example usage:
class MethodLoggerList(list, metaclass=MethodLoggerMetaclass,
                       name="_method_logger", record_superclass=True):
    def f(self, kwarg="keyword argument"):
        print(self, kwarg)

    def g(self):
        pass
# example use
t = MethodLoggerList()
print(t._method_logger)
t.f()
t.f("another value")
print(t._method_logger)
# note that methods on superclass are not recorded by default
t.append(0)
print(t._method_logger)
# won't record dunder/magic methods at all
t += [1]
print(t._method_logger)
# stop recording
t._method_logger.record = False
t.g()
print(t._method_logger)
# add methods to class after class creation, and still record them
def h(self):
    pass
MethodLoggerList.h = h
t._method_logger.record = True
t.h()
print(t._method_logger)
# also records lambdas
MethodLoggerList.i = lambda self: None
t.i()
print(t._method_logger)
# does not record monkey-patched methods
def j():
    pass
t.j = j
t.j()
print(t._method_logger)
# does record method or function access from _last_method
method = t._method_logger.method
t.g()
method()
print(t._method_logger)

How to clear instance data without setattr?

I wanted to make a class that can be stored and restored (in a DB, a single file, or just as a string) in Python. I wrote this (shortened):
from json import JSONDecoder, JSONEncoder

def json_decode(object): return JSONDecoder().decode(object)

def json_encode(object): return JSONEncoder().encode(object)

class Storage:
    __separator__ = 'ANY OF ANYS'
    __keys__ = []
    __vals__ = []
    __slots__ = ('__keys__', '__vals__', '__separator__')

    def __getattr__(self, key):
        try:
            return self.__vals__[self.__keys__.index(key)]
        except IndexError:
            raise AttributeError

    def __setattr__(self, key, val):
        self.__keys__.append(key)
        self.__vals__.append(val)

    def store(self):
        return (json_encode(self.__keys__) + self.__separator__ +
                json_encode(self.__vals__))

    def restore(self, stored):
        stored = stored.split(self.__separator__)
        for (key, val) in zip(json_decode(stored[0]), json_decode(stored[1])):
            setattr(self, key, val)
And yeah, that works, but... when I create more instances, they all behave like a singleton.
So - how do I set an attribute on an instance without going through __setattr__?
PS. I had an idea - special-case keys/vals inside __setattr__/__getattr__ - but it would make a mess.
Your __separator__, __keys__, __vals__ and __slots__ are attributes of the Storage class object itself. I don't know if it's exactly the same thing, but I'd call them static variables of the class.
If you want each instance of Storage to have its own values, define these variables in your __init__ function:
class Storage(object):
    __slots__ = ('__keys__', '__vals__', '__separator__')

    def __init__(self):
        super(Storage, self).__setattr__('__separator__', "ANY OF ANYS")
        super(Storage, self).__setattr__('__keys__', [])
        super(Storage, self).__setattr__('__vals__', [])

    def __getattr__(self, key):
        try:
            vals = getattr(self, '__vals__')
            keys = getattr(self, '__keys__')
            return vals[keys.index(key)]
        except IndexError:
            raise AttributeError

    def __setattr__(self, key, val):
        vals = getattr(self, '__vals__')
        keys = getattr(self, '__keys__')
        vals.append(val)
        keys.append(key)
Edited so that getattr and setattr work.
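To see why the original version behaved like a singleton (my own illustration): mutable class attributes such as __keys__ and __vals__ are shared by every instance.

class Shared:
    items = []                # one list object, attached to the class

    def add(self, x):
        self.items.append(x)  # mutates the shared list

a = Shared()
b = Shared()
a.add(1)
print(b.items)  # [1] - b sees a's data, just like the original Storage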
I ran into this problem two days ago. I don't know if it's exactly your problem, but you said something like "it's as if I have a singleton".
You could make your Storage class a subclass of a special base class like this:
class Singleton(object):
def __new__(cls, *args, **kwargs):
if '_inst_' not in vars(cls):
cls._inst = type.__new__(cls, *args, *kwargs)
return cls._inst
class Storage(Singleton):
....
As long as you don't override __new__() in your subclass, all subsequent calls to create new instances after the first will return the one first created.
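A quick check of that claim (my addition), assuming Storage subclasses the Singleton above without overriding __new__():

a = Storage()
b = Storage()
print(a is b)  # True - the second call returns the first instance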

Finding a function's parameters in Python

I want to be able to ask a class's __init__ method what its parameters are. The straightforward approach is the following:
code = cls.__init__.__func__.__code__
code.co_varnames[:code.co_argcount]
However, that won't work if the class has any decorators. It will give the parameter list for the function returned by the decorator. I want to get down to the original __init__ method and get those original parameters. In the case of a decorator, the original function is going to be found in the closure of the function returned by the decorator:
cls.__init__.__func__.__closure__[0]
However, it is more complicated if there are other things in the closure, which decorators may do from time to time:
def Something(test):
    def decorator(func):
        def newfunc(self):
            stuff = test
            return func(self)
        return newfunc
    return decorator

def test():
    class Test(object):
        @Something(4)
        def something(self):
            print Test
    return Test
test().something.__func__.__closure__
(<cell at 0xb7ce7584: int object at 0x81b208c>, <cell at 0xb7ce7614: function object at 0xb7ce6994>)
And then I have to decide whether I want the parameters from the decorator or the parameters from the original function. The function returned by the decorator could have *args and **kwargs as its parameters. What if there are multiple decorators and I have to decide which one I care about?
So what is the best way to find a function's parameters even when the function may be decorated? And what is the best way to walk down a chain of decorators back to the originally decorated function?
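One modern route (a hedged sketch of my own, Python 3): if the decorators use functools.wraps, the original function is kept as __wrapped__, and inspect.signature follows that chain automatically.

import functools
import inspect

def Something(test):
    def decorator(func):
        @functools.wraps(func)  # records func as newfunc.__wrapped__
        def newfunc(self, *args, **kwargs):
            stuff = test
            return func(self, *args, **kwargs)
        return newfunc
    return decorator

class Test(object):
    @Something(4)
    def __init__(self, alpha, beta=2):
        self.alpha, self.beta = alpha, beta

print(inspect.signature(Test.__init__))  # (self, alpha, beta=2)
print(Test.__init__.__wrapped__)         # the original, undecorated __init__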
Update:
Here is effectively how I am doing this right now (names have been changed to protect the identity of the accused):
import abc
import collections

IGNORED_PARAMS = ("self",)
DEFAULT_PARAM_MAPPING = {}
DEFAULT_DEFAULT_PARAMS = {}

class DICT_MAPPING_Placeholder(object):
    def __get__(self, obj, type):
        DICT_MAPPING = {}
        for key in type.PARAMS:
            DICT_MAPPING[key] = None
        for cls in type.mro():
            if "__init__" in cls.__dict__:
                cls.DICT_MAPPING = DICT_MAPPING
                break
        return DICT_MAPPING

class PARAM_MAPPING_Placeholder(object):
    def __get__(self, obj, type):
        for cls in type.mro():
            if "__init__" in cls.__dict__:
                cls.PARAM_MAPPING = DEFAULT_PARAM_MAPPING
                break
        return DEFAULT_PARAM_MAPPING

class DEFAULT_PARAMS_Placeholder(object):
    def __get__(self, obj, type):
        for cls in type.mro():
            if "__init__" in cls.__dict__:
                cls.DEFAULT_PARAMS = DEFAULT_DEFAULT_PARAMS
                break
        return DEFAULT_DEFAULT_PARAMS

class PARAMS_Placeholder(object):
    def __get__(self, obj, type):
        func = type.__init__.__func__
        # unwrap decorators here
        code = func.__code__
        keys = list(code.co_varnames[:code.co_argcount])
        for name in IGNORED_PARAMS:
            try:
                keys.remove(name)
            except ValueError:
                pass
        for cls in type.mro():
            if "__init__" in cls.__dict__:
                cls.PARAMS = tuple(keys)
                break
        return tuple(keys)
class BaseMeta(abc.ABCMeta):
    def __init__(self, name, bases, dict):
        super(BaseMeta, self).__init__(name, bases, dict)
        if "__init__" not in dict:
            return
        if "PARAMS" not in dict:
            self.PARAMS = PARAMS_Placeholder()
        if "DEFAULT_PARAMS" not in dict:
            self.DEFAULT_PARAMS = DEFAULT_PARAMS_Placeholder()
        if "PARAM_MAPPING" not in dict:
            self.PARAM_MAPPING = PARAM_MAPPING_Placeholder()
        if "DICT_MAPPING" not in dict:
            self.DICT_MAPPING = DICT_MAPPING_Placeholder()

class Base(collections.Mapping):
    __metaclass__ = BaseMeta
    """
    Dict-like class that uses its __init__ params for default keys.
    Override PARAMS, DEFAULT_PARAMS, PARAM_MAPPING, and DICT_MAPPING
    in the subclass definition to give non-default behavior.
    """
    def __init__(self):
        pass

    def __nonzero__(self):
        """Handle bool casting instead of __len__."""
        return True

    def __getitem__(self, key):
        action = self.DICT_MAPPING[key]
        if action is None:
            return getattr(self, key)
        try:
            return action(self)
        except AttributeError:
            return getattr(self, action)

    def __iter__(self):
        return iter(self.DICT_MAPPING)

    def __len__(self):
        return len(self.DICT_MAPPING)
print Base.PARAMS
# ()
print dict(Base())
# {}
At this point Base reports uninteresting values for the four constants, and the dict version of instances is empty. However, if you subclass you can override any of the four, or you can add other parameters to the __init__:
class Sub1(Base):
    def __init__(self, one, two):
        super(Sub1, self).__init__()
        self.one = one
        self.two = two

Sub1.PARAMS
# ("one", "two")
dict(Sub1(1, 2))
# {"one": 1, "two": 2}

class Sub2(Base):
    PARAMS = ("first", "second")
    def __init__(self, one, two):
        super(Sub2, self).__init__()
        self.first = one
        self.second = two

Sub2.PARAMS
# ("first", "second")
dict(Sub2(1, 2))
# {"first": 1, "second": 2}
Consider this decorator:
def rickroll(old_function):
    return lambda junk, junk1, junk2: "Never Going To Give You Up"

class Foo(object):
    @rickroll
    def bar(self, p1, p2):
        return p1 * p2

print Foo().bar(1, 2)
In it, the rickroll decorator takes the bar method, discards it, and replaces it with a new function that ignores its differently-named (and possibly differently-numbered!) parameters and instead returns a line from a classic song.
There are no further references to the original function, and the garbage collector can come and remove it any time it likes.
In such a case, I cannot see how you could find the parameter names p1 and p2. In my understanding, even the Python interpreter itself has no idea what they used to be called.
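A quick way to verify that claim (my addition): the replacement lambda keeps no reference to bar at all, so its parameter names are simply gone.

def rickroll(old_function):
    return lambda junk, junk1, junk2: "Never Going To Give You Up"

class Foo(object):
    @rickroll
    def bar(self, p1, p2):
        return p1 * p2

print(Foo.bar.__closure__)           # None - nothing captured from old_function
print(Foo.bar.__code__.co_varnames)  # ('junk', 'junk1', 'junk2')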
