I'm trying to create an object collection proxy, which could do something like this:
class A:
    def do_something(self):
        # ...

class B:
    def get_a(self):
        return A()

class Proxy:
    ?

collection = [B(), B()]
proxy = Proxy(collection)
proxy.get_a().do_something()
# ^ for each B in collection: get_a() and do_something()
What would be the best architecture / strategy for achieving this?
The key question, I guess, is how to cache the result of get_a() so that I can then proxy do_something().
N.B. I don't expect proxy.get_a().do_something() to return anything sensible, it's only supposed to do things.
Simple enough... you may want to adapt it to do some more checking:
class A(object):
    def do_something(self):
        print(id(self), "called")

class B(object):
    def get_a(self):
        return A()

class Proxy(object):
    def __init__(self, objs):
        self._objs = objs

    def __getattr__(self, name):
        def func(*args, **kwargs):
            return Proxy([getattr(o, name)(*args, **kwargs) for o in self._objs])
        return func

collection = [B(), B()]
proxy = Proxy(collection)
proxy.get_a().do_something()
Results in:
4455571152 called
4455571216 called
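Note that this proxy does not yet address the caching part of the question: every call fans out again and builds fresh results. A minimal sketch of a caching variant (the CachingProxy name and the name-only cache key are my own choices, not part of the answer above):

class CachingProxy(object):
    def __init__(self, objs):
        self._objs = objs
        self._cache = {}  # attribute name -> proxy of fan-out results

    def __getattr__(self, name):
        def func(*args, **kwargs):
            # Note: keyed on the method name only, so repeated calls with
            # different arguments return the first cached result.
            if name not in self._cache:
                self._cache[name] = CachingProxy(
                    [getattr(o, name)(*args, **kwargs) for o in self._objs])
            return self._cache[name]
        return func

With this, a second proxy.get_a() reuses the A instances created by the first call, so do_something() is proxied to the same objects.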
The most pythonic way of going about this would probably be a list comprehension:
results = [b.get_a().do_something() for b in collection]
If you want to cache calls to B.get_a(), you can use memoization. A simple way of doing memoization yourself could look like this:
cache = None

# ...

class B:
    def get_a(self):
        global cache
        if cache is None:
            cache = A()
        return cache
If you want to use caching in multiple places, you'll need to cache results under distinct keys in order to tell them apart, and, for convenience's sake, write a decorator that you can simply wrap any function whose results you want to cache.
A good example of this is found in Python Algorithms: Mastering Basic Algorithms in the Python Language (see this question). Modified for your case, to use the function name rather than the function arguments as the cache key, it would look like this:
from functools import wraps

def memoize(func):
    cache = {}
    key = func.__name__

    @wraps(func)
    def wrap(*args):
        if key not in cache:
            cache[key] = func(*args)
        return cache[key]
    return wrap
class A:
    def do_something(self):
        return 1

class B:
    @memoize
    def get_a(self):
        print("B.get_a() was called")
        return A()

collection = [B(), B()]
results = [b.get_a().do_something() for b in collection]
print(results)
Output:
B.get_a() was called
[1, 1]
I am not new to Python, but I am far from being an expert (or even intermediate). Right now I am playing around with objects and their behavior (setattr, monkey-patching, etc.). In doing so, I stumbled upon a problem I have no idea how to solve.
Imagine the following code:
class MyCalculator():
    def __init__(self):
        pass

    def addition(self, a, b):
        return a + b

    def subtraction(self, a, b):
        return a - b
import inspect

class Changing():
    def __init__(self):
        pass

    def listUserMethods(self, myObject):
        object_names = [object_name for object_name in inspect.getmembers(myObject)
                        if inspect.ismethod(object_name[1])]
        return object_names

    def setMethodAttribute(self, myMethod):
        pass
if __name__ == "__main__":
    myCalc = MyCalculator()
    change = Changing()
Now, I would like setMethodAttribute() to change the code of the method I provide, for example by inserting a print() statement before the rest of the original method executes (e.g. printing the input parameters before the addition runs).
In my case this does not need to happen at runtime (even though that would be very interesting to know). I could imagine that inheritance or something similar could be a way. Perhaps somebody has a great idea?
Thanks for the help!
The answer really depends on what you are after.
Wrapping a method of a class (before runtime)
This is a very typical use case for decorators (the @something above a function definition).
def with_printing(func):
    def wrapper(*args, **kwargs):
        print("Before calling method")
        ret = func(*args, **kwargs)
        print("After calling method")
        return ret
    return wrapper

class MyCalculator:
    @with_printing
    def addition(self, a, b):
        print("calling addition")
        return a + b
If you want to keep the docstring of the original method, you would use functools.wraps().
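For example, the same decorator with wraps applied would look like this (only the @wraps line is new):

from functools import wraps

def with_printing(func):
    @wraps(func)  # copies __name__, __doc__, etc. onto the wrapper
    def wrapper(*args, **kwargs):
        print("Before calling method")
        ret = func(*args, **kwargs)
        print("After calling method")
        return ret
    return wrapper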
Example output
mycalc = MyCalculator()
print(mycalc.addition(2, 3))
would print
Before calling method
calling addition
After calling method
5
Wrapping a method of an object instance (runtime)
Here is one implementation which changes the method of an object. Note that this changes the method of an instance and not every instance of that class.
class MyCalculator:
    def addition(self, a, b):
        print("calling addition")
        return a + b

class Changing:
    def set_method_attribute(self, obj, method_name):
        method = getattr(obj, method_name)

        def wrapper(*args, **kwargs):
            print("Before calling method")
            ret = method(*args, **kwargs)
            print("After calling method")
            return ret

        setattr(obj, method_name, wrapper)
Example usage
# Create two calculator objects for testing
mycalc = MyCalculator()
mycalc2 = MyCalculator()
change = Changing()
# Before change
print(mycalc.addition(2, 3))
print("----")
# After change
change.set_method_attribute(mycalc, "addition")
print(mycalc.addition(2, 3))
print("----")
# The other calculator object remains unchanged
print(mycalc2.addition(2, 3))
will print
calling addition
5
----
Before calling method
calling addition
After calling method
5
----
calling addition
5
I recreated some of the code in a new way. Would you mind taking a look at it and telling me whether this is a "good" way?
import inspect

class InBetween():
    def __init__(self):
        object_names = [object_name for object_name in inspect.getmembers(self)
                        if inspect.ismethod(object_name[1]) and object_name[0] != 'with_print']
        for name in object_names:
            method = getattr(self, name[0])
            wrapper = self.with_print(method)
            setattr(self, name[0], wrapper)

    def with_print(self, method):
        def wrapper(*args, **kwargs):
            print("before")
            ret = method(*args, **kwargs)
            print("after")
            return ret
        return wrapper

class MyCalculator(InBetween):
    def __init__(self):
        super().__init__()

    def addition(self, a, b):
        return a + b

    def subtraction(self, a, b):
        return a - b

    def multiply(self, a, b):
        return a * b
if __name__ == "__main__":
    myCalc = MyCalculator()
    print(myCalc.addition(2, 5))
    print(myCalc.multiply(2, 5))
The basic idea is that every class which inherits from InBetween can add the wrapper to each of its methods via super(), without doing so manually in the script. My next idea is to replace the print statements with logging, etc. In the end, if I call the parent __init__, information will be logged easily, and if not, "nothing" happens.
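A minimal sketch of that logging idea, as a stand-alone replacement for with_print (using the standard logging module; the with_logging name is mine):

import logging

logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__name__)

def with_logging(method):
    # drop-in replacement for with_print that logs instead of printing
    def wrapper(*args, **kwargs):
        logger.debug("calling %s args=%r kwargs=%r", method.__name__, args, kwargs)
        ret = method(*args, **kwargs)
        logger.debug("%s returned %r", method.__name__, ret)
        return ret
    return wrapper

Inside InBetween, self.with_print(method) would then simply become with_logging(method).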
Love to hear other opinions on that!
Thank you all!
I have a test framework that requires test cases to be defined using the following class patterns:
class TestBase:
    def __init__(self, params):
        self.name = str(self.__class__)
        print('initializing test: {} with params: {}'.format(self.name, params))

class TestCase1(TestBase):
    def run(self):
        print('running test: ' + self.name)
When I create and run a test, I get the following:
>>> test1 = TestCase1('test 1 params')
initializing test: <class '__main__.TestCase1'> with params: test 1 params
>>> test1.run()
running test: <class '__main__.TestCase1'>
The test framework searches for and loads all TestCase classes it can find, instantiates each one, then calls the run method for each test.
load_test(TestCase1(test_params1))
load_test(TestCase2(test_params2))
...
load_test(TestCaseN(test_params3))
...
for test in loaded_tests:
    test.run()
However, I now have some test cases for which I don't want the __init__ method called until the time that the run method is called, but I have little control over the framework structure or methods. How can I delay the call to __init__ without redefining the __init__ or run methods?
Update
The speculations that this originated as an XY problem are correct. A coworker asked me this question a while back when I was maintaining said test framework. I inquired further about what he was really trying to achieve and we figured out a simpler workaround that didn't involve changing the framework or introducing metaclasses, etc.
However, I still think this is a question worth investigating: if I wanted to create new objects with "lazy" initialization ("lazy" as in the lazy evaluation of generators such as range, etc.), what would be the best way of accomplishing it? My best attempt so far is listed below; I'm interested in knowing if there's anything simpler or less verbose.
First solution: use property, the elegant way to do getters/setters in Python.
class Bars(object):
    def __init__(self):
        self._foo = None

    @property
    def foo(self):
        if not self._foo:
            print("lazy initialization")
            self._foo = [1, 2, 3]
        return self._foo
if __name__ == "__main__":
    f = Bars()
    print(f.foo)
    print(f.foo)
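Output:
lazy initialization
[1, 2, 3]
[1, 2, 3]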
Second solution: the proxy solution, commonly implemented with a decorator.
In short, a proxy is a wrapper around the object you need. A proxy can provide additional functionality to the object it wraps without changing the object's code; it's a surrogate that provides the ability to control access to an object. The following code comes from user Cyclone.
class LazyProperty:
    def __init__(self, method):
        self.method = method
        self.method_name = method.__name__

    def __get__(self, obj, cls):
        if obj is None:
            return None
        value = self.method(obj)
        print('value {}'.format(value))
        setattr(obj, self.method_name, value)
        return value
class test:
    def __init__(self):
        self._resource = None

    @LazyProperty
    def resource(self):
        print("lazy")
        self._resource = tuple(range(5))
        return self._resource
if __name__ == '__main__':
    t = test()
    print(t.resource)
    print(t.resource)
    print(t.resource)
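Output:
lazy
value (0, 1, 2, 3, 4)
(0, 1, 2, 3, 4)
(0, 1, 2, 3, 4)
(0, 1, 2, 3, 4)
Only the first access goes through the descriptor; the later ones hit the plain instance attribute set by setattr.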
This is to be used for true one-time calculated lazy properties. I like it because it avoids sticking extra attributes on objects, and, once activated, it does not waste time checking for attribute presence.
Metaclass option
You can intercept the call to __init__ using a metaclass. Create the object with __new__ and overwrite the __getattribute__ method to check if __init__ has been called or not and call it if it hasn't.
class DelayInit(type):
    def __call__(cls, *args, **kwargs):
        def init_before_get(obj, attr):
            if not object.__getattribute__(obj, '_initialized'):
                # mark as initialized first, so that looking up
                # obj.__init__ below does not recurse into this hook
                obj._initialized = True
                obj.__init__(*args, **kwargs)
            return object.__getattribute__(obj, attr)
        cls.__getattribute__ = init_before_get
        new_obj = cls.__new__(cls, *args, **kwargs)
        new_obj._initialized = False
        return new_obj

class TestDelayed(TestCase1, metaclass=DelayInit):
    pass
In the example below, you'll see that the init print won't occur until the run method is executed.
>>> new_test = TestDelayed('delayed test params')
>>> new_test.run()
initializing test: <class '__main__.TestDelayed'> with params: delayed test params
running test: <class '__main__.TestDelayed'>
Decorator option
You could also use a decorator that has a similar pattern to the metaclass above:
def delayinit(cls):
    def init_before_get(obj, attr):
        if not object.__getattribute__(obj, '_initialized'):
            # as above, set the flag first to avoid recursion on the
            # obj.__init__ and obj._init_args lookups below
            obj._initialized = True
            obj.__init__(*obj._init_args, **obj._init_kwargs)
        return object.__getattribute__(obj, attr)
    cls.__getattribute__ = init_before_get

    def construct(*args, **kwargs):
        obj = cls.__new__(cls, *args, **kwargs)
        obj._init_args = args
        obj._init_kwargs = kwargs
        obj._initialized = False
        return obj
    return construct

@delayinit
class TestDelayed(TestCase1):
    pass
This will behave identically to the example above.
In Python, there is no way that you can avoid calling __init__ when you instantiate a class cls. If calling cls(args) returns an instance of cls, then the language guarantees that cls.__init__ will have been called.
So the only way to achieve something similar to what you are asking is to introduce another class that will postpone the calling of __init__ in the original class until an attribute of the instantiated class is being accessed.
Here is one way:
def delay_init(cls):
    class Delay(cls):
        def __init__(self, *arg, **kwarg):
            self._arg = arg
            self._kwarg = kwarg

        def __getattribute__(self, name):
            self.__class__ = cls
            arg = self._arg
            kwarg = self._kwarg
            del self._arg
            del self._kwarg
            self.__init__(*arg, **kwarg)
            return getattr(self, name)
    return Delay
This wrapper function works by catching any attempt to access an attribute of the instantiated class. When such an attempt is made, it changes the instance's __class__ to the original class, calls the original __init__ method with the arguments that were used when the instance was created, and then returns the proper attribute. This function can be used as a decorator for your TestCase1 class:
class TestBase:
    def __init__(self, params):
        self.name = str(self.__class__)
        print('initializing test: {} with params: {}'.format(self.name, params))

class TestCase1(TestBase):
    def run(self):
        print('running test: ' + self.name)
>>> t1 = TestCase1("No delay")
initializing test: <class '__main__.TestCase1'> with params: No delay
>>> t2 = delay_init(TestCase1)("Delayed init")
>>> t1.run()
running test: <class '__main__.TestCase1'>
>>> t2.run()
initializing test: <class '__main__.TestCase1'> with params: Delayed init
running test: <class '__main__.TestCase1'>
>>>
Be careful where you apply this function though. If you decorate TestBase with delay_init, it will not work, because it will turn the TestCase1 instances into TestBase instances.
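To illustrate the caveat, a sketch of what goes wrong (TestCase2 is a hypothetical subclass, just for demonstration):

DelayedBase = delay_init(TestBase)

class TestCase2(DelayedBase):
    def run(self):
        print('running test: ' + self.name)

t = TestCase2("params")
# The first attribute access resets t.__class__ to TestBase, so the
# subclass and its run() method are lost:
t.run()  # AttributeError: 'TestBase' object has no attribute 'run'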
In my answer I'd like to focus on cases where one wants to instantiate a class whose initialiser (dunder init) has side effects. For instance, pysftp.Connection creates an SSH connection, which may be undesired until it's actually used.
In a great blog series about conceiving the wrapt package (a nit-picky decorator implementation), the author describes a transparent object proxy. This code can be customised for the subject in question.
class LazyObject:
    _factory = None
    '''Callable responsible for creation of target object'''

    _object = None
    '''Target object created lazily'''

    def __init__(self, factory):
        self._factory = factory

    def __getattr__(self, name):
        if not self._object:
            self._object = self._factory()
        return getattr(self._object, name)
Then it can be used as:
obj = LazyObject(lambda: dict(foo = 'bar'))
obj.keys() # dict_keys(['foo'])
But len(obj), obj['foo'] and other language constructs which invoke Python's object protocols (dunder methods like __len__ and __getitem__) will not work. However, for many cases that are limited to regular methods, this is a solution.
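A quick illustration of that limitation:

obj = LazyObject(lambda: dict(foo='bar'))
obj.keys()    # works: regular attribute access goes through __getattr__
try:
    len(obj)  # fails: special method lookup bypasses __getattr__
except TypeError as e:
    print(e)  # object of type 'LazyObject' has no len()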
To proxy object protocol implementations, neither __getattr__ nor __getattribute__ can be used (at least not in a generic way). The latter's documentation notes:
This method may still be bypassed when looking up special methods as the result of implicit invocation via language syntax or built-in functions. See Special method lookup.
When a complete solution is demanded, there are examples of manual implementations, like werkzeug's LocalProxy and django's SimpleLazyObject. However, a clever workaround is possible.
Luckily there's a dedicated package (based on wrapt) for the exact use case, lazy-object-proxy which is described in this blog post.
from lazy_object_proxy import Proxy

obj = Proxy(lambda: dict(foo='bar'))
obj.keys()  # dict_keys(['foo'])
len(obj)    # 1
obj['foo']  # 'bar'
One alternative would be to write a wrapper that takes a class as input and returns a class with delayed initialization until any member is accessed. This could for example be done as this:
def lazy_init(cls):
    class LazyInit(cls):
        def __init__(self, *args, **kwargs):
            self.args = args
            self.kwargs = kwargs
            self._initialized = False

        def __getattr__(self, attr):
            if not self.__dict__['_initialized']:
                cls.__init__(self,
                             *self.__dict__['args'], **self.__dict__['kwargs'])
                self._initialized = True
            return self.__dict__[attr]
    return LazyInit
This could then be used as such
load_test(lazy_init(TestCase1)(test_params1))
load_test(lazy_init(TestCase2)(test_params2))
...
load_test(lazy_init(TestCaseN)(test_params3))
...
for test in loaded_tests:
    test.run()
Answering your original question (and the problem I think you are actually trying to solve), "How can I delay the init call until an attribute is accessed?": don't call init until you access the attribute.
Said another way: you can make the class initialization simultaneous with the attribute call. What you seem to actually want is 1) create a collection of TestCase# classes along with their associated parameters; 2) run each test case.
Probably your original problem came from thinking you had to initialize all your TestCase classes in order to create a list of them that you could iterate over. But in fact you can store class objects in lists, dicts, etc. That means you can use whatever method you have for finding all TestCase classes and store those class objects in a dict with their relevant parameters. Then just iterate over that dict and call each class with its run() method.
It might look like:
tests = {TestCase1: 'test 1 params', TestCase2: 'test 2 params', TestCase3: 'test 3 params'}
for test_case, param in tests.items():
    test_case(param).run()
Overriding __new__
You could do this by overriding __new__ method and replacing __init__ method with a custom function.
def init(cls, real_init):
    def wrapped(self, *args, **kwargs):
        # This will run during the first call to `__init__` made after
        # `__new__`. Here we re-assign the original __init__ back to the
        # class and assign a custom function to the instance's `__init__`.
        cls.__init__ = real_init

        def new_init():
            if new_init.called is False:
                real_init(self, *args, **kwargs)
                new_init.called = True

        new_init.called = False
        self.__init__ = new_init
    return wrapped

class DelayInitMixin(object):
    def __new__(cls, *args, **kwargs):
        cls.__init__ = init(cls, cls.__init__)
        return object.__new__(cls)
class A(DelayInitMixin):
    def __init__(self, a, b):
        print('inside __init__')
        self.a = sum(a)
        self.b = sum(b)

    def __getattribute__(self, attr):
        init = object.__getattribute__(self, '__init__')
        if not init.called:
            init()
        return object.__getattribute__(self, attr)

    def run(self):
        pass

    def fun(self):
        pass
Demo:
>>> a = A(range(1000), range(10000))
>>> a.run()
inside __init__
>>> a.a, a.b
(499500, 49995000)
>>> a.run(), a.__init__()
(None, None)
>>> b = A(range(100), range(10000))
>>> b.a, b.b
inside __init__
(4950, 49995000)
>>> b.run(), b.__init__()
(None, None)
Using cached properties
The idea is to do the heavy calculation only once by caching results. This approach will lead to much more readable code if the whole point of delaying initialization is improving performance.
Django comes with a nice decorator called @cached_property. I tend to use it a lot in both code and unit tests for caching the results of heavy properties.
A cached_property is a non-data descriptor. Hence, once the key is set in the instance's dictionary, access to the property will always get the value from there.
class cached_property(object):
    """
    Decorator that converts a method with a single self argument into a
    property cached on the instance.

    Optional ``name`` argument allows you to make cached properties of other
    methods. (e.g. url = cached_property(get_absolute_url, name='url'))
    """
    def __init__(self, func, name=None):
        self.func = func
        self.__doc__ = getattr(func, '__doc__')
        self.name = name or func.__name__

    def __get__(self, instance, cls=None):
        if instance is None:
            return self
        res = instance.__dict__[self.name] = self.func(instance)
        return res
Usage:
class A:
    @cached_property
    def a(self):
        print('calculating a')
        return sum(range(1000))

    @cached_property
    def b(self):
        print('calculating b')
        return sum(range(10000))
Demo:
>>> a = A()
>>> a.a
calculating a
499500
>>> a.b
calculating b
49995000
>>> a.a, a.b
(499500, 49995000)
I think you can use a wrapper class to hold the real class you want to instantiate, and call __init__ yourself in your code, like this (Python 3 code):
class Wrapper:
    def __init__(self, cls):
        self.cls = cls
        self.instance = None

    def your_method(self, *args, **kwargs):
        if not self.instance:
            self.instance = self.cls()
        return self.instance.your_method(*args, **kwargs)

class YourClass:
    def __init__(self):
        print("calling __init__")
But it's a dumb way, without any tricks.
I would like to wrap a number of class methods in Python with the same wrapper.
Conceptually it would look something like this in the simplest scenario:
x = 0  # some arbitrary context

class Base(object):
    def a(self):
        print("a x: %s" % x)

    def b(self):
        print("b x: %s" % x)

class MixinWithX(Base):
    """Wrap"""
    def a(self):
        global x
        x = 1
        super(MixinWithX, self).a()
        x = 0

    def b(self):
        global x
        x = 1
        super(MixinWithX, self).b()
        x = 0
Of course, when there are more methods than a and b, this becomes a mess. It seems like there ought to be something simpler. Obviously x could be modified in a decorator, but one still ends up with a long list of garbage, which instead of the above looks like:
from functools import wraps

def withx(f):
    @wraps(f)  # good practice
    def wrapped(*args, **kwargs):
        global x
        x = 1
        f(*args, **kwargs)
        x = 0
    return wrapped

class MixinWithX(Base):
    """Wrap"""
    @withx
    def a(self):
        super(MixinWithX, self).a()

    @withx
    def b(self):
        super(MixinWithX, self).b()
I thought about using __getattr__ in the mixin, but of course since methods such as a and b are already defined this is never called.
I also thought about using __getattribute__ but it returns the attribute, not wrapping the call. I suppose __getattribute__ could return a closure (example below) but I am not sure how sound a design that is. Here is an example:
class MixinWithX(Base):
    # a list of the methods of our parent class (Base) that are wrapped
    wrapped = ['a', 'b']

    # application of the wrapper around the methods specified
    def __getattribute__(self, name):
        original = object.__getattribute__(self, name)
        if name in MixinWithX.wrapped:
            def wrapper(*args, **kwargs):
                global x
                x = 1  # in this example, a context manager would be handy.
                ret = original(*args, **kwargs)
                x = 0
                return ret
            return wrapper
        return original
It has occurred to me that there may be something built into Python that may alleviate the need to manually reproduce every method of the parent class that is to be wrapped. Or maybe a closure in __getattribute__ is the proper way to do this. I would be grateful for thoughts.
Here's my attempt, which allows for a more terse syntax...
x = 0  # some arbitrary context

# Define a simple function to return a wrapped class
def wrap_class(base, towrap):
    class ClassWrapper(base):
        def __getattribute__(self, name):
            original = base.__getattribute__(self, name)
            if name in towrap:
                def func_wrapper(*args, **kwargs):
                    global x
                    x = 1
                    try:
                        return original(*args, **kwargs)
                    finally:
                        x = 0
                return func_wrapper
            return original
    return ClassWrapper

# Our existing base class
class Base(object):
    def a(self):
        print("a x: %s" % x)

    def b(self):
        print("b x: %s" % x)

# Create a wrapped class in one line, without needing to define a new class
# for each class you want to wrap.
Wrapped = wrap_class(Base, ('a',))

# Now use it
m = Wrapped()
m.a()
m.b()

# ...or do it in one line...
m = wrap_class(Base, ('a',))()
...which outputs...
a x: 1
b x: 0
You can do this using decorators and inspect:
from functools import wraps
import inspect

def withx(f):
    @wraps(f)
    def wrapped(*args, **kwargs):
        print("decorator")
        x = 1
        f(*args, **kwargs)
        x = 0
    return wrapped

class MyDecoratingBaseClass(object):
    def __init__(self, *args, **kwargs):
        for member in inspect.getmembers(self, predicate=inspect.ismethod):
            if member[0] in self.wrapped_methods:
                setattr(self, member[0], withx(member[1]))

class MyDecoratedSubClass(MyDecoratingBaseClass):
    wrapped_methods = ['a', 'b']

    def a(self):
        print('a')

    def b(self):
        print('b')

    def c(self):
        print('c')

if __name__ == '__main__':
    my_instance = MyDecoratedSubClass()
    my_instance.a()
    my_instance.b()
    my_instance.c()
Output:
decorator
a
decorator
b
c
There are two general directions I can think of which are useful in your case.
One is using a class decorator. Write a function which takes a class, and returns a class with the same set of methods, decorated (either by creating a new class by calling type(...), or by changing the input class in place).
EDIT: (The actual wrapping/inspecting code I had in mind is similar to what @girasquid has in his answer, but connecting is done using decoration instead of mixin/inheritance, which I think is more flexible and robust.)
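A minimal sketch of that class-decorator direction, assuming the withx function decorator from the question is in scope (wrap_methods is my own name):

def wrap_methods(*names):
    # class decorator: rebind each named method, wrapped with withx
    def decorate(cls):
        for name in names:
            setattr(cls, name, withx(getattr(cls, name)))
        return cls
    return decorate

@wrap_methods('a', 'b')
class WithX(Base):
    pass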
Which brings me to the second option, which is to use a metaclass, which may be cleaner (yet trickier if you're not used to working with metaclasses). If you don't have access to the definition of the original class, or don't want to change the original definition, you can subclass the original class, setting the metaclass on the derived.
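And a sketch of the metaclass direction (Python 3 syntax; WrapMeta is my own name, again reusing withx):

class WrapMeta(type):
    def __new__(mcls, name, bases, namespace):
        cls = super().__new__(mcls, name, bases, namespace)
        for attr in namespace.get('wrapped', ()):
            # the method may be defined on this class or inherited
            setattr(cls, attr, withx(getattr(cls, attr)))
        return cls

class WithX(Base, metaclass=WrapMeta):
    wrapped = ('a', 'b')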
There is a solution, and it's called a decorator. Google "python decorators" for lots of information.
The basic concept is that a decorator is a function which takes a function as a parameter, and returns a function:
def decorate_with_x(f):
    def inner(self):
        self.x = 1  # you must always use self to refer to member variables, even if you're not decorating
        f(self)
        self.x = 0
    return inner

class Foo(object):
    @decorate_with_x  # @-syntax passes the function defined on the next
                      # line to the named function, so that it is equivalent
                      # to foo_func = decorate_with_x(foo_func)
    def foo_func(self):
        pass
Using a class that has an xmlrpc proxy as one of its properties:
def __init__(self):
    self.proxy = ServerProxy(...)
    # ...
I'm trying to ease the use of some of the proxy's functions. Only a subset of the proxy functions are supposed to be used, so I thought of creating a set of tiny wrapper functions for them, like:
def sample(self):
    """ A nice docstring for a wrapper function. """
    self.proxy.sample()
Is there a good way of getting a list of all the wrapper functions? I'm thinking of something like dir(), but then I would need to filter for the object's wrapper functions. xmlrpc introspection (http://xmlrpc-c.sourceforge.net/introspection.html) doesn't help much either, since I don't want to use/provide all the server's functions.
Maybe setting an attribute on the wrappers together with a @staticmethod get_wrappers() would do the trick. Having a _wrapper suffix is not appropriate for my use case, and a static list in the class that keeps track of the available wrappers is too error-prone. So I'm looking for good ideas on how best to get a list of the wrapper functions.
I'm not 100% sure if this is what you want, but it works:
def proxy_wrapper(name, docstring):
    def wrapper(self, *args, **kwargs):
        return getattr(self.proxy, name)(*args, **kwargs)
    wrapper.__doc__ = docstring
    wrapper._is_wrapper = True
    return wrapper

class Something(object):
    def __init__(self):
        self.proxy = {}

    @classmethod
    def get_proxy_wrappers(cls):
        return [m for m in dir(cls) if hasattr(getattr(cls, m), "_is_wrapper")]

    update = proxy_wrapper("update", "wraps the proxy's update() method")
    proxy_keys = proxy_wrapper("keys", "wraps the proxy's keys() method")
Then
>>> a = Something()
>>> print(a.proxy)
{}
>>> a.update({1: 42})
>>> print(a.proxy)
{1: 42}
>>> a.update({"foo": "bar"})
>>> print(a.proxy_keys())
dict_keys([1, 'foo'])
>>> print(a.get_proxy_wrappers())
['proxy_keys', 'update']
Use xml-rpc introspection to get the server list and intersect it with your object's properties. Something like:
loc = dir(self)
rem = proxy.listMethods() # However introspection gets a method list
wrapped = [x for x in rem if x in loc]
I am new to Python and I wonder if there is any way to aggregate methods into 'subspaces'. I mean something similar to this syntax:
smth = Something()
smth.subspace.do_smth()
smth.another_subspace.do_smth_else()
I am writing an API wrapper and I'm going to have a lot of very similar methods (differing only in URI), so I thought it would be good to place them in a few subspaces that correspond to the API request categories. In other words, I want to create namespaces inside a class. I don't know if this is even possible in Python and have no idea what to look for on Google.
I will appreciate any help.
One way to do this is by defining subspace and another_subspace as properties that return objects that provide do_smth and do_smth_else respectively:
class Something:
    @property
    def subspace(self):
        class SubSpaceClass:
            def do_smth(other_self):
                print('do_smth')
        return SubSpaceClass()

    @property
    def another_subspace(self):
        class AnotherSubSpaceClass:
            def do_smth_else(other_self):
                print('do_smth_else')
        return AnotherSubSpaceClass()
Which does what you want:
>>> smth = Something()
>>> smth.subspace.do_smth()
do_smth
>>> smth.another_subspace.do_smth_else()
do_smth_else
Depending on what you intend to use the methods for, you may want to make SubSpaceClass a singleton, but I doubt the performance gain is worth it.
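If one did want to avoid re-creating the namespace object on every access, a sketch using functools.cached_property (Python 3.8+; my own variation, not part of the answer above):

from functools import cached_property

class Something:
    @cached_property
    def subspace(self):
        # built once per instance, then cached in the instance __dict__
        class SubSpaceClass:
            def do_smth(other_self):
                print('do_smth')
        return SubSpaceClass()

With this, smth.subspace is smth.subspace holds, so the namespace object is reused.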
I had this need a couple years ago and came up with this:
from functools import partial

class Registry:
    """Namespace within a class."""

    def __get__(self, obj, cls=None):
        if obj is None:
            return self
        else:
            return InstanceRegistry(self, obj)

    def __call__(self, name=None):
        def decorator(f):
            use_name = name or f.__name__
            if hasattr(self, use_name):
                raise ValueError("%s is already registered" % use_name)
            setattr(self, use_name, f)
            return f
        return decorator

class InstanceRegistry:
    """
    Helper for accessing a namespace from an instance of the class.

    Used internally by :class:`Registry`. Returns a partial that will pass
    the instance as the first parameter.
    """
    def __init__(self, registry, obj):
        self.__registry = registry
        self.__obj = obj

    def __getattr__(self, attr):
        return partial(getattr(self.__registry, attr), self.__obj)
# Usage:
class Something:
    subspace = Registry()
    another_subspace = Registry()

    @subspace()
    def do_smth(self):
        # `self` will be an instance of Something
        pass

    @another_subspace('do_smth_else')
    def this_can_be_called_anything_and_take_any_parameter_name(obj, other):
        # call it `obj` or whatever else if `self` outside a class is unsettling
        pass
At runtime:
>>> smth = Something()
>>> smth.subspace.do_smth()
>>> smth.another_subspace.do_smth_else('other')
This is compatible with Py2 and Py3. Some performance optimizations are possible in Py3 because __set_name__ tells us what the namespace is called and allows caching the instance registry.
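For illustration, a sketch of that optimization (my own code, not the original; requires Python 3.6+ for __set_name__):

class CachingRegistry(Registry):
    def __set_name__(self, owner, name):
        # called at class-creation time with the attribute name, e.g. "subspace"
        self._name = name

    def __get__(self, obj, cls=None):
        if obj is None:
            return self
        cached = InstanceRegistry(self, obj)
        # Registry is a non-data descriptor, so the entry stored in the
        # instance dict shadows it on all later lookups.
        obj.__dict__[self._name] = cached
        return cached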