I have the following scenario:
class A:
    b = 1
    pass
x = A()
y = A()
Can I change this class so that x.b = 2 is equivalent to A.b = 2? I mean, when I change the static variable through one instance, it's changed for all instances.
Edit: I want to be able to work with multiple different instances of this class.
You can, but it's kind of ugly:
class A:
    b = 1

    def __setattr__(self, name, value):
        if name == "b":
            setattr(A, "b", value)
        else:
            super().__setattr__(name, value)
This would work as expected now:
>>> a = A()
>>> a.b = 3
>>> A.b
3
>>> A.b = 5
>>> a.b
5
The real question is: Why would you want that?
If you're going to use this often, it might be nice to write a decorator for it:
def sharedclassvar(variable):
    def __setattr__(self, name, value):
        if name in self.__class__._sharedclassvars:
            setattr(self.__class__, name, value)
        elif hasattr(self.__class__, "__oldsetattr__"):
            self.__class__.__oldsetattr__(self, name, value)
        else:
            # zero-argument super() only works inside a class body,
            # so fall back to object.__setattr__ explicitly here
            object.__setattr__(self, name, value)

    def decorator(cls):
        if not hasattr(cls, "_sharedclassvars"):
            cls._sharedclassvars = []
        if hasattr(cls, "__setattr__"):
            cls.__oldsetattr__ = getattr(cls, "__setattr__")
        cls.__setattr__ = __setattr__
        cls._sharedclassvars.append(variable)
        return cls

    return decorator
You can then define such a class like this:
@sharedclassvar("b")
class A:
    b = 1
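For completeness, here is a quick sanity check of how the decorated class should behave (my own sketch, assuming the decorator above works as intended):

x = A()
y = A()
x.b = 2                  # "b" is registered as shared, so this writes to the class
print(A.b, y.b)          # 2 2
x.c = 10                 # any other attribute stays a normal instance attribute
print(hasattr(y, "c"))   # False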
I need to create a class whose instances can't have the same value. If you create an instance with a value that has already been used, you get back the same old instance.
I did it using a special class method:
class A:
    instances = []

    def __init__(self, val):
        self.val = val

    @classmethod
    def new(cls, val):
        """
        Return an instance with the same value, or create a new one.
        """
        for ins in cls.instances:
            if ins.val == val:
                return ins
        new_ins = A(val)
        cls.instances.append(new_ins)
        return new_ins
a1 = A.new("x")
a2 = A.new("x")
a3 = A.new("y")
print a1  # <__main__.A instance at 0x05B7FD00>  <- same instance
print a2  # <__main__.A instance at 0x05B7FD00>  <- same instance
print a3  # <__main__.A instance at 0x05B7FD28>
Is there a more elegant way to do it, without using the .new method?
You could try functools.lru_cache.
For example:
from functools import lru_cache

class A:
    @lru_cache()
    def __new__(cls, arg):
        return super().__new__(cls)

    def __init__(self, arg):
        self.n = arg
Sample usage:
>>> a1 = A('1')
>>> a2 = A('1')
>>> a1 is a2
True
>>> a1.n
'1'
>>> a2.n
'1'
Alternatively you could try building a custom caching class, as pointed out by Raymond Hettinger in this tweet: https://twitter.com/raymondh/status/977613745634471937.
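As a sketch of what such a custom caching class could look like (my own example, not from the tweet; the WeakValueDictionary choice is an assumption so that cached instances can still be garbage collected, and it requires the constructor argument to be hashable):

import weakref

class Cached:
    _cache = weakref.WeakValueDictionary()

    def __new__(cls, val):
        obj = cls._cache.get(val)
        if obj is None:
            obj = super().__new__(cls)
            obj.val = val
            cls._cache[val] = obj
        return obj

With this, Cached('x') is Cached('x') holds for as long as the first instance is still referenced somewhere.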
This can be done by overriding the __new__ method, which is responsible for creating new instances of a class. Whenever you create a new instance you store it in a dict, and if the dict contains a matching instance then you return it instead of creating a new one:
class A:
    instances = {}

    def __new__(cls, val):
        try:
            return cls.instances[val]
        except KeyError:
            pass

        obj = super().__new__(cls)
        cls.instances[val] = obj
        return obj

    def __init__(self, val):
        self.val = val
a = A(1)
b = A(2)
c = A(1)
print(a is b) # False
print(a is c) # True
One downside of this solution is that the __init__ method will be called regardless of whether the instance is a newly created one or one that's been stored in the dict. This can cause problems if your constructor has undesired side effects:
class A:
    ...  # same __new__ and instances dict as above

    def __init__(self, val):
        self.val = val
        self.foo = 'foo'
a = A(1)
a.foo = 'bar'
b = A(1)
print(a.foo) # output: foo
Notice how a's foo attribute changed from "bar" to "foo" when b was created.
Another option is to use a metaclass and override its __call__ method:
class MemoMeta(type):
    def __new__(mcs, name, bases, attrs):
        cls = super().__new__(mcs, name, bases, attrs)
        cls.instances = {}
        return cls

    def __call__(cls, val):
        try:
            return cls.instances[val]
        except KeyError:
            pass

        obj = super().__call__(val)
        cls.instances[val] = obj
        return obj

class A(metaclass=MemoMeta):
    def __init__(self, val):
        self.val = val
        self.foo = 'foo'
This bypasses the problem with __init__ being called on existing instances:
a = A(1)
a.foo = 'bar'
b = A(1)
print(a.foo) # output: bar
If you really want to make it more elegant, implement the duplicate check in __new__, so it will be performed when you call A(something).
Just do it in __new__:
def __new__(cls, val=None):
    for i in cls.instances:
        if val == i.val:
            return i
    return object.__new__(cls)
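Note that the snippet above never adds the freshly created instance to cls.instances, so by itself it will not deduplicate anything; a more complete sketch (my own assumption, reusing the instances list from the question's class) could look like:

class A:
    instances = []

    def __new__(cls, val=None):
        for i in cls.instances:
            if val == i.val:
                return i
        obj = object.__new__(cls)
        obj.val = val
        cls.instances.append(obj)
        return obj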
Python has a magic __getattr__ method that allows custom values to be returned:
class A(object):
    def __getattr__(self, name):
        return name

B = A()
print B.foo  # 'foo'
However, calling A.foo has no similar effect, because A is not an instance.
Using metaclasses, Google App Engine raises this error on instantiation:
File "/base/python27_runtime/python27_lib/versions/1/google/appengine/ext/db/__init__.py", line 913, in __init__
key_name.__class__.__name__)
BadKeyError: Name must be string type, not tuple
Assuming the referenced question is correctly implemented, what other ways can a magic class __getattr__ be implemented?
The metaclass solution should work, here is an example:
class GetAttrMeta(type):
    def __getattr__(self, name):
        return name

class A(object):
    __metaclass__ = GetAttrMeta

print A.foo  # 'foo'
Or with Python 3.x:
class GetAttrMeta(type):
    def __getattr__(self, name):
        return name

class A(object, metaclass=GetAttrMeta):
    pass

print(A.foo)  # 'foo'
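Note that the metaclass __getattr__ only kicks in for attribute lookups on the class itself; instances are unaffected unless the class (not the metaclass) also defines its own __getattr__. A quick check against the Python 3 version above (my own example):

print(A.foo)   # 'foo' -- resolved by GetAttrMeta.__getattr__
try:
    A().foo    # instance lookup never reaches the metaclass
except AttributeError as e:
    print(e)   # 'A' object has no attribute 'foo'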
Not sure if this answers your question, but maybe check out descriptors, which property is built on:
class RevealAccess(object):
    """A data descriptor that sets and returns values
    normally and prints a message logging their access.
    """
    def __init__(self, initval=None, name='var'):
        self.val = initval
        self.name = name

    def __get__(self, obj, objtype):
        print 'Retrieving', self.name
        return self.val

    def __set__(self, obj, val):
        print 'Updating', self.name
        self.val = val
>>> class MyClass(object):
...     x = RevealAccess(10, 'var "x"')
...     y = 5
...
>>> MyClass.x
Retrieving var "x"
10
>>> MyClass().x
Retrieving var "x"
10
>>>
>>> m = MyClass()
>>> m.x
Retrieving var "x"
10
>>> m.x = 20
Updating var "x"
>>> m.x
Retrieving var "x"
20
>>> m.y
5
TL;DR: Having to define a unique set of getters and setters for each property()'d variable sucks. Can I define generic getters and setters and use them with whatever variable I want?
Let's say I make a class with some nice getters and setters:
class Foo:
    def getter(self):
        return self._bar + ' sasquatch'
    def setter(self, value):
        self._bar = value + ' unicorns'
    bar = property(getter, setter)
Pretty great, right?
Now let's say I put in another variable called "baz" and I don't want it to be left out from this sasquatch/unicorn fun. Well, I guess I could make another set of getters and setters:
class Foo:
    def bar_getter(self):
        return self._bar + ' sasquatch'
    def bar_setter(self, value):
        self._bar = value + ' unicorns'
    bar = property(bar_getter, bar_setter)

    def baz_getter(self):
        return self._baz + ' sasquatch'
    def baz_setter(self, value):
        self._baz = value + ' unicorns'
    baz = property(baz_getter, baz_setter)
But that's not very DRY and needlessly clutters my code. I guess I could make it a bit DRYer:
class Foo:
    def unicornify(self, value):
        return value + ' unicorns'
    def sasquatchify(self, value):
        return value + ' sasquatch'

    def bar_getter(self):
        return self.sasquatchify(self._bar)
    def bar_setter(self, value):
        self._bar = self.unicornify(value)
    bar = property(bar_getter, bar_setter)

    def baz_getter(self):
        return self.sasquatchify(self._baz)
    def baz_setter(self, value):
        self._baz = self.unicornify(value)
    baz = property(baz_getter, baz_setter)
Although that might make my code DRYer, it's not ideal. If I wanted to unicornify and sasquatchify two more variables, I would have to add four more functions!
There must be a better way to do this. Can I use a single generic getter and/or setter across multiple variables?
Unicorn-less and sasquatch-less real-world implementation: I'm using SQLAlchemy ORM, and want to transform some of the data when storing and retrieving it from the database. Some of the transformations are applicable to more than one variable, and I don't want to clutter my classes with getters and setters.
How about just:
def sasquatchicorn(name):
    return property(lambda self: getattr(self, name) + ' sasquatch',
                    lambda self, val: setattr(self, name, val + ' unicorns'))

class Foo(object):
    bar = sasquatchicorn('_bar')
    baz = sasquatchicorn('_baz')
Somewhat more generically:
def sasquatchify(val):
    return val + ' sasquatch'

def unicornify(val):
    return val + ' unicorns'

def getset(name, getting, setting):
    return property(lambda self: getting(getattr(self, name)),
                    lambda self, val: setattr(self, name, setting(val)))

class Foo(object):
    bar = getset('_bar', sasquatchify, unicornify)
    baz = getset('_baz', sasquatchify, unicornify)
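A quick check of how the getset factory behaves (my own example, assuming the Foo above):

>>> f = Foo()
>>> f.bar = 'magic'
>>> f._bar
'magic unicorns'
>>> f.bar
'magic unicorns sasquatch'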
Or, with barely more work, you can use the full descriptor protocol, as described in agf's answer.
This is what the descriptor protocol, which property is based on, is for:
class Sasicorn(object):
    def __init__(self, attr):
        self.attr = "_" + attr
    def __get__(self, obj, objtype):
        return getattr(obj, self.attr) + ' sasquatch'
    def __set__(self, obj, value):
        setattr(obj, self.attr, value + ' unicorns')

class Foo(object):
    def __init__(self, value="bar"):
        self.bar = value
        self.baz = "baz"
    bar = Sasicorn('bar')
    baz = Sasicorn('baz')
foo = Foo()
foo2 = Foo('other')
print foo.bar
# prints bar unicorns sasquatch
print foo.baz
# prints baz unicorns sasquatch
print foo2.bar
# prints other unicorns sasquatch
While property in a factory function may be fine for your toy example, it sounds like maybe you need more control for your real use case.
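Since the real use case is transforming values on their way to and from the database, it may also be worth knowing that SQLAlchemy has a purpose-built hook for this: a custom column type based on TypeDecorator. A rough sketch (the Suffixed name and the exact transformations are my own illustrative assumptions, not part of the question):

from sqlalchemy.types import TypeDecorator, String

class Suffixed(TypeDecorator):
    """Appends ' unicorns' on the way into the database and
    ' sasquatch' on the way back out."""
    impl = String

    def process_bind_param(self, value, dialect):
        # called when the value is sent to the database
        return None if value is None else value + ' unicorns'

    def process_result_value(self, value, dialect):
        # called when the value is loaded from the database
        return None if value is None else value + ' sasquatch'

A column declared with this type would then be transformed transparently by the ORM, with no per-attribute getters or setters on the mapped class.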
Using __getattribute__ and __setattr__ you can define this for all attributes, past and future.
class Foo(object):
    x = 3
    def __getattribute__(self, attr):
        return str(object.__getattribute__(self, attr)) + ' sasquatch'
    def __setattr__(self, attr, value):
        object.__setattr__(self, attr, str(value) + ' unicorn')
print Foo.x
f = Foo()
print f.x
f.y = 4
print f.y
This prints:
3
3 sasquatch
4 unicorn sasquatch
A colleague of mine suggested using closures to return getter and setter functions, which is what I've decided to use.
class Foo(object):
    def setter(var):
        def set(self, value):
            setattr(self, var, value + ' unicorn')
        return set
    def getter(var):
        def get(self):
            return getattr(self, var) + ' sasquatch'
        return get
    bar = property(getter('_bar'), setter('_bar'))
f = Foo()
f.bar = 'hi'
print f.bar  # hi unicorn sasquatch
But thank you all for your answers :)
# coding=utf-8
__author__ = 'Ahmed Şeref GÜNEYSU'

class Student(object):
    def __init__(self, **kwargs):
        for k, v in kwargs.iteritems():
            self.__setattr__(k, v)

if __name__ == '__main__':
    o = Student(first_name='Ahmed Şeref', last_name='GÜNEYSU')
    print "{0} {1}".format(o.first_name, o.last_name)
    print o.email
Gives
Ahmed Şeref GÜNEYSU
File "/Users/ahmed/PycharmProjects/sanbox/abstract_classes/object_initializer/__init__.py", line 13, in <module>
print o.email
AttributeError: 'Student' object has no attribute 'email'
Process finished with exit code 137
Say I define this descriptor:
class MyDescriptor(object):
    def __get__(self, instance, owner):
        return self._value
    def __set__(self, instance, value):
        self._value = value
    def __delete__(self, instance):
        del self._value
And I use it in this:
class MyClass1(object):
    value = MyDescriptor()
>>> m1 = MyClass1()
>>> m1.value = 1
>>> m2 = MyClass1()
>>> m2.value = 2
>>> m1.value
2
So value is a class attribute and is shared by all instances.
Now if I define this:
class MyClass2(object):
    value = 1
>>> y1 = MyClass2()
>>> y1.value=1
>>> y2 = MyClass2()
>>> y2.value=2
>>> y1.value
1
In this case value is an instance attribute and is not shared by the instances.
Why is it that when value is a descriptor it can only be a class attribute, but when value is a simple integer it becomes an instance attribute?
You're ignoring the instance parameter in your implementation of MyDescriptor. That is why it appears to be a class attribute. Perhaps you want something like this:
class MyDescriptor(object):
    def __get__(self, instance, owner):
        return instance._value
    def __set__(self, instance, value):
        instance._value = value
    def __delete__(self, instance):
        del instance._value
This fix will not work, however, if you try the code below, because both descriptors store their data in the same _value attribute on the instance:

class MyClass1(object):
    value = MyDescriptor()
    value2 = MyDescriptor()

c = MyClass1()
c.value = 'hello'
c.value2 = 'world'
# c.value is now also 'world'
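One way around that (a sketch of my own, assuming Python 3.6+ for __set_name__) is to give each descriptor its own storage attribute derived from the name it is assigned to, and keep the data on the instance:

class MyDescriptor(object):
    def __set_name__(self, owner, name):
        self.storage = '_' + name

    def __get__(self, instance, owner):
        if instance is None:
            return self
        return getattr(instance, self.storage)

    def __set__(self, instance, value):
        setattr(instance, self.storage, value)

    def __delete__(self, instance):
        delattr(instance, self.storage)

With this version, c.value = 'hello' and c.value2 = 'world' end up in separate _value and _value2 attributes, so c.value stays 'hello'.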
I have:
class C:
    aaa = 2

class B:
    def __init__(self, name, value):
        self.name = name
        self.value = value
How can I define class C so that when I dynamically set an attribute on an instance, that attribute becomes an instance of class B? B's name attribute should hold the name of the new attribute as a string, and B's value attribute should hold the value that was assigned to the new attribute on the C instance.
It has to give this result:
>>> c = C()
>>> c.whatever = 'strinstrinsstring'
>>> isinstance(c.whatever, B)
True
>>> c.whatever.value
'strinstrinsstring'
>>> c.whatever.name
'whatever'
Just smartly override __setattr__. If you want to do it only for a specific attribute, then put in a special case for the attribute name that you want to look for:
>>> class B:
...     def __init__(self, name, value):
...         self.name = name
...         self.value = value
...
>>> class C:
...     def __setattr__(self, name, value):
...         if name == 'makeMeB':
...             newb = B(name, value)
...             self.__dict__[name] = newb
...         else:
...             self.__dict__[name] = value
...
>>> c = C()
>>> c.makeMeB = 'foo'
>>> isinstance(c.makeMeB, B)
True
>>> c.makeMeB.name
'makeMeB'
>>> c.makeMeB.value
'foo'
>>> c.blah = 'foo'
>>> isinstance(c.blah, B)
False
If you want it for every attribute, just forget the if and it'll do it for everything:
>>> class B:
...     def __init__(self, name, value):
...         self.name = name
...         self.value = value
...
>>> class C:
...     def __setattr__(self, name, value):
...         attr_as_b = B(name, value)
...         self.__dict__[name] = attr_as_b
...
>>> c = C()
>>> c.makeMeB = 'foo'
>>> isinstance(c.makeMeB, B)
True
>>> c.makeMeB.name
'makeMeB'
>>> c.makeMeB.value
'foo'
>>> c.amIalsoB = 'well?'
>>> isinstance(c.amIalsoB, B)
True
>>> c.amIalsoB.name
'amIalsoB'
>>> c.amIalsoB.value
'well?'
This is a horrible thing to do, because it changes what attributes mean!
Why not just look at the __dict__ of c:
>>> class C(object):
...     pass
...
>>> c = C()
>>> c.spam = 'ham'
>>> c.__dict__
{'spam': 'ham'}