How to get the class from a method in Python?

I am trying to write a function that returns the class when we pass a given method as an argument.
For example, if we have
class Hello:
    NAME = "HELLO TOTO"

    def method(self) -> int:
        return 5

    @classmethod
    def cls_method(cls) -> str:
        return "Hi"

class Bonjour(Hello):
    NOM = "BONJOUR TOTO"

    def new_method(self) -> int:
        return 0
I would get:
Hello from the methods Hello().method or Hello().cls_method
Bonjour from the methods Bonjour().new_method or Bonjour().cls_method
I searched on SO but could not find any direct answer to my question.
How could I implement such a function (in Python 3.6+, if that matters)?
Thanks

I believe there's no fool-proof way, but this would work for most cases:
def get_class_of_bound_self(f):
    assert hasattr(f, '__self__')
    return f.__self__ if isinstance(f.__self__, type) else type(f.__self__)
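For illustration, applying it to the classes from the question (assuming Hello and Bonjour are defined as above):

print(get_class_of_bound_self(Hello().method))        # -> the Hello class
print(get_class_of_bound_self(Bonjour().cls_method))  # -> the Bonjour class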
Note that this would break down if f is a method of a metaclass M; it would return M instead of type.

I came up with the following solution:
import inspect
from typing import Any, Callable

def get_class(func: Callable[..., Any]) -> Any:
    """Return the class of a method.

    Args:
        func: callable

    Returns:
        Class of the method, if the argument is a method

    Raises:
        AttributeError: if the argument is not callable or not a method
    """
    if not callable(func):
        raise AttributeError(f"{func} shall be callable")
    if not inspect.ismethod(func):
        raise AttributeError(f"Callable {func} shall be a method")
    first_arg = func.__self__  # type: ignore  # methods have a "__self__" attribute
    return first_arg if inspect.isclass(first_arg) else first_arg.__class__
The last line return first_arg if inspect.isclass(first_arg) else first_arg.__class__ is to handle the cases of class methods (in which case func.__self__ corresponds to cls and is the class itself).
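To see why (a quick check with the question's Hello class): for a bound instance method __self__ is the instance, while for a bound class method it is the class itself:

h = Hello()
print(h.method.__self__ is h)          # True: the instance
print(h.cls_method.__self__ is Hello)  # True: the class itself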
Another alternative, without the inspect module, is to catch exceptions (a big thanks to @Elazar for the idea of using isinstance(..., type)):
def get_class(func: Callable[..., Any]) -> Any:
    """Return the class of a method.

    Args:
        func: callable

    Returns:
        Class of the method, if the argument is a method

    Raises:
        AttributeError: if the argument is not callable or not a method
    """
    if not callable(func):
        raise AttributeError(f"{func} shall be callable")
    try:
        first_arg = func.__self__  # type: ignore  # methods have a "__self__" attribute
    except AttributeError:
        raise AttributeError(f"Callable {func} shall be a method")
    return first_arg if isinstance(first_arg, type) else first_arg.__class__
And this is the code I used for testing, in case you are interested:
def my_func() -> int:
    """It feels like a zero"""
    return 0

for method in [
    Hello().method,
    Bonjour().method,
    Hello().cls_method,
    Bonjour().cls_method,
    Bonjour().new_method,
]:
    # MyClass = get_class(func)
    MyClass = get_class_2(method)
    for attr in ["NAME", "NOM"]:
        print(f"... {method} - {attr} ...")
        try:
            print(getattr(MyClass, attr))
        except AttributeError as exp:
            print(f"Error when getting attribute: {exp}")

# class_ = get_class(my_func)
for not_method in [my_func, int, Hello]:
    try:
        MyClass = get_class(not_method)
        print(f"{not_method} => NOK (no exception raised)")
    except AttributeError:
        print(f"{not_method} => OK")

Related

Deferred invocation of a python classmethod obtained in a decorator

I have a decorator meant to wrap a classmethod like this:

class Class(object):
    @register_classmethod
    @classmethod
    def my_class_method(cls):
        ...

My decorator gets a classmethod object. When I attempt to call it, it throws TypeError: 'classmethod' object is not callable.
Here is a sample, with an overly-simplified decorator implementation:
from typing import Callable

all_methods: list[Callable[[type], None]] = []

def register_classmethod(classmeth: Callable[[type], None]) -> Callable[[type], None]:
    all_methods.append(classmeth)
    return classmeth

class Class(object):
    @register_classmethod
    @classmethod
    def my_class_method(cls) -> None:
        print(f"Hello from {cls}.my_class_method")

    @classmethod
    def run_registered_classmethods(cls) -> None:
        for classmeth in all_methods:
            classmeth(cls)

Class.run_registered_classmethods()
While mypy --strict is perfectly happy with the typing, at execution I get:
$ python3 testscripts/test-classmethod-call.py
Traceback (most recent call last):
  File ".../test-classmethod-call.py", line 20, in <module>
    Class.run_registered_classmethods()
  File ".../test-classmethod-call.py", line 18, in run_registered_classmethods
    classmeth(cls)
TypeError: 'classmethod' object is not callable
Now, I am indeed refactoring code that did not have that explicit @classmethod on my_class_method, and that code did run fine:
$ python3 testscripts/test-classmethod-call.py
Hello from <class '__main__.Class'>.my_class_method
However, with the above type annotations, mypy dutifully points out that we're trying to register an instance method here:
testscripts/test-classmethod-call.py:10: error: Argument 1 to "register_classmethod" has incompatible type "Callable[[Class], None]"; expected "Callable[[type], None]" [arg-type]
Note: I think this is also the problem faced in python how to invoke classmethod if I have only it's object, but its initial formulation was likely not on-point enough.
Interpretation and start of a solution
It looks like what we get in this context is the descriptor object underlying the class method. I'd think that we would need to bind it in our wrapper descriptor, e.g. using MethodType as shown here as of 3.11:
class ClassMethod:
    "Emulate PyClassMethod_Type() in Objects/funcobject.c"

    def __init__(self, f):
        self.f = f

    def __get__(self, obj, cls=None):
        if cls is None:
            cls = type(obj)
        if hasattr(type(self.f), '__get__'):
            # This code path was added in Python 3.9
            # and was deprecated in Python 3.11.
            return self.f.__get__(cls, cls)
        return MethodType(self.f, cls)
But we cannot pass the classmethod object to MethodType, and have to dig it up in its (undocumented AFAICT) __func__ member.
Now this does the job:
    @classmethod
    def run_registered_classmethods(cls) -> None:
        for classmeth in all_methods:
            bound_method = types.MethodType(classmeth.__func__, cls)
            bound_method()
This however brings us back to a new typing problem: the classmethod-decorated method has type classmethod, but is annotated as Callable for user programs to make sense of it, which causes mypy to complain:
testscripts/test-classmethod-call.py:19: error: "Callable[[type], None]" has no attribute "__func__" [attr-defined]
We can teach it about the real type by way of assert isinstance(...), and finally have working and well-typed code with:
    @classmethod
    def run_registered_classmethods(cls) -> None:
        for classmeth in all_methods:
            assert isinstance(classmeth, classmethod)
            bound_method = types.MethodType(classmeth.__func__, cls)
            bound_method()
This works but assert does have a runtime cost. So we will want to give a hint in a better way, e.g. using typing.cast():
    @classmethod
    def run_registered_classmethods(cls) -> None:
        for classmeth in all_methods:
            bound_method = types.MethodType(cast(classmethod, classmeth).__func__, cls)
            bound_method()
But while mypy is happy with this on the surface, its --strict option shows our typing is not as precise as it could be:
testscripts/test-classmethod-call.py:20: error: Missing type parameters for generic type "classmethod" [type-arg]
So classmethod is a generic type? I'm pretty sure I did not find any hint of this in the docs. Luckily, reveal_type() and a bit of intuition seem to hint that the generic type parameter is the return type of the class method:
testscripts/test-classmethod-call.py:21: note: Revealed type is "def [_R_co] (def (*Any, **Any) -> _R_co`1) -> builtins.classmethod[_R_co`1]"
(yes, ouch!)
But while cast(classmethod[None], classmeth) reads OK to mypy, Python itself is less than happy: TypeError: 'type' object is not subscriptable.
So we also have to make the interpreter and the type checker look at different code, using typing.TYPE_CHECKING, which brings us to the following:
import types
from typing import Callable, cast, TYPE_CHECKING

all_methods: list[Callable[[type], None]] = []

def register_classmethod(classmeth: Callable[[type], None]) -> Callable[[type], None]:
    all_methods.append(classmeth)
    return classmeth

class Class(object):
    @register_classmethod
    @classmethod
    def my_class_method(cls) -> None:
        print(f"Hello from {cls}.my_class_method")

    @classmethod
    def run_registered_classmethods(cls) -> None:
        for classmeth in all_methods:
            if TYPE_CHECKING:
                realclassmethod = cast(classmethod[None], classmeth)
            else:
                realclassmethod = classmeth
            bound_method = types.MethodType(realclassmethod.__func__, cls)
            bound_method()

Class.run_registered_classmethods()
... which passes as:
$ mypy --strict testscripts/test-classmethod-call.py
Success: no issues found in 1 source file
$ python3 testscripts/test-classmethod-call.py
Hello from <class '__main__.Class'>.my_class_method
This seems overly complicated for something we'd like to be simple and readable, and possibly a generic helper like the following could be provided to make all of this more usable - I'm not really happy with it, even though it passes all the above tests:
_ClassType = TypeVar("_ClassType")
_RType = TypeVar("_RType")

def bound_class_method(classmeth: Callable[[type[_ClassType]], _RType],
                       cls: type[_ClassType]) -> Callable[[], _RType]:
    if TYPE_CHECKING:
        realclassmethod = cast(classmethod[None], classmeth)
    else:
        realclassmethod = classmeth
    return types.MethodType(realclassmethod.__func__, cls)
It does not handle arbitrary arguments to the class method, which we can likely get around using ParamSpec. But this still makes use of several implementation details of classmethod (__func__ and the generic type parameter): the doc says nothing about them.
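For what it's worth, here is a rough ParamSpec-based sketch of that idea (my own guess, requiring Python 3.10+ for ParamSpec/Concatenate; I have not checked it against mypy --strict):

import types
from typing import TYPE_CHECKING, Callable, Concatenate, ParamSpec, TypeVar, cast

_ClassType = TypeVar("_ClassType")
_RType = TypeVar("_RType")
_P = ParamSpec("_P")

def bound_class_method(classmeth: Callable[Concatenate[type[_ClassType], _P], _RType],
                       cls: type[_ClassType]) -> Callable[_P, _RType]:
    # Same trick as above: at type-check time pretend we hold a classmethod object,
    # at runtime just pass the object through unchanged.
    if TYPE_CHECKING:
        realclassmethod = cast(classmethod[_RType], classmeth)
    else:
        realclassmethod = classmeth
    return types.MethodType(realclassmethod.__func__, cls)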
Shouldn't there be a simple way to do that?
Is there any better way?
Edit: curated summary of answers so far
There are tons of info in those answers, thanks :)
What I find most useful in those:
- we cannot today write an annotation that would cause a method decorated with another decorator than @classmethod to get its first parameter cls to be of type type[Class] instead of Class (@droooze)
- as a consequence we have several families of options, none of which is perfect:
  - live with the fact that the method to be decorated does not have a class method signature; let the decorator register the method before wrapping it with classmethod to avoid dealing with the latter's internals, and then return the wrapped version (@chepner).
    Note that when we need to use cls as a type in our classmethod-that-is-not-one-from-the-inside-for-the-type-checker, we can do something like:

        @register_classmethod
        def my_class_method(cls) -> None:
            klass = cast(type[Class], cls)
            print(f"Hello from {klass}.my_class_method")

    It is sad that the class name has to be hardcoded, and the type annotation for the argument to register_classmethod has to be further massaged if we want to do better than Callable[[Any], None].
  - live with the explicit addition of @classmethod and with us making use of its internals, which can also be annoying, as forcing use of that additional decorator causes an API change (@droooze)
  - tell the type checker that our decorator is special like classmethod, and that its first parameter is a type[Class] where it would otherwise have been annotated as Class. The drawback (aside from the cost of writing and maintaining a plugin) is that this requires a separate plugin for each static checker.
The built-in decorators @property, @classmethod, and @staticmethod are likely to be special-cased by each of the 4 major type-checker implementations, which means that interactions with other decorators may not make any sense, even if you theoretically have the type annotations correct.
For mypy, @classmethod is special-cased such that:
- it is transformed into a collections.abc.Callable even though classmethod doesn't even have a __call__ method;
- the callable it decorates has its first parameter transformed into type[<owning class>]. In fact, the only way you can even get a type[<owning class>] object for the first parameter is if you decorate it with builtins.classmethod; no other custom implementation of any typing construct will work, not even a direct subclass of classmethod with no implementation in the body.
As you've found, this is the reason for your runtime error.
If you are specifically using mypy, in the example you gave I would tweak it like this:
from __future__ import annotations

import collections.abc as cx
import typing as t

clsT = t.TypeVar("clsT", bound=type)
P = t.ParamSpec("P")
R_co = t.TypeVar("R_co", covariant=True)

all_methods: list[classmethod[t.Any]] = []

def register_classmethod(classmeth: cx.Callable[[clsT], R_co]) -> classmethod[R_co]:
    # The assertion performs type-narrowing; see
    # https://mypy.readthedocs.io/en/stable/type_narrowing.html
    assert isinstance(classmeth, classmethod)
    all_methods.append(classmeth)  # type: ignore[unreachable]
    return classmeth

class Class(object):
    @register_classmethod
    @classmethod
    def my_class_method(cls) -> None:
        print(f"Hello from {cls}.my_class_method")

    # Not a callable!
    my_class_method()  # mypy: "classmethod[None]" not callable [operator]

    @classmethod
    def run_registered_classmethods(cls) -> None:
        for classmeth in all_methods:
            classmeth.__func__(cls)

    # Too many arguments
    @register_classmethod  # mypy: Argument 1 to "register_classmethod" has incompatible type "Callable[[Type[Class], int], None]"; expected "Callable[[type], None]" [arg-type]
    @classmethod
    def bad_too_many_args(cls, a: int) -> None:
        return

Class.run_registered_classmethods()
If you're doing anything more with classmethods and require proper type-checking in all scopes, I would re-implement the typing for classmethod, as follows:
from __future__ import annotations

import collections.abc as cx
import typing as t

# This is strictly unnecessary, but demonstrates a more accurately implemented
# `classmethod`. Accessing this from inside the class body, from an instance, or from
# a class works as expected.
# Unfortunately, you cannot use `ClassMethod` as a decorator and expect
# the first parameter to be typed correctly (see explanation 2.)
if t.TYPE_CHECKING:
    import sys

    clsT = t.TypeVar("clsT", bound=type)
    P = t.ParamSpec("P")
    R_co = t.TypeVar("R_co", covariant=True)

    class ClassMethod(t.Generic[clsT, P, R_co]):
        # Largely re-implemented from typeshed stubs; see
        # https://github.com/python/typeshed/blob/d2d706f9d8b1a568ff9ba1acf81ef8f6a6b99b12/stdlib/builtins.pyi#L128-L139
        @property
        def __func__(self) -> cx.Callable[t.Concatenate[clsT, P], R_co]: ...
        @property
        def __isabstractmethod__(self) -> bool: ...
        def __new__(cls, __f: cx.Callable[t.Concatenate[clsT, P], R_co]) -> ClassMethod[clsT, P, R_co]: ...
        def __get__(self, __obj: t.Any, __type: type) -> cx.Callable[P, R_co]: ...
        if sys.version_info >= (3, 10):
            __name__: str
            __qualname__: str
            @property
            def __wrapped__(self) -> cx.Callable[t.Concatenate[clsT, P], R_co]: ...  # Same as `__func__`
else:
    ClassMethod = classmethod

all_methods: list[ClassMethod[type, [], t.Any]] = []

def register_classmethod(
    classmeth: cx.Callable[[clsT], R_co]
) -> ClassMethod[clsT, [], R_co]:
    # The assertion performs type-narrowing; see
    # https://mypy.readthedocs.io/en/stable/type_narrowing.html
    assert isinstance(classmeth, ClassMethod)
    all_methods.append(classmeth)  # type: ignore[unreachable]
    return classmeth

class Class(object):
    @register_classmethod
    @classmethod
    def my_class_method(cls) -> None:
        print(f"Hello from {cls}.my_class_method")

    # Not a callable! Fixes problem given in explanation 1.
    my_class_method()  # mypy: "ClassMethod[Type[Class], [], None]" not callable [operator]

    @classmethod
    def run_registered_classmethods(cls) -> None:
        for classmeth in all_methods:
            classmeth.__func__(cls)
            # Not enough arguments
            classmeth.__func__()  # mypy: Too few arguments [call-arg]

    # Too many arguments - `typing.ParamSpec` is working correctly
    @register_classmethod  # mypy: Argument 1 to "register_classmethod" has incompatible type "Callable[[Type[Class], int], None]"; expected "Callable[[type], None]" [arg-type]
    @classmethod
    def bad_too_many_args(cls, a: int) -> None:
        return

Class.run_registered_classmethods()

# `__get__` working correctly - on the descriptor protocol.
# These two error out both for static type checking and at runtime.
Class.my_class_method(type)  # mypy: Too few arguments [call-arg]
Class.my_class_method.__func__  # mypy: "Callable[[], None]" has no attribute "__func__" [attr-defined]
Class methods aren't callable; they define a __get__ method that returns a callable method instance that will pass the class to the underlying function as the first argument.
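A quick way to see this (a small standalone check, independent of the registration code):

class C:
    @classmethod
    def m(cls) -> None:
        pass

raw = C.__dict__["m"]   # the classmethod object stored in the class namespace
print(callable(raw))    # False: the descriptor itself is not callable
print(callable(C.m))    # True: attribute access runs __get__ and returns a bound method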
I might let register_classmethod both store the function and return a class method:
all_methods: list[classmethod] = []

def register_classmethod(classmeth: classmethod) -> classmethod:
    all_methods.append(classmeth)
    return classmethod(classmeth)

class Class(object):
    @register_classmethod
    def my_class_method(cls) -> None:
        print(f"Hello from {cls}.my_class_method")

    @classmethod
    def run_registered_classmethods(cls) -> None:
        for classmeth in all_methods:
            classmeth(cls)
This way, run_registered_classmethods doesn't need to worry about the descriptor protocol: it's just running the underlying function directly.
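For illustration, with this version (assuming the definitions above and running it as a script), the registered plain function is simply called with the class:

Class.run_registered_classmethods()
# prints: Hello from <class '__main__.Class'>.my_class_method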

How to type hint overloaded method with overlapping arguments using Mypy?

I'm facing a problem referenced in the Mypy documentation, but with no workaround provided:
from typing import overload, Union

@overload
def unsafe_func(x: int) -> int: ...
@overload
def unsafe_func(x: object) -> str: ...
def unsafe_func(x: object) -> Union[int, str]:
    if isinstance(x, int):
        return 42
    else:
        return "some string"
This generates the following error (see Mypy playground):
main.py:4: error: Overloaded function signatures 1 and 2 overlap with incompatible return types
Found 1 error in 1 file (checked 1 source file)
I do understand the reason (int is also an object so unsafe_func(42) can't be resolved unambiguously), but I don't know how to fix it.
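For instance (a small sketch of the ambiguity mypy is warning about), the same runtime value can hit either overload depending only on its static type:

x = unsafe_func(42)   # overload 1 applies: mypy infers int
y: object = 42
z = unsafe_func(y)    # overload 2 applies: mypy infers str, but at runtime this returns 42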
How can I type hint the function to express that int -> int and any other object -> str?
The real use case is to create a decorator with an optional argument:
from inspect import isclass

def catcher(function_or_exception):
    # Case when decorator is used with argument
    # 'function_or_exception' is of type 'Exception'
    if isclass(function_or_exception) and issubclass(function_or_exception, BaseException):
        def decorator(decorated):
            def decorate():
                try:
                    decorated()
                except function_or_exception:
                    print("An error occurred")
            return decorate
        return decorator
    else:
        # Case when decorator is used without arguments.
        # 'function_or_exception' is of type 'Callable'
        def decorate():
            try:
                function_or_exception()
            except Exception:
                print("An error occurred")
        return decorate

@catcher
def my_func_1():
    "1" + 1

@catcher(ZeroDivisionError)
def my_func_2():
    1 / 0

my_func_1()
my_func_2()
Allow me to suggest a slightly different implementation that doesn't cause typing issues with @overload. It uses the common pattern for decorators that can be used with or without parentheses. The difference is that the exception class would have to be passed as a keyword argument.
Here is a full working example (Python 3.10+ due to the use of ParamSpec):
from collections.abc import Callable
from functools import wraps
from typing import ParamSpec, TypeVar, overload

P = ParamSpec("P")
T = TypeVar("T")

@overload
def catcher(func: Callable[P, T]) -> Callable[P, T | None]:
    ...

@overload
def catcher(
    *,
    exc: type[BaseException] = Exception,
) -> Callable[[Callable[P, T]], Callable[P, T | None]]:
    ...

def catcher(
    func: Callable[P, T] | None = None,
    *,
    exc: type[BaseException] = Exception,
) -> Callable[P, T | None] | Callable[[Callable[P, T]], Callable[P, T | None]]:
    def decorator(function: Callable[P, T]) -> Callable[P, T | None]:
        @wraps(function)
        def inner_wrapper(*args: P.args, **kwargs: P.kwargs) -> T | None:
            try:
                return function(*args, **kwargs)
            except exc as e:
                print(f"{e.__class__.__name__} occurred")
            return None
        return inner_wrapper
    return decorator if func is None else decorator(func)

@catcher
def my_func_1() -> None:
    raise TypeError

@catcher(exc=ZeroDivisionError)
def my_func_2() -> float:
    return 1 / 0

if __name__ == '__main__':
    my_func_1()
    x = my_func_2()
Output:
TypeError occurred
ZeroDivisionError occurred
No issues with mypy --strict.
While using the decorator is only slightly more verbose, it is arguably a bit "safer" with regard to the types involved. The implementation itself on the other hand is less verbose because you only define the inner functions/wrappers once.
I tried a few things, but was unable to safely construct the desired signature using the exception class as a positional argument.
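As a quick sanity check of how the overloads resolve (a sketch building on the code above; mypy should accept these annotated assignments):

f1: Callable[[], None] = my_func_1          # @catcher without parentheses hit the first overload (T = None)
f2: Callable[[], float | None] = my_func_2  # @catcher(exc=...) hit the second overload; the wrapper may swallow the exception and return None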

Python typing: Narrowing the scope of output subclass instance by base class parameter

I found this difficult to describe without an example.
from typing import List, Type, Optional, cast

class Base():
    kind = 'base'

class Child1(Base):
    kind = 'child1'

class Child2(Base):
    kind = 'child2'

class Child3(Base):
    kind = 'child3'

def find_in_list(lst: List[Base], SearchClass: Type[Base]):  # Return Optional[??]
    for obj in lst:  # type: Base
        if obj.kind == SearchClass.kind:
            return obj
    return None

lst = [Child1(), Child2(), Child3()]

def func2(c: Child2) -> None:
    assert isinstance(c, Child2)

res2_opt: Optional[Child2] = find_in_list(lst, Child2)
if res2_opt:
    func2(res2_opt)

def func3(c: Child3) -> None:
    assert isinstance(c, Child3)

res3_opt: Optional[Child3] = find_in_list(lst, Child2)  # Should be Error!
if res3_opt:
    func3(res3_opt)  # Is AssertionError
Because the return type of find_in_list is too open, mypy doesn't consider anything to be wrong with this, yet at runtime it correctly hits the assertion error. I tried -> Optional[SearchClass], but it (correctly) doesn't recognize that as a type.
It should be possible to narrow the scope of the return type of find_in_list to be parameterized the same as or the same way as SearchClass is, such that if you pass in Child2 as a parameter, you can restrict the output to be Optional[Child2]. How can this be done?
You should make the function generic using TypeVar and cast the return value:
from typing import TypeVar, cast

T = TypeVar('T', bound=Base)

def find_in_list(lst: List[Base], SearchClass: Type[T]) -> Optional[T]:
    for obj in lst:
        if obj.kind == SearchClass.kind:
            return cast(T, obj)
    return None
Then:

res3_opt: Optional[Child3] = find_in_list(lst, Child2)
# Mypy: Incompatible types in assignment (expression has type "Optional[Child2]",
# variable has type "Optional[Child3]")
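If you prefer to avoid the cast (which is unchecked at runtime), a variant based on isinstance should also satisfy mypy, since it narrows obj to T when the second argument has type Type[T] (a sketch, not part of the answer above; note it matches on the actual class, including subclasses, rather than on the kind attribute):

def find_in_list(lst: List[Base], SearchClass: Type[T]) -> Optional[T]:
    for obj in lst:
        if isinstance(obj, SearchClass):
            return obj
    return None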

Python how to get __qualname__ of method wrapped with property.setter

I have an instance attribute that I made a property using Python's property decorator.
I then made a setter for the property using the decorator @property_name.setter.
How can I get the __qualname__ of the original method definition, decorated with @property.setter?
Where I Have Looked
- Python: __qualname__ of function with decorator
  I don't think property uses @functools.wraps()
- Python @property.setter
  I realize property is actually a descriptor
- Decorating a class method after @property
  Tells me I may want to use __get__, but I can't figure out the syntax
Example Code
This was written in Python 3.6.
#!/usr/bin/env python3
def print_qualname():
    """Wraps a method, printing its qualified name."""
    def print_qualname_decorator(func):
        # print(f"func = {func} and dir(): {dir(func)}")
        if hasattr(func, "__qualname__"):
            print(f"Qualified name = {func.__qualname__}.")
        else:
            print("Doesn't have qualified name.")
    return print_qualname_decorator

class SomeClass:
    def __init__(self):
        self._some_attr = 0
        self._another_attr = 0

    @property
    def some_attr(self) -> int:
        return self._some_attr

    @print_qualname()
    @some_attr.setter
    def some_attr(self, val: int) -> None:
        self._some_attr = val

    @print_qualname()
    def get_another_attr(self) -> int:
        return self._another_attr
Output:
Doesn't have qualified name.
Qualified name = SomeClass.get_another_attr.
How can I get the __qualname__ for some_attr from inside the print_qualname decorator? In other words, how do I get SomeClass.some_attr to be output?
You could flip the ordering of the decorators for the setter. Note I've adjusted print_qualname_decorator to call the underlying function and return its result (otherwise the setter would not execute).
from functools import wraps

def print_qualname(func):
    """Wraps a method, printing its qualified name."""
    @wraps(func)
    def print_qualname_decorator(*args):
        if hasattr(func, "__qualname__"):
            print(f"Qualified name = {func.__qualname__}.")
        else:
            print("Doesn't have qualified name.")
        return func(*args)
    return print_qualname_decorator

class SomeClass:
    def __init__(self):
        self._some_attr = 0
        self._another_attr = 0

    @property
    def some_attr(self) -> int:
        return self._some_attr

    @some_attr.setter
    @print_qualname
    def some_attr(self, val: int) -> None:
        self._some_attr = val

    @print_qualname
    def get_another_attr(self) -> int:
        return self._another_attr
Use
In [46]: foo = SomeClass()
In [47]: foo.get_another_attr()
Qualified name = SomeClass.get_another_attr.
Out[47]: 0
In [48]: foo.some_attr = 5
Qualified name = SomeClass.some_attr.
In [49]: foo._some_attr
Out[49]: 5
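Alternatively (just a sketch, not part of the answer above), once the class body has executed you can also reach the wrapped setter through the property object's fset attribute, since functools.wraps preserved its __qualname__:

print(SomeClass.some_attr.fset.__qualname__)
# SomeClass.some_attr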

Type annotation of a subclass method that returns an instance of the class

I have a MeasurementBase abstract parent class that has the following functionality: public serialize() & deserialize() methods.
I would love to implement the common logic in the parent's public methods and define the private _serialize() & _deserialize() as abstract.
The base class implementation is as follows:
import json
from abc import ABC, abstractmethod
from typing import Any, Dict, Generic, TypeVar

TMeasurement = TypeVar("TMeasurement")

class MeasurementBase(ABC, Generic[TMeasurement]):
    def serialize(self) -> str:
        properties: Dict[str, Any] = {}
        properties["measurement_name"] = self.__class__.__name__
        properties["value"] = self._serialize()
        return json.dumps(properties)

    @abstractmethod
    def _serialize(self) -> Any:
        pass  # pragma: no cover

    @classmethod
    def deserialize(cls, json_str: str) -> TMeasurement:
        try:
            properties = json.loads(json_str)
            class_name = properties["measurement_name"]
            if cls.__name__ != class_name:
                raise KeyError()
            return cls._deserialize(properties["value"])
        except KeyError:
            raise KeyError()

    @classmethod
    @abstractmethod
    def _deserialize(cls, obj: Any) -> TMeasurement:
        pass  # pragma: no cover
Then, I created a new subclass with internal _val state that implements the abstract methods:
class MeasurementBaseMock(MeasurementBase["MeasurementBaseMock"]):
    def __init__(self, val: int) -> None:
        self._val = val

    def _serialize(self) -> Any:
        return self._val

    @classmethod
    def _deserialize(cls, obj: int) -> "MeasurementBaseMock":
        return MeasurementBaseMock(obj)
Then, when testing the above:
properties = {"value": 324, "measurement_name": MeasurementBaseMock.__name__}
measurement = MeasurementBaseMock.deserialize(json.dumps(properties))
print(measurement._val)
I got the following error:
error: "TMeasurement" has no attribute "_val"
How do I annotate the _deserialize return type to satisfy the type checker?
Things that I tried:
- I tried using TMeasurement = TypeVar("TMeasurement", bound="MeasurementBase") - it didn't work and produced even more mypy errors.
- I moved all the state's initialization to the parent class - this did work, but I prefer not to use such a design.
- I also tried using cls annotations but got the same error.
Note:
I'm using mypy
> mypy --version
mypy 0.660
