I am referencing the answer on another Stack Overflow post about using the typing library's Literal to restrict a field to a fixed set of strings when validating data with Pydantic, but I am running into a problem when trying to reference another model class.
This is what my code looks like:
from pydantic import BaseModel, PydanticValueError, ValidationError, validator
from typing import Literal, Optional

ACTION_TYPE_MAPPING = Literal["read", "write", "release"]

OBJECT_TYPE_MAPPING = Literal["multiStateValue", "multiStateInput", "multiStateOutput",
                              "analogValue", "analogInput", "analogOutput",
                              "binaryValue", "binaryInput", "binaryOutput"]

BOOLEAN_ACTION_MAPPING = Literal["active", "inactive"]


# MAIN MODEL
class BacnetRequestModel(BaseModel):
    action_type: ACTION_TYPE_MAPPING
    object_type: OBJECT_TYPE_MAPPING
    object_instance: int
    value: Optional[ValueModel(object_type)]  # <---- MESSED UP HERE, how to call ValueModel?


class ValueModel(BaseModel):
    multiStateValue: Optional[int]
    multiStateInput: Optional[int]
    multiStateOutput: Optional[int]
    analogValue: Optional[int]
    analogInput: Optional[int]
    analogOutput: Optional[int]
    binaryValue: Optional[BOOLEAN_ACTION_MAPPING]
    binaryInput: Optional[BOOLEAN_ACTION_MAPPING]
    binaryOutput: Optional[BOOLEAN_ACTION_MAPPING]


test = BacnetRequestModel(action_type="write",
                          object_type="binaryOutput",
                          object_instance="3",
                          value="active"
                          )
How do I reference the class ValueModel based on the object_type that was passed in, where in this case it was binaryOutput, which should only accept a value from BOOLEAN_ACTION_MAPPING? Any tips help, not a lot of wisdom here...
Traceback is:
value = Optional[ValueModel(object_type)]
NameError: name 'ValueModel' is not defined
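Not part of the original post, but one possible direction, sketched under the assumption of pydantic v1: drop the nested model, declare value as a union, and use a validator that checks value against the already-validated object_type. The binary/analog split below is my own guess at the intended rules.

from typing import Literal, Optional, Union
from pydantic import BaseModel, validator

ACTION_TYPE_MAPPING = Literal["read", "write", "release"]
OBJECT_TYPE_MAPPING = Literal["multiStateValue", "multiStateInput", "multiStateOutput",
                              "analogValue", "analogInput", "analogOutput",
                              "binaryValue", "binaryInput", "binaryOutput"]
BOOLEAN_ACTION_MAPPING = Literal["active", "inactive"]


class BacnetRequestModel(BaseModel):
    action_type: ACTION_TYPE_MAPPING
    object_type: OBJECT_TYPE_MAPPING
    object_instance: int
    # Accept either kind of value; the validator below narrows it per object_type.
    value: Optional[Union[BOOLEAN_ACTION_MAPPING, int]] = None

    @validator("value")
    def value_matches_object_type(cls, v, values):
        if v is None:
            return v
        # object_type is declared before value, so it has already been validated here.
        object_type = values.get("object_type")
        if object_type and object_type.startswith("binary"):
            if v not in ("active", "inactive"):
                raise ValueError("binary objects only accept 'active' or 'inactive'")
        elif not isinstance(v, int):
            raise ValueError(f"{object_type} expects an integer value")
        return v


test = BacnetRequestModel(action_type="write",
                          object_type="binaryOutput",
                          object_instance=3,
                          value="active")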
I am expecting multiple data types as input to a function and want to take a specific action if it's a pydantic model (by pydantic model I mean a class like StartReturnModel(BaseModel)).
In the case of a model instance I can check it using isinstance(model, StartReturnModel) or isinstance(model, BaseModel) to identify that it's a pydantic model instance.
Based on the test program below I can see that type(StartReturnModel) returns ModelMetaclass. Can I use this to identify a pydantic model, or is there a better way to do it?
from pydantic import BaseModel
from pydantic.main import ModelMetaclass
from typing import Optional


class StartReturnModel(BaseModel):
    result: bool
    pid: Optional[int]


print(type(StartReturnModel))
print(f"is base model: {bool(isinstance(StartReturnModel, BaseModel))}")
print(f"is meta model: {bool(isinstance(StartReturnModel, ModelMetaclass))}")

res = StartReturnModel(result=True, pid=500045)
print(f"\n{type(res)}")
print(f"is start model(res): {bool(isinstance(res, StartReturnModel))}")
print(f"is base model(res): {bool(isinstance(res, BaseModel))}")
print(f"is meta model(res): {bool(isinstance(res, ModelMetaclass))}")
Output:
<class 'pydantic.main.ModelMetaclass'>
is base model: False
is meta model: True
<class '__main__.StartReturnModel'>
is start model(res): True
is base model(res): True
is meta model(res): False
Yes, you can use it, but why not use isinstance or issubclass?
After you expanded a bit in the comment thread, it is clear that you have a fundamental gap in understanding of Python classes and metaclasses. The topics have been discussed at length on SO, so I'll just refer you to the search function for details, but the short answer to your particular question is this:
from pydantic import BaseModel
from pydantic.main import ModelMetaclass


class MyModel(BaseModel):
    x: int
    y: str


obj = MyModel(x=1, y="a")
cls = MyModel

print(f"{isinstance(obj, BaseModel)=}")
print(f"{isinstance(obj, MyModel)=}")
print(f"{issubclass(cls, BaseModel)=}")
print(f"{issubclass(cls, MyModel)=}")
print(f"{cls is MyModel=}")
print(f"{isinstance(cls, ModelMetaclass)=}")  # just to illustrate
print(f"{isinstance(cls, type)=}")  # just to illustrate
Output:
isinstance(obj, BaseModel)=True
isinstance(obj, MyModel)=True
issubclass(cls, BaseModel)=True
issubclass(cls, MyModel)=True
cls is MyModel=True
isinstance(cls, ModelMetaclass)=True
isinstance(cls, type)=True
You should avoid using pydantic.main.ModelMetaclass because it is currently not (fully) exposed publicly, as you correctly noted. And as you can see from the code above, there is simply no need for you to deal with it.
If you have a function that is supposed to handle both instances of a model class and specific classes that inherit from BaseModel, that could look like this:
from typing import Union

from pydantic import BaseModel


def do_stuff(obj_or_cls: Union[BaseModel, type[BaseModel]]) -> None:
    if isinstance(obj_or_cls, BaseModel):
        print(f"Got an instance of the model `{obj_or_cls.__class__.__name__}`")
    elif isinstance(obj_or_cls, type) and issubclass(obj_or_cls, BaseModel):
        print(f"Got a model subclass called `{obj_or_cls.__name__}`")
    else:
        raise TypeError


class MyModel(BaseModel):
    x: int
    y: str


obj = MyModel(x=1, y="a")
cls = MyModel
do_stuff(obj)
do_stuff(cls)
Output:
Got an instance of the model `MyModel`
Got a model subclass called `MyModel`
My pydantic nested model is defined as below:
from pydantic import BaseModel
from typing import Optional


class Location(BaseModel):
    city: Optional[str]
    state: str
    country: str


class User(BaseModel):
    id: int
    name: Optional[str] = "Gandalf"
    age: Optional[int]
    location: Location
I would like to get all required fields for the User model.
For the above example, the expected output is ["id", "name", "state", "country"].
Any help greatly appreciated.
Here is a solution with a generator function:
from collections.abc import Iterator

from pydantic import BaseModel


def required_fields(model: type[BaseModel], recursive: bool = False) -> Iterator[str]:
    for name, field in model.__fields__.items():
        t = field.type_
        if not field.required:
            continue
        if recursive and isinstance(t, type) and issubclass(t, BaseModel):
            yield from required_fields(t, recursive=True)
        else:
            yield name
Using the models you defined in your example, we can demonstrate it like this:
print(list(required_fields(User, recursive=True)))
Output:
['id', 'state', 'country']
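As a side note (my own addition, assuming pydantic v1 semantics where Optional fields without an explicit default are not required), the non-recursive call only yields the top-level required field names:

print(list(required_fields(User)))
# ['id', 'location']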
I have a JSON object that reads:
j = {"id": 1, "label": "x"}
I have two types:
class BaseModel:
    def __init__(self, uuid):
        self.uuid = uuid


class Entity(BaseModel):
    def __init__(self, id, label):
        super().__init__(id)
        self.name = label
Note how id is stored as uuid in the BaseModel.
I can load Entity from the JSON object as:
entity = Entity(**j)
I want to re-write my model leveraging dataclass:
@dataclass
class BaseModel:
    uuid = str


@dataclass
class Entity:
    name = str
Since my JSON object does not have a uuid key, entity = Entity(**j) on the dataclass-based model will throw the following error:
TypeError: __init__() got an unexpected keyword argument 'id'
The "ugly" solutions I can think of:
Rename id to uuid in JSON before initialization:
j["uuid"] = j.pop("id")
Define both id and uuid:
@dataclass
class BaseModel:
    uuid = str


@dataclass
class Entity:
    id = str
    name = str

    # either use:
    uuid = id

    # or use this method
    def __post_init__(self):
        super().uuid = id
Is there any cleaner solution for this kind of object initialization in the dataclass realm?
This might be ruining the idea of removing the original __init__, but how about writing a function to initialize the data class?
def init_entity(j):
    j["uuid"] = j.pop("id")
    return Entity(**j)
and in your code: entity = init_entity(j)
I think the answer here might be to define a classmethod that acts as an alternative constructor to the dataclass.
from dataclasses import dataclass
from typing import TypeVar, Any


@dataclass
class BaseModel:
    uuid: str


E = TypeVar('E', bound='Entity')


@dataclass
class Entity(BaseModel):
    name: str

    @classmethod
    def from_json(cls: type[E], **kwargs: Any) -> E:
        return cls(kwargs['id'], kwargs['label'])
(For the from_json type annotation, you'll need to use typing.Type[E] instead of type[E] if you're on python <= 3.8.)
Note that you need to use colons for your type-annotations within the main body of a dataclass, rather than the = operator, as you were doing.
Example usage in the interactive REPL:
>>> my_json_dict = {'id': 1, 'label': 'x'}
>>> Entity.from_json(**my_json_dict)
Entity(uuid=1, name='x')
It's again questionable how much boilerplate code this saves, however. If you find yourself doing this much work to replicate the behaviour of a non-dataclass class, it's often better just to use a non-dataclass class. Dataclasses are not the perfect solution to every problem, nor do they try to be.
Simplest solution seems to be to use an efficient JSON serialization library that supports key remappings. There are actually tons of them that support this, but dataclass-wizard is one example of a (newer) library that supports this particular use case.
Here's an approach using an alias to dataclasses.field() which should be IDE friendly enough:
from dataclasses import dataclass

from dataclass_wizard import json_field, fromdict, asdict


@dataclass
class BaseModel:
    uuid: int = json_field('id', all=True)


@dataclass
class Entity(BaseModel):
    name: str = json_field('label', all=True)


j = {"id": 1, "label": "x"}

# De-serialize the dictionary object into an `Entity` instance.
e = fromdict(Entity, j)

repr(e)
# Entity(uuid=1, name='x')

# Assert we get the same object when serializing the instance back to a
# JSON-serializable dict.
assert asdict(e) == j
My intention
So, I am developing an API package for a service. I want to provide good type hints for every method that exists in my library.
For example, when the user types get()., after the dot PyCharm will let them know what response this method will provide.
e.g:
info = get()
info. # and here IDE help with hints.
Pitfalls
But there are some methods which provide different responses depending on the parameters passed to them.
e.g.
# this method responds with an object containing the fields:
# count - count of items
# items - list of ids of users
info = get()
# but this method will give additional information. It responds with an object containing the fields:
# count - count of items
# items - list of objects with users' information. It has fields:
# id - id of user
# firstname - firstname of user
# lastname - lastname of user
# ... and some others
info = get(fields='firstname')
Objects structure
Now I have the following structure (it's simplified):
from typing import List, Union

from pydantic import BaseModel, Field


class UserInfo(BaseModel):
    id: int = Field(...)
    firstname: str = Field(None)
    lastname: str = Field(None)
    some_other_fields: str = Field(None)


class GetResponseNoFields(BaseModel):
    count: int = Field(...)
    items: List[int] = Field(...)


class GetResponseWithFields(BaseModel):
    count: int = Field(...)
    items: List[UserInfo] = Field(...)


class GetResponseModel(BaseModel):
    response: Union[GetResponseNoFields, GetResponseWithFields] = Field(...)


def get(fields=None) -> GetResponseModel:
    # some code
    pass
The problem
The problem is that when I type get(fields='firstname').response.items[0]. , PyCharm shows type hints only for int. It doesn't consider that items can contain List[UserInfo]; it thinks it can only be List[int].
I have tried
I've tried to use the typing.overload decorator, but the get method has many parameters, and it seemingly doesn't support default parameter values. Or maybe I didn't do it properly.
Here is what I have tried with overload (simplified). It didn't work because of some_other_param, but I leave it here just in case:
from typing import overload


@overload
def get(fields: None) -> GetResponseNoFields: ...

@overload
def get(fields: str) -> GetResponseWithFields: ...


def get(some_other_param=None, fields=None):
    # code here
    pass
When I try to call the method without parameters, PyCharm says "Some of the parameters is unfilled".
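Not from the original thread, but a sketch of how the overloads could be shaped so that a plain get() call is accepted: give fields a default in the first overload and make it keyword-only in the second (some_other_param stands in for the remaining parameters, and the response models are the ones defined above):

from typing import Optional, overload


@overload
def get(some_other_param: Optional[str] = ..., fields: None = ...) -> GetResponseNoFields: ...

@overload
def get(some_other_param: Optional[str] = ..., *, fields: str) -> GetResponseWithFields: ...


def get(some_other_param=None, fields=None):
    # real implementation here
    ...

With this shape, get() and get("x") resolve to GetResponseNoFields, while get(fields="firstname") resolves to GetResponseWithFields.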
I want to implement a PUT or PATCH request in FastAPI that supports partial updates. The official documentation is really confusing and I can't figure out how to do the request. (I don't know what items is in the documentation, since my data will be passed in the request's body, not a hard-coded dict.)
class QuestionSchema(BaseModel):
    title: str = Field(..., min_length=3, max_length=50)
    answer_true: str = Field(..., min_length=3, max_length=50)
    answer_false: List[str] = Field(..., min_length=3, max_length=50)
    category_id: int


class QuestionDB(QuestionSchema):
    id: int


async def put(id: int, payload: QuestionSchema):
    query = (
        questions
        .update()
        .where(id == questions.c.id)
        .values(**payload)
        .returning(questions.c.id)
    )
    return await database.execute(query=query)
@router.put("/{id}/", response_model=QuestionDB)
async def update_question(payload: QuestionSchema, id: int = Path(..., gt=0)):
    question = await crud.get(id)
    if not question:
        raise HTTPException(status_code=404, detail="question not found")

    ## what should stored_item_data be, as in the documentation?
    stored_item_model = QuestionSchema(**stored_item_data)
    update_data = payload.dict(exclude_unset=True)
    updated_item = stored_item_model.copy(update=update_data)

    response_object = {
        "id": id,
        "title": payload.title,
        "answer_true": payload.answer_true,
        "answer_false": payload.answer_false,
        "category_id": payload.category_id,
    }
    return response_object
How can I complete my code to get a successful partial update here?
Posting this here for googlers who are looking for an intuitive solution for creating Optional Versions of their pydantic Models without code duplication.
Let's say we have a User model, and we would like to allow PATCH requests to update the User. But we need to create a schema that tells FastAPI what to expect in the content body, and specifically that all the fields are Optional (since that's the nature of PATCH requests). We can do so without redefining all the fields:
from pydantic import BaseModel
from typing import Optional


# Creating our Base User Model
class UserBase(BaseModel):
    username: str
    email: str


# And a Model that will be used to create a User
class UserCreate(UserBase):
    password: str
Code Duplication ❌
class UserOptional(UserCreate):
    username: Optional[str]
    email: Optional[str]
    password: Optional[str]
One Liner ✅
# Now we can make a UserOptional class that will tell FastAPI that all the fields are optional.
# Doing it this way cuts down on the duplication of fields
class UserOptional(UserCreate):
    __annotations__ = {k: Optional[v] for k, v in UserCreate.__annotations__.items()}
NOTE: Even if one of the fields on the Model is already Optional, it won't make a difference due to the nature of Optional being typing.Union[type passed to Optional, None] in the background.
i.e typing.Union[str, None] == typing.Optional[str]
You can even make it into a function if you're going to be using it more than once:
def convert_to_optional(schema):
    return {k: Optional[v] for k, v in schema.__annotations__.items()}


class UserOptional(UserCreate):
    __annotations__ = convert_to_optional(UserCreate)
I got this answer from FastAPI's GitHub issues.
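To connect it back to the question, here is a minimal sketch of how such an all-optional model might be used in a PATCH route (the router and the echo-style response are my own assumptions, not from the answer):

from fastapi import APIRouter

router = APIRouter()


@router.patch("/users/{user_id}")
async def patch_user(user_id: int, payload: UserOptional):
    # Only the fields the client actually sent are applied; the storage
    # layer is omitted here, so the partial update is just echoed back.
    update_data = payload.dict(exclude_unset=True)
    return {"user_id": user_id, **update_data}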
You could make the fields Optional on the base class and create a new QuestionCreate model that extends the QuestionSchema. As an example:
from typing import Optional


class Question(BaseModel):
    title: Optional[str] = None  # title is optional on the base schema
    ...


class QuestionCreate(Question):
    title: str  # Now title is required
The cookiecutter template here provides some good insight too.
I created a library (pydantic-partial) just for that, converting all the fields in the normal DTO model to being optional. See https://medium.com/@david.danier/how-to-handle-patch-requests-with-fastapi-c9a47ac51f04 for a code example and a more detailed explanation.
https://github.com/team23/pydantic-partial/
Based on the answer by @cdraper, I made a partial model factory:
from typing import Mapping, Any, List, Optional, Type

from pydantic import BaseModel


def model_annotations_with_parents(model: BaseModel) -> Mapping[str, Any]:
    parent_models: List[Type] = [
        parent_model for parent_model in model.__bases__
        if (
            issubclass(parent_model, BaseModel)
            and hasattr(parent_model, '__annotations__')
        )
    ]

    annotations: Mapping[str, Any] = {}

    for parent_model in reversed(parent_models):
        annotations.update(model_annotations_with_parents(parent_model))

    annotations.update(model.__annotations__)
    return annotations


def partial_model_factory(model: BaseModel, prefix: str = "Partial", name: str = None) -> BaseModel:
    if not name:
        name = f"{prefix}{model.__name__}"

    return type(
        name, (model,),
        dict(
            __module__=model.__module__,
            __annotations__={
                k: Optional[v]
                for k, v in model_annotations_with_parents(model).items()
            }
        )
    )


def partial_model(cls: BaseModel) -> BaseModel:
    return partial_model_factory(cls, name=cls.__name__)
Can be used with the function partial_model_factory:
PartialQuestionSchema = partial_model_factory(QuestionSchema)
Or with decorator partial_model:
@partial_model
class PartialQuestionSchema(QuestionSchema):
    pass
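A quick usage sketch (my own addition, assuming the QuestionSchema from the question and pydantic v1, where the Optional annotations give every field a None default):

partial = PartialQuestionSchema(title="Only a title")
print(partial)
# title='Only a title' answer_true=None answer_false=None category_id=None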