Mapping issues from Sqlalchemy to Pydantic - from_orm failed - python

I'm trying to map my result dictionary from SQLAlchemy automatically to the Pydantic output model for FastAPI using the from_orm method, but I always get a validation error.
File "pydantic\main.py", line 508, in pydantic.main.BaseModel.from_orm
pydantic.error_wrappers.ValidationError: 2 validation errors for Category
name
field required (type=value_error.missing)
id
field required (type=value_error.missing)
If I create the objects with the Pydantic schema myself and add them to the list, the method works.
What would I have to change for from_orm to work?
Did I possibly miss something in the documentation?
https://pydantic-docs.helpmanual.io/usage/models/#orm-mode-aka-arbitrary-class-instances
https://fastapi.tiangolo.com/tutorial/sql-databases/#use-pydantics-orm_mode
or is there another/better way to turn the ResultProxy into a Pydantic capable output?
The output I get from the database method is the following:
[{'id': 1, 'name': 'games', 'parentid': None}, {'id': 2, 'name': 'computer', 'parentid': None}, {'id': 3, 'name': 'household', 'parentid': None}, {'id': 10, 'name': 'test', 'parentid': None}]
Models.py
from sqlalchemy import BigInteger, Column, DateTime, ForeignKey, Integer, Numeric, String, Text, text, Table
from sqlalchemy.orm import relationship, mapper
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()
metadata = Base.metadata

# Classic (imperative) table definition for the `category` table.
category = Table(
    'category', metadata,
    Column('id', Integer, primary_key=True),
    Column('name', String(200)),
    Column('parentid', Integer),
)


class Category(object):
    """Plain Python class that is imperatively mapped onto `category`."""

    def __init__(self, cat_id, name, parentid):
        self.id = cat_id
        self.name = name
        self.parentid = parentid


# Bind the plain class to the table via the classic mapper API.
mapper(Category, category)
Schemas.py
from pydantic import BaseModel, Field
class Category(BaseModel):
    """Pydantic schema mirroring the `category` table (name, parentid, id)."""

    name: str
    parentid: int = None
    id: int

    class Config:
        # Allow construction from attribute-style objects via .from_orm().
        orm_mode = True
main.py
def result_proxy_to_Dict(results: "ResultProxy"):
    """Convert a SQLAlchemy ResultProxy into a list of plain dicts.

    Each row becomes its own independent dict. The original version merged
    every row into one shared accumulator dict, so keys from a previous row
    leaked into later rows whenever column sets differed.

    The annotation is a string so the function can be defined without the
    ResultProxy name imported.
    """
    return [
        {column: value for column, value in rowproxy.items()}
        for rowproxy in results
    ]
def crud_read_cat(db: Session) -> dict:
    """Fetch all root categories (parentid IS NULL) as a list of dicts."""
    # Core-style SELECT against the mapped Category table; `== None` is the
    # SQLAlchemy idiom for IS NULL.
    stmt = select([models.Category]). \
        where(models.Category.parentid == None)
    rows = db.execute(stmt)
    return result_proxy_to_Dict(rows)
@router.get("/category/", response_model=List[schemas.Category], tags=["category"])
def read_all_category(db: Session = Depends(get_db)):
    """Return all root categories as pydantic schema objects.

    Note: kept async in spirit but the decorator was garbled to a comment in
    the paste; restored here. The original also called
    ``schemas.Category.from_orm(categories)`` on the *whole list*, which is
    what raised the "field required" ValidationError from the question —
    from_orm accepts a single object, not a list.
    """
    categories = crud_read_cat(db)
    # Build one schema instance per row dict.
    return [
        schemas.Category(
            id=row.get("id"),
            name=row.get("name"),
            parentid=row.get("parentid"),
        )
        for row in categories
    ]

New to this myself, so I can't promise the best answer, but I noticed that if you simply put Optional in the schema it works.
`
class Category(BaseModel):
    """Workaround schema: Optional fields silence the missing-field errors."""

    name: Optional[str]
    parentid: int = None
    id: Optional[int]

    class Config:
        orm_mode = True
`
Still returning that info in the response:
[
{
"name": "games",
"parentid": null,
"id": 1
},
{
"name": "computer",
"parentid": null,
"id": 2
},
{
"name": "household",
"parentid": null,
"id": 3
},
{
"name": "test",
"parentid": null,
"id": 10
}
]
Likely still some sort of validation error but seems like a usable work around for now.

I just had the same problem. I think it's related to pydantic nonetheless. Please have a look at this link for more information https://github.com/samuelcolvin/pydantic/issues/506.
But having changed my model:
class Student(BaseModel):
    id: Optional[int]  # changed to Optional
    name: Optional[str]
    surname: Optional[str]
    email: Optional[str]
The validation error goes away. It's a funny error — given that the entries in my database still updated with the values. I am new to FastAPI also, so the workaround and the error do not really make sense for now... but yes, it worked. Thank you.

Related

Clickhouse-sqlalchemy types.Nested insert

I have a problem inserting data into a nested column.
I use map_imperatively.
Columns of a different type are filled. Only the nested column remains empty.
My code:
import attr
from sqlalchemy import (
    create_engine, Column, MetaData, insert
)
from sqlalchemy.orm import registry
from clickhouse_sqlalchemy import (
    Table, make_session, types, engines,
)

uri = 'clickhouse+native://localhost/default'
engine = create_engine(uri)
session = make_session(engine)
metadata = MetaData(bind=engine)
mapper = registry()


# NOTE(review): the "#attr.dataclass" lines were decorators mangled by the
# paste ("@" became "#"); restored below so the classes are real attrs
# dataclasses again.
@attr.dataclass
class NestedAttr:
    key1: int
    key2: int
    key3: int


@attr.dataclass
class NestedInObject:
    id: int
    name: str
    nested_attr: NestedAttr


# Imperatively-mapped table with a ClickHouse Nested column.
nested_test = Table(
    'nested_test', metadata,
    Column(name='id', type_=types.Int8, primary_key=True),
    Column(name='name', type_=types.String),
    Column(
        name='nested_attr',
        type_=types.Nested(
            Column(name='key1', type_=types.Int8),
            Column(name='key2', type_=types.Int8),
            Column(name='key3', type_=types.Int8),
        )
    ),
    engines.Memory()
)

mapper.map_imperatively(
    NestedInObject,
    nested_test
)

nested_test.create()

# Nested columns are addressed with dotted keys and take parallel arrays.
values = [
    {
        'id': 1,
        'name': 'name',
        'nested_attr.key1': [1, 2],
        'nested_attr.key2': [1, 2],
        'nested_attr.key3': [1, 2],
    }
]
session.execute(insert(NestedInObject), values)
I don't get an error, but the nested columns are empty.
I tried different data. Checked the data type in the database. I don't understand why the columns are left empty.

Pydantic: how to parse non dict objects

The following data is input
# Sample input payload: one campaign record plus a list and a scalar extra.
data = {
    'campaigns': [
        {
            'title': 'GBP',
            'geo_segment': 'WW',
            'ac_type': 'Value',
            'conversion': 'soft',
            'asset_type': 'ALL',
            'date': '22.04.21',
            'name': 'GBP_WW_1_core_22.04.21',
            'budget': '2000',
            'cpa': '1,00',
        }
    ],
    'stages': ['pre', 'post'],
    'language_mode': 'all_en',
}
To parse campaigns, I use the parse_obj() method
campaigns = parse_obj_as(List[CampaignData], data['campaigns'])
class CampaignData(BaseModel):
title: NonEmptyString
geo_segment: NonEmptyString
......
It works.
How to validate the rest of the data (stages: List, language_mode: str), which is not of type dict?
class GoogleCheckCampaignStages(BaseModel):
stages: List[str]
class GoogleCheckLanguageMode(BaseModel):
language_mode: str
If I run
stages = parse_obj_as(List[GoogleCheckCampaignStages], data['stages'])
returns
value is not a valid dict (type=type_error.dict)
Same result with data['language_mode'].
If I try with parse_raw_as() method
parse_raw_as(GoogleCheckLanguageMode, data['language_mode'])
returns
json.decoder.JSONDecodeError: Expecting value: line 1 column 1 (char 0)
So how to parse str and list values?
The error you are encountering is because you are passing in data["stages"] which is just a list/array. It has no key called stages and therefore Pydantic doesn't know how to assign it.
Assuming that NonEmptyString is defined as below, I would suggest creating one model that processes the whole data object, for example like this:
from pydantic import BaseModel, parse_obj_as, constr
from typing import List

# Constrained string type: at least one character.
NonEmptyString = constr(min_length=1)


class CampaignData(BaseModel):
    title: NonEmptyString
    geo_segment: NonEmptyString
    # and so on ...


class Data(BaseModel):
    """Top-level payload model: campaigns plus the non-dict extras."""

    campaigns: List[CampaignData]
    stages: List[str]
    language_mode: str


parsed_data = parse_obj_as(Data, data)
print(parsed_data)
# campaigns=[CampaignData(title='GBP', geo_segment='WW', ...)] stages=['pre', 'post'] language_mode='all_en'
If you'd like to access only specific elements, you can easily do it like this:
print(parsed_data.stages)
# ['pre', 'post']

json serialization of a list of objects of a custom class

I have a song class, which holds the attributes to a song, and it is a custom class. I also have a list of songs in a list called track list. When I try to json.dump the list, I get an error that says :
TypeError: Object of type 'Song' is not JSON serializable
How would I go about converting this list of songs to json?
Here is the additional relevant code that returns the error:
class Song:
    """Value object holding one track's metadata."""

    def __init__(self, sname, sartist, coverart, albname, albartist, spotid):
        self.sname = sname
        self.sartist = sartist
        self.coverart = coverart
        self.albname = albname
        self.albartist = albartist
        self.spotid = spotid
# NOTE(review): json.dumps raises TypeError here ("Object of type 'Song' is
# not JSON serializable") because the default encoder does not know how to
# serialize custom Song instances — that is the error reported above.
tracklist = createDict(tracks) ##creates the list of songs, works fine
jsontracks = json.dumps(tracklist)
pp.pprint(jsontracks)
Thanks
I've solved this by adding an encode() method to the class:
def encode(self):
    """Return this object's attribute dict so json.dumps can serialize it."""
    return vars(self)
and adding some arguments to json.dumps:
jsontracks = json.dumps(tracklist, default=lambda o: o.encode(), indent=4)
This will "crawl" down your class tree (if you have any child classes) and encode every object as a json list/object automatically. This should work with just about any class and is fast to type. You may also want to control which class parameters get encoded with something like:
def encode(self):
    """Serialize only the whitelisted public fields of the object."""
    wanted = ('name', 'code', 'amount', 'minimum', 'maximum')
    return {field: getattr(self, field) for field in wanted}
or a little bit faster to edit (if you're lazy like me):
def encode(self):
    """Serialize a fixed subset of the instance's attributes."""
    wanted = {'name', 'code', 'batch_size', 'cost',
              'unit', 'ingredients', 'nutrients'}
    return {key: value for key, value in vars(self).items() if key in wanted}
full code:
import json


class Song:
    """Track metadata that knows how to present itself for JSON encoding."""

    def __init__(self, sname, sartist, coverart, albname, albartist, spotid):
        self.sname = sname
        self.sartist = sartist
        self.coverart = coverart
        self.albname = albname
        self.albartist = albartist
        self.spotid = spotid

    def encode(self):
        """Return the attribute dict for the JSON encoder."""
        return self.__dict__


tracklist = [
    Song('Imagine', 'John Lennon', None, None, None, None),
    Song('Hey Jude', 'The Beatles', None, None, None, None),
    Song('(I Can\'t Get No) Satisfaction', 'The Rolling Stones', None, None, None, None),
]

# `default` is invoked for every object json can't serialize natively, so
# each Song is encoded via its own encode() method.
jsontracks = json.dumps(tracklist, default=lambda o: o.encode(), indent=4)
print(jsontracks)
output:
[
{
"sname": "Imagine",
"sartist": "John Lennon",
"coverart": null,
"albname": null,
"albartist": null,
"spotid": null
},
{
"sname": "Hey Jude",
"sartist": "The Beatles",
"coverart": null,
"albname": null,
"albartist": null,
"spotid": null
},
{
"sname": "(I Can't Get No) Satisfaction",
"sartist": "The Rolling Stones",
"coverart": null,
"albname": null,
"albartist": null,
"spotid": null
}
]

Postgres and SqlAlchemy not updating rows properly

Whenever I execute an update statement using a session with SqlAlchemy and then call commit(), it will rarely update the database.
Here is my environment:
I have two servers running. One is for my database the other is for my python server.
Database Server:
Postgres v9.6 - On Amazon's RDS
Server with Python
Linux 3.13.0-65-generic x86_64 - On an Amazon EC2 Instance
SqlAlchemy v1.1.5
Python v3.4.3
Flask 0.11.1
Also, I use pgAdmin 4 for querying my table.
The files of importance:
server/models/category.py
from sqlalchemy.orm import backref
from .. import db
from flask import jsonify
class Category(db.Model):
    """Self-referential model for the `categories` table."""

    __tablename__ = "categories"

    id = db.Column(db.Integer, primary_key=True)
    cat_name = db.Column(db.String(80))
    includes = db.Column(db.ARRAY(db.String), default=[])
    excludes = db.Column(db.ARRAY(db.String), default=[])
    parent_id = db.Column(db.ForeignKey('categories.id', ondelete='SET NULL'), nullable=True, default=None)
    subcategories = db.relationship('Category', backref=backref(
        'categories',
        remote_side=[id],
        single_parent=True,
        cascade="all, delete-orphan"
    ))
    assigned_user = db.Column(db.String(80), nullable=True, default=None)

    def to_dict(self):
        """Return a plain-dict view of the row (used by json())."""
        return dict(
            id=self.id,
            cat_name=self.cat_name,
            parent_id=self.parent_id,
            includes=self.includes,
            excludes=self.excludes,
            assigned_user=self.assigned_user,
        )

    def json(self):
        """Return a Flask JSON response for this row."""
        return jsonify(self.to_dict())

    def __repr__(self):
        return "<%s %r>" % (self.__class__, self.to_dict())
class CategoryOperations:
    ...

    # WARNING(review): this builds SQL by string interpolation, which is
    # vulnerable to SQL injection and breaks on values containing quotes.
    # Prefer session.execute(text(...), params) with bound parameters.
    # The "#staticmethod" lines were decorators garbled by the paste;
    # restored to "@staticmethod".
    @staticmethod
    def update_category(category):
        """Build the raw UPDATE ... RETURNING statement for *category*."""
        return """
UPDATE categories
SET cat_name='{0}',
parent_id={1},
includes='{2}',
excludes='{3}',
assigned_user={4}
WHERE id={5}
RETURNING cat_name, parent_id, includes, excludes, assigned_user
""".format(
            category.cat_name,
            category.parent_id if category.parent_id is not None else 'null',
            "{" + ",".join(category.includes) + "}",
            "{" + ",".join(category.excludes) + "}",
            "'" + category.assigned_user + "'" if category.assigned_user is not None else 'null',
            category.id
        )

    @staticmethod
    def update(category, session):
        """Execute the UPDATE and return a Category built from RETURNING."""
        print("Updating category with id: " + str(category.id))
        stmt = CategoryOperations.update_category(category)
        print(stmt)
        row_updated = session.execute(stmt).fetchone()
        return Category(
            id=category.id,
            cat_name=row_updated[0],
            parent_id=row_updated[1],
            includes=row_updated[2],
            excludes=row_updated[3],
            assigned_user=row_updated[4]
        )

    ...
server/api/category.py
from flask import jsonify, request
import json
from .api_utils.utils import valid_request as is_valid_request
from . import api
from ..models.category import Category, CategoryOperations
from ..models.users_categories import UsersCategoriesOperations, UsersCategories
from ..models.listener_item import ListenerItemOperations, ListenerItem
from ..models.user import UserOperations
from ..schemas.category import category_schema
from .. import get_session
...
@api.route('/categories/<int:id>', methods=['PUT'])
def update_category(id):
    """PUT /categories/<id>: validate the JSON body and update the row.

    The route decorator was garbled to a comment in the paste; restored.
    Also adds the session close in `finally` — the original leaked a new
    session per request, which is exactly why commits never reached the
    database (see the accepted fix further down the page).
    """
    category_json = request.json
    if category_json is None:
        return "Bad Request: Request not sent as json", 400
    valid_json, json_err = is_valid_request(
        category_json,
        ['cat_name', 'parent_id', 'includes', 'excludes', 'assigned_user'],
        "and",
    )
    if not valid_json:
        return json_err, 400
    category = Category(
        id=id,
        cat_name=category_json['cat_name'],
        parent_id=category_json['parent_id'],
        includes=category_json['includes'],
        excludes=category_json['excludes'],
        assigned_user=category_json['assigned_user'],
    )
    session = get_session()
    try:
        updated_category = CategoryOperations.update(category, session)
        session.commit()
        print(updated_category.to_dict())
        return jsonify(updated_category.to_dict()), 200
    except Exception as e:
        print("ROLLBACK")
        print(e)
        session.rollback()
        return str(e), 500
    finally:
        session.close()
...
There is one more file that will probably be useful in this case:
server/__init__.py
import sqlalchemy as sa
from flask import Flask
from flask_marshmallow import Marshmallow
from flask_sqlalchemy import SQLAlchemy
from config import config
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from flask_cors import CORS, cross_origin
from .db_config import CONFIG
db = SQLAlchemy()
ma = Marshmallow()

# One engine per process; every session below is bound to a single shared
# connection taken from this pool.
Engine = sa.create_engine(
    CONFIG.POSTGRES_URL,
    client_encoding='utf8',
    pool_size=20,
    max_overflow=0
)
Session = sessionmaker(bind=Engine)
conn = Engine.connect()


def get_session():
    """Return a brand-new Session bound to the shared connection."""
    return Session(bind=conn)
def create_app(config_name):
    """Application factory: configure Flask, blueprints, auth and extensions."""
    app = Flask(__name__, static_url_path="/app", static_folder="static")

    app_config = config[config_name]()
    print(app_config)
    app.config.from_object(app_config)

    # Blueprints and routes are imported lazily to avoid circular imports.
    from .api import api as api_blueprint
    app.register_blueprint(api_blueprint, url_prefix='/api')

    from .api.routes import routes
    routes(app)

    from .auth import authentication
    authentication(app)

    db.init_app(app)
    ma.init_app(app)
    CORS(app)
    ...
    return app
To explain a little more with the environment and files I have given, let's say I have a row in my categories table like so:
{
"assigned_user": null,
"cat_name": "Category Name Before",
"excludes": [
"exclude1",
"excludeBefore"
],
"id": 2,
"includes": [
"include1",
"include2"
],
"parent_id": null
}
When I do a PUT request to /api/categories/2 with the body as:
{
"assigned_user": null,
"cat_name": "Category Name 1",
"excludes": [
"exclude1",
"exclude2"
],
"id": 2,
"includes": [
"include1",
"include2"
],
"parent_id": null
}
During the request, I print out the SQL Statement that my PUT request created (for testing) and I get this:
UPDATE categories
SET cat_name='Category Name 1',
parent_id=null,
includes='{include1,include2}',
excludes='{exclude1,exclude2}',
assigned_user=null
WHERE id=2
RETURNING cat_name, parent_id, includes, excludes, assigned_user
After it commits the UPDATE statement, it then returns the response. I get the updated object back like so:
{
"assigned_user": null,
"cat_name": "Category Name 1",
"excludes": [
"exclude1",
"exclude2"
],
"id": 2,
"includes": [
"include1",
"include2"
],
"parent_id": null
}
When I do a GET request with this URL: /api/categories/2 and I get the same object too like so:
{
"assigned_user": null,
"cat_name": "Category Name 1",
"excludes": [
"exclude1",
"exclude2"
],
"id": 2,
"includes": [
"include1",
"include2"
],
"parent_id": null
}
However, when I run the SQL command below in pgAdmin, I get the old version (it didn't update the row in the database):
SELECT * FROM categories WHERE id=2
Here is the object I get:
{
"assigned_user": null,
"cat_name": "Category Name Before",
"excludes": [
"exclude1",
"excludeBefore"
],
"id": 2,
"includes": [
"include1",
"include2"
],
"parent_id": null
}
This is the object I had before doing the PUT request. If I restart my python server and do the GET request, then I get the old object. It feels like in the session, it is storing the data, but for some reason it's not propagating to the database.
It might be good to know that if I run the update command in pgAdmin, it updates the row just fine.
UPDATE: I have also used these methods (as talked about here) to update, but still the same problem:
# using the session to update
# BUG FIX(review): the original passed "cat_name": category.id, writing the
# numeric id into the name column; corrected to category.cat_name.
session.query(Category).filter_by(id=category.id).update({
    "cat_name": category.cat_name,
    "assigned_user": category.assigned_user,
    "includes": category.includes,
    "excludes": category.excludes,
    "parent_id": category.parent_id
})

# using the category object to edit, then commit
category_from_db = session.query(Category).filter_by(id=category.id).first()
category_from_db.cat_name = category_json['cat_name']
category_from_db.assigned_user = category_json['assigned_user']
category_from_db.excludes = category_json['excludes']
category_from_db.includes = category_json['includes']
category_from_db.parent_id = category_json['parent_id']
session.commit()
Any ideas?
It turns out that each time I called get_session, I was creating a new session. And I was not closing the session after every HTTP request.
Here is what the server/api/category.py PUT request looks like:
@api.route('/categories/<int:id>', methods=['PUT'])
def update_category(id):
    """PUT /categories/<id> — fixed version that closes its session.

    The route decorator was garbled to a comment in the paste; restored.
    """
    category_json = request.json
    if category_json is None:
        return "Bad Request: Request not sent as json", 400
    valid_json, json_err = is_valid_request(
        category_json,
        ['cat_name', 'parent_id', 'includes', 'excludes', 'assigned_user'],
        "and",
    )
    if not valid_json:
        return json_err, 400
    category = Category(
        id=id,
        cat_name=category_json['cat_name'],
        parent_id=category_json['parent_id'],
        includes=category_json['includes'],
        excludes=category_json['excludes'],
        assigned_user=category_json['assigned_user'],
    )
    session = get_session()
    try:
        updated_category = CategoryOperations.update(category, session)
        session.commit()
        print(updated_category.to_dict())
        return jsonify(updated_category.to_dict()), 200
    except Exception as e:
        print("ROLLBACK")
        print(e)
        session.rollback()
        return str(e), 500
    finally:
        session.close()  # <== The fix: never leak the per-request session
Hope this helps someone.

Searching ID or property for match in Mongo

Goal:
I want to allow the user to search for a document by ID, or allow other text-based queries.
Code:
# NOTE(review): ObjectId(s_term) raises InvalidId whenever the search term is
# not a valid 24-hex-char id — that is the error reported below; see the
# answer for the try/except fix that only adds the _id clause when it parses.
l_search_results = list(
cll_sips.find(
{
'$or': [
{'_id': ObjectId(s_term)},
{'s_text': re.compile(s_term, re.IGNORECASE)},
{'choices': re.compile(s_term, re.IGNORECASE)}
]
}
).limit(20)
)
Error:
<Whatever you searched for> is not a valid ObjectId
s_term needs to be a valid object ID (or at least in the right format) when you pass it to the ObjectId constructor. Since it's sometimes not an ID, that explains why you get the exception.
Try something like this instead:
from pymongo.errors import InvalidId

# Always search the text fields; only add the _id clause when the term
# actually parses as an ObjectId.
or_filter = [
    {'s_text': re.compile(s_term, re.IGNORECASE)},
    {'choices': re.compile(s_term, re.IGNORECASE)},
]
try:
    oid = ObjectId(s_term)
except InvalidId:
    pass
else:
    or_filter.append({'_id': oid})

l_search_results = list(
    cll_sips.find({'$or': or_filter}).limit(20)
)

Categories

Resources