SQLAlchemy relationship() via intermediary table - python

I am struggling to define methods in SQLAlchemy to retrieve related records via an intermediary table.
Consider the following schema:
Users can create multiple posts, each post belongs to 1 user
Each post can have multiple comments on it, with each comment belonging to 1 post
What I want is to be able to, for a given user instance, retrieve all of the comments from all of their posts.
I have set this up as follows:
from sqlalchemy import ForeignKey
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, relationship
class Base(DeclarativeBase):
id: Mapped[int] = mapped_column(primary_key=True)
# define model classes
class User(Base):
__tablename__ = "users"
name: Mapped[str] = mapped_column()
posts: Mapped[list["Post"]] = relationship(back_populates="user")
def __repr__(self) -> str:
return f"(<{__class__.__name__}> name: {self.name})"
class Post(Base):
__tablename__ = "posts"
title: Mapped[str] = mapped_column()
user_id: Mapped[int] = mapped_column(ForeignKey("users.id"))
user: Mapped["User"] = relationship(back_populates="posts")
comments: Mapped[list["Comment"]] = relationship(back_populates="post")
def __repr__(self) -> str:
return f"(<{__class__.__name__}> title: {self.title})"
class Comment(Base):
__tablename__ = "comments"
body: Mapped[str] = mapped_column()
post_id: Mapped[int] = mapped_column(ForeignKey("posts.id"))
post: Mapped["Post"] = relationship(back_populates="comments")
def __repr__(self) -> str:
return f"(<{__class__.__name__}> body: {self.body})"
If I create a few instances of these models, you can see how things are related:
# create instances
user = User(name="greta")
post_1 = Post(title="First post", user=user)
post_2 = Post(title="Second post", user=user)
comment_1 = Comment(body="yeah wotever", post=post_1)
comment_2 = Comment(body="lol good one", post=post_1)
comment_3 = Comment(body="lmfao", post=post_2)
# show all posts, and their comments
print(user)
for post in user.posts:
print(f" └── {post}")
for comment in post.comments:
print(f" └── {comment}")
(<User> name: greta)
└── (<Post> title: First post)
└── (<Comment> body: yeah wotever)
└── (<Comment> body: lol good one)
└── (<Post> title: Second post)
└── (<Comment> body: lmfao)
I am unsure of how to use relationship() to define a method all_comments() in the User class, which would return a list of all of the comments across all of a user instance's posts.
Can anyone point me in the right direction?

Using posts as the secondary table, you can use primaryjoin and secondaryjoin in relationship() to get what you want.
This way you needn't create an all_comments() method: you can just do user.comments to get the comments, and the other way round, comment.user gives you the user (but see the edit below).
You may want to tweak the join conditions; from my rudimentary testing this seems to get where you want to be.
I have created two users, each with a different set of posts and comments, so you can see the difference.
from sqlalchemy import ForeignKey, create_engine, select
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, relationship, Session
class Base(DeclarativeBase):
id: Mapped[int] = mapped_column(primary_key=True)
class User(Base):
__tablename__ = "users"
name: Mapped[str] = mapped_column()
posts: Mapped[list["Post"]] = relationship(back_populates="user")
comments: Mapped[list["Comment"]] = relationship(
back_populates="user",
secondary="posts",
primaryjoin="User.id == Post.user_id",
secondaryjoin="Comment.post_id == Post.id",
viewonly=True,
)
def __repr__(self) -> str:
return f"(<{__class__.__name__}> name: {self.name})"
class Post(Base):
__tablename__ = "posts"
title: Mapped[str] = mapped_column()
user_id: Mapped[int] = mapped_column(ForeignKey("users.id"))
user: Mapped["User"] = relationship(back_populates="posts")
comments: Mapped[list["Comment"]] = relationship(back_populates="post")
def __repr__(self) -> str:
return f"(<{__class__.__name__}> title: {self.title})"
class Comment(Base):
__tablename__ = "comments"
body: Mapped[str] = mapped_column()
post_id: Mapped[int] = mapped_column(ForeignKey("posts.id"))
post: Mapped["Post"] = relationship(back_populates="comments")
user: Mapped["User"] = relationship(
back_populates="comments",
secondary="posts",
primaryjoin="User.id == Post.user_id",
secondaryjoin="Comment.post_id == Post.id",
viewonly=True,
uselist=False,
)
def __repr__(self) -> str:
return f"(<{__class__.__name__}> body: {self.body})"
engine = create_engine("sqlite:///temp.db")
Base.metadata.create_all(engine)
with Session(engine) as session, session.begin():
user = User(name="greta")
post_1 = Post(title="First post", user=user)
post_2 = Post(title="Second post", user=user)
comment_1 = Comment(body="yeah wotever", post=post_1)
comment_2 = Comment(body="lol good one", post=post_1)
comment_3 = Comment(body="lmfao", post=post_2)
session.add_all((user, post_1, post_2, comment_1, comment_2, comment_3))
user = User(name="not greta")
post_1 = Post(title="Third post", user=user)
post_2 = Post(title="Fourth post", user=user)
comment_1 = Comment(body="wotever", post=post_1)
comment_2 = Comment(body="good one", post=post_1)
session.add_all((user, post_1, post_2, comment_1, comment_2))
with Session(engine) as session:
statement = select(User)
for user in session.scalars(statement):
print(user, user.comments)
Output
(<User> name: greta) [(<Comment> body: yeah wotever), (<Comment> body: lol good one), (<Comment> body: lmfao)]
(<User> name: not greta) [(<Comment> body: wotever), (<Comment> body: good one)]
Edit: The reverse relation ("get the user from a comment") seems to be bugged in this implementation: one comment ends up with more than one user, and I am not sure where I went wrong. But if all you want is the relation "get all comments for a user", then this works.
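A likely explanation for that edit (not verified against the code above): with a secondary table, SQLAlchemy expects primaryjoin to join the parent table to the secondary, and secondaryjoin to join the secondary to the target. On Comment.user the parent is Comment, so the two conditions probably need to be swapped relative to the ones on User.comments. A sketch, reusing the same Base and imports as above:
class Comment(Base):
    __tablename__ = "comments"
    body: Mapped[str] = mapped_column()
    post_id: Mapped[int] = mapped_column(ForeignKey("posts.id"))
    post: Mapped["Post"] = relationship(back_populates="comments")
    user: Mapped["User"] = relationship(
        back_populates="comments",
        secondary="posts",
        primaryjoin="Comment.post_id == Post.id",   # parent (Comment) -> secondary (posts)
        secondaryjoin="Post.user_id == User.id",    # secondary (posts) -> target (User)
        viewonly=True,
        uselist=False,
    )
With that, each comment should resolve to exactly one user through its post.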

Related

Resolve "none is not an allower value"

I am currently trying to write an application in which one object (Room) inherits from the other (Building). But when I try to create a room using the Swagger API (create Room) I get the following error:
pydantic.error_wrappers.ValidationError: 1 validation error for Room
response -> content
none is not an allowed value (type=type_error.none.not_allowed).
I have only recently started with SQL and Python and am having a hard time solving this problem. I would really appreciate help and an explanation of what I am doing wrong.
Here is my Code:
models.py:
class Building(_database.Base):
__tablename__ = "buildings"
id = _sql.Column(_sql.Integer, primary_key=True, index=True)
title = _sql.Column(_sql.String, index=True)
date_created = _sql.Column(_sql.DateTime, default=_dt.datetime.utcnow)
date_last_updated = _sql.Column(_sql.DateTime, default=_dt.datetime.utcnow)
rooms = _orm.relationship("Room", back_populates="owner")
class Room(_database.Base):
__tablename__ = "rooms"
id = _sql.Column(_sql.Integer, primary_key=True, index=True)
title = _sql.Column(_sql.String, index=True)
content = _sql.Column(_sql.String, index=True)
owner_id = _sql.Column(_sql.Integer, _sql.ForeignKey("buildings.id"))
building_title = _sql.Column(_sql.String, index=True)
date_created = _sql.Column(_sql.DateTime, default=_dt.datetime.utcnow)
date_last_updated = _sql.Column(_sql.DateTime, default=_dt.datetime.utcnow)
owner = _orm.relationship("Building", back_populates="rooms")
schemas.py:
#Rooms
class _RoomBase(_pydantic.BaseModel):
title: str
content: str
class RoomCreate(_RoomBase):
pass
class RoomUpdate(_RoomBase):
pass
class Room(_RoomBase):
id: int
owner_id: int
building_title: str
date_created: _dt.datetime
date_last_updated: _dt.datetime
class Config:
orm_mode = True
#Buildings
class _BuildingBase(_pydantic.BaseModel):
title: str
class BuildingCreate(_BuildingBase):
pass
class BuildingUpdate(_BuildingBase):
pass
class Building(_BuildingBase):
id: int
date_created: _dt.datetime
date_last_updated: _dt.datetime
rooms: List[Room] = []
class Config:
orm_mode = True
services.py
#Buildings
def create_building(db: _orm.Session, building: _schemas.BuildingCreate):
building = _models.Building(title=building.title)
db.add(building)
db.commit()
db.refresh(building)
return building
def get_building(db: _orm.Session, building_id: int ):
return db.query(_models.Building).filter(_models.Building.id == building_id).first()
def get_building_by_title(db: _orm.Session, building_title: str ):
return db.query(_models.Building).filter(_models.Building.title == building_title).first()
def delete_building(db: _orm.Session, building_id: int):
db.query(_models.Building).filter(_models.Building.id == building_id).delete()
db.commit()
def update_building(db: _orm.Session, building_id: int, building: _schemas.BuildingCreate):
db_building = get_building(db=db, building_id=building_id)
db_building.title = building.title
db.commit()
db.refresh(db_building)
return db_building
#Rooms
def create_room(db: _orm.Session, room: _schemas.RoomCreate, building_id:int, building_title: str):
room = _models.Room(title=room.title,owner_id=building_id, building_title=building_title)
db.add(room)
db.commit()
db.refresh(room)
return room
main.py
#Building
#app.post("/buildings/", response_model=_schemas.Building)
def create_building(
building: _schemas.BuildingCreate, db: _orm.Session = _fastapi.Depends(_services.get_db)
):
return _services.create_building(db=db, building=building)
#app.get("/buildings/{building_id}", response_model=_schemas.Building)
def read_building(building_id: int, db: _orm.Session = _fastapi.Depends(_services.get_db)):
building = _services.get_building(db=db, building_id=building_id)
if building is None:
raise _fastapi.HTTPException(
status_code=404, detail="sorry this building does not exist"
)
return building
@app.delete("/buildings/{building_id}")
def delete_building(building_id: int, db: _orm.Session = _fastapi.Depends(_services.get_db)):
_services.delete_building(db=db, building_id=building_id)
return {"message": f"successfully deleted building with id: {building_id}"}
@app.put("/buildings/{building_id}", response_model=_schemas.Building)
def update_building(
building_id: int,
building: _schemas.BuildingCreate,
db: _orm.Session = _fastapi.Depends(_services.get_db),
):
return _services.update_building(db=db, building=building, building_id=building_id)
#Room
#app.post("/rooms/", response_model=_schemas.Room)
def create_room(
building_title: str,
room: _schemas.RoomCreate,
db: _orm.Session = _fastapi.Depends(_services.get_db),
):
db_building = _services.get_building_by_title(db=db, building_title=building_title)
if db_building is None:
raise _fastapi.HTTPException(
status_code=404, detail="sorry this building does not exist"
)
return _services.create_room(db=db, room=room,building_id=db_building.id, building_title=db_building.title)
Thank you for your help!
As MatsLindh points out, RoomCreate has a content field that is not used in services.create_room. Simply change it to:
def create_room(db: _orm.Session, room: _schemas.RoomCreate, building_id:int, building_title: str):
room = _models.Room(title=room.title, content=room.content, owner_id=building_id, building_title=building_title)
If content is required, you should probably also define your SQLAlchemy model as content = _sql.Column(_sql.String, nullable=False, index=True).
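For context on where the error message itself comes from: the response model Room requires content: str (inherited from _RoomBase), and since create_room never set content, the row comes back with content=None, which FastAPI's response validation rejects. A minimal standalone illustration (a sketch, assuming pydantic v1 with orm_mode; FakeOrmRoom is a made-up stand-in for the ORM row):
import pydantic

class Room(pydantic.BaseModel):
    title: str
    content: str  # required, so None is rejected

    class Config:
        orm_mode = True

class FakeOrmRoom:
    # stand-in for the SQLAlchemy Room row returned by create_room
    title = "kitchen"
    content = None  # never set, because create_room dropped it

Room.from_orm(FakeOrmRoom())
# pydantic.error_wrappers.ValidationError: 1 validation error for Room
# content
#   none is not an allowed value (type=type_error.none.not_allowed)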
For next time, please learn how to provide a minimal reproducible example.

Playhouse's (peewee extension) signals do not work correctly

I have two models, User and UserSettings, because I decided to separate user-related properties from privacy settings.
I am using the Peewee ORM for building my models and creating tables. Here is a short part of my code:
class User(BaseModel):
id = UUIDField(primary_key=True, default=uuid4)
tg_id = BigIntegerField(unique=True, null=True)
class UserSettings(BaseModel):
user: User = ForeignKeyField(User, backref='settings', unique=True)
show_location: bool = BooleanField(default=True)
As far as I know, peewee itself has no built-in one-to-one relation support, so I decided to use playhouse's Django-like signals (described in the peewee docs) to ensure there is one record in the UserSettings table for each User. As described in the docs, I have inherited my BaseModel class from the playhouse.signals Model class to make the signals work. Here is the signal itself:
@post_save(sender=User)
def on_user_created(model_class: User, instance: User, created: bool):
print("works1") # Signal is working correctly. I see this output in console
if created: # DOES NOT WORK HERE! I AM GETTING False value on created
print('works2')
us = UserSettings()
us.user = instance
us.save(force_insert=True)
So this is the way I am creating new users:
def create_or_update_user_tg(tg_id: int, name: str, age: int, city: str,
gender: Gender, search_gender: SearchGender,
profile_description: str = None, location: Location = None,
medias: typing.List[tuple[str]] = None) -> \
typing.Union[User, None]:
u, is_creating = User.get_or_none(tg_id=tg_id), False
if not u:
u, is_creating = User(), True
u.tg_id = tg_id
u.name = name
u.age = age
u.city = city
u.gender = gender.value
u.search_gender = search_gender.value
u.profile_description = profile_description
if location:
u.longitude = location.longitude
u.latitude = location.latitude
u.save(force_insert=is_creating)
upload_user_medias(u.tg_id, medias, delete_existing=True)
return u
Thanks for the responses! Waiting for your advice.
It seems to be working fine for me. Here's a stripped-down, simplified example:
from peewee import *
from playhouse.signals import Model, post_save
from uuid import uuid4
db = SqliteDatabase(':memory:')
class User(Model):
id = UUIDField(default=uuid4)
username = TextField()
class Meta:
database = db
db.create_tables([User])
@post_save(sender=User)
def on_save(model_class, instance, created=None):
print(instance.username, created)
u = User()
u.username = 'foo'
u.save(force_insert=True)
u.save()
The output, as expected:
foo True
foo False

How to query a nested attribute in SQLModel?

I have these two tables named User and UserRole.
class UserRoleType(str, enum.Enum):
admin = 'admin'
client = 'client'
class UserRole(SQLModel, table=True):
__tablename__ = 'user_role'
id: int | None = Field(default=None, primary_key=True)
type: UserRoleType = Field(
default=UserRoleType.client,
sa_column=Column(Enum(UserRoleType)),
)
write_access: bool = Field(default=False)
read_access: bool = Field(default=False)
users: List['User'] = Relationship(back_populates='user_role')
class User(SQLModel, table=True):
id: int | None = Field(default=None, primary_key=True)
username: str = Field(..., index=True)
user_role_id: int = Field(..., foreign_key='user_role.id')
user_role: 'UserRole' = Relationship(back_populates='users')
I can easily insert them into the DB with:
async with get_session() as session:
role = UserRole(description=UserRoleType.client)
session.add(role)
await session.commit()
user = User( username='test', user_role_id=role.id)
session.add(user)
await session.commit()
await session.refresh(user)
And access the committed data with:
results = (await session.execute(select(User).where(User.id == 1))).one()
Output:
(User(user_role_id=1, username='test', id=1),)
Notice that there's a user_role_id, but where's the user_role object?
In fact, if I try to access it, it raises:
*** AttributeError: Could not locate column in row for column 'user_role'
I also tried to pass the role instead of the user_role_id at the insertion of the User:
user = User( username='test', user_role=role)
But I got:
sqlalchemy.exc.InterfaceError: (sqlite3.InterfaceError) Error binding parameter 2 - probably unsupported type.
A few things first.
You did not include your import statements, so I will have to guess a few things.
You probably want the User.user_role_id and User.user_role fields to be "pydantically" optional. This allows you to create user instances without passing the role to the constructor, giving you the option to do so after initialization or for example by appending User objects to the UserRole.users list instead. To enforce that a user must have a role on the database level, you simply define nullable=False on the User.user_role_id field. That way, if you try to commit to the DB without having defined a user role for a user in any of the possible ways, you will get an error.
In your database insertion code you write role = UserRole(description=UserRoleType.client). I assume the description is from older code and you meant to write role = UserRole(type=UserRoleType.client).
You probably want your UserRole.type to be not nullable on the database side. You can do so by passing nullable=False to the Column constructor (not the Field constructor).
I will simplify a little by using blocking code (non-async) and a SQLite database.
This should work:
from enum import Enum as EnumPy
from sqlalchemy.sql.schema import Column
from sqlalchemy.sql.sqltypes import Enum as EnumSQL
from sqlmodel import Field, Relationship, Session, SQLModel, create_engine
class UserRoleType(str, EnumPy):
admin = 'admin'
client = 'client'
class UserRole(SQLModel, table=True):
__tablename__ = 'user_role'
id: int | None = Field(default=None, primary_key=True)
type: UserRoleType = Field(default=UserRoleType.client, sa_column=Column(EnumSQL(UserRoleType), nullable=False))
write_access: bool = Field(default=False)
read_access: bool = Field(default=False)
users: list['User'] = Relationship(back_populates='user_role')
class User(SQLModel, table=True):
__tablename__ = 'user'
id: int | None = Field(default=None, primary_key=True)
username: str = Field(..., index=True)
user_role_id: int | None = Field(foreign_key='user_role.id', default=None, nullable=False)
user_role: UserRole | None = Relationship(back_populates='users')
def test() -> None:
# Initialize database & session:
sqlite_file_name = 'user_role.db'
sqlite_url = f'sqlite:///{sqlite_file_name}'
engine = create_engine(sqlite_url)
SQLModel.metadata.drop_all(engine)
SQLModel.metadata.create_all(engine)
session = Session(engine)
# Create the test objects:
role = UserRole(type=UserRoleType.client)
user = User(username='test', user_role=role)
session.add(user)
session.commit()
session.refresh(user)
# Do some checks:
assert isinstance(user.user_role.type, EnumPy)
assert user.user_role_id == role.id and isinstance(role.id, int)
assert role.users == [user]
if __name__ == '__main__':
test()
PS: I know the question was posted a while ago, but maybe this still helps or helps someone else.
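As a follow-up on the question title ("query a nested attribute"): filtering on, or eagerly loading, the related UserRole can be done directly in the select statement. A sketch against the models and session from the example above (selectinload is plain SQLAlchemy; eager loading is especially useful with async sessions, where lazy loading on attribute access fails):
from sqlalchemy.orm import selectinload
from sqlmodel import select

# Filter users by an attribute of the related UserRole via a join.
statement = select(User).join(UserRole).where(UserRole.type == UserRoleType.admin)
admins = session.exec(statement).all()

# Eager-load the relationship so user.user_role is populated up front.
statement = select(User).options(selectinload(User.user_role))
for user in session.exec(statement):
    print(user.username, user.user_role.type)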

Add a custom field to FastAPI response model (serializers)

I'm following this guide from the FastAPI documentation, and I have a question: what if I want to add a custom field when I return an object from the DB? In Django I can use serializers.
My case:
I want to save an image name into the DB, but before that I need to save the actual file in a static folder. When I call GET /items/1 I want to return not just the image name from the DB but the full URL, so I need to execute some logic on every request in order to build the URL. The question is: how can I achieve that? The only thing I can think of is to add an additional DTO layer that converts input data to Pydantic classes, so it would be:
DTO class -> Pydantic -> DB
Is there a more elegant way of doing that?
Code example:
schemas.py
from typing import List, Literal, Optional
from enum import Enum, IntEnum
from pydantic import BaseModel, constr, validator
class Ingredient(BaseModel):
quantity: int
quantityUnit: QuantityUnitEnum
name: constr(max_length=50)
class RecipeBase(BaseModel):
id: int = None
title: constr(max_length=50)
# image_name: str
@validator('ingredients')
def ingredients_must_have_unique_name(cls, values):
names = []
for item in values:
names.append(item.name)
if len(names) > len(set(names)):
raise ValueError('must contain unique names')
return values
class RecipeCreate(RecipeBase):
pass
class Recipe(RecipeBase):
id: int
class Config:
orm_mode = True
model.py
class Recipe(Base):
__tablename__ = "recipes"
id = Column(Integer, primary_key=True, index=True)
title = Column(String(50), index=True, nullable=False)
image_name = Column(String(50), index=True, nullable=False)
main.py
#app.post("/recipes", response_model=schemas.Recipe)
def create_recipe(recipe: schemas.RecipeCreate, db: Session = Depends(get_db)):
return repository.create_recipe(db=db, recipe=recipe)
#app.get("/recipes/{recipe_id}", response_model=schemas.Recipe)
def get_recipe(recipe_id, db: Session = Depends(get_db)):
return repository.get_recipe(db, recipe_id=recipe_id)
repository.py
def get_recipe(db: Session, recipe_id: int):
return db.query(models.Recipe).get(recipe_id)
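One pattern that could fit this schema (a sketch, with STATIC_URL and image_url as made-up names, reusing the imports from the snippets above): expose the full URL as a property on the SQLAlchemy model and declare it as a plain field on the Pydantic schema, since orm_mode reads arbitrary attributes, including properties.
# model.py
STATIC_URL = "https://example.com/static"  # made-up base URL for the static folder

class Recipe(Base):
    __tablename__ = "recipes"
    id = Column(Integer, primary_key=True, index=True)
    title = Column(String(50), index=True, nullable=False)
    image_name = Column(String(50), index=True, nullable=False)

    @property
    def image_url(self) -> str:
        # computed on every access, so the URL is rebuilt on every request
        return f"{STATIC_URL}/{self.image_name}"

# schemas.py
class Recipe(RecipeBase):
    id: int
    image_url: str  # populated from the model property via orm_mode

    class Config:
        orm_mode = True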

SQLAlchemy CRUD operation is inconsistent with or without outcome assignment

I have an app where I want the user to be able to bookmark/un-bookmark a blog, but upon un-bookmarking, I don't want to remove that bookmark record. So I have an is_bookmarked property on my Bookmark model to determine whether a bookmark is active/inactive.
In my test file, I have
def test_unbookmark_a_blog_do_assign(session):
blog = create_blog(session)
bookmark = toggle_bookmark(session, blog_id=blog.id)
assert len(blog.bookmarks) == 1
toggle_bookmark(session, blog_id=blog.id)
assert len(blog.bookmarks) == 0
This test passes. However, the following won't; the only difference is that I do not assign the outcome of toggle_bookmark to a variable:
def test_unbookmark_a_blog_no_assign(session):
blog = create_blog(session)
toggle_bookmark(session, blog_id=blog.id)
assert len(blog.bookmarks) == 1
toggle_bookmark(session, blog_id=blog.id)
assert len(blog.bookmarks) == 0
It fails at the second assertion, assert len(blog.bookmarks) == 0. The reason is that blog._bookmarks[0].is_bookmarked does not get updated outside the toggle_bookmark function and is still True, so it still shows up in blog.bookmarks (definition attached below).
For context, I am using classic mapping:
@dataclass
class Bookmark:
is_bookmarked: bool = True
blog_id: Optional[int] = None
@dataclass
class Blog:
_bookmarks: List[Bookmark] = field(default_factory=list)
def add_bookmark(self, bookmark):
self._bookmarks.append(bookmark)
@property
def bookmarks(self):
return [bookmark for bookmark in self._bookmarks if bookmark.is_bookmarked]
...
blog_table = Table(
"blog",
metadata,
Column("id", Integer, primary_key=True, index=True))
bookmark_table = Table(
"bookmark",
metadata,
Column("id", Integer, primary_key=True, index=True),
Column("is_bookmarked", Boolean, default=True),
Column("blog_id", ForeignKey("blog.id"), nullable=True),
)
...
mapper(
Blog,
blog_table,
properties={
"_bookmarks": relationship(Bookmark, back_populates="blog"),
},
)
mapper(
Bookmark,
bookmark_table,
properties={
"blog": relationship(Blog, back_populates="_bookmarks"),
},
)
The toggle_bookmark function:
def toggle_bookmark(db_session, *, blog_id):
blog = db_session.query(Blog).get(blog_id)
bookmark = db_session.query(Bookmark).filter(
Bookmark.blog_id == blog_id
).one_or_none()
if bookmark is None:
bookmark = Bookmark()
blog.add_bookmark(bookmark)
db_session.add(blog)
db_session.commit()
return bookmark
bookmark.is_bookmarked = not bookmark.is_bookmarked
db_session.add(bookmark)
db_session.commit()
return bookmark
I am really confused... My gut tells me that it has something to do with when the query gets evaluated, but I haven't managed to find any evidence to support it. Any help is appreciated. Thanks in advance!
A full example:
from dataclasses import dataclass, field
from typing import Optional, List
from sqlalchemy import (
create_engine, MetaData, Table, Column, Integer, Boolean, ForeignKey)
from sqlalchemy.orm import mapper, relationship, sessionmaker
@dataclass
class Bookmark:
is_bookmarked: bool = True
blog_id: Optional[int] = None
@dataclass
class Blog:
_bookmarks: List[Bookmark] = field(default_factory=list)
def add_bookmark(self, bookmark):
self._bookmarks.append(bookmark)
@property
def bookmarks(self):
return [bookmark for bookmark in self._bookmarks if bookmark.is_bookmarked]
engine = create_engine("sqlite:///")
metadata = MetaData(bind=engine)
blog_table = Table(
"blog",
metadata,
Column("id", Integer, primary_key=True, index=True))
bookmark_table = Table(
"bookmark",
metadata,
Column("id", Integer, primary_key=True, index=True),
Column("is_bookmarked", Boolean, default=True),
Column("blog_id", ForeignKey("blog.id"), nullable=True),
)
metadata.create_all()
mapper(
Blog,
blog_table,
properties={
"_bookmarks": relationship(Bookmark, back_populates="blog"),
},
)
mapper(
Bookmark,
bookmark_table,
properties={
"blog": relationship(Blog, back_populates="_bookmarks"),
},
)
def toggle_bookmark(db_session, *, blog_id):
blog = db_session.query(Blog).get(blog_id)
bookmark = db_session.query(Bookmark).filter(
Bookmark.blog_id == blog_id
).one_or_none()
if bookmark is None:
bookmark = Bookmark()
blog.add_bookmark(bookmark)
db_session.add(blog)
db_session.commit()
return bookmark
bookmark.is_bookmarked = not bookmark.is_bookmarked
db_session.add(bookmark)
db_session.commit()
return bookmark
def create_blog(session):
blog = Blog()
session.add(blog)
session.commit()
return blog
def test_unbookmark_a_blog_do_assign(session):
blog = create_blog(session)
bookmark = toggle_bookmark(session, blog_id=blog.id)
assert len(blog.bookmarks) == 1
toggle_bookmark(session, blog_id=blog.id)
assert len(blog.bookmarks) == 0
def test_unbookmark_a_blog_no_assign(session):
blog = create_blog(session)
toggle_bookmark(session, blog_id=blog.id)
assert len(blog.bookmarks) == 1
toggle_bookmark(session, blog_id=blog.id)
assert len(blog.bookmarks) == 0
Session = sessionmaker()
test_unbookmark_a_blog_do_assign(Session())
test_unbookmark_a_blog_no_assign(Session())
The core problem is this:
class Bookmark:
is_bookmarked: bool = True # <-- This here
Classical mapping does not install instrumentation over the existing class attribute, and so any changes to an instance's is_bookmarked are not persisted. From there it follows that without the assignment the test reads the state from the database, where it holds its default value True. With the assignment the instance is kept around in the scope of the test and so in the Session, and the later queries return the existing modified instance.
You would run into similar issues with SQLAlchemy and dataclasses if using field() with default=:
>>> from dataclasses import dataclass, field
>>> @dataclass
... class C:
... f: bool = field(default=True)
...
>>> C.f
True
A solution to get over the situation is to use a field() with default_factory= for is_bookmarked as well:
@dataclass
class Bookmark:
is_bookmarked: bool = field(default_factory=lambda: True)
...
since in recent enough Python the field() is then not visible in the class as an attribute, and mapping can install instrumentation.
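A quick way to see the difference (a standalone check, consistent with the explanation above): with default= the default value lives on the class itself, while with default_factory= no class-level attribute is left behind, which is what lets the classical mapper install its instrumented attribute.
from dataclasses import dataclass, field

@dataclass
class WithDefault:
    is_bookmarked: bool = field(default=True)

@dataclass
class WithFactory:
    is_bookmarked: bool = field(default_factory=lambda: True)

print("is_bookmarked" in vars(WithDefault))  # True: the default is a class attribute
print("is_bookmarked" in vars(WithFactory))  # False: nothing shadows the mapper's instrumentation
print(WithFactory().is_bookmarked)           # True: instances still get the default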
