Let's say I want to create an API with a Hero SQLModel; below is a minimal viable example illustrating this:
from typing import Optional
from datetime import datetime

from sqlalchemy import Column, TIMESTAMP, text
from sqlmodel import Field, Relationship, SQLModel


class HeroBase(SQLModel):  # essential fields
    name: str = Field(index=True)
    secret_name: str
    age: Optional[int] = Field(default=None, index=True)
    created_datetime: datetime = Field(sa_column=Column(TIMESTAMP(timezone=True),
                                       nullable=False, server_default=text("now()")))
    updated_datetime: datetime = Field(sa_column=Column(TIMESTAMP(timezone=True),
                                       nullable=False, server_onupdate=text("now()")))
    team_id: Optional[int] = Field(default=None, foreign_key="team.id")


class Hero(HeroBase, table=True):  # essential fields + unique identifier + relationships
    id: Optional[int] = Field(default=None, primary_key=True)
    team: Optional["Team"] = Relationship(back_populates="heroes")


class HeroRead(HeroBase):  # unique identifier
    id: int


class HeroCreate(HeroBase):  # same as Base
    pass


class HeroUpdate(SQLModel):  # all essential fields, without datetimes
    name: Optional[str] = None
    secret_name: Optional[str] = None
    age: Optional[int] = None
    team_id: Optional[int] = None


class HeroReadWithTeam(HeroRead):
    team: Optional["TeamRead"] = None
My question is: what should the SQLModel for HeroUpdate look like?
Does it include the created_datetime and updated_datetime fields?
How do I delegate the responsibility of creating these fields to the database instead of having the app do so?
Does [the HeroUpdate model] include the created_datetime and updated_datetime fields?
Well, you tell me! Should the API endpoint for updating an entry in the hero table be able to change the values in the created_datetime and updated_datetime columns? I would say obviously not.
Fields like that serve as metadata about entries in the DB and are typically only ever written to by the DB. It is strange enough that you include them in the model for creating new entries in the table. Why would you let the API set the value of when an entry in the DB was created/updated?
One could even argue that those fields should not be visible to "the outside" at all. But I suppose you could include them in HeroRead for example, if you wanted to present that metadata to the consumers of the API.
How do I delegate the responsibility of creating [the created_datetime and updated_datetime] fields to the database instead of having the app do so?
You already have delegated it. You (correctly) defined server_default and server_onupdate values for the Column instances that represent those fields. That means the DBMS will set their values accordingly, unless they are passed explicitly in a SQL statement.
What I would suggest is the following re-arrangement of your models:
from datetime import datetime
from typing import Optional

from sqlmodel import Column, Field, SQLModel, TIMESTAMP, text


class HeroBase(SQLModel):
    name: str = Field(index=True)
    secret_name: str
    age: Optional[int] = Field(default=None, index=True)


class Hero(HeroBase, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    created_datetime: Optional[datetime] = Field(sa_column=Column(
        TIMESTAMP(timezone=True),
        nullable=False,
        server_default=text("CURRENT_TIMESTAMP"),
    ))
    updated_datetime: Optional[datetime] = Field(sa_column=Column(
        TIMESTAMP(timezone=True),
        nullable=False,
        server_default=text("CURRENT_TIMESTAMP"),
        server_onupdate=text("CURRENT_TIMESTAMP"),
    ))


class HeroRead(HeroBase):
    id: int


class HeroCreate(HeroBase):
    pass


class HeroUpdate(SQLModel):
    name: Optional[str] = None
    secret_name: Optional[str] = None
    age: Optional[int] = None
(I use CURRENT_TIMESTAMP to test with SQLite.)
Demo:
from sqlmodel import Session, create_engine, select

# Initialize database & session:
engine = create_engine("sqlite:///", echo=True)
SQLModel.metadata.create_all(engine)
session = Session(engine)

# Create:
hero_create = HeroCreate(name="foo", secret_name="bar")
session.add(Hero.from_orm(hero_create))
session.commit()

# Query (SELECT):
statement = select(Hero).filter(Hero.name == "foo")
hero = session.execute(statement).scalar()

# Read (Response):
hero_read = HeroRead.from_orm(hero)
print(hero_read.json(indent=4))

# Update (comprehensive as in the docs, although we change only one field):
hero_update = HeroUpdate(secret_name="baz")
hero_update_data = hero_update.dict(exclude_unset=True)
for key, value in hero_update_data.items():
    setattr(hero, key, value)
session.add(hero)
session.commit()

# Read again:
hero_read = HeroRead.from_orm(hero)
print(hero_read.json(indent=4))
Here is what the CREATE statement looks like:
CREATE TABLE hero (
    created_datetime TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
    updated_datetime TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
    name VARCHAR NOT NULL,
    secret_name VARCHAR NOT NULL,
    age INTEGER,
    id INTEGER NOT NULL,
    PRIMARY KEY (id)
)
Here is the output of the two HeroRead instances:
{
    "name": "foo",
    "secret_name": "bar",
    "age": null,
    "id": 1
}
{
    "name": "foo",
    "secret_name": "baz",
    "age": null,
    "id": 1
}
I did not include the timestamp columns in the read model, but SQLite does not honor ON UPDATE anyway.
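As an aside (not from the original answer): if you need updated_datetime to actually change on every UPDATE even on backends like SQLite, one option is SQLAlchemy's client-side onupdate, which renders the function into the UPDATE statement itself instead of relying on DDL-level ON UPDATE support. A minimal sketch, assuming the same SQLModel setup as above (Stamped is a hypothetical model name):
from datetime import datetime
from typing import Optional

from sqlalchemy import Column, TIMESTAMP, func
from sqlmodel import Field, SQLModel


class Stamped(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    updated_datetime: Optional[datetime] = Field(sa_column=Column(
        TIMESTAMP(timezone=True),
        nullable=False,
        server_default=func.now(),
        # client-side onupdate: SQLAlchemy emits now() as part of every
        # UPDATE statement, so no ON UPDATE support is needed from the DB
        onupdate=func.now(),
    ))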
Related
I have the following table structure (I have simplified it as much as possible, narrowed down the child/inheriting tables [there are more] and removed all irrelevant columns from the provided tables):
# Base is my declarative_base
class AbstractQuestion(Base):
    questionTypeId: Column = Column(
        Integer, ForeignKey("luQuestionTypes.id"), index=True, nullable=False
    )
    __mapper_args__ = {
        "polymorphic_identity": 0,
        "polymorphic_on": questionTypeId,
    }


class MultiChoiceQuestion(AbstractQuestion):
    id: Column = Column(Integer, ForeignKey(AbstractQuestion.id), primary_key=True)
    __mapper_args__ = {"polymorphic_identity": 1}


class AbstractSurveyQuestion(AbstractQuestion):
    id: Column = Column(Integer, ForeignKey(AbstractQuestion.id), primary_key=True)
    surveyQuestionTypeId: Column = Column(
        Integer, ForeignKey("luSurveyQuestionTypes.id"), index=True, nullable=False
    )
    __mapper_args__ = {"polymorphic_identity": 2}


class RatingQuestion(AbstractSurveyQuestion):
    id: Column = Column(
        Integer, ForeignKey(AbstractSurveyQuestion.id), primary_key=True
    )
The challenge I'm facing is that I'm trying to make AbstractSurveyQuestion have two types of polymorphic mappings: one as a child of AbstractQuestion with a polymorphic_identity that matches the questionTypeId, but I also need it to have a separate polymorphic_on mapper for its own child table, which is RatingQuestion.
The closest thing I could find was this question, but it doesn't seem to be aiming at exactly what I'm looking for.
I also looked at the official docs about inheritance, but again couldn't find an accurate example to what I'm trying to achieve.
Can anyone please help me with this?
Thanks!
I posted the same question on SQLAlchemy's GitHub repo. Got this answer from the maintainer:
https://github.com/sqlalchemy/sqlalchemy/discussions/8089#discussioncomment-2878725
I'll paste the contents below as well:
it sounds like you are looking for multi-level polymorphic_on. We don't support that right now without workarounds, and that's #2555, which is a feature we're unlikely to implement, or if we did it would be a long time from now.
It looks like you are using joined inheritance... so... two ways. The more SQL-efficient one is to have an extra "supplementary" column on your base table that can discriminate for AbstractSurveyQuestion... because if you query for all the AbstractQuestion objects, by default it's just going to query that one table, and it needs to know from each row whether that row is in fact a RatingQuestion.
the more convoluted way is to use mapper-configured with_polymorphic so that all queries for AbstractQuestion include all the tables (or a subset of tables, can be configured, but at minimum you'd need to join out to AbstractSurveyQuestion) using a LEFT OUTER JOIN (or if you really wanted to go crazy it can be a UNION ALL).
the workarounds are a little ugly since it's not very easy to get a "composed" value out of two columns in SQL, but they are contained to the base classes. Below examples work on SQLite and might need tweaking for other databases.
Here's the discriminator on base table demo, a query here looks like:
SELECT aq.id AS aq_id, aq.d1 AS aq_d1, aq.d2 AS aq_d2, CAST(aq.d1 AS VARCHAR) || ? || CAST(coalesce(aq.d2, ?) AS VARCHAR) AS _sa_polymorphic_on
FROM aq
from typing import Tuple, Optional

from sqlalchemy import cast
from sqlalchemy import Column
from sqlalchemy import create_engine
from sqlalchemy import event
from sqlalchemy import ForeignKey
from sqlalchemy import inspect
from sqlalchemy import Integer, func
from sqlalchemy import String
from sqlalchemy.orm import declarative_base
from sqlalchemy.orm import Session

Base = declarative_base()


class ident_(str):
    """describe a composed identity.

    Using a string for easy conversion to a string SQL composition.
    """

    _tup: Tuple[int, Optional[int]]

    def __new__(cls, d1, d2=None):
        self = super().__new__(cls, f"{d1}, {d2 or ''}")
        self._tup = d1, d2
        return self

    def _as_tuple(self):
        return self._tup


class AbstractQuestion(Base):
    __tablename__ = "aq"
    id = Column(Integer, primary_key=True)
    d1 = Column(
        Integer, nullable=False
    )  # this can be your FK to the other table etc.
    d2 = Column(
        Integer, nullable=True
    )  # this is a "supplementary" discrim column

    __mapper_args__ = {
        "polymorphic_identity": ident_(0),
        "polymorphic_on": cast(d1, String)
        + ", "
        + cast(func.coalesce(d2, ""), String),
    }


@event.listens_for(AbstractQuestion, "init", propagate=True)
def _setup_poly(target, args, kw):
    """receive new AbstractQuestion objects when they are constructed and
    set polymorphic identity"""

    # this is the ident_() object
    ident = inspect(target).mapper.polymorphic_identity

    d1, d2 = ident._as_tuple()
    kw["d1"] = d1
    if d2:
        kw["d2"] = d2


class MultiChoiceQuestion(AbstractQuestion):
    __tablename__ = "mcq"
    id: Column = Column(
        Integer, ForeignKey(AbstractQuestion.id), primary_key=True
    )
    __mapper_args__ = {"polymorphic_identity": ident_(1)}


class AbstractSurveyQuestion(AbstractQuestion):
    __tablename__ = "acq"
    id: Column = Column(
        Integer, ForeignKey(AbstractQuestion.id), primary_key=True
    )
    __mapper_args__ = {"polymorphic_identity": ident_(2)}


class RatingQuestion(AbstractSurveyQuestion):
    __tablename__ = "rq"
    id: Column = Column(
        Integer, ForeignKey(AbstractSurveyQuestion.id), primary_key=True
    )
    __mapper_args__ = {"polymorphic_identity": ident_(2, 1)}


e = create_engine("sqlite://", echo=True)
Base.metadata.create_all(e)

s = Session(e)

s.add(MultiChoiceQuestion())
s.add(RatingQuestion())
s.commit()
s.close()

for q in s.query(AbstractQuestion):
    print(q)
then there's the one that maintains your schema fully, a query here looks like:
SELECT aq.id AS aq_id, aq.d1 AS aq_d1, CAST(aq.d1 AS VARCHAR) || ? || CAST(coalesce(acq.d2, ?) AS VARCHAR) AS _sa_polymorphic_on, acq.id AS acq_id, acq.d2 AS acq_d2
FROM aq LEFT OUTER JOIN acq ON aq.id = acq.id
from typing import Tuple, Optional

from sqlalchemy import cast
from sqlalchemy import Column
from sqlalchemy import create_engine
from sqlalchemy import event
from sqlalchemy import ForeignKey
from sqlalchemy import func
from sqlalchemy import inspect
from sqlalchemy import Integer
from sqlalchemy import String
from sqlalchemy.orm import declarative_base
from sqlalchemy.orm import Session

Base = declarative_base()


class ident_(str):
    """describe a composed identity.

    Using a string for easy conversion to a string SQL composition.
    """

    _tup: Tuple[int, Optional[int]]

    def __new__(cls, d1, d2=None):
        self = super().__new__(cls, f"{d1}, {d2 or ''}")
        self._tup = d1, d2
        return self

    def _as_tuple(self):
        return self._tup


class AbstractQuestion(Base):
    __tablename__ = "aq"
    id = Column(Integer, primary_key=True)
    d1 = Column(
        Integer, nullable=False
    )  # this can be your FK to the other table etc.

    __mapper_args__ = {
        "polymorphic_identity": ident_(0),
    }


@event.listens_for(AbstractQuestion, "init", propagate=True)
def _setup_poly(target, args, kw):
    """receive new AbstractQuestion objects when they are constructed and
    set polymorphic identity"""

    # this is the ident_() object
    ident = inspect(target).mapper.polymorphic_identity

    d1, d2 = ident._as_tuple()
    kw["d1"] = d1
    if d2:
        kw["d2"] = d2


class MultiChoiceQuestion(AbstractQuestion):
    __tablename__ = "mcq"
    id: Column = Column(
        Integer, ForeignKey(AbstractQuestion.id), primary_key=True
    )
    __mapper_args__ = {"polymorphic_identity": ident_(1)}


class AbstractSurveyQuestion(AbstractQuestion):
    __tablename__ = "acq"
    id: Column = Column(
        Integer, ForeignKey(AbstractQuestion.id), primary_key=True
    )
    d2 = Column(Integer, nullable=False)
    __mapper_args__ = {
        "polymorphic_identity": ident_(2),
        "polymorphic_load": "inline",  # adds ASQ to all AQ queries
    }


# after ASQ is set up, set the discriminator on the base class
# that includes ASQ column
inspect(AbstractQuestion)._set_polymorphic_on(
    cast(AbstractQuestion.d1, String)
    + ", "
    + cast(func.coalesce(AbstractSurveyQuestion.d2, ""), String)
)


class RatingQuestion(AbstractSurveyQuestion):
    __tablename__ = "rq"
    id: Column = Column(
        Integer, ForeignKey(AbstractSurveyQuestion.id), primary_key=True
    )
    __mapper_args__ = {"polymorphic_identity": ident_(2, 1)}


e = create_engine("sqlite://", echo=True)
Base.metadata.create_all(e)

s = Session(e)

s.add(MultiChoiceQuestion())
s.add(RatingQuestion())
s.commit()
s.close()

for q in s.query(AbstractQuestion):
    print(q)
I am getting the following error:
sqlalchemy.exc.IntegrityError: (sqlite3.IntegrityError) NOT NULL constraint failed: SignalJourneyAudienceConstraints.constraintId
[SQL: INSERT INTO "SignalJourneyAudienceConstraints" ("sourceId", "constraintTypeId", "constraintValue", targeting, frequency, period) VALUES (?, ?, ?, ?, ?, ?)]
[parameters: (None, 10, 'STRING', 1, None, None)]
This happens when trying to use the endpoint below. For the time being, I have made all fields optional except:
constraintTypeId: int
constraintValue: str
targeting: bool
Endpoint
# mail.py
...

@app.post("/add-new-audience-constraint", status_code=status.HTTP_200_OK)
def add_new_audience_constraint(
    sjac: schema.SignalJourneyAudienceConstraints, db: Session = Depends(get_db)
):
    """Sets the main query data."""
    new_audience_constraint = models.SignalJourneyAudienceConstraints(
        constraintTypeId=sjac.constraintTypeId,
        constraintValue=sjac.constraintValue.upper(),
        targeting=True,
    )
    db.add(new_audience_constraint)
    db.commit()
    return {"message": "Data added successfully."}
Model
# models.py
class SignalJourneyAudienceConstraints(Base):
    """Signal Journey Audience Constraints"""

    __tablename__ = "SignalJourneyAudienceConstraints"

    constraintId = Column(Integer, primary_key=True)
    audienceId = Column(
        Integer,
        ForeignKey("SignalJourneyAudiences.audienceId"),
        primary_key=True,
    )
    sourceId = Column(Integer, ForeignKey("SignalJourneySources.sourceId"))
    constraintTypeId = Column(
        Integer, ForeignKey("SignalJourneyConstraintType.constraintTypeId")
    )
    constraintValue = Column(String)
    targeting = Column(Boolean)
    frequency = Column(Integer)
    period = Column(Integer)
Schema
# schema.py
class SignalJourneyAudienceConstraints(BaseModel):
    """SignalJourneyAudienceConstraints BaseModel."""

    constraintId: Optional[int]  # PK
    audienceId: Optional[int]  # FK - SignalJourneyAudiences -> audienceId
    sourceId: Optional[int]  # FK - SignalJourneySources -> sourceId
    constraintTypeId: int  # FK - SignalJourneyConstraintType -> constraintTypeId
    constraintValue: str
    targeting: bool
    frequency: Optional[int]
    period: Optional[int]

    class Config:
        """config class"""
        orm_mode = True
I have looked into other SO questions, but none seem to fit what I am experiencing. Hopefully someone can explain what's going on and how to resolve it.
ERD
You have two fields listed as primary_key; are they both meant to be primary keys? With two primary key fields you get a composite key, and the primary key will not automagically be defined as an auto-increment field; for that you want a single integer primary key.
SQLAlchemy will only set the auto-increment property for SQLite automagically when there is a single primary key column and it is defined as an integer.
Since defining a composite key wasn't what you wanted, using constraintId as a single auto-incrementing key (as shown in your ERD) is what you want - remove the primary_key argument from the audienceId field.
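For illustration, a minimal sketch of the corrected model (same columns as in models.py above, assuming the same Base; only constraintId remains a primary key):
from sqlalchemy import Boolean, Column, ForeignKey, Integer, String


class SignalJourneyAudienceConstraints(Base):
    """Signal Journey Audience Constraints"""

    __tablename__ = "SignalJourneyAudienceConstraints"

    # single integer primary key -> SQLAlchemy marks it auto-increment on SQLite
    constraintId = Column(Integer, primary_key=True)
    # plain foreign key, no longer part of the primary key
    audienceId = Column(Integer, ForeignKey("SignalJourneyAudiences.audienceId"))
    sourceId = Column(Integer, ForeignKey("SignalJourneySources.sourceId"))
    constraintTypeId = Column(
        Integer, ForeignKey("SignalJourneyConstraintType.constraintTypeId")
    )
    constraintValue = Column(String)
    targeting = Column(Boolean)
    frequency = Column(Integer)
    period = Column(Integer)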
I have a database with a table named friends. That table has two columns, "user_id" and "friend_id".
Those are foreign keys from the Users table.
My friends table right now:
user_id | friend_id
-------------------------------------+-------------------------------------
google-oauth2|11539665289********** | google-oauth2|11746442253**********
google-oauth2|11746442253********** | google-oauth2|11539665289**********
google-oauth2|11746442253********** | google-oauth2|11111111111**********
The first two rows contain the same IDs, just flipped. Those are the users I want to retrieve, because they added each other. In the third row the user only added someone else, so that row shouldn't produce a result.
My SQLModels (models.py):
class Friends(SQLModel, table=True):
    __tablename__ = "friends"
    user_id: str = Field(sa_column=Column('user_id', VARCHAR(length=50), primary_key=True), foreign_key="users.id")
    friend_id: str = Field(sa_column=Column('friend_id', VARCHAR(length=50), primary_key=True), foreign_key="users.id")


class UserBase(SQLModel):
    id: str
    username: Optional[str]
    country_code: Optional[str]
    phone: Optional[str]
    picture: Optional[str]

    class Config:
        allow_population_by_field_name = True


class User(UserBase, table=True):
    __tablename__ = 'users'
    id: str = Field(primary_key=True)
    username: Optional[str] = Field(sa_column=Column('username', VARCHAR(length=50), unique=True, default=None))
    phone: Optional[str] = Field(sa_column=Column('phone', VARCHAR(length=20), unique=True, default=None))
    picture: Optional[str] = Field(sa_column=Column('picture', VARCHAR(length=255), default=None))
My FastAPI endpoint:
@router.get("", status_code=status.HTTP_200_OK, response_model=models.FriendsList, name="Get Friends for ID",
            tags=["friends"])
async def get_friends(
    user_id: str = Query(default=None, description="The user_id that you want to retrieve friends for"),
    session: Session = Depends(get_session)
):
    stm = select(models.User, models.Friends).where(models.User.id == models.Friends.friend_id, models.Friends.user_id == user_id)
    res = session.exec(stm).all()
    if not res:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND,
                            detail="There are no friendships associated with this id.")
    users = []
    for item in res:
        users.append(item[0])
    return models.FriendsList(users=users)
My code works perfectly fine, only the query needs to be replaced.
stm = select(models.User, models.Friends).where(models.User.id == models.Friends.friend_id, models.Friends.user_id == user_id)
res = session.exec(stm).all()
This query returns every User that has the given ID as user_id, but doesn't check if there is an entry the other way around.
Example for what I want to get:
I make a GET request to my endpoint with the id google-oauth2|11746442253**********. I would get the User google-oauth2|11539665289**********. (The User google-oauth2|11111111111********** would not be retrieved because there is no entry the other way around.)
I hope you guys understand my problem. If there are any questions feel free to ask.
Best regards,
Colin
As I said in the comment, without a simple example I can't actually try it myself, but I did just have an idea. You might need to modify the subquery syntax a little bit, but I reckon this could theoretically work:
stmt = select(Friends.user_id, Friends.friend_id).where(
    tuple_(Friends.user_id, Friends.friend_id).in_(select(Friends.friend_id, Friends.user_id))
)
Basically, it just checks for every (user_id, friend_id) pair whether there is a matching (friend_id, user_id) pair.
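Adapted to the endpoint above, the condition could be combined with the existing filter roughly as follows (a sketch, untested; tuple_ comes from sqlalchemy, and row-value IN requires a backend that supports it, e.g. SQLite 3.15+ or PostgreSQL):
from sqlalchemy import tuple_
from sqlmodel import select

stm = select(models.User).where(
    models.User.id == models.Friends.friend_id,
    models.Friends.user_id == user_id,
    # keep only pairs whose reversed counterpart also exists in friends
    tuple_(models.Friends.user_id, models.Friends.friend_id).in_(
        select(models.Friends.friend_id, models.Friends.user_id)
    ),
)
users = session.exec(stm).all()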
Are you able to add another column called "accepted" with values like 0 or 1?
user_id                             | friend_id                           | accepted
------------------------------------+-------------------------------------+---------
google-oauth2|11539665289********** | google-oauth2|11746442253********** | 1
google-oauth2|11746442253********** | google-oauth2|11539665289********** | 1
google-oauth2|11746442253********** | google-oauth2|11111111111********** | 0
then you have two options:
you could make a relationship on the user table called "friends", set the lazy parameter to "dynamic" (lazy='dynamic'), and then query: user.friends.filter_by(accepted=1).all() (see the sketch after this list)
or you could write a query like:
query = Friends.query.filter(Friends.user_id == user_id).filter(Friends.accepted == 1).all()
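A rough sketch of the first option in plain SQLAlchemy terms (assuming the accepted column has been added; class and attribute names are illustrative, not from the question's code):
from sqlalchemy import Column, ForeignKey, Integer, String
from sqlalchemy.orm import declarative_base, relationship

Base = declarative_base()


class Friends(Base):
    __tablename__ = "friends"
    user_id = Column(String(50), ForeignKey("users.id"), primary_key=True)
    friend_id = Column(String(50), ForeignKey("users.id"), primary_key=True)
    accepted = Column(Integer, default=0)  # 0 = pending, 1 = accepted


class User(Base):
    __tablename__ = "users"
    id = Column(String(50), primary_key=True)
    # lazy="dynamic" returns a query object that can be filtered further
    friends = relationship(
        "Friends",
        primaryjoin="User.id == Friends.user_id",
        foreign_keys="Friends.user_id",
        lazy="dynamic",
    )


# usage: only confirmed friendships
# confirmed = user.friends.filter_by(accepted=1).all()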
Generally, relational databases aren't the best solution for these types of scenarios - if you're very flexible and not too far in, you could check out a NoSQL solution like MongoDB.
I have an enum defined:
from enum import Enum


class Status(Enum):
    pending = 'PENDING'
    processed = 'PROCESSED'
    delivered = 'DELIVERED'
In model.py
class Activity(db.Model):
    __tablename__ = 'activity'
    id = db.Column(db.Integer, primary_key=True)
    message = db.Column(db.String, nullable=False)
    status = db.Column(db.Enum(Status), nullable=False)
In controller.py:
jsn_data = request.get_json()
activity_obj = Activity(message=jsn_data['message'], status=jsn_data['status'])
In the request JSON body:
{
    "message": "Hi",
    "status": "pending"
}
So here I want the status value "PENDING" stored in the DB instead of 'pending'.
Let me know what I am missing.
The documentation has an example:
import enum


class MyEnum(enum.Enum):
    one = 1
    two = 2
    three = 3


t = Table(
    'data', MetaData(),
    Column('value', Enum(MyEnum))
)
Above, the string names of each element, e.g. “one”, “two”, “three”,
are persisted to the database; the values of the Python Enum, here
indicated as integers, are not used; the value of each enum can
therefore be any kind of Python object whether or not it is
persistable.
In order to persist the values and not the names, the
Enum.values_callable parameter may be used. For a simple enumeration
that uses string values, a callable such as
lambda x: [e.value for e in x] is sufficient.
So, to use values instead of keys, specify values_callable:
t = Table(
    'data', MetaData(),
    Column('value', Enum(MyEnum,
                         values_callable=lambda x: [str(e.value) for e in MyEnum]))
)
I would suggest using echo=True with create_engine to see what SQL is generated.
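Applied to the Status enum from the question, a minimal sketch (assuming the same Flask-SQLAlchemy db object and request data as above):
class Activity(db.Model):
    __tablename__ = 'activity'
    id = db.Column(db.Integer, primary_key=True)
    message = db.Column(db.String, nullable=False)
    # persist the enum *values* ('PENDING', ...) instead of the names
    status = db.Column(
        db.Enum(Status, values_callable=lambda x: [e.value for e in x]),
        nullable=False,
    )


# the request body carries the member name ('pending'), so look the member
# up by name; with values_callable the database then stores 'PENDING'
activity_obj = Activity(message=jsn_data['message'], status=Status[jsn_data['status']])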
If I put the CapacityMin class and the unittest class in the same .py file, everything is fine.
But after I move the CapacityMin class to a separate file and run the unit test, I get this error:
SQL expression, column, or mapped entity expected
DETAILS:
InvalidRequestError: SQL expression, column, or mapped entity expected - got '<module 'Entities.CapacityMin' from 'D:\trunk\AppService\Common\Entities\CapacityMin.pyc'>'
but this is not good.
CapacityMin.py :
import sqlalchemy
from sqlalchemy import *
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
class CapacityMin(Base):
'''
table definition:
ID INT NOT NULL auto_increment,
Server VARCHAR (20) NULL,
FeedID VARCHAR (10) NULL,
`DateTime` DATETIME NULL,
PeakRate INT NULL,
BytesRecv INT NULL,
MsgNoSent INT NULL,
PRIMARY KEY (ID)
'''
__tablename__ = 'capacitymin'
ID = Column(Integer, primary_key=True)
Server = Column(String)
FeedID = Column(String)
DateTime = Column(sqlalchemy.DateTime)
PeakRate = Column(Integer)
BytesRecv = Column(Integer)
MsgNoSent = Column(Integer)
def __init__(self, server, feedId, dataTime, peakRate, byteRecv, msgNoSent):
self.Server = server
self.FeedID = feedId
self.DateTime = dataTime
self.PeakRate = peakRate
self.BytesRecv = byteRecv
self.MsgNoSent = msgNoSent
def __repr__(self):
return "<CapacityMin('%s','%s','%s','%s','%s','%s')>" % (self.Server, self.FeedID ,
self.DateTime ,self.PeakRate,
self.BytesRecv, self.MsgNoSent)
if __name__ == '__main__':
pass
You are using the module, not the class within the module.
I suspect that you are using it like this:
from Entities import CapacityMin
while you meant to use:
from Entities.CapacityMin import CapacityMin
This kind of confusion is one of the reasons that the Python styleguide (PEP 8) recommends using lowercase names for your modules; your import would then be:
from entities.capacitymin import CapacityMin
and your error would have been easier to spot.