SQLAlchemy: can't understand this one2many relationship with composite keys - python

I have an SQLAlchemy model like the one below, and at first it didn't work (problems with the join, and then expecting a scalar instead of a list). I "fixed" it in the included version, but I really can't understand why it behaved like that.
At first I expected that with those ForeignKeys the Sizes.items relationship() shouldn't need an explicit primaryjoin, and when I added it SA started expecting a scalar and I had to explicitly specify uselist=True.
Why doesn't the relationship automatically detect one or both those things?
class Category(Base):
    __tablename__ = 'categories'

    pk = Column(String(6), primary_key=True)


class Item(Base):
    __tablename__ = 'items'

    pk = Column(String(6), primary_key=True)
    category_pk = Column(String(6), ForeignKey('categories.pk'))
    size = Column(Integer(), nullable=False)

    category = relationship('Category', backref=backref('items'))


class Sizes(Base):
    __tablename__ = 'sizes'

    category_pk = Column(String(6), ForeignKey('categories.pk'),
                         ForeignKey('items.category_pk'), primary_key=True)
    size = Column(Integer(), ForeignKey('items.size'), primary_key=True)

    category = relationship('Category', backref=backref('sizes'))
    items = relationship('Item',
                         uselist=True,
                         primaryjoin="and_(Sizes.category_pk==Item.category_pk, Sizes.size==Item.size)")

I believe what is happening is that you have two separate foreign keys, not a single composite FK over two columns. This works differently from primary_key=True, where marking each column individually is enough to get a composite primary key; for a composite foreign key you need a ForeignKeyConstraint.
Try something like:
class Category(Base):
    __tablename__ = 'categories'

    pk = Column(String(6), primary_key=True)


class Item(Base):
    __tablename__ = 'items'

    pk = Column(String(6), primary_key=True)
    category_pk = Column(String(6), ForeignKey('categories.pk'))
    size = Column(Integer(), nullable=False)

    category = relationship('Category', backref=backref('items'))


class Sizes(Base):
    __tablename__ = 'sizes'

    category_pk = Column(String(6), primary_key=True)
    size = Column(Integer(), primary_key=True)

    category = relationship('Category', backref=backref('sizes'))
    items = relationship('Item')

    __table_args__ = (
        # ForeignKeyConstraint is imported from sqlalchemy
        ForeignKeyConstraint(
            ["category_pk", "size"],
            ["items.category_pk", "items.size"]
        ),
    )
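One caveat worth noting: because the composite foreign key lives on the sizes table, SQLAlchemy will still infer Sizes.items as many-to-one (a scalar) by default, so you may still want uselist=True if a collection is what you are after. A sketch of that variant (my assumption, not part of the original answer):
class Sizes(Base):
    __tablename__ = 'sizes'

    category_pk = Column(String(6), primary_key=True)
    size = Column(Integer(), primary_key=True)

    category = relationship('Category', backref=backref('sizes'))

    # The join condition is now derived from the ForeignKeyConstraint below,
    # so no explicit primaryjoin is needed; uselist=True only overrides the
    # scalar default that follows from the FK living on this table.
    items = relationship('Item', uselist=True)

    __table_args__ = (
        ForeignKeyConstraint(
            ["category_pk", "size"],
            ["items.category_pk", "items.size"],
        ),
    )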

Related

Subquery/alias duplication in SqlAlchemy with polymorphic tables and "limit"

Given this polymorphic model
class OrganizationBase(Base):
    __tablename__ = "x_organization_base"
    __mapper_args__ = {
        "polymorphic_identity": "base",
        "polymorphic_on": "model_type",
    }

    model_type = db.Column(db.String(), nullable=False)
    id = Column(Integer(), primary_key=True)
class UmbrellaOrganization(OrganizationBase):
    __tablename__ = "x_umbrella_organization"
    __mapper_args__ = {"polymorphic_identity": "umbrella"}

    id = db.Column(Integer, db.ForeignKey(OrganizationBase.id), primary_key=True)
    umbrella_accounting_id = db.Column(db.String(255), nullable=False, unique=True)
    properties = db.relationship(
        "UmbrellaOrganizationProperty",
        lazy="joined",
        backref=backref("umbrella_organization", uselist=False),
    )


class Organization(OrganizationBase):
    __tablename__ = "x_organization"
    __mapper_args__ = {"polymorphic_identity": "organization"}

    id = db.Column(Integer, db.ForeignKey(OrganizationBase.id), primary_key=True)
    umbrella_accounting_id = db.Column(
        db.String(255),
        db.ForeignKey(UmbrellaOrganization.umbrella_accounting_id),
        nullable=False,
        index=True,
    )
and this eagerly loaded relationship
class UmbrellaOrganizationProperty(Base):
    __tablename__ = "x_umbrella_organization_property"

    id = Column(Integer(), primary_key=True)
    umbrella_organization_id = db.Column(
        Integer, db.ForeignKey(UmbrellaOrganization.id), nullable=False, index=True
    )
    type = db.Column(db.String(), nullable=False)
this query will produce invalid SQL:
query = (
    db.session.query(
        Organization,
        UmbrellaOrganization,
    )
    .join(
        UmbrellaOrganization,
        UmbrellaOrganization.umbrella_accounting_id == Organization.umbrella_accounting_id,
    )
)
y = query.limit(5)
Specifically, the main query will be duplicated, with the same alias 'anon_1' occurring twice:
ProgrammingError: (psycopg2.errors.DuplicateAlias) table name "anon_1" specified more than once
This only happens with limit() applied.
It appears that the polymorphic mapper wants to join the (eagerly loaded) UmbrellaOrganizationProperty to both UmbrellaOrganization and OrganizationBase, even though it does not belong on the latter. Without changing the model, the only way I have found to prevent this is to tell it not to load UmbrellaOrganizationProperty eagerly, by adding this query option:
.options(lazyload(UmbrellaOrganization.properties))
This is potentially problematic because client code may expect the properties in the results. What else can I do?
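For context, here is roughly how that workaround slots into the query above (a sketch; it assumes lazyload is imported from sqlalchemy.orm and that client code then loads .properties on demand):
from sqlalchemy.orm import lazyload

query = (
    db.session.query(Organization, UmbrellaOrganization)
    .join(
        UmbrellaOrganization,
        UmbrellaOrganization.umbrella_accounting_id == Organization.umbrella_accounting_id,
    )
    # Suppress the joined-eager load that triggers the duplicated "anon_1"
    # alias once limit() wraps the statement in a subquery.
    .options(lazyload(UmbrellaOrganization.properties))
    .limit(5)
)

rows = query.all()  # properties are now loaded lazily, per instance, on access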

Deleting many tables when one specific table is deleted in FLASK SQLAlchemy

I have an SQLAlchemy VolunteerClient Model which is a join table:
class VolunteerClient(Base):
    __tablename__ = 'volunteer_client'

    id = Column(Integer, primary_key=True, autoincrement=True, index=True)
    volunteer_id = Column(Integer, ForeignKey('provider_user.user_id', onupdate='CASCADE', ondelete='RESTRICT'), unique=True)
    client_id = Column(Integer, ForeignKey('user.id', onupdate='CASCADE', ondelete='RESTRICT'), unique=True)
and a VolunteerReport model:
class VolunteerReport(Base):
    __tablename__ = 'volunteer_report'

    id = Column(Integer, primary_key=True, autoincrement=True, index=True)
    volunteer_id = Column(Integer, ForeignKey('volunteer_client.volunteer_id', cascade="all, delete"))
    client_id = Column(Integer, ForeignKey('volunteer_client.client_id', cascade="all, delete"))
    report = Column(String(255), nullable=False)
    report_category = Column(String(255), nullable=False)
If I were to delete a VolunteerClient row with a specific volunteer_id and client_id set (which essentially unassigns a volunteer from a client but does not actually delete the users they represent),
Example: I delete the VolunteerClient row where volunteer_id = 1 and client_id = 1,
I want any and all VolunteerReport rows where volunteer_id = 1 and client_id = 1 to be deleted as well. Have I set this up correctly with the FK references to volunteer_client and the cascade='all, delete'?
Any advice would be awesome.
To automatically delete child rows when a parent is deleted, you need to set ondelete='CASCADE' on the foreign key defined in the child table. In this case the foreign key is composite, as it consists of volunteer_id and client_id, which means you also need a unique constraint over those columns in the parent. This simplified version of your models shows how it would work (I've removed the FK definitions from VolunteerClient that are shown in the question).
import sqlalchemy as sa


class VolunteerClient(Base):
    __tablename__ = 'volunteer_client'

    id = sa.Column(sa.Integer, primary_key=True)
    volunteer_id = sa.Column(sa.Integer)
    client_id = sa.Column(sa.Integer)

    __table_args__ = (sa.UniqueConstraint(volunteer_id, client_id),)


class VolunteerReport(Base):
    __tablename__ = 'volunteer_report'

    id = sa.Column(sa.Integer, primary_key=True)
    volunteer_id = sa.Column(sa.Integer)
    client_id = sa.Column(sa.Integer)

    __table_args__ = (
        sa.ForeignKeyConstraint(
            [volunteer_id, client_id],
            [VolunteerClient.volunteer_id, VolunteerClient.client_id],
            ondelete='CASCADE',
        ),
    )
You can also configure delete cascades on SQLAlchemy relationships, which give you more control over what happens when a parent row is deleted. As you don't seem to be using relationships, and the database cascade does what you want, I won't cover that in this answer.
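For completeness, the relationship-level alternative might look roughly like this; it is a sketch, not something the answer covers, and it assumes the composite ForeignKeyConstraint above is in place so SQLAlchemy can infer the join (the reports name is illustrative):
from sqlalchemy.orm import relationship


class VolunteerClient(Base):
    __tablename__ = 'volunteer_client'

    id = sa.Column(sa.Integer, primary_key=True)
    volunteer_id = sa.Column(sa.Integer)
    client_id = sa.Column(sa.Integer)

    __table_args__ = (sa.UniqueConstraint(volunteer_id, client_id),)

    # Hypothetical collection: with this cascade, deleting a VolunteerClient
    # through the session also deletes its VolunteerReport rows via the ORM,
    # without relying on the database-level ON DELETE CASCADE.
    reports = relationship('VolunteerReport', cascade='all, delete-orphan')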

Flask-Admin with additional field in relationship Many to Many

I have three tables: "Product", "Ingredient" and "ProductIngredient".
class ProductIngredient(db.Model):
    __tablename__ = "product_ingredient"

    id = db.Column(db.Integer(), primary_key=True)
    product_id = db.Column('product_id', db.Integer, db.ForeignKey('product.id'))
    ingredient_id = db.Column('ingredient_id', db.Integer, db.ForeignKey('ingredient.id'))
    amount = db.Column(db.DECIMAL(10, 3))


class Ingredient(db.Model):
    __tablename__ = "ingredient"

    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(50))
    desc = db.Column(db.Text)


class Product(db.Model):
    __tablename__ = "product"

    id = db.Column(db.Integer, primary_key=True)
    desc = db.Column(db.String(20))
    ingredients = db.relationship('Ingredient', secondary='product_ingredient', backref=db.backref('product', lazy='dynamic'))
Note that the ProductIngredient class has an amount field, which holds the quantity of each ingredient that makes up each product.
When I set the fields in the admin view, I get the following error:
class ProdMV(ModelView):
    column_display_pk = False
    form_columns = [Product.desc, Ingredient.name, ProductIngredient.amount]
    column_auto_select_related = True
    column_hide_backrefs = False


admin.add_view(ProdMV(Product, db.session))
builtins.Exception
Exception: form column is located in another table and requires inline_models: Ingrediente.desc
I researched a lot about inline_models but found nothing that solved this problem
The problem is that a Product object can have several ingredients, and they cannot be specified in a single form field, so flask_admin hints that you should use inline_models. You need to add relationships to the ProductIngredient model:
class ProductIngredient(db.Model):
    __tablename__ = 'product_ingredient'

    id = db.Column(db.Integer, primary_key=True)
    product_id = db.Column(db.Integer, db.ForeignKey('product.id'))
    ingredient_id = db.Column(db.Integer, db.ForeignKey('ingredient.id'))
    amount = db.Column(db.DECIMAL(10, 3))

    product = db.relationship('Product', backref='products')
    ingredient = db.relationship('Ingredient', backref='ingredients')
And your ProductMV will look something like this:
class ProductMV(ModelView):
    form_columns = ('desc',)
    inline_models = ((
        ProductIngredient,
        {
            'form_columns': ('id', 'amount', 'ingredient'),
        }
    ),)
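Registering the view works the same way as in the question (a usage sketch; admin and db.session are assumed to exist already):
# Replaces the original ProdMV registration; Flask-Admin renders the
# ProductIngredient rows as an inline form under each Product.
admin.add_view(ProductMV(Product, db.session))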
If you did not have the ProductIngredient.amount field, you could simply write:
form_columns = [Product.desc, Product.ingredients]
This renders a field that lets you add items to it like tags.

SQLAlchemy One-To-One and One-To-Many at the same time (AmbiguousForeignKeysError)

I'm working with SQLAlchemy, and I'm trying to achieve one-to-one and one-to-many relationships on the same parent class.
This is for the simplicity of keeping track of the main child entity.
Unfortunately I'm getting an error:
AmbiguousForeignKeysError: Could not determine join condition between
parent/child tables on relationship Customer.contact - there are
multiple foreign key paths linking the tables. Specify the
'foreign_keys' argument, providing a list of those columns which
should be counted as containing a foreign key reference to the parent
table.
Am I doing something wrong or it is not possible?
Here is a code example:
class Customer(Base):
    __tablename__ = 'customer'

    id = Column(Integer, primary_key=True)
    contact_id = Column(Integer, ForeignKey('contact.id'))
    address_id = Column(Integer, ForeignKey('address.id'))

    contact = relationship('Contact', backref=backref("contact", uselist=False))
    address = relationship('Address', backref=backref("address", uselist=False))
    contact_list = relationship('Contact')
    address_list = relationship('Address')


class Contact(Base):
    __tablename__ = 'contact'

    id = Column(Integer, primary_key=True)
    customer_id = Column(Integer, ForeignKey(
        'customer.id',
        use_alter=True, name='fk_contact_customer_id_customer',
        onupdate='CASCADE', ondelete='SET NULL'
    ))
    first_name = Column(String(32))
    last_name = Column(String(32))


class Address(Base):
    __tablename__ = 'address'

    id = Column(Integer, primary_key=True)
    customer_id = Column(Integer, ForeignKey(
        'customer.id',
        use_alter=True, name='fk_address_customer_id_customer',
        onupdate='CASCADE', ondelete='SET NULL'
    ))
    label = Column(String(32))
Thanks
Apparently the solution was later in the documentation:
SQLAlchemy does not know which foreign key to use, so you have to specify them explicitly as Column objects via relationship(foreign_keys=[...]), like so:
class Contact(Base):
    # ...
    customer_id = Column(Integer, ForeignKey(
        'customer.id',
        use_alter=True, name='fk_contact_customer_id_customer',
        onupdate='CASCADE', ondelete='SET NULL'
    ))
    # ...


class Customer(Base):
    # ...
    contact_id = Column(Integer, ForeignKey('contact.id'))
    # ...
    contact = relationship('Contact', uselist=False, foreign_keys=[contact_id])
    contact_list = relationship('Contact', foreign_keys=[Contact.customer_id])
    # ...
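A small usage sketch of what the two relationships then give you (an illustration only; it assumes the full column definitions from the question, a configured session, and that Address is handled the same way):
customer = Customer()
primary = Contact(first_name="Ada", last_name="Lovelace")
secondary = Contact(first_name="Alan", last_name="Turing")

# one-to-one: driven by customer.contact_id -> contact.id
customer.contact = primary

# one-to-many: driven by contact.customer_id -> customer.id
customer.contact_list = [primary, secondary]

session.add(customer)
session.commit()

print(customer.contact.first_name)   # "Ada"
print(len(customer.contact_list))    # 2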

sqlalchemy constraint in models inheritance

I have two simple models:
class Message(Backend.instance().get_base()):
    __tablename__ = 'messages'

    id = Column(Integer, primary_key=True, autoincrement=True)
    sender_id = Column(Integer, ForeignKey('users.id'))
    content = Column(String, nullable=False)


class ChatMessage(Message):
    __tablename__ = 'chat_messages'

    id = Column(Integer, ForeignKey('messages.id'), primary_key=True)
    receiver_id = Column(Integer, ForeignKey('users.id'))
How do I define the constraint sender_id != receiver_id?
This doesn't seem to work with joined table inheritance; I've tried it, and it complains that the column sender_id from Message doesn't exist when creating the constraint in ChatMessage.
This complaint makes sense: sender_id isn't in the same table as receiver_id when the tables are created, so the foreign key relationship would need to be followed to check the constraint.
One option is to make ChatMessage a single table.
Use CheckConstraint, placed in __table_args__.
class ChatMessage(Base):
    __tablename__ = 'chat_messages'

    id = sa.Column(sa.Integer, primary_key=True)
    sender_id = sa.Column(sa.Integer, sa.ForeignKey(User.id))
    receiver_id = sa.Column(sa.Integer, sa.ForeignKey(User.id))
    content = sa.Column(sa.String, nullable=False)

    __table_args__ = (
        sa.CheckConstraint(receiver_id != sender_id),
    )
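A quick sketch of the effect, assuming the table has been created on a backend that enforces CHECK constraints (the session/engine setup is not shown in the answer):
from sqlalchemy.exc import IntegrityError

ok = ChatMessage(sender_id=1, receiver_id=2, content="hi")
session.add(ok)
session.commit()  # accepted

bad = ChatMessage(sender_id=1, receiver_id=1, content="talking to myself")
session.add(bad)
try:
    session.commit()
except IntegrityError:
    # the CHECK (receiver_id != sender_id) constraint rejects the row
    session.rollback()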
