I want to have a base entity with a deleted field which marks a record as deleted, and I have two subclasses, each of which should have its own table with all of its own columns:
from elixir import *
from sqlalchemy import create_engine
class Catalog(Entity):
    using_options(inheritance='concrete')
    deleted = Boolean

class Contact(Catalog):
    using_options(inheritance='concrete')
    name = Field(String(60))

class Location(Catalog):
    using_options(inheritance='concrete')
    name = Field(String(100))
setup_all()
metadata.bind = create_engine('sqlite:///', echo=True)
metadata.create_all()
And the result:
CREATE TABLE __main___catalog (
    id INTEGER NOT NULL,
    PRIMARY KEY (id)
)

CREATE TABLE __main___contact (
    id INTEGER NOT NULL,
    name VARCHAR(60),
    PRIMARY KEY (id)
)

CREATE TABLE __main___location (
    id INTEGER NOT NULL,
    name VARCHAR(100),
    PRIMARY KEY (id)
)
Questions:
How can I avoid creating a table for the base entity? Solved: using_options(abstract=True).
Why is the deleted field not in the created tables? Also solved: I forgot to wrap it in Field().
How can I avoid typing using_options(inheritance='concrete') in each subclass but still have concrete inheritance? Is there a way to make it the default for all subclasses?
This works:
class Catalog(Entity):
    deleted = Field(Boolean)
    using_options(abstract=True, inheritance='concrete')

class Contact(Catalog):
    name = Field(String(60))

class Location(Catalog):
    name = Field(String(100))
and creates the following tables:
CREATE TABLE __main___contact (
    id INTEGER NOT NULL,
    deleted BOOLEAN,
    name VARCHAR(60),
    PRIMARY KEY (id),
    CHECK (deleted IN (0, 1))
)

CREATE TABLE __main___location (
    id INTEGER NOT NULL,
    deleted BOOLEAN,
    name VARCHAR(100),
    PRIMARY KEY (id),
    CHECK (deleted IN (0, 1))
)
I'm a total newbie to Alembic, SQLAlchemy, and Python. I've gotten to the point where Alembic is comparing existing objects in the database against the declarative classes I've made, and there's one pesky index (for a foreign key) that Alembic refuses to leave in place in my initial migration.
I'm completely at a loss as to why the migration keeps trying to drop and re-create this index, which, if I leave it in the migration, I'll wager is going to fail anyway. Plus, if I don't reconcile the class to the database, this will likely come up every time I auto-generate migrations.
Here's the pertinent part of what is in the upgrade method:
op.drop_index(
    'vndr_prod_tp_cat_category_fk_idx',
    table_name='vendor_product_types_magento_categories'
)
In the downgrade method:
op.create_index(
    'vndr_prod_tp_cat_category_fk_idx',
    'vendor_product_types_magento_categories',
    ['magento_category_id'],
    unique=False
)
...here's the DDL for the table as it exists in MySQL:
CREATE TABLE `vendor_product_types_magento_categories` (
    `id` bigint(20) unsigned NOT NULL AUTO_INCREMENT,
    `vendor_product_type_id` bigint(20) unsigned NOT NULL,
    `magento_category_id` bigint(20) unsigned NOT NULL,
    `sequence` tinyint(3) unsigned NOT NULL,
    `created_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
    `updated_at` timestamp NULL DEFAULT NULL ON UPDATE CURRENT_TIMESTAMP,
    PRIMARY KEY (`id`),
    UNIQUE KEY `vendor_product_types_magento_categories_uq` (`vendor_product_type_id`,`magento_category_id`,`sequence`),
    KEY `vndr_prod_tp_cat_category_fk_idx` (`magento_category_id`),
    CONSTRAINT `vndr_prod_tp_cat_magento_category_fk` FOREIGN KEY (`magento_category_id`) REFERENCES `magento_categories` (`id`) ON DELETE NO ACTION ON UPDATE NO ACTION,
    CONSTRAINT `vndr_prod_tp_cat_product_type_fk` FOREIGN KEY (`vendor_product_type_id`) REFERENCES `vendor_product_types` (`id`) ON DELETE NO ACTION ON UPDATE NO ACTION
) ENGINE=InnoDB AUTO_INCREMENT=101 DEFAULT CHARSET=utf8
...and here's the class I wrote:
from sqlalchemy import Column, Integer, UniqueConstraint, ForeignKeyConstraint, Index
from sqlalchemy.dialects.mysql import TIMESTAMP
from sqlalchemy.sql import text
from .base import Base
class VendorProductTypesMagentoCategories(Base):
    __tablename__ = 'vendor_product_types_magento_categories'

    id = Column(Integer, primary_key=True)
    vendor_product_type_id = Column(
        Integer,
        nullable=False
    )
    magento_category_id = Column(
        Integer,
        nullable=False
    )
    sequence = Column(Integer, nullable=False)
    created_at = Column(TIMESTAMP, server_default=text('CURRENT_TIMESTAMP'), nullable=False)
    updated_at = Column(
        TIMESTAMP,
        server_default=text('NULL ON UPDATE CURRENT_TIMESTAMP'),
        nullable=True
    )

    __table_args__ = (
        UniqueConstraint(
            'vendor_product_type_id',
            'magento_category_id',
            'sequence',
            name='vendor_product_types_magento_categories_uq'
        ),
        ForeignKeyConstraint(
            ('vendor_product_type_id',),
            ('vendor_product_types.id',),
            name='vndr_prod_tp_cat_product_type_fk'
        ),
        ForeignKeyConstraint(
            ('magento_category_id',),
            ('magento_categories.id',),
            name='vndr_prod_tp_cat_category_fk_idx'
        ),
    )

    def __repr__(self):
        return '<VendorProductTypesMagentoCategories (id={}, vendor_name={}, product_type={})>'.format(
            self.id,
            self.vendor_name,
            self.product_type
        )
You define your category foreign key in your Python code as
ForeignKeyConstraint(
    ('magento_category_id',),
    ('magento_categories.id',),
    name='vndr_prod_tp_cat_category_fk_idx'
)
Here you use vndr_prod_tp_cat_category_fk_idx as the name of the foreign key constraint, not as the name of the underlying index, which explains why SQLAlchemy wants to drop the index.
You should use vndr_prod_tp_cat_magento_category_fk as the foreign key name (matching the DDL above) and have a separate Index() construct named vndr_prod_tp_cat_category_fk_idx to create the index.
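Concretely, a sketch of the corrected __table_args__ (constraint and index names taken from the DDL above; UniqueConstraint, ForeignKeyConstraint, and Index are already imported in the question's code):

__table_args__ = (
    UniqueConstraint(
        'vendor_product_type_id', 'magento_category_id', 'sequence',
        name='vendor_product_types_magento_categories_uq'
    ),
    ForeignKeyConstraint(
        ('vendor_product_type_id',), ('vendor_product_types.id',),
        name='vndr_prod_tp_cat_product_type_fk'
    ),
    ForeignKeyConstraint(
        ('magento_category_id',), ('magento_categories.id',),
        name='vndr_prod_tp_cat_magento_category_fk'  # the FK constraint keeps the FK name
    ),
    # ...and the index is declared separately, under its own name
    Index('vndr_prod_tp_cat_category_fk_idx', 'magento_category_id'),
)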
I am using SQLAlchemy automap. When I described the structure declaratively, I got a backref property:
The above configuration establishes a collection of Address objects on User called User.addresses.
But now with automap my code looks like this:
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.automap import automap_base

engine = create_engine('sqlite:///sql_test.db', echo=True)
Session = sessionmaker(bind=engine)
sess = Session()

Base = automap_base()
Base.prepare(engine, reflect=True)

User = Base.classes.Users
addresses = Base.classes.addresses

answer = sess.query(User).filter(User.id == 1).first()
print('type:', type(answer))  # will print: class User
But how can I get access to addresses here? I tried answer.addresses and so on, but it doesn't work.
Users:

CREATE TABLE "Users" (
    "id" Integer PRIMARY KEY,
    "name" Text,
    CONSTRAINT "unique_id" UNIQUE ("id"))

Addresses:

CREATE TABLE "addresses" (
    "id" Integer PRIMARY KEY,
    "email" Text,
    "user_id" Integer,
    CONSTRAINT "lnk_Users_addresses" FOREIGN KEY ("user_id") REFERENCES "Users" ("id"),
    CONSTRAINT "unique_id" UNIQUE ("id"))
The default naming scheme for collection relationships is:
return referred_cls.__name__.lower() + "_collection"
So given that you have a model class addresses, your relationship should be
User.addresses_collection
If you wish to alter this behaviour, pass your own implementation as the name_for_collection_relationship= keyword argument to AutomapBase.prepare().
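For instance, if you would rather have plain User.addresses, a minimal sketch of a custom naming callback (the signature base, local_cls, referred_cls, constraint is from the automap documentation):

def name_for_collection_relationship(base, local_cls, referred_cls, constraint):
    # Drop the "_collection" suffix, giving e.g. User.addresses
    return referred_cls.__name__.lower()

Base = automap_base()
Base.prepare(engine, reflect=True,
             name_for_collection_relationship=name_for_collection_relationship)

answer = sess.query(Base.classes.Users).filter_by(id=1).first()
print(answer.addresses)  # the collection is now available under the custom name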
I'm trying to use SQLAlchemy-Utils' @aggregated decorator to define an attribute (gross_amount) for a class, Receipt. This gross_amount attribute is the sum of Item.gross_amount for all Item instances associated with the Receipt instance by a foreign id.
I.e., a receipt is made up of items, and I want to define a receipt gross_amount value which is just the total $ of all of the items on the receipt.
I've modeled my code after this document http://sqlalchemy-utils.readthedocs.io/en/latest/aggregates.html
So it looks like this...
from sqlalchemy import Column, Integer, ForeignKey
from sqlalchemy.sql import func
from sqlalchemy import orm
from sqlalchemy_utils import aggregated  # needed for the decorator below

class Receipt(Base):
    __tablename__ = "receipts"
    __table_args__ = {'extend_existing': True}

    id = Column(Integer, index=True, primary_key=True, nullable=False)

    @aggregated('itemz', Column(Integer))
    def gross_amount(self):
        return func.sum(Item.gross_amount)

    itemz = orm.relationship(
        'Item',
        backref='receipts'
    )

class Item(Base):
    __tablename__ = "items"

    id = Column(Integer, index=True, primary_key=True, nullable=False)

    # FE relevant
    gross_amount = Column(Integer)
    receipt_id = Column(Integer, ForeignKey("receipts.id"), nullable=False)
In my migration, am I supposed to have a column in the receipts table for gross_amount?
1) When I DO define this column in the receipts table, any Receipt.gross_amount for any instance just points to the gross_amount values defined in the receipts table.
2) When I DO NOT define this column in the receipts table, I get a SQLAlchemy error whenever I execute a SELECT against the database:
ProgrammingError: (psycopg2.ProgrammingError) column receipts.gross_amount does not exist
FWIW, my SQLAlchemy package is the latest distributed through pip...
SQLAlchemy==1.1.11
SQLAlchemy-Utils==0.32.14
And my local db on which I'm running this for now is PostgreSQL 9.6.2
What am I doing wrong here? Any patient help would be greatly appreciated!
Yes, you do need to add the column to the table:
CREATE TABLE receipts (
    id INTEGER NOT NULL,
    gross_amount INTEGER, -- <<< See, it's here :)
    PRIMARY KEY (id)
);

INSERT INTO receipts VALUES(1,7);
INSERT INTO receipts VALUES(2,7);

CREATE TABLE items (
    id INTEGER NOT NULL,
    gross_amount INTEGER,
    receipt_id INTEGER NOT NULL,
    PRIMARY KEY (id),
    FOREIGN KEY(receipt_id) REFERENCES receipts (id)
);
Tested with this self-contained snippet:
from sqlalchemy import Column, Integer, ForeignKey, create_engine, orm
from sqlalchemy.orm import sessionmaker
from sqlalchemy.sql import func
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy_utils import aggregated

Base = declarative_base()

class Receipt(Base):
    __tablename__ = "receipts"
    __table_args__ = {'extend_existing': True}

    id = Column(Integer, index=True, primary_key=True, nullable=False)

    @aggregated('itemz', Column(Integer))
    def gross_amount(self):
        return func.sum(Item.gross_amount)

    itemz = orm.relationship('Item', backref='receipts')

class Item(Base):
    __tablename__ = "items"

    id = Column(Integer, index=True, primary_key=True, nullable=False)
    gross_amount = Column(Integer)
    receipt_id = Column(Integer, ForeignKey("receipts.id"), nullable=False)

    def __init__(self, amount):
        self.gross_amount = amount

engine = create_engine('sqlite:///xxx.db', echo=True)
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()

receipt = Receipt()
receipt.itemz.append(Item(5))
receipt.itemz.append(Item(2))
session.add(receipt)
session.commit()

print(receipt.gross_amount)
Of course, there's also another approach, called hybrid_property, which basically allows you to do both ORM- and database-level queries without adding an extra column to your database:
from sqlalchemy import select
from sqlalchemy.ext.hybrid import hybrid_property

class Receipt(Base):
    # ... columns and relationship as above ...

    @hybrid_property
    def gross_sum(self):
        return sum(i.gross_amount for i in self.itemz)

    @gross_sum.expression
    def gross_sum(cls):
        return select([func.sum(Item.gross_amount)]).\
            where(Item.receipt_id == cls.id).\
            label('gross_sum')
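With the hybrid in place, the same attribute works at both levels; a usage sketch, assuming the session and models from the snippet above:

print(receipt.gross_sum)  # Python level: sums the loaded items

# SQL level: the expression form compiles into a subquery
big = session.query(Receipt).filter(Receipt.gross_sum > 5).all()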
The reason you're getting this error is that the new column you're adding (gross_amount) has not been created in the receipts table in the database.
Meaning, your current database table only has one created column (id). For the aggregated column to work, it needs to contain an additional column called gross_amount.
This additional column has to allow null values.
One way to go about doing that is through SQL directly in PostgreSQL:
ALTER TABLE receipts ADD gross_amount int;
Alternatively, if there's no data yet, you can drop and recreate the table via SQLAlchemy. It should create this extra column automatically.
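Since the question involves migrations, the same change as an Alembic operation would look something like this (a sketch; the revision boilerplate is omitted):

import sqlalchemy as sa
from alembic import op

def upgrade():
    # Backing column for the aggregate; it must allow NULLs
    op.add_column('receipts', sa.Column('gross_amount', sa.Integer(), nullable=True))

def downgrade():
    op.drop_column('receipts', 'gross_amount')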
I'm not sure what you mean by the last part:
When I DO define this column in the receipts table, any
Receipt.gross_amount for any instance just points to the gross_amount
values defined in the receipts table.
That's where it's supposed to point. Do you mean that it doesn't contain any values, even though there are values for this receipt's items in Item? If so, I would double-check that this is the case (and, per their examples, refresh the database session before seeing the results).
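For example (a sketch, reusing the snippet above):

session.add(receipt)
session.commit()
session.refresh(receipt)     # re-reads the row so the freshly aggregated value is loaded
print(receipt.gross_amount)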
Let's assume the following two MySQL tables:
-- -----------------------------------------------------
-- Table `mydb`.`Person`
-- -----------------------------------------------------
CREATE TABLE IF NOT EXISTS `mydb`.`Person` (
    `id` INT NOT NULL,
    `first_name` VARCHAR(45) NOT NULL,
    `last_name` VARCHAR(45) NULL,
    PRIMARY KEY (`id`))
ENGINE = InnoDB;

-- -----------------------------------------------------
-- Table `mydb`.`Parents`
-- -----------------------------------------------------
CREATE TABLE IF NOT EXISTS `mydb`.`Parents` (
    `person_id` INT NOT NULL,
    `mother` INT NOT NULL,
    `father` INT NOT NULL,
    PRIMARY KEY (`person_id`),
    INDEX `mother_idx` (`mother` ASC),
    INDEX `father_fk_idx` (`father` ASC),
    CONSTRAINT `person_fk`
        FOREIGN KEY (`person_id`)
        REFERENCES `mydb`.`Person` (`id`)
        ON DELETE NO ACTION
        ON UPDATE NO ACTION,
    CONSTRAINT `mother_fk`
        FOREIGN KEY (`mother`)
        REFERENCES `mydb`.`Person` (`id`)
        ON DELETE NO ACTION
        ON UPDATE NO ACTION,
    CONSTRAINT `father_fk`
        FOREIGN KEY (`father`)
        REFERENCES `mydb`.`Person` (`id`)
        ON DELETE NO ACTION
        ON UPDATE NO ACTION)
ENGINE = InnoDB;
There are 3 one-to-many relationships between the two tables.
The model classes to be used by SQLAlchemy can be something similar to:

class Person(Base):
    id = Column(Integer, primary_key=True)
    first_name = Column(String)
    last_name = Column(String)

class Parents(Base):
    person_id = Column(Integer, ForeignKey('person.id'), primary_key=True)
    mother_id = Column(Integer, ForeignKey('person.id'))
    father_id = Column(Integer, ForeignKey('person.id'))

And here are the three backref relationships to be added to the Parents class:

person = relationship(Person, backref=backref('parents', uselist=True))
mother = relationship(Person, backref=backref('mothers', uselist=True))
father = relationship(Person, backref=backref('fathers', uselist=True))
Unfortunately, these relationships are not valid; there is no error while creating the tables, but the following appears while trying to insert:
sqlalchemy.exc.AmbiguousForeignKeysError: Could not determine join condition between parent/child tables on relationship ...
Being very new to SQLAlchemy, I'm having trouble with this situation. Please advise.
[Edit 1]
Small corrections to the code.
I've found a solution here. The key is to use the foreign_keys argument.
Therefore, the relationships can be something similar to:
person = relationship(Person, backref=backref('parents', uselist=True), foreign_keys=person_id)
mother = relationship(Person, backref=backref('mothers', uselist=True), foreign_keys=mother_id)
father = relationship(Person, backref=backref('fathers', uselist=True), foreign_keys=father_id)
(I've added this as an answer because it is a solution to the problem and it works. I don't know if this is the right way to do things in SQLAlchemy, therefore, I'm looking forward to any other answers/alternatives.)
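For completeness, here is a sketch of the full Parents class with the foreign_keys arguments in place (using the column names defined above):

from sqlalchemy import Column, Integer, ForeignKey
from sqlalchemy.orm import relationship, backref

class Parents(Base):
    person_id = Column(Integer, ForeignKey('person.id'), primary_key=True)
    mother_id = Column(Integer, ForeignKey('person.id'))
    father_id = Column(Integer, ForeignKey('person.id'))

    # Each relationship names the FK column it should join on, resolving the ambiguity
    person = relationship(Person, backref=backref('parents', uselist=True),
                          foreign_keys=[person_id])
    mother = relationship(Person, backref=backref('mothers', uselist=True),
                          foreign_keys=[mother_id])
    father = relationship(Person, backref=backref('fathers', uselist=True),
                          foreign_keys=[father_id])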
I have a model with a unique_together defined for 3 fields to be unique together:
class MyModel(models.Model):
    clid = models.AutoField(primary_key=True, db_column='CLID')
    csid = models.IntegerField(db_column='CSID')
    cid = models.IntegerField(db_column='CID')
    uuid = models.CharField(max_length=96, db_column='UUID', blank=True)

    class Meta(models.Meta):
        unique_together = [
            ["csid", "cid", "uuid"],
        ]
Now, if I attempt to save a MyModel instance with an existing csid+cid+uuid combination, I would get:
IntegrityError: (1062, "Duplicate entry '1-1-1' for key 'CSID'")
Which is correct. But, is there a way to customize that key name? (CSID in this case)
In other words, can I provide a name for a constraint listed in unique_together?
As far as I understand, this is not covered in the documentation.
It's not well documented, but depending on whether you are using Django 1.6 or 1.7, there are two ways you can do this:
In Django 1.6 you can override the unique_error_message, like so:
class MyModel(models.Model):
    clid = models.AutoField(primary_key=True, db_column='CLID')
    csid = models.IntegerField(db_column='CSID')
    cid = models.IntegerField(db_column='CID')
    # ....

    def unique_error_message(self, model_class, unique_check):
        if model_class == type(self) and unique_check == ("csid", "cid", "uuid"):
            return _('Your custom error')
        else:
            return super(MyModel, self).unique_error_message(model_class, unique_check)
Or in Django 1.7:
class MyModel(models.Model):
    clid = models.AutoField(primary_key=True, db_column='CLID')
    csid = models.IntegerField(db_column='CSID')
    cid = models.IntegerField(db_column='CID')
    uuid = models.CharField(max_length=96, db_column='UUID', blank=True)

    class Meta(models.Meta):
        unique_together = [
            ["csid", "cid", "uuid"],
        ]
        error_messages = {
            NON_FIELD_ERRORS: {
                'unique_together': "%(model_name)s's %(field_labels)s are not unique.",
            }
        }
Changing index name in ./manage.py sqlall output.
You could run ./manage.py sqlall yourself, add in the constraint name, and apply it manually instead of using syncdb.
$ ./manage.py sqlall test
BEGIN;
CREATE TABLE `test_mymodel` (
    `CLID` integer AUTO_INCREMENT NOT NULL PRIMARY KEY,
    `CSID` integer NOT NULL,
    `CID` integer NOT NULL,
    `UUID` varchar(96) NOT NULL,
    UNIQUE (`CSID`, `CID`, `UUID`)
)
;
COMMIT;
e.g.
$ ./manage.py sqlall test
BEGIN;
CREATE TABLE `test_mymodel` (
    `CLID` integer AUTO_INCREMENT NOT NULL PRIMARY KEY,
    `CSID` integer NOT NULL,
    `CID` integer NOT NULL,
    `UUID` varchar(96) NOT NULL,
    UNIQUE constraint_name (`CSID`, `CID`, `UUID`)
)
;
COMMIT;
Overriding BaseDatabaseSchemaEditor._create_index_name
The solution pointed out by @danihp is incomplete; it only works for field updates (BaseDatabaseSchemaEditor._alter_field).
The SQL I get by overriding _create_index_name is:
BEGIN;
CREATE TABLE "testapp_mymodel" (
"CLID" integer NOT NULL PRIMARY KEY AUTOINCREMENT,
"CSID" integer NOT NULL,
"CID" integer NOT NULL,
"UUID" varchar(96) NOT NULL,
UNIQUE ("CSID", "CID", "UUID")
)
;
COMMIT;
Overriding BaseDatabaseSchemaEditor.create_model
based on https://github.com/django/django/blob/master/django/db/backends/schema.py
class BaseDatabaseSchemaEditor(object):
    # Overrideable SQL templates
    sql_create_table_unique = "UNIQUE (%(columns)s)"
    sql_create_unique = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s UNIQUE (%(columns)s)"
    sql_delete_unique = "ALTER TABLE %(table)s DROP CONSTRAINT %(name)s"
and this is the piece in create_model that is of interest:
# Add any unique_togethers
for fields in model._meta.unique_together:
    columns = [model._meta.get_field_by_name(field)[0].column for field in fields]
    column_sqls.append(self.sql_create_table_unique % {
        "columns": ", ".join(self.quote_name(column) for column in columns),
    })
Conclusion
You could:
- override create_model to use _create_index_name for unique_together constraints, or
- modify the sql_create_table_unique template to include a name parameter.
You may also be able to check a possible fix on this ticket:
https://code.djangoproject.com/ticket/24102
The integrity error is raised by the database, not by Django:
create table t ( a int, b int , c int);
alter table t add constraint u unique ( a,b,c); <-- 'u'
insert into t values ( 1,2,3);
insert into t values ( 1,2,3);
Duplicate entry '1-2-3' for key 'u' <---- 'u'
That means that you need to create the constraint with the desired name in the database. But it is Django, in its migrations, that names the constraint. Look into _create_unique_sql:
def _create_unique_sql(self, model, columns):
    return self.sql_create_unique % {
        "table": self.quote_name(model._meta.db_table),
        "name": self.quote_name(self._create_index_name(model, columns, suffix="_uniq")),
        "columns": ", ".join(self.quote_name(column) for column in columns),
    }
It is _create_index_name that holds the algorithm for naming constraints:
def _create_index_name(self, model, column_names, suffix=""):
"""
Generates a unique name for an index/unique constraint.
"""
# If there is just one column in the index, use a default algorithm from Django
if len(column_names) == 1 and not suffix:
return truncate_name(
'%s_%s' % (model._meta.db_table, self._digest(column_names[0])),
self.connection.ops.max_name_length()
)
# Else generate the name for the index using a different algorithm
table_name = model._meta.db_table.replace('"', '').replace('.', '_')
index_unique_name = '_%x' % abs(hash((table_name, ','.join(column_names))))
max_length = self.connection.ops.max_name_length() or 200
# If the index name is too long, truncate it
index_name = ('%s_%s%s%s' % (
table_name, column_names[0], index_unique_name, suffix,
)).replace('"', '').replace('.', '_')
if len(index_name) > max_length:
part = ('_%s%s%s' % (column_names[0], index_unique_name, suffix))
index_name = '%s%s' % (table_name[:(max_length - len(part))], part)
# It shouldn't start with an underscore (Oracle hates this)
if index_name[0] == "_":
index_name = index_name[1:]
# If it's STILL too long, just hash it down
if len(index_name) > max_length:
index_name = hashlib.md5(force_bytes(index_name)).hexdigest()[:max_length]
# It can't start with a number on Oracle, so prepend D if we need to
if index_name[0].isdigit():
index_name = "D%s" % index_name[:-1]
return index_name
For the current Django version (1.7), the constraint name for a composite unique constraint looks like:
>>> _create_index_name( 'people', [ 'c1', 'c2', 'c3'], '_uniq' )
'myapp_people_c1_d22a1efbe4793fd_uniq'
You should override _create_index_name in some way to change the algorithm. One way, maybe, is writing your own DB backend inheriting from the MySQL one and overriding _create_index_name in your DatabaseSchemaEditor in its schema.py (not tested).
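An untested sketch of that idea for Django 1.7 (the package path mysite/mysql_named is hypothetical; point DATABASES['default']['ENGINE'] at it):

# mysite/mysql_named/base.py
from django.db.backends.mysql.base import DatabaseWrapper as MySQLDatabaseWrapper
from django.db.backends.mysql.schema import DatabaseSchemaEditor as MySQLSchemaEditor

class NamedIndexSchemaEditor(MySQLSchemaEditor):
    def _create_index_name(self, model, column_names, suffix=""):
        # e.g. "test_mymodel_csid_cid_uuid_uniq" instead of the hashed default
        name = '%s_%s%s' % (model._meta.db_table, '_'.join(column_names), suffix)
        return name[:self.connection.ops.max_name_length() or 200]

class DatabaseWrapper(MySQLDatabaseWrapper):
    def schema_editor(self, *args, **kwargs):
        return NamedIndexSchemaEditor(self, *args, **kwargs)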
I believe you have to do that in your database.
MySQL:
ALTER TABLE `votes` ADD UNIQUE `unique_index`(`user`, `email`, `address`);
The error would then say ... for key 'unique_index'.
One solution is to catch the IntegrityError at save() and then build a custom error message as you want, as below.
try:
    obj = MyModel()
    obj.csid = 1
    obj.cid = 1
    obj.uuid = 1
    obj.save()
except IntegrityError:
    message = "IntegrityError: Duplicate entry '1-1-1' for key 'CSID', 'cid', 'uuid'"
Now you can display this message as the error message.