AttributeError in defining SQLAlchemy M:N relationship - python

I have defined an M:N relationship as:
association_table = Table('content.association', base.metadata,
    Column('content_id', UUID(as_uuid=True),
           ForeignKey('content.content.content_id')),
    Column('message_id', UUID(as_uuid=True),
           ForeignKey('messages.message.message_id'))
)
class Content(base):
    __tablename__ = "content"
    __table_args__ = {"schema": "content"}
    id = Column("content_id", UUID(as_uuid=True), primary_key=True)
    children = relationship("Messages", secondary=association_table,
                            back_populates="parents")

class Messages(base):
    __table_args__ = {"schema": "messages"}
    __tablename__ = 'message'
    id = Column('message_id', UUID(as_uuid=True), primary_key=True)
    url = Column(String)
    parents = relationship(
        Content,
        secondary=association_table,
        back_populates="children")
When I try to run this query:
def get_all(self):
    return self.session.query(Content.children).all()
I get:
AttributeError: 'ContentRepository' object has no attribute 'children'
class ContentRepository:
    def __init__(self, session):
        self.session = session

    def get_all(self):
        return self.session.query(Content).all()

    def get_children(self):
        return self.session.query(Content.children).all()
get_children raises the error; get_all was working fine before that.
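For reference, here is a minimal sketch of how the related Messages are usually fetched through the relationship, assuming the mappings above configure correctly and reusing the repository's session (selectinload is one of several eager-loading options):

from sqlalchemy.orm import selectinload

def get_children(self):
    # Load Content rows together with their related Messages in one pass,
    # then read the .children collection on each instance.
    contents = (self.session.query(Content)
                .options(selectinload(Content.children))
                .all())
    return [child for content in contents for child in content.children]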

Related

How to effectively query associated objects in SQLAlchemy ORM (SQLite3)?

I am trying to create a working relationship between two objects, Mentor and Student, and to be able to retrieve Student objects from their Mentor:
class Mentor_Student(Base):
    __tablename__ = 'mentor_student'
    __table_args__ = {'extend_existing': True}
    mentor_id = Column("mentor_id", Integer, ForeignKey('mentor.mentor_id'), primary_key=True)
    student_id = Column("student_id", Integer, ForeignKey('student.student_id'), primary_key=True)

    def __init__(self, student_id):
        self.mentor_id = random.choice(list(session.query(Mentor.mentor_id)))[0]
        self.student_id = student_id
        session.add(self)
        session.commit()

class Mentor(Base):
    __tablename__ = 'mentor'
    __table_args__ = {'extend_existing': True}
    mentor_id = Column(Integer, primary_key=True)
    name = Column(String(255), nullable=False)
    phone = Column(String(20), nullable=False)
    mentees = relationship(
        "Mentor",
        secondary='mentor_student',
        backref=backref("student", lazy='joined'))

    def __init__(self):
        self.name = Faker().name()
        self.phone = Faker().phone_number()
        session.add(self)
        session.commit()

    def __str__(self):
        return self.name

class Student(Base):
    __tablename__ = 'student'
    __table_args__ = {'extend_existing': True}
    student_id = Column(Integer, primary_key=True)
    name = Column(String(255), nullable=False)
    phone = Column(String(20), nullable=False)
    mentors = relationship(
        "Student",
        secondary='mentor_student',
        backref=backref("mentor",
                        lazy='joined'))

    def __init__(self):
        self.name = Faker().name()
        self.phone = Faker().phone_number()
        session.add(self)
        session.commit()
        Mentor_Student(self.student_id)

    def __str__(self):
        return self.name
Every mentor has multiple students. I would like to create a query that will return the students (mentees) associated with each Mentor, so that something like:
for x, y in session.query(Mentor, Mentor.mentees).all():
    print(x, ':', y)
could produce the result:
MentorObject : [StudentObject, StudentObject, StudentObject]
Right now the closest I can get is printing out a single mentor object and a single student object associated with it. I could also hard-code it with the accumulator pattern into a dictionary:
maps = {}
for student, mentor in session.query(Student, Mentor).filter(
        Student.student_id == Mentor_Student.student_id,
        Mentor_Student.mentor_id == Mentor.mentor_id).all():
    if mentor in maps.keys():
        maps[mentor].append(student)
    else:
        maps[mentor] = [student]
Which gives me the result:
{<__main__.Mentor object at 0x7f3309887070>: [<__main__.Student object at 0x7f3309850f40>, <__main__.Student object at 0x7f3309887280>, <__main__.Student object at 0x7f3309887580>, <__main__.Student object at 0x7f330988f7c0>, <__main__.Student object at 0x7f330988fa00>, <__main__.Student object at 0x7f330982b4c0>],
...
<__main__.Mentor object at 0x7f33097e2550>: [<__main__.Student object at 0x7f33097e2490>, <__main__.Student object at 0x7f33097e2790>]}
But this does not seem like a refined solution. Any ideas on how I can improve my code? I am relatively new to SQLAlchemy.
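For comparison, here is a minimal standalone sketch of how this many-to-many is usually declared and queried: the relationship on each side points at the other class, and a single eager-loading query replaces the accumulator dictionary (class and column names follow the snippet above; the Faker/__init__ details are omitted):

from sqlalchemy import Column, Integer, String, ForeignKey
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship, selectinload

Base = declarative_base()

class MentorStudent(Base):
    # Plain association table between the two sides.
    __tablename__ = 'mentor_student'
    mentor_id = Column(Integer, ForeignKey('mentor.mentor_id'), primary_key=True)
    student_id = Column(Integer, ForeignKey('student.student_id'), primary_key=True)

class Mentor(Base):
    __tablename__ = 'mentor'
    mentor_id = Column(Integer, primary_key=True)
    name = Column(String(255), nullable=False)
    # Target the *other* class; the secondary table handles the mapping.
    mentees = relationship('Student', secondary='mentor_student',
                           back_populates='mentors')

class Student(Base):
    __tablename__ = 'student'
    student_id = Column(Integer, primary_key=True)
    name = Column(String(255), nullable=False)
    mentors = relationship('Mentor', secondary='mentor_student',
                           back_populates='mentees')

# One query with an eager load, no manual grouping needed:
# for mentor in session.query(Mentor).options(selectinload(Mentor.mentees)):
#     print(mentor, ':', mentor.mentees)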

Postgres / SQLAlchemy: no matching unique constraint

I am working on a system which requires a composite foreign key in SQLAlchemy with Postgres, and have come up with the following example TestCase as a proof of concept. The idea is that every Thing must reference a unique ThingType/Feature combination stored in ThingFeatureType, through two columns: tt_id (the thing type id) and feature (a string). ThingType and Feature also have their own tables.
When I run the code below using pytest, I get the following error complaining that there is no UniqueConstraint on (thing_feature_type.tt_id, feature). However, there definitely is!
Any help on this much appreciated!
Error:
self = <sqlalchemy.dialects.postgresql.psycopg2.PGDialect_psycopg2 object at 0x7f4f61ee4320>, cursor = <cursor object at 0x7f4f61ee1808; closed: -1>
statement = '\nCREATE TABLE thing (\n\tt_id SERIAL NOT NULL, \n\ttt_id INTEGER, \n\tfeature VARCHAR(64), \n\tname VARCHAR(128) NOT...RY KEY (t_id), \n\tFOREIGN KEY(tt_id, feature) REFERENCES thing_feature_type (tt_id, feature) ON DELETE CASCADE\n)\n\n'
parameters = {}, context = <sqlalchemy.dialects.postgresql.psycopg2.PGExecutionContext_psycopg2 object at 0x7f4f5f4a91d0>
def do_execute(self, cursor, statement, parameters, context=None):
> cursor.execute(statement, parameters)
E sqlalchemy.exc.ProgrammingError: (psycopg2.ProgrammingError) there is no unique constraint matching given keys for referenced table "thing_feature_type"
E [SQL: '\nCREATE TABLE thing (\n\tt_id SERIAL NOT NULL, \n\ttt_id INTEGER, \n\tfeature VARCHAR(64), \n\tname VARCHAR(128) NOT NULL, \n\tPRIMARY KEY (t_id), \n\tFOREIGN KEY(tt_id, feature) REFERENCES thing_feature_type (tt_id, feature) ON DELETE CASCADE\n)\n\n'] (Background on this error at: http://sqlalche.me/e/f405)
venv/lib/python3.5/site-packages/SQLAlchemy-1.2.7-py3.5-linux-x86_64.egg/sqlalchemy/engine/default.py:507: ProgrammingError
Code:
from unittest import TestCase

from sqlalchemy import (case,
                        Column,
                        Float,
                        ForeignKey,
                        Integer,
                        String,
                        Table,
                        Text, )
from sqlalchemy.orm import relationship
from sqlalchemy.schema import ForeignKeyConstraint, UniqueConstraint

from concept_back_end.run import app
from concept_back_end.database import db


def define_feature(model):
    class Feature(model):
        feature = Column(String(64), primary_key=True)

        @classmethod
        def _define_relationships(cls):
            cls.feature_types = relationship('FeatureType',
                                             back_populates='the_feature',
                                             cascade='save-update, delete',
                                             lazy='select')
    return Feature


def define_thing_type(model):
    class ThingType(model):
        tt_id = Column(Integer, primary_key=True, autoincrement=True)
        name = Column(String(128), nullable=False)

        @classmethod
        def _define_relationships(cls):
            cls.things = relationship('Thing',
                                      back_populates='thing_type',
                                      cascade='save-update, delete',
                                      lazy='select')
            cls.thing_feature_types = relationship(
                'ThingFeatureType',
                back_populates='thing_type',
                cascade='save-update, delete',
                lazy='select'
            )
    return ThingType


def define_thing_feature_type(model):
    class ThingFeatureType(model):
        __tablename__ = 'thing_feature_type'
        ft_id = Column(Integer, primary_key=True, autoincrement=True)
        feature = Column(String(64),
                         ForeignKey('feature.feature'))
        tt_id = Column(Integer, ForeignKey('thing_type.tt_id'))
        __table_args__ = (
            UniqueConstraint('tt_id', 'feature'),
        )

        @classmethod
        def _define_relationships(cls):
            cls.the_feature = relationship('Feature',
                                           back_populates='feature_types',
                                           lazy='select')
            cls.thing_type = relationship('ThingType',
                                          back_populates='feature_types',
                                          lazy='select')
            cls.things = relationship('Thing',
                                      back_populates='feature_type',
                                      lazy='select')
    return ThingFeatureType


def define_thing(model):
    class Thing(model):
        t_id = Column(Integer, primary_key=True, autoincrement=True)
        tt_id = Column(Integer)
        feature = Column(String(64))
        name = Column(String(128), nullable=False)
        __table_args__ = (
            ForeignKeyConstraint(
                ('tt_id', 'feature'),
                ('thing_feature_type.tt_id', 'thing_feature_type.feature'),
                ondelete='CASCADE'
            ),
            {},
        )

        @classmethod
        def _define_relationships(cls):
            cls.thing_type = relationship('ThingType',
                                          back_populates='things',
                                          lazy='select')
            cls.feature_type = relationship('ThingFeatureType',
                                            back_populates='things',
                                            lazy='select')
    return Thing


model_factories = [
    define_feature,
    define_thing_type,
    define_thing_feature_type,
    define_thing,
]
"""List of factory functions"""


class ForeignKeyExampleTestCase(TestCase):
    def setUp(self):
        with app.app_context():
            models = [m(db.Model) for m in model_factories]
            for m in models:
                m._define_relationships()
            db.create_all()
            db.session.commit()

    def test_can_connect_to_db(self):
        with app.app_context():
            db.session.execute('SELECT * FROM thing;')

    def tearDown(self):
        """And then tear them down again"""
        with app.app_context():
            db.session.close()
            db.drop_all()
This appeared to be an issue with stale data in the db. The final working code is below.
from unittest import TestCase

from sqlalchemy import (case,
                        Column,
                        Float,
                        ForeignKey,
                        ForeignKeyConstraint,
                        Integer,
                        String,
                        Table,
                        Text,
                        UniqueConstraint, )
from sqlalchemy.orm import relationship
from sqlalchemy.schema import CreateTable

from concept_back_end.run import app
from concept_back_end.database import db


def define_feature(model):
    class Feature(model):
        feature = Column(String(64), primary_key=True, unique=True)

        @classmethod
        def _define_relationships(cls):
            cls.feature_types = relationship('FeatureType',
                                             back_populates='the_feature',
                                             cascade='save-update, delete',
                                             lazy='select')
    return Feature


def define_thing_type(model):
    class ThingType(model):
        tt_id = Column(Integer, primary_key=True, autoincrement=True)
        name = Column(String(128), nullable=False)

        @classmethod
        def _define_relationships(cls):
            cls.things = relationship('Thing',
                                      back_populates='thing_type',
                                      cascade='save-update, delete',
                                      lazy='select')
            cls.thing_feature_types = relationship(
                'ThingFeatureType',
                back_populates='thing_type',
                cascade='save-update, delete',
                lazy='select'
            )
    return ThingType


def define_thing_feature_type(model):
    class ThingFeatureType(model):
        __tablename__ = 'thing_feature_type'
        __table_args__ = (
            UniqueConstraint('tt_id', 'feature'),
            {},
        )
        ft_id = Column(Integer, primary_key=True, autoincrement=True)
        feature = Column(String(64),
                         ForeignKey('feature.feature'))
        tt_id = Column(Integer, ForeignKey('thing_type.tt_id'))

        @classmethod
        def _define_relationships(cls):
            cls.the_feature = relationship('Feature',
                                           back_populates='feature_types',
                                           lazy='select')
            cls.thing_type = relationship('ThingType',
                                          back_populates='feature_types',
                                          lazy='select')
            cls.things = relationship('Thing',
                                      back_populates='feature_type',
                                      lazy='select')
    return ThingFeatureType


def define_thing(model):
    class Thing(model):
        t_id = Column(Integer, primary_key=True, autoincrement=True)
        tt_id = Column(Integer)
        feature = Column(String(64))
        name = Column(String(128), nullable=False)
        __table_args__ = (
            ForeignKeyConstraint(
                ['tt_id', 'feature'],
                ['thing_feature_type.tt_id', 'thing_feature_type.feature'],
                ondelete='CASCADE'
            ),
            {},
        )

        @classmethod
        def _define_relationships(cls):
            cls.thing_type = relationship('ThingType',
                                          back_populates='things',
                                          lazy='select')
            cls.feature_type = relationship('ThingFeatureType',
                                            back_populates='things',
                                            lazy='select')
    return Thing


model_factories = [
    define_feature,
    define_thing_type,
    define_thing_feature_type,
    define_thing,
]
"""List of factory functions"""


class ForeignKeyExampleTestCase(TestCase):
    def setUp(self):
        with app.app_context():
            models = [m(db.Model) for m in model_factories]
            for i, m in enumerate(models):
                m._define_relationships()
                models[i] = m
            for m in models:
                print(CreateTable(m.__table__))
            db.create_all()
            db.session.commit()

    def test_can_connect_to_db(self):
        with app.app_context():
            db.session.execute('SELECT * FROM thing;')

    def tearDown(self):
        """And then tear them down again"""
        with app.app_context():
            db.session.close()
            db.drop_all()
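Distilled to its essentials, the composite-key arrangement looks like this (a minimal standalone sketch without the Flask app and factory machinery; the connection URL is a placeholder):

from sqlalchemy import (Column, Integer, String,
                        ForeignKeyConstraint, UniqueConstraint, create_engine)
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()

class ThingFeatureType(Base):
    __tablename__ = 'thing_feature_type'
    __table_args__ = (
        # The composite foreign key below may only reference this pair
        # because it is declared unique here.
        UniqueConstraint('tt_id', 'feature'),
    )
    ft_id = Column(Integer, primary_key=True)
    tt_id = Column(Integer)
    feature = Column(String(64))

class Thing(Base):
    __tablename__ = 'thing'
    __table_args__ = (
        ForeignKeyConstraint(
            ['tt_id', 'feature'],
            ['thing_feature_type.tt_id', 'thing_feature_type.feature'],
            ondelete='CASCADE',
        ),
    )
    t_id = Column(Integer, primary_key=True)
    tt_id = Column(Integer)
    feature = Column(String(64))
    name = Column(String(128), nullable=False)

# Base.metadata.create_all(create_engine('postgresql://user:pass@localhost/db'))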

SQLAlchemy, concise ways of defining many similar classes/tables

So I have a lot of tables with the general structure of
Base = declarative_base()

class Thing(Base):
    __tablename__ = 'thing'
    uid = Column(Integer, Sequence('Thing_id_seq'), primary_key=True)
    name = Column(String)

    def __repr__(self):
        return "something"

class ThingEntry(Base):
    __tablename__ = 'thingentry'
    uid = Column(Integer, Sequence('ThingEntry_id_seq'), primary_key=True)
    foo = Column(Integer, ForeignKey('foo.uid'))
    entity = Column(Integer, ForeignKey('thing.uid'))

class Quu(Base):
    __tablename__ = 'quu'
    uid = Column(Integer, Sequence('Quu_id_seq'), primary_key=True)
    name = Column(String)
    description = Column(String)

    def __repr__(self):
        return "something"

class QuuEntry(Base):
    __tablename__ = 'quuentry'
    uid = Column(Integer, Sequence('QuuEntry_id_seq'), primary_key=True)
    foo = Column(Integer, ForeignKey('foo.uid'))
    entity = Column(Integer, ForeignKey('quu.uid'))
What are some more concise ways of defining all these classes/tables? This approach involves a lot of code duplication and repetition.
I was thinking of some kind of inheritance so that I could bring that code down to
class Thing(Base):
    pass

class ThingEntry(Base):
    pass

class Quu(Base):
    description = Column(String)

class QuuEntry(Base):
    pass
with some magic auto-assigning the other values (__tablename__, uid, foo, etc.), but I'm not sure whether that's possible or optimal.
You should look at the documentation on automatic reflection: http://docs.sqlalchemy.org/en/rel_1_1/core/reflection.html
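(For completeness, reflection builds Table objects from an existing database instead of declaring them in code; a minimal sketch, with a placeholder connection URL and table name:)

from sqlalchemy import create_engine, MetaData

engine = create_engine('postgresql://user:pass@localhost/db')
metadata = MetaData()
metadata.reflect(bind=engine)        # load every existing table definition
thing = metadata.tables['thing']     # Table object built from the live schema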
I used a factory approach with metaclasses, as follows:
class ObjectFactory(type(Base)):
    # Extends the declarative metaclass so SQLAlchemy still maps the
    # resulting classes, and returns the newly built class.
    def __new__(cls, class_name, parents, attributes):
        attributes['__tablename__'] = class_name
        attributes['uid'] = Column(Integer, Sequence(class_name + '_id_seq'),
                                   primary_key=True)
        attributes['name'] = Column(String)
        return super().__new__(cls, class_name, parents, attributes)

class EntryFactory(type(Base)):
    def __new__(cls, class_name, parents, attributes):
        attributes['__tablename__'] = class_name
        attributes['uid'] = Column(Integer, Sequence(class_name + '_id_seq'),
                                   primary_key=True)
        attributes['foo'] = Column(Integer, ForeignKey('foo.uid'), nullable=False)
        # 'ThingEntry' -> parent table 'Thing', referencing its uid column.
        attributes['entity_id'] = Column(Integer,
                                         ForeignKey(class_name[:-5] + '.uid'),
                                         nullable=False)
        return super().__new__(cls, class_name, parents, attributes)

class Thing(Base, metaclass=ObjectFactory):
    pass

class ThingEntry(Base, metaclass=EntryFactory):
    pass

class Quu(Base, metaclass=ObjectFactory):
    description = Column(String)

class QuuEntry(Base, metaclass=EntryFactory):
    pass
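The same idea can also be expressed without a custom metaclass, using SQLAlchemy's declarative mixins and declared_attr; a minimal sketch reusing the column names from the question:

from sqlalchemy import Column, Integer, String, Sequence, ForeignKey
from sqlalchemy.ext.declarative import declarative_base, declared_attr

Base = declarative_base()

class ObjectMixin:
    # Shared columns for the Thing/Quu-style tables, evaluated per subclass.
    @declared_attr
    def __tablename__(cls):
        return cls.__name__.lower()

    @declared_attr
    def uid(cls):
        return Column(Integer, Sequence(cls.__name__ + '_id_seq'), primary_key=True)

    name = Column(String)

class EntryMixin:
    @declared_attr
    def __tablename__(cls):
        return cls.__name__.lower()

    @declared_attr
    def uid(cls):
        return Column(Integer, Sequence(cls.__name__ + '_id_seq'), primary_key=True)

    @declared_attr
    def foo(cls):
        # Columns carrying a ForeignKey must be wrapped in declared_attr on a mixin.
        return Column(Integer, ForeignKey('foo.uid'), nullable=False)

    @declared_attr
    def entity(cls):
        # 'ThingEntry' -> 'thing.uid', 'QuuEntry' -> 'quu.uid'
        return Column(Integer, ForeignKey(cls.__name__[:-5].lower() + '.uid'),
                      nullable=False)

class Thing(ObjectMixin, Base):
    pass

class ThingEntry(EntryMixin, Base):
    pass

class Quu(ObjectMixin, Base):
    description = Column(String)

class QuuEntry(EntryMixin, Base):
    pass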

sqlalchemy: 'InstrumentedList' object has no attribute 'filter'

I have the following 3 classes:
class Resource:
    id = Column(Integer, primary_key=True)
    path = Column(Text)
    data = Column(Binary)
    type = Column(Text)

    def set_resource(self, path, data, type):
        self.path = path
        self.data = data
        self.type = type

class EnvironmentResource(Base, Resource):
    __tablename__ = 'environment_resources'
    parent_id = Column(Integer, ForeignKey('environments.id', ondelete='CASCADE'))

    def __init__(self, path, data, type):
        self.set_resource(path, data, type)

class Environment(Base):
    __tablename__ = 'environments'
    id = Column(Integer, primary_key=True)
    identifier = Column(Text, unique=True)
    name = Column(Text)
    description = Column(Text)
    _resources = relationship("EnvironmentResource",
                              cascade="all, delete-orphan",
                              passive_deletes=True)
    _tools = relationship("Tool",
                          cascade="all, delete-orphan",
                          passive_deletes=True)

    def __init__(self, name, identifier, description):
        self.name = name
        self.identifier = identifier
        self.description = description

    def get_resource(self, path):
        return self._resources.filter(EnvironmentResource.path == path).first()
On calling get_resource, I am told that 'InstrumentedList' object has no attribute 'filter'. I've gone through the documentation and can't quite figure this out. What am I missing that would let me filter the resources belonging to an environment inside my get_resource method?
PS: I know get_resource will throw an exception, that's what I'd like it to do.
In order to work with the relationship as with a Query, you need to configure it with lazy='dynamic'. See Dynamic Relationship Loaders for more on this:
_resources = relationship("EnvironmentResource",
                          cascade="all, delete-orphan",
                          lazy='dynamic',
                          passive_deletes=True)
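Alternatively, if the relationship should stay a plain list, an equivalent lookup can be issued directly against the child table (a sketch, assuming the mappings from the question and that the instance is attached to a session):

from sqlalchemy.orm import object_session

def get_resource(self, path):
    # Query the child table directly instead of filtering the loaded list.
    return (object_session(self)
            .query(EnvironmentResource)
            .filter(EnvironmentResource.parent_id == self.id,
                    EnvironmentResource.path == path)
            .first())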

Is multi-level polymorphism possible in SQLAlchemy?

Is it possible to have multi-level polymorphism in SQLAlchemy? Here's an example:
class Entity(Base):
    __tablename__ = 'entities'
    id = Column(Integer, primary_key=True)
    created_at = Column(DateTime, default=datetime.utcnow, nullable=False)
    entity_type = Column(Unicode(20), nullable=False)
    __mapper_args__ = {'polymorphic_on': entity_type}

class File(Entity):
    __tablename__ = 'files'
    id = Column(None, ForeignKey('entities.id'), primary_key=True)
    filepath = Column(Unicode(255), nullable=False)
    file_type = Column(Unicode(20), nullable=False)
    __mapper_args__ = {'polymorphic_identity': u'file', 'polymorphic_on': file_type}

class Image(File):
    __mapper_args__ = {'polymorphic_identity': u'image'}
    __tablename__ = 'images'
    id = Column(None, ForeignKey('files.id'), primary_key=True)
    width = Column(Integer)
    height = Column(Integer)
When I call Base.metadata.create_all(), SQLAlchemy raises the following error:
IntegrityError: (IntegrityError) entities.entity_type may not be NULL.
This error goes away if I remove the Image model and the polymorphic_on key in File.
What gives?
Yes. The problem with your code is that you're making Image a type of file, when you must aim for the head of the tree, making Image a type of Entity.
Example:
from sqlalchemy import (Table, Column, Integer, String, create_engine,
                        MetaData, ForeignKey)
from sqlalchemy.orm import mapper, create_session
from sqlalchemy.ext.declarative import declarative_base

e = create_engine('sqlite:////tmp/foo.db', echo=True)
Base = declarative_base(bind=e)

class Employee(Base):
    __tablename__ = 'employees'
    employee_id = Column(Integer, primary_key=True)
    name = Column(String(50))
    type = Column(String(30), nullable=False)
    __mapper_args__ = {'polymorphic_on': type}

    def __init__(self, name):
        self.name = name

class Manager(Employee):
    __tablename__ = 'managers'
    __mapper_args__ = {'polymorphic_identity': 'manager'}
    employee_id = Column(Integer, ForeignKey('employees.employee_id'),
                         primary_key=True)
    manager_data = Column(String(50))

    def __init__(self, name, manager_data):
        super(Manager, self).__init__(name)
        self.manager_data = manager_data

class Owner(Manager):
    __tablename__ = 'owners'
    __mapper_args__ = {'polymorphic_identity': 'owner'}
    employee_id = Column(Integer, ForeignKey('managers.employee_id'),
                         primary_key=True)
    owner_secret = Column(String(50))

    def __init__(self, name, manager_data, owner_secret):
        super(Owner, self).__init__(name, manager_data)
        self.owner_secret = owner_secret

Base.metadata.drop_all()
Base.metadata.create_all()

s = create_session(bind=e, autoflush=True, autocommit=False)
o = Owner('nosklo', 'mgr001', 'ownerpwd')
s.add(o)
s.commit()
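Continuing the example above, a quick check that the single-discriminator hierarchy behaves as expected (a sketch):

# Querying the base class yields instances of the most specific subclass:
for emp in s.query(Employee).all():
    print(type(emp).__name__, emp.name)   # e.g. "Owner nosklo" for the row added above

# Querying an intermediate class also returns its subclasses:
managers = s.query(Manager).all()          # the Owner row is included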
Not possible (see the SQLAlchemy docs):
Currently, only one discriminator column may be set, typically on the base-most class in the hierarchy. “Cascading” polymorphic columns are not yet supported.
So you should follow nosklo's proposal and change your inheritance pattern.
