I want to create a generic decision tree using SQL Alchemy. That is, each node has zero or more children of any type, and the task is to evaluate some expression using the tree root, which will pass on the logic to the children using extending classes.
I have defined the following base class:
from flask_sqlalchemy import Model, SQLAlchemy, DefaultMeta
from abc import ABCMeta, abstractmethod
from sqlalchemy import Column, Integer, String, Date, Boolean, ForeignKey, Text, Float, Unicode
# BaseModel must exist before it is handed to SQLAlchemy(); in the original
# order `db = SQLAlchemy(model_class=BaseModel)` raises NameError.
class BaseModel(Model):
    """Custom declarative base for all models of this SQLAlchemy instance."""
    pass


class ModelABCMeta(DefaultMeta, ABCMeta):
    """Metaclass merging SQLAlchemy's declarative metaclass with ABCMeta.

    Required so a db.Model subclass can also declare abstract methods.
    """
    pass


db = SQLAlchemy(model_class=BaseModel)
class RuleBaseNode(db.Model, metaclass=ModelABCMeta):
    """Generic base class representing a node in a decision tree."""

    id = Column(Integer, primary_key=True)
    # Discriminator column driving single-table polymorphism.
    type = Column(String(50))

    # Generic (any-type) reference to the parent node.
    # NOTE(review): generic_relationship comes from sqlalchemy_utils --
    # `from sqlalchemy_utils import generic_relationship` is missing above.
    parent_node_type = Column(Unicode(255), nullable=True)
    parent_node_id = Column(Integer, nullable=True)
    parent_node = generic_relationship(parent_node_type, parent_node_id)

    __mapper_args__ = {
        'polymorphic_on': type,
        'polymorphic_identity': 'node'
    }

    @abstractmethod  # was '#abstractmethod': the '@' was eaten by markdown
    def eval(self, input) -> bool:
        """Evaluates an input to a boolean"""
        pass
Now the question is how to add an attribute of the node children.
Usually, I would use relationship with backref, but I couldn't find anything in the documentation.
I would want such a property:
class RuleBaseNode(db.Model, metaclass=ModelABCMeta):
    ...

    @property  # was '#property': the '@' was eaten by markdown
    def sub_nodes(self):  # a property getter needs `self`
        """Desired property: the child nodes of this node."""
        return ...
Now I guess I could implement some sort of the following, but I would guess it won't work with querying an abstract class
def get_sub_nodes(session, node):
    """Return all nodes whose generic parent_node points at `node`.

    The original computed the query but dropped the result; the list
    from .all() must be returned to the caller.
    """
    return session.query(RuleBaseNode).filter(RuleBaseNode.parent_node == node).all()
Related
I am using SQLAlchemy to create tables in my project. I have a requirement where all these tables should have some specific attributes and functions. I want to create a structure such that all tables inherit from an abstract class which includes these attributes and functions.
Here's an example of what I want to achieve:
Base = declarative_base()

# pseudo -- the author states below this will not work as-is;
# kept verbatim as the target sketch.
class Table(ABC, Base):
    # like #abstractattribute (no such decorator exists in Python)
    # NOTE(review): 'some_attribtue' is presumably a typo for 'some_attribute'
    some_attribtue = list()

    #staticmethod
    def some_func(self):
        pass
class Users(Table):
    """Concrete table sketch overriding the shared attribute and function."""
    __tablename__ = "users"
    user_id = Column(Integer, primary_key=True)
    username = Column(String, nullable=False)
    some_attribute = list()

    @staticmethod  # was '#staticmethod': the '@' was eaten by markdown
    def some_func():
        do_something()
By doing this, I hope that I can use these classes in something like:
Base.metadata.create_all(engine)
while also being able to call:
Users.some_func()
I understand that this code wouldn't work as is, due to issues like having ABC and Base at the same time, not having @abstractattribute, and needing to add __tablename__ and a Primary-Key Column to the class Table.
I am thinking of using a decorator to achieve this, but I am not sure how to implement it correctly. This is the outline of my idea:
class Table(ABC):
    # default value; concrete tables override this
    some_attribute = None

    @staticmethod  # was '#staticmethod': the '@' was eaten by markdown
    def some_func(self):
        # NOTE(review): 'self' is stray on a staticmethod -- the concrete
        # override below (Users.some_func) takes no arguments.
        pass
# create decorator
# create decorator
def sql_table(abstract_class):
    """Class decorator: build a concrete SQLAlchemy model from `abstract_class`.

    As used below (bare @sql_table) the decorator receives the class
    directly. The original outline wrapped an inner `decorator` but never
    returned it (nor the built class), which would bind the decorated
    name to None; `locals()[name] = obj` inside a class body is also
    not a reliable way to add attributes.
    """
    attrs = {
        '__tablename__': abstract_class.__dict__["__tablename__"],
        'some_attribute': abstract_class.__dict__["some_attribute"],
    }
    for name, obj in abstract_class.__dict__.items():
        # copy mapped columns plus any functions/staticmethods
        # (answers "How do I get the some_func function?")
        if isinstance(obj, (Column, staticmethod)) or callable(obj):
            attrs[name] = obj
    # type() keeps the original class name
    # (answers "How do I name the class correctly?")
    return type(abstract_class.__name__, (Base,), attrs)
@sql_table  # was '#sql_table': the '@' was eaten by markdown
class Users(Table):
    __tablename__ = "users"
    user_id = Column(Integer, primary_key=True)
    username = Column(String, nullable=False)
    some_attribute = "some_val"

    @staticmethod  # was '#staticmethod'
    def some_func():
        do_something()
Any help or suggestions on how to implement this (not necessarily with decorators) would be greatly appreciated.
Thanks to @snakecharmerb and @ljmc I have found a solution that works for me, although there seem to be many ways one can achieve this.
The solution that works for me is:
from sqlalchemy.ext.declarative import declarative_base, declared_attr
from sqlalchemy import Column, Integer, String
Base = declarative_base()


class Table(Base):
    """Shared abstract base: auto-tablename plus default attribute/function."""
    # __abstract__ tells SQLAlchemy not to map this class to a table
    __abstract__ = True

    @declared_attr  # was '#declared_attr': the '@' was eaten by markdown
    def __tablename__(cls) -> str:  # so I don't have to specify it anymore
        return cls.__name__.lower()

    some_attribute = set()  # this is the default

    @staticmethod  # was '#staticmethod'
    def some_func():  # define default behavior (or pass)
        do_something()
class Users(Table):
    # define columns as usual
    user_id = Column(Integer, primary_key=True)
    username = Column(String, nullable=False)

    some_attribute = set(["a"])  # overwrite the default

    @staticmethod  # keep the override a staticmethod like the base class,
    # otherwise instance calls would pass an unexpected `self`
    def some_func():  # overwrite the default behavior
        do_something_else()
Now, this should be improved upon by specifying a type to some_attribute (typing is awesome).
Until now I have a parent class Entity for all my orm classes:
class AbstractEntity():
    """Common parent supplying the primary key and derived table name."""
    id = Column(Integer, primary_key=True)

    @declared_attr  # was '#declared_attr': the '@' was eaten by markdown
    def __tablename__(self):
        return AbstractEntity.table_name_for_class(self)
    ...


# AbstractEntity becomes the base of every declarative model
Entity = declarative_base(cls=AbstractEntity)


class Drink(Entity):
    name = Entity.stringColumn()
I want my classes only to inherit from a single class Entity, not from a class Base and a mixin Entity. That works fine.
However, now I would like to introduce another parent class EntityAssociation that I can use as parent for all my association classes that are used for many to many relationships, e.g.
class DrinkIngretients(EntityAssociation):
    # association table for the Drink <-> Ingredient many-to-many
    # NOTE(review): class name is presumably a typo for 'DrinkIngredients'
    drink_id = Entity.foreign_key(Drink)
    ingredient_id = Entity.foreign_key(Ingredient)
    ...
The class EntityAssociation should inherit from Base = declarative_base() but not from AbstractEntity. (It should not include the column id that is defined in AbstractEntity.)
=> How can I implement that inheritance structure?
I tried
class AbstractEntity():
    id = Column(Integer, primary_key=True)

    @declared_attr  # was '#declared_attr': the '@' was eaten by markdown
    def __tablename__(self):
        return AbstractEntity.table_name_for_class(self)
    ...


Base = declarative_base()


# NOTE(review): as written, both subclasses fail with "Class does not have
# a table or tablename specified ..." -- they need __abstract__ = True,
# which is exactly the fix this question arrives at further down.
class Entity(Base, AbstractEntity):
    pass


class EntityAssociation(Base):
    pass
However, the behavior of
Entity = declarative_base(cls=AbstractEntity)
and
class Entity(Base, AbstractEntity):
pass
seems to be different.
Class does not have a table or tablename specified and does not inherit from an existing table-mapped class.
=> How can I specify that the classes Entity and EntityAssociation should not have extra table names?
=> Any other suggestions on how to get the wanted inheritance structure?
The __abstract__ flag did the trick:
# __abstract__ = True keeps the declarative machinery from mapping these
# classes to tables of their own; only their subclasses get tables.
class EntityRelation(Base):
    __abstract__ = True


class Entity(Base, AbstractEntity):
    __abstract__ = True
Using Python 3.5 and SQLAlchemy 1.0.14 (ORM).
I have a table of items declared as such:
from sqlalchemy.ext.declarative.api import declarative_base
Base = declarative_base()


class Item(Base):
    """Base item row; `type` stores the per-row item type identifier."""
    __tablename__ = 'items'
    id = Column(Integer, primary_key=True)
    type = Column(String)
    # other non relevant attributes
My Items can be of many different types, the type identifier being stored in type.
For a few of those objects types, I need to have specific methods or attributes available.
To achieve that I tried to use single table inheritance with several SpecialisedItem as subclass of Item:
class Item(Base):
    __tablename__ = 'items'
    id = Column(Integer, primary_key=True)
    type = Column(String, index=True)
    # other non relevant attributes

    __mapper_args__ = {
        'polymorphic_on': type,
    }


# Single-table inheritance requires subclassing Item, not Base: a direct
# Base subclass has no __tablename__ and no link to the items table
# (the working minimal test case below also inherits from Item).
class SpecialisedItem(Item):
    __mapper_args__ = {
        'polymorphic_identity': 'specialitem',
    }

    def specialised_method(self):
        return "I am special"
Now when I load my items, I'd want all specialised items (having type=='specialitem') to be loaded as such, while any other type value would result in the parent class Item being loaded.
That doesn't work, I get AssertionError: No such polymorphic_identity 'normal' is defined when loading the items.
I would like to avoid creating inherited classes that do nothing just to cover all possible type values, instead having "unmapped" type falling back to the parent class Item.
Is there any way to achieve that effect ?
Minimal test case for reference:
from sqlalchemy.engine import create_engine
from sqlalchemy.ext.declarative.api import declarative_base
from sqlalchemy.orm.session import sessionmaker
from sqlalchemy.sql.schema import Column
from sqlalchemy.sql.sqltypes import Integer, String
Base = declarative_base()


class Item(Base):
    __tablename__ = 'items'
    id = Column(Integer, primary_key=True)
    type = Column(String, index=True)
    # other non relevant attributes
    __mapper_args__ = {
        'polymorphic_on': type,
    }


class SpecialisedItem(Item):
    # single-table inheritance: rows with type == 'special'
    __mapper_args__ = {
        'polymorphic_identity': 'special',
    }
    specialAttribute = Column(String)

    def specialised_method(self):
        return "I am special"


engine = create_engine("sqlite:///:memory:")
Base.metadata.create_all(engine)
Session = sessionmaker(bind=engine)
session = Session()
# 'normal' matches no polymorphic_identity -- this row is what triggers
# the AssertionError when the full query below is loaded
session.add(Item(type='normal'))
session.add(Item(type='special'))
session.commit()
# loading only specialized items works
for item in session.query(Item).filter_by(type="special"):
    print(item.specialised_method())
# loading other items fails
for item in session.query(Item):
    print(item.type)
Thanks,
Guillaume
A mapping of “polymorphic identity” identifiers to Mapper instances is stored in the polymorphic_map dict. You can create custom polymorphic_map that will return parent class mapper for undefined polymorphic identities.
from sqlalchemy.engine import create_engine
from sqlalchemy.ext.declarative.api import declarative_base
from sqlalchemy.orm.session import sessionmaker
from sqlalchemy.sql.schema import Column
from sqlalchemy.sql.sqltypes import Integer, String
from sqlalchemy import event
from collections import defaultdict  # was missing; used in the event hook below

Base = declarative_base()


class Item(Base):
    __tablename__ = 'items'
    id = Column(Integer, primary_key=True)
    type = Column(String, index=True)
    # other non relevant attributes
    __mapper_args__ = {
        'polymorphic_on': type,
    }


class SpecialisedItem(Item):
    __mapper_args__ = {
        'polymorphic_identity': 'special',
    }
    specialAttribute = Column(String)

    def specialised_method(self):
        return "I am special"


# http://docs.sqlalchemy.org/en/rel_1_1/orm/events.html#sqlalchemy.orm.events.MapperEvents.mapper_configured
@event.listens_for(Item, 'mapper_configured')  # was '#event.listens_for'
def receive_mapper_configured(mapper, class_):
    # any identity missing from polymorphic_map now falls back to the base mapper
    mapper.polymorphic_map = defaultdict(lambda: mapper, mapper.polymorphic_map)
    # to prevent 'incompatible polymorphic identity' warning, not mandatory
    mapper._validate_polymorphic_identity = None


engine = create_engine("sqlite:///:memory:")
Base.metadata.create_all(engine)
Session = sessionmaker(bind=engine)
session = Session()
session.add(Item(type='normal'))
session.add(Item(type='special'))
session.commit()
# loading only specialized items works
for item in session.query(Item).filter_by(type="special"):
    print(item.specialised_method())
# loading other items now works too: unmapped identities fall back to Item
for item in session.query(Item):
    print(item.type)
A reusable decorator solution, based on @r-m-n's answer. The custom class is also replaced with collections.defaultdict, which does the same thing.
def receive_mapper_configured(mapper, class_):
    # make unknown polymorphic identities fall back to this mapper
    # (relies on `from collections import defaultdict` being in scope)
    mapper.polymorphic_map = defaultdict(lambda: mapper, mapper.polymorphic_map)
    # to prevent 'incompatible polymorphic identity' warning, not necessary
    mapper._validate_polymorphic_identity = None


def polymorphic_fallback(mapper_klass):
    # reusable class decorator: registers the fallback hook on the mapped
    # class and returns the class unchanged
    event.listens_for(mapper_klass, 'mapper_configured')(receive_mapper_configured)
    return mapper_klass
Then in your code you can just add this decorator to base classes:
@polymorphic_fallback  # was '#polymorphic_fallback': the '@' was eaten by markdown
class Item:
    ...


class SpecificItem(Item):
    ...
I have code like this working fine:
def get_timestamp(ts):
    """Convert a POSIX timestamp in seconds to a naive UTC datetime."""
    as_utc = datetime.utcfromtimestamp(ts)
    return as_utc
def set_timestamp(dt):
    """Convert a datetime to POSIX seconds, interpreted in local time."""
    broken_down = dt.timetuple()
    return time.mktime(broken_down)
class Group(Base):
    """Model storing 'created' as an integer UNIX timestamp column."""
    __tablename__ = 'group'
    # raw column; exposed to callers as a datetime via the property below
    _created = Column('created', Integer, nullable=False)

    @property  # was '#property': the '@' was eaten by markdown
    def created(self):
        return get_timestamp(self._created)

    @created.setter  # was '#created.setter'
    def created(self, value):
        self._created = set_timestamp(value)
I want some code like this, but it's not working:
# NOTE(review): `set_created` was undefined -- per the surrounding text the
# intent is to reuse the module-level set_timestamp function.
created = synonym('_created',
                  descriptor=property(get_timestamp,
                                      set_timestamp))
Because it always passes in a self as the 1st param.
I'd like to use get_timestamp and set_timestamp across my project, of course. So I'm not going to make them methods of the class but stand-alone functions.
How can I achieve this?
EDIT: I would take Option2, and still open to other answers.
Option-1: Code below should work (you do not need to have a class in order to define self):
def pget_timestamp(self):
    """Expose the stored UNIX timestamp as a naive UTC datetime."""
    raw_seconds = self._created
    return datetime.utcfromtimestamp(raw_seconds)


def pset_timestamp(self, dt):
    """Store a datetime into _created as UNIX seconds (local-time based)."""
    seconds = time.mktime(dt.timetuple())
    self._created = seconds
class Group(Base):
    __tablename__ = 'group'
    id = Column(Integer, primary_key=True)
    # column named 'created' in the table, '_created' on the class
    _created = Column('created', Integer, nullable=False)
    # the module-level pget/pset already accept `self`, so a plain
    # property works as the synonym's descriptor
    created = synonym(_created,
                      descriptor=property(pget_timestamp, pset_timestamp),
                      )
Option-2: If you do need the same on many classes, leverage Mixins
from sqlalchemy.ext.declarative import declared_attr
class _CreatedMixin(object):
    """Mixin adding a 'created' UNIX-timestamp column and a datetime synonym."""

    _created = Column('created', Integer, nullable=False)

    def pget_timestamp(self):
        return datetime.utcfromtimestamp(self._created)

    def pset_timestamp(self, dt):
        self._created = time.mktime(dt.timetuple())

    @declared_attr  # was '#declared_attr': the '@' was eaten by markdown
    def created(cls):
        return synonym('_created',
                       descriptor=property(cls.pget_timestamp, cls.pset_timestamp),
                       )
class Group(_CreatedMixin, Base):
    # note: adding _CreatedMixin to the bases defines both the column
    # and the synonym on this model
    __tablename__ = 'group'
    id = Column(Integer, primary_key=True)
Alternatively, if this is for all your classes, you could make _CreatedMixin a base class for all your models:
# make the mixin the declarative base class so every model gets the
# 'created' column and synonym automatically
Base = declarative_base(engine, cls=_CreatedMixin)


class Group(Base):
    __tablename__ = 'group'
    id = Column(Integer, primary_key=True)
Option-3: You could do any of the above using Hybrid Attributes
Note: make your set/get functions in-sync: either both or none use UTC-enabled functionality. Currently (unless you are in UTC-0) setting one value to created will not return the same one back.
I'm now using a different implementation. It's not related to the original title, but in case you need it.
Use sqlalchemy.types.TypeDecorator. Defining a table with sqlalchemy with a mysql unix timestamp
class UTCTimestampType(TypeDecorator):
    """Persist datetimes as integer UNIX timestamps."""

    impl = Integer

    def process_bind_param(self, value, dialect):
        # Python -> DB: datetime becomes integer seconds; None passes through.
        if value is None:
            return None  # support nullability
        if isinstance(value, datetime):
            return int(time.mktime(value.timetuple()))
        raise ValueError("Can operate only on datetime values. Offending value type: {0}".format(type(value).__name__))

    def process_result_value(self, value, dialect):
        # DB -> Python: integer seconds become a datetime; None stays None.
        if value is None:  # support nullability
            return None
        return datetime.fromtimestamp(float(value))
class ModelA(Base):
    __tablename__ = 'model_a'
    id = Column(Integer, primary_key=True)
    # stored as an integer; converted to/from datetime by UTCTimestampType
    created = Column(UTCTimestampType, nullable=False)
issues about alembic. Alembic: How to migrate custom type in a model?
# manually change the line (note: the generated name must match the class,
# 'UTCTimestampType' -- alembic emits it under the `sa.` prefix by mistake)
sa.Column('created', sa.UTCTimestampType(), nullable=False),
# to
sa.Column('created', sa.Integer(), nullable=False),
So I have a bunch of tables using SQLAlchemy that are modelled as objects which inherit from the result to a call to declarative_base(). Ie:
Base = declarative_base()


class Table1(Base):
    # __tablename__ & such here
    pass  # added: a comment-only class body is not valid Python


class Table2(Base):
    # __tablename__ & such here
    pass
Etc. I then wanted to have some common functionality available to each of my DB table classes, the easiest way to do this according to the docs is to just do multiple inheritance:
Base = declarative_base()


class CommonRoutines(object):
    @classmethod  # was '#classmethod': the '@' was eaten by markdown
    def somecommonaction(cls):
        # body here
        pass


class Table1(CommonRoutines, Base):
    # __tablename__ & such here
    pass


class Table2(CommonRoutines, Base):
    # __tablename__ & such here
    pass
The thing I don't like about this is A) multiple inheritance in general is a bit icky (gets tricky resolving things like super() calls, etc), B) if I add a new table I have to remember to inherit from both Base and CommonRoutines, and C) really that "CommonRoutines" class "is-a" type of table in a sense. Really what CommonBase is is an abstract base class which defines a set of fields & routines which are common to all tables. Put another way: "its-a" abstract table.
So, what I'd like is this:
Base = declarative_base()


class AbstractTable(Base):
    __metaclass__ = ABCMeta  # make into abstract base class
    # NOTE(review): on Python 3 the __metaclass__ attribute is ignored;
    # a real ABC needs metaclass=ABCMeta in the class statement, which
    # then clashes with the declarative metaclass -- the very problem
    # this question describes.
    # define common attributes for all tables here, like maybe:
    id = Column(Integer, primary_key=True)

    @classmethod  # was '#classmethod': the '@' was eaten by markdown
    def somecommonaction(cls):
        # body here
        pass


class Table1(AbstractTable):
    # __tablename__ & Table1 specific fields here
    pass


class Table2(AbstractTable):
    # __tablename__ & Table2 specific fields here
    pass
But this of course doesn't work, as I then have to A) define a __tablename__ for AbstractTable, B) the ABC aspect of things causes all sorts of headaches, and C) have to indicate some sort of DB relationship between AbstractTable and each individual table.
So my question: is it possible to achieve this in a reasonable way? Ideally I'd like to enforce:
No multiple inheritance
CommonBase/AbstractTable be abstract (ie cannot be instantiated)
SQLAlchemy version 0.7.3 introduced the __abstract__ directive which is used for abstract classes that should not be mapped to a database table, even though they are subclasses of sqlalchemy.ext.declarative.api.Base. So now you create a base class like this:
Base = declarative_base()


class CommonRoutines(Base):
    # __abstract__ tells SQLAlchemy not to map this class to a table
    __abstract__ = True

    id = Column(Integer, primary_key=True)

    def __init__(self):
        # ...
        pass  # added: a comment-only body is not valid Python
Notice how CommonRoutines doesn't have a __tablename__ attribute. Then create subclasses like this:
class Foo(CommonRoutines):
    """Concrete model: maps to table 'foo', inherits `id` from CommonRoutines."""
    __tablename__ = 'foo'
    name = Column(...)

    def __init__(self, name):
        super().__init__()
        self.name = name
        # ...
This will map to the table foo and inherit the id attribute from CommonRoutines.
Source and more information: http://docs.sqlalchemy.org/en/rel_0_7/orm/extensions/declarative.html#abstract
It is pretty straigh-forward, you just make declarative_base() to return a Base class which inherits from your CommonBase using cls= parameter. Also shown in Augmenting The Base docs. Your code might then look similar to below:
class CommonBase(object):
    @classmethod  # was '#classmethod': the '@' was eaten by markdown
    def somecommonaction(cls):
        # body here
        pass


# cls= makes declarative_base() build Base on top of CommonBase, so every
# model inherits the shared helpers without explicit multiple inheritance.
Base = declarative_base(cls=CommonBase)


class Table1(Base):
    # __tablename__ & Table1 specific fields here
    pass


class Table2(Base):
    # __tablename__ & Table2 specific fields here
    pass
You can use AbstractConcreteBase to make an absract base model:
from sqlalchemy.ext.declarative import AbstractConcreteBase


class AbstractTable(AbstractConcreteBase, Base):
    id = db.Column(db.Integer, primary_key=True)

    @classmethod  # was '#classmethod': the '@' was eaten by markdown
    def somecommonaction(cls):
        # body here
        pass
If you want to have several models with common columns, then you can use __abstract__ and #declared_attr to inherit shared table attributes. Example:
Base = declarative_base()


class CommonRoutines(Base):
    """Abstract base sharing id, modified_at and a modified_by FK column."""
    __abstract__ = True

    id = Column(Integer, primary_key=True)
    modified_at = Column(DateTime)

    @declared_attr  # was '#declared_attr': the '@' was eaten by markdown
    def modified_by(self):
        # `user.id` is another table called `user` with an `id` field
        return Column(Integer, ForeignKey('user.id', name='fk_modified_by_user_id'))

    def __init__(self):
        self.modified_by = None
        super().__init__()
class Foo(CommonRoutines):
    # gets id, modified_at and modified_by from CommonRoutines
    __tablename__ = 'foo'
    name = Column(...)
With this solution you will have a Foo table with the fields of Foo class (name) and the ones in CommonRoutines (id, modified_at and modified_by)