How to query with joins using SQLAlchemy? - python

I am trying to use SQLAlchemy with MySQL as the backend. The following are my table schemas (defined for the ORM using SQLAlchemy):
class ListItem(Base):
    __tablename__ = "listitem"

    ListItemID = Column(Integer, primary_key=True)
    ListItemTypeID = Column(Integer, ForeignKey("listitemtype.ListItemTypeID"))
    ModelID = Column(Integer, ForeignKey("model.ModelID"))
    RefCode = Column(String(25))

    def __init__(self, ListItemTypeID, ModelID, RefCode):
        self.ListItemTypeID = ListItemTypeID
        self.ModelID = ModelID
        self.RefCode = RefCode


class Model(Base):
    __tablename__ = "model"

    ModelID = Column(Integer, primary_key=True)
    Name = Column(String(255))

    def __init__(self, Name):
        self.Name = Name
I am not including the class mappers for the other reference tables such as ListItemType.
I would like to know how to write a query joining the "ListItem" table to the "Model" and "ListItemType" tables.
The SQL equivalent would be something like this:
select listitem.ListItemID, model.Name, listitemtype.Name from listitem
join listitemtype on listitem.ListItemTypeID = listitemtype.ListItemTypeID
join model on listitem.ModelID = model.ModelID
I am fairly new to SQLAlchemy. Thanks in advance for any help.

If the columns already have a foreign key relationship, the following should work.
Read the docs on joins.
result = (
    session.query(ListItem)
    .join(ListItemType)
    .join(Model)
    .with_entities(ListItem.ListItemID, Model.Name, ListItemType.Name)
    .all()
)
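If the foreign key metadata does not line up (as with the mismatched table names above), you can also spell out the join conditions explicitly. A minimal sketch, assuming a ListItemType class mapped with ListItemTypeID and Name columns (it is not shown in the question):
# Explicit ON clauses; ListItemType is assumed to be mapped elsewhere.
result = (
    session.query(ListItem.ListItemID, Model.Name, ListItemType.Name)
    .join(ListItemType, ListItem.ListItemTypeID == ListItemType.ListItemTypeID)
    .join(Model, ListItem.ModelID == Model.ModelID)
    .all()
)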

Related

How to dynamically create multiple Table Classes in SQLAlchemy?

I'm trying to create multiple classes in SQLAlchemy to generate specific tables.
I found different things here and more on https://docs.python.org/3/library/functions.html#import and https://python-course.eu/oop/dynamically-creating-classes-with-type.php
It seems clear enough, but their examples use global operations, and I don't understand exactly how I can use this inside a function, in something like this:
class _Table(Base):
    __tablename__ = '_table'
    id = Column(Integer, primary_key=True, autoincrement=True)
    name = Column(String)

table_list = ['Table1', 'Table2']

def table_class_generator(table_list):
    # here I need to create the tables

def tables_operators():
    # here I make operations on tables
So I need this to generate, at module level, something like:
class Table1(Base):
    __tablename__ = 'table1'
    id = ...
    name = ...

class Table2(Base):
    __tablename__ = 'table2'
    id = ...
    name = ...

def table_operators():
    #
Thanks to @Gord Thompson, who pointed me in another direction.
I believe the solution is the following; so far I have only tested the database initialization.
from sqlalchemy import Column, Integer, MetaData, Table

def tables_constructor(names: list) -> list:
    """
    Creates all tables in the database.
    :return: the list of Table objects
    """
    engine = get_engine()
    metadata_obj = MetaData()
    tables = []
    for name in names:
        table_obj = Table(
            name,
            metadata_obj,
            Column('id', Integer, primary_key=True, autoincrement=True),
            Column('timestamp', Integer)
        )
        tables.append(table_obj)
    metadata_obj.create_all(engine)
    return tables

def do_something(tables):
    ...  # operations on the tables go here

def start_app():
    names = ["Table_1", "Table_2"]
    tables = tables_constructor(names)
    do_something(tables)
Now I think I can operate on the tables, since they are mapped to Table objects.
It would be nice to improve this so the table columns are defined somewhere else; I tried, but couldn't get it working.
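One way to keep the column definitions in a single place is a small factory that returns fresh Column objects on every call, since a Column instance cannot be attached to more than one Table. A sketch under that assumption (default_columns is a made-up helper name, and get_engine is the helper from the snippet above):
from sqlalchemy import Column, Integer, MetaData, Table

def default_columns():
    # Return fresh Column objects on every call; a single Column instance
    # cannot be reused across tables.
    return [
        Column('id', Integer, primary_key=True, autoincrement=True),
        Column('timestamp', Integer),
    ]

def tables_constructor(names):
    metadata_obj = MetaData()
    tables = [Table(name, metadata_obj, *default_columns()) for name in names]
    metadata_obj.create_all(get_engine())
    return tables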

Is there a way to populate one table from multiple tables in SQLAlchemy?

I'm trying to build a database with SQLAlchemy. My problem is that I have two tables with the same column names, and I am trying to populate a third table from the other two.
I usually set the foreign key on one table and the relationship on the other, like this:
class TableA(Base):
    __tablename__ = "tableA"

    id = Column(Integer, primary_key=True)
    name = Column(String(100))
    age = Column(Integer)
    name_relation = relationship("TableC", backref='owner')


class TableC(Base):
    __tablename__ = "tableC"

    id = Column(Integer, primary_key=True)
    name = Column(String(100), ForeignKey('tableA.name'))
    age = Column(Integer)
You can see that this method only works with two tables, because the ForeignKey on tableC's name column points specifically at tableA.
Is there a way to do that?
Thanks
In SQL, the query you'd be looking for is
INSERT INTO C (id, name, age) (
    SELECT * FROM A
    UNION ALL
    SELECT * FROM B
)
As per this answer, the SQLAlchemy equivalent is
session = Session()
query = session.query(TableA).union_all(session.query(TableB))
stmt = TableC.__table__.insert().from_select(['id', 'name', 'age'], query.statement)
or equivalently
stmt = TableC.__table__.insert().from_select(
    ['id', 'name', 'age'],
    TableA.__table__.select().union_all(TableB.__table__.select())
)
After which you can execute it using connection.execute(stmt) or session.execute(stmt), depending on what you're using.
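For example, a minimal sketch of running it with a session, assuming a configured sessionmaker named Session and SQLAlchemy 1.4+ (which allows using the Session as a context manager):
with Session() as session:
    query = session.query(TableA).union_all(session.query(TableB))
    stmt = TableC.__table__.insert().from_select(['id', 'name', 'age'], query.statement)
    session.execute(stmt)   # runs the INSERT ... SELECT on the database
    session.commit()        # persist the copied rows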

sqlalchemy: Select from table where column in QUERY

I have a situation where I am trying to count the number of rows in a table where the column value is in a subquery. For example, let's say that I have some SQL like this:
select count(*) from table1
where column1 in (select column2 from table2);
I have my tables defined like so:
class table1(Base):
    __tablename__ = "table1"
    __table_args__ = {'schema': 'myschema'}

    acct_id = Column(DECIMAL(precision=15), primary_key=True)


class table2(Base):
    __tablename__ = "table2"
    __table_args__ = {'schema': 'myschema'}

    ban = Column(String(length=128), primary_key=True)
The tables are reflected from the database so there are other attributes present that aren't explicitly specified in the class definition.
I can try to write my query, but here is where I am getting stuck:
qry = self.session.query(func.?(...))  # what to put here?
res = qry.one()
I tried looking through the documentation here, but I don't see anything comparable to the SQL IN keyword, which is a feature of many SQL dialects.
I am using Teradata as my backend if that matters.
sub_stmt = session.query(table2.ban)
stmt = session.query(table1).filter(table1.acct_id.in_(sub_stmt))
data = stmt.all()
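If you specifically want the count rather than the rows, a sketch using func.count with the column names from the class definitions above (acct_id on table1, ban on table2):
from sqlalchemy import func

sub_stmt = session.query(table2.ban)
count = (
    session.query(func.count())        # emits COUNT(*)
    .select_from(table1)
    .filter(table1.acct_id.in_(sub_stmt))
    .scalar()
)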

How to use SQLAlchemy table schema to load data

I have two scripts, schema.py and load_data.py. In schema.py, I define the schema for over 20 tables using the SQLAlchemy declarative Base. Two of the tables look like this:
schema.py
Base = declarative_base()
meta = MetaData()

class Table1(Base):
    __tablename__ = 'table1'
    id = Column(Integer, primary_key=True)
    name = Column(String)

class Table2(Base):
    __tablename__ = 'table2'
    id = Column(Integer, primary_key=True)
    bdate = Column(Date)

...

class Table20(Base):
    __tablename__ = 'table20'
    id = Column(Integer, primary_key=True)
    bdate = Column(Date)
I want to use load_data.py to copy those ~20 tables from one database to another. My question is: how do I create the tables in load_data.py using the schema I defined in schema.py? Following the examples in the Introductory Tutorial of Python's SQLAlchemy, I use import to load all of the table schema classes, but I find it too messy. Is there a better way to handle this situation? I am new to SQLAlchemy, so please bear with me if this question seems too naive.
load_data.py
from schema import Base, Table1, Table2, Table3, Table4, Table5, Table6, Table7, Table8, Table9, Table10, ..., Table20

src_engine = create_engine('sqlite:///sqlite_test.db')
dst_engine = create_engine('postgresql:///postgresql_test.db')

Base.metadata.create_all(dst_engine)
tables = Base.metadata.tables
for tbl in tables:
    data = src_engine.execute(tables[tbl].select()).fetchall()
    for a in data:
        print(a)
    if data:
        dst_engine.execute(tables[tbl].insert(), data)
Try from schema import *, which imports all members from a module. See also these answers regarding the difference between import schema and from schema import x.
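Alternatively, since every mapped class registers its Table on Base.metadata, you can drive the copy from the metadata itself and avoid importing the classes one by one. A sketch, assuming src_engine and dst_engine are created as in the snippet above and SQLAlchemy 1.4+ is in use:
from schema import Base

Base.metadata.create_all(dst_engine)

with src_engine.connect() as src, dst_engine.begin() as dst:
    # sorted_tables yields tables in foreign-key dependency order
    for table in Base.metadata.sorted_tables:
        rows = [dict(row) for row in src.execute(table.select()).mappings()]
        if rows:
            dst.execute(table.insert(), rows)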

Dynamically setting __tablename__ for sharding in SQLAlchemy?

In order to handle a growing database table, we are sharding on table name. So we could have database tables that are named like this:
table_md5one
table_md5two
table_md5three
All tables have the exact same schema.
How do we use SQLAlchemy and dynamically specify the table name for the class that corresponds to this? It looks like declarative_base() classes need to have __tablename__ pre-specified.
There will eventually be too many tables to manually specify derived classes from a parent/base class. We want to be able to build a class whose __tablename__ can be set up dynamically (maybe passed as a parameter to a function).
OK, we went with the classical SQLAlchemy mapping rather than the declarative one.
So we create a dynamic table object like this:
from sqlalchemy import MetaData, Table, Column, DATE
from sqlalchemy.orm import mapper, clear_mappers

def get_table_object(self, md5hash):
    metadata = MetaData()
    table_name = 'table_' + md5hash
    table_object = Table(table_name, metadata,
                         Column('Column1', DATE, nullable=False),
                         Column('Column2', DATE, nullable=False))
    clear_mappers()
    mapper(ActualTableObject, table_object)
    return ActualTableObject
Where ActualTableObject is the class mapping to the table.
In Augmenting the Base you will find a way to use a custom Base class that can, for example, calculate the __tablename__ attribute dynamically:
class Base(object):
    @declared_attr
    def __tablename__(cls):
        return cls.__name__.lower()
The only problem here is that I don't know where your hash comes from, but this should give you a good starting point.
If you require this behavior not for all of your tables but only for one, you could just use declared_attr on the table you are interested in sharding.
Because I insisted on using declarative classes with their __tablename__ dynamically specified by a given parameter, after days of failing with other solutions and hours of studying SQLAlchemy internals, I came up with the following solution, which I believe is simple, elegant, and race-condition free.
def get_model(suffix):
    DynamicBase = declarative_base(class_registry=dict())

    class MyModel(DynamicBase):
        __tablename__ = 'table_{suffix}'.format(suffix=suffix)

        id = Column(Integer, primary_key=True)
        name = Column(String)
        ...

    return MyModel
Since they have their own class_registry, you will not get that warning saying:
This declarative base already contains a class with the same class name and module name as mypackage.models.MyModel, and will be replaced in the string-lookup table.
Hence, you will not be able to reference them from other models with string lookup. However, it works perfectly fine to use these on-the-fly declared models for foreign keys as well:
ParentModel1 = get_model(123)
ParentModel2 = get_model(456)

class MyChildModel(BaseModel):
    __tablename__ = 'table_child'

    id = Column(Integer, primary_key=True)
    name = Column(String)
    parent_1_id = Column(Integer, ForeignKey(ParentModel1.id))
    parent_2_id = Column(Integer, ForeignKey(ParentModel2.id))
    parent_1 = relationship(ParentModel1)
    parent_2 = relationship(ParentModel2)
If you only use them to query/insert/update/delete, without any reference left (such as a foreign key reference from another table), they, their base classes, and their class_registry will be garbage collected, so no trace is left.
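A quick usage sketch of the factory above; the suffix, engine URL, and column value are illustrative assumptions:
from sqlalchemy import create_engine
from sqlalchemy.orm import Session

engine = create_engine('sqlite:///shards.db')

ShardModel = get_model('2024_01')
ShardModel.metadata.create_all(engine)  # creates table_2024_01 if it does not exist

with Session(engine) as session:
    session.add(ShardModel(name='example'))
    session.commit()
    rows = session.query(ShardModel).all()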
You can write a function that takes the table name as a parameter and returns a class with the appropriate attributes set.
def get_class(table_name):
    class GenericTable(Base):
        __tablename__ = table_name

        ID = Column(types.Integer, primary_key=True)

        def function(self):
            ...

    return GenericTable
Then you can create a table using:
get_class("test").__table__.create(bind=engine) # See sqlachemy.engine
Try this
import zlib
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, Integer, BigInteger, DateTime, String
from datetime import datetime

BASE = declarative_base()
ENTITY_CLASS_DICT = {}


class AbsShardingClass(BASE):
    # the shared columns (including the primary key) for all shard tables go here
    __abstract__ = True


def get_class_name_and_table_name(hashid):
    return 'ShardingClass%s' % hashid, 'sharding_class_%s' % hashid


def get_sharding_entity_class(hashid):
    """
    :param hashid: hashid
    :type hashid: int
    :rtype: AbsClientUserAuth
    """
    if hashid not in ENTITY_CLASS_DICT:
        class_name, table_name = get_class_name_and_table_name(hashid)
        cls = type(class_name, (AbsShardingClass,),
                   {'__tablename__': table_name})
        ENTITY_CLASS_DICT[hashid] = cls
    return ENTITY_CLASS_DICT[hashid]


cls = get_sharding_entity_class(1)
print(session.query(cls).get(100))
Instead of imperatively creating Table objects, you can use the usual declarative_base and build a closure to set the table name, as follows:
def make_class(Base, table_name):
    class User(Base):
        __tablename__ = table_name

        id = Column(Integer, primary_key=True)
        name = Column(String)

    return User

Base = declarative_base()
engine = make_engine()

custom_named_usertable = make_class(Base, 'custom_name')
Base.metadata.create_all(engine)

session = make_session(engine)
new_user = custom_named_usertable(name='Adam')
session.add(new_user)
session.commit()
session.close()
engine.dispose()
You just need to create a class object and pass it to declarative_base as cls.
from sqlalchemy.ext.declarative import declarative_base, declared_attr

class Base(object):
    @declared_attr
    def __tablename__(cls):
        return cls.__name__.lower()

Base = declarative_base(cls=Base)
