I have two scripts, schema.py and load_data.py. In schema.py I define the schemas for over 20 tables using the SQLAlchemy declarative Base. The tables look like this:
schema.py
Base = declarative_base()
meta = MetaData()

class Table1(Base):
    __tablename__ = 'table1'
    id = Column(Integer, primary_key=True)
    name = Column(String)

class Table2(Base):
    __tablename__ = 'table2'
    id = Column(Integer, primary_key=True)
    bdate = Column(Date)

...

class Table20(Base):
    __tablename__ = 'table20'
    id = Column(Integer, primary_key=True)
    bdate = Column(Date)
I want to use load_data.py to copy those ~20 tables from one database to another. My question is: how do I create the tables in load_data.py using the schema I defined in schema.py? Following the examples in the Introductory Tutorial of Python's SQLAlchemy, I import every table class individually, but I find that too messy. Is there a better way to handle this situation? I am new to SQLAlchemy, so please bear with me if this question seems too naive.
load_data.py
from schema import Base, Table1, Table2, Table3, Table4, Table5, Table6, Table7, Table8, Table9, Table10, ..., Table20

src_engine = create_engine('sqlite:///sqlite_test.db')
dst_engine = create_engine('postgresql:///postgresql_test.db')

Base.metadata.create_all(dst_engine)

tables = Base.metadata.tables
for tbl in tables:
    data = src_engine.execute(tables[tbl].select()).fetchall()
    for a in data:
        print(a)
    if data:
        dst_engine.execute(tables[tbl].insert(), data)
Try from schema import *, which imports all members from a module. See also these answers regarding the difference between import schema and from schema import x.
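For reference, a minimal sketch (untested against your databases) of what load_data.py could look like with that wildcard import. Importing schema registers every model on Base.metadata, so the copy loop never has to name the classes:

# Wildcard import brings in Base and every Table class defined in schema.py
from schema import *

from sqlalchemy import create_engine

src_engine = create_engine('sqlite:///sqlite_test.db')
dst_engine = create_engine('postgresql:///postgresql_test.db')

# create all ~20 tables on the destination from the shared metadata
Base.metadata.create_all(dst_engine)

for table in Base.metadata.sorted_tables:   # sorted_tables respects FK order
    rows = src_engine.execute(table.select()).fetchall()
    if rows:
        dst_engine.execute(table.insert(), rows)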
I'm trying to create multiple classes in SQLAlchemy to generate specific tables.
I found a few different approaches, for example https://docs.python.org/3/library/functions.html#import and https://python-course.eu/oop/dynamically-creating-classes-with-type.php
They seem clear enough, but their examples use global operations, and I don't understand exactly how to use them inside a function in something like this:
class _Table(Base):
    __tablename__ = '_table'
    id = Column(Integer, primary_key=True, autoincrement=True)
    name = Column(String)

table_list = ['Table1', 'Table2']

def table_class_generator(table_list):
    # here I need to create the tables

def tables_operators():
    # here I make operations on tables
So I need this to expand, at module level, into something like:
class Table1(Base):
    __tablename__ = 'table1'
    id = ...
    name = ...

class Table2(Base):
    __tablename__ = 'table2'
    id = ...
    name = ...

def table_operators():
    #
Thanks @Gord Thompson, who put me on another track.
I believe the solution is the following; I have only tested the database initialization.
def tables_constructor(names: list) -> list:
    """
    Creates all tables in the database
    :return:
    """
    engine = get_engine()
    metadata_obj = MetaData()
    table_names = []
    for name in names:
        name_obj = Table(
            name,
            metadata_obj,
            Column('id', Integer, primary_key=True, autoincrement=True),
            Column('timestamp', Integer)
        )
        table_names.append(name_obj)
    metadata_obj.create_all(engine)
    return table_names

def do_something(tables):
    #

def start_app():
    names = ["Table_1", "Table_2"]
    tables = tables_constructor(names)
    do_something(tables)
Now I think I can operate on the tables, as they are mapped to Table objects.
It would be nice to improve this so the table columns are defined somewhere else; I tried, but it isn't working.
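One possible way to pull the column definitions out of tables_constructor (my own sketch, not something I have tested in production) is to pass in a factory that builds fresh Column objects for each table, since a given Column instance can only be attached to one Table:

from sqlalchemy import Column, Integer, MetaData, Table

def default_columns():
    # return a new list of Column objects per call; Columns cannot be reused across Tables
    return [
        Column('id', Integer, primary_key=True, autoincrement=True),
        Column('timestamp', Integer),
    ]

def tables_constructor(names, column_factory=default_columns):
    metadata_obj = MetaData()
    tables = [Table(name, metadata_obj, *column_factory()) for name in names]
    metadata_obj.create_all(get_engine())  # get_engine() as in the snippet above
    return tables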
I need some help converting this query into SQLAlchemy.
select field
from table t1
join table t2 on t1.detail_id = t2.id
join table t3 on t3.id = t2.rate_id
where t2.name = 'fred'
  and t3.rate_type = 'Custom'
  and t3.description = 'Default';
I have been able to convert inner join queries with two tables, but need some help with this one.
Appreciate your help. TIA.
If a plain SQL query is enough, you can try:
session.execute("SELECT t1.field AS t1_field "
"FROM t1 JOIN t2 ON t1.detail_id = t2.id JOIN t3 ON t2.rate_id = t3.id "
"WHERE t2.name = :name AND t3.rate_type = :rate_type AND t3.description = :description",
{'name': 'fred', 'rate_type': 'Custom', 'description': 'Default'})
But if you want to use the SQLAlchemy declarative base, then the query would look like this:
results = session.query(T1.field).join(T2, T1.detail_id == T2.id).join(T3, T2.rate_id == T3.id).\
    filter(T2.name == 'fred').\
    filter(T3.rate_type == 'Custom').\
    filter(T3.description == 'Default').all()
For the following models:
from sqlalchemy import create_engine, Integer, ForeignKey, String, Column
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()

class T1(Base):
    __tablename__ = 't1'
    id = Column(Integer, primary_key=True)
    field = Column(String)
    detail_id = Column(Integer, ForeignKey("t2.id"))

class T2(Base):
    __tablename__ = 't2'
    id = Column(Integer, primary_key=True)
    name = Column(String)
    rate_id = Column(Integer, ForeignKey("t3.id"))

class T3(Base):
    __tablename__ = 't3'
    id = Column(Integer, primary_key=True)
    rate_type = Column(String)
    description = Column(String)
I hope it helps.
SQLAlchemy provides both an ORM way and an SQL way to operate on the database. You can also query with raw SQL (or the SQLAlchemy SQL Expression language).
(1) Raw SQL query. Sample code:
import sqlalchemy
from sqlalchemy import create_engine

engine = create_engine(...)
q = 'SELECT foo FROM t_bar WHERE col_name=:v_parameters'
rs = engine.execute(sqlalchemy.text(q), v_parameters=your_actual_value)
Check execute and its basic usage. Also take a look at ResultProxy to understand how to operate on the returned result.
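A small usage sketch (assuming the 1.x-style engine.execute shown above) of reading rows from the ResultProxy:

for row in rs:            # ResultProxy is iterable
    print(row['foo'])     # columns are accessible by name or by index
rs.close()                # release the underlying cursor when done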
(2) ORM. If you want to use the ORM, you first have to define models and mapped classes. Sample code:
from sqlalchemy import Column, ForeignKey
from sqlalchemy.types import String, Integer
from sqlalchemy.orm import relationship
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()

class Father(Base):
    __tablename__ = 'father'
    id = Column(Integer, primary_key=True)
    name = Column(String(31), unique=True, nullable=False)
    # Relationship attributes
    children = relationship('Son',
                            passive_deletes=True,
                            back_populates='parent')

class Son(Base):
    __tablename__ = 'son'
    id = Column(Integer, primary_key=True)
    name = Column(String(31), unique=True, nullable=False)
    # foreign keys
    p_id = Column(Integer, ForeignKey('father.id',
                                      ondelete='CASCADE',
                                      onupdate='CASCADE'))
    # Relationship attributes
    parent = relationship('Father',
                          passive_deletes=True,
                          back_populates='children')
Then you can do ORM query operations:
session.query(Father).join(Father.children).filter(Son.name == 'Sam')
which is equivalent to the SQL query
SELECT father.id, father.name FROM father JOIN son ON father.id = son.p_id WHERE son.name = 'Sam'.
Please check ORM mapper and ORM Query for more information.
For your application: if you have mapped all of your tables properly, you can use the ORM way. If you do not need ORM features, you can just use raw SQL queries.
Thanks.
I am trying to use SQLAlchemy with MySQL as the backend. The following are my table schemas (defined for the ORM using SQLAlchemy):
class ListItem(Base):
    """"""
    __tablename__ = "listitem"
    ListItemID = Column(Integer, primary_key=True)
    ListItemTypeID = Column(Integer, ForeignKey("listitemtype.ListItemTypeID"))
    ModelID = Column(Integer, ForeignKey("model.ModelID"))
    RefCode = Column(String(25))

    def __init__(self, ListItemTypeID, ModelID, RefCode):
        self.ListItemTypeID = ListItemTypeID
        self.ModelID = ModelID
        self.RefCode = RefCode

class Model(Base):
    """"""
    __tablename__ = "model"
    ModelID = Column(Integer, primary_key=True)
    Name = Column(String(255))

    def __init__(self, Name):
        self.Name = Name
I am not including the mapped classes for the other reference tables, like ListItemType.
I would like to know how to write a query joining the "ListItem" table to the "Model" and "ListItemType" tables.
The SQL equivalent would be something like this:
select listitem.ListItemID, model.Name, listitemtype.Name from listitem
join listitemtype on listitem.ListItemTypeID = listitemtype.ListItemTypeID
join model on listitem.ModelID = model.ModelID
I am fairly new to using SQLAlchemy. Thanks in advance for any help.
If the columns already have a foreign key relationship the following should work.
Read the docs on joins.
result = (session.query(ListItem)
          .join(ListItemType)
          .join(Model)
          .with_entities(ListItem.ListItemID, Model.Name, ListItemType.Name)
          .all())
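If the foreign keys are not declared (or are ambiguous), a variant with explicit join conditions should also work. This is only a sketch and assumes a ListItemType model with ListItemTypeID and Name columns, which the question omits:

result = (session.query(ListItem.ListItemID, Model.Name, ListItemType.Name)
          .join(ListItemType, ListItem.ListItemTypeID == ListItemType.ListItemTypeID)
          .join(Model, ListItem.ModelID == Model.ModelID)
          .all())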
I'm using SQLAlchemy to query a number of similar tables, and union the results. The tables are rows of customer information, but our current database structures it so that different groups of customers are in their own tables e.g. client_group1, client_group2, client_group3:
client_group1:
| id | name | email               |
| 1  | john | johnsmith@gmail.com |
| 2  | greg | gregjones@gmail.com |
Each of the other tables has identical columns. If I'm using SQLAlchemy declarative_base, I can have a class for client_group1 like the following:
class ClientGroup1(Base):
    __tablename__ = 'client_group1'
    __table_args__ = {u'schema': 'clients'}
    id = Column(Integer, primary_key=True)
    name = Column(String(32))
    email = Column(String(32))
Then I can do queries such as:
session.query(ClientGroup1.name)
However, if I use union_all to combine a bunch of client tables into a viewport, such as:
query1 = session.query(ClientGroup1.name)
query2 = session.query(ClientGroup2.name)
viewport = union_all(query1, query2)
then I'm not sure how to map a viewport to an object, and instead I have to access viewport columns using:
viewport.c.name
Is there any way to map the viewport to a specific table structure? Especially considering the fact that each class points to a different __tablename__.
Read the Concrete Table Inheritance documentation for an idea of how this can be done. The code below is a running example:
from sqlalchemy import create_engine, Column, String, Integer
from sqlalchemy.orm import sessionmaker, configure_mappers
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.ext.declarative import AbstractConcreteBase

engine = create_engine('sqlite:///:memory:', echo=True)
Session = sessionmaker(bind=engine)
session = Session()
Base = declarative_base(engine)

class ClientGroupBase(AbstractConcreteBase, Base):
    pass

class ClientGroup1(ClientGroupBase):
    __tablename__ = 'client_group1'
    # __table_args__ = {'schema': 'clients'}
    __mapper_args__ = {
        'polymorphic_identity': 'client_group1',
        'concrete': True,
    }
    id = Column(Integer, primary_key=True)
    name = Column(String(32))
    email = Column(String(32))

class ClientGroup2(ClientGroupBase):
    __tablename__ = 'client_group2'
    # __table_args__ = {'schema': 'clients'}
    __mapper_args__ = {
        'polymorphic_identity': 'client_group2',
        'concrete': True,
    }
    id = Column(Integer, primary_key=True)
    name = Column(String(32))
    email = Column(String(32))

def _test_model():
    # create all tables
    Base.metadata.create_all()
    print('-' * 80)
    # configure mappers (see documentation)
    configure_mappers()
    print('-' * 80)
    # add some test data
    session.add(ClientGroup1(name="name1"))
    session.add(ClientGroup1(name="name1"))
    session.add(ClientGroup2(name="name1"))
    session.add(ClientGroup2(name="name1"))
    session.commit()
    print('-' * 80)
    # perform a query
    q = session.query(ClientGroupBase).all()
    for r in q:
        print(r)

if __name__ == '__main__':
    _test_model()
The above example has the added benefit that you can also create new objects, as well as query only some of the tables.
You could also do it by mapping an SQL VIEW to a class, but you need to specify a primary key explicitly (see Is possible to mapping view with class using mapper in SqlAlchemy?). In your case, I am afraid, this might not work because the same PK value can appear in multiple tables, and using a multi-column PK might not be the best idea.
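For completeness, a rough sketch of that view-style approach (my own assumption, not part of the example above): map a read-only class onto the UNION ALL selectable with the classic mapper() API, and declare an artificial composite primary key, which is exactly the caveat mentioned:

from sqlalchemy import literal, select, union_all
from sqlalchemy.orm import mapper

# label each branch so (src, id) can serve as an artificial composite key
client_union = union_all(
    select([ClientGroup1.__table__, literal('g1').label('src')]),
    select([ClientGroup2.__table__, literal('g2').label('src')]),
).alias('all_clients')

class AllClients(object):
    """Read-only class mapped onto the union selectable."""

mapper(AllClients, client_union,
       primary_key=[client_union.c.src, client_union.c.id])

# example query:
# session.query(AllClients).filter(AllClients.name == 'john').all()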
In order to handle a growing database table, we are sharding on table name. So we could have database tables that are named like this:
table_md5one
table_md5two
table_md5three
All tables have the exact same schema.
How do we use SQLAlchemy and dynamically specify the __tablename__ for the class that corresponds to this? It looks like declarative_base() classes need to have __tablename__ pre-specified.
There will eventually be too many tables to manually specify derived classes from a parent/base class. We want to be able to build a class that can have the tablename set up dynamically (maybe passed as a parameter to a function.)
OK, we went with the classical SQLAlchemy mapping rather than the declarative one.
So we create a dynamic table object like this:
from sqlalchemy import MetaData, Table, Column, DATE
from sqlalchemy.orm import mapper, clear_mappers

def get_table_object(self, md5hash):
    metadata = MetaData()
    table_name = 'table_' + md5hash
    table_object = Table(table_name, metadata,
                         Column('Column1', DATE, nullable=False),
                         Column('Column2', DATE, nullable=False)
                         )
    clear_mappers()
    mapper(ActualTableObject, table_object)
    return ActualTableObject
Where ActualTableObject is the class mapping to the table.
In Augmenting the Base you can find a way of using a custom Base class that can, for example, calculate the __tablename__ attribute dynamically:
from sqlalchemy.ext.declarative import declared_attr

class Base(object):
    @declared_attr
    def __tablename__(cls):
        return cls.__name__.lower()
The only problem here is that I don't know where your hash comes from, but this should give a good starting point.
If you require this algorithm not for all your tables but only for one, you could just use declared_attr on the table you are interested in sharding.
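A brief sketch of that single-table variant (the shard suffix below is a hypothetical placeholder, since it is unclear where your hash comes from):

from sqlalchemy import Column, Integer
from sqlalchemy.ext.declarative import declarative_base, declared_attr

Base = declarative_base()
SHARD_SUFFIX = 'md5one'   # hypothetical: substitute your real hash here

class ShardedTable(Base):
    @declared_attr
    def __tablename__(cls):
        # only this class derives its table name from the shard suffix
        return 'table_' + SHARD_SUFFIX

    id = Column(Integer, primary_key=True)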
Because I insist on using declarative classes with their __tablename__ specified dynamically by a given parameter, after days of failing with other solutions and hours of studying SQLAlchemy internals, I came up with the following solution, which I believe is simple, elegant and race-condition free.
def get_model(suffix):
    DynamicBase = declarative_base(class_registry=dict())

    class MyModel(DynamicBase):
        __tablename__ = 'table_{suffix}'.format(suffix=suffix)

        id = Column(Integer, primary_key=True)
        name = Column(String)
        ...

    return MyModel
Since they have their own class_registry, you will not get that warning saying:
This declarative base already contains a class with the same class name and module name as mypackage.models.MyModel, and will be replaced in the string-lookup table.
Hence, you will not be able to reference them from other models with string lookup. However, it works perfectly fine to use these on-the-fly declared models for foreign keys as well:
ParentModel1 = get_model(123)
ParentModel2 = get_model(456)

class MyChildModel(BaseModel):
    __tablename__ = 'table_child'

    id = Column(Integer, primary_key=True)
    name = Column(String)
    parent_1_id = Column(Integer, ForeignKey(ParentModel1.id))
    parent_2_id = Column(Integer, ForeignKey(ParentModel2.id))
    parent_1 = relationship(ParentModel1)
    parent_2 = relationship(ParentModel2)
If you only use them to query/insert/update/delete, with no reference left over such as a foreign key reference from another table, then they, their base classes and their class_registry will be garbage collected, so no trace is left.
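A short usage sketch (assuming an engine and a session already exist, and that the table needs to be created first):

TableOne = get_model('one')          # declarative class mapped to 'table_one'
TableOne.__table__.create(bind=engine, checkfirst=True)

session.add(TableOne(name='example'))
session.commit()
rows = session.query(TableOne).filter_by(name='example').all()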
You can write a function that takes the table name as a parameter and returns a class with the appropriate attributes set.
def get_class(table_name):
    class GenericTable(Base):
        __tablename__ = table_name
        ID = Column(types.Integer, primary_key=True)

        def function(self):
            ......
    return GenericTable
Then you can create a table using:
get_class("test").__table__.create(bind=engine) # See sqlachemy.engine
Try this
import zlib

from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, Integer, BigInteger, DateTime, String
from datetime import datetime

BASE = declarative_base()
ENTITY_CLASS_DICT = {}

class AbsShardingClass(BASE):
    __abstract__ = True
    # primary key shared by every shard table (add the real shard columns here)
    id = Column(Integer, primary_key=True)

def get_class_name_and_table_name(hashid):
    return 'ShardingClass%s' % hashid, 'sharding_class_%s' % hashid

def get_sharding_entity_class(hashid):
    """
    :param hashid: hashid
    :type hashid: int
    :rtype: AbsShardingClass
    """
    if hashid not in ENTITY_CLASS_DICT:
        class_name, table_name = get_class_name_and_table_name(hashid)
        cls = type(class_name, (AbsShardingClass,),
                   {'__tablename__': table_name})
        ENTITY_CLASS_DICT[hashid] = cls
    return ENTITY_CLASS_DICT[hashid]

cls = get_sharding_entity_class(1)
print(session.query(cls).get(100))
Instead of imperatively creating a Table object, you can use the usual declarative_base and make a closure to set the table name, as follows:
def make_class(Base, table_name):
    class User(Base):
        __tablename__ = table_name
        id = Column(Integer, primary_key=True)
        name = Column(String)
    return User

Base = declarative_base()
engine = make_engine()
custom_named_usertable = make_class(Base, 'custom_name')
Base.metadata.create_all(engine)
session = make_session(engine)

new_user = custom_named_usertable(name='Adam')
session.add(new_user)
session.commit()
session.close()
engine.dispose()
You just need to create a class object for Base.
from sqlalchemy.ext.declarative import declarative_base, declared_attr

class Base(object):
    @declared_attr
    def __tablename__(cls):
        return cls.__name__.lower()

Base = declarative_base(cls=Base)
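A quick usage sketch: with the custom Base above, any mapped class gets its table name derived from its class name automatically (the Customer class here is just an illustration):

from sqlalchemy import Column, Integer

class Customer(Base):
    id = Column(Integer, primary_key=True)

print(Customer.__tablename__)   # -> 'customer'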