SQLAlchemy - create table template - python

I am just starting with SQLAlchemy. I am going to have a lot of tables in my model, and I would like to have a separate file for each table in my model.
I am currently using the following code in meta.py:
from sqlalchemy import MetaData
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.dialects import postgresql
import sqlalchemy as sa
__all__ = ['Session', 'engine', 'metadata']
# SQLAlchemy database engine. Updated by model.init_model()
engine = None
# SQLAlchemy session manager. Updated by model.init_model()
Session = scoped_session(sessionmaker())
# Global metadata. If you have multiple databases with overlapping table
# names, you'll need a metadata for each database
metadata = MetaData()
# declarative table definitions
Base = declarative_base()
Base.metadata = metadata
schema = 'srbam_dev'
And the following in __init__.py:
"""The application's model objects"""
import sqlalchemy as sa
from sqlalchemy import orm
from models import meta
from models.filers import Filer
from models.vfilers import Vfiler
from models.filer_options import FilerOption
def init_models(engine):
    """Call me before using any of the tables or classes in the model"""
    ## Reflected tables must be defined and mapped here
    #global reflected_table
    #reflected_table = sa.Table("Reflected", meta.metadata, autoload=True,
    #    autoload_with=engine)
    #orm.mapper(Reflected, reflected_table)
    #
    meta.engine = sa.create_engine(engine)
    meta.Session.configure(bind=meta.engine)

class Basic_Table(object):
    id = sa.Column(
        postgresql.UUID(),
        nullable=False,
        primary_key=True
    )
    created = sa.Column(
        sa.types.DateTime(True),
        nullable=False
    )
    modified = sa.Column(
        sa.types.DateTime(True),
        nullable=False
    )
And then the following in all of my models:
from models.meta import Base
from models.meta import Basic_Table
from models.meta import schema
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
class Filer(Base, Basic_Table):
This works just fine as long as I do not use any foreign keys on the tables. Once I add a ForeignKey, SQLAlchemy says:
sqlalchemy.exc.NoReferencedTableError: Foreign key associated with column 't_vfilers.filer_id' could not find table 't_filers' with which to generate a foreign key to target column 'id'
I tried to define the id column directly in the Filer class (and removed Basic_Table from the declaration), but this does not solve the issue.
My code for creating the database looks like this:
#!/usr/bin/python
import ConfigParser
from sqlalchemy.engine.url import URL
from models import *
config = ConfigParser.RawConfigParser()
config.read('conf/core.conf')
db_url = URL(
    'postgresql+psycopg2',
    config.get('database', 'username'),
    config.get('database', 'password'),
    config.get('database', 'host'),
    config.get('database', 'port'),
    config.get('database', 'dbname')
)
init_models(db_url)
meta.Base.metadata.drop_all(bind=meta.engine)
meta.Base.metadata.create_all(bind=meta.engine)
Does anyone have an idea how to fix this issue?

Marek, try defining the foreign key along with the schema name, i.e. 'test.t_vfilers.filer_id' (here 'test' is the schema name); this will solve the problem.
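For example, here is a rough sketch of what that could look like with the question's schema variable; the column layout is an assumption, only the t_filers/t_vfilers names come from the error message:
# Hedged sketch: schema-qualified tables plus a schema-qualified ForeignKey target.
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from models.meta import Base, Basic_Table, schema  # schema = 'srbam_dev'

class Filer(Base, Basic_Table):
    __tablename__ = 't_filers'
    __table_args__ = {'schema': schema}
    name = sa.Column(sa.types.Unicode(255))  # placeholder column

class Vfiler(Base, Basic_Table):
    __tablename__ = 't_vfilers'
    __table_args__ = {'schema': schema}
    # Qualify the referenced table with the schema so the MetaData can resolve it:
    filer_id = sa.Column(
        postgresql.UUID(),
        sa.ForeignKey(schema + '.t_filers.id'),
        nullable=False
    )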

Have you remembered to import the different modules containing the models?
In my __init__.py I have, at the bottom, a ton of:
from comparty3.model.users import User, UserGroup, Permission
from comparty3.model.pages import PageGroup, Page
etc...
If that's not the issue then I'm not sure; however, have you tried changing:
metadata = MetaData()
# declarative table definitions
Base = declarative_base()
Base.metadata = metadata
to:
# declarative table definitions
Base = declarative_base()
metadata = Base.metadata
I'm guessing here, but it may be that declarative_base() creates a special metadata object.
This is how it is defined in my Pylons projects (which is where I'm guessing your code is from too).
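For reference, a minimal sketch of meta.py with that change applied (everything else kept from the question):
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.ext.declarative import declarative_base

__all__ = ['Session', 'engine', 'metadata']

# SQLAlchemy database engine. Updated by model.init_model()
engine = None
# SQLAlchemy session manager. Updated by model.init_model()
Session = scoped_session(sessionmaker())

# Let declarative_base() own the MetaData and expose it under the old name,
# so existing `from models.meta import metadata` imports keep working.
Base = declarative_base()
metadata = Base.metadata

schema = 'srbam_dev'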

Related

SQLAlchemy Not Creating Tables in Postgres Database

I am having trouble writing tables to a postgres database using SQLAlchemy ORM and Python scripts.
I know the problem has something to do with incorrect Session imports because when I place all the code below into a single file, the script creates the table without trouble.
However, when I break the script up into multiple files (necessary for this project), I receive the error "psycopg2.errors.UndefinedTable: relation "user" does not exist".
I have read many posts here on SO, tried reorganising my files and the function call order, changed from non-scoped to scoped sessions, removed and added Base.metadata.create_all(bind=engine) in various spots, and changed how the sessions are organised and created in base.py, among other things, but the script still errors and I'm not sure which code sequence is out of order.
The code currently looks like:
base.py:
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import scoped_session, sessionmaker

# SQLAlchemy requires creating an Engine to interact with our database.
engine = create_engine('postgresql://user:pass@localhost:5432/testdb', echo=True)
# Create a configured ORM 'Session' factory to get a new Session bound to this engine
#_SessionFactory = sessionmaker(bind=engine)
# Use a scoped session
db_session = scoped_session(
    sessionmaker(
        bind=engine,
        autocommit=False,
        autoflush=False
    )
)
# Create a Base class for our class definitions
Base = declarative_base()
models.py:
from sqlalchemy import Column, DateTime, Integer, Text
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()

class User(Base):
    __tablename__ = 'users'

    id = Column(Integer, primary_key=True)
    email = Column(Text, nullable=False, unique=True)
    name = Column(Text)
    date_last_seen = Column(DateTime(timezone=True))

    def __init__(self, email, name, date_last_seen):
        self.email = email
        self.name = name
        self.date_last_seen = date_last_seen
inserts.py:
from datetime import date

from base import db_session, engine, Base
from models import User

def init_db():
    # Generate database schema based on our definitions in models.py
    Base.metadata.create_all(bind=engine)
    # Extract a new session from the session factory
    #session = _SessionFactory()
    # Create an instance of the User class
    alice = User('alice@throughthelooking.glass', 'Alice', date(1865, 11, 26))
    # Use the current session to persist data
    db_session.add(alice)
    # Commit current session to database and close session
    db_session.commit()
    db_session.close()
    print('Initialized the db')
    return

if __name__ == '__main__':
    init_db()
Thank you for any insight you're able to offer!
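One likely culprit, going by the code shown: models.py calls declarative_base() a second time, so the User class is registered on a different MetaData than the Base that inserts.py passes to create_all(), and the users table is never created. A minimal sketch of the fix, assuming the file layout above, is to reuse the Base from base.py:
# models.py -- sketch: import the shared Base instead of creating a new one
from sqlalchemy import Column, DateTime, Integer, Text

from base import Base  # the declarative base (and MetaData) defined in base.py

class User(Base):
    __tablename__ = 'users'

    id = Column(Integer, primary_key=True)
    email = Column(Text, nullable=False, unique=True)
    name = Column(Text)
    date_last_seen = Column(DateTime(timezone=True))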

Can I bind a Session to a specific schema in SQLAlchemy?

I work with a postgres database which has multiple (similar) schemas. In my codebase, I reflect the different schemas in separate schema_xy.py files. I also have a base.py file which contains a base class with abstract table definitions, for tables that are present in multiple schemas.
My base.py file:
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base

engine = create_engine('postgresql+psycopg2://dbuser@dbhost:5432/dbname')
Base = declarative_base(bind=engine)

class User(Base):
    __abstract__ = True
    id = ..
And an example Schema1.py file, which inherits the User table from base but also has a schema-specific table S1Table:
from sqlalchemy import create_engine, MetaData
from sqlalchemy.ext.declarative import declarative_base
from .base import User as BaseUser  # aliased to avoid shadowing

engine = create_engine('postgresql+psycopg2://dbuser@dbhost:5432/dbname')
Schema1Base = declarative_base(bind=engine, metadata=MetaData(schema='Schema1'))

class User(Schema1Base, BaseUser):
    __tablename__ = "user"

class S1Table(Schema1Base):
    __tablename__ = "s1table"
    foo = ...
My question is, how do I best instantiate sessions for querying and uploading data, which are 'bound' to a specific schema, i.e. how do I make sure that I query/manipulate the User table from the correct schema?
I have found this blog post
http://www.blog.pythonlibrary.org/2010/09/10/sqlalchemy-connecting-to-pre-existing-databases/
which defines a loadSession function for Base like so:
def loadSession():
    metadata = Base.metadata
    Session = sessionmaker(bind=engine)
    session = Session()
    return session
But I don't understand what the unused metadata is supposed to do here exactly.
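The metadata variable in loadSession is indeed unused; sessions are bound to an engine, not to a MetaData. One hedged alternative to per-schema base classes is to keep the models schema-agnostic and pick the schema per session with SQLAlchemy's schema_translate_map execution option (available since 1.1), roughly like this:
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

engine = create_engine('postgresql+psycopg2://dbuser@dbhost:5432/dbname')

def load_session(schema_name):
    # Map schema-less tables onto the schema chosen for this session.
    schema_engine = engine.execution_options(
        schema_translate_map={None: schema_name}
    )
    Session = sessionmaker(bind=schema_engine)
    return Session()

session = load_session('Schema1')
# session.query(User).all() now reads Schema1.user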

sqlalchemy empty schema and no tables

I am trying to use SQLAlchemy via a database-first approach and generate the models for the existing database structure. The db is a standard SQL Server (Express).
I can connect to my database and query it via the following:
from sqlalchemy import create_engine, MetaData, Table
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.ext.declarative import declarative_base
__connectionString = "DSN=databaseDSN;Trusted_Connection=yes"
db_engine = create_engine('mssql+pyodbc:///?odbc_connect=%s' % __connectionString, echo=True)
db_session = scoped_session(sessionmaker(bind=db_engine))
result = db_session.execute("SELECT * FROM debug.BasicTable")
for row in result.fetchmany(10):
    print(row)
When I try to reflect the database structure below, I am unable to see any of the actual tables, and the following raises a NoSuchTableError:
myTable= Table('debug.BasicTable', meta, autoload=True, autoload_with=db_engine)
According to a common tutorial (http://pythoncentral.io/sqlalchemy-faqs/), I should be able to reflect the table objects:
Base = declarative_base()
Base.metadata.reflect(db_engine)
meta = MetaData()
meta.reflect(bind=db_engine)
However there are no table objects in meta.tables at all.
This is because debug.BasicTable is most likely not the name of your table. The name of your table is BasicTable and debug is its schema. So:
Table('BasicTable', meta, schema="debug", autoload=True, autoload_with=db_engine)
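Along the same lines, a hedged sketch of reflecting the whole debug schema at once; MetaData.reflect() accepts a schema argument, and the reflected tables are keyed by their schema-qualified names:
from sqlalchemy import MetaData

meta = MetaData()
# Without schema=... only the default schema (typically dbo) is reflected.
meta.reflect(bind=db_engine, schema='debug')

print(meta.tables.keys())                     # includes 'debug.BasicTable'
basic_table = meta.tables['debug.BasicTable']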

SQLAlchemy alembic AmbiguousForeignKeysError for declarative type but not for equivalent non-declarative type

I have the following alembic migration:
revision = '535f7a49839'
down_revision = '46c675c68f4'

from alembic import op
import sqlalchemy as sa
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from datetime import datetime

Session = sessionmaker()
Base = declarative_base()
metadata = sa.MetaData()

# This table definition works
organisations = sa.Table(
    'organisations',
    metadata,
    sa.Column('id', sa.Integer, primary_key=True),
    sa.Column('creator_id', sa.Integer),
    sa.Column('creator_staff_member_id', sa.Integer),
)

"""
# This doesn't...
class organisations(Base):
    __tablename__ = 'organisations'
    id = sa.Column(sa.Integer, primary_key=True)
    creator_id = sa.Column(sa.Integer)
    creator_staff_member_id = sa.Column(sa.Integer)
"""

def upgrade():
    bind = op.get_bind()
    session = Session(bind=bind)
    session._model_changes = {}  # if you are using Flask-SQLAlchemy, this works around a bug
    print(session.query(organisations).all())
    raise Exception("don't succeed")

def downgrade():
    pass
Now the query session.query(organisations).all() works when I use the imperatively-defined table (the one not commented out). But if I use the declarative version, which as far as I understand should be equivalent, I get an error:
sqlalchemy.exc.AmbiguousForeignKeysError: Could not determine join
condition between parent/child tables on relationship
StaffMember.organisation - there are multiple foreign key paths
linking the tables. Specify the 'foreign_keys' argument, providing a
list of those columns which should be counted as containing a foreign
key reference to the parent table.
Now I understand what this error means: I have two foreign keys from organisations to staff_members in my actual models. But why does alembic care about these, and how does it even know they exist? How does this migration know that something called StaffMember exists? As far as I understand, alembic should only know about the models you explicitly tell it about in the migration.
Turns out the problem was with the Flask-Script setup I was using to call alembic. The command I was using to call alembic was importing the code that initialises my Flask app, which was itself importing my actual models.

Using Flask-SQLAlchemy without Flask

I had a small web service built using Flask and Flask-SQLAlchemy that only held one model. I now want to use the same database, but with a command line app, so I'd like to drop the Flask dependency.
My model looks like this:
class IPEntry(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    ip_address = db.Column(db.String(16), unique=True)
    first_seen = db.Column(db.DateTime(),
        default=datetime.datetime.utcnow
    )
    last_seen = db.Column(db.DateTime(),
        default=datetime.datetime.utcnow
    )

    @validates('ip')
    def validate_ip(self, key, ip):
        assert is_ip_addr(ip)
        return ip
Since db will no longer be a reference to flask.ext.sqlalchemy.SQLAlchemy(app), how can I convert my model to use just SQLAlchemy? Is there a way for the two applications (one with Flask-SQLAlchemy, the other with SQLAlchemy) to use the same database?
you can do this to replace db.Model:
from sqlalchemy import orm
from sqlalchemy.ext.declarative import declarative_base
import sqlalchemy as sa
base = declarative_base()
engine = sa.create_engine(YOUR_DB_URI)
base.metadata.bind = engine
session = orm.scoped_session(orm.sessionmaker())(bind=engine)
# after this:
# base == db.Model
# session == db.session
# other db.* values are in sa.*
# ie: old: db.Column(db.Integer,db.ForeignKey('s.id'))
# new: sa.Column(sa.Integer,sa.ForeignKey('s.id'))
# except relationship, and backref, those are in orm
# ie: orm.relationship, orm.backref
# so to define a simple model
class UserModel(base):
    __tablename__ = 'users'  # <- must declare name for db table
    id = sa.Column(sa.Integer, primary_key=True)
    name = sa.Column(sa.String(255), nullable=False)
then to create the tables:
base.metadata.create_all()
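Applied to the model from the question, a minimal sketch might look like this; the ip_entries table name is an assumption, and is_ip_addr is whatever helper the original model was already using:
import datetime

import sqlalchemy as sa
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import validates

base = declarative_base()

class IPEntry(base):
    __tablename__ = 'ip_entries'  # assumed name; use whatever the Flask app created

    id = sa.Column(sa.Integer, primary_key=True)
    ip_address = sa.Column(sa.String(16), unique=True)
    first_seen = sa.Column(sa.DateTime(), default=datetime.datetime.utcnow)
    last_seen = sa.Column(sa.DateTime(), default=datetime.datetime.utcnow)

    @validates('ip_address')
    def validate_ip(self, key, ip):
        assert is_ip_addr(ip)  # same (external) helper as in the original model
        return ip
As long as both applications point at the same database URL and table names, the Flask-SQLAlchemy app and this plain-SQLAlchemy model can read and write the same data.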
This is how to use SQLAlchemy without Flask (for example, to write a bulk of objects to a PostgreSQL database):
from sqlalchemy import Column, Integer, String
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
# Define variables DB_USERNAME, DB_PASSWORD, DB_HOST, DB_PORT, DB_NAME
SQLALCHEMY_DATABASE_URI = (
    f'postgresql://{DB_USERNAME}:{DB_PASSWORD}@{DB_HOST}:{DB_PORT}/{DB_NAME}'
)

# ----- This is related code -----
engine = create_engine(SQLALCHEMY_DATABASE_URI, echo=True)
Base = declarative_base()
Session = sessionmaker(bind=engine)
session = Session()
# ----- This is related code -----

class MyModel(Base):
    __tablename__ = 'my_table_name'
    id = Column(Integer, primary_key=True)
    value = Column(String)

# Create the tables after the models are defined, so MyModel is already
# registered on Base.metadata when create_all() runs.
Base.metadata.create_all(engine)

objects = [MyModel(id=0, value='a'), MyModel(id=1, value='b')]
session.bulk_save_objects(objects)
session.commit()
Check this one: github.com/mardix/active-alchemy
Active-Alchemy is a framework-agnostic wrapper for SQLAlchemy that makes it really easy to use by implementing a simple active-record-like API, while still using db.session underneath. It is inspired by Flask-SQLAlchemy.
There is a great article about Flask-SQLAlchemy: how it works, and how to modify models to use them outside of Flask:
http://derrickgilland.com/posts/demystifying-flask-sqlalchemy/
The SQLAlchemy docs have a good tutorial with examples that sound like what you want to do.
It shows how to connect to a db, set up mappings, create the schema, and query/save to the db.
This does not completely answer your question, because it does not remove the Flask dependency, but you can use SQLAlchemy in scripts and tests by just not running the Flask app.
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy import MetaData
test_app = Flask('test_app')
test_app.config['SQLALCHEMY_DATABASE_URI'] = 'database_uri'
test_app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
metadata = MetaData(schema='myschema')
db = SQLAlchemy(test_app, metadata=metadata)
class IPEntry(db.Model):
    pass
One difficulty you may encounter is the requirement of using db.Model as a base class for your models if you want to target the web app and independent scripts with the same codebase. A possible way to tackle this is to use dynamic polymorphism and wrap the class definition in a function:
def get_ipentry(db):
    class IPEntry(db.Model):
        pass
    return IPEntry
As you construct the class at run time in the function, you can pass in different SQLAlchemy instances. The only downside is that you need to call the function to construct the class before using it.
db = SqlAlchemy(...)
IpEntry = get_ipentry(db)
IpEntry.query.filter_by(id=123).one()
Flask (> 1.0) attempts to provide helpers for sharing code between a web application and a command-line interface; I personally think it might be cleaner, lighter, and easier to build libraries unbound to Flask, but you might want to check:
https://flask.palletsprojects.com/en/2.1.x/cli/
https://flask.palletsprojects.com/en/2.1.x/api/#flask.Flask.cli
Create database and table
import os
from sqlalchemy import create_engine
from sqlalchemy import Column, Integer, String
from sqlalchemy.ext.declarative import declarative_base
if os.path.exists('test.db'):
    os.remove('test.db')

Base = declarative_base()

class Person(Base):
    __tablename__ = 'person'
    id = Column(Integer(), primary_key=True)
    name = Column(String())
engine = create_engine('sqlite:///test.db')
Base.metadata.create_all(engine)
Using Flask-SQLAlchemy directly
from flask import Flask
from sqlalchemy import MetaData
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy import Column, Integer, String
app = Flask(__name__)
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = True
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///test.db'
db = SQLAlchemy(app, metadata=MetaData())
class Person(db.Model):
    __tablename__ = 'person'
    id = Column(Integer(), primary_key=True)
    name = Column(String())
person = Person(name='Bob')
db.session.add(person)
db.session.commit()
print(person.id)
