I have several model files, e.g. model1.py, model2.py, etc. I followed the Pocoo (Flask) declarative SQLAlchemy pattern, but the tables only get created when init_db() is invoked from the terminal. The function itself is:
def init_db():
    import model.model1
    import model.model2
    Base.metadata.create_all(bind=engine)
Calling this from within the application does not work; the tables are only created when it is invoked from the terminal:
>>> from database import init_db
>>> init_db()  # works
database.py
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.ext.declarative import declarative_base

engine = create_engine('sqlite:///xyz.sqlite', echo=True)
db_session = scoped_session(sessionmaker(autocommit=False,
                                         autoflush=False,
                                         bind=engine))
Base = declarative_base()
Base.query = db_session.query_property()

def init_db():
    import model.admin  # from model.admin import User doesn't help either
    import model.role
    Base.metadata.create_all(bind=engine)

if __name__ == '__main__':
    init_db()
admin.py
from sqlalchemy import Column, Integer, String
from database import Base

class User(Base):
    __tablename__ = 'users'
    id = Column(Integer, primary_key=True)
    name = Column(String(50), unique=True)
    email = Column(String(120), unique=True)

    def __init__(self, name=None, email=None):
        self.name = name
        self.email = email

    def __repr__(self):
        return '<User %r>' % (self.name)
There are no errors, but only an empty database file is generated.
How can the database be created from multiple model files?
I'm not sure why invoking it directly from the command line triggers table creation for you, but I've always structured my Flask apps following Digital Ocean's guide. Something that wasn't noted explicitly in the guide is that you need to register your blueprints before create_all is able to build the database tables for you.
(Your code, as it stands, lacks blueprints. Maybe try creating some first and then try again?)
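For what it's worth, here is a minimal sketch of that pattern, assuming Flask-SQLAlchemy and a hypothetical main blueprint module (neither appears in your code). The point is simply that the blueprint, and with it the model modules it uses, is imported and registered before create_all() runs, so every table is already on the metadata:

# app/__init__.py -- hypothetical application-factory sketch
from flask import Flask
from flask_sqlalchemy import SQLAlchemy

db = SQLAlchemy()

def create_app():
    app = Flask(__name__)
    app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///xyz.sqlite'
    db.init_app(app)

    # Importing the blueprint pulls in the model modules it uses,
    # which is what registers their tables on db.metadata.
    from app.main import bp as main_bp   # hypothetical blueprint
    app.register_blueprint(main_bp)

    with app.app_context():
        db.create_all()                   # now sees every imported model

    return app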
Related
I'm learning how to modularize a Flask application using blueprints and how to break the database models into multiple files (one per entity).
I ran into a strange issue while building the SQLite file and writing the initial record for the admin account: SQLAlchemy added two records for one user.
Here are the project layout and key files.
runserver.py
# import os
from sbsuite import app
from sbsuite.database import init_db, createAdmin

init_db()
createAdmin()

if __name__ == "__main__":
    app.run(debug=True)
models.py
from sbsuite import app
from sqlalchemy import Column, Integer, String
from sbsuite.database import Base
from flask_marshmallow import Marshmallow

class User(Base):
    __tablename__ = 'users'
    id = Column(Integer, primary_key=True)
    name = Column(String(50))
    email = Column(String(80))
    password = Column(String(50))
    roles = Column(String(50))

# JSON schema
# should I create the Marshmallow instance here?
ma = Marshmallow(app)

class UserSchema(ma.Schema):
    class Meta:
        fields = ('id', 'name', 'email', 'password')

user_schema = UserSchema()
users_schema = UserSchema(many=True)
database.py
from sbsuite import app
from sqlalchemy import create_engine
from sqlalchemy.orm import Session, scoped_session, sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from flask_sqlalchemy import SQLAlchemy

engine = create_engine(app.config['SQLALCHEMY_DATABASE_URI'], convert_unicode=True)
db_session = scoped_session(sessionmaker(autocommit=False,
                                         autoflush=False,
                                         bind=engine))
Base = declarative_base()
Base.query = db_session.query_property()

from sbsuite.api.models import User, UserSchema
from sbsuite.api.productMdl import Product, ProductSchema

def init_db():
    Base.metadata.create_all(bind=engine)

def createAdmin():
    print("Create admin user")
    admin = User(name="admin", email="admin@sbsuite.com", password="password", roles="admin")
    with Session(engine) as session:
        session.add(admin)
        session.commit()
    print("admin user created")  # why are users added twice?
In the last snippet, after session.commit() I check the database and see two records for the admin user.
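One way to make a setup step like createAdmin() idempotent, however many times it ends up being executed, is to check for an existing admin row before inserting; a minimal sketch, assuming the same User model and engine as above:

from sqlalchemy import select
from sqlalchemy.orm import Session

def createAdmin():
    with Session(engine) as session:
        existing = session.scalars(
            select(User).where(User.name == "admin")
        ).first()
        if existing is None:              # insert only if no admin row exists yet
            session.add(User(name="admin", email="admin@sbsuite.com",
                             password="password", roles="admin"))
            session.commit()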
I'm new to SQLAlchemy (using Python 3) and find the following puzzling. In my simple example, there are 2 model classes defined in separate files with a relationship linking them.
Is the setup correct? My code requires that Animal.py import Owner because a relationship is defined; otherwise app/main.py throws an error about the Owner class not being found. However, the official docs and other online examples do not appear to import the other classes that the current class has a relationship with.
Would having a models/__init__.py be useful in my case? If so, what would it be used for? I saw an example that used an __init__.py file.
Github Repo: https://github.com/nyxynyx/sqlalchemy-class-import-error
File Structure
app/main.py
import sys
sys.path.append('..')

from lib.db import db_session
from models.foo.Animal import Animal

if __name__ == '__main__':
    print(Animal.query.all())
models/foo/Animal.py
from sqlalchemy import *
from sqlalchemy.orm import relationship
from ..Base import Base
from .Owner import Owner   # <-- if this is not imported, an error occurs when running main.py

class Animal(Base):
    __tablename__ = 'animals'
    id = Column(Integer, primary_key=True)
    name = Column(Text)
    owner_id = Column(Integer, ForeignKey('owners.id'))
    owner = relationship('Owner')
models/foo/Owner.py
from sqlalchemy import *
from ..Base import Base

class Owner(Base):
    __tablename__ = 'owners'
    id = Column(Integer, primary_key=True)
    name = Column(Text)
lib/db.py
import json
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy import create_engine

with open('../settings.json') as f:
    settings = json.load(f)

user, password, host, port, dbname = (settings['db']['user'], settings['db']['password'],
                                      settings['db']['host'], settings['db']['port'],
                                      settings['db']['dbname'])
connection_url = f'postgresql://{user}:{password}@{host}:{port}/{dbname}'

engine = create_engine(connection_url)
Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
db_session = scoped_session(Session)
Animal.py itself is fine. The issue is that if Owner.py is never imported, SQLAlchemy never sees the Owner model/table, so it never gets registered in the Base metadata. You can remove the Owner import from Animal.py and instead add
import models.foo.Owner
to your main.py to see it work while keeping the separate model files.
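As for the __init__.py part of the question: a package initializer is a handy place to import every model exactly once, so that a single package import registers all tables on Base.metadata. A sketch under that assumption:

# models/foo/__init__.py -- import every model here so that importing the
# package is enough to register all tables on Base.metadata
from .Animal import Animal
from .Owner import Owner

With that in place, app/main.py can simply do from models.foo import Animal, and both classes are registered before the mappers are configured.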
I'm working on a flask application where I'm trying to isolate my unit tests. I'm using flask-sqlalchemy, and I'm trying to use the create_all and drop_all methods to clean my database after running a test.
However, it appears my create_all and drop_all methods do not actually create/drop the tables as the documentation states. I have my models imported in the application before calling create_all, like most other answers say.
This is the error I'm getting with the code below:
psycopg2.ProgrammingError: relation "tasks" does not exist
Here's my relevant code
/app.py
import os
import configparser
from flask import Flask
from src.router import router
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate

app = Flask(__name__)

if not os.path.exists(os.path.join(app.root_path, 'config.ini')):
    raise Exception(f'config.ini not found in the {app.root_path}')

config = configparser.ConfigParser()
config.read('config.ini')

app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
app.config['SQLALCHEMY_DATABASE_URI'] = config[os.environ['APP_ENV']]['DATABASE_URI']

app.register_blueprint(router)

db = SQLAlchemy(app)
migrate = Migrate(app, db)

if __name__ == "__main__":
    app.run()
/tests/test_router.py
from unittest import TestCase
from flask import Flask
from app import app, db
from src.models import Task

class TestRouter(TestCase):
    def setUp(self):
        db.create_all()

    def tearDown(self):
        db.drop_all()

    def test_adds_task(self):
        task = Task(task_id='task_1', name='my task')
        db.session.add(task)
        db.session.commit()
I think I was a little quick to post the question, but I hope this might help others come up with ideas on how to troubleshoot a similar issue.
In my src/models.py file, where I keep my models, you must make sure the models are defined correctly. Since Flask-SQLAlchemy is a wrapper around SQLAlchemy, you must use the data types exposed on the db object.
Essentially, I had my models defined like this:
class Task(db.Model):
    __tablename__ = 'tasks'
    id = Column(Integer, primary_key=True)
    task_id = Column(String)
    name = Column(String)
    created_at = Column(DateTime, default=datetime.datetime.now)
As you can see, I was inheriting from db.Model instead of the return value of declarative_base(). I also needed to add the db. in front of all the data types, including Column, Integer, String, Float, DateTime, relationship, and ForeignKey.
So, I was able to fix my issue by changing my model to something like:
class Task(db.Model):
    __tablename__ = 'tasks'
    id = db.Column(db.Integer, primary_key=True)
    task_id = db.Column(db.String)
    name = db.Column(db.String)
    created_at = db.Column(db.DateTime, default=datetime.datetime.now)
See: Documentation on declaring Flask-SQLAlchemy models
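If the tables still fail to appear after fixing the model definitions, it is also worth making sure that create_all and drop_all run inside an application context (newer Flask-SQLAlchemy versions require one). A sketch of the test setup under that assumption, using the same app and db objects:

class TestRouter(TestCase):
    def setUp(self):
        self.ctx = app.app_context()
        self.ctx.push()        # Flask-SQLAlchemy 3.x needs an app context for create_all
        db.create_all()

    def tearDown(self):
        db.session.remove()
        db.drop_all()
        self.ctx.pop()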
To create the User table I currently use the drop_all and then create_all methods, but these two calls re-initialize the entire database. Is there a way to create the User table without erasing (or dropping) any existing tables in the database?
import sqlalchemy
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()

class User(Base):
    __tablename__ = 'users'
    id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True)
    name = sqlalchemy.Column(sqlalchemy.String)

    def __init__(self, name=None, *args, **kwargs):
        self.name = name

url = 'postgresql+psycopg2://user:pass@01.02.03.04/my_db'
engine = sqlalchemy.create_engine(url)

session = sqlalchemy.orm.scoped_session(sqlalchemy.orm.sessionmaker())
session.configure(bind=engine, autoflush=False, expire_on_commit=False)

Base.metadata.drop_all(engine)
Base.metadata.create_all(engine)
You can create/drop individual tables:
User.__table__.drop(engine)
User.__table__.create(engine)
from app import db
from models import User
User.__table__.create(db.engine)
User.__table__.drop(db.engine)
Another way to accomplish the task:
Base.metadata.tables['users'].create(engine)
Base.metadata.tables['users'].drop(engine)
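Note also that create_all on its own never drops anything: it skips tables that already exist (checkfirst defaults to True), and it accepts a tables argument if you want to restrict it explicitly. A short sketch using the same Base, engine, and User as above:

# create only the users table, leaving every other table untouched
Base.metadata.create_all(engine, tables=[User.__table__])

# or create/drop a single table, skipping the step if it already exists / is missing
User.__table__.create(engine, checkfirst=True)
User.__table__.drop(engine, checkfirst=True)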
I've adapted the SQLAlchemy-Migrate tutorial to declarative syntax for my Pylons/Pyramid project. I can successfully upgrade and downgrade, but I'm concerned about the Base.metadata.drop_all(migrate_engine) call below. Here is my migration file:
from sqlalchemy import Column
from sqlalchemy.types import Integer, String, DateTime
from sqlalchemy.sql import func
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import scoped_session, sessionmaker
from zope.sqlalchemy import ZopeTransactionExtension

DBSession = scoped_session(sessionmaker(extension=ZopeTransactionExtension()))
Base = declarative_base()

class User(Base):
    __tablename__ = 'users'
    id = Column(Integer, primary_key=True)
    email = Column(String(75), unique=True)
    fullname = Column(String(60))
    password = Column(String(51))
    last_login = Column(DateTime)
    date_joined = Column(DateTime, default=func.now())

def upgrade(migrate_engine):
    # Upgrade operations go here. Don't create your own engine;
    # bind migrate_engine to your metadata.
    Base.metadata.bind = migrate_engine
    Base.metadata.create_all(migrate_engine)  # IS THIS DANGEROUS?

def downgrade(migrate_engine):
    # Operations to reverse the above upgrade go here.
    Base.metadata.bind = migrate_engine
    Base.metadata.drop_all(migrate_engine)  # IS THIS DANGEROUS?
[edit]
My question was really how to create tables individually. I didn't know that was my question until I had asked the wrong one enough times to arrive at the right one.
The proper solution on upgrade is to get the table and create it individually, like so:
def upgrade(migrate_engine):
    # Upgrade operations go here. Don't create your own engine;
    # bind migrate_engine to your metadata.
    User.__table__.create(migrate_engine)
and, for downgrading:
def downgrade(migrate_engine):
    # Operations to reverse the above upgrade go here.
    User.__table__.drop(migrate_engine)