I keep a file with commands separate from my models. The tables only get created after I import all of the models from models into my starter file:
import sys

db = SQLAlchemy(app)
from models import *

try:
    argv = sys.argv[1]
except IndexError:
    sys.exit(1)

argv == '--run' and app.run()
argv == '--create' and db.create_all()
But this is pretty ambiguous to me. I walked through the Flask-SQLAlchemy source code and saw:
def get_tables_for_bind(self, bind=None):
    """Returns a list of all tables relevant for a bind."""
    result = []
    for table in self.Model.metadata.tables.itervalues():
        if table.info.get('bind_key') == bind:
            result.append(table)
    return result
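As an aside, here is a minimal sketch (not from the original post; the model names and bind are assumptions) of how a bind_key ends up in table.info: Flask-SQLAlchemy copies a model's __bind_key__ attribute into its table's info dict, which is exactly what get_tables_for_bind() compares against.

app.config['SQLALCHEMY_BINDS'] = {'users': 'sqlite:///users.db'}

class Post(db.Model):       # no __bind_key__ -> default database (bind None)
    id = db.Column(db.Integer, primary_key=True)

class User(db.Model):
    __bind_key__ = 'users'  # stored as User.__table__.info['bind_key']
    id = db.Column(db.Integer, primary_key=True)

db.create_all(bind='users')  # creates only the tables whose bind_key is 'users'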
And I understand that self.Model is:
def make_declarative_base(self):
    """Creates the declarative base."""
    base = declarative_base(cls=Model, name='Model',
                            mapper=signalling_mapper,
                            metaclass=_BoundDeclarativeMeta)
    base.query = _QueryProperty(self)
    return base
Is there a more explicit way to specify which models should be created? And how does self.Model know which tables to create (after the import)?
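To illustrate the mechanism, here is a minimal runnable sketch (not from the original post): the declarative metaclass registers each model's Table on Model.metadata at class-definition time, so create_all() only knows about models whose modules have been imported.

from flask import Flask
from flask_sqlalchemy import SQLAlchemy

app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite://'
db = SQLAlchemy(app)

print(db.Model.metadata.tables)  # empty -- nothing registered yet

class User(db.Model):
    id = db.Column(db.Integer, primary_key=True)

print(db.Model.metadata.tables)  # now contains the 'user' table

db.create_all()  # creates exactly the tables registered above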
I am writing a script that during development should delete the database and populate it with some dummy values. Unfortunately, the drop_all() part of it does not work:
from flask_sqlalchemy import SQLAlchemy
from my_app import create_app
from my_app.models import Block

db = SQLAlchemy()

def main():
    app = create_app()
    db.init_app(app)
    with app.app_context():
        db.session.commit()
        db.drop_all()    # <- I would expect this to drop everything, but it does not
        db.session.commit()
        db.create_all()  # <- This works even if the database is empty
        b1 = Block(name="foo")
        db.session.add(b1)  # <- Every time I run the script, another copy is added
        db.session.commit()
        blocks = Block.query.all()
        for b in blocks:
            print(b)  # <- this should contain exactly one record every time, but keeps getting longer
And my_app/models.py contains:
from . import db

class Block(db.Model):
    __tablename__ = "block"
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(30))
drop_all() apparently does not drop the right tables. Examples I find on SO and other sources tend to define the model classes in the same file as the db they subclass from (such as here), which I cannot do. Do I need to somehow bind the imported classes to db? If so, how?
After some more searching I found the answer: instead of db = SQLAlchemy() in the script, I had to import it the same way the models do: from my_app import db. Any explanation of this would be highly appreciated.
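For reference, a minimal sketch of the corrected script under that fix (assuming create_app() calls db.init_app() on the shared instance; the exact structure of my_app is an assumption):

from my_app import create_app, db   # the same db instance models.py imports
from my_app.models import Block

def main():
    app = create_app()               # assumed to call db.init_app(app)
    with app.app_context():
        db.drop_all()                # now sees Block, which is registered on this db
        db.create_all()
        db.session.add(Block(name="foo"))
        db.session.commit()
        print(Block.query.all())     # exactly one record on every run

if __name__ == "__main__":
    main()

The explanation: drop_all() and create_all() act on the metadata of the particular SQLAlchemy instance they are called on, and models only register themselves with the instance they subclass. A second SQLAlchemy() object has empty metadata, so there is nothing for it to drop.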
I have a database with existing tables that are not used by my Python code. I generated a migration using Flask-Migrate and ran it, and it deleted my existing tables while creating the user table. How can I run migrations without removing any existing tables?
I read the answer to the question "Preserve existing tables in database when running Flask-Migrate", but it doesn't work for me because I do not own the database and I do not know which tables might exist at the time of deployment, which means I cannot whitelist the tables that should be preserved.
Is there a way to tell Flask-Migrate/Alembic not to drop any tables that it doesn't know about?
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_script import Manager
from flask_migrate import Migrate, MigrateCommand

app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'my_data'

db = SQLAlchemy(app)
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)

class User(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(128))

if __name__ == '__main__':
    manager.run()
You just need this:
# in env.py
def include_object(object, name, type_, reflected, compare_to):
    if type_ == "table" and reflected and compare_to is None:
        return False
    else:
        return True

context.configure(
    # ...
    include_object=include_object,
)
See the documentation here: https://alembic.sqlalchemy.org/en/latest/cookbook.html#don-t-generate-any-drop-table-directives-with-autogenerate
You can use a Rewriter to do an automatic check before deletion, by overriding the ops.DropTableOp operation.
If you want, you can also add a provision to only drop tables that you do have control over: these will be the ones that inherit from your Base (in the case of pure Alembic) or db.Model (for Flask).
Example:
from alembic.autogenerate import rewriter
from alembic.operations import ops

writer = rewriter.Rewriter()

@writer.rewrites(ops.DropTableOp)
def drop_table(context, revision, op):
    # Base is your declarative base (with Flask-SQLAlchemy, use db.Model.metadata)
    if op.table_name in Base.metadata.tables.keys():
        return op  # only return the operation when you want it to run
    return []  # we need to return an iterable
Note that you need to pass the writer object to the process_revision_directives kwarg when calling context.configure in your env.py file (see the docs).
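A minimal sketch of that wiring (the rest of env.py is elided; the writer is the Rewriter instance defined above, which is callable and can be passed directly):

context.configure(
    # ...
    process_revision_directives=writer,
)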
I have a silly question.
This is my code:
import argparse
import os
import sys

from peewee import *

db = SqliteDatabase(None)

class Base(Model):
    class Meta:
        database = db

class Table(Base):
    a_date = DateField()
    url = CharField()

def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--db-dir', action='store')
    args = parser.parse_args()
    db_path = os.path.join(args.db_dir, 'data.db')
    try:
        db.init(db_path)
        db.connect()
        query = Table.select().order_by(Table.a_date.desc()).get()
    except Exception:
        sys.exit(1)
    else:
        print(query.url)
        sys.exit(0)

if __name__ == '__main__':
    main()
This code works fine, but if the database file does not exist, db.connect() always creates it. How can I prevent this?
Another question: how can I query the database table for this field without declaring the peewee Model?
Thanks
If I understand the peewee docs correctly (http://docs.peewee-orm.com/en/latest/peewee/database.html), peewee uses the API provided by Python to connect to SQLite.
That means you have to deal with that API (https://docs.python.org/2/library/sqlite3.html#sqlite3.connect), and the connect method always creates the database file beforehand.
I believe, however, that you can pass a custom Connection class to this method (the factory parameter), and define your behaviour in that custom class:
import os
from sqlite3 import Connection

from peewee import *

class CustomConnection(Connection):
    def __init__(self, dbname, *args, **kwargs):
        # Check if the db already exists or not
        if not os.path.exists(dbname):
            raise ValueError('DB {} does not exist'.format(dbname))
        super(CustomConnection, self).__init__(dbname, *args, **kwargs)

db = SqliteDatabase('mydatabase', factory=CustomConnection)
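A hypothetical usage sketch: because the existence check runs before the superclass constructor, connecting to a missing file now fails before sqlite3 gets a chance to create it.

try:
    db.connect()
except ValueError as exc:
    print(exc)  # DB mydatabase does not exist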
Using the following example from the documentation:
from django.db import migrations

def combine_names(apps, schema_editor):
    Person = apps.get_model("yourappname", "Person")
    for person in Person.objects.all():
        person.name = "%s %s" % (person.first_name, person.last_name)
        person.save()

class Migration(migrations.Migration):

    dependencies = [
        ('yourappname', '0001_initial'),
    ]

    operations = [
        migrations.RunPython(combine_names),
    ]
How would I create and run a test against this migration, confirming that the data is migrated correctly?
I was doing some googling to address the same question and found an article that hit the nail on the head for me and seemed less hacky than existing answers, so I'm putting it here in case it helps anyone else coming through.
The article proposed the following subclass of Django's TestCase:
from django.apps import apps
from django.test import TestCase
from django.db.migrations.executor import MigrationExecutor
from django.db import connection

class TestMigrations(TestCase):

    @property
    def app(self):
        return apps.get_containing_app_config(type(self).__module__).name

    migrate_from = None
    migrate_to = None

    def setUp(self):
        assert self.migrate_from and self.migrate_to, \
            "TestCase '{}' must define migrate_from and migrate_to properties".format(type(self).__name__)
        self.migrate_from = [(self.app, self.migrate_from)]
        self.migrate_to = [(self.app, self.migrate_to)]
        executor = MigrationExecutor(connection)
        old_apps = executor.loader.project_state(self.migrate_from).apps

        # Reverse to the original migration
        executor.migrate(self.migrate_from)

        self.setUpBeforeMigration(old_apps)

        # Run the migration to test
        executor = MigrationExecutor(connection)
        executor.loader.build_graph()  # reload.
        executor.migrate(self.migrate_to)

        self.apps = executor.loader.project_state(self.migrate_to).apps

    def setUpBeforeMigration(self, apps):
        pass
And an example use case that they proposed was:
class TagsTestCase(TestMigrations):

    migrate_from = '0009_previous_migration'
    migrate_to = '0010_migration_being_tested'

    def setUpBeforeMigration(self, apps):
        BlogPost = apps.get_model('blog', 'Post')
        self.post_id = BlogPost.objects.create(
            title="A test post with tags",
            body="",
            tags="tag1 tag2",
        ).id

    def test_tags_migrated(self):
        BlogPost = self.apps.get_model('blog', 'Post')
        post = BlogPost.objects.get(id=self.post_id)

        self.assertEqual(post.tags.count(), 2)
        self.assertEqual(post.tags.all()[0].name, "tag1")
        self.assertEqual(post.tags.all()[1].name, "tag2")
You can use the django-test-migrations package. It is suited for testing data migrations, schema migrations, and migrations' order.
Here's how it works:
from django_test_migrations.migrator import Migrator
# You can specify any database alias you need:
migrator = Migrator(database='default')
old_state = migrator.before(('main_app', '0002_someitem_is_clean'))
SomeItem = old_state.apps.get_model('main_app', 'SomeItem')
# One instance will be `clean`, the other won't be:
SomeItem.objects.create(string_field='a')
SomeItem.objects.create(string_field='a b')
assert SomeItem.objects.count() == 2
assert SomeItem.objects.filter(is_clean=True).count() == 2
new_state = migrator.after(('main_app', '0003_auto_20191119_2125'))
SomeItem = new_state.apps.get_model('main_app', 'SomeItem')
assert SomeItem.objects.count() == 2
# One instance is clean, the other is not:
assert SomeItem.objects.filter(is_clean=True).count() == 1
assert SomeItem.objects.filter(is_clean=False).count() == 1
We also have native integrations for both pytest:
@pytest.mark.django_db
def test_main_migration0002(migrator):
    """Ensures that the second migration works."""
    old_state = migrator.before(('main_app', '0002_someitem_is_clean'))
    SomeItem = old_state.apps.get_model('main_app', 'SomeItem')
    ...
And unittest:
from django_test_migrations.contrib.unittest_case import MigratorTestCase

class TestDirectMigration(MigratorTestCase):
    """This class is used to test direct migrations."""

    migrate_from = ('main_app', '0002_someitem_is_clean')
    migrate_to = ('main_app', '0003_auto_20191119_2125')

    def prepare(self):
        """Prepare some data before the migration."""
        SomeItem = self.old_state.apps.get_model('main_app', 'SomeItem')
        SomeItem.objects.create(string_field='a')
        SomeItem.objects.create(string_field='a b')

    def test_migration_main0003(self):
        """Run the test itself."""
        SomeItem = self.new_state.apps.get_model('main_app', 'SomeItem')
        assert SomeItem.objects.count() == 2
        assert SomeItem.objects.filter(is_clean=True).count() == 1
Full guide: https://sobolevn.me/2019/10/testing-django-migrations
Github: https://github.com/wemake-services/django-test-migrations
PyPI: https://pypi.org/project/django-test-migrations/
EDIT:
These other answers make more sense:
https://stackoverflow.com/a/56212859
https://stackoverflow.com/a/59016744, if you don't mind the extra (dev) dependency
ORIGINAL:
Running your data-migration functions (such as combine_names from the OP's example) through some basic unit-tests, before actually applying them, makes sense to me too.
At first glance this should not be much more difficult than your normal Django unit-tests: migrations are Python modules and the migrations/ folder is a package, so it is possible to import things from them. However, it took some time to get this working.
The first difficulty arises due to the fact that the default migration file names start with a number. For example, suppose the code from the OP's (i.e. Django's) data-migration example sits in 0002_my_data_migration.py, then it is tempting to use
from yourappname.migrations.0002_my_data_migration import combine_names
but that would raise a SyntaxError because the module name starts with a number (0).
There are at least two ways to make this work:
Rename the migration file so it does not start with a number. This should be perfectly fine according to the docs: "Django just cares that each migration has a different name." Then you can just use import as above.
If you want to stick to the default numbered migration file names, you can use Python's import_module (see docs and this SO question).
The second difficulty arises from the fact that your data-migration functions are designed to be passed into RunPython (docs), so they expect two input arguments by default: apps and schema_editor. To see where these come from, you can inspect the source.
Now, I'm not sure this works for every case (please, anyone, comment if you can clarify), but for our case, it was sufficient to import apps from django.apps and get the schema_editor from the active database connection (django.db.connection).
The following is a stripped-down example showing how you can implement this for the OP example, assuming the migration file is called 0002_my_data_migration.py:
from importlib import import_module

from django.test import TestCase
from django.apps import apps
from django.db import connection

from yourappname.models import Person

# Our filename starts with a number, so we use import_module
data_migration = import_module('yourappname.migrations.0002_my_data_migration')

class DataMigrationTests(TestCase):

    def __init__(self, *args, **kwargs):
        super(DataMigrationTests, self).__init__(*args, **kwargs)
        # Some test values
        self.first_name = 'John'
        self.last_name = 'Doe'

    def test_combine_names(self):
        # Create a dummy Person
        Person.objects.create(first_name=self.first_name,
                              last_name=self.last_name,
                              name=None)
        # Run the data migration function
        data_migration.combine_names(apps, connection.schema_editor())
        # Test the result
        person = Person.objects.get(id=1)
        self.assertEqual('{} {}'.format(self.first_name, self.last_name), person.name)
You could add a crude if statement to a prior migration that tests whether the test suite is running, and adds initial data if it is. That way you can just write a test to check that the objects end up in the final state you want. Just make sure your conditional is compatible with production; here's an example that would work with python manage.py test:
import sys

if 'test' in sys.argv:
    # do steps to update your operations
    ...
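A hypothetical sketch of what that could look like inside a prior migration (all names here are invented for illustration):

import sys

from django.db import migrations

def add_test_fixtures(apps, schema_editor):
    # Only seed data while the test suite is running
    if 'test' not in sys.argv:
        return
    Person = apps.get_model('yourappname', 'Person')
    Person.objects.create(first_name='John', last_name='Doe')

class Migration(migrations.Migration):
    dependencies = [('yourappname', '0001_initial')]
    operations = [migrations.RunPython(add_test_fixtures)]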
For a more "complete" solution, this older blog post has some good info and more up-to-date comments for inspiration:
https://micknelson.wordpress.com/2013/03/01/testing-django-migrations/#comments
I have a project that I've been working on for a while, written in Flask, which uses SQLAlchemy with the Declarative extension (http://flask.pocoo.org/docs/patterns/sqlalchemy/). I've recently decided to start unit testing my project, but for the life of me, I can't seem to figure out how to make it work.
I looked at http://flask.pocoo.org/docs/testing/, but I can't seem to make it work.
I tried a mix of things from different websites, but am unable to find something that works correctly.
class StopsTestCase(unittest.TestCase):

    def setUp(self):
        self.engine = create_engine('sqlite:///:memory:')
        self.session = scoped_session(sessionmaker(autocommit=False,
                                                   autoflush=False,
                                                   bind=self.engine))
        models.Base = declarative_base()
        models.Base.query = self.session.query_property()
        models.Base.metadata.create_all(bind=self.engine)

    def test_empty_db(self):
        stops = session.query(models.Stop).all()
        assert len(stops) == 0

    def tearDown(self):
        session.remove()

if __name__ == '__main__':
    unittest.main()
Unfortunately, the best I can seem to get, causes the following error.
OperationalError: (OperationalError) no such table: stops u'SELECT stops.agency_id AS stops_agency_id, stops.id AS stops_id, stops.name AS stops_name, stops."desc" AS stops_desc, stops.lat AS stops_lat, stops.lon AS stops_lon, stops.zone_id AS stops_zone_id \nFROM stops' ()
----------------------------------------------------------------------
Ran 1 test in 0.025s
FAILED (errors=1)
Any help on this would be greatly appreciated. If someone has ever been through this before, and made it work, I would like some pointers! Thanks in advance.
Based on what I found and how I got it working, here is a template solution that works for testing the underlying SQLAlchemy systems using the Declarative extension.
import unittest

from database import Base
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker

import models

class StopsTestCase(unittest.TestCase):

    def setUp(self):
        self.engine = create_engine('sqlite:///:memory:')
        self.session = scoped_session(sessionmaker(autocommit=False,
                                                   autoflush=False,
                                                   bind=self.engine))
        Base.query = self.session.query_property()
        Base.metadata.create_all(bind=self.engine)
        # Create objects here
        # These will likely involve something like the following for one of my stops
        # stop1 = models.Stop(id=1, name="Stop 1")
        # self.session.add(stop1)
        # self.session.commit()
        # But adding a stop to the database here will break the test below. Just saying.

    def test_empty_db(self):
        stops = self.session.query(models.Stop).all()
        assert len(stops) == 0

    def tearDown(self):
        self.session.remove()
You're instantiating declarative_base again; you should be using the same instance you used as a base class for your models (as the template above does). Also, you seem to be using two different session instances, self.session and some module-global session. Try cleaning that up, too.