Flask SQLAlchemy model (autoloaded) not getting all columns in table - Python

I am building a Flask application on top of an already existing database, so there was no need to declare the models in full. I have this table:
class Users(db.Model):
    __tablename__ = 'my_users'
    __table_args__ = {
        'autoload': True,
        'autoload_with': db.engine
    }
The table has about 10 columns, but when I query the data I can see that the attribute:
.__dict__
only returns the first four columns. I have tried using filter and also filter_by, but the data returned only contains the first four columns. Here is my query:
users = Users.query.filter(
    Users.section_serial == sectionserial
).all()
I am using a PostgreSQL database. Here is a minimal example:
CREATE TABLE public.my_users
(
    user_serial integer NOT NULL DEFAULT nextval('my_users_seq'::regclass),
    user_name character varying(16) NOT NULL,
    user_password character varying(42) NOT NULL,
    id_number character varying(155) NOT NULL,
    date_added timestamp without time zone NOT NULL DEFAULT now(),
    is_enabled boolean NOT NULL DEFAULT true,
    expiry_date date NOT NULL DEFAULT (('now'::text)::date + 30),
    phone_number character varying(254),
    notes text,
    section_serial integer,
    full_name character varying(155) NOT NULL,
    zip_code boolean NOT NULL DEFAULT false,
    CONSTRAINT user_serial_pkey PRIMARY KEY (user_serial)
);
After querying the data I only get user_serial, user_name, user_password and id_number. I cannot get the rest of the columns.

The problem was that the model conflicted with a login model I had created earlier for the same table, even though it had a different name. A table should only be mapped by one model.
class SystemUsers(db.Model):
    __tablename__ = 'my_users'
    userserial = db.Column(
        'user_serial', db.Integer, primary_key=True)
    username = db.Column('user_name', db.String)
    password = db.Column('user_password', db.String)
    idnumber = db.Column('id_number', db.String)
    isactive = True
    isanonymous = False
    authenticated = False
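A minimal sketch of keeping a single mapping (the login flags are taken from the question; merging them into the autoloaded model is an assumption, not the asker's final code):
from flask_sqlalchemy import SQLAlchemy

db = SQLAlchemy()

class Users(db.Model):
    __tablename__ = 'my_users'
    __table_args__ = {
        'autoload': True,
        'autoload_with': db.engine
    }

    # Plain class attributes used for login handling; not mapped columns.
    isactive = True
    isanonymous = False
    authenticated = False
With only one mapper for my_users, the reflected table keeps all of its columns and the login-related flags still live on the same class.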

Related

Django allows model object with empty fields to be saved

Below is a model named 'Dataset' containing three fields: name, desc and data_file.
class Dataset(models.Model):
    name = models.CharField(max_length=256)
    desc = models.TextField()
    data_file = models.FileField(upload_to='datasets/')
I created a model object in the Python shell:
>>> d = Dataset()
>>> d.save()
>>> d.name, d.desc, d.data_file
('', '', <FieldFile: None>)
Django allowed this object to be saved, even though blank=False is the default for every field.
How can I make sure that Dataset objects cannot be created with these three fields empty?
Below is the sqlite3 schema:
CREATE TABLE IF NOT EXISTS "datasets_dataset"(
    "id" integer NOT NULL PRIMARY KEY AUTOINCREMENT,
    "name" varchar(256) NOT NULL,
    "data_file" varchar(100) NOT NULL,
    "desc" text NOT NULL
);
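One common approach is to run model validation before saving, since blank=False is only enforced by model/form validation, not by save(). A minimal sketch, assuming validation should run on every save (not part of the original post):
from django.db import models

class Dataset(models.Model):
    name = models.CharField(max_length=256)
    desc = models.TextField()
    data_file = models.FileField(upload_to='datasets/')

    def save(self, *args, **kwargs):
        # Runs field validation; raises ValidationError if name, desc or
        # data_file is empty, because blank defaults to False.
        self.full_clean()
        super(Dataset, self).save(*args, **kwargs)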

How to properly use SQLAlchemy's '@aggregated' class attribute decorator

I'm trying to use SQLAlchemy-Utils' @aggregated decorator to define an attribute ('gross_amount') for a class, Receipt. This gross_amount attribute is the sum of Item.gross_amount for all Item instances associated with the Receipt instance by a foreign key.
I.e., a receipt is made up of items, and I want to define a receipt 'gross_amount' value which is just the total $ of all of the items on the receipt.
I've modeled my code after this document http://sqlalchemy-utils.readthedocs.io/en/latest/aggregates.html
So it looks like this...
from sqlalchemy import Column, Integer, ForeignKey
from sqlalchemy.sql import func
from sqlalchemy import orm
from sqlalchemy_utils import aggregated

class Receipt(Base):
    __tablename__ = "receipts"
    __table_args__ = {'extend_existing': True}

    id = Column(Integer, index=True, primary_key=True, nullable=False)

    @aggregated('itemz', Column(Integer))
    def gross_amount(self):
        return func.sum(Item.gross_amount)

    itemz = orm.relationship(
        'Item',
        backref='receipts'
    )

class Item(Base):
    __tablename__ = "items"

    id = Column(Integer, index=True, primary_key=True, nullable=False)

    '''
    FE relevant
    '''
    gross_amount = Column(Integer)
    receipt_id = Column(Integer, ForeignKey("receipts.id"), nullable=False)
In my migration, am I supposed to have a column in the receipts table for gross_amount?
1) When I DO define this column in the receipts table, any Receipt.gross_amount for any instance just points to the gross_amount values defined in the receipts table.
2) When I DO NOT define this column in the receipts table, I get a SQLAlchemy error whenever I execute a SELECT against the database:
ProgrammingError: (psycopg2.ProgrammingError) column receipts.gross_amount does not exist
FWIW, my SQLAlchemy package is the latest distributed through pip...
SQLAlchemy==1.1.11
SQLAlchemy-Utils==0.32.14
And my local db on which I'm running this for now is PostgreSQL 9.6.2
What am I doing wrong here? Any patient help would be greatly appreciated!
Yes, you do need to add the column to the table:
CREATE TABLE receipts (
    id INTEGER NOT NULL,
    gross_amount INTEGER, -- <<< See, it's here :)
    PRIMARY KEY (id)
);
INSERT INTO receipts VALUES(1,7);
INSERT INTO receipts VALUES(2,7);

CREATE TABLE items (
    id INTEGER NOT NULL,
    gross_amount INTEGER,
    receipt_id INTEGER NOT NULL,
    PRIMARY KEY (id),
    FOREIGN KEY(receipt_id) REFERENCES receipts (id)
);
Tested with this self-contained snippet:
from sqlalchemy import Column, Integer, ForeignKey, create_engine, orm
from sqlalchemy.orm import sessionmaker
from sqlalchemy.sql import func
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy_utils import aggregated

Base = declarative_base()

class Receipt(Base):
    __tablename__ = "receipts"
    __table_args__ = {'extend_existing': True}

    id = Column(Integer, index=True, primary_key=True, nullable=False)

    @aggregated('itemz', Column(Integer))
    def gross_amount(self):
        return func.sum(Item.gross_amount)

    itemz = orm.relationship('Item', backref='receipts')

class Item(Base):
    __tablename__ = "items"

    id = Column(Integer, index=True, primary_key=True, nullable=False)
    gross_amount = Column(Integer)
    receipt_id = Column(Integer, ForeignKey("receipts.id"), nullable=False)

    def __init__(self, amount):
        self.gross_amount = amount

engine = create_engine('sqlite:///xxx.db', echo=True)
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()

receipt = Receipt()
receipt.itemz.append(Item(5))
receipt.itemz.append(Item(2))
session.add(receipt)
session.commit()

print(receipt.gross_amount)
Of course, there's also another approach, hybrid_property (from sqlalchemy.ext.hybrid), which basically allows you to do both ORM- and database-level queries without adding an extra column to your database:
@hybrid_property
def gross_sum(self):
    return sum(i.gross_amount for i in self.itemz)

@gross_sum.expression
def gross_sum(cls):
    return select([func.sum(Item.gross_amount)]).\
        where(Item.receipt_id == cls.id).\
        label('gross_sum')
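A brief usage sketch of that hybrid (an assumption built on the names above, not part of the original answer):
# Python side: sums the items already loaded on the instance.
print(receipt.gross_sum)

# SQL side: the expression form is rendered as a correlated subquery,
# so the filtering happens in the database.
big_receipts = session.query(Receipt).filter(Receipt.gross_sum > 5).all()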
The reason you're getting this error is because the new column you're adding (gross_amount) has not been created in the receipts table in the database.
Meaning, your current database table only has one created column (id). For the aggregated column to work, it needs to contain an additional column called gross_amount.
This additional column has to allow null values.
One way to go about doing that is through SQL directly in PostgreSQL:
ALTER TABLE receipts ADD gross_amount int;
Alternatively, if there's no data yet, you can drop and recreate the table via SQLAlchemy. It should create this extra column automatically.
I'm not sure what you mean by the last part:
When I DO define this column in the receipts table, any
Receipt.gross_amount for any instance just points to the gross_amount
values defined in the receipts table.
That's where it's supposed to point. Do you mean that it doesn't contain any values, even though there are values for this receipt's items in Item? If so, I would double-check that this is the case (and, per their examples here, refresh the database session before seeing the results).
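A minimal sketch of that refresh step (names assumed from the question's models):
session.commit()
# Re-read the receipt row so the gross_amount column, which sqlalchemy_utils
# updates at flush time, reflects the items that were just added.
session.refresh(receipt)
print(receipt.gross_amount)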

Django ORM returns None for not-null columns

I have the following Django model
class CalculatedResults(models.Model):
    id = models.IntegerField(primary_key=True)
    tmstamp = models.DateTimeField()
    latitude = models.FloatField()
    longitude = models.FloatField()
    magnitude = models.FloatField()
    origin_time = models.DateTimeField()
    actioncode = models.IntegerField(db_column='actionCode', blank=True, null=True)  # Field name made lowercase.

    class Meta:
        managed = False
        db_table = 'calculated_results'
        verbose_name = 'Calculated Result'
        verbose_name_plural = 'Calculated Results'
and the relevant MySQL table is:
CREATE TABLE IF NOT EXISTS `calculated_results` (
    `id` int(11) NOT NULL AUTO_INCREMENT,
    `tmstamp` timestamp(5) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    `latitude` double NOT NULL,
    `longitude` double NOT NULL,
    `magnitude` double NOT NULL DEFAULT '0',
    `origin_time` timestamp(5) NOT NULL DEFAULT '0000-00-00 00:00:00.00000',
    `actionCode` int(11) DEFAULT NULL,
    PRIMARY KEY (`id`),
    KEY `tmstamp_index` (`tmstamp`),
    KEY `origin_time_index` (`origin_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin AUTO_INCREMENT=814 ;
When I try to retrieve data from the table using python manage.py shell with the following code,
from eew_main.models import CalculatedResults
a = CalculatedResults.objects.all().order_by('-id')[:1]
I get None for tmstamp and origin_time, but as far as I have investigated the issue, the following SQL query is sent to MySQL:
SELECT `calculated_results`.`id`, `calculated_results`.`tmstamp`, `calculated_results`.`latitude`,
       `calculated_results`.`longitude`, `calculated_results`.`magnitude`, `calculated_results`.`origin_time`,
       `calculated_results`.`actionCode`
FROM `calculated_results`
ORDER BY `calculated_results`.`id` DESC
LIMIT 1
When I run the same query in MySQL with the same MySQL user that Django uses, I get all the values.
So please help me find a way to investigate and resolve this issue.
Thanks!
It seems your tables were created outside of Django. Django's models.DateTimeField maps to a DATETIME column in MySQL, not TIMESTAMP.
Check this answer for an implementation similar to DateTimeField that uses TIMESTAMP at the SQL level.
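As an illustration of that idea, a minimal sketch (an assumption, not the linked answer's actual code) of a DateTimeField variant whose MySQL column type is TIMESTAMP:
from django.db import models

class MySQLTimestampField(models.DateTimeField):
    """DateTimeField that maps to TIMESTAMP on MySQL instead of DATETIME."""
    def db_type(self, connection):
        if connection.vendor == 'mysql':
            return 'timestamp'
        return super(MySQLTimestampField, self).db_type(connection)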

How do I insert the database's default for a column in SQLAlchemy?

I'm trying to insert a row into a Postgresql table that looks like this:
CREATE TABLE launch_ids(
    id SERIAL PRIMARY KEY,
    launch_time TIMESTAMP WITHOUT TIME ZONE NOT NULL DEFAULT
        (now() at time zone 'utc')
);
My class looks like this:
class LaunchId(Base):
    """Launch ID table for runs"""
    __tablename__ = 'launch_ids'

    id = Column(Integer, primary_key=True)
    launch_time = Column(DateTime)
The launch_time should be managed by the database. I know it's possible to use default=datetime.datetime.utcnow(), but that uses the current time on the client. I know it's possible to use default=func.now(), but that means that if the database's definition of the default changes, then I need to change the default in two places.
Here is what I get when I try to insert a row in launch_ids without specifying a value:
l = LaunchId()
session.add(l)
session.commit()
IntegrityError: (psycopg2.IntegrityError) null value in column "launch_time" violates not-null constraint
DETAIL: Failing row contains (1, null).
[SQL: 'INSERT INTO launch_ids (launch_time) VALUES (%(launch_time)s) RETURNING launch_ids.id'] [parameters: {'launch_time': None}]
Use FetchedValue:
from sqlalchemy.schema import FetchedValue

class LaunchId(Base):
    ...
    launch_time = Column(DateTime, FetchedValue())
Specify the server_default on the column, like this:
from sqlalchemy import text

class LaunchId(Base):
    """Launch ID table for runs"""
    __tablename__ = 'launch_ids'

    id = Column(Integer, primary_key=True)
    launch_time = Column(DateTime, nullable=False,
                         server_default=text("(now() at time zone 'utc')"))
Then adding a new launch_id through the session will work. server_default works differently from default in that it is generated on the server side. Official SQLAlchemy documentation: http://docs.sqlalchemy.org/en/latest/core/defaults.html#server-side-defaults
By specifying nullable=False, this model also becomes a true reflection of the CREATE TABLE you specified, and thus can be generated through Base.metadata.create_all or using alembic.
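A brief usage sketch (an assumption, not part of the original answer) showing the server-generated value being read back after a commit:
l = LaunchId()
session.add(l)
session.commit()

# By default, committed instances are expired, so this attribute access
# re-fetches the row and returns the database-generated launch_time.
print(l.launch_time)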

Set Column default value to NULL

I have a MySQL table as follows:
create table USER
(
    USERID int not null auto_increment,
    USERAVATAR varchar(1024) default NULL,
    primary key (USERID)
);
I have created an entry in the table where USERID = 1 and USERAVATAR = NULL.
In Main.py
user_list = session.query(USER).all()
return jsonify(users=[r.serialize() for r in user_list])
sqltables.py
class USER(Base):
    __tablename__ = 'USER'

    USERID = Column(Integer, primary_key=True)
    USERAVATAR = Column(String(1024))

    def serialize(self):
        return unicode({
            'id': self.USERID,
            'userAvatar': self.USERAVATAR
        })
The issue is that even though USERAVATAR has been set to NULL in the database, I'm getting None in my JSON output.
{
    "users": [
        "{'userAvatar': None, 'id': 1}"
    ]
}
Would anyone know what might be the problem here?
Your serialize function is casting the dict into a string. Is that what you want in your JSON output: an array of strings instead of an array of objects?
If not, change your serialize function to not use unicode().
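For example, a serialize() that returns a plain dict (a sketch of the suggestion above) lets jsonify emit an array of objects, with NULL columns rendered as JSON null:
def serialize(self):
    return {
        'id': self.USERID,
        'userAvatar': self.USERAVATAR  # None is rendered as null in JSON
    }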
