How to validate if two dates are the same? - python

How do I implement functionality where a user can only update an entry when the date_created and date_modified fields of the diary entry are the same?
This is what I have implemented. I have compared the date_created field of the entry model in the db to datetime.date.today().
Data model
class DiaryEntry():
def __init__(self):
self.title = ''
self.body = ''
self.date_modified = None
self.date_created = datetime.date.today()
def save(self, current_user_email):
# insert data into db
query = "INSERT INTO entries (owner_id, title, body, date_created, date_modified) \
VALUES ((SELECT user_id from users where email ='{}'), '{}', '{}', '{}','{}')" \
. format(current_user_email,
self.title,
self.body,
self.date_created,
self.date_modified
)
db.execute(query)
Method
def update_diary_entry(self,entry_id):
query = "select * from entries where entry_id='{}'".format(entry_id)
result = db.execute(query)
entry = result.fetchone()
data = request.get_json()
date_created = entry[4]
if date_created == datetime.date.today():
query = "update entries set title='{}',body='{}' where entry_id='{}'"\
.format(data['title'], data['body'], int(entry_id))
db.execute(query)
return {'message': 'diary entry updated succesfully','date':date_created}, 406
else:
return {'message': 'diary entry can only be updated on the day it was created'}, 406
I am currently getting the second return statement. What could I be doing wrong?

It looks like you have date_created as a string (str) within update_diary_entry(). That will cause an invalid comparison to a Python datetime.date object unless you parse the string into the same type:
>>> import datetime
>>> date_created = '2018-07-29'
>>> date_created == datetime.date.today()
False
>>> datetime.datetime.strptime(date_created, '%Y-%m-%d').date() == datetime.date.today()
True
The classmethod strptime() parses a string that looks like a date into a datetime.datetime object. You need to then grab just the date component from this to enable the comparison that you want. If you have a differently-formatted date-string, see strftime() and strptime() Behavior.

Related

How to get all objects with certain interval of time in django?

I am attempting to retrieve all objects with a time-to-live of less than 5 seconds using Django's ORM in Python. However, my current approach is not producing the expected results. Can you help me understand what I am doing wrong and how I can correctly retrieve these objects?
queryset.py
def ttl_expire_list(self):
query = self.filter(is_remove=False,ttl__range=[timezone.now() + timedelta(seconds=5), timezone.now()]).order_by("-ttl")
# query = self.filter(is_remove=False).order_by("-ttl")
return query'
models.py
class Notification(models.Model):
sender = models.CharField(_("Sender"), max_length=100,null=True,blank=True)
receiver = models.CharField(_("Receiver"), max_length=100,null=True,blank=True)
message = models.TextField(_("Message"),null=True,blank=True)
is_read = models.BooleanField(_("Read") ,default=False,null=True,blank=True)
ttl = models.DateTimeField(_("Time to live"),null=True,blank=True)
create_time = models.DateTimeField(_("Created Time"), default = timezone.now)
Solution
def ttl_expire_list(self):
print("curen",timezone.now())
print("imte :",timezone.now() + timedelta(seconds=5))
query = self.filter(is_remove=False,ttl__range=(timezone.now() , timezone.now()+timedelta(seconds=50))).order_by("-ttl")
return query
First argument of range should be start date and then end date just swap the arguments:
ttl__range=[timezone.now(), timezone.now() + timedelta(seconds=5)]

Bulk Insert into MySQL in Python

I currently have some code that takes the first value in the dataframe and either inserts it into MySQL, or updates it (depending if the id is already in the DB or not).
However, I need to be able to create a loop to loop through all the values in the dataframe instead of just one. But I'm not sure how to do it.
Here is my code for just one value:
class Example(db.Model):
__tablename__ = 'sessionAttendances'
_id = db.Column('_id', db.Unicode, primary_key=True)
wondeID = db.Column('wondeID', db.Unicode)
date = db.Column('date', db.Unicode)
timezoneType = db.Column('timezoneType', db.Unicode)
timezone = db.Column('timezone', db.Unicode)
createdAt = db.Column('createdAt', db.Date)
session = db.Column('session', db.Unicode)
updatedAt = db.Column('updatedAt', db.Date)
def __init__(self, _id, wondeID, date, timezoneType, timezone, createdAt, session, updatedAt):
self._id = _id
self.wondeID = wondeID
self.date = date
self.timezoneType = timezoneType
self.timezone = timezone
self.createdAt = createdAt
self.session = session
self.updatedAt = updatedAt
#classmethod
def add_or_update(cls, _id, wondeID, date, timezoneType, timezone, createdAt, session, updatedAt):
entity = cls.query.filter_by(_id=sessionAttendance._id.iloc[0]).first()
if not entity:
entity = cls(sessionAttendance._id.iloc[0], sessionAttendance.wondeID.iloc[0], sessionAttendance.date.iloc[0], sessionAttendance.timezoneType.iloc[0], sessionAttendance.timezone.iloc[0], sessionAttendance.createdAt.iloc[0], sessionAttendance.session.iloc[0], sessionAttendance.updatedAt.iloc[0])
db.session.add(entity)
db.session.commit()
print("Adding Record")
else:
entity.attendanceCode = 'late'
db.session.commit()
print("Updating Record")
return entity
example = Example(sessionAttendance._id.iloc[0], sessionAttendance.wondeID.iloc[0], sessionAttendance.date.iloc[0], sessionAttendance.timezoneType.iloc[0], sessionAttendance.timezone.iloc[0], sessionAttendance.createdAt.iloc[0], sessionAttendance.session.iloc[0], sessionAttendance.updatedAt.iloc[0])
example.add_or_update(sessionAttendance._id.iloc[0], sessionAttendance.wondeID.iloc[0], sessionAttendance.date.iloc[0], sessionAttendance.timezoneType.iloc[0], sessionAttendance.timezone.iloc[0], sessionAttendance.createdAt.iloc[0], sessionAttendance.session.iloc[0], sessionAttendance.updatedAt.iloc[0])
examples = Example.query.all()
for ex in examples:
print (ex.date)
Apparently the MySQL method for bulk upsert is rather slow so maybe looping is your best option.
Here is how you'd do it using .iterrows():
for idx,row in sessionAttendance.iterrows():
example = Example(row._id, row.wondeID, row.date, row.timezoneType,
row.timezone, row.createdAt, row.session, row.updatedAt)
example.add_or_update(row._id, row.wondeID, row.date, row.timezoneType,
row.timezone, row.createdAt, row.sessio, row.updatedAt)

I want to store datetime in peewee.DateTimeField(), I use datetime.datetime.now() in that field as default but it can't work

this code cannot work and give json serializable error
class Bank(peewee.Model): // create Bank table
bank_id = peewee.PrimaryKeyField()
bank_name = peewee.CharField()
account_no = peewee.CharField()
ifc_code = peewee.CharField()
swift_code = peewee.CharField(null = True)
modify_date = peewee.DateTimeField(default=datetime.datetime.now(),formats=['%Y-%m-%d'])/*date in yyyy-mm-dd formate*/
status = peewee.IntegerField(default = 0)
class Meta:
database = db
This answer is very incorrect - please see my answer below (#coleifer).
The default date that you are providing is not a datetime object. Rather it's a string!
modify_date = peewee.DateTimeField(default=datetime.datetime.now().strftime('%Y-%m-%d'))
type(datetime.datetime.now().strftime('%Y-%m-%d')) --> str
You can pass default current datetime object like this:
date = datetime.datetime.now().strftime('%Y-%m-%d')
need_date = datetime.strptime(date, '%Y-%m-%d')
modify_date = peewee.DateTimeField(default=need_date)
or
peewee.DateTimeField(default=datetime.datetime.now)
It looks like non-timezone aware datetimes work fine, so if you're using UTC then you can store datetime.utcnow() as that returns the current UTC date and time with tzinfo None i.e. as a "naive" datetime object.
I found this solution to store and retrieve the timezone aware field as text, however it's not ideal as the datetime object isn't being stored.
from datetime import datetime
from peewee import *
class TimestampTzField(Field):
"""
A timestamp field that supports a timezone by serializing the value
with isoformat.
"""
field_type = "TEXT"
def db_value(self, value: datetime) -> str:
if value:
return value.isoformat()
def python_value(self, value: str) -> str:
if value:
return datetime.fromisoformat(value)
https://compileandrun.com/python-peewee-timezone-aware-datetime/
If you want to store a date, use the DateField. Also, the default needs to be a callable -- in other words, leave OFF the parentheses!
class Bank(peewee.Model): // create Bank table
bank_id = peewee.PrimaryKeyField()
bank_name = peewee.CharField()
account_no = peewee.CharField()
ifc_code = peewee.CharField()
swift_code = peewee.CharField(null = True)
modify_date = peewee.DateField(default=datetime.date.today)
status = peewee.IntegerField(default = 0)
class Meta:
database = db
When it comes time to serialize this as Json, just use a custom json formatter that can handle python datetime.date objects. This is the proper way. You should always store your data using the appropriate format and worry about presentation (and serialization) in another layer.
It is very simple to extend Python's json serializer to handle unsupported types:
def convert_date(o):
if isinstance(o, datetime.date):
return o.__str__()
json.dumps(my_obj, default=convert_date)

sqlalchemy conditional multiple filters on dynamic lazy relationship

I am using sqlalchemy with the following models
class Page(db.Model):
id= ..
posts = db.relationship('Post', lazy='dynamic')
class Post(db.Model):
id=..
page_id=..
author= db.Column(db.String)
date= db.Column(db.DateTime)
in the Page class I have a method to get the page's posts for a specific date and author, it looks like that
def author_posts(author, start_date=None, end_date=None):
p= self.posts.filter(Post.author == author)
if start_date:
p.filter(Post.date >= start_date)
if end_date:
p.filter(Post.date <= end_date)
return p
The problem is, even if the function is given a start and end date, it returns post filtered by author but never by the dates argument.
What's the right way to do it?
Edit: The query generated
SELECT post.id AS post_id, post.page_id AS post_page_id, post.author AS post_author ... FROM post WHERE post.author = ?
filter() returns a new query object, but you do not store it. Replace p with the result each time:
if start_date:
p = p.filter(Post.date >= start_date)
if end_date:
p = p.filter(Post.date <= end_date)
return p

Flask-SQLalchemy update a row's information

How can I update a row's information?
For example I'd like to alter the name column of the row that has the id 5.
Retrieve an object using the tutorial shown in the Flask-SQLAlchemy documentation. Once you have the entity that you want to change, change the entity itself. Then, db.session.commit().
For example:
admin = User.query.filter_by(username='admin').first()
admin.email = 'my_new_email#example.com'
db.session.commit()
user = User.query.get(5)
user.name = 'New Name'
db.session.commit()
Flask-SQLAlchemy is based on SQLAlchemy, so be sure to check out the SQLAlchemy Docs as well.
There is a method update on BaseQuery object in SQLAlchemy, which is returned by filter_by.
num_rows_updated = User.query.filter_by(username='admin').update(dict(email='my_new_email#example.com')))
db.session.commit()
The advantage of using update over changing the entity comes when there are many objects to be updated.
If you want to give add_user permission to all the admins,
rows_changed = User.query.filter_by(role='admin').update(dict(permission='add_user'))
db.session.commit()
Notice that filter_by takes keyword arguments (use only one =) as opposed to filter which takes an expression.
This does not work if you modify a pickled attribute of the model. Pickled attributes should be replaced in order to trigger updates:
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
from pprint import pprint
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqllite:////tmp/users.db'
db = SQLAlchemy(app)
class User(db.Model):
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(80), unique=True)
data = db.Column(db.PickleType())
def __init__(self, name, data):
self.name = name
self.data = data
def __repr__(self):
return '<User %r>' % self.username
db.create_all()
# Create a user.
bob = User('Bob', {})
db.session.add(bob)
db.session.commit()
# Retrieve the row by its name.
bob = User.query.filter_by(name='Bob').first()
pprint(bob.data) # {}
# Modifying data is ignored.
bob.data['foo'] = 123
db.session.commit()
bob = User.query.filter_by(name='Bob').first()
pprint(bob.data) # {}
# Replacing data is respected.
bob.data = {'bar': 321}
db.session.commit()
bob = User.query.filter_by(name='Bob').first()
pprint(bob.data) # {'bar': 321}
# Modifying data is ignored.
bob.data['moo'] = 789
db.session.commit()
bob = User.query.filter_by(name='Bob').first()
pprint(bob.data) # {'bar': 321}
Just assigning the value and committing them will work for all the data types but JSON and Pickled attributes. Since pickled type is explained above I'll note down a slightly different but easy way to update JSONs.
class User(db.Model):
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(80), unique=True)
data = db.Column(db.JSON)
def __init__(self, name, data):
self.name = name
self.data = data
Let's say the model is like above.
user = User("Jon Dove", {"country":"Sri Lanka"})
db.session.add(user)
db.session.flush()
db.session.commit()
This will add the user into the MySQL database with data {"country":"Sri Lanka"}
Modifying data will be ignored. My code that didn't work is as follows.
user = User.query().filter(User.name=='Jon Dove')
data = user.data
data["province"] = "south"
user.data = data
db.session.merge(user)
db.session.flush()
db.session.commit()
Instead of going through the painful work of copying the JSON to a new dict (not assigning it to a new variable as above), which should have worked I found a simple way to do that. There is a way to flag the system that JSONs have changed.
Following is the working code.
from sqlalchemy.orm.attributes import flag_modified
user = User.query().filter(User.name=='Jon Dove')
data = user.data
data["province"] = "south"
user.data = data
flag_modified(user, "data")
db.session.merge(user)
db.session.flush()
db.session.commit()
This worked like a charm.
There is another method proposed along with this method here
Hope I've helped some one.
Models.py define the serializers
def default(o):
if isinstance(o, (date, datetime)):
return o.isoformat()
def get_model_columns(instance,exclude=[]):
columns=instance.__table__.columns.keys()
columns=list(set(columns)-set(exclude))
return columns
class User(db.Model):
__tablename__='user'
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
.......
####
def serializers(self):
cols = get_model_columns(self)
dict_val = {}
for c in cols:
dict_val[c] = getattr(self, c)
return json.loads(json.dumps(dict_val,default=default))
In RestApi, We can update the record dynamically by passing the json data into update query:
class UpdateUserDetails(Resource):
#auth_token_required
def post(self):
json_data = request.get_json()
user_id = current_user.id
try:
instance = User.query.filter(User.id==user_id)
data=instance.update(dict(json_data))
db.session.commit()
updateddata=instance.first()
msg={"msg":"User details updated successfully","data":updateddata.serializers()}
code=200
except Exception as e:
print(e)
msg = {"msg": "Failed to update the userdetails! please contact your administartor."}
code=500
return msg
I was looking for something a little less intrusive then #Ramesh's answer (which was good) but still dynamic. Here is a solution attaching an update method to a db.Model object.
You pass in a dictionary and it will update only the columns that you pass in.
class SampleObject(db.Model):
id = db.Column(db.BigInteger, primary_key=True)
name = db.Column(db.String(128), nullable=False)
notes = db.Column(db.Text, nullable=False)
def update(self, update_dictionary: dict):
for col_name in self.__table__.columns.keys():
if col_name in update_dictionary:
setattr(self, col_name, update_dictionary[col_name])
db.session.add(self)
db.session.commit()
Then in a route you can do
object = SampleObject.query.where(SampleObject.id == id).first()
object.update(update_dictionary=request.get_json())
Update the Columns in flask
admin = User.query.filter_by(username='admin').first()
admin.email = 'my_new_email#example.com'
admin.save()
To use the update method (which updates the entree outside of the session) you have to query the object in steps like this:
query = db.session.query(UserModel)
query = query.filter(UserModel.id == user_id)
query.update(user_dumped)
db.session.commit()

Categories

Resources