I have a function for filling in many fields automatically with an @api.onchange() decorator.
# @api.onchange('nursery_morning', 'nursery_evening', 'responsible_partner')
def retrieve_responsible_nursery(self):
    """Prefill the resp_* fields from the selected responsible partner.

    Fires only when a nursery option is ticked, a responsible partner is
    set, and every resp_* field is still empty, so that values typed
    manually by the user are never overwritten.
    """
    if self.nursery_morning or self.nursery_evening:
        if self.responsible_partner:
            # Copy only when ALL target fields are empty.
            if (not self.resp_civility1 and not self.resp_name1
                    and not self.resp_cp1 and not self.resp_num1
                    and not self.resp_address1 and not self.resp_town1
                    and not self.resp_phone1 and not self.resp_phonemobile1):
                partner = self.responsible_partner
                self.resp_civility1 = partner.title.name
                self.resp_name1 = partner.name
                self.resp_cp1 = partner.zip_id.name
                self.resp_num1 = partner.street_number_id.name
                self.resp_address1 = partner.street_id.name
                self.resp_town1 = partner.city_id.name
                self.resp_phone1 = partner.phone
                self.resp_phonemobile1 = partner.mobile
This function works, but I do not want the fields to change until the fields are saved in the database and not before.
Currently, the fields do not change once one of the fields listed are filled but not saved in database
Two ways you can approach this, it seems that you could change these fields to be related fields if they are only ever going to hold the values from a related record (responsible_partner).
resp_civility1 = fields.Char("Field Label", related="responsible_partner.title.name")
Editing this field will edit the linked record as well, so it will not be a good fit if you want to change this value at all, or you could set it as read-only if you don't want it to be edited from here.
Another way to do it would be to override the write method, this will write the values at the time of saving the record and would look like this:
# @api.multi
def write(self, vals):
    """Persist the changes, then prefill the resp_* fields at save time.

    In Odoo, ``super().write()`` returns a boolean, not a recordset, so the
    follow-up logic must iterate over ``self`` (the records being written),
    not over the return value — the original snippet dereferenced ``res``
    as if it were a record, which would raise AttributeError.
    """
    res = super(ModelName, self).write(vals)
    if vals.get('nursery_morning') or vals.get('nursery_evening') \
            or vals.get('responsible_partner'):
        for record in self:
            if record.nursery_morning or record.nursery_evening:
                if record.responsible_partner:
                    # Copy only when ALL target fields are empty.
                    if (not record.resp_civility1 and not record.resp_name1
                            and not record.resp_cp1 and not record.resp_num1
                            and not record.resp_address1
                            and not record.resp_town1
                            and not record.resp_phone1
                            and not record.resp_phonemobile1):
                        partner = record.responsible_partner
                        record.resp_civility1 = partner.title.name
                        record.resp_name1 = partner.name
                        record.resp_cp1 = partner.zip_id.name
                        record.resp_num1 = partner.street_number_id.name
                        record.resp_address1 = partner.street_id.name
                        record.resp_town1 = partner.city_id.name
                        record.resp_phone1 = partner.phone
                        record.resp_phonemobile1 = partner.mobile
    return res
This will first call the write method of the model, then it will check if the three fields are being written to, if so it will run the same logic from your onchange method. Replace ModelName with whatever you have named your model class.
Related
Can anyone help please with the below:
I'm trying to divide two float fields but I'm getting an error
AttributeError: 'stock.picking' object has no attribute 'calculate_reserved_per'
here's my code:
from odoo import models, fields, api


class PickingTotalQty(models.Model):
    """Add total demanded/reserved quantities and a reserved % to pickings."""

    _inherit = 'stock.picking'

    sum_dmd_qty = fields.Float(compute='calculate_dmd_qty',
                               string='Total Demand Quantity')
    sum_reserved_qty = fields.Float(compute='calculate_reserved_qty',
                                    string='Total Reserved Quantity')
    reserved_qty_per = fields.Float(compute='calculate_reserved_per',
                                    string='Reserved Quantity (%)')

    @api.depends('move_lines.product_uom_qty')
    def calculate_dmd_qty(self):
        for rs in self:
            # The assignment must stay INSIDE the loop so every record in
            # self gets a value (the original assigned only the last one).
            rs.sum_dmd_qty = sum(
                line.product_uom_qty for line in rs.move_lines)

    @api.depends('move_lines.reserved_availability')
    def calculate_reserved_qty(self):
        for rs in self:
            rs.sum_reserved_qty = sum(
                line.reserved_availability for line in rs.move_lines)

    @api.depends('sum_reserved_qty', 'sum_dmd_qty')
    def calculate_reserved_per(self):
        for rec in self:
            # Guard against division by zero, and assign a value for every
            # record — compute methods must set all records in self.
            rec.reserved_qty_per = (
                rec.sum_reserved_qty / rec.sum_dmd_qty
                if rec.sum_dmd_qty else 0.0)
If you have already restarted the server check the indentation of the calculate_reserved_per method and make sure rec.sum_dmd_qty is different from Zero before dividing.
The strange behavior you are facing is the result of the indentation of the last line in both methods: calculate_dmd_qty and calculate_reserved_qty which ignore all records except the last one. In the form view, the method is called with a recordset containing one record, the current record.
To correct that, move the last line of each method inside the for-loop.
sum_dmd_qty and sum_reserved_qty are computed fields that depend on move_lines.product_uom_qty and move_lines.reserved_availability respectively; @api.depends should be used to specify when to recompute their values.
You can use the accounting invoice _compute_amount as a reference and merge the three methods in one method that compute the value of the three fields.
Example:
#api.depends("move_lines.product_uom_qty", "move_lines.reserved_availability")
def _calculate_quantities(self):
for picking in self:
dmd_qty = 0
reserved_qty = 0
for move in picking.move_lines:
dmd_qty += move.product_uom_qty
reserved_qty += move.reserved_availability
picking.sum_dmd_qty = dmd_qty
picking.sum_reserved_qty = reserved_qty
if picking.sum_dmd_qty:
picking.reserved_qty_per = picking.sum_reserved_qty / picking.sum_dmd_qty
I did not use @api.one because it is not available in Odoo 13.
I'm trying to insert rows into a table after changing its schema in Cassandra with the CQLEngine python library. Before the change, the model looked like:
class MetricsByDevice(Model):
    """Pre-migration cqlengine model: partition key on device, clustering
    on datetime (newest first)."""
    device = columns.Text(primary_key=True, partition_key=True)
    datetime = columns.DateTime(primary_key=True, clustering_order="DESC")
    load_power = columns.Double()
    inverter_power = columns.Double()
I've changed the schema to this, adding four columns (DSO, node, park and commercializer):
class MetricsByDevice(Model):
    device = columns.Text(primary_key=True, partition_key=True)
    datetime = columns.DateTime(primary_key=True, clustering_order="DESC")
    # NOTE(review): the trailing commas on the next four lines wrap each
    # column object in a 1-tuple, so cqlengine never registers them as real
    # columns — this is exactly the bug diagnosed later in this question.
    DSO = columns.Text(index=True, default='DSO_1'),
    node = columns.Text(index=True, default='Node_1'),
    park = columns.Integer(index=True, default=6),
    commercializer = columns.Text(index=True, default='Commercializer_1'),
    load_power = columns.Double()
    inverter_power = columns.Double()
Then, I've synced the table with a script containing the line
sync_table(MetricsByDate)
I've checked the database and the four columns have been created. The existing rows have these fields with value NULL (as expected).
Then I've modified the script in charge of inserting in batch rows including the values corresponding to the new fields. It looks like:
batch = BatchQuery()
for idx, message in enumerate(consumer):
    data = message.value
    ts_to_insert = dateutil.parser.parse(data['timestamp'])
    filters = get_filters(message.partition_key)
    # NOTE(review): the model shown above is named MetricsByDevice but this
    # code writes through MetricsByDate — presumably a second, similar
    # model; confirm which one is intended.
    MetricsByDate.batch(batch).create(
        device=device,
        date=str(ts_to_insert.date()),
        time=str(ts_to_insert.time()),
        created_at=now,
        DSO=str(filters['DSO']),
        node=str(filters['node']),
        park=int(filters['park']),
        commercializer=str(filters['commercializer']),
        load_power=data['loadPower'],
        inverter_power=data['inverterPower'],
    )
    if idx % 100 == 0:  # Insert every 100 messages
        batch.execute()
        # Reset batch
        batch = BatchQuery()
I've already checked that the values corresponding to the new fields aren't None and have the correct type. Nevertheless, it inserts every row correctly except for the values of the new fields, which end up NULL in Cassandra.
The batch insertion does not return any errors. I don't know if I'm missing something, or if I need to do an extra step to update the schema. I've been looking in the docs, but I can't find anything that helps.
Is there anything I'm doing wrong?
EDIT
After Alex Ott suggestion, I've inserted the lines one by one. Changing the code to:
for idx, message in enumerate(consumer):
    data = message.value
    ts_to_insert = dateutil.parser.parse(data['timestamp'])
    filters = get_filters(message.partition_key)
    # One-by-one variant of the batch loop above: build the model instance,
    # then persist it with .save().
    metrics_by_date = MetricsByDate(
        device=device,
        date=str(ts_to_insert.date()),
        time=str(ts_to_insert.time()),
        created_at=now,
        DSO=str(filters['DSO']),
        node=str(filters['node']),
        park=int(filters['park']),
        commercializer=str(filters['commercializer']),
        load_power=data['loadPower'],
        inverter_power=data['inverterPower'],
    )
    metrics_by_date.save()
If before executing the line metrics_by_date.save() I add these print statements:
# Debug output inspected just before metrics_by_date.save(); note that the
# first two attributes print column-descriptor tuples rather than values —
# the symptom of the trailing-comma bug diagnosed below.
print(metrics_by_date.DSO)
print(metrics_by_date.park)
print(metrics_by_date.load_power)
print(metrics_by_date.device)
print(metrics_by_date.date)
The output is:
(<cassandra.cqlengine.columns.Text object at 0x7ff0b492a670>,)
(<cassandra.cqlengine.columns.Integer object at 0x7ff0b492d190>,)
256.99
SQ3-3.2.3.1-70-17444
2020-04-22
In the fields that are new I'm getting a cassandra object, but in the others I get their values. It maybe is a clue, because it continues to insert NULL in the new column.
Finally I got It.
It was something silly: in the model definition, for unknown reasons, I had added commas to separate the fields instead of line breaks...
So correcting the model definition to:
class MetricsByDevice(Model):
    """Corrected model: fields separated by line breaks (no trailing
    commas), so the four new attributes are real cqlengine columns."""
    device = columns.Text(primary_key=True, partition_key=True)
    datetime = columns.DateTime(primary_key=True, clustering_order="DESC")
    DSO = columns.Text(index=True, default='DSO_1')
    node = columns.Text(index=True, default='Node_1')
    park = columns.Integer(index=True, default=6)
    commercializer = columns.Text(index=True, default='Commercializer_1')
    load_power = columns.Double()
    inverter_power = columns.Double()
It works!!
I'm trying to update a database using API-gathered data, and I need to make sure all tables are being updated.
Sometime I will receive data that's already in the database, so I want to do an INSERT IGNORE.
My current code is something like this:
def update_orders(new_orders):
    """Build Orders rows from API payloads and commit any new entries."""
    entries = []
    for each_order in new_orders:
        new_entry = Orders(
            id=each_order['id'],
            title=each_order['title'],
            code=each_order['code'],
            source=each_order['source'],
            phone=each_order['phone'],
            category=each_order['delivery_category'],
            carrier=each_order['carrier_identifier'],
            price=each_order['price'],
        )
        entries.append(new_entry)
    if not entries:
        # `break` is illegal outside a loop (the original snippet used it
        # here); simply return when there is nothing to insert.
        print('No new orders.')
        return
    print('New orders:', len(entries))
    db.session.add_all(entries)
    db.session.commit()
This works well when I'm creating the database from scratch, but it will give me an error if there's duplicate data, and I'm not able to commit the inserts.
I've been reading for a while, and found a workaround that uses prefix_with:
print('New orders:', len(entries))
if len(entries) == 0:
    print('No new orders.')
else:
    # NOTE(review): Core .values() expects plain dicts of column values,
    # not ORM model instances — that mismatch is the problem described below.
    insert_command = (Orders.__table__.insert()
                      .prefix_with('OR IGNORE')
                      .values(entries))
    db.session.execute(insert_command)
    db.session.commit()
The problem is that values(entries) receives a bunch of ORM objects:
<shop.database.models.Orders object at 0x11986def0> — i.e. the class instances themselves (their in-memory reprs), rather than the plain column values the insert statement expects.
Anybody has any suggestion on approaching this problem?
Feel free to suggest a different approach, or just an adjustment.
Thanks a lot.
What database are you using ? Under MySQL, "INSERT OR IGNORE" is not valid syntax, instead one should use "INSERT IGNORE". I had the same situation and got my query to work with the following:
insert_command = Orders.__table__.insert().prefix_with(' IGNORE').values(entries)
I am attempting to query all rows for a column called show_id. I would then like to compare each potential item to be added to the DB with the results. Now the simplest way I can think of doing that is by checking if each show is in the results. If so pass etc. However the results from the below snippet are returned as objects. So this check fails.
Is there a better way to create the query to achieve this?
shows_inDB = Show.query.filter(Show.show_id).all()
print(shows_inDB)
Results:
<app.models.user.Show object at 0x10c2c5fd0>,
<app.models.user.Show object at 0x10c2da080>,
<app.models.user.Show object at 0x10c2da0f0>
Code for the entire function:
def save_changes_show(show_details):
    """
    Save the changes to the database
    """
    try:
        shows_inDB = Show.query.filter(Show.show_id).all()
        # NOTE(review): this returns Show model instances, so the membership
        # test below compares raw ids against objects and never matches —
        # the bug this question is about.
        print(shows_inDB)
        for show in show_details:
            # Check the show isnt already in the DB
            if show['id'] in shows_inDB:
                print(str(show['id']) + ' Already Present')
            else:
                # Add show to DB
                tv_show = Show(
                    show_id=show['id'],
                    seriesName=str(show['seriesName']).encode(),
                    aliases=str(show['aliases']).encode(),
                    banner=str(show['banner']).encode(),
                    seriesId=str(show['seriesId']).encode(),
                    status=str(show['status']).encode(),
                    firstAired=str(show['firstAired']).encode(),
                    network=str(show['network']).encode(),
                    networkId=str(show['networkId']).encode(),
                    runtime=str(show['runtime']).encode(),
                    genre=str(show['genre']).encode(),
                    overview=str(show['overview']).encode(),
                    lastUpdated=str(show['lastUpdated']).encode(),
                    airsDayOfWeek=str(show['airsDayOfWeek']).encode(),
                    airsTime=str(show['airsTime']).encode(),
                    rating=str(show['rating']).encode(),
                    imdbId=str(show['imdbId']).encode(),
                    zap2itId=str(show['zap2itId']).encode(),
                    added=str(show['added']).encode(),
                    addedBy=str(show['addedBy']).encode(),
                    siteRating=str(show['siteRating']).encode(),
                    siteRatingCount=str(show['siteRatingCount']).encode(),
                    slug=str(show['slug']).encode()
                )
                db.session.add(tv_show)
        db.session.commit()
    except Exception:
        print(traceback.print_exc())
I have decided to use the method above and extract the data I wanted into a list, comparing each show to the list.
show_compare = []
shows_inDB = Show.query.filter().all()
for item in shows_inDB:
    show_compare.append(item.show_id)
for show in show_details:
    # Check the show isnt already in the DB
    if show['id'] in show_compare:
        print(str(show['id']) + ' Already Present')
    else:
        # Add show to DB
        pass  # (creation logic as in the function above)
For querying a specific column value, have a look at this question: Flask SQLAlchemy query, specify column names. This is the example code given in the top answer there:
result = SomeModel.query.with_entities(SomeModel.col1, SomeModel.col2)
The crux of your problem is that you want to create a new Show instance if that show doesn't already exist in the database.
Querying the database for all shows and looping through the result for each potential new show might become very inefficient if you end up with a lot of shows in the database, and finding an object by identity is what an RDBMS does best!
This function will check to see if an object exists, and create it if not. Inspired by this answer:
def add_if_not_exists(model, **kwargs):
    """Stage a new `model` row matching kwargs unless one already exists.

    Lets the database do the identity lookup instead of scanning a
    pre-fetched list in Python. Note the caller is responsible for the
    final db.session.commit().
    """
    if not model.query.filter_by(**kwargs).first():
        instance = model(**kwargs)
        db.session.add(instance)
So your example would look like:
def add_if_not_exists(model, **kwargs):
    # Create and stage the row only when no matching row exists yet.
    if not model.query.filter_by(**kwargs).first():
        instance = model(**kwargs)
        db.session.add(instance)


for show in show_details:
    add_if_not_exists(Show, id=show['id'])
If you really want to query all shows upfront, instead of putting all of the id's into a list, you could use a set instead of a list which will speed up your inclusion test.
E.g:
# A set gives O(1) membership tests versus O(n) for a list.
show_compare = {item.show_id for item in Show.query.all()}
for show in show_details:
    # ... same as your code
    pass
Given PostgreSQL 9.2.10, Django 1.8, python 2.7.5, the following model:
class soapProdAPI(models.Model):
    """SOAP production record; soap_ip is resolved separately and defaults
    to the 0.0.0.0 sentinel."""
    soap_id = models.PositiveIntegerField(primary_key=True)
    soap_host = models.CharField(max_length=20)
    soap_ip = models.GenericIPAddressField(default='0.0.0.0')
    soap_asset = models.CharField(max_length=20)
    soap_state = models.CharField(max_length=20)
And the following code:
tableProdSoap = soapProdQuery()


# @periodic_task(run_every=timedelta(minutes=2))
def saveSoapProd():
    """Periodically mirror the SOAP query results into soapProdAPI rows."""
    tableProdSoap = soapProdQuery()
    if tableProdSoap is not None:
        for item in tableProdSoap:
            commit = soapProdAPI(soap_id=item[0], soap_host=item[1],
                                 soap_asset=item[2], soap_state=item[3])
            # NOTE(review): save() without update_fields rewrites soap_ip
            # back to its default — the clobbering described below.
            commit.save()
        saveSoapNullIP()
To answer Josué Padilla's question:
# @task
def saveSoapNullIP():
    """Resolve and store IPs for records still holding the 0.0.0.0 default."""
    missingIP = soapProdAPI.objects.filter(soap_ip='0.0.0.0')
    if missingIP:
        for record in missingIP:
            host = str(record.soap_host).lower()
            if host.startswith('1a'):
                fqdn = str(record.soap_host) + 'stringvaluehere'
            elif host.startswith('1b'):
                fqdn = str(record.soap_host) + 'stringvaluehere'
            elif host.startswith('1c'):
                fqdn = str(record.soap_host) + 'stringvaluehere'
            else:
                fqdn = str(record.soap_host) + 'stringvaluehere'
            try:
                # SECURITY NOTE: shell=True with an interpolated hostname is
                # injection-prone; prefer check_output(['host', fqdn]).
                hostIp = check_output('host %s' % fqdn, shell=True)
                hostIp = hostIp.split()[-1]
            except Exception:
                # Fall back to the sentinel so the record is retried on the
                # next run (the original used a bare `except:`).
                hostIp = '0.0.0.0'
            record.soap_ip = hostIp
            record.save(update_fields=['soap_ip'])
My soapProdQuery only returns these 4 fields where there is a 5th field in the model (soap_ip). I know it is probably not the best way to do it but I have a separate block of code that queries the db for None values in soap_ip runs a subprocess host on them and saves it back with the ip address (The number of rows returned/updated should get smaller each pass through, as opposed to putting the logic for doing a host lookup into the request/this celery task itself which would run every API request. I have tried this already, it takes FOREVER to return the completed data.). The soap API I query does not provide the IP or I would grab it that way obviously. This all runs as background tasks using celery to make it invisible/seamless to the web user.
The issue I run into is that every time the saveSoapProd() runs it overwrites the previous soap_ip field with '0.0.0.0' thus negating the work of my other function. The other issue is that I cannot force_insert or force_update as I need both functionalities with this. My question is this: is there a way to selectively update/insert at the same time and completely exclude doing anything to the soap_ip each time saveSoapProd() runs? Any and all help is greatly appreciated. Thank you in advance.
** EDIT 1 **
I may or may not have found a solution in update_or_create or get_or_create, however I am unsure on the exact usage. The docs have me slightly confused.
** EDIT 2 **
I guess get_or_create is a bust. Works first pass through but every save after that fails with this:
Traceback (most recent call last):
File "<console>", line 1, in <module>
File "<console>", line 8, in saveSoapProd
File "/lib/python2.7/site-packages/django/db/models/base.py", line 690, in save
% ', '.join(non_model_fields))
ValueError: The following fields do not exist in this model or are m2m fields: soap_id
Here is the code:
# @periodic_task(run_every=timedelta(minutes=2))
def saveSoapProd():
    tableProdSoap = soapProdQuery()
    if tableProdSoap is not None:
        for item in tableProdSoap:
            obj, created = soapProdAPI.objects.get_or_create(
                soap_id=item[0],
                defaults={'soap_host': item[1], 'soap_asset': item[2],
                          'soap_state': item[3]})
            if not created:
                # NOTE(review): including the primary key soap_id in
                # update_fields is what raises the ValueError shown above.
                commit = soapProdAPI(soap_id=item[0], soap_host=item[1],
                                     soap_asset=item[2], soap_state=item[3])
                commit.save(update_fields=['soap_id', 'soap_host',
                                           'soap_asset', 'soap_state'])
I will be honest, I am not entirely sure what is causing this error.
** EDIT 3/CURRENT SOLUTION **
I was able to resolve my own issue by modifying my model and my task function. The solution uses get_or_create, but you could easily extrapolate how to use update_or_create from the solution provided. See the selected answer below for a coded example.
** TLDR **
I want to do a .save() where it may need to do a insert for new records or update for changed records WITHOUT touching the soap_ip field (no insert_only or update_only).
I don't know if you already knew this, but you can override the save() function of your model.
class soapProdAPI(models.Model):
    soap_id = models.PositiveIntegerField(primary_key=True)
    soap_host = models.CharField(max_length=20)
    soap_ip = models.GenericIPAddressField(default='0.0.0.0')
    soap_asset = models.CharField(max_length=20)
    soap_state = models.CharField(max_length=20)

    # Override save
    def save(self, *args, **kwargs):
        if self.soap_ip != '0.0.0.0':
            # Here you can get your old IP and save that instead of 0.0.0.0.
            # NOTE(review): the condition may need inverting (== '0.0.0.0')
            # depending on when the old IP should be restored — confirm.
            self.soap_ip = your_ip
        # The override must still delegate to the base implementation,
        # otherwise nothing is ever written to the database (the original
        # snippet omitted this call).
        super(soapProdAPI, self).save(*args, **kwargs)
EDIT
You are getting
ValueError: The following fields do not exist in this model or are m2m fields: soap_id
Because you are trying to update soap_id, that field is defined as your model's primary key, so it is immutable when updating. That's why it crashes when you do:
commit.save(update_fields=['soap_id', 'soap_host', 'soap_asset', 'soap_state'])
Try removing soap_id from update_fields.
Solved my own issue without modifying the save method by making the following changes to my model:
class soapProdAPI(models.Model):
    # soap_id changed from primary_key=True (see the model above) to a
    # plain unique field, so updates no longer target the primary key and
    # get_or_create/save(update_fields=...) work as intended.
    soap_id = models.PositiveIntegerField(unique=True, null=False)
    soap_host = models.CharField(max_length=20)
    soap_ip = models.GenericIPAddressField(default='0.0.0.0')
    soap_asset = models.CharField(max_length=20)
    soap_state = models.CharField(max_length=20)
and my task:
def saveSoapProd():
    """Insert-or-update SOAP records without ever touching soap_ip."""
    tableProdSoap = soapProdQuery()
    if tableProdSoap is not None:
        for item in tableProdSoap:
            try:
                obj, created = soapProdAPI.objects.get_or_create(
                    soap_id=item[0],
                    defaults={'soap_host': item[1], 'soap_asset': item[2],
                              'soap_state': item[3]})
                if not created:
                    # Existing row: update everything except soap_ip.
                    obj.soap_host = item[1]
                    obj.soap_asset = item[2]
                    obj.soap_state = item[3]
                    obj.save(update_fields=['soap_host', 'soap_asset',
                                            'soap_state'])
            except Exception:
                # NOTE(review): silently skipping failures hides errors;
                # at minimum log the exception before continuing.
                continue
        saveSoapMissingIP()
EDIT
Just noticed Josué Padilla's response, which was in fact part of my problem that I solved with this answer. Thank you to Josué for all of your help.