Related
Error:
Odoo Server Error
Traceback (most recent call last):
File "F:\odoo-14.0\odoo-14.0\odoo\tools\convert.py", line 677, in _tag_root
f(rec)
File "F:\odoo-14.0\odoo-14.0\odoo\tools\convert.py", line 655, in _tag_template
return self._tag_record(record)
File "F:\odoo-14.0\odoo-14.0\odoo\tools\convert.py", line 580, in _tag_record
record = model._load_records([data], self.mode == 'update')
File "F:\odoo-14.0\odoo-14.0\odoo\models.py", line 4213, in _load_records
records = self._load_records_create([data['values'] for data in to_create])
File "F:\odoo-14.0\odoo-14.0\odoo\models.py", line 4142, in _load_records_create
return self.create(values)
File "<decorator-gen-43>", line 2, in create
File "F:\odoo-14.0\odoo-14.0\odoo\api.py", line 345, in _model_create_multi
return create(self, arg)
File "F:\odoo-14.0\odoo-14.0\odoo\addons\base\models\ir_ui_view.py", line 482, in create
return super(View, self).create(vals_list)
File "<decorator-gen-65>", line 2, in create
File "F:\odoo-14.0\odoo-14.0\odoo\api.py", line 345, in _model_create_multi
return create(self, arg)
File "F:\odoo-14.0\odoo-14.0\odoo\addons\base\models\ir_fields.py", line 534, in create
recs = super().create(vals_list)
File "<decorator-gen-13>", line 2, in create
File "F:\odoo-14.0\odoo-14.0\odoo\api.py", line 345, in _model_create_multi
return create(self, arg)
File "F:\odoo-14.0\odoo-14.0\odoo\models.py", line 3903, in create
fields[0].determine_inverse(batch_recs)
File "F:\odoo-14.0\odoo-14.0\odoo\fields.py", line 1185, in determine_inverse
getattr(records, self.inverse)()
File "F:\odoo-14.0\odoo-14.0\odoo\addons\base\models\ir_ui_view.py", line 300, in _inverse_arch
view.write(data)
File "F:\odoo-14.0\odoo-14.0\odoo\addons\base\models\ir_ui_view.py", line 500, in write
res = super(View, self).write(self._compute_defaults(vals))
File "F:\odoo-14.0\odoo-14.0\odoo\models.py", line 3687, in write
real_recs._validate_fields(vals, inverse_fields)
File "F:\odoo-14.0\odoo-14.0\odoo\models.py", line 1266, in _validate_fields
check(self)
File "F:\odoo-14.0\odoo-14.0\odoo\addons\base\models\ir_ui_view.py", line 411, in _check_xml
)).with_traceback(e.__traceback__) from None
File "F:\odoo-14.0\odoo-14.0\odoo\addons\base\models\ir_ui_view.py", line 385, in _check_xml
view_def = view.read_combined(['arch'])
File "F:\odoo-14.0\odoo-14.0\odoo\addons\base\models\ir_ui_view.py", line 809, in read_combined
arch = root.apply_view_inheritance(arch_tree, self.model)
File "F:\odoo-14.0\odoo-14.0\odoo\addons\base\models\ir_ui_view.py", line 750, in apply_view_inheritance
return self._apply_view_inheritance(source, inherit_tree)
File "F:\odoo-14.0\odoo-14.0\odoo\addons\base\models\ir_ui_view.py", line 758, in _apply_view_inheritance
source = view.apply_inheritance_specs(source, arch_tree)
File "F:\odoo-14.0\odoo-14.0\odoo\addons\base\models\ir_ui_view.py", line 735, in apply_inheritance_specs
self.handle_view_error(str(e))
File "F:\odoo-14.0\odoo-14.0\odoo\addons\base\models\ir_ui_view.py", line 673, in handle_view_error
raise ValueError(formatted_message).with_traceback(from_traceback) from from_exception
odoo.exceptions.ValidationError: Error while validating view:
Element '<xpath expr="//script[last()]">' cannot be located in parent view
View name: QUnit Assets
Error context:
view: ir.ui.view(1033,)
xmlid: qunit_suite
view.parent: ir.ui.view(199,)
file: f:\odoo-14.0\odoo-14.0\custom\muk_web_utils\template\assets.xml
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "F:\odoo-14.0\odoo-14.0\odoo\addons\base\models\ir_http.py", line 237, in _dispatch
result = request.dispatch()
File "F:\odoo-14.0\odoo-14.0\odoo\http.py", line 684, in dispatch
result = self._call_function(**self.params)
File "F:\odoo-14.0\odoo-14.0\odoo\http.py", line 360, in _call_function
return checked_call(self.db, *args, **kwargs)
File "F:\odoo-14.0\odoo-14.0\odoo\service\model.py", line 94, in wrapper
return f(dbname, *args, **kwargs)
File "F:\odoo-14.0\odoo-14.0\odoo\http.py", line 348, in checked_call
result = self.endpoint(*a, **kw)
File "F:\odoo-14.0\odoo-14.0\odoo\http.py", line 913, in __call__
return self.method(*args, **kw)
File "F:\odoo-14.0\odoo-14.0\odoo\http.py", line 532, in response_wrap
response = f(*args, **kw)
File "f:\odoo-14.0\odoo-14.0\addons\web\controllers\main.py", line 1393, in call_button
action = self._call_kw(model, method, args, kwargs)
File "f:\odoo-14.0\odoo-14.0\addons\web\controllers\main.py", line 1381, in _call_kw
return call_kw(request.env[model], method, args, kwargs)
File "F:\odoo-14.0\odoo-14.0\odoo\api.py", line 396, in call_kw
result = _call_kw_multi(method, model, args, kwargs)
File "F:\odoo-14.0\odoo-14.0\odoo\api.py", line 383, in _call_kw_multi
result = method(recs, *args, **kwargs)
File "<decorator-gen-72>", line 2, in button_immediate_install
File "F:\odoo-14.0\odoo-14.0\odoo\addons\base\models\ir_module.py", line 74, in check_and_log
return method(self, *args, **kwargs)
File "F:\odoo-14.0\odoo-14.0\odoo\addons\base\models\ir_module.py", line 475, in button_immediate_install
return self._button_immediate_function(type(self).button_install)
File "F:\odoo-14.0\odoo-14.0\odoo\addons\base\models\ir_module.py", line 593, in _button_immediate_function
modules.registry.Registry.new(self._cr.dbname, update_module=True)
File "F:\odoo-14.0\odoo-14.0\odoo\modules\registry.py", line 89, in new
odoo.modules.load_modules(registry._db, force_demo, status, update_module)
File "F:\odoo-14.0\odoo-14.0\odoo\modules\loading.py", line 460, in load_modules
loaded_modules, update_module, models_to_check)
File "F:\odoo-14.0\odoo-14.0\odoo\modules\loading.py", line 348, in load_marked_modules
perform_checks=perform_checks, models_to_check=models_to_check
File "F:\odoo-14.0\odoo-14.0\odoo\modules\loading.py", line 221, in load_module_graph
load_data(cr, idref, mode, kind='data', package=package)
File "F:\odoo-14.0\odoo-14.0\odoo\modules\loading.py", line 69, in load_data
tools.convert_file(cr, package.name, filename, idref, mode, noupdate, kind)
File "F:\odoo-14.0\odoo-14.0\odoo\tools\convert.py", line 733, in convert_file
convert_xml_import(cr, module, fp, idref, mode, noupdate)
File "F:\odoo-14.0\odoo-14.0\odoo\tools\convert.py", line 799, in convert_xml_import
obj.parse(doc.getroot())
File "F:\odoo-14.0\odoo-14.0\odoo\tools\convert.py", line 719, in parse
self._tag_root(de)
File "F:\odoo-14.0\odoo-14.0\odoo\tools\convert.py", line 685, in _tag_root
)) from e
Exception
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "F:\odoo-14.0\odoo-14.0\odoo\http.py", line 640, in _handle_exception
return super(JsonRequest, self)._handle_exception(exception)
File "F:\odoo-14.0\odoo-14.0\odoo\http.py", line 316, in _handle_exception
raise exception.with_traceback(None) from new_cause
odoo.tools.convert.ParseError: while parsing None:73, near
<data name="QUnit Assets" inherit_id="web.qunit_suite">
<xpath expr="//script[last()]" position="after">
<script type="text/javascript" src="/muk_web_utils/static/tests/fields.js"/>
</xpath>
</data>
and here is my code:
/**********************************************************************************
*
* Copyright (c) 2017-2019 MuK IT GmbH.
*
* This file is part of MuK Web Utils
* (see https://mukit.at).
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
**********************************************************************************/
/**
 * QUnit tests for the MuK Web Utils field widgets (copy_binary, share_char,
 * share_text and share_binary), loaded through the web.qunit_suite bundle.
 */
odoo.define('muk_web_utils.tests.fields', function (require) {
"use strict";

// These modules are required for their side effects in the test bundle only;
// their exports are not used directly by the tests below.
require('web.basic_fields');
require('web.concurrency');
require('web.config');
require('web.core');
require('web.KanbanView');
require('web.ListView');
require('web.session');
require('web.field_registry');

var FormView = require('web.FormView');
var testUtils = require('web.test_utils');

var createView = testUtils.createView;

QUnit.module('muk_web_utils', {}, function () {
QUnit.module('fields', {
    beforeEach: function () {
        // Minimal in-memory model served by the mocked server: one partner
        // record with a char, a text and a binary field.
        this.data = {
            partner: {
                fields: {
                    display_name: {
                        string: "Displayed name",
                        type: "char",
                        searchable: true
                    },
                    short: {
                        string: "Short",
                        type: "char",
                        searchable: true,
                        trim: true
                    },
                    long: {
                        // BUG FIX: the original also declared a dead,
                        // capitalized key `String: "Long"`; only the
                        // lowercase `string` key is read by the framework,
                        // so the dead key has been removed.
                        string: "txt",
                        type: "text",
                    },
                    document: {
                        string: "Binary",
                        type: "binary",
                        attachment: true,
                    },
                },
                records: [{
                    id: 1,
                    display_name: "first record",
                    short: "Short Text",
                    long: "Super looooooong Text",
                    document: 'coucou==\n',
                }],
            },
        };
    }
}, function () {

    QUnit.module('BinaryFileCopy');

    QUnit.test('Fields is correctly rendered', function (assert) {
        assert.expect(2);
        var form = createView({
            View: FormView,
            model: 'partner',
            data: this.data,
            arch: (
                '<form string="Partners">' +
                '<field name="document" widget="copy_binary" filename="short"/>' +
                '<field name="short"/>' +
                '</form>'
            ),
            res_id: 1,
        });
        assert.strictEqual(
            form.$('a.o_field_widget[name="document"] > .mk_copy_binary > .mk_copy_button').length,
            1, "the copy button should be visible in readonly mode"
        );
        form.$buttons.find('.o_form_button_edit').click();
        assert.strictEqual(
            form.$('a.o_field_widget[name="document"] > .mk_copy_binary').length,
            0, "the copy button shouldn't be visible in edit mode"
        );
        form.destroy();
    });

    QUnit.module('CharShare');

    QUnit.test('Fields is correctly rendered', function (assert) {
        assert.expect(1);
        var form = createView({
            View: FormView,
            model: 'partner',
            data: this.data,
            arch: (
                '<form string="Partners">' +
                '<div>' +
                '<field name="short" widget="share_char"/>' +
                '</div>' +
                '</form>'
            ),
            res_id: 1,
        });
        assert.strictEqual(
            form.$('span.o_field_widget[name="short"] > .mk_share_dropdown.mk_share_char').length,
            1, "the copy button should be visible in readonly mode"
        );
        form.destroy();
    });

    QUnit.module('TextShare');

    QUnit.test('Fields is correctly rendered', function (assert) {
        assert.expect(1);
        var form = createView({
            View: FormView,
            model: 'partner',
            data: this.data,
            arch: (
                '<form string="Partners">' +
                '<div>' +
                '<field name="long" widget="share_text"/>' +
                '</div>' +
                '</form>'
            ),
            res_id: 1,
        });
        assert.strictEqual(
            form.$('span.o_field_widget[name="long"] > .mk_share_dropdown.mk_share_text').length,
            1, "the copy button should be visible in readonly mode"
        );
        form.destroy();
    });

    QUnit.module('BinaryFileShare');

    QUnit.test('Fields is correctly rendered', function (assert) {
        assert.expect(2);
        var form = createView({
            View: FormView,
            model: 'partner',
            data: this.data,
            arch: (
                '<form string="Partners">' +
                '<field name="document" widget="share_binary" filename="short"/>' +
                '<field name="short"/>' +
                '</form>'
            ),
            res_id: 1,
        });
        assert.strictEqual(
            form.$('a.o_field_widget[name="document"] > .mk_share_dropdown > .mk_share_button').length,
            1, "the share dropdown should be visible in readonly mode"
        );
        form.$buttons.find('.o_form_button_edit').click();
        assert.strictEqual(
            form.$('a.o_field_widget[name="document"] > .mk_share_dropdown > .mk_share_button').length,
            0, "the share dropdown shouldn't be visible in edit mode"
        );
        form.destroy();
    });
});
});
});
When I upgraded this module from Odoo 12 to Odoo 14, this error occurred. When I changed the xpath and installed the module, it showed another error — TypeError: view.getController(...).then(...).guardedCatch is not a function. I have no idea what causes this error. How can I fix it, and where in my code do I need to make changes? Please help me.
<xpath expr="//script[last()]" position="after">
Odoo is not able to resolve the above xpath expression, so please check your inherit_id in v14,
or try to rewrite as this:
<!-- Extend the web.qunit_suite assets template and inject this module's
     QUnit test script after the last <script> tag of the parent view. -->
<template id="qunit_suite" name="QUnit Assets" inherit_id="web.qunit_suite">
<xpath expr="//script[last()]" position="after">
<script type="text/javascript" src="/muk_web_utils/static/tests/fields.js" />
</xpath>
</template>
I have read a CSV file (that contains customers' addresses) and assigned the data to a DataFrame table.
Description of the csv file (or the DataFrame table)
DataFrame contains several rows and 5 columns
Database example
Address1 Address3 Post_Code City_Name Full_Address
10000009 37 RUE DE LA GARE L-7535 MERSCH 37 RUE DE LA GARE,L-7535, MERSCH
10000009 37 RUE DE LA GARE L-7535 MERSCH 37 RUE DE LA GARE,L-7535, MERSCH
10000009 37 RUE DE LA GARE L-7535 MERSCH 37 RUE DE LA GARE,L-7535, MERSCH
10001998 RUE EDWARD STEICHEN L-1855 LUXEMBOURG RUE EDWARD STEICHEN,L-1855,LUXEMBOURG
11000051 9 RUE DU BRILL L-3898 FOETZ 9 RUE DU BRILL,L-3898 ,FOETZ
I have written code (geocoding with Python) in order to convert physical addresses to geographic locations → latitude and longitude, but the code keeps raising several errors.
So far I have written this code :
The code is
import pandas as pd
import folium  # BUG FIX: folium is used below but was never imported
from geopy.geocoders import Nominatim
from geopy.extra.rate_limiter import RateLimiter

# Read the CSV (the file contains 43 columns; only the address ones are used).
ERP_Data = pd.read_csv("test.csv")

# Extract the address information into a new DataFrame.
Address_info = ERP_Data[['Address1', 'Address3', 'Post_Code', 'City_Name']].copy()

# Add a Full_Address column that concatenates the address columns into one,
# e.g. "Karlaplan 13,115 20,STOCKHOLM,Stockholms län, Sweden".
Address_info['Full_Address'] = Address_info[Address_info.columns[1:]].apply(
    lambda x: ','.join(x.dropna().astype(str)), axis=1)

# Nominatim is the geocoding service used to resolve addresses.
locator = Nominatim(user_agent="myGeocoder")

# 1 - convenient wrapper that delays between geocoding calls (Nominatim's
#     usage policy allows at most one request per second).
geocode = RateLimiter(locator.geocode, min_delay_seconds=1)

# 2 - create the location column.
Address_info['location'] = Address_info['Full_Address'].apply(geocode)

# 3 - create longitude, latitude and altitude from the location column
#     (loc.point is a (latitude, longitude, altitude) tuple).
Address_info['point'] = Address_info['location'].apply(
    lambda loc: tuple(loc.point) if loc else None)

# 4 - split the point column into latitude, longitude and altitude columns.
# NOTE(review): rows where geocoding failed leave 'point' as None; verify the
# DataFrame split below tolerates that for your data, or fill those rows with
# (None, None, None) first.
Address_info[['latitude', 'longitude', 'altitude']] = pd.DataFrame(
    Address_info['point'].tolist(), index=Address_info.index)

# Use Folium to map out the points we created.
folium_map = folium.Map(location=[49.61167, 6.13], zoom_start=12)
An example of the full output error is :
RateLimiter caught an error, retrying (0/2 tries). Called with (*('44 AVENUE JOHN FITZGERALD KENNEDY,L-1855,LUXEMBOURG',), **{}).
Traceback (most recent call last):
File "e:\Anaconda3\lib\urllib\request.py", line 1317, in do_open
encode_chunked=req.has_header('Transfer-encoding'))
File "e:\Anaconda3\lib\http\client.py", line 1244, in request
self._send_request(method, url, body, headers, encode_chunked)
File "e:\Anaconda3\lib\http\client.py", line 1290, in _send_request
self.endheaders(body, encode_chunked=encode_chunked)
File "e:\Anaconda3\lib\http\client.py", line 1239, in endheaders
self._send_output(message_body, encode_chunked=encode_chunked)
File "e:\Anaconda3\lib\http\client.py", line 1026, in _send_output
self.send(msg)
File "e:\Anaconda3\lib\http\client.py", line 966, in send
self.connect()
File "e:\Anaconda3\lib\http\client.py", line 1414, in connect
server_hostname=server_hostname)
File "e:\Anaconda3\lib\ssl.py", line 423, in wrap_socket
session=session
File "e:\Anaconda3\lib\ssl.py", line 870, in _create
self.do_handshake()
File "e:\Anaconda3\lib\ssl.py", line 1139, in do_handshake
self._sslobj.do_handshake()
socket.timeout: _ssl.c:1059: The handshake operation timed out
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "e:\Anaconda3\lib\site-packages\geopy\geocoders\base.py", line 355, in _call_geocoder
page = requester(req, timeout=timeout, **kwargs)
File "e:\Anaconda3\lib\urllib\request.py", line 525, in open
response = self._open(req, data)
File "e:\Anaconda3\lib\urllib\request.py", line 543, in _open
'_open', req)
File "e:\Anaconda3\lib\urllib\request.py", line 503, in _call_chain
result = func(*args)
File "e:\Anaconda3\lib\urllib\request.py", line 1360, in https_open
context=self._context, check_hostname=self._check_hostname)
File "e:\Anaconda3\lib\urllib\request.py", line 1319, in do_open
raise URLError(err)
urllib.error.URLError: <urlopen error _ssl.c:1059: The handshake operation timed out>
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "e:\Anaconda3\lib\site-packages\geopy\extra\rate_limiter.py", line 126, in __call__
return self.func(*args, **kwargs)
File "e:\Anaconda3\lib\site-packages\geopy\geocoders\osm.py", line 387, in geocode
self._call_geocoder(url, timeout=timeout), exactly_one
File "e:\Anaconda3\lib\site-packages\geopy\geocoders\base.py", line 378, in _call_geocoder
raise GeocoderTimedOut('Service timed out')
geopy.exc.GeocoderTimedOut: Service timed out
Expected output is
Address1 Address3 Post_Code City_Name Full_Address Latitude Longitude
10000009 37 RUE DE LA GARE L-7535 MERSCH 37 RUE DE LA GARE,L-7535, MERSCH 49.7508296 6.1085476
10000009 37 RUE DE LA GARE L-7535 MERSCH 37 RUE DE LA GARE,L-7535, MERSCH 49.7508296 6.1085476
10000009 37 RUE DE LA GARE L-7535 MERSCH 37 RUE DE LA GARE,L-7535, MERSCH 49.7508296 6.1085476
10001998 RUE EDWARD STEICHEN L-1855 LUXEMBOURG RUE EDWARD STEICHEN,L-1855,LUXEMBOURG 49.6302147 6.1713374
11000051 9 RUE DU BRILL L-3898 FOETZ 9 RUE DU BRILL,L-3898 ,FOETZ 49.5217917 6.0101385
I've updated your code:
Added: Address_info = Address_info.apply(lambda x: x.str.strip(), axis=1)
Removes whitespace before and after str
Added a function with try-except, to handle the lookup
import time

import pandas as pd
from geopy.geocoders import Nominatim
from geopy.exc import GeocoderTimedOut, GeocoderQuotaExceeded

ERP_Data = pd.read_csv("test.csv")

# Extracting the address information into a new DataFrame
Address_info = ERP_Data[['Address1', 'Address3', 'Post_Code', 'City_Name']].copy()

# Clean existing whitespace from the ends of the strings
Address_info = Address_info.apply(lambda x: x.str.strip(), axis=1)  # ← added

# Adding a new column called (Full_Address) that concatenates address columns
# into one, e.g. "Karlaplan 13,115 20,STOCKHOLM,Stockholms län, Sweden"
Address_info['Full_Address'] = Address_info[Address_info.columns[1:]].apply(
    lambda x: ','.join(x.dropna().astype(str)), axis=1)

locator = Nominatim(user_agent="myGeocoder")  # the Nominatim geocoding service


def geocode_me(location):
    """Geocode one address string, returning None when the lookup fails.

    Sleeps 1.1 s before each call to respect Nominatim's one-request-per-second
    usage policy (replaces the RateLimiter of the original code).
    """
    time.sleep(1.1)
    try:
        return locator.geocode(location)
    except (GeocoderTimedOut, GeocoderQuotaExceeded) as e:
        # BUG FIX: the original tested `if GeocoderQuotaExceeded:`, which is
        # always truthy (it is the class object itself). Inspect the caught
        # exception instance instead.
        if isinstance(e, GeocoderQuotaExceeded):
            print(e)
        else:
            print(f'Location not found: {e}')
        return None


# 2 - create location column
Address_info['location'] = Address_info['Full_Address'].apply(lambda x: geocode_me(x))

# 3 - create longitude, latitude and altitude from location column (returns tuple)
Address_info['point'] = Address_info['location'].apply(
    lambda loc: tuple(loc.point) if loc else None)

# 4 - split point column into latitude, longitude and altitude columns
Address_info[['latitude', 'longitude', 'altitude']] = pd.DataFrame(
    Address_info['point'].tolist(), index=Address_info.index)
Output:
Address1 Address3 Post_Code City_Name Full_Address location point latitude longitude altitude
10000009 37 RUE DE LA GARE L-7535 MERSCH 37 RUE DE LA GARE,L-7535,MERSCH (Rue de la Gare, Mersch, Canton Mersch, 7535, Lëtzebuerg, (49.7508296, 6.1085476)) (49.7508296, 6.1085476, 0.0) 49.750830 6.108548 0.0
10000009 37 RUE DE LA GARE L-7535 MERSCH 37 RUE DE LA GARE,L-7535,MERSCH (Rue de la Gare, Mersch, Canton Mersch, 7535, Lëtzebuerg, (49.7508296, 6.1085476)) (49.7508296, 6.1085476, 0.0) 49.750830 6.108548 0.0
10000009 37 RUE DE LA GARE L-7535 MERSCH 37 RUE DE LA GARE,L-7535,MERSCH (Rue de la Gare, Mersch, Canton Mersch, 7535, Lëtzebuerg, (49.7508296, 6.1085476)) (49.7508296, 6.1085476, 0.0) 49.750830 6.108548 0.0
10001998 RUE EDWARD STEICHEN L-1855 LUXEMBOURG RUE EDWARD STEICHEN,L-1855,LUXEMBOURG (Rue Edward Steichen, Grünewald, Weimershof, Neudorf-Weimershof, Luxembourg, Canton Luxembourg, 2540, Lëtzebuerg, (49.6302147, 6.1713374)) (49.6302147, 6.1713374, 0.0) 49.630215 6.171337 0.0
11000051 9 RUE DU BRILL L-3898 FOETZ 9 RUE DU BRILL,L-3898,FOETZ (Rue du Brill, Mondercange, Canton Esch-sur-Alzette, 3898, Luxembourg, (49.5217917, 6.0101385)) (49.5217917, 6.0101385, 0.0) 49.521792 6.010139 0.0
10000052 3 RUE DU PUITS ROMAIN L-8070 BERTRANGE 3 RUE DU PUITS ROMAIN,L-8070,BERTRANGE (Rue du Puits Romain, Z.A. Bourmicht, Bertrange, Canton Luxembourg, 8070, Lëtzebuerg, (49.6084531, 6.0771901)) (49.6084531, 6.0771901, 0.0) 49.608453 6.077190 0.0
Note & Additional Resources:
The output includes the address that caused the error in your TraceBack
RateLimiter caught an error, retrying (0/2 tries). Called with (*('3 RUE DU PUITS ROMAIN ,L-8070 ,BERTRANGE ',)
Note all the extra whitespace in the address. I've added a line of code to remove whitespace from the beginning and end of the strings
GeocoderTimedOut, a real pain?
Geopy: catch timeout error
Final:
The final result is the service times out because of HTTP Error 429: Too Many Requests for the day.
Review Nominatim Usage Policy
Suggestion: Use a different Geocoder
Closed. This question needs debugging details. It is not currently accepting answers.
Edit the question to include desired behavior, a specific problem or error, and the shortest code necessary to reproduce the problem. This will help others answer the question.
Closed 4 years ago.
Improve this question
Here is a sample of my dictionnary with 3 Keys.
({'Musique': [['Musique', 'Shawn Phillips', 236, 236], ['Musique', "L'avenue Royale fête l'été!", 237, 237], ['Musique', 'Perséides musicales', 215, 215], ['Musique', 'Gaétan Leclerc chante Félix et…', 229, 229], ['Musique', 'The Ring of Fire : a Johnny Cash Experience', 202, 202], ['Musique', "Jazz'Art", 208, 210], {'Théatre': [['Théâtre', 'Coup de théâtre à la joyeuse maison hantée', 189, 189], ['Théâtre', 'Les galoches du bonheur', 203, 203], ['Théâtre', 'Le voyage de Pixelle dans le monde virtuel', 217, 217], ['Théâtre', 'Marimba à la ferme de la bonne entente', 224, 224], ['Théâtre', 'Pattes et cravates', 196, 196], {'Danse': [['Danse', 'Initiation au tango argentin suivi de la milonga', 182, 231], ['Danse', 'Samedi de danser...', 188, 188], ['Danse', 'Rusdell Nunez (latino)', 191, 191]
The keys are: 'Musique', 'Théâtre' and 'Danse'.
This is a list of sublists of events, and all the ints in my dictionary are the days when those events are available. I need to return a list with the names of all the events of the right type that are offered on the date passed in the argument day_year.
Here is the full instructions and the function:
def obtain_events_date_type(dictio_events_par_type, day_year, type_event): #first argument in the dictionnary so dont rly worry about it.
Then, for each event of the same type as the argument type_event: if the beginning of the event (which is the first int in the dictionary) is lower than or equal to the argument day_year, and if the end of the event (the last int of every entry, you could say) is higher than or equal to day_year, we can add the name of this event to the list of events, since it is available on that day. I need to return that list of events.
So if i entered
def obtain_events_date_type(creer_dictio, 236, 'Musique'):
#creer_dictio is my dictio in another function
I would need to add all events that are available on day 236 — for example, the first values in my dictionary mentioned in this post. It is possible that there is more than one event on the same day. If there is no event available on the day entered as an argument, we return an empty list.
What have i tried :
I'm actually familiar with loops and such in Python, but I keep getting errors about tuples and a bunch of operations that are not allowed on dictionaries.
Someone told me that I could create a list for every type, but I'm still having a hard time reaching every event and the ints asked for in the arguments.
Thanks for the info/tips !
EDIT :
liste_type_asked = []
for element in dictio_evenements_par_type:
if 'Musique' in element:
for jour in element:
if jour_annee <= jour[2]:
if jour_annee >= jour[3]:
liste_type_asked.append(element)
return liste_type_asked
Error:
TypeError: '<=' not supported between instances of 'int' and 'str'
You can use list comprehension like this:
def obtain_events_date_type(dictio_events_par_type, day_year, type_event):
    """Return the names of all *type_event* events running on day *day_year*.

    *dictio_events_par_type* is a list of one-key dicts mapping an event type
    to a list of ``[type, name, start_day, end_day]`` entries; an event is
    kept when ``start_day <= day_year <= end_day``.
    """
    matching_names = []
    for type_mapping in dictio_events_par_type:
        for event_type, event_entries in type_mapping.items():
            if event_type != type_event:
                continue
            for _etype, name, start_day, end_day in event_entries:
                if start_day <= day_year <= end_day:
                    matching_names.append(name)
    return matching_names
so that:
events = [
{
'Musique': [
['Musique', 'Shawn Phillips', 236, 236],
['Musique', "L'avenue Royale fête l'été!", 237, 237],
['Musique', 'Perséides musicales', 215, 215],
['Musique', 'Gaétan Leclerc chante Félix et…', 229, 229],
['Musique', 'The Ring of Fire : a Johnny Cash Experience', 202, 202],
['Musique', "Jazz'Art", 208, 210]
]
},
{
'Théâtre': [
['Théâtre', 'Coup de théâtre à la joyeuse maison hantée', 189, 189],
['Théâtre', 'Les galoches du bonheur', 203, 203],
['Théâtre', 'Le voyage de Pixelle dans le monde virtuel', 217, 217],
['Théâtre', 'Marimba à la ferme de la bonne entente', 224, 224],
['Théâtre', 'Pattes et cravates', 196, 196]
]
},
{
'Danse': [
['Danse', 'Initiation au tango argentin suivi de la milonga', 182, 231],
['Danse', 'Samedi de danser...', 188, 188],
['Danse', 'Rusdell Nunez (latino)', 191, 191]
]
}
]
print(obtain_events_date_type(events, 188, 'Danse'))
will output:
['Initiation au tango argentin suivi de la milonga', 'Samedi de danser...']
I am using Scrapy to scrape blogs and then store the data in MongoDB. At first I got an InvalidDocument exception, so it was obvious to me that the data was not in the right encoding. So before persisting the object, in my MongoPipeline I check whether the document is in 'utf-8 strict', and only then do I try to persist the object to MongoDB. BUT I still get InvalidDocument exceptions, which is annoying.
This is my code my MongoPipeline Object that persists objects to mongodb
# -*- coding: utf-8 -*-
# Define your item pipelines here
#
import pymongo
import sys, traceback
from scrapy.exceptions import DropItem
from crawler.items import BlogItem, CommentItem
class MongoPipeline(object):
    """Scrapy item pipeline that UTF-8 encodes BlogItem text fields and
    persists each item into a MongoDB collection.

    NOTE(review): ``str.encode('utf-8')`` turns text into byte strings; BSON
    may still reject such values and raise InvalidDocument — storing the
    decoded text directly is usually the right fix. Confirm against the
    pymongo version in use.
    """

    # All items go into a single collection.
    collection_name = 'master'

    def __init__(self, mongo_uri, mongo_db):
        self.mongo_uri = mongo_uri
        self.mongo_db = mongo_db

    # BUG FIX: the original had the decorator written as the comment
    # '#classmethod', so Scrapy's `from_crawler(crawler)` call failed.
    @classmethod
    def from_crawler(cls, crawler):
        """Alternate constructor used by Scrapy; reads the Mongo settings."""
        return cls(
            mongo_uri=crawler.settings.get('MONGO_URI'),
            mongo_db=crawler.settings.get('MONGO_DATABASE', 'posts')
        )

    def open_spider(self, spider):
        # One client/database handle per spider run.
        self.client = pymongo.MongoClient(self.mongo_uri)
        self.db = self.client[self.mongo_db]

    def close_spider(self, spider):
        self.client.close()

    def process_item(self, item, spider):
        """Strictly UTF-8 encode the item's text fields, then insert it.

        A blog-level encoding failure drops the item; a failure while
        encoding comments is only logged and the item is still stored.
        """
        if type(item) is BlogItem:
            try:
                for key in ('url', 'domain', 'title', 'date', 'content', 'author'):
                    if key in item:
                        item[key] = item[key].encode('utf-8', 'strict')
            # BUG FIX: narrowed from a bare 'except:', which also swallowed
            # KeyboardInterrupt and SystemExit.
            except Exception:
                e = sys.exc_info()[0]
                spider.logger.critical("ERROR ENCODING %s", e)
                traceback.print_exc(file=sys.stdout)
                raise DropItem("Error encoding BLOG %s" % item['url'])
        if 'comments' in item:
            comments = item['comments']
            item['comments'] = []
            try:
                for comment in comments:
                    for key in ('date', 'author', 'content'):
                        if key in comment:
                            comment[key] = comment[key].encode('utf-8', 'strict')
                    item['comments'].append(comment)
            except Exception:  # narrowed from a bare 'except:' (see above)
                e = sys.exc_info()[0]
                spider.logger.critical("ERROR ENCODING COMMENT %s", e)
                traceback.print_exc(file=sys.stdout)
        self.db[self.collection_name].insert(dict(item))
        return item
And still i get the following exception:
au coeur de l\u2019explosion de la bulle Internet n\u2019est probablement pas \xe9tranger au succ\xe8s qui a suivi. Mais franchement, c\u2019est un peu court comme argument !Ce que je sais dire, compte tenu de ce qui pr\xe9c\xe8de, c\u2019est quelles sont les conditions pour r\xe9ussir si l\u2019on est vraiment contraint de rester en France. Ce sont des sujets que je d\xe9velopperai dans un autre article.',
'date': u'2012-06-27T23:21:25+00:00',
'domain': 'reussir-sa-boite.fr',
'title': u'Peut-on encore entreprendre en France ?\t\t\t ',
'url': 'http://www.reussir-sa-boite.fr/peut-on-encore-entreprendre-en-france/'}
Traceback (most recent call last):
File "h:\program files\anaconda\lib\site-packages\twisted\internet\defer.py", line 588, in _runCallbacks
current.result = callback(current.result, *args, **kw)
File "H:\PDS\BNP\crawler\crawler\pipelines.py", line 76, in process_item
self.db[self.collection_name].insert(dict(item))
File "h:\program files\anaconda\lib\site-packages\pymongo\collection.py", line 409, in insert
gen(), check_keys, self.uuid_subtype, client)
InvalidDocument: Cannot encode object: {'author': 'Arnaud Lemasson',
'content': 'Tellement vrai\xe2\x80\xa6 Il faut vraiment \xc3\xaatre motiv\xc3\xa9 aujourd\xe2\x80\x99hui pour monter sa bo\xc3\xaete. On est pr\xc3\xa9lev\xc3\xa9 de partout, je ne pense m\xc3\xaame pas \xc3\xa0 embaucher, cela me co\xc3\xbbterait bien trop cher. Bref, 100% d\xe2\x80\x99accord avec vous. Le probl\xc3\xa8me, je ne vois pas comment cela pourrait changer avec le gouvernement actuel\xe2\x80\xa6 A moins que si, j\xe2\x80\x99ai pu lire il me semble qu\xe2\x80\x99ils avaient en t\xc3\xaate de r\xc3\xa9duire l\xe2\x80\x99IS pour les petites entreprises et de l\xe2\x80\x99augmenter pour les grandes\xe2\x80\xa6 A voir',
'date': '2012-06-27T23:21:25+00:00'}
2015-11-04 15:29:15 [scrapy] INFO: Closing spider (finished)
2015-11-04 15:29:15 [scrapy] INFO: Dumping Scrapy stats:
{'downloader/request_bytes': 259,
'downloader/request_count': 1,
'downloader/request_method_count/GET': 1,
'downloader/response_bytes': 252396,
'downloader/response_count': 1,
'downloader/response_status_count/200': 1,
'finish_reason': 'finished',
'finish_time': datetime.datetime(2015, 11, 4, 14, 29, 15, 701000),
'log_count/DEBUG': 2,
'log_count/ERROR': 1,
'log_count/INFO': 7,
'response_received_count': 1,
'scheduler/dequeued': 1,
'scheduler/dequeued/memory': 1,
'scheduler/enqueued': 1,
'scheduler/enqueued/memory': 1,
 'start_time': datetime.datetime(2015, 11, 4, 14, 29, 13, 191000)}
Another funny thing: following the comment of @eLRuLL, I did the following:
>>> s = "Tellement vrai\xe2\x80\xa6 Il faut vraiment \xc3\xaatre motiv\xc3\xa9 aujourd\xe2\x80\x99hui pour monter sa bo\xc3\xaete. On est pr\xc3\xa9lev\xc3\xa9 de partout, je ne pense m\xc3\xaame pas \xc3\xa0 embaucher, cela me"
>>> s
'Tellement vrai\xe2\x80\xa6 Il faut vraiment \xc3\xaatre motiv\xc3\xa9 aujourd\xe2\x80\x99hui pour monter sa bo\xc3\xaete. On est pr\xc3\xa9lev\xc3\xa9 de partout, je ne pense m\xc3\xaame pas \xc3\xa0 embaucher, cela me'
>>> se = s.encode("utf8", "strict")
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
UnicodeDecodeError: 'ascii' codec can't decode byte 0xe2 in position 14: ordinal not in range(128)
>>> se = s.encode("utf-8", "strict")
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
UnicodeDecodeError: 'ascii' codec can't decode byte 0xe2 in position 14: ordinal not in range(128)
>>> s.decode()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
UnicodeDecodeError: 'ascii' codec can't decode byte 0xe2 in position 14: ordinal not in range(128)
Then my question is: if this text cannot be encoded, then why is the try/except in my MongoPipeline not catching this exception? Because only objects that don't raise any exception should be appended to item['comments']?
Finally I figured it out. The problem was not with encoding. It was with the structure of the documents.
Because i went off on the standard MongoPipeline example which does not deal with nested scrapy items.
What i am doing is:
BlogItem:
"url"
...
comments = [CommentItem]
So my BlogItem has a list of CommentItems. Now the problem came here, for persisting the object in the database i do:
self.db[self.collection_name].insert(dict(item))
So here i am parsing the BlogItem to a dict. But i am not parsing the list of CommentItems. And because the traceback displays the CommentItem kind of like a dict, It did not occur to me that the problematic object is not a dict!
So finally the way to fix this problem is to change the line that appends the comment to the comment list, as follows:
item['comments'].append(dict(comment))
Now MongoDB considers it as a valid document.
Lastly, regarding the last part, where I ask why I get an exception on the Python console but not in the script: the reason is that I was working on the Python console, which only supports ASCII — hence the error.
I got this error when running a query
db.collection.find({'attr': {'$gte': 20}})
and some records in collection had a non-numeric value for attr.
First, when you call "somestring".encode(...), it doesn't change "somestring"; it returns a new encoded string, so you should use something like:
item['author'] = item['author'].encode('utf-8', 'strict')
and the same for the other fields.
I ran into the same error using a numpy array in a Mongo query :
'myField' : { '$in': myList },
The fix was simply to convert the nd.array() into a list :
'myField' : { '$in': list(myList) },
in my case it was super stupid yet not easy to notice:
I accidentally wrote
f"indexes_access.{jsonData['index']}: {jsonData['newState']}"
instead of
{f"indexes_access.{jsonData['index']}": f"{jsonData['newState']}"}
(one long string parsed with f strings instead of key and value parsed separately)
I got this error
Client Traceback (most recent call last):
File "C:\Program Files\OpenERP 7.0-20130321-002353\Server\server\openerp\addons\web\http.py", line 203, in dispatch
File "C:\Program Files\OpenERP 7.0-20130321-002353\Server\server\openerp\addons\web\controllers\main.py", line 1410, in load
File "C:\Program Files\OpenERP 7.0-20130321-002353\Server\server\openerp\addons\web\session.py", line 43, in proxy
File "C:\Program Files\OpenERP 7.0-20130321-002353\Server\server\openerp\addons\web\session.py", line 31, in proxy_method
File "C:\Program Files\OpenERP 7.0-20130321-002353\Server\server\openerp\addons\web\session.py", line 104, in send
Server Traceback (most recent call last):
File "C:\Program Files\OpenERP 7.0-20130321-002353\Server\server\openerp\addons\web\session.py", line 90, in send
File "C:\Program Files\OpenERP 7.0-20130321-002353\Server\server\.\openerp\netsvc.py", line 293, in dispatch_rpc
File "C:\Program Files\OpenERP 7.0-20130321-002353\Server\server\.\openerp\service\web_services.py", line 622, in dispatch
File "C:\Program Files\OpenERP 7.0-20130321-002353\Server\server\.\openerp\service\security.py", line 40, in check
File "C:\Program Files\OpenERP 7.0-20130321-002353\Server\server\.\openerp\pooler.py", line 49, in get_pool
File "C:\Program Files\OpenERP 7.0-20130321-002353\Server\server\.\openerp\pooler.py", line 33, in get_db_and_pool
File "C:\Program Files\OpenERP 7.0-20130321-002353\Server\server\.\openerp\modules\registry.py", line 192, in get
File "C:\Program Files\OpenERP 7.0-20130321-002353\Server\server\.\openerp\modules\registry.py", line 218, in new
File "C:\Program Files\OpenERP 7.0-20130321-002353\Server\server\.\openerp\modules\loading.py", line 344, in load_modules
File "C:\Program Files\OpenERP 7.0-20130321-002353\Server\server\.\openerp\modules\loading.py", line 259, in load_marked_modules
File "C:\Program Files\OpenERP 7.0-20130321-002353\Server\server\.\openerp\modules\loading.py", line 162, in load_module_graph
File "C:\Program Files\OpenERP 7.0-20130321-002353\Server\server\.\openerp\modules\module.py", line 405, in load_openerp_module
File "C:\Program Files\OpenERP 7.0-20130321-002353\Server\server\.\openerp\modules\module.py", line 133, in load_module
File "C:\Program Files\OpenERP 7.0-20130321-002353\Server\server\openerp\addons\certificados\__init__.py", line 1, in <module>
File "C:\Program Files\OpenERP 7.0-20130321-002353\Server\server\openerp\addons\certificados\certificados.py", line 37
'Valor_En_Divisas' : fields.function(multi_a_b, type='integer', 'Valor En Divisas'),
SyntaxError: non-keyword arg after keyword arg
And this is the code in my module.py
# OpenERP 7 column definitions for the certificados model.
# BUG FIX on 'Valor_En_Divisas': the human-readable label must be passed as
# the keyword argument string=...; a positional argument after keyword
# arguments ("type='integer', 'Valor En Divisas'") is a SyntaxError.
_columns = {
    'Codigo_Arancelario' : fields.integer('Codigo Arancelario'),
    'product_id' : fields.many2one('product.product', 'Material'),
    'Descripcion_Arancelaria' : fields.char('Descripcion Arancelaria', size=42, required = True, translate = True),
    'Especificaciones_Tecnicas' : fields.char('Especificaciones Tecnicas', size=60, required = True, translate = True),
    'Cantidad' : fields.float('Cantidad'), 'Unidad_de_Medida': fields.many2one('product.uom', 'Unidad de Medida'),
    'Precio_Unitario_Declarado' : fields.float('Precio Unitario Declarado'), 'Moneda' : fields.many2one('res.currency', 'Moneda'),
    'Valor_En_Divisas' : fields.function(multi_a_b, type='integer', string='Valor En Divisas'),
    'requisicion_id' : fields.many2one('certificados.certificados', 'Certificados de No Produccion', ondelete='cascade'),
    'Cantidad_Consumida' : fields.float('Cantidad Consumida'), 'Cantidad_Disponible' : fields.float('Cantidad Disponible'),
}
The syntax error points at "Valor_En_Divisas", but I don't know what exactly is giving me this error.
Any help would be greatly appreciated.
You have a non-keyword argument:
fields.function(multi_a_b, type='integer', 'Valor En Divisas'),
^^^^^^^^^^^^^^^^^^
After a keyword argument:
fields.function(multi_a_b, type='integer', 'Valor En Divisas'),
^^^^^^^^^^^^^^
To fix it, make that last one a keyword argument:
fields.function(multi_a_b, type='integer', string='Valor En Divisas'),