I have these "json" files that I like to insert into my mongodb database.
An example of one is:
http://s.live.ksmobile.net/cheetahlive/de/ff/15201023827214369775/15201023827214369775.json
The problem is that each file is formatted like this:
{ "channelType":"TEMPGROUP", ... } # line 1
{ "channelType":"TEMPGROUP", ... } # line 2
So instead of inserting each file as one document in the DB, every single line is inserted as its own entry. What should be 3 documents from 3 "json" files ends up as 1189 documents in the database.
How can I insert the whole content of a ".json" file into one document?
My code is:
replay_url = "http://live.ksmobile.net/live/getreplayvideos?"
userid = 969730808384462848
url2 = replay_url + urllib.parse.urlencode({'userid': userid}) + '&page_size=1000'
raw_replay_data = requests.get(url2).json()
for i in raw_replay_data['data']['video_info']:
url3 = i['msgfile']
raw_message_data = urllib.request.urlopen(url3)
for line in raw_message_data:
json_data = json.loads(line)
messages.insert_one(json_data)
print(json_data)
Update with more information for the answers:
messages.insert(json_data) gives this error:
Traceback (most recent call last):
File "/media/anon/06bcf743-8b4d-409f-addc-520fc4e19299/PycharmProjects/LiveMe/venv1/lib/python3.6/site-packages/pymongo/collection.py", line 633, in _insert
blk.execute(concern, session=session)
File "/media/anon/06bcf743-8b4d-409f-addc-520fc4e19299/PycharmProjects/LiveMe/venv1/lib/python3.6/site-packages/pymongo/bulk.py", line 432, in execute
return self.execute_command(generator, write_concern, session)
File "/media/anon/06bcf743-8b4d-409f-addc-520fc4e19299/PycharmProjects/LiveMe/venv1/lib/python3.6/site-packages/pymongo/bulk.py", line 329, in execute_command
raise BulkWriteError(full_result)
pymongo.errors.BulkWriteError: batch op errors occurred
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/media/anon/06bcf743-8b4d-409f-addc-520fc4e19299/PycharmProjects/LiveMe/import_messages_dev.py", line 43, in <module>
messages.insert(json_data)
File "/media/anon/06bcf743-8b4d-409f-addc-520fc4e19299/PycharmProjects/LiveMe/venv1/lib/python3.6/site-packages/pymongo/collection.py", line 2941, in insert
check_keys, manipulate, write_concern)
File "/media/anon/06bcf743-8b4d-409f-addc-520fc4e19299/PycharmProjects/LiveMe/venv1/lib/python3.6/site-packages/pymongo/collection.py", line 635, in _insert
_raise_last_error(bwe.details)
File "/media/anon/06bcf743-8b4d-409f-addc-520fc4e19299/PycharmProjects/LiveMe/venv1/lib/python3.6/site-packages/pymongo/helpers.py", line 220, in _raise_last_error
_raise_last_write_error(write_errors)
File "/media/anon/06bcf743-8b4d-409f-addc-520fc4e19299/PycharmProjects/LiveMe/venv1/lib/python3.6/site-packages/pymongo/helpers.py", line 188, in _raise_last_write_error
raise DuplicateKeyError(error.get("errmsg"), 11000, error)
pymongo.errors.DuplicateKeyError: E11000 duplicate key error index: liveme.messages.$_id_ dup key: { : ObjectId('5aa2fc6f5d60126499060949') }
messages.insert_one(json_data) gives me this error:
Traceback (most recent call last):
File "/media/anon/06bcf743-8b4d-409f-addc-520fc4e19299/PycharmProjects/LiveMe/import_messages_dev.py", line 43, in <module>
messages.insert_one(json_data)
File "/media/anon/06bcf743-8b4d-409f-addc-520fc4e19299/PycharmProjects/LiveMe/venv1/lib/python3.6/site-packages/pymongo/collection.py", line 676, in insert_one
common.validate_is_document_type("document", document)
File "/media/anon/06bcf743-8b4d-409f-addc-520fc4e19299/PycharmProjects/LiveMe/venv1/lib/python3.6/site-packages/pymongo/common.py", line 434, in validate_is_document_type
"collections.MutableMapping" % (option,))
TypeError: document must be an instance of dict, bson.son.SON, bson.raw_bson.RawBSONDocument, or a type that inherits from collections.MutableMapping
messages.insert_many(json_data) gives me this error:
Traceback (most recent call last):
File "/media/anon/06bcf743-8b4d-409f-addc-520fc4e19299/PycharmProjects/LiveMe/import_messages_dev.py", line 43, in <module>
messages.insert_many(json_data)
File "/media/anon/06bcf743-8b4d-409f-addc-520fc4e19299/PycharmProjects/LiveMe/venv1/lib/python3.6/site-packages/pymongo/collection.py", line 742, in insert_many
blk.execute(self.write_concern.document, session=session)
File "/media/anon/06bcf743-8b4d-409f-addc-520fc4e19299/PycharmProjects/LiveMe/venv1/lib/python3.6/site-packages/pymongo/bulk.py", line 432, in execute
return self.execute_command(generator, write_concern, session)
File "/media/anon/06bcf743-8b4d-409f-addc-520fc4e19299/PycharmProjects/LiveMe/venv1/lib/python3.6/site-packages/pymongo/bulk.py", line 329, in execute_command
raise BulkWriteError(full_result)
pymongo.errors.BulkWriteError: batch op errors occurred
messages.insert and messages.insert_many both insert 1 line and throw the error.
These files obviously do not contain properly formatted JSON; rather, they contain a separate object on each line.
To turn them into valid JSON, you probably want a list of objects, i.e.:
[{ "channelType":"TEMPGROUP", ... },
{ "channelType":"TEMPGROUP", ... }]
You can achieve this by collecting the lines of each file into a list; since insert_one() only accepts a mapping (a dict-like document), wrap that list under a single field:
for i in raw_replay_data['data']['video_info']:
    url3 = i['msgfile']
    raw_message_data = urllib.request.urlopen(url3)
    json_data = []
    for line in raw_message_data:
        json_data.append(json.loads(line))
    # Wrap the list in a single document; the field name "messages" is arbitrary.
    messages.insert_one({'messages': json_data})
    print(json_data)
Related
My Locust file has PUT requests, but sometimes they pass and sometimes they fail. Can anyone explain why that's happening?
Here is my Locust file:
def _generate_put_data(self) -> str:
    if len(self.ids) > 0:
        teacher_name = self.generate_random_string()
        teacher_email = f"{self.generate_random_string()}#{self.generate_random_string()}.{self.generate_random_string()}"
        teacher_email = teacher_email.replace("#", "%40")
        teacher_id = str(random.choice(self.ids))
        request_string = f"{self.path_all}/{teacher_id}?teacherName={teacher_name}&teacherEmail={teacher_email}"
        return request_string

@task(2)
def put_request(self):
    self.client.put(url=self._generate_put_data())

...
The _generate_put_data method returns a string, which is the request path plus query string.
Here is the error:
[2023-01-26 11:39:12,357] pop-os/ERROR/locust.user.task: expected string or bytes-like object
Traceback (most recent call last):
File "/home/XXX/.local/lib/python3.10/site-packages/locust/user/task.py", line 347, in run
self.execute_next_task()
File "/home/XXX/.local/lib/python3.10/site-packages/locust/user/task.py", line 372, in execute_next_task
self.execute_task(self._task_queue.pop(0))
File "/home/XXX/.local/lib/python3.10/site-packages/locust/user/task.py", line 493, in execute_task
task(self.user)
File "/home/XXX/Desktop/my-projects/spring-boot-app/performans-testing/locust.py", line 44, in put_request
self.client.put(url=self._generate_put_data())
File "/home/XXX/.local/lib/python3.10/site-packages/requests/sessions.py", line 647, in put
return self.request("PUT", url, data=data, **kwargs)
File "/home/XXX/.local/lib/python3.10/site-packages/locust/clients.py", line 131, in request
url = self._build_url(url)
File "/home/XXX/.local/lib/python3.10/site-packages/locust/clients.py", line 81, in _build_url
if absolute_http_url_regexp.match(path):
TypeError: expected string or bytes-like object
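A likely cause, judging from the traceback: _generate_put_data only returns a value when len(self.ids) > 0, so whenever self.ids is empty the method implicitly returns None, and Locust's URL check (absolute_http_url_regexp.match(path)) then fails with "expected string or bytes-like object". That would also explain why the task sometimes passes and sometimes fails. A minimal sketch of a guard, assuming skipping the iteration is acceptable (the skip behaviour is my addition, not part of the original code):

@task(2)
def put_request(self):
    url = self._generate_put_data()
    # _generate_put_data() falls through and returns None when self.ids is
    # empty, which is what triggers the TypeError inside Locust's _build_url.
    if url is None:
        return  # skip this run until some ids are available
    self.client.put(url=url)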
I want to save the data set as a Parquet file called power.parquet, using df.to_parquet(<filename>). But it gives me this error: "ValueError: Error converting column "Global_reactive_power" to bytes using encoding UTF8. Original error: bad argument type for built-in operation". I have installed the fastparquet package.
from fastparquet import write, ParquetFile
dat.to_parquet("power.parquet")
df_parquet = ParquetFile("power.parquet").to_pandas()
df_parquet.head() # Test your final value
Traceback (most recent call last):
File "/opt/anaconda3/lib/python3.9/site-packages/fastparquet/writer.py", line 259, in convert
out = array_encode_utf8(data)
File "fastparquet/speedups.pyx", line 50, in fastparquet.speedups.array_encode_utf8
TypeError: bad argument type for built-in operation
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/var/folders/4f/bm2th1p56tz4rq_zffc8g3940000gn/T/ipykernel_85477/3080656655.py", line 1, in <module>
dat.to_parquet("power.parquet", compression="GZIP")
File "/opt/anaconda3/lib/python3.9/site-packages/dask/dataframe/core.py", line 4560, in to_parquet
return to_parquet(self, path, *args, **kwargs)
File "/opt/anaconda3/lib/python3.9/site-packages/dask/dataframe/io/parquet/core.py", line 732, in to_parquet
return compute_as_if_collection(
File "/opt/anaconda3/lib/python3.9/site-packages/dask/base.py", line 315, in compute_as_if_collection
return schedule(dsk2, keys, **kwargs)
File "/opt/anaconda3/lib/python3.9/site-packages/dask/threaded.py", line 79, in get
results = get_async(
File "/opt/anaconda3/lib/python3.9/site-packages/dask/local.py", line 507, in get_async
raise_exception(exc, tb)
File "/opt/anaconda3/lib/python3.9/site-packages/dask/local.py", line 315, in reraise
raise exc
File "/opt/anaconda3/lib/python3.9/site-packages/dask/local.py", line 220, in execute_task
result = _execute_task(task, data)
File "/opt/anaconda3/lib/python3.9/site-packages/dask/core.py", line 119, in _execute_task
return func(*(_execute_task(a, cache) for a in args))
File "/opt/anaconda3/lib/python3.9/site-packages/dask/utils.py", line 35, in apply
return func(*args, **kwargs)
File "/opt/anaconda3/lib/python3.9/site-packages/dask/dataframe/io/parquet/fastparquet.py", line 1167, in write_partition
rg = make_part_file(
File "/opt/anaconda3/lib/python3.9/site-packages/fastparquet/writer.py", line 716, in make_part_file
rg = make_row_group(f, data, schema, compression=compression,
File "/opt/anaconda3/lib/python3.9/site-packages/fastparquet/writer.py", line 701, in make_row_group
chunk = write_column(f, coldata, column,
File "/opt/anaconda3/lib/python3.9/site-packages/fastparquet/writer.py", line 554, in write_column
repetition_data, definition_data, encode[encoding](data, selement), 8 * b'\x00'
File "/opt/anaconda3/lib/python3.9/site-packages/fastparquet/writer.py", line 354, in encode_plain
out = convert(data, se)
File "/opt/anaconda3/lib/python3.9/site-packages/fastparquet/writer.py", line 284, in convert
raise ValueError('Error converting column "%s" to bytes using '
ValueError: Error converting column "Global_reactive_power" to bytes using encoding UTF8. Original error: bad argument type for built-in operation
I tried adding object_coding = "bytes". How can I solve this problem?
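A common cause of this fastparquet error is that the column still has object dtype, i.e. it holds strings (for example a placeholder like "?" for missing readings), so the writer tries to UTF-8-encode values that are not all strings. A minimal sketch of coercing the column to float before writing; the "?" placeholder is an assumption about the data set, not something taken from the traceback:

# Hypothetical cleanup: replace the assumed "?" placeholder and force the
# column to float so fastparquet no longer treats it as UTF-8 text.
dat["Global_reactive_power"] = (
    dat["Global_reactive_power"].replace("?", float("nan")).astype(float)
)
dat.to_parquet("power.parquet")

The same .replace()/.astype() calls work whether dat is a pandas or a dask DataFrame, which the dask frames in the traceback suggest it may be.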
I'm trying to set a value on this dropdown but am having trouble doing so. I've tried the .select(index) method, and it doesn't do anything. I've tried .type_keys("{DOWN 2}"), which usually works, but I think it fails because another dropdown is selected beforehand or something similar. My usual workaround is to call .expand(), then .type_keys("{DOWN 2}"), then .type_keys("{ENTER}"). I can't use that last workaround here, because the control is not being wrapped as a combo box, as it should be.
Is there a way I can change its wrapper? I tried:
from pywinauto import controls
test = controls.uia_controls.ComboBoxWrapper(formJobStock.child_window(auto_id='cboStockSize'))
test.expand()
but I get:
Traceback (most recent call last):
File "C:\Users\mflanagan\AppData\Local\Programs\Python\Python39\lib\site-packages\pywinauto\application.py", line 250, in __resolve_control
ctrl = wait_until_passes(
File "C:\Users\mflanagan\AppData\Local\Programs\Python\Python39\lib\site-packages\pywinauto\timings.py", line 458, in wait_until_passes
raise err
pywinauto.timings.TimeoutError
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "C:\Users\mflanagan\AppData\Local\Programs\Python\Python39\lib\site-packages\pywinauto\controls\uia_controls.py", line 143, in expand
if self.is_expanded():
File "C:\Users\mflanagan\AppData\Local\Programs\Python\Python39\lib\site-packages\pywinauto\controls\uiawrapper.py", line 561, in is_expanded
state = self.get_expand_state()
File "C:\Users\mflanagan\AppData\Local\Programs\Python\Python39\lib\site-packages\pywinauto\controls\uia_controls.py", line 188, in get_expand_state
return super(ComboBoxWrapper, self).get_expand_state()
File "C:\Users\mflanagan\AppData\Local\Programs\Python\Python39\lib\site-packages\pywinauto\controls\uiawrapper.py", line 556, in get_expand_state
return self.iface_expand_collapse.CurrentExpandCollapseState
File "C:\Users\mflanagan\AppData\Local\Programs\Python\Python39\lib\site-packages\pywinauto\controls\uiawrapper.py", line 132, in __get__
value = self.fget(obj)
File "C:\Users\mflanagan\AppData\Local\Programs\Python\Python39\lib\site-packages\pywinauto\controls\uiawrapper.py", line 210, in iface_expand_collapse
return uia_defs.get_elem_interface(elem, "ExpandCollapse")
File "C:\Users\mflanagan\AppData\Local\Programs\Python\Python39\lib\site-packages\pywinauto\uia_defines.py", line 233, in get_elem_interface
cur_ptrn = element_info.GetCurrentPattern(ptrn_id)
File "C:\Users\mflanagan\AppData\Local\Programs\Python\Python39\lib\site-packages\pywinauto\application.py", line 379, in __getattribute__
ctrls = self.__resolve_control(self.criteria)
File "C:\Users\mflanagan\AppData\Local\Programs\Python\Python39\lib\site-packages\pywinauto\application.py", line 261, in __resolve_control
raise e.original_exception
File "C:\Users\mflanagan\AppData\Local\Programs\Python\Python39\lib\site-packages\pywinauto\timings.py", line 436, in wait_until_passes
func_val = func(*args, **kwargs)
File "C:\Users\mflanagan\AppData\Local\Programs\Python\Python39\lib\site-packages\pywinauto\application.py", line 222, in __get_ctrl
ctrl = self.backend.generic_wrapper_class(findwindows.find_element(**ctrl_criteria))
File "C:\Users\mflanagan\AppData\Local\Programs\Python\Python39\lib\site-packages\pywinauto\findwindows.py", line 84, in find_element
elements = find_elements(**kwargs)
File "C:\Users\mflanagan\AppData\Local\Programs\Python\Python39\lib\site-packages\pywinauto\findwindows.py", line 305, in find_elements
elements = findbestmatch.find_best_control_matches(best_match, wrapped_elems)
File "C:\Users\mflanagan\AppData\Local\Programs\Python\Python39\lib\site-packages\pywinauto\findbestmatch.py", line 536, in find_best_control_matches
raise MatchError(items = name_control_map.keys(), tofind = search_text)
pywinauto.findbestmatch.MatchError: Could not find 'element' in 'dict_keys(['Edit', 'Edit0', 'Edit1', 'Edit2', 'Open', 'Button', 'OpenButton'])'
where formJobStock is a window specification:
formJobStock = window.child_window(auto_id='frmJobStock')
and I know it's found because I've done formJobStock.wrapper_object() and it comes up correctly.
It looks like I'm not passing the element parameter correctly to the controls.uia_controls.ComboBoxWrapper init call... Any idea how to do that correctly?
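For reference, ComboBoxWrapper expects an element_info object, not a WindowSpecification; passing the specification is what sends the lookup into best-match resolution and ends in the MatchError above. A sketch of one way to hand it the resolved element, assuming the control is reachable through the UIA backend (the variable names are mine):

from pywinauto.controls import uia_controls

# Resolve the specification to a concrete wrapper first, then rewrap its
# element_info explicitly as a combo box.
stock_size = formJobStock.child_window(auto_id='cboStockSize').wrapper_object()
combo = uia_controls.ComboBoxWrapper(stock_size.element_info)
combo.expand()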
I am new to Odoo. While generating an invoice, an error occurred: Failed to render template %r using values %r. I have added the full trace of the error below.
Traceback (most recent call last):
File "/opt/odoo13/addons/payment/controllers/portal.py", line 104, in payment_status_poll
tx_to_process._post_process_after_done()
File "/opt/odoo13/addons/payment/models/payment_acquirer.py", line 872, in _post_process_after_done
self._reconcile_after_transaction_done()
File "/opt/odoo13/addons/sale/models/payment.py", line 112, in _reconcile_after_transaction_done
sales_orders._send_order_confirmation_mail()
File "/opt/odoo13/addons/sale/models/sale.py", line 817, in _send_order_confirmation_mail
order.with_context(force_send=True).message_post_with_template(template_id, composition_mode='comment', email_layout_xmlid="mail.mail_notification_paynow")
File "/opt/odoo13/addons/mass_mailing/models/mail_thread.py", line 37, in message_post_with_template
return super(MailThread, no_massmail).message_post_with_template(template_id, **kwargs)
File "/opt/odoo13/addons/mail/models/mail_thread.py", line 1998, in message_post_with_template
update_values = composer.onchange_template_id(template_id, kwargs['composition_mode'], self._name, res_id)['value']
File "/opt/odoo13/addons/mail/wizard/mail_compose_message.py", line 405, in onchange_template_id
values = self.generate_email_for_composer(template_id, [res_id])[res_id]
File "/opt/odoo13/addons/mail/wizard/mail_compose_message.py", line 538, in generate_email_for_composer
template_values = self.env['mail.template'].with_context(tpl_partners_only=True).browse(template_id).generate_email(res_ids, fields=fields)
File "/opt/odoo13/addons/mail/models/mail_template.py", line 401, in generate_email
generated_field_values = Template._render_template(
File "/opt/odoo13/addons/mail/models/mail_template.py", line 290, in _render_template
raise UserError(_("Failed to render template %r using values %r") % (template, variables)
A unique index constraint in mongoengine throws this exception when I try to save an object with the same id:
|2018-06-28 14:23:28.084| [DEBUG] (rec.exchange.base|26): Processing exchange data
Traceback (most recent call last):
File "/home/nst/virtualenvs/myenv/lib/python3.6/site-packages/mongoengine/document.py", line 372, in save
object_id = self._save_create(doc, force_insert, write_concern)
File "/home/nst/virtualenvs/myenv/lib/python3.6/site-packages/mongoengine/document.py", line 428, in _save_create
object_id = collection.save(doc, **write_concern)
File "/home/nst/virtualenvs/myenv/lib/python3.6/site-packages/pymongo/collection.py", line 2917, in save
to_save, True, check_keys, manipulate, write_concern)
File "/home/nst/virtualenvs/myenv/lib/python3.6/site-packages/pymongo/collection.py", line 599, in _insert
bypass_doc_val, session)
File "/home/nst/virtualenvs/myenv/lib/python3.6/site-packages/pymongo/collection.py", line 580, in _insert_one
_check_write_command_response(result)
File "/home/nst/virtualenvs/myenv/lib/python3.6/site-packages/pymongo/helpers.py", line 207, in _check_write_command_response
_raise_last_write_error(write_errors)
File "/home/nst/virtualenvs/myenv/lib/python3.6/site-packages/pymongo/helpers.py", line 188, in _raise_last_write_error
raise DuplicateKeyError(error.get("errmsg"), 11000, error)
pymongo.errors.DuplicateKeyError: E11000 duplicate key error collection: TRX.exchanges index: exchange_1_order.id_1 dup key: { : "BFNX", : "1234" }
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "run.py", line 5, in <module>
application.run()
File "/home/nst/Dev/rec/rec/trx_app.py", line 118, in run
self.run_scheduler_app()
File "/home/nst/Dev/rec/rec/trx_app.py", line 106, in run_scheduler_app
self.run_jobs_hook_without_scheduler()
File "/home/nst/Dev/rec/rec/trx_app.py", line 92, in run_jobs_hook_without_scheduler
get_exchange_job(order)
File "/home/nst/Dev/rec/rec/scheduler/job_hooks.py", line 39, in get_exchange_job
ExchangeJob(job_cfg).run()
File "/home/nst/Dev/rec/rec/exchange/job.py", line 22, in run
exchange_data = self.data_source.get_exchange_data()
File "/home/nst/Dev/rec/rec/exchange/base.py", line 15, in get_exchange_data
return self._map_data(self._read_data())
File "/home/nst/Dev/rec/rec/exchange/base.py", line 31, in _read_data
parser = self._build_data_parser()
File "/home/nst/Dev/rec/rec/exchange/impl/bitfinex.py", line 113, in _build_data_parser
ed1.save()
File "/home/nst/virtualenvs/myenv/lib/python3.6/site-packages/mongoengine/document.py", line 395, in save
raise NotUniqueError(message % six.text_type(err))
mongoengine.errors.NotUniqueError: Tried to save duplicate unique keys (E11000 duplicate key error collection: TRX.exchanges index: exchange_1_order.id_1 dup key: { : "BFNX", : "1234" })
I'm trying to write a unit test for this case:
from mongoengine.errors import NotUniqueError

def test_duplicate_exchange_data(self):
    order1 = ExchangeOrder(id='1234', currency_pair='tBTGUSD',
                           datetime=datetime.datetime.today(),
                           amount=1500, price=57)
    exchange_data1 = ExchangeData(exchange='BFNX', order=order1)
    exchange_data1.save()

    order2 = ExchangeOrder(id='1234', currency_pair='tBTCUSD',
                           datetime=datetime.datetime.today(),
                           amount=500, price=57.5)
    exchange_data2 = ExchangeData(exchange='BFNX', order=order2)
    self.assertRaises(NotUniqueError, exchange_data2.save)
but assertRaises() is not working because of the second exception. How can I handle this in the test?
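One thing to check, offered as a sketch rather than a definitive fix: assertRaises only catches exceptions raised by the exact callable or block you give it, and the chained DuplicateKeyError context does not stop NotUniqueError from being matched. Writing the assertion as a context manager makes it explicit which statement is expected to raise, and anything that fails earlier (for example the first save) then shows up as a separate test error:

# Replace the last line of the test above with an explicit with-block;
# only the save() inside it is expected to raise NotUniqueError.
with self.assertRaises(NotUniqueError):
    exchange_data2.save()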