I am trying to make a SOAP call through Zeep. The WSDL does not contain the header definitions, but the SOAP server expects the following header:
<soapenv:Header>
    <ns2:UsernameToken xmlns:ns2="http://siebel.com/webservices">ORNODINTFC_WW@ORACLE.COM</ns2:UsernameToken>
</soapenv:Header>
This is how I am building the header, per the zeep documentation:
header = xsd.Element(
    '{http://siebel.com/webservices}UsernameToken',
    xsd.AnySimpleType()
)
header_val = header('ORNODINTFC_WW@ORACLE.COM')
client.create_message(client.service, 'process', _soapheaders=[header_val], payload=msg,
                      Mode='ODRFCQUERY', UserName='ORNODINTFC_WW@ORACLE.COM', Password='0r10nTkn')
However, create_message raises the following error:
Traceback (most recent call last):
File "<pyshell#119>", line 1, in <module>
xml_msg = client.create_message(client.service, 'process', _soapheaders=[header_val],payload=msg, Mode='ODRFCQUERY', UserName='ORNODINTFC_WW@ORACLE.COM', Password='0r10nTkn')
File "C:\Users\shubgang\AppData\Roaming\Python\Python36\site-packages\zeep\client.py", line 131, in create_message
operation_name, args, kwargs, client=self)
File "C:\Users\shubgang\AppData\Roaming\Python\Python36\site-packages\zeep\wsdl\bindings\soap.py", line 68, in _create
serialized = operation_obj.create(*args, **kwargs)
File "C:\Users\shubgang\AppData\Roaming\Python\Python36\site-packages\zeep\wsdl\definitions.py", line 200, in create
return self.input.serialize(*args, **kwargs)
File "C:\Users\shubgang\AppData\Roaming\Python\Python36\site-packages\zeep\wsdl\messages\soap.py", line 59, in serialize
header = self._serialize_header(headers_value, nsmap)
File "C:\Users\shubgang\AppData\Roaming\Python\Python36\site-packages\zeep\wsdl\messages\soap.py", line 327, in _serialize_header
raise ValueError("Invalid value given to _soapheaders")
ValueError: Invalid value given to _soapheaders
How can I fix this?
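One approach that often works when the WSDL omits the header definition (a sketch, not verified against this particular Siebel service) is to build the header as a raw lxml element; zeep also accepts plain lxml elements in _soapheaders:
from lxml import etree

# Hand-built header element; zeep serializes raw lxml elements passed in
# _soapheaders as-is, so no xsd.Element declaration is required.
header = etree.Element('{http://siebel.com/webservices}UsernameToken')
header.text = 'ORNODINTFC_WW@ORACLE.COM'

xml_msg = client.create_message(
    client.service, 'process',
    payload=msg, Mode='ODRFCQUERY',
    UserName='ORNODINTFC_WW@ORACLE.COM', Password='0r10nTkn',
    _soapheaders=[header],
)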
My Locust file has PUT requests, but sometimes they pass and sometimes they fail. Can anyone explain why that is happening?
Here is my Locust file:
def _generate_put_data(self) -> str:
    if len(self.ids) > 0:
        teacher_name = self.generate_random_string()
        teacher_email = f"{self.generate_random_string()}@{self.generate_random_string()}.{self.generate_random_string()}"
        teacher_email = teacher_email.replace("@", "%40")
        teacher_id = str(random.choice(self.ids))
        request_string = f"{self.path_all}/{teacher_id}?teacherName={teacher_name}&teacherEmail={teacher_email}"
        return request_string

@task(2)
def put_request(self):
    self.client.put(url=self._generate_put_data())
...
The _generate_put_data method returns a string containing the path and query for the request.
Here is the error:
[2023-01-26 11:39:12,357] pop-os/ERROR/locust.user.task: expected string or bytes-like object
Traceback (most recent call last):
File "/home/XXX/.local/lib/python3.10/site-packages/locust/user/task.py", line 347, in run
self.execute_next_task()
File "/home/XXX/.local/lib/python3.10/site-packages/locust/user/task.py", line 372, in execute_next_task
self.execute_task(self._task_queue.pop(0))
File "/home/XXX/.local/lib/python3.10/site-packages/locust/user/task.py", line 493, in execute_task
task(self.user)
File "/home/XXX/Desktop/my-projects/spring-boot-app/performans-testing/locust.py", line 44, in put_request
self.client.put(url=self._generate_put_data())
File "/home/XXX/.local/lib/python3.10/site-packages/requests/sessions.py", line 647, in put
return self.request("PUT", url, data=data, **kwargs)
File "/home/XXX/.local/lib/python3.10/site-packages/locust/clients.py", line 131, in request
url = self._build_url(url)
File "/home/XXX/.local/lib/python3.10/site-packages/locust/clients.py", line 81, in _build_url
if absolute_http_url_regexp.match(path):
TypeError: expected string or bytes-like object
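A likely explanation (an assumption based on the code shown): _generate_put_data only returns a value when self.ids is non-empty; otherwise it falls through and returns None, and Locust's _build_url then tries to regex-match None, which produces exactly this TypeError. A minimal sketch of one way to guard against it:
@task(2)
def put_request(self):
    url = self._generate_put_data()
    if url is None:
        # _generate_put_data() returns None when self.ids is empty,
        # so skip this iteration instead of calling put(None)
        return
    self.client.put(url=url)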
I want to save the data set as a Parquet file called power.parquet, and I use df.to_parquet(<filename>). But it gives me this error: "ValueError: Error converting column "Global_reactive_power" to bytes using encoding UTF8. Original error: bad argument type for built-in operation". I have installed the fastparquet package.
from fastparquet import write, ParquetFile
dat.to_parquet("power.parquet")
df_parquet = ParquetFile("power.parquet").to_pandas()
df_parquet.head() # Test your final value
Traceback (most recent call last):
File "/opt/anaconda3/lib/python3.9/site-packages/fastparquet/writer.py", line 259, in convert
out = array_encode_utf8(data)
File "fastparquet/speedups.pyx", line 50, in fastparquet.speedups.array_encode_utf8
TypeError: bad argument type for built-in operation
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/var/folders/4f/bm2th1p56tz4rq_zffc8g3940000gn/T/ipykernel_85477/3080656655.py", line 1, in <module>
dat.to_parquet("power.parquet", compression="GZIP")
File "/opt/anaconda3/lib/python3.9/site-packages/dask/dataframe/core.py", line 4560, in to_parquet
return to_parquet(self, path, *args, **kwargs)
File "/opt/anaconda3/lib/python3.9/site-packages/dask/dataframe/io/parquet/core.py", line 732, in to_parquet
return compute_as_if_collection(
File "/opt/anaconda3/lib/python3.9/site-packages/dask/base.py", line 315, in compute_as_if_collection
return schedule(dsk2, keys, **kwargs)
File "/opt/anaconda3/lib/python3.9/site-packages/dask/threaded.py", line 79, in get
results = get_async(
File "/opt/anaconda3/lib/python3.9/site-packages/dask/local.py", line 507, in get_async
raise_exception(exc, tb)
File "/opt/anaconda3/lib/python3.9/site-packages/dask/local.py", line 315, in reraise
raise exc
File "/opt/anaconda3/lib/python3.9/site-packages/dask/local.py", line 220, in execute_task
result = _execute_task(task, data)
File "/opt/anaconda3/lib/python3.9/site-packages/dask/core.py", line 119, in _execute_task
return func(*(_execute_task(a, cache) for a in args))
File "/opt/anaconda3/lib/python3.9/site-packages/dask/utils.py", line 35, in apply
return func(*args, **kwargs)
File "/opt/anaconda3/lib/python3.9/site-packages/dask/dataframe/io/parquet/fastparquet.py", line 1167, in write_partition
rg = make_part_file(
File "/opt/anaconda3/lib/python3.9/site-packages/fastparquet/writer.py", line 716, in make_part_file
rg = make_row_group(f, data, schema, compression=compression,
File "/opt/anaconda3/lib/python3.9/site-packages/fastparquet/writer.py", line 701, in make_row_group
chunk = write_column(f, coldata, column,
File "/opt/anaconda3/lib/python3.9/site-packages/fastparquet/writer.py", line 554, in write_column
repetition_data, definition_data, encode[encoding](data, selement), 8 * b'\x00'
File "/opt/anaconda3/lib/python3.9/site-packages/fastparquet/writer.py", line 354, in encode_plain
out = convert(data, se)
File "/opt/anaconda3/lib/python3.9/site-packages/fastparquet/writer.py", line 284, in convert
raise ValueError('Error converting column "%s" to bytes using '
ValueError: Error converting column "Global_reactive_power" to bytes using encoding UTF8. Original error: bad argument type for built-in operation
I have already tried adding object_coding = "bytes". How can I solve this problem?
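A likely cause (an assumption, since the dtypes are not shown) is that "Global_reactive_power" ended up as an object column holding a mix of strings and other values; fastparquet can only UTF-8-encode actual Python strings, hence the "bad argument type" error. A minimal sketch of coercing the column to one numeric dtype before writing, shown for a pandas DataFrame (with a Dask DataFrame, as in the traceback, dask.dataframe.to_numeric plays the same role):
import pandas as pd

# Coerce the mixed-type column to numeric; unparseable entries such as
# '?' placeholders become NaN, giving the column a single writable dtype.
dat["Global_reactive_power"] = pd.to_numeric(
    dat["Global_reactive_power"], errors="coerce")

dat.to_parquet("power.parquet", engine="fastparquet", compression="gzip")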
I'm trying to retrieve some data and mail it out from within a Google Cloud Function using SendGrid.
I tried converting the data, a list of dictionaries with the same flat structure, to CSV as detailed here, but this fails because the filesystem is read-only.
To work around this, I use io.StringIO() to store the CSV in memory.
However, I get the following error/stack trace during execution:
Traceback (most recent call last):
File "/env/local/lib/python3.7/site-packages/google/cloud/functions/worker.py", line 383, in run_background_function
_function_handler.invoke_user_function(event_object)
File "/env/local/lib/python3.7/site-packages/google/cloud/functions/worker.py", line 217, in invoke_user_function
return call_user_function(request_or_event)
File "/env/local/lib/python3.7/site-packages/google/cloud/functions/worker.py", line 214, in call_user_function
event_context.Context(**request_or_event.context))
File "/user_code/main.py", line 267, in session_updated
summarize_session(sessionid, doctorid)
File "/user_code/main.py", line 375, in summarize_session
send_email([docemail], data)
File "/user_code/main.py", line 328, in send_email
response = sg.send(message)
File "/env/local/lib/python3.7/site-packages/sendgrid/sendgrid.py", line 98, in send
response = self.client.mail.send.post(request_body=message.get())
File "/env/local/lib/python3.7/site-packages/python_http_client/client.py", line 251, in http_request
data = json.dumps(request_body).encode('utf-8')
File "/opt/python3.7/lib/python3.7/json/__init__.py", line 231, in dumps
return _default_encoder.encode(obj)
File "/opt/python3.7/lib/python3.7/json/encoder.py", line 199, in encode
chunks = self.iterencode(o, _one_shot=True)
File "/opt/python3.7/lib/python3.7/json/encoder.py", line 257, in iterencode
return _iterencode(o, 0)
File "/opt/python3.7/lib/python3.7/json/encoder.py", line 179, in default
raise TypeError(f'Object of type {o.__class__.__name__} '
TypeError: Object of type StringIO is not JSON serializable
The code is as follows:
def send_email(to_emails, datadict):
    message = Mail(
        from_email='info@domain.com',
        # to_emails=to_emails,
        to_emails=['dude@domain.com'],
        subject='Summary of your session',
        html_content='<strong>Data Summary</strong>\
            <p>This email is a summary of your session. Please check the attachment for details. </p>')
    sg = SendGridAPIClient(os.environ.get('SENDGRID_API_KEY'))
    keys = datadict[0].keys()
    try:
        output_file = io.StringIO()
        # output_file = io.BytesIO()  # results in a TypeError
        # https://stackoverflow.com/questions/34283178/typeerror-a-bytes-like-object-is-required-not-str-in-python-and-csv
        dict_writer = csv.DictWriter(output_file, keys)
        dict_writer.writeheader()
        dict_writer.writerows(datadict)
        print("Attaching")
        message.attachment = [
            Attachment(FileContent(output_file),
                       FileType('text/csv'),
                       FileName('sessiondata.csv'),
                       Disposition('inline'),
                       ContentId('SessionData')),
        ]
    except Exception as e:
        print("Exception:")
        print(e)
        return
    response = sg.send(message)
How do I convert the list of dictionaries to a CSV and attach it to an email without opening a physical file on the filesystem?
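For what it's worth, the traceback points at the attachment rather than the CSV step: FileContent is being given the StringIO object itself, and json.dumps cannot serialize that when the request body is built. FileContent expects a base64-encoded string. A sketch of the attachment portion under that assumption (the Mail setup and CSV writing stay as in the question):
import base64
from sendgrid.helpers.mail import (
    Attachment, ContentId, Disposition, FileContent, FileName, FileType)

# Base64-encode the in-memory CSV text before handing it to FileContent.
encoded_csv = base64.b64encode(
    output_file.getvalue().encode('utf-8')).decode('ascii')

attachment = Attachment()
attachment.file_content = FileContent(encoded_csv)
attachment.file_type = FileType('text/csv')
attachment.file_name = FileName('sessiondata.csv')
attachment.disposition = Disposition('attachment')
attachment.content_id = ContentId('SessionData')
message.attachment = attachment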
I have these "json" files that I like to insert into my mongodb database.
An example of one is:
http://s.live.ksmobile.net/cheetahlive/de/ff/15201023827214369775/15201023827214369775.json
The problem is that each file is formatted like this:
{ "channelType":"TEMPGROUP", ... } # line 1
{ "channelType":"TEMPGROUP", ... } # line 2
So instead of inserting each file as one document in the DB, every single line gets inserted as its own entry. What should be 3 documents from 3 "json" files ends up as 1189 documents in the database.
How can I insert the whole content of a ".json" file into one document?
My code is:
replay_url = "http://live.ksmobile.net/live/getreplayvideos?"
userid = 969730808384462848
url2 = replay_url + urllib.parse.urlencode({'userid': userid}) + '&page_size=1000'
raw_replay_data = requests.get(url2).json()
for i in raw_replay_data['data']['video_info']:
    url3 = i['msgfile']
    raw_message_data = urllib.request.urlopen(url3)
    for line in raw_message_data:
        json_data = json.loads(line)
        messages.insert_one(json_data)
        print(json_data)
Update to give more information for the answer:
messages.insert(json_data) gives this error:
Traceback (most recent call last):
File "/media/anon/06bcf743-8b4d-409f-addc-520fc4e19299/PycharmProjects/LiveMe/venv1/lib/python3.6/site-packages/pymongo/collection.py", line 633, in _insert
blk.execute(concern, session=session)
File "/media/anon/06bcf743-8b4d-409f-addc-520fc4e19299/PycharmProjects/LiveMe/venv1/lib/python3.6/site-packages/pymongo/bulk.py", line 432, in execute
return self.execute_command(generator, write_concern, session)
File "/media/anon/06bcf743-8b4d-409f-addc-520fc4e19299/PycharmProjects/LiveMe/venv1/lib/python3.6/site-packages/pymongo/bulk.py", line 329, in execute_command
raise BulkWriteError(full_result)
pymongo.errors.BulkWriteError: batch op errors occurred
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/media/anon/06bcf743-8b4d-409f-addc-520fc4e19299/PycharmProjects/LiveMe/import_messages_dev.py", line 43, in <module>
messages.insert(json_data)
File "/media/anon/06bcf743-8b4d-409f-addc-520fc4e19299/PycharmProjects/LiveMe/venv1/lib/python3.6/site-packages/pymongo/collection.py", line 2941, in insert
check_keys, manipulate, write_concern)
File "/media/anon/06bcf743-8b4d-409f-addc-520fc4e19299/PycharmProjects/LiveMe/venv1/lib/python3.6/site-packages/pymongo/collection.py", line 635, in _insert
_raise_last_error(bwe.details)
File "/media/anon/06bcf743-8b4d-409f-addc-520fc4e19299/PycharmProjects/LiveMe/venv1/lib/python3.6/site-packages/pymongo/helpers.py", line 220, in _raise_last_error
_raise_last_write_error(write_errors)
File "/media/anon/06bcf743-8b4d-409f-addc-520fc4e19299/PycharmProjects/LiveMe/venv1/lib/python3.6/site-packages/pymongo/helpers.py", line 188, in _raise_last_write_error
raise DuplicateKeyError(error.get("errmsg"), 11000, error)
pymongo.errors.DuplicateKeyError: E11000 duplicate key error index: liveme.messages.$_id_ dup key: { : ObjectId('5aa2fc6f5d60126499060949') }
messages.insert_one(json_data) gives me this error:
Traceback (most recent call last):
File "/media/anon/06bcf743-8b4d-409f-addc-520fc4e19299/PycharmProjects/LiveMe/import_messages_dev.py", line 43, in <module>
messages.insert_one(json_data)
File "/media/anon/06bcf743-8b4d-409f-addc-520fc4e19299/PycharmProjects/LiveMe/venv1/lib/python3.6/site-packages/pymongo/collection.py", line 676, in insert_one
common.validate_is_document_type("document", document)
File "/media/anon/06bcf743-8b4d-409f-addc-520fc4e19299/PycharmProjects/LiveMe/venv1/lib/python3.6/site-packages/pymongo/common.py", line 434, in validate_is_document_type
"collections.MutableMapping" % (option,))
TypeError: document must be an instance of dict, bson.son.SON, bson.raw_bson.RawBSONDocument, or a type that inherits from collections.MutableMapping
messages.insert_many(json_data) gives me this error:
Traceback (most recent call last):
File "/media/anon/06bcf743-8b4d-409f-addc-520fc4e19299/PycharmProjects/LiveMe/import_messages_dev.py", line 43, in <module>
messages.insert_many(json_data)
File "/media/anon/06bcf743-8b4d-409f-addc-520fc4e19299/PycharmProjects/LiveMe/venv1/lib/python3.6/site-packages/pymongo/collection.py", line 742, in insert_many
blk.execute(self.write_concern.document, session=session)
File "/media/anon/06bcf743-8b4d-409f-addc-520fc4e19299/PycharmProjects/LiveMe/venv1/lib/python3.6/site-packages/pymongo/bulk.py", line 432, in execute
return self.execute_command(generator, write_concern, session)
File "/media/anon/06bcf743-8b4d-409f-addc-520fc4e19299/PycharmProjects/LiveMe/venv1/lib/python3.6/site-packages/pymongo/bulk.py", line 329, in execute_command
raise BulkWriteError(full_result)
pymongo.errors.BulkWriteError: batch op errors occurred
messages.insert and messages.insert_many both insert one line and then throw the error.
These files do not contain properly formatted JSON; rather, they contain a separate JSON object on each line.
To turn them into valid JSON, you probably want a list of objects, i.e.:
[{ "channelType":"TEMPGROUP", ... },
{ "channelType":"TEMPGROUP", ... }]
You can achieve this by doing:
for i in raw_replay_data['data']['video_info']:
    url3 = i['msgfile']
    raw_message_data = urllib.request.urlopen(url3)
    json_data = []
    for line in raw_message_data:
        json_data.append(json.loads(line))
    # insert_one() only accepts a mapping, so wrap the list in a single document
    messages.insert_one({'messages': json_data})
    print(json_data)
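Wrapping the list in a dict is the important part: insert_one() only accepts mapping types, so passing the bare list would raise the same "document must be an instance of dict ..." error as before. If you instead wanted one MongoDB document per line, insert_many(json_data) on the list of parsed objects would do that.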
I have a WSDL service which provides several links like
http://localhost:8081/View1&Wsdl
...
http://localhost:8081/ViewN&Wsdl
The problem is that I can't work with multiple WSDL links using the zeep library (2.5.0).
My code:
import zeep
view1_wsdl = 'http://localhost:8081/View1&Wsdl'
client1 = zeep.Client(wsdl=view1_wsdl)
result1 = client1.service.method1()
print(result1)
view2_wsdl = 'http://localhost:8081/View2&Wsdl'
client2 = zeep.Client(wsdl=view2_wsdl)
result2 = client2.service.method2()
print(result2)
I get a correct result1, but result2 raises an error:
Traceback (most recent call last):
File "C:/.../auto_tests/wsdl_check.py", line 17, in <module>
result2 = client2.service.method2
File "C:\...\lib\site-packages\zeep\client.py", line 45, in __call__
self._op_name, args, kwargs)
File "C:\...\lib\site-packages\zeep\wsdl\bindings\soap.py", line 110, in send
options=options)
File "C:\...\lib\site-packages\zeep\wsdl\bindings\soap.py", line 68, in _create
serialized = operation_obj.create(*args, **kwargs)
File "C:\...\lib\site-packages\zeep\wsdl\definitions.py", line 197, in create
return self.input.serialize(*args, **kwargs)
File "C:\...\lib\site-packages\zeep\wsdl\messages\soap.py", line 63, in serialize
body_value = self.body(*args, **kwargs)
File "C:\...\lib\site-packages\zeep\xsd\elements\element.py", line 48, in __call__
instance = self.type(*args, **kwargs)
File "C:\...\lib\site-packages\zeep\xsd\types\complex.py", line 42, in __call__
return self._value_class(*args, **kwargs)
File "C:\...\lib\site-packages\zeep\xsd\valueobjects.py", line 90, in __init__
items = _process_signature(self._xsd_type, args, kwargs)
File "C:\...\lib\site-packages\zeep\xsd\valueobjects.py", line 194, in _process_signature
len(result), num_args))
TypeError: __init__() takes at most 0 positional arguments (1 given)
How can I deal with it?
The problem was in the WSDL service, not in the zeep library.
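For anyone debugging a similar signature mismatch, it can help to dump what each WSDL actually exposes before calling it. A small diagnostic sketch (not a fix, since the underlying problem here was the service definition itself):
import zeep

# Print the services, bindings, operations and their expected parameters
# for the second WSDL so the call signature of method2 can be checked.
client2 = zeep.Client(wsdl='http://localhost:8081/View2&Wsdl')
client2.wsdl.dump()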