Django StreamingHttpResponse and Apache WSGI - not working - Python

I have the following streamed response:
import json
import time
from collections import OrderedDict

import requests
from django.conf import settings

# DELIMITER, ENCODING, ATTEMPTS_LIMIT, REST_RUN_IDLE_TIME_MS,
# BEFORE_STOP_ITERATION_SLEEP_SECS, DELIVERY_MAPPING and the get_* helpers
# are project-level names defined elsewhere.

def reportgen_iterator(request, object_id):
    output_format = request.GET.get('output', 'pdf')
    response_data = {
        'progress': 'Retrieving data...',
        'error': False,
        'err_msg': None
    }
    yield json.dumps(response_data)
    try:
        vendor_id, dr_datasets = get_dr_datasets(
            object_id=object_id, ws_user=settings.WS_USER,
            include_vendor_id=True, request=request
        )
    except Exception as e:
        response_data.update({
            'error': True,
            'err_msg': "Unable to retrieve data for report generation. "
                       "Exception message: {}".format(e)  # e.message is Python 2-only
        })
        yield "{}{}".format(DELIMITER, json.dumps(response_data))
        time.sleep(BEFORE_STOP_ITERATION_SLEEP_SECS)
        return  # ends the generator; raising StopIteration inside one is a RuntimeError on Python 3.7+ (PEP 479)
    # data retrieved correctly, continue
    response_data['progress'] = 'Data retrieved.'
    yield "{}{}".format(DELIMITER, json.dumps(response_data))
    domain = settings.DR['API_DOMAIN']
    dr_id, delivery_opts = get_dr_ids(vendor_id=vendor_id)
    delivery_option_id = delivery_opts.get(output_format)
    run_idle_time = REST_RUN_IDLE_TIME_MS / 1000 or 1
    headers = settings.DR['AUTHORIZATION_HEADER']
    headers.update({
        'Content-Type': 'application/json', 'deliveryOptionId': delivery_option_id
    })
    # POST request
    response_data['progress'] = 'Generating document...'
    yield "{}{}".format(DELIMITER, json.dumps(response_data))
    post_url = 'https://{domain}{rel_url}/'.format(
        domain=domain,
        rel_url=settings.DR['API_ENDPOINTS']['create'](dr_id)  # was `ddp_id`, which is undefined here
    )
    header_img, footer_img = get_images_for_template(vendor_id=vendor_id, request=None)
    images = {
        'HeaderImg': header_img,
        'FooterImg': footer_img
    }
    data = OrderedDict(
        [('deliveryOptionId', delivery_option_id),
         ('clientId', 'MyClient'),
         ('data', dr_datasets),
         ('images', images)]
    )
    payload = json.dumps(data, indent=4).encode(ENCODING)
    req = requests.Request('POST', url=post_url, headers=headers, data=payload)
    prepared_request = req.prepare()
    session = requests.Session()
    post_response = session.send(prepared_request)
    if post_response.status_code != 200:
        response_data.update({
            'error': True,
            'err_msg': "Error: post response status code != 200, exit."
        })
        yield "{}{}".format(DELIMITER, json.dumps(response_data))
        time.sleep(BEFORE_STOP_ITERATION_SLEEP_SECS)
        return
    # POST response successful, continue.
    # RUN URL - periodic check
    post_response_dict = post_response.json()
    run_url = 'https://{domain}/{path}'.format(
        domain=domain,
        path=post_response_dict.get('links', {}).get('self', {}).get('href')
    )  # the stray `headers=headers` kwarg passed to format() here was a no-op and has been dropped
    run_id = post_response_dict.get('runId', '')
    status = 'Running'
    attempt_counter = 0
    file_url = '{url}/files/'.format(url=run_url)
    while status == 'Running':
        attempt_counter += 1
        run_response = requests.get(url=run_url, headers=headers)
        runs_data = run_response.json()
        status = runs_data['status']
        message = runs_data['message']
        progress = runs_data['progress']
        response_data['progress'] = '{} - {}%'.format(status, int(progress * 100))
        yield "{}{}".format(DELIMITER, json.dumps(response_data))
        if status == 'Error':
            msg = '{sc} - run_id: {run_id} - error_id: [{error_id}]: {message}'.format(
                sc=run_response.status_code, run_id=run_id,
                error_id=runs_data.get('errorId', 'N/A'), message=message
            )
            response_data.update({
                'error': True,
                'err_msg': msg
            })
            yield "{}{}".format(DELIMITER, json.dumps(response_data))
            time.sleep(BEFORE_STOP_ITERATION_SLEEP_SECS)
            return
        if status == 'Complete':
            break
        if attempt_counter >= ATTEMPTS_LIMIT:
            msg = 'File failed to generate after {att_limit} retrieve attempts: ' \
                  '({progress}% progress) - {message}'.format(
                      att_limit=ATTEMPTS_LIMIT,
                      progress=int(progress * 100),
                      message=message
                  )
            response_data.update({
                'error': True,
                'err_msg': msg
            })
            yield "{}{}".format(DELIMITER, json.dumps(response_data))
            time.sleep(BEFORE_STOP_ITERATION_SLEEP_SECS)
            return
        time.sleep(run_idle_time)
    # GET GENERATED FILE
    file_url_response = requests.get(
        url=file_url,
        headers=headers,
        params={'userId': settings.DR_CREDS['userId']},
        stream=True,
    )
    if file_url_response.status_code != 200:
        response_data.update({
            'error': True,
            'err_msg': 'error in retrieving file\nurl: {url}\n'.format(url=file_url)
        })
        yield "{}{}".format(DELIMITER, json.dumps(response_data))
        time.sleep(BEFORE_STOP_ITERATION_SLEEP_SECS)
        return
    file_url_dict = file_url_response.json()
    retrieve_file_rel_url = file_url_dict['files'][0]['links']['file']['href']
    file_ext = DELIVERY_MAPPING.get(output_format, 'pdf')
    response_data.update({
        'progress': 'Generated.',
        'doc_url': retrieve_file_rel_url,
        'dest_file_ext': file_ext
    })
    yield "{}{}".format(DELIMITER, json.dumps(response_data))
class FullDownloadRosterStreamingView(View):

    def get(self, request, object_id):
        stream = reportgen_iterator(request, object_id)
        try:
            response = StreamingHttpResponse(
                streaming_content=stream, status=200,
                content_type='application/octet-stream'
            )
            response['Cache-Control'] = 'no-cache'
            return response
        except Exception as e:
            return HttpResponseServerError(str(e))  # e.message is Python 2-only
def get_file(request):
    domain = settings.DR['API_DOMAIN']
    retrieve_file_rel_url = request.GET.get('doc_url')
    file_ext = request.GET.get('file_ext')
    retrieve_file_response = requests.get(
        url='https://{domain}/{path}'.format(
            domain=domain,
            path=retrieve_file_rel_url
        ),
        headers=settings.DR['AUTHORIZATION_HEADER'],
        params={'userId': settings.DR_CREDS['userId']},
        stream=True,
    )
    if retrieve_file_response.status_code != 200:
        return HttpResponseServerError(
            "Error while downloading file"
        )
    # the fallback must be a full MIME type; the bare string 'pdf' in the
    # original was not a valid Content-Type
    response = HttpResponse(
        content_type=CONTENT_TYPE_MAPPING.get(file_ext, 'application/pdf')
    )
    response['Content-Disposition'] = (
        'attachment; filename="my_doc.{}"'.format(file_ext)
    )
    response.write(retrieve_file_response.content)
    return response
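Since the upstream request already uses stream=True, a hedged variant of the tail of get_file (same names as above, 'application/pdf' fallback assumed) could forward the body without buffering the whole file in memory:

    # Stream the upstream response through Django instead of calling .content,
    # which reads the entire file into memory first.
    response = StreamingHttpResponse(
        retrieve_file_response.iter_content(chunk_size=8192),
        content_type=CONTENT_TYPE_MAPPING.get(file_ext, 'application/pdf'),
    )
    response['Content-Disposition'] = 'attachment; filename="my_doc.{}"'.format(file_ext)
    return response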
handled client-side by this JS code:
function getStreamedResponse(lo_id, output) {
    var xhr = new XMLHttpRequest(),
        method = 'GET';
    xhr.overrideMimeType("application/octet-stream");
    var url = window.amphr.baseUrl + '/dl/stream/' + lo_id + '/?output=' + output;
    url += "&" + (new Date()).getTime(); // timestamp added to prevent XHR request caching
    this.rspObj = null;
    xhr.onprogress = function (evt) {
        var _this = evt.currentTarget;
        if (_this.responseText.length == 0) return;
        var delimiter = '|';
        var responseTextChunks = _this.responseText.split(delimiter);
        if (responseTextChunks.length == 0) return;
        // keep only the most recent delimited JSON message
        _this.rspObj = JSON.parse(responseTextChunks.slice(-1)[0]);
        if (_this.rspObj.error === true) {
            _this.abort(evt);
        }
        updateProgressMessage(_this.rspObj.progress);
    };
    xhr.onload = function (evt) {
        toggleProgress(false);
        var _this = evt.currentTarget;
        var uri = window.amphr.baseUrl + "/dl/get_file/?doc_url=" + _this.rspObj.doc_url + "&file_ext=" + _this.rspObj.dest_file_ext;
        getFile(uri);
    };
    xhr.onerror = function (evt) {
        toggleProgress(false);
    };
    xhr.onabort = function (evt) {
        toggleProgress(false);
        setTimeout(function () {
            if (window.confirm("Error while generating document.\nDownload original?")) {
                getFile(window.amphr.originalDownloadUrl);
            }
        }, 100);
    };
    var getFile = function (uri) {
        var link = document.createElement("a");
        link.href = uri;
        document.body.appendChild(link);
        link.click();
        document.body.removeChild(link); // `delete link;` on a variable is a no-op and has been dropped
    };
    xhr.open(method, url, true);
    xhr.send();
}
function toggleProgress(show) {
    // toggle overlay/spinner/progress message
    var display = (show === true) ? 'block' : 'none';
    var overlayDiv = document.getElementsByClassName('overlay')[0];
    overlayDiv.style.display = display;
    var loaderDiv = document.getElementsByClassName('loader')[0];
    var msgDiv = document.getElementById('progress-msg');
    loaderDiv.style.display = display;
    msgDiv.style.display = display;
    if (show === false) {
        msgDiv.innerHTML = "";
    }
}
function updateProgressMessage(msg) {
    var msgDiv = document.getElementById('progress-msg');
    msgDiv.innerHTML = msg;
}
This works fine locally with the development server (runserver or runserver_plus): the response text arrives in chunks. However, on the dev environment (Apache/mod_wsgi with HTTPS), the response is returned in its entirety at the end, not chunked.
Any hints about why this is happening?
Thanks
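For readers hitting the same symptom: a frequent culprit is an Apache output filter buffering the stream, mod_deflate in particular, which holds output until it has a full compression block. A minimal, hypothetical vhost fragment to rule that out for the streaming endpoint (path taken from the JS above) would be:

# Hypothetical Apache vhost fragment: exempt the streaming endpoint from
# mod_deflate so progress chunks are not held back for compression.
<Location /dl/stream/>
    SetEnv no-gzip 1
</Location>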

Related

I want to use a list in the args of Celery beat

I have a list of 10 cities and I want to pass every city in it to my Celery beat task, but I can't get it to work.
cityList = ["Tehran", "Shiraz", "Mashhad", "Qom", "Isfahan", "Ardabil", "Hamedan", "Yazd", "Tabriz", "Zavareh"]

app.conf.beat_schedule = {
    'call_show_every_one_minute': {
        "task": 'ali111.get_weather',
        'schedule': crontab(minute='*/1'),
        'args': ([cityList], ),
    }
}
@app.task()  # was `#app.task()`, which comments the decorator out
def get_weather(city):
    con = redis.StrictRedis(host='localhost', port=6380, db=0, decode_responses=True)
    appid = "b3bf68fdfc6ba46923cd50cb8b9a79c3"
    URL = 'https://api.openweathermap.org/data/2.5/weather'
    temp = con.get(city)
    if temp is not None:
        return temp
    try:
        PARAMS = {'q': city, 'appid': appid}
        r = requests.get(url=URL, params=PARAMS)
        city_temp = (r.json()['main']['temp']) - 273.15
        my_temp = f"{round(city_temp, 1)}c"
        con.set(city, my_temp, ex=60)
        return my_temp
    except ConnectionError:
        return "no internet connection"

How to pass a variable from script 1 to script 2 in Python

I need help, please.
I have two scripts. The first script consumes from RabbitMQ, and I need to send the received body to a variable in script 2.
However, the variable remains empty. I think script 1 may be calling script 2 before the value is received from RabbitMQ?
How can I achieve this? Thanks
script 1
import pika
import time

from script2 import strQueue


class ReceiveFromMQ(object):

    def __init__(self):
        credentials = pika.PlainCredentials('xxxx', 'xxxx')
        parameters = pika.ConnectionParameters('xxxx', xxx, 'xxx',
                                               credentials)
        self.connection = pika.BlockingConnection(parameters)
        self.channel = self.connection.channel()
        self.channel.basic_qos(prefetch_count=1)
        self.channel.basic_consume(
            queue='queue',
            on_message_callback=self.on_response,
            auto_ack=True)
        self.response = None
        self.channel.start_consuming()

    def on_response(self, ch, method, props, body):
        self.response = body.decode()
        strQueue = body.decode()
        print(" [x] Received %r" % body.decode())
        # getMsg(body.decode())
        time.sleep(body.count(b'.'))
        print(" [x] Done")
        print(' [*] Waiting for messages. To exit press CTRL+C')
        return self.response

    def call(self):
        self.response = None
        self.connection.process_data_events(time_limit=None)
        print(str(self.response))
        return str(self.response)


receive_mq = ReceiveFromMQ()
response = receive_mq.call()
print(response)
script 2
import requests
import json

strQueue = None


# Function: Authenticate
def httpAuthenticate(in_apiusers, in_apipass, in_Tenant, in_URL):
    try:
        print('retrieve token...')
        url = in_URL
        payload = json.dumps({
            "password": str(in_apipass),
            "usernameOrEmailAddress": str(in_apiusers),
            "tenancyName": str(in_Tenant)
        })
        headers = {
            'Content-Type': 'application/json'
        }
        response = requests.request("POST", url, headers=headers, data=payload)
        json_object = json.loads(response.text)
        print('token code: ' + str(response.status_code))
        return str(json_object["result"])
    except Exception as e:
        return 'Fail:'


# Function: Add Queue Item on UiPath Orchestrator
def httpAddQueueItems(in_URL, in_Token, in_QueueName, in_strjson):
    try:
        print('add queue item...')
        url = in_URL
        payload = json.dumps({
            "itemData": {
                "Priority": "Normal",
                "Name": str(in_QueueName),
                "SpecificContent": {
                    "in_pjsorequest": in_strjson  # was `in_strpjson`, which is undefined
                },
                "Reference": "ggg"
            }
        })
        headers = {
            'X-UIPATH-OrganizationUnitId': '',
            'Content-Type': 'application/json',
            'Authorization': 'Bearer ' + in_Token
        }
        response = requests.request("POST", url, headers=headers, data=payload)
    except Exception as e:
        print(e)
        return 'Fail'


# CONSTANTS
OnPremuser = "xxxx"
OnPrempass = "xxx!"
OnPremtenant = "Default"
OnPremUrlAuth = "xxxx"
OnPremUrlAddQueue = "https://xxxx"
OnPremQueue = "JSON"
OnPremPJSON = strQueue
OnPremtoken = httpAuthenticate(OnPremuser, OnPrempass, OnPremtenant, OnPremUrlAuth)
httpAddQueueItems(OnPremUrlAddQueue, OnPremtoken, OnPremQueue, OnPremPJSON)  # was `OnPremJSON`, which is undefined
What you are trying to achieve is not possible this way, since you are
trying to access a shared variable across modules (a race condition).
Moreover, because of the GIL only one bytecode instruction can execute at a
time, which means only one CPU-bound task can run at a time.
P.S.: It can be achieved by running a consumer for the RabbitMQ producer and then assigning the received JSON to a variable.
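A hedged sketch of that approach, reusing the question's names: `from script2 import strQueue` copies the value once at import time, so instead expose a function in script 2 (handle_message below is hypothetical) and call it from the consumer callback:

# script2.py: a hypothetical entry point that receives each message body.
def handle_message(body_str):
    token = httpAuthenticate(OnPremuser, OnPrempass, OnPremtenant, OnPremUrlAuth)
    httpAddQueueItems(OnPremUrlAddQueue, token, OnPremQueue, body_str)

# script1.py: inside ReceiveFromMQ.on_response, instead of assigning strQueue:
#     from script2 import handle_message
#     handle_message(body.decode())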

Python script fails to call Jira REST API

I have a Python script which makes a call to the Jira API to create a Jira issue.
The API call itself works fine: if I test it with Postman I am able to create tickets. However, the same URL does not work from Python. I do not understand why I am not able to create a secure connection to Jira:
Executed [createIssue] action via OEC[integrationId: f457bfd9-5fe0-4fc5-89a9-ee007e85cf1b integrationType: Jira] with errors. Reason: Err: exit status 1, Stderr:
Traceback (most recent call last):
  File "/home/opsgenie/oec_test/scripts/actionExecutor.py", line 279, in <module>
    main()
  File "/home/opsgenie/oec_test/scripts/actionExecutor.py", line 233, in main
    timeout=timeout)
  File "/usr/lib/python2.7/dist-packages/requests/api.py", line 112, in post
    return request('post', url, data=data, json=json, **kwargs)
  File "/usr/lib/python2.7/dist-packages/requests/api.py", line 58, in request
    return session.request(method=method, url=url, **kwargs)
  File "/usr/lib/python2.7/dist-packages/requests/sessions.py", line 520, in request
    resp = self.send(prep, **send_kwargs)
  File "/usr/lib/python2.7/dist-packages/requests/sessions.py", line 630, in send
    r = adapter.send(request, **kwargs)
  File "/usr/lib/python2.7/dist-packages/requests/adapters.py", line 508, in send
    raise ConnectionError(e, request=request)
requests.exceptions.ConnectionError: HTTPSConnectionPool(host='jiratest.gk.gk-software.com', port=443): Max retries exceeded with url: /rest/api/2/issue (Caused by NewConnectionError('<urllib3.connection.VerifiedHTTPSConnection object at 0x7ffa264aa1d0>: Failed to establish a new connection: [Errno -2] Name or service not known',))
The complete code looks like this:
import argparse
import json
import logging
import sys

reload(sys)  # Python 2 only, consistent with the traceback above
sys.setdefaultencoding('utf8')

import re
import requests
from requests.auth import HTTPBasicAuth

parser = argparse.ArgumentParser()
parser.add_argument('-payload', '--queuePayload', help='Payload from queue', required=True)
parser.add_argument('-apiKey', '--apiKey', help='The apiKey of the integration', required=True)
parser.add_argument('-opsgenieUrl', '--opsgenieUrl', help='The url', required=True)
parser.add_argument('-logLevel', '--logLevel', help='Level of log', required=True)
parser.add_argument('-username', '--username', help='Username', required=False)
parser.add_argument('-password', '--password', help='Password', required=False)
parser.add_argument('-url', '--url', help='URL', required=False)
parser.add_argument('-projectKey', '--projectKey', help='Project Key', required=False)
parser.add_argument('-issueTypeName', '--issueTypeName', help='Issue Type', required=False)
args = vars(parser.parse_args())

logging.basicConfig(stream=sys.stdout, level=args['logLevel'])


def parse_field(key, mandatory):
    variable = queue_message.get(key)
    if not variable:
        variable = args.get(key)
    if mandatory and not variable:
        logging.error(LOG_PREFIX + " Skipping action, Mandatory conf item '" + key +
                      "' is missing. Check your configuration file.")
        raise ValueError(LOG_PREFIX + " Skipping action, Mandatory conf item '" + key +
                         "' is missing. Check your configuration file.")
    return variable


def parse_timeout():
    parsed_timeout = args.get('http.timeout')
    if not parsed_timeout:
        return 30000
    return int(parsed_timeout)


def get_alert_details(alertId):
    alert_api_url = args['opsgenieUrl'] + "/v2/alerts/" + alertId
    headers = {
        "Content-Type": "application/json",
        "Accept-Language": "application/json",
        "Authorization": "GenieKey " + args['apiKey']
    }
    req = requests.get(alert_api_url, headers=headers)
    alert = req.json()
    return alert["data"]


def get_transition_id(request_headers, jira_url, transition_name, token):
    transition_id = str()
    response = requests.get(jira_url, None, headers=request_headers, auth=token, timeout=timeout)
    try:
        body = response.json()
        if body and response.status_code < 299:
            transition_list = body["transitions"]
            for transition in transition_list:
                to = transition['to']
                if transition_name == to['name']:
                    transition_id = transition['id']
            logging.info(LOG_PREFIX + " Successfully executed at Jira")
            logging.debug(LOG_PREFIX + " Jira response: " + str(response.status_code) + " " + str(response.content))
        else:
            logging.error(
                LOG_PREFIX + " Could not execute at Jira; response: " + str(response.content) + " status code: " + str(
                    response.status_code))
        if not transition_id:
            logging.debug(LOG_PREFIX + " Transition id is empty")
        return transition_id
    except ValueError:
        logging.error("The response body is not a valid json object!")


def get_comp():
    jira_comp = re.search(r"topic:\s\'(.*)[']", str(queue_message.get("description")))
    if jira_comp:
        jira_comp = jira_comp.group(1)
        return jira_comp
    else:
        jira_comp = "Dummy"
        return jira_comp


def get_prio():
    severity = re.search(r"severity:\s\'(.*)[']", queue_message.get("description"))
    if severity:
        jira_prio = severity.group(1)
        if jira_prio == "critical":
            jira_prio = "Very High"
            return jira_prio
        if jira_prio == "Very High":
            jira_prio = "High"
            return jira_prio
    else:
        severity = "High"
        return severity


def get_context():
    context = re.search(r"context:\s\'(.*)[']", str(queue_message.get("description")))
    if context:
        context = context.group(1)
        return context
    else:
        context = ""
        return context


def main():
    global LOG_PREFIX
    global queue_message
    global timeout
    global to_customfield_20500; to_customfield_20500 = []
    global project_to_customfield_20500
    global cluster_to_customfield_20500

    queue_message_string = args['queuePayload']
    queue_message_string = queue_message_string.strip()
    queue_message = json.loads(queue_message_string)
    alert_id = queue_message["alertId"]
    mapped_action = queue_message["mappedActionV2"]["name"]
    alert_details = get_alert_details(alert_id)
    LOG_PREFIX = "[" + mapped_action + "]"
    logging.info("Will execute " + mapped_action + " for alertId " + alert_id)
    timeout = parse_timeout()

    url = parse_field('url', True)
    username = parse_field('username', True)
    password = parse_field('password', True)
    project_key = parse_field('projectKey', False)
    issue_type_name = parse_field('issueTypeName', False)
    issue_key = queue_message.get("key")
    logging.debug("Url: " + str(url))
    logging.debug("Username: " + str(username))
    logging.debug("Project Key: " + str(project_key))
    logging.debug("Issue Type: " + str(issue_type_name))
    logging.debug("Issue Key: " + str(issue_key))

    content_params = dict()
    token = HTTPBasicAuth(username, password)
    headers = {
        "Content-Type": "application/json",
        "Accept-Language": "application/json",
    }
    result_url = url + "/rest/api/2/issue"

    if mapped_action == "addCommentToIssue":
        content_params = {
            "body": queue_message.get('body')
        }
        result_url += "/" + issue_key + "/comment"
    elif mapped_action == "createIssue":
        getcontext = get_context()
        getcomp = get_comp()
        priority = get_prio()
        content_params = {
            "fields": {
                "project": {"key": project_key},
                "issuetype": {"name": issue_type_name},
                "summary": queue_message.get("summary"),
                "description": queue_message.get("description"),
                "customfield_20500": [{"value": "DE - Germany"}],
                "customfield_13604": "tbd",
                "components": [{"name": getcomp}],
                "versions": [{"name": "tbd"}],
                "customfield_15000": [getcontext],
                "priority": {"name": priority},
                "assignee": {"name": "#cloudoperations"}
            }
        }
    elif mapped_action == "resolveIssue":
        result_url += "/" + issue_key + "/transitions"
        content_params = {
            "transition": {
                "id": get_transition_id(headers, result_url, "Resolved", token)
            },
            "fields": {
                "resolution": {
                    "name": "Done"
                }
            }
        }
    elif mapped_action == "closeIssue":
        result_url += "/" + issue_key + "/transitions"
        content_params = {
            "transition": {
                "id": get_transition_id(headers, result_url, "Closed", token)
            },
            "fields": {
                "resolution": {
                    "name": "Done"
                }
            }
        }
    elif mapped_action == "issueDone":
        result_url += "/" + issue_key + "/transitions"
        content_params = {
            "transition": {
                "id": get_transition_id(headers, result_url, "Done", token)
            }
        }
    elif mapped_action == "inProgressIssue":
        result_url += "/" + issue_key + "/transitions"
        content_params = {
            "transition": {
                "id": get_transition_id(headers, result_url, "In Progress", token)
            }
        }

    logging.debug(str(content_params))
    response = requests.post(result_url, data=json.dumps(content_params), headers=headers, auth=token,
                             timeout=timeout)
    if response.status_code < 299:
        logging.info("Successfully executed at Jira")
        if mapped_action == "createIssue":
            try:
                response_body = response.json()
                if response_body:
                    issue_key_from_response = response_body['key']
                    if issue_key_from_response:
                        alert_api_url = args['opsgenieUrl'] + "/v2/alerts/" + alert_id + "/details"
                        content = {
                            "details": {
                                "issueKey": issue_key_from_response
                            }
                        }
                        headers = {
                            "Content-Type": "application/json",
                            "Accept-Language": "application/json",
                            "Authorization": "GenieKey " + args['apiKey']
                        }
                        logging.debug(str(alert_api_url) + str(content) + str(headers))
                        alert_response = requests.post(alert_api_url,
                                                       data=json.dumps(content), headers=headers,
                                                       timeout=timeout)
                        if alert_response.status_code < 299:
                            logging.info(LOG_PREFIX + " Successfully sent to Opsgenie")
                            logging.debug(
                                LOG_PREFIX + " Jira response: " + str(alert_response.content) + " " + str(
                                    alert_response.status_code))
                        else:
                            logging.warning(
                                LOG_PREFIX + " Could not execute at Opsgenie; response: " + str(
                                    alert_response.content) + " status code: " + str(alert_response.status_code))
                else:
                    logging.warning(
                        LOG_PREFIX + " Jira response is empty")
            except ValueError:
                logging.error(ValueError)
    else:
        logging.warning(
            LOG_PREFIX + " Could not execute at Jira; response: " + str(response.content) + " status code: " + str(
                response.status_code))


if __name__ == '__main__':
    main()
I'm not sure either, but I suggest using the Jira Python library, which in turn uses the requests library.
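For reference, a minimal sketch with that library (pip install jira; the server URL and credentials below are placeholders):

from jira import JIRA

# Authenticate against the Jira server and create a single issue.
jira = JIRA(server='https://jira.example.com', basic_auth=('username', 'password'))
issue = jira.create_issue(
    project='PROJ',
    summary='Created from script',
    description='...',
    issuetype={'name': 'Bug'},
)
print(issue.key)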
I found out it was a connection issue: from the VM where the Python script is executed, the Jira URL was not reachable.
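That matches the "[Errno -2] Name or service not known" in the traceback, which is a DNS failure. A quick standard-library check run from the VM (a sketch) would have surfaced it:

import socket

# Try to resolve the Jira host from this machine.
try:
    socket.getaddrinfo('jiratest.gk.gk-software.com', 443)
except socket.gaierror as e:
    print('DNS resolution failed: %s' % e)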

Conditional Statement to re-start Python script based on response from POST request

I have a Python script where I am sending a POST request for data to a server. I am expecting a particular response which indicates there is data in it. If I do not receive this response, how can I restart my script / go back to its beginning? The script is wrapped in a function which allows it to run every minute.
I would like to return to the beginning of my function if the response isn't as expected.
Script:
import sched, time, requests, jsonpickle, arcpy, json, datetime

s = sched.scheduler(time.time, time.sleep)

def do_something(sc):
    # assumption: the request template must be opened before its first read;
    # in the original snippet f2 was used here before being defined
    f2 = open('C:\Users\GeoffreyWest\Desktop\Request.json')
    data2 = jsonpickle.decode(f2.read())
    Start = datetime.datetime.now()
    # Start = datetime.datetime.strftime(data2['QueryRequest']['LastUpdatedDate'])
    DD = datetime.timedelta(minutes=5)
    earlier = Start - DD
    earlier_str = earlier.strftime('X%m/%d/%Y %H:%M:%S').replace('X0', 'X').replace('X', '')
    data2["QueryRequest"]['LastUpdatedDate'] = str(earlier_str)
    data2 = jsonpickle.encode(data2)
    BulkyItemInfo = " "
    spatial_ref = arcpy.SpatialReference(4326)
    lastpage = 'false'
    startrow = 0
    newquery = 'new'
    pagesize = 100
    url2 = "URL"
    headers2 = {'Content-type': 'text/plain', 'Accept': '/'}
    while lastpage == 'false':
        r2 = requests.post(url2, data=data2, headers=headers2)
        print r2.text
        decoded2 = json.loads(r2.text)
        f2 = open('C:\Users\GeoffreyWest\Desktop\Request.json')
        data2 = jsonpickle.decode(f2.read())
        if decoded2['Response']['LastPage'] == 'false':
            data2['QueryRequest']['PageSize'] = pagesize
            startrow = startrow + data2['QueryRequest']['PageSize']
            data2['QueryRequest']['StartRowNum'] = startrow
            data2['QueryRequest']['NewQuery'] = 'false'
            data2 = jsonpickle.encode(data2)
            print startrow
        else:
            lastpage = 'true'
    print json.dumps(decoded2, sort_keys=True, indent=4)
    items = []
    for sr in decoded2['Response']['ListOfServiceRequest']['ServiceRequest']:  # where the response succeeds or fails (snippet truncated in the question)
Output for successful response:
{
    "status": {
        "code": 311,
        "message": "Service Request Successfully Queried.",
        "cause": ""
    },
    "Response": {
        "LastPage": "false",
        "NumOutputObjects": "100",
        "ListOfServiceRequest": {
            "ServiceRequest": [
                {
Output for unsuccessful response:
{"status":{"code":311,"message":"Service Request Successfully Queried.","cause":""},"Response":{"LastPage":"true","NumOutputObjects":"0","ListOfServiceRequest":{}}}

Python requests ssl [Errno 1] _ssl.c:1428: error:1408F10B:SSL routines:SSL3_GET_RECORD:wrong version number

I'm creating a Python app using the requests module. I recently added multiprocessing to speed it up a bit, but I started to get strange errors like [Errno 1] _ssl.c:1428: error:1408F10B:SSL routines:SSL3_GET_RECORD:wrong version number or [Errno 1] _ssl.c:1428: error:1408F119:SSL routines:SSL3_GET_RECORD:decryption failed or bad record mac.
The code looks like this:
def hometables_one(conn, request, s, hostname, payload, company):
    date1 = request.query.get('date1', '')
    date2 = request.query.get('date2', '')
    prijmyCelk = 0
    vydajeCelk = 0
    neuhrPrijCelk = 0
    neuhrVydCelk = 0
    dph = 0
    dbNazev = company['dbNazev']
    nazev = company['nazev']
    if date1 and date2:
        try:
            r = s.get("%s/c/%s/faktura-vydana/(duzpPuv between %s %s)/$sum.json" % (hostname, dbNazev, date1[0], date2[0]), params=payload, verify=False)
            r.raise_for_status()
        except requests.exceptions.RequestException as err:
            # response.write(ujson.dumps({"success": False, "errors": {"reason": str(err)}}))
            # return response
            conn.send({"success": False, "errors": {"reason": str(err)}})
            conn.close()
            return None
    else:
        try:
            r = s.get("%s/c/%s/faktura-vydana/$sum.json" % (hostname, dbNazev), params=payload, verify=False)
            r.raise_for_status()
        except requests.exceptions.RequestException as err:
            # response.write(ujson.dumps({"success": False, "errors": {"reason": str(err)}}))
            # return response
            conn.send({"success": False, "errors": {"reason": str(err)}})
            conn.close()
            return None
    obj_vydana = r.json()
    data_vydana = obj_vydana['winstrom']['sum']['sumDoklUcetni']['values']
    prijmyCelk = float(data_vydana['sumDoklCelkem']['value'])
    neuhrVydCelk = float(data_vydana['sumDoklZbyvaUh']['value'])
    dph_vydane = float(data_vydana['sumDoklDphCelk']['value'])
    if date1 and date2:
        try:
            r = s.get("%s/c/%s/faktura-prijata/(duzpPuv between %s %s)/$sum.json" % (hostname, dbNazev, date1[0], date2[0]), params=payload, verify=False)
            r.raise_for_status()
        except requests.exceptions.RequestException as err:
            # response.write(ujson.dumps({"success": False, "errors": {"reason": str(err)}}))
            # return response
            conn.send({"success": False, "errors": {"reason": str(err)}})
            conn.close()
            return None
    else:
        try:
            r = s.get("%s/c/%s/faktura-prijata/$sum.json" % (hostname, dbNazev), params=payload, verify=False)
            r.raise_for_status()
        except requests.exceptions.RequestException as err:
            # response.write(ujson.dumps({"success": False, "errors": {"reason": str(err)}}))
            # return response
            conn.send({"success": False, "errors": {"reason": str(err)}})
            conn.close()
            return None
    obj_prijata = r.json()
    data_prijata = obj_prijata['winstrom']['sum']['sumDoklUcetni']['values']
    vydajeCelk = float(data_prijata['sumDoklCelkem']['value'])
    neuhrPrijCelk = float(data_prijata['sumDoklZbyvaUh']['value'])
    dph_prijate = float(data_prijata['sumDoklDphCelk']['value'])
    if prijmyCelk != 0:
        result = {
            "corporation": nazev,
            "dbName": dbNazev,
            "prijmyCelk": "%s €" % prijmyCelk,
            "nakladyCelk": "%s €" % vydajeCelk,
            "ziskCelk": "%s €" % (prijmyCelk - vydajeCelk),
            "marzaCelk": ((prijmyCelk - vydajeCelk) / prijmyCelk * 100),
            "neuhrVydCelk": "%s €" % neuhrVydCelk,
            "neuhrPrijCelk": "%s €" % neuhrPrijCelk,
            "dph": "%s €" % (dph_vydane - dph_prijate),
        }
    else:
        result = {
            "corporation": nazev,
            "dbName": dbNazev,
            "prijmyCelk": "%s €" % prijmyCelk,
            "nakladyCelk": "%s €" % vydajeCelk,
            "ziskCelk": "%s €" % (prijmyCelk - vydajeCelk),
            "marzaCelk": 0,
            "neuhrVydCelk": "%s €" % neuhrVydCelk,
            "neuhrPrijCelk": "%s €" % neuhrPrijCelk,
            "dph": "%s €" % (dph_vydane - dph_prijate),
        }
    conn.send(result)
    conn.close()
    return None

#####################################################################################

def hometables(request):
    s = requests.Session()
    response = HTTPResponse()
    hostname = request.query.get('hostname', '')[0]
    auth = request.query.get('auth', '')[0]
    p_queue = []
    result = []
    json_r = {"success": True}
    payload = {'authSessionId': request.query.get('auth', '')[0]}
    try:
        r = s.get("%s/c.json" % hostname, params=payload, verify=False)
        r.raise_for_status()
    except requests.exceptions.RequestException as err:
        response.write(ujson.dumps({"success": False, "errors": {"reason": str(err)}}))
        return response
    obj = r.json()
    data = obj['companies']['company']
    data = make_list(data)
    parent_conn, child_conn = Pipe()
    for company in data:
        # create a new process running hometables_one for each company
        p_queue.append(Process(target=hometables_one, args=(child_conn, request, s, hostname, payload, company)))
        p_queue[-1].start()
    for p in p_queue:
        received_data = parent_conn.recv()
        if "success" not in received_data:
            result.append(received_data)  # stray trailing `s` in the original removed
            p.join()
        else:
            response.write(ujson.dumps(received_data))  # error in hometables_one function
            return response
    json_r["data"] = result
    response.write(ujson.dumps(json_r))
    return response
In this part
try:
    r = s.get("%s/c.json" % hostname, params=payload, verify=False)
    r.raise_for_status()
except requests.exceptions.RequestException as err:
    response.write(ujson.dumps({"success": False, "errors": {"reason": str(err)}}))
    return response
obj = r.json()
data = obj['companies']['company']
data = make_list(data)
I get a JSON response with all companies currently in the system and then run the hometables_one function for each of them. The final data may look like this:
[{"createDt":"2014-01-28T00:00:00+01:00","dbNazev":"sveatlo","id":"4","licenseGroup":"null","nazev":"Sveatlo","show":"true","stavEnum":"ESTABLISHED","watchingChanges":"false"}]
or like this:
[{"createDt":"2014-01-28T00:00:00+01:00","dbNazev":"sveatlo","id":"4","licenseGroup":"null","nazev":"Sveatlo","show":"true","stavEnum":"ESTABLISHED","watchingChanges":"false"},{"createDt":"2014-01-28T00:00:00+01:00","dbNazev":"sveatlo1","id":"4","licenseGroup":"null","nazev":"Sveatlo1","show":"true","stavEnum":"ESTABLISHED","watchingChanges":"false"}]
In the first case, when there is just one item, the hometables_one function runs without any problems, but adding another item results in the error [Errno 1] _ssl.c:1428: error:1408F10B:SSL routines:SSL3_GET_RECORD:wrong version number or [Errno 1] _ssl.c:1428: error:1408F119:SSL routines:SSL3_GET_RECORD:decryption failed or bad record mac. Also, when I run the code without multiprocessing, i.e. with the body of hometables_one inlined in the for loop of the hometables function, everything works.
Why am I getting these errors? Could anybody please help?
Thanks for any answer.
I have experienced similar problems. I think this error is the result of multiple processes trying to access the same SSL connection. What you can try is to introduce a random delay for each process before they fire off the request:
time.sleep(random.randrange(10))
r = s.get("%s/c.json" % hostname, params=payload, verify=False)
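A hedged alternative sketch, keeping the question's names: the requests.Session (and the SSL sockets it pools) is created in the parent here and inherited by every forked worker, so another option is to give each process its own session:

def hometables_one(conn, request, s, hostname, payload, company):
    # Hedged sketch: ignore the inherited session and build a fresh one per
    # process, so no two workers ever share the same SSL connection.
    s = requests.Session()
    # ... rest of the function unchanged ...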
