I'm trying the following:
try:
    scroll_token = response["scroll_token"]
except ValueError as e:
    logging.info(f'could not find "scroll_token" {e}')
    try:
        scroll_token = response["scroll_id"]
    except ValueError as e:
        logging.info(f'could not find "scroll_id" {e}')
Basically, if the response doesn't have "scroll_token", I want it to check whether there is "scroll_id" in the response instead. But for some reason this isn't working; it just keeps failing at the first try and says:
scroll_token = response["scroll_token"]
KeyError: 'scroll_token'
You are catching the wrong exception; it's KeyError that you should expect to be raised.
try:
    scroll_token = response["scroll_token"]
except KeyError as e:
    logging.info(f'could not find "scroll_token" {e}')
    try:
        scroll_token = response["scroll_id"]
    except KeyError as e:
        logging.info(f'could not find "scroll_id" {e}')
You can write this more simply with a loop.
for k in ["scroll_token", "scroll_id"]:
    try:
        scroll_token = response[k]
        break
    except KeyError:
        logging.info("could not find %s", k)
else:
    # What to do if neither key is found?
    ...
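As a variation (just a sketch, reusing the response and logging names from the question), you can avoid exceptions entirely by testing for the keys and taking the first one that is present:

import logging

# Hypothetical non-exception variant: pick the first key that exists in the response.
scroll_token = next(
    (response[k] for k in ("scroll_token", "scroll_id") if k in response),
    None,  # fallback when neither key is found
)
if scroll_token is None:
    logging.info('response has neither "scroll_token" nor "scroll_id"')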
I am using a thread pool to send requests in parallel: I generate a list of URLs, fetch them all concurrently, parse some data out of each response into a couple of dicts, cross-map keys across those dicts, and then write the result back to a pandas.DataFrame.
def fetch_point(point_url):
    try:
        r = requests.get(point_url, headers=self.headers)
        r.raise_for_status()
    except requests.exceptions.HTTPError as errh:
        logging.error(f'HTTP Error: {errh}')
    except requests.exceptions.ConnectionError as errc:
        logging.error(f'Connection Error: {errc}')
    except requests.exceptions.Timeout as errt:
        logging.error(f'Timeout Error: {errt}')
    except requests.exceptions.RequestException as e:
        logging.error(e)
        raise SystemExit(e)
    result = r.json().get('data')
    # print(result)
    building_url = result['building']
    return point_url, building_url
def fetch_building(building_url):
    try:
        r = requests.get(building_url, headers=self.headers)
        r.raise_for_status()
    except requests.exceptions.HTTPError as errh:
        logging.error(f'HTTP Error: {errh}')
    except requests.exceptions.ConnectionError as errc:
        logging.error(f'Connection Error: {errc}')
    except requests.exceptions.Timeout as errt:
        logging.error(f'Timeout Error: {errt}')
    except requests.exceptions.RequestException as e:
        logging.error(e)
        raise SystemExit(e)
    result = r.json().get('data')
    building_name = result['name']
    return building_url, building_name
pool = ThreadPoolExecutor()

point_urls = df.loc[~df['Point Url'].isnull(), 'Point Url'].to_list()

building_urls = {}
for point_url, building_url in pool.map(fetch_point, point_urls):
    building_urls[point_url] = building_url

building_urls_list = building_urls.values()

building_names = {}
for building_url, building_name in pool.map(fetch_building, building_urls_list):
    building_names[building_url] = building_name

point_building_map = {k: building_names[building_urls[k]] for k in building_urls}

for key in point_building_map.keys():
    df.loc[df['Point Url'] == key, 'Building'] = point_building_map[key]
I am wondering if there is a more optimized approach I could consider. Should I go for asyncio/aiohttp instead of ThreadPoolExecutor?
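For comparison, here is a minimal asyncio/aiohttp sketch of the same two-step fetch. It is not the questioner's actual code: it assumes the same response shape ('data' containing 'building' and 'name') and a plain headers dict in place of self.headers.

import asyncio
import aiohttp

async def fetch_json(session, url):
    # One GET request; raise for HTTP errors so the caller can decide what to do.
    async with session.get(url) as resp:
        resp.raise_for_status()
        return (await resp.json()).get('data')

async def point_to_building(session, point_url):
    # Step 1: point -> building URL; step 2: building URL -> building name.
    point_data = await fetch_json(session, point_url)
    building_data = await fetch_json(session, point_data['building'])
    return point_url, building_data['name']

async def map_points(point_urls, headers):
    async with aiohttp.ClientSession(headers=headers) as session:
        tasks = [point_to_building(session, url) for url in point_urls]
        return dict(await asyncio.gather(*tasks))

# point_building_map = asyncio.run(map_points(point_urls, headers))

Whether this is worth it depends on the workload; for a few hundred I/O-bound requests a ThreadPoolExecutor is usually fast enough, and asyncio mainly pays off at larger concurrency.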
I am creating a Flask application, and whenever I try to raise a UnicodeDecodeError from the class Validation and return to my main try block, instead of going to the UnicodeDecodeError exception it goes to the Exception block.
This is my main block, from which I am calling my class Validation and expecting a UnicodeDecodeError:
try:
    validation = Validation(request.files['csvfile'])
    validation.checkextension()
    # columns = validation.check_columns(csv)
    return redirect(request.url)
except UnicodeDecodeError:
    # return Response("Error : %s" % UnicodeDecodeError)
    return "Please enter a .csv extension"
except Exception as e:
    print("abc", repr(e))
    # return Response("Error : %s" % Exception)
    return "Exception"
This is my class, from which I am raising the error:
class Validation:
    def __init__(self, file):
        self.file = file
        self.location = "./uploads/" + str(file.filename)

    def checkextension(self):
        try:
            self.file.save(os.path.join("uploads", self.file.filename))
            data = pd.read_csv(self.location)
        except UnicodeDecodeError:
            os.remove("./uploads/" + str(self.file.filename))
            raise UnicodeDecodeError
        except Exception:
            print(Exception)
            raise Exception
Also, when I print the exception in the except Exception as e: block, I get this output:
TypeError('function takes exactly 5 arguments (0 given)')
I am getting a TypeError, but I am raising UnicodeDecodeError.
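A likely cause (not stated in the question, but consistent with the output) is that raise UnicodeDecodeError raises the class itself, so Python instantiates it with no arguments, and UnicodeDecodeError's constructor requires five (encoding, object, start, end, reason); that constructor call is what produces the TypeError before the intended exception ever leaves checkextension. A minimal sketch of the method from the question using a bare raise to re-raise the original exception instead:

def checkextension(self):
    try:
        self.file.save(os.path.join("uploads", self.file.filename))
        data = pd.read_csv(self.location)
    except UnicodeDecodeError:
        os.remove("./uploads/" + str(self.file.filename))
        raise  # bare raise re-raises the caught UnicodeDecodeError unchanged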
I have this situation:
datelist = pd.date_range(dateFrom, dateTo, dateperiods)
while i < len(datelist):
    Date = datelist[i].floor('D')
    print(f'{Date} STARTED')
    if i != 0:
        Date2 = datelist[i-1].floor('D')
    else:
        Date2 = Date
    i = i + 1
    try:
        **COMPLEX AND LONG CODE THAT USE DATE AS A PARM**
    except Exception as inst:
        print(inst)
        print(f'--------elaboration for {Date} failed. started elaboration for next date--------')
        if i < len(datelist):
            Date = datelist[i].floor('D')
            print(f'--{Date} STARTED')
            globalsdatelist.append(Date)
            Date2 = datelist[i-1].floor('D')
            i = i + 1
        else:
            print('request failed')
            break
        **COMPLEX AND LONG CODE THAT USE DATE AS A PARM**
What I want is for the failures to keep being handled while i < len(datelist), not just the first time an error occurs.
Is there an easy way to do it?
Thank you so much.
Try this:
datelist = pd.date_range(dateFrom, dateTo, dateperiods)
while i < len(datelist):
    try:
        **COMPLEX AND LONG CODE THAT USE DATE AS A PARM**
    except Exception as e:
        print(e)
        # handle exception case appropriately
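Spelled out a little more (only a sketch, reusing the question's dateFrom, dateTo, dateperiods and floor('D') usage), a for loop removes the manual index handling and keeps going after every failure:

import pandas as pd

datelist = pd.date_range(dateFrom, dateTo, dateperiods)  # same call as in the question
globalsdatelist = []

prev_date = None
for ts in datelist:
    date = ts.floor('D')
    date2 = prev_date if prev_date is not None else date
    prev_date = date
    print(f'{date} STARTED')
    try:
        pass  # ** COMPLEX AND LONG CODE THAT USES date / date2 AS PARAMETERS **
    except Exception as inst:
        print(inst)
        print(f'-------- elaboration for {date} failed, continuing with the next date --------')
        globalsdatelist.append(date)
        continue  # keep looping over the remaining dates instead of stopping

The try/except sits inside the loop body, so every date gets its own attempt and its own error handling.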
The error message is not printed.
What's the problem? ㅠㅠㅠㅠ
uid = id
upw = pw

try:
    driver.find_element_by_xpath('//*[@id="userID"]').send_keys(uid)
    action.reset_actions()
    driver.find_element_by_xpath('//*[@id="userPWD"]').send_keys(upw)
    driver.find_element_by_xpath('//*[@id="btnLogin"]').click()
except Exception as e:
    print("{} 계정 로그인이 실패하였습니다.".format(uid))  # "Login for account {uid} failed."
In order to print the exception, you need to actually print e, not uid:
.....
except Exception as e:
    print("{} 계정 로그인이 실패하였습니다.".format(uid))
    print(e)
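As a variation (just a sketch, with a hypothetical login() standing in for the three Selenium calls above), logging.exception records the message together with the full traceback:

import logging

uid, upw = "user", "secret"  # placeholder credentials for the sketch

def login(user, password):
    # hypothetical stand-in for the Selenium calls in the question
    raise RuntimeError("invalid credentials")

try:
    login(uid, upw)
except Exception:
    # logs the message plus the traceback of the active exception
    logging.exception("Login for account %s failed", uid)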
I have a function that geocodes an address. I don't want this function to die so I am trying to catch the error and return a tuple instead.
However I also want to differentiate between errors, for this I use try/except in multiple places.
Is there such a thing as too many try/except? How would you optimize this function?
Here is my code:
def geocode(address):
    js = ''
    try:
        urlq = urllib.urlencode({'address': address, 'sensor': 'false'})
    except Exception, e:
        return (False, "Error url-encoding address. Error:%s" % e, js, 'failed')
    try:
        f = urllib2.urlopen(GEO_URL + urlq)
        d = f.read()
    except Exception, e:
        return (False, "Error making connection. Error:%s" % e, js, 'failed')
    #
    try:
        js = json.loads(d)
    except Exception, e:
        return (False, "Error converting JSON. Error:%s" % e, js, 'failed')
    return (True, '', js, 'ok')
Catching a bare Exception is almost always a bad idea. You want to specify which errors you expect to catch.
try:
    ...
except URLError, e:
    return (False, "Error making connection. Error:%s" % e, js, 'failed')
except ValueError, e:
    return (False, "Error converting JSON. Error:%s" % e, js, 'failed')
except UnicodeEncodeError, e:
    return (False, "Error unicode formatting. Error:%s" % e, js, 'failed')
Also, returning a tuple to indicate an error is usually not preferred. Consider putting the try/except in the calling function and letting the error propagate up.
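A minimal sketch of that last suggestion, written with Python 3's urllib.request rather than the question's urllib2, with a placeholder GEO_URL and a hypothetical GeocodeError: geocode raises one well-named exception, and the caller decides how to report it.

import json
import urllib.error
import urllib.parse
import urllib.request

GEO_URL = "https://example.invalid/geocode?"  # placeholder endpoint for the sketch

class GeocodeError(Exception):
    """Raised when an address cannot be geocoded."""

def geocode(address):
    urlq = urllib.parse.urlencode({'address': address, 'sensor': 'false'})
    try:
        with urllib.request.urlopen(GEO_URL + urlq) as f:
            return json.loads(f.read())
    except (urllib.error.URLError, ValueError) as e:
        # wrap the low-level error so callers see a single domain-specific exception
        raise GeocodeError("could not geocode %r" % address) from e

# caller
try:
    js = geocode("1600 Amphitheatre Parkway")
except GeocodeError as e:
    print(e)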