I want to execute several functions, gather their exceptions (if there are any), and raise a compound exception, calling as many of the functions as possible without breaking after one exception. For example, say I have
def f():
    do_one()
    do_two()
    do_three()
The do_i functions don't depend on each other's status. The most obvious way to do what I want is this:
def f():
    errors = []
    for do_i in [do_one, do_two, do_three]:
        try:
            do_i()
        except Exception as e:
            errors.append(e)
    if errors:
        raise Exception(';'.join(map(str, errors)))
or slightly better:
def catch_error(arr, f, *args, **kwargs):
    try:
        return f(*args, **kwargs)
    except Exception as e:
        arr.append(e)
        return None

def f():
    errors = []
    for do_i in [do_one, do_two, do_three]:
        catch_error(errors, do_i)
    if errors:
        raise Exception(';'.join(map(str, errors)))
but this is still ugly. Is there a Pythonic way to do this that I'm missing, maybe with clever use of a with statement?
Edit: In a dream world Python would have this:
errors = []
awesome_block(errors):
    do_one()
    do_two()
    do_three()
return 'yes!' if not errors else ';'.join(map(str, errors))
You could rewrite your function as a context manager, which simplifies your code a bit. I've kept your convention of passing in a list, although the context manager also yields its internal list, so you can use it later.
from contextlib import contextmanager

@contextmanager
def catch_errors(error_list=None):
    error_list = error_list if error_list is not None else []
    try:
        yield error_list
    except Exception as e:
        error_list.append(e)

error_list = []

with catch_errors(error_list):
    raise Exception("First exception")

with catch_errors(error_list):
    raise ValueError("Second exception")

if error_list:
    raise Exception(";".join(map(repr, error_list)))
I think repr is more useful than str here. @contextmanager lets you use the function in a with statement while only having to write it as a generator.
If you don't pass a list to the generator, then you need to keep track of the returned list.
with catch_errors() as errors1:
    raise Exception("First exception")

print errors1  # [Exception('First exception',)]
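As an aside, on Python 3.11 and later the standard library has ExceptionGroup, which is essentially the compound exception being asked for. A minimal sketch, assuming Python 3.11+ and the do_one/do_two/do_three functions from the question:

# Minimal sketch, assuming Python 3.11+ and the hypothetical do_one/do_two/do_three.
errors = []
for do_i in (do_one, do_two, do_three):
    try:
        do_i()
    except Exception as e:
        errors.append(e)
if errors:
    # ExceptionGroup carries all collected exceptions at once
    raise ExceptionGroup("some calls failed", errors)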
I have a function main_func, from which I am calling another function, and so on.
class Error(Exception):
    """Base class for other exceptions"""
    pass

def main_func():
    return sub_func()

def sub_func():
    return sub_sub_func()

def sub_sub_func():
    return sub_sub_sub_func()

def sub_sub_sub_func():
    try:
        x = len(10)
        res = 'b'
    except:
        raise Error
    return res

main_func()
As you can see, in sub_sub_sub_func() I have added the line x = len(10), which will cause an exception.
What I want is that if this happens, control should jump directly back to main_func() and return a flag (str) of 'fail'.
I looked into defining custom exceptions, but it didn't help me.
I want to return right after I raise.
len(10) will raise a TypeError; you can catch this specific exception in your main_func and do whatever needs to happen there.
Please note that you need to create an instance of your error class when raising: Error().
class Error(Exception):
    """Base class for other exceptions"""
    pass

def main_func():
    try:
        return sub_func()
    except Error as e:
        # The raised Error will be caught here.
        # Do the stuff that needs to happen here.
        return 'fail'

def sub_func():
    return sub_sub_func()

def sub_sub_func():
    return sub_sub_sub_func()

def sub_sub_sub_func():
    try:
        x = len(10)  # Will raise a `TypeError`
        res = 'b'
    except:
        # The `TypeError` raised above ends up here
        raise Error()
    return res

main_func()
Note: your custom Error hides a lot of information that can come in handy later, namely what happened and what actually raised the error. It is best to attach the original TypeError to Error as an inner exception.
try:
    x = len(10)
except Exception as e:
    raise Error(e)
In theory, with your current code even a potential out-of-memory error would be converted to your Error without any record of what happened.
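On Python 3 the same wrapping is usually written with exception chaining via raise ... from, which keeps the original exception attached as __cause__; a minimal sketch, reusing the Error class from above:

try:
    x = len(10)
except TypeError as e:
    # The original TypeError stays available as __cause__ and in the traceback
    raise Error("could not compute the length") from e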
I would like to add context to an exception like this:
def process(vals):
    for key in vals:
        try:
            do_something(vals[key])
        except Exception as ex:  # base class. Not sure what to expect.
            raise  # with context regarding the key that was being processed.
I found a way that is uncharacteristically long-winded for Python. Is there a better way than this?
try:
    do_something(vals[key])
except Exception as ex:
    args = list(ex.args)
    if args:
        args[0] = "{}: {}".format(key, args[0])
    ex.args = tuple(args)
    raise  # Will re-throw the exception with the new args[0]
The first item in ex.args is always the message -- if there is any. (Note for some exceptions, such as the one raised by assert False, ex.args is an empty tuple.)
I don't know of a cleaner way to modify the message than reassigning a new tuple to ex.args. (We can't modify the tuple since tuples are immutable).
The code below is similar to yours, except it constructs the tuple without using an intermediate list, it handles the case when ex.args is empty, and to make the code more readable, it hides the boilerplate inside a context manager:
import contextlib

def process(val):
    with context(val):
        do_something(val)

def do_something(val):
    # assert False
    return 1/val

@contextlib.contextmanager
def context(msg):
    try:
        yield
    except Exception as ex:
        msg = '{}: {}'.format(msg, ex.args[0]) if ex.args else str(msg)
        ex.args = (msg,) + ex.args[1:]
        raise

process(0)
yields a stack trace with this as the final message:
ZeroDivisionError: 0: division by zero
You could just raise a new exception:
def process(vals):
    for key in vals:
        try:
            do_something(vals[key])
        except Exception as ex:
            raise Error(key, context=ex)
On Python 3 you don't need to provide the old exception explicitly; it will be available as the __context__ attribute on the new exception object, and the default exception handler will report it automatically:
def process(vals):
    for key in vals:
        try:
            do_something(vals[key])
        except Exception:
            raise Error(key)
In your case, you should probably use the explicit raise Error(key) from ex syntax that sets the __cause__ attribute on the new exception; see Exception Chaining and Embedded Tracebacks.
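For example, a minimal sketch of that form, assuming the same hypothetical Error class and do_something as above:

def process(vals):
    for key in vals:
        try:
            do_something(vals[key])
        except Exception as ex:
            # __cause__ is set to ex; the traceback shows both exceptions
            raise Error(key) from ex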
If the only issue is the verbosity of the message-amending code in your question, you could encapsulate it in a function:
try:
    do_something(vals[key])
except Exception:
    reraise_with_context(key=key)  # reraise with extra info
where:
import inspect
import sys

def reraise_with_context(**context):
    ex = sys.exc_info()[1]
    if not context:  # use locals from the caller's scope
        context = inspect.currentframe().f_back.f_locals
    extra_info = ", ".join("%s=%s" % item for item in context.items())
    amend_message(ex, extra_info)
    raise

def amend_message(ex, extra):
    msg = '{} with context: {}'.format(ex.args[0], extra) if ex.args else extra
    ex.args = (msg,) + ex.args[1:]
I want to do:
try:
    do()
except:
    do2()
except:
    do3()
except:
    do4()
If do() fails, execute do2(); if do2() fails too, execute do3(), and so on.
Best regards
If you really don't care about the exceptions, you could loop over cases until you succeed:
for fn in (do, do2, do3, do4):
    try:
        fn()
        break
    except:
        continue
This at least avoids having to indent once for every case. If the different functions need different arguments you can use functools.partial to 'prime' them before the loop.
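For example, here is a minimal sketch; fetch and fetch_backup are hypothetical stand-ins for do, do2, and so on:

from functools import partial

def fetch(url, timeout):        # hypothetical stand-in for do()
    raise IOError("primary source is down")

def fetch_backup(url):          # hypothetical stand-in for do2()
    return "data from backup"

# Prime each function with its own arguments, then try them in order.
attempts = (partial(fetch, "http://example.com", timeout=5),
            partial(fetch_backup, "http://backup.example.com"))

result = None
for fn in attempts:
    try:
        result = fn()
        break
    except Exception:
        continue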
I'd write a quick wrapper function first() for this.
usage: value = first([f1, f2, f3, ..., fn], default='All failed')
#!/usr/bin/env python

def first(flist, default=None):
    """Try each function in `flist` until one does not throw an exception, and
    return the return value of that function. If all functions throw exceptions,
    return `default`.

    Args:
        flist - list of functions to try
        default - value to return if all functions fail

    Returns:
        return value of the first function that does not throw an exception, or
        `default` if all throw exceptions.

    TODO: Also accept a list of (f, (exceptions)) tuples, where f is the
    function as above and (exceptions) is a tuple of exceptions that f should
    expect. This allows you to still re-raise unexpected exceptions.
    """
    for f in flist:
        try:
            return f()
        except:
            continue
    else:
        return default

# Testing.
def f():
    raise TypeError

def g():
    raise IndexError

def h():
    return 1

# We skip two exception-throwing functions and return the value of the last.
assert first([f, g, h]) == 1
assert first([f, g, f], default='monty') == 'monty'
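Here is a minimal sketch of that TODO, assuming each entry is an (f, expected_exceptions) pair so that anything unexpected still propagates:

def first_expected(flist, default=None):
    """Like first(), but each entry is (f, expected_exceptions);
    any other exception is re-raised."""
    for f, expected in flist:
        try:
            return f()
        except expected:
            continue
    return default

# Reuses f, g, h from the tests above.
assert first_expected([(f, TypeError), (g, (IndexError,)), (h, ())]) == 1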
It seems like a really odd thing to want to do, but I would probably loop over the functions and break out when no exception is raised:
for func in [do, do2, do3]:
    try:
        func()
    except Exception:
        pass
    else:
        break
Here is the simplest way I found: just nest each try under the previous except.
try:
    do()
except:
    try:
        do2()
    except:
        do3()
You should specify the type of the exception you are trying to catch each time.
try:
    do()
except TypeError:  # for example, the first one raises TypeError
    do_2()
except KeyError:  # for example, the second one raises KeyError
    do_3()
and so on.
If you want multiple try statements, you can do it like this, including the except statements: extract (refactor) your statements into functions, and use the short-circuiting of and and or to decide when to move on.
def a():
    try: pass  # a code
    except: pass  # or raise
    else: return True

def b():
    try: pass  # b code
    except: pass  # or raise
    else: return True

def c():
    try: pass  # c code
    except: pass  # or raise
    else: return True

def d():
    try: pass  # d code
    except: pass  # or raise
    else: return True

def main():
    try:
        a() and b() or c() or d()
    except:
        pass
For reference, a single try statement can also have several except clauses, each handling a different exception type:

import sys

try:
    f = open('myfile.txt')
    s = f.readline()
    i = int(s.strip())
except OSError as err:
    print("OS error: {0}".format(err))
except ValueError:
    print("Could not convert data to an integer.")
except:
    print("Unexpected error:", sys.exc_info()[0])
    raise
I've been working in Python and ran into something that must be a common occurrence.
I have five statements that all fall into a common pitfall of raising
FooException and BarException. I want to run each of them, guarding against
these exceptions but continuing to process even if an exception is raised after
some handling is done. Now, I could do this like so:
try:
    foo()
except (FooException, BarException):
    pass

try:
    bar()
except (FooException, BarException):
    pass

try:
    baz()
except (FooException, BarException):
    pass

try:
    spam()
except (FooException, BarException):
    pass

try:
    eggs()
except (FooException, BarException):
    pass
but that is really verbose and in extreme violation of DRY. A rather brute-force
and obvious solution is something like this:
import string

def wish_i_had_macros_for_this(statements, exceptions, gd, ld):
    """Execute statements inside try/except, handling exceptions, with gd and ld
    as the global dictionary and local dictionary.

    statements is a list of strings to be executed as statements
    exceptions is a list of strings that resolve to Exceptions
    gd is a globals() context dictionary
    ld is a locals() context dictionary

    A list is returned containing, for each statement, either None or the
    Exception that was raised during execution and wasn't guarded against.
    """
    s = """
try:
    $STATEMENT
except (%s):
    pass
""" % ','.join(exceptions)
    t = string.Template(s)
    code = [t.substitute({'STATEMENT': stmt}) for stmt in statements]
    elist = list()
    for c in code:
        try:
            exec c in gd, ld
            elist.append(None)
        except Exception, e:
            elist.append(e)
    return elist
With usage along the lines of:
>>> results = wish_i_had_macros_for_this(
...     ['foo()', 'bar()', 'baz()', 'spam()', 'eggs()'],
...     ['FooException', 'BarException'],
...     globals(),
...     locals())
>>> results
[None, None, None, SpamException, None]
Is there a better way?
def execute_silently(fn, exceptions=(FooException, BarException)):
    try:
        fn()
    except Exception as e:
        if not isinstance(e, exceptions):
            raise

execute_silently(foo)
execute_silently(bar)
# ...

# or even:
for fn in (foo, bar, ...):
    execute_silently(fn)
What about this?
#!/usr/bin/env python

def foo():
    print "foo"

def bar():
    print "bar"

def baz():
    print "baz"

for f in [foo, bar, baz]:
    try:
        f()
    except (FooException, BarException):
        pass
This version allows statement execution as well:
from contextlib import contextmanager
from functools import partial

@contextmanager
def exec_silent(exc=(StandardError,)):
    try:
        yield
    except exc:
        pass

silent_foobar = partial(exec_silent, (FooException, BarException))

with silent_foobar():
    print 'foo'
    foo()

with silent_foobar():
    print 'bar'
    bar()
Can you suggest a way to code a drop-in replacement for the "with" statement that will work in Python 2.4?
It would be a hack, but it would allow me to port my project to Python 2.4 more nicely.
EDIT:
Removed irrelevant metaclass sketch
Just use try-finally.
Really, this may be nice as a mental exercise, but if you actually do it in code you care about you will end up with ugly, hard to maintain code.
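For instance, the Python 2.4 equivalent of with lock: ... is just the acquire/release spelled out by hand; a minimal sketch, assuming a threading.Lock named lock:

import threading

lock = threading.Lock()

lock.acquire()
try:
    pass  # the code that would have been the body of the with block
finally:
    lock.release()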
You could (ab)use decorators to do this, I think. The following works, e.g.:
import sys

def execute_with_context_manager(man):
    def decorator(f):
        target = man.__enter__()
        exc = True
        try:
            try:
                f(target)
            except:
                exc = False
                if not man.__exit__(*sys.exc_info()):
                    raise
        finally:
            if exc:
                man.__exit__(None, None, None)
        return None
    return decorator

@execute_with_context_manager(open("/etc/motd"))
def inside(motd_file):
    for line in motd_file:
        print line,
(Well, in Python 2.4 file objects don't have __enter__ and __exit__ methods, but otherwise it works)
The idea is you're replacing the with line in:
with bar() as foo:
    do_something_with(foo)
    do_something_else_with(foo)
    # etc...
with the decorated function "declaration" in:
@execute_with_context_manager(bar())
def dummyname(foo):
    do_something_with(foo)
    do_something_else_with(foo)
    # etc...
but getting the same behaviour (the do_something_... code executed). Note the decorator changes the function declaration into an immediate invocation which is more than a little evil.
Since you need to exit the context manager both when errors occur and when they don't, I don't think it's possible to handle the generic case with metaclasses, or in fact at all. You are going to need try/finally blocks for that.
But maybe it's possible to do something else in your case; that depends on what you use the context manager for.
Using __del__ can help in some cases, such as deallocating resources, but since you can't be sure it gets called, it can only be used if the resources you need to release would be released anyway when the program exits. It also won't work if you are handling exceptions in the __exit__ method.
I guess the cleanest method is to wrap the whole context management in a sort of context managing call, and extract the code block into a method. Something like this (untested code, but mostly stolen from PEP 343):
import sys

def call_as_context_manager(mgr, function):
    exit = mgr.__exit__
    value = mgr.__enter__()
    exc = True
    try:
        try:
            function(value)
        except:
            exc = False
            if not exit(*sys.exc_info()):
                raise
    finally:
        if exc:
            exit(None, None, None)
How about this?
import sys

def improvize_context_manager(*args, **kwargs):
    assert (len(args) + len(kwargs)) == 1
    if args:
        context_manager = args[0]
        as_ = None
    else:  # It's in kwargs
        (as_, context_manager) = kwargs.items()[0]

    def decorator(f):
        exit_ = context_manager.__exit__  # Not calling it yet
        enter_ = context_manager.__enter__()
        exc = True
        try:
            try:
                if as_:
                    f(**{as_: enter_})
                else:
                    f()
            except:
                exc = False
                if not exit_(*sys.exc_info()):
                    raise
        finally:
            if exc:
                exit_(None, None, None)
        return None

    return decorator
Usage:
@improvize_context_manager(lock)
def null():
    do(stuff)
Which parallels the with keyword without as.
Or:
@improvize_context_manager(my_lock=lock)
def null(my_lock):
    do(stuff_with, my_lock)
Which parallels the with keyword with the as.
If you are OK with using def just to get a block, and decorators that immediately execute, you could use the function signature to get something more natural for the named case.
import sys

def with(func):  # legal name here: "with" is not a reserved word in Python 2.4
    def decorated(body=func):
        contexts = body.func_defaults
        try:
            exc = None, None, None
            try:
                for context in contexts:
                    context.__enter__()
                body()
            except:
                exc = sys.exc_info()
                raise
        finally:
            for context in reversed(contexts):
                context.__exit__(*exc)
    decorated()
class Context(object):
    def __enter__(self):
        print "Enter %s" % self

    def __exit__(self, *args):
        print "Exit %s(%s)" % (self, args)
x = Context()
@with
def _(it=x):
    print "Body %s" % it
@with
def _(it=x):
    print "Body before %s" % it
    raise "Nothing"
    print "Body after %s" % it