So I have this thing I don't know if I can do in Python: I want to handle access to undefined instance attributes. Example:
class example:
    def __init__(self):
        self.variableA = 'jaa'

    def set_b(self):
        self.variableB = 'nope'
Here, if I instantiate an object x of example and try to access x.variableB without calling x.set_b() first, I will get an AttributeError. Is there any way to dig into this exception? I would like to raise an error with a custom message.
May I suggest the following:
class Example:
    def __init__(self):
        self.variableA = 'jaa'
        self._variableB = None

    @property
    def variableB(self):
        if self._variableB is None:
            # better would probably be:
            # raise AttributeError("Sorry, you need to set this using `set_b`")
            return 'Sorry, you need to set this using `set_b` first'
        return self._variableB

    @variableB.setter
    def variableB(self, value):
        raise AttributeError("Sorry, you need to set this using `set_b`")

    def set_b(self):
        self._variableB = 'nope'
example = Example()
print(example.variableB)  # Sorry, you need to set this using `set_b` first

try:
    example.variableB = 'mymy'
except Exception as error:
    print(repr(error))  # AttributeError('Sorry, you need to set this using `set_b`')

example.set_b()
print(example.variableB)  # nope
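An alternative worth mentioning (my sketch, not part of the suggestion above) is __getattr__, which Python only invokes when normal attribute lookup fails, so the custom message automatically goes away once set_b has been called:

class Example:
    def __init__(self):
        self.variableA = 'jaa'

    def __getattr__(self, name):
        # only called when normal attribute lookup fails
        if name == 'variableB':
            raise AttributeError("Sorry, you need to set this using `set_b` first")
        raise AttributeError(name)

    def set_b(self):
        self.variableB = 'nope'

x = Example()
x.set_b()
print(x.variableB)  # nope; served by normal lookup, __getattr__ not called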
I'm using the following class (it's from Airflow code, so I can't modify it):
from typing import Set

class TriggerRule:
    """Class with task's trigger rules."""

    ALL_SUCCESS = 'all_success'
    ALL_FAILED = 'all_failed'
    ALL_DONE = 'all_done'
    ONE_SUCCESS = 'one_success'
    ONE_FAILED = 'one_failed'
    NONE_FAILED = 'none_failed'
    NONE_FAILED_OR_SKIPPED = 'none_failed_or_skipped'
    NONE_SKIPPED = 'none_skipped'
    DUMMY = 'dummy'

    _ALL_TRIGGER_RULES: Set[str] = set()

    @classmethod
    def is_valid(cls, trigger_rule):
        """Validates a trigger rule."""
        return trigger_rule in cls.all_triggers()

    @classmethod
    def all_triggers(cls):
        """Returns all trigger rules."""
        if not cls._ALL_TRIGGER_RULES:
            cls._ALL_TRIGGER_RULES = {
                getattr(cls, attr)
                for attr in dir(cls)
                if not attr.startswith("_") and not callable(getattr(cls, attr))
            }
        return cls._ALL_TRIGGER_RULES
Let's say I have this function:
def print_rule(rule):
    print(rule)
and I want to type hint the parameter rule so it must be one of the rules listed in the TriggerRule class.
print_rule(TriggerRule.ALL_FAILED)  # OK
print_rule('one_success')           # I don't care if it complains or not
print_rule('foo')                   # I want it to complain
Is this possible? I don't want to type it as str because I don't want to allow any string.
I'm using Python 3.8, but if this is possible in a newer version, I'd like to know as well.
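For what it's worth, here is a sketch of what this could look like with typing.Literal, which is available in Python 3.8. The rule strings have to be spelled out by hand, because a Literal type cannot be built from TriggerRule's attributes at runtime:

from typing import Literal

TriggerRuleStr = Literal[
    'all_success', 'all_failed', 'all_done',
    'one_success', 'one_failed', 'none_failed',
    'none_failed_or_skipped', 'none_skipped', 'dummy',
]

def print_rule(rule: TriggerRuleStr) -> None:
    print(rule)

print_rule('one_success')  # accepted by a static checker
print_rule('foo')          # flagged by a static checker

Note that a checker may still complain about print_rule(TriggerRule.ALL_FAILED), since it infers the class attribute as plain str; whether that is acceptable depends on your setup.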
I need to get the object method that called another function. I don't want just its name, but the actual bound method object.
So far, I've got this:
import inspect

def info():
    stack_call = inspect.stack()
    cls_method = stack_call[1][3]  # name of the calling function
    cls_obj = stack_call[1].frame.f_locals["self"]
    cls_members = inspect.getmembers(cls_obj)
    my_obj = None
    for el in cls_members:
        if el[0] == cls_method:
            my_obj = el[1]
            break
    print(my_obj)
class Bob:
    def __init__(self):
        pass

    def jim(self):
        info()

test = Bob()
test.jim()
It does what I want to do, but I don't like having to go through the list to find the right method with its name.
I tried many things, but can't see how to make it better.
Would anybody have a better version?
A little different:
def info():
    stack_call = inspect.stack()
    cls_obj = stack_call[1].frame.f_locals[list(stack_call[1].frame.f_locals.keys())[0]]
    cls_method = getattr(cls_obj, stack_call[1][3])
    print(cls_method)
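A slightly more defensive sketch of the same idea (my variant, not from the answer above): look up "self" explicitly instead of assuming it is the first local, and use the FrameInfo.function attribute for the name:

import inspect

def info():
    caller = inspect.stack()[1]
    cls_obj = caller.frame.f_locals.get("self")  # None when not called from a method
    if cls_obj is not None:
        print(getattr(cls_obj, caller.function))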
I have a function "main_func" , from which i am calling another function and so on.
class Error(Exception):
    """Base class for other exceptions"""
    pass

def main_func():
    return sub_func()

def sub_func():
    return sub_sub_func()

def sub_sub_func():
    return sub_sub_sub_func()

def sub_sub_sub_func():
    try:
        x = len(10)
        res = 'b'
    except:
        raise Error
    return res

main_func()
If you look at sub_sub_sub_func(), I have added a line x = len(10), which will cause an exception.
What I want is: if this happens, I should jump straight back to main_func() and return a flag (str) 'fail'.
I looked into defining custom exceptions, but that didn't help me.
I want to return after I raise.
len(10) will raise a TypeError. You can catch this specific exception in your main_func and do whatever needs to happen then.
Please note that you need to create an instance of your error class when raising: Error().
class Error(Exception):
    """Base class for other exceptions"""
    pass

def main_func():
    try:
        return sub_func()
    except Error as e:
        # The raised error will be caught here.
        # Do the stuff that needs to happen here.
        return 'fail'

def sub_func():
    return sub_sub_func()

def sub_sub_func():
    return sub_sub_sub_func()

def sub_sub_sub_func():
    try:
        x = len(10)  # Will raise a `TypeError`
        res = 'b'
    except:
        # The `TypeError` raised above lands here
        raise Error()
    return res

main_func()
Note: your custom Error hides a lot of information that can come in handy later, namely what actually happened and what raised the error. It is best to attach the original TypeError to Error as an inner (chained) exception:
try:
    x = len(10)
except Exception as e:
    raise Error() from e
In theory, in your code, even a potential out-of-memory exception would be converted to your Error without any record of what happened.
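With that chaining in place, here is a minimal sketch of how main_func could surface the original cause (using Python's standard __cause__ attribute):

def main_func():
    try:
        return sub_func()
    except Error as e:
        # e.__cause__ holds the original TypeError thanks to `raise ... from e`
        print('original cause:', repr(e.__cause__))
        return 'fail'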
I have a very long function func which takes a browser handle and performs a bunch of requests and reads a bunch of responses in a specific order:
def func(browser):
    # make sure we are logged in, otherwise log in
    # make request to /search and check that the page has loaded
    # fill form in /search and submit it
    # read table of response and return the result as list of objects
Each operation requires a large amount of code due to the complexity of the DOM, and they tend to grow really fast.
What would be the best way to refactor this function into smaller components so that the following properties still hold?
- the execution flow of the operations and/or their preconditions is guaranteed, just like in the current version
- the preconditions are not checked with asserts against the state, as this is a very costly operation
- func can be called multiple times on the browser
Just wrap the three helper methods in a class, and track which methods are allowed to run in an instance.
class Helper(object):
    def __init__(self):
        self.a = True
        self.b = False
        self.c = False

    def funcA(self):
        if not self.a:
            raise RuntimeError("Cannot run funcA now")
        # do stuff here
        self.a = False
        self.b = True
        return whatever

    def funcB(self):
        if not self.b:
            raise RuntimeError("Cannot run funcB now")
        # do stuff here
        self.b = False
        self.c = True
        return whatever

    def funcC(self):
        if not self.c:
            raise RuntimeError("Cannot run funcC now")
        # do stuff here
        self.c = False
        self.a = True
        return whatever

def func(...):
    h = Helper()
    h.funcA()
    h.funcB()
    h.funcC()
    # etc
The only way to call a method is if its flag is true, and each method clears its own flag and sets the next method's flag before exiting. As long as you don't touch h.a et al. directly, this ensures that each method can only be called in the proper order.
Alternately, you can use a single flag that is a reference to the function currently allowed to run.
class Helper(object):
    def __init__(self):
        self.allowed = self.funcA

    def funcA(self):
        # note: compare with `!=`, not `is not`; each attribute access
        # creates a new bound-method object, so an identity check fails
        if self.allowed != self.funcA:
            raise RuntimeError("Cannot run funcA now")
        # do stuff
        self.allowed = self.funcB
        return whatever

    # etc
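For completeness, a sketch of how the next method could look under the single-flag scheme (the body is a made-up placeholder):

    def funcB(self):
        if self.allowed != self.funcB:
            raise RuntimeError("Cannot run funcB now")
        result = 'B result'  # placeholder for the real work
        self.allowed = self.funcC
        return result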
Here's the solution I came up with. I used a decorator (closely related to the one in this blog post) which only allows a function to be called once.
def call_only_once(func):
    def new_func(*args, **kwargs):
        if not new_func._called:
            try:
                return func(*args, **kwargs)
            finally:
                new_func._called = True
        else:
            raise Exception("Already called this once.")
    new_func._called = False
    return new_func

@call_only_once
def stateA():
    print('Calling stateA only this time')

@call_only_once
def stateB():
    print('Calling stateB only this time')

@call_only_once
def stateC():
    print('Calling stateC only this time')

def state():
    stateA()
    stateB()
    stateC()

if __name__ == "__main__":
    state()
You'll see that if you re-call any of the functions, they will throw an Exception stating that they have already been called.
The problem with this is that if you ever need to call state() again, you're hosed. Unless you implement these functions as private functions, I don't think you can do exactly what you want, due to the nature of Python's scoping rules.
Edit
You can also remove the else in the decorator and your function will always return None.
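One possible workaround (my sketch, not from the original answer) if you do need to run state() again: the guard lives on each wrapper function, so it can be reset by hand:

def reset_states():
    # clear the `_called` guards so state() can run again
    for f in (stateA, stateB, stateC):
        f._called = False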
Here is a snippet I used once for my state machine:
class InitializationError(Exception):
    pass

class StateMachine(object):
    def __init__(self):
        self.handlers = {}
        self.start_state = None
        self.end_states = []

    def add_state(self, name, handler, end_state=0):
        name = name.upper()
        self.handlers[name] = handler
        if end_state:
            self.end_states.append(name)

    def set_start(self, name):
        # startup state
        self.start_state = name

    def run(self, **kw):
        """
        Run the machine.

        The first .run() call invokes the start handler with the given
        keywords. Each registered handler should return the next handler
        and the keywords it needs.
        """
        try:
            handler = self.handlers[self.start_state]
        except KeyError:
            raise InitializationError("must call .set_start() before .run()")
        while True:
            (new_state, kw) = handler(**kw)
            if isinstance(new_state, str):
                if new_state in self.end_states:
                    print("reached ", new_state)
                    break
                else:
                    handler = self.handlers[new_state]
            elif callable(new_state):
                handler = new_state
            else:
                return
Usage:
import logging

class MyParser(StateMachine):
    def __init__(self):
        super().__init__()
        # define handlers; we can define as many handlers as we want
        self.handlers["begin_parse"] = self.begin_parse
        # define the startup handler
        self.set_start("begin_parse")

    def end(self, **kw):
        logging.info("End of parsing")
        # no callable handler => end
        return None, None

    def second(self, **kw):
        logging.info("second")
        # do something
        # when the condition is reached, hand over to the `self.end` handler
        if ...:
            return self.end, {}

    def begin_parse(self, **kw):
        logging.info("start of parsing")
        # long process; once the condition is reached, hand over to the
        # `self.second` handler with new keywords
        while True:
            kw = {}
            if ...:
                return self.second, kw
            # elif other cond:
            #     return self.other_handler, kw
            else:
                return self.end, kw

# start the state machine
MyParser().run()
will print
INFO:root:start of parsing
INFO:root:second
INFO:root:End of parsing
You could use local functions in your func function. Ok, they are still declared inside one single global function, but Python is nice enough to still give you access to them for tests.
Here is an example of one function declaring and executing 3 (supposedly heavy) subfunctions. It takes one optional parameter test that, when set to 'TEST', prevents actual execution and instead gives external access to the individual sub-functions and to a local variable:
def func(test=None):
    glob = []

    def partA():
        glob.append('A')

    def partB():
        glob.append('B')

    def partC():
        glob.append('C')

    if test == 'TEST':
        global testA, testB, testC, testCR
        testA, testB, testC, testCR = partA, partB, partC, glob
        return None

    partA()
    partB()
    partC()
    return glob
When you call func, the 3 parts are executed in sequence. But if you first call func('TEST'), you can then access the local glob variable as testCR, and the 3 subfunctions as testA, testB and testC. This way you can still test the 3 parts individually with well-defined input and control their output.
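For example, a test session could then look like this (using the hooks exported above):

func('TEST')   # no heavy work done; exposes the test hooks
testA()        # run part A in isolation
assert testCR == ['A']
testB()
assert testCR == ['A', 'B']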
I would insist on the suggestion given by @user3159253 in his comment on the original question:

    If the sole purpose is readability I would split the func into three "private" or "protected" ones (i.e. _func1 or __func1) and a private or protected property which keeps the state shared between the functions.

This makes a lot of sense to me and seems more usual in object-oriented programming than the other options. Consider this example as an alternative:
Your class (teste.py):
class Test:
    def __init__(self):
        self.__environment = {}  # Protected information to be shared
        self.public_stuff = 'public info'  # Accessible to outside callers

    def func(self):
        print("Main function")
        self.__func_a()
        self.__func_b()
        self.__func_c()
        print(self.__environment)

    def __func_a(self):
        self.__environment['function a says'] = 'hi'

    def __func_b(self):
        self.__environment['function b says'] = 'hello'

    def __func_c(self):
        self.__environment['function c says'] = 'hey'
Other file:
from teste import Test

t = Test()
t.func()
This will output:
Main function
{'function a says': 'hi', 'function b says': 'hello', 'function c says': 'hey'}
If you try to call one of the protected functions, an error occurs:
Traceback (most recent call last):
  File "C:/Users/Lucas/PycharmProjects/testes/other.py", line 6, in <module>
    t.__func_a()
AttributeError: 'Test' object has no attribute '__func_a'
Same thing if you try to access the protected environment variable:
Traceback (most recent call last):
  File "C:/Users/Lucas/PycharmProjects/testes/other.py", line 5, in <module>
    print(t.__environment)
AttributeError: 'Test' object has no attribute '__environment'
In my view this is the most elegant, simple and readable way to solve your problem, let me know if it fits your needs :)
I'm a Python noob and I'm trying to solve my problems the 'pythonic' way. I have a class whose __init__ method takes 6 parameters. I need to validate each param and raise an exception if any fails to validate.
Is this the right way?
class DefinitionRunner:
    def __init__(self, canvasSize, flightId, domain, definitionPath, harPath):
        self.canvasSize = canvasSize
        self.flightId = flightId
        self.domain = domain
        self.harPath = harPath
        self.definitionPath = definitionPath
        # ... bunch of validation checks ...
        # ... if anything fails, raise ValueError ...
If you want the variables to be settable independently of __init__, you could use properties to implement validations in separate methods.
They work only for new style classes though, so you need to define the class as class DefinitionRunner(object)
So for example,
@property
def canvasSize(self):
    return self._canvasSize

@canvasSize.setter
def canvasSize(self, value):
    # some validation here
    self._canvasSize = value
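As a concrete illustration, the validation placeholder could be filled in like this (the positive-integer rule is just an assumption for the example):

@canvasSize.setter
def canvasSize(self, value):
    # hypothetical rule: canvas size must be a positive integer
    if not isinstance(value, int) or value <= 0:
        raise ValueError("canvasSize must be a positive integer")
    self._canvasSize = value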
Broadly speaking, that looks like the way you'd do it. Though strictly speaking, you might as well do validation before rather than after assignment, especially if assignment could potentially be time or resource intensive. Also, style convention says not to align assignment blocks like you are.
I would do it like you did it, except for the validating stuff: I would validate in a setter method and use it to set the attributes.
You could do something like this. Make a validator for each type of input. Make a helper function to run validation:
def validate_and_assign(obj, items_d, validators):
    # validate all entries
    for key, validator in validators.items():
        if not validator(items_d[key]):
            raise ValueError("Validation for %s failed" % (key,))
    # set all entries
    for key, val in items_d.items():
        setattr(obj, key, val)
Which you'd use like this:
class DefinitionRunner:
    validators = {
        'canvasSize': canvasSize_validator,
        'flightId': flightId_validator,
        'domain': domain_validator,
        'definitionPath': definitionPath_validator,
        'harPath': harPath_validator,
    }

    def __init__(self, canvasSize, flightId, domain, definitionPath, harPath):
        validate_and_assign(self, {
            'canvasSize': canvasSize,
            'flightId': flightId,
            'domain': domain,
            'definitionPath': definitionPath,
            'harPath': harPath,
        }, DefinitionRunner.validators)
The validators might be the same function, of course, if the data type is the same.
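For illustration, here are hypothetical validators matching the signature assumed above (the names and rules are made up):

def canvasSize_validator(value):
    # e.g. a canvas size must be a positive integer
    return isinstance(value, int) and value > 0

def domain_validator(value):
    # e.g. a domain must be a non-empty string containing a dot
    return isinstance(value, str) and '.' in value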
I'm not sure if this is exactly "Pythonic", but I've defined a function decorator called require_type. (To be honest, I think I found it somewhere online.)
def require_type(my_arg, *valid_types):
    '''
    A simple decorator that performs type checking.

    @param my_arg: string indicating argument name
    @param valid_types: list of valid types
    '''
    def make_wrapper(func):
        if hasattr(func, 'wrapped_args'):
            wrapped = getattr(func, 'wrapped_args')
        else:
            body = func.__code__
            wrapped = list(body.co_varnames[:body.co_argcount])
        try:
            idx = wrapped.index(my_arg)
        except ValueError:
            raise NameError(my_arg)

        def wrapper(*args, **kwargs):
            def fail():
                all_types = ', '.join(str(typ) for typ in valid_types)
                raise TypeError("'%s' was type %s, expected to be in following list: %s"
                                % (my_arg, type(arg), all_types))
            if len(args) > idx:
                arg = args[idx]
                if not isinstance(arg, valid_types):
                    fail()
            else:
                if my_arg in kwargs:
                    arg = kwargs[my_arg]
                    if not isinstance(arg, valid_types):
                        fail()
            return func(*args, **kwargs)

        wrapper.wrapped_args = wrapped
        return wrapper
    return make_wrapper
Then, to use it:
class SomeObject(object):
    @require_type("prop1", str)
    @require_type("prop2", numpy.complex128)
    def __init__(self, prop1, prop2):
        pass
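A quick usage sketch (assuming numpy is installed; the argument values are made up):

import numpy

SomeObject("hello", numpy.complex128(1 + 2j))  # passes both checks

try:
    SomeObject(42, numpy.complex128(0))  # 'prop1' is an int, not a str
except TypeError as error:
    print(error)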