Passing a class's own static methods as parameters - python

Here is the definition of my class, which is full of static functions. I want to use all of them in the "sendLog" function, which calls itself on a time interval (10 seconds here). When I run this, the interpreter tells me "TypeError: sendLog() takes at least 5 arguments (0 given)".
But if I pass the same parameters in, I will need to define sendLog again and again, because it calls itself repeatedly. I know this isn't the right way, but I can't figure it out.
class AccessLog:
    @staticmethod
    def backupAccessLog(target, source):
        newfile = os.path.splitext(source)[0] + "_" + time.strftime("%Y%m%d-%H%M%S") + os.path.splitext(source)[1]
        copyfile(source, newfile)
        shutil.move(newfile, target)

    @staticmethod
    def emptyAccessLog(filename):
        open(filename, 'w').close()

    @staticmethod
    def postLogstoElastic():
        fileLogs = open("example.log", "rw+")
        fileBackups = open("logs_of_accesslog.log", "rw+")
        lines = fileLogs.read().splitlines()
        logging.basicConfig(format='%(asctime)s>>>%(message)s', filename='logs_exceptions.log', level=logging.DEBUG)
        es = Elasticsearch(['http://localhost:9200/'], verify_certs=True)
        # es.create(index="index_log23June", doc_type="type_log23June")
        es.indices.create(index='index_log23June', ignore=400)
        i = 0
        for item in lines:
            try:
                i += 1
                if bool(item):
                    es.index(index="index_log23June", doc_type="type_log23June", body={"Log": item})
                else:
                    print "a space line ignored. at line number:", i
                    raise ValueError('Error occurred on this line: ', i)
                print "lines[", i, "]:", item, "\n"
            except ValueError as err:
                logging.error(err.args)

    @staticmethod
    def sendLog(interval, worker_functions, iterations=1):
        def call_worker_functions():
            for f in worker_functions:
                f()  # ERROR: Msg: 'NoneType' object is not callable
        for i in range(iterations):
            threading.Timer(interval * i, call_worker_functions).start()
And I want to call this method with this code:
try:
    AccessLog.AccessLog.sendLog(
        interval=10,
        worker_functions=(
            AccessLog.AccessLog.backupAccessLog("logbackups", "example.log"),
            AccessLog.AccessLog.emptyAccessLog("example.log"),
            AccessLog.AccessLog.postLogstoElastic()
        ),
        iterations=999
    )
except ValueError as err:
    logging.error(err.args)
"TypeError: sendLog() takes at least 5 arguments (0 given)" It looks normal but How can I handle this ?

Have you tried putting the @staticmethod decorator at the same indentation level as the function definition?

Apparently you want sendLog() to call the worker functions every 10 seconds or so.
Here's an easy way to do that:
class AccessLog:
    @staticmethod
    def sendLog(interval, worker_functions, iterations=1):
        def call_worker_functions():
            for f in worker_functions:
                f(*worker_functions[f])
        for i in range(iterations):
            threading.Timer(interval * i, call_worker_functions).start()
And now use it like this:
AccessLog.AccessLog.sendLog(
    interval=10,
    worker_functions={
        AccessLog.AccessLog.backupAccessLog: ("logbackups", "example.log"),
        AccessLog.AccessLog.emptyAccessLog: ("example.log",),
        AccessLog.AccessLog.postLogstoElastic: ()
    },
    iterations=999
)
This is just one of many possible ways, but the key point is to pass the functions themselves (with their arguments stored alongside) instead of calling them on the spot, which is what filled your tuple with None values and produced the 'NoneType' object is not callable error.
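As an aside (not from the original answer): if you would rather keep worker_functions as a plain tuple, you can pre-bind the arguments with functools.partial so nothing is called until the timer fires. This works with the sendLog from the question, whose inner loop simply calls f(); the names are taken from the question:

from functools import partial

AccessLog.AccessLog.sendLog(
    interval=10,
    worker_functions=(
        partial(AccessLog.AccessLog.backupAccessLog, "logbackups", "example.log"),
        partial(AccessLog.AccessLog.emptyAccessLog, "example.log"),
        AccessLog.AccessLog.postLogstoElastic,   # takes no arguments, so pass it as-is
    ),
    iterations=999
)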

Related

The problem of using the try exception dynamically

I have a function called transform_exceptions() that takes a list of functions, calls each of them (the functions take no arguments), converts any exception that occurs into an ExceptionProxy object following the convention below, and finally returns the list of transformed errors in the same order as the input functions.
Note: if a function runs without error, an ExceptionProxy object should still be created, with its msg value set to "ok!".
Sample:
class ExceptionProxy(Exception):
    # define your class here

def transform_exceptions(func_ls):
    # implement your function here

def f():
    1/0

def g():
    pass

tr_ls = transform_exceptions([f, g])
for tr in tr_ls:
    print("msg: " + tr.msg + "\nfunction name: " + tr.function.__name__)
Output:
msg: division by zero
function name: f
msg: ok!
function name: g
My code:
from mimetypes import init

class ExceptionProxy(Exception):
    def __init__(self, msg, function):
        self.msg = msg
        self.function = function

def transform_exceptions(func_ls):
    exception_list = []
    for func in func_ls:
        try:
            func
        except Exception as e:
            r = ExceptionProxy(str(e), func)
            exception_list.append(r)
        else:
            r = ExceptionProxy("ok!", func)
            exception_list.append(r)
    return exception_list
You should actually call each function when looping over the list:
func()
Also modified code:
class ExceptionProxy(Exception):
    def __init__(self, msg, function):
        self.msg = msg
        self.function = function

def transform_exceptions(func_ls):
    out = []
    for x in func_ls:
        try:
            x()
            a = ExceptionProxy("ok!", x)
        except Exception as e:
            a = ExceptionProxy(str(e), x)
        out.append(a)
    return out
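For reference, running the driver from the question against this version reproduces the expected output (nothing new here, just the question's sample repeated):

def f():
    1/0

def g():
    pass

tr_ls = transform_exceptions([f, g])
for tr in tr_ls:
    print("msg: " + tr.msg + "\nfunction name: " + tr.function.__name__)

# msg: division by zero
# function name: f
# msg: ok!
# function name: g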

Python Executor - Passing function with argument as argument

My goal is to create a function that I can use to measure the execution and resource use of another function. Using a tutorial, I've created the below using Python's ThreadPoolExecutor:
from resource import *
from time import sleep
from concurrent.futures import ThreadPoolExecutor

class MemoryMonitor:
    def __init__(self):
        self.keep_measuring = True

    def measure_usage(self):
        max_usage = 0
        u_run_time = 0
        s_run_time = 0
        while self.keep_measuring:
            max_usage = max(max_usage, getrusage(RUSAGE_SELF).ru_maxrss)
            u_run_time = max(u_run_time, getrusage(RUSAGE_SELF).ru_utime)
            s_run_time = max(s_run_time, getrusage(RUSAGE_SELF).ru_stime)
            sleep(0.1)  # run this loop every 0.1 seconds
        return [max_usage, u_run_time, s_run_time]

def execute(function):
    with ThreadPoolExecutor() as executor:
        monitor = MemoryMonitor()
        stats_thread = executor.submit(monitor.measure_usage)
        try:
            fn_thread = executor.submit(function)
            result = fn_thread.result()
            print("print result")
            print(result)
            print("print result type")
            print(type(result))
        finally:
            monitor.keep_measuring = False
            stats = stats_thread.result()
            print(stats)
        return result

def foo():
    i = 0
    while i < 3:
        print("foo")
        i += 1
    return 1

def bar(x):
    while x < 3:
        print("foobar")
        x += 1
    return 1

var = execute(foo)
print("Var = " + str(var))
var = execute(bar(0))
print("Var = " + str(var))
If I pass the function foo as an argument to the function execute, it prints the correct results and returns the value returned by foo.
If I pass the function bar in the same way, but with bar itself requiring an argument, the function runs (prints 3 times) and then I get the following error:
result = self.fn(*self.args, **self.kwargs)
TypeError: 'int' object is not callable
After some testing, the part where I'm stuck appears to be passing a function as an argument, if that function itself requires an argument. As I understand the ThreadPoolExecutor, the fn_thread object encapsulates the execution of the function submitted. The result object should simply hold the result of that execution - what am I missing that this cannot handle being passed a function with an argument?
You are submitting
bar(0)
instead of
bar, 0
To clarify, look at the submit's signature:
submit(fn, *args, **kwargs)
The result of bar(0) is an integer, and the executor cannot call an integer, since it is not callable, as the error message suggests.
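A minimal sketch of how execute() could be adjusted to forward arguments (adapted from the question's code; this is one option, not the only one):

def execute(function, *args, **kwargs):
    with ThreadPoolExecutor() as executor:
        monitor = MemoryMonitor()
        stats_thread = executor.submit(monitor.measure_usage)
        try:
            # pass the callable and its arguments separately;
            # the executor calls function(*args, **kwargs) in a worker thread
            fn_thread = executor.submit(function, *args, **kwargs)
            result = fn_thread.result()
        finally:
            monitor.keep_measuring = False
            print(stats_thread.result())
        return result

var = execute(foo)      # no arguments needed
var = execute(bar, 0)   # bar is called as bar(0) inside the executor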

python decorator TypeError missing 1 required positional argument

I'm trying to write a decorator that retries a failing function N times, sleeping for increasingly long intervals in between. This is my attempt so far:
def exponential_backoff(seconds=10, attempts=10):
    def our_decorator(func):
        def function_wrapper(*args, **kwargs):
            for s in range(0, seconds*attempts, attempts):
                sleep(s)
                try:
                    return func(*args, **kwargs)
                except Exception as e:
                    print(e)
        return function_wrapper
    return our_decorator

@exponential_backoff
def test():
    for a in range(100):
        if a - random.randint(0,1) == 0:
            print('success count: {}'.format(a))
            pass
        else:
            print('error count {}'.format(a))
            'a' + 1

test()
I keep getting the error:
TypeError: our_decorator() missing 1 required positional argument: 'func'
Understand what a decorator is:
@exponential_backoff
def test():
    pass
is equivalent to:
def test():
    pass

test = exponential_backoff(test)
In this case, test is actually our_decorator (the inner def our_decorator(func): function). That's why you get the TypeError when calling test().
So further:
@exponential_backoff()
def test():
    pass
is equivalent to:
def test():
    pass

test = exponential_backoff()(test)
In this case, now test is what you need.
Further, functools.wraps helps you copy all the properties of the original function to the decorated function, such as the function's name or docstring:
from functools import wraps

def exponential_backoff(func):
    # @wraps(func)
    def function_wrapper(*args, **kwargs):
        pass
    return function_wrapper

@exponential_backoff
def test():
    pass

print(test)  # <function exponential_backoff.<locals>.function_wrapper at 0x7fcc343a4268>
# uncomment the @wraps(func) line:
print(test)  # <function test at 0x7fcc343a4400>
You should be using:
@exponential_backoff()
def test():
    ...
The overall decorator is not designed to have arguments be optional, so you must provide () when using it.
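For instance, with the decorator exactly as defined above, both of these forms work once the parentheses are supplied (flaky_call is just an illustrative name):

@exponential_backoff()                         # uses the defaults seconds=10, attempts=10
def test():
    ...

@exponential_backoff(seconds=5, attempts=3)    # or pass explicit values
def flaky_call():
    ...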
If you want an example of how to make the decorator's argument list optional, see:
https://wrapt.readthedocs.io/en/latest/decorators.html#decorators-with-optional-arguments
You might also consider using the wrapt package to make your decorators easier and more robust.
Either you go for the solution provided by @Graham Dumpleton or you can just modify your decorator like so:
from functools import wraps, partial

def exponential_backoff(func=None, seconds=10, attempts=10):
    if func is None:
        return partial(exponential_backoff, seconds=seconds, attempts=attempts)
    @wraps(func)
    def function_wrapper(*args, **kwargs):
        for s in range(0, seconds*attempts, attempts):
            sleep(s)
            try:
                return func(*args, **kwargs)
            except Exception as e:
                print(e)
    return function_wrapper

@exponential_backoff
def test():
    for a in range(100):
        if a - random.randint(0,1) == 0:
            print('success count: {}'.format(a))
            pass
        else:
            print('error count {}'.format(a))
            'a' + 1

test()
EDIT
My answer was not entirely correct; please see @GrahamDumpleton's answer, which shows how to make my attempted solution viable (i.e. this link). Fixed it now, thank you @GrahamDumpleton!

How can I specify a class method as a callback target in Python?

I'm using the sftp module of paramiko to transfer payloads to remote hosts. Part of the sftp.put call allows for specifying a callback method with signature func(int,int). I'm trying to put a transfer stats method into my Connection class to keep track of payload progress.
Here's the class I have currently:
class Connection:
    def __init__(self, endpoint, RSAKeyObj):
        self.displayHost = bcolors.OKGREEN + endpoint + bcolors.ENDC
        self.transport = paramiko.Transport((endpoint, 4022))
        self.transport.connect(username='transit', pkey=RSAKeyObj)
        self.sftp = paramiko.SFTPClient.from_transport(self.transport)
        try:
            # initial sftp directory setup
            log.info('[{0}]: Setting up remote directories...'.format(self.displayHost))
            log.info(self.sftp.mkdir(JAIL_DIR))
        except:
            pass

    def static_vars(**kwargs):
        def decorate(func):
            for k in kwargs:
                setattr(func, k, kwargs[k])
            return func
        return decorate

    @static_vars(counter=0)
    def TransferStats(self, transferedBytes, totalBytes):
        if (transferedBytes / totalBytes) >= TransferStats.counter:
            log.info('Transferred: {}% [{}/{}]'.format(round((transferedBytes/totalBytes)*100, 2), transferedBytes, totalBytes))
            TransferStats.counter += 0.025

    def Transmit(self, targetDir, payloadPath):
        displayText = 'Transferring package {}...'.format(payloadPath)
        self.TransferStats().counter = 0
        log.info('[%s] ' % self.displayHost + displayText)
        log.info(self.sftp.put(payloadPath, '%s/%s' % (targetDir, payloadPath), callback=self.TransferStats()))
However when I try this, I get the following error:
ERROR - (, TypeError('TransferStats() takes exactly 3 arguments (1 given)',), )
This makes me think that the callback isn't getting recognized by paramiko when it tries to send its (int, int) because of the self declaration. Is there a way around this?
Your problem is in:
log.info(self.sftp.put(payloadPath, '%s/%s' % (targetDir,payloadPath), callback=self.TransferStats()))
Your error:
ERROR - (, TypeError('TransferStats() takes exactly 3 arguments (1 given)',), )
is caused by calling TransferStats with no explicit arguments: self.TransferStats() results in exactly 1 argument, the instance, which Python passes automatically as self.
Just pass the bound method instead of calling it:
log.info(self.sftp.put(payloadPath, '%s/%s' % (targetDir,payloadPath), callback=self.TransferStats))
EDIT: You have the same problem in the following line:
self.TransferStats().counter=0
Remove the parentheses:
self.TransferStats.counter=0
Also, your counter attribute on TransferStats is effectively a hidden global, reset at each Transmit call.
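One way to avoid that hidden global (my sketch, not part of the original answer; it reuses the names and logging calls from the question) is to keep the progress counter as an instance attribute:

class Connection:
    # ... __init__ as in the question ...

    def TransferStats(self, transferedBytes, totalBytes):
        # the counter lives on the Connection instance, so every connection
        # tracks its own progress and nothing global needs resetting
        if (transferedBytes / float(totalBytes)) >= self._counter:
            log.info('Transferred: {}% [{}/{}]'.format(
                round((transferedBytes / float(totalBytes)) * 100, 2),
                transferedBytes, totalBytes))
            self._counter += 0.025

    def Transmit(self, targetDir, payloadPath):
        self._counter = 0.0
        log.info('[%s] Transferring package %s...' % (self.displayHost, payloadPath))
        log.info(self.sftp.put(payloadPath, '%s/%s' % (targetDir, payloadPath),
                               callback=self.TransferStats))  # bound method, no parentheses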

General decorator to wrap try except in python?

I'm interacting with a lot of deeply nested JSON I didn't write, and would like to make my Python script more 'forgiving' of invalid input. I find myself writing involved try-except blocks, and would rather just wrap the dubious functions up.
I understand it's bad policy to swallow exceptions, but I'd prefer they be printed and analysed later, rather than actually stopping execution. In my use case, it's more valuable to continue executing over the loop than to get all the keys.
Here's what I'm doing now:
try:
    item['a'] = myobject.get('key').METHOD_THAT_DOESNT_EXIST()
except:
    item['a'] = ''

try:
    item['b'] = OBJECT_THAT_DOESNT_EXIST.get('key2')
except:
    item['b'] = ''

try:
    item['c'] = func1(ARGUMENT_THAT_DOESNT_EXIST)
except:
    item['c'] = ''

...

try:
    item['z'] = FUNCTION_THAT_DOESNT_EXIST(myobject.method())
except:
    item['z'] = ''
Here's what I'd like, (1):
item['a'] = f(myobject.get('key').get('subkey'))
item['b'] = f(myobject.get('key2'))
item['c'] = f(func1(myobject))
...
or (2):
@f
def get_stuff():
    item = {}
    item['a'] = myobject.get('key').get('subkey')
    item['b'] = myobject.get('key2')
    item['c'] = func1(myobject)
    ...
    return(item)
...where I can wrap either the single data item (1), or a master function (2), in some function that turns execution-halting exceptions into empty fields, printed to stdout. The former would be sort of an item-wise skip - where that key isn't available, it logs blank and moves on - the latter is a row-skip, where if any of the fields don't work, the entire record is skipped.
My understanding is that some kind of wrapper should be able to fix this. Here's what I tried, with a wrapper:
def f(func):
    def silenceit():
        try:
            func(*args, **kwargs)
        except:
            print('Error')
        return(silenceit)
Here's why it doesn't work: if I call a function that doesn't exist, it doesn't try-catch it away:
>>> f(meow())
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'meow' is not defined
Before I even add a blank return value, I'd like to get it to try-catch correctly. If the function had worked, this would have printed "Error", right?
Is a wrapper function the correct approach here?
UPDATE
I've had a lot of really useful, helpful answers below, and thank you for them---but I've edited the examples I used above to illustrate that I'm trying to catch more than nested key errors, that I'm looking specifically for a function that wraps a try-catch for...
When a method doesn't exist.
When an object doesn't exist, and is getting a method called on it.
When an object that does not exist is being called as an argument to a function.
Any combination of any of these things.
Bonus, when a function doesn't exist.
There are lots of good answers here, but I didn't see any that address the question of whether you can accomplish this via decorators.
The short answer is "no," at least not without structural changes to your code. Decorators operate at the function level, not on individual statements. Therefore, in order to use decorators, you would need to move each of the statements to be decorated into its own function.
But note that you can't just put the assignment itself inside the decorated function. You need to return the rhs expression (the value to be assigned) from the decorated function, then do the assignment outside.
To put this in terms of your example code, one might write code with the following pattern:
@return_on_failure('')
def computeA():
    return myobject.get('key').METHOD_THAT_DOESNT_EXIST()

item["a"] = computeA()
return_on_failure could be something like:
def return_on_failure(value):
    def decorate(f):
        def applicator(*args, **kwargs):
            try:
                return f(*args, **kwargs)
            except:
                print('Error')
                return value
        return applicator
    return decorate
You could use a defaultdict and the context manager approach as outlined in Raymond Hettinger's PyCon 2013 presentation
from collections import defaultdict
from contextlib import contextmanager

@contextmanager
def ignored(*exceptions):
    try:
        yield
    except exceptions:
        pass

item = defaultdict(str)
obj = dict()

with ignored(Exception):
    item['a'] = obj.get(2).get(3)
print item['a']

obj[2] = dict()
obj[2][3] = 4

with ignored(Exception):
    item['a'] = obj.get(2).get(3)
print item['a']
It's very easy to achieve using a configurable decorator.
def get_decorator(errors=(Exception, ), default_value=''):
    def decorator(func):
        def new_func(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except errors, e:
                print "Got error! ", repr(e)
                return default_value
        return new_func
    return decorator

f = get_decorator((KeyError, NameError), default_value='default')
a = {}

@f
def example1(a):
    return a['b']

@f
def example2(a):
    return doesnt_exist()

print example1(a)
print example2(a)
Just pass get_decorator a tuple of the error types you want to silence and the default value to return.
The output will be:
Got error! KeyError('b',)
default
Got error! NameError("global name 'doesnt_exist' is not defined",)
default
Edit: Thanks to martineau, I changed the default value of errors to a tuple containing the base Exception, to prevent errors.
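Since that snippet uses Python 2 syntax (except errors, e: and print statements), here is a minimal Python 3 sketch of the same configurable decorator, keeping the names from the answer:

def get_decorator(errors=(Exception,), default_value=''):
    def decorator(func):
        def new_func(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except errors as e:
                print("Got error!", repr(e))
                return default_value
        return new_func
    return decorator

f = get_decorator((KeyError, NameError), default_value='default')

@f
def example1(a):
    return a['b']

print(example1({}))  # prints "Got error! KeyError('b')" and then "default"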
It depends on what exceptions you expect.
If your only use case is get(), you could do
item['b'] = myobject.get('key2', '')
For the other cases, your decorator approach might be useful, but not in the way you do it.
I'll try to show you:
def f(func):
    def silenceit(*args, **kwargs):       # takes all kinds of arguments
        try:
            return func(*args, **kwargs)  # returns func's result
        except Exception, e:
            print('Error:', e)
            return e  # not the best way, maybe we'd better return None
                      # or a wrapper object containing e.
    return silenceit  # on the correct level
Nevertheless, f(some_undefined_function()) won't work, because
a) f() isn't yet active at execution time, and
b) it is used wrongly. The right way would be to wrap the function and then call it: f(function_to_wrap)().
A "layer of lambda" would help here:
wrapped_f = f(lambda: my_function())
wraps a lambda function which in turn calls a non-existing function. Calling wrapped_f() leads to calling the wrapper which calls the lambda which tries to call my_function(). If this doesn't exist, the lambda raises an exception which is caught by the wrapper.
This works because the name my_function is not executed at the time the lambda is defined, but when it is executed. And this execution is protected and wrapped by the function f() then. So the exception occurs inside the lambda and is propagated to the wrapping function provided by the decorator, which handles it gracefully.
This move towards inside the lambda function doesn't work if you try to replace the lambda function with a wrapper like
g = lambda function: lambda *a, **k: function(*a, **k)
followed by a
f(g(my_function))(arguments)
because here the name resolution is "back at the surface": my_function cannot be resolved and this happens before g() or even f() are called. So it doesn't work.
And if you try to do something like
g(print)(x.get('fail'))
it cannot work as well if you have no x, because g() protects print, not x.
If you want to protect x here, you'll have to do
value = f(lambda: x.get('fail'))()
because the wrapper provided by f() calls that lambda function, which raises an exception that is then silenced.
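Putting the pieces of this answer together, a small runnable sketch (Python 3 syntax; the dictionary and keys are just illustrative) looks like this:

def f(func):
    def silenceit(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception as e:
            print('Error:', e)
            return None
    return silenceit

x = {}  # 'fail' is missing, so x.get('fail') is None and .get('sub') blows up

# the lambda delays evaluation until the wrapper runs, so the
# AttributeError is raised inside silenceit and caught there
value = f(lambda: x.get('fail').get('sub'))()  # prints: Error: 'NoneType' object has no attribute 'get'
print(value)                                   # None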
Extending @iruvar's answer - starting with Python 3.4 there is an existing context manager for this in the Python standard library: https://docs.python.org/3/library/contextlib.html#contextlib.suppress
from contextlib import suppress

with suppress(FileNotFoundError):
    os.remove('somefile.tmp')

with suppress(FileNotFoundError):
    os.remove('someotherfile.tmp')
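Applied to the question's pattern, that might look like the sketch below (the exception types are my assumptions about what the nested lookups can raise):

from contextlib import suppress

item['a'] = ''  # default in case the lookup fails
with suppress(AttributeError, KeyError, TypeError):
    item['a'] = myobject.get('key').get('subkey')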
In your case you first evaluate the meow() call (which doesn't exist) and then wrap the result in the decorator; it doesn't work that way.
First, the exception is raised before anything is wrapped; second, the wrapper is wrongly indented (silenceit should not return itself). You might want to do something like:
def hardfail():
    return meow()  # meow doesn't exist

def f(func):
    def wrapper():
        try:
            func()
        except:
            print 'error'
    return wrapper

softfail = f(hardfail)
output:
>>> softfail()
error
>>> hardfail()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "<stdin>", line 2, in hardfail
NameError: global name 'meow' is not defined
Anyway, in your case I don't understand why you don't use a simple helper such as:
def get_subkey(obj, key, subkey):
    try:
        return obj.get(key).get(subkey, '')
    except AttributeError:
        return ''
and in the code:
item['a'] = get_subkey(myobject, 'key', 'subkey')
Edited:
In case you want something that will work at any depth, you can do something like:
def get_from_object(obj, *keys):
    try:
        value = obj
        for k in keys:
            value = value.get(k)
        return value
    except AttributeError:
        return ''
That you'd call:
>>> d = {1:{2:{3:{4:5}}}}
>>> get_from_object(d, 1, 2, 3, 4)
5
>>> get_from_object(d, 1, 2, 7)
''
>>> get_from_object(d, 1, 2, 3, 4, 5, 6, 7)
''
>>> get_from_object(d, 1, 2, 3)
{4: 5}
And using your code
item['a'] = get_from_object(obj, 2, 3)
By the way, from a personal point of view, I also like @cravoori's solution using a context manager. But it would mean having three lines of code each time:
item['a'] = ''
with ignored(AttributeError):
    item['a'] = obj.get(2).get(3)
Why not just use a loop?
for dst_key, src_key in (('a', 'key'), ('b', 'key2')):
    try:
        item[dst_key] = myobject.get(src_key).get('subkey')
    except Exception:  # or KeyError?
        item[dst_key] = ''
Or, if you wish, write a little helper:
def get_value(obj, key):
    try:
        return obj.get(key).get('subkey')
    except Exception:
        return ''
Also, you can combine both solutions if you have a few places where you need to get a value and a helper function would be more reasonable.
Not sure that you actually need a decorator for your problem.
Since you're dealing with lots of broken code, it may be excusable to use eval in this case.
def my_eval(code):
    try:
        return eval(code)
    except:  # Can catch more specific exceptions here.
        return ''
Then wrap all your potentially broken statements:
item['a'] = my_eval("""myobject.get('key').get('subkey')""")
item['b'] = my_eval("""myobject.get('key2')""")
item['c'] = my_eval("""func1(myobject)""")
How about something like this:
def exception_handler(func):
    def inner_function(*args, **kwargs):
        try:
            func(*args, **kwargs)
        except TypeError:
            print(f"{func.__name__} error")
    return inner_function
then
@exception_handler
def doSomethingExceptional():
    a = 2/0
All credits go to: https://medium.com/swlh/handling-exceptions-in-python-a-cleaner-way-using-decorators-fae22aa0abec
Try Except Decorator for sync and async functions
Note: logger.error can be replaced with print
Latest version can be found here.
