In Python, is there a way to assign different decorators to functions as variables?
For example (the following code doesn't execute, obviously):
def status_display(function):
    def body():
        print("Entering", function.__name__)
        function()
        print("Exited", function.__name__)
    return body

def call_counter(function):
    counter = 0
    def body():
        function()
        nonlocal counter
        counter += 1
        print(function.__name__, 'had been executed', counter, 'times')
    return body
def a_function():
    print('a_function executes')
# problems start here
# is there a working alternative to this false syntax?
@status_display
a_function_with_status_display = a_function()
@call_counter
a_function_with_call_counter = a_function()

# for an even crazier feat
# I knew this wouldn't work even before executing it
a_function_with_status_display = @status_display a_function()
a_function_with_call_counter = @call_counter a_function()
Thanks in advance.
a_function_with_status_display = status_display(a_function)
a_function_with_call_counter = call_counter(a_function)
You seem to be able to write decorators, but you don't know what they do?
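A decorator is just an ordinary callable that takes a function and returns a new one; the @ syntax is only shorthand for calling it and rebinding the same name. So you can call the decorators from the question yourself and bind the results to whatever names you like (decorated_function below is just an illustrative name):

@status_display
def decorated_function():            # same as: decorated_function = status_display(decorated_function)
    print('decorated_function executes')

a_function_with_status_display = status_display(a_function)
a_function_with_call_counter = call_counter(a_function)

a_function_with_status_display()     # prints the Entering/Exited messages around the call
a_function_with_call_counter()       # prints the running call count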
I'm self-learning Python, so I don't know how to describe this clearly; here's the simplest proxy example I can come up with, in pseudocode:
# where r() is a random number function
objCount = 0

def mainfunc():
    while playgame == True and objCount < 100:
        create(r(time))
        time.sleep(1)
    return None

def create(tmptime):
    global objCount
    objCount = objCount + 1
    newobj = plotSomething(r(x), r(y))
    time.sleep(tmptime)
    selfDelete..  # pseudocode: the object removes itself here
    return None

mainfunc()  # run it
Instead of making a new randomly "lived" object every second, it makes one and then waits for its "life" to expire before making the next. I'm trying to just fire each object off to a sidechain to time out on its own while the main loop keeps making new things.
All the documentation I find gets deeply involved with asyncio, multithreading, etc.
Is there an easy way to kick this thing out of the main loop and not hold up traffic?
The laziest method, for simplicity, is:
import concurrent.futures as delayobj

# where r() is a random number function
objCount = 0

def mainfunc():
    global objCount
    with delayobj.ThreadPoolExecutor() as pool:
        while objCount < 100:
            pool.submit(create, tmptime=r(time))
            time.sleep(1)
    return None

def create(tmptime):
    global objCount
    objCount = objCount + 1
    newobj = plotSomething(r(x), r(y))
    time.sleep(tmptime)
    selfDelete..  # pseudocode: the object removes itself here
    return None

mainfunc()  # run it
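A self-contained toy version you can actually run (plotSomething, selfDelete and r() are placeholders above, so here create just sleeps and prints, and the limits are shrunk so the demo finishes quickly):

import concurrent.futures
import random
import time

objCount = 0

def create(tmptime):
    global objCount
    objCount += 1              # not thread-safe, but fine for a demo
    n = objCount
    print("object", n, "born, lives", round(tmptime, 2), "s")
    time.sleep(tmptime)        # the object's "life", spent in a worker thread
    print("object", n, "expired")

def mainfunc():
    with concurrent.futures.ThreadPoolExecutor() as pool:
        while objCount < 5:
            pool.submit(create, tmptime=random.uniform(1, 3))
            time.sleep(1)      # keep spawning once a second without waiting for lives
        # leaving the with-block waits for any still-living objects to expire

mainfunc()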
thanks again guys
I have a python function with a lot of functionality and several inner functions. I want to mock out the return value of one of those functions. Is it possible to use the mock package to mock out the inner function?
Here's an example:
import numpy as np

def outer(values):
    a = 1
    def inner():
        return np.mean(values)
    if inner() == 1:
        return None
    return inner()
Ok it's a strange example, but what I want is to mock out inner() to return a certain value. I tried to mock with @mock.patch('outer.inner') and I tried @mock.patch.object(outer, 'inner'), but neither works. Is it possible to mock a closure?
As far as I've found so far, the answer is "you can't": inner only exists as a local inside outer while outer is running, so there is no attribute path for mock.patch to target. Disappointing, but actionable.
In my case I was able to mock out some other call such that the closure returned what I wanted. In the example above it would be like:
# 'np.mean' here stands for wherever np.mean is looked up by the code under test
def test_outer_mean_1(self):
    with mock.patch('np.mean', return_value=1):
        self.assertIsNone(outer(None))

def test_outer_mean_not_1(self):
    with mock.patch('np.mean', return_value=2):
        self.assertEqual(2, outer(None))
If anybody comes up with a better answer, I'd be eager to hear it.
related question Mocking a local variable of a function in python
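If you can change the code under test, one workaround in the same spirit is to lift inner out to module level so it has a patchable name; a sketch (the helper name _inner and the __name__-based patch target are just for illustration):

import numpy as np
from unittest import mock

def _inner(values):                 # hypothetical helper pulled out of outer
    return np.mean(values)

def outer(values):
    a = 1
    if _inner(values) == 1:
        return None
    return _inner(values)

def test_outer_with_patched_inner():
    # patch the helper where it lives (this module), not inside outer
    with mock.patch(__name__ + '._inner', return_value=2):
        assert outer(None) == 2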
Disclaimer: I'm not saying this is the right approach; mocking np.mean is much better.
I have come up with a workaround: the idea is to change the code of the function at run time and execute the new function.
Here is the code:
from _pytest._code import Code

def convert_function_in_function(func):
    context = getattr(func, "__globals__", {})
    code = Code.from_function(func)
    source = code.source()
    new_body = (["from unittest import mock", "new_mock = mock.MagicMock()"]
                + source.lines[0:2] + ["    inner=new_mock"] + source.lines[4:])
    compiled = compile("\n".join(new_body), str(code.path), "exec")
    exec(compiled, context)
    return context['outer'], context['new_mock']
def test_outer_mean_specific_value():
    new_outer, mock_inner = convert_function_in_function(outer)
    mock_inner.return_value = 2
    assert 2 == new_outer(5)
Explanation: convert_function_in_function rewrites the code to be:

from unittest import mock
new_mock = mock.MagicMock()

def outer(values):
    a = 1
    inner=new_mock
    if inner() == 1:
        return None
    return inner()
Then it returns the new function and the matching mock. You can then change the mock behaviour and call the new function.
I don't know how to effectively word my question, but I'll try my best. I want to be able to use a 'for' statement to iterate through a dictionary and access previously-made 'self' items. Like I said, hard to word the question.
I have found that I can use exec() to do it, but I had been told not to use exec() unless absolutely necessary. Also, I realize what this example does is technically useless, but it's a very simplified version of what I need.
global counter
counter = 0

class GUI:
    def __init__(self):
        self.stuff = ["foo", "bar", "fooest", "barest"]
        for i in self.stuff:
            self.process(i)
        self.printAll()

    def process(self, i):
        global counter
        counter += 1
        self.__dict__.update({"ex{}".format(counter): i})

    def printAll(self):
        global counter
        while counter > 0:
            exec("print(self.ex{})".format(counter))
            counter -= 1

GUI()
This does work; printAll(self) does print self.ex1 through self.ex4. Is there a way to do it without exec()? Please help!
global counter
counter = 0

class GUI:
    def __init__(self):
        self.stuff = ["foo", "bar", "fooest", "barest"]
        for i in self.stuff:
            self.process(i)
        self.printAll()

    def process(self, i):
        global counter
        counter += 1
        self.__dict__.update({"ex{}".format(counter): i})

    def printAll(self):
        global counter
        while counter > 0:
            print(eval("self.ex{}".format(counter)))
            counter -= 1

GUI()
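If you want to avoid building strings for eval() as well, getattr does the same attribute lookup directly from a string; a sketch of printAll written that way (same ex1..exN names as above):

def printAll(self):
    global counter
    while counter > 0:
        print(getattr(self, "ex{}".format(counter)))
        counter -= 1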
I hope this is suitable for your case.
I'm switching from Ruby to Python for a project. I appreciate the fact that Python has first-class functions and closures, so this question ought to be easy. I just haven't figured out what is idiomatically correct for Python:
In Ruby, I could write:
def with_quietude(level, &block)
  begin
    saved_gval = gval
    gval = level
    yield
  ensure
    gval = saved_gval
  end
end
and call it like this:
with_quietude(3) {
  razz_the_jazz
  begin_the_beguine
}
(Note: I'm not asking about Python try/finally handling nor about saving and restoring variables -- I just wanted a non-trivial example of wrapping a block inside some other code.)
update
Or, since some of the answers are getting hung up on the global assignments in the previous example when I'm really asking about closures, what if the call was as follows? (Note that this doesn't change the definition of with_quietude):
def frumble(x)
  with_quietude {
    razz_the_jazz(x)
    begin_the_beguine(2 * x)
  }
end
How would you implement something similar in Python (and not get laughed at by the Python experts)?
Looking more into ruby's yield, it looks like you want something like contextlib.contextmanager:
from contextlib import contextmanager

def razz_the_jazz():
    print(gval)

@contextmanager
def quietude(level):
    global gval
    saved_gval = gval
    gval = level
    try:
        yield
    finally:
        gval = saved_gval

gval = 1

with quietude(3):
    razz_the_jazz()

razz_the_jazz()
This script outputs:
3
1
indicating that our context manager did reset gval in the global namespace. Of course, I wouldn't use this context manager since it only works in the global namespace (it won't work with locals inside a function, for example).
This is basically a limitation of how assignment works in Python: assignment rebinds a name to an object, and you can never mutate an object simply by assigning to a bare name. The only ways to mutate an object are to assign to one of its attributes or to an item (a[x] = whatever, via __setitem__).
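A tiny illustration of that distinction (the names here are just for the example):

def rebind(x, value):
    x = value              # rebinds the local name x; the caller sees nothing

def mutate(x, value):
    x[0] = value           # mutates the object both names point at

a = [1]
rebind(a, [2])
print(a)                   # [1]
mutate(a, 2)
print(a)                   # [2]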
A word of warning if you are coming from Ruby: all Python 'def's are basically the same as Ruby 'proc's; Python doesn't have an equivalent for Ruby's 'def'.
You can get very similar behaviour to what you are asking for by defining your own functions in the scope of the calling function:
def quietude(level, my_func):
    global gval
    saved_gval = gval
    gval = level
    my_func()
    gval = saved_gval

def my_func():
    razz_the_jazz()
    begin_the_beguine()

quietude(3, my_func)
---- EDIT: Request for further information: -----
Python's lambdas are limited to a single expression, so they are not as flexible as Ruby's blocks.
To pass functions with arguments around, I would recommend partial functions; see the code below:
import functools

def run(a, b):
    print(a)
    print(b)

def runner(value, func):
    func(value)

def start():
    s = functools.partial(run, 'first')
    runner('second', s)

start()  # prints 'first' then 'second'
---- Edit 2 More information ----
Python functions are only called when '()' is added to them; this is different from Ruby, where the parentheses are optional. The code below runs b_method in start() and a_method in run():
def a_method():
    print('a_method is running')
    return 'a'

def b_method():
    print('b_method is running')
    return 'b'

def run(a, b):
    print(a())  # a is the a_method function object; it runs here
    print(b)    # b is already the string 'b'

def start():
    run(a_method, b_method())  # b_method runs here, before run() is entered
I like the answer that mgilson gives, so it gets the check. This is just a small expansion on the capabilities of @contextmanager for someone coming from the Ruby world.
gval = 0

from contextlib import contextmanager

@contextmanager
def quietude(level):
    global gval
    saved_gval = gval
    gval = level
    try:
        yield
    finally:
        gval = saved_gval

def bebop(x):
    with quietude(3):
        print("first", x * 2, "(gval =", gval, ")")
        print("second", x * 4, "(gval =", gval, ")")

bebop(100)
bebop("xxxx")
This prints out:
first 200 (gval = 3 )
second 400 (gval = 3 )
first xxxxxxxx (gval = 3 )
second xxxxxxxxxxxxxxxx (gval = 3 )
This shows that everything within the scope of the with block has access to the lexically closed variables, and behaves more or less the way someone coming from the Ruby world would expect.
Good stuff.
I am maintaining a little library of useful functions for interacting with my company's APIs and I have come across (what I think is) a neat question that I can't find the answer to.
I frequently have to request large amounts of data from an API, so I do something like:
class Client(object):
    def __init__(self):
        self.data = []

    def get_data(self, offset=0):
        done = False
        while not done:
            data = get_more_starting_at(offset)
            self.data.extend(data)
            offset += 1
            if not data:
                done = True
This works fine and allows me to restart the retrieval where I left off if something goes horribly wrong. However, since python functions are just regular objects, we can do stuff like:
def yo():
    yo.hi = "yo!"
    return None
and then we can interrogate yo about its properties later, like:
yo.hi => "yo!"
My question is: can I rewrite my class-based example to pin the data to the function itself, without referring to the function by name? I know I can do this by:
def get_data(offset=0):
    done = False
    get_data.data = []
    while not done:
        data = get_more_starting_from(offset)
        get_data.data.extend(data)
        offset += 1
        if not data:
            done = True
    return get_data.data
but I would like to do something like:
def get_data(offset=0):
    done = False
    self.data = []  # <===== this is the bit I can't figure out
    while not done:
        data = get_more_starting_from(offset)
        self.data.extend(data)  # <====== also this!
        offset += 1
        if not data:
            done = True
    return self.data  # <======== want to refer to the "current" object
Is it possible to refer to the "current" object by anything other than its name?
Something like "this", "self", or "memememe!" is what I'm looking for.
I don't understand why you want to do this, but it's what a fixed point combinator allows you to do:
import functools

def Y(f):
    @functools.wraps(f)
    def Yf(*args):
        return inner(*args)
    inner = f(Yf)
    return Yf

@Y
def get_data(f):
    def inner_get_data(*args):
        # This is your real get_data function:
        # define it as normal,
        # but refer to it as 'f' inside itself.
        print('setting get_data.foo to', args)
        f.foo = args
    return inner_get_data

get_data(1, 2, 3)
print(get_data.foo)
So you call get_data as normal, and it "magically" knows that f means itself.
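Applied to the get_data from the question, a sketch would look like this (get_more_starting_from is the question's own placeholder; the data ends up pinned to the decorated get_data without naming it inside):

@Y
def get_data(f):
    def inner_get_data(offset=0):
        # f is the decorated get_data itself, so the data rides on the function
        f.data = []
        done = False
        while not done:
            data = get_more_starting_from(offset)
            f.data.extend(data)
            offset += 1
            if not data:
                done = True
        return f.data
    return inner_get_data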
You could do this, but (a) the data is not per-function-invocation but per-function, and (b) it's much easier to achieve this sort of thing with a class.
If you had to do it, you might do something like this:
def ybother(a, b, c, yrselflambda=lambda: ybother):
    yrself = yrselflambda()
    # other stuff
The lambda is necessary, because you need to delay evaluation of the term ybother until something has been bound to it.
Alternatively, and increasingly pointlessly:
from functools import partial

def ybother(a, b, c, yrself=None):
    # whatever
    yrself.data = []  # this will blow up if the default argument is used
    # more stuff

bothered = partial(ybother, yrself=ybother)
Or:
def unbothered(a, b, c):
    def inbothered(yrself):
        # whatever
        yrself.data = []

    return inbothered, inbothered(inbothered)
This last version gives you a different function object each time, which you might like.
There are almost certainly introspective tricks to do this, but they are even less worthwhile.
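For completeness, one such introspective trick is to look the running function up by its own compiled name; a sketch (fragile, and still tied to the name under the hood; selfish is just an illustrative name):

import inspect

def selfish(a, b, c):
    # find "this" function via the current frame's code-object name
    me = globals()[inspect.currentframe().f_code.co_name]
    me.data = getattr(me, 'data', [])
    me.data.append((a, b, c))
    return me.data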
Not sure what doing it like this gains you, but what about using a decorator?
import functools

def add_self(f):
    @functools.wraps(f)
    def wrapper(*args, **kwargs):
        if not getattr(f, 'content', None):
            f.content = []
        return f(f, *args, **kwargs)
    return wrapper

@add_self
def example(self, arg1):
    self.content.append(arg1)
    print(self.content)

example(1)
example(2)
example(3)
OUTPUT
[1]
[1, 2]
[1, 2, 3]