Is there a way to get query string values as function parameters in Flask?
For example, the request will look like this:
http://localhost:5000/user?age=15&gender=Male
And I'd like the code to look similar to this:
@app.route("/user")
def getUser(age, gender):
    ...
If you are willing to write a decorator, anything is possible:
from functools import wraps
from flask import request

def extract_args(*names, **names_and_processors):
    user_args = ([{"key": name} for name in names] +
                 [{"key": key, "type": processor}
                  for (key, processor) in names_and_processors.items()])

    def decorator(f):
        @wraps(f)
        def wrapper(*args, **kwargs):
            final_args, final_kwargs = args_from_request(user_args, args, kwargs)
            return f(*final_args, **final_kwargs)
        return wrapper

    return decorator if len(names) < 1 or not callable(names[0]) else decorator(names[0])

def args_from_request(to_extract, provided_args, provided_kwargs):
    # Ignoring provided_* here - ideally, you'd merge them
    # in whatever way makes the most sense for your application
    results = {}
    for arg in to_extract:
        results[arg["key"]] = request.args.get(**arg)
    return provided_args, results
Usage:
@app.route("/somewhere")
@extract_args("gender", age=int)
def somewhere(gender, age):
    return jsonify(gender=gender, age=age)
Flask views themselves do not require any arguments; you read the query string from request.args instead. You can write:
from flask import request

@app.route("/user")
def getUser():
    age = request.args.get('age')
    gender = request.args.get('gender')
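If you also want type conversion and defaults, request.args is a Werkzeug MultiDict, whose get() accepts default and type arguments. A minimal sketch, assuming the same app and route as above:
from flask import request, jsonify

@app.route("/user")
def getUser():
    # age arrives as a string; type=int converts it, falling back to the default if missing or invalid
    age = request.args.get('age', default=None, type=int)
    gender = request.args.get('gender', default='unknown')
    return jsonify(age=age, gender=gender)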
You can do this by installing Flask-Publisher and adding a "@publish()" decorator before your getUser function:
from publisher import publish

@app.route("/user")
@publish()
def getUser(age: int, gender):
    ...
This uses inspection to get the arguments, optionally uses type annotations to convert the incoming arguments, and pulls the values from the GET/POST data.
The Problem
I 'need' to count the number of arguments a function object expects to be passed. My commands are stored via their object, so I can reference each one individually. I already tried inspect, and while it seems perfect, for some reason it cannot distinguish between 0 arguments and 1 argument: both output 1. It seems I will have to link the actual code, as it must be an error in my personal script. Here is the code:
# ============== Imports ==============
from shlex import split
from inspect import getfullargspec

# ============== Main ==============
class Command:
    def __init__(self, fn, name=None, aliases=None):
        self.name = name or fn.__name__
        self.aliases = aliases or []
        self.fn = fn

    def __call__(self, *args, **kwargs):
        return self.fn(*args, **kwargs)

    @property
    def all_names(self):
        return (self.name, *self.aliases)

# ============= Storage =============
class Commands(dict):
    def __init__(self, not_found: str='{} was not recognized.', argmismatch: str='{} takes {} arguments but {} were given.'):
        self.flag = [not_found, argmismatch]
        super().__init__()

    def _add_command(self, command):
        for cmd_name in command.all_names:
            if cmd_name in self:
                raise ValueError(f'Name or alias assigned to function {command.fn.__name__} is duplicate: {cmd_name}')
            self[cmd_name] = command

    def add_command(self, name=None, aliases=None):
        def inner_fn(fn):
            self._add_command(Command(fn, name, aliases))
            return fn
        return inner_fn

    def execute(self, user_input):
        self.command, *args = self.parse(user_input or 'no_input')
        if self.command.lower() in self:
            # THIS IS WHERE THE ISSUE HAPPENS
            print(f'Passed: {len(args)}')
            print(f'Expected: {len(getfullargspec(self[self.command])[0])}')
            if not (len(args) == len(getfullargspec(self[self.command])[0])):
                return (False, self.flag[1].format(self.command, len(getfullargspec(self[self.command])[0]), len(args)))
            self[self.command](*args)
            return (True, 'Command found!')
        else:
            return (False, self.flag[0].format(self.command))

    @staticmethod
    def parse(string):
        if (string == 'no_input'):
            return ['', '']
        return split(string)

cmd = Commands()

@cmd.add_command(name='foo')
def foo():
    print('bar')

cmd.execute('foo bar')
Not the Problem
I have already tried checking whether the list is filled with a null value; it is not.
It does this across platforms (VSCode, Repl.it, PyDroid), so it is not a weird OS issue.
I'm not sure if it could be len() treating it weirdly, but I know a normal empty list such as [] is counted as one.
I'm using this for the error message and possibly for auto-tips on what might have been wrong with the command. I would love any help and will update my post with new info and possible correct and incorrect solutions.
Similar Posts
This post asks a similar question, but the solution provided does not match what I require.
You don't need getfullargspec if you only access the zeroth element.
my_funct.__code__.co_argcount
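A quick illustration; the last line is specific to the question's Commands class (its stored values are Command wrappers, so you would inspect the wrapped fn rather than the wrapper itself, whose __call__ signature only ever counts self):
def foo():
    print('bar')

def add(a, b):
    return a + b

print(foo.__code__.co_argcount)   # 0
print(add.__code__.co_argcount)   # 2

# With the Commands storage from the question, inspect the underlying
# function, not the wrapper instance:
# expected = self[self.command].fn.__code__.co_argcount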
The example below is taken from the Python Cookbook, 3rd Edition, section 9.5.
I placed breakpoints at each line to understand the flow of execution. Below is the code sample, its output, and the questions I have. I have tried to explain my questions; let me know if you need further info.
from functools import wraps, partial
import logging

# Utility decorator to attach a function as an attribute of obj
def attach_wrapper(obj, func=None):
    if func is None:
        return partial(attach_wrapper, obj)
    setattr(obj, func.__name__, func)
    return func

def logged(level, name=None, message=None):
    def decorate(func):
        logname = name if name else func.__module__
        log = logging.getLogger(logname)
        logmsg = message if message else func.__name__

        @wraps(func)
        def wrapper(*args, **kwargs):
            log.log(level, logmsg)
            return func(*args, **kwargs)

        @attach_wrapper(wrapper)
        def set_message(newmsg):
            nonlocal logmsg
            logmsg = newmsg

        return wrapper
    return decorate

# Example use
@logged(logging.DEBUG)
def add(x, y):
    return x + y

logging.basicConfig(level=logging.DEBUG)
add.set_message('Add called')
# add.set_level(logging.WARNING)
print(add(2, 3))
output is
DEBUG:__main__:Add called
5
I understand the concept of decorators, but this is a little confusing.
Scenario 1: When the line @logged(logging.DEBUG) is stepped through in the debugger, we get
decorate = <function logged.<locals>.decorate at 0x000000000...>
Question: why would the control go back to execute the function def decorate? Is it because the decorate function is on the top of the stack?
Scenario 2: When executing @attach_wrapper(wrapper), the control goes to attach_wrapper(obj, func=None), and the partial call returns
func = functools.partial(attach_wrapper, wrapper)
Question: why would the control go back to execute def attach_wrapper(obj, func=None), and how is the value <function logged.<locals>.decorate.<locals>.set_message at 0x000000000...> passed to attach_wrapper as func this time?
Scenario 1
This:

@logged(logging.DEBUG)
def add(x, y):
    ....

is the same as this:

def add(x, y):
    ....
add = logged(logging.DEBUG)(add)
Note that there are two calls there: first logged(logging.DEBUG) returns decorate and then decorate(add) is called.
Scenario 2
Same as in Scenario 1, this:
@attach_wrapper(wrapper)
def set_message(newmsg):
    ...

is the same as this:

def set_message(newmsg):
    ...
set_message = attach_wrapper(wrapper)(set_message)
Again, there are two calls: first attach_wrapper(wrapper) returns the partial object and then partial(set_message) is called.
In other words...
logged and attach_wrapper are not decorators. They are functions which return decorators. That is why two calls are made: one to the function which returns the decorator and another to the decorator itself.
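A minimal, self-contained sketch of the same two-step pattern (repeat is a made-up factory, not from the cookbook example):
def repeat(times):                    # call 1: the factory returns the decorator
    def decorate(func):               # call 2: the decorator receives func
        def wrapper(*args, **kwargs):
            for _ in range(times):
                func(*args, **kwargs)
        return wrapper
    return decorate

@repeat(2)                            # same as: greet = repeat(2)(greet)
def greet():
    print("hi")

greet()                               # prints "hi" twice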
I'm developing a threaded data collector that queries storage devices.
I have the following thread runner method:
def threadrunner(cfg, filer, APPLIANCES, kwargs):
    _client = login(cfg, filer)
    _filer = DeviceConfig(_client, Version=True)
    APPLIANCES.append(_filer)
And the code that calls it:
newthr = threading.Thread(target=threadrunner, args=(cfg, appliance, APPLIANCES, kwargs))
newthr.name = appliance
newthr.start()
What is the best way to pass a variable list of **kwargs to threadrunner()? In the example above, Version=True is one kwarg I need to pass. All kwargs that I need to pass are simple True|False toggles.
You can pass keyword arguments through Thread's kwargs parameter.
You could do something like this:
def threadrunner(*args, **kwargs):
    _client = login(*args)
    if 'APPLIANCES' not in kwargs:
        raise ValueError('Appliances not in kwargs')
    APPLIANCES = kwargs.pop('APPLIANCES', None)
    # Assuming your boolean values go here.
    _filer = DeviceConfig(_client, **kwargs)
    APPLIANCES.append(_filer)
newthr = threading.Thread(target=threadrunner, args=(cfg, appliance), kwargs={'APPLIANCES': APPLIANCES, 'boolkey1': boolval1, ...})
newthr.name = appliance
newthr.start()
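For reference, a self-contained variant of this idea that you can run directly; login and DeviceConfig are stand-in stubs for the question's real functions, and the toggle names are made up. Here APPLIANCES stays a positional argument and threading.Thread simply forwards the kwargs dict to threadrunner, so there is nothing to pop:
import threading

def login(cfg, filer):                      # stub for the question's login()
    return 'client-for-%s' % filer

def DeviceConfig(client, **toggles):        # stub for the question's DeviceConfig()
    return (client, toggles)

def threadrunner(cfg, filer, appliances, **kwargs):
    _client = login(cfg, filer)
    _filer = DeviceConfig(_client, **kwargs)    # forward the True/False toggles
    appliances.append(_filer)

APPLIANCES = []
newthr = threading.Thread(
    target=threadrunner,
    args=('cfg', 'filer01', APPLIANCES),
    kwargs={'Version': True, 'Health': False},  # Thread passes this dict through as **kwargs
)
newthr.start()
newthr.join()
print(APPLIANCES)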
I'm a Python noob and I'm trying to solve my problems the 'Pythonic' way. I have a class whose __init__ method takes 6 parameters. I need to validate each param and throw/raise an Exception if any fails to validate.
Is this the right way?
class DefinitionRunner:
    def __init__(self, canvasSize, flightId, domain, definitionPath, harPath):
        self.canvasSize = canvasSize
        self.flightId = flightId
        self.domain = domain
        self.harPath = harPath
        self.definitionPath = definitionPath
        # ... bunch of validation checks ...
        # ... if fails, raise ValueError ...
If you want the variables to be settable independently of __init__, you could use properties to implement validations in separate methods.
They work only for new style classes though, so you need to define the class as class DefinitionRunner(object)
So for example,
@property
def canvasSize(self):
    return self._canvasSize

@canvasSize.setter
def canvasSize(self, value):
    # some validation here
    self._canvasSize = value
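A runnable sketch putting this together (the positive-integer check is just an illustrative validation, not from the question):
class DefinitionRunner(object):
    def __init__(self, canvasSize):
        self.canvasSize = canvasSize            # goes through the setter below

    @property
    def canvasSize(self):
        return self._canvasSize

    @canvasSize.setter
    def canvasSize(self, value):
        if not isinstance(value, int) or value <= 0:
            raise ValueError('canvasSize must be a positive integer')
        self._canvasSize = value

runner = DefinitionRunner(1024)                 # ok
DefinitionRunner(-5)                            # raises ValueError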
Broadly speaking, that looks like the way you'd do it. Strictly speaking, though, you might as well do the validation before rather than after assignment, especially if assignment could be time- or resource-intensive. Also, style convention says not to align assignment blocks the way you have.
I would do it the way you did it, except for the validation: I would validate in a setter method and use it to set the attributes.
You could do something like this. Make a validator for each type of input. Make a helper function to run validation:
def validate_and_assign(obj, items_d, validators):
    # validate all entries
    for key, validator in validators.items():
        if not validator(items_d[key]):
            raise ValueError("Validation for %s failed" % (key,))
    # set all entries
    for key, val in items_d.items():
        setattr(obj, key, val)
Which you'd use like this:
class DefinitionRunner:
    validators = {
        'canvasSize': canvasSize_validator,
        'flightId': flightId_validator,
        'domain': domain_validator,
        'definitionPath': definitionPath_validator,
        'harPath': harPath_validator,
    }

    def __init__(self, canvasSize, flightId, domain, definitionPath, harPath):
        validate_and_assign(self, {
            'canvasSize': canvasSize,
            'flightId': flightId,
            'domain': domain,
            'definitionPath': definitionPath,
            'harPath': harPath,
        }, DefinitionRunner.validators)
The validators might be the same function, of course, if the data type is the same.
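For illustration, a couple of hypothetical validators (the names match the dict above; the rules themselves are made up):
def canvasSize_validator(value):
    # accept only positive integers
    return isinstance(value, int) and value > 0

def domain_validator(value):
    # accept only non-empty strings
    return isinstance(value, str) and len(value) > 0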
I'm not sure if this is exactly "Pythonic", but I've defined a function decorator called require_type. (To be honest, I think I found it somewhere online.)
def require_type(my_arg, *valid_types):
    '''
    A simple decorator that performs type checking.
    @param my_arg: string indicating argument name
    @param valid_types: list of valid types
    '''
    def make_wrapper(func):
        if hasattr(func, 'wrapped_args'):
            wrapped = getattr(func, 'wrapped_args')
        else:
            body = func.__code__
            wrapped = list(body.co_varnames[:body.co_argcount])
        try:
            idx = wrapped.index(my_arg)
        except ValueError:
            raise NameError(my_arg)

        def wrapper(*args, **kwargs):
            def fail():
                all_types = ', '.join(str(typ) for typ in valid_types)
                raise TypeError("'%s' was type %s, expected to be in following list: %s"
                                % (my_arg, type(arg), all_types))
            if len(args) > idx:
                arg = args[idx]
                if not isinstance(arg, valid_types):
                    fail()
            else:
                if my_arg in kwargs:
                    arg = kwargs[my_arg]
                    if not isinstance(arg, valid_types):
                        fail()
            return func(*args, **kwargs)

        wrapper.wrapped_args = wrapped
        return wrapper
    return make_wrapper
Then, to use it:
class SomeObject(object):
    @require_type("prop1", str)
    @require_type("prop2", numpy.complex128)
    def __init__(self, prop1, prop2):
        pass
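A quick check of the behavior (the argument values are hypothetical; numpy is only imported because the example above uses numpy.complex128):
import numpy

ok = SomeObject('hello', numpy.complex128(1 + 2j))   # both checks pass
SomeObject(42, numpy.complex128(0))                   # raises TypeError: 'prop1' was type <class 'int'>, ...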
Objective:
Given something like:
stackoverflow.users['55562'].questions.unanswered()
I want it converted into the following:
http://api.stackoverflow.com/1.1/users/55562/questions/unanswered
I have been able to achieve that, using the following class:
class SO(object):
    def __init__(self, **kwargs):
        self.base_url = kwargs.pop('base_url', []) or 'http://api.stackoverflow.com/1.1'
        self.uriparts = kwargs.pop('uriparts', [])
        for k, v in kwargs.items():
            setattr(self, k, v)

    def __getattr__(self, key):
        self.uriparts.append(key)
        return self.__class__(**self.__dict__)

    def __getitem__(self, key):
        return self.__getattr__(key)

    def __call__(self, **kwargs):
        return "%s/%s" % (self.base_url, "/".join(self.uriparts))

if __name__ == '__main__':
    print SO().abc.mno.ghi.jkl()
    print SO().abc.mno['ghi'].jkl()

#prints the following
http://api.stackoverflow.com/1.1/abc/mno/ghi/jkl
http://api.stackoverflow.com/1.1/abc/mno/ghi/jkl
Now my problem is I can't do something like:
stackoverflow = SO()
user1 = stackoverflow.users['55562']
user2 = stackoverflow.users['55462']
print user1.questions.unanswered
print user2.questions.unanswered
#prints the following
http://api.stackoverflow.com/1.1/users/55562/users/55462/questions/unanswered
http://api.stackoverflow.com/1.1/users/55562/users/55462/questions/unanswered/questions/unanswered
Essentially, user1 and user2 refer to the same SO object, so they can't represent different users.
Any pointers on how to do that would be helpful, because this additional level of functionality would make the API far more interesting.
IMHO, when you recreate a new stackoverflow object, you need to separate the new instance's arguments from the old instance's attributes with a deep copy:
import copy
........

def __getattr__(self, key):
    new_dict = copy.deepcopy(self.__dict__)
    new_dict['uriparts'].append(key)
    return self.__class__(**new_dict)
....
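To see why the deep copy matters, here is a standalone illustration of the shared-list problem (independent of the SO class):
import copy

original = {'uriparts': ['users']}

shallow = dict(original)                # shallow copy: both dicts share the same inner list
shallow['uriparts'].append('55562')
print(original['uriparts'])             # ['users', '55562'] -- the original was mutated

original = {'uriparts': ['users']}
deep = copy.deepcopy(original)          # deep copy: the inner list is independent
deep['uriparts'].append('55562')
print(original['uriparts'])             # ['users'] -- the original is untouched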
If you want more flexibility on the URI parts, an abstraction is needed for a cleaner design. For example:
class SOURIParts(object):
    def __init__(self, so, uriparts, **kwargs):
        self.so = so
        self.uriparts = uriparts
        for k, v in kwargs.items():
            setattr(self, k, v)

    def __getattr__(self, key):
        return SOURIParts(self.so, self.uriparts + [key])

    def __getitem__(self, key):
        return self.__getattr__(key)

    def __call__(self, **kwargs):
        return "%s/%s" % (self.so.base_url, "/".join(self.uriparts))


class SO(object):
    def __init__(self, base_url='http://api.stackoverflow.com/1.1'):
        self.base_url = base_url

    def __getattr__(self, key):
        return SOURIParts(self, [key])   # include the first attribute in the URI parts

    def __getitem__(self, key):
        return self.__getattr__(key)
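With this design each attribute access builds a fresh SOURIParts, so (sketching the usage from the question, with calls added to build the URLs) user1 and user2 no longer share state:
stackoverflow = SO()
user1 = stackoverflow.users['55562']
user2 = stackoverflow.users['55462']
print(user1.questions.unanswered())
print(user2.questions.unanswered())
# http://api.stackoverflow.com/1.1/users/55562/questions/unanswered
# http://api.stackoverflow.com/1.1/users/55462/questions/unanswered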
I hope this helps.
You could override __getslice__ (Python 2.7) or __getitem__() (Python 3.x) and use a memoizing decorator, so that if the slice you request (the user id) has already been looked up it would use cached results; otherwise it could retrieve the results and populate the existing SO instance object.
However, I think a more OO way to solve the problem is to make SO a pure lookup module that returns Stack Overflow user objects, which would then have the deeper-digging lookups for profile details. But that's just me.