FastAPI Dependency Injection with CLI Arguments - python

I want my FastAPI routes to include a dependency built from the parameters of a CLI.
In the skeleton code below, a, b and c are the CLI parameters, Consort is the dependency and King is the FastAPI class.
How can this be achieved?
import sys

import uvicorn
from fastapi import FastAPI, Depends

import charles, william, george, elizabeth  # program modules

app = FastAPI()

class Consort:
    def __init__(self, a, b, c):
        self.x = a_stuff(a)
        self.y = b_stuff(b)
        self.z = c_stuff(c)

class King:
    def __init__(self, a, b, c):
        ... ...

    @router.post("/create")
    async def create(self, consort=Depends(Consort())):
        return charles.create()

    @router.post("/read")
    async def read(self, consort=Depends(Consort())):
        return william.read()

    @router.post("/update")
    async def update(self, consort=Depends(Consort())):
        return george.update()

    @router.post("/delete")
    async def delete(self, consort=Depends(Consort())):
        return elizabeth.delete()

def main(args):
    a, b, c = arg_parse()
    service = King(a, b, c)
    uvicorn.run(... ... ...)
    return

if __name__ == "__main__":
    main(sys.argv)

I think what you're after is this: you want to pass a, b, and c into your constructor, use them to construct a Consort, and then access that Consort instance in your routes. With classy-fastapi you can do what you want with something like:
import uvicorn
from classy_fastapi import Routable, post

import charles

class Consort:
    def __init__(self, a, b, c):
        self.x = a_stuff(a)
        self.y = b_stuff(b)
        self.z = c_stuff(c)

class King(Routable):
    def __init__(self, consort):
        super().__init__()
        self.consort = consort

    @post("/create")
    async def create(self):
        return charles.create(self.consort)

def main(args):
    a, b, c = arg_parse()
    consort = Consort(a, b, c)
    service = King(consort)
    uvicorn.run(... ... ...)
    return
Note that, unlike the Depends way of doing things, you do not inject into individual methods. Instead, you inject, via the constructor, into the class that holds the routes.
Full disclosure: I'm the author of classy-fastapi.
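To wire this up end to end, main() also needs to mount the instance's routes on a FastAPI app before handing it to uvicorn. Here is a minimal sketch of that wiring (assuming Routable exposes its collected routes as a .router attribute, and that arg_parse is your own CLI helper):

import sys

import uvicorn
from fastapi import FastAPI

def main(args):
    a, b, c = arg_parse()               # your CLI parsing
    consort = Consort(a, b, c)          # built once from the CLI parameters
    service = King(consort)             # routes receive the Consort via the constructor

    app = FastAPI()
    app.include_router(service.router)  # mount the routes collected by Routable
    uvicorn.run(app, host="127.0.0.1", port=8000)

if __name__ == "__main__":
    main(sys.argv)

This keeps the CLI parsing, the dependency construction, and the web-app wiring in one place, so the routes never need Depends at all.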

Related

Implementing a generic and dynamic facade for python classes

I wanted to implement a sort of facade pattern in Python. However, because I need to do the same for all methods, I'd like to do it in a generic way. Let me use an example:
class MyObject:
    def __init__(self, *args, **kwargs):
        # do something with args/kwargs
        pass

    def method1(self, x):
        # do something
        pass

    def method2(self, x, a):
        # do something
        pass

    def method3(self, x, a, b):
        # do something
        pass

class MyFacade:
    def __init__(self, *args, **kwargs):
        self.x = SOMETHING
        self.obj = MyObject(*args, **kwargs)

    def method1(self):
        return self.obj.method1(self.x)

    def method2(self, a):
        return self.obj.method2(self.x, a)

    def method3(self, a, b):
        return self.obj.method3(self.x, a, b)
Now, because I have several classes like MyObject, I'd like a generic way of creating a MyFacade for each of them without having to write code for each method (they all do more or less the same thing). Also, if MyObject changes, I'd like MyFacade not to be impacted, and rather to handle any interface change in MyObject transparently.
Thanks for the help!
EDIT:
This works, but methods inherited from MyInterface raise a TypeError because of the extra argument.
class MyObject:
    def method1(self, x):
        print(x)

    def method2(self, x, a):
        print(x, a)

    def method3(self, x, a, b):
        print(x, a, b)

class MyInterface:
    def methodX(self):
        print("YAY!")

class MyFacade(MyInterface, MyObject):
    def __init__(self):
        self.x = "WHATEVER"

    def __getattribute__(self, item):
        result = super().__getattribute__(item)
        if callable(result):
            return lambda *args, **kwargs: result(self.x, *args, **kwargs)
        return result
EDIT:
I modified the condition this way and now the problem with MyInterface is gone:
if callable(result) and result.__name__ in MyObject.__dict__:
The obvious way of doing this is to use the fact that class and function names are variables and can be assigned, so MyFacade could be defined as follows:
class MyFacade:
    def __init__(self, obj, *args, **kwargs):
        self.x = SOMETHING
        self.obj = obj(*args, **kwargs)

    def method1(self):
        return self.obj.method1(self.x)

    def method2(self, a):
        return self.obj.method2(self.x, a)

    def method3(self, a, b):
        return self.obj.method3(self.x, a, b)

and the set-up call would be e.g.:

fac = MyFacade(MyObject, *args, **kwargs)
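If you want to avoid writing a wrapper per method entirely, one generic option (a sketch, not part of the original answer; GenericFacade is a made-up name and SOMETHING stays a placeholder) is to resolve attributes lazily with __getattr__ and prepend x with functools.partial:

from functools import partial

class GenericFacade:
    def __init__(self, obj_cls, x, *args, **kwargs):
        self._x = x
        self._obj = obj_cls(*args, **kwargs)

    def __getattr__(self, name):
        # __getattr__ is only reached when normal lookup on the facade fails,
        # so the facade's own attributes are never wrapped.
        attr = getattr(self._obj, name)
        if callable(attr):
            return partial(attr, self._x)   # prepend the shared first argument
        return attr

fac = GenericFacade(MyObject, "WHATEVER")
fac.method2("a")   # calls MyObject.method2("WHATEVER", "a")

Because nothing here hard-codes MyObject's method names, interface changes in the wrapped class do not require touching the facade.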

How to add a method into a dict using a decorator?

What I want is something like Flask's route registration:
class ClassName(object):
    def __init__(self):
        self.handle_dict = {}

    def handle(self, data):
        h = self.handle_dict.get(data)
        h()

    @handle_route("open")
    def open_handle(self):
        """do something"""
        pass

    @handle_route("close")
    def close_handle(self):
        """do something"""
        pass

    def handle_route():
        """How to implement this wrapper?"""

I know I could write the dict in the __init__ function:

self.handle_dict = {"open": self.open_handle, "close": self.close_handle}
But the decorator way looks clearer, right?
I have tried

def handle_router(self, path):
    def wrapper(f):
        self.handle_map[path] = f
        return f
    return wrapper

but self is not allowed in decorator args; @handle_router(self, "path") is invalid.
Effectively, the handle dict should be some sort of class attribute, as it is filled at class definition time and all instances share the same route-handler mapping. However, the class does not exist yet when the decorator is called and evaluated. You could do something like the following:
from functools import partial

class A:
    router = {}

    def handle(self, data):
        m = self.router.get(data)
        return m(self)

    @partial(router.setdefault, 'open')
    def open_handle(self):
        return 5

>>> a = A()
>>> a.handle('open')
5
Or be more explicit and extract the routing functionality into a reusable class:
from functools import partial

class Router(dict):
    __call__ = lambda self, route: partial(self.setdefault, route)

class A:
    router = Router()

    def handle(self, data):
        m = self.router.get(data)
        return m(self)

    @router('open')
    def open_handle(self):
        return 5
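For completeness, the Router-based variant behaves exactly like the first version (a quick check, assuming the class definitions above):

>>> a = A()
>>> a.handle('open')
5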
Instead of decorating the functions, you could just use a consistent naming convention:

class ClassName(object):
    def handle(self, type):
        getattr(self, '{}_handle'.format(type))()

    def open_handle(self):
        print('open_handle')

    def close_handle(self):
        print('close_handle')

>>> ClassName().handle('open')
open_handle

Defining a Method from a String in Python 3 and Referencing It as a Method

I have a need to allow the user to define a function that processes data in an object (the wisdom and security implications of this have been discussed at length in another question and would just be duplicate comments here).
I'd like the function to act just like any other method. That is
def my_method(self):...
Would be invoked with:
obj_handle.my_method()
I almost have this achieved below, except that the resulting function needs to be explicitly passed self as an argument, rather than receiving it as the first argument, as is typical for a method.
You can see this in property p, where I have the odd self.process(self) call.
I imagine that I need to provide something to exec that is like the globals() dictionary, but I'm not certain of several things:
Is this correct?
What is the equivalent of globals() in a class?
Does this solve the problem? If not what do I need to do?
So the question is, how do I get an exec()-defined function to act as an object's method?
class test:
    def __init__(self, a, b):
        self.a = a
        self.b = b

    @property
    def p(self):
        return self.process(self)

    def set_process(self, program):
        func_dict = {}
        proc_fun = exec(program, func_dict)
        setattr(self, 'process', func_dict['process'])

    def process(self):
        return self.a + self.b

t = test(1, 2)

prog = '''\
def process(self):
    return self.a * self.b
'''

t.set_process(prog)
t.p
Answered in @juanpa.arrivillaga's comment above:
Set the function on the class if you want its descriptor protocol to work and bind the instance when called on an instance. So one solution: just make your set_process a classmethod. – juanpa.arrivillaga
Working result
class test:
    def __init__(self, a, b):
        self.a = a
        self.b = b

    @property
    def p(self):
        return self.process()

    @classmethod
    def set_process(cls, program):
        func_dict = {}
        exec(program, func_dict)   # exec returns None; the new function lands in func_dict
        setattr(cls, 'process', func_dict['process'])

    def process(self):
        return self.a + self.b

t = test(1, 2)

prog = '''\
def process(self):
    return self.a * self.b
'''

test.set_process(prog)
t.p
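Because set_process now mutates the class, the exec-defined function is picked up by every instance, existing and new (a quick check, assuming the working-result code above):

>>> t = test(1, 2)
>>> test.set_process(prog)
>>> t.p
2
>>> test(3, 4).p
12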
If you want to operate on instances rather than classes:

import types

class Test:
    def __init__(self, a, b):
        self.a = a
        self.b = b

    @property
    def p(self):
        return self.process()

    def set_process(self, program):
        d = dict()
        exec(program, d)
        self.process = types.MethodType(d["process"], self)

    def process(self):
        return self.a + self.b

prog = '''\
def process(self):
    return self.a * self.b
'''

t = Test(1, 2)
t.set_process(prog)
print(t.p)

t = Test(1, 2)
print(t.p)
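Run as-is, this prints 2 for the patched instance and then 3 for the fresh one: types.MethodType binds the exec-defined function to a single instance, so other instances keep the class-level process. Expected output, assuming the code above:

2
3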

Python encapsulate data for a class

I have two Python classes, A and B, where B inherits from A.
At runtime, I only have one instance of class A, but many instances of class B.
class A:
    def __init__(self, a):
        self.a = a

    def _init2(self, AA):
        self.a = AA.a

class B(A):
    def __init__(self, AA, b):
        super()._init2(AA)
        self.b = b

AA = A(0)
BB = B(AA, 1)

Is this a good way of writing it? It seems ugly ...
It would probably be better to remove _init2 and only use __init__. Having both is confusing and unnatural.
class A:
    def __init__(self, obj):
        # I believe that this is what you tried to achieve:
        # copy the field if given an existing A, otherwise treat obj as the raw value
        if isinstance(obj, A):
            self.a = obj.a
        else:
            self.a = obj

class B(A):
    def __init__(self, A, b):
        super().__init__(A)
        self.b = b
On a side note, there are too many things called A here. The A in def __init__(self, A, b): is a parameter that shadows the class, so it is most probably not referring to the A that you expect.
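Another idiomatic option (a sketch, not from the original answer; the from_a name is made up) is to keep a single __init__ per class and add an alternate constructor as a classmethod for the copy-from-an-A case:

class A:
    def __init__(self, a):
        self.a = a

class B(A):
    def __init__(self, a, b):
        super().__init__(a)
        self.b = b

    @classmethod
    def from_a(cls, aa, b):
        # build a B by copying the shared field from an existing A instance
        return cls(aa.a, b)

AA = A(0)
BB = B.from_a(AA, 1)

This keeps both constructors explicit and avoids the name clash between the class A and the parameter A.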

self variable of class as an argument of a class method (python)

Here's my problem:
Given any two functions, e.g. f(x,a) and g(x,b), I want to build a new function, say F(f,g), which returns the product of f and g. So:
F(f,g) = f*g = f(x, a) * g(x, b) = F(x, a, b)
I want to do this with as little hardcoding as possible. So, for h(x, c, d), I would get F(f,h) = F(x, a, c, d).
Given that I then want to minimize F, I thought of building a class. Here's an MWE:
import numpy as np
from inspect import getargspec

def f(x, a):
    return np.tanh(x*a)

def g(x, b):
    return np.power(x, b)

def h(x, c, d):
    return x*c + np.log(x)

class fit_func(object):
    def __init__(self, data, *args):
        self.data = data
        self.func_a = args[0]
        self.func_b = args[1]
        self.args_a = getargspec(args[0])[0][1:]
        self.args_b = getargspec(args[1])[0][1:]
at this point, I thought of including the following __call__ method:
def __call__(self, *self.args_a, *self.args_b):
    return self.func_a(self.data, self.args_a) * self.func_b(data, self.args_b)
I thought: this way an instance of the class, say F = fit_func(some_data_array, f, g), would be callable as F(a, b). However, Python doesn't like self.args_a and self.args_b among the arguments of __call__, and I understand why.
Does anybody know a clever way to obtain this? Thank you very much in advance.
If you only accept positional arguments, it is better to save the number of arguments each function takes, and then pass the proper slice of args to each function in __call__:
import numpy as np
from inspect import getargspec   # note: deprecated; inspect.getfullargspec is the modern equivalent

class fit_func(object):
    def __init__(self, *args):
        self.func_a = args[0]
        self.func_b = args[1]
        self.size_arg_a = len(getargspec(self.func_a)[0])
        self.size_arg_b = len(getargspec(self.func_b)[0])

    def __call__(self, *args):
        # the first size_arg_a positional args go to func_a, the rest to func_b
        return self.func_a(*args[:self.size_arg_a]) * self.func_b(*args[self.size_arg_a:])
Demo:

def f(x, a):
    return np.tanh(x*a)

def h(x, c, d):
    return x*c + np.log(x)

F = fit_func(f, h)
print(F(3, 4, 3, 5, 7))

16.0986122875
If you want to pass keyword arguments to the final function:

import numpy as np
from inspect import getargspec
from operator import itemgetter

class fit_func(object):
    def __init__(self, *args):
        self.func_a = args[0]
        self.func_b = args[1]
        self.arg_a = getargspec(self.func_a)[0]
        self.arg_b = getargspec(self.func_b)[0]

    def __call__(self, **kwargs):
        arg_a = itemgetter(*self.arg_a)(kwargs)
        arg_b = itemgetter(*self.arg_b)(kwargs)
        return self.func_a(*arg_a) * self.func_b(*arg_b)
Demo:

def f(x, a):
    return np.tanh(x*a)

def h(x, c, d):
    return x*c + np.log(x)

F = fit_func(f, h)
print(F(x=3, a=4, c=5, d=7))

16.0986122875
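If you actually want the F(x, a, c, d) signature from the question, where x is shared and passed only once, a small variant of the same idea works (a sketch, not part of the original answer; shared_x_fit_func is a made-up name and both wrapped functions are assumed to take x as their first parameter):

import numpy as np
from inspect import getfullargspec

class shared_x_fit_func(object):
    def __init__(self, func_a, func_b):
        self.func_a = func_a
        self.func_b = func_b
        # number of parameters after x for each wrapped function
        self.n_a = len(getfullargspec(func_a).args) - 1
        self.n_b = len(getfullargspec(func_b).args) - 1

    def __call__(self, x, *params):
        a_params = params[:self.n_a]
        b_params = params[self.n_a:self.n_a + self.n_b]
        return self.func_a(x, *a_params) * self.func_b(x, *b_params)

def f(x, a):
    return np.tanh(x*a)

def h(x, c, d):
    return x*c + np.log(x)

F = shared_x_fit_func(f, h)
print(F(3, 4, 5, 7))   # f(3, 4) * h(3, 5, 7), same 16.0986... as above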
