In the Flask-RESTful example application posted here, the TODOS collection is a global variable.
After the Todo Resource is registered:
api.add_resource(Todo, '/todos/<string:todo_id>')
The Todo methods access the global TODOS variable when web requests are processed.
Instead, I want to instantiate the API within a class and pass a TODOS collection that is a class variable rather than a global variable.
When using Flask-RESTful, what is the proper way to allow methods in a Resource class to gain access to a variable provided by the calling class without using global variables?
Looks like I didn't understand you the first time. You can use a classmethod to construct your API class, then add it as a resource:
from flask import Flask
from flask_restful import Api, Resource


class SomeApi(Resource):
    def get(self):
        return self.response

    @classmethod
    def make_api(cls, response):
        cls.response = response
        return cls


class KillerApp(object):
    def __init__(self):
        self.app = Flask(__name__)
        app_api = Api(self.app)
        MyApi = SomeApi.make_api({"key": "value"})
        app_api.add_resource(MyApi, "/api/path")

    def run(self):
        self.app.run()


KillerApp().run()
add_resource accepts two arguments, resource_class_args and resource_class_kwargs, used to pass arguments to the constructor. (source)
So you could have a Resource:
from flask_restful import Resource


class TodoNext(Resource):
    def __init__(self, **kwargs):
        # smart_engine is a black box dependency
        self.smart_engine = kwargs['smart_engine']

    def get(self):
        return self.smart_engine.next_todo()
You can inject the required dependency into TodoNext like so:
smart_engine = SmartEngine()

api.add_resource(TodoNext, '/next',
                 resource_class_kwargs={'smart_engine': smart_engine})
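To make that snippet self-contained, here is a minimal sketch of the surrounding setup (my addition; SmartEngine is a hypothetical stand-in for the black-box dependency). Flask-RESTful passes resource_class_kwargs on to the resource's __init__ each time a request is dispatched to it:

from flask import Flask
from flask_restful import Api


# Hypothetical stand-in for the black-box dependency
class SmartEngine:
    def next_todo(self):
        return {"task": "write docs"}


app = Flask(__name__)
api = Api(app)

smart_engine = SmartEngine()
api.add_resource(TodoNext, '/next',
                 resource_class_kwargs={'smart_engine': smart_engine})

if __name__ == '__main__':
    app.run()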
Based on @Greg's answer, I've added an initialization check in the __init__ method.
Creating and registering the Todo Resource class for the flask-restful API:
todo = Todo.create(InMemoryTodoRepository())
api.add_resource(todo, '/api/todos/<todo_id>')
The Todo Resource class:
from flask_restful import reqparse, abort, Resource

from server.ApiResources.DTOs.TodoDTO import TodoDTO
from server.Repositories.ITodoRepository import ITodoRepository
from server.Utils.Exceptions import InvalidInstantiationError
from server.Utils.GeneralUtils import member_exists


class Todo(Resource):
    """shows a single todo item and lets you delete a todo item

    use the 'create' class method to instantiate the class
    """

    def __init__(self):
        if not member_exists(self, "todo_repository", of_type=ITodoRepository):
            raise InvalidInstantiationError("Todo", "todo_repository", "ITodoRepository", "create")

        self._parser = reqparse.RequestParser()
        self._parser.add_argument('task', type=str)

    @classmethod
    def create(cls, todo_repository):
        """
        :param todo_repository: an instance of ITodoRepository
        :return: class object of Todo Resource
        """
        cls.todo_repository = todo_repository
        return cls
The member_exists helper method:
def member_exists(obj, member, of_type):
    member_value = getattr(obj, member, None)
    if member_value is None:
        return False
    if not isinstance(member_value, of_type):
        return False
    return True
and the custom exception class:
class InvalidInstantiationError(Exception):
    def __init__(self, origin_class_name, missing_argument_name, missing_argument_type, instantiation_method_to_use):
        message = """Invalid instantiation for class '{class_name}':
        missing instantiation argument '{arg}' of type '{arg_type}'.
        Please use the '{method_name}' factory class method""" \
            .format(class_name=origin_class_name,
                    arg=missing_argument_name,
                    arg_type=missing_argument_type,
                    method_name=instantiation_method_to_use)

        # Call the base class constructor with the parameters it needs
        super(InvalidInstantiationError, self).__init__(message)
Thus, trying to use the default constructor ends up raising this exception:
server.Utils.Exceptions.InvalidInstantiationError: Invalid instantiation for class 'Todo':
missing instantiation argument 'todo_repository' of type 'ITodoRepository'.
Please use the 'create' factory class method
Edit: this can be useful for dependency injection with flask-restful API Resource classes (with or without IoC).
Edit 2: we can go even cleaner and add another helper function (ready to import):
def must_have(obj, member, of_type, use_method):
    if not member_exists(obj, member, of_type=of_type):
        raise InvalidInstantiationError(obj.__class__.__name__,
                                        member,
                                        of_type.__name__,
                                        use_method)
and then use it in the constructor like this:
from server.Utils.GeneralUtils import must_have


class Todo(Resource):
    def __init__(self):
        must_have(self,
                  member="todo_repository",
                  of_type=ITodoRepository,
                  use_method=Todo.create.__name__)
Related
I'm trying to build a python3 module for an HTTP RESTful API that I've coded.
My idea was to create a base class that should have a request.Session() attribute so I can assign an authorization token header to that and don't worry about it anymore and also a logger function and so on.
The problem is that a class called User inherits from two classes, PublicUser and Base, and I can't initialize them correctly.
It's the first time that I'm working with inherited classes, so obviously I'm missing something.
This is my folder structure:
examplemodule/
|--> __init__.py
|--> classes/
|--> base.py
|--> user.py
base.py
from requests import Session
from requests.sessions import session


class Logger:
    def __init__(self):
        pass

    def log(self, message):
        print(message)


class Base:
    def __init__(self, token=None):
        if not hasattr(self, 'logger'):
            self.logger = Logger()
        if not hasattr(self, 'session'):
            self.session = Session()
        self.session.headers.update(
            {'authorization': 'Token {}'.format(token)}
        )
        # Try to login to see if token is valid, if not raise exception
        # If token is valid then the retrieved user json is saved
        self._user = {
            'id': 1,
            'username': 'test1',
            'email': 'test@test.com'
        }
user.py
from .base import Base

PUBBLIC_USER_ATTRS = ['id', 'username']
PRIVATE_USER_ATTRS = ['email']


class PublicUser:
    def __init__(self, user):
        for k in PUBBLIC_USER_ATTRS:
            setattr(self, k, user[k])


class User(Base, PublicUser):
    def __init__(self, token=None):
        super(Base, self).__init__(token=token)
        super(PublicUser, self).__init__(self._user)
        for k in PRIVATE_USER_ATTRS:
            setattr(self, k, self._user[k])
__init__.py
from .classes.user import User
then to test my module I run:
import examplemodule
examplemodule.User(token='')
but unfortunately I get a TypeError at super(Base, self).__init__(token=token)
TypeError: super() takes no keyword arguments
What is the best way to get through this?
super is meant for cooperative inheritance, where all involved classes are using super in a way that ensures all necessary methods are called. That means super should also be used by base classes, even if all they inherit from is object.
class Base:
    def __init__(self, *, token=None, **kwargs):
        super().__init__(**kwargs)
        if not hasattr(self, 'logger'):
            self.logger = Logger()
        if not hasattr(self, 'session'):
            self.session = Session()
        self.session.headers.update(
            {'authorization': 'Token {}'.format(token)}
        )
        # Try to login to see if token is valid, if not raise exception
        # If token is valid then the retrieved user json is saved
        self._user = {
            'id': 1,
            'username': 'test1',
            'email': 'test@test.com'
        }


class PublicUser:
    def __init__(self, *, id, username, **kwargs):
        super().__init__(**kwargs)
        self.id = id
        self.username = username


class User(Base, PublicUser):
    def __init__(self, *, email, **kwargs):
        super().__init__(**kwargs)
        self.email = email


u = User(token='...', id='...', username='...', email='...')
User.__init__ only has to make one call to super().__init__, knowing that its base classes also use super().__init__ to always call the next __init__, until reaching the end of the MRO. You start with User.__init__, which calls Base.__init__, which calls PublicUser.__init__ (not object.__init__), which finally calls object.__init__.
At each step, the remaining keyword arguments are split between the "known" arguments, which are handled, and the "unknown" arguments, which are passed up the line. Eventually, all keyword arguments should have been extracted and handled by the time object.__init__ is called.
See https://rhettinger.wordpress.com/2011/05/26/super-considered-super/ for a more thorough explanation of how this works in practice (in particular, why keyword arguments are preferred over positional arguments).
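As a quick sanity check (a short sketch assuming the class definitions above), printing the MRO shows the exact order that the chain of super().__init__ calls follows:

print([cls.__name__ for cls in User.__mro__])
# ['User', 'Base', 'PublicUser', 'object']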
You probably want to call constructors of super-classes like this:
Base.__init__(self, token=token)
PublicUser.__init__(self, self._user)
So I need to have some routes inside a class, but the route methods need access to self (to reach the class's attributes).
However, FastAPI then assumes self is its own required argument and puts it in as a query parameter.
This is what I've got:
app = FastAPI()


class Foo:
    def __init__(self, y: int):
        self.x = y

    @app.get("/somewhere")
    def bar(self):
        return self.x
However, this returns 422 unless you go to /somewhere?self=something. The issue with this is that self is then a str, and thus useless.
I need some way that I can still access self without having it as a required argument.
This can be done by using an APIRouter's add_api_route method:
from fastapi import FastAPI, APIRouter


class Hello:
    def __init__(self, name: str):
        self.name = name
        self.router = APIRouter()
        self.router.add_api_route("/hello", self.hello, methods=["GET"])

    def hello(self):
        return {"Hello": self.name}


app = FastAPI()
hello = Hello("World")
app.include_router(hello.router)
Example:
$ curl 127.0.0.1:5000/hello
{"Hello":"World"}
add_api_route's second argument (endpoint) has type Callable[..., Any], so any callable should work (as long as FastAPI can figure out how to parse its arguments from the HTTP request data). This callable is also known in the FastAPI docs as the path operation function (referred to as "POF" below).
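For instance (a sketch of my own, not from the original answer), a callable object works too, since FastAPI only needs to inspect the callable's signature:

from fastapi import FastAPI


class Pinger:
    # FastAPI inspects __call__'s signature; it takes no parameters,
    # so no query or body parameters are expected
    def __call__(self):
        return {"ping": "pong"}


app = FastAPI()
app.add_api_route("/ping", Pinger(), methods=["GET"])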
Why decorating methods doesn't work
WARNING: Ignore the rest of this answer if you're not interested in a technical explanation of why the code in the OP's answer doesn't work
Decorating a method with @app.get and friends in the class body doesn't work because you'd be effectively passing Hello.hello, not hello.hello (a.k.a. self.hello) to add_api_route. Bound and unbound methods (known simply as "functions" since Python 3) have different signatures:
import inspect
inspect.signature(Hello.hello) # <Signature (self)>
inspect.signature(hello.hello) # <Signature ()>
FastAPI does a lot of magic to try to automatically parse the data in the HTTP request (body or query parameters) into the objects actually used by the POF.
By using an unbound method (=regular function) (Hello.hello) as the POF, FastAPI would either have to:
Make assumptions about the nature of the class that contains the route and generate self (i.e. call Hello.__init__) on the fly. This would likely add a lot of complexity to FastAPI and is a use case that FastAPI devs (understandably) don't seem interested in supporting. It seems the recommended way of dealing with application/resource state is deferring the whole problem to an external dependency with Depends (see the sketch at the end of this answer).
Somehow be able to generate a self object from the HTTP request data (usually JSON) sent by the caller. This is not technically feasible for anything other than strings or other builtins and therefore not really usable.
What happens in the OP's code is #2. FastAPI tries to parse the first argument of Hello.hello (=self, of type Hello) from the HTTP request query parameters, obviously fails and raises a RequestValidationError which is shown to the caller as an HTTP 422 response.
Parsing self from query parameters
Just to prove #2 above, here's a (useless) example of when FastAPI can actually "parse" self from the HTTP request:
(Disclaimer: Do not use the code below for any real application)
from fastapi import FastAPI

app = FastAPI()


class Hello(str):
    @app.get("/hello")
    def hello(self):
        return {"Hello": self}
Example:
$ curl '127.0.0.1:5000/hello?self=World'
{"Hello":"World"}
For creating class-based views you can use the @cbv decorator from fastapi-utils. The motivation for using it:
Stop repeating the same dependencies over and over in the signature of related endpoints.
Your sample could be rewritten like this:
from fastapi import Depends, FastAPI
from fastapi_utils.cbv import cbv
from fastapi_utils.inferring_router import InferringRouter


def get_x():
    return 10


app = FastAPI()
router = InferringRouter()  # Step 1: Create a router


@cbv(router)  # Step 2: Create and decorate a class to hold the endpoints
class Foo:
    # Step 3: Add dependencies as class attributes
    x: int = Depends(get_x)

    @router.get("/somewhere")
    def bar(self) -> int:
        # Step 4: Use `self.<dependency_name>` to access shared dependencies
        return self.x


app.include_router(router)
I didn't like the standard way of doing this, so I wrote my own library. You can install it like this:
$ pip install cbfa
Here is an example of how to use it:
from typing import Optional

from fastapi import FastAPI
from pydantic import BaseModel
from cbfa import ClassBased

app = FastAPI()
wrapper = ClassBased(app)


class Item(BaseModel):
    name: str
    price: float
    is_offer: Optional[bool] = None


@wrapper('/item')
class Item:
    def get(item_id: int, q: Optional[str] = None):
        return {"item_id": item_id, "q": q}

    def post(item_id: int, item: Item):
        return {"item_name": item.name, "item_id": item_id}
Note that you don't need to wrap decorators around each method. It is enough to name the methods according to their purpose in the HTTP protocol. The whole class is turned into a decorator.
I put the routes in def __init__. It works normally.
Example:
from fastapi import FastAPI, status
from fastapi.responses import HTMLResponse


class CustomAPI(FastAPI):
    def __init__(self, title: str = "CustomAPI") -> None:
        super().__init__(title=title)

        @self.get('/')
        async def home():
            """
            Home page
            """
            return HTMLResponse("<h1>CustomAPI</h1><br/><a href='/docs'>Try api now!</a>", status_code=status.HTTP_200_OK)
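A minimal way to run it (my addition, assuming uvicorn is installed):

import uvicorn

app = CustomAPI()
uvicorn.run(app, host="127.0.0.1", port=8000)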
I've just released a project that lets you use a class instance for route handling with simple decorators. cbv is cool but the routing is on the class itself, not instances of the class. Being able to use a class instance lets you do dependency injection in a way that feels simpler and more intuitive to me.
For example, the following works as expected:
from fastapi import FastAPI
from classy_fastapi import Routable, get, delete


class UserRoutes(Routable):
    """Inherits from Routable."""

    # Note injection here by simply passing values
    # to the constructor. Other injection frameworks are also
    # supported as there's nothing special about this __init__ method.
    def __init__(self, dao: Dao) -> None:
        """Constructor. The Dao is injected here."""
        super().__init__()
        self.__dao = dao

    @get('/user/{name}')
    def get_user_by_name(self, name: str) -> User:
        # Use our injected DAO instance.
        return self.__dao.get_user_by_name(name)

    @delete('/user/{name}')
    def delete_user(self, name: str) -> None:
        self.__dao.delete(name)


def main():
    args = parse_args()
    # Configure the DAO per command line arguments
    dao = Dao(args.url, args.user, args.password)
    # Simple intuitive injection
    user_routes = UserRoutes(dao)

    app = FastAPI()
    # router member inherited from Routable and configured per the annotations.
    app.include_router(user_routes.router)
You can find it on PyPi and install via pip install classy-fastapi.
In this case I'm able to wire up the controller using a Python class and pass it a collaborator via dependency injection.
Here is the full example plus tests:
from abc import abstractmethod
from typing import Optional

import uvicorn
from fastapi import FastAPI


class UseCase:
    @abstractmethod
    def run(self):
        pass


class ProductionUseCase(UseCase):
    def run(self):
        return "Production Code"


class AppController:
    def __init__(self, app: FastAPI, use_case: UseCase):
        @app.get("/items/{item_id}")
        def read_item(item_id: int, q: Optional[str] = None):
            return {
                "item_id": item_id, "q": q, "use_case": use_case.run()
            }


def startup(use_case: UseCase = ProductionUseCase()):
    app = FastAPI()
    AppController(app, use_case)
    return app


if __name__ == "__main__":
    uvicorn.run(startup(), host="0.0.0.0", port=8080)
Another approach is to have a decorator class that takes parameters. The routes are registered beforehand and added at run-time:
from functools import wraps

_api_routes_registry = []


class api_route(object):
    def __init__(self, path, **kwargs):
        self._path = path
        self._kwargs = kwargs

    def __call__(self, fn):
        cls, method = fn.__repr__().split(" ")[1].split(".")
        _api_routes_registry.append(
            {
                "fn": fn,
                "path": self._path,
                "kwargs": self._kwargs,
                "cls": cls,
                "method": method,
            }
        )

        @wraps(fn)
        def decorated(*args, **kwargs):
            return fn(*args, **kwargs)

        return decorated

    @classmethod
    def add_api_routes(cls, router):
        for reg in _api_routes_registry:
            if router.__class__.__name__ == reg["cls"]:
                router.add_api_route(
                    path=reg["path"],
                    endpoint=getattr(router, reg["method"]),
                    **reg["kwargs"],
                )
And define a custom router that inherits from APIRouter and adds the routes in __init__:
from fastapi import APIRouter


class ItemRouter(APIRouter):
    @api_route("/", description="this reads an item")
    def read_item(a: str = "de"):
        return [7262, 324323, a]

    @api_route("/", methods=["POST"], description="add an item")
    def post_item(a: str = "de"):
        return a

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        api_route.add_api_routes(self)


app.include_router(
    ItemRouter(
        prefix="/items",
    )
)
You inherit from FastAPI in your class and use the FastAPI decorators as method calls (I am going to show it using APIRouter, but your example should work analogously):
from fastapi import FastAPI


class Foo(FastAPI):
    def __init__(self, y: int):
        super().__init__()
        self.x = y

        self.include_router(
            health.router,
            prefix="/api/v1/health",
        )
I'm facing some difficulties unit testing my project, mainly due to the fact that the controllers reference a singleton produced by a factory.
A simple demonstration of this problem would be:
databasefactory.py
class DataBaseFactory(object):
    # Let's imagine we support a number of databases. The client implementations all give us a similar interface to use
    # This is a singleton through the whole application
    _database_client = None

    @classmethod
    def get_database_client(cls):
        # type: () -> DataBaseClientInterFace
        if not cls._database_client:
            cls._database_client = DataBaseClient()
        return cls._database_client


class DataBaseClientInterFace(object):
    def get(self, key):
        # type: (any) -> any
        raise NotImplementedError()

    def set(self, key, value):
        # type: (any, any) -> any
        raise NotImplementedError()


class DataBaseClient(DataBaseClientInterFace):
    # Mock some real world database - The unittest mocking should be providing another client
    _real_world_data = {}

    def get(self, key):
        return self._real_world_data[key]

    def set(self, key, value):
        self._real_world_data[key] = value
        return value
model.py
from .databasefactory import DataBaseFactory


class DataModel(object):
    # The DataBase type never changes so its a constant
    DATA_BASE_CLIENT = DataBaseFactory.get_database_client()

    def __init__(self, model_name):
        self.model_name = model_name

    def save(self):
        # type: () -> None
        """
        Save the current model into the database
        """
        key = self.get_model_key()
        data = vars(self)
        self.DATA_BASE_CLIENT.set(key, data)

    @classmethod
    def load(cls):
        # type: () -> DataModel
        """
        Load the model
        """
        key = cls.get_model_key()
        data = cls.DATA_BASE_CLIENT.get(key)
        return cls(**data)

    @staticmethod
    def get_model_key():
        return 'model_test'
datacontroller.py
from .databasefactory import DataBaseFactory
from .model import DataModel


class DataBaseController(object):
    """
    Does some stuff with the database
    """
    # Also needs the database client. This is the same instance as on DataModel
    DATA_BASE_CLIENT = DataBaseFactory.get_database_client()
    _special_key = 'not_model_key'

    @staticmethod
    def save_a_model():
        a_model = DataModel('test')
        a_model.save()

    @staticmethod
    def load_a_model():
        a_model = DataModel.load()
        return a_model

    @classmethod
    def get_some_special_key(cls):
        return cls.DATA_BASE_CLIENT.get(cls._special_key)

    @classmethod
    def set_some_special_key(cls):
        return cls.DATA_BASE_CLIENT.set(cls._special_key, 1)
And finally the unittest itself:
test_simple.py
import unittest

from .databasefactory import DataBaseClientInterFace
from .datacontroller import DataBaseController
from .model import DataModel


class MockedDataBaseClient(DataBaseClientInterFace):
    _mocked_data = {DataBaseController._special_key: 2,
                    DataModel.get_model_key(): {'model_name': 'mocked_test'}}

    def get(self, key):
        return self._mocked_data[key]

    def set(self, key, value):
        self._mocked_data[key] = value
        return value


class SimpleOne(unittest.TestCase):

    def test_controller(self):
        """
        I want to mock the singleton instance referenced in both DataBaseController and DataModel
        As DataBaseController imports DataModel, both classes have the DATA_BASE_CLIENT attribute instantiated with the factory result
        """
        # Initially it'll throw a KeyError
        with self.assertRaises(KeyError):
            DataBaseController.get_some_special_key()

        # It's impossible to just change the DATA_BASE_CLIENT in the DataBaseController as DataModel still points towards the real implementation
        # Should not be done as it won't change anything for the data model
        DataBaseController.DATA_BASE_CLIENT = MockedDataBaseClient()
        self.assertEqual(DataBaseController.get_some_special_key(), 2)

        # Will fail as the DataModel still uses the real implementation
        # I'd like to mock DATA_BASE_CLIENT for both classes without explicitly inserting a new class
        # The project I'm working on has a number of these constants that make it a real hassle to inject a new one
        # There has to be a better way to tackle this issue
        model = DataBaseController.load_a_model()
The moment the unittest imports the DataBaseController, DataModel is imported through the DataBaseController module.
This means that both DATA_BASE_CLIENT class variables are instantiated.
If my factory were to detect that it is running inside a unittest, it still would not matter, as the import happens outside the unittest.
My question is: is there a way to mock this singleton and replace it across the whole application at once?
Replacing the cached instance on the factory is not an option, as the references in the classes point to the old object.
It might be a design flaw to put these singleton instances as class variables in the first place. But I'd rather retrieve a class variable than call the factory each time for the singleton.
In your use case, a single module is in charge of providing the singleton to the whole application. So I would try to inject the mock in that module before it is used by anything else. The problem is that the mock cannot be fully constructed before the other classes are declared. A possible way is to construct the singleton in two passes: the first pass does not depend on anything, then that minimal object is used to construct the classes, and then its internal dictionary is populated. Code could be:
import unittest

from .databasefactory import DataBaseClientInterFace


class MockedDataBaseClient(DataBaseClientInterFace):
    _mocked_data = {}  # no dependence outside databasefactory

    def get(self, key):
        return self._mocked_data[key]

    def set(self, key, value):
        self._mocked_data[key] = value
        return value


# inject the mock into DataBaseFactory
from .databasefactory import DataBaseFactory
DataBaseFactory._database_client = MockedDataBaseClient()

# use the empty mock to construct other classes
from .datacontroller import DataBaseController
from .model import DataModel

# and populate the mock
DataBaseFactory._database_client._mocked_data.update(
    {DataBaseController._special_key: 2,
     DataModel.get_model_key(): {'model_name': 'mocked_test'}})


class SimpleOne(unittest.TestCase):

    def test_controller(self):
        """
        I want to mock the singleton instance referenced in both DataBaseController and DataModel
        As DataBaseController imports DataModel, both classes have the DATA_BASE_CLIENT attribute instantiated with the factory result
        """
        self.assertEqual(DataBaseController.get_some_special_key(), 2)
        model = DataBaseController.load_a_model()
        self.assertEqual('mocked_test', model.model_name)
But beware: this assumes that the test procedure does not load model.py or datacontroller.py before test_simple.py
I am currently in the process of writing a Flask application that routes endpoints to a variety of "Actions." These actions all implement a parent function called "run()"
In code:
import abc


class Action(object):
    __metaclass__ = abc.ABCMeta

    @classmethod
    def authenticated(self):
        print("bypassing action authentication")
        return True

    @classmethod
    def authorized(self):
        print("bypassing action authorization")
        return True

    @classmethod
    @abc.abstractmethod
    def execute(self):
        raise NotImplementedError("must override execute!")

    @classmethod
    def response(self, executeResult):
        return executeResult

    @classmethod
    def run(self):
        result = ""
        if self.authenticated() & self.authorized():
            result = self.execute()
        return self.response(result)
The intent is that all actually used actions are derived members of this Action class that, at bare minimum, implement an execute() function that differentiates them. Unfortunately, when I attempt to add routes for these:
app.add_url_rule('/endone/', methods=['GET'], view_func=CoreActions.ActionOne.run)
app.add_url_rule('/endtwo/', methods=['GET'], view_func=CoreActions.ActionTwo.run)
I receive the following error:
AssertionError: View function mapping is overwriting an existing endpoint function: run
Does anyone know a possible solution to this issue? Thanks!
The common approach to generating view functions is to use Flask views. Subclass your Action class from flask.views.View; the dispatch_request method is used instead of run:
import abc

from flask.views import View


class Action(View):
    __metaclass__ = abc.ABCMeta

    def authenticated(self):
        print("bypassing action authentication")
        return True

    def authorized(self):
        print("bypassing action authorization")
        return True

    @abc.abstractmethod
    def execute(self):
        raise NotImplementedError("must override execute!")

    def response(self, executeResult):
        return executeResult

    def dispatch_request(self):
        result = ""
        if self.authenticated() & self.authorized():
            result = self.execute()
        return self.response(result)
And you can add routes using the View.as_view() method, which converts your class into a view function:
app.add_url_rule(
    '/endone/',
    methods=['GET'],
    view_func=CoreActions.ActionOne.as_view('endone')
)
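For completeness, a concrete action would then only need to implement execute (a sketch on my part; the original post does not show ActionOne's body):

class ActionOne(Action):
    def execute(self):
        # the only piece each concrete action has to provide
        return "result of action one"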
I'm attempting to use sphinx-doc :automodule: in conjunction with Mock-ed out modules as per this answer. Specifically I'm using Mock for PyQt5 module imports which are not available on ReadTheDocs.
Strangely, I'm finding that any class that inherits from a Mock-ed module's class is not included in the resulting documentation. It appears as though sphinx-doc can't see them for some reason.
My slightly-custom Mock is as follows:
import os

from mock import Mock as MagicMock


class Mock(MagicMock):
    __all__ = ['QApplication', 'pyqtSignal', 'pyqtSlot', 'QObject', 'QAbstractItemModel', 'QModelIndex', 'QTabWidget',
               'QWebPage', 'QTableView', 'QWebView', 'QAbstractTableModel', 'Qt', 'QWidget', 'QPushButton', 'QDoubleSpinBox',
               'QListWidget', 'QDialog', 'QSize', 'QTableWidget', 'QMainWindow', 'QTreeWidget',
               'QAbstractItemDelegate', 'QColor', 'QGraphicsItemGroup', 'QGraphicsItem', 'QGraphicsPathItem',
               'QGraphicsTextItem', 'QGraphicsRectItem', 'QGraphicsScene', 'QGraphicsView']

    def __init__(self, *args, **kwargs):
        super(Mock, self).__init__()

    @classmethod
    def __getattr__(cls, name):
        if name in ('__file__', '__path__'):
            return os.devnull
        else:
            return Mock

    @classmethod
    def __setattr__(*args, **kwargs):
        pass

    def __setitem__(self, *args, **kwargs):
        return

    def __getitem__(self, *args, **kwargs):
        return Mock
The __all__ is required to allow from x import * style imports for the PyQt5 classes.
I can confirm that changing the superclass to object results in the classes being correctly documented, as does removing the Mock (generating locally). Forcing the documentation by using :autoclass: results in a single line saying that the class inherits from Mock.
I resolved this in the end by not using Mock for Qt objects. In my application there is a qt.py wrapper file that handles differences between PyQt4 and PyQt5 and allows them to be subsequently imported for use (while ignoring the Qt namespace rearrangement).
In this file I wrapped the actual import code in a test for ReadTheDocs and then if detected returned a series of dummy classes inheriting directly from object. Additions were required where objects have attributes, but this is only used once in the code base. It'll need to be kept up to date, but it solves the problem.
import os

# ReadTheDocs
ON_RTD = os.environ.get('READTHEDOCS', None) == 'True'

if not ON_RTD:
    pass  # ... do the normal import here ...
else:
    class QMockObject(object):
        def __init__(self, *args, **kwargs):
            super(QMockObject, self).__init__()

        def __call__(self, *args, **kwargs):
            return None

    class QApplication(QMockObject):
        pass

    class pyqtSignal(QMockObject):
        pass

    class pyqtSlot(QMockObject):
        pass

    class QObject(QMockObject):
        pass

    class QAbstractItemModel(QMockObject):
        pass

    class QModelIndex(QMockObject):
        pass

    class QTabWidget(QMockObject):
        pass

    class QWebPage(QMockObject):
        pass

    class QTableView(QMockObject):
        pass

    class QWebView(QMockObject):
        pass

    class QAbstractTableModel(QMockObject):
        pass

    class Qt(QMockObject):
        DisplayRole = None

    class QWidget(QMockObject):
        pass

    class QPushButton(QMockObject):
        pass

    class QDoubleSpinBox(QMockObject):
        pass

    class QListWidget(QMockObject):
        pass

    class QDialog(QMockObject):
        pass

    class QSize(QMockObject):
        pass

    class QTableWidget(QMockObject):
        pass

    class QMainWindow(QMockObject):
        pass

    class QTreeWidget(QMockObject):
        pass

    class QAbstractItemDelegate(QMockObject):
        pass

    class QColor(QMockObject):
        pass

    class QGraphicsItemGroup(QMockObject):
        pass

    class QGraphicsItem(QMockObject):
        pass

    class QGraphicsPathItem(QMockObject):
        pass

    class QGraphicsTextItem(QMockObject):
        pass

    class QGraphicsRectItem(QMockObject):
        pass

    class QGraphicsScene(QMockObject):
        pass

    class QGraphicsView(QMockObject):
        pass

    app = None