I'm developing a Python 2.6 package in which I would like to fetch a list of all classes in a certain directory (within the package) in order to then perform introspection on the class objects.
Specifically, if the directory containing the currently executing module has a sub-dir called 'foobar' and 'foobar' contains .py files specifying class Foo(MyBase), class Bar(MyBase), and class Bar2, I want to obtain a list of references to the class objects that inherit from MyBase, i.e. Foo and Bar, but not Bar2.
I'm not sure if this task actually needs to involve the filesystem, or if the modules in the sub-dir are loaded automatically and just need to be listed via introspection somehow. Any ideas? Example code is much appreciated, since I'm pretty new to Python, and to introspection in particular.
Modules are never loaded automatically, but it should be easy to iterate over the modules in the directory and load them with the __import__ builtin function:
import os
from glob import glob
for file in glob(os.path.join(os.path.dirname(os.path.abspath(__file__)), "*.py")):
    name = os.path.splitext(os.path.basename(file))[0]
    # add package prefix to name, if required
    module = __import__(name)
    for member in dir(module):
        # do something with the member named ``member``
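Since the question specifically asks for the classes that inherit from MyBase, a hedged follow-up sketch (assuming MyBase is importable in the scope doing the scanning) could filter each imported module's members with inspect:
import inspect

def find_subclasses(module, base):
    # Keep only proper subclasses of ``base`` found in ``module``
    # (``base`` itself is excluded).
    return [obj for name, obj in inspect.getmembers(module, inspect.isclass)
            if issubclass(obj, base) and obj is not base]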
I wanted to do the same thing; this is what I ended up with:
import glob
import importlib
import inspect
import os
current_dir = os.path.dirname(os.path.abspath(__file__))
current_module_name = os.path.basename(current_dir)
for file in glob.glob(current_dir + "/*.py"):
    name = os.path.splitext(os.path.basename(file))[0]

    # Ignore __ files
    if name.startswith("__"):
        continue
    module = importlib.import_module("." + name, package=current_module_name)

    for member in dir(module):
        handler_class = getattr(module, member)

        if handler_class and inspect.isclass(handler_class):
            print member
Hope it helps.
Option 1: grep for the regexp ^class (\w+)\(MyBase with the -r parameter, as sketched below.
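A minimal sketch of option 1 done from Python rather than the shell, assuming the files live under foobar/ and the base class is named MyBase:
import glob
import re

# Scan each .py file for class definitions that list MyBase as a base.
for path in glob.glob("foobar/*.py"):
    with open(path) as f:
        for match in re.finditer(r"^class (\w+)\(MyBase", f.read(), re.MULTILINE):
            print(path, match.group(1))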
Option 2: make the directory a package (create an empty __init__.py file), import it and iterate recursively over its members:
import mymodule

def itermodule(mod):
    for member in dir(mod):
        ...

itermodule(mymodule)
Dealt with it myself; this is my version (forked from @krakover's snippet):
Iterates over the directory and imports each script placed there
Filters out abstract classes
Filters out classes that do not inherit from the given base class
Creates a new instance of each iterated class (change this if you don't find it useful)
import importlib
import inspect
import os
import glob
def import_plugins(plugins_package_directory_path, base_class=None, create_instance=True, filter_abstract=True):
    plugins_package_name = os.path.basename(plugins_package_directory_path)

    # -----------------------------
    # Iterate all python files within that directory
    plugin_file_paths = glob.glob(os.path.join(plugins_package_directory_path, "*.py"))
    for plugin_file_path in plugin_file_paths:
        plugin_file_name = os.path.basename(plugin_file_path)

        module_name = os.path.splitext(plugin_file_name)[0]

        if module_name.startswith("__"):
            continue

        # -----------------------------
        # Import python file
        module = importlib.import_module("." + module_name, package=plugins_package_name)

        # -----------------------------
        # Iterate items inside imported python file
        for item in dir(module):
            value = getattr(module, item)
            if not value:
                continue

            if not inspect.isclass(value):
                continue

            if filter_abstract and inspect.isabstract(value):
                continue

            if base_class is not None:
                # Keep only proper subclasses of base_class
                if not (issubclass(value, base_class) and value is not base_class):
                    continue

            # -----------------------------
            # Instantiate / return type (depends on create_instance)
            yield value() if create_instance else value
Usage:
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
plugins_directory_path = os.path.join(SCRIPT_DIR, 'plugins')
plugins = import_plugins(plugins_directory_path, base_class=BasePlugin)
for plugin in plugins:
    plugin.foo()
This assumes there's a subdirectory called plugins that contains implementations of a BasePlugin class.
On platforms that have egrep:
from subprocess import Popen, PIPE
from re import search
def get_classes(directory):
    job = Popen(['egrep', '-ir', '--include=*.py', 'class ', str(directory)], stdout=PIPE)
    fileout, fileerr = job.communicate()
    if fileerr:
        raise Exception(fileerr)
    while directory[-1] == '/':
        directory = directory[:-1]
    found = []
    for line in fileout.split('\n'):
        match = search(r'^([^:]+)\.py:\s*class\s*(\S+)\s*\((\S+)\):', line)
        if match:
            pypath = match.group(1).replace(directory, '').replace('/', '.')[1:]
            cls = match.group(2)
            parents = [x for x in match.group(3).split(',') if x.strip()]
            found.append((pypath, cls, parents))
    return found
For get_classes('.'), egrep returns something like:
./helpers/action.py:class Action(object):
./helpers/get_classes.py: job = Popen(['egrep', '-ir', '--include=*.py', 'class ', str(directory), ], stdout=PIPE) # this is the get_classes script; not a valid result
./helpers/options.py:class Option(object):
which is converted into tuples of the path, class name and direct ancestors:
[('helpers.action', 'Action', ['object']), ('helpers.options', 'Option', ['object'])]
If you just want the paths, that's [item[0] for item in get_classes('.')].
I am maintaining a python package in which I did some restructuring. Now, I want to support clients who still do from my_package.old_subpackage.foo import Foo instead of the new from my_package.new_subpackage.foo import Foo, without explicitly reintroducing many files that do the forwarding. (old_subpackage still exists, but no longer contains foo.py.)
I have learned that there are "loaders" and "finders", and my impression was that I should implement a loader for my purpose, but I only managed to implement a finder so far:
import importlib.util
import sys

RENAMED_PACKAGES = {
    'my_package.old_subpackage.foo': 'my_package.new_subpackage.foo',
}

# TODO: ideally, we would not just implement a "finder", but also a "loader"
# (using the importlib.util.module_for_loader decorator); this would enable us
# to get module contents that also pass identity checks

class RenamedFinder:
    @classmethod
    def find_spec(cls, fullname, path, target=None):
        renamed = RENAMED_PACKAGES.get(fullname)
        if renamed is not None:
            sys.stderr.write(
                f'WARNING: {fullname} was renamed to {renamed}; please adapt import accordingly!\n')
            return importlib.util.find_spec(renamed)
        return None

sys.meta_path.append(RenamedFinder())
https://docs.python.org/3.5/library/importlib.html#importlib.util.module_for_loader and related functionality, however, seem to be deprecated. I know it's not a very pythonic thing I am trying to achieve, but I would be glad to learn that it's achievable.
On import of your package's __init__.py, you can place whatever objects you want into sys.modules; the values you put there will be returned by import statements:
from . import new_package
from .new_package import module1, module2
import sys
sys.modules["my_lib.old_package"] = new_package
sys.modules["my_lib.old_package.module1"] = module1
sys.modules["my_lib.old_package.module2"] = module2
If someone now uses import my_lib.old_package or import my_lib.old_package.module1 they will obtain a reference to my_lib.new_package.module1. Since the import machinery already finds the keys in the sys.modules dictionary, it never even begins looking for the old files.
If you want to avoid importing all the submodules immediately, you can emulate a bit of lazy loading by placing a module with a __getattr__ in sys.modules:
from types import ModuleType
import importlib
import sys
class LazyModule(ModuleType):
    def __init__(self, name, mod_name):
        super().__init__(name)
        self.__mod_name = mod_name

    def __getattr__(self, attr):
        if "_lazy_module" not in self.__dict__:
            self._lazy_module = importlib.import_module(self.__mod_name, package="my_lib")
        return getattr(self._lazy_module, attr)

sys.modules["my_lib.old_package"] = LazyModule("my_lib.old_package", "my_lib.new_package")
In the __init__.py file of the old package, have it import from the newer modules.
Old (package.oldpkg):
foo = __import__("Path to new module")
New (package.newpkg):
class foo:
    bar = "thing"
so package.oldpkg.foo.bar refers to the same object as package.newpkg.foo.bar.
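For example, a minimal sketch of that forwarding (the package and module names here are hypothetical):
# package/oldpkg/__init__.py
# Re-export the relocated module under its old name.
from package.newpkg import foo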
Hope this helps!
I think that this is what you are looking for:
import importlib
import importlib.util
import sys

RENAMED_PACKAGES = {
    'my_package.old_subpackage.foo': 'my_package.new_subpackage.foo',
}

class RenamedFinder:
    @classmethod
    def find_spec(cls, fullname, path, target=None):
        renamed = RENAMED_PACKAGES.get(fullname)
        if renamed is not None:
            sys.stderr.write(
                f'WARNING: {fullname} was renamed to {renamed}; please adapt import accordingly!\n')
            spec = importlib.util.find_spec(renamed)
            spec.loader = cls
            return spec
        return None

    @staticmethod
    def create_module(spec):
        return importlib.import_module(spec.name)

    @staticmethod
    def exec_module(module):
        pass

sys.meta_path.append(RenamedFinder())
Still, IMO the approach that manipulates sys.modules is preferable, as it is more readable, more explicit, and gives you much more control. It might become especially useful in future versions of your package, when my_package.new_subpackage.foo starts to diverge from my_package.old_subpackage.foo while you still need to provide the old one for backward compatibility; for that reason, you might need to preserve the code of both anyway.
Consolidate all the old package names into my_package.
Old packages (old_package):
image_processing (class): will be deleted and replaced by super_image_processing
text_recognition (class): will be deleted and replaced by better_text_recognition
foo (variable): will be moved to better_text_recognition
still_there (class): will not move
New packages:
super_image_processing
better_text_recognition
Redirector (class of my_package):
class old_package:
    image_processing = super_image_processing  # Will be replaced
    text_recognition = better_text_recognition  # Will be replaced
Your main new module (my_package):
# imports here

class super_image_processing:
    def its(gets, even, better):
        pass

class better_text_recognition:
    def now(better, than, ever):
        pass

class old_package:
    # Links
    image_processing = super_image_processing
    text_recognition = better_text_recognition
    still_there = __import__("path to unchanged module")
This allows you to delete some files and keep the rest. If you want to redirect variables you would do:
class super_image_processing:
    def its(gets, even, better):
        pass

class better_text_recognition:
    def now(better, than, ever):
        pass

class old_package:
    # Links
    image_processing = super_image_processing
    text_recognition = better_text_recognition
    foo = text_recognition.foo
    still_there = __import__("path to unchanged module")
Would this work?
I have multiple functions stored in different files, and both the file names and the function names are stored in lists. Is there any option to call the required function without conditional statements?
For example, file1 has functions function11 and function12:
def function11():
    pass

def function12():
    pass
file2 has functions function21 and function22:
def function21():
    pass

def function22():
    pass
and I have the lists
file_name = ["file1", "file2", "file1"]
function_name = ["function12", "function22", "function12"]
I will get the list index from a different function; based on that, I need to call the corresponding function and get the output.
If the other function gives you a list index directly, then you don't need to deal with the function names as strings. Instead, store the functions themselves (without calling them) in the list:
import file1, file2
functions = [file1.function12, file2.function22, file1.function12]
And then call them once you have the index:
functions[index]()
There are ways to do what is called "reflection" in Python and get from the string to a matching-named function. But they solve a problem that is more advanced than what you describe, and they are more difficult (especially if you also have to work with the module names).
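For reference, a hedged sketch of that reflection approach (assuming the module and function names are trusted strings; file1 and function12 are the names from the question):
import importlib

# Resolve the module and the function from their string names at runtime.
module = importlib.import_module("file1")
func = getattr(module, "function12")
func()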
If you have a "whitelist" of functions and modules that are allowed to be called from the config file, but still need to find them by string, you can explicitly create the mapping with a dict:
allowed_functions = {
    'file1': {
        'function11': file1.function11,
        'function12': file1.function12
    },
    'file2': {
        'function21': file2.function21,
        'function22': file2.function22
    }
}
And then invoke the function:
try:
    func = allowed_functions[module_name][function_name]
except KeyError:
    raise ValueError("this function/module name is not allowed")
else:
    func()
The most advanced approach is if you need to load code from a "plugin" module created by the author. You can use the standard library importlib package to use the string name to find a file to import as a module, and import it dynamically. It looks something like:
import os
from importlib.util import spec_from_file_location, module_from_spec

# Look for the file at the specified path, figure out the module name
# from the base file name, import it and make a module object.
def load_module(path):
    folder, filename = os.path.split(path)
    basename, extension = os.path.splitext(filename)
    spec = spec_from_file_location(basename, path)
    module = module_from_spec(spec)
    spec.loader.exec_module(module)
    assert module.__name__ == basename
    return module
This is still unsafe, in the sense that it can look anywhere on the file system for the module. Better if you specify the folder yourself, and only allow a filename to be used in the config file; but then you still have to protect against hacking the path by using things like ".." and "/" in the "filename".
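A minimal sketch of that protection (the folder name and the checks are illustrative, not a complete sandbox):
import os

PLUGIN_DIR = "plugins"  # hypothetical folder you control

def safe_plugin_path(filename):
    # Reject anything that is not a bare .py filename inside PLUGIN_DIR.
    if os.path.sep in filename or ".." in filename or not filename.endswith(".py"):
        raise ValueError("bad plugin filename: %r" % filename)
    return os.path.join(PLUGIN_DIR, filename)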
(I have a project that does something like this. It chooses the paths from a whitelist that is also under the user's control, so I have to warn my users not to trust the path-whitelist file from each other. I also search the directories for modules, and then make a whitelist of plugins that may be used, based only on plugins that are in the directory - so no funny games with "..". And I'm still worried I forgot something.)
Once you have a module name, you can get a function from it by name like:
dynamic_module = load_module(some_path)
try:
    func = getattr(dynamic_module, function_name)
except AttributeError:
    raise ValueError("function not in module")
At any rate, there is no reason to eval anything, or generate and import code based on user input. That is most unsafe of all.
Another alternative; this is not much safer than an eval(), however. Someone with access to the lists you read from the config file could inject malicious code into the lists you import, e.g.:
'from subprocess import call; call(["rm", "-rf", "./*"])'
Code:
import re

# You must first create a directory named "test_module".
# You can do this with code if needed.
# Python recognizes a "module" as a module by the existence of an __init__.py.
# It will load that __init__.py at the "import" command, and you can access the methods it imports.
m = ["os", "sys", "subprocess"]  # Modules to import from
f = ["getcwd", "exit", "call; call('do', '---terrible-things')"]  # Methods to import

# Create an __init__.py
with open("./test_module/__init__.py", "w") as FH:
    for count in range(0, len(m), 1):
        # Writes "from module import method" to __init__.py
        line = "from {} import {}\n".format(m[count], f[count])
        # !!!! SANITIZE THE LINE !!!!!
        if not re.match("^from [a-zA-Z0-9._]+ import [a-zA-Z0-9._]+$", line):
            print("The line '{}' is suspicious. Will not be entered into __init__.py!!".format(line))
            continue
        FH.write(line)

import test_module
print(test_module.getcwd())
OUTPUT:
The line 'from subprocess import call; call('do', '---terrible-things')' is suspicious. Will not be entered into __init__.py!!
/home/rightmire/eclipse-workspace/junkcode
I'm not 100% sure I understand the need; maybe add more detail to the question.
Is something like this what you're looking for?
m = ["os"]
f = ["getcwd"]
command = ''.join([m[0], ".", f[0], "()"])
# Put in some minimum sanity checking and sanitization!!!
if ";" in command or <other dangerous string> in command:
print("The line '{}' is suspicious. Will not run".format(command))
sys.exit(1)
print("This will error if the method isnt imported...")
print(eval(''.join([m[0], ".", f[0], "()"])) )
OUTPUT:
This will error if the method isn't imported...
/home/rightmire/eclipse-workspace/junkcode
As pointed out by @KarlKnechtel, having commands come in from an external file is a gargantuan security risk!
I'm using jsonnet to build json objects that will be used by Python code, calling jsonnet from Python using the bindings. I want to set up my directory structure so that the jsonnet files are in a subdirectory or subdirectories relative to where the Python code is run, something like:
foo.py
jsonnet/
jsonnet/bar.jsonnet
jsonnet/baz.libsonnet
Running foo.py should then be able to use _jsonnet.evaluate_snippet() on strings read from files in jsonnet/ that import other files from jsonnet/. What's the best way to do this?
The default importer uses paths relative to the file from which they are imported. In the case of evaluate_snippet you need to pass the path manually, so that jsonnet knows where to look for imported files.
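For instance, a small sketch (assuming the snippet was read from jsonnet/bar.jsonnet; passing that path as the snippet's filename lets relative imports resolve against the jsonnet/ directory):
import _jsonnet

# The first argument acts as the snippet's filename; relative imports
# inside the snippet resolve against its directory.
with open("jsonnet/bar.jsonnet") as f:
    src = f.read()
json_str = _jsonnet.evaluate_snippet("jsonnet/bar.jsonnet", src)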
If your intention is to process the files you can use a custom importer. (Digression: jsonnet tries to avoid the need to preprocess the source files, so there is probably a better way or a missing feature in jsonnet.)
Below is the complete, working example on how to use custom importers in Python (adjusted to the directory structure provided):
import os
import unittest
import _jsonnet
# Returns content if worked, None if file not found, or throws an exception
def try_path(dir, rel):
    if not rel:
        raise RuntimeError('Got invalid filename (empty string).')
    if rel[0] == '/':
        full_path = rel
    else:
        full_path = dir + rel
    if full_path[-1] == '/':
        raise RuntimeError('Attempted to import a directory')

    if not os.path.isfile(full_path):
        return full_path, None
    with open(full_path) as f:
        return full_path, f.read()

def import_callback(dir, rel):
    full_path, content = try_path(dir, rel)
    if content:
        return full_path, content
    raise RuntimeError('File not found')

class JsonnetTests(unittest.TestCase):
    def setUp(self):
        self.input_filename = os.path.join(
            "jsonnet",
            "bar.jsonnet",
        )
        self.expected_str = '{\n "num": 42,\n "str": "The answer to life ..."\n}\n'
        with open(self.input_filename, "r") as infile:
            self.input_snippet = infile.read()

    def test_evaluate_file(self):
        json_str = _jsonnet.evaluate_file(
            self.input_filename,
            import_callback=import_callback,
        )
        self.assertEqual(json_str, self.expected_str)

    def test_evaluate_snippet(self):
        json_str = _jsonnet.evaluate_snippet(
            "jsonnet/bar.jsonnet",
            self.input_snippet,
            import_callback=import_callback,
        )
        self.assertEqual(json_str, self.expected_str)

if __name__ == '__main__':
    unittest.main()
Note: it's a modified version of an example from the jsonnet repo.
I don't fully get why you would use evaluate_snippet() instead of evaluate_file() (maybe to mask the actual filenames by loading them from Python into strings and calling evaluate_snippet("blah", str)?). In any case, that directory structure should just work.
Example:
jsonnet_test.py:
import json
import _jsonnet
jsonnet_file = "jsonnet/bar.jsonnet"
data = json.loads(_jsonnet.evaluate_file(jsonnet_file))
print("{str} => {num}".format(**data))
jsonnet/bar.jsonnet:
local baz = import "baz.libsonnet";
{
  str: "The answer to life ...",
  num: baz.mult(6, 7),
}
jsonnet/baz.libsonnet:
{
  mult(a, b):: (
    a * b
  ),
}
Output:
$ python jsonnet_test.py
The answer to life ... => 42
I've written an IRC bot using Twisted and now I've gotten to the point where I want to be able to dynamically reload functionality.
In my main program, I do from bots.google import GoogleBot and I've looked at how to use reload to reload modules, but I still can't figure out how to do dynamic re-importing of classes.
So, given a Python class, how do I dynamically reload the class definition?
Reload is unreliable and has many corner cases where it may fail. It is suitable for reloading simple, self-contained scripts. If you want to dynamically reload your code without restarting, consider using forkloop instead:
http://opensourcehacker.com/2011/11/08/sauna-reload-the-most-awesomely-named-python-package-ever/
You cannot reload the module using reload(module) when using the from X import Y form. You'd have to do something like reload(sys.modules['module']) in that case.
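A minimal sketch of that pattern, using the questioner's bots.google module (Python 2 era, where reload is a builtin):
import sys

import bots.google  # make sure the module is in sys.modules

# Reload the module object, then re-bind the name that was imported from it.
reload(sys.modules['bots.google'])
from bots.google import GoogleBot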
This might not necessarily be the best way to do what you want, but it works!
import bots.google

class BotClass(irc.IRCClient):
    def __init__(self):
        global plugins
        plugins = [bots.google.GoogleBot()]

    def privmsg(self, user, channel, msg):
        global plugins
        parts = msg.split(' ')
        trigger = parts[0]
        if trigger == '!reload':
            reload(bots.google)
            plugins = [bots.google.GoogleBot()]
            print "Successfully reloaded plugins"
I figured it out; here's the code I use:
def reimport_class(self, cls):
    """
    Reload and reimport class "cls". Return the new definition of the class.
    """
    # Get the fully qualified name of the class.
    import re
    from twisted.python import reflect
    full_path = reflect.qual(cls)

    # Naively parse the module name and class name.
    # Can be done much better...
    match = re.match(r'(.*)\.([^\.]+)', full_path)
    module_name = match.group(1)
    class_name = match.group(2)

    # This is where the good stuff happens.
    mod = __import__(module_name, fromlist=[class_name])
    reload(mod)

    # The (reloaded definition of the) class itself is returned.
    return getattr(mod, class_name)
Better yet, run the plugins in a subprocess and supervise that subprocess; when the files change, restart the plugins process, as sketched below.
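A minimal sketch of that architecture (the entry-point filename and the polling policy are illustrative assumptions, not a full supervisor):
import os
import subprocess
import time

PLUGIN_SCRIPT = "plugins_main.py"  # hypothetical entry point for the plugin process

# Supervise the plugin process; restart it whenever the script changes on disk.
proc = subprocess.Popen(["python", PLUGIN_SCRIPT])
last_mtime = os.path.getmtime(PLUGIN_SCRIPT)
while True:
    time.sleep(1)
    mtime = os.path.getmtime(PLUGIN_SCRIPT)
    if mtime != last_mtime:
        proc.terminate()
        proc.wait()
        proc = subprocess.Popen(["python", PLUGIN_SCRIPT])
        last_mtime = mtime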
You can use the sys.modules to dynamically reload modules based on user-input.
Say that you have a folder with multiple plugins such as:
module/
    cmdtest.py
    urltitle.py
    ...
You can use sys.modules in this way to load/reload modules based on userinput:
import sys

if 'module.' + userinput in sys.modules:
    reload(sys.modules['module.' + userinput])
else:
    # Module not loaded yet, so there is nothing to reload; import it instead
    try:
        module = __import__("module." + userinput)
        module = sys.modules["module." + userinput]
    except ImportError:
        print('error when trying to load %s' % userinput)
When you do a from ... import ..., it binds the object into the local namespace, so all you need to do is re-import it. However, since the module is already loaded, it will just re-import the same version of the class, so you need to reload the module too. So this should do it:
from bots.google import GoogleBot
...
# do stuff
...
reload(bots.google)
from bots.google import GoogleBot
If for some reason you don't know the module name, you can get it from GoogleBot.__module__.
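For instance, a two-line sketch of that lookup:
import sys

module_name = GoogleBot.__module__  # e.g. 'bots.google'
reload(sys.modules[module_name])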
import sys

def reload_class(class_obj):
    module_name = class_obj.__module__
    module = sys.modules[module_name]
    pycfile = module.__file__
    modulepath = pycfile.replace(".pyc", ".py")
    code = open(modulepath, 'rU').read()
    # Syntax-check the source before reloading
    compile(code, module_name, "exec")
    module = reload(module)
    return getattr(module, class_obj.__name__)
There is a lot of error checking you can do on this; if you're using global variables, you will probably have to figure out what happens then.
I've seen plenty of examples of people extracting all of the classes from a module, usually something like:
# foo.py
class Foo:
    pass
# test.py
import inspect
import foo

for name, obj in inspect.getmembers(foo):
    if inspect.isclass(obj):
        print obj
Awesome.
But I can't find out how to get all of the classes from the current module.
# foo.py
import inspect

class Foo:
    pass

def print_classes():
    for name, obj in inspect.getmembers(???):  # what do I do here?
        if inspect.isclass(obj):
            print obj
# test.py
import foo
foo.print_classes()
This is probably something really obvious, but I haven't been able to find anything. Can anyone help me out?
Try this:
import sys
current_module = sys.modules[__name__]
In your context:
import sys, inspect
def print_classes():
    for name, obj in inspect.getmembers(sys.modules[__name__]):
        if inspect.isclass(obj):
            print(obj)
And even better:
clsmembers = inspect.getmembers(sys.modules[__name__], inspect.isclass)
Because inspect.getmembers() takes a predicate.
I don't know if there's a 'proper' way to do it, but your snippet is on the right track: just add import foo to foo.py, do inspect.getmembers(foo), and it should work fine.
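In other words, a quick sketch of the self-import trick (this works because the module is already in sys.modules while its body executes, so no circular-import loop occurs):
# foo.py
import inspect
import foo  # the module imports itself

class Foo:
    pass

def print_classes():
    for name, obj in inspect.getmembers(foo, inspect.isclass):
        print(obj)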
What about
g = globals().copy()
for name, obj in g.iteritems():
    # e.g. filter with inspect.isclass(obj)
    ...
?
I was able to get all I needed from the dir built-in plus getattr.
# Works on pretty much everything, but be mindful that
# you get lists of strings back
print dir(myproject)
print dir(myproject.mymodule)
print dir(myproject.mymodule.myfile)
print dir(myproject.mymodule.myfile.myclass)
# But, the string names can be resolved with getattr, (as seen below)
Though, it does come out looking like a hairball:
def list_supported_platforms():
    """
    List supported platforms (to match sys.platform)

    #Returns:
        list str: platform names
    """
    return list(itertools.chain(
        *list(
            # Get the class's constant
            getattr(
                # Get the module's first class, which we wrote
                getattr(
                    # Get the module
                    getattr(platforms, item),
                    dir(
                        getattr(platforms, item)
                    )[0]
                ),
                'SYS_PLATFORMS'
            )
            # For each include in platforms/__init__.py
            for item in dir(platforms)
            # Ignore magic, ourselves (index.py) and a base class.
            if not item.startswith('__') and item not in ['index', 'base']
        )
    ))
import pyclbr
print(pyclbr.readmodule(__name__).keys())
Note that the stdlib's Python class browser module uses static source analysis, so it only works for modules that are backed by a real .py file.
If you want all the classes that belong to the current module, you could use this:
import sys, inspect
def print_classes():
    is_class_member = lambda member: inspect.isclass(member) and member.__module__ == __name__
    clsmembers = inspect.getmembers(sys.modules[__name__], is_class_member)
If you use Nadia's answer and you were importing other classes into your module, those classes will be imported too. That's why member.__module__ == __name__ is added to the predicate used in is_class_member; this check ensures that the class really belongs to the module.
A predicate is a function (callable) that returns a boolean value.
This is the line that I use to get all of the classes that have been defined in the current module (i.e. not imported). It's a little long according to PEP-8, but you can change it as you see fit.
import sys
import inspect
classes = [name for name, obj in inspect.getmembers(sys.modules[__name__], inspect.isclass)
           if obj.__module__ == __name__]
This gives you a list of the class names. If you want the class objects themselves just keep obj instead.
classes = [obj for name, obj in inspect.getmembers(sys.modules[__name__], inspect.isclass)
           if obj.__module__ == __name__]
This has been more useful in my experience.
Another solution which works in Python 2 and 3:
# foo.py
import sys

class Foo(object):
    pass

def print_classes():
    current_module = sys.modules[__name__]
    for key in dir(current_module):
        if isinstance(getattr(current_module, key), type):
            print(key)
# test.py
import foo
foo.print_classes()
I think that you can do something like this, if you need your own classes:

class custom(object):
    __custom__ = True

class Alpha(custom):
    something = 3

def GetClasses():
    return [x for x in globals() if hasattr(globals()[str(x)], '__custom__')]

print(GetClasses())
I frequently find myself writing command line utilities wherein the first argument is meant to refer to one of many different classes. For example ./something.py feature command --arguments, where feature selects the Feature class and command is a method on that class. Here's a base class that makes this easy.
The assumption is that this base class resides in a directory alongside all of its subclasses. You can then call ArgBaseClass(foo = bar).load_subclasses() which will return a dictionary. For example, if the directory looks like this:
arg_base_class.py
feature.py
Assuming feature.py implements class Feature(ArgBaseClass), then the above invocation of load_subclasses will return { 'feature' : <Feature object> }. The same kwargs (foo = bar) will be passed into the Feature class.
#!/usr/bin/env python3
import os, pkgutil, importlib, inspect
class ArgBaseClass():
    # Assign all keyword arguments as properties on self, and keep the kwargs for later.
    def __init__(self, **kwargs):
        self._kwargs = kwargs
        for (k, v) in kwargs.items():
            setattr(self, k, v)
        ms = inspect.getmembers(self, predicate=inspect.ismethod)
        self.methods = dict([(n, m) for (n, m) in ms if not n.startswith('_')])

    # Add the names of the methods to a parser object.
    def _parse_arguments(self, parser):
        parser.add_argument('method', choices=list(self.methods))
        return parser

    # Instantiate one of each of the subclasses of this class.
    def load_subclasses(self):
        module_dir = os.path.dirname(__file__)
        module_name = os.path.basename(os.path.normpath(module_dir))
        parent_class = self.__class__
        modules = {}

        # Load all the modules in the package:
        for (module_loader, name, ispkg) in pkgutil.iter_modules([module_dir]):
            modules[name] = importlib.import_module('.' + name, module_name)

        # Instantiate one of each class, passing the keyword arguments.
        ret = {}
        for cls in parent_class.__subclasses__():
            path = cls.__module__.split('.')
            ret[path[-1]] = cls(**self._kwargs)
        return ret
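A hedged usage sketch, following the directory layout described above (the driver file and the kwargs are illustrative):
# Hypothetical driver script living next to arg_base_class.py and feature.py
from arg_base_class import ArgBaseClass

subclasses = ArgBaseClass(foo='bar').load_subclasses()
feature = subclasses['feature']  # an instance of Feature, with feature.foo == 'bar'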
import Foo
dir(Foo)
import collections
dir(collections)
The following can be placed at the top of the file:
def get_classes():
    import inspect, sys
    return dict(inspect.getmembers(
        sys.modules[__name__],
        lambda member: inspect.isclass(member) and member.__module__ == __name__
    ))
Note, this can be placed at the top of the module because we've wrapped the logic in a function definition. If you want the dictionary to exist as a top-level object you will need to place the definition at the bottom of the file to ensure all classes are included.
Go to the Python interpreter and type help('module_name'), then press Enter, e.g. help('os'). Here is one part of the output:
class statvfs_result(__builtin__.object)
| statvfs_result: Result from statvfs or fstatvfs.
|
| This object may be accessed either as a tuple of
| (bsize, frsize, blocks, bfree, bavail, files, ffree, favail, flag, namemax),
| or via the attributes f_bsize, f_frsize, f_blocks, f_bfree, and so on.
|
| See os.statvfs for more information.
|
| Methods defined here:
|
| __add__(...)
| x.__add__(y) <==> x+y
|
| __contains__(...)
| x.__contains__(y) <==> y in x