Is there a way in grep (or vim) to print out a named function/class?
i.e. From:
class InternalTimer(Sim.Process):
    """SimPy process that waits on a request event and then drives an FSM."""

    def __init__(self, fsm):
        Sim.Process.__init__(self, name="Timer")
        random.seed()
        self.fsm = fsm

    def Lifecycle(self, Request):
        """Wait for each request, hold for its delay, then fire the FSM.

        Request.signalparam[0] is the hold duration; signalparam[1] is the
        event forwarded to the FSM -- assumed from usage, confirm with caller.
        """
        while True:
            yield Sim.waitevent, self, Request
            yield Sim.hold, self, Request.signalparam[0]
            if self.interrupted():
                self.interruptReset()
            else:
                self.fsm.process(Request.signalparam[1])
Calling $my-func-grep '__init__(self,fsm)' filename.py would produce
def __init__(self, fsm):
Sim.Process.__init__(self, name="Timer")
random.seed()
self.fsm = fsm
You could create a vim extension which effectively performs the following:
import inspect

# print() call form works on both Python 2 and 3 (the original used the
# Python-2-only print statement).
print(inspect.getsource(name_of_function))
This prints the function signature and the body of the function. If Vim has been compiled with Python support, you can write extensions in Python itself.
Related
I have written new convenience function(greet.py) in python in order to use it on GDB.
class Greet(gdb.Function):
    """Return string to greet someone.
    Takes a name as argument."""

    def __init__(self):
        # Registers the function with GDB under the name "greet".
        super(Greet, self).__init__("greet")

    def invoke(self, name):
        # *name* is a gdb.Value; string() converts it to a Python str.
        return "Hello, %s!" % name.string()


Greet()
Now I would like to use it on GDB as convenience function. What are the procedures I should do in order to use it while debugging a program on GDB?
As you discovered there's no built in user directory from which scripts are auto-loaded.
Usually a user would source individual scripts from their ~/.gdbinit file, like this:
source /home/user/gdb/scripts/my-script.py
If a user really wants scripts to be auto-sourced from a directory without having to add them to their ~/.gdbinit then this is easily done by adding the following into ~/.gdbinit:
python
import os

# Source every *.py / *.gdb script found in the directory, in sorted order.
directory = '/home/user/gdb/scripts/'
if os.path.isdir(directory):
    for filename in sorted(os.listdir(directory)):
        if filename.endswith(".py") or filename.endswith(".gdb"):
            path = os.path.join(directory, filename)
            gdb.execute("source {}".format(path))
end
This will load all *.py and *.gdb scripts from /home/user/gdb/scripts/.
In order to write new convenience function in GDB :
write the function and place it under "~/gdb/python/lib/gdb/function"
import gdb


class Salam(gdb.Function):
    """Return string to greet someone.
    Takes a name as argument."""

    def __init__(self):
        # Registers the convenience function with GDB as $salam.
        super(Salam, self).__init__("salam")

    def invoke(self, name):
        return "Salam, jenap %s!" % name.string()


Salam()
Edit "~/gdb/data-directory/Makefile.in" and under "PYTHON_FILE_LIST" add "gdb/function/salam.py"
./configure
make
make install
Now, after #gdb
try typing : "print salam("Aman")"
For the convenience function to work, GDB must have been built with Python support.
I am trying to create a registry that I can load with name-factory_method pairs, so that client code is able to use the registry to instantiate these objects by their given names. I can get this to work if I load the registry with pairs within the registry module.
However, I cannot seem to get the registry loaded if I distribute the loading among other modules (e.g. with the factory methods). I would prefer the latter option, as then the registry module doesn't have to be aware of all the potential factory methods. But I can't seem to get this to work.
I have created a simple three module version that works and then one that fails below:
Working version
registry.py
# Mapping of description -> factory callable.
registry = {}


def register_thing(description, thingmaker):
    """Register *thingmaker* (any callable) under *description*."""
    registry[description] = thingmaker


def get_thing(description, *args, **kwargs):
    """Instantiate the registered thing, forwarding any extra arguments.

    Raises KeyError if *description* was never registered.
    """
    thingmaker = registry[description]
    return thingmaker(*args, **kwargs)


def show_things():
    """Return a view of all registered descriptions."""
    return registry.keys()
# Populate the registry up front.  Note this couples registry.py to every
# factory module it must know about -- the limitation discussed below.
from things import Thing1
from things import Thing2
register_thing("Thing1", Thing1)
register_thing("Thing2", Thing2)
things.py
class Thing1(object):
    """Simplest possible thing: takes no constructor arguments."""

    def __init__(self):
        pass

    def message(self):
        return "This is a thing"
class Thing2(object):
    """Thing that remembers the positional and keyword args it was built with."""

    def __init__(self, *args, **kwargs):
        self.args = args
        self.kwargs = kwargs

    def message(self):
        return "This is a different thing with args %r and kwargs %r" \
            % (self.args, self.kwargs)
use_things.py
# Client code: instantiates objects purely by their registered names.
import registry
print("The things in the registry are: %r" % registry.show_things())
print("Getting a Thing1")
thing = registry.get_thing("Thing1")
print("It has message %s" % thing.message())
print("Getting a Thing2")
# Extra positional/keyword arguments are forwarded to the factory unchanged.
thing = registry.get_thing("Thing2", "kite", on_string="Mothers new gown")
print("It has message %s" % thing.message())
Running use_things.py gives:
The things in the registry are: dict_keys(['Thing1', 'Thing2'])
Getting a Thing1
It has message This is a thing
Getting a Thing2
It has message This is a different thing with args ('kite',) and kwargs {'on_string': 'Mothers new gown'}
Failing distributed version
registry.py
# Mapping of description -> factory callable.  This version deliberately
# knows nothing about the factory modules.
registry = {}


def register_thing(description, thingmaker):
    """Register *thingmaker* (any callable) under *description*."""
    registry[description] = thingmaker


def get_thing(description, *args, **kwargs):
    """Instantiate the registered thing, forwarding any extra arguments.

    Raises KeyError if *description* was never registered.
    """
    thingmaker = registry[description]
    return thingmaker(*args, **kwargs)


def show_things():
    """Return a view of all registered descriptions."""
    return registry.keys()
things.py
import registry


class Thing1(object):
    """Registers itself with the registry module at import time."""

    def __init__(self):
        pass

    def message(self):
        return "This is a thing"


class Thing2(object):
    """Records its constructor args; also self-registered at import time."""

    def __init__(self, *args, **kwargs):
        self.args = args
        self.kwargs = kwargs

    def message(self):
        return "This is a different thing with args %r and kwargs %r" \
            % (self.args, self.kwargs)


# Fixed: the original snippet said ``register.register_thing`` which raises
# NameError -- the module imported above is named ``registry``.
registry.register_thing("Thing1", Thing1)
registry.register_thing("Thing2", Thing2)
use_things.py (as before)
Now if I run use_things.py I get the following:
The things in the registry are: dict_keys([])
Getting a Thing1
Traceback (most recent call last):
File "use_things.py", line 6, in <module>
thing = registry.get_thing("Thing1")
File "/home/luke/scratch/registry_example/registry.py", line 7, in get_thing
thingmaker = registry[description]
KeyError: 'Thing1'
Clearly, the things.py module is never imported, so it never populates the registry.
If I re-add the following line at the bottom of registry.py it again works:
import things
But again this requires registry.py to be aware of the modules needed. I would prefer the registry to be populated automatically by modules below a certain directory but I cannot seem to get this to work. Can anybody help?
What you are describing is basically a "plug-in" software architecture and there are different ways of implementing one. I personally think using a Python package to do it is a good approach because it's a well-defined "pythonic" way to organize modules and the languages supports it directly, which makes doing some of the things involved a little easier.
Here's something that I think does basically everything you want. It's based on my answer to the question How to import members of all modules within a package? which requires putting all the factory scripts in a package directory, in a file hierarchy like this:
use_things.py
things/
__init__.py
thing1.py
thing2.py
The names of the package and factory scripts can easily be changed to something else if you wish.
Instead of having an explicit public registry, it just uses the package's name, things in this example. (There is a private _registry dictionary in the module, though, if you feel you really need one for some reason.)
Although the package does have to be explicitly imported, its __init__.py initialization script will import the rest of the files in the subdirectory automatically — so adding or deleting one is simply a matter of placing its script in subdirectory or removing it from there.
There's no register_thing() function in this implementation, because the private _import_all_modules() function in __init__.py script effectively does it automatically — but note that it "auto-registers" everything public in each factory module script. You can, of course, modify how this works if you want it done in a different manner. (I have a couple of ideas if you're interested.)
Here's the contents of each of the files as outlined above:
use_things.py:
import things # Import package.
# The lookup functions live on the package itself -- no separate registry
# module is needed.
print("The things in the package are: %r" % things.show_things())
print("Getting a Thing1")
thing = things.get_thing("Thing1")
print(f"It has message {thing.message()!r}")
print("Getting a Thing2")
thing = things.get_thing("Thing2", "kite", on_string="Mothers new gown")
print(f"It has message {thing.message()!r}")
things/__init__.py:
def _import_all_modules():
    """ Dynamically imports all modules in this package directory. """
    import traceback
    import os

    globals_, locals_ = globals(), locals()
    registry = {}

    # Scan the package's own directory (not the cwd, which is what a bare
    # ``os.listdir(__name__)`` depends on) so imports work no matter where
    # the interpreter was started from.
    package_dir = os.path.dirname(os.path.abspath(__file__))

    # Dynamically import all the package modules in this file's directory.
    for filename in os.listdir(package_dir):
        # Process all python files in directory that don't start with an
        # underscore (which also prevents this module from importing itself).
        if filename[0] != '_' and filename.split('.')[-1] in ('py', 'pyw'):
            modulename = filename.split('.')[0]  # Filename sans extension.
            package_module = '.'.join([__name__, modulename])
            try:
                module = __import__(package_module, globals_, locals_, [modulename])
            except Exception:
                traceback.print_exc()
                raise
            # "Auto-register" every public name the module defines.
            for name in module.__dict__:
                if not name.startswith('_'):
                    registry[name] = module.__dict__[name]
    return registry


_registry = _import_all_modules()


def get_thing(description, *args, **kwargs):
    """Instantiate the thing registered under *description*."""
    thingmaker = _registry[description]
    return thingmaker(*args, **kwargs)


def show_things():
    """Return a list of all registered names."""
    return list(_registry.keys())
things/thing1.py
class Thing1(object):
    """Factory class auto-registered under its own class name."""

    def __init__(self):
        pass

    def message(self):
        return f'This is a {type(self).__name__}'
things/thing2.py:
class Thing2(object):
    """Factory class that remembers the arguments it was built with."""

    def __init__(self, *args, **kwargs):
        self.args = args
        self.kwargs = kwargs

    def message(self):
        return (f"This is a different thing with args {self.args}"
                f" and kwargs {self.kwargs}")
Running use_things.py gives:
The things in the package are: ['Thing1', 'Thing2']
Getting a Thing1
It has message 'This is a Thing1'
Getting a Thing2
It has message "This is a different thing with args ('kite',) and kwargs {'on_string': 'Mothers new gown'}"
Note: Martineau has mostly answered my question and the sophisticated stuff is there. However, there was a little additional requirement that I had (in my question) but which wasn't very clear. I have used martineau's answer to create a full answer and I have shared it here for anyone wanting to see it.
The additional requirements were that I could use any factory_method (not just a class' __init__ function) and that I wanted to explicitly register the ones I wanted in my registry.
So here is my final version...
I use the same directory structure as Martineau:
use_things.py
things/
__init__.py
thing1.py
thing2.py
To demonstrate the other type of factory_method I have extended use_things.py by a couple of lines:
import things # Import package.
print("The things in the package are: %r" % things.show_things())
print("Getting a Thing1")
thing = things.get_thing("Thing1")
print(f"It has message {thing.message()!r}")
print("Getting a Thing2")
thing = things.get_thing("Thing2", "kite", on_string="Mothers new gown")
print(f"It has message {thing.message()!r}")
print("Getting a Thing2 in a net")
# "Thing2_in_net" is registered to a factory *function*, not a class --
# it builds a Thing2 with located='in net' pre-applied.
thing = things.get_thing("Thing2_in_net", "kite", on_string="Mothers new gown")
print(f"It has message {thing.message()!r}")
Note that getting Thing2_in_net constructs an object of type Thing2 but with some precomputation applied.
thing1.py now explicitly registers Thing1's constructor (__init__) by declaring a tuple with a name starting _register_<something>. Another class UnregisteredThing is not registered.
class Thing1(object):
def __init__(self):
pass
def message(self):
return f'This is a {type(self).__name__}'
_register_thing1 = ('Thing1', Thing1)
class UnregisteredThing(object):
def __init__(self):
pass
def message(self):
return f'This is an unregistered thing'
And thing2.py registers two makers, one the basic constructor of Thing2 and one from a factory method:
class Thing2(object):
def __init__(self, *args, **kwargs):
self.args = args
self.kwargs = kwargs
def message(self):
return (f"This is a different thing with args {self.args}"
f" and kwargs {self.kwargs}")
def build_thing2_in_net(*args, **kwargs):
return Thing2(*args, located='in net', **kwargs)
_register_thing2 = ('Thing2', Thing2)
_register_thing2_in_net = ('Thing2_in_net', build_thing2_in_net)
Finally, the __init__.py script, is modified to look specifically for module attributes called _register_<something> and it will treat these as a key/maker pair to register:
def build_registry():
    """ Dynamically imports all modules in this package directory and
    collects every (key, maker) pair declared via a ``_register_*`` tuple. """
    import traceback
    import os

    globals_, locals_ = globals(), locals()
    registry = {}

    # Scan the package's own directory (not the cwd, which is what a bare
    # ``os.listdir(__name__)`` depends on) so this works no matter where
    # the interpreter was started from.
    package_dir = os.path.dirname(os.path.abspath(__file__))
    for filename in os.listdir(package_dir):
        # Process all python files in directory that don't start with an
        # underscore (which also prevents this module from importing itself).
        if filename[0] != '_' and filename.split('.')[-1] in ('py', 'pyw'):
            modulename = filename.split('.')[0]  # Filename sans extension.
            package_module = '.'.join([__name__, modulename])
            try:
                module = __import__(
                    package_module, globals_, locals_, [modulename])
            except Exception:
                traceback.print_exc()
                raise
            for name in module.__dict__:
                # Look for attributes of the module starting in _register_.
                if name.startswith('_register_'):
                    # If so, assume they are a key/maker pair and register them.
                    key, maker = module.__dict__[name]
                    registry[key] = maker
    return registry


_registry = build_registry()


def get_thing(description, *args, **kwargs):
    """Instantiate the thing registered under *description*."""
    thingmaker = _registry[description]
    return thingmaker(*args, **kwargs)


def show_things():
    """Return a list of registered names."""
    return list(_registry.keys())
The resulting output shows that only registered things appear in the registry and these can be any method that constructs an object:
The things in the package are: ['Thing2', 'Thing2_in_net', 'Thing1']
Getting a Thing1
It has message 'This is a Thing1'
Getting a Thing2
It has message "This is a different thing with args ('kite',) and kwargs {'on_string': 'Mothers new gown'}"
Getting a Thing2 in a net
It has message "This is a different thing with args ('kite',) and kwargs {'located': 'in net', 'on_string': 'Mothers new gown'}"
i have this code in a python file:
from dec import my_decorator
import asyncio


# Note: the decorator lines were mangled to ``#my_decorator`` in the original
# paste; they must be ``@my_decorator`` to actually decorate the coroutines.
@my_decorator
async def simple_method(bar):  # , x, plc_name, var_name):
    print("Henlo from simple_method\npartent:{}".format(parent))
    return


@my_decorator
async def other_simple_meth(bar, value):
    print("Henlo from other_simple_meth:\t Val:{}".format(value))
    return


async def main():
    print("Start Module-Export")
    open('module_functions.py', 'a').close()
    # Write all decorated functions to module_functions.py
    print("Functions in module_functions.py exported")
    while True:
        # Fixed: asyncio.sleep() must be awaited, otherwise the coroutine is
        # created and discarded and the loop spins without sleeping.
        await asyncio.sleep(2)
        print("z...z...Z...")
My goal is to write all decorated functions (inc. the import dependencies) into a second module file (here "module_functions.py"). My 'module_functions.py' file should look like this:
from dec import my_decorator
import asyncio


@my_decorator
async def simple_method(bar):  # , x, plc_name, var_name):
    print("Henlo from simple_method\npartent:{}".format(parent))
    return


@my_decorator
async def other_simple_meth(bar, value):
    print("Henlo from other_simple_meth:\t Val:{}".format(value))
    return
I know how to get references and names of a function, but not how to "copy/paste" the function code (incl. decorator and all dependencies) into a separate file. Is this even possible?
EDIT: I know that pickle and dill exist, but they may not fulfill the goal. The problem is that someone else may not know the order of the dumped file, and loading it back may/will cause problems. It also seems impossible to edit such loaded functions afterwards.
I found a (not ideal, but ok) solution for my problems.
I) Find and write functions, coroutines etc. into a file (works):
Like #MisterMiyagi suspected, is the inspect module a good way to go. For the common stuff, it is possible with inspect.getsource() to get the code and write them into a file:
# List of wanted stuff
func_list = [simple_method, meth_with_input, meth_with_input_and_output, func_myself]

with open('module_functions.py', 'a') as module_file:
    for func in func_list:
        try:
            module_file.write(inspect.getsource(func))
            module_file.write("\n")
        # Narrowed from a bare ``except:`` -- getsource raises TypeError for
        # builtins/dynamically-created objects and OSError if the source
        # file is unavailable.
        except (TypeError, OSError):
            print("Error :( ")
II) But what about decorated stuff(seems to work)?
I) will not work for decorated stuff; it is just ignored without throwing an exception. The key is functools' wraps decorator (from functools import wraps).
In many examples the #wraps decorator is added into the decorator class. This was not possible for me, but there is a good workaround:
@wraps(lambda: simple_method)  # <---add wraps-decorator here
@my_decorator
async def simple_method(parent):  # , x, plc_name, var_name):
    print("Henlo from simple_method\npartent:{}".format(parent))
    return
Wraps can be placed above the original decorated method/class/function and it seems to behave like I want. Now we can add simple_method into the func_list of I).
III) What about the imports?
Well, it seems to be quite tricky/impossible to actually read the dependencies of a function. My workaround is to drop all wanted imports into a class (sigh). This class can be thrown into the func_list of I) and is written into the file.
EDIT:
There is a cleaner way, which may work (after some modification) with I) and II) as well. The magic module is ast.
I have overwritten following:
class ImportVisitor(ast.NodeVisitor):
    """Write every import statement found in an AST to a target file.

    Pick these special nodes via overriding ``visit_<classname>``; class
    names are listed in
    https://docs.python.org/3.6/library/ast.html#abstract-grammar
    """

    def __init__(self, target):
        super().__init__()
        # Any object with a write() method (open file, StringIO, ...).
        self.file_target = target

    def visit_Import(self, node):
        """Write plain statements like ``import ast`` to file_target."""
        # Renamed local from ``str`` -- the original shadowed the builtin.
        stmt = 'import ' + ', '.join(alias.name for alias in node.names)
        self.file_target.write(stmt + "\n")

    def visit_ImportFrom(self, node):
        """Write statements like ``from os.path import basename`` to file_target."""
        stmt = ('from ' + node.module + ' import '
                + ', '.join(alias.name for alias in node.names))
        self.file_target.write(stmt + "\n")
Now I can parse my own script name and fill the module_file with the imports and from...imports it will find while visiting all nodes in this tree:
with open('module_functions.py', 'a') as module_file:
    # Parse this very script and replay its import statements into the
    # generated module file.
    with open(basename(__file__), "rb") as f:
        tree = ast.parse(f.read(), basename(__file__))
        visitor = ImportVisitor(module_file)
        visitor.visit(tree)
    module_file.write("\n\n")
This is an ugly, high maintenance factory. I really just need a way to use the string to instantiate an object with a name that matches the string. I think metaclass is the answer but I can't figure out how to apply it:
from commands.shVersionCmd import shVersionCmd
from commands.shVRFCmd import shVRFCmd


def CommandFactory(commandnode):
    """Instantiate the command class named in the node's 'name' attribute.

    Returns None for unknown names (falls off the end of the if chain).
    """
    if commandnode.attrib['name'] == 'shVersionCmd':
        return shVersionCmd(commandnode)
    if commandnode.attrib['name'] == 'shVRFCmd':
        return shVRFCmd(commandnode)
You can look up global names with the globals() function, which returns a dict:
from commands.shVersionCmd import shVersionCmd
from commands.shVRFCmd import shVRFCmd

# An explicit list of allowed commands to prevent malicious activity.
commands = ['shVersionCmd', 'shVRFCmd']


def CommandFactory(commandnode):
    """Instantiate the allowed command named by the node, or None if unknown."""
    cmd = commandnode.attrib['name']
    if cmd in commands:
        fn = globals()[cmd]
        # Fixed: the original called fn(commandnode) but dropped the result,
        # so the factory always returned None.
        return fn(commandnode)
This answer How to make an anonymous function in Python without Christening it? discusses how to cleanly call blocks of code based on a key
eval is your friend:
from commands import *


def CommandFactory(commandnode):
    """Instantiate ``commands.<name>.<name>`` for the node's 'name' attribute.

    The assert whitelists names before eval() -- eval on unchecked input
    would execute arbitrary code.
    """
    name = commandnode.attrib['name']
    assert name in ("shVersionCmd", "shVRFCmd"), "illegal command"
    return eval(name + "." + name)(commandnode)
Note that if you are sure that name will never contain any illegal commands, you could remove the assert and turn the function into a no-maintenance-delight. In case of doubt, leave it in and maintain the list in a single place.
My personal preference would be to turn the dependencies between the factory and the command implementations around, so that each command registers itself with the factory.
Example implementation:
File commands/__init__.py:
import pkgutil

import commands

# Class-name -> command-class mapping, filled by the @command decorator.
_commands = {}


def command(commandCls):
    """Class decorator: register *commandCls* under its class name."""
    _commands[commandCls.__name__] = commandCls
    return commandCls


def CommandFactory(commandnode):
    """Instantiate the registered command named by the node; None if unknown."""
    name = commandnode.attrib['name']
    if name in _commands.keys():
        return _commands[name](commandnode)


# Load all commands so that each module's @command decorator runs.
for loader, module_name, is_pkg in pkgutil.walk_packages(commands.__path__):
    if module_name != __name__:
        module = loader.find_module(module_name).load_module(module_name)
File commands/mycommand.py:
from commands import command


# Note: the decorator line was mangled to ``#command`` in the original paste;
# it must be ``@command`` to register the class with the factory.
@command
class MyCommand(object):
    def __init__(self, commandnode):
        pass
Small test:
from commands import CommandFactory


# Stub node implementation
class Node(object):
    def __init__(self, name):
        self.attrib = {"name": name}


if __name__ == '__main__':
    cmd = CommandFactory(Node("MyCommand"))
    assert cmd.__class__.__name__ == "MyCommand", "New command is instance of MyCommand"
    cmd = CommandFactory(Node("UnknownCommand"))
    assert cmd is None, "Returns None for unknown command type"
I am working on a quick python script using the cmd module that will allow the user to enter text commands followed by parameters in basic url query string format. The prompts will be answered with something like
commandname foo=bar&baz=brack
Using cmd, I can't seem to find which method to override to affect the way the argument line is handed off to all the do_* methods. I want to run urlparse.parse_qs on these values, and calling this upon line in every do_* method seems clumsy.
The precmd method gets the whole line, before the commandname is split off and interpreted, so this will not work for my purposes. I'm also not terribly familiar with how to place a decorator inside a class like this and haven't been able to pull it off without breaking the scope.
Basically, the python docs for cmd say the following
Repeatedly issue a prompt, accept input, parse an initial prefix off
the received input, and dispatch to action methods, passing them the
remainder of the line as argument.
I want to make a method that will do additional processing to that "remainder of the line" and hand that generated dictionary off to the member functions as the line argument, rather than interpreting them in every function.
Thanks!
You could potentially override the onecmd() method, as the following quick example shows. The onecmd() method there is basically a copy of the one from the original cmd.py, but adds a call to urlparse.parse_qs() before passing the arguments to a function.
import cmd
import urlparse
class myCmd(cmd.Cmd):
def onecmd(self, line):
"""Mostly ripped from Python's cmd.py"""
cmd, arg, line = self.parseline(line)
arg = urlparse.parse_qs(arg) # <- added line
if not line:
return self.emptyline()
if cmd is None:
return self.default(line)
self.lastcmd = line
if cmd == '':
return self.default(line)
else:
try:
func = getattr(self, 'do_' + cmd)
except AttributeError:
return self.default(line)
return func(arg)
def do_foo(self, arg)
print arg
my_cmd = myCmd()
my_cmd.cmdloop()
Sample output:
(Cmd) foo
{}
(Cmd) foo a b c
{}
(Cmd) foo a=b&c=d
{'a': ['b'], 'c': ['d']}
Is this what you are trying to achieve?
Here's another potential solution that uses a class decorator to modify a
cmd.Cmd subclass and basically apply a decorator function to all do_*
methods of that class:
import cmd
import urlparse
import types
# function decorator to add parse_qs to individual functions
def parse_qs_f(f):
def f2(self, arg):
return f(self, urlparse.parse_qs(arg))
return f2
# class decorator to iterate over all attributes of a class and apply
# the parse_qs_f decorator to all do_* methods
def parse_qs(cls):
for attr_name in dir(cls):
attr = getattr(cls, attr_name)
if attr_name.startswith('do_') and type(attr) == types.MethodType:
setattr(cls, attr_name, parse_qs_f(attr))
return cls
#parse_qs
class myCmd(cmd.Cmd):
def do_foo(self, args):
print args
my_cmd = myCmd()
my_cmd.cmdloop()
I quickly cobbled this together and it appears to work as intended, however, I'm
open to suggestions on any pitfalls or how this solution could be improved.