Suppose this is my models.py:
models.py
from django.db import models
from django.contrib.auth.models import *

# Create your models here.
class A(models.Model):
    p = models.CharField(max_length=200)

class B(models.Model):
    d = models.OneToOneField(User)
    e = models.ForeignKey(A)

class C(models.Model):
    f = models.CharField(max_length=200)
    g = models.ForeignKey(A, related_name="c")
I want to import these models inside my views like this:
from app import models

def import():
    list = ['A', 'B', 'C']
    for x in list:
        from model import x

import()
Please suggest a better solution; I am new to Python/Django. Thanks in advance.
Edit
I want to use this loop for some reason.
It's better to just import the models module itself and reference the models from there.
from my_app import models
models.A.objects.all()
models.B.objects.all()
Avoid from my_app import *: it can lead to confusion and namespace pollution, plus explicit is better than implicit.
But of course if you already know the list of models, you can simply import those directly:
from my_app.models import A, B, C
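If you really do need the loop, a minimal sketch (assuming the app is named my_app) is to fetch each model class off the imported module with getattr():

from my_app import models

# Hypothetical loop over the model names from the question.
for name in ['A', 'B', 'C']:
    model_class = getattr(models, name)  # e.g. models.A
    print(model_class.__name__, model_class.objects.count())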
You can also do something like this answer suggested:
Dynamic module import in Python
import imp
import os
def load_from_file(filepath, expected_class):
    class_inst = None
    mod_name, file_ext = os.path.splitext(os.path.split(filepath)[-1])
    # Load the module from the given path, depending on whether it is
    # a source (.py) or compiled (.pyc) file.
    if file_ext.lower() == '.py':
        py_mod = imp.load_source(mod_name, filepath)
    elif file_ext.lower() == '.pyc':
        py_mod = imp.load_compiled(mod_name, filepath)
    # Instantiate the expected class if the loaded module defines it.
    if hasattr(py_mod, expected_class):
        class_inst = getattr(py_mod, expected_class)()
    return class_inst
i.e.
module = load_from_file(file_path, expected_class)
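Note that imp is deprecated in Python 3; a rough equivalent sketch using importlib (assuming the path points at a .py file) might look like this:

import importlib.util
import os

def load_from_file(filepath, expected_class):
    # Build a module spec from the file path and execute the module.
    mod_name = os.path.splitext(os.path.basename(filepath))[0]
    spec = importlib.util.spec_from_file_location(mod_name, filepath)
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)
    # Instantiate the class if the loaded module defines it.
    if hasattr(module, expected_class):
        return getattr(module, expected_class)()
    return None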
My understanding is that you are trying to import all of the model classes into your views file.
The simple way is:
from app.models import *
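If the goal is to loop over every model in the app without hard-coding the names, a hedged sketch using Django's app registry (assuming Django 1.7+ and that the app label is app) is:

from django.apps import apps

# Iterate over every model registered for the app.
for model_class in apps.get_app_config('app').get_models():
    print(model_class.__name__, model_class.objects.count())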
Related
So I am not even sure if what I want to do is possible but I thought I would ask and find out.
I want to build a Chef "databag" via Python. This is pretty much just a Python dictionary. There are other things that need to happen with this databag that are encapsulated in the Databag class.
Now for the meat of the question...
I want to add key/values to this dictionary, but need to build it in a way that is easily extensible. NOTE: autodict is a class that lets you build a dictionary using dot notation.
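For reference, a minimal sketch of what such an autodict might look like (hypothetical; the real implementation lives in classes/databag/utils.py):

class autodict(dict):
    # Missing keys are created on attribute access, so nested values
    # can be set with dot notation, e.g. d.log.group = "x".
    def __getattr__(self, name):
        if name not in self:
            self[name] = autodict()
        return self[name]

    def __setattr__(self, name, value):
        self[name] = value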
Here is what I am trying to do:
databag = Databag(
    LogGroup=Sub("xva-${environment}-${uniqueid}-mygroup"),
    RunList=[
        "mysetup::default",
        "consul::client"
    ]
)
databag.Consul()  # <-- trying to add consul key/values to the databag
print(databag.to_dict())
print(databag.to_string_list())
So you can see how I want to add the "consul" key/values to the already-existing databag object.
Here are the class definitions. I know this is wrong, which is why I am here to see if this is even possible.
Databag Class
class Databag(object):
    def __init__(self, uniqueid=Ref("uniqueid"), environment=Ref("environment"), LogGroup=None, RunList=[]):
        self.databag = autodict()
        self.databag.uniqueid = uniqueid
        self.databag.environment = environment
        self.databag.log.group = LogGroup
        self.runlist = RunList

    def to_string_list(self):
        return self.convert_databag_to_string(self.databag)

    def to_dict(self):
        return self.databag

    def get_runlist(self):
        return self.convert_to_runlist_string(self.runlist)
Consul Class
class Consul(Databag):
    def __init__(self, LogGroup=None):
        if LogGroup == None:
            Databag.consul.log.group = Databag.log.group
        else:
            Databag.consul.log.group = LogGroup
As you can see, the Consul class is supposed to access the databag dictionary of the Databag class and add the "consul" variables, almost like an attribute. However, I don't want to add a new function to the Databag class every time, otherwise that class will end up being very, very large.
I was able to get something like this to work with the following method, although I am open to any suggestions to get this to work. I just read the help posted on this link:
http://www.qtrac.eu/pyclassmulti.html
EDIT: This method is a lot easier:
Note: This uses the exact same implementation of the old method.
consul.py
from classes.databag.utils import *

class Consul:
    def Consul(self, LogGroup=None):
        if LogGroup == None:
            self.databag.consul.log.group = self.databag.log.group
        else:
            self.databag.consul.log.group = LogGroup
databag.py
from classes.databag.utils import autodict
from classes.databag import consul

class Databag(consul.Consul):
    def __init__(self, uniqueid=Ref("uniqueid"), environment=Ref("environment"), LogGroup=None, RunList=[]):
        self.databag = autodict()
        self.databag.uniqueid = uniqueid
        ...
        ...
Folder Structure
/classes/
    databag/
        utils.py
        databag.py
        consul.py
testing.py
---- OLD METHOD -----
How I implemented it
from classes.databag.databag import *

databag = Databag(
    LogGroup=Sub("xva-${environment}-${uniqueid}-traefik"),
    RunList=[
        "mysetup::default",
        "consul::client"
    ]
)
databag.Consul()
print(databag.to_dict())
print(databag.to_string_list())
lib.py
def add_methods_from(*modules):
    def decorator(Class):
        for module in modules:
            for method in getattr(module, "__methods__"):
                setattr(Class, method.__name__, method)
        return Class
    return decorator

def register_method(methods):
    def register_method(method):
        methods.append(method)
        return method
    return register_method
databag.py
from classes.databag import lib, consul

@lib.add_methods_from(consul)
class Databag(object):
    def __init__(self, uniqueid=Ref("uniqueid"), environment=Ref("environment"), LogGroup=None, RunList=[]):
        self.databag = autodict()
        self.databag.uniqueid = uniqueid
        ....
        ....
consul.py
from classes.databag import lib

__methods__ = []
register_method = lib.register_method(__methods__)

@register_method
def Consul(self, LogGroup=None):
    if LogGroup == None:
        self.databag.consul.log.group = self.databag.log.group
    else:
        self.databag.consul.log.group = LogGroup
Folder Structure
/classes/
    /databag
        lib.py
        databag.py
        consul.py
        utils.py
/testing.py
I'd like to have different database files for each Peewee ORM instance. Peewee assigns the database engine to an instance using a nested "Meta" class.
My issue seems to come down to accessing a class instance attribute from an inner class. Using the Peewee quickstart example, this is what I'm trying to achieve in (broken) Python:
from peewee import *
class Person(Model):
    def __init__(self, database):
        self.database = database

    name = CharField()
    birthday = DateField()
    is_relative = BooleanField()

    class Meta:
        # The following is incorrect; I'm trying to access the instance
        # variable for the database filename string
        database = SqliteDatabase(Person.database)
# Create two instances with different databases:
john = Person('john-database.db')
jane = Person('jane-database.db')
I've found a few general answers regarding nested classes, but struggle to translate their lessons to this specific application. I appreciate your help!
I think the short answer is "peewee isn't really designed for your use case". But I played around with it a bit, and while there has to be a better solution out there, here's something that worked. But it's not a good idea, and you shouldn't do it.
First, we use the standard peewee example model, except we use the Proxy class for the database connection:
from peewee import *
from playhouse import *
db = Proxy()
class Person(Model):
    name = CharField()
    birthday = DateField()
    is_relative = BooleanField()

    class Meta:
        database = db
Assume we have this in model.py.
Now, to make this work, we're going to need two instances of the model module, which we can get by (ab)using the importlib module:
import importlib.util
import peewee
import sys
def load_module_as(modname, alias):
    mod_spec = importlib.util.find_spec(modname)
    mod = importlib.util.module_from_spec(mod_spec)
    mod_spec.loader.exec_module(mod)
    sys.modules[alias] = mod
    return mod
This allows us to load in two separate instances of the model:
model1 = load_module_as('model', 'model1')
model2 = load_module_as('model', 'model2')
And we can then initialize two different databases:
model1.db.initialize(peewee.SqliteDatabase('db1.db'))
model2.db.initialize(peewee.SqliteDatabase('db2.db'))
While this sort of gets you what you want, you will always need to qualify your classes (model1.Person, model2.Person).
Here's a complete example, with unit tests:
import datetime
import importlib.util
import os
import peewee
import shutil
import sys
import tempfile
import unittest
def load_module_as(modname, alias):
    mod_spec = importlib.util.find_spec(modname)
    mod = importlib.util.module_from_spec(mod_spec)
    mod_spec.loader.exec_module(mod)
    sys.modules[alias] = mod
    return mod
model1 = load_module_as('model', 'model1')
model2 = load_module_as('model', 'model2')
class TestDatabase(unittest.TestCase):
    def setUp(self):
        self.workdir = tempfile.mkdtemp('testXXXXXX')

        self.db1_path = os.path.join(self.workdir, 'db1.db')
        self.db1 = peewee.SqliteDatabase(self.db1_path)
        self.db1.connect()

        self.db2_path = os.path.join(self.workdir, 'db2.db')
        self.db2 = peewee.SqliteDatabase(self.db2_path)
        self.db2.connect()

        model1.db.initialize(self.db1)
        model2.db.initialize(self.db2)

        self.db1.create_tables([model1.Person])
        self.db2.create_tables([model2.Person])

    def test_different_instances(self):
        assert model1.db != model2.db

    def test_create_model1_person(self):
        p = model1.Person(name='testperson',
                          birthday=datetime.datetime.now().date(),
                          is_relative=True)
        p.save()

    def test_create_model2_person(self):
        p = model2.Person(name='testperson',
                          birthday=datetime.datetime.now().date(),
                          is_relative=True)
        p.save()

    def test_create_both(self):
        p1 = model1.Person(name='testperson',
                           birthday=datetime.datetime.now().date(),
                           is_relative=True)
        p2 = model2.Person(name='testperson',
                           birthday=datetime.datetime.now().date(),
                           is_relative=False)
        p1.save()
        p2.save()

        p1 = model1.Person.select().where(model1.Person.name == 'testperson').get()
        p2 = model2.Person.select().where(model2.Person.name == 'testperson').get()

        assert p1.is_relative
        assert not p2.is_relative

    def tearDown(self):
        self.db1.close()
        self.db2.close()
        shutil.rmtree(self.workdir)

if __name__ == '__main__':
    unittest.main(verbosity=2)
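For completeness, newer versions of peewee (3.x) offer a simpler route: the same model class can be bound to different databases at runtime. A minimal sketch, assuming peewee 3.x and reusing the Person model from model.py above:

import datetime
from peewee import SqliteDatabase
from model import Person  # the example model shown above

john_db = SqliteDatabase('john-database.db')
jane_db = SqliteDatabase('jane-database.db')

# bind_ctx() temporarily binds the listed models to the given database,
# so each block talks to its own database file.
with john_db.bind_ctx([Person]):
    john_db.create_tables([Person])
    Person.create(name='John', birthday=datetime.date(1990, 1, 1), is_relative=True)

with jane_db.bind_ctx([Person]):
    jane_db.create_tables([Person])
    Person.create(name='Jane', birthday=datetime.date(1991, 2, 2), is_relative=False)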
I also located this thread with some possible answers.
I have a bunch of Python files. I need to get all the classes from them and make a list.
It's like I have to read the file with a StreamReader and then, for a line like
Imports ActionBlock
I have to take the string ActionBlock and show it in a list. The listing and the rest I can hopefully do, but I am stuck at this point. Any suggestion please? Thank you.
You could use a regular expression to look for the parts you're interested in.
The following code
Dim path = "c:\path\to\your\file.py"
Dim content = File.ReadAllText(path)
Dim matchClass = "class (?<m>\w+)(:|\()+"
Dim matchImport = "(^|from \w+ )import ((?<m>\w+), )*(?<m>\w+)"
Dim result = Regex.Matches(content, String.Format("({0}|{1})", matchClass, matchImport), RegexOptions.Multiline) _
                  .Cast(Of Match) _
                  .SelectMany(Function(m) m.Groups("m").Captures.Cast(Of Capture).Select(Function(c) c.Value)) _
                  .ToList()
will, given a text file like
import os
import math
from time import clock
from random import randint
import DataArchiving
import TABasicFunctions
import HWDataConveterGate
import GeneralTestDataMapping
from something import FirstClass, SecondClass

def foo():
    pass

def bar():
    pass

class ClassOne(object):
    class NestedClass:
        pass

    def thisisnotaclass(self):
        v = [x.class for x in self]
        v = [x.someimport for x in self]

class ClassTwo:
    pass

class Class3:
    pass

def main():
    pass

if __name__ == '__main__':
    main()
create a list that looks like:
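If running Python itself is an option, a hedged alternative sketch uses the ast module, which parses a source file and lists class and import names without regular expressions (names_from_file is a hypothetical helper):

import ast

def names_from_file(path):
    with open(path, encoding='utf-8') as f:
        tree = ast.parse(f.read())
    names = []
    for node in ast.walk(tree):
        if isinstance(node, ast.ClassDef):
            names.append(node.name)  # class definitions
        elif isinstance(node, (ast.Import, ast.ImportFrom)):
            names.extend(alias.name for alias in node.names)  # imported names
    return names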
factory_boy defaults to 1 for sequences. How can I pass in a number to use as a different starting number instead? I can override the _setup_next_sequence() method, but how can I give it a variable to use?
# File: models.py
from django.db import models
class Book(models.Model):
    title = models.CharField(max_length=100)
# File: factories.py
from .models import Book
import factory
class BookFactory(factory.Factory):
    FACTORY_FOR = Book

    title = factory.Sequence(lambda n: u'Title #{}'.format(n))

    @classmethod
    def _setup_next_sequence(cls):
        # Instead of defaulting to starting with number 1, start with starting_seq_num.
        # But how do I set starting_seq_num?
        return starting_seq_num
# File: make_data.py
from factories import BookFactory
# somehow set starting sequence number here?
BookFactory().create()
I'm using factory_boy 1.2.0 (via pip install factory_boy)
factory_boy code: https://github.com/dnerdy/factory_boy
In addition to the answer of Rob Bednark:
We can use the reset_sequence() function, which will reset the counter to a specific value.
# File: make_data.py
import factories
factories.BookFactory.reset_sequence(100)
my_book = factories.BookFactory().create()
print(my_book.title) # Title #100
I found two ways of solving this:
Use a module variable
Use a class attribute set outside of the class definition
Use a module variable:
# File: factories.py
from .models import Book
import factory
starting_seq_num = 0
class BookFactory(factory.Factory):
    FACTORY_FOR = Book

    title = factory.Sequence(lambda n: u'Title #{}'.format(n))

    @classmethod
    def _setup_next_sequence(cls):
        # Instead of defaulting to starting with 0, start with starting_seq_num.
        return starting_seq_num
# File: make_data.py
import factories
factories.starting_seq_num = 100
factories.BookFactory().create()
Use a class attribute set outside of the class definition:
# File: factories.py
from .models import Book
import factory
class BookFactory(factory.Factory):
    # Note that starting_seq_num cannot be set here in the class definition,
    # because Factory will then pass it as a kwarg to the model's create() method
    # and cause an exception. It must be set outside the class definition.
    FACTORY_FOR = Book

    title = factory.Sequence(lambda n: u'Title #{}'.format(n))

    @classmethod
    def _setup_next_sequence(cls):
        return getattr(cls, 'starting_seq_num', 0)
# File: make_data.py
from factories import BookFactory
BookFactory.starting_seq_num = 100
BookFactory().create()
Update: factory_boy now handles it!
In the latest version of factory_boy (2.8.1 as of this writing) it is possible to force the sequence counter to a defined value:
Forcing the value on a per-call basis
In order to force the counter for a specific Factory instantiation, just pass the value in the __sequence=42 parameter:
class AccountFactory(factory.Factory):
    class Meta:
        model = Account

    uid = factory.Sequence(lambda n: n)
    name = "Test"
Then in the console:
>>> obj1 = AccountFactory(name="John Doe", __sequence=10)
>>> obj1.uid # Taken from the __sequence counter
10
>>> obj2 = AccountFactory(name="Jane Doe")
>>> obj2.uid # The base sequence counter hasn't changed
1
And it is also possible to reset the counter to a specific value:
>>> AccountFactory.reset_sequence(42)
>>> AccountFactory().uid
42
>>> AccountFactory().uid
43
The third, and simplest way:
# File: factories.py
from .models import BookModel
import factory
class BookFactory(factory.Factory, starting_seq_num):
    FACTORY_FOR = BookModel

    title = factory.Sequence(lambda n: u'Title #{}'.format(n + starting_seq_num))
# File: make_data.py
import factories
book = factories.BookFactory(512).create() #Start with 512
I'm only starting with Factory Boy myself, and not too experienced in Python either, so I may be missing something, but you see where I'm going here. To make it clearer, I think I'd actually prefer it to be keyworded:
class BookFactory(factory.Factory, title_seq_start=-1):
    ...
book = factories.BookFactory(title_seq_start=512).create()
I fear that this is a messy way to approach the problem but...
let's say that I want to make some imports in Python based on some conditions.
For this reason I want to write a function:
def conditional_import_modules(test):
    if test == 'foo':
        import onemodule, anothermodule
    elif test == 'bar':
        import thirdmodule, and_another_module
    else:
        import all_the_other_modules
Now how can I have the imported modules globally available?
For example:
conditional_import_modules(test='bar')
thirdmodule.myfunction()
Imported modules are just variables - names bound to some values. So all you need is to import them and make them global with the global keyword.
Example:
>>> math
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'math' is not defined
>>> def f():
...     global math
...     import math
...
>>> f()
>>> math
<module 'math' from '/usr/local/lib/python2.6/lib-dynload/math.so'>
You can make the imports global within a function like this:
def my_imports(module_name):
    globals()[module_name] = __import__(module_name)
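A quick hypothetical usage sketch (note that globals() inside my_imports refers to the module where my_imports is defined, so this works best when the function lives in the calling module):

my_imports('json')
print(json.dumps({'ok': True}))  # json is now bound as a global name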
I've just had a similar problem; here is my solution:
class GlobalImport:
    def __enter__(self):
        return self

    def __call__(self):
        import inspect
        self.collector = inspect.getargvalues(inspect.getouterframes(inspect.currentframe())[1].frame).locals

    def __exit__(self, *args):
        globals().update(self.collector)
then, anywhere in the code:
with GlobalImport() as gi:
    import os, signal, atexit, threading, _thread
    # whatever you want it won't remain local
    # if only
    gi()
    # is called before the end of this block

# there you go: use os, signal, ... from whatever place of the module
You can use the built-in function __import__ to conditionally import a module with global scope.
To import a top level module (think: import foo):
def cond_import():
    global foo
    foo = __import__('foo', globals(), locals())
Import from a hierarchy (think: import foo.bar):
def cond_import():
    global foo
    foo = __import__('foo.bar', globals(), locals())
Import from a hierarchy and alias (think: import foo.bar as bar):
def cond_import():
    global bar
    foo = __import__('foo.bar', globals(), locals())
    bar = foo.bar
I like @badzil's approach.
def global_imports(modulename, shortname=None, asfunction=False):
    if shortname is None:
        shortname = modulename
    if asfunction is False:
        globals()[shortname] = __import__(modulename)
    else:
        globals()[shortname] = eval(modulename + "." + shortname)
So something that is traditionally in a class module:
import numpy as np
import rpy2
import rpy2.robjects as robjects
import rpy2.robjects.packages as rpackages
from rpy2.robjects.packages import importr
Can be transformed into a global scope:
global_imports("numpy","np")
global_imports("rpy2")
global_imports("rpy2.robjects","robjects")
global_imports("rpy2.robjects.packages","rpackages")
global_imports("rpy2.robjects.packages","importr",True)
May have some bugs, which I will verify and update. The last example could also have an alias which would be another "shortname" or a hack like "importr|aliasimportr"
I like @Rafał Grabie's approach, as it even supports importing everything,
i.e.
from os import *
(Despite it being bad practice XD )
I'm not allowed to comment, but here is a Python 2.7 version.
It also removes the need to call the function at the end of the block.
class GlobalImport:
    def __enter__(self):
        return self

    def __exit__(self, *args):
        import inspect
        collector = inspect.getargvalues(inspect.getouterframes(inspect.currentframe())[1][0]).locals
        globals().update(collector)

def test():
    with GlobalImport() as gi:
        ## will fire a warning as its bad practice for python.
        from os import *

test()
print path.exists(__file__)
I like the answer from @maxschlepzig.
There is a bug in that approach: if you directly import a function, it will not work.
For example,
global_imports("tqdm", "tqdm", True)
does not work, because the module is not imported. But this
global_imports("tqdm")
global_imports("tqdm", "tqdm", True)
works.
I changed @maxschlepzig's answer a bit, using fromlist so you can load a function or module with a "from" statement in a uniform way.
def global_imports(object_name: str,
                   short_name: str = None,
                   context_module_name: str = None):
    """Import from inside a local function, but make the name global.

    Use this statement to import inside a function,
    but effective as an import at the top of the module.

    Args:
        object_name: the object name to import, can be a module or a function
        short_name: the short name for the import
        context_module_name: the context module name in the import

    Example usage:
        import os                        -> global_imports("os")
        import numpy as np               -> global_imports("numpy", "np")
        from collections import Counter  -> global_imports("Counter", None, "collections")
        from google.cloud import storage -> global_imports("storage", None, "google.cloud")
    """
    if not short_name:
        short_name = object_name

    if not context_module_name:
        globals()[short_name] = __import__(object_name)
    else:
        context_module = __import__(context_module_name,
                                    fromlist=[object_name])
        globals()[short_name] = getattr(context_module, object_name)
You could have this function return the names of the modules you want to import, and then use
mod = __import__(module_name)
Step-1: config.py, config_v2.py and rnd.py are in the same directory/folder
Step-2: config.py
HIGH_ATTENDANCE_COUNT_MIN = 0
Step-3: config_v2.py
HIGH_ATTENDANCE_COUNT_MIN = 5
Step-4: rnd.py
def versioning_test(v):
    global config
    if v == 'v1':
        config = __import__('config', globals(), locals())
    if v == 'v2':
        config = __import__('config_v2', globals(), locals())

def version_test_in_another_function():
    print('version_test_in_another_function: HIGH_ATTENDANCE_COUNT_MIN: ', config.HIGH_ATTENDANCE_COUNT_MIN)

versioning_test("v2")
version_test_in_another_function()
Step-5: $ python3 rnd.py
<<output>>: version_test_in_another_function: HIGH_ATTENDANCE_COUNT_MIN: 5
It is now recommended (for Python 3) to use importlib:
https://docs.python.org/3/reference/import.html#importlib
e.g.: globals()["np"] = importlib.import_module("numpy")
and you can now execute "np.array([1,2,3])" afterwards.
There are also other ways of importing that you might prefer. Consider seeing the aforementioned documentation.
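Applied to the original question, a minimal sketch (assuming the modules named there actually exist in your environment) could look like this:

import importlib

def conditional_import_modules(test):
    # Bind each imported module into this module's global namespace.
    if test == 'foo':
        names = ('onemodule', 'anothermodule')
    elif test == 'bar':
        names = ('thirdmodule', 'and_another_module')
    else:
        names = ('all_the_other_modules',)
    for name in names:
        globals()[name] = importlib.import_module(name)

conditional_import_modules(test='bar')
thirdmodule.myfunction()  # now visible at module level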