pydantic: AttributeError: 'Config' object has no attribute - python

I am running into some problems with loading .env file contents in config file as app config attributes, I am using pydantic-0.7.1 with python 3.6.7 running the apps for validating type and existence of vars. project structure is laid out as follows
cat .env
APP_PROJECT_NAME_TOPICS='["streaming.topic"]'
cat __main__.py
from project_name_consumer.app import create_app, app_cli
if __name__ == '__main__':
app_cli(create_app)
cat __init__.py
__version__ = '1.0.0.0'
cat app.py
from typing import List
from collections import MutableMapping
from dotenv import load_dotenv
from os import getcwd, path
from time import sleep
from typing import Optional
from pydantic import BaseSettings
from argparse import ArgumentParser
class BaseConfig(BaseSettings):
def __init__(self, prefix='', **kwargs):
self.__config__.env_prefix = prefix
super().__init__(**kwargs)
def dict(self, prefix=''):
return {
key[len(prefix):]: value
for key, value
in self
if key.startswith(prefix)
}
class AppBaseConfig(BaseConfig):
name: str = 'app'
version: Optional[str] = None
#property
def app_name(self):
return self.name
#property
def app_version(self):
return self.version
class App(MutableMapping):
def __init__(self, config=AppBaseConfig, config_prefix='APP_'):
self._load_dotenv()
self.config = config(prefix=config_prefix)
self.state = {}
def __getitem__(self, key):
return self.state[key]
def __setitem__(self, key, value):
self.state[key] = value
def __delitem__(self, key):
del self.state[key]
def __iter__(self):
return iter(self.state)
def __len__(self):
return len(self.state)
def _load_dotenv(self):
cwd = getcwd()
filepath = path.join(cwd, '.env')
if path.exists(filepath):
print('Loading dotenv from %s', filepath)
load_dotenv(filepath)
def run(self, **kwargs):
run_app(self, **kwargs)
def run_app(app, loop=None, force_shutdown_delay=5.0):
while True:
sleep(1)
app.run()
class ProjectAppBaseConfig(AppBaseConfig):
project_url: str = ''
project_cert: str = None
class ProjectApp(App):
def __init__(self, config=ProjectAppBaseConfig):
super().__init__(config)
self.session = None
class Config(ProjectAppBaseConfig):
app_name: str = 'project-name-consumer'
project_name_topics: List[str]
def create_app():
app = ProjectApp(config=Config)
for uri in app.config.project_name_topics:
print(uri)
return app
def app_cli(app_factory):
parser = ArgumentParser()
parser.add_argument('-e', '--envfile', help='load dotenv file')
args = parser.parse_args()
if args.envfile:
load_dotenv(args.envfile)
if isinstance(app_factory, App):
app = app_factory
else:
app = app_factory()
app.run()
cat setup.py
from os import path, walk
import re
from setuptools import find_packages, setup, Extension
from setuptools.command.build_py import build_py as _build_py
from Cython.Build import cythonize
import sysconfig
def open_file(filepath, mode='r'):
here = path.abspath(path.dirname(__file__))
full_path = path.join(here, filepath)
return open(full_path, mode)
def find_version(package):
with open_file('{}/__init__.py'.format(package)) as f:
return re.findall(r"^__version__ = '([^']+)'\r?$", f.read(), re.M)[0]
def find_requires():
with open_file('requirements.txt') as f:
return [line.strip() for line in f]
NAME = 'minimal_example'
VERSION = find_version('project_name_consumer')
PACKAGES = find_packages()
REQUIRES = find_requires()
EXCLUDE_FILES = [
'project_name_consumer/__init__.py',
'project_name_consumer/__main__.py',
]
class build_py(_build_py):
def find_package_modules(self, package, package_dir):
ext_suffix = sysconfig.get_config_var('EXT_SUFFIX')
modules = super().find_package_modules(package, package_dir)
filtered_modules = []
for (pkg, mod, filepath) in modules:
if path.exists(filepath.replace('.py', ext_suffix)):
continue
filtered_modules.append((pkg, mod, filepath, ))
return filtered_modules
def get_ext_paths(root_dir, exclude_files):
"""get filepaths for compilation"""
paths = []
for root, dirs, files in walk(root_dir):
for filename in files:
if path.splitext(filename)[1] != '.py':
continue
file_path = path.join(root, filename)
if file_path in exclude_files:
continue
paths.append(file_path)
return paths
setup(
name=NAME,
version=VERSION,
packages=PACKAGES,
include_package_data=True,
zip_safe=False,
install_requires=REQUIRES,
ext_modules=cythonize(
get_ext_paths('project_name_consumer', EXCLUDE_FILES),
compiler_directives={'language_level': 3}
),
cmdclass={
'build_py': build_py
}
)
python traceback is as follows
$ python3 -m project -e .env
INFO:app:Loading dotenv from .env
...
AttributeError: 'Config' object has no attribute 'project_name_topics'
I am using automate.sh for redoing repeated efforts
cat automate.sh
START_TIME=$(date +%s)
deactivate
rm -rf build
rm -rf dist
rm -rf venv
python3 -m venv venv
source venv/bin/activate
pip install --upgrade pip
pip install Cython
pip install wheel
pip install -r requirements.txt
python setup.py build_ext --inplace
python setup.py bdist_wheel
pip install dist/minimal_example-1.0.0.0-cp36-cp36m-macosx_10_15_x86_64.whl
python -m project_name_consumer
END_TIME=$(date +%s)
echo "script took $(($END_TIME - $START_TIME)) seconds..."
cat requirements.txt
python-dotenv==0.8.2
pydantic==0.7.1
$ minimal_example tree
.
├── __init__.py
├── automate.sh
├── project_name_consumer
│ ├── __init__.py
│ ├── __main__.py
│ └── app.py
├── requirements.txt
├── setup.py

Related

Pytest not able to run test where script A importing another script B in the same folder level as A and giving me ModuleNotFoundError

I am trying to run the unit test using pytest in this project, here main_0.py is importing s3 file.
I am getting ModuleNotFoundError: no module named 's3'
Project Folder Structure
some_project
└───src
├───main
│ └───lambda_function
│ └───some
│ main_0.py
│ s3.py
│
└───test
└───unittest
└───lambda_function
└───some
test_main_0.py
test_s3.py
main_0.py
from s3 import PrintS3
def lambda_handler():
obj = PrintS3()
res = obj.print_txt()
return res
s3.py
class PrintS3:
def __init__(self) -> None:
self.txt = "Hello"
def print_txt(self):
print(self.txt)
return self.txt
test_main_0.py
import unittest
class TestSomeMain(unittest.TestCase):
def test_main_0(self):
from src.main.lambda_function.some.main_0 import lambda_handler
res = lambda_handler()
assert res == "Hello"
test_s3.py is empty.
I also tried adding an empty __init__.py file in both the dir but still the same error
Project Folder Structure after adding __init__.py file
some_project
└───src
├───main
│ └───lambda_function
│ └───some
│ main_0.py
│ s3.py
│ __init__.py
│
└───test
└───unittest
└───lambda_function
└───some
test_main_0.py
test_s3.py
__init__.py
the command I am using to run pytest:
python -m pytest ./src/test
and I am inside some_project folder and also using main_0.py instead of main.py because to not get confused with main folder
Edit 2:
I am to run the test case successfully by adding sys.path in the test_main_0.py file but it is breaking linting and hinting in the code editor (vscode) it didn't broke the linting and hinting, both import statement works but is there any better way.
new test_main_0.py:
import unittest
import os
import sys
sys.path.append(os.path.abspath("./src/main/lambda_function/some/"))
class TestSomeMain(unittest.TestCase):
def test_main_0(self):
from src.main.lambda_function.some.main_0 import lambda_handler # this works
from main_0 import lambda_handler # this also works but break linting and hinting in the code editor
res = lambda_handler()
assert res == "Hello"
could you please try
import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
from some.s3 import PrintS3
def lambda_handler():
obj = PrintS3()
res = obj.print_txt()
return res
I found a somewhat working solution.
added setUp() and tearDown() methods in the class for inserting and removing path in sys.path
path in sys.path is the location of the directory where the main_0.py and s3.py is located
import unittest
import os
import sys
class TestSomeMain(unittest.TestCase):
def setUp(self) -> None:
sys.path.insert(0, os.path.abspath("./src/main/lambda_function/some/"))
def tearDown(self) -> None:
sys.path.remove(os.path.abspath("./src/main/lambda_function/some/"))
def test_main_0(self):
from src.main.lambda_function.some.main_0 import lambda_handler
res = lambda_handler()
assert res == "Hello"
also update the test command in the terminal:
python -m pytest ./src/test/unittest/lambda_function/some --cov ./src/main/lambda_function/some --cov-report html

Unable to import a module and dependancies in Python unit test

I am using Python's unittest library to add a unit test. This how the folder structure looks like in my project.
Project
|-- __init__.py
|
|-- src
| |
| |-- __init__.py
| |-- main.py
|
|-- test
|-- __init__.py
|-- test_basic.py
Code for main.py
from dotenv import load_dotenv
load_dotenv()
import newrelic.agent
newrelic.agent.initialize()
from .processAnalyticsData import run_query_and_upload_results_to_s3, upload_script_results_to_s3
import sys
import click
import getopt
import json
import os
BUCKET_NAME = os.environ["S3_BUCKET_NAME"]
cwd = os.getcwd()
#click.group(chain=True)
def run_scripts():
pass
#run_scripts.command('run_script')
#click.option('-f', '--frequency', required=True, nargs=1, type=click.Choice(['WEEKLY', 'DAILY', 'ONCE', 'MONTHLY'], case_sensitive=False))
def run_script(frequency):
with open(cwd + '/src/config.json') as config_file:
script_paths = json.load(config_file)
output_json = [x for x in script_paths["executable_scripts"] if x["frequency"] == frequency]
for item in output_json:
file_name = item["fileName"]
script_name = item["path"]
script_type = item["type"]
if script_type == 'sql':
run_query_and_upload_results_to_s3(script_name, BUCKET_NAME, file_name)
elif script_type == 'python':
upload_script_results_to_s3(script_name, BUCKET_NAME, file_name)
else:
raise Exception('Script type is incorrect. Please provide the correct value in the config file.')
if __name__ == "__main__":
run_scripts()
I started writing my unit test like this:
from src.main import *
import unittest
class WidgetTestCase(unittest.TestCase):
def setUp(self):
pass
if __name__ == '__main__':
unittest.main()
I get an error AttributeError: 'module' object has no attribute 'test_basic'
However, If I remove the line from src.main import * and run the test using python -m test.test_basic , it works perfectly fine. Is there a problem with the imports ?
Upon running python -c "from src.main import *"
I noticed this error
File "<string>", line 1, in <module>
File "src/main.py", line 1, in <module>
from dotenv import load_dotenv
ImportError: No module named dotenv ```

How to read pytest cmd configs from a non-fixture function

conftest.py
def pytest_addoption(parser):
parser.addoption('--env', action='store', default='qa',
help='setup environment: development')
#pytest.fixture(scope="session", autouse=True)
def get_config(request):
environment = request.config.getoption("--env")
with open(environment, "r") as f:
config = json.load(f)
return config
lib.py
class lib1():
def cal_message():
# get something from config, do something and return it, but how to get the config here
test_config.py
import lib
def test_lib():
a = lib.lib1()
a.cal_message()
The question is how to get config from lib.py?
You can add a custom attribute to pytest and pass variables that way.
def pytest_addoption(parser):
parser.addoption('--env', action='store', default='qa',
help='setup environment: development')
#pytest.fixture(scope="session", autouse=True)
def get_config(request):
environment = request.config.getoption("--env")
pytest.custom_config = {"env":environment}
with open(environment, "r") as f:
config = json.load(f)
return config
And, get it in lib.py:
import pytest
class lib1():
def cal_message():
env = pytest.custom_config["env"]
A caveat: This will work only when lib.py is called from test module of a pytest test session. If you try using it in a standalone script, it won't work. e.g. python lib.py --env a will not work.

Import Pybind11/C++ compiled module not working

I'm fairly new to python setuptools and dist. I can't seem to get a c++ wrapper module to import so that I may use the functions.
The compiled .so file shows up in the installed-files.txt after pip install but yet importing the wrapper does not.
setup.py
import subprocess
import os
from pathlib import Path
from setuptools import setup, Extension, find_packages
from setuptools.command.build_ext import build_ext
class CMakeExtension(Extension):
def __init__(self, name):
Extension.__init__(self, name, sources=[])
class CMakeBuild(build_ext):
def run(self):
for ext in self.extensions:
self.build_cmake(ext)
super().run()
def build_cmake(self, ext):
try:
subprocess.check_output(["cmake", "--version"])
except OSError:
raise RuntimeError(
"CMake must be installed to build the following extensions: "
+ ", ".join(e.name for e in self.extensions)
)
cwd = Path().absolute()
# these dirs will be created in build_py, so if you don't have
# any python sources to bundle, the dirs will be missing
build_temp = Path(self.build_temp)
build_temp.mkdir(parents=True, exist_ok=True)
extdir = Path(self.get_ext_fullpath(ext.name))
extdir.mkdir(parents=True, exist_ok=True)
pyenv_root = os.environ.get("PYENV_ROOT")
cmake_args = [
f"-DCMAKE_LIBRARY_OUTPUT_DIRECTORY={extdir}",
"-DCMAKE_BUILD_TYPE=Release",
"-DTRANSIT_INCLUDE_TESTS:BOOL=OFF",
]
if pyenv_root is not None:
cmake_args += [f"-DPYTHON_EXECUTABLE={pyenv_root}/shims/python"]
build_args = ["--config", "Release", "--", "-j2"]
env = os.environ.copy()
self.announce("Running CMake prepare", level=3)
subprocess.check_call(["cmake", cwd] + cmake_args, cwd=build_temp, env=env)
self.announce("Building extensions")
cmake_cmd = ["cmake", "--build", "."] + build_args
subprocess.check_call(cmake_cmd, cwd=build_temp)
setup(
name="bgtfs_py_lib",
version="3.2.2",
long_description="",
zip_safe=False,
install_requires=[
"redis==2.10.6",
"cffi==1.11.5",
"numpy==1.15.3",
"patricia-trie==10",
"PuLP==1.6.8",
"py-lz4framed==0.13.0",
"pycparser==2.19",
"pyparsing==2.2.2",
"pypng==0.0.18",
"pyproj==1.9.5.1",
"python-graph-core==1.8.2",
"pytz==2018.6",
"redis==2.10.6",
"requests==2.21.0",
"six==1.11.0",
"tabulate==0.8.2",
"unicodecsv==0.14.1",
"Unidecode==1.0.22",
],
ext_modules=[CMakeExtension("bgtfs_py_lib.bgtfs_py_lib_wrapper")],
cmdclass=dict(build_ext=CMakeBuild),
packages=find_packages(exclude=["tests"]),
package_data={"": "*.so"},
)
CMakeLists.txt
cmake_minimum_required(VERSION 3.8)
project(bgtfs_py_lib_wrapper)
include(submodules/transitLib/bgtfs/bgtfsLib/TransitUtils/transit_shared.cmake)
# bgtfsPyLib
set(PYBIND11_CPP_STANDARD -std=c++14)
set(PYBIND11_PYTHON_VERSION 3.6)
add_subdirectory(submodules/transitLib transitLib)
add_subdirectory(pybind11)
include_directories(
cpp
submodules/transitLib/bgtfs/bgtfsLib/
submodules/transitLib/bgtfs/bgtfsLib/bgtfsLib
)
pybind11_add_module(bgtfs_py_lib_wrapper MODULE NO_EXTRAS
cpp/pybindCasts.cpp
cpp/bgtfsPyLibWrapper.cpp
cpp/BgtfsFeedHandler.cpp
)
target_link_libraries(bgtfs_py_lib_wrapper PRIVATE transitLib)
set_target_properties(bgtfs_py_lib_wrapper PROPERTIES
LIBRARY_OUTPUT_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/bgtfs_py_lib
LIBRARY_OUTPUT_DIRECTORY_DEBUG ${CMAKE_CURRENT_LIST_DIR}/bgtfs_py_lib
LIBRARY_OUTPUT_DIRECTORY_RELEASE ${CMAKE_CURRENT_LIST_DIR}/bgtfs_py_lib
)
target_compile_definitions(bgtfs_py_lib_wrapper PRIVATE TRANSIT_SERVER)
target_compile_definitions(transitLib PRIVATE TRANSIT_SERVER)
target_compile_definitions(bgtfsLib PRIVATE ENABLE_BACKWARD_FILE_COMPATIBILITY YES)
set_default_target_properties(bgtfs_py_lib_wrapper)
I am trying to use virtualenv in order to isolate the modules required to run in my project.
Here is the file struct:
.
|-- CMakeLists.txt
|-- README.md
|-- bgtfs_py_lib
| |-- __init__.py
| |-- bgtfs_handler
|-- cpp
| |-- BgtfsFeedHandler.cpp
| |-- BgtfsFeedHandler.h
| |-- bgtfsPyLibWrapper.cpp
| `-- pybindCasts.cpp
|-- deploy.sh
|-- make.sh
|-- pybind11
|-- setup.py
|-- submodules
|-- test.sh
`-- tests
|-- __init__.py
|-- __pycache__
|-- fixtures
|-- test.py
`-- test_functions.py
The init.py file in bgtfs_py_lib looks like this. The functions of the wrapper are being exposed.
import bgtfs_py_lib_wrapper as _bgtfs_py_lib
from bgtfs_py_lib.bgtfs_handler.bgtfs_handler import BgtfsHandler
In the other project it is being pip installed using git+ssh and egg .
git+ssh://git#github.com/path/to/project.git#build/production/setup#egg=bgtfs_py_lib
When I ctrl+space in pyCharm the wrapper module is found and the Classes are present.
The module is located in Binary Skeletons directory but yet
import bgtfs_py_lib_wrapper as _bgtfs_py_lib just simply does not work and the following exception is thrown: ModuleNotFoundError: No module named 'bgtfs_py_lib_wrapper'
Can someone please help me figure out how to properly build C++/Pybind11 modules and use them in a pip installed package with virtualenv?
I have finally solved it
Turns out the CMakeLists.txt needed to be changed because it there was inconsistencies between the cmake_args and the build_args. So the CMakeList.txt file now looks like this:
CMakeLists.txt
cmake_minimum_required(VERSION 3.8)
project(bgtfs_py_lib_wrapper)
include(submodules/transitLib/bgtfs/bgtfsLib/TransitUtils/transit_shared.cmake)
# bgtfsPyLib
set(PYBIND11_CPP_STANDARD -std=c++14)
set(PYBIND11_PYTHON_VERSION 3.6)
add_subdirectory(submodules/transitLib transitLib)
add_subdirectory(pybind11)
include_directories(
cpp
submodules/transitLib/bgtfs/bgtfsLib/
submodules/transitLib/bgtfs/bgtfsLib/bgtfsLib
)
pybind11_add_module(bgtfs_py_lib_wrapper MODULE NO_EXTRAS
cpp/pybindCasts.cpp
cpp/bgtfsPyLibWrapper.cpp
cpp/BgtfsFeedHandler.cpp
)
target_link_libraries(bgtfs_py_lib_wrapper PRIVATE transitLib)
target_compile_definitions(bgtfs_py_lib_wrapper PRIVATE TRANSIT_SERVER)
target_compile_definitions(transitLib PRIVATE TRANSIT_SERVER)
target_compile_definitions(bgtfsLib PRIVATE ENABLE_BACKWARD_FILE_COMPATIBILITY YES)
set_default_target_properties(bgtfs_py_lib_wrapper)
and the setup.py file is now:
setup.py
import subprocess
import os
from pathlib import Path
from setuptools import setup, Extension, find_packages
from setuptools.command.build_ext import build_ext
class CMakeExtension(Extension):
def __init__(self, name):
Extension.__init__(self, name, sources=[])
class CMakeBuild(build_ext):
def run(self):
for ext in self.extensions:
self.build_cmake(ext)
super().run()
def build_cmake(self, ext):
try:
subprocess.check_output(["cmake", "--version"])
except OSError:
raise RuntimeError(
"CMake must be installed to build the following extensions: "
+ ", ".join(e.name for e in self.extensions)
)
cwd = Path().absolute()
# these dirs will be created in build_py, so if you don't have
# any python sources to bundle, the dirs will be missing
if not os.path.exists(self.build_temp):
os.makedirs(self.build_temp)
extdir = os.path.abspath(os.path.dirname(self.get_ext_fullpath(ext.name)))
pyenv_root = os.environ.get("PYENV_ROOT")
cmake_args = [
f"-DCMAKE_LIBRARY_OUTPUT_DIRECTORY={extdir}",
"-DCMAKE_BUILD_TYPE=Release",
"-DTRANSIT_INCLUDE_TESTS:BOOL=OFF",
]
if pyenv_root is not None:
cmake_args += [f"-DPYTHON_EXECUTABLE={pyenv_root}/shims/python"]
build_args = ["--config", "Release", "--", "-j2"]
env = os.environ.copy()
self.announce("Running CMake prepare", level=3)
subprocess.check_call(["cmake", cwd] + cmake_args, cwd=self.build_temp, env=env)
self.announce("Building extensions")
cmake_cmd = ["cmake", "--build", "."] + build_args
subprocess.check_call(cmake_cmd, cwd=self.build_temp)
setup(
name="bgtfs_py_lib",
version="3.2.2",
author="Transit App",
author_email="juan#transitapp.com",
description="A python wrapper for the transitLib",
long_description="",
zip_safe=False,
license="Transit",
install_requires=[
"bgtfs_py_lib",
"redis==2.10.6",
"cffi==1.11.5",
"numpy==1.15.3",
"patricia-trie==10",
"PuLP==1.6.8",
"py-lz4framed==0.13.0",
"pycparser==2.19",
"pyparsing==2.2.2",
"pypng==0.0.18",
"pyproj==1.9.5.1",
"python-graph-core==1.8.2",
"pytz==2018.6",
"redis==2.10.6",
"requests==2.21.0",
"six==1.11.0",
"tabulate==0.8.2",
"unicodecsv==0.14.1",
"Unidecode==1.0.22",
],
ext_modules=[CMakeExtension("bgtfs_py_lib_wrapper")],
cmdclass=dict(build_ext=CMakeBuild),
packages=find_packages(exclude=["tests", "*.plist"]),
package_data={"": "*.so"},
)
The extension file was not to be outputted into the bgtfs_py_lib directory but rather the virtual environment instead and the project itself was to be required
Shout out to Sergei for the help figuring things out

How do I write code to avoid error when windows service read config file?

I have file tree:
f:/src/
restore.ini
config.py
log.py
service.py
test.py
the test.py code like this:
import service
import log
import config
class Test(object):
def __init__(self):
super(Test, self).__init__()
def setUp(self):
self.currentRound = int(config.read_config_co(r'restore.ini', 'Record')['currentRound'])
def testAction(self):
log.info(self.currentRound)
def tearDown(self):
config.write_config_update_co(self.currentRound-1, 'Record', 'currentRound', r'restore.ini')
class PerfServiceThread(service.NTServiceThread):
def run (self):
while self.notifyEvent.isSet():
try:
test = Test()
test.setUp()
test.testAction()
test.tearDown()
except:
import traceback
log.info(traceback.format_exc())
class PerfService(pywinservice.NTService):
_svc_name_ = 'myservice'
_svc_display_name_ = "My Service"
_svc_description_ = "This is what My Service does"
_svc_thread_class = PerfServiceThread
if __name__ == '__main__':
pywinservice.handleCommandLine(PerfService)
Now, I use cmdline python test.py install and python test.py start to action service, but error.
If I move all files in directory src to C:\Python27\Lib\site-packages\win32\src, and change code:
self.currentRound = int(config.read_config_co(r'src\restore.ini', 'Record')['currentRound'])
config.write_config_update_co(self.currentRound-1, 'Record', 'currentRound', r'src\restore.ini')
Now, it's OK!
I want not move directory src, how do I do?
Thanks!
if you use relative paths for file or directory names python will look for them (or create them) in your current working directory (the $PWD variable in bash; something similar on windows?).
if you want to have them relative to the current python file, you can use (python 3.4)
from pathlib import Path
HERE = Path(__file__).parent.resolve()
RESTORE_INI = HERE / 'restore.ini'
or (python 2.7)
import os.path
HERE = os.path.abspath(os.path.dirname(__file__))
RESTORE_INI = os.path.join(HERE, 'restore.ini')
if your restore.ini file lives in the same directory as your python script.
then you can use that in
def setUp(self):
self.currentRound = int(config.read_config_co(RESTORE_INI,
'Record')['currentRound'])

Categories

Resources