I'm fairly new to Python setuptools and dist. I can't get a C++ wrapper module to import so that I can use its functions.
The compiled .so file shows up in installed-files.txt after pip install, yet importing the wrapper still fails.
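As a first diagnostic, it can help to ask the interpreter inside the virtualenv what it actually sees; a minimal sketch using only the standard library (the module name is taken from the import further down):
import importlib.util
import sys

# Show which interpreter is running and where it searches for modules.
print(sys.executable)
print(sys.path)

# Ask where the wrapper extension would be loaded from, if it can be found at all.
spec = importlib.util.find_spec("bgtfs_py_lib_wrapper")
print(spec.origin if spec is not None else "bgtfs_py_lib_wrapper not found on sys.path")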
setup.py
import subprocess
import os
from pathlib import Path
from setuptools import setup, Extension, find_packages
from setuptools.command.build_ext import build_ext


class CMakeExtension(Extension):
    def __init__(self, name):
        Extension.__init__(self, name, sources=[])


class CMakeBuild(build_ext):
    def run(self):
        for ext in self.extensions:
            self.build_cmake(ext)
        super().run()

    def build_cmake(self, ext):
        try:
            subprocess.check_output(["cmake", "--version"])
        except OSError:
            raise RuntimeError(
                "CMake must be installed to build the following extensions: "
                + ", ".join(e.name for e in self.extensions)
            )
        cwd = Path().absolute()
        # these dirs will be created in build_py, so if you don't have
        # any python sources to bundle, the dirs will be missing
        build_temp = Path(self.build_temp)
        build_temp.mkdir(parents=True, exist_ok=True)
        extdir = Path(self.get_ext_fullpath(ext.name))
        extdir.mkdir(parents=True, exist_ok=True)
        pyenv_root = os.environ.get("PYENV_ROOT")
        cmake_args = [
            f"-DCMAKE_LIBRARY_OUTPUT_DIRECTORY={extdir}",
            "-DCMAKE_BUILD_TYPE=Release",
            "-DTRANSIT_INCLUDE_TESTS:BOOL=OFF",
        ]
        if pyenv_root is not None:
            cmake_args += [f"-DPYTHON_EXECUTABLE={pyenv_root}/shims/python"]
        build_args = ["--config", "Release", "--", "-j2"]
        env = os.environ.copy()
        self.announce("Running CMake prepare", level=3)
        subprocess.check_call(["cmake", cwd] + cmake_args, cwd=build_temp, env=env)
        self.announce("Building extensions")
        cmake_cmd = ["cmake", "--build", "."] + build_args
        subprocess.check_call(cmake_cmd, cwd=build_temp)


setup(
    name="bgtfs_py_lib",
    version="3.2.2",
    long_description="",
    zip_safe=False,
    install_requires=[
        "redis==2.10.6",
        "cffi==1.11.5",
        "numpy==1.15.3",
        "patricia-trie==10",
        "PuLP==1.6.8",
        "py-lz4framed==0.13.0",
        "pycparser==2.19",
        "pyparsing==2.2.2",
        "pypng==0.0.18",
        "pyproj==1.9.5.1",
        "python-graph-core==1.8.2",
        "pytz==2018.6",
        "redis==2.10.6",
        "requests==2.21.0",
        "six==1.11.0",
        "tabulate==0.8.2",
        "unicodecsv==0.14.1",
        "Unidecode==1.0.22",
    ],
    ext_modules=[CMakeExtension("bgtfs_py_lib.bgtfs_py_lib_wrapper")],
    cmdclass=dict(build_ext=CMakeBuild),
    packages=find_packages(exclude=["tests"]),
    package_data={"": "*.so"},
)
CMakeLists.txt
cmake_minimum_required(VERSION 3.8)
project(bgtfs_py_lib_wrapper)
include(submodules/transitLib/bgtfs/bgtfsLib/TransitUtils/transit_shared.cmake)
# bgtfsPyLib
set(PYBIND11_CPP_STANDARD -std=c++14)
set(PYBIND11_PYTHON_VERSION 3.6)
add_subdirectory(submodules/transitLib transitLib)
add_subdirectory(pybind11)
include_directories(
    cpp
    submodules/transitLib/bgtfs/bgtfsLib/
    submodules/transitLib/bgtfs/bgtfsLib/bgtfsLib
)
pybind11_add_module(bgtfs_py_lib_wrapper MODULE NO_EXTRAS
    cpp/pybindCasts.cpp
    cpp/bgtfsPyLibWrapper.cpp
    cpp/BgtfsFeedHandler.cpp
)
target_link_libraries(bgtfs_py_lib_wrapper PRIVATE transitLib)
set_target_properties(bgtfs_py_lib_wrapper PROPERTIES
    LIBRARY_OUTPUT_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/bgtfs_py_lib
    LIBRARY_OUTPUT_DIRECTORY_DEBUG ${CMAKE_CURRENT_LIST_DIR}/bgtfs_py_lib
    LIBRARY_OUTPUT_DIRECTORY_RELEASE ${CMAKE_CURRENT_LIST_DIR}/bgtfs_py_lib
)
target_compile_definitions(bgtfs_py_lib_wrapper PRIVATE TRANSIT_SERVER)
target_compile_definitions(transitLib PRIVATE TRANSIT_SERVER)
target_compile_definitions(bgtfsLib PRIVATE ENABLE_BACKWARD_FILE_COMPATIBILITY YES)
set_default_target_properties(bgtfs_py_lib_wrapper)
I am trying to use virtualenv in order to isolate the modules required to run in my project.
Here is the file structure:
.
|-- CMakeLists.txt
|-- README.md
|-- bgtfs_py_lib
| |-- __init__.py
| |-- bgtfs_handler
|-- cpp
| |-- BgtfsFeedHandler.cpp
| |-- BgtfsFeedHandler.h
| |-- bgtfsPyLibWrapper.cpp
| `-- pybindCasts.cpp
|-- deploy.sh
|-- make.sh
|-- pybind11
|-- setup.py
|-- submodules
|-- test.sh
`-- tests
    |-- __init__.py
    |-- __pycache__
    |-- fixtures
    |-- test.py
    `-- test_functions.py
The __init__.py file in bgtfs_py_lib looks like this; it exposes the wrapper's functions:
import bgtfs_py_lib_wrapper as _bgtfs_py_lib
from bgtfs_py_lib.bgtfs_handler.bgtfs_handler import BgtfsHandler
In the other project, the package is pip installed from git+ssh with an egg fragment:
git+ssh://git@github.com/path/to/project.git@build/production/setup#egg=bgtfs_py_lib
When I Ctrl+Space in PyCharm, the wrapper module is found and its classes are present.
The module shows up in PyCharm's Binary Skeletons directory, and yet
import bgtfs_py_lib_wrapper as _bgtfs_py_lib simply does not work; the following exception is thrown: ModuleNotFoundError: No module named 'bgtfs_py_lib_wrapper'
Can someone please help me figure out how to properly build C++/Pybind11 modules and use them in a pip installed package with virtualenv?
I have finally solved it.
It turns out the CMakeLists.txt needed to be changed because there were inconsistencies between the cmake_args and the build_args. The CMakeLists.txt file now looks like this:
CMakeLists.txt
cmake_minimum_required(VERSION 3.8)
project(bgtfs_py_lib_wrapper)
include(submodules/transitLib/bgtfs/bgtfsLib/TransitUtils/transit_shared.cmake)
# bgtfsPyLib
set(PYBIND11_CPP_STANDARD -std=c++14)
set(PYBIND11_PYTHON_VERSION 3.6)
add_subdirectory(submodules/transitLib transitLib)
add_subdirectory(pybind11)
include_directories(
    cpp
    submodules/transitLib/bgtfs/bgtfsLib/
    submodules/transitLib/bgtfs/bgtfsLib/bgtfsLib
)
pybind11_add_module(bgtfs_py_lib_wrapper MODULE NO_EXTRAS
    cpp/pybindCasts.cpp
    cpp/bgtfsPyLibWrapper.cpp
    cpp/BgtfsFeedHandler.cpp
)
target_link_libraries(bgtfs_py_lib_wrapper PRIVATE transitLib)
target_compile_definitions(bgtfs_py_lib_wrapper PRIVATE TRANSIT_SERVER)
target_compile_definitions(transitLib PRIVATE TRANSIT_SERVER)
target_compile_definitions(bgtfsLib PRIVATE ENABLE_BACKWARD_FILE_COMPATIBILITY YES)
set_default_target_properties(bgtfs_py_lib_wrapper)
and the setup.py file is now:
setup.py
import subprocess
import os
from pathlib import Path
from setuptools import setup, Extension, find_packages
from setuptools.command.build_ext import build_ext


class CMakeExtension(Extension):
    def __init__(self, name):
        Extension.__init__(self, name, sources=[])


class CMakeBuild(build_ext):
    def run(self):
        for ext in self.extensions:
            self.build_cmake(ext)
        super().run()

    def build_cmake(self, ext):
        try:
            subprocess.check_output(["cmake", "--version"])
        except OSError:
            raise RuntimeError(
                "CMake must be installed to build the following extensions: "
                + ", ".join(e.name for e in self.extensions)
            )
        cwd = Path().absolute()
        # these dirs will be created in build_py, so if you don't have
        # any python sources to bundle, the dirs will be missing
        if not os.path.exists(self.build_temp):
            os.makedirs(self.build_temp)
        extdir = os.path.abspath(os.path.dirname(self.get_ext_fullpath(ext.name)))
        pyenv_root = os.environ.get("PYENV_ROOT")
        cmake_args = [
            f"-DCMAKE_LIBRARY_OUTPUT_DIRECTORY={extdir}",
            "-DCMAKE_BUILD_TYPE=Release",
            "-DTRANSIT_INCLUDE_TESTS:BOOL=OFF",
        ]
        if pyenv_root is not None:
            cmake_args += [f"-DPYTHON_EXECUTABLE={pyenv_root}/shims/python"]
        build_args = ["--config", "Release", "--", "-j2"]
        env = os.environ.copy()
        self.announce("Running CMake prepare", level=3)
        subprocess.check_call(["cmake", cwd] + cmake_args, cwd=self.build_temp, env=env)
        self.announce("Building extensions")
        cmake_cmd = ["cmake", "--build", "."] + build_args
        subprocess.check_call(cmake_cmd, cwd=self.build_temp)
setup(
    name="bgtfs_py_lib",
    version="3.2.2",
    author="Transit App",
    author_email="juan@transitapp.com",
    description="A python wrapper for the transitLib",
    long_description="",
    zip_safe=False,
    license="Transit",
    install_requires=[
        "bgtfs_py_lib",
        "redis==2.10.6",
        "cffi==1.11.5",
        "numpy==1.15.3",
        "patricia-trie==10",
        "PuLP==1.6.8",
        "py-lz4framed==0.13.0",
        "pycparser==2.19",
        "pyparsing==2.2.2",
        "pypng==0.0.18",
        "pyproj==1.9.5.1",
        "python-graph-core==1.8.2",
        "pytz==2018.6",
        "redis==2.10.6",
        "requests==2.21.0",
        "six==1.11.0",
        "tabulate==0.8.2",
        "unicodecsv==0.14.1",
        "Unidecode==1.0.22",
    ],
    ext_modules=[CMakeExtension("bgtfs_py_lib_wrapper")],
    cmdclass=dict(build_ext=CMakeBuild),
    packages=find_packages(exclude=["tests", "*.plist"]),
    package_data={"": "*.so"},
)
The extension file was not supposed to be output into the bgtfs_py_lib directory but into the virtual environment instead, and the project itself had to be added as a requirement.
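In other words, the decisive change in setup.py is how extdir is computed before being passed to CMake (together with renaming the extension from bgtfs_py_lib.bgtfs_py_lib_wrapper to plain bgtfs_py_lib_wrapper and dropping the hardcoded LIBRARY_OUTPUT_DIRECTORY from CMakeLists.txt). A before/after excerpt of just that part of build_cmake:
# before: the full extension path was treated as a directory and created,
# so CMake dropped the .so into a folder named like the .so file itself
extdir = Path(self.get_ext_fullpath(ext.name))
extdir.mkdir(parents=True, exist_ok=True)

# after: use the parent directory of the path setuptools expects,
# so the built wrapper lands exactly where pip will package it from
extdir = os.path.abspath(os.path.dirname(self.get_ext_fullpath(ext.name)))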
Shout out to Sergei for the help figuring things out
Related
I have a custom module that includes a setup.py file.
I run python setup.py build_ext --inplace in a Conda-built virtual environment on an M2 Mac.
I get the following error at runtime (truncated):
ValueError: libs.portfolio_manager.lib.adjustment.Adjustment size changed, may indicate binary incompatibility. Expected 56 from C header, got 24 from PyObject
The directory structure is something like this:
/src
    /libs
        /portfolio_manager
            /lib/
                __init__.py
                adjustment.pyx
                adjustment.pxd
setup.py
Here are the relevant parts of the setup.py file:
import os

import versioneer
from setuptools import Extension, setup


class LazyBuildExtCommandClass(dict):
    """
    Lazy command class that defers operations requiring Cython and numpy until
    they've actually been downloaded and installed by setup_requires.
    """

    def __contains__(self, key):
        return key == "build_ext" or super(LazyBuildExtCommandClass, self).__contains__(
            key
        )

    def __setitem__(self, key, value):
        if key == "build_ext":
            raise AssertionError("build_ext overridden!")
        super(LazyBuildExtCommandClass, self).__setitem__(key, value)

    def __getitem__(self, key):
        if key != "build_ext":
            return super(LazyBuildExtCommandClass, self).__getitem__(key)
        import numpy
        from Cython.Distutils import build_ext as cython_build_ext

        # Cython_build_ext isn't a new-style class in Py2.
        class build_ext(cython_build_ext, object):
            """
            Custom build_ext command that lazily adds numpy's include_dir to
            extensions.
            """

            def build_extensions(self):
                """
                Lazily append numpy's include directory to Extension includes.
                This is done here rather than at module scope because setup.py
                may be run before numpy has been installed, in which case
                importing numpy and calling `numpy.get_include()` will fail.
                """
                numpy_incl = numpy.get_include()
                for ext in self.extensions:
                    ext.include_dirs.append(numpy_incl)
                super(build_ext, self).build_extensions()

        return build_ext


ext_modules = [
    Extension(
        name="src.libs.portfolio_manager.lib.adjustment",
        sources=["src/libs/portfolio_manager/lib/adjustment.pyx"],
        define_macros=[("NPY_NO_DEPRECATED_API", "NPY_1_7_API_VERSION")],
    ),
]

setup(
    ext_modules=ext_modules,
    cmdclass=LazyBuildExtCommandClass(versioneer.get_cmdclass()),
    package_data={
        root.replace(os.sep, "."): ["*.pyi", "*.pyx", "*.pxi", "*.pxd"]
        for root, dirnames, filenames in os.walk("src/libs/portfolio_manager")
        if "__pycache__" not in root
    },
)
Everything I read about this error has to do with an unsuppressed warning in a past version of Numpy.
What is this error caused by and how do I fix it?
I am running into some problems loading .env file contents into a config file as app config attributes. I am using pydantic 0.7.1 with Python 3.6.7 to validate the type and existence of variables. The project structure is laid out as follows.
cat .env
APP_PROJECT_NAME_TOPICS='["streaming.topic"]'
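The value itself is just a JSON-encoded list of topic names; independent of pydantic, decoding it by hand is a one-liner (a minimal sketch):
import json
import os

# APP_PROJECT_NAME_TOPICS holds a JSON list, e.g. '["streaming.topic"]'
topics = json.loads(os.environ["APP_PROJECT_NAME_TOPICS"])
print(topics)  # -> ['streaming.topic']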
cat __main__.py
from project_name_consumer.app import create_app, app_cli

if __name__ == '__main__':
    app_cli(create_app)
cat __init__.py
__version__ = '1.0.0.0'
cat app.py
from typing import List
from collections import MutableMapping
from dotenv import load_dotenv
from os import getcwd, path
from time import sleep
from typing import Optional
from pydantic import BaseSettings
from argparse import ArgumentParser


class BaseConfig(BaseSettings):
    def __init__(self, prefix='', **kwargs):
        self.__config__.env_prefix = prefix
        super().__init__(**kwargs)

    def dict(self, prefix=''):
        return {
            key[len(prefix):]: value
            for key, value
            in self
            if key.startswith(prefix)
        }


class AppBaseConfig(BaseConfig):
    name: str = 'app'
    version: Optional[str] = None

    @property
    def app_name(self):
        return self.name

    @property
    def app_version(self):
        return self.version


class App(MutableMapping):
    def __init__(self, config=AppBaseConfig, config_prefix='APP_'):
        self._load_dotenv()
        self.config = config(prefix=config_prefix)
        self.state = {}

    def __getitem__(self, key):
        return self.state[key]

    def __setitem__(self, key, value):
        self.state[key] = value

    def __delitem__(self, key):
        del self.state[key]

    def __iter__(self):
        return iter(self.state)

    def __len__(self):
        return len(self.state)

    def _load_dotenv(self):
        cwd = getcwd()
        filepath = path.join(cwd, '.env')
        if path.exists(filepath):
            print('Loading dotenv from %s', filepath)
            load_dotenv(filepath)

    def run(self, **kwargs):
        run_app(self, **kwargs)


def run_app(app, loop=None, force_shutdown_delay=5.0):
    while True:
        sleep(1)
        app.run()


class ProjectAppBaseConfig(AppBaseConfig):
    project_url: str = ''
    project_cert: str = None


class ProjectApp(App):
    def __init__(self, config=ProjectAppBaseConfig):
        super().__init__(config)
        self.session = None


class Config(ProjectAppBaseConfig):
    app_name: str = 'project-name-consumer'
    project_name_topics: List[str]


def create_app():
    app = ProjectApp(config=Config)
    for uri in app.config.project_name_topics:
        print(uri)
    return app


def app_cli(app_factory):
    parser = ArgumentParser()
    parser.add_argument('-e', '--envfile', help='load dotenv file')
    args = parser.parse_args()
    if args.envfile:
        load_dotenv(args.envfile)
    if isinstance(app_factory, App):
        app = app_factory
    else:
        app = app_factory()
    app.run()
cat setup.py
from os import path, walk
import re
from setuptools import find_packages, setup, Extension
from setuptools.command.build_py import build_py as _build_py
from Cython.Build import cythonize
import sysconfig


def open_file(filepath, mode='r'):
    here = path.abspath(path.dirname(__file__))
    full_path = path.join(here, filepath)
    return open(full_path, mode)


def find_version(package):
    with open_file('{}/__init__.py'.format(package)) as f:
        return re.findall(r"^__version__ = '([^']+)'\r?$", f.read(), re.M)[0]


def find_requires():
    with open_file('requirements.txt') as f:
        return [line.strip() for line in f]


NAME = 'minimal_example'
VERSION = find_version('project_name_consumer')
PACKAGES = find_packages()
REQUIRES = find_requires()

EXCLUDE_FILES = [
    'project_name_consumer/__init__.py',
    'project_name_consumer/__main__.py',
]


class build_py(_build_py):
    def find_package_modules(self, package, package_dir):
        ext_suffix = sysconfig.get_config_var('EXT_SUFFIX')
        modules = super().find_package_modules(package, package_dir)
        filtered_modules = []
        for (pkg, mod, filepath) in modules:
            if path.exists(filepath.replace('.py', ext_suffix)):
                continue
            filtered_modules.append((pkg, mod, filepath, ))
        return filtered_modules


def get_ext_paths(root_dir, exclude_files):
    """get filepaths for compilation"""
    paths = []
    for root, dirs, files in walk(root_dir):
        for filename in files:
            if path.splitext(filename)[1] != '.py':
                continue
            file_path = path.join(root, filename)
            if file_path in exclude_files:
                continue
            paths.append(file_path)
    return paths


setup(
    name=NAME,
    version=VERSION,
    packages=PACKAGES,
    include_package_data=True,
    zip_safe=False,
    install_requires=REQUIRES,
    ext_modules=cythonize(
        get_ext_paths('project_name_consumer', EXCLUDE_FILES),
        compiler_directives={'language_level': 3}
    ),
    cmdclass={
        'build_py': build_py
    }
)
The Python traceback is as follows:
$ python3 -m project -e .env
INFO:app:Loading dotenv from .env
...
AttributeError: 'Config' object has no attribute 'project_name_topics'
I am using automate.sh to avoid redoing the repeated steps:
cat automate.sh
START_TIME=$(date +%s)
deactivate
rm -rf build
rm -rf dist
rm -rf venv
python3 -m venv venv
source venv/bin/activate
pip install --upgrade pip
pip install Cython
pip install wheel
pip install -r requirements.txt
python setup.py build_ext --inplace
python setup.py bdist_wheel
pip install dist/minimal_example-1.0.0.0-cp36-cp36m-macosx_10_15_x86_64.whl
python -m project_name_consumer
END_TIME=$(date +%s)
echo "script took $(($END_TIME - $START_TIME)) seconds..."
cat requirements.txt
python-dotenv==0.8.2
pydantic==0.7.1
$ minimal_example tree
.
├── __init__.py
├── automate.sh
├── project_name_consumer
│ ├── __init__.py
│ ├── __main__.py
│ └── app.py
├── requirements.txt
├── setup.py
I have a simple package that includes a C extension. I'm managing my dependencies and build process with Poetry.
When I run poetry build, the extension is compiled and included in the .tar.gz archive but not in the .whl and I don't understand why. Pip installing from the tar.gz archive works as expected but since the wheel is lacking the .so, pip installing the wheel results in an unusable package.
I've lifted the build machinery from here: https://github.com/python-poetry/poetry/issues/2740
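A quick way to confirm which artifact actually contains the compiled extension is to list the members of both files; a minimal sketch using only the standard library (run from the project root; the file names under dist/ will vary):
import glob
import tarfile
import zipfile

# Wheels are zip archives and sdists are gzipped tarballs, so their members
# can be listed directly to check whether the compiled .so made it in.
for whl in glob.glob("dist/*.whl"):
    print(whl, zipfile.ZipFile(whl).namelist())
for sdist in glob.glob("dist/*.tar.gz"):
    print(sdist, tarfile.open(sdist).getnames())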
pyproject.toml
[tool.poetry]
name = "python_ctypes"
version = "0.1.0"
description = ""
authors = ["Me"]
include = [
{path = "_extensions/*.so", format = "wheel"}
]
[tool.poetry.dependencies]
python = "^3.9"
numpy = "^1.22.1"
[tool.poetry.dev-dependencies]
[tool.poetry.build]
generate-setup-file = false
script = "build.py"
[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"
build.py
"""Poetry build script for python_ctypes"""
import os
import shutil
from distutils.command.build_ext import build_ext
from distutils.core import Distribution
from distutils.core import Extension
from distutils.errors import CCompilerError
from distutils.errors import DistutilsExecError
from distutils.errors import DistutilsPlatformError
extensions = [
Extension("python_ctypes._extensions.arraysum", ["python_ctypes/_extensions/arraysum.c"]),
]
class ExtBuilder(build_ext):
# This class allows C extension building to fail.
built_extensions = []
def run(self):
try:
build_ext.run(self)
except (DistutilsPlatformError, FileNotFoundError):
print("Unable to build the C extensions")
def build_extension(self, ext):
try:
build_ext.build_extension(self, ext)
except (CCompilerError, DistutilsExecError, DistutilsPlatformError, ValueError):
print('Unable to build the "{}" C extension, '
"python_ctypes will use the pure python version of the extension.".format(ext.name))
def build(setup_kwargs):
"""
This function is mandatory in order to build the extensions.
"""
print(setup_kwargs)
distribution = Distribution({"name": "python_ctypes", "ext_modules": extensions})
distribution.package_dir = "python_ctypes"
cmd = ExtBuilder(distribution)
cmd.ensure_finalized()
cmd.run()
# Copy built extensions back to the project
for output in cmd.get_outputs():
relative_extension = os.path.relpath(output, cmd.build_lib)
if not os.path.exists(output):
continue
shutil.copyfile(output, relative_extension)
mode = os.stat(relative_extension).st_mode
mode |= (mode & 0o444) >> 2
os.chmod(relative_extension, mode)
return setup_kwargs
if __name__ == "__main__":
build({})
I'm trying to incorporate a C++ extension as a submodule into an existing Python library via CMake. Building the C++ extension works fine, and importing it as a Python module works, but not as a submodule of the parent library.
I have the following directory structure:
frontend/
    foo.py
    bar.py
    backend/
        backend.cpp
The extension is bound to a python module via pybind:
PYBIND11_MODULE(backend, m)
{
    m.doc() = "backend c++ implementation"; // optional module docstring
    m.def("method", &method, "The method I want to call from python.");
}
In the CMakeLists.txt, the relevant line is:
pybind11_add_module(backend "frontend/backend/backend.cpp")
I've followed the instructions from here and here to write the setup.py script. I guess the most important lines look like this:
import os
import subprocess
from pathlib import Path

from setuptools import setup, Extension, find_packages
from setuptools.command.build_ext import build_ext
from setuptools.command.test import test as TestCommand


class CMakeExtension(Extension):
    def __init__(self, name, sourcedir=".", sources=[]):
        Extension.__init__(self, name, sources=[])


class CMakeBuild(build_ext):
    def run(self):
        build_directory = os.path.abspath(self.build_temp)
        if not os.path.exists(self.build_temp):
            os.makedirs(self.build_temp)
        cmake_list_dir = os.path.abspath(os.path.dirname(__file__))

        print("-" * 10, "Running CMake prepare", "-" * 40)
        subprocess.check_call(
            ["cmake", cmake_list_dir], cwd=self.build_temp,
        )

        print("-" * 10, "Building extensions", "-" * 40)
        cmake_cmd = ["cmake", "--build", "."] + self.build_args
        subprocess.check_call(cmake_cmd, cwd=self.build_temp)

        # Move from build temp to final position
        for ext in self.extensions:
            self.move_output(ext)

    def move_output(self, ext):
        build_temp = Path(self.build_temp).resolve()
        dest_path = Path(self.get_ext_fullpath(ext.name)).resolve()
        source_path = build_temp / self.get_ext_filename(ext.name)
        dest_directory = dest_path.parents[0]
        dest_directory.mkdir(parents=True, exist_ok=True)
        self.copy_file(source_path, dest_path)


extensions = [CMakeExtension("backend")]

setup(
    name="frontend",
    packages=["frontend"],
    ext_modules=extensions,
    cmdclass=dict(build_ext=CMakeBuild),
)
But this does not make backend a submodule of frontend, but instead a module on its own. So this works:
from backend import method
But to avoid naming issues with other libraries, what I would like to have is this:
from frontend.backend import method
Changing the naming in the pybinding or in the extension call to extensions = [CMakeExtension("frontend.backend")] unfortunately does not resolve my problem: the setup then does not find the backend.<platform>.so shared library, because it looks for frontend/backend.<platform>.so, which does not exist. How can I resolve this issue?
I think I've resolved the issue with the following lines:
Change in the setup.py file:
ext_modules = [
    Extension(
        "frontend.backend", sources=["frontend/backend/backend.cpp"]
    )
]
Change in the CMakeLists.txt file:
pybind11_add_module(backend "frontend/backend/backend.cpp")
set_target_properties( backend
PROPERTIES
ARCHIVE_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/frontend"
LIBRARY_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/frontend"
RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/frontend"
)
The shared library object backend.<platform>.so must be located in the frontend directory. Neither the pybind module name nor the source .cpp file should contain any "." in their names, because the get_ext_fullpath() method from build_ext splits on dots. Only the frontend directory contains an __init__.py file.
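To make the dot-splitting concrete, this is roughly what the build step looks for given a dotted extension name (a sketch; the exact suffix depends on the platform and Python version):
import sysconfig

# build_ext.get_ext_filename() splits the extension name on dots and appends
# the platform-specific suffix, so "frontend.backend" is looked up as
# frontend/backend<EXT_SUFFIX> under the build directory.
suffix = sysconfig.get_config_var("EXT_SUFFIX")
print("/".join("frontend.backend".split(".")) + suffix)
# e.g. frontend/backend.cpython-39-x86_64-linux-gnu.so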
I have a Flask application with a buildout environment.
./bin/pip show flask | grep Version
Version: 0.10.1
./bin/pip show flask-assets | grep Version
Version: 0.10
In the src folder, src/setup.py has the following lines:
setup(
    name = 'spf',
    install_requires = [
        'flask',
        'flask-assets',
    ],
    entry_points = {
        'console_scripts': [
            'spf_dev = spf.manage:dev',  # see manage.py dev function
        ],
    },
)
For the generated bin/spf_dev I have
src/spf/manage.py with the following code:
from flask.ext import assets
from flask.ext import script  # needed for script.Manager below

from . import env


def init(app):
    manager = script.Manager(app)
    manager.add_command(
        'assets',
        assets.ManageAssets(app.assets),
    )
    return manager


def dev():
    init(env.dev.app).run()
For Flask environment initialization I use
src/spf/env/dev.py
from spf import init

app = init({
    'ASSETS_DIR': 'src/spf/static/assets',
    'ASSETS_URL': '/assets/',
    'SASS_STYLE': 'compressed',
    'UGLIFYJS_EXTRA_ARGS': (
        '-c',
        '--screw-ie8',
    ),
})
And I implement the init function that returns the WSGI app in
src/spf/__init__.py
import flask

from . import assets


def init(env_config=None):
    app = flask.Flask(
        'spf',
        static_url_path='',
    )
    app.config.update(env_config)
    app.assets = assets.Assets(app)
    return app
The assets module with the Bundle registration is
src/spf/assets.py
from flask.ext.assets import (
    Environment,
    Bundle,
)


class Assets(Environment):
    def __init__(self, app):
        super(Assets, self).__init__(app)
        if 'ASSETS_DIR' in app.config:
            self.directory = app.config['ASSETS_DIR']
        if 'ASSETS_URL' in app.config:
            self.url = app.config['ASSETS_URL']
        if 'SASS_STYLE' in app.config:
            self.config['sass_style'] = app.config['SASS_STYLE']
        if 'UGLIFYJS_EXTRA_ARGS' in app.config:
            self.config['UGLIFYJS_EXTRA_ARGS'] = \
                app.config['UGLIFYJS_EXTRA_ARGS']
        self.register('theme.css', Bundle(
            'scss/theme.scss',
            filters='scss',
            output='theme.css',
        ))
        self.append_path('src/assets')
src/assets/scss/theme.scss
@import 'btn-o';
src/assets/scss/_btn-o.scss exists, is not empty, and has 0777 access rights.
But when I run
buildout -c src/buildout.cfg
./bin/spf_dev assets -v build
I get this error:
Building bundle: theme.css
Failed, error was: sass: subprocess had error: stderr=Error: File to import not found or unreadable: ./btn-o. on line 1 of standard input
Use --trace for backtrace. , stdout=, returncode=65
I have read https://github.com/miracle2k/webassets/blob/master/src/webassets/filter/sass.py#L36
but I don't understand ;-(
Why doesn't sass use src/assets to resolve the relative path in the @import directive from stdin after I used self.append_path('src/assets')?
You need to add a load path to your sass filter for it to work with SASS 3.4.14:
import os
from webassets.filter import get_filter

sass = get_filter('scss')
sass.load_paths = [os.path.join(app.static_folder, 'scss')]
Then, when you register your bundle:
self.register('theme.css', Bundle(
    'scss/theme.scss',
    filters=(sass,),
    output='theme.css',
))
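Put together with the Assets class from the question, a minimal sketch could look like the following (the exact load path is an assumption; with the layout above the partials live under src/assets/scss rather than the app's static folder):
import os

from flask.ext.assets import Environment, Bundle
from webassets.filter import get_filter


class Assets(Environment):
    def __init__(self, app):
        super(Assets, self).__init__(app)
        # Build an scss filter that knows where to look up partials such as _btn-o.scss.
        sass = get_filter('scss')
        sass.load_paths = [os.path.join(os.getcwd(), 'src/assets/scss')]
        self.register('theme.css', Bundle(
            'scss/theme.scss',
            filters=(sass,),
            output='theme.css',
        ))
        self.append_path('src/assets')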
The problem was in SASS 3.4.14.
The workaround is:
sudo gem uninstall sass
sudo gem install sass -v 3.4.13