Python setuptools.Extension() Examples
The following are 30 code examples of setuptools.Extension().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example.
You may also want to check out all available functions and classes of the setuptools module, or try the search function.
Example #1
Source File: test_build_ext.py From setuptools with MIT License | 6 votes |
def test_abi3_filename(self):
    """
    Filename needs to be loadable by several versions of Python 3
    if 'is_abi3' is truthy on Extension()
    """
    print(get_abi3_suffix())
    abi3_ext = Extension('spam.eggs', ['eggs.c'], py_limited_api=True)
    distribution = Distribution(dict(ext_modules=[abi3_ext]))
    builder = build_ext(distribution)
    builder.finalize_options()
    assert 'spam.eggs' in builder.ext_map
    filename = builder.get_ext_filename('spam.eggs')
    if six.PY2 or not get_abi3_suffix():
        # no abi3 support available: plain extension suffix expected
        assert filename.endswith(get_config_var('EXT_SUFFIX'))
    elif sys.platform == 'win32':
        assert filename.endswith('eggs.pyd')
    else:
        assert 'abi3' in filename
Example #2
Source File: cpp_extension.py From dragon with BSD 2-Clause "Simplified" License | 6 votes |
def __new__(cls, name, sources, *args, **kwargs):
    """Create a CUDA-enabled Extension.

    Augments the caller-supplied kwargs with Dragon's CUDA include and
    library paths, link libraries, and preprocessor macros, then delegates
    to the base ``_Extension``. Caller-supplied values for each kwarg are
    preserved and extended, never replaced.
    """
    include_dirs = kwargs.get('include_dirs', [])
    include_dirs += include_paths(cuda=True)
    kwargs['include_dirs'] = include_dirs
    library_dirs = kwargs.get('library_dirs', [])
    library_dirs += library_paths(cuda=True)
    kwargs['library_dirs'] = library_dirs
    libraries = kwargs.get('libraries', [])
    libraries.extend(COMMON_LINK_LIBRARIES + ['cudart', 'dragon'])
    kwargs['libraries'] = libraries
    # BUG FIX: was kwargs.get('define_marcos', []) -- the typo meant any
    # caller-supplied define_macros were silently discarded and replaced.
    define_macros = kwargs.get('define_macros', [])
    define_macros.append(('USE_CUDA', None))
    define_macros.append(('DRAGON_API=' + DLLIMPORT_STR, None))
    kwargs['define_macros'] = define_macros
    kwargs['language'] = 'c++'
    return _Extension(name, sources, *args, **kwargs)
Example #3
Source File: setup.py From urh with GNU General Public License v3.0 | 6 votes |
def get_extensions():
    """Collect and cythonize all urh extension modules.

    Gathers every .pyx under src/urh/cythonext plus the native device
    backends, optionally appending the numpy-warning suppression flag.
    """
    cythonext_dir = "src/urh/cythonext"
    module_names = [os.path.splitext(fname)[0]
                    for fname in os.listdir(cythonext_dir)
                    if fname.endswith(".pyx")]
    extensions = []
    for mod in module_names:
        extensions.append(
            Extension("urh.cythonext." + mod,
                      [cythonext_dir + "/" + mod + ".pyx"],
                      extra_compile_args=[OPEN_MP_FLAG],
                      extra_link_args=[OPEN_MP_FLAG],
                      language="c++"))
    ExtensionHelper.USE_RELATIVE_PATHS = True
    device_extensions, device_extras = ExtensionHelper.get_device_extensions_and_extras()
    extensions += device_extensions
    if NO_NUMPY_WARNINGS_FLAG:
        for extension in extensions:
            extension.extra_compile_args.append(NO_NUMPY_WARNINGS_FLAG)
    return cythonize(extensions,
                     compiler_directives=COMPILER_DIRECTIVES,
                     compile_time_env=device_extras)
Example #4
Source File: setup.py From python-mbedtls with MIT License | 6 votes |
def extensions(coverage=False):
    """Yield a cythonizable Extension for every .pyx file under ``src``.

    When *coverage* is true, Cython line-tracing macros and directives are
    enabled so coverage tools can follow the generated C code.
    """
    for dirpath, dirnames, filenames in os.walk("src"):
        for fn in filenames:
            root, ext = os.path.splitext(fn)
            if ext != ".pyx":
                continue
            # module name: path components below src/ joined with dots
            mod = ".".join(dirpath.split(os.sep)[1:] + [root])
            if coverage:
                macros = [("CYTHON_TRACE", "1"), ("CYTHON_TRACE_NOGIL", "1")]
            else:
                macros = []
            extension = Extension(
                mod,
                [os.path.join(dirpath, fn)],
                library_dirs=os.environ.get("LIBRARY_PATH", "").split(":"),
                libraries=["mbedcrypto", "mbedtls", "mbedx509"],
                define_macros=macros,
            )
            extension.cython_directives = {"language_level": "3str"}
            if coverage:
                extension.cython_directives["linetrace"] = True
            yield extension
Example #5
Source File: ExtensionHelper.py From urh with GNU General Public License v3.0 | 6 votes |
def get_device_extension(dev_name: str, libraries: list, library_dirs: list, include_dirs: list):
    """Return the Cython Extension for one native device backend.

    The implementation language is taken from DEVICES when present,
    defaulting to "c++" otherwise.
    """
    try:
        language = DEVICES[dev_name]["language"]
    except KeyError:
        language = "c++"
    here = os.path.dirname(os.path.realpath(__file__))
    if USE_RELATIVE_PATHS:
        # We need relative paths on windows
        source_path = "src/urh/dev/native/lib/{0}.pyx".format(dev_name)
    else:
        source_path = os.path.join(here, "lib", "{0}.pyx".format(dev_name))
    return Extension("urh.dev.native.lib." + dev_name,
                     [source_path],
                     libraries=libraries,
                     library_dirs=library_dirs,
                     include_dirs=include_dirs,
                     language=language)
Example #6
Source File: setup_utils.py From fluxclient with GNU Affero General Public License v3.0 | 6 votes |
def create_utils_extentions():
    """Return the fluxclient toolpath and utils Cython extensions.

    NOTE: the misspelled name ("extentions") is kept because callers
    reference it.
    """
    toolpath_ext = Extension(
        'fluxclient.toolpath._toolpath',
        sources=[
            "src/toolpath/gcode_parser.cpp",
            "src/toolpath/gcode_writer.cpp",
            "src/toolpath/fcode_v1_writer.cpp",
            "src/toolpath/py_processor.cpp",
            "src/toolpath/_toolpath.pyx"],
        language="c++",
        extra_compile_args=get_default_extra_compile_args(),
        include_dirs=[numpy.get_include()])
    utils_ext = Extension(
        'fluxclient.utils._utils',
        sources=[
            "src/utils/utils_module.cpp",
            "src/utils/utils.pyx"],
        language="c++",
        extra_compile_args=get_default_extra_compile_args())
    return [toolpath_ext, utils_ext]
Example #7
Source File: test_distribution.py From nimporter with MIT License | 6 votes |
def test_build_all_extensions():
    "Make sure all extensions within a project are compiled correctly."
    expected_names = {'proj2.lib1', 'proj2.performance'}
    built = Nimporter.build_nim_extensions(Path('tests/proj2'))
    assert len(built) == 2
    for extension in built:
        assert extension.name in expected_names
        assert isinstance(extension, Extension)
        include_paths = set(Path(d) for d in extension.include_dirs)
        for src_file in extension.sources:
            # every staged source must be generated C
            assert Path(src_file).absolute().suffix == '.c'
Example #8
Source File: setup.py From accelerator with Apache License 2.0 | 6 votes |
def method_mod(name):
    """Regenerate the C source for a standard_methods module and wrap it
    as an Extension.

    The generated file is rewritten only when its contents actually
    changed, so unchanged modules keep their timestamps.
    """
    code = import_module('accelerator.standard_methods.' + name).c_module_code
    fn = 'accelerator/standard_methods/_generated_' + name + '.c'
    old_code = None
    if exists(fn):
        with open(fn, 'r') as fh:
            old_code = fh.read()
    if code != old_code:
        with open(fn, 'w') as fh:
            fh.write(code)
    return Extension(
        'accelerator.standard_methods._' + name,
        sources=[fn],
        libraries=['z'],
        extra_compile_args=['-std=c99', '-O3'],
    )
Example #9
Source File: nimporter.py From nimporter with MIT License | 6 votes |
def get_import_prefix(module_path, root):
    """
    Computes the proper name of a Nim module amid a given Python project.

    This method is needed because Nim Extensions do not automatically know
    where they are within a given Python namespace. This method is vital for
    recursing through an entire Python project to find every Nim Extension
    module and library while preserving the namespace containing each one.

    Args:
        module_path(Path): the module for which to determine its namespace.
        root(Path): the path to the Python project containing the Extension.

    Returns:
        A tuple of packages containing the given module.
    """
    root_path = root.resolve()
    full_path = module_path.resolve()
    # BUG FIX: the original ``full_path >= root_path`` compared the paths
    # lexicographically, so any path that merely *sorts* after the root was
    # accepted and then silently produced a wrong prefix. Containment must
    # be checked on the path components.
    assert full_path.parts[:len(root_path.parts)] == root_path.parts, \
        'Extension path is not within root dir.'
    return full_path.parts[len(root_path.parts):]
Example #10
Source File: setup.py From rpforest with Apache License 2.0 | 6 votes |
def _get_extension(extension, file_ext):
    """Describe the rpforest C++ Extension.

    *extension* is the dotted module path as a sequence of parts;
    *file_ext* selects the source suffix (e.g. 'pyx' or 'cpp').
    Warns on stderr when numpy headers cannot be added.
    """
    ext_kwargs = {
        "name": ".".join(extension),
        "sources": ["%s.%s" % ("/".join(extension), file_ext)],
        "language": "c++",
        "extra_compile_args": ["-ffast-math"],
    }
    if not NUMPY_AVAILABLE:
        red = "\033[1;31m"
        reset = "\033[m"
        sys.stderr.write(
            "%sNumpy is not available so we cannot include the libraries\n"
            "It will result in compilation failures where the numpy headers "
            "are missing.\n%s" % (red, reset),
        )
    else:
        # most of the time it's fine if the include_dirs aren't there
        ext_kwargs["include_dirs"] = [numpy.get_include()]
    return Extension(**ext_kwargs)
Example #11
Source File: test_distribution.py From nimporter with MIT License | 5 votes |
def test_build_extension_library():
    "Make sure a Nim library compiles to C and an Extension object is built."
    lib_path = Path('tests/proj1/proj1/lib1')
    extension = Nimporter._build_nim_extension(lib_path, Path('tests/proj1'))
    assert isinstance(extension, Extension)
    assert extension.name == 'proj1.lib1'
    include_paths = set(Path(d) for d in extension.include_dirs)
    for src_file in extension.sources:
        # every staged source must be generated C
        assert Path(src_file).absolute().suffix == '.c'
Example #12
Source File: setup.py From Det3D with Apache License 2.0 | 5 votes |
def make_cython_ext(name, module, sources, extra_compile_args=None):
    """Build a C++ Cython Extension named ``module.name``.

    Args:
        name: basename of the extension module.
        module: dotted package path it lives under; also used as the
            directory prefix for ``sources``.
        sources: source file names relative to the package directory.
        extra_compile_args: optional dict of per-compiler flag lists
            (e.g. ``{'cxx': [...]}``). A caller-supplied dict is still
            updated in place, as before.

    Returns:
        The configured Extension.
    """
    # BUG FIX: the original used a mutable default argument ``{}`` (shared
    # across calls) and then did ``extra_compile_args["cxx"] += ...``,
    # which raised KeyError whenever the 'cxx' key was absent -- including
    # on every non-Windows call that relied on the default.
    if extra_compile_args is None:
        extra_compile_args = {}
    if platform.system() != "Windows":
        extra_compile_args.setdefault("cxx", [])
        extra_compile_args["cxx"] += ["-Wno-unused-function", "-Wno-write-strings"]
    extension = Extension(
        "{}.{}".format(module, name),
        [os.path.join(*module.split("."), p) for p in sources],
        include_dirs=[np.get_include()],
        language="c++",
        extra_compile_args=extra_compile_args,
    )
    return extension
Example #13
Source File: setup.py From mmaction with Apache License 2.0 | 5 votes |
def customize_compiler_for_nvcc(self):
    """Inject deep into distutils to customize how the dispatch
    to cc/nvcc works.

    If you subclass UnixCCompiler, it's not trivial to get your subclass
    injected in, and still have the right customizations (i.e.
    distutils.sysconfig.customize_compiler) run on it. So instead of going
    the OO route, this monkey-patches the instance's ``_compile`` method.
    """
    # tell the compiler it can process .cu
    self.src_extensions.append('.cu')

    # save references to the default compiler_so and _compile methods
    default_compiler_so = self.compiler_so
    # FIX: the saved method was previously bound to a local named `super`,
    # shadowing the builtin -- use a descriptive name instead.
    default_compile = self._compile

    # now redefine the _compile method. This gets executed for each
    # object but distutils doesn't have the ability to change compilers
    # based on source extension: we add it.
    def _compile(obj, src, ext, cc_args, extra_postargs, pp_opts):
        if osp.splitext(src)[1] == '.cu':
            # use nvcc for .cu files
            self.set_executable('compiler_so', 'nvcc')
            # use only a subset of the extra_postargs, which are 1-1
            # translated from the extra_compile_args in the Extension class
            postargs = extra_postargs['nvcc']
        else:
            postargs = extra_postargs['cc']
        default_compile(obj, src, ext, cc_args, postargs, pp_opts)
        # reset the default compiler_so, which we might have changed for cuda
        self.compiler_so = default_compiler_so

    # inject our redefined _compile method into the instance
    self._compile = _compile
Example #14
Source File: setup.py From OctoPrint-GPX with GNU Affero General Public License v3.0 | 5 votes |
def params():
    """Assemble the setuptools.setup() keyword arguments for the GPX plugin.

    NOTE: this function returns ``locals()``, so every local variable name
    below becomes a setup() keyword -- the names must not be changed.
    """
    # Our metadata, as defined above
    name = plugin_name
    version = plugin_version
    cmdclass = versioneer.get_cmdclass()
    description = plugin_description
    author = plugin_author
    author_email = plugin_author_email
    url = plugin_url
    license = plugin_license

    # we only have our plugin package to install
    packages = [plugin_package]

    # we might have additional data files in sub folders that need to be installed too
    package_data = {plugin_package: package_data_dirs(plugin_package, ['static', 'templates', 'translations'] + plugin_additional_data)}
    include_package_data = True

    # If you have any package data that needs to be accessible on the file system, such as templates or static assets
    # this plugin is not zip_safe.
    zip_safe = False

    # Read the requirements from our requirements.txt file
    install_requires = requirements("requirements.txt")

    # Hook the plugin into the "octoprint.plugin" entry point, mapping the plugin_identifier to the plugin_package.
    # That way OctoPrint will be able to find the plugin and load it.
    entry_points = {
        "octoprint.plugin": ["%s = %s" % (plugin_identifier, plugin_package)]
    }

    # native C extension: builds the gcodex3g module from the bundled GPX sources
    ext_modules = [
        setuptools.Extension('gcodex3g',
            sources = plugin_ext_sources,
            extra_compile_args = ['-DGPX_VERSION="\\"OctoPrint\\""',
                '-DSERIAL_SUPPORT',
                '-fvisibility=hidden',
                '-IGPX/build/src/shared',
                '-IGPX/src/shared',
                '-IGPX/src/gpx'],
            extra_link_args = ['-fvisibility=hidden'])
    ]
    return locals()
Example #15
Source File: setup.py From ibench with MIT License | 5 votes |
def build_native():
    '''Return cythonized extensions for native benchmarks.

    Prefers icc when available (exporting CC/CXX and adding -mkl);
    otherwise requires CXX to be set, and skips the native benchmarks
    when it is not.
    '''
    try:
        # use icc if it is available
        # FIX: strip the trailing newline from `which` output; the raw
        # value was previously exported verbatim into CC/CXX.
        icc = subprocess.check_output('which icc', shell=True).decode('utf-8').strip()
    except (subprocess.CalledProcessError, OSError):
        # FIX: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; catch only lookup failures.
        icc = None
        extra_args = []
    else:
        print('Using icc: %s' % icc)
        os.environ['CC'] = icc
        os.environ['CXX'] = os.environ['CC']
        extra_args = ['-mkl']
    if 'CXX' not in os.environ:
        print('icc not detected, and CXX is not set. Skipping building native benchmarks.')
        print('If you want to build native benchmarks, specify a compiler in the CXX '
              'environment variable.')
        return
    try:
        os.mkdir('pyx')
    except OSError:
        pass

    def make_bench(name):
        # render the benchmark template into pyx/<name>.pyx
        tpl_env = Environment(loader=FileSystemLoader('ibench/native'))
        with open('pyx/%s.pyx' % name, 'w') as pyxf:
            pyxf.write(tpl_env.get_template('tpl.bench.pyx').render(
                {'bench': name, 'Bench': name.capitalize()}))
        return Extension(name='ibench.native.%s' % name,
                         extra_compile_args=extra_args,
                         extra_link_args=extra_args,
                         sources=['pyx/%s.pyx' % name])

    return cythonize([make_bench(i)
                      for i in ['det', 'dot', 'inv', 'lu', 'cholesky', 'qr']])
Example #16
Source File: setup.py From python-igraph with GNU General Public License v2.0 | 5 votes |
def configure(self, ext):
    """Configures the given Extension object using this build configuration.

    The include/library search paths are filtered through the exclusion
    lists first; every other build setting is copied over verbatim.
    """
    ext.include_dirs = exclude_from_list(
        self.include_dirs, self.excluded_include_dirs
    )
    ext.library_dirs = exclude_from_list(
        self.library_dirs, self.excluded_library_dirs
    )
    for attr in (
        "runtime_library_dirs",
        "libraries",
        "extra_compile_args",
        "extra_link_args",
        "extra_objects",
        "define_macros",
    ):
        setattr(ext, attr, getattr(self, attr))
Example #17
Source File: setup.py From hrnet with MIT License | 5 votes |
def customize_compiler_for_nvcc(self):
    """Inject deep into distutils to customize how the dispatch
    to cc/nvcc works.

    If you subclass UnixCCompiler, it's not trivial to get your subclass
    injected in, and still have the right customizations (i.e.
    distutils.sysconfig.customize_compiler) run on it. So instead of going
    the OO route, this monkey-patches the instance's ``_compile`` method.
    """
    # tell the compiler it can process .cu
    self.src_extensions.append('.cu')

    # save references to the default compiler_so and _compile methods
    default_compiler_so = self.compiler_so
    # FIX: the saved method was previously bound to a local named `super`,
    # shadowing the builtin -- use a descriptive name instead.
    default_compile = self._compile

    # now redefine the _compile method. This gets executed for each
    # object but distutils doesn't have the ability to change compilers
    # based on source extension: we add it.
    def _compile(obj, src, ext, cc_args, extra_postargs, pp_opts):
        if osp.splitext(src)[1] == '.cu':
            # use nvcc for .cu files
            self.set_executable('compiler_so', 'nvcc')
            # use only a subset of the extra_postargs, which are 1-1
            # translated from the extra_compile_args in the Extension class
            postargs = extra_postargs['nvcc']
        else:
            postargs = extra_postargs['cc']
        default_compile(obj, src, ext, cc_args, postargs, pp_opts)
        # reset the default compiler_so, which we might have changed for cuda
        self.compiler_so = default_compiler_so

    # inject our redefined _compile method into the instance
    self._compile = _compile
Example #18
Source File: setup.py From kaggle-imaterialist with MIT License | 5 votes |
def customize_compiler_for_nvcc(self):
    """Inject deep into distutils to customize how the dispatch
    to cc/nvcc works.

    If you subclass UnixCCompiler, it's not trivial to get your subclass
    injected in, and still have the right customizations (i.e.
    distutils.sysconfig.customize_compiler) run on it. So instead of going
    the OO route, this monkey-patches the instance's ``_compile`` method.
    """
    # tell the compiler it can process .cu
    self.src_extensions.append('.cu')

    # save references to the default compiler_so and _compile methods
    default_compiler_so = self.compiler_so
    # FIX: the saved method was previously bound to a local named `super`,
    # shadowing the builtin -- use a descriptive name instead.
    default_compile = self._compile

    # now redefine the _compile method. This gets executed for each
    # object but distutils doesn't have the ability to change compilers
    # based on source extension: we add it.
    def _compile(obj, src, ext, cc_args, extra_postargs, pp_opts):
        if osp.splitext(src)[1] == '.cu':
            # use nvcc for .cu files
            self.set_executable('compiler_so', 'nvcc')
            # use only a subset of the extra_postargs, which are 1-1
            # translated from the extra_compile_args in the Extension class
            postargs = extra_postargs['nvcc']
        else:
            postargs = extra_postargs['cc']
        default_compile(obj, src, ext, cc_args, postargs, pp_opts)
        # reset the default compiler_so, which we might have changed for cuda
        self.compiler_so = default_compiler_so

    # inject our redefined _compile method into the instance
    self._compile = _compile
Example #19
Source File: test_distribution.py From nimporter with MIT License | 5 votes |
def test_compile_switches():
    "Make sure that an extension can still be compiled when using a switchfile."
    extension = NimCompiler.compile_nim_extension(
        Path('tests/lib2'), Path('tests'), library=True
    )
    assert isinstance(extension, Extension)
    assert extension.name == 'lib2'
    include_paths = set(Path(d) for d in extension.include_dirs)
    for src_file in extension.sources:
        # every staged source must be generated C
        assert Path(src_file).absolute().suffix == '.c'
Example #20
Source File: setup.py From CenterNet with Apache License 2.0 | 5 votes |
def make_cython_ext(name, module, sources):
    """Build and cythonize a C++ Extension named ``module.name``.

    Non-Windows builds silence two noisy warnings; on Windows no extra
    compiler flags are passed (``None``).
    """
    if platform.system() != 'Windows':
        compile_args = {'cxx': ['-Wno-unused-function', '-Wno-write-strings']}
    else:
        compile_args = None
    pkg_parts = module.split('.')
    src_paths = [os.path.join(*pkg_parts, src) for src in sources]
    ext = Extension(
        '{}.{}'.format(module, name),
        src_paths,
        include_dirs=[np.get_include()],
        language='c++',
        extra_compile_args=compile_args)
    ext, = cythonize(ext)
    return ext
Example #21
Source File: nimporter.py From nimporter with MIT License | 5 votes |
def _find_extensions(cls, path, exclude_dirs=set()):
    """
    Recurses through a given path to find all Nim modules or libraries.

    Args:
        path(Path): the path to begin recursing.
        exclude_dirs(iterable): Paths to skip while searching.

    Returns:
        A list of Path objects. File Paths indicate a Nim Module. Folder
        Paths indicate Nim Libraries.
    """
    # BUG FIX: the comprehension previously iterated `i` but referenced
    # `p`, raising NameError for any non-empty exclude_dirs.
    exclude_dirs = {p.expanduser().absolute() for p in exclude_dirs}
    nim_exts = []
    for item in path.iterdir():
        if item.expanduser().absolute() in exclude_dirs:
            continue
        if item.is_dir() and list(item.glob('*.nimble')):
            # Treat directory as one single Extension
            (nimble_file,) = item.glob('*.nimble')
            nim_file = nimble_file.parent / (nimble_file.stem + '.nim')
            # NOTE(pebaz): Folder must contain Nim file of exact same name.
            if nim_file.exists():
                nim_exts.append(item)
        elif item.is_dir():
            # Recurse into plain directories
            nim_exts.extend(
                cls._find_extensions(item, exclude_dirs=exclude_dirs)
            )
        elif item.suffix == '.nim':
            # A lone .nim file is a Nim Extension module
            nim_exts.append(item)
    return nim_exts
Example #22
Source File: nimporter.py From nimporter with MIT License | 5 votes |
def get_switches(cls, switch_file, **global_scope):
    """
    Load compilation switches by evaluating a Python switchfile.

    The switchfile is executed with *global_scope* (copied) as its globals
    and must define a ``__switches__`` dict with "import" and "bundle"
    keys: the CLI arguments handed to the Nim compiler when importing the
    given Nim library and when creating an Extension object from the C
    sources, respectively.

    A switchfile is Python (rather than static data) so it can vary the
    switches per platform (e.g. via ``sys.platform``) and per build mode
    using injected globals such as MODULE_PATH, BUILD_ARTIFACT, BUILD_DIR,
    and IS_LIBRARY.

    Returns:
        The dict bound to ``__switches__`` by the switchfile, containing
        the keys "import" and "bundle".
    """
    scope = global_scope.copy()
    assert switch_file.exists(), (
        'Cannot open nonexistent switch file: ' + str(switch_file)
    )
    exec(switch_file.read_text(), scope)
    return scope['__switches__']
Example #23
Source File: setup.py From deap with GNU Lesser General Public License v3.0 | 5 votes |
def run_setup(build_ext):
    """Invoke setup() for deap, optionally with the C/C++ hypervolume extension.

    When *build_ext* is falsy, ``ext_modules`` is ``None`` so installation
    can proceed on platforms without a working compiler toolchain.
    """
    if build_ext:
        extra_modules = [
            Extension("deap.tools._hypervolume.hv",
                      sources=["deap/tools/_hypervolume/_hv.c",
                               "deap/tools/_hypervolume/hv.cpp"])
        ]
    else:
        extra_modules = None
    setup_kwargs = dict(
        name='deap',
        version=deap.__revision__,
        description='Distributed Evolutionary Algorithms in Python',
        long_description=long_description,
        long_description_content_type="text/markdown",
        author='deap Development Team',
        author_email='deap-users@googlegroups.com',
        url='https://www.github.com/deap',
        packages=find_packages(exclude=['examples']),
        platforms=['any'],
        keywords=['evolutionary algorithms', 'genetic algorithms',
                  'genetic programming', 'cma-es', 'ga', 'gp', 'es', 'pso'],
        license='LGPL',
        classifiers=[
            'Development Status :: 4 - Beta',
            'Intended Audience :: Developers',
            'Intended Audience :: Education',
            'Intended Audience :: Science/Research',
            'License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)',
            'Programming Language :: Python',
            'Programming Language :: Python :: 3',
            'Topic :: Scientific/Engineering',
            'Topic :: Software Development',
        ],
        ext_modules=extra_modules,
        cmdclass={"build_ext": ve_build_ext},
        install_requires=['numpy'],
        use_2to3=True,
    )
    setup(**setup_kwargs)
Example #24
Source File: setup.py From ttfnet with Apache License 2.0 | 5 votes |
def make_cython_ext(name, module, sources):
    """Build and cythonize a C++ Extension named ``module.name``.

    Extra warning-suppression flags are added everywhere except Windows.
    """
    if platform.system() == 'Windows':
        compile_args = None
    else:
        compile_args = {'cxx': ['-Wno-unused-function', '-Wno-write-strings']}
    source_paths = [os.path.join(*module.split('.'), src) for src in sources]
    ext = Extension(
        '{}.{}'.format(module, name),
        source_paths,
        include_dirs=[np.get_include()],
        language='c++',
        extra_compile_args=compile_args)
    ext, = cythonize(ext)
    return ext
Example #25
Source File: setup.py From Feature-Selective-Anchor-Free-Module-for-Single-Shot-Object-Detection with Apache License 2.0 | 5 votes |
def make_cython_ext(name, module, sources):
    """Build and cythonize a C++ Extension named ``module.name``.

    Extra warning-suppression flags are added everywhere except Windows.
    """
    compile_args = None
    if platform.system() != 'Windows':
        compile_args = {
            'cxx': ['-Wno-unused-function', '-Wno-write-strings'],
        }
    full_name = '{}.{}'.format(module, name)
    source_paths = [os.path.join(*module.split('.'), src) for src in sources]
    ext, = cythonize(Extension(
        full_name,
        source_paths,
        include_dirs=[np.get_include()],
        language='c++',
        extra_compile_args=compile_args))
    return ext
Example #26
Source File: setup.py From Cascade-RPN with Apache License 2.0 | 5 votes |
def make_cython_ext(name, module, sources):
    """Build and cythonize a C++ Extension named ``module.name``.

    Extra warning-suppression flags are added everywhere except Windows.
    """
    if platform.system() != 'Windows':
        compile_args = {'cxx': ['-Wno-unused-function', '-Wno-write-strings']}
    else:
        compile_args = None
    prefix = module.split('.')
    ext = Extension(
        '{}.{}'.format(module, name),
        [os.path.join(*prefix, src) for src in sources],
        include_dirs=[np.get_include()],
        language='c++',
        extra_compile_args=compile_args)
    ext, = cythonize(ext)
    return ext
Example #27
Source File: setup.py From FoveaBox with Apache License 2.0 | 5 votes |
def make_cython_ext(name, module, sources):
    """Build and cythonize a C++ Extension named ``module.name``.

    Extra warning-suppression flags are added everywhere except Windows.
    """
    on_windows = platform.system() == 'Windows'
    compile_args = None if on_windows else {
        'cxx': ['-Wno-unused-function', '-Wno-write-strings']
    }
    src_paths = [os.path.join(*module.split('.'), src) for src in sources]
    ext = Extension(
        '{}.{}'.format(module, name),
        src_paths,
        include_dirs=[np.get_include()],
        language='c++',
        extra_compile_args=compile_args)
    ext, = cythonize(ext)
    return ext
Example #28
Source File: setup.py From pydpc with GNU Lesser General Public License v3.0 | 5 votes |
def extensions():
    """Build the cythonized pydpc.core extension (Cython + C sources)."""
    from numpy import get_include
    from Cython.Build import cythonize
    core_ext = Extension(
        "pydpc.core",
        sources=["ext/core.pyx", "ext/_core.c"],
        include_dirs=[get_include()],
        extra_compile_args=["-O3", "-std=c99"],
    )
    return cythonize([core_ext])
Example #29
Source File: setup.py From Fast_Sentence_Embeddings with GNU General Public License v3.0 | 5 votes |
def run(self):
    """Run the regular build_ext step, degrading gracefully on failure.

    If compiling the optional extension modules fails, the error is
    printed to stdout and a warning is issued instead of aborting the
    installation.
    """
    try:
        build_ext.run(self)
    except Exception as e:
        # MODERNIZED: `except ... as e` replaces the Python-2-era
        # `sys.exc_info()[1]` retrieval.
        sys.stdout.write('%s\n' % str(e))
        # NOTE(review): the two literals below concatenate without a
        # separator ("...modulesThere was...") -- looks like a missing
        # space/format arg; confirm intended message before changing it.
        warnings.warn(
            self.warning_message + 'Extension modules' +
            'There was an issue with your platform configuration - see above.')
Example #30
Source File: nimporter.py From nimporter with MIT License | 5 votes |
def get_nim_extensions(cls, root):
    """
    Convenience function to look for previously compiled Nim Extensions.

    When extensions are created, they are stored in the
    `<root>/build/nim-extensions` directory. This is necessary because
    `setup.py` runs the `setup()` function twice: once to gather info and
    once to actually compile/bundle everything. On the first pass the
    extensions are compiled to C; on the second they are compiled to
    Python-compatible shared objects.

    Args:
        root(Path): the root of the project.

    Returns:
        A list of Extensions that were compiled on the library
        maintainer's computer.
    """
    extension_dir = root / NimCompiler.EXT_DIR
    assert extension_dir.exists()
    # NOTE(pebaz): The include dir and C source file paths absolutely must
    # be relative paths or installing with Pip will not work on Windows.
    nim_extensions = []
    for staged in extension_dir.iterdir():
        c_sources = [str(c_file) for c_file in staged.glob('*.c')]
        nim_extensions.append(Extension(
            name=staged.name,
            sources=c_sources,
            include_dirs=[str(staged)],
        ))
    return nim_extensions