Contents of /home/newsbmcs.com/public_html/static/img/logo/setuptools.tar (viewed through the hosting panel's file manager)
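The dump below flattens every archive member onto a single line, which destroys Python indentation. If the tarball itself is accessible, it is easier to inspect it directly. A minimal sketch using the standard-library tarfile module; the path comes from the header above and is assumed to be readable from wherever the script runs:

import tarfile

ARCHIVE = "/home/newsbmcs.com/public_html/static/img/logo/setuptools.tar"

with tarfile.open(ARCHIVE) as tar:
    # Print each member's size and path instead of reading the flattened dump.
    for member in tar.getmembers():
        print(f"{member.size:>8}  {member.name}")
    # Only extract archives you trust; members are written under ./extracted/.
    tar.extractall("extracted")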
===== depends.py =====

from __future__ import annotations

import contextlib
import dis
import marshal
import sys
from types import CodeType
from typing import Any, Literal, TypeVar

from packaging.version import Version

from . import _imp
from ._imp import PY_COMPILED, PY_FROZEN, PY_SOURCE, find_module

_T = TypeVar("_T")

__all__ = ['Require', 'find_module']


class Require:
    """A prerequisite to building or installing a distribution"""

    def __init__(
        self,
        name,
        requested_version,
        module,
        homepage: str = '',
        attribute=None,
        format=None,
    ) -> None:
        if format is None and requested_version is not None:
            format = Version

        if format is not None:
            requested_version = format(requested_version)
            if attribute is None:
                attribute = '__version__'

        self.__dict__.update(locals())
        del self.self

    def full_name(self):
        """Return full package/distribution name, w/version"""
        if self.requested_version is not None:
            return f'{self.name}-{self.requested_version}'
        return self.name

    def version_ok(self, version):
        """Is 'version' sufficiently up-to-date?"""
        return (
            self.attribute is None
            or self.format is None
            or str(version) != "unknown"
            and self.format(version) >= self.requested_version
        )

    def get_version(
        self, paths=None, default: _T | Literal["unknown"] = "unknown"
    ) -> _T | Literal["unknown"] | None | Any:
        """Get version number of installed module, 'None', or 'default'

        Search 'paths' for module.  If not found, return 'None'.  If found,
        return the extracted version attribute, or 'default' if no version
        attribute was specified, or the value cannot be determined without
        importing the module.  The version is formatted according to the
        requirement's version format (if any), unless it is 'None' or the
        supplied 'default'.
        """

        if self.attribute is None:
            try:
                f, _p, _i = find_module(self.module, paths)
            except ImportError:
                return None
            if f:
                f.close()
            return default

        v = get_module_constant(self.module, self.attribute, default, paths)

        if v is not None and v is not default and self.format is not None:
            return self.format(v)

        return v

    def is_present(self, paths=None):
        """Return true if dependency is present on 'paths'"""
        return self.get_version(paths) is not None

    def is_current(self, paths=None):
        """Return true if dependency is present and up-to-date on 'paths'"""
        version = self.get_version(paths)
        if version is None:
            return False
        return self.version_ok(str(version))


def maybe_close(f):
    @contextlib.contextmanager
    def empty():
        yield
        return

    if not f:
        return empty()

    return contextlib.closing(f)


# Some objects are not available on some platforms.
# XXX it'd be better to test assertions about bytecode instead.
if not sys.platform.startswith('java') and sys.platform != 'cli':

    def get_module_constant(
        module, symbol, default: _T | int = -1, paths=None
    ) -> _T | int | None | Any:
        """Find 'module' by searching 'paths', and extract 'symbol'

        Return 'None' if 'module' does not exist on 'paths', or it does not define
        'symbol'.  If the module defines 'symbol' as a constant, return the
        constant.  Otherwise, return 'default'."""

        try:
            f, path, (_suffix, _mode, kind) = info = find_module(module, paths)
        except ImportError:
            # Module doesn't exist
            return None

        with maybe_close(f):
            if kind == PY_COMPILED:
                f.read(8)  # skip magic & date
                code = marshal.load(f)
            elif kind == PY_FROZEN:
                code = _imp.get_frozen_object(module, paths)
            elif kind == PY_SOURCE:
                code = compile(f.read(), path, 'exec')
            else:
                # Not something we can parse; we'll have to import it.  :(
                imported = _imp.get_module(module, paths, info)
                return getattr(imported, symbol, None)

        return extract_constant(code, symbol, default)

    def extract_constant(
        code: CodeType, symbol: str, default: _T | int = -1
    ) -> _T | int | None | Any:
        """Extract the constant value of 'symbol' from 'code'

        If the name 'symbol' is bound to a constant value by the Python code
        object 'code', return that value.  If 'symbol' is bound to an expression,
        return 'default'.  Otherwise, return 'None'.

        Return value is based on the first assignment to 'symbol'.  'symbol' must
        be a global, or at least a non-"fast" local in the code block.  That is,
        only 'STORE_NAME' and 'STORE_GLOBAL' opcodes are checked, and 'symbol'
        must be present in 'code.co_names'.
        """
        if symbol not in code.co_names:
            # name's not there, can't possibly be an assignment
            return None

        name_idx = list(code.co_names).index(symbol)

        STORE_NAME = dis.opmap['STORE_NAME']
        STORE_GLOBAL = dis.opmap['STORE_GLOBAL']
        LOAD_CONST = dis.opmap['LOAD_CONST']

        const = default

        for byte_code in dis.Bytecode(code):
            op = byte_code.opcode
            arg = byte_code.arg

            if op == LOAD_CONST:
                assert arg is not None
                const = code.co_consts[arg]
            elif arg == name_idx and (op == STORE_NAME or op == STORE_GLOBAL):
                return const
            else:
                const = default

        return None

    __all__ += ['get_module_constant', 'extract_constant']


===== tests/test_bdist_egg.py =====

"""develop tests"""

import os
import re
import zipfile

import pytest

from setuptools.dist import Distribution

from . import contexts

SETUP_PY = """\
from setuptools import setup
setup(py_modules=['hi'])
"""


@pytest.fixture
def setup_context(tmpdir):
    with (tmpdir / 'setup.py').open('w') as f:
        f.write(SETUP_PY)
    with (tmpdir / 'hi.py').open('w') as f:
        f.write('1\n')
    with tmpdir.as_cwd():
        yield tmpdir


class Test:
    @pytest.mark.usefixtures("user_override")
    @pytest.mark.usefixtures("setup_context")
    def test_bdist_egg(self):
        dist = Distribution(
            dict(
                script_name='setup.py',
                script_args=['bdist_egg'],
                name='foo',
                py_modules=['hi'],
            )
        )
        os.makedirs(os.path.join('build', 'src'))
        with contexts.quiet():
            dist.parse_command_line()
            dist.run_commands()

        # let's see if we got our egg link at the right place
        [content] = os.listdir('dist')
        assert re.match(r'foo-0.0.0-py[23].\d+.egg$', content)

    @pytest.mark.xfail(
        os.environ.get('PYTHONDONTWRITEBYTECODE', False),
        reason="Byte code disabled",
    )
    @pytest.mark.usefixtures("user_override")
    @pytest.mark.usefixtures("setup_context")
    def test_exclude_source_files(self):
        dist = Distribution(
            dict(
                script_name='setup.py',
                script_args=['bdist_egg', '--exclude-source-files'],
                py_modules=['hi'],
            )
        )
        with contexts.quiet():
            dist.parse_command_line()
            dist.run_commands()
        [dist_name] = os.listdir('dist')
        dist_filename = os.path.join('dist', dist_name)
        zip = zipfile.ZipFile(dist_filename)
        names = list(zi.filename for zi in zip.filelist)
        assert 'hi.pyc' in names
        assert 'hi.py' not in names


===== tests/text.py =====

class Filenames:
    unicode = 'smörbröd.py'
    latin_1 = unicode.encode('latin-1')
    utf_8 = unicode.encode('utf-8')


===== tests/test_find_packages.py =====

"""Tests for automatic package discovery"""

import os
import shutil
import tempfile

import pytest

from setuptools import find_namespace_packages, find_packages
from setuptools.discovery import FlatLayoutPackageFinder

from .compat.py39 import os_helper


class TestFindPackages:
    def setup_method(self, method):
        self.dist_dir = tempfile.mkdtemp()
        self._make_pkg_structure()

    def teardown_method(self, method):
        shutil.rmtree(self.dist_dir)

    def _make_pkg_structure(self):
        """Make basic package structure.

        dist/
            docs/
                conf.py
            pkg/
                __pycache__/
                nspkg/
                    mod.py
                subpkg/
                    assets/
                        asset
                    __init__.py
            setup.py
        """
        self.docs_dir = self._mkdir('docs', self.dist_dir)
        self._touch('conf.py', self.docs_dir)
        self.pkg_dir = self._mkdir('pkg', self.dist_dir)
        self._mkdir('__pycache__', self.pkg_dir)
        self.ns_pkg_dir = self._mkdir('nspkg', self.pkg_dir)
        self._touch('mod.py', self.ns_pkg_dir)
        self.sub_pkg_dir = self._mkdir('subpkg', self.pkg_dir)
        self.asset_dir = self._mkdir('assets', self.sub_pkg_dir)
        self._touch('asset', self.asset_dir)
        self._touch('__init__.py', self.sub_pkg_dir)
        self._touch('setup.py', self.dist_dir)

    def _mkdir(self, path, parent_dir=None):
        if parent_dir:
            path = os.path.join(parent_dir, path)
        os.mkdir(path)
        return path

    def _touch(self, path, dir_=None):
        if dir_:
            path = os.path.join(dir_, path)
        open(path, 'wb').close()
        return path

    def test_regular_package(self):
        self._touch('__init__.py', self.pkg_dir)
        packages = find_packages(self.dist_dir)
        assert packages == ['pkg', 'pkg.subpkg']

    def test_exclude(self):
        self._touch('__init__.py', self.pkg_dir)
        packages = find_packages(self.dist_dir, exclude=('pkg.*',))
        assert packages == ['pkg']

    def test_exclude_recursive(self):
        """
        Excluding a parent package should not exclude child packages as well.
        """
        self._touch('__init__.py', self.pkg_dir)
        self._touch('__init__.py', self.sub_pkg_dir)
        packages = find_packages(self.dist_dir, exclude=('pkg',))
        assert packages == ['pkg.subpkg']

    def test_include_excludes_other(self):
        """
        If include is specified, other packages should be excluded.
        """
        self._touch('__init__.py', self.pkg_dir)
        alt_dir = self._mkdir('other_pkg', self.dist_dir)
        self._touch('__init__.py', alt_dir)
        packages = find_packages(self.dist_dir, include=['other_pkg'])
        assert packages == ['other_pkg']

    def test_dir_with_dot_is_skipped(self):
        shutil.rmtree(os.path.join(self.dist_dir, 'pkg/subpkg/assets'))
        data_dir = self._mkdir('some.data', self.pkg_dir)
        self._touch('__init__.py', data_dir)
        self._touch('file.dat', data_dir)
        packages = find_packages(self.dist_dir)
        assert 'pkg.some.data' not in packages

    def test_dir_with_packages_in_subdir_is_excluded(self):
        """
        Ensure that a package in a non-package such as build/pkg/__init__.py
        is excluded.
        """
        build_dir = self._mkdir('build', self.dist_dir)
        build_pkg_dir = self._mkdir('pkg', build_dir)
        self._touch('__init__.py', build_pkg_dir)
        packages = find_packages(self.dist_dir)
        assert 'build.pkg' not in packages

    @pytest.mark.skipif(not os_helper.can_symlink(), reason='Symlink support required')
    def test_symlinked_packages_are_included(self):
        """
        A symbolically-linked directory should be treated like any other
        directory when matched as a package.

        Create a link from lpkg -> pkg.
        """
        self._touch('__init__.py', self.pkg_dir)
        linked_pkg = os.path.join(self.dist_dir, 'lpkg')
        os.symlink('pkg', linked_pkg)
        assert os.path.isdir(linked_pkg)
        packages = find_packages(self.dist_dir)
        assert 'lpkg' in packages

    def _assert_packages(self, actual, expected):
        assert set(actual) == set(expected)

    def test_pep420_ns_package(self):
        packages = find_namespace_packages(
            self.dist_dir, include=['pkg*'], exclude=['pkg.subpkg.assets']
        )
        self._assert_packages(packages, ['pkg', 'pkg.nspkg', 'pkg.subpkg'])

    def test_pep420_ns_package_no_includes(self):
        packages = find_namespace_packages(self.dist_dir, exclude=['pkg.subpkg.assets'])
        self._assert_packages(packages, ['docs', 'pkg', 'pkg.nspkg', 'pkg.subpkg'])

    def test_pep420_ns_package_no_includes_or_excludes(self):
        packages = find_namespace_packages(self.dist_dir)
        expected = ['docs', 'pkg', 'pkg.nspkg', 'pkg.subpkg', 'pkg.subpkg.assets']
        self._assert_packages(packages, expected)

    def test_regular_package_with_nested_pep420_ns_packages(self):
        self._touch('__init__.py', self.pkg_dir)
        packages = find_namespace_packages(
            self.dist_dir, exclude=['docs', 'pkg.subpkg.assets']
        )
        self._assert_packages(packages, ['pkg', 'pkg.nspkg', 'pkg.subpkg'])

    def test_pep420_ns_package_no_non_package_dirs(self):
        shutil.rmtree(self.docs_dir)
        shutil.rmtree(os.path.join(self.dist_dir, 'pkg/subpkg/assets'))
        packages = find_namespace_packages(self.dist_dir)
        self._assert_packages(packages, ['pkg', 'pkg.nspkg', 'pkg.subpkg'])


class TestFlatLayoutPackageFinder:
    EXAMPLES = {
        "hidden-folders": (
            [".pkg/__init__.py", "pkg/__init__.py", "pkg/nested/file.txt"],
            ["pkg", "pkg.nested"],
        ),
        "private-packages": (
            ["_pkg/__init__.py", "pkg/_private/__init__.py"],
            ["pkg", "pkg._private"],
        ),
        "invalid-name": (
            ["invalid-pkg/__init__.py", "other.pkg/__init__.py", "yet,another/file.py"],
            [],
        ),
        "docs": (["pkg/__init__.py", "docs/conf.py", "docs/readme.rst"], ["pkg"]),
        "tests": (
            ["pkg/__init__.py", "tests/test_pkg.py", "tests/__init__.py"],
            ["pkg"],
        ),
        "examples": (
            [
                "pkg/__init__.py",
                "examples/__init__.py",
                "examples/file.py",
                "example/other_file.py",
                # Sub-packages should always be fine
                "pkg/example/__init__.py",
                "pkg/examples/__init__.py",
            ],
            ["pkg", "pkg.examples", "pkg.example"],
        ),
        "tool-specific": (
            [
                "htmlcov/index.html",
                "pkg/__init__.py",
                "tasks/__init__.py",
                "tasks/subpackage/__init__.py",
                "fabfile/__init__.py",
                "fabfile/subpackage/__init__.py",
                # Sub-packages should always be fine
                "pkg/tasks/__init__.py",
                "pkg/fabfile/__init__.py",
            ],
            ["pkg", "pkg.tasks", "pkg.fabfile"],
        ),
    }

    @pytest.mark.parametrize("example", EXAMPLES.keys())
    def test_unwanted_directories_not_included(self, tmp_path, example):
        files, expected_packages = self.EXAMPLES[example]
        ensure_files(tmp_path, files)
        found_packages = FlatLayoutPackageFinder.find(str(tmp_path))
        assert set(found_packages) == set(expected_packages)


def ensure_files(root_path, files):
    for file in files:
        path = root_path / file
        path.parent.mkdir(parents=True, exist_ok=True)
        path.touch()


===== tests/test_build_py.py =====

import os
import shutil
import stat
import warnings
from pathlib import Path
from unittest.mock import Mock

import jaraco.path
import pytest

from setuptools import SetuptoolsDeprecationWarning
from setuptools.dist import Distribution

from .textwrap import DALS


def test_directories_in_package_data_glob(tmpdir_cwd):
    """
    Directories matching the glob in package_data should
    not be included in the package data.

    Regression test for #261.
""" dist = Distribution( dict( script_name='setup.py', script_args=['build_py'], packages=[''], package_data={'': ['path/*']}, ) ) os.makedirs('path/subpath') dist.parse_command_line() dist.run_commands() def test_recursive_in_package_data_glob(tmpdir_cwd): """ Files matching recursive globs (**) in package_data should be included in the package data. #1806 """ dist = Distribution( dict( script_name='setup.py', script_args=['build_py'], packages=[''], package_data={'': ['path/**/data']}, ) ) os.makedirs('path/subpath/subsubpath') open('path/subpath/subsubpath/data', 'wb').close() dist.parse_command_line() dist.run_commands() assert stat.S_ISREG(os.stat('build/lib/path/subpath/subsubpath/data').st_mode), ( "File is not included" ) def test_read_only(tmpdir_cwd): """ Ensure read-only flag is not preserved in copy for package modules and package data, as that causes problems with deleting read-only files on Windows. #1451 """ dist = Distribution( dict( script_name='setup.py', script_args=['build_py'], packages=['pkg'], package_data={'pkg': ['data.dat']}, ) ) os.makedirs('pkg') open('pkg/__init__.py', 'wb').close() open('pkg/data.dat', 'wb').close() os.chmod('pkg/__init__.py', stat.S_IREAD) os.chmod('pkg/data.dat', stat.S_IREAD) dist.parse_command_line() dist.run_commands() shutil.rmtree('build') @pytest.mark.xfail( 'platform.system() == "Windows"', reason="On Windows, files do not have executable bits", raises=AssertionError, strict=True, ) def test_executable_data(tmpdir_cwd): """ Ensure executable bit is preserved in copy for package data, as users rely on it for scripts. #2041 """ dist = Distribution( dict( script_name='setup.py', script_args=['build_py'], packages=['pkg'], package_data={'pkg': ['run-me']}, ) ) os.makedirs('pkg') open('pkg/__init__.py', 'wb').close() open('pkg/run-me', 'wb').close() os.chmod('pkg/run-me', 0o700) dist.parse_command_line() dist.run_commands() assert os.stat('build/lib/pkg/run-me').st_mode & stat.S_IEXEC, ( "Script is not executable" ) EXAMPLE_WITH_MANIFEST = { "setup.cfg": DALS( """ [metadata] name = mypkg version = 42 [options] include_package_data = True packages = find: [options.packages.find] exclude = *.tests* """ ), "mypkg": { "__init__.py": "", "resource_file.txt": "", "tests": { "__init__.py": "", "test_mypkg.py": "", "test_file.txt": "", }, }, "MANIFEST.in": DALS( """ global-include *.py *.txt global-exclude *.py[cod] prune dist prune build prune *.egg-info """ ), } def test_excluded_subpackages(tmpdir_cwd): jaraco.path.build(EXAMPLE_WITH_MANIFEST) dist = Distribution({"script_name": "%PEP 517%"}) dist.parse_config_files() build_py = dist.get_command_obj("build_py") msg = r"Python recognizes 'mypkg\.tests' as an importable package" with pytest.warns(SetuptoolsDeprecationWarning, match=msg): # TODO: To fix #3260 we need some transition period to deprecate the # existing behavior of `include_package_data`. After the transition, we # should remove the warning and fix the behavior. 
if os.getenv("SETUPTOOLS_USE_DISTUTILS") == "stdlib": # pytest.warns reset the warning filter temporarily # https://github.com/pytest-dev/pytest/issues/4011#issuecomment-423494810 warnings.filterwarnings( "ignore", "'encoding' argument not specified", module="distutils.text_file", # This warning is already fixed in pypa/distutils but not in stdlib ) build_py.finalize_options() build_py.run() build_dir = Path(dist.get_command_obj("build_py").build_lib) assert (build_dir / "mypkg/__init__.py").exists() assert (build_dir / "mypkg/resource_file.txt").exists() # Setuptools is configured to ignore `mypkg.tests`, therefore the following # files/dirs should not be included in the distribution. for f in [ "mypkg/tests/__init__.py", "mypkg/tests/test_mypkg.py", "mypkg/tests/test_file.txt", "mypkg/tests", ]: with pytest.raises(AssertionError): # TODO: Enforce the following assertion once #3260 is fixed # (remove context manager and the following xfail). assert not (build_dir / f).exists() pytest.xfail("#3260") @pytest.mark.filterwarnings("ignore::setuptools.SetuptoolsDeprecationWarning") def test_existing_egg_info(tmpdir_cwd, monkeypatch): """When provided with the ``existing_egg_info_dir`` attribute, build_py should not attempt to run egg_info again. """ # == Pre-condition == # Generate an egg-info dir jaraco.path.build(EXAMPLE_WITH_MANIFEST) dist = Distribution({"script_name": "%PEP 517%"}) dist.parse_config_files() assert dist.include_package_data egg_info = dist.get_command_obj("egg_info") dist.run_command("egg_info") egg_info_dir = next(Path(egg_info.egg_base).glob("*.egg-info")) assert egg_info_dir.is_dir() # == Setup == build_py = dist.get_command_obj("build_py") build_py.finalize_options() egg_info = dist.get_command_obj("egg_info") egg_info_run = Mock(side_effect=egg_info.run) monkeypatch.setattr(egg_info, "run", egg_info_run) # == Remove caches == # egg_info is called when build_py looks for data_files, which gets cached. 
# We need to ensure it is not cached yet, otherwise it may impact on the tests build_py.__dict__.pop('data_files', None) dist.reinitialize_command(egg_info) # == Sanity check == # Ensure that if existing_egg_info is not given, build_py attempts to run egg_info build_py.existing_egg_info_dir = None build_py.run() egg_info_run.assert_called() # == Remove caches == egg_info_run.reset_mock() build_py.__dict__.pop('data_files', None) dist.reinitialize_command(egg_info) # == Actual test == # Ensure that if existing_egg_info_dir is given, egg_info doesn't run build_py.existing_egg_info_dir = egg_info_dir build_py.run() egg_info_run.assert_not_called() assert build_py.data_files # Make sure the list of outputs is actually OK outputs = map(lambda x: x.replace(os.sep, "/"), build_py.get_outputs()) assert outputs example = str(Path(build_py.build_lib, "mypkg/__init__.py")).replace(os.sep, "/") assert example in outputs EXAMPLE_ARBITRARY_MAPPING = { "pyproject.toml": DALS( """ [project] name = "mypkg" version = "42" [tool.setuptools] packages = ["mypkg", "mypkg.sub1", "mypkg.sub2", "mypkg.sub2.nested"] [tool.setuptools.package-dir] "" = "src" "mypkg.sub2" = "src/mypkg/_sub2" "mypkg.sub2.nested" = "other" """ ), "src": { "mypkg": { "__init__.py": "", "resource_file.txt": "", "sub1": { "__init__.py": "", "mod1.py": "", }, "_sub2": { "mod2.py": "", }, }, }, "other": { "__init__.py": "", "mod3.py": "", }, "MANIFEST.in": DALS( """ global-include *.py *.txt global-exclude *.py[cod] """ ), } def test_get_outputs(tmpdir_cwd): jaraco.path.build(EXAMPLE_ARBITRARY_MAPPING) dist = Distribution({"script_name": "%test%"}) dist.parse_config_files() build_py = dist.get_command_obj("build_py") build_py.editable_mode = True build_py.ensure_finalized() build_lib = build_py.build_lib.replace(os.sep, "/") outputs = {x.replace(os.sep, "/") for x in build_py.get_outputs()} assert outputs == { f"{build_lib}/mypkg/__init__.py", f"{build_lib}/mypkg/resource_file.txt", f"{build_lib}/mypkg/sub1/__init__.py", f"{build_lib}/mypkg/sub1/mod1.py", f"{build_lib}/mypkg/sub2/mod2.py", f"{build_lib}/mypkg/sub2/nested/__init__.py", f"{build_lib}/mypkg/sub2/nested/mod3.py", } mapping = { k.replace(os.sep, "/"): v.replace(os.sep, "/") for k, v in build_py.get_output_mapping().items() } assert mapping == { f"{build_lib}/mypkg/__init__.py": "src/mypkg/__init__.py", f"{build_lib}/mypkg/resource_file.txt": "src/mypkg/resource_file.txt", f"{build_lib}/mypkg/sub1/__init__.py": "src/mypkg/sub1/__init__.py", f"{build_lib}/mypkg/sub1/mod1.py": "src/mypkg/sub1/mod1.py", f"{build_lib}/mypkg/sub2/mod2.py": "src/mypkg/_sub2/mod2.py", f"{build_lib}/mypkg/sub2/nested/__init__.py": "other/__init__.py", f"{build_lib}/mypkg/sub2/nested/mod3.py": "other/mod3.py", } class TestTypeInfoFiles: PYPROJECTS = { "default_pyproject": DALS( """ [project] name = "foo" version = "1" """ ), "dont_include_package_data": DALS( """ [project] name = "foo" version = "1" [tool.setuptools] include-package-data = false """ ), "exclude_type_info": DALS( """ [project] name = "foo" version = "1" [tool.setuptools] include-package-data = false [tool.setuptools.exclude-package-data] "*" = ["py.typed", "*.pyi"] """ ), } EXAMPLES = { "simple_namespace": { "directory_structure": { "foo": { "bar.pyi": "", "py.typed": "", "__init__.py": "", } }, "expected_type_files": {"foo/bar.pyi", "foo/py.typed"}, }, "nested_inside_namespace": { "directory_structure": { "foo": { "bar": { "py.typed": "", "mod.pyi": "", } } }, "expected_type_files": {"foo/bar/mod.pyi", "foo/bar/py.typed"}, }, 
"namespace_nested_inside_regular": { "directory_structure": { "foo": { "namespace": { "foo.pyi": "", }, "__init__.pyi": "", "py.typed": "", } }, "expected_type_files": { "foo/namespace/foo.pyi", "foo/__init__.pyi", "foo/py.typed", }, }, } @pytest.mark.parametrize( "pyproject", [ "default_pyproject", pytest.param( "dont_include_package_data", marks=pytest.mark.xfail(reason="pypa/setuptools#4350"), ), ], ) @pytest.mark.parametrize("example", EXAMPLES.keys()) def test_type_files_included_by_default(self, tmpdir_cwd, pyproject, example): structure = { **self.EXAMPLES[example]["directory_structure"], "pyproject.toml": self.PYPROJECTS[pyproject], } expected_type_files = self.EXAMPLES[example]["expected_type_files"] jaraco.path.build(structure) build_py = get_finalized_build_py() outputs = get_outputs(build_py) assert expected_type_files <= outputs @pytest.mark.parametrize("pyproject", ["exclude_type_info"]) @pytest.mark.parametrize("example", EXAMPLES.keys()) def test_type_files_can_be_excluded(self, tmpdir_cwd, pyproject, example): structure = { **self.EXAMPLES[example]["directory_structure"], "pyproject.toml": self.PYPROJECTS[pyproject], } expected_type_files = self.EXAMPLES[example]["expected_type_files"] jaraco.path.build(structure) build_py = get_finalized_build_py() outputs = get_outputs(build_py) assert expected_type_files.isdisjoint(outputs) def test_stub_only_package(self, tmpdir_cwd): structure = { "pyproject.toml": DALS( """ [project] name = "foo-stubs" version = "1" """ ), "foo-stubs": {"__init__.pyi": "", "bar.pyi": ""}, } expected_type_files = {"foo-stubs/__init__.pyi", "foo-stubs/bar.pyi"} jaraco.path.build(structure) build_py = get_finalized_build_py() outputs = get_outputs(build_py) assert expected_type_files <= outputs def get_finalized_build_py(script_name="%build_py-test%"): dist = Distribution({"script_name": script_name}) dist.parse_config_files() build_py = dist.get_command_obj("build_py") build_py.finalize_options() return build_py def get_outputs(build_py): build_dir = Path(build_py.build_lib) return { os.path.relpath(x, build_dir).replace(os.sep, "/") for x in build_py.get_outputs() } tests/config/setupcfg_examples.txt 0000644 00000003570 15030122015 0013424 0 ustar 00 # ==================================================================== # Some popular packages that use setup.cfg (and others not so popular) # Reference: https://hugovk.github.io/top-pypi-packages/ # ==================================================================== https://github.com/pypa/setuptools/raw/52c990172fec37766b3566679724aa8bf70ae06d/setup.cfg https://github.com/pypa/wheel/raw/0acd203cd896afec7f715aa2ff5980a403459a3b/setup.cfg https://github.com/python/importlib_metadata/raw/2f05392ca980952a6960d82b2f2d2ea10aa53239/setup.cfg https://github.com/jaraco/skeleton/raw/d9008b5c510cd6969127a6a2ab6f832edddef296/setup.cfg https://github.com/jaraco/zipp/raw/700d3a96390e970b6b962823bfea78b4f7e1c537/setup.cfg https://github.com/pallets/jinja/raw/7d72eb7fefb7dce065193967f31f805180508448/setup.cfg https://github.com/tkem/cachetools/raw/2fd87a94b8d3861d80e9e4236cd480bfdd21c90d/setup.cfg https://github.com/aio-libs/aiohttp/raw/5e0e6b7080f2408d5f1dd544c0e1cf88378b7b10/setup.cfg https://github.com/pallets/flask/raw/9486b6cf57bd6a8a261f67091aca8ca78eeec1e3/setup.cfg https://github.com/pallets/click/raw/6411f425fae545f42795665af4162006b36c5e4a/setup.cfg https://github.com/sqlalchemy/sqlalchemy/raw/533f5718904b620be8d63f2474229945d6f8ba5d/setup.cfg 
https://github.com/pytest-dev/pluggy/raw/461ef63291d13589c4e21aa182cd1529257e9a0a/setup.cfg https://github.com/pytest-dev/pytest/raw/c7be96dae487edbd2f55b561b31b68afac1dabe6/setup.cfg https://github.com/platformdirs/platformdirs/raw/7b7852128dd6f07511b618d6edea35046bd0c6ff/setup.cfg https://github.com/pandas-dev/pandas/raw/bc17343f934a33dc231c8c74be95d8365537c376/setup.cfg https://github.com/django/django/raw/4e249d11a6e56ca8feb4b055b681cec457ef3a3d/setup.cfg https://github.com/pyscaffold/pyscaffold/raw/de7aa5dc059fbd04307419c667cc4961bc9df4b8/setup.cfg https://github.com/pypa/virtualenv/raw/f92eda6e3da26a4d28c2663ffb85c4960bdb990c/setup.cfg tests/config/test_pyprojecttoml_dynamic_deps.py 0000644 00000006307 15030122015 0016211 0 ustar 00 from inspect import cleandoc import pytest from jaraco import path from setuptools.config.pyprojecttoml import apply_configuration from setuptools.dist import Distribution from setuptools.warnings import SetuptoolsWarning def test_dynamic_dependencies(tmp_path): files = { "requirements.txt": "six\n # comment\n", "pyproject.toml": cleandoc( """ [project] name = "myproj" version = "1.0" dynamic = ["dependencies"] [build-system] requires = ["setuptools", "wheel"] build-backend = "setuptools.build_meta" [tool.setuptools.dynamic.dependencies] file = ["requirements.txt"] """ ), } path.build(files, prefix=tmp_path) dist = Distribution() dist = apply_configuration(dist, tmp_path / "pyproject.toml") assert dist.install_requires == ["six"] def test_dynamic_optional_dependencies(tmp_path): files = { "requirements-docs.txt": "sphinx\n # comment\n", "pyproject.toml": cleandoc( """ [project] name = "myproj" version = "1.0" dynamic = ["optional-dependencies"] [tool.setuptools.dynamic.optional-dependencies.docs] file = ["requirements-docs.txt"] [build-system] requires = ["setuptools", "wheel"] build-backend = "setuptools.build_meta" """ ), } path.build(files, prefix=tmp_path) dist = Distribution() dist = apply_configuration(dist, tmp_path / "pyproject.toml") assert dist.extras_require == {"docs": ["sphinx"]} def test_mixed_dynamic_optional_dependencies(tmp_path): """ Test that if PEP 621 was loosened to allow mixing of dynamic and static configurations in the case of fields containing sub-fields (groups), things would work out. """ files = { "requirements-images.txt": "pillow~=42.0\n # comment\n", "pyproject.toml": cleandoc( """ [project] name = "myproj" version = "1.0" dynamic = ["optional-dependencies"] [project.optional-dependencies] docs = ["sphinx"] [tool.setuptools.dynamic.optional-dependencies.images] file = ["requirements-images.txt"] """ ), } path.build(files, prefix=tmp_path) pyproject = tmp_path / "pyproject.toml" with pytest.raises(ValueError, match="project.optional-dependencies"): apply_configuration(Distribution(), pyproject) def test_mixed_extras_require_optional_dependencies(tmp_path): files = { "pyproject.toml": cleandoc( """ [project] name = "myproj" version = "1.0" optional-dependencies.docs = ["sphinx"] """ ), } path.build(files, prefix=tmp_path) pyproject = tmp_path / "pyproject.toml" with pytest.warns(SetuptoolsWarning, match=".extras_require. 
overwritten"): dist = Distribution({"extras_require": {"hello": ["world"]}}) dist = apply_configuration(dist, pyproject) assert dist.extras_require == {"docs": ["sphinx"]} tests/config/test_setupcfg.py 0000644 00000101223 15030122015 0012370 0 ustar 00 import configparser import contextlib import inspect import re from pathlib import Path from unittest.mock import Mock, patch import pytest from packaging.requirements import InvalidRequirement from setuptools.config.setupcfg import ConfigHandler, Target, read_configuration from setuptools.dist import Distribution, _Distribution from setuptools.warnings import SetuptoolsDeprecationWarning from ..textwrap import DALS from distutils.errors import DistutilsFileError, DistutilsOptionError class ErrConfigHandler(ConfigHandler[Target]): """Erroneous handler. Fails to implement required methods.""" section_prefix = "**err**" def make_package_dir(name, base_dir, ns=False): dir_package = base_dir for dir_name in name.split('/'): dir_package = dir_package.mkdir(dir_name) init_file = None if not ns: init_file = dir_package.join('__init__.py') init_file.write('') return dir_package, init_file def fake_env( tmpdir, setup_cfg, setup_py=None, encoding='ascii', package_path='fake_package' ): if setup_py is None: setup_py = 'from setuptools import setup\nsetup()\n' tmpdir.join('setup.py').write(setup_py) config = tmpdir.join('setup.cfg') config.write(setup_cfg.encode(encoding), mode='wb') package_dir, init_file = make_package_dir(package_path, tmpdir) init_file.write( 'VERSION = (1, 2, 3)\n' '\n' 'VERSION_MAJOR = 1' '\n' 'def get_version():\n' ' return [3, 4, 5, "dev"]\n' '\n' ) return package_dir, config @contextlib.contextmanager def get_dist(tmpdir, kwargs_initial=None, parse=True): kwargs_initial = kwargs_initial or {} with tmpdir.as_cwd(): dist = Distribution(kwargs_initial) dist.script_name = 'setup.py' parse and dist.parse_config_files() yield dist def test_parsers_implemented(): with pytest.raises(NotImplementedError): handler = ErrConfigHandler(None, {}, False, Mock()) handler.parsers class TestConfigurationReader: def test_basic(self, tmpdir): _, config = fake_env( tmpdir, '[metadata]\n' 'version = 10.1.1\n' 'keywords = one, two\n' '\n' '[options]\n' 'scripts = bin/a.py, bin/b.py\n', ) config_dict = read_configuration(str(config)) assert config_dict['metadata']['version'] == '10.1.1' assert config_dict['metadata']['keywords'] == ['one', 'two'] assert config_dict['options']['scripts'] == ['bin/a.py', 'bin/b.py'] def test_no_config(self, tmpdir): with pytest.raises(DistutilsFileError): read_configuration(str(tmpdir.join('setup.cfg'))) def test_ignore_errors(self, tmpdir): _, config = fake_env( tmpdir, '[metadata]\nversion = attr: none.VERSION\nkeywords = one, two\n', ) with pytest.raises(ImportError): read_configuration(str(config)) config_dict = read_configuration(str(config), ignore_option_errors=True) assert config_dict['metadata']['keywords'] == ['one', 'two'] assert 'version' not in config_dict['metadata'] config.remove() class TestMetadata: def test_basic(self, tmpdir): fake_env( tmpdir, '[metadata]\n' 'version = 10.1.1\n' 'description = Some description\n' 'long_description_content_type = text/something\n' 'long_description = file: README\n' 'name = fake_name\n' 'keywords = one, two\n' 'provides = package, package.sub\n' 'license = otherlic\n' 'download_url = http://test.test.com/test/\n' 'maintainer_email = test@test.com\n', ) tmpdir.join('README').write('readme contents\nline2') meta_initial = { # This will be used so `otherlic` won't 
replace it. 'license': 'BSD 3-Clause License', } with get_dist(tmpdir, meta_initial) as dist: metadata = dist.metadata assert metadata.version == '10.1.1' assert metadata.description == 'Some description' assert metadata.long_description_content_type == 'text/something' assert metadata.long_description == 'readme contents\nline2' assert metadata.provides == ['package', 'package.sub'] assert metadata.license == 'BSD 3-Clause License' assert metadata.name == 'fake_name' assert metadata.keywords == ['one', 'two'] assert metadata.download_url == 'http://test.test.com/test/' assert metadata.maintainer_email == 'test@test.com' def test_license_cfg(self, tmpdir): fake_env( tmpdir, DALS( """ [metadata] name=foo version=0.0.1 license=Apache 2.0 """ ), ) with get_dist(tmpdir) as dist: metadata = dist.metadata assert metadata.name == "foo" assert metadata.version == "0.0.1" assert metadata.license == "Apache 2.0" def test_file_mixed(self, tmpdir): fake_env( tmpdir, '[metadata]\nlong_description = file: README.rst, CHANGES.rst\n\n', ) tmpdir.join('README.rst').write('readme contents\nline2') tmpdir.join('CHANGES.rst').write('changelog contents\nand stuff') with get_dist(tmpdir) as dist: assert dist.metadata.long_description == ( 'readme contents\nline2\nchangelog contents\nand stuff' ) def test_file_sandboxed(self, tmpdir): tmpdir.ensure("README") project = tmpdir.join('depth1', 'depth2') project.ensure(dir=True) fake_env(project, '[metadata]\nlong_description = file: ../../README\n') with get_dist(project, parse=False) as dist: with pytest.raises(DistutilsOptionError): dist.parse_config_files() # file: out of sandbox def test_aliases(self, tmpdir): fake_env( tmpdir, '[metadata]\n' 'author_email = test@test.com\n' 'home_page = http://test.test.com/test/\n' 'summary = Short summary\n' 'platform = a, b\n' 'classifier =\n' ' Framework :: Django\n' ' Programming Language :: Python :: 3.5\n', ) with get_dist(tmpdir) as dist: metadata = dist.metadata assert metadata.author_email == 'test@test.com' assert metadata.url == 'http://test.test.com/test/' assert metadata.description == 'Short summary' assert metadata.platforms == ['a', 'b'] assert metadata.classifiers == [ 'Framework :: Django', 'Programming Language :: Python :: 3.5', ] def test_multiline(self, tmpdir): fake_env( tmpdir, '[metadata]\n' 'name = fake_name\n' 'keywords =\n' ' one\n' ' two\n' 'classifiers =\n' ' Framework :: Django\n' ' Programming Language :: Python :: 3.5\n', ) with get_dist(tmpdir) as dist: metadata = dist.metadata assert metadata.keywords == ['one', 'two'] assert metadata.classifiers == [ 'Framework :: Django', 'Programming Language :: Python :: 3.5', ] def test_dict(self, tmpdir): fake_env( tmpdir, '[metadata]\n' 'project_urls =\n' ' Link One = https://example.com/one/\n' ' Link Two = https://example.com/two/\n', ) with get_dist(tmpdir) as dist: metadata = dist.metadata assert metadata.project_urls == { 'Link One': 'https://example.com/one/', 'Link Two': 'https://example.com/two/', } def test_version(self, tmpdir): package_dir, config = fake_env( tmpdir, '[metadata]\nversion = attr: fake_package.VERSION\n' ) sub_a = package_dir.mkdir('subpkg_a') sub_a.join('__init__.py').write('') sub_a.join('mod.py').write('VERSION = (2016, 11, 26)') sub_b = package_dir.mkdir('subpkg_b') sub_b.join('__init__.py').write('') sub_b.join('mod.py').write( 'import third_party_module\nVERSION = (2016, 11, 26)' ) with get_dist(tmpdir) as dist: assert dist.metadata.version == '1.2.3' config.write('[metadata]\nversion = attr: fake_package.get_version\n') 
with get_dist(tmpdir) as dist: assert dist.metadata.version == '3.4.5.dev' config.write('[metadata]\nversion = attr: fake_package.VERSION_MAJOR\n') with get_dist(tmpdir) as dist: assert dist.metadata.version == '1' config.write('[metadata]\nversion = attr: fake_package.subpkg_a.mod.VERSION\n') with get_dist(tmpdir) as dist: assert dist.metadata.version == '2016.11.26' config.write('[metadata]\nversion = attr: fake_package.subpkg_b.mod.VERSION\n') with get_dist(tmpdir) as dist: assert dist.metadata.version == '2016.11.26' def test_version_file(self, tmpdir): fake_env(tmpdir, '[metadata]\nversion = file: fake_package/version.txt\n') tmpdir.join('fake_package', 'version.txt').write('1.2.3\n') with get_dist(tmpdir) as dist: assert dist.metadata.version == '1.2.3' tmpdir.join('fake_package', 'version.txt').write('1.2.3\n4.5.6\n') with pytest.raises(DistutilsOptionError): with get_dist(tmpdir) as dist: dist.metadata.version def test_version_with_package_dir_simple(self, tmpdir): fake_env( tmpdir, '[metadata]\n' 'version = attr: fake_package_simple.VERSION\n' '[options]\n' 'package_dir =\n' ' = src\n', package_path='src/fake_package_simple', ) with get_dist(tmpdir) as dist: assert dist.metadata.version == '1.2.3' def test_version_with_package_dir_rename(self, tmpdir): fake_env( tmpdir, '[metadata]\n' 'version = attr: fake_package_rename.VERSION\n' '[options]\n' 'package_dir =\n' ' fake_package_rename = fake_dir\n', package_path='fake_dir', ) with get_dist(tmpdir) as dist: assert dist.metadata.version == '1.2.3' def test_version_with_package_dir_complex(self, tmpdir): fake_env( tmpdir, '[metadata]\n' 'version = attr: fake_package_complex.VERSION\n' '[options]\n' 'package_dir =\n' ' fake_package_complex = src/fake_dir\n', package_path='src/fake_dir', ) with get_dist(tmpdir) as dist: assert dist.metadata.version == '1.2.3' def test_unknown_meta_item(self, tmpdir): fake_env(tmpdir, '[metadata]\nname = fake_name\nunknown = some\n') with get_dist(tmpdir, parse=False) as dist: dist.parse_config_files() # Skip unknown. def test_usupported_section(self, tmpdir): fake_env(tmpdir, '[metadata.some]\nkey = val\n') with get_dist(tmpdir, parse=False) as dist: with pytest.raises(DistutilsOptionError): dist.parse_config_files() def test_classifiers(self, tmpdir): expected = set([ 'Framework :: Django', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.5', ]) # From file. 
_, config = fake_env(tmpdir, '[metadata]\nclassifiers = file: classifiers\n') tmpdir.join('classifiers').write( 'Framework :: Django\n' 'Programming Language :: Python :: 3\n' 'Programming Language :: Python :: 3.5\n' ) with get_dist(tmpdir) as dist: assert set(dist.metadata.classifiers) == expected # From list notation config.write( '[metadata]\n' 'classifiers =\n' ' Framework :: Django\n' ' Programming Language :: Python :: 3\n' ' Programming Language :: Python :: 3.5\n' ) with get_dist(tmpdir) as dist: assert set(dist.metadata.classifiers) == expected def test_interpolation(self, tmpdir): fake_env(tmpdir, '[metadata]\ndescription = %(message)s\n') with pytest.raises(configparser.InterpolationMissingOptionError): with get_dist(tmpdir): pass def test_non_ascii_1(self, tmpdir): fake_env(tmpdir, '[metadata]\ndescription = éàïôñ\n', encoding='utf-8') with get_dist(tmpdir): pass def test_non_ascii_3(self, tmpdir): fake_env(tmpdir, '\n# -*- coding: invalid\n') with get_dist(tmpdir): pass def test_non_ascii_4(self, tmpdir): fake_env( tmpdir, '# -*- coding: utf-8\n[metadata]\ndescription = éàïôñ\n', encoding='utf-8', ) with get_dist(tmpdir) as dist: assert dist.metadata.description == 'éàïôñ' def test_not_utf8(self, tmpdir): """ Config files encoded not in UTF-8 will fail """ fake_env( tmpdir, '# vim: set fileencoding=iso-8859-15 :\n[metadata]\ndescription = éàïôñ\n', encoding='iso-8859-15', ) with pytest.raises(UnicodeDecodeError): with get_dist(tmpdir): pass @pytest.mark.parametrize( ("error_msg", "config", "invalid"), [ ( "Invalid dash-separated key 'author-email' in 'metadata' (setup.cfg)", DALS( """ [metadata] author-email = test@test.com maintainer_email = foo@foo.com """ ), {"author-email": "test@test.com"}, ), ( "Invalid uppercase key 'Name' in 'metadata' (setup.cfg)", DALS( """ [metadata] Name = foo description = Some description """ ), {"Name": "foo"}, ), ], ) def test_invalid_options_previously_deprecated( self, tmpdir, error_msg, config, invalid ): # This test and related methods can be removed when no longer needed. 
# Deprecation postponed due to push-back from the community in # https://github.com/pypa/setuptools/issues/4910 fake_env(tmpdir, config) with pytest.warns(SetuptoolsDeprecationWarning, match=re.escape(error_msg)): dist = get_dist(tmpdir).__enter__() tmpdir.join('setup.cfg').remove() for field, value in invalid.items(): attr = field.replace("-", "_").lower() assert getattr(dist.metadata, attr) == value class TestOptions: def test_basic(self, tmpdir): fake_env( tmpdir, '[options]\n' 'zip_safe = True\n' 'include_package_data = yes\n' 'package_dir = b=c, =src\n' 'packages = pack_a, pack_b.subpack\n' 'namespace_packages = pack1, pack2\n' 'scripts = bin/one.py, bin/two.py\n' 'eager_resources = bin/one.py, bin/two.py\n' 'install_requires = docutils>=0.3; pack ==1.1, ==1.3; hey\n' 'setup_requires = docutils>=0.3; spack ==1.1, ==1.3; there\n' 'dependency_links = http://some.com/here/1, ' 'http://some.com/there/2\n' 'python_requires = >=1.0, !=2.8\n' 'py_modules = module1, module2\n', ) deprec = pytest.warns(SetuptoolsDeprecationWarning, match="namespace_packages") with deprec, get_dist(tmpdir) as dist: assert dist.zip_safe assert dist.include_package_data assert dist.package_dir == {'': 'src', 'b': 'c'} assert dist.packages == ['pack_a', 'pack_b.subpack'] assert dist.namespace_packages == ['pack1', 'pack2'] assert dist.scripts == ['bin/one.py', 'bin/two.py'] assert dist.dependency_links == ([ 'http://some.com/here/1', 'http://some.com/there/2', ]) assert dist.install_requires == ([ 'docutils>=0.3', 'pack==1.1,==1.3', 'hey', ]) assert dist.setup_requires == ([ 'docutils>=0.3', 'spack ==1.1, ==1.3', 'there', ]) assert dist.python_requires == '>=1.0, !=2.8' assert dist.py_modules == ['module1', 'module2'] def test_multiline(self, tmpdir): fake_env( tmpdir, '[options]\n' 'package_dir = \n' ' b=c\n' ' =src\n' 'packages = \n' ' pack_a\n' ' pack_b.subpack\n' 'namespace_packages = \n' ' pack1\n' ' pack2\n' 'scripts = \n' ' bin/one.py\n' ' bin/two.py\n' 'eager_resources = \n' ' bin/one.py\n' ' bin/two.py\n' 'install_requires = \n' ' docutils>=0.3\n' ' pack ==1.1, ==1.3\n' ' hey\n' 'setup_requires = \n' ' docutils>=0.3\n' ' spack ==1.1, ==1.3\n' ' there\n' 'dependency_links = \n' ' http://some.com/here/1\n' ' http://some.com/there/2\n', ) deprec = pytest.warns(SetuptoolsDeprecationWarning, match="namespace_packages") with deprec, get_dist(tmpdir) as dist: assert dist.package_dir == {'': 'src', 'b': 'c'} assert dist.packages == ['pack_a', 'pack_b.subpack'] assert dist.namespace_packages == ['pack1', 'pack2'] assert dist.scripts == ['bin/one.py', 'bin/two.py'] assert dist.dependency_links == ([ 'http://some.com/here/1', 'http://some.com/there/2', ]) assert dist.install_requires == ([ 'docutils>=0.3', 'pack==1.1,==1.3', 'hey', ]) assert dist.setup_requires == ([ 'docutils>=0.3', 'spack ==1.1, ==1.3', 'there', ]) def test_package_dir_fail(self, tmpdir): fake_env(tmpdir, '[options]\npackage_dir = a b\n') with get_dist(tmpdir, parse=False) as dist: with pytest.raises(DistutilsOptionError): dist.parse_config_files() def test_package_data(self, tmpdir): fake_env( tmpdir, '[options.package_data]\n' '* = *.txt, *.rst\n' 'hello = *.msg\n' '\n' '[options.exclude_package_data]\n' '* = fake1.txt, fake2.txt\n' 'hello = *.dat\n', ) with get_dist(tmpdir) as dist: assert dist.package_data == { '': ['*.txt', '*.rst'], 'hello': ['*.msg'], } assert dist.exclude_package_data == { '': ['fake1.txt', 'fake2.txt'], 'hello': ['*.dat'], } def test_packages(self, tmpdir): fake_env(tmpdir, '[options]\npackages = find:\n') with 
get_dist(tmpdir) as dist: assert dist.packages == ['fake_package'] def test_find_directive(self, tmpdir): dir_package, config = fake_env(tmpdir, '[options]\npackages = find:\n') make_package_dir('sub_one', dir_package) make_package_dir('sub_two', dir_package) with get_dist(tmpdir) as dist: assert set(dist.packages) == set([ 'fake_package', 'fake_package.sub_two', 'fake_package.sub_one', ]) config.write( '[options]\n' 'packages = find:\n' '\n' '[options.packages.find]\n' 'where = .\n' 'include =\n' ' fake_package.sub_one\n' ' two\n' ) with get_dist(tmpdir) as dist: assert dist.packages == ['fake_package.sub_one'] config.write( '[options]\n' 'packages = find:\n' '\n' '[options.packages.find]\n' 'exclude =\n' ' fake_package.sub_one\n' ) with get_dist(tmpdir) as dist: assert set(dist.packages) == set(['fake_package', 'fake_package.sub_two']) def test_find_namespace_directive(self, tmpdir): dir_package, config = fake_env( tmpdir, '[options]\npackages = find_namespace:\n' ) make_package_dir('sub_one', dir_package) make_package_dir('sub_two', dir_package, ns=True) with get_dist(tmpdir) as dist: assert set(dist.packages) == { 'fake_package', 'fake_package.sub_two', 'fake_package.sub_one', } config.write( '[options]\n' 'packages = find_namespace:\n' '\n' '[options.packages.find]\n' 'where = .\n' 'include =\n' ' fake_package.sub_one\n' ' two\n' ) with get_dist(tmpdir) as dist: assert dist.packages == ['fake_package.sub_one'] config.write( '[options]\n' 'packages = find_namespace:\n' '\n' '[options.packages.find]\n' 'exclude =\n' ' fake_package.sub_one\n' ) with get_dist(tmpdir) as dist: assert set(dist.packages) == {'fake_package', 'fake_package.sub_two'} def test_extras_require(self, tmpdir): fake_env( tmpdir, '[options.extras_require]\n' 'pdf = ReportLab>=1.2; RXP\n' 'rest = \n' ' docutils>=0.3\n' ' pack ==1.1, ==1.3\n', ) with get_dist(tmpdir) as dist: assert dist.extras_require == { 'pdf': ['ReportLab>=1.2', 'RXP'], 'rest': ['docutils>=0.3', 'pack==1.1,==1.3'], } assert set(dist.metadata.provides_extras) == {'pdf', 'rest'} @pytest.mark.parametrize( "config", [ "[options.extras_require]\nfoo = bar;python_version<'3'", "[options.extras_require]\nfoo = bar;os_name=='linux'", "[options.extras_require]\nfoo = bar;python_version<'3'\n", "[options.extras_require]\nfoo = bar;os_name=='linux'\n", "[options]\ninstall_requires = bar;python_version<'3'", "[options]\ninstall_requires = bar;os_name=='linux'", "[options]\ninstall_requires = bar;python_version<'3'\n", "[options]\ninstall_requires = bar;os_name=='linux'\n", ], ) def test_raises_accidental_env_marker_misconfig(self, config, tmpdir): fake_env(tmpdir, config) match = ( r"One of the parsed requirements in `(install_requires|extras_require.+)` " "looks like a valid environment marker.*" ) with pytest.raises(InvalidRequirement, match=match): with get_dist(tmpdir) as _: pass @pytest.mark.parametrize( "config", [ "[options.extras_require]\nfoo = bar;python_version<3", "[options.extras_require]\nfoo = bar;python_version<3\n", "[options]\ninstall_requires = bar;python_version<3", "[options]\ninstall_requires = bar;python_version<3\n", ], ) def test_warn_accidental_env_marker_misconfig(self, config, tmpdir): fake_env(tmpdir, config) match = ( r"One of the parsed requirements in `(install_requires|extras_require.+)` " "looks like a valid environment marker.*" ) with pytest.warns(SetuptoolsDeprecationWarning, match=match): with get_dist(tmpdir) as _: pass @pytest.mark.parametrize( "config", [ "[options.extras_require]\nfoo =\n bar;python_version<'3'", 
"[options.extras_require]\nfoo = bar;baz\nboo = xxx;yyy", "[options.extras_require]\nfoo =\n bar;python_version<'3'\n", "[options.extras_require]\nfoo = bar;baz\nboo = xxx;yyy\n", "[options.extras_require]\nfoo =\n bar\n python_version<3\n", "[options]\ninstall_requires =\n bar;python_version<'3'", "[options]\ninstall_requires = bar;baz\nboo = xxx;yyy", "[options]\ninstall_requires =\n bar;python_version<'3'\n", "[options]\ninstall_requires = bar;baz\nboo = xxx;yyy\n", "[options]\ninstall_requires =\n bar\n python_version<3\n", ], ) @pytest.mark.filterwarnings("error::setuptools.SetuptoolsDeprecationWarning") def test_nowarn_accidental_env_marker_misconfig(self, config, tmpdir, recwarn): fake_env(tmpdir, config) num_warnings = len(recwarn) with get_dist(tmpdir) as _: pass # The examples are valid, no warnings shown assert len(recwarn) == num_warnings def test_dash_preserved_extras_require(self, tmpdir): fake_env(tmpdir, '[options.extras_require]\nfoo-a = foo\nfoo_b = test\n') with get_dist(tmpdir) as dist: assert dist.extras_require == {'foo-a': ['foo'], 'foo_b': ['test']} def test_entry_points(self, tmpdir): _, config = fake_env( tmpdir, '[options.entry_points]\n' 'group1 = point1 = pack.module:func, ' '.point2 = pack.module2:func_rest [rest]\n' 'group2 = point3 = pack.module:func2\n', ) with get_dist(tmpdir) as dist: assert dist.entry_points == { 'group1': [ 'point1 = pack.module:func', '.point2 = pack.module2:func_rest [rest]', ], 'group2': ['point3 = pack.module:func2'], } expected = ( '[blogtool.parsers]\n' '.rst = some.nested.module:SomeClass.some_classmethod[reST]\n' ) tmpdir.join('entry_points').write(expected) # From file. config.write('[options]\nentry_points = file: entry_points\n') with get_dist(tmpdir) as dist: assert dist.entry_points == expected def test_case_sensitive_entry_points(self, tmpdir): fake_env( tmpdir, '[options.entry_points]\n' 'GROUP1 = point1 = pack.module:func, ' '.point2 = pack.module2:func_rest [rest]\n' 'group2 = point3 = pack.module:func2\n', ) with get_dist(tmpdir) as dist: assert dist.entry_points == { 'GROUP1': [ 'point1 = pack.module:func', '.point2 = pack.module2:func_rest [rest]', ], 'group2': ['point3 = pack.module:func2'], } def test_data_files(self, tmpdir): fake_env( tmpdir, '[options.data_files]\n' 'cfg =\n' ' a/b.conf\n' ' c/d.conf\n' 'data = e/f.dat, g/h.dat\n', ) with get_dist(tmpdir) as dist: expected = [ ('cfg', ['a/b.conf', 'c/d.conf']), ('data', ['e/f.dat', 'g/h.dat']), ] assert sorted(dist.data_files) == sorted(expected) def test_data_files_globby(self, tmpdir): fake_env( tmpdir, '[options.data_files]\n' 'cfg =\n' ' a/b.conf\n' ' c/d.conf\n' 'data = *.dat\n' 'icons = \n' ' *.ico\n' 'audio = \n' ' *.wav\n' ' sounds.db\n', ) # Create dummy files for glob()'s sake: tmpdir.join('a.dat').write('') tmpdir.join('b.dat').write('') tmpdir.join('c.dat').write('') tmpdir.join('a.ico').write('') tmpdir.join('b.ico').write('') tmpdir.join('c.ico').write('') tmpdir.join('beep.wav').write('') tmpdir.join('boop.wav').write('') tmpdir.join('sounds.db').write('') with get_dist(tmpdir) as dist: expected = [ ('cfg', ['a/b.conf', 'c/d.conf']), ('data', ['a.dat', 'b.dat', 'c.dat']), ('icons', ['a.ico', 'b.ico', 'c.ico']), ('audio', ['beep.wav', 'boop.wav', 'sounds.db']), ] assert sorted(dist.data_files) == sorted(expected) def test_python_requires_simple(self, tmpdir): fake_env( tmpdir, DALS( """ [options] python_requires=>=2.7 """ ), ) with get_dist(tmpdir) as dist: dist.parse_config_files() def test_python_requires_compound(self, tmpdir): fake_env( tmpdir, 
DALS( """ [options] python_requires=>=2.7,!=3.0.* """ ), ) with get_dist(tmpdir) as dist: dist.parse_config_files() def test_python_requires_invalid(self, tmpdir): fake_env( tmpdir, DALS( """ [options] python_requires=invalid """ ), ) with pytest.raises(Exception): with get_dist(tmpdir) as dist: dist.parse_config_files() def test_cmdclass(self, tmpdir): module_path = Path(tmpdir, "src/custom_build.py") # auto discovery for src module_path.parent.mkdir(parents=True, exist_ok=True) module_path.write_text( "from distutils.core import Command\nclass CustomCmd(Command): pass\n", encoding="utf-8", ) setup_cfg = """ [options] cmdclass = customcmd = custom_build.CustomCmd """ fake_env(tmpdir, inspect.cleandoc(setup_cfg)) with get_dist(tmpdir) as dist: cmdclass = dist.cmdclass['customcmd'] assert cmdclass.__name__ == "CustomCmd" assert cmdclass.__module__ == "custom_build" assert module_path.samefile(inspect.getfile(cmdclass)) def test_requirements_file(self, tmpdir): fake_env( tmpdir, DALS( """ [options] install_requires = file:requirements.txt [options.extras_require] colors = file:requirements-extra.txt """ ), ) tmpdir.join('requirements.txt').write('\ndocutils>=0.3\n\n') tmpdir.join('requirements-extra.txt').write('colorama') with get_dist(tmpdir) as dist: assert dist.install_requires == ['docutils>=0.3'] assert dist.extras_require == {'colors': ['colorama']} saved_dist_init = _Distribution.__init__ class TestExternalSetters: # During creation of the setuptools Distribution() object, we call # the init of the parent distutils Distribution object via # _Distribution.__init__ (). # # It's possible distutils calls out to various keyword # implementations (i.e. distutils.setup_keywords entry points) # that may set a range of variables. # # This wraps distutil's Distribution.__init__ and simulates # pbr or something else setting these values. def _fake_distribution_init(self, dist, attrs): saved_dist_init(dist, attrs) # see self._DISTUTILS_UNSUPPORTED_METADATA dist.metadata.long_description_content_type = 'text/something' # Test overwrite setup() args dist.metadata.project_urls = { 'Link One': 'https://example.com/one/', 'Link Two': 'https://example.com/two/', } @patch.object(_Distribution, '__init__', autospec=True) def test_external_setters(self, mock_parent_init, tmpdir): mock_parent_init.side_effect = self._fake_distribution_init dist = Distribution(attrs={'project_urls': {'will_be': 'ignored'}}) assert dist.metadata.long_description_content_type == 'text/something' assert dist.metadata.project_urls == { 'Link One': 'https://example.com/one/', 'Link Two': 'https://example.com/two/', } tests/config/__pycache__/test_apply_pyprojecttoml.cpython-310.pyc 0000644 00000063216 15030122015 0021240 0 ustar 00 o �h�p � @ s� d Z ddlmZ ddlZddlZddlZddlmZ ddlm Z ddl mZ ddlZddl mZ ddlmZ ddlZdd lmZ dd lmZ ddlmZmZmZ ddlmZmZ dd lmZ ddlm Z m!Z! 
ddl"m#Z#m$Z$ ddl%m&Z&m'Z' e e(�j)Z*dZ+dd� Z,dd� Z-ej.�/de'e*e+ ��ej.�0d�ej.j1dd� ���Z2dZ3dZ4dZ5dZ6dZ7d e3fd!d"�Z8d#d$� Z9ej.�/d%g d&��d'd(� �Z:d)d*� Z;d+d,� Z<ej.�/d-ej=e3d.d/d0�ej=e4d1ej.j>d2d3d4�d5d6�f�d7d8� �Z?ej.�/d9ej=e6d:dd;d<d=ej.�0d>�gd?�ej=e7dd@dAdBdCd0�f�dDdE� �Z@dFdG� ZAdHdI� ZBG dJdK� dK�ZCG dLdM� dM�ZDG dNdO� dO�ZEG dPdQ� dQ�ZFG dRdS� dS�ZGG dTdU� dU�ZHG dVdW� dW�ZIG dXdY� dY�ZJd^d\d]�ZKdS )_z�Make sure that applying the configuration from pyproject.toml is equivalent to applying a similar configuration from setup.cfg To run these tests offline, please have a look on ``./downloads/preload.py`` � )�annotationsN)�cleandoc)�Path)�Mock)�LiteTranslator)�Metadata)� is_static)�write_requirements)�expand� pyprojecttoml�setupcfg)�_MissingDynamic�_some_attrgetter��Distribution)�InvalidConfigError�RemovedConfigError)�InformationOnly�SetuptoolsDeprecationWarning� )� retrieve_file�urls_from_filezsetupcfg_examples.txtc K s t d| i|��S )N�src_rootr )�path�attrs� r �c/usr/local/CyberCP/lib/python3.10/site-packages/setuptools/tests/config/test_apply_pyprojecttoml.py�makedist# s r c O s dd� | D �S )z� Allow comparing the given patterns for 2 dist objects. We need to strip special chars to avoid errors when validating. c S s$ g | ]}t jd d|t jd�pd�qS )z [^a-z0-9]+� )�flags�empty)�re�sub�I)�.0�pr r r � <listcomp>, s $ z)_mock_expand_patterns.<locals>.<listcomp>r )�patterns�_�__r r r �_mock_expand_patterns'