diff --git a/3rdparty/python/requirements.txt b/3rdparty/python/requirements.txt index e1bac9cb47f..235933b335d 100644 --- a/3rdparty/python/requirements.txt +++ b/3rdparty/python/requirements.txt @@ -3,8 +3,6 @@ beautifulsoup4>=4.6.0,<4.7 cffi==1.13.2 contextlib2==0.5.5 coverage>=4.5,<4.6 -# TODO remove this pin once we resolve https://github.com/pantsbuild/pants/issues/8502 -cryptography==2.7 dataclasses==0.6 docutils==0.14 fasteners==0.15.0 @@ -12,7 +10,7 @@ Markdown==2.1.1 packaging==16.8 parameterized==0.6.1 pathspec==0.5.9 -pex==1.6.12 +pex==2.1.4 psutil==5.6.3 Pygments==2.3.1 pyopenssl==17.3.0 @@ -24,8 +22,8 @@ py_zipkin==0.18.4 requests[security]>=2.20.1 responses==0.10.4 setproctitle==1.1.10 -setuptools==40.6.3 +setuptools==44.0.0 toml==0.10.0 typing-extensions==3.7.4 -wheel==0.31.1 +wheel==0.33.6 www-authenticate==0.9.2 diff --git a/contrib/awslambda/python/src/python/pants/contrib/awslambda/python/subsystems/lambdex.py b/contrib/awslambda/python/src/python/pants/contrib/awslambda/python/subsystems/lambdex.py index 4c976766574..357341093ef 100644 --- a/contrib/awslambda/python/src/python/pants/contrib/awslambda/python/subsystems/lambdex.py +++ b/contrib/awslambda/python/src/python/pants/contrib/awslambda/python/subsystems/lambdex.py @@ -9,5 +9,5 @@ class Lambdex(PythonToolBase): default_version = 'lambdex==0.1.3' # TODO(John Sirois): Remove when we can upgrade to a version of lambdex with # https://github.com/wickman/lambdex/issues/6 fixed. - default_extra_requirements = ['setuptools==40.8.0'] + default_extra_requirements = ['setuptools==44.0.0'] default_entry_point = 'lambdex.bin.lambdex' diff --git a/contrib/python/tests/python/pants_test/contrib/python/checks/tasks/checkstyle/BUILD b/contrib/python/tests/python/pants_test/contrib/python/checks/tasks/checkstyle/BUILD index b55369df631..1a29567e966 100644 --- a/contrib/python/tests/python/pants_test/contrib/python/checks/tasks/checkstyle/BUILD +++ b/contrib/python/tests/python/pants_test/contrib/python/checks/tasks/checkstyle/BUILD @@ -5,7 +5,6 @@ python_tests( dependencies=[ '3rdparty/python:parameterized', '3rdparty/python:pex', - '3rdparty/python:wheel', '//:build_root', '//:pants_pex', 'build-support/regexes', @@ -20,4 +19,5 @@ python_tests( 'tests/python/pants_test/backend/python/tasks:python_task_test_base', ], tags = {'partially_type_checked'}, + timeout=480, ) diff --git a/contrib/python/tests/python/pants_test/contrib/python/checks/tasks/checkstyle/test_checkstyle.py b/contrib/python/tests/python/pants_test/contrib/python/checks/tasks/checkstyle/test_checkstyle.py index 45613052fa6..0cab7305f81 100644 --- a/contrib/python/tests/python/pants_test/contrib/python/checks/tasks/checkstyle/test_checkstyle.py +++ b/contrib/python/tests/python/pants_test/contrib/python/checks/tasks/checkstyle/test_checkstyle.py @@ -17,8 +17,8 @@ from pants.util.dirutil import safe_mkdtemp, safe_rmtree from pants_test.backend.python.tasks.python_task_test_base import PythonTaskTestBase from parameterized import parameterized +from pex import resolver from pex.interpreter import PythonInterpreter -from wheel.install import WheelFile from pants.contrib.python.checks.tasks.checkstyle.checkstyle import Checkstyle @@ -26,10 +26,13 @@ CHECKER_RESOLVE_METHOD = [('sys.path', True), ('resolve', False)] +# IMPORTANT NOTE: This test fails if run in a chroot. +# To run it, use `./pants test.pytest --no-chroot`. +# One more reason to kill this plugin... 
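For context, a minimal sketch (not part of the patch) of what the new `install_wheel` amounts to under pex 2.x: the wheel is resolved via `pex.resolver.resolve` and the locations of the resulting installed distributions are what later get appended to `sys.path` in `resolve_configuration`. The wheel path here is hypothetical; `build_checker_wheel()` produces the real one.

import sys

from pex import resolver

checker_whl = 'dist/checker-0.0.1-py3-none-any.whl'  # hypothetical path, for illustration only

# Each ResolvedDistribution wraps a wheel that pex has already installed into a chroot,
# so its distribution.location is directly importable.
importable_paths = [resolved_dist.distribution.location
                    for resolved_dist in resolver.resolve(requirements=[checker_whl])]
sys.path.extend(importable_paths)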
class CheckstyleTest(PythonTaskTestBase): py2_constraint = 'CPython>=2.7,<3' - py3_constraint = 'CPython>=3.4,<3.6' + py3_constraint = 'CPython>=3.6,<3.8' @staticmethod def build_checker_wheel(root_dir: str) -> str: @@ -49,12 +52,9 @@ def build_checker_wheel(root_dir: str) -> str: return str(next(wheel_files)) @staticmethod - def install_wheel(wheel, root_dir): - importable_path = os.path.join(root_dir, 'install', os.path.basename(wheel)) - overrides = {path: importable_path - for path in ('purelib', 'platlib', 'headers', 'scripts', 'data')} - WheelFile(wheel).install(force=True, overrides=overrides) - return importable_path + def install_wheel(wheel): + return [resolved_dist.distribution.location + for resolved_dist in resolver.resolve(requirements=[wheel])] _distdir = None _checker_dist = None @@ -64,7 +64,7 @@ def install_wheel(wheel, root_dir): def setUpClass(cls): cls._distdir = safe_mkdtemp() cls._checker_dist = cls.build_checker_wheel(cls._distdir) - cls._checker_dist_importable_path = cls.install_wheel(cls._checker_dist, cls._distdir) + cls._checker_dist_importable_path = cls.install_wheel(cls._checker_dist) @classmethod def tearDownClass(cls): @@ -85,7 +85,7 @@ def resolve_configuration(self, resolve_local=False): self.set_options_for_scope(PythonSetup.options_scope, interpreter_constraints=[constraint]) prior = sys.path[:] - sys.path.append(self._checker_dist_importable_path) + sys.path.extend(self._checker_dist_importable_path) try: yield finally: diff --git a/pants.remote.toml b/pants.remote.toml index 560ee4cb444..f39c67a907b 100644 --- a/pants.remote.toml +++ b/pants.remote.toml @@ -36,12 +36,17 @@ process_execution_speculation_delay = 15 # TODO(#7735): This config is not ideal, that we must specify the PATH for both local and remote # platforms. This should be replaced by a proper mechanism to differentiate between the two. interpreter_search_paths = [ + # These are the interpreter paths we set up on the remote container, plus `/usr/bin`, so that + # pip can find `ld` if necessary. + "/pyenv-docker-build/versions/3.7.3/bin:/pyenv-docker-build/versions/3.6.8/bin:/pyenv-docker-build/versions/2.7.15/bin:/usr/bin", # We include the host PATH and PEXRC values so that speculation still works. - "", - "", - # This is the $PATH of the docker container, obtained by locally running `$ docker run --tag - # rbe-remote-execution sh -c 'echo $PATH'`. - "/pyenv-docker-build/versions/3.7.3/bin:/pyenv-docker-build/versions/3.6.8/bin:/pyenv-docker-build/versions/2.7.15/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/go/bin", + # NOTE: These come after the remote paths. Putting them before the remote paths means generic + # bin dirs like /usr/bin will be on the PATH ahead of the pyenv dirs we actually want to use + # on the remote side. The /pyenv-docker-build/ paths are unlikely to exist on local systems, + # and so will not interfere with interpreter discovery there. This emphasizes + # that we should fix #7735, and not commingle the paths of two unrelated systems. + '', + '', ] [python-native-code] diff --git a/pants.toml b/pants.toml index 40ec02ba6e6..ba68cab13d8 100644 --- a/pants.toml +++ b/pants.toml @@ -396,7 +396,7 @@ pytest_plugins.add = [ "pytest-rerunfailures", # TODO(#8651): We need this until we switch to implicit namespace packages so that pytest-cov # understands our __init__ files. NB: this version matches 3rdparty/python/requirements.txt. 
- "setuptools==40.6.3", + "setuptools==44.0.0", "ipdb", ] diff --git a/pants.travis-ci.toml b/pants.travis-ci.toml index 53925fda986..6bd56fa00a5 100644 --- a/pants.travis-ci.toml +++ b/pants.travis-ci.toml @@ -3,11 +3,16 @@ # Turn off all nailgun use. execution_strategy = "subprocess" +# If we use typical default process parallelism tied to core count, we see too many cores under +# travis and either get oomkilled from launching too many processes with too much total memory +# overhead or else just generally thrash the container and slow things down. +travis_parallelism = 4 + [compile.rsc] -# If we use the default of 1 worker per core, we see too many cores under travis -# and get oomkilled from launching too many workers with too much total memory -# overhead. -worker_count = 4 +worker_count = "%(travis_parallelism)s" + +[python-setup] +resolver_jobs = "%(travis_parallelism)s" [test.pytest] # NB: We set a maximum timeout of 9.8 minutes to fail before hitting Travis' 10 minute timeout (which diff --git a/src/python/pants/backend/awslambda/python/lambdex.py b/src/python/pants/backend/awslambda/python/lambdex.py index 787ddd47af2..055f2de72e5 100644 --- a/src/python/pants/backend/awslambda/python/lambdex.py +++ b/src/python/pants/backend/awslambda/python/lambdex.py @@ -9,5 +9,5 @@ class Lambdex(PythonToolBase): default_version = 'lambdex==0.1.3' # TODO(John Sirois): Remove when we can upgrade to a version of lambdex with # https://github.com/wickman/lambdex/issues/6 fixed. - default_extra_requirements = ['setuptools==40.8.0'] + default_extra_requirements = ['setuptools==44.0.0'] default_entry_point = 'lambdex.bin.lambdex' diff --git a/src/python/pants/backend/python/rules/download_pex_bin.py b/src/python/pants/backend/python/rules/download_pex_bin.py index 08726fbd0e3..adf5740b243 100644 --- a/src/python/pants/backend/python/rules/download_pex_bin.py +++ b/src/python/pants/backend/python/rules/download_pex_bin.py @@ -37,13 +37,14 @@ def directory_digest(self) -> Digest: class Factory(Script): options_scope = 'download-pex-bin' name = 'pex' - default_version = 'v1.6.12' + default_version = 'v2.1.4' + # Note: You can compute the digest and size using: + # curl -L $URL | tee >(wc -c) >(shasum -a 256) >/dev/null default_versions_and_digests = { PlatformConstraint.none: ToolForPlatform( - digest=Digest('ce64cb72cd23d2123dd48126af54ccf2b718d9ecb98c2ed3045ed1802e89e7e1', - 1842359), - version=ToolVersion('v1.6.12'), + digest=Digest('6c5ae1f6b9aa40c97bd26a154849044b49f4d698a6abb9ac58ce006bda9cbd4a', 2614246), + version=ToolVersion('v2.1.4'), ), } diff --git a/src/python/pants/backend/python/rules/pex.py b/src/python/pants/backend/python/rules/pex.py index 06d2b597286..d6869f4f132 100644 --- a/src/python/pants/backend/python/rules/pex.py +++ b/src/python/pants/backend/python/rules/pex.py @@ -98,10 +98,16 @@ async def create_pex( constraints.""" argv = ["--output-file", request.output_filename] + if python_setup.resolver_jobs: + argv.extend(["--jobs", python_setup.resolver_jobs]) if request.entry_point is not None: argv.extend(["--entry-point", request.entry_point]) argv.extend(request.interpreter_constraints.generate_pex_arg_list()) argv.extend(request.additional_args) + if python_setup.manylinux: + argv.extend(['--manylinux', python_setup.manylinux]) + else: + argv.append('--no-manylinux') source_dir_name = 'source_files' argv.append(f'--sources-directory={source_dir_name}') @@ -114,8 +120,8 @@ async def create_pex( merged_digest = await Get[Digest](DirectoriesToMerge(directories=all_inputs)) # NB: 
PEX outputs are platform dependent so in order to get a PEX that we can use locally, without - # cross-building, we specify that out PEX command be run on the current local platform. When we - # support cross-building through CLI flags we can configure requests that build a PEX for out + # cross-building, we specify that our PEX command be run on the current local platform. When we + # support cross-building through CLI flags we can configure requests that build a PEX for our # local platform that are able to execute on a different platform, but for now in order to # guarantee correct build we need to restrict this command to execute on the same platform type # that the output is intended for. The correct way to interpret the keys diff --git a/src/python/pants/backend/python/subsystems/pex_build_util.py b/src/python/pants/backend/python/subsystems/pex_build_util.py index e50ac3768a5..174a47362d1 100644 --- a/src/python/pants/backend/python/subsystems/pex_build_util.py +++ b/src/python/pants/backend/python/subsystems/pex_build_util.py @@ -25,14 +25,14 @@ def has_python_sources(tgt: Target) -> bool: return is_python_target(tgt) and tgt.has_sources() -def is_local_python_dist(tgt: Target) -> bool: - return isinstance(tgt, PythonDistribution) - - def has_resources(tgt: Target) -> bool: return isinstance(tgt, Files) and tgt.has_sources() +def is_local_python_dist(tgt: Target) -> bool: + return isinstance(tgt, PythonDistribution) + + def has_python_requirements(tgt: Target) -> bool: return isinstance(tgt, PythonRequirementLibrary) diff --git a/src/python/pants/backend/python/subsystems/python_native_code.py b/src/python/pants/backend/python/subsystems/python_native_code.py index c6e43b800e1..d830435f839 100644 --- a/src/python/pants/backend/python/subsystems/python_native_code.py +++ b/src/python/pants/backend/python/subsystems/python_native_code.py @@ -10,11 +10,9 @@ from pants.backend.native.subsystems.native_toolchain import NativeToolchain from pants.backend.native.targets.native_library import NativeLibrary from pants.backend.python.subsystems import pex_build_util -from pants.backend.python.subsystems.executable_pex_tool import ExecutablePexTool from pants.backend.python.targets.python_distribution import PythonDistribution from pants.base.exceptions import IncompatiblePlatformsError from pants.engine.rules import rule, subsystem_rule -from pants.python.python_requirement import PythonRequirement from pants.python.python_setup import PythonSetup from pants.subsystem.subsystem import Subsystem from pants.util.memo import memoized_property @@ -124,25 +122,6 @@ def check_build_for_current_platform_only(self, targets): )) -class BuildSetupRequiresPex(ExecutablePexTool): - options_scope = 'build-setup-requires-pex' - - @classmethod - def register_options(cls, register): - super().register_options(register) - register('--setuptools-version', advanced=True, fingerprint=True, default='40.6.3', - help='The setuptools version to use when executing `setup.py` scripts.') - register('--wheel-version', advanced=True, fingerprint=True, default='0.32.3', - help='The wheel version to use when executing `setup.py` scripts.') - - @property - def base_requirements(self): - return [ - PythonRequirement('setuptools=={}'.format(self.get_options().setuptools_version)), - PythonRequirement('wheel=={}'.format(self.get_options().wheel_version)), - ] - - @dataclass(frozen=True) class PexBuildEnvironment: cpp_flags: Tuple[str, ...] 
diff --git a/src/python/pants/backend/python/subsystems/python_setup.py b/src/python/pants/backend/python/subsystems/python_setup.py index d5352623280..c23b3d72f9a 100644 --- a/src/python/pants/backend/python/subsystems/python_setup.py +++ b/src/python/pants/backend/python/subsystems/python_setup.py @@ -2,7 +2,7 @@ # Licensed under the Apache License, Version 2.0 (see LICENSE). from pants.base.deprecated import deprecated_module -from pants.python.python_setup import PythonSetup +from pants.python.python_setup import PythonSetup as PythonSetup deprecated_module('1.27.0.dev0', 'Import from pants.python.python_setup instead.') diff --git a/src/python/pants/backend/python/targets/unpacked_whls.py b/src/python/pants/backend/python/targets/unpacked_whls.py index 960c25396bd..6bf55593994 100644 --- a/src/python/pants/backend/python/targets/unpacked_whls.py +++ b/src/python/pants/backend/python/targets/unpacked_whls.py @@ -6,6 +6,7 @@ from twitter.common.collections import maybe_list from pants.backend.python.targets.import_wheels_mixin import ImportWheelsMixin +from pants.base.deprecated import deprecated_conditional from pants.base.payload import Payload from pants.base.payload_field import PrimitiveField from pants.build_graph.target import Target @@ -48,15 +49,21 @@ def __init__(self, module_name, libraries=None, include_patterns=None, exclude_p :param compatibility: Python interpreter constraints used to create the pex for the requirement target. If unset, the default interpreter constraints are used. This argument is unnecessary unless the native code depends on libpython. - :param str within_data_subdir: If provided, descend into '-.data/' when - matching `include_patterns`. For python wheels which declare any - non-code data, this is usually needed to extract that without - manually specifying the relative path, including the package - version. For example, when `data_files` is used in a setup.py, - `within_data_subdir='data'` will allow specifying - `include_patterns` matching exactly what is specified in the - setup.py. + :param bool within_data_subdir: If True, descend into '-.data/' when matching + `include_patterns`. For python wheels which declare any non-code + data, this is usually needed to extract that without manually + specifying the relative path, including the package version. For + example, when `data_files` is used in a setup.py, + `within_data_subdir=True` will allow specifying + `include_patterns` matching exactly what is specified in the + setup.py. 
""" + deprecated_conditional( + lambda: type(within_data_subdir) not in (bool, type(None)), + removal_version='1.27.0.dev2', + entity_description='A non-boolean value for `within_data_subdir`', + hint_message='The location of the .data subdirectory will be inferred from the module name!', + ) payload = payload or Payload() payload.add_fields({ 'library_specs': PrimitiveField(libraries or ()), diff --git a/src/python/pants/backend/python/tasks/BUILD b/src/python/pants/backend/python/tasks/BUILD index 4fe76e5acc8..2d37d0bc48f 100644 --- a/src/python/pants/backend/python/tasks/BUILD +++ b/src/python/pants/backend/python/tasks/BUILD @@ -5,6 +5,7 @@ python_library( dependencies=[ '3rdparty/python:dataclasses', '3rdparty/python:pex', + '3rdparty/python:wheel', '3rdparty/python/twitter/commons:twitter.common.collections', '3rdparty/python/twitter/commons:twitter.common.dirutil', 'src/python/pants/backend/native/config', diff --git a/src/python/pants/backend/python/tasks/build_local_python_distributions.py b/src/python/pants/backend/python/tasks/build_local_python_distributions.py index aec66f2abf5..b744d34ea8f 100644 --- a/src/python/pants/backend/python/tasks/build_local_python_distributions.py +++ b/src/python/pants/backend/python/tasks/build_local_python_distributions.py @@ -5,26 +5,24 @@ import os import re import shutil +from pathlib import Path -from pex import pep425tags from pex.interpreter import PythonInterpreter +from wheel import pep425tags from pants.backend.native.targets.native_library import NativeLibrary from pants.backend.native.tasks.link_shared_libraries import SharedLibrary from pants.backend.python.subsystems.pex_build_util import is_local_python_dist -from pants.backend.python.subsystems.python_native_code import ( - BuildSetupRequiresPex, - PythonNativeCode, -) +from pants.backend.python.subsystems.python_native_code import PythonNativeCode from pants.backend.python.targets.python_requirement_library import PythonRequirementLibrary from pants.base.build_environment import get_buildroot from pants.base.exceptions import TargetDefinitionException, TaskError from pants.base.workunit import WorkUnitLabel from pants.build_graph.address import Address from pants.python.python_requirement import PythonRequirement +from pants.python.setup_py_runner import SetupPyRunner from pants.task.task import Task from pants.util.collections import assert_single_element -from pants.util.contextutil import pushd from pants.util.dirutil import safe_mkdir_for, split_basename_and_dirname from pants.util.memo import memoized_property from pants.util.strutil import safe_shlex_join @@ -67,7 +65,7 @@ def implementation_version(cls): @classmethod def subsystem_dependencies(cls): return super().subsystem_dependencies() + ( - BuildSetupRequiresPex.scoped(cls), + SetupPyRunner.Factory.scoped(cls), PythonNativeCode.scoped(cls), ) @@ -77,9 +75,13 @@ class BuildLocalPythonDistributionsError(TaskError): pass def _python_native_code_settings(self): return PythonNativeCode.scoped_instance(self) - @memoized_property - def _build_setup_requires_pex_settings(self): - return BuildSetupRequiresPex.scoped_instance(self) + def _build_setup_py_runner(self, extra_reqs=None, interpreter=None, pex_file_path=None): + return SetupPyRunner.Factory.create( + scope=self, + extra_reqs=extra_reqs, + interpreter=interpreter, + pex_file_path=pex_file_path + ) # TODO: This should probably be made into an @classproperty (see PR #5901). 
@property @@ -209,24 +211,23 @@ def _prepare_and_create_dist(self, interpreter, shared_libs_product, versioned_t 'Installing setup requirements: {}\n\n' .format([req.key for req in setup_reqs_to_resolve])) - setup_reqs_pex_path = os.path.join( + pex_file_path = os.path.join( setup_requires_dir, - f'setup-requires-{versioned_target_fingerprint}.pex') - setup_requires_pex = self._build_setup_requires_pex_settings.bootstrap( - interpreter, setup_reqs_pex_path, extra_reqs=setup_reqs_to_resolve) - self.context.log.debug('Using pex file as setup.py interpreter: {}' - .format(setup_requires_pex.path())) + f'setup-py-runner-{versioned_target_fingerprint}.pex') + setup_py_runner = self._build_setup_py_runner( + interpreter=interpreter, extra_reqs=setup_reqs_to_resolve, pex_file_path=pex_file_path) + self.context.log.debug(f'Using pex file as setup.py interpreter: {setup_py_runner}') self._create_dist( dist_target, dist_output_dir, - setup_requires_pex, + setup_py_runner, versioned_target_fingerprint, is_platform_specific) # NB: "snapshot" refers to a "snapshot release", not a Snapshot. def _generate_snapshot_bdist_wheel_argv(self, snapshot_fingerprint, is_platform_specific): - """Create a command line to pass to :class:`SetupPyRunner`. + """Create a command line to generate a wheel via `setup.py`. Note that distutils will convert `snapshot_fingerprint` into a string suitable for a version tag. Currently for versioned target fingerprints, this seems to convert all punctuation into @@ -244,8 +245,7 @@ def _generate_snapshot_bdist_wheel_argv(self, snapshot_fingerprint, is_platform_ dist_dir_args = ['--dist-dir', self._DIST_OUTPUT_DIR] - return (['setup.py'] + - egg_info_snapshot_tag_args + + return (egg_info_snapshot_tag_args + bdist_whl_args + platform_args + dist_dir_args) @@ -253,7 +253,7 @@ def _generate_snapshot_bdist_wheel_argv(self, snapshot_fingerprint, is_platform_ def _create_dist(self, dist_tgt, dist_target_dir, - setup_requires_pex, + setup_py_runner, snapshot_fingerprint, is_platform_specific): """Create a .whl file for the specified python_distribution target.""" @@ -262,24 +262,17 @@ def _create_dist(self, setup_py_snapshot_version_argv = self._generate_snapshot_bdist_wheel_argv( snapshot_fingerprint, is_platform_specific) - cmd = safe_shlex_join(setup_requires_pex.cmdline(setup_py_snapshot_version_argv)) + cmd = safe_shlex_join(setup_py_runner.cmdline(setup_py_snapshot_version_argv)) with self.context.new_workunit('setup.py', cmd=cmd, labels=[WorkUnitLabel.TOOL]) as workunit: - with pushd(dist_target_dir): - result = setup_requires_pex.run(args=setup_py_snapshot_version_argv, - stdout=workunit.output('stdout'), - stderr=workunit.output('stderr')) - if result != 0: - raise self.BuildLocalPythonDistributionsError( - "Installation of python distribution from target {target} into directory {into_dir} " - "failed (return value of run() was: {rc!r}).\n" - "The pex with any requirements is located at: {interpreter}.\n" - "The host system's compiler and linker were used.\n" - "The setup command was: {command}." 
- .format(target=dist_tgt, - into_dir=dist_target_dir, - rc=result, - interpreter=setup_requires_pex.path(), - command=setup_py_snapshot_version_argv)) + try: + setup_py_runner.run_setup_command(source_dir=Path(dist_target_dir), + setup_command=setup_py_snapshot_version_argv, + stdout=workunit.output('stdout'), + stderr=workunit.output('stderr')) + except SetupPyRunner.CommandFailure as e: + raise self.BuildLocalPythonDistributionsError( + f"Installation of python distribution from target {dist_tgt} into directory " + f"{dist_target_dir} failed using the host system's compiler and linker: {e}") # TODO: convert this into a SimpleCodegenTask, which does the exact same thing as this method! def _inject_synthetic_dist_requirements(self, dist, req_lib_addr): diff --git a/src/python/pants/backend/python/tasks/select_interpreter.py b/src/python/pants/backend/python/tasks/select_interpreter.py index 9e87a29c01e..9f9612c9017 100644 --- a/src/python/pants/backend/python/tasks/select_interpreter.py +++ b/src/python/pants/backend/python/tasks/select_interpreter.py @@ -4,7 +4,6 @@ import hashlib import os -from pex.executor import Executor from pex.interpreter import PythonInterpreter from pants.backend.python.interpreter_cache import PythonInterpreterCache @@ -104,9 +103,7 @@ def _get_interpreter(self, interpreter_path_file, targets): binary = infile.read().strip() try: return PythonInterpreter.from_binary(binary) - except Executor.ExecutableNotFound: - # TODO(John Sirois): Trap a more appropriate exception once available: - # https://github.com/pantsbuild/pex/issues/672 + except PythonInterpreter.Error: self.context.log.info('Stale interpreter reference detected: {}, removing reference and ' 'selecting a new interpreter.'.format(binary)) os.remove(interpreter_path_file) diff --git a/src/python/pants/backend/python/tasks/setup_py.py b/src/python/pants/backend/python/tasks/setup_py.py index 4630ee4b536..034713ead48 100644 --- a/src/python/pants/backend/python/tasks/setup_py.py +++ b/src/python/pants/backend/python/tasks/setup_py.py @@ -9,8 +9,9 @@ import textwrap from abc import ABC, abstractmethod from collections import OrderedDict, defaultdict +from pathlib import Path +from typing import Dict -from pex.installer import Packager, WheelInstaller from pex.interpreter import PythonInterpreter from pex.pex import PEX from pex.pex_builder import PEXBuilder @@ -30,6 +31,7 @@ from pants.build_graph.address_lookup_error import AddressLookupError from pants.build_graph.build_graph import sort_targets from pants.build_graph.resources import Resources +from pants.python.setup_py_runner import SetupPyRunner from pants.task.task import Task from pants.util.contextutil import temporary_file from pants.util.dirutil import safe_concurrent_creation, safe_rmtree, safe_walk @@ -47,17 +49,6 @@ """ -class SetupPyRunner(WheelInstaller): - # We extend WheelInstaller to make sure `setuptools` and `wheel` are available to setup.py. 
- - def __init__(self, source_dir, setup_command, **kw): - self._setup_command = setup_command - super().__init__(source_dir, **kw) - - def setup_command(self): - return self._setup_command - - class TargetAncestorIterator: """Supports iteration of target ancestor lineages.""" @@ -345,6 +336,10 @@ def dependencies(self, target): for binary in target.provided_binaries.values(): yield binary + @classmethod + def subsystem_dependencies(cls): + return super().subsystem_dependencies() + (SetupPyRunner.Factory.scoped(cls),) + @classmethod def prepare(cls, options, round_manager): round_manager.require_data(PythonInterpreter) @@ -613,14 +608,14 @@ def is_exported_python_target(t): # phase to ensure an exported target is, for example (--run="sdist upload"), uploaded before any # exported target that depends on it is uploaded. - created = {} + created: Dict[PythonTarget, Path] = {} def create(exported_python_target): if exported_python_target not in created: self.context.log.info('Creating setup.py project for {}'.format(exported_python_target)) subject = self.derived_by_original.get(exported_python_target, exported_python_target) setup_dir, dependencies = self.create_setup_py(subject, dist_dir) - created[exported_python_target] = setup_dir + created[exported_python_target] = Path(setup_dir) if self._recursive: for dep in dependencies: if is_exported_python_target(dep): @@ -631,23 +626,29 @@ def create(exported_python_target): interpreter = self.context.products.get_data(PythonInterpreter) python_dists = self.context.products.register_data(self.PYTHON_DISTS_PRODUCT, {}) + + setup_runner = SetupPyRunner.Factory.create( + scope=self, + interpreter=interpreter, + pex_file_path=os.path.join(self.workdir, self.fingerprint, 'setup-py-runner.pex') + ) for exported_python_target in reversed(sort_targets(list(created.keys()))): setup_dir = created.get(exported_python_target) if setup_dir: if not self._run: - self.context.log.info('Running packager against {}'.format(setup_dir)) - setup_runner = Packager(setup_dir, interpreter=interpreter) - tgz_name = os.path.basename(setup_runner.sdist()) + self.context.log.info('Running sdist against {}'.format(setup_dir)) + sdist = setup_runner.sdist(setup_dir) + tgz_name = sdist.name sdist_path = os.path.join(dist_dir, tgz_name) self.context.log.info('Writing {}'.format(sdist_path)) - shutil.move(setup_runner.sdist(), sdist_path) - safe_rmtree(setup_dir) + shutil.move(sdist, sdist_path) + safe_rmtree(str(setup_dir)) python_dists[exported_python_target] = sdist_path else: self.context.log.info('Running {} against {}'.format(self._run, setup_dir)) split_command = safe_shlex_split(self._run) - setup_runner = SetupPyRunner(setup_dir, split_command, interpreter=interpreter) - installed = setup_runner.run() - if not installed: - raise TaskError('Install failed.') + try: + setup_runner.run_setup_command(source_dir=setup_dir, setup_command=split_command) + except SetupPyRunner.CommandFailure as e: + raise TaskError(f'Install failed: {e}') python_dists[exported_python_target] = setup_dir diff --git a/src/python/pants/backend/python/tasks/unpack_wheels.py b/src/python/pants/backend/python/tasks/unpack_wheels.py index 15b465e5333..ee26eec48a7 100644 --- a/src/python/pants/backend/python/tasks/unpack_wheels.py +++ b/src/python/pants/backend/python/tasks/unpack_wheels.py @@ -10,7 +10,6 @@ from pants.backend.python.targets.unpacked_whls import UnpackedWheels from pants.base.exceptions import TaskError from pants.base.fingerprint_strategy import DefaultFingerprintHashingMixin, 
FingerprintStrategy -from pants.fs.archive import ZIP from pants.python.pex_build_util import PexBuilderWrapper from pants.python.python_setup import PythonSetup from pants.task.unpack_remote_sources_base import UnpackRemoteSourcesBase @@ -50,16 +49,30 @@ def subsystem_dependencies(cls): PythonSetup, ) - class _NativeCodeExtractionSetupFailure(Exception): pass + class SingleDistExtractionError(Exception): pass def _get_matching_wheel(self, pex_path, interpreter, requirements, module_name): - """Use PexBuilderWrapper to resolve a single wheel from the requirement specs using pex.""" + """Use PexBuilderWrapper to resolve a single wheel from the requirement specs using pex. + + N.B.: The resolved wheel is already "unpacked" by PEX. More accurately, it's installed in a + chroot. + """ with self.context.new_workunit('extract-native-wheels'): with safe_concurrent_creation(pex_path) as chroot: pex_builder = PexBuilderWrapper.Factory.create( builder=PEXBuilder(path=chroot, interpreter=interpreter), log=self.context.log) - return pex_builder.extract_single_dist_for_current_platform(requirements, module_name) + + resolved_dists = pex_builder.resolve_distributions(requirements, platforms=['current']) + + matched_dists = [resolved_dist.distribution for resolved_dist in resolved_dists + if resolved_dist.distribution.key == module_name] + if len(matched_dists) != 1: + raise self.SingleDistExtractionError( + f"Exactly one dist was expected to match name {module_name} in requirements " + f"{requirements}, found {matched_dists}" + ) + return matched_dists[0] @memoized_method def _compatible_interpreter(self, unpacked_whls): @@ -72,22 +85,18 @@ class WheelUnpackingError(TaskError): pass def unpack_target(self, unpacked_whls, unpack_dir): interpreter = self._compatible_interpreter(unpacked_whls) - with temporary_dir() as resolve_dir,\ - temporary_dir() as extract_dir: + with temporary_dir() as resolve_dir: try: matched_dist = self._get_matching_wheel(resolve_dir, interpreter, unpacked_whls.all_imported_requirements, unpacked_whls.module_name) - ZIP.extract(matched_dist.location, extract_dir) + wheel_chroot = matched_dist.location if unpacked_whls.within_data_subdir: - data_dir_prefix = '{name}-{version}.data/{subdir}'.format( - name=matched_dist.project_name, - version=matched_dist.version, - subdir=unpacked_whls.within_data_subdir, - ) - dist_data_dir = os.path.join(extract_dir, data_dir_prefix) + # N.B.: Wheels with data dirs have the data installed under the top module. + dist_data_dir = os.path.join(wheel_chroot, unpacked_whls.module_name) else: - dist_data_dir = extract_dir + dist_data_dir = wheel_chroot + unpack_filter = self.get_unpack_filter(unpacked_whls) # Copy over the module's data files into `unpack_dir`. 
mergetree(dist_data_dir, unpack_dir, file_filter=unpack_filter) diff --git a/src/python/pants/init/BUILD b/src/python/pants/init/BUILD index 56b6508b65d..22db835a2a8 100644 --- a/src/python/pants/init/BUILD +++ b/src/python/pants/init/BUILD @@ -7,7 +7,6 @@ python_library( '3rdparty/python:dataclasses', '3rdparty/python:pex', '3rdparty/python:setuptools', - '3rdparty/python:wheel', '3rdparty/python/twitter/commons:twitter.common.collections', 'src/python/pants/base:build_environment', 'src/python/pants/base:build_root', diff --git a/src/python/pants/init/plugin_resolver.py b/src/python/pants/init/plugin_resolver.py index b49179d4d11..f2cd7f5e768 100644 --- a/src/python/pants/init/plugin_resolver.py +++ b/src/python/pants/init/plugin_resolver.py @@ -10,12 +10,10 @@ from pex import resolver from pex.interpreter import PythonInterpreter from pkg_resources import working_set as global_working_set -from wheel.install import WheelFile from pants.option.global_options import GlobalOptionsRegistrar from pants.python.python_repos import PythonRepos -from pants.util.contextutil import temporary_dir -from pants.util.dirutil import safe_delete, safe_mkdir, safe_open +from pants.util.dirutil import safe_delete, safe_open from pants.util.memo import memoized_property from pants.util.strutil import ensure_text from pants.version import PANTS_SEMVER @@ -29,29 +27,6 @@ class PluginResolver: def _is_wheel(path): return os.path.isfile(path) and path.endswith('.whl') - @classmethod - def _activate_wheel(cls, wheel_path): - install_dir = '{}-install'.format(wheel_path) - if not os.path.isdir(install_dir): - with temporary_dir(root_dir=os.path.dirname(install_dir)) as tmp: - cls._install_wheel(wheel_path, tmp) - os.rename(tmp, install_dir) - # Activate any .pth files installed above. - site.addsitedir(install_dir) - return install_dir - - @classmethod - def _install_wheel(cls, wheel_path, install_dir): - safe_mkdir(install_dir, clean=True) - WheelFile(wheel_path).install(force=True, - overrides={ - 'purelib': install_dir, - 'headers': os.path.join(install_dir, 'headers'), - 'scripts': os.path.join(install_dir, 'bin'), - 'platlib': install_dir, - 'data': install_dir - }) - def __init__(self, options_bootstrapper, *, interpreter=None): self._options_bootstrapper = options_bootstrapper self._interpreter = interpreter or PythonInterpreter.get() @@ -71,10 +46,9 @@ def resolve(self, working_set=None): """ working_set = working_set or global_working_set if self._plugin_requirements: - for plugin_location in self._resolve_plugin_locations(): - if self._is_wheel(plugin_location): - plugin_location = self._activate_wheel(plugin_location) - working_set.add_entry(plugin_location) + for resolved_plugin_location in self._resolve_plugin_locations(): + site.addsitedir(resolved_plugin_location) # Activate any .pth files plugin wheels may have. + working_set.add_entry(resolved_plugin_location) return working_set def _resolve_plugin_locations(self): @@ -106,16 +80,11 @@ def _resolve_plugin_locations(self): def _resolve_plugins(self): logger.info('Resolving new plugins...:\n {}'.format('\n '.join(self._plugin_requirements))) resolved_dists = resolver.resolve(self._plugin_requirements, - fetchers=self._python_repos.get_fetchers(), + indexes=self._python_repos.indexes, + find_links=self._python_repos.repos, interpreter=self._interpreter, - context=self._python_repos.get_network_context(), cache=self.plugin_cache_dir, - # Effectively never expire. 
- cache_ttl=10 * 365 * 24 * 60 * 60, - allow_prereleases=PANTS_SEMVER.is_prerelease, - # Plugins will all depend on `pantsbuild.pants` which is - # distributed as a manylinux wheel. - use_manylinux=True) + allow_prereleases=PANTS_SEMVER.is_prerelease) return [resolved_dist.distribution for resolved_dist in resolved_dists] @property diff --git a/src/python/pants/python/BUILD b/src/python/pants/python/BUILD index 4b914b63913..82b8ce7f4a0 100644 --- a/src/python/pants/python/BUILD +++ b/src/python/pants/python/BUILD @@ -13,6 +13,7 @@ python_library( 'src/python/pants/subsystem', 'src/python/pants/util:collections', 'src/python/pants/util:contextutil', + 'src/python/pants/util:dirutil', 'src/python/pants/util:memo', ] ) diff --git a/src/python/pants/python/pex_build_util.py b/src/python/pants/python/pex_build_util.py index f15b52fec37..f4b4a2aaac4 100644 --- a/src/python/pants/python/pex_build_util.py +++ b/src/python/pants/python/pex_build_util.py @@ -6,9 +6,8 @@ from pathlib import Path from typing import Callable, Sequence, Set -from pex.fetcher import Fetcher from pex.pex_builder import PEXBuilder -from pex.resolver import resolve +from pex.resolver import resolve_multi from pex.util import DistributionHelper from twitter.common.collections import OrderedSet @@ -20,7 +19,6 @@ from pants.python.python_requirement import PythonRequirement from pants.python.python_setup import PythonSetup from pants.subsystem.subsystem import Subsystem -from pants.util.collections import assert_single_element from pants.util.contextutil import temporary_file @@ -78,7 +76,7 @@ class Factory(Subsystem): @classmethod def register_options(cls, register): super(PexBuilderWrapper.Factory, cls).register_options(register) - register('--setuptools-version', advanced=True, default='40.6.3', + register('--setuptools-version', advanced=True, default='44.0.0', help='The setuptools version to include in the pex if namespace packages need to be ' 'injected.') @@ -133,99 +131,56 @@ def add_requirement_libs_from(self, req_libs, platforms=None): reqs = [req for req_lib in req_libs for req in req_lib.requirements] self.add_resolved_requirements(reqs, platforms=platforms) - class SingleDistExtractionError(Exception): pass + def resolve_distributions(self, reqs, platforms=None): + """Multi-platform dependency resolution. - def extract_single_dist_for_current_platform(self, reqs, dist_key): - """Resolve a specific distribution from a set of requirements matching the current platform. - - :param list reqs: A list of :class:`PythonRequirement` to resolve. - :param str dist_key: The value of `distribution.key` to match for a `distribution` from the - resolved requirements. - :return: The single :class:`pkg_resources.Distribution` matching `dist_key`. - :raises: :class:`self.SingleDistExtractionError` if no dists or multiple dists matched the given - `dist_key`. + :param reqs: A list of :class:`PythonRequirement` to resolve. + :param platforms: A list of platform strings to resolve requirements for. + Defaults to the platforms specified by PythonSetup. + :returns: List of :class:`pex.resolver.ResolvedDistribution` instances meeting requirements for + the given platforms. 
""" - distributions = self._resolve_distributions_by_platform(reqs, platforms=['current']) - try: - matched_dist = assert_single_element(list( - dist - for _, dists in distributions.items() - for dist in dists - if dist.key == dist_key - )) - except (StopIteration, ValueError) as e: - raise self.SingleDistExtractionError( - f"Exactly one dist was expected to match name {dist_key} in requirements {reqs}: {e!r}" - ) - return matched_dist - - def _resolve_distributions_by_platform(self, reqs, platforms): deduped_reqs = OrderedSet(reqs) find_links = OrderedSet() for req in deduped_reqs: - self._log.debug(f' Dumping requirement: {req}') - self._builder.add_requirement(str(req.requirement)) if req.repository: find_links.add(req.repository) - # Resolve the requirements into distributions. - distributions = self._resolve_multi(self._builder.interpreter, deduped_reqs, platforms, - find_links) - return distributions + return self._resolve_multi(deduped_reqs, platforms=platforms, find_links=find_links) def add_resolved_requirements(self, reqs, platforms=None): """Multi-platform dependency resolution for PEX files. - :param reqs: A list of :class:`PythonRequirement` to resolve. - :param platforms: A list of :class:`Platform`s to resolve requirements for. + :param reqs: A list of :class:`PythonRequirement`s to resolve. + :param platforms: A list of platform strings to resolve requirements for. Defaults to the platforms specified by PythonSetup. """ - distributions = self._resolve_distributions_by_platform(reqs, platforms=platforms) - locations = set() - for platform, dists in distributions.items(): - for dist in dists: - if dist.location not in locations: - self._log.debug(f' Dumping distribution: .../{os.path.basename(dist.location)}') - self.add_distribution(dist) - locations.add(dist.location) - - def _resolve_multi(self, interpreter, requirements, platforms, find_links): - """Multi-platform dependency resolution for PEX files. + for resolved_dist in self.resolve_distributions(reqs, platforms=platforms): + requirement = resolved_dist.requirement + self._log.debug(f' Dumping requirement: {requirement}') + self._builder.add_requirement(str(requirement)) - Returns a list of distributions that must be included in order to satisfy a set of requirements. - That may involve distributions for multiple platforms. + distribution = resolved_dist.distribution + self._log.debug(f' Dumping distribution: .../{os.path.basename(distribution.location)}') + self.add_distribution(distribution) - :param interpreter: The :class:`PythonInterpreter` to resolve for. - :param requirements: A list of :class:`PythonRequirement` objects to resolve. - :param platforms: A list of :class:`Platform`s to resolve for. - :param find_links: Additional paths to search for source packages during resolution. - :return: Map of platform name -> list of :class:`pkg_resources.Distribution` instances needed - to satisfy the requirements on that platform. 
- """ + def _resolve_multi(self, requirements, platforms=None, find_links=None): python_setup = self._python_setup_subsystem python_repos = self._python_repos_subsystem platforms = platforms or python_setup.platforms - find_links = find_links or [] - distributions = {} - fetchers = python_repos.get_fetchers() - fetchers.extend(Fetcher([path]) for path in find_links) - - for platform in platforms: - requirements_cache_dir = os.path.join(python_setup.resolver_cache_dir, - str(interpreter.identity)) - resolved_dists = resolve( - requirements=[str(req.requirement) for req in requirements], - interpreter=interpreter, - fetchers=fetchers, - platform=platform, - context=python_repos.get_network_context(), - cache=requirements_cache_dir, - cache_ttl=python_setup.resolver_cache_ttl, - allow_prereleases=python_setup.resolver_allow_prereleases, - use_manylinux=python_setup.use_manylinux) - distributions[platform] = [resolved_dist.distribution for resolved_dist in resolved_dists] - - return distributions + find_links = list(find_links) if find_links else [] + find_links.extend(python_repos.repos) + + return resolve_multi( + requirements=[str(req.requirement) for req in requirements], + interpreters=[self._builder.interpreter], + indexes=python_repos.indexes, + find_links=find_links, + platforms=platforms, + cache=python_setup.resolver_cache_dir, + allow_prereleases=python_setup.resolver_allow_prereleases, + manylinux=python_setup.manylinux, + max_parallel_jobs=python_setup.resolver_jobs) def add_sources_from(self, tgt: Target) -> None: dump_source = _create_source_dumper(self._builder, tgt) @@ -238,7 +193,7 @@ def add_sources_from(self, tgt: Target) -> None: raise if (getattr(tgt, '_resource_target_specs', None) or - getattr(tgt, '_synthetic_resources_target', None)): + getattr(tgt, '_synthetic_resources_target', None)): # No one should be on old-style resources any more. And if they are, # switching to the new python pipeline will be a great opportunity to fix that. raise TaskError( diff --git a/src/python/pants/python/python_repos.py b/src/python/pants/python/python_repos.py index d058f92ce23..e49db028b96 100644 --- a/src/python/pants/python/python_repos.py +++ b/src/python/pants/python/python_repos.py @@ -1,58 +1,7 @@ # Copyright 2014 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). -import logging - -from pex.fetcher import Fetcher, PyPIFetcher -from pex.http import RequestsContext, StreamFilelike, requests - from pants.subsystem.subsystem import Subsystem -from pants.util.memo import memoized_method - - -logger = logging.getLogger(__name__) - - -# TODO: These methods of RequestsContext are monkey-patched out to work around -# https://github.com/pantsbuild/pex/issues/26: we should upstream a fix for this. -_REQUESTS_TIMEOUTS = (15, 30) - - -def _open_monkey(self, link): - # requests does not support file:// -- so we must short-circuit manually - if link.local: - return open(link.local_path, 'rb') # noqa: T802 - for attempt in range(self._max_retries + 1): - try: - return StreamFilelike(self._session.get( - link.url, verify=self._verify, stream=True, headers={'User-Agent': self.USER_AGENT}, - timeout=_REQUESTS_TIMEOUTS), - link) - except requests.exceptions.ReadTimeout: - # Connect timeouts are handled by the HTTPAdapter, unfortunately read timeouts are not - # so we'll retry them ourselves. - logger.warning(f'Read timeout trying to fetch {link.url}, retrying. 
' - f'{self._max_retries - attempt} retries remain.') - except requests.exceptions.RequestException as e: - raise self.Error(e) - - raise self.Error( - requests.packages.urllib3.exceptions.MaxRetryError( - None, - link, - 'Exceeded max retries of %d' % self._max_retries)) - - -def _resolve_monkey(self, link): - return link.wrap(self._session.head( - link.url, verify=self._verify, allow_redirects=True, - headers={'User-Agent': self.USER_AGENT}, - timeout=_REQUESTS_TIMEOUTS, - ).url) - - -RequestsContext.open = _open_monkey -RequestsContext.resolve = _resolve_monkey class PythonRepos(Subsystem): @@ -78,15 +27,3 @@ def repos(self): @property def indexes(self): return self.get_options().indexes - - @memoized_method - def get_fetchers(self): - fetchers = [] - fetchers.extend(Fetcher([url]) for url in self.repos) - fetchers.extend(PyPIFetcher(url) for url in self.indexes) - return fetchers - - @memoized_method - def get_network_context(self): - # TODO(wickman): Add retry, conn_timeout, threads, etc configuration here. - return RequestsContext() diff --git a/src/python/pants/python/python_setup.py b/src/python/pants/python/python_setup.py index 8654fb6d3e8..bff2a17f83e 100644 --- a/src/python/pants/python/python_setup.py +++ b/src/python/pants/python/python_setup.py @@ -20,6 +20,8 @@ class PythonSetup(Subsystem): """A python environment.""" options_scope = 'python-setup' + _DEFAULT_MANYLINUX_UPPER_BOUND = 'manylinux2014' + @classmethod def register_options(cls, register): super().register_options(register) @@ -44,10 +46,6 @@ def register_options(cls, register): register('--resolver-cache-dir', advanced=True, default=None, metavar='', help='The parent directory for the requirement resolver cache. ' 'If unspecified, a standard path under the workdir is used.') - register('--resolver-cache-ttl', advanced=True, type=int, metavar='', - default=10 * 365 * 86400, # 10 years. - help='The time in seconds before we consider re-resolving an open-ended requirement, ' - 'e.g. "flask>=0.2" if a matching distribution is available on disk.') register('--resolver-allow-prereleases', advanced=True, type=bool, default=UnsetBool, fingerprint=True, help='Whether to include pre-releases when resolving requirements.') register('--artifact-cache-dir', advanced=True, default=None, metavar='', @@ -60,9 +58,18 @@ def register_options(cls, register): '"" (the contents of the PATH env var), ' '"" (paths in the PEX_PYTHON_PATH variable in a pexrc file), ' '"" (all python versions under $(pyenv root)/versions).') - register('--resolver-use-manylinux', advanced=True, type=bool, default=True, fingerprint=True, - help='Whether to consider manylinux wheels when resolving requirements for linux ' - 'platforms.') + register('--resolver-use-manylinux', advanced=True, type=bool, default=False, fingerprint=True, + removal_version='1.27.0.dev2', + removal_hint='Use --resolver-manylinux= instead.', + help='Whether to consider manylinux wheels when resolving requirements for foreign' + 'linux platforms.') + register('--resolver-manylinux', advanced=True, type=str, + default=cls._DEFAULT_MANYLINUX_UPPER_BOUND, fingerprint=True, + help='Whether to allow resolution of manylinux wheels when resolving requirements for ' + 'foreign linux platforms. 
The value should be a manylinux platform upper bound, ' + 'e.g.: manylinux2010, or else [Ff]alse, [Nn]o or [Nn]one to disallow.') + register('--resolver-jobs', type=int, default=None, advanced=True, fingerprint=True, + help='The maximum number of concurrent jobs to resolve wheels with.') @property def interpreter_constraints(self) -> Tuple[str, ...]: @@ -91,17 +98,29 @@ def resolver_cache_dir(self): return (self.get_options().resolver_cache_dir or os.path.join(self.scratch_dir, 'resolved_requirements')) - @property - def resolver_cache_ttl(self): - return self.get_options().resolver_cache_ttl - @property def resolver_allow_prereleases(self): return self.get_options().resolver_allow_prereleases @property - def use_manylinux(self): - return self.get_options().resolver_use_manylinux + def manylinux(self): + if self.get_options().resolver_manylinux: + manylinux = self.get_options().resolver_manylinux + if manylinux.lower() in ('false', 'no', 'none'): + if self.get_options().resolver_use_manylinux: + logger.warning('The [{scope}] manylinux option is explicitly set to {manylinux} ' + 'over-riding the [{scope}] use_manylinux option.'. + format(scope=self.options_scope, manylinux=manylinux)) + return None + return manylinux + elif self.get_options().resolver_use_manylinux: + return self._DEFAULT_MANYLINUX_UPPER_BOUND + else: + return None + + @property + def resolver_jobs(self): + return self.get_options().resolver_jobs @property def artifact_cache_dir(self): diff --git a/src/python/pants/python/setup_py_runner.py b/src/python/pants/python/setup_py_runner.py new file mode 100644 index 00000000000..36efdeadf32 --- /dev/null +++ b/src/python/pants/python/setup_py_runner.py @@ -0,0 +1,141 @@ +# Copyright 2019 Pants project contributors (see CONTRIBUTORS.md). +# Licensed under the Apache License, Version 2.0 (see LICENSE). + +import os +from pathlib import Path +from typing import Callable, Iterable, List, Optional + +from pex.interpreter import PythonInterpreter +from pex.pex import PEX +from pex.pex_info import PexInfo + +from pants.backend.python.subsystems.executable_pex_tool import ExecutablePexTool +from pants.option.optionable import Optionable +from pants.python.python_requirement import PythonRequirement +from pants.util.contextutil import pushd +from pants.util.dirutil import safe_mkdtemp +from pants.util.memo import memoized_method +from pants.util.strutil import safe_shlex_join + + +class SetupPyRunner: + """A utility capable of executing setup.py commands in a hermetic environment. + + Supports `setuptools` and `wheel` distutils commands by default. 
+ """ + + class Factory(ExecutablePexTool): + options_scope = 'setup-py-runner' + deprecated_options_scope = 'build-setup-requires-pex' + deprecated_options_scope_removal_version = '1.27.0.dev2' + + @classmethod + def register_options(cls, register: Callable[..., None]) -> None: + super().register_options(register) + register('--setuptools-version', advanced=True, fingerprint=True, default='44.0.0', + help='The setuptools version to use when executing `setup.py` scripts.') + register('--wheel-version', advanced=True, fingerprint=True, default='0.33.6', + help='The wheel version to use when executing `setup.py` scripts.') + + @classmethod + def create( + cls, + *, + pex_file_path: Optional[Path] = None, + extra_reqs: Optional[List[PythonRequirement]] = None, + interpreter: Optional[PythonInterpreter] = None, + scope: Optional[Optionable] = None, + ) -> 'SetupPyRunner': + factory = cls.scoped_instance(scope) if scope is not None else cls.global_instance() + requirements_pex = factory.bootstrap( + interpreter=interpreter, + pex_file_path=pex_file_path or os.path.join(safe_mkdtemp(), 'setup-py-runner.pex'), + extra_reqs=extra_reqs + ) + return SetupPyRunner(requirements_pex=requirements_pex) + + @property + def base_requirements(self): + return [ + PythonRequirement(f'setuptools=={self.get_options().setuptools_version}'), + PythonRequirement(f'wheel=={self.get_options().wheel_version}'), + ] + + class CommandFailure(Exception): + """Indicates an error executing setup.py commands.""" + + def __init__(self, requirements_pex: PEX) -> None: + self._requirements_pex = requirements_pex + + @memoized_method + def __str__(self) -> str: + pex_path = self._requirements_pex.path() + pex_info = PexInfo.from_pex(pex_path) + requirements = "\n ".join(map(str, pex_info.requirements)) + return f'{type(self).__name__} at {pex_path} with requirements:\n {requirements} ' + + def _create_python_args(self, setup_command: Iterable[str]) -> Iterable[str]: + args = ['setup.py', '--no-user-cfg'] + args.extend(setup_command) + return args + + def cmdline(self, setup_command: Iterable[str]) -> Iterable[str]: + """Returns the command line that would be used to execute the given setup.py command.""" + args = self._create_python_args(setup_command) + cmdline: List[str] = self._requirements_pex.cmdline(args) + return cmdline + + def run_setup_command(self, *, source_dir: Path, setup_command: Iterable[str], **kwargs) -> None: + """Runs the given setup.py command against the setup.py project in `source_dir`. + + :raises: :class:`SetupPyRunner.CommandFailure` if there was a problem executing the command. 
+ """ + with pushd(str(source_dir)): + result = self._requirements_pex.run(args=self._create_python_args(setup_command), **kwargs) + if result != 0: + pex_command = safe_shlex_join(self.cmdline(setup_command)) + raise self.CommandFailure(f'Failed to execute {pex_command} using {self}') + + def _collect_distribution( + self, + source_dir: Path, + setup_command: Iterable[str], + dist_dir: Path + ) -> Path: + + assert source_dir.is_dir() + self._source_dir = source_dir + + self.run_setup_command( + source_dir=source_dir, + setup_command=setup_command + ) + + dists = os.listdir(dist_dir) + if len(dists) == 0: + raise self.CommandFailure('No distribution was produced!') + if len(dists) > 1: + ambiguous_dists = "\n ".join(dists) + raise self.CommandFailure(f'Ambiguous distributions found:\n {ambiguous_dists}') + + return dist_dir.joinpath(dists[0]) + + @memoized_method + def sdist(self, source_dir: Path) -> Path: + """Generates an sdist from the setup.py project at `source_dir` and returns the sdist path.""" + dist_dir = safe_mkdtemp() + return self._collect_distribution( + source_dir=source_dir, + setup_command=['sdist', '--dist-dir', dist_dir], + dist_dir=Path(dist_dir) + ) + + @memoized_method + def bdist(self, source_dir: Path) -> Path: + """Generates a wheel from the setup.py project at `source_dir` and returns the wheel path.""" + dist_dir = safe_mkdtemp() + return self._collect_distribution( + source_dir=source_dir, + setup_command=['bdist_wheel', '--dist-dir', dist_dir], + dist_dir=Path(dist_dir) + ) diff --git a/src/python/pants/util/strutil.py b/src/python/pants/util/strutil.py index ee6c7d841e1..aa50bd66a3c 100644 --- a/src/python/pants/util/strutil.py +++ b/src/python/pants/util/strutil.py @@ -3,7 +3,7 @@ import re import shlex -from typing import Any, Dict, List, Optional, Sequence, Union +from typing import Any, Dict, Iterable, List, Optional, Sequence, Union def ensure_binary(text_or_binary: Union[bytes, str]) -> bytes: @@ -54,7 +54,7 @@ def shell_quote(s: str) -> str: return "'" + s.replace("'", "'\"'\"'") + "'" -def safe_shlex_join(arg_list: Sequence[str]) -> str: +def safe_shlex_join(arg_list: Iterable[str]) -> str: """Join a list of strings into a shlex-able string. Shell-quotes each argument with `shell_quote()`. 
diff --git a/tests/python/pants_test/backend/codegen/thrift/python/test_apache_thrift_py_gen.py b/tests/python/pants_test/backend/codegen/thrift/python/test_apache_thrift_py_gen.py index b10ea61c916..c113ac2619c 100644 --- a/tests/python/pants_test/backend/codegen/thrift/python/test_apache_thrift_py_gen.py +++ b/tests/python/pants_test/backend/codegen/thrift/python/test_apache_thrift_py_gen.py @@ -176,8 +176,8 @@ def test_namespace_effective(self): for resolved_dist in resolve([f'thrift=={self.get_thrift_version(apache_thrift_gen)}', 'setuptools==40.6.3'], interpreter=interpreter, - context=python_repos.get_network_context(), - fetchers=python_repos.get_fetchers()): + indexes=python_repos.indexes, + find_links=python_repos.repos): pythonpath.append(resolved_dist.distribution.location) process = subprocess.Popen([interpreter.binary, diff --git a/tests/python/pants_test/backend/jvm/tasks/test_jvm_run_integration.py b/tests/python/pants_test/backend/jvm/tasks/test_jvm_run_integration.py index 1e71202e091..9badf6af9c9 100644 --- a/tests/python/pants_test/backend/jvm/tasks/test_jvm_run_integration.py +++ b/tests/python/pants_test/backend/jvm/tasks/test_jvm_run_integration.py @@ -13,11 +13,7 @@ def _exec_run(self, target, *args): :param args: list of arguments to append to the command :return: stdout as a string on success, raises an Exception on error """ - # Avoid some known-to-choke-on interpreters. - command = ['run', - target, - '--python-setup-interpreter-constraints=CPython>=2.7,<3', - '--python-setup-interpreter-constraints=CPython>=3.3'] + list(args) + command = ['run', target] + list(args) pants_run = self.run_pants(command) self.assert_success(pants_run) return pants_run.stdout_data @@ -38,9 +34,7 @@ def test_no_run_cwd(self): # Make sure the test fails if you don't specify a directory pants_run = self.run_pants(['run', - 'testprojects/src/java/org/pantsbuild/testproject/cwdexample', - '--python-setup-interpreter-constraints=CPython>=2.7,<3', - '--python-setup-interpreter-constraints=CPython>=3.3']) + 'testprojects/src/java/org/pantsbuild/testproject/cwdexample']) self.assert_failure(pants_run) self.assertIn('Neither ExampleCwd.java nor readme.txt found.', pants_run.stderr_data) diff --git a/tests/python/pants_test/backend/python/tasks/native/test_ctypes_integration.py b/tests/python/pants_test/backend/python/tasks/native/test_ctypes_integration.py index 8b729d8f8f4..b22afe366d1 100644 --- a/tests/python/pants_test/backend/python/tasks/native/test_ctypes_integration.py +++ b/tests/python/pants_test/backend/python/tasks/native/test_ctypes_integration.py @@ -17,7 +17,7 @@ from pants.util.contextutil import temporary_dir from pants.util.dirutil import is_executable, read_file, safe_file_dump from pants.util.enums import match -from pants_test.backend.python.tasks.python_task_test_base import name_and_platform +from pants_test.backend.python.tasks.util.wheel import name_and_platform def invoke_pex_for_output(pex_file_to_run): @@ -208,6 +208,11 @@ def test_pants_native_source_detection_for_local_ctypes_dists_for_current_platfo (2) a different platform than the one we are currently running on. The python_binary() target below is declared with `platforms="current"`. """ + + # The implementation abbreviation of 'dne' (does not exist), is ~guaranteed not to match our + # current platform while still providing an overall valid platform identifier string. 
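Aside (not part of the patch): the extended platform string used below decomposes as sketched here. That pex reads the rightmost three dash-separated fields as implementation, Python version, and ABI is an assumption; a real CPython 3.7 value would end in 'cp-37-m', so 'dne' can never match an installed interpreter.

# Assumed parse of an extended platform string (rightmost three dash-separated fields):
platform_str = 'macosx-10.5-x86_64-dne-37-m'
*platform_parts, impl, version, abi = platform_str.split('-')
# '-'.join(platform_parts) == 'macosx-10.5-x86_64', impl == 'dne', version == '37', abi == 'm'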
+ foreign_platform = 'macosx-10.5-x86_64-dne-37-m' + command = [ 'run', 'testprojects/src/python/python_distribution/ctypes:bin' @@ -219,7 +224,7 @@ def test_pants_native_source_detection_for_local_ctypes_dists_for_current_platfo 'toolchain_variant': 'llvm', }, 'python-setup': { - 'platforms': ['current', 'this_platform_does_not_exist'] + 'platforms': ['current', foreign_platform] }, }) self.assert_success(pants_run) diff --git a/tests/python/pants_test/backend/python/tasks/python_task_test_base.py b/tests/python/pants_test/backend/python/tasks/python_task_test_base.py index 189aa90dc3f..ba82ee67b80 100644 --- a/tests/python/pants_test/backend/python/tasks/python_task_test_base.py +++ b/tests/python/pants_test/backend/python/tasks/python_task_test_base.py @@ -4,8 +4,6 @@ import os from textwrap import dedent -from pex import pep425tags - from pants.backend.python.register import build_file_aliases as register_python from pants.backend.python.targets.python_binary import PythonBinary from pants.build_graph.address import Address @@ -13,27 +11,6 @@ from pants.testutil.task_test_base import TaskTestBase -def normalize_platform_tag(platform_tag): - return platform_tag.replace('-', '_') - - -def name_and_platform(whl): - # The wheel filename is of the format - # {distribution}-{version}(-{build tag})?-{python tag}-{abi tag}-{platform tag}.whl - # See https://www.python.org/dev/peps/pep-0425/. - # We don't care about the python or abi versions (they depend on what we're currently - # running on), we just want to make sure we have all the platforms we expect. - parts = os.path.splitext(whl)[0].split('-') - dist = parts[0] - version = parts[1] - platform_tag = parts[-1] - return dist, version, normalize_platform_tag(platform_tag) - - -def normalized_current_platform(): - return normalize_platform_tag(pep425tags.get_platform()) - - class PythonTaskTestBase(TaskTestBase): """ :API: public diff --git a/tests/python/pants_test/backend/python/tasks/test_build_local_python_distributions.py b/tests/python/pants_test/backend/python/tasks/test_build_local_python_distributions.py index c87b52f67eb..4145a591420 100644 --- a/tests/python/pants_test/backend/python/tasks/test_build_local_python_distributions.py +++ b/tests/python/pants_test/backend/python/tasks/test_build_local_python_distributions.py @@ -1,13 +1,9 @@ # Copyright 2017 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). 
-import re from collections import OrderedDict -import pex.resolver - from pants.backend.python.targets.python_distribution import PythonDistribution -from pants.backend.python.targets.python_library import PythonLibrary from pants.backend.python.targets.python_requirement_library import PythonRequirementLibrary from pants.python.python_requirement import PythonRequirement from pants_test.backend.python.tasks.util.build_local_dists_test_base import ( @@ -85,15 +81,6 @@ class TestBuildLocalPythonDistributions(BuildLocalPythonDistributionsTestBase): """, }, }), - - ('src/python/install_requires:conflict', { - 'key': 'install_requires_conflict', - 'target_type': PythonLibrary, - 'dependencies': [ - '3rdparty/python:pycountry', - 'src/python/install_requires:install_requires', - ], - }), ]) def test_create_distribution(self): @@ -123,15 +110,3 @@ def test_install_requires(self): expected_platform=self.ExpectedPlatformType.any, dist_target=install_requires_dist, ) - - def test_install_requires_conflict(self): - install_requires_dist = self.target_dict['install_requires'] - pycountry_req_lib = self.target_dict['pycountry'] - conflicting_lib = self.target_dict['install_requires_conflict'] - - with self.assertRaisesRegex( - pex.resolver.Unsatisfiable, - re.escape('Could not satisfy all requirements for pycountry==18.5.20:')): - self._create_distribution_synthetic_target( - install_requires_dist, - extra_targets=[pycountry_req_lib, conflicting_lib]) diff --git a/tests/python/pants_test/backend/python/tasks/test_pytest_run.py b/tests/python/pants_test/backend/python/tasks/test_pytest_run.py index eb74d626c24..de7ec2ce361 100644 --- a/tests/python/pants_test/backend/python/tasks/test_pytest_run.py +++ b/tests/python/pants_test/backend/python/tasks/test_pytest_run.py @@ -2,13 +2,13 @@ # Licensed under the Apache License, Version 2.0 (see LICENSE). import configparser -import functools import os from contextlib import contextmanager from textwrap import dedent import coverage +from pants.backend.python.subsystems.python_setup import PythonSetup from pants.backend.python.targets.python_tests import PythonTests from pants.backend.python.tasks.gather_sources import GatherSources from pants.backend.python.tasks.pytest_prep import PytestPrep @@ -52,6 +52,17 @@ def task_type(cls): PytestPrepCoverageVersionPinned, ] + @classmethod + def setUpClass(cls): + super().setUpClass() + cls.resolver_cache_dir = safe_mkdtemp() + + def set_other_options(self): + self.set_options_for_scope( + PythonSetup.options_scope, + resolver_cache_dir=self.resolver_cache_dir, + ) + _CONFTEST_CONTENT = '# I am an existing root-level conftest file.' 
_default_test_options = { @@ -69,6 +80,7 @@ def _augment_options(self, options): def run_tests(self, targets, *passthru_args, **options): """Run the tests in the specified targets, with the specified PytestRun task options.""" self.set_options(**self._augment_options(options)) + self.set_other_options() with pushd(self.build_root): result = self.invoke_tasks( target_roots=targets, @@ -78,6 +90,7 @@ def run_tests(self, targets, *passthru_args, **options): def run_failing_tests(self, targets, failed_targets, *passthru_args, **options): self.set_options(**self._augment_options(options)) + self.set_other_options() with self.assertRaises(ErrorWhileTesting) as cm: with pushd(self.build_root): self.invoke_tasks( @@ -876,100 +889,6 @@ def test_sharding_invalid_shard_bad_format(self): with self.assertRaises(PytestRun.InvalidShardSpecification): self.run_tests(targets=[self.green], test_shard='1/a') - @contextmanager - def marking_tests(self): - init_subsystem(Target.Arguments) - init_subsystem(SourceRootConfig) - - with temporary_dir() as marker_dir: - self.create_file( - 'test/python/passthru/test_passthru.py', - dedent(""" - import inspect - import os - import pytest - import unittest - - - class PassthruTest(unittest.TestCase): - def touch(self, path): - with open(path, 'wb') as fp: - fp.close() - - def mark_test_run(self): - caller_frame_record = inspect.stack()[1] - - # For the slot breakdown of a frame record tuple, see: - # https://docs.python.org/2/library/inspect.html#the-interpreter-stack - _, _, _, caller_func_name, _, _ = caller_frame_record - - marker_file = os.path.join({marker_dir!r}, caller_func_name) - self.touch(marker_file) - - def test_one(self): - self.mark_test_run() - - @pytest.mark.purple - def test_two(self): - self.mark_test_run() - - def test_three(self): - self.mark_test_run() - - @pytest.mark.red - def test_four(self): - self.mark_test_run() - - @pytest.mark.green - def test_five(self): - self.mark_test_run() - """.format(marker_dir=marker_dir))) - - def assert_mark(exists, name): - message = f"{('Expected' if exists else 'Did not expect')} {name!r} to be executed." 
- marker_file = os.path.join(marker_dir, name) - self.assertEqual(exists, os.path.exists(marker_file), message) - - self.add_to_build_file('test/python/passthru', 'python_tests()') - test = self.target('test/python/passthru') - yield test, functools.partial(assert_mark, True), functools.partial(assert_mark, False) - - def test_passthrough_args_facility_single_style(self): - with self.marking_tests() as (target, assert_test_run, assert_test_not_run): - self.run_tests([target], '-ktest_one or test_two') - assert_test_run('test_one') - assert_test_run('test_two') - assert_test_not_run('test_three') - assert_test_not_run('test_four') - assert_test_not_run('test_five') - - def test_passthrough_args_facility_plus_arg_style(self): - with self.marking_tests() as (target, assert_test_run, assert_test_not_run): - self.run_tests([target], '-m', 'purple or red') - assert_test_not_run('test_one') - assert_test_run('test_two') - assert_test_not_run('test_three') - assert_test_run('test_four') - assert_test_not_run('test_five') - - def test_passthrough_added_after_options(self): - with self.marking_tests() as (target, assert_test_run, assert_test_not_run): - self.run_tests([target], '-m', 'purple or red', '-k', 'two') - assert_test_not_run('test_one') - assert_test_run('test_two') - assert_test_not_run('test_three') - assert_test_not_run('test_four') - assert_test_not_run('test_five') - - def test_options_shlexed(self): - with self.marking_tests() as (target, assert_test_run, assert_test_not_run): - self.run_tests([target], "-m", "purple or red") - assert_test_not_run('test_one') - assert_test_run('test_two') - assert_test_not_run('test_three') - assert_test_run('test_four') - assert_test_not_run('test_five') - @contextmanager def run_with_junit_xml_dir(self, targets): with temporary_dir() as dist: diff --git a/tests/python/pants_test/backend/python/tasks/test_pytest_run_integration.py b/tests/python/pants_test/backend/python/tasks/test_pytest_run_integration.py index d37fda0bb91..f635b3dd205 100644 --- a/tests/python/pants_test/backend/python/tasks/test_pytest_run_integration.py +++ b/tests/python/pants_test/backend/python/tasks/test_pytest_run_integration.py @@ -200,7 +200,7 @@ def test_pants_test_interpreter_selection_with_option_3(self): command=[ 'test', f"{os.path.join(self.testproject, 'python_3_selection_testing')}:test_py3", - '--python-setup-interpreter-constraints=["CPython>=3"]', + '--python-setup-interpreter-constraints=["CPython>=3.6"]', ], config=pants_ini_config ) diff --git a/tests/python/pants_test/backend/python/tasks/test_python_binary_integration.py b/tests/python/pants_test/backend/python/tasks/test_python_binary_integration.py index 07af51390c4..1c1b5dd18b0 100644 --- a/tests/python/pants_test/backend/python/tasks/test_python_binary_integration.py +++ b/tests/python/pants_test/backend/python/tasks/test_python_binary_integration.py @@ -12,9 +12,9 @@ from pants.util.contextutil import open_zip, temporary_dir -_LINUX_PLATFORM = "linux-x86_64" +_LINUX_PLATFORM = "linux-x86_64-cp-36-m" _LINUX_WHEEL_SUBSTRING = "manylinux" -_OSX_PLATFORM = "macosx-10.13-x86_64" +_OSX_PLATFORM = "macosx-10.13-x86_64-cp-36-m" _OSX_WHEEL_SUBSTRING = "macosx" @@ -123,8 +123,8 @@ def platforms_test_impl( want_present_platforms, want_missing_platforms=(), ): - def numpy_deps(deps): - return [d for d in deps if 'numpy' in d] + def p537_deps(deps): + return [d for d in deps if 'p537' in d] def assertInAny(substring, collection): self.assertTrue(any(substring in d for d in collection), f'Expected an entry 
matching "{substring}" in {collection}') @@ -139,7 +139,7 @@ def assertNotInAny(substring, collection): with self.caching_config() as config, self.mock_buildroot() as buildroot, buildroot.pushd(): config['python-setup'] = { - 'platforms': None + 'platforms': [] } buildroot.write_file(test_src, '') @@ -154,7 +154,7 @@ def assertNotInAny(substring, collection): python_requirement_library( name='numpy', requirements=[ - python_requirement('numpy==1.14.5') + python_requirement('p537==1.0.4') ] ) @@ -176,7 +176,7 @@ def assertNotInAny(substring, collection): self.assert_success(result) with open_zip(test_pex) as z: - deps = numpy_deps(z.namelist()) + deps = p537_deps(z.namelist()) for platform in want_present_platforms: assertInAny(platform, deps) for platform in want_missing_platforms: diff --git a/tests/python/pants_test/backend/python/tasks/test_python_repl_integration.py b/tests/python/pants_test/backend/python/tasks/test_python_repl_integration.py index 132f09dfabf..f84359d1c5b 100644 --- a/tests/python/pants_test/backend/python/tasks/test_python_repl_integration.py +++ b/tests/python/pants_test/backend/python/tasks/test_python_repl_integration.py @@ -31,7 +31,7 @@ def test_run_repl_with_2(self): @ensure_daemon def test_run_repl_with_3(self): - # Run a Python 3 repl on a Python 2/3 library target. Avoid some known-to-choke-on interpreters. + # Run a Python 3 repl on a Python 2/3 library target. command = ['repl', '--python-setup-interpreter-constraints=["CPython>=3.3"]', 'testprojects/src/python/interpreter_selection:echo_interpreter_version_lib', diff --git a/tests/python/pants_test/backend/python/tasks/test_select_interpreter.py b/tests/python/pants_test/backend/python/tasks/test_select_interpreter.py index 4dc80b2f2ec..f8502026afe 100644 --- a/tests/python/pants_test/backend/python/tasks/test_select_interpreter.py +++ b/tests/python/pants_test/backend/python/tasks/test_select_interpreter.py @@ -1,9 +1,11 @@ # Copyright 2016 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). +import json import os import unittest.mock from textwrap import dedent +from typing import Tuple from pex.interpreter import PythonInterpreter @@ -28,27 +30,36 @@ def setUp(self): # We're tied tightly to pex implementation details here faking out a python binary that outputs # only one value no matter what arguments, environment or input stream it has attached. That - # value is the interpreter identity which is - minimally, one line containing: - # - - def fake_interpreter(id_str): + # value is the interpreter identity which is a JSON dict with exactly the following keys: + # binary, python_tag, abi_tag, platform_tag, version, supported_tags, env_markers. 
+ def fake_interpreter(python_tag: str, abi_tag: str, version: Tuple[int, int, int]): interpreter_dir = safe_mkdtemp() binary = os.path.join(interpreter_dir, 'python') + values = dict( + binary=binary, + python_tag=python_tag, + abi_tag=abi_tag, + platform_tag='', + version=version, + supported_tags=[], + env_markers={} + ) + id_str = json.dumps(values) with open(binary, 'w') as fp: - fp.write(dedent(""" - #!{} + fp.write(dedent(f""" + #!{PythonInterpreter.get().binary} from __future__ import print_function - print({!r}) - """.format(PythonInterpreter.get().binary, id_str)).strip()) + print({id_str!r}) + """).strip()) chmod_plus_x(binary) return PythonInterpreter.from_binary(binary) # impl, abi, impl_version, major, minor, patch self.fake_interpreters = [ - fake_interpreter('ip ip2 2 2 77 777'), - fake_interpreter('ip ip2 2 2 88 888'), - fake_interpreter('ip ip2 2 2 99 999') + fake_interpreter(python_tag='ip', abi_tag='ip2', version=(2, 77, 777)), + fake_interpreter(python_tag='ip', abi_tag='ip2', version=(2, 88, 888)), + fake_interpreter(python_tag='ip', abi_tag='ip2', version=(2, 99, 999)), ] self.set_options_for_scope( @@ -75,8 +86,6 @@ def _fake_target(self, spec, compatibility=None, sources=None, dependencies=None dependencies=dependencies, compatibility=compatibility) def _select_interpreter(self, target_roots, should_invalidate=None): - PythonInterpreter.CACHE.clear() - context = self.context(target_roots=target_roots) task = self.create_task(context) diff --git a/tests/python/pants_test/backend/python/tasks/util/BUILD b/tests/python/pants_test/backend/python/tasks/util/BUILD index ee53c6336e7..4217e80771b 100644 --- a/tests/python/pants_test/backend/python/tasks/util/BUILD +++ b/tests/python/pants_test/backend/python/tasks/util/BUILD @@ -3,6 +3,7 @@ python_library( dependencies=[ + '3rdparty/python:wheel', 'src/python/pants/backend/native', 'src/python/pants/backend/python/tasks', 'src/python/pants/python', diff --git a/tests/python/pants_test/backend/python/tasks/util/build_local_dists_test_base.py b/tests/python/pants_test/backend/python/tasks/util/build_local_dists_test_base.py index 3a34b24c44d..47b899271e9 100644 --- a/tests/python/pants_test/backend/python/tasks/util/build_local_dists_test_base.py +++ b/tests/python/pants_test/backend/python/tasks/util/build_local_dists_test_base.py @@ -18,8 +18,8 @@ from pants.util.collections import assert_single_element from pants.util.enums import match from pants.util.meta import classproperty -from pants_test.backend.python.tasks.python_task_test_base import ( - PythonTaskTestBase, +from pants_test.backend.python.tasks.python_task_test_base import PythonTaskTestBase +from pants_test.backend.python.tasks.util.wheel import ( name_and_platform, normalized_current_platform, ) @@ -123,6 +123,7 @@ def _assert_dist_and_wheel_identity(self, expected_name, expected_version, expec expected_platform = match(expected_platform, { BuildLocalPythonDistributionsTestBase.ExpectedPlatformType.any: "any", - BuildLocalPythonDistributionsTestBase.ExpectedPlatformType.current: normalized_current_platform(), + BuildLocalPythonDistributionsTestBase.ExpectedPlatformType.current: + normalized_current_platform(), }) self.assertEquals(platform, expected_platform) diff --git a/tests/python/pants_test/backend/python/tasks/util/wheel.py b/tests/python/pants_test/backend/python/tasks/util/wheel.py new file mode 100644 index 00000000000..97cd6f77fcb --- /dev/null +++ b/tests/python/pants_test/backend/python/tasks/util/wheel.py @@ -0,0 +1,29 @@ +# Copyright 2020 Pants 
project contributors (see CONTRIBUTORS.md). +# Licensed under the Apache License, Version 2.0 (see LICENSE). + +import os +from os import PathLike +from typing import Tuple, Union + +from wheel import pep425tags + + +def _normalize_platform_tag(platform_tag: str) -> str: + return platform_tag.replace('-', '_') + + +def name_and_platform(whl: Union[str, PathLike]) -> Tuple[str, str, str]: + # The wheel filename is of the format + # {distribution}-{version}(-{build tag})?-{python tag}-{abi tag}-{platform tag}.whl + # See https://www.python.org/dev/peps/pep-0425/. + # We don't care about the python or abi versions (they depend on what we're currently + # running on), we just want to make sure we have all the platforms we expect. + parts = os.path.splitext(os.fspath(whl))[0].split('-') + dist = parts[0] + version = parts[1] + platform_tag = parts[-1] + return dist, version, _normalize_platform_tag(platform_tag) + + +def normalized_current_platform() -> str: + return _normalize_platform_tag(pep425tags.get_platform()) diff --git a/tests/python/pants_test/init/BUILD b/tests/python/pants_test/init/BUILD index bc497071802..9f48adb2e06 100644 --- a/tests/python/pants_test/init/BUILD +++ b/tests/python/pants_test/init/BUILD @@ -18,6 +18,7 @@ python_tests( 'src/python/pants/init', 'src/python/pants/option', 'src/python/pants/pantsd:pants_daemon', + 'src/python/pants/python', 'src/python/pants/subsystem', 'src/python/pants/util:contextutil', 'src/python/pants/util:dirutil', diff --git a/tests/python/pants_test/init/test_plugin_resolver.py b/tests/python/pants_test/init/test_plugin_resolver.py index f265722b9af..61346efaa0e 100644 --- a/tests/python/pants_test/init/test_plugin_resolver.py +++ b/tests/python/pants_test/init/test_plugin_resolver.py @@ -2,47 +2,81 @@ # Licensed under the Apache License, Version 2.0 (see LICENSE). import os -import time +import shutil import unittest +from abc import ABCMeta, abstractmethod from contextlib import contextmanager +from pathlib import Path from textwrap import dedent from parameterized import parameterized -from pex.crawler import Crawler -from pex.installer import EggInstaller, Packager, WheelInstaller from pex.interpreter import PythonInterpreter from pex.resolver import Unsatisfiable from pkg_resources import Requirement, WorkingSet from pants.init.plugin_resolver import PluginResolver from pants.option.options_bootstrapper import OptionsBootstrapper +from pants.python.setup_py_runner import SetupPyRunner from pants.testutil.interpreter_selection_utils import ( PY_36, PY_37, python_interpreter_path, skip_unless_python36_and_python37_present, ) +from pants.testutil.subsystem.util import init_subsystem from pants.util.contextutil import temporary_dir -from pants.util.dirutil import safe_open, safe_rmtree, touch +from pants.util.dirutil import safe_rmtree, touch req = Requirement.parse -INSTALLERS = [('sdist', Packager), ('egg', EggInstaller), ('whl', WheelInstaller)] + +class Installer(metaclass=ABCMeta): + def __init__(self, source_dir: Path, install_dir: Path) -> None: + self._source_dir = source_dir + self._install_dir = install_dir + + def run(self) -> None: + init_subsystem(SetupPyRunner.Factory) + dist = self._create_dist(SetupPyRunner.Factory.create()) + shutil.copy(str(dist), str(self._install_dir)) + + @abstractmethod + def _create_dist(self, runner: SetupPyRunner) -> Path: + ... 
+ + +class SdistInstaller(Installer): + def _create_dist(self, runner: SetupPyRunner) -> Path: + return runner.sdist(source_dir=self._source_dir) + + +class WheelInstaller(Installer): + def _create_dist(self, runner: SetupPyRunner): + return runner.bdist(source_dir=self._source_dir) + + +INSTALLERS = [('sdist', SdistInstaller), ('whl', WheelInstaller)] class PluginResolverTest(unittest.TestCase): - @staticmethod - def create_plugin(distribution_repo_dir, plugin, version=None, packager_cls=None): - with safe_open(os.path.join(distribution_repo_dir, plugin, 'setup.py'), 'w') as fp: - fp.write(dedent(f""" + + DEFAULT_VERSION = '0.0.0' + + @classmethod + def create_plugin(cls, distribution_repo_dir, plugin, version=None, packager_cls=None): + distribution_repo_dir = Path(distribution_repo_dir) + + source_dir = distribution_repo_dir.joinpath(plugin) + source_dir.mkdir(parents=True) + source_dir.joinpath('setup.py').write_text(dedent(f""" from setuptools import setup - setup(name="{plugin}", version="{version or '0.0.0'}") + setup(name="{plugin}", version="{version or cls.DEFAULT_VERSION}") """)) - packager_cls = packager_cls or Packager - packager = packager_cls(source_dir=os.path.join(distribution_repo_dir, plugin), + packager_cls = packager_cls or SdistInstaller + packager = packager_cls(source_dir=source_dir, install_dir=distribution_repo_dir) packager.run() @@ -83,27 +117,30 @@ def provide_chroot(existing): options_bootstrapper = OptionsBootstrapper.create(env=env, args=args) plugin_resolver = PluginResolver(options_bootstrapper, interpreter=interpreter) cache_dir = plugin_resolver.plugin_cache_dir - yield plugin_resolver.resolve(WorkingSet(entries=[])), root_dir, repo_dir, cache_dir + + working_set = plugin_resolver.resolve(WorkingSet(entries=[])) + for dist in working_set: + self.assertIn(Path(cache_dir), Path(dist.location).parents) + + yield working_set, root_dir, repo_dir, cache_dir def test_no_plugins(self): with self.plugin_resolution() as (working_set, _, _, _): - self.assertEqual([], working_set.entries) + self.assertEqual([], list(working_set)) @parameterized.expand(INSTALLERS) def test_plugins(self, unused_test_name, packager_cls): with self.plugin_resolution(plugins=[('jake', '1.2.3'), 'jane'], packager_cls=packager_cls) as (working_set, _, _, cache_dir): - self.assertEqual(2, len(working_set.entries)) - dist = working_set.find(req('jake')) - self.assertIsNotNone(dist) - self.assertEqual(os.path.realpath(cache_dir), - os.path.realpath(os.path.dirname(dist.location))) + def assert_dist_version(name, expected_version): + dist = working_set.find(req(name)) + self.assertEqual(expected_version, dist.version) - dist = working_set.find(req('jane')) - self.assertIsNotNone(dist) - self.assertEqual(os.path.realpath(cache_dir), - os.path.realpath(os.path.dirname(dist.location))) + self.assertEqual(2, len(working_set.entries)) + + assert_dist_version(name='jake', expected_version='1.2.3') + assert_dist_version(name='jane', expected_version=self.DEFAULT_VERSION) @parameterized.expand(INSTALLERS) def test_exact_requirements(self, unused_test_name, packager_cls): @@ -111,17 +148,16 @@ def test_exact_requirements(self, unused_test_name, packager_cls): packager_cls=packager_cls) as results: working_set, chroot, repo_dir, cache_dir = results - self.assertEqual(2, len(working_set.entries)) - # Kill the repo source dir and re-resolve. If the PluginResolver truly detects exact # requirements it should skip any resolves and load directly from the still in-tact cache. 
safe_rmtree(repo_dir) with self.plugin_resolution(chroot=chroot, plugins=[('jake', '1.2.3'), ('jane', '3.4.5')]) as results2: + working_set2, _, _, _ = results2 - self.assertEqual(working_set.entries, working_set2.entries) + self.assertEqual(list(working_set), list(working_set2)) @parameterized.expand(INSTALLERS) @skip_unless_python36_and_python37_present @@ -132,12 +168,11 @@ def test_exact_requirements_interpreter_change(self, unused_test_name, packager_ with self.plugin_resolution(interpreter=python36, plugins=[('jake', '1.2.3'), ('jane', '3.4.5')], packager_cls=packager_cls) as results: - working_set, chroot, repo_dir, cache_dir = results - self.assertEqual(2, len(working_set.entries)) + working_set, chroot, repo_dir, cache_dir = results safe_rmtree(repo_dir) - with self.assertRaises(FileNotFoundError): + with self.assertRaises(Unsatisfiable): with self.plugin_resolution(interpreter=python37, chroot=chroot, plugins=[('jake', '1.2.3'), ('jane', '3.4.5')]): @@ -149,25 +184,22 @@ def test_exact_requirements_interpreter_change(self, unused_test_name, packager_ with self.plugin_resolution(interpreter=python36, chroot=chroot, plugins=[('jake', '1.2.3'), ('jane', '3.4.5')]) as results2: - working_set2, _, _, _ = results2 - self.assertEqual(working_set.entries, working_set2.entries) + working_set2, _, _, _ = results2 + self.assertEqual(list(working_set), list(working_set2)) @parameterized.expand(INSTALLERS) def test_inexact_requirements(self, unused_test_name, packager_cls): with self.plugin_resolution(plugins=[('jake', '1.2.3'), 'jane'], packager_cls=packager_cls) as results: - working_set, chroot, repo_dir, cache_dir = results - self.assertEqual(2, len(working_set.entries)) + working_set, chroot, repo_dir, cache_dir = results # Kill the cache and the repo source dir and wait past our 1s test TTL, if the PluginResolver # truly detects inexact plugin requirements it should skip perma-caching and fall through to - # pex to a TLL expiry resolve and then fail. + # a pex resolve and then fail. safe_rmtree(repo_dir) safe_rmtree(cache_dir) - Crawler.reset_cache() - time.sleep(1.5) with self.assertRaises(Unsatisfiable): with self.plugin_resolution(chroot=chroot, plugins=[('jake', '1.2.3'), 'jane']): diff --git a/tests/python/pants_test/pantsd/test_pantsd_integration.py b/tests/python/pants_test/pantsd/test_pantsd_integration.py index d628ca2e7a1..dd56d18bbd6 100644 --- a/tests/python/pants_test/pantsd/test_pantsd_integration.py +++ b/tests/python/pants_test/pantsd/test_pantsd_integration.py @@ -756,10 +756,7 @@ def test_unhandled_exceptions_only_log_exceptions_once(self): checker.assert_running() self.assert_failure(result) # Assert that the desired exception has been triggered once. - self.assertIn( - """Exception message: Could not satisfy all requirements for badreq==99.99.99:\n badreq==99.99.99""", - result.stderr_data, - ) + self.assertRegex(result.stderr_data, r'Exception message:.*badreq==99.99.99') # Assert that it has only been triggered once. self.assertNotIn( 'During handling of the above exception, another exception occurred:',