diff --git a/conda_forge_feedstock_check_solvable/mamba_solver.py b/conda_forge_feedstock_check_solvable/mamba_solver.py index a474f69..84eea96 100644 --- a/conda_forge_feedstock_check_solvable/mamba_solver.py +++ b/conda_forge_feedstock_check_solvable/mamba_solver.py @@ -11,7 +11,6 @@ """ import rapidjson as json import os -import logging import glob import functools import pathlib @@ -24,10 +23,11 @@ from collections import defaultdict from dataclasses import dataclass, field from typing import Dict, Tuple, List, FrozenSet, Set, Iterable - +import contextlib import psutil from ruamel.yaml import YAML import cachetools.func +import wurlitzer from conda.models.match_spec import MatchSpec import conda_build.api @@ -45,14 +45,14 @@ PACKAGE_CACHE = api.MultiPackageCache(pkgs_dirs) -logger = logging.getLogger("conda_forge_tick.mamba_solver") - DEFAULT_RUN_EXPORTS = { "weak": set(), "strong": set(), "noarch": set(), } +MAX_GLIBC_MINOR = 50 + # turn off pip for python api.Context().add_pip_as_python_dependency = False @@ -72,6 +72,103 @@ "win-64", } +# I cannot get python logging to work correctly with all of the hacks to +# make conda-build be quiet. 
+# so this is a thing
+VERBOSITY = 1
+VERBOSITY_PREFIX = {
+    0: "CRITICAL",
+    1: "WARNING",
+    2: "INFO",
+    3: "DEBUG",
+}
+
+
+def print_verb(fmt, *args, verbosity=0):
+    from inspect import currentframe, getframeinfo
+
+    frameinfo = getframeinfo(currentframe())
+
+    if verbosity <= VERBOSITY:
+        if args:
+            msg = fmt % args
+        else:
+            msg = fmt
+        print(
+            VERBOSITY_PREFIX[verbosity]
+            + ":"
+            + __name__
+            + ":"
+            + "%d" % frameinfo.lineno
+            + ":"
+            + msg,
+            flush=True,
+        )
+
+
+def print_critical(fmt, *args):
+    print_verb(fmt, *args, verbosity=0)
+
+
+def print_warning(fmt, *args):
+    print_verb(fmt, *args, verbosity=1)
+
+
+def print_info(fmt, *args):
+    print_verb(fmt, *args, verbosity=2)
+
+
+def print_debug(fmt, *args):
+    print_verb(fmt, *args, verbosity=3)
+
+
+@contextlib.contextmanager
+def suppress_conda_build_logging():
+    import conda_build.conda_interface
+    old_val = conda_build.conda_interface.cc_conda_build.get("log_config_file")
+    if "CONDA_FORGE_FEEDSTOCK_CHECK_SOLVABLE_DEBUG" in os.environ:
+        suppress = False
+    else:
+        suppress = True
+
+    if not suppress:
+        try:
+            yield None
+        finally:
+            pass
+        return
+
+    try:
+        with tempfile.TemporaryDirectory() as tmpdir:
+            config_file = os.path.join(tmpdir, "logging.yaml")
+            with open(config_file, "w") as fp:
+                fp.write("""\
+version: 1
+loggers:
+  conda_build.api:
+    level: CRITICAL
+  conda_build.config:
+    level: CRITICAL
+  conda_build.metadata:
+    level: CRITICAL
+  conda_build.variants:
+    level: CRITICAL
+  urllib3:
+    level: CRITICAL
+  urllib3.connectionpool:
+    level: CRITICAL
+""")
+            conda_build.conda_interface.cc_conda_build["log_config_file"] = config_file
+
+            with wurlitzer.pipes():
+                yield None
+    finally:
+        if old_val is not None:
+            conda_build.conda_interface.cc_conda_build["log_config_file"] = old_val
+        else:
+            if "log_config_file" in conda_build.conda_interface.cc_conda_build:
+                del conda_build.conda_interface.cc_conda_build["log_config_file"]
+
 
 def _munge_req_star(req):
     reqs = []
@@ -189,12 +286,12 @@ def 
write(self): for subdir in all_subdirs: self._write_subdir(subdir) - logger.info("Wrote fake repodata to %s", self.base_path) + print_debug("Wrote fake repodata to %s", self.base_path) import glob for filename in glob.iglob(str(self.base_path / "**"), recursive=True): - logger.info(filename) - logger.info("repo: %s", self.channel_url) + print_debug(filename) + print_debug("repo: %s", self.channel_url) def __enter__(self): return self @@ -233,7 +330,7 @@ def _get_run_export_download(link_tuple): for key in DEFAULT_RUN_EXPORTS: if key in run_exports: - logger.debug( + print_debug( "RUN EXPORT: %s %s %s", pkg, key, @@ -299,7 +396,7 @@ def _get_run_export(link_tuple): for k in rx: if k in DEFAULT_RUN_EXPORTS: - logger.debug( + print_debug( "RUN EXPORT: %s %s %s", name, k, @@ -307,7 +404,7 @@ def _get_run_export(link_tuple): ) run_exports[k].update(rx[k]) else: - logger.debug( + print_debug( "RUN EXPORT: %s %s %s", name, "weak", @@ -317,7 +414,7 @@ def _get_run_export(link_tuple): # fall back to getting repodata shard if needed if run_exports is None: - logger.info( + print_info( "RUN EXPORTS: downloading package %s/%s/%s" % (channel_url, link_tuple[0].split("/")[-1], link_tuple[1]), ) @@ -400,14 +497,14 @@ def solve( _specs = [_norm_spec(s) for s in specs] - logger.debug("MAMBA running solver for specs \n\n%s\n", pprint.pformat(_specs)) + print_debug("MAMBA running solver for specs \n\n%s\n", pprint.pformat(_specs)) solver.add_jobs(_specs, api.SOLVER_INSTALL) success = solver.solve() err = None if not success: - logger.warning( + print_warning( "MAMBA failed to solve specs \n\n%s\n\nfor channels " "\n\n%s\n\nThe reported errors are:\n\n%s\n", pprint.pformat(_specs), @@ -432,7 +529,7 @@ def solve( ) if get_run_exports: - logger.debug( + print_debug( "MAMBA getting run exports for \n\n%s\n", pprint.pformat(solution), ) @@ -503,75 +600,72 @@ def clean(): tmp_path = pathlib.Path(tmp_dir) repodata = FakeRepoData(tmp_path) - fake_packages = [ - FakePackage("__glibc", 
"2.12"), - FakePackage("__glibc", "2.17"), - FakePackage("__glibc", "2.28"), - FakePackage("__cuda", "9.2"), - FakePackage("__cuda", "10.0"), - FakePackage("__cuda", "10.1"), - FakePackage("__cuda", "10.2"), - FakePackage("__cuda", "11.0"), - FakePackage("__cuda", "11.1"), - FakePackage("__cuda", "11.2"), - FakePackage("__cuda", "11.3"), - FakePackage("__cuda", "11.4"), - FakePackage("__cuda", "11.5"), - FakePackage("__cuda", "11.6"), - FakePackage("__cuda", "11.7"), - FakePackage("__cuda", "11.8"), - FakePackage("__cuda", "11.9"), - FakePackage("__cuda", "12.0"), - FakePackage("__cuda", "12.1"), - FakePackage("__cuda", "12.2"), - FakePackage("__cuda", "12.3"), - FakePackage("__cuda", "12.4"), - FakePackage("__cuda", "12.5"), - ] - for pkg in fake_packages: - repodata.add_package(pkg) - for osx_ver in [ - "10.9", - "10.10", - "10.11", - "10.12", - "10.13", - "10.14", - "10.15", - "10.16", - ]: - repodata.add_package(FakePackage("__osx", osx_ver), subdirs=["osx-64"]) - for osx_ver in [ + + # glibc + for glibc_minor in range(12, MAX_GLIBC_MINOR+1): + repodata.add_package(FakePackage("__glibc", "2.%d" % glibc_minor)) + + # cuda - get from cuda-version on conda-forge + try: + cuda_pkgs = json.loads( + subprocess.check_output( + "CONDA_SUBDIR=linux-64 conda search cuda-version -c conda-forge --json", + shell=True, + text=True, + stderr=subprocess.PIPE, + ) + ) + cuda_vers = [ + pkg["version"] + for pkg in cuda_pkgs["cuda-version"] + ] + except Exception: + cuda_vers = [] + # extra hard coded list to make sure we don't miss anything + cuda_vers += [ + "9.2", + "10.0", + "10.1", + "10.2", "11.0", - "11.0.1", "11.1", "11.2", - "11.2.1", - "11.2.2", - "11.2.3", "11.3", "11.4", "11.5", - "12.0.1", + "11.6", + "11.7", + "11.8", + "12.0", "12.1", "12.2", "12.3", "12.4", "12.5", - "13.0", - "13.1", - "13.2", - "13.3", - "13.4", - "13.5", - "13.6", - "13.7", - "13.8", + ] + cuda_vers = set(cuda_vers) + for cuda_ver in cuda_vers: + repodata.add_package(FakePackage("__cuda", 
cuda_ver))
+
+    for osx_ver in [
+        "10.9",
+        "10.10",
+        "10.11",
+        "10.12",
+        "10.13",
+        "10.14",
+        "10.15",
+        "10.16",
     ]:
-        repodata.add_package(
-            FakePackage("__osx", osx_ver),
-            subdirs=["osx-64", "osx-arm64"],
-        )
+        repodata.add_package(FakePackage("__osx", osx_ver), subdirs=["osx-64"])
+    for osx_major in range(11, 17):
+        for osx_minor in range(0, 17):
+            osx_ver = "%d.%d" % (osx_major, osx_minor)
+            repodata.add_package(
+                FakePackage("__osx", osx_ver),
+                subdirs=["osx-64", "osx-arm64"],
+            )
+
     repodata.add_package(
         FakePackage("__win", "0"),
         subdirs=list(subdir for subdir in ALL_PLATFORMS if subdir.startswith("win")),
@@ -591,12 +685,13 @@ def clean():
     return repodata.channel_url
 
 
-def _func(feedstock_dir, additional_channels, build_platform, conn):
+def _func(feedstock_dir, additional_channels, build_platform, verbosity, conn):
     try:
         res = _is_recipe_solvable(
             feedstock_dir,
             additional_channels=additional_channels,
             build_platform=build_platform,
+            verbosity=verbosity,
         )
         conn.send(res)
     except Exception as e:
@@ -607,9 +702,10 @@ def _func(feedstock_dir, additional_channels, build_platform, conn):
 
 def is_recipe_solvable(
     feedstock_dir,
-    additional_channels=(),
+    additional_channels=None,
     timeout=600,
     build_platform=None,
+    verbosity=1,
 ) -> Tuple[bool, List[str], Dict[str, bool]]:
     """Compute if a recipe is solvable.
 
@@ -628,6 +724,9 @@ def is_recipe_solvable(
         If not None, then the work will be run in a separate process and
         this function will return True if the work doesn't complete before
         `timeout` seconds.
+    verbosity : int
+        An int indicating the level of verbosity from 0 (no output) to 3
+        (gobs of output).
 
     
Returns ------- @@ -645,7 +744,13 @@ def is_recipe_solvable( parent_conn, child_conn = Pipe() p = Process( target=_func, - args=(feedstock_dir, additional_channels, build_platform, child_conn), + args=( + feedstock_dir, + additional_channels, + build_platform, + verbosity, + child_conn, + ), ) p.start() if parent_conn.poll(timeout): @@ -657,7 +762,7 @@ def is_recipe_solvable( {}, ) else: - logger.warning("MAMBA SOLVER TIMEOUT for %s", feedstock_dir) + print_warning("MAMBA SOLVER TIMEOUT for %s", feedstock_dir) res = ( True, [], @@ -678,6 +783,7 @@ def is_recipe_solvable( feedstock_dir, additional_channels=additional_channels, build_platform=build_platform, + verbosity=verbosity, ) return res @@ -687,13 +793,17 @@ def _is_recipe_solvable( feedstock_dir, additional_channels=(), build_platform=None, + verbosity=1, ) -> Tuple[bool, List[str], Dict[str, bool]]: + global VERBOSITY + VERBOSITY = verbosity + build_platform = build_platform or {} - if not additional_channels: - additional_channels = [virtual_package_repodata()] - os.environ["CONDA_OVERRIDE_GLIBC"] = "2.50" + additional_channels = additional_channels or [] + additional_channels += [virtual_package_repodata()] + os.environ["CONDA_OVERRIDE_GLIBC"] = "2.%d" % MAX_GLIBC_MINOR errors = [] cbcs = sorted(glob.glob(os.path.join(feedstock_dir, ".ci_support", "*.yaml"))) @@ -703,7 +813,7 @@ def _is_recipe_solvable( "results in no builds for a recipe (e.g., a recipe is python 2.7 only). " "This attempted migration is being reported as not solvable.", ) - logger.warning(errors[-1]) + print_warning(errors[-1]) return False, errors, {} if not os.path.exists(os.path.join(feedstock_dir, "recipe", "meta.yaml")): @@ -711,9 +821,10 @@ def _is_recipe_solvable( "No `recipe/meta.yaml` file found! 
This issue is quite weird and " "someone should investigate!", ) - logger.warning(errors[-1]) + print_warning(errors[-1]) return False, errors, {} + print_info("CHECKING FEEDSTOCK: %s", os.path.basename(feedstock_dir)) solvable = True solvable_by_cbc = {} for cbc_fname in cbcs: @@ -730,6 +841,7 @@ def _is_recipe_solvable( if arch not in ["32", "aarch64", "ppc64le", "armv7l", "arm64"]: arch = "64" + print_info("CHECKING RECIPE SOLVABLE: %s", os.path.basename(cbc_fname)) _solvable, _errors = _is_recipe_solvable_on_platform( os.path.join(feedstock_dir, "recipe"), cbc_fname, @@ -817,7 +929,7 @@ def _is_recipe_solvable_on_platform( if additional_channels: channel_sources = list(additional_channels) + channel_sources - logger.debug( + print_debug( "MAMBA: using channels %s on platform-arch %s-%s", channel_sources, platform, @@ -826,40 +938,41 @@ def _is_recipe_solvable_on_platform( # here we extract the conda build config in roughly the same way that # it would be used in a real build - logger.debug("rendering recipe with conda build") + print_debug("rendering recipe with conda build") + + with suppress_conda_build_logging(): + for att in range(2): + try: + if att == 1: + os.system("rm -f %s/conda_build_config.yaml" % recipe_dir) + config = conda_build.config.get_or_merge_config( + None, + platform=platform, + arch=arch, + variant_config_files=[cbc_path], + ) + cbc, _ = conda_build.variants.get_package_combined_spec( + recipe_dir, + config=config, + ) + except Exception: + if att == 0: + pass + else: + raise - for att in range(2): - try: - if att == 1: - os.system("rm -f %s/conda_build_config.yaml" % recipe_dir) - config = conda_build.config.get_or_merge_config( - None, - platform=platform, - arch=arch, - variant_config_files=[cbc_path], - ) - cbc, _ = conda_build.variants.get_package_combined_spec( - recipe_dir, - config=config, - ) - except Exception: - if att == 0: - pass - else: - raise - - # now we render the meta.yaml into an actual recipe - metas = 
conda_build.api.render( - recipe_dir, - platform=platform, - arch=arch, - ignore_system_variants=True, - variants=cbc, - permit_undefined_jinja=True, - finalize=False, - bypass_env_check=True, - channel_urls=channel_sources, - ) + # now we render the meta.yaml into an actual recipe + metas = conda_build.api.render( + recipe_dir, + platform=platform, + arch=arch, + ignore_system_variants=True, + variants=cbc, + permit_undefined_jinja=True, + finalize=False, + bypass_env_check=True, + channel_urls=channel_sources, + ) # get build info if build_platform_arch is not None: @@ -869,17 +982,18 @@ def _is_recipe_solvable_on_platform( # now we loop through each one and check if we can solve it # we check run and host and ignore the rest - logger.debug("getting mamba solver") - solver = _mamba_factory(tuple(channel_sources), f"{platform}-{arch}") - build_solver = _mamba_factory( - tuple(channel_sources), - f"{build_platform}-{build_arch}", - ) + print_debug("getting mamba solver") + with suppress_conda_build_logging(): + solver = _mamba_factory(tuple(channel_sources), f"{platform}-{arch}") + build_solver = _mamba_factory( + tuple(channel_sources), + f"{build_platform}-{build_arch}", + ) solvable = True errors = [] outnames = [m.name() for m, _, _ in metas] for m, _, _ in metas: - logger.debug("checking recipe %s", m.name()) + print_debug("checking recipe %s", m.name()) build_req = m.get_value("requirements/build", []) host_req = m.get_value("requirements/host", []) @@ -952,8 +1066,8 @@ def _is_recipe_solvable_on_platform( if _err is not None: errors.append(_err) - logger.info("RUN EXPORT cache status: %s", _get_run_export.cache_info()) - logger.info( + print_info("RUN EXPORT CACHE STATUS: %s", _get_run_export.cache_info()) + print_info( "MAMBA SOLVER MEM USAGE: %d MB", psutil.Process().memory_info().rss // 1024**2, ) diff --git a/requirements.txt b/requirements.txt index cc8f622..df85d9a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -9,3 +9,4 @@ conda-build 
mamba>=0.23 ruamel.yaml.jinja2 conda-forge-metadata>=0.2.0 +wurlitzer diff --git a/tests/test_mamba_solvable.py b/tests/test_mamba_solvable.py index bf862f0..96cacbf 100644 --- a/tests/test_mamba_solvable.py +++ b/tests/test_mamba_solvable.py @@ -17,6 +17,7 @@ virtual_package_repodata, apply_pins, _mamba_factory, + suppress_conda_build_logging, ) FEEDSTOCK_DIR = os.path.join(os.path.dirname(__file__), "test_feedstock") @@ -79,30 +80,31 @@ def test_mamba_solver_apply_pins(tmp_path): ) import conda_build.api - config = conda_build.config.get_or_merge_config( - None, - platform="linux", - arch="64", - variant_config_files=[], - ) - cbc, _ = conda_build.variants.get_package_combined_spec( - str(tmp_path), - config=config, - ) + with suppress_conda_build_logging(): + config = conda_build.config.get_or_merge_config( + None, + platform="linux", + arch="64", + variant_config_files=[], + ) + cbc, _ = conda_build.variants.get_package_combined_spec( + str(tmp_path), + config=config, + ) - solver = _mamba_factory(("conda-forge", "defaults"), "linux-64") - - metas = conda_build.api.render( - str(tmp_path), - platform="linux", - arch="64", - ignore_system_variants=True, - variants=cbc, - permit_undefined_jinja=True, - finalize=False, - bypass_env_check=True, - channel_urls=("conda-forge", "defaults"), - ) + solver = _mamba_factory(("conda-forge", "defaults"), "linux-64") + + metas = conda_build.api.render( + str(tmp_path), + platform="linux", + arch="64", + ignore_system_variants=True, + variants=cbc, + permit_undefined_jinja=True, + finalize=False, + bypass_env_check=True, + channel_urls=("conda-forge", "defaults"), + ) m = metas[0][0] outnames = [m.name() for m, _, _ in metas] @@ -126,9 +128,12 @@ def test_mamba_solver_apply_pins(tmp_path): @flaky def test_mamba_solver_nvcc(): - virtual_packages = virtual_package_repodata() - solver = MambaSolver([virtual_packages, "conda-forge", "defaults"], "linux-64") - out = solver.solve(["gcc_linux-64 7.*", "gxx_linux-64 7.*", 
"nvcc_linux-64 11.0.*"]) + with suppress_conda_build_logging(): + virtual_packages = virtual_package_repodata() + solver = MambaSolver([virtual_packages, "conda-forge", "defaults"], "linux-64") + out = solver.solve( + ["gcc_linux-64 7.*", "gxx_linux-64 7.*", "nvcc_linux-64 11.0.*"] + ) assert out[0], out[1] @@ -453,65 +458,67 @@ def test_virtual_package(feedstock_dir, tmp_path_factory): @flaky def test_mamba_solver_hangs(): - solver = _mamba_factory(("conda-forge", "defaults"), "osx-64") - res = solver.solve( - [ - "pytest", - "selenium", - "requests-mock", - "ncurses >=6.2,<7.0a0", - "libffi >=3.2.1,<4.0a0", - "xz >=5.2.5,<6.0a0", - "nbconvert >=5.6", - "sqlalchemy", - "jsonschema", - "six >=1.11", - "python_abi 3.9.* *_cp39", - "tornado", - "jupyter", - "requests", - "jupyter_client", - "notebook >=4.2", - "tk >=8.6.10,<8.7.0a0", - "openssl >=1.1.1h,<1.1.2a", - "readline >=8.0,<9.0a0", - "fuzzywuzzy", - "python >=3.9,<3.10.0a0", - "traitlets", - "sqlite >=3.33.0,<4.0a0", - "alembic", - "zlib >=1.2.11,<1.3.0a0", - "python-dateutil", - "nbformat", - "jupyter_core", - ], - ) + with suppress_conda_build_logging(): + solver = _mamba_factory(("conda-forge", "defaults"), "osx-64") + res = solver.solve( + [ + "pytest", + "selenium", + "requests-mock", + "ncurses >=6.2,<7.0a0", + "libffi >=3.2.1,<4.0a0", + "xz >=5.2.5,<6.0a0", + "nbconvert >=5.6", + "sqlalchemy", + "jsonschema", + "six >=1.11", + "python_abi 3.9.* *_cp39", + "tornado", + "jupyter", + "requests", + "jupyter_client", + "notebook >=4.2", + "tk >=8.6.10,<8.7.0a0", + "openssl >=1.1.1h,<1.1.2a", + "readline >=8.0,<9.0a0", + "fuzzywuzzy", + "python >=3.9,<3.10.0a0", + "traitlets", + "sqlite >=3.33.0,<4.0a0", + "alembic", + "zlib >=1.2.11,<1.3.0a0", + "python-dateutil", + "nbformat", + "jupyter_core", + ], + ) assert res[0] - solver = _mamba_factory(("conda-forge", "defaults"), "linux-64") - solver.solve( - [ - "gdal >=2.1.0", - "ncurses >=6.2,<7.0a0", - "geopandas", - "scikit-image >=0.16.0", - "pandas", - 
"pyproj >=2.2.0", - "libffi >=3.2.1,<4.0a0", - "six", - "tk >=8.6.10,<8.7.0a0", - "spectral", - "zlib >=1.2.11,<1.3.0a0", - "shapely", - "readline >=8.0,<9.0a0", - "python >=3.8,<3.9.0a0", - "numpy", - "python_abi 3.8.* *_cp38", - "xz >=5.2.5,<6.0a0", - "openssl >=1.1.1h,<1.1.2a", - "sqlite >=3.33.0,<4.0a0", - ], - ) + with suppress_conda_build_logging(): + solver = _mamba_factory(("conda-forge", "defaults"), "linux-64") + solver.solve( + [ + "gdal >=2.1.0", + "ncurses >=6.2,<7.0a0", + "geopandas", + "scikit-image >=0.16.0", + "pandas", + "pyproj >=2.2.0", + "libffi >=3.2.1,<4.0a0", + "six", + "tk >=8.6.10,<8.7.0a0", + "spectral", + "zlib >=1.2.11,<1.3.0a0", + "shapely", + "readline >=8.0,<9.0a0", + "python >=3.8,<3.9.0a0", + "numpy", + "python_abi 3.8.* *_cp38", + "xz >=5.2.5,<6.0a0", + "openssl >=1.1.1h,<1.1.2a", + "sqlite >=3.33.0,<4.0a0", + ], + ) assert res[0] @@ -602,3 +609,9 @@ def test_pillow_solvable(tmp_path): pprint.pprint(solvable_by_variant) assert solvable, pprint.pformat(errors) assert any("python3.10" in k for k in solvable_by_variant) + + +if __name__ == "__main__": + pth = os.path.join(os.path.dirname(__file__), "xgboost-feedstock") + assert is_recipe_solvable(pth, timeout=None, verbosity=1)[0] + assert is_recipe_solvable(pth, verbosity=1)[0]