diff --git a/.travis.yml b/.travis.yml index 3324c8aba..6d662e9e7 100644 --- a/.travis.yml +++ b/.travis.yml @@ -22,11 +22,12 @@ install: - rm -rf ${CONDA_INSTALL_LOCN}/pkgs && ln -s ${HOME}/cache/pkgs ${CONDA_INSTALL_LOCN}/pkgs # Now do the things we need to do to install it. - - conda install --file requirements.txt coverage coveralls mock pytest pytest-cov ${CONDA_PKGS} --yes --quiet ${CHANNEL} + - conda install --file requirements.txt black coverage coveralls mock pytest pytest-cov ${CONDA_PKGS} --yes --quiet ${CHANNEL} - python -m pip install -e . script: - py.test tests --cov conda_smithy + - black --check --verbose --config=pyproject.toml . after_success: - coverage combine diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 3fb528ab3..31a5a18cd 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -4,6 +4,176 @@ conda-smithy Change Log .. current developments +v3.6.0 +==================== + +**Added:** + +* Ignore Drone CI files in GitHub diffs +* Run ``black --check`` on CI to verify code is formatted correctly + +**Changed:** + +* Platform-independent files like `run_docker_build.sh` are moved to the `.scripts` folder +* Standardize and test support for multiple docker images. +* refactored ``conda_smithy.lint_recipe.NEEDED_FAMILIES`` to top level so external projects can access it +* Rerun ``black`` on the codebase. + +**Fixed:** + +* fix crash when host section was present but empty +* fix build-locally.py in skip_render by not attempting to chmod +x it +* ship conf file for black so everyone uses the same settings + + + +v3.5.0 +==================== + +**Added:** + +* conda-smithy will remove the ``.github/CODEOWNERS`` file in case the recipe + maintainers list is empty + +**Changed:** + +* Default windows provider was changed to azure. + + + +v3.4.8 +==================== + +**Fixed:** + +* Don't make assumptions in ``conda_smithy/variant_algebra.py`` about the metadata + + + +v3.4.7 +==================== + +**Added:** + +* Added a method to sync user in drone + +**Changed:** + +* Check that a project is registered if registering fails on drone +* Check that a project has the secret if adding secret fails on drone + + + +v3.4.6 +==================== + +**Added:** + +* conda-smithy can now register packages on drone.io. We plan on using this to help out with the aarch64 + architecture builds. + +**Changed:** + +* drone.io is now the default platform for aarch64 builds +* migrations folder changed from /migrations to /.ci_support/migrations + +**Fixed:** + +* Fix render_README crash when azure api returns 404 + + + +v3.4.5 +==================== + +**Fixed:** + +* YAML ``dump()`` now uses a ``pathlib.Path`` object. + + + +v3.4.4 +==================== + +**Fixed:** + +* Updated conda-smithy to work with ruamel.yaml v0.16+. + + + +v3.4.3 +==================== + +**Changed:** + +* When linting pins, allow more than one space + +**Fixed:** + +* Don't lint setting build number + + + +v3.4.2 +==================== + +**Added:** + +* Generating feedstocks with support for the linux-armv7l platform. +* test of the downgrade functionality of the new pinning system +* Mark generated files as generated so that github collapses them by default in diffs. +* The linter will now recommend fixes for malformed pins, + suggesting that a single space be inserted. For instance, both ``python>=3`` and + ``python >= 3`` should be written as ``python >=3``. +* New key ``upload_on_branch`` added to conda-forge.yml, the value of which is checked + against the current git branch, and the upload will be skipped if they are not equal. 
+ This is optional and an empty key skips the test. +* Added `CONDA_SMITHY_LOGLEVEL` environment variable to change verbosity + of rendering. This can be either `debug` or `info`. + +**Changed:** + +* Add skip_render option to conda-forge.yml. One can specify one or more filenames telling conda-smithy to skip making changes to them. Files that can skip rendering include .gitignore, .gitattributes, README.md and LICENSE.txt. +* Reduced verbosity of rendering + +**Fixed:** + +* recipe-lint compatibility with ruamel.yaml 0.16 +* Mock PY_VER in recipe check +* Fixed badge rendering in readme template. +* yum_requirements will now work on Travis-based linux builds. +* requirements: update to conda-build>=3.18.3 +* fix non-public conda import, use conda.exports +* requirements: replace pycrypto with pycryptodome + + + +v3.4.1 +==================== + +**Added:** + +* license_file is required for GPL, MIT, BSD, APACHE, PSF + +**Changed:** + +* ``build-locally.py`` now uses ``python3`` even if ``python`` is ``python2`` (Python 3.6+ was already required) + +**Removed:** + +* Github issue, PR and contributing files are removed as they are in https://github.com/conda-forge/.github +* Support for Python 2 removed + +**Fixed:** + +* Fix configuring appveyor on repos starting with an underscore +* Fixed an issue where conda system variants could be used after rendering migrations. +* Fixed issue where only the last maintainer is review requested +* Unlicense is allowed +* Support newer ``shyaml`` versions by checking whether ``shyaml -h`` succeeds. + + + v3.4.0 ==================== diff --git a/bootstrap-obvious-ci-and-miniconda.py b/bootstrap-obvious-ci-and-miniconda.py index 536ba1412..617f5fc2a 100644 --- a/bootstrap-obvious-ci-and-miniconda.py +++ b/bootstrap-obvious-ci-and-miniconda.py @@ -2,12 +2,10 @@ """ Installs Miniconda with the latest version of Obvious-CI. -This script supports Python 2 and 3 (>=2.6 and >=3.2+ respectively) and is +This script supports Python 3 (>=3.2) and is designed to run on OSX, Linux and Windows. """ -from __future__ import print_function - import argparse import os import platform @@ -49,7 +47,7 @@ def miniconda_url( miniconda_os_ext = {"Linux": "sh", "MacOSX": "sh", "Windows": "exe"} template_values["ext"] = miniconda_os_ext[template_values["OS"]] - if major_py_version not in ["2", "3"]: + if major_py_version not in ["3"]: raise ValueError( "Unexpected major Python version {!r}.".format(major_py_version) ) @@ -133,7 +131,7 @@ def main( "major_py_version", help="""The major Python version for the miniconda root env (may still subsequently use another Python version).""", - choices=["2", "3"], + choices=["3"], ) parser.add_argument( "--without-obvci", diff --git a/conda_smithy.recipe/meta.yaml b/conda_smithy.recipe/meta.yaml index e531abb96..502a7392e 100644 --- a/conda_smithy.recipe/meta.yaml +++ b/conda_smithy.recipe/meta.yaml @@ -21,10 +21,10 @@ requirements: run: - python >=3.6 - conda >=4.2 - - conda-build >=3.1.2 + - conda-build >=3.18.3 - jinja2 - requests - - pycrypto + - pycryptodome - gitpython - pygithub <2 - ruamel.yaml diff --git a/conda_smithy/_version.py b/conda_smithy/_version.py index f0ffa1936..2549df17c 100644 --- a/conda_smithy/_version.py +++ b/conda_smithy/_version.py @@ -1,4 +1,3 @@ - # This file helps to compute a version number in source trees obtained from # git-archive tarball (such as those provided by github's download-from-tag feature). 
Distribution tarballs (built by setup.py sdist) and build diff --git a/conda_smithy/azure_ci_utils.py b/conda_smithy/azure_ci_utils.py index 27746df1d..663b37589 100644 --- a/conda_smithy/azure_ci_utils.py +++ b/conda_smithy/azure_ci_utils.py @@ -11,7 +11,9 @@ SourceRepository, ) from vsts.service_endpoint.v4_1.models import ServiceEndpoint -from vsts.service_endpoint.v4_1.service_endpoint_client import ServiceEndpointClient +from vsts.service_endpoint.v4_1.service_endpoint_client import ( + ServiceEndpointClient, +) from vsts.task_agent.v4_0.models import TaskAgentQueue from vsts.task_agent.v4_0.task_agent_client import TaskAgentClient from vsts.vss_connection import VssConnection @@ -22,7 +24,9 @@ class AzureConfig: _default_org = "conda-forge" _default_project_name = "feedstock-builds" - def __init__(self, org_or_user=None, project_name=None, team_instance=None): + def __init__( + self, org_or_user=None, project_name=None, team_instance=None + ): self.org_or_user = org_or_user or os.getenv( "AZURE_ORG_OR_USER", self._default_org ) @@ -36,7 +40,9 @@ def __init__(self, org_or_user=None, project_name=None, team_instance=None): ) try: - with open(os.path.expanduser("~/.conda-smithy/azure.token"), "r") as fh: + with open( + os.path.expanduser("~/.conda-smithy/azure.token"), "r" + ) as fh: self.token = fh.read().strip() if not self.token: raise ValueError() @@ -44,7 +50,9 @@ def __init__(self, org_or_user=None, project_name=None, team_instance=None): self.token = None # By default for now don't report on the build information back to github - self.azure_report_build_status = os.getenv("AZURE_REPORT_BUILD_STATUS", "true") + self.azure_report_build_status = os.getenv( + "AZURE_REPORT_BUILD_STATUS", "true" + ) @property def connection(self): @@ -58,7 +66,9 @@ def credentials(self): if self.token: return BasicAuthentication("", self.token) else: - warnings.warn("No token available. No modifications will be possible!") + warnings.warn( + "No token available. No modifications will be possible!" 
+ ) return Authentication() @@ -81,7 +91,9 @@ def get_service_endpoint(config: AzureConfig = default_config): raise KeyError("Service endpoint not found") -def get_queues(config: AzureConfig = default_config) -> typing.List[TaskAgentQueue]: +def get_queues( + config: AzureConfig = default_config +) -> typing.List[TaskAgentQueue]: aclient = TaskAgentClient(config.instance_base_url, config.credentials) return aclient.get_agent_queues(config.project_name) @@ -198,7 +210,9 @@ def register_repo(github_org, repo_name, config: AzureConfig = default_config): assert len(existing_definitions) == 1 ed = existing_definitions[0] bclient.update_definition( - definition=build_definition, definition_id=ed.id, project=ed.project.name + definition=build_definition, + definition_id=ed.id, + project=ed.project.name, ) else: bclient.create_definition( @@ -207,7 +221,9 @@ def register_repo(github_org, repo_name, config: AzureConfig = default_config): def build_client(config: AzureConfig = default_config) -> BuildClient: - return config.connection.get_client("vsts.build.v4_1.build_client.BuildClient") + return config.connection.get_client( + "vsts.build.v4_1.build_client.BuildClient" + ) def repo_registered( @@ -222,7 +238,9 @@ def repo_registered( def enable_reporting(repo, config: AzureConfig = default_config) -> None: bclient = build_client(config) - bdef_header = bclient.get_definitions(project=config.project_name, name=repo)[0] + bdef_header = bclient.get_definitions( + project=config.project_name, name=repo + )[0] bdef = bclient.get_definition(bdef_header.id, bdef_header.project.name) bdef.repository.properties["reportBuildStatus"] = "true" bclient.update_definition(bdef, bdef.id, bdef.project.name) @@ -233,7 +251,9 @@ def get_build_id(repo, config: AzureConfig = default_config) -> dict: of badges. This is needed by non-conda-forge use cases""" bclient = build_client(config) - bdef_header = bclient.get_definitions(project=config.project_name, name=repo)[0] + bdef_header = bclient.get_definitions( + project=config.project_name, name=repo + )[0] bdef: BuildDefinition = bclient.get_definition( bdef_header.id, bdef_header.project.name ) diff --git a/conda_smithy/ci_register.py b/conda_smithy/ci_register.py index 7dae790c4..8b2b5bddd 100755 --- a/conda_smithy/ci_register.py +++ b/conda_smithy/ci_register.py @@ -1,12 +1,9 @@ #!/usr/bin/env python -from __future__ import print_function import os import requests import time import sys -import ruamel.yaml - from . import github from .utils import update_conda_forge_config @@ -38,6 +35,17 @@ "Put one in ~/.conda-smithy/appveyor.token" ) +try: + with open(os.path.expanduser("~/.conda-smithy/drone.token"), "r") as fh: + drone_token = fh.read().strip() + if not drone_token: + raise ValueError() +except (IOError, ValueError): + print( + "No drone token. 
Create a token at https://cloud.drone.io/account and\n" + "Put one in ~/.conda-smithy/drone.token" + ) + try: anaconda_token = os.environ["BINSTAR_TOKEN"] except KeyError: @@ -56,6 +64,23 @@ ) travis_endpoint = "https://api.travis-ci.org" +drone_endpoint = "https://cloud.drone.io" + + +class LiveServerSession(requests.Session): + """Utility class to avoid typing out urls all the time""" + + def __init__(self, prefix_url=None, *args, **kwargs): + super(LiveServerSession, self).__init__(*args, **kwargs) + self.prefix_url = prefix_url + + def request(self, method, url, *args, **kwargs): + from urllib.parse import urljoin + + url = urljoin(self.prefix_url, url) + return super(LiveServerSession, self).request( + method, url, *args, **kwargs + ) def travis_headers(): @@ -105,6 +130,55 @@ def add_token_to_circle(user, project): raise ValueError(response) +def drone_session(): + s = LiveServerSession(prefix_url=drone_endpoint) + s.headers.update({"Authorization": f"Bearer {drone_token}"}) + return s + + +def add_token_to_drone(user, project): + session = drone_session() + response = session.post( + f"/api/repos/{user}/{project}/secrets", + json={ + "name": "BINSTAR_TOKEN", + "data": anaconda_token, + "pull_request": False, + }, + ) + if response.status_code != 200: + # Check that the token is in secrets already + session = drone_session() + response2 = session.get(f"/api/repos/{user}/{project}/secrets") + response2.raise_for_status() + for secret in response2.json(): + if "BINSTAR_TOKEN" == secret["name"]: + return + response.raise_for_status() + + +def drone_sync(): + session = drone_session() + response = session.post("/api/user/repos?async=true") + response.raise_for_status() + + +def add_project_to_drone(user, project): + session = drone_session() + response = session.post(f"/api/repos/{user}/{project}") + if response.status_code != 200: + # Check that the project is registered already + session = drone_session() + response = session.get(f"/api/repos/{user}/{project}") + response.raise_for_status() + + +def regenerate_drone_webhooks(user, project): + session = drone_session() + response = session.post(f"/api/repos/{user}/{project}/repair") + response.raise_for_status() + + def add_project_to_circle(user, project): headers = { "Content-Type": "application/json", @@ -208,6 +282,8 @@ def appveyor_configure(user, project): """Configure appveyor so that it skips building if there is no appveyor.yml present.""" headers = {"Authorization": "Bearer {}".format(appveyor_token)} # I have reasons to believe this is all AppVeyor is doing to the API URL. + if project.startswith("_"): + project = project[1:] project = project.replace("_", "-").replace(".", "-") url = "https://ci.appveyor.com/api/projects/{}/{}/settings".format( user, project @@ -218,9 +294,9 @@ def appveyor_configure(user, project): content = response.json() settings = content["settings"] for required_setting in ( - u"skipBranchesWithoutAppveyorYml", - u"rollingBuildsOnlyForPullRequests", - u"rollingBuilds", + "skipBranchesWithoutAppveyorYml", + "rollingBuildsOnlyForPullRequests", + "rollingBuilds", ): if not settings[required_setting]: print( diff --git a/conda_smithy/cli.py b/conda_smithy/cli.py index c0ab50e3f..91f37009f 100644 --- a/conda_smithy/cli.py +++ b/conda_smithy/cli.py @@ -1,5 +1,3 @@ -from __future__ import print_function, absolute_import - import os import subprocess import sys @@ -21,7 +19,8 @@ from . 
import __version__ -PY2 = sys.version_info[0] == 2 +if sys.version_info[0] == 2: + raise Exception("Conda-smithy does not support python 2!") def generate_feedstock_content(target_directory, source_recipe_dir): @@ -54,14 +53,9 @@ class Subcommand(object): aliases = [] def __init__(self, parser, help=None): - if PY2: - # aliases not allowed in 2.7 :-( - subcommand_parser = parser.add_parser(self.subcommand, help=help) - else: - subcommand_parser = parser.add_parser( - self.subcommand, help=help, aliases=self.aliases - ) - + subcommand_parser = parser.add_parser( + self.subcommand, help=help, aliases=self.aliases + ) subcommand_parser.set_defaults(subcommand_func=self) self.subcommand_parser = subcommand_parser @@ -249,6 +243,13 @@ def __call__(self, args): ci_register.appveyor_configure(owner, repo) else: print("Appveyor registration disabled.") + + if args.drone: + ci_register.add_project_to_drone(owner, repo) + ci_register.add_token_to_drone(owner, repo) + else: + print("Drone registration disabled.") + ci_register.add_conda_forge_webservice_hooks(owner, repo) print( "\nCI services have been enabled. You may wish to regenerate the feedstock.\n" @@ -263,7 +264,9 @@ def __init__(self, parser): # conda-smithy azure-buildid ./ super(AddAzureBuildId, self).__init__( parser, - dedent("Update the azure configuration stored in the config file.") + dedent( + "Update the azure configuration stored in the config file." + ), ) scp = self.subcommand_parser scp.add_argument( @@ -273,7 +276,8 @@ def __init__(self, parser): ) group = scp.add_mutually_exclusive_group() group.add_argument( - "--user", help="azure username for which this repo is enabled already" + "--user", + help="azure username for which this repo is enabled already", ) group.add_argument( "--organization", @@ -281,9 +285,9 @@ def __init__(self, parser): help="azure organisation for which this repo is enabled already", ) scp.add_argument( - '--project_name', + "--project_name", default=azure_ci_utils.AzureConfig._default_project_name, - help="project name that feedstocks are registered under" + help="project name that feedstocks are registered under", ) def __call__(self, args): @@ -291,20 +295,19 @@ def __call__(self, args): repo = os.path.basename(os.path.abspath(args.feedstock_directory)) config = azure_ci_utils.AzureConfig( - org_or_user=owner, - project_name=args.project_name + org_or_user=owner, project_name=args.project_name ) build_info = azure_ci_utils.get_build_id(repo, config) - import ruamel.yaml from .utils import update_conda_forge_config + with update_conda_forge_config(args.feedstock_directory) as config: config.setdefault("azure", {}) - config["azure"]["build_id"] = build_info['build_id'] - config["azure"]["user_or_org"] = build_info['user_or_org'] - config["azure"]["project_name"] = build_info['project_name'] - config["azure"]["project_id"] = build_info['project_id'] + config["azure"]["build_id"] = build_info["build_id"] + config["azure"]["user_or_org"] = build_info["user_or_org"] + config["azure"]["project_name"] = build_info["project_name"] + config["azure"]["project_id"] = build_info["project_id"] class Regenerate(Subcommand): @@ -342,7 +345,12 @@ def __init__(self, parser): help="Exclusive conda-build config file to replace conda-forge-pinning. 
" + "For advanced usage only", ) - scp.add_argument("--check", action="store_true", default=False, help="Check if regenerate can be performed") + scp.add_argument( + "--check", + action="store_true", + default=False, + help="Check if regenerate can be performed", + ) def __call__(self, args): configure_feedstock.main( @@ -350,7 +358,7 @@ def __call__(self, args): no_check_uptodate=args.no_check_uptodate, commit=args.commit, exclusive_config_file=args.exclusive_config_file, - check=args.check + check=args.check, ) @@ -451,14 +459,6 @@ def main(): for subcommand in Subcommand.__subclasses__(): subcommand(subparser) # And the alias for rerender - if PY2: - - class Rerender(Regenerate): - # A poor-man's alias for regenerate. - subcommand = "rerender" - - Rerender(subparser) - parser.add_argument( "--version", action="version", diff --git a/conda_smithy/configure_feedstock.py b/conda_smithy/configure_feedstock.py index 33881f05c..a40ae5732 100644 --- a/conda_smithy/configure_feedstock.py +++ b/conda_smithy/configure_feedstock.py @@ -1,5 +1,3 @@ -from __future__ import print_function, unicode_literals - import glob from itertools import product, chain import logging @@ -17,6 +15,8 @@ import conda_build.conda_interface import conda_build.render +from copy import deepcopy + from conda_build import __version__ as conda_build_version from jinja2 import Environment, FileSystemLoader @@ -346,8 +346,8 @@ def _collapse_subpackage_variants(list_of_metas, root_path): _trim_unused_zip_keys(used_key_values) _trim_unused_pin_run_as_build(used_key_values) - print("top_level_loop_vars", top_level_loop_vars) - print("used_key_values", used_key_values) + logger.debug("top_level_loop_vars {}".format(top_level_loop_vars)) + logger.debug("used_key_values {}".format(used_key_values)) return ( break_up_top_level_values(top_level_loop_vars, used_key_values), @@ -367,12 +367,17 @@ def finalize_config(config, platform, forge_config): """For configs without essential parameters like docker_image add fallback value. """ - if platform.startswith("linux") and not "docker_image" in config: - config["docker_image"] = [forge_config["docker"]["fallback_image"]] + if platform.startswith("linux"): + if "docker_image" in config: + config["docker_image"] = [config["docker_image"][0]] + else: + config["docker_image"] = [forge_config["docker"]["fallback_image"]] return config -def dump_subspace_config_files(metas, root_path, platform, arch, upload, forge_config): +def dump_subspace_config_files( + metas, root_path, platform, arch, upload, forge_config +): """With conda-build 3, it handles the build matrix. We take what it spits out, and write a config.yaml file for each matrix entry that it spits out. References to a specific file replace all of the old environment variables that specified a matrix entry.""" @@ -380,7 +385,9 @@ def dump_subspace_config_files(metas, root_path, platform, arch, upload, forge_c # identify how to break up the complete set of used variables. Anything considered # "top-level" should be broken up into a separate CI job. 
- configs, top_level_loop_vars = _collapse_subpackage_variants(metas, root_path) + configs, top_level_loop_vars = _collapse_subpackage_variants( + metas, root_path + ) # get rid of the special object notation in the yaml file for objects that we dump yaml.add_representer(set, yaml.representer.SafeRepresenter.represent_list) @@ -478,21 +485,28 @@ def migrate_combined_spec(combined_spec, forge_dir, config): """ combined_spec = combined_spec.copy() - migrations_root = os.path.join(forge_dir, "migrations", "*.yaml") + migrations_root = os.path.join( + forge_dir, ".ci_support", "migrations", "*.yaml" + ) migrations = glob.glob(migrations_root) from .variant_algebra import parse_variant, variant_add migration_variants = [ - (fn, parse_variant(open(fn, "r").read(), config=config)) for fn in migrations + (fn, parse_variant(open(fn, "r").read(), config=config)) + for fn in migrations ] - migration_variants.sort(key=lambda fn_v: (fn_v[1]["migration_ts"], fn_v[0])) + migration_variants.sort( + key=lambda fn_v: (fn_v[1]["migration_ts"], fn_v[0]) + ) if len(migration_variants): - print(f"Applying migrations: {','.join(k for k, v in migration_variants)}") + logger.info( + f"Applying migrations: {','.join(k for k, v in migration_variants)}" + ) for migrator_file, migration in migration_variants: - if 'migration_ts' in migration: - del migration['migration_ts'] + if "migration_ts" in migration: + del migration["migration_ts"] if len(migration): combined_spec = variant_add(combined_spec, migration) return combined_spec @@ -521,7 +535,8 @@ def _render_ci_provider( for i, (platform, arch, keep_noarch) in enumerate( zip(platforms, archs, keep_noarchs) ): - config = conda_build.config.get_or_merge_config(None, + config = conda_build.config.get_or_merge_config( + None, exclusive_config_file=forge_config["exclusive_config_file"], platform=platform, arch=arch, @@ -529,16 +544,18 @@ def _render_ci_provider( # Get the combined variants from normal variant locations prior to running migrations combined_variant_spec, _ = conda_build.variants.get_package_combined_spec( - os.path.join(forge_dir, "recipe"), - config=config + os.path.join(forge_dir, "recipe"), config=config ) - migrated_combined_variant_spec = migrate_combined_spec(combined_variant_spec, forge_dir, config) + migrated_combined_variant_spec = migrate_combined_spec( + combined_variant_spec, forge_dir, config + ) metas = conda_build.api.render( os.path.join(forge_dir, "recipe"), platform=platform, arch=arch, + ignore_system_variants=True, variants=migrated_combined_variant_spec, permit_undefined_jinja=True, finalize=False, @@ -581,6 +598,7 @@ def _render_ci_provider( "osx": "OSX", "win": "Windows", "linux_aarch64": "aarch64", + "linux_armv7l": "armv7l", } fancy_platforms = [] unfancy_platforms = set() @@ -662,7 +680,7 @@ def _render_ci_provider( platform_specific_setup( jinja_env=jinja_env, forge_dir=forge_dir, - forge_config=forge_config, + forge_config=deepcopy(forge_config), platform=platform, ) @@ -733,7 +751,7 @@ def _get_build_setup_line(forge_dir, platform, forge_config): ) else: build_setup += textwrap.dedent( - """\ + """\ source run_conda_forge_build_setup """ @@ -748,27 +766,23 @@ def _circle_specific_setup(jinja_env, forge_config, forge_dir, platform): if yum_build_setup: forge_config["yum_build_setup"] = yum_build_setup - forge_config["build_setup"] = _get_build_setup_line(forge_dir, platform, forge_config) - - if platform == "linux": - run_file_name = "run_docker_build" - else: - run_file_name = "run_osx_build" + forge_config["build_setup"] = 
_get_build_setup_line( + forge_dir, platform, forge_config + ) - # TODO: Conda has a convenience for accessing nested yaml content. - template_files = [ - "{}.sh.tmpl".format(run_file_name), - "fast_finish_ci_pr_build.sh.tmpl", - ] + template_files = [".circleci/fast_finish_ci_pr_build.sh"] if platform == "linux": - template_files.append("build_steps.sh.tmpl") + template_files.append(".scripts/run_docker_build.sh") + template_files.append(".scripts/build_steps.sh") + else: + template_files.append(".circleci/run_osx_build.sh") _render_template_exe_files( forge_config=forge_config, - target_dir=os.path.join(forge_dir, ".circleci"), jinja_env=jinja_env, template_files=template_files, + forge_dir=forge_dir, ) # Fix permission of other shell files. @@ -784,7 +798,7 @@ def generate_yum_requirements(forge_dir): yum_requirements_fpath = os.path.join( forge_dir, "recipe", "yum_requirements.txt" ) - yum_build_setup = '' + yum_build_setup = "" if os.path.exists(yum_requirements_fpath): with open(yum_requirements_fpath) as fh: requirements = [ @@ -821,7 +835,7 @@ def _get_platforms_of_provider(provider, forge_config): archs = [] upload_packages = [] for platform in ["linux", "osx", "win"]: - for arch in ["64", "aarch64", "ppc64le"]: + for arch in ["64", "aarch64", "ppc64le", "armv7l"]: platform_arch = ( platform if arch == "64" else "{}_{}".format(platform, arch) ) @@ -865,8 +879,8 @@ def render_circle(jinja_env, forge_config, forge_dir): os.path.join(forge_dir, ".circleci", "fast_finish_ci_pr_build.sh"), ], "linux": [ - os.path.join(forge_dir, ".circleci", "run_docker_build.sh"), - os.path.join(forge_dir, ".circleci", "build_steps.sh"), + os.path.join(forge_dir, ".scripts", "run_docker_build.sh"), + os.path.join(forge_dir, ".scripts", "build_steps.sh"), ], "osx": [os.path.join(forge_dir, ".circleci", "run_osx_build.sh")], } @@ -896,37 +910,52 @@ def _travis_specific_setup(jinja_env, forge_config, forge_dir, platform): build_setup = _get_build_setup_line(forge_dir, platform, forge_config) platform_templates = { - "linux": [ - "run_docker_build.sh.tmpl", - "build_steps.sh.tmpl", - ], - "osx": [ - "run_osx_build.sh.tmpl", - ], + "linux": [".scripts/run_docker_build.sh", ".scripts/build_steps.sh"], + "osx": [".travis/run_osx_build.sh"], "win": [], } template_files = platform_templates.get(platform, []) + if platform == "linux": + yum_build_setup = generate_yum_requirements(forge_dir) + if yum_build_setup: + forge_config["yum_build_setup"] = yum_build_setup + + if platform == "osx": + build_setup = build_setup.strip() + build_setup = build_setup.replace("\n", "\n ") + forge_config["build_setup"] = build_setup + _render_template_exe_files( forge_config=forge_config, - target_dir=os.path.join(forge_dir, ".travis"), jinja_env=jinja_env, template_files=template_files, + forge_dir=forge_dir, ) - build_setup = build_setup.strip() - build_setup = build_setup.replace("\n", "\n ") - forge_config["build_setup"] = build_setup - def _render_template_exe_files( - forge_config, target_dir, jinja_env, template_files + forge_config, jinja_env, template_files, forge_dir ): for template_file in template_files: - template = jinja_env.get_template(template_file) - target_fname = os.path.join(target_dir, template_file[: -len(".tmpl")]) + template = jinja_env.get_template( + os.path.basename(template_file) + ".tmpl" + ) + target_fname = os.path.join(forge_dir, template_file) + new_file_contents = template.render(**forge_config) + if target_fname in get_common_scripts(forge_dir) and os.path.exists( + target_fname + ): + with 
open(target_fname, "r") as fh: + old_file_contents = fh.read() + if old_file_contents != new_file_contents: + raise RuntimeError( + "Same file {} is rendered twice with different contents".format( + target_fname + ) + ) with write_file(target_fname) as fh: - fh.write(template.render(**forge_config)) + fh.write(new_file_contents) # Fix permission of template shell files set_exe_file(target_fname, True) @@ -947,10 +976,10 @@ def render_travis(jinja_env, forge_config, forge_dir): extra_platform_files = { "linux": [ - os.path.join(forge_dir, ".travis", "run_docker_build.sh"), - os.path.join(forge_dir, ".travis", "build_steps.sh"), + os.path.join(forge_dir, ".scripts", "run_docker_build.sh"), + os.path.join(forge_dir, ".scripts", "build_steps.sh"), ], - "osx": [os.path.join(forge_dir, ".travis", "run_osx_build.sh")], + "osx": [os.path.join(forge_dir, ".scripts", "run_osx_build.sh")], } return _render_ci_provider( @@ -1015,32 +1044,32 @@ def render_appveyor(jinja_env, forge_config, forge_dir): def _azure_specific_setup(jinja_env, forge_config, forge_dir, platform): - platform_templates = { - "linux": [ - "azure-pipelines-linux.yml.tmpl", - "run_docker_build.sh.tmpl", - "build_steps.sh.tmpl", - ], - "osx": ["azure-pipelines-osx.yml.tmpl"], - "win": ["azure-pipelines-win.yml.tmpl"], - } - template_files = platform_templates.get(platform, []) - # Explicitly add in a newline character to ensure that jinja templating doesn't do something stupid build_setup = _get_build_setup_line(forge_dir, platform, forge_config) if platform == "linux": yum_build_setup = generate_yum_requirements(forge_dir) if yum_build_setup: - forge_config['yum_build_setup'] = yum_build_setup + forge_config["yum_build_setup"] = yum_build_setup forge_config["build_setup"] = build_setup + platform_templates = { + "linux": [ + ".scripts/run_docker_build.sh", + ".scripts/build_steps.sh", + ".azure-pipelines/azure-pipelines-linux.yml", + ], + "osx": [".azure-pipelines/azure-pipelines-osx.yml"], + "win": [".azure-pipelines/azure-pipelines-win.yml"], + } + template_files = platform_templates.get(platform, []) + _render_template_exe_files( forge_config=forge_config, - target_dir=os.path.join(forge_dir, ".azure-pipelines"), jinja_env=jinja_env, template_files=template_files, + forge_dir=forge_dir, ) @@ -1049,7 +1078,6 @@ def render_azure(jinja_env, forge_config, forge_dir): template_filename = "azure-pipelines.yml.tmpl" fast_finish_text = "" - # TODO: for now just get this ignoring other pieces platforms, archs, keep_noarchs, upload_packages = _get_platforms_of_provider( "azure", forge_config ) @@ -1072,29 +1100,26 @@ def render_azure(jinja_env, forge_config, forge_dir): def _drone_specific_setup(jinja_env, forge_config, forge_dir, platform): platform_templates = { - "linux": [ - "build_steps.sh.tmpl", - ], + "linux": [".scripts/build_steps.sh"], "osx": [], "win": [], } template_files = platform_templates.get(platform, []) - # Explicitly add in a newline character to ensure that jinja templating doesn't do something stupid - build_setup = "run_conda_forge_build_setup\n" + build_setup = _get_build_setup_line(forge_dir, platform, forge_config) if platform == "linux": yum_build_setup = generate_yum_requirements(forge_dir) if yum_build_setup: - forge_config['yum_build_setup'] = yum_build_setup + forge_config["yum_build_setup"] = yum_build_setup forge_config["build_setup"] = build_setup _render_template_exe_files( forge_config=forge_config, - target_dir=os.path.join(forge_dir, ".drone"), jinja_env=jinja_env, template_files=template_files, + 
forge_dir=forge_dir, ) @@ -1103,7 +1128,6 @@ def render_drone(jinja_env, forge_config, forge_dir): template_filename = "drone.yml.tmpl" fast_finish_text = "" - # TODO: for now just get this ignoring other pieces platforms, archs, keep_noarchs, upload_packages = _get_platforms_of_provider( "drone", forge_config ) @@ -1123,7 +1147,11 @@ def render_drone(jinja_env, forge_config, forge_dir): upload_packages=upload_packages, ) + def render_README(jinja_env, forge_config, forge_dir): + if "README.md" in forge_config["skip_render"]: + logger.info("README.md rendering is skipped") + return # we only care about the first metadata object for sake of readme metas = conda_build.api.render( os.path.join(forge_dir, "recipe"), @@ -1143,7 +1171,7 @@ def render_README(jinja_env, forge_config, forge_dir): variants = [] if os.path.exists(ci_support_path): for filename in os.listdir(ci_support_path): - if filename.endswith('.yaml'): + if filename.endswith(".yaml"): variant_name, _ = os.path.splitext(filename) variants.append(variant_name) @@ -1165,39 +1193,48 @@ def render_README(jinja_env, forge_config, forge_dir): ) ) - if forge_config['azure'].get('build_id') is None: + if forge_config["azure"].get("build_id") is None: # Try to retrieve the build_id from the interwebs try: import requests + resp = requests.get( "https://dev.azure.com/{org}/{project_name}/_apis/build/definitions?name={repo}".format( org=forge_config["azure"]["user_or_org"], project_name=forge_config["azure"]["project_name"], - repo=forge_config["github"]["repo_name"] - )) + repo=forge_config["github"]["repo_name"], + ) + ) + resp.raise_for_status() build_def = resp.json()["value"][0] - forge_config['azure']['build_id'] = build_def['id'] + forge_config["azure"]["build_id"] = build_def["id"] except (IndexError, IOError): pass - print("README") - print(yaml.dump(forge_config)) + logger.debug("README") + logger.debug(yaml.dump(forge_config)) with write_file(target_fname) as fh: fh.write(template.render(**forge_config)) + code_owners_file = os.path.join(forge_dir, ".github", "CODEOWNERS") if len(forge_config["maintainers"]) > 0: - code_owners_file = os.path.join(forge_dir, ".github", "CODEOWNERS") with write_file(code_owners_file) as fh: line = "*" for maintainer in forge_config["maintainers"]: line = line + " @" + maintainer fh.write(line) + else: + remove_file_or_dir(code_owners_file) -def copy_feedstock_content(forge_dir): +def copy_feedstock_content(forge_config, forge_dir): feedstock_content = os.path.join(conda_forge_content, "feedstock_content") - copytree(feedstock_content, forge_dir, ("README", "__pycache__")) + skip_files = ["README", "__pycache__"] + for f in forge_config["skip_render"]: + skip_files.append(f) + logger.info("%s rendering is skipped" % f) + copytree(feedstock_content, forge_dir, skip_files) def _load_forge_config(forge_dir, exclusive_config_file): @@ -1221,21 +1258,23 @@ def _load_forge_config(forge_dir, exclusive_config_file): "project_name": "feedstock-builds", "project_id": "84710dde-1620-425b-80d0-4cf5baca359d", # Default to a timeout of 6 hours. 
This is the maximum for azure by default - "timeout_minutes": 360 + "timeout_minutes": 360, }, "provider": { "linux": "azure", "osx": "azure", - "win": "appveyor", + "win": "azure", # Following platforms are disabled by default "linux_aarch64": None, "linux_ppc64le": None, + "linux_armv7l": None, }, "win": {"enabled": False}, "osx": {"enabled": False}, "linux": {"enabled": False}, "linux_aarch64": {"enabled": False}, "linux_ppc64le": {"enabled": False}, + "linux_armv7l": {"enabled": False}, # Configurable idle timeout. Used for packages that don't have chatty enough builds # Applicable only to circleci and travis "idle_timeout_minutes": None, @@ -1256,6 +1295,7 @@ def _load_forge_config(forge_dir, exclusive_config_file): "branch_name": "master", }, "recipe_dir": "recipe", + "skip_render": [], } # An older conda-smithy used to have some files which should no longer exist, @@ -1274,6 +1314,7 @@ def _load_forge_config(forge_dir, exclusive_config_file): os.path.join(".github", "ISSUE_TEMPLATE.md"), os.path.join(".github", "PULL_REQUEST_TEMPLATE.md"), ] + for old_file in old_files: remove_file_or_dir(os.path.join(forge_dir, old_file)) @@ -1296,7 +1337,9 @@ def _load_forge_config(forge_dir, exclusive_config_file): " for more info." ) - if file_config.get("docker") and file_config.get("docker").get("image"): + if file_config.get("docker") and file_config.get("docker").get( + "image" + ): raise ValueError( "Setting docker image in conda-forge.yml is removed now." " Use conda_build_config.yaml instead" @@ -1316,21 +1359,20 @@ def _load_forge_config(forge_dir, exclusive_config_file): config["azure"].setdefault("user_or_org", config["github"]["user_or_org"]) log = yaml.safe_dump(config) - print("## CONFIGURATION USED\n") - print(log) - print("## END CONFIGURATION\n") - - for platform in ["linux_aarch64"]: - if config["provider"][platform] == "default": - config["provider"][platform] = "azure" + logger.debug("## CONFIGURATION USED\n") + logger.debug(log) + logger.debug("## END CONFIGURATION\n") - # TODO: Switch default to Drone - if config["provider"]["linux_aarch64"] in {"native"}: + if config["provider"]["linux_aarch64"] in {"default", "native"}: config["provider"]["linux_aarch64"] = "drone" - if config["provider"]["linux_ppc64le"] in {"native", "default"}: + if config["provider"]["linux_ppc64le"] in {"default", "native"}: config["provider"]["linux_ppc64le"] = "travis" + # Fallback handling set to azure for platforms that are not fully specified by this time + for platform in config["provider"]: + if config["provider"][platform] in {"default", "emulated"}: + config["provider"][platform] = "azure" # Set the environment variable for the compiler stack os.environ["CF_COMPILER_STACK"] = config["compiler_stack"] # Set valid ranges for the supported platforms @@ -1368,7 +1410,7 @@ def check_version_uptodate(resolve, name, installed_version, error_on_warn): if error_on_warn: raise RuntimeError("{} Exiting.".format(msg)) else: - print(msg) + logger.info(msg) def commit_changes(forge_file_directory, commit, cs_ver, cfp_ver, cb_ver): @@ -1380,7 +1422,7 @@ def commit_changes(forge_file_directory, commit, cs_ver, cfp_ver, cb_ver): msg = "Re-rendered with conda-build {} and conda-smithy {}".format( cb_ver, cs_ver ) - print(msg) + logger.info(msg) is_git_repo = os.path.exists(os.path.join(forge_file_directory, ".git")) if is_git_repo: @@ -1394,15 +1436,15 @@ def commit_changes(forge_file_directory, commit, cs_ver, cfp_ver, cb_ver): if commit == "edit": git_args += ["--edit", "--status", "--verbose"] 
subprocess.check_call(git_args, cwd=forge_file_directory) - print("") + logger.info("") else: - print( + logger.info( "You can commit the changes with:\n\n" ' git commit -m "MNT: {}"\n'.format(msg) ) - print("These changes need to be pushed to github!\n") + logger.info("These changes need to be pushed to github!\n") else: - print("No changes made. This feedstock is up-to-date.\n") + logger.info("No changes made. This feedstock is up-to-date.\n") def get_cfp_file_path(resolve=None, error_on_warn=True): @@ -1437,19 +1479,38 @@ def get_cfp_file_path(resolve=None, error_on_warn=True): def clear_variants(forge_dir): "Remove all variant files placed in the .ci_support path" if os.path.isdir(os.path.join(forge_dir, ".ci_support")): - configs = glob.glob( - os.path.join( - forge_dir, ".ci_support", "*") - ) + configs = glob.glob(os.path.join(forge_dir, ".ci_support", "*.yaml")) for config in configs: remove_file(config) +def get_common_scripts(forge_dir): + for old_file in ["run_docker_build.sh", "build_steps.sh"]: + yield os.path.join(forge_dir, ".scripts", old_file) + + +def clear_scripts(forge_dir): + for folder in [".azure-pipelines", ".circleci", ".drone", ".travis"]: + for old_file in ["run_docker_build.sh", "build_steps.sh"]: + remove_file(os.path.join(forge_dir, folder, old_file)) + + def main( - forge_file_directory, no_check_uptodate=False, commit=False, exclusive_config_file=None, check=False + forge_file_directory, + no_check_uptodate=False, + commit=False, + exclusive_config_file=None, + check=False, ): + import logging + + loglevel = os.environ.get("CONDA_SMITHY_LOGLEVEL", "INFO").upper() + logger.setLevel(loglevel) + if check: - index = conda_build.conda_interface.get_index(channel_urls=["conda-forge"]) + index = conda_build.conda_interface.get_index( + channel_urls=["conda-forge"] + ) r = conda_build.conda_interface.Resolve(index) # Check that conda-smithy is up-to-date @@ -1497,9 +1558,11 @@ def main( ), ) - copy_feedstock_content(forge_dir) - set_exe_file(os.path.join(forge_dir, "build-locally.py")) + copy_feedstock_content(config, forge_dir) + if os.path.exists(os.path.join(forge_dir, "build-locally.py")): + set_exe_file(os.path.join(forge_dir, "build-locally.py")) clear_variants(forge_dir) + clear_scripts(forge_dir) render_circle(env, config, forge_dir) render_travis(env, config, forge_dir) diff --git a/conda_smithy/feedstock_content/.gitattributes b/conda_smithy/feedstock_content/.gitattributes index 974953ecb..86ff93771 100644 --- a/conda_smithy/feedstock_content/.gitattributes +++ b/conda_smithy/feedstock_content/.gitattributes @@ -5,3 +5,20 @@ meta.yaml text eol=lf build.sh text eol=lf bld.bat text eol=crlf + +# github helper pieces to make some files not show up in diffs automatically +.azure-pipelines/* linguist-generated=true +.circleci/* linguist-generated=true +.drone/* linguist-generated=true +.drone.yml linguist-generated=true +.github/* linguist-generated=true +.travis/* linguist-generated=true +.appveyor.yml linguist-generated=true +.gitattributes linguist-generated=true +.gitignore linguist-generated=true +.travis.yml linguist-generated=true +LICENSE.txt linguist-generated=true +README.md linguist-generated=true +azure-pipelines.yml linguist-generated=true +build-locally.py linguist-generated=true +shippable.yml linguist-generated=true diff --git a/conda_smithy/feedstock_content/build-locally.py b/conda_smithy/feedstock_content/build-locally.py index 51d6a6e55..8f7ecca4f 100755 --- a/conda_smithy/feedstock_content/build-locally.py +++ 
b/conda_smithy/feedstock_content/build-locally.py @@ -15,11 +15,14 @@ def setup_environment(ns): def run_docker_build(ns): - script = glob.glob(".*/run_docker_build.sh")[0] - subprocess.check_call(script) + script = ".scripts/run_docker_build.sh" + subprocess.check_call([script]) + def verify_config(ns): - valid_configs = {os.path.basename(f)[:-5] for f in glob.glob(".ci_support/*.yaml")} + valid_configs = { + os.path.basename(f)[:-5] for f in glob.glob(".ci_support/*.yaml") + } print(f"valid configs are {valid_configs}") if ns.config in valid_configs: print("Using " + ns.config + " configuration") @@ -39,8 +42,10 @@ def verify_config(ns): else: raise ValueError("config " + ns.config + " is not valid") # Remove the following, as implemented - if not ns.config.startswith('linux'): - raise ValueError(f"only Linux configs currently supported, got {ns.config}") + if not ns.config.startswith("linux"): + raise ValueError( + f"only Linux configs currently supported, got {ns.config}" + ) def main(args=None): diff --git a/conda_smithy/feedstock_io.py b/conda_smithy/feedstock_io.py index 26982e69d..6878850af 100644 --- a/conda_smithy/feedstock_io.py +++ b/conda_smithy/feedstock_io.py @@ -1,5 +1,3 @@ -from __future__ import unicode_literals - from contextlib import contextmanager import io import os diff --git a/conda_smithy/feedstocks.py b/conda_smithy/feedstocks.py index 6445fed5e..4712d3e9a 100644 --- a/conda_smithy/feedstocks.py +++ b/conda_smithy/feedstocks.py @@ -1,4 +1,3 @@ -from __future__ import absolute_import, print_function import argparse import glob import multiprocessing @@ -9,10 +8,10 @@ from github import Github from . import github as smithy_github -from .utils import render_meta_yaml +from .utils import render_meta_yaml, yaml -def feedstock_repos(gh_organization): +def feedstock_repos(gh_organization="conda-forge"): token = smithy_github.gh_token() gh = Github(token) org = gh.get_organization(gh_organization) @@ -210,10 +209,7 @@ def yaml_meta(content): Read the contents of meta.yaml into a ruamel.yaml document. 
""" - yaml = ruamel.yaml.load( - render_meta_yaml(content), ruamel.yaml.RoundTripLoader - ) - return yaml + return yaml.load(render_meta_yaml(content)) def feedstocks_yaml( diff --git a/conda_smithy/github.py b/conda_smithy/github.py index 2c8bd0748..8d0411440 100644 --- a/conda_smithy/github.py +++ b/conda_smithy/github.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, print_function - import os from random import choice @@ -16,7 +14,9 @@ def gh_token(): try: - with open(os.path.expanduser("~/.conda-smithy/github.token"), "r") as fh: + with open( + os.path.expanduser("~/.conda-smithy/github.token"), "r" + ) as fh: token = fh.read().strip() if not token: raise ValueError() @@ -78,13 +78,17 @@ def get_cached_team(org, team_name, description=""): try: repo = org.get_repo("{}-feedstock".format(team_name)) - team = next((team for team in repo.get_teams() if team.name == team_name), None) + team = next( + (team for team in repo.get_teams() if team.name == team_name), None + ) if team: return team except GithubException: pass - team = next((team for team in org.get_teams() if team.name == team_name), None) + team = next( + (team for team in org.get_teams() if team.name == team_name), None + ) if not team: if description: team = create_team(org, team_name, description, []) @@ -127,7 +131,9 @@ def create_github_repo(args): gh_repo = user_or_org.create_repo( repo_name, has_wiki=False, - description="A conda-smithy repository for {}.".format(feedstock_name), + description="A conda-smithy repository for {}.".format( + feedstock_name + ), ) print("Created {} on github".format(gh_repo.full_name)) except GithubException as gh_except: @@ -216,7 +222,8 @@ def configure_github_team(meta, gh_repo, org, feedstock_name): team_name = feedstock_name current_maintainer_teams = list(gh_repo.get_teams()) team = next( - (team for team in current_maintainer_teams if team.name == team_name), None + (team for team in current_maintainer_teams if team.name == team_name), + None, ) current_maintainers = set() if not team: @@ -227,7 +234,9 @@ def configure_github_team(meta, gh_repo, org, feedstock_name): ) team.add_to_repos(gh_repo) else: - current_maintainers = set([e.login.lower() for e in team.get_members()]) + current_maintainers = set( + [e.login.lower() for e in team.get_members()] + ) # Get the all-members team description = "All of the awesome {} contributors!".format(org.login) @@ -258,7 +267,9 @@ def configure_github_team(meta, gh_repo, org, feedstock_name): # Add any new maintainer team maintainer_teams = set( - m.split("/")[1] for m in maintainer_teams if m.startswith(str(org.login)) + m.split("/")[1] + for m in maintainer_teams + if m.startswith(str(org.login)) ) current_maintainer_teams = [team.name for team in current_maintainer_teams] for maintainer_team in maintainer_teams - set(current_maintainer_teams): diff --git a/conda_smithy/lint_recipe.py b/conda_smithy/lint_recipe.py index 049adb234..48d4854a7 100644 --- a/conda_smithy/lint_recipe.py +++ b/conda_smithy/lint_recipe.py @@ -1,15 +1,9 @@ # -*- coding: utf-8 -*- -from __future__ import unicode_literals +from collections.abc import Sequence, Mapping -try: - from collections.abc import Sequence, Mapping +str_type = str - str_type = str -except ImportError: # python 2 - from collections import Sequence, Mapping - - str_type = basestring import copy import io import itertools @@ -17,7 +11,6 @@ import re import github -import ruamel.yaml from conda_build.metadata import ( ensure_valid_license_family, @@ -25,7 +18,7 @@ ) import 
conda_build.conda_interface -from .utils import render_meta_yaml +from .utils import render_meta_yaml, yaml FIELDS = copy.deepcopy(cbfields) @@ -53,6 +46,9 @@ TEST_KEYS = {"imports", "commands"} TEST_FILES = ["run_test.py", "run_test.sh", "run_test.bat", "run_test.pl"] + +NEEDED_FAMILIES = ["gpl", "bsd", "mit", "apache", "psf"] + sel_pat = re.compile(r"(.+?)\s*(#.*)?\[([^\[\]]+)\](?(2).*)$") jinja_pat = re.compile(r"\s*\{%\s*(set)\s+[^\s]+\s*=\s*[^\s]+\s*%\}") @@ -136,7 +132,7 @@ def lintify(meta, recipe_dir=None, conda_forge=False): recipe_dirname = os.path.basename(recipe_dir) if recipe_dir else "recipe" is_staged_recipes = recipe_dirname != "recipe" - + # 0: Top level keys should be expected unexpected_sections = [] for section in major_sections: @@ -395,6 +391,56 @@ def lintify(meta, recipe_dir=None, conda_forge=False): "[here](https://conda-forge.org/docs/maintainer/knowledge_base.html#compilers)." ) + # 22: Single space in pinned requirements + for section, requirements in requirements_section.items(): + for requirement in requirements or []: + req, _, _ = requirement.partition("#") + if "{{" in req: + continue + parts = req.split() + if len(parts) > 2 and parts[1] in [ + "!=", + "=", + "==", + ">", + "<", + "<=", + ">=", + ]: + # check for too many spaces + lints.append( + ( + "``requirements: {section}: {requirement}`` should not " + "contain a space between the relational operator and the version, i.e. " + "``{name} {pin}``" + ).format( + section=section, + requirement=requirement, + name=parts[0], + pin="".join(parts[1:]), + ) + ) + continue + # check that there is a space if there is a pin + bad_char_idx = [(parts[0].find(c), c) for c in "><="] + bad_char_idx = [bci for bci in bad_char_idx if bci[0] >= 0] + if bad_char_idx: + bad_char_idx.sort() + i = bad_char_idx[0][0] + lints.append( + ( + "``requirements: {section}: {requirement}`` must " + "contain a space between the name and the pin, i.e. 
" + "``{name} {pin}``" + ).format( + section=section, + requirement=requirement, + name=parts[0][:i], + pin=parts[0][i:] + "".join(parts[1:]), + ) + ) + continue + # hints # 1: suggest pip if "script" in build_section: @@ -409,8 +455,13 @@ def lintify(meta, recipe_dir=None, conda_forge=False): ) # 2: suggest python noarch (skip on feedstocks) - if build_section.get("noarch") is None and build_reqs and not any(["_compiler_stub" in b for b in build_reqs]) \ - and ("pip" in build_reqs) and (is_staged_recipes or not conda_forge): + if ( + build_section.get("noarch") is None + and build_reqs + and not any(["_compiler_stub" in b for b in build_reqs]) + and ("pip" in build_reqs) + and (is_staged_recipes or not conda_forge) + ): with io.open(meta_fname, "rt") as fh: in_runreqs = False no_arch_possible = True @@ -440,7 +491,9 @@ def lintify(meta, recipe_dir=None, conda_forge=False): license_family = about_section.get("license_family", license).lower() license_file = about_section.get("license_file", "") needed_families = ["gpl", "bsd", "mit", "apache", "psf"] - if license_file == "" and any(f for f in needed_families if f in license_family): + if license_file == "" and any( + f for f in needed_families if f in license_family + ): hints.append("license_file entry is missing, but is expected.") return lints, hints @@ -540,7 +593,7 @@ def main(recipe_dir, conda_forge=False, return_hints=False): with io.open(recipe_meta, "rt") as fh: content = render_meta_yaml("".join(fh)) - meta = ruamel.yaml.load(content, ruamel.yaml.RoundTripLoader) + meta = yaml.load(content) results, hints = lintify(meta, recipe_dir, conda_forge) if return_hints: return results, hints diff --git a/conda_smithy/templates/README.md.tmpl b/conda_smithy/templates/README.md.tmpl index 7860130fe..54beeaf84 100644 --- a/conda_smithy/templates/README.md.tmpl +++ b/conda_smithy/templates/README.md.tmpl @@ -156,9 +156,14 @@ Current build status {%- endif %} -{%- if not linux_ppc64le.enabled %} -![ppc64le disabled]({{ shield }}badge/ppc64le-disabled-lightgrey.svg) -{%- endif %} + {%- if not linux_ppc64le.enabled %} + + Linux_ppc64le + + ppc64le disabled + + + {%- endif %} {%- endif %} diff --git a/conda_smithy/templates/appveyor.yml.tmpl b/conda_smithy/templates/appveyor.yml.tmpl index 7feae5a24..bcfb465ca 100644 --- a/conda_smithy/templates/appveyor.yml.tmpl +++ b/conda_smithy/templates/appveyor.yml.tmpl @@ -58,4 +58,8 @@ build: off test_script: - conda.exe build {{ recipe_dir }} -m .ci_support\%CONFIG%.yaml deploy_script: + - set "GIT_BRANCH=%APPVEYOR_REPO_BRANCH%" +{%- if upload_on_branch %} + - set "UPLOAD_ON_BRANCH={{ upload_on_branch }}" +{%- endif %} - cmd: upload_package .\ .\{{ recipe_dir }} .ci_support\%CONFIG%.yaml diff --git a/conda_smithy/templates/azure-pipelines-linux.yml.tmpl b/conda_smithy/templates/azure-pipelines-linux.yml.tmpl index 40814c146..d91101d78 100644 --- a/conda_smithy/templates/azure-pipelines-linux.yml.tmpl +++ b/conda_smithy/templates/azure-pipelines-linux.yml.tmpl @@ -29,7 +29,11 @@ jobs: - script: | export CI=azure - .azure-pipelines/run_docker_build.sh + export GIT_BRANCH=$BUILD_SOURCEBRANCHNAME +{%- if upload_on_branch %} + export UPLOAD_ON_BRANCH="{{ upload_on_branch }}" +{%- endif %} + .scripts/run_docker_build.sh displayName: Run docker build env: BINSTAR_TOKEN: $(BINSTAR_TOKEN) diff --git a/conda_smithy/templates/azure-pipelines-osx.yml.tmpl b/conda_smithy/templates/azure-pipelines-osx.yml.tmpl index 75ebd6605..f1b9037fd 100644 --- a/conda_smithy/templates/azure-pipelines-osx.yml.tmpl +++ 
b/conda_smithy/templates/azure-pipelines-osx.yml.tmpl @@ -72,8 +72,12 @@ jobs: - script: | source activate base + export GIT_BRANCH=$BUILD_SOURCEBRANCHNAME +{%- if upload_on_branch %} + export UPLOAD_ON_BRANCH="{{ upload_on_branch }}" +{%- endif %} upload_package ./ ./recipe ./.ci_support/${CONFIG}.yaml - displayName: Upload recipe + displayName: Upload package env: BINSTAR_TOKEN: $(BINSTAR_TOKEN) condition: not(eq(variables['UPLOAD_PACKAGES'], 'False')) diff --git a/conda_smithy/templates/azure-pipelines-win.yml.tmpl b/conda_smithy/templates/azure-pipelines-win.yml.tmpl index 4dec0bfe1..e9fe6d101 100644 --- a/conda_smithy/templates/azure-pipelines-win.yml.tmpl +++ b/conda_smithy/templates/azure-pipelines-win.yml.tmpl @@ -100,7 +100,12 @@ jobs: condition: not(contains(variables['CONFIG'], 'vs2008')) - script: | + set "GIT_BRANCH=%BUILD_SOURCEBRANCHNAME%" +{%- if upload_on_branch %} + set "UPLOAD_ON_BRANCH={{ upload_on_branch }}" +{%- endif %} upload_package .\ .\{{ recipe_dir }} .ci_support\%CONFIG%.yaml + displayName: Upload package env: BINSTAR_TOKEN: $(BINSTAR_TOKEN) condition: not(eq(variables['UPLOAD_PACKAGES'], 'False')) diff --git a/conda_smithy/templates/circle.yml.tmpl b/conda_smithy/templates/circle.yml.tmpl index 9cb5d2e49..65b9cffd0 100644 --- a/conda_smithy/templates/circle.yml.tmpl +++ b/conda_smithy/templates/circle.yml.tmpl @@ -41,7 +41,7 @@ jobs: {%- if platform.startswith('linux') %} command: | export CI=circle - ./.circleci/run_docker_build.sh + ./.scripts/run_docker_build.sh {%- else %} command: | export CI=circle diff --git a/conda_smithy/templates/drone.yml.tmpl b/conda_smithy/templates/drone.yml.tmpl index dcf8a04ff..9f06b45b9 100644 --- a/conda_smithy/templates/drone.yml.tmpl +++ b/conda_smithy/templates/drone.yml.tmpl @@ -15,14 +15,19 @@ steps: CONFIG: {{ config_name }} UPLOAD_PACKAGES: {{ upload }} PLATFORM: {{ platform }} - #BINSTAR_TOKEN: "" + BINSTAR_TOKEN: + from_secret: BINSTAR_TOKEN {%- if 'linux' in platform %} commands: - export FEEDSTOCK_ROOT="$CI_WORKSPACE" - export RECIPE_ROOT="$FEEDSTOCK_ROOT/recipe" - export CI=drone + - export GIT_BRANCH="$DRONE_BRANCH" +{%- if upload_on_branch %} + - export UPLOAD_ON_BRANCH="{{ upload_on_branch }}" +{%- endif %} - sed -i '$ichown -R conda:conda "$FEEDSTOCK_ROOT"' /opt/docker/bin/entrypoint - - /opt/docker/bin/entrypoint $FEEDSTOCK_ROOT/.drone/build_steps.sh + - /opt/docker/bin/entrypoint $FEEDSTOCK_ROOT/.scripts/build_steps.sh - echo "Done building" {%- endif %} diff --git a/conda_smithy/templates/run_docker_build.sh.tmpl b/conda_smithy/templates/run_docker_build.sh.tmpl index e7601e8ec..a424a212b 100644 --- a/conda_smithy/templates/run_docker_build.sh.tmpl +++ b/conda_smithy/templates/run_docker_build.sh.tmpl @@ -64,6 +64,8 @@ export UPLOAD_PACKAGES="${UPLOAD_PACKAGES:-True}" -e BINSTAR_TOKEN \ -e HOST_USER_ID \ -e UPLOAD_PACKAGES \ + -e GIT_BRANCH \ + -e UPLOAD_ON_BRANCH \ -e CI \ $DOCKER_IMAGE \ {{ docker.command }} \ diff --git a/conda_smithy/templates/travis.yml.tmpl b/conda_smithy/templates/travis.yml.tmpl index 4a6fecb06..b5d12fbf3 100644 --- a/conda_smithy/templates/travis.yml.tmpl +++ b/conda_smithy/templates/travis.yml.tmpl @@ -40,9 +40,13 @@ matrix: {%- endif %} script: - export CI=travis + - export GIT_BRANCH="$TRAVIS_BRANCH" +{%- if upload_on_branch %} + - export UPLOAD_ON_BRANCH="{{ upload_on_branch }}" +{%- endif %} {% if 'osx' in platformset %} - if [[ ${PLATFORM} =~ .*osx.* ]]; then ./.travis/run_osx_build.sh; fi {%- endif %} {% if 'linux' in platformset %} - - if [[ ${PLATFORM} =~ .*linux.* ]]; 
diff --git a/conda_smithy/utils.py b/conda_smithy/utils.py
index 5ea283a5f..61df5c237 100644
--- a/conda_smithy/utils.py
+++ b/conda_smithy/utils.py
@@ -1,15 +1,24 @@
 import shutil
 import tempfile
 import jinja2
-import six
 import datetime
 import time
 import os
+import sys
+from pathlib import Path
 from collections import defaultdict
 from contextlib import contextmanager
+
 import ruamel.yaml
 
+# define global yaml API
+# round-trip loader, allowing duplicate keys
+# for handling # [filter] / # [not filter]
+yaml = ruamel.yaml.YAML(typ="rt")
+yaml.allow_duplicate_keys = True
+
+
 @contextmanager
 def tmp_directory():
     tmp_dir = tempfile.mkdtemp("_recipe")
@@ -53,7 +62,9 @@ def render_meta_yaml(text):
         )
     )
     mockos = MockOS()
-    content = env.from_string(text).render(os=mockos, environ=mockos.environ)
+    py_ver = "3.7"
+    context = {"os": mockos, "environ": mockos.environ, "PY_VER": py_ver}
+    content = env.from_string(text).render(context)
     return content
 
@@ -68,7 +79,7 @@ def update_conda_forge_config(feedstock_directory):
     forge_yaml = os.path.join(feedstock_directory, "conda-forge.yml")
     if os.path.exists(forge_yaml):
         with open(forge_yaml, "r") as fh:
-            code = ruamel.yaml.load(fh, ruamel.yaml.RoundTripLoader)
+            code = yaml.load(fh)
     else:
         code = {}
 
@@ -78,5 +89,4 @@
 
     yield code
 
-    with open(forge_yaml, "w") as fh:
-        fh.write(ruamel.yaml.dump(code, Dumper=ruamel.yaml.RoundTripDumper))
+    yaml.dump(code, Path(forge_yaml))
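The module-level ``yaml`` object defined above gives the whole codebase one shared round-trip YAML API in place of the deprecated ``ruamel.yaml.load(..., RoundTripLoader)`` / ``RoundTripDumper`` calls. A minimal, self-contained sketch of the behaviour it is configured for (the ``conda-forge.yml`` content here is purely illustrative):

    from pathlib import Path

    import ruamel.yaml

    # Round-trip mode preserves comments, so selector comments such as
    # "# [linux]" survive a load/edit/dump cycle; duplicate keys must be
    # tolerated because a selector-annotated key can appear once per platform.
    yaml = ruamel.yaml.YAML(typ="rt")
    yaml.allow_duplicate_keys = True

    doc = yaml.load("channels:\n  - conda-forge  # [linux]\n")
    doc["upload_on_branch"] = "main"
    yaml.dump(doc, Path("conda-forge.yml"))  # ruamel.yaml 0.16+ accepts a Path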
diff --git a/conda_smithy/variant_algebra.py b/conda_smithy/variant_algebra.py
index 8127fc206..068c130a8 100644
--- a/conda_smithy/variant_algebra.py
+++ b/conda_smithy/variant_algebra.py
@@ -17,7 +17,7 @@
 import toolz
 from conda_build.utils import ensure_list
 import conda_build.variants as variants
-from conda.models.version import VersionOrder
+from conda.exports import VersionOrder
 from conda_build.config import Config
 from functools import partial
@@ -123,7 +123,7 @@ def variant_add(v1: dict, v2: dict) -> Dict[str, Any]:
     # deal with __migrator: ordering
     if "__migrator" in v2:
         print(v2)
-        ordering = v2["__migrator"].get("ordering")
+        ordering = v2["__migrator"].get("ordering", {})
         print(ordering)
     else:
         ordering = {}
@@ -167,7 +167,9 @@ def variant_add(v1: dict, v2: dict) -> Dict[str, Any]:
         zk_out.extend(zk_l)
         zk_out.extend(zk_r)
-        zk_out = sorted([sorted(zk) for zk in zk_out], key=lambda x: (len(x), str(x)))
+        zk_out = sorted(
+            [sorted(zk) for zk in zk_out], key=lambda x: (len(x), str(x))
+        )
         joint.remove("zip_keys")
         special_variants["zip_keys"] = zk_out
@@ -175,7 +177,9 @@ def variant_add(v1: dict, v2: dict) -> Dict[str, Any]:
     joint_variant = {}
     for k in joint:
         v_left, v_right = ensure_list(v1[k]), ensure_list(v2[k])
-        joint_variant[k] = variant_key_add(k, v_left, v_right, ordering=ordering.get(k))
+        joint_variant[k] = variant_key_add(
+            k, v_left, v_right, ordering=ordering.get(k, None)
+        )
 
     out = {
         **toolz.keyfilter(lambda k: k in left, v1),
diff --git a/news/codeowners.rst b/news/codeowners.rst
deleted file mode 100644
index 7c22878d1..000000000
--- a/news/codeowners.rst
+++ /dev/null
@@ -1,4 +0,0 @@
-**Fixed:**
-
-* Fixed issue where only the last maintainer is review requested
-
diff --git a/news/henryiii-build-locally-python3.rst b/news/henryiii-build-locally-python3.rst
deleted file mode 100644
index 30eb7a1fd..000000000
--- a/news/henryiii-build-locally-python3.rst
+++ /dev/null
@@ -1,24 +0,0 @@
-**Added:**
-
-*
-
-**Changed:**
-
-* ``build-locally.py`` now uses ``python3`` even if ``python`` is ``python2`` (Python 3.6+ was already required)
-
-**Deprecated:**
-
-*
-
-**Removed:**
-
-*
-
-**Fixed:**
-
-*
-
-**Security:**
-
-*
-
diff --git a/news/license.rst b/news/license.rst
index 7a5ba95c8..427974d24 100644
--- a/news/license.rst
+++ b/news/license.rst
@@ -1,8 +1,3 @@
 **Added:**
 
 * license_file is hinted for GPL, MIT, BSD, APACHE, PSF
-
-**Fixed:**
-
-* Unlicense is allowed
-
diff --git a/news/remove_github.rst b/news/remove_github.rst
deleted file mode 100644
index eb876044e..000000000
--- a/news/remove_github.rst
+++ /dev/null
@@ -1,3 +0,0 @@
-**Removed:**
-
-* Github issue, PR and contributing files are removed as they are in https://github.com/conda-forge/.github
diff --git a/news/shyaml-more-versions.rst b/news/shyaml-more-versions.rst
deleted file mode 100644
index a0815a8c7..000000000
--- a/news/shyaml-more-versions.rst
+++ /dev/null
@@ -1,4 +0,0 @@
-**Fixed:**
-
-* Support newer ``shyaml`` versions by checking whether ``shyaml -h`` succeeds.
-
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 000000000..a8f43fefd
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,2 @@
+[tool.black]
+line-length = 79
diff --git a/requirements.txt b/requirements.txt
index 32cc2c89d..68ee7b1e4 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -5,7 +5,7 @@ conda
 conda-build >=3.4.2
 jinja2
 requests
-pycrypto
+pycryptodome
 gitpython
 pygithub <2
 ruamel.yaml
diff --git a/tests/conftest.py b/tests/conftest.py
index efc1f044c..a05addcf0 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -8,10 +8,15 @@
 from jinja2 import Environment, FileSystemLoader
 
 from conda_build.utils import copy_into
-from conda_smithy.configure_feedstock import conda_forge_content, _load_forge_config
+from conda_smithy.configure_feedstock import (
+    conda_forge_content,
+    _load_forge_config,
+)
 
-RecipeConfigPair = collections.namedtuple("RecipeConfigPair", ("recipe", "config"))
+RecipeConfigPair = collections.namedtuple(
+    "RecipeConfigPair", ("recipe", "config")
+)
 
 @pytest.fixture(scope="function")
@@ -46,15 +51,18 @@ def return_to_saved_path():
 def config_yaml(testing_workdir):
     config = {"python": ["2.7", "3.5"], "r_base": ["3.3.2", "3.4.2"]}
     os.makedirs(os.path.join(testing_workdir, "recipe"))
-    os.makedirs(os.path.join(testing_workdir, "migrations"))
     with open(os.path.join(testing_workdir, "config.yaml"), "w") as f:
         f.write("docker:\n")
         f.write("  fallback_image:\n")
         f.write("  - centos:6\n")
-    with open(os.path.join(testing_workdir, "recipe", "default_config.yaml"), "w") as f:
+    with open(
+        os.path.join(testing_workdir, "recipe", "default_config.yaml"), "w"
+    ) as f:
         yaml.dump(config, f, default_flow_style=False)
         # need selectors, so write these more manually
-        f.write(dedent("""\
+        f.write(
+            dedent(
+                """\
             target_platform:
             - win-64  # [win]
             - win-32  # [win]
@@ -68,19 +76,29 @@ def config_yaml(testing_workdir):
             - c_compiler  # [win]
             - python  # [win]
             - vc  # [win]
-            """))
+            """
+            )
+        )
         # dummy file that needs to be present for circle ci.
This is created by the init function os.makedirs(os.path.join(testing_workdir, ".circleci")) with open( - os.path.join(testing_workdir, ".circleci", "checkout_merge_commit.sh"), "w" + os.path.join(testing_workdir, ".circleci", "checkout_merge_commit.sh"), + "w", ) as f: f.write("echo dummy file") - with open(os.path.join(testing_workdir, "recipe", "short_config.yaml"), "w") as f: + with open( + os.path.join(testing_workdir, "recipe", "short_config.yaml"), "w" + ) as f: config = {"python": ["2.7"]} yaml.dump(config, f, default_flow_style=False) - with open(os.path.join(testing_workdir, "recipe", "long_config.yaml"), "w") as f: + with open( + os.path.join(testing_workdir, "recipe", "long_config.yaml"), "w" + ) as f: config = {"python": ["2.7", "3.5", "3.6"]} yaml.dump(config, f, default_flow_style=False) + with open(os.path.join(testing_workdir, "conda-forge.yml"), "w") as f: + config = {"upload_on_branch": "foo-branch"} + yaml.dump(config, f, default_flow_style=False) return testing_workdir @@ -104,8 +122,10 @@ def noarch_recipe(config_yaml, request): return RecipeConfigPair( str(config_yaml), _load_forge_config( - config_yaml, exclusive_config_file=os.path.join( - config_yaml, "recipe", "default_config.yaml") + config_yaml, + exclusive_config_file=os.path.join( + config_yaml, "recipe", "default_config.yaml" + ), ), ) @@ -130,8 +150,10 @@ def r_recipe(config_yaml, request): return RecipeConfigPair( str(config_yaml), _load_forge_config( - config_yaml, exclusive_config_file=os.path.join( - config_yaml, "recipe", "default_config.yaml") + config_yaml, + exclusive_config_file=os.path.join( + config_yaml, "recipe", "default_config.yaml" + ), ), ) @@ -158,8 +180,38 @@ def py_recipe(config_yaml, request): return RecipeConfigPair( str(config_yaml), _load_forge_config( - config_yaml, exclusive_config_file=os.path.join( - config_yaml, "recipe", "default_config.yaml") + config_yaml, + exclusive_config_file=os.path.join( + config_yaml, "recipe", "default_config.yaml" + ), + ), + ) + + +@pytest.fixture(scope="function") +def upload_on_branch_recipe(config_yaml, request): + with open(os.path.join(config_yaml, "recipe", "meta.yaml"), "w") as fh: + fh.write( + """ +package: + name: py-test + version: 1.0.0 +requirements: + build: # [win] + - {{ compiler('c') }} # [win] + host: + - python + run: + - python +about: + home: home + """ + ) + return RecipeConfigPair( + str(config_yaml), + _load_forge_config( + config_yaml, + exclusive_config_file=os.path.join(config_yaml, "conda-forge.yml"), ), ) @@ -184,17 +236,27 @@ def recipe_migration_cfep9(config_yaml, request): """ ) - with open(os.path.join(config_yaml, "migrations", "zlib.yaml"), "w") as fh: - fh.write(""" + os.makedirs( + os.path.join(config_yaml, ".ci_support", "migrations"), exist_ok=True + ) + with open( + os.path.join(config_yaml, ".ci_support", "migrations", "zlib.yaml"), + "w", + ) as fh: + fh.write( + """ zlib: - 1000 -""") +""" + ) return RecipeConfigPair( str(config_yaml), _load_forge_config( - config_yaml, exclusive_config_file=os.path.join( - config_yaml, "recipe", "default_config.yaml") + config_yaml, + exclusive_config_file=os.path.join( + config_yaml, "recipe", "default_config.yaml" + ), ), ) @@ -203,25 +265,48 @@ def recipe_migration_cfep9(config_yaml, request): def recipe_migration_cfep9_downgrade(config_yaml, recipe_migration_cfep9): # write a downgrade migrator that lives next to the current migrator. # Only this, more recent migrator should apply. 
- with open(os.path.join(config_yaml, "migrations", "zlib-downgrade.yaml"), "w") as fh: - fh.write(""" + os.makedirs( + os.path.join(config_yaml, ".ci_support", "migrations"), exist_ok=True + ) + with open( + os.path.join( + config_yaml, ".ci_support", "migrations", "zlib-downgrade.yaml" + ), + "w", + ) as fh: + fh.write( + """ migration_ts: 1.0 zlib: - 999 -""") - #return recipe_migration_cfep9 +""" + ) + # return recipe_migration_cfep9 return RecipeConfigPair( str(config_yaml), _load_forge_config( - config_yaml, exclusive_config_file=os.path.join( - config_yaml, "recipe", "default_config.yaml") + config_yaml, + exclusive_config_file=os.path.join( + config_yaml, "recipe", "default_config.yaml" + ), ), ) + @pytest.fixture(scope="function") def recipe_migration_win_compiled(config_yaml, py_recipe): - with open(os.path.join(config_yaml, "migrations", "vc-migrate.yaml"), "w") as fh: - fh.write(dedent(""" + os.makedirs( + os.path.join(config_yaml, ".ci_support", "migrations"), exist_ok=True + ) + with open( + os.path.join( + config_yaml, ".ci_support", "migrations", "vc-migrate.yaml" + ), + "w", + ) as fh: + fh.write( + dedent( + """ migration_ts: 1.0 c_compiler: # [win] - vs2008 # [win] @@ -237,12 +322,16 @@ def recipe_migration_win_compiled(config_yaml, py_recipe): - c_compiler # [win] - cxx_compiler # [win] - vc # [win] - """)) + """ + ) + ) return RecipeConfigPair( str(config_yaml), _load_forge_config( - config_yaml, exclusive_config_file=os.path.join( - config_yaml, "recipe", "default_config.yaml") + config_yaml, + exclusive_config_file=os.path.join( + config_yaml, "recipe", "default_config.yaml" + ), ), ) @@ -269,8 +358,10 @@ def skipped_recipe(config_yaml, request): return RecipeConfigPair( str(config_yaml), _load_forge_config( - config_yaml, exclusive_config_file=os.path.join( - config_yaml, "recipe", "default_config.yaml") + config_yaml, + exclusive_config_file=os.path.join( + config_yaml, "recipe", "default_config.yaml" + ), ), ) @@ -297,8 +388,10 @@ def python_skipped_recipe(config_yaml, request): return RecipeConfigPair( str(config_yaml), _load_forge_config( - config_yaml, exclusive_config_file=os.path.join( - config_yaml, "recipe", "default_config.yaml") + config_yaml, + exclusive_config_file=os.path.join( + config_yaml, "recipe", "default_config.yaml" + ), ), ) @@ -323,8 +416,48 @@ def linux_skipped_recipe(config_yaml, request): return RecipeConfigPair( str(config_yaml), _load_forge_config( - config_yaml, exclusive_config_file=os.path.join( - config_yaml, "recipe", "default_config.yaml") + config_yaml, + exclusive_config_file=os.path.join( + config_yaml, "recipe", "default_config.yaml" + ), + ), + ) + + +@pytest.fixture(scope="function") +def render_skipped_recipe(config_yaml, request): + with open(os.path.join(config_yaml, "recipe", "meta.yaml"), "w") as fh: + fh.write( + """ +package: + name: python-noarch-test + version: 1.0.0 +build: + noarch: python +requirements: + build: + - python + run: + - python + """ + ) + with open(os.path.join(config_yaml, "conda-forge.yml"), "a+") as fh: + fh.write( + """ +skip_render: + - .gitignore + - .gitattributes + - README.md + - LICENSE.txt + """ + ) + return RecipeConfigPair( + str(config_yaml), + _load_forge_config( + config_yaml, + exclusive_config_file=os.path.join( + config_yaml, "recipe", "default_config.yaml" + ), ), ) diff --git a/tests/recipes/cuda_docker_images/conda_build_config.yaml b/tests/recipes/cuda_docker_images/conda_build_config.yaml new file mode 100644 index 000000000..20010c163 --- /dev/null +++ 
b/tests/recipes/cuda_docker_images/conda_build_config.yaml
@@ -0,0 +1,18 @@
+cuda_compiler:  # [linux64]
+  - nvcc  # [linux64]
+cuda_compiler_version:  # [linux64]
+  - None  # [linux64]
+  - 9.2  # [linux64]
+  - 10.0  # [linux64]
+  - 10.1  # [linux64]
+
+docker_image:  # [linux64]
+  - condaforge/linux-anvil-comp7  # [linux64]
+  - condaforge/linux-anvil-cuda:9.2  # [linux64]
+  - condaforge/linux-anvil-cuda:10.0  # [linux64]
+  - condaforge/linux-anvil-cuda:10.1  # [linux64]
+
+zip_keys:  # [linux64]
+  -  # [linux64]
+    - cuda_compiler_version  # [linux64]
+    - docker_image  # [linux64]
diff --git a/tests/recipes/cuda_docker_images/meta.yaml b/tests/recipes/cuda_docker_images/meta.yaml
new file mode 100644
index 000000000..f0b14319b
--- /dev/null
+++ b/tests/recipes/cuda_docker_images/meta.yaml
@@ -0,0 +1,24 @@
+package:
+  name: test_cuda_docker_images
+  version: 1.0
+
+build:
+  number: 0
+  skip: true  # [not linux64]
+
+requirements:
+  build:
+    - {{ compiler("cuda") }}  # [cuda_compiler_version != "None"]
+
+test:
+  commands:
+    - echo 'works'
+
+about:
+  home: home
+  summary: summary
+  license: Creative Commons
+
+extra:
+  recipe-maintainers:
+    - gopher
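The ``zip_keys`` entry above ties ``cuda_compiler_version`` and ``docker_image`` together, so the two lists vary in lockstep instead of producing a 4x4 cross product; ``test_init_cuda_docker_images`` below asserts exactly this pairing. An illustrative sketch of the zipped matrix (not conda-build's actual implementation):

    # Zipped keys yield one variant per paired entry (4 configs),
    # rather than one per combination (16 configs).
    cuda_versions = ["None", "9.2", "10.0", "10.1"]
    docker_images = [
        "condaforge/linux-anvil-comp7",
        "condaforge/linux-anvil-cuda:9.2",
        "condaforge/linux-anvil-cuda:10.0",
        "condaforge/linux-anvil-cuda:10.1",
    ]
    for version, image in zip(cuda_versions, docker_images):
        print(f"linux_cuda_compiler_version{version}.yaml -> {image}")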
diff --git a/tests/recipes/multiple_docker_images/conda_build_config.yaml b/tests/recipes/multiple_docker_images/conda_build_config.yaml
new file mode 100644
index 000000000..188fd2ef0
--- /dev/null
+++ b/tests/recipes/multiple_docker_images/conda_build_config.yaml
@@ -0,0 +1,4 @@
+docker_image:  # [linux64]
+  - pickme_a  # [linux64]
+  - pickme_b  # [linux64]
+  - pickme_c  # [linux64]
diff --git a/tests/recipes/multiple_docker_images/meta.yaml b/tests/recipes/multiple_docker_images/meta.yaml
new file mode 100644
index 000000000..afbf4f8cc
--- /dev/null
+++ b/tests/recipes/multiple_docker_images/meta.yaml
@@ -0,0 +1,24 @@
+package:
+  name: test_multiple_docker_images
+  version: 1.0
+
+build:
+  number: 0
+  skip: true  # [not linux64]
+
+requirements:
+  build:
+    - make
+
+test:
+  commands:
+    - echo 'works'
+
+about:
+  home: home
+  summary: summary
+  license: Creative Commons
+
+extra:
+  recipe-maintainers:
+    - gopher
diff --git a/tests/test_cli.py b/tests/test_cli.py
index dc59f6a0e..2dcfbe6cd 100644
--- a/tests/test_cli.py
+++ b/tests/test_cli.py
@@ -16,7 +16,13 @@
 
 RegenerateArgs = collections.namedtuple(
     "ArgsObject",
-    ("commit", "feedstock_directory", "no_check_uptodate", "exclusive_config_file", "check"),
+    (
+        "commit",
+        "feedstock_directory",
+        "no_check_uptodate",
+        "exclusive_config_file",
+        "check",
+    ),
 )
@@ -44,7 +50,9 @@ def test_init_multiple_output_matrix(testing_workdir):
     init_obj = cli.Init(subparser)
     regen_obj = cli.Regenerate(subparser)
     recipe = os.path.join(_thisdir, "recipes", "multiple_outputs")
-    feedstock_dir = os.path.join(testing_workdir, "multiple-outputs-test-feedstock")
+    feedstock_dir = os.path.join(
+        testing_workdir, "multiple-outputs-test-feedstock"
+    )
     args = InitArgs(recipe_directory=recipe, feedstock_directory=feedstock_dir)
     init_obj(args)
     # Ignore conda-forge-pinning for this test, as the test relies on conda-forge-pinning
@@ -54,7 +62,7 @@
         commit=False,
         no_check_uptodate=True,
         exclusive_config_file="recipe/conda_build_config.yaml",
-        check=False
+        check=False,
     )
     regen_obj(args)
     matrix_dir = os.path.join(feedstock_dir, ".ci_support")
@@ -81,6 +89,83 @@
     assert "zlib" not in config
 
 
+def test_init_cuda_docker_images(testing_workdir):
+    parser = argparse.ArgumentParser()
+    subparser = parser.add_subparsers()
+    init_obj = cli.Init(subparser)
+    regen_obj = cli.Regenerate(subparser)
+    recipe = os.path.join(_thisdir, "recipes", "cuda_docker_images")
+    feedstock_dir = os.path.join(
+        testing_workdir, "cuda_docker_images-feedstock"
+    )
+    args = InitArgs(recipe_directory=recipe, feedstock_directory=feedstock_dir)
+    init_obj(args)
+    # Ignore conda-forge-pinning for this test, as the test relies on
+    # conda-forge-pinning not being present
+    args = RegenerateArgs(
+        feedstock_directory=feedstock_dir,
+        commit=False,
+        no_check_uptodate=True,
+        exclusive_config_file="recipe/conda_build_config.yaml",
+        check=False,
+    )
+    regen_obj(args)
+    matrix_dir = os.path.join(feedstock_dir, ".ci_support")
+    # the matrix should be consolidated among all outputs, as well as the
+    # top-level reqs. Only the top-level reqs should have independent config
+    # files, though - loops within outputs are contained in those top-level
+    # configs.
+    matrix_dir_len = len(os.listdir(matrix_dir))
+    assert matrix_dir_len == 5
+    for v in [None, "9.2", "10.0", "10.1"]:
+        fn = os.path.join(matrix_dir, f"linux_cuda_compiler_version{v}.yaml")
+        assert os.path.isfile(fn)
+        with open(fn) as fh:
+            config = yaml.load(fh)
+        assert config["cuda_compiler"] == ["nvcc"]
+        assert config["cuda_compiler_version"] == [f"{v}"]
+        if v is None:
+            docker_image = "condaforge/linux-anvil-comp7"
+        else:
+            docker_image = f"condaforge/linux-anvil-cuda:{v}"
+        assert config["docker_image"] == [docker_image]
+
+
+def test_init_multiple_docker_images(testing_workdir):
+    parser = argparse.ArgumentParser()
+    subparser = parser.add_subparsers()
+    init_obj = cli.Init(subparser)
+    regen_obj = cli.Regenerate(subparser)
+    recipe = os.path.join(_thisdir, "recipes", "multiple_docker_images")
+    feedstock_dir = os.path.join(
+        testing_workdir, "multiple_docker_images-feedstock"
+    )
+    args = InitArgs(recipe_directory=recipe, feedstock_directory=feedstock_dir)
+    init_obj(args)
+    # Ignore conda-forge-pinning for this test, as the test relies on
+    # conda-forge-pinning not being present
+    args = RegenerateArgs(
+        feedstock_directory=feedstock_dir,
+        commit=False,
+        no_check_uptodate=True,
+        exclusive_config_file="recipe/conda_build_config.yaml",
+        check=False,
+    )
+    regen_obj(args)
+    matrix_dir = os.path.join(feedstock_dir, ".ci_support")
+    # the matrix should be consolidated among all outputs, as well as the
+    # top-level reqs. Only the top-level reqs should have independent config
+    # files, though - loops within outputs are contained in those top-level
+    # configs.
+ matrix_dir_len = len(os.listdir(matrix_dir)) + assert matrix_dir_len == 2 + fn = os.path.join(matrix_dir, "linux_.yaml") + assert os.path.isfile(fn) + with open(fn) as fh: + config = yaml.load(fh) + assert config["docker_image"] == ["pickme_a"] + + def test_regenerate(py_recipe, testing_workdir): parser = argparse.ArgumentParser() subparser = parser.add_subparsers() @@ -103,8 +188,10 @@ def test_regenerate(py_recipe, testing_workdir): feedstock_directory=dest_dir, commit=False, no_check_uptodate=True, - exclusive_config_file=os.path.join(recipe, "recipe", "default_config.yaml"), - check=False + exclusive_config_file=os.path.join( + recipe, "recipe", "default_config.yaml" + ), + check=False, ) regen_obj(args) @@ -117,8 +204,10 @@ def test_regenerate(py_recipe, testing_workdir): feedstock_directory=dest_dir, commit=False, no_check_uptodate=True, - exclusive_config_file=os.path.join(recipe, "recipe", "short_config.yaml"), - check=False + exclusive_config_file=os.path.join( + recipe, "recipe", "short_config.yaml" + ), + check=False, ) regen_obj(args) diff --git a/tests/test_configure_feedstock.py b/tests/test_configure_feedstock.py index 4cab4c386..99be86a70 100644 --- a/tests/test_configure_feedstock.py +++ b/tests/test_configure_feedstock.py @@ -7,6 +7,7 @@ def test_noarch_skips_appveyor(noarch_recipe, jinja_env): + noarch_recipe.config["provider"]["win"] = "appveyor" cnfgr_fdstk.render_appveyor( jinja_env=jinja_env, forge_config=noarch_recipe.config, @@ -14,8 +15,6 @@ def test_noarch_skips_appveyor(noarch_recipe, jinja_env): ) # this configuration should be skipped assert not noarch_recipe.config["appveyor"]["enabled"] - # no appveyor.yaml should have been written. Nothing else, either, since we only ran - # appveyor render. No matrix dir should exist. assert not os.path.isdir(os.path.join(noarch_recipe.recipe, ".ci_support")) @@ -27,8 +26,6 @@ def test_noarch_skips_travis(noarch_recipe, jinja_env): ) # this configuration should be skipped assert not noarch_recipe.config["travis"]["enabled"] - # no appveyor.yaml should have been written. Nothing else, either, since we only ran - # appveyor render. No matrix dir should exist. assert not os.path.isdir(os.path.join(noarch_recipe.recipe, ".ci_support")) @@ -44,8 +41,6 @@ def test_noarch_runs_on_circle(noarch_recipe, jinja_env): # this configuration should be run assert noarch_recipe.config["circle"]["enabled"] - # no appveyor.yaml should have been written. Nothing else, either, since we only ran - # appveyor render. No matrix dir should exist. matrix_dir = os.path.join(noarch_recipe.recipe, ".ci_support") assert os.path.isdir(matrix_dir) # single matrix entry - readme is generated later in main function @@ -60,8 +55,6 @@ def test_noarch_runs_on_azure(noarch_recipe, jinja_env): ) # this configuration should be run assert noarch_recipe.config["azure"]["enabled"] - # no appveyor.yaml should have been written. Nothing else, either, since we only ran - # appveyor render. No matrix dir should exist. 
matrix_dir = os.path.join(noarch_recipe.recipe, ".ci_support") assert os.path.isdir(matrix_dir) # single matrix entry - readme is generated later in main function @@ -69,13 +62,14 @@ def test_noarch_runs_on_azure(noarch_recipe, jinja_env): def test_r_skips_appveyor(r_recipe, jinja_env): + r_recipe.config["provider"]["win"] = "appveyor" cnfgr_fdstk.render_appveyor( - jinja_env=jinja_env, forge_config=r_recipe.config, forge_dir=r_recipe.recipe + jinja_env=jinja_env, + forge_config=r_recipe.config, + forge_dir=r_recipe.recipe, ) # this configuration should be skipped assert not r_recipe.config["appveyor"]["enabled"] - # no appveyor.yaml should have been written. Nothing else, either, since we only ran - # appveyor render. No matrix dir should exist. assert not os.path.isdir(os.path.join(r_recipe.recipe, ".ci_support")) @@ -84,12 +78,12 @@ def test_r_matrix_travis(r_recipe, jinja_env): r_recipe.config["provider"]["osx"] = "travis" cnfgr_fdstk.render_travis( - jinja_env=jinja_env, forge_config=r_recipe.config, forge_dir=r_recipe.recipe + jinja_env=jinja_env, + forge_config=r_recipe.config, + forge_dir=r_recipe.recipe, ) # this configuration should be run assert r_recipe.config["travis"]["enabled"] - # no appveyor.yaml should have been written. Nothing else, either, since we only ran - # appveyor render. No matrix dir should exist. matrix_dir = os.path.join(r_recipe.recipe, ".ci_support") assert os.path.isdir(matrix_dir) # single matrix entry - readme is generated later in main function @@ -101,12 +95,12 @@ def test_r_matrix_on_circle(r_recipe, jinja_env): r_recipe.config["provider"]["linux"] = "circle" cnfgr_fdstk.render_circle( - jinja_env=jinja_env, forge_config=r_recipe.config, forge_dir=r_recipe.recipe + jinja_env=jinja_env, + forge_config=r_recipe.config, + forge_dir=r_recipe.recipe, ) # this configuration should be run assert r_recipe.config["circle"]["enabled"] - # no appveyor.yaml should have been written. Nothing else, either, since we only ran - # appveyor render. No matrix dir should exist. matrix_dir = os.path.join(r_recipe.recipe, ".ci_support") assert os.path.isdir(matrix_dir) # single matrix entry - readme is generated later in main function @@ -115,12 +109,12 @@ def test_r_matrix_on_circle(r_recipe, jinja_env): def test_r_matrix_azure(r_recipe, jinja_env): cnfgr_fdstk.render_azure( - jinja_env=jinja_env, forge_config=r_recipe.config, forge_dir=r_recipe.recipe + jinja_env=jinja_env, + forge_config=r_recipe.config, + forge_dir=r_recipe.recipe, ) # this configuration should be run assert r_recipe.config["azure"]["enabled"] - # no appveyor.yaml should have been written. Nothing else, either, since we only ran - # appveyor render. No matrix dir should exist. 
matrix_dir = os.path.join(r_recipe.recipe, ".ci_support") assert os.path.isdir(matrix_dir) # single matrix entry - readme is generated later in main function @@ -128,8 +122,11 @@ def test_r_matrix_azure(r_recipe, jinja_env): def test_py_matrix_appveyor(py_recipe, jinja_env): + py_recipe.config["provider"]["win"] = "appveyor" cnfgr_fdstk.render_appveyor( - jinja_env=jinja_env, forge_config=py_recipe.config, forge_dir=py_recipe.recipe + jinja_env=jinja_env, + forge_config=py_recipe.config, + forge_dir=py_recipe.recipe, ) # this configuration should be skipped assert py_recipe.config["appveyor"]["enabled"] @@ -145,7 +142,9 @@ def test_py_matrix_travis(py_recipe, jinja_env): py_recipe.config["provider"]["osx"] = "travis" cnfgr_fdstk.render_travis( - jinja_env=jinja_env, forge_config=py_recipe.config, forge_dir=py_recipe.recipe + jinja_env=jinja_env, + forge_config=py_recipe.config, + forge_dir=py_recipe.recipe, ) # this configuration should be run assert py_recipe.config["travis"]["enabled"] @@ -160,12 +159,12 @@ def test_py_matrix_on_circle(py_recipe, jinja_env): py_recipe.config["provider"]["linux"] = "circle" cnfgr_fdstk.render_circle( - jinja_env=jinja_env, forge_config=py_recipe.config, forge_dir=py_recipe.recipe + jinja_env=jinja_env, + forge_config=py_recipe.config, + forge_dir=py_recipe.recipe, ) # this configuration should be run assert py_recipe.config["circle"]["enabled"] - # no appveyor.yaml should have been written. Nothing else, either, since we only ran - # appveyor render. No matrix dir should exist. matrix_dir = os.path.join(py_recipe.recipe, ".ci_support") assert os.path.isdir(matrix_dir) # single matrix entry - readme is generated later in main function @@ -174,25 +173,109 @@ def test_py_matrix_on_circle(py_recipe, jinja_env): def test_py_matrix_on_azure(py_recipe, jinja_env): cnfgr_fdstk.render_azure( - jinja_env=jinja_env, forge_config=py_recipe.config, forge_dir=py_recipe.recipe + jinja_env=jinja_env, + forge_config=py_recipe.config, + forge_dir=py_recipe.recipe, ) # this configuration should be run assert py_recipe.config["azure"]["enabled"] - # no appveyor.yaml should have been written. Nothing else, either, since we only ran - # appveyor render. No matrix dir should exist. matrix_dir = os.path.join(py_recipe.recipe, ".ci_support") assert os.path.isdir(matrix_dir) # single matrix entry - readme is generated later in main function assert len(os.listdir(matrix_dir)) == 8 +def test_upload_on_branch_azure(upload_on_branch_recipe, jinja_env): + cnfgr_fdstk.render_azure( + jinja_env=jinja_env, + forge_config=upload_on_branch_recipe.config, + forge_dir=upload_on_branch_recipe.recipe, + ) + # Check that the parameter is in the configuration. + assert "upload_on_branch" in upload_on_branch_recipe.config + assert upload_on_branch_recipe.config["upload_on_branch"] == "foo-branch" + # Check that the parameter is in the generated file. 
+ with open( + os.path.join( + upload_on_branch_recipe.recipe, + ".azure-pipelines", + "azure-pipelines-osx.yml", + ) + ) as fp: + content_osx = yaml.load(fp) + assert ( + 'UPLOAD_ON_BRANCH="foo-branch"' + in content_osx["jobs"][0]["steps"][-1]["script"] + ) + assert ( + "BUILD_SOURCEBRANCHNAME" + in content_osx["jobs"][0]["steps"][-1]["script"] + ) + + with open( + os.path.join( + upload_on_branch_recipe.recipe, + ".azure-pipelines", + "azure-pipelines-win.yml", + ) + ) as fp: + content_win = yaml.load(fp) + assert ( + "UPLOAD_ON_BRANCH=foo-branch" + in content_win["jobs"][0]["steps"][-1]["script"] + ) + assert ( + "BUILD_SOURCEBRANCHNAME" + in content_win["jobs"][0]["steps"][-1]["script"] + ) + + with open( + os.path.join( + upload_on_branch_recipe.recipe, + ".azure-pipelines", + "azure-pipelines-linux.yml", + ) + ) as fp: + content_lin = yaml.load(fp) + assert ( + 'UPLOAD_ON_BRANCH="foo-branch"' + in content_lin["jobs"][0]["steps"][1]["script"] + ) + assert ( + "BUILD_SOURCEBRANCHNAME" + in content_lin["jobs"][0]["steps"][1]["script"] + ) + + +def test_upload_on_branch_appveyor(upload_on_branch_recipe, jinja_env): + upload_on_branch_recipe.config["provider"]["win"] = "appveyor" + cnfgr_fdstk.render_appveyor( + jinja_env=jinja_env, + forge_config=upload_on_branch_recipe.config, + forge_dir=upload_on_branch_recipe.recipe, + ) + # Check that the parameter is in the configuration. + assert "upload_on_branch" in upload_on_branch_recipe.config + assert upload_on_branch_recipe.config["upload_on_branch"] == "foo-branch" + + # Check that the parameter is in the generated file. + with open( + os.path.join(upload_on_branch_recipe.recipe, ".appveyor.yml") + ) as fp: + content = yaml.load(fp) + assert "%APPVEYOR_REPO_BRANCH%" in content["deploy_script"][0] + assert "UPLOAD_ON_BRANCH=foo-branch" in content["deploy_script"][1] + + def test_circle_with_yum_reqs(py_recipe, jinja_env): with open( os.path.join(py_recipe.recipe, "recipe", "yum_requirements.txt"), "w" ) as f: f.write("nano\n") cnfgr_fdstk.render_circle( - jinja_env=jinja_env, forge_config=py_recipe.config, forge_dir=py_recipe.recipe + jinja_env=jinja_env, + forge_config=py_recipe.config, + forge_dir=py_recipe.recipe, ) @@ -235,7 +318,9 @@ def test_circle_osx(py_recipe, jinja_env): forge_dir = py_recipe.recipe travis_yml_file = os.path.join(forge_dir, ".travis.yml") circle_osx_file = os.path.join(forge_dir, ".circleci", "run_osx_build.sh") - circle_linux_file = os.path.join(forge_dir, ".circleci", "run_docker_build.sh") + circle_linux_file = os.path.join( + forge_dir, ".scripts", "run_docker_build.sh" + ) circle_config_file = os.path.join(forge_dir, ".circleci", "config.yml") cnfgr_fdstk.render_circle( @@ -276,10 +361,13 @@ def test_circle_osx(py_recipe, jinja_env): def test_circle_skipped(linux_skipped_recipe, jinja_env): forge_dir = linux_skipped_recipe.recipe circle_osx_file = os.path.join(forge_dir, ".circleci", "run_osx_build.sh") - circle_linux_file = os.path.join(forge_dir, ".circleci", "run_docker_build.sh") + circle_linux_file = os.path.join( + forge_dir, ".scripts", "run_docker_build.sh" + ) circle_config_file = os.path.join(forge_dir, ".circleci", "config.yml") - cnfgr_fdstk.copy_feedstock_content(forge_dir) + config = copy.deepcopy(linux_skipped_recipe.config) + cnfgr_fdstk.copy_feedstock_content(config, forge_dir) cnfgr_fdstk.render_circle( jinja_env=jinja_env, forge_config=linux_skipped_recipe.config, @@ -289,10 +377,9 @@ def test_circle_skipped(linux_skipped_recipe, jinja_env): assert not os.path.exists(circle_linux_file) 
assert os.path.exists(circle_config_file) - config = copy.deepcopy(linux_skipped_recipe.config) config["provider"]["osx"] = "circle" - cnfgr_fdstk.copy_feedstock_content(forge_dir) + cnfgr_fdstk.copy_feedstock_content(config, forge_dir) cnfgr_fdstk.render_circle( jinja_env=jinja_env, forge_config=config, forge_dir=forge_dir ) @@ -311,11 +398,14 @@ def test_render_with_all_skipped_generates_readme(skipped_recipe, jinja_env): def test_render_windows_with_skipped_python(python_skipped_recipe, jinja_env): config = python_skipped_recipe.config + config["provider"]["win"] = "appveyor" config["exclusive_config_file"] = os.path.join( python_skipped_recipe.recipe, "recipe", "long_config.yaml" ) cnfgr_fdstk.render_appveyor( - jinja_env=jinja_env, forge_config=config, forge_dir=python_skipped_recipe.recipe + jinja_env=jinja_env, + forge_config=config, + forge_dir=python_skipped_recipe.recipe, ) # this configuration should be skipped assert python_skipped_recipe.config["appveyor"]["enabled"] @@ -347,14 +437,18 @@ def test_migrator_recipe(recipe_migration_cfep9, jinja_env): with open( os.path.join( - recipe_migration_cfep9.recipe, ".ci_support", "linux_python2.7.yaml" + recipe_migration_cfep9.recipe, + ".ci_support", + "linux_python2.7.yaml", ) ) as fo: variant = yaml.safe_load(fo) assert variant["zlib"] == ["1000"] -def test_migrator_downgrade_recipe(recipe_migration_cfep9_downgrade, jinja_env): +def test_migrator_downgrade_recipe( + recipe_migration_cfep9_downgrade, jinja_env +): """ Assert that even when we have two migrations targeting the same file the correct one wins. """ @@ -363,18 +457,33 @@ def test_migrator_downgrade_recipe(recipe_migration_cfep9_downgrade, jinja_env): forge_config=recipe_migration_cfep9_downgrade.config, forge_dir=recipe_migration_cfep9_downgrade.recipe, ) - assert len(os.listdir(os.path.join(recipe_migration_cfep9_downgrade.recipe, 'migrations'))) == 2 + assert ( + len( + os.listdir( + os.path.join( + recipe_migration_cfep9_downgrade.recipe, + ".ci_support", + "migrations", + ) + ) + ) + == 2 + ) with open( os.path.join( - recipe_migration_cfep9_downgrade.recipe, ".ci_support", "linux_python2.7.yaml" + recipe_migration_cfep9_downgrade.recipe, + ".ci_support", + "linux_python2.7.yaml", ) ) as fo: variant = yaml.safe_load(fo) assert variant["zlib"] == ["1000"] -def test_migrator_compiler_version_recipe(recipe_migration_win_compiled, jinja_env): +def test_migrator_compiler_version_recipe( + recipe_migration_win_compiled, jinja_env +): """ Assert that even when we have two migrations targeting the same file the correct one wins. 
""" @@ -383,11 +492,56 @@ def test_migrator_compiler_version_recipe(recipe_migration_win_compiled, jinja_e forge_config=recipe_migration_win_compiled.config, forge_dir=recipe_migration_win_compiled.recipe, ) - assert len(os.listdir(os.path.join(recipe_migration_win_compiled.recipe, 'migrations'))) == 1 + assert ( + len( + os.listdir( + os.path.join( + recipe_migration_win_compiled.recipe, + ".ci_support", + "migrations", + ) + ) + ) + == 1 + ) + + rendered_variants = os.listdir( + os.path.join(recipe_migration_win_compiled.recipe, ".ci_support") + ) + + assert ( + "win_c_compilervs2008python2.7target_platformwin-32.yaml" + in rendered_variants + ) + assert ( + "win_c_compilervs2008python2.7target_platformwin-64.yaml" + in rendered_variants + ) + assert ( + "win_c_compilervs2017python3.5target_platformwin-32.yaml" + in rendered_variants + ) + assert ( + "win_c_compilervs2017python3.5target_platformwin-64.yaml" + in rendered_variants + ) - rendered_variants = os.listdir(os.path.join(recipe_migration_win_compiled.recipe, ".ci_support")) - - assert 'win_c_compilervs2008python2.7target_platformwin-32.yaml' in rendered_variants - assert 'win_c_compilervs2008python2.7target_platformwin-64.yaml' in rendered_variants - assert 'win_c_compilervs2017python3.5target_platformwin-32.yaml' in rendered_variants - assert 'win_c_compilervs2017python3.5target_platformwin-64.yaml' in rendered_variants + +def test_files_skip_render(render_skipped_recipe, jinja_env): + cnfgr_fdstk.render_README( + jinja_env=jinja_env, + forge_config=render_skipped_recipe.config, + forge_dir=render_skipped_recipe.recipe, + ) + cnfgr_fdstk.copy_feedstock_content( + render_skipped_recipe.config, render_skipped_recipe.recipe + ) + skipped_files = [ + ".gitignore", + ".gitattributes", + "README.md", + "LICENSE.txt", + ] + for f in skipped_files: + fpath = os.path.join(render_skipped_recipe.recipe, f) + assert not os.path.exists(fpath) diff --git a/tests/test_feedstock_io.py b/tests/test_feedstock_io.py index 6ae3d501e..08c394944 100644 --- a/tests/test_feedstock_io.py +++ b/tests/test_feedstock_io.py @@ -1,5 +1,3 @@ -from __future__ import unicode_literals - import functools import io import operator as op @@ -26,7 +24,10 @@ def parameterize(): lambda pth, tmp_dir: os.path.relpath(pth, tmp_dir), lambda pth, tmp_dir: pth, ]: - for get_repo in [lambda tmp_dir: None, lambda tmp_dir: git.Repo.init(tmp_dir)]: + for get_repo in [ + lambda tmp_dir: None, + lambda tmp_dir: git.Repo.init(tmp_dir), + ]: try: tmp_dir = tempfile.mkdtemp() keep_dir(tmp_dir) @@ -34,7 +35,11 @@ def parameterize(): old_dir = os.getcwd() os.chdir(tmp_dir) - yield (tmp_dir, get_repo(tmp_dir), lambda pth: pathfunc(pth, tmp_dir)) + yield ( + tmp_dir, + get_repo(tmp_dir), + lambda pth: pathfunc(pth, tmp_dir), + ) finally: os.chdir(old_dir) shutil.rmtree(tmp_dir) @@ -57,7 +62,9 @@ def test_repo(self): if repo is None: self.assertTrue(fio.get_repo(pathfunc(tmp_dir)) is None) else: - self.assertIsInstance(fio.get_repo(pathfunc(tmp_dir)), git.Repo) + self.assertIsInstance( + fio.get_repo(pathfunc(tmp_dir)), git.Repo + ) def test_set_exe_file(self): perms = [stat.S_IXUSR, stat.S_IXGRP, stat.S_IXOTH] @@ -68,7 +75,9 @@ def test_set_exe_file(self): for tmp_dir, repo, pathfunc in parameterize(): filename = "test.txt" filename = os.path.join(tmp_dir, filename) - with io.open(filename, "w", encoding="utf-8", newline="\n") as fh: + with io.open( + filename, "w", encoding="utf-8", newline="\n" + ) as fh: fh.write("") if repo is not None: repo.index.add([filename]) @@ -79,7 +88,9 @@ 
def test_set_exe_file(self): self.assertEqual(file_mode & set_mode, int(set_exe) * set_mode) if repo is not None: blob = next(repo.index.iter_blobs(BlobFilter(filename)))[1] - self.assertEqual(blob.mode & set_mode, int(set_exe) * set_mode) + self.assertEqual( + blob.mode & set_mode, int(set_exe) * set_mode + ) def test_write_file(self): for tmp_dir, repo, pathfunc in parameterize(): @@ -133,7 +144,9 @@ def test_remove_file(self): filename = os.path.join(tmp_dir, filename) - with io.open(filename, "w", encoding="utf-8", newline="\n") as fh: + with io.open( + filename, "w", encoding="utf-8", newline="\n" + ) as fh: fh.write("") if repo is not None: repo.index.add([filename]) @@ -143,7 +156,9 @@ def test_remove_file(self): self.assertTrue(os.path.exists(dirname)) self.assertTrue(os.path.exists(os.path.dirname(dirname))) if repo is not None: - self.assertTrue(list(repo.index.iter_blobs(BlobFilter(filename)))) + self.assertTrue( + list(repo.index.iter_blobs(BlobFilter(filename))) + ) fio.remove_file(pathfunc(filename)) @@ -152,7 +167,9 @@ def test_remove_file(self): self.assertFalse(os.path.exists(dirname)) self.assertFalse(os.path.exists(os.path.dirname(dirname))) if repo is not None: - self.assertFalse(list(repo.index.iter_blobs(BlobFilter(filename)))) + self.assertFalse( + list(repo.index.iter_blobs(BlobFilter(filename))) + ) def test_copy_file(self): for tmp_dir, repo, pathfunc in parameterize(): @@ -169,14 +186,18 @@ def test_copy_file(self): self.assertTrue(os.path.exists(filename1)) self.assertFalse(os.path.exists(filename2)) if repo is not None: - self.assertFalse(list(repo.index.iter_blobs(BlobFilter(filename2)))) + self.assertFalse( + list(repo.index.iter_blobs(BlobFilter(filename2))) + ) fio.copy_file(pathfunc(filename1), pathfunc(filename2)) self.assertTrue(os.path.exists(filename1)) self.assertTrue(os.path.exists(filename2)) if repo is not None: - self.assertTrue(list(repo.index.iter_blobs(BlobFilter(filename2)))) + self.assertTrue( + list(repo.index.iter_blobs(BlobFilter(filename2))) + ) read_text = "" with io.open(filename2, "r", encoding="utf-8") as fh: diff --git a/tests/test_lint_recipe.py b/tests/test_lint_recipe.py index 7a9a3325f..5f8065e28 100644 --- a/tests/test_lint_recipe.py +++ b/tests/test_lint_recipe.py @@ -1,6 +1,5 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -from __future__ import print_function, unicode_literals from collections import OrderedDict from contextlib import contextmanager import io @@ -116,12 +115,15 @@ def test_maintainers_section(self): self.assertNotIn(expected_message, lints) expected_message = ( - 'The "extra" section was expected to be a ' "dictionary, but got a list." + 'The "extra" section was expected to be a ' + "dictionary, but got a list." ) lints, hints = linter.lintify({"extra": ["recipe-maintainers"]}) self.assertIn(expected_message, lints) - lints, hints = linter.lintify({"extra": {"recipe-maintainers": "Luke"}}) + lints, hints = linter.lintify( + {"extra": {"recipe-maintainers": "Luke"}} + ) expected_message = "Recipe maintainers should be a json list." self.assertIn(expected_message, lints) @@ -205,8 +207,9 @@ def assert_selector(selector, is_good=True): "lint for '{}'.".format(selector) ) else: - message = "Expecting lints for '{}', but didn't get any." "".format( - selector + message = ( + "Expecting lints for '{}', but didn't get any." 
+ "".format(selector) ) self.assertEqual( not is_good, @@ -230,7 +233,8 @@ def assert_noarch_selector(meta_string, is_good=False): lints = linter.main(recipe_dir) if is_good: message = ( - "Found lints when there shouldn't have " "been a lint for '{}'." + "Found lints when there shouldn't have " + "been a lint for '{}'." ).format(meta_string) else: message = ( @@ -297,6 +301,8 @@ def assert_noarch_selector(meta_string, is_good=False): requirements: build: - python + host: # empty sections are allowed and ignored + run: # empty sections are allowed and ignored tests: commands: - cp asd qwe # [unix] @@ -325,6 +331,7 @@ def assert_noarch_selector(meta_string, is_good=False): build: noarch: python requirements: + build: # empty sections are allowed and ignored run: - python - enum34 # [py2k] @@ -374,7 +381,8 @@ def assert_noarch_hint(meta_string, is_good=False): lints, hints = linter.main(recipe_dir, return_hints=True) if is_good: message = ( - "Found hints when there shouldn't have " "been a lint for '{}'." + "Found hints when there shouldn't have " + "been a lint for '{}'." ).format(meta_string) else: message = ( @@ -391,7 +399,7 @@ def assert_noarch_hint(meta_string, is_good=False): build: noarch: python script: - - echo "hello" + - echo "hello" requirements: build: - python @@ -403,7 +411,7 @@ def assert_noarch_hint(meta_string, is_good=False): """ build: script: - - echo "hello" + - echo "hello" requirements: build: - python @@ -414,7 +422,7 @@ def assert_noarch_hint(meta_string, is_good=False): """ build: script: - - echo "hello" + - echo "hello" requirements: build: - python @@ -425,7 +433,7 @@ def assert_noarch_hint(meta_string, is_good=False): """ build: script: - - echo "hello" + - echo "hello" requirements: build: - python @@ -507,7 +515,11 @@ def test_missing_build_number(self): expected_message = "The recipe must have a `build/number` section." meta = { - "build": {"skip": "True", "script": "python setup.py install", "number": 0} + "build": { + "skip": "True", + "script": "python setup.py install", + "number": 0, + } } lints, hints = linter.lintify(meta) self.assertNotIn(expected_message, lints) @@ -550,7 +562,9 @@ def test_no_sha_with_dl(self): lints, hints = linter.lintify({"source": {"url": None, "sha1": None}}) self.assertNotIn(expected_message, lints) - lints, hints = linter.lintify({"source": {"url": None, "sha256": None}}) + lints, hints = linter.lintify( + {"source": {"url": None, "sha256": None}} + ) self.assertNotIn(expected_message, lints, hints) meta = {"source": {"url": None, "md5": None}} @@ -579,9 +593,7 @@ def test_license_file_present(self): } } lints, hints = linter.lintify(meta) - expected_message = ( - "license_file entry is missing, but is expected." - ) + expected_message = "license_file entry is missing, but is expected." 
self.assertNotIn(expected_message, lints) self.assertIn(expected_message, hints) @@ -660,20 +672,28 @@ def test_maintainer_exists(self): expected_message = "Feedstock with the same name exists in conda-forge" # Check that feedstock exists if staged_recipes lints = linter.lintify( - {"package": {"name": "python"}}, recipe_dir="python", conda_forge=True + {"package": {"name": "python"}}, + recipe_dir="python", + conda_forge=True, ) self.assertIn(expected_message, lints) lints = linter.lintify( - {"package": {"name": "python"}}, recipe_dir="python", conda_forge=False + {"package": {"name": "python"}}, + recipe_dir="python", + conda_forge=False, ) self.assertNotIn(expected_message, lints) # No lint if in a feedstock lints = linter.lintify( - {"package": {"name": "python"}}, recipe_dir="recipe", conda_forge=True + {"package": {"name": "python"}}, + recipe_dir="recipe", + conda_forge=True, ) self.assertNotIn(expected_message, lints) lints = linter.lintify( - {"package": {"name": "python"}}, recipe_dir="recipe", conda_forge=False + {"package": {"name": "python"}}, + recipe_dir="recipe", + conda_forge=False, ) self.assertNotIn(expected_message, lints) @@ -692,7 +712,9 @@ def test_maintainer_exists(self): ) else: lints = linter.lintify( - {"package": {"name": "python1"}}, recipe_dir="python", conda_forge=True + {"package": {"name": "python1"}}, + recipe_dir="python", + conda_forge=True, ) self.assertNotIn(expected_message, lints) @@ -727,7 +749,9 @@ def test_maintainer_exists(self): ) self.assertNotIn(expected_message, lints) lints = linter.lintify( - {"package": {"name": r}}, recipe_dir="recipe", conda_forge=False + {"package": {"name": r}}, + recipe_dir="recipe", + conda_forge=False, ) self.assertNotIn(expected_message, lints) # No lint if the name isn't specified @@ -756,13 +780,21 @@ def test_bad_subheader(self): " name." ) meta = { - "build": {"skip": "True", "script": "python setup.py install", "number": 0} + "build": { + "skip": "True", + "script": "python setup.py install", + "number": 0, + } } lints, hints = linter.lintify(meta) self.assertNotIn(expected_message.format("build", "ski"), lints) meta = { - "build": {"ski": "True", "script": "python setup.py install", "number": 0} + "build": { + "ski": "True", + "script": "python setup.py install", + "number": 0, + } } lints, hints = linter.lintify(meta) self.assertIn(expected_message.format("build", "ski"), lints) @@ -786,13 +818,18 @@ def test_version(self): self.assertNotIn(expected_message, lints) meta = {"package": {"name": "python", "version": "2.0.0~alpha0"}} - expected_message = "Package version 2.0.0~alpha0 doesn't match conda spec" + expected_message = ( + "Package version 2.0.0~alpha0 doesn't match conda spec" + ) lints, hints = linter.lintify(meta) self.assertIn(expected_message, lints) @unittest.skipUnless(is_gh_token_set(), "GH_TOKEN not set") def test_examples(self): - msg = "Please move the recipe out of the example dir and into its " "own dir." + msg = ( + "Please move the recipe out of the example dir and into its " + "own dir." 
+ ) lints, hints = linter.lintify( {"extra": {"recipe-maintainers": ["support"]}}, recipe_dir="recipes/example/", @@ -807,7 +844,9 @@ def test_examples(self): self.assertNotIn(msg, lints) def test_multiple_sources(self): - lints = linter.main(os.path.join(_thisdir, "recipes", "multiple_sources")) + lints = linter.main( + os.path.join(_thisdir, "recipes", "multiple_sources") + ) assert not lints def test_string_source(self): @@ -819,6 +858,30 @@ def test_string_source(self): ).format(type(url).__module__, type(url).__name__) self.assertIn(msg, lints) + def test_single_space_pins(self): + meta = { + "requirements": { + "build": ["{{ compilers('c') }}", "python >=3", "pip 19"], + "host": ["python >= 2", "libcblas 3.8.* *netlib"], + "run": ["xonsh>1.0", "conda= 4.*", "conda-smithy<=54.*"], + } + } + lints, hints = linter.lintify(meta) + filtered_lints = [ + lint for lint in lints if lint.startswith("``requirements: ") + ] + expected_messages = [ + "``requirements: host: python >= 2`` should not contain a space between " + "relational operator and the version, i.e. ``python >=2``", + "``requirements: run: xonsh>1.0`` must contain a space between the " + "name and the pin, i.e. ``xonsh >1.0``", + "``requirements: run: conda= 4.*`` must contain a space between the " + "name and the pin, i.e. ``conda =4.*``", + "``requirements: run: conda-smithy<=54.*`` must contain a space " + "between the name and the pin, i.e. ``conda-smithy <=54.*``", + ] + self.assertEqual(expected_messages, filtered_lints) + @pytest.mark.cli class TestCLI_recipe_lint(unittest.TestCase): @@ -836,7 +899,8 @@ def test_cli_fail(self): ) ) child = subprocess.Popen( - ["conda-smithy", "recipe-lint", recipe_dir], stdout=subprocess.PIPE + ["conda-smithy", "recipe-lint", recipe_dir], + stdout=subprocess.PIPE, ) out, _ = child.communicate() self.assertEqual(child.returncode, 1, out) @@ -866,7 +930,8 @@ def test_cli_success(self): ) ) child = subprocess.Popen( - ["conda-smithy", "recipe-lint", recipe_dir], stdout=subprocess.PIPE + ["conda-smithy", "recipe-lint", recipe_dir], + stdout=subprocess.PIPE, ) out, _ = child.communicate() self.assertEqual(child.returncode, 0, out) @@ -898,7 +963,8 @@ def test_cli_environ(self): ) ) child = subprocess.Popen( - ["conda-smithy", "recipe-lint", recipe_dir], stdout=subprocess.PIPE + ["conda-smithy", "recipe-lint", recipe_dir], + stdout=subprocess.PIPE, ) out, _ = child.communicate() self.assertEqual(child.returncode, 0, out) @@ -955,8 +1021,9 @@ def assert_jinja(jinja_var, is_good=True): "lint for '{}'.".format(jinja_var) ) else: - message = "Expecting lints for '{}', but didn't get any." "".format( - jinja_var + message = ( + "Expecting lints for '{}', but didn't get any." 
+ "".format(jinja_var) ) self.assertEqual( not is_good, diff --git a/tests/test_variant_algebra.py b/tests/test_variant_algebra.py index a106da9d6..80991ef1e 100644 --- a/tests/test_variant_algebra.py +++ b/tests/test_variant_algebra.py @@ -96,6 +96,96 @@ def test_ordering(): # raise Exception() +def test_no_ordering(): + start = parse_variant( + dedent( + """\ + xyz: + - 1 + """ + ) + ) + + mig_compiler = parse_variant( + dedent( + """\ + __migrator: + kind: + version + migration_no: + 1 + xyz: + - 2 + """ + ) + ) + + res = variant_add(start, mig_compiler) + assert res["xyz"] == ["2"] + print(res) + # raise Exception() + + +def test_ordering_downgrade(): + start = parse_variant( + dedent( + """\ + jpeg: + - 3.0 + """ + ) + ) + + mig_compiler = parse_variant( + dedent( + """\ + __migrator: + ordering: + jpeg: + - 3.0 + - 2.0 + jpeg: + - 2.0 + """ + ) + ) + + res = variant_add(start, mig_compiler) + assert res["jpeg"] == ["2.0"] + print(res) + + +def test_new_pinned_package(): + start = parse_variant( + dedent( + """\ + pin_run_as_build: + jpeg: + max_pin: x + jpeg: + - 3.0 + """ + ) + ) + + mig_compiler = parse_variant( + dedent( + """\ + pin_run_as_build: + gprc-cpp: + max_pin: x.x + gprc-cpp: + - 1.23 + """ + ) + ) + + res = variant_add(start, mig_compiler) + assert res["gprc-cpp"] == ["1.23"] + assert res["pin_run_as_build"]["gprc-cpp"]["max_pin"] == "x.x" + print(res) + + def test_zip_keys(): start = parse_variant( dedent( @@ -134,8 +224,9 @@ def test_zip_keys(): def test_migrate_windows_compilers(): - start = parse_variant(dedent( - """ + start = parse_variant( + dedent( + """ c_compiler: - vs2008 - vs2015 @@ -146,10 +237,12 @@ def test_migrate_windows_compilers(): - - vc - c_compiler """ - )) + ) + ) - mig = parse_variant(dedent( - """ + mig = parse_variant( + dedent( + """ c_compiler: - vs2008 - vs2017 @@ -157,14 +250,15 @@ def test_migrate_windows_compilers(): - '9' - '14.1' """ - )) + ) + ) res = variant_add(start, mig) print(res) assert len(res["c_compiler"]) == 2 - assert res["c_compiler"] == ['vs2008', 'vs2017'] - assert len(res['zip_keys'][0]) == 2 + assert res["c_compiler"] == ["vs2008", "vs2017"] + assert len(res["zip_keys"][0]) == 2 def test_pin_run_as_build(): @@ -196,4 +290,3 @@ def test_pin_run_as_build(): print(res) assert len(res["pin_run_as_build"]) == 3 - diff --git a/versioneer.py b/versioneer.py index 343f42d4b..57539aad4 100644 --- a/versioneer.py +++ b/versioneer.py @@ -1,4 +1,3 @@ - # Version: 0.15+dev """The Versioneer - like a rocketeer, but for versions. @@ -342,12 +341,7 @@ """ -from __future__ import print_function - -try: - import configparser -except ImportError: - import ConfigParser as configparser +import configparser import errno import json import os