diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index d885e87..f727abc 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -36,7 +36,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: [ '2.x', '3.x'] + python-version: [ '3.x'] steps: - name: Check out code uses: actions/checkout@v1 @@ -58,7 +58,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: [ '2.x', '3.x'] + python-version: [ '3.x'] steps: - name: Check out code uses: actions/checkout@v1 @@ -74,5 +74,5 @@ jobs: echo "$HOME/.local/bin" >> $GITHUB_PATH - name: Test usage run: | - skipper make clean + sudo skipper make clean diff --git a/.github/workflows/publish-to-pypi.yml b/.github/workflows/publish-to-pypi.yml index bd6def3..059c3d3 100644 --- a/.github/workflows/publish-to-pypi.yml +++ b/.github/workflows/publish-to-pypi.yml @@ -11,10 +11,10 @@ jobs: steps: - name: Check out code uses: actions/checkout@v1 - - name: Set up Python 2.7 + - name: Set up Python 3 uses: actions/setup-python@v1 with: - python-version: 2.7.18 + python-version: '3.x' - name: Install dependencies run: | python -m pip install --upgrade pip diff --git a/.pylintrc b/.pylintrc index a05f2fa..156aeab 100644 --- a/.pylintrc +++ b/.pylintrc @@ -34,16 +34,6 @@ unsafe-load-any-extension=no # run arbitrary code extension-pkg-whitelist= -# Allow optimization of some AST trees. This will activate a peephole AST -# optimizer, which will apply various small optimizations. For instance, it can -# be used to obtain the result of joining multiple strings with the addition -# operator. Joining a lot of strings can lead to a maximum recursion error in -# Pylint and this flag can prevent that. It has one side effect, the resulting -# AST will be different than the one from reality. This option is deprecated -# and it will be removed in Pylint 2.0. -optimize-ast=no - - [MESSAGES CONTROL] # Only show warnings with the listed confidence levels. 
Leave empty to show @@ -65,7 +55,7 @@ confidence= # --enable=similarities". If you want to run only the classes checker, but have # no Warning level messages displayed, use"--disable=all --enable=classes # --disable=W" -disable=import-star-module-level,old-octal-literal,oct-method,print-statement,unpacking-in-except,parameter-unpacking,backtick,old-raise-syntax,old-ne-operator,long-suffix,dict-view-method,dict-iter-method,metaclass-assignment,next-method-called,raising-string,indexing-exception,raw_input-builtin,long-builtin,file-builtin,execfile-builtin,coerce-builtin,cmp-builtin,buffer-builtin,basestring-builtin,apply-builtin,filter-builtin-not-iterating,using-cmp-argument,useless-suppression,range-builtin-not-iterating,suppressed-message,no-absolute-import,old-division,cmp-method,reload-builtin,zip-builtin-not-iterating,intern-builtin,unichr-builtin,reduce-builtin,standarderror-builtin,unicode-builtin,xrange-builtin,coerce-method,delslice-method,getslice-method,setslice-method,input-builtin,round-builtin,hex-method,nonzero-method,map-builtin-not-iterating,missing-docstring,fixme,superfluous-parens,too-many-locals +disable=useless-suppression,suppressed-message,missing-docstring,fixme,superfluous-parens,too-many-locals,unspecified-encoding,missing-timeout [REPORTS] @@ -75,12 +65,6 @@ disable=import-star-module-level,old-octal-literal,oct-method,print-statement,un # mypackage.mymodule.MyReporterClass. output-format=text -# Put messages in a separate file for each module / package specified on the -# command line instead of printing them on stdout. Reports (if any) will be -# written in a file name "pylint_global.[txt|html]". This option is deprecated -# and it will be removed in Pylint 2.0. 
-files-output=no - # Tells whether to display a full report or only the messages reports=no @@ -141,63 +125,33 @@ property-classes=abc.abstractproperty # Regular expression matching correct function names function-rgx=[a-z_][a-z0-9_]{2,30}$ -# Naming hint for function names -function-name-hint=[a-z_][a-z0-9_]{2,30}$ - # Regular expression matching correct variable names variable-rgx=[a-z_][a-z0-9_]{2,30}$ -# Naming hint for variable names -variable-name-hint=[a-z_][a-z0-9_]{2,30}$ - # Regular expression matching correct constant names const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$ -# Naming hint for constant names -const-name-hint=(([A-Z_][A-Z0-9_]*)|(__.*__))$ - # Regular expression matching correct attribute names attr-rgx=[a-z_][a-z0-9_]{2,30}$ -# Naming hint for attribute names -attr-name-hint=[a-z_][a-z0-9_]{2,30}$ - # Regular expression matching correct argument names argument-rgx=[a-z_][a-z0-9_]{2,30}$ -# Naming hint for argument names -argument-name-hint=[a-z_][a-z0-9_]{2,30}$ - # Regular expression matching correct class attribute names class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$ -# Naming hint for class attribute names -class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$ - # Regular expression matching correct inline iteration names inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$ -# Naming hint for inline iteration names -inlinevar-name-hint=[A-Za-z_][A-Za-z0-9_]*$ - # Regular expression matching correct class names class-rgx=[A-Z_][a-zA-Z0-9]+$ -# Naming hint for class names -class-name-hint=[A-Z_][a-zA-Z0-9]+$ - # Regular expression matching correct module names module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ -# Naming hint for module names -module-name-hint=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ - # Regular expression matching correct method names method-rgx=[a-z_][a-z0-9_]{2,30}$ -# Naming hint for method names -method-name-hint=[a-z_][a-z0-9_]{2,30}$ - # Regular expression which should only match function or class names that do 
# not require a docstring. no-docstring-rgx=^_ @@ -225,12 +179,6 @@ ignore-long-lines=^\s*(# )?<?https?://\S+>?$ # else. single-line-if-stmt=no -# List of optional constructs for which whitespace checking is disabled. `dict- -# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}. -# `trailing-comma` allows a space between comma and closing bracket: (a, ). -# `empty-line` allows space-only lines. -no-space-check=trailing-comma,dict-separator - # Maximum number of lines in a module max-module-lines=1000 @@ -404,4 +352,4 @@ exclude-protected=_asdict,_fields,_replace,_source,_make # Exceptions that will emit a warning when being caught. Defaults to # "Exception" -overgeneral-exceptions=Exception +overgeneral-exceptions=builtins.Exception diff --git a/Dockerfile.skipper-build b/Dockerfile.skipper-build index ec3c42d..6fb8b36 100644 --- a/Dockerfile.skipper-build +++ b/Dockerfile.skipper-build @@ -1,4 +1,4 @@ -FROM python:2.7 +FROM python:3.11 COPY requirements.txt /tmp/requirements.txt RUN pip install -r /tmp/requirements.txt diff --git a/setup.cfg b/setup.cfg index 1e9dab8..945107b 100644 --- a/setup.cfg +++ b/setup.cfg @@ -9,6 +9,7 @@ long_description = file: README.md long_description_content_type = text/markdown requires-dist = setuptools +requires-python = >=3 [files] diff --git a/skipper/cli.py b/skipper/cli.py index cce94c7..4a99906 100644 --- a/skipper/cli.py +++ b/skipper/cli.py @@ -46,12 +46,12 @@ def _validate_port(matcher, port_forwarding): def _validate_port_range(start, end): if start and end and end < start: - raise click.BadParameter("Invalid port range: {0} should be bigger than {1}".format(start, end)) + raise click.BadParameter(f"Invalid port range: {start} should be bigger than {end}") def _validate_port_out_of_range(port): if port and not 1 <= int(port) <= 65535: - raise click.BadParameter("Invalid port number: port {0} is out of range".format(port)) + raise click.BadParameter(f"Invalid port number: port {port} is out of range") @click.group()
@@ -102,16 +102,16 @@ def build(ctx, images_to_build, container_context, cache): else: for image in images_to_build: if image not in valid_images: - utils.logger.warning('Image %(image)s is not valid for this project! Skipping...', dict(image=image)) + utils.logger.warning('Image %s is not valid for this project! Skipping...', image) continue valid_images_to_build[image] = valid_images[image] tag = git.get_hash() for image, dockerfile in six.iteritems(valid_images_to_build): - utils.logger.info('Building image: %(image)s', dict(image=image)) + utils.logger.info('Building image: %s', image) if not os.path.exists(dockerfile): - utils.logger.warning('Dockerfile %(dockerfile)s does not exist! Skipping...', dict(dockerfile=dockerfile)) + utils.logger.warning('Dockerfile %s does not exist! Skipping...', dockerfile) continue fqdn_image = image + ':' + tag @@ -121,7 +121,7 @@ def build(ctx, images_to_build, container_context, cache): build_context = ctx.obj['container_context'] else: build_context = os.path.dirname(dockerfile) - command = ['build', '--network=host', '--build-arg', 'TAG={}'.format(tag), + command = ['build', '--network=host', '--build-arg', f'TAG={tag}', '-f', dockerfile, '-t', fqdn_image, build_context] if cache: cache_image = utils.generate_fqdn_image(ctx.obj['registry'], namespace=None, image=image, tag=DOCKER_TAG_FOR_CACHE) @@ -130,7 +130,7 @@ def build(ctx, images_to_build, container_context, cache): ret = runner.run(command) if ret != 0: - utils.logger.error('Failed to build image: %(image)s', dict(image=image)) + utils.logger.error('Failed to build image: %s', image) return ret if cache: @@ -158,7 +158,7 @@ def push(ctx, namespace, force, pbr, image): if pbr: # Format = pbr_version.short_hash # pylint: disable=protected-access - tag_to_push = "{}.{}".format(packaging._get_version_from_git().replace('dev', ''), tag[:8]) + tag_to_push = f"{packaging._get_version_from_git().replace('dev', '')}.{tag[:8]}" image_name = image + ':' + tag ret = _push(ctx, 
force, image, image_name, namespace, tag_to_push) @@ -169,11 +169,11 @@ def push(ctx, namespace, force, pbr, image): def _push(ctx, force, image, image_name, namespace, tag): fqdn_image = utils.generate_fqdn_image(ctx.obj['registry'], namespace, image, tag) - utils.logger.debug("Adding tag %(tag)s", dict(tag=fqdn_image)) + utils.logger.debug("Adding tag %s", fqdn_image) command = ['tag', image_name, fqdn_image] ret = runner.run(command) if ret != 0: - utils.logger.error('Failed to tag image: %(tag)s as fqdn', dict(tag=image_name, fqdn=fqdn_image)) + utils.logger.error('Failed to tag image: %s as fqdn: %s', image_name, fqdn_image) sys.exit(ret) repo_name = utils.generate_fqdn_image(None, namespace, image, tag=None) images_info = utils.get_remote_images_info([repo_name], ctx.obj['registry'], @@ -181,19 +181,19 @@ def _push(ctx, force, image, image_name, namespace, tag): tags = [info[-1] for info in images_info] if tag in tags: if not force: - utils.logger.info("Image %(image)s is already in registry %(registry)s, not pushing", - dict(image=fqdn_image, registry=ctx.obj['registry'])) + utils.logger.info("Image %s is already in registry %s, not pushing", + fqdn_image, ctx.obj['registry']) else: - utils.logger.warning("Image %(image)s is already in registry %(registry)s, pushing anyway", - dict(image=fqdn_image, registry=ctx.obj['registry'])) + utils.logger.warning("Image %s is already in registry %s, pushing anyway", + fqdn_image, ctx.obj['registry']) _push_to_registry(ctx.obj['registry'], fqdn_image) else: _push_to_registry(ctx.obj['registry'], fqdn_image) - utils.logger.debug("Removing tag %(tag)s", dict(tag=fqdn_image)) + utils.logger.debug("Removing tag %s", fqdn_image) command = ['rmi', fqdn_image] ret = runner.run(command) if ret != 0: - utils.logger.warning('Failed to remove image tag: %(tag)s', dict(tag=fqdn_image)) + utils.logger.warning('Failed to remove image tag: %s', fqdn_image) return ret @@ -208,7 +208,7 @@ def images(ctx, remote): valid_images = 
ctx.obj.get('containers') or utils.get_images_from_dockerfiles() images_names = valid_images.keys() - utils.logger.info("Expected images: %(images)s\n", dict(images=", ".join(images_names))) + utils.logger.info("Expected images: %s\n", ", ".join(images_names)) images_info = utils.get_local_images_info(images_names) if remote: _validate_global_params(ctx, 'registry') @@ -216,7 +216,7 @@ def images(ctx, remote): images_info += utils.get_remote_images_info(images_names, ctx.obj['registry'], ctx.obj.get('username'), ctx.obj.get('password')) except Exception as exp: - raise click.exceptions.ClickException('Got unknow error from remote registry %(error)s' % dict(error=exp.message)) + raise click.exceptions.ClickException(f'Got unknown error from remote registry {exp}') print(tabulate.tabulate(images_info, headers=['REGISTRY', 'IMAGE', 'TAG'], tablefmt='grid')) @@ -240,7 +240,7 @@ def rmi(ctx, remote, image, tag): utils.delete_local_image(image, tag) -@cli.command(context_settings=dict(ignore_unknown_options=True)) +@cli.command(context_settings={"ignore_unknown_options": True}) @click.option('-i', '--interactive', help='Interactive mode', is_flag=True, default=False, envvar='SKIPPER_INTERACTIVE') @click.option('-n', '--name', help='Container name', default=None) @click.option('-e', '--env', multiple=True, help='Environment variables to pass the container') @@ -276,7 +276,7 @@ def run(ctx, interactive, name, env, publish, cache, command): env_file=ctx.obj.get('env_file')) -@cli.command(context_settings=dict(ignore_unknown_options=True)) +@cli.command(context_settings={"ignore_unknown_options": True}) @click.option('-i', '--interactive', help='Interactive mode', is_flag=True, default=False, envvar='SKIPPER_INTERACTIVE') @click.option('-n', '--name', help='Container name', default=None) @click.option('-e', '--env', multiple=True, help='Environment variables to pass the container') @@ -369,11 +369,11 @@ def completion(): def _push_to_registry(registry, fqdn_image): - 
utils.logger.debug("Pushing to registry %(registry)s", dict(registry=registry)) + utils.logger.debug("Pushing to registry %s", registry) command = ['push', fqdn_image] ret = runner.run(command) if ret != 0: - utils.logger.error('Failed to push image: %(tag)s', dict(tag=fqdn_image)) + utils.logger.error('Failed to push image: %s', fqdn_image) sys.exit(ret) @@ -399,24 +399,23 @@ def runner_run(command): tagged_image_name = image + ':' + tag if utils.local_image_exist(image, tag): - utils.logger.info("Using build container: %(image_name)s", dict(image_name=tagged_image_name)) + utils.logger.info("Using build container: %s", tagged_image_name) return tagged_image_name if utils.remote_image_exist(registry, image, tag, username, password): fqdn_image = utils.generate_fqdn_image(registry, None, image, tag) - utils.logger.info("Using build container: %(fqdn_image)s", dict(fqdn_image=fqdn_image)) + utils.logger.info("Using build container: %s", fqdn_image) return fqdn_image if not git_revision: - raise click.exceptions.ClickException( - "Couldn't find build image %(image)s with tag %(tag)s" % dict(image=image, tag=tag)) + raise click.exceptions.ClickException(f"Couldn't find build image {image} with tag {tag}") else: tagged_image_name = image utils.logger.info("No build container tag was provided") docker_file = utils.image_to_dockerfile(image) - utils.logger.info("Building image using docker file: %(docker_file)s", dict(docker_file=docker_file)) + utils.logger.info("Building image using docker file: %s", docker_file) if container_context is not None: build_context = container_context else: @@ -433,10 +432,10 @@ def runner_run(command): command.extend(['--cache-from', cache_image]) if runner_run(command) != 0: - exit('Failed to build image: %(image)s' % dict(image=image)) + sys.exit(f'Failed to build image: {image}') if git_revision and not git.uncommitted_changes(): - utils.logger.info("Tagging image with git revision: %(tag)s", dict(tag=tag)) + utils.logger.info("Tagging 
image with git revision: %s", tag) runner_run(['tag', image, tagged_image_name]) if use_cache: @@ -456,7 +455,7 @@ def _validate_global_params(ctx, *params): def _validate_project_image(image): project_images = utils.get_images_from_dockerfiles() if image not in project_images: - raise click.BadParameter("'%s' is not an image of this project, try %s" % (image, project_images), param_hint='image') + raise click.BadParameter(f"'{image}' is not an image of this project, try {project_images}", param_hint='image') def _expend_env(ctx, extra_env): @@ -466,7 +465,7 @@ def _expend_env(ctx, extra_env): if isinstance(env, dict): for key, value in six.iteritems(env): utils.logger.debug("Adding %s=%s to environment", key, value) - environment.append("{}={}".format(key, value)) + environment.append(f"{key}={value}") elif isinstance(env, list): for item in env: if '=' in item: @@ -476,8 +475,8 @@ # if the items is just a name of environment variable, try to get it # from the host's environment variables if item in os.environ: - environment.append('{}={}'.format(item, os.environ[item])) + environment.append(f'{item}={os.environ[item]}') else: - raise TypeError('Type {} not supported for key env, use dict or list instead'.format(type(env))) + raise TypeError(f'Type {type(env)} not supported for key env, use dict or list instead') return environment + list(extra_env) diff --git a/skipper/git.py b/skipper/git.py index 448fd49..daa02c6 100644 --- a/skipper/git.py +++ b/skipper/git.py @@ -16,7 +16,7 @@ def get_hash(short=False): if uncommitted_changes(): logging.warning("*** Uncommitted changes present - Build container version might be outdated ***") - return subprocess.check_output(git_command).decode().strip() + return subprocess.check_output(git_command).decode().strip() def uncommitted_changes(): diff --git a/skipper/runner.py b/skipper/runner.py index 5f2e03f..dc86468 100644 --- a/skipper/runner.py +++ b/skipper/runner.py @@ -1,3 +1,4 @@ +# pylint:
disable=consider-using-with import getpass import grp import logging @@ -12,7 +13,7 @@ def get_default_net(): # The host networking driver only works on Linux hosts, and is not supported on Docker Desktop for Mac, # Docker Desktop for Windows, or Docker EE for Windows Server. - return 'host' if sys.platform != 'darwin' and sys.platform != 'win32' else 'bridge' + return 'host' if sys.platform not in ('darwin', 'win32') else 'bridge' # pylint: disable=too-many-arguments @@ -78,15 +79,15 @@ def _run_nested(fqdn_image, environment, command, interactive, name, net, publis user = getpass.getuser() user_id = os.getuid() - cmd += ['-e', 'SKIPPER_USERNAME=%(user)s' % dict(user=user)] - cmd += ['-e', 'SKIPPER_UID=%(user_id)s' % dict(user_id=user_id)] - cmd += ['-e', 'HOME=%(homedir)s' % dict(homedir=homedir)] - cmd += ['-e', 'CONTAINER_RUNTIME_COMMAND=%(runtime_command)s' % dict(runtime_command=utils.get_runtime_command())] + cmd += ['-e', f'SKIPPER_USERNAME={user}'] + cmd += ['-e', f'SKIPPER_UID={user_id}'] + cmd += ['-e', f'HOME={homedir}'] + cmd += ['-e', f'CONTAINER_RUNTIME_COMMAND={utils.get_runtime_command()}'] if utils.get_runtime_command() == "docker": try: docker_gid = grp.getgrnam('docker').gr_gid - cmd += ['-e', 'SKIPPER_DOCKER_GID=%(docker_gid)s' % dict(docker_gid=docker_gid)] + cmd += ['-e', f'SKIPPER_DOCKER_GID={docker_gid}'] except KeyError: pass @@ -111,7 +112,7 @@ def handle_workdir(cmd, cwd, workdir): if workdir: cmd += ['-w', workdir] else: - cmd += ['-w', '%(workdir)s' % dict(workdir=cwd)] + cmd += ['-w', cwd] return cmd @@ -128,9 +129,9 @@ def handle_networking(cmd, publish, net): def handle_volumes_bind_mount(docker_cmd, homedir, volumes, workspace): volumes = volumes or [] - volumes.extend(['%(homedir)s/.netrc:%(homedir)s/.netrc:ro' % dict(homedir=homedir), - '%(homedir)s/.gitconfig:%(homedir)s/.gitconfig:ro' % dict(homedir=homedir), - '%(homedir)s/.docker/config.json:%(homedir)s/.docker/config.json:ro' % dict(homedir=homedir)]) + 
volumes.extend([f'{homedir}/.netrc:{homedir}/.netrc:ro', + f'{homedir}/.gitconfig:{homedir}/.gitconfig:ro', + f'{homedir}/.docker/config.json:{homedir}/.docker/config.json:ro']) # required for docker login (certificates) if os.path.exists('/etc/docker'): @@ -138,8 +139,8 @@ def handle_volumes_bind_mount(docker_cmd, homedir, volumes, workspace): if utils.get_runtime_command() == utils.PODMAN: volumes.extend([ - '%(workspace)s:%(workspace)s:rw' % dict(workspace=workspace), - '%s:/opt/skipper/skipper-entrypoint.sh:rw' % utils.get_extra_file("skipper-entrypoint.sh"), + f'{workspace}:{workspace}:rw', + f'{utils.get_extra_file("skipper-entrypoint.sh")}:/opt/skipper/skipper-entrypoint.sh:rw', ]) if os.path.exists('/var/run/docker.sock'): volumes.append('/var/run/docker.sock:/var/run/docker.sock:rw') @@ -147,9 +148,9 @@ def handle_volumes_bind_mount(docker_cmd, homedir, volumes, workspace): volumes.append('/var/lib/osmosis:/var/lib/osmosis:rw') else: volumes.extend([ - '%(workspace)s:%(workspace)s:rw' % dict(workspace=workspace), + f'{workspace}:{workspace}:rw', '/var/run/docker.sock:/var/run/docker.sock:rw', - '%s:/opt/skipper/skipper-entrypoint.sh' % utils.get_extra_file("skipper-entrypoint.sh"), + f'{utils.get_extra_file("skipper-entrypoint.sh")}:/opt/skipper/skipper-entrypoint.sh', ]) # Will fail on Mac if os.path.exists('/var/lib/osmosis'): @@ -157,7 +158,7 @@ def handle_volumes_bind_mount(docker_cmd, homedir, volumes, workspace): for volume in volumes: if ":" not in volume: - raise ValueError("Volume entry is badly-formatted - %s" % volume) + raise ValueError(f"Volume entry is badly-formatted - {volume}") # For OSX, map anything in /var/lib or /etc to /private if sys.platform == 'darwin': @@ -218,17 +219,17 @@ def _network(net): def _create_network(net): - logging.debug("Creating network %(net)s", dict(net=net)) + logging.debug("Creating network %s", net) utils.run_container_command(['network', 'create', net]) @retry(delay=0.1, tries=10) def _destroy_network(net): - 
logging.debug("Deleting network %(net)s", dict(net=net)) + logging.debug("Deleting network %s", net) utils.run_container_command(['network', 'rm', net]) def _network_exists(net): - cmd = ['network', 'ls', "-f", "NAME=%s" % net] + cmd = ['network', 'ls', "-f", f"NAME={net}"] result = utils.run_container_command(cmd) return net in result diff --git a/skipper/utils.py b/skipper/utils.py index f6331f9..7ebc7c6 100644 --- a/skipper/utils.py +++ b/skipper/utils.py @@ -4,7 +4,7 @@ import logging import os import subprocess -from distutils.spawn import find_executable +from shutil import which from six.moves import http_client import requests from requests_bearer import HttpBearerAuth @@ -58,7 +58,7 @@ def local_image_exist(image, tag): def remote_image_exist(registry, image, tag, username, password): urllib3.disable_warnings() - url = IMAGE_TAGS_URL % dict(registry=registry, image=image) + url = IMAGE_TAGS_URL % {"registry": registry, "image": image} headers = {"Accept": "application/vnd.docker.distribution.manifest.v2+json"} response = requests.get(url=url, verify=False, headers=headers, auth=HttpBearerAuth(username, password)) @@ -95,7 +95,7 @@ def get_remote_images_info(images, registry, username, password): def get_remote_image_info(image, registry, username, password): urllib3.disable_warnings() image_info = [] - url = IMAGE_TAGS_URL % dict(registry=registry, image=image) + url = IMAGE_TAGS_URL % {"registry": registry, "image": image} response = requests.get(url=url, verify=False, auth=HttpBearerAuth(username, password)) info = response.json() if response.ok: @@ -105,14 +105,14 @@ def get_remote_image_info(image, registry, username, password): if info['errors'][0]['code'] in ['NAME_UNKNOWN', 'NOT_FOUND']: pass else: - raise Exception(info) + raise RuntimeError(info) return image_info def get_image_digest(registry, image, tag, username, password): urllib3.disable_warnings() - url = MANIFEST_URL % dict(registry=registry, image=image, reference=tag) + url = MANIFEST_URL 
% {"registry": registry, "image": image, "reference": tag} headers = {"Accept": "application/vnd.docker.distribution.manifest.v2+json"} response = requests.get(url=url, headers=headers, verify=False, auth=HttpBearerAuth(username, password)) return response.headers['Docker-Content-Digest'] @@ -120,10 +120,9 @@ def get_image_digest(registry, image, tag, username, password): def delete_image_from_registry(registry, image, tag, username, password): digest = get_image_digest(registry, image, tag, username, password) - url = MANIFEST_URL % dict(registry=registry, image=image, reference=digest) + url = MANIFEST_URL % {"registry": registry, "image": image, "reference": digest} response = requests.delete(url=url, verify=False, auth=HttpBearerAuth(username, password)) - if not response.ok: - raise Exception(response.content) + response.raise_for_status() def delete_local_image(image, tag): @@ -152,7 +151,7 @@ def dockerfile_to_image(dockerfile): def is_tool(name): """Check whether `name` is on PATH and marked as executable.""" - return find_executable(name) is not None + return which(name) is not None def get_runtime_command(): @@ -163,18 +162,18 @@ def get_runtime_command(): elif is_tool(PODMAN): CONTAINER_RUNTIME_COMMAND = PODMAN else: - raise Exception("Nor %s nor %s are installed" % (PODMAN, DOCKER)) + raise RuntimeError(f"Nor {PODMAN} nor {DOCKER} are installed") return CONTAINER_RUNTIME_COMMAND def get_extra_file(filename): - return pkg_resources.resource_filename("skipper", "data/%s" % filename) + return pkg_resources.resource_filename("skipper", f"data/{filename}") def run_container_command(args): cmd = [get_runtime_command()] cmd.extend(args) - return str(subprocess.check_output(cmd).decode().strip()) + return subprocess.check_output(cmd).decode().strip() def create_path_and_add_data(full_path, data, is_file): @@ -188,7 +187,9 @@ def create_path_and_add_data(full_path, data, is_file): def set_remote_registry_login_info(registry, ctx_object): try: - docker_config =
json.load(open('/'.join([os.path.expanduser('~'), '.docker/config.json']))) + with open('/'.join([os.path.expanduser('~'), '.docker/config.json'])) as docker_file: + docker_config = json.load(docker_file) + auth = docker_config.get('auths', {}).get(registry, {}).get('auth') if auth: username, password = base64.b64decode(auth).decode().split(r':') diff --git a/tests/test_cli.py b/tests/test_cli.py index 0abcd00..ce198ab 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -5,6 +5,9 @@ import click import six from click import testing +from shutil import which +from requests import HTTPError + from skipper import cli from skipper import config, utils @@ -295,7 +298,7 @@ def test_build_existing_image_with_context(self, skipper_runner_run_mock): return_value={ 'image1': '/home/user/work/project/Dockerfile.image1', 'image2': '/home/user/work/project/Dockerfile.image2'})) - @mock.patch('__builtin__.open', mock.MagicMock(create=True)) + @mock.patch('builtins.open', mock.MagicMock(create=True)) @mock.patch('os.path.exists', mock.MagicMock(autospec=True, return_value=True)) @mock.patch('yaml.safe_load', mock.MagicMock(autospec=True, return_value=SKIPPER_CONF_WITH_CONTEXT)) @mock.patch('skipper.git.get_hash', mock.MagicMock(autospec=True, return_value='1234567')) @@ -317,7 +320,7 @@ def test_build_with_context_from_config_file(self, skipper_runner_run_mock): ] skipper_runner_run_mock.assert_called_once_with(expected_command) - @mock.patch('__builtin__.open', mock.MagicMock(create=True)) + @mock.patch('builtins.open', mock.MagicMock(create=True)) @mock.patch('os.path.exists', mock.MagicMock(autospec=True, return_value=True)) @mock.patch('yaml.safe_load', mock.MagicMock(autospec=True, return_value=SKIPPER_CONF_WITH_CONTEXT_NO_TAG)) @mock.patch('skipper.git.get_hash', mock.MagicMock(autospec=True, return_value='1234567')) @@ -461,7 +464,7 @@ def test_build_all_images(self, skipper_runner_run_mock): return_value={ 'image1': '/home/user/work/project/Dockerfile.image1', 
'image2': '/home/user/work/project/Dockerfile.image2'})) - @mock.patch('__builtin__.open', mock.MagicMock(create=True)) + @mock.patch('builtins.open', mock.MagicMock(create=True)) @mock.patch('os.path.exists', mock.MagicMock(autospec=True, return_value=True)) @mock.patch('yaml.safe_load', mock.MagicMock(autospec=True, return_value=SKIPPER_CONF)) @mock.patch('skipper.git.get_hash', mock.MagicMock(autospec=True, return_value='1234567')) @@ -482,7 +485,7 @@ def test_build_with_defaults_from_config_file(self, skipper_runner_run_mock): ] skipper_runner_run_mock.assert_called_once_with(expected_command) - @mock.patch('__builtin__.open', mock.MagicMock(create=True)) + @mock.patch('builtins.open', mock.MagicMock(create=True)) @mock.patch('os.path.abspath', mock.MagicMock(autospec=True, return_value='/home/user/work/project/app1/Dockerfile')) @mock.patch('os.path.exists', mock.MagicMock(autospec=True, return_value=True)) @@ -677,7 +680,7 @@ def test_push_to_namespace(self, skipper_runner_run_mock, requests_get_mock): ] skipper_runner_run_mock.assert_has_calls(expected_commands) - @mock.patch('__builtin__.open', mock.MagicMock(create=True)) + @mock.patch('builtins.open', mock.MagicMock(create=True)) @mock.patch('os.path.exists', mock.MagicMock(autospec=True, return_value=True)) @mock.patch('yaml.safe_load', mock.MagicMock(autospec=True, return_value=SKIPPER_CONF)) @mock.patch('skipper.git.get_hash', mock.MagicMock(autospec=True, return_value='1234567')) @@ -1039,7 +1042,7 @@ def test_rmi_remote(self, requests_get_mock, requests_delete_mock, requests_bear @mock.patch('requests.get', autospec=True) def test_rmi_remote_fail(self, requests_get_mock, requests_delete_mock, requests_bearer_auth_mock): requests_get_mock.side_effect = [mock.Mock(headers={'Docker-Content-Digest': 'digest'})] - requests_delete_mock.side_effect = [mock.Mock(ok=False)] + requests_delete_mock.side_effect = HTTPError() result = self._invoke_cli( global_params=self.global_params, subcmd='rmi', @@ -1130,7 
+1133,7 @@ def test_run_with_non_existing_build_container(self, requests_get_mock): ) self.assertIsInstance(ret.exception, click.exceptions.ClickException) - @mock.patch('__builtin__.open', mock.MagicMock(create=True)) + @mock.patch('builtins.open', mock.MagicMock(create=True)) @mock.patch('os.path.exists', mock.MagicMock(autospec=True, return_value=True)) @mock.patch('yaml.safe_load', mock.MagicMock(autospec=True, return_value=SKIPPER_CONF)) @mock.patch('subprocess.check_output', mock.MagicMock(autospec=True, return_value='1234567\n')) @@ -1150,7 +1153,7 @@ def test_run_with_defaults_from_config_file(self, skipper_runner_run_mock): workdir=None, workspace=None, use_cache=False, env_file=()) - @mock.patch('__builtin__.open', mock.MagicMock(create=True)) + @mock.patch('builtins.open', mock.MagicMock(create=True)) @mock.patch('os.path.exists', mock.MagicMock(autospec=True, return_value=True)) @mock.patch('yaml.safe_load', mock.MagicMock(autospec=True, return_value=SKIPPER_CONF_WITH_ENV)) @mock.patch('subprocess.check_output', mock.MagicMock(autospec=True, return_value='1234567\n')) @@ -1172,7 +1175,7 @@ def test_run_with_defaults_and_env_from_config_file(self, skipper_runner_run_moc workdir=None, workspace=None, use_cache=False, env_file=()) - @mock.patch('__builtin__.open', mock.MagicMock(create=True)) + @mock.patch('builtins.open', mock.MagicMock(create=True)) @mock.patch('os.path.exists', mock.MagicMock(autospec=True, return_value=True)) @mock.patch('yaml.safe_load', mock.MagicMock( @@ -1203,7 +1206,7 @@ def test_run_with_defaults_and_env_from_env_file( use_cache=False, env_file=(ENV_FILE_PATH,)) - @mock.patch('__builtin__.open', mock.MagicMock(create=True)) + @mock.patch('builtins.open', mock.MagicMock(create=True)) @mock.patch('os.path.exists', mock.MagicMock(autospec=True, return_value=True)) @mock.patch('yaml.safe_load', mock.MagicMock( @@ -1234,7 +1237,7 @@ def test_run_with_defaults_and_env_from_multiple_env_file( use_cache=False, env_file=tuple(ENV_FILES)) 
- @mock.patch('__builtin__.open', mock.MagicMock(create=True)) + @mock.patch('builtins.open', mock.MagicMock(create=True)) @mock.patch('os.path.exists', mock.MagicMock(autospec=True, return_value=True)) @mock.patch('yaml.safe_load', mock.MagicMock(autospec=True, return_value=SKIPPER_CONF_WITH_ENV)) @mock.patch('subprocess.check_output', mock.MagicMock(autospec=True, return_value='1234567\n')) @@ -1256,7 +1259,7 @@ def test_run_with_env_overriding_config_file(self, skipper_runner_run_mock): workdir=None, workspace=None, use_cache=False, env_file=()) - @mock.patch('__builtin__.open', mock.MagicMock(create=True)) + @mock.patch('builtins.open', mock.MagicMock(create=True)) @mock.patch('os.path.exists', mock.MagicMock(autospec=True, return_value=True)) @mock.patch('os.environ', {}) @mock.patch('yaml.safe_load', mock.MagicMock(autospec=True, return_value=SKIPPER_CONF_WITH_ENV_LIST)) @@ -1279,7 +1282,7 @@ def test_run_with_env_list(self, skipper_runner_run_mock): workdir=None, workspace=None, use_cache=False, env_file=()) - @mock.patch('__builtin__.open', mock.MagicMock(create=True)) + @mock.patch('builtins.open', mock.MagicMock(create=True)) @mock.patch('os.path.exists', mock.MagicMock(autospec=True, return_value=True)) @mock.patch('os.environ', {'key2': 'value2'}) @mock.patch('yaml.safe_load', mock.MagicMock(autospec=True, return_value=SKIPPER_CONF_WITH_ENV_LIST)) @@ -1302,7 +1305,7 @@ def test_run_with_env_list_get_from_env(self, skipper_runner_run_mock): workdir=None, workspace=None, use_cache=False, env_file=()) - @mock.patch('__builtin__.open', mock.MagicMock(create=True)) + @mock.patch('builtins.open', mock.MagicMock(create=True)) @mock.patch('os.path.exists', mock.MagicMock(autospec=True, return_value=True)) @mock.patch('yaml.safe_load', mock.MagicMock(autospec=True, return_value=SKIPPER_CONF_WITH_ENV_WRONG_TYPE)) @mock.patch('subprocess.check_output', mock.MagicMock(autospec=True, return_value='1234567\n')) @@ -1582,7 +1585,7 @@ def 
test_run_with_publish_out_of_range_port(self): self.assertEqual("Invalid port number: port 121111111 is out of range", result.exception.message) self.assertEqual(-1, result.exit_code) - @mock.patch('__builtin__.open', mock.MagicMock(create=True)) + @mock.patch('builtins.open', mock.MagicMock(create=True)) @mock.patch('os.path.exists', mock.MagicMock(autospec=True, return_value=True)) @mock.patch('yaml.safe_load', mock.MagicMock(autospec=True, return_value=SKIPPER_CONF_WITH_VOLUMES)) @mock.patch('subprocess.check_output', mock.MagicMock(autospec=True, return_value='1234567\n')) @@ -1601,7 +1604,7 @@ def test_run_with_defaults_from_config_file_including_volumes(self, skipper_runn volumes=['volume1', 'volume2'], workspace=None, workdir=None, use_cache=False, env_file=()) - @mock.patch('__builtin__.open', mock.MagicMock(create=True)) + @mock.patch('builtins.open', mock.MagicMock(create=True)) @mock.patch('os.path.exists', mock.MagicMock(autospec=True, return_value=True)) @mock.patch('yaml.safe_load', mock.MagicMock(autospec=True, return_value=SKIPPER_CONF_WITH_SHELL_INTERPOLATION)) @mock.patch('subprocess.check_output', mock.MagicMock(autospec=True, return_value='1234567\n')) @@ -1617,7 +1620,7 @@ def test_run_with_defaults_from_config_file_including_interpolated_volumes(self, expected_fqdn_image = 'skipper-conf-build-container-image:skipper-conf-build-container-tag' skipper_runner_run_mock.assert_called_once_with(command, fqdn_image=expected_fqdn_image, environment=['KEY=10'], interactive=False, name=None, net=None, publish=(), - volumes=['/bin/cat:/cat'], workspace=None, + volumes=[f'{which("cat")}:/cat'], workspace=None, workdir=None, use_cache=False, env_file=()) @mock.patch('yaml.safe_load', mock.MagicMock(autospec=True, return_value=SKIPPER_CONF_WITH_INVALID_SHELL_INTERPOLATION)) @@ -1632,7 +1635,7 @@ def test_run_with_defaults_from_config_file_including_invalid_interploated_volum subcmd_params=run_params ) - @mock.patch('__builtin__.open', 
mock.MagicMock(create=True)) + @mock.patch('builtins.open', mock.MagicMock(create=True)) @mock.patch('os.path.exists', mock.MagicMock(autospec=True, return_value=True)) @mock.patch('yaml.safe_load', mock.MagicMock(autospec=True, return_value=SKIPPER_CONF_WITH_WORKDIR)) @mock.patch('subprocess.check_output', mock.MagicMock(autospec=True, return_value='1234567\n')) @@ -1652,7 +1655,7 @@ def test_run_with_defaults_from_config_file_including_workdir(self, skipper_runn workdir='test-workdir', workspace=None, use_cache=False, env_file=()) - @mock.patch('__builtin__.open', mock.MagicMock(create=True)) + @mock.patch('builtins.open', mock.MagicMock(create=True)) @mock.patch('os.path.exists', mock.MagicMock(autospec=True, return_value=True)) @mock.patch('yaml.safe_load', mock.MagicMock(autospec=True, return_value=SKIPPER_CONF_WITH_WORKSPACE)) @mock.patch('subprocess.check_output', mock.MagicMock(autospec=True, return_value='1234567\n')) @@ -1672,7 +1675,7 @@ def test_run_with_defaults_from_config_file_including_workspace(self, skipper_ru workdir=None, workspace="/test/workspace", use_cache=False, env_file=()) - @mock.patch('__builtin__.open', mock.MagicMock(create=True)) + @mock.patch('builtins.open', mock.MagicMock(create=True)) @mock.patch('os.path.exists', mock.MagicMock(autospec=True, return_value=True)) @mock.patch('yaml.safe_load', mock.MagicMock(autospec=True, return_value=SKIPPER_CONF_WITH_GIT_REV)) @mock.patch('subprocess.check_output', mock.MagicMock(autospec=True, return_value='1234567\n')) @@ -1693,7 +1696,7 @@ def test_run_with_config_including_git_revision_with_uncommitted_changes(self, s workdir=None, use_cache=False, workspace=None, env_file=()) - @mock.patch('__builtin__.open', mock.MagicMock(create=True)) + @mock.patch('builtins.open', mock.MagicMock(create=True)) @mock.patch('os.path.exists', mock.MagicMock(autospec=True, return_value=True)) @mock.patch('yaml.safe_load', mock.MagicMock(autospec=True, return_value=SKIPPER_CONF_WITH_GIT_REV)) 
@mock.patch('subprocess.check_output', mock.MagicMock(autospec=True, return_value='1234567\n')) @@ -1769,7 +1772,7 @@ def test_make_with_additional_make_params(self, skipper_runner_run_mock): workdir=None, workspace=None, use_cache=False, env_file=()) - @mock.patch('__builtin__.open', mock.MagicMock(create=True)) + @mock.patch('builtins.open', mock.MagicMock(create=True)) @mock.patch('os.path.exists', mock.MagicMock(autospec=True, return_value=True)) @mock.patch('yaml.safe_load', mock.MagicMock(autospec=True, return_value=SKIPPER_CONF)) @mock.patch('subprocess.check_output', mock.MagicMock(autospec=True, return_value='1234567\n')) diff --git a/tests/test_utils.py b/tests/test_utils.py index c5a078c..6c8ed6b 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -8,7 +8,7 @@ class TestUtils(unittest.TestCase): - @mock.patch('skipper.utils.find_executable', autospec=False) + @mock.patch('skipper.utils.which', autospec=False) def test_get_runtime_command(self, find_executable_mock): utils.CONTAINER_RUNTIME_COMMAND = None find_executable_mock.side_effect = "done"