replace remaining usage of sh with subprocess_util (#51)
* use the pypa `build` tool instead of setup.py

* replace remaining usage of sh with subprocess_util

* replace @lru_cache(None) with @cache

If you want an unbounded cache, it's better to use `functools.cache`
(new in Python 3.9) rather than `functools.lru_cache(None)`, as the
former has a faster implementation that doesn't concern itself with
cache eviction.

* add clog frag
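
A minimal, self-contained sketch (not part of the commit) of the decorator swap described in the `@cache` bullet above. It only shows that the two spellings provide the same unbounded caching behaviour; the example function is made up for illustration.

# Illustration only: the old and new spellings of an unbounded cache.
from functools import cache, lru_cache


@lru_cache(None)  # old spelling used throughout ansible_core.py before this commit
def square_old(n: int) -> int:
    return n * n


@cache  # new spelling, available since Python 3.9 (the minimum version in pyproject.toml)
def square_new(n: int) -> int:
    return n * n


assert square_old(4) == square_new(4) == 16
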
gotmax23 authored Apr 10, 2023
1 parent 39a4356 commit 28f969c
Showing 6 changed files with 37 additions and 58 deletions.
6 changes: 6 additions & 0 deletions changelogs/fragments/51-bye-bye-sh.yaml
@@ -0,0 +1,6 @@
+---
+minor_changes:
+  - "Replace internal usage of ``sh`` with the ``antsibull.subprocess_util`` module
+    (https://github.com/ansible-community/antsibull-core/pull/51)."
+  - "Use the pypa ``build`` tool instead of directly calling ``setup.py``, which is
+    deprecated (https://github.com/ansible-community/antsibull-core/pull/51)."
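
To make the changelog entries above concrete, here is a rough usage sketch of the two helpers the commit switches to. It relies only on what the diffs below show (`log_run`, `async_log_run`, the `stderr_loglevel` keyword, and a text `.stdout` attribute on the result); everything else, including the `git --version` command, is assumed for illustration.

# Rough sketch of the subprocess_util helpers; details beyond the diffs are assumptions.
import asyncio

from antsibull_core.subprocess_util import async_log_run, log_run


def git_version() -> str:
    # log_run() runs and logs the command; .stdout is already text, no decode() needed.
    return log_run(['git', '--version']).stdout


async def git_version_async() -> str:
    # async_log_run() is the awaitable variant; stderr_loglevel appears in the
    # create_sdist() hunk further down.
    result = await async_log_run(['git', '--version'], stderr_loglevel='warning')
    return result.stdout


if __name__ == '__main__':
    print(git_version())
    print(asyncio.run(git_version_async()))
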
1 change: 1 addition & 0 deletions pyproject.toml
@@ -27,6 +27,7 @@ requires-python = ">=3.9"
 dependencies = [
     "aiofiles",
     "aiohttp >= 3.0.0",
+    "build",
     # major/minor was introduced here
     "packaging >= 20.0",
     "perky",
32 changes: 10 additions & 22 deletions src/antsibull_core/ansible_core.py
@@ -14,15 +14,15 @@
 import re
 import tempfile
 import typing as t
-from functools import lru_cache, partial
+from functools import cache
 from urllib.parse import urljoin

 import aiofiles
-import sh
 from packaging.version import Version as PypiVer

 from . import app_context
 from .logging import log
+from .subprocess_util import async_log_run
 from .utils.http import retry_get

 if t.TYPE_CHECKING:
@@ -67,7 +67,7 @@ async def _get_json(self, query_url: str) -> dict[str, t.Any]:
             pkg_info = await response.json()
         return pkg_info

-    @lru_cache(None)
+    @cache
     async def get_release_info(self) -> dict[str, t.Any]:
         """
         Retrieve information about releases of the ansible-core/ansible-base package from pypi.
@@ -243,7 +243,7 @@ def source_is_correct_version(ansible_core_source: str | None,
         return False


-@lru_cache(None)
+@cache
 async def checkout_from_git(download_dir: str, repo_url: str = _ANSIBLE_CORE_URL) -> str:
     """
     Checkout the ansible-core git repo.
@@ -252,43 +252,31 @@ async def checkout_from_git(download_dir: str, repo_url: str = _ANSIBLE_CORE_URL
     :kwarg: repo_url: The url to the git repo.
     :return: The directory that ansible-core has been checked out to.
     """
-    loop = asyncio.get_running_loop()
     ansible_core_dir = os.path.join(download_dir, 'ansible-core')
-    await loop.run_in_executor(None, sh.git, 'clone', repo_url, ansible_core_dir)
+    await async_log_run(['git', 'clone', repo_url, ansible_core_dir])

     return ansible_core_dir


-@lru_cache(None)
+@cache
 async def create_sdist(source_dir: str, dest_dir: str) -> str:
     """
     Create an sdist for the python package at a given path.
     Note that this is not able to create an sdist for any python package. It has to have a setup.py
     sdist command.
     :arg source_dir: the directory that the python package source is in.
     :arg dest_dir: the directory that the sdist will be written to/
     :returns: path to the sdist.
     """
-    loop = asyncio.get_running_loop()
-
-    # Make sure setup.py exists
-    setup_script = os.path.join(source_dir, 'setup.py')
-    if not os.path.exists(setup_script):
-        raise CannotBuild(f'{source_dir} does not include a setup.py script. This script cannot'
-                          ' build the package')
-
     # Make a subdir of dest_dir for returning the dist in
     dist_dir_prefix = os.path.join(os.path.basename(source_dir))
     dist_dir = tempfile.mkdtemp(prefix=dist_dir_prefix, dir=dest_dir)

-    # execute python setup.py sdist --dist-dir dest_dir/
-    # sh maps attributes to commands dynamically so ignore the linting errors there
-    # pyre-ignore[16]
-    python_cmd = partial(sh.python, _cwd=source_dir) # pylint:disable=no-member
     try:
-        await loop.run_in_executor(None, python_cmd, setup_script, 'sdist', '--dist-dir', dist_dir)
+        await async_log_run(
+            ['python', '-m', 'build', '--sdist', '--outdir', dist_dir, source_dir],
+            stderr_loglevel='warning',
+        )
     except Exception as e:
         raise CannotBuild(f'Building {source_dir} failed: {e}') # pylint:disable=raise-missing-from

36 changes: 11 additions & 25 deletions src/antsibull_core/collections.py
@@ -9,11 +9,8 @@

 import asyncio
 import os
-from concurrent.futures import ThreadPoolExecutor

-import sh
-
-from . import app_context
+from .subprocess_util import async_log_run


 class CollectionFormatError(Exception):
@@ -22,24 +19,18 @@ class CollectionFormatError(Exception):

 async def install_together(collection_tarballs: list[str],
                            ansible_collections_dir: str) -> None:
-    loop = asyncio.get_running_loop()
-    lib_ctx = app_context.lib_ctx.get()
-    executor = ThreadPoolExecutor(max_workers=lib_ctx.thread_max)
-
     installers = []
     for pathname in collection_tarballs:
         namespace, collection, _dummy = os.path.basename(pathname).split('-', 2)
         collection_dir = os.path.join(ansible_collections_dir, namespace, collection)
         # Note: mkdir -p equivalent is okay because we created package_dir ourselves as a directory
         # that only we can access
         os.makedirs(collection_dir, mode=0o700, exist_ok=False)
-
-        # If the choice of install tools for galaxy is ever settled upon, we can switch from tar to
-        # using that
-        # sh dynamically creates functions which map to executables
-        # pyre-ignore[16] pylint:disable-next=no-member
-        installers.append(loop.run_in_executor(executor, sh.tar, '-xf', pathname, '-C',
-                                               collection_dir))
+        installers.append(
+            asyncio.create_task(
+                async_log_run(['tar', '-xf', pathname, '-C', collection_dir])
+            )
+        )

     await asyncio.gather(*installers)

@@ -51,10 +42,6 @@ async def install_separately(collection_tarballs: list[str], collection_dir: str
     if not collection_tarballs:
         return collection_dirs

-    loop = asyncio.get_running_loop()
-    lib_ctx = app_context.lib_ctx.get()
-    executor = ThreadPoolExecutor(max_workers=lib_ctx.thread_max)
-
     for pathname in collection_tarballs:
         filename = os.path.basename(pathname)
         namespace, collection, version_ext = filename.split('-', 2)
@@ -79,12 +66,11 @@ async def install_separately(collection_tarballs: list[str], collection_dir: str
         # that only we can access
         os.makedirs(collection_dir, mode=0o700, exist_ok=False)

-        # If the choice of install tools for galaxy is ever settled upon, we can switch from tar to
-        # using that
-        # sh dynamically creates functions which map to executables
-        # pyre-ignore[16] pylint:disable-next=no-member
-        installers.append(loop.run_in_executor(executor, sh.tar, '-xf', pathname, '-C',
-                                               collection_dir))
+        installers.append(
+            asyncio.create_task(
+                async_log_run(['tar', '-xf', pathname, '-C', collection_dir])
+            )
+        )

     await asyncio.gather(*installers)

8 changes: 3 additions & 5 deletions src/antsibull_core/filesystem.py
@@ -7,7 +7,7 @@

 from __future__ import annotations

-import sh
+from antsibull_core.subprocess_util import log_run

 try:
     # We fallback to /usr/bin/getfacl so we can ignore failure to import this
@@ -32,10 +32,8 @@ def _get_acls(path: str) -> str:
         acls = acl.to_any_text(options=posix1e.TEXT_NUMERIC_IDS).decode('utf-8')
     else:
         try:
-            # sh dynamically creates functions which map to executables
-            # pyre-ignore[16]
-            acls = sh.getfacl(path, '-n').stdout.decode('utf-8') # pylint:disable=no-member
-        except sh.CommandNotFound:
+            acls = log_run(['getfacl', path, '-n']).stdout
+        except FileNotFoundError:
             pass
         except Exception as e:
             # pylint:disable-next=raise-missing-from
12 changes: 6 additions & 6 deletions src/antsibull_core/tarball.py
@@ -9,7 +9,7 @@

 import re

-import sh
+from antsibull_core.subprocess_util import async_log_run

 #: Regex to find toplevel directories in tar output
 TOPLEVEL_RE: re.Pattern = re.compile('^[^/]+/$')
@@ -28,11 +28,11 @@ async def unpack_tarball(tarname: str, destdir: str) -> str:
     :returns: Toplevel of the unpacked directory structure. This will be
         a subdirectory of `destdir`.
     """
-    # FIXME: Need to run tar via run_in_executor()
-    # FIXME: Use unpack_tarball for places that are manually calling tar now
-    # pyre-ignore[16]
-    manifest = sh.tar('-xzvf', tarname, f'-C{destdir}') # pylint:disable=no-member
-    toplevel_dirs = [filename for filename in manifest if TOPLEVEL_RE.match(filename)]
+    manifest = await async_log_run(['tar', '-xzvf', tarname, f'-C{destdir}'])
+    toplevel_dirs = [
+        filename for filename in manifest.stdout.splitlines()
+        if TOPLEVEL_RE.match(filename)
+    ]

     if len(toplevel_dirs) != 1:
         raise InvalidTarball(f'The tarball {tarname} had more than a single toplevel dir')
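
One subtle point in the tarball.py hunk above: the removed code iterated the `sh.tar(...)` result directly, while `async_log_run()` hands back the whole manifest as a single stdout string, hence the new `.stdout.splitlines()`. A small self-contained sketch of the toplevel-directory filtering, using made-up file names:

# Hypothetical tar manifest, used only to illustrate the filtering above.
import re

TOPLEVEL_RE: re.Pattern = re.compile('^[^/]+/$')

manifest_stdout = (
    'ansible-core-2.14.4/\n'
    'ansible-core-2.14.4/README.rst\n'
    'ansible-core-2.14.4/lib/\n'
)
toplevel_dirs = [
    filename for filename in manifest_stdout.splitlines()
    if TOPLEVEL_RE.match(filename)
]
assert toplevel_dirs == ['ansible-core-2.14.4/']  # exactly one toplevel directory
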
