diff --git a/goth/runner/download/__init__.py b/goth/runner/download/__init__.py
index 73f8facca..9b944a597 100644
--- a/goth/runner/download/__init__.py
+++ b/goth/runner/download/__init__.py
@@ -3,12 +3,14 @@
 from abc import ABC
 import logging
 import os
+import json
 from pathlib import Path
-import re
 import shutil
 import tempfile
-from typing import Any, Callable, Optional
+from typing import Optional
 
+from ghapi.all import GhApi, paged
+from fastcore.utils import obj2dict
 import requests
 
 logging.basicConfig(
@@ -18,8 +20,6 @@
 
 ASSET_CACHE_DIR = Path(tempfile.gettempdir()) / "goth_asset_cache"
 
-BASE_URL = "https://api.github.com/repos"
-
 ENV_API_TOKEN = "GITHUB_API_TOKEN"
 ENV_YAGNA_BRANCH = "YAGNA_BRANCH"
 ENV_YAGNA_COMMIT = "YAGNA_COMMIT_HASH"
@@ -41,8 +41,9 @@ class AssetNotFound(Exception):
 class GithubDownloader(ABC):
     """Base class for downloading assets using GitHub's REST API."""
 
-    repo_url: str
-    """Repo URL to be used as base in API requests."""
+    gh_api: GhApi
+    """GitHub REST API client."""
+
     session: requests.Session
     """Session object for making HTTP requests."""
 
@@ -65,7 +66,7 @@ def __init__(
         if purge_cache:
             shutil.rmtree(ASSET_CACHE_DIR)
 
-        self.repo_url = BASE_URL + f"/{owner}/{repo}"
+        self.gh_api = GhApi(owner=owner, repo=repo, token=token)
         self.session = requests.Session()
         self.session.headers["Authorization"] = f"token {token}"
 
@@ -82,77 +83,17 @@ def _create_cache_dir(self, asset_id: str) -> Path:
         asset_path.mkdir(exist_ok=True, parents=True)
         return asset_path
 
-    def _parse_link_header(self, header_value: str) -> dict:
-        """Parse URLs and their relative positions from a `Link` header value.
-
-        The value of a `Link` header consists of comma-separated tuples, where each
-        tuple has a pagination URL and its `rel` attribute.
-        `rel` describes its URL's relation to the request the header originates from.
-        The value of the `rel` attribute is one of the following:
-        `first`, `prev`, `next`, `last`.
-        """
-        relation_to_url = {}
-        links = [link.strip() for link in header_value.split(",")]
-
-        for link in links:
-            result = re.search(r'<(\S+)>; rel="(\S+)"', link)
-            if not result:
-                raise LookupError
-
-            url = result.group(1)
-            relation = result.group(2)
-            relation_to_url[relation] = url
-
-        return relation_to_url
-
-    def _search_with_pagination(
-        self,
-        initial_request: requests.PreparedRequest,
-        selector: Callable[[requests.Response], Any],
-    ):
-        """Search response data with `Link` header pagination support.
-
-        First request is made using `initial_request`. Consecutive requests are made
-        based on the `Link` header until the last page is reached
-        (i.e. no `next` URL is present). The `selector` function is called for each
-        response received. If the result from `selector` is non-null, this function
-        exits early returning that result.
-        """
-        response = self.session.send(initial_request)
-        logger.debug("_search_with_pagination. initial_url=%s", response.url)
-
-        while True:
-            response.raise_for_status()
-
-            result = selector(response)
-            if result:
-                logger.debug("_search_with_pagination. result=%s", result)
-                return result
-
-            relation_to_url = self._parse_link_header(response.headers["Link"])
-            logger.debug("_search_with_pagination. relation_to_url=%s", relation_to_url)
-            next_url = relation_to_url.get("next")
-            if next_url:
-                logger.debug("_search_with_pagination. next_url=%s", next_url)
-                response = self.session.get(next_url)
-            else:
-                return None
-
 
 class ArtifactDownloader(GithubDownloader):
     """Downloader for GitHub Actions artifacts using GitHub's REST API."""
 
     def _get_workflow(self, workflow_name: str) -> dict:
         """Query the workflow on GitHub Actions."""
-        url = f"{self.repo_url}/actions/workflows"
-        logger.debug("Fetching workflows. url=%s", url)
-        response = self.session.get(url)
-        response.raise_for_status()
-
-        workflows = response.json()["workflows"]
-        logger.debug("workflows=%s", workflows)
+        logger.debug("Fetching workflows. name=%s", workflow_name)
+        workflows = self.gh_api.actions.list_repo_workflows().workflows
         workflow = next(filter(lambda w: w["name"] == workflow_name, workflows))
-        logger.debug("workflow=%s", workflow)
+        logger.debug("workflow=%s", json.dumps(obj2dict(workflow)))
+
         return workflow
 
     def _get_latest_run(
@@ -160,29 +101,27 @@
     ) -> dict:
         """Filter out the latest workflow run."""
         workflow_id = workflow["id"]
-        url = f"{self.repo_url}/actions/workflows/{workflow_id}/runs"
-        params = {"status": "completed"}
-        if not commit:
-            params["branch"] = branch
+        logger.debug("Fetching workflow runs. workflow_id=%s", workflow_id)
 
-        request = self.session.prepare_request(
-            requests.Request("GET", url, params=params)
-        )
-        logger.debug("Fetching workflow runs. url=%s", request.url)
+        if commit:
+            paged_workflow_runs = paged(
+                self.gh_api.actions.list_workflow_runs, workflow_id, status="completed"
+            )
 
-        def _filter_workflows(response: requests.Response) -> Optional[dict]:
-            workflow_runs = response.json()["workflow_runs"]
-            if commit:
-                return next(
-                    filter(lambda r: r["head_sha"].startswith(commit), workflow_runs),
+            for page in paged_workflow_runs:
+                workflow_runs = next(
+                    filter(
+                        lambda r: r["head_sha"].startswith(commit), page.workflow_runs
+                    ),
                     None,
                 )
-            else:
-                return workflow_runs[0]
 
-        workflow_run = self._search_with_pagination(request, _filter_workflows)
-        logger.debug("workflow_run=%s", workflow_run)
-        return workflow_run
+                if workflow_runs:
+                    return workflow_runs
+
+        return self.gh_api.actions.list_workflow_runs(
+            workflow_id, branch=branch, status="completed"
+        ).workflow_runs[0]
 
     def _get_artifact(self, artifact_name: str, workflow_run: dict) -> Optional[dict]:
         artifacts_url = workflow_run["artifacts_url"]
@@ -201,20 +140,19 @@ def _download_artifact(self, artifact: dict) -> Path:
 
         Return path to the extracted artifact.
         """
-        artifact_id = str(artifact["id"])
         archive_url = artifact["archive_download_url"]
         logger.info("Downloading artifact. url=%s", archive_url)
 
         with self.session.get(archive_url) as response:
             response.raise_for_status()
 
-            with tempfile.NamedTemporaryFile() as fd:
-                fd.write(response.content)
-                logger.debug("Extracting zip archive. path=%s", fd.name)
-                cache_dir = self._create_cache_dir(artifact_id)
-                shutil.unpack_archive(fd.name, format="zip", extract_dir=str(cache_dir))
-                logger.debug("Extracted package. path=%s", cache_dir)
-                logger.info("Downloaded artifact. url=%s", archive_url)
+        with tempfile.NamedTemporaryFile() as fd:
+            fd.write(response.content)
+            logger.debug("Extracting zip archive. path=%s", fd.name)
+            cache_dir = self._create_cache_dir(str(artifact["id"]))
+            shutil.unpack_archive(fd.name, format="zip", extract_dir=str(cache_dir))
+            logger.debug("Extracted package. path=%s", cache_dir)
+            logger.info("Downloaded artifact. url=%s", archive_url)
 
         return cache_dir
 
@@ -286,13 +224,8 @@ def _get_latest_release(
         Only the versions with `tag_name` that contains `self.tag_substring` as a
         substring are considered.
         """
-        url = f"{self.repo_url}/releases"
-        logger.debug("Fetching releases. url=%s", url)
-        response = self.session.get(url)
-        response.raise_for_status()
-
-        all_releases = response.json()
-        logger.debug("releases=%s", all_releases)
+        all_releases = self.gh_api.repos.list_releases()
+        logger.debug("releases=%s", json.dumps(obj2dict(all_releases)))
 
         def release_filter(release: dict, tag_substring: str) -> bool:
             has_matching_asset = any(
@@ -310,7 +243,7 @@ def _get_asset(
         self, release: dict, content_type: str, asset_name: Optional[str] = None
     ) -> Optional[dict]:
         assets = release["assets"]
-        logger.debug("assets=%s", assets)
+        logger.debug("assets=%s", json.dumps(obj2dict(assets)))
 
         content_assets = filter(lambda a: a["content_type"] == content_type, assets)
         if content_assets and asset_name:
@@ -320,12 +253,11 @@ def _download_asset(self, asset: dict) -> Path:
         """Download an asset from a specific GitHub release."""
         download_url = asset["browser_download_url"]
-        asset_id = str(asset["id"])
         logger.info("Downloading asset. url=%s", download_url)
 
         with self.session.get(download_url) as response:
             response.raise_for_status()
-            cache_file = self._create_cache_dir(asset_id) / asset["name"]
+            cache_file = self._create_cache_dir(str(asset["id"])) / asset["name"]
             with cache_file.open(mode="wb") as fd:
                 fd.write(response.content)
         logger.info("Downloaded asset. path=%s", str(cache_file))
 
@@ -364,7 +296,7 @@ def download(
         )
 
         logger.debug("Found matching asset. name=%s", asset["name"])
-        logger.debug("asset=%s", asset)
+        logger.debug("asset=%s", json.dumps(asset))
 
         asset_id = str(asset["id"])
         cache_path = self._cache_get(asset_id)
diff --git a/poetry.lock b/poetry.lock
index 173db5245..0557c1f06 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -262,6 +262,20 @@ category = "main"
 optional = false
 python-versions = "*"
 
+[[package]]
+name = "fastcore"
+version = "1.3.19"
+description = "Python supercharged for fastai development"
+category = "main"
+optional = false
+python-versions = ">=3.6"
+
+[package.dependencies]
+packaging = "*"
+
+[package.extras]
+dev = ["numpy", "nbdev (>=0.2.39)", "matplotlib", "pillow", "torch", "pandas"]
+
 [[package]]
 name = "flake8"
 version = "3.8.4"
@@ -314,6 +328,21 @@ category = "main"
 optional = false
 python-versions = "*"
 
+[[package]]
+name = "ghapi"
+version = "0.1.16"
+description = "A python client for the GitHub API"
+category = "main"
+optional = false
+python-versions = ">=3.6"
+
+[package.dependencies]
+fastcore = "*"
+packaging = "*"
+
+[package.extras]
+dev = ["jsonref"]
+
 [[package]]
 name = "h11"
 version = "0.12.0"
@@ -1147,6 +1176,8 @@ brotli = [
     {file = "Brotli-1.0.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5cb1e18167792d7d21e21365d7650b72d5081ed476123ff7b8cac7f45189c0c7"},
     {file = "Brotli-1.0.9-cp39-cp39-manylinux1_i686.whl", hash = "sha256:16d528a45c2e1909c2798f27f7bf0a3feec1dc9e50948e738b961618e38b6a7b"},
     {file = "Brotli-1.0.9-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:56d027eace784738457437df7331965473f2c0da2c70e1a1f6fdbae5402e0389"},
+    {file = "Brotli-1.0.9-cp39-cp39-win32.whl", hash = "sha256:cfc391f4429ee0a9370aa93d812a52e1fee0f37a81861f4fdd1f4fb28e8547c3"},
+    {file = "Brotli-1.0.9-cp39-cp39-win_amd64.whl", hash = "sha256:854c33dad5ba0fbd6ab69185fec8dab89e13cda6b7d191ba111987df74f38761"},
     {file = "Brotli-1.0.9.zip", hash = "sha256:4d1b810aa0ed773f81dceda2cc7b403d01057458730e309856356d4ef4188438"},
 ]
 cached-property = [
@@ -1250,6 +1281,10 @@ dockerpty = [
 docopt = [
     {file = "docopt-0.6.2.tar.gz", hash = "sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491"},
 ]
+fastcore = [
+    {file = "fastcore-1.3.19-py3-none-any.whl", hash = "sha256:278645a0233eb9b227a812ffd7761cb2b37546ac92cb9a93b8ec84d815209d4f"},
+    {file = "fastcore-1.3.19.tar.gz", hash = "sha256:5d68a522d08e311c5329136e4bc485d948ccb92c48a0a96f2ed141dae7620093"},
+]
 flake8 = [
     {file = "flake8-3.8.4-py2.py3-none-any.whl", hash = "sha256:749dbbd6bfd0cf1318af27bf97a14e28e5ff548ef8e5b1566ccfb25a11e7c839"},
     {file = "flake8-3.8.4.tar.gz", hash = "sha256:aadae8761ec651813c24be05c6f7b4680857ef6afaae4651a4eccaef97ce6c3b"},
 ]
@@ -1265,6 +1300,10 @@ flask = [
 func-timeout = [
     {file = "func_timeout-4.3.5.tar.gz", hash = "sha256:74cd3c428ec94f4edfba81f9b2f14904846d5ffccc27c92433b8b5939b5575dd"},
 ]
+ghapi = [
+    {file = "ghapi-0.1.16-py3-none-any.whl", hash = "sha256:f62eece00b14fbd1a3056de64048738a1300837156c875f2e80dbe478716db08"},
+    {file = "ghapi-0.1.16.tar.gz", hash = "sha256:7154cb37261ebeb6d416412b5800fd0fbe653876d9fbbcc273b2c24b2408fc2c"},
+]
 h11 = [
     {file = "h11-0.12.0-py3-none-any.whl", hash = "sha256:36a3cb8c0a032f56e2da7084577878a035d3b61d104230d4bd49c0c6b555a9c6"},
     {file = "h11-0.12.0.tar.gz", hash = "sha256:47222cb6067e4a307d535814917cd98fd0a57b6788ce715755fa2b6c28b56042"},
 ]
@@ -1330,20 +1369,39 @@ markupsafe = [
     {file = "MarkupSafe-1.1.1-cp35-cp35m-win32.whl", hash = "sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1"},
     {file = "MarkupSafe-1.1.1-cp35-cp35m-win_amd64.whl", hash = "sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d"},
     {file = "MarkupSafe-1.1.1-cp36-cp36m-macosx_10_6_intel.whl", hash = "sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff"},
+    {file = "MarkupSafe-1.1.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d53bc011414228441014aa71dbec320c66468c1030aae3a6e29778a3382d96e5"},
     {file = "MarkupSafe-1.1.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473"},
     {file = "MarkupSafe-1.1.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e"},
+    {file = "MarkupSafe-1.1.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:3b8a6499709d29c2e2399569d96719a1b21dcd94410a586a18526b143ec8470f"},
+    {file = "MarkupSafe-1.1.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:84dee80c15f1b560d55bcfe6d47b27d070b4681c699c572af2e3c7cc90a3b8e0"},
+    {file = "MarkupSafe-1.1.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:b1dba4527182c95a0db8b6060cc98ac49b9e2f5e64320e2b56e47cb2831978c7"},
     {file = "MarkupSafe-1.1.1-cp36-cp36m-win32.whl", hash = "sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66"},
     {file = "MarkupSafe-1.1.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5"},
     {file = "MarkupSafe-1.1.1-cp37-cp37m-macosx_10_6_intel.whl", hash = "sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d"},
+    {file = "MarkupSafe-1.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:bf5aa3cbcfdf57fa2ee9cd1822c862ef23037f5c832ad09cfea57fa846dec193"},
     {file = "MarkupSafe-1.1.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e"},
     {file = "MarkupSafe-1.1.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6"},
+    {file = "MarkupSafe-1.1.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:6fffc775d90dcc9aed1b89219549b329a9250d918fd0b8fa8d93d154918422e1"},
+    {file = "MarkupSafe-1.1.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:a6a744282b7718a2a62d2ed9d993cad6f5f585605ad352c11de459f4108df0a1"},
+    {file = "MarkupSafe-1.1.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:195d7d2c4fbb0ee8139a6cf67194f3973a6b3042d742ebe0a9ed36d8b6f0c07f"},
     {file = "MarkupSafe-1.1.1-cp37-cp37m-win32.whl", hash = "sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2"},
     {file = "MarkupSafe-1.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c"},
     {file = "MarkupSafe-1.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6788b695d50a51edb699cb55e35487e430fa21f1ed838122d722e0ff0ac5ba15"},
     {file = "MarkupSafe-1.1.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:cdb132fc825c38e1aeec2c8aa9338310d29d337bebbd7baa06889d09a60a1fa2"},
     {file = "MarkupSafe-1.1.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:13d3144e1e340870b25e7b10b98d779608c02016d5184cfb9927a9f10c689f42"},
+    {file = "MarkupSafe-1.1.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:acf08ac40292838b3cbbb06cfe9b2cb9ec78fce8baca31ddb87aaac2e2dc3bc2"},
+    {file = "MarkupSafe-1.1.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:d9be0ba6c527163cbed5e0857c451fcd092ce83947944d6c14bc95441203f032"},
+    {file = "MarkupSafe-1.1.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:caabedc8323f1e93231b52fc32bdcde6db817623d33e100708d9a68e1f53b26b"},
     {file = "MarkupSafe-1.1.1-cp38-cp38-win32.whl", hash = "sha256:596510de112c685489095da617b5bcbbac7dd6384aeebeda4df6025d0256a81b"},
     {file = "MarkupSafe-1.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be"},
+    {file = "MarkupSafe-1.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d73a845f227b0bfe8a7455ee623525ee656a9e2e749e4742706d80a6065d5e2c"},
+    {file = "MarkupSafe-1.1.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:98bae9582248d6cf62321dcb52aaf5d9adf0bad3b40582925ef7c7f0ed85fceb"},
+    {file = "MarkupSafe-1.1.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:2beec1e0de6924ea551859edb9e7679da6e4870d32cb766240ce17e0a0ba2014"},
+    {file = "MarkupSafe-1.1.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:7fed13866cf14bba33e7176717346713881f56d9d2bcebab207f7a036f41b850"},
+    {file = "MarkupSafe-1.1.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:6f1e273a344928347c1290119b493a1f0303c52f5a5eae5f16d74f48c15d4a85"},
+    {file = "MarkupSafe-1.1.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:feb7b34d6325451ef96bc0e36e1a6c0c1c64bc1fbec4b854f4529e51887b1621"},
+    {file = "MarkupSafe-1.1.1-cp39-cp39-win32.whl", hash = "sha256:22c178a091fc6630d0d045bdb5992d2dfe14e3259760e713c490da5323866c39"},
+    {file = "MarkupSafe-1.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:b7d644ddb4dbd407d31ffb699f1d140bc35478da613b441c582aeb7c43838dd8"},
     {file = "MarkupSafe-1.1.1.tar.gz", hash = "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b"},
 ]
 mccabe = [
@@ -1604,18 +1662,26 @@ pyyaml = [
     {file = "PyYAML-5.4.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185"},
     {file = "PyYAML-5.4.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253"},
     {file = "PyYAML-5.4.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc"},
+    {file = "PyYAML-5.4.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:72a01f726a9c7851ca9bfad6fd09ca4e090a023c00945ea05ba1638c09dc3347"},
+    {file = "PyYAML-5.4.1-cp36-cp36m-manylinux2014_s390x.whl", hash = "sha256:895f61ef02e8fed38159bb70f7e100e00f471eae2bc838cd0f4ebb21e28f8541"},
     {file = "PyYAML-5.4.1-cp36-cp36m-win32.whl", hash = "sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5"},
     {file = "PyYAML-5.4.1-cp36-cp36m-win_amd64.whl", hash = "sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df"},
     {file = "PyYAML-5.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018"},
     {file = "PyYAML-5.4.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63"},
+    {file = "PyYAML-5.4.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:cb333c16912324fd5f769fff6bc5de372e9e7a202247b48870bc251ed40239aa"},
+    {file = "PyYAML-5.4.1-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0"},
     {file = "PyYAML-5.4.1-cp37-cp37m-win32.whl", hash = "sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b"},
     {file = "PyYAML-5.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf"},
     {file = "PyYAML-5.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46"},
     {file = "PyYAML-5.4.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb"},
+    {file = "PyYAML-5.4.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:fd7f6999a8070df521b6384004ef42833b9bd62cfee11a09bda1079b4b704247"},
+    {file = "PyYAML-5.4.1-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:bfb51918d4ff3d77c1c856a9699f8492c612cde32fd3bcd344af9be34999bfdc"},
     {file = "PyYAML-5.4.1-cp38-cp38-win32.whl", hash = "sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc"},
     {file = "PyYAML-5.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696"},
     {file = "PyYAML-5.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77"},
     {file = "PyYAML-5.4.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183"},
+    {file = "PyYAML-5.4.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:d483ad4e639292c90170eb6f7783ad19490e7a8defb3e46f97dfe4bacae89122"},
+    {file = "PyYAML-5.4.1-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6"},
     {file = "PyYAML-5.4.1-cp39-cp39-win32.whl", hash = "sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10"},
     {file = "PyYAML-5.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db"},
     {file = "PyYAML-5.4.1.tar.gz", hash = "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e"},
 ]
diff --git a/pyproject.toml b/pyproject.toml
index 5be7fa80f..5ba624b1b 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -33,6 +33,7 @@ transitions = "^0.8.1"
 typing_extensions = "^3.7.4.2"
 urllib3 = "1.25.*"
 ya-aioclient = "^0.5"
+ghapi = "^0.1.16"
 
 [tool.poetry.dev-dependencies]
 black = "20.8b1"