diff --git a/.github/python.json b/.github/python.json
new file mode 100644
index 0000000..1052a1c
--- /dev/null
+++ b/.github/python.json
@@ -0,0 +1,18 @@
+{
+    "problemMatcher": [
+        {
+            "owner": "python",
+            "pattern": [
+                {
+                    "regexp": "^\\s*File\\s\\\"(.*)\\\",\\sline\\s(\\d+),\\sin\\s(.*)$",
+                    "file": 1,
+                    "line": 2
+                },
+                {
+                    "regexp": "^\\s*raise\\s(.*)\\(\\'(.*)\\'\\)$",
+                    "message": 2
+                }
+            ]
+        }
+    ]
+}
\ No newline at end of file
diff --git a/README.md b/README.md
index fb98a3a..97bb8b8 100644
--- a/README.md
+++ b/README.md
@@ -2,6 +2,10 @@
 
 Set up your GitHub Actions workflow with a specific version of [rye](https://rye-up.com/).
 
+* Install a version of rye and add it to the path
+* Cache the installed version of rye to speed up consecutive runs on self-hosted runners
+* Register problem matchers for error output
+
 ## Limitations
 
 1. Currently only linux is supported see [issue #10](https://github.com/eifinger/setup-rye/issues/10)
@@ -10,12 +14,8 @@
 ## Usage
 
 ```yaml
-- name: Checkout your repository
-  uses: actions/checkout@v3
 - name: Install the latest version of rye
   uses: eifinger/setup-rye@v1
-- name: Sync your dependencies
-  run: rye sync
 ```
 
 You can also specify a specific version of rye
diff --git a/dist/setup/index.js b/dist/setup/index.js
index 4664c31..6a29c74 100644
--- a/dist/setup/index.js
+++ b/dist/setup/index.js
@@ -14777,6 +14777,7 @@ function run() {
             cachedPath = yield setupRye(platform, arch, version);
         }
         addRyeToPath(cachedPath);
+        addMatchers();
     }
     catch (err) {
         core.setFailed(err.message);
@@ -14882,6 +14883,10 @@ function addRyeToPath(cachedPath) {
    core.addPath(`${cachedPath}/shims`);
    core.info(`Added ${cachedPath}/shims to the path`);
 }
+function addMatchers() {
+    const matchersPath = path.join(__dirname, '..', '.github');
+    core.info(`##[add-matcher]${path.join(matchersPath, 'python.json')}`);
+}
 run();
diff --git a/dist/setup/index.js.map b/dist/setup/index.js.map
index 1f0ced2..11d6c5c 100644
--- a/dist/setup/index.js.map
+++ b/dist/setup/index.js.map
@@ -1 +1 @@
-{"version":3,"file":"index.js","mappings":";;;;;;;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/UA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1RA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC9EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACPA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACPA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1GA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACfA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACpCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACfA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACpBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzmBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5lBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtLA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1SA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/HA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AClFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;ACxpBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACPA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC1CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChKA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACnWA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACrJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC/XA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACnDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACz9DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACzFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AClMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AClBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACnBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACrCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACfA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3jDA;;;;;;;;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvQA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjBA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACpnIA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACrKA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACTA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;;;;;;AClDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;;;;;;;;;;;;;;;AChDA;;ACAA;;;;;;;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;;;;;;ACnGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;;;;;;;;ACzPA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACvCA;;ACAA;;ACAA;;ACAA;;ACAA;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;ACpDA;;;;;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AChBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;ACzBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;ACtFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AC5YA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AC1QA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;ACVA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AC/JA;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;ACRA;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;ACnVA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AC5TA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;ACTA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAQA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;AChaA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;AChCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;ACPA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;ACRA;AACA;AACA;AACA;AACA;;;;;ACJA;;;;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;;;;ACNA;AACA;;;;ACDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;AErCA;AACA;AACA;AACA","sources":["../webpack://typescript-action/./node_modules/@actions/core/lib/command.js","../webpack://typescript-action/./node_modules/@actions/core/lib/core.js","../webpack://typescript-action/./node_modules/@actions/core/lib/file-command.js","../webpack://typescript-action/./node_modules/@actions/core/lib/oidc-utils.js","../webpack://typescript-action/./node_modules/@actions/core/lib/path-utils.js","../webpack://typescript-action/./node_modules/@actions/core/lib/summary.js","../webpack://typescript-action/./node_modules/@actions/core/lib/utils.js","../webpack://typescript-action/./node_modules/@actions/core/node_modules/uuid/dist/index.js","../webpack://typescript-action/./node_modules/@actions/core/node_modules/uuid/dist/md5.js","../webpack://typescript-action/./node_modules/@actions/core/node_modules/uuid/dist/nil.js","../webpack://typescript-action/./node_modules/@actions/core/node_modules/uuid/dist/parse.js","../webpack://typescript-action/./node_modules/@actions/core/node_modules/uuid/dist/regex.js","../webpack://typescript-action/./node_modules/@actions/core/node_modules/uuid/dist/rng.js","../webpack://typescript-action/./node_modules/@actions/core/node_modules/uuid/dist/sha1.js","../webpack://typescript-action/./node_modules/@actions/core/node_modules/uuid/dist/stringify.js","../webpack://typescript-action/./node_modules/@actions/core/node_modules/uuid/dist/v1.js","../webpack://typescript-action/./node_modules/@actions/core/node_modules/uuid/dist/v3.js","../webpack://typescript-action/./node_modules/@actions/core/node_modules/uuid/dist/v35.js","../webpack://typescript-action/./node_modules/@actions/core/node_modules/uuid/dist/v4.js","../webpack://typescript-action/./node_modules/@actions/core/node_modules/uuid/dist/v5.js","../webpack://typescript-action/./node_modules/@actions/core/node_modules/uuid/dist/validate.js","../webpack://typescript-action/./node_modules/@actions/core/node_modules/uuid/dist/version.js","../webpack://typescript-action/./node_modules/@actions/exec/lib/exec.js","../webpack://typescript-a
ction/./node_modules/@actions/exec/lib/toolrunner.js","../webpack://typescript-action/./node_modules/@actions/http-client/lib/auth.js","../webpack://typescript-action/./node_modules/@actions/http-client/lib/index.js","../webpack://typescript-action/./node_modules/@actions/http-client/lib/proxy.js","../webpack://typescript-action/./node_modules/@actions/io/lib/io-util.js","../webpack://typescript-action/./node_modules/@actions/io/lib/io.js","../webpack://typescript-action/./node_modules/@actions/tool-cache/lib/manifest.js","../webpack://typescript-action/./node_modules/@actions/tool-cache/lib/retry-helper.js","../webpack://typescript-action/./node_modules/@actions/tool-cache/lib/tool-cache.js","../webpack://typescript-action/./node_modules/@actions/tool-cache/node_modules/uuid/lib/bytesToUuid.js","../webpack://typescript-action/./node_modules/@actions/tool-cache/node_modules/uuid/lib/rng.js","../webpack://typescript-action/./node_modules/@actions/tool-cache/node_modules/uuid/v4.js","../webpack://typescript-action/./node_modules/@octokit/rest/dist-node/index.js","../webpack://typescript-action/./node_modules/@octokit/rest/node_modules/@octokit/auth-token/dist-node/index.js","../webpack://typescript-action/./node_modules/@octokit/rest/node_modules/@octokit/core/dist-node/index.js","../webpack://typescript-action/./node_modules/@octokit/rest/node_modules/@octokit/endpoint/dist-node/index.js","../webpack://typescript-action/./node_modules/@octokit/rest/node_modules/@octokit/graphql/dist-node/index.js","../webpack://typescript-action/./node_modules/@octokit/rest/node_modules/@octokit/plugin-paginate-rest/dist-node/index.js","../webpack://typescript-action/./node_modules/@octokit/rest/node_modules/@octokit/plugin-request-log/dist-node/index.js","../webpack://typescript-action/./node_modules/@octokit/rest/node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js","../webpack://typescript-action/./node_modules/@octokit/rest/node_modules/@octokit/request-error/dist-node/index.js","../webpack://typescript-action/./node_modules/@octokit/rest/node_modules/@octokit/request/dist-node/index.js","../webpack://typescript-action/./node_modules/before-after-hook/index.js","../webpack://typescript-action/./node_modules/before-after-hook/lib/add.js","../webpack://typescript-action/./node_modules/before-after-hook/lib/register.js","../webpack://typescript-action/./node_modules/before-after-hook/lib/remove.js","../webpack://typescript-action/./node_modules/deprecation/dist-node/index.js","../webpack://typescript-action/./node_modules/is-plain-object/dist/is-plain-object.js","../webpack://typescript-action/./node_modules/node-domexception/index.js","../webpack://typescript-action/./node_modules/once/once.js","../webpack://typescript-action/./node_modules/semver/semver.js","../webpack://typescript-action/./node_modules/tunnel/index.js","../webpack://typescript-action/./node_modules/tunnel/lib/tunnel.js","../webpack://typescript-action/./node_modules/universal-user-agent/dist-node/index.js","../webpack://typescript-action/./node_modules/web-streams-polyfill/dist/ponyfill.es2018.js","../webpack://typescript-action/./node_modules/wrappy/wrappy.js","../webpack://typescript-action/./src/setup-rye.ts","../webpack://typescript-action/./src/utils.ts","../webpack://typescript-action/external node-commonjs \"assert\"","../webpack://typescript-action/external node-commonjs \"buffer\"","../webpack://typescript-action/external node-commonjs \"child_process\"","../webpack://typescript-action/external node-commonjs 
\"crypto\"","../webpack://typescript-action/external node-commonjs \"events\"","../webpack://typescript-action/external node-commonjs \"fs\"","../webpack://typescript-action/external node-commonjs \"http\"","../webpack://typescript-action/external node-commonjs \"https\"","../webpack://typescript-action/external node-commonjs \"net\"","../webpack://typescript-action/external node-commonjs \"node:process\"","../webpack://typescript-action/external node-commonjs \"node:stream/web\"","../webpack://typescript-action/external node-commonjs \"os\"","../webpack://typescript-action/external node-commonjs \"path\"","../webpack://typescript-action/external node-commonjs \"stream\"","../webpack://typescript-action/external node-commonjs \"string_decoder\"","../webpack://typescript-action/external node-commonjs \"timers\"","../webpack://typescript-action/external node-commonjs \"tls\"","../webpack://typescript-action/external node-commonjs \"util\"","../webpack://typescript-action/external node-commonjs \"worker_threads\"","../webpack://typescript-action/./node_modules/fetch-blob/streams.cjs","../webpack://typescript-action/./node_modules/fetch-blob/file.js","../webpack://typescript-action/external node-commonjs \"node:fs\"","../webpack://typescript-action/external node-commonjs \"node:path\"","../webpack://typescript-action/./node_modules/fetch-blob/from.js","../webpack://typescript-action/./node_modules/fetch-blob/index.js","../webpack://typescript-action/./node_modules/formdata-polyfill/esm.min.js","../webpack://typescript-action/external node-commonjs \"node:http\"","../webpack://typescript-action/external node-commonjs \"node:https\"","../webpack://typescript-action/external node-commonjs \"node:zlib\"","../webpack://typescript-action/external node-commonjs \"node:stream\"","../webpack://typescript-action/external node-commonjs \"node:buffer\"","../webpack://typescript-action/./node_modules/data-uri-to-buffer/dist/index.js","../webpack://typescript-action/external node-commonjs \"node:util\"","../webpack://typescript-action/./node_modules/node-fetch/src/errors/base.js","../webpack://typescript-action/./node_modules/node-fetch/src/errors/fetch-error.js","../webpack://typescript-action/./node_modules/node-fetch/src/utils/is.js","../webpack://typescript-action/./node_modules/node-fetch/src/body.js","../webpack://typescript-action/./node_modules/node-fetch/src/headers.js","../webpack://typescript-action/./node_modules/node-fetch/src/utils/is-redirect.js","../webpack://typescript-action/./node_modules/node-fetch/src/response.js","../webpack://typescript-action/external node-commonjs \"node:url\"","../webpack://typescript-action/./node_modules/node-fetch/src/utils/get-search.js","../webpack://typescript-action/external node-commonjs \"node:net\"","../webpack://typescript-action/./node_modules/node-fetch/src/utils/referrer.js","../webpack://typescript-action/./node_modules/node-fetch/src/request.js","../webpack://typescript-action/./node_modules/node-fetch/src/errors/abort-error.js","../webpack://typescript-action/./node_modules/node-fetch/src/index.js","../webpack://typescript-action/webpack/bootstrap","../webpack://typescript-action/webpack/runtime/define property getters","../webpack://typescript-action/webpack/runtime/ensure chunk","../webpack://typescript-action/webpack/runtime/get javascript chunk filename","../webpack://typescript-action/webpack/runtime/hasOwnProperty shorthand","../webpack://typescript-action/webpack/runtime/make namespace 
object","../webpack://typescript-action/webpack/runtime/compat","../webpack://typescript-action/webpack/runtime/require chunk loading","../webpack://typescript-action/webpack/before-startup","../webpack://typescript-action/webpack/startup","../webpack://typescript-action/webpack/after-startup"],"sourcesContent":["\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.issue = exports.issueCommand = void 0;\nconst os = __importStar(require(\"os\"));\nconst utils_1 = require(\"./utils\");\n/**\n * Commands\n *\n * Command Format:\n * ::name key=value,key=value::message\n *\n * Examples:\n * ::warning::This is the message\n * ::set-env name=MY_VAR::some value\n */\nfunction issueCommand(command, properties, message) {\n const cmd = new Command(command, properties, message);\n process.stdout.write(cmd.toString() + os.EOL);\n}\nexports.issueCommand = issueCommand;\nfunction issue(name, message = '') {\n issueCommand(name, {}, message);\n}\nexports.issue = issue;\nconst CMD_STRING = '::';\nclass Command {\n constructor(command, properties, message) {\n if (!command) {\n command = 'missing.command';\n }\n this.command = command;\n this.properties = properties;\n this.message = message;\n }\n toString() {\n let cmdStr = CMD_STRING + this.command;\n if (this.properties && Object.keys(this.properties).length > 0) {\n cmdStr += ' ';\n let first = true;\n for (const key in this.properties) {\n if (this.properties.hasOwnProperty(key)) {\n const val = this.properties[key];\n if (val) {\n if (first) {\n first = false;\n }\n else {\n cmdStr += ',';\n }\n cmdStr += `${key}=${escapeProperty(val)}`;\n }\n }\n }\n }\n cmdStr += `${CMD_STRING}${escapeData(this.message)}`;\n return cmdStr;\n }\n}\nfunction escapeData(s) {\n return utils_1.toCommandValue(s)\n .replace(/%/g, '%25')\n .replace(/\\r/g, '%0D')\n .replace(/\\n/g, '%0A');\n}\nfunction escapeProperty(s) {\n return utils_1.toCommandValue(s)\n .replace(/%/g, '%25')\n .replace(/\\r/g, '%0D')\n .replace(/\\n/g, '%0A')\n .replace(/:/g, '%3A')\n .replace(/,/g, '%2C');\n}\n//# sourceMappingURL=command.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0;\nconst command_1 = require(\"./command\");\nconst file_command_1 = require(\"./file-command\");\nconst utils_1 = require(\"./utils\");\nconst os = __importStar(require(\"os\"));\nconst path = __importStar(require(\"path\"));\nconst oidc_utils_1 = require(\"./oidc-utils\");\n/**\n * The code to exit an action\n */\nvar ExitCode;\n(function (ExitCode) {\n /**\n * A code indicating that the action was successful\n */\n ExitCode[ExitCode[\"Success\"] = 0] = \"Success\";\n /**\n * A code indicating that the action was a failure\n */\n ExitCode[ExitCode[\"Failure\"] = 1] = \"Failure\";\n})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));\n//-----------------------------------------------------------------------\n// Variables\n//-----------------------------------------------------------------------\n/**\n * Sets env variable for this action and future actions in the job\n * @param name the name of the variable to set\n * @param val the value of the variable. 
Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction exportVariable(name, val) {\n const convertedVal = utils_1.toCommandValue(val);\n process.env[name] = convertedVal;\n const filePath = process.env['GITHUB_ENV'] || '';\n if (filePath) {\n return file_command_1.issueFileCommand('ENV', file_command_1.prepareKeyValueMessage(name, val));\n }\n command_1.issueCommand('set-env', { name }, convertedVal);\n}\nexports.exportVariable = exportVariable;\n/**\n * Registers a secret which will get masked from logs\n * @param secret value of the secret\n */\nfunction setSecret(secret) {\n command_1.issueCommand('add-mask', {}, secret);\n}\nexports.setSecret = setSecret;\n/**\n * Prepends inputPath to the PATH (for this action and future actions)\n * @param inputPath\n */\nfunction addPath(inputPath) {\n const filePath = process.env['GITHUB_PATH'] || '';\n if (filePath) {\n file_command_1.issueFileCommand('PATH', inputPath);\n }\n else {\n command_1.issueCommand('add-path', {}, inputPath);\n }\n process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;\n}\nexports.addPath = addPath;\n/**\n * Gets the value of an input.\n * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed.\n * Returns an empty string if the value is not defined.\n *\n * @param name name of the input to get\n * @param options optional. See InputOptions.\n * @returns string\n */\nfunction getInput(name, options) {\n const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || '';\n if (options && options.required && !val) {\n throw new Error(`Input required and not supplied: ${name}`);\n }\n if (options && options.trimWhitespace === false) {\n return val;\n }\n return val.trim();\n}\nexports.getInput = getInput;\n/**\n * Gets the values of an multiline input. Each value is also trimmed.\n *\n * @param name name of the input to get\n * @param options optional. See InputOptions.\n * @returns string[]\n *\n */\nfunction getMultilineInput(name, options) {\n const inputs = getInput(name, options)\n .split('\\n')\n .filter(x => x !== '');\n if (options && options.trimWhitespace === false) {\n return inputs;\n }\n return inputs.map(input => input.trim());\n}\nexports.getMultilineInput = getMultilineInput;\n/**\n * Gets the input value of the boolean type in the YAML 1.2 \"core schema\" specification.\n * Support boolean input list: `true | True | TRUE | false | False | FALSE` .\n * The return value is also in boolean type.\n * ref: https://yaml.org/spec/1.2/spec.html#id2804923\n *\n * @param name name of the input to get\n * @param options optional. See InputOptions.\n * @returns boolean\n */\nfunction getBooleanInput(name, options) {\n const trueValue = ['true', 'True', 'TRUE'];\n const falseValue = ['false', 'False', 'FALSE'];\n const val = getInput(name, options);\n if (trueValue.includes(val))\n return true;\n if (falseValue.includes(val))\n return false;\n throw new TypeError(`Input does not meet YAML 1.2 \"Core Schema\" specification: ${name}\\n` +\n `Support boolean input list: \\`true | True | TRUE | false | False | FALSE\\``);\n}\nexports.getBooleanInput = getBooleanInput;\n/**\n * Sets the value of an output.\n *\n * @param name name of the output to set\n * @param value value to store. 
Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction setOutput(name, value) {\n const filePath = process.env['GITHUB_OUTPUT'] || '';\n if (filePath) {\n return file_command_1.issueFileCommand('OUTPUT', file_command_1.prepareKeyValueMessage(name, value));\n }\n process.stdout.write(os.EOL);\n command_1.issueCommand('set-output', { name }, utils_1.toCommandValue(value));\n}\nexports.setOutput = setOutput;\n/**\n * Enables or disables the echoing of commands into stdout for the rest of the step.\n * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.\n *\n */\nfunction setCommandEcho(enabled) {\n command_1.issue('echo', enabled ? 'on' : 'off');\n}\nexports.setCommandEcho = setCommandEcho;\n//-----------------------------------------------------------------------\n// Results\n//-----------------------------------------------------------------------\n/**\n * Sets the action status to failed.\n * When the action exits it will be with an exit code of 1\n * @param message add error issue message\n */\nfunction setFailed(message) {\n process.exitCode = ExitCode.Failure;\n error(message);\n}\nexports.setFailed = setFailed;\n//-----------------------------------------------------------------------\n// Logging Commands\n//-----------------------------------------------------------------------\n/**\n * Gets whether Actions Step Debug is on or not\n */\nfunction isDebug() {\n return process.env['RUNNER_DEBUG'] === '1';\n}\nexports.isDebug = isDebug;\n/**\n * Writes debug message to user log\n * @param message debug message\n */\nfunction debug(message) {\n command_1.issueCommand('debug', {}, message);\n}\nexports.debug = debug;\n/**\n * Adds an error issue\n * @param message error issue message. Errors will be converted to string via toString()\n * @param properties optional properties to add to the annotation.\n */\nfunction error(message, properties = {}) {\n command_1.issueCommand('error', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);\n}\nexports.error = error;\n/**\n * Adds a warning issue\n * @param message warning issue message. Errors will be converted to string via toString()\n * @param properties optional properties to add to the annotation.\n */\nfunction warning(message, properties = {}) {\n command_1.issueCommand('warning', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);\n}\nexports.warning = warning;\n/**\n * Adds a notice issue\n * @param message notice issue message. Errors will be converted to string via toString()\n * @param properties optional properties to add to the annotation.\n */\nfunction notice(message, properties = {}) {\n command_1.issueCommand('notice', utils_1.toCommandProperties(properties), message instanceof Error ? 
message.toString() : message);\n}\nexports.notice = notice;\n/**\n * Writes info to log with console.log.\n * @param message info message\n */\nfunction info(message) {\n process.stdout.write(message + os.EOL);\n}\nexports.info = info;\n/**\n * Begin an output group.\n *\n * Output until the next `groupEnd` will be foldable in this group\n *\n * @param name The name of the output group\n */\nfunction startGroup(name) {\n command_1.issue('group', name);\n}\nexports.startGroup = startGroup;\n/**\n * End an output group.\n */\nfunction endGroup() {\n command_1.issue('endgroup');\n}\nexports.endGroup = endGroup;\n/**\n * Wrap an asynchronous function call in a group.\n *\n * Returns the same type as the function itself.\n *\n * @param name The name of the group\n * @param fn The function to wrap in the group\n */\nfunction group(name, fn) {\n return __awaiter(this, void 0, void 0, function* () {\n startGroup(name);\n let result;\n try {\n result = yield fn();\n }\n finally {\n endGroup();\n }\n return result;\n });\n}\nexports.group = group;\n//-----------------------------------------------------------------------\n// Wrapper action state\n//-----------------------------------------------------------------------\n/**\n * Saves state for current action, the state can only be retrieved by this action's post job execution.\n *\n * @param name name of the state to store\n * @param value value to store. Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction saveState(name, value) {\n const filePath = process.env['GITHUB_STATE'] || '';\n if (filePath) {\n return file_command_1.issueFileCommand('STATE', file_command_1.prepareKeyValueMessage(name, value));\n }\n command_1.issueCommand('save-state', { name }, utils_1.toCommandValue(value));\n}\nexports.saveState = saveState;\n/**\n * Gets the value of an state set by this action's main execution.\n *\n * @param name name of the state to get\n * @returns string\n */\nfunction getState(name) {\n return process.env[`STATE_${name}`] || '';\n}\nexports.getState = getState;\nfunction getIDToken(aud) {\n return __awaiter(this, void 0, void 0, function* () {\n return yield oidc_utils_1.OidcClient.getIDToken(aud);\n });\n}\nexports.getIDToken = getIDToken;\n/**\n * Summary exports\n */\nvar summary_1 = require(\"./summary\");\nObject.defineProperty(exports, \"summary\", { enumerable: true, get: function () { return summary_1.summary; } });\n/**\n * @deprecated use core.summary\n */\nvar summary_2 = require(\"./summary\");\nObject.defineProperty(exports, \"markdownSummary\", { enumerable: true, get: function () { return summary_2.markdownSummary; } });\n/**\n * Path exports\n */\nvar path_utils_1 = require(\"./path-utils\");\nObject.defineProperty(exports, \"toPosixPath\", { enumerable: true, get: function () { return path_utils_1.toPosixPath; } });\nObject.defineProperty(exports, \"toWin32Path\", { enumerable: true, get: function () { return path_utils_1.toWin32Path; } });\nObject.defineProperty(exports, \"toPlatformPath\", { enumerable: true, get: function () { return path_utils_1.toPlatformPath; } });\n//# sourceMappingURL=core.js.map","\"use strict\";\n// For internal use, subject to change.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.prepareKeyValueMessage = exports.issueFileCommand = void 0;\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nconst fs = __importStar(require(\"fs\"));\nconst os = __importStar(require(\"os\"));\nconst uuid_1 = require(\"uuid\");\nconst utils_1 = require(\"./utils\");\nfunction issueFileCommand(command, message) {\n const filePath = process.env[`GITHUB_${command}`];\n if (!filePath) {\n throw new Error(`Unable to find environment variable for file command ${command}`);\n }\n if (!fs.existsSync(filePath)) {\n throw new Error(`Missing file at path: ${filePath}`);\n }\n fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, {\n encoding: 'utf8'\n });\n}\nexports.issueFileCommand = issueFileCommand;\nfunction prepareKeyValueMessage(key, value) {\n const delimiter = `ghadelimiter_${uuid_1.v4()}`;\n const convertedValue = utils_1.toCommandValue(value);\n // These should realistically never happen, but just in case someone finds a\n // way to exploit uuid generation let's not allow keys or values that contain\n // the delimiter.\n if (key.includes(delimiter)) {\n throw new Error(`Unexpected input: name should not contain the delimiter \"${delimiter}\"`);\n }\n if (convertedValue.includes(delimiter)) {\n throw new Error(`Unexpected input: value should not contain the delimiter \"${delimiter}\"`);\n }\n return `${key}<<${delimiter}${os.EOL}${convertedValue}${os.EOL}${delimiter}`;\n}\nexports.prepareKeyValueMessage = prepareKeyValueMessage;\n//# sourceMappingURL=file-command.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.OidcClient = void 0;\nconst http_client_1 = require(\"@actions/http-client\");\nconst auth_1 = require(\"@actions/http-client/lib/auth\");\nconst core_1 = require(\"./core\");\nclass OidcClient {\n static createHttpClient(allowRetry = true, maxRetry = 10) {\n const requestOptions = {\n allowRetries: allowRetry,\n maxRetries: maxRetry\n };\n return new http_client_1.HttpClient('actions/oidc-client', [new auth_1.BearerCredentialHandler(OidcClient.getRequestToken())], requestOptions);\n }\n static getRequestToken() {\n const token = process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN'];\n if (!token) {\n throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable');\n }\n return token;\n }\n static getIDTokenUrl() {\n const runtimeUrl = process.env['ACTIONS_ID_TOKEN_REQUEST_URL'];\n if (!runtimeUrl) {\n throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable');\n }\n return runtimeUrl;\n }\n static getCall(id_token_url) {\n var _a;\n return __awaiter(this, void 0, void 0, function* () {\n const httpclient = OidcClient.createHttpClient();\n const res = yield httpclient\n .getJson(id_token_url)\n .catch(error => {\n throw new Error(`Failed to get ID Token. \\n \n Error Code : ${error.statusCode}\\n \n Error Message: ${error.result.message}`);\n });\n const id_token = (_a = res.result) === null || _a === void 0 ? void 0 : _a.value;\n if (!id_token) {\n throw new Error('Response json body do not have ID Token field');\n }\n return id_token;\n });\n }\n static getIDToken(audience) {\n return __awaiter(this, void 0, void 0, function* () {\n try {\n // New ID Token is requested from action service\n let id_token_url = OidcClient.getIDTokenUrl();\n if (audience) {\n const encodedAudience = encodeURIComponent(audience);\n id_token_url = `${id_token_url}&audience=${encodedAudience}`;\n }\n core_1.debug(`ID token url is ${id_token_url}`);\n const id_token = yield OidcClient.getCall(id_token_url);\n core_1.setSecret(id_token);\n return id_token;\n }\n catch (error) {\n throw new Error(`Error message: ${error.message}`);\n }\n });\n }\n}\nexports.OidcClient = OidcClient;\n//# sourceMappingURL=oidc-utils.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0;\nconst path = __importStar(require(\"path\"));\n/**\n * toPosixPath converts the given path to the posix form. 
On Windows, \\\\ will be\n * replaced with /.\n *\n * @param pth. Path to transform.\n * @return string Posix path.\n */\nfunction toPosixPath(pth) {\n return pth.replace(/[\\\\]/g, '/');\n}\nexports.toPosixPath = toPosixPath;\n/**\n * toWin32Path converts the given path to the win32 form. On Linux, / will be\n * replaced with \\\\.\n *\n * @param pth. Path to transform.\n * @return string Win32 path.\n */\nfunction toWin32Path(pth) {\n return pth.replace(/[/]/g, '\\\\');\n}\nexports.toWin32Path = toWin32Path;\n/**\n * toPlatformPath converts the given path to a platform-specific path. It does\n * this by replacing instances of / and \\ with the platform-specific path\n * separator.\n *\n * @param pth The path to platformize.\n * @return string The platform-specific path.\n */\nfunction toPlatformPath(pth) {\n return pth.replace(/[/\\\\]/g, path.sep);\n}\nexports.toPlatformPath = toPlatformPath;\n//# sourceMappingURL=path-utils.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0;\nconst os_1 = require(\"os\");\nconst fs_1 = require(\"fs\");\nconst { access, appendFile, writeFile } = fs_1.promises;\nexports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY';\nexports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary';\nclass Summary {\n constructor() {\n this._buffer = '';\n }\n /**\n * Finds the summary file path from the environment, rejects if env var is not found or file does not exist\n * Also checks r/w permissions.\n *\n * @returns step summary file path\n */\n filePath() {\n return __awaiter(this, void 0, void 0, function* () {\n if (this._filePath) {\n return this._filePath;\n }\n const pathFromEnv = process.env[exports.SUMMARY_ENV_VAR];\n if (!pathFromEnv) {\n throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`);\n }\n try {\n yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);\n }\n catch (_a) {\n throw new Error(`Unable to access summary file: '${pathFromEnv}'. 
Check if the file has correct read/write permissions.`);\n }\n this._filePath = pathFromEnv;\n return this._filePath;\n });\n }\n /**\n * Wraps content in an HTML tag, adding any HTML attributes\n *\n * @param {string} tag HTML tag to wrap\n * @param {string | null} content content within the tag\n * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add\n *\n * @returns {string} content wrapped in HTML element\n */\n wrap(tag, content, attrs = {}) {\n const htmlAttrs = Object.entries(attrs)\n .map(([key, value]) => ` ${key}=\"${value}\"`)\n .join('');\n if (!content) {\n return `<${tag}${htmlAttrs}>`;\n }\n return `<${tag}${htmlAttrs}>${content}`;\n }\n /**\n * Writes text in the buffer to the summary buffer file and empties buffer. Will append by default.\n *\n * @param {SummaryWriteOptions} [options] (optional) options for write operation\n *\n * @returns {Promise} summary instance\n */\n write(options) {\n return __awaiter(this, void 0, void 0, function* () {\n const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite);\n const filePath = yield this.filePath();\n const writeFunc = overwrite ? writeFile : appendFile;\n yield writeFunc(filePath, this._buffer, { encoding: 'utf8' });\n return this.emptyBuffer();\n });\n }\n /**\n * Clears the summary buffer and wipes the summary file\n *\n * @returns {Summary} summary instance\n */\n clear() {\n return __awaiter(this, void 0, void 0, function* () {\n return this.emptyBuffer().write({ overwrite: true });\n });\n }\n /**\n * Returns the current summary buffer as a string\n *\n * @returns {string} string of summary buffer\n */\n stringify() {\n return this._buffer;\n }\n /**\n * If the summary buffer is empty\n *\n * @returns {boolen} true if the buffer is empty\n */\n isEmptyBuffer() {\n return this._buffer.length === 0;\n }\n /**\n * Resets the summary buffer without writing to summary file\n *\n * @returns {Summary} summary instance\n */\n emptyBuffer() {\n this._buffer = '';\n return this;\n }\n /**\n * Adds raw text to the summary buffer\n *\n * @param {string} text content to add\n * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false)\n *\n * @returns {Summary} summary instance\n */\n addRaw(text, addEOL = false) {\n this._buffer += text;\n return addEOL ? this.addEOL() : this;\n }\n /**\n * Adds the operating system-specific end-of-line marker to the buffer\n *\n * @returns {Summary} summary instance\n */\n addEOL() {\n return this.addRaw(os_1.EOL);\n }\n /**\n * Adds an HTML codeblock to the summary buffer\n *\n * @param {string} code content to render within fenced code block\n * @param {string} lang (optional) language to syntax highlight code\n *\n * @returns {Summary} summary instance\n */\n addCodeBlock(code, lang) {\n const attrs = Object.assign({}, (lang && { lang }));\n const element = this.wrap('pre', this.wrap('code', code), attrs);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML list to the summary buffer\n *\n * @param {string[]} items list of items to render\n * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false)\n *\n * @returns {Summary} summary instance\n */\n addList(items, ordered = false) {\n const tag = ordered ? 
'ol' : 'ul';\n const listItems = items.map(item => this.wrap('li', item)).join('');\n const element = this.wrap(tag, listItems);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML table to the summary buffer\n *\n * @param {SummaryTableCell[]} rows table rows\n *\n * @returns {Summary} summary instance\n */\n addTable(rows) {\n const tableBody = rows\n .map(row => {\n const cells = row\n .map(cell => {\n if (typeof cell === 'string') {\n return this.wrap('td', cell);\n }\n const { header, data, colspan, rowspan } = cell;\n const tag = header ? 'th' : 'td';\n const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan }));\n return this.wrap(tag, data, attrs);\n })\n .join('');\n return this.wrap('tr', cells);\n })\n .join('');\n const element = this.wrap('table', tableBody);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds a collapsable HTML details element to the summary buffer\n *\n * @param {string} label text for the closed state\n * @param {string} content collapsable content\n *\n * @returns {Summary} summary instance\n */\n addDetails(label, content) {\n const element = this.wrap('details', this.wrap('summary', label) + content);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML image tag to the summary buffer\n *\n * @param {string} src path to the image you to embed\n * @param {string} alt text description of the image\n * @param {SummaryImageOptions} options (optional) addition image attributes\n *\n * @returns {Summary} summary instance\n */\n addImage(src, alt, options) {\n const { width, height } = options || {};\n const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height }));\n const element = this.wrap('img', null, Object.assign({ src, alt }, attrs));\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML section heading element\n *\n * @param {string} text heading text\n * @param {number | string} [level=1] (optional) the heading level, default: 1\n *\n * @returns {Summary} summary instance\n */\n addHeading(text, level) {\n const tag = `h${level}`;\n const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag)\n ? tag\n : 'h1';\n const element = this.wrap(allowedTag, text);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML thematic break (
) to the summary buffer\n *\n * @returns {Summary} summary instance\n */\n addSeparator() {\n const element = this.wrap('hr', null);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML line break (
) to the summary buffer\n *\n * @returns {Summary} summary instance\n */\n addBreak() {\n const element = this.wrap('br', null);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML blockquote to the summary buffer\n *\n * @param {string} text quote text\n * @param {string} cite (optional) citation url\n *\n * @returns {Summary} summary instance\n */\n addQuote(text, cite) {\n const attrs = Object.assign({}, (cite && { cite }));\n const element = this.wrap('blockquote', text, attrs);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML anchor tag to the summary buffer\n *\n * @param {string} text link text/content\n * @param {string} href hyperlink\n *\n * @returns {Summary} summary instance\n */\n addLink(text, href) {\n const element = this.wrap('a', text, { href });\n return this.addRaw(element).addEOL();\n }\n}\nconst _summary = new Summary();\n/**\n * @deprecated use `core.summary`\n */\nexports.markdownSummary = _summary;\nexports.summary = _summary;\n//# sourceMappingURL=summary.js.map","\"use strict\";\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.toCommandProperties = exports.toCommandValue = void 0;\n/**\n * Sanitizes an input into a string so it can be passed into issueCommand safely\n * @param input input to sanitize into a string\n */\nfunction toCommandValue(input) {\n if (input === null || input === undefined) {\n return '';\n }\n else if (typeof input === 'string' || input instanceof String) {\n return input;\n }\n return JSON.stringify(input);\n}\nexports.toCommandValue = toCommandValue;\n/**\n *\n * @param annotationProperties\n * @returns The command properties to send with the actual annotation command\n * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646\n */\nfunction toCommandProperties(annotationProperties) {\n if (!Object.keys(annotationProperties).length) {\n return {};\n }\n return {\n title: annotationProperties.title,\n file: annotationProperties.file,\n line: annotationProperties.startLine,\n endLine: annotationProperties.endLine,\n col: annotationProperties.startColumn,\n endColumn: annotationProperties.endColumn\n };\n}\nexports.toCommandProperties = toCommandProperties;\n//# sourceMappingURL=utils.js.map","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nObject.defineProperty(exports, \"v1\", {\n enumerable: true,\n get: function () {\n return _v.default;\n }\n});\nObject.defineProperty(exports, \"v3\", {\n enumerable: true,\n get: function () {\n return _v2.default;\n }\n});\nObject.defineProperty(exports, \"v4\", {\n enumerable: true,\n get: function () {\n return _v3.default;\n }\n});\nObject.defineProperty(exports, \"v5\", {\n enumerable: true,\n get: function () {\n return _v4.default;\n }\n});\nObject.defineProperty(exports, \"NIL\", {\n enumerable: true,\n get: function () {\n return _nil.default;\n }\n});\nObject.defineProperty(exports, \"version\", {\n enumerable: true,\n get: function () {\n return _version.default;\n }\n});\nObject.defineProperty(exports, \"validate\", {\n enumerable: true,\n get: function () {\n return _validate.default;\n }\n});\nObject.defineProperty(exports, \"stringify\", {\n enumerable: true,\n get: function () {\n return _stringify.default;\n }\n});\nObject.defineProperty(exports, \"parse\", {\n enumerable: true,\n get: function () {\n return _parse.default;\n 
}\n});\n\nvar _v = _interopRequireDefault(require(\"./v1.js\"));\n\nvar _v2 = _interopRequireDefault(require(\"./v3.js\"));\n\nvar _v3 = _interopRequireDefault(require(\"./v4.js\"));\n\nvar _v4 = _interopRequireDefault(require(\"./v5.js\"));\n\nvar _nil = _interopRequireDefault(require(\"./nil.js\"));\n\nvar _version = _interopRequireDefault(require(\"./version.js\"));\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nvar _stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nvar _parse = _interopRequireDefault(require(\"./parse.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction md5(bytes) {\n if (Array.isArray(bytes)) {\n bytes = Buffer.from(bytes);\n } else if (typeof bytes === 'string') {\n bytes = Buffer.from(bytes, 'utf8');\n }\n\n return _crypto.default.createHash('md5').update(bytes).digest();\n}\n\nvar _default = md5;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\nvar _default = '00000000-0000-0000-0000-000000000000';\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction parse(uuid) {\n if (!(0, _validate.default)(uuid)) {\n throw TypeError('Invalid UUID');\n }\n\n let v;\n const arr = new Uint8Array(16); // Parse ########-....-....-....-............\n\n arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24;\n arr[1] = v >>> 16 & 0xff;\n arr[2] = v >>> 8 & 0xff;\n arr[3] = v & 0xff; // Parse ........-####-....-....-............\n\n arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8;\n arr[5] = v & 0xff; // Parse ........-....-####-....-............\n\n arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8;\n arr[7] = v & 0xff; // Parse ........-....-....-####-............\n\n arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8;\n arr[9] = v & 0xff; // Parse ........-....-....-....-############\n // (Use \"/\" to avoid 32-bit truncation when bit-shifting high-order bytes)\n\n arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff;\n arr[11] = v / 0x100000000 & 0xff;\n arr[12] = v >>> 24 & 0xff;\n arr[13] = v >>> 16 & 0xff;\n arr[14] = v >>> 8 & 0xff;\n arr[15] = v & 0xff;\n return arr;\n}\n\nvar _default = parse;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\nvar _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = rng;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nconst rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate\n\nlet poolPtr = rnds8Pool.length;\n\nfunction rng() {\n if (poolPtr > rnds8Pool.length - 16) {\n _crypto.default.randomFillSync(rnds8Pool);\n\n poolPtr = 0;\n }\n\n return rnds8Pool.slice(poolPtr, poolPtr += 16);\n}","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction sha1(bytes) {\n if (Array.isArray(bytes)) {\n bytes = Buffer.from(bytes);\n } else if (typeof bytes === 'string') {\n bytes = Buffer.from(bytes, 'utf8');\n }\n\n return _crypto.default.createHash('sha1').update(bytes).digest();\n}\n\nvar _default = sha1;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\n/**\n * Convert array of 16 byte values to UUID string format of the form:\n * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX\n */\nconst byteToHex = [];\n\nfor (let i = 0; i < 256; ++i) {\n byteToHex.push((i + 0x100).toString(16).substr(1));\n}\n\nfunction stringify(arr, offset = 0) {\n // Note: Be careful editing this code! It's been tuned for performance\n // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434\n const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one\n // of the following:\n // - One or more input array values don't map to a hex octet (leading to\n // \"undefined\" in the uuid)\n // - Invalid input values for the RFC `version` or `variant` fields\n\n if (!(0, _validate.default)(uuid)) {\n throw TypeError('Stringified UUID is invalid');\n }\n\n return uuid;\n}\n\nvar _default = stringify;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _rng = _interopRequireDefault(require(\"./rng.js\"));\n\nvar _stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\n// **`v1()` - Generate time-based UUID**\n//\n// Inspired by https://github.com/LiosK/UUID.js\n// and http://docs.python.org/library/uuid.html\nlet _nodeId;\n\nlet _clockseq; // Previous uuid creation time\n\n\nlet _lastMSecs = 0;\nlet _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details\n\nfunction v1(options, buf, offset) {\n let i = buf && offset || 0;\n const b = buf || new Array(16);\n options = options || {};\n let node = options.node || _nodeId;\n let clockseq = options.clockseq !== undefined ? 
options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not\n // specified. We do this lazily to minimize issues related to insufficient\n // system entropy. See #189\n\n if (node == null || clockseq == null) {\n const seedBytes = options.random || (options.rng || _rng.default)();\n\n if (node == null) {\n // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1)\n node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]];\n }\n\n if (clockseq == null) {\n // Per 4.2.2, randomize (14 bit) clockseq\n clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff;\n }\n } // UUID timestamps are 100 nano-second units since the Gregorian epoch,\n // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so\n // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs'\n // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00.\n\n\n let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock\n // cycle to simulate higher resolution clock\n\n let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs)\n\n const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression\n\n if (dt < 0 && options.clockseq === undefined) {\n clockseq = clockseq + 1 & 0x3fff;\n } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new\n // time interval\n\n\n if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) {\n nsecs = 0;\n } // Per 4.2.1.2 Throw error if too many uuids are requested\n\n\n if (nsecs >= 10000) {\n throw new Error(\"uuid.v1(): Can't create more than 10M uuids/sec\");\n }\n\n _lastMSecs = msecs;\n _lastNSecs = nsecs;\n _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch\n\n msecs += 12219292800000; // `time_low`\n\n const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000;\n b[i++] = tl >>> 24 & 0xff;\n b[i++] = tl >>> 16 & 0xff;\n b[i++] = tl >>> 8 & 0xff;\n b[i++] = tl & 0xff; // `time_mid`\n\n const tmh = msecs / 0x100000000 * 10000 & 0xfffffff;\n b[i++] = tmh >>> 8 & 0xff;\n b[i++] = tmh & 0xff; // `time_high_and_version`\n\n b[i++] = tmh >>> 24 & 0xf | 0x10; // include version\n\n b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant)\n\n b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low`\n\n b[i++] = clockseq & 0xff; // `node`\n\n for (let n = 0; n < 6; ++n) {\n b[i + n] = node[n];\n }\n\n return buf || (0, _stringify.default)(b);\n}\n\nvar _default = v1;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _v = _interopRequireDefault(require(\"./v35.js\"));\n\nvar _md = _interopRequireDefault(require(\"./md5.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nconst v3 = (0, _v.default)('v3', 0x30, _md.default);\nvar _default = v3;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = _default;\nexports.URL = exports.DNS = void 0;\n\nvar _stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nvar _parse = _interopRequireDefault(require(\"./parse.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction stringToBytes(str) {\n str = unescape(encodeURIComponent(str)); // UTF8 escape\n\n const bytes = [];\n\n for (let i = 0; i < str.length; ++i) {\n bytes.push(str.charCodeAt(i));\n }\n\n return bytes;\n}\n\nconst DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8';\nexports.DNS = DNS;\nconst URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8';\nexports.URL = URL;\n\nfunction _default(name, version, hashfunc) {\n function generateUUID(value, namespace, buf, offset) {\n if (typeof value === 'string') {\n value = stringToBytes(value);\n }\n\n if (typeof namespace === 'string') {\n namespace = (0, _parse.default)(namespace);\n }\n\n if (namespace.length !== 16) {\n throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)');\n } // Compute hash of namespace and value, Per 4.3\n // Future: Use spread syntax when supported on all platforms, e.g. `bytes =\n // hashfunc([...namespace, ... value])`\n\n\n let bytes = new Uint8Array(16 + value.length);\n bytes.set(namespace);\n bytes.set(value, namespace.length);\n bytes = hashfunc(bytes);\n bytes[6] = bytes[6] & 0x0f | version;\n bytes[8] = bytes[8] & 0x3f | 0x80;\n\n if (buf) {\n offset = offset || 0;\n\n for (let i = 0; i < 16; ++i) {\n buf[offset + i] = bytes[i];\n }\n\n return buf;\n }\n\n return (0, _stringify.default)(bytes);\n } // Function#name is not settable on some platforms (#270)\n\n\n try {\n generateUUID.name = name; // eslint-disable-next-line no-empty\n } catch (err) {} // For CommonJS default export support\n\n\n generateUUID.DNS = DNS;\n generateUUID.URL = URL;\n return generateUUID;\n}","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _rng = _interopRequireDefault(require(\"./rng.js\"));\n\nvar _stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction v4(options, buf, offset) {\n options = options || {};\n\n const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved`\n\n\n rnds[6] = rnds[6] & 0x0f | 0x40;\n rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided\n\n if (buf) {\n offset = offset || 0;\n\n for (let i = 0; i < 16; ++i) {\n buf[offset + i] = rnds[i];\n }\n\n return buf;\n }\n\n return (0, _stringify.default)(rnds);\n}\n\nvar _default = v4;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _v = _interopRequireDefault(require(\"./v35.js\"));\n\nvar _sha = _interopRequireDefault(require(\"./sha1.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nconst v5 = (0, _v.default)('v5', 0x50, _sha.default);\nvar _default = v5;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _regex = _interopRequireDefault(require(\"./regex.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction validate(uuid) {\n return typeof uuid === 'string' && _regex.default.test(uuid);\n}\n\nvar _default = validate;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction version(uuid) {\n if (!(0, _validate.default)(uuid)) {\n throw TypeError('Invalid UUID');\n }\n\n return parseInt(uuid.substr(14, 1), 16);\n}\n\nvar _default = version;\nexports.default = _default;","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getExecOutput = exports.exec = void 0;\nconst string_decoder_1 = require(\"string_decoder\");\nconst tr = __importStar(require(\"./toolrunner\"));\n/**\n * Exec a command.\n * Output will be streamed to the live console.\n * Returns promise with return code\n *\n * @param commandLine command to execute (can include additional args). Must be correctly escaped.\n * @param args optional arguments for tool. Escaping is handled by the lib.\n * @param options optional exec options. 
See ExecOptions\n * @returns Promise exit code\n */\nfunction exec(commandLine, args, options) {\n return __awaiter(this, void 0, void 0, function* () {\n const commandArgs = tr.argStringToArray(commandLine);\n if (commandArgs.length === 0) {\n throw new Error(`Parameter 'commandLine' cannot be null or empty.`);\n }\n // Path to tool to execute should be first arg\n const toolPath = commandArgs[0];\n args = commandArgs.slice(1).concat(args || []);\n const runner = new tr.ToolRunner(toolPath, args, options);\n return runner.exec();\n });\n}\nexports.exec = exec;\n/**\n * Exec a command and get the output.\n * Output will be streamed to the live console.\n * Returns promise with the exit code and collected stdout and stderr\n *\n * @param commandLine command to execute (can include additional args). Must be correctly escaped.\n * @param args optional arguments for tool. Escaping is handled by the lib.\n * @param options optional exec options. See ExecOptions\n * @returns Promise exit code, stdout, and stderr\n */\nfunction getExecOutput(commandLine, args, options) {\n var _a, _b;\n return __awaiter(this, void 0, void 0, function* () {\n let stdout = '';\n let stderr = '';\n //Using string decoder covers the case where a mult-byte character is split\n const stdoutDecoder = new string_decoder_1.StringDecoder('utf8');\n const stderrDecoder = new string_decoder_1.StringDecoder('utf8');\n const originalStdoutListener = (_a = options === null || options === void 0 ? void 0 : options.listeners) === null || _a === void 0 ? void 0 : _a.stdout;\n const originalStdErrListener = (_b = options === null || options === void 0 ? void 0 : options.listeners) === null || _b === void 0 ? void 0 : _b.stderr;\n const stdErrListener = (data) => {\n stderr += stderrDecoder.write(data);\n if (originalStdErrListener) {\n originalStdErrListener(data);\n }\n };\n const stdOutListener = (data) => {\n stdout += stdoutDecoder.write(data);\n if (originalStdoutListener) {\n originalStdoutListener(data);\n }\n };\n const listeners = Object.assign(Object.assign({}, options === null || options === void 0 ? void 0 : options.listeners), { stdout: stdOutListener, stderr: stdErrListener });\n const exitCode = yield exec(commandLine, args, Object.assign(Object.assign({}, options), { listeners }));\n //flush any remaining characters\n stdout += stdoutDecoder.end();\n stderr += stderrDecoder.end();\n return {\n exitCode,\n stdout,\n stderr\n };\n });\n}\nexports.getExecOutput = getExecOutput;\n//# sourceMappingURL=exec.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.argStringToArray = exports.ToolRunner = void 0;\nconst os = __importStar(require(\"os\"));\nconst events = __importStar(require(\"events\"));\nconst child = __importStar(require(\"child_process\"));\nconst path = __importStar(require(\"path\"));\nconst io = __importStar(require(\"@actions/io\"));\nconst ioUtil = __importStar(require(\"@actions/io/lib/io-util\"));\nconst timers_1 = require(\"timers\");\n/* eslint-disable @typescript-eslint/unbound-method */\nconst IS_WINDOWS = process.platform === 'win32';\n/*\n * Class for running command line tools. Handles quoting and arg parsing in a platform agnostic way.\n */\nclass ToolRunner extends events.EventEmitter {\n constructor(toolPath, args, options) {\n super();\n if (!toolPath) {\n throw new Error(\"Parameter 'toolPath' cannot be null or empty.\");\n }\n this.toolPath = toolPath;\n this.args = args || [];\n this.options = options || {};\n }\n _debug(message) {\n if (this.options.listeners && this.options.listeners.debug) {\n this.options.listeners.debug(message);\n }\n }\n _getCommandString(options, noPrefix) {\n const toolPath = this._getSpawnFileName();\n const args = this._getSpawnArgs(options);\n let cmd = noPrefix ? '' : '[command]'; // omit prefix when piped to a second tool\n if (IS_WINDOWS) {\n // Windows + cmd file\n if (this._isCmdFile()) {\n cmd += toolPath;\n for (const a of args) {\n cmd += ` ${a}`;\n }\n }\n // Windows + verbatim\n else if (options.windowsVerbatimArguments) {\n cmd += `\"${toolPath}\"`;\n for (const a of args) {\n cmd += ` ${a}`;\n }\n }\n // Windows (regular)\n else {\n cmd += this._windowsQuoteCmdArg(toolPath);\n for (const a of args) {\n cmd += ` ${this._windowsQuoteCmdArg(a)}`;\n }\n }\n }\n else {\n // OSX/Linux - this can likely be improved with some form of quoting.\n // creating processes on Unix is fundamentally different than Windows.\n // on Unix, execvp() takes an arg array.\n cmd += toolPath;\n for (const a of args) {\n cmd += ` ${a}`;\n }\n }\n return cmd;\n }\n _processLineBuffer(data, strBuffer, onLine) {\n try {\n let s = strBuffer + data.toString();\n let n = s.indexOf(os.EOL);\n while (n > -1) {\n const line = s.substring(0, n);\n onLine(line);\n // the rest of the string ...\n s = s.substring(n + os.EOL.length);\n n = s.indexOf(os.EOL);\n }\n return s;\n }\n catch (err) {\n // streaming lines to console is best effort. Don't fail a build.\n this._debug(`error processing line. Failed with error ${err}`);\n return '';\n }\n }\n _getSpawnFileName() {\n if (IS_WINDOWS) {\n if (this._isCmdFile()) {\n return process.env['COMSPEC'] || 'cmd.exe';\n }\n }\n return this.toolPath;\n }\n _getSpawnArgs(options) {\n if (IS_WINDOWS) {\n if (this._isCmdFile()) {\n let argline = `/D /S /C \"${this._windowsQuoteCmdArg(this.toolPath)}`;\n for (const a of this.args) {\n argline += ' ';\n argline += options.windowsVerbatimArguments\n ? 
a\n : this._windowsQuoteCmdArg(a);\n }\n argline += '\"';\n return [argline];\n }\n }\n return this.args;\n }\n _endsWith(str, end) {\n return str.endsWith(end);\n }\n _isCmdFile() {\n const upperToolPath = this.toolPath.toUpperCase();\n return (this._endsWith(upperToolPath, '.CMD') ||\n this._endsWith(upperToolPath, '.BAT'));\n }\n _windowsQuoteCmdArg(arg) {\n // for .exe, apply the normal quoting rules that libuv applies\n if (!this._isCmdFile()) {\n return this._uvQuoteCmdArg(arg);\n }\n // otherwise apply quoting rules specific to the cmd.exe command line parser.\n // the libuv rules are generic and are not designed specifically for cmd.exe\n // command line parser.\n //\n // for a detailed description of the cmd.exe command line parser, refer to\n // http://stackoverflow.com/questions/4094699/how-does-the-windows-command-interpreter-cmd-exe-parse-scripts/7970912#7970912\n // need quotes for empty arg\n if (!arg) {\n return '\"\"';\n }\n // determine whether the arg needs to be quoted\n const cmdSpecialChars = [\n ' ',\n '\\t',\n '&',\n '(',\n ')',\n '[',\n ']',\n '{',\n '}',\n '^',\n '=',\n ';',\n '!',\n \"'\",\n '+',\n ',',\n '`',\n '~',\n '|',\n '<',\n '>',\n '\"'\n ];\n let needsQuotes = false;\n for (const char of arg) {\n if (cmdSpecialChars.some(x => x === char)) {\n needsQuotes = true;\n break;\n }\n }\n // short-circuit if quotes not needed\n if (!needsQuotes) {\n return arg;\n }\n // the following quoting rules are very similar to the rules that by libuv applies.\n //\n // 1) wrap the string in quotes\n //\n // 2) double-up quotes - i.e. \" => \"\"\n //\n // this is different from the libuv quoting rules. libuv replaces \" with \\\", which unfortunately\n // doesn't work well with a cmd.exe command line.\n //\n // note, replacing \" with \"\" also works well if the arg is passed to a downstream .NET console app.\n // for example, the command line:\n // foo.exe \"myarg:\"\"my val\"\"\"\n // is parsed by a .NET console app into an arg array:\n // [ \"myarg:\\\"my val\\\"\" ]\n // which is the same end result when applying libuv quoting rules. although the actual\n // command line from libuv quoting rules would look like:\n // foo.exe \"myarg:\\\"my val\\\"\"\n //\n // 3) double-up slashes that precede a quote,\n // e.g. hello \\world => \"hello \\world\"\n // hello\\\"world => \"hello\\\\\"\"world\"\n // hello\\\\\"world => \"hello\\\\\\\\\"\"world\"\n // hello world\\ => \"hello world\\\\\"\n //\n // technically this is not required for a cmd.exe command line, or the batch argument parser.\n // the reasons for including this as a .cmd quoting rule are:\n //\n // a) this is optimized for the scenario where the argument is passed from the .cmd file to an\n // external program. many programs (e.g. .NET console apps) rely on the slash-doubling rule.\n //\n // b) it's what we've been doing previously (by deferring to node default behavior) and we\n // haven't heard any complaints about that aspect.\n //\n // note, a weakness of the quoting rules chosen here, is that % is not escaped. in fact, % cannot be\n // escaped when used on the command line directly - even though within a .cmd file % can be escaped\n // by using %%.\n //\n // the saving grace is, on the command line, %var% is left as-is if var is not defined. this contrasts\n // the line parsing rules within a .cmd file, where if var is not defined it is replaced with nothing.\n //\n // one option that was explored was replacing % with ^% - i.e. %var% => ^%var^%. 
this hack would\n // often work, since it is unlikely that var^ would exist, and the ^ character is removed when the\n // variable is used. the problem, however, is that ^ is not removed when %* is used to pass the args\n // to an external program.\n //\n // an unexplored potential solution for the % escaping problem, is to create a wrapper .cmd file.\n // % can be escaped within a .cmd file.\n let reverse = '\"';\n let quoteHit = true;\n for (let i = arg.length; i > 0; i--) {\n // walk the string in reverse\n reverse += arg[i - 1];\n if (quoteHit && arg[i - 1] === '\\\\') {\n reverse += '\\\\'; // double the slash\n }\n else if (arg[i - 1] === '\"') {\n quoteHit = true;\n reverse += '\"'; // double the quote\n }\n else {\n quoteHit = false;\n }\n }\n reverse += '\"';\n return reverse\n .split('')\n .reverse()\n .join('');\n }\n _uvQuoteCmdArg(arg) {\n // Tool runner wraps child_process.spawn() and needs to apply the same quoting as\n // Node in certain cases where the undocumented spawn option windowsVerbatimArguments\n // is used.\n //\n // Since this function is a port of quote_cmd_arg from Node 4.x (technically, lib UV,\n // see https://github.com/nodejs/node/blob/v4.x/deps/uv/src/win/process.c for details),\n // pasting copyright notice from Node within this function:\n //\n // Copyright Joyent, Inc. and other Node contributors. All rights reserved.\n //\n // Permission is hereby granted, free of charge, to any person obtaining a copy\n // of this software and associated documentation files (the \"Software\"), to\n // deal in the Software without restriction, including without limitation the\n // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n // sell copies of the Software, and to permit persons to whom the Software is\n // furnished to do so, subject to the following conditions:\n //\n // The above copyright notice and this permission notice shall be included in\n // all copies or substantial portions of the Software.\n //\n // THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n // IN THE SOFTWARE.\n if (!arg) {\n // Need double quotation for empty argument\n return '\"\"';\n }\n if (!arg.includes(' ') && !arg.includes('\\t') && !arg.includes('\"')) {\n // No quotation needed\n return arg;\n }\n if (!arg.includes('\"') && !arg.includes('\\\\')) {\n // No embedded double quotes or backslashes, so I can just wrap\n // quote marks around the whole thing.\n return `\"${arg}\"`;\n }\n // Expected input/output:\n // input : hello\"world\n // output: \"hello\\\"world\"\n // input : hello\"\"world\n // output: \"hello\\\"\\\"world\"\n // input : hello\\world\n // output: hello\\world\n // input : hello\\\\world\n // output: hello\\\\world\n // input : hello\\\"world\n // output: \"hello\\\\\\\"world\"\n // input : hello\\\\\"world\n // output: \"hello\\\\\\\\\\\"world\"\n // input : hello world\\\n // output: \"hello world\\\\\" - note the comment in libuv actually reads \"hello world\\\"\n // but it appears the comment is wrong, it should be \"hello world\\\\\"\n let reverse = '\"';\n let quoteHit = true;\n for (let i = arg.length; i > 0; i--) {\n // walk the string in reverse\n reverse += arg[i - 1];\n if (quoteHit && arg[i - 1] === '\\\\') {\n reverse += '\\\\';\n }\n else if (arg[i - 1] === '\"') {\n quoteHit = true;\n reverse += '\\\\';\n }\n else {\n quoteHit = false;\n }\n }\n reverse += '\"';\n return reverse\n .split('')\n .reverse()\n .join('');\n }\n _cloneExecOptions(options) {\n options = options || {};\n const result = {\n cwd: options.cwd || process.cwd(),\n env: options.env || process.env,\n silent: options.silent || false,\n windowsVerbatimArguments: options.windowsVerbatimArguments || false,\n failOnStdErr: options.failOnStdErr || false,\n ignoreReturnCode: options.ignoreReturnCode || false,\n delay: options.delay || 10000\n };\n result.outStream = options.outStream || process.stdout;\n result.errStream = options.errStream || process.stderr;\n return result;\n }\n _getSpawnOptions(options, toolPath) {\n options = options || {};\n const result = {};\n result.cwd = options.cwd;\n result.env = options.env;\n result['windowsVerbatimArguments'] =\n options.windowsVerbatimArguments || this._isCmdFile();\n if (options.windowsVerbatimArguments) {\n result.argv0 = `\"${toolPath}\"`;\n }\n return result;\n }\n /**\n * Exec a tool.\n * Output will be streamed to the live console.\n * Returns promise with return code\n *\n * @param tool path to tool to exec\n * @param options optional exec options. 
See ExecOptions\n * @returns number\n */\n exec() {\n return __awaiter(this, void 0, void 0, function* () {\n // root the tool path if it is unrooted and contains relative pathing\n if (!ioUtil.isRooted(this.toolPath) &&\n (this.toolPath.includes('/') ||\n (IS_WINDOWS && this.toolPath.includes('\\\\')))) {\n // prefer options.cwd if it is specified, however options.cwd may also need to be rooted\n this.toolPath = path.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath);\n }\n // if the tool is only a file name, then resolve it from the PATH\n // otherwise verify it exists (add extension on Windows if necessary)\n this.toolPath = yield io.which(this.toolPath, true);\n return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () {\n this._debug(`exec tool: ${this.toolPath}`);\n this._debug('arguments:');\n for (const arg of this.args) {\n this._debug(` ${arg}`);\n }\n const optionsNonNull = this._cloneExecOptions(this.options);\n if (!optionsNonNull.silent && optionsNonNull.outStream) {\n optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os.EOL);\n }\n const state = new ExecState(optionsNonNull, this.toolPath);\n state.on('debug', (message) => {\n this._debug(message);\n });\n if (this.options.cwd && !(yield ioUtil.exists(this.options.cwd))) {\n return reject(new Error(`The cwd: ${this.options.cwd} does not exist!`));\n }\n const fileName = this._getSpawnFileName();\n const cp = child.spawn(fileName, this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(this.options, fileName));\n let stdbuffer = '';\n if (cp.stdout) {\n cp.stdout.on('data', (data) => {\n if (this.options.listeners && this.options.listeners.stdout) {\n this.options.listeners.stdout(data);\n }\n if (!optionsNonNull.silent && optionsNonNull.outStream) {\n optionsNonNull.outStream.write(data);\n }\n stdbuffer = this._processLineBuffer(data, stdbuffer, (line) => {\n if (this.options.listeners && this.options.listeners.stdline) {\n this.options.listeners.stdline(line);\n }\n });\n });\n }\n let errbuffer = '';\n if (cp.stderr) {\n cp.stderr.on('data', (data) => {\n state.processStderr = true;\n if (this.options.listeners && this.options.listeners.stderr) {\n this.options.listeners.stderr(data);\n }\n if (!optionsNonNull.silent &&\n optionsNonNull.errStream &&\n optionsNonNull.outStream) {\n const s = optionsNonNull.failOnStdErr\n ? 
optionsNonNull.errStream\n : optionsNonNull.outStream;\n s.write(data);\n }\n errbuffer = this._processLineBuffer(data, errbuffer, (line) => {\n if (this.options.listeners && this.options.listeners.errline) {\n this.options.listeners.errline(line);\n }\n });\n });\n }\n cp.on('error', (err) => {\n state.processError = err.message;\n state.processExited = true;\n state.processClosed = true;\n state.CheckComplete();\n });\n cp.on('exit', (code) => {\n state.processExitCode = code;\n state.processExited = true;\n this._debug(`Exit code ${code} received from tool '${this.toolPath}'`);\n state.CheckComplete();\n });\n cp.on('close', (code) => {\n state.processExitCode = code;\n state.processExited = true;\n state.processClosed = true;\n this._debug(`STDIO streams have closed for tool '${this.toolPath}'`);\n state.CheckComplete();\n });\n state.on('done', (error, exitCode) => {\n if (stdbuffer.length > 0) {\n this.emit('stdline', stdbuffer);\n }\n if (errbuffer.length > 0) {\n this.emit('errline', errbuffer);\n }\n cp.removeAllListeners();\n if (error) {\n reject(error);\n }\n else {\n resolve(exitCode);\n }\n });\n if (this.options.input) {\n if (!cp.stdin) {\n throw new Error('child process missing stdin');\n }\n cp.stdin.end(this.options.input);\n }\n }));\n });\n }\n}\nexports.ToolRunner = ToolRunner;\n/**\n * Convert an arg string to an array of args. Handles escaping\n *\n * @param argString string of arguments\n * @returns string[] array of arguments\n */\nfunction argStringToArray(argString) {\n const args = [];\n let inQuotes = false;\n let escaped = false;\n let arg = '';\n function append(c) {\n // we only escape double quotes.\n if (escaped && c !== '\"') {\n arg += '\\\\';\n }\n arg += c;\n escaped = false;\n }\n for (let i = 0; i < argString.length; i++) {\n const c = argString.charAt(i);\n if (c === '\"') {\n if (!escaped) {\n inQuotes = !inQuotes;\n }\n else {\n append(c);\n }\n continue;\n }\n if (c === '\\\\' && escaped) {\n append(c);\n continue;\n }\n if (c === '\\\\' && inQuotes) {\n escaped = true;\n continue;\n }\n if (c === ' ' && !inQuotes) {\n if (arg.length > 0) {\n args.push(arg);\n arg = '';\n }\n continue;\n }\n append(c);\n }\n if (arg.length > 0) {\n args.push(arg.trim());\n }\n return args;\n}\nexports.argStringToArray = argStringToArray;\nclass ExecState extends events.EventEmitter {\n constructor(options, toolPath) {\n super();\n this.processClosed = false; // tracks whether the process has exited and stdio is closed\n this.processError = '';\n this.processExitCode = 0;\n this.processExited = false; // tracks whether the process has exited\n this.processStderr = false; // tracks whether stderr was written to\n this.delay = 10000; // 10 seconds\n this.done = false;\n this.timeout = null;\n if (!toolPath) {\n throw new Error('toolPath must not be empty');\n }\n this.options = options;\n this.toolPath = toolPath;\n if (options.delay) {\n this.delay = options.delay;\n }\n }\n CheckComplete() {\n if (this.done) {\n return;\n }\n if (this.processClosed) {\n this._setResult();\n }\n else if (this.processExited) {\n this.timeout = timers_1.setTimeout(ExecState.HandleTimeout, this.delay, this);\n }\n }\n _debug(message) {\n this.emit('debug', message);\n }\n _setResult() {\n // determine whether there is an error\n let error;\n if (this.processExited) {\n if (this.processError) {\n error = new Error(`There was an error when attempting to execute the process '${this.toolPath}'. This may indicate the process failed to start. 
Error: ${this.processError}`);\n }\n else if (this.processExitCode !== 0 && !this.options.ignoreReturnCode) {\n error = new Error(`The process '${this.toolPath}' failed with exit code ${this.processExitCode}`);\n }\n else if (this.processStderr && this.options.failOnStdErr) {\n error = new Error(`The process '${this.toolPath}' failed because one or more lines were written to the STDERR stream`);\n }\n }\n // clear the timeout\n if (this.timeout) {\n clearTimeout(this.timeout);\n this.timeout = null;\n }\n this.done = true;\n this.emit('done', error, this.processExitCode);\n }\n static HandleTimeout(state) {\n if (state.done) {\n return;\n }\n if (!state.processClosed && state.processExited) {\n const message = `The STDIO streams did not close within ${state.delay /\n 1000} seconds of the exit event from process '${state.toolPath}'. This may indicate a child process inherited the STDIO streams and has not yet exited.`;\n state._debug(message);\n }\n state._setResult();\n }\n}\n//# sourceMappingURL=toolrunner.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0;\nclass BasicCredentialHandler {\n constructor(username, password) {\n this.username = username;\n this.password = password;\n }\n prepareRequest(options) {\n if (!options.headers) {\n throw Error('The request has no headers');\n }\n options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`;\n }\n // This handler cannot handle 401\n canHandleAuthentication() {\n return false;\n }\n handleAuthentication() {\n return __awaiter(this, void 0, void 0, function* () {\n throw new Error('not implemented');\n });\n }\n}\nexports.BasicCredentialHandler = BasicCredentialHandler;\nclass BearerCredentialHandler {\n constructor(token) {\n this.token = token;\n }\n // currently implements pre-authorization\n // TODO: support preAuth = false where it hooks on 401\n prepareRequest(options) {\n if (!options.headers) {\n throw Error('The request has no headers');\n }\n options.headers['Authorization'] = `Bearer ${this.token}`;\n }\n // This handler cannot handle 401\n canHandleAuthentication() {\n return false;\n }\n handleAuthentication() {\n return __awaiter(this, void 0, void 0, function* () {\n throw new Error('not implemented');\n });\n }\n}\nexports.BearerCredentialHandler = BearerCredentialHandler;\nclass PersonalAccessTokenCredentialHandler {\n constructor(token) {\n this.token = token;\n }\n // currently implements pre-authorization\n // TODO: support preAuth = false where it hooks on 401\n prepareRequest(options) {\n if (!options.headers) {\n throw Error('The request has no headers');\n }\n options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`;\n 
}\n // This handler cannot handle 401\n canHandleAuthentication() {\n return false;\n }\n handleAuthentication() {\n return __awaiter(this, void 0, void 0, function* () {\n throw new Error('not implemented');\n });\n }\n}\nexports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler;\n//# sourceMappingURL=auth.js.map","\"use strict\";\n/* eslint-disable @typescript-eslint/no-explicit-any */\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0;\nconst http = __importStar(require(\"http\"));\nconst https = __importStar(require(\"https\"));\nconst pm = __importStar(require(\"./proxy\"));\nconst tunnel = __importStar(require(\"tunnel\"));\nvar HttpCodes;\n(function (HttpCodes) {\n HttpCodes[HttpCodes[\"OK\"] = 200] = \"OK\";\n HttpCodes[HttpCodes[\"MultipleChoices\"] = 300] = \"MultipleChoices\";\n HttpCodes[HttpCodes[\"MovedPermanently\"] = 301] = \"MovedPermanently\";\n HttpCodes[HttpCodes[\"ResourceMoved\"] = 302] = \"ResourceMoved\";\n HttpCodes[HttpCodes[\"SeeOther\"] = 303] = \"SeeOther\";\n HttpCodes[HttpCodes[\"NotModified\"] = 304] = \"NotModified\";\n HttpCodes[HttpCodes[\"UseProxy\"] = 305] = \"UseProxy\";\n HttpCodes[HttpCodes[\"SwitchProxy\"] = 306] = \"SwitchProxy\";\n HttpCodes[HttpCodes[\"TemporaryRedirect\"] = 307] = \"TemporaryRedirect\";\n HttpCodes[HttpCodes[\"PermanentRedirect\"] = 308] = \"PermanentRedirect\";\n HttpCodes[HttpCodes[\"BadRequest\"] = 400] = \"BadRequest\";\n HttpCodes[HttpCodes[\"Unauthorized\"] = 401] = \"Unauthorized\";\n HttpCodes[HttpCodes[\"PaymentRequired\"] = 402] = \"PaymentRequired\";\n HttpCodes[HttpCodes[\"Forbidden\"] = 403] = \"Forbidden\";\n HttpCodes[HttpCodes[\"NotFound\"] = 404] = \"NotFound\";\n HttpCodes[HttpCodes[\"MethodNotAllowed\"] = 405] = \"MethodNotAllowed\";\n HttpCodes[HttpCodes[\"NotAcceptable\"] = 406] = \"NotAcceptable\";\n 
HttpCodes[HttpCodes[\"ProxyAuthenticationRequired\"] = 407] = \"ProxyAuthenticationRequired\";\n HttpCodes[HttpCodes[\"RequestTimeout\"] = 408] = \"RequestTimeout\";\n HttpCodes[HttpCodes[\"Conflict\"] = 409] = \"Conflict\";\n HttpCodes[HttpCodes[\"Gone\"] = 410] = \"Gone\";\n HttpCodes[HttpCodes[\"TooManyRequests\"] = 429] = \"TooManyRequests\";\n HttpCodes[HttpCodes[\"InternalServerError\"] = 500] = \"InternalServerError\";\n HttpCodes[HttpCodes[\"NotImplemented\"] = 501] = \"NotImplemented\";\n HttpCodes[HttpCodes[\"BadGateway\"] = 502] = \"BadGateway\";\n HttpCodes[HttpCodes[\"ServiceUnavailable\"] = 503] = \"ServiceUnavailable\";\n HttpCodes[HttpCodes[\"GatewayTimeout\"] = 504] = \"GatewayTimeout\";\n})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));\nvar Headers;\n(function (Headers) {\n Headers[\"Accept\"] = \"accept\";\n Headers[\"ContentType\"] = \"content-type\";\n})(Headers = exports.Headers || (exports.Headers = {}));\nvar MediaTypes;\n(function (MediaTypes) {\n MediaTypes[\"ApplicationJson\"] = \"application/json\";\n})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));\n/**\n * Returns the proxy URL, depending upon the supplied url and proxy environment variables.\n * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com\n */\nfunction getProxyUrl(serverUrl) {\n const proxyUrl = pm.getProxyUrl(new URL(serverUrl));\n return proxyUrl ? proxyUrl.href : '';\n}\nexports.getProxyUrl = getProxyUrl;\nconst HttpRedirectCodes = [\n HttpCodes.MovedPermanently,\n HttpCodes.ResourceMoved,\n HttpCodes.SeeOther,\n HttpCodes.TemporaryRedirect,\n HttpCodes.PermanentRedirect\n];\nconst HttpResponseRetryCodes = [\n HttpCodes.BadGateway,\n HttpCodes.ServiceUnavailable,\n HttpCodes.GatewayTimeout\n];\nconst RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];\nconst ExponentialBackoffCeiling = 10;\nconst ExponentialBackoffTimeSlice = 5;\nclass HttpClientError extends Error {\n constructor(message, statusCode) {\n super(message);\n this.name = 'HttpClientError';\n this.statusCode = statusCode;\n Object.setPrototypeOf(this, HttpClientError.prototype);\n }\n}\nexports.HttpClientError = HttpClientError;\nclass HttpClientResponse {\n constructor(message) {\n this.message = message;\n }\n readBody() {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {\n let output = Buffer.alloc(0);\n this.message.on('data', (chunk) => {\n output = Buffer.concat([output, chunk]);\n });\n this.message.on('end', () => {\n resolve(output.toString());\n });\n }));\n });\n }\n}\nexports.HttpClientResponse = HttpClientResponse;\nfunction isHttps(requestUrl) {\n const parsedUrl = new URL(requestUrl);\n return parsedUrl.protocol === 'https:';\n}\nexports.isHttps = isHttps;\nclass HttpClient {\n constructor(userAgent, handlers, requestOptions) {\n this._ignoreSslError = false;\n this._allowRedirects = true;\n this._allowRedirectDowngrade = false;\n this._maxRedirects = 50;\n this._allowRetries = false;\n this._maxRetries = 1;\n this._keepAlive = false;\n this._disposed = false;\n this.userAgent = userAgent;\n this.handlers = handlers || [];\n this.requestOptions = requestOptions;\n if (requestOptions) {\n if (requestOptions.ignoreSslError != null) {\n this._ignoreSslError = requestOptions.ignoreSslError;\n }\n this._socketTimeout = requestOptions.socketTimeout;\n if (requestOptions.allowRedirects != null) {\n this._allowRedirects = requestOptions.allowRedirects;\n }\n 
if (requestOptions.allowRedirectDowngrade != null) {\n this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;\n }\n if (requestOptions.maxRedirects != null) {\n this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);\n }\n if (requestOptions.keepAlive != null) {\n this._keepAlive = requestOptions.keepAlive;\n }\n if (requestOptions.allowRetries != null) {\n this._allowRetries = requestOptions.allowRetries;\n }\n if (requestOptions.maxRetries != null) {\n this._maxRetries = requestOptions.maxRetries;\n }\n }\n }\n options(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});\n });\n }\n get(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('GET', requestUrl, null, additionalHeaders || {});\n });\n }\n del(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('DELETE', requestUrl, null, additionalHeaders || {});\n });\n }\n post(requestUrl, data, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('POST', requestUrl, data, additionalHeaders || {});\n });\n }\n patch(requestUrl, data, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('PATCH', requestUrl, data, additionalHeaders || {});\n });\n }\n put(requestUrl, data, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('PUT', requestUrl, data, additionalHeaders || {});\n });\n }\n head(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('HEAD', requestUrl, null, additionalHeaders || {});\n });\n }\n sendStream(verb, requestUrl, stream, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request(verb, requestUrl, stream, additionalHeaders);\n });\n }\n /**\n * Gets a typed object from an endpoint\n * Be aware that not found returns a null. 
Other errors (4xx, 5xx) reject the promise\n */\n getJson(requestUrl, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n const res = yield this.get(requestUrl, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n postJson(requestUrl, obj, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n const res = yield this.post(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n putJson(requestUrl, obj, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n const res = yield this.put(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n patchJson(requestUrl, obj, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n const res = yield this.patch(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n /**\n * Makes a raw http request.\n * All other methods such as get, post, patch, and request ultimately call this.\n * Prefer get, del, post and patch\n */\n request(verb, requestUrl, data, headers) {\n return __awaiter(this, void 0, void 0, function* () {\n if (this._disposed) {\n throw new Error('Client has already been disposed.');\n }\n const parsedUrl = new URL(requestUrl);\n let info = this._prepareRequest(verb, parsedUrl, headers);\n // Only perform retries on reads since writes may not be idempotent.\n const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb)\n ? 
this._maxRetries + 1\n : 1;\n let numTries = 0;\n let response;\n do {\n response = yield this.requestRaw(info, data);\n // Check if it's an authentication challenge\n if (response &&\n response.message &&\n response.message.statusCode === HttpCodes.Unauthorized) {\n let authenticationHandler;\n for (const handler of this.handlers) {\n if (handler.canHandleAuthentication(response)) {\n authenticationHandler = handler;\n break;\n }\n }\n if (authenticationHandler) {\n return authenticationHandler.handleAuthentication(this, info, data);\n }\n else {\n // We have received an unauthorized response but have no handlers to handle it.\n // Let the response return to the caller.\n return response;\n }\n }\n let redirectsRemaining = this._maxRedirects;\n while (response.message.statusCode &&\n HttpRedirectCodes.includes(response.message.statusCode) &&\n this._allowRedirects &&\n redirectsRemaining > 0) {\n const redirectUrl = response.message.headers['location'];\n if (!redirectUrl) {\n // if there's no location to redirect to, we won't\n break;\n }\n const parsedRedirectUrl = new URL(redirectUrl);\n if (parsedUrl.protocol === 'https:' &&\n parsedUrl.protocol !== parsedRedirectUrl.protocol &&\n !this._allowRedirectDowngrade) {\n throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');\n }\n // we need to finish reading the response before reassigning response\n // which will leak the open socket.\n yield response.readBody();\n // strip authorization header if redirected to a different hostname\n if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {\n for (const header in headers) {\n // header names are case insensitive\n if (header.toLowerCase() === 'authorization') {\n delete headers[header];\n }\n }\n }\n // let's make the request with the new redirectUrl\n info = this._prepareRequest(verb, parsedRedirectUrl, headers);\n response = yield this.requestRaw(info, data);\n redirectsRemaining--;\n }\n if (!response.message.statusCode ||\n !HttpResponseRetryCodes.includes(response.message.statusCode)) {\n // If not a retry code, return immediately instead of retrying\n return response;\n }\n numTries += 1;\n if (numTries < maxTries) {\n yield response.readBody();\n yield this._performExponentialBackoff(numTries);\n }\n } while (numTries < maxTries);\n return response;\n });\n }\n /**\n * Needs to be called if keepAlive is set to true in request options.\n */\n dispose() {\n if (this._agent) {\n this._agent.destroy();\n }\n this._disposed = true;\n }\n /**\n * Raw request.\n * @param info\n * @param data\n */\n requestRaw(info, data) {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve, reject) => {\n function callbackForResult(err, res) {\n if (err) {\n reject(err);\n }\n else if (!res) {\n // If `err` is not passed, then `res` must be passed.\n reject(new Error('Unknown error'));\n }\n else {\n resolve(res);\n }\n }\n this.requestRawWithCallback(info, data, callbackForResult);\n });\n });\n }\n /**\n * Raw request with callback.\n * @param info\n * @param data\n * @param onResult\n */\n requestRawWithCallback(info, data, onResult) {\n if (typeof data === 'string') {\n if (!info.options.headers) {\n info.options.headers = {};\n }\n info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');\n }\n let callbackCalled = false;\n function handleResult(err, res) {\n if (!callbackCalled) {\n callbackCalled = true;\n 
onResult(err, res);\n }\n }\n const req = info.httpModule.request(info.options, (msg) => {\n const res = new HttpClientResponse(msg);\n handleResult(undefined, res);\n });\n let socket;\n req.on('socket', sock => {\n socket = sock;\n });\n // If we ever get disconnected, we want the socket to timeout eventually\n req.setTimeout(this._socketTimeout || 3 * 60000, () => {\n if (socket) {\n socket.end();\n }\n handleResult(new Error(`Request timeout: ${info.options.path}`));\n });\n req.on('error', function (err) {\n // err has statusCode property\n // res should have headers\n handleResult(err);\n });\n if (data && typeof data === 'string') {\n req.write(data, 'utf8');\n }\n if (data && typeof data !== 'string') {\n data.on('close', function () {\n req.end();\n });\n data.pipe(req);\n }\n else {\n req.end();\n }\n }\n /**\n * Gets an http agent. This function is useful when you need an http agent that handles\n * routing through a proxy server - depending upon the url and proxy environment variables.\n * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com\n */\n getAgent(serverUrl) {\n const parsedUrl = new URL(serverUrl);\n return this._getAgent(parsedUrl);\n }\n _prepareRequest(method, requestUrl, headers) {\n const info = {};\n info.parsedUrl = requestUrl;\n const usingSsl = info.parsedUrl.protocol === 'https:';\n info.httpModule = usingSsl ? https : http;\n const defaultPort = usingSsl ? 443 : 80;\n info.options = {};\n info.options.host = info.parsedUrl.hostname;\n info.options.port = info.parsedUrl.port\n ? parseInt(info.parsedUrl.port)\n : defaultPort;\n info.options.path =\n (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');\n info.options.method = method;\n info.options.headers = this._mergeHeaders(headers);\n if (this.userAgent != null) {\n info.options.headers['user-agent'] = this.userAgent;\n }\n info.options.agent = this._getAgent(info.parsedUrl);\n // gives handlers an opportunity to participate\n if (this.handlers) {\n for (const handler of this.handlers) {\n handler.prepareRequest(info.options);\n }\n }\n return info;\n }\n _mergeHeaders(headers) {\n if (this.requestOptions && this.requestOptions.headers) {\n return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {}));\n }\n return lowercaseKeys(headers || {});\n }\n _getExistingOrDefaultHeader(additionalHeaders, header, _default) {\n let clientHeader;\n if (this.requestOptions && this.requestOptions.headers) {\n clientHeader = lowercaseKeys(this.requestOptions.headers)[header];\n }\n return additionalHeaders[header] || clientHeader || _default;\n }\n _getAgent(parsedUrl) {\n let agent;\n const proxyUrl = pm.getProxyUrl(parsedUrl);\n const useProxy = proxyUrl && proxyUrl.hostname;\n if (this._keepAlive && useProxy) {\n agent = this._proxyAgent;\n }\n if (this._keepAlive && !useProxy) {\n agent = this._agent;\n }\n // if agent is already assigned use that agent.\n if (agent) {\n return agent;\n }\n const usingSsl = parsedUrl.protocol === 'https:';\n let maxSockets = 100;\n if (this.requestOptions) {\n maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;\n }\n // This is `useProxy` again, but we need to check `proxyURl` directly for TypeScripts's flow analysis.\n if (proxyUrl && proxyUrl.hostname) {\n const agentOptions = {\n maxSockets,\n keepAlive: this._keepAlive,\n proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && {\n proxyAuth: 
`${proxyUrl.username}:${proxyUrl.password}`\n })), { host: proxyUrl.hostname, port: proxyUrl.port })\n };\n let tunnelAgent;\n const overHttps = proxyUrl.protocol === 'https:';\n if (usingSsl) {\n tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;\n }\n else {\n tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;\n }\n agent = tunnelAgent(agentOptions);\n this._proxyAgent = agent;\n }\n // if reusing agent across request and tunneling agent isn't assigned create a new agent\n if (this._keepAlive && !agent) {\n const options = { keepAlive: this._keepAlive, maxSockets };\n agent = usingSsl ? new https.Agent(options) : new http.Agent(options);\n this._agent = agent;\n }\n // if not using private agent and tunnel agent isn't setup then use global agent\n if (!agent) {\n agent = usingSsl ? https.globalAgent : http.globalAgent;\n }\n if (usingSsl && this._ignoreSslError) {\n // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process\n // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options\n // we have to cast it to any and change it directly\n agent.options = Object.assign(agent.options || {}, {\n rejectUnauthorized: false\n });\n }\n return agent;\n }\n _performExponentialBackoff(retryNumber) {\n return __awaiter(this, void 0, void 0, function* () {\n retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);\n const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);\n return new Promise(resolve => setTimeout(() => resolve(), ms));\n });\n }\n _processResponse(res, options) {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () {\n const statusCode = res.message.statusCode || 0;\n const response = {\n statusCode,\n result: null,\n headers: {}\n };\n // not found leads to null obj returned\n if (statusCode === HttpCodes.NotFound) {\n resolve(response);\n }\n // get the result from the body\n function dateTimeDeserializer(key, value) {\n if (typeof value === 'string') {\n const a = new Date(value);\n if (!isNaN(a.valueOf())) {\n return a;\n }\n }\n return value;\n }\n let obj;\n let contents;\n try {\n contents = yield res.readBody();\n if (contents && contents.length > 0) {\n if (options && options.deserializeDates) {\n obj = JSON.parse(contents, dateTimeDeserializer);\n }\n else {\n obj = JSON.parse(contents);\n }\n response.result = obj;\n }\n response.headers = res.message.headers;\n }\n catch (err) {\n // Invalid resource (contents not json); leaving result obj null\n }\n // note that 3xx redirects are handled by the http layer.\n if (statusCode > 299) {\n let msg;\n // if exception/error in body, attempt to get better error\n if (obj && obj.message) {\n msg = obj.message;\n }\n else if (contents && contents.length > 0) {\n // it may be the case that the exception is in the body message as string\n msg = contents;\n }\n else {\n msg = `Failed request: (${statusCode})`;\n }\n const err = new HttpClientError(msg, statusCode);\n err.result = response.result;\n reject(err);\n }\n else {\n resolve(response);\n }\n }));\n });\n }\n}\nexports.HttpClient = HttpClient;\nconst lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});\n//# sourceMappingURL=index.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.checkBypass = exports.getProxyUrl = void 0;\nfunction getProxyUrl(reqUrl) {\n const 
usingSsl = reqUrl.protocol === 'https:';\n if (checkBypass(reqUrl)) {\n return undefined;\n }\n const proxyVar = (() => {\n if (usingSsl) {\n return process.env['https_proxy'] || process.env['HTTPS_PROXY'];\n }\n else {\n return process.env['http_proxy'] || process.env['HTTP_PROXY'];\n }\n })();\n if (proxyVar) {\n return new URL(proxyVar);\n }\n else {\n return undefined;\n }\n}\nexports.getProxyUrl = getProxyUrl;\nfunction checkBypass(reqUrl) {\n if (!reqUrl.hostname) {\n return false;\n }\n const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';\n if (!noProxy) {\n return false;\n }\n // Determine the request port\n let reqPort;\n if (reqUrl.port) {\n reqPort = Number(reqUrl.port);\n }\n else if (reqUrl.protocol === 'http:') {\n reqPort = 80;\n }\n else if (reqUrl.protocol === 'https:') {\n reqPort = 443;\n }\n // Format the request hostname and hostname with port\n const upperReqHosts = [reqUrl.hostname.toUpperCase()];\n if (typeof reqPort === 'number') {\n upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);\n }\n // Compare request host against noproxy\n for (const upperNoProxyItem of noProxy\n .split(',')\n .map(x => x.trim().toUpperCase())\n .filter(x => x)) {\n if (upperReqHosts.some(x => x === upperNoProxyItem)) {\n return true;\n }\n }\n return false;\n}\nexports.checkBypass = checkBypass;\n//# sourceMappingURL=proxy.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar _a;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getCmdPath = exports.tryGetExecutablePath = exports.isRooted = exports.isDirectory = exports.exists = exports.READONLY = exports.UV_FS_O_EXLOCK = exports.IS_WINDOWS = exports.unlink = exports.symlink = exports.stat = exports.rmdir = exports.rm = exports.rename = exports.readlink = exports.readdir = exports.open = exports.mkdir = exports.lstat = exports.copyFile = exports.chmod = void 0;\nconst fs = __importStar(require(\"fs\"));\nconst path = __importStar(require(\"path\"));\n_a = fs.promises\n// export const {open} = 'fs'\n, exports.chmod = _a.chmod, exports.copyFile = _a.copyFile, exports.lstat = _a.lstat, exports.mkdir = _a.mkdir, exports.open = _a.open, exports.readdir = _a.readdir, exports.readlink = _a.readlink, exports.rename = _a.rename, exports.rm = _a.rm, exports.rmdir = _a.rmdir, exports.stat = _a.stat, exports.symlink = _a.symlink, exports.unlink = _a.unlink;\n// export const {open} = 'fs'\nexports.IS_WINDOWS = process.platform === 'win32';\n// See https://github.com/nodejs/node/blob/d0153aee367422d0858105abec186da4dff0a0c5/deps/uv/include/uv/win.h#L691\nexports.UV_FS_O_EXLOCK = 0x10000000;\nexports.READONLY = fs.constants.O_RDONLY;\nfunction exists(fsPath) {\n return __awaiter(this, void 0, void 0, function* () {\n try {\n yield exports.stat(fsPath);\n }\n catch (err) {\n if (err.code === 'ENOENT') {\n return false;\n }\n throw err;\n }\n return true;\n });\n}\nexports.exists = exists;\nfunction isDirectory(fsPath, useStat = false) {\n return __awaiter(this, void 0, void 0, function* () {\n const stats = useStat ? yield exports.stat(fsPath) : yield exports.lstat(fsPath);\n return stats.isDirectory();\n });\n}\nexports.isDirectory = isDirectory;\n/**\n * On OSX/Linux, true if path starts with '/'. On Windows, true for paths like:\n * \\, \\hello, \\\\hello\\share, C:, and C:\\hello (and corresponding alternate separator cases).\n */\nfunction isRooted(p) {\n p = normalizeSeparators(p);\n if (!p) {\n throw new Error('isRooted() parameter \"p\" cannot be empty');\n }\n if (exports.IS_WINDOWS) {\n return (p.startsWith('\\\\') || /^[A-Z]:/i.test(p) // e.g. \\ or \\hello or \\\\hello\n ); // e.g. C: or C:\\hello\n }\n return p.startsWith('/');\n}\nexports.isRooted = isRooted;\n/**\n * Best effort attempt to determine whether a file exists and is executable.\n * @param filePath file path to check\n * @param extensions additional file extensions to try\n * @return if file exists and is executable, returns the file path. 
otherwise empty string.\n */\nfunction tryGetExecutablePath(filePath, extensions) {\n return __awaiter(this, void 0, void 0, function* () {\n let stats = undefined;\n try {\n // test file exists\n stats = yield exports.stat(filePath);\n }\n catch (err) {\n if (err.code !== 'ENOENT') {\n // eslint-disable-next-line no-console\n console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`);\n }\n }\n if (stats && stats.isFile()) {\n if (exports.IS_WINDOWS) {\n // on Windows, test for valid extension\n const upperExt = path.extname(filePath).toUpperCase();\n if (extensions.some(validExt => validExt.toUpperCase() === upperExt)) {\n return filePath;\n }\n }\n else {\n if (isUnixExecutable(stats)) {\n return filePath;\n }\n }\n }\n // try each extension\n const originalFilePath = filePath;\n for (const extension of extensions) {\n filePath = originalFilePath + extension;\n stats = undefined;\n try {\n stats = yield exports.stat(filePath);\n }\n catch (err) {\n if (err.code !== 'ENOENT') {\n // eslint-disable-next-line no-console\n console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`);\n }\n }\n if (stats && stats.isFile()) {\n if (exports.IS_WINDOWS) {\n // preserve the case of the actual file (since an extension was appended)\n try {\n const directory = path.dirname(filePath);\n const upperName = path.basename(filePath).toUpperCase();\n for (const actualName of yield exports.readdir(directory)) {\n if (upperName === actualName.toUpperCase()) {\n filePath = path.join(directory, actualName);\n break;\n }\n }\n }\n catch (err) {\n // eslint-disable-next-line no-console\n console.log(`Unexpected error attempting to determine the actual case of the file '${filePath}': ${err}`);\n }\n return filePath;\n }\n else {\n if (isUnixExecutable(stats)) {\n return filePath;\n }\n }\n }\n }\n return '';\n });\n}\nexports.tryGetExecutablePath = tryGetExecutablePath;\nfunction normalizeSeparators(p) {\n p = p || '';\n if (exports.IS_WINDOWS) {\n // convert slashes on Windows\n p = p.replace(/\\//g, '\\\\');\n // remove redundant slashes\n return p.replace(/\\\\\\\\+/g, '\\\\');\n }\n // remove redundant slashes\n return p.replace(/\\/\\/+/g, '/');\n}\n// on Mac/Linux, test the execute bit\n// R W X R W X R W X\n// 256 128 64 32 16 8 4 2 1\nfunction isUnixExecutable(stats) {\n return ((stats.mode & 1) > 0 ||\n ((stats.mode & 8) > 0 && stats.gid === process.getgid()) ||\n ((stats.mode & 64) > 0 && stats.uid === process.getuid()));\n}\n// Get the path of cmd.exe in windows\nfunction getCmdPath() {\n var _a;\n return (_a = process.env['COMSPEC']) !== null && _a !== void 0 ? _a : `cmd.exe`;\n}\nexports.getCmdPath = getCmdPath;\n//# sourceMappingURL=io-util.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.findInPath = exports.which = exports.mkdirP = exports.rmRF = exports.mv = exports.cp = void 0;\nconst assert_1 = require(\"assert\");\nconst path = __importStar(require(\"path\"));\nconst ioUtil = __importStar(require(\"./io-util\"));\n/**\n * Copies a file or folder.\n * Based off of shelljs - https://github.com/shelljs/shelljs/blob/9237f66c52e5daa40458f94f9565e18e8132f5a6/src/cp.js\n *\n * @param source source path\n * @param dest destination path\n * @param options optional. See CopyOptions.\n */\nfunction cp(source, dest, options = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const { force, recursive, copySourceDirectory } = readCopyOptions(options);\n const destStat = (yield ioUtil.exists(dest)) ? yield ioUtil.stat(dest) : null;\n // Dest is an existing file, but not forcing\n if (destStat && destStat.isFile() && !force) {\n return;\n }\n // If dest is an existing directory, should copy inside.\n const newDest = destStat && destStat.isDirectory() && copySourceDirectory\n ? path.join(dest, path.basename(source))\n : dest;\n if (!(yield ioUtil.exists(source))) {\n throw new Error(`no such file or directory: ${source}`);\n }\n const sourceStat = yield ioUtil.stat(source);\n if (sourceStat.isDirectory()) {\n if (!recursive) {\n throw new Error(`Failed to copy. ${source} is a directory, but tried to copy without recursive flag.`);\n }\n else {\n yield cpDirRecursive(source, newDest, 0, force);\n }\n }\n else {\n if (path.relative(source, newDest) === '') {\n // a file cannot be copied to itself\n throw new Error(`'${newDest}' and '${source}' are the same file`);\n }\n yield copyFile(source, newDest, force);\n }\n });\n}\nexports.cp = cp;\n/**\n * Moves a path.\n *\n * @param source source path\n * @param dest destination path\n * @param options optional. 
See MoveOptions.\n */\nfunction mv(source, dest, options = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n if (yield ioUtil.exists(dest)) {\n let destExists = true;\n if (yield ioUtil.isDirectory(dest)) {\n // If dest is directory copy src into dest\n dest = path.join(dest, path.basename(source));\n destExists = yield ioUtil.exists(dest);\n }\n if (destExists) {\n if (options.force == null || options.force) {\n yield rmRF(dest);\n }\n else {\n throw new Error('Destination already exists');\n }\n }\n }\n yield mkdirP(path.dirname(dest));\n yield ioUtil.rename(source, dest);\n });\n}\nexports.mv = mv;\n/**\n * Remove a path recursively with force\n *\n * @param inputPath path to remove\n */\nfunction rmRF(inputPath) {\n return __awaiter(this, void 0, void 0, function* () {\n if (ioUtil.IS_WINDOWS) {\n // Check for invalid characters\n // https://docs.microsoft.com/en-us/windows/win32/fileio/naming-a-file\n if (/[*\"<>|]/.test(inputPath)) {\n throw new Error('File path must not contain `*`, `\"`, `<`, `>` or `|` on Windows');\n }\n }\n try {\n // note if path does not exist, error is silent\n yield ioUtil.rm(inputPath, {\n force: true,\n maxRetries: 3,\n recursive: true,\n retryDelay: 300\n });\n }\n catch (err) {\n throw new Error(`File was unable to be removed ${err}`);\n }\n });\n}\nexports.rmRF = rmRF;\n/**\n * Make a directory. Creates the full path with folders in between\n * Will throw if it fails\n *\n * @param fsPath path to create\n * @returns Promise\n */\nfunction mkdirP(fsPath) {\n return __awaiter(this, void 0, void 0, function* () {\n assert_1.ok(fsPath, 'a path argument must be provided');\n yield ioUtil.mkdir(fsPath, { recursive: true });\n });\n}\nexports.mkdirP = mkdirP;\n/**\n * Returns path of a tool had the tool actually been invoked. Resolves via paths.\n * If you check and the tool does not exist, it will throw.\n *\n * @param tool name of the tool\n * @param check whether to check if tool exists\n * @returns Promise path to tool\n */\nfunction which(tool, check) {\n return __awaiter(this, void 0, void 0, function* () {\n if (!tool) {\n throw new Error(\"parameter 'tool' is required\");\n }\n // recursive when check=true\n if (check) {\n const result = yield which(tool, false);\n if (!result) {\n if (ioUtil.IS_WINDOWS) {\n throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also verify the file has a valid extension for an executable file.`);\n }\n else {\n throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. 
Also check the file mode to verify the file is executable.`);\n }\n }\n return result;\n }\n const matches = yield findInPath(tool);\n if (matches && matches.length > 0) {\n return matches[0];\n }\n return '';\n });\n}\nexports.which = which;\n/**\n * Returns a list of all occurrences of the given tool on the system path.\n *\n * @returns Promise the paths of the tool\n */\nfunction findInPath(tool) {\n return __awaiter(this, void 0, void 0, function* () {\n if (!tool) {\n throw new Error(\"parameter 'tool' is required\");\n }\n // build the list of extensions to try\n const extensions = [];\n if (ioUtil.IS_WINDOWS && process.env['PATHEXT']) {\n for (const extension of process.env['PATHEXT'].split(path.delimiter)) {\n if (extension) {\n extensions.push(extension);\n }\n }\n }\n // if it's rooted, return it if exists. otherwise return empty.\n if (ioUtil.isRooted(tool)) {\n const filePath = yield ioUtil.tryGetExecutablePath(tool, extensions);\n if (filePath) {\n return [filePath];\n }\n return [];\n }\n // if any path separators, return empty\n if (tool.includes(path.sep)) {\n return [];\n }\n // build the list of directories\n //\n // Note, technically \"where\" checks the current directory on Windows. From a toolkit perspective,\n // it feels like we should not do this. Checking the current directory seems like more of a use\n // case of a shell, and the which() function exposed by the toolkit should strive for consistency\n // across platforms.\n const directories = [];\n if (process.env.PATH) {\n for (const p of process.env.PATH.split(path.delimiter)) {\n if (p) {\n directories.push(p);\n }\n }\n }\n // find all matches\n const matches = [];\n for (const directory of directories) {\n const filePath = yield ioUtil.tryGetExecutablePath(path.join(directory, tool), extensions);\n if (filePath) {\n matches.push(filePath);\n }\n }\n return matches;\n });\n}\nexports.findInPath = findInPath;\nfunction readCopyOptions(options) {\n const force = options.force == null ? true : options.force;\n const recursive = Boolean(options.recursive);\n const copySourceDirectory = options.copySourceDirectory == null\n ? 
true\n : Boolean(options.copySourceDirectory);\n return { force, recursive, copySourceDirectory };\n}\nfunction cpDirRecursive(sourceDir, destDir, currentDepth, force) {\n return __awaiter(this, void 0, void 0, function* () {\n // Ensure there is not a run away recursive copy\n if (currentDepth >= 255)\n return;\n currentDepth++;\n yield mkdirP(destDir);\n const files = yield ioUtil.readdir(sourceDir);\n for (const fileName of files) {\n const srcFile = `${sourceDir}/${fileName}`;\n const destFile = `${destDir}/${fileName}`;\n const srcFileStat = yield ioUtil.lstat(srcFile);\n if (srcFileStat.isDirectory()) {\n // Recurse\n yield cpDirRecursive(srcFile, destFile, currentDepth, force);\n }\n else {\n yield copyFile(srcFile, destFile, force);\n }\n }\n // Change the mode for the newly created directory\n yield ioUtil.chmod(destDir, (yield ioUtil.stat(sourceDir)).mode);\n });\n}\n// Buffered file copy\nfunction copyFile(srcFile, destFile, force) {\n return __awaiter(this, void 0, void 0, function* () {\n if ((yield ioUtil.lstat(srcFile)).isSymbolicLink()) {\n // unlink/re-link it\n try {\n yield ioUtil.lstat(destFile);\n yield ioUtil.unlink(destFile);\n }\n catch (e) {\n // Try to override file permission\n if (e.code === 'EPERM') {\n yield ioUtil.chmod(destFile, '0666');\n yield ioUtil.unlink(destFile);\n }\n // other errors = it doesn't exist, no work to do\n }\n // Copy over symlink\n const symlinkFull = yield ioUtil.readlink(srcFile);\n yield ioUtil.symlink(symlinkFull, destFile, ioUtil.IS_WINDOWS ? 'junction' : null);\n }\n else if (!(yield ioUtil.exists(destFile)) || force) {\n yield ioUtil.copyFile(srcFile, destFile);\n }\n });\n}\n//# sourceMappingURL=io.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports._readLinuxVersionFile = exports._getOsVersion = exports._findMatch = void 0;\nconst semver = __importStar(require(\"semver\"));\nconst core_1 = require(\"@actions/core\");\n// needs to be require for core node modules to be mocked\n/* eslint @typescript-eslint/no-require-imports: 0 */\nconst os = require(\"os\");\nconst cp = require(\"child_process\");\nconst fs = require(\"fs\");\nfunction _findMatch(versionSpec, stable, candidates, archFilter) {\n return __awaiter(this, void 0, void 0, function* () {\n const platFilter = os.platform();\n let result;\n let match;\n let file;\n for (const candidate of candidates) {\n const version = candidate.version;\n core_1.debug(`check ${version} satisfies ${versionSpec}`);\n if (semver.satisfies(version, versionSpec) &&\n (!stable || candidate.stable === stable)) {\n file = candidate.files.find(item => {\n core_1.debug(`${item.arch}===${archFilter} && ${item.platform}===${platFilter}`);\n let chk = item.arch === archFilter && item.platform === platFilter;\n if (chk && item.platform_version) {\n const osVersion = module.exports._getOsVersion();\n if (osVersion === item.platform_version) {\n chk = true;\n }\n else {\n chk = semver.satisfies(osVersion, item.platform_version);\n }\n }\n return chk;\n });\n if (file) {\n core_1.debug(`matched ${candidate.version}`);\n match = candidate;\n break;\n }\n }\n }\n if (match && file) {\n // clone since we're mutating the file list to be only the file that matches\n result = Object.assign({}, match);\n result.files = [file];\n }\n return result;\n });\n}\nexports._findMatch = _findMatch;\nfunction _getOsVersion() {\n // TODO: add windows and other linux, arm variants\n // right now filtering on version is only an ubuntu and macos scenario for tools we build for hosted (python)\n const plat = os.platform();\n let version = '';\n if (plat === 'darwin') {\n version = cp.execSync('sw_vers -productVersion').toString();\n }\n else if (plat === 'linux') {\n // lsb_release process not in some containers, readfile\n // Run cat /etc/lsb-release\n // DISTRIB_ID=Ubuntu\n // DISTRIB_RELEASE=18.04\n // DISTRIB_CODENAME=bionic\n // DISTRIB_DESCRIPTION=\"Ubuntu 18.04.4 LTS\"\n const lsbContents = module.exports._readLinuxVersionFile();\n if (lsbContents) {\n const lines = lsbContents.split('\\n');\n for (const line of lines) {\n const parts = line.split('=');\n if (parts.length === 2 &&\n (parts[0].trim() === 'VERSION_ID' ||\n parts[0].trim() === 'DISTRIB_RELEASE')) {\n version = parts[1]\n .trim()\n .replace(/^\"/, '')\n .replace(/\"$/, '');\n break;\n }\n }\n }\n }\n return version;\n}\nexports._getOsVersion = _getOsVersion;\nfunction _readLinuxVersionFile() {\n const lsbReleaseFile = '/etc/lsb-release';\n const osReleaseFile = '/etc/os-release';\n let contents = '';\n if (fs.existsSync(lsbReleaseFile)) {\n contents = fs.readFileSync(lsbReleaseFile).toString();\n }\n else if (fs.existsSync(osReleaseFile)) {\n contents = fs.readFileSync(osReleaseFile).toString();\n }\n return contents;\n}\nexports._readLinuxVersionFile = _readLinuxVersionFile;\n//# sourceMappingURL=manifest.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.RetryHelper = void 0;\nconst core = __importStar(require(\"@actions/core\"));\n/**\n * Internal class for retries\n */\nclass RetryHelper {\n constructor(maxAttempts, minSeconds, maxSeconds) {\n if (maxAttempts < 1) {\n throw new Error('max attempts should be greater than or equal to 1');\n }\n this.maxAttempts = maxAttempts;\n this.minSeconds = Math.floor(minSeconds);\n this.maxSeconds = Math.floor(maxSeconds);\n if (this.minSeconds > this.maxSeconds) {\n throw new Error('min seconds should be less than or equal to max seconds');\n }\n }\n execute(action, isRetryable) {\n return __awaiter(this, void 0, void 0, function* () {\n let attempt = 1;\n while (attempt < this.maxAttempts) {\n // Try\n try {\n return yield action();\n }\n catch (err) {\n if (isRetryable && !isRetryable(err)) {\n throw err;\n }\n core.info(err.message);\n }\n // Sleep\n const seconds = this.getSleepAmount();\n core.info(`Waiting ${seconds} seconds before trying again`);\n yield this.sleep(seconds);\n attempt++;\n }\n // Last attempt\n return yield action();\n });\n }\n getSleepAmount() {\n return (Math.floor(Math.random() * (this.maxSeconds - this.minSeconds + 1)) +\n this.minSeconds);\n }\n sleep(seconds) {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise(resolve => setTimeout(resolve, seconds * 1000));\n });\n }\n}\nexports.RetryHelper = RetryHelper;\n//# sourceMappingURL=retry-helper.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.evaluateVersions = exports.isExplicitVersion = exports.findFromManifest = exports.getManifestFromRepo = exports.findAllVersions = exports.find = exports.cacheFile = exports.cacheDir = exports.extractZip = exports.extractXar = exports.extractTar = exports.extract7z = exports.downloadTool = exports.HTTPError = void 0;\nconst core = __importStar(require(\"@actions/core\"));\nconst io = __importStar(require(\"@actions/io\"));\nconst fs = __importStar(require(\"fs\"));\nconst mm = __importStar(require(\"./manifest\"));\nconst os = __importStar(require(\"os\"));\nconst path = __importStar(require(\"path\"));\nconst httpm = __importStar(require(\"@actions/http-client\"));\nconst semver = __importStar(require(\"semver\"));\nconst stream = __importStar(require(\"stream\"));\nconst util = __importStar(require(\"util\"));\nconst assert_1 = require(\"assert\");\nconst v4_1 = __importDefault(require(\"uuid/v4\"));\nconst exec_1 = require(\"@actions/exec/lib/exec\");\nconst retry_helper_1 = require(\"./retry-helper\");\nclass HTTPError extends Error {\n constructor(httpStatusCode) {\n super(`Unexpected HTTP response: ${httpStatusCode}`);\n this.httpStatusCode = httpStatusCode;\n Object.setPrototypeOf(this, new.target.prototype);\n }\n}\nexports.HTTPError = HTTPError;\nconst IS_WINDOWS = process.platform === 'win32';\nconst IS_MAC = process.platform === 'darwin';\nconst userAgent = 'actions/tool-cache';\n/**\n * Download a tool from an url and stream it into a file\n *\n * @param url url of tool to download\n * @param dest path to download tool\n * @param auth authorization header\n * @param headers other headers\n * @returns path to downloaded tool\n */\nfunction downloadTool(url, dest, auth, headers) {\n return __awaiter(this, void 0, void 0, function* () {\n dest = dest || path.join(_getTempDirectory(), v4_1.default());\n yield io.mkdirP(path.dirname(dest));\n core.debug(`Downloading ${url}`);\n core.debug(`Destination ${dest}`);\n const maxAttempts = 3;\n const minSeconds = _getGlobal('TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS', 10);\n const maxSeconds = _getGlobal('TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS', 20);\n const retryHelper = new retry_helper_1.RetryHelper(maxAttempts, minSeconds, maxSeconds);\n 
return yield retryHelper.execute(() => __awaiter(this, void 0, void 0, function* () {\n return yield downloadToolAttempt(url, dest || '', auth, headers);\n }), (err) => {\n if (err instanceof HTTPError && err.httpStatusCode) {\n // Don't retry anything less than 500, except 408 Request Timeout and 429 Too Many Requests\n if (err.httpStatusCode < 500 &&\n err.httpStatusCode !== 408 &&\n err.httpStatusCode !== 429) {\n return false;\n }\n }\n // Otherwise retry\n return true;\n });\n });\n}\nexports.downloadTool = downloadTool;\nfunction downloadToolAttempt(url, dest, auth, headers) {\n return __awaiter(this, void 0, void 0, function* () {\n if (fs.existsSync(dest)) {\n throw new Error(`Destination file path ${dest} already exists`);\n }\n // Get the response headers\n const http = new httpm.HttpClient(userAgent, [], {\n allowRetries: false\n });\n if (auth) {\n core.debug('set auth');\n if (headers === undefined) {\n headers = {};\n }\n headers.authorization = auth;\n }\n const response = yield http.get(url, headers);\n if (response.message.statusCode !== 200) {\n const err = new HTTPError(response.message.statusCode);\n core.debug(`Failed to download from \"${url}\". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`);\n throw err;\n }\n // Download the response body\n const pipeline = util.promisify(stream.pipeline);\n const responseMessageFactory = _getGlobal('TEST_DOWNLOAD_TOOL_RESPONSE_MESSAGE_FACTORY', () => response.message);\n const readStream = responseMessageFactory();\n let succeeded = false;\n try {\n yield pipeline(readStream, fs.createWriteStream(dest));\n core.debug('download complete');\n succeeded = true;\n return dest;\n }\n finally {\n // Error, delete dest before retry\n if (!succeeded) {\n core.debug('download failed');\n try {\n yield io.rmRF(dest);\n }\n catch (err) {\n core.debug(`Failed to delete '${dest}'. ${err.message}`);\n }\n }\n }\n });\n}\n/**\n * Extract a .7z file\n *\n * @param file path to the .7z file\n * @param dest destination directory. Optional.\n * @param _7zPath path to 7zr.exe. Optional, for long path support. Most .7z archives do not have this\n * problem. If your .7z archive contains very long paths, you can pass the path to 7zr.exe which will\n * gracefully handle long paths. By default 7zdec.exe is used because it is a very small program and is\n * bundled with the tool lib. However it does not support long paths. 7zr.exe is the reduced command line\n * interface, it is smaller than the full command line interface, and it does support long paths. At the\n * time of this writing, it is freely available from the LZMA SDK that is available on the 7zip website.\n * Be sure to check the current license agreement. If 7zr.exe is bundled with your action, then the path\n * to 7zr.exe can be pass to this function.\n * @returns path to the destination directory\n */\nfunction extract7z(file, dest, _7zPath) {\n return __awaiter(this, void 0, void 0, function* () {\n assert_1.ok(IS_WINDOWS, 'extract7z() not supported on current OS');\n assert_1.ok(file, 'parameter \"file\" is required');\n dest = yield _createExtractFolder(dest);\n const originalCwd = process.cwd();\n process.chdir(dest);\n if (_7zPath) {\n try {\n const logLevel = core.isDebug() ? 
'-bb1' : '-bb0';\n const args = [\n 'x',\n logLevel,\n '-bd',\n '-sccUTF-8',\n file\n ];\n const options = {\n silent: true\n };\n yield exec_1.exec(`\"${_7zPath}\"`, args, options);\n }\n finally {\n process.chdir(originalCwd);\n }\n }\n else {\n const escapedScript = path\n .join(__dirname, '..', 'scripts', 'Invoke-7zdec.ps1')\n .replace(/'/g, \"''\")\n .replace(/\"|\\n|\\r/g, ''); // double-up single quotes, remove double quotes and newlines\n const escapedFile = file.replace(/'/g, \"''\").replace(/\"|\\n|\\r/g, '');\n const escapedTarget = dest.replace(/'/g, \"''\").replace(/\"|\\n|\\r/g, '');\n const command = `& '${escapedScript}' -Source '${escapedFile}' -Target '${escapedTarget}'`;\n const args = [\n '-NoLogo',\n '-Sta',\n '-NoProfile',\n '-NonInteractive',\n '-ExecutionPolicy',\n 'Unrestricted',\n '-Command',\n command\n ];\n const options = {\n silent: true\n };\n try {\n const powershellPath = yield io.which('powershell', true);\n yield exec_1.exec(`\"${powershellPath}\"`, args, options);\n }\n finally {\n process.chdir(originalCwd);\n }\n }\n return dest;\n });\n}\nexports.extract7z = extract7z;\n/**\n * Extract a compressed tar archive\n *\n * @param file path to the tar\n * @param dest destination directory. Optional.\n * @param flags flags for the tar command to use for extraction. Defaults to 'xz' (extracting gzipped tars). Optional.\n * @returns path to the destination directory\n */\nfunction extractTar(file, dest, flags = 'xz') {\n return __awaiter(this, void 0, void 0, function* () {\n if (!file) {\n throw new Error(\"parameter 'file' is required\");\n }\n // Create dest\n dest = yield _createExtractFolder(dest);\n // Determine whether GNU tar\n core.debug('Checking tar --version');\n let versionOutput = '';\n yield exec_1.exec('tar --version', [], {\n ignoreReturnCode: true,\n silent: true,\n listeners: {\n stdout: (data) => (versionOutput += data.toString()),\n stderr: (data) => (versionOutput += data.toString())\n }\n });\n core.debug(versionOutput.trim());\n const isGnuTar = versionOutput.toUpperCase().includes('GNU TAR');\n // Initialize args\n let args;\n if (flags instanceof Array) {\n args = flags;\n }\n else {\n args = [flags];\n }\n if (core.isDebug() && !flags.includes('v')) {\n args.push('-v');\n }\n let destArg = dest;\n let fileArg = file;\n if (IS_WINDOWS && isGnuTar) {\n args.push('--force-local');\n destArg = dest.replace(/\\\\/g, '/');\n // Technically only the dest needs to have `/` but for aesthetic consistency\n // convert slashes in the file arg too.\n fileArg = file.replace(/\\\\/g, '/');\n }\n if (isGnuTar) {\n // Suppress warnings when using GNU tar to extract archives created by BSD tar\n args.push('--warning=no-unknown-keyword');\n args.push('--overwrite');\n }\n args.push('-C', destArg, '-f', fileArg);\n yield exec_1.exec(`tar`, args);\n return dest;\n });\n}\nexports.extractTar = extractTar;\n/**\n * Extract a xar compatible archive\n *\n * @param file path to the archive\n * @param dest destination directory. Optional.\n * @param flags flags for the xar. 
Optional.\n * @returns path to the destination directory\n */\nfunction extractXar(file, dest, flags = []) {\n return __awaiter(this, void 0, void 0, function* () {\n assert_1.ok(IS_MAC, 'extractXar() not supported on current OS');\n assert_1.ok(file, 'parameter \"file\" is required');\n dest = yield _createExtractFolder(dest);\n let args;\n if (flags instanceof Array) {\n args = flags;\n }\n else {\n args = [flags];\n }\n args.push('-x', '-C', dest, '-f', file);\n if (core.isDebug()) {\n args.push('-v');\n }\n const xarPath = yield io.which('xar', true);\n yield exec_1.exec(`\"${xarPath}\"`, _unique(args));\n return dest;\n });\n}\nexports.extractXar = extractXar;\n/**\n * Extract a zip\n *\n * @param file path to the zip\n * @param dest destination directory. Optional.\n * @returns path to the destination directory\n */\nfunction extractZip(file, dest) {\n return __awaiter(this, void 0, void 0, function* () {\n if (!file) {\n throw new Error(\"parameter 'file' is required\");\n }\n dest = yield _createExtractFolder(dest);\n if (IS_WINDOWS) {\n yield extractZipWin(file, dest);\n }\n else {\n yield extractZipNix(file, dest);\n }\n return dest;\n });\n}\nexports.extractZip = extractZip;\nfunction extractZipWin(file, dest) {\n return __awaiter(this, void 0, void 0, function* () {\n // build the powershell command\n const escapedFile = file.replace(/'/g, \"''\").replace(/\"|\\n|\\r/g, ''); // double-up single quotes, remove double quotes and newlines\n const escapedDest = dest.replace(/'/g, \"''\").replace(/\"|\\n|\\r/g, '');\n const pwshPath = yield io.which('pwsh', false);\n //To match the file overwrite behavior on nix systems, we use the overwrite = true flag for ExtractToDirectory\n //and the -Force flag for Expand-Archive as a fallback\n if (pwshPath) {\n //attempt to use pwsh with ExtractToDirectory, if this fails attempt Expand-Archive\n const pwshCommand = [\n `$ErrorActionPreference = 'Stop' ;`,\n `try { Add-Type -AssemblyName System.IO.Compression.ZipFile } catch { } ;`,\n `try { [System.IO.Compression.ZipFile]::ExtractToDirectory('${escapedFile}', '${escapedDest}', $true) }`,\n `catch { if (($_.Exception.GetType().FullName -eq 'System.Management.Automation.MethodException') -or ($_.Exception.GetType().FullName -eq 'System.Management.Automation.RuntimeException') ){ Expand-Archive -LiteralPath '${escapedFile}' -DestinationPath '${escapedDest}' -Force } else { throw $_ } } ;`\n ].join(' ');\n const args = [\n '-NoLogo',\n '-NoProfile',\n '-NonInteractive',\n '-ExecutionPolicy',\n 'Unrestricted',\n '-Command',\n pwshCommand\n ];\n core.debug(`Using pwsh at path: ${pwshPath}`);\n yield exec_1.exec(`\"${pwshPath}\"`, args);\n }\n else {\n const powershellCommand = [\n `$ErrorActionPreference = 'Stop' ;`,\n `try { Add-Type -AssemblyName System.IO.Compression.FileSystem } catch { } ;`,\n `if ((Get-Command -Name Expand-Archive -Module Microsoft.PowerShell.Archive -ErrorAction Ignore)) { Expand-Archive -LiteralPath '${escapedFile}' -DestinationPath '${escapedDest}' -Force }`,\n `else {[System.IO.Compression.ZipFile]::ExtractToDirectory('${escapedFile}', '${escapedDest}', $true) }`\n ].join(' ');\n const args = [\n '-NoLogo',\n '-Sta',\n '-NoProfile',\n '-NonInteractive',\n '-ExecutionPolicy',\n 'Unrestricted',\n '-Command',\n powershellCommand\n ];\n const powershellPath = yield io.which('powershell', true);\n core.debug(`Using powershell at path: ${powershellPath}`);\n yield exec_1.exec(`\"${powershellPath}\"`, args);\n }\n });\n}\nfunction extractZipNix(file, dest) {\n return 
__awaiter(this, void 0, void 0, function* () {\n const unzipPath = yield io.which('unzip', true);\n const args = [file];\n if (!core.isDebug()) {\n args.unshift('-q');\n }\n args.unshift('-o'); //overwrite with -o, otherwise a prompt is shown which freezes the run\n yield exec_1.exec(`\"${unzipPath}\"`, args, { cwd: dest });\n });\n}\n/**\n * Caches a directory and installs it into the tool cacheDir\n *\n * @param sourceDir the directory to cache into tools\n * @param tool tool name\n * @param version version of the tool. semver format\n * @param arch architecture of the tool. Optional. Defaults to machine architecture\n */\nfunction cacheDir(sourceDir, tool, version, arch) {\n return __awaiter(this, void 0, void 0, function* () {\n version = semver.clean(version) || version;\n arch = arch || os.arch();\n core.debug(`Caching tool ${tool} ${version} ${arch}`);\n core.debug(`source dir: ${sourceDir}`);\n if (!fs.statSync(sourceDir).isDirectory()) {\n throw new Error('sourceDir is not a directory');\n }\n // Create the tool dir\n const destPath = yield _createToolPath(tool, version, arch);\n // copy each child item. do not move. move can fail on Windows\n // due to anti-virus software having an open handle on a file.\n for (const itemName of fs.readdirSync(sourceDir)) {\n const s = path.join(sourceDir, itemName);\n yield io.cp(s, destPath, { recursive: true });\n }\n // write .complete\n _completeToolPath(tool, version, arch);\n return destPath;\n });\n}\nexports.cacheDir = cacheDir;\n/**\n * Caches a downloaded file (GUID) and installs it\n * into the tool cache with a given targetName\n *\n * @param sourceFile the file to cache into tools. Typically a result of downloadTool which is a guid.\n * @param targetFile the name of the file name in the tools directory\n * @param tool tool name\n * @param version version of the tool. semver format\n * @param arch architecture of the tool. Optional. Defaults to machine architecture\n */\nfunction cacheFile(sourceFile, targetFile, tool, version, arch) {\n return __awaiter(this, void 0, void 0, function* () {\n version = semver.clean(version) || version;\n arch = arch || os.arch();\n core.debug(`Caching tool ${tool} ${version} ${arch}`);\n core.debug(`source file: ${sourceFile}`);\n if (!fs.statSync(sourceFile).isFile()) {\n throw new Error('sourceFile is not a file');\n }\n // create the tool dir\n const destFolder = yield _createToolPath(tool, version, arch);\n // copy instead of move. move can fail on Windows due to\n // anti-virus software having an open handle on a file.\n const destPath = path.join(destFolder, targetFile);\n core.debug(`destination file ${destPath}`);\n yield io.cp(sourceFile, destPath);\n // write .complete\n _completeToolPath(tool, version, arch);\n return destFolder;\n });\n}\nexports.cacheFile = cacheFile;\n/**\n * Finds the path to a tool version in the local installed tool cache\n *\n * @param toolName name of the tool\n * @param versionSpec version of the tool\n * @param arch optional arch. 
defaults to arch of computer\n */\nfunction find(toolName, versionSpec, arch) {\n if (!toolName) {\n throw new Error('toolName parameter is required');\n }\n if (!versionSpec) {\n throw new Error('versionSpec parameter is required');\n }\n arch = arch || os.arch();\n // attempt to resolve an explicit version\n if (!isExplicitVersion(versionSpec)) {\n const localVersions = findAllVersions(toolName, arch);\n const match = evaluateVersions(localVersions, versionSpec);\n versionSpec = match;\n }\n // check for the explicit version in the cache\n let toolPath = '';\n if (versionSpec) {\n versionSpec = semver.clean(versionSpec) || '';\n const cachePath = path.join(_getCacheDirectory(), toolName, versionSpec, arch);\n core.debug(`checking cache: ${cachePath}`);\n if (fs.existsSync(cachePath) && fs.existsSync(`${cachePath}.complete`)) {\n core.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch}`);\n toolPath = cachePath;\n }\n else {\n core.debug('not found');\n }\n }\n return toolPath;\n}\nexports.find = find;\n/**\n * Finds the paths to all versions of a tool that are installed in the local tool cache\n *\n * @param toolName name of the tool\n * @param arch optional arch. defaults to arch of computer\n */\nfunction findAllVersions(toolName, arch) {\n const versions = [];\n arch = arch || os.arch();\n const toolPath = path.join(_getCacheDirectory(), toolName);\n if (fs.existsSync(toolPath)) {\n const children = fs.readdirSync(toolPath);\n for (const child of children) {\n if (isExplicitVersion(child)) {\n const fullPath = path.join(toolPath, child, arch || '');\n if (fs.existsSync(fullPath) && fs.existsSync(`${fullPath}.complete`)) {\n versions.push(child);\n }\n }\n }\n }\n return versions;\n}\nexports.findAllVersions = findAllVersions;\nfunction getManifestFromRepo(owner, repo, auth, branch = 'master') {\n return __awaiter(this, void 0, void 0, function* () {\n let releases = [];\n const treeUrl = `https://api.github.com/repos/${owner}/${repo}/git/trees/${branch}`;\n const http = new httpm.HttpClient('tool-cache');\n const headers = {};\n if (auth) {\n core.debug('set auth');\n headers.authorization = auth;\n }\n const response = yield http.getJson(treeUrl, headers);\n if (!response.result) {\n return releases;\n }\n let manifestUrl = '';\n for (const item of response.result.tree) {\n if (item.path === 'versions-manifest.json') {\n manifestUrl = item.url;\n break;\n }\n }\n headers['accept'] = 'application/vnd.github.VERSION.raw';\n let versionsRaw = yield (yield http.get(manifestUrl, headers)).readBody();\n if (versionsRaw) {\n // shouldn't be needed but protects against invalid json saved with BOM\n versionsRaw = versionsRaw.replace(/^\\uFEFF/, '');\n try {\n releases = JSON.parse(versionsRaw);\n }\n catch (_a) {\n core.debug('Invalid json');\n }\n }\n return releases;\n });\n}\nexports.getManifestFromRepo = getManifestFromRepo;\nfunction findFromManifest(versionSpec, stable, manifest, archFilter = os.arch()) {\n return __awaiter(this, void 0, void 0, function* () {\n // wrap the internal impl\n const match = yield mm._findMatch(versionSpec, stable, manifest, archFilter);\n return match;\n });\n}\nexports.findFromManifest = findFromManifest;\nfunction _createExtractFolder(dest) {\n return __awaiter(this, void 0, void 0, function* () {\n if (!dest) {\n // create a temp dir\n dest = path.join(_getTempDirectory(), v4_1.default());\n }\n yield io.mkdirP(dest);\n return dest;\n });\n}\nfunction _createToolPath(tool, version, arch) {\n return __awaiter(this, void 0, void 0, function* () 
{\n const folderPath = path.join(_getCacheDirectory(), tool, semver.clean(version) || version, arch || '');\n core.debug(`destination ${folderPath}`);\n const markerPath = `${folderPath}.complete`;\n yield io.rmRF(folderPath);\n yield io.rmRF(markerPath);\n yield io.mkdirP(folderPath);\n return folderPath;\n });\n}\nfunction _completeToolPath(tool, version, arch) {\n const folderPath = path.join(_getCacheDirectory(), tool, semver.clean(version) || version, arch || '');\n const markerPath = `${folderPath}.complete`;\n fs.writeFileSync(markerPath, '');\n core.debug('finished caching tool');\n}\n/**\n * Check if version string is explicit\n *\n * @param versionSpec version string to check\n */\nfunction isExplicitVersion(versionSpec) {\n const c = semver.clean(versionSpec) || '';\n core.debug(`isExplicit: ${c}`);\n const valid = semver.valid(c) != null;\n core.debug(`explicit? ${valid}`);\n return valid;\n}\nexports.isExplicitVersion = isExplicitVersion;\n/**\n * Get the highest satisfiying semantic version in `versions` which satisfies `versionSpec`\n *\n * @param versions array of versions to evaluate\n * @param versionSpec semantic version spec to satisfy\n */\nfunction evaluateVersions(versions, versionSpec) {\n let version = '';\n core.debug(`evaluating ${versions.length} versions`);\n versions = versions.sort((a, b) => {\n if (semver.gt(a, b)) {\n return 1;\n }\n return -1;\n });\n for (let i = versions.length - 1; i >= 0; i--) {\n const potential = versions[i];\n const satisfied = semver.satisfies(potential, versionSpec);\n if (satisfied) {\n version = potential;\n break;\n }\n }\n if (version) {\n core.debug(`matched: ${version}`);\n }\n else {\n core.debug('match not found');\n }\n return version;\n}\nexports.evaluateVersions = evaluateVersions;\n/**\n * Gets RUNNER_TOOL_CACHE\n */\nfunction _getCacheDirectory() {\n const cacheDirectory = process.env['RUNNER_TOOL_CACHE'] || '';\n assert_1.ok(cacheDirectory, 'Expected RUNNER_TOOL_CACHE to be defined');\n return cacheDirectory;\n}\n/**\n * Gets RUNNER_TEMP\n */\nfunction _getTempDirectory() {\n const tempDirectory = process.env['RUNNER_TEMP'] || '';\n assert_1.ok(tempDirectory, 'Expected RUNNER_TEMP to be defined');\n return tempDirectory;\n}\n/**\n * Gets a global variable\n */\nfunction _getGlobal(key, defaultValue) {\n /* eslint-disable @typescript-eslint/no-explicit-any */\n const value = global[key];\n /* eslint-enable @typescript-eslint/no-explicit-any */\n return value !== undefined ? 
value : defaultValue;\n}\n/**\n * Returns an array of unique values.\n * @param values Values to make unique.\n */\nfunction _unique(values) {\n return Array.from(new Set(values));\n}\n//# sourceMappingURL=tool-cache.js.map","/**\n * Convert array of 16 byte values to UUID string format of the form:\n * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX\n */\nvar byteToHex = [];\nfor (var i = 0; i < 256; ++i) {\n byteToHex[i] = (i + 0x100).toString(16).substr(1);\n}\n\nfunction bytesToUuid(buf, offset) {\n var i = offset || 0;\n var bth = byteToHex;\n // join used to fix memory issue caused by concatenation: https://bugs.chromium.org/p/v8/issues/detail?id=3175#c4\n return ([\n bth[buf[i++]], bth[buf[i++]],\n bth[buf[i++]], bth[buf[i++]], '-',\n bth[buf[i++]], bth[buf[i++]], '-',\n bth[buf[i++]], bth[buf[i++]], '-',\n bth[buf[i++]], bth[buf[i++]], '-',\n bth[buf[i++]], bth[buf[i++]],\n bth[buf[i++]], bth[buf[i++]],\n bth[buf[i++]], bth[buf[i++]]\n ]).join('');\n}\n\nmodule.exports = bytesToUuid;\n","// Unique ID creation requires a high quality random # generator. In node.js\n// this is pretty straight-forward - we use the crypto API.\n\nvar crypto = require('crypto');\n\nmodule.exports = function nodeRNG() {\n return crypto.randomBytes(16);\n};\n","var rng = require('./lib/rng');\nvar bytesToUuid = require('./lib/bytesToUuid');\n\nfunction v4(options, buf, offset) {\n var i = buf && offset || 0;\n\n if (typeof(options) == 'string') {\n buf = options === 'binary' ? new Array(16) : null;\n options = null;\n }\n options = options || {};\n\n var rnds = options.random || (options.rng || rng)();\n\n // Per 4.4, set bits for version and `clock_seq_hi_and_reserved`\n rnds[6] = (rnds[6] & 0x0f) | 0x40;\n rnds[8] = (rnds[8] & 0x3f) | 0x80;\n\n // Copy bytes to buffer, if provided\n if (buf) {\n for (var ii = 0; ii < 16; ++ii) {\n buf[i + ii] = rnds[ii];\n }\n }\n\n return buf || bytesToUuid(rnds);\n}\n\nmodule.exports = v4;\n","\"use strict\";\nvar __defProp = Object.defineProperty;\nvar __getOwnPropDesc = Object.getOwnPropertyDescriptor;\nvar __getOwnPropNames = Object.getOwnPropertyNames;\nvar __hasOwnProp = Object.prototype.hasOwnProperty;\nvar __export = (target, all) => {\n for (var name in all)\n __defProp(target, name, { get: all[name], enumerable: true });\n};\nvar __copyProps = (to, from, except, desc) => {\n if (from && typeof from === \"object\" || typeof from === \"function\") {\n for (let key of __getOwnPropNames(from))\n if (!__hasOwnProp.call(to, key) && key !== except)\n __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });\n }\n return to;\n};\nvar __toCommonJS = (mod) => __copyProps(__defProp({}, \"__esModule\", { value: true }), mod);\n\n// pkg/dist-src/index.js\nvar dist_src_exports = {};\n__export(dist_src_exports, {\n Octokit: () => Octokit\n});\nmodule.exports = __toCommonJS(dist_src_exports);\nvar import_core = require(\"@octokit/core\");\nvar import_plugin_request_log = require(\"@octokit/plugin-request-log\");\nvar import_plugin_paginate_rest = require(\"@octokit/plugin-paginate-rest\");\nvar import_plugin_rest_endpoint_methods = require(\"@octokit/plugin-rest-endpoint-methods\");\n\n// pkg/dist-src/version.js\nvar VERSION = \"20.0.1\";\n\n// pkg/dist-src/index.js\nvar Octokit = import_core.Octokit.plugin(\n import_plugin_request_log.requestLog,\n import_plugin_rest_endpoint_methods.legacyRestEndpointMethods,\n import_plugin_paginate_rest.paginateRest\n).defaults({\n userAgent: `octokit-rest.js/${VERSION}`\n});\n// Annotate 
the CommonJS export names for ESM import in node:\n0 && (module.exports = {\n Octokit\n});\n","\"use strict\";\nvar __defProp = Object.defineProperty;\nvar __getOwnPropDesc = Object.getOwnPropertyDescriptor;\nvar __getOwnPropNames = Object.getOwnPropertyNames;\nvar __hasOwnProp = Object.prototype.hasOwnProperty;\nvar __export = (target, all) => {\n for (var name in all)\n __defProp(target, name, { get: all[name], enumerable: true });\n};\nvar __copyProps = (to, from, except, desc) => {\n if (from && typeof from === \"object\" || typeof from === \"function\") {\n for (let key of __getOwnPropNames(from))\n if (!__hasOwnProp.call(to, key) && key !== except)\n __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });\n }\n return to;\n};\nvar __toCommonJS = (mod) => __copyProps(__defProp({}, \"__esModule\", { value: true }), mod);\n\n// pkg/dist-src/index.js\nvar dist_src_exports = {};\n__export(dist_src_exports, {\n createTokenAuth: () => createTokenAuth\n});\nmodule.exports = __toCommonJS(dist_src_exports);\n\n// pkg/dist-src/auth.js\nvar REGEX_IS_INSTALLATION_LEGACY = /^v1\\./;\nvar REGEX_IS_INSTALLATION = /^ghs_/;\nvar REGEX_IS_USER_TO_SERVER = /^ghu_/;\nasync function auth(token) {\n const isApp = token.split(/\\./).length === 3;\n const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token);\n const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token);\n const tokenType = isApp ? \"app\" : isInstallation ? \"installation\" : isUserToServer ? \"user-to-server\" : \"oauth\";\n return {\n type: \"token\",\n token,\n tokenType\n };\n}\n\n// pkg/dist-src/with-authorization-prefix.js\nfunction withAuthorizationPrefix(token) {\n if (token.split(/\\./).length === 3) {\n return `bearer ${token}`;\n }\n return `token ${token}`;\n}\n\n// pkg/dist-src/hook.js\nasync function hook(token, request, route, parameters) {\n const endpoint = request.endpoint.merge(\n route,\n parameters\n );\n endpoint.headers.authorization = withAuthorizationPrefix(token);\n return request(endpoint);\n}\n\n// pkg/dist-src/index.js\nvar createTokenAuth = function createTokenAuth2(token) {\n if (!token) {\n throw new Error(\"[@octokit/auth-token] No token passed to createTokenAuth\");\n }\n if (typeof token !== \"string\") {\n throw new Error(\n \"[@octokit/auth-token] Token passed to createTokenAuth is not a string\"\n );\n }\n token = token.replace(/^(token|bearer) +/i, \"\");\n return Object.assign(auth.bind(null, token), {\n hook: hook.bind(null, token)\n });\n};\n// Annotate the CommonJS export names for ESM import in node:\n0 && (module.exports = {\n createTokenAuth\n});\n","\"use strict\";\nvar __defProp = Object.defineProperty;\nvar __getOwnPropDesc = Object.getOwnPropertyDescriptor;\nvar __getOwnPropNames = Object.getOwnPropertyNames;\nvar __hasOwnProp = Object.prototype.hasOwnProperty;\nvar __export = (target, all) => {\n for (var name in all)\n __defProp(target, name, { get: all[name], enumerable: true });\n};\nvar __copyProps = (to, from, except, desc) => {\n if (from && typeof from === \"object\" || typeof from === \"function\") {\n for (let key of __getOwnPropNames(from))\n if (!__hasOwnProp.call(to, key) && key !== except)\n __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });\n }\n return to;\n};\nvar __toCommonJS = (mod) => __copyProps(__defProp({}, \"__esModule\", { value: true }), mod);\n\n// pkg/dist-src/index.js\nvar dist_src_exports = 
{};\n__export(dist_src_exports, {\n Octokit: () => Octokit\n});\nmodule.exports = __toCommonJS(dist_src_exports);\nvar import_universal_user_agent = require(\"universal-user-agent\");\nvar import_before_after_hook = require(\"before-after-hook\");\nvar import_request = require(\"@octokit/request\");\nvar import_graphql = require(\"@octokit/graphql\");\nvar import_auth_token = require(\"@octokit/auth-token\");\n\n// pkg/dist-src/version.js\nvar VERSION = \"5.0.0\";\n\n// pkg/dist-src/index.js\nvar Octokit = class {\n static {\n this.VERSION = VERSION;\n }\n static defaults(defaults) {\n const OctokitWithDefaults = class extends this {\n constructor(...args) {\n const options = args[0] || {};\n if (typeof defaults === \"function\") {\n super(defaults(options));\n return;\n }\n super(\n Object.assign(\n {},\n defaults,\n options,\n options.userAgent && defaults.userAgent ? {\n userAgent: `${options.userAgent} ${defaults.userAgent}`\n } : null\n )\n );\n }\n };\n return OctokitWithDefaults;\n }\n static {\n this.plugins = [];\n }\n /**\n * Attach a plugin (or many) to your Octokit instance.\n *\n * @example\n * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...)\n */\n static plugin(...newPlugins) {\n const currentPlugins = this.plugins;\n const NewOctokit = class extends this {\n static {\n this.plugins = currentPlugins.concat(\n newPlugins.filter((plugin) => !currentPlugins.includes(plugin))\n );\n }\n };\n return NewOctokit;\n }\n constructor(options = {}) {\n const hook = new import_before_after_hook.Collection();\n const requestDefaults = {\n baseUrl: import_request.request.endpoint.DEFAULTS.baseUrl,\n headers: {},\n request: Object.assign({}, options.request, {\n // @ts-ignore internal usage only, no need to type\n hook: hook.bind(null, \"request\")\n }),\n mediaType: {\n previews: [],\n format: \"\"\n }\n };\n requestDefaults.headers[\"user-agent\"] = [\n options.userAgent,\n `octokit-core.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`\n ].filter(Boolean).join(\" \");\n if (options.baseUrl) {\n requestDefaults.baseUrl = options.baseUrl;\n }\n if (options.previews) {\n requestDefaults.mediaType.previews = options.previews;\n }\n if (options.timeZone) {\n requestDefaults.headers[\"time-zone\"] = options.timeZone;\n }\n this.request = import_request.request.defaults(requestDefaults);\n this.graphql = (0, import_graphql.withCustomRequest)(this.request).defaults(requestDefaults);\n this.log = Object.assign(\n {\n debug: () => {\n },\n info: () => {\n },\n warn: console.warn.bind(console),\n error: console.error.bind(console)\n },\n options.log\n );\n this.hook = hook;\n if (!options.authStrategy) {\n if (!options.auth) {\n this.auth = async () => ({\n type: \"unauthenticated\"\n });\n } else {\n const auth = (0, import_auth_token.createTokenAuth)(options.auth);\n hook.wrap(\"request\", auth.hook);\n this.auth = auth;\n }\n } else {\n const { authStrategy, ...otherOptions } = options;\n const auth = authStrategy(\n Object.assign(\n {\n request: this.request,\n log: this.log,\n // we pass the current octokit instance as well as its constructor options\n // to allow for authentication strategies that return a new octokit instance\n // that shares the same internal state as the current one. 
The original\n // requirement for this was the \"event-octokit\" authentication strategy\n // of https://github.com/probot/octokit-auth-probot.\n octokit: this,\n octokitOptions: otherOptions\n },\n options.auth\n )\n );\n hook.wrap(\"request\", auth.hook);\n this.auth = auth;\n }\n const classConstructor = this.constructor;\n classConstructor.plugins.forEach((plugin) => {\n Object.assign(this, plugin(this, options));\n });\n }\n};\n// Annotate the CommonJS export names for ESM import in node:\n0 && (module.exports = {\n Octokit\n});\n","\"use strict\";\nvar __defProp = Object.defineProperty;\nvar __getOwnPropDesc = Object.getOwnPropertyDescriptor;\nvar __getOwnPropNames = Object.getOwnPropertyNames;\nvar __hasOwnProp = Object.prototype.hasOwnProperty;\nvar __export = (target, all) => {\n for (var name in all)\n __defProp(target, name, { get: all[name], enumerable: true });\n};\nvar __copyProps = (to, from, except, desc) => {\n if (from && typeof from === \"object\" || typeof from === \"function\") {\n for (let key of __getOwnPropNames(from))\n if (!__hasOwnProp.call(to, key) && key !== except)\n __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });\n }\n return to;\n};\nvar __toCommonJS = (mod) => __copyProps(__defProp({}, \"__esModule\", { value: true }), mod);\n\n// pkg/dist-src/index.js\nvar dist_src_exports = {};\n__export(dist_src_exports, {\n endpoint: () => endpoint\n});\nmodule.exports = __toCommonJS(dist_src_exports);\n\n// pkg/dist-src/defaults.js\nvar import_universal_user_agent = require(\"universal-user-agent\");\n\n// pkg/dist-src/version.js\nvar VERSION = \"9.0.0\";\n\n// pkg/dist-src/defaults.js\nvar userAgent = `octokit-endpoint.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`;\nvar DEFAULTS = {\n method: \"GET\",\n baseUrl: \"https://api.github.com\",\n headers: {\n accept: \"application/vnd.github.v3+json\",\n \"user-agent\": userAgent\n },\n mediaType: {\n format: \"\"\n }\n};\n\n// pkg/dist-src/util/lowercase-keys.js\nfunction lowercaseKeys(object) {\n if (!object) {\n return {};\n }\n return Object.keys(object).reduce((newObj, key) => {\n newObj[key.toLowerCase()] = object[key];\n return newObj;\n }, {});\n}\n\n// pkg/dist-src/util/merge-deep.js\nvar import_is_plain_object = require(\"is-plain-object\");\nfunction mergeDeep(defaults, options) {\n const result = Object.assign({}, defaults);\n Object.keys(options).forEach((key) => {\n if ((0, import_is_plain_object.isPlainObject)(options[key])) {\n if (!(key in defaults))\n Object.assign(result, { [key]: options[key] });\n else\n result[key] = mergeDeep(defaults[key], options[key]);\n } else {\n Object.assign(result, { [key]: options[key] });\n }\n });\n return result;\n}\n\n// pkg/dist-src/util/remove-undefined-properties.js\nfunction removeUndefinedProperties(obj) {\n for (const key in obj) {\n if (obj[key] === void 0) {\n delete obj[key];\n }\n }\n return obj;\n}\n\n// pkg/dist-src/merge.js\nfunction merge(defaults, route, options) {\n if (typeof route === \"string\") {\n let [method, url] = route.split(\" \");\n options = Object.assign(url ? 
{ method, url } : { url: method }, options);\n } else {\n options = Object.assign({}, route);\n }\n options.headers = lowercaseKeys(options.headers);\n removeUndefinedProperties(options);\n removeUndefinedProperties(options.headers);\n const mergedOptions = mergeDeep(defaults || {}, options);\n if (options.url === \"/graphql\") {\n if (defaults && defaults.mediaType.previews?.length) {\n mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(\n (preview) => !mergedOptions.mediaType.previews.includes(preview)\n ).concat(mergedOptions.mediaType.previews);\n }\n mergedOptions.mediaType.previews = (mergedOptions.mediaType.previews || []).map((preview) => preview.replace(/-preview/, \"\"));\n }\n return mergedOptions;\n}\n\n// pkg/dist-src/util/add-query-parameters.js\nfunction addQueryParameters(url, parameters) {\n const separator = /\\?/.test(url) ? \"&\" : \"?\";\n const names = Object.keys(parameters);\n if (names.length === 0) {\n return url;\n }\n return url + separator + names.map((name) => {\n if (name === \"q\") {\n return \"q=\" + parameters.q.split(\"+\").map(encodeURIComponent).join(\"+\");\n }\n return `${name}=${encodeURIComponent(parameters[name])}`;\n }).join(\"&\");\n}\n\n// pkg/dist-src/util/extract-url-variable-names.js\nvar urlVariableRegex = /\\{[^}]+\\}/g;\nfunction removeNonChars(variableName) {\n return variableName.replace(/^\\W+|\\W+$/g, \"\").split(/,/);\n}\nfunction extractUrlVariableNames(url) {\n const matches = url.match(urlVariableRegex);\n if (!matches) {\n return [];\n }\n return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []);\n}\n\n// pkg/dist-src/util/omit.js\nfunction omit(object, keysToOmit) {\n return Object.keys(object).filter((option) => !keysToOmit.includes(option)).reduce((obj, key) => {\n obj[key] = object[key];\n return obj;\n }, {});\n}\n\n// pkg/dist-src/util/url-template.js\nfunction encodeReserved(str) {\n return str.split(/(%[0-9A-Fa-f]{2})/g).map(function(part) {\n if (!/%[0-9A-Fa-f]/.test(part)) {\n part = encodeURI(part).replace(/%5B/g, \"[\").replace(/%5D/g, \"]\");\n }\n return part;\n }).join(\"\");\n}\nfunction encodeUnreserved(str) {\n return encodeURIComponent(str).replace(/[!'()*]/g, function(c) {\n return \"%\" + c.charCodeAt(0).toString(16).toUpperCase();\n });\n}\nfunction encodeValue(operator, value, key) {\n value = operator === \"+\" || operator === \"#\" ? encodeReserved(value) : encodeUnreserved(value);\n if (key) {\n return encodeUnreserved(key) + \"=\" + value;\n } else {\n return value;\n }\n}\nfunction isDefined(value) {\n return value !== void 0 && value !== null;\n}\nfunction isKeyOperator(operator) {\n return operator === \";\" || operator === \"&\" || operator === \"?\";\n}\nfunction getValues(context, operator, key, modifier) {\n var value = context[key], result = [];\n if (isDefined(value) && value !== \"\") {\n if (typeof value === \"string\" || typeof value === \"number\" || typeof value === \"boolean\") {\n value = value.toString();\n if (modifier && modifier !== \"*\") {\n value = value.substring(0, parseInt(modifier, 10));\n }\n result.push(\n encodeValue(operator, value, isKeyOperator(operator) ? key : \"\")\n );\n } else {\n if (modifier === \"*\") {\n if (Array.isArray(value)) {\n value.filter(isDefined).forEach(function(value2) {\n result.push(\n encodeValue(operator, value2, isKeyOperator(operator) ? 
key : \"\")\n );\n });\n } else {\n Object.keys(value).forEach(function(k) {\n if (isDefined(value[k])) {\n result.push(encodeValue(operator, value[k], k));\n }\n });\n }\n } else {\n const tmp = [];\n if (Array.isArray(value)) {\n value.filter(isDefined).forEach(function(value2) {\n tmp.push(encodeValue(operator, value2));\n });\n } else {\n Object.keys(value).forEach(function(k) {\n if (isDefined(value[k])) {\n tmp.push(encodeUnreserved(k));\n tmp.push(encodeValue(operator, value[k].toString()));\n }\n });\n }\n if (isKeyOperator(operator)) {\n result.push(encodeUnreserved(key) + \"=\" + tmp.join(\",\"));\n } else if (tmp.length !== 0) {\n result.push(tmp.join(\",\"));\n }\n }\n }\n } else {\n if (operator === \";\") {\n if (isDefined(value)) {\n result.push(encodeUnreserved(key));\n }\n } else if (value === \"\" && (operator === \"&\" || operator === \"?\")) {\n result.push(encodeUnreserved(key) + \"=\");\n } else if (value === \"\") {\n result.push(\"\");\n }\n }\n return result;\n}\nfunction parseUrl(template) {\n return {\n expand: expand.bind(null, template)\n };\n}\nfunction expand(template, context) {\n var operators = [\"+\", \"#\", \".\", \"/\", \";\", \"?\", \"&\"];\n return template.replace(\n /\\{([^\\{\\}]+)\\}|([^\\{\\}]+)/g,\n function(_, expression, literal) {\n if (expression) {\n let operator = \"\";\n const values = [];\n if (operators.indexOf(expression.charAt(0)) !== -1) {\n operator = expression.charAt(0);\n expression = expression.substr(1);\n }\n expression.split(/,/g).forEach(function(variable) {\n var tmp = /([^:\\*]*)(?::(\\d+)|(\\*))?/.exec(variable);\n values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3]));\n });\n if (operator && operator !== \"+\") {\n var separator = \",\";\n if (operator === \"?\") {\n separator = \"&\";\n } else if (operator !== \"#\") {\n separator = operator;\n }\n return (values.length !== 0 ? operator : \"\") + values.join(separator);\n } else {\n return values.join(\",\");\n }\n } else {\n return encodeReserved(literal);\n }\n }\n );\n}\n\n// pkg/dist-src/parse.js\nfunction parse(options) {\n let method = options.method.toUpperCase();\n let url = (options.url || \"/\").replace(/:([a-z]\\w+)/g, \"{$1}\");\n let headers = Object.assign({}, options.headers);\n let body;\n let parameters = omit(options, [\n \"method\",\n \"baseUrl\",\n \"url\",\n \"headers\",\n \"request\",\n \"mediaType\"\n ]);\n const urlVariableNames = extractUrlVariableNames(url);\n url = parseUrl(url).expand(parameters);\n if (!/^http/.test(url)) {\n url = options.baseUrl + url;\n }\n const omittedParameters = Object.keys(options).filter((option) => urlVariableNames.includes(option)).concat(\"baseUrl\");\n const remainingParameters = omit(parameters, omittedParameters);\n const isBinaryRequest = /application\\/octet-stream/i.test(headers.accept);\n if (!isBinaryRequest) {\n if (options.mediaType.format) {\n headers.accept = headers.accept.split(/,/).map(\n (format) => format.replace(\n /application\\/vnd(\\.\\w+)(\\.v3)?(\\.\\w+)?(\\+json)?$/,\n `application/vnd$1$2.${options.mediaType.format}`\n )\n ).join(\",\");\n }\n if (url.endsWith(\"/graphql\")) {\n if (options.mediaType.previews?.length) {\n const previewsFromAcceptHeader = headers.accept.match(/[\\w-]+(?=-preview)/g) || [];\n headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map((preview) => {\n const format = options.mediaType.format ? 
`.${options.mediaType.format}` : \"+json\";\n return `application/vnd.github.${preview}-preview${format}`;\n }).join(\",\");\n }\n }\n }\n if ([\"GET\", \"HEAD\"].includes(method)) {\n url = addQueryParameters(url, remainingParameters);\n } else {\n if (\"data\" in remainingParameters) {\n body = remainingParameters.data;\n } else {\n if (Object.keys(remainingParameters).length) {\n body = remainingParameters;\n }\n }\n }\n if (!headers[\"content-type\"] && typeof body !== \"undefined\") {\n headers[\"content-type\"] = \"application/json; charset=utf-8\";\n }\n if ([\"PATCH\", \"PUT\"].includes(method) && typeof body === \"undefined\") {\n body = \"\";\n }\n return Object.assign(\n { method, url, headers },\n typeof body !== \"undefined\" ? { body } : null,\n options.request ? { request: options.request } : null\n );\n}\n\n// pkg/dist-src/endpoint-with-defaults.js\nfunction endpointWithDefaults(defaults, route, options) {\n return parse(merge(defaults, route, options));\n}\n\n// pkg/dist-src/with-defaults.js\nfunction withDefaults(oldDefaults, newDefaults) {\n const DEFAULTS2 = merge(oldDefaults, newDefaults);\n const endpoint2 = endpointWithDefaults.bind(null, DEFAULTS2);\n return Object.assign(endpoint2, {\n DEFAULTS: DEFAULTS2,\n defaults: withDefaults.bind(null, DEFAULTS2),\n merge: merge.bind(null, DEFAULTS2),\n parse\n });\n}\n\n// pkg/dist-src/index.js\nvar endpoint = withDefaults(null, DEFAULTS);\n// Annotate the CommonJS export names for ESM import in node:\n0 && (module.exports = {\n endpoint\n});\n","\"use strict\";\nvar __defProp = Object.defineProperty;\nvar __getOwnPropDesc = Object.getOwnPropertyDescriptor;\nvar __getOwnPropNames = Object.getOwnPropertyNames;\nvar __hasOwnProp = Object.prototype.hasOwnProperty;\nvar __export = (target, all) => {\n for (var name in all)\n __defProp(target, name, { get: all[name], enumerable: true });\n};\nvar __copyProps = (to, from, except, desc) => {\n if (from && typeof from === \"object\" || typeof from === \"function\") {\n for (let key of __getOwnPropNames(from))\n if (!__hasOwnProp.call(to, key) && key !== except)\n __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });\n }\n return to;\n};\nvar __toCommonJS = (mod) => __copyProps(__defProp({}, \"__esModule\", { value: true }), mod);\n\n// pkg/dist-src/index.js\nvar dist_src_exports = {};\n__export(dist_src_exports, {\n GraphqlResponseError: () => GraphqlResponseError,\n graphql: () => graphql2,\n withCustomRequest: () => withCustomRequest\n});\nmodule.exports = __toCommonJS(dist_src_exports);\nvar import_request3 = require(\"@octokit/request\");\nvar import_universal_user_agent = require(\"universal-user-agent\");\n\n// pkg/dist-src/version.js\nvar VERSION = \"7.0.1\";\n\n// pkg/dist-src/with-defaults.js\nvar import_request2 = require(\"@octokit/request\");\n\n// pkg/dist-src/graphql.js\nvar import_request = require(\"@octokit/request\");\n\n// pkg/dist-src/error.js\nfunction _buildMessageForResponseErrors(data) {\n return `Request failed due to following response errors:\n` + data.errors.map((e) => ` - ${e.message}`).join(\"\\n\");\n}\nvar GraphqlResponseError = class extends Error {\n constructor(request2, headers, response) {\n super(_buildMessageForResponseErrors(response));\n this.request = request2;\n this.headers = headers;\n this.response = response;\n this.name = \"GraphqlResponseError\";\n this.errors = response.errors;\n this.data = response.data;\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, 
this.constructor);\n }\n }\n};\n\n// pkg/dist-src/graphql.js\nvar NON_VARIABLE_OPTIONS = [\n \"method\",\n \"baseUrl\",\n \"url\",\n \"headers\",\n \"request\",\n \"query\",\n \"mediaType\"\n];\nvar FORBIDDEN_VARIABLE_OPTIONS = [\"query\", \"method\", \"url\"];\nvar GHES_V3_SUFFIX_REGEX = /\\/api\\/v3\\/?$/;\nfunction graphql(request2, query, options) {\n if (options) {\n if (typeof query === \"string\" && \"query\" in options) {\n return Promise.reject(\n new Error(`[@octokit/graphql] \"query\" cannot be used as variable name`)\n );\n }\n for (const key in options) {\n if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key))\n continue;\n return Promise.reject(\n new Error(\n `[@octokit/graphql] \"${key}\" cannot be used as variable name`\n )\n );\n }\n }\n const parsedOptions = typeof query === \"string\" ? Object.assign({ query }, options) : query;\n const requestOptions = Object.keys(\n parsedOptions\n ).reduce((result, key) => {\n if (NON_VARIABLE_OPTIONS.includes(key)) {\n result[key] = parsedOptions[key];\n return result;\n }\n if (!result.variables) {\n result.variables = {};\n }\n result.variables[key] = parsedOptions[key];\n return result;\n }, {});\n const baseUrl = parsedOptions.baseUrl || request2.endpoint.DEFAULTS.baseUrl;\n if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) {\n requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, \"/api/graphql\");\n }\n return request2(requestOptions).then((response) => {\n if (response.data.errors) {\n const headers = {};\n for (const key of Object.keys(response.headers)) {\n headers[key] = response.headers[key];\n }\n throw new GraphqlResponseError(\n requestOptions,\n headers,\n response.data\n );\n }\n return response.data.data;\n });\n}\n\n// pkg/dist-src/with-defaults.js\nfunction withDefaults(request2, newDefaults) {\n const newRequest = request2.defaults(newDefaults);\n const newApi = (query, options) => {\n return graphql(newRequest, query, options);\n };\n return Object.assign(newApi, {\n defaults: withDefaults.bind(null, newRequest),\n endpoint: newRequest.endpoint\n });\n}\n\n// pkg/dist-src/index.js\nvar graphql2 = withDefaults(import_request3.request, {\n headers: {\n \"user-agent\": `octokit-graphql.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`\n },\n method: \"POST\",\n url: \"/graphql\"\n});\nfunction withCustomRequest(customRequest) {\n return withDefaults(customRequest, {\n method: \"POST\",\n url: \"/graphql\"\n });\n}\n// Annotate the CommonJS export names for ESM import in node:\n0 && (module.exports = {\n GraphqlResponseError,\n graphql,\n withCustomRequest\n});\n","\"use strict\";\nvar __defProp = Object.defineProperty;\nvar __getOwnPropDesc = Object.getOwnPropertyDescriptor;\nvar __getOwnPropNames = Object.getOwnPropertyNames;\nvar __hasOwnProp = Object.prototype.hasOwnProperty;\nvar __export = (target, all) => {\n for (var name in all)\n __defProp(target, name, { get: all[name], enumerable: true });\n};\nvar __copyProps = (to, from, except, desc) => {\n if (from && typeof from === \"object\" || typeof from === \"function\") {\n for (let key of __getOwnPropNames(from))\n if (!__hasOwnProp.call(to, key) && key !== except)\n __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });\n }\n return to;\n};\nvar __toCommonJS = (mod) => __copyProps(__defProp({}, \"__esModule\", { value: true }), mod);\n\n// pkg/dist-src/index.js\nvar dist_src_exports = {};\n__export(dist_src_exports, {\n composePaginateRest: () => composePaginateRest,\n isPaginatingEndpoint: () 
=> isPaginatingEndpoint,\n paginateRest: () => paginateRest,\n paginatingEndpoints: () => paginatingEndpoints\n});\nmodule.exports = __toCommonJS(dist_src_exports);\n\n// pkg/dist-src/version.js\nvar VERSION = \"8.0.0\";\n\n// pkg/dist-src/normalize-paginated-list-response.js\nfunction normalizePaginatedListResponse(response) {\n if (!response.data) {\n return {\n ...response,\n data: []\n };\n }\n const responseNeedsNormalization = \"total_count\" in response.data && !(\"url\" in response.data);\n if (!responseNeedsNormalization)\n return response;\n const incompleteResults = response.data.incomplete_results;\n const repositorySelection = response.data.repository_selection;\n const totalCount = response.data.total_count;\n delete response.data.incomplete_results;\n delete response.data.repository_selection;\n delete response.data.total_count;\n const namespaceKey = Object.keys(response.data)[0];\n const data = response.data[namespaceKey];\n response.data = data;\n if (typeof incompleteResults !== \"undefined\") {\n response.data.incomplete_results = incompleteResults;\n }\n if (typeof repositorySelection !== \"undefined\") {\n response.data.repository_selection = repositorySelection;\n }\n response.data.total_count = totalCount;\n return response;\n}\n\n// pkg/dist-src/iterator.js\nfunction iterator(octokit, route, parameters) {\n const options = typeof route === \"function\" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters);\n const requestMethod = typeof route === \"function\" ? route : octokit.request;\n const method = options.method;\n const headers = options.headers;\n let url = options.url;\n return {\n [Symbol.asyncIterator]: () => ({\n async next() {\n if (!url)\n return { done: true };\n try {\n const response = await requestMethod({ method, url, headers });\n const normalizedResponse = normalizePaginatedListResponse(response);\n url = ((normalizedResponse.headers.link || \"\").match(\n /<([^>]+)>;\\s*rel=\"next\"/\n ) || [])[1];\n return { value: normalizedResponse };\n } catch (error) {\n if (error.status !== 409)\n throw error;\n url = \"\";\n return {\n value: {\n status: 200,\n headers: {},\n data: []\n }\n };\n }\n }\n })\n };\n}\n\n// pkg/dist-src/paginate.js\nfunction paginate(octokit, route, parameters, mapFn) {\n if (typeof parameters === \"function\") {\n mapFn = parameters;\n parameters = void 0;\n }\n return gather(\n octokit,\n [],\n iterator(octokit, route, parameters)[Symbol.asyncIterator](),\n mapFn\n );\n}\nfunction gather(octokit, results, iterator2, mapFn) {\n return iterator2.next().then((result) => {\n if (result.done) {\n return results;\n }\n let earlyExit = false;\n function done() {\n earlyExit = true;\n }\n results = results.concat(\n mapFn ? 
mapFn(result.value, done) : result.value.data\n );\n if (earlyExit) {\n return results;\n }\n return gather(octokit, results, iterator2, mapFn);\n });\n}\n\n// pkg/dist-src/compose-paginate.js\nvar composePaginateRest = Object.assign(paginate, {\n iterator\n});\n\n// pkg/dist-src/generated/paginating-endpoints.js\nvar paginatingEndpoints = [\n \"GET /app/hook/deliveries\",\n \"GET /app/installation-requests\",\n \"GET /app/installations\",\n \"GET /enterprises/{enterprise}/dependabot/alerts\",\n \"GET /enterprises/{enterprise}/secret-scanning/alerts\",\n \"GET /events\",\n \"GET /gists\",\n \"GET /gists/public\",\n \"GET /gists/starred\",\n \"GET /gists/{gist_id}/comments\",\n \"GET /gists/{gist_id}/commits\",\n \"GET /gists/{gist_id}/forks\",\n \"GET /installation/repositories\",\n \"GET /issues\",\n \"GET /licenses\",\n \"GET /marketplace_listing/plans\",\n \"GET /marketplace_listing/plans/{plan_id}/accounts\",\n \"GET /marketplace_listing/stubbed/plans\",\n \"GET /marketplace_listing/stubbed/plans/{plan_id}/accounts\",\n \"GET /networks/{owner}/{repo}/events\",\n \"GET /notifications\",\n \"GET /organizations\",\n \"GET /organizations/{org}/personal-access-token-requests\",\n \"GET /organizations/{org}/personal-access-token-requests/{pat_request_id}/repositories\",\n \"GET /organizations/{org}/personal-access-tokens\",\n \"GET /organizations/{org}/personal-access-tokens/{pat_id}/repositories\",\n \"GET /orgs/{org}/actions/cache/usage-by-repository\",\n \"GET /orgs/{org}/actions/permissions/repositories\",\n \"GET /orgs/{org}/actions/required_workflows\",\n \"GET /orgs/{org}/actions/runners\",\n \"GET /orgs/{org}/actions/secrets\",\n \"GET /orgs/{org}/actions/secrets/{secret_name}/repositories\",\n \"GET /orgs/{org}/actions/variables\",\n \"GET /orgs/{org}/actions/variables/{name}/repositories\",\n \"GET /orgs/{org}/blocks\",\n \"GET /orgs/{org}/code-scanning/alerts\",\n \"GET /orgs/{org}/codespaces\",\n \"GET /orgs/{org}/codespaces/secrets\",\n \"GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories\",\n \"GET /orgs/{org}/dependabot/alerts\",\n \"GET /orgs/{org}/dependabot/secrets\",\n \"GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories\",\n \"GET /orgs/{org}/events\",\n \"GET /orgs/{org}/failed_invitations\",\n \"GET /orgs/{org}/hooks\",\n \"GET /orgs/{org}/hooks/{hook_id}/deliveries\",\n \"GET /orgs/{org}/installations\",\n \"GET /orgs/{org}/invitations\",\n \"GET /orgs/{org}/invitations/{invitation_id}/teams\",\n \"GET /orgs/{org}/issues\",\n \"GET /orgs/{org}/members\",\n \"GET /orgs/{org}/members/{username}/codespaces\",\n \"GET /orgs/{org}/migrations\",\n \"GET /orgs/{org}/migrations/{migration_id}/repositories\",\n \"GET /orgs/{org}/outside_collaborators\",\n \"GET /orgs/{org}/packages\",\n \"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\",\n \"GET /orgs/{org}/projects\",\n \"GET /orgs/{org}/public_members\",\n \"GET /orgs/{org}/repos\",\n \"GET /orgs/{org}/rulesets\",\n \"GET /orgs/{org}/secret-scanning/alerts\",\n \"GET /orgs/{org}/teams\",\n \"GET /orgs/{org}/teams/{team_slug}/discussions\",\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\",\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\",\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\",\n \"GET /orgs/{org}/teams/{team_slug}/invitations\",\n \"GET /orgs/{org}/teams/{team_slug}/members\",\n \"GET /orgs/{org}/teams/{team_slug}/projects\",\n \"GET 
/orgs/{org}/teams/{team_slug}/repos\",\n \"GET /orgs/{org}/teams/{team_slug}/teams\",\n \"GET /projects/columns/{column_id}/cards\",\n \"GET /projects/{project_id}/collaborators\",\n \"GET /projects/{project_id}/columns\",\n \"GET /repos/{org}/{repo}/actions/required_workflows\",\n \"GET /repos/{owner}/{repo}/actions/artifacts\",\n \"GET /repos/{owner}/{repo}/actions/caches\",\n \"GET /repos/{owner}/{repo}/actions/organization-secrets\",\n \"GET /repos/{owner}/{repo}/actions/organization-variables\",\n \"GET /repos/{owner}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}/runs\",\n \"GET /repos/{owner}/{repo}/actions/runners\",\n \"GET /repos/{owner}/{repo}/actions/runs\",\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts\",\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs\",\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs\",\n \"GET /repos/{owner}/{repo}/actions/secrets\",\n \"GET /repos/{owner}/{repo}/actions/variables\",\n \"GET /repos/{owner}/{repo}/actions/workflows\",\n \"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs\",\n \"GET /repos/{owner}/{repo}/assignees\",\n \"GET /repos/{owner}/{repo}/branches\",\n \"GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations\",\n \"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs\",\n \"GET /repos/{owner}/{repo}/code-scanning/alerts\",\n \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\",\n \"GET /repos/{owner}/{repo}/code-scanning/analyses\",\n \"GET /repos/{owner}/{repo}/codespaces\",\n \"GET /repos/{owner}/{repo}/codespaces/devcontainers\",\n \"GET /repos/{owner}/{repo}/codespaces/secrets\",\n \"GET /repos/{owner}/{repo}/collaborators\",\n \"GET /repos/{owner}/{repo}/comments\",\n \"GET /repos/{owner}/{repo}/comments/{comment_id}/reactions\",\n \"GET /repos/{owner}/{repo}/commits\",\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/comments\",\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls\",\n \"GET /repos/{owner}/{repo}/commits/{ref}/check-runs\",\n \"GET /repos/{owner}/{repo}/commits/{ref}/check-suites\",\n \"GET /repos/{owner}/{repo}/commits/{ref}/status\",\n \"GET /repos/{owner}/{repo}/commits/{ref}/statuses\",\n \"GET /repos/{owner}/{repo}/contributors\",\n \"GET /repos/{owner}/{repo}/dependabot/alerts\",\n \"GET /repos/{owner}/{repo}/dependabot/secrets\",\n \"GET /repos/{owner}/{repo}/deployments\",\n \"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\",\n \"GET /repos/{owner}/{repo}/environments\",\n \"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies\",\n \"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps\",\n \"GET /repos/{owner}/{repo}/events\",\n \"GET /repos/{owner}/{repo}/forks\",\n \"GET /repos/{owner}/{repo}/hooks\",\n \"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries\",\n \"GET /repos/{owner}/{repo}/invitations\",\n \"GET /repos/{owner}/{repo}/issues\",\n \"GET /repos/{owner}/{repo}/issues/comments\",\n \"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\",\n \"GET /repos/{owner}/{repo}/issues/events\",\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/comments\",\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/events\",\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/labels\",\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/reactions\",\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/timeline\",\n \"GET /repos/{owner}/{repo}/keys\",\n \"GET 
/repos/{owner}/{repo}/labels\",\n \"GET /repos/{owner}/{repo}/milestones\",\n \"GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels\",\n \"GET /repos/{owner}/{repo}/notifications\",\n \"GET /repos/{owner}/{repo}/pages/builds\",\n \"GET /repos/{owner}/{repo}/projects\",\n \"GET /repos/{owner}/{repo}/pulls\",\n \"GET /repos/{owner}/{repo}/pulls/comments\",\n \"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/comments\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/commits\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/files\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments\",\n \"GET /repos/{owner}/{repo}/releases\",\n \"GET /repos/{owner}/{repo}/releases/{release_id}/assets\",\n \"GET /repos/{owner}/{repo}/releases/{release_id}/reactions\",\n \"GET /repos/{owner}/{repo}/rules/branches/{branch}\",\n \"GET /repos/{owner}/{repo}/rulesets\",\n \"GET /repos/{owner}/{repo}/secret-scanning/alerts\",\n \"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations\",\n \"GET /repos/{owner}/{repo}/security-advisories\",\n \"GET /repos/{owner}/{repo}/stargazers\",\n \"GET /repos/{owner}/{repo}/subscribers\",\n \"GET /repos/{owner}/{repo}/tags\",\n \"GET /repos/{owner}/{repo}/teams\",\n \"GET /repos/{owner}/{repo}/topics\",\n \"GET /repositories\",\n \"GET /repositories/{repository_id}/environments/{environment_name}/secrets\",\n \"GET /repositories/{repository_id}/environments/{environment_name}/variables\",\n \"GET /search/code\",\n \"GET /search/commits\",\n \"GET /search/issues\",\n \"GET /search/labels\",\n \"GET /search/repositories\",\n \"GET /search/topics\",\n \"GET /search/users\",\n \"GET /teams/{team_id}/discussions\",\n \"GET /teams/{team_id}/discussions/{discussion_number}/comments\",\n \"GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions\",\n \"GET /teams/{team_id}/discussions/{discussion_number}/reactions\",\n \"GET /teams/{team_id}/invitations\",\n \"GET /teams/{team_id}/members\",\n \"GET /teams/{team_id}/projects\",\n \"GET /teams/{team_id}/repos\",\n \"GET /teams/{team_id}/teams\",\n \"GET /user/blocks\",\n \"GET /user/codespaces\",\n \"GET /user/codespaces/secrets\",\n \"GET /user/emails\",\n \"GET /user/followers\",\n \"GET /user/following\",\n \"GET /user/gpg_keys\",\n \"GET /user/installations\",\n \"GET /user/installations/{installation_id}/repositories\",\n \"GET /user/issues\",\n \"GET /user/keys\",\n \"GET /user/marketplace_purchases\",\n \"GET /user/marketplace_purchases/stubbed\",\n \"GET /user/memberships/orgs\",\n \"GET /user/migrations\",\n \"GET /user/migrations/{migration_id}/repositories\",\n \"GET /user/orgs\",\n \"GET /user/packages\",\n \"GET /user/packages/{package_type}/{package_name}/versions\",\n \"GET /user/public_emails\",\n \"GET /user/repos\",\n \"GET /user/repository_invitations\",\n \"GET /user/social_accounts\",\n \"GET /user/ssh_signing_keys\",\n \"GET /user/starred\",\n \"GET /user/subscriptions\",\n \"GET /user/teams\",\n \"GET /users\",\n \"GET /users/{username}/events\",\n \"GET /users/{username}/events/orgs/{org}\",\n \"GET /users/{username}/events/public\",\n \"GET /users/{username}/followers\",\n \"GET /users/{username}/following\",\n \"GET /users/{username}/gists\",\n \"GET /users/{username}/gpg_keys\",\n \"GET /users/{username}/keys\",\n \"GET /users/{username}/orgs\",\n \"GET /users/{username}/packages\",\n 
\"GET /users/{username}/projects\",\n \"GET /users/{username}/received_events\",\n \"GET /users/{username}/received_events/public\",\n \"GET /users/{username}/repos\",\n \"GET /users/{username}/social_accounts\",\n \"GET /users/{username}/ssh_signing_keys\",\n \"GET /users/{username}/starred\",\n \"GET /users/{username}/subscriptions\"\n];\n\n// pkg/dist-src/paginating-endpoints.js\nfunction isPaginatingEndpoint(arg) {\n if (typeof arg === \"string\") {\n return paginatingEndpoints.includes(arg);\n } else {\n return false;\n }\n}\n\n// pkg/dist-src/index.js\nfunction paginateRest(octokit) {\n return {\n paginate: Object.assign(paginate.bind(null, octokit), {\n iterator: iterator.bind(null, octokit)\n })\n };\n}\npaginateRest.VERSION = VERSION;\n// Annotate the CommonJS export names for ESM import in node:\n0 && (module.exports = {\n composePaginateRest,\n isPaginatingEndpoint,\n paginateRest,\n paginatingEndpoints\n});\n","\"use strict\";\nvar __defProp = Object.defineProperty;\nvar __getOwnPropDesc = Object.getOwnPropertyDescriptor;\nvar __getOwnPropNames = Object.getOwnPropertyNames;\nvar __hasOwnProp = Object.prototype.hasOwnProperty;\nvar __export = (target, all) => {\n for (var name in all)\n __defProp(target, name, { get: all[name], enumerable: true });\n};\nvar __copyProps = (to, from, except, desc) => {\n if (from && typeof from === \"object\" || typeof from === \"function\") {\n for (let key of __getOwnPropNames(from))\n if (!__hasOwnProp.call(to, key) && key !== except)\n __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });\n }\n return to;\n};\nvar __toCommonJS = (mod) => __copyProps(__defProp({}, \"__esModule\", { value: true }), mod);\n\n// pkg/dist-src/index.js\nvar dist_src_exports = {};\n__export(dist_src_exports, {\n requestLog: () => requestLog\n});\nmodule.exports = __toCommonJS(dist_src_exports);\n\n// pkg/dist-src/version.js\nvar VERSION = \"4.0.0\";\n\n// pkg/dist-src/index.js\nfunction requestLog(octokit) {\n octokit.hook.wrap(\"request\", (request, options) => {\n octokit.log.debug(\"request\", options);\n const start = Date.now();\n const requestOptions = octokit.request.endpoint.parse(options);\n const path = requestOptions.url.replace(options.baseUrl, \"\");\n return request(options).then((response) => {\n octokit.log.info(\n `${requestOptions.method} ${path} - ${response.status} in ${Date.now() - start}ms`\n );\n return response;\n }).catch((error) => {\n octokit.log.info(\n `${requestOptions.method} ${path} - ${error.status} in ${Date.now() - start}ms`\n );\n throw error;\n });\n });\n}\nrequestLog.VERSION = VERSION;\n// Annotate the CommonJS export names for ESM import in node:\n0 && (module.exports = {\n requestLog\n});\n","\"use strict\";\nvar __defProp = Object.defineProperty;\nvar __getOwnPropDesc = Object.getOwnPropertyDescriptor;\nvar __getOwnPropNames = Object.getOwnPropertyNames;\nvar __hasOwnProp = Object.prototype.hasOwnProperty;\nvar __export = (target, all) => {\n for (var name in all)\n __defProp(target, name, { get: all[name], enumerable: true });\n};\nvar __copyProps = (to, from, except, desc) => {\n if (from && typeof from === \"object\" || typeof from === \"function\") {\n for (let key of __getOwnPropNames(from))\n if (!__hasOwnProp.call(to, key) && key !== except)\n __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });\n }\n return to;\n};\nvar __toCommonJS = (mod) => __copyProps(__defProp({}, \"__esModule\", { value: 
true }), mod);\n\n// pkg/dist-src/index.js\nvar dist_src_exports = {};\n__export(dist_src_exports, {\n legacyRestEndpointMethods: () => legacyRestEndpointMethods,\n restEndpointMethods: () => restEndpointMethods\n});\nmodule.exports = __toCommonJS(dist_src_exports);\n\n// pkg/dist-src/version.js\nvar VERSION = \"9.0.0\";\n\n// pkg/dist-src/generated/endpoints.js\nvar Endpoints = {\n actions: {\n addCustomLabelsToSelfHostedRunnerForOrg: [\n \"POST /orgs/{org}/actions/runners/{runner_id}/labels\"\n ],\n addCustomLabelsToSelfHostedRunnerForRepo: [\n \"POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\"\n ],\n addSelectedRepoToOrgSecret: [\n \"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\"\n ],\n addSelectedRepoToOrgVariable: [\n \"PUT /orgs/{org}/actions/variables/{name}/repositories/{repository_id}\"\n ],\n addSelectedRepoToRequiredWorkflow: [\n \"PUT /orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories/{repository_id}\"\n ],\n approveWorkflowRun: [\n \"POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve\"\n ],\n cancelWorkflowRun: [\n \"POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel\"\n ],\n createEnvironmentVariable: [\n \"POST /repositories/{repository_id}/environments/{environment_name}/variables\"\n ],\n createOrUpdateEnvironmentSecret: [\n \"PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\"\n ],\n createOrUpdateOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}\"],\n createOrUpdateRepoSecret: [\n \"PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}\"\n ],\n createOrgVariable: [\"POST /orgs/{org}/actions/variables\"],\n createRegistrationTokenForOrg: [\n \"POST /orgs/{org}/actions/runners/registration-token\"\n ],\n createRegistrationTokenForRepo: [\n \"POST /repos/{owner}/{repo}/actions/runners/registration-token\"\n ],\n createRemoveTokenForOrg: [\"POST /orgs/{org}/actions/runners/remove-token\"],\n createRemoveTokenForRepo: [\n \"POST /repos/{owner}/{repo}/actions/runners/remove-token\"\n ],\n createRepoVariable: [\"POST /repos/{owner}/{repo}/actions/variables\"],\n createRequiredWorkflow: [\"POST /orgs/{org}/actions/required_workflows\"],\n createWorkflowDispatch: [\n \"POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches\"\n ],\n deleteActionsCacheById: [\n \"DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}\"\n ],\n deleteActionsCacheByKey: [\n \"DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}\"\n ],\n deleteArtifact: [\n \"DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}\"\n ],\n deleteEnvironmentSecret: [\n \"DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\"\n ],\n deleteEnvironmentVariable: [\n \"DELETE /repositories/{repository_id}/environments/{environment_name}/variables/{name}\"\n ],\n deleteOrgSecret: [\"DELETE /orgs/{org}/actions/secrets/{secret_name}\"],\n deleteOrgVariable: [\"DELETE /orgs/{org}/actions/variables/{name}\"],\n deleteRepoSecret: [\n \"DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}\"\n ],\n deleteRepoVariable: [\n \"DELETE /repos/{owner}/{repo}/actions/variables/{name}\"\n ],\n deleteRequiredWorkflow: [\n \"DELETE /orgs/{org}/actions/required_workflows/{required_workflow_id}\"\n ],\n deleteSelfHostedRunnerFromOrg: [\n \"DELETE /orgs/{org}/actions/runners/{runner_id}\"\n ],\n deleteSelfHostedRunnerFromRepo: [\n \"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}\"\n ],\n deleteWorkflowRun: [\"DELETE 
/repos/{owner}/{repo}/actions/runs/{run_id}\"],\n deleteWorkflowRunLogs: [\n \"DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs\"\n ],\n disableSelectedRepositoryGithubActionsOrganization: [\n \"DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}\"\n ],\n disableWorkflow: [\n \"PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable\"\n ],\n downloadArtifact: [\n \"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}\"\n ],\n downloadJobLogsForWorkflowRun: [\n \"GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs\"\n ],\n downloadWorkflowRunAttemptLogs: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs\"\n ],\n downloadWorkflowRunLogs: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs\"\n ],\n enableSelectedRepositoryGithubActionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions/repositories/{repository_id}\"\n ],\n enableWorkflow: [\n \"PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable\"\n ],\n generateRunnerJitconfigForOrg: [\n \"POST /orgs/{org}/actions/runners/generate-jitconfig\"\n ],\n generateRunnerJitconfigForRepo: [\n \"POST /repos/{owner}/{repo}/actions/runners/generate-jitconfig\"\n ],\n getActionsCacheList: [\"GET /repos/{owner}/{repo}/actions/caches\"],\n getActionsCacheUsage: [\"GET /repos/{owner}/{repo}/actions/cache/usage\"],\n getActionsCacheUsageByRepoForOrg: [\n \"GET /orgs/{org}/actions/cache/usage-by-repository\"\n ],\n getActionsCacheUsageForOrg: [\"GET /orgs/{org}/actions/cache/usage\"],\n getAllowedActionsOrganization: [\n \"GET /orgs/{org}/actions/permissions/selected-actions\"\n ],\n getAllowedActionsRepository: [\n \"GET /repos/{owner}/{repo}/actions/permissions/selected-actions\"\n ],\n getArtifact: [\"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}\"],\n getEnvironmentPublicKey: [\n \"GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key\"\n ],\n getEnvironmentSecret: [\n \"GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\"\n ],\n getEnvironmentVariable: [\n \"GET /repositories/{repository_id}/environments/{environment_name}/variables/{name}\"\n ],\n getGithubActionsDefaultWorkflowPermissionsOrganization: [\n \"GET /orgs/{org}/actions/permissions/workflow\"\n ],\n getGithubActionsDefaultWorkflowPermissionsRepository: [\n \"GET /repos/{owner}/{repo}/actions/permissions/workflow\"\n ],\n getGithubActionsPermissionsOrganization: [\n \"GET /orgs/{org}/actions/permissions\"\n ],\n getGithubActionsPermissionsRepository: [\n \"GET /repos/{owner}/{repo}/actions/permissions\"\n ],\n getJobForWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/jobs/{job_id}\"],\n getOrgPublicKey: [\"GET /orgs/{org}/actions/secrets/public-key\"],\n getOrgSecret: [\"GET /orgs/{org}/actions/secrets/{secret_name}\"],\n getOrgVariable: [\"GET /orgs/{org}/actions/variables/{name}\"],\n getPendingDeploymentsForRun: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments\"\n ],\n getRepoPermissions: [\n \"GET /repos/{owner}/{repo}/actions/permissions\",\n {},\n { renamed: [\"actions\", \"getGithubActionsPermissionsRepository\"] }\n ],\n getRepoPublicKey: [\"GET /repos/{owner}/{repo}/actions/secrets/public-key\"],\n getRepoRequiredWorkflow: [\n \"GET /repos/{org}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}\"\n ],\n getRepoRequiredWorkflowUsage: [\n \"GET /repos/{org}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}/timing\"\n ],\n getRepoSecret: 
[\"GET /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n getRepoVariable: [\"GET /repos/{owner}/{repo}/actions/variables/{name}\"],\n getRequiredWorkflow: [\n \"GET /orgs/{org}/actions/required_workflows/{required_workflow_id}\"\n ],\n getReviewsForRun: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals\"\n ],\n getSelfHostedRunnerForOrg: [\"GET /orgs/{org}/actions/runners/{runner_id}\"],\n getSelfHostedRunnerForRepo: [\n \"GET /repos/{owner}/{repo}/actions/runners/{runner_id}\"\n ],\n getWorkflow: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}\"],\n getWorkflowAccessToRepository: [\n \"GET /repos/{owner}/{repo}/actions/permissions/access\"\n ],\n getWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}\"],\n getWorkflowRunAttempt: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}\"\n ],\n getWorkflowRunUsage: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing\"\n ],\n getWorkflowUsage: [\n \"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing\"\n ],\n listArtifactsForRepo: [\"GET /repos/{owner}/{repo}/actions/artifacts\"],\n listEnvironmentSecrets: [\n \"GET /repositories/{repository_id}/environments/{environment_name}/secrets\"\n ],\n listEnvironmentVariables: [\n \"GET /repositories/{repository_id}/environments/{environment_name}/variables\"\n ],\n listJobsForWorkflowRun: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs\"\n ],\n listJobsForWorkflowRunAttempt: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs\"\n ],\n listLabelsForSelfHostedRunnerForOrg: [\n \"GET /orgs/{org}/actions/runners/{runner_id}/labels\"\n ],\n listLabelsForSelfHostedRunnerForRepo: [\n \"GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\"\n ],\n listOrgSecrets: [\"GET /orgs/{org}/actions/secrets\"],\n listOrgVariables: [\"GET /orgs/{org}/actions/variables\"],\n listRepoOrganizationSecrets: [\n \"GET /repos/{owner}/{repo}/actions/organization-secrets\"\n ],\n listRepoOrganizationVariables: [\n \"GET /repos/{owner}/{repo}/actions/organization-variables\"\n ],\n listRepoRequiredWorkflows: [\n \"GET /repos/{org}/{repo}/actions/required_workflows\"\n ],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/actions/secrets\"],\n listRepoVariables: [\"GET /repos/{owner}/{repo}/actions/variables\"],\n listRepoWorkflows: [\"GET /repos/{owner}/{repo}/actions/workflows\"],\n listRequiredWorkflowRuns: [\n \"GET /repos/{owner}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}/runs\"\n ],\n listRequiredWorkflows: [\"GET /orgs/{org}/actions/required_workflows\"],\n listRunnerApplicationsForOrg: [\"GET /orgs/{org}/actions/runners/downloads\"],\n listRunnerApplicationsForRepo: [\n \"GET /repos/{owner}/{repo}/actions/runners/downloads\"\n ],\n listSelectedReposForOrgSecret: [\n \"GET /orgs/{org}/actions/secrets/{secret_name}/repositories\"\n ],\n listSelectedReposForOrgVariable: [\n \"GET /orgs/{org}/actions/variables/{name}/repositories\"\n ],\n listSelectedRepositoriesEnabledGithubActionsOrganization: [\n \"GET /orgs/{org}/actions/permissions/repositories\"\n ],\n listSelectedRepositoriesRequiredWorkflow: [\n \"GET /orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories\"\n ],\n listSelfHostedRunnersForOrg: [\"GET /orgs/{org}/actions/runners\"],\n listSelfHostedRunnersForRepo: [\"GET /repos/{owner}/{repo}/actions/runners\"],\n listWorkflowRunArtifacts: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts\"\n ],\n listWorkflowRuns: [\n 
\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs\"\n ],\n listWorkflowRunsForRepo: [\"GET /repos/{owner}/{repo}/actions/runs\"],\n reRunJobForWorkflowRun: [\n \"POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun\"\n ],\n reRunWorkflow: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun\"],\n reRunWorkflowFailedJobs: [\n \"POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs\"\n ],\n removeAllCustomLabelsFromSelfHostedRunnerForOrg: [\n \"DELETE /orgs/{org}/actions/runners/{runner_id}/labels\"\n ],\n removeAllCustomLabelsFromSelfHostedRunnerForRepo: [\n \"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\"\n ],\n removeCustomLabelFromSelfHostedRunnerForOrg: [\n \"DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}\"\n ],\n removeCustomLabelFromSelfHostedRunnerForRepo: [\n \"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}\"\n ],\n removeSelectedRepoFromOrgSecret: [\n \"DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\"\n ],\n removeSelectedRepoFromOrgVariable: [\n \"DELETE /orgs/{org}/actions/variables/{name}/repositories/{repository_id}\"\n ],\n removeSelectedRepoFromRequiredWorkflow: [\n \"DELETE /orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories/{repository_id}\"\n ],\n reviewCustomGatesForRun: [\n \"POST /repos/{owner}/{repo}/actions/runs/{run_id}/deployment_protection_rule\"\n ],\n reviewPendingDeploymentsForRun: [\n \"POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments\"\n ],\n setAllowedActionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions/selected-actions\"\n ],\n setAllowedActionsRepository: [\n \"PUT /repos/{owner}/{repo}/actions/permissions/selected-actions\"\n ],\n setCustomLabelsForSelfHostedRunnerForOrg: [\n \"PUT /orgs/{org}/actions/runners/{runner_id}/labels\"\n ],\n setCustomLabelsForSelfHostedRunnerForRepo: [\n \"PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\"\n ],\n setGithubActionsDefaultWorkflowPermissionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions/workflow\"\n ],\n setGithubActionsDefaultWorkflowPermissionsRepository: [\n \"PUT /repos/{owner}/{repo}/actions/permissions/workflow\"\n ],\n setGithubActionsPermissionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions\"\n ],\n setGithubActionsPermissionsRepository: [\n \"PUT /repos/{owner}/{repo}/actions/permissions\"\n ],\n setSelectedReposForOrgSecret: [\n \"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories\"\n ],\n setSelectedReposForOrgVariable: [\n \"PUT /orgs/{org}/actions/variables/{name}/repositories\"\n ],\n setSelectedReposToRequiredWorkflow: [\n \"PUT /orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories\"\n ],\n setSelectedRepositoriesEnabledGithubActionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions/repositories\"\n ],\n setWorkflowAccessToRepository: [\n \"PUT /repos/{owner}/{repo}/actions/permissions/access\"\n ],\n updateEnvironmentVariable: [\n \"PATCH /repositories/{repository_id}/environments/{environment_name}/variables/{name}\"\n ],\n updateOrgVariable: [\"PATCH /orgs/{org}/actions/variables/{name}\"],\n updateRepoVariable: [\n \"PATCH /repos/{owner}/{repo}/actions/variables/{name}\"\n ],\n updateRequiredWorkflow: [\n \"PATCH /orgs/{org}/actions/required_workflows/{required_workflow_id}\"\n ]\n },\n activity: {\n checkRepoIsStarredByAuthenticatedUser: [\"GET /user/starred/{owner}/{repo}\"],\n deleteRepoSubscription: [\"DELETE /repos/{owner}/{repo}/subscription\"],\n 
deleteThreadSubscription: [\n \"DELETE /notifications/threads/{thread_id}/subscription\"\n ],\n getFeeds: [\"GET /feeds\"],\n getRepoSubscription: [\"GET /repos/{owner}/{repo}/subscription\"],\n getThread: [\"GET /notifications/threads/{thread_id}\"],\n getThreadSubscriptionForAuthenticatedUser: [\n \"GET /notifications/threads/{thread_id}/subscription\"\n ],\n listEventsForAuthenticatedUser: [\"GET /users/{username}/events\"],\n listNotificationsForAuthenticatedUser: [\"GET /notifications\"],\n listOrgEventsForAuthenticatedUser: [\n \"GET /users/{username}/events/orgs/{org}\"\n ],\n listPublicEvents: [\"GET /events\"],\n listPublicEventsForRepoNetwork: [\"GET /networks/{owner}/{repo}/events\"],\n listPublicEventsForUser: [\"GET /users/{username}/events/public\"],\n listPublicOrgEvents: [\"GET /orgs/{org}/events\"],\n listReceivedEventsForUser: [\"GET /users/{username}/received_events\"],\n listReceivedPublicEventsForUser: [\n \"GET /users/{username}/received_events/public\"\n ],\n listRepoEvents: [\"GET /repos/{owner}/{repo}/events\"],\n listRepoNotificationsForAuthenticatedUser: [\n \"GET /repos/{owner}/{repo}/notifications\"\n ],\n listReposStarredByAuthenticatedUser: [\"GET /user/starred\"],\n listReposStarredByUser: [\"GET /users/{username}/starred\"],\n listReposWatchedByUser: [\"GET /users/{username}/subscriptions\"],\n listStargazersForRepo: [\"GET /repos/{owner}/{repo}/stargazers\"],\n listWatchedReposForAuthenticatedUser: [\"GET /user/subscriptions\"],\n listWatchersForRepo: [\"GET /repos/{owner}/{repo}/subscribers\"],\n markNotificationsAsRead: [\"PUT /notifications\"],\n markRepoNotificationsAsRead: [\"PUT /repos/{owner}/{repo}/notifications\"],\n markThreadAsRead: [\"PATCH /notifications/threads/{thread_id}\"],\n setRepoSubscription: [\"PUT /repos/{owner}/{repo}/subscription\"],\n setThreadSubscription: [\n \"PUT /notifications/threads/{thread_id}/subscription\"\n ],\n starRepoForAuthenticatedUser: [\"PUT /user/starred/{owner}/{repo}\"],\n unstarRepoForAuthenticatedUser: [\"DELETE /user/starred/{owner}/{repo}\"]\n },\n apps: {\n addRepoToInstallation: [\n \"PUT /user/installations/{installation_id}/repositories/{repository_id}\",\n {},\n { renamed: [\"apps\", \"addRepoToInstallationForAuthenticatedUser\"] }\n ],\n addRepoToInstallationForAuthenticatedUser: [\n \"PUT /user/installations/{installation_id}/repositories/{repository_id}\"\n ],\n checkToken: [\"POST /applications/{client_id}/token\"],\n createFromManifest: [\"POST /app-manifests/{code}/conversions\"],\n createInstallationAccessToken: [\n \"POST /app/installations/{installation_id}/access_tokens\"\n ],\n deleteAuthorization: [\"DELETE /applications/{client_id}/grant\"],\n deleteInstallation: [\"DELETE /app/installations/{installation_id}\"],\n deleteToken: [\"DELETE /applications/{client_id}/token\"],\n getAuthenticated: [\"GET /app\"],\n getBySlug: [\"GET /apps/{app_slug}\"],\n getInstallation: [\"GET /app/installations/{installation_id}\"],\n getOrgInstallation: [\"GET /orgs/{org}/installation\"],\n getRepoInstallation: [\"GET /repos/{owner}/{repo}/installation\"],\n getSubscriptionPlanForAccount: [\n \"GET /marketplace_listing/accounts/{account_id}\"\n ],\n getSubscriptionPlanForAccountStubbed: [\n \"GET /marketplace_listing/stubbed/accounts/{account_id}\"\n ],\n getUserInstallation: [\"GET /users/{username}/installation\"],\n getWebhookConfigForApp: [\"GET /app/hook/config\"],\n getWebhookDelivery: [\"GET /app/hook/deliveries/{delivery_id}\"],\n listAccountsForPlan: [\"GET 
/marketplace_listing/plans/{plan_id}/accounts\"],\n listAccountsForPlanStubbed: [\n \"GET /marketplace_listing/stubbed/plans/{plan_id}/accounts\"\n ],\n listInstallationReposForAuthenticatedUser: [\n \"GET /user/installations/{installation_id}/repositories\"\n ],\n listInstallationRequestsForAuthenticatedApp: [\n \"GET /app/installation-requests\"\n ],\n listInstallations: [\"GET /app/installations\"],\n listInstallationsForAuthenticatedUser: [\"GET /user/installations\"],\n listPlans: [\"GET /marketplace_listing/plans\"],\n listPlansStubbed: [\"GET /marketplace_listing/stubbed/plans\"],\n listReposAccessibleToInstallation: [\"GET /installation/repositories\"],\n listSubscriptionsForAuthenticatedUser: [\"GET /user/marketplace_purchases\"],\n listSubscriptionsForAuthenticatedUserStubbed: [\n \"GET /user/marketplace_purchases/stubbed\"\n ],\n listWebhookDeliveries: [\"GET /app/hook/deliveries\"],\n redeliverWebhookDelivery: [\n \"POST /app/hook/deliveries/{delivery_id}/attempts\"\n ],\n removeRepoFromInstallation: [\n \"DELETE /user/installations/{installation_id}/repositories/{repository_id}\",\n {},\n { renamed: [\"apps\", \"removeRepoFromInstallationForAuthenticatedUser\"] }\n ],\n removeRepoFromInstallationForAuthenticatedUser: [\n \"DELETE /user/installations/{installation_id}/repositories/{repository_id}\"\n ],\n resetToken: [\"PATCH /applications/{client_id}/token\"],\n revokeInstallationAccessToken: [\"DELETE /installation/token\"],\n scopeToken: [\"POST /applications/{client_id}/token/scoped\"],\n suspendInstallation: [\"PUT /app/installations/{installation_id}/suspended\"],\n unsuspendInstallation: [\n \"DELETE /app/installations/{installation_id}/suspended\"\n ],\n updateWebhookConfigForApp: [\"PATCH /app/hook/config\"]\n },\n billing: {\n getGithubActionsBillingOrg: [\"GET /orgs/{org}/settings/billing/actions\"],\n getGithubActionsBillingUser: [\n \"GET /users/{username}/settings/billing/actions\"\n ],\n getGithubPackagesBillingOrg: [\"GET /orgs/{org}/settings/billing/packages\"],\n getGithubPackagesBillingUser: [\n \"GET /users/{username}/settings/billing/packages\"\n ],\n getSharedStorageBillingOrg: [\n \"GET /orgs/{org}/settings/billing/shared-storage\"\n ],\n getSharedStorageBillingUser: [\n \"GET /users/{username}/settings/billing/shared-storage\"\n ]\n },\n checks: {\n create: [\"POST /repos/{owner}/{repo}/check-runs\"],\n createSuite: [\"POST /repos/{owner}/{repo}/check-suites\"],\n get: [\"GET /repos/{owner}/{repo}/check-runs/{check_run_id}\"],\n getSuite: [\"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}\"],\n listAnnotations: [\n \"GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations\"\n ],\n listForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/check-runs\"],\n listForSuite: [\n \"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs\"\n ],\n listSuitesForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/check-suites\"],\n rerequestRun: [\n \"POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest\"\n ],\n rerequestSuite: [\n \"POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest\"\n ],\n setSuitesPreferences: [\n \"PATCH /repos/{owner}/{repo}/check-suites/preferences\"\n ],\n update: [\"PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}\"]\n },\n codeScanning: {\n deleteAnalysis: [\n \"DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}\"\n ],\n getAlert: [\n \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\",\n {},\n { renamedParameters: { alert_id: \"alert_number\" 
} }\n ],\n getAnalysis: [\n \"GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}\"\n ],\n getCodeqlDatabase: [\n \"GET /repos/{owner}/{repo}/code-scanning/codeql/databases/{language}\"\n ],\n getDefaultSetup: [\"GET /repos/{owner}/{repo}/code-scanning/default-setup\"],\n getSarif: [\"GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}\"],\n listAlertInstances: [\n \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\"\n ],\n listAlertsForOrg: [\"GET /orgs/{org}/code-scanning/alerts\"],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/code-scanning/alerts\"],\n listAlertsInstances: [\n \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\",\n {},\n { renamed: [\"codeScanning\", \"listAlertInstances\"] }\n ],\n listCodeqlDatabases: [\n \"GET /repos/{owner}/{repo}/code-scanning/codeql/databases\"\n ],\n listRecentAnalyses: [\"GET /repos/{owner}/{repo}/code-scanning/analyses\"],\n updateAlert: [\n \"PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\"\n ],\n updateDefaultSetup: [\n \"PATCH /repos/{owner}/{repo}/code-scanning/default-setup\"\n ],\n uploadSarif: [\"POST /repos/{owner}/{repo}/code-scanning/sarifs\"]\n },\n codesOfConduct: {\n getAllCodesOfConduct: [\"GET /codes_of_conduct\"],\n getConductCode: [\"GET /codes_of_conduct/{key}\"]\n },\n codespaces: {\n addRepositoryForSecretForAuthenticatedUser: [\n \"PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}\"\n ],\n addSelectedRepoToOrgSecret: [\n \"PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}\"\n ],\n codespaceMachinesForAuthenticatedUser: [\n \"GET /user/codespaces/{codespace_name}/machines\"\n ],\n createForAuthenticatedUser: [\"POST /user/codespaces\"],\n createOrUpdateOrgSecret: [\n \"PUT /orgs/{org}/codespaces/secrets/{secret_name}\"\n ],\n createOrUpdateRepoSecret: [\n \"PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}\"\n ],\n createOrUpdateSecretForAuthenticatedUser: [\n \"PUT /user/codespaces/secrets/{secret_name}\"\n ],\n createWithPrForAuthenticatedUser: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces\"\n ],\n createWithRepoForAuthenticatedUser: [\n \"POST /repos/{owner}/{repo}/codespaces\"\n ],\n deleteCodespacesBillingUsers: [\n \"DELETE /orgs/{org}/codespaces/billing/selected_users\"\n ],\n deleteForAuthenticatedUser: [\"DELETE /user/codespaces/{codespace_name}\"],\n deleteFromOrganization: [\n \"DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}\"\n ],\n deleteOrgSecret: [\"DELETE /orgs/{org}/codespaces/secrets/{secret_name}\"],\n deleteRepoSecret: [\n \"DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}\"\n ],\n deleteSecretForAuthenticatedUser: [\n \"DELETE /user/codespaces/secrets/{secret_name}\"\n ],\n exportForAuthenticatedUser: [\n \"POST /user/codespaces/{codespace_name}/exports\"\n ],\n getCodespacesForUserInOrg: [\n \"GET /orgs/{org}/members/{username}/codespaces\"\n ],\n getExportDetailsForAuthenticatedUser: [\n \"GET /user/codespaces/{codespace_name}/exports/{export_id}\"\n ],\n getForAuthenticatedUser: [\"GET /user/codespaces/{codespace_name}\"],\n getOrgPublicKey: [\"GET /orgs/{org}/codespaces/secrets/public-key\"],\n getOrgSecret: [\"GET /orgs/{org}/codespaces/secrets/{secret_name}\"],\n getPublicKeyForAuthenticatedUser: [\n \"GET /user/codespaces/secrets/public-key\"\n ],\n getRepoPublicKey: [\n \"GET /repos/{owner}/{repo}/codespaces/secrets/public-key\"\n ],\n getRepoSecret: [\n \"GET 
/repos/{owner}/{repo}/codespaces/secrets/{secret_name}\"\n ],\n getSecretForAuthenticatedUser: [\n \"GET /user/codespaces/secrets/{secret_name}\"\n ],\n listDevcontainersInRepositoryForAuthenticatedUser: [\n \"GET /repos/{owner}/{repo}/codespaces/devcontainers\"\n ],\n listForAuthenticatedUser: [\"GET /user/codespaces\"],\n listInOrganization: [\n \"GET /orgs/{org}/codespaces\",\n {},\n { renamedParameters: { org_id: \"org\" } }\n ],\n listInRepositoryForAuthenticatedUser: [\n \"GET /repos/{owner}/{repo}/codespaces\"\n ],\n listOrgSecrets: [\"GET /orgs/{org}/codespaces/secrets\"],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/codespaces/secrets\"],\n listRepositoriesForSecretForAuthenticatedUser: [\n \"GET /user/codespaces/secrets/{secret_name}/repositories\"\n ],\n listSecretsForAuthenticatedUser: [\"GET /user/codespaces/secrets\"],\n listSelectedReposForOrgSecret: [\n \"GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories\"\n ],\n preFlightWithRepoForAuthenticatedUser: [\n \"GET /repos/{owner}/{repo}/codespaces/new\"\n ],\n publishForAuthenticatedUser: [\n \"POST /user/codespaces/{codespace_name}/publish\"\n ],\n removeRepositoryForSecretForAuthenticatedUser: [\n \"DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}\"\n ],\n removeSelectedRepoFromOrgSecret: [\n \"DELETE /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}\"\n ],\n repoMachinesForAuthenticatedUser: [\n \"GET /repos/{owner}/{repo}/codespaces/machines\"\n ],\n setCodespacesBilling: [\"PUT /orgs/{org}/codespaces/billing\"],\n setCodespacesBillingUsers: [\n \"POST /orgs/{org}/codespaces/billing/selected_users\"\n ],\n setRepositoriesForSecretForAuthenticatedUser: [\n \"PUT /user/codespaces/secrets/{secret_name}/repositories\"\n ],\n setSelectedReposForOrgSecret: [\n \"PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories\"\n ],\n startForAuthenticatedUser: [\"POST /user/codespaces/{codespace_name}/start\"],\n stopForAuthenticatedUser: [\"POST /user/codespaces/{codespace_name}/stop\"],\n stopInOrganization: [\n \"POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop\"\n ],\n updateForAuthenticatedUser: [\"PATCH /user/codespaces/{codespace_name}\"]\n },\n dependabot: {\n addSelectedRepoToOrgSecret: [\n \"PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}\"\n ],\n createOrUpdateOrgSecret: [\n \"PUT /orgs/{org}/dependabot/secrets/{secret_name}\"\n ],\n createOrUpdateRepoSecret: [\n \"PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}\"\n ],\n deleteOrgSecret: [\"DELETE /orgs/{org}/dependabot/secrets/{secret_name}\"],\n deleteRepoSecret: [\n \"DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}\"\n ],\n getAlert: [\"GET /repos/{owner}/{repo}/dependabot/alerts/{alert_number}\"],\n getOrgPublicKey: [\"GET /orgs/{org}/dependabot/secrets/public-key\"],\n getOrgSecret: [\"GET /orgs/{org}/dependabot/secrets/{secret_name}\"],\n getRepoPublicKey: [\n \"GET /repos/{owner}/{repo}/dependabot/secrets/public-key\"\n ],\n getRepoSecret: [\n \"GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}\"\n ],\n listAlertsForEnterprise: [\n \"GET /enterprises/{enterprise}/dependabot/alerts\"\n ],\n listAlertsForOrg: [\"GET /orgs/{org}/dependabot/alerts\"],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/dependabot/alerts\"],\n listOrgSecrets: [\"GET /orgs/{org}/dependabot/secrets\"],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/dependabot/secrets\"],\n listSelectedReposForOrgSecret: [\n \"GET 
/orgs/{org}/dependabot/secrets/{secret_name}/repositories\"\n ],\n removeSelectedRepoFromOrgSecret: [\n \"DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}\"\n ],\n setSelectedReposForOrgSecret: [\n \"PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories\"\n ],\n updateAlert: [\n \"PATCH /repos/{owner}/{repo}/dependabot/alerts/{alert_number}\"\n ]\n },\n dependencyGraph: {\n createRepositorySnapshot: [\n \"POST /repos/{owner}/{repo}/dependency-graph/snapshots\"\n ],\n diffRange: [\n \"GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}\"\n ],\n exportSbom: [\"GET /repos/{owner}/{repo}/dependency-graph/sbom\"]\n },\n emojis: { get: [\"GET /emojis\"] },\n gists: {\n checkIsStarred: [\"GET /gists/{gist_id}/star\"],\n create: [\"POST /gists\"],\n createComment: [\"POST /gists/{gist_id}/comments\"],\n delete: [\"DELETE /gists/{gist_id}\"],\n deleteComment: [\"DELETE /gists/{gist_id}/comments/{comment_id}\"],\n fork: [\"POST /gists/{gist_id}/forks\"],\n get: [\"GET /gists/{gist_id}\"],\n getComment: [\"GET /gists/{gist_id}/comments/{comment_id}\"],\n getRevision: [\"GET /gists/{gist_id}/{sha}\"],\n list: [\"GET /gists\"],\n listComments: [\"GET /gists/{gist_id}/comments\"],\n listCommits: [\"GET /gists/{gist_id}/commits\"],\n listForUser: [\"GET /users/{username}/gists\"],\n listForks: [\"GET /gists/{gist_id}/forks\"],\n listPublic: [\"GET /gists/public\"],\n listStarred: [\"GET /gists/starred\"],\n star: [\"PUT /gists/{gist_id}/star\"],\n unstar: [\"DELETE /gists/{gist_id}/star\"],\n update: [\"PATCH /gists/{gist_id}\"],\n updateComment: [\"PATCH /gists/{gist_id}/comments/{comment_id}\"]\n },\n git: {\n createBlob: [\"POST /repos/{owner}/{repo}/git/blobs\"],\n createCommit: [\"POST /repos/{owner}/{repo}/git/commits\"],\n createRef: [\"POST /repos/{owner}/{repo}/git/refs\"],\n createTag: [\"POST /repos/{owner}/{repo}/git/tags\"],\n createTree: [\"POST /repos/{owner}/{repo}/git/trees\"],\n deleteRef: [\"DELETE /repos/{owner}/{repo}/git/refs/{ref}\"],\n getBlob: [\"GET /repos/{owner}/{repo}/git/blobs/{file_sha}\"],\n getCommit: [\"GET /repos/{owner}/{repo}/git/commits/{commit_sha}\"],\n getRef: [\"GET /repos/{owner}/{repo}/git/ref/{ref}\"],\n getTag: [\"GET /repos/{owner}/{repo}/git/tags/{tag_sha}\"],\n getTree: [\"GET /repos/{owner}/{repo}/git/trees/{tree_sha}\"],\n listMatchingRefs: [\"GET /repos/{owner}/{repo}/git/matching-refs/{ref}\"],\n updateRef: [\"PATCH /repos/{owner}/{repo}/git/refs/{ref}\"]\n },\n gitignore: {\n getAllTemplates: [\"GET /gitignore/templates\"],\n getTemplate: [\"GET /gitignore/templates/{name}\"]\n },\n interactions: {\n getRestrictionsForAuthenticatedUser: [\"GET /user/interaction-limits\"],\n getRestrictionsForOrg: [\"GET /orgs/{org}/interaction-limits\"],\n getRestrictionsForRepo: [\"GET /repos/{owner}/{repo}/interaction-limits\"],\n getRestrictionsForYourPublicRepos: [\n \"GET /user/interaction-limits\",\n {},\n { renamed: [\"interactions\", \"getRestrictionsForAuthenticatedUser\"] }\n ],\n removeRestrictionsForAuthenticatedUser: [\"DELETE /user/interaction-limits\"],\n removeRestrictionsForOrg: [\"DELETE /orgs/{org}/interaction-limits\"],\n removeRestrictionsForRepo: [\n \"DELETE /repos/{owner}/{repo}/interaction-limits\"\n ],\n removeRestrictionsForYourPublicRepos: [\n \"DELETE /user/interaction-limits\",\n {},\n { renamed: [\"interactions\", \"removeRestrictionsForAuthenticatedUser\"] }\n ],\n setRestrictionsForAuthenticatedUser: [\"PUT /user/interaction-limits\"],\n setRestrictionsForOrg: [\"PUT 
/orgs/{org}/interaction-limits\"],\n setRestrictionsForRepo: [\"PUT /repos/{owner}/{repo}/interaction-limits\"],\n setRestrictionsForYourPublicRepos: [\n \"PUT /user/interaction-limits\",\n {},\n { renamed: [\"interactions\", \"setRestrictionsForAuthenticatedUser\"] }\n ]\n },\n issues: {\n addAssignees: [\n \"POST /repos/{owner}/{repo}/issues/{issue_number}/assignees\"\n ],\n addLabels: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n checkUserCanBeAssigned: [\"GET /repos/{owner}/{repo}/assignees/{assignee}\"],\n checkUserCanBeAssignedToIssue: [\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/assignees/{assignee}\"\n ],\n create: [\"POST /repos/{owner}/{repo}/issues\"],\n createComment: [\n \"POST /repos/{owner}/{repo}/issues/{issue_number}/comments\"\n ],\n createLabel: [\"POST /repos/{owner}/{repo}/labels\"],\n createMilestone: [\"POST /repos/{owner}/{repo}/milestones\"],\n deleteComment: [\n \"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}\"\n ],\n deleteLabel: [\"DELETE /repos/{owner}/{repo}/labels/{name}\"],\n deleteMilestone: [\n \"DELETE /repos/{owner}/{repo}/milestones/{milestone_number}\"\n ],\n get: [\"GET /repos/{owner}/{repo}/issues/{issue_number}\"],\n getComment: [\"GET /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n getEvent: [\"GET /repos/{owner}/{repo}/issues/events/{event_id}\"],\n getLabel: [\"GET /repos/{owner}/{repo}/labels/{name}\"],\n getMilestone: [\"GET /repos/{owner}/{repo}/milestones/{milestone_number}\"],\n list: [\"GET /issues\"],\n listAssignees: [\"GET /repos/{owner}/{repo}/assignees\"],\n listComments: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/comments\"],\n listCommentsForRepo: [\"GET /repos/{owner}/{repo}/issues/comments\"],\n listEvents: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/events\"],\n listEventsForRepo: [\"GET /repos/{owner}/{repo}/issues/events\"],\n listEventsForTimeline: [\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/timeline\"\n ],\n listForAuthenticatedUser: [\"GET /user/issues\"],\n listForOrg: [\"GET /orgs/{org}/issues\"],\n listForRepo: [\"GET /repos/{owner}/{repo}/issues\"],\n listLabelsForMilestone: [\n \"GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels\"\n ],\n listLabelsForRepo: [\"GET /repos/{owner}/{repo}/labels\"],\n listLabelsOnIssue: [\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/labels\"\n ],\n listMilestones: [\"GET /repos/{owner}/{repo}/milestones\"],\n lock: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n removeAllLabels: [\n \"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels\"\n ],\n removeAssignees: [\n \"DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees\"\n ],\n removeLabel: [\n \"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}\"\n ],\n setLabels: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n unlock: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n update: [\"PATCH /repos/{owner}/{repo}/issues/{issue_number}\"],\n updateComment: [\"PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n updateLabel: [\"PATCH /repos/{owner}/{repo}/labels/{name}\"],\n updateMilestone: [\n \"PATCH /repos/{owner}/{repo}/milestones/{milestone_number}\"\n ]\n },\n licenses: {\n get: [\"GET /licenses/{license}\"],\n getAllCommonlyUsed: [\"GET /licenses\"],\n getForRepo: [\"GET /repos/{owner}/{repo}/license\"]\n },\n markdown: {\n render: [\"POST /markdown\"],\n renderRaw: [\n \"POST /markdown/raw\",\n { headers: { \"content-type\": \"text/plain; charset=utf-8\" } }\n ]\n 
},\n meta: {\n get: [\"GET /meta\"],\n getAllVersions: [\"GET /versions\"],\n getOctocat: [\"GET /octocat\"],\n getZen: [\"GET /zen\"],\n root: [\"GET /\"]\n },\n migrations: {\n cancelImport: [\"DELETE /repos/{owner}/{repo}/import\"],\n deleteArchiveForAuthenticatedUser: [\n \"DELETE /user/migrations/{migration_id}/archive\"\n ],\n deleteArchiveForOrg: [\n \"DELETE /orgs/{org}/migrations/{migration_id}/archive\"\n ],\n downloadArchiveForOrg: [\n \"GET /orgs/{org}/migrations/{migration_id}/archive\"\n ],\n getArchiveForAuthenticatedUser: [\n \"GET /user/migrations/{migration_id}/archive\"\n ],\n getCommitAuthors: [\"GET /repos/{owner}/{repo}/import/authors\"],\n getImportStatus: [\"GET /repos/{owner}/{repo}/import\"],\n getLargeFiles: [\"GET /repos/{owner}/{repo}/import/large_files\"],\n getStatusForAuthenticatedUser: [\"GET /user/migrations/{migration_id}\"],\n getStatusForOrg: [\"GET /orgs/{org}/migrations/{migration_id}\"],\n listForAuthenticatedUser: [\"GET /user/migrations\"],\n listForOrg: [\"GET /orgs/{org}/migrations\"],\n listReposForAuthenticatedUser: [\n \"GET /user/migrations/{migration_id}/repositories\"\n ],\n listReposForOrg: [\"GET /orgs/{org}/migrations/{migration_id}/repositories\"],\n listReposForUser: [\n \"GET /user/migrations/{migration_id}/repositories\",\n {},\n { renamed: [\"migrations\", \"listReposForAuthenticatedUser\"] }\n ],\n mapCommitAuthor: [\"PATCH /repos/{owner}/{repo}/import/authors/{author_id}\"],\n setLfsPreference: [\"PATCH /repos/{owner}/{repo}/import/lfs\"],\n startForAuthenticatedUser: [\"POST /user/migrations\"],\n startForOrg: [\"POST /orgs/{org}/migrations\"],\n startImport: [\"PUT /repos/{owner}/{repo}/import\"],\n unlockRepoForAuthenticatedUser: [\n \"DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock\"\n ],\n unlockRepoForOrg: [\n \"DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock\"\n ],\n updateImport: [\"PATCH /repos/{owner}/{repo}/import\"]\n },\n orgs: {\n addSecurityManagerTeam: [\n \"PUT /orgs/{org}/security-managers/teams/{team_slug}\"\n ],\n blockUser: [\"PUT /orgs/{org}/blocks/{username}\"],\n cancelInvitation: [\"DELETE /orgs/{org}/invitations/{invitation_id}\"],\n checkBlockedUser: [\"GET /orgs/{org}/blocks/{username}\"],\n checkMembershipForUser: [\"GET /orgs/{org}/members/{username}\"],\n checkPublicMembershipForUser: [\"GET /orgs/{org}/public_members/{username}\"],\n convertMemberToOutsideCollaborator: [\n \"PUT /orgs/{org}/outside_collaborators/{username}\"\n ],\n createInvitation: [\"POST /orgs/{org}/invitations\"],\n createWebhook: [\"POST /orgs/{org}/hooks\"],\n delete: [\"DELETE /orgs/{org}\"],\n deleteWebhook: [\"DELETE /orgs/{org}/hooks/{hook_id}\"],\n enableOrDisableSecurityProductOnAllOrgRepos: [\n \"POST /orgs/{org}/{security_product}/{enablement}\"\n ],\n get: [\"GET /orgs/{org}\"],\n getMembershipForAuthenticatedUser: [\"GET /user/memberships/orgs/{org}\"],\n getMembershipForUser: [\"GET /orgs/{org}/memberships/{username}\"],\n getWebhook: [\"GET /orgs/{org}/hooks/{hook_id}\"],\n getWebhookConfigForOrg: [\"GET /orgs/{org}/hooks/{hook_id}/config\"],\n getWebhookDelivery: [\n \"GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}\"\n ],\n list: [\"GET /organizations\"],\n listAppInstallations: [\"GET /orgs/{org}/installations\"],\n listBlockedUsers: [\"GET /orgs/{org}/blocks\"],\n listFailedInvitations: [\"GET /orgs/{org}/failed_invitations\"],\n listForAuthenticatedUser: [\"GET /user/orgs\"],\n listForUser: [\"GET /users/{username}/orgs\"],\n listInvitationTeams: [\"GET 
/orgs/{org}/invitations/{invitation_id}/teams\"],\n listMembers: [\"GET /orgs/{org}/members\"],\n listMembershipsForAuthenticatedUser: [\"GET /user/memberships/orgs\"],\n listOutsideCollaborators: [\"GET /orgs/{org}/outside_collaborators\"],\n listPatGrantRepositories: [\n \"GET /organizations/{org}/personal-access-tokens/{pat_id}/repositories\"\n ],\n listPatGrantRequestRepositories: [\n \"GET /organizations/{org}/personal-access-token-requests/{pat_request_id}/repositories\"\n ],\n listPatGrantRequests: [\n \"GET /organizations/{org}/personal-access-token-requests\"\n ],\n listPatGrants: [\"GET /organizations/{org}/personal-access-tokens\"],\n listPendingInvitations: [\"GET /orgs/{org}/invitations\"],\n listPublicMembers: [\"GET /orgs/{org}/public_members\"],\n listSecurityManagerTeams: [\"GET /orgs/{org}/security-managers\"],\n listWebhookDeliveries: [\"GET /orgs/{org}/hooks/{hook_id}/deliveries\"],\n listWebhooks: [\"GET /orgs/{org}/hooks\"],\n pingWebhook: [\"POST /orgs/{org}/hooks/{hook_id}/pings\"],\n redeliverWebhookDelivery: [\n \"POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts\"\n ],\n removeMember: [\"DELETE /orgs/{org}/members/{username}\"],\n removeMembershipForUser: [\"DELETE /orgs/{org}/memberships/{username}\"],\n removeOutsideCollaborator: [\n \"DELETE /orgs/{org}/outside_collaborators/{username}\"\n ],\n removePublicMembershipForAuthenticatedUser: [\n \"DELETE /orgs/{org}/public_members/{username}\"\n ],\n removeSecurityManagerTeam: [\n \"DELETE /orgs/{org}/security-managers/teams/{team_slug}\"\n ],\n reviewPatGrantRequest: [\n \"POST /organizations/{org}/personal-access-token-requests/{pat_request_id}\"\n ],\n reviewPatGrantRequestsInBulk: [\n \"POST /organizations/{org}/personal-access-token-requests\"\n ],\n setMembershipForUser: [\"PUT /orgs/{org}/memberships/{username}\"],\n setPublicMembershipForAuthenticatedUser: [\n \"PUT /orgs/{org}/public_members/{username}\"\n ],\n unblockUser: [\"DELETE /orgs/{org}/blocks/{username}\"],\n update: [\"PATCH /orgs/{org}\"],\n updateMembershipForAuthenticatedUser: [\n \"PATCH /user/memberships/orgs/{org}\"\n ],\n updatePatAccess: [\n \"POST /organizations/{org}/personal-access-tokens/{pat_id}\"\n ],\n updatePatAccesses: [\"POST /organizations/{org}/personal-access-tokens\"],\n updateWebhook: [\"PATCH /orgs/{org}/hooks/{hook_id}\"],\n updateWebhookConfigForOrg: [\"PATCH /orgs/{org}/hooks/{hook_id}/config\"]\n },\n packages: {\n deletePackageForAuthenticatedUser: [\n \"DELETE /user/packages/{package_type}/{package_name}\"\n ],\n deletePackageForOrg: [\n \"DELETE /orgs/{org}/packages/{package_type}/{package_name}\"\n ],\n deletePackageForUser: [\n \"DELETE /users/{username}/packages/{package_type}/{package_name}\"\n ],\n deletePackageVersionForAuthenticatedUser: [\n \"DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}\"\n ],\n deletePackageVersionForOrg: [\n \"DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}\"\n ],\n deletePackageVersionForUser: [\n \"DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}\"\n ],\n getAllPackageVersionsForAPackageOwnedByAnOrg: [\n \"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\",\n {},\n { renamed: [\"packages\", \"getAllPackageVersionsForPackageOwnedByOrg\"] }\n ],\n getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: [\n \"GET /user/packages/{package_type}/{package_name}/versions\",\n {},\n {\n renamed: [\n \"packages\",\n 
\"getAllPackageVersionsForPackageOwnedByAuthenticatedUser\"\n ]\n }\n ],\n getAllPackageVersionsForPackageOwnedByAuthenticatedUser: [\n \"GET /user/packages/{package_type}/{package_name}/versions\"\n ],\n getAllPackageVersionsForPackageOwnedByOrg: [\n \"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\"\n ],\n getAllPackageVersionsForPackageOwnedByUser: [\n \"GET /users/{username}/packages/{package_type}/{package_name}/versions\"\n ],\n getPackageForAuthenticatedUser: [\n \"GET /user/packages/{package_type}/{package_name}\"\n ],\n getPackageForOrganization: [\n \"GET /orgs/{org}/packages/{package_type}/{package_name}\"\n ],\n getPackageForUser: [\n \"GET /users/{username}/packages/{package_type}/{package_name}\"\n ],\n getPackageVersionForAuthenticatedUser: [\n \"GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}\"\n ],\n getPackageVersionForOrganization: [\n \"GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}\"\n ],\n getPackageVersionForUser: [\n \"GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}\"\n ],\n listDockerMigrationConflictingPackagesForAuthenticatedUser: [\n \"GET /user/docker/conflicts\"\n ],\n listDockerMigrationConflictingPackagesForOrganization: [\n \"GET /orgs/{org}/docker/conflicts\"\n ],\n listDockerMigrationConflictingPackagesForUser: [\n \"GET /users/{username}/docker/conflicts\"\n ],\n listPackagesForAuthenticatedUser: [\"GET /user/packages\"],\n listPackagesForOrganization: [\"GET /orgs/{org}/packages\"],\n listPackagesForUser: [\"GET /users/{username}/packages\"],\n restorePackageForAuthenticatedUser: [\n \"POST /user/packages/{package_type}/{package_name}/restore{?token}\"\n ],\n restorePackageForOrg: [\n \"POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}\"\n ],\n restorePackageForUser: [\n \"POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}\"\n ],\n restorePackageVersionForAuthenticatedUser: [\n \"POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\"\n ],\n restorePackageVersionForOrg: [\n \"POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\"\n ],\n restorePackageVersionForUser: [\n \"POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\"\n ]\n },\n projects: {\n addCollaborator: [\"PUT /projects/{project_id}/collaborators/{username}\"],\n createCard: [\"POST /projects/columns/{column_id}/cards\"],\n createColumn: [\"POST /projects/{project_id}/columns\"],\n createForAuthenticatedUser: [\"POST /user/projects\"],\n createForOrg: [\"POST /orgs/{org}/projects\"],\n createForRepo: [\"POST /repos/{owner}/{repo}/projects\"],\n delete: [\"DELETE /projects/{project_id}\"],\n deleteCard: [\"DELETE /projects/columns/cards/{card_id}\"],\n deleteColumn: [\"DELETE /projects/columns/{column_id}\"],\n get: [\"GET /projects/{project_id}\"],\n getCard: [\"GET /projects/columns/cards/{card_id}\"],\n getColumn: [\"GET /projects/columns/{column_id}\"],\n getPermissionForUser: [\n \"GET /projects/{project_id}/collaborators/{username}/permission\"\n ],\n listCards: [\"GET /projects/columns/{column_id}/cards\"],\n listCollaborators: [\"GET /projects/{project_id}/collaborators\"],\n listColumns: [\"GET /projects/{project_id}/columns\"],\n listForOrg: [\"GET /orgs/{org}/projects\"],\n listForRepo: [\"GET /repos/{owner}/{repo}/projects\"],\n listForUser: [\"GET /users/{username}/projects\"],\n 
moveCard: [\"POST /projects/columns/cards/{card_id}/moves\"],\n moveColumn: [\"POST /projects/columns/{column_id}/moves\"],\n removeCollaborator: [\n \"DELETE /projects/{project_id}/collaborators/{username}\"\n ],\n update: [\"PATCH /projects/{project_id}\"],\n updateCard: [\"PATCH /projects/columns/cards/{card_id}\"],\n updateColumn: [\"PATCH /projects/columns/{column_id}\"]\n },\n pulls: {\n checkIfMerged: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n create: [\"POST /repos/{owner}/{repo}/pulls\"],\n createReplyForReviewComment: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies\"\n ],\n createReview: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n createReviewComment: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments\"\n ],\n deletePendingReview: [\n \"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"\n ],\n deleteReviewComment: [\n \"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}\"\n ],\n dismissReview: [\n \"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals\"\n ],\n get: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}\"],\n getReview: [\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"\n ],\n getReviewComment: [\"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}\"],\n list: [\"GET /repos/{owner}/{repo}/pulls\"],\n listCommentsForReview: [\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments\"\n ],\n listCommits: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/commits\"],\n listFiles: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/files\"],\n listRequestedReviewers: [\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"\n ],\n listReviewComments: [\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/comments\"\n ],\n listReviewCommentsForRepo: [\"GET /repos/{owner}/{repo}/pulls/comments\"],\n listReviews: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n merge: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n removeRequestedReviewers: [\n \"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"\n ],\n requestReviewers: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"\n ],\n submitReview: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events\"\n ],\n update: [\"PATCH /repos/{owner}/{repo}/pulls/{pull_number}\"],\n updateBranch: [\n \"PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch\"\n ],\n updateReview: [\n \"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"\n ],\n updateReviewComment: [\n \"PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}\"\n ]\n },\n rateLimit: { get: [\"GET /rate_limit\"] },\n reactions: {\n createForCommitComment: [\n \"POST /repos/{owner}/{repo}/comments/{comment_id}/reactions\"\n ],\n createForIssue: [\n \"POST /repos/{owner}/{repo}/issues/{issue_number}/reactions\"\n ],\n createForIssueComment: [\n \"POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\"\n ],\n createForPullRequestReviewComment: [\n \"POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\"\n ],\n createForRelease: [\n \"POST /repos/{owner}/{repo}/releases/{release_id}/reactions\"\n ],\n createForTeamDiscussionCommentInOrg: [\n \"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\"\n ],\n createForTeamDiscussionInOrg: [\n \"POST 
/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\"\n ],\n deleteForCommitComment: [\n \"DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}\"\n ],\n deleteForIssue: [\n \"DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}\"\n ],\n deleteForIssueComment: [\n \"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}\"\n ],\n deleteForPullRequestComment: [\n \"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}\"\n ],\n deleteForRelease: [\n \"DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}\"\n ],\n deleteForTeamDiscussion: [\n \"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}\"\n ],\n deleteForTeamDiscussionComment: [\n \"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}\"\n ],\n listForCommitComment: [\n \"GET /repos/{owner}/{repo}/comments/{comment_id}/reactions\"\n ],\n listForIssue: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/reactions\"],\n listForIssueComment: [\n \"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\"\n ],\n listForPullRequestReviewComment: [\n \"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\"\n ],\n listForRelease: [\n \"GET /repos/{owner}/{repo}/releases/{release_id}/reactions\"\n ],\n listForTeamDiscussionCommentInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\"\n ],\n listForTeamDiscussionInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\"\n ]\n },\n repos: {\n acceptInvitation: [\n \"PATCH /user/repository_invitations/{invitation_id}\",\n {},\n { renamed: [\"repos\", \"acceptInvitationForAuthenticatedUser\"] }\n ],\n acceptInvitationForAuthenticatedUser: [\n \"PATCH /user/repository_invitations/{invitation_id}\"\n ],\n addAppAccessRestrictions: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\",\n {},\n { mapToData: \"apps\" }\n ],\n addCollaborator: [\"PUT /repos/{owner}/{repo}/collaborators/{username}\"],\n addStatusCheckContexts: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\",\n {},\n { mapToData: \"contexts\" }\n ],\n addTeamAccessRestrictions: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\",\n {},\n { mapToData: \"teams\" }\n ],\n addUserAccessRestrictions: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\",\n {},\n { mapToData: \"users\" }\n ],\n checkCollaborator: [\"GET /repos/{owner}/{repo}/collaborators/{username}\"],\n checkVulnerabilityAlerts: [\n \"GET /repos/{owner}/{repo}/vulnerability-alerts\"\n ],\n codeownersErrors: [\"GET /repos/{owner}/{repo}/codeowners/errors\"],\n compareCommits: [\"GET /repos/{owner}/{repo}/compare/{base}...{head}\"],\n compareCommitsWithBasehead: [\n \"GET /repos/{owner}/{repo}/compare/{basehead}\"\n ],\n createAutolink: [\"POST /repos/{owner}/{repo}/autolinks\"],\n createCommitComment: [\n \"POST /repos/{owner}/{repo}/commits/{commit_sha}/comments\"\n ],\n createCommitSignatureProtection: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\"\n ],\n createCommitStatus: [\"POST /repos/{owner}/{repo}/statuses/{sha}\"],\n createDeployKey: [\"POST /repos/{owner}/{repo}/keys\"],\n createDeployment: [\"POST /repos/{owner}/{repo}/deployments\"],\n 
createDeploymentBranchPolicy: [\n \"POST /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies\"\n ],\n createDeploymentProtectionRule: [\n \"POST /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules\"\n ],\n createDeploymentStatus: [\n \"POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\"\n ],\n createDispatchEvent: [\"POST /repos/{owner}/{repo}/dispatches\"],\n createForAuthenticatedUser: [\"POST /user/repos\"],\n createFork: [\"POST /repos/{owner}/{repo}/forks\"],\n createInOrg: [\"POST /orgs/{org}/repos\"],\n createOrUpdateEnvironment: [\n \"PUT /repos/{owner}/{repo}/environments/{environment_name}\"\n ],\n createOrUpdateFileContents: [\"PUT /repos/{owner}/{repo}/contents/{path}\"],\n createOrgRuleset: [\"POST /orgs/{org}/rulesets\"],\n createPagesDeployment: [\"POST /repos/{owner}/{repo}/pages/deployment\"],\n createPagesSite: [\"POST /repos/{owner}/{repo}/pages\"],\n createRelease: [\"POST /repos/{owner}/{repo}/releases\"],\n createRepoRuleset: [\"POST /repos/{owner}/{repo}/rulesets\"],\n createTagProtection: [\"POST /repos/{owner}/{repo}/tags/protection\"],\n createUsingTemplate: [\n \"POST /repos/{template_owner}/{template_repo}/generate\"\n ],\n createWebhook: [\"POST /repos/{owner}/{repo}/hooks\"],\n declineInvitation: [\n \"DELETE /user/repository_invitations/{invitation_id}\",\n {},\n { renamed: [\"repos\", \"declineInvitationForAuthenticatedUser\"] }\n ],\n declineInvitationForAuthenticatedUser: [\n \"DELETE /user/repository_invitations/{invitation_id}\"\n ],\n delete: [\"DELETE /repos/{owner}/{repo}\"],\n deleteAccessRestrictions: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions\"\n ],\n deleteAdminBranchProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"\n ],\n deleteAnEnvironment: [\n \"DELETE /repos/{owner}/{repo}/environments/{environment_name}\"\n ],\n deleteAutolink: [\"DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}\"],\n deleteBranchProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection\"\n ],\n deleteCommitComment: [\"DELETE /repos/{owner}/{repo}/comments/{comment_id}\"],\n deleteCommitSignatureProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\"\n ],\n deleteDeployKey: [\"DELETE /repos/{owner}/{repo}/keys/{key_id}\"],\n deleteDeployment: [\n \"DELETE /repos/{owner}/{repo}/deployments/{deployment_id}\"\n ],\n deleteDeploymentBranchPolicy: [\n \"DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}\"\n ],\n deleteFile: [\"DELETE /repos/{owner}/{repo}/contents/{path}\"],\n deleteInvitation: [\n \"DELETE /repos/{owner}/{repo}/invitations/{invitation_id}\"\n ],\n deleteOrgRuleset: [\"DELETE /orgs/{org}/rulesets/{ruleset_id}\"],\n deletePagesSite: [\"DELETE /repos/{owner}/{repo}/pages\"],\n deletePullRequestReviewProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"\n ],\n deleteRelease: [\"DELETE /repos/{owner}/{repo}/releases/{release_id}\"],\n deleteReleaseAsset: [\n \"DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}\"\n ],\n deleteRepoRuleset: [\"DELETE /repos/{owner}/{repo}/rulesets/{ruleset_id}\"],\n deleteTagProtection: [\n \"DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}\"\n ],\n deleteWebhook: [\"DELETE /repos/{owner}/{repo}/hooks/{hook_id}\"],\n disableAutomatedSecurityFixes: [\n \"DELETE 
/repos/{owner}/{repo}/automated-security-fixes\"\n ],\n disableDeploymentProtectionRule: [\n \"DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}\"\n ],\n disableLfsForRepo: [\"DELETE /repos/{owner}/{repo}/lfs\"],\n disableVulnerabilityAlerts: [\n \"DELETE /repos/{owner}/{repo}/vulnerability-alerts\"\n ],\n downloadArchive: [\n \"GET /repos/{owner}/{repo}/zipball/{ref}\",\n {},\n { renamed: [\"repos\", \"downloadZipballArchive\"] }\n ],\n downloadTarballArchive: [\"GET /repos/{owner}/{repo}/tarball/{ref}\"],\n downloadZipballArchive: [\"GET /repos/{owner}/{repo}/zipball/{ref}\"],\n enableAutomatedSecurityFixes: [\n \"PUT /repos/{owner}/{repo}/automated-security-fixes\"\n ],\n enableLfsForRepo: [\"PUT /repos/{owner}/{repo}/lfs\"],\n enableVulnerabilityAlerts: [\n \"PUT /repos/{owner}/{repo}/vulnerability-alerts\"\n ],\n generateReleaseNotes: [\n \"POST /repos/{owner}/{repo}/releases/generate-notes\"\n ],\n get: [\"GET /repos/{owner}/{repo}\"],\n getAccessRestrictions: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions\"\n ],\n getAdminBranchProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"\n ],\n getAllDeploymentProtectionRules: [\n \"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules\"\n ],\n getAllEnvironments: [\"GET /repos/{owner}/{repo}/environments\"],\n getAllStatusCheckContexts: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\"\n ],\n getAllTopics: [\"GET /repos/{owner}/{repo}/topics\"],\n getAppsWithAccessToProtectedBranch: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\"\n ],\n getAutolink: [\"GET /repos/{owner}/{repo}/autolinks/{autolink_id}\"],\n getBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}\"],\n getBranchProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection\"\n ],\n getBranchRules: [\"GET /repos/{owner}/{repo}/rules/branches/{branch}\"],\n getClones: [\"GET /repos/{owner}/{repo}/traffic/clones\"],\n getCodeFrequencyStats: [\"GET /repos/{owner}/{repo}/stats/code_frequency\"],\n getCollaboratorPermissionLevel: [\n \"GET /repos/{owner}/{repo}/collaborators/{username}/permission\"\n ],\n getCombinedStatusForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/status\"],\n getCommit: [\"GET /repos/{owner}/{repo}/commits/{ref}\"],\n getCommitActivityStats: [\"GET /repos/{owner}/{repo}/stats/commit_activity\"],\n getCommitComment: [\"GET /repos/{owner}/{repo}/comments/{comment_id}\"],\n getCommitSignatureProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\"\n ],\n getCommunityProfileMetrics: [\"GET /repos/{owner}/{repo}/community/profile\"],\n getContent: [\"GET /repos/{owner}/{repo}/contents/{path}\"],\n getContributorsStats: [\"GET /repos/{owner}/{repo}/stats/contributors\"],\n getCustomDeploymentProtectionRule: [\n \"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}\"\n ],\n getDeployKey: [\"GET /repos/{owner}/{repo}/keys/{key_id}\"],\n getDeployment: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}\"],\n getDeploymentBranchPolicy: [\n \"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}\"\n ],\n getDeploymentStatus: [\n \"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}\"\n ],\n getEnvironment: [\n \"GET 
/repos/{owner}/{repo}/environments/{environment_name}\"\n ],\n getLatestPagesBuild: [\"GET /repos/{owner}/{repo}/pages/builds/latest\"],\n getLatestRelease: [\"GET /repos/{owner}/{repo}/releases/latest\"],\n getOrgRuleset: [\"GET /orgs/{org}/rulesets/{ruleset_id}\"],\n getOrgRulesets: [\"GET /orgs/{org}/rulesets\"],\n getPages: [\"GET /repos/{owner}/{repo}/pages\"],\n getPagesBuild: [\"GET /repos/{owner}/{repo}/pages/builds/{build_id}\"],\n getPagesHealthCheck: [\"GET /repos/{owner}/{repo}/pages/health\"],\n getParticipationStats: [\"GET /repos/{owner}/{repo}/stats/participation\"],\n getPullRequestReviewProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"\n ],\n getPunchCardStats: [\"GET /repos/{owner}/{repo}/stats/punch_card\"],\n getReadme: [\"GET /repos/{owner}/{repo}/readme\"],\n getReadmeInDirectory: [\"GET /repos/{owner}/{repo}/readme/{dir}\"],\n getRelease: [\"GET /repos/{owner}/{repo}/releases/{release_id}\"],\n getReleaseAsset: [\"GET /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n getReleaseByTag: [\"GET /repos/{owner}/{repo}/releases/tags/{tag}\"],\n getRepoRuleset: [\"GET /repos/{owner}/{repo}/rulesets/{ruleset_id}\"],\n getRepoRulesets: [\"GET /repos/{owner}/{repo}/rulesets\"],\n getStatusChecksProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"\n ],\n getTeamsWithAccessToProtectedBranch: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\"\n ],\n getTopPaths: [\"GET /repos/{owner}/{repo}/traffic/popular/paths\"],\n getTopReferrers: [\"GET /repos/{owner}/{repo}/traffic/popular/referrers\"],\n getUsersWithAccessToProtectedBranch: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\"\n ],\n getViews: [\"GET /repos/{owner}/{repo}/traffic/views\"],\n getWebhook: [\"GET /repos/{owner}/{repo}/hooks/{hook_id}\"],\n getWebhookConfigForRepo: [\n \"GET /repos/{owner}/{repo}/hooks/{hook_id}/config\"\n ],\n getWebhookDelivery: [\n \"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}\"\n ],\n listAutolinks: [\"GET /repos/{owner}/{repo}/autolinks\"],\n listBranches: [\"GET /repos/{owner}/{repo}/branches\"],\n listBranchesForHeadCommit: [\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head\"\n ],\n listCollaborators: [\"GET /repos/{owner}/{repo}/collaborators\"],\n listCommentsForCommit: [\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/comments\"\n ],\n listCommitCommentsForRepo: [\"GET /repos/{owner}/{repo}/comments\"],\n listCommitStatusesForRef: [\n \"GET /repos/{owner}/{repo}/commits/{ref}/statuses\"\n ],\n listCommits: [\"GET /repos/{owner}/{repo}/commits\"],\n listContributors: [\"GET /repos/{owner}/{repo}/contributors\"],\n listCustomDeploymentRuleIntegrations: [\n \"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps\"\n ],\n listDeployKeys: [\"GET /repos/{owner}/{repo}/keys\"],\n listDeploymentBranchPolicies: [\n \"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies\"\n ],\n listDeploymentStatuses: [\n \"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\"\n ],\n listDeployments: [\"GET /repos/{owner}/{repo}/deployments\"],\n listForAuthenticatedUser: [\"GET /user/repos\"],\n listForOrg: [\"GET /orgs/{org}/repos\"],\n listForUser: [\"GET /users/{username}/repos\"],\n listForks: [\"GET /repos/{owner}/{repo}/forks\"],\n listInvitations: [\"GET /repos/{owner}/{repo}/invitations\"],\n 
listInvitationsForAuthenticatedUser: [\"GET /user/repository_invitations\"],\n listLanguages: [\"GET /repos/{owner}/{repo}/languages\"],\n listPagesBuilds: [\"GET /repos/{owner}/{repo}/pages/builds\"],\n listPublic: [\"GET /repositories\"],\n listPullRequestsAssociatedWithCommit: [\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls\"\n ],\n listReleaseAssets: [\n \"GET /repos/{owner}/{repo}/releases/{release_id}/assets\"\n ],\n listReleases: [\"GET /repos/{owner}/{repo}/releases\"],\n listTagProtection: [\"GET /repos/{owner}/{repo}/tags/protection\"],\n listTags: [\"GET /repos/{owner}/{repo}/tags\"],\n listTeams: [\"GET /repos/{owner}/{repo}/teams\"],\n listWebhookDeliveries: [\n \"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries\"\n ],\n listWebhooks: [\"GET /repos/{owner}/{repo}/hooks\"],\n merge: [\"POST /repos/{owner}/{repo}/merges\"],\n mergeUpstream: [\"POST /repos/{owner}/{repo}/merge-upstream\"],\n pingWebhook: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/pings\"],\n redeliverWebhookDelivery: [\n \"POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts\"\n ],\n removeAppAccessRestrictions: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\",\n {},\n { mapToData: \"apps\" }\n ],\n removeCollaborator: [\n \"DELETE /repos/{owner}/{repo}/collaborators/{username}\"\n ],\n removeStatusCheckContexts: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\",\n {},\n { mapToData: \"contexts\" }\n ],\n removeStatusCheckProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"\n ],\n removeTeamAccessRestrictions: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\",\n {},\n { mapToData: \"teams\" }\n ],\n removeUserAccessRestrictions: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\",\n {},\n { mapToData: \"users\" }\n ],\n renameBranch: [\"POST /repos/{owner}/{repo}/branches/{branch}/rename\"],\n replaceAllTopics: [\"PUT /repos/{owner}/{repo}/topics\"],\n requestPagesBuild: [\"POST /repos/{owner}/{repo}/pages/builds\"],\n setAdminBranchProtection: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"\n ],\n setAppAccessRestrictions: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\",\n {},\n { mapToData: \"apps\" }\n ],\n setStatusCheckContexts: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\",\n {},\n { mapToData: \"contexts\" }\n ],\n setTeamAccessRestrictions: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\",\n {},\n { mapToData: \"teams\" }\n ],\n setUserAccessRestrictions: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\",\n {},\n { mapToData: \"users\" }\n ],\n testPushWebhook: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/tests\"],\n transfer: [\"POST /repos/{owner}/{repo}/transfer\"],\n update: [\"PATCH /repos/{owner}/{repo}\"],\n updateBranchProtection: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection\"\n ],\n updateCommitComment: [\"PATCH /repos/{owner}/{repo}/comments/{comment_id}\"],\n updateDeploymentBranchPolicy: [\n \"PUT /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}\"\n ],\n updateInformationAboutPagesSite: [\"PUT /repos/{owner}/{repo}/pages\"],\n updateInvitation: [\n \"PATCH /repos/{owner}/{repo}/invitations/{invitation_id}\"\n ],\n 
updateOrgRuleset: [\"PUT /orgs/{org}/rulesets/{ruleset_id}\"],\n updatePullRequestReviewProtection: [\n \"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"\n ],\n updateRelease: [\"PATCH /repos/{owner}/{repo}/releases/{release_id}\"],\n updateReleaseAsset: [\n \"PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}\"\n ],\n updateRepoRuleset: [\"PUT /repos/{owner}/{repo}/rulesets/{ruleset_id}\"],\n updateStatusCheckPotection: [\n \"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\",\n {},\n { renamed: [\"repos\", \"updateStatusCheckProtection\"] }\n ],\n updateStatusCheckProtection: [\n \"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"\n ],\n updateWebhook: [\"PATCH /repos/{owner}/{repo}/hooks/{hook_id}\"],\n updateWebhookConfigForRepo: [\n \"PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config\"\n ],\n uploadReleaseAsset: [\n \"POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}\",\n { baseUrl: \"https://uploads.github.com\" }\n ]\n },\n search: {\n code: [\"GET /search/code\"],\n commits: [\"GET /search/commits\"],\n issuesAndPullRequests: [\"GET /search/issues\"],\n labels: [\"GET /search/labels\"],\n repos: [\"GET /search/repositories\"],\n topics: [\"GET /search/topics\"],\n users: [\"GET /search/users\"]\n },\n secretScanning: {\n getAlert: [\n \"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}\"\n ],\n listAlertsForEnterprise: [\n \"GET /enterprises/{enterprise}/secret-scanning/alerts\"\n ],\n listAlertsForOrg: [\"GET /orgs/{org}/secret-scanning/alerts\"],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/secret-scanning/alerts\"],\n listLocationsForAlert: [\n \"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations\"\n ],\n updateAlert: [\n \"PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}\"\n ]\n },\n securityAdvisories: {\n createPrivateVulnerabilityReport: [\n \"POST /repos/{owner}/{repo}/security-advisories/reports\"\n ],\n createRepositoryAdvisory: [\n \"POST /repos/{owner}/{repo}/security-advisories\"\n ],\n getRepositoryAdvisory: [\n \"GET /repos/{owner}/{repo}/security-advisories/{ghsa_id}\"\n ],\n listRepositoryAdvisories: [\"GET /repos/{owner}/{repo}/security-advisories\"],\n updateRepositoryAdvisory: [\n \"PATCH /repos/{owner}/{repo}/security-advisories/{ghsa_id}\"\n ]\n },\n teams: {\n addOrUpdateMembershipForUserInOrg: [\n \"PUT /orgs/{org}/teams/{team_slug}/memberships/{username}\"\n ],\n addOrUpdateProjectPermissionsInOrg: [\n \"PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}\"\n ],\n addOrUpdateRepoPermissionsInOrg: [\n \"PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"\n ],\n checkPermissionsForProjectInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/projects/{project_id}\"\n ],\n checkPermissionsForRepoInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"\n ],\n create: [\"POST /orgs/{org}/teams\"],\n createDiscussionCommentInOrg: [\n \"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\"\n ],\n createDiscussionInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions\"],\n deleteDiscussionCommentInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"\n ],\n deleteDiscussionInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"\n ],\n deleteInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}\"],\n getByName: [\"GET /orgs/{org}/teams/{team_slug}\"],\n 
getDiscussionCommentInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"\n ],\n getDiscussionInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"\n ],\n getMembershipForUserInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/memberships/{username}\"\n ],\n list: [\"GET /orgs/{org}/teams\"],\n listChildInOrg: [\"GET /orgs/{org}/teams/{team_slug}/teams\"],\n listDiscussionCommentsInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\"\n ],\n listDiscussionsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions\"],\n listForAuthenticatedUser: [\"GET /user/teams\"],\n listMembersInOrg: [\"GET /orgs/{org}/teams/{team_slug}/members\"],\n listPendingInvitationsInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/invitations\"\n ],\n listProjectsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/projects\"],\n listReposInOrg: [\"GET /orgs/{org}/teams/{team_slug}/repos\"],\n removeMembershipForUserInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}\"\n ],\n removeProjectInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}\"\n ],\n removeRepoInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"\n ],\n updateDiscussionCommentInOrg: [\n \"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"\n ],\n updateDiscussionInOrg: [\n \"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"\n ],\n updateInOrg: [\"PATCH /orgs/{org}/teams/{team_slug}\"]\n },\n users: {\n addEmailForAuthenticated: [\n \"POST /user/emails\",\n {},\n { renamed: [\"users\", \"addEmailForAuthenticatedUser\"] }\n ],\n addEmailForAuthenticatedUser: [\"POST /user/emails\"],\n addSocialAccountForAuthenticatedUser: [\"POST /user/social_accounts\"],\n block: [\"PUT /user/blocks/{username}\"],\n checkBlocked: [\"GET /user/blocks/{username}\"],\n checkFollowingForUser: [\"GET /users/{username}/following/{target_user}\"],\n checkPersonIsFollowedByAuthenticated: [\"GET /user/following/{username}\"],\n createGpgKeyForAuthenticated: [\n \"POST /user/gpg_keys\",\n {},\n { renamed: [\"users\", \"createGpgKeyForAuthenticatedUser\"] }\n ],\n createGpgKeyForAuthenticatedUser: [\"POST /user/gpg_keys\"],\n createPublicSshKeyForAuthenticated: [\n \"POST /user/keys\",\n {},\n { renamed: [\"users\", \"createPublicSshKeyForAuthenticatedUser\"] }\n ],\n createPublicSshKeyForAuthenticatedUser: [\"POST /user/keys\"],\n createSshSigningKeyForAuthenticatedUser: [\"POST /user/ssh_signing_keys\"],\n deleteEmailForAuthenticated: [\n \"DELETE /user/emails\",\n {},\n { renamed: [\"users\", \"deleteEmailForAuthenticatedUser\"] }\n ],\n deleteEmailForAuthenticatedUser: [\"DELETE /user/emails\"],\n deleteGpgKeyForAuthenticated: [\n \"DELETE /user/gpg_keys/{gpg_key_id}\",\n {},\n { renamed: [\"users\", \"deleteGpgKeyForAuthenticatedUser\"] }\n ],\n deleteGpgKeyForAuthenticatedUser: [\"DELETE /user/gpg_keys/{gpg_key_id}\"],\n deletePublicSshKeyForAuthenticated: [\n \"DELETE /user/keys/{key_id}\",\n {},\n { renamed: [\"users\", \"deletePublicSshKeyForAuthenticatedUser\"] }\n ],\n deletePublicSshKeyForAuthenticatedUser: [\"DELETE /user/keys/{key_id}\"],\n deleteSocialAccountForAuthenticatedUser: [\"DELETE /user/social_accounts\"],\n deleteSshSigningKeyForAuthenticatedUser: [\n \"DELETE /user/ssh_signing_keys/{ssh_signing_key_id}\"\n ],\n follow: [\"PUT /user/following/{username}\"],\n getAuthenticated: [\"GET /user\"],\n getByUsername: [\"GET 
/users/{username}\"],\n getContextForUser: [\"GET /users/{username}/hovercard\"],\n getGpgKeyForAuthenticated: [\n \"GET /user/gpg_keys/{gpg_key_id}\",\n {},\n { renamed: [\"users\", \"getGpgKeyForAuthenticatedUser\"] }\n ],\n getGpgKeyForAuthenticatedUser: [\"GET /user/gpg_keys/{gpg_key_id}\"],\n getPublicSshKeyForAuthenticated: [\n \"GET /user/keys/{key_id}\",\n {},\n { renamed: [\"users\", \"getPublicSshKeyForAuthenticatedUser\"] }\n ],\n getPublicSshKeyForAuthenticatedUser: [\"GET /user/keys/{key_id}\"],\n getSshSigningKeyForAuthenticatedUser: [\n \"GET /user/ssh_signing_keys/{ssh_signing_key_id}\"\n ],\n list: [\"GET /users\"],\n listBlockedByAuthenticated: [\n \"GET /user/blocks\",\n {},\n { renamed: [\"users\", \"listBlockedByAuthenticatedUser\"] }\n ],\n listBlockedByAuthenticatedUser: [\"GET /user/blocks\"],\n listEmailsForAuthenticated: [\n \"GET /user/emails\",\n {},\n { renamed: [\"users\", \"listEmailsForAuthenticatedUser\"] }\n ],\n listEmailsForAuthenticatedUser: [\"GET /user/emails\"],\n listFollowedByAuthenticated: [\n \"GET /user/following\",\n {},\n { renamed: [\"users\", \"listFollowedByAuthenticatedUser\"] }\n ],\n listFollowedByAuthenticatedUser: [\"GET /user/following\"],\n listFollowersForAuthenticatedUser: [\"GET /user/followers\"],\n listFollowersForUser: [\"GET /users/{username}/followers\"],\n listFollowingForUser: [\"GET /users/{username}/following\"],\n listGpgKeysForAuthenticated: [\n \"GET /user/gpg_keys\",\n {},\n { renamed: [\"users\", \"listGpgKeysForAuthenticatedUser\"] }\n ],\n listGpgKeysForAuthenticatedUser: [\"GET /user/gpg_keys\"],\n listGpgKeysForUser: [\"GET /users/{username}/gpg_keys\"],\n listPublicEmailsForAuthenticated: [\n \"GET /user/public_emails\",\n {},\n { renamed: [\"users\", \"listPublicEmailsForAuthenticatedUser\"] }\n ],\n listPublicEmailsForAuthenticatedUser: [\"GET /user/public_emails\"],\n listPublicKeysForUser: [\"GET /users/{username}/keys\"],\n listPublicSshKeysForAuthenticated: [\n \"GET /user/keys\",\n {},\n { renamed: [\"users\", \"listPublicSshKeysForAuthenticatedUser\"] }\n ],\n listPublicSshKeysForAuthenticatedUser: [\"GET /user/keys\"],\n listSocialAccountsForAuthenticatedUser: [\"GET /user/social_accounts\"],\n listSocialAccountsForUser: [\"GET /users/{username}/social_accounts\"],\n listSshSigningKeysForAuthenticatedUser: [\"GET /user/ssh_signing_keys\"],\n listSshSigningKeysForUser: [\"GET /users/{username}/ssh_signing_keys\"],\n setPrimaryEmailVisibilityForAuthenticated: [\n \"PATCH /user/email/visibility\",\n {},\n { renamed: [\"users\", \"setPrimaryEmailVisibilityForAuthenticatedUser\"] }\n ],\n setPrimaryEmailVisibilityForAuthenticatedUser: [\n \"PATCH /user/email/visibility\"\n ],\n unblock: [\"DELETE /user/blocks/{username}\"],\n unfollow: [\"DELETE /user/following/{username}\"],\n updateAuthenticated: [\"PATCH /user\"]\n }\n};\nvar endpoints_default = Endpoints;\n\n// pkg/dist-src/endpoints-to-methods.js\nvar endpointMethodsMap = /* @__PURE__ */ new Map();\nfor (const [scope, endpoints] of Object.entries(endpoints_default)) {\n for (const [methodName, endpoint] of Object.entries(endpoints)) {\n const [route, defaults, decorations] = endpoint;\n const [method, url] = route.split(/ /);\n const endpointDefaults = Object.assign(\n {\n method,\n url\n },\n defaults\n );\n if (!endpointMethodsMap.has(scope)) {\n endpointMethodsMap.set(scope, /* @__PURE__ */ new Map());\n }\n endpointMethodsMap.get(scope).set(methodName, {\n scope,\n methodName,\n endpointDefaults,\n decorations\n });\n }\n}\nvar handler = {\n get({ 
octokit, scope, cache }, methodName) {\n if (cache[methodName]) {\n return cache[methodName];\n }\n const { decorations, endpointDefaults } = endpointMethodsMap.get(scope).get(methodName);\n if (decorations) {\n cache[methodName] = decorate(\n octokit,\n scope,\n methodName,\n endpointDefaults,\n decorations\n );\n } else {\n cache[methodName] = octokit.request.defaults(endpointDefaults);\n }\n return cache[methodName];\n }\n};\nfunction endpointsToMethods(octokit) {\n const newMethods = {};\n for (const scope of endpointMethodsMap.keys()) {\n newMethods[scope] = new Proxy({ octokit, scope, cache: {} }, handler);\n }\n return newMethods;\n}\nfunction decorate(octokit, scope, methodName, defaults, decorations) {\n const requestWithDefaults = octokit.request.defaults(defaults);\n function withDecorations(...args) {\n let options = requestWithDefaults.endpoint.merge(...args);\n if (decorations.mapToData) {\n options = Object.assign({}, options, {\n data: options[decorations.mapToData],\n [decorations.mapToData]: void 0\n });\n return requestWithDefaults(options);\n }\n if (decorations.renamed) {\n const [newScope, newMethodName] = decorations.renamed;\n octokit.log.warn(\n `octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`\n );\n }\n if (decorations.deprecated) {\n octokit.log.warn(decorations.deprecated);\n }\n if (decorations.renamedParameters) {\n const options2 = requestWithDefaults.endpoint.merge(...args);\n for (const [name, alias] of Object.entries(\n decorations.renamedParameters\n )) {\n if (name in options2) {\n octokit.log.warn(\n `\"${name}\" parameter is deprecated for \"octokit.${scope}.${methodName}()\". Use \"${alias}\" instead`\n );\n if (!(alias in options2)) {\n options2[alias] = options2[name];\n }\n delete options2[name];\n }\n }\n return requestWithDefaults(options2);\n }\n return requestWithDefaults(...args);\n }\n return Object.assign(withDecorations, requestWithDefaults);\n}\n\n// pkg/dist-src/index.js\nfunction restEndpointMethods(octokit) {\n const api = endpointsToMethods(octokit);\n return {\n rest: api\n };\n}\nrestEndpointMethods.VERSION = VERSION;\nfunction legacyRestEndpointMethods(octokit) {\n const api = endpointsToMethods(octokit);\n return {\n ...api,\n rest: api\n };\n}\nlegacyRestEndpointMethods.VERSION = VERSION;\n// Annotate the CommonJS export names for ESM import in node:\n0 && (module.exports = {\n legacyRestEndpointMethods,\n restEndpointMethods\n});\n","\"use strict\";\nvar __create = Object.create;\nvar __defProp = Object.defineProperty;\nvar __getOwnPropDesc = Object.getOwnPropertyDescriptor;\nvar __getOwnPropNames = Object.getOwnPropertyNames;\nvar __getProtoOf = Object.getPrototypeOf;\nvar __hasOwnProp = Object.prototype.hasOwnProperty;\nvar __export = (target, all) => {\n for (var name in all)\n __defProp(target, name, { get: all[name], enumerable: true });\n};\nvar __copyProps = (to, from, except, desc) => {\n if (from && typeof from === \"object\" || typeof from === \"function\") {\n for (let key of __getOwnPropNames(from))\n if (!__hasOwnProp.call(to, key) && key !== except)\n __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });\n }\n return to;\n};\nvar __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(\n // If the importer is in node compatibility mode or this is not an ESM\n // file that has been converted to a CommonJS file using a Babel-\n // compatible transform (i.e. 
\"__esModule\" has not been set), then set\n // \"default\" to the CommonJS \"module.exports\" for node compatibility.\n isNodeMode || !mod || !mod.__esModule ? __defProp(target, \"default\", { value: mod, enumerable: true }) : target,\n mod\n));\nvar __toCommonJS = (mod) => __copyProps(__defProp({}, \"__esModule\", { value: true }), mod);\n\n// pkg/dist-src/index.js\nvar dist_src_exports = {};\n__export(dist_src_exports, {\n RequestError: () => RequestError\n});\nmodule.exports = __toCommonJS(dist_src_exports);\nvar import_deprecation = require(\"deprecation\");\nvar import_once = __toESM(require(\"once\"));\nvar logOnceCode = (0, import_once.default)((deprecation) => console.warn(deprecation));\nvar logOnceHeaders = (0, import_once.default)((deprecation) => console.warn(deprecation));\nvar RequestError = class extends Error {\n constructor(message, statusCode, options) {\n super(message);\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n this.name = \"HttpError\";\n this.status = statusCode;\n let headers;\n if (\"headers\" in options && typeof options.headers !== \"undefined\") {\n headers = options.headers;\n }\n if (\"response\" in options) {\n this.response = options.response;\n headers = options.response.headers;\n }\n const requestCopy = Object.assign({}, options.request);\n if (options.request.headers.authorization) {\n requestCopy.headers = Object.assign({}, options.request.headers, {\n authorization: options.request.headers.authorization.replace(\n / .*$/,\n \" [REDACTED]\"\n )\n });\n }\n requestCopy.url = requestCopy.url.replace(/\\bclient_secret=\\w+/g, \"client_secret=[REDACTED]\").replace(/\\baccess_token=\\w+/g, \"access_token=[REDACTED]\");\n this.request = requestCopy;\n Object.defineProperty(this, \"code\", {\n get() {\n logOnceCode(\n new import_deprecation.Deprecation(\n \"[@octokit/request-error] `error.code` is deprecated, use `error.status`.\"\n )\n );\n return statusCode;\n }\n });\n Object.defineProperty(this, \"headers\", {\n get() {\n logOnceHeaders(\n new import_deprecation.Deprecation(\n \"[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`.\"\n )\n );\n return headers || {};\n }\n });\n }\n};\n// Annotate the CommonJS export names for ESM import in node:\n0 && (module.exports = {\n RequestError\n});\n","\"use strict\";\nvar __defProp = Object.defineProperty;\nvar __getOwnPropDesc = Object.getOwnPropertyDescriptor;\nvar __getOwnPropNames = Object.getOwnPropertyNames;\nvar __hasOwnProp = Object.prototype.hasOwnProperty;\nvar __export = (target, all) => {\n for (var name in all)\n __defProp(target, name, { get: all[name], enumerable: true });\n};\nvar __copyProps = (to, from, except, desc) => {\n if (from && typeof from === \"object\" || typeof from === \"function\") {\n for (let key of __getOwnPropNames(from))\n if (!__hasOwnProp.call(to, key) && key !== except)\n __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });\n }\n return to;\n};\nvar __toCommonJS = (mod) => __copyProps(__defProp({}, \"__esModule\", { value: true }), mod);\n\n// pkg/dist-src/index.js\nvar dist_src_exports = {};\n__export(dist_src_exports, {\n request: () => request\n});\nmodule.exports = __toCommonJS(dist_src_exports);\nvar import_endpoint = require(\"@octokit/endpoint\");\nvar import_universal_user_agent = require(\"universal-user-agent\");\n\n// pkg/dist-src/version.js\nvar VERSION = \"8.1.0\";\n\n// pkg/dist-src/fetch-wrapper.js\nvar 
import_is_plain_object = require(\"is-plain-object\");\nvar import_request_error = require(\"@octokit/request-error\");\n\n// pkg/dist-src/get-buffer-response.js\nfunction getBufferResponse(response) {\n return response.arrayBuffer();\n}\n\n// pkg/dist-src/fetch-wrapper.js\nfunction fetchWrapper(requestOptions) {\n var _a, _b, _c;\n const log = requestOptions.request && requestOptions.request.log ? requestOptions.request.log : console;\n const parseSuccessResponseBody = ((_a = requestOptions.request) == null ? void 0 : _a.parseSuccessResponseBody) !== false;\n if ((0, import_is_plain_object.isPlainObject)(requestOptions.body) || Array.isArray(requestOptions.body)) {\n requestOptions.body = JSON.stringify(requestOptions.body);\n }\n let headers = {};\n let status;\n let url;\n let { fetch } = globalThis;\n if ((_b = requestOptions.request) == null ? void 0 : _b.fetch) {\n fetch = requestOptions.request.fetch;\n }\n if (!fetch) {\n throw new Error(\n 'Global \"fetch\" not found. Please provide `options.request.fetch` to octokit or upgrade to node@18 or newer.'\n );\n }\n return fetch(requestOptions.url, {\n method: requestOptions.method,\n body: requestOptions.body,\n headers: requestOptions.headers,\n signal: (_c = requestOptions.request) == null ? void 0 : _c.signal,\n // duplex must be set if request.body is ReadableStream or Async Iterables.\n // See https://fetch.spec.whatwg.org/#dom-requestinit-duplex.\n ...requestOptions.body && { duplex: \"half\" }\n }).then(async (response) => {\n url = response.url;\n status = response.status;\n for (const keyAndValue of response.headers) {\n headers[keyAndValue[0]] = keyAndValue[1];\n }\n if (\"deprecation\" in headers) {\n const matches = headers.link && headers.link.match(/<([^>]+)>; rel=\"deprecation\"/);\n const deprecationLink = matches && matches.pop();\n log.warn(\n `[@octokit/request] \"${requestOptions.method} ${requestOptions.url}\" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : \"\"}`\n );\n }\n if (status === 204 || status === 205) {\n return;\n }\n if (requestOptions.method === \"HEAD\") {\n if (status < 400) {\n return;\n }\n throw new import_request_error.RequestError(response.statusText, status, {\n response: {\n url,\n status,\n headers,\n data: void 0\n },\n request: requestOptions\n });\n }\n if (status === 304) {\n throw new import_request_error.RequestError(\"Not modified\", status, {\n response: {\n url,\n status,\n headers,\n data: await getResponseData(response)\n },\n request: requestOptions\n });\n }\n if (status >= 400) {\n const data = await getResponseData(response);\n const error = new import_request_error.RequestError(toErrorMessage(data), status, {\n response: {\n url,\n status,\n headers,\n data\n },\n request: requestOptions\n });\n throw error;\n }\n return parseSuccessResponseBody ? 
await getResponseData(response) : response.body;\n }).then((data) => {\n return {\n status,\n url,\n headers,\n data\n };\n }).catch((error) => {\n if (error instanceof import_request_error.RequestError)\n throw error;\n else if (error.name === \"AbortError\")\n throw error;\n throw new import_request_error.RequestError(error.message, 500, {\n request: requestOptions\n });\n });\n}\nasync function getResponseData(response) {\n const contentType = response.headers.get(\"content-type\");\n if (/application\\/json/.test(contentType)) {\n return response.json();\n }\n if (!contentType || /^text\\/|charset=utf-8$/.test(contentType)) {\n return response.text();\n }\n return getBufferResponse(response);\n}\nfunction toErrorMessage(data) {\n if (typeof data === \"string\")\n return data;\n if (\"message\" in data) {\n if (Array.isArray(data.errors)) {\n return `${data.message}: ${data.errors.map(JSON.stringify).join(\", \")}`;\n }\n return data.message;\n }\n return `Unknown error: ${JSON.stringify(data)}`;\n}\n\n// pkg/dist-src/with-defaults.js\nfunction withDefaults(oldEndpoint, newDefaults) {\n const endpoint2 = oldEndpoint.defaults(newDefaults);\n const newApi = function(route, parameters) {\n const endpointOptions = endpoint2.merge(route, parameters);\n if (!endpointOptions.request || !endpointOptions.request.hook) {\n return fetchWrapper(endpoint2.parse(endpointOptions));\n }\n const request2 = (route2, parameters2) => {\n return fetchWrapper(\n endpoint2.parse(endpoint2.merge(route2, parameters2))\n );\n };\n Object.assign(request2, {\n endpoint: endpoint2,\n defaults: withDefaults.bind(null, endpoint2)\n });\n return endpointOptions.request.hook(request2, endpointOptions);\n };\n return Object.assign(newApi, {\n endpoint: endpoint2,\n defaults: withDefaults.bind(null, endpoint2)\n });\n}\n\n// pkg/dist-src/index.js\nvar request = withDefaults(import_endpoint.endpoint, {\n headers: {\n \"user-agent\": `octokit-request.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`\n }\n});\n// Annotate the CommonJS export names for ESM import in node:\n0 && (module.exports = {\n request\n});\n","var register = require(\"./lib/register\");\nvar addHook = require(\"./lib/add\");\nvar removeHook = require(\"./lib/remove\");\n\n// bind with array of arguments: https://stackoverflow.com/a/21792913\nvar bind = Function.bind;\nvar bindable = bind.bind(bind);\n\nfunction bindApi(hook, state, name) {\n var removeHookRef = bindable(removeHook, null).apply(\n null,\n name ? [state, name] : [state]\n );\n hook.api = { remove: removeHookRef };\n hook.remove = removeHookRef;\n [\"before\", \"error\", \"after\", \"wrap\"].forEach(function (kind) {\n var args = name ? [state, kind, name] : [state, kind];\n hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args);\n });\n}\n\nfunction HookSingular() {\n var singularHookName = \"h\";\n var singularHookState = {\n registry: {},\n };\n var singularHook = register.bind(null, singularHookState, singularHookName);\n bindApi(singularHook, singularHookState, singularHookName);\n return singularHook;\n}\n\nfunction HookCollection() {\n var state = {\n registry: {},\n };\n\n var hook = register.bind(null, state);\n bindApi(hook, state);\n\n return hook;\n}\n\nvar collectionHookDeprecationMessageDisplayed = false;\nfunction Hook() {\n if (!collectionHookDeprecationMessageDisplayed) {\n console.warn(\n '[before-after-hook]: \"Hook()\" repurposing warning, use \"Hook.Collection()\". 
Read more: https://git.io/upgrade-before-after-hook-to-1.4'\n );\n collectionHookDeprecationMessageDisplayed = true;\n }\n return HookCollection();\n}\n\nHook.Singular = HookSingular.bind();\nHook.Collection = HookCollection.bind();\n\nmodule.exports = Hook;\n// expose constructors as a named property for TypeScript\nmodule.exports.Hook = Hook;\nmodule.exports.Singular = Hook.Singular;\nmodule.exports.Collection = Hook.Collection;\n","module.exports = addHook;\n\nfunction addHook(state, kind, name, hook) {\n var orig = hook;\n if (!state.registry[name]) {\n state.registry[name] = [];\n }\n\n if (kind === \"before\") {\n hook = function (method, options) {\n return Promise.resolve()\n .then(orig.bind(null, options))\n .then(method.bind(null, options));\n };\n }\n\n if (kind === \"after\") {\n hook = function (method, options) {\n var result;\n return Promise.resolve()\n .then(method.bind(null, options))\n .then(function (result_) {\n result = result_;\n return orig(result, options);\n })\n .then(function () {\n return result;\n });\n };\n }\n\n if (kind === \"error\") {\n hook = function (method, options) {\n return Promise.resolve()\n .then(method.bind(null, options))\n .catch(function (error) {\n return orig(error, options);\n });\n };\n }\n\n state.registry[name].push({\n hook: hook,\n orig: orig,\n });\n}\n","module.exports = register;\n\nfunction register(state, name, method, options) {\n if (typeof method !== \"function\") {\n throw new Error(\"method for before hook must be a function\");\n }\n\n if (!options) {\n options = {};\n }\n\n if (Array.isArray(name)) {\n return name.reverse().reduce(function (callback, name) {\n return register.bind(null, state, name, callback, options);\n }, method)();\n }\n\n return Promise.resolve().then(function () {\n if (!state.registry[name]) {\n return method(options);\n }\n\n return state.registry[name].reduce(function (method, registered) {\n return registered.hook.bind(null, method, options);\n }, method)();\n });\n}\n","module.exports = removeHook;\n\nfunction removeHook(state, name, method) {\n if (!state.registry[name]) {\n return;\n }\n\n var index = state.registry[name]\n .map(function (registered) {\n return registered.orig;\n })\n .indexOf(method);\n\n if (index === -1) {\n return;\n }\n\n state.registry[name].splice(index, 1);\n}\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nclass Deprecation extends Error {\n constructor(message) {\n super(message); // Maintains proper stack trace (only available on V8)\n\n /* istanbul ignore next */\n\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n\n this.name = 'Deprecation';\n }\n\n}\n\nexports.Deprecation = Deprecation;\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\n/*!\n * is-plain-object \n *\n * Copyright (c) 2014-2017, Jon Schlinkert.\n * Released under the MIT License.\n */\n\nfunction isObject(o) {\n return Object.prototype.toString.call(o) === '[object Object]';\n}\n\nfunction isPlainObject(o) {\n var ctor,prot;\n\n if (isObject(o) === false) return false;\n\n // If has modified constructor\n ctor = o.constructor;\n if (ctor === undefined) return true;\n\n // If has modified prototype\n prot = ctor.prototype;\n if (isObject(prot) === false) return false;\n\n // If constructor does not have an Object-specific method\n if (prot.hasOwnProperty('isPrototypeOf') === false) {\n return false;\n }\n\n // Most likely a plain Object\n return true;\n}\n\nexports.isPlainObject = 
isPlainObject;\n","/*! node-domexception. MIT License. Jimmy Wärting */\n\nif (!globalThis.DOMException) {\n try {\n const { MessageChannel } = require('worker_threads'),\n port = new MessageChannel().port1,\n ab = new ArrayBuffer()\n port.postMessage(ab, [ab, ab])\n } catch (err) {\n err.constructor.name === 'DOMException' && (\n globalThis.DOMException = err.constructor\n )\n }\n}\n\nmodule.exports = globalThis.DOMException\n","var wrappy = require('wrappy')\nmodule.exports = wrappy(once)\nmodule.exports.strict = wrappy(onceStrict)\n\nonce.proto = once(function () {\n Object.defineProperty(Function.prototype, 'once', {\n value: function () {\n return once(this)\n },\n configurable: true\n })\n\n Object.defineProperty(Function.prototype, 'onceStrict', {\n value: function () {\n return onceStrict(this)\n },\n configurable: true\n })\n})\n\nfunction once (fn) {\n var f = function () {\n if (f.called) return f.value\n f.called = true\n return f.value = fn.apply(this, arguments)\n }\n f.called = false\n return f\n}\n\nfunction onceStrict (fn) {\n var f = function () {\n if (f.called)\n throw new Error(f.onceError)\n f.called = true\n return f.value = fn.apply(this, arguments)\n }\n var name = fn.name || 'Function wrapped with `once`'\n f.onceError = name + \" shouldn't be called more than once\"\n f.called = false\n return f\n}\n","exports = module.exports = SemVer\n\nvar debug\n/* istanbul ignore next */\nif (typeof process === 'object' &&\n process.env &&\n process.env.NODE_DEBUG &&\n /\\bsemver\\b/i.test(process.env.NODE_DEBUG)) {\n debug = function () {\n var args = Array.prototype.slice.call(arguments, 0)\n args.unshift('SEMVER')\n console.log.apply(console, args)\n }\n} else {\n debug = function () {}\n}\n\n// Note: this is the semver.org version of the spec that it implements\n// Not necessarily the package version of this code.\nexports.SEMVER_SPEC_VERSION = '2.0.0'\n\nvar MAX_LENGTH = 256\nvar MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER ||\n /* istanbul ignore next */ 9007199254740991\n\n// Max safe segment length for coercion.\nvar MAX_SAFE_COMPONENT_LENGTH = 16\n\n// The actual regexps go on exports.re\nvar re = exports.re = []\nvar src = exports.src = []\nvar t = exports.tokens = {}\nvar R = 0\n\nfunction tok (n) {\n t[n] = R++\n}\n\n// The following Regular Expressions can be used for tokenizing,\n// validating, and parsing SemVer version strings.\n\n// ## Numeric Identifier\n// A single `0`, or a non-zero digit followed by zero or more digits.\n\ntok('NUMERICIDENTIFIER')\nsrc[t.NUMERICIDENTIFIER] = '0|[1-9]\\\\d*'\ntok('NUMERICIDENTIFIERLOOSE')\nsrc[t.NUMERICIDENTIFIERLOOSE] = '[0-9]+'\n\n// ## Non-numeric Identifier\n// Zero or more digits, followed by a letter or hyphen, and then zero or\n// more letters, digits, or hyphens.\n\ntok('NONNUMERICIDENTIFIER')\nsrc[t.NONNUMERICIDENTIFIER] = '\\\\d*[a-zA-Z-][a-zA-Z0-9-]*'\n\n// ## Main Version\n// Three dot-separated numeric identifiers.\n\ntok('MAINVERSION')\nsrc[t.MAINVERSION] = '(' + src[t.NUMERICIDENTIFIER] + ')\\\\.' +\n '(' + src[t.NUMERICIDENTIFIER] + ')\\\\.' +\n '(' + src[t.NUMERICIDENTIFIER] + ')'\n\ntok('MAINVERSIONLOOSE')\nsrc[t.MAINVERSIONLOOSE] = '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\\\.' +\n '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\\\.' 
+\n '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')'\n\n// ## Pre-release Version Identifier\n// A numeric identifier, or a non-numeric identifier.\n\ntok('PRERELEASEIDENTIFIER')\nsrc[t.PRERELEASEIDENTIFIER] = '(?:' + src[t.NUMERICIDENTIFIER] +\n '|' + src[t.NONNUMERICIDENTIFIER] + ')'\n\ntok('PRERELEASEIDENTIFIERLOOSE')\nsrc[t.PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[t.NUMERICIDENTIFIERLOOSE] +\n '|' + src[t.NONNUMERICIDENTIFIER] + ')'\n\n// ## Pre-release Version\n// Hyphen, followed by one or more dot-separated pre-release version\n// identifiers.\n\ntok('PRERELEASE')\nsrc[t.PRERELEASE] = '(?:-(' + src[t.PRERELEASEIDENTIFIER] +\n '(?:\\\\.' + src[t.PRERELEASEIDENTIFIER] + ')*))'\n\ntok('PRERELEASELOOSE')\nsrc[t.PRERELEASELOOSE] = '(?:-?(' + src[t.PRERELEASEIDENTIFIERLOOSE] +\n '(?:\\\\.' + src[t.PRERELEASEIDENTIFIERLOOSE] + ')*))'\n\n// ## Build Metadata Identifier\n// Any combination of digits, letters, or hyphens.\n\ntok('BUILDIDENTIFIER')\nsrc[t.BUILDIDENTIFIER] = '[0-9A-Za-z-]+'\n\n// ## Build Metadata\n// Plus sign, followed by one or more period-separated build metadata\n// identifiers.\n\ntok('BUILD')\nsrc[t.BUILD] = '(?:\\\\+(' + src[t.BUILDIDENTIFIER] +\n '(?:\\\\.' + src[t.BUILDIDENTIFIER] + ')*))'\n\n// ## Full Version String\n// A main version, followed optionally by a pre-release version and\n// build metadata.\n\n// Note that the only major, minor, patch, and pre-release sections of\n// the version string are capturing groups. The build metadata is not a\n// capturing group, because it should not ever be used in version\n// comparison.\n\ntok('FULL')\ntok('FULLPLAIN')\nsrc[t.FULLPLAIN] = 'v?' + src[t.MAINVERSION] +\n src[t.PRERELEASE] + '?' +\n src[t.BUILD] + '?'\n\nsrc[t.FULL] = '^' + src[t.FULLPLAIN] + '$'\n\n// like full, but allows v1.2.3 and =1.2.3, which people do sometimes.\n// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty\n// common in the npm registry.\ntok('LOOSEPLAIN')\nsrc[t.LOOSEPLAIN] = '[v=\\\\s]*' + src[t.MAINVERSIONLOOSE] +\n src[t.PRERELEASELOOSE] + '?' +\n src[t.BUILD] + '?'\n\ntok('LOOSE')\nsrc[t.LOOSE] = '^' + src[t.LOOSEPLAIN] + '$'\n\ntok('GTLT')\nsrc[t.GTLT] = '((?:<|>)?=?)'\n\n// Something like \"2.*\" or \"1.2.x\".\n// Note that \"x.x\" is a valid xRange identifer, meaning \"any version\"\n// Only the first item is strictly required.\ntok('XRANGEIDENTIFIERLOOSE')\nsrc[t.XRANGEIDENTIFIERLOOSE] = src[t.NUMERICIDENTIFIERLOOSE] + '|x|X|\\\\*'\ntok('XRANGEIDENTIFIER')\nsrc[t.XRANGEIDENTIFIER] = src[t.NUMERICIDENTIFIER] + '|x|X|\\\\*'\n\ntok('XRANGEPLAIN')\nsrc[t.XRANGEPLAIN] = '[v=\\\\s]*(' + src[t.XRANGEIDENTIFIER] + ')' +\n '(?:\\\\.(' + src[t.XRANGEIDENTIFIER] + ')' +\n '(?:\\\\.(' + src[t.XRANGEIDENTIFIER] + ')' +\n '(?:' + src[t.PRERELEASE] + ')?' +\n src[t.BUILD] + '?' +\n ')?)?'\n\ntok('XRANGEPLAINLOOSE')\nsrc[t.XRANGEPLAINLOOSE] = '[v=\\\\s]*(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' +\n '(?:\\\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' +\n '(?:\\\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' +\n '(?:' + src[t.PRERELEASELOOSE] + ')?' +\n src[t.BUILD] + '?' +\n ')?)?'\n\ntok('XRANGE')\nsrc[t.XRANGE] = '^' + src[t.GTLT] + '\\\\s*' + src[t.XRANGEPLAIN] + '$'\ntok('XRANGELOOSE')\nsrc[t.XRANGELOOSE] = '^' + src[t.GTLT] + '\\\\s*' + src[t.XRANGEPLAINLOOSE] + '$'\n\n// Coercion.\n// Extract anything that could conceivably be a part of a valid semver\ntok('COERCE')\nsrc[t.COERCE] = '(^|[^\\\\d])' +\n '(\\\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' +\n '(?:\\\\.(\\\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' 
+\n '(?:\\\\.(\\\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' +\n '(?:$|[^\\\\d])'\ntok('COERCERTL')\nre[t.COERCERTL] = new RegExp(src[t.COERCE], 'g')\n\n// Tilde ranges.\n// Meaning is \"reasonably at or greater than\"\ntok('LONETILDE')\nsrc[t.LONETILDE] = '(?:~>?)'\n\ntok('TILDETRIM')\nsrc[t.TILDETRIM] = '(\\\\s*)' + src[t.LONETILDE] + '\\\\s+'\nre[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], 'g')\nvar tildeTrimReplace = '$1~'\n\ntok('TILDE')\nsrc[t.TILDE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAIN] + '$'\ntok('TILDELOOSE')\nsrc[t.TILDELOOSE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAINLOOSE] + '$'\n\n// Caret ranges.\n// Meaning is \"at least and backwards compatible with\"\ntok('LONECARET')\nsrc[t.LONECARET] = '(?:\\\\^)'\n\ntok('CARETTRIM')\nsrc[t.CARETTRIM] = '(\\\\s*)' + src[t.LONECARET] + '\\\\s+'\nre[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], 'g')\nvar caretTrimReplace = '$1^'\n\ntok('CARET')\nsrc[t.CARET] = '^' + src[t.LONECARET] + src[t.XRANGEPLAIN] + '$'\ntok('CARETLOOSE')\nsrc[t.CARETLOOSE] = '^' + src[t.LONECARET] + src[t.XRANGEPLAINLOOSE] + '$'\n\n// A simple gt/lt/eq thing, or just \"\" to indicate \"any version\"\ntok('COMPARATORLOOSE')\nsrc[t.COMPARATORLOOSE] = '^' + src[t.GTLT] + '\\\\s*(' + src[t.LOOSEPLAIN] + ')$|^$'\ntok('COMPARATOR')\nsrc[t.COMPARATOR] = '^' + src[t.GTLT] + '\\\\s*(' + src[t.FULLPLAIN] + ')$|^$'\n\n// An expression to strip any whitespace between the gtlt and the thing\n// it modifies, so that `> 1.2.3` ==> `>1.2.3`\ntok('COMPARATORTRIM')\nsrc[t.COMPARATORTRIM] = '(\\\\s*)' + src[t.GTLT] +\n '\\\\s*(' + src[t.LOOSEPLAIN] + '|' + src[t.XRANGEPLAIN] + ')'\n\n// this one has to use the /g flag\nre[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], 'g')\nvar comparatorTrimReplace = '$1$2$3'\n\n// Something like `1.2.3 - 1.2.4`\n// Note that these all use the loose form, because they'll be\n// checked against either the strict or loose comparator form\n// later.\ntok('HYPHENRANGE')\nsrc[t.HYPHENRANGE] = '^\\\\s*(' + src[t.XRANGEPLAIN] + ')' +\n '\\\\s+-\\\\s+' +\n '(' + src[t.XRANGEPLAIN] + ')' +\n '\\\\s*$'\n\ntok('HYPHENRANGELOOSE')\nsrc[t.HYPHENRANGELOOSE] = '^\\\\s*(' + src[t.XRANGEPLAINLOOSE] + ')' +\n '\\\\s+-\\\\s+' +\n '(' + src[t.XRANGEPLAINLOOSE] + ')' +\n '\\\\s*$'\n\n// Star ranges basically just allow anything at all.\ntok('STAR')\nsrc[t.STAR] = '(<|>)?=?\\\\s*\\\\*'\n\n// Compile to actual regexp objects.\n// All are flag-free, unless they were created above with a flag.\nfor (var i = 0; i < R; i++) {\n debug(i, src[i])\n if (!re[i]) {\n re[i] = new RegExp(src[i])\n }\n}\n\nexports.parse = parse\nfunction parse (version, options) {\n if (!options || typeof options !== 'object') {\n options = {\n loose: !!options,\n includePrerelease: false\n }\n }\n\n if (version instanceof SemVer) {\n return version\n }\n\n if (typeof version !== 'string') {\n return null\n }\n\n if (version.length > MAX_LENGTH) {\n return null\n }\n\n var r = options.loose ? re[t.LOOSE] : re[t.FULL]\n if (!r.test(version)) {\n return null\n }\n\n try {\n return new SemVer(version, options)\n } catch (er) {\n return null\n }\n}\n\nexports.valid = valid\nfunction valid (version, options) {\n var v = parse(version, options)\n return v ? v.version : null\n}\n\nexports.clean = clean\nfunction clean (version, options) {\n var s = parse(version.trim().replace(/^[=v]+/, ''), options)\n return s ? 
s.version : null\n}\n\nexports.SemVer = SemVer\n\nfunction SemVer (version, options) {\n if (!options || typeof options !== 'object') {\n options = {\n loose: !!options,\n includePrerelease: false\n }\n }\n if (version instanceof SemVer) {\n if (version.loose === options.loose) {\n return version\n } else {\n version = version.version\n }\n } else if (typeof version !== 'string') {\n throw new TypeError('Invalid Version: ' + version)\n }\n\n if (version.length > MAX_LENGTH) {\n throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters')\n }\n\n if (!(this instanceof SemVer)) {\n return new SemVer(version, options)\n }\n\n debug('SemVer', version, options)\n this.options = options\n this.loose = !!options.loose\n\n var m = version.trim().match(options.loose ? re[t.LOOSE] : re[t.FULL])\n\n if (!m) {\n throw new TypeError('Invalid Version: ' + version)\n }\n\n this.raw = version\n\n // these are actually numbers\n this.major = +m[1]\n this.minor = +m[2]\n this.patch = +m[3]\n\n if (this.major > MAX_SAFE_INTEGER || this.major < 0) {\n throw new TypeError('Invalid major version')\n }\n\n if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) {\n throw new TypeError('Invalid minor version')\n }\n\n if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) {\n throw new TypeError('Invalid patch version')\n }\n\n // numberify any prerelease numeric ids\n if (!m[4]) {\n this.prerelease = []\n } else {\n this.prerelease = m[4].split('.').map(function (id) {\n if (/^[0-9]+$/.test(id)) {\n var num = +id\n if (num >= 0 && num < MAX_SAFE_INTEGER) {\n return num\n }\n }\n return id\n })\n }\n\n this.build = m[5] ? m[5].split('.') : []\n this.format()\n}\n\nSemVer.prototype.format = function () {\n this.version = this.major + '.' + this.minor + '.' + this.patch\n if (this.prerelease.length) {\n this.version += '-' + this.prerelease.join('.')\n }\n return this.version\n}\n\nSemVer.prototype.toString = function () {\n return this.version\n}\n\nSemVer.prototype.compare = function (other) {\n debug('SemVer.compare', this.version, this.options, other)\n if (!(other instanceof SemVer)) {\n other = new SemVer(other, this.options)\n }\n\n return this.compareMain(other) || this.comparePre(other)\n}\n\nSemVer.prototype.compareMain = function (other) {\n if (!(other instanceof SemVer)) {\n other = new SemVer(other, this.options)\n }\n\n return compareIdentifiers(this.major, other.major) ||\n compareIdentifiers(this.minor, other.minor) ||\n compareIdentifiers(this.patch, other.patch)\n}\n\nSemVer.prototype.comparePre = function (other) {\n if (!(other instanceof SemVer)) {\n other = new SemVer(other, this.options)\n }\n\n // NOT having a prerelease is > having one\n if (this.prerelease.length && !other.prerelease.length) {\n return -1\n } else if (!this.prerelease.length && other.prerelease.length) {\n return 1\n } else if (!this.prerelease.length && !other.prerelease.length) {\n return 0\n }\n\n var i = 0\n do {\n var a = this.prerelease[i]\n var b = other.prerelease[i]\n debug('prerelease compare', i, a, b)\n if (a === undefined && b === undefined) {\n return 0\n } else if (b === undefined) {\n return 1\n } else if (a === undefined) {\n return -1\n } else if (a === b) {\n continue\n } else {\n return compareIdentifiers(a, b)\n }\n } while (++i)\n}\n\nSemVer.prototype.compareBuild = function (other) {\n if (!(other instanceof SemVer)) {\n other = new SemVer(other, this.options)\n }\n\n var i = 0\n do {\n var a = this.build[i]\n var b = other.build[i]\n debug('prerelease compare', i, a, b)\n if (a === 
undefined && b === undefined) {\n return 0\n } else if (b === undefined) {\n return 1\n } else if (a === undefined) {\n return -1\n } else if (a === b) {\n continue\n } else {\n return compareIdentifiers(a, b)\n }\n } while (++i)\n}\n\n// preminor will bump the version up to the next minor release, and immediately\n// down to pre-release. premajor and prepatch work the same way.\nSemVer.prototype.inc = function (release, identifier) {\n switch (release) {\n case 'premajor':\n this.prerelease.length = 0\n this.patch = 0\n this.minor = 0\n this.major++\n this.inc('pre', identifier)\n break\n case 'preminor':\n this.prerelease.length = 0\n this.patch = 0\n this.minor++\n this.inc('pre', identifier)\n break\n case 'prepatch':\n // If this is already a prerelease, it will bump to the next version\n // drop any prereleases that might already exist, since they are not\n // relevant at this point.\n this.prerelease.length = 0\n this.inc('patch', identifier)\n this.inc('pre', identifier)\n break\n // If the input is a non-prerelease version, this acts the same as\n // prepatch.\n case 'prerelease':\n if (this.prerelease.length === 0) {\n this.inc('patch', identifier)\n }\n this.inc('pre', identifier)\n break\n\n case 'major':\n // If this is a pre-major version, bump up to the same major version.\n // Otherwise increment major.\n // 1.0.0-5 bumps to 1.0.0\n // 1.1.0 bumps to 2.0.0\n if (this.minor !== 0 ||\n this.patch !== 0 ||\n this.prerelease.length === 0) {\n this.major++\n }\n this.minor = 0\n this.patch = 0\n this.prerelease = []\n break\n case 'minor':\n // If this is a pre-minor version, bump up to the same minor version.\n // Otherwise increment minor.\n // 1.2.0-5 bumps to 1.2.0\n // 1.2.1 bumps to 1.3.0\n if (this.patch !== 0 || this.prerelease.length === 0) {\n this.minor++\n }\n this.patch = 0\n this.prerelease = []\n break\n case 'patch':\n // If this is not a pre-release version, it will increment the patch.\n // If it is a pre-release it will bump up to the same patch version.\n // 1.2.0-5 patches to 1.2.0\n // 1.2.0 patches to 1.2.1\n if (this.prerelease.length === 0) {\n this.patch++\n }\n this.prerelease = []\n break\n // This probably shouldn't be used publicly.\n // 1.0.0 \"pre\" would become 1.0.0-0 which is the wrong direction.\n case 'pre':\n if (this.prerelease.length === 0) {\n this.prerelease = [0]\n } else {\n var i = this.prerelease.length\n while (--i >= 0) {\n if (typeof this.prerelease[i] === 'number') {\n this.prerelease[i]++\n i = -2\n }\n }\n if (i === -1) {\n // didn't increment anything\n this.prerelease.push(0)\n }\n }\n if (identifier) {\n // 1.2.0-beta.1 bumps to 1.2.0-beta.2,\n // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0\n if (this.prerelease[0] === identifier) {\n if (isNaN(this.prerelease[1])) {\n this.prerelease = [identifier, 0]\n }\n } else {\n this.prerelease = [identifier, 0]\n }\n }\n break\n\n default:\n throw new Error('invalid increment argument: ' + release)\n }\n this.format()\n this.raw = this.version\n return this\n}\n\nexports.inc = inc\nfunction inc (version, release, loose, identifier) {\n if (typeof (loose) === 'string') {\n identifier = loose\n loose = undefined\n }\n\n try {\n return new SemVer(version, loose).inc(release, identifier).version\n } catch (er) {\n return null\n }\n}\n\nexports.diff = diff\nfunction diff (version1, version2) {\n if (eq(version1, version2)) {\n return null\n } else {\n var v1 = parse(version1)\n var v2 = parse(version2)\n var prefix = ''\n if (v1.prerelease.length || v2.prerelease.length) {\n 
prefix = 'pre'\n var defaultResult = 'prerelease'\n }\n for (var key in v1) {\n if (key === 'major' || key === 'minor' || key === 'patch') {\n if (v1[key] !== v2[key]) {\n return prefix + key\n }\n }\n }\n return defaultResult // may be undefined\n }\n}\n\nexports.compareIdentifiers = compareIdentifiers\n\nvar numeric = /^[0-9]+$/\nfunction compareIdentifiers (a, b) {\n var anum = numeric.test(a)\n var bnum = numeric.test(b)\n\n if (anum && bnum) {\n a = +a\n b = +b\n }\n\n return a === b ? 0\n : (anum && !bnum) ? -1\n : (bnum && !anum) ? 1\n : a < b ? -1\n : 1\n}\n\nexports.rcompareIdentifiers = rcompareIdentifiers\nfunction rcompareIdentifiers (a, b) {\n return compareIdentifiers(b, a)\n}\n\nexports.major = major\nfunction major (a, loose) {\n return new SemVer(a, loose).major\n}\n\nexports.minor = minor\nfunction minor (a, loose) {\n return new SemVer(a, loose).minor\n}\n\nexports.patch = patch\nfunction patch (a, loose) {\n return new SemVer(a, loose).patch\n}\n\nexports.compare = compare\nfunction compare (a, b, loose) {\n return new SemVer(a, loose).compare(new SemVer(b, loose))\n}\n\nexports.compareLoose = compareLoose\nfunction compareLoose (a, b) {\n return compare(a, b, true)\n}\n\nexports.compareBuild = compareBuild\nfunction compareBuild (a, b, loose) {\n var versionA = new SemVer(a, loose)\n var versionB = new SemVer(b, loose)\n return versionA.compare(versionB) || versionA.compareBuild(versionB)\n}\n\nexports.rcompare = rcompare\nfunction rcompare (a, b, loose) {\n return compare(b, a, loose)\n}\n\nexports.sort = sort\nfunction sort (list, loose) {\n return list.sort(function (a, b) {\n return exports.compareBuild(a, b, loose)\n })\n}\n\nexports.rsort = rsort\nfunction rsort (list, loose) {\n return list.sort(function (a, b) {\n return exports.compareBuild(b, a, loose)\n })\n}\n\nexports.gt = gt\nfunction gt (a, b, loose) {\n return compare(a, b, loose) > 0\n}\n\nexports.lt = lt\nfunction lt (a, b, loose) {\n return compare(a, b, loose) < 0\n}\n\nexports.eq = eq\nfunction eq (a, b, loose) {\n return compare(a, b, loose) === 0\n}\n\nexports.neq = neq\nfunction neq (a, b, loose) {\n return compare(a, b, loose) !== 0\n}\n\nexports.gte = gte\nfunction gte (a, b, loose) {\n return compare(a, b, loose) >= 0\n}\n\nexports.lte = lte\nfunction lte (a, b, loose) {\n return compare(a, b, loose) <= 0\n}\n\nexports.cmp = cmp\nfunction cmp (a, op, b, loose) {\n switch (op) {\n case '===':\n if (typeof a === 'object')\n a = a.version\n if (typeof b === 'object')\n b = b.version\n return a === b\n\n case '!==':\n if (typeof a === 'object')\n a = a.version\n if (typeof b === 'object')\n b = b.version\n return a !== b\n\n case '':\n case '=':\n case '==':\n return eq(a, b, loose)\n\n case '!=':\n return neq(a, b, loose)\n\n case '>':\n return gt(a, b, loose)\n\n case '>=':\n return gte(a, b, loose)\n\n case '<':\n return lt(a, b, loose)\n\n case '<=':\n return lte(a, b, loose)\n\n default:\n throw new TypeError('Invalid operator: ' + op)\n }\n}\n\nexports.Comparator = Comparator\nfunction Comparator (comp, options) {\n if (!options || typeof options !== 'object') {\n options = {\n loose: !!options,\n includePrerelease: false\n }\n }\n\n if (comp instanceof Comparator) {\n if (comp.loose === !!options.loose) {\n return comp\n } else {\n comp = comp.value\n }\n }\n\n if (!(this instanceof Comparator)) {\n return new Comparator(comp, options)\n }\n\n debug('comparator', comp, options)\n this.options = options\n this.loose = !!options.loose\n this.parse(comp)\n\n if (this.semver === ANY) {\n 
this.value = ''\n } else {\n this.value = this.operator + this.semver.version\n }\n\n debug('comp', this)\n}\n\nvar ANY = {}\nComparator.prototype.parse = function (comp) {\n var r = this.options.loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR]\n var m = comp.match(r)\n\n if (!m) {\n throw new TypeError('Invalid comparator: ' + comp)\n }\n\n this.operator = m[1] !== undefined ? m[1] : ''\n if (this.operator === '=') {\n this.operator = ''\n }\n\n // if it literally is just '>' or '' then allow anything.\n if (!m[2]) {\n this.semver = ANY\n } else {\n this.semver = new SemVer(m[2], this.options.loose)\n }\n}\n\nComparator.prototype.toString = function () {\n return this.value\n}\n\nComparator.prototype.test = function (version) {\n debug('Comparator.test', version, this.options.loose)\n\n if (this.semver === ANY || version === ANY) {\n return true\n }\n\n if (typeof version === 'string') {\n try {\n version = new SemVer(version, this.options)\n } catch (er) {\n return false\n }\n }\n\n return cmp(version, this.operator, this.semver, this.options)\n}\n\nComparator.prototype.intersects = function (comp, options) {\n if (!(comp instanceof Comparator)) {\n throw new TypeError('a Comparator is required')\n }\n\n if (!options || typeof options !== 'object') {\n options = {\n loose: !!options,\n includePrerelease: false\n }\n }\n\n var rangeTmp\n\n if (this.operator === '') {\n if (this.value === '') {\n return true\n }\n rangeTmp = new Range(comp.value, options)\n return satisfies(this.value, rangeTmp, options)\n } else if (comp.operator === '') {\n if (comp.value === '') {\n return true\n }\n rangeTmp = new Range(this.value, options)\n return satisfies(comp.semver, rangeTmp, options)\n }\n\n var sameDirectionIncreasing =\n (this.operator === '>=' || this.operator === '>') &&\n (comp.operator === '>=' || comp.operator === '>')\n var sameDirectionDecreasing =\n (this.operator === '<=' || this.operator === '<') &&\n (comp.operator === '<=' || comp.operator === '<')\n var sameSemVer = this.semver.version === comp.semver.version\n var differentDirectionsInclusive =\n (this.operator === '>=' || this.operator === '<=') &&\n (comp.operator === '>=' || comp.operator === '<=')\n var oppositeDirectionsLessThan =\n cmp(this.semver, '<', comp.semver, options) &&\n ((this.operator === '>=' || this.operator === '>') &&\n (comp.operator === '<=' || comp.operator === '<'))\n var oppositeDirectionsGreaterThan =\n cmp(this.semver, '>', comp.semver, options) &&\n ((this.operator === '<=' || this.operator === '<') &&\n (comp.operator === '>=' || comp.operator === '>'))\n\n return sameDirectionIncreasing || sameDirectionDecreasing ||\n (sameSemVer && differentDirectionsInclusive) ||\n oppositeDirectionsLessThan || oppositeDirectionsGreaterThan\n}\n\nexports.Range = Range\nfunction Range (range, options) {\n if (!options || typeof options !== 'object') {\n options = {\n loose: !!options,\n includePrerelease: false\n }\n }\n\n if (range instanceof Range) {\n if (range.loose === !!options.loose &&\n range.includePrerelease === !!options.includePrerelease) {\n return range\n } else {\n return new Range(range.raw, options)\n }\n }\n\n if (range instanceof Comparator) {\n return new Range(range.value, options)\n }\n\n if (!(this instanceof Range)) {\n return new Range(range, options)\n }\n\n this.options = options\n this.loose = !!options.loose\n this.includePrerelease = !!options.includePrerelease\n\n // First, split based on boolean or ||\n this.raw = range\n this.set = range.split(/\\s*\\|\\|\\s*/).map(function (range) 
{\n return this.parseRange(range.trim())\n }, this).filter(function (c) {\n // throw out any that are not relevant for whatever reason\n return c.length\n })\n\n if (!this.set.length) {\n throw new TypeError('Invalid SemVer Range: ' + range)\n }\n\n this.format()\n}\n\nRange.prototype.format = function () {\n this.range = this.set.map(function (comps) {\n return comps.join(' ').trim()\n }).join('||').trim()\n return this.range\n}\n\nRange.prototype.toString = function () {\n return this.range\n}\n\nRange.prototype.parseRange = function (range) {\n var loose = this.options.loose\n range = range.trim()\n // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4`\n var hr = loose ? re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE]\n range = range.replace(hr, hyphenReplace)\n debug('hyphen replace', range)\n // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5`\n range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace)\n debug('comparator trim', range, re[t.COMPARATORTRIM])\n\n // `~ 1.2.3` => `~1.2.3`\n range = range.replace(re[t.TILDETRIM], tildeTrimReplace)\n\n // `^ 1.2.3` => `^1.2.3`\n range = range.replace(re[t.CARETTRIM], caretTrimReplace)\n\n // normalize spaces\n range = range.split(/\\s+/).join(' ')\n\n // At this point, the range is completely trimmed and\n // ready to be split into comparators.\n\n var compRe = loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR]\n var set = range.split(' ').map(function (comp) {\n return parseComparator(comp, this.options)\n }, this).join(' ').split(/\\s+/)\n if (this.options.loose) {\n // in loose mode, throw out any that are not valid comparators\n set = set.filter(function (comp) {\n return !!comp.match(compRe)\n })\n }\n set = set.map(function (comp) {\n return new Comparator(comp, this.options)\n }, this)\n\n return set\n}\n\nRange.prototype.intersects = function (range, options) {\n if (!(range instanceof Range)) {\n throw new TypeError('a Range is required')\n }\n\n return this.set.some(function (thisComparators) {\n return (\n isSatisfiable(thisComparators, options) &&\n range.set.some(function (rangeComparators) {\n return (\n isSatisfiable(rangeComparators, options) &&\n thisComparators.every(function (thisComparator) {\n return rangeComparators.every(function (rangeComparator) {\n return thisComparator.intersects(rangeComparator, options)\n })\n })\n )\n })\n )\n })\n}\n\n// take a set of comparators and determine whether there\n// exists a version which can satisfy it\nfunction isSatisfiable (comparators, options) {\n var result = true\n var remainingComparators = comparators.slice()\n var testComparator = remainingComparators.pop()\n\n while (result && remainingComparators.length) {\n result = remainingComparators.every(function (otherComparator) {\n return testComparator.intersects(otherComparator, options)\n })\n\n testComparator = remainingComparators.pop()\n }\n\n return result\n}\n\n// Mostly just for testing and legacy API reasons\nexports.toComparators = toComparators\nfunction toComparators (range, options) {\n return new Range(range, options).set.map(function (comp) {\n return comp.map(function (c) {\n return c.value\n }).join(' ').trim().split(' ')\n })\n}\n\n// comprised of xranges, tildes, stars, and gtlt's at this point.\n// already replaced the hyphen ranges\n// turn into a set of JUST comparators.\nfunction parseComparator (comp, options) {\n debug('comp', comp, options)\n comp = replaceCarets(comp, options)\n debug('caret', comp)\n comp = replaceTildes(comp, options)\n debug('tildes', comp)\n comp = replaceXRanges(comp, options)\n debug('xrange', 
comp)\n comp = replaceStars(comp, options)\n debug('stars', comp)\n return comp\n}\n\nfunction isX (id) {\n return !id || id.toLowerCase() === 'x' || id === '*'\n}\n\n// ~, ~> --> * (any, kinda silly)\n// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0\n// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0\n// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0\n// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0\n// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0\nfunction replaceTildes (comp, options) {\n return comp.trim().split(/\\s+/).map(function (comp) {\n return replaceTilde(comp, options)\n }).join(' ')\n}\n\nfunction replaceTilde (comp, options) {\n var r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE]\n return comp.replace(r, function (_, M, m, p, pr) {\n debug('tilde', comp, _, M, m, p, pr)\n var ret\n\n if (isX(M)) {\n ret = ''\n } else if (isX(m)) {\n ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'\n } else if (isX(p)) {\n // ~1.2 == >=1.2.0 <1.3.0\n ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'\n } else if (pr) {\n debug('replaceTilde pr', pr)\n ret = '>=' + M + '.' + m + '.' + p + '-' + pr +\n ' <' + M + '.' + (+m + 1) + '.0'\n } else {\n // ~1.2.3 == >=1.2.3 <1.3.0\n ret = '>=' + M + '.' + m + '.' + p +\n ' <' + M + '.' + (+m + 1) + '.0'\n }\n\n debug('tilde return', ret)\n return ret\n })\n}\n\n// ^ --> * (any, kinda silly)\n// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0\n// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0\n// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0\n// ^1.2.3 --> >=1.2.3 <2.0.0\n// ^1.2.0 --> >=1.2.0 <2.0.0\nfunction replaceCarets (comp, options) {\n return comp.trim().split(/\\s+/).map(function (comp) {\n return replaceCaret(comp, options)\n }).join(' ')\n}\n\nfunction replaceCaret (comp, options) {\n debug('caret', comp, options)\n var r = options.loose ? re[t.CARETLOOSE] : re[t.CARET]\n return comp.replace(r, function (_, M, m, p, pr) {\n debug('caret', comp, _, M, m, p, pr)\n var ret\n\n if (isX(M)) {\n ret = ''\n } else if (isX(m)) {\n ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'\n } else if (isX(p)) {\n if (M === '0') {\n ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'\n } else {\n ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0'\n }\n } else if (pr) {\n debug('replaceCaret pr', pr)\n if (M === '0') {\n if (m === '0') {\n ret = '>=' + M + '.' + m + '.' + p + '-' + pr +\n ' <' + M + '.' + m + '.' + (+p + 1)\n } else {\n ret = '>=' + M + '.' + m + '.' + p + '-' + pr +\n ' <' + M + '.' + (+m + 1) + '.0'\n }\n } else {\n ret = '>=' + M + '.' + m + '.' + p + '-' + pr +\n ' <' + (+M + 1) + '.0.0'\n }\n } else {\n debug('no pr')\n if (M === '0') {\n if (m === '0') {\n ret = '>=' + M + '.' + m + '.' + p +\n ' <' + M + '.' + m + '.' + (+p + 1)\n } else {\n ret = '>=' + M + '.' + m + '.' + p +\n ' <' + M + '.' + (+m + 1) + '.0'\n }\n } else {\n ret = '>=' + M + '.' + m + '.' + p +\n ' <' + (+M + 1) + '.0.0'\n }\n }\n\n debug('caret return', ret)\n return ret\n })\n}\n\nfunction replaceXRanges (comp, options) {\n debug('replaceXRanges', comp, options)\n return comp.split(/\\s+/).map(function (comp) {\n return replaceXRange(comp, options)\n }).join(' ')\n}\n\nfunction replaceXRange (comp, options) {\n comp = comp.trim()\n var r = options.loose ? 
re[t.XRANGELOOSE] : re[t.XRANGE]\n return comp.replace(r, function (ret, gtlt, M, m, p, pr) {\n debug('xRange', comp, ret, gtlt, M, m, p, pr)\n var xM = isX(M)\n var xm = xM || isX(m)\n var xp = xm || isX(p)\n var anyX = xp\n\n if (gtlt === '=' && anyX) {\n gtlt = ''\n }\n\n // if we're including prereleases in the match, then we need\n // to fix this to -0, the lowest possible prerelease value\n pr = options.includePrerelease ? '-0' : ''\n\n if (xM) {\n if (gtlt === '>' || gtlt === '<') {\n // nothing is allowed\n ret = '<0.0.0-0'\n } else {\n // nothing is forbidden\n ret = '*'\n }\n } else if (gtlt && anyX) {\n // we know patch is an x, because we have any x at all.\n // replace X with 0\n if (xm) {\n m = 0\n }\n p = 0\n\n if (gtlt === '>') {\n // >1 => >=2.0.0\n // >1.2 => >=1.3.0\n // >1.2.3 => >= 1.2.4\n gtlt = '>='\n if (xm) {\n M = +M + 1\n m = 0\n p = 0\n } else {\n m = +m + 1\n p = 0\n }\n } else if (gtlt === '<=') {\n // <=0.7.x is actually <0.8.0, since any 0.7.x should\n // pass. Similarly, <=7.x is actually <8.0.0, etc.\n gtlt = '<'\n if (xm) {\n M = +M + 1\n } else {\n m = +m + 1\n }\n }\n\n ret = gtlt + M + '.' + m + '.' + p + pr\n } else if (xm) {\n ret = '>=' + M + '.0.0' + pr + ' <' + (+M + 1) + '.0.0' + pr\n } else if (xp) {\n ret = '>=' + M + '.' + m + '.0' + pr +\n ' <' + M + '.' + (+m + 1) + '.0' + pr\n }\n\n debug('xRange return', ret)\n\n return ret\n })\n}\n\n// Because * is AND-ed with everything else in the comparator,\n// and '' means \"any version\", just remove the *s entirely.\nfunction replaceStars (comp, options) {\n debug('replaceStars', comp, options)\n // Looseness is ignored here. star is always as loose as it gets!\n return comp.trim().replace(re[t.STAR], '')\n}\n\n// This function is passed to string.replace(re[t.HYPHENRANGE])\n// M, m, patch, prerelease, build\n// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5\n// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do\n// 1.2 - 3.4 => >=1.2.0 <3.5.0\nfunction hyphenReplace ($0,\n from, fM, fm, fp, fpr, fb,\n to, tM, tm, tp, tpr, tb) {\n if (isX(fM)) {\n from = ''\n } else if (isX(fm)) {\n from = '>=' + fM + '.0.0'\n } else if (isX(fp)) {\n from = '>=' + fM + '.' + fm + '.0'\n } else {\n from = '>=' + from\n }\n\n if (isX(tM)) {\n to = ''\n } else if (isX(tm)) {\n to = '<' + (+tM + 1) + '.0.0'\n } else if (isX(tp)) {\n to = '<' + tM + '.' + (+tm + 1) + '.0'\n } else if (tpr) {\n to = '<=' + tM + '.' + tm + '.' 
+ tp + '-' + tpr\n } else {\n to = '<=' + to\n }\n\n return (from + ' ' + to).trim()\n}\n\n// if ANY of the sets match ALL of its comparators, then pass\nRange.prototype.test = function (version) {\n if (!version) {\n return false\n }\n\n if (typeof version === 'string') {\n try {\n version = new SemVer(version, this.options)\n } catch (er) {\n return false\n }\n }\n\n for (var i = 0; i < this.set.length; i++) {\n if (testSet(this.set[i], version, this.options)) {\n return true\n }\n }\n return false\n}\n\nfunction testSet (set, version, options) {\n for (var i = 0; i < set.length; i++) {\n if (!set[i].test(version)) {\n return false\n }\n }\n\n if (version.prerelease.length && !options.includePrerelease) {\n // Find the set of versions that are allowed to have prereleases\n // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0\n // That should allow `1.2.3-pr.2` to pass.\n // However, `1.2.4-alpha.notready` should NOT be allowed,\n // even though it's within the range set by the comparators.\n for (i = 0; i < set.length; i++) {\n debug(set[i].semver)\n if (set[i].semver === ANY) {\n continue\n }\n\n if (set[i].semver.prerelease.length > 0) {\n var allowed = set[i].semver\n if (allowed.major === version.major &&\n allowed.minor === version.minor &&\n allowed.patch === version.patch) {\n return true\n }\n }\n }\n\n // Version has a -pre, but it's not one of the ones we like.\n return false\n }\n\n return true\n}\n\nexports.satisfies = satisfies\nfunction satisfies (version, range, options) {\n try {\n range = new Range(range, options)\n } catch (er) {\n return false\n }\n return range.test(version)\n}\n\nexports.maxSatisfying = maxSatisfying\nfunction maxSatisfying (versions, range, options) {\n var max = null\n var maxSV = null\n try {\n var rangeObj = new Range(range, options)\n } catch (er) {\n return null\n }\n versions.forEach(function (v) {\n if (rangeObj.test(v)) {\n // satisfies(v, range, options)\n if (!max || maxSV.compare(v) === -1) {\n // compare(max, v, true)\n max = v\n maxSV = new SemVer(max, options)\n }\n }\n })\n return max\n}\n\nexports.minSatisfying = minSatisfying\nfunction minSatisfying (versions, range, options) {\n var min = null\n var minSV = null\n try {\n var rangeObj = new Range(range, options)\n } catch (er) {\n return null\n }\n versions.forEach(function (v) {\n if (rangeObj.test(v)) {\n // satisfies(v, range, options)\n if (!min || minSV.compare(v) === 1) {\n // compare(min, v, true)\n min = v\n minSV = new SemVer(min, options)\n }\n }\n })\n return min\n}\n\nexports.minVersion = minVersion\nfunction minVersion (range, loose) {\n range = new Range(range, loose)\n\n var minver = new SemVer('0.0.0')\n if (range.test(minver)) {\n return minver\n }\n\n minver = new SemVer('0.0.0-0')\n if (range.test(minver)) {\n return minver\n }\n\n minver = null\n for (var i = 0; i < range.set.length; ++i) {\n var comparators = range.set[i]\n\n comparators.forEach(function (comparator) {\n // Clone to avoid manipulating the comparator's semver object.\n var compver = new SemVer(comparator.semver.version)\n switch (comparator.operator) {\n case '>':\n if (compver.prerelease.length === 0) {\n compver.patch++\n } else {\n compver.prerelease.push(0)\n }\n compver.raw = compver.format()\n /* fallthrough */\n case '':\n case '>=':\n if (!minver || gt(minver, compver)) {\n minver = compver\n }\n break\n case '<':\n case '<=':\n /* Ignore maximum versions */\n break\n /* istanbul ignore next */\n default:\n throw new Error('Unexpected operation: ' + comparator.operator)\n }\n })\n 
}\n\n if (minver && range.test(minver)) {\n return minver\n }\n\n return null\n}\n\nexports.validRange = validRange\nfunction validRange (range, options) {\n try {\n // Return '*' instead of '' so that truthiness works.\n // This will throw if it's invalid anyway\n return new Range(range, options).range || '*'\n } catch (er) {\n return null\n }\n}\n\n// Determine if version is less than all the versions possible in the range\nexports.ltr = ltr\nfunction ltr (version, range, options) {\n return outside(version, range, '<', options)\n}\n\n// Determine if version is greater than all the versions possible in the range.\nexports.gtr = gtr\nfunction gtr (version, range, options) {\n return outside(version, range, '>', options)\n}\n\nexports.outside = outside\nfunction outside (version, range, hilo, options) {\n version = new SemVer(version, options)\n range = new Range(range, options)\n\n var gtfn, ltefn, ltfn, comp, ecomp\n switch (hilo) {\n case '>':\n gtfn = gt\n ltefn = lte\n ltfn = lt\n comp = '>'\n ecomp = '>='\n break\n case '<':\n gtfn = lt\n ltefn = gte\n ltfn = gt\n comp = '<'\n ecomp = '<='\n break\n default:\n throw new TypeError('Must provide a hilo val of \"<\" or \">\"')\n }\n\n // If it satisifes the range it is not outside\n if (satisfies(version, range, options)) {\n return false\n }\n\n // From now on, variable terms are as if we're in \"gtr\" mode.\n // but note that everything is flipped for the \"ltr\" function.\n\n for (var i = 0; i < range.set.length; ++i) {\n var comparators = range.set[i]\n\n var high = null\n var low = null\n\n comparators.forEach(function (comparator) {\n if (comparator.semver === ANY) {\n comparator = new Comparator('>=0.0.0')\n }\n high = high || comparator\n low = low || comparator\n if (gtfn(comparator.semver, high.semver, options)) {\n high = comparator\n } else if (ltfn(comparator.semver, low.semver, options)) {\n low = comparator\n }\n })\n\n // If the edge version comparator has a operator then our version\n // isn't outside it\n if (high.operator === comp || high.operator === ecomp) {\n return false\n }\n\n // If the lowest version comparator has an operator and our version\n // is less than it then it isn't higher than the range\n if ((!low.operator || low.operator === comp) &&\n ltefn(version, low.semver)) {\n return false\n } else if (low.operator === ecomp && ltfn(version, low.semver)) {\n return false\n }\n }\n return true\n}\n\nexports.prerelease = prerelease\nfunction prerelease (version, options) {\n var parsed = parse(version, options)\n return (parsed && parsed.prerelease.length) ? 
parsed.prerelease : null\n}\n\nexports.intersects = intersects\nfunction intersects (r1, r2, options) {\n r1 = new Range(r1, options)\n r2 = new Range(r2, options)\n return r1.intersects(r2)\n}\n\nexports.coerce = coerce\nfunction coerce (version, options) {\n if (version instanceof SemVer) {\n return version\n }\n\n if (typeof version === 'number') {\n version = String(version)\n }\n\n if (typeof version !== 'string') {\n return null\n }\n\n options = options || {}\n\n var match = null\n if (!options.rtl) {\n match = version.match(re[t.COERCE])\n } else {\n // Find the right-most coercible string that does not share\n // a terminus with a more left-ward coercible string.\n // Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4'\n //\n // Walk through the string checking with a /g regexp\n // Manually set the index so as to pick up overlapping matches.\n // Stop when we get a match that ends at the string end, since no\n // coercible string can be more right-ward without the same terminus.\n var next\n while ((next = re[t.COERCERTL].exec(version)) &&\n (!match || match.index + match[0].length !== version.length)\n ) {\n if (!match ||\n next.index + next[0].length !== match.index + match[0].length) {\n match = next\n }\n re[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length\n }\n // leave it in a clean state\n re[t.COERCERTL].lastIndex = -1\n }\n\n if (match === null) {\n return null\n }\n\n return parse(match[2] +\n '.' + (match[3] || '0') +\n '.' + (match[4] || '0'), options)\n}\n","module.exports = require('./lib/tunnel');\n","'use strict';\n\nvar net = require('net');\nvar tls = require('tls');\nvar http = require('http');\nvar https = require('https');\nvar events = require('events');\nvar assert = require('assert');\nvar util = require('util');\n\n\nexports.httpOverHttp = httpOverHttp;\nexports.httpsOverHttp = httpsOverHttp;\nexports.httpOverHttps = httpOverHttps;\nexports.httpsOverHttps = httpsOverHttps;\n\n\nfunction httpOverHttp(options) {\n var agent = new TunnelingAgent(options);\n agent.request = http.request;\n return agent;\n}\n\nfunction httpsOverHttp(options) {\n var agent = new TunnelingAgent(options);\n agent.request = http.request;\n agent.createSocket = createSecureSocket;\n agent.defaultPort = 443;\n return agent;\n}\n\nfunction httpOverHttps(options) {\n var agent = new TunnelingAgent(options);\n agent.request = https.request;\n return agent;\n}\n\nfunction httpsOverHttps(options) {\n var agent = new TunnelingAgent(options);\n agent.request = https.request;\n agent.createSocket = createSecureSocket;\n agent.defaultPort = 443;\n return agent;\n}\n\n\nfunction TunnelingAgent(options) {\n var self = this;\n self.options = options || {};\n self.proxyOptions = self.options.proxy || {};\n self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets;\n self.requests = [];\n self.sockets = [];\n\n self.on('free', function onFree(socket, host, port, localAddress) {\n var options = toOptions(host, port, localAddress);\n for (var i = 0, len = self.requests.length; i < len; ++i) {\n var pending = self.requests[i];\n if (pending.host === options.host && pending.port === options.port) {\n // Detect the request to connect same origin server,\n // reuse the connection.\n self.requests.splice(i, 1);\n pending.request.onSocket(socket);\n return;\n }\n }\n socket.destroy();\n self.removeSocket(socket);\n });\n}\nutil.inherits(TunnelingAgent, events.EventEmitter);\n\nTunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) 
{\n var self = this;\n var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress));\n\n if (self.sockets.length >= this.maxSockets) {\n // We are over limit so we'll add it to the queue.\n self.requests.push(options);\n return;\n }\n\n // If we are under maxSockets create a new one.\n self.createSocket(options, function(socket) {\n socket.on('free', onFree);\n socket.on('close', onCloseOrRemove);\n socket.on('agentRemove', onCloseOrRemove);\n req.onSocket(socket);\n\n function onFree() {\n self.emit('free', socket, options);\n }\n\n function onCloseOrRemove(err) {\n self.removeSocket(socket);\n socket.removeListener('free', onFree);\n socket.removeListener('close', onCloseOrRemove);\n socket.removeListener('agentRemove', onCloseOrRemove);\n }\n });\n};\n\nTunnelingAgent.prototype.createSocket = function createSocket(options, cb) {\n var self = this;\n var placeholder = {};\n self.sockets.push(placeholder);\n\n var connectOptions = mergeOptions({}, self.proxyOptions, {\n method: 'CONNECT',\n path: options.host + ':' + options.port,\n agent: false,\n headers: {\n host: options.host + ':' + options.port\n }\n });\n if (options.localAddress) {\n connectOptions.localAddress = options.localAddress;\n }\n if (connectOptions.proxyAuth) {\n connectOptions.headers = connectOptions.headers || {};\n connectOptions.headers['Proxy-Authorization'] = 'Basic ' +\n new Buffer(connectOptions.proxyAuth).toString('base64');\n }\n\n debug('making CONNECT request');\n var connectReq = self.request(connectOptions);\n connectReq.useChunkedEncodingByDefault = false; // for v0.6\n connectReq.once('response', onResponse); // for v0.6\n connectReq.once('upgrade', onUpgrade); // for v0.6\n connectReq.once('connect', onConnect); // for v0.7 or later\n connectReq.once('error', onError);\n connectReq.end();\n\n function onResponse(res) {\n // Very hacky. 
This is necessary to avoid http-parser leaks.\n res.upgrade = true;\n }\n\n function onUpgrade(res, socket, head) {\n // Hacky.\n process.nextTick(function() {\n onConnect(res, socket, head);\n });\n }\n\n function onConnect(res, socket, head) {\n connectReq.removeAllListeners();\n socket.removeAllListeners();\n\n if (res.statusCode !== 200) {\n debug('tunneling socket could not be established, statusCode=%d',\n res.statusCode);\n socket.destroy();\n var error = new Error('tunneling socket could not be established, ' +\n 'statusCode=' + res.statusCode);\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n return;\n }\n if (head.length > 0) {\n debug('got illegal response body from proxy');\n socket.destroy();\n var error = new Error('got illegal response body from proxy');\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n return;\n }\n debug('tunneling connection has established');\n self.sockets[self.sockets.indexOf(placeholder)] = socket;\n return cb(socket);\n }\n\n function onError(cause) {\n connectReq.removeAllListeners();\n\n debug('tunneling socket could not be established, cause=%s\\n',\n cause.message, cause.stack);\n var error = new Error('tunneling socket could not be established, ' +\n 'cause=' + cause.message);\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n }\n};\n\nTunnelingAgent.prototype.removeSocket = function removeSocket(socket) {\n var pos = this.sockets.indexOf(socket)\n if (pos === -1) {\n return;\n }\n this.sockets.splice(pos, 1);\n\n var pending = this.requests.shift();\n if (pending) {\n // If we have pending requests and a socket gets closed a new one\n // needs to be created to take over in the pool for the one that closed.\n this.createSocket(pending, function(socket) {\n pending.request.onSocket(socket);\n });\n }\n};\n\nfunction createSecureSocket(options, cb) {\n var self = this;\n TunnelingAgent.prototype.createSocket.call(self, options, function(socket) {\n var hostHeader = options.request.getHeader('host');\n var tlsOptions = mergeOptions({}, self.options, {\n socket: socket,\n servername: hostHeader ? 
hostHeader.replace(/:.*$/, '') : options.host\n });\n\n // 0 is dummy port for v0.6\n var secureSocket = tls.connect(0, tlsOptions);\n self.sockets[self.sockets.indexOf(socket)] = secureSocket;\n cb(secureSocket);\n });\n}\n\n\nfunction toOptions(host, port, localAddress) {\n if (typeof host === 'string') { // since v0.10\n return {\n host: host,\n port: port,\n localAddress: localAddress\n };\n }\n return host; // for v0.11 or later\n}\n\nfunction mergeOptions(target) {\n for (var i = 1, len = arguments.length; i < len; ++i) {\n var overrides = arguments[i];\n if (typeof overrides === 'object') {\n var keys = Object.keys(overrides);\n for (var j = 0, keyLen = keys.length; j < keyLen; ++j) {\n var k = keys[j];\n if (overrides[k] !== undefined) {\n target[k] = overrides[k];\n }\n }\n }\n }\n return target;\n}\n\n\nvar debug;\nif (process.env.NODE_DEBUG && /\\btunnel\\b/.test(process.env.NODE_DEBUG)) {\n debug = function() {\n var args = Array.prototype.slice.call(arguments);\n if (typeof args[0] === 'string') {\n args[0] = 'TUNNEL: ' + args[0];\n } else {\n args.unshift('TUNNEL:');\n }\n console.error.apply(console, args);\n }\n} else {\n debug = function() {};\n}\nexports.debug = debug; // for test\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction getUserAgent() {\n if (typeof navigator === \"object\" && \"userAgent\" in navigator) {\n return navigator.userAgent;\n }\n\n if (typeof process === \"object\" && \"version\" in process) {\n return `Node.js/${process.version.substr(1)} (${process.platform}; ${process.arch})`;\n }\n\n return \"\";\n}\n\nexports.getUserAgent = getUserAgent;\n//# sourceMappingURL=index.js.map\n","/**\n * web-streams-polyfill v3.2.1\n */\n(function (global, factory) {\n typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :\n typeof define === 'function' && define.amd ? define(['exports'], factory) :\n (global = typeof globalThis !== 'undefined' ? 
globalThis : global || self, factory(global.WebStreamsPolyfill = {}));\n}(this, (function (exports) { 'use strict';\n\n /// \n const SymbolPolyfill = typeof Symbol === 'function' && typeof Symbol.iterator === 'symbol' ?\n Symbol :\n description => `Symbol(${description})`;\n\n /// \n function noop() {\n return undefined;\n }\n function getGlobals() {\n if (typeof self !== 'undefined') {\n return self;\n }\n else if (typeof window !== 'undefined') {\n return window;\n }\n else if (typeof global !== 'undefined') {\n return global;\n }\n return undefined;\n }\n const globals = getGlobals();\n\n function typeIsObject(x) {\n return (typeof x === 'object' && x !== null) || typeof x === 'function';\n }\n const rethrowAssertionErrorRejection = noop;\n\n const originalPromise = Promise;\n const originalPromiseThen = Promise.prototype.then;\n const originalPromiseResolve = Promise.resolve.bind(originalPromise);\n const originalPromiseReject = Promise.reject.bind(originalPromise);\n function newPromise(executor) {\n return new originalPromise(executor);\n }\n function promiseResolvedWith(value) {\n return originalPromiseResolve(value);\n }\n function promiseRejectedWith(reason) {\n return originalPromiseReject(reason);\n }\n function PerformPromiseThen(promise, onFulfilled, onRejected) {\n // There doesn't appear to be any way to correctly emulate the behaviour from JavaScript, so this is just an\n // approximation.\n return originalPromiseThen.call(promise, onFulfilled, onRejected);\n }\n function uponPromise(promise, onFulfilled, onRejected) {\n PerformPromiseThen(PerformPromiseThen(promise, onFulfilled, onRejected), undefined, rethrowAssertionErrorRejection);\n }\n function uponFulfillment(promise, onFulfilled) {\n uponPromise(promise, onFulfilled);\n }\n function uponRejection(promise, onRejected) {\n uponPromise(promise, undefined, onRejected);\n }\n function transformPromiseWith(promise, fulfillmentHandler, rejectionHandler) {\n return PerformPromiseThen(promise, fulfillmentHandler, rejectionHandler);\n }\n function setPromiseIsHandledToTrue(promise) {\n PerformPromiseThen(promise, undefined, rethrowAssertionErrorRejection);\n }\n const queueMicrotask = (() => {\n const globalQueueMicrotask = globals && globals.queueMicrotask;\n if (typeof globalQueueMicrotask === 'function') {\n return globalQueueMicrotask;\n }\n const resolvedPromise = promiseResolvedWith(undefined);\n return (fn) => PerformPromiseThen(resolvedPromise, fn);\n })();\n function reflectCall(F, V, args) {\n if (typeof F !== 'function') {\n throw new TypeError('Argument is not a function');\n }\n return Function.prototype.apply.call(F, V, args);\n }\n function promiseCall(F, V, args) {\n try {\n return promiseResolvedWith(reflectCall(F, V, args));\n }\n catch (value) {\n return promiseRejectedWith(value);\n }\n }\n\n // Original from Chromium\n // https://chromium.googlesource.com/chromium/src/+/0aee4434a4dba42a42abaea9bfbc0cd196a63bc1/third_party/blink/renderer/core/streams/SimpleQueue.js\n const QUEUE_MAX_ARRAY_SIZE = 16384;\n /**\n * Simple queue structure.\n *\n * Avoids scalability issues with using a packed array directly by using\n * multiple arrays in a linked list and keeping the array size bounded.\n */\n class SimpleQueue {\n constructor() {\n this._cursor = 0;\n this._size = 0;\n // _front and _back are always defined.\n this._front = {\n _elements: [],\n _next: undefined\n };\n this._back = this._front;\n // The cursor is used to avoid calling Array.shift().\n // It contains the index of the front element of the array 
inside the\n // front-most node. It is always in the range [0, QUEUE_MAX_ARRAY_SIZE).\n this._cursor = 0;\n // When there is only one node, size === elements.length - cursor.\n this._size = 0;\n }\n get length() {\n return this._size;\n }\n // For exception safety, this method is structured in order:\n // 1. Read state\n // 2. Calculate required state mutations\n // 3. Perform state mutations\n push(element) {\n const oldBack = this._back;\n let newBack = oldBack;\n if (oldBack._elements.length === QUEUE_MAX_ARRAY_SIZE - 1) {\n newBack = {\n _elements: [],\n _next: undefined\n };\n }\n // push() is the mutation most likely to throw an exception, so it\n // goes first.\n oldBack._elements.push(element);\n if (newBack !== oldBack) {\n this._back = newBack;\n oldBack._next = newBack;\n }\n ++this._size;\n }\n // Like push(), shift() follows the read -> calculate -> mutate pattern for\n // exception safety.\n shift() { // must not be called on an empty queue\n const oldFront = this._front;\n let newFront = oldFront;\n const oldCursor = this._cursor;\n let newCursor = oldCursor + 1;\n const elements = oldFront._elements;\n const element = elements[oldCursor];\n if (newCursor === QUEUE_MAX_ARRAY_SIZE) {\n newFront = oldFront._next;\n newCursor = 0;\n }\n // No mutations before this point.\n --this._size;\n this._cursor = newCursor;\n if (oldFront !== newFront) {\n this._front = newFront;\n }\n // Permit shifted element to be garbage collected.\n elements[oldCursor] = undefined;\n return element;\n }\n // The tricky thing about forEach() is that it can be called\n // re-entrantly. The queue may be mutated inside the callback. It is easy to\n // see that push() within the callback has no negative effects since the end\n // of the queue is checked for on every iteration. If shift() is called\n // repeatedly within the callback then the next iteration may return an\n // element that has been removed. 
In this case the callback will be called\n // with undefined values until we either \"catch up\" with elements that still\n // exist or reach the back of the queue.\n forEach(callback) {\n let i = this._cursor;\n let node = this._front;\n let elements = node._elements;\n while (i !== elements.length || node._next !== undefined) {\n if (i === elements.length) {\n node = node._next;\n elements = node._elements;\n i = 0;\n if (elements.length === 0) {\n break;\n }\n }\n callback(elements[i]);\n ++i;\n }\n }\n // Return the element that would be returned if shift() was called now,\n // without modifying the queue.\n peek() { // must not be called on an empty queue\n const front = this._front;\n const cursor = this._cursor;\n return front._elements[cursor];\n }\n }\n\n function ReadableStreamReaderGenericInitialize(reader, stream) {\n reader._ownerReadableStream = stream;\n stream._reader = reader;\n if (stream._state === 'readable') {\n defaultReaderClosedPromiseInitialize(reader);\n }\n else if (stream._state === 'closed') {\n defaultReaderClosedPromiseInitializeAsResolved(reader);\n }\n else {\n defaultReaderClosedPromiseInitializeAsRejected(reader, stream._storedError);\n }\n }\n // A client of ReadableStreamDefaultReader and ReadableStreamBYOBReader may use these functions directly to bypass state\n // check.\n function ReadableStreamReaderGenericCancel(reader, reason) {\n const stream = reader._ownerReadableStream;\n return ReadableStreamCancel(stream, reason);\n }\n function ReadableStreamReaderGenericRelease(reader) {\n if (reader._ownerReadableStream._state === 'readable') {\n defaultReaderClosedPromiseReject(reader, new TypeError(`Reader was released and can no longer be used to monitor the stream's closedness`));\n }\n else {\n defaultReaderClosedPromiseResetToRejected(reader, new TypeError(`Reader was released and can no longer be used to monitor the stream's closedness`));\n }\n reader._ownerReadableStream._reader = undefined;\n reader._ownerReadableStream = undefined;\n }\n // Helper functions for the readers.\n function readerLockException(name) {\n return new TypeError('Cannot ' + name + ' a stream using a released reader');\n }\n // Helper functions for the ReadableStreamDefaultReader.\n function defaultReaderClosedPromiseInitialize(reader) {\n reader._closedPromise = newPromise((resolve, reject) => {\n reader._closedPromise_resolve = resolve;\n reader._closedPromise_reject = reject;\n });\n }\n function defaultReaderClosedPromiseInitializeAsRejected(reader, reason) {\n defaultReaderClosedPromiseInitialize(reader);\n defaultReaderClosedPromiseReject(reader, reason);\n }\n function defaultReaderClosedPromiseInitializeAsResolved(reader) {\n defaultReaderClosedPromiseInitialize(reader);\n defaultReaderClosedPromiseResolve(reader);\n }\n function defaultReaderClosedPromiseReject(reader, reason) {\n if (reader._closedPromise_reject === undefined) {\n return;\n }\n setPromiseIsHandledToTrue(reader._closedPromise);\n reader._closedPromise_reject(reason);\n reader._closedPromise_resolve = undefined;\n reader._closedPromise_reject = undefined;\n }\n function defaultReaderClosedPromiseResetToRejected(reader, reason) {\n defaultReaderClosedPromiseInitializeAsRejected(reader, reason);\n }\n function defaultReaderClosedPromiseResolve(reader) {\n if (reader._closedPromise_resolve === undefined) {\n return;\n }\n reader._closedPromise_resolve(undefined);\n reader._closedPromise_resolve = undefined;\n reader._closedPromise_reject = undefined;\n }\n\n const AbortSteps = 
SymbolPolyfill('[[AbortSteps]]');\n const ErrorSteps = SymbolPolyfill('[[ErrorSteps]]');\n const CancelSteps = SymbolPolyfill('[[CancelSteps]]');\n const PullSteps = SymbolPolyfill('[[PullSteps]]');\n\n /// \n // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/isFinite#Polyfill\n const NumberIsFinite = Number.isFinite || function (x) {\n return typeof x === 'number' && isFinite(x);\n };\n\n /// \n // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/trunc#Polyfill\n const MathTrunc = Math.trunc || function (v) {\n return v < 0 ? Math.ceil(v) : Math.floor(v);\n };\n\n // https://heycam.github.io/webidl/#idl-dictionaries\n function isDictionary(x) {\n return typeof x === 'object' || typeof x === 'function';\n }\n function assertDictionary(obj, context) {\n if (obj !== undefined && !isDictionary(obj)) {\n throw new TypeError(`${context} is not an object.`);\n }\n }\n // https://heycam.github.io/webidl/#idl-callback-functions\n function assertFunction(x, context) {\n if (typeof x !== 'function') {\n throw new TypeError(`${context} is not a function.`);\n }\n }\n // https://heycam.github.io/webidl/#idl-object\n function isObject(x) {\n return (typeof x === 'object' && x !== null) || typeof x === 'function';\n }\n function assertObject(x, context) {\n if (!isObject(x)) {\n throw new TypeError(`${context} is not an object.`);\n }\n }\n function assertRequiredArgument(x, position, context) {\n if (x === undefined) {\n throw new TypeError(`Parameter ${position} is required in '${context}'.`);\n }\n }\n function assertRequiredField(x, field, context) {\n if (x === undefined) {\n throw new TypeError(`${field} is required in '${context}'.`);\n }\n }\n // https://heycam.github.io/webidl/#idl-unrestricted-double\n function convertUnrestrictedDouble(value) {\n return Number(value);\n }\n function censorNegativeZero(x) {\n return x === 0 ? 
0 : x;\n }\n function integerPart(x) {\n return censorNegativeZero(MathTrunc(x));\n }\n // https://heycam.github.io/webidl/#idl-unsigned-long-long\n function convertUnsignedLongLongWithEnforceRange(value, context) {\n const lowerBound = 0;\n const upperBound = Number.MAX_SAFE_INTEGER;\n let x = Number(value);\n x = censorNegativeZero(x);\n if (!NumberIsFinite(x)) {\n throw new TypeError(`${context} is not a finite number`);\n }\n x = integerPart(x);\n if (x < lowerBound || x > upperBound) {\n throw new TypeError(`${context} is outside the accepted range of ${lowerBound} to ${upperBound}, inclusive`);\n }\n if (!NumberIsFinite(x) || x === 0) {\n return 0;\n }\n // TODO Use BigInt if supported?\n // let xBigInt = BigInt(integerPart(x));\n // xBigInt = BigInt.asUintN(64, xBigInt);\n // return Number(xBigInt);\n return x;\n }\n\n function assertReadableStream(x, context) {\n if (!IsReadableStream(x)) {\n throw new TypeError(`${context} is not a ReadableStream.`);\n }\n }\n\n // Abstract operations for the ReadableStream.\n function AcquireReadableStreamDefaultReader(stream) {\n return new ReadableStreamDefaultReader(stream);\n }\n // ReadableStream API exposed for controllers.\n function ReadableStreamAddReadRequest(stream, readRequest) {\n stream._reader._readRequests.push(readRequest);\n }\n function ReadableStreamFulfillReadRequest(stream, chunk, done) {\n const reader = stream._reader;\n const readRequest = reader._readRequests.shift();\n if (done) {\n readRequest._closeSteps();\n }\n else {\n readRequest._chunkSteps(chunk);\n }\n }\n function ReadableStreamGetNumReadRequests(stream) {\n return stream._reader._readRequests.length;\n }\n function ReadableStreamHasDefaultReader(stream) {\n const reader = stream._reader;\n if (reader === undefined) {\n return false;\n }\n if (!IsReadableStreamDefaultReader(reader)) {\n return false;\n }\n return true;\n }\n /**\n * A default reader vended by a {@link ReadableStream}.\n *\n * @public\n */\n class ReadableStreamDefaultReader {\n constructor(stream) {\n assertRequiredArgument(stream, 1, 'ReadableStreamDefaultReader');\n assertReadableStream(stream, 'First parameter');\n if (IsReadableStreamLocked(stream)) {\n throw new TypeError('This stream has already been locked for exclusive reading by another reader');\n }\n ReadableStreamReaderGenericInitialize(this, stream);\n this._readRequests = new SimpleQueue();\n }\n /**\n * Returns a promise that will be fulfilled when the stream becomes closed,\n * or rejected if the stream ever errors or the reader's lock is released before the stream finishes closing.\n */\n get closed() {\n if (!IsReadableStreamDefaultReader(this)) {\n return promiseRejectedWith(defaultReaderBrandCheckException('closed'));\n }\n return this._closedPromise;\n }\n /**\n * If the reader is active, behaves the same as {@link ReadableStream.cancel | stream.cancel(reason)}.\n */\n cancel(reason = undefined) {\n if (!IsReadableStreamDefaultReader(this)) {\n return promiseRejectedWith(defaultReaderBrandCheckException('cancel'));\n }\n if (this._ownerReadableStream === undefined) {\n return promiseRejectedWith(readerLockException('cancel'));\n }\n return ReadableStreamReaderGenericCancel(this, reason);\n }\n /**\n * Returns a promise that allows access to the next chunk from the stream's internal queue, if available.\n *\n * If reading a chunk causes the queue to become empty, more data will be pulled from the underlying source.\n */\n read() {\n if (!IsReadableStreamDefaultReader(this)) {\n return 
promiseRejectedWith(defaultReaderBrandCheckException('read'));\n }\n if (this._ownerReadableStream === undefined) {\n return promiseRejectedWith(readerLockException('read from'));\n }\n let resolvePromise;\n let rejectPromise;\n const promise = newPromise((resolve, reject) => {\n resolvePromise = resolve;\n rejectPromise = reject;\n });\n const readRequest = {\n _chunkSteps: chunk => resolvePromise({ value: chunk, done: false }),\n _closeSteps: () => resolvePromise({ value: undefined, done: true }),\n _errorSteps: e => rejectPromise(e)\n };\n ReadableStreamDefaultReaderRead(this, readRequest);\n return promise;\n }\n /**\n * Releases the reader's lock on the corresponding stream. After the lock is released, the reader is no longer active.\n * If the associated stream is errored when the lock is released, the reader will appear errored in the same way\n * from now on; otherwise, the reader will appear closed.\n *\n * A reader's lock cannot be released while it still has a pending read request, i.e., if a promise returned by\n * the reader's {@link ReadableStreamDefaultReader.read | read()} method has not yet been settled. Attempting to\n * do so will throw a `TypeError` and leave the reader locked to the stream.\n */\n releaseLock() {\n if (!IsReadableStreamDefaultReader(this)) {\n throw defaultReaderBrandCheckException('releaseLock');\n }\n if (this._ownerReadableStream === undefined) {\n return;\n }\n if (this._readRequests.length > 0) {\n throw new TypeError('Tried to release a reader lock when that reader has pending read() calls un-settled');\n }\n ReadableStreamReaderGenericRelease(this);\n }\n }\n Object.defineProperties(ReadableStreamDefaultReader.prototype, {\n cancel: { enumerable: true },\n read: { enumerable: true },\n releaseLock: { enumerable: true },\n closed: { enumerable: true }\n });\n if (typeof SymbolPolyfill.toStringTag === 'symbol') {\n Object.defineProperty(ReadableStreamDefaultReader.prototype, SymbolPolyfill.toStringTag, {\n value: 'ReadableStreamDefaultReader',\n configurable: true\n });\n }\n // Abstract operations for the readers.\n function IsReadableStreamDefaultReader(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, '_readRequests')) {\n return false;\n }\n return x instanceof ReadableStreamDefaultReader;\n }\n function ReadableStreamDefaultReaderRead(reader, readRequest) {\n const stream = reader._ownerReadableStream;\n stream._disturbed = true;\n if (stream._state === 'closed') {\n readRequest._closeSteps();\n }\n else if (stream._state === 'errored') {\n readRequest._errorSteps(stream._storedError);\n }\n else {\n stream._readableStreamController[PullSteps](readRequest);\n }\n }\n // Helper functions for the ReadableStreamDefaultReader.\n function defaultReaderBrandCheckException(name) {\n return new TypeError(`ReadableStreamDefaultReader.prototype.${name} can only be used on a ReadableStreamDefaultReader`);\n }\n\n /// \n /* eslint-disable @typescript-eslint/no-empty-function */\n const AsyncIteratorPrototype = Object.getPrototypeOf(Object.getPrototypeOf(async function* () { }).prototype);\n\n /// \n class ReadableStreamAsyncIteratorImpl {\n constructor(reader, preventCancel) {\n this._ongoingPromise = undefined;\n this._isFinished = false;\n this._reader = reader;\n this._preventCancel = preventCancel;\n }\n next() {\n const nextSteps = () => this._nextSteps();\n this._ongoingPromise = this._ongoingPromise ?\n transformPromiseWith(this._ongoingPromise, nextSteps, nextSteps) :\n nextSteps();\n return 
this._ongoingPromise;\n }\n return(value) {\n const returnSteps = () => this._returnSteps(value);\n return this._ongoingPromise ?\n transformPromiseWith(this._ongoingPromise, returnSteps, returnSteps) :\n returnSteps();\n }\n _nextSteps() {\n if (this._isFinished) {\n return Promise.resolve({ value: undefined, done: true });\n }\n const reader = this._reader;\n if (reader._ownerReadableStream === undefined) {\n return promiseRejectedWith(readerLockException('iterate'));\n }\n let resolvePromise;\n let rejectPromise;\n const promise = newPromise((resolve, reject) => {\n resolvePromise = resolve;\n rejectPromise = reject;\n });\n const readRequest = {\n _chunkSteps: chunk => {\n this._ongoingPromise = undefined;\n // This needs to be delayed by one microtask, otherwise we stop pulling too early which breaks a test.\n // FIXME Is this a bug in the specification, or in the test?\n queueMicrotask(() => resolvePromise({ value: chunk, done: false }));\n },\n _closeSteps: () => {\n this._ongoingPromise = undefined;\n this._isFinished = true;\n ReadableStreamReaderGenericRelease(reader);\n resolvePromise({ value: undefined, done: true });\n },\n _errorSteps: reason => {\n this._ongoingPromise = undefined;\n this._isFinished = true;\n ReadableStreamReaderGenericRelease(reader);\n rejectPromise(reason);\n }\n };\n ReadableStreamDefaultReaderRead(reader, readRequest);\n return promise;\n }\n _returnSteps(value) {\n if (this._isFinished) {\n return Promise.resolve({ value, done: true });\n }\n this._isFinished = true;\n const reader = this._reader;\n if (reader._ownerReadableStream === undefined) {\n return promiseRejectedWith(readerLockException('finish iterating'));\n }\n if (!this._preventCancel) {\n const result = ReadableStreamReaderGenericCancel(reader, value);\n ReadableStreamReaderGenericRelease(reader);\n return transformPromiseWith(result, () => ({ value, done: true }));\n }\n ReadableStreamReaderGenericRelease(reader);\n return promiseResolvedWith({ value, done: true });\n }\n }\n const ReadableStreamAsyncIteratorPrototype = {\n next() {\n if (!IsReadableStreamAsyncIterator(this)) {\n return promiseRejectedWith(streamAsyncIteratorBrandCheckException('next'));\n }\n return this._asyncIteratorImpl.next();\n },\n return(value) {\n if (!IsReadableStreamAsyncIterator(this)) {\n return promiseRejectedWith(streamAsyncIteratorBrandCheckException('return'));\n }\n return this._asyncIteratorImpl.return(value);\n }\n };\n if (AsyncIteratorPrototype !== undefined) {\n Object.setPrototypeOf(ReadableStreamAsyncIteratorPrototype, AsyncIteratorPrototype);\n }\n // Abstract operations for the ReadableStream.\n function AcquireReadableStreamAsyncIterator(stream, preventCancel) {\n const reader = AcquireReadableStreamDefaultReader(stream);\n const impl = new ReadableStreamAsyncIteratorImpl(reader, preventCancel);\n const iterator = Object.create(ReadableStreamAsyncIteratorPrototype);\n iterator._asyncIteratorImpl = impl;\n return iterator;\n }\n function IsReadableStreamAsyncIterator(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, '_asyncIteratorImpl')) {\n return false;\n }\n try {\n // noinspection SuspiciousTypeOfGuard\n return x._asyncIteratorImpl instanceof\n ReadableStreamAsyncIteratorImpl;\n }\n catch (_a) {\n return false;\n }\n }\n // Helper functions for the ReadableStream.\n function streamAsyncIteratorBrandCheckException(name) {\n return new TypeError(`ReadableStreamAsyncIterator.${name} can only be used on a ReadableSteamAsyncIterator`);\n }\n\n 
/// \n // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/isNaN#Polyfill\n const NumberIsNaN = Number.isNaN || function (x) {\n // eslint-disable-next-line no-self-compare\n return x !== x;\n };\n\n function CreateArrayFromList(elements) {\n // We use arrays to represent lists, so this is basically a no-op.\n // Do a slice though just in case we happen to depend on the unique-ness.\n return elements.slice();\n }\n function CopyDataBlockBytes(dest, destOffset, src, srcOffset, n) {\n new Uint8Array(dest).set(new Uint8Array(src, srcOffset, n), destOffset);\n }\n // Not implemented correctly\n function TransferArrayBuffer(O) {\n return O;\n }\n // Not implemented correctly\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n function IsDetachedBuffer(O) {\n return false;\n }\n function ArrayBufferSlice(buffer, begin, end) {\n // ArrayBuffer.prototype.slice is not available on IE10\n // https://www.caniuse.com/mdn-javascript_builtins_arraybuffer_slice\n if (buffer.slice) {\n return buffer.slice(begin, end);\n }\n const length = end - begin;\n const slice = new ArrayBuffer(length);\n CopyDataBlockBytes(slice, 0, buffer, begin, length);\n return slice;\n }\n\n function IsNonNegativeNumber(v) {\n if (typeof v !== 'number') {\n return false;\n }\n if (NumberIsNaN(v)) {\n return false;\n }\n if (v < 0) {\n return false;\n }\n return true;\n }\n function CloneAsUint8Array(O) {\n const buffer = ArrayBufferSlice(O.buffer, O.byteOffset, O.byteOffset + O.byteLength);\n return new Uint8Array(buffer);\n }\n\n function DequeueValue(container) {\n const pair = container._queue.shift();\n container._queueTotalSize -= pair.size;\n if (container._queueTotalSize < 0) {\n container._queueTotalSize = 0;\n }\n return pair.value;\n }\n function EnqueueValueWithSize(container, value, size) {\n if (!IsNonNegativeNumber(size) || size === Infinity) {\n throw new RangeError('Size must be a finite, non-NaN, non-negative number.');\n }\n container._queue.push({ value, size });\n container._queueTotalSize += size;\n }\n function PeekQueueValue(container) {\n const pair = container._queue.peek();\n return pair.value;\n }\n function ResetQueue(container) {\n container._queue = new SimpleQueue();\n container._queueTotalSize = 0;\n }\n\n /**\n * A pull-into request in a {@link ReadableByteStreamController}.\n *\n * @public\n */\n class ReadableStreamBYOBRequest {\n constructor() {\n throw new TypeError('Illegal constructor');\n }\n /**\n * Returns the view for writing in to, or `null` if the BYOB request has already been responded to.\n */\n get view() {\n if (!IsReadableStreamBYOBRequest(this)) {\n throw byobRequestBrandCheckException('view');\n }\n return this._view;\n }\n respond(bytesWritten) {\n if (!IsReadableStreamBYOBRequest(this)) {\n throw byobRequestBrandCheckException('respond');\n }\n assertRequiredArgument(bytesWritten, 1, 'respond');\n bytesWritten = convertUnsignedLongLongWithEnforceRange(bytesWritten, 'First parameter');\n if (this._associatedReadableByteStreamController === undefined) {\n throw new TypeError('This BYOB request has been invalidated');\n }\n if (IsDetachedBuffer(this._view.buffer)) ;\n ReadableByteStreamControllerRespond(this._associatedReadableByteStreamController, bytesWritten);\n }\n respondWithNewView(view) {\n if (!IsReadableStreamBYOBRequest(this)) {\n throw byobRequestBrandCheckException('respondWithNewView');\n }\n assertRequiredArgument(view, 1, 'respondWithNewView');\n if (!ArrayBuffer.isView(view)) {\n throw new TypeError('You can 
only respond with array buffer views');\n }\n if (this._associatedReadableByteStreamController === undefined) {\n throw new TypeError('This BYOB request has been invalidated');\n }\n if (IsDetachedBuffer(view.buffer)) ;\n ReadableByteStreamControllerRespondWithNewView(this._associatedReadableByteStreamController, view);\n }\n }\n Object.defineProperties(ReadableStreamBYOBRequest.prototype, {\n respond: { enumerable: true },\n respondWithNewView: { enumerable: true },\n view: { enumerable: true }\n });\n if (typeof SymbolPolyfill.toStringTag === 'symbol') {\n Object.defineProperty(ReadableStreamBYOBRequest.prototype, SymbolPolyfill.toStringTag, {\n value: 'ReadableStreamBYOBRequest',\n configurable: true\n });\n }\n /**\n * Allows control of a {@link ReadableStream | readable byte stream}'s state and internal queue.\n *\n * @public\n */\n class ReadableByteStreamController {\n constructor() {\n throw new TypeError('Illegal constructor');\n }\n /**\n * Returns the current BYOB pull request, or `null` if there isn't one.\n */\n get byobRequest() {\n if (!IsReadableByteStreamController(this)) {\n throw byteStreamControllerBrandCheckException('byobRequest');\n }\n return ReadableByteStreamControllerGetBYOBRequest(this);\n }\n /**\n * Returns the desired size to fill the controlled stream's internal queue. It can be negative, if the queue is\n * over-full. An underlying byte source ought to use this information to determine when and how to apply backpressure.\n */\n get desiredSize() {\n if (!IsReadableByteStreamController(this)) {\n throw byteStreamControllerBrandCheckException('desiredSize');\n }\n return ReadableByteStreamControllerGetDesiredSize(this);\n }\n /**\n * Closes the controlled readable stream. Consumers will still be able to read any previously-enqueued chunks from\n * the stream, but once those are read, the stream will become closed.\n */\n close() {\n if (!IsReadableByteStreamController(this)) {\n throw byteStreamControllerBrandCheckException('close');\n }\n if (this._closeRequested) {\n throw new TypeError('The stream has already been closed; do not close it again!');\n }\n const state = this._controlledReadableByteStream._state;\n if (state !== 'readable') {\n throw new TypeError(`The stream (in ${state} state) is not in the readable state and cannot be closed`);\n }\n ReadableByteStreamControllerClose(this);\n }\n enqueue(chunk) {\n if (!IsReadableByteStreamController(this)) {\n throw byteStreamControllerBrandCheckException('enqueue');\n }\n assertRequiredArgument(chunk, 1, 'enqueue');\n if (!ArrayBuffer.isView(chunk)) {\n throw new TypeError('chunk must be an array buffer view');\n }\n if (chunk.byteLength === 0) {\n throw new TypeError('chunk must have non-zero byteLength');\n }\n if (chunk.buffer.byteLength === 0) {\n throw new TypeError(`chunk's buffer must have non-zero byteLength`);\n }\n if (this._closeRequested) {\n throw new TypeError('stream is closed or draining');\n }\n const state = this._controlledReadableByteStream._state;\n if (state !== 'readable') {\n throw new TypeError(`The stream (in ${state} state) is not in the readable state and cannot be enqueued to`);\n }\n ReadableByteStreamControllerEnqueue(this, chunk);\n }\n /**\n * Errors the controlled readable stream, making all future interactions with it fail with the given error `e`.\n */\n error(e = undefined) {\n if (!IsReadableByteStreamController(this)) {\n throw byteStreamControllerBrandCheckException('error');\n }\n ReadableByteStreamControllerError(this, e);\n }\n /** @internal */\n 
[CancelSteps](reason) {\n ReadableByteStreamControllerClearPendingPullIntos(this);\n ResetQueue(this);\n const result = this._cancelAlgorithm(reason);\n ReadableByteStreamControllerClearAlgorithms(this);\n return result;\n }\n /** @internal */\n [PullSteps](readRequest) {\n const stream = this._controlledReadableByteStream;\n if (this._queueTotalSize > 0) {\n const entry = this._queue.shift();\n this._queueTotalSize -= entry.byteLength;\n ReadableByteStreamControllerHandleQueueDrain(this);\n const view = new Uint8Array(entry.buffer, entry.byteOffset, entry.byteLength);\n readRequest._chunkSteps(view);\n return;\n }\n const autoAllocateChunkSize = this._autoAllocateChunkSize;\n if (autoAllocateChunkSize !== undefined) {\n let buffer;\n try {\n buffer = new ArrayBuffer(autoAllocateChunkSize);\n }\n catch (bufferE) {\n readRequest._errorSteps(bufferE);\n return;\n }\n const pullIntoDescriptor = {\n buffer,\n bufferByteLength: autoAllocateChunkSize,\n byteOffset: 0,\n byteLength: autoAllocateChunkSize,\n bytesFilled: 0,\n elementSize: 1,\n viewConstructor: Uint8Array,\n readerType: 'default'\n };\n this._pendingPullIntos.push(pullIntoDescriptor);\n }\n ReadableStreamAddReadRequest(stream, readRequest);\n ReadableByteStreamControllerCallPullIfNeeded(this);\n }\n }\n Object.defineProperties(ReadableByteStreamController.prototype, {\n close: { enumerable: true },\n enqueue: { enumerable: true },\n error: { enumerable: true },\n byobRequest: { enumerable: true },\n desiredSize: { enumerable: true }\n });\n if (typeof SymbolPolyfill.toStringTag === 'symbol') {\n Object.defineProperty(ReadableByteStreamController.prototype, SymbolPolyfill.toStringTag, {\n value: 'ReadableByteStreamController',\n configurable: true\n });\n }\n // Abstract operations for the ReadableByteStreamController.\n function IsReadableByteStreamController(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, '_controlledReadableByteStream')) {\n return false;\n }\n return x instanceof ReadableByteStreamController;\n }\n function IsReadableStreamBYOBRequest(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, '_associatedReadableByteStreamController')) {\n return false;\n }\n return x instanceof ReadableStreamBYOBRequest;\n }\n function ReadableByteStreamControllerCallPullIfNeeded(controller) {\n const shouldPull = ReadableByteStreamControllerShouldCallPull(controller);\n if (!shouldPull) {\n return;\n }\n if (controller._pulling) {\n controller._pullAgain = true;\n return;\n }\n controller._pulling = true;\n // TODO: Test controller argument\n const pullPromise = controller._pullAlgorithm();\n uponPromise(pullPromise, () => {\n controller._pulling = false;\n if (controller._pullAgain) {\n controller._pullAgain = false;\n ReadableByteStreamControllerCallPullIfNeeded(controller);\n }\n }, e => {\n ReadableByteStreamControllerError(controller, e);\n });\n }\n function ReadableByteStreamControllerClearPendingPullIntos(controller) {\n ReadableByteStreamControllerInvalidateBYOBRequest(controller);\n controller._pendingPullIntos = new SimpleQueue();\n }\n function ReadableByteStreamControllerCommitPullIntoDescriptor(stream, pullIntoDescriptor) {\n let done = false;\n if (stream._state === 'closed') {\n done = true;\n }\n const filledView = ReadableByteStreamControllerConvertPullIntoDescriptor(pullIntoDescriptor);\n if (pullIntoDescriptor.readerType === 'default') {\n ReadableStreamFulfillReadRequest(stream, filledView, done);\n }\n else {\n 
ReadableStreamFulfillReadIntoRequest(stream, filledView, done);\n }\n }\n function ReadableByteStreamControllerConvertPullIntoDescriptor(pullIntoDescriptor) {\n const bytesFilled = pullIntoDescriptor.bytesFilled;\n const elementSize = pullIntoDescriptor.elementSize;\n return new pullIntoDescriptor.viewConstructor(pullIntoDescriptor.buffer, pullIntoDescriptor.byteOffset, bytesFilled / elementSize);\n }\n function ReadableByteStreamControllerEnqueueChunkToQueue(controller, buffer, byteOffset, byteLength) {\n controller._queue.push({ buffer, byteOffset, byteLength });\n controller._queueTotalSize += byteLength;\n }\n function ReadableByteStreamControllerFillPullIntoDescriptorFromQueue(controller, pullIntoDescriptor) {\n const elementSize = pullIntoDescriptor.elementSize;\n const currentAlignedBytes = pullIntoDescriptor.bytesFilled - pullIntoDescriptor.bytesFilled % elementSize;\n const maxBytesToCopy = Math.min(controller._queueTotalSize, pullIntoDescriptor.byteLength - pullIntoDescriptor.bytesFilled);\n const maxBytesFilled = pullIntoDescriptor.bytesFilled + maxBytesToCopy;\n const maxAlignedBytes = maxBytesFilled - maxBytesFilled % elementSize;\n let totalBytesToCopyRemaining = maxBytesToCopy;\n let ready = false;\n if (maxAlignedBytes > currentAlignedBytes) {\n totalBytesToCopyRemaining = maxAlignedBytes - pullIntoDescriptor.bytesFilled;\n ready = true;\n }\n const queue = controller._queue;\n while (totalBytesToCopyRemaining > 0) {\n const headOfQueue = queue.peek();\n const bytesToCopy = Math.min(totalBytesToCopyRemaining, headOfQueue.byteLength);\n const destStart = pullIntoDescriptor.byteOffset + pullIntoDescriptor.bytesFilled;\n CopyDataBlockBytes(pullIntoDescriptor.buffer, destStart, headOfQueue.buffer, headOfQueue.byteOffset, bytesToCopy);\n if (headOfQueue.byteLength === bytesToCopy) {\n queue.shift();\n }\n else {\n headOfQueue.byteOffset += bytesToCopy;\n headOfQueue.byteLength -= bytesToCopy;\n }\n controller._queueTotalSize -= bytesToCopy;\n ReadableByteStreamControllerFillHeadPullIntoDescriptor(controller, bytesToCopy, pullIntoDescriptor);\n totalBytesToCopyRemaining -= bytesToCopy;\n }\n return ready;\n }\n function ReadableByteStreamControllerFillHeadPullIntoDescriptor(controller, size, pullIntoDescriptor) {\n pullIntoDescriptor.bytesFilled += size;\n }\n function ReadableByteStreamControllerHandleQueueDrain(controller) {\n if (controller._queueTotalSize === 0 && controller._closeRequested) {\n ReadableByteStreamControllerClearAlgorithms(controller);\n ReadableStreamClose(controller._controlledReadableByteStream);\n }\n else {\n ReadableByteStreamControllerCallPullIfNeeded(controller);\n }\n }\n function ReadableByteStreamControllerInvalidateBYOBRequest(controller) {\n if (controller._byobRequest === null) {\n return;\n }\n controller._byobRequest._associatedReadableByteStreamController = undefined;\n controller._byobRequest._view = null;\n controller._byobRequest = null;\n }\n function ReadableByteStreamControllerProcessPullIntoDescriptorsUsingQueue(controller) {\n while (controller._pendingPullIntos.length > 0) {\n if (controller._queueTotalSize === 0) {\n return;\n }\n const pullIntoDescriptor = controller._pendingPullIntos.peek();\n if (ReadableByteStreamControllerFillPullIntoDescriptorFromQueue(controller, pullIntoDescriptor)) {\n ReadableByteStreamControllerShiftPendingPullInto(controller);\n ReadableByteStreamControllerCommitPullIntoDescriptor(controller._controlledReadableByteStream, pullIntoDescriptor);\n }\n }\n }\n function 
ReadableByteStreamControllerPullInto(controller, view, readIntoRequest) {\n const stream = controller._controlledReadableByteStream;\n let elementSize = 1;\n if (view.constructor !== DataView) {\n elementSize = view.constructor.BYTES_PER_ELEMENT;\n }\n const ctor = view.constructor;\n // try {\n const buffer = TransferArrayBuffer(view.buffer);\n // } catch (e) {\n // readIntoRequest._errorSteps(e);\n // return;\n // }\n const pullIntoDescriptor = {\n buffer,\n bufferByteLength: buffer.byteLength,\n byteOffset: view.byteOffset,\n byteLength: view.byteLength,\n bytesFilled: 0,\n elementSize,\n viewConstructor: ctor,\n readerType: 'byob'\n };\n if (controller._pendingPullIntos.length > 0) {\n controller._pendingPullIntos.push(pullIntoDescriptor);\n // No ReadableByteStreamControllerCallPullIfNeeded() call since:\n // - No change happens on desiredSize\n // - The source has already been notified of that there's at least 1 pending read(view)\n ReadableStreamAddReadIntoRequest(stream, readIntoRequest);\n return;\n }\n if (stream._state === 'closed') {\n const emptyView = new ctor(pullIntoDescriptor.buffer, pullIntoDescriptor.byteOffset, 0);\n readIntoRequest._closeSteps(emptyView);\n return;\n }\n if (controller._queueTotalSize > 0) {\n if (ReadableByteStreamControllerFillPullIntoDescriptorFromQueue(controller, pullIntoDescriptor)) {\n const filledView = ReadableByteStreamControllerConvertPullIntoDescriptor(pullIntoDescriptor);\n ReadableByteStreamControllerHandleQueueDrain(controller);\n readIntoRequest._chunkSteps(filledView);\n return;\n }\n if (controller._closeRequested) {\n const e = new TypeError('Insufficient bytes to fill elements in the given buffer');\n ReadableByteStreamControllerError(controller, e);\n readIntoRequest._errorSteps(e);\n return;\n }\n }\n controller._pendingPullIntos.push(pullIntoDescriptor);\n ReadableStreamAddReadIntoRequest(stream, readIntoRequest);\n ReadableByteStreamControllerCallPullIfNeeded(controller);\n }\n function ReadableByteStreamControllerRespondInClosedState(controller, firstDescriptor) {\n const stream = controller._controlledReadableByteStream;\n if (ReadableStreamHasBYOBReader(stream)) {\n while (ReadableStreamGetNumReadIntoRequests(stream) > 0) {\n const pullIntoDescriptor = ReadableByteStreamControllerShiftPendingPullInto(controller);\n ReadableByteStreamControllerCommitPullIntoDescriptor(stream, pullIntoDescriptor);\n }\n }\n }\n function ReadableByteStreamControllerRespondInReadableState(controller, bytesWritten, pullIntoDescriptor) {\n ReadableByteStreamControllerFillHeadPullIntoDescriptor(controller, bytesWritten, pullIntoDescriptor);\n if (pullIntoDescriptor.bytesFilled < pullIntoDescriptor.elementSize) {\n return;\n }\n ReadableByteStreamControllerShiftPendingPullInto(controller);\n const remainderSize = pullIntoDescriptor.bytesFilled % pullIntoDescriptor.elementSize;\n if (remainderSize > 0) {\n const end = pullIntoDescriptor.byteOffset + pullIntoDescriptor.bytesFilled;\n const remainder = ArrayBufferSlice(pullIntoDescriptor.buffer, end - remainderSize, end);\n ReadableByteStreamControllerEnqueueChunkToQueue(controller, remainder, 0, remainder.byteLength);\n }\n pullIntoDescriptor.bytesFilled -= remainderSize;\n ReadableByteStreamControllerCommitPullIntoDescriptor(controller._controlledReadableByteStream, pullIntoDescriptor);\n ReadableByteStreamControllerProcessPullIntoDescriptorsUsingQueue(controller);\n }\n function ReadableByteStreamControllerRespondInternal(controller, bytesWritten) {\n const firstDescriptor = 
controller._pendingPullIntos.peek();\n ReadableByteStreamControllerInvalidateBYOBRequest(controller);\n const state = controller._controlledReadableByteStream._state;\n if (state === 'closed') {\n ReadableByteStreamControllerRespondInClosedState(controller);\n }\n else {\n ReadableByteStreamControllerRespondInReadableState(controller, bytesWritten, firstDescriptor);\n }\n ReadableByteStreamControllerCallPullIfNeeded(controller);\n }\n function ReadableByteStreamControllerShiftPendingPullInto(controller) {\n const descriptor = controller._pendingPullIntos.shift();\n return descriptor;\n }\n function ReadableByteStreamControllerShouldCallPull(controller) {\n const stream = controller._controlledReadableByteStream;\n if (stream._state !== 'readable') {\n return false;\n }\n if (controller._closeRequested) {\n return false;\n }\n if (!controller._started) {\n return false;\n }\n if (ReadableStreamHasDefaultReader(stream) && ReadableStreamGetNumReadRequests(stream) > 0) {\n return true;\n }\n if (ReadableStreamHasBYOBReader(stream) && ReadableStreamGetNumReadIntoRequests(stream) > 0) {\n return true;\n }\n const desiredSize = ReadableByteStreamControllerGetDesiredSize(controller);\n if (desiredSize > 0) {\n return true;\n }\n return false;\n }\n function ReadableByteStreamControllerClearAlgorithms(controller) {\n controller._pullAlgorithm = undefined;\n controller._cancelAlgorithm = undefined;\n }\n // A client of ReadableByteStreamController may use these functions directly to bypass state check.\n function ReadableByteStreamControllerClose(controller) {\n const stream = controller._controlledReadableByteStream;\n if (controller._closeRequested || stream._state !== 'readable') {\n return;\n }\n if (controller._queueTotalSize > 0) {\n controller._closeRequested = true;\n return;\n }\n if (controller._pendingPullIntos.length > 0) {\n const firstPendingPullInto = controller._pendingPullIntos.peek();\n if (firstPendingPullInto.bytesFilled > 0) {\n const e = new TypeError('Insufficient bytes to fill elements in the given buffer');\n ReadableByteStreamControllerError(controller, e);\n throw e;\n }\n }\n ReadableByteStreamControllerClearAlgorithms(controller);\n ReadableStreamClose(stream);\n }\n function ReadableByteStreamControllerEnqueue(controller, chunk) {\n const stream = controller._controlledReadableByteStream;\n if (controller._closeRequested || stream._state !== 'readable') {\n return;\n }\n const buffer = chunk.buffer;\n const byteOffset = chunk.byteOffset;\n const byteLength = chunk.byteLength;\n const transferredBuffer = TransferArrayBuffer(buffer);\n if (controller._pendingPullIntos.length > 0) {\n const firstPendingPullInto = controller._pendingPullIntos.peek();\n if (IsDetachedBuffer(firstPendingPullInto.buffer)) ;\n firstPendingPullInto.buffer = TransferArrayBuffer(firstPendingPullInto.buffer);\n }\n ReadableByteStreamControllerInvalidateBYOBRequest(controller);\n if (ReadableStreamHasDefaultReader(stream)) {\n if (ReadableStreamGetNumReadRequests(stream) === 0) {\n ReadableByteStreamControllerEnqueueChunkToQueue(controller, transferredBuffer, byteOffset, byteLength);\n }\n else {\n if (controller._pendingPullIntos.length > 0) {\n ReadableByteStreamControllerShiftPendingPullInto(controller);\n }\n const transferredView = new Uint8Array(transferredBuffer, byteOffset, byteLength);\n ReadableStreamFulfillReadRequest(stream, transferredView, false);\n }\n }\n else if (ReadableStreamHasBYOBReader(stream)) {\n // TODO: Ideally in this branch detaching should happen only if the buffer is 
not consumed fully.\n ReadableByteStreamControllerEnqueueChunkToQueue(controller, transferredBuffer, byteOffset, byteLength);\n ReadableByteStreamControllerProcessPullIntoDescriptorsUsingQueue(controller);\n }\n else {\n ReadableByteStreamControllerEnqueueChunkToQueue(controller, transferredBuffer, byteOffset, byteLength);\n }\n ReadableByteStreamControllerCallPullIfNeeded(controller);\n }\n function ReadableByteStreamControllerError(controller, e) {\n const stream = controller._controlledReadableByteStream;\n if (stream._state !== 'readable') {\n return;\n }\n ReadableByteStreamControllerClearPendingPullIntos(controller);\n ResetQueue(controller);\n ReadableByteStreamControllerClearAlgorithms(controller);\n ReadableStreamError(stream, e);\n }\n function ReadableByteStreamControllerGetBYOBRequest(controller) {\n if (controller._byobRequest === null && controller._pendingPullIntos.length > 0) {\n const firstDescriptor = controller._pendingPullIntos.peek();\n const view = new Uint8Array(firstDescriptor.buffer, firstDescriptor.byteOffset + firstDescriptor.bytesFilled, firstDescriptor.byteLength - firstDescriptor.bytesFilled);\n const byobRequest = Object.create(ReadableStreamBYOBRequest.prototype);\n SetUpReadableStreamBYOBRequest(byobRequest, controller, view);\n controller._byobRequest = byobRequest;\n }\n return controller._byobRequest;\n }\n function ReadableByteStreamControllerGetDesiredSize(controller) {\n const state = controller._controlledReadableByteStream._state;\n if (state === 'errored') {\n return null;\n }\n if (state === 'closed') {\n return 0;\n }\n return controller._strategyHWM - controller._queueTotalSize;\n }\n function ReadableByteStreamControllerRespond(controller, bytesWritten) {\n const firstDescriptor = controller._pendingPullIntos.peek();\n const state = controller._controlledReadableByteStream._state;\n if (state === 'closed') {\n if (bytesWritten !== 0) {\n throw new TypeError('bytesWritten must be 0 when calling respond() on a closed stream');\n }\n }\n else {\n if (bytesWritten === 0) {\n throw new TypeError('bytesWritten must be greater than 0 when calling respond() on a readable stream');\n }\n if (firstDescriptor.bytesFilled + bytesWritten > firstDescriptor.byteLength) {\n throw new RangeError('bytesWritten out of range');\n }\n }\n firstDescriptor.buffer = TransferArrayBuffer(firstDescriptor.buffer);\n ReadableByteStreamControllerRespondInternal(controller, bytesWritten);\n }\n function ReadableByteStreamControllerRespondWithNewView(controller, view) {\n const firstDescriptor = controller._pendingPullIntos.peek();\n const state = controller._controlledReadableByteStream._state;\n if (state === 'closed') {\n if (view.byteLength !== 0) {\n throw new TypeError('The view\\'s length must be 0 when calling respondWithNewView() on a closed stream');\n }\n }\n else {\n if (view.byteLength === 0) {\n throw new TypeError('The view\\'s length must be greater than 0 when calling respondWithNewView() on a readable stream');\n }\n }\n if (firstDescriptor.byteOffset + firstDescriptor.bytesFilled !== view.byteOffset) {\n throw new RangeError('The region specified by view does not match byobRequest');\n }\n if (firstDescriptor.bufferByteLength !== view.buffer.byteLength) {\n throw new RangeError('The buffer of view has different capacity than byobRequest');\n }\n if (firstDescriptor.bytesFilled + view.byteLength > firstDescriptor.byteLength) {\n throw new RangeError('The region specified by view is larger than byobRequest');\n }\n const viewByteLength = view.byteLength;\n 
firstDescriptor.buffer = TransferArrayBuffer(view.buffer);\n ReadableByteStreamControllerRespondInternal(controller, viewByteLength);\n }\n function SetUpReadableByteStreamController(stream, controller, startAlgorithm, pullAlgorithm, cancelAlgorithm, highWaterMark, autoAllocateChunkSize) {\n controller._controlledReadableByteStream = stream;\n controller._pullAgain = false;\n controller._pulling = false;\n controller._byobRequest = null;\n // Need to set the slots so that the assert doesn't fire. In the spec the slots already exist implicitly.\n controller._queue = controller._queueTotalSize = undefined;\n ResetQueue(controller);\n controller._closeRequested = false;\n controller._started = false;\n controller._strategyHWM = highWaterMark;\n controller._pullAlgorithm = pullAlgorithm;\n controller._cancelAlgorithm = cancelAlgorithm;\n controller._autoAllocateChunkSize = autoAllocateChunkSize;\n controller._pendingPullIntos = new SimpleQueue();\n stream._readableStreamController = controller;\n const startResult = startAlgorithm();\n uponPromise(promiseResolvedWith(startResult), () => {\n controller._started = true;\n ReadableByteStreamControllerCallPullIfNeeded(controller);\n }, r => {\n ReadableByteStreamControllerError(controller, r);\n });\n }\n function SetUpReadableByteStreamControllerFromUnderlyingSource(stream, underlyingByteSource, highWaterMark) {\n const controller = Object.create(ReadableByteStreamController.prototype);\n let startAlgorithm = () => undefined;\n let pullAlgorithm = () => promiseResolvedWith(undefined);\n let cancelAlgorithm = () => promiseResolvedWith(undefined);\n if (underlyingByteSource.start !== undefined) {\n startAlgorithm = () => underlyingByteSource.start(controller);\n }\n if (underlyingByteSource.pull !== undefined) {\n pullAlgorithm = () => underlyingByteSource.pull(controller);\n }\n if (underlyingByteSource.cancel !== undefined) {\n cancelAlgorithm = reason => underlyingByteSource.cancel(reason);\n }\n const autoAllocateChunkSize = underlyingByteSource.autoAllocateChunkSize;\n if (autoAllocateChunkSize === 0) {\n throw new TypeError('autoAllocateChunkSize must be greater than 0');\n }\n SetUpReadableByteStreamController(stream, controller, startAlgorithm, pullAlgorithm, cancelAlgorithm, highWaterMark, autoAllocateChunkSize);\n }\n function SetUpReadableStreamBYOBRequest(request, controller, view) {\n request._associatedReadableByteStreamController = controller;\n request._view = view;\n }\n // Helper functions for the ReadableStreamBYOBRequest.\n function byobRequestBrandCheckException(name) {\n return new TypeError(`ReadableStreamBYOBRequest.prototype.${name} can only be used on a ReadableStreamBYOBRequest`);\n }\n // Helper functions for the ReadableByteStreamController.\n function byteStreamControllerBrandCheckException(name) {\n return new TypeError(`ReadableByteStreamController.prototype.${name} can only be used on a ReadableByteStreamController`);\n }\n\n // Abstract operations for the ReadableStream.\n function AcquireReadableStreamBYOBReader(stream) {\n return new ReadableStreamBYOBReader(stream);\n }\n // ReadableStream API exposed for controllers.\n function ReadableStreamAddReadIntoRequest(stream, readIntoRequest) {\n stream._reader._readIntoRequests.push(readIntoRequest);\n }\n function ReadableStreamFulfillReadIntoRequest(stream, chunk, done) {\n const reader = stream._reader;\n const readIntoRequest = reader._readIntoRequests.shift();\n if (done) {\n readIntoRequest._closeSteps(chunk);\n }\n else {\n readIntoRequest._chunkSteps(chunk);\n 
}\n }\n function ReadableStreamGetNumReadIntoRequests(stream) {\n return stream._reader._readIntoRequests.length;\n }\n function ReadableStreamHasBYOBReader(stream) {\n const reader = stream._reader;\n if (reader === undefined) {\n return false;\n }\n if (!IsReadableStreamBYOBReader(reader)) {\n return false;\n }\n return true;\n }\n /**\n * A BYOB reader vended by a {@link ReadableStream}.\n *\n * @public\n */\n class ReadableStreamBYOBReader {\n constructor(stream) {\n assertRequiredArgument(stream, 1, 'ReadableStreamBYOBReader');\n assertReadableStream(stream, 'First parameter');\n if (IsReadableStreamLocked(stream)) {\n throw new TypeError('This stream has already been locked for exclusive reading by another reader');\n }\n if (!IsReadableByteStreamController(stream._readableStreamController)) {\n throw new TypeError('Cannot construct a ReadableStreamBYOBReader for a stream not constructed with a byte ' +\n 'source');\n }\n ReadableStreamReaderGenericInitialize(this, stream);\n this._readIntoRequests = new SimpleQueue();\n }\n /**\n * Returns a promise that will be fulfilled when the stream becomes closed, or rejected if the stream ever errors or\n * the reader's lock is released before the stream finishes closing.\n */\n get closed() {\n if (!IsReadableStreamBYOBReader(this)) {\n return promiseRejectedWith(byobReaderBrandCheckException('closed'));\n }\n return this._closedPromise;\n }\n /**\n * If the reader is active, behaves the same as {@link ReadableStream.cancel | stream.cancel(reason)}.\n */\n cancel(reason = undefined) {\n if (!IsReadableStreamBYOBReader(this)) {\n return promiseRejectedWith(byobReaderBrandCheckException('cancel'));\n }\n if (this._ownerReadableStream === undefined) {\n return promiseRejectedWith(readerLockException('cancel'));\n }\n return ReadableStreamReaderGenericCancel(this, reason);\n }\n /**\n * Attempts to reads bytes into view, and returns a promise resolved with the result.\n *\n * If reading a chunk causes the queue to become empty, more data will be pulled from the underlying source.\n */\n read(view) {\n if (!IsReadableStreamBYOBReader(this)) {\n return promiseRejectedWith(byobReaderBrandCheckException('read'));\n }\n if (!ArrayBuffer.isView(view)) {\n return promiseRejectedWith(new TypeError('view must be an array buffer view'));\n }\n if (view.byteLength === 0) {\n return promiseRejectedWith(new TypeError('view must have non-zero byteLength'));\n }\n if (view.buffer.byteLength === 0) {\n return promiseRejectedWith(new TypeError(`view's buffer must have non-zero byteLength`));\n }\n if (IsDetachedBuffer(view.buffer)) ;\n if (this._ownerReadableStream === undefined) {\n return promiseRejectedWith(readerLockException('read from'));\n }\n let resolvePromise;\n let rejectPromise;\n const promise = newPromise((resolve, reject) => {\n resolvePromise = resolve;\n rejectPromise = reject;\n });\n const readIntoRequest = {\n _chunkSteps: chunk => resolvePromise({ value: chunk, done: false }),\n _closeSteps: chunk => resolvePromise({ value: chunk, done: true }),\n _errorSteps: e => rejectPromise(e)\n };\n ReadableStreamBYOBReaderRead(this, view, readIntoRequest);\n return promise;\n }\n /**\n * Releases the reader's lock on the corresponding stream. 
After the lock is released, the reader is no longer active.\n * If the associated stream is errored when the lock is released, the reader will appear errored in the same way\n * from now on; otherwise, the reader will appear closed.\n *\n * A reader's lock cannot be released while it still has a pending read request, i.e., if a promise returned by\n * the reader's {@link ReadableStreamBYOBReader.read | read()} method has not yet been settled. Attempting to\n * do so will throw a `TypeError` and leave the reader locked to the stream.\n */\n releaseLock() {\n if (!IsReadableStreamBYOBReader(this)) {\n throw byobReaderBrandCheckException('releaseLock');\n }\n if (this._ownerReadableStream === undefined) {\n return;\n }\n if (this._readIntoRequests.length > 0) {\n throw new TypeError('Tried to release a reader lock when that reader has pending read() calls un-settled');\n }\n ReadableStreamReaderGenericRelease(this);\n }\n }\n Object.defineProperties(ReadableStreamBYOBReader.prototype, {\n cancel: { enumerable: true },\n read: { enumerable: true },\n releaseLock: { enumerable: true },\n closed: { enumerable: true }\n });\n if (typeof SymbolPolyfill.toStringTag === 'symbol') {\n Object.defineProperty(ReadableStreamBYOBReader.prototype, SymbolPolyfill.toStringTag, {\n value: 'ReadableStreamBYOBReader',\n configurable: true\n });\n }\n // Abstract operations for the readers.\n function IsReadableStreamBYOBReader(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, '_readIntoRequests')) {\n return false;\n }\n return x instanceof ReadableStreamBYOBReader;\n }\n function ReadableStreamBYOBReaderRead(reader, view, readIntoRequest) {\n const stream = reader._ownerReadableStream;\n stream._disturbed = true;\n if (stream._state === 'errored') {\n readIntoRequest._errorSteps(stream._storedError);\n }\n else {\n ReadableByteStreamControllerPullInto(stream._readableStreamController, view, readIntoRequest);\n }\n }\n // Helper functions for the ReadableStreamBYOBReader.\n function byobReaderBrandCheckException(name) {\n return new TypeError(`ReadableStreamBYOBReader.prototype.${name} can only be used on a ReadableStreamBYOBReader`);\n }\n\n function ExtractHighWaterMark(strategy, defaultHWM) {\n const { highWaterMark } = strategy;\n if (highWaterMark === undefined) {\n return defaultHWM;\n }\n if (NumberIsNaN(highWaterMark) || highWaterMark < 0) {\n throw new RangeError('Invalid highWaterMark');\n }\n return highWaterMark;\n }\n function ExtractSizeAlgorithm(strategy) {\n const { size } = strategy;\n if (!size) {\n return () => 1;\n }\n return size;\n }\n\n function convertQueuingStrategy(init, context) {\n assertDictionary(init, context);\n const highWaterMark = init === null || init === void 0 ? void 0 : init.highWaterMark;\n const size = init === null || init === void 0 ? void 0 : init.size;\n return {\n highWaterMark: highWaterMark === undefined ? undefined : convertUnrestrictedDouble(highWaterMark),\n size: size === undefined ? undefined : convertQueuingStrategySize(size, `${context} has member 'size' that`)\n };\n }\n function convertQueuingStrategySize(fn, context) {\n assertFunction(fn, context);\n return chunk => convertUnrestrictedDouble(fn(chunk));\n }\n\n function convertUnderlyingSink(original, context) {\n assertDictionary(original, context);\n const abort = original === null || original === void 0 ? void 0 : original.abort;\n const close = original === null || original === void 0 ? 
void 0 : original.close;\n const start = original === null || original === void 0 ? void 0 : original.start;\n const type = original === null || original === void 0 ? void 0 : original.type;\n const write = original === null || original === void 0 ? void 0 : original.write;\n return {\n abort: abort === undefined ?\n undefined :\n convertUnderlyingSinkAbortCallback(abort, original, `${context} has member 'abort' that`),\n close: close === undefined ?\n undefined :\n convertUnderlyingSinkCloseCallback(close, original, `${context} has member 'close' that`),\n start: start === undefined ?\n undefined :\n convertUnderlyingSinkStartCallback(start, original, `${context} has member 'start' that`),\n write: write === undefined ?\n undefined :\n convertUnderlyingSinkWriteCallback(write, original, `${context} has member 'write' that`),\n type\n };\n }\n function convertUnderlyingSinkAbortCallback(fn, original, context) {\n assertFunction(fn, context);\n return (reason) => promiseCall(fn, original, [reason]);\n }\n function convertUnderlyingSinkCloseCallback(fn, original, context) {\n assertFunction(fn, context);\n return () => promiseCall(fn, original, []);\n }\n function convertUnderlyingSinkStartCallback(fn, original, context) {\n assertFunction(fn, context);\n return (controller) => reflectCall(fn, original, [controller]);\n }\n function convertUnderlyingSinkWriteCallback(fn, original, context) {\n assertFunction(fn, context);\n return (chunk, controller) => promiseCall(fn, original, [chunk, controller]);\n }\n\n function assertWritableStream(x, context) {\n if (!IsWritableStream(x)) {\n throw new TypeError(`${context} is not a WritableStream.`);\n }\n }\n\n function isAbortSignal(value) {\n if (typeof value !== 'object' || value === null) {\n return false;\n }\n try {\n return typeof value.aborted === 'boolean';\n }\n catch (_a) {\n // AbortSignal.prototype.aborted throws if its brand check fails\n return false;\n }\n }\n const supportsAbortController = typeof AbortController === 'function';\n /**\n * Construct a new AbortController, if supported by the platform.\n *\n * @internal\n */\n function createAbortController() {\n if (supportsAbortController) {\n return new AbortController();\n }\n return undefined;\n }\n\n /**\n * A writable stream represents a destination for data, into which you can write.\n *\n * @public\n */\n class WritableStream {\n constructor(rawUnderlyingSink = {}, rawStrategy = {}) {\n if (rawUnderlyingSink === undefined) {\n rawUnderlyingSink = null;\n }\n else {\n assertObject(rawUnderlyingSink, 'First parameter');\n }\n const strategy = convertQueuingStrategy(rawStrategy, 'Second parameter');\n const underlyingSink = convertUnderlyingSink(rawUnderlyingSink, 'First parameter');\n InitializeWritableStream(this);\n const type = underlyingSink.type;\n if (type !== undefined) {\n throw new RangeError('Invalid type is specified');\n }\n const sizeAlgorithm = ExtractSizeAlgorithm(strategy);\n const highWaterMark = ExtractHighWaterMark(strategy, 1);\n SetUpWritableStreamDefaultControllerFromUnderlyingSink(this, underlyingSink, highWaterMark, sizeAlgorithm);\n }\n /**\n * Returns whether or not the writable stream is locked to a writer.\n */\n get locked() {\n if (!IsWritableStream(this)) {\n throw streamBrandCheckException$2('locked');\n }\n return IsWritableStreamLocked(this);\n }\n /**\n * Aborts the stream, signaling that the producer can no longer successfully write to the stream and it is to be\n * immediately moved to an errored state, with any queued-up writes discarded. 
This will also execute any abort\n * mechanism of the underlying sink.\n *\n * The returned promise will fulfill if the stream shuts down successfully, or reject if the underlying sink signaled\n * that there was an error doing so. Additionally, it will reject with a `TypeError` (without attempting to cancel\n * the stream) if the stream is currently locked.\n */\n abort(reason = undefined) {\n if (!IsWritableStream(this)) {\n return promiseRejectedWith(streamBrandCheckException$2('abort'));\n }\n if (IsWritableStreamLocked(this)) {\n return promiseRejectedWith(new TypeError('Cannot abort a stream that already has a writer'));\n }\n return WritableStreamAbort(this, reason);\n }\n /**\n * Closes the stream. The underlying sink will finish processing any previously-written chunks, before invoking its\n * close behavior. During this time any further attempts to write will fail (without erroring the stream).\n *\n * The method returns a promise that will fulfill if all remaining chunks are successfully written and the stream\n * successfully closes, or rejects if an error is encountered during this process. Additionally, it will reject with\n * a `TypeError` (without attempting to cancel the stream) if the stream is currently locked.\n */\n close() {\n if (!IsWritableStream(this)) {\n return promiseRejectedWith(streamBrandCheckException$2('close'));\n }\n if (IsWritableStreamLocked(this)) {\n return promiseRejectedWith(new TypeError('Cannot close a stream that already has a writer'));\n }\n if (WritableStreamCloseQueuedOrInFlight(this)) {\n return promiseRejectedWith(new TypeError('Cannot close an already-closing stream'));\n }\n return WritableStreamClose(this);\n }\n /**\n * Creates a {@link WritableStreamDefaultWriter | writer} and locks the stream to the new writer. While the stream\n * is locked, no other writer can be acquired until this one is released.\n *\n * This functionality is especially useful for creating abstractions that desire the ability to write to a stream\n * without interruption or interleaving. 
By getting a writer for the stream, you can ensure nobody else can write at\n * the same time, which would cause the resulting written data to be unpredictable and probably useless.\n */\n getWriter() {\n if (!IsWritableStream(this)) {\n throw streamBrandCheckException$2('getWriter');\n }\n return AcquireWritableStreamDefaultWriter(this);\n }\n }\n Object.defineProperties(WritableStream.prototype, {\n abort: { enumerable: true },\n close: { enumerable: true },\n getWriter: { enumerable: true },\n locked: { enumerable: true }\n });\n if (typeof SymbolPolyfill.toStringTag === 'symbol') {\n Object.defineProperty(WritableStream.prototype, SymbolPolyfill.toStringTag, {\n value: 'WritableStream',\n configurable: true\n });\n }\n // Abstract operations for the WritableStream.\n function AcquireWritableStreamDefaultWriter(stream) {\n return new WritableStreamDefaultWriter(stream);\n }\n // Throws if and only if startAlgorithm throws.\n function CreateWritableStream(startAlgorithm, writeAlgorithm, closeAlgorithm, abortAlgorithm, highWaterMark = 1, sizeAlgorithm = () => 1) {\n const stream = Object.create(WritableStream.prototype);\n InitializeWritableStream(stream);\n const controller = Object.create(WritableStreamDefaultController.prototype);\n SetUpWritableStreamDefaultController(stream, controller, startAlgorithm, writeAlgorithm, closeAlgorithm, abortAlgorithm, highWaterMark, sizeAlgorithm);\n return stream;\n }\n function InitializeWritableStream(stream) {\n stream._state = 'writable';\n // The error that will be reported by new method calls once the state becomes errored. Only set when [[state]] is\n // 'erroring' or 'errored'. May be set to an undefined value.\n stream._storedError = undefined;\n stream._writer = undefined;\n // Initialize to undefined first because the constructor of the controller checks this\n // variable to validate the caller.\n stream._writableStreamController = undefined;\n // This queue is placed here instead of the writer class in order to allow for passing a writer to the next data\n // producer without waiting for the queued writes to finish.\n stream._writeRequests = new SimpleQueue();\n // Write requests are removed from _writeRequests when write() is called on the underlying sink. This prevents\n // them from being erroneously rejected on error. If a write() call is in-flight, the request is stored here.\n stream._inFlightWriteRequest = undefined;\n // The promise that was returned from writer.close(). Stored here because it may be fulfilled after the writer\n // has been detached.\n stream._closeRequest = undefined;\n // Close request is removed from _closeRequest when close() is called on the underlying sink. This prevents it\n // from being erroneously rejected on error. If a close() call is in-flight, the request is stored here.\n stream._inFlightCloseRequest = undefined;\n // The promise that was returned from writer.abort(). 
This may also be fulfilled after the writer has detached.\n stream._pendingAbortRequest = undefined;\n // The backpressure signal set by the controller.\n stream._backpressure = false;\n }\n function IsWritableStream(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, '_writableStreamController')) {\n return false;\n }\n return x instanceof WritableStream;\n }\n function IsWritableStreamLocked(stream) {\n if (stream._writer === undefined) {\n return false;\n }\n return true;\n }\n function WritableStreamAbort(stream, reason) {\n var _a;\n if (stream._state === 'closed' || stream._state === 'errored') {\n return promiseResolvedWith(undefined);\n }\n stream._writableStreamController._abortReason = reason;\n (_a = stream._writableStreamController._abortController) === null || _a === void 0 ? void 0 : _a.abort();\n // TypeScript narrows the type of `stream._state` down to 'writable' | 'erroring',\n // but it doesn't know that signaling abort runs author code that might have changed the state.\n // Widen the type again by casting to WritableStreamState.\n const state = stream._state;\n if (state === 'closed' || state === 'errored') {\n return promiseResolvedWith(undefined);\n }\n if (stream._pendingAbortRequest !== undefined) {\n return stream._pendingAbortRequest._promise;\n }\n let wasAlreadyErroring = false;\n if (state === 'erroring') {\n wasAlreadyErroring = true;\n // reason will not be used, so don't keep a reference to it.\n reason = undefined;\n }\n const promise = newPromise((resolve, reject) => {\n stream._pendingAbortRequest = {\n _promise: undefined,\n _resolve: resolve,\n _reject: reject,\n _reason: reason,\n _wasAlreadyErroring: wasAlreadyErroring\n };\n });\n stream._pendingAbortRequest._promise = promise;\n if (!wasAlreadyErroring) {\n WritableStreamStartErroring(stream, reason);\n }\n return promise;\n }\n function WritableStreamClose(stream) {\n const state = stream._state;\n if (state === 'closed' || state === 'errored') {\n return promiseRejectedWith(new TypeError(`The stream (in ${state} state) is not in the writable state and cannot be closed`));\n }\n const promise = newPromise((resolve, reject) => {\n const closeRequest = {\n _resolve: resolve,\n _reject: reject\n };\n stream._closeRequest = closeRequest;\n });\n const writer = stream._writer;\n if (writer !== undefined && stream._backpressure && state === 'writable') {\n defaultWriterReadyPromiseResolve(writer);\n }\n WritableStreamDefaultControllerClose(stream._writableStreamController);\n return promise;\n }\n // WritableStream API exposed for controllers.\n function WritableStreamAddWriteRequest(stream) {\n const promise = newPromise((resolve, reject) => {\n const writeRequest = {\n _resolve: resolve,\n _reject: reject\n };\n stream._writeRequests.push(writeRequest);\n });\n return promise;\n }\n function WritableStreamDealWithRejection(stream, error) {\n const state = stream._state;\n if (state === 'writable') {\n WritableStreamStartErroring(stream, error);\n return;\n }\n WritableStreamFinishErroring(stream);\n }\n function WritableStreamStartErroring(stream, reason) {\n const controller = stream._writableStreamController;\n stream._state = 'erroring';\n stream._storedError = reason;\n const writer = stream._writer;\n if (writer !== undefined) {\n WritableStreamDefaultWriterEnsureReadyPromiseRejected(writer, reason);\n }\n if (!WritableStreamHasOperationMarkedInFlight(stream) && controller._started) {\n WritableStreamFinishErroring(stream);\n }\n }\n function 
WritableStreamFinishErroring(stream) {\n stream._state = 'errored';\n stream._writableStreamController[ErrorSteps]();\n const storedError = stream._storedError;\n stream._writeRequests.forEach(writeRequest => {\n writeRequest._reject(storedError);\n });\n stream._writeRequests = new SimpleQueue();\n if (stream._pendingAbortRequest === undefined) {\n WritableStreamRejectCloseAndClosedPromiseIfNeeded(stream);\n return;\n }\n const abortRequest = stream._pendingAbortRequest;\n stream._pendingAbortRequest = undefined;\n if (abortRequest._wasAlreadyErroring) {\n abortRequest._reject(storedError);\n WritableStreamRejectCloseAndClosedPromiseIfNeeded(stream);\n return;\n }\n const promise = stream._writableStreamController[AbortSteps](abortRequest._reason);\n uponPromise(promise, () => {\n abortRequest._resolve();\n WritableStreamRejectCloseAndClosedPromiseIfNeeded(stream);\n }, (reason) => {\n abortRequest._reject(reason);\n WritableStreamRejectCloseAndClosedPromiseIfNeeded(stream);\n });\n }\n function WritableStreamFinishInFlightWrite(stream) {\n stream._inFlightWriteRequest._resolve(undefined);\n stream._inFlightWriteRequest = undefined;\n }\n function WritableStreamFinishInFlightWriteWithError(stream, error) {\n stream._inFlightWriteRequest._reject(error);\n stream._inFlightWriteRequest = undefined;\n WritableStreamDealWithRejection(stream, error);\n }\n function WritableStreamFinishInFlightClose(stream) {\n stream._inFlightCloseRequest._resolve(undefined);\n stream._inFlightCloseRequest = undefined;\n const state = stream._state;\n if (state === 'erroring') {\n // The error was too late to do anything, so it is ignored.\n stream._storedError = undefined;\n if (stream._pendingAbortRequest !== undefined) {\n stream._pendingAbortRequest._resolve();\n stream._pendingAbortRequest = undefined;\n }\n }\n stream._state = 'closed';\n const writer = stream._writer;\n if (writer !== undefined) {\n defaultWriterClosedPromiseResolve(writer);\n }\n }\n function WritableStreamFinishInFlightCloseWithError(stream, error) {\n stream._inFlightCloseRequest._reject(error);\n stream._inFlightCloseRequest = undefined;\n // Never execute sink abort() after sink close().\n if (stream._pendingAbortRequest !== undefined) {\n stream._pendingAbortRequest._reject(error);\n stream._pendingAbortRequest = undefined;\n }\n WritableStreamDealWithRejection(stream, error);\n }\n // TODO(ricea): Fix alphabetical order.\n function WritableStreamCloseQueuedOrInFlight(stream) {\n if (stream._closeRequest === undefined && stream._inFlightCloseRequest === undefined) {\n return false;\n }\n return true;\n }\n function WritableStreamHasOperationMarkedInFlight(stream) {\n if (stream._inFlightWriteRequest === undefined && stream._inFlightCloseRequest === undefined) {\n return false;\n }\n return true;\n }\n function WritableStreamMarkCloseRequestInFlight(stream) {\n stream._inFlightCloseRequest = stream._closeRequest;\n stream._closeRequest = undefined;\n }\n function WritableStreamMarkFirstWriteRequestInFlight(stream) {\n stream._inFlightWriteRequest = stream._writeRequests.shift();\n }\n function WritableStreamRejectCloseAndClosedPromiseIfNeeded(stream) {\n if (stream._closeRequest !== undefined) {\n stream._closeRequest._reject(stream._storedError);\n stream._closeRequest = undefined;\n }\n const writer = stream._writer;\n if (writer !== undefined) {\n defaultWriterClosedPromiseReject(writer, stream._storedError);\n }\n }\n function WritableStreamUpdateBackpressure(stream, backpressure) {\n const writer = stream._writer;\n if (writer 
!== undefined && backpressure !== stream._backpressure) {\n if (backpressure) {\n defaultWriterReadyPromiseReset(writer);\n }\n else {\n defaultWriterReadyPromiseResolve(writer);\n }\n }\n stream._backpressure = backpressure;\n }\n /**\n * A default writer vended by a {@link WritableStream}.\n *\n * @public\n */\n class WritableStreamDefaultWriter {\n constructor(stream) {\n assertRequiredArgument(stream, 1, 'WritableStreamDefaultWriter');\n assertWritableStream(stream, 'First parameter');\n if (IsWritableStreamLocked(stream)) {\n throw new TypeError('This stream has already been locked for exclusive writing by another writer');\n }\n this._ownerWritableStream = stream;\n stream._writer = this;\n const state = stream._state;\n if (state === 'writable') {\n if (!WritableStreamCloseQueuedOrInFlight(stream) && stream._backpressure) {\n defaultWriterReadyPromiseInitialize(this);\n }\n else {\n defaultWriterReadyPromiseInitializeAsResolved(this);\n }\n defaultWriterClosedPromiseInitialize(this);\n }\n else if (state === 'erroring') {\n defaultWriterReadyPromiseInitializeAsRejected(this, stream._storedError);\n defaultWriterClosedPromiseInitialize(this);\n }\n else if (state === 'closed') {\n defaultWriterReadyPromiseInitializeAsResolved(this);\n defaultWriterClosedPromiseInitializeAsResolved(this);\n }\n else {\n const storedError = stream._storedError;\n defaultWriterReadyPromiseInitializeAsRejected(this, storedError);\n defaultWriterClosedPromiseInitializeAsRejected(this, storedError);\n }\n }\n /**\n * Returns a promise that will be fulfilled when the stream becomes closed, or rejected if the stream ever errors or\n * the writer’s lock is released before the stream finishes closing.\n */\n get closed() {\n if (!IsWritableStreamDefaultWriter(this)) {\n return promiseRejectedWith(defaultWriterBrandCheckException('closed'));\n }\n return this._closedPromise;\n }\n /**\n * Returns the desired size to fill the stream’s internal queue. It can be negative, if the queue is over-full.\n * A producer can use this information to determine the right amount of data to write.\n *\n * It will be `null` if the stream cannot be successfully written to (due to either being errored, or having an abort\n * queued up). It will return zero if the stream is closed. And the getter will throw an exception if invoked when\n * the writer’s lock is released.\n */\n get desiredSize() {\n if (!IsWritableStreamDefaultWriter(this)) {\n throw defaultWriterBrandCheckException('desiredSize');\n }\n if (this._ownerWritableStream === undefined) {\n throw defaultWriterLockException('desiredSize');\n }\n return WritableStreamDefaultWriterGetDesiredSize(this);\n }\n /**\n * Returns a promise that will be fulfilled when the desired size to fill the stream’s internal queue transitions\n * from non-positive to positive, signaling that it is no longer applying backpressure. 
Once the desired size dips\n * back to zero or below, the getter will return a new promise that stays pending until the next transition.\n *\n * If the stream becomes errored or aborted, or the writer’s lock is released, the returned promise will become\n * rejected.\n */\n get ready() {\n if (!IsWritableStreamDefaultWriter(this)) {\n return promiseRejectedWith(defaultWriterBrandCheckException('ready'));\n }\n return this._readyPromise;\n }\n /**\n * If the reader is active, behaves the same as {@link WritableStream.abort | stream.abort(reason)}.\n */\n abort(reason = undefined) {\n if (!IsWritableStreamDefaultWriter(this)) {\n return promiseRejectedWith(defaultWriterBrandCheckException('abort'));\n }\n if (this._ownerWritableStream === undefined) {\n return promiseRejectedWith(defaultWriterLockException('abort'));\n }\n return WritableStreamDefaultWriterAbort(this, reason);\n }\n /**\n * If the reader is active, behaves the same as {@link WritableStream.close | stream.close()}.\n */\n close() {\n if (!IsWritableStreamDefaultWriter(this)) {\n return promiseRejectedWith(defaultWriterBrandCheckException('close'));\n }\n const stream = this._ownerWritableStream;\n if (stream === undefined) {\n return promiseRejectedWith(defaultWriterLockException('close'));\n }\n if (WritableStreamCloseQueuedOrInFlight(stream)) {\n return promiseRejectedWith(new TypeError('Cannot close an already-closing stream'));\n }\n return WritableStreamDefaultWriterClose(this);\n }\n /**\n * Releases the writer’s lock on the corresponding stream. After the lock is released, the writer is no longer active.\n * If the associated stream is errored when the lock is released, the writer will appear errored in the same way from\n * now on; otherwise, the writer will appear closed.\n *\n * Note that the lock can still be released even if some ongoing writes have not yet finished (i.e. 
even if the\n * promises returned from previous calls to {@link WritableStreamDefaultWriter.write | write()} have not yet settled).\n * It’s not necessary to hold the lock on the writer for the duration of the write; the lock instead simply prevents\n * other producers from writing in an interleaved manner.\n */\n releaseLock() {\n if (!IsWritableStreamDefaultWriter(this)) {\n throw defaultWriterBrandCheckException('releaseLock');\n }\n const stream = this._ownerWritableStream;\n if (stream === undefined) {\n return;\n }\n WritableStreamDefaultWriterRelease(this);\n }\n write(chunk = undefined) {\n if (!IsWritableStreamDefaultWriter(this)) {\n return promiseRejectedWith(defaultWriterBrandCheckException('write'));\n }\n if (this._ownerWritableStream === undefined) {\n return promiseRejectedWith(defaultWriterLockException('write to'));\n }\n return WritableStreamDefaultWriterWrite(this, chunk);\n }\n }\n Object.defineProperties(WritableStreamDefaultWriter.prototype, {\n abort: { enumerable: true },\n close: { enumerable: true },\n releaseLock: { enumerable: true },\n write: { enumerable: true },\n closed: { enumerable: true },\n desiredSize: { enumerable: true },\n ready: { enumerable: true }\n });\n if (typeof SymbolPolyfill.toStringTag === 'symbol') {\n Object.defineProperty(WritableStreamDefaultWriter.prototype, SymbolPolyfill.toStringTag, {\n value: 'WritableStreamDefaultWriter',\n configurable: true\n });\n }\n // Abstract operations for the WritableStreamDefaultWriter.\n function IsWritableStreamDefaultWriter(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, '_ownerWritableStream')) {\n return false;\n }\n return x instanceof WritableStreamDefaultWriter;\n }\n // A client of WritableStreamDefaultWriter may use these functions directly to bypass state check.\n function WritableStreamDefaultWriterAbort(writer, reason) {\n const stream = writer._ownerWritableStream;\n return WritableStreamAbort(stream, reason);\n }\n function WritableStreamDefaultWriterClose(writer) {\n const stream = writer._ownerWritableStream;\n return WritableStreamClose(stream);\n }\n function WritableStreamDefaultWriterCloseWithErrorPropagation(writer) {\n const stream = writer._ownerWritableStream;\n const state = stream._state;\n if (WritableStreamCloseQueuedOrInFlight(stream) || state === 'closed') {\n return promiseResolvedWith(undefined);\n }\n if (state === 'errored') {\n return promiseRejectedWith(stream._storedError);\n }\n return WritableStreamDefaultWriterClose(writer);\n }\n function WritableStreamDefaultWriterEnsureClosedPromiseRejected(writer, error) {\n if (writer._closedPromiseState === 'pending') {\n defaultWriterClosedPromiseReject(writer, error);\n }\n else {\n defaultWriterClosedPromiseResetToRejected(writer, error);\n }\n }\n function WritableStreamDefaultWriterEnsureReadyPromiseRejected(writer, error) {\n if (writer._readyPromiseState === 'pending') {\n defaultWriterReadyPromiseReject(writer, error);\n }\n else {\n defaultWriterReadyPromiseResetToRejected(writer, error);\n }\n }\n function WritableStreamDefaultWriterGetDesiredSize(writer) {\n const stream = writer._ownerWritableStream;\n const state = stream._state;\n if (state === 'errored' || state === 'erroring') {\n return null;\n }\n if (state === 'closed') {\n return 0;\n }\n return WritableStreamDefaultControllerGetDesiredSize(stream._writableStreamController);\n }\n function WritableStreamDefaultWriterRelease(writer) {\n const stream = writer._ownerWritableStream;\n const releasedError = 
new TypeError(`Writer was released and can no longer be used to monitor the stream's closedness`);\n WritableStreamDefaultWriterEnsureReadyPromiseRejected(writer, releasedError);\n // The state transitions to \"errored\" before the sink abort() method runs, but the writer.closed promise is not\n // rejected until afterwards. This means that simply testing state will not work.\n WritableStreamDefaultWriterEnsureClosedPromiseRejected(writer, releasedError);\n stream._writer = undefined;\n writer._ownerWritableStream = undefined;\n }\n function WritableStreamDefaultWriterWrite(writer, chunk) {\n const stream = writer._ownerWritableStream;\n const controller = stream._writableStreamController;\n const chunkSize = WritableStreamDefaultControllerGetChunkSize(controller, chunk);\n if (stream !== writer._ownerWritableStream) {\n return promiseRejectedWith(defaultWriterLockException('write to'));\n }\n const state = stream._state;\n if (state === 'errored') {\n return promiseRejectedWith(stream._storedError);\n }\n if (WritableStreamCloseQueuedOrInFlight(stream) || state === 'closed') {\n return promiseRejectedWith(new TypeError('The stream is closing or closed and cannot be written to'));\n }\n if (state === 'erroring') {\n return promiseRejectedWith(stream._storedError);\n }\n const promise = WritableStreamAddWriteRequest(stream);\n WritableStreamDefaultControllerWrite(controller, chunk, chunkSize);\n return promise;\n }\n const closeSentinel = {};\n /**\n * Allows control of a {@link WritableStream | writable stream}'s state and internal queue.\n *\n * @public\n */\n class WritableStreamDefaultController {\n constructor() {\n throw new TypeError('Illegal constructor');\n }\n /**\n * The reason which was passed to `WritableStream.abort(reason)` when the stream was aborted.\n *\n * @deprecated\n * This property has been removed from the specification, see https://github.com/whatwg/streams/pull/1177.\n * Use {@link WritableStreamDefaultController.signal}'s `reason` instead.\n */\n get abortReason() {\n if (!IsWritableStreamDefaultController(this)) {\n throw defaultControllerBrandCheckException$2('abortReason');\n }\n return this._abortReason;\n }\n /**\n * An `AbortSignal` that can be used to abort the pending write or close operation when the stream is aborted.\n */\n get signal() {\n if (!IsWritableStreamDefaultController(this)) {\n throw defaultControllerBrandCheckException$2('signal');\n }\n if (this._abortController === undefined) {\n // Older browsers or older Node versions may not support `AbortController` or `AbortSignal`.\n // We don't want to bundle and ship an `AbortController` polyfill together with our polyfill,\n // so instead we only implement support for `signal` if we find a global `AbortController` constructor.\n throw new TypeError('WritableStreamDefaultController.prototype.signal is not supported');\n }\n return this._abortController.signal;\n }\n /**\n * Closes the controlled writable stream, making all future interactions with it fail with the given error `e`.\n *\n * This method is rarely used, since usually it suffices to return a rejected promise from one of the underlying\n * sink's methods. 
However, it can be useful for suddenly shutting down a stream in response to an event outside the\n * normal lifecycle of interactions with the underlying sink.\n */\n error(e = undefined) {\n if (!IsWritableStreamDefaultController(this)) {\n throw defaultControllerBrandCheckException$2('error');\n }\n const state = this._controlledWritableStream._state;\n if (state !== 'writable') {\n // The stream is closed, errored or will be soon. The sink can't do anything useful if it gets an error here, so\n // just treat it as a no-op.\n return;\n }\n WritableStreamDefaultControllerError(this, e);\n }\n /** @internal */\n [AbortSteps](reason) {\n const result = this._abortAlgorithm(reason);\n WritableStreamDefaultControllerClearAlgorithms(this);\n return result;\n }\n /** @internal */\n [ErrorSteps]() {\n ResetQueue(this);\n }\n }\n Object.defineProperties(WritableStreamDefaultController.prototype, {\n abortReason: { enumerable: true },\n signal: { enumerable: true },\n error: { enumerable: true }\n });\n if (typeof SymbolPolyfill.toStringTag === 'symbol') {\n Object.defineProperty(WritableStreamDefaultController.prototype, SymbolPolyfill.toStringTag, {\n value: 'WritableStreamDefaultController',\n configurable: true\n });\n }\n // Abstract operations implementing interface required by the WritableStream.\n function IsWritableStreamDefaultController(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, '_controlledWritableStream')) {\n return false;\n }\n return x instanceof WritableStreamDefaultController;\n }\n function SetUpWritableStreamDefaultController(stream, controller, startAlgorithm, writeAlgorithm, closeAlgorithm, abortAlgorithm, highWaterMark, sizeAlgorithm) {\n controller._controlledWritableStream = stream;\n stream._writableStreamController = controller;\n // Need to set the slots so that the assert doesn't fire. 
In the spec the slots already exist implicitly.\n controller._queue = undefined;\n controller._queueTotalSize = undefined;\n ResetQueue(controller);\n controller._abortReason = undefined;\n controller._abortController = createAbortController();\n controller._started = false;\n controller._strategySizeAlgorithm = sizeAlgorithm;\n controller._strategyHWM = highWaterMark;\n controller._writeAlgorithm = writeAlgorithm;\n controller._closeAlgorithm = closeAlgorithm;\n controller._abortAlgorithm = abortAlgorithm;\n const backpressure = WritableStreamDefaultControllerGetBackpressure(controller);\n WritableStreamUpdateBackpressure(stream, backpressure);\n const startResult = startAlgorithm();\n const startPromise = promiseResolvedWith(startResult);\n uponPromise(startPromise, () => {\n controller._started = true;\n WritableStreamDefaultControllerAdvanceQueueIfNeeded(controller);\n }, r => {\n controller._started = true;\n WritableStreamDealWithRejection(stream, r);\n });\n }\n function SetUpWritableStreamDefaultControllerFromUnderlyingSink(stream, underlyingSink, highWaterMark, sizeAlgorithm) {\n const controller = Object.create(WritableStreamDefaultController.prototype);\n let startAlgorithm = () => undefined;\n let writeAlgorithm = () => promiseResolvedWith(undefined);\n let closeAlgorithm = () => promiseResolvedWith(undefined);\n let abortAlgorithm = () => promiseResolvedWith(undefined);\n if (underlyingSink.start !== undefined) {\n startAlgorithm = () => underlyingSink.start(controller);\n }\n if (underlyingSink.write !== undefined) {\n writeAlgorithm = chunk => underlyingSink.write(chunk, controller);\n }\n if (underlyingSink.close !== undefined) {\n closeAlgorithm = () => underlyingSink.close();\n }\n if (underlyingSink.abort !== undefined) {\n abortAlgorithm = reason => underlyingSink.abort(reason);\n }\n SetUpWritableStreamDefaultController(stream, controller, startAlgorithm, writeAlgorithm, closeAlgorithm, abortAlgorithm, highWaterMark, sizeAlgorithm);\n }\n // ClearAlgorithms may be called twice. 
Erroring the same stream in multiple ways will often result in redundant calls.\n function WritableStreamDefaultControllerClearAlgorithms(controller) {\n controller._writeAlgorithm = undefined;\n controller._closeAlgorithm = undefined;\n controller._abortAlgorithm = undefined;\n controller._strategySizeAlgorithm = undefined;\n }\n function WritableStreamDefaultControllerClose(controller) {\n EnqueueValueWithSize(controller, closeSentinel, 0);\n WritableStreamDefaultControllerAdvanceQueueIfNeeded(controller);\n }\n function WritableStreamDefaultControllerGetChunkSize(controller, chunk) {\n try {\n return controller._strategySizeAlgorithm(chunk);\n }\n catch (chunkSizeE) {\n WritableStreamDefaultControllerErrorIfNeeded(controller, chunkSizeE);\n return 1;\n }\n }\n function WritableStreamDefaultControllerGetDesiredSize(controller) {\n return controller._strategyHWM - controller._queueTotalSize;\n }\n function WritableStreamDefaultControllerWrite(controller, chunk, chunkSize) {\n try {\n EnqueueValueWithSize(controller, chunk, chunkSize);\n }\n catch (enqueueE) {\n WritableStreamDefaultControllerErrorIfNeeded(controller, enqueueE);\n return;\n }\n const stream = controller._controlledWritableStream;\n if (!WritableStreamCloseQueuedOrInFlight(stream) && stream._state === 'writable') {\n const backpressure = WritableStreamDefaultControllerGetBackpressure(controller);\n WritableStreamUpdateBackpressure(stream, backpressure);\n }\n WritableStreamDefaultControllerAdvanceQueueIfNeeded(controller);\n }\n // Abstract operations for the WritableStreamDefaultController.\n function WritableStreamDefaultControllerAdvanceQueueIfNeeded(controller) {\n const stream = controller._controlledWritableStream;\n if (!controller._started) {\n return;\n }\n if (stream._inFlightWriteRequest !== undefined) {\n return;\n }\n const state = stream._state;\n if (state === 'erroring') {\n WritableStreamFinishErroring(stream);\n return;\n }\n if (controller._queue.length === 0) {\n return;\n }\n const value = PeekQueueValue(controller);\n if (value === closeSentinel) {\n WritableStreamDefaultControllerProcessClose(controller);\n }\n else {\n WritableStreamDefaultControllerProcessWrite(controller, value);\n }\n }\n function WritableStreamDefaultControllerErrorIfNeeded(controller, error) {\n if (controller._controlledWritableStream._state === 'writable') {\n WritableStreamDefaultControllerError(controller, error);\n }\n }\n function WritableStreamDefaultControllerProcessClose(controller) {\n const stream = controller._controlledWritableStream;\n WritableStreamMarkCloseRequestInFlight(stream);\n DequeueValue(controller);\n const sinkClosePromise = controller._closeAlgorithm();\n WritableStreamDefaultControllerClearAlgorithms(controller);\n uponPromise(sinkClosePromise, () => {\n WritableStreamFinishInFlightClose(stream);\n }, reason => {\n WritableStreamFinishInFlightCloseWithError(stream, reason);\n });\n }\n function WritableStreamDefaultControllerProcessWrite(controller, chunk) {\n const stream = controller._controlledWritableStream;\n WritableStreamMarkFirstWriteRequestInFlight(stream);\n const sinkWritePromise = controller._writeAlgorithm(chunk);\n uponPromise(sinkWritePromise, () => {\n WritableStreamFinishInFlightWrite(stream);\n const state = stream._state;\n DequeueValue(controller);\n if (!WritableStreamCloseQueuedOrInFlight(stream) && state === 'writable') {\n const backpressure = WritableStreamDefaultControllerGetBackpressure(controller);\n WritableStreamUpdateBackpressure(stream, backpressure);\n }\n 
WritableStreamDefaultControllerAdvanceQueueIfNeeded(controller);\n }, reason => {\n if (stream._state === 'writable') {\n WritableStreamDefaultControllerClearAlgorithms(controller);\n }\n WritableStreamFinishInFlightWriteWithError(stream, reason);\n });\n }\n function WritableStreamDefaultControllerGetBackpressure(controller) {\n const desiredSize = WritableStreamDefaultControllerGetDesiredSize(controller);\n return desiredSize <= 0;\n }\n // A client of WritableStreamDefaultController may use these functions directly to bypass state check.\n function WritableStreamDefaultControllerError(controller, error) {\n const stream = controller._controlledWritableStream;\n WritableStreamDefaultControllerClearAlgorithms(controller);\n WritableStreamStartErroring(stream, error);\n }\n // Helper functions for the WritableStream.\n function streamBrandCheckException$2(name) {\n return new TypeError(`WritableStream.prototype.${name} can only be used on a WritableStream`);\n }\n // Helper functions for the WritableStreamDefaultController.\n function defaultControllerBrandCheckException$2(name) {\n return new TypeError(`WritableStreamDefaultController.prototype.${name} can only be used on a WritableStreamDefaultController`);\n }\n // Helper functions for the WritableStreamDefaultWriter.\n function defaultWriterBrandCheckException(name) {\n return new TypeError(`WritableStreamDefaultWriter.prototype.${name} can only be used on a WritableStreamDefaultWriter`);\n }\n function defaultWriterLockException(name) {\n return new TypeError('Cannot ' + name + ' a stream using a released writer');\n }\n function defaultWriterClosedPromiseInitialize(writer) {\n writer._closedPromise = newPromise((resolve, reject) => {\n writer._closedPromise_resolve = resolve;\n writer._closedPromise_reject = reject;\n writer._closedPromiseState = 'pending';\n });\n }\n function defaultWriterClosedPromiseInitializeAsRejected(writer, reason) {\n defaultWriterClosedPromiseInitialize(writer);\n defaultWriterClosedPromiseReject(writer, reason);\n }\n function defaultWriterClosedPromiseInitializeAsResolved(writer) {\n defaultWriterClosedPromiseInitialize(writer);\n defaultWriterClosedPromiseResolve(writer);\n }\n function defaultWriterClosedPromiseReject(writer, reason) {\n if (writer._closedPromise_reject === undefined) {\n return;\n }\n setPromiseIsHandledToTrue(writer._closedPromise);\n writer._closedPromise_reject(reason);\n writer._closedPromise_resolve = undefined;\n writer._closedPromise_reject = undefined;\n writer._closedPromiseState = 'rejected';\n }\n function defaultWriterClosedPromiseResetToRejected(writer, reason) {\n defaultWriterClosedPromiseInitializeAsRejected(writer, reason);\n }\n function defaultWriterClosedPromiseResolve(writer) {\n if (writer._closedPromise_resolve === undefined) {\n return;\n }\n writer._closedPromise_resolve(undefined);\n writer._closedPromise_resolve = undefined;\n writer._closedPromise_reject = undefined;\n writer._closedPromiseState = 'resolved';\n }\n function defaultWriterReadyPromiseInitialize(writer) {\n writer._readyPromise = newPromise((resolve, reject) => {\n writer._readyPromise_resolve = resolve;\n writer._readyPromise_reject = reject;\n });\n writer._readyPromiseState = 'pending';\n }\n function defaultWriterReadyPromiseInitializeAsRejected(writer, reason) {\n defaultWriterReadyPromiseInitialize(writer);\n defaultWriterReadyPromiseReject(writer, reason);\n }\n function defaultWriterReadyPromiseInitializeAsResolved(writer) {\n defaultWriterReadyPromiseInitialize(writer);\n 
defaultWriterReadyPromiseResolve(writer);\n }\n function defaultWriterReadyPromiseReject(writer, reason) {\n if (writer._readyPromise_reject === undefined) {\n return;\n }\n setPromiseIsHandledToTrue(writer._readyPromise);\n writer._readyPromise_reject(reason);\n writer._readyPromise_resolve = undefined;\n writer._readyPromise_reject = undefined;\n writer._readyPromiseState = 'rejected';\n }\n function defaultWriterReadyPromiseReset(writer) {\n defaultWriterReadyPromiseInitialize(writer);\n }\n function defaultWriterReadyPromiseResetToRejected(writer, reason) {\n defaultWriterReadyPromiseInitializeAsRejected(writer, reason);\n }\n function defaultWriterReadyPromiseResolve(writer) {\n if (writer._readyPromise_resolve === undefined) {\n return;\n }\n writer._readyPromise_resolve(undefined);\n writer._readyPromise_resolve = undefined;\n writer._readyPromise_reject = undefined;\n writer._readyPromiseState = 'fulfilled';\n }\n\n /// \n const NativeDOMException = typeof DOMException !== 'undefined' ? DOMException : undefined;\n\n /// \n function isDOMExceptionConstructor(ctor) {\n if (!(typeof ctor === 'function' || typeof ctor === 'object')) {\n return false;\n }\n try {\n new ctor();\n return true;\n }\n catch (_a) {\n return false;\n }\n }\n function createDOMExceptionPolyfill() {\n // eslint-disable-next-line no-shadow\n const ctor = function DOMException(message, name) {\n this.message = message || '';\n this.name = name || 'Error';\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n };\n ctor.prototype = Object.create(Error.prototype);\n Object.defineProperty(ctor.prototype, 'constructor', { value: ctor, writable: true, configurable: true });\n return ctor;\n }\n // eslint-disable-next-line no-redeclare\n const DOMException$1 = isDOMExceptionConstructor(NativeDOMException) ? 
NativeDOMException : createDOMExceptionPolyfill();\n\n function ReadableStreamPipeTo(source, dest, preventClose, preventAbort, preventCancel, signal) {\n const reader = AcquireReadableStreamDefaultReader(source);\n const writer = AcquireWritableStreamDefaultWriter(dest);\n source._disturbed = true;\n let shuttingDown = false;\n // This is used to keep track of the spec's requirement that we wait for ongoing writes during shutdown.\n let currentWrite = promiseResolvedWith(undefined);\n return newPromise((resolve, reject) => {\n let abortAlgorithm;\n if (signal !== undefined) {\n abortAlgorithm = () => {\n const error = new DOMException$1('Aborted', 'AbortError');\n const actions = [];\n if (!preventAbort) {\n actions.push(() => {\n if (dest._state === 'writable') {\n return WritableStreamAbort(dest, error);\n }\n return promiseResolvedWith(undefined);\n });\n }\n if (!preventCancel) {\n actions.push(() => {\n if (source._state === 'readable') {\n return ReadableStreamCancel(source, error);\n }\n return promiseResolvedWith(undefined);\n });\n }\n shutdownWithAction(() => Promise.all(actions.map(action => action())), true, error);\n };\n if (signal.aborted) {\n abortAlgorithm();\n return;\n }\n signal.addEventListener('abort', abortAlgorithm);\n }\n // Using reader and writer, read all chunks from this and write them to dest\n // - Backpressure must be enforced\n // - Shutdown must stop all activity\n function pipeLoop() {\n return newPromise((resolveLoop, rejectLoop) => {\n function next(done) {\n if (done) {\n resolveLoop();\n }\n else {\n // Use `PerformPromiseThen` instead of `uponPromise` to avoid\n // adding unnecessary `.catch(rethrowAssertionErrorRejection)` handlers\n PerformPromiseThen(pipeStep(), next, rejectLoop);\n }\n }\n next(false);\n });\n }\n function pipeStep() {\n if (shuttingDown) {\n return promiseResolvedWith(true);\n }\n return PerformPromiseThen(writer._readyPromise, () => {\n return newPromise((resolveRead, rejectRead) => {\n ReadableStreamDefaultReaderRead(reader, {\n _chunkSteps: chunk => {\n currentWrite = PerformPromiseThen(WritableStreamDefaultWriterWrite(writer, chunk), undefined, noop);\n resolveRead(false);\n },\n _closeSteps: () => resolveRead(true),\n _errorSteps: rejectRead\n });\n });\n });\n }\n // Errors must be propagated forward\n isOrBecomesErrored(source, reader._closedPromise, storedError => {\n if (!preventAbort) {\n shutdownWithAction(() => WritableStreamAbort(dest, storedError), true, storedError);\n }\n else {\n shutdown(true, storedError);\n }\n });\n // Errors must be propagated backward\n isOrBecomesErrored(dest, writer._closedPromise, storedError => {\n if (!preventCancel) {\n shutdownWithAction(() => ReadableStreamCancel(source, storedError), true, storedError);\n }\n else {\n shutdown(true, storedError);\n }\n });\n // Closing must be propagated forward\n isOrBecomesClosed(source, reader._closedPromise, () => {\n if (!preventClose) {\n shutdownWithAction(() => WritableStreamDefaultWriterCloseWithErrorPropagation(writer));\n }\n else {\n shutdown();\n }\n });\n // Closing must be propagated backward\n if (WritableStreamCloseQueuedOrInFlight(dest) || dest._state === 'closed') {\n const destClosed = new TypeError('the destination writable stream closed before all data could be piped to it');\n if (!preventCancel) {\n shutdownWithAction(() => ReadableStreamCancel(source, destClosed), true, destClosed);\n }\n else {\n shutdown(true, destClosed);\n }\n }\n setPromiseIsHandledToTrue(pipeLoop());\n function waitForWritesToFinish() {\n // Another 
write may have started while we were waiting on this currentWrite, so we have to be sure to wait\n // for that too.\n const oldCurrentWrite = currentWrite;\n return PerformPromiseThen(currentWrite, () => oldCurrentWrite !== currentWrite ? waitForWritesToFinish() : undefined);\n }\n function isOrBecomesErrored(stream, promise, action) {\n if (stream._state === 'errored') {\n action(stream._storedError);\n }\n else {\n uponRejection(promise, action);\n }\n }\n function isOrBecomesClosed(stream, promise, action) {\n if (stream._state === 'closed') {\n action();\n }\n else {\n uponFulfillment(promise, action);\n }\n }\n function shutdownWithAction(action, originalIsError, originalError) {\n if (shuttingDown) {\n return;\n }\n shuttingDown = true;\n if (dest._state === 'writable' && !WritableStreamCloseQueuedOrInFlight(dest)) {\n uponFulfillment(waitForWritesToFinish(), doTheRest);\n }\n else {\n doTheRest();\n }\n function doTheRest() {\n uponPromise(action(), () => finalize(originalIsError, originalError), newError => finalize(true, newError));\n }\n }\n function shutdown(isError, error) {\n if (shuttingDown) {\n return;\n }\n shuttingDown = true;\n if (dest._state === 'writable' && !WritableStreamCloseQueuedOrInFlight(dest)) {\n uponFulfillment(waitForWritesToFinish(), () => finalize(isError, error));\n }\n else {\n finalize(isError, error);\n }\n }\n function finalize(isError, error) {\n WritableStreamDefaultWriterRelease(writer);\n ReadableStreamReaderGenericRelease(reader);\n if (signal !== undefined) {\n signal.removeEventListener('abort', abortAlgorithm);\n }\n if (isError) {\n reject(error);\n }\n else {\n resolve(undefined);\n }\n }\n });\n }\n\n /**\n * Allows control of a {@link ReadableStream | readable stream}'s state and internal queue.\n *\n * @public\n */\n class ReadableStreamDefaultController {\n constructor() {\n throw new TypeError('Illegal constructor');\n }\n /**\n * Returns the desired size to fill the controlled stream's internal queue. It can be negative, if the queue is\n * over-full. An underlying source ought to use this information to determine when and how to apply backpressure.\n */\n get desiredSize() {\n if (!IsReadableStreamDefaultController(this)) {\n throw defaultControllerBrandCheckException$1('desiredSize');\n }\n return ReadableStreamDefaultControllerGetDesiredSize(this);\n }\n /**\n * Closes the controlled readable stream. 
Consumers will still be able to read any previously-enqueued chunks from\n * the stream, but once those are read, the stream will become closed.\n */\n close() {\n if (!IsReadableStreamDefaultController(this)) {\n throw defaultControllerBrandCheckException$1('close');\n }\n if (!ReadableStreamDefaultControllerCanCloseOrEnqueue(this)) {\n throw new TypeError('The stream is not in a state that permits close');\n }\n ReadableStreamDefaultControllerClose(this);\n }\n enqueue(chunk = undefined) {\n if (!IsReadableStreamDefaultController(this)) {\n throw defaultControllerBrandCheckException$1('enqueue');\n }\n if (!ReadableStreamDefaultControllerCanCloseOrEnqueue(this)) {\n throw new TypeError('The stream is not in a state that permits enqueue');\n }\n return ReadableStreamDefaultControllerEnqueue(this, chunk);\n }\n /**\n * Errors the controlled readable stream, making all future interactions with it fail with the given error `e`.\n */\n error(e = undefined) {\n if (!IsReadableStreamDefaultController(this)) {\n throw defaultControllerBrandCheckException$1('error');\n }\n ReadableStreamDefaultControllerError(this, e);\n }\n /** @internal */\n [CancelSteps](reason) {\n ResetQueue(this);\n const result = this._cancelAlgorithm(reason);\n ReadableStreamDefaultControllerClearAlgorithms(this);\n return result;\n }\n /** @internal */\n [PullSteps](readRequest) {\n const stream = this._controlledReadableStream;\n if (this._queue.length > 0) {\n const chunk = DequeueValue(this);\n if (this._closeRequested && this._queue.length === 0) {\n ReadableStreamDefaultControllerClearAlgorithms(this);\n ReadableStreamClose(stream);\n }\n else {\n ReadableStreamDefaultControllerCallPullIfNeeded(this);\n }\n readRequest._chunkSteps(chunk);\n }\n else {\n ReadableStreamAddReadRequest(stream, readRequest);\n ReadableStreamDefaultControllerCallPullIfNeeded(this);\n }\n }\n }\n Object.defineProperties(ReadableStreamDefaultController.prototype, {\n close: { enumerable: true },\n enqueue: { enumerable: true },\n error: { enumerable: true },\n desiredSize: { enumerable: true }\n });\n if (typeof SymbolPolyfill.toStringTag === 'symbol') {\n Object.defineProperty(ReadableStreamDefaultController.prototype, SymbolPolyfill.toStringTag, {\n value: 'ReadableStreamDefaultController',\n configurable: true\n });\n }\n // Abstract operations for the ReadableStreamDefaultController.\n function IsReadableStreamDefaultController(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, '_controlledReadableStream')) {\n return false;\n }\n return x instanceof ReadableStreamDefaultController;\n }\n function ReadableStreamDefaultControllerCallPullIfNeeded(controller) {\n const shouldPull = ReadableStreamDefaultControllerShouldCallPull(controller);\n if (!shouldPull) {\n return;\n }\n if (controller._pulling) {\n controller._pullAgain = true;\n return;\n }\n controller._pulling = true;\n const pullPromise = controller._pullAlgorithm();\n uponPromise(pullPromise, () => {\n controller._pulling = false;\n if (controller._pullAgain) {\n controller._pullAgain = false;\n ReadableStreamDefaultControllerCallPullIfNeeded(controller);\n }\n }, e => {\n ReadableStreamDefaultControllerError(controller, e);\n });\n }\n function ReadableStreamDefaultControllerShouldCallPull(controller) {\n const stream = controller._controlledReadableStream;\n if (!ReadableStreamDefaultControllerCanCloseOrEnqueue(controller)) {\n return false;\n }\n if (!controller._started) {\n return false;\n }\n if (IsReadableStreamLocked(stream) 
&& ReadableStreamGetNumReadRequests(stream) > 0) {\n return true;\n }\n const desiredSize = ReadableStreamDefaultControllerGetDesiredSize(controller);\n if (desiredSize > 0) {\n return true;\n }\n return false;\n }\n function ReadableStreamDefaultControllerClearAlgorithms(controller) {\n controller._pullAlgorithm = undefined;\n controller._cancelAlgorithm = undefined;\n controller._strategySizeAlgorithm = undefined;\n }\n // A client of ReadableStreamDefaultController may use these functions directly to bypass state check.\n function ReadableStreamDefaultControllerClose(controller) {\n if (!ReadableStreamDefaultControllerCanCloseOrEnqueue(controller)) {\n return;\n }\n const stream = controller._controlledReadableStream;\n controller._closeRequested = true;\n if (controller._queue.length === 0) {\n ReadableStreamDefaultControllerClearAlgorithms(controller);\n ReadableStreamClose(stream);\n }\n }\n function ReadableStreamDefaultControllerEnqueue(controller, chunk) {\n if (!ReadableStreamDefaultControllerCanCloseOrEnqueue(controller)) {\n return;\n }\n const stream = controller._controlledReadableStream;\n if (IsReadableStreamLocked(stream) && ReadableStreamGetNumReadRequests(stream) > 0) {\n ReadableStreamFulfillReadRequest(stream, chunk, false);\n }\n else {\n let chunkSize;\n try {\n chunkSize = controller._strategySizeAlgorithm(chunk);\n }\n catch (chunkSizeE) {\n ReadableStreamDefaultControllerError(controller, chunkSizeE);\n throw chunkSizeE;\n }\n try {\n EnqueueValueWithSize(controller, chunk, chunkSize);\n }\n catch (enqueueE) {\n ReadableStreamDefaultControllerError(controller, enqueueE);\n throw enqueueE;\n }\n }\n ReadableStreamDefaultControllerCallPullIfNeeded(controller);\n }\n function ReadableStreamDefaultControllerError(controller, e) {\n const stream = controller._controlledReadableStream;\n if (stream._state !== 'readable') {\n return;\n }\n ResetQueue(controller);\n ReadableStreamDefaultControllerClearAlgorithms(controller);\n ReadableStreamError(stream, e);\n }\n function ReadableStreamDefaultControllerGetDesiredSize(controller) {\n const state = controller._controlledReadableStream._state;\n if (state === 'errored') {\n return null;\n }\n if (state === 'closed') {\n return 0;\n }\n return controller._strategyHWM - controller._queueTotalSize;\n }\n // This is used in the implementation of TransformStream.\n function ReadableStreamDefaultControllerHasBackpressure(controller) {\n if (ReadableStreamDefaultControllerShouldCallPull(controller)) {\n return false;\n }\n return true;\n }\n function ReadableStreamDefaultControllerCanCloseOrEnqueue(controller) {\n const state = controller._controlledReadableStream._state;\n if (!controller._closeRequested && state === 'readable') {\n return true;\n }\n return false;\n }\n function SetUpReadableStreamDefaultController(stream, controller, startAlgorithm, pullAlgorithm, cancelAlgorithm, highWaterMark, sizeAlgorithm) {\n controller._controlledReadableStream = stream;\n controller._queue = undefined;\n controller._queueTotalSize = undefined;\n ResetQueue(controller);\n controller._started = false;\n controller._closeRequested = false;\n controller._pullAgain = false;\n controller._pulling = false;\n controller._strategySizeAlgorithm = sizeAlgorithm;\n controller._strategyHWM = highWaterMark;\n controller._pullAlgorithm = pullAlgorithm;\n controller._cancelAlgorithm = cancelAlgorithm;\n stream._readableStreamController = controller;\n const startResult = startAlgorithm();\n uponPromise(promiseResolvedWith(startResult), () => {\n 
controller._started = true;\n ReadableStreamDefaultControllerCallPullIfNeeded(controller);\n }, r => {\n ReadableStreamDefaultControllerError(controller, r);\n });\n }\n function SetUpReadableStreamDefaultControllerFromUnderlyingSource(stream, underlyingSource, highWaterMark, sizeAlgorithm) {\n const controller = Object.create(ReadableStreamDefaultController.prototype);\n let startAlgorithm = () => undefined;\n let pullAlgorithm = () => promiseResolvedWith(undefined);\n let cancelAlgorithm = () => promiseResolvedWith(undefined);\n if (underlyingSource.start !== undefined) {\n startAlgorithm = () => underlyingSource.start(controller);\n }\n if (underlyingSource.pull !== undefined) {\n pullAlgorithm = () => underlyingSource.pull(controller);\n }\n if (underlyingSource.cancel !== undefined) {\n cancelAlgorithm = reason => underlyingSource.cancel(reason);\n }\n SetUpReadableStreamDefaultController(stream, controller, startAlgorithm, pullAlgorithm, cancelAlgorithm, highWaterMark, sizeAlgorithm);\n }\n // Helper functions for the ReadableStreamDefaultController.\n function defaultControllerBrandCheckException$1(name) {\n return new TypeError(`ReadableStreamDefaultController.prototype.${name} can only be used on a ReadableStreamDefaultController`);\n }\n\n function ReadableStreamTee(stream, cloneForBranch2) {\n if (IsReadableByteStreamController(stream._readableStreamController)) {\n return ReadableByteStreamTee(stream);\n }\n return ReadableStreamDefaultTee(stream);\n }\n function ReadableStreamDefaultTee(stream, cloneForBranch2) {\n const reader = AcquireReadableStreamDefaultReader(stream);\n let reading = false;\n let readAgain = false;\n let canceled1 = false;\n let canceled2 = false;\n let reason1;\n let reason2;\n let branch1;\n let branch2;\n let resolveCancelPromise;\n const cancelPromise = newPromise(resolve => {\n resolveCancelPromise = resolve;\n });\n function pullAlgorithm() {\n if (reading) {\n readAgain = true;\n return promiseResolvedWith(undefined);\n }\n reading = true;\n const readRequest = {\n _chunkSteps: chunk => {\n // This needs to be delayed a microtask because it takes at least a microtask to detect errors (using\n // reader._closedPromise below), and we want errors in stream to error both branches immediately. 
We cannot let\n // successful synchronously-available reads get ahead of asynchronously-available errors.\n queueMicrotask(() => {\n readAgain = false;\n const chunk1 = chunk;\n const chunk2 = chunk;\n // There is no way to access the cloning code right now in the reference implementation.\n // If we add one then we'll need an implementation for serializable objects.\n // if (!canceled2 && cloneForBranch2) {\n // chunk2 = StructuredDeserialize(StructuredSerialize(chunk2));\n // }\n if (!canceled1) {\n ReadableStreamDefaultControllerEnqueue(branch1._readableStreamController, chunk1);\n }\n if (!canceled2) {\n ReadableStreamDefaultControllerEnqueue(branch2._readableStreamController, chunk2);\n }\n reading = false;\n if (readAgain) {\n pullAlgorithm();\n }\n });\n },\n _closeSteps: () => {\n reading = false;\n if (!canceled1) {\n ReadableStreamDefaultControllerClose(branch1._readableStreamController);\n }\n if (!canceled2) {\n ReadableStreamDefaultControllerClose(branch2._readableStreamController);\n }\n if (!canceled1 || !canceled2) {\n resolveCancelPromise(undefined);\n }\n },\n _errorSteps: () => {\n reading = false;\n }\n };\n ReadableStreamDefaultReaderRead(reader, readRequest);\n return promiseResolvedWith(undefined);\n }\n function cancel1Algorithm(reason) {\n canceled1 = true;\n reason1 = reason;\n if (canceled2) {\n const compositeReason = CreateArrayFromList([reason1, reason2]);\n const cancelResult = ReadableStreamCancel(stream, compositeReason);\n resolveCancelPromise(cancelResult);\n }\n return cancelPromise;\n }\n function cancel2Algorithm(reason) {\n canceled2 = true;\n reason2 = reason;\n if (canceled1) {\n const compositeReason = CreateArrayFromList([reason1, reason2]);\n const cancelResult = ReadableStreamCancel(stream, compositeReason);\n resolveCancelPromise(cancelResult);\n }\n return cancelPromise;\n }\n function startAlgorithm() {\n // do nothing\n }\n branch1 = CreateReadableStream(startAlgorithm, pullAlgorithm, cancel1Algorithm);\n branch2 = CreateReadableStream(startAlgorithm, pullAlgorithm, cancel2Algorithm);\n uponRejection(reader._closedPromise, (r) => {\n ReadableStreamDefaultControllerError(branch1._readableStreamController, r);\n ReadableStreamDefaultControllerError(branch2._readableStreamController, r);\n if (!canceled1 || !canceled2) {\n resolveCancelPromise(undefined);\n }\n });\n return [branch1, branch2];\n }\n function ReadableByteStreamTee(stream) {\n let reader = AcquireReadableStreamDefaultReader(stream);\n let reading = false;\n let readAgainForBranch1 = false;\n let readAgainForBranch2 = false;\n let canceled1 = false;\n let canceled2 = false;\n let reason1;\n let reason2;\n let branch1;\n let branch2;\n let resolveCancelPromise;\n const cancelPromise = newPromise(resolve => {\n resolveCancelPromise = resolve;\n });\n function forwardReaderError(thisReader) {\n uponRejection(thisReader._closedPromise, r => {\n if (thisReader !== reader) {\n return;\n }\n ReadableByteStreamControllerError(branch1._readableStreamController, r);\n ReadableByteStreamControllerError(branch2._readableStreamController, r);\n if (!canceled1 || !canceled2) {\n resolveCancelPromise(undefined);\n }\n });\n }\n function pullWithDefaultReader() {\n if (IsReadableStreamBYOBReader(reader)) {\n ReadableStreamReaderGenericRelease(reader);\n reader = AcquireReadableStreamDefaultReader(stream);\n forwardReaderError(reader);\n }\n const readRequest = {\n _chunkSteps: chunk => {\n // This needs to be delayed a microtask because it takes at least a microtask to detect errors (using\n // 
reader._closedPromise below), and we want errors in stream to error both branches immediately. We cannot let\n // successful synchronously-available reads get ahead of asynchronously-available errors.\n queueMicrotask(() => {\n readAgainForBranch1 = false;\n readAgainForBranch2 = false;\n const chunk1 = chunk;\n let chunk2 = chunk;\n if (!canceled1 && !canceled2) {\n try {\n chunk2 = CloneAsUint8Array(chunk);\n }\n catch (cloneE) {\n ReadableByteStreamControllerError(branch1._readableStreamController, cloneE);\n ReadableByteStreamControllerError(branch2._readableStreamController, cloneE);\n resolveCancelPromise(ReadableStreamCancel(stream, cloneE));\n return;\n }\n }\n if (!canceled1) {\n ReadableByteStreamControllerEnqueue(branch1._readableStreamController, chunk1);\n }\n if (!canceled2) {\n ReadableByteStreamControllerEnqueue(branch2._readableStreamController, chunk2);\n }\n reading = false;\n if (readAgainForBranch1) {\n pull1Algorithm();\n }\n else if (readAgainForBranch2) {\n pull2Algorithm();\n }\n });\n },\n _closeSteps: () => {\n reading = false;\n if (!canceled1) {\n ReadableByteStreamControllerClose(branch1._readableStreamController);\n }\n if (!canceled2) {\n ReadableByteStreamControllerClose(branch2._readableStreamController);\n }\n if (branch1._readableStreamController._pendingPullIntos.length > 0) {\n ReadableByteStreamControllerRespond(branch1._readableStreamController, 0);\n }\n if (branch2._readableStreamController._pendingPullIntos.length > 0) {\n ReadableByteStreamControllerRespond(branch2._readableStreamController, 0);\n }\n if (!canceled1 || !canceled2) {\n resolveCancelPromise(undefined);\n }\n },\n _errorSteps: () => {\n reading = false;\n }\n };\n ReadableStreamDefaultReaderRead(reader, readRequest);\n }\n function pullWithBYOBReader(view, forBranch2) {\n if (IsReadableStreamDefaultReader(reader)) {\n ReadableStreamReaderGenericRelease(reader);\n reader = AcquireReadableStreamBYOBReader(stream);\n forwardReaderError(reader);\n }\n const byobBranch = forBranch2 ? branch2 : branch1;\n const otherBranch = forBranch2 ? branch1 : branch2;\n const readIntoRequest = {\n _chunkSteps: chunk => {\n // This needs to be delayed a microtask because it takes at least a microtask to detect errors (using\n // reader._closedPromise below), and we want errors in stream to error both branches immediately. We cannot let\n // successful synchronously-available reads get ahead of asynchronously-available errors.\n queueMicrotask(() => {\n readAgainForBranch1 = false;\n readAgainForBranch2 = false;\n const byobCanceled = forBranch2 ? canceled2 : canceled1;\n const otherCanceled = forBranch2 ? 
canceled1 : canceled2;\n if (!otherCanceled) {\n let clonedChunk;\n try {\n clonedChunk = CloneAsUint8Array(chunk);\n }\n catch (cloneE) {\n ReadableByteStreamControllerError(byobBranch._readableStreamController, cloneE);\n ReadableByteStreamControllerError(otherBranch._readableStreamController, cloneE);\n resolveCancelPromise(ReadableStreamCancel(stream, cloneE));\n return;\n }\n if (!byobCanceled) {\n ReadableByteStreamControllerRespondWithNewView(byobBranch._readableStreamController, chunk);\n }\n ReadableByteStreamControllerEnqueue(otherBranch._readableStreamController, clonedChunk);\n }\n else if (!byobCanceled) {\n ReadableByteStreamControllerRespondWithNewView(byobBranch._readableStreamController, chunk);\n }\n reading = false;\n if (readAgainForBranch1) {\n pull1Algorithm();\n }\n else if (readAgainForBranch2) {\n pull2Algorithm();\n }\n });\n },\n _closeSteps: chunk => {\n reading = false;\n const byobCanceled = forBranch2 ? canceled2 : canceled1;\n const otherCanceled = forBranch2 ? canceled1 : canceled2;\n if (!byobCanceled) {\n ReadableByteStreamControllerClose(byobBranch._readableStreamController);\n }\n if (!otherCanceled) {\n ReadableByteStreamControllerClose(otherBranch._readableStreamController);\n }\n if (chunk !== undefined) {\n if (!byobCanceled) {\n ReadableByteStreamControllerRespondWithNewView(byobBranch._readableStreamController, chunk);\n }\n if (!otherCanceled && otherBranch._readableStreamController._pendingPullIntos.length > 0) {\n ReadableByteStreamControllerRespond(otherBranch._readableStreamController, 0);\n }\n }\n if (!byobCanceled || !otherCanceled) {\n resolveCancelPromise(undefined);\n }\n },\n _errorSteps: () => {\n reading = false;\n }\n };\n ReadableStreamBYOBReaderRead(reader, view, readIntoRequest);\n }\n function pull1Algorithm() {\n if (reading) {\n readAgainForBranch1 = true;\n return promiseResolvedWith(undefined);\n }\n reading = true;\n const byobRequest = ReadableByteStreamControllerGetBYOBRequest(branch1._readableStreamController);\n if (byobRequest === null) {\n pullWithDefaultReader();\n }\n else {\n pullWithBYOBReader(byobRequest._view, false);\n }\n return promiseResolvedWith(undefined);\n }\n function pull2Algorithm() {\n if (reading) {\n readAgainForBranch2 = true;\n return promiseResolvedWith(undefined);\n }\n reading = true;\n const byobRequest = ReadableByteStreamControllerGetBYOBRequest(branch2._readableStreamController);\n if (byobRequest === null) {\n pullWithDefaultReader();\n }\n else {\n pullWithBYOBReader(byobRequest._view, true);\n }\n return promiseResolvedWith(undefined);\n }\n function cancel1Algorithm(reason) {\n canceled1 = true;\n reason1 = reason;\n if (canceled2) {\n const compositeReason = CreateArrayFromList([reason1, reason2]);\n const cancelResult = ReadableStreamCancel(stream, compositeReason);\n resolveCancelPromise(cancelResult);\n }\n return cancelPromise;\n }\n function cancel2Algorithm(reason) {\n canceled2 = true;\n reason2 = reason;\n if (canceled1) {\n const compositeReason = CreateArrayFromList([reason1, reason2]);\n const cancelResult = ReadableStreamCancel(stream, compositeReason);\n resolveCancelPromise(cancelResult);\n }\n return cancelPromise;\n }\n function startAlgorithm() {\n return;\n }\n branch1 = CreateReadableByteStream(startAlgorithm, pull1Algorithm, cancel1Algorithm);\n branch2 = CreateReadableByteStream(startAlgorithm, pull2Algorithm, cancel2Algorithm);\n forwardReaderError(reader);\n return [branch1, branch2];\n }\n\n function convertUnderlyingDefaultOrByteSource(source, context) {\n 
assertDictionary(source, context);\n const original = source;\n const autoAllocateChunkSize = original === null || original === void 0 ? void 0 : original.autoAllocateChunkSize;\n const cancel = original === null || original === void 0 ? void 0 : original.cancel;\n const pull = original === null || original === void 0 ? void 0 : original.pull;\n const start = original === null || original === void 0 ? void 0 : original.start;\n const type = original === null || original === void 0 ? void 0 : original.type;\n return {\n autoAllocateChunkSize: autoAllocateChunkSize === undefined ?\n undefined :\n convertUnsignedLongLongWithEnforceRange(autoAllocateChunkSize, `${context} has member 'autoAllocateChunkSize' that`),\n cancel: cancel === undefined ?\n undefined :\n convertUnderlyingSourceCancelCallback(cancel, original, `${context} has member 'cancel' that`),\n pull: pull === undefined ?\n undefined :\n convertUnderlyingSourcePullCallback(pull, original, `${context} has member 'pull' that`),\n start: start === undefined ?\n undefined :\n convertUnderlyingSourceStartCallback(start, original, `${context} has member 'start' that`),\n type: type === undefined ? undefined : convertReadableStreamType(type, `${context} has member 'type' that`)\n };\n }\n function convertUnderlyingSourceCancelCallback(fn, original, context) {\n assertFunction(fn, context);\n return (reason) => promiseCall(fn, original, [reason]);\n }\n function convertUnderlyingSourcePullCallback(fn, original, context) {\n assertFunction(fn, context);\n return (controller) => promiseCall(fn, original, [controller]);\n }\n function convertUnderlyingSourceStartCallback(fn, original, context) {\n assertFunction(fn, context);\n return (controller) => reflectCall(fn, original, [controller]);\n }\n function convertReadableStreamType(type, context) {\n type = `${type}`;\n if (type !== 'bytes') {\n throw new TypeError(`${context} '${type}' is not a valid enumeration value for ReadableStreamType`);\n }\n return type;\n }\n\n function convertReaderOptions(options, context) {\n assertDictionary(options, context);\n const mode = options === null || options === void 0 ? void 0 : options.mode;\n return {\n mode: mode === undefined ? undefined : convertReadableStreamReaderMode(mode, `${context} has member 'mode' that`)\n };\n }\n function convertReadableStreamReaderMode(mode, context) {\n mode = `${mode}`;\n if (mode !== 'byob') {\n throw new TypeError(`${context} '${mode}' is not a valid enumeration value for ReadableStreamReaderMode`);\n }\n return mode;\n }\n\n function convertIteratorOptions(options, context) {\n assertDictionary(options, context);\n const preventCancel = options === null || options === void 0 ? void 0 : options.preventCancel;\n return { preventCancel: Boolean(preventCancel) };\n }\n\n function convertPipeOptions(options, context) {\n assertDictionary(options, context);\n const preventAbort = options === null || options === void 0 ? void 0 : options.preventAbort;\n const preventCancel = options === null || options === void 0 ? void 0 : options.preventCancel;\n const preventClose = options === null || options === void 0 ? void 0 : options.preventClose;\n const signal = options === null || options === void 0 ? 
void 0 : options.signal;\n if (signal !== undefined) {\n assertAbortSignal(signal, `${context} has member 'signal' that`);\n }\n return {\n preventAbort: Boolean(preventAbort),\n preventCancel: Boolean(preventCancel),\n preventClose: Boolean(preventClose),\n signal\n };\n }\n function assertAbortSignal(signal, context) {\n if (!isAbortSignal(signal)) {\n throw new TypeError(`${context} is not an AbortSignal.`);\n }\n }\n\n function convertReadableWritablePair(pair, context) {\n assertDictionary(pair, context);\n const readable = pair === null || pair === void 0 ? void 0 : pair.readable;\n assertRequiredField(readable, 'readable', 'ReadableWritablePair');\n assertReadableStream(readable, `${context} has member 'readable' that`);\n const writable = pair === null || pair === void 0 ? void 0 : pair.writable;\n assertRequiredField(writable, 'writable', 'ReadableWritablePair');\n assertWritableStream(writable, `${context} has member 'writable' that`);\n return { readable, writable };\n }\n\n /**\n * A readable stream represents a source of data, from which you can read.\n *\n * @public\n */\n class ReadableStream {\n constructor(rawUnderlyingSource = {}, rawStrategy = {}) {\n if (rawUnderlyingSource === undefined) {\n rawUnderlyingSource = null;\n }\n else {\n assertObject(rawUnderlyingSource, 'First parameter');\n }\n const strategy = convertQueuingStrategy(rawStrategy, 'Second parameter');\n const underlyingSource = convertUnderlyingDefaultOrByteSource(rawUnderlyingSource, 'First parameter');\n InitializeReadableStream(this);\n if (underlyingSource.type === 'bytes') {\n if (strategy.size !== undefined) {\n throw new RangeError('The strategy for a byte stream cannot have a size function');\n }\n const highWaterMark = ExtractHighWaterMark(strategy, 0);\n SetUpReadableByteStreamControllerFromUnderlyingSource(this, underlyingSource, highWaterMark);\n }\n else {\n const sizeAlgorithm = ExtractSizeAlgorithm(strategy);\n const highWaterMark = ExtractHighWaterMark(strategy, 1);\n SetUpReadableStreamDefaultControllerFromUnderlyingSource(this, underlyingSource, highWaterMark, sizeAlgorithm);\n }\n }\n /**\n * Whether or not the readable stream is locked to a {@link ReadableStreamDefaultReader | reader}.\n */\n get locked() {\n if (!IsReadableStream(this)) {\n throw streamBrandCheckException$1('locked');\n }\n return IsReadableStreamLocked(this);\n }\n /**\n * Cancels the stream, signaling a loss of interest in the stream by a consumer.\n *\n * The supplied `reason` argument will be given to the underlying source's {@link UnderlyingSource.cancel | cancel()}\n * method, which might or might not use it.\n */\n cancel(reason = undefined) {\n if (!IsReadableStream(this)) {\n return promiseRejectedWith(streamBrandCheckException$1('cancel'));\n }\n if (IsReadableStreamLocked(this)) {\n return promiseRejectedWith(new TypeError('Cannot cancel a stream that already has a reader'));\n }\n return ReadableStreamCancel(this, reason);\n }\n getReader(rawOptions = undefined) {\n if (!IsReadableStream(this)) {\n throw streamBrandCheckException$1('getReader');\n }\n const options = convertReaderOptions(rawOptions, 'First parameter');\n if (options.mode === undefined) {\n return AcquireReadableStreamDefaultReader(this);\n }\n return AcquireReadableStreamBYOBReader(this);\n }\n pipeThrough(rawTransform, rawOptions = {}) {\n if (!IsReadableStream(this)) {\n throw streamBrandCheckException$1('pipeThrough');\n }\n assertRequiredArgument(rawTransform, 1, 'pipeThrough');\n const transform = 
convertReadableWritablePair(rawTransform, 'First parameter');\n const options = convertPipeOptions(rawOptions, 'Second parameter');\n if (IsReadableStreamLocked(this)) {\n throw new TypeError('ReadableStream.prototype.pipeThrough cannot be used on a locked ReadableStream');\n }\n if (IsWritableStreamLocked(transform.writable)) {\n throw new TypeError('ReadableStream.prototype.pipeThrough cannot be used on a locked WritableStream');\n }\n const promise = ReadableStreamPipeTo(this, transform.writable, options.preventClose, options.preventAbort, options.preventCancel, options.signal);\n setPromiseIsHandledToTrue(promise);\n return transform.readable;\n }\n pipeTo(destination, rawOptions = {}) {\n if (!IsReadableStream(this)) {\n return promiseRejectedWith(streamBrandCheckException$1('pipeTo'));\n }\n if (destination === undefined) {\n return promiseRejectedWith(`Parameter 1 is required in 'pipeTo'.`);\n }\n if (!IsWritableStream(destination)) {\n return promiseRejectedWith(new TypeError(`ReadableStream.prototype.pipeTo's first argument must be a WritableStream`));\n }\n let options;\n try {\n options = convertPipeOptions(rawOptions, 'Second parameter');\n }\n catch (e) {\n return promiseRejectedWith(e);\n }\n if (IsReadableStreamLocked(this)) {\n return promiseRejectedWith(new TypeError('ReadableStream.prototype.pipeTo cannot be used on a locked ReadableStream'));\n }\n if (IsWritableStreamLocked(destination)) {\n return promiseRejectedWith(new TypeError('ReadableStream.prototype.pipeTo cannot be used on a locked WritableStream'));\n }\n return ReadableStreamPipeTo(this, destination, options.preventClose, options.preventAbort, options.preventCancel, options.signal);\n }\n /**\n * Tees this readable stream, returning a two-element array containing the two resulting branches as\n * new {@link ReadableStream} instances.\n *\n * Teeing a stream will lock it, preventing any other consumer from acquiring a reader.\n * To cancel the stream, cancel both of the resulting branches; a composite cancellation reason will then be\n * propagated to the stream's underlying source.\n *\n * Note that the chunks seen in each branch will be the same object. 
If the chunks are not immutable,\n * this could allow interference between the two branches.\n */\n tee() {\n if (!IsReadableStream(this)) {\n throw streamBrandCheckException$1('tee');\n }\n const branches = ReadableStreamTee(this);\n return CreateArrayFromList(branches);\n }\n values(rawOptions = undefined) {\n if (!IsReadableStream(this)) {\n throw streamBrandCheckException$1('values');\n }\n const options = convertIteratorOptions(rawOptions, 'First parameter');\n return AcquireReadableStreamAsyncIterator(this, options.preventCancel);\n }\n }\n Object.defineProperties(ReadableStream.prototype, {\n cancel: { enumerable: true },\n getReader: { enumerable: true },\n pipeThrough: { enumerable: true },\n pipeTo: { enumerable: true },\n tee: { enumerable: true },\n values: { enumerable: true },\n locked: { enumerable: true }\n });\n if (typeof SymbolPolyfill.toStringTag === 'symbol') {\n Object.defineProperty(ReadableStream.prototype, SymbolPolyfill.toStringTag, {\n value: 'ReadableStream',\n configurable: true\n });\n }\n if (typeof SymbolPolyfill.asyncIterator === 'symbol') {\n Object.defineProperty(ReadableStream.prototype, SymbolPolyfill.asyncIterator, {\n value: ReadableStream.prototype.values,\n writable: true,\n configurable: true\n });\n }\n // Abstract operations for the ReadableStream.\n // Throws if and only if startAlgorithm throws.\n function CreateReadableStream(startAlgorithm, pullAlgorithm, cancelAlgorithm, highWaterMark = 1, sizeAlgorithm = () => 1) {\n const stream = Object.create(ReadableStream.prototype);\n InitializeReadableStream(stream);\n const controller = Object.create(ReadableStreamDefaultController.prototype);\n SetUpReadableStreamDefaultController(stream, controller, startAlgorithm, pullAlgorithm, cancelAlgorithm, highWaterMark, sizeAlgorithm);\n return stream;\n }\n // Throws if and only if startAlgorithm throws.\n function CreateReadableByteStream(startAlgorithm, pullAlgorithm, cancelAlgorithm) {\n const stream = Object.create(ReadableStream.prototype);\n InitializeReadableStream(stream);\n const controller = Object.create(ReadableByteStreamController.prototype);\n SetUpReadableByteStreamController(stream, controller, startAlgorithm, pullAlgorithm, cancelAlgorithm, 0, undefined);\n return stream;\n }\n function InitializeReadableStream(stream) {\n stream._state = 'readable';\n stream._reader = undefined;\n stream._storedError = undefined;\n stream._disturbed = false;\n }\n function IsReadableStream(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, '_readableStreamController')) {\n return false;\n }\n return x instanceof ReadableStream;\n }\n function IsReadableStreamLocked(stream) {\n if (stream._reader === undefined) {\n return false;\n }\n return true;\n }\n // ReadableStream API exposed for controllers.\n function ReadableStreamCancel(stream, reason) {\n stream._disturbed = true;\n if (stream._state === 'closed') {\n return promiseResolvedWith(undefined);\n }\n if (stream._state === 'errored') {\n return promiseRejectedWith(stream._storedError);\n }\n ReadableStreamClose(stream);\n const reader = stream._reader;\n if (reader !== undefined && IsReadableStreamBYOBReader(reader)) {\n reader._readIntoRequests.forEach(readIntoRequest => {\n readIntoRequest._closeSteps(undefined);\n });\n reader._readIntoRequests = new SimpleQueue();\n }\n const sourceCancelPromise = stream._readableStreamController[CancelSteps](reason);\n return transformPromiseWith(sourceCancelPromise, noop);\n }\n function 
ReadableStreamClose(stream) {\n stream._state = 'closed';\n const reader = stream._reader;\n if (reader === undefined) {\n return;\n }\n defaultReaderClosedPromiseResolve(reader);\n if (IsReadableStreamDefaultReader(reader)) {\n reader._readRequests.forEach(readRequest => {\n readRequest._closeSteps();\n });\n reader._readRequests = new SimpleQueue();\n }\n }\n function ReadableStreamError(stream, e) {\n stream._state = 'errored';\n stream._storedError = e;\n const reader = stream._reader;\n if (reader === undefined) {\n return;\n }\n defaultReaderClosedPromiseReject(reader, e);\n if (IsReadableStreamDefaultReader(reader)) {\n reader._readRequests.forEach(readRequest => {\n readRequest._errorSteps(e);\n });\n reader._readRequests = new SimpleQueue();\n }\n else {\n reader._readIntoRequests.forEach(readIntoRequest => {\n readIntoRequest._errorSteps(e);\n });\n reader._readIntoRequests = new SimpleQueue();\n }\n }\n // Helper functions for the ReadableStream.\n function streamBrandCheckException$1(name) {\n return new TypeError(`ReadableStream.prototype.${name} can only be used on a ReadableStream`);\n }\n\n function convertQueuingStrategyInit(init, context) {\n assertDictionary(init, context);\n const highWaterMark = init === null || init === void 0 ? void 0 : init.highWaterMark;\n assertRequiredField(highWaterMark, 'highWaterMark', 'QueuingStrategyInit');\n return {\n highWaterMark: convertUnrestrictedDouble(highWaterMark)\n };\n }\n\n // The size function must not have a prototype property nor be a constructor\n const byteLengthSizeFunction = (chunk) => {\n return chunk.byteLength;\n };\n try {\n Object.defineProperty(byteLengthSizeFunction, 'name', {\n value: 'size',\n configurable: true\n });\n }\n catch (_a) {\n // This property is non-configurable in older browsers, so ignore if this throws.\n // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Function/name#browser_compatibility\n }\n /**\n * A queuing strategy that counts the number of bytes in each chunk.\n *\n * @public\n */\n class ByteLengthQueuingStrategy {\n constructor(options) {\n assertRequiredArgument(options, 1, 'ByteLengthQueuingStrategy');\n options = convertQueuingStrategyInit(options, 'First parameter');\n this._byteLengthQueuingStrategyHighWaterMark = options.highWaterMark;\n }\n /**\n * Returns the high water mark provided to the constructor.\n */\n get highWaterMark() {\n if (!IsByteLengthQueuingStrategy(this)) {\n throw byteLengthBrandCheckException('highWaterMark');\n }\n return this._byteLengthQueuingStrategyHighWaterMark;\n }\n /**\n * Measures the size of `chunk` by returning the value of its `byteLength` property.\n */\n get size() {\n if (!IsByteLengthQueuingStrategy(this)) {\n throw byteLengthBrandCheckException('size');\n }\n return byteLengthSizeFunction;\n }\n }\n Object.defineProperties(ByteLengthQueuingStrategy.prototype, {\n highWaterMark: { enumerable: true },\n size: { enumerable: true }\n });\n if (typeof SymbolPolyfill.toStringTag === 'symbol') {\n Object.defineProperty(ByteLengthQueuingStrategy.prototype, SymbolPolyfill.toStringTag, {\n value: 'ByteLengthQueuingStrategy',\n configurable: true\n });\n }\n // Helper functions for the ByteLengthQueuingStrategy.\n function byteLengthBrandCheckException(name) {\n return new TypeError(`ByteLengthQueuingStrategy.prototype.${name} can only be used on a ByteLengthQueuingStrategy`);\n }\n function IsByteLengthQueuingStrategy(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, 
'_byteLengthQueuingStrategyHighWaterMark')) {\n return false;\n }\n return x instanceof ByteLengthQueuingStrategy;\n }\n\n // The size function must not have a prototype property nor be a constructor\n const countSizeFunction = () => {\n return 1;\n };\n try {\n Object.defineProperty(countSizeFunction, 'name', {\n value: 'size',\n configurable: true\n });\n }\n catch (_a) {\n // This property is non-configurable in older browsers, so ignore if this throws.\n // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Function/name#browser_compatibility\n }\n /**\n * A queuing strategy that counts the number of chunks.\n *\n * @public\n */\n class CountQueuingStrategy {\n constructor(options) {\n assertRequiredArgument(options, 1, 'CountQueuingStrategy');\n options = convertQueuingStrategyInit(options, 'First parameter');\n this._countQueuingStrategyHighWaterMark = options.highWaterMark;\n }\n /**\n * Returns the high water mark provided to the constructor.\n */\n get highWaterMark() {\n if (!IsCountQueuingStrategy(this)) {\n throw countBrandCheckException('highWaterMark');\n }\n return this._countQueuingStrategyHighWaterMark;\n }\n /**\n * Measures the size of `chunk` by always returning 1.\n * This ensures that the total queue size is a count of the number of chunks in the queue.\n */\n get size() {\n if (!IsCountQueuingStrategy(this)) {\n throw countBrandCheckException('size');\n }\n return countSizeFunction;\n }\n }\n Object.defineProperties(CountQueuingStrategy.prototype, {\n highWaterMark: { enumerable: true },\n size: { enumerable: true }\n });\n if (typeof SymbolPolyfill.toStringTag === 'symbol') {\n Object.defineProperty(CountQueuingStrategy.prototype, SymbolPolyfill.toStringTag, {\n value: 'CountQueuingStrategy',\n configurable: true\n });\n }\n // Helper functions for the CountQueuingStrategy.\n function countBrandCheckException(name) {\n return new TypeError(`CountQueuingStrategy.prototype.${name} can only be used on a CountQueuingStrategy`);\n }\n function IsCountQueuingStrategy(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, '_countQueuingStrategyHighWaterMark')) {\n return false;\n }\n return x instanceof CountQueuingStrategy;\n }\n\n function convertTransformer(original, context) {\n assertDictionary(original, context);\n const flush = original === null || original === void 0 ? void 0 : original.flush;\n const readableType = original === null || original === void 0 ? void 0 : original.readableType;\n const start = original === null || original === void 0 ? void 0 : original.start;\n const transform = original === null || original === void 0 ? void 0 : original.transform;\n const writableType = original === null || original === void 0 ? 
void 0 : original.writableType;\n return {\n flush: flush === undefined ?\n undefined :\n convertTransformerFlushCallback(flush, original, `${context} has member 'flush' that`),\n readableType,\n start: start === undefined ?\n undefined :\n convertTransformerStartCallback(start, original, `${context} has member 'start' that`),\n transform: transform === undefined ?\n undefined :\n convertTransformerTransformCallback(transform, original, `${context} has member 'transform' that`),\n writableType\n };\n }\n function convertTransformerFlushCallback(fn, original, context) {\n assertFunction(fn, context);\n return (controller) => promiseCall(fn, original, [controller]);\n }\n function convertTransformerStartCallback(fn, original, context) {\n assertFunction(fn, context);\n return (controller) => reflectCall(fn, original, [controller]);\n }\n function convertTransformerTransformCallback(fn, original, context) {\n assertFunction(fn, context);\n return (chunk, controller) => promiseCall(fn, original, [chunk, controller]);\n }\n\n // Class TransformStream\n /**\n * A transform stream consists of a pair of streams: a {@link WritableStream | writable stream},\n * known as its writable side, and a {@link ReadableStream | readable stream}, known as its readable side.\n * In a manner specific to the transform stream in question, writes to the writable side result in new data being\n * made available for reading from the readable side.\n *\n * @public\n */\n class TransformStream {\n constructor(rawTransformer = {}, rawWritableStrategy = {}, rawReadableStrategy = {}) {\n if (rawTransformer === undefined) {\n rawTransformer = null;\n }\n const writableStrategy = convertQueuingStrategy(rawWritableStrategy, 'Second parameter');\n const readableStrategy = convertQueuingStrategy(rawReadableStrategy, 'Third parameter');\n const transformer = convertTransformer(rawTransformer, 'First parameter');\n if (transformer.readableType !== undefined) {\n throw new RangeError('Invalid readableType specified');\n }\n if (transformer.writableType !== undefined) {\n throw new RangeError('Invalid writableType specified');\n }\n const readableHighWaterMark = ExtractHighWaterMark(readableStrategy, 0);\n const readableSizeAlgorithm = ExtractSizeAlgorithm(readableStrategy);\n const writableHighWaterMark = ExtractHighWaterMark(writableStrategy, 1);\n const writableSizeAlgorithm = ExtractSizeAlgorithm(writableStrategy);\n let startPromise_resolve;\n const startPromise = newPromise(resolve => {\n startPromise_resolve = resolve;\n });\n InitializeTransformStream(this, startPromise, writableHighWaterMark, writableSizeAlgorithm, readableHighWaterMark, readableSizeAlgorithm);\n SetUpTransformStreamDefaultControllerFromTransformer(this, transformer);\n if (transformer.start !== undefined) {\n startPromise_resolve(transformer.start(this._transformStreamController));\n }\n else {\n startPromise_resolve(undefined);\n }\n }\n /**\n * The readable side of the transform stream.\n */\n get readable() {\n if (!IsTransformStream(this)) {\n throw streamBrandCheckException('readable');\n }\n return this._readable;\n }\n /**\n * The writable side of the transform stream.\n */\n get writable() {\n if (!IsTransformStream(this)) {\n throw streamBrandCheckException('writable');\n }\n return this._writable;\n }\n }\n Object.defineProperties(TransformStream.prototype, {\n readable: { enumerable: true },\n writable: { enumerable: true }\n });\n if (typeof SymbolPolyfill.toStringTag === 'symbol') {\n Object.defineProperty(TransformStream.prototype, 
SymbolPolyfill.toStringTag, {\n value: 'TransformStream',\n configurable: true\n });\n }\n function InitializeTransformStream(stream, startPromise, writableHighWaterMark, writableSizeAlgorithm, readableHighWaterMark, readableSizeAlgorithm) {\n function startAlgorithm() {\n return startPromise;\n }\n function writeAlgorithm(chunk) {\n return TransformStreamDefaultSinkWriteAlgorithm(stream, chunk);\n }\n function abortAlgorithm(reason) {\n return TransformStreamDefaultSinkAbortAlgorithm(stream, reason);\n }\n function closeAlgorithm() {\n return TransformStreamDefaultSinkCloseAlgorithm(stream);\n }\n stream._writable = CreateWritableStream(startAlgorithm, writeAlgorithm, closeAlgorithm, abortAlgorithm, writableHighWaterMark, writableSizeAlgorithm);\n function pullAlgorithm() {\n return TransformStreamDefaultSourcePullAlgorithm(stream);\n }\n function cancelAlgorithm(reason) {\n TransformStreamErrorWritableAndUnblockWrite(stream, reason);\n return promiseResolvedWith(undefined);\n }\n stream._readable = CreateReadableStream(startAlgorithm, pullAlgorithm, cancelAlgorithm, readableHighWaterMark, readableSizeAlgorithm);\n // The [[backpressure]] slot is set to undefined so that it can be initialised by TransformStreamSetBackpressure.\n stream._backpressure = undefined;\n stream._backpressureChangePromise = undefined;\n stream._backpressureChangePromise_resolve = undefined;\n TransformStreamSetBackpressure(stream, true);\n stream._transformStreamController = undefined;\n }\n function IsTransformStream(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, '_transformStreamController')) {\n return false;\n }\n return x instanceof TransformStream;\n }\n // This is a no-op if both sides are already errored.\n function TransformStreamError(stream, e) {\n ReadableStreamDefaultControllerError(stream._readable._readableStreamController, e);\n TransformStreamErrorWritableAndUnblockWrite(stream, e);\n }\n function TransformStreamErrorWritableAndUnblockWrite(stream, e) {\n TransformStreamDefaultControllerClearAlgorithms(stream._transformStreamController);\n WritableStreamDefaultControllerErrorIfNeeded(stream._writable._writableStreamController, e);\n if (stream._backpressure) {\n // Pretend that pull() was called to permit any pending write() calls to complete. TransformStreamSetBackpressure()\n // cannot be called from enqueue() or pull() once the ReadableStream is errored, so this will will be the final time\n // _backpressure is set.\n TransformStreamSetBackpressure(stream, false);\n }\n }\n function TransformStreamSetBackpressure(stream, backpressure) {\n // Passes also when called during construction.\n if (stream._backpressureChangePromise !== undefined) {\n stream._backpressureChangePromise_resolve();\n }\n stream._backpressureChangePromise = newPromise(resolve => {\n stream._backpressureChangePromise_resolve = resolve;\n });\n stream._backpressure = backpressure;\n }\n // Class TransformStreamDefaultController\n /**\n * Allows control of the {@link ReadableStream} and {@link WritableStream} of the associated {@link TransformStream}.\n *\n * @public\n */\n class TransformStreamDefaultController {\n constructor() {\n throw new TypeError('Illegal constructor');\n }\n /**\n * Returns the desired size to fill the readable side’s internal queue. 
It can be negative, if the queue is over-full.\n */\n get desiredSize() {\n if (!IsTransformStreamDefaultController(this)) {\n throw defaultControllerBrandCheckException('desiredSize');\n }\n const readableController = this._controlledTransformStream._readable._readableStreamController;\n return ReadableStreamDefaultControllerGetDesiredSize(readableController);\n }\n enqueue(chunk = undefined) {\n if (!IsTransformStreamDefaultController(this)) {\n throw defaultControllerBrandCheckException('enqueue');\n }\n TransformStreamDefaultControllerEnqueue(this, chunk);\n }\n /**\n * Errors both the readable side and the writable side of the controlled transform stream, making all future\n * interactions with it fail with the given error `e`. Any chunks queued for transformation will be discarded.\n */\n error(reason = undefined) {\n if (!IsTransformStreamDefaultController(this)) {\n throw defaultControllerBrandCheckException('error');\n }\n TransformStreamDefaultControllerError(this, reason);\n }\n /**\n * Closes the readable side and errors the writable side of the controlled transform stream. This is useful when the\n * transformer only needs to consume a portion of the chunks written to the writable side.\n */\n terminate() {\n if (!IsTransformStreamDefaultController(this)) {\n throw defaultControllerBrandCheckException('terminate');\n }\n TransformStreamDefaultControllerTerminate(this);\n }\n }\n Object.defineProperties(TransformStreamDefaultController.prototype, {\n enqueue: { enumerable: true },\n error: { enumerable: true },\n terminate: { enumerable: true },\n desiredSize: { enumerable: true }\n });\n if (typeof SymbolPolyfill.toStringTag === 'symbol') {\n Object.defineProperty(TransformStreamDefaultController.prototype, SymbolPolyfill.toStringTag, {\n value: 'TransformStreamDefaultController',\n configurable: true\n });\n }\n // Transform Stream Default Controller Abstract Operations\n function IsTransformStreamDefaultController(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, '_controlledTransformStream')) {\n return false;\n }\n return x instanceof TransformStreamDefaultController;\n }\n function SetUpTransformStreamDefaultController(stream, controller, transformAlgorithm, flushAlgorithm) {\n controller._controlledTransformStream = stream;\n stream._transformStreamController = controller;\n controller._transformAlgorithm = transformAlgorithm;\n controller._flushAlgorithm = flushAlgorithm;\n }\n function SetUpTransformStreamDefaultControllerFromTransformer(stream, transformer) {\n const controller = Object.create(TransformStreamDefaultController.prototype);\n let transformAlgorithm = (chunk) => {\n try {\n TransformStreamDefaultControllerEnqueue(controller, chunk);\n return promiseResolvedWith(undefined);\n }\n catch (transformResultE) {\n return promiseRejectedWith(transformResultE);\n }\n };\n let flushAlgorithm = () => promiseResolvedWith(undefined);\n if (transformer.transform !== undefined) {\n transformAlgorithm = chunk => transformer.transform(chunk, controller);\n }\n if (transformer.flush !== undefined) {\n flushAlgorithm = () => transformer.flush(controller);\n }\n SetUpTransformStreamDefaultController(stream, controller, transformAlgorithm, flushAlgorithm);\n }\n function TransformStreamDefaultControllerClearAlgorithms(controller) {\n controller._transformAlgorithm = undefined;\n controller._flushAlgorithm = undefined;\n }\n function TransformStreamDefaultControllerEnqueue(controller, chunk) {\n const stream = 
controller._controlledTransformStream;\n const readableController = stream._readable._readableStreamController;\n if (!ReadableStreamDefaultControllerCanCloseOrEnqueue(readableController)) {\n throw new TypeError('Readable side is not in a state that permits enqueue');\n }\n // We throttle transform invocations based on the backpressure of the ReadableStream, but we still\n // accept TransformStreamDefaultControllerEnqueue() calls.\n try {\n ReadableStreamDefaultControllerEnqueue(readableController, chunk);\n }\n catch (e) {\n // This happens when readableStrategy.size() throws.\n TransformStreamErrorWritableAndUnblockWrite(stream, e);\n throw stream._readable._storedError;\n }\n const backpressure = ReadableStreamDefaultControllerHasBackpressure(readableController);\n if (backpressure !== stream._backpressure) {\n TransformStreamSetBackpressure(stream, true);\n }\n }\n function TransformStreamDefaultControllerError(controller, e) {\n TransformStreamError(controller._controlledTransformStream, e);\n }\n function TransformStreamDefaultControllerPerformTransform(controller, chunk) {\n const transformPromise = controller._transformAlgorithm(chunk);\n return transformPromiseWith(transformPromise, undefined, r => {\n TransformStreamError(controller._controlledTransformStream, r);\n throw r;\n });\n }\n function TransformStreamDefaultControllerTerminate(controller) {\n const stream = controller._controlledTransformStream;\n const readableController = stream._readable._readableStreamController;\n ReadableStreamDefaultControllerClose(readableController);\n const error = new TypeError('TransformStream terminated');\n TransformStreamErrorWritableAndUnblockWrite(stream, error);\n }\n // TransformStreamDefaultSink Algorithms\n function TransformStreamDefaultSinkWriteAlgorithm(stream, chunk) {\n const controller = stream._transformStreamController;\n if (stream._backpressure) {\n const backpressureChangePromise = stream._backpressureChangePromise;\n return transformPromiseWith(backpressureChangePromise, () => {\n const writable = stream._writable;\n const state = writable._state;\n if (state === 'erroring') {\n throw writable._storedError;\n }\n return TransformStreamDefaultControllerPerformTransform(controller, chunk);\n });\n }\n return TransformStreamDefaultControllerPerformTransform(controller, chunk);\n }\n function TransformStreamDefaultSinkAbortAlgorithm(stream, reason) {\n // abort() is not called synchronously, so it is possible for abort() to be called when the stream is already\n // errored.\n TransformStreamError(stream, reason);\n return promiseResolvedWith(undefined);\n }\n function TransformStreamDefaultSinkCloseAlgorithm(stream) {\n // stream._readable cannot change after construction, so caching it across a call to user code is safe.\n const readable = stream._readable;\n const controller = stream._transformStreamController;\n const flushPromise = controller._flushAlgorithm();\n TransformStreamDefaultControllerClearAlgorithms(controller);\n // Return a promise that is fulfilled with undefined on success.\n return transformPromiseWith(flushPromise, () => {\n if (readable._state === 'errored') {\n throw readable._storedError;\n }\n ReadableStreamDefaultControllerClose(readable._readableStreamController);\n }, r => {\n TransformStreamError(stream, r);\n throw readable._storedError;\n });\n }\n // TransformStreamDefaultSource Algorithms\n function TransformStreamDefaultSourcePullAlgorithm(stream) {\n // Invariant. 
Enforced by the promises returned by start() and pull().\n TransformStreamSetBackpressure(stream, false);\n // Prevent the next pull() call until there is backpressure.\n return stream._backpressureChangePromise;\n }\n // Helper functions for the TransformStreamDefaultController.\n function defaultControllerBrandCheckException(name) {\n return new TypeError(`TransformStreamDefaultController.prototype.${name} can only be used on a TransformStreamDefaultController`);\n }\n // Helper functions for the TransformStream.\n function streamBrandCheckException(name) {\n return new TypeError(`TransformStream.prototype.${name} can only be used on a TransformStream`);\n }\n\n exports.ByteLengthQueuingStrategy = ByteLengthQueuingStrategy;\n exports.CountQueuingStrategy = CountQueuingStrategy;\n exports.ReadableByteStreamController = ReadableByteStreamController;\n exports.ReadableStream = ReadableStream;\n exports.ReadableStreamBYOBReader = ReadableStreamBYOBReader;\n exports.ReadableStreamBYOBRequest = ReadableStreamBYOBRequest;\n exports.ReadableStreamDefaultController = ReadableStreamDefaultController;\n exports.ReadableStreamDefaultReader = ReadableStreamDefaultReader;\n exports.TransformStream = TransformStream;\n exports.TransformStreamDefaultController = TransformStreamDefaultController;\n exports.WritableStream = WritableStream;\n exports.WritableStreamDefaultController = WritableStreamDefaultController;\n exports.WritableStreamDefaultWriter = WritableStreamDefaultWriter;\n\n Object.defineProperty(exports, '__esModule', { value: true });\n\n})));\n//# sourceMappingURL=ponyfill.es2018.js.map\n","// Returns a wrapper function that returns a wrapped callback\n// The wrapper function should do some stuff, and return a\n// presumably different callback function.\n// This makes sure that own properties are retained, so that\n// decorations and such are not lost along the way.\nmodule.exports = wrappy\nfunction wrappy (fn, cb) {\n if (fn && cb) return wrappy(fn)(cb)\n\n if (typeof fn !== 'function')\n throw new TypeError('need wrapper function')\n\n Object.keys(fn).forEach(function (k) {\n wrapper[k] = fn[k]\n })\n\n return wrapper\n\n function wrapper() {\n var args = new Array(arguments.length)\n for (var i = 0; i < args.length; i++) {\n args[i] = arguments[i]\n }\n var ret = fn.apply(this, args)\n var cb = args[args.length-1]\n if (typeof ret === 'function' && ret !== cb) {\n Object.keys(cb).forEach(function (k) {\n ret[k] = cb[k]\n })\n }\n return ret\n }\n}\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst core = __importStar(require(\"@actions/core\"));\nconst tc = __importStar(require(\"@actions/tool-cache\"));\nconst exec = __importStar(require(\"@actions/exec\"));\nconst io = __importStar(require(\"@actions/io\"));\nconst octokit = __importStar(require(\"@octokit/rest\"));\nconst path = __importStar(require(\"path\"));\nconst node_fetch_1 = __importDefault(require(\"node-fetch\"));\nconst utils_1 = require(\"./utils\");\nfunction run() {\n return __awaiter(this, void 0, void 0, function* () {\n const platform = 'linux';\n const arch = 'x64';\n const versionInput = core.getInput('version');\n try {\n const version = yield resolveVersion(versionInput);\n let cachedPath = tryGetFromCache(arch, version);\n if (cachedPath) {\n core.info(`Found Rye in cache for ${version}`);\n }\n else {\n cachedPath = yield setupRye(platform, arch, version);\n }\n addRyeToPath(cachedPath);\n }\n catch (err) {\n core.setFailed(err.message);\n }\n });\n}\nfunction resolveVersion(versionInput) {\n return __awaiter(this, void 0, void 0, function* () {\n const availableVersion = yield getAvailableVersions();\n if (!availableVersion.includes(versionInput)) {\n if (versionInput === 'latest') {\n core.info(`Latest version is ${availableVersion[0]}`);\n return availableVersion[0];\n }\n else {\n throw new Error(`Version ${versionInput} is not available. 
Available version are: ${availableVersion}`);\n }\n }\n return versionInput;\n });\n}\nfunction getAvailableVersions() {\n return __awaiter(this, void 0, void 0, function* () {\n const githubClient = new octokit.Octokit({\n userAgent: 'setup-rye',\n request: { fetch: node_fetch_1.default }\n });\n const response = yield githubClient.rest.repos.listReleases({\n owner: utils_1.OWNER,\n repo: utils_1.REPO\n });\n const releases = response.data.map(release => release.tag_name);\n return releases;\n });\n}\nfunction tryGetFromCache(arch, version) {\n core.debug(`Trying to get Rye from cache for ${version}...`);\n const cachedVersions = tc.findAllVersions('rye', arch);\n core.debug(`Cached versions: ${cachedVersions}`);\n return tc.find('rye', version, arch);\n}\nfunction setupRye(platform, arch, version) {\n return __awaiter(this, void 0, void 0, function* () {\n const downloadPath = yield downloadVersion(platform, arch, version);\n const cachedPath = yield installRye(downloadPath, arch, version);\n return cachedPath;\n });\n}\nfunction downloadVersion(platform, arch, version) {\n return __awaiter(this, void 0, void 0, function* () {\n const binary = `rye-x86_64-${platform}`;\n const downloadUrl = `https://github.com/mitsuhiko/rye/releases/download/${version}/${binary}.gz`;\n core.info(`Downloading Rye from \"${downloadUrl}\" ...`);\n try {\n const downloadPath = yield tc.downloadTool(downloadUrl);\n yield extract(downloadPath);\n return downloadPath;\n }\n catch (err) {\n if (err instanceof Error) {\n // Rate limit?\n if (err instanceof tc.HTTPError &&\n (err.httpStatusCode === 403 || err.httpStatusCode === 429)) {\n core.info(`Received HTTP status code ${err.httpStatusCode}. This usually indicates the rate limit has been exceeded`);\n }\n else {\n core.info(err.message);\n }\n if (err.stack !== undefined) {\n core.debug(err.stack);\n }\n }\n throw err;\n }\n });\n}\nfunction extract(downloadPath) {\n return __awaiter(this, void 0, void 0, function* () {\n core.info('Extracting downloaded archive...');\n const pathForGunzip = `${downloadPath}.gz`;\n yield io.mv(downloadPath, pathForGunzip);\n yield exec.exec('gunzip', [pathForGunzip]);\n yield exec.exec('chmod', ['+x', downloadPath]);\n });\n}\nfunction installRye(downloadPath, arch, version) {\n return __awaiter(this, void 0, void 0, function* () {\n const tempDir = path.join(process.env['RUNNER_TEMP'] || '', 'rye_temp_home');\n yield io.mkdirP(tempDir);\n core.debug(`Created temporary directory ${tempDir}`);\n const options = {\n cwd: tempDir,\n env: Object.assign(Object.assign({}, process.env), { RYE_HOME: tempDir })\n };\n core.info(`Installing Rye into ${tempDir}`);\n yield exec.exec(downloadPath, ['self', 'install', '--yes'], options);\n const cachedPath = yield tc.cacheDir(tempDir, 'rye', version, arch);\n core.info(`Moved Rye into ${cachedPath}`);\n return cachedPath;\n });\n}\nfunction addRyeToPath(cachedPath) {\n core.addPath(`${cachedPath}/shims`);\n core.info(`Added ${cachedPath}/shims to the path`);\n}\nrun();\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.OWNER = exports.REPO = exports.WINDOWS_PLATFORMS = exports.WINDOWS_ARCHS = exports.IS_MAC = exports.IS_LINUX = exports.IS_WINDOWS = void 0;\nexports.IS_WINDOWS = process.platform === 'win32';\nexports.IS_LINUX = process.platform === 'linux';\nexports.IS_MAC = process.platform === 'darwin';\nexports.WINDOWS_ARCHS = ['x86', 'x64'];\nexports.WINDOWS_PLATFORMS = ['win32', 'win64'];\nexports.REPO = 'rye';\nexports.OWNER = 
'mitsuhiko';\n","module.exports = require(\"assert\");","module.exports = require(\"buffer\");","module.exports = require(\"child_process\");","module.exports = require(\"crypto\");","module.exports = require(\"events\");","module.exports = require(\"fs\");","module.exports = require(\"http\");","module.exports = require(\"https\");","module.exports = require(\"net\");","module.exports = require(\"node:process\");","module.exports = require(\"node:stream/web\");","module.exports = require(\"os\");","module.exports = require(\"path\");","module.exports = require(\"stream\");","module.exports = require(\"string_decoder\");","module.exports = require(\"timers\");","module.exports = require(\"tls\");","module.exports = require(\"util\");","module.exports = require(\"worker_threads\");","/* c8 ignore start */\n// 64 KiB (same size chrome slice theirs blob into Uint8array's)\nconst POOL_SIZE = 65536\n\nif (!globalThis.ReadableStream) {\n // `node:stream/web` got introduced in v16.5.0 as experimental\n // and it's preferred over the polyfilled version. So we also\n // suppress the warning that gets emitted by NodeJS for using it.\n try {\n const process = require('node:process')\n const { emitWarning } = process\n try {\n process.emitWarning = () => {}\n Object.assign(globalThis, require('node:stream/web'))\n process.emitWarning = emitWarning\n } catch (error) {\n process.emitWarning = emitWarning\n throw error\n }\n } catch (error) {\n // fallback to polyfill implementation\n Object.assign(globalThis, require('web-streams-polyfill/dist/ponyfill.es2018.js'))\n }\n}\n\ntry {\n // Don't use node: prefix for this, require+node: is not supported until node v14.14\n // Only `import()` can use prefix in 12.20 and later\n const { Blob } = require('buffer')\n if (Blob && !Blob.prototype.stream) {\n Blob.prototype.stream = function name (params) {\n let position = 0\n const blob = this\n\n return new ReadableStream({\n type: 'bytes',\n async pull (ctrl) {\n const chunk = blob.slice(position, Math.min(blob.size, position + POOL_SIZE))\n const buffer = await chunk.arrayBuffer()\n position += buffer.byteLength\n ctrl.enqueue(new Uint8Array(buffer))\n\n if (position === blob.size) {\n ctrl.close()\n }\n }\n })\n }\n }\n} catch (error) {}\n/* c8 ignore end */\n","import Blob from './index.js'\n\nconst _File = class File extends Blob {\n #lastModified = 0\n #name = ''\n\n /**\n * @param {*[]} fileBits\n * @param {string} fileName\n * @param {{lastModified?: number, type?: string}} options\n */// @ts-ignore\n constructor (fileBits, fileName, options = {}) {\n if (arguments.length < 2) {\n throw new TypeError(`Failed to construct 'File': 2 arguments required, but only ${arguments.length} present.`)\n }\n super(fileBits, options)\n\n if (options === null) options = {}\n\n // Simulate WebIDL type casting for NaN value in lastModified option.\n const lastModified = options.lastModified === undefined ? 
Date.now() : Number(options.lastModified)\n if (!Number.isNaN(lastModified)) {\n this.#lastModified = lastModified\n }\n\n this.#name = String(fileName)\n }\n\n get name () {\n return this.#name\n }\n\n get lastModified () {\n return this.#lastModified\n }\n\n get [Symbol.toStringTag] () {\n return 'File'\n }\n\n static [Symbol.hasInstance] (object) {\n return !!object && object instanceof Blob &&\n /^(File)$/.test(object[Symbol.toStringTag])\n }\n}\n\n/** @type {typeof globalThis.File} */// @ts-ignore\nexport const File = _File\nexport default File\n","const __WEBPACK_NAMESPACE_OBJECT__ = require(\"node:fs\");","const __WEBPACK_NAMESPACE_OBJECT__ = require(\"node:path\");","import { statSync, createReadStream, promises as fs } from 'node:fs'\nimport { basename } from 'node:path'\nimport DOMException from 'node-domexception'\n\nimport File from './file.js'\nimport Blob from './index.js'\n\nconst { stat } = fs\n\n/**\n * @param {string} path filepath on the disk\n * @param {string} [type] mimetype to use\n */\nconst blobFromSync = (path, type) => fromBlob(statSync(path), path, type)\n\n/**\n * @param {string} path filepath on the disk\n * @param {string} [type] mimetype to use\n * @returns {Promise}\n */\nconst blobFrom = (path, type) => stat(path).then(stat => fromBlob(stat, path, type))\n\n/**\n * @param {string} path filepath on the disk\n * @param {string} [type] mimetype to use\n * @returns {Promise}\n */\nconst fileFrom = (path, type) => stat(path).then(stat => fromFile(stat, path, type))\n\n/**\n * @param {string} path filepath on the disk\n * @param {string} [type] mimetype to use\n */\nconst fileFromSync = (path, type) => fromFile(statSync(path), path, type)\n\n// @ts-ignore\nconst fromBlob = (stat, path, type = '') => new Blob([new BlobDataItem({\n path,\n size: stat.size,\n lastModified: stat.mtimeMs,\n start: 0\n})], { type })\n\n// @ts-ignore\nconst fromFile = (stat, path, type = '') => new File([new BlobDataItem({\n path,\n size: stat.size,\n lastModified: stat.mtimeMs,\n start: 0\n})], basename(path), { type, lastModified: stat.mtimeMs })\n\n/**\n * This is a blob backed up by a file on the disk\n * with minium requirement. Its wrapped around a Blob as a blobPart\n * so you have no direct access to this.\n *\n * @private\n */\nclass BlobDataItem {\n #path\n #start\n\n constructor (options) {\n this.#path = options.path\n this.#start = options.start\n this.size = options.size\n this.lastModified = options.lastModified\n }\n\n /**\n * Slicing arguments is first validated and formatted\n * to not be out of range by Blob.prototype.slice\n */\n slice (start, end) {\n return new BlobDataItem({\n path: this.#path,\n lastModified: this.lastModified,\n size: end - start,\n start: this.#start + start\n })\n }\n\n async * stream () {\n const { mtimeMs } = await stat(this.#path)\n if (mtimeMs > this.lastModified) {\n throw new DOMException('The requested file could not be read, typically due to permission problems that have occurred after a reference to a file was acquired.', 'NotReadableError')\n }\n yield * createReadStream(this.#path, {\n start: this.#start,\n end: this.#start + this.size - 1\n })\n }\n\n get [Symbol.toStringTag] () {\n return 'Blob'\n }\n}\n\nexport default blobFromSync\nexport { File, Blob, blobFrom, blobFromSync, fileFrom, fileFromSync }\n","/*! fetch-blob. MIT License. 
Jimmy Wärting */\n\n// TODO (jimmywarting): in the feature use conditional loading with top level await (requires 14.x)\n// Node has recently added whatwg stream into core\n\nimport './streams.cjs'\n\n// 64 KiB (same size chrome slice theirs blob into Uint8array's)\nconst POOL_SIZE = 65536\n\n/** @param {(Blob | Uint8Array)[]} parts */\nasync function * toIterator (parts, clone = true) {\n for (const part of parts) {\n if ('stream' in part) {\n yield * (/** @type {AsyncIterableIterator} */ (part.stream()))\n } else if (ArrayBuffer.isView(part)) {\n if (clone) {\n let position = part.byteOffset\n const end = part.byteOffset + part.byteLength\n while (position !== end) {\n const size = Math.min(end - position, POOL_SIZE)\n const chunk = part.buffer.slice(position, position + size)\n position += chunk.byteLength\n yield new Uint8Array(chunk)\n }\n } else {\n yield part\n }\n /* c8 ignore next 10 */\n } else {\n // For blobs that have arrayBuffer but no stream method (nodes buffer.Blob)\n let position = 0, b = (/** @type {Blob} */ (part))\n while (position !== b.size) {\n const chunk = b.slice(position, Math.min(b.size, position + POOL_SIZE))\n const buffer = await chunk.arrayBuffer()\n position += buffer.byteLength\n yield new Uint8Array(buffer)\n }\n }\n }\n}\n\nconst _Blob = class Blob {\n /** @type {Array.<(Blob|Uint8Array)>} */\n #parts = []\n #type = ''\n #size = 0\n #endings = 'transparent'\n\n /**\n * The Blob() constructor returns a new Blob object. The content\n * of the blob consists of the concatenation of the values given\n * in the parameter array.\n *\n * @param {*} blobParts\n * @param {{ type?: string, endings?: string }} [options]\n */\n constructor (blobParts = [], options = {}) {\n if (typeof blobParts !== 'object' || blobParts === null) {\n throw new TypeError('Failed to construct \\'Blob\\': The provided value cannot be converted to a sequence.')\n }\n\n if (typeof blobParts[Symbol.iterator] !== 'function') {\n throw new TypeError('Failed to construct \\'Blob\\': The object must have a callable @@iterator property.')\n }\n\n if (typeof options !== 'object' && typeof options !== 'function') {\n throw new TypeError('Failed to construct \\'Blob\\': parameter 2 cannot convert to dictionary.')\n }\n\n if (options === null) options = {}\n\n const encoder = new TextEncoder()\n for (const element of blobParts) {\n let part\n if (ArrayBuffer.isView(element)) {\n part = new Uint8Array(element.buffer.slice(element.byteOffset, element.byteOffset + element.byteLength))\n } else if (element instanceof ArrayBuffer) {\n part = new Uint8Array(element.slice(0))\n } else if (element instanceof Blob) {\n part = element\n } else {\n part = encoder.encode(`${element}`)\n }\n\n this.#size += ArrayBuffer.isView(part) ? part.byteLength : part.size\n this.#parts.push(part)\n }\n\n this.#endings = `${options.endings === undefined ? 'transparent' : options.endings}`\n const type = options.type === undefined ? '' : String(options.type)\n this.#type = /^[\\x20-\\x7E]*$/.test(type) ? 
type : ''\n }\n\n /**\n * The Blob interface's size property returns the\n * size of the Blob in bytes.\n */\n get size () {\n return this.#size\n }\n\n /**\n * The type property of a Blob object returns the MIME type of the file.\n */\n get type () {\n return this.#type\n }\n\n /**\n * The text() method in the Blob interface returns a Promise\n * that resolves with a string containing the contents of\n * the blob, interpreted as UTF-8.\n *\n * @return {Promise}\n */\n async text () {\n // More optimized than using this.arrayBuffer()\n // that requires twice as much ram\n const decoder = new TextDecoder()\n let str = ''\n for await (const part of toIterator(this.#parts, false)) {\n str += decoder.decode(part, { stream: true })\n }\n // Remaining\n str += decoder.decode()\n return str\n }\n\n /**\n * The arrayBuffer() method in the Blob interface returns a\n * Promise that resolves with the contents of the blob as\n * binary data contained in an ArrayBuffer.\n *\n * @return {Promise}\n */\n async arrayBuffer () {\n // Easier way... Just a unnecessary overhead\n // const view = new Uint8Array(this.size);\n // await this.stream().getReader({mode: 'byob'}).read(view);\n // return view.buffer;\n\n const data = new Uint8Array(this.size)\n let offset = 0\n for await (const chunk of toIterator(this.#parts, false)) {\n data.set(chunk, offset)\n offset += chunk.length\n }\n\n return data.buffer\n }\n\n stream () {\n const it = toIterator(this.#parts, true)\n\n return new globalThis.ReadableStream({\n // @ts-ignore\n type: 'bytes',\n async pull (ctrl) {\n const chunk = await it.next()\n chunk.done ? ctrl.close() : ctrl.enqueue(chunk.value)\n },\n\n async cancel () {\n await it.return()\n }\n })\n }\n\n /**\n * The Blob interface's slice() method creates and returns a\n * new Blob object which contains data from a subset of the\n * blob on which it's called.\n *\n * @param {number} [start]\n * @param {number} [end]\n * @param {string} [type]\n */\n slice (start = 0, end = this.size, type = '') {\n const { size } = this\n\n let relativeStart = start < 0 ? Math.max(size + start, 0) : Math.min(start, size)\n let relativeEnd = end < 0 ? Math.max(size + end, 0) : Math.min(end, size)\n\n const span = Math.max(relativeEnd - relativeStart, 0)\n const parts = this.#parts\n const blobParts = []\n let added = 0\n\n for (const part of parts) {\n // don't add the overflow to new blobParts\n if (added >= span) {\n break\n }\n\n const size = ArrayBuffer.isView(part) ? 
part.byteLength : part.size\n if (relativeStart && size <= relativeStart) {\n // Skip the beginning and change the relative\n // start & end position as we skip the unwanted parts\n relativeStart -= size\n relativeEnd -= size\n } else {\n let chunk\n if (ArrayBuffer.isView(part)) {\n chunk = part.subarray(relativeStart, Math.min(size, relativeEnd))\n added += chunk.byteLength\n } else {\n chunk = part.slice(relativeStart, Math.min(size, relativeEnd))\n added += chunk.size\n }\n relativeEnd -= size\n blobParts.push(chunk)\n relativeStart = 0 // All next sequential parts should start at 0\n }\n }\n\n const blob = new Blob([], { type: String(type).toLowerCase() })\n blob.#size = span\n blob.#parts = blobParts\n\n return blob\n }\n\n get [Symbol.toStringTag] () {\n return 'Blob'\n }\n\n static [Symbol.hasInstance] (object) {\n return (\n object &&\n typeof object === 'object' &&\n typeof object.constructor === 'function' &&\n (\n typeof object.stream === 'function' ||\n typeof object.arrayBuffer === 'function'\n ) &&\n /^(Blob|File)$/.test(object[Symbol.toStringTag])\n )\n }\n}\n\nObject.defineProperties(_Blob.prototype, {\n size: { enumerable: true },\n type: { enumerable: true },\n slice: { enumerable: true }\n})\n\n/** @type {typeof globalThis.Blob} */\nexport const Blob = _Blob\nexport default Blob\n","/*! formdata-polyfill. MIT License. Jimmy Wärting */\n\nimport C from 'fetch-blob'\nimport F from 'fetch-blob/file.js'\n\nvar {toStringTag:t,iterator:i,hasInstance:h}=Symbol,\nr=Math.random,\nm='append,set,get,getAll,delete,keys,values,entries,forEach,constructor'.split(','),\nf=(a,b,c)=>(a+='',/^(Blob|File)$/.test(b && b[t])?[(c=c!==void 0?c+'':b[t]=='File'?b.name:'blob',a),b.name!==c||b[t]=='blob'?new F([b],c,b):b]:[a,b+'']),\ne=(c,f)=>(f?c:c.replace(/\\r?\\n|\\r/g,'\\r\\n')).replace(/\\n/g,'%0A').replace(/\\r/g,'%0D').replace(/\"/g,'%22'),\nx=(n, a, e)=>{if(a.lengthtypeof o[m]!='function')}\nappend(...a){x('append',arguments,2);this.#d.push(f(...a))}\ndelete(a){x('delete',arguments,1);a+='';this.#d=this.#d.filter(([b])=>b!==a)}\nget(a){x('get',arguments,1);a+='';for(var b=this.#d,l=b.length,c=0;cc[0]===a&&b.push(c[1]));return b}\nhas(a){x('has',arguments,1);a+='';return this.#d.some(b=>b[0]===a)}\nforEach(a,b){x('forEach',arguments,1);for(var [c,d]of this)a.call(b,d,c,this)}\nset(...a){x('set',arguments,2);var b=[],c=!0;a=f(...a);this.#d.forEach(d=>{d[0]===a[0]?c&&(c=!b.push(a)):b.push(d)});c&&b.push(a);this.#d=b}\n*entries(){yield*this.#d}\n*keys(){for(var[a]of this)yield a}\n*values(){for(var[,a]of this)yield a}}\n\n/** @param {FormData} F */\nexport function formDataToBlob (F,B=C){\nvar b=`${r()}${r()}`.replace(/\\./g, '').slice(-28).padStart(32, '-'),c=[],p=`--${b}\\r\\nContent-Disposition: form-data; name=\"`\nF.forEach((v,n)=>typeof v=='string'\n?c.push(p+e(n)+`\"\\r\\n\\r\\n${v.replace(/\\r(?!\\n)|(? 
{\n\treturn (\n\t\ttypeof object === 'object' &&\n\t\ttypeof object.append === 'function' &&\n\t\ttypeof object.delete === 'function' &&\n\t\ttypeof object.get === 'function' &&\n\t\ttypeof object.getAll === 'function' &&\n\t\ttypeof object.has === 'function' &&\n\t\ttypeof object.set === 'function' &&\n\t\ttypeof object.sort === 'function' &&\n\t\tobject[NAME] === 'URLSearchParams'\n\t);\n};\n\n/**\n * Check if `object` is a W3C `Blob` object (which `File` inherits from)\n * @param {*} object - Object to check for\n * @return {boolean}\n */\nexport const isBlob = object => {\n\treturn (\n\t\tobject &&\n\t\ttypeof object === 'object' &&\n\t\ttypeof object.arrayBuffer === 'function' &&\n\t\ttypeof object.type === 'string' &&\n\t\ttypeof object.stream === 'function' &&\n\t\ttypeof object.constructor === 'function' &&\n\t\t/^(Blob|File)$/.test(object[NAME])\n\t);\n};\n\n/**\n * Check if `obj` is an instance of AbortSignal.\n * @param {*} object - Object to check for\n * @return {boolean}\n */\nexport const isAbortSignal = object => {\n\treturn (\n\t\ttypeof object === 'object' && (\n\t\t\tobject[NAME] === 'AbortSignal' ||\n\t\t\tobject[NAME] === 'EventTarget'\n\t\t)\n\t);\n};\n\n/**\n * isDomainOrSubdomain reports whether sub is a subdomain (or exact match) of\n * the parent domain.\n *\n * Both domains must already be in canonical form.\n * @param {string|URL} original\n * @param {string|URL} destination\n */\nexport const isDomainOrSubdomain = (destination, original) => {\n\tconst orig = new URL(original).hostname;\n\tconst dest = new URL(destination).hostname;\n\n\treturn orig === dest || orig.endsWith(`.${dest}`);\n};\n\n/**\n * isSameProtocol reports whether the two provided URLs use the same protocol.\n *\n * Both domains must already be in canonical form.\n * @param {string|URL} original\n * @param {string|URL} destination\n */\nexport const isSameProtocol = (destination, original) => {\n\tconst orig = new URL(original).protocol;\n\tconst dest = new URL(destination).protocol;\n\n\treturn orig === dest;\n};\n","\n/**\n * Body.js\n *\n * Body interface provides common methods for Request and Response\n */\n\nimport Stream, {PassThrough} from 'node:stream';\nimport {types, deprecate, promisify} from 'node:util';\nimport {Buffer} from 'node:buffer';\n\nimport Blob from 'fetch-blob';\nimport {FormData, formDataToBlob} from 'formdata-polyfill/esm.min.js';\n\nimport {FetchError} from './errors/fetch-error.js';\nimport {FetchBaseError} from './errors/base.js';\nimport {isBlob, isURLSearchParameters} from './utils/is.js';\n\nconst pipeline = promisify(Stream.pipeline);\nconst INTERNALS = Symbol('Body internals');\n\n/**\n * Body mixin\n *\n * Ref: https://fetch.spec.whatwg.org/#body\n *\n * @param Stream body Readable stream\n * @param Object opts Response options\n * @return Void\n */\nexport default class Body {\n\tconstructor(body, {\n\t\tsize = 0\n\t} = {}) {\n\t\tlet boundary = null;\n\n\t\tif (body === null) {\n\t\t\t// Body is undefined or null\n\t\t\tbody = null;\n\t\t} else if (isURLSearchParameters(body)) {\n\t\t\t// Body is a URLSearchParams\n\t\t\tbody = Buffer.from(body.toString());\n\t\t} else if (isBlob(body)) {\n\t\t\t// Body is blob\n\t\t} else if (Buffer.isBuffer(body)) {\n\t\t\t// Body is Buffer\n\t\t} else if (types.isAnyArrayBuffer(body)) {\n\t\t\t// Body is ArrayBuffer\n\t\t\tbody = Buffer.from(body);\n\t\t} else if (ArrayBuffer.isView(body)) {\n\t\t\t// Body is ArrayBufferView\n\t\t\tbody = Buffer.from(body.buffer, body.byteOffset, body.byteLength);\n\t\t} else if (body 
instanceof Stream) {\n\t\t\t// Body is stream\n\t\t} else if (body instanceof FormData) {\n\t\t\t// Body is FormData\n\t\t\tbody = formDataToBlob(body);\n\t\t\tboundary = body.type.split('=')[1];\n\t\t} else {\n\t\t\t// None of the above\n\t\t\t// coerce to string then buffer\n\t\t\tbody = Buffer.from(String(body));\n\t\t}\n\n\t\tlet stream = body;\n\n\t\tif (Buffer.isBuffer(body)) {\n\t\t\tstream = Stream.Readable.from(body);\n\t\t} else if (isBlob(body)) {\n\t\t\tstream = Stream.Readable.from(body.stream());\n\t\t}\n\n\t\tthis[INTERNALS] = {\n\t\t\tbody,\n\t\t\tstream,\n\t\t\tboundary,\n\t\t\tdisturbed: false,\n\t\t\terror: null\n\t\t};\n\t\tthis.size = size;\n\n\t\tif (body instanceof Stream) {\n\t\t\tbody.on('error', error_ => {\n\t\t\t\tconst error = error_ instanceof FetchBaseError ?\n\t\t\t\t\terror_ :\n\t\t\t\t\tnew FetchError(`Invalid response body while trying to fetch ${this.url}: ${error_.message}`, 'system', error_);\n\t\t\t\tthis[INTERNALS].error = error;\n\t\t\t});\n\t\t}\n\t}\n\n\tget body() {\n\t\treturn this[INTERNALS].stream;\n\t}\n\n\tget bodyUsed() {\n\t\treturn this[INTERNALS].disturbed;\n\t}\n\n\t/**\n\t * Decode response as ArrayBuffer\n\t *\n\t * @return Promise\n\t */\n\tasync arrayBuffer() {\n\t\tconst {buffer, byteOffset, byteLength} = await consumeBody(this);\n\t\treturn buffer.slice(byteOffset, byteOffset + byteLength);\n\t}\n\n\tasync formData() {\n\t\tconst ct = this.headers.get('content-type');\n\n\t\tif (ct.startsWith('application/x-www-form-urlencoded')) {\n\t\t\tconst formData = new FormData();\n\t\t\tconst parameters = new URLSearchParams(await this.text());\n\n\t\t\tfor (const [name, value] of parameters) {\n\t\t\t\tformData.append(name, value);\n\t\t\t}\n\n\t\t\treturn formData;\n\t\t}\n\n\t\tconst {toFormData} = await import('./utils/multipart-parser.js');\n\t\treturn toFormData(this.body, ct);\n\t}\n\n\t/**\n\t * Return raw response as Blob\n\t *\n\t * @return Promise\n\t */\n\tasync blob() {\n\t\tconst ct = (this.headers && this.headers.get('content-type')) || (this[INTERNALS].body && this[INTERNALS].body.type) || '';\n\t\tconst buf = await this.arrayBuffer();\n\n\t\treturn new Blob([buf], {\n\t\t\ttype: ct\n\t\t});\n\t}\n\n\t/**\n\t * Decode response as json\n\t *\n\t * @return Promise\n\t */\n\tasync json() {\n\t\tconst text = await this.text();\n\t\treturn JSON.parse(text);\n\t}\n\n\t/**\n\t * Decode response as text\n\t *\n\t * @return Promise\n\t */\n\tasync text() {\n\t\tconst buffer = await consumeBody(this);\n\t\treturn new TextDecoder().decode(buffer);\n\t}\n\n\t/**\n\t * Decode response as buffer (non-spec api)\n\t *\n\t * @return Promise\n\t */\n\tbuffer() {\n\t\treturn consumeBody(this);\n\t}\n}\n\nBody.prototype.buffer = deprecate(Body.prototype.buffer, 'Please use \\'response.arrayBuffer()\\' instead of \\'response.buffer()\\'', 'node-fetch#buffer');\n\n// In browsers, all properties are enumerable.\nObject.defineProperties(Body.prototype, {\n\tbody: {enumerable: true},\n\tbodyUsed: {enumerable: true},\n\tarrayBuffer: {enumerable: true},\n\tblob: {enumerable: true},\n\tjson: {enumerable: true},\n\ttext: {enumerable: true},\n\tdata: {get: deprecate(() => {},\n\t\t'data doesn\\'t exist, use json(), text(), arrayBuffer(), or body instead',\n\t\t'https://github.com/node-fetch/node-fetch/issues/1000 (response)')}\n});\n\n/**\n * Consume and convert an entire Body to a Buffer.\n *\n * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body\n *\n * @return Promise\n */\nasync function consumeBody(data) {\n\tif 
(data[INTERNALS].disturbed) {\n\t\tthrow new TypeError(`body used already for: ${data.url}`);\n\t}\n\n\tdata[INTERNALS].disturbed = true;\n\n\tif (data[INTERNALS].error) {\n\t\tthrow data[INTERNALS].error;\n\t}\n\n\tconst {body} = data;\n\n\t// Body is null\n\tif (body === null) {\n\t\treturn Buffer.alloc(0);\n\t}\n\n\t/* c8 ignore next 3 */\n\tif (!(body instanceof Stream)) {\n\t\treturn Buffer.alloc(0);\n\t}\n\n\t// Body is stream\n\t// get ready to actually consume the body\n\tconst accum = [];\n\tlet accumBytes = 0;\n\n\ttry {\n\t\tfor await (const chunk of body) {\n\t\t\tif (data.size > 0 && accumBytes + chunk.length > data.size) {\n\t\t\t\tconst error = new FetchError(`content size at ${data.url} over limit: ${data.size}`, 'max-size');\n\t\t\t\tbody.destroy(error);\n\t\t\t\tthrow error;\n\t\t\t}\n\n\t\t\taccumBytes += chunk.length;\n\t\t\taccum.push(chunk);\n\t\t}\n\t} catch (error) {\n\t\tconst error_ = error instanceof FetchBaseError ? error : new FetchError(`Invalid response body while trying to fetch ${data.url}: ${error.message}`, 'system', error);\n\t\tthrow error_;\n\t}\n\n\tif (body.readableEnded === true || body._readableState.ended === true) {\n\t\ttry {\n\t\t\tif (accum.every(c => typeof c === 'string')) {\n\t\t\t\treturn Buffer.from(accum.join(''));\n\t\t\t}\n\n\t\t\treturn Buffer.concat(accum, accumBytes);\n\t\t} catch (error) {\n\t\t\tthrow new FetchError(`Could not create Buffer from response body for ${data.url}: ${error.message}`, 'system', error);\n\t\t}\n\t} else {\n\t\tthrow new FetchError(`Premature close of server response while trying to fetch ${data.url}`);\n\t}\n}\n\n/**\n * Clone body given Res/Req instance\n *\n * @param Mixed instance Response or Request instance\n * @param String highWaterMark highWaterMark for both PassThrough body streams\n * @return Mixed\n */\nexport const clone = (instance, highWaterMark) => {\n\tlet p1;\n\tlet p2;\n\tlet {body} = instance[INTERNALS];\n\n\t// Don't allow cloning a used body\n\tif (instance.bodyUsed) {\n\t\tthrow new Error('cannot clone body after it is used');\n\t}\n\n\t// Check that body is a stream and not form-data object\n\t// note: we can't clone the form-data object without having it as a dependency\n\tif ((body instanceof Stream) && (typeof body.getBoundary !== 'function')) {\n\t\t// Tee instance body\n\t\tp1 = new PassThrough({highWaterMark});\n\t\tp2 = new PassThrough({highWaterMark});\n\t\tbody.pipe(p1);\n\t\tbody.pipe(p2);\n\t\t// Set instance body to teed body and return the other teed body\n\t\tinstance[INTERNALS].stream = p1;\n\t\tbody = p2;\n\t}\n\n\treturn body;\n};\n\nconst getNonSpecFormDataBoundary = deprecate(\n\tbody => body.getBoundary(),\n\t'form-data doesn\\'t follow the spec and requires special treatment. 
Use alternative package',\n\t'https://github.com/node-fetch/node-fetch/issues/1167'\n);\n\n/**\n * Performs the operation \"extract a `Content-Type` value from |object|\" as\n * specified in the specification:\n * https://fetch.spec.whatwg.org/#concept-bodyinit-extract\n *\n * This function assumes that instance.body is present.\n *\n * @param {any} body Any options.body input\n * @returns {string | null}\n */\nexport const extractContentType = (body, request) => {\n\t// Body is null or undefined\n\tif (body === null) {\n\t\treturn null;\n\t}\n\n\t// Body is string\n\tif (typeof body === 'string') {\n\t\treturn 'text/plain;charset=UTF-8';\n\t}\n\n\t// Body is a URLSearchParams\n\tif (isURLSearchParameters(body)) {\n\t\treturn 'application/x-www-form-urlencoded;charset=UTF-8';\n\t}\n\n\t// Body is blob\n\tif (isBlob(body)) {\n\t\treturn body.type || null;\n\t}\n\n\t// Body is a Buffer (Buffer, ArrayBuffer or ArrayBufferView)\n\tif (Buffer.isBuffer(body) || types.isAnyArrayBuffer(body) || ArrayBuffer.isView(body)) {\n\t\treturn null;\n\t}\n\n\tif (body instanceof FormData) {\n\t\treturn `multipart/form-data; boundary=${request[INTERNALS].boundary}`;\n\t}\n\n\t// Detect form data input from form-data module\n\tif (body && typeof body.getBoundary === 'function') {\n\t\treturn `multipart/form-data;boundary=${getNonSpecFormDataBoundary(body)}`;\n\t}\n\n\t// Body is stream - can't really do much about this\n\tif (body instanceof Stream) {\n\t\treturn null;\n\t}\n\n\t// Body constructor defaults other things to string\n\treturn 'text/plain;charset=UTF-8';\n};\n\n/**\n * The Fetch Standard treats this as if \"total bytes\" is a property on the body.\n * For us, we have to explicitly get it with a function.\n *\n * ref: https://fetch.spec.whatwg.org/#concept-body-total-bytes\n *\n * @param {any} obj.body Body object from the Body instance.\n * @returns {number | null}\n */\nexport const getTotalBytes = request => {\n\tconst {body} = request[INTERNALS];\n\n\t// Body is null or undefined\n\tif (body === null) {\n\t\treturn 0;\n\t}\n\n\t// Body is Blob\n\tif (isBlob(body)) {\n\t\treturn body.size;\n\t}\n\n\t// Body is Buffer\n\tif (Buffer.isBuffer(body)) {\n\t\treturn body.length;\n\t}\n\n\t// Detect form data input from form-data module\n\tif (body && typeof body.getLengthSync === 'function') {\n\t\treturn body.hasKnownLength && body.hasKnownLength() ? body.getLengthSync() : null;\n\t}\n\n\t// Body is stream\n\treturn null;\n};\n\n/**\n * Write a Body to a Node.js WritableStream (e.g. 
http.Request) object.\n *\n * @param {Stream.Writable} dest The stream to write to.\n * @param obj.body Body object from the Body instance.\n * @returns {Promise}\n */\nexport const writeToStream = async (dest, {body}) => {\n\tif (body === null) {\n\t\t// Body is null\n\t\tdest.end();\n\t} else {\n\t\t// Body is stream\n\t\tawait pipeline(body, dest);\n\t}\n};\n","/**\n * Headers.js\n *\n * Headers class offers convenient helpers\n */\n\nimport {types} from 'node:util';\nimport http from 'node:http';\n\n/* c8 ignore next 9 */\nconst validateHeaderName = typeof http.validateHeaderName === 'function' ?\n\thttp.validateHeaderName :\n\tname => {\n\t\tif (!/^[\\^`\\-\\w!#$%&'*+.|~]+$/.test(name)) {\n\t\t\tconst error = new TypeError(`Header name must be a valid HTTP token [${name}]`);\n\t\t\tObject.defineProperty(error, 'code', {value: 'ERR_INVALID_HTTP_TOKEN'});\n\t\t\tthrow error;\n\t\t}\n\t};\n\n/* c8 ignore next 9 */\nconst validateHeaderValue = typeof http.validateHeaderValue === 'function' ?\n\thttp.validateHeaderValue :\n\t(name, value) => {\n\t\tif (/[^\\t\\u0020-\\u007E\\u0080-\\u00FF]/.test(value)) {\n\t\t\tconst error = new TypeError(`Invalid character in header content [\"${name}\"]`);\n\t\t\tObject.defineProperty(error, 'code', {value: 'ERR_INVALID_CHAR'});\n\t\t\tthrow error;\n\t\t}\n\t};\n\n/**\n * @typedef {Headers | Record | Iterable | Iterable>} HeadersInit\n */\n\n/**\n * This Fetch API interface allows you to perform various actions on HTTP request and response headers.\n * These actions include retrieving, setting, adding to, and removing.\n * A Headers object has an associated header list, which is initially empty and consists of zero or more name and value pairs.\n * You can add to this using methods like append() (see Examples.)\n * In all methods of this interface, header names are matched by case-insensitive byte sequence.\n *\n */\nexport default class Headers extends URLSearchParams {\n\t/**\n\t * Headers class\n\t *\n\t * @constructor\n\t * @param {HeadersInit} [init] - Response headers\n\t */\n\tconstructor(init) {\n\t\t// Validate and normalize init object in [name, value(s)][]\n\t\t/** @type {string[][]} */\n\t\tlet result = [];\n\t\tif (init instanceof Headers) {\n\t\t\tconst raw = init.raw();\n\t\t\tfor (const [name, values] of Object.entries(raw)) {\n\t\t\t\tresult.push(...values.map(value => [name, value]));\n\t\t\t}\n\t\t} else if (init == null) { // eslint-disable-line no-eq-null, eqeqeq\n\t\t\t// No op\n\t\t} else if (typeof init === 'object' && !types.isBoxedPrimitive(init)) {\n\t\t\tconst method = init[Symbol.iterator];\n\t\t\t// eslint-disable-next-line no-eq-null, eqeqeq\n\t\t\tif (method == null) {\n\t\t\t\t// Record\n\t\t\t\tresult.push(...Object.entries(init));\n\t\t\t} else {\n\t\t\t\tif (typeof method !== 'function') {\n\t\t\t\t\tthrow new TypeError('Header pairs must be iterable');\n\t\t\t\t}\n\n\t\t\t\t// Sequence>\n\t\t\t\t// Note: per spec we have to first exhaust the lists then process them\n\t\t\t\tresult = [...init]\n\t\t\t\t\t.map(pair => {\n\t\t\t\t\t\tif (\n\t\t\t\t\t\t\ttypeof pair !== 'object' || types.isBoxedPrimitive(pair)\n\t\t\t\t\t\t) {\n\t\t\t\t\t\t\tthrow new TypeError('Each header pair must be an iterable object');\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\treturn [...pair];\n\t\t\t\t\t}).map(pair => {\n\t\t\t\t\t\tif (pair.length !== 2) {\n\t\t\t\t\t\t\tthrow new TypeError('Each header pair must be a name/value tuple');\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\treturn [...pair];\n\t\t\t\t\t});\n\t\t\t}\n\t\t} else {\n\t\t\tthrow new TypeError('Failed 
to construct \\'Headers\\': The provided value is not of type \\'(sequence> or record)');\n\t\t}\n\n\t\t// Validate and lowercase\n\t\tresult =\n\t\t\tresult.length > 0 ?\n\t\t\t\tresult.map(([name, value]) => {\n\t\t\t\t\tvalidateHeaderName(name);\n\t\t\t\t\tvalidateHeaderValue(name, String(value));\n\t\t\t\t\treturn [String(name).toLowerCase(), String(value)];\n\t\t\t\t}) :\n\t\t\t\tundefined;\n\n\t\tsuper(result);\n\n\t\t// Returning a Proxy that will lowercase key names, validate parameters and sort keys\n\t\t// eslint-disable-next-line no-constructor-return\n\t\treturn new Proxy(this, {\n\t\t\tget(target, p, receiver) {\n\t\t\t\tswitch (p) {\n\t\t\t\t\tcase 'append':\n\t\t\t\t\tcase 'set':\n\t\t\t\t\t\treturn (name, value) => {\n\t\t\t\t\t\t\tvalidateHeaderName(name);\n\t\t\t\t\t\t\tvalidateHeaderValue(name, String(value));\n\t\t\t\t\t\t\treturn URLSearchParams.prototype[p].call(\n\t\t\t\t\t\t\t\ttarget,\n\t\t\t\t\t\t\t\tString(name).toLowerCase(),\n\t\t\t\t\t\t\t\tString(value)\n\t\t\t\t\t\t\t);\n\t\t\t\t\t\t};\n\n\t\t\t\t\tcase 'delete':\n\t\t\t\t\tcase 'has':\n\t\t\t\t\tcase 'getAll':\n\t\t\t\t\t\treturn name => {\n\t\t\t\t\t\t\tvalidateHeaderName(name);\n\t\t\t\t\t\t\treturn URLSearchParams.prototype[p].call(\n\t\t\t\t\t\t\t\ttarget,\n\t\t\t\t\t\t\t\tString(name).toLowerCase()\n\t\t\t\t\t\t\t);\n\t\t\t\t\t\t};\n\n\t\t\t\t\tcase 'keys':\n\t\t\t\t\t\treturn () => {\n\t\t\t\t\t\t\ttarget.sort();\n\t\t\t\t\t\t\treturn new Set(URLSearchParams.prototype.keys.call(target)).keys();\n\t\t\t\t\t\t};\n\n\t\t\t\t\tdefault:\n\t\t\t\t\t\treturn Reflect.get(target, p, receiver);\n\t\t\t\t}\n\t\t\t}\n\t\t});\n\t\t/* c8 ignore next */\n\t}\n\n\tget [Symbol.toStringTag]() {\n\t\treturn this.constructor.name;\n\t}\n\n\ttoString() {\n\t\treturn Object.prototype.toString.call(this);\n\t}\n\n\tget(name) {\n\t\tconst values = this.getAll(name);\n\t\tif (values.length === 0) {\n\t\t\treturn null;\n\t\t}\n\n\t\tlet value = values.join(', ');\n\t\tif (/^content-encoding$/i.test(name)) {\n\t\t\tvalue = value.toLowerCase();\n\t\t}\n\n\t\treturn value;\n\t}\n\n\tforEach(callback, thisArg = undefined) {\n\t\tfor (const name of this.keys()) {\n\t\t\tReflect.apply(callback, thisArg, [this.get(name), name, this]);\n\t\t}\n\t}\n\n\t* values() {\n\t\tfor (const name of this.keys()) {\n\t\t\tyield this.get(name);\n\t\t}\n\t}\n\n\t/**\n\t * @type {() => IterableIterator<[string, string]>}\n\t */\n\t* entries() {\n\t\tfor (const name of this.keys()) {\n\t\t\tyield [name, this.get(name)];\n\t\t}\n\t}\n\n\t[Symbol.iterator]() {\n\t\treturn this.entries();\n\t}\n\n\t/**\n\t * Node-fetch non-spec method\n\t * returning all headers and their values as array\n\t * @returns {Record}\n\t */\n\traw() {\n\t\treturn [...this.keys()].reduce((result, key) => {\n\t\t\tresult[key] = this.getAll(key);\n\t\t\treturn result;\n\t\t}, {});\n\t}\n\n\t/**\n\t * For better console.log(headers) and also to convert Headers into Node.js Request compatible format\n\t */\n\t[Symbol.for('nodejs.util.inspect.custom')]() {\n\t\treturn [...this.keys()].reduce((result, key) => {\n\t\t\tconst values = this.getAll(key);\n\t\t\t// Http.request() only supports string as Host header.\n\t\t\t// This hack makes specifying custom Host header possible.\n\t\t\tif (key === 'host') {\n\t\t\t\tresult[key] = values[0];\n\t\t\t} else {\n\t\t\t\tresult[key] = values.length > 1 ? 
values : values[0];\n\t\t\t}\n\n\t\t\treturn result;\n\t\t}, {});\n\t}\n}\n\n/**\n * Re-shaping object for Web IDL tests\n * Only need to do it for overridden methods\n */\nObject.defineProperties(\n\tHeaders.prototype,\n\t['get', 'entries', 'forEach', 'values'].reduce((result, property) => {\n\t\tresult[property] = {enumerable: true};\n\t\treturn result;\n\t}, {})\n);\n\n/**\n * Create a Headers object from an http.IncomingMessage.rawHeaders, ignoring those that do\n * not conform to HTTP grammar productions.\n * @param {import('http').IncomingMessage['rawHeaders']} headers\n */\nexport function fromRawHeaders(headers = []) {\n\treturn new Headers(\n\t\theaders\n\t\t\t// Split into pairs\n\t\t\t.reduce((result, value, index, array) => {\n\t\t\t\tif (index % 2 === 0) {\n\t\t\t\t\tresult.push(array.slice(index, index + 2));\n\t\t\t\t}\n\n\t\t\t\treturn result;\n\t\t\t}, [])\n\t\t\t.filter(([name, value]) => {\n\t\t\t\ttry {\n\t\t\t\t\tvalidateHeaderName(name);\n\t\t\t\t\tvalidateHeaderValue(name, String(value));\n\t\t\t\t\treturn true;\n\t\t\t\t} catch {\n\t\t\t\t\treturn false;\n\t\t\t\t}\n\t\t\t})\n\n\t);\n}\n","const redirectStatus = new Set([301, 302, 303, 307, 308]);\n\n/**\n * Redirect code matching\n *\n * @param {number} code - Status code\n * @return {boolean}\n */\nexport const isRedirect = code => {\n\treturn redirectStatus.has(code);\n};\n","/**\n * Response.js\n *\n * Response class provides content decoding\n */\n\nimport Headers from './headers.js';\nimport Body, {clone, extractContentType} from './body.js';\nimport {isRedirect} from './utils/is-redirect.js';\n\nconst INTERNALS = Symbol('Response internals');\n\n/**\n * Response class\n *\n * Ref: https://fetch.spec.whatwg.org/#response-class\n *\n * @param Stream body Readable stream\n * @param Object opts Response options\n * @return Void\n */\nexport default class Response extends Body {\n\tconstructor(body = null, options = {}) {\n\t\tsuper(body, options);\n\n\t\t// eslint-disable-next-line no-eq-null, eqeqeq, no-negated-condition\n\t\tconst status = options.status != null ? 
options.status : 200;\n\n\t\tconst headers = new Headers(options.headers);\n\n\t\tif (body !== null && !headers.has('Content-Type')) {\n\t\t\tconst contentType = extractContentType(body, this);\n\t\t\tif (contentType) {\n\t\t\t\theaders.append('Content-Type', contentType);\n\t\t\t}\n\t\t}\n\n\t\tthis[INTERNALS] = {\n\t\t\ttype: 'default',\n\t\t\turl: options.url,\n\t\t\tstatus,\n\t\t\tstatusText: options.statusText || '',\n\t\t\theaders,\n\t\t\tcounter: options.counter,\n\t\t\thighWaterMark: options.highWaterMark\n\t\t};\n\t}\n\n\tget type() {\n\t\treturn this[INTERNALS].type;\n\t}\n\n\tget url() {\n\t\treturn this[INTERNALS].url || '';\n\t}\n\n\tget status() {\n\t\treturn this[INTERNALS].status;\n\t}\n\n\t/**\n\t * Convenience property representing if the request ended normally\n\t */\n\tget ok() {\n\t\treturn this[INTERNALS].status >= 200 && this[INTERNALS].status < 300;\n\t}\n\n\tget redirected() {\n\t\treturn this[INTERNALS].counter > 0;\n\t}\n\n\tget statusText() {\n\t\treturn this[INTERNALS].statusText;\n\t}\n\n\tget headers() {\n\t\treturn this[INTERNALS].headers;\n\t}\n\n\tget highWaterMark() {\n\t\treturn this[INTERNALS].highWaterMark;\n\t}\n\n\t/**\n\t * Clone this response\n\t *\n\t * @return Response\n\t */\n\tclone() {\n\t\treturn new Response(clone(this, this.highWaterMark), {\n\t\t\ttype: this.type,\n\t\t\turl: this.url,\n\t\t\tstatus: this.status,\n\t\t\tstatusText: this.statusText,\n\t\t\theaders: this.headers,\n\t\t\tok: this.ok,\n\t\t\tredirected: this.redirected,\n\t\t\tsize: this.size,\n\t\t\thighWaterMark: this.highWaterMark\n\t\t});\n\t}\n\n\t/**\n\t * @param {string} url The URL that the new response is to originate from.\n\t * @param {number} status An optional status code for the response (e.g., 302.)\n\t * @returns {Response} A Response object.\n\t */\n\tstatic redirect(url, status = 302) {\n\t\tif (!isRedirect(status)) {\n\t\t\tthrow new RangeError('Failed to execute \"redirect\" on \"response\": Invalid status code');\n\t\t}\n\n\t\treturn new Response(null, {\n\t\t\theaders: {\n\t\t\t\tlocation: new URL(url).toString()\n\t\t\t},\n\t\t\tstatus\n\t\t});\n\t}\n\n\tstatic error() {\n\t\tconst response = new Response(null, {status: 0, statusText: ''});\n\t\tresponse[INTERNALS].type = 'error';\n\t\treturn response;\n\t}\n\n\tstatic json(data = undefined, init = {}) {\n\t\tconst body = JSON.stringify(data);\n\n\t\tif (body === undefined) {\n\t\t\tthrow new TypeError('data is not JSON serializable');\n\t\t}\n\n\t\tconst headers = new Headers(init && init.headers);\n\n\t\tif (!headers.has('content-type')) {\n\t\t\theaders.set('content-type', 'application/json');\n\t\t}\n\n\t\treturn new Response(body, {\n\t\t\t...init,\n\t\t\theaders\n\t\t});\n\t}\n\n\tget [Symbol.toStringTag]() {\n\t\treturn 'Response';\n\t}\n}\n\nObject.defineProperties(Response.prototype, {\n\ttype: {enumerable: true},\n\turl: {enumerable: true},\n\tstatus: {enumerable: true},\n\tok: {enumerable: true},\n\tredirected: {enumerable: true},\n\tstatusText: {enumerable: true},\n\theaders: {enumerable: true},\n\tclone: {enumerable: true}\n});\n","const __WEBPACK_NAMESPACE_OBJECT__ = require(\"node:url\");","export const getSearch = parsedURL => {\n\tif (parsedURL.search) {\n\t\treturn parsedURL.search;\n\t}\n\n\tconst lastOffset = parsedURL.href.length - 1;\n\tconst hash = parsedURL.hash || (parsedURL.href[lastOffset] === '#' ? '#' : '');\n\treturn parsedURL.href[lastOffset - hash.length] === '?' ? '?' 
: '';\n};\n","const __WEBPACK_NAMESPACE_OBJECT__ = require(\"node:net\");","import {isIP} from 'node:net';\n\n/**\n * @external URL\n * @see {@link https://developer.mozilla.org/en-US/docs/Web/API/URL|URL}\n */\n\n/**\n * @module utils/referrer\n * @private\n */\n\n/**\n * @see {@link https://w3c.github.io/webappsec-referrer-policy/#strip-url|Referrer Policy §8.4. Strip url for use as a referrer}\n * @param {string} URL\n * @param {boolean} [originOnly=false]\n */\nexport function stripURLForUseAsAReferrer(url, originOnly = false) {\n\t// 1. If url is null, return no referrer.\n\tif (url == null) { // eslint-disable-line no-eq-null, eqeqeq\n\t\treturn 'no-referrer';\n\t}\n\n\turl = new URL(url);\n\n\t// 2. If url's scheme is a local scheme, then return no referrer.\n\tif (/^(about|blob|data):$/.test(url.protocol)) {\n\t\treturn 'no-referrer';\n\t}\n\n\t// 3. Set url's username to the empty string.\n\turl.username = '';\n\n\t// 4. Set url's password to null.\n\t// Note: `null` appears to be a mistake as this actually results in the password being `\"null\"`.\n\turl.password = '';\n\n\t// 5. Set url's fragment to null.\n\t// Note: `null` appears to be a mistake as this actually results in the fragment being `\"#null\"`.\n\turl.hash = '';\n\n\t// 6. If the origin-only flag is true, then:\n\tif (originOnly) {\n\t\t// 6.1. Set url's path to null.\n\t\t// Note: `null` appears to be a mistake as this actually results in the path being `\"/null\"`.\n\t\turl.pathname = '';\n\n\t\t// 6.2. Set url's query to null.\n\t\t// Note: `null` appears to be a mistake as this actually results in the query being `\"?null\"`.\n\t\turl.search = '';\n\t}\n\n\t// 7. Return url.\n\treturn url;\n}\n\n/**\n * @see {@link https://w3c.github.io/webappsec-referrer-policy/#enumdef-referrerpolicy|enum ReferrerPolicy}\n */\nexport const ReferrerPolicy = new Set([\n\t'',\n\t'no-referrer',\n\t'no-referrer-when-downgrade',\n\t'same-origin',\n\t'origin',\n\t'strict-origin',\n\t'origin-when-cross-origin',\n\t'strict-origin-when-cross-origin',\n\t'unsafe-url'\n]);\n\n/**\n * @see {@link https://w3c.github.io/webappsec-referrer-policy/#default-referrer-policy|default referrer policy}\n */\nexport const DEFAULT_REFERRER_POLICY = 'strict-origin-when-cross-origin';\n\n/**\n * @see {@link https://w3c.github.io/webappsec-referrer-policy/#referrer-policies|Referrer Policy §3. Referrer Policies}\n * @param {string} referrerPolicy\n * @returns {string} referrerPolicy\n */\nexport function validateReferrerPolicy(referrerPolicy) {\n\tif (!ReferrerPolicy.has(referrerPolicy)) {\n\t\tthrow new TypeError(`Invalid referrerPolicy: ${referrerPolicy}`);\n\t}\n\n\treturn referrerPolicy;\n}\n\n/**\n * @see {@link https://w3c.github.io/webappsec-secure-contexts/#is-origin-trustworthy|Referrer Policy §3.2. Is origin potentially trustworthy?}\n * @param {external:URL} url\n * @returns `true`: \"Potentially Trustworthy\", `false`: \"Not Trustworthy\"\n */\nexport function isOriginPotentiallyTrustworthy(url) {\n\t// 1. If origin is an opaque origin, return \"Not Trustworthy\".\n\t// Not applicable\n\n\t// 2. Assert: origin is a tuple origin.\n\t// Not for implementations\n\n\t// 3. If origin's scheme is either \"https\" or \"wss\", return \"Potentially Trustworthy\".\n\tif (/^(http|ws)s:$/.test(url.protocol)) {\n\t\treturn true;\n\t}\n\n\t// 4. 
If origin's host component matches one of the CIDR notations 127.0.0.0/8 or ::1/128 [RFC4632], return \"Potentially Trustworthy\".\n\tconst hostIp = url.host.replace(/(^\\[)|(]$)/g, '');\n\tconst hostIPVersion = isIP(hostIp);\n\n\tif (hostIPVersion === 4 && /^127\\./.test(hostIp)) {\n\t\treturn true;\n\t}\n\n\tif (hostIPVersion === 6 && /^(((0+:){7})|(::(0+:){0,6}))0*1$/.test(hostIp)) {\n\t\treturn true;\n\t}\n\n\t// 5. If origin's host component is \"localhost\" or falls within \".localhost\", and the user agent conforms to the name resolution rules in [let-localhost-be-localhost], return \"Potentially Trustworthy\".\n\t// We are returning FALSE here because we cannot ensure conformance to\n\t// let-localhost-be-loalhost (https://tools.ietf.org/html/draft-west-let-localhost-be-localhost)\n\tif (url.host === 'localhost' || url.host.endsWith('.localhost')) {\n\t\treturn false;\n\t}\n\n\t// 6. If origin's scheme component is file, return \"Potentially Trustworthy\".\n\tif (url.protocol === 'file:') {\n\t\treturn true;\n\t}\n\n\t// 7. If origin's scheme component is one which the user agent considers to be authenticated, return \"Potentially Trustworthy\".\n\t// Not supported\n\n\t// 8. If origin has been configured as a trustworthy origin, return \"Potentially Trustworthy\".\n\t// Not supported\n\n\t// 9. Return \"Not Trustworthy\".\n\treturn false;\n}\n\n/**\n * @see {@link https://w3c.github.io/webappsec-secure-contexts/#is-url-trustworthy|Referrer Policy §3.3. Is url potentially trustworthy?}\n * @param {external:URL} url\n * @returns `true`: \"Potentially Trustworthy\", `false`: \"Not Trustworthy\"\n */\nexport function isUrlPotentiallyTrustworthy(url) {\n\t// 1. If url is \"about:blank\" or \"about:srcdoc\", return \"Potentially Trustworthy\".\n\tif (/^about:(blank|srcdoc)$/.test(url)) {\n\t\treturn true;\n\t}\n\n\t// 2. If url's scheme is \"data\", return \"Potentially Trustworthy\".\n\tif (url.protocol === 'data:') {\n\t\treturn true;\n\t}\n\n\t// Note: The origin of blob: and filesystem: URLs is the origin of the context in which they were\n\t// created. Therefore, blobs created in a trustworthy origin will themselves be potentially\n\t// trustworthy.\n\tif (/^(blob|filesystem):$/.test(url.protocol)) {\n\t\treturn true;\n\t}\n\n\t// 3. Return the result of executing §3.2 Is origin potentially trustworthy? on url's origin.\n\treturn isOriginPotentiallyTrustworthy(url);\n}\n\n/**\n * Modifies the referrerURL to enforce any extra security policy considerations.\n * @see {@link https://w3c.github.io/webappsec-referrer-policy/#determine-requests-referrer|Referrer Policy §8.3. Determine request's Referrer}, step 7\n * @callback module:utils/referrer~referrerURLCallback\n * @param {external:URL} referrerURL\n * @returns {external:URL} modified referrerURL\n */\n\n/**\n * Modifies the referrerOrigin to enforce any extra security policy considerations.\n * @see {@link https://w3c.github.io/webappsec-referrer-policy/#determine-requests-referrer|Referrer Policy §8.3. Determine request's Referrer}, step 7\n * @callback module:utils/referrer~referrerOriginCallback\n * @param {external:URL} referrerOrigin\n * @returns {external:URL} modified referrerOrigin\n */\n\n/**\n * @see {@link https://w3c.github.io/webappsec-referrer-policy/#determine-requests-referrer|Referrer Policy §8.3. 
Determine request's Referrer}\n * @param {Request} request\n * @param {object} o\n * @param {module:utils/referrer~referrerURLCallback} o.referrerURLCallback\n * @param {module:utils/referrer~referrerOriginCallback} o.referrerOriginCallback\n * @returns {external:URL} Request's referrer\n */\nexport function determineRequestsReferrer(request, {referrerURLCallback, referrerOriginCallback} = {}) {\n\t// There are 2 notes in the specification about invalid pre-conditions. We return null, here, for\n\t// these cases:\n\t// > Note: If request's referrer is \"no-referrer\", Fetch will not call into this algorithm.\n\t// > Note: If request's referrer policy is the empty string, Fetch will not call into this\n\t// > algorithm.\n\tif (request.referrer === 'no-referrer' || request.referrerPolicy === '') {\n\t\treturn null;\n\t}\n\n\t// 1. Let policy be request's associated referrer policy.\n\tconst policy = request.referrerPolicy;\n\n\t// 2. Let environment be request's client.\n\t// not applicable to node.js\n\n\t// 3. Switch on request's referrer:\n\tif (request.referrer === 'about:client') {\n\t\treturn 'no-referrer';\n\t}\n\n\t// \"a URL\": Let referrerSource be request's referrer.\n\tconst referrerSource = request.referrer;\n\n\t// 4. Let request's referrerURL be the result of stripping referrerSource for use as a referrer.\n\tlet referrerURL = stripURLForUseAsAReferrer(referrerSource);\n\n\t// 5. Let referrerOrigin be the result of stripping referrerSource for use as a referrer, with the\n\t// origin-only flag set to true.\n\tlet referrerOrigin = stripURLForUseAsAReferrer(referrerSource, true);\n\n\t// 6. If the result of serializing referrerURL is a string whose length is greater than 4096, set\n\t// referrerURL to referrerOrigin.\n\tif (referrerURL.toString().length > 4096) {\n\t\treferrerURL = referrerOrigin;\n\t}\n\n\t// 7. The user agent MAY alter referrerURL or referrerOrigin at this point to enforce arbitrary\n\t// policy considerations in the interests of minimizing data leakage. For example, the user\n\t// agent could strip the URL down to an origin, modify its host, replace it with an empty\n\t// string, etc.\n\tif (referrerURLCallback) {\n\t\treferrerURL = referrerURLCallback(referrerURL);\n\t}\n\n\tif (referrerOriginCallback) {\n\t\treferrerOrigin = referrerOriginCallback(referrerOrigin);\n\t}\n\n\t// 8.Execute the statements corresponding to the value of policy:\n\tconst currentURL = new URL(request.url);\n\n\tswitch (policy) {\n\t\tcase 'no-referrer':\n\t\t\treturn 'no-referrer';\n\n\t\tcase 'origin':\n\t\t\treturn referrerOrigin;\n\n\t\tcase 'unsafe-url':\n\t\t\treturn referrerURL;\n\n\t\tcase 'strict-origin':\n\t\t\t// 1. If referrerURL is a potentially trustworthy URL and request's current URL is not a\n\t\t\t// potentially trustworthy URL, then return no referrer.\n\t\t\tif (isUrlPotentiallyTrustworthy(referrerURL) && !isUrlPotentiallyTrustworthy(currentURL)) {\n\t\t\t\treturn 'no-referrer';\n\t\t\t}\n\n\t\t\t// 2. Return referrerOrigin.\n\t\t\treturn referrerOrigin.toString();\n\n\t\tcase 'strict-origin-when-cross-origin':\n\t\t\t// 1. If the origin of referrerURL and the origin of request's current URL are the same, then\n\t\t\t// return referrerURL.\n\t\t\tif (referrerURL.origin === currentURL.origin) {\n\t\t\t\treturn referrerURL;\n\t\t\t}\n\n\t\t\t// 2. 
If referrerURL is a potentially trustworthy URL and request's current URL is not a\n\t\t\t// potentially trustworthy URL, then return no referrer.\n\t\t\tif (isUrlPotentiallyTrustworthy(referrerURL) && !isUrlPotentiallyTrustworthy(currentURL)) {\n\t\t\t\treturn 'no-referrer';\n\t\t\t}\n\n\t\t\t// 3. Return referrerOrigin.\n\t\t\treturn referrerOrigin;\n\n\t\tcase 'same-origin':\n\t\t\t// 1. If the origin of referrerURL and the origin of request's current URL are the same, then\n\t\t\t// return referrerURL.\n\t\t\tif (referrerURL.origin === currentURL.origin) {\n\t\t\t\treturn referrerURL;\n\t\t\t}\n\n\t\t\t// 2. Return no referrer.\n\t\t\treturn 'no-referrer';\n\n\t\tcase 'origin-when-cross-origin':\n\t\t\t// 1. If the origin of referrerURL and the origin of request's current URL are the same, then\n\t\t\t// return referrerURL.\n\t\t\tif (referrerURL.origin === currentURL.origin) {\n\t\t\t\treturn referrerURL;\n\t\t\t}\n\n\t\t\t// Return referrerOrigin.\n\t\t\treturn referrerOrigin;\n\n\t\tcase 'no-referrer-when-downgrade':\n\t\t\t// 1. If referrerURL is a potentially trustworthy URL and request's current URL is not a\n\t\t\t// potentially trustworthy URL, then return no referrer.\n\t\t\tif (isUrlPotentiallyTrustworthy(referrerURL) && !isUrlPotentiallyTrustworthy(currentURL)) {\n\t\t\t\treturn 'no-referrer';\n\t\t\t}\n\n\t\t\t// 2. Return referrerURL.\n\t\t\treturn referrerURL;\n\n\t\tdefault:\n\t\t\tthrow new TypeError(`Invalid referrerPolicy: ${policy}`);\n\t}\n}\n\n/**\n * @see {@link https://w3c.github.io/webappsec-referrer-policy/#parse-referrer-policy-from-header|Referrer Policy §8.1. Parse a referrer policy from a Referrer-Policy header}\n * @param {Headers} headers Response headers\n * @returns {string} policy\n */\nexport function parseReferrerPolicyFromHeader(headers) {\n\t// 1. Let policy-tokens be the result of extracting header list values given `Referrer-Policy`\n\t// and response’s header list.\n\tconst policyTokens = (headers.get('referrer-policy') || '').split(/[,\\s]+/);\n\n\t// 2. Let policy be the empty string.\n\tlet policy = '';\n\n\t// 3. For each token in policy-tokens, if token is a referrer policy and token is not the empty\n\t// string, then set policy to token.\n\t// Note: This algorithm loops over multiple policy values to allow deployment of new policy\n\t// values with fallbacks for older user agents, as described in § 11.1 Unknown Policy Values.\n\tfor (const token of policyTokens) {\n\t\tif (token && ReferrerPolicy.has(token)) {\n\t\t\tpolicy = token;\n\t\t}\n\t}\n\n\t// 4. 
Return policy.\n\treturn policy;\n}\n","/**\n * Request.js\n *\n * Request class contains server only options\n *\n * All spec algorithm step numbers are based on https://fetch.spec.whatwg.org/commit-snapshots/ae716822cb3a61843226cd090eefc6589446c1d2/.\n */\n\nimport {format as formatUrl} from 'node:url';\nimport {deprecate} from 'node:util';\nimport Headers from './headers.js';\nimport Body, {clone, extractContentType, getTotalBytes} from './body.js';\nimport {isAbortSignal} from './utils/is.js';\nimport {getSearch} from './utils/get-search.js';\nimport {\n\tvalidateReferrerPolicy, determineRequestsReferrer, DEFAULT_REFERRER_POLICY\n} from './utils/referrer.js';\n\nconst INTERNALS = Symbol('Request internals');\n\n/**\n * Check if `obj` is an instance of Request.\n *\n * @param {*} object\n * @return {boolean}\n */\nconst isRequest = object => {\n\treturn (\n\t\ttypeof object === 'object' &&\n\t\ttypeof object[INTERNALS] === 'object'\n\t);\n};\n\nconst doBadDataWarn = deprecate(() => {},\n\t'.data is not a valid RequestInit property, use .body instead',\n\t'https://github.com/node-fetch/node-fetch/issues/1000 (request)');\n\n/**\n * Request class\n *\n * Ref: https://fetch.spec.whatwg.org/#request-class\n *\n * @param Mixed input Url or Request instance\n * @param Object init Custom options\n * @return Void\n */\nexport default class Request extends Body {\n\tconstructor(input, init = {}) {\n\t\tlet parsedURL;\n\n\t\t// Normalize input and force URL to be encoded as UTF-8 (https://github.com/node-fetch/node-fetch/issues/245)\n\t\tif (isRequest(input)) {\n\t\t\tparsedURL = new URL(input.url);\n\t\t} else {\n\t\t\tparsedURL = new URL(input);\n\t\t\tinput = {};\n\t\t}\n\n\t\tif (parsedURL.username !== '' || parsedURL.password !== '') {\n\t\t\tthrow new TypeError(`${parsedURL} is an url with embedded credentials.`);\n\t\t}\n\n\t\tlet method = init.method || input.method || 'GET';\n\t\tif (/^(delete|get|head|options|post|put)$/i.test(method)) {\n\t\t\tmethod = method.toUpperCase();\n\t\t}\n\n\t\tif (!isRequest(init) && 'data' in init) {\n\t\t\tdoBadDataWarn();\n\t\t}\n\n\t\t// eslint-disable-next-line no-eq-null, eqeqeq\n\t\tif ((init.body != null || (isRequest(input) && input.body !== null)) &&\n\t\t\t(method === 'GET' || method === 'HEAD')) {\n\t\t\tthrow new TypeError('Request with GET/HEAD method cannot have body');\n\t\t}\n\n\t\tconst inputBody = init.body ?\n\t\t\tinit.body :\n\t\t\t(isRequest(input) && input.body !== null ?\n\t\t\t\tclone(input) :\n\t\t\t\tnull);\n\n\t\tsuper(inputBody, {\n\t\t\tsize: init.size || input.size || 0\n\t\t});\n\n\t\tconst headers = new Headers(init.headers || input.headers || {});\n\n\t\tif (inputBody !== null && !headers.has('Content-Type')) {\n\t\t\tconst contentType = extractContentType(inputBody, this);\n\t\t\tif (contentType) {\n\t\t\t\theaders.set('Content-Type', contentType);\n\t\t\t}\n\t\t}\n\n\t\tlet signal = isRequest(input) ?\n\t\t\tinput.signal :\n\t\t\tnull;\n\t\tif ('signal' in init) {\n\t\t\tsignal = init.signal;\n\t\t}\n\n\t\t// eslint-disable-next-line no-eq-null, eqeqeq\n\t\tif (signal != null && !isAbortSignal(signal)) {\n\t\t\tthrow new TypeError('Expected signal to be an instanceof AbortSignal or EventTarget');\n\t\t}\n\n\t\t// §5.4, Request constructor steps, step 15.1\n\t\t// eslint-disable-next-line no-eq-null, eqeqeq\n\t\tlet referrer = init.referrer == null ? 
input.referrer : init.referrer;\n\t\tif (referrer === '') {\n\t\t\t// §5.4, Request constructor steps, step 15.2\n\t\t\treferrer = 'no-referrer';\n\t\t} else if (referrer) {\n\t\t\t// §5.4, Request constructor steps, step 15.3.1, 15.3.2\n\t\t\tconst parsedReferrer = new URL(referrer);\n\t\t\t// §5.4, Request constructor steps, step 15.3.3, 15.3.4\n\t\t\treferrer = /^about:(\\/\\/)?client$/.test(parsedReferrer) ? 'client' : parsedReferrer;\n\t\t} else {\n\t\t\treferrer = undefined;\n\t\t}\n\n\t\tthis[INTERNALS] = {\n\t\t\tmethod,\n\t\t\tredirect: init.redirect || input.redirect || 'follow',\n\t\t\theaders,\n\t\t\tparsedURL,\n\t\t\tsignal,\n\t\t\treferrer\n\t\t};\n\n\t\t// Node-fetch-only options\n\t\tthis.follow = init.follow === undefined ? (input.follow === undefined ? 20 : input.follow) : init.follow;\n\t\tthis.compress = init.compress === undefined ? (input.compress === undefined ? true : input.compress) : init.compress;\n\t\tthis.counter = init.counter || input.counter || 0;\n\t\tthis.agent = init.agent || input.agent;\n\t\tthis.highWaterMark = init.highWaterMark || input.highWaterMark || 16384;\n\t\tthis.insecureHTTPParser = init.insecureHTTPParser || input.insecureHTTPParser || false;\n\n\t\t// §5.4, Request constructor steps, step 16.\n\t\t// Default is empty string per https://fetch.spec.whatwg.org/#concept-request-referrer-policy\n\t\tthis.referrerPolicy = init.referrerPolicy || input.referrerPolicy || '';\n\t}\n\n\t/** @returns {string} */\n\tget method() {\n\t\treturn this[INTERNALS].method;\n\t}\n\n\t/** @returns {string} */\n\tget url() {\n\t\treturn formatUrl(this[INTERNALS].parsedURL);\n\t}\n\n\t/** @returns {Headers} */\n\tget headers() {\n\t\treturn this[INTERNALS].headers;\n\t}\n\n\tget redirect() {\n\t\treturn this[INTERNALS].redirect;\n\t}\n\n\t/** @returns {AbortSignal} */\n\tget signal() {\n\t\treturn this[INTERNALS].signal;\n\t}\n\n\t// https://fetch.spec.whatwg.org/#dom-request-referrer\n\tget referrer() {\n\t\tif (this[INTERNALS].referrer === 'no-referrer') {\n\t\t\treturn '';\n\t\t}\n\n\t\tif (this[INTERNALS].referrer === 'client') {\n\t\t\treturn 'about:client';\n\t\t}\n\n\t\tif (this[INTERNALS].referrer) {\n\t\t\treturn this[INTERNALS].referrer.toString();\n\t\t}\n\n\t\treturn undefined;\n\t}\n\n\tget referrerPolicy() {\n\t\treturn this[INTERNALS].referrerPolicy;\n\t}\n\n\tset referrerPolicy(referrerPolicy) {\n\t\tthis[INTERNALS].referrerPolicy = validateReferrerPolicy(referrerPolicy);\n\t}\n\n\t/**\n\t * Clone this request\n\t *\n\t * @return Request\n\t */\n\tclone() {\n\t\treturn new Request(this);\n\t}\n\n\tget [Symbol.toStringTag]() {\n\t\treturn 'Request';\n\t}\n}\n\nObject.defineProperties(Request.prototype, {\n\tmethod: {enumerable: true},\n\turl: {enumerable: true},\n\theaders: {enumerable: true},\n\tredirect: {enumerable: true},\n\tclone: {enumerable: true},\n\tsignal: {enumerable: true},\n\treferrer: {enumerable: true},\n\treferrerPolicy: {enumerable: true}\n});\n\n/**\n * Convert a Request to Node.js http request options.\n *\n * @param {Request} request - A Request instance\n * @return The options object to be passed to http.request\n */\nexport const getNodeRequestOptions = request => {\n\tconst {parsedURL} = request[INTERNALS];\n\tconst headers = new Headers(request[INTERNALS].headers);\n\n\t// Fetch step 1.3\n\tif (!headers.has('Accept')) {\n\t\theaders.set('Accept', '*/*');\n\t}\n\n\t// HTTP-network-or-cache fetch steps 2.4-2.7\n\tlet contentLengthValue = null;\n\tif (request.body === null && /^(post|put)$/i.test(request.method)) 
{\n\t\tcontentLengthValue = '0';\n\t}\n\n\tif (request.body !== null) {\n\t\tconst totalBytes = getTotalBytes(request);\n\t\t// Set Content-Length if totalBytes is a number (that is not NaN)\n\t\tif (typeof totalBytes === 'number' && !Number.isNaN(totalBytes)) {\n\t\t\tcontentLengthValue = String(totalBytes);\n\t\t}\n\t}\n\n\tif (contentLengthValue) {\n\t\theaders.set('Content-Length', contentLengthValue);\n\t}\n\n\t// 4.1. Main fetch, step 2.6\n\t// > If request's referrer policy is the empty string, then set request's referrer policy to the\n\t// > default referrer policy.\n\tif (request.referrerPolicy === '') {\n\t\trequest.referrerPolicy = DEFAULT_REFERRER_POLICY;\n\t}\n\n\t// 4.1. Main fetch, step 2.7\n\t// > If request's referrer is not \"no-referrer\", set request's referrer to the result of invoking\n\t// > determine request's referrer.\n\tif (request.referrer && request.referrer !== 'no-referrer') {\n\t\trequest[INTERNALS].referrer = determineRequestsReferrer(request);\n\t} else {\n\t\trequest[INTERNALS].referrer = 'no-referrer';\n\t}\n\n\t// 4.5. HTTP-network-or-cache fetch, step 6.9\n\t// > If httpRequest's referrer is a URL, then append `Referer`/httpRequest's referrer, serialized\n\t// > and isomorphic encoded, to httpRequest's header list.\n\tif (request[INTERNALS].referrer instanceof URL) {\n\t\theaders.set('Referer', request.referrer);\n\t}\n\n\t// HTTP-network-or-cache fetch step 2.11\n\tif (!headers.has('User-Agent')) {\n\t\theaders.set('User-Agent', 'node-fetch');\n\t}\n\n\t// HTTP-network-or-cache fetch step 2.15\n\tif (request.compress && !headers.has('Accept-Encoding')) {\n\t\theaders.set('Accept-Encoding', 'gzip, deflate, br');\n\t}\n\n\tlet {agent} = request;\n\tif (typeof agent === 'function') {\n\t\tagent = agent(parsedURL);\n\t}\n\n\tif (!headers.has('Connection') && !agent) {\n\t\theaders.set('Connection', 'close');\n\t}\n\n\t// HTTP-network fetch step 4.2\n\t// chunked encoding is handled by Node.js\n\n\tconst search = getSearch(parsedURL);\n\n\t// Pass the full URL directly to request(), but overwrite the following\n\t// options:\n\tconst options = {\n\t\t// Overwrite search to retain trailing ? 
(issue #776)\n\t\tpath: parsedURL.pathname + search,\n\t\t// The following options are not expressed in the URL\n\t\tmethod: request.method,\n\t\theaders: headers[Symbol.for('nodejs.util.inspect.custom')](),\n\t\tinsecureHTTPParser: request.insecureHTTPParser,\n\t\tagent\n\t};\n\n\treturn {\n\t\t/** @type {URL} */\n\t\tparsedURL,\n\t\toptions\n\t};\n};\n","import {FetchBaseError} from './base.js';\n\n/**\n * AbortError interface for cancelled requests\n */\nexport class AbortError extends FetchBaseError {\n\tconstructor(message, type = 'aborted') {\n\t\tsuper(message, type);\n\t}\n}\n","/**\n * Index.js\n *\n * a request API compatible with window.fetch\n *\n * All spec algorithm step numbers are based on https://fetch.spec.whatwg.org/commit-snapshots/ae716822cb3a61843226cd090eefc6589446c1d2/.\n */\n\nimport http from 'node:http';\nimport https from 'node:https';\nimport zlib from 'node:zlib';\nimport Stream, {PassThrough, pipeline as pump} from 'node:stream';\nimport {Buffer} from 'node:buffer';\n\nimport dataUriToBuffer from 'data-uri-to-buffer';\n\nimport {writeToStream, clone} from './body.js';\nimport Response from './response.js';\nimport Headers, {fromRawHeaders} from './headers.js';\nimport Request, {getNodeRequestOptions} from './request.js';\nimport {FetchError} from './errors/fetch-error.js';\nimport {AbortError} from './errors/abort-error.js';\nimport {isRedirect} from './utils/is-redirect.js';\nimport {FormData} from 'formdata-polyfill/esm.min.js';\nimport {isDomainOrSubdomain, isSameProtocol} from './utils/is.js';\nimport {parseReferrerPolicyFromHeader} from './utils/referrer.js';\nimport {\n\tBlob,\n\tFile,\n\tfileFromSync,\n\tfileFrom,\n\tblobFromSync,\n\tblobFrom\n} from 'fetch-blob/from.js';\n\nexport {FormData, Headers, Request, Response, FetchError, AbortError, isRedirect};\nexport {Blob, File, fileFromSync, fileFrom, blobFromSync, blobFrom};\n\nconst supportedSchemas = new Set(['data:', 'http:', 'https:']);\n\n/**\n * Fetch function\n *\n * @param {string | URL | import('./request').default} url - Absolute url or Request instance\n * @param {*} [options_] - Fetch options\n * @return {Promise}\n */\nexport default async function fetch(url, options_) {\n\treturn new Promise((resolve, reject) => {\n\t\t// Build request object\n\t\tconst request = new Request(url, options_);\n\t\tconst {parsedURL, options} = getNodeRequestOptions(request);\n\t\tif (!supportedSchemas.has(parsedURL.protocol)) {\n\t\t\tthrow new TypeError(`node-fetch cannot load ${url}. URL scheme \"${parsedURL.protocol.replace(/:$/, '')}\" is not supported.`);\n\t\t}\n\n\t\tif (parsedURL.protocol === 'data:') {\n\t\t\tconst data = dataUriToBuffer(request.url);\n\t\t\tconst response = new Response(data, {headers: {'Content-Type': data.typeFull}});\n\t\t\tresolve(response);\n\t\t\treturn;\n\t\t}\n\n\t\t// Wrap http.request into fetch\n\t\tconst send = (parsedURL.protocol === 'https:' ? 
https : http).request;\n\t\tconst {signal} = request;\n\t\tlet response = null;\n\n\t\tconst abort = () => {\n\t\t\tconst error = new AbortError('The operation was aborted.');\n\t\t\treject(error);\n\t\t\tif (request.body && request.body instanceof Stream.Readable) {\n\t\t\t\trequest.body.destroy(error);\n\t\t\t}\n\n\t\t\tif (!response || !response.body) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tresponse.body.emit('error', error);\n\t\t};\n\n\t\tif (signal && signal.aborted) {\n\t\t\tabort();\n\t\t\treturn;\n\t\t}\n\n\t\tconst abortAndFinalize = () => {\n\t\t\tabort();\n\t\t\tfinalize();\n\t\t};\n\n\t\t// Send request\n\t\tconst request_ = send(parsedURL.toString(), options);\n\n\t\tif (signal) {\n\t\t\tsignal.addEventListener('abort', abortAndFinalize);\n\t\t}\n\n\t\tconst finalize = () => {\n\t\t\trequest_.abort();\n\t\t\tif (signal) {\n\t\t\t\tsignal.removeEventListener('abort', abortAndFinalize);\n\t\t\t}\n\t\t};\n\n\t\trequest_.on('error', error => {\n\t\t\treject(new FetchError(`request to ${request.url} failed, reason: ${error.message}`, 'system', error));\n\t\t\tfinalize();\n\t\t});\n\n\t\tfixResponseChunkedTransferBadEnding(request_, error => {\n\t\t\tif (response && response.body) {\n\t\t\t\tresponse.body.destroy(error);\n\t\t\t}\n\t\t});\n\n\t\t/* c8 ignore next 18 */\n\t\tif (process.version < 'v14') {\n\t\t\t// Before Node.js 14, pipeline() does not fully support async iterators and does not always\n\t\t\t// properly handle when the socket close/end events are out of order.\n\t\t\trequest_.on('socket', s => {\n\t\t\t\tlet endedWithEventsCount;\n\t\t\t\ts.prependListener('end', () => {\n\t\t\t\t\tendedWithEventsCount = s._eventsCount;\n\t\t\t\t});\n\t\t\t\ts.prependListener('close', hadError => {\n\t\t\t\t\t// if end happened before close but the socket didn't emit an error, do it now\n\t\t\t\t\tif (response && endedWithEventsCount < s._eventsCount && !hadError) {\n\t\t\t\t\t\tconst error = new Error('Premature close');\n\t\t\t\t\t\terror.code = 'ERR_STREAM_PREMATURE_CLOSE';\n\t\t\t\t\t\tresponse.body.emit('error', error);\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t});\n\t\t}\n\n\t\trequest_.on('response', response_ => {\n\t\t\trequest_.setTimeout(0);\n\t\t\tconst headers = fromRawHeaders(response_.rawHeaders);\n\n\t\t\t// HTTP fetch step 5\n\t\t\tif (isRedirect(response_.statusCode)) {\n\t\t\t\t// HTTP fetch step 5.2\n\t\t\t\tconst location = headers.get('Location');\n\n\t\t\t\t// HTTP fetch step 5.3\n\t\t\t\tlet locationURL = null;\n\t\t\t\ttry {\n\t\t\t\t\tlocationURL = location === null ? 
null : new URL(location, request.url);\n\t\t\t\t} catch {\n\t\t\t\t\t// error here can only be invalid URL in Location: header\n\t\t\t\t\t// do not throw when options.redirect == manual\n\t\t\t\t\t// let the user extract the errorneous redirect URL\n\t\t\t\t\tif (request.redirect !== 'manual') {\n\t\t\t\t\t\treject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect'));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\t// HTTP fetch step 5.5\n\t\t\t\tswitch (request.redirect) {\n\t\t\t\t\tcase 'error':\n\t\t\t\t\t\treject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect'));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t\tcase 'manual':\n\t\t\t\t\t\t// Nothing to do\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase 'follow': {\n\t\t\t\t\t\t// HTTP-redirect fetch step 2\n\t\t\t\t\t\tif (locationURL === null) {\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 5\n\t\t\t\t\t\tif (request.counter >= request.follow) {\n\t\t\t\t\t\t\treject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect'));\n\t\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 6 (counter increment)\n\t\t\t\t\t\t// Create a new Request object.\n\t\t\t\t\t\tconst requestOptions = {\n\t\t\t\t\t\t\theaders: new Headers(request.headers),\n\t\t\t\t\t\t\tfollow: request.follow,\n\t\t\t\t\t\t\tcounter: request.counter + 1,\n\t\t\t\t\t\t\tagent: request.agent,\n\t\t\t\t\t\t\tcompress: request.compress,\n\t\t\t\t\t\t\tmethod: request.method,\n\t\t\t\t\t\t\tbody: clone(request),\n\t\t\t\t\t\t\tsignal: request.signal,\n\t\t\t\t\t\t\tsize: request.size,\n\t\t\t\t\t\t\treferrer: request.referrer,\n\t\t\t\t\t\t\treferrerPolicy: request.referrerPolicy\n\t\t\t\t\t\t};\n\n\t\t\t\t\t\t// when forwarding sensitive headers like \"Authorization\",\n\t\t\t\t\t\t// \"WWW-Authenticate\", and \"Cookie\" to untrusted targets,\n\t\t\t\t\t\t// headers will be ignored when following a redirect to a domain\n\t\t\t\t\t\t// that is not a subdomain match or exact match of the initial domain.\n\t\t\t\t\t\t// For example, a redirect from \"foo.com\" to either \"foo.com\" or \"sub.foo.com\"\n\t\t\t\t\t\t// will forward the sensitive headers, but a redirect to \"bar.com\" will not.\n\t\t\t\t\t\t// headers will also be ignored when following a redirect to a domain using\n\t\t\t\t\t\t// a different protocol. 
For example, a redirect from \"https://foo.com\" to \"http://foo.com\"\n\t\t\t\t\t\t// will not forward the sensitive headers\n\t\t\t\t\t\tif (!isDomainOrSubdomain(request.url, locationURL) || !isSameProtocol(request.url, locationURL)) {\n\t\t\t\t\t\t\tfor (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) {\n\t\t\t\t\t\t\t\trequestOptions.headers.delete(name);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 9\n\t\t\t\t\t\tif (response_.statusCode !== 303 && request.body && options_.body instanceof Stream.Readable) {\n\t\t\t\t\t\t\treject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));\n\t\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 11\n\t\t\t\t\t\tif (response_.statusCode === 303 || ((response_.statusCode === 301 || response_.statusCode === 302) && request.method === 'POST')) {\n\t\t\t\t\t\t\trequestOptions.method = 'GET';\n\t\t\t\t\t\t\trequestOptions.body = undefined;\n\t\t\t\t\t\t\trequestOptions.headers.delete('content-length');\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 14\n\t\t\t\t\t\tconst responseReferrerPolicy = parseReferrerPolicyFromHeader(headers);\n\t\t\t\t\t\tif (responseReferrerPolicy) {\n\t\t\t\t\t\t\trequestOptions.referrerPolicy = responseReferrerPolicy;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 15\n\t\t\t\t\t\tresolve(fetch(new Request(locationURL, requestOptions)));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t\t}\n\n\t\t\t\t\tdefault:\n\t\t\t\t\t\treturn reject(new TypeError(`Redirect option '${request.redirect}' is not a valid value of RequestRedirect`));\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// Prepare response\n\t\t\tif (signal) {\n\t\t\t\tresponse_.once('end', () => {\n\t\t\t\t\tsignal.removeEventListener('abort', abortAndFinalize);\n\t\t\t\t});\n\t\t\t}\n\n\t\t\tlet body = pump(response_, new PassThrough(), error => {\n\t\t\t\tif (error) {\n\t\t\t\t\treject(error);\n\t\t\t\t}\n\t\t\t});\n\t\t\t// see https://github.com/nodejs/node/pull/29376\n\t\t\t/* c8 ignore next 3 */\n\t\t\tif (process.version < 'v12.10') {\n\t\t\t\tresponse_.on('aborted', abortAndFinalize);\n\t\t\t}\n\n\t\t\tconst responseOptions = {\n\t\t\t\turl: request.url,\n\t\t\t\tstatus: response_.statusCode,\n\t\t\t\tstatusText: response_.statusMessage,\n\t\t\t\theaders,\n\t\t\t\tsize: request.size,\n\t\t\t\tcounter: request.counter,\n\t\t\t\thighWaterMark: request.highWaterMark\n\t\t\t};\n\n\t\t\t// HTTP-network fetch step 12.1.1.3\n\t\t\tconst codings = headers.get('Content-Encoding');\n\n\t\t\t// HTTP-network fetch step 12.1.1.4: handle content codings\n\n\t\t\t// in following scenarios we ignore compression support\n\t\t\t// 1. compression support is disabled\n\t\t\t// 2. HEAD request\n\t\t\t// 3. no Content-Encoding header\n\t\t\t// 4. no content response (204)\n\t\t\t// 5. 
content not modified response (304)\n\t\t\tif (!request.compress || request.method === 'HEAD' || codings === null || response_.statusCode === 204 || response_.statusCode === 304) {\n\t\t\t\tresponse = new Response(body, responseOptions);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// For Node v6+\n\t\t\t// Be less strict when decoding compressed responses, since sometimes\n\t\t\t// servers send slightly invalid responses that are still accepted\n\t\t\t// by common browsers.\n\t\t\t// Always using Z_SYNC_FLUSH is what cURL does.\n\t\t\tconst zlibOptions = {\n\t\t\t\tflush: zlib.Z_SYNC_FLUSH,\n\t\t\t\tfinishFlush: zlib.Z_SYNC_FLUSH\n\t\t\t};\n\n\t\t\t// For gzip\n\t\t\tif (codings === 'gzip' || codings === 'x-gzip') {\n\t\t\t\tbody = pump(body, zlib.createGunzip(zlibOptions), error => {\n\t\t\t\t\tif (error) {\n\t\t\t\t\t\treject(error);\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t\tresponse = new Response(body, responseOptions);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// For deflate\n\t\t\tif (codings === 'deflate' || codings === 'x-deflate') {\n\t\t\t\t// Handle the infamous raw deflate response from old servers\n\t\t\t\t// a hack for old IIS and Apache servers\n\t\t\t\tconst raw = pump(response_, new PassThrough(), error => {\n\t\t\t\t\tif (error) {\n\t\t\t\t\t\treject(error);\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t\traw.once('data', chunk => {\n\t\t\t\t\t// See http://stackoverflow.com/questions/37519828\n\t\t\t\t\tif ((chunk[0] & 0x0F) === 0x08) {\n\t\t\t\t\t\tbody = pump(body, zlib.createInflate(), error => {\n\t\t\t\t\t\t\tif (error) {\n\t\t\t\t\t\t\t\treject(error);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t});\n\t\t\t\t\t} else {\n\t\t\t\t\t\tbody = pump(body, zlib.createInflateRaw(), error => {\n\t\t\t\t\t\t\tif (error) {\n\t\t\t\t\t\t\t\treject(error);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t});\n\t\t\t\t\t}\n\n\t\t\t\t\tresponse = new Response(body, responseOptions);\n\t\t\t\t\tresolve(response);\n\t\t\t\t});\n\t\t\t\traw.once('end', () => {\n\t\t\t\t\t// Some old IIS servers return zero-length OK deflate responses, so\n\t\t\t\t\t// 'data' is never emitted. 
See https://github.com/node-fetch/node-fetch/pull/903\n\t\t\t\t\tif (!response) {\n\t\t\t\t\t\tresponse = new Response(body, responseOptions);\n\t\t\t\t\t\tresolve(response);\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// For br\n\t\t\tif (codings === 'br') {\n\t\t\t\tbody = pump(body, zlib.createBrotliDecompress(), error => {\n\t\t\t\t\tif (error) {\n\t\t\t\t\t\treject(error);\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t\tresponse = new Response(body, responseOptions);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// Otherwise, use response as-is\n\t\t\tresponse = new Response(body, responseOptions);\n\t\t\tresolve(response);\n\t\t});\n\n\t\t// eslint-disable-next-line promise/prefer-await-to-then\n\t\twriteToStream(request_, request).catch(reject);\n\t});\n}\n\nfunction fixResponseChunkedTransferBadEnding(request, errorCallback) {\n\tconst LAST_CHUNK = Buffer.from('0\\r\\n\\r\\n');\n\n\tlet isChunkedTransfer = false;\n\tlet properLastChunkReceived = false;\n\tlet previousChunk;\n\n\trequest.on('response', response => {\n\t\tconst {headers} = response;\n\t\tisChunkedTransfer = headers['transfer-encoding'] === 'chunked' && !headers['content-length'];\n\t});\n\n\trequest.on('socket', socket => {\n\t\tconst onSocketClose = () => {\n\t\t\tif (isChunkedTransfer && !properLastChunkReceived) {\n\t\t\t\tconst error = new Error('Premature close');\n\t\t\t\terror.code = 'ERR_STREAM_PREMATURE_CLOSE';\n\t\t\t\terrorCallback(error);\n\t\t\t}\n\t\t};\n\n\t\tconst onData = buf => {\n\t\t\tproperLastChunkReceived = Buffer.compare(buf.slice(-5), LAST_CHUNK) === 0;\n\n\t\t\t// Sometimes final 0-length chunk and end of message code are in separate packets\n\t\t\tif (!properLastChunkReceived && previousChunk) {\n\t\t\t\tproperLastChunkReceived = (\n\t\t\t\t\tBuffer.compare(previousChunk.slice(-3), LAST_CHUNK.slice(0, 3)) === 0 &&\n\t\t\t\t\tBuffer.compare(buf.slice(-2), LAST_CHUNK.slice(3)) === 0\n\t\t\t\t);\n\t\t\t}\n\n\t\t\tpreviousChunk = buf;\n\t\t};\n\n\t\tsocket.prependListener('close', onSocketClose);\n\t\tsocket.on('data', onData);\n\n\t\trequest.on('close', () => {\n\t\t\tsocket.removeListener('close', onSocketClose);\n\t\t\tsocket.removeListener('data', onData);\n\t\t});\n\t});\n}\n","// The module cache\nvar __webpack_module_cache__ = {};\n\n// The require function\nfunction __webpack_require__(moduleId) {\n\t// Check if module is in cache\n\tvar cachedModule = __webpack_module_cache__[moduleId];\n\tif (cachedModule !== undefined) {\n\t\treturn cachedModule.exports;\n\t}\n\t// Create a new module (and put it into the cache)\n\tvar module = __webpack_module_cache__[moduleId] = {\n\t\t// no module.id needed\n\t\t// no module.loaded needed\n\t\texports: {}\n\t};\n\n\t// Execute the module function\n\tvar threw = true;\n\ttry {\n\t\t__webpack_modules__[moduleId].call(module.exports, module, module.exports, __webpack_require__);\n\t\tthrew = false;\n\t} finally {\n\t\tif(threw) delete __webpack_module_cache__[moduleId];\n\t}\n\n\t// Return the exports of the module\n\treturn module.exports;\n}\n\n// expose the modules object (__webpack_modules__)\n__webpack_require__.m = __webpack_modules__;\n\n","// define getter functions for harmony exports\n__webpack_require__.d = (exports, definition) => {\n\tfor(var key in definition) {\n\t\tif(__webpack_require__.o(definition, key) && !__webpack_require__.o(exports, key)) {\n\t\t\tObject.defineProperty(exports, key, { enumerable: true, get: definition[key] });\n\t\t}\n\t}\n};","__webpack_require__.f = {};\n// This file contains only the 
entry chunk.\n// The chunk loading function for additional chunks\n__webpack_require__.e = (chunkId) => {\n\treturn Promise.all(Object.keys(__webpack_require__.f).reduce((promises, key) => {\n\t\t__webpack_require__.f[key](chunkId, promises);\n\t\treturn promises;\n\t}, []));\n};","// This function allow to reference async chunks\n__webpack_require__.u = (chunkId) => {\n\t// return url for filenames based on template\n\treturn \"\" + chunkId + \".index.js\";\n};","__webpack_require__.o = (obj, prop) => (Object.prototype.hasOwnProperty.call(obj, prop))","// define __esModule on exports\n__webpack_require__.r = (exports) => {\n\tif(typeof Symbol !== 'undefined' && Symbol.toStringTag) {\n\t\tObject.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });\n\t}\n\tObject.defineProperty(exports, '__esModule', { value: true });\n};","\nif (typeof __webpack_require__ !== 'undefined') __webpack_require__.ab = __dirname + \"/\";","// no baseURI\n\n// object to store loaded chunks\n// \"1\" means \"loaded\", otherwise not loaded yet\nvar installedChunks = {\n\t179: 1\n};\n\n// no on chunks loaded\n\nvar installChunk = (chunk) => {\n\tvar moreModules = chunk.modules, chunkIds = chunk.ids, runtime = chunk.runtime;\n\tfor(var moduleId in moreModules) {\n\t\tif(__webpack_require__.o(moreModules, moduleId)) {\n\t\t\t__webpack_require__.m[moduleId] = moreModules[moduleId];\n\t\t}\n\t}\n\tif(runtime) runtime(__webpack_require__);\n\tfor(var i = 0; i < chunkIds.length; i++)\n\t\tinstalledChunks[chunkIds[i]] = 1;\n\n};\n\n// require() chunk loading for javascript\n__webpack_require__.f.require = (chunkId, promises) => {\n\t// \"1\" is the signal for \"already loaded\"\n\tif(!installedChunks[chunkId]) {\n\t\tif(true) { // all chunks have JS\n\t\t\tinstallChunk(require(\"./\" + __webpack_require__.u(chunkId)));\n\t\t} else installedChunks[chunkId] = 1;\n\t}\n};\n\n// no external install chunk\n\n// no HMR\n\n// no HMR manifest","","// startup\n// Load entry module and return exports\n// This entry module is referenced by other modules so it can't be inlined\nvar __webpack_exports__ = __webpack_require__(5869);\n",""],"names":[],"sourceRoot":""} \ No newline at end of file 
+{"version":3,"file":"index.js","mappings":";;;;;;;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/UA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1RA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC9EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACPA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACPA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1GA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACfA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACpCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACfA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACpBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzmBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5lBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtLA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1SA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/HA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AClFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
[... remaining regenerated source map mappings for dist/setup/index.js.map omitted (generated build artifact) ...]
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;AChaA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;AChCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;ACPA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;ACRA;AACA;AACA;AACA;AACA;;;;;ACJA;;;;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;;;;ACNA;AACA;;;;ACDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;AErCA;AACA;AACA;AACA","sources":["../webpack://typescript-action/./node_modules/@actions/core/lib/command.js","../webpack://typescript-action/./node_modules/@actions/core/lib/core.js","../webpack://typescript-action/./node_modules/@actions/core/lib/file-command.js","../webpack://typescript-action/./node_modules/@actions/core/lib/oidc-utils.js","../webpack://typescript-action/./node_modules/@actions/core/lib/path-utils.js","../webpack://typescript-action/./node_modules/@actions/core/lib/summary.js","../webpack://typescript-action/./node_modules/@actions/core/lib/utils.js","../webpack://typescript-action/./node_modules/@actions/core/node_modules/uuid/dist/index.js","../webpack://typescript-action/./node_modules/@actions/core/node_modules/uuid/dist/md5.js","../webpack://typescript-action/./node_modules/@actions/core/node_modules/uuid/dist/nil.js","../webpack://typescript-action/./node_modules/@actions/core/node_modules/uuid/dist/parse.js","../webpack://typescript-action/./node_modules/@actions/core/node_modules/uuid/dist/regex.js","../webpack://typescript-action/./node_modules/@actions/core/node_modules/uuid/dist/rng.js","../webpack://typescript-action/./node_modules/@actions/core/node_modules/uuid/dist/sha1.js","../webpack://typescript-action/./node_modules/@actions/core/node_modules/uuid/dist/stringify.js","../webpack://typescript-action/./node_modules/@actions/core/node_modules/uuid/dist/v1.js","../webpack://typescript-action/./node_modules/@actions/core/node_modules/uuid/dist/v3.js","../webpack://typescript-action/./node_modules/@actions/core/node_modules/uuid/dist/v35.js","../webpack://typescript-action/./node_modules/@actions/core/node_modules/uuid/dist/v4.js","../webpack://typescript-action/./node_modules/@actions/core/node_modules/uuid/dist/v5.js","../webpack://typescript-action/./node_modules/@actions/core/node_modules/uuid/dist/validate.js","../webpack://typescript-action/./node_modules/@actions/core/node_modules/uuid/dist/version.js","../webpack://typescript-action/./node_modules/@actions/exec/lib/exec.js","
../webpack://typescript-action/./node_modules/@actions/exec/lib/toolrunner.js","../webpack://typescript-action/./node_modules/@actions/http-client/lib/auth.js","../webpack://typescript-action/./node_modules/@actions/http-client/lib/index.js","../webpack://typescript-action/./node_modules/@actions/http-client/lib/proxy.js","../webpack://typescript-action/./node_modules/@actions/io/lib/io-util.js","../webpack://typescript-action/./node_modules/@actions/io/lib/io.js","../webpack://typescript-action/./node_modules/@actions/tool-cache/lib/manifest.js","../webpack://typescript-action/./node_modules/@actions/tool-cache/lib/retry-helper.js","../webpack://typescript-action/./node_modules/@actions/tool-cache/lib/tool-cache.js","../webpack://typescript-action/./node_modules/@actions/tool-cache/node_modules/uuid/lib/bytesToUuid.js","../webpack://typescript-action/./node_modules/@actions/tool-cache/node_modules/uuid/lib/rng.js","../webpack://typescript-action/./node_modules/@actions/tool-cache/node_modules/uuid/v4.js","../webpack://typescript-action/./node_modules/@octokit/rest/dist-node/index.js","../webpack://typescript-action/./node_modules/@octokit/rest/node_modules/@octokit/auth-token/dist-node/index.js","../webpack://typescript-action/./node_modules/@octokit/rest/node_modules/@octokit/core/dist-node/index.js","../webpack://typescript-action/./node_modules/@octokit/rest/node_modules/@octokit/endpoint/dist-node/index.js","../webpack://typescript-action/./node_modules/@octokit/rest/node_modules/@octokit/graphql/dist-node/index.js","../webpack://typescript-action/./node_modules/@octokit/rest/node_modules/@octokit/plugin-paginate-rest/dist-node/index.js","../webpack://typescript-action/./node_modules/@octokit/rest/node_modules/@octokit/plugin-request-log/dist-node/index.js","../webpack://typescript-action/./node_modules/@octokit/rest/node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js","../webpack://typescript-action/./node_modules/@octokit/rest/node_modules/@octokit/request-error/dist-node/index.js","../webpack://typescript-action/./node_modules/@octokit/rest/node_modules/@octokit/request/dist-node/index.js","../webpack://typescript-action/./node_modules/before-after-hook/index.js","../webpack://typescript-action/./node_modules/before-after-hook/lib/add.js","../webpack://typescript-action/./node_modules/before-after-hook/lib/register.js","../webpack://typescript-action/./node_modules/before-after-hook/lib/remove.js","../webpack://typescript-action/./node_modules/deprecation/dist-node/index.js","../webpack://typescript-action/./node_modules/is-plain-object/dist/is-plain-object.js","../webpack://typescript-action/./node_modules/node-domexception/index.js","../webpack://typescript-action/./node_modules/once/once.js","../webpack://typescript-action/./node_modules/semver/semver.js","../webpack://typescript-action/./node_modules/tunnel/index.js","../webpack://typescript-action/./node_modules/tunnel/lib/tunnel.js","../webpack://typescript-action/./node_modules/universal-user-agent/dist-node/index.js","../webpack://typescript-action/./node_modules/web-streams-polyfill/dist/ponyfill.es2018.js","../webpack://typescript-action/./node_modules/wrappy/wrappy.js","../webpack://typescript-action/./src/setup-rye.ts","../webpack://typescript-action/./src/utils.ts","../webpack://typescript-action/external node-commonjs \"assert\"","../webpack://typescript-action/external node-commonjs \"buffer\"","../webpack://typescript-action/external node-commonjs 
\"child_process\"","../webpack://typescript-action/external node-commonjs \"crypto\"","../webpack://typescript-action/external node-commonjs \"events\"","../webpack://typescript-action/external node-commonjs \"fs\"","../webpack://typescript-action/external node-commonjs \"http\"","../webpack://typescript-action/external node-commonjs \"https\"","../webpack://typescript-action/external node-commonjs \"net\"","../webpack://typescript-action/external node-commonjs \"node:process\"","../webpack://typescript-action/external node-commonjs \"node:stream/web\"","../webpack://typescript-action/external node-commonjs \"os\"","../webpack://typescript-action/external node-commonjs \"path\"","../webpack://typescript-action/external node-commonjs \"stream\"","../webpack://typescript-action/external node-commonjs \"string_decoder\"","../webpack://typescript-action/external node-commonjs \"timers\"","../webpack://typescript-action/external node-commonjs \"tls\"","../webpack://typescript-action/external node-commonjs \"util\"","../webpack://typescript-action/external node-commonjs \"worker_threads\"","../webpack://typescript-action/./node_modules/fetch-blob/streams.cjs","../webpack://typescript-action/./node_modules/fetch-blob/file.js","../webpack://typescript-action/external node-commonjs \"node:fs\"","../webpack://typescript-action/external node-commonjs \"node:path\"","../webpack://typescript-action/./node_modules/fetch-blob/from.js","../webpack://typescript-action/./node_modules/fetch-blob/index.js","../webpack://typescript-action/./node_modules/formdata-polyfill/esm.min.js","../webpack://typescript-action/external node-commonjs \"node:http\"","../webpack://typescript-action/external node-commonjs \"node:https\"","../webpack://typescript-action/external node-commonjs \"node:zlib\"","../webpack://typescript-action/external node-commonjs \"node:stream\"","../webpack://typescript-action/external node-commonjs \"node:buffer\"","../webpack://typescript-action/./node_modules/data-uri-to-buffer/dist/index.js","../webpack://typescript-action/external node-commonjs \"node:util\"","../webpack://typescript-action/./node_modules/node-fetch/src/errors/base.js","../webpack://typescript-action/./node_modules/node-fetch/src/errors/fetch-error.js","../webpack://typescript-action/./node_modules/node-fetch/src/utils/is.js","../webpack://typescript-action/./node_modules/node-fetch/src/body.js","../webpack://typescript-action/./node_modules/node-fetch/src/headers.js","../webpack://typescript-action/./node_modules/node-fetch/src/utils/is-redirect.js","../webpack://typescript-action/./node_modules/node-fetch/src/response.js","../webpack://typescript-action/external node-commonjs \"node:url\"","../webpack://typescript-action/./node_modules/node-fetch/src/utils/get-search.js","../webpack://typescript-action/external node-commonjs \"node:net\"","../webpack://typescript-action/./node_modules/node-fetch/src/utils/referrer.js","../webpack://typescript-action/./node_modules/node-fetch/src/request.js","../webpack://typescript-action/./node_modules/node-fetch/src/errors/abort-error.js","../webpack://typescript-action/./node_modules/node-fetch/src/index.js","../webpack://typescript-action/webpack/bootstrap","../webpack://typescript-action/webpack/runtime/define property getters","../webpack://typescript-action/webpack/runtime/ensure chunk","../webpack://typescript-action/webpack/runtime/get javascript chunk filename","../webpack://typescript-action/webpack/runtime/hasOwnProperty 
shorthand","../webpack://typescript-action/webpack/runtime/make namespace object","../webpack://typescript-action/webpack/runtime/compat","../webpack://typescript-action/webpack/runtime/require chunk loading","../webpack://typescript-action/webpack/before-startup","../webpack://typescript-action/webpack/startup","../webpack://typescript-action/webpack/after-startup"],"sourcesContent":["\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.issue = exports.issueCommand = void 0;\nconst os = __importStar(require(\"os\"));\nconst utils_1 = require(\"./utils\");\n/**\n * Commands\n *\n * Command Format:\n * ::name key=value,key=value::message\n *\n * Examples:\n * ::warning::This is the message\n * ::set-env name=MY_VAR::some value\n */\nfunction issueCommand(command, properties, message) {\n const cmd = new Command(command, properties, message);\n process.stdout.write(cmd.toString() + os.EOL);\n}\nexports.issueCommand = issueCommand;\nfunction issue(name, message = '') {\n issueCommand(name, {}, message);\n}\nexports.issue = issue;\nconst CMD_STRING = '::';\nclass Command {\n constructor(command, properties, message) {\n if (!command) {\n command = 'missing.command';\n }\n this.command = command;\n this.properties = properties;\n this.message = message;\n }\n toString() {\n let cmdStr = CMD_STRING + this.command;\n if (this.properties && Object.keys(this.properties).length > 0) {\n cmdStr += ' ';\n let first = true;\n for (const key in this.properties) {\n if (this.properties.hasOwnProperty(key)) {\n const val = this.properties[key];\n if (val) {\n if (first) {\n first = false;\n }\n else {\n cmdStr += ',';\n }\n cmdStr += `${key}=${escapeProperty(val)}`;\n }\n }\n }\n }\n cmdStr += `${CMD_STRING}${escapeData(this.message)}`;\n return cmdStr;\n }\n}\nfunction escapeData(s) {\n return utils_1.toCommandValue(s)\n .replace(/%/g, '%25')\n .replace(/\\r/g, '%0D')\n .replace(/\\n/g, '%0A');\n}\nfunction escapeProperty(s) {\n return utils_1.toCommandValue(s)\n .replace(/%/g, '%25')\n .replace(/\\r/g, '%0D')\n .replace(/\\n/g, '%0A')\n .replace(/:/g, '%3A')\n .replace(/,/g, '%2C');\n}\n//# sourceMappingURL=command.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0;\nconst command_1 = require(\"./command\");\nconst file_command_1 = require(\"./file-command\");\nconst utils_1 = require(\"./utils\");\nconst os = __importStar(require(\"os\"));\nconst path = __importStar(require(\"path\"));\nconst oidc_utils_1 = require(\"./oidc-utils\");\n/**\n * The code to exit an action\n */\nvar ExitCode;\n(function (ExitCode) {\n /**\n * A code indicating that the action was successful\n */\n ExitCode[ExitCode[\"Success\"] = 0] = \"Success\";\n /**\n * A code indicating that the action was a failure\n */\n ExitCode[ExitCode[\"Failure\"] = 1] = \"Failure\";\n})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));\n//-----------------------------------------------------------------------\n// Variables\n//-----------------------------------------------------------------------\n/**\n * Sets env variable for this action and future actions in the job\n * @param name the name of the variable to set\n * @param val the value of the variable. 
Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction exportVariable(name, val) {\n const convertedVal = utils_1.toCommandValue(val);\n process.env[name] = convertedVal;\n const filePath = process.env['GITHUB_ENV'] || '';\n if (filePath) {\n return file_command_1.issueFileCommand('ENV', file_command_1.prepareKeyValueMessage(name, val));\n }\n command_1.issueCommand('set-env', { name }, convertedVal);\n}\nexports.exportVariable = exportVariable;\n/**\n * Registers a secret which will get masked from logs\n * @param secret value of the secret\n */\nfunction setSecret(secret) {\n command_1.issueCommand('add-mask', {}, secret);\n}\nexports.setSecret = setSecret;\n/**\n * Prepends inputPath to the PATH (for this action and future actions)\n * @param inputPath\n */\nfunction addPath(inputPath) {\n const filePath = process.env['GITHUB_PATH'] || '';\n if (filePath) {\n file_command_1.issueFileCommand('PATH', inputPath);\n }\n else {\n command_1.issueCommand('add-path', {}, inputPath);\n }\n process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;\n}\nexports.addPath = addPath;\n/**\n * Gets the value of an input.\n * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed.\n * Returns an empty string if the value is not defined.\n *\n * @param name name of the input to get\n * @param options optional. See InputOptions.\n * @returns string\n */\nfunction getInput(name, options) {\n const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || '';\n if (options && options.required && !val) {\n throw new Error(`Input required and not supplied: ${name}`);\n }\n if (options && options.trimWhitespace === false) {\n return val;\n }\n return val.trim();\n}\nexports.getInput = getInput;\n/**\n * Gets the values of an multiline input. Each value is also trimmed.\n *\n * @param name name of the input to get\n * @param options optional. See InputOptions.\n * @returns string[]\n *\n */\nfunction getMultilineInput(name, options) {\n const inputs = getInput(name, options)\n .split('\\n')\n .filter(x => x !== '');\n if (options && options.trimWhitespace === false) {\n return inputs;\n }\n return inputs.map(input => input.trim());\n}\nexports.getMultilineInput = getMultilineInput;\n/**\n * Gets the input value of the boolean type in the YAML 1.2 \"core schema\" specification.\n * Support boolean input list: `true | True | TRUE | false | False | FALSE` .\n * The return value is also in boolean type.\n * ref: https://yaml.org/spec/1.2/spec.html#id2804923\n *\n * @param name name of the input to get\n * @param options optional. See InputOptions.\n * @returns boolean\n */\nfunction getBooleanInput(name, options) {\n const trueValue = ['true', 'True', 'TRUE'];\n const falseValue = ['false', 'False', 'FALSE'];\n const val = getInput(name, options);\n if (trueValue.includes(val))\n return true;\n if (falseValue.includes(val))\n return false;\n throw new TypeError(`Input does not meet YAML 1.2 \"Core Schema\" specification: ${name}\\n` +\n `Support boolean input list: \\`true | True | TRUE | false | False | FALSE\\``);\n}\nexports.getBooleanInput = getBooleanInput;\n/**\n * Sets the value of an output.\n *\n * @param name name of the output to set\n * @param value value to store. 
Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction setOutput(name, value) {\n const filePath = process.env['GITHUB_OUTPUT'] || '';\n if (filePath) {\n return file_command_1.issueFileCommand('OUTPUT', file_command_1.prepareKeyValueMessage(name, value));\n }\n process.stdout.write(os.EOL);\n command_1.issueCommand('set-output', { name }, utils_1.toCommandValue(value));\n}\nexports.setOutput = setOutput;\n/**\n * Enables or disables the echoing of commands into stdout for the rest of the step.\n * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.\n *\n */\nfunction setCommandEcho(enabled) {\n command_1.issue('echo', enabled ? 'on' : 'off');\n}\nexports.setCommandEcho = setCommandEcho;\n//-----------------------------------------------------------------------\n// Results\n//-----------------------------------------------------------------------\n/**\n * Sets the action status to failed.\n * When the action exits it will be with an exit code of 1\n * @param message add error issue message\n */\nfunction setFailed(message) {\n process.exitCode = ExitCode.Failure;\n error(message);\n}\nexports.setFailed = setFailed;\n//-----------------------------------------------------------------------\n// Logging Commands\n//-----------------------------------------------------------------------\n/**\n * Gets whether Actions Step Debug is on or not\n */\nfunction isDebug() {\n return process.env['RUNNER_DEBUG'] === '1';\n}\nexports.isDebug = isDebug;\n/**\n * Writes debug message to user log\n * @param message debug message\n */\nfunction debug(message) {\n command_1.issueCommand('debug', {}, message);\n}\nexports.debug = debug;\n/**\n * Adds an error issue\n * @param message error issue message. Errors will be converted to string via toString()\n * @param properties optional properties to add to the annotation.\n */\nfunction error(message, properties = {}) {\n command_1.issueCommand('error', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);\n}\nexports.error = error;\n/**\n * Adds a warning issue\n * @param message warning issue message. Errors will be converted to string via toString()\n * @param properties optional properties to add to the annotation.\n */\nfunction warning(message, properties = {}) {\n command_1.issueCommand('warning', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);\n}\nexports.warning = warning;\n/**\n * Adds a notice issue\n * @param message notice issue message. Errors will be converted to string via toString()\n * @param properties optional properties to add to the annotation.\n */\nfunction notice(message, properties = {}) {\n command_1.issueCommand('notice', utils_1.toCommandProperties(properties), message instanceof Error ? 
message.toString() : message);\n}\nexports.notice = notice;\n/**\n * Writes info to log with console.log.\n * @param message info message\n */\nfunction info(message) {\n process.stdout.write(message + os.EOL);\n}\nexports.info = info;\n/**\n * Begin an output group.\n *\n * Output until the next `groupEnd` will be foldable in this group\n *\n * @param name The name of the output group\n */\nfunction startGroup(name) {\n command_1.issue('group', name);\n}\nexports.startGroup = startGroup;\n/**\n * End an output group.\n */\nfunction endGroup() {\n command_1.issue('endgroup');\n}\nexports.endGroup = endGroup;\n/**\n * Wrap an asynchronous function call in a group.\n *\n * Returns the same type as the function itself.\n *\n * @param name The name of the group\n * @param fn The function to wrap in the group\n */\nfunction group(name, fn) {\n return __awaiter(this, void 0, void 0, function* () {\n startGroup(name);\n let result;\n try {\n result = yield fn();\n }\n finally {\n endGroup();\n }\n return result;\n });\n}\nexports.group = group;\n//-----------------------------------------------------------------------\n// Wrapper action state\n//-----------------------------------------------------------------------\n/**\n * Saves state for current action, the state can only be retrieved by this action's post job execution.\n *\n * @param name name of the state to store\n * @param value value to store. Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction saveState(name, value) {\n const filePath = process.env['GITHUB_STATE'] || '';\n if (filePath) {\n return file_command_1.issueFileCommand('STATE', file_command_1.prepareKeyValueMessage(name, value));\n }\n command_1.issueCommand('save-state', { name }, utils_1.toCommandValue(value));\n}\nexports.saveState = saveState;\n/**\n * Gets the value of an state set by this action's main execution.\n *\n * @param name name of the state to get\n * @returns string\n */\nfunction getState(name) {\n return process.env[`STATE_${name}`] || '';\n}\nexports.getState = getState;\nfunction getIDToken(aud) {\n return __awaiter(this, void 0, void 0, function* () {\n return yield oidc_utils_1.OidcClient.getIDToken(aud);\n });\n}\nexports.getIDToken = getIDToken;\n/**\n * Summary exports\n */\nvar summary_1 = require(\"./summary\");\nObject.defineProperty(exports, \"summary\", { enumerable: true, get: function () { return summary_1.summary; } });\n/**\n * @deprecated use core.summary\n */\nvar summary_2 = require(\"./summary\");\nObject.defineProperty(exports, \"markdownSummary\", { enumerable: true, get: function () { return summary_2.markdownSummary; } });\n/**\n * Path exports\n */\nvar path_utils_1 = require(\"./path-utils\");\nObject.defineProperty(exports, \"toPosixPath\", { enumerable: true, get: function () { return path_utils_1.toPosixPath; } });\nObject.defineProperty(exports, \"toWin32Path\", { enumerable: true, get: function () { return path_utils_1.toWin32Path; } });\nObject.defineProperty(exports, \"toPlatformPath\", { enumerable: true, get: function () { return path_utils_1.toPlatformPath; } });\n//# sourceMappingURL=core.js.map","\"use strict\";\n// For internal use, subject to change.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.prepareKeyValueMessage = exports.issueFileCommand = void 0;\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nconst fs = __importStar(require(\"fs\"));\nconst os = __importStar(require(\"os\"));\nconst uuid_1 = require(\"uuid\");\nconst utils_1 = require(\"./utils\");\nfunction issueFileCommand(command, message) {\n const filePath = process.env[`GITHUB_${command}`];\n if (!filePath) {\n throw new Error(`Unable to find environment variable for file command ${command}`);\n }\n if (!fs.existsSync(filePath)) {\n throw new Error(`Missing file at path: ${filePath}`);\n }\n fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, {\n encoding: 'utf8'\n });\n}\nexports.issueFileCommand = issueFileCommand;\nfunction prepareKeyValueMessage(key, value) {\n const delimiter = `ghadelimiter_${uuid_1.v4()}`;\n const convertedValue = utils_1.toCommandValue(value);\n // These should realistically never happen, but just in case someone finds a\n // way to exploit uuid generation let's not allow keys or values that contain\n // the delimiter.\n if (key.includes(delimiter)) {\n throw new Error(`Unexpected input: name should not contain the delimiter \"${delimiter}\"`);\n }\n if (convertedValue.includes(delimiter)) {\n throw new Error(`Unexpected input: value should not contain the delimiter \"${delimiter}\"`);\n }\n return `${key}<<${delimiter}${os.EOL}${convertedValue}${os.EOL}${delimiter}`;\n}\nexports.prepareKeyValueMessage = prepareKeyValueMessage;\n//# sourceMappingURL=file-command.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.OidcClient = void 0;\nconst http_client_1 = require(\"@actions/http-client\");\nconst auth_1 = require(\"@actions/http-client/lib/auth\");\nconst core_1 = require(\"./core\");\nclass OidcClient {\n static createHttpClient(allowRetry = true, maxRetry = 10) {\n const requestOptions = {\n allowRetries: allowRetry,\n maxRetries: maxRetry\n };\n return new http_client_1.HttpClient('actions/oidc-client', [new auth_1.BearerCredentialHandler(OidcClient.getRequestToken())], requestOptions);\n }\n static getRequestToken() {\n const token = process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN'];\n if (!token) {\n throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable');\n }\n return token;\n }\n static getIDTokenUrl() {\n const runtimeUrl = process.env['ACTIONS_ID_TOKEN_REQUEST_URL'];\n if (!runtimeUrl) {\n throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable');\n }\n return runtimeUrl;\n }\n static getCall(id_token_url) {\n var _a;\n return __awaiter(this, void 0, void 0, function* () {\n const httpclient = OidcClient.createHttpClient();\n const res = yield httpclient\n .getJson(id_token_url)\n .catch(error => {\n throw new Error(`Failed to get ID Token. \\n \n Error Code : ${error.statusCode}\\n \n Error Message: ${error.result.message}`);\n });\n const id_token = (_a = res.result) === null || _a === void 0 ? void 0 : _a.value;\n if (!id_token) {\n throw new Error('Response json body do not have ID Token field');\n }\n return id_token;\n });\n }\n static getIDToken(audience) {\n return __awaiter(this, void 0, void 0, function* () {\n try {\n // New ID Token is requested from action service\n let id_token_url = OidcClient.getIDTokenUrl();\n if (audience) {\n const encodedAudience = encodeURIComponent(audience);\n id_token_url = `${id_token_url}&audience=${encodedAudience}`;\n }\n core_1.debug(`ID token url is ${id_token_url}`);\n const id_token = yield OidcClient.getCall(id_token_url);\n core_1.setSecret(id_token);\n return id_token;\n }\n catch (error) {\n throw new Error(`Error message: ${error.message}`);\n }\n });\n }\n}\nexports.OidcClient = OidcClient;\n//# sourceMappingURL=oidc-utils.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0;\nconst path = __importStar(require(\"path\"));\n/**\n * toPosixPath converts the given path to the posix form. 
On Windows, \\\\ will be\n * replaced with /.\n *\n * @param pth. Path to transform.\n * @return string Posix path.\n */\nfunction toPosixPath(pth) {\n return pth.replace(/[\\\\]/g, '/');\n}\nexports.toPosixPath = toPosixPath;\n/**\n * toWin32Path converts the given path to the win32 form. On Linux, / will be\n * replaced with \\\\.\n *\n * @param pth. Path to transform.\n * @return string Win32 path.\n */\nfunction toWin32Path(pth) {\n return pth.replace(/[/]/g, '\\\\');\n}\nexports.toWin32Path = toWin32Path;\n/**\n * toPlatformPath converts the given path to a platform-specific path. It does\n * this by replacing instances of / and \\ with the platform-specific path\n * separator.\n *\n * @param pth The path to platformize.\n * @return string The platform-specific path.\n */\nfunction toPlatformPath(pth) {\n return pth.replace(/[/\\\\]/g, path.sep);\n}\nexports.toPlatformPath = toPlatformPath;\n//# sourceMappingURL=path-utils.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0;\nconst os_1 = require(\"os\");\nconst fs_1 = require(\"fs\");\nconst { access, appendFile, writeFile } = fs_1.promises;\nexports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY';\nexports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary';\nclass Summary {\n constructor() {\n this._buffer = '';\n }\n /**\n * Finds the summary file path from the environment, rejects if env var is not found or file does not exist\n * Also checks r/w permissions.\n *\n * @returns step summary file path\n */\n filePath() {\n return __awaiter(this, void 0, void 0, function* () {\n if (this._filePath) {\n return this._filePath;\n }\n const pathFromEnv = process.env[exports.SUMMARY_ENV_VAR];\n if (!pathFromEnv) {\n throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`);\n }\n try {\n yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);\n }\n catch (_a) {\n throw new Error(`Unable to access summary file: '${pathFromEnv}'. 
Check if the file has correct read/write permissions.`);\n }\n this._filePath = pathFromEnv;\n return this._filePath;\n });\n }\n /**\n * Wraps content in an HTML tag, adding any HTML attributes\n *\n * @param {string} tag HTML tag to wrap\n * @param {string | null} content content within the tag\n * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add\n *\n * @returns {string} content wrapped in HTML element\n */\n wrap(tag, content, attrs = {}) {\n const htmlAttrs = Object.entries(attrs)\n .map(([key, value]) => ` ${key}=\"${value}\"`)\n .join('');\n if (!content) {\n return `<${tag}${htmlAttrs}>`;\n }\n return `<${tag}${htmlAttrs}>${content}`;\n }\n /**\n * Writes text in the buffer to the summary buffer file and empties buffer. Will append by default.\n *\n * @param {SummaryWriteOptions} [options] (optional) options for write operation\n *\n * @returns {Promise} summary instance\n */\n write(options) {\n return __awaiter(this, void 0, void 0, function* () {\n const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite);\n const filePath = yield this.filePath();\n const writeFunc = overwrite ? writeFile : appendFile;\n yield writeFunc(filePath, this._buffer, { encoding: 'utf8' });\n return this.emptyBuffer();\n });\n }\n /**\n * Clears the summary buffer and wipes the summary file\n *\n * @returns {Summary} summary instance\n */\n clear() {\n return __awaiter(this, void 0, void 0, function* () {\n return this.emptyBuffer().write({ overwrite: true });\n });\n }\n /**\n * Returns the current summary buffer as a string\n *\n * @returns {string} string of summary buffer\n */\n stringify() {\n return this._buffer;\n }\n /**\n * If the summary buffer is empty\n *\n * @returns {boolen} true if the buffer is empty\n */\n isEmptyBuffer() {\n return this._buffer.length === 0;\n }\n /**\n * Resets the summary buffer without writing to summary file\n *\n * @returns {Summary} summary instance\n */\n emptyBuffer() {\n this._buffer = '';\n return this;\n }\n /**\n * Adds raw text to the summary buffer\n *\n * @param {string} text content to add\n * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false)\n *\n * @returns {Summary} summary instance\n */\n addRaw(text, addEOL = false) {\n this._buffer += text;\n return addEOL ? this.addEOL() : this;\n }\n /**\n * Adds the operating system-specific end-of-line marker to the buffer\n *\n * @returns {Summary} summary instance\n */\n addEOL() {\n return this.addRaw(os_1.EOL);\n }\n /**\n * Adds an HTML codeblock to the summary buffer\n *\n * @param {string} code content to render within fenced code block\n * @param {string} lang (optional) language to syntax highlight code\n *\n * @returns {Summary} summary instance\n */\n addCodeBlock(code, lang) {\n const attrs = Object.assign({}, (lang && { lang }));\n const element = this.wrap('pre', this.wrap('code', code), attrs);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML list to the summary buffer\n *\n * @param {string[]} items list of items to render\n * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false)\n *\n * @returns {Summary} summary instance\n */\n addList(items, ordered = false) {\n const tag = ordered ? 
'ol' : 'ul';\n const listItems = items.map(item => this.wrap('li', item)).join('');\n const element = this.wrap(tag, listItems);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML table to the summary buffer\n *\n * @param {SummaryTableCell[]} rows table rows\n *\n * @returns {Summary} summary instance\n */\n addTable(rows) {\n const tableBody = rows\n .map(row => {\n const cells = row\n .map(cell => {\n if (typeof cell === 'string') {\n return this.wrap('td', cell);\n }\n const { header, data, colspan, rowspan } = cell;\n const tag = header ? 'th' : 'td';\n const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan }));\n return this.wrap(tag, data, attrs);\n })\n .join('');\n return this.wrap('tr', cells);\n })\n .join('');\n const element = this.wrap('table', tableBody);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds a collapsable HTML details element to the summary buffer\n *\n * @param {string} label text for the closed state\n * @param {string} content collapsable content\n *\n * @returns {Summary} summary instance\n */\n addDetails(label, content) {\n const element = this.wrap('details', this.wrap('summary', label) + content);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML image tag to the summary buffer\n *\n * @param {string} src path to the image you to embed\n * @param {string} alt text description of the image\n * @param {SummaryImageOptions} options (optional) addition image attributes\n *\n * @returns {Summary} summary instance\n */\n addImage(src, alt, options) {\n const { width, height } = options || {};\n const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height }));\n const element = this.wrap('img', null, Object.assign({ src, alt }, attrs));\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML section heading element\n *\n * @param {string} text heading text\n * @param {number | string} [level=1] (optional) the heading level, default: 1\n *\n * @returns {Summary} summary instance\n */\n addHeading(text, level) {\n const tag = `h${level}`;\n const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag)\n ? tag\n : 'h1';\n const element = this.wrap(allowedTag, text);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML thematic break (
<hr>) to the summary buffer\n *\n * @returns {Summary} summary instance\n */\n addSeparator() {\n const element = this.wrap('hr', null);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML line break (<br>
) to the summary buffer\n *\n * @returns {Summary} summary instance\n */\n addBreak() {\n const element = this.wrap('br', null);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML blockquote to the summary buffer\n *\n * @param {string} text quote text\n * @param {string} cite (optional) citation url\n *\n * @returns {Summary} summary instance\n */\n addQuote(text, cite) {\n const attrs = Object.assign({}, (cite && { cite }));\n const element = this.wrap('blockquote', text, attrs);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML anchor tag to the summary buffer\n *\n * @param {string} text link text/content\n * @param {string} href hyperlink\n *\n * @returns {Summary} summary instance\n */\n addLink(text, href) {\n const element = this.wrap('a', text, { href });\n return this.addRaw(element).addEOL();\n }\n}\nconst _summary = new Summary();\n/**\n * @deprecated use `core.summary`\n */\nexports.markdownSummary = _summary;\nexports.summary = _summary;\n//# sourceMappingURL=summary.js.map","\"use strict\";\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.toCommandProperties = exports.toCommandValue = void 0;\n/**\n * Sanitizes an input into a string so it can be passed into issueCommand safely\n * @param input input to sanitize into a string\n */\nfunction toCommandValue(input) {\n if (input === null || input === undefined) {\n return '';\n }\n else if (typeof input === 'string' || input instanceof String) {\n return input;\n }\n return JSON.stringify(input);\n}\nexports.toCommandValue = toCommandValue;\n/**\n *\n * @param annotationProperties\n * @returns The command properties to send with the actual annotation command\n * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646\n */\nfunction toCommandProperties(annotationProperties) {\n if (!Object.keys(annotationProperties).length) {\n return {};\n }\n return {\n title: annotationProperties.title,\n file: annotationProperties.file,\n line: annotationProperties.startLine,\n endLine: annotationProperties.endLine,\n col: annotationProperties.startColumn,\n endColumn: annotationProperties.endColumn\n };\n}\nexports.toCommandProperties = toCommandProperties;\n//# sourceMappingURL=utils.js.map","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nObject.defineProperty(exports, \"v1\", {\n enumerable: true,\n get: function () {\n return _v.default;\n }\n});\nObject.defineProperty(exports, \"v3\", {\n enumerable: true,\n get: function () {\n return _v2.default;\n }\n});\nObject.defineProperty(exports, \"v4\", {\n enumerable: true,\n get: function () {\n return _v3.default;\n }\n});\nObject.defineProperty(exports, \"v5\", {\n enumerable: true,\n get: function () {\n return _v4.default;\n }\n});\nObject.defineProperty(exports, \"NIL\", {\n enumerable: true,\n get: function () {\n return _nil.default;\n }\n});\nObject.defineProperty(exports, \"version\", {\n enumerable: true,\n get: function () {\n return _version.default;\n }\n});\nObject.defineProperty(exports, \"validate\", {\n enumerable: true,\n get: function () {\n return _validate.default;\n }\n});\nObject.defineProperty(exports, \"stringify\", {\n enumerable: true,\n get: function () {\n return _stringify.default;\n }\n});\nObject.defineProperty(exports, \"parse\", {\n enumerable: true,\n get: function () {\n return _parse.default;\n 
}\n});\n\nvar _v = _interopRequireDefault(require(\"./v1.js\"));\n\nvar _v2 = _interopRequireDefault(require(\"./v3.js\"));\n\nvar _v3 = _interopRequireDefault(require(\"./v4.js\"));\n\nvar _v4 = _interopRequireDefault(require(\"./v5.js\"));\n\nvar _nil = _interopRequireDefault(require(\"./nil.js\"));\n\nvar _version = _interopRequireDefault(require(\"./version.js\"));\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nvar _stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nvar _parse = _interopRequireDefault(require(\"./parse.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction md5(bytes) {\n if (Array.isArray(bytes)) {\n bytes = Buffer.from(bytes);\n } else if (typeof bytes === 'string') {\n bytes = Buffer.from(bytes, 'utf8');\n }\n\n return _crypto.default.createHash('md5').update(bytes).digest();\n}\n\nvar _default = md5;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\nvar _default = '00000000-0000-0000-0000-000000000000';\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction parse(uuid) {\n if (!(0, _validate.default)(uuid)) {\n throw TypeError('Invalid UUID');\n }\n\n let v;\n const arr = new Uint8Array(16); // Parse ########-....-....-....-............\n\n arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24;\n arr[1] = v >>> 16 & 0xff;\n arr[2] = v >>> 8 & 0xff;\n arr[3] = v & 0xff; // Parse ........-####-....-....-............\n\n arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8;\n arr[5] = v & 0xff; // Parse ........-....-####-....-............\n\n arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8;\n arr[7] = v & 0xff; // Parse ........-....-....-####-............\n\n arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8;\n arr[9] = v & 0xff; // Parse ........-....-....-....-############\n // (Use \"/\" to avoid 32-bit truncation when bit-shifting high-order bytes)\n\n arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff;\n arr[11] = v / 0x100000000 & 0xff;\n arr[12] = v >>> 24 & 0xff;\n arr[13] = v >>> 16 & 0xff;\n arr[14] = v >>> 8 & 0xff;\n arr[15] = v & 0xff;\n return arr;\n}\n\nvar _default = parse;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\nvar _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = rng;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nconst rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate\n\nlet poolPtr = rnds8Pool.length;\n\nfunction rng() {\n if (poolPtr > rnds8Pool.length - 16) {\n _crypto.default.randomFillSync(rnds8Pool);\n\n poolPtr = 0;\n }\n\n return rnds8Pool.slice(poolPtr, poolPtr += 16);\n}","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction sha1(bytes) {\n if (Array.isArray(bytes)) {\n bytes = Buffer.from(bytes);\n } else if (typeof bytes === 'string') {\n bytes = Buffer.from(bytes, 'utf8');\n }\n\n return _crypto.default.createHash('sha1').update(bytes).digest();\n}\n\nvar _default = sha1;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\n/**\n * Convert array of 16 byte values to UUID string format of the form:\n * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX\n */\nconst byteToHex = [];\n\nfor (let i = 0; i < 256; ++i) {\n byteToHex.push((i + 0x100).toString(16).substr(1));\n}\n\nfunction stringify(arr, offset = 0) {\n // Note: Be careful editing this code! It's been tuned for performance\n // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434\n const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one\n // of the following:\n // - One or more input array values don't map to a hex octet (leading to\n // \"undefined\" in the uuid)\n // - Invalid input values for the RFC `version` or `variant` fields\n\n if (!(0, _validate.default)(uuid)) {\n throw TypeError('Stringified UUID is invalid');\n }\n\n return uuid;\n}\n\nvar _default = stringify;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _rng = _interopRequireDefault(require(\"./rng.js\"));\n\nvar _stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\n// **`v1()` - Generate time-based UUID**\n//\n// Inspired by https://github.com/LiosK/UUID.js\n// and http://docs.python.org/library/uuid.html\nlet _nodeId;\n\nlet _clockseq; // Previous uuid creation time\n\n\nlet _lastMSecs = 0;\nlet _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details\n\nfunction v1(options, buf, offset) {\n let i = buf && offset || 0;\n const b = buf || new Array(16);\n options = options || {};\n let node = options.node || _nodeId;\n let clockseq = options.clockseq !== undefined ? 
options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not\n // specified. We do this lazily to minimize issues related to insufficient\n // system entropy. See #189\n\n if (node == null || clockseq == null) {\n const seedBytes = options.random || (options.rng || _rng.default)();\n\n if (node == null) {\n // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1)\n node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]];\n }\n\n if (clockseq == null) {\n // Per 4.2.2, randomize (14 bit) clockseq\n clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff;\n }\n } // UUID timestamps are 100 nano-second units since the Gregorian epoch,\n // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so\n // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs'\n // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00.\n\n\n let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock\n // cycle to simulate higher resolution clock\n\n let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs)\n\n const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression\n\n if (dt < 0 && options.clockseq === undefined) {\n clockseq = clockseq + 1 & 0x3fff;\n } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new\n // time interval\n\n\n if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) {\n nsecs = 0;\n } // Per 4.2.1.2 Throw error if too many uuids are requested\n\n\n if (nsecs >= 10000) {\n throw new Error(\"uuid.v1(): Can't create more than 10M uuids/sec\");\n }\n\n _lastMSecs = msecs;\n _lastNSecs = nsecs;\n _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch\n\n msecs += 12219292800000; // `time_low`\n\n const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000;\n b[i++] = tl >>> 24 & 0xff;\n b[i++] = tl >>> 16 & 0xff;\n b[i++] = tl >>> 8 & 0xff;\n b[i++] = tl & 0xff; // `time_mid`\n\n const tmh = msecs / 0x100000000 * 10000 & 0xfffffff;\n b[i++] = tmh >>> 8 & 0xff;\n b[i++] = tmh & 0xff; // `time_high_and_version`\n\n b[i++] = tmh >>> 24 & 0xf | 0x10; // include version\n\n b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant)\n\n b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low`\n\n b[i++] = clockseq & 0xff; // `node`\n\n for (let n = 0; n < 6; ++n) {\n b[i + n] = node[n];\n }\n\n return buf || (0, _stringify.default)(b);\n}\n\nvar _default = v1;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _v = _interopRequireDefault(require(\"./v35.js\"));\n\nvar _md = _interopRequireDefault(require(\"./md5.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nconst v3 = (0, _v.default)('v3', 0x30, _md.default);\nvar _default = v3;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = _default;\nexports.URL = exports.DNS = void 0;\n\nvar _stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nvar _parse = _interopRequireDefault(require(\"./parse.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction stringToBytes(str) {\n str = unescape(encodeURIComponent(str)); // UTF8 escape\n\n const bytes = [];\n\n for (let i = 0; i < str.length; ++i) {\n bytes.push(str.charCodeAt(i));\n }\n\n return bytes;\n}\n\nconst DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8';\nexports.DNS = DNS;\nconst URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8';\nexports.URL = URL;\n\nfunction _default(name, version, hashfunc) {\n function generateUUID(value, namespace, buf, offset) {\n if (typeof value === 'string') {\n value = stringToBytes(value);\n }\n\n if (typeof namespace === 'string') {\n namespace = (0, _parse.default)(namespace);\n }\n\n if (namespace.length !== 16) {\n throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)');\n } // Compute hash of namespace and value, Per 4.3\n // Future: Use spread syntax when supported on all platforms, e.g. `bytes =\n // hashfunc([...namespace, ... value])`\n\n\n let bytes = new Uint8Array(16 + value.length);\n bytes.set(namespace);\n bytes.set(value, namespace.length);\n bytes = hashfunc(bytes);\n bytes[6] = bytes[6] & 0x0f | version;\n bytes[8] = bytes[8] & 0x3f | 0x80;\n\n if (buf) {\n offset = offset || 0;\n\n for (let i = 0; i < 16; ++i) {\n buf[offset + i] = bytes[i];\n }\n\n return buf;\n }\n\n return (0, _stringify.default)(bytes);\n } // Function#name is not settable on some platforms (#270)\n\n\n try {\n generateUUID.name = name; // eslint-disable-next-line no-empty\n } catch (err) {} // For CommonJS default export support\n\n\n generateUUID.DNS = DNS;\n generateUUID.URL = URL;\n return generateUUID;\n}","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _rng = _interopRequireDefault(require(\"./rng.js\"));\n\nvar _stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction v4(options, buf, offset) {\n options = options || {};\n\n const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved`\n\n\n rnds[6] = rnds[6] & 0x0f | 0x40;\n rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided\n\n if (buf) {\n offset = offset || 0;\n\n for (let i = 0; i < 16; ++i) {\n buf[offset + i] = rnds[i];\n }\n\n return buf;\n }\n\n return (0, _stringify.default)(rnds);\n}\n\nvar _default = v4;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _v = _interopRequireDefault(require(\"./v35.js\"));\n\nvar _sha = _interopRequireDefault(require(\"./sha1.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nconst v5 = (0, _v.default)('v5', 0x50, _sha.default);\nvar _default = v5;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _regex = _interopRequireDefault(require(\"./regex.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction validate(uuid) {\n return typeof uuid === 'string' && _regex.default.test(uuid);\n}\n\nvar _default = validate;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction version(uuid) {\n if (!(0, _validate.default)(uuid)) {\n throw TypeError('Invalid UUID');\n }\n\n return parseInt(uuid.substr(14, 1), 16);\n}\n\nvar _default = version;\nexports.default = _default;","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getExecOutput = exports.exec = void 0;\nconst string_decoder_1 = require(\"string_decoder\");\nconst tr = __importStar(require(\"./toolrunner\"));\n/**\n * Exec a command.\n * Output will be streamed to the live console.\n * Returns promise with return code\n *\n * @param commandLine command to execute (can include additional args). Must be correctly escaped.\n * @param args optional arguments for tool. Escaping is handled by the lib.\n * @param options optional exec options. 
See ExecOptions\n * @returns Promise exit code\n */\nfunction exec(commandLine, args, options) {\n return __awaiter(this, void 0, void 0, function* () {\n const commandArgs = tr.argStringToArray(commandLine);\n if (commandArgs.length === 0) {\n throw new Error(`Parameter 'commandLine' cannot be null or empty.`);\n }\n // Path to tool to execute should be first arg\n const toolPath = commandArgs[0];\n args = commandArgs.slice(1).concat(args || []);\n const runner = new tr.ToolRunner(toolPath, args, options);\n return runner.exec();\n });\n}\nexports.exec = exec;\n/**\n * Exec a command and get the output.\n * Output will be streamed to the live console.\n * Returns promise with the exit code and collected stdout and stderr\n *\n * @param commandLine command to execute (can include additional args). Must be correctly escaped.\n * @param args optional arguments for tool. Escaping is handled by the lib.\n * @param options optional exec options. See ExecOptions\n * @returns Promise exit code, stdout, and stderr\n */\nfunction getExecOutput(commandLine, args, options) {\n var _a, _b;\n return __awaiter(this, void 0, void 0, function* () {\n let stdout = '';\n let stderr = '';\n //Using string decoder covers the case where a mult-byte character is split\n const stdoutDecoder = new string_decoder_1.StringDecoder('utf8');\n const stderrDecoder = new string_decoder_1.StringDecoder('utf8');\n const originalStdoutListener = (_a = options === null || options === void 0 ? void 0 : options.listeners) === null || _a === void 0 ? void 0 : _a.stdout;\n const originalStdErrListener = (_b = options === null || options === void 0 ? void 0 : options.listeners) === null || _b === void 0 ? void 0 : _b.stderr;\n const stdErrListener = (data) => {\n stderr += stderrDecoder.write(data);\n if (originalStdErrListener) {\n originalStdErrListener(data);\n }\n };\n const stdOutListener = (data) => {\n stdout += stdoutDecoder.write(data);\n if (originalStdoutListener) {\n originalStdoutListener(data);\n }\n };\n const listeners = Object.assign(Object.assign({}, options === null || options === void 0 ? void 0 : options.listeners), { stdout: stdOutListener, stderr: stdErrListener });\n const exitCode = yield exec(commandLine, args, Object.assign(Object.assign({}, options), { listeners }));\n //flush any remaining characters\n stdout += stdoutDecoder.end();\n stderr += stderrDecoder.end();\n return {\n exitCode,\n stdout,\n stderr\n };\n });\n}\nexports.getExecOutput = getExecOutput;\n//# sourceMappingURL=exec.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.argStringToArray = exports.ToolRunner = void 0;\nconst os = __importStar(require(\"os\"));\nconst events = __importStar(require(\"events\"));\nconst child = __importStar(require(\"child_process\"));\nconst path = __importStar(require(\"path\"));\nconst io = __importStar(require(\"@actions/io\"));\nconst ioUtil = __importStar(require(\"@actions/io/lib/io-util\"));\nconst timers_1 = require(\"timers\");\n/* eslint-disable @typescript-eslint/unbound-method */\nconst IS_WINDOWS = process.platform === 'win32';\n/*\n * Class for running command line tools. Handles quoting and arg parsing in a platform agnostic way.\n */\nclass ToolRunner extends events.EventEmitter {\n constructor(toolPath, args, options) {\n super();\n if (!toolPath) {\n throw new Error(\"Parameter 'toolPath' cannot be null or empty.\");\n }\n this.toolPath = toolPath;\n this.args = args || [];\n this.options = options || {};\n }\n _debug(message) {\n if (this.options.listeners && this.options.listeners.debug) {\n this.options.listeners.debug(message);\n }\n }\n _getCommandString(options, noPrefix) {\n const toolPath = this._getSpawnFileName();\n const args = this._getSpawnArgs(options);\n let cmd = noPrefix ? '' : '[command]'; // omit prefix when piped to a second tool\n if (IS_WINDOWS) {\n // Windows + cmd file\n if (this._isCmdFile()) {\n cmd += toolPath;\n for (const a of args) {\n cmd += ` ${a}`;\n }\n }\n // Windows + verbatim\n else if (options.windowsVerbatimArguments) {\n cmd += `\"${toolPath}\"`;\n for (const a of args) {\n cmd += ` ${a}`;\n }\n }\n // Windows (regular)\n else {\n cmd += this._windowsQuoteCmdArg(toolPath);\n for (const a of args) {\n cmd += ` ${this._windowsQuoteCmdArg(a)}`;\n }\n }\n }\n else {\n // OSX/Linux - this can likely be improved with some form of quoting.\n // creating processes on Unix is fundamentally different than Windows.\n // on Unix, execvp() takes an arg array.\n cmd += toolPath;\n for (const a of args) {\n cmd += ` ${a}`;\n }\n }\n return cmd;\n }\n _processLineBuffer(data, strBuffer, onLine) {\n try {\n let s = strBuffer + data.toString();\n let n = s.indexOf(os.EOL);\n while (n > -1) {\n const line = s.substring(0, n);\n onLine(line);\n // the rest of the string ...\n s = s.substring(n + os.EOL.length);\n n = s.indexOf(os.EOL);\n }\n return s;\n }\n catch (err) {\n // streaming lines to console is best effort. Don't fail a build.\n this._debug(`error processing line. Failed with error ${err}`);\n return '';\n }\n }\n _getSpawnFileName() {\n if (IS_WINDOWS) {\n if (this._isCmdFile()) {\n return process.env['COMSPEC'] || 'cmd.exe';\n }\n }\n return this.toolPath;\n }\n _getSpawnArgs(options) {\n if (IS_WINDOWS) {\n if (this._isCmdFile()) {\n let argline = `/D /S /C \"${this._windowsQuoteCmdArg(this.toolPath)}`;\n for (const a of this.args) {\n argline += ' ';\n argline += options.windowsVerbatimArguments\n ? 
a\n : this._windowsQuoteCmdArg(a);\n }\n argline += '\"';\n return [argline];\n }\n }\n return this.args;\n }\n _endsWith(str, end) {\n return str.endsWith(end);\n }\n _isCmdFile() {\n const upperToolPath = this.toolPath.toUpperCase();\n return (this._endsWith(upperToolPath, '.CMD') ||\n this._endsWith(upperToolPath, '.BAT'));\n }\n _windowsQuoteCmdArg(arg) {\n // for .exe, apply the normal quoting rules that libuv applies\n if (!this._isCmdFile()) {\n return this._uvQuoteCmdArg(arg);\n }\n // otherwise apply quoting rules specific to the cmd.exe command line parser.\n // the libuv rules are generic and are not designed specifically for cmd.exe\n // command line parser.\n //\n // for a detailed description of the cmd.exe command line parser, refer to\n // http://stackoverflow.com/questions/4094699/how-does-the-windows-command-interpreter-cmd-exe-parse-scripts/7970912#7970912\n // need quotes for empty arg\n if (!arg) {\n return '\"\"';\n }\n // determine whether the arg needs to be quoted\n const cmdSpecialChars = [\n ' ',\n '\\t',\n '&',\n '(',\n ')',\n '[',\n ']',\n '{',\n '}',\n '^',\n '=',\n ';',\n '!',\n \"'\",\n '+',\n ',',\n '`',\n '~',\n '|',\n '<',\n '>',\n '\"'\n ];\n let needsQuotes = false;\n for (const char of arg) {\n if (cmdSpecialChars.some(x => x === char)) {\n needsQuotes = true;\n break;\n }\n }\n // short-circuit if quotes not needed\n if (!needsQuotes) {\n return arg;\n }\n // the following quoting rules are very similar to the rules that by libuv applies.\n //\n // 1) wrap the string in quotes\n //\n // 2) double-up quotes - i.e. \" => \"\"\n //\n // this is different from the libuv quoting rules. libuv replaces \" with \\\", which unfortunately\n // doesn't work well with a cmd.exe command line.\n //\n // note, replacing \" with \"\" also works well if the arg is passed to a downstream .NET console app.\n // for example, the command line:\n // foo.exe \"myarg:\"\"my val\"\"\"\n // is parsed by a .NET console app into an arg array:\n // [ \"myarg:\\\"my val\\\"\" ]\n // which is the same end result when applying libuv quoting rules. although the actual\n // command line from libuv quoting rules would look like:\n // foo.exe \"myarg:\\\"my val\\\"\"\n //\n // 3) double-up slashes that precede a quote,\n // e.g. hello \\world => \"hello \\world\"\n // hello\\\"world => \"hello\\\\\"\"world\"\n // hello\\\\\"world => \"hello\\\\\\\\\"\"world\"\n // hello world\\ => \"hello world\\\\\"\n //\n // technically this is not required for a cmd.exe command line, or the batch argument parser.\n // the reasons for including this as a .cmd quoting rule are:\n //\n // a) this is optimized for the scenario where the argument is passed from the .cmd file to an\n // external program. many programs (e.g. .NET console apps) rely on the slash-doubling rule.\n //\n // b) it's what we've been doing previously (by deferring to node default behavior) and we\n // haven't heard any complaints about that aspect.\n //\n // note, a weakness of the quoting rules chosen here, is that % is not escaped. in fact, % cannot be\n // escaped when used on the command line directly - even though within a .cmd file % can be escaped\n // by using %%.\n //\n // the saving grace is, on the command line, %var% is left as-is if var is not defined. this contrasts\n // the line parsing rules within a .cmd file, where if var is not defined it is replaced with nothing.\n //\n // one option that was explored was replacing % with ^% - i.e. %var% => ^%var^%. 
this hack would\n // often work, since it is unlikely that var^ would exist, and the ^ character is removed when the\n // variable is used. the problem, however, is that ^ is not removed when %* is used to pass the args\n // to an external program.\n //\n // an unexplored potential solution for the % escaping problem, is to create a wrapper .cmd file.\n // % can be escaped within a .cmd file.\n let reverse = '\"';\n let quoteHit = true;\n for (let i = arg.length; i > 0; i--) {\n // walk the string in reverse\n reverse += arg[i - 1];\n if (quoteHit && arg[i - 1] === '\\\\') {\n reverse += '\\\\'; // double the slash\n }\n else if (arg[i - 1] === '\"') {\n quoteHit = true;\n reverse += '\"'; // double the quote\n }\n else {\n quoteHit = false;\n }\n }\n reverse += '\"';\n return reverse\n .split('')\n .reverse()\n .join('');\n }\n _uvQuoteCmdArg(arg) {\n // Tool runner wraps child_process.spawn() and needs to apply the same quoting as\n // Node in certain cases where the undocumented spawn option windowsVerbatimArguments\n // is used.\n //\n // Since this function is a port of quote_cmd_arg from Node 4.x (technically, lib UV,\n // see https://github.com/nodejs/node/blob/v4.x/deps/uv/src/win/process.c for details),\n // pasting copyright notice from Node within this function:\n //\n // Copyright Joyent, Inc. and other Node contributors. All rights reserved.\n //\n // Permission is hereby granted, free of charge, to any person obtaining a copy\n // of this software and associated documentation files (the \"Software\"), to\n // deal in the Software without restriction, including without limitation the\n // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n // sell copies of the Software, and to permit persons to whom the Software is\n // furnished to do so, subject to the following conditions:\n //\n // The above copyright notice and this permission notice shall be included in\n // all copies or substantial portions of the Software.\n //\n // THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n // IN THE SOFTWARE.\n if (!arg) {\n // Need double quotation for empty argument\n return '\"\"';\n }\n if (!arg.includes(' ') && !arg.includes('\\t') && !arg.includes('\"')) {\n // No quotation needed\n return arg;\n }\n if (!arg.includes('\"') && !arg.includes('\\\\')) {\n // No embedded double quotes or backslashes, so I can just wrap\n // quote marks around the whole thing.\n return `\"${arg}\"`;\n }\n // Expected input/output:\n // input : hello\"world\n // output: \"hello\\\"world\"\n // input : hello\"\"world\n // output: \"hello\\\"\\\"world\"\n // input : hello\\world\n // output: hello\\world\n // input : hello\\\\world\n // output: hello\\\\world\n // input : hello\\\"world\n // output: \"hello\\\\\\\"world\"\n // input : hello\\\\\"world\n // output: \"hello\\\\\\\\\\\"world\"\n // input : hello world\\\n // output: \"hello world\\\\\" - note the comment in libuv actually reads \"hello world\\\"\n // but it appears the comment is wrong, it should be \"hello world\\\\\"\n let reverse = '\"';\n let quoteHit = true;\n for (let i = arg.length; i > 0; i--) {\n // walk the string in reverse\n reverse += arg[i - 1];\n if (quoteHit && arg[i - 1] === '\\\\') {\n reverse += '\\\\';\n }\n else if (arg[i - 1] === '\"') {\n quoteHit = true;\n reverse += '\\\\';\n }\n else {\n quoteHit = false;\n }\n }\n reverse += '\"';\n return reverse\n .split('')\n .reverse()\n .join('');\n }\n _cloneExecOptions(options) {\n options = options || {};\n const result = {\n cwd: options.cwd || process.cwd(),\n env: options.env || process.env,\n silent: options.silent || false,\n windowsVerbatimArguments: options.windowsVerbatimArguments || false,\n failOnStdErr: options.failOnStdErr || false,\n ignoreReturnCode: options.ignoreReturnCode || false,\n delay: options.delay || 10000\n };\n result.outStream = options.outStream || process.stdout;\n result.errStream = options.errStream || process.stderr;\n return result;\n }\n _getSpawnOptions(options, toolPath) {\n options = options || {};\n const result = {};\n result.cwd = options.cwd;\n result.env = options.env;\n result['windowsVerbatimArguments'] =\n options.windowsVerbatimArguments || this._isCmdFile();\n if (options.windowsVerbatimArguments) {\n result.argv0 = `\"${toolPath}\"`;\n }\n return result;\n }\n /**\n * Exec a tool.\n * Output will be streamed to the live console.\n * Returns promise with return code\n *\n * @param tool path to tool to exec\n * @param options optional exec options. 
See ExecOptions\n * @returns number\n */\n exec() {\n return __awaiter(this, void 0, void 0, function* () {\n // root the tool path if it is unrooted and contains relative pathing\n if (!ioUtil.isRooted(this.toolPath) &&\n (this.toolPath.includes('/') ||\n (IS_WINDOWS && this.toolPath.includes('\\\\')))) {\n // prefer options.cwd if it is specified, however options.cwd may also need to be rooted\n this.toolPath = path.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath);\n }\n // if the tool is only a file name, then resolve it from the PATH\n // otherwise verify it exists (add extension on Windows if necessary)\n this.toolPath = yield io.which(this.toolPath, true);\n return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () {\n this._debug(`exec tool: ${this.toolPath}`);\n this._debug('arguments:');\n for (const arg of this.args) {\n this._debug(` ${arg}`);\n }\n const optionsNonNull = this._cloneExecOptions(this.options);\n if (!optionsNonNull.silent && optionsNonNull.outStream) {\n optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os.EOL);\n }\n const state = new ExecState(optionsNonNull, this.toolPath);\n state.on('debug', (message) => {\n this._debug(message);\n });\n if (this.options.cwd && !(yield ioUtil.exists(this.options.cwd))) {\n return reject(new Error(`The cwd: ${this.options.cwd} does not exist!`));\n }\n const fileName = this._getSpawnFileName();\n const cp = child.spawn(fileName, this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(this.options, fileName));\n let stdbuffer = '';\n if (cp.stdout) {\n cp.stdout.on('data', (data) => {\n if (this.options.listeners && this.options.listeners.stdout) {\n this.options.listeners.stdout(data);\n }\n if (!optionsNonNull.silent && optionsNonNull.outStream) {\n optionsNonNull.outStream.write(data);\n }\n stdbuffer = this._processLineBuffer(data, stdbuffer, (line) => {\n if (this.options.listeners && this.options.listeners.stdline) {\n this.options.listeners.stdline(line);\n }\n });\n });\n }\n let errbuffer = '';\n if (cp.stderr) {\n cp.stderr.on('data', (data) => {\n state.processStderr = true;\n if (this.options.listeners && this.options.listeners.stderr) {\n this.options.listeners.stderr(data);\n }\n if (!optionsNonNull.silent &&\n optionsNonNull.errStream &&\n optionsNonNull.outStream) {\n const s = optionsNonNull.failOnStdErr\n ? 
optionsNonNull.errStream\n : optionsNonNull.outStream;\n s.write(data);\n }\n errbuffer = this._processLineBuffer(data, errbuffer, (line) => {\n if (this.options.listeners && this.options.listeners.errline) {\n this.options.listeners.errline(line);\n }\n });\n });\n }\n cp.on('error', (err) => {\n state.processError = err.message;\n state.processExited = true;\n state.processClosed = true;\n state.CheckComplete();\n });\n cp.on('exit', (code) => {\n state.processExitCode = code;\n state.processExited = true;\n this._debug(`Exit code ${code} received from tool '${this.toolPath}'`);\n state.CheckComplete();\n });\n cp.on('close', (code) => {\n state.processExitCode = code;\n state.processExited = true;\n state.processClosed = true;\n this._debug(`STDIO streams have closed for tool '${this.toolPath}'`);\n state.CheckComplete();\n });\n state.on('done', (error, exitCode) => {\n if (stdbuffer.length > 0) {\n this.emit('stdline', stdbuffer);\n }\n if (errbuffer.length > 0) {\n this.emit('errline', errbuffer);\n }\n cp.removeAllListeners();\n if (error) {\n reject(error);\n }\n else {\n resolve(exitCode);\n }\n });\n if (this.options.input) {\n if (!cp.stdin) {\n throw new Error('child process missing stdin');\n }\n cp.stdin.end(this.options.input);\n }\n }));\n });\n }\n}\nexports.ToolRunner = ToolRunner;\n/**\n * Convert an arg string to an array of args. Handles escaping\n *\n * @param argString string of arguments\n * @returns string[] array of arguments\n */\nfunction argStringToArray(argString) {\n const args = [];\n let inQuotes = false;\n let escaped = false;\n let arg = '';\n function append(c) {\n // we only escape double quotes.\n if (escaped && c !== '\"') {\n arg += '\\\\';\n }\n arg += c;\n escaped = false;\n }\n for (let i = 0; i < argString.length; i++) {\n const c = argString.charAt(i);\n if (c === '\"') {\n if (!escaped) {\n inQuotes = !inQuotes;\n }\n else {\n append(c);\n }\n continue;\n }\n if (c === '\\\\' && escaped) {\n append(c);\n continue;\n }\n if (c === '\\\\' && inQuotes) {\n escaped = true;\n continue;\n }\n if (c === ' ' && !inQuotes) {\n if (arg.length > 0) {\n args.push(arg);\n arg = '';\n }\n continue;\n }\n append(c);\n }\n if (arg.length > 0) {\n args.push(arg.trim());\n }\n return args;\n}\nexports.argStringToArray = argStringToArray;\nclass ExecState extends events.EventEmitter {\n constructor(options, toolPath) {\n super();\n this.processClosed = false; // tracks whether the process has exited and stdio is closed\n this.processError = '';\n this.processExitCode = 0;\n this.processExited = false; // tracks whether the process has exited\n this.processStderr = false; // tracks whether stderr was written to\n this.delay = 10000; // 10 seconds\n this.done = false;\n this.timeout = null;\n if (!toolPath) {\n throw new Error('toolPath must not be empty');\n }\n this.options = options;\n this.toolPath = toolPath;\n if (options.delay) {\n this.delay = options.delay;\n }\n }\n CheckComplete() {\n if (this.done) {\n return;\n }\n if (this.processClosed) {\n this._setResult();\n }\n else if (this.processExited) {\n this.timeout = timers_1.setTimeout(ExecState.HandleTimeout, this.delay, this);\n }\n }\n _debug(message) {\n this.emit('debug', message);\n }\n _setResult() {\n // determine whether there is an error\n let error;\n if (this.processExited) {\n if (this.processError) {\n error = new Error(`There was an error when attempting to execute the process '${this.toolPath}'. This may indicate the process failed to start. 
Error: ${this.processError}`);\n }\n else if (this.processExitCode !== 0 && !this.options.ignoreReturnCode) {\n error = new Error(`The process '${this.toolPath}' failed with exit code ${this.processExitCode}`);\n }\n else if (this.processStderr && this.options.failOnStdErr) {\n error = new Error(`The process '${this.toolPath}' failed because one or more lines were written to the STDERR stream`);\n }\n }\n // clear the timeout\n if (this.timeout) {\n clearTimeout(this.timeout);\n this.timeout = null;\n }\n this.done = true;\n this.emit('done', error, this.processExitCode);\n }\n static HandleTimeout(state) {\n if (state.done) {\n return;\n }\n if (!state.processClosed && state.processExited) {\n const message = `The STDIO streams did not close within ${state.delay /\n 1000} seconds of the exit event from process '${state.toolPath}'. This may indicate a child process inherited the STDIO streams and has not yet exited.`;\n state._debug(message);\n }\n state._setResult();\n }\n}\n//# sourceMappingURL=toolrunner.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0;\nclass BasicCredentialHandler {\n constructor(username, password) {\n this.username = username;\n this.password = password;\n }\n prepareRequest(options) {\n if (!options.headers) {\n throw Error('The request has no headers');\n }\n options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`;\n }\n // This handler cannot handle 401\n canHandleAuthentication() {\n return false;\n }\n handleAuthentication() {\n return __awaiter(this, void 0, void 0, function* () {\n throw new Error('not implemented');\n });\n }\n}\nexports.BasicCredentialHandler = BasicCredentialHandler;\nclass BearerCredentialHandler {\n constructor(token) {\n this.token = token;\n }\n // currently implements pre-authorization\n // TODO: support preAuth = false where it hooks on 401\n prepareRequest(options) {\n if (!options.headers) {\n throw Error('The request has no headers');\n }\n options.headers['Authorization'] = `Bearer ${this.token}`;\n }\n // This handler cannot handle 401\n canHandleAuthentication() {\n return false;\n }\n handleAuthentication() {\n return __awaiter(this, void 0, void 0, function* () {\n throw new Error('not implemented');\n });\n }\n}\nexports.BearerCredentialHandler = BearerCredentialHandler;\nclass PersonalAccessTokenCredentialHandler {\n constructor(token) {\n this.token = token;\n }\n // currently implements pre-authorization\n // TODO: support preAuth = false where it hooks on 401\n prepareRequest(options) {\n if (!options.headers) {\n throw Error('The request has no headers');\n }\n options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`;\n 
}\n // This handler cannot handle 401\n canHandleAuthentication() {\n return false;\n }\n handleAuthentication() {\n return __awaiter(this, void 0, void 0, function* () {\n throw new Error('not implemented');\n });\n }\n}\nexports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler;\n//# sourceMappingURL=auth.js.map","\"use strict\";\n/* eslint-disable @typescript-eslint/no-explicit-any */\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0;\nconst http = __importStar(require(\"http\"));\nconst https = __importStar(require(\"https\"));\nconst pm = __importStar(require(\"./proxy\"));\nconst tunnel = __importStar(require(\"tunnel\"));\nvar HttpCodes;\n(function (HttpCodes) {\n HttpCodes[HttpCodes[\"OK\"] = 200] = \"OK\";\n HttpCodes[HttpCodes[\"MultipleChoices\"] = 300] = \"MultipleChoices\";\n HttpCodes[HttpCodes[\"MovedPermanently\"] = 301] = \"MovedPermanently\";\n HttpCodes[HttpCodes[\"ResourceMoved\"] = 302] = \"ResourceMoved\";\n HttpCodes[HttpCodes[\"SeeOther\"] = 303] = \"SeeOther\";\n HttpCodes[HttpCodes[\"NotModified\"] = 304] = \"NotModified\";\n HttpCodes[HttpCodes[\"UseProxy\"] = 305] = \"UseProxy\";\n HttpCodes[HttpCodes[\"SwitchProxy\"] = 306] = \"SwitchProxy\";\n HttpCodes[HttpCodes[\"TemporaryRedirect\"] = 307] = \"TemporaryRedirect\";\n HttpCodes[HttpCodes[\"PermanentRedirect\"] = 308] = \"PermanentRedirect\";\n HttpCodes[HttpCodes[\"BadRequest\"] = 400] = \"BadRequest\";\n HttpCodes[HttpCodes[\"Unauthorized\"] = 401] = \"Unauthorized\";\n HttpCodes[HttpCodes[\"PaymentRequired\"] = 402] = \"PaymentRequired\";\n HttpCodes[HttpCodes[\"Forbidden\"] = 403] = \"Forbidden\";\n HttpCodes[HttpCodes[\"NotFound\"] = 404] = \"NotFound\";\n HttpCodes[HttpCodes[\"MethodNotAllowed\"] = 405] = \"MethodNotAllowed\";\n HttpCodes[HttpCodes[\"NotAcceptable\"] = 406] = \"NotAcceptable\";\n 
HttpCodes[HttpCodes[\"ProxyAuthenticationRequired\"] = 407] = \"ProxyAuthenticationRequired\";\n HttpCodes[HttpCodes[\"RequestTimeout\"] = 408] = \"RequestTimeout\";\n HttpCodes[HttpCodes[\"Conflict\"] = 409] = \"Conflict\";\n HttpCodes[HttpCodes[\"Gone\"] = 410] = \"Gone\";\n HttpCodes[HttpCodes[\"TooManyRequests\"] = 429] = \"TooManyRequests\";\n HttpCodes[HttpCodes[\"InternalServerError\"] = 500] = \"InternalServerError\";\n HttpCodes[HttpCodes[\"NotImplemented\"] = 501] = \"NotImplemented\";\n HttpCodes[HttpCodes[\"BadGateway\"] = 502] = \"BadGateway\";\n HttpCodes[HttpCodes[\"ServiceUnavailable\"] = 503] = \"ServiceUnavailable\";\n HttpCodes[HttpCodes[\"GatewayTimeout\"] = 504] = \"GatewayTimeout\";\n})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));\nvar Headers;\n(function (Headers) {\n Headers[\"Accept\"] = \"accept\";\n Headers[\"ContentType\"] = \"content-type\";\n})(Headers = exports.Headers || (exports.Headers = {}));\nvar MediaTypes;\n(function (MediaTypes) {\n MediaTypes[\"ApplicationJson\"] = \"application/json\";\n})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));\n/**\n * Returns the proxy URL, depending upon the supplied url and proxy environment variables.\n * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com\n */\nfunction getProxyUrl(serverUrl) {\n const proxyUrl = pm.getProxyUrl(new URL(serverUrl));\n return proxyUrl ? proxyUrl.href : '';\n}\nexports.getProxyUrl = getProxyUrl;\nconst HttpRedirectCodes = [\n HttpCodes.MovedPermanently,\n HttpCodes.ResourceMoved,\n HttpCodes.SeeOther,\n HttpCodes.TemporaryRedirect,\n HttpCodes.PermanentRedirect\n];\nconst HttpResponseRetryCodes = [\n HttpCodes.BadGateway,\n HttpCodes.ServiceUnavailable,\n HttpCodes.GatewayTimeout\n];\nconst RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];\nconst ExponentialBackoffCeiling = 10;\nconst ExponentialBackoffTimeSlice = 5;\nclass HttpClientError extends Error {\n constructor(message, statusCode) {\n super(message);\n this.name = 'HttpClientError';\n this.statusCode = statusCode;\n Object.setPrototypeOf(this, HttpClientError.prototype);\n }\n}\nexports.HttpClientError = HttpClientError;\nclass HttpClientResponse {\n constructor(message) {\n this.message = message;\n }\n readBody() {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {\n let output = Buffer.alloc(0);\n this.message.on('data', (chunk) => {\n output = Buffer.concat([output, chunk]);\n });\n this.message.on('end', () => {\n resolve(output.toString());\n });\n }));\n });\n }\n}\nexports.HttpClientResponse = HttpClientResponse;\nfunction isHttps(requestUrl) {\n const parsedUrl = new URL(requestUrl);\n return parsedUrl.protocol === 'https:';\n}\nexports.isHttps = isHttps;\nclass HttpClient {\n constructor(userAgent, handlers, requestOptions) {\n this._ignoreSslError = false;\n this._allowRedirects = true;\n this._allowRedirectDowngrade = false;\n this._maxRedirects = 50;\n this._allowRetries = false;\n this._maxRetries = 1;\n this._keepAlive = false;\n this._disposed = false;\n this.userAgent = userAgent;\n this.handlers = handlers || [];\n this.requestOptions = requestOptions;\n if (requestOptions) {\n if (requestOptions.ignoreSslError != null) {\n this._ignoreSslError = requestOptions.ignoreSslError;\n }\n this._socketTimeout = requestOptions.socketTimeout;\n if (requestOptions.allowRedirects != null) {\n this._allowRedirects = requestOptions.allowRedirects;\n }\n 
if (requestOptions.allowRedirectDowngrade != null) {\n this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;\n }\n if (requestOptions.maxRedirects != null) {\n this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);\n }\n if (requestOptions.keepAlive != null) {\n this._keepAlive = requestOptions.keepAlive;\n }\n if (requestOptions.allowRetries != null) {\n this._allowRetries = requestOptions.allowRetries;\n }\n if (requestOptions.maxRetries != null) {\n this._maxRetries = requestOptions.maxRetries;\n }\n }\n }\n options(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});\n });\n }\n get(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('GET', requestUrl, null, additionalHeaders || {});\n });\n }\n del(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('DELETE', requestUrl, null, additionalHeaders || {});\n });\n }\n post(requestUrl, data, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('POST', requestUrl, data, additionalHeaders || {});\n });\n }\n patch(requestUrl, data, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('PATCH', requestUrl, data, additionalHeaders || {});\n });\n }\n put(requestUrl, data, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('PUT', requestUrl, data, additionalHeaders || {});\n });\n }\n head(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('HEAD', requestUrl, null, additionalHeaders || {});\n });\n }\n sendStream(verb, requestUrl, stream, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request(verb, requestUrl, stream, additionalHeaders);\n });\n }\n /**\n * Gets a typed object from an endpoint\n * Be aware that not found returns a null. 
Other errors (4xx, 5xx) reject the promise\n */\n getJson(requestUrl, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n const res = yield this.get(requestUrl, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n postJson(requestUrl, obj, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n const res = yield this.post(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n putJson(requestUrl, obj, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n const res = yield this.put(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n patchJson(requestUrl, obj, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n const res = yield this.patch(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n /**\n * Makes a raw http request.\n * All other methods such as get, post, patch, and request ultimately call this.\n * Prefer get, del, post and patch\n */\n request(verb, requestUrl, data, headers) {\n return __awaiter(this, void 0, void 0, function* () {\n if (this._disposed) {\n throw new Error('Client has already been disposed.');\n }\n const parsedUrl = new URL(requestUrl);\n let info = this._prepareRequest(verb, parsedUrl, headers);\n // Only perform retries on reads since writes may not be idempotent.\n const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb)\n ? 
this._maxRetries + 1\n : 1;\n let numTries = 0;\n let response;\n do {\n response = yield this.requestRaw(info, data);\n // Check if it's an authentication challenge\n if (response &&\n response.message &&\n response.message.statusCode === HttpCodes.Unauthorized) {\n let authenticationHandler;\n for (const handler of this.handlers) {\n if (handler.canHandleAuthentication(response)) {\n authenticationHandler = handler;\n break;\n }\n }\n if (authenticationHandler) {\n return authenticationHandler.handleAuthentication(this, info, data);\n }\n else {\n // We have received an unauthorized response but have no handlers to handle it.\n // Let the response return to the caller.\n return response;\n }\n }\n let redirectsRemaining = this._maxRedirects;\n while (response.message.statusCode &&\n HttpRedirectCodes.includes(response.message.statusCode) &&\n this._allowRedirects &&\n redirectsRemaining > 0) {\n const redirectUrl = response.message.headers['location'];\n if (!redirectUrl) {\n // if there's no location to redirect to, we won't\n break;\n }\n const parsedRedirectUrl = new URL(redirectUrl);\n if (parsedUrl.protocol === 'https:' &&\n parsedUrl.protocol !== parsedRedirectUrl.protocol &&\n !this._allowRedirectDowngrade) {\n throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');\n }\n // we need to finish reading the response before reassigning response\n // which will leak the open socket.\n yield response.readBody();\n // strip authorization header if redirected to a different hostname\n if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {\n for (const header in headers) {\n // header names are case insensitive\n if (header.toLowerCase() === 'authorization') {\n delete headers[header];\n }\n }\n }\n // let's make the request with the new redirectUrl\n info = this._prepareRequest(verb, parsedRedirectUrl, headers);\n response = yield this.requestRaw(info, data);\n redirectsRemaining--;\n }\n if (!response.message.statusCode ||\n !HttpResponseRetryCodes.includes(response.message.statusCode)) {\n // If not a retry code, return immediately instead of retrying\n return response;\n }\n numTries += 1;\n if (numTries < maxTries) {\n yield response.readBody();\n yield this._performExponentialBackoff(numTries);\n }\n } while (numTries < maxTries);\n return response;\n });\n }\n /**\n * Needs to be called if keepAlive is set to true in request options.\n */\n dispose() {\n if (this._agent) {\n this._agent.destroy();\n }\n this._disposed = true;\n }\n /**\n * Raw request.\n * @param info\n * @param data\n */\n requestRaw(info, data) {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve, reject) => {\n function callbackForResult(err, res) {\n if (err) {\n reject(err);\n }\n else if (!res) {\n // If `err` is not passed, then `res` must be passed.\n reject(new Error('Unknown error'));\n }\n else {\n resolve(res);\n }\n }\n this.requestRawWithCallback(info, data, callbackForResult);\n });\n });\n }\n /**\n * Raw request with callback.\n * @param info\n * @param data\n * @param onResult\n */\n requestRawWithCallback(info, data, onResult) {\n if (typeof data === 'string') {\n if (!info.options.headers) {\n info.options.headers = {};\n }\n info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');\n }\n let callbackCalled = false;\n function handleResult(err, res) {\n if (!callbackCalled) {\n callbackCalled = true;\n 
onResult(err, res);\n }\n }\n const req = info.httpModule.request(info.options, (msg) => {\n const res = new HttpClientResponse(msg);\n handleResult(undefined, res);\n });\n let socket;\n req.on('socket', sock => {\n socket = sock;\n });\n // If we ever get disconnected, we want the socket to timeout eventually\n req.setTimeout(this._socketTimeout || 3 * 60000, () => {\n if (socket) {\n socket.end();\n }\n handleResult(new Error(`Request timeout: ${info.options.path}`));\n });\n req.on('error', function (err) {\n // err has statusCode property\n // res should have headers\n handleResult(err);\n });\n if (data && typeof data === 'string') {\n req.write(data, 'utf8');\n }\n if (data && typeof data !== 'string') {\n data.on('close', function () {\n req.end();\n });\n data.pipe(req);\n }\n else {\n req.end();\n }\n }\n /**\n * Gets an http agent. This function is useful when you need an http agent that handles\n * routing through a proxy server - depending upon the url and proxy environment variables.\n * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com\n */\n getAgent(serverUrl) {\n const parsedUrl = new URL(serverUrl);\n return this._getAgent(parsedUrl);\n }\n _prepareRequest(method, requestUrl, headers) {\n const info = {};\n info.parsedUrl = requestUrl;\n const usingSsl = info.parsedUrl.protocol === 'https:';\n info.httpModule = usingSsl ? https : http;\n const defaultPort = usingSsl ? 443 : 80;\n info.options = {};\n info.options.host = info.parsedUrl.hostname;\n info.options.port = info.parsedUrl.port\n ? parseInt(info.parsedUrl.port)\n : defaultPort;\n info.options.path =\n (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');\n info.options.method = method;\n info.options.headers = this._mergeHeaders(headers);\n if (this.userAgent != null) {\n info.options.headers['user-agent'] = this.userAgent;\n }\n info.options.agent = this._getAgent(info.parsedUrl);\n // gives handlers an opportunity to participate\n if (this.handlers) {\n for (const handler of this.handlers) {\n handler.prepareRequest(info.options);\n }\n }\n return info;\n }\n _mergeHeaders(headers) {\n if (this.requestOptions && this.requestOptions.headers) {\n return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {}));\n }\n return lowercaseKeys(headers || {});\n }\n _getExistingOrDefaultHeader(additionalHeaders, header, _default) {\n let clientHeader;\n if (this.requestOptions && this.requestOptions.headers) {\n clientHeader = lowercaseKeys(this.requestOptions.headers)[header];\n }\n return additionalHeaders[header] || clientHeader || _default;\n }\n _getAgent(parsedUrl) {\n let agent;\n const proxyUrl = pm.getProxyUrl(parsedUrl);\n const useProxy = proxyUrl && proxyUrl.hostname;\n if (this._keepAlive && useProxy) {\n agent = this._proxyAgent;\n }\n if (this._keepAlive && !useProxy) {\n agent = this._agent;\n }\n // if agent is already assigned use that agent.\n if (agent) {\n return agent;\n }\n const usingSsl = parsedUrl.protocol === 'https:';\n let maxSockets = 100;\n if (this.requestOptions) {\n maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;\n }\n // This is `useProxy` again, but we need to check `proxyURl` directly for TypeScripts's flow analysis.\n if (proxyUrl && proxyUrl.hostname) {\n const agentOptions = {\n maxSockets,\n keepAlive: this._keepAlive,\n proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && {\n proxyAuth: 
`${proxyUrl.username}:${proxyUrl.password}`\n })), { host: proxyUrl.hostname, port: proxyUrl.port })\n };\n let tunnelAgent;\n const overHttps = proxyUrl.protocol === 'https:';\n if (usingSsl) {\n tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;\n }\n else {\n tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;\n }\n agent = tunnelAgent(agentOptions);\n this._proxyAgent = agent;\n }\n // if reusing agent across request and tunneling agent isn't assigned create a new agent\n if (this._keepAlive && !agent) {\n const options = { keepAlive: this._keepAlive, maxSockets };\n agent = usingSsl ? new https.Agent(options) : new http.Agent(options);\n this._agent = agent;\n }\n // if not using private agent and tunnel agent isn't setup then use global agent\n if (!agent) {\n agent = usingSsl ? https.globalAgent : http.globalAgent;\n }\n if (usingSsl && this._ignoreSslError) {\n // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process\n // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options\n // we have to cast it to any and change it directly\n agent.options = Object.assign(agent.options || {}, {\n rejectUnauthorized: false\n });\n }\n return agent;\n }\n _performExponentialBackoff(retryNumber) {\n return __awaiter(this, void 0, void 0, function* () {\n retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);\n const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);\n return new Promise(resolve => setTimeout(() => resolve(), ms));\n });\n }\n _processResponse(res, options) {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () {\n const statusCode = res.message.statusCode || 0;\n const response = {\n statusCode,\n result: null,\n headers: {}\n };\n // not found leads to null obj returned\n if (statusCode === HttpCodes.NotFound) {\n resolve(response);\n }\n // get the result from the body\n function dateTimeDeserializer(key, value) {\n if (typeof value === 'string') {\n const a = new Date(value);\n if (!isNaN(a.valueOf())) {\n return a;\n }\n }\n return value;\n }\n let obj;\n let contents;\n try {\n contents = yield res.readBody();\n if (contents && contents.length > 0) {\n if (options && options.deserializeDates) {\n obj = JSON.parse(contents, dateTimeDeserializer);\n }\n else {\n obj = JSON.parse(contents);\n }\n response.result = obj;\n }\n response.headers = res.message.headers;\n }\n catch (err) {\n // Invalid resource (contents not json); leaving result obj null\n }\n // note that 3xx redirects are handled by the http layer.\n if (statusCode > 299) {\n let msg;\n // if exception/error in body, attempt to get better error\n if (obj && obj.message) {\n msg = obj.message;\n }\n else if (contents && contents.length > 0) {\n // it may be the case that the exception is in the body message as string\n msg = contents;\n }\n else {\n msg = `Failed request: (${statusCode})`;\n }\n const err = new HttpClientError(msg, statusCode);\n err.result = response.result;\n reject(err);\n }\n else {\n resolve(response);\n }\n }));\n });\n }\n}\nexports.HttpClient = HttpClient;\nconst lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});\n//# sourceMappingURL=index.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.checkBypass = exports.getProxyUrl = void 0;\nfunction getProxyUrl(reqUrl) {\n const 
usingSsl = reqUrl.protocol === 'https:';\n if (checkBypass(reqUrl)) {\n return undefined;\n }\n const proxyVar = (() => {\n if (usingSsl) {\n return process.env['https_proxy'] || process.env['HTTPS_PROXY'];\n }\n else {\n return process.env['http_proxy'] || process.env['HTTP_PROXY'];\n }\n })();\n if (proxyVar) {\n return new URL(proxyVar);\n }\n else {\n return undefined;\n }\n}\nexports.getProxyUrl = getProxyUrl;\nfunction checkBypass(reqUrl) {\n if (!reqUrl.hostname) {\n return false;\n }\n const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';\n if (!noProxy) {\n return false;\n }\n // Determine the request port\n let reqPort;\n if (reqUrl.port) {\n reqPort = Number(reqUrl.port);\n }\n else if (reqUrl.protocol === 'http:') {\n reqPort = 80;\n }\n else if (reqUrl.protocol === 'https:') {\n reqPort = 443;\n }\n // Format the request hostname and hostname with port\n const upperReqHosts = [reqUrl.hostname.toUpperCase()];\n if (typeof reqPort === 'number') {\n upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);\n }\n // Compare request host against noproxy\n for (const upperNoProxyItem of noProxy\n .split(',')\n .map(x => x.trim().toUpperCase())\n .filter(x => x)) {\n if (upperReqHosts.some(x => x === upperNoProxyItem)) {\n return true;\n }\n }\n return false;\n}\nexports.checkBypass = checkBypass;\n//# sourceMappingURL=proxy.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar _a;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getCmdPath = exports.tryGetExecutablePath = exports.isRooted = exports.isDirectory = exports.exists = exports.READONLY = exports.UV_FS_O_EXLOCK = exports.IS_WINDOWS = exports.unlink = exports.symlink = exports.stat = exports.rmdir = exports.rm = exports.rename = exports.readlink = exports.readdir = exports.open = exports.mkdir = exports.lstat = exports.copyFile = exports.chmod = void 0;\nconst fs = __importStar(require(\"fs\"));\nconst path = __importStar(require(\"path\"));\n_a = fs.promises\n// export const {open} = 'fs'\n, exports.chmod = _a.chmod, exports.copyFile = _a.copyFile, exports.lstat = _a.lstat, exports.mkdir = _a.mkdir, exports.open = _a.open, exports.readdir = _a.readdir, exports.readlink = _a.readlink, exports.rename = _a.rename, exports.rm = _a.rm, exports.rmdir = _a.rmdir, exports.stat = _a.stat, exports.symlink = _a.symlink, exports.unlink = _a.unlink;\n// export const {open} = 'fs'\nexports.IS_WINDOWS = process.platform === 'win32';\n// See https://github.com/nodejs/node/blob/d0153aee367422d0858105abec186da4dff0a0c5/deps/uv/include/uv/win.h#L691\nexports.UV_FS_O_EXLOCK = 0x10000000;\nexports.READONLY = fs.constants.O_RDONLY;\nfunction exists(fsPath) {\n return __awaiter(this, void 0, void 0, function* () {\n try {\n yield exports.stat(fsPath);\n }\n catch (err) {\n if (err.code === 'ENOENT') {\n return false;\n }\n throw err;\n }\n return true;\n });\n}\nexports.exists = exists;\nfunction isDirectory(fsPath, useStat = false) {\n return __awaiter(this, void 0, void 0, function* () {\n const stats = useStat ? yield exports.stat(fsPath) : yield exports.lstat(fsPath);\n return stats.isDirectory();\n });\n}\nexports.isDirectory = isDirectory;\n/**\n * On OSX/Linux, true if path starts with '/'. On Windows, true for paths like:\n * \\, \\hello, \\\\hello\\share, C:, and C:\\hello (and corresponding alternate separator cases).\n */\nfunction isRooted(p) {\n p = normalizeSeparators(p);\n if (!p) {\n throw new Error('isRooted() parameter \"p\" cannot be empty');\n }\n if (exports.IS_WINDOWS) {\n return (p.startsWith('\\\\') || /^[A-Z]:/i.test(p) // e.g. \\ or \\hello or \\\\hello\n ); // e.g. C: or C:\\hello\n }\n return p.startsWith('/');\n}\nexports.isRooted = isRooted;\n/**\n * Best effort attempt to determine whether a file exists and is executable.\n * @param filePath file path to check\n * @param extensions additional file extensions to try\n * @return if file exists and is executable, returns the file path. 
otherwise empty string.\n */\nfunction tryGetExecutablePath(filePath, extensions) {\n return __awaiter(this, void 0, void 0, function* () {\n let stats = undefined;\n try {\n // test file exists\n stats = yield exports.stat(filePath);\n }\n catch (err) {\n if (err.code !== 'ENOENT') {\n // eslint-disable-next-line no-console\n console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`);\n }\n }\n if (stats && stats.isFile()) {\n if (exports.IS_WINDOWS) {\n // on Windows, test for valid extension\n const upperExt = path.extname(filePath).toUpperCase();\n if (extensions.some(validExt => validExt.toUpperCase() === upperExt)) {\n return filePath;\n }\n }\n else {\n if (isUnixExecutable(stats)) {\n return filePath;\n }\n }\n }\n // try each extension\n const originalFilePath = filePath;\n for (const extension of extensions) {\n filePath = originalFilePath + extension;\n stats = undefined;\n try {\n stats = yield exports.stat(filePath);\n }\n catch (err) {\n if (err.code !== 'ENOENT') {\n // eslint-disable-next-line no-console\n console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`);\n }\n }\n if (stats && stats.isFile()) {\n if (exports.IS_WINDOWS) {\n // preserve the case of the actual file (since an extension was appended)\n try {\n const directory = path.dirname(filePath);\n const upperName = path.basename(filePath).toUpperCase();\n for (const actualName of yield exports.readdir(directory)) {\n if (upperName === actualName.toUpperCase()) {\n filePath = path.join(directory, actualName);\n break;\n }\n }\n }\n catch (err) {\n // eslint-disable-next-line no-console\n console.log(`Unexpected error attempting to determine the actual case of the file '${filePath}': ${err}`);\n }\n return filePath;\n }\n else {\n if (isUnixExecutable(stats)) {\n return filePath;\n }\n }\n }\n }\n return '';\n });\n}\nexports.tryGetExecutablePath = tryGetExecutablePath;\nfunction normalizeSeparators(p) {\n p = p || '';\n if (exports.IS_WINDOWS) {\n // convert slashes on Windows\n p = p.replace(/\\//g, '\\\\');\n // remove redundant slashes\n return p.replace(/\\\\\\\\+/g, '\\\\');\n }\n // remove redundant slashes\n return p.replace(/\\/\\/+/g, '/');\n}\n// on Mac/Linux, test the execute bit\n// R W X R W X R W X\n// 256 128 64 32 16 8 4 2 1\nfunction isUnixExecutable(stats) {\n return ((stats.mode & 1) > 0 ||\n ((stats.mode & 8) > 0 && stats.gid === process.getgid()) ||\n ((stats.mode & 64) > 0 && stats.uid === process.getuid()));\n}\n// Get the path of cmd.exe in windows\nfunction getCmdPath() {\n var _a;\n return (_a = process.env['COMSPEC']) !== null && _a !== void 0 ? _a : `cmd.exe`;\n}\nexports.getCmdPath = getCmdPath;\n//# sourceMappingURL=io-util.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.findInPath = exports.which = exports.mkdirP = exports.rmRF = exports.mv = exports.cp = void 0;\nconst assert_1 = require(\"assert\");\nconst path = __importStar(require(\"path\"));\nconst ioUtil = __importStar(require(\"./io-util\"));\n/**\n * Copies a file or folder.\n * Based off of shelljs - https://github.com/shelljs/shelljs/blob/9237f66c52e5daa40458f94f9565e18e8132f5a6/src/cp.js\n *\n * @param source source path\n * @param dest destination path\n * @param options optional. See CopyOptions.\n */\nfunction cp(source, dest, options = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const { force, recursive, copySourceDirectory } = readCopyOptions(options);\n const destStat = (yield ioUtil.exists(dest)) ? yield ioUtil.stat(dest) : null;\n // Dest is an existing file, but not forcing\n if (destStat && destStat.isFile() && !force) {\n return;\n }\n // If dest is an existing directory, should copy inside.\n const newDest = destStat && destStat.isDirectory() && copySourceDirectory\n ? path.join(dest, path.basename(source))\n : dest;\n if (!(yield ioUtil.exists(source))) {\n throw new Error(`no such file or directory: ${source}`);\n }\n const sourceStat = yield ioUtil.stat(source);\n if (sourceStat.isDirectory()) {\n if (!recursive) {\n throw new Error(`Failed to copy. ${source} is a directory, but tried to copy without recursive flag.`);\n }\n else {\n yield cpDirRecursive(source, newDest, 0, force);\n }\n }\n else {\n if (path.relative(source, newDest) === '') {\n // a file cannot be copied to itself\n throw new Error(`'${newDest}' and '${source}' are the same file`);\n }\n yield copyFile(source, newDest, force);\n }\n });\n}\nexports.cp = cp;\n/**\n * Moves a path.\n *\n * @param source source path\n * @param dest destination path\n * @param options optional. 
See MoveOptions.\n */\nfunction mv(source, dest, options = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n if (yield ioUtil.exists(dest)) {\n let destExists = true;\n if (yield ioUtil.isDirectory(dest)) {\n // If dest is directory copy src into dest\n dest = path.join(dest, path.basename(source));\n destExists = yield ioUtil.exists(dest);\n }\n if (destExists) {\n if (options.force == null || options.force) {\n yield rmRF(dest);\n }\n else {\n throw new Error('Destination already exists');\n }\n }\n }\n yield mkdirP(path.dirname(dest));\n yield ioUtil.rename(source, dest);\n });\n}\nexports.mv = mv;\n/**\n * Remove a path recursively with force\n *\n * @param inputPath path to remove\n */\nfunction rmRF(inputPath) {\n return __awaiter(this, void 0, void 0, function* () {\n if (ioUtil.IS_WINDOWS) {\n // Check for invalid characters\n // https://docs.microsoft.com/en-us/windows/win32/fileio/naming-a-file\n if (/[*\"<>|]/.test(inputPath)) {\n throw new Error('File path must not contain `*`, `\"`, `<`, `>` or `|` on Windows');\n }\n }\n try {\n // note if path does not exist, error is silent\n yield ioUtil.rm(inputPath, {\n force: true,\n maxRetries: 3,\n recursive: true,\n retryDelay: 300\n });\n }\n catch (err) {\n throw new Error(`File was unable to be removed ${err}`);\n }\n });\n}\nexports.rmRF = rmRF;\n/**\n * Make a directory. Creates the full path with folders in between\n * Will throw if it fails\n *\n * @param fsPath path to create\n * @returns Promise\n */\nfunction mkdirP(fsPath) {\n return __awaiter(this, void 0, void 0, function* () {\n assert_1.ok(fsPath, 'a path argument must be provided');\n yield ioUtil.mkdir(fsPath, { recursive: true });\n });\n}\nexports.mkdirP = mkdirP;\n/**\n * Returns path of a tool had the tool actually been invoked. Resolves via paths.\n * If you check and the tool does not exist, it will throw.\n *\n * @param tool name of the tool\n * @param check whether to check if tool exists\n * @returns Promise path to tool\n */\nfunction which(tool, check) {\n return __awaiter(this, void 0, void 0, function* () {\n if (!tool) {\n throw new Error(\"parameter 'tool' is required\");\n }\n // recursive when check=true\n if (check) {\n const result = yield which(tool, false);\n if (!result) {\n if (ioUtil.IS_WINDOWS) {\n throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also verify the file has a valid extension for an executable file.`);\n }\n else {\n throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. 
Also check the file mode to verify the file is executable.`);\n }\n }\n return result;\n }\n const matches = yield findInPath(tool);\n if (matches && matches.length > 0) {\n return matches[0];\n }\n return '';\n });\n}\nexports.which = which;\n/**\n * Returns a list of all occurrences of the given tool on the system path.\n *\n * @returns Promise the paths of the tool\n */\nfunction findInPath(tool) {\n return __awaiter(this, void 0, void 0, function* () {\n if (!tool) {\n throw new Error(\"parameter 'tool' is required\");\n }\n // build the list of extensions to try\n const extensions = [];\n if (ioUtil.IS_WINDOWS && process.env['PATHEXT']) {\n for (const extension of process.env['PATHEXT'].split(path.delimiter)) {\n if (extension) {\n extensions.push(extension);\n }\n }\n }\n // if it's rooted, return it if exists. otherwise return empty.\n if (ioUtil.isRooted(tool)) {\n const filePath = yield ioUtil.tryGetExecutablePath(tool, extensions);\n if (filePath) {\n return [filePath];\n }\n return [];\n }\n // if any path separators, return empty\n if (tool.includes(path.sep)) {\n return [];\n }\n // build the list of directories\n //\n // Note, technically \"where\" checks the current directory on Windows. From a toolkit perspective,\n // it feels like we should not do this. Checking the current directory seems like more of a use\n // case of a shell, and the which() function exposed by the toolkit should strive for consistency\n // across platforms.\n const directories = [];\n if (process.env.PATH) {\n for (const p of process.env.PATH.split(path.delimiter)) {\n if (p) {\n directories.push(p);\n }\n }\n }\n // find all matches\n const matches = [];\n for (const directory of directories) {\n const filePath = yield ioUtil.tryGetExecutablePath(path.join(directory, tool), extensions);\n if (filePath) {\n matches.push(filePath);\n }\n }\n return matches;\n });\n}\nexports.findInPath = findInPath;\nfunction readCopyOptions(options) {\n const force = options.force == null ? true : options.force;\n const recursive = Boolean(options.recursive);\n const copySourceDirectory = options.copySourceDirectory == null\n ? 
true\n : Boolean(options.copySourceDirectory);\n return { force, recursive, copySourceDirectory };\n}\nfunction cpDirRecursive(sourceDir, destDir, currentDepth, force) {\n return __awaiter(this, void 0, void 0, function* () {\n // Ensure there is not a run away recursive copy\n if (currentDepth >= 255)\n return;\n currentDepth++;\n yield mkdirP(destDir);\n const files = yield ioUtil.readdir(sourceDir);\n for (const fileName of files) {\n const srcFile = `${sourceDir}/${fileName}`;\n const destFile = `${destDir}/${fileName}`;\n const srcFileStat = yield ioUtil.lstat(srcFile);\n if (srcFileStat.isDirectory()) {\n // Recurse\n yield cpDirRecursive(srcFile, destFile, currentDepth, force);\n }\n else {\n yield copyFile(srcFile, destFile, force);\n }\n }\n // Change the mode for the newly created directory\n yield ioUtil.chmod(destDir, (yield ioUtil.stat(sourceDir)).mode);\n });\n}\n// Buffered file copy\nfunction copyFile(srcFile, destFile, force) {\n return __awaiter(this, void 0, void 0, function* () {\n if ((yield ioUtil.lstat(srcFile)).isSymbolicLink()) {\n // unlink/re-link it\n try {\n yield ioUtil.lstat(destFile);\n yield ioUtil.unlink(destFile);\n }\n catch (e) {\n // Try to override file permission\n if (e.code === 'EPERM') {\n yield ioUtil.chmod(destFile, '0666');\n yield ioUtil.unlink(destFile);\n }\n // other errors = it doesn't exist, no work to do\n }\n // Copy over symlink\n const symlinkFull = yield ioUtil.readlink(srcFile);\n yield ioUtil.symlink(symlinkFull, destFile, ioUtil.IS_WINDOWS ? 'junction' : null);\n }\n else if (!(yield ioUtil.exists(destFile)) || force) {\n yield ioUtil.copyFile(srcFile, destFile);\n }\n });\n}\n//# sourceMappingURL=io.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports._readLinuxVersionFile = exports._getOsVersion = exports._findMatch = void 0;\nconst semver = __importStar(require(\"semver\"));\nconst core_1 = require(\"@actions/core\");\n// needs to be require for core node modules to be mocked\n/* eslint @typescript-eslint/no-require-imports: 0 */\nconst os = require(\"os\");\nconst cp = require(\"child_process\");\nconst fs = require(\"fs\");\nfunction _findMatch(versionSpec, stable, candidates, archFilter) {\n return __awaiter(this, void 0, void 0, function* () {\n const platFilter = os.platform();\n let result;\n let match;\n let file;\n for (const candidate of candidates) {\n const version = candidate.version;\n core_1.debug(`check ${version} satisfies ${versionSpec}`);\n if (semver.satisfies(version, versionSpec) &&\n (!stable || candidate.stable === stable)) {\n file = candidate.files.find(item => {\n core_1.debug(`${item.arch}===${archFilter} && ${item.platform}===${platFilter}`);\n let chk = item.arch === archFilter && item.platform === platFilter;\n if (chk && item.platform_version) {\n const osVersion = module.exports._getOsVersion();\n if (osVersion === item.platform_version) {\n chk = true;\n }\n else {\n chk = semver.satisfies(osVersion, item.platform_version);\n }\n }\n return chk;\n });\n if (file) {\n core_1.debug(`matched ${candidate.version}`);\n match = candidate;\n break;\n }\n }\n }\n if (match && file) {\n // clone since we're mutating the file list to be only the file that matches\n result = Object.assign({}, match);\n result.files = [file];\n }\n return result;\n });\n}\nexports._findMatch = _findMatch;\nfunction _getOsVersion() {\n // TODO: add windows and other linux, arm variants\n // right now filtering on version is only an ubuntu and macos scenario for tools we build for hosted (python)\n const plat = os.platform();\n let version = '';\n if (plat === 'darwin') {\n version = cp.execSync('sw_vers -productVersion').toString();\n }\n else if (plat === 'linux') {\n // lsb_release process not in some containers, readfile\n // Run cat /etc/lsb-release\n // DISTRIB_ID=Ubuntu\n // DISTRIB_RELEASE=18.04\n // DISTRIB_CODENAME=bionic\n // DISTRIB_DESCRIPTION=\"Ubuntu 18.04.4 LTS\"\n const lsbContents = module.exports._readLinuxVersionFile();\n if (lsbContents) {\n const lines = lsbContents.split('\\n');\n for (const line of lines) {\n const parts = line.split('=');\n if (parts.length === 2 &&\n (parts[0].trim() === 'VERSION_ID' ||\n parts[0].trim() === 'DISTRIB_RELEASE')) {\n version = parts[1]\n .trim()\n .replace(/^\"/, '')\n .replace(/\"$/, '');\n break;\n }\n }\n }\n }\n return version;\n}\nexports._getOsVersion = _getOsVersion;\nfunction _readLinuxVersionFile() {\n const lsbReleaseFile = '/etc/lsb-release';\n const osReleaseFile = '/etc/os-release';\n let contents = '';\n if (fs.existsSync(lsbReleaseFile)) {\n contents = fs.readFileSync(lsbReleaseFile).toString();\n }\n else if (fs.existsSync(osReleaseFile)) {\n contents = fs.readFileSync(osReleaseFile).toString();\n }\n return contents;\n}\nexports._readLinuxVersionFile = _readLinuxVersionFile;\n//# sourceMappingURL=manifest.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.RetryHelper = void 0;\nconst core = __importStar(require(\"@actions/core\"));\n/**\n * Internal class for retries\n */\nclass RetryHelper {\n constructor(maxAttempts, minSeconds, maxSeconds) {\n if (maxAttempts < 1) {\n throw new Error('max attempts should be greater than or equal to 1');\n }\n this.maxAttempts = maxAttempts;\n this.minSeconds = Math.floor(minSeconds);\n this.maxSeconds = Math.floor(maxSeconds);\n if (this.minSeconds > this.maxSeconds) {\n throw new Error('min seconds should be less than or equal to max seconds');\n }\n }\n execute(action, isRetryable) {\n return __awaiter(this, void 0, void 0, function* () {\n let attempt = 1;\n while (attempt < this.maxAttempts) {\n // Try\n try {\n return yield action();\n }\n catch (err) {\n if (isRetryable && !isRetryable(err)) {\n throw err;\n }\n core.info(err.message);\n }\n // Sleep\n const seconds = this.getSleepAmount();\n core.info(`Waiting ${seconds} seconds before trying again`);\n yield this.sleep(seconds);\n attempt++;\n }\n // Last attempt\n return yield action();\n });\n }\n getSleepAmount() {\n return (Math.floor(Math.random() * (this.maxSeconds - this.minSeconds + 1)) +\n this.minSeconds);\n }\n sleep(seconds) {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise(resolve => setTimeout(resolve, seconds * 1000));\n });\n }\n}\nexports.RetryHelper = RetryHelper;\n//# sourceMappingURL=retry-helper.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.evaluateVersions = exports.isExplicitVersion = exports.findFromManifest = exports.getManifestFromRepo = exports.findAllVersions = exports.find = exports.cacheFile = exports.cacheDir = exports.extractZip = exports.extractXar = exports.extractTar = exports.extract7z = exports.downloadTool = exports.HTTPError = void 0;\nconst core = __importStar(require(\"@actions/core\"));\nconst io = __importStar(require(\"@actions/io\"));\nconst fs = __importStar(require(\"fs\"));\nconst mm = __importStar(require(\"./manifest\"));\nconst os = __importStar(require(\"os\"));\nconst path = __importStar(require(\"path\"));\nconst httpm = __importStar(require(\"@actions/http-client\"));\nconst semver = __importStar(require(\"semver\"));\nconst stream = __importStar(require(\"stream\"));\nconst util = __importStar(require(\"util\"));\nconst assert_1 = require(\"assert\");\nconst v4_1 = __importDefault(require(\"uuid/v4\"));\nconst exec_1 = require(\"@actions/exec/lib/exec\");\nconst retry_helper_1 = require(\"./retry-helper\");\nclass HTTPError extends Error {\n constructor(httpStatusCode) {\n super(`Unexpected HTTP response: ${httpStatusCode}`);\n this.httpStatusCode = httpStatusCode;\n Object.setPrototypeOf(this, new.target.prototype);\n }\n}\nexports.HTTPError = HTTPError;\nconst IS_WINDOWS = process.platform === 'win32';\nconst IS_MAC = process.platform === 'darwin';\nconst userAgent = 'actions/tool-cache';\n/**\n * Download a tool from an url and stream it into a file\n *\n * @param url url of tool to download\n * @param dest path to download tool\n * @param auth authorization header\n * @param headers other headers\n * @returns path to downloaded tool\n */\nfunction downloadTool(url, dest, auth, headers) {\n return __awaiter(this, void 0, void 0, function* () {\n dest = dest || path.join(_getTempDirectory(), v4_1.default());\n yield io.mkdirP(path.dirname(dest));\n core.debug(`Downloading ${url}`);\n core.debug(`Destination ${dest}`);\n const maxAttempts = 3;\n const minSeconds = _getGlobal('TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS', 10);\n const maxSeconds = _getGlobal('TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS', 20);\n const retryHelper = new retry_helper_1.RetryHelper(maxAttempts, minSeconds, maxSeconds);\n 
return yield retryHelper.execute(() => __awaiter(this, void 0, void 0, function* () {\n return yield downloadToolAttempt(url, dest || '', auth, headers);\n }), (err) => {\n if (err instanceof HTTPError && err.httpStatusCode) {\n // Don't retry anything less than 500, except 408 Request Timeout and 429 Too Many Requests\n if (err.httpStatusCode < 500 &&\n err.httpStatusCode !== 408 &&\n err.httpStatusCode !== 429) {\n return false;\n }\n }\n // Otherwise retry\n return true;\n });\n });\n}\nexports.downloadTool = downloadTool;\nfunction downloadToolAttempt(url, dest, auth, headers) {\n return __awaiter(this, void 0, void 0, function* () {\n if (fs.existsSync(dest)) {\n throw new Error(`Destination file path ${dest} already exists`);\n }\n // Get the response headers\n const http = new httpm.HttpClient(userAgent, [], {\n allowRetries: false\n });\n if (auth) {\n core.debug('set auth');\n if (headers === undefined) {\n headers = {};\n }\n headers.authorization = auth;\n }\n const response = yield http.get(url, headers);\n if (response.message.statusCode !== 200) {\n const err = new HTTPError(response.message.statusCode);\n core.debug(`Failed to download from \"${url}\". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`);\n throw err;\n }\n // Download the response body\n const pipeline = util.promisify(stream.pipeline);\n const responseMessageFactory = _getGlobal('TEST_DOWNLOAD_TOOL_RESPONSE_MESSAGE_FACTORY', () => response.message);\n const readStream = responseMessageFactory();\n let succeeded = false;\n try {\n yield pipeline(readStream, fs.createWriteStream(dest));\n core.debug('download complete');\n succeeded = true;\n return dest;\n }\n finally {\n // Error, delete dest before retry\n if (!succeeded) {\n core.debug('download failed');\n try {\n yield io.rmRF(dest);\n }\n catch (err) {\n core.debug(`Failed to delete '${dest}'. ${err.message}`);\n }\n }\n }\n });\n}\n/**\n * Extract a .7z file\n *\n * @param file path to the .7z file\n * @param dest destination directory. Optional.\n * @param _7zPath path to 7zr.exe. Optional, for long path support. Most .7z archives do not have this\n * problem. If your .7z archive contains very long paths, you can pass the path to 7zr.exe which will\n * gracefully handle long paths. By default 7zdec.exe is used because it is a very small program and is\n * bundled with the tool lib. However it does not support long paths. 7zr.exe is the reduced command line\n * interface, it is smaller than the full command line interface, and it does support long paths. At the\n * time of this writing, it is freely available from the LZMA SDK that is available on the 7zip website.\n * Be sure to check the current license agreement. If 7zr.exe is bundled with your action, then the path\n * to 7zr.exe can be pass to this function.\n * @returns path to the destination directory\n */\nfunction extract7z(file, dest, _7zPath) {\n return __awaiter(this, void 0, void 0, function* () {\n assert_1.ok(IS_WINDOWS, 'extract7z() not supported on current OS');\n assert_1.ok(file, 'parameter \"file\" is required');\n dest = yield _createExtractFolder(dest);\n const originalCwd = process.cwd();\n process.chdir(dest);\n if (_7zPath) {\n try {\n const logLevel = core.isDebug() ? 
'-bb1' : '-bb0';\n const args = [\n 'x',\n logLevel,\n '-bd',\n '-sccUTF-8',\n file\n ];\n const options = {\n silent: true\n };\n yield exec_1.exec(`\"${_7zPath}\"`, args, options);\n }\n finally {\n process.chdir(originalCwd);\n }\n }\n else {\n const escapedScript = path\n .join(__dirname, '..', 'scripts', 'Invoke-7zdec.ps1')\n .replace(/'/g, \"''\")\n .replace(/\"|\\n|\\r/g, ''); // double-up single quotes, remove double quotes and newlines\n const escapedFile = file.replace(/'/g, \"''\").replace(/\"|\\n|\\r/g, '');\n const escapedTarget = dest.replace(/'/g, \"''\").replace(/\"|\\n|\\r/g, '');\n const command = `& '${escapedScript}' -Source '${escapedFile}' -Target '${escapedTarget}'`;\n const args = [\n '-NoLogo',\n '-Sta',\n '-NoProfile',\n '-NonInteractive',\n '-ExecutionPolicy',\n 'Unrestricted',\n '-Command',\n command\n ];\n const options = {\n silent: true\n };\n try {\n const powershellPath = yield io.which('powershell', true);\n yield exec_1.exec(`\"${powershellPath}\"`, args, options);\n }\n finally {\n process.chdir(originalCwd);\n }\n }\n return dest;\n });\n}\nexports.extract7z = extract7z;\n/**\n * Extract a compressed tar archive\n *\n * @param file path to the tar\n * @param dest destination directory. Optional.\n * @param flags flags for the tar command to use for extraction. Defaults to 'xz' (extracting gzipped tars). Optional.\n * @returns path to the destination directory\n */\nfunction extractTar(file, dest, flags = 'xz') {\n return __awaiter(this, void 0, void 0, function* () {\n if (!file) {\n throw new Error(\"parameter 'file' is required\");\n }\n // Create dest\n dest = yield _createExtractFolder(dest);\n // Determine whether GNU tar\n core.debug('Checking tar --version');\n let versionOutput = '';\n yield exec_1.exec('tar --version', [], {\n ignoreReturnCode: true,\n silent: true,\n listeners: {\n stdout: (data) => (versionOutput += data.toString()),\n stderr: (data) => (versionOutput += data.toString())\n }\n });\n core.debug(versionOutput.trim());\n const isGnuTar = versionOutput.toUpperCase().includes('GNU TAR');\n // Initialize args\n let args;\n if (flags instanceof Array) {\n args = flags;\n }\n else {\n args = [flags];\n }\n if (core.isDebug() && !flags.includes('v')) {\n args.push('-v');\n }\n let destArg = dest;\n let fileArg = file;\n if (IS_WINDOWS && isGnuTar) {\n args.push('--force-local');\n destArg = dest.replace(/\\\\/g, '/');\n // Technically only the dest needs to have `/` but for aesthetic consistency\n // convert slashes in the file arg too.\n fileArg = file.replace(/\\\\/g, '/');\n }\n if (isGnuTar) {\n // Suppress warnings when using GNU tar to extract archives created by BSD tar\n args.push('--warning=no-unknown-keyword');\n args.push('--overwrite');\n }\n args.push('-C', destArg, '-f', fileArg);\n yield exec_1.exec(`tar`, args);\n return dest;\n });\n}\nexports.extractTar = extractTar;\n/**\n * Extract a xar compatible archive\n *\n * @param file path to the archive\n * @param dest destination directory. Optional.\n * @param flags flags for the xar. 
Optional.\n * @returns path to the destination directory\n */\nfunction extractXar(file, dest, flags = []) {\n return __awaiter(this, void 0, void 0, function* () {\n assert_1.ok(IS_MAC, 'extractXar() not supported on current OS');\n assert_1.ok(file, 'parameter \"file\" is required');\n dest = yield _createExtractFolder(dest);\n let args;\n if (flags instanceof Array) {\n args = flags;\n }\n else {\n args = [flags];\n }\n args.push('-x', '-C', dest, '-f', file);\n if (core.isDebug()) {\n args.push('-v');\n }\n const xarPath = yield io.which('xar', true);\n yield exec_1.exec(`\"${xarPath}\"`, _unique(args));\n return dest;\n });\n}\nexports.extractXar = extractXar;\n/**\n * Extract a zip\n *\n * @param file path to the zip\n * @param dest destination directory. Optional.\n * @returns path to the destination directory\n */\nfunction extractZip(file, dest) {\n return __awaiter(this, void 0, void 0, function* () {\n if (!file) {\n throw new Error(\"parameter 'file' is required\");\n }\n dest = yield _createExtractFolder(dest);\n if (IS_WINDOWS) {\n yield extractZipWin(file, dest);\n }\n else {\n yield extractZipNix(file, dest);\n }\n return dest;\n });\n}\nexports.extractZip = extractZip;\nfunction extractZipWin(file, dest) {\n return __awaiter(this, void 0, void 0, function* () {\n // build the powershell command\n const escapedFile = file.replace(/'/g, \"''\").replace(/\"|\\n|\\r/g, ''); // double-up single quotes, remove double quotes and newlines\n const escapedDest = dest.replace(/'/g, \"''\").replace(/\"|\\n|\\r/g, '');\n const pwshPath = yield io.which('pwsh', false);\n //To match the file overwrite behavior on nix systems, we use the overwrite = true flag for ExtractToDirectory\n //and the -Force flag for Expand-Archive as a fallback\n if (pwshPath) {\n //attempt to use pwsh with ExtractToDirectory, if this fails attempt Expand-Archive\n const pwshCommand = [\n `$ErrorActionPreference = 'Stop' ;`,\n `try { Add-Type -AssemblyName System.IO.Compression.ZipFile } catch { } ;`,\n `try { [System.IO.Compression.ZipFile]::ExtractToDirectory('${escapedFile}', '${escapedDest}', $true) }`,\n `catch { if (($_.Exception.GetType().FullName -eq 'System.Management.Automation.MethodException') -or ($_.Exception.GetType().FullName -eq 'System.Management.Automation.RuntimeException') ){ Expand-Archive -LiteralPath '${escapedFile}' -DestinationPath '${escapedDest}' -Force } else { throw $_ } } ;`\n ].join(' ');\n const args = [\n '-NoLogo',\n '-NoProfile',\n '-NonInteractive',\n '-ExecutionPolicy',\n 'Unrestricted',\n '-Command',\n pwshCommand\n ];\n core.debug(`Using pwsh at path: ${pwshPath}`);\n yield exec_1.exec(`\"${pwshPath}\"`, args);\n }\n else {\n const powershellCommand = [\n `$ErrorActionPreference = 'Stop' ;`,\n `try { Add-Type -AssemblyName System.IO.Compression.FileSystem } catch { } ;`,\n `if ((Get-Command -Name Expand-Archive -Module Microsoft.PowerShell.Archive -ErrorAction Ignore)) { Expand-Archive -LiteralPath '${escapedFile}' -DestinationPath '${escapedDest}' -Force }`,\n `else {[System.IO.Compression.ZipFile]::ExtractToDirectory('${escapedFile}', '${escapedDest}', $true) }`\n ].join(' ');\n const args = [\n '-NoLogo',\n '-Sta',\n '-NoProfile',\n '-NonInteractive',\n '-ExecutionPolicy',\n 'Unrestricted',\n '-Command',\n powershellCommand\n ];\n const powershellPath = yield io.which('powershell', true);\n core.debug(`Using powershell at path: ${powershellPath}`);\n yield exec_1.exec(`\"${powershellPath}\"`, args);\n }\n });\n}\nfunction extractZipNix(file, dest) {\n return 
__awaiter(this, void 0, void 0, function* () {\n const unzipPath = yield io.which('unzip', true);\n const args = [file];\n if (!core.isDebug()) {\n args.unshift('-q');\n }\n args.unshift('-o'); //overwrite with -o, otherwise a prompt is shown which freezes the run\n yield exec_1.exec(`\"${unzipPath}\"`, args, { cwd: dest });\n });\n}\n/**\n * Caches a directory and installs it into the tool cacheDir\n *\n * @param sourceDir the directory to cache into tools\n * @param tool tool name\n * @param version version of the tool. semver format\n * @param arch architecture of the tool. Optional. Defaults to machine architecture\n */\nfunction cacheDir(sourceDir, tool, version, arch) {\n return __awaiter(this, void 0, void 0, function* () {\n version = semver.clean(version) || version;\n arch = arch || os.arch();\n core.debug(`Caching tool ${tool} ${version} ${arch}`);\n core.debug(`source dir: ${sourceDir}`);\n if (!fs.statSync(sourceDir).isDirectory()) {\n throw new Error('sourceDir is not a directory');\n }\n // Create the tool dir\n const destPath = yield _createToolPath(tool, version, arch);\n // copy each child item. do not move. move can fail on Windows\n // due to anti-virus software having an open handle on a file.\n for (const itemName of fs.readdirSync(sourceDir)) {\n const s = path.join(sourceDir, itemName);\n yield io.cp(s, destPath, { recursive: true });\n }\n // write .complete\n _completeToolPath(tool, version, arch);\n return destPath;\n });\n}\nexports.cacheDir = cacheDir;\n/**\n * Caches a downloaded file (GUID) and installs it\n * into the tool cache with a given targetName\n *\n * @param sourceFile the file to cache into tools. Typically a result of downloadTool which is a guid.\n * @param targetFile the name of the file name in the tools directory\n * @param tool tool name\n * @param version version of the tool. semver format\n * @param arch architecture of the tool. Optional. Defaults to machine architecture\n */\nfunction cacheFile(sourceFile, targetFile, tool, version, arch) {\n return __awaiter(this, void 0, void 0, function* () {\n version = semver.clean(version) || version;\n arch = arch || os.arch();\n core.debug(`Caching tool ${tool} ${version} ${arch}`);\n core.debug(`source file: ${sourceFile}`);\n if (!fs.statSync(sourceFile).isFile()) {\n throw new Error('sourceFile is not a file');\n }\n // create the tool dir\n const destFolder = yield _createToolPath(tool, version, arch);\n // copy instead of move. move can fail on Windows due to\n // anti-virus software having an open handle on a file.\n const destPath = path.join(destFolder, targetFile);\n core.debug(`destination file ${destPath}`);\n yield io.cp(sourceFile, destPath);\n // write .complete\n _completeToolPath(tool, version, arch);\n return destFolder;\n });\n}\nexports.cacheFile = cacheFile;\n/**\n * Finds the path to a tool version in the local installed tool cache\n *\n * @param toolName name of the tool\n * @param versionSpec version of the tool\n * @param arch optional arch. 
defaults to arch of computer\n */\nfunction find(toolName, versionSpec, arch) {\n if (!toolName) {\n throw new Error('toolName parameter is required');\n }\n if (!versionSpec) {\n throw new Error('versionSpec parameter is required');\n }\n arch = arch || os.arch();\n // attempt to resolve an explicit version\n if (!isExplicitVersion(versionSpec)) {\n const localVersions = findAllVersions(toolName, arch);\n const match = evaluateVersions(localVersions, versionSpec);\n versionSpec = match;\n }\n // check for the explicit version in the cache\n let toolPath = '';\n if (versionSpec) {\n versionSpec = semver.clean(versionSpec) || '';\n const cachePath = path.join(_getCacheDirectory(), toolName, versionSpec, arch);\n core.debug(`checking cache: ${cachePath}`);\n if (fs.existsSync(cachePath) && fs.existsSync(`${cachePath}.complete`)) {\n core.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch}`);\n toolPath = cachePath;\n }\n else {\n core.debug('not found');\n }\n }\n return toolPath;\n}\nexports.find = find;\n/**\n * Finds the paths to all versions of a tool that are installed in the local tool cache\n *\n * @param toolName name of the tool\n * @param arch optional arch. defaults to arch of computer\n */\nfunction findAllVersions(toolName, arch) {\n const versions = [];\n arch = arch || os.arch();\n const toolPath = path.join(_getCacheDirectory(), toolName);\n if (fs.existsSync(toolPath)) {\n const children = fs.readdirSync(toolPath);\n for (const child of children) {\n if (isExplicitVersion(child)) {\n const fullPath = path.join(toolPath, child, arch || '');\n if (fs.existsSync(fullPath) && fs.existsSync(`${fullPath}.complete`)) {\n versions.push(child);\n }\n }\n }\n }\n return versions;\n}\nexports.findAllVersions = findAllVersions;\nfunction getManifestFromRepo(owner, repo, auth, branch = 'master') {\n return __awaiter(this, void 0, void 0, function* () {\n let releases = [];\n const treeUrl = `https://api.github.com/repos/${owner}/${repo}/git/trees/${branch}`;\n const http = new httpm.HttpClient('tool-cache');\n const headers = {};\n if (auth) {\n core.debug('set auth');\n headers.authorization = auth;\n }\n const response = yield http.getJson(treeUrl, headers);\n if (!response.result) {\n return releases;\n }\n let manifestUrl = '';\n for (const item of response.result.tree) {\n if (item.path === 'versions-manifest.json') {\n manifestUrl = item.url;\n break;\n }\n }\n headers['accept'] = 'application/vnd.github.VERSION.raw';\n let versionsRaw = yield (yield http.get(manifestUrl, headers)).readBody();\n if (versionsRaw) {\n // shouldn't be needed but protects against invalid json saved with BOM\n versionsRaw = versionsRaw.replace(/^\\uFEFF/, '');\n try {\n releases = JSON.parse(versionsRaw);\n }\n catch (_a) {\n core.debug('Invalid json');\n }\n }\n return releases;\n });\n}\nexports.getManifestFromRepo = getManifestFromRepo;\nfunction findFromManifest(versionSpec, stable, manifest, archFilter = os.arch()) {\n return __awaiter(this, void 0, void 0, function* () {\n // wrap the internal impl\n const match = yield mm._findMatch(versionSpec, stable, manifest, archFilter);\n return match;\n });\n}\nexports.findFromManifest = findFromManifest;\nfunction _createExtractFolder(dest) {\n return __awaiter(this, void 0, void 0, function* () {\n if (!dest) {\n // create a temp dir\n dest = path.join(_getTempDirectory(), v4_1.default());\n }\n yield io.mkdirP(dest);\n return dest;\n });\n}\nfunction _createToolPath(tool, version, arch) {\n return __awaiter(this, void 0, void 0, function* () 
{\n const folderPath = path.join(_getCacheDirectory(), tool, semver.clean(version) || version, arch || '');\n core.debug(`destination ${folderPath}`);\n const markerPath = `${folderPath}.complete`;\n yield io.rmRF(folderPath);\n yield io.rmRF(markerPath);\n yield io.mkdirP(folderPath);\n return folderPath;\n });\n}\nfunction _completeToolPath(tool, version, arch) {\n const folderPath = path.join(_getCacheDirectory(), tool, semver.clean(version) || version, arch || '');\n const markerPath = `${folderPath}.complete`;\n fs.writeFileSync(markerPath, '');\n core.debug('finished caching tool');\n}\n/**\n * Check if version string is explicit\n *\n * @param versionSpec version string to check\n */\nfunction isExplicitVersion(versionSpec) {\n const c = semver.clean(versionSpec) || '';\n core.debug(`isExplicit: ${c}`);\n const valid = semver.valid(c) != null;\n core.debug(`explicit? ${valid}`);\n return valid;\n}\nexports.isExplicitVersion = isExplicitVersion;\n/**\n * Get the highest satisfiying semantic version in `versions` which satisfies `versionSpec`\n *\n * @param versions array of versions to evaluate\n * @param versionSpec semantic version spec to satisfy\n */\nfunction evaluateVersions(versions, versionSpec) {\n let version = '';\n core.debug(`evaluating ${versions.length} versions`);\n versions = versions.sort((a, b) => {\n if (semver.gt(a, b)) {\n return 1;\n }\n return -1;\n });\n for (let i = versions.length - 1; i >= 0; i--) {\n const potential = versions[i];\n const satisfied = semver.satisfies(potential, versionSpec);\n if (satisfied) {\n version = potential;\n break;\n }\n }\n if (version) {\n core.debug(`matched: ${version}`);\n }\n else {\n core.debug('match not found');\n }\n return version;\n}\nexports.evaluateVersions = evaluateVersions;\n/**\n * Gets RUNNER_TOOL_CACHE\n */\nfunction _getCacheDirectory() {\n const cacheDirectory = process.env['RUNNER_TOOL_CACHE'] || '';\n assert_1.ok(cacheDirectory, 'Expected RUNNER_TOOL_CACHE to be defined');\n return cacheDirectory;\n}\n/**\n * Gets RUNNER_TEMP\n */\nfunction _getTempDirectory() {\n const tempDirectory = process.env['RUNNER_TEMP'] || '';\n assert_1.ok(tempDirectory, 'Expected RUNNER_TEMP to be defined');\n return tempDirectory;\n}\n/**\n * Gets a global variable\n */\nfunction _getGlobal(key, defaultValue) {\n /* eslint-disable @typescript-eslint/no-explicit-any */\n const value = global[key];\n /* eslint-enable @typescript-eslint/no-explicit-any */\n return value !== undefined ? 
value : defaultValue;\n}\n/**\n * Returns an array of unique values.\n * @param values Values to make unique.\n */\nfunction _unique(values) {\n return Array.from(new Set(values));\n}\n//# sourceMappingURL=tool-cache.js.map","/**\n * Convert array of 16 byte values to UUID string format of the form:\n * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX\n */\nvar byteToHex = [];\nfor (var i = 0; i < 256; ++i) {\n byteToHex[i] = (i + 0x100).toString(16).substr(1);\n}\n\nfunction bytesToUuid(buf, offset) {\n var i = offset || 0;\n var bth = byteToHex;\n // join used to fix memory issue caused by concatenation: https://bugs.chromium.org/p/v8/issues/detail?id=3175#c4\n return ([\n bth[buf[i++]], bth[buf[i++]],\n bth[buf[i++]], bth[buf[i++]], '-',\n bth[buf[i++]], bth[buf[i++]], '-',\n bth[buf[i++]], bth[buf[i++]], '-',\n bth[buf[i++]], bth[buf[i++]], '-',\n bth[buf[i++]], bth[buf[i++]],\n bth[buf[i++]], bth[buf[i++]],\n bth[buf[i++]], bth[buf[i++]]\n ]).join('');\n}\n\nmodule.exports = bytesToUuid;\n","// Unique ID creation requires a high quality random # generator. In node.js\n// this is pretty straight-forward - we use the crypto API.\n\nvar crypto = require('crypto');\n\nmodule.exports = function nodeRNG() {\n return crypto.randomBytes(16);\n};\n","var rng = require('./lib/rng');\nvar bytesToUuid = require('./lib/bytesToUuid');\n\nfunction v4(options, buf, offset) {\n var i = buf && offset || 0;\n\n if (typeof(options) == 'string') {\n buf = options === 'binary' ? new Array(16) : null;\n options = null;\n }\n options = options || {};\n\n var rnds = options.random || (options.rng || rng)();\n\n // Per 4.4, set bits for version and `clock_seq_hi_and_reserved`\n rnds[6] = (rnds[6] & 0x0f) | 0x40;\n rnds[8] = (rnds[8] & 0x3f) | 0x80;\n\n // Copy bytes to buffer, if provided\n if (buf) {\n for (var ii = 0; ii < 16; ++ii) {\n buf[i + ii] = rnds[ii];\n }\n }\n\n return buf || bytesToUuid(rnds);\n}\n\nmodule.exports = v4;\n","\"use strict\";\nvar __defProp = Object.defineProperty;\nvar __getOwnPropDesc = Object.getOwnPropertyDescriptor;\nvar __getOwnPropNames = Object.getOwnPropertyNames;\nvar __hasOwnProp = Object.prototype.hasOwnProperty;\nvar __export = (target, all) => {\n for (var name in all)\n __defProp(target, name, { get: all[name], enumerable: true });\n};\nvar __copyProps = (to, from, except, desc) => {\n if (from && typeof from === \"object\" || typeof from === \"function\") {\n for (let key of __getOwnPropNames(from))\n if (!__hasOwnProp.call(to, key) && key !== except)\n __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });\n }\n return to;\n};\nvar __toCommonJS = (mod) => __copyProps(__defProp({}, \"__esModule\", { value: true }), mod);\n\n// pkg/dist-src/index.js\nvar dist_src_exports = {};\n__export(dist_src_exports, {\n Octokit: () => Octokit\n});\nmodule.exports = __toCommonJS(dist_src_exports);\nvar import_core = require(\"@octokit/core\");\nvar import_plugin_request_log = require(\"@octokit/plugin-request-log\");\nvar import_plugin_paginate_rest = require(\"@octokit/plugin-paginate-rest\");\nvar import_plugin_rest_endpoint_methods = require(\"@octokit/plugin-rest-endpoint-methods\");\n\n// pkg/dist-src/version.js\nvar VERSION = \"20.0.1\";\n\n// pkg/dist-src/index.js\nvar Octokit = import_core.Octokit.plugin(\n import_plugin_request_log.requestLog,\n import_plugin_rest_endpoint_methods.legacyRestEndpointMethods,\n import_plugin_paginate_rest.paginateRest\n).defaults({\n userAgent: `octokit-rest.js/${VERSION}`\n});\n// Annotate 
the CommonJS export names for ESM import in node:\n0 && (module.exports = {\n Octokit\n});\n","\"use strict\";\nvar __defProp = Object.defineProperty;\nvar __getOwnPropDesc = Object.getOwnPropertyDescriptor;\nvar __getOwnPropNames = Object.getOwnPropertyNames;\nvar __hasOwnProp = Object.prototype.hasOwnProperty;\nvar __export = (target, all) => {\n for (var name in all)\n __defProp(target, name, { get: all[name], enumerable: true });\n};\nvar __copyProps = (to, from, except, desc) => {\n if (from && typeof from === \"object\" || typeof from === \"function\") {\n for (let key of __getOwnPropNames(from))\n if (!__hasOwnProp.call(to, key) && key !== except)\n __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });\n }\n return to;\n};\nvar __toCommonJS = (mod) => __copyProps(__defProp({}, \"__esModule\", { value: true }), mod);\n\n// pkg/dist-src/index.js\nvar dist_src_exports = {};\n__export(dist_src_exports, {\n createTokenAuth: () => createTokenAuth\n});\nmodule.exports = __toCommonJS(dist_src_exports);\n\n// pkg/dist-src/auth.js\nvar REGEX_IS_INSTALLATION_LEGACY = /^v1\\./;\nvar REGEX_IS_INSTALLATION = /^ghs_/;\nvar REGEX_IS_USER_TO_SERVER = /^ghu_/;\nasync function auth(token) {\n const isApp = token.split(/\\./).length === 3;\n const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token);\n const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token);\n const tokenType = isApp ? \"app\" : isInstallation ? \"installation\" : isUserToServer ? \"user-to-server\" : \"oauth\";\n return {\n type: \"token\",\n token,\n tokenType\n };\n}\n\n// pkg/dist-src/with-authorization-prefix.js\nfunction withAuthorizationPrefix(token) {\n if (token.split(/\\./).length === 3) {\n return `bearer ${token}`;\n }\n return `token ${token}`;\n}\n\n// pkg/dist-src/hook.js\nasync function hook(token, request, route, parameters) {\n const endpoint = request.endpoint.merge(\n route,\n parameters\n );\n endpoint.headers.authorization = withAuthorizationPrefix(token);\n return request(endpoint);\n}\n\n// pkg/dist-src/index.js\nvar createTokenAuth = function createTokenAuth2(token) {\n if (!token) {\n throw new Error(\"[@octokit/auth-token] No token passed to createTokenAuth\");\n }\n if (typeof token !== \"string\") {\n throw new Error(\n \"[@octokit/auth-token] Token passed to createTokenAuth is not a string\"\n );\n }\n token = token.replace(/^(token|bearer) +/i, \"\");\n return Object.assign(auth.bind(null, token), {\n hook: hook.bind(null, token)\n });\n};\n// Annotate the CommonJS export names for ESM import in node:\n0 && (module.exports = {\n createTokenAuth\n});\n","\"use strict\";\nvar __defProp = Object.defineProperty;\nvar __getOwnPropDesc = Object.getOwnPropertyDescriptor;\nvar __getOwnPropNames = Object.getOwnPropertyNames;\nvar __hasOwnProp = Object.prototype.hasOwnProperty;\nvar __export = (target, all) => {\n for (var name in all)\n __defProp(target, name, { get: all[name], enumerable: true });\n};\nvar __copyProps = (to, from, except, desc) => {\n if (from && typeof from === \"object\" || typeof from === \"function\") {\n for (let key of __getOwnPropNames(from))\n if (!__hasOwnProp.call(to, key) && key !== except)\n __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });\n }\n return to;\n};\nvar __toCommonJS = (mod) => __copyProps(__defProp({}, \"__esModule\", { value: true }), mod);\n\n// pkg/dist-src/index.js\nvar dist_src_exports = 
{};\n__export(dist_src_exports, {\n Octokit: () => Octokit\n});\nmodule.exports = __toCommonJS(dist_src_exports);\nvar import_universal_user_agent = require(\"universal-user-agent\");\nvar import_before_after_hook = require(\"before-after-hook\");\nvar import_request = require(\"@octokit/request\");\nvar import_graphql = require(\"@octokit/graphql\");\nvar import_auth_token = require(\"@octokit/auth-token\");\n\n// pkg/dist-src/version.js\nvar VERSION = \"5.0.0\";\n\n// pkg/dist-src/index.js\nvar Octokit = class {\n static {\n this.VERSION = VERSION;\n }\n static defaults(defaults) {\n const OctokitWithDefaults = class extends this {\n constructor(...args) {\n const options = args[0] || {};\n if (typeof defaults === \"function\") {\n super(defaults(options));\n return;\n }\n super(\n Object.assign(\n {},\n defaults,\n options,\n options.userAgent && defaults.userAgent ? {\n userAgent: `${options.userAgent} ${defaults.userAgent}`\n } : null\n )\n );\n }\n };\n return OctokitWithDefaults;\n }\n static {\n this.plugins = [];\n }\n /**\n * Attach a plugin (or many) to your Octokit instance.\n *\n * @example\n * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...)\n */\n static plugin(...newPlugins) {\n const currentPlugins = this.plugins;\n const NewOctokit = class extends this {\n static {\n this.plugins = currentPlugins.concat(\n newPlugins.filter((plugin) => !currentPlugins.includes(plugin))\n );\n }\n };\n return NewOctokit;\n }\n constructor(options = {}) {\n const hook = new import_before_after_hook.Collection();\n const requestDefaults = {\n baseUrl: import_request.request.endpoint.DEFAULTS.baseUrl,\n headers: {},\n request: Object.assign({}, options.request, {\n // @ts-ignore internal usage only, no need to type\n hook: hook.bind(null, \"request\")\n }),\n mediaType: {\n previews: [],\n format: \"\"\n }\n };\n requestDefaults.headers[\"user-agent\"] = [\n options.userAgent,\n `octokit-core.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`\n ].filter(Boolean).join(\" \");\n if (options.baseUrl) {\n requestDefaults.baseUrl = options.baseUrl;\n }\n if (options.previews) {\n requestDefaults.mediaType.previews = options.previews;\n }\n if (options.timeZone) {\n requestDefaults.headers[\"time-zone\"] = options.timeZone;\n }\n this.request = import_request.request.defaults(requestDefaults);\n this.graphql = (0, import_graphql.withCustomRequest)(this.request).defaults(requestDefaults);\n this.log = Object.assign(\n {\n debug: () => {\n },\n info: () => {\n },\n warn: console.warn.bind(console),\n error: console.error.bind(console)\n },\n options.log\n );\n this.hook = hook;\n if (!options.authStrategy) {\n if (!options.auth) {\n this.auth = async () => ({\n type: \"unauthenticated\"\n });\n } else {\n const auth = (0, import_auth_token.createTokenAuth)(options.auth);\n hook.wrap(\"request\", auth.hook);\n this.auth = auth;\n }\n } else {\n const { authStrategy, ...otherOptions } = options;\n const auth = authStrategy(\n Object.assign(\n {\n request: this.request,\n log: this.log,\n // we pass the current octokit instance as well as its constructor options\n // to allow for authentication strategies that return a new octokit instance\n // that shares the same internal state as the current one. 
The original\n // requirement for this was the \"event-octokit\" authentication strategy\n // of https://github.com/probot/octokit-auth-probot.\n octokit: this,\n octokitOptions: otherOptions\n },\n options.auth\n )\n );\n hook.wrap(\"request\", auth.hook);\n this.auth = auth;\n }\n const classConstructor = this.constructor;\n classConstructor.plugins.forEach((plugin) => {\n Object.assign(this, plugin(this, options));\n });\n }\n};\n// Annotate the CommonJS export names for ESM import in node:\n0 && (module.exports = {\n Octokit\n});\n","\"use strict\";\nvar __defProp = Object.defineProperty;\nvar __getOwnPropDesc = Object.getOwnPropertyDescriptor;\nvar __getOwnPropNames = Object.getOwnPropertyNames;\nvar __hasOwnProp = Object.prototype.hasOwnProperty;\nvar __export = (target, all) => {\n for (var name in all)\n __defProp(target, name, { get: all[name], enumerable: true });\n};\nvar __copyProps = (to, from, except, desc) => {\n if (from && typeof from === \"object\" || typeof from === \"function\") {\n for (let key of __getOwnPropNames(from))\n if (!__hasOwnProp.call(to, key) && key !== except)\n __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });\n }\n return to;\n};\nvar __toCommonJS = (mod) => __copyProps(__defProp({}, \"__esModule\", { value: true }), mod);\n\n// pkg/dist-src/index.js\nvar dist_src_exports = {};\n__export(dist_src_exports, {\n endpoint: () => endpoint\n});\nmodule.exports = __toCommonJS(dist_src_exports);\n\n// pkg/dist-src/defaults.js\nvar import_universal_user_agent = require(\"universal-user-agent\");\n\n// pkg/dist-src/version.js\nvar VERSION = \"9.0.0\";\n\n// pkg/dist-src/defaults.js\nvar userAgent = `octokit-endpoint.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`;\nvar DEFAULTS = {\n method: \"GET\",\n baseUrl: \"https://api.github.com\",\n headers: {\n accept: \"application/vnd.github.v3+json\",\n \"user-agent\": userAgent\n },\n mediaType: {\n format: \"\"\n }\n};\n\n// pkg/dist-src/util/lowercase-keys.js\nfunction lowercaseKeys(object) {\n if (!object) {\n return {};\n }\n return Object.keys(object).reduce((newObj, key) => {\n newObj[key.toLowerCase()] = object[key];\n return newObj;\n }, {});\n}\n\n// pkg/dist-src/util/merge-deep.js\nvar import_is_plain_object = require(\"is-plain-object\");\nfunction mergeDeep(defaults, options) {\n const result = Object.assign({}, defaults);\n Object.keys(options).forEach((key) => {\n if ((0, import_is_plain_object.isPlainObject)(options[key])) {\n if (!(key in defaults))\n Object.assign(result, { [key]: options[key] });\n else\n result[key] = mergeDeep(defaults[key], options[key]);\n } else {\n Object.assign(result, { [key]: options[key] });\n }\n });\n return result;\n}\n\n// pkg/dist-src/util/remove-undefined-properties.js\nfunction removeUndefinedProperties(obj) {\n for (const key in obj) {\n if (obj[key] === void 0) {\n delete obj[key];\n }\n }\n return obj;\n}\n\n// pkg/dist-src/merge.js\nfunction merge(defaults, route, options) {\n if (typeof route === \"string\") {\n let [method, url] = route.split(\" \");\n options = Object.assign(url ? 
{ method, url } : { url: method }, options);\n } else {\n options = Object.assign({}, route);\n }\n options.headers = lowercaseKeys(options.headers);\n removeUndefinedProperties(options);\n removeUndefinedProperties(options.headers);\n const mergedOptions = mergeDeep(defaults || {}, options);\n if (options.url === \"/graphql\") {\n if (defaults && defaults.mediaType.previews?.length) {\n mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(\n (preview) => !mergedOptions.mediaType.previews.includes(preview)\n ).concat(mergedOptions.mediaType.previews);\n }\n mergedOptions.mediaType.previews = (mergedOptions.mediaType.previews || []).map((preview) => preview.replace(/-preview/, \"\"));\n }\n return mergedOptions;\n}\n\n// pkg/dist-src/util/add-query-parameters.js\nfunction addQueryParameters(url, parameters) {\n const separator = /\\?/.test(url) ? \"&\" : \"?\";\n const names = Object.keys(parameters);\n if (names.length === 0) {\n return url;\n }\n return url + separator + names.map((name) => {\n if (name === \"q\") {\n return \"q=\" + parameters.q.split(\"+\").map(encodeURIComponent).join(\"+\");\n }\n return `${name}=${encodeURIComponent(parameters[name])}`;\n }).join(\"&\");\n}\n\n// pkg/dist-src/util/extract-url-variable-names.js\nvar urlVariableRegex = /\\{[^}]+\\}/g;\nfunction removeNonChars(variableName) {\n return variableName.replace(/^\\W+|\\W+$/g, \"\").split(/,/);\n}\nfunction extractUrlVariableNames(url) {\n const matches = url.match(urlVariableRegex);\n if (!matches) {\n return [];\n }\n return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []);\n}\n\n// pkg/dist-src/util/omit.js\nfunction omit(object, keysToOmit) {\n return Object.keys(object).filter((option) => !keysToOmit.includes(option)).reduce((obj, key) => {\n obj[key] = object[key];\n return obj;\n }, {});\n}\n\n// pkg/dist-src/util/url-template.js\nfunction encodeReserved(str) {\n return str.split(/(%[0-9A-Fa-f]{2})/g).map(function(part) {\n if (!/%[0-9A-Fa-f]/.test(part)) {\n part = encodeURI(part).replace(/%5B/g, \"[\").replace(/%5D/g, \"]\");\n }\n return part;\n }).join(\"\");\n}\nfunction encodeUnreserved(str) {\n return encodeURIComponent(str).replace(/[!'()*]/g, function(c) {\n return \"%\" + c.charCodeAt(0).toString(16).toUpperCase();\n });\n}\nfunction encodeValue(operator, value, key) {\n value = operator === \"+\" || operator === \"#\" ? encodeReserved(value) : encodeUnreserved(value);\n if (key) {\n return encodeUnreserved(key) + \"=\" + value;\n } else {\n return value;\n }\n}\nfunction isDefined(value) {\n return value !== void 0 && value !== null;\n}\nfunction isKeyOperator(operator) {\n return operator === \";\" || operator === \"&\" || operator === \"?\";\n}\nfunction getValues(context, operator, key, modifier) {\n var value = context[key], result = [];\n if (isDefined(value) && value !== \"\") {\n if (typeof value === \"string\" || typeof value === \"number\" || typeof value === \"boolean\") {\n value = value.toString();\n if (modifier && modifier !== \"*\") {\n value = value.substring(0, parseInt(modifier, 10));\n }\n result.push(\n encodeValue(operator, value, isKeyOperator(operator) ? key : \"\")\n );\n } else {\n if (modifier === \"*\") {\n if (Array.isArray(value)) {\n value.filter(isDefined).forEach(function(value2) {\n result.push(\n encodeValue(operator, value2, isKeyOperator(operator) ? 
key : \"\")\n );\n });\n } else {\n Object.keys(value).forEach(function(k) {\n if (isDefined(value[k])) {\n result.push(encodeValue(operator, value[k], k));\n }\n });\n }\n } else {\n const tmp = [];\n if (Array.isArray(value)) {\n value.filter(isDefined).forEach(function(value2) {\n tmp.push(encodeValue(operator, value2));\n });\n } else {\n Object.keys(value).forEach(function(k) {\n if (isDefined(value[k])) {\n tmp.push(encodeUnreserved(k));\n tmp.push(encodeValue(operator, value[k].toString()));\n }\n });\n }\n if (isKeyOperator(operator)) {\n result.push(encodeUnreserved(key) + \"=\" + tmp.join(\",\"));\n } else if (tmp.length !== 0) {\n result.push(tmp.join(\",\"));\n }\n }\n }\n } else {\n if (operator === \";\") {\n if (isDefined(value)) {\n result.push(encodeUnreserved(key));\n }\n } else if (value === \"\" && (operator === \"&\" || operator === \"?\")) {\n result.push(encodeUnreserved(key) + \"=\");\n } else if (value === \"\") {\n result.push(\"\");\n }\n }\n return result;\n}\nfunction parseUrl(template) {\n return {\n expand: expand.bind(null, template)\n };\n}\nfunction expand(template, context) {\n var operators = [\"+\", \"#\", \".\", \"/\", \";\", \"?\", \"&\"];\n return template.replace(\n /\\{([^\\{\\}]+)\\}|([^\\{\\}]+)/g,\n function(_, expression, literal) {\n if (expression) {\n let operator = \"\";\n const values = [];\n if (operators.indexOf(expression.charAt(0)) !== -1) {\n operator = expression.charAt(0);\n expression = expression.substr(1);\n }\n expression.split(/,/g).forEach(function(variable) {\n var tmp = /([^:\\*]*)(?::(\\d+)|(\\*))?/.exec(variable);\n values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3]));\n });\n if (operator && operator !== \"+\") {\n var separator = \",\";\n if (operator === \"?\") {\n separator = \"&\";\n } else if (operator !== \"#\") {\n separator = operator;\n }\n return (values.length !== 0 ? operator : \"\") + values.join(separator);\n } else {\n return values.join(\",\");\n }\n } else {\n return encodeReserved(literal);\n }\n }\n );\n}\n\n// pkg/dist-src/parse.js\nfunction parse(options) {\n let method = options.method.toUpperCase();\n let url = (options.url || \"/\").replace(/:([a-z]\\w+)/g, \"{$1}\");\n let headers = Object.assign({}, options.headers);\n let body;\n let parameters = omit(options, [\n \"method\",\n \"baseUrl\",\n \"url\",\n \"headers\",\n \"request\",\n \"mediaType\"\n ]);\n const urlVariableNames = extractUrlVariableNames(url);\n url = parseUrl(url).expand(parameters);\n if (!/^http/.test(url)) {\n url = options.baseUrl + url;\n }\n const omittedParameters = Object.keys(options).filter((option) => urlVariableNames.includes(option)).concat(\"baseUrl\");\n const remainingParameters = omit(parameters, omittedParameters);\n const isBinaryRequest = /application\\/octet-stream/i.test(headers.accept);\n if (!isBinaryRequest) {\n if (options.mediaType.format) {\n headers.accept = headers.accept.split(/,/).map(\n (format) => format.replace(\n /application\\/vnd(\\.\\w+)(\\.v3)?(\\.\\w+)?(\\+json)?$/,\n `application/vnd$1$2.${options.mediaType.format}`\n )\n ).join(\",\");\n }\n if (url.endsWith(\"/graphql\")) {\n if (options.mediaType.previews?.length) {\n const previewsFromAcceptHeader = headers.accept.match(/[\\w-]+(?=-preview)/g) || [];\n headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map((preview) => {\n const format = options.mediaType.format ? 
`.${options.mediaType.format}` : \"+json\";\n return `application/vnd.github.${preview}-preview${format}`;\n }).join(\",\");\n }\n }\n }\n if ([\"GET\", \"HEAD\"].includes(method)) {\n url = addQueryParameters(url, remainingParameters);\n } else {\n if (\"data\" in remainingParameters) {\n body = remainingParameters.data;\n } else {\n if (Object.keys(remainingParameters).length) {\n body = remainingParameters;\n }\n }\n }\n if (!headers[\"content-type\"] && typeof body !== \"undefined\") {\n headers[\"content-type\"] = \"application/json; charset=utf-8\";\n }\n if ([\"PATCH\", \"PUT\"].includes(method) && typeof body === \"undefined\") {\n body = \"\";\n }\n return Object.assign(\n { method, url, headers },\n typeof body !== \"undefined\" ? { body } : null,\n options.request ? { request: options.request } : null\n );\n}\n\n// pkg/dist-src/endpoint-with-defaults.js\nfunction endpointWithDefaults(defaults, route, options) {\n return parse(merge(defaults, route, options));\n}\n\n// pkg/dist-src/with-defaults.js\nfunction withDefaults(oldDefaults, newDefaults) {\n const DEFAULTS2 = merge(oldDefaults, newDefaults);\n const endpoint2 = endpointWithDefaults.bind(null, DEFAULTS2);\n return Object.assign(endpoint2, {\n DEFAULTS: DEFAULTS2,\n defaults: withDefaults.bind(null, DEFAULTS2),\n merge: merge.bind(null, DEFAULTS2),\n parse\n });\n}\n\n// pkg/dist-src/index.js\nvar endpoint = withDefaults(null, DEFAULTS);\n// Annotate the CommonJS export names for ESM import in node:\n0 && (module.exports = {\n endpoint\n});\n","\"use strict\";\nvar __defProp = Object.defineProperty;\nvar __getOwnPropDesc = Object.getOwnPropertyDescriptor;\nvar __getOwnPropNames = Object.getOwnPropertyNames;\nvar __hasOwnProp = Object.prototype.hasOwnProperty;\nvar __export = (target, all) => {\n for (var name in all)\n __defProp(target, name, { get: all[name], enumerable: true });\n};\nvar __copyProps = (to, from, except, desc) => {\n if (from && typeof from === \"object\" || typeof from === \"function\") {\n for (let key of __getOwnPropNames(from))\n if (!__hasOwnProp.call(to, key) && key !== except)\n __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });\n }\n return to;\n};\nvar __toCommonJS = (mod) => __copyProps(__defProp({}, \"__esModule\", { value: true }), mod);\n\n// pkg/dist-src/index.js\nvar dist_src_exports = {};\n__export(dist_src_exports, {\n GraphqlResponseError: () => GraphqlResponseError,\n graphql: () => graphql2,\n withCustomRequest: () => withCustomRequest\n});\nmodule.exports = __toCommonJS(dist_src_exports);\nvar import_request3 = require(\"@octokit/request\");\nvar import_universal_user_agent = require(\"universal-user-agent\");\n\n// pkg/dist-src/version.js\nvar VERSION = \"7.0.1\";\n\n// pkg/dist-src/with-defaults.js\nvar import_request2 = require(\"@octokit/request\");\n\n// pkg/dist-src/graphql.js\nvar import_request = require(\"@octokit/request\");\n\n// pkg/dist-src/error.js\nfunction _buildMessageForResponseErrors(data) {\n return `Request failed due to following response errors:\n` + data.errors.map((e) => ` - ${e.message}`).join(\"\\n\");\n}\nvar GraphqlResponseError = class extends Error {\n constructor(request2, headers, response) {\n super(_buildMessageForResponseErrors(response));\n this.request = request2;\n this.headers = headers;\n this.response = response;\n this.name = \"GraphqlResponseError\";\n this.errors = response.errors;\n this.data = response.data;\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, 
this.constructor);\n }\n }\n};\n\n// pkg/dist-src/graphql.js\nvar NON_VARIABLE_OPTIONS = [\n \"method\",\n \"baseUrl\",\n \"url\",\n \"headers\",\n \"request\",\n \"query\",\n \"mediaType\"\n];\nvar FORBIDDEN_VARIABLE_OPTIONS = [\"query\", \"method\", \"url\"];\nvar GHES_V3_SUFFIX_REGEX = /\\/api\\/v3\\/?$/;\nfunction graphql(request2, query, options) {\n if (options) {\n if (typeof query === \"string\" && \"query\" in options) {\n return Promise.reject(\n new Error(`[@octokit/graphql] \"query\" cannot be used as variable name`)\n );\n }\n for (const key in options) {\n if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key))\n continue;\n return Promise.reject(\n new Error(\n `[@octokit/graphql] \"${key}\" cannot be used as variable name`\n )\n );\n }\n }\n const parsedOptions = typeof query === \"string\" ? Object.assign({ query }, options) : query;\n const requestOptions = Object.keys(\n parsedOptions\n ).reduce((result, key) => {\n if (NON_VARIABLE_OPTIONS.includes(key)) {\n result[key] = parsedOptions[key];\n return result;\n }\n if (!result.variables) {\n result.variables = {};\n }\n result.variables[key] = parsedOptions[key];\n return result;\n }, {});\n const baseUrl = parsedOptions.baseUrl || request2.endpoint.DEFAULTS.baseUrl;\n if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) {\n requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, \"/api/graphql\");\n }\n return request2(requestOptions).then((response) => {\n if (response.data.errors) {\n const headers = {};\n for (const key of Object.keys(response.headers)) {\n headers[key] = response.headers[key];\n }\n throw new GraphqlResponseError(\n requestOptions,\n headers,\n response.data\n );\n }\n return response.data.data;\n });\n}\n\n// pkg/dist-src/with-defaults.js\nfunction withDefaults(request2, newDefaults) {\n const newRequest = request2.defaults(newDefaults);\n const newApi = (query, options) => {\n return graphql(newRequest, query, options);\n };\n return Object.assign(newApi, {\n defaults: withDefaults.bind(null, newRequest),\n endpoint: newRequest.endpoint\n });\n}\n\n// pkg/dist-src/index.js\nvar graphql2 = withDefaults(import_request3.request, {\n headers: {\n \"user-agent\": `octokit-graphql.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`\n },\n method: \"POST\",\n url: \"/graphql\"\n});\nfunction withCustomRequest(customRequest) {\n return withDefaults(customRequest, {\n method: \"POST\",\n url: \"/graphql\"\n });\n}\n// Annotate the CommonJS export names for ESM import in node:\n0 && (module.exports = {\n GraphqlResponseError,\n graphql,\n withCustomRequest\n});\n","\"use strict\";\nvar __defProp = Object.defineProperty;\nvar __getOwnPropDesc = Object.getOwnPropertyDescriptor;\nvar __getOwnPropNames = Object.getOwnPropertyNames;\nvar __hasOwnProp = Object.prototype.hasOwnProperty;\nvar __export = (target, all) => {\n for (var name in all)\n __defProp(target, name, { get: all[name], enumerable: true });\n};\nvar __copyProps = (to, from, except, desc) => {\n if (from && typeof from === \"object\" || typeof from === \"function\") {\n for (let key of __getOwnPropNames(from))\n if (!__hasOwnProp.call(to, key) && key !== except)\n __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });\n }\n return to;\n};\nvar __toCommonJS = (mod) => __copyProps(__defProp({}, \"__esModule\", { value: true }), mod);\n\n// pkg/dist-src/index.js\nvar dist_src_exports = {};\n__export(dist_src_exports, {\n composePaginateRest: () => composePaginateRest,\n isPaginatingEndpoint: () 
=> isPaginatingEndpoint,\n paginateRest: () => paginateRest,\n paginatingEndpoints: () => paginatingEndpoints\n});\nmodule.exports = __toCommonJS(dist_src_exports);\n\n// pkg/dist-src/version.js\nvar VERSION = \"8.0.0\";\n\n// pkg/dist-src/normalize-paginated-list-response.js\nfunction normalizePaginatedListResponse(response) {\n if (!response.data) {\n return {\n ...response,\n data: []\n };\n }\n const responseNeedsNormalization = \"total_count\" in response.data && !(\"url\" in response.data);\n if (!responseNeedsNormalization)\n return response;\n const incompleteResults = response.data.incomplete_results;\n const repositorySelection = response.data.repository_selection;\n const totalCount = response.data.total_count;\n delete response.data.incomplete_results;\n delete response.data.repository_selection;\n delete response.data.total_count;\n const namespaceKey = Object.keys(response.data)[0];\n const data = response.data[namespaceKey];\n response.data = data;\n if (typeof incompleteResults !== \"undefined\") {\n response.data.incomplete_results = incompleteResults;\n }\n if (typeof repositorySelection !== \"undefined\") {\n response.data.repository_selection = repositorySelection;\n }\n response.data.total_count = totalCount;\n return response;\n}\n\n// pkg/dist-src/iterator.js\nfunction iterator(octokit, route, parameters) {\n const options = typeof route === \"function\" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters);\n const requestMethod = typeof route === \"function\" ? route : octokit.request;\n const method = options.method;\n const headers = options.headers;\n let url = options.url;\n return {\n [Symbol.asyncIterator]: () => ({\n async next() {\n if (!url)\n return { done: true };\n try {\n const response = await requestMethod({ method, url, headers });\n const normalizedResponse = normalizePaginatedListResponse(response);\n url = ((normalizedResponse.headers.link || \"\").match(\n /<([^>]+)>;\\s*rel=\"next\"/\n ) || [])[1];\n return { value: normalizedResponse };\n } catch (error) {\n if (error.status !== 409)\n throw error;\n url = \"\";\n return {\n value: {\n status: 200,\n headers: {},\n data: []\n }\n };\n }\n }\n })\n };\n}\n\n// pkg/dist-src/paginate.js\nfunction paginate(octokit, route, parameters, mapFn) {\n if (typeof parameters === \"function\") {\n mapFn = parameters;\n parameters = void 0;\n }\n return gather(\n octokit,\n [],\n iterator(octokit, route, parameters)[Symbol.asyncIterator](),\n mapFn\n );\n}\nfunction gather(octokit, results, iterator2, mapFn) {\n return iterator2.next().then((result) => {\n if (result.done) {\n return results;\n }\n let earlyExit = false;\n function done() {\n earlyExit = true;\n }\n results = results.concat(\n mapFn ? 
mapFn(result.value, done) : result.value.data\n );\n if (earlyExit) {\n return results;\n }\n return gather(octokit, results, iterator2, mapFn);\n });\n}\n\n// pkg/dist-src/compose-paginate.js\nvar composePaginateRest = Object.assign(paginate, {\n iterator\n});\n\n// pkg/dist-src/generated/paginating-endpoints.js\nvar paginatingEndpoints = [\n \"GET /app/hook/deliveries\",\n \"GET /app/installation-requests\",\n \"GET /app/installations\",\n \"GET /enterprises/{enterprise}/dependabot/alerts\",\n \"GET /enterprises/{enterprise}/secret-scanning/alerts\",\n \"GET /events\",\n \"GET /gists\",\n \"GET /gists/public\",\n \"GET /gists/starred\",\n \"GET /gists/{gist_id}/comments\",\n \"GET /gists/{gist_id}/commits\",\n \"GET /gists/{gist_id}/forks\",\n \"GET /installation/repositories\",\n \"GET /issues\",\n \"GET /licenses\",\n \"GET /marketplace_listing/plans\",\n \"GET /marketplace_listing/plans/{plan_id}/accounts\",\n \"GET /marketplace_listing/stubbed/plans\",\n \"GET /marketplace_listing/stubbed/plans/{plan_id}/accounts\",\n \"GET /networks/{owner}/{repo}/events\",\n \"GET /notifications\",\n \"GET /organizations\",\n \"GET /organizations/{org}/personal-access-token-requests\",\n \"GET /organizations/{org}/personal-access-token-requests/{pat_request_id}/repositories\",\n \"GET /organizations/{org}/personal-access-tokens\",\n \"GET /organizations/{org}/personal-access-tokens/{pat_id}/repositories\",\n \"GET /orgs/{org}/actions/cache/usage-by-repository\",\n \"GET /orgs/{org}/actions/permissions/repositories\",\n \"GET /orgs/{org}/actions/required_workflows\",\n \"GET /orgs/{org}/actions/runners\",\n \"GET /orgs/{org}/actions/secrets\",\n \"GET /orgs/{org}/actions/secrets/{secret_name}/repositories\",\n \"GET /orgs/{org}/actions/variables\",\n \"GET /orgs/{org}/actions/variables/{name}/repositories\",\n \"GET /orgs/{org}/blocks\",\n \"GET /orgs/{org}/code-scanning/alerts\",\n \"GET /orgs/{org}/codespaces\",\n \"GET /orgs/{org}/codespaces/secrets\",\n \"GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories\",\n \"GET /orgs/{org}/dependabot/alerts\",\n \"GET /orgs/{org}/dependabot/secrets\",\n \"GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories\",\n \"GET /orgs/{org}/events\",\n \"GET /orgs/{org}/failed_invitations\",\n \"GET /orgs/{org}/hooks\",\n \"GET /orgs/{org}/hooks/{hook_id}/deliveries\",\n \"GET /orgs/{org}/installations\",\n \"GET /orgs/{org}/invitations\",\n \"GET /orgs/{org}/invitations/{invitation_id}/teams\",\n \"GET /orgs/{org}/issues\",\n \"GET /orgs/{org}/members\",\n \"GET /orgs/{org}/members/{username}/codespaces\",\n \"GET /orgs/{org}/migrations\",\n \"GET /orgs/{org}/migrations/{migration_id}/repositories\",\n \"GET /orgs/{org}/outside_collaborators\",\n \"GET /orgs/{org}/packages\",\n \"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\",\n \"GET /orgs/{org}/projects\",\n \"GET /orgs/{org}/public_members\",\n \"GET /orgs/{org}/repos\",\n \"GET /orgs/{org}/rulesets\",\n \"GET /orgs/{org}/secret-scanning/alerts\",\n \"GET /orgs/{org}/teams\",\n \"GET /orgs/{org}/teams/{team_slug}/discussions\",\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\",\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\",\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\",\n \"GET /orgs/{org}/teams/{team_slug}/invitations\",\n \"GET /orgs/{org}/teams/{team_slug}/members\",\n \"GET /orgs/{org}/teams/{team_slug}/projects\",\n \"GET 
/orgs/{org}/teams/{team_slug}/repos\",\n \"GET /orgs/{org}/teams/{team_slug}/teams\",\n \"GET /projects/columns/{column_id}/cards\",\n \"GET /projects/{project_id}/collaborators\",\n \"GET /projects/{project_id}/columns\",\n \"GET /repos/{org}/{repo}/actions/required_workflows\",\n \"GET /repos/{owner}/{repo}/actions/artifacts\",\n \"GET /repos/{owner}/{repo}/actions/caches\",\n \"GET /repos/{owner}/{repo}/actions/organization-secrets\",\n \"GET /repos/{owner}/{repo}/actions/organization-variables\",\n \"GET /repos/{owner}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}/runs\",\n \"GET /repos/{owner}/{repo}/actions/runners\",\n \"GET /repos/{owner}/{repo}/actions/runs\",\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts\",\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs\",\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs\",\n \"GET /repos/{owner}/{repo}/actions/secrets\",\n \"GET /repos/{owner}/{repo}/actions/variables\",\n \"GET /repos/{owner}/{repo}/actions/workflows\",\n \"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs\",\n \"GET /repos/{owner}/{repo}/assignees\",\n \"GET /repos/{owner}/{repo}/branches\",\n \"GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations\",\n \"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs\",\n \"GET /repos/{owner}/{repo}/code-scanning/alerts\",\n \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\",\n \"GET /repos/{owner}/{repo}/code-scanning/analyses\",\n \"GET /repos/{owner}/{repo}/codespaces\",\n \"GET /repos/{owner}/{repo}/codespaces/devcontainers\",\n \"GET /repos/{owner}/{repo}/codespaces/secrets\",\n \"GET /repos/{owner}/{repo}/collaborators\",\n \"GET /repos/{owner}/{repo}/comments\",\n \"GET /repos/{owner}/{repo}/comments/{comment_id}/reactions\",\n \"GET /repos/{owner}/{repo}/commits\",\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/comments\",\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls\",\n \"GET /repos/{owner}/{repo}/commits/{ref}/check-runs\",\n \"GET /repos/{owner}/{repo}/commits/{ref}/check-suites\",\n \"GET /repos/{owner}/{repo}/commits/{ref}/status\",\n \"GET /repos/{owner}/{repo}/commits/{ref}/statuses\",\n \"GET /repos/{owner}/{repo}/contributors\",\n \"GET /repos/{owner}/{repo}/dependabot/alerts\",\n \"GET /repos/{owner}/{repo}/dependabot/secrets\",\n \"GET /repos/{owner}/{repo}/deployments\",\n \"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\",\n \"GET /repos/{owner}/{repo}/environments\",\n \"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies\",\n \"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps\",\n \"GET /repos/{owner}/{repo}/events\",\n \"GET /repos/{owner}/{repo}/forks\",\n \"GET /repos/{owner}/{repo}/hooks\",\n \"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries\",\n \"GET /repos/{owner}/{repo}/invitations\",\n \"GET /repos/{owner}/{repo}/issues\",\n \"GET /repos/{owner}/{repo}/issues/comments\",\n \"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\",\n \"GET /repos/{owner}/{repo}/issues/events\",\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/comments\",\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/events\",\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/labels\",\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/reactions\",\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/timeline\",\n \"GET /repos/{owner}/{repo}/keys\",\n \"GET 
/repos/{owner}/{repo}/labels\",\n \"GET /repos/{owner}/{repo}/milestones\",\n \"GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels\",\n \"GET /repos/{owner}/{repo}/notifications\",\n \"GET /repos/{owner}/{repo}/pages/builds\",\n \"GET /repos/{owner}/{repo}/projects\",\n \"GET /repos/{owner}/{repo}/pulls\",\n \"GET /repos/{owner}/{repo}/pulls/comments\",\n \"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/comments\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/commits\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/files\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments\",\n \"GET /repos/{owner}/{repo}/releases\",\n \"GET /repos/{owner}/{repo}/releases/{release_id}/assets\",\n \"GET /repos/{owner}/{repo}/releases/{release_id}/reactions\",\n \"GET /repos/{owner}/{repo}/rules/branches/{branch}\",\n \"GET /repos/{owner}/{repo}/rulesets\",\n \"GET /repos/{owner}/{repo}/secret-scanning/alerts\",\n \"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations\",\n \"GET /repos/{owner}/{repo}/security-advisories\",\n \"GET /repos/{owner}/{repo}/stargazers\",\n \"GET /repos/{owner}/{repo}/subscribers\",\n \"GET /repos/{owner}/{repo}/tags\",\n \"GET /repos/{owner}/{repo}/teams\",\n \"GET /repos/{owner}/{repo}/topics\",\n \"GET /repositories\",\n \"GET /repositories/{repository_id}/environments/{environment_name}/secrets\",\n \"GET /repositories/{repository_id}/environments/{environment_name}/variables\",\n \"GET /search/code\",\n \"GET /search/commits\",\n \"GET /search/issues\",\n \"GET /search/labels\",\n \"GET /search/repositories\",\n \"GET /search/topics\",\n \"GET /search/users\",\n \"GET /teams/{team_id}/discussions\",\n \"GET /teams/{team_id}/discussions/{discussion_number}/comments\",\n \"GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions\",\n \"GET /teams/{team_id}/discussions/{discussion_number}/reactions\",\n \"GET /teams/{team_id}/invitations\",\n \"GET /teams/{team_id}/members\",\n \"GET /teams/{team_id}/projects\",\n \"GET /teams/{team_id}/repos\",\n \"GET /teams/{team_id}/teams\",\n \"GET /user/blocks\",\n \"GET /user/codespaces\",\n \"GET /user/codespaces/secrets\",\n \"GET /user/emails\",\n \"GET /user/followers\",\n \"GET /user/following\",\n \"GET /user/gpg_keys\",\n \"GET /user/installations\",\n \"GET /user/installations/{installation_id}/repositories\",\n \"GET /user/issues\",\n \"GET /user/keys\",\n \"GET /user/marketplace_purchases\",\n \"GET /user/marketplace_purchases/stubbed\",\n \"GET /user/memberships/orgs\",\n \"GET /user/migrations\",\n \"GET /user/migrations/{migration_id}/repositories\",\n \"GET /user/orgs\",\n \"GET /user/packages\",\n \"GET /user/packages/{package_type}/{package_name}/versions\",\n \"GET /user/public_emails\",\n \"GET /user/repos\",\n \"GET /user/repository_invitations\",\n \"GET /user/social_accounts\",\n \"GET /user/ssh_signing_keys\",\n \"GET /user/starred\",\n \"GET /user/subscriptions\",\n \"GET /user/teams\",\n \"GET /users\",\n \"GET /users/{username}/events\",\n \"GET /users/{username}/events/orgs/{org}\",\n \"GET /users/{username}/events/public\",\n \"GET /users/{username}/followers\",\n \"GET /users/{username}/following\",\n \"GET /users/{username}/gists\",\n \"GET /users/{username}/gpg_keys\",\n \"GET /users/{username}/keys\",\n \"GET /users/{username}/orgs\",\n \"GET /users/{username}/packages\",\n 
\"GET /users/{username}/projects\",\n \"GET /users/{username}/received_events\",\n \"GET /users/{username}/received_events/public\",\n \"GET /users/{username}/repos\",\n \"GET /users/{username}/social_accounts\",\n \"GET /users/{username}/ssh_signing_keys\",\n \"GET /users/{username}/starred\",\n \"GET /users/{username}/subscriptions\"\n];\n\n// pkg/dist-src/paginating-endpoints.js\nfunction isPaginatingEndpoint(arg) {\n if (typeof arg === \"string\") {\n return paginatingEndpoints.includes(arg);\n } else {\n return false;\n }\n}\n\n// pkg/dist-src/index.js\nfunction paginateRest(octokit) {\n return {\n paginate: Object.assign(paginate.bind(null, octokit), {\n iterator: iterator.bind(null, octokit)\n })\n };\n}\npaginateRest.VERSION = VERSION;\n// Annotate the CommonJS export names for ESM import in node:\n0 && (module.exports = {\n composePaginateRest,\n isPaginatingEndpoint,\n paginateRest,\n paginatingEndpoints\n});\n","\"use strict\";\nvar __defProp = Object.defineProperty;\nvar __getOwnPropDesc = Object.getOwnPropertyDescriptor;\nvar __getOwnPropNames = Object.getOwnPropertyNames;\nvar __hasOwnProp = Object.prototype.hasOwnProperty;\nvar __export = (target, all) => {\n for (var name in all)\n __defProp(target, name, { get: all[name], enumerable: true });\n};\nvar __copyProps = (to, from, except, desc) => {\n if (from && typeof from === \"object\" || typeof from === \"function\") {\n for (let key of __getOwnPropNames(from))\n if (!__hasOwnProp.call(to, key) && key !== except)\n __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });\n }\n return to;\n};\nvar __toCommonJS = (mod) => __copyProps(__defProp({}, \"__esModule\", { value: true }), mod);\n\n// pkg/dist-src/index.js\nvar dist_src_exports = {};\n__export(dist_src_exports, {\n requestLog: () => requestLog\n});\nmodule.exports = __toCommonJS(dist_src_exports);\n\n// pkg/dist-src/version.js\nvar VERSION = \"4.0.0\";\n\n// pkg/dist-src/index.js\nfunction requestLog(octokit) {\n octokit.hook.wrap(\"request\", (request, options) => {\n octokit.log.debug(\"request\", options);\n const start = Date.now();\n const requestOptions = octokit.request.endpoint.parse(options);\n const path = requestOptions.url.replace(options.baseUrl, \"\");\n return request(options).then((response) => {\n octokit.log.info(\n `${requestOptions.method} ${path} - ${response.status} in ${Date.now() - start}ms`\n );\n return response;\n }).catch((error) => {\n octokit.log.info(\n `${requestOptions.method} ${path} - ${error.status} in ${Date.now() - start}ms`\n );\n throw error;\n });\n });\n}\nrequestLog.VERSION = VERSION;\n// Annotate the CommonJS export names for ESM import in node:\n0 && (module.exports = {\n requestLog\n});\n","\"use strict\";\nvar __defProp = Object.defineProperty;\nvar __getOwnPropDesc = Object.getOwnPropertyDescriptor;\nvar __getOwnPropNames = Object.getOwnPropertyNames;\nvar __hasOwnProp = Object.prototype.hasOwnProperty;\nvar __export = (target, all) => {\n for (var name in all)\n __defProp(target, name, { get: all[name], enumerable: true });\n};\nvar __copyProps = (to, from, except, desc) => {\n if (from && typeof from === \"object\" || typeof from === \"function\") {\n for (let key of __getOwnPropNames(from))\n if (!__hasOwnProp.call(to, key) && key !== except)\n __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });\n }\n return to;\n};\nvar __toCommonJS = (mod) => __copyProps(__defProp({}, \"__esModule\", { value: 
true }), mod);\n\n// pkg/dist-src/index.js\nvar dist_src_exports = {};\n__export(dist_src_exports, {\n legacyRestEndpointMethods: () => legacyRestEndpointMethods,\n restEndpointMethods: () => restEndpointMethods\n});\nmodule.exports = __toCommonJS(dist_src_exports);\n\n// pkg/dist-src/version.js\nvar VERSION = \"9.0.0\";\n\n// pkg/dist-src/generated/endpoints.js\nvar Endpoints = {\n actions: {\n addCustomLabelsToSelfHostedRunnerForOrg: [\n \"POST /orgs/{org}/actions/runners/{runner_id}/labels\"\n ],\n addCustomLabelsToSelfHostedRunnerForRepo: [\n \"POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\"\n ],\n addSelectedRepoToOrgSecret: [\n \"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\"\n ],\n addSelectedRepoToOrgVariable: [\n \"PUT /orgs/{org}/actions/variables/{name}/repositories/{repository_id}\"\n ],\n addSelectedRepoToRequiredWorkflow: [\n \"PUT /orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories/{repository_id}\"\n ],\n approveWorkflowRun: [\n \"POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve\"\n ],\n cancelWorkflowRun: [\n \"POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel\"\n ],\n createEnvironmentVariable: [\n \"POST /repositories/{repository_id}/environments/{environment_name}/variables\"\n ],\n createOrUpdateEnvironmentSecret: [\n \"PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\"\n ],\n createOrUpdateOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}\"],\n createOrUpdateRepoSecret: [\n \"PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}\"\n ],\n createOrgVariable: [\"POST /orgs/{org}/actions/variables\"],\n createRegistrationTokenForOrg: [\n \"POST /orgs/{org}/actions/runners/registration-token\"\n ],\n createRegistrationTokenForRepo: [\n \"POST /repos/{owner}/{repo}/actions/runners/registration-token\"\n ],\n createRemoveTokenForOrg: [\"POST /orgs/{org}/actions/runners/remove-token\"],\n createRemoveTokenForRepo: [\n \"POST /repos/{owner}/{repo}/actions/runners/remove-token\"\n ],\n createRepoVariable: [\"POST /repos/{owner}/{repo}/actions/variables\"],\n createRequiredWorkflow: [\"POST /orgs/{org}/actions/required_workflows\"],\n createWorkflowDispatch: [\n \"POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches\"\n ],\n deleteActionsCacheById: [\n \"DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}\"\n ],\n deleteActionsCacheByKey: [\n \"DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}\"\n ],\n deleteArtifact: [\n \"DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}\"\n ],\n deleteEnvironmentSecret: [\n \"DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\"\n ],\n deleteEnvironmentVariable: [\n \"DELETE /repositories/{repository_id}/environments/{environment_name}/variables/{name}\"\n ],\n deleteOrgSecret: [\"DELETE /orgs/{org}/actions/secrets/{secret_name}\"],\n deleteOrgVariable: [\"DELETE /orgs/{org}/actions/variables/{name}\"],\n deleteRepoSecret: [\n \"DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}\"\n ],\n deleteRepoVariable: [\n \"DELETE /repos/{owner}/{repo}/actions/variables/{name}\"\n ],\n deleteRequiredWorkflow: [\n \"DELETE /orgs/{org}/actions/required_workflows/{required_workflow_id}\"\n ],\n deleteSelfHostedRunnerFromOrg: [\n \"DELETE /orgs/{org}/actions/runners/{runner_id}\"\n ],\n deleteSelfHostedRunnerFromRepo: [\n \"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}\"\n ],\n deleteWorkflowRun: [\"DELETE 
/repos/{owner}/{repo}/actions/runs/{run_id}\"],\n deleteWorkflowRunLogs: [\n \"DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs\"\n ],\n disableSelectedRepositoryGithubActionsOrganization: [\n \"DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}\"\n ],\n disableWorkflow: [\n \"PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable\"\n ],\n downloadArtifact: [\n \"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}\"\n ],\n downloadJobLogsForWorkflowRun: [\n \"GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs\"\n ],\n downloadWorkflowRunAttemptLogs: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs\"\n ],\n downloadWorkflowRunLogs: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs\"\n ],\n enableSelectedRepositoryGithubActionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions/repositories/{repository_id}\"\n ],\n enableWorkflow: [\n \"PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable\"\n ],\n generateRunnerJitconfigForOrg: [\n \"POST /orgs/{org}/actions/runners/generate-jitconfig\"\n ],\n generateRunnerJitconfigForRepo: [\n \"POST /repos/{owner}/{repo}/actions/runners/generate-jitconfig\"\n ],\n getActionsCacheList: [\"GET /repos/{owner}/{repo}/actions/caches\"],\n getActionsCacheUsage: [\"GET /repos/{owner}/{repo}/actions/cache/usage\"],\n getActionsCacheUsageByRepoForOrg: [\n \"GET /orgs/{org}/actions/cache/usage-by-repository\"\n ],\n getActionsCacheUsageForOrg: [\"GET /orgs/{org}/actions/cache/usage\"],\n getAllowedActionsOrganization: [\n \"GET /orgs/{org}/actions/permissions/selected-actions\"\n ],\n getAllowedActionsRepository: [\n \"GET /repos/{owner}/{repo}/actions/permissions/selected-actions\"\n ],\n getArtifact: [\"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}\"],\n getEnvironmentPublicKey: [\n \"GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key\"\n ],\n getEnvironmentSecret: [\n \"GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\"\n ],\n getEnvironmentVariable: [\n \"GET /repositories/{repository_id}/environments/{environment_name}/variables/{name}\"\n ],\n getGithubActionsDefaultWorkflowPermissionsOrganization: [\n \"GET /orgs/{org}/actions/permissions/workflow\"\n ],\n getGithubActionsDefaultWorkflowPermissionsRepository: [\n \"GET /repos/{owner}/{repo}/actions/permissions/workflow\"\n ],\n getGithubActionsPermissionsOrganization: [\n \"GET /orgs/{org}/actions/permissions\"\n ],\n getGithubActionsPermissionsRepository: [\n \"GET /repos/{owner}/{repo}/actions/permissions\"\n ],\n getJobForWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/jobs/{job_id}\"],\n getOrgPublicKey: [\"GET /orgs/{org}/actions/secrets/public-key\"],\n getOrgSecret: [\"GET /orgs/{org}/actions/secrets/{secret_name}\"],\n getOrgVariable: [\"GET /orgs/{org}/actions/variables/{name}\"],\n getPendingDeploymentsForRun: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments\"\n ],\n getRepoPermissions: [\n \"GET /repos/{owner}/{repo}/actions/permissions\",\n {},\n { renamed: [\"actions\", \"getGithubActionsPermissionsRepository\"] }\n ],\n getRepoPublicKey: [\"GET /repos/{owner}/{repo}/actions/secrets/public-key\"],\n getRepoRequiredWorkflow: [\n \"GET /repos/{org}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}\"\n ],\n getRepoRequiredWorkflowUsage: [\n \"GET /repos/{org}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}/timing\"\n ],\n getRepoSecret: 
[\"GET /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n getRepoVariable: [\"GET /repos/{owner}/{repo}/actions/variables/{name}\"],\n getRequiredWorkflow: [\n \"GET /orgs/{org}/actions/required_workflows/{required_workflow_id}\"\n ],\n getReviewsForRun: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals\"\n ],\n getSelfHostedRunnerForOrg: [\"GET /orgs/{org}/actions/runners/{runner_id}\"],\n getSelfHostedRunnerForRepo: [\n \"GET /repos/{owner}/{repo}/actions/runners/{runner_id}\"\n ],\n getWorkflow: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}\"],\n getWorkflowAccessToRepository: [\n \"GET /repos/{owner}/{repo}/actions/permissions/access\"\n ],\n getWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}\"],\n getWorkflowRunAttempt: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}\"\n ],\n getWorkflowRunUsage: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing\"\n ],\n getWorkflowUsage: [\n \"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing\"\n ],\n listArtifactsForRepo: [\"GET /repos/{owner}/{repo}/actions/artifacts\"],\n listEnvironmentSecrets: [\n \"GET /repositories/{repository_id}/environments/{environment_name}/secrets\"\n ],\n listEnvironmentVariables: [\n \"GET /repositories/{repository_id}/environments/{environment_name}/variables\"\n ],\n listJobsForWorkflowRun: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs\"\n ],\n listJobsForWorkflowRunAttempt: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs\"\n ],\n listLabelsForSelfHostedRunnerForOrg: [\n \"GET /orgs/{org}/actions/runners/{runner_id}/labels\"\n ],\n listLabelsForSelfHostedRunnerForRepo: [\n \"GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\"\n ],\n listOrgSecrets: [\"GET /orgs/{org}/actions/secrets\"],\n listOrgVariables: [\"GET /orgs/{org}/actions/variables\"],\n listRepoOrganizationSecrets: [\n \"GET /repos/{owner}/{repo}/actions/organization-secrets\"\n ],\n listRepoOrganizationVariables: [\n \"GET /repos/{owner}/{repo}/actions/organization-variables\"\n ],\n listRepoRequiredWorkflows: [\n \"GET /repos/{org}/{repo}/actions/required_workflows\"\n ],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/actions/secrets\"],\n listRepoVariables: [\"GET /repos/{owner}/{repo}/actions/variables\"],\n listRepoWorkflows: [\"GET /repos/{owner}/{repo}/actions/workflows\"],\n listRequiredWorkflowRuns: [\n \"GET /repos/{owner}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}/runs\"\n ],\n listRequiredWorkflows: [\"GET /orgs/{org}/actions/required_workflows\"],\n listRunnerApplicationsForOrg: [\"GET /orgs/{org}/actions/runners/downloads\"],\n listRunnerApplicationsForRepo: [\n \"GET /repos/{owner}/{repo}/actions/runners/downloads\"\n ],\n listSelectedReposForOrgSecret: [\n \"GET /orgs/{org}/actions/secrets/{secret_name}/repositories\"\n ],\n listSelectedReposForOrgVariable: [\n \"GET /orgs/{org}/actions/variables/{name}/repositories\"\n ],\n listSelectedRepositoriesEnabledGithubActionsOrganization: [\n \"GET /orgs/{org}/actions/permissions/repositories\"\n ],\n listSelectedRepositoriesRequiredWorkflow: [\n \"GET /orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories\"\n ],\n listSelfHostedRunnersForOrg: [\"GET /orgs/{org}/actions/runners\"],\n listSelfHostedRunnersForRepo: [\"GET /repos/{owner}/{repo}/actions/runners\"],\n listWorkflowRunArtifacts: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts\"\n ],\n listWorkflowRuns: [\n 
\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs\"\n ],\n listWorkflowRunsForRepo: [\"GET /repos/{owner}/{repo}/actions/runs\"],\n reRunJobForWorkflowRun: [\n \"POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun\"\n ],\n reRunWorkflow: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun\"],\n reRunWorkflowFailedJobs: [\n \"POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs\"\n ],\n removeAllCustomLabelsFromSelfHostedRunnerForOrg: [\n \"DELETE /orgs/{org}/actions/runners/{runner_id}/labels\"\n ],\n removeAllCustomLabelsFromSelfHostedRunnerForRepo: [\n \"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\"\n ],\n removeCustomLabelFromSelfHostedRunnerForOrg: [\n \"DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}\"\n ],\n removeCustomLabelFromSelfHostedRunnerForRepo: [\n \"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}\"\n ],\n removeSelectedRepoFromOrgSecret: [\n \"DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\"\n ],\n removeSelectedRepoFromOrgVariable: [\n \"DELETE /orgs/{org}/actions/variables/{name}/repositories/{repository_id}\"\n ],\n removeSelectedRepoFromRequiredWorkflow: [\n \"DELETE /orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories/{repository_id}\"\n ],\n reviewCustomGatesForRun: [\n \"POST /repos/{owner}/{repo}/actions/runs/{run_id}/deployment_protection_rule\"\n ],\n reviewPendingDeploymentsForRun: [\n \"POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments\"\n ],\n setAllowedActionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions/selected-actions\"\n ],\n setAllowedActionsRepository: [\n \"PUT /repos/{owner}/{repo}/actions/permissions/selected-actions\"\n ],\n setCustomLabelsForSelfHostedRunnerForOrg: [\n \"PUT /orgs/{org}/actions/runners/{runner_id}/labels\"\n ],\n setCustomLabelsForSelfHostedRunnerForRepo: [\n \"PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\"\n ],\n setGithubActionsDefaultWorkflowPermissionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions/workflow\"\n ],\n setGithubActionsDefaultWorkflowPermissionsRepository: [\n \"PUT /repos/{owner}/{repo}/actions/permissions/workflow\"\n ],\n setGithubActionsPermissionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions\"\n ],\n setGithubActionsPermissionsRepository: [\n \"PUT /repos/{owner}/{repo}/actions/permissions\"\n ],\n setSelectedReposForOrgSecret: [\n \"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories\"\n ],\n setSelectedReposForOrgVariable: [\n \"PUT /orgs/{org}/actions/variables/{name}/repositories\"\n ],\n setSelectedReposToRequiredWorkflow: [\n \"PUT /orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories\"\n ],\n setSelectedRepositoriesEnabledGithubActionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions/repositories\"\n ],\n setWorkflowAccessToRepository: [\n \"PUT /repos/{owner}/{repo}/actions/permissions/access\"\n ],\n updateEnvironmentVariable: [\n \"PATCH /repositories/{repository_id}/environments/{environment_name}/variables/{name}\"\n ],\n updateOrgVariable: [\"PATCH /orgs/{org}/actions/variables/{name}\"],\n updateRepoVariable: [\n \"PATCH /repos/{owner}/{repo}/actions/variables/{name}\"\n ],\n updateRequiredWorkflow: [\n \"PATCH /orgs/{org}/actions/required_workflows/{required_workflow_id}\"\n ]\n },\n activity: {\n checkRepoIsStarredByAuthenticatedUser: [\"GET /user/starred/{owner}/{repo}\"],\n deleteRepoSubscription: [\"DELETE /repos/{owner}/{repo}/subscription\"],\n 
deleteThreadSubscription: [\n \"DELETE /notifications/threads/{thread_id}/subscription\"\n ],\n getFeeds: [\"GET /feeds\"],\n getRepoSubscription: [\"GET /repos/{owner}/{repo}/subscription\"],\n getThread: [\"GET /notifications/threads/{thread_id}\"],\n getThreadSubscriptionForAuthenticatedUser: [\n \"GET /notifications/threads/{thread_id}/subscription\"\n ],\n listEventsForAuthenticatedUser: [\"GET /users/{username}/events\"],\n listNotificationsForAuthenticatedUser: [\"GET /notifications\"],\n listOrgEventsForAuthenticatedUser: [\n \"GET /users/{username}/events/orgs/{org}\"\n ],\n listPublicEvents: [\"GET /events\"],\n listPublicEventsForRepoNetwork: [\"GET /networks/{owner}/{repo}/events\"],\n listPublicEventsForUser: [\"GET /users/{username}/events/public\"],\n listPublicOrgEvents: [\"GET /orgs/{org}/events\"],\n listReceivedEventsForUser: [\"GET /users/{username}/received_events\"],\n listReceivedPublicEventsForUser: [\n \"GET /users/{username}/received_events/public\"\n ],\n listRepoEvents: [\"GET /repos/{owner}/{repo}/events\"],\n listRepoNotificationsForAuthenticatedUser: [\n \"GET /repos/{owner}/{repo}/notifications\"\n ],\n listReposStarredByAuthenticatedUser: [\"GET /user/starred\"],\n listReposStarredByUser: [\"GET /users/{username}/starred\"],\n listReposWatchedByUser: [\"GET /users/{username}/subscriptions\"],\n listStargazersForRepo: [\"GET /repos/{owner}/{repo}/stargazers\"],\n listWatchedReposForAuthenticatedUser: [\"GET /user/subscriptions\"],\n listWatchersForRepo: [\"GET /repos/{owner}/{repo}/subscribers\"],\n markNotificationsAsRead: [\"PUT /notifications\"],\n markRepoNotificationsAsRead: [\"PUT /repos/{owner}/{repo}/notifications\"],\n markThreadAsRead: [\"PATCH /notifications/threads/{thread_id}\"],\n setRepoSubscription: [\"PUT /repos/{owner}/{repo}/subscription\"],\n setThreadSubscription: [\n \"PUT /notifications/threads/{thread_id}/subscription\"\n ],\n starRepoForAuthenticatedUser: [\"PUT /user/starred/{owner}/{repo}\"],\n unstarRepoForAuthenticatedUser: [\"DELETE /user/starred/{owner}/{repo}\"]\n },\n apps: {\n addRepoToInstallation: [\n \"PUT /user/installations/{installation_id}/repositories/{repository_id}\",\n {},\n { renamed: [\"apps\", \"addRepoToInstallationForAuthenticatedUser\"] }\n ],\n addRepoToInstallationForAuthenticatedUser: [\n \"PUT /user/installations/{installation_id}/repositories/{repository_id}\"\n ],\n checkToken: [\"POST /applications/{client_id}/token\"],\n createFromManifest: [\"POST /app-manifests/{code}/conversions\"],\n createInstallationAccessToken: [\n \"POST /app/installations/{installation_id}/access_tokens\"\n ],\n deleteAuthorization: [\"DELETE /applications/{client_id}/grant\"],\n deleteInstallation: [\"DELETE /app/installations/{installation_id}\"],\n deleteToken: [\"DELETE /applications/{client_id}/token\"],\n getAuthenticated: [\"GET /app\"],\n getBySlug: [\"GET /apps/{app_slug}\"],\n getInstallation: [\"GET /app/installations/{installation_id}\"],\n getOrgInstallation: [\"GET /orgs/{org}/installation\"],\n getRepoInstallation: [\"GET /repos/{owner}/{repo}/installation\"],\n getSubscriptionPlanForAccount: [\n \"GET /marketplace_listing/accounts/{account_id}\"\n ],\n getSubscriptionPlanForAccountStubbed: [\n \"GET /marketplace_listing/stubbed/accounts/{account_id}\"\n ],\n getUserInstallation: [\"GET /users/{username}/installation\"],\n getWebhookConfigForApp: [\"GET /app/hook/config\"],\n getWebhookDelivery: [\"GET /app/hook/deliveries/{delivery_id}\"],\n listAccountsForPlan: [\"GET 
/marketplace_listing/plans/{plan_id}/accounts\"],\n listAccountsForPlanStubbed: [\n \"GET /marketplace_listing/stubbed/plans/{plan_id}/accounts\"\n ],\n listInstallationReposForAuthenticatedUser: [\n \"GET /user/installations/{installation_id}/repositories\"\n ],\n listInstallationRequestsForAuthenticatedApp: [\n \"GET /app/installation-requests\"\n ],\n listInstallations: [\"GET /app/installations\"],\n listInstallationsForAuthenticatedUser: [\"GET /user/installations\"],\n listPlans: [\"GET /marketplace_listing/plans\"],\n listPlansStubbed: [\"GET /marketplace_listing/stubbed/plans\"],\n listReposAccessibleToInstallation: [\"GET /installation/repositories\"],\n listSubscriptionsForAuthenticatedUser: [\"GET /user/marketplace_purchases\"],\n listSubscriptionsForAuthenticatedUserStubbed: [\n \"GET /user/marketplace_purchases/stubbed\"\n ],\n listWebhookDeliveries: [\"GET /app/hook/deliveries\"],\n redeliverWebhookDelivery: [\n \"POST /app/hook/deliveries/{delivery_id}/attempts\"\n ],\n removeRepoFromInstallation: [\n \"DELETE /user/installations/{installation_id}/repositories/{repository_id}\",\n {},\n { renamed: [\"apps\", \"removeRepoFromInstallationForAuthenticatedUser\"] }\n ],\n removeRepoFromInstallationForAuthenticatedUser: [\n \"DELETE /user/installations/{installation_id}/repositories/{repository_id}\"\n ],\n resetToken: [\"PATCH /applications/{client_id}/token\"],\n revokeInstallationAccessToken: [\"DELETE /installation/token\"],\n scopeToken: [\"POST /applications/{client_id}/token/scoped\"],\n suspendInstallation: [\"PUT /app/installations/{installation_id}/suspended\"],\n unsuspendInstallation: [\n \"DELETE /app/installations/{installation_id}/suspended\"\n ],\n updateWebhookConfigForApp: [\"PATCH /app/hook/config\"]\n },\n billing: {\n getGithubActionsBillingOrg: [\"GET /orgs/{org}/settings/billing/actions\"],\n getGithubActionsBillingUser: [\n \"GET /users/{username}/settings/billing/actions\"\n ],\n getGithubPackagesBillingOrg: [\"GET /orgs/{org}/settings/billing/packages\"],\n getGithubPackagesBillingUser: [\n \"GET /users/{username}/settings/billing/packages\"\n ],\n getSharedStorageBillingOrg: [\n \"GET /orgs/{org}/settings/billing/shared-storage\"\n ],\n getSharedStorageBillingUser: [\n \"GET /users/{username}/settings/billing/shared-storage\"\n ]\n },\n checks: {\n create: [\"POST /repos/{owner}/{repo}/check-runs\"],\n createSuite: [\"POST /repos/{owner}/{repo}/check-suites\"],\n get: [\"GET /repos/{owner}/{repo}/check-runs/{check_run_id}\"],\n getSuite: [\"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}\"],\n listAnnotations: [\n \"GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations\"\n ],\n listForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/check-runs\"],\n listForSuite: [\n \"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs\"\n ],\n listSuitesForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/check-suites\"],\n rerequestRun: [\n \"POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest\"\n ],\n rerequestSuite: [\n \"POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest\"\n ],\n setSuitesPreferences: [\n \"PATCH /repos/{owner}/{repo}/check-suites/preferences\"\n ],\n update: [\"PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}\"]\n },\n codeScanning: {\n deleteAnalysis: [\n \"DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}\"\n ],\n getAlert: [\n \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\",\n {},\n { renamedParameters: { alert_id: \"alert_number\" 
} }\n ],\n getAnalysis: [\n \"GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}\"\n ],\n getCodeqlDatabase: [\n \"GET /repos/{owner}/{repo}/code-scanning/codeql/databases/{language}\"\n ],\n getDefaultSetup: [\"GET /repos/{owner}/{repo}/code-scanning/default-setup\"],\n getSarif: [\"GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}\"],\n listAlertInstances: [\n \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\"\n ],\n listAlertsForOrg: [\"GET /orgs/{org}/code-scanning/alerts\"],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/code-scanning/alerts\"],\n listAlertsInstances: [\n \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\",\n {},\n { renamed: [\"codeScanning\", \"listAlertInstances\"] }\n ],\n listCodeqlDatabases: [\n \"GET /repos/{owner}/{repo}/code-scanning/codeql/databases\"\n ],\n listRecentAnalyses: [\"GET /repos/{owner}/{repo}/code-scanning/analyses\"],\n updateAlert: [\n \"PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\"\n ],\n updateDefaultSetup: [\n \"PATCH /repos/{owner}/{repo}/code-scanning/default-setup\"\n ],\n uploadSarif: [\"POST /repos/{owner}/{repo}/code-scanning/sarifs\"]\n },\n codesOfConduct: {\n getAllCodesOfConduct: [\"GET /codes_of_conduct\"],\n getConductCode: [\"GET /codes_of_conduct/{key}\"]\n },\n codespaces: {\n addRepositoryForSecretForAuthenticatedUser: [\n \"PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}\"\n ],\n addSelectedRepoToOrgSecret: [\n \"PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}\"\n ],\n codespaceMachinesForAuthenticatedUser: [\n \"GET /user/codespaces/{codespace_name}/machines\"\n ],\n createForAuthenticatedUser: [\"POST /user/codespaces\"],\n createOrUpdateOrgSecret: [\n \"PUT /orgs/{org}/codespaces/secrets/{secret_name}\"\n ],\n createOrUpdateRepoSecret: [\n \"PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}\"\n ],\n createOrUpdateSecretForAuthenticatedUser: [\n \"PUT /user/codespaces/secrets/{secret_name}\"\n ],\n createWithPrForAuthenticatedUser: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces\"\n ],\n createWithRepoForAuthenticatedUser: [\n \"POST /repos/{owner}/{repo}/codespaces\"\n ],\n deleteCodespacesBillingUsers: [\n \"DELETE /orgs/{org}/codespaces/billing/selected_users\"\n ],\n deleteForAuthenticatedUser: [\"DELETE /user/codespaces/{codespace_name}\"],\n deleteFromOrganization: [\n \"DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}\"\n ],\n deleteOrgSecret: [\"DELETE /orgs/{org}/codespaces/secrets/{secret_name}\"],\n deleteRepoSecret: [\n \"DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}\"\n ],\n deleteSecretForAuthenticatedUser: [\n \"DELETE /user/codespaces/secrets/{secret_name}\"\n ],\n exportForAuthenticatedUser: [\n \"POST /user/codespaces/{codespace_name}/exports\"\n ],\n getCodespacesForUserInOrg: [\n \"GET /orgs/{org}/members/{username}/codespaces\"\n ],\n getExportDetailsForAuthenticatedUser: [\n \"GET /user/codespaces/{codespace_name}/exports/{export_id}\"\n ],\n getForAuthenticatedUser: [\"GET /user/codespaces/{codespace_name}\"],\n getOrgPublicKey: [\"GET /orgs/{org}/codespaces/secrets/public-key\"],\n getOrgSecret: [\"GET /orgs/{org}/codespaces/secrets/{secret_name}\"],\n getPublicKeyForAuthenticatedUser: [\n \"GET /user/codespaces/secrets/public-key\"\n ],\n getRepoPublicKey: [\n \"GET /repos/{owner}/{repo}/codespaces/secrets/public-key\"\n ],\n getRepoSecret: [\n \"GET 
/repos/{owner}/{repo}/codespaces/secrets/{secret_name}\"\n ],\n getSecretForAuthenticatedUser: [\n \"GET /user/codespaces/secrets/{secret_name}\"\n ],\n listDevcontainersInRepositoryForAuthenticatedUser: [\n \"GET /repos/{owner}/{repo}/codespaces/devcontainers\"\n ],\n listForAuthenticatedUser: [\"GET /user/codespaces\"],\n listInOrganization: [\n \"GET /orgs/{org}/codespaces\",\n {},\n { renamedParameters: { org_id: \"org\" } }\n ],\n listInRepositoryForAuthenticatedUser: [\n \"GET /repos/{owner}/{repo}/codespaces\"\n ],\n listOrgSecrets: [\"GET /orgs/{org}/codespaces/secrets\"],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/codespaces/secrets\"],\n listRepositoriesForSecretForAuthenticatedUser: [\n \"GET /user/codespaces/secrets/{secret_name}/repositories\"\n ],\n listSecretsForAuthenticatedUser: [\"GET /user/codespaces/secrets\"],\n listSelectedReposForOrgSecret: [\n \"GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories\"\n ],\n preFlightWithRepoForAuthenticatedUser: [\n \"GET /repos/{owner}/{repo}/codespaces/new\"\n ],\n publishForAuthenticatedUser: [\n \"POST /user/codespaces/{codespace_name}/publish\"\n ],\n removeRepositoryForSecretForAuthenticatedUser: [\n \"DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}\"\n ],\n removeSelectedRepoFromOrgSecret: [\n \"DELETE /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}\"\n ],\n repoMachinesForAuthenticatedUser: [\n \"GET /repos/{owner}/{repo}/codespaces/machines\"\n ],\n setCodespacesBilling: [\"PUT /orgs/{org}/codespaces/billing\"],\n setCodespacesBillingUsers: [\n \"POST /orgs/{org}/codespaces/billing/selected_users\"\n ],\n setRepositoriesForSecretForAuthenticatedUser: [\n \"PUT /user/codespaces/secrets/{secret_name}/repositories\"\n ],\n setSelectedReposForOrgSecret: [\n \"PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories\"\n ],\n startForAuthenticatedUser: [\"POST /user/codespaces/{codespace_name}/start\"],\n stopForAuthenticatedUser: [\"POST /user/codespaces/{codespace_name}/stop\"],\n stopInOrganization: [\n \"POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop\"\n ],\n updateForAuthenticatedUser: [\"PATCH /user/codespaces/{codespace_name}\"]\n },\n dependabot: {\n addSelectedRepoToOrgSecret: [\n \"PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}\"\n ],\n createOrUpdateOrgSecret: [\n \"PUT /orgs/{org}/dependabot/secrets/{secret_name}\"\n ],\n createOrUpdateRepoSecret: [\n \"PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}\"\n ],\n deleteOrgSecret: [\"DELETE /orgs/{org}/dependabot/secrets/{secret_name}\"],\n deleteRepoSecret: [\n \"DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}\"\n ],\n getAlert: [\"GET /repos/{owner}/{repo}/dependabot/alerts/{alert_number}\"],\n getOrgPublicKey: [\"GET /orgs/{org}/dependabot/secrets/public-key\"],\n getOrgSecret: [\"GET /orgs/{org}/dependabot/secrets/{secret_name}\"],\n getRepoPublicKey: [\n \"GET /repos/{owner}/{repo}/dependabot/secrets/public-key\"\n ],\n getRepoSecret: [\n \"GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}\"\n ],\n listAlertsForEnterprise: [\n \"GET /enterprises/{enterprise}/dependabot/alerts\"\n ],\n listAlertsForOrg: [\"GET /orgs/{org}/dependabot/alerts\"],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/dependabot/alerts\"],\n listOrgSecrets: [\"GET /orgs/{org}/dependabot/secrets\"],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/dependabot/secrets\"],\n listSelectedReposForOrgSecret: [\n \"GET 
/orgs/{org}/dependabot/secrets/{secret_name}/repositories\"\n ],\n removeSelectedRepoFromOrgSecret: [\n \"DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}\"\n ],\n setSelectedReposForOrgSecret: [\n \"PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories\"\n ],\n updateAlert: [\n \"PATCH /repos/{owner}/{repo}/dependabot/alerts/{alert_number}\"\n ]\n },\n dependencyGraph: {\n createRepositorySnapshot: [\n \"POST /repos/{owner}/{repo}/dependency-graph/snapshots\"\n ],\n diffRange: [\n \"GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}\"\n ],\n exportSbom: [\"GET /repos/{owner}/{repo}/dependency-graph/sbom\"]\n },\n emojis: { get: [\"GET /emojis\"] },\n gists: {\n checkIsStarred: [\"GET /gists/{gist_id}/star\"],\n create: [\"POST /gists\"],\n createComment: [\"POST /gists/{gist_id}/comments\"],\n delete: [\"DELETE /gists/{gist_id}\"],\n deleteComment: [\"DELETE /gists/{gist_id}/comments/{comment_id}\"],\n fork: [\"POST /gists/{gist_id}/forks\"],\n get: [\"GET /gists/{gist_id}\"],\n getComment: [\"GET /gists/{gist_id}/comments/{comment_id}\"],\n getRevision: [\"GET /gists/{gist_id}/{sha}\"],\n list: [\"GET /gists\"],\n listComments: [\"GET /gists/{gist_id}/comments\"],\n listCommits: [\"GET /gists/{gist_id}/commits\"],\n listForUser: [\"GET /users/{username}/gists\"],\n listForks: [\"GET /gists/{gist_id}/forks\"],\n listPublic: [\"GET /gists/public\"],\n listStarred: [\"GET /gists/starred\"],\n star: [\"PUT /gists/{gist_id}/star\"],\n unstar: [\"DELETE /gists/{gist_id}/star\"],\n update: [\"PATCH /gists/{gist_id}\"],\n updateComment: [\"PATCH /gists/{gist_id}/comments/{comment_id}\"]\n },\n git: {\n createBlob: [\"POST /repos/{owner}/{repo}/git/blobs\"],\n createCommit: [\"POST /repos/{owner}/{repo}/git/commits\"],\n createRef: [\"POST /repos/{owner}/{repo}/git/refs\"],\n createTag: [\"POST /repos/{owner}/{repo}/git/tags\"],\n createTree: [\"POST /repos/{owner}/{repo}/git/trees\"],\n deleteRef: [\"DELETE /repos/{owner}/{repo}/git/refs/{ref}\"],\n getBlob: [\"GET /repos/{owner}/{repo}/git/blobs/{file_sha}\"],\n getCommit: [\"GET /repos/{owner}/{repo}/git/commits/{commit_sha}\"],\n getRef: [\"GET /repos/{owner}/{repo}/git/ref/{ref}\"],\n getTag: [\"GET /repos/{owner}/{repo}/git/tags/{tag_sha}\"],\n getTree: [\"GET /repos/{owner}/{repo}/git/trees/{tree_sha}\"],\n listMatchingRefs: [\"GET /repos/{owner}/{repo}/git/matching-refs/{ref}\"],\n updateRef: [\"PATCH /repos/{owner}/{repo}/git/refs/{ref}\"]\n },\n gitignore: {\n getAllTemplates: [\"GET /gitignore/templates\"],\n getTemplate: [\"GET /gitignore/templates/{name}\"]\n },\n interactions: {\n getRestrictionsForAuthenticatedUser: [\"GET /user/interaction-limits\"],\n getRestrictionsForOrg: [\"GET /orgs/{org}/interaction-limits\"],\n getRestrictionsForRepo: [\"GET /repos/{owner}/{repo}/interaction-limits\"],\n getRestrictionsForYourPublicRepos: [\n \"GET /user/interaction-limits\",\n {},\n { renamed: [\"interactions\", \"getRestrictionsForAuthenticatedUser\"] }\n ],\n removeRestrictionsForAuthenticatedUser: [\"DELETE /user/interaction-limits\"],\n removeRestrictionsForOrg: [\"DELETE /orgs/{org}/interaction-limits\"],\n removeRestrictionsForRepo: [\n \"DELETE /repos/{owner}/{repo}/interaction-limits\"\n ],\n removeRestrictionsForYourPublicRepos: [\n \"DELETE /user/interaction-limits\",\n {},\n { renamed: [\"interactions\", \"removeRestrictionsForAuthenticatedUser\"] }\n ],\n setRestrictionsForAuthenticatedUser: [\"PUT /user/interaction-limits\"],\n setRestrictionsForOrg: [\"PUT 
/orgs/{org}/interaction-limits\"],\n setRestrictionsForRepo: [\"PUT /repos/{owner}/{repo}/interaction-limits\"],\n setRestrictionsForYourPublicRepos: [\n \"PUT /user/interaction-limits\",\n {},\n { renamed: [\"interactions\", \"setRestrictionsForAuthenticatedUser\"] }\n ]\n },\n issues: {\n addAssignees: [\n \"POST /repos/{owner}/{repo}/issues/{issue_number}/assignees\"\n ],\n addLabels: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n checkUserCanBeAssigned: [\"GET /repos/{owner}/{repo}/assignees/{assignee}\"],\n checkUserCanBeAssignedToIssue: [\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/assignees/{assignee}\"\n ],\n create: [\"POST /repos/{owner}/{repo}/issues\"],\n createComment: [\n \"POST /repos/{owner}/{repo}/issues/{issue_number}/comments\"\n ],\n createLabel: [\"POST /repos/{owner}/{repo}/labels\"],\n createMilestone: [\"POST /repos/{owner}/{repo}/milestones\"],\n deleteComment: [\n \"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}\"\n ],\n deleteLabel: [\"DELETE /repos/{owner}/{repo}/labels/{name}\"],\n deleteMilestone: [\n \"DELETE /repos/{owner}/{repo}/milestones/{milestone_number}\"\n ],\n get: [\"GET /repos/{owner}/{repo}/issues/{issue_number}\"],\n getComment: [\"GET /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n getEvent: [\"GET /repos/{owner}/{repo}/issues/events/{event_id}\"],\n getLabel: [\"GET /repos/{owner}/{repo}/labels/{name}\"],\n getMilestone: [\"GET /repos/{owner}/{repo}/milestones/{milestone_number}\"],\n list: [\"GET /issues\"],\n listAssignees: [\"GET /repos/{owner}/{repo}/assignees\"],\n listComments: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/comments\"],\n listCommentsForRepo: [\"GET /repos/{owner}/{repo}/issues/comments\"],\n listEvents: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/events\"],\n listEventsForRepo: [\"GET /repos/{owner}/{repo}/issues/events\"],\n listEventsForTimeline: [\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/timeline\"\n ],\n listForAuthenticatedUser: [\"GET /user/issues\"],\n listForOrg: [\"GET /orgs/{org}/issues\"],\n listForRepo: [\"GET /repos/{owner}/{repo}/issues\"],\n listLabelsForMilestone: [\n \"GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels\"\n ],\n listLabelsForRepo: [\"GET /repos/{owner}/{repo}/labels\"],\n listLabelsOnIssue: [\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/labels\"\n ],\n listMilestones: [\"GET /repos/{owner}/{repo}/milestones\"],\n lock: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n removeAllLabels: [\n \"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels\"\n ],\n removeAssignees: [\n \"DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees\"\n ],\n removeLabel: [\n \"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}\"\n ],\n setLabels: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n unlock: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n update: [\"PATCH /repos/{owner}/{repo}/issues/{issue_number}\"],\n updateComment: [\"PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n updateLabel: [\"PATCH /repos/{owner}/{repo}/labels/{name}\"],\n updateMilestone: [\n \"PATCH /repos/{owner}/{repo}/milestones/{milestone_number}\"\n ]\n },\n licenses: {\n get: [\"GET /licenses/{license}\"],\n getAllCommonlyUsed: [\"GET /licenses\"],\n getForRepo: [\"GET /repos/{owner}/{repo}/license\"]\n },\n markdown: {\n render: [\"POST /markdown\"],\n renderRaw: [\n \"POST /markdown/raw\",\n { headers: { \"content-type\": \"text/plain; charset=utf-8\" } }\n ]\n 
},\n meta: {\n get: [\"GET /meta\"],\n getAllVersions: [\"GET /versions\"],\n getOctocat: [\"GET /octocat\"],\n getZen: [\"GET /zen\"],\n root: [\"GET /\"]\n },\n migrations: {\n cancelImport: [\"DELETE /repos/{owner}/{repo}/import\"],\n deleteArchiveForAuthenticatedUser: [\n \"DELETE /user/migrations/{migration_id}/archive\"\n ],\n deleteArchiveForOrg: [\n \"DELETE /orgs/{org}/migrations/{migration_id}/archive\"\n ],\n downloadArchiveForOrg: [\n \"GET /orgs/{org}/migrations/{migration_id}/archive\"\n ],\n getArchiveForAuthenticatedUser: [\n \"GET /user/migrations/{migration_id}/archive\"\n ],\n getCommitAuthors: [\"GET /repos/{owner}/{repo}/import/authors\"],\n getImportStatus: [\"GET /repos/{owner}/{repo}/import\"],\n getLargeFiles: [\"GET /repos/{owner}/{repo}/import/large_files\"],\n getStatusForAuthenticatedUser: [\"GET /user/migrations/{migration_id}\"],\n getStatusForOrg: [\"GET /orgs/{org}/migrations/{migration_id}\"],\n listForAuthenticatedUser: [\"GET /user/migrations\"],\n listForOrg: [\"GET /orgs/{org}/migrations\"],\n listReposForAuthenticatedUser: [\n \"GET /user/migrations/{migration_id}/repositories\"\n ],\n listReposForOrg: [\"GET /orgs/{org}/migrations/{migration_id}/repositories\"],\n listReposForUser: [\n \"GET /user/migrations/{migration_id}/repositories\",\n {},\n { renamed: [\"migrations\", \"listReposForAuthenticatedUser\"] }\n ],\n mapCommitAuthor: [\"PATCH /repos/{owner}/{repo}/import/authors/{author_id}\"],\n setLfsPreference: [\"PATCH /repos/{owner}/{repo}/import/lfs\"],\n startForAuthenticatedUser: [\"POST /user/migrations\"],\n startForOrg: [\"POST /orgs/{org}/migrations\"],\n startImport: [\"PUT /repos/{owner}/{repo}/import\"],\n unlockRepoForAuthenticatedUser: [\n \"DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock\"\n ],\n unlockRepoForOrg: [\n \"DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock\"\n ],\n updateImport: [\"PATCH /repos/{owner}/{repo}/import\"]\n },\n orgs: {\n addSecurityManagerTeam: [\n \"PUT /orgs/{org}/security-managers/teams/{team_slug}\"\n ],\n blockUser: [\"PUT /orgs/{org}/blocks/{username}\"],\n cancelInvitation: [\"DELETE /orgs/{org}/invitations/{invitation_id}\"],\n checkBlockedUser: [\"GET /orgs/{org}/blocks/{username}\"],\n checkMembershipForUser: [\"GET /orgs/{org}/members/{username}\"],\n checkPublicMembershipForUser: [\"GET /orgs/{org}/public_members/{username}\"],\n convertMemberToOutsideCollaborator: [\n \"PUT /orgs/{org}/outside_collaborators/{username}\"\n ],\n createInvitation: [\"POST /orgs/{org}/invitations\"],\n createWebhook: [\"POST /orgs/{org}/hooks\"],\n delete: [\"DELETE /orgs/{org}\"],\n deleteWebhook: [\"DELETE /orgs/{org}/hooks/{hook_id}\"],\n enableOrDisableSecurityProductOnAllOrgRepos: [\n \"POST /orgs/{org}/{security_product}/{enablement}\"\n ],\n get: [\"GET /orgs/{org}\"],\n getMembershipForAuthenticatedUser: [\"GET /user/memberships/orgs/{org}\"],\n getMembershipForUser: [\"GET /orgs/{org}/memberships/{username}\"],\n getWebhook: [\"GET /orgs/{org}/hooks/{hook_id}\"],\n getWebhookConfigForOrg: [\"GET /orgs/{org}/hooks/{hook_id}/config\"],\n getWebhookDelivery: [\n \"GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}\"\n ],\n list: [\"GET /organizations\"],\n listAppInstallations: [\"GET /orgs/{org}/installations\"],\n listBlockedUsers: [\"GET /orgs/{org}/blocks\"],\n listFailedInvitations: [\"GET /orgs/{org}/failed_invitations\"],\n listForAuthenticatedUser: [\"GET /user/orgs\"],\n listForUser: [\"GET /users/{username}/orgs\"],\n listInvitationTeams: [\"GET 
/orgs/{org}/invitations/{invitation_id}/teams\"],\n listMembers: [\"GET /orgs/{org}/members\"],\n listMembershipsForAuthenticatedUser: [\"GET /user/memberships/orgs\"],\n listOutsideCollaborators: [\"GET /orgs/{org}/outside_collaborators\"],\n listPatGrantRepositories: [\n \"GET /organizations/{org}/personal-access-tokens/{pat_id}/repositories\"\n ],\n listPatGrantRequestRepositories: [\n \"GET /organizations/{org}/personal-access-token-requests/{pat_request_id}/repositories\"\n ],\n listPatGrantRequests: [\n \"GET /organizations/{org}/personal-access-token-requests\"\n ],\n listPatGrants: [\"GET /organizations/{org}/personal-access-tokens\"],\n listPendingInvitations: [\"GET /orgs/{org}/invitations\"],\n listPublicMembers: [\"GET /orgs/{org}/public_members\"],\n listSecurityManagerTeams: [\"GET /orgs/{org}/security-managers\"],\n listWebhookDeliveries: [\"GET /orgs/{org}/hooks/{hook_id}/deliveries\"],\n listWebhooks: [\"GET /orgs/{org}/hooks\"],\n pingWebhook: [\"POST /orgs/{org}/hooks/{hook_id}/pings\"],\n redeliverWebhookDelivery: [\n \"POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts\"\n ],\n removeMember: [\"DELETE /orgs/{org}/members/{username}\"],\n removeMembershipForUser: [\"DELETE /orgs/{org}/memberships/{username}\"],\n removeOutsideCollaborator: [\n \"DELETE /orgs/{org}/outside_collaborators/{username}\"\n ],\n removePublicMembershipForAuthenticatedUser: [\n \"DELETE /orgs/{org}/public_members/{username}\"\n ],\n removeSecurityManagerTeam: [\n \"DELETE /orgs/{org}/security-managers/teams/{team_slug}\"\n ],\n reviewPatGrantRequest: [\n \"POST /organizations/{org}/personal-access-token-requests/{pat_request_id}\"\n ],\n reviewPatGrantRequestsInBulk: [\n \"POST /organizations/{org}/personal-access-token-requests\"\n ],\n setMembershipForUser: [\"PUT /orgs/{org}/memberships/{username}\"],\n setPublicMembershipForAuthenticatedUser: [\n \"PUT /orgs/{org}/public_members/{username}\"\n ],\n unblockUser: [\"DELETE /orgs/{org}/blocks/{username}\"],\n update: [\"PATCH /orgs/{org}\"],\n updateMembershipForAuthenticatedUser: [\n \"PATCH /user/memberships/orgs/{org}\"\n ],\n updatePatAccess: [\n \"POST /organizations/{org}/personal-access-tokens/{pat_id}\"\n ],\n updatePatAccesses: [\"POST /organizations/{org}/personal-access-tokens\"],\n updateWebhook: [\"PATCH /orgs/{org}/hooks/{hook_id}\"],\n updateWebhookConfigForOrg: [\"PATCH /orgs/{org}/hooks/{hook_id}/config\"]\n },\n packages: {\n deletePackageForAuthenticatedUser: [\n \"DELETE /user/packages/{package_type}/{package_name}\"\n ],\n deletePackageForOrg: [\n \"DELETE /orgs/{org}/packages/{package_type}/{package_name}\"\n ],\n deletePackageForUser: [\n \"DELETE /users/{username}/packages/{package_type}/{package_name}\"\n ],\n deletePackageVersionForAuthenticatedUser: [\n \"DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}\"\n ],\n deletePackageVersionForOrg: [\n \"DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}\"\n ],\n deletePackageVersionForUser: [\n \"DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}\"\n ],\n getAllPackageVersionsForAPackageOwnedByAnOrg: [\n \"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\",\n {},\n { renamed: [\"packages\", \"getAllPackageVersionsForPackageOwnedByOrg\"] }\n ],\n getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: [\n \"GET /user/packages/{package_type}/{package_name}/versions\",\n {},\n {\n renamed: [\n \"packages\",\n 
\"getAllPackageVersionsForPackageOwnedByAuthenticatedUser\"\n ]\n }\n ],\n getAllPackageVersionsForPackageOwnedByAuthenticatedUser: [\n \"GET /user/packages/{package_type}/{package_name}/versions\"\n ],\n getAllPackageVersionsForPackageOwnedByOrg: [\n \"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\"\n ],\n getAllPackageVersionsForPackageOwnedByUser: [\n \"GET /users/{username}/packages/{package_type}/{package_name}/versions\"\n ],\n getPackageForAuthenticatedUser: [\n \"GET /user/packages/{package_type}/{package_name}\"\n ],\n getPackageForOrganization: [\n \"GET /orgs/{org}/packages/{package_type}/{package_name}\"\n ],\n getPackageForUser: [\n \"GET /users/{username}/packages/{package_type}/{package_name}\"\n ],\n getPackageVersionForAuthenticatedUser: [\n \"GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}\"\n ],\n getPackageVersionForOrganization: [\n \"GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}\"\n ],\n getPackageVersionForUser: [\n \"GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}\"\n ],\n listDockerMigrationConflictingPackagesForAuthenticatedUser: [\n \"GET /user/docker/conflicts\"\n ],\n listDockerMigrationConflictingPackagesForOrganization: [\n \"GET /orgs/{org}/docker/conflicts\"\n ],\n listDockerMigrationConflictingPackagesForUser: [\n \"GET /users/{username}/docker/conflicts\"\n ],\n listPackagesForAuthenticatedUser: [\"GET /user/packages\"],\n listPackagesForOrganization: [\"GET /orgs/{org}/packages\"],\n listPackagesForUser: [\"GET /users/{username}/packages\"],\n restorePackageForAuthenticatedUser: [\n \"POST /user/packages/{package_type}/{package_name}/restore{?token}\"\n ],\n restorePackageForOrg: [\n \"POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}\"\n ],\n restorePackageForUser: [\n \"POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}\"\n ],\n restorePackageVersionForAuthenticatedUser: [\n \"POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\"\n ],\n restorePackageVersionForOrg: [\n \"POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\"\n ],\n restorePackageVersionForUser: [\n \"POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\"\n ]\n },\n projects: {\n addCollaborator: [\"PUT /projects/{project_id}/collaborators/{username}\"],\n createCard: [\"POST /projects/columns/{column_id}/cards\"],\n createColumn: [\"POST /projects/{project_id}/columns\"],\n createForAuthenticatedUser: [\"POST /user/projects\"],\n createForOrg: [\"POST /orgs/{org}/projects\"],\n createForRepo: [\"POST /repos/{owner}/{repo}/projects\"],\n delete: [\"DELETE /projects/{project_id}\"],\n deleteCard: [\"DELETE /projects/columns/cards/{card_id}\"],\n deleteColumn: [\"DELETE /projects/columns/{column_id}\"],\n get: [\"GET /projects/{project_id}\"],\n getCard: [\"GET /projects/columns/cards/{card_id}\"],\n getColumn: [\"GET /projects/columns/{column_id}\"],\n getPermissionForUser: [\n \"GET /projects/{project_id}/collaborators/{username}/permission\"\n ],\n listCards: [\"GET /projects/columns/{column_id}/cards\"],\n listCollaborators: [\"GET /projects/{project_id}/collaborators\"],\n listColumns: [\"GET /projects/{project_id}/columns\"],\n listForOrg: [\"GET /orgs/{org}/projects\"],\n listForRepo: [\"GET /repos/{owner}/{repo}/projects\"],\n listForUser: [\"GET /users/{username}/projects\"],\n 
moveCard: [\"POST /projects/columns/cards/{card_id}/moves\"],\n moveColumn: [\"POST /projects/columns/{column_id}/moves\"],\n removeCollaborator: [\n \"DELETE /projects/{project_id}/collaborators/{username}\"\n ],\n update: [\"PATCH /projects/{project_id}\"],\n updateCard: [\"PATCH /projects/columns/cards/{card_id}\"],\n updateColumn: [\"PATCH /projects/columns/{column_id}\"]\n },\n pulls: {\n checkIfMerged: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n create: [\"POST /repos/{owner}/{repo}/pulls\"],\n createReplyForReviewComment: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies\"\n ],\n createReview: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n createReviewComment: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments\"\n ],\n deletePendingReview: [\n \"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"\n ],\n deleteReviewComment: [\n \"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}\"\n ],\n dismissReview: [\n \"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals\"\n ],\n get: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}\"],\n getReview: [\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"\n ],\n getReviewComment: [\"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}\"],\n list: [\"GET /repos/{owner}/{repo}/pulls\"],\n listCommentsForReview: [\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments\"\n ],\n listCommits: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/commits\"],\n listFiles: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/files\"],\n listRequestedReviewers: [\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"\n ],\n listReviewComments: [\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/comments\"\n ],\n listReviewCommentsForRepo: [\"GET /repos/{owner}/{repo}/pulls/comments\"],\n listReviews: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n merge: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n removeRequestedReviewers: [\n \"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"\n ],\n requestReviewers: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"\n ],\n submitReview: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events\"\n ],\n update: [\"PATCH /repos/{owner}/{repo}/pulls/{pull_number}\"],\n updateBranch: [\n \"PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch\"\n ],\n updateReview: [\n \"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"\n ],\n updateReviewComment: [\n \"PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}\"\n ]\n },\n rateLimit: { get: [\"GET /rate_limit\"] },\n reactions: {\n createForCommitComment: [\n \"POST /repos/{owner}/{repo}/comments/{comment_id}/reactions\"\n ],\n createForIssue: [\n \"POST /repos/{owner}/{repo}/issues/{issue_number}/reactions\"\n ],\n createForIssueComment: [\n \"POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\"\n ],\n createForPullRequestReviewComment: [\n \"POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\"\n ],\n createForRelease: [\n \"POST /repos/{owner}/{repo}/releases/{release_id}/reactions\"\n ],\n createForTeamDiscussionCommentInOrg: [\n \"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\"\n ],\n createForTeamDiscussionInOrg: [\n \"POST 
/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\"\n ],\n deleteForCommitComment: [\n \"DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}\"\n ],\n deleteForIssue: [\n \"DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}\"\n ],\n deleteForIssueComment: [\n \"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}\"\n ],\n deleteForPullRequestComment: [\n \"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}\"\n ],\n deleteForRelease: [\n \"DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}\"\n ],\n deleteForTeamDiscussion: [\n \"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}\"\n ],\n deleteForTeamDiscussionComment: [\n \"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}\"\n ],\n listForCommitComment: [\n \"GET /repos/{owner}/{repo}/comments/{comment_id}/reactions\"\n ],\n listForIssue: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/reactions\"],\n listForIssueComment: [\n \"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\"\n ],\n listForPullRequestReviewComment: [\n \"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\"\n ],\n listForRelease: [\n \"GET /repos/{owner}/{repo}/releases/{release_id}/reactions\"\n ],\n listForTeamDiscussionCommentInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\"\n ],\n listForTeamDiscussionInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\"\n ]\n },\n repos: {\n acceptInvitation: [\n \"PATCH /user/repository_invitations/{invitation_id}\",\n {},\n { renamed: [\"repos\", \"acceptInvitationForAuthenticatedUser\"] }\n ],\n acceptInvitationForAuthenticatedUser: [\n \"PATCH /user/repository_invitations/{invitation_id}\"\n ],\n addAppAccessRestrictions: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\",\n {},\n { mapToData: \"apps\" }\n ],\n addCollaborator: [\"PUT /repos/{owner}/{repo}/collaborators/{username}\"],\n addStatusCheckContexts: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\",\n {},\n { mapToData: \"contexts\" }\n ],\n addTeamAccessRestrictions: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\",\n {},\n { mapToData: \"teams\" }\n ],\n addUserAccessRestrictions: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\",\n {},\n { mapToData: \"users\" }\n ],\n checkCollaborator: [\"GET /repos/{owner}/{repo}/collaborators/{username}\"],\n checkVulnerabilityAlerts: [\n \"GET /repos/{owner}/{repo}/vulnerability-alerts\"\n ],\n codeownersErrors: [\"GET /repos/{owner}/{repo}/codeowners/errors\"],\n compareCommits: [\"GET /repos/{owner}/{repo}/compare/{base}...{head}\"],\n compareCommitsWithBasehead: [\n \"GET /repos/{owner}/{repo}/compare/{basehead}\"\n ],\n createAutolink: [\"POST /repos/{owner}/{repo}/autolinks\"],\n createCommitComment: [\n \"POST /repos/{owner}/{repo}/commits/{commit_sha}/comments\"\n ],\n createCommitSignatureProtection: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\"\n ],\n createCommitStatus: [\"POST /repos/{owner}/{repo}/statuses/{sha}\"],\n createDeployKey: [\"POST /repos/{owner}/{repo}/keys\"],\n createDeployment: [\"POST /repos/{owner}/{repo}/deployments\"],\n 
createDeploymentBranchPolicy: [\n \"POST /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies\"\n ],\n createDeploymentProtectionRule: [\n \"POST /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules\"\n ],\n createDeploymentStatus: [\n \"POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\"\n ],\n createDispatchEvent: [\"POST /repos/{owner}/{repo}/dispatches\"],\n createForAuthenticatedUser: [\"POST /user/repos\"],\n createFork: [\"POST /repos/{owner}/{repo}/forks\"],\n createInOrg: [\"POST /orgs/{org}/repos\"],\n createOrUpdateEnvironment: [\n \"PUT /repos/{owner}/{repo}/environments/{environment_name}\"\n ],\n createOrUpdateFileContents: [\"PUT /repos/{owner}/{repo}/contents/{path}\"],\n createOrgRuleset: [\"POST /orgs/{org}/rulesets\"],\n createPagesDeployment: [\"POST /repos/{owner}/{repo}/pages/deployment\"],\n createPagesSite: [\"POST /repos/{owner}/{repo}/pages\"],\n createRelease: [\"POST /repos/{owner}/{repo}/releases\"],\n createRepoRuleset: [\"POST /repos/{owner}/{repo}/rulesets\"],\n createTagProtection: [\"POST /repos/{owner}/{repo}/tags/protection\"],\n createUsingTemplate: [\n \"POST /repos/{template_owner}/{template_repo}/generate\"\n ],\n createWebhook: [\"POST /repos/{owner}/{repo}/hooks\"],\n declineInvitation: [\n \"DELETE /user/repository_invitations/{invitation_id}\",\n {},\n { renamed: [\"repos\", \"declineInvitationForAuthenticatedUser\"] }\n ],\n declineInvitationForAuthenticatedUser: [\n \"DELETE /user/repository_invitations/{invitation_id}\"\n ],\n delete: [\"DELETE /repos/{owner}/{repo}\"],\n deleteAccessRestrictions: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions\"\n ],\n deleteAdminBranchProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"\n ],\n deleteAnEnvironment: [\n \"DELETE /repos/{owner}/{repo}/environments/{environment_name}\"\n ],\n deleteAutolink: [\"DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}\"],\n deleteBranchProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection\"\n ],\n deleteCommitComment: [\"DELETE /repos/{owner}/{repo}/comments/{comment_id}\"],\n deleteCommitSignatureProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\"\n ],\n deleteDeployKey: [\"DELETE /repos/{owner}/{repo}/keys/{key_id}\"],\n deleteDeployment: [\n \"DELETE /repos/{owner}/{repo}/deployments/{deployment_id}\"\n ],\n deleteDeploymentBranchPolicy: [\n \"DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}\"\n ],\n deleteFile: [\"DELETE /repos/{owner}/{repo}/contents/{path}\"],\n deleteInvitation: [\n \"DELETE /repos/{owner}/{repo}/invitations/{invitation_id}\"\n ],\n deleteOrgRuleset: [\"DELETE /orgs/{org}/rulesets/{ruleset_id}\"],\n deletePagesSite: [\"DELETE /repos/{owner}/{repo}/pages\"],\n deletePullRequestReviewProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"\n ],\n deleteRelease: [\"DELETE /repos/{owner}/{repo}/releases/{release_id}\"],\n deleteReleaseAsset: [\n \"DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}\"\n ],\n deleteRepoRuleset: [\"DELETE /repos/{owner}/{repo}/rulesets/{ruleset_id}\"],\n deleteTagProtection: [\n \"DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}\"\n ],\n deleteWebhook: [\"DELETE /repos/{owner}/{repo}/hooks/{hook_id}\"],\n disableAutomatedSecurityFixes: [\n \"DELETE 
/repos/{owner}/{repo}/automated-security-fixes\"\n ],\n disableDeploymentProtectionRule: [\n \"DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}\"\n ],\n disableLfsForRepo: [\"DELETE /repos/{owner}/{repo}/lfs\"],\n disableVulnerabilityAlerts: [\n \"DELETE /repos/{owner}/{repo}/vulnerability-alerts\"\n ],\n downloadArchive: [\n \"GET /repos/{owner}/{repo}/zipball/{ref}\",\n {},\n { renamed: [\"repos\", \"downloadZipballArchive\"] }\n ],\n downloadTarballArchive: [\"GET /repos/{owner}/{repo}/tarball/{ref}\"],\n downloadZipballArchive: [\"GET /repos/{owner}/{repo}/zipball/{ref}\"],\n enableAutomatedSecurityFixes: [\n \"PUT /repos/{owner}/{repo}/automated-security-fixes\"\n ],\n enableLfsForRepo: [\"PUT /repos/{owner}/{repo}/lfs\"],\n enableVulnerabilityAlerts: [\n \"PUT /repos/{owner}/{repo}/vulnerability-alerts\"\n ],\n generateReleaseNotes: [\n \"POST /repos/{owner}/{repo}/releases/generate-notes\"\n ],\n get: [\"GET /repos/{owner}/{repo}\"],\n getAccessRestrictions: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions\"\n ],\n getAdminBranchProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"\n ],\n getAllDeploymentProtectionRules: [\n \"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules\"\n ],\n getAllEnvironments: [\"GET /repos/{owner}/{repo}/environments\"],\n getAllStatusCheckContexts: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\"\n ],\n getAllTopics: [\"GET /repos/{owner}/{repo}/topics\"],\n getAppsWithAccessToProtectedBranch: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\"\n ],\n getAutolink: [\"GET /repos/{owner}/{repo}/autolinks/{autolink_id}\"],\n getBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}\"],\n getBranchProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection\"\n ],\n getBranchRules: [\"GET /repos/{owner}/{repo}/rules/branches/{branch}\"],\n getClones: [\"GET /repos/{owner}/{repo}/traffic/clones\"],\n getCodeFrequencyStats: [\"GET /repos/{owner}/{repo}/stats/code_frequency\"],\n getCollaboratorPermissionLevel: [\n \"GET /repos/{owner}/{repo}/collaborators/{username}/permission\"\n ],\n getCombinedStatusForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/status\"],\n getCommit: [\"GET /repos/{owner}/{repo}/commits/{ref}\"],\n getCommitActivityStats: [\"GET /repos/{owner}/{repo}/stats/commit_activity\"],\n getCommitComment: [\"GET /repos/{owner}/{repo}/comments/{comment_id}\"],\n getCommitSignatureProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\"\n ],\n getCommunityProfileMetrics: [\"GET /repos/{owner}/{repo}/community/profile\"],\n getContent: [\"GET /repos/{owner}/{repo}/contents/{path}\"],\n getContributorsStats: [\"GET /repos/{owner}/{repo}/stats/contributors\"],\n getCustomDeploymentProtectionRule: [\n \"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}\"\n ],\n getDeployKey: [\"GET /repos/{owner}/{repo}/keys/{key_id}\"],\n getDeployment: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}\"],\n getDeploymentBranchPolicy: [\n \"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}\"\n ],\n getDeploymentStatus: [\n \"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}\"\n ],\n getEnvironment: [\n \"GET 
/repos/{owner}/{repo}/environments/{environment_name}\"\n ],\n getLatestPagesBuild: [\"GET /repos/{owner}/{repo}/pages/builds/latest\"],\n getLatestRelease: [\"GET /repos/{owner}/{repo}/releases/latest\"],\n getOrgRuleset: [\"GET /orgs/{org}/rulesets/{ruleset_id}\"],\n getOrgRulesets: [\"GET /orgs/{org}/rulesets\"],\n getPages: [\"GET /repos/{owner}/{repo}/pages\"],\n getPagesBuild: [\"GET /repos/{owner}/{repo}/pages/builds/{build_id}\"],\n getPagesHealthCheck: [\"GET /repos/{owner}/{repo}/pages/health\"],\n getParticipationStats: [\"GET /repos/{owner}/{repo}/stats/participation\"],\n getPullRequestReviewProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"\n ],\n getPunchCardStats: [\"GET /repos/{owner}/{repo}/stats/punch_card\"],\n getReadme: [\"GET /repos/{owner}/{repo}/readme\"],\n getReadmeInDirectory: [\"GET /repos/{owner}/{repo}/readme/{dir}\"],\n getRelease: [\"GET /repos/{owner}/{repo}/releases/{release_id}\"],\n getReleaseAsset: [\"GET /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n getReleaseByTag: [\"GET /repos/{owner}/{repo}/releases/tags/{tag}\"],\n getRepoRuleset: [\"GET /repos/{owner}/{repo}/rulesets/{ruleset_id}\"],\n getRepoRulesets: [\"GET /repos/{owner}/{repo}/rulesets\"],\n getStatusChecksProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"\n ],\n getTeamsWithAccessToProtectedBranch: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\"\n ],\n getTopPaths: [\"GET /repos/{owner}/{repo}/traffic/popular/paths\"],\n getTopReferrers: [\"GET /repos/{owner}/{repo}/traffic/popular/referrers\"],\n getUsersWithAccessToProtectedBranch: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\"\n ],\n getViews: [\"GET /repos/{owner}/{repo}/traffic/views\"],\n getWebhook: [\"GET /repos/{owner}/{repo}/hooks/{hook_id}\"],\n getWebhookConfigForRepo: [\n \"GET /repos/{owner}/{repo}/hooks/{hook_id}/config\"\n ],\n getWebhookDelivery: [\n \"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}\"\n ],\n listAutolinks: [\"GET /repos/{owner}/{repo}/autolinks\"],\n listBranches: [\"GET /repos/{owner}/{repo}/branches\"],\n listBranchesForHeadCommit: [\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head\"\n ],\n listCollaborators: [\"GET /repos/{owner}/{repo}/collaborators\"],\n listCommentsForCommit: [\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/comments\"\n ],\n listCommitCommentsForRepo: [\"GET /repos/{owner}/{repo}/comments\"],\n listCommitStatusesForRef: [\n \"GET /repos/{owner}/{repo}/commits/{ref}/statuses\"\n ],\n listCommits: [\"GET /repos/{owner}/{repo}/commits\"],\n listContributors: [\"GET /repos/{owner}/{repo}/contributors\"],\n listCustomDeploymentRuleIntegrations: [\n \"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps\"\n ],\n listDeployKeys: [\"GET /repos/{owner}/{repo}/keys\"],\n listDeploymentBranchPolicies: [\n \"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies\"\n ],\n listDeploymentStatuses: [\n \"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\"\n ],\n listDeployments: [\"GET /repos/{owner}/{repo}/deployments\"],\n listForAuthenticatedUser: [\"GET /user/repos\"],\n listForOrg: [\"GET /orgs/{org}/repos\"],\n listForUser: [\"GET /users/{username}/repos\"],\n listForks: [\"GET /repos/{owner}/{repo}/forks\"],\n listInvitations: [\"GET /repos/{owner}/{repo}/invitations\"],\n 
listInvitationsForAuthenticatedUser: [\"GET /user/repository_invitations\"],\n listLanguages: [\"GET /repos/{owner}/{repo}/languages\"],\n listPagesBuilds: [\"GET /repos/{owner}/{repo}/pages/builds\"],\n listPublic: [\"GET /repositories\"],\n listPullRequestsAssociatedWithCommit: [\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls\"\n ],\n listReleaseAssets: [\n \"GET /repos/{owner}/{repo}/releases/{release_id}/assets\"\n ],\n listReleases: [\"GET /repos/{owner}/{repo}/releases\"],\n listTagProtection: [\"GET /repos/{owner}/{repo}/tags/protection\"],\n listTags: [\"GET /repos/{owner}/{repo}/tags\"],\n listTeams: [\"GET /repos/{owner}/{repo}/teams\"],\n listWebhookDeliveries: [\n \"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries\"\n ],\n listWebhooks: [\"GET /repos/{owner}/{repo}/hooks\"],\n merge: [\"POST /repos/{owner}/{repo}/merges\"],\n mergeUpstream: [\"POST /repos/{owner}/{repo}/merge-upstream\"],\n pingWebhook: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/pings\"],\n redeliverWebhookDelivery: [\n \"POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts\"\n ],\n removeAppAccessRestrictions: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\",\n {},\n { mapToData: \"apps\" }\n ],\n removeCollaborator: [\n \"DELETE /repos/{owner}/{repo}/collaborators/{username}\"\n ],\n removeStatusCheckContexts: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\",\n {},\n { mapToData: \"contexts\" }\n ],\n removeStatusCheckProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"\n ],\n removeTeamAccessRestrictions: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\",\n {},\n { mapToData: \"teams\" }\n ],\n removeUserAccessRestrictions: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\",\n {},\n { mapToData: \"users\" }\n ],\n renameBranch: [\"POST /repos/{owner}/{repo}/branches/{branch}/rename\"],\n replaceAllTopics: [\"PUT /repos/{owner}/{repo}/topics\"],\n requestPagesBuild: [\"POST /repos/{owner}/{repo}/pages/builds\"],\n setAdminBranchProtection: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"\n ],\n setAppAccessRestrictions: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\",\n {},\n { mapToData: \"apps\" }\n ],\n setStatusCheckContexts: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\",\n {},\n { mapToData: \"contexts\" }\n ],\n setTeamAccessRestrictions: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\",\n {},\n { mapToData: \"teams\" }\n ],\n setUserAccessRestrictions: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\",\n {},\n { mapToData: \"users\" }\n ],\n testPushWebhook: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/tests\"],\n transfer: [\"POST /repos/{owner}/{repo}/transfer\"],\n update: [\"PATCH /repos/{owner}/{repo}\"],\n updateBranchProtection: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection\"\n ],\n updateCommitComment: [\"PATCH /repos/{owner}/{repo}/comments/{comment_id}\"],\n updateDeploymentBranchPolicy: [\n \"PUT /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}\"\n ],\n updateInformationAboutPagesSite: [\"PUT /repos/{owner}/{repo}/pages\"],\n updateInvitation: [\n \"PATCH /repos/{owner}/{repo}/invitations/{invitation_id}\"\n ],\n 
updateOrgRuleset: [\"PUT /orgs/{org}/rulesets/{ruleset_id}\"],\n updatePullRequestReviewProtection: [\n \"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"\n ],\n updateRelease: [\"PATCH /repos/{owner}/{repo}/releases/{release_id}\"],\n updateReleaseAsset: [\n \"PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}\"\n ],\n updateRepoRuleset: [\"PUT /repos/{owner}/{repo}/rulesets/{ruleset_id}\"],\n updateStatusCheckPotection: [\n \"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\",\n {},\n { renamed: [\"repos\", \"updateStatusCheckProtection\"] }\n ],\n updateStatusCheckProtection: [\n \"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"\n ],\n updateWebhook: [\"PATCH /repos/{owner}/{repo}/hooks/{hook_id}\"],\n updateWebhookConfigForRepo: [\n \"PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config\"\n ],\n uploadReleaseAsset: [\n \"POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}\",\n { baseUrl: \"https://uploads.github.com\" }\n ]\n },\n search: {\n code: [\"GET /search/code\"],\n commits: [\"GET /search/commits\"],\n issuesAndPullRequests: [\"GET /search/issues\"],\n labels: [\"GET /search/labels\"],\n repos: [\"GET /search/repositories\"],\n topics: [\"GET /search/topics\"],\n users: [\"GET /search/users\"]\n },\n secretScanning: {\n getAlert: [\n \"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}\"\n ],\n listAlertsForEnterprise: [\n \"GET /enterprises/{enterprise}/secret-scanning/alerts\"\n ],\n listAlertsForOrg: [\"GET /orgs/{org}/secret-scanning/alerts\"],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/secret-scanning/alerts\"],\n listLocationsForAlert: [\n \"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations\"\n ],\n updateAlert: [\n \"PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}\"\n ]\n },\n securityAdvisories: {\n createPrivateVulnerabilityReport: [\n \"POST /repos/{owner}/{repo}/security-advisories/reports\"\n ],\n createRepositoryAdvisory: [\n \"POST /repos/{owner}/{repo}/security-advisories\"\n ],\n getRepositoryAdvisory: [\n \"GET /repos/{owner}/{repo}/security-advisories/{ghsa_id}\"\n ],\n listRepositoryAdvisories: [\"GET /repos/{owner}/{repo}/security-advisories\"],\n updateRepositoryAdvisory: [\n \"PATCH /repos/{owner}/{repo}/security-advisories/{ghsa_id}\"\n ]\n },\n teams: {\n addOrUpdateMembershipForUserInOrg: [\n \"PUT /orgs/{org}/teams/{team_slug}/memberships/{username}\"\n ],\n addOrUpdateProjectPermissionsInOrg: [\n \"PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}\"\n ],\n addOrUpdateRepoPermissionsInOrg: [\n \"PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"\n ],\n checkPermissionsForProjectInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/projects/{project_id}\"\n ],\n checkPermissionsForRepoInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"\n ],\n create: [\"POST /orgs/{org}/teams\"],\n createDiscussionCommentInOrg: [\n \"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\"\n ],\n createDiscussionInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions\"],\n deleteDiscussionCommentInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"\n ],\n deleteDiscussionInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"\n ],\n deleteInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}\"],\n getByName: [\"GET /orgs/{org}/teams/{team_slug}\"],\n 
getDiscussionCommentInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"\n ],\n getDiscussionInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"\n ],\n getMembershipForUserInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/memberships/{username}\"\n ],\n list: [\"GET /orgs/{org}/teams\"],\n listChildInOrg: [\"GET /orgs/{org}/teams/{team_slug}/teams\"],\n listDiscussionCommentsInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\"\n ],\n listDiscussionsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions\"],\n listForAuthenticatedUser: [\"GET /user/teams\"],\n listMembersInOrg: [\"GET /orgs/{org}/teams/{team_slug}/members\"],\n listPendingInvitationsInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/invitations\"\n ],\n listProjectsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/projects\"],\n listReposInOrg: [\"GET /orgs/{org}/teams/{team_slug}/repos\"],\n removeMembershipForUserInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}\"\n ],\n removeProjectInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}\"\n ],\n removeRepoInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"\n ],\n updateDiscussionCommentInOrg: [\n \"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"\n ],\n updateDiscussionInOrg: [\n \"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"\n ],\n updateInOrg: [\"PATCH /orgs/{org}/teams/{team_slug}\"]\n },\n users: {\n addEmailForAuthenticated: [\n \"POST /user/emails\",\n {},\n { renamed: [\"users\", \"addEmailForAuthenticatedUser\"] }\n ],\n addEmailForAuthenticatedUser: [\"POST /user/emails\"],\n addSocialAccountForAuthenticatedUser: [\"POST /user/social_accounts\"],\n block: [\"PUT /user/blocks/{username}\"],\n checkBlocked: [\"GET /user/blocks/{username}\"],\n checkFollowingForUser: [\"GET /users/{username}/following/{target_user}\"],\n checkPersonIsFollowedByAuthenticated: [\"GET /user/following/{username}\"],\n createGpgKeyForAuthenticated: [\n \"POST /user/gpg_keys\",\n {},\n { renamed: [\"users\", \"createGpgKeyForAuthenticatedUser\"] }\n ],\n createGpgKeyForAuthenticatedUser: [\"POST /user/gpg_keys\"],\n createPublicSshKeyForAuthenticated: [\n \"POST /user/keys\",\n {},\n { renamed: [\"users\", \"createPublicSshKeyForAuthenticatedUser\"] }\n ],\n createPublicSshKeyForAuthenticatedUser: [\"POST /user/keys\"],\n createSshSigningKeyForAuthenticatedUser: [\"POST /user/ssh_signing_keys\"],\n deleteEmailForAuthenticated: [\n \"DELETE /user/emails\",\n {},\n { renamed: [\"users\", \"deleteEmailForAuthenticatedUser\"] }\n ],\n deleteEmailForAuthenticatedUser: [\"DELETE /user/emails\"],\n deleteGpgKeyForAuthenticated: [\n \"DELETE /user/gpg_keys/{gpg_key_id}\",\n {},\n { renamed: [\"users\", \"deleteGpgKeyForAuthenticatedUser\"] }\n ],\n deleteGpgKeyForAuthenticatedUser: [\"DELETE /user/gpg_keys/{gpg_key_id}\"],\n deletePublicSshKeyForAuthenticated: [\n \"DELETE /user/keys/{key_id}\",\n {},\n { renamed: [\"users\", \"deletePublicSshKeyForAuthenticatedUser\"] }\n ],\n deletePublicSshKeyForAuthenticatedUser: [\"DELETE /user/keys/{key_id}\"],\n deleteSocialAccountForAuthenticatedUser: [\"DELETE /user/social_accounts\"],\n deleteSshSigningKeyForAuthenticatedUser: [\n \"DELETE /user/ssh_signing_keys/{ssh_signing_key_id}\"\n ],\n follow: [\"PUT /user/following/{username}\"],\n getAuthenticated: [\"GET /user\"],\n getByUsername: [\"GET 
/users/{username}\"],\n getContextForUser: [\"GET /users/{username}/hovercard\"],\n getGpgKeyForAuthenticated: [\n \"GET /user/gpg_keys/{gpg_key_id}\",\n {},\n { renamed: [\"users\", \"getGpgKeyForAuthenticatedUser\"] }\n ],\n getGpgKeyForAuthenticatedUser: [\"GET /user/gpg_keys/{gpg_key_id}\"],\n getPublicSshKeyForAuthenticated: [\n \"GET /user/keys/{key_id}\",\n {},\n { renamed: [\"users\", \"getPublicSshKeyForAuthenticatedUser\"] }\n ],\n getPublicSshKeyForAuthenticatedUser: [\"GET /user/keys/{key_id}\"],\n getSshSigningKeyForAuthenticatedUser: [\n \"GET /user/ssh_signing_keys/{ssh_signing_key_id}\"\n ],\n list: [\"GET /users\"],\n listBlockedByAuthenticated: [\n \"GET /user/blocks\",\n {},\n { renamed: [\"users\", \"listBlockedByAuthenticatedUser\"] }\n ],\n listBlockedByAuthenticatedUser: [\"GET /user/blocks\"],\n listEmailsForAuthenticated: [\n \"GET /user/emails\",\n {},\n { renamed: [\"users\", \"listEmailsForAuthenticatedUser\"] }\n ],\n listEmailsForAuthenticatedUser: [\"GET /user/emails\"],\n listFollowedByAuthenticated: [\n \"GET /user/following\",\n {},\n { renamed: [\"users\", \"listFollowedByAuthenticatedUser\"] }\n ],\n listFollowedByAuthenticatedUser: [\"GET /user/following\"],\n listFollowersForAuthenticatedUser: [\"GET /user/followers\"],\n listFollowersForUser: [\"GET /users/{username}/followers\"],\n listFollowingForUser: [\"GET /users/{username}/following\"],\n listGpgKeysForAuthenticated: [\n \"GET /user/gpg_keys\",\n {},\n { renamed: [\"users\", \"listGpgKeysForAuthenticatedUser\"] }\n ],\n listGpgKeysForAuthenticatedUser: [\"GET /user/gpg_keys\"],\n listGpgKeysForUser: [\"GET /users/{username}/gpg_keys\"],\n listPublicEmailsForAuthenticated: [\n \"GET /user/public_emails\",\n {},\n { renamed: [\"users\", \"listPublicEmailsForAuthenticatedUser\"] }\n ],\n listPublicEmailsForAuthenticatedUser: [\"GET /user/public_emails\"],\n listPublicKeysForUser: [\"GET /users/{username}/keys\"],\n listPublicSshKeysForAuthenticated: [\n \"GET /user/keys\",\n {},\n { renamed: [\"users\", \"listPublicSshKeysForAuthenticatedUser\"] }\n ],\n listPublicSshKeysForAuthenticatedUser: [\"GET /user/keys\"],\n listSocialAccountsForAuthenticatedUser: [\"GET /user/social_accounts\"],\n listSocialAccountsForUser: [\"GET /users/{username}/social_accounts\"],\n listSshSigningKeysForAuthenticatedUser: [\"GET /user/ssh_signing_keys\"],\n listSshSigningKeysForUser: [\"GET /users/{username}/ssh_signing_keys\"],\n setPrimaryEmailVisibilityForAuthenticated: [\n \"PATCH /user/email/visibility\",\n {},\n { renamed: [\"users\", \"setPrimaryEmailVisibilityForAuthenticatedUser\"] }\n ],\n setPrimaryEmailVisibilityForAuthenticatedUser: [\n \"PATCH /user/email/visibility\"\n ],\n unblock: [\"DELETE /user/blocks/{username}\"],\n unfollow: [\"DELETE /user/following/{username}\"],\n updateAuthenticated: [\"PATCH /user\"]\n }\n};\nvar endpoints_default = Endpoints;\n\n// pkg/dist-src/endpoints-to-methods.js\nvar endpointMethodsMap = /* @__PURE__ */ new Map();\nfor (const [scope, endpoints] of Object.entries(endpoints_default)) {\n for (const [methodName, endpoint] of Object.entries(endpoints)) {\n const [route, defaults, decorations] = endpoint;\n const [method, url] = route.split(/ /);\n const endpointDefaults = Object.assign(\n {\n method,\n url\n },\n defaults\n );\n if (!endpointMethodsMap.has(scope)) {\n endpointMethodsMap.set(scope, /* @__PURE__ */ new Map());\n }\n endpointMethodsMap.get(scope).set(methodName, {\n scope,\n methodName,\n endpointDefaults,\n decorations\n });\n }\n}\nvar handler = {\n get({ 
octokit, scope, cache }, methodName) {\n if (cache[methodName]) {\n return cache[methodName];\n }\n const { decorations, endpointDefaults } = endpointMethodsMap.get(scope).get(methodName);\n if (decorations) {\n cache[methodName] = decorate(\n octokit,\n scope,\n methodName,\n endpointDefaults,\n decorations\n );\n } else {\n cache[methodName] = octokit.request.defaults(endpointDefaults);\n }\n return cache[methodName];\n }\n};\nfunction endpointsToMethods(octokit) {\n const newMethods = {};\n for (const scope of endpointMethodsMap.keys()) {\n newMethods[scope] = new Proxy({ octokit, scope, cache: {} }, handler);\n }\n return newMethods;\n}\nfunction decorate(octokit, scope, methodName, defaults, decorations) {\n const requestWithDefaults = octokit.request.defaults(defaults);\n function withDecorations(...args) {\n let options = requestWithDefaults.endpoint.merge(...args);\n if (decorations.mapToData) {\n options = Object.assign({}, options, {\n data: options[decorations.mapToData],\n [decorations.mapToData]: void 0\n });\n return requestWithDefaults(options);\n }\n if (decorations.renamed) {\n const [newScope, newMethodName] = decorations.renamed;\n octokit.log.warn(\n `octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`\n );\n }\n if (decorations.deprecated) {\n octokit.log.warn(decorations.deprecated);\n }\n if (decorations.renamedParameters) {\n const options2 = requestWithDefaults.endpoint.merge(...args);\n for (const [name, alias] of Object.entries(\n decorations.renamedParameters\n )) {\n if (name in options2) {\n octokit.log.warn(\n `\"${name}\" parameter is deprecated for \"octokit.${scope}.${methodName}()\". Use \"${alias}\" instead`\n );\n if (!(alias in options2)) {\n options2[alias] = options2[name];\n }\n delete options2[name];\n }\n }\n return requestWithDefaults(options2);\n }\n return requestWithDefaults(...args);\n }\n return Object.assign(withDecorations, requestWithDefaults);\n}\n\n// pkg/dist-src/index.js\nfunction restEndpointMethods(octokit) {\n const api = endpointsToMethods(octokit);\n return {\n rest: api\n };\n}\nrestEndpointMethods.VERSION = VERSION;\nfunction legacyRestEndpointMethods(octokit) {\n const api = endpointsToMethods(octokit);\n return {\n ...api,\n rest: api\n };\n}\nlegacyRestEndpointMethods.VERSION = VERSION;\n// Annotate the CommonJS export names for ESM import in node:\n0 && (module.exports = {\n legacyRestEndpointMethods,\n restEndpointMethods\n});\n","\"use strict\";\nvar __create = Object.create;\nvar __defProp = Object.defineProperty;\nvar __getOwnPropDesc = Object.getOwnPropertyDescriptor;\nvar __getOwnPropNames = Object.getOwnPropertyNames;\nvar __getProtoOf = Object.getPrototypeOf;\nvar __hasOwnProp = Object.prototype.hasOwnProperty;\nvar __export = (target, all) => {\n for (var name in all)\n __defProp(target, name, { get: all[name], enumerable: true });\n};\nvar __copyProps = (to, from, except, desc) => {\n if (from && typeof from === \"object\" || typeof from === \"function\") {\n for (let key of __getOwnPropNames(from))\n if (!__hasOwnProp.call(to, key) && key !== except)\n __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });\n }\n return to;\n};\nvar __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(\n // If the importer is in node compatibility mode or this is not an ESM\n // file that has been converted to a CommonJS file using a Babel-\n // compatible transform (i.e. 
\"__esModule\" has not been set), then set\n // \"default\" to the CommonJS \"module.exports\" for node compatibility.\n isNodeMode || !mod || !mod.__esModule ? __defProp(target, \"default\", { value: mod, enumerable: true }) : target,\n mod\n));\nvar __toCommonJS = (mod) => __copyProps(__defProp({}, \"__esModule\", { value: true }), mod);\n\n// pkg/dist-src/index.js\nvar dist_src_exports = {};\n__export(dist_src_exports, {\n RequestError: () => RequestError\n});\nmodule.exports = __toCommonJS(dist_src_exports);\nvar import_deprecation = require(\"deprecation\");\nvar import_once = __toESM(require(\"once\"));\nvar logOnceCode = (0, import_once.default)((deprecation) => console.warn(deprecation));\nvar logOnceHeaders = (0, import_once.default)((deprecation) => console.warn(deprecation));\nvar RequestError = class extends Error {\n constructor(message, statusCode, options) {\n super(message);\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n this.name = \"HttpError\";\n this.status = statusCode;\n let headers;\n if (\"headers\" in options && typeof options.headers !== \"undefined\") {\n headers = options.headers;\n }\n if (\"response\" in options) {\n this.response = options.response;\n headers = options.response.headers;\n }\n const requestCopy = Object.assign({}, options.request);\n if (options.request.headers.authorization) {\n requestCopy.headers = Object.assign({}, options.request.headers, {\n authorization: options.request.headers.authorization.replace(\n / .*$/,\n \" [REDACTED]\"\n )\n });\n }\n requestCopy.url = requestCopy.url.replace(/\\bclient_secret=\\w+/g, \"client_secret=[REDACTED]\").replace(/\\baccess_token=\\w+/g, \"access_token=[REDACTED]\");\n this.request = requestCopy;\n Object.defineProperty(this, \"code\", {\n get() {\n logOnceCode(\n new import_deprecation.Deprecation(\n \"[@octokit/request-error] `error.code` is deprecated, use `error.status`.\"\n )\n );\n return statusCode;\n }\n });\n Object.defineProperty(this, \"headers\", {\n get() {\n logOnceHeaders(\n new import_deprecation.Deprecation(\n \"[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`.\"\n )\n );\n return headers || {};\n }\n });\n }\n};\n// Annotate the CommonJS export names for ESM import in node:\n0 && (module.exports = {\n RequestError\n});\n","\"use strict\";\nvar __defProp = Object.defineProperty;\nvar __getOwnPropDesc = Object.getOwnPropertyDescriptor;\nvar __getOwnPropNames = Object.getOwnPropertyNames;\nvar __hasOwnProp = Object.prototype.hasOwnProperty;\nvar __export = (target, all) => {\n for (var name in all)\n __defProp(target, name, { get: all[name], enumerable: true });\n};\nvar __copyProps = (to, from, except, desc) => {\n if (from && typeof from === \"object\" || typeof from === \"function\") {\n for (let key of __getOwnPropNames(from))\n if (!__hasOwnProp.call(to, key) && key !== except)\n __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });\n }\n return to;\n};\nvar __toCommonJS = (mod) => __copyProps(__defProp({}, \"__esModule\", { value: true }), mod);\n\n// pkg/dist-src/index.js\nvar dist_src_exports = {};\n__export(dist_src_exports, {\n request: () => request\n});\nmodule.exports = __toCommonJS(dist_src_exports);\nvar import_endpoint = require(\"@octokit/endpoint\");\nvar import_universal_user_agent = require(\"universal-user-agent\");\n\n// pkg/dist-src/version.js\nvar VERSION = \"8.1.0\";\n\n// pkg/dist-src/fetch-wrapper.js\nvar 
import_is_plain_object = require(\"is-plain-object\");\nvar import_request_error = require(\"@octokit/request-error\");\n\n// pkg/dist-src/get-buffer-response.js\nfunction getBufferResponse(response) {\n return response.arrayBuffer();\n}\n\n// pkg/dist-src/fetch-wrapper.js\nfunction fetchWrapper(requestOptions) {\n var _a, _b, _c;\n const log = requestOptions.request && requestOptions.request.log ? requestOptions.request.log : console;\n const parseSuccessResponseBody = ((_a = requestOptions.request) == null ? void 0 : _a.parseSuccessResponseBody) !== false;\n if ((0, import_is_plain_object.isPlainObject)(requestOptions.body) || Array.isArray(requestOptions.body)) {\n requestOptions.body = JSON.stringify(requestOptions.body);\n }\n let headers = {};\n let status;\n let url;\n let { fetch } = globalThis;\n if ((_b = requestOptions.request) == null ? void 0 : _b.fetch) {\n fetch = requestOptions.request.fetch;\n }\n if (!fetch) {\n throw new Error(\n 'Global \"fetch\" not found. Please provide `options.request.fetch` to octokit or upgrade to node@18 or newer.'\n );\n }\n return fetch(requestOptions.url, {\n method: requestOptions.method,\n body: requestOptions.body,\n headers: requestOptions.headers,\n signal: (_c = requestOptions.request) == null ? void 0 : _c.signal,\n // duplex must be set if request.body is ReadableStream or Async Iterables.\n // See https://fetch.spec.whatwg.org/#dom-requestinit-duplex.\n ...requestOptions.body && { duplex: \"half\" }\n }).then(async (response) => {\n url = response.url;\n status = response.status;\n for (const keyAndValue of response.headers) {\n headers[keyAndValue[0]] = keyAndValue[1];\n }\n if (\"deprecation\" in headers) {\n const matches = headers.link && headers.link.match(/<([^>]+)>; rel=\"deprecation\"/);\n const deprecationLink = matches && matches.pop();\n log.warn(\n `[@octokit/request] \"${requestOptions.method} ${requestOptions.url}\" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : \"\"}`\n );\n }\n if (status === 204 || status === 205) {\n return;\n }\n if (requestOptions.method === \"HEAD\") {\n if (status < 400) {\n return;\n }\n throw new import_request_error.RequestError(response.statusText, status, {\n response: {\n url,\n status,\n headers,\n data: void 0\n },\n request: requestOptions\n });\n }\n if (status === 304) {\n throw new import_request_error.RequestError(\"Not modified\", status, {\n response: {\n url,\n status,\n headers,\n data: await getResponseData(response)\n },\n request: requestOptions\n });\n }\n if (status >= 400) {\n const data = await getResponseData(response);\n const error = new import_request_error.RequestError(toErrorMessage(data), status, {\n response: {\n url,\n status,\n headers,\n data\n },\n request: requestOptions\n });\n throw error;\n }\n return parseSuccessResponseBody ? 
await getResponseData(response) : response.body;\n }).then((data) => {\n return {\n status,\n url,\n headers,\n data\n };\n }).catch((error) => {\n if (error instanceof import_request_error.RequestError)\n throw error;\n else if (error.name === \"AbortError\")\n throw error;\n throw new import_request_error.RequestError(error.message, 500, {\n request: requestOptions\n });\n });\n}\nasync function getResponseData(response) {\n const contentType = response.headers.get(\"content-type\");\n if (/application\\/json/.test(contentType)) {\n return response.json();\n }\n if (!contentType || /^text\\/|charset=utf-8$/.test(contentType)) {\n return response.text();\n }\n return getBufferResponse(response);\n}\nfunction toErrorMessage(data) {\n if (typeof data === \"string\")\n return data;\n if (\"message\" in data) {\n if (Array.isArray(data.errors)) {\n return `${data.message}: ${data.errors.map(JSON.stringify).join(\", \")}`;\n }\n return data.message;\n }\n return `Unknown error: ${JSON.stringify(data)}`;\n}\n\n// pkg/dist-src/with-defaults.js\nfunction withDefaults(oldEndpoint, newDefaults) {\n const endpoint2 = oldEndpoint.defaults(newDefaults);\n const newApi = function(route, parameters) {\n const endpointOptions = endpoint2.merge(route, parameters);\n if (!endpointOptions.request || !endpointOptions.request.hook) {\n return fetchWrapper(endpoint2.parse(endpointOptions));\n }\n const request2 = (route2, parameters2) => {\n return fetchWrapper(\n endpoint2.parse(endpoint2.merge(route2, parameters2))\n );\n };\n Object.assign(request2, {\n endpoint: endpoint2,\n defaults: withDefaults.bind(null, endpoint2)\n });\n return endpointOptions.request.hook(request2, endpointOptions);\n };\n return Object.assign(newApi, {\n endpoint: endpoint2,\n defaults: withDefaults.bind(null, endpoint2)\n });\n}\n\n// pkg/dist-src/index.js\nvar request = withDefaults(import_endpoint.endpoint, {\n headers: {\n \"user-agent\": `octokit-request.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`\n }\n});\n// Annotate the CommonJS export names for ESM import in node:\n0 && (module.exports = {\n request\n});\n","var register = require(\"./lib/register\");\nvar addHook = require(\"./lib/add\");\nvar removeHook = require(\"./lib/remove\");\n\n// bind with array of arguments: https://stackoverflow.com/a/21792913\nvar bind = Function.bind;\nvar bindable = bind.bind(bind);\n\nfunction bindApi(hook, state, name) {\n var removeHookRef = bindable(removeHook, null).apply(\n null,\n name ? [state, name] : [state]\n );\n hook.api = { remove: removeHookRef };\n hook.remove = removeHookRef;\n [\"before\", \"error\", \"after\", \"wrap\"].forEach(function (kind) {\n var args = name ? [state, kind, name] : [state, kind];\n hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args);\n });\n}\n\nfunction HookSingular() {\n var singularHookName = \"h\";\n var singularHookState = {\n registry: {},\n };\n var singularHook = register.bind(null, singularHookState, singularHookName);\n bindApi(singularHook, singularHookState, singularHookName);\n return singularHook;\n}\n\nfunction HookCollection() {\n var state = {\n registry: {},\n };\n\n var hook = register.bind(null, state);\n bindApi(hook, state);\n\n return hook;\n}\n\nvar collectionHookDeprecationMessageDisplayed = false;\nfunction Hook() {\n if (!collectionHookDeprecationMessageDisplayed) {\n console.warn(\n '[before-after-hook]: \"Hook()\" repurposing warning, use \"Hook.Collection()\". 
Read more: https://git.io/upgrade-before-after-hook-to-1.4'\n );\n collectionHookDeprecationMessageDisplayed = true;\n }\n return HookCollection();\n}\n\nHook.Singular = HookSingular.bind();\nHook.Collection = HookCollection.bind();\n\nmodule.exports = Hook;\n// expose constructors as a named property for TypeScript\nmodule.exports.Hook = Hook;\nmodule.exports.Singular = Hook.Singular;\nmodule.exports.Collection = Hook.Collection;\n","module.exports = addHook;\n\nfunction addHook(state, kind, name, hook) {\n var orig = hook;\n if (!state.registry[name]) {\n state.registry[name] = [];\n }\n\n if (kind === \"before\") {\n hook = function (method, options) {\n return Promise.resolve()\n .then(orig.bind(null, options))\n .then(method.bind(null, options));\n };\n }\n\n if (kind === \"after\") {\n hook = function (method, options) {\n var result;\n return Promise.resolve()\n .then(method.bind(null, options))\n .then(function (result_) {\n result = result_;\n return orig(result, options);\n })\n .then(function () {\n return result;\n });\n };\n }\n\n if (kind === \"error\") {\n hook = function (method, options) {\n return Promise.resolve()\n .then(method.bind(null, options))\n .catch(function (error) {\n return orig(error, options);\n });\n };\n }\n\n state.registry[name].push({\n hook: hook,\n orig: orig,\n });\n}\n","module.exports = register;\n\nfunction register(state, name, method, options) {\n if (typeof method !== \"function\") {\n throw new Error(\"method for before hook must be a function\");\n }\n\n if (!options) {\n options = {};\n }\n\n if (Array.isArray(name)) {\n return name.reverse().reduce(function (callback, name) {\n return register.bind(null, state, name, callback, options);\n }, method)();\n }\n\n return Promise.resolve().then(function () {\n if (!state.registry[name]) {\n return method(options);\n }\n\n return state.registry[name].reduce(function (method, registered) {\n return registered.hook.bind(null, method, options);\n }, method)();\n });\n}\n","module.exports = removeHook;\n\nfunction removeHook(state, name, method) {\n if (!state.registry[name]) {\n return;\n }\n\n var index = state.registry[name]\n .map(function (registered) {\n return registered.orig;\n })\n .indexOf(method);\n\n if (index === -1) {\n return;\n }\n\n state.registry[name].splice(index, 1);\n}\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nclass Deprecation extends Error {\n constructor(message) {\n super(message); // Maintains proper stack trace (only available on V8)\n\n /* istanbul ignore next */\n\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n\n this.name = 'Deprecation';\n }\n\n}\n\nexports.Deprecation = Deprecation;\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\n/*!\n * is-plain-object \n *\n * Copyright (c) 2014-2017, Jon Schlinkert.\n * Released under the MIT License.\n */\n\nfunction isObject(o) {\n return Object.prototype.toString.call(o) === '[object Object]';\n}\n\nfunction isPlainObject(o) {\n var ctor,prot;\n\n if (isObject(o) === false) return false;\n\n // If has modified constructor\n ctor = o.constructor;\n if (ctor === undefined) return true;\n\n // If has modified prototype\n prot = ctor.prototype;\n if (isObject(prot) === false) return false;\n\n // If constructor does not have an Object-specific method\n if (prot.hasOwnProperty('isPrototypeOf') === false) {\n return false;\n }\n\n // Most likely a plain Object\n return true;\n}\n\nexports.isPlainObject = 
isPlainObject;\n","/*! node-domexception. MIT License. Jimmy Wärting */\n\nif (!globalThis.DOMException) {\n try {\n const { MessageChannel } = require('worker_threads'),\n port = new MessageChannel().port1,\n ab = new ArrayBuffer()\n port.postMessage(ab, [ab, ab])\n } catch (err) {\n err.constructor.name === 'DOMException' && (\n globalThis.DOMException = err.constructor\n )\n }\n}\n\nmodule.exports = globalThis.DOMException\n","var wrappy = require('wrappy')\nmodule.exports = wrappy(once)\nmodule.exports.strict = wrappy(onceStrict)\n\nonce.proto = once(function () {\n Object.defineProperty(Function.prototype, 'once', {\n value: function () {\n return once(this)\n },\n configurable: true\n })\n\n Object.defineProperty(Function.prototype, 'onceStrict', {\n value: function () {\n return onceStrict(this)\n },\n configurable: true\n })\n})\n\nfunction once (fn) {\n var f = function () {\n if (f.called) return f.value\n f.called = true\n return f.value = fn.apply(this, arguments)\n }\n f.called = false\n return f\n}\n\nfunction onceStrict (fn) {\n var f = function () {\n if (f.called)\n throw new Error(f.onceError)\n f.called = true\n return f.value = fn.apply(this, arguments)\n }\n var name = fn.name || 'Function wrapped with `once`'\n f.onceError = name + \" shouldn't be called more than once\"\n f.called = false\n return f\n}\n","exports = module.exports = SemVer\n\nvar debug\n/* istanbul ignore next */\nif (typeof process === 'object' &&\n process.env &&\n process.env.NODE_DEBUG &&\n /\\bsemver\\b/i.test(process.env.NODE_DEBUG)) {\n debug = function () {\n var args = Array.prototype.slice.call(arguments, 0)\n args.unshift('SEMVER')\n console.log.apply(console, args)\n }\n} else {\n debug = function () {}\n}\n\n// Note: this is the semver.org version of the spec that it implements\n// Not necessarily the package version of this code.\nexports.SEMVER_SPEC_VERSION = '2.0.0'\n\nvar MAX_LENGTH = 256\nvar MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER ||\n /* istanbul ignore next */ 9007199254740991\n\n// Max safe segment length for coercion.\nvar MAX_SAFE_COMPONENT_LENGTH = 16\n\n// The actual regexps go on exports.re\nvar re = exports.re = []\nvar src = exports.src = []\nvar t = exports.tokens = {}\nvar R = 0\n\nfunction tok (n) {\n t[n] = R++\n}\n\n// The following Regular Expressions can be used for tokenizing,\n// validating, and parsing SemVer version strings.\n\n// ## Numeric Identifier\n// A single `0`, or a non-zero digit followed by zero or more digits.\n\ntok('NUMERICIDENTIFIER')\nsrc[t.NUMERICIDENTIFIER] = '0|[1-9]\\\\d*'\ntok('NUMERICIDENTIFIERLOOSE')\nsrc[t.NUMERICIDENTIFIERLOOSE] = '[0-9]+'\n\n// ## Non-numeric Identifier\n// Zero or more digits, followed by a letter or hyphen, and then zero or\n// more letters, digits, or hyphens.\n\ntok('NONNUMERICIDENTIFIER')\nsrc[t.NONNUMERICIDENTIFIER] = '\\\\d*[a-zA-Z-][a-zA-Z0-9-]*'\n\n// ## Main Version\n// Three dot-separated numeric identifiers.\n\ntok('MAINVERSION')\nsrc[t.MAINVERSION] = '(' + src[t.NUMERICIDENTIFIER] + ')\\\\.' +\n '(' + src[t.NUMERICIDENTIFIER] + ')\\\\.' +\n '(' + src[t.NUMERICIDENTIFIER] + ')'\n\ntok('MAINVERSIONLOOSE')\nsrc[t.MAINVERSIONLOOSE] = '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\\\.' +\n '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\\\.' 
+\n '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')'\n\n// ## Pre-release Version Identifier\n// A numeric identifier, or a non-numeric identifier.\n\ntok('PRERELEASEIDENTIFIER')\nsrc[t.PRERELEASEIDENTIFIER] = '(?:' + src[t.NUMERICIDENTIFIER] +\n '|' + src[t.NONNUMERICIDENTIFIER] + ')'\n\ntok('PRERELEASEIDENTIFIERLOOSE')\nsrc[t.PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[t.NUMERICIDENTIFIERLOOSE] +\n '|' + src[t.NONNUMERICIDENTIFIER] + ')'\n\n// ## Pre-release Version\n// Hyphen, followed by one or more dot-separated pre-release version\n// identifiers.\n\ntok('PRERELEASE')\nsrc[t.PRERELEASE] = '(?:-(' + src[t.PRERELEASEIDENTIFIER] +\n '(?:\\\\.' + src[t.PRERELEASEIDENTIFIER] + ')*))'\n\ntok('PRERELEASELOOSE')\nsrc[t.PRERELEASELOOSE] = '(?:-?(' + src[t.PRERELEASEIDENTIFIERLOOSE] +\n '(?:\\\\.' + src[t.PRERELEASEIDENTIFIERLOOSE] + ')*))'\n\n// ## Build Metadata Identifier\n// Any combination of digits, letters, or hyphens.\n\ntok('BUILDIDENTIFIER')\nsrc[t.BUILDIDENTIFIER] = '[0-9A-Za-z-]+'\n\n// ## Build Metadata\n// Plus sign, followed by one or more period-separated build metadata\n// identifiers.\n\ntok('BUILD')\nsrc[t.BUILD] = '(?:\\\\+(' + src[t.BUILDIDENTIFIER] +\n '(?:\\\\.' + src[t.BUILDIDENTIFIER] + ')*))'\n\n// ## Full Version String\n// A main version, followed optionally by a pre-release version and\n// build metadata.\n\n// Note that the only major, minor, patch, and pre-release sections of\n// the version string are capturing groups. The build metadata is not a\n// capturing group, because it should not ever be used in version\n// comparison.\n\ntok('FULL')\ntok('FULLPLAIN')\nsrc[t.FULLPLAIN] = 'v?' + src[t.MAINVERSION] +\n src[t.PRERELEASE] + '?' +\n src[t.BUILD] + '?'\n\nsrc[t.FULL] = '^' + src[t.FULLPLAIN] + '$'\n\n// like full, but allows v1.2.3 and =1.2.3, which people do sometimes.\n// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty\n// common in the npm registry.\ntok('LOOSEPLAIN')\nsrc[t.LOOSEPLAIN] = '[v=\\\\s]*' + src[t.MAINVERSIONLOOSE] +\n src[t.PRERELEASELOOSE] + '?' +\n src[t.BUILD] + '?'\n\ntok('LOOSE')\nsrc[t.LOOSE] = '^' + src[t.LOOSEPLAIN] + '$'\n\ntok('GTLT')\nsrc[t.GTLT] = '((?:<|>)?=?)'\n\n// Something like \"2.*\" or \"1.2.x\".\n// Note that \"x.x\" is a valid xRange identifer, meaning \"any version\"\n// Only the first item is strictly required.\ntok('XRANGEIDENTIFIERLOOSE')\nsrc[t.XRANGEIDENTIFIERLOOSE] = src[t.NUMERICIDENTIFIERLOOSE] + '|x|X|\\\\*'\ntok('XRANGEIDENTIFIER')\nsrc[t.XRANGEIDENTIFIER] = src[t.NUMERICIDENTIFIER] + '|x|X|\\\\*'\n\ntok('XRANGEPLAIN')\nsrc[t.XRANGEPLAIN] = '[v=\\\\s]*(' + src[t.XRANGEIDENTIFIER] + ')' +\n '(?:\\\\.(' + src[t.XRANGEIDENTIFIER] + ')' +\n '(?:\\\\.(' + src[t.XRANGEIDENTIFIER] + ')' +\n '(?:' + src[t.PRERELEASE] + ')?' +\n src[t.BUILD] + '?' +\n ')?)?'\n\ntok('XRANGEPLAINLOOSE')\nsrc[t.XRANGEPLAINLOOSE] = '[v=\\\\s]*(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' +\n '(?:\\\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' +\n '(?:\\\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' +\n '(?:' + src[t.PRERELEASELOOSE] + ')?' +\n src[t.BUILD] + '?' +\n ')?)?'\n\ntok('XRANGE')\nsrc[t.XRANGE] = '^' + src[t.GTLT] + '\\\\s*' + src[t.XRANGEPLAIN] + '$'\ntok('XRANGELOOSE')\nsrc[t.XRANGELOOSE] = '^' + src[t.GTLT] + '\\\\s*' + src[t.XRANGEPLAINLOOSE] + '$'\n\n// Coercion.\n// Extract anything that could conceivably be a part of a valid semver\ntok('COERCE')\nsrc[t.COERCE] = '(^|[^\\\\d])' +\n '(\\\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' +\n '(?:\\\\.(\\\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' 
+\n '(?:\\\\.(\\\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' +\n '(?:$|[^\\\\d])'\ntok('COERCERTL')\nre[t.COERCERTL] = new RegExp(src[t.COERCE], 'g')\n\n// Tilde ranges.\n// Meaning is \"reasonably at or greater than\"\ntok('LONETILDE')\nsrc[t.LONETILDE] = '(?:~>?)'\n\ntok('TILDETRIM')\nsrc[t.TILDETRIM] = '(\\\\s*)' + src[t.LONETILDE] + '\\\\s+'\nre[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], 'g')\nvar tildeTrimReplace = '$1~'\n\ntok('TILDE')\nsrc[t.TILDE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAIN] + '$'\ntok('TILDELOOSE')\nsrc[t.TILDELOOSE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAINLOOSE] + '$'\n\n// Caret ranges.\n// Meaning is \"at least and backwards compatible with\"\ntok('LONECARET')\nsrc[t.LONECARET] = '(?:\\\\^)'\n\ntok('CARETTRIM')\nsrc[t.CARETTRIM] = '(\\\\s*)' + src[t.LONECARET] + '\\\\s+'\nre[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], 'g')\nvar caretTrimReplace = '$1^'\n\ntok('CARET')\nsrc[t.CARET] = '^' + src[t.LONECARET] + src[t.XRANGEPLAIN] + '$'\ntok('CARETLOOSE')\nsrc[t.CARETLOOSE] = '^' + src[t.LONECARET] + src[t.XRANGEPLAINLOOSE] + '$'\n\n// A simple gt/lt/eq thing, or just \"\" to indicate \"any version\"\ntok('COMPARATORLOOSE')\nsrc[t.COMPARATORLOOSE] = '^' + src[t.GTLT] + '\\\\s*(' + src[t.LOOSEPLAIN] + ')$|^$'\ntok('COMPARATOR')\nsrc[t.COMPARATOR] = '^' + src[t.GTLT] + '\\\\s*(' + src[t.FULLPLAIN] + ')$|^$'\n\n// An expression to strip any whitespace between the gtlt and the thing\n// it modifies, so that `> 1.2.3` ==> `>1.2.3`\ntok('COMPARATORTRIM')\nsrc[t.COMPARATORTRIM] = '(\\\\s*)' + src[t.GTLT] +\n '\\\\s*(' + src[t.LOOSEPLAIN] + '|' + src[t.XRANGEPLAIN] + ')'\n\n// this one has to use the /g flag\nre[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], 'g')\nvar comparatorTrimReplace = '$1$2$3'\n\n// Something like `1.2.3 - 1.2.4`\n// Note that these all use the loose form, because they'll be\n// checked against either the strict or loose comparator form\n// later.\ntok('HYPHENRANGE')\nsrc[t.HYPHENRANGE] = '^\\\\s*(' + src[t.XRANGEPLAIN] + ')' +\n '\\\\s+-\\\\s+' +\n '(' + src[t.XRANGEPLAIN] + ')' +\n '\\\\s*$'\n\ntok('HYPHENRANGELOOSE')\nsrc[t.HYPHENRANGELOOSE] = '^\\\\s*(' + src[t.XRANGEPLAINLOOSE] + ')' +\n '\\\\s+-\\\\s+' +\n '(' + src[t.XRANGEPLAINLOOSE] + ')' +\n '\\\\s*$'\n\n// Star ranges basically just allow anything at all.\ntok('STAR')\nsrc[t.STAR] = '(<|>)?=?\\\\s*\\\\*'\n\n// Compile to actual regexp objects.\n// All are flag-free, unless they were created above with a flag.\nfor (var i = 0; i < R; i++) {\n debug(i, src[i])\n if (!re[i]) {\n re[i] = new RegExp(src[i])\n }\n}\n\nexports.parse = parse\nfunction parse (version, options) {\n if (!options || typeof options !== 'object') {\n options = {\n loose: !!options,\n includePrerelease: false\n }\n }\n\n if (version instanceof SemVer) {\n return version\n }\n\n if (typeof version !== 'string') {\n return null\n }\n\n if (version.length > MAX_LENGTH) {\n return null\n }\n\n var r = options.loose ? re[t.LOOSE] : re[t.FULL]\n if (!r.test(version)) {\n return null\n }\n\n try {\n return new SemVer(version, options)\n } catch (er) {\n return null\n }\n}\n\nexports.valid = valid\nfunction valid (version, options) {\n var v = parse(version, options)\n return v ? v.version : null\n}\n\nexports.clean = clean\nfunction clean (version, options) {\n var s = parse(version.trim().replace(/^[=v]+/, ''), options)\n return s ? 
s.version : null\n}\n\nexports.SemVer = SemVer\n\nfunction SemVer (version, options) {\n if (!options || typeof options !== 'object') {\n options = {\n loose: !!options,\n includePrerelease: false\n }\n }\n if (version instanceof SemVer) {\n if (version.loose === options.loose) {\n return version\n } else {\n version = version.version\n }\n } else if (typeof version !== 'string') {\n throw new TypeError('Invalid Version: ' + version)\n }\n\n if (version.length > MAX_LENGTH) {\n throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters')\n }\n\n if (!(this instanceof SemVer)) {\n return new SemVer(version, options)\n }\n\n debug('SemVer', version, options)\n this.options = options\n this.loose = !!options.loose\n\n var m = version.trim().match(options.loose ? re[t.LOOSE] : re[t.FULL])\n\n if (!m) {\n throw new TypeError('Invalid Version: ' + version)\n }\n\n this.raw = version\n\n // these are actually numbers\n this.major = +m[1]\n this.minor = +m[2]\n this.patch = +m[3]\n\n if (this.major > MAX_SAFE_INTEGER || this.major < 0) {\n throw new TypeError('Invalid major version')\n }\n\n if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) {\n throw new TypeError('Invalid minor version')\n }\n\n if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) {\n throw new TypeError('Invalid patch version')\n }\n\n // numberify any prerelease numeric ids\n if (!m[4]) {\n this.prerelease = []\n } else {\n this.prerelease = m[4].split('.').map(function (id) {\n if (/^[0-9]+$/.test(id)) {\n var num = +id\n if (num >= 0 && num < MAX_SAFE_INTEGER) {\n return num\n }\n }\n return id\n })\n }\n\n this.build = m[5] ? m[5].split('.') : []\n this.format()\n}\n\nSemVer.prototype.format = function () {\n this.version = this.major + '.' + this.minor + '.' + this.patch\n if (this.prerelease.length) {\n this.version += '-' + this.prerelease.join('.')\n }\n return this.version\n}\n\nSemVer.prototype.toString = function () {\n return this.version\n}\n\nSemVer.prototype.compare = function (other) {\n debug('SemVer.compare', this.version, this.options, other)\n if (!(other instanceof SemVer)) {\n other = new SemVer(other, this.options)\n }\n\n return this.compareMain(other) || this.comparePre(other)\n}\n\nSemVer.prototype.compareMain = function (other) {\n if (!(other instanceof SemVer)) {\n other = new SemVer(other, this.options)\n }\n\n return compareIdentifiers(this.major, other.major) ||\n compareIdentifiers(this.minor, other.minor) ||\n compareIdentifiers(this.patch, other.patch)\n}\n\nSemVer.prototype.comparePre = function (other) {\n if (!(other instanceof SemVer)) {\n other = new SemVer(other, this.options)\n }\n\n // NOT having a prerelease is > having one\n if (this.prerelease.length && !other.prerelease.length) {\n return -1\n } else if (!this.prerelease.length && other.prerelease.length) {\n return 1\n } else if (!this.prerelease.length && !other.prerelease.length) {\n return 0\n }\n\n var i = 0\n do {\n var a = this.prerelease[i]\n var b = other.prerelease[i]\n debug('prerelease compare', i, a, b)\n if (a === undefined && b === undefined) {\n return 0\n } else if (b === undefined) {\n return 1\n } else if (a === undefined) {\n return -1\n } else if (a === b) {\n continue\n } else {\n return compareIdentifiers(a, b)\n }\n } while (++i)\n}\n\nSemVer.prototype.compareBuild = function (other) {\n if (!(other instanceof SemVer)) {\n other = new SemVer(other, this.options)\n }\n\n var i = 0\n do {\n var a = this.build[i]\n var b = other.build[i]\n debug('prerelease compare', i, a, b)\n if (a === 
undefined && b === undefined) {\n return 0\n } else if (b === undefined) {\n return 1\n } else if (a === undefined) {\n return -1\n } else if (a === b) {\n continue\n } else {\n return compareIdentifiers(a, b)\n }\n } while (++i)\n}\n\n// preminor will bump the version up to the next minor release, and immediately\n// down to pre-release. premajor and prepatch work the same way.\nSemVer.prototype.inc = function (release, identifier) {\n switch (release) {\n case 'premajor':\n this.prerelease.length = 0\n this.patch = 0\n this.minor = 0\n this.major++\n this.inc('pre', identifier)\n break\n case 'preminor':\n this.prerelease.length = 0\n this.patch = 0\n this.minor++\n this.inc('pre', identifier)\n break\n case 'prepatch':\n // If this is already a prerelease, it will bump to the next version\n // drop any prereleases that might already exist, since they are not\n // relevant at this point.\n this.prerelease.length = 0\n this.inc('patch', identifier)\n this.inc('pre', identifier)\n break\n // If the input is a non-prerelease version, this acts the same as\n // prepatch.\n case 'prerelease':\n if (this.prerelease.length === 0) {\n this.inc('patch', identifier)\n }\n this.inc('pre', identifier)\n break\n\n case 'major':\n // If this is a pre-major version, bump up to the same major version.\n // Otherwise increment major.\n // 1.0.0-5 bumps to 1.0.0\n // 1.1.0 bumps to 2.0.0\n if (this.minor !== 0 ||\n this.patch !== 0 ||\n this.prerelease.length === 0) {\n this.major++\n }\n this.minor = 0\n this.patch = 0\n this.prerelease = []\n break\n case 'minor':\n // If this is a pre-minor version, bump up to the same minor version.\n // Otherwise increment minor.\n // 1.2.0-5 bumps to 1.2.0\n // 1.2.1 bumps to 1.3.0\n if (this.patch !== 0 || this.prerelease.length === 0) {\n this.minor++\n }\n this.patch = 0\n this.prerelease = []\n break\n case 'patch':\n // If this is not a pre-release version, it will increment the patch.\n // If it is a pre-release it will bump up to the same patch version.\n // 1.2.0-5 patches to 1.2.0\n // 1.2.0 patches to 1.2.1\n if (this.prerelease.length === 0) {\n this.patch++\n }\n this.prerelease = []\n break\n // This probably shouldn't be used publicly.\n // 1.0.0 \"pre\" would become 1.0.0-0 which is the wrong direction.\n case 'pre':\n if (this.prerelease.length === 0) {\n this.prerelease = [0]\n } else {\n var i = this.prerelease.length\n while (--i >= 0) {\n if (typeof this.prerelease[i] === 'number') {\n this.prerelease[i]++\n i = -2\n }\n }\n if (i === -1) {\n // didn't increment anything\n this.prerelease.push(0)\n }\n }\n if (identifier) {\n // 1.2.0-beta.1 bumps to 1.2.0-beta.2,\n // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0\n if (this.prerelease[0] === identifier) {\n if (isNaN(this.prerelease[1])) {\n this.prerelease = [identifier, 0]\n }\n } else {\n this.prerelease = [identifier, 0]\n }\n }\n break\n\n default:\n throw new Error('invalid increment argument: ' + release)\n }\n this.format()\n this.raw = this.version\n return this\n}\n\nexports.inc = inc\nfunction inc (version, release, loose, identifier) {\n if (typeof (loose) === 'string') {\n identifier = loose\n loose = undefined\n }\n\n try {\n return new SemVer(version, loose).inc(release, identifier).version\n } catch (er) {\n return null\n }\n}\n\nexports.diff = diff\nfunction diff (version1, version2) {\n if (eq(version1, version2)) {\n return null\n } else {\n var v1 = parse(version1)\n var v2 = parse(version2)\n var prefix = ''\n if (v1.prerelease.length || v2.prerelease.length) {\n 
prefix = 'pre'\n var defaultResult = 'prerelease'\n }\n for (var key in v1) {\n if (key === 'major' || key === 'minor' || key === 'patch') {\n if (v1[key] !== v2[key]) {\n return prefix + key\n }\n }\n }\n return defaultResult // may be undefined\n }\n}\n\nexports.compareIdentifiers = compareIdentifiers\n\nvar numeric = /^[0-9]+$/\nfunction compareIdentifiers (a, b) {\n var anum = numeric.test(a)\n var bnum = numeric.test(b)\n\n if (anum && bnum) {\n a = +a\n b = +b\n }\n\n return a === b ? 0\n : (anum && !bnum) ? -1\n : (bnum && !anum) ? 1\n : a < b ? -1\n : 1\n}\n\nexports.rcompareIdentifiers = rcompareIdentifiers\nfunction rcompareIdentifiers (a, b) {\n return compareIdentifiers(b, a)\n}\n\nexports.major = major\nfunction major (a, loose) {\n return new SemVer(a, loose).major\n}\n\nexports.minor = minor\nfunction minor (a, loose) {\n return new SemVer(a, loose).minor\n}\n\nexports.patch = patch\nfunction patch (a, loose) {\n return new SemVer(a, loose).patch\n}\n\nexports.compare = compare\nfunction compare (a, b, loose) {\n return new SemVer(a, loose).compare(new SemVer(b, loose))\n}\n\nexports.compareLoose = compareLoose\nfunction compareLoose (a, b) {\n return compare(a, b, true)\n}\n\nexports.compareBuild = compareBuild\nfunction compareBuild (a, b, loose) {\n var versionA = new SemVer(a, loose)\n var versionB = new SemVer(b, loose)\n return versionA.compare(versionB) || versionA.compareBuild(versionB)\n}\n\nexports.rcompare = rcompare\nfunction rcompare (a, b, loose) {\n return compare(b, a, loose)\n}\n\nexports.sort = sort\nfunction sort (list, loose) {\n return list.sort(function (a, b) {\n return exports.compareBuild(a, b, loose)\n })\n}\n\nexports.rsort = rsort\nfunction rsort (list, loose) {\n return list.sort(function (a, b) {\n return exports.compareBuild(b, a, loose)\n })\n}\n\nexports.gt = gt\nfunction gt (a, b, loose) {\n return compare(a, b, loose) > 0\n}\n\nexports.lt = lt\nfunction lt (a, b, loose) {\n return compare(a, b, loose) < 0\n}\n\nexports.eq = eq\nfunction eq (a, b, loose) {\n return compare(a, b, loose) === 0\n}\n\nexports.neq = neq\nfunction neq (a, b, loose) {\n return compare(a, b, loose) !== 0\n}\n\nexports.gte = gte\nfunction gte (a, b, loose) {\n return compare(a, b, loose) >= 0\n}\n\nexports.lte = lte\nfunction lte (a, b, loose) {\n return compare(a, b, loose) <= 0\n}\n\nexports.cmp = cmp\nfunction cmp (a, op, b, loose) {\n switch (op) {\n case '===':\n if (typeof a === 'object')\n a = a.version\n if (typeof b === 'object')\n b = b.version\n return a === b\n\n case '!==':\n if (typeof a === 'object')\n a = a.version\n if (typeof b === 'object')\n b = b.version\n return a !== b\n\n case '':\n case '=':\n case '==':\n return eq(a, b, loose)\n\n case '!=':\n return neq(a, b, loose)\n\n case '>':\n return gt(a, b, loose)\n\n case '>=':\n return gte(a, b, loose)\n\n case '<':\n return lt(a, b, loose)\n\n case '<=':\n return lte(a, b, loose)\n\n default:\n throw new TypeError('Invalid operator: ' + op)\n }\n}\n\nexports.Comparator = Comparator\nfunction Comparator (comp, options) {\n if (!options || typeof options !== 'object') {\n options = {\n loose: !!options,\n includePrerelease: false\n }\n }\n\n if (comp instanceof Comparator) {\n if (comp.loose === !!options.loose) {\n return comp\n } else {\n comp = comp.value\n }\n }\n\n if (!(this instanceof Comparator)) {\n return new Comparator(comp, options)\n }\n\n debug('comparator', comp, options)\n this.options = options\n this.loose = !!options.loose\n this.parse(comp)\n\n if (this.semver === ANY) {\n 
this.value = ''\n } else {\n this.value = this.operator + this.semver.version\n }\n\n debug('comp', this)\n}\n\nvar ANY = {}\nComparator.prototype.parse = function (comp) {\n var r = this.options.loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR]\n var m = comp.match(r)\n\n if (!m) {\n throw new TypeError('Invalid comparator: ' + comp)\n }\n\n this.operator = m[1] !== undefined ? m[1] : ''\n if (this.operator === '=') {\n this.operator = ''\n }\n\n // if it literally is just '>' or '' then allow anything.\n if (!m[2]) {\n this.semver = ANY\n } else {\n this.semver = new SemVer(m[2], this.options.loose)\n }\n}\n\nComparator.prototype.toString = function () {\n return this.value\n}\n\nComparator.prototype.test = function (version) {\n debug('Comparator.test', version, this.options.loose)\n\n if (this.semver === ANY || version === ANY) {\n return true\n }\n\n if (typeof version === 'string') {\n try {\n version = new SemVer(version, this.options)\n } catch (er) {\n return false\n }\n }\n\n return cmp(version, this.operator, this.semver, this.options)\n}\n\nComparator.prototype.intersects = function (comp, options) {\n if (!(comp instanceof Comparator)) {\n throw new TypeError('a Comparator is required')\n }\n\n if (!options || typeof options !== 'object') {\n options = {\n loose: !!options,\n includePrerelease: false\n }\n }\n\n var rangeTmp\n\n if (this.operator === '') {\n if (this.value === '') {\n return true\n }\n rangeTmp = new Range(comp.value, options)\n return satisfies(this.value, rangeTmp, options)\n } else if (comp.operator === '') {\n if (comp.value === '') {\n return true\n }\n rangeTmp = new Range(this.value, options)\n return satisfies(comp.semver, rangeTmp, options)\n }\n\n var sameDirectionIncreasing =\n (this.operator === '>=' || this.operator === '>') &&\n (comp.operator === '>=' || comp.operator === '>')\n var sameDirectionDecreasing =\n (this.operator === '<=' || this.operator === '<') &&\n (comp.operator === '<=' || comp.operator === '<')\n var sameSemVer = this.semver.version === comp.semver.version\n var differentDirectionsInclusive =\n (this.operator === '>=' || this.operator === '<=') &&\n (comp.operator === '>=' || comp.operator === '<=')\n var oppositeDirectionsLessThan =\n cmp(this.semver, '<', comp.semver, options) &&\n ((this.operator === '>=' || this.operator === '>') &&\n (comp.operator === '<=' || comp.operator === '<'))\n var oppositeDirectionsGreaterThan =\n cmp(this.semver, '>', comp.semver, options) &&\n ((this.operator === '<=' || this.operator === '<') &&\n (comp.operator === '>=' || comp.operator === '>'))\n\n return sameDirectionIncreasing || sameDirectionDecreasing ||\n (sameSemVer && differentDirectionsInclusive) ||\n oppositeDirectionsLessThan || oppositeDirectionsGreaterThan\n}\n\nexports.Range = Range\nfunction Range (range, options) {\n if (!options || typeof options !== 'object') {\n options = {\n loose: !!options,\n includePrerelease: false\n }\n }\n\n if (range instanceof Range) {\n if (range.loose === !!options.loose &&\n range.includePrerelease === !!options.includePrerelease) {\n return range\n } else {\n return new Range(range.raw, options)\n }\n }\n\n if (range instanceof Comparator) {\n return new Range(range.value, options)\n }\n\n if (!(this instanceof Range)) {\n return new Range(range, options)\n }\n\n this.options = options\n this.loose = !!options.loose\n this.includePrerelease = !!options.includePrerelease\n\n // First, split based on boolean or ||\n this.raw = range\n this.set = range.split(/\\s*\\|\\|\\s*/).map(function (range) 
{\n return this.parseRange(range.trim())\n }, this).filter(function (c) {\n // throw out any that are not relevant for whatever reason\n return c.length\n })\n\n if (!this.set.length) {\n throw new TypeError('Invalid SemVer Range: ' + range)\n }\n\n this.format()\n}\n\nRange.prototype.format = function () {\n this.range = this.set.map(function (comps) {\n return comps.join(' ').trim()\n }).join('||').trim()\n return this.range\n}\n\nRange.prototype.toString = function () {\n return this.range\n}\n\nRange.prototype.parseRange = function (range) {\n var loose = this.options.loose\n range = range.trim()\n // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4`\n var hr = loose ? re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE]\n range = range.replace(hr, hyphenReplace)\n debug('hyphen replace', range)\n // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5`\n range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace)\n debug('comparator trim', range, re[t.COMPARATORTRIM])\n\n // `~ 1.2.3` => `~1.2.3`\n range = range.replace(re[t.TILDETRIM], tildeTrimReplace)\n\n // `^ 1.2.3` => `^1.2.3`\n range = range.replace(re[t.CARETTRIM], caretTrimReplace)\n\n // normalize spaces\n range = range.split(/\\s+/).join(' ')\n\n // At this point, the range is completely trimmed and\n // ready to be split into comparators.\n\n var compRe = loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR]\n var set = range.split(' ').map(function (comp) {\n return parseComparator(comp, this.options)\n }, this).join(' ').split(/\\s+/)\n if (this.options.loose) {\n // in loose mode, throw out any that are not valid comparators\n set = set.filter(function (comp) {\n return !!comp.match(compRe)\n })\n }\n set = set.map(function (comp) {\n return new Comparator(comp, this.options)\n }, this)\n\n return set\n}\n\nRange.prototype.intersects = function (range, options) {\n if (!(range instanceof Range)) {\n throw new TypeError('a Range is required')\n }\n\n return this.set.some(function (thisComparators) {\n return (\n isSatisfiable(thisComparators, options) &&\n range.set.some(function (rangeComparators) {\n return (\n isSatisfiable(rangeComparators, options) &&\n thisComparators.every(function (thisComparator) {\n return rangeComparators.every(function (rangeComparator) {\n return thisComparator.intersects(rangeComparator, options)\n })\n })\n )\n })\n )\n })\n}\n\n// take a set of comparators and determine whether there\n// exists a version which can satisfy it\nfunction isSatisfiable (comparators, options) {\n var result = true\n var remainingComparators = comparators.slice()\n var testComparator = remainingComparators.pop()\n\n while (result && remainingComparators.length) {\n result = remainingComparators.every(function (otherComparator) {\n return testComparator.intersects(otherComparator, options)\n })\n\n testComparator = remainingComparators.pop()\n }\n\n return result\n}\n\n// Mostly just for testing and legacy API reasons\nexports.toComparators = toComparators\nfunction toComparators (range, options) {\n return new Range(range, options).set.map(function (comp) {\n return comp.map(function (c) {\n return c.value\n }).join(' ').trim().split(' ')\n })\n}\n\n// comprised of xranges, tildes, stars, and gtlt's at this point.\n// already replaced the hyphen ranges\n// turn into a set of JUST comparators.\nfunction parseComparator (comp, options) {\n debug('comp', comp, options)\n comp = replaceCarets(comp, options)\n debug('caret', comp)\n comp = replaceTildes(comp, options)\n debug('tildes', comp)\n comp = replaceXRanges(comp, options)\n debug('xrange', 
comp)\n comp = replaceStars(comp, options)\n debug('stars', comp)\n return comp\n}\n\nfunction isX (id) {\n return !id || id.toLowerCase() === 'x' || id === '*'\n}\n\n// ~, ~> --> * (any, kinda silly)\n// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0\n// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0\n// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0\n// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0\n// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0\nfunction replaceTildes (comp, options) {\n return comp.trim().split(/\\s+/).map(function (comp) {\n return replaceTilde(comp, options)\n }).join(' ')\n}\n\nfunction replaceTilde (comp, options) {\n var r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE]\n return comp.replace(r, function (_, M, m, p, pr) {\n debug('tilde', comp, _, M, m, p, pr)\n var ret\n\n if (isX(M)) {\n ret = ''\n } else if (isX(m)) {\n ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'\n } else if (isX(p)) {\n // ~1.2 == >=1.2.0 <1.3.0\n ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'\n } else if (pr) {\n debug('replaceTilde pr', pr)\n ret = '>=' + M + '.' + m + '.' + p + '-' + pr +\n ' <' + M + '.' + (+m + 1) + '.0'\n } else {\n // ~1.2.3 == >=1.2.3 <1.3.0\n ret = '>=' + M + '.' + m + '.' + p +\n ' <' + M + '.' + (+m + 1) + '.0'\n }\n\n debug('tilde return', ret)\n return ret\n })\n}\n\n// ^ --> * (any, kinda silly)\n// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0\n// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0\n// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0\n// ^1.2.3 --> >=1.2.3 <2.0.0\n// ^1.2.0 --> >=1.2.0 <2.0.0\nfunction replaceCarets (comp, options) {\n return comp.trim().split(/\\s+/).map(function (comp) {\n return replaceCaret(comp, options)\n }).join(' ')\n}\n\nfunction replaceCaret (comp, options) {\n debug('caret', comp, options)\n var r = options.loose ? re[t.CARETLOOSE] : re[t.CARET]\n return comp.replace(r, function (_, M, m, p, pr) {\n debug('caret', comp, _, M, m, p, pr)\n var ret\n\n if (isX(M)) {\n ret = ''\n } else if (isX(m)) {\n ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'\n } else if (isX(p)) {\n if (M === '0') {\n ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'\n } else {\n ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0'\n }\n } else if (pr) {\n debug('replaceCaret pr', pr)\n if (M === '0') {\n if (m === '0') {\n ret = '>=' + M + '.' + m + '.' + p + '-' + pr +\n ' <' + M + '.' + m + '.' + (+p + 1)\n } else {\n ret = '>=' + M + '.' + m + '.' + p + '-' + pr +\n ' <' + M + '.' + (+m + 1) + '.0'\n }\n } else {\n ret = '>=' + M + '.' + m + '.' + p + '-' + pr +\n ' <' + (+M + 1) + '.0.0'\n }\n } else {\n debug('no pr')\n if (M === '0') {\n if (m === '0') {\n ret = '>=' + M + '.' + m + '.' + p +\n ' <' + M + '.' + m + '.' + (+p + 1)\n } else {\n ret = '>=' + M + '.' + m + '.' + p +\n ' <' + M + '.' + (+m + 1) + '.0'\n }\n } else {\n ret = '>=' + M + '.' + m + '.' + p +\n ' <' + (+M + 1) + '.0.0'\n }\n }\n\n debug('caret return', ret)\n return ret\n })\n}\n\nfunction replaceXRanges (comp, options) {\n debug('replaceXRanges', comp, options)\n return comp.split(/\\s+/).map(function (comp) {\n return replaceXRange(comp, options)\n }).join(' ')\n}\n\nfunction replaceXRange (comp, options) {\n comp = comp.trim()\n var r = options.loose ? 
re[t.XRANGELOOSE] : re[t.XRANGE]\n return comp.replace(r, function (ret, gtlt, M, m, p, pr) {\n debug('xRange', comp, ret, gtlt, M, m, p, pr)\n var xM = isX(M)\n var xm = xM || isX(m)\n var xp = xm || isX(p)\n var anyX = xp\n\n if (gtlt === '=' && anyX) {\n gtlt = ''\n }\n\n // if we're including prereleases in the match, then we need\n // to fix this to -0, the lowest possible prerelease value\n pr = options.includePrerelease ? '-0' : ''\n\n if (xM) {\n if (gtlt === '>' || gtlt === '<') {\n // nothing is allowed\n ret = '<0.0.0-0'\n } else {\n // nothing is forbidden\n ret = '*'\n }\n } else if (gtlt && anyX) {\n // we know patch is an x, because we have any x at all.\n // replace X with 0\n if (xm) {\n m = 0\n }\n p = 0\n\n if (gtlt === '>') {\n // >1 => >=2.0.0\n // >1.2 => >=1.3.0\n // >1.2.3 => >= 1.2.4\n gtlt = '>='\n if (xm) {\n M = +M + 1\n m = 0\n p = 0\n } else {\n m = +m + 1\n p = 0\n }\n } else if (gtlt === '<=') {\n // <=0.7.x is actually <0.8.0, since any 0.7.x should\n // pass. Similarly, <=7.x is actually <8.0.0, etc.\n gtlt = '<'\n if (xm) {\n M = +M + 1\n } else {\n m = +m + 1\n }\n }\n\n ret = gtlt + M + '.' + m + '.' + p + pr\n } else if (xm) {\n ret = '>=' + M + '.0.0' + pr + ' <' + (+M + 1) + '.0.0' + pr\n } else if (xp) {\n ret = '>=' + M + '.' + m + '.0' + pr +\n ' <' + M + '.' + (+m + 1) + '.0' + pr\n }\n\n debug('xRange return', ret)\n\n return ret\n })\n}\n\n// Because * is AND-ed with everything else in the comparator,\n// and '' means \"any version\", just remove the *s entirely.\nfunction replaceStars (comp, options) {\n debug('replaceStars', comp, options)\n // Looseness is ignored here. star is always as loose as it gets!\n return comp.trim().replace(re[t.STAR], '')\n}\n\n// This function is passed to string.replace(re[t.HYPHENRANGE])\n// M, m, patch, prerelease, build\n// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5\n// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do\n// 1.2 - 3.4 => >=1.2.0 <3.5.0\nfunction hyphenReplace ($0,\n from, fM, fm, fp, fpr, fb,\n to, tM, tm, tp, tpr, tb) {\n if (isX(fM)) {\n from = ''\n } else if (isX(fm)) {\n from = '>=' + fM + '.0.0'\n } else if (isX(fp)) {\n from = '>=' + fM + '.' + fm + '.0'\n } else {\n from = '>=' + from\n }\n\n if (isX(tM)) {\n to = ''\n } else if (isX(tm)) {\n to = '<' + (+tM + 1) + '.0.0'\n } else if (isX(tp)) {\n to = '<' + tM + '.' + (+tm + 1) + '.0'\n } else if (tpr) {\n to = '<=' + tM + '.' + tm + '.' 
+ tp + '-' + tpr\n } else {\n to = '<=' + to\n }\n\n return (from + ' ' + to).trim()\n}\n\n// if ANY of the sets match ALL of its comparators, then pass\nRange.prototype.test = function (version) {\n if (!version) {\n return false\n }\n\n if (typeof version === 'string') {\n try {\n version = new SemVer(version, this.options)\n } catch (er) {\n return false\n }\n }\n\n for (var i = 0; i < this.set.length; i++) {\n if (testSet(this.set[i], version, this.options)) {\n return true\n }\n }\n return false\n}\n\nfunction testSet (set, version, options) {\n for (var i = 0; i < set.length; i++) {\n if (!set[i].test(version)) {\n return false\n }\n }\n\n if (version.prerelease.length && !options.includePrerelease) {\n // Find the set of versions that are allowed to have prereleases\n // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0\n // That should allow `1.2.3-pr.2` to pass.\n // However, `1.2.4-alpha.notready` should NOT be allowed,\n // even though it's within the range set by the comparators.\n for (i = 0; i < set.length; i++) {\n debug(set[i].semver)\n if (set[i].semver === ANY) {\n continue\n }\n\n if (set[i].semver.prerelease.length > 0) {\n var allowed = set[i].semver\n if (allowed.major === version.major &&\n allowed.minor === version.minor &&\n allowed.patch === version.patch) {\n return true\n }\n }\n }\n\n // Version has a -pre, but it's not one of the ones we like.\n return false\n }\n\n return true\n}\n\nexports.satisfies = satisfies\nfunction satisfies (version, range, options) {\n try {\n range = new Range(range, options)\n } catch (er) {\n return false\n }\n return range.test(version)\n}\n\nexports.maxSatisfying = maxSatisfying\nfunction maxSatisfying (versions, range, options) {\n var max = null\n var maxSV = null\n try {\n var rangeObj = new Range(range, options)\n } catch (er) {\n return null\n }\n versions.forEach(function (v) {\n if (rangeObj.test(v)) {\n // satisfies(v, range, options)\n if (!max || maxSV.compare(v) === -1) {\n // compare(max, v, true)\n max = v\n maxSV = new SemVer(max, options)\n }\n }\n })\n return max\n}\n\nexports.minSatisfying = minSatisfying\nfunction minSatisfying (versions, range, options) {\n var min = null\n var minSV = null\n try {\n var rangeObj = new Range(range, options)\n } catch (er) {\n return null\n }\n versions.forEach(function (v) {\n if (rangeObj.test(v)) {\n // satisfies(v, range, options)\n if (!min || minSV.compare(v) === 1) {\n // compare(min, v, true)\n min = v\n minSV = new SemVer(min, options)\n }\n }\n })\n return min\n}\n\nexports.minVersion = minVersion\nfunction minVersion (range, loose) {\n range = new Range(range, loose)\n\n var minver = new SemVer('0.0.0')\n if (range.test(minver)) {\n return minver\n }\n\n minver = new SemVer('0.0.0-0')\n if (range.test(minver)) {\n return minver\n }\n\n minver = null\n for (var i = 0; i < range.set.length; ++i) {\n var comparators = range.set[i]\n\n comparators.forEach(function (comparator) {\n // Clone to avoid manipulating the comparator's semver object.\n var compver = new SemVer(comparator.semver.version)\n switch (comparator.operator) {\n case '>':\n if (compver.prerelease.length === 0) {\n compver.patch++\n } else {\n compver.prerelease.push(0)\n }\n compver.raw = compver.format()\n /* fallthrough */\n case '':\n case '>=':\n if (!minver || gt(minver, compver)) {\n minver = compver\n }\n break\n case '<':\n case '<=':\n /* Ignore maximum versions */\n break\n /* istanbul ignore next */\n default:\n throw new Error('Unexpected operation: ' + comparator.operator)\n }\n })\n 
}\n\n if (minver && range.test(minver)) {\n return minver\n }\n\n return null\n}\n\nexports.validRange = validRange\nfunction validRange (range, options) {\n try {\n // Return '*' instead of '' so that truthiness works.\n // This will throw if it's invalid anyway\n return new Range(range, options).range || '*'\n } catch (er) {\n return null\n }\n}\n\n// Determine if version is less than all the versions possible in the range\nexports.ltr = ltr\nfunction ltr (version, range, options) {\n return outside(version, range, '<', options)\n}\n\n// Determine if version is greater than all the versions possible in the range.\nexports.gtr = gtr\nfunction gtr (version, range, options) {\n return outside(version, range, '>', options)\n}\n\nexports.outside = outside\nfunction outside (version, range, hilo, options) {\n version = new SemVer(version, options)\n range = new Range(range, options)\n\n var gtfn, ltefn, ltfn, comp, ecomp\n switch (hilo) {\n case '>':\n gtfn = gt\n ltefn = lte\n ltfn = lt\n comp = '>'\n ecomp = '>='\n break\n case '<':\n gtfn = lt\n ltefn = gte\n ltfn = gt\n comp = '<'\n ecomp = '<='\n break\n default:\n throw new TypeError('Must provide a hilo val of \"<\" or \">\"')\n }\n\n // If it satisifes the range it is not outside\n if (satisfies(version, range, options)) {\n return false\n }\n\n // From now on, variable terms are as if we're in \"gtr\" mode.\n // but note that everything is flipped for the \"ltr\" function.\n\n for (var i = 0; i < range.set.length; ++i) {\n var comparators = range.set[i]\n\n var high = null\n var low = null\n\n comparators.forEach(function (comparator) {\n if (comparator.semver === ANY) {\n comparator = new Comparator('>=0.0.0')\n }\n high = high || comparator\n low = low || comparator\n if (gtfn(comparator.semver, high.semver, options)) {\n high = comparator\n } else if (ltfn(comparator.semver, low.semver, options)) {\n low = comparator\n }\n })\n\n // If the edge version comparator has a operator then our version\n // isn't outside it\n if (high.operator === comp || high.operator === ecomp) {\n return false\n }\n\n // If the lowest version comparator has an operator and our version\n // is less than it then it isn't higher than the range\n if ((!low.operator || low.operator === comp) &&\n ltefn(version, low.semver)) {\n return false\n } else if (low.operator === ecomp && ltfn(version, low.semver)) {\n return false\n }\n }\n return true\n}\n\nexports.prerelease = prerelease\nfunction prerelease (version, options) {\n var parsed = parse(version, options)\n return (parsed && parsed.prerelease.length) ? 
parsed.prerelease : null\n}\n\nexports.intersects = intersects\nfunction intersects (r1, r2, options) {\n r1 = new Range(r1, options)\n r2 = new Range(r2, options)\n return r1.intersects(r2)\n}\n\nexports.coerce = coerce\nfunction coerce (version, options) {\n if (version instanceof SemVer) {\n return version\n }\n\n if (typeof version === 'number') {\n version = String(version)\n }\n\n if (typeof version !== 'string') {\n return null\n }\n\n options = options || {}\n\n var match = null\n if (!options.rtl) {\n match = version.match(re[t.COERCE])\n } else {\n // Find the right-most coercible string that does not share\n // a terminus with a more left-ward coercible string.\n // Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4'\n //\n // Walk through the string checking with a /g regexp\n // Manually set the index so as to pick up overlapping matches.\n // Stop when we get a match that ends at the string end, since no\n // coercible string can be more right-ward without the same terminus.\n var next\n while ((next = re[t.COERCERTL].exec(version)) &&\n (!match || match.index + match[0].length !== version.length)\n ) {\n if (!match ||\n next.index + next[0].length !== match.index + match[0].length) {\n match = next\n }\n re[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length\n }\n // leave it in a clean state\n re[t.COERCERTL].lastIndex = -1\n }\n\n if (match === null) {\n return null\n }\n\n return parse(match[2] +\n '.' + (match[3] || '0') +\n '.' + (match[4] || '0'), options)\n}\n","module.exports = require('./lib/tunnel');\n","'use strict';\n\nvar net = require('net');\nvar tls = require('tls');\nvar http = require('http');\nvar https = require('https');\nvar events = require('events');\nvar assert = require('assert');\nvar util = require('util');\n\n\nexports.httpOverHttp = httpOverHttp;\nexports.httpsOverHttp = httpsOverHttp;\nexports.httpOverHttps = httpOverHttps;\nexports.httpsOverHttps = httpsOverHttps;\n\n\nfunction httpOverHttp(options) {\n var agent = new TunnelingAgent(options);\n agent.request = http.request;\n return agent;\n}\n\nfunction httpsOverHttp(options) {\n var agent = new TunnelingAgent(options);\n agent.request = http.request;\n agent.createSocket = createSecureSocket;\n agent.defaultPort = 443;\n return agent;\n}\n\nfunction httpOverHttps(options) {\n var agent = new TunnelingAgent(options);\n agent.request = https.request;\n return agent;\n}\n\nfunction httpsOverHttps(options) {\n var agent = new TunnelingAgent(options);\n agent.request = https.request;\n agent.createSocket = createSecureSocket;\n agent.defaultPort = 443;\n return agent;\n}\n\n\nfunction TunnelingAgent(options) {\n var self = this;\n self.options = options || {};\n self.proxyOptions = self.options.proxy || {};\n self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets;\n self.requests = [];\n self.sockets = [];\n\n self.on('free', function onFree(socket, host, port, localAddress) {\n var options = toOptions(host, port, localAddress);\n for (var i = 0, len = self.requests.length; i < len; ++i) {\n var pending = self.requests[i];\n if (pending.host === options.host && pending.port === options.port) {\n // Detect the request to connect same origin server,\n // reuse the connection.\n self.requests.splice(i, 1);\n pending.request.onSocket(socket);\n return;\n }\n }\n socket.destroy();\n self.removeSocket(socket);\n });\n}\nutil.inherits(TunnelingAgent, events.EventEmitter);\n\nTunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) 
{\n var self = this;\n var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress));\n\n if (self.sockets.length >= this.maxSockets) {\n // We are over limit so we'll add it to the queue.\n self.requests.push(options);\n return;\n }\n\n // If we are under maxSockets create a new one.\n self.createSocket(options, function(socket) {\n socket.on('free', onFree);\n socket.on('close', onCloseOrRemove);\n socket.on('agentRemove', onCloseOrRemove);\n req.onSocket(socket);\n\n function onFree() {\n self.emit('free', socket, options);\n }\n\n function onCloseOrRemove(err) {\n self.removeSocket(socket);\n socket.removeListener('free', onFree);\n socket.removeListener('close', onCloseOrRemove);\n socket.removeListener('agentRemove', onCloseOrRemove);\n }\n });\n};\n\nTunnelingAgent.prototype.createSocket = function createSocket(options, cb) {\n var self = this;\n var placeholder = {};\n self.sockets.push(placeholder);\n\n var connectOptions = mergeOptions({}, self.proxyOptions, {\n method: 'CONNECT',\n path: options.host + ':' + options.port,\n agent: false,\n headers: {\n host: options.host + ':' + options.port\n }\n });\n if (options.localAddress) {\n connectOptions.localAddress = options.localAddress;\n }\n if (connectOptions.proxyAuth) {\n connectOptions.headers = connectOptions.headers || {};\n connectOptions.headers['Proxy-Authorization'] = 'Basic ' +\n new Buffer(connectOptions.proxyAuth).toString('base64');\n }\n\n debug('making CONNECT request');\n var connectReq = self.request(connectOptions);\n connectReq.useChunkedEncodingByDefault = false; // for v0.6\n connectReq.once('response', onResponse); // for v0.6\n connectReq.once('upgrade', onUpgrade); // for v0.6\n connectReq.once('connect', onConnect); // for v0.7 or later\n connectReq.once('error', onError);\n connectReq.end();\n\n function onResponse(res) {\n // Very hacky. 
This is necessary to avoid http-parser leaks.\n res.upgrade = true;\n }\n\n function onUpgrade(res, socket, head) {\n // Hacky.\n process.nextTick(function() {\n onConnect(res, socket, head);\n });\n }\n\n function onConnect(res, socket, head) {\n connectReq.removeAllListeners();\n socket.removeAllListeners();\n\n if (res.statusCode !== 200) {\n debug('tunneling socket could not be established, statusCode=%d',\n res.statusCode);\n socket.destroy();\n var error = new Error('tunneling socket could not be established, ' +\n 'statusCode=' + res.statusCode);\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n return;\n }\n if (head.length > 0) {\n debug('got illegal response body from proxy');\n socket.destroy();\n var error = new Error('got illegal response body from proxy');\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n return;\n }\n debug('tunneling connection has established');\n self.sockets[self.sockets.indexOf(placeholder)] = socket;\n return cb(socket);\n }\n\n function onError(cause) {\n connectReq.removeAllListeners();\n\n debug('tunneling socket could not be established, cause=%s\\n',\n cause.message, cause.stack);\n var error = new Error('tunneling socket could not be established, ' +\n 'cause=' + cause.message);\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n }\n};\n\nTunnelingAgent.prototype.removeSocket = function removeSocket(socket) {\n var pos = this.sockets.indexOf(socket)\n if (pos === -1) {\n return;\n }\n this.sockets.splice(pos, 1);\n\n var pending = this.requests.shift();\n if (pending) {\n // If we have pending requests and a socket gets closed a new one\n // needs to be created to take over in the pool for the one that closed.\n this.createSocket(pending, function(socket) {\n pending.request.onSocket(socket);\n });\n }\n};\n\nfunction createSecureSocket(options, cb) {\n var self = this;\n TunnelingAgent.prototype.createSocket.call(self, options, function(socket) {\n var hostHeader = options.request.getHeader('host');\n var tlsOptions = mergeOptions({}, self.options, {\n socket: socket,\n servername: hostHeader ? 
hostHeader.replace(/:.*$/, '') : options.host\n });\n\n // 0 is dummy port for v0.6\n var secureSocket = tls.connect(0, tlsOptions);\n self.sockets[self.sockets.indexOf(socket)] = secureSocket;\n cb(secureSocket);\n });\n}\n\n\nfunction toOptions(host, port, localAddress) {\n if (typeof host === 'string') { // since v0.10\n return {\n host: host,\n port: port,\n localAddress: localAddress\n };\n }\n return host; // for v0.11 or later\n}\n\nfunction mergeOptions(target) {\n for (var i = 1, len = arguments.length; i < len; ++i) {\n var overrides = arguments[i];\n if (typeof overrides === 'object') {\n var keys = Object.keys(overrides);\n for (var j = 0, keyLen = keys.length; j < keyLen; ++j) {\n var k = keys[j];\n if (overrides[k] !== undefined) {\n target[k] = overrides[k];\n }\n }\n }\n }\n return target;\n}\n\n\nvar debug;\nif (process.env.NODE_DEBUG && /\\btunnel\\b/.test(process.env.NODE_DEBUG)) {\n debug = function() {\n var args = Array.prototype.slice.call(arguments);\n if (typeof args[0] === 'string') {\n args[0] = 'TUNNEL: ' + args[0];\n } else {\n args.unshift('TUNNEL:');\n }\n console.error.apply(console, args);\n }\n} else {\n debug = function() {};\n}\nexports.debug = debug; // for test\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction getUserAgent() {\n if (typeof navigator === \"object\" && \"userAgent\" in navigator) {\n return navigator.userAgent;\n }\n\n if (typeof process === \"object\" && \"version\" in process) {\n return `Node.js/${process.version.substr(1)} (${process.platform}; ${process.arch})`;\n }\n\n return \"\";\n}\n\nexports.getUserAgent = getUserAgent;\n//# sourceMappingURL=index.js.map\n","/**\n * web-streams-polyfill v3.2.1\n */\n(function (global, factory) {\n typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :\n typeof define === 'function' && define.amd ? define(['exports'], factory) :\n (global = typeof globalThis !== 'undefined' ? 
globalThis : global || self, factory(global.WebStreamsPolyfill = {}));\n}(this, (function (exports) { 'use strict';\n\n /// \n const SymbolPolyfill = typeof Symbol === 'function' && typeof Symbol.iterator === 'symbol' ?\n Symbol :\n description => `Symbol(${description})`;\n\n /// \n function noop() {\n return undefined;\n }\n function getGlobals() {\n if (typeof self !== 'undefined') {\n return self;\n }\n else if (typeof window !== 'undefined') {\n return window;\n }\n else if (typeof global !== 'undefined') {\n return global;\n }\n return undefined;\n }\n const globals = getGlobals();\n\n function typeIsObject(x) {\n return (typeof x === 'object' && x !== null) || typeof x === 'function';\n }\n const rethrowAssertionErrorRejection = noop;\n\n const originalPromise = Promise;\n const originalPromiseThen = Promise.prototype.then;\n const originalPromiseResolve = Promise.resolve.bind(originalPromise);\n const originalPromiseReject = Promise.reject.bind(originalPromise);\n function newPromise(executor) {\n return new originalPromise(executor);\n }\n function promiseResolvedWith(value) {\n return originalPromiseResolve(value);\n }\n function promiseRejectedWith(reason) {\n return originalPromiseReject(reason);\n }\n function PerformPromiseThen(promise, onFulfilled, onRejected) {\n // There doesn't appear to be any way to correctly emulate the behaviour from JavaScript, so this is just an\n // approximation.\n return originalPromiseThen.call(promise, onFulfilled, onRejected);\n }\n function uponPromise(promise, onFulfilled, onRejected) {\n PerformPromiseThen(PerformPromiseThen(promise, onFulfilled, onRejected), undefined, rethrowAssertionErrorRejection);\n }\n function uponFulfillment(promise, onFulfilled) {\n uponPromise(promise, onFulfilled);\n }\n function uponRejection(promise, onRejected) {\n uponPromise(promise, undefined, onRejected);\n }\n function transformPromiseWith(promise, fulfillmentHandler, rejectionHandler) {\n return PerformPromiseThen(promise, fulfillmentHandler, rejectionHandler);\n }\n function setPromiseIsHandledToTrue(promise) {\n PerformPromiseThen(promise, undefined, rethrowAssertionErrorRejection);\n }\n const queueMicrotask = (() => {\n const globalQueueMicrotask = globals && globals.queueMicrotask;\n if (typeof globalQueueMicrotask === 'function') {\n return globalQueueMicrotask;\n }\n const resolvedPromise = promiseResolvedWith(undefined);\n return (fn) => PerformPromiseThen(resolvedPromise, fn);\n })();\n function reflectCall(F, V, args) {\n if (typeof F !== 'function') {\n throw new TypeError('Argument is not a function');\n }\n return Function.prototype.apply.call(F, V, args);\n }\n function promiseCall(F, V, args) {\n try {\n return promiseResolvedWith(reflectCall(F, V, args));\n }\n catch (value) {\n return promiseRejectedWith(value);\n }\n }\n\n // Original from Chromium\n // https://chromium.googlesource.com/chromium/src/+/0aee4434a4dba42a42abaea9bfbc0cd196a63bc1/third_party/blink/renderer/core/streams/SimpleQueue.js\n const QUEUE_MAX_ARRAY_SIZE = 16384;\n /**\n * Simple queue structure.\n *\n * Avoids scalability issues with using a packed array directly by using\n * multiple arrays in a linked list and keeping the array size bounded.\n */\n class SimpleQueue {\n constructor() {\n this._cursor = 0;\n this._size = 0;\n // _front and _back are always defined.\n this._front = {\n _elements: [],\n _next: undefined\n };\n this._back = this._front;\n // The cursor is used to avoid calling Array.shift().\n // It contains the index of the front element of the array 
inside the\n // front-most node. It is always in the range [0, QUEUE_MAX_ARRAY_SIZE).\n this._cursor = 0;\n // When there is only one node, size === elements.length - cursor.\n this._size = 0;\n }\n get length() {\n return this._size;\n }\n // For exception safety, this method is structured in order:\n // 1. Read state\n // 2. Calculate required state mutations\n // 3. Perform state mutations\n push(element) {\n const oldBack = this._back;\n let newBack = oldBack;\n if (oldBack._elements.length === QUEUE_MAX_ARRAY_SIZE - 1) {\n newBack = {\n _elements: [],\n _next: undefined\n };\n }\n // push() is the mutation most likely to throw an exception, so it\n // goes first.\n oldBack._elements.push(element);\n if (newBack !== oldBack) {\n this._back = newBack;\n oldBack._next = newBack;\n }\n ++this._size;\n }\n // Like push(), shift() follows the read -> calculate -> mutate pattern for\n // exception safety.\n shift() { // must not be called on an empty queue\n const oldFront = this._front;\n let newFront = oldFront;\n const oldCursor = this._cursor;\n let newCursor = oldCursor + 1;\n const elements = oldFront._elements;\n const element = elements[oldCursor];\n if (newCursor === QUEUE_MAX_ARRAY_SIZE) {\n newFront = oldFront._next;\n newCursor = 0;\n }\n // No mutations before this point.\n --this._size;\n this._cursor = newCursor;\n if (oldFront !== newFront) {\n this._front = newFront;\n }\n // Permit shifted element to be garbage collected.\n elements[oldCursor] = undefined;\n return element;\n }\n // The tricky thing about forEach() is that it can be called\n // re-entrantly. The queue may be mutated inside the callback. It is easy to\n // see that push() within the callback has no negative effects since the end\n // of the queue is checked for on every iteration. If shift() is called\n // repeatedly within the callback then the next iteration may return an\n // element that has been removed. 
In this case the callback will be called\n // with undefined values until we either \"catch up\" with elements that still\n // exist or reach the back of the queue.\n forEach(callback) {\n let i = this._cursor;\n let node = this._front;\n let elements = node._elements;\n while (i !== elements.length || node._next !== undefined) {\n if (i === elements.length) {\n node = node._next;\n elements = node._elements;\n i = 0;\n if (elements.length === 0) {\n break;\n }\n }\n callback(elements[i]);\n ++i;\n }\n }\n // Return the element that would be returned if shift() was called now,\n // without modifying the queue.\n peek() { // must not be called on an empty queue\n const front = this._front;\n const cursor = this._cursor;\n return front._elements[cursor];\n }\n }\n\n function ReadableStreamReaderGenericInitialize(reader, stream) {\n reader._ownerReadableStream = stream;\n stream._reader = reader;\n if (stream._state === 'readable') {\n defaultReaderClosedPromiseInitialize(reader);\n }\n else if (stream._state === 'closed') {\n defaultReaderClosedPromiseInitializeAsResolved(reader);\n }\n else {\n defaultReaderClosedPromiseInitializeAsRejected(reader, stream._storedError);\n }\n }\n // A client of ReadableStreamDefaultReader and ReadableStreamBYOBReader may use these functions directly to bypass state\n // check.\n function ReadableStreamReaderGenericCancel(reader, reason) {\n const stream = reader._ownerReadableStream;\n return ReadableStreamCancel(stream, reason);\n }\n function ReadableStreamReaderGenericRelease(reader) {\n if (reader._ownerReadableStream._state === 'readable') {\n defaultReaderClosedPromiseReject(reader, new TypeError(`Reader was released and can no longer be used to monitor the stream's closedness`));\n }\n else {\n defaultReaderClosedPromiseResetToRejected(reader, new TypeError(`Reader was released and can no longer be used to monitor the stream's closedness`));\n }\n reader._ownerReadableStream._reader = undefined;\n reader._ownerReadableStream = undefined;\n }\n // Helper functions for the readers.\n function readerLockException(name) {\n return new TypeError('Cannot ' + name + ' a stream using a released reader');\n }\n // Helper functions for the ReadableStreamDefaultReader.\n function defaultReaderClosedPromiseInitialize(reader) {\n reader._closedPromise = newPromise((resolve, reject) => {\n reader._closedPromise_resolve = resolve;\n reader._closedPromise_reject = reject;\n });\n }\n function defaultReaderClosedPromiseInitializeAsRejected(reader, reason) {\n defaultReaderClosedPromiseInitialize(reader);\n defaultReaderClosedPromiseReject(reader, reason);\n }\n function defaultReaderClosedPromiseInitializeAsResolved(reader) {\n defaultReaderClosedPromiseInitialize(reader);\n defaultReaderClosedPromiseResolve(reader);\n }\n function defaultReaderClosedPromiseReject(reader, reason) {\n if (reader._closedPromise_reject === undefined) {\n return;\n }\n setPromiseIsHandledToTrue(reader._closedPromise);\n reader._closedPromise_reject(reason);\n reader._closedPromise_resolve = undefined;\n reader._closedPromise_reject = undefined;\n }\n function defaultReaderClosedPromiseResetToRejected(reader, reason) {\n defaultReaderClosedPromiseInitializeAsRejected(reader, reason);\n }\n function defaultReaderClosedPromiseResolve(reader) {\n if (reader._closedPromise_resolve === undefined) {\n return;\n }\n reader._closedPromise_resolve(undefined);\n reader._closedPromise_resolve = undefined;\n reader._closedPromise_reject = undefined;\n }\n\n const AbortSteps = 
SymbolPolyfill('[[AbortSteps]]');\n const ErrorSteps = SymbolPolyfill('[[ErrorSteps]]');\n const CancelSteps = SymbolPolyfill('[[CancelSteps]]');\n const PullSteps = SymbolPolyfill('[[PullSteps]]');\n\n /// \n // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/isFinite#Polyfill\n const NumberIsFinite = Number.isFinite || function (x) {\n return typeof x === 'number' && isFinite(x);\n };\n\n /// \n // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/trunc#Polyfill\n const MathTrunc = Math.trunc || function (v) {\n return v < 0 ? Math.ceil(v) : Math.floor(v);\n };\n\n // https://heycam.github.io/webidl/#idl-dictionaries\n function isDictionary(x) {\n return typeof x === 'object' || typeof x === 'function';\n }\n function assertDictionary(obj, context) {\n if (obj !== undefined && !isDictionary(obj)) {\n throw new TypeError(`${context} is not an object.`);\n }\n }\n // https://heycam.github.io/webidl/#idl-callback-functions\n function assertFunction(x, context) {\n if (typeof x !== 'function') {\n throw new TypeError(`${context} is not a function.`);\n }\n }\n // https://heycam.github.io/webidl/#idl-object\n function isObject(x) {\n return (typeof x === 'object' && x !== null) || typeof x === 'function';\n }\n function assertObject(x, context) {\n if (!isObject(x)) {\n throw new TypeError(`${context} is not an object.`);\n }\n }\n function assertRequiredArgument(x, position, context) {\n if (x === undefined) {\n throw new TypeError(`Parameter ${position} is required in '${context}'.`);\n }\n }\n function assertRequiredField(x, field, context) {\n if (x === undefined) {\n throw new TypeError(`${field} is required in '${context}'.`);\n }\n }\n // https://heycam.github.io/webidl/#idl-unrestricted-double\n function convertUnrestrictedDouble(value) {\n return Number(value);\n }\n function censorNegativeZero(x) {\n return x === 0 ? 
0 : x;\n }\n function integerPart(x) {\n return censorNegativeZero(MathTrunc(x));\n }\n // https://heycam.github.io/webidl/#idl-unsigned-long-long\n function convertUnsignedLongLongWithEnforceRange(value, context) {\n const lowerBound = 0;\n const upperBound = Number.MAX_SAFE_INTEGER;\n let x = Number(value);\n x = censorNegativeZero(x);\n if (!NumberIsFinite(x)) {\n throw new TypeError(`${context} is not a finite number`);\n }\n x = integerPart(x);\n if (x < lowerBound || x > upperBound) {\n throw new TypeError(`${context} is outside the accepted range of ${lowerBound} to ${upperBound}, inclusive`);\n }\n if (!NumberIsFinite(x) || x === 0) {\n return 0;\n }\n // TODO Use BigInt if supported?\n // let xBigInt = BigInt(integerPart(x));\n // xBigInt = BigInt.asUintN(64, xBigInt);\n // return Number(xBigInt);\n return x;\n }\n\n function assertReadableStream(x, context) {\n if (!IsReadableStream(x)) {\n throw new TypeError(`${context} is not a ReadableStream.`);\n }\n }\n\n // Abstract operations for the ReadableStream.\n function AcquireReadableStreamDefaultReader(stream) {\n return new ReadableStreamDefaultReader(stream);\n }\n // ReadableStream API exposed for controllers.\n function ReadableStreamAddReadRequest(stream, readRequest) {\n stream._reader._readRequests.push(readRequest);\n }\n function ReadableStreamFulfillReadRequest(stream, chunk, done) {\n const reader = stream._reader;\n const readRequest = reader._readRequests.shift();\n if (done) {\n readRequest._closeSteps();\n }\n else {\n readRequest._chunkSteps(chunk);\n }\n }\n function ReadableStreamGetNumReadRequests(stream) {\n return stream._reader._readRequests.length;\n }\n function ReadableStreamHasDefaultReader(stream) {\n const reader = stream._reader;\n if (reader === undefined) {\n return false;\n }\n if (!IsReadableStreamDefaultReader(reader)) {\n return false;\n }\n return true;\n }\n /**\n * A default reader vended by a {@link ReadableStream}.\n *\n * @public\n */\n class ReadableStreamDefaultReader {\n constructor(stream) {\n assertRequiredArgument(stream, 1, 'ReadableStreamDefaultReader');\n assertReadableStream(stream, 'First parameter');\n if (IsReadableStreamLocked(stream)) {\n throw new TypeError('This stream has already been locked for exclusive reading by another reader');\n }\n ReadableStreamReaderGenericInitialize(this, stream);\n this._readRequests = new SimpleQueue();\n }\n /**\n * Returns a promise that will be fulfilled when the stream becomes closed,\n * or rejected if the stream ever errors or the reader's lock is released before the stream finishes closing.\n */\n get closed() {\n if (!IsReadableStreamDefaultReader(this)) {\n return promiseRejectedWith(defaultReaderBrandCheckException('closed'));\n }\n return this._closedPromise;\n }\n /**\n * If the reader is active, behaves the same as {@link ReadableStream.cancel | stream.cancel(reason)}.\n */\n cancel(reason = undefined) {\n if (!IsReadableStreamDefaultReader(this)) {\n return promiseRejectedWith(defaultReaderBrandCheckException('cancel'));\n }\n if (this._ownerReadableStream === undefined) {\n return promiseRejectedWith(readerLockException('cancel'));\n }\n return ReadableStreamReaderGenericCancel(this, reason);\n }\n /**\n * Returns a promise that allows access to the next chunk from the stream's internal queue, if available.\n *\n * If reading a chunk causes the queue to become empty, more data will be pulled from the underlying source.\n */\n read() {\n if (!IsReadableStreamDefaultReader(this)) {\n return 
promiseRejectedWith(defaultReaderBrandCheckException('read'));\n }\n if (this._ownerReadableStream === undefined) {\n return promiseRejectedWith(readerLockException('read from'));\n }\n let resolvePromise;\n let rejectPromise;\n const promise = newPromise((resolve, reject) => {\n resolvePromise = resolve;\n rejectPromise = reject;\n });\n const readRequest = {\n _chunkSteps: chunk => resolvePromise({ value: chunk, done: false }),\n _closeSteps: () => resolvePromise({ value: undefined, done: true }),\n _errorSteps: e => rejectPromise(e)\n };\n ReadableStreamDefaultReaderRead(this, readRequest);\n return promise;\n }\n /**\n * Releases the reader's lock on the corresponding stream. After the lock is released, the reader is no longer active.\n * If the associated stream is errored when the lock is released, the reader will appear errored in the same way\n * from now on; otherwise, the reader will appear closed.\n *\n * A reader's lock cannot be released while it still has a pending read request, i.e., if a promise returned by\n * the reader's {@link ReadableStreamDefaultReader.read | read()} method has not yet been settled. Attempting to\n * do so will throw a `TypeError` and leave the reader locked to the stream.\n */\n releaseLock() {\n if (!IsReadableStreamDefaultReader(this)) {\n throw defaultReaderBrandCheckException('releaseLock');\n }\n if (this._ownerReadableStream === undefined) {\n return;\n }\n if (this._readRequests.length > 0) {\n throw new TypeError('Tried to release a reader lock when that reader has pending read() calls un-settled');\n }\n ReadableStreamReaderGenericRelease(this);\n }\n }\n Object.defineProperties(ReadableStreamDefaultReader.prototype, {\n cancel: { enumerable: true },\n read: { enumerable: true },\n releaseLock: { enumerable: true },\n closed: { enumerable: true }\n });\n if (typeof SymbolPolyfill.toStringTag === 'symbol') {\n Object.defineProperty(ReadableStreamDefaultReader.prototype, SymbolPolyfill.toStringTag, {\n value: 'ReadableStreamDefaultReader',\n configurable: true\n });\n }\n // Abstract operations for the readers.\n function IsReadableStreamDefaultReader(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, '_readRequests')) {\n return false;\n }\n return x instanceof ReadableStreamDefaultReader;\n }\n function ReadableStreamDefaultReaderRead(reader, readRequest) {\n const stream = reader._ownerReadableStream;\n stream._disturbed = true;\n if (stream._state === 'closed') {\n readRequest._closeSteps();\n }\n else if (stream._state === 'errored') {\n readRequest._errorSteps(stream._storedError);\n }\n else {\n stream._readableStreamController[PullSteps](readRequest);\n }\n }\n // Helper functions for the ReadableStreamDefaultReader.\n function defaultReaderBrandCheckException(name) {\n return new TypeError(`ReadableStreamDefaultReader.prototype.${name} can only be used on a ReadableStreamDefaultReader`);\n }\n\n /// \n /* eslint-disable @typescript-eslint/no-empty-function */\n const AsyncIteratorPrototype = Object.getPrototypeOf(Object.getPrototypeOf(async function* () { }).prototype);\n\n /// \n class ReadableStreamAsyncIteratorImpl {\n constructor(reader, preventCancel) {\n this._ongoingPromise = undefined;\n this._isFinished = false;\n this._reader = reader;\n this._preventCancel = preventCancel;\n }\n next() {\n const nextSteps = () => this._nextSteps();\n this._ongoingPromise = this._ongoingPromise ?\n transformPromiseWith(this._ongoingPromise, nextSteps, nextSteps) :\n nextSteps();\n return 
this._ongoingPromise;\n }\n return(value) {\n const returnSteps = () => this._returnSteps(value);\n return this._ongoingPromise ?\n transformPromiseWith(this._ongoingPromise, returnSteps, returnSteps) :\n returnSteps();\n }\n _nextSteps() {\n if (this._isFinished) {\n return Promise.resolve({ value: undefined, done: true });\n }\n const reader = this._reader;\n if (reader._ownerReadableStream === undefined) {\n return promiseRejectedWith(readerLockException('iterate'));\n }\n let resolvePromise;\n let rejectPromise;\n const promise = newPromise((resolve, reject) => {\n resolvePromise = resolve;\n rejectPromise = reject;\n });\n const readRequest = {\n _chunkSteps: chunk => {\n this._ongoingPromise = undefined;\n // This needs to be delayed by one microtask, otherwise we stop pulling too early which breaks a test.\n // FIXME Is this a bug in the specification, or in the test?\n queueMicrotask(() => resolvePromise({ value: chunk, done: false }));\n },\n _closeSteps: () => {\n this._ongoingPromise = undefined;\n this._isFinished = true;\n ReadableStreamReaderGenericRelease(reader);\n resolvePromise({ value: undefined, done: true });\n },\n _errorSteps: reason => {\n this._ongoingPromise = undefined;\n this._isFinished = true;\n ReadableStreamReaderGenericRelease(reader);\n rejectPromise(reason);\n }\n };\n ReadableStreamDefaultReaderRead(reader, readRequest);\n return promise;\n }\n _returnSteps(value) {\n if (this._isFinished) {\n return Promise.resolve({ value, done: true });\n }\n this._isFinished = true;\n const reader = this._reader;\n if (reader._ownerReadableStream === undefined) {\n return promiseRejectedWith(readerLockException('finish iterating'));\n }\n if (!this._preventCancel) {\n const result = ReadableStreamReaderGenericCancel(reader, value);\n ReadableStreamReaderGenericRelease(reader);\n return transformPromiseWith(result, () => ({ value, done: true }));\n }\n ReadableStreamReaderGenericRelease(reader);\n return promiseResolvedWith({ value, done: true });\n }\n }\n const ReadableStreamAsyncIteratorPrototype = {\n next() {\n if (!IsReadableStreamAsyncIterator(this)) {\n return promiseRejectedWith(streamAsyncIteratorBrandCheckException('next'));\n }\n return this._asyncIteratorImpl.next();\n },\n return(value) {\n if (!IsReadableStreamAsyncIterator(this)) {\n return promiseRejectedWith(streamAsyncIteratorBrandCheckException('return'));\n }\n return this._asyncIteratorImpl.return(value);\n }\n };\n if (AsyncIteratorPrototype !== undefined) {\n Object.setPrototypeOf(ReadableStreamAsyncIteratorPrototype, AsyncIteratorPrototype);\n }\n // Abstract operations for the ReadableStream.\n function AcquireReadableStreamAsyncIterator(stream, preventCancel) {\n const reader = AcquireReadableStreamDefaultReader(stream);\n const impl = new ReadableStreamAsyncIteratorImpl(reader, preventCancel);\n const iterator = Object.create(ReadableStreamAsyncIteratorPrototype);\n iterator._asyncIteratorImpl = impl;\n return iterator;\n }\n function IsReadableStreamAsyncIterator(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, '_asyncIteratorImpl')) {\n return false;\n }\n try {\n // noinspection SuspiciousTypeOfGuard\n return x._asyncIteratorImpl instanceof\n ReadableStreamAsyncIteratorImpl;\n }\n catch (_a) {\n return false;\n }\n }\n // Helper functions for the ReadableStream.\n function streamAsyncIteratorBrandCheckException(name) {\n return new TypeError(`ReadableStreamAsyncIterator.${name} can only be used on a ReadableSteamAsyncIterator`);\n }\n\n 
/// \n // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/isNaN#Polyfill\n const NumberIsNaN = Number.isNaN || function (x) {\n // eslint-disable-next-line no-self-compare\n return x !== x;\n };\n\n function CreateArrayFromList(elements) {\n // We use arrays to represent lists, so this is basically a no-op.\n // Do a slice though just in case we happen to depend on the unique-ness.\n return elements.slice();\n }\n function CopyDataBlockBytes(dest, destOffset, src, srcOffset, n) {\n new Uint8Array(dest).set(new Uint8Array(src, srcOffset, n), destOffset);\n }\n // Not implemented correctly\n function TransferArrayBuffer(O) {\n return O;\n }\n // Not implemented correctly\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n function IsDetachedBuffer(O) {\n return false;\n }\n function ArrayBufferSlice(buffer, begin, end) {\n // ArrayBuffer.prototype.slice is not available on IE10\n // https://www.caniuse.com/mdn-javascript_builtins_arraybuffer_slice\n if (buffer.slice) {\n return buffer.slice(begin, end);\n }\n const length = end - begin;\n const slice = new ArrayBuffer(length);\n CopyDataBlockBytes(slice, 0, buffer, begin, length);\n return slice;\n }\n\n function IsNonNegativeNumber(v) {\n if (typeof v !== 'number') {\n return false;\n }\n if (NumberIsNaN(v)) {\n return false;\n }\n if (v < 0) {\n return false;\n }\n return true;\n }\n function CloneAsUint8Array(O) {\n const buffer = ArrayBufferSlice(O.buffer, O.byteOffset, O.byteOffset + O.byteLength);\n return new Uint8Array(buffer);\n }\n\n function DequeueValue(container) {\n const pair = container._queue.shift();\n container._queueTotalSize -= pair.size;\n if (container._queueTotalSize < 0) {\n container._queueTotalSize = 0;\n }\n return pair.value;\n }\n function EnqueueValueWithSize(container, value, size) {\n if (!IsNonNegativeNumber(size) || size === Infinity) {\n throw new RangeError('Size must be a finite, non-NaN, non-negative number.');\n }\n container._queue.push({ value, size });\n container._queueTotalSize += size;\n }\n function PeekQueueValue(container) {\n const pair = container._queue.peek();\n return pair.value;\n }\n function ResetQueue(container) {\n container._queue = new SimpleQueue();\n container._queueTotalSize = 0;\n }\n\n /**\n * A pull-into request in a {@link ReadableByteStreamController}.\n *\n * @public\n */\n class ReadableStreamBYOBRequest {\n constructor() {\n throw new TypeError('Illegal constructor');\n }\n /**\n * Returns the view for writing in to, or `null` if the BYOB request has already been responded to.\n */\n get view() {\n if (!IsReadableStreamBYOBRequest(this)) {\n throw byobRequestBrandCheckException('view');\n }\n return this._view;\n }\n respond(bytesWritten) {\n if (!IsReadableStreamBYOBRequest(this)) {\n throw byobRequestBrandCheckException('respond');\n }\n assertRequiredArgument(bytesWritten, 1, 'respond');\n bytesWritten = convertUnsignedLongLongWithEnforceRange(bytesWritten, 'First parameter');\n if (this._associatedReadableByteStreamController === undefined) {\n throw new TypeError('This BYOB request has been invalidated');\n }\n if (IsDetachedBuffer(this._view.buffer)) ;\n ReadableByteStreamControllerRespond(this._associatedReadableByteStreamController, bytesWritten);\n }\n respondWithNewView(view) {\n if (!IsReadableStreamBYOBRequest(this)) {\n throw byobRequestBrandCheckException('respondWithNewView');\n }\n assertRequiredArgument(view, 1, 'respondWithNewView');\n if (!ArrayBuffer.isView(view)) {\n throw new TypeError('You can 
only respond with array buffer views');\n }\n if (this._associatedReadableByteStreamController === undefined) {\n throw new TypeError('This BYOB request has been invalidated');\n }\n if (IsDetachedBuffer(view.buffer)) ;\n ReadableByteStreamControllerRespondWithNewView(this._associatedReadableByteStreamController, view);\n }\n }\n Object.defineProperties(ReadableStreamBYOBRequest.prototype, {\n respond: { enumerable: true },\n respondWithNewView: { enumerable: true },\n view: { enumerable: true }\n });\n if (typeof SymbolPolyfill.toStringTag === 'symbol') {\n Object.defineProperty(ReadableStreamBYOBRequest.prototype, SymbolPolyfill.toStringTag, {\n value: 'ReadableStreamBYOBRequest',\n configurable: true\n });\n }\n /**\n * Allows control of a {@link ReadableStream | readable byte stream}'s state and internal queue.\n *\n * @public\n */\n class ReadableByteStreamController {\n constructor() {\n throw new TypeError('Illegal constructor');\n }\n /**\n * Returns the current BYOB pull request, or `null` if there isn't one.\n */\n get byobRequest() {\n if (!IsReadableByteStreamController(this)) {\n throw byteStreamControllerBrandCheckException('byobRequest');\n }\n return ReadableByteStreamControllerGetBYOBRequest(this);\n }\n /**\n * Returns the desired size to fill the controlled stream's internal queue. It can be negative, if the queue is\n * over-full. An underlying byte source ought to use this information to determine when and how to apply backpressure.\n */\n get desiredSize() {\n if (!IsReadableByteStreamController(this)) {\n throw byteStreamControllerBrandCheckException('desiredSize');\n }\n return ReadableByteStreamControllerGetDesiredSize(this);\n }\n /**\n * Closes the controlled readable stream. Consumers will still be able to read any previously-enqueued chunks from\n * the stream, but once those are read, the stream will become closed.\n */\n close() {\n if (!IsReadableByteStreamController(this)) {\n throw byteStreamControllerBrandCheckException('close');\n }\n if (this._closeRequested) {\n throw new TypeError('The stream has already been closed; do not close it again!');\n }\n const state = this._controlledReadableByteStream._state;\n if (state !== 'readable') {\n throw new TypeError(`The stream (in ${state} state) is not in the readable state and cannot be closed`);\n }\n ReadableByteStreamControllerClose(this);\n }\n enqueue(chunk) {\n if (!IsReadableByteStreamController(this)) {\n throw byteStreamControllerBrandCheckException('enqueue');\n }\n assertRequiredArgument(chunk, 1, 'enqueue');\n if (!ArrayBuffer.isView(chunk)) {\n throw new TypeError('chunk must be an array buffer view');\n }\n if (chunk.byteLength === 0) {\n throw new TypeError('chunk must have non-zero byteLength');\n }\n if (chunk.buffer.byteLength === 0) {\n throw new TypeError(`chunk's buffer must have non-zero byteLength`);\n }\n if (this._closeRequested) {\n throw new TypeError('stream is closed or draining');\n }\n const state = this._controlledReadableByteStream._state;\n if (state !== 'readable') {\n throw new TypeError(`The stream (in ${state} state) is not in the readable state and cannot be enqueued to`);\n }\n ReadableByteStreamControllerEnqueue(this, chunk);\n }\n /**\n * Errors the controlled readable stream, making all future interactions with it fail with the given error `e`.\n */\n error(e = undefined) {\n if (!IsReadableByteStreamController(this)) {\n throw byteStreamControllerBrandCheckException('error');\n }\n ReadableByteStreamControllerError(this, e);\n }\n /** @internal */\n 
[CancelSteps](reason) {\n ReadableByteStreamControllerClearPendingPullIntos(this);\n ResetQueue(this);\n const result = this._cancelAlgorithm(reason);\n ReadableByteStreamControllerClearAlgorithms(this);\n return result;\n }\n /** @internal */\n [PullSteps](readRequest) {\n const stream = this._controlledReadableByteStream;\n if (this._queueTotalSize > 0) {\n const entry = this._queue.shift();\n this._queueTotalSize -= entry.byteLength;\n ReadableByteStreamControllerHandleQueueDrain(this);\n const view = new Uint8Array(entry.buffer, entry.byteOffset, entry.byteLength);\n readRequest._chunkSteps(view);\n return;\n }\n const autoAllocateChunkSize = this._autoAllocateChunkSize;\n if (autoAllocateChunkSize !== undefined) {\n let buffer;\n try {\n buffer = new ArrayBuffer(autoAllocateChunkSize);\n }\n catch (bufferE) {\n readRequest._errorSteps(bufferE);\n return;\n }\n const pullIntoDescriptor = {\n buffer,\n bufferByteLength: autoAllocateChunkSize,\n byteOffset: 0,\n byteLength: autoAllocateChunkSize,\n bytesFilled: 0,\n elementSize: 1,\n viewConstructor: Uint8Array,\n readerType: 'default'\n };\n this._pendingPullIntos.push(pullIntoDescriptor);\n }\n ReadableStreamAddReadRequest(stream, readRequest);\n ReadableByteStreamControllerCallPullIfNeeded(this);\n }\n }\n Object.defineProperties(ReadableByteStreamController.prototype, {\n close: { enumerable: true },\n enqueue: { enumerable: true },\n error: { enumerable: true },\n byobRequest: { enumerable: true },\n desiredSize: { enumerable: true }\n });\n if (typeof SymbolPolyfill.toStringTag === 'symbol') {\n Object.defineProperty(ReadableByteStreamController.prototype, SymbolPolyfill.toStringTag, {\n value: 'ReadableByteStreamController',\n configurable: true\n });\n }\n // Abstract operations for the ReadableByteStreamController.\n function IsReadableByteStreamController(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, '_controlledReadableByteStream')) {\n return false;\n }\n return x instanceof ReadableByteStreamController;\n }\n function IsReadableStreamBYOBRequest(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, '_associatedReadableByteStreamController')) {\n return false;\n }\n return x instanceof ReadableStreamBYOBRequest;\n }\n function ReadableByteStreamControllerCallPullIfNeeded(controller) {\n const shouldPull = ReadableByteStreamControllerShouldCallPull(controller);\n if (!shouldPull) {\n return;\n }\n if (controller._pulling) {\n controller._pullAgain = true;\n return;\n }\n controller._pulling = true;\n // TODO: Test controller argument\n const pullPromise = controller._pullAlgorithm();\n uponPromise(pullPromise, () => {\n controller._pulling = false;\n if (controller._pullAgain) {\n controller._pullAgain = false;\n ReadableByteStreamControllerCallPullIfNeeded(controller);\n }\n }, e => {\n ReadableByteStreamControllerError(controller, e);\n });\n }\n function ReadableByteStreamControllerClearPendingPullIntos(controller) {\n ReadableByteStreamControllerInvalidateBYOBRequest(controller);\n controller._pendingPullIntos = new SimpleQueue();\n }\n function ReadableByteStreamControllerCommitPullIntoDescriptor(stream, pullIntoDescriptor) {\n let done = false;\n if (stream._state === 'closed') {\n done = true;\n }\n const filledView = ReadableByteStreamControllerConvertPullIntoDescriptor(pullIntoDescriptor);\n if (pullIntoDescriptor.readerType === 'default') {\n ReadableStreamFulfillReadRequest(stream, filledView, done);\n }\n else {\n 
ReadableStreamFulfillReadIntoRequest(stream, filledView, done);\n }\n }\n function ReadableByteStreamControllerConvertPullIntoDescriptor(pullIntoDescriptor) {\n const bytesFilled = pullIntoDescriptor.bytesFilled;\n const elementSize = pullIntoDescriptor.elementSize;\n return new pullIntoDescriptor.viewConstructor(pullIntoDescriptor.buffer, pullIntoDescriptor.byteOffset, bytesFilled / elementSize);\n }\n function ReadableByteStreamControllerEnqueueChunkToQueue(controller, buffer, byteOffset, byteLength) {\n controller._queue.push({ buffer, byteOffset, byteLength });\n controller._queueTotalSize += byteLength;\n }\n function ReadableByteStreamControllerFillPullIntoDescriptorFromQueue(controller, pullIntoDescriptor) {\n const elementSize = pullIntoDescriptor.elementSize;\n const currentAlignedBytes = pullIntoDescriptor.bytesFilled - pullIntoDescriptor.bytesFilled % elementSize;\n const maxBytesToCopy = Math.min(controller._queueTotalSize, pullIntoDescriptor.byteLength - pullIntoDescriptor.bytesFilled);\n const maxBytesFilled = pullIntoDescriptor.bytesFilled + maxBytesToCopy;\n const maxAlignedBytes = maxBytesFilled - maxBytesFilled % elementSize;\n let totalBytesToCopyRemaining = maxBytesToCopy;\n let ready = false;\n if (maxAlignedBytes > currentAlignedBytes) {\n totalBytesToCopyRemaining = maxAlignedBytes - pullIntoDescriptor.bytesFilled;\n ready = true;\n }\n const queue = controller._queue;\n while (totalBytesToCopyRemaining > 0) {\n const headOfQueue = queue.peek();\n const bytesToCopy = Math.min(totalBytesToCopyRemaining, headOfQueue.byteLength);\n const destStart = pullIntoDescriptor.byteOffset + pullIntoDescriptor.bytesFilled;\n CopyDataBlockBytes(pullIntoDescriptor.buffer, destStart, headOfQueue.buffer, headOfQueue.byteOffset, bytesToCopy);\n if (headOfQueue.byteLength === bytesToCopy) {\n queue.shift();\n }\n else {\n headOfQueue.byteOffset += bytesToCopy;\n headOfQueue.byteLength -= bytesToCopy;\n }\n controller._queueTotalSize -= bytesToCopy;\n ReadableByteStreamControllerFillHeadPullIntoDescriptor(controller, bytesToCopy, pullIntoDescriptor);\n totalBytesToCopyRemaining -= bytesToCopy;\n }\n return ready;\n }\n function ReadableByteStreamControllerFillHeadPullIntoDescriptor(controller, size, pullIntoDescriptor) {\n pullIntoDescriptor.bytesFilled += size;\n }\n function ReadableByteStreamControllerHandleQueueDrain(controller) {\n if (controller._queueTotalSize === 0 && controller._closeRequested) {\n ReadableByteStreamControllerClearAlgorithms(controller);\n ReadableStreamClose(controller._controlledReadableByteStream);\n }\n else {\n ReadableByteStreamControllerCallPullIfNeeded(controller);\n }\n }\n function ReadableByteStreamControllerInvalidateBYOBRequest(controller) {\n if (controller._byobRequest === null) {\n return;\n }\n controller._byobRequest._associatedReadableByteStreamController = undefined;\n controller._byobRequest._view = null;\n controller._byobRequest = null;\n }\n function ReadableByteStreamControllerProcessPullIntoDescriptorsUsingQueue(controller) {\n while (controller._pendingPullIntos.length > 0) {\n if (controller._queueTotalSize === 0) {\n return;\n }\n const pullIntoDescriptor = controller._pendingPullIntos.peek();\n if (ReadableByteStreamControllerFillPullIntoDescriptorFromQueue(controller, pullIntoDescriptor)) {\n ReadableByteStreamControllerShiftPendingPullInto(controller);\n ReadableByteStreamControllerCommitPullIntoDescriptor(controller._controlledReadableByteStream, pullIntoDescriptor);\n }\n }\n }\n function 
ReadableByteStreamControllerPullInto(controller, view, readIntoRequest) {\n const stream = controller._controlledReadableByteStream;\n let elementSize = 1;\n if (view.constructor !== DataView) {\n elementSize = view.constructor.BYTES_PER_ELEMENT;\n }\n const ctor = view.constructor;\n // try {\n const buffer = TransferArrayBuffer(view.buffer);\n // } catch (e) {\n // readIntoRequest._errorSteps(e);\n // return;\n // }\n const pullIntoDescriptor = {\n buffer,\n bufferByteLength: buffer.byteLength,\n byteOffset: view.byteOffset,\n byteLength: view.byteLength,\n bytesFilled: 0,\n elementSize,\n viewConstructor: ctor,\n readerType: 'byob'\n };\n if (controller._pendingPullIntos.length > 0) {\n controller._pendingPullIntos.push(pullIntoDescriptor);\n // No ReadableByteStreamControllerCallPullIfNeeded() call since:\n // - No change happens on desiredSize\n // - The source has already been notified of that there's at least 1 pending read(view)\n ReadableStreamAddReadIntoRequest(stream, readIntoRequest);\n return;\n }\n if (stream._state === 'closed') {\n const emptyView = new ctor(pullIntoDescriptor.buffer, pullIntoDescriptor.byteOffset, 0);\n readIntoRequest._closeSteps(emptyView);\n return;\n }\n if (controller._queueTotalSize > 0) {\n if (ReadableByteStreamControllerFillPullIntoDescriptorFromQueue(controller, pullIntoDescriptor)) {\n const filledView = ReadableByteStreamControllerConvertPullIntoDescriptor(pullIntoDescriptor);\n ReadableByteStreamControllerHandleQueueDrain(controller);\n readIntoRequest._chunkSteps(filledView);\n return;\n }\n if (controller._closeRequested) {\n const e = new TypeError('Insufficient bytes to fill elements in the given buffer');\n ReadableByteStreamControllerError(controller, e);\n readIntoRequest._errorSteps(e);\n return;\n }\n }\n controller._pendingPullIntos.push(pullIntoDescriptor);\n ReadableStreamAddReadIntoRequest(stream, readIntoRequest);\n ReadableByteStreamControllerCallPullIfNeeded(controller);\n }\n function ReadableByteStreamControllerRespondInClosedState(controller, firstDescriptor) {\n const stream = controller._controlledReadableByteStream;\n if (ReadableStreamHasBYOBReader(stream)) {\n while (ReadableStreamGetNumReadIntoRequests(stream) > 0) {\n const pullIntoDescriptor = ReadableByteStreamControllerShiftPendingPullInto(controller);\n ReadableByteStreamControllerCommitPullIntoDescriptor(stream, pullIntoDescriptor);\n }\n }\n }\n function ReadableByteStreamControllerRespondInReadableState(controller, bytesWritten, pullIntoDescriptor) {\n ReadableByteStreamControllerFillHeadPullIntoDescriptor(controller, bytesWritten, pullIntoDescriptor);\n if (pullIntoDescriptor.bytesFilled < pullIntoDescriptor.elementSize) {\n return;\n }\n ReadableByteStreamControllerShiftPendingPullInto(controller);\n const remainderSize = pullIntoDescriptor.bytesFilled % pullIntoDescriptor.elementSize;\n if (remainderSize > 0) {\n const end = pullIntoDescriptor.byteOffset + pullIntoDescriptor.bytesFilled;\n const remainder = ArrayBufferSlice(pullIntoDescriptor.buffer, end - remainderSize, end);\n ReadableByteStreamControllerEnqueueChunkToQueue(controller, remainder, 0, remainder.byteLength);\n }\n pullIntoDescriptor.bytesFilled -= remainderSize;\n ReadableByteStreamControllerCommitPullIntoDescriptor(controller._controlledReadableByteStream, pullIntoDescriptor);\n ReadableByteStreamControllerProcessPullIntoDescriptorsUsingQueue(controller);\n }\n function ReadableByteStreamControllerRespondInternal(controller, bytesWritten) {\n const firstDescriptor = 
controller._pendingPullIntos.peek();\n ReadableByteStreamControllerInvalidateBYOBRequest(controller);\n const state = controller._controlledReadableByteStream._state;\n if (state === 'closed') {\n ReadableByteStreamControllerRespondInClosedState(controller);\n }\n else {\n ReadableByteStreamControllerRespondInReadableState(controller, bytesWritten, firstDescriptor);\n }\n ReadableByteStreamControllerCallPullIfNeeded(controller);\n }\n function ReadableByteStreamControllerShiftPendingPullInto(controller) {\n const descriptor = controller._pendingPullIntos.shift();\n return descriptor;\n }\n function ReadableByteStreamControllerShouldCallPull(controller) {\n const stream = controller._controlledReadableByteStream;\n if (stream._state !== 'readable') {\n return false;\n }\n if (controller._closeRequested) {\n return false;\n }\n if (!controller._started) {\n return false;\n }\n if (ReadableStreamHasDefaultReader(stream) && ReadableStreamGetNumReadRequests(stream) > 0) {\n return true;\n }\n if (ReadableStreamHasBYOBReader(stream) && ReadableStreamGetNumReadIntoRequests(stream) > 0) {\n return true;\n }\n const desiredSize = ReadableByteStreamControllerGetDesiredSize(controller);\n if (desiredSize > 0) {\n return true;\n }\n return false;\n }\n function ReadableByteStreamControllerClearAlgorithms(controller) {\n controller._pullAlgorithm = undefined;\n controller._cancelAlgorithm = undefined;\n }\n // A client of ReadableByteStreamController may use these functions directly to bypass state check.\n function ReadableByteStreamControllerClose(controller) {\n const stream = controller._controlledReadableByteStream;\n if (controller._closeRequested || stream._state !== 'readable') {\n return;\n }\n if (controller._queueTotalSize > 0) {\n controller._closeRequested = true;\n return;\n }\n if (controller._pendingPullIntos.length > 0) {\n const firstPendingPullInto = controller._pendingPullIntos.peek();\n if (firstPendingPullInto.bytesFilled > 0) {\n const e = new TypeError('Insufficient bytes to fill elements in the given buffer');\n ReadableByteStreamControllerError(controller, e);\n throw e;\n }\n }\n ReadableByteStreamControllerClearAlgorithms(controller);\n ReadableStreamClose(stream);\n }\n function ReadableByteStreamControllerEnqueue(controller, chunk) {\n const stream = controller._controlledReadableByteStream;\n if (controller._closeRequested || stream._state !== 'readable') {\n return;\n }\n const buffer = chunk.buffer;\n const byteOffset = chunk.byteOffset;\n const byteLength = chunk.byteLength;\n const transferredBuffer = TransferArrayBuffer(buffer);\n if (controller._pendingPullIntos.length > 0) {\n const firstPendingPullInto = controller._pendingPullIntos.peek();\n if (IsDetachedBuffer(firstPendingPullInto.buffer)) ;\n firstPendingPullInto.buffer = TransferArrayBuffer(firstPendingPullInto.buffer);\n }\n ReadableByteStreamControllerInvalidateBYOBRequest(controller);\n if (ReadableStreamHasDefaultReader(stream)) {\n if (ReadableStreamGetNumReadRequests(stream) === 0) {\n ReadableByteStreamControllerEnqueueChunkToQueue(controller, transferredBuffer, byteOffset, byteLength);\n }\n else {\n if (controller._pendingPullIntos.length > 0) {\n ReadableByteStreamControllerShiftPendingPullInto(controller);\n }\n const transferredView = new Uint8Array(transferredBuffer, byteOffset, byteLength);\n ReadableStreamFulfillReadRequest(stream, transferredView, false);\n }\n }\n else if (ReadableStreamHasBYOBReader(stream)) {\n // TODO: Ideally in this branch detaching should happen only if the buffer is 
not consumed fully.\n ReadableByteStreamControllerEnqueueChunkToQueue(controller, transferredBuffer, byteOffset, byteLength);\n ReadableByteStreamControllerProcessPullIntoDescriptorsUsingQueue(controller);\n }\n else {\n ReadableByteStreamControllerEnqueueChunkToQueue(controller, transferredBuffer, byteOffset, byteLength);\n }\n ReadableByteStreamControllerCallPullIfNeeded(controller);\n }\n function ReadableByteStreamControllerError(controller, e) {\n const stream = controller._controlledReadableByteStream;\n if (stream._state !== 'readable') {\n return;\n }\n ReadableByteStreamControllerClearPendingPullIntos(controller);\n ResetQueue(controller);\n ReadableByteStreamControllerClearAlgorithms(controller);\n ReadableStreamError(stream, e);\n }\n function ReadableByteStreamControllerGetBYOBRequest(controller) {\n if (controller._byobRequest === null && controller._pendingPullIntos.length > 0) {\n const firstDescriptor = controller._pendingPullIntos.peek();\n const view = new Uint8Array(firstDescriptor.buffer, firstDescriptor.byteOffset + firstDescriptor.bytesFilled, firstDescriptor.byteLength - firstDescriptor.bytesFilled);\n const byobRequest = Object.create(ReadableStreamBYOBRequest.prototype);\n SetUpReadableStreamBYOBRequest(byobRequest, controller, view);\n controller._byobRequest = byobRequest;\n }\n return controller._byobRequest;\n }\n function ReadableByteStreamControllerGetDesiredSize(controller) {\n const state = controller._controlledReadableByteStream._state;\n if (state === 'errored') {\n return null;\n }\n if (state === 'closed') {\n return 0;\n }\n return controller._strategyHWM - controller._queueTotalSize;\n }\n function ReadableByteStreamControllerRespond(controller, bytesWritten) {\n const firstDescriptor = controller._pendingPullIntos.peek();\n const state = controller._controlledReadableByteStream._state;\n if (state === 'closed') {\n if (bytesWritten !== 0) {\n throw new TypeError('bytesWritten must be 0 when calling respond() on a closed stream');\n }\n }\n else {\n if (bytesWritten === 0) {\n throw new TypeError('bytesWritten must be greater than 0 when calling respond() on a readable stream');\n }\n if (firstDescriptor.bytesFilled + bytesWritten > firstDescriptor.byteLength) {\n throw new RangeError('bytesWritten out of range');\n }\n }\n firstDescriptor.buffer = TransferArrayBuffer(firstDescriptor.buffer);\n ReadableByteStreamControllerRespondInternal(controller, bytesWritten);\n }\n function ReadableByteStreamControllerRespondWithNewView(controller, view) {\n const firstDescriptor = controller._pendingPullIntos.peek();\n const state = controller._controlledReadableByteStream._state;\n if (state === 'closed') {\n if (view.byteLength !== 0) {\n throw new TypeError('The view\\'s length must be 0 when calling respondWithNewView() on a closed stream');\n }\n }\n else {\n if (view.byteLength === 0) {\n throw new TypeError('The view\\'s length must be greater than 0 when calling respondWithNewView() on a readable stream');\n }\n }\n if (firstDescriptor.byteOffset + firstDescriptor.bytesFilled !== view.byteOffset) {\n throw new RangeError('The region specified by view does not match byobRequest');\n }\n if (firstDescriptor.bufferByteLength !== view.buffer.byteLength) {\n throw new RangeError('The buffer of view has different capacity than byobRequest');\n }\n if (firstDescriptor.bytesFilled + view.byteLength > firstDescriptor.byteLength) {\n throw new RangeError('The region specified by view is larger than byobRequest');\n }\n const viewByteLength = view.byteLength;\n 
firstDescriptor.buffer = TransferArrayBuffer(view.buffer);\n ReadableByteStreamControllerRespondInternal(controller, viewByteLength);\n }\n function SetUpReadableByteStreamController(stream, controller, startAlgorithm, pullAlgorithm, cancelAlgorithm, highWaterMark, autoAllocateChunkSize) {\n controller._controlledReadableByteStream = stream;\n controller._pullAgain = false;\n controller._pulling = false;\n controller._byobRequest = null;\n // Need to set the slots so that the assert doesn't fire. In the spec the slots already exist implicitly.\n controller._queue = controller._queueTotalSize = undefined;\n ResetQueue(controller);\n controller._closeRequested = false;\n controller._started = false;\n controller._strategyHWM = highWaterMark;\n controller._pullAlgorithm = pullAlgorithm;\n controller._cancelAlgorithm = cancelAlgorithm;\n controller._autoAllocateChunkSize = autoAllocateChunkSize;\n controller._pendingPullIntos = new SimpleQueue();\n stream._readableStreamController = controller;\n const startResult = startAlgorithm();\n uponPromise(promiseResolvedWith(startResult), () => {\n controller._started = true;\n ReadableByteStreamControllerCallPullIfNeeded(controller);\n }, r => {\n ReadableByteStreamControllerError(controller, r);\n });\n }\n function SetUpReadableByteStreamControllerFromUnderlyingSource(stream, underlyingByteSource, highWaterMark) {\n const controller = Object.create(ReadableByteStreamController.prototype);\n let startAlgorithm = () => undefined;\n let pullAlgorithm = () => promiseResolvedWith(undefined);\n let cancelAlgorithm = () => promiseResolvedWith(undefined);\n if (underlyingByteSource.start !== undefined) {\n startAlgorithm = () => underlyingByteSource.start(controller);\n }\n if (underlyingByteSource.pull !== undefined) {\n pullAlgorithm = () => underlyingByteSource.pull(controller);\n }\n if (underlyingByteSource.cancel !== undefined) {\n cancelAlgorithm = reason => underlyingByteSource.cancel(reason);\n }\n const autoAllocateChunkSize = underlyingByteSource.autoAllocateChunkSize;\n if (autoAllocateChunkSize === 0) {\n throw new TypeError('autoAllocateChunkSize must be greater than 0');\n }\n SetUpReadableByteStreamController(stream, controller, startAlgorithm, pullAlgorithm, cancelAlgorithm, highWaterMark, autoAllocateChunkSize);\n }\n function SetUpReadableStreamBYOBRequest(request, controller, view) {\n request._associatedReadableByteStreamController = controller;\n request._view = view;\n }\n // Helper functions for the ReadableStreamBYOBRequest.\n function byobRequestBrandCheckException(name) {\n return new TypeError(`ReadableStreamBYOBRequest.prototype.${name} can only be used on a ReadableStreamBYOBRequest`);\n }\n // Helper functions for the ReadableByteStreamController.\n function byteStreamControllerBrandCheckException(name) {\n return new TypeError(`ReadableByteStreamController.prototype.${name} can only be used on a ReadableByteStreamController`);\n }\n\n // Abstract operations for the ReadableStream.\n function AcquireReadableStreamBYOBReader(stream) {\n return new ReadableStreamBYOBReader(stream);\n }\n // ReadableStream API exposed for controllers.\n function ReadableStreamAddReadIntoRequest(stream, readIntoRequest) {\n stream._reader._readIntoRequests.push(readIntoRequest);\n }\n function ReadableStreamFulfillReadIntoRequest(stream, chunk, done) {\n const reader = stream._reader;\n const readIntoRequest = reader._readIntoRequests.shift();\n if (done) {\n readIntoRequest._closeSteps(chunk);\n }\n else {\n readIntoRequest._chunkSteps(chunk);\n 
}\n }\n function ReadableStreamGetNumReadIntoRequests(stream) {\n return stream._reader._readIntoRequests.length;\n }\n function ReadableStreamHasBYOBReader(stream) {\n const reader = stream._reader;\n if (reader === undefined) {\n return false;\n }\n if (!IsReadableStreamBYOBReader(reader)) {\n return false;\n }\n return true;\n }\n /**\n * A BYOB reader vended by a {@link ReadableStream}.\n *\n * @public\n */\n class ReadableStreamBYOBReader {\n constructor(stream) {\n assertRequiredArgument(stream, 1, 'ReadableStreamBYOBReader');\n assertReadableStream(stream, 'First parameter');\n if (IsReadableStreamLocked(stream)) {\n throw new TypeError('This stream has already been locked for exclusive reading by another reader');\n }\n if (!IsReadableByteStreamController(stream._readableStreamController)) {\n throw new TypeError('Cannot construct a ReadableStreamBYOBReader for a stream not constructed with a byte ' +\n 'source');\n }\n ReadableStreamReaderGenericInitialize(this, stream);\n this._readIntoRequests = new SimpleQueue();\n }\n /**\n * Returns a promise that will be fulfilled when the stream becomes closed, or rejected if the stream ever errors or\n * the reader's lock is released before the stream finishes closing.\n */\n get closed() {\n if (!IsReadableStreamBYOBReader(this)) {\n return promiseRejectedWith(byobReaderBrandCheckException('closed'));\n }\n return this._closedPromise;\n }\n /**\n * If the reader is active, behaves the same as {@link ReadableStream.cancel | stream.cancel(reason)}.\n */\n cancel(reason = undefined) {\n if (!IsReadableStreamBYOBReader(this)) {\n return promiseRejectedWith(byobReaderBrandCheckException('cancel'));\n }\n if (this._ownerReadableStream === undefined) {\n return promiseRejectedWith(readerLockException('cancel'));\n }\n return ReadableStreamReaderGenericCancel(this, reason);\n }\n /**\n * Attempts to reads bytes into view, and returns a promise resolved with the result.\n *\n * If reading a chunk causes the queue to become empty, more data will be pulled from the underlying source.\n */\n read(view) {\n if (!IsReadableStreamBYOBReader(this)) {\n return promiseRejectedWith(byobReaderBrandCheckException('read'));\n }\n if (!ArrayBuffer.isView(view)) {\n return promiseRejectedWith(new TypeError('view must be an array buffer view'));\n }\n if (view.byteLength === 0) {\n return promiseRejectedWith(new TypeError('view must have non-zero byteLength'));\n }\n if (view.buffer.byteLength === 0) {\n return promiseRejectedWith(new TypeError(`view's buffer must have non-zero byteLength`));\n }\n if (IsDetachedBuffer(view.buffer)) ;\n if (this._ownerReadableStream === undefined) {\n return promiseRejectedWith(readerLockException('read from'));\n }\n let resolvePromise;\n let rejectPromise;\n const promise = newPromise((resolve, reject) => {\n resolvePromise = resolve;\n rejectPromise = reject;\n });\n const readIntoRequest = {\n _chunkSteps: chunk => resolvePromise({ value: chunk, done: false }),\n _closeSteps: chunk => resolvePromise({ value: chunk, done: true }),\n _errorSteps: e => rejectPromise(e)\n };\n ReadableStreamBYOBReaderRead(this, view, readIntoRequest);\n return promise;\n }\n /**\n * Releases the reader's lock on the corresponding stream. 
After the lock is released, the reader is no longer active.\n * If the associated stream is errored when the lock is released, the reader will appear errored in the same way\n * from now on; otherwise, the reader will appear closed.\n *\n * A reader's lock cannot be released while it still has a pending read request, i.e., if a promise returned by\n * the reader's {@link ReadableStreamBYOBReader.read | read()} method has not yet been settled. Attempting to\n * do so will throw a `TypeError` and leave the reader locked to the stream.\n */\n releaseLock() {\n if (!IsReadableStreamBYOBReader(this)) {\n throw byobReaderBrandCheckException('releaseLock');\n }\n if (this._ownerReadableStream === undefined) {\n return;\n }\n if (this._readIntoRequests.length > 0) {\n throw new TypeError('Tried to release a reader lock when that reader has pending read() calls un-settled');\n }\n ReadableStreamReaderGenericRelease(this);\n }\n }\n Object.defineProperties(ReadableStreamBYOBReader.prototype, {\n cancel: { enumerable: true },\n read: { enumerable: true },\n releaseLock: { enumerable: true },\n closed: { enumerable: true }\n });\n if (typeof SymbolPolyfill.toStringTag === 'symbol') {\n Object.defineProperty(ReadableStreamBYOBReader.prototype, SymbolPolyfill.toStringTag, {\n value: 'ReadableStreamBYOBReader',\n configurable: true\n });\n }\n // Abstract operations for the readers.\n function IsReadableStreamBYOBReader(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, '_readIntoRequests')) {\n return false;\n }\n return x instanceof ReadableStreamBYOBReader;\n }\n function ReadableStreamBYOBReaderRead(reader, view, readIntoRequest) {\n const stream = reader._ownerReadableStream;\n stream._disturbed = true;\n if (stream._state === 'errored') {\n readIntoRequest._errorSteps(stream._storedError);\n }\n else {\n ReadableByteStreamControllerPullInto(stream._readableStreamController, view, readIntoRequest);\n }\n }\n // Helper functions for the ReadableStreamBYOBReader.\n function byobReaderBrandCheckException(name) {\n return new TypeError(`ReadableStreamBYOBReader.prototype.${name} can only be used on a ReadableStreamBYOBReader`);\n }\n\n function ExtractHighWaterMark(strategy, defaultHWM) {\n const { highWaterMark } = strategy;\n if (highWaterMark === undefined) {\n return defaultHWM;\n }\n if (NumberIsNaN(highWaterMark) || highWaterMark < 0) {\n throw new RangeError('Invalid highWaterMark');\n }\n return highWaterMark;\n }\n function ExtractSizeAlgorithm(strategy) {\n const { size } = strategy;\n if (!size) {\n return () => 1;\n }\n return size;\n }\n\n function convertQueuingStrategy(init, context) {\n assertDictionary(init, context);\n const highWaterMark = init === null || init === void 0 ? void 0 : init.highWaterMark;\n const size = init === null || init === void 0 ? void 0 : init.size;\n return {\n highWaterMark: highWaterMark === undefined ? undefined : convertUnrestrictedDouble(highWaterMark),\n size: size === undefined ? undefined : convertQueuingStrategySize(size, `${context} has member 'size' that`)\n };\n }\n function convertQueuingStrategySize(fn, context) {\n assertFunction(fn, context);\n return chunk => convertUnrestrictedDouble(fn(chunk));\n }\n\n function convertUnderlyingSink(original, context) {\n assertDictionary(original, context);\n const abort = original === null || original === void 0 ? void 0 : original.abort;\n const close = original === null || original === void 0 ? 
void 0 : original.close;\n const start = original === null || original === void 0 ? void 0 : original.start;\n const type = original === null || original === void 0 ? void 0 : original.type;\n const write = original === null || original === void 0 ? void 0 : original.write;\n return {\n abort: abort === undefined ?\n undefined :\n convertUnderlyingSinkAbortCallback(abort, original, `${context} has member 'abort' that`),\n close: close === undefined ?\n undefined :\n convertUnderlyingSinkCloseCallback(close, original, `${context} has member 'close' that`),\n start: start === undefined ?\n undefined :\n convertUnderlyingSinkStartCallback(start, original, `${context} has member 'start' that`),\n write: write === undefined ?\n undefined :\n convertUnderlyingSinkWriteCallback(write, original, `${context} has member 'write' that`),\n type\n };\n }\n function convertUnderlyingSinkAbortCallback(fn, original, context) {\n assertFunction(fn, context);\n return (reason) => promiseCall(fn, original, [reason]);\n }\n function convertUnderlyingSinkCloseCallback(fn, original, context) {\n assertFunction(fn, context);\n return () => promiseCall(fn, original, []);\n }\n function convertUnderlyingSinkStartCallback(fn, original, context) {\n assertFunction(fn, context);\n return (controller) => reflectCall(fn, original, [controller]);\n }\n function convertUnderlyingSinkWriteCallback(fn, original, context) {\n assertFunction(fn, context);\n return (chunk, controller) => promiseCall(fn, original, [chunk, controller]);\n }\n\n function assertWritableStream(x, context) {\n if (!IsWritableStream(x)) {\n throw new TypeError(`${context} is not a WritableStream.`);\n }\n }\n\n function isAbortSignal(value) {\n if (typeof value !== 'object' || value === null) {\n return false;\n }\n try {\n return typeof value.aborted === 'boolean';\n }\n catch (_a) {\n // AbortSignal.prototype.aborted throws if its brand check fails\n return false;\n }\n }\n const supportsAbortController = typeof AbortController === 'function';\n /**\n * Construct a new AbortController, if supported by the platform.\n *\n * @internal\n */\n function createAbortController() {\n if (supportsAbortController) {\n return new AbortController();\n }\n return undefined;\n }\n\n /**\n * A writable stream represents a destination for data, into which you can write.\n *\n * @public\n */\n class WritableStream {\n constructor(rawUnderlyingSink = {}, rawStrategy = {}) {\n if (rawUnderlyingSink === undefined) {\n rawUnderlyingSink = null;\n }\n else {\n assertObject(rawUnderlyingSink, 'First parameter');\n }\n const strategy = convertQueuingStrategy(rawStrategy, 'Second parameter');\n const underlyingSink = convertUnderlyingSink(rawUnderlyingSink, 'First parameter');\n InitializeWritableStream(this);\n const type = underlyingSink.type;\n if (type !== undefined) {\n throw new RangeError('Invalid type is specified');\n }\n const sizeAlgorithm = ExtractSizeAlgorithm(strategy);\n const highWaterMark = ExtractHighWaterMark(strategy, 1);\n SetUpWritableStreamDefaultControllerFromUnderlyingSink(this, underlyingSink, highWaterMark, sizeAlgorithm);\n }\n /**\n * Returns whether or not the writable stream is locked to a writer.\n */\n get locked() {\n if (!IsWritableStream(this)) {\n throw streamBrandCheckException$2('locked');\n }\n return IsWritableStreamLocked(this);\n }\n /**\n * Aborts the stream, signaling that the producer can no longer successfully write to the stream and it is to be\n * immediately moved to an errored state, with any queued-up writes discarded. 
This will also execute any abort\n * mechanism of the underlying sink.\n *\n * The returned promise will fulfill if the stream shuts down successfully, or reject if the underlying sink signaled\n * that there was an error doing so. Additionally, it will reject with a `TypeError` (without attempting to cancel\n * the stream) if the stream is currently locked.\n */\n abort(reason = undefined) {\n if (!IsWritableStream(this)) {\n return promiseRejectedWith(streamBrandCheckException$2('abort'));\n }\n if (IsWritableStreamLocked(this)) {\n return promiseRejectedWith(new TypeError('Cannot abort a stream that already has a writer'));\n }\n return WritableStreamAbort(this, reason);\n }\n /**\n * Closes the stream. The underlying sink will finish processing any previously-written chunks, before invoking its\n * close behavior. During this time any further attempts to write will fail (without erroring the stream).\n *\n * The method returns a promise that will fulfill if all remaining chunks are successfully written and the stream\n * successfully closes, or rejects if an error is encountered during this process. Additionally, it will reject with\n * a `TypeError` (without attempting to cancel the stream) if the stream is currently locked.\n */\n close() {\n if (!IsWritableStream(this)) {\n return promiseRejectedWith(streamBrandCheckException$2('close'));\n }\n if (IsWritableStreamLocked(this)) {\n return promiseRejectedWith(new TypeError('Cannot close a stream that already has a writer'));\n }\n if (WritableStreamCloseQueuedOrInFlight(this)) {\n return promiseRejectedWith(new TypeError('Cannot close an already-closing stream'));\n }\n return WritableStreamClose(this);\n }\n /**\n * Creates a {@link WritableStreamDefaultWriter | writer} and locks the stream to the new writer. While the stream\n * is locked, no other writer can be acquired until this one is released.\n *\n * This functionality is especially useful for creating abstractions that desire the ability to write to a stream\n * without interruption or interleaving. 
By getting a writer for the stream, you can ensure nobody else can write at\n * the same time, which would cause the resulting written data to be unpredictable and probably useless.\n */\n getWriter() {\n if (!IsWritableStream(this)) {\n throw streamBrandCheckException$2('getWriter');\n }\n return AcquireWritableStreamDefaultWriter(this);\n }\n }\n Object.defineProperties(WritableStream.prototype, {\n abort: { enumerable: true },\n close: { enumerable: true },\n getWriter: { enumerable: true },\n locked: { enumerable: true }\n });\n if (typeof SymbolPolyfill.toStringTag === 'symbol') {\n Object.defineProperty(WritableStream.prototype, SymbolPolyfill.toStringTag, {\n value: 'WritableStream',\n configurable: true\n });\n }\n // Abstract operations for the WritableStream.\n function AcquireWritableStreamDefaultWriter(stream) {\n return new WritableStreamDefaultWriter(stream);\n }\n // Throws if and only if startAlgorithm throws.\n function CreateWritableStream(startAlgorithm, writeAlgorithm, closeAlgorithm, abortAlgorithm, highWaterMark = 1, sizeAlgorithm = () => 1) {\n const stream = Object.create(WritableStream.prototype);\n InitializeWritableStream(stream);\n const controller = Object.create(WritableStreamDefaultController.prototype);\n SetUpWritableStreamDefaultController(stream, controller, startAlgorithm, writeAlgorithm, closeAlgorithm, abortAlgorithm, highWaterMark, sizeAlgorithm);\n return stream;\n }\n function InitializeWritableStream(stream) {\n stream._state = 'writable';\n // The error that will be reported by new method calls once the state becomes errored. Only set when [[state]] is\n // 'erroring' or 'errored'. May be set to an undefined value.\n stream._storedError = undefined;\n stream._writer = undefined;\n // Initialize to undefined first because the constructor of the controller checks this\n // variable to validate the caller.\n stream._writableStreamController = undefined;\n // This queue is placed here instead of the writer class in order to allow for passing a writer to the next data\n // producer without waiting for the queued writes to finish.\n stream._writeRequests = new SimpleQueue();\n // Write requests are removed from _writeRequests when write() is called on the underlying sink. This prevents\n // them from being erroneously rejected on error. If a write() call is in-flight, the request is stored here.\n stream._inFlightWriteRequest = undefined;\n // The promise that was returned from writer.close(). Stored here because it may be fulfilled after the writer\n // has been detached.\n stream._closeRequest = undefined;\n // Close request is removed from _closeRequest when close() is called on the underlying sink. This prevents it\n // from being erroneously rejected on error. If a close() call is in-flight, the request is stored here.\n stream._inFlightCloseRequest = undefined;\n // The promise that was returned from writer.abort(). 
This may also be fulfilled after the writer has detached.\n stream._pendingAbortRequest = undefined;\n // The backpressure signal set by the controller.\n stream._backpressure = false;\n }\n function IsWritableStream(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, '_writableStreamController')) {\n return false;\n }\n return x instanceof WritableStream;\n }\n function IsWritableStreamLocked(stream) {\n if (stream._writer === undefined) {\n return false;\n }\n return true;\n }\n function WritableStreamAbort(stream, reason) {\n var _a;\n if (stream._state === 'closed' || stream._state === 'errored') {\n return promiseResolvedWith(undefined);\n }\n stream._writableStreamController._abortReason = reason;\n (_a = stream._writableStreamController._abortController) === null || _a === void 0 ? void 0 : _a.abort();\n // TypeScript narrows the type of `stream._state` down to 'writable' | 'erroring',\n // but it doesn't know that signaling abort runs author code that might have changed the state.\n // Widen the type again by casting to WritableStreamState.\n const state = stream._state;\n if (state === 'closed' || state === 'errored') {\n return promiseResolvedWith(undefined);\n }\n if (stream._pendingAbortRequest !== undefined) {\n return stream._pendingAbortRequest._promise;\n }\n let wasAlreadyErroring = false;\n if (state === 'erroring') {\n wasAlreadyErroring = true;\n // reason will not be used, so don't keep a reference to it.\n reason = undefined;\n }\n const promise = newPromise((resolve, reject) => {\n stream._pendingAbortRequest = {\n _promise: undefined,\n _resolve: resolve,\n _reject: reject,\n _reason: reason,\n _wasAlreadyErroring: wasAlreadyErroring\n };\n });\n stream._pendingAbortRequest._promise = promise;\n if (!wasAlreadyErroring) {\n WritableStreamStartErroring(stream, reason);\n }\n return promise;\n }\n function WritableStreamClose(stream) {\n const state = stream._state;\n if (state === 'closed' || state === 'errored') {\n return promiseRejectedWith(new TypeError(`The stream (in ${state} state) is not in the writable state and cannot be closed`));\n }\n const promise = newPromise((resolve, reject) => {\n const closeRequest = {\n _resolve: resolve,\n _reject: reject\n };\n stream._closeRequest = closeRequest;\n });\n const writer = stream._writer;\n if (writer !== undefined && stream._backpressure && state === 'writable') {\n defaultWriterReadyPromiseResolve(writer);\n }\n WritableStreamDefaultControllerClose(stream._writableStreamController);\n return promise;\n }\n // WritableStream API exposed for controllers.\n function WritableStreamAddWriteRequest(stream) {\n const promise = newPromise((resolve, reject) => {\n const writeRequest = {\n _resolve: resolve,\n _reject: reject\n };\n stream._writeRequests.push(writeRequest);\n });\n return promise;\n }\n function WritableStreamDealWithRejection(stream, error) {\n const state = stream._state;\n if (state === 'writable') {\n WritableStreamStartErroring(stream, error);\n return;\n }\n WritableStreamFinishErroring(stream);\n }\n function WritableStreamStartErroring(stream, reason) {\n const controller = stream._writableStreamController;\n stream._state = 'erroring';\n stream._storedError = reason;\n const writer = stream._writer;\n if (writer !== undefined) {\n WritableStreamDefaultWriterEnsureReadyPromiseRejected(writer, reason);\n }\n if (!WritableStreamHasOperationMarkedInFlight(stream) && controller._started) {\n WritableStreamFinishErroring(stream);\n }\n }\n function 
WritableStreamFinishErroring(stream) {\n stream._state = 'errored';\n stream._writableStreamController[ErrorSteps]();\n const storedError = stream._storedError;\n stream._writeRequests.forEach(writeRequest => {\n writeRequest._reject(storedError);\n });\n stream._writeRequests = new SimpleQueue();\n if (stream._pendingAbortRequest === undefined) {\n WritableStreamRejectCloseAndClosedPromiseIfNeeded(stream);\n return;\n }\n const abortRequest = stream._pendingAbortRequest;\n stream._pendingAbortRequest = undefined;\n if (abortRequest._wasAlreadyErroring) {\n abortRequest._reject(storedError);\n WritableStreamRejectCloseAndClosedPromiseIfNeeded(stream);\n return;\n }\n const promise = stream._writableStreamController[AbortSteps](abortRequest._reason);\n uponPromise(promise, () => {\n abortRequest._resolve();\n WritableStreamRejectCloseAndClosedPromiseIfNeeded(stream);\n }, (reason) => {\n abortRequest._reject(reason);\n WritableStreamRejectCloseAndClosedPromiseIfNeeded(stream);\n });\n }\n function WritableStreamFinishInFlightWrite(stream) {\n stream._inFlightWriteRequest._resolve(undefined);\n stream._inFlightWriteRequest = undefined;\n }\n function WritableStreamFinishInFlightWriteWithError(stream, error) {\n stream._inFlightWriteRequest._reject(error);\n stream._inFlightWriteRequest = undefined;\n WritableStreamDealWithRejection(stream, error);\n }\n function WritableStreamFinishInFlightClose(stream) {\n stream._inFlightCloseRequest._resolve(undefined);\n stream._inFlightCloseRequest = undefined;\n const state = stream._state;\n if (state === 'erroring') {\n // The error was too late to do anything, so it is ignored.\n stream._storedError = undefined;\n if (stream._pendingAbortRequest !== undefined) {\n stream._pendingAbortRequest._resolve();\n stream._pendingAbortRequest = undefined;\n }\n }\n stream._state = 'closed';\n const writer = stream._writer;\n if (writer !== undefined) {\n defaultWriterClosedPromiseResolve(writer);\n }\n }\n function WritableStreamFinishInFlightCloseWithError(stream, error) {\n stream._inFlightCloseRequest._reject(error);\n stream._inFlightCloseRequest = undefined;\n // Never execute sink abort() after sink close().\n if (stream._pendingAbortRequest !== undefined) {\n stream._pendingAbortRequest._reject(error);\n stream._pendingAbortRequest = undefined;\n }\n WritableStreamDealWithRejection(stream, error);\n }\n // TODO(ricea): Fix alphabetical order.\n function WritableStreamCloseQueuedOrInFlight(stream) {\n if (stream._closeRequest === undefined && stream._inFlightCloseRequest === undefined) {\n return false;\n }\n return true;\n }\n function WritableStreamHasOperationMarkedInFlight(stream) {\n if (stream._inFlightWriteRequest === undefined && stream._inFlightCloseRequest === undefined) {\n return false;\n }\n return true;\n }\n function WritableStreamMarkCloseRequestInFlight(stream) {\n stream._inFlightCloseRequest = stream._closeRequest;\n stream._closeRequest = undefined;\n }\n function WritableStreamMarkFirstWriteRequestInFlight(stream) {\n stream._inFlightWriteRequest = stream._writeRequests.shift();\n }\n function WritableStreamRejectCloseAndClosedPromiseIfNeeded(stream) {\n if (stream._closeRequest !== undefined) {\n stream._closeRequest._reject(stream._storedError);\n stream._closeRequest = undefined;\n }\n const writer = stream._writer;\n if (writer !== undefined) {\n defaultWriterClosedPromiseReject(writer, stream._storedError);\n }\n }\n function WritableStreamUpdateBackpressure(stream, backpressure) {\n const writer = stream._writer;\n if (writer 
!== undefined && backpressure !== stream._backpressure) {\n if (backpressure) {\n defaultWriterReadyPromiseReset(writer);\n }\n else {\n defaultWriterReadyPromiseResolve(writer);\n }\n }\n stream._backpressure = backpressure;\n }\n /**\n * A default writer vended by a {@link WritableStream}.\n *\n * @public\n */\n class WritableStreamDefaultWriter {\n constructor(stream) {\n assertRequiredArgument(stream, 1, 'WritableStreamDefaultWriter');\n assertWritableStream(stream, 'First parameter');\n if (IsWritableStreamLocked(stream)) {\n throw new TypeError('This stream has already been locked for exclusive writing by another writer');\n }\n this._ownerWritableStream = stream;\n stream._writer = this;\n const state = stream._state;\n if (state === 'writable') {\n if (!WritableStreamCloseQueuedOrInFlight(stream) && stream._backpressure) {\n defaultWriterReadyPromiseInitialize(this);\n }\n else {\n defaultWriterReadyPromiseInitializeAsResolved(this);\n }\n defaultWriterClosedPromiseInitialize(this);\n }\n else if (state === 'erroring') {\n defaultWriterReadyPromiseInitializeAsRejected(this, stream._storedError);\n defaultWriterClosedPromiseInitialize(this);\n }\n else if (state === 'closed') {\n defaultWriterReadyPromiseInitializeAsResolved(this);\n defaultWriterClosedPromiseInitializeAsResolved(this);\n }\n else {\n const storedError = stream._storedError;\n defaultWriterReadyPromiseInitializeAsRejected(this, storedError);\n defaultWriterClosedPromiseInitializeAsRejected(this, storedError);\n }\n }\n /**\n * Returns a promise that will be fulfilled when the stream becomes closed, or rejected if the stream ever errors or\n * the writer’s lock is released before the stream finishes closing.\n */\n get closed() {\n if (!IsWritableStreamDefaultWriter(this)) {\n return promiseRejectedWith(defaultWriterBrandCheckException('closed'));\n }\n return this._closedPromise;\n }\n /**\n * Returns the desired size to fill the stream’s internal queue. It can be negative, if the queue is over-full.\n * A producer can use this information to determine the right amount of data to write.\n *\n * It will be `null` if the stream cannot be successfully written to (due to either being errored, or having an abort\n * queued up). It will return zero if the stream is closed. And the getter will throw an exception if invoked when\n * the writer’s lock is released.\n */\n get desiredSize() {\n if (!IsWritableStreamDefaultWriter(this)) {\n throw defaultWriterBrandCheckException('desiredSize');\n }\n if (this._ownerWritableStream === undefined) {\n throw defaultWriterLockException('desiredSize');\n }\n return WritableStreamDefaultWriterGetDesiredSize(this);\n }\n /**\n * Returns a promise that will be fulfilled when the desired size to fill the stream’s internal queue transitions\n * from non-positive to positive, signaling that it is no longer applying backpressure. 
Once the desired size dips\n * back to zero or below, the getter will return a new promise that stays pending until the next transition.\n *\n * If the stream becomes errored or aborted, or the writer’s lock is released, the returned promise will become\n * rejected.\n */\n get ready() {\n if (!IsWritableStreamDefaultWriter(this)) {\n return promiseRejectedWith(defaultWriterBrandCheckException('ready'));\n }\n return this._readyPromise;\n }\n /**\n * If the reader is active, behaves the same as {@link WritableStream.abort | stream.abort(reason)}.\n */\n abort(reason = undefined) {\n if (!IsWritableStreamDefaultWriter(this)) {\n return promiseRejectedWith(defaultWriterBrandCheckException('abort'));\n }\n if (this._ownerWritableStream === undefined) {\n return promiseRejectedWith(defaultWriterLockException('abort'));\n }\n return WritableStreamDefaultWriterAbort(this, reason);\n }\n /**\n * If the reader is active, behaves the same as {@link WritableStream.close | stream.close()}.\n */\n close() {\n if (!IsWritableStreamDefaultWriter(this)) {\n return promiseRejectedWith(defaultWriterBrandCheckException('close'));\n }\n const stream = this._ownerWritableStream;\n if (stream === undefined) {\n return promiseRejectedWith(defaultWriterLockException('close'));\n }\n if (WritableStreamCloseQueuedOrInFlight(stream)) {\n return promiseRejectedWith(new TypeError('Cannot close an already-closing stream'));\n }\n return WritableStreamDefaultWriterClose(this);\n }\n /**\n * Releases the writer’s lock on the corresponding stream. After the lock is released, the writer is no longer active.\n * If the associated stream is errored when the lock is released, the writer will appear errored in the same way from\n * now on; otherwise, the writer will appear closed.\n *\n * Note that the lock can still be released even if some ongoing writes have not yet finished (i.e. 
even if the\n * promises returned from previous calls to {@link WritableStreamDefaultWriter.write | write()} have not yet settled).\n * It’s not necessary to hold the lock on the writer for the duration of the write; the lock instead simply prevents\n * other producers from writing in an interleaved manner.\n */\n releaseLock() {\n if (!IsWritableStreamDefaultWriter(this)) {\n throw defaultWriterBrandCheckException('releaseLock');\n }\n const stream = this._ownerWritableStream;\n if (stream === undefined) {\n return;\n }\n WritableStreamDefaultWriterRelease(this);\n }\n write(chunk = undefined) {\n if (!IsWritableStreamDefaultWriter(this)) {\n return promiseRejectedWith(defaultWriterBrandCheckException('write'));\n }\n if (this._ownerWritableStream === undefined) {\n return promiseRejectedWith(defaultWriterLockException('write to'));\n }\n return WritableStreamDefaultWriterWrite(this, chunk);\n }\n }\n Object.defineProperties(WritableStreamDefaultWriter.prototype, {\n abort: { enumerable: true },\n close: { enumerable: true },\n releaseLock: { enumerable: true },\n write: { enumerable: true },\n closed: { enumerable: true },\n desiredSize: { enumerable: true },\n ready: { enumerable: true }\n });\n if (typeof SymbolPolyfill.toStringTag === 'symbol') {\n Object.defineProperty(WritableStreamDefaultWriter.prototype, SymbolPolyfill.toStringTag, {\n value: 'WritableStreamDefaultWriter',\n configurable: true\n });\n }\n // Abstract operations for the WritableStreamDefaultWriter.\n function IsWritableStreamDefaultWriter(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, '_ownerWritableStream')) {\n return false;\n }\n return x instanceof WritableStreamDefaultWriter;\n }\n // A client of WritableStreamDefaultWriter may use these functions directly to bypass state check.\n function WritableStreamDefaultWriterAbort(writer, reason) {\n const stream = writer._ownerWritableStream;\n return WritableStreamAbort(stream, reason);\n }\n function WritableStreamDefaultWriterClose(writer) {\n const stream = writer._ownerWritableStream;\n return WritableStreamClose(stream);\n }\n function WritableStreamDefaultWriterCloseWithErrorPropagation(writer) {\n const stream = writer._ownerWritableStream;\n const state = stream._state;\n if (WritableStreamCloseQueuedOrInFlight(stream) || state === 'closed') {\n return promiseResolvedWith(undefined);\n }\n if (state === 'errored') {\n return promiseRejectedWith(stream._storedError);\n }\n return WritableStreamDefaultWriterClose(writer);\n }\n function WritableStreamDefaultWriterEnsureClosedPromiseRejected(writer, error) {\n if (writer._closedPromiseState === 'pending') {\n defaultWriterClosedPromiseReject(writer, error);\n }\n else {\n defaultWriterClosedPromiseResetToRejected(writer, error);\n }\n }\n function WritableStreamDefaultWriterEnsureReadyPromiseRejected(writer, error) {\n if (writer._readyPromiseState === 'pending') {\n defaultWriterReadyPromiseReject(writer, error);\n }\n else {\n defaultWriterReadyPromiseResetToRejected(writer, error);\n }\n }\n function WritableStreamDefaultWriterGetDesiredSize(writer) {\n const stream = writer._ownerWritableStream;\n const state = stream._state;\n if (state === 'errored' || state === 'erroring') {\n return null;\n }\n if (state === 'closed') {\n return 0;\n }\n return WritableStreamDefaultControllerGetDesiredSize(stream._writableStreamController);\n }\n function WritableStreamDefaultWriterRelease(writer) {\n const stream = writer._ownerWritableStream;\n const releasedError = 
new TypeError(`Writer was released and can no longer be used to monitor the stream's closedness`);\n WritableStreamDefaultWriterEnsureReadyPromiseRejected(writer, releasedError);\n // The state transitions to \"errored\" before the sink abort() method runs, but the writer.closed promise is not\n // rejected until afterwards. This means that simply testing state will not work.\n WritableStreamDefaultWriterEnsureClosedPromiseRejected(writer, releasedError);\n stream._writer = undefined;\n writer._ownerWritableStream = undefined;\n }\n function WritableStreamDefaultWriterWrite(writer, chunk) {\n const stream = writer._ownerWritableStream;\n const controller = stream._writableStreamController;\n const chunkSize = WritableStreamDefaultControllerGetChunkSize(controller, chunk);\n if (stream !== writer._ownerWritableStream) {\n return promiseRejectedWith(defaultWriterLockException('write to'));\n }\n const state = stream._state;\n if (state === 'errored') {\n return promiseRejectedWith(stream._storedError);\n }\n if (WritableStreamCloseQueuedOrInFlight(stream) || state === 'closed') {\n return promiseRejectedWith(new TypeError('The stream is closing or closed and cannot be written to'));\n }\n if (state === 'erroring') {\n return promiseRejectedWith(stream._storedError);\n }\n const promise = WritableStreamAddWriteRequest(stream);\n WritableStreamDefaultControllerWrite(controller, chunk, chunkSize);\n return promise;\n }\n const closeSentinel = {};\n /**\n * Allows control of a {@link WritableStream | writable stream}'s state and internal queue.\n *\n * @public\n */\n class WritableStreamDefaultController {\n constructor() {\n throw new TypeError('Illegal constructor');\n }\n /**\n * The reason which was passed to `WritableStream.abort(reason)` when the stream was aborted.\n *\n * @deprecated\n * This property has been removed from the specification, see https://github.com/whatwg/streams/pull/1177.\n * Use {@link WritableStreamDefaultController.signal}'s `reason` instead.\n */\n get abortReason() {\n if (!IsWritableStreamDefaultController(this)) {\n throw defaultControllerBrandCheckException$2('abortReason');\n }\n return this._abortReason;\n }\n /**\n * An `AbortSignal` that can be used to abort the pending write or close operation when the stream is aborted.\n */\n get signal() {\n if (!IsWritableStreamDefaultController(this)) {\n throw defaultControllerBrandCheckException$2('signal');\n }\n if (this._abortController === undefined) {\n // Older browsers or older Node versions may not support `AbortController` or `AbortSignal`.\n // We don't want to bundle and ship an `AbortController` polyfill together with our polyfill,\n // so instead we only implement support for `signal` if we find a global `AbortController` constructor.\n throw new TypeError('WritableStreamDefaultController.prototype.signal is not supported');\n }\n return this._abortController.signal;\n }\n /**\n * Closes the controlled writable stream, making all future interactions with it fail with the given error `e`.\n *\n * This method is rarely used, since usually it suffices to return a rejected promise from one of the underlying\n * sink's methods. 
However, it can be useful for suddenly shutting down a stream in response to an event outside the\n * normal lifecycle of interactions with the underlying sink.\n */\n error(e = undefined) {\n if (!IsWritableStreamDefaultController(this)) {\n throw defaultControllerBrandCheckException$2('error');\n }\n const state = this._controlledWritableStream._state;\n if (state !== 'writable') {\n // The stream is closed, errored or will be soon. The sink can't do anything useful if it gets an error here, so\n // just treat it as a no-op.\n return;\n }\n WritableStreamDefaultControllerError(this, e);\n }\n /** @internal */\n [AbortSteps](reason) {\n const result = this._abortAlgorithm(reason);\n WritableStreamDefaultControllerClearAlgorithms(this);\n return result;\n }\n /** @internal */\n [ErrorSteps]() {\n ResetQueue(this);\n }\n }\n Object.defineProperties(WritableStreamDefaultController.prototype, {\n abortReason: { enumerable: true },\n signal: { enumerable: true },\n error: { enumerable: true }\n });\n if (typeof SymbolPolyfill.toStringTag === 'symbol') {\n Object.defineProperty(WritableStreamDefaultController.prototype, SymbolPolyfill.toStringTag, {\n value: 'WritableStreamDefaultController',\n configurable: true\n });\n }\n // Abstract operations implementing interface required by the WritableStream.\n function IsWritableStreamDefaultController(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, '_controlledWritableStream')) {\n return false;\n }\n return x instanceof WritableStreamDefaultController;\n }\n function SetUpWritableStreamDefaultController(stream, controller, startAlgorithm, writeAlgorithm, closeAlgorithm, abortAlgorithm, highWaterMark, sizeAlgorithm) {\n controller._controlledWritableStream = stream;\n stream._writableStreamController = controller;\n // Need to set the slots so that the assert doesn't fire. 
In the spec the slots already exist implicitly.\n controller._queue = undefined;\n controller._queueTotalSize = undefined;\n ResetQueue(controller);\n controller._abortReason = undefined;\n controller._abortController = createAbortController();\n controller._started = false;\n controller._strategySizeAlgorithm = sizeAlgorithm;\n controller._strategyHWM = highWaterMark;\n controller._writeAlgorithm = writeAlgorithm;\n controller._closeAlgorithm = closeAlgorithm;\n controller._abortAlgorithm = abortAlgorithm;\n const backpressure = WritableStreamDefaultControllerGetBackpressure(controller);\n WritableStreamUpdateBackpressure(stream, backpressure);\n const startResult = startAlgorithm();\n const startPromise = promiseResolvedWith(startResult);\n uponPromise(startPromise, () => {\n controller._started = true;\n WritableStreamDefaultControllerAdvanceQueueIfNeeded(controller);\n }, r => {\n controller._started = true;\n WritableStreamDealWithRejection(stream, r);\n });\n }\n function SetUpWritableStreamDefaultControllerFromUnderlyingSink(stream, underlyingSink, highWaterMark, sizeAlgorithm) {\n const controller = Object.create(WritableStreamDefaultController.prototype);\n let startAlgorithm = () => undefined;\n let writeAlgorithm = () => promiseResolvedWith(undefined);\n let closeAlgorithm = () => promiseResolvedWith(undefined);\n let abortAlgorithm = () => promiseResolvedWith(undefined);\n if (underlyingSink.start !== undefined) {\n startAlgorithm = () => underlyingSink.start(controller);\n }\n if (underlyingSink.write !== undefined) {\n writeAlgorithm = chunk => underlyingSink.write(chunk, controller);\n }\n if (underlyingSink.close !== undefined) {\n closeAlgorithm = () => underlyingSink.close();\n }\n if (underlyingSink.abort !== undefined) {\n abortAlgorithm = reason => underlyingSink.abort(reason);\n }\n SetUpWritableStreamDefaultController(stream, controller, startAlgorithm, writeAlgorithm, closeAlgorithm, abortAlgorithm, highWaterMark, sizeAlgorithm);\n }\n // ClearAlgorithms may be called twice. 
Erroring the same stream in multiple ways will often result in redundant calls.\n function WritableStreamDefaultControllerClearAlgorithms(controller) {\n controller._writeAlgorithm = undefined;\n controller._closeAlgorithm = undefined;\n controller._abortAlgorithm = undefined;\n controller._strategySizeAlgorithm = undefined;\n }\n function WritableStreamDefaultControllerClose(controller) {\n EnqueueValueWithSize(controller, closeSentinel, 0);\n WritableStreamDefaultControllerAdvanceQueueIfNeeded(controller);\n }\n function WritableStreamDefaultControllerGetChunkSize(controller, chunk) {\n try {\n return controller._strategySizeAlgorithm(chunk);\n }\n catch (chunkSizeE) {\n WritableStreamDefaultControllerErrorIfNeeded(controller, chunkSizeE);\n return 1;\n }\n }\n function WritableStreamDefaultControllerGetDesiredSize(controller) {\n return controller._strategyHWM - controller._queueTotalSize;\n }\n function WritableStreamDefaultControllerWrite(controller, chunk, chunkSize) {\n try {\n EnqueueValueWithSize(controller, chunk, chunkSize);\n }\n catch (enqueueE) {\n WritableStreamDefaultControllerErrorIfNeeded(controller, enqueueE);\n return;\n }\n const stream = controller._controlledWritableStream;\n if (!WritableStreamCloseQueuedOrInFlight(stream) && stream._state === 'writable') {\n const backpressure = WritableStreamDefaultControllerGetBackpressure(controller);\n WritableStreamUpdateBackpressure(stream, backpressure);\n }\n WritableStreamDefaultControllerAdvanceQueueIfNeeded(controller);\n }\n // Abstract operations for the WritableStreamDefaultController.\n function WritableStreamDefaultControllerAdvanceQueueIfNeeded(controller) {\n const stream = controller._controlledWritableStream;\n if (!controller._started) {\n return;\n }\n if (stream._inFlightWriteRequest !== undefined) {\n return;\n }\n const state = stream._state;\n if (state === 'erroring') {\n WritableStreamFinishErroring(stream);\n return;\n }\n if (controller._queue.length === 0) {\n return;\n }\n const value = PeekQueueValue(controller);\n if (value === closeSentinel) {\n WritableStreamDefaultControllerProcessClose(controller);\n }\n else {\n WritableStreamDefaultControllerProcessWrite(controller, value);\n }\n }\n function WritableStreamDefaultControllerErrorIfNeeded(controller, error) {\n if (controller._controlledWritableStream._state === 'writable') {\n WritableStreamDefaultControllerError(controller, error);\n }\n }\n function WritableStreamDefaultControllerProcessClose(controller) {\n const stream = controller._controlledWritableStream;\n WritableStreamMarkCloseRequestInFlight(stream);\n DequeueValue(controller);\n const sinkClosePromise = controller._closeAlgorithm();\n WritableStreamDefaultControllerClearAlgorithms(controller);\n uponPromise(sinkClosePromise, () => {\n WritableStreamFinishInFlightClose(stream);\n }, reason => {\n WritableStreamFinishInFlightCloseWithError(stream, reason);\n });\n }\n function WritableStreamDefaultControllerProcessWrite(controller, chunk) {\n const stream = controller._controlledWritableStream;\n WritableStreamMarkFirstWriteRequestInFlight(stream);\n const sinkWritePromise = controller._writeAlgorithm(chunk);\n uponPromise(sinkWritePromise, () => {\n WritableStreamFinishInFlightWrite(stream);\n const state = stream._state;\n DequeueValue(controller);\n if (!WritableStreamCloseQueuedOrInFlight(stream) && state === 'writable') {\n const backpressure = WritableStreamDefaultControllerGetBackpressure(controller);\n WritableStreamUpdateBackpressure(stream, backpressure);\n }\n 
WritableStreamDefaultControllerAdvanceQueueIfNeeded(controller);\n }, reason => {\n if (stream._state === 'writable') {\n WritableStreamDefaultControllerClearAlgorithms(controller);\n }\n WritableStreamFinishInFlightWriteWithError(stream, reason);\n });\n }\n function WritableStreamDefaultControllerGetBackpressure(controller) {\n const desiredSize = WritableStreamDefaultControllerGetDesiredSize(controller);\n return desiredSize <= 0;\n }\n // A client of WritableStreamDefaultController may use these functions directly to bypass state check.\n function WritableStreamDefaultControllerError(controller, error) {\n const stream = controller._controlledWritableStream;\n WritableStreamDefaultControllerClearAlgorithms(controller);\n WritableStreamStartErroring(stream, error);\n }\n // Helper functions for the WritableStream.\n function streamBrandCheckException$2(name) {\n return new TypeError(`WritableStream.prototype.${name} can only be used on a WritableStream`);\n }\n // Helper functions for the WritableStreamDefaultController.\n function defaultControllerBrandCheckException$2(name) {\n return new TypeError(`WritableStreamDefaultController.prototype.${name} can only be used on a WritableStreamDefaultController`);\n }\n // Helper functions for the WritableStreamDefaultWriter.\n function defaultWriterBrandCheckException(name) {\n return new TypeError(`WritableStreamDefaultWriter.prototype.${name} can only be used on a WritableStreamDefaultWriter`);\n }\n function defaultWriterLockException(name) {\n return new TypeError('Cannot ' + name + ' a stream using a released writer');\n }\n function defaultWriterClosedPromiseInitialize(writer) {\n writer._closedPromise = newPromise((resolve, reject) => {\n writer._closedPromise_resolve = resolve;\n writer._closedPromise_reject = reject;\n writer._closedPromiseState = 'pending';\n });\n }\n function defaultWriterClosedPromiseInitializeAsRejected(writer, reason) {\n defaultWriterClosedPromiseInitialize(writer);\n defaultWriterClosedPromiseReject(writer, reason);\n }\n function defaultWriterClosedPromiseInitializeAsResolved(writer) {\n defaultWriterClosedPromiseInitialize(writer);\n defaultWriterClosedPromiseResolve(writer);\n }\n function defaultWriterClosedPromiseReject(writer, reason) {\n if (writer._closedPromise_reject === undefined) {\n return;\n }\n setPromiseIsHandledToTrue(writer._closedPromise);\n writer._closedPromise_reject(reason);\n writer._closedPromise_resolve = undefined;\n writer._closedPromise_reject = undefined;\n writer._closedPromiseState = 'rejected';\n }\n function defaultWriterClosedPromiseResetToRejected(writer, reason) {\n defaultWriterClosedPromiseInitializeAsRejected(writer, reason);\n }\n function defaultWriterClosedPromiseResolve(writer) {\n if (writer._closedPromise_resolve === undefined) {\n return;\n }\n writer._closedPromise_resolve(undefined);\n writer._closedPromise_resolve = undefined;\n writer._closedPromise_reject = undefined;\n writer._closedPromiseState = 'resolved';\n }\n function defaultWriterReadyPromiseInitialize(writer) {\n writer._readyPromise = newPromise((resolve, reject) => {\n writer._readyPromise_resolve = resolve;\n writer._readyPromise_reject = reject;\n });\n writer._readyPromiseState = 'pending';\n }\n function defaultWriterReadyPromiseInitializeAsRejected(writer, reason) {\n defaultWriterReadyPromiseInitialize(writer);\n defaultWriterReadyPromiseReject(writer, reason);\n }\n function defaultWriterReadyPromiseInitializeAsResolved(writer) {\n defaultWriterReadyPromiseInitialize(writer);\n 
defaultWriterReadyPromiseResolve(writer);\n }\n function defaultWriterReadyPromiseReject(writer, reason) {\n if (writer._readyPromise_reject === undefined) {\n return;\n }\n setPromiseIsHandledToTrue(writer._readyPromise);\n writer._readyPromise_reject(reason);\n writer._readyPromise_resolve = undefined;\n writer._readyPromise_reject = undefined;\n writer._readyPromiseState = 'rejected';\n }\n function defaultWriterReadyPromiseReset(writer) {\n defaultWriterReadyPromiseInitialize(writer);\n }\n function defaultWriterReadyPromiseResetToRejected(writer, reason) {\n defaultWriterReadyPromiseInitializeAsRejected(writer, reason);\n }\n function defaultWriterReadyPromiseResolve(writer) {\n if (writer._readyPromise_resolve === undefined) {\n return;\n }\n writer._readyPromise_resolve(undefined);\n writer._readyPromise_resolve = undefined;\n writer._readyPromise_reject = undefined;\n writer._readyPromiseState = 'fulfilled';\n }\n\n /// \n const NativeDOMException = typeof DOMException !== 'undefined' ? DOMException : undefined;\n\n /// \n function isDOMExceptionConstructor(ctor) {\n if (!(typeof ctor === 'function' || typeof ctor === 'object')) {\n return false;\n }\n try {\n new ctor();\n return true;\n }\n catch (_a) {\n return false;\n }\n }\n function createDOMExceptionPolyfill() {\n // eslint-disable-next-line no-shadow\n const ctor = function DOMException(message, name) {\n this.message = message || '';\n this.name = name || 'Error';\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n };\n ctor.prototype = Object.create(Error.prototype);\n Object.defineProperty(ctor.prototype, 'constructor', { value: ctor, writable: true, configurable: true });\n return ctor;\n }\n // eslint-disable-next-line no-redeclare\n const DOMException$1 = isDOMExceptionConstructor(NativeDOMException) ? 
NativeDOMException : createDOMExceptionPolyfill();\n\n function ReadableStreamPipeTo(source, dest, preventClose, preventAbort, preventCancel, signal) {\n const reader = AcquireReadableStreamDefaultReader(source);\n const writer = AcquireWritableStreamDefaultWriter(dest);\n source._disturbed = true;\n let shuttingDown = false;\n // This is used to keep track of the spec's requirement that we wait for ongoing writes during shutdown.\n let currentWrite = promiseResolvedWith(undefined);\n return newPromise((resolve, reject) => {\n let abortAlgorithm;\n if (signal !== undefined) {\n abortAlgorithm = () => {\n const error = new DOMException$1('Aborted', 'AbortError');\n const actions = [];\n if (!preventAbort) {\n actions.push(() => {\n if (dest._state === 'writable') {\n return WritableStreamAbort(dest, error);\n }\n return promiseResolvedWith(undefined);\n });\n }\n if (!preventCancel) {\n actions.push(() => {\n if (source._state === 'readable') {\n return ReadableStreamCancel(source, error);\n }\n return promiseResolvedWith(undefined);\n });\n }\n shutdownWithAction(() => Promise.all(actions.map(action => action())), true, error);\n };\n if (signal.aborted) {\n abortAlgorithm();\n return;\n }\n signal.addEventListener('abort', abortAlgorithm);\n }\n // Using reader and writer, read all chunks from this and write them to dest\n // - Backpressure must be enforced\n // - Shutdown must stop all activity\n function pipeLoop() {\n return newPromise((resolveLoop, rejectLoop) => {\n function next(done) {\n if (done) {\n resolveLoop();\n }\n else {\n // Use `PerformPromiseThen` instead of `uponPromise` to avoid\n // adding unnecessary `.catch(rethrowAssertionErrorRejection)` handlers\n PerformPromiseThen(pipeStep(), next, rejectLoop);\n }\n }\n next(false);\n });\n }\n function pipeStep() {\n if (shuttingDown) {\n return promiseResolvedWith(true);\n }\n return PerformPromiseThen(writer._readyPromise, () => {\n return newPromise((resolveRead, rejectRead) => {\n ReadableStreamDefaultReaderRead(reader, {\n _chunkSteps: chunk => {\n currentWrite = PerformPromiseThen(WritableStreamDefaultWriterWrite(writer, chunk), undefined, noop);\n resolveRead(false);\n },\n _closeSteps: () => resolveRead(true),\n _errorSteps: rejectRead\n });\n });\n });\n }\n // Errors must be propagated forward\n isOrBecomesErrored(source, reader._closedPromise, storedError => {\n if (!preventAbort) {\n shutdownWithAction(() => WritableStreamAbort(dest, storedError), true, storedError);\n }\n else {\n shutdown(true, storedError);\n }\n });\n // Errors must be propagated backward\n isOrBecomesErrored(dest, writer._closedPromise, storedError => {\n if (!preventCancel) {\n shutdownWithAction(() => ReadableStreamCancel(source, storedError), true, storedError);\n }\n else {\n shutdown(true, storedError);\n }\n });\n // Closing must be propagated forward\n isOrBecomesClosed(source, reader._closedPromise, () => {\n if (!preventClose) {\n shutdownWithAction(() => WritableStreamDefaultWriterCloseWithErrorPropagation(writer));\n }\n else {\n shutdown();\n }\n });\n // Closing must be propagated backward\n if (WritableStreamCloseQueuedOrInFlight(dest) || dest._state === 'closed') {\n const destClosed = new TypeError('the destination writable stream closed before all data could be piped to it');\n if (!preventCancel) {\n shutdownWithAction(() => ReadableStreamCancel(source, destClosed), true, destClosed);\n }\n else {\n shutdown(true, destClosed);\n }\n }\n setPromiseIsHandledToTrue(pipeLoop());\n function waitForWritesToFinish() {\n // Another 
write may have started while we were waiting on this currentWrite, so we have to be sure to wait\n // for that too.\n const oldCurrentWrite = currentWrite;\n return PerformPromiseThen(currentWrite, () => oldCurrentWrite !== currentWrite ? waitForWritesToFinish() : undefined);\n }\n function isOrBecomesErrored(stream, promise, action) {\n if (stream._state === 'errored') {\n action(stream._storedError);\n }\n else {\n uponRejection(promise, action);\n }\n }\n function isOrBecomesClosed(stream, promise, action) {\n if (stream._state === 'closed') {\n action();\n }\n else {\n uponFulfillment(promise, action);\n }\n }\n function shutdownWithAction(action, originalIsError, originalError) {\n if (shuttingDown) {\n return;\n }\n shuttingDown = true;\n if (dest._state === 'writable' && !WritableStreamCloseQueuedOrInFlight(dest)) {\n uponFulfillment(waitForWritesToFinish(), doTheRest);\n }\n else {\n doTheRest();\n }\n function doTheRest() {\n uponPromise(action(), () => finalize(originalIsError, originalError), newError => finalize(true, newError));\n }\n }\n function shutdown(isError, error) {\n if (shuttingDown) {\n return;\n }\n shuttingDown = true;\n if (dest._state === 'writable' && !WritableStreamCloseQueuedOrInFlight(dest)) {\n uponFulfillment(waitForWritesToFinish(), () => finalize(isError, error));\n }\n else {\n finalize(isError, error);\n }\n }\n function finalize(isError, error) {\n WritableStreamDefaultWriterRelease(writer);\n ReadableStreamReaderGenericRelease(reader);\n if (signal !== undefined) {\n signal.removeEventListener('abort', abortAlgorithm);\n }\n if (isError) {\n reject(error);\n }\n else {\n resolve(undefined);\n }\n }\n });\n }\n\n /**\n * Allows control of a {@link ReadableStream | readable stream}'s state and internal queue.\n *\n * @public\n */\n class ReadableStreamDefaultController {\n constructor() {\n throw new TypeError('Illegal constructor');\n }\n /**\n * Returns the desired size to fill the controlled stream's internal queue. It can be negative, if the queue is\n * over-full. An underlying source ought to use this information to determine when and how to apply backpressure.\n */\n get desiredSize() {\n if (!IsReadableStreamDefaultController(this)) {\n throw defaultControllerBrandCheckException$1('desiredSize');\n }\n return ReadableStreamDefaultControllerGetDesiredSize(this);\n }\n /**\n * Closes the controlled readable stream. 
Consumers will still be able to read any previously-enqueued chunks from\n * the stream, but once those are read, the stream will become closed.\n */\n close() {\n if (!IsReadableStreamDefaultController(this)) {\n throw defaultControllerBrandCheckException$1('close');\n }\n if (!ReadableStreamDefaultControllerCanCloseOrEnqueue(this)) {\n throw new TypeError('The stream is not in a state that permits close');\n }\n ReadableStreamDefaultControllerClose(this);\n }\n enqueue(chunk = undefined) {\n if (!IsReadableStreamDefaultController(this)) {\n throw defaultControllerBrandCheckException$1('enqueue');\n }\n if (!ReadableStreamDefaultControllerCanCloseOrEnqueue(this)) {\n throw new TypeError('The stream is not in a state that permits enqueue');\n }\n return ReadableStreamDefaultControllerEnqueue(this, chunk);\n }\n /**\n * Errors the controlled readable stream, making all future interactions with it fail with the given error `e`.\n */\n error(e = undefined) {\n if (!IsReadableStreamDefaultController(this)) {\n throw defaultControllerBrandCheckException$1('error');\n }\n ReadableStreamDefaultControllerError(this, e);\n }\n /** @internal */\n [CancelSteps](reason) {\n ResetQueue(this);\n const result = this._cancelAlgorithm(reason);\n ReadableStreamDefaultControllerClearAlgorithms(this);\n return result;\n }\n /** @internal */\n [PullSteps](readRequest) {\n const stream = this._controlledReadableStream;\n if (this._queue.length > 0) {\n const chunk = DequeueValue(this);\n if (this._closeRequested && this._queue.length === 0) {\n ReadableStreamDefaultControllerClearAlgorithms(this);\n ReadableStreamClose(stream);\n }\n else {\n ReadableStreamDefaultControllerCallPullIfNeeded(this);\n }\n readRequest._chunkSteps(chunk);\n }\n else {\n ReadableStreamAddReadRequest(stream, readRequest);\n ReadableStreamDefaultControllerCallPullIfNeeded(this);\n }\n }\n }\n Object.defineProperties(ReadableStreamDefaultController.prototype, {\n close: { enumerable: true },\n enqueue: { enumerable: true },\n error: { enumerable: true },\n desiredSize: { enumerable: true }\n });\n if (typeof SymbolPolyfill.toStringTag === 'symbol') {\n Object.defineProperty(ReadableStreamDefaultController.prototype, SymbolPolyfill.toStringTag, {\n value: 'ReadableStreamDefaultController',\n configurable: true\n });\n }\n // Abstract operations for the ReadableStreamDefaultController.\n function IsReadableStreamDefaultController(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, '_controlledReadableStream')) {\n return false;\n }\n return x instanceof ReadableStreamDefaultController;\n }\n function ReadableStreamDefaultControllerCallPullIfNeeded(controller) {\n const shouldPull = ReadableStreamDefaultControllerShouldCallPull(controller);\n if (!shouldPull) {\n return;\n }\n if (controller._pulling) {\n controller._pullAgain = true;\n return;\n }\n controller._pulling = true;\n const pullPromise = controller._pullAlgorithm();\n uponPromise(pullPromise, () => {\n controller._pulling = false;\n if (controller._pullAgain) {\n controller._pullAgain = false;\n ReadableStreamDefaultControllerCallPullIfNeeded(controller);\n }\n }, e => {\n ReadableStreamDefaultControllerError(controller, e);\n });\n }\n function ReadableStreamDefaultControllerShouldCallPull(controller) {\n const stream = controller._controlledReadableStream;\n if (!ReadableStreamDefaultControllerCanCloseOrEnqueue(controller)) {\n return false;\n }\n if (!controller._started) {\n return false;\n }\n if (IsReadableStreamLocked(stream) 
&& ReadableStreamGetNumReadRequests(stream) > 0) {\n return true;\n }\n const desiredSize = ReadableStreamDefaultControllerGetDesiredSize(controller);\n if (desiredSize > 0) {\n return true;\n }\n return false;\n }\n function ReadableStreamDefaultControllerClearAlgorithms(controller) {\n controller._pullAlgorithm = undefined;\n controller._cancelAlgorithm = undefined;\n controller._strategySizeAlgorithm = undefined;\n }\n // A client of ReadableStreamDefaultController may use these functions directly to bypass state check.\n function ReadableStreamDefaultControllerClose(controller) {\n if (!ReadableStreamDefaultControllerCanCloseOrEnqueue(controller)) {\n return;\n }\n const stream = controller._controlledReadableStream;\n controller._closeRequested = true;\n if (controller._queue.length === 0) {\n ReadableStreamDefaultControllerClearAlgorithms(controller);\n ReadableStreamClose(stream);\n }\n }\n function ReadableStreamDefaultControllerEnqueue(controller, chunk) {\n if (!ReadableStreamDefaultControllerCanCloseOrEnqueue(controller)) {\n return;\n }\n const stream = controller._controlledReadableStream;\n if (IsReadableStreamLocked(stream) && ReadableStreamGetNumReadRequests(stream) > 0) {\n ReadableStreamFulfillReadRequest(stream, chunk, false);\n }\n else {\n let chunkSize;\n try {\n chunkSize = controller._strategySizeAlgorithm(chunk);\n }\n catch (chunkSizeE) {\n ReadableStreamDefaultControllerError(controller, chunkSizeE);\n throw chunkSizeE;\n }\n try {\n EnqueueValueWithSize(controller, chunk, chunkSize);\n }\n catch (enqueueE) {\n ReadableStreamDefaultControllerError(controller, enqueueE);\n throw enqueueE;\n }\n }\n ReadableStreamDefaultControllerCallPullIfNeeded(controller);\n }\n function ReadableStreamDefaultControllerError(controller, e) {\n const stream = controller._controlledReadableStream;\n if (stream._state !== 'readable') {\n return;\n }\n ResetQueue(controller);\n ReadableStreamDefaultControllerClearAlgorithms(controller);\n ReadableStreamError(stream, e);\n }\n function ReadableStreamDefaultControllerGetDesiredSize(controller) {\n const state = controller._controlledReadableStream._state;\n if (state === 'errored') {\n return null;\n }\n if (state === 'closed') {\n return 0;\n }\n return controller._strategyHWM - controller._queueTotalSize;\n }\n // This is used in the implementation of TransformStream.\n function ReadableStreamDefaultControllerHasBackpressure(controller) {\n if (ReadableStreamDefaultControllerShouldCallPull(controller)) {\n return false;\n }\n return true;\n }\n function ReadableStreamDefaultControllerCanCloseOrEnqueue(controller) {\n const state = controller._controlledReadableStream._state;\n if (!controller._closeRequested && state === 'readable') {\n return true;\n }\n return false;\n }\n function SetUpReadableStreamDefaultController(stream, controller, startAlgorithm, pullAlgorithm, cancelAlgorithm, highWaterMark, sizeAlgorithm) {\n controller._controlledReadableStream = stream;\n controller._queue = undefined;\n controller._queueTotalSize = undefined;\n ResetQueue(controller);\n controller._started = false;\n controller._closeRequested = false;\n controller._pullAgain = false;\n controller._pulling = false;\n controller._strategySizeAlgorithm = sizeAlgorithm;\n controller._strategyHWM = highWaterMark;\n controller._pullAlgorithm = pullAlgorithm;\n controller._cancelAlgorithm = cancelAlgorithm;\n stream._readableStreamController = controller;\n const startResult = startAlgorithm();\n uponPromise(promiseResolvedWith(startResult), () => {\n 
controller._started = true;\n ReadableStreamDefaultControllerCallPullIfNeeded(controller);\n }, r => {\n ReadableStreamDefaultControllerError(controller, r);\n });\n }\n function SetUpReadableStreamDefaultControllerFromUnderlyingSource(stream, underlyingSource, highWaterMark, sizeAlgorithm) {\n const controller = Object.create(ReadableStreamDefaultController.prototype);\n let startAlgorithm = () => undefined;\n let pullAlgorithm = () => promiseResolvedWith(undefined);\n let cancelAlgorithm = () => promiseResolvedWith(undefined);\n if (underlyingSource.start !== undefined) {\n startAlgorithm = () => underlyingSource.start(controller);\n }\n if (underlyingSource.pull !== undefined) {\n pullAlgorithm = () => underlyingSource.pull(controller);\n }\n if (underlyingSource.cancel !== undefined) {\n cancelAlgorithm = reason => underlyingSource.cancel(reason);\n }\n SetUpReadableStreamDefaultController(stream, controller, startAlgorithm, pullAlgorithm, cancelAlgorithm, highWaterMark, sizeAlgorithm);\n }\n // Helper functions for the ReadableStreamDefaultController.\n function defaultControllerBrandCheckException$1(name) {\n return new TypeError(`ReadableStreamDefaultController.prototype.${name} can only be used on a ReadableStreamDefaultController`);\n }\n\n function ReadableStreamTee(stream, cloneForBranch2) {\n if (IsReadableByteStreamController(stream._readableStreamController)) {\n return ReadableByteStreamTee(stream);\n }\n return ReadableStreamDefaultTee(stream);\n }\n function ReadableStreamDefaultTee(stream, cloneForBranch2) {\n const reader = AcquireReadableStreamDefaultReader(stream);\n let reading = false;\n let readAgain = false;\n let canceled1 = false;\n let canceled2 = false;\n let reason1;\n let reason2;\n let branch1;\n let branch2;\n let resolveCancelPromise;\n const cancelPromise = newPromise(resolve => {\n resolveCancelPromise = resolve;\n });\n function pullAlgorithm() {\n if (reading) {\n readAgain = true;\n return promiseResolvedWith(undefined);\n }\n reading = true;\n const readRequest = {\n _chunkSteps: chunk => {\n // This needs to be delayed a microtask because it takes at least a microtask to detect errors (using\n // reader._closedPromise below), and we want errors in stream to error both branches immediately. 
We cannot let\n // successful synchronously-available reads get ahead of asynchronously-available errors.\n queueMicrotask(() => {\n readAgain = false;\n const chunk1 = chunk;\n const chunk2 = chunk;\n // There is no way to access the cloning code right now in the reference implementation.\n // If we add one then we'll need an implementation for serializable objects.\n // if (!canceled2 && cloneForBranch2) {\n // chunk2 = StructuredDeserialize(StructuredSerialize(chunk2));\n // }\n if (!canceled1) {\n ReadableStreamDefaultControllerEnqueue(branch1._readableStreamController, chunk1);\n }\n if (!canceled2) {\n ReadableStreamDefaultControllerEnqueue(branch2._readableStreamController, chunk2);\n }\n reading = false;\n if (readAgain) {\n pullAlgorithm();\n }\n });\n },\n _closeSteps: () => {\n reading = false;\n if (!canceled1) {\n ReadableStreamDefaultControllerClose(branch1._readableStreamController);\n }\n if (!canceled2) {\n ReadableStreamDefaultControllerClose(branch2._readableStreamController);\n }\n if (!canceled1 || !canceled2) {\n resolveCancelPromise(undefined);\n }\n },\n _errorSteps: () => {\n reading = false;\n }\n };\n ReadableStreamDefaultReaderRead(reader, readRequest);\n return promiseResolvedWith(undefined);\n }\n function cancel1Algorithm(reason) {\n canceled1 = true;\n reason1 = reason;\n if (canceled2) {\n const compositeReason = CreateArrayFromList([reason1, reason2]);\n const cancelResult = ReadableStreamCancel(stream, compositeReason);\n resolveCancelPromise(cancelResult);\n }\n return cancelPromise;\n }\n function cancel2Algorithm(reason) {\n canceled2 = true;\n reason2 = reason;\n if (canceled1) {\n const compositeReason = CreateArrayFromList([reason1, reason2]);\n const cancelResult = ReadableStreamCancel(stream, compositeReason);\n resolveCancelPromise(cancelResult);\n }\n return cancelPromise;\n }\n function startAlgorithm() {\n // do nothing\n }\n branch1 = CreateReadableStream(startAlgorithm, pullAlgorithm, cancel1Algorithm);\n branch2 = CreateReadableStream(startAlgorithm, pullAlgorithm, cancel2Algorithm);\n uponRejection(reader._closedPromise, (r) => {\n ReadableStreamDefaultControllerError(branch1._readableStreamController, r);\n ReadableStreamDefaultControllerError(branch2._readableStreamController, r);\n if (!canceled1 || !canceled2) {\n resolveCancelPromise(undefined);\n }\n });\n return [branch1, branch2];\n }\n function ReadableByteStreamTee(stream) {\n let reader = AcquireReadableStreamDefaultReader(stream);\n let reading = false;\n let readAgainForBranch1 = false;\n let readAgainForBranch2 = false;\n let canceled1 = false;\n let canceled2 = false;\n let reason1;\n let reason2;\n let branch1;\n let branch2;\n let resolveCancelPromise;\n const cancelPromise = newPromise(resolve => {\n resolveCancelPromise = resolve;\n });\n function forwardReaderError(thisReader) {\n uponRejection(thisReader._closedPromise, r => {\n if (thisReader !== reader) {\n return;\n }\n ReadableByteStreamControllerError(branch1._readableStreamController, r);\n ReadableByteStreamControllerError(branch2._readableStreamController, r);\n if (!canceled1 || !canceled2) {\n resolveCancelPromise(undefined);\n }\n });\n }\n function pullWithDefaultReader() {\n if (IsReadableStreamBYOBReader(reader)) {\n ReadableStreamReaderGenericRelease(reader);\n reader = AcquireReadableStreamDefaultReader(stream);\n forwardReaderError(reader);\n }\n const readRequest = {\n _chunkSteps: chunk => {\n // This needs to be delayed a microtask because it takes at least a microtask to detect errors (using\n // 
reader._closedPromise below), and we want errors in stream to error both branches immediately. We cannot let\n // successful synchronously-available reads get ahead of asynchronously-available errors.\n queueMicrotask(() => {\n readAgainForBranch1 = false;\n readAgainForBranch2 = false;\n const chunk1 = chunk;\n let chunk2 = chunk;\n if (!canceled1 && !canceled2) {\n try {\n chunk2 = CloneAsUint8Array(chunk);\n }\n catch (cloneE) {\n ReadableByteStreamControllerError(branch1._readableStreamController, cloneE);\n ReadableByteStreamControllerError(branch2._readableStreamController, cloneE);\n resolveCancelPromise(ReadableStreamCancel(stream, cloneE));\n return;\n }\n }\n if (!canceled1) {\n ReadableByteStreamControllerEnqueue(branch1._readableStreamController, chunk1);\n }\n if (!canceled2) {\n ReadableByteStreamControllerEnqueue(branch2._readableStreamController, chunk2);\n }\n reading = false;\n if (readAgainForBranch1) {\n pull1Algorithm();\n }\n else if (readAgainForBranch2) {\n pull2Algorithm();\n }\n });\n },\n _closeSteps: () => {\n reading = false;\n if (!canceled1) {\n ReadableByteStreamControllerClose(branch1._readableStreamController);\n }\n if (!canceled2) {\n ReadableByteStreamControllerClose(branch2._readableStreamController);\n }\n if (branch1._readableStreamController._pendingPullIntos.length > 0) {\n ReadableByteStreamControllerRespond(branch1._readableStreamController, 0);\n }\n if (branch2._readableStreamController._pendingPullIntos.length > 0) {\n ReadableByteStreamControllerRespond(branch2._readableStreamController, 0);\n }\n if (!canceled1 || !canceled2) {\n resolveCancelPromise(undefined);\n }\n },\n _errorSteps: () => {\n reading = false;\n }\n };\n ReadableStreamDefaultReaderRead(reader, readRequest);\n }\n function pullWithBYOBReader(view, forBranch2) {\n if (IsReadableStreamDefaultReader(reader)) {\n ReadableStreamReaderGenericRelease(reader);\n reader = AcquireReadableStreamBYOBReader(stream);\n forwardReaderError(reader);\n }\n const byobBranch = forBranch2 ? branch2 : branch1;\n const otherBranch = forBranch2 ? branch1 : branch2;\n const readIntoRequest = {\n _chunkSteps: chunk => {\n // This needs to be delayed a microtask because it takes at least a microtask to detect errors (using\n // reader._closedPromise below), and we want errors in stream to error both branches immediately. We cannot let\n // successful synchronously-available reads get ahead of asynchronously-available errors.\n queueMicrotask(() => {\n readAgainForBranch1 = false;\n readAgainForBranch2 = false;\n const byobCanceled = forBranch2 ? canceled2 : canceled1;\n const otherCanceled = forBranch2 ? 
canceled1 : canceled2;\n if (!otherCanceled) {\n let clonedChunk;\n try {\n clonedChunk = CloneAsUint8Array(chunk);\n }\n catch (cloneE) {\n ReadableByteStreamControllerError(byobBranch._readableStreamController, cloneE);\n ReadableByteStreamControllerError(otherBranch._readableStreamController, cloneE);\n resolveCancelPromise(ReadableStreamCancel(stream, cloneE));\n return;\n }\n if (!byobCanceled) {\n ReadableByteStreamControllerRespondWithNewView(byobBranch._readableStreamController, chunk);\n }\n ReadableByteStreamControllerEnqueue(otherBranch._readableStreamController, clonedChunk);\n }\n else if (!byobCanceled) {\n ReadableByteStreamControllerRespondWithNewView(byobBranch._readableStreamController, chunk);\n }\n reading = false;\n if (readAgainForBranch1) {\n pull1Algorithm();\n }\n else if (readAgainForBranch2) {\n pull2Algorithm();\n }\n });\n },\n _closeSteps: chunk => {\n reading = false;\n const byobCanceled = forBranch2 ? canceled2 : canceled1;\n const otherCanceled = forBranch2 ? canceled1 : canceled2;\n if (!byobCanceled) {\n ReadableByteStreamControllerClose(byobBranch._readableStreamController);\n }\n if (!otherCanceled) {\n ReadableByteStreamControllerClose(otherBranch._readableStreamController);\n }\n if (chunk !== undefined) {\n if (!byobCanceled) {\n ReadableByteStreamControllerRespondWithNewView(byobBranch._readableStreamController, chunk);\n }\n if (!otherCanceled && otherBranch._readableStreamController._pendingPullIntos.length > 0) {\n ReadableByteStreamControllerRespond(otherBranch._readableStreamController, 0);\n }\n }\n if (!byobCanceled || !otherCanceled) {\n resolveCancelPromise(undefined);\n }\n },\n _errorSteps: () => {\n reading = false;\n }\n };\n ReadableStreamBYOBReaderRead(reader, view, readIntoRequest);\n }\n function pull1Algorithm() {\n if (reading) {\n readAgainForBranch1 = true;\n return promiseResolvedWith(undefined);\n }\n reading = true;\n const byobRequest = ReadableByteStreamControllerGetBYOBRequest(branch1._readableStreamController);\n if (byobRequest === null) {\n pullWithDefaultReader();\n }\n else {\n pullWithBYOBReader(byobRequest._view, false);\n }\n return promiseResolvedWith(undefined);\n }\n function pull2Algorithm() {\n if (reading) {\n readAgainForBranch2 = true;\n return promiseResolvedWith(undefined);\n }\n reading = true;\n const byobRequest = ReadableByteStreamControllerGetBYOBRequest(branch2._readableStreamController);\n if (byobRequest === null) {\n pullWithDefaultReader();\n }\n else {\n pullWithBYOBReader(byobRequest._view, true);\n }\n return promiseResolvedWith(undefined);\n }\n function cancel1Algorithm(reason) {\n canceled1 = true;\n reason1 = reason;\n if (canceled2) {\n const compositeReason = CreateArrayFromList([reason1, reason2]);\n const cancelResult = ReadableStreamCancel(stream, compositeReason);\n resolveCancelPromise(cancelResult);\n }\n return cancelPromise;\n }\n function cancel2Algorithm(reason) {\n canceled2 = true;\n reason2 = reason;\n if (canceled1) {\n const compositeReason = CreateArrayFromList([reason1, reason2]);\n const cancelResult = ReadableStreamCancel(stream, compositeReason);\n resolveCancelPromise(cancelResult);\n }\n return cancelPromise;\n }\n function startAlgorithm() {\n return;\n }\n branch1 = CreateReadableByteStream(startAlgorithm, pull1Algorithm, cancel1Algorithm);\n branch2 = CreateReadableByteStream(startAlgorithm, pull2Algorithm, cancel2Algorithm);\n forwardReaderError(reader);\n return [branch1, branch2];\n }\n\n function convertUnderlyingDefaultOrByteSource(source, context) {\n 
assertDictionary(source, context);\n const original = source;\n const autoAllocateChunkSize = original === null || original === void 0 ? void 0 : original.autoAllocateChunkSize;\n const cancel = original === null || original === void 0 ? void 0 : original.cancel;\n const pull = original === null || original === void 0 ? void 0 : original.pull;\n const start = original === null || original === void 0 ? void 0 : original.start;\n const type = original === null || original === void 0 ? void 0 : original.type;\n return {\n autoAllocateChunkSize: autoAllocateChunkSize === undefined ?\n undefined :\n convertUnsignedLongLongWithEnforceRange(autoAllocateChunkSize, `${context} has member 'autoAllocateChunkSize' that`),\n cancel: cancel === undefined ?\n undefined :\n convertUnderlyingSourceCancelCallback(cancel, original, `${context} has member 'cancel' that`),\n pull: pull === undefined ?\n undefined :\n convertUnderlyingSourcePullCallback(pull, original, `${context} has member 'pull' that`),\n start: start === undefined ?\n undefined :\n convertUnderlyingSourceStartCallback(start, original, `${context} has member 'start' that`),\n type: type === undefined ? undefined : convertReadableStreamType(type, `${context} has member 'type' that`)\n };\n }\n function convertUnderlyingSourceCancelCallback(fn, original, context) {\n assertFunction(fn, context);\n return (reason) => promiseCall(fn, original, [reason]);\n }\n function convertUnderlyingSourcePullCallback(fn, original, context) {\n assertFunction(fn, context);\n return (controller) => promiseCall(fn, original, [controller]);\n }\n function convertUnderlyingSourceStartCallback(fn, original, context) {\n assertFunction(fn, context);\n return (controller) => reflectCall(fn, original, [controller]);\n }\n function convertReadableStreamType(type, context) {\n type = `${type}`;\n if (type !== 'bytes') {\n throw new TypeError(`${context} '${type}' is not a valid enumeration value for ReadableStreamType`);\n }\n return type;\n }\n\n function convertReaderOptions(options, context) {\n assertDictionary(options, context);\n const mode = options === null || options === void 0 ? void 0 : options.mode;\n return {\n mode: mode === undefined ? undefined : convertReadableStreamReaderMode(mode, `${context} has member 'mode' that`)\n };\n }\n function convertReadableStreamReaderMode(mode, context) {\n mode = `${mode}`;\n if (mode !== 'byob') {\n throw new TypeError(`${context} '${mode}' is not a valid enumeration value for ReadableStreamReaderMode`);\n }\n return mode;\n }\n\n function convertIteratorOptions(options, context) {\n assertDictionary(options, context);\n const preventCancel = options === null || options === void 0 ? void 0 : options.preventCancel;\n return { preventCancel: Boolean(preventCancel) };\n }\n\n function convertPipeOptions(options, context) {\n assertDictionary(options, context);\n const preventAbort = options === null || options === void 0 ? void 0 : options.preventAbort;\n const preventCancel = options === null || options === void 0 ? void 0 : options.preventCancel;\n const preventClose = options === null || options === void 0 ? void 0 : options.preventClose;\n const signal = options === null || options === void 0 ? 
void 0 : options.signal;\n if (signal !== undefined) {\n assertAbortSignal(signal, `${context} has member 'signal' that`);\n }\n return {\n preventAbort: Boolean(preventAbort),\n preventCancel: Boolean(preventCancel),\n preventClose: Boolean(preventClose),\n signal\n };\n }\n function assertAbortSignal(signal, context) {\n if (!isAbortSignal(signal)) {\n throw new TypeError(`${context} is not an AbortSignal.`);\n }\n }\n\n function convertReadableWritablePair(pair, context) {\n assertDictionary(pair, context);\n const readable = pair === null || pair === void 0 ? void 0 : pair.readable;\n assertRequiredField(readable, 'readable', 'ReadableWritablePair');\n assertReadableStream(readable, `${context} has member 'readable' that`);\n const writable = pair === null || pair === void 0 ? void 0 : pair.writable;\n assertRequiredField(writable, 'writable', 'ReadableWritablePair');\n assertWritableStream(writable, `${context} has member 'writable' that`);\n return { readable, writable };\n }\n\n /**\n * A readable stream represents a source of data, from which you can read.\n *\n * @public\n */\n class ReadableStream {\n constructor(rawUnderlyingSource = {}, rawStrategy = {}) {\n if (rawUnderlyingSource === undefined) {\n rawUnderlyingSource = null;\n }\n else {\n assertObject(rawUnderlyingSource, 'First parameter');\n }\n const strategy = convertQueuingStrategy(rawStrategy, 'Second parameter');\n const underlyingSource = convertUnderlyingDefaultOrByteSource(rawUnderlyingSource, 'First parameter');\n InitializeReadableStream(this);\n if (underlyingSource.type === 'bytes') {\n if (strategy.size !== undefined) {\n throw new RangeError('The strategy for a byte stream cannot have a size function');\n }\n const highWaterMark = ExtractHighWaterMark(strategy, 0);\n SetUpReadableByteStreamControllerFromUnderlyingSource(this, underlyingSource, highWaterMark);\n }\n else {\n const sizeAlgorithm = ExtractSizeAlgorithm(strategy);\n const highWaterMark = ExtractHighWaterMark(strategy, 1);\n SetUpReadableStreamDefaultControllerFromUnderlyingSource(this, underlyingSource, highWaterMark, sizeAlgorithm);\n }\n }\n /**\n * Whether or not the readable stream is locked to a {@link ReadableStreamDefaultReader | reader}.\n */\n get locked() {\n if (!IsReadableStream(this)) {\n throw streamBrandCheckException$1('locked');\n }\n return IsReadableStreamLocked(this);\n }\n /**\n * Cancels the stream, signaling a loss of interest in the stream by a consumer.\n *\n * The supplied `reason` argument will be given to the underlying source's {@link UnderlyingSource.cancel | cancel()}\n * method, which might or might not use it.\n */\n cancel(reason = undefined) {\n if (!IsReadableStream(this)) {\n return promiseRejectedWith(streamBrandCheckException$1('cancel'));\n }\n if (IsReadableStreamLocked(this)) {\n return promiseRejectedWith(new TypeError('Cannot cancel a stream that already has a reader'));\n }\n return ReadableStreamCancel(this, reason);\n }\n getReader(rawOptions = undefined) {\n if (!IsReadableStream(this)) {\n throw streamBrandCheckException$1('getReader');\n }\n const options = convertReaderOptions(rawOptions, 'First parameter');\n if (options.mode === undefined) {\n return AcquireReadableStreamDefaultReader(this);\n }\n return AcquireReadableStreamBYOBReader(this);\n }\n pipeThrough(rawTransform, rawOptions = {}) {\n if (!IsReadableStream(this)) {\n throw streamBrandCheckException$1('pipeThrough');\n }\n assertRequiredArgument(rawTransform, 1, 'pipeThrough');\n const transform = 
convertReadableWritablePair(rawTransform, 'First parameter');\n const options = convertPipeOptions(rawOptions, 'Second parameter');\n if (IsReadableStreamLocked(this)) {\n throw new TypeError('ReadableStream.prototype.pipeThrough cannot be used on a locked ReadableStream');\n }\n if (IsWritableStreamLocked(transform.writable)) {\n throw new TypeError('ReadableStream.prototype.pipeThrough cannot be used on a locked WritableStream');\n }\n const promise = ReadableStreamPipeTo(this, transform.writable, options.preventClose, options.preventAbort, options.preventCancel, options.signal);\n setPromiseIsHandledToTrue(promise);\n return transform.readable;\n }\n pipeTo(destination, rawOptions = {}) {\n if (!IsReadableStream(this)) {\n return promiseRejectedWith(streamBrandCheckException$1('pipeTo'));\n }\n if (destination === undefined) {\n return promiseRejectedWith(`Parameter 1 is required in 'pipeTo'.`);\n }\n if (!IsWritableStream(destination)) {\n return promiseRejectedWith(new TypeError(`ReadableStream.prototype.pipeTo's first argument must be a WritableStream`));\n }\n let options;\n try {\n options = convertPipeOptions(rawOptions, 'Second parameter');\n }\n catch (e) {\n return promiseRejectedWith(e);\n }\n if (IsReadableStreamLocked(this)) {\n return promiseRejectedWith(new TypeError('ReadableStream.prototype.pipeTo cannot be used on a locked ReadableStream'));\n }\n if (IsWritableStreamLocked(destination)) {\n return promiseRejectedWith(new TypeError('ReadableStream.prototype.pipeTo cannot be used on a locked WritableStream'));\n }\n return ReadableStreamPipeTo(this, destination, options.preventClose, options.preventAbort, options.preventCancel, options.signal);\n }\n /**\n * Tees this readable stream, returning a two-element array containing the two resulting branches as\n * new {@link ReadableStream} instances.\n *\n * Teeing a stream will lock it, preventing any other consumer from acquiring a reader.\n * To cancel the stream, cancel both of the resulting branches; a composite cancellation reason will then be\n * propagated to the stream's underlying source.\n *\n * Note that the chunks seen in each branch will be the same object. 
If the chunks are not immutable,\n * this could allow interference between the two branches.\n */\n tee() {\n if (!IsReadableStream(this)) {\n throw streamBrandCheckException$1('tee');\n }\n const branches = ReadableStreamTee(this);\n return CreateArrayFromList(branches);\n }\n values(rawOptions = undefined) {\n if (!IsReadableStream(this)) {\n throw streamBrandCheckException$1('values');\n }\n const options = convertIteratorOptions(rawOptions, 'First parameter');\n return AcquireReadableStreamAsyncIterator(this, options.preventCancel);\n }\n }\n Object.defineProperties(ReadableStream.prototype, {\n cancel: { enumerable: true },\n getReader: { enumerable: true },\n pipeThrough: { enumerable: true },\n pipeTo: { enumerable: true },\n tee: { enumerable: true },\n values: { enumerable: true },\n locked: { enumerable: true }\n });\n if (typeof SymbolPolyfill.toStringTag === 'symbol') {\n Object.defineProperty(ReadableStream.prototype, SymbolPolyfill.toStringTag, {\n value: 'ReadableStream',\n configurable: true\n });\n }\n if (typeof SymbolPolyfill.asyncIterator === 'symbol') {\n Object.defineProperty(ReadableStream.prototype, SymbolPolyfill.asyncIterator, {\n value: ReadableStream.prototype.values,\n writable: true,\n configurable: true\n });\n }\n // Abstract operations for the ReadableStream.\n // Throws if and only if startAlgorithm throws.\n function CreateReadableStream(startAlgorithm, pullAlgorithm, cancelAlgorithm, highWaterMark = 1, sizeAlgorithm = () => 1) {\n const stream = Object.create(ReadableStream.prototype);\n InitializeReadableStream(stream);\n const controller = Object.create(ReadableStreamDefaultController.prototype);\n SetUpReadableStreamDefaultController(stream, controller, startAlgorithm, pullAlgorithm, cancelAlgorithm, highWaterMark, sizeAlgorithm);\n return stream;\n }\n // Throws if and only if startAlgorithm throws.\n function CreateReadableByteStream(startAlgorithm, pullAlgorithm, cancelAlgorithm) {\n const stream = Object.create(ReadableStream.prototype);\n InitializeReadableStream(stream);\n const controller = Object.create(ReadableByteStreamController.prototype);\n SetUpReadableByteStreamController(stream, controller, startAlgorithm, pullAlgorithm, cancelAlgorithm, 0, undefined);\n return stream;\n }\n function InitializeReadableStream(stream) {\n stream._state = 'readable';\n stream._reader = undefined;\n stream._storedError = undefined;\n stream._disturbed = false;\n }\n function IsReadableStream(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, '_readableStreamController')) {\n return false;\n }\n return x instanceof ReadableStream;\n }\n function IsReadableStreamLocked(stream) {\n if (stream._reader === undefined) {\n return false;\n }\n return true;\n }\n // ReadableStream API exposed for controllers.\n function ReadableStreamCancel(stream, reason) {\n stream._disturbed = true;\n if (stream._state === 'closed') {\n return promiseResolvedWith(undefined);\n }\n if (stream._state === 'errored') {\n return promiseRejectedWith(stream._storedError);\n }\n ReadableStreamClose(stream);\n const reader = stream._reader;\n if (reader !== undefined && IsReadableStreamBYOBReader(reader)) {\n reader._readIntoRequests.forEach(readIntoRequest => {\n readIntoRequest._closeSteps(undefined);\n });\n reader._readIntoRequests = new SimpleQueue();\n }\n const sourceCancelPromise = stream._readableStreamController[CancelSteps](reason);\n return transformPromiseWith(sourceCancelPromise, noop);\n }\n function 
ReadableStreamClose(stream) {\n stream._state = 'closed';\n const reader = stream._reader;\n if (reader === undefined) {\n return;\n }\n defaultReaderClosedPromiseResolve(reader);\n if (IsReadableStreamDefaultReader(reader)) {\n reader._readRequests.forEach(readRequest => {\n readRequest._closeSteps();\n });\n reader._readRequests = new SimpleQueue();\n }\n }\n function ReadableStreamError(stream, e) {\n stream._state = 'errored';\n stream._storedError = e;\n const reader = stream._reader;\n if (reader === undefined) {\n return;\n }\n defaultReaderClosedPromiseReject(reader, e);\n if (IsReadableStreamDefaultReader(reader)) {\n reader._readRequests.forEach(readRequest => {\n readRequest._errorSteps(e);\n });\n reader._readRequests = new SimpleQueue();\n }\n else {\n reader._readIntoRequests.forEach(readIntoRequest => {\n readIntoRequest._errorSteps(e);\n });\n reader._readIntoRequests = new SimpleQueue();\n }\n }\n // Helper functions for the ReadableStream.\n function streamBrandCheckException$1(name) {\n return new TypeError(`ReadableStream.prototype.${name} can only be used on a ReadableStream`);\n }\n\n function convertQueuingStrategyInit(init, context) {\n assertDictionary(init, context);\n const highWaterMark = init === null || init === void 0 ? void 0 : init.highWaterMark;\n assertRequiredField(highWaterMark, 'highWaterMark', 'QueuingStrategyInit');\n return {\n highWaterMark: convertUnrestrictedDouble(highWaterMark)\n };\n }\n\n // The size function must not have a prototype property nor be a constructor\n const byteLengthSizeFunction = (chunk) => {\n return chunk.byteLength;\n };\n try {\n Object.defineProperty(byteLengthSizeFunction, 'name', {\n value: 'size',\n configurable: true\n });\n }\n catch (_a) {\n // This property is non-configurable in older browsers, so ignore if this throws.\n // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Function/name#browser_compatibility\n }\n /**\n * A queuing strategy that counts the number of bytes in each chunk.\n *\n * @public\n */\n class ByteLengthQueuingStrategy {\n constructor(options) {\n assertRequiredArgument(options, 1, 'ByteLengthQueuingStrategy');\n options = convertQueuingStrategyInit(options, 'First parameter');\n this._byteLengthQueuingStrategyHighWaterMark = options.highWaterMark;\n }\n /**\n * Returns the high water mark provided to the constructor.\n */\n get highWaterMark() {\n if (!IsByteLengthQueuingStrategy(this)) {\n throw byteLengthBrandCheckException('highWaterMark');\n }\n return this._byteLengthQueuingStrategyHighWaterMark;\n }\n /**\n * Measures the size of `chunk` by returning the value of its `byteLength` property.\n */\n get size() {\n if (!IsByteLengthQueuingStrategy(this)) {\n throw byteLengthBrandCheckException('size');\n }\n return byteLengthSizeFunction;\n }\n }\n Object.defineProperties(ByteLengthQueuingStrategy.prototype, {\n highWaterMark: { enumerable: true },\n size: { enumerable: true }\n });\n if (typeof SymbolPolyfill.toStringTag === 'symbol') {\n Object.defineProperty(ByteLengthQueuingStrategy.prototype, SymbolPolyfill.toStringTag, {\n value: 'ByteLengthQueuingStrategy',\n configurable: true\n });\n }\n // Helper functions for the ByteLengthQueuingStrategy.\n function byteLengthBrandCheckException(name) {\n return new TypeError(`ByteLengthQueuingStrategy.prototype.${name} can only be used on a ByteLengthQueuingStrategy`);\n }\n function IsByteLengthQueuingStrategy(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, 
'_byteLengthQueuingStrategyHighWaterMark')) {\n return false;\n }\n return x instanceof ByteLengthQueuingStrategy;\n }\n\n // The size function must not have a prototype property nor be a constructor\n const countSizeFunction = () => {\n return 1;\n };\n try {\n Object.defineProperty(countSizeFunction, 'name', {\n value: 'size',\n configurable: true\n });\n }\n catch (_a) {\n // This property is non-configurable in older browsers, so ignore if this throws.\n // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Function/name#browser_compatibility\n }\n /**\n * A queuing strategy that counts the number of chunks.\n *\n * @public\n */\n class CountQueuingStrategy {\n constructor(options) {\n assertRequiredArgument(options, 1, 'CountQueuingStrategy');\n options = convertQueuingStrategyInit(options, 'First parameter');\n this._countQueuingStrategyHighWaterMark = options.highWaterMark;\n }\n /**\n * Returns the high water mark provided to the constructor.\n */\n get highWaterMark() {\n if (!IsCountQueuingStrategy(this)) {\n throw countBrandCheckException('highWaterMark');\n }\n return this._countQueuingStrategyHighWaterMark;\n }\n /**\n * Measures the size of `chunk` by always returning 1.\n * This ensures that the total queue size is a count of the number of chunks in the queue.\n */\n get size() {\n if (!IsCountQueuingStrategy(this)) {\n throw countBrandCheckException('size');\n }\n return countSizeFunction;\n }\n }\n Object.defineProperties(CountQueuingStrategy.prototype, {\n highWaterMark: { enumerable: true },\n size: { enumerable: true }\n });\n if (typeof SymbolPolyfill.toStringTag === 'symbol') {\n Object.defineProperty(CountQueuingStrategy.prototype, SymbolPolyfill.toStringTag, {\n value: 'CountQueuingStrategy',\n configurable: true\n });\n }\n // Helper functions for the CountQueuingStrategy.\n function countBrandCheckException(name) {\n return new TypeError(`CountQueuingStrategy.prototype.${name} can only be used on a CountQueuingStrategy`);\n }\n function IsCountQueuingStrategy(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, '_countQueuingStrategyHighWaterMark')) {\n return false;\n }\n return x instanceof CountQueuingStrategy;\n }\n\n function convertTransformer(original, context) {\n assertDictionary(original, context);\n const flush = original === null || original === void 0 ? void 0 : original.flush;\n const readableType = original === null || original === void 0 ? void 0 : original.readableType;\n const start = original === null || original === void 0 ? void 0 : original.start;\n const transform = original === null || original === void 0 ? void 0 : original.transform;\n const writableType = original === null || original === void 0 ? 
void 0 : original.writableType;\n return {\n flush: flush === undefined ?\n undefined :\n convertTransformerFlushCallback(flush, original, `${context} has member 'flush' that`),\n readableType,\n start: start === undefined ?\n undefined :\n convertTransformerStartCallback(start, original, `${context} has member 'start' that`),\n transform: transform === undefined ?\n undefined :\n convertTransformerTransformCallback(transform, original, `${context} has member 'transform' that`),\n writableType\n };\n }\n function convertTransformerFlushCallback(fn, original, context) {\n assertFunction(fn, context);\n return (controller) => promiseCall(fn, original, [controller]);\n }\n function convertTransformerStartCallback(fn, original, context) {\n assertFunction(fn, context);\n return (controller) => reflectCall(fn, original, [controller]);\n }\n function convertTransformerTransformCallback(fn, original, context) {\n assertFunction(fn, context);\n return (chunk, controller) => promiseCall(fn, original, [chunk, controller]);\n }\n\n // Class TransformStream\n /**\n * A transform stream consists of a pair of streams: a {@link WritableStream | writable stream},\n * known as its writable side, and a {@link ReadableStream | readable stream}, known as its readable side.\n * In a manner specific to the transform stream in question, writes to the writable side result in new data being\n * made available for reading from the readable side.\n *\n * @public\n */\n class TransformStream {\n constructor(rawTransformer = {}, rawWritableStrategy = {}, rawReadableStrategy = {}) {\n if (rawTransformer === undefined) {\n rawTransformer = null;\n }\n const writableStrategy = convertQueuingStrategy(rawWritableStrategy, 'Second parameter');\n const readableStrategy = convertQueuingStrategy(rawReadableStrategy, 'Third parameter');\n const transformer = convertTransformer(rawTransformer, 'First parameter');\n if (transformer.readableType !== undefined) {\n throw new RangeError('Invalid readableType specified');\n }\n if (transformer.writableType !== undefined) {\n throw new RangeError('Invalid writableType specified');\n }\n const readableHighWaterMark = ExtractHighWaterMark(readableStrategy, 0);\n const readableSizeAlgorithm = ExtractSizeAlgorithm(readableStrategy);\n const writableHighWaterMark = ExtractHighWaterMark(writableStrategy, 1);\n const writableSizeAlgorithm = ExtractSizeAlgorithm(writableStrategy);\n let startPromise_resolve;\n const startPromise = newPromise(resolve => {\n startPromise_resolve = resolve;\n });\n InitializeTransformStream(this, startPromise, writableHighWaterMark, writableSizeAlgorithm, readableHighWaterMark, readableSizeAlgorithm);\n SetUpTransformStreamDefaultControllerFromTransformer(this, transformer);\n if (transformer.start !== undefined) {\n startPromise_resolve(transformer.start(this._transformStreamController));\n }\n else {\n startPromise_resolve(undefined);\n }\n }\n /**\n * The readable side of the transform stream.\n */\n get readable() {\n if (!IsTransformStream(this)) {\n throw streamBrandCheckException('readable');\n }\n return this._readable;\n }\n /**\n * The writable side of the transform stream.\n */\n get writable() {\n if (!IsTransformStream(this)) {\n throw streamBrandCheckException('writable');\n }\n return this._writable;\n }\n }\n Object.defineProperties(TransformStream.prototype, {\n readable: { enumerable: true },\n writable: { enumerable: true }\n });\n if (typeof SymbolPolyfill.toStringTag === 'symbol') {\n Object.defineProperty(TransformStream.prototype, 
SymbolPolyfill.toStringTag, {\n value: 'TransformStream',\n configurable: true\n });\n }\n function InitializeTransformStream(stream, startPromise, writableHighWaterMark, writableSizeAlgorithm, readableHighWaterMark, readableSizeAlgorithm) {\n function startAlgorithm() {\n return startPromise;\n }\n function writeAlgorithm(chunk) {\n return TransformStreamDefaultSinkWriteAlgorithm(stream, chunk);\n }\n function abortAlgorithm(reason) {\n return TransformStreamDefaultSinkAbortAlgorithm(stream, reason);\n }\n function closeAlgorithm() {\n return TransformStreamDefaultSinkCloseAlgorithm(stream);\n }\n stream._writable = CreateWritableStream(startAlgorithm, writeAlgorithm, closeAlgorithm, abortAlgorithm, writableHighWaterMark, writableSizeAlgorithm);\n function pullAlgorithm() {\n return TransformStreamDefaultSourcePullAlgorithm(stream);\n }\n function cancelAlgorithm(reason) {\n TransformStreamErrorWritableAndUnblockWrite(stream, reason);\n return promiseResolvedWith(undefined);\n }\n stream._readable = CreateReadableStream(startAlgorithm, pullAlgorithm, cancelAlgorithm, readableHighWaterMark, readableSizeAlgorithm);\n // The [[backpressure]] slot is set to undefined so that it can be initialised by TransformStreamSetBackpressure.\n stream._backpressure = undefined;\n stream._backpressureChangePromise = undefined;\n stream._backpressureChangePromise_resolve = undefined;\n TransformStreamSetBackpressure(stream, true);\n stream._transformStreamController = undefined;\n }\n function IsTransformStream(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, '_transformStreamController')) {\n return false;\n }\n return x instanceof TransformStream;\n }\n // This is a no-op if both sides are already errored.\n function TransformStreamError(stream, e) {\n ReadableStreamDefaultControllerError(stream._readable._readableStreamController, e);\n TransformStreamErrorWritableAndUnblockWrite(stream, e);\n }\n function TransformStreamErrorWritableAndUnblockWrite(stream, e) {\n TransformStreamDefaultControllerClearAlgorithms(stream._transformStreamController);\n WritableStreamDefaultControllerErrorIfNeeded(stream._writable._writableStreamController, e);\n if (stream._backpressure) {\n // Pretend that pull() was called to permit any pending write() calls to complete. TransformStreamSetBackpressure()\n // cannot be called from enqueue() or pull() once the ReadableStream is errored, so this will will be the final time\n // _backpressure is set.\n TransformStreamSetBackpressure(stream, false);\n }\n }\n function TransformStreamSetBackpressure(stream, backpressure) {\n // Passes also when called during construction.\n if (stream._backpressureChangePromise !== undefined) {\n stream._backpressureChangePromise_resolve();\n }\n stream._backpressureChangePromise = newPromise(resolve => {\n stream._backpressureChangePromise_resolve = resolve;\n });\n stream._backpressure = backpressure;\n }\n // Class TransformStreamDefaultController\n /**\n * Allows control of the {@link ReadableStream} and {@link WritableStream} of the associated {@link TransformStream}.\n *\n * @public\n */\n class TransformStreamDefaultController {\n constructor() {\n throw new TypeError('Illegal constructor');\n }\n /**\n * Returns the desired size to fill the readable side’s internal queue. 
It can be negative, if the queue is over-full.\n */\n get desiredSize() {\n if (!IsTransformStreamDefaultController(this)) {\n throw defaultControllerBrandCheckException('desiredSize');\n }\n const readableController = this._controlledTransformStream._readable._readableStreamController;\n return ReadableStreamDefaultControllerGetDesiredSize(readableController);\n }\n enqueue(chunk = undefined) {\n if (!IsTransformStreamDefaultController(this)) {\n throw defaultControllerBrandCheckException('enqueue');\n }\n TransformStreamDefaultControllerEnqueue(this, chunk);\n }\n /**\n * Errors both the readable side and the writable side of the controlled transform stream, making all future\n * interactions with it fail with the given error `e`. Any chunks queued for transformation will be discarded.\n */\n error(reason = undefined) {\n if (!IsTransformStreamDefaultController(this)) {\n throw defaultControllerBrandCheckException('error');\n }\n TransformStreamDefaultControllerError(this, reason);\n }\n /**\n * Closes the readable side and errors the writable side of the controlled transform stream. This is useful when the\n * transformer only needs to consume a portion of the chunks written to the writable side.\n */\n terminate() {\n if (!IsTransformStreamDefaultController(this)) {\n throw defaultControllerBrandCheckException('terminate');\n }\n TransformStreamDefaultControllerTerminate(this);\n }\n }\n Object.defineProperties(TransformStreamDefaultController.prototype, {\n enqueue: { enumerable: true },\n error: { enumerable: true },\n terminate: { enumerable: true },\n desiredSize: { enumerable: true }\n });\n if (typeof SymbolPolyfill.toStringTag === 'symbol') {\n Object.defineProperty(TransformStreamDefaultController.prototype, SymbolPolyfill.toStringTag, {\n value: 'TransformStreamDefaultController',\n configurable: true\n });\n }\n // Transform Stream Default Controller Abstract Operations\n function IsTransformStreamDefaultController(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, '_controlledTransformStream')) {\n return false;\n }\n return x instanceof TransformStreamDefaultController;\n }\n function SetUpTransformStreamDefaultController(stream, controller, transformAlgorithm, flushAlgorithm) {\n controller._controlledTransformStream = stream;\n stream._transformStreamController = controller;\n controller._transformAlgorithm = transformAlgorithm;\n controller._flushAlgorithm = flushAlgorithm;\n }\n function SetUpTransformStreamDefaultControllerFromTransformer(stream, transformer) {\n const controller = Object.create(TransformStreamDefaultController.prototype);\n let transformAlgorithm = (chunk) => {\n try {\n TransformStreamDefaultControllerEnqueue(controller, chunk);\n return promiseResolvedWith(undefined);\n }\n catch (transformResultE) {\n return promiseRejectedWith(transformResultE);\n }\n };\n let flushAlgorithm = () => promiseResolvedWith(undefined);\n if (transformer.transform !== undefined) {\n transformAlgorithm = chunk => transformer.transform(chunk, controller);\n }\n if (transformer.flush !== undefined) {\n flushAlgorithm = () => transformer.flush(controller);\n }\n SetUpTransformStreamDefaultController(stream, controller, transformAlgorithm, flushAlgorithm);\n }\n function TransformStreamDefaultControllerClearAlgorithms(controller) {\n controller._transformAlgorithm = undefined;\n controller._flushAlgorithm = undefined;\n }\n function TransformStreamDefaultControllerEnqueue(controller, chunk) {\n const stream = 
controller._controlledTransformStream;\n const readableController = stream._readable._readableStreamController;\n if (!ReadableStreamDefaultControllerCanCloseOrEnqueue(readableController)) {\n throw new TypeError('Readable side is not in a state that permits enqueue');\n }\n // We throttle transform invocations based on the backpressure of the ReadableStream, but we still\n // accept TransformStreamDefaultControllerEnqueue() calls.\n try {\n ReadableStreamDefaultControllerEnqueue(readableController, chunk);\n }\n catch (e) {\n // This happens when readableStrategy.size() throws.\n TransformStreamErrorWritableAndUnblockWrite(stream, e);\n throw stream._readable._storedError;\n }\n const backpressure = ReadableStreamDefaultControllerHasBackpressure(readableController);\n if (backpressure !== stream._backpressure) {\n TransformStreamSetBackpressure(stream, true);\n }\n }\n function TransformStreamDefaultControllerError(controller, e) {\n TransformStreamError(controller._controlledTransformStream, e);\n }\n function TransformStreamDefaultControllerPerformTransform(controller, chunk) {\n const transformPromise = controller._transformAlgorithm(chunk);\n return transformPromiseWith(transformPromise, undefined, r => {\n TransformStreamError(controller._controlledTransformStream, r);\n throw r;\n });\n }\n function TransformStreamDefaultControllerTerminate(controller) {\n const stream = controller._controlledTransformStream;\n const readableController = stream._readable._readableStreamController;\n ReadableStreamDefaultControllerClose(readableController);\n const error = new TypeError('TransformStream terminated');\n TransformStreamErrorWritableAndUnblockWrite(stream, error);\n }\n // TransformStreamDefaultSink Algorithms\n function TransformStreamDefaultSinkWriteAlgorithm(stream, chunk) {\n const controller = stream._transformStreamController;\n if (stream._backpressure) {\n const backpressureChangePromise = stream._backpressureChangePromise;\n return transformPromiseWith(backpressureChangePromise, () => {\n const writable = stream._writable;\n const state = writable._state;\n if (state === 'erroring') {\n throw writable._storedError;\n }\n return TransformStreamDefaultControllerPerformTransform(controller, chunk);\n });\n }\n return TransformStreamDefaultControllerPerformTransform(controller, chunk);\n }\n function TransformStreamDefaultSinkAbortAlgorithm(stream, reason) {\n // abort() is not called synchronously, so it is possible for abort() to be called when the stream is already\n // errored.\n TransformStreamError(stream, reason);\n return promiseResolvedWith(undefined);\n }\n function TransformStreamDefaultSinkCloseAlgorithm(stream) {\n // stream._readable cannot change after construction, so caching it across a call to user code is safe.\n const readable = stream._readable;\n const controller = stream._transformStreamController;\n const flushPromise = controller._flushAlgorithm();\n TransformStreamDefaultControllerClearAlgorithms(controller);\n // Return a promise that is fulfilled with undefined on success.\n return transformPromiseWith(flushPromise, () => {\n if (readable._state === 'errored') {\n throw readable._storedError;\n }\n ReadableStreamDefaultControllerClose(readable._readableStreamController);\n }, r => {\n TransformStreamError(stream, r);\n throw readable._storedError;\n });\n }\n // TransformStreamDefaultSource Algorithms\n function TransformStreamDefaultSourcePullAlgorithm(stream) {\n // Invariant. 
Enforced by the promises returned by start() and pull().\n TransformStreamSetBackpressure(stream, false);\n // Prevent the next pull() call until there is backpressure.\n return stream._backpressureChangePromise;\n }\n // Helper functions for the TransformStreamDefaultController.\n function defaultControllerBrandCheckException(name) {\n return new TypeError(`TransformStreamDefaultController.prototype.${name} can only be used on a TransformStreamDefaultController`);\n }\n // Helper functions for the TransformStream.\n function streamBrandCheckException(name) {\n return new TypeError(`TransformStream.prototype.${name} can only be used on a TransformStream`);\n }\n\n exports.ByteLengthQueuingStrategy = ByteLengthQueuingStrategy;\n exports.CountQueuingStrategy = CountQueuingStrategy;\n exports.ReadableByteStreamController = ReadableByteStreamController;\n exports.ReadableStream = ReadableStream;\n exports.ReadableStreamBYOBReader = ReadableStreamBYOBReader;\n exports.ReadableStreamBYOBRequest = ReadableStreamBYOBRequest;\n exports.ReadableStreamDefaultController = ReadableStreamDefaultController;\n exports.ReadableStreamDefaultReader = ReadableStreamDefaultReader;\n exports.TransformStream = TransformStream;\n exports.TransformStreamDefaultController = TransformStreamDefaultController;\n exports.WritableStream = WritableStream;\n exports.WritableStreamDefaultController = WritableStreamDefaultController;\n exports.WritableStreamDefaultWriter = WritableStreamDefaultWriter;\n\n Object.defineProperty(exports, '__esModule', { value: true });\n\n})));\n//# sourceMappingURL=ponyfill.es2018.js.map\n","// Returns a wrapper function that returns a wrapped callback\n// The wrapper function should do some stuff, and return a\n// presumably different callback function.\n// This makes sure that own properties are retained, so that\n// decorations and such are not lost along the way.\nmodule.exports = wrappy\nfunction wrappy (fn, cb) {\n if (fn && cb) return wrappy(fn)(cb)\n\n if (typeof fn !== 'function')\n throw new TypeError('need wrapper function')\n\n Object.keys(fn).forEach(function (k) {\n wrapper[k] = fn[k]\n })\n\n return wrapper\n\n function wrapper() {\n var args = new Array(arguments.length)\n for (var i = 0; i < args.length; i++) {\n args[i] = arguments[i]\n }\n var ret = fn.apply(this, args)\n var cb = args[args.length-1]\n if (typeof ret === 'function' && ret !== cb) {\n Object.keys(cb).forEach(function (k) {\n ret[k] = cb[k]\n })\n }\n return ret\n }\n}\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst core = __importStar(require(\"@actions/core\"));\nconst tc = __importStar(require(\"@actions/tool-cache\"));\nconst exec = __importStar(require(\"@actions/exec\"));\nconst io = __importStar(require(\"@actions/io\"));\nconst octokit = __importStar(require(\"@octokit/rest\"));\nconst path = __importStar(require(\"path\"));\nconst node_fetch_1 = __importDefault(require(\"node-fetch\"));\nconst utils_1 = require(\"./utils\");\nfunction run() {\n return __awaiter(this, void 0, void 0, function* () {\n const platform = 'linux';\n const arch = 'x64';\n const versionInput = core.getInput('version');\n try {\n const version = yield resolveVersion(versionInput);\n let cachedPath = tryGetFromCache(arch, version);\n if (cachedPath) {\n core.info(`Found Rye in cache for ${version}`);\n }\n else {\n cachedPath = yield setupRye(platform, arch, version);\n }\n addRyeToPath(cachedPath);\n addMatchers();\n }\n catch (err) {\n core.setFailed(err.message);\n }\n });\n}\nfunction resolveVersion(versionInput) {\n return __awaiter(this, void 0, void 0, function* () {\n const availableVersion = yield getAvailableVersions();\n if (!availableVersion.includes(versionInput)) {\n if (versionInput === 'latest') {\n core.info(`Latest version is ${availableVersion[0]}`);\n return availableVersion[0];\n }\n else {\n throw new Error(`Version ${versionInput} is not available. 
Available version are: ${availableVersion}`);\n }\n }\n return versionInput;\n });\n}\nfunction getAvailableVersions() {\n return __awaiter(this, void 0, void 0, function* () {\n const githubClient = new octokit.Octokit({\n userAgent: 'setup-rye',\n request: { fetch: node_fetch_1.default }\n });\n const response = yield githubClient.rest.repos.listReleases({\n owner: utils_1.OWNER,\n repo: utils_1.REPO\n });\n const releases = response.data.map(release => release.tag_name);\n return releases;\n });\n}\nfunction tryGetFromCache(arch, version) {\n core.debug(`Trying to get Rye from cache for ${version}...`);\n const cachedVersions = tc.findAllVersions('rye', arch);\n core.debug(`Cached versions: ${cachedVersions}`);\n return tc.find('rye', version, arch);\n}\nfunction setupRye(platform, arch, version) {\n return __awaiter(this, void 0, void 0, function* () {\n const downloadPath = yield downloadVersion(platform, arch, version);\n const cachedPath = yield installRye(downloadPath, arch, version);\n return cachedPath;\n });\n}\nfunction downloadVersion(platform, arch, version) {\n return __awaiter(this, void 0, void 0, function* () {\n const binary = `rye-x86_64-${platform}`;\n const downloadUrl = `https://github.com/mitsuhiko/rye/releases/download/${version}/${binary}.gz`;\n core.info(`Downloading Rye from \"${downloadUrl}\" ...`);\n try {\n const downloadPath = yield tc.downloadTool(downloadUrl);\n yield extract(downloadPath);\n return downloadPath;\n }\n catch (err) {\n if (err instanceof Error) {\n // Rate limit?\n if (err instanceof tc.HTTPError &&\n (err.httpStatusCode === 403 || err.httpStatusCode === 429)) {\n core.info(`Received HTTP status code ${err.httpStatusCode}. This usually indicates the rate limit has been exceeded`);\n }\n else {\n core.info(err.message);\n }\n if (err.stack !== undefined) {\n core.debug(err.stack);\n }\n }\n throw err;\n }\n });\n}\nfunction extract(downloadPath) {\n return __awaiter(this, void 0, void 0, function* () {\n core.info('Extracting downloaded archive...');\n const pathForGunzip = `${downloadPath}.gz`;\n yield io.mv(downloadPath, pathForGunzip);\n yield exec.exec('gunzip', [pathForGunzip]);\n yield exec.exec('chmod', ['+x', downloadPath]);\n });\n}\nfunction installRye(downloadPath, arch, version) {\n return __awaiter(this, void 0, void 0, function* () {\n const tempDir = path.join(process.env['RUNNER_TEMP'] || '', 'rye_temp_home');\n yield io.mkdirP(tempDir);\n core.debug(`Created temporary directory ${tempDir}`);\n const options = {\n cwd: tempDir,\n env: Object.assign(Object.assign({}, process.env), { RYE_HOME: tempDir })\n };\n core.info(`Installing Rye into ${tempDir}`);\n yield exec.exec(downloadPath, ['self', 'install', '--yes'], options);\n const cachedPath = yield tc.cacheDir(tempDir, 'rye', version, arch);\n core.info(`Moved Rye into ${cachedPath}`);\n return cachedPath;\n });\n}\nfunction addRyeToPath(cachedPath) {\n core.addPath(`${cachedPath}/shims`);\n core.info(`Added ${cachedPath}/shims to the path`);\n}\nfunction addMatchers() {\n const matchersPath = path.join(__dirname, '..', '.github');\n core.info(`##[add-matcher]${path.join(matchersPath, 'python.json')}`);\n}\nrun();\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.OWNER = exports.REPO = exports.WINDOWS_PLATFORMS = exports.WINDOWS_ARCHS = exports.IS_MAC = exports.IS_LINUX = exports.IS_WINDOWS = void 0;\nexports.IS_WINDOWS = process.platform === 'win32';\nexports.IS_LINUX = process.platform === 'linux';\nexports.IS_MAC = 
process.platform === 'darwin';\nexports.WINDOWS_ARCHS = ['x86', 'x64'];\nexports.WINDOWS_PLATFORMS = ['win32', 'win64'];\nexports.REPO = 'rye';\nexports.OWNER = 'mitsuhiko';\n","module.exports = require(\"assert\");","module.exports = require(\"buffer\");","module.exports = require(\"child_process\");","module.exports = require(\"crypto\");","module.exports = require(\"events\");","module.exports = require(\"fs\");","module.exports = require(\"http\");","module.exports = require(\"https\");","module.exports = require(\"net\");","module.exports = require(\"node:process\");","module.exports = require(\"node:stream/web\");","module.exports = require(\"os\");","module.exports = require(\"path\");","module.exports = require(\"stream\");","module.exports = require(\"string_decoder\");","module.exports = require(\"timers\");","module.exports = require(\"tls\");","module.exports = require(\"util\");","module.exports = require(\"worker_threads\");","/* c8 ignore start */\n// 64 KiB (same size chrome slice theirs blob into Uint8array's)\nconst POOL_SIZE = 65536\n\nif (!globalThis.ReadableStream) {\n // `node:stream/web` got introduced in v16.5.0 as experimental\n // and it's preferred over the polyfilled version. So we also\n // suppress the warning that gets emitted by NodeJS for using it.\n try {\n const process = require('node:process')\n const { emitWarning } = process\n try {\n process.emitWarning = () => {}\n Object.assign(globalThis, require('node:stream/web'))\n process.emitWarning = emitWarning\n } catch (error) {\n process.emitWarning = emitWarning\n throw error\n }\n } catch (error) {\n // fallback to polyfill implementation\n Object.assign(globalThis, require('web-streams-polyfill/dist/ponyfill.es2018.js'))\n }\n}\n\ntry {\n // Don't use node: prefix for this, require+node: is not supported until node v14.14\n // Only `import()` can use prefix in 12.20 and later\n const { Blob } = require('buffer')\n if (Blob && !Blob.prototype.stream) {\n Blob.prototype.stream = function name (params) {\n let position = 0\n const blob = this\n\n return new ReadableStream({\n type: 'bytes',\n async pull (ctrl) {\n const chunk = blob.slice(position, Math.min(blob.size, position + POOL_SIZE))\n const buffer = await chunk.arrayBuffer()\n position += buffer.byteLength\n ctrl.enqueue(new Uint8Array(buffer))\n\n if (position === blob.size) {\n ctrl.close()\n }\n }\n })\n }\n }\n} catch (error) {}\n/* c8 ignore end */\n","import Blob from './index.js'\n\nconst _File = class File extends Blob {\n #lastModified = 0\n #name = ''\n\n /**\n * @param {*[]} fileBits\n * @param {string} fileName\n * @param {{lastModified?: number, type?: string}} options\n */// @ts-ignore\n constructor (fileBits, fileName, options = {}) {\n if (arguments.length < 2) {\n throw new TypeError(`Failed to construct 'File': 2 arguments required, but only ${arguments.length} present.`)\n }\n super(fileBits, options)\n\n if (options === null) options = {}\n\n // Simulate WebIDL type casting for NaN value in lastModified option.\n const lastModified = options.lastModified === undefined ? 
Date.now() : Number(options.lastModified)\n if (!Number.isNaN(lastModified)) {\n this.#lastModified = lastModified\n }\n\n this.#name = String(fileName)\n }\n\n get name () {\n return this.#name\n }\n\n get lastModified () {\n return this.#lastModified\n }\n\n get [Symbol.toStringTag] () {\n return 'File'\n }\n\n static [Symbol.hasInstance] (object) {\n return !!object && object instanceof Blob &&\n /^(File)$/.test(object[Symbol.toStringTag])\n }\n}\n\n/** @type {typeof globalThis.File} */// @ts-ignore\nexport const File = _File\nexport default File\n","const __WEBPACK_NAMESPACE_OBJECT__ = require(\"node:fs\");","const __WEBPACK_NAMESPACE_OBJECT__ = require(\"node:path\");","import { statSync, createReadStream, promises as fs } from 'node:fs'\nimport { basename } from 'node:path'\nimport DOMException from 'node-domexception'\n\nimport File from './file.js'\nimport Blob from './index.js'\n\nconst { stat } = fs\n\n/**\n * @param {string} path filepath on the disk\n * @param {string} [type] mimetype to use\n */\nconst blobFromSync = (path, type) => fromBlob(statSync(path), path, type)\n\n/**\n * @param {string} path filepath on the disk\n * @param {string} [type] mimetype to use\n * @returns {Promise}\n */\nconst blobFrom = (path, type) => stat(path).then(stat => fromBlob(stat, path, type))\n\n/**\n * @param {string} path filepath on the disk\n * @param {string} [type] mimetype to use\n * @returns {Promise}\n */\nconst fileFrom = (path, type) => stat(path).then(stat => fromFile(stat, path, type))\n\n/**\n * @param {string} path filepath on the disk\n * @param {string} [type] mimetype to use\n */\nconst fileFromSync = (path, type) => fromFile(statSync(path), path, type)\n\n// @ts-ignore\nconst fromBlob = (stat, path, type = '') => new Blob([new BlobDataItem({\n path,\n size: stat.size,\n lastModified: stat.mtimeMs,\n start: 0\n})], { type })\n\n// @ts-ignore\nconst fromFile = (stat, path, type = '') => new File([new BlobDataItem({\n path,\n size: stat.size,\n lastModified: stat.mtimeMs,\n start: 0\n})], basename(path), { type, lastModified: stat.mtimeMs })\n\n/**\n * This is a blob backed up by a file on the disk\n * with minium requirement. Its wrapped around a Blob as a blobPart\n * so you have no direct access to this.\n *\n * @private\n */\nclass BlobDataItem {\n #path\n #start\n\n constructor (options) {\n this.#path = options.path\n this.#start = options.start\n this.size = options.size\n this.lastModified = options.lastModified\n }\n\n /**\n * Slicing arguments is first validated and formatted\n * to not be out of range by Blob.prototype.slice\n */\n slice (start, end) {\n return new BlobDataItem({\n path: this.#path,\n lastModified: this.lastModified,\n size: end - start,\n start: this.#start + start\n })\n }\n\n async * stream () {\n const { mtimeMs } = await stat(this.#path)\n if (mtimeMs > this.lastModified) {\n throw new DOMException('The requested file could not be read, typically due to permission problems that have occurred after a reference to a file was acquired.', 'NotReadableError')\n }\n yield * createReadStream(this.#path, {\n start: this.#start,\n end: this.#start + this.size - 1\n })\n }\n\n get [Symbol.toStringTag] () {\n return 'Blob'\n }\n}\n\nexport default blobFromSync\nexport { File, Blob, blobFrom, blobFromSync, fileFrom, fileFromSync }\n","/*! fetch-blob. MIT License. 
Jimmy Wärting */\n\n// TODO (jimmywarting): in the feature use conditional loading with top level await (requires 14.x)\n// Node has recently added whatwg stream into core\n\nimport './streams.cjs'\n\n// 64 KiB (same size chrome slice theirs blob into Uint8array's)\nconst POOL_SIZE = 65536\n\n/** @param {(Blob | Uint8Array)[]} parts */\nasync function * toIterator (parts, clone = true) {\n for (const part of parts) {\n if ('stream' in part) {\n yield * (/** @type {AsyncIterableIterator} */ (part.stream()))\n } else if (ArrayBuffer.isView(part)) {\n if (clone) {\n let position = part.byteOffset\n const end = part.byteOffset + part.byteLength\n while (position !== end) {\n const size = Math.min(end - position, POOL_SIZE)\n const chunk = part.buffer.slice(position, position + size)\n position += chunk.byteLength\n yield new Uint8Array(chunk)\n }\n } else {\n yield part\n }\n /* c8 ignore next 10 */\n } else {\n // For blobs that have arrayBuffer but no stream method (nodes buffer.Blob)\n let position = 0, b = (/** @type {Blob} */ (part))\n while (position !== b.size) {\n const chunk = b.slice(position, Math.min(b.size, position + POOL_SIZE))\n const buffer = await chunk.arrayBuffer()\n position += buffer.byteLength\n yield new Uint8Array(buffer)\n }\n }\n }\n}\n\nconst _Blob = class Blob {\n /** @type {Array.<(Blob|Uint8Array)>} */\n #parts = []\n #type = ''\n #size = 0\n #endings = 'transparent'\n\n /**\n * The Blob() constructor returns a new Blob object. The content\n * of the blob consists of the concatenation of the values given\n * in the parameter array.\n *\n * @param {*} blobParts\n * @param {{ type?: string, endings?: string }} [options]\n */\n constructor (blobParts = [], options = {}) {\n if (typeof blobParts !== 'object' || blobParts === null) {\n throw new TypeError('Failed to construct \\'Blob\\': The provided value cannot be converted to a sequence.')\n }\n\n if (typeof blobParts[Symbol.iterator] !== 'function') {\n throw new TypeError('Failed to construct \\'Blob\\': The object must have a callable @@iterator property.')\n }\n\n if (typeof options !== 'object' && typeof options !== 'function') {\n throw new TypeError('Failed to construct \\'Blob\\': parameter 2 cannot convert to dictionary.')\n }\n\n if (options === null) options = {}\n\n const encoder = new TextEncoder()\n for (const element of blobParts) {\n let part\n if (ArrayBuffer.isView(element)) {\n part = new Uint8Array(element.buffer.slice(element.byteOffset, element.byteOffset + element.byteLength))\n } else if (element instanceof ArrayBuffer) {\n part = new Uint8Array(element.slice(0))\n } else if (element instanceof Blob) {\n part = element\n } else {\n part = encoder.encode(`${element}`)\n }\n\n this.#size += ArrayBuffer.isView(part) ? part.byteLength : part.size\n this.#parts.push(part)\n }\n\n this.#endings = `${options.endings === undefined ? 'transparent' : options.endings}`\n const type = options.type === undefined ? '' : String(options.type)\n this.#type = /^[\\x20-\\x7E]*$/.test(type) ? 
type : ''\n }\n\n /**\n * The Blob interface's size property returns the\n * size of the Blob in bytes.\n */\n get size () {\n return this.#size\n }\n\n /**\n * The type property of a Blob object returns the MIME type of the file.\n */\n get type () {\n return this.#type\n }\n\n /**\n * The text() method in the Blob interface returns a Promise\n * that resolves with a string containing the contents of\n * the blob, interpreted as UTF-8.\n *\n * @return {Promise}\n */\n async text () {\n // More optimized than using this.arrayBuffer()\n // that requires twice as much ram\n const decoder = new TextDecoder()\n let str = ''\n for await (const part of toIterator(this.#parts, false)) {\n str += decoder.decode(part, { stream: true })\n }\n // Remaining\n str += decoder.decode()\n return str\n }\n\n /**\n * The arrayBuffer() method in the Blob interface returns a\n * Promise that resolves with the contents of the blob as\n * binary data contained in an ArrayBuffer.\n *\n * @return {Promise}\n */\n async arrayBuffer () {\n // Easier way... Just a unnecessary overhead\n // const view = new Uint8Array(this.size);\n // await this.stream().getReader({mode: 'byob'}).read(view);\n // return view.buffer;\n\n const data = new Uint8Array(this.size)\n let offset = 0\n for await (const chunk of toIterator(this.#parts, false)) {\n data.set(chunk, offset)\n offset += chunk.length\n }\n\n return data.buffer\n }\n\n stream () {\n const it = toIterator(this.#parts, true)\n\n return new globalThis.ReadableStream({\n // @ts-ignore\n type: 'bytes',\n async pull (ctrl) {\n const chunk = await it.next()\n chunk.done ? ctrl.close() : ctrl.enqueue(chunk.value)\n },\n\n async cancel () {\n await it.return()\n }\n })\n }\n\n /**\n * The Blob interface's slice() method creates and returns a\n * new Blob object which contains data from a subset of the\n * blob on which it's called.\n *\n * @param {number} [start]\n * @param {number} [end]\n * @param {string} [type]\n */\n slice (start = 0, end = this.size, type = '') {\n const { size } = this\n\n let relativeStart = start < 0 ? Math.max(size + start, 0) : Math.min(start, size)\n let relativeEnd = end < 0 ? Math.max(size + end, 0) : Math.min(end, size)\n\n const span = Math.max(relativeEnd - relativeStart, 0)\n const parts = this.#parts\n const blobParts = []\n let added = 0\n\n for (const part of parts) {\n // don't add the overflow to new blobParts\n if (added >= span) {\n break\n }\n\n const size = ArrayBuffer.isView(part) ? 
part.byteLength : part.size\n if (relativeStart && size <= relativeStart) {\n // Skip the beginning and change the relative\n // start & end position as we skip the unwanted parts\n relativeStart -= size\n relativeEnd -= size\n } else {\n let chunk\n if (ArrayBuffer.isView(part)) {\n chunk = part.subarray(relativeStart, Math.min(size, relativeEnd))\n added += chunk.byteLength\n } else {\n chunk = part.slice(relativeStart, Math.min(size, relativeEnd))\n added += chunk.size\n }\n relativeEnd -= size\n blobParts.push(chunk)\n relativeStart = 0 // All next sequential parts should start at 0\n }\n }\n\n const blob = new Blob([], { type: String(type).toLowerCase() })\n blob.#size = span\n blob.#parts = blobParts\n\n return blob\n }\n\n get [Symbol.toStringTag] () {\n return 'Blob'\n }\n\n static [Symbol.hasInstance] (object) {\n return (\n object &&\n typeof object === 'object' &&\n typeof object.constructor === 'function' &&\n (\n typeof object.stream === 'function' ||\n typeof object.arrayBuffer === 'function'\n ) &&\n /^(Blob|File)$/.test(object[Symbol.toStringTag])\n )\n }\n}\n\nObject.defineProperties(_Blob.prototype, {\n size: { enumerable: true },\n type: { enumerable: true },\n slice: { enumerable: true }\n})\n\n/** @type {typeof globalThis.Blob} */\nexport const Blob = _Blob\nexport default Blob\n","/*! formdata-polyfill. MIT License. Jimmy Wärting */\n\nimport C from 'fetch-blob'\nimport F from 'fetch-blob/file.js'\n\nvar {toStringTag:t,iterator:i,hasInstance:h}=Symbol,\nr=Math.random,\nm='append,set,get,getAll,delete,keys,values,entries,forEach,constructor'.split(','),\nf=(a,b,c)=>(a+='',/^(Blob|File)$/.test(b && b[t])?[(c=c!==void 0?c+'':b[t]=='File'?b.name:'blob',a),b.name!==c||b[t]=='blob'?new F([b],c,b):b]:[a,b+'']),\ne=(c,f)=>(f?c:c.replace(/\\r?\\n|\\r/g,'\\r\\n')).replace(/\\n/g,'%0A').replace(/\\r/g,'%0D').replace(/\"/g,'%22'),\nx=(n, a, e)=>{if(a.lengthtypeof o[m]!='function')}\nappend(...a){x('append',arguments,2);this.#d.push(f(...a))}\ndelete(a){x('delete',arguments,1);a+='';this.#d=this.#d.filter(([b])=>b!==a)}\nget(a){x('get',arguments,1);a+='';for(var b=this.#d,l=b.length,c=0;cc[0]===a&&b.push(c[1]));return b}\nhas(a){x('has',arguments,1);a+='';return this.#d.some(b=>b[0]===a)}\nforEach(a,b){x('forEach',arguments,1);for(var [c,d]of this)a.call(b,d,c,this)}\nset(...a){x('set',arguments,2);var b=[],c=!0;a=f(...a);this.#d.forEach(d=>{d[0]===a[0]?c&&(c=!b.push(a)):b.push(d)});c&&b.push(a);this.#d=b}\n*entries(){yield*this.#d}\n*keys(){for(var[a]of this)yield a}\n*values(){for(var[,a]of this)yield a}}\n\n/** @param {FormData} F */\nexport function formDataToBlob (F,B=C){\nvar b=`${r()}${r()}`.replace(/\\./g, '').slice(-28).padStart(32, '-'),c=[],p=`--${b}\\r\\nContent-Disposition: form-data; name=\"`\nF.forEach((v,n)=>typeof v=='string'\n?c.push(p+e(n)+`\"\\r\\n\\r\\n${v.replace(/\\r(?!\\n)|(? 
{\n\treturn (\n\t\ttypeof object === 'object' &&\n\t\ttypeof object.append === 'function' &&\n\t\ttypeof object.delete === 'function' &&\n\t\ttypeof object.get === 'function' &&\n\t\ttypeof object.getAll === 'function' &&\n\t\ttypeof object.has === 'function' &&\n\t\ttypeof object.set === 'function' &&\n\t\ttypeof object.sort === 'function' &&\n\t\tobject[NAME] === 'URLSearchParams'\n\t);\n};\n\n/**\n * Check if `object` is a W3C `Blob` object (which `File` inherits from)\n * @param {*} object - Object to check for\n * @return {boolean}\n */\nexport const isBlob = object => {\n\treturn (\n\t\tobject &&\n\t\ttypeof object === 'object' &&\n\t\ttypeof object.arrayBuffer === 'function' &&\n\t\ttypeof object.type === 'string' &&\n\t\ttypeof object.stream === 'function' &&\n\t\ttypeof object.constructor === 'function' &&\n\t\t/^(Blob|File)$/.test(object[NAME])\n\t);\n};\n\n/**\n * Check if `obj` is an instance of AbortSignal.\n * @param {*} object - Object to check for\n * @return {boolean}\n */\nexport const isAbortSignal = object => {\n\treturn (\n\t\ttypeof object === 'object' && (\n\t\t\tobject[NAME] === 'AbortSignal' ||\n\t\t\tobject[NAME] === 'EventTarget'\n\t\t)\n\t);\n};\n\n/**\n * isDomainOrSubdomain reports whether sub is a subdomain (or exact match) of\n * the parent domain.\n *\n * Both domains must already be in canonical form.\n * @param {string|URL} original\n * @param {string|URL} destination\n */\nexport const isDomainOrSubdomain = (destination, original) => {\n\tconst orig = new URL(original).hostname;\n\tconst dest = new URL(destination).hostname;\n\n\treturn orig === dest || orig.endsWith(`.${dest}`);\n};\n\n/**\n * isSameProtocol reports whether the two provided URLs use the same protocol.\n *\n * Both domains must already be in canonical form.\n * @param {string|URL} original\n * @param {string|URL} destination\n */\nexport const isSameProtocol = (destination, original) => {\n\tconst orig = new URL(original).protocol;\n\tconst dest = new URL(destination).protocol;\n\n\treturn orig === dest;\n};\n","\n/**\n * Body.js\n *\n * Body interface provides common methods for Request and Response\n */\n\nimport Stream, {PassThrough} from 'node:stream';\nimport {types, deprecate, promisify} from 'node:util';\nimport {Buffer} from 'node:buffer';\n\nimport Blob from 'fetch-blob';\nimport {FormData, formDataToBlob} from 'formdata-polyfill/esm.min.js';\n\nimport {FetchError} from './errors/fetch-error.js';\nimport {FetchBaseError} from './errors/base.js';\nimport {isBlob, isURLSearchParameters} from './utils/is.js';\n\nconst pipeline = promisify(Stream.pipeline);\nconst INTERNALS = Symbol('Body internals');\n\n/**\n * Body mixin\n *\n * Ref: https://fetch.spec.whatwg.org/#body\n *\n * @param Stream body Readable stream\n * @param Object opts Response options\n * @return Void\n */\nexport default class Body {\n\tconstructor(body, {\n\t\tsize = 0\n\t} = {}) {\n\t\tlet boundary = null;\n\n\t\tif (body === null) {\n\t\t\t// Body is undefined or null\n\t\t\tbody = null;\n\t\t} else if (isURLSearchParameters(body)) {\n\t\t\t// Body is a URLSearchParams\n\t\t\tbody = Buffer.from(body.toString());\n\t\t} else if (isBlob(body)) {\n\t\t\t// Body is blob\n\t\t} else if (Buffer.isBuffer(body)) {\n\t\t\t// Body is Buffer\n\t\t} else if (types.isAnyArrayBuffer(body)) {\n\t\t\t// Body is ArrayBuffer\n\t\t\tbody = Buffer.from(body);\n\t\t} else if (ArrayBuffer.isView(body)) {\n\t\t\t// Body is ArrayBufferView\n\t\t\tbody = Buffer.from(body.buffer, body.byteOffset, body.byteLength);\n\t\t} else if (body 
instanceof Stream) {\n\t\t\t// Body is stream\n\t\t} else if (body instanceof FormData) {\n\t\t\t// Body is FormData\n\t\t\tbody = formDataToBlob(body);\n\t\t\tboundary = body.type.split('=')[1];\n\t\t} else {\n\t\t\t// None of the above\n\t\t\t// coerce to string then buffer\n\t\t\tbody = Buffer.from(String(body));\n\t\t}\n\n\t\tlet stream = body;\n\n\t\tif (Buffer.isBuffer(body)) {\n\t\t\tstream = Stream.Readable.from(body);\n\t\t} else if (isBlob(body)) {\n\t\t\tstream = Stream.Readable.from(body.stream());\n\t\t}\n\n\t\tthis[INTERNALS] = {\n\t\t\tbody,\n\t\t\tstream,\n\t\t\tboundary,\n\t\t\tdisturbed: false,\n\t\t\terror: null\n\t\t};\n\t\tthis.size = size;\n\n\t\tif (body instanceof Stream) {\n\t\t\tbody.on('error', error_ => {\n\t\t\t\tconst error = error_ instanceof FetchBaseError ?\n\t\t\t\t\terror_ :\n\t\t\t\t\tnew FetchError(`Invalid response body while trying to fetch ${this.url}: ${error_.message}`, 'system', error_);\n\t\t\t\tthis[INTERNALS].error = error;\n\t\t\t});\n\t\t}\n\t}\n\n\tget body() {\n\t\treturn this[INTERNALS].stream;\n\t}\n\n\tget bodyUsed() {\n\t\treturn this[INTERNALS].disturbed;\n\t}\n\n\t/**\n\t * Decode response as ArrayBuffer\n\t *\n\t * @return Promise\n\t */\n\tasync arrayBuffer() {\n\t\tconst {buffer, byteOffset, byteLength} = await consumeBody(this);\n\t\treturn buffer.slice(byteOffset, byteOffset + byteLength);\n\t}\n\n\tasync formData() {\n\t\tconst ct = this.headers.get('content-type');\n\n\t\tif (ct.startsWith('application/x-www-form-urlencoded')) {\n\t\t\tconst formData = new FormData();\n\t\t\tconst parameters = new URLSearchParams(await this.text());\n\n\t\t\tfor (const [name, value] of parameters) {\n\t\t\t\tformData.append(name, value);\n\t\t\t}\n\n\t\t\treturn formData;\n\t\t}\n\n\t\tconst {toFormData} = await import('./utils/multipart-parser.js');\n\t\treturn toFormData(this.body, ct);\n\t}\n\n\t/**\n\t * Return raw response as Blob\n\t *\n\t * @return Promise\n\t */\n\tasync blob() {\n\t\tconst ct = (this.headers && this.headers.get('content-type')) || (this[INTERNALS].body && this[INTERNALS].body.type) || '';\n\t\tconst buf = await this.arrayBuffer();\n\n\t\treturn new Blob([buf], {\n\t\t\ttype: ct\n\t\t});\n\t}\n\n\t/**\n\t * Decode response as json\n\t *\n\t * @return Promise\n\t */\n\tasync json() {\n\t\tconst text = await this.text();\n\t\treturn JSON.parse(text);\n\t}\n\n\t/**\n\t * Decode response as text\n\t *\n\t * @return Promise\n\t */\n\tasync text() {\n\t\tconst buffer = await consumeBody(this);\n\t\treturn new TextDecoder().decode(buffer);\n\t}\n\n\t/**\n\t * Decode response as buffer (non-spec api)\n\t *\n\t * @return Promise\n\t */\n\tbuffer() {\n\t\treturn consumeBody(this);\n\t}\n}\n\nBody.prototype.buffer = deprecate(Body.prototype.buffer, 'Please use \\'response.arrayBuffer()\\' instead of \\'response.buffer()\\'', 'node-fetch#buffer');\n\n// In browsers, all properties are enumerable.\nObject.defineProperties(Body.prototype, {\n\tbody: {enumerable: true},\n\tbodyUsed: {enumerable: true},\n\tarrayBuffer: {enumerable: true},\n\tblob: {enumerable: true},\n\tjson: {enumerable: true},\n\ttext: {enumerable: true},\n\tdata: {get: deprecate(() => {},\n\t\t'data doesn\\'t exist, use json(), text(), arrayBuffer(), or body instead',\n\t\t'https://github.com/node-fetch/node-fetch/issues/1000 (response)')}\n});\n\n/**\n * Consume and convert an entire Body to a Buffer.\n *\n * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body\n *\n * @return Promise\n */\nasync function consumeBody(data) {\n\tif 
(data[INTERNALS].disturbed) {\n\t\tthrow new TypeError(`body used already for: ${data.url}`);\n\t}\n\n\tdata[INTERNALS].disturbed = true;\n\n\tif (data[INTERNALS].error) {\n\t\tthrow data[INTERNALS].error;\n\t}\n\n\tconst {body} = data;\n\n\t// Body is null\n\tif (body === null) {\n\t\treturn Buffer.alloc(0);\n\t}\n\n\t/* c8 ignore next 3 */\n\tif (!(body instanceof Stream)) {\n\t\treturn Buffer.alloc(0);\n\t}\n\n\t// Body is stream\n\t// get ready to actually consume the body\n\tconst accum = [];\n\tlet accumBytes = 0;\n\n\ttry {\n\t\tfor await (const chunk of body) {\n\t\t\tif (data.size > 0 && accumBytes + chunk.length > data.size) {\n\t\t\t\tconst error = new FetchError(`content size at ${data.url} over limit: ${data.size}`, 'max-size');\n\t\t\t\tbody.destroy(error);\n\t\t\t\tthrow error;\n\t\t\t}\n\n\t\t\taccumBytes += chunk.length;\n\t\t\taccum.push(chunk);\n\t\t}\n\t} catch (error) {\n\t\tconst error_ = error instanceof FetchBaseError ? error : new FetchError(`Invalid response body while trying to fetch ${data.url}: ${error.message}`, 'system', error);\n\t\tthrow error_;\n\t}\n\n\tif (body.readableEnded === true || body._readableState.ended === true) {\n\t\ttry {\n\t\t\tif (accum.every(c => typeof c === 'string')) {\n\t\t\t\treturn Buffer.from(accum.join(''));\n\t\t\t}\n\n\t\t\treturn Buffer.concat(accum, accumBytes);\n\t\t} catch (error) {\n\t\t\tthrow new FetchError(`Could not create Buffer from response body for ${data.url}: ${error.message}`, 'system', error);\n\t\t}\n\t} else {\n\t\tthrow new FetchError(`Premature close of server response while trying to fetch ${data.url}`);\n\t}\n}\n\n/**\n * Clone body given Res/Req instance\n *\n * @param Mixed instance Response or Request instance\n * @param String highWaterMark highWaterMark for both PassThrough body streams\n * @return Mixed\n */\nexport const clone = (instance, highWaterMark) => {\n\tlet p1;\n\tlet p2;\n\tlet {body} = instance[INTERNALS];\n\n\t// Don't allow cloning a used body\n\tif (instance.bodyUsed) {\n\t\tthrow new Error('cannot clone body after it is used');\n\t}\n\n\t// Check that body is a stream and not form-data object\n\t// note: we can't clone the form-data object without having it as a dependency\n\tif ((body instanceof Stream) && (typeof body.getBoundary !== 'function')) {\n\t\t// Tee instance body\n\t\tp1 = new PassThrough({highWaterMark});\n\t\tp2 = new PassThrough({highWaterMark});\n\t\tbody.pipe(p1);\n\t\tbody.pipe(p2);\n\t\t// Set instance body to teed body and return the other teed body\n\t\tinstance[INTERNALS].stream = p1;\n\t\tbody = p2;\n\t}\n\n\treturn body;\n};\n\nconst getNonSpecFormDataBoundary = deprecate(\n\tbody => body.getBoundary(),\n\t'form-data doesn\\'t follow the spec and requires special treatment. 
Use alternative package',\n\t'https://github.com/node-fetch/node-fetch/issues/1167'\n);\n\n/**\n * Performs the operation \"extract a `Content-Type` value from |object|\" as\n * specified in the specification:\n * https://fetch.spec.whatwg.org/#concept-bodyinit-extract\n *\n * This function assumes that instance.body is present.\n *\n * @param {any} body Any options.body input\n * @returns {string | null}\n */\nexport const extractContentType = (body, request) => {\n\t// Body is null or undefined\n\tif (body === null) {\n\t\treturn null;\n\t}\n\n\t// Body is string\n\tif (typeof body === 'string') {\n\t\treturn 'text/plain;charset=UTF-8';\n\t}\n\n\t// Body is a URLSearchParams\n\tif (isURLSearchParameters(body)) {\n\t\treturn 'application/x-www-form-urlencoded;charset=UTF-8';\n\t}\n\n\t// Body is blob\n\tif (isBlob(body)) {\n\t\treturn body.type || null;\n\t}\n\n\t// Body is a Buffer (Buffer, ArrayBuffer or ArrayBufferView)\n\tif (Buffer.isBuffer(body) || types.isAnyArrayBuffer(body) || ArrayBuffer.isView(body)) {\n\t\treturn null;\n\t}\n\n\tif (body instanceof FormData) {\n\t\treturn `multipart/form-data; boundary=${request[INTERNALS].boundary}`;\n\t}\n\n\t// Detect form data input from form-data module\n\tif (body && typeof body.getBoundary === 'function') {\n\t\treturn `multipart/form-data;boundary=${getNonSpecFormDataBoundary(body)}`;\n\t}\n\n\t// Body is stream - can't really do much about this\n\tif (body instanceof Stream) {\n\t\treturn null;\n\t}\n\n\t// Body constructor defaults other things to string\n\treturn 'text/plain;charset=UTF-8';\n};\n\n/**\n * The Fetch Standard treats this as if \"total bytes\" is a property on the body.\n * For us, we have to explicitly get it with a function.\n *\n * ref: https://fetch.spec.whatwg.org/#concept-body-total-bytes\n *\n * @param {any} obj.body Body object from the Body instance.\n * @returns {number | null}\n */\nexport const getTotalBytes = request => {\n\tconst {body} = request[INTERNALS];\n\n\t// Body is null or undefined\n\tif (body === null) {\n\t\treturn 0;\n\t}\n\n\t// Body is Blob\n\tif (isBlob(body)) {\n\t\treturn body.size;\n\t}\n\n\t// Body is Buffer\n\tif (Buffer.isBuffer(body)) {\n\t\treturn body.length;\n\t}\n\n\t// Detect form data input from form-data module\n\tif (body && typeof body.getLengthSync === 'function') {\n\t\treturn body.hasKnownLength && body.hasKnownLength() ? body.getLengthSync() : null;\n\t}\n\n\t// Body is stream\n\treturn null;\n};\n\n/**\n * Write a Body to a Node.js WritableStream (e.g. 
http.Request) object.\n *\n * @param {Stream.Writable} dest The stream to write to.\n * @param obj.body Body object from the Body instance.\n * @returns {Promise}\n */\nexport const writeToStream = async (dest, {body}) => {\n\tif (body === null) {\n\t\t// Body is null\n\t\tdest.end();\n\t} else {\n\t\t// Body is stream\n\t\tawait pipeline(body, dest);\n\t}\n};\n","/**\n * Headers.js\n *\n * Headers class offers convenient helpers\n */\n\nimport {types} from 'node:util';\nimport http from 'node:http';\n\n/* c8 ignore next 9 */\nconst validateHeaderName = typeof http.validateHeaderName === 'function' ?\n\thttp.validateHeaderName :\n\tname => {\n\t\tif (!/^[\\^`\\-\\w!#$%&'*+.|~]+$/.test(name)) {\n\t\t\tconst error = new TypeError(`Header name must be a valid HTTP token [${name}]`);\n\t\t\tObject.defineProperty(error, 'code', {value: 'ERR_INVALID_HTTP_TOKEN'});\n\t\t\tthrow error;\n\t\t}\n\t};\n\n/* c8 ignore next 9 */\nconst validateHeaderValue = typeof http.validateHeaderValue === 'function' ?\n\thttp.validateHeaderValue :\n\t(name, value) => {\n\t\tif (/[^\\t\\u0020-\\u007E\\u0080-\\u00FF]/.test(value)) {\n\t\t\tconst error = new TypeError(`Invalid character in header content [\"${name}\"]`);\n\t\t\tObject.defineProperty(error, 'code', {value: 'ERR_INVALID_CHAR'});\n\t\t\tthrow error;\n\t\t}\n\t};\n\n/**\n * @typedef {Headers | Record | Iterable | Iterable>} HeadersInit\n */\n\n/**\n * This Fetch API interface allows you to perform various actions on HTTP request and response headers.\n * These actions include retrieving, setting, adding to, and removing.\n * A Headers object has an associated header list, which is initially empty and consists of zero or more name and value pairs.\n * You can add to this using methods like append() (see Examples.)\n * In all methods of this interface, header names are matched by case-insensitive byte sequence.\n *\n */\nexport default class Headers extends URLSearchParams {\n\t/**\n\t * Headers class\n\t *\n\t * @constructor\n\t * @param {HeadersInit} [init] - Response headers\n\t */\n\tconstructor(init) {\n\t\t// Validate and normalize init object in [name, value(s)][]\n\t\t/** @type {string[][]} */\n\t\tlet result = [];\n\t\tif (init instanceof Headers) {\n\t\t\tconst raw = init.raw();\n\t\t\tfor (const [name, values] of Object.entries(raw)) {\n\t\t\t\tresult.push(...values.map(value => [name, value]));\n\t\t\t}\n\t\t} else if (init == null) { // eslint-disable-line no-eq-null, eqeqeq\n\t\t\t// No op\n\t\t} else if (typeof init === 'object' && !types.isBoxedPrimitive(init)) {\n\t\t\tconst method = init[Symbol.iterator];\n\t\t\t// eslint-disable-next-line no-eq-null, eqeqeq\n\t\t\tif (method == null) {\n\t\t\t\t// Record\n\t\t\t\tresult.push(...Object.entries(init));\n\t\t\t} else {\n\t\t\t\tif (typeof method !== 'function') {\n\t\t\t\t\tthrow new TypeError('Header pairs must be iterable');\n\t\t\t\t}\n\n\t\t\t\t// Sequence>\n\t\t\t\t// Note: per spec we have to first exhaust the lists then process them\n\t\t\t\tresult = [...init]\n\t\t\t\t\t.map(pair => {\n\t\t\t\t\t\tif (\n\t\t\t\t\t\t\ttypeof pair !== 'object' || types.isBoxedPrimitive(pair)\n\t\t\t\t\t\t) {\n\t\t\t\t\t\t\tthrow new TypeError('Each header pair must be an iterable object');\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\treturn [...pair];\n\t\t\t\t\t}).map(pair => {\n\t\t\t\t\t\tif (pair.length !== 2) {\n\t\t\t\t\t\t\tthrow new TypeError('Each header pair must be a name/value tuple');\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\treturn [...pair];\n\t\t\t\t\t});\n\t\t\t}\n\t\t} else {\n\t\t\tthrow new TypeError('Failed 
to construct \\'Headers\\': The provided value is not of type \\'(sequence> or record)');\n\t\t}\n\n\t\t// Validate and lowercase\n\t\tresult =\n\t\t\tresult.length > 0 ?\n\t\t\t\tresult.map(([name, value]) => {\n\t\t\t\t\tvalidateHeaderName(name);\n\t\t\t\t\tvalidateHeaderValue(name, String(value));\n\t\t\t\t\treturn [String(name).toLowerCase(), String(value)];\n\t\t\t\t}) :\n\t\t\t\tundefined;\n\n\t\tsuper(result);\n\n\t\t// Returning a Proxy that will lowercase key names, validate parameters and sort keys\n\t\t// eslint-disable-next-line no-constructor-return\n\t\treturn new Proxy(this, {\n\t\t\tget(target, p, receiver) {\n\t\t\t\tswitch (p) {\n\t\t\t\t\tcase 'append':\n\t\t\t\t\tcase 'set':\n\t\t\t\t\t\treturn (name, value) => {\n\t\t\t\t\t\t\tvalidateHeaderName(name);\n\t\t\t\t\t\t\tvalidateHeaderValue(name, String(value));\n\t\t\t\t\t\t\treturn URLSearchParams.prototype[p].call(\n\t\t\t\t\t\t\t\ttarget,\n\t\t\t\t\t\t\t\tString(name).toLowerCase(),\n\t\t\t\t\t\t\t\tString(value)\n\t\t\t\t\t\t\t);\n\t\t\t\t\t\t};\n\n\t\t\t\t\tcase 'delete':\n\t\t\t\t\tcase 'has':\n\t\t\t\t\tcase 'getAll':\n\t\t\t\t\t\treturn name => {\n\t\t\t\t\t\t\tvalidateHeaderName(name);\n\t\t\t\t\t\t\treturn URLSearchParams.prototype[p].call(\n\t\t\t\t\t\t\t\ttarget,\n\t\t\t\t\t\t\t\tString(name).toLowerCase()\n\t\t\t\t\t\t\t);\n\t\t\t\t\t\t};\n\n\t\t\t\t\tcase 'keys':\n\t\t\t\t\t\treturn () => {\n\t\t\t\t\t\t\ttarget.sort();\n\t\t\t\t\t\t\treturn new Set(URLSearchParams.prototype.keys.call(target)).keys();\n\t\t\t\t\t\t};\n\n\t\t\t\t\tdefault:\n\t\t\t\t\t\treturn Reflect.get(target, p, receiver);\n\t\t\t\t}\n\t\t\t}\n\t\t});\n\t\t/* c8 ignore next */\n\t}\n\n\tget [Symbol.toStringTag]() {\n\t\treturn this.constructor.name;\n\t}\n\n\ttoString() {\n\t\treturn Object.prototype.toString.call(this);\n\t}\n\n\tget(name) {\n\t\tconst values = this.getAll(name);\n\t\tif (values.length === 0) {\n\t\t\treturn null;\n\t\t}\n\n\t\tlet value = values.join(', ');\n\t\tif (/^content-encoding$/i.test(name)) {\n\t\t\tvalue = value.toLowerCase();\n\t\t}\n\n\t\treturn value;\n\t}\n\n\tforEach(callback, thisArg = undefined) {\n\t\tfor (const name of this.keys()) {\n\t\t\tReflect.apply(callback, thisArg, [this.get(name), name, this]);\n\t\t}\n\t}\n\n\t* values() {\n\t\tfor (const name of this.keys()) {\n\t\t\tyield this.get(name);\n\t\t}\n\t}\n\n\t/**\n\t * @type {() => IterableIterator<[string, string]>}\n\t */\n\t* entries() {\n\t\tfor (const name of this.keys()) {\n\t\t\tyield [name, this.get(name)];\n\t\t}\n\t}\n\n\t[Symbol.iterator]() {\n\t\treturn this.entries();\n\t}\n\n\t/**\n\t * Node-fetch non-spec method\n\t * returning all headers and their values as array\n\t * @returns {Record}\n\t */\n\traw() {\n\t\treturn [...this.keys()].reduce((result, key) => {\n\t\t\tresult[key] = this.getAll(key);\n\t\t\treturn result;\n\t\t}, {});\n\t}\n\n\t/**\n\t * For better console.log(headers) and also to convert Headers into Node.js Request compatible format\n\t */\n\t[Symbol.for('nodejs.util.inspect.custom')]() {\n\t\treturn [...this.keys()].reduce((result, key) => {\n\t\t\tconst values = this.getAll(key);\n\t\t\t// Http.request() only supports string as Host header.\n\t\t\t// This hack makes specifying custom Host header possible.\n\t\t\tif (key === 'host') {\n\t\t\t\tresult[key] = values[0];\n\t\t\t} else {\n\t\t\t\tresult[key] = values.length > 1 ? 
values : values[0];\n\t\t\t}\n\n\t\t\treturn result;\n\t\t}, {});\n\t}\n}\n\n/**\n * Re-shaping object for Web IDL tests\n * Only need to do it for overridden methods\n */\nObject.defineProperties(\n\tHeaders.prototype,\n\t['get', 'entries', 'forEach', 'values'].reduce((result, property) => {\n\t\tresult[property] = {enumerable: true};\n\t\treturn result;\n\t}, {})\n);\n\n/**\n * Create a Headers object from an http.IncomingMessage.rawHeaders, ignoring those that do\n * not conform to HTTP grammar productions.\n * @param {import('http').IncomingMessage['rawHeaders']} headers\n */\nexport function fromRawHeaders(headers = []) {\n\treturn new Headers(\n\t\theaders\n\t\t\t// Split into pairs\n\t\t\t.reduce((result, value, index, array) => {\n\t\t\t\tif (index % 2 === 0) {\n\t\t\t\t\tresult.push(array.slice(index, index + 2));\n\t\t\t\t}\n\n\t\t\t\treturn result;\n\t\t\t}, [])\n\t\t\t.filter(([name, value]) => {\n\t\t\t\ttry {\n\t\t\t\t\tvalidateHeaderName(name);\n\t\t\t\t\tvalidateHeaderValue(name, String(value));\n\t\t\t\t\treturn true;\n\t\t\t\t} catch {\n\t\t\t\t\treturn false;\n\t\t\t\t}\n\t\t\t})\n\n\t);\n}\n","const redirectStatus = new Set([301, 302, 303, 307, 308]);\n\n/**\n * Redirect code matching\n *\n * @param {number} code - Status code\n * @return {boolean}\n */\nexport const isRedirect = code => {\n\treturn redirectStatus.has(code);\n};\n","/**\n * Response.js\n *\n * Response class provides content decoding\n */\n\nimport Headers from './headers.js';\nimport Body, {clone, extractContentType} from './body.js';\nimport {isRedirect} from './utils/is-redirect.js';\n\nconst INTERNALS = Symbol('Response internals');\n\n/**\n * Response class\n *\n * Ref: https://fetch.spec.whatwg.org/#response-class\n *\n * @param Stream body Readable stream\n * @param Object opts Response options\n * @return Void\n */\nexport default class Response extends Body {\n\tconstructor(body = null, options = {}) {\n\t\tsuper(body, options);\n\n\t\t// eslint-disable-next-line no-eq-null, eqeqeq, no-negated-condition\n\t\tconst status = options.status != null ? 
options.status : 200;\n\n\t\tconst headers = new Headers(options.headers);\n\n\t\tif (body !== null && !headers.has('Content-Type')) {\n\t\t\tconst contentType = extractContentType(body, this);\n\t\t\tif (contentType) {\n\t\t\t\theaders.append('Content-Type', contentType);\n\t\t\t}\n\t\t}\n\n\t\tthis[INTERNALS] = {\n\t\t\ttype: 'default',\n\t\t\turl: options.url,\n\t\t\tstatus,\n\t\t\tstatusText: options.statusText || '',\n\t\t\theaders,\n\t\t\tcounter: options.counter,\n\t\t\thighWaterMark: options.highWaterMark\n\t\t};\n\t}\n\n\tget type() {\n\t\treturn this[INTERNALS].type;\n\t}\n\n\tget url() {\n\t\treturn this[INTERNALS].url || '';\n\t}\n\n\tget status() {\n\t\treturn this[INTERNALS].status;\n\t}\n\n\t/**\n\t * Convenience property representing if the request ended normally\n\t */\n\tget ok() {\n\t\treturn this[INTERNALS].status >= 200 && this[INTERNALS].status < 300;\n\t}\n\n\tget redirected() {\n\t\treturn this[INTERNALS].counter > 0;\n\t}\n\n\tget statusText() {\n\t\treturn this[INTERNALS].statusText;\n\t}\n\n\tget headers() {\n\t\treturn this[INTERNALS].headers;\n\t}\n\n\tget highWaterMark() {\n\t\treturn this[INTERNALS].highWaterMark;\n\t}\n\n\t/**\n\t * Clone this response\n\t *\n\t * @return Response\n\t */\n\tclone() {\n\t\treturn new Response(clone(this, this.highWaterMark), {\n\t\t\ttype: this.type,\n\t\t\turl: this.url,\n\t\t\tstatus: this.status,\n\t\t\tstatusText: this.statusText,\n\t\t\theaders: this.headers,\n\t\t\tok: this.ok,\n\t\t\tredirected: this.redirected,\n\t\t\tsize: this.size,\n\t\t\thighWaterMark: this.highWaterMark\n\t\t});\n\t}\n\n\t/**\n\t * @param {string} url The URL that the new response is to originate from.\n\t * @param {number} status An optional status code for the response (e.g., 302.)\n\t * @returns {Response} A Response object.\n\t */\n\tstatic redirect(url, status = 302) {\n\t\tif (!isRedirect(status)) {\n\t\t\tthrow new RangeError('Failed to execute \"redirect\" on \"response\": Invalid status code');\n\t\t}\n\n\t\treturn new Response(null, {\n\t\t\theaders: {\n\t\t\t\tlocation: new URL(url).toString()\n\t\t\t},\n\t\t\tstatus\n\t\t});\n\t}\n\n\tstatic error() {\n\t\tconst response = new Response(null, {status: 0, statusText: ''});\n\t\tresponse[INTERNALS].type = 'error';\n\t\treturn response;\n\t}\n\n\tstatic json(data = undefined, init = {}) {\n\t\tconst body = JSON.stringify(data);\n\n\t\tif (body === undefined) {\n\t\t\tthrow new TypeError('data is not JSON serializable');\n\t\t}\n\n\t\tconst headers = new Headers(init && init.headers);\n\n\t\tif (!headers.has('content-type')) {\n\t\t\theaders.set('content-type', 'application/json');\n\t\t}\n\n\t\treturn new Response(body, {\n\t\t\t...init,\n\t\t\theaders\n\t\t});\n\t}\n\n\tget [Symbol.toStringTag]() {\n\t\treturn 'Response';\n\t}\n}\n\nObject.defineProperties(Response.prototype, {\n\ttype: {enumerable: true},\n\turl: {enumerable: true},\n\tstatus: {enumerable: true},\n\tok: {enumerable: true},\n\tredirected: {enumerable: true},\n\tstatusText: {enumerable: true},\n\theaders: {enumerable: true},\n\tclone: {enumerable: true}\n});\n","const __WEBPACK_NAMESPACE_OBJECT__ = require(\"node:url\");","export const getSearch = parsedURL => {\n\tif (parsedURL.search) {\n\t\treturn parsedURL.search;\n\t}\n\n\tconst lastOffset = parsedURL.href.length - 1;\n\tconst hash = parsedURL.hash || (parsedURL.href[lastOffset] === '#' ? '#' : '');\n\treturn parsedURL.href[lastOffset - hash.length] === '?' ? '?' 
: '';\n};\n","const __WEBPACK_NAMESPACE_OBJECT__ = require(\"node:net\");","import {isIP} from 'node:net';\n\n/**\n * @external URL\n * @see {@link https://developer.mozilla.org/en-US/docs/Web/API/URL|URL}\n */\n\n/**\n * @module utils/referrer\n * @private\n */\n\n/**\n * @see {@link https://w3c.github.io/webappsec-referrer-policy/#strip-url|Referrer Policy §8.4. Strip url for use as a referrer}\n * @param {string} URL\n * @param {boolean} [originOnly=false]\n */\nexport function stripURLForUseAsAReferrer(url, originOnly = false) {\n\t// 1. If url is null, return no referrer.\n\tif (url == null) { // eslint-disable-line no-eq-null, eqeqeq\n\t\treturn 'no-referrer';\n\t}\n\n\turl = new URL(url);\n\n\t// 2. If url's scheme is a local scheme, then return no referrer.\n\tif (/^(about|blob|data):$/.test(url.protocol)) {\n\t\treturn 'no-referrer';\n\t}\n\n\t// 3. Set url's username to the empty string.\n\turl.username = '';\n\n\t// 4. Set url's password to null.\n\t// Note: `null` appears to be a mistake as this actually results in the password being `\"null\"`.\n\turl.password = '';\n\n\t// 5. Set url's fragment to null.\n\t// Note: `null` appears to be a mistake as this actually results in the fragment being `\"#null\"`.\n\turl.hash = '';\n\n\t// 6. If the origin-only flag is true, then:\n\tif (originOnly) {\n\t\t// 6.1. Set url's path to null.\n\t\t// Note: `null` appears to be a mistake as this actually results in the path being `\"/null\"`.\n\t\turl.pathname = '';\n\n\t\t// 6.2. Set url's query to null.\n\t\t// Note: `null` appears to be a mistake as this actually results in the query being `\"?null\"`.\n\t\turl.search = '';\n\t}\n\n\t// 7. Return url.\n\treturn url;\n}\n\n/**\n * @see {@link https://w3c.github.io/webappsec-referrer-policy/#enumdef-referrerpolicy|enum ReferrerPolicy}\n */\nexport const ReferrerPolicy = new Set([\n\t'',\n\t'no-referrer',\n\t'no-referrer-when-downgrade',\n\t'same-origin',\n\t'origin',\n\t'strict-origin',\n\t'origin-when-cross-origin',\n\t'strict-origin-when-cross-origin',\n\t'unsafe-url'\n]);\n\n/**\n * @see {@link https://w3c.github.io/webappsec-referrer-policy/#default-referrer-policy|default referrer policy}\n */\nexport const DEFAULT_REFERRER_POLICY = 'strict-origin-when-cross-origin';\n\n/**\n * @see {@link https://w3c.github.io/webappsec-referrer-policy/#referrer-policies|Referrer Policy §3. Referrer Policies}\n * @param {string} referrerPolicy\n * @returns {string} referrerPolicy\n */\nexport function validateReferrerPolicy(referrerPolicy) {\n\tif (!ReferrerPolicy.has(referrerPolicy)) {\n\t\tthrow new TypeError(`Invalid referrerPolicy: ${referrerPolicy}`);\n\t}\n\n\treturn referrerPolicy;\n}\n\n/**\n * @see {@link https://w3c.github.io/webappsec-secure-contexts/#is-origin-trustworthy|Referrer Policy §3.2. Is origin potentially trustworthy?}\n * @param {external:URL} url\n * @returns `true`: \"Potentially Trustworthy\", `false`: \"Not Trustworthy\"\n */\nexport function isOriginPotentiallyTrustworthy(url) {\n\t// 1. If origin is an opaque origin, return \"Not Trustworthy\".\n\t// Not applicable\n\n\t// 2. Assert: origin is a tuple origin.\n\t// Not for implementations\n\n\t// 3. If origin's scheme is either \"https\" or \"wss\", return \"Potentially Trustworthy\".\n\tif (/^(http|ws)s:$/.test(url.protocol)) {\n\t\treturn true;\n\t}\n\n\t// 4. 
If origin's host component matches one of the CIDR notations 127.0.0.0/8 or ::1/128 [RFC4632], return \"Potentially Trustworthy\".\n\tconst hostIp = url.host.replace(/(^\\[)|(]$)/g, '');\n\tconst hostIPVersion = isIP(hostIp);\n\n\tif (hostIPVersion === 4 && /^127\\./.test(hostIp)) {\n\t\treturn true;\n\t}\n\n\tif (hostIPVersion === 6 && /^(((0+:){7})|(::(0+:){0,6}))0*1$/.test(hostIp)) {\n\t\treturn true;\n\t}\n\n\t// 5. If origin's host component is \"localhost\" or falls within \".localhost\", and the user agent conforms to the name resolution rules in [let-localhost-be-localhost], return \"Potentially Trustworthy\".\n\t// We are returning FALSE here because we cannot ensure conformance to\n\t// let-localhost-be-loalhost (https://tools.ietf.org/html/draft-west-let-localhost-be-localhost)\n\tif (url.host === 'localhost' || url.host.endsWith('.localhost')) {\n\t\treturn false;\n\t}\n\n\t// 6. If origin's scheme component is file, return \"Potentially Trustworthy\".\n\tif (url.protocol === 'file:') {\n\t\treturn true;\n\t}\n\n\t// 7. If origin's scheme component is one which the user agent considers to be authenticated, return \"Potentially Trustworthy\".\n\t// Not supported\n\n\t// 8. If origin has been configured as a trustworthy origin, return \"Potentially Trustworthy\".\n\t// Not supported\n\n\t// 9. Return \"Not Trustworthy\".\n\treturn false;\n}\n\n/**\n * @see {@link https://w3c.github.io/webappsec-secure-contexts/#is-url-trustworthy|Referrer Policy §3.3. Is url potentially trustworthy?}\n * @param {external:URL} url\n * @returns `true`: \"Potentially Trustworthy\", `false`: \"Not Trustworthy\"\n */\nexport function isUrlPotentiallyTrustworthy(url) {\n\t// 1. If url is \"about:blank\" or \"about:srcdoc\", return \"Potentially Trustworthy\".\n\tif (/^about:(blank|srcdoc)$/.test(url)) {\n\t\treturn true;\n\t}\n\n\t// 2. If url's scheme is \"data\", return \"Potentially Trustworthy\".\n\tif (url.protocol === 'data:') {\n\t\treturn true;\n\t}\n\n\t// Note: The origin of blob: and filesystem: URLs is the origin of the context in which they were\n\t// created. Therefore, blobs created in a trustworthy origin will themselves be potentially\n\t// trustworthy.\n\tif (/^(blob|filesystem):$/.test(url.protocol)) {\n\t\treturn true;\n\t}\n\n\t// 3. Return the result of executing §3.2 Is origin potentially trustworthy? on url's origin.\n\treturn isOriginPotentiallyTrustworthy(url);\n}\n\n/**\n * Modifies the referrerURL to enforce any extra security policy considerations.\n * @see {@link https://w3c.github.io/webappsec-referrer-policy/#determine-requests-referrer|Referrer Policy §8.3. Determine request's Referrer}, step 7\n * @callback module:utils/referrer~referrerURLCallback\n * @param {external:URL} referrerURL\n * @returns {external:URL} modified referrerURL\n */\n\n/**\n * Modifies the referrerOrigin to enforce any extra security policy considerations.\n * @see {@link https://w3c.github.io/webappsec-referrer-policy/#determine-requests-referrer|Referrer Policy §8.3. Determine request's Referrer}, step 7\n * @callback module:utils/referrer~referrerOriginCallback\n * @param {external:URL} referrerOrigin\n * @returns {external:URL} modified referrerOrigin\n */\n\n/**\n * @see {@link https://w3c.github.io/webappsec-referrer-policy/#determine-requests-referrer|Referrer Policy §8.3. 
Determine request's Referrer}\n * @param {Request} request\n * @param {object} o\n * @param {module:utils/referrer~referrerURLCallback} o.referrerURLCallback\n * @param {module:utils/referrer~referrerOriginCallback} o.referrerOriginCallback\n * @returns {external:URL} Request's referrer\n */\nexport function determineRequestsReferrer(request, {referrerURLCallback, referrerOriginCallback} = {}) {\n\t// There are 2 notes in the specification about invalid pre-conditions. We return null, here, for\n\t// these cases:\n\t// > Note: If request's referrer is \"no-referrer\", Fetch will not call into this algorithm.\n\t// > Note: If request's referrer policy is the empty string, Fetch will not call into this\n\t// > algorithm.\n\tif (request.referrer === 'no-referrer' || request.referrerPolicy === '') {\n\t\treturn null;\n\t}\n\n\t// 1. Let policy be request's associated referrer policy.\n\tconst policy = request.referrerPolicy;\n\n\t// 2. Let environment be request's client.\n\t// not applicable to node.js\n\n\t// 3. Switch on request's referrer:\n\tif (request.referrer === 'about:client') {\n\t\treturn 'no-referrer';\n\t}\n\n\t// \"a URL\": Let referrerSource be request's referrer.\n\tconst referrerSource = request.referrer;\n\n\t// 4. Let request's referrerURL be the result of stripping referrerSource for use as a referrer.\n\tlet referrerURL = stripURLForUseAsAReferrer(referrerSource);\n\n\t// 5. Let referrerOrigin be the result of stripping referrerSource for use as a referrer, with the\n\t// origin-only flag set to true.\n\tlet referrerOrigin = stripURLForUseAsAReferrer(referrerSource, true);\n\n\t// 6. If the result of serializing referrerURL is a string whose length is greater than 4096, set\n\t// referrerURL to referrerOrigin.\n\tif (referrerURL.toString().length > 4096) {\n\t\treferrerURL = referrerOrigin;\n\t}\n\n\t// 7. The user agent MAY alter referrerURL or referrerOrigin at this point to enforce arbitrary\n\t// policy considerations in the interests of minimizing data leakage. For example, the user\n\t// agent could strip the URL down to an origin, modify its host, replace it with an empty\n\t// string, etc.\n\tif (referrerURLCallback) {\n\t\treferrerURL = referrerURLCallback(referrerURL);\n\t}\n\n\tif (referrerOriginCallback) {\n\t\treferrerOrigin = referrerOriginCallback(referrerOrigin);\n\t}\n\n\t// 8.Execute the statements corresponding to the value of policy:\n\tconst currentURL = new URL(request.url);\n\n\tswitch (policy) {\n\t\tcase 'no-referrer':\n\t\t\treturn 'no-referrer';\n\n\t\tcase 'origin':\n\t\t\treturn referrerOrigin;\n\n\t\tcase 'unsafe-url':\n\t\t\treturn referrerURL;\n\n\t\tcase 'strict-origin':\n\t\t\t// 1. If referrerURL is a potentially trustworthy URL and request's current URL is not a\n\t\t\t// potentially trustworthy URL, then return no referrer.\n\t\t\tif (isUrlPotentiallyTrustworthy(referrerURL) && !isUrlPotentiallyTrustworthy(currentURL)) {\n\t\t\t\treturn 'no-referrer';\n\t\t\t}\n\n\t\t\t// 2. Return referrerOrigin.\n\t\t\treturn referrerOrigin.toString();\n\n\t\tcase 'strict-origin-when-cross-origin':\n\t\t\t// 1. If the origin of referrerURL and the origin of request's current URL are the same, then\n\t\t\t// return referrerURL.\n\t\t\tif (referrerURL.origin === currentURL.origin) {\n\t\t\t\treturn referrerURL;\n\t\t\t}\n\n\t\t\t// 2. 
If referrerURL is a potentially trustworthy URL and request's current URL is not a\n\t\t\t// potentially trustworthy URL, then return no referrer.\n\t\t\tif (isUrlPotentiallyTrustworthy(referrerURL) && !isUrlPotentiallyTrustworthy(currentURL)) {\n\t\t\t\treturn 'no-referrer';\n\t\t\t}\n\n\t\t\t// 3. Return referrerOrigin.\n\t\t\treturn referrerOrigin;\n\n\t\tcase 'same-origin':\n\t\t\t// 1. If the origin of referrerURL and the origin of request's current URL are the same, then\n\t\t\t// return referrerURL.\n\t\t\tif (referrerURL.origin === currentURL.origin) {\n\t\t\t\treturn referrerURL;\n\t\t\t}\n\n\t\t\t// 2. Return no referrer.\n\t\t\treturn 'no-referrer';\n\n\t\tcase 'origin-when-cross-origin':\n\t\t\t// 1. If the origin of referrerURL and the origin of request's current URL are the same, then\n\t\t\t// return referrerURL.\n\t\t\tif (referrerURL.origin === currentURL.origin) {\n\t\t\t\treturn referrerURL;\n\t\t\t}\n\n\t\t\t// Return referrerOrigin.\n\t\t\treturn referrerOrigin;\n\n\t\tcase 'no-referrer-when-downgrade':\n\t\t\t// 1. If referrerURL is a potentially trustworthy URL and request's current URL is not a\n\t\t\t// potentially trustworthy URL, then return no referrer.\n\t\t\tif (isUrlPotentiallyTrustworthy(referrerURL) && !isUrlPotentiallyTrustworthy(currentURL)) {\n\t\t\t\treturn 'no-referrer';\n\t\t\t}\n\n\t\t\t// 2. Return referrerURL.\n\t\t\treturn referrerURL;\n\n\t\tdefault:\n\t\t\tthrow new TypeError(`Invalid referrerPolicy: ${policy}`);\n\t}\n}\n\n/**\n * @see {@link https://w3c.github.io/webappsec-referrer-policy/#parse-referrer-policy-from-header|Referrer Policy §8.1. Parse a referrer policy from a Referrer-Policy header}\n * @param {Headers} headers Response headers\n * @returns {string} policy\n */\nexport function parseReferrerPolicyFromHeader(headers) {\n\t// 1. Let policy-tokens be the result of extracting header list values given `Referrer-Policy`\n\t// and response’s header list.\n\tconst policyTokens = (headers.get('referrer-policy') || '').split(/[,\\s]+/);\n\n\t// 2. Let policy be the empty string.\n\tlet policy = '';\n\n\t// 3. For each token in policy-tokens, if token is a referrer policy and token is not the empty\n\t// string, then set policy to token.\n\t// Note: This algorithm loops over multiple policy values to allow deployment of new policy\n\t// values with fallbacks for older user agents, as described in § 11.1 Unknown Policy Values.\n\tfor (const token of policyTokens) {\n\t\tif (token && ReferrerPolicy.has(token)) {\n\t\t\tpolicy = token;\n\t\t}\n\t}\n\n\t// 4. 
Return policy.\n\treturn policy;\n}\n","/**\n * Request.js\n *\n * Request class contains server only options\n *\n * All spec algorithm step numbers are based on https://fetch.spec.whatwg.org/commit-snapshots/ae716822cb3a61843226cd090eefc6589446c1d2/.\n */\n\nimport {format as formatUrl} from 'node:url';\nimport {deprecate} from 'node:util';\nimport Headers from './headers.js';\nimport Body, {clone, extractContentType, getTotalBytes} from './body.js';\nimport {isAbortSignal} from './utils/is.js';\nimport {getSearch} from './utils/get-search.js';\nimport {\n\tvalidateReferrerPolicy, determineRequestsReferrer, DEFAULT_REFERRER_POLICY\n} from './utils/referrer.js';\n\nconst INTERNALS = Symbol('Request internals');\n\n/**\n * Check if `obj` is an instance of Request.\n *\n * @param {*} object\n * @return {boolean}\n */\nconst isRequest = object => {\n\treturn (\n\t\ttypeof object === 'object' &&\n\t\ttypeof object[INTERNALS] === 'object'\n\t);\n};\n\nconst doBadDataWarn = deprecate(() => {},\n\t'.data is not a valid RequestInit property, use .body instead',\n\t'https://github.com/node-fetch/node-fetch/issues/1000 (request)');\n\n/**\n * Request class\n *\n * Ref: https://fetch.spec.whatwg.org/#request-class\n *\n * @param Mixed input Url or Request instance\n * @param Object init Custom options\n * @return Void\n */\nexport default class Request extends Body {\n\tconstructor(input, init = {}) {\n\t\tlet parsedURL;\n\n\t\t// Normalize input and force URL to be encoded as UTF-8 (https://github.com/node-fetch/node-fetch/issues/245)\n\t\tif (isRequest(input)) {\n\t\t\tparsedURL = new URL(input.url);\n\t\t} else {\n\t\t\tparsedURL = new URL(input);\n\t\t\tinput = {};\n\t\t}\n\n\t\tif (parsedURL.username !== '' || parsedURL.password !== '') {\n\t\t\tthrow new TypeError(`${parsedURL} is an url with embedded credentials.`);\n\t\t}\n\n\t\tlet method = init.method || input.method || 'GET';\n\t\tif (/^(delete|get|head|options|post|put)$/i.test(method)) {\n\t\t\tmethod = method.toUpperCase();\n\t\t}\n\n\t\tif (!isRequest(init) && 'data' in init) {\n\t\t\tdoBadDataWarn();\n\t\t}\n\n\t\t// eslint-disable-next-line no-eq-null, eqeqeq\n\t\tif ((init.body != null || (isRequest(input) && input.body !== null)) &&\n\t\t\t(method === 'GET' || method === 'HEAD')) {\n\t\t\tthrow new TypeError('Request with GET/HEAD method cannot have body');\n\t\t}\n\n\t\tconst inputBody = init.body ?\n\t\t\tinit.body :\n\t\t\t(isRequest(input) && input.body !== null ?\n\t\t\t\tclone(input) :\n\t\t\t\tnull);\n\n\t\tsuper(inputBody, {\n\t\t\tsize: init.size || input.size || 0\n\t\t});\n\n\t\tconst headers = new Headers(init.headers || input.headers || {});\n\n\t\tif (inputBody !== null && !headers.has('Content-Type')) {\n\t\t\tconst contentType = extractContentType(inputBody, this);\n\t\t\tif (contentType) {\n\t\t\t\theaders.set('Content-Type', contentType);\n\t\t\t}\n\t\t}\n\n\t\tlet signal = isRequest(input) ?\n\t\t\tinput.signal :\n\t\t\tnull;\n\t\tif ('signal' in init) {\n\t\t\tsignal = init.signal;\n\t\t}\n\n\t\t// eslint-disable-next-line no-eq-null, eqeqeq\n\t\tif (signal != null && !isAbortSignal(signal)) {\n\t\t\tthrow new TypeError('Expected signal to be an instanceof AbortSignal or EventTarget');\n\t\t}\n\n\t\t// §5.4, Request constructor steps, step 15.1\n\t\t// eslint-disable-next-line no-eq-null, eqeqeq\n\t\tlet referrer = init.referrer == null ? 
input.referrer : init.referrer;\n\t\tif (referrer === '') {\n\t\t\t// §5.4, Request constructor steps, step 15.2\n\t\t\treferrer = 'no-referrer';\n\t\t} else if (referrer) {\n\t\t\t// §5.4, Request constructor steps, step 15.3.1, 15.3.2\n\t\t\tconst parsedReferrer = new URL(referrer);\n\t\t\t// §5.4, Request constructor steps, step 15.3.3, 15.3.4\n\t\t\treferrer = /^about:(\\/\\/)?client$/.test(parsedReferrer) ? 'client' : parsedReferrer;\n\t\t} else {\n\t\t\treferrer = undefined;\n\t\t}\n\n\t\tthis[INTERNALS] = {\n\t\t\tmethod,\n\t\t\tredirect: init.redirect || input.redirect || 'follow',\n\t\t\theaders,\n\t\t\tparsedURL,\n\t\t\tsignal,\n\t\t\treferrer\n\t\t};\n\n\t\t// Node-fetch-only options\n\t\tthis.follow = init.follow === undefined ? (input.follow === undefined ? 20 : input.follow) : init.follow;\n\t\tthis.compress = init.compress === undefined ? (input.compress === undefined ? true : input.compress) : init.compress;\n\t\tthis.counter = init.counter || input.counter || 0;\n\t\tthis.agent = init.agent || input.agent;\n\t\tthis.highWaterMark = init.highWaterMark || input.highWaterMark || 16384;\n\t\tthis.insecureHTTPParser = init.insecureHTTPParser || input.insecureHTTPParser || false;\n\n\t\t// §5.4, Request constructor steps, step 16.\n\t\t// Default is empty string per https://fetch.spec.whatwg.org/#concept-request-referrer-policy\n\t\tthis.referrerPolicy = init.referrerPolicy || input.referrerPolicy || '';\n\t}\n\n\t/** @returns {string} */\n\tget method() {\n\t\treturn this[INTERNALS].method;\n\t}\n\n\t/** @returns {string} */\n\tget url() {\n\t\treturn formatUrl(this[INTERNALS].parsedURL);\n\t}\n\n\t/** @returns {Headers} */\n\tget headers() {\n\t\treturn this[INTERNALS].headers;\n\t}\n\n\tget redirect() {\n\t\treturn this[INTERNALS].redirect;\n\t}\n\n\t/** @returns {AbortSignal} */\n\tget signal() {\n\t\treturn this[INTERNALS].signal;\n\t}\n\n\t// https://fetch.spec.whatwg.org/#dom-request-referrer\n\tget referrer() {\n\t\tif (this[INTERNALS].referrer === 'no-referrer') {\n\t\t\treturn '';\n\t\t}\n\n\t\tif (this[INTERNALS].referrer === 'client') {\n\t\t\treturn 'about:client';\n\t\t}\n\n\t\tif (this[INTERNALS].referrer) {\n\t\t\treturn this[INTERNALS].referrer.toString();\n\t\t}\n\n\t\treturn undefined;\n\t}\n\n\tget referrerPolicy() {\n\t\treturn this[INTERNALS].referrerPolicy;\n\t}\n\n\tset referrerPolicy(referrerPolicy) {\n\t\tthis[INTERNALS].referrerPolicy = validateReferrerPolicy(referrerPolicy);\n\t}\n\n\t/**\n\t * Clone this request\n\t *\n\t * @return Request\n\t */\n\tclone() {\n\t\treturn new Request(this);\n\t}\n\n\tget [Symbol.toStringTag]() {\n\t\treturn 'Request';\n\t}\n}\n\nObject.defineProperties(Request.prototype, {\n\tmethod: {enumerable: true},\n\turl: {enumerable: true},\n\theaders: {enumerable: true},\n\tredirect: {enumerable: true},\n\tclone: {enumerable: true},\n\tsignal: {enumerable: true},\n\treferrer: {enumerable: true},\n\treferrerPolicy: {enumerable: true}\n});\n\n/**\n * Convert a Request to Node.js http request options.\n *\n * @param {Request} request - A Request instance\n * @return The options object to be passed to http.request\n */\nexport const getNodeRequestOptions = request => {\n\tconst {parsedURL} = request[INTERNALS];\n\tconst headers = new Headers(request[INTERNALS].headers);\n\n\t// Fetch step 1.3\n\tif (!headers.has('Accept')) {\n\t\theaders.set('Accept', '*/*');\n\t}\n\n\t// HTTP-network-or-cache fetch steps 2.4-2.7\n\tlet contentLengthValue = null;\n\tif (request.body === null && /^(post|put)$/i.test(request.method)) 
{\n\t\tcontentLengthValue = '0';\n\t}\n\n\tif (request.body !== null) {\n\t\tconst totalBytes = getTotalBytes(request);\n\t\t// Set Content-Length if totalBytes is a number (that is not NaN)\n\t\tif (typeof totalBytes === 'number' && !Number.isNaN(totalBytes)) {\n\t\t\tcontentLengthValue = String(totalBytes);\n\t\t}\n\t}\n\n\tif (contentLengthValue) {\n\t\theaders.set('Content-Length', contentLengthValue);\n\t}\n\n\t// 4.1. Main fetch, step 2.6\n\t// > If request's referrer policy is the empty string, then set request's referrer policy to the\n\t// > default referrer policy.\n\tif (request.referrerPolicy === '') {\n\t\trequest.referrerPolicy = DEFAULT_REFERRER_POLICY;\n\t}\n\n\t// 4.1. Main fetch, step 2.7\n\t// > If request's referrer is not \"no-referrer\", set request's referrer to the result of invoking\n\t// > determine request's referrer.\n\tif (request.referrer && request.referrer !== 'no-referrer') {\n\t\trequest[INTERNALS].referrer = determineRequestsReferrer(request);\n\t} else {\n\t\trequest[INTERNALS].referrer = 'no-referrer';\n\t}\n\n\t// 4.5. HTTP-network-or-cache fetch, step 6.9\n\t// > If httpRequest's referrer is a URL, then append `Referer`/httpRequest's referrer, serialized\n\t// > and isomorphic encoded, to httpRequest's header list.\n\tif (request[INTERNALS].referrer instanceof URL) {\n\t\theaders.set('Referer', request.referrer);\n\t}\n\n\t// HTTP-network-or-cache fetch step 2.11\n\tif (!headers.has('User-Agent')) {\n\t\theaders.set('User-Agent', 'node-fetch');\n\t}\n\n\t// HTTP-network-or-cache fetch step 2.15\n\tif (request.compress && !headers.has('Accept-Encoding')) {\n\t\theaders.set('Accept-Encoding', 'gzip, deflate, br');\n\t}\n\n\tlet {agent} = request;\n\tif (typeof agent === 'function') {\n\t\tagent = agent(parsedURL);\n\t}\n\n\tif (!headers.has('Connection') && !agent) {\n\t\theaders.set('Connection', 'close');\n\t}\n\n\t// HTTP-network fetch step 4.2\n\t// chunked encoding is handled by Node.js\n\n\tconst search = getSearch(parsedURL);\n\n\t// Pass the full URL directly to request(), but overwrite the following\n\t// options:\n\tconst options = {\n\t\t// Overwrite search to retain trailing ? 
(issue #776)\n\t\tpath: parsedURL.pathname + search,\n\t\t// The following options are not expressed in the URL\n\t\tmethod: request.method,\n\t\theaders: headers[Symbol.for('nodejs.util.inspect.custom')](),\n\t\tinsecureHTTPParser: request.insecureHTTPParser,\n\t\tagent\n\t};\n\n\treturn {\n\t\t/** @type {URL} */\n\t\tparsedURL,\n\t\toptions\n\t};\n};\n","import {FetchBaseError} from './base.js';\n\n/**\n * AbortError interface for cancelled requests\n */\nexport class AbortError extends FetchBaseError {\n\tconstructor(message, type = 'aborted') {\n\t\tsuper(message, type);\n\t}\n}\n","/**\n * Index.js\n *\n * a request API compatible with window.fetch\n *\n * All spec algorithm step numbers are based on https://fetch.spec.whatwg.org/commit-snapshots/ae716822cb3a61843226cd090eefc6589446c1d2/.\n */\n\nimport http from 'node:http';\nimport https from 'node:https';\nimport zlib from 'node:zlib';\nimport Stream, {PassThrough, pipeline as pump} from 'node:stream';\nimport {Buffer} from 'node:buffer';\n\nimport dataUriToBuffer from 'data-uri-to-buffer';\n\nimport {writeToStream, clone} from './body.js';\nimport Response from './response.js';\nimport Headers, {fromRawHeaders} from './headers.js';\nimport Request, {getNodeRequestOptions} from './request.js';\nimport {FetchError} from './errors/fetch-error.js';\nimport {AbortError} from './errors/abort-error.js';\nimport {isRedirect} from './utils/is-redirect.js';\nimport {FormData} from 'formdata-polyfill/esm.min.js';\nimport {isDomainOrSubdomain, isSameProtocol} from './utils/is.js';\nimport {parseReferrerPolicyFromHeader} from './utils/referrer.js';\nimport {\n\tBlob,\n\tFile,\n\tfileFromSync,\n\tfileFrom,\n\tblobFromSync,\n\tblobFrom\n} from 'fetch-blob/from.js';\n\nexport {FormData, Headers, Request, Response, FetchError, AbortError, isRedirect};\nexport {Blob, File, fileFromSync, fileFrom, blobFromSync, blobFrom};\n\nconst supportedSchemas = new Set(['data:', 'http:', 'https:']);\n\n/**\n * Fetch function\n *\n * @param {string | URL | import('./request').default} url - Absolute url or Request instance\n * @param {*} [options_] - Fetch options\n * @return {Promise}\n */\nexport default async function fetch(url, options_) {\n\treturn new Promise((resolve, reject) => {\n\t\t// Build request object\n\t\tconst request = new Request(url, options_);\n\t\tconst {parsedURL, options} = getNodeRequestOptions(request);\n\t\tif (!supportedSchemas.has(parsedURL.protocol)) {\n\t\t\tthrow new TypeError(`node-fetch cannot load ${url}. URL scheme \"${parsedURL.protocol.replace(/:$/, '')}\" is not supported.`);\n\t\t}\n\n\t\tif (parsedURL.protocol === 'data:') {\n\t\t\tconst data = dataUriToBuffer(request.url);\n\t\t\tconst response = new Response(data, {headers: {'Content-Type': data.typeFull}});\n\t\t\tresolve(response);\n\t\t\treturn;\n\t\t}\n\n\t\t// Wrap http.request into fetch\n\t\tconst send = (parsedURL.protocol === 'https:' ? 
https : http).request;\n\t\tconst {signal} = request;\n\t\tlet response = null;\n\n\t\tconst abort = () => {\n\t\t\tconst error = new AbortError('The operation was aborted.');\n\t\t\treject(error);\n\t\t\tif (request.body && request.body instanceof Stream.Readable) {\n\t\t\t\trequest.body.destroy(error);\n\t\t\t}\n\n\t\t\tif (!response || !response.body) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tresponse.body.emit('error', error);\n\t\t};\n\n\t\tif (signal && signal.aborted) {\n\t\t\tabort();\n\t\t\treturn;\n\t\t}\n\n\t\tconst abortAndFinalize = () => {\n\t\t\tabort();\n\t\t\tfinalize();\n\t\t};\n\n\t\t// Send request\n\t\tconst request_ = send(parsedURL.toString(), options);\n\n\t\tif (signal) {\n\t\t\tsignal.addEventListener('abort', abortAndFinalize);\n\t\t}\n\n\t\tconst finalize = () => {\n\t\t\trequest_.abort();\n\t\t\tif (signal) {\n\t\t\t\tsignal.removeEventListener('abort', abortAndFinalize);\n\t\t\t}\n\t\t};\n\n\t\trequest_.on('error', error => {\n\t\t\treject(new FetchError(`request to ${request.url} failed, reason: ${error.message}`, 'system', error));\n\t\t\tfinalize();\n\t\t});\n\n\t\tfixResponseChunkedTransferBadEnding(request_, error => {\n\t\t\tif (response && response.body) {\n\t\t\t\tresponse.body.destroy(error);\n\t\t\t}\n\t\t});\n\n\t\t/* c8 ignore next 18 */\n\t\tif (process.version < 'v14') {\n\t\t\t// Before Node.js 14, pipeline() does not fully support async iterators and does not always\n\t\t\t// properly handle when the socket close/end events are out of order.\n\t\t\trequest_.on('socket', s => {\n\t\t\t\tlet endedWithEventsCount;\n\t\t\t\ts.prependListener('end', () => {\n\t\t\t\t\tendedWithEventsCount = s._eventsCount;\n\t\t\t\t});\n\t\t\t\ts.prependListener('close', hadError => {\n\t\t\t\t\t// if end happened before close but the socket didn't emit an error, do it now\n\t\t\t\t\tif (response && endedWithEventsCount < s._eventsCount && !hadError) {\n\t\t\t\t\t\tconst error = new Error('Premature close');\n\t\t\t\t\t\terror.code = 'ERR_STREAM_PREMATURE_CLOSE';\n\t\t\t\t\t\tresponse.body.emit('error', error);\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t});\n\t\t}\n\n\t\trequest_.on('response', response_ => {\n\t\t\trequest_.setTimeout(0);\n\t\t\tconst headers = fromRawHeaders(response_.rawHeaders);\n\n\t\t\t// HTTP fetch step 5\n\t\t\tif (isRedirect(response_.statusCode)) {\n\t\t\t\t// HTTP fetch step 5.2\n\t\t\t\tconst location = headers.get('Location');\n\n\t\t\t\t// HTTP fetch step 5.3\n\t\t\t\tlet locationURL = null;\n\t\t\t\ttry {\n\t\t\t\t\tlocationURL = location === null ? 
null : new URL(location, request.url);\n\t\t\t\t} catch {\n\t\t\t\t\t// error here can only be invalid URL in Location: header\n\t\t\t\t\t// do not throw when options.redirect == manual\n\t\t\t\t\t// let the user extract the errorneous redirect URL\n\t\t\t\t\tif (request.redirect !== 'manual') {\n\t\t\t\t\t\treject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect'));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\t// HTTP fetch step 5.5\n\t\t\t\tswitch (request.redirect) {\n\t\t\t\t\tcase 'error':\n\t\t\t\t\t\treject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect'));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t\tcase 'manual':\n\t\t\t\t\t\t// Nothing to do\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase 'follow': {\n\t\t\t\t\t\t// HTTP-redirect fetch step 2\n\t\t\t\t\t\tif (locationURL === null) {\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 5\n\t\t\t\t\t\tif (request.counter >= request.follow) {\n\t\t\t\t\t\t\treject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect'));\n\t\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 6 (counter increment)\n\t\t\t\t\t\t// Create a new Request object.\n\t\t\t\t\t\tconst requestOptions = {\n\t\t\t\t\t\t\theaders: new Headers(request.headers),\n\t\t\t\t\t\t\tfollow: request.follow,\n\t\t\t\t\t\t\tcounter: request.counter + 1,\n\t\t\t\t\t\t\tagent: request.agent,\n\t\t\t\t\t\t\tcompress: request.compress,\n\t\t\t\t\t\t\tmethod: request.method,\n\t\t\t\t\t\t\tbody: clone(request),\n\t\t\t\t\t\t\tsignal: request.signal,\n\t\t\t\t\t\t\tsize: request.size,\n\t\t\t\t\t\t\treferrer: request.referrer,\n\t\t\t\t\t\t\treferrerPolicy: request.referrerPolicy\n\t\t\t\t\t\t};\n\n\t\t\t\t\t\t// when forwarding sensitive headers like \"Authorization\",\n\t\t\t\t\t\t// \"WWW-Authenticate\", and \"Cookie\" to untrusted targets,\n\t\t\t\t\t\t// headers will be ignored when following a redirect to a domain\n\t\t\t\t\t\t// that is not a subdomain match or exact match of the initial domain.\n\t\t\t\t\t\t// For example, a redirect from \"foo.com\" to either \"foo.com\" or \"sub.foo.com\"\n\t\t\t\t\t\t// will forward the sensitive headers, but a redirect to \"bar.com\" will not.\n\t\t\t\t\t\t// headers will also be ignored when following a redirect to a domain using\n\t\t\t\t\t\t// a different protocol. 
For example, a redirect from \"https://foo.com\" to \"http://foo.com\"\n\t\t\t\t\t\t// will not forward the sensitive headers\n\t\t\t\t\t\tif (!isDomainOrSubdomain(request.url, locationURL) || !isSameProtocol(request.url, locationURL)) {\n\t\t\t\t\t\t\tfor (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) {\n\t\t\t\t\t\t\t\trequestOptions.headers.delete(name);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 9\n\t\t\t\t\t\tif (response_.statusCode !== 303 && request.body && options_.body instanceof Stream.Readable) {\n\t\t\t\t\t\t\treject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));\n\t\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 11\n\t\t\t\t\t\tif (response_.statusCode === 303 || ((response_.statusCode === 301 || response_.statusCode === 302) && request.method === 'POST')) {\n\t\t\t\t\t\t\trequestOptions.method = 'GET';\n\t\t\t\t\t\t\trequestOptions.body = undefined;\n\t\t\t\t\t\t\trequestOptions.headers.delete('content-length');\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 14\n\t\t\t\t\t\tconst responseReferrerPolicy = parseReferrerPolicyFromHeader(headers);\n\t\t\t\t\t\tif (responseReferrerPolicy) {\n\t\t\t\t\t\t\trequestOptions.referrerPolicy = responseReferrerPolicy;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 15\n\t\t\t\t\t\tresolve(fetch(new Request(locationURL, requestOptions)));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t\t}\n\n\t\t\t\t\tdefault:\n\t\t\t\t\t\treturn reject(new TypeError(`Redirect option '${request.redirect}' is not a valid value of RequestRedirect`));\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// Prepare response\n\t\t\tif (signal) {\n\t\t\t\tresponse_.once('end', () => {\n\t\t\t\t\tsignal.removeEventListener('abort', abortAndFinalize);\n\t\t\t\t});\n\t\t\t}\n\n\t\t\tlet body = pump(response_, new PassThrough(), error => {\n\t\t\t\tif (error) {\n\t\t\t\t\treject(error);\n\t\t\t\t}\n\t\t\t});\n\t\t\t// see https://github.com/nodejs/node/pull/29376\n\t\t\t/* c8 ignore next 3 */\n\t\t\tif (process.version < 'v12.10') {\n\t\t\t\tresponse_.on('aborted', abortAndFinalize);\n\t\t\t}\n\n\t\t\tconst responseOptions = {\n\t\t\t\turl: request.url,\n\t\t\t\tstatus: response_.statusCode,\n\t\t\t\tstatusText: response_.statusMessage,\n\t\t\t\theaders,\n\t\t\t\tsize: request.size,\n\t\t\t\tcounter: request.counter,\n\t\t\t\thighWaterMark: request.highWaterMark\n\t\t\t};\n\n\t\t\t// HTTP-network fetch step 12.1.1.3\n\t\t\tconst codings = headers.get('Content-Encoding');\n\n\t\t\t// HTTP-network fetch step 12.1.1.4: handle content codings\n\n\t\t\t// in following scenarios we ignore compression support\n\t\t\t// 1. compression support is disabled\n\t\t\t// 2. HEAD request\n\t\t\t// 3. no Content-Encoding header\n\t\t\t// 4. no content response (204)\n\t\t\t// 5. 
content not modified response (304)\n\t\t\tif (!request.compress || request.method === 'HEAD' || codings === null || response_.statusCode === 204 || response_.statusCode === 304) {\n\t\t\t\tresponse = new Response(body, responseOptions);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// For Node v6+\n\t\t\t// Be less strict when decoding compressed responses, since sometimes\n\t\t\t// servers send slightly invalid responses that are still accepted\n\t\t\t// by common browsers.\n\t\t\t// Always using Z_SYNC_FLUSH is what cURL does.\n\t\t\tconst zlibOptions = {\n\t\t\t\tflush: zlib.Z_SYNC_FLUSH,\n\t\t\t\tfinishFlush: zlib.Z_SYNC_FLUSH\n\t\t\t};\n\n\t\t\t// For gzip\n\t\t\tif (codings === 'gzip' || codings === 'x-gzip') {\n\t\t\t\tbody = pump(body, zlib.createGunzip(zlibOptions), error => {\n\t\t\t\t\tif (error) {\n\t\t\t\t\t\treject(error);\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t\tresponse = new Response(body, responseOptions);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// For deflate\n\t\t\tif (codings === 'deflate' || codings === 'x-deflate') {\n\t\t\t\t// Handle the infamous raw deflate response from old servers\n\t\t\t\t// a hack for old IIS and Apache servers\n\t\t\t\tconst raw = pump(response_, new PassThrough(), error => {\n\t\t\t\t\tif (error) {\n\t\t\t\t\t\treject(error);\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t\traw.once('data', chunk => {\n\t\t\t\t\t// See http://stackoverflow.com/questions/37519828\n\t\t\t\t\tif ((chunk[0] & 0x0F) === 0x08) {\n\t\t\t\t\t\tbody = pump(body, zlib.createInflate(), error => {\n\t\t\t\t\t\t\tif (error) {\n\t\t\t\t\t\t\t\treject(error);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t});\n\t\t\t\t\t} else {\n\t\t\t\t\t\tbody = pump(body, zlib.createInflateRaw(), error => {\n\t\t\t\t\t\t\tif (error) {\n\t\t\t\t\t\t\t\treject(error);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t});\n\t\t\t\t\t}\n\n\t\t\t\t\tresponse = new Response(body, responseOptions);\n\t\t\t\t\tresolve(response);\n\t\t\t\t});\n\t\t\t\traw.once('end', () => {\n\t\t\t\t\t// Some old IIS servers return zero-length OK deflate responses, so\n\t\t\t\t\t// 'data' is never emitted. 
See https://github.com/node-fetch/node-fetch/pull/903\n\t\t\t\t\tif (!response) {\n\t\t\t\t\t\tresponse = new Response(body, responseOptions);\n\t\t\t\t\t\tresolve(response);\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// For br\n\t\t\tif (codings === 'br') {\n\t\t\t\tbody = pump(body, zlib.createBrotliDecompress(), error => {\n\t\t\t\t\tif (error) {\n\t\t\t\t\t\treject(error);\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t\tresponse = new Response(body, responseOptions);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// Otherwise, use response as-is\n\t\t\tresponse = new Response(body, responseOptions);\n\t\t\tresolve(response);\n\t\t});\n\n\t\t// eslint-disable-next-line promise/prefer-await-to-then\n\t\twriteToStream(request_, request).catch(reject);\n\t});\n}\n\nfunction fixResponseChunkedTransferBadEnding(request, errorCallback) {\n\tconst LAST_CHUNK = Buffer.from('0\\r\\n\\r\\n');\n\n\tlet isChunkedTransfer = false;\n\tlet properLastChunkReceived = false;\n\tlet previousChunk;\n\n\trequest.on('response', response => {\n\t\tconst {headers} = response;\n\t\tisChunkedTransfer = headers['transfer-encoding'] === 'chunked' && !headers['content-length'];\n\t});\n\n\trequest.on('socket', socket => {\n\t\tconst onSocketClose = () => {\n\t\t\tif (isChunkedTransfer && !properLastChunkReceived) {\n\t\t\t\tconst error = new Error('Premature close');\n\t\t\t\terror.code = 'ERR_STREAM_PREMATURE_CLOSE';\n\t\t\t\terrorCallback(error);\n\t\t\t}\n\t\t};\n\n\t\tconst onData = buf => {\n\t\t\tproperLastChunkReceived = Buffer.compare(buf.slice(-5), LAST_CHUNK) === 0;\n\n\t\t\t// Sometimes final 0-length chunk and end of message code are in separate packets\n\t\t\tif (!properLastChunkReceived && previousChunk) {\n\t\t\t\tproperLastChunkReceived = (\n\t\t\t\t\tBuffer.compare(previousChunk.slice(-3), LAST_CHUNK.slice(0, 3)) === 0 &&\n\t\t\t\t\tBuffer.compare(buf.slice(-2), LAST_CHUNK.slice(3)) === 0\n\t\t\t\t);\n\t\t\t}\n\n\t\t\tpreviousChunk = buf;\n\t\t};\n\n\t\tsocket.prependListener('close', onSocketClose);\n\t\tsocket.on('data', onData);\n\n\t\trequest.on('close', () => {\n\t\t\tsocket.removeListener('close', onSocketClose);\n\t\t\tsocket.removeListener('data', onData);\n\t\t});\n\t});\n}\n","// The module cache\nvar __webpack_module_cache__ = {};\n\n// The require function\nfunction __webpack_require__(moduleId) {\n\t// Check if module is in cache\n\tvar cachedModule = __webpack_module_cache__[moduleId];\n\tif (cachedModule !== undefined) {\n\t\treturn cachedModule.exports;\n\t}\n\t// Create a new module (and put it into the cache)\n\tvar module = __webpack_module_cache__[moduleId] = {\n\t\t// no module.id needed\n\t\t// no module.loaded needed\n\t\texports: {}\n\t};\n\n\t// Execute the module function\n\tvar threw = true;\n\ttry {\n\t\t__webpack_modules__[moduleId].call(module.exports, module, module.exports, __webpack_require__);\n\t\tthrew = false;\n\t} finally {\n\t\tif(threw) delete __webpack_module_cache__[moduleId];\n\t}\n\n\t// Return the exports of the module\n\treturn module.exports;\n}\n\n// expose the modules object (__webpack_modules__)\n__webpack_require__.m = __webpack_modules__;\n\n","// define getter functions for harmony exports\n__webpack_require__.d = (exports, definition) => {\n\tfor(var key in definition) {\n\t\tif(__webpack_require__.o(definition, key) && !__webpack_require__.o(exports, key)) {\n\t\t\tObject.defineProperty(exports, key, { enumerable: true, get: definition[key] });\n\t\t}\n\t}\n};","__webpack_require__.f = {};\n// This file contains only the 
entry chunk.\n// The chunk loading function for additional chunks\n__webpack_require__.e = (chunkId) => {\n\treturn Promise.all(Object.keys(__webpack_require__.f).reduce((promises, key) => {\n\t\t__webpack_require__.f[key](chunkId, promises);\n\t\treturn promises;\n\t}, []));\n};","// This function allow to reference async chunks\n__webpack_require__.u = (chunkId) => {\n\t// return url for filenames based on template\n\treturn \"\" + chunkId + \".index.js\";\n};","__webpack_require__.o = (obj, prop) => (Object.prototype.hasOwnProperty.call(obj, prop))","// define __esModule on exports\n__webpack_require__.r = (exports) => {\n\tif(typeof Symbol !== 'undefined' && Symbol.toStringTag) {\n\t\tObject.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });\n\t}\n\tObject.defineProperty(exports, '__esModule', { value: true });\n};","\nif (typeof __webpack_require__ !== 'undefined') __webpack_require__.ab = __dirname + \"/\";","// no baseURI\n\n// object to store loaded chunks\n// \"1\" means \"loaded\", otherwise not loaded yet\nvar installedChunks = {\n\t179: 1\n};\n\n// no on chunks loaded\n\nvar installChunk = (chunk) => {\n\tvar moreModules = chunk.modules, chunkIds = chunk.ids, runtime = chunk.runtime;\n\tfor(var moduleId in moreModules) {\n\t\tif(__webpack_require__.o(moreModules, moduleId)) {\n\t\t\t__webpack_require__.m[moduleId] = moreModules[moduleId];\n\t\t}\n\t}\n\tif(runtime) runtime(__webpack_require__);\n\tfor(var i = 0; i < chunkIds.length; i++)\n\t\tinstalledChunks[chunkIds[i]] = 1;\n\n};\n\n// require() chunk loading for javascript\n__webpack_require__.f.require = (chunkId, promises) => {\n\t// \"1\" is the signal for \"already loaded\"\n\tif(!installedChunks[chunkId]) {\n\t\tif(true) { // all chunks have JS\n\t\t\tinstallChunk(require(\"./\" + __webpack_require__.u(chunkId)));\n\t\t} else installedChunks[chunkId] = 1;\n\t}\n};\n\n// no external install chunk\n\n// no HMR\n\n// no HMR manifest","","// startup\n// Load entry module and return exports\n// This entry module is referenced by other modules so it can't be inlined\nvar __webpack_exports__ = __webpack_require__(5869);\n",""],"names":[],"sourceRoot":""} \ No newline at end of file diff --git a/package.json b/package.json index d3acf2b..e1822b3 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { - "name": "typescript-action", - "version": "0.0.0", + "name": "setup-rye", + "version": "1.1.0", "private": true, "description": "TypeScript template action", "main": "dist/index.js", diff --git a/src/setup-rye.ts b/src/setup-rye.ts index 0bf528f..c2e407f 100644 --- a/src/setup-rye.ts +++ b/src/setup-rye.ts @@ -21,6 +21,7 @@ async function run(): Promise { cachedPath = await setupRye(platform, arch, version) } addRyeToPath(cachedPath) + addMatchers() } catch (err) { core.setFailed((err as Error).message) } @@ -142,4 +143,9 @@ function addRyeToPath(cachedPath: string): void { core.info(`Added ${cachedPath}/shims to the path`) } +function addMatchers(): void { + const matchersPath = path.join(__dirname, '..', '.github') + core.info(`##[add-matcher]${path.join(matchersPath, 'python.json')}`) +} + run()
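The `addMatchers()` helper registers `.github/python.json` with the runner through the `##[add-matcher]` workflow command, so matching error output from later steps (such as Python tracebacks) can be surfaced as annotations on the run. A minimal sketch of the kind of failing step output this targets — the script name and error below are hypothetical, for illustration only:

```python
# demo.py -- hypothetical script executed in a later workflow step,
# e.g. `rye run python demo.py`
def main():
    # Raising here makes the job log contain traceback frames such as
    #   File "demo.py", line 10, in <module>
    #   File "demo.py", line 7, in main
    # which a registered problem matcher can pick up from the output.
    raise ValueError('dependency sync failed')


if __name__ == '__main__':
    main()
```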