diff --git a/.github/actions/setup-go/action.yml b/.github/actions/setup-go/action.yml
deleted file mode 100644
index 1c1a6eb186a75..0000000000000
--- a/.github/actions/setup-go/action.yml
+++ /dev/null
@@ -1,37 +0,0 @@
-name: "Turborepo Go Setup"
-description: "Sets Go up for CI"
-inputs:
-  github-token:
-    description: "GitHub token. You can pass secrets.GITHUB_TOKEN"
-    required: true
-
-runs:
-  using: "composite"
-  steps:
-    - name: Set up Go
-      uses: actions/setup-go@v3
-      with:
-        go-version: "1.20"
-        cache: true
-        cache-dependency-path: cli/go.sum
-
-    - name: Set Up Protoc
-      id: set-up-protoc
-      continue-on-error: true
-      uses: arduino/setup-protoc@v1.2.0
-      with:
-        version: "3.x"
-        repo-token: ${{ inputs.github-token }}
-
-    - name: Set Up Protoc (second try)
-      if: steps.set-up-protoc.outcome == 'failure'
-      uses: arduino/setup-protoc@v1.2.0
-      with:
-        version: "3.x"
-        repo-token: ${{ inputs.github-token }}
-
-    - name: Set Up GRPC protobuf
-      shell: bash
-      run: |
-        go install google.golang.org/protobuf/cmd/protoc-gen-go@v1.28.0
-        go install google.golang.org/grpc/cmd/protoc-gen-go-grpc@v1.2.0
diff --git a/.github/actions/setup-rust/action.yml b/.github/actions/setup-rust/action.yml
index 6a7eef8070f32..bd94dae3798eb 100644
--- a/.github/actions/setup-rust/action.yml
+++ b/.github/actions/setup-rust/action.yml
@@ -11,7 +11,9 @@ inputs:
  components:
    description: "Comma-separated list of components to be additionally installed"
    required: false
-
+  github-token:
+    description: "GitHub token. You can pass secrets.GITHUB_TOKEN"
+    required: true
  shared-cache-key:
    description: "A cache key that is used instead of the automatic `job`-based key, and is stable over multiple jobs."
    required: false
@@ -53,6 +55,21 @@ runs:
      shell: bash
      run: rustup set default-host x86_64-pc-windows-gnu && rustup show

+    - name: Set Up Protoc
+      id: set-up-protoc
+      continue-on-error: true
+      uses: arduino/setup-protoc@v1.2.0
+      with:
+        version: "3.x"
+        repo-token: ${{ inputs.github-token }}
+
+    - name: Set Up Protoc (second try)
+      if: steps.set-up-protoc.outcome == 'failure'
+      uses: arduino/setup-protoc@v1.2.0
+      with:
+        version: "3.x"
+        repo-token: ${{ inputs.github-token }}
+
    - name: "Add cargo problem matchers"
      shell: bash
      run: echo "::add-matcher::${{ github.action_path }}/matchers.json"
diff --git a/.github/actions/setup-turborepo-environment/action.yml b/.github/actions/setup-turborepo-environment/action.yml
index 24f63192e4c4a..05dbf73578c1d 100644
--- a/.github/actions/setup-turborepo-environment/action.yml
+++ b/.github/actions/setup-turborepo-environment/action.yml
@@ -17,11 +17,6 @@ runs:
      with:
        extra-flags: --no-optional

-    - name: "Setup Go"
-      uses: ./.github/actions/setup-go
-      with:
-        github-token: ${{ inputs.github-token }}
-
    - name: "Setup Rust"
      uses: ./.github/actions/setup-rust
      with:
@@ -29,6 +24,7 @@
        shared-cache-key: turborepo-debug-build
        cache-key: ${{ inputs.target }}
        save-cache: true
+        github-token: ${{ inputs.github-token }}

    - name: "Setup capnproto"
      uses: ./.github/actions/setup-capnproto
diff --git a/.github/workflows/bench-turbopack-scheduled.yml b/.github/workflows/bench-turbopack-scheduled.yml
index 4b5b379a13763..fb505f7442c88 100644
--- a/.github/workflows/bench-turbopack-scheduled.yml
+++ b/.github/workflows/bench-turbopack-scheduled.yml
@@ -93,6 +93,7 @@ jobs:
      - uses: ./.github/actions/setup-rust
        with:
          shared-cache-key: benchmark-bundlers
+          github-token: ${{ secrets.GITHUB_TOKEN }}

      - name: Clear potentially cached benchmarks
        run: rm -rf target/criterion
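The deleted `setup-go` action and the amended `setup-rust` action share the same workaround for the occasionally flaky `arduino/setup-protoc` download: run it once with `continue-on-error`, then repeat it only if the first attempt failed. A minimal sketch of that idiom, using a placeholder action name rather than anything this repo actually calls:

```yaml
# Retry idiom for a flaky step: continue-on-error keeps the job alive,
# and steps.<id>.outcome reports the result *before* continue-on-error is
# applied, so the second attempt fires only when the first truly failed.
# "example/flaky-action@v1" is a hypothetical placeholder.
steps:
  - name: Flaky step
    id: first-try
    continue-on-error: true
    uses: example/flaky-action@v1

  - name: Flaky step (second try)
    if: steps.first-try.outcome == 'failure'
    uses: example/flaky-action@v1
```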
diff --git a/.github/workflows/bench-turbopack.yml b/.github/workflows/bench-turbopack.yml
index e1c541b856ed5..50d7ed7504385 100644
--- a/.github/workflows/bench-turbopack.yml
+++ b/.github/workflows/bench-turbopack.yml
@@ -124,11 +124,11 @@ jobs:
      needs.determine_jobs.outputs.cargo_only == 'true'
    name: Turbopack rust check
    runs-on:
-      - 'self-hosted'
-      - 'linux'
-      - 'x64'
-      - 'metal'
-
+      - "self-hosted"
+      - "linux"
+      - "x64"
+      - "metal"
+
    steps:
      - name: Checkout
        uses: actions/checkout@v3
@@ -138,11 +138,7 @@
        with:
          components: clippy
          targets: wasm32-unknown-unknown
-
-      - name: Setup Go
-        uses: ./.github/actions/setup-go
-        with:
-          github-token: "${{ secrets.GITHUB_TOKEN }}"
+          github-token: ${{ secrets.GITHUB_TOKEN }}

      - name: Run cargo check release
        run: |
@@ -188,10 +184,10 @@
          TURBOPACK_BENCH_COUNTS: 1000,10000

    runs-on:
-      - 'self-hosted'
-      - 'linux'
-      - 'x64'
-      - 'metal'
+      - "self-hosted"
+      - "linux"
+      - "x64"
+      - "metal"

    name: Benchmark on ${{ matrix.bench.name }}

@@ -199,11 +195,6 @@
    steps:
      - name: Checkout
        uses: actions/checkout@v3

-      - name: Setup Go
-        uses: ./.github/actions/setup-go
-        with:
-          github-token: "${{ secrets.GITHUB_TOKEN }}"
-
      - name: Setup Node.js
        uses: ./.github/actions/setup-node

@@ -212,6 +203,7 @@
        with:
          shared-cache-key: benchmark-${{ matrix.bench.cache_key }}
          save-cache: true
+          github-token: ${{ secrets.GITHUB_TOKEN }}

      - name: Clear benchmarks
        run: rm -rf target/criterion
@@ -328,10 +320,10 @@
          title: Linux
          quiet: false
          runs-on:
-            - 'self-hosted'
-            - 'linux'
-            - 'x64'
-            - 'metal'
+            - "self-hosted"
+            - "linux"
+            - "x64"
+            - "metal"
        # - name: macos
        #   title: MacOS
        #   quiet: true
@@ -352,11 +344,6 @@
      - name: Fetch the base branch
        run: git -c protocol.version=2 fetch --no-tags --progress --no-recurse-submodules --depth=1 origin +${{ github.base_ref }}:base

-      - name: Setup Go
-        uses: ./.github/actions/setup-go
-        with:
-          github-token: "${{ secrets.GITHUB_TOKEN }}"
-
      - name: Setup Node.js
        uses: ./.github/actions/setup-node

@@ -364,6 +351,7 @@
        uses: ./.github/actions/setup-rust
        with:
          shared-cache-key: benchmark-${{ matrix.os.name }}
+          github-token: ${{ secrets.GITHUB_TOKEN }}

      - name: Benchmark and compare with base branch
        uses: sokra/criterion-compare-action@main
diff --git a/.github/workflows/bench-turborepo.yml b/.github/workflows/bench-turborepo.yml
index 569256c88f43e..fe30867987ac8 100644
--- a/.github/workflows/bench-turborepo.yml
+++ b/.github/workflows/bench-turborepo.yml
@@ -24,9 +24,6 @@ jobs:
    steps:
      - uses: actions/checkout@v3
      - uses: ./.github/actions/setup-node
-      - uses: ./.github/actions/setup-go
-        with:
-          github-token: "${{ secrets.GITHUB_TOKEN }}"

      - uses: ./.github/actions/setup-turborepo-environment
        with:
diff --git a/.github/workflows/bench-turbotrace-against-node-nft.yml b/.github/workflows/bench-turbotrace-against-node-nft.yml
index 616b797623185..0308b16d0eeec 100644
--- a/.github/workflows/bench-turbotrace-against-node-nft.yml
+++ b/.github/workflows/bench-turbotrace-against-node-nft.yml
@@ -31,10 +31,8 @@ jobs:
          package-install: false

      - uses: ./.github/actions/setup-rust
-
-      - uses: ./.github/actions/setup-go
        with:
-          github-token: "${{ secrets.GITHUB_TOKEN }}"
+          github-token: ${{ secrets.GITHUB_TOKEN }}

      - name: Install hoisted npm dependencies
        working-directory: crates/turbopack/tests/node-file-trace
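Since `github-token` is now a required input of `setup-rust` (its Protoc steps forward it as `repo-token`), every workflow that invokes the action directly has to pass the token — which is what the call-site edits in this diff do. A minimal caller sketch:

```yaml
# Sketch of a post-change call site: the token comes from the built-in
# secrets context and is forwarded to setup-rust's required input.
steps:
  - uses: actions/checkout@v3
  - uses: ./.github/actions/setup-rust
    with:
      github-token: ${{ secrets.GITHUB_TOKEN }}
```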
diff --git a/.github/workflows/publish-npm.yml b/.github/workflows/publish-npm.yml
index 3868d8c00de51..c77896da47a4c 100644
--- a/.github/workflows/publish-npm.yml
+++ b/.github/workflows/publish-npm.yml
@@ -14,6 +14,8 @@ jobs:
      - uses: ./.github/actions/setup-node
      - uses: ./.github/actions/setup-rust
+        with:
+          github-token: ${{ secrets.GITHUB_TOKEN }}

      - name: Build packages
        run: pnpx turbo@canary run build:ts
diff --git a/.github/workflows/test-turbopack-rust-bench-test.yml b/.github/workflows/test-turbopack-rust-bench-test.yml
index 8ce0bc528c5f6..f6cb6001024d2 100644
--- a/.github/workflows/test-turbopack-rust-bench-test.yml
+++ b/.github/workflows/test-turbopack-rust-bench-test.yml
@@ -33,11 +33,6 @@ jobs:
        with:
          save-cache: true

-      - name: Setup Go
-        uses: ./.github/actions/setup-go
-        with:
-          github-token: "${{ secrets.GITHUB_TOKEN }}"
-
      - name: Setup Node.js
        uses: ./.github/actions/setup-node
        with:
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 99a7ee2300fb0..3b9bd60f0961f 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -131,15 +131,6 @@ jobs:
            !**.md
            !**.mdx

-      # We need to keep detecting changes to cli/ because they impact the final build and
-      # we want to build and test turborepo on these changes.
-      - name: Turborepo Go related changes
-        id: turborepo_go
-        uses: technote-space/get-diff-action@v6
-        with:
-          PATTERNS: |
-            cli/**
-
      - name: Turborepo integration tests changes
        id: turborepo_integration
        uses: technote-space/get-diff-action@v6
@@ -189,10 +180,8 @@
      turbopack_typescript: ${{ steps.ci.outputs.diff != '' || steps.turbopack_typescript.outputs.diff != '' }}
      turborepo_rust: ${{ steps.ci.outputs.diff != '' || steps.turborepo_rust.outputs.diff != '' }}
      turbopack_bench: ${{ steps.ci.outputs.diff != '' || steps.turbopack_bench.outputs.diff != '' }}
-      turborepo_go_unit: ${{ steps.ci.outputs.diff != '' || steps.turborepo_go.outputs.diff != '' || steps.turborepo_rust.outputs.diff != '' }}
-      turborepo_go_lint: ${{ steps.ci.outputs.diff != '' || steps.turborepo_go.outputs.diff != ''}}
-      turborepo_build: ${{ steps.ci.outputs.diff != '' || steps.turborepo_go.outputs.diff != '' || steps.turborepo_rust.outputs.diff != '' || steps.turborepo_integration.outputs.diff != ''}}
-      turborepo_integration: ${{ steps.ci.outputs.diff != '' || steps.turborepo_go.outputs.diff != '' || steps.turborepo_rust.outputs.diff != '' || steps.turborepo_integration.outputs.diff != '' }}
+      turborepo_build: ${{ steps.ci.outputs.diff != '' || steps.turborepo_rust.outputs.diff != '' || steps.turborepo_integration.outputs.diff != ''}}
+      turborepo_integration: ${{ steps.ci.outputs.diff != '' || steps.turborepo_rust.outputs.diff != '' || steps.turborepo_integration.outputs.diff != '' }}
      examples: ${{ steps.ci.outputs.diff != '' || steps.examples.outputs.diff != '' || steps.turborepo_version.outputs.diff != '' }}
      turborepo_js: ${{ steps.ci.outputs.diff != '' || steps.turborepo_js.outputs.diff != '' }}
      docs: ${{ steps.ci.outputs.diff != '' || steps.docs.outputs.diff != '' }}
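These outputs collapse the per-path `get-diff-action` results into coarse `'true'`/`'false'` strings that downstream jobs gate on, so dropping `turborepo_go` here automatically stops scheduling the Go jobs. A hedged sketch of a consumer, reusing the `build_turborepo`/`turborepo_build` names from this workflow (the build step itself is illustrative, not the real one):

```yaml
# A downstream job keys off the string output; comparing against the
# literal 'true' mirrors the if: expressions used elsewhere in this file.
build_turborepo:
  needs: determine_jobs
  if: needs.determine_jobs.outputs.turborepo_build == 'true'
  runs-on: ubuntu-latest
  steps:
    - uses: actions/checkout@v3
    - run: cargo build --release   # placeholder for the real build steps
```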
@@ -251,71 +240,6 @@
        run: ${SCCACHE_PATH} --show-stats
        if: ${{ !github.event.pull_request.head.repo.fork }}

-  go_lint:
-    name: Go linting
-    runs-on:
-      - "self-hosted"
-      - "linux"
-      - "x64"
-      - "metal"
-    needs: determine_jobs
-    if: needs.determine_jobs.outputs.turborepo_go_lint == 'true'
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v3
-        with:
-          fetch-depth: 0
-
-      - name: Setup Go
-        uses: ./.github/actions/setup-go
-        with:
-          github-token: "${{ secrets.GITHUB_TOKEN }}"
-
-      - name: Setup Rust
-        uses: ./.github/actions/setup-rust
-
-      - name: Setup Protos
-        run: cd cli && make compile-protos
-
-      - name: Build turborepo-ffi
-        run: cd cli && make turborepo-ffi-install
-
-      - name: golangci Linting
-        uses: golangci/golangci-lint-action@v3
-        with:
-          # More config options documented here: https://github.com/golangci/golangci-lint-action
-          version: latest
-          working-directory: cli
-          args: --new-from-rev=${{ github.event.pull_request.base.sha || 'HEAD~1' }}
-
-  go_unit:
-    name: Go Unit Tests
-    needs: determine_jobs
-    if: needs.determine_jobs.outputs.turborepo_go_unit == 'true'
-    timeout-minutes: 45
-    runs-on: ${{ matrix.os.runner }}
-    strategy:
-      fail-fast: false
-      matrix:
-        os:
-          - name: ubuntu
-            runner:
-              - "self-hosted"
-              - "linux"
-              - "x64"
-              - "metal"
-          - runner: macos-latest
-          - runner: windows-latest
-
-    steps:
-      - uses: actions/checkout@v3
-      - uses: ./.github/actions/setup-turborepo-environment
-        with:
-          windows: ${{ matrix.os.runner == 'windows-latest' }}
-          github-token: "${{ secrets.GITHUB_TOKEN }}"
-
-      - run: turbo run test --filter=cli --color
-
  turborepo_integration:
    name: Turborepo Integration
    needs: [determine_jobs, build_turborepo]
@@ -364,53 +288,6 @@
      env:
        EXPERIMENTAL_RUST_CODEPATH: true

-  turborepo_integration_go:
-    name: Turborepo Integration (Go Fallback)
-    needs: [determine_jobs, build_turborepo]
-    if: needs.determine_jobs.outputs.turborepo_integration == 'true'
-    runs-on: ${{ matrix.os.runner }}
-    strategy:
-      fail-fast: false
-      matrix:
-        os:
-          - runner: ubuntu-latest
-          - runner: macos-latest
-          - runner: windows-latest
-    steps:
-      # On Windows, set autocrlf to input so that when the repo is cloned down
-      # the fixtures retain their line endings and don't get updated to CRLF.
-      # We want this because this repo also contains the fixtures for our test cases
-      # and these fixtures have files that need stable file hashes. If we let git update
-      # the line endings on checkout, the file hashes will change.
-      # https://www.git-scm.com/book/en/v2/Customizing-Git-Git-Configuration#_core_autocrlf
-      - name: set crlf
-        if: matrix.os.runner == 'windows-latest'
-        shell: bash
-        run: git config --global core.autocrlf input
-
-      - uses: actions/checkout@v3
-      - uses: ./.github/actions/setup-turborepo-environment
-        with:
-          windows: ${{ matrix.os.runner == 'windows-latest' }}
-          github-token: "${{ secrets.GITHUB_TOKEN }}"
-
-      - name: Setup Graphviz
-        uses: ts-graphviz/setup-graphviz@v1
-        with:
-          macos-skip-brew-update: "true"
-        env:
-          HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: true
-
-      - name: Cache Prysk
-        id: cache-prysk
-        uses: actions/cache@v3
-        with:
-          path: cli/.cram_env
-          key: prysk-venv-${{ matrix.os.runner }}
-
-      - name: Integration Tests
-        run: turbo run test --filter=turborepo-tests-integration-go --color --env-mode=strict --token=${{ secrets.TURBO_TOKEN }} --team=${{ vars.TURBO_TEAM }}
-
  turborepo_examples:
    name: Turborepo Examples
    needs: [determine_jobs]
@@ -541,6 +418,7 @@
        uses: ./.github/actions/setup-rust
        with:
          components: rustfmt
+          github-token: "${{ secrets.GITHUB_TOKEN }}"

      - name: Run cargo fmt check
        run: |
@@ -571,20 +449,12 @@
      - name: Checkout
        uses: actions/checkout@v3

-      - name: Setup Rust
-        uses: ./.github/actions/setup-rust
-        with:
-          components: clippy
-          targets: wasm32-unknown-unknown
-
-      - name: Setup Go
-        uses: ./.github/actions/setup-go
+      - name: Setup Turborepo Environment
+        uses: ./.github/actions/setup-turborepo-environment
        with:
+          windows: ${{ matrix.os.name == 'windows' }}
          github-token: "${{ secrets.GITHUB_TOKEN }}"

-      - name: Setup capnproto
-        uses: ./.github/actions/setup-capnproto
-
      - name: Run cargo check
        run: |
          cargo groups check turborepo-libraries --features rustls-tls
@@ -601,20 +471,12 @@
      - name: Checkout
        uses: actions/checkout@v3
-      - name: Setup Rust
-        uses: ./.github/actions/setup-rust
-        with:
-          components: clippy
-          targets: wasm32-unknown-unknown
-
-      - name: Setup Go
-        uses: ./.github/actions/setup-go
+      - name: Setup Turborepo Environment
+        uses: ./.github/actions/setup-turborepo-environment
        with:
+          windows: ${{ matrix.os.name == 'windows' }}
          github-token: "${{ secrets.GITHUB_TOKEN }}"

-      - name: Setup capnproto
-        uses: ./.github/actions/setup-capnproto
-
      - name: Run cargo clippy
        run: |
          cargo groups clippy turborepo-libraries --features rustls-tls -- --deny clippy::all
@@ -646,11 +508,6 @@
          components: clippy
          targets: wasm32-unknown-unknown

-      - name: Setup Go
-        uses: ./.github/actions/setup-go
-        with:
-          github-token: "${{ secrets.GITHUB_TOKEN }}"
-
      - name: Run cargo check release
        run: |
          RUSTFLAGS="-D warnings -A deprecated" cargo groups check turbopack --features rustls-tls --release
@@ -672,10 +529,6 @@
        with:
          components: clippy
          targets: wasm32-unknown-unknown
-
-      - name: Setup Go
-        uses: ./.github/actions/setup-go
-        with:
          github-token: "${{ secrets.GITHUB_TOKEN }}"

      - name: Run cargo clippy
@@ -705,6 +558,8 @@
      - name: Setup Rust
        uses: ./.github/actions/setup-rust
+        with:
+          github-token: "${{ secrets.GITHUB_TOKEN }}"

      - name: Build next-swc
        run: |
@@ -791,18 +646,11 @@
      - name: Checkout
        uses: actions/checkout@v3

-      - name: Setup Rust
-        uses: ./.github/actions/setup-rust
-        with:
-          save-cache: true
-
-      - name: Setup capnproto
-        uses: ./.github/actions/setup-capnproto
-
-      - name: Setup Protoc
-        uses: arduino/setup-protoc@v1
+      - name: Setup Turborepo Environment
+        uses: ./.github/actions/setup-turborepo-environment
        with:
-          repo-token: ${{ secrets.GITHUB_TOKEN }}
+          windows: ${{ matrix.os.name == 'windows' }}
+          github-token: "${{ secrets.GITHUB_TOKEN }}"

      - name: Run tests
        timeout-minutes: 120
@@ -825,6 +673,7 @@
        uses: ./.github/actions/setup-rust
        with:
          save-cache: true
+          github-token: "${{ secrets.GITHUB_TOKEN }}"

      - name: Setup Node.js
        uses: ./.github/actions/setup-node
@@ -893,17 +742,13 @@
        uses: ./.github/actions/setup-rust
        with:
          save-cache: true
+          github-token: "${{ secrets.GITHUB_TOKEN }}"

      - name: Setup Node.js
        uses: ./.github/actions/setup-node
        with:
          node-version: 18

-      - name: Setup Protoc
-        uses: arduino/setup-protoc@v1
-        with:
-          repo-token: ${{ secrets.GITHUB_TOKEN }}
-
      - name: Prepare toolchain on Windows
        run: |
          pnpx node-gyp install
@@ -1008,11 +853,8 @@
    name: Ok
    needs:
      - determine_jobs
-      - go_lint
-      - go_unit
      - turborepo_examples
      - turborepo_integration
-      - turborepo_integration_go
      - js_packages
      - rust_lint
      - turborepo_rust_check
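The `Ok` job fans in every required job, and the `subjob` calls in the hunks below turn any non-success dependency into a visible failure. Only fragments of the helper survive in this excerpt, so the sketch below reconstructs it under the stated assumption that it rejects every result except success or skipped:

```yaml
# Minimal fan-in sketch: run even when dependencies fail (if: always()),
# then map each needs.<job>.result through an assumed subjob helper.
ok:
  needs: [determine_jobs, turborepo_integration]   # trimmed needs list
  if: always()
  runs-on: ubuntu-latest
  steps:
    - shell: bash
      run: |
        subjob () {
          # assumed behavior: anything but success/skipped fails the job
          if [[ "$1" != "success" && "$1" != "skipped" ]]; then
            echo "$2 failed ($1)"
            exit 1
          fi
        }
        subjob ${{ needs.determine_jobs.result }} "Determining jobs"
        subjob ${{ needs.turborepo_integration.result }} "Turborepo integration tests"
```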
@@ -1047,11 +889,8 @@
          fi
        }
        subjob ${{needs.determine_jobs.result}} "Determining jobs"
-        subjob ${{needs.go_lint.result}} "Go linting"
-        subjob ${{needs.go_unit.result}} "Go unit tests"
        subjob ${{needs.turborepo_examples.result}} "Turborepo examples"
        subjob ${{needs.turborepo_integration.result}} "Turborepo integration tests"
-        subjob ${{needs.turborepo_integration_go.result}} "Turborepo integration tests (Go fallback)"
        subjob ${{needs.js_packages.result}} "JS Package tests"
        subjob ${{needs.rust_lint.result}} "Rust lints"
        subjob ${{needs.turborepo_rust_check.result}} "Turborepo Rust checks"
@@ -1162,11 +1001,8 @@
    needs:
      - final
      - determine_jobs
-      - go_lint
-      - go_unit
      - turborepo_examples
      - turborepo_integration
-      - turborepo_integration_go
      - rust_lint
      - turborepo_rust_check
      - turborepo_rust_clippy
@@ -1203,11 +1039,8 @@
          fi
        }
        subjob ${{needs.determine_jobs.result}} "Determining jobs"
-        subjob ${{needs.go_lint.result}} "Go lints"
-        subjob ${{needs.go_unit.result}} "Go unit tests"
        subjob ${{needs.turborepo_examples.result}} "Turborepo examples"
        subjob ${{needs.turborepo_integration.result}} "Turborepo integration tests"
-        subjob ${{needs.turborepo_integration_go.result}} "Turborepo integration tests (Go fallback)"
        subjob ${{needs.rust_lint.result}} "Rust lints"
        subjob ${{needs.turborepo_rust_check.result}} "Turborepo Rust checks"
        subjob ${{needs.turborepo_rust_clippy.result}} "Turborepo Rust clippy"
diff --git a/.github/workflows/turborepo-release.yml b/.github/workflows/turborepo-release.yml
index 40a3706439d1b..7457772d96499 100644
--- a/.github/workflows/turborepo-release.yml
+++ b/.github/workflows/turborepo-release.yml
@@ -4,10 +4,9 @@
#
# 1. Create a staging branch
# 2. Run some smoke tests on that branch
-# 3. Compile the Go binary for macOS and for Windows/Linux
-# 4. Build the Rust binary
-# 5. Publish JS packages npm (including turbo itself)
-# 6. Create a release branch and open a PR.
+# 3. Build the Rust binary
+# 4. Publish JS packages to npm (including turbo itself)
+# 5. Create a release branch and open a PR.

# You can opt into a dry run, which will skip publishing to npm and opening the release branch
@@ -62,23 +61,6 @@
    outputs:
      stage-branch: "${{ steps.stage.outputs.STAGE_BRANCH }}"

-  go-smoke-test:
-    name: Go Unit Tests
-    runs-on: ubuntu-latest
-    needs: [stage]
-    steps:
-      - name: Show Stage Commit
-        run: echo "${{ needs.stage.outputs.stage-branch }}"
-      - uses: actions/checkout@v3
-        with:
-          ref: ${{ needs.stage.outputs.stage-branch }}
-      - name: Build turborepo CLI from source
-        uses: ./.github/actions/setup-turborepo-environment
-        with:
-          github-token: "${{ secrets.GITHUB_TOKEN }}"
-      - name: Run Go Unit Tests
-        run: turbo run test --filter=cli --color
-
  rust-smoke-test:
    name: Rust Unit Tests
    runs-on: ubuntu-latest
@@ -113,94 +95,9 @@
      - name: Run JS Package Tests
        run: turbo run check-types test --filter="./packages/*" --color

-  build-go-darwin:
-    name: "Build Go for macOS"
-    runs-on: macos-latest
-    needs: [stage, go-smoke-test, rust-smoke-test, js-smoke-test]
-    steps:
-      - name: Show Stage Commit
-        run: echo "${{ needs.stage.outputs.stage-branch }}"
-      - uses: actions/checkout@v3
-        with:
-          ref: ${{ needs.stage.outputs.stage-branch }}
-      - run: git fetch origin --tags
-      - uses: ./.github/actions/setup-go
-        with:
-          github-token: "${{ secrets.GITHUB_TOKEN }}"
-      - uses: actions-rs/toolchain@v1
-      - run: |
-          rustup target add x86_64-apple-darwin
-          rustup target add aarch64-apple-darwin
-      - name: Install GoReleaser
-        uses: goreleaser/goreleaser-action@v3
-        with:
-          distribution: goreleaser-pro
-          version: v1.18.2
-          install-only: true
-        env:
-          GORELEASER_KEY: ${{ secrets.GORELEASER_KEY }}
-      - name: Build Artifacts
-        run: cd cli && make build-go-turbo-darwin
-        env:
-          GORELEASER_KEY: ${{ secrets.GORELEASER_KEY }}
-      - name: Upload Artifacts
-        uses: actions/upload-artifact@v3
-        with:
-          name: turbo-go-darwin-${{ needs.stage.outputs.stage-branch }}
-          path: cli/dist-darwin
-
-  # compiles linux and windows in a container
-  build-go-cross:
-    name: "Build Go for Windows and Linux"
-    runs-on: ubuntu-latest
-    needs: [stage, go-smoke-test, rust-smoke-test, js-smoke-test]
-    container:
-      image: docker://ghcr.io/vercel/turbo-cross:v1.18.5
-    steps:
-      - name: Show Stage Commit
-        run: echo "${{ needs.stage.outputs.stage-branch }}"
-      - uses: actions/checkout@v3
-        with:
-          ref: "${{ needs.stage.outputs.stage-branch }}"
-      - run: git fetch origin --tags
-      - uses: ./.github/actions/setup-go
-        with:
-          github-token: "${{ secrets.GITHUB_TOKEN }}"
-      - run: apt-get install -y build-essential binutils-aarch64-linux-gnu
-      # Yes, I'm aware of the irony of installing Zig to compile our Go + Rust project
-      - run: |
-          mkdir ../zig
-          curl --show-error --location https://ziglang.org/builds/zig-linux-x86_64-0.11.0-dev.1908+06b263825.tar.xz | tar -J -xf - -C ../zig --strip-components 1
-          export PATH=$PATH:$(pwd)/../zig
-          echo "$(pwd)/../zig" >> $GITHUB_PATH
-      - uses: actions-rs/toolchain@v1
-      - run: |
-          rustup target add aarch64-unknown-linux-musl
-          rustup target add x86_64-pc-windows-gnu
-          rustup target add x86_64-unknown-linux-musl
-
-      - name: Install GoReleaser
-        uses: goreleaser/goreleaser-action@v3
-        with:
-          distribution: goreleaser-pro
-          version: v1.18.2
-          install-only: true
-        env:
-          GORELEASER_KEY: ${{ secrets.GORELEASER_KEY }}
-      - name: Build Artifacts
-        run: cd cli && make build-go-turbo-cross
-        env:
-          CC_aarch64_unknown_linux_musl: zig cc -target aarch64-linux-musl
-          GORELEASER_KEY: ${{ secrets.GORELEASER_KEY }}
-      - name: Upload Artifacts
-        uses: actions/upload-artifact@v3
-        with:
-          name: turbo-go-cross-${{ needs.stage.outputs.stage-branch }}
-          path: cli/dist-cross
-
  build-rust:
    name: "Build Rust"
-    needs: [stage, go-smoke-test, rust-smoke-test, js-smoke-test]
+    needs: [stage, rust-smoke-test, js-smoke-test]
    strategy:
      fail-fast: false
      matrix:
@@ -269,12 +166,12 @@
        uses: actions/upload-artifact@v3
        with:
          name: turbo-${{ matrix.settings.target }}
-          path: target/${{ matrix.settings.target }}/release/turbo*
+          path: target/${{ matrix.settings.target }}/release-turborepo/turbo*

  npm-publish:
    name: "Publish To NPM"
    runs-on: ubuntu-latest
-    needs: [stage, build-go-darwin, build-go-cross, build-rust]
+    needs: [stage, build-rust]
    steps:
      - name: Show Stage Commit
        run: echo "${{ needs.stage.outputs.stage-branch }}"
      - uses: actions/checkout@v3
        with:
          ref: ${{ needs.stage.outputs.stage-branch }}
      - uses: ./.github/actions/setup-node
        with:
          enable-corepack: false
-      - uses: ./.github/actions/setup-go
-        with:
-          github-token: "${{ secrets.GITHUB_TOKEN }}"

      - name: Configure git
        run: |
          git config --global user.name 'Turbobot'
          git config --global user.email 'turbobot@vercel.com'

-      - name: Setup capnproto
-        uses: ./.github/actions/setup-capnproto
+      - name: Setup turborepo environment
+        uses: ./.github/actions/setup-turborepo-environment
+        with:
+          github-token: "${{ secrets.GITHUB_TOKEN }}"

      - name: Install GoReleaser
        uses: goreleaser/goreleaser-action@v3
        with:
@@ -320,40 +216,6 @@
          mv rust-artifacts/turbo-x86_64-apple-darwin cli/dist-darwin-amd64
          mv rust-artifacts/turbo-x86_64-pc-windows-gnu cli/dist-windows-amd64

-      # TODO: This is a duplicate download of artifacts, but since the download path is different,
-      # we will leave this for a future optimization.
-      - name: Download Go artifacts
-        uses: actions/download-artifact@v3
-        with:
-          path: go-artifacts
-
-      - name: Move Go artifacts into place
-        run: |
-          ls go-artifacts
-          mv go-artifacts/turbo-go-cross-${{ needs.stage.outputs.stage-branch }}/turbo_linux_amd64_v1/bin/* cli/dist-linux-amd64
-          chmod a+x cli/dist-linux-amd64/turbo
-          chmod a+x cli/dist-linux-amd64/go-turbo
-          mv go-artifacts/turbo-go-cross-${{ needs.stage.outputs.stage-branch }}/turbo_linux_arm64/bin/* cli/dist-linux-arm64
-          chmod a+x cli/dist-linux-arm64/turbo
-          chmod a+x cli/dist-linux-arm64/go-turbo
-
-          # rust doesn't have a toolchain for arm + windows + gnu, so we just use the exe from the amd64 build
-          # and rely on windows' arm JITer to do the work for us. this is because the go exe cannot be build w/ msvc
-          cp -r go-artifacts/turbo-go-cross-${{ needs.stage.outputs.stage-branch }}/turbo_windows_amd64_v1/bin/* cli/dist-windows-arm64
-          chmod a+x cli/dist-windows-arm64/turbo.exe
-          chmod a+x cli/dist-windows-arm64/go-turbo.exe
-
-          mv go-artifacts/turbo-go-cross-${{ needs.stage.outputs.stage-branch }}/turbo_windows_amd64_v1/bin/* cli/dist-windows-amd64
-          chmod a+x cli/dist-windows-amd64/turbo.exe
-          chmod a+x cli/dist-windows-amd64/go-turbo.exe
-
-          mv go-artifacts/turbo-go-darwin-${{ needs.stage.outputs.stage-branch }}/turbo_darwin_amd64_v1/bin/* cli/dist-darwin-amd64
-          chmod a+x cli/dist-darwin-amd64/turbo
-          chmod a+x cli/dist-darwin-amd64/go-turbo
-          mv go-artifacts/turbo-go-darwin-${{ needs.stage.outputs.stage-branch }}/turbo_darwin_arm64/bin/* cli/dist-darwin-arm64
-          chmod a+x cli/dist-darwin-arm64/turbo
-          chmod a+x cli/dist-darwin-arm64/go-turbo
-
      - name: Perform Release
        run: cd cli && make publish-turbo SKIP_PUBLISH=${{ inputs.dry_run && '--skip-publish' || '' }}
        env:
diff --git a/.gitignore b/.gitignore
index 8bc68af884b56..0665139a289f0 100644
--- a/.gitignore
+++ b/.gitignore
@@ -45,9 +45,6 @@
rustc-ice-*.txt

# CI
sweep.timestamp
-crates/turborepo-ffi/bindings.h
-crates/turborepo-ffi/ffi/proto/*
-cli/internal/ffi/libturborepo_ffi*.a

# Prysk test error files
*.t.err
diff --git a/Cargo.lock b/Cargo.lock
index beaeed0491f93..a83457104e945 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1342,25 +1342,6 @@
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a2698f953def977c68f935bb0dfa959375ad4638570e969e2f1e9f433cbf1af6"

-[[package]]
-name = "cbindgen"
-version = "0.24.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a6358dedf60f4d9b8db43ad187391afe959746101346fe51bb978126bec61dfb"
-dependencies = [
- "clap 3.2.23",
- "heck 0.4.1",
- "indexmap 1.9.3",
- "log 0.4.20",
- "proc-macro2",
- "quote",
- "serde",
- "serde_json",
- "syn 1.0.109",
- "tempfile",
- "toml 0.5.11",
-]
-
[[package]]
name = "cc"
version = "1.0.83"
@@ -1547,12 +1528,9 @@
version = "3.2.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "71655c45cb9845d3270c9d6df84ebe72b4dad3c2ba3f7023ad47c144e4e473a5"
dependencies = [
- "atty",
 "bitflags 1.3.2",
 "clap_lex 0.2.4",
 "indexmap 1.9.3",
- "strsim 0.10.0",
- "termcolor",
 "textwrap 0.16.0",
]
@@ -10580,6 +10558,7 @@
dependencies = [
 "dunce",
 "human-panic",
 "itertools 0.10.5",
+ "miette 5.10.0",
 "pretty_assertions",
 "serde",
 "serde_json",
@@ -11640,24 +11619,6 @@
dependencies = [
 "test-case",
]

-[[package]]
-name = "turborepo-ffi"
-version = "0.1.0"
-dependencies = [
- "cbindgen",
- "directories 4.0.1",
- "globwalk",
- "prost",
- "prost-build",
- "thiserror",
- "turbopath",
- "turborepo-cache",
- "turborepo-env",
- "turborepo-fs",
- "turborepo-lockfiles",
- "turborepo-scm",
-]
-
[[package]]
name = "turborepo-filewatch"
version = "0.1.0"
diff --git a/Cargo.toml b/Cargo.toml
index ae4b222996398..a59734097b839 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -158,7 +158,6 @@
turborepo-cache = { path = "crates/turborepo-cache" }
turborepo-ci = { path = "crates/turborepo-ci" }
turborepo-env = { path = "crates/turborepo-env" }
turborepo-errors = { path = "crates/turborepo-errors" }
-turborepo-ffi = { path = "crates/turborepo-ffi" }
turborepo-fs = { path = "crates/turborepo-fs" }
turborepo-lib = { path = "crates/turborepo-lib", default-features = false }
turborepo-lockfiles = { path = "crates/turborepo-lockfiles" }
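With the Go builds removed, the release pipeline in turborepo-release.yml above reduces to a single chain: stage, smoke tests, one Rust build matrix, then publish. A condensed skeleton of that job graph — the job names are taken from the workflow, but the bodies here are placeholders, not the real steps:

```yaml
# Dependency chain of the simplified release (placeholder step bodies).
jobs:
  stage:
    runs-on: ubuntu-latest
    steps: [{ run: echo "create staging branch" }]
  rust-smoke-test:
    needs: [stage]
    runs-on: ubuntu-latest
    steps: [{ run: echo "run Rust unit tests" }]
  js-smoke-test:
    needs: [stage]
    runs-on: ubuntu-latest
    steps: [{ run: echo "run JS package tests" }]
  build-rust:
    needs: [stage, rust-smoke-test, js-smoke-test]
    runs-on: ubuntu-latest
    steps: [{ run: echo "build per-target turbo binaries" }]
  npm-publish:
    needs: [stage, build-rust]
    runs-on: ubuntu-latest
    steps: [{ run: echo "make publish-turbo" }]
```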
diff --git a/cli/.gitignore b/cli/.gitignore
index 10e16a2c2f545..e4dbcb696cca5 100644
--- a/cli/.gitignore
+++ b/cli/.gitignore
@@ -1,5 +1,3 @@
-/internal/turbodprotocol/*.go
-
/demo/
/dist/
/dist-*
@@ -9,8 +7,6 @@
/turbo-new
/turbo-new.exe
/turbo.exe
-/go-turbo
-/go-turbo.exe

testbed
diff --git a/cli/Makefile b/cli/Makefile
index e3b81c848a6c7..a0f44276f4ac3 100644
--- a/cli/Makefile
+++ b/cli/Makefile
@@ -9,24 +9,8 @@
else
	UNAME := $(shell uname -s)
endif

-GOARCH:=$(shell go env GOARCH | xargs)
-GOOS:=$(shell go env GOOS | xargs)
-
-# Strip debug info
-GO_FLAGS += "-ldflags=-s -w"
-
-# Avoid embedding the build path in the executable for more reproducible builds
-GO_FLAGS += -trimpath
-
CLI_DIR = $(shell pwd)

-# allow opting in to the rust codepaths
-GO_TAG ?= rust
-
-GO_FILES = $(shell find . -name "*.go")
-SRC_FILES = $(shell find . -name "*.go" | grep -v "_test.go")
-GENERATED_FILES = internal/turbodprotocol/turbod.pb.go internal/turbodprotocol/turbod_grpc.pb.go
-
# We do not set go-turbo as a dependency because the Rust build.rs
# script will call it for us and copy over the binary
turbo:
@@ -38,115 +22,6 @@
turbo-prod:

turbo-capnp:
	cd ../crates/turborepo-lib/src/hash && capnp compile -I std -ogo proto.capnp && mv ./proto.capnp.go ../../../../cli/internal/fs/hash/capnp

-go-turbo$(EXT): $(GENERATED_FILES) $(SRC_FILES) go.mod turborepo-ffi-install
-	CGO_ENABLED=1 go build -tags $(GO_TAG) -o go-turbo$(EXT) ./cmd/turbo
-
-
-.PHONY: turborepo-ffi-install
-turborepo-ffi-install: turborepo-ffi turborepo-ffi-copy-bindings
-	cp ../crates/turborepo-ffi/target/debug/libturborepo_ffi.a ./internal/ffi/libturborepo_ffi_$(GOOS)_$(GOARCH).a
-
-.PHONY: turborepo-ffi
-turborepo-ffi:
-	cd ../crates/turborepo-ffi && cargo build --target-dir ./target
-
-.PHONY: turborepo-ffi-copy-bindings
-turborepo-ffi-copy-bindings:
-	cp ../crates/turborepo-ffi/bindings.h ./internal/ffi/bindings.h
-
-#
-# ffi cross compiling
-#
-# these targets are used to build the ffi library for each platform
-# when doing a release. they _may_ work on your local machine, but
-# they're not intended to be used for development.
-#
-
-.PHONY: turborepo-ffi-install-windows-amd64
-turborepo-ffi-install-windows-amd64: turborepo-ffi-windows-amd64 turborepo-ffi-copy-bindings
-	cp ../crates/turborepo-ffi/target/x86_64-pc-windows-gnu/release/libturborepo_ffi.a ./internal/ffi/libturborepo_ffi_windows_amd64.a
-
-.PHONY: turborepo-ffi-install-darwin-arm64
-turborepo-ffi-install-darwin-arm64: turborepo-ffi-darwin-arm64 turborepo-ffi-copy-bindings
-	cp ../crates/turborepo-ffi/target/aarch64-apple-darwin/release/libturborepo_ffi.a ./internal/ffi/libturborepo_ffi_darwin_arm64.a
-
-.PHONY: turborepo-ffi-install-darwin-amd64
-turborepo-ffi-install-darwin-amd64: turborepo-ffi-darwin-amd64 turborepo-ffi-copy-bindings
-	cp ../crates/turborepo-ffi/target/x86_64-apple-darwin/release/libturborepo_ffi.a ./internal/ffi/libturborepo_ffi_darwin_amd64.a
-
-.PHONY: turborepo-ffi-install-linux-arm64
-turborepo-ffi-install-linux-arm64: turborepo-ffi-linux-arm64 turborepo-ffi-copy-bindings
-	cp ../crates/turborepo-ffi/target/aarch64-unknown-linux-musl/release/libturborepo_ffi.a ./internal/ffi/libturborepo_ffi_linux_arm64.a
-
-.PHONY: turborepo-ffi-install-linux-amd64
-turborepo-ffi-install-linux-amd64: turborepo-ffi-linux-amd64 turborepo-ffi-copy-bindings
-	cp ../crates/turborepo-ffi/target/x86_64-unknown-linux-musl/release/libturborepo_ffi.a ./internal/ffi/libturborepo_ffi_linux_amd64.a
-
-.PHONY: turborepo-ffi-windows-amd64
-turborepo-ffi-windows-amd64:
-	cd ../crates/turborepo-ffi && cargo build --release --target-dir ./target --target x86_64-pc-windows-gnu
-
-.PHONY: turborepo-ffi-darwin-arm64
-turborepo-ffi-darwin-arm64:
-	cd ../crates/turborepo-ffi && cargo build --release --target-dir ./target --target aarch64-apple-darwin
-
-.PHONY: turborepo-ffi-darwin-amd64
-turborepo-ffi-darwin-amd64:
-	cd ../crates/turborepo-ffi && cargo build --release --target-dir ./target --target x86_64-apple-darwin
-
-.PHONY: turborepo-ffi-linux-arm64
-turborepo-ffi-linux-arm64:
-	cd ../crates/turborepo-ffi && CC="zig cc -target aarch64-linux-musl" cargo build --release --target-dir ./target --target aarch64-unknown-linux-musl
-
-.PHONY: turborepo-ffi-linux-amd64
-turborepo-ffi-linux-amd64:
-	cd ../crates/turborepo-ffi && CC="zig cc -target x86_64-linux-musl" cargo build --release --target-dir ./target --target x86_64-unknown-linux-musl
-
-#
-# end
-#
-
-.PHONY: turborepo-ffi-proto
-turborepo-ffi-proto:
-	protoc -I../crates/ ../crates/turborepo-ffi/messages.proto --go_out=./internal/
-
-protoc: internal/turbodprotocol/turbod.proto
-	protoc --go_out=. --go_opt=paths=source_relative \
-		--go-grpc_out=. --go-grpc_opt=paths=source_relative \
-		internal/turbodprotocol/turbod.proto
-
-$(GENERATED_FILES): internal/turbodprotocol/turbod.proto
-	make protoc
-
-compile-protos: $(GENERATED_FILES)
-
-check-go-version:
-	@go version | grep ' go1\.18\.0 ' || (echo 'Please install Go version 1.18.0' && false)
-
-# This "TURBO_RACE" variable exists at the request of a user on GitHub who
-# wants to run "make test-go" on an unsupported version of macOS (version 10.9).
-# Go's race detector does not run correctly on that version. With this flag
-# you can run "TURBO_RACE= make test-go" to disable the race detector.
-TURBO_RACE ?= -race
-
-ifeq ($(UNAME), Windows)
-	TURBO_RACE=
-endif
-
-clean-go:
-	go clean -testcache -r
-
-test-go: $(GENERATED_FILES) $(GO_FILES) go.mod go.sum turborepo-ffi-install
-	go test $(TURBO_RACE) -tags $(GO_TAG) ./...
-
-# protos need to be compiled before linting, since linting needs to pick up
-# some types from the generated code
-lint-go: $(GENERATED_FILES) $(GO_FILES) go.mod go.sum
-	golangci-lint run --new-from-rev=main
-
-fmt-go: $(GO_FILES) go.mod go.sum
-	go fmt ./...
-
install: | ./package.json
	pnpm install --filter=cli

corepack:
	which corepack || npm install -g corepack@latest
	corepack enable

-cmd/turbo/version.go: ../version.txt
-	# Update this atomically to avoid issues with this being overwritten during use
-	node -e 'console.log(`package main\n\nconst turboVersion = "$(TURBO_VERSION)"`)' > cmd/turbo/version.go.txt
-	mv cmd/turbo/version.go.txt cmd/turbo/version.go
-
build: install
	cd $(CLI_DIR)/../ && pnpm build:turbo
	cd $(CLI_DIR)/../ && pnpm install --filter=create-turbo && pnpm turbo-prebuilt build --filter=create-turbo...
@@ -170,10 +40,6 @@
	cd $(CLI_DIR)/../ && pnpm install --filter=eslint-config-turbo && pnpm turbo-prebuilt build --filter=eslint-config-turbo...
	cd $(CLI_DIR)/../ && pnpm install --filter=@turbo/types && pnpm turbo-prebuilt build --filter=@turbo/types...

-.PHONY: prepublish
-prepublish: compile-protos cmd/turbo/version.go
-	make -j3 bench/turbo test-go
-
.PHONY: publish-turbo-cross
publish-turbo-cross: prepublish
	goreleaser release --rm-dist -f cross-release.yml
@@ -215,7 +81,7 @@
build-lib-turbo-cross:
	goreleaser release --rm-dist -f cross-lib.yml

.PHONY: stage-release
-stage-release: cmd/turbo/version.go
+stage-release:
	echo "Version: $(TURBO_VERSION)"
	echo "Tag: $(TURBO_TAG)"
	cat $(CLI_DIR)/../version.txt
@@ -225,7 +91,6 @@
	# Stop if versions are not updated.
	@test "" != "`git diff -- $(CLI_DIR)/../version.txt`" || (echo "Refusing to publish with unupdated version.txt" && false)
-	@test "" != "`git diff -- $(CLI_DIR)/cmd/turbo/version.go`" || (echo "Refusing to publish with unupdated version.go" && false)

	# Prepare the packages.
	cd $(CLI_DIR)/../packages/turbo && pnpm version "$(TURBO_VERSION)" --allow-same-version
@@ -309,7 +174,7 @@
bench/turbo: demo/turbo turbo

bench: bench/lerna bench/lage bench/nx bench/turbo

-clean: clean-go clean-build clean-demo clean-rust
+clean: clean-build clean-demo clean-rust

clean-rust:
	cargo clean
diff --git a/cli/cmd/turbo/main.go b/cli/cmd/turbo/main.go
deleted file mode 100644
index 465fd8d9d3d42..0000000000000
--- a/cli/cmd/turbo/main.go
+++ /dev/null
@@ -1,32 +0,0 @@
-package main
-
-import (
-	"encoding/json"
-	"fmt"
-	"os"
-	"strings"
-
-	"github.com/vercel/turbo/cli/internal/cmd"
-	"github.com/vercel/turbo/cli/internal/turbostate"
-)
-
-func main() {
-	if len(os.Args) != 2 {
-		fmt.Printf("go-turbo is expected to be invoked via turbo")
-		os.Exit(1)
-	}
-
-	executionStateString := os.Args[1]
-	var executionState turbostate.ExecutionState
-	decoder := json.NewDecoder(strings.NewReader(executionStateString))
-	decoder.DisallowUnknownFields()
-
-	err := decoder.Decode(&executionState)
-	if err != nil {
-		fmt.Printf("Error unmarshalling execution state: %v\n Execution state string: %v\n", err, executionStateString)
-		os.Exit(1)
-	}
-
-	exitCode := cmd.RunWithExecutionState(&executionState, turboVersion)
-	os.Exit(exitCode)
-}
diff --git a/cli/cmd/turbo/version.go b/cli/cmd/turbo/version.go
deleted file mode 100644
index b0049c8026ac4..0000000000000
--- a/cli/cmd/turbo/version.go
+++ /dev/null
@@ -1,3 +0,0 @@
-package main
-
-const turboVersion = "1.11.4-canary.2"
diff --git a/cli/go.mod b/cli/go.mod
deleted file mode 100644
index 79ab82e4355fb..0000000000000
--- a/cli/go.mod
+++ /dev/null
@@ -1,85 +0,0 @@
-module github.com/vercel/turbo/cli
-
-go 1.20
-
-require (
-	capnproto.org/go/capnp/v3 v3.0.0-alpha-29
-	github.com/AlecAivazis/survey/v2 v2.3.5
-	github.com/DataDog/zstd v1.5.2
-	github.com/Masterminds/semver v1.5.0
-	github.com/adrg/xdg v0.3.3
-	github.com/bgentry/speakeasy v0.1.0
-	github.com/briandowns/spinner v1.18.1
-	github.com/cenkalti/backoff/v4 v4.1.3
-	github.com/deckarep/golang-set v1.8.0
-	github.com/fatih/color v1.13.0
-	github.com/fsnotify/fsevents v0.1.1
-	github.com/fsnotify/fsnotify v1.6.0
-	github.com/gobwas/glob v0.2.3
-	github.com/google/chrometracing v0.0.0-20210413150014-55fded0163e7
-	github.com/google/uuid v1.3.0
-	github.com/grpc-ecosystem/go-grpc-middleware v1.3.0
-	github.com/hashicorp/go-gatedio v0.5.0
-	github.com/hashicorp/go-hclog v1.2.1
-	github.com/hashicorp/go-multierror v1.1.1
-	github.com/hashicorp/go-retryablehttp v0.6.8
-	github.com/karrick/godirwalk v1.16.1
-	github.com/mattn/go-isatty v0.0.14
-	github.com/mitchellh/cli v1.1.5
-	github.com/mitchellh/mapstructure v1.5.0
-	github.com/moby/sys/sequential v0.5.0
-	github.com/muhammadmuzzammil1998/jsonc v1.0.0
-	github.com/nightlyone/lockfile v1.0.0
-	github.com/pkg/errors v0.9.1
-	github.com/pyr-sh/dag v1.0.0
-	github.com/sabhiram/go-gitignore v0.0.0-20201211210132-54b8a0bf510f
-	github.com/schollz/progressbar/v3 v3.9.0
-	github.com/segmentio/ksuid v1.0.4
-	github.com/spf13/cobra v1.3.0
-	github.com/stretchr/testify v1.8.2
-	github.com/yookoala/realpath v1.0.0
-	golang.org/x/sync v0.1.0
-	golang.org/x/sys v0.13.0
-	google.golang.org/grpc v1.46.2
-	google.golang.org/protobuf v1.28.0
-	gotest.tools/v3 v3.3.0
-)
-
-require (
-	github.com/google/go-cmp v0.5.8 // indirect
-	github.com/tinylib/msgp v1.1.8 // indirect
-	zenhack.net/go/util v0.0.0-20230414204917-531d38494cf5 // indirect
-)
-
-require (
-	github.com/Masterminds/goutils v1.1.1 // indirect
-	github.com/Masterminds/semver/v3 v3.1.1 // indirect
-	github.com/Masterminds/sprig/v3 v3.2.1 // indirect
-	github.com/armon/go-radix v1.0.0 // indirect
-	github.com/davecgh/go-spew v1.1.1 // indirect
-	github.com/golang/protobuf v1.5.2 // indirect
-	github.com/hashicorp/errwrap v1.1.0 // indirect
-	github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
-	github.com/huandu/xstrings v1.3.2 // indirect
-	github.com/imdario/mergo v0.3.11 // indirect
-	github.com/inconshreveable/mousetrap v1.0.0 // indirect
-	github.com/mattn/go-colorable v0.1.12 // indirect
-	github.com/mattn/go-runewidth v0.0.13 // indirect
-	github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db // indirect
-	github.com/mitchellh/copystructure v1.0.0 // indirect
-	github.com/mitchellh/reflectwalk v1.0.1 // indirect
-	github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e // indirect
-	github.com/pmezard/go-difflib v1.0.0 // indirect
-	github.com/posener/complete v1.2.3 // indirect
-	github.com/rivo/uniseg v0.2.0 // indirect
-	github.com/shopspring/decimal v1.2.0 // indirect
-	github.com/spf13/cast v1.5.0 // indirect
-	github.com/spf13/pflag v1.0.5 // indirect
-	golang.org/x/crypto v0.14.0 // indirect
-	golang.org/x/net v0.17.0 // indirect
-	golang.org/x/term v0.13.0 // indirect
-	golang.org/x/text v0.13.0 // indirect
-	google.golang.org/genproto v0.0.0-20220519153652-3a47de7e79bd // indirect
-	gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f // indirect
-	gopkg.in/yaml.v3 v3.0.1 // indirect
-)
diff --git a/cli/go.sum b/cli/go.sum
deleted file mode 100644
index d0379f607753c..0000000000000
--- a/cli/go.sum
+++ /dev/null
@@ -1,945 +0,0 @@
h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= -github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= -github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= -github.com/hashicorp/go-cleanhttp v0.5.0/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= -github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= -github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ= -github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= -github.com/hashicorp/go-gatedio v0.5.0 h1:Jm1X5yP4yCqqWj5L1TgW7iZwCVPGtVc+mro5r/XX7Tg= -github.com/hashicorp/go-gatedio v0.5.0/go.mod h1:Lr3t8L6IyxD3DAeaUxGcgl2JnRUpWMCsmBl4Omu/2t4= -github.com/hashicorp/go-hclog v0.9.2/go.mod h1:5CU+agLiy3J7N7QjHK5d05KxGsuXiQLrjA0H7acj2lQ= -github.com/hashicorp/go-hclog v0.12.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ= -github.com/hashicorp/go-hclog v1.0.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ= -github.com/hashicorp/go-hclog v1.2.1 h1:YQsLlGDJgwhXFpucSPyVbCBviQtjlHv3jLTlp8YmtEw= -github.com/hashicorp/go-hclog v1.2.1/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M= -github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= -github.com/hashicorp/go-immutable-radix v1.3.1/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= -github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM= -github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= -github.com/hashicorp/go-multierror v1.1.0/go.mod h1:spPvp8C1qA32ftKqdAHm4hHTbPw+vmowP0z+KUhOZdA= -github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= -github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= -github.com/hashicorp/go-retryablehttp v0.5.3/go.mod h1:9B5zBasrRhHXnJnui7y6sL7es7NDiJgTc6Er0maI1Xs= -github.com/hashicorp/go-retryablehttp v0.6.8 h1:92lWxgpa+fF3FozM4B3UZtHZMJX8T5XT+TFdCxsPyWs= -github.com/hashicorp/go-retryablehttp v0.6.8/go.mod h1:vAew36LZh98gCBJNLH42IQ1ER/9wtLZZ8meHqQvEYWY= -github.com/hashicorp/go-rootcerts v1.0.2/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8= -github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU= -github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4= -github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= -github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= -github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= -github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= -github.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4= -github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= -github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64= -github.com/hashicorp/mdns v1.0.1/go.mod h1:4gW7WsVCke5TE7EPeYliwHlRUyBtfCwuFwuMg2DmyNY= -github.com/hashicorp/mdns v1.0.4/go.mod h1:mtBihi+LeNXGtG8L9dX59gAEa12BDtBQSp4v/YAJqrc= -github.com/hashicorp/memberlist v0.2.2/go.mod h1:MS2lj3INKhZjWNqd3N0m3J+Jxf3DAOnAH9VT3Sh9MUE= -github.com/hashicorp/memberlist v0.3.0/go.mod h1:MS2lj3INKhZjWNqd3N0m3J+Jxf3DAOnAH9VT3Sh9MUE= 
-github.com/hashicorp/serf v0.9.5/go.mod h1:UWDWwZeL5cuWDJdl0C6wrvrUwEqtQ4ZKBKKENpqIUyk= -github.com/hashicorp/serf v0.9.6/go.mod h1:TXZNMjZQijwlDvp+r0b63xZ45H7JmCmgg4gpTwn9UV4= -github.com/hinshun/vt10x v0.0.0-20220119200601-820417d04eec/go.mod h1:Q48J4R4DvxnHolD5P8pOtXigYlRuPLGl6moFx3ulM68= -github.com/huandu/xstrings v1.3.1/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= -github.com/huandu/xstrings v1.3.2 h1:L18LIDzqlW6xN2rEkpdV8+oL/IXWJ1APd+vsdYy4Wdw= -github.com/huandu/xstrings v1.3.2/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= -github.com/iancoleman/strcase v0.2.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho= -github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= -github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= -github.com/imdario/mergo v0.3.11 h1:3tnifQM4i+fbajXKBHXWEH+KvNHqojZ778UH75j3bGA= -github.com/imdario/mergo v0.3.11/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= -github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM= -github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= -github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= -github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= -github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= -github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= -github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= -github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= -github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= -github.com/k0kubun/go-ansi v0.0.0-20180517002512-3bf9e2903213/go.mod h1:vNUNkEQ1e29fT/6vq2aBdFsgNPmy8qMdSay1npru+Sw= -github.com/karrick/godirwalk v1.16.1 h1:DynhcF+bztK8gooS0+NDJFrdNZjJ3gzVzC545UNA9iw= -github.com/karrick/godirwalk v1.16.1/go.mod h1:j4mkqPuvaLI8mp1DroR3P6ad7cyYd4c1qeJ3RV7ULlk= -github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:CzGEWj7cYgsdH8dAjBGEr58BoE7ScuLd+fwFZ44+/x8= -github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= -github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= -github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= -github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= -github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= -github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= -github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= -github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0= -github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= -github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= -github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= -github.com/lyft/protoc-gen-star v0.5.3/go.mod h1:V0xaHgaf5oCCqmcxYcWiDfTiKsZsRc87/1qhoTACD8w= -github.com/magiconair/properties v1.8.5/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60= 
-github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= -github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= -github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= -github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= -github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= -github.com/mattn/go-colorable v0.1.12 h1:jF+Du6AlPIjs2BiUiQlKOX0rt3SujHxPnksPKZbaA40= -github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= -github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= -github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= -github.com/mattn/go-isatty v0.0.10/go.mod h1:qgIWMr58cqv1PHHyhnkY9lrL7etaEgOFcMEpPG5Rm84= -github.com/mattn/go-isatty v0.0.11/go.mod h1:PhnuNfih5lzO57/f3n+odYbM4JtupLOxQOAqxQCu2WE= -github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= -github.com/mattn/go-isatty v0.0.14 h1:yVuAays6BHfxijgZPzw+3Zlu5yQgKGP2/hcQbHb7S9Y= -github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= -github.com/mattn/go-runewidth v0.0.13 h1:lTGmDsbAYt5DmK6OnoV7EuIF1wEIFAcxld6ypU4OSgU= -github.com/mattn/go-runewidth v0.0.13/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= -github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= -github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE= -github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg= -github.com/miekg/dns v1.1.26/go.mod h1:bPDLeHnStXmXAq1m/Ch/hvfNHr14JKNPMBo3VZKjuso= -github.com/miekg/dns v1.1.41/go.mod h1:p6aan82bvRIyn+zDIv9xYNUpwa73JcSh9BKwknJysuI= -github.com/mitchellh/cli v1.1.0/go.mod h1:xcISNoH86gajksDmfB23e/pu+B+GeFRMYmoHXxx3xhI= -github.com/mitchellh/cli v1.1.5 h1:OxRIeJXpAMztws/XHlN2vu6imG5Dpq+j61AzAX5fLng= -github.com/mitchellh/cli v1.1.5/go.mod h1:v8+iFts2sPIKUV1ltktPXMCC8fumSKFItNcD2cLtRR4= -github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db h1:62I3jR2EmQ4l5rM/4FEfDWcRD+abF5XlKShorW5LRoQ= -github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db/go.mod h1:l0dey0ia/Uv7NcFFVbCLtqEBQbrT4OCwCSKTEv6enCw= -github.com/mitchellh/copystructure v1.0.0 h1:Laisrj+bAB6b/yJwB5Bt3ITZhGJdqmxquMKeZ+mmkFQ= -github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFWgYaq1OVrnRcwhnw= -github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= -github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= -github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= -github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= -github.com/mitchellh/mapstructure v1.4.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= -github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= -github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= -github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= -github.com/mitchellh/reflectwalk v1.0.1 h1:FVzMWA5RllMAKIdUSC8mdWo3XtwoecrH79BY70sEEpE= -github.com/mitchellh/reflectwalk 
v1.0.1/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= -github.com/moby/sys/sequential v0.5.0 h1:OPvI35Lzn9K04PBbCLW0g4LcFAJgHsvXsRyewg5lXtc= -github.com/moby/sys/sequential v0.5.0/go.mod h1:tH2cOOs5V9MlPiXcQzRC+eEyab644PWKGRYaaV5ZZlo= -github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= -github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= -github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= -github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= -github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= -github.com/muhammadmuzzammil1998/jsonc v1.0.0 h1:8o5gBQn4ZA3NBA9DlTujCj2a4w0tqWrPVjDwhzkgTIs= -github.com/muhammadmuzzammil1998/jsonc v1.0.0/go.mod h1:saF2fIVw4banK0H4+/EuqfFLpRnoy5S+ECwTOCcRcSU= -github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= -github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs= -github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= -github.com/nightlyone/lockfile v1.0.0 h1:RHep2cFKK4PonZJDdEl4GmkabuhbsRMgk/k3uAmxBiA= -github.com/nightlyone/lockfile v1.0.0/go.mod h1:rywoIealpdNse2r832aiD9jRk8ErCatROs6LzC841CI= -github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= -github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= -github.com/pascaldekloe/goe v0.1.0/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= -github.com/pelletier/go-toml v1.9.4/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= -github.com/philhofer/fwd v1.1.2 h1:bnDivRJ1EWPjUIRXV5KfORO897HTbpFAQddBdE8t7Gw= -github.com/philhofer/fwd v1.1.2/go.mod h1:qkPdfjR2SIEbspLqpe1tO4n5yICnr2DY7mqEx2tUTP0= -github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= -github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= -github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= -github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= -github.com/pkg/sftp v1.10.1/go.mod h1:lYOWFsE0bwd1+KfKJaKeuokY15vzFx25BLbzYYoAxZI= -github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= -github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI= -github.com/posener/complete v1.2.3 h1:NP0eAhjcjImqslEwo/1hq7gpajME0fTLTezBKDqfXqo= -github.com/posener/complete v1.2.3/go.mod h1:WZIdtGGp+qx0sLrYKtIRAruyNpv6hFCicSgv7Sy7s/s= -github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= -github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= -github.com/prometheus/client_golang v1.4.0/go.mod h1:e9GMxYsXl05ICDXkRhurwBS4Q3OK1iX/F2sw+iXX5zU= -github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= -github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= -github.com/prometheus/client_model 
v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= -github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= -github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= -github.com/prometheus/common v0.9.1/go.mod h1:yhUN8i9wzaXS3w1O07YhxHEBxD+W35wd8bs7vj7HSQ4= -github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= -github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= -github.com/prometheus/procfs v0.0.8/go.mod h1:7Qr8sr6344vo1JqZ6HhLceV9o3AJ1Ff+GxbHq6oeK9A= -github.com/pyr-sh/dag v1.0.0 h1:hIyuIe8nvHZuwFUjTXoLJP7cN5Xr3IAQukiX2S8NAS0= -github.com/pyr-sh/dag v1.0.0/go.mod h1:alhhyzDdT3KwVmFc+pF4uhMSfRSTbiAUMcqfrfPSs0Y= -github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY= -github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= -github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= -github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= -github.com/rogpeppe/go-internal v1.6.1 h1:/FiVV8dS/e+YqF2JvO3yXRFbBLTIuSDkuC7aBOAvL+k= -github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= -github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= -github.com/sabhiram/go-gitignore v0.0.0-20201211210132-54b8a0bf510f h1:8P2MkG70G76gnZBOPGwmMIgwBb/rESQuwsJ7K8ds4NE= -github.com/sabhiram/go-gitignore v0.0.0-20201211210132-54b8a0bf510f/go.mod h1:+ePHsJ1keEjQtpvf9HHw0f4ZeJ0TLRsxhunSI2hYJSs= -github.com/sagikazarmark/crypt v0.3.0/go.mod h1:uD/D+6UF4SrIR1uGEv7bBNkNqLGqUr43MRiaGWX1Nig= -github.com/schollz/progressbar/v3 v3.9.0 h1:k9SRNQ8KZyibz1UZOaKxnkUE3iGtmGSDt1YY9KlCYQk= -github.com/schollz/progressbar/v3 v3.9.0/go.mod h1:W5IEwbJecncFGBvuEh4A7HT1nZZ6WNIL2i3qbnI0WKY= -github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= -github.com/segmentio/ksuid v1.0.4 h1:sBo2BdShXjmcugAMwjugoGUdUV0pcxY5mW4xKRn3v4c= -github.com/segmentio/ksuid v1.0.4/go.mod h1:/XUiZBD3kVx5SmUOl55voK5yeAbBNNIed+2O73XgrPE= -github.com/shopspring/decimal v1.2.0 h1:abSATXmQEYyShuxI4/vyW3tV1MrKAJzCZ/0zLUXYbsQ= -github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= -github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= -github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= -github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= -github.com/spf13/afero v1.3.3/go.mod h1:5KUK8ByomD5Ti5Artl0RtHeI5pTF7MIDuXL3yY520V4= -github.com/spf13/afero v1.6.0/go.mod h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z93I= -github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= -github.com/spf13/cast v1.4.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= -github.com/spf13/cast v1.5.0 h1:rj3WzYc11XZaIZMPKmwP96zkFEnnAmV8s6XbB2aY32w= -github.com/spf13/cast v1.5.0/go.mod h1:SpXXQ5YoyJw6s3/6cMTQuxvgRl3PCJiyaX9p6b155UU= -github.com/spf13/cobra v1.3.0 h1:R7cSvGu+Vv+qX0gW5R/85dx2kmmJT5z5NM8ifdYjdn0= -github.com/spf13/cobra v1.3.0/go.mod h1:BrRVncBjOJa/eUcVVm9CE+oC6as8k+VYr4NY7WCi9V4= -github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo= 
-github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= -github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= -github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= -github.com/spf13/viper v1.10.0/go.mod h1:SoyBPwAtKDzypXNDFKN5kzH7ppppbGZtls1UpIy5AsM= -github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= -github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= -github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= -github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= -github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= -github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= -github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stretchr/testify v1.7.2/go.mod h1:R6va5+xMeoiuVRoj+gSkQ7d3FALtqAAGI1FQKckRals= -github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= -github.com/stretchr/testify v1.8.2 h1:+h33VjcLVPDHtOdpUCuF+7gSuG3yGIftsP1YvFihtJ8= -github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= -github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw= -github.com/tinylib/msgp v1.1.8 h1:FCXC1xanKO4I8plpHGH2P7koL/RzZs12l/+r7vakfm0= -github.com/tinylib/msgp v1.1.8/go.mod h1:qkpG+2ldGg4xRFmx+jfTvZPxfGFhi64BcnL9vkCm/Tw= -github.com/tv42/httpunix v0.0.0-20150427012821-b75d8614f926/go.mod h1:9ESjWnEqriFuLhtthL60Sar/7RFoluCcXsuvEwTV5KM= -github.com/yookoala/realpath v1.0.0 h1:7OA9pj4FZd+oZDsyvXWQvjn5oBdcHRTV44PpdMSuImQ= -github.com/yookoala/realpath v1.0.0/go.mod h1:gJJMA9wuX7AcqLy1+ffPatSCySA1FQ2S8Ya9AIoYBpE= -github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= -github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= -go.etcd.io/etcd/api/v3 v3.5.1/go.mod h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs= -go.etcd.io/etcd/client/pkg/v3 v3.5.1/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g= -go.etcd.io/etcd/client/v2 v2.305.1/go.mod h1:pMEacxZW7o8pg4CrFE7pquyCJJzZvkvdD2RibOCCCGs= -go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= -go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= -go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= -go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= -go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= -go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= -go.opencensus.io v0.23.0/go.mod 
h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= -go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= -go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= -go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= -go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= -go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU= -go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= -go.uber.org/zap v1.17.0/go.mod h1:MXVU+bhUf/A7Xi2HNOnopQOrmycQ5Ih87HtOu4q5SSo= -golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= -golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= -golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20190923035154-9ee001bba392/go.mod h1:/lpIB1dKB+9EgE3H3cr1v9wB50oz8l4C4h62xy7jSTY= -golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20200414173820-0848c9571904/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20200820211705-5c72a883971a/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20210817164053-32db794688a5/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.0.0-20220131195533-30dcbda58838/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= -golang.org/x/crypto v0.14.0 h1:wBqGXzWJW6m1XrIKlAH0Hs1JJ7+9KBwnIO8v66Q9cHc= -golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4= -golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= -golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= -golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY= -golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= -golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= -golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= -golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= -golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= -golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= 
-golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= -golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= -golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= -golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= -golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= -golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/lint v0.0.0-20210508222113-6edffad5e616/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= -golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= -golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= -golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= -golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= -golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= -golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.5.0/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= -golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= -golang.org/x/mod v0.7.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= -golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod 
h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= -golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190923162816-aa69164e4478/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= -golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= -golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= -golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc= -golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= -golang.org/x/net v0.0.0-20210410081132-afb366fc7cd1/go.mod h1:9tjilg8BloeKEkVJvy7fQ90B1CfIiPueXVOjqfkSzI8= -golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net 
v0.0.0-20210813160813-60bc85c4be6d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= -golang.org/x/net v0.3.0/go.mod h1:MBQ8lrhLObU/6UmLb4fmbmk5OcyYmqtbGd/9yIeKjEE= -golang.org/x/net v0.17.0 h1:pVaXccu2ozPjCXewfr1S7xza/zcXTity9cCdXQYSjIM= -golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= -golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= -golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20201109201403-9fd604954f58/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210220000619-9bb904979d93/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210313182246-cd4f82c27b84/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210628180205-a41e5a781914/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210805134026-6f1e6394065a/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210819190943-2bc19b11175f/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20211005180243-6b3c2da341f1/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync 
v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.1.0 h1:wsuoTGHzEhffawBOhz5CYhcrV4IdKZbEyZjBMuTp12o= -golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190922100055-0a153f010e69/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190924154521-2837fb4f24fe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191008105621-543471e840be/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200124204421-9fbb57f87de9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210303074136-134d130e1a04/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210305230114-8fe3ee5dd75b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210403161142-5e06dd20ab57/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210514084401-e8d321eab015/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210603125802-9665404d3644/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210816183151-1e6c022a8912/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210823070655-63515b42dcdf/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210908233432-aa78b53d3365/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys 
v0.0.0-20211007075335-d3039528d8ac/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20211124211545-fe61309f8881/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20211205182925-97ca703d548d/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220128215802-99c3d69c2c27/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220422013727-9388b58f7150/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220908164124-27713097b956/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.13.0 h1:Af8nKPmuFypiUBjVoU9V20FiaFXOcuZI21p0ycVYYGE= -golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= -golang.org/x/term v0.0.0-20210503060354-a79de5458b56/go.mod h1:tfny5GFUkzUvx4ps4ajbZsCe5lw1metzhBm9T3x7oIY= -golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/term v0.3.0/go.mod h1:q750SLmJuPmVoN1blW3UFBPREJfb1KmY3vwxfr+nFDA= -golang.org/x/term v0.13.0 h1:bb+I9cTfFazGW51MZqBVmZy7+JEJMouUHTUSKVQLBek= -golang.org/x/term v0.13.0/go.mod h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U= -golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= -golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= -golang.org/x/text v0.5.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= -golang.org/x/text v0.13.0 h1:ablQoSUd0tRdKxZewP80B+BaqeKJuVhuRxj/dkrun3k= -golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= -golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= -golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= 
-golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20190907020128-2ca718005c18/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= -golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= -golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= -golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools 
v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= -golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= -golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= -golang.org/x/tools v0.4.0/go.mod h1:UE5sM2OK9E/d67R0ANs2xJizIymRP5gJU295PvKXxjQ= -golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= -google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= -google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= -google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= -google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= -google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= -google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= -google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= -google.golang.org/api 
v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= -google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM= -google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc= -google.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg= -google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE= -google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8= -google.golang.org/api v0.41.0/go.mod h1:RkxM5lITDfTzmyKFPt+wGrCJbVfniCr2ool8kTBzRTU= -google.golang.org/api v0.43.0/go.mod h1:nQsDGjRXMo4lvh5hP0TKqF244gqhGcr/YSIykhUk/94= -google.golang.org/api v0.47.0/go.mod h1:Wbvgpq1HddcWVtzsVLyfLp8lDg6AA241LmgIL59tHXo= -google.golang.org/api v0.48.0/go.mod h1:71Pr1vy+TAZRPkPs/xlCf5SsU8WjuAWv1Pfjbtukyy4= -google.golang.org/api v0.50.0/go.mod h1:4bNT5pAuq5ji4SRZm+5QIkjny9JAyVD/3gaSihNefaw= -google.golang.org/api v0.51.0/go.mod h1:t4HdrdoNgyN5cbEfm7Lum0lcLDLiise1F8qDKX00sOU= -google.golang.org/api v0.54.0/go.mod h1:7C4bFFOvVDGXjfDTAsgGwDgAxRDeQ4X8NvUedIt6z3k= -google.golang.org/api v0.55.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE= -google.golang.org/api v0.56.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE= -google.golang.org/api v0.57.0/go.mod h1:dVPlbZyBo2/OjBpmvNdpn2GRm6rPy75jyU7bmhdrMgI= -google.golang.org/api v0.59.0/go.mod h1:sT2boj7M9YJxZzgeZqXogmhfmRWDtPzT31xkieUbuZU= -google.golang.org/api v0.61.0/go.mod h1:xQRti5UdCmoCEqFxcz93fTl338AVqDgyaDRuOZ3hg9I= -google.golang.org/api v0.62.0/go.mod h1:dKmwPCydfsad4qCH08MSdgWjfHOyfpd4VtDGgRFdavw= -google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= -google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= -google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= -google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= -google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= -google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= -google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= -google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= -google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= -google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= -google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= -google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= 
-google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA= -google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200423170343-7949de9c1215/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= -google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= -google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= -google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20200904004341-0bd0a958aa1d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210222152913-aa3ee6e6a81c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210303154014-9728d6b83eeb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210310155132-4ce2db91004e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210319143718-93e7006c17a6/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210402141018-6c239bbf2bb1/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A= 
-google.golang.org/genproto v0.0.0-20210513213006-bf773b8c8384/go.mod h1:P3QM42oQyzQSnHPnZ/vqoCdDmzH28fzWByN9asMeM8A= -google.golang.org/genproto v0.0.0-20210602131652-f16073e35f0c/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= -google.golang.org/genproto v0.0.0-20210604141403-392c879c8b08/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= -google.golang.org/genproto v0.0.0-20210608205507-b6d2f5bf0d7d/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= -google.golang.org/genproto v0.0.0-20210624195500-8bfb893ecb84/go.mod h1:SzzZ/N+nwJDaO1kznhnlzqS8ocJICar6hYhVyhi++24= -google.golang.org/genproto v0.0.0-20210713002101-d411969a0d9a/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k= -google.golang.org/genproto v0.0.0-20210716133855-ce7ef5c701ea/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k= -google.golang.org/genproto v0.0.0-20210728212813-7823e685a01f/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= -google.golang.org/genproto v0.0.0-20210805201207-89edb61ffb67/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= -google.golang.org/genproto v0.0.0-20210813162853-db860fec028c/go.mod h1:cFeNkxwySK631ADgubI+/XFU/xp8FD5KIVV4rj8UC5w= -google.golang.org/genproto v0.0.0-20210821163610-241b8fcbd6c8/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= -google.golang.org/genproto v0.0.0-20210828152312-66f60bf46e71/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= -google.golang.org/genproto v0.0.0-20210831024726-fe130286e0e2/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= -google.golang.org/genproto v0.0.0-20210903162649-d08c68adba83/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= -google.golang.org/genproto v0.0.0-20210909211513-a8c4777a87af/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= -google.golang.org/genproto v0.0.0-20210924002016-3dee208752a0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20211008145708-270636b82663/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20211028162531-8db9c33dc351/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20211118181313-81c1377c94b1/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20211129164237-f09f9a12af12/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20211203200212-54befc351ae9/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20211206160659-862468c7d6e0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20211208223120-3a66f561d7aa/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20220519153652-3a47de7e79bd h1:e0TwkXOdbnH/1x5rc5MZ/VYyiZ4v+RdVfrGMqEwT68I= -google.golang.org/genproto v0.0.0-20220519153652-3a47de7e79bd/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4= -google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= -google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= -google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= -google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= -google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= -google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= -google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= 
-google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= -google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60= -google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= -google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= -google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= -google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= -google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0= -google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= -google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8= -google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= -google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= -google.golang.org/grpc v1.36.1/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= -google.golang.org/grpc v1.37.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= -google.golang.org/grpc v1.37.1/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= -google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= -google.golang.org/grpc v1.39.0/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE= -google.golang.org/grpc v1.39.1/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE= -google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= -google.golang.org/grpc v1.40.1/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= -google.golang.org/grpc v1.42.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU= -google.golang.org/grpc v1.46.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= -google.golang.org/grpc v1.46.2 h1:u+MLGgVf7vRdjEYZ8wDFhAVNmhkbJ5hmrA1LMWK1CAQ= -google.golang.org/grpc v1.46.2/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= -google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= -google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= -google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= -google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= -google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= -google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= -google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= -google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= -google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= -google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.28.0 h1:w43yiav+6bVFTBQFZX0r7ipe9JQ1QsbMgHwbBziscLw= -google.golang.org/protobuf 
v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= -gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= -gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU= -gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= -gopkg.in/ini.v1 v1.66.2/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= -gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= -gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= -gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= -gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gotest.tools/v3 v3.3.0 h1:MfDY1b1/0xN1CyMlQDac0ziEy9zJQd9CXBRRDHw2jJo= -gotest.tools/v3 v3.3.0/go.mod h1:Mcr9QNxkg0uMvy/YElmo4SpXgJKWgQvYrT7Kw5RzJ1A= -honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= -honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= -honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= -rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= -rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= -rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= -zenhack.net/go/util v0.0.0-20230414204917-531d38494cf5 h1:yksDCGMVzyn3vlyf0GZ3huiF5FFaMGQpQ3UJvR0EoGA= -zenhack.net/go/util v0.0.0-20230414204917-531d38494cf5/go.mod h1:1LtNdPAs8WH+BTcQiZAOo2MIKD/5jyK/u7sZ9ZPe5SE= diff --git a/cli/internal/analytics/analytics.go b/cli/internal/analytics/analytics.go deleted file mode 100644 index a6686c8a6ab4a..0000000000000 --- a/cli/internal/analytics/analytics.go +++ /dev/null @@ -1,179 +0,0 @@ -package analytics - -import ( - "context" - "sync" - "time" - - 
"github.com/google/uuid" - "github.com/hashicorp/go-hclog" - "github.com/mitchellh/mapstructure" - "github.com/vercel/turbo/cli/internal/util" -) - -type Events = []map[string]interface{} - -type EventPayload = interface{} - -type Recorder interface { - LogEvent(payload EventPayload) -} - -type Client interface { - Recorder - Close() - CloseWithTimeout(timeout time.Duration) -} - -type Sink interface { - RecordAnalyticsEvents(ctx context.Context, events Events) error -} - -type nullSink struct{} - -func (n *nullSink) RecordAnalyticsEvents(_ context.Context, _ Events) error { - return nil -} - -// NullSink is an analytics sink to use in the event that we don't want to send -// analytics -var NullSink = &nullSink{} - -type client struct { - ch chan<- EventPayload - cancel func() - - worker *worker -} - -type worker struct { - buffer []EventPayload - ch <-chan EventPayload - ctx context.Context - doneSemaphore util.Semaphore - sessionID uuid.UUID - sink Sink - wg sync.WaitGroup - logger hclog.Logger -} - -const bufferThreshold = 10 -const eventTimeout = 200 * time.Millisecond -const noTimeout = 24 * time.Hour -const requestTimeout = 10 * time.Second - -func newWorker(ctx context.Context, ch <-chan EventPayload, sink Sink, logger hclog.Logger) *worker { - buffer := []EventPayload{} - sessionID := uuid.New() - w := &worker{ - buffer: buffer, - ch: ch, - ctx: ctx, - doneSemaphore: util.NewSemaphore(1), - sessionID: sessionID, - sink: sink, - logger: logger, - } - w.doneSemaphore.Acquire() - go w.analyticsClient() - return w -} - -func NewClient(parent context.Context, sink Sink, logger hclog.Logger) Client { - ch := make(chan EventPayload) - ctx, cancel := context.WithCancel(parent) - // creates and starts the worker - worker := newWorker(ctx, ch, sink, logger) - s := &client{ - ch: ch, - cancel: cancel, - worker: worker, - } - return s -} - -func (s *client) LogEvent(event EventPayload) { - s.ch <- event -} - -func (s *client) Close() { - s.cancel() - s.worker.Wait() -} - -func (s *client) CloseWithTimeout(timeout time.Duration) { - ch := make(chan struct{}) - go func() { - s.Close() - close(ch) - }() - select { - case <-ch: - case <-time.After(timeout): - } -} - -func (w *worker) Wait() { - w.doneSemaphore.Acquire() - w.wg.Wait() -} - -func (w *worker) analyticsClient() { - timeout := time.After(noTimeout) - for { - select { - case e := <-w.ch: - w.buffer = append(w.buffer, e) - if len(w.buffer) == bufferThreshold { - w.flush() - timeout = time.After(noTimeout) - } else { - timeout = time.After(eventTimeout) - } - case <-timeout: - w.flush() - timeout = time.After(noTimeout) - case <-w.ctx.Done(): - w.flush() - w.doneSemaphore.Release() - return - } - } -} - -func (w *worker) flush() { - if len(w.buffer) > 0 { - w.sendEvents(w.buffer) - w.buffer = []EventPayload{} - } -} - -func (w *worker) sendEvents(events []EventPayload) { - w.wg.Add(1) - go func() { - defer w.wg.Done() - payload, err := addSessionID(w.sessionID.String(), events) - if err != nil { - w.logger.Debug("failed to encode cache usage analytics", "error", err) - return - } - ctx, cancel := context.WithTimeout(context.Background(), requestTimeout) - defer cancel() - err = w.sink.RecordAnalyticsEvents(ctx, payload) - if err != nil { - w.logger.Debug("failed to record cache usage analytics", "error", err) - } - }() -} - -func addSessionID(sessionID string, events []EventPayload) (Events, error) { - eventMaps := []map[string]interface{}{} - err := mapstructure.Decode(events, &eventMaps) - if err != nil { - return nil, err - } - for _, 
event := range eventMaps { - event["sessionId"] = sessionID - } - return eventMaps, nil -} diff --git a/cli/internal/analytics/analytics_test.go b/cli/internal/analytics/analytics_test.go deleted file mode 100644 index 03f02acaacc63..0000000000000 --- a/cli/internal/analytics/analytics_test.go +++ /dev/null @@ -1,192 +0,0 @@ -package analytics - -import ( - "context" - "sync" - "testing" - "time" - - "github.com/hashicorp/go-hclog" -) - -type dummySink struct { - events []*Events - err error - mu sync.Mutex - ch chan struct{} -} - -type evt struct { - I int -} - -func newDummySink() *dummySink { - return &dummySink{ - events: []*Events{}, - ch: make(chan struct{}, 1), - } -} - -func (d *dummySink) RecordAnalyticsEvents(_ context.Context, events Events) error { - d.mu.Lock() - defer d.mu.Unlock() - // Make a copy in case a test is holding a copy too - eventsCopy := make([]*Events, len(d.events)) - copy(eventsCopy, d.events) - d.events = append(eventsCopy, &events) - d.ch <- struct{}{} - return d.err -} - -func (d *dummySink) Events() []*Events { - d.mu.Lock() - defer d.mu.Unlock() - return d.events -} - -func (d *dummySink) ExpectImmediateMessage(t *testing.T) { - select { - case <-time.After(150 * time.Millisecond): - t.Errorf("expected to not wait out the flush timeout") - case <-d.ch: - } -} - -func (d *dummySink) ExpectTimeoutThenMessage(t *testing.T) { - select { - case <-d.ch: - t.Errorf("Expected to wait out the flush timeout") - case <-time.After(150 * time.Millisecond): - } - <-d.ch -} - -func Test_batching(t *testing.T) { - d := newDummySink() - ctx := context.Background() - c := NewClient(ctx, d, hclog.Default()) - for i := 0; i < 2; i++ { - c.LogEvent(&evt{i}) - } - found := d.Events() - if len(found) != 0 { - t.Errorf("got %v events, want 0 due to batching", len(found)) - } - // Should timeout - d.ExpectTimeoutThenMessage(t) - found = d.Events() - if len(found) != 1 { - t.Errorf("got %v, want 1 batch to have been flushed", len(found)) - } - payloads := *found[0] - if len(payloads) != 2 { - t.Errorf("got %v, want 2 payloads to have been flushed", len(payloads)) - } -} - -func Test_batchingAcrossTwoBatches(t *testing.T) { - d := newDummySink() - ctx := context.Background() - c := NewClient(ctx, d, hclog.Default()) - for i := 0; i < 12; i++ { - c.LogEvent(&evt{i}) - } - // We sent more than the batch size, expect a message immediately - d.ExpectImmediateMessage(t) - found := d.Events() - if len(found) != 1 { - t.Errorf("got %v, want 1 batch to have been flushed", len(found)) - } - payloads := *found[0] - if len(payloads) != 10 { - t.Errorf("got %v, want 10 payloads to have been flushed", len(payloads)) - } - // Should timeout second batch - d.ExpectTimeoutThenMessage(t) - found = d.Events() - if len(found) != 2 { - t.Errorf("got %v, want 2 batches to have been flushed", len(found)) - } - payloads = *found[1] - if len(payloads) != 2 { - t.Errorf("got %v, want 2 payloads to have been flushed", len(payloads)) - } -} - -func Test_closing(t *testing.T) { - d := newDummySink() - ctx := context.Background() - c := NewClient(ctx, d, hclog.Default()) - for i := 0; i < 2; i++ { - c.LogEvent(&evt{i}) - } - found := d.Events() - if len(found) != 0 { - t.Errorf("got %v events, want 0 due to batching", len(found)) - } - c.Close() - found = d.Events() - if len(found) != 1 { - t.Errorf("got %v, want 1 batch to have been flushed", len(found)) - } - payloads := *found[0] - if len(payloads) != 2 { - t.Errorf("got %v, want 2 payloads to have been flushed", len(payloads)) - } -} - -func 
Test_closingByContext(t *testing.T) { - d := newDummySink() - ctx, cancel := context.WithCancel(context.Background()) - c := NewClient(ctx, d, hclog.Default()) - for i := 0; i < 2; i++ { - c.LogEvent(&evt{i}) - } - found := d.Events() - if len(found) != 0 { - t.Errorf("got %v events, want 0 due to batching", len(found)) - } - cancel() - d.ExpectImmediateMessage(t) - found = d.Events() - if len(found) != 1 { - t.Errorf("got %v, want 1 batch to have been flushed", len(found)) - } - payloads := *found[0] - if len(payloads) != 2 { - t.Errorf("got %v, want 2 payloads to have been flushed", len(payloads)) - } -} - -func Test_addSessionId(t *testing.T) { - events := []struct { - Foo string `mapstructure:"foo"` - }{ - { - Foo: "foo1", - }, - { - Foo: "foo2", - }, - } - arr := make([]interface{}, len(events)) - for i, event := range events { - arr[i] = event - } - sessionID := "my-uuid" - output, err := addSessionID(sessionID, arr) - if err != nil { - t.Errorf("failed to encode analytics events: %v", err) - } - if len(output) != 2 { - t.Errorf("len output got %v, want 2", len(output)) - } - if output[0]["foo"] != "foo1" { - t.Errorf("first event foo got %v, want foo1", output[0]["foo"]) - } - for i, event := range output { - if event["sessionId"] != "my-uuid" { - t.Errorf("event %v sessionId got %v, want %v", i, event["sessionId"], sessionID) - } - } -} diff --git a/cli/internal/cache/async_cache.go b/cli/internal/cache/async_cache.go deleted file mode 100644 index 191d7a3856bef..0000000000000 --- a/cli/internal/cache/async_cache.go +++ /dev/null @@ -1,82 +0,0 @@ -// Adapted from https://github.com/thought-machine/please -// Copyright Thought Machine, Inc. or its affiliates. All Rights Reserved. -// SPDX-License-Identifier: Apache-2.0 -package cache - -import ( - "sync" - - "github.com/vercel/turbo/cli/internal/turbopath" -) - -// An asyncCache is a wrapper around a Cache interface that handles incoming -// store requests asynchronously and attempts to return immediately. -// The requests are handled on an internal queue, if that fills up then -// incoming requests will start to block again until it empties. -// Retrieval requests are still handled synchronously. -type asyncCache struct { - requests chan cacheRequest - realCache Cache - wg sync.WaitGroup -} - -// A cacheRequest models an incoming cache request on our queue. 
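// To illustrate (a hedged sketch with hypothetical hash and duration values,
// not lines from the original file): a caller's store such as
//
//	_ = turboCache.Put(anchor, "some-hash", 42, files)
//
// hands exactly one cacheRequest to a worker and returns without waiting for
// the wrapped cache to finish writing; the opts.Workers goroutines started in
// newAsyncCache below forward each request to realCache.Put in the background.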
-type cacheRequest struct {
-	anchor   turbopath.AbsoluteSystemPath
-	key      string
-	duration int
-	files    []turbopath.AnchoredSystemPath
-}
-
-func newAsyncCache(realCache Cache, opts Opts) Cache {
-	c := &asyncCache{
-		requests:  make(chan cacheRequest),
-		realCache: realCache,
-	}
-	c.wg.Add(opts.Workers)
-	for i := 0; i < opts.Workers; i++ {
-		go c.run()
-	}
-	return c
-}
-
-func (c *asyncCache) Put(anchor turbopath.AbsoluteSystemPath, key string, duration int, files []turbopath.AnchoredSystemPath) error {
-	c.requests <- cacheRequest{
-		anchor:   anchor,
-		key:      key,
-		files:    files,
-		duration: duration,
-	}
-	return nil
-}
-
-func (c *asyncCache) Fetch(anchor turbopath.AbsoluteSystemPath, key string, files []string) (ItemStatus, []turbopath.AnchoredSystemPath, error) {
-	return c.realCache.Fetch(anchor, key, files)
-}
-
-func (c *asyncCache) Exists(key string) ItemStatus {
-	return c.realCache.Exists(key)
-}
-
-func (c *asyncCache) Clean(anchor turbopath.AbsoluteSystemPath) {
-	c.realCache.Clean(anchor)
-}
-
-func (c *asyncCache) CleanAll() {
-	c.realCache.CleanAll()
-}
-
-func (c *asyncCache) Shutdown() {
-	// fmt.Println("Shutting down cache workers...")
-	close(c.requests)
-	c.wg.Wait()
-	// fmt.Println("Shut down all cache workers")
-}
-
-// run implements the actual async logic.
-func (c *asyncCache) run() {
-	for r := range c.requests {
-		_ = c.realCache.Put(r.anchor, r.key, r.duration, r.files)
-	}
-	c.wg.Done()
-}
diff --git a/cli/internal/cache/cache.go b/cli/internal/cache/cache.go
deleted file mode 100644
index 94a183ab65fb9..0000000000000
--- a/cli/internal/cache/cache.go
+++ /dev/null
@@ -1,336 +0,0 @@
-// Package cache abstracts storing and fetching previously run tasks
-//
-// Adapted from https://github.com/thought-machine/please
-// Copyright Thought Machine, Inc. or its affiliates. All Rights Reserved.
-// SPDX-License-Identifier: Apache-2.0
-package cache
-
-import (
-	"errors"
-	"sync"
-
-	"github.com/vercel/turbo/cli/internal/analytics"
-	"github.com/vercel/turbo/cli/internal/fs"
-	"github.com/vercel/turbo/cli/internal/turbopath"
-	"github.com/vercel/turbo/cli/internal/util"
-	"golang.org/x/sync/errgroup"
-)
-
-// Cache is an abstracted way to cache/fetch previously run tasks
-type Cache interface {
-	// Fetch returns true if there is a cache hit. It is expected to move files
-	// into their correct position as a side effect
-	Fetch(anchor turbopath.AbsoluteSystemPath, hash string, files []string) (ItemStatus, []turbopath.AnchoredSystemPath, error)
-	Exists(hash string) ItemStatus
-	// Put caches files for a given hash
-	Put(anchor turbopath.AbsoluteSystemPath, hash string, duration int, files []turbopath.AnchoredSystemPath) error
-	Clean(anchor turbopath.AbsoluteSystemPath)
-	CleanAll()
-	Shutdown()
-}
-
-// ItemStatus holds whether artifacts exist for a given hash on local
-// and/or remote caching server
-type ItemStatus struct {
-	Hit       bool
-	Source    string // only relevant if Hit is true
-	TimeSaved int    // will be 0 if Hit is false
-}
-
-// NewCacheMiss returns an ItemStatus with the fields set to indicate a cache miss
-func NewCacheMiss() ItemStatus {
-	return ItemStatus{
-		Source:    CacheSourceNone,
-		Hit:       false,
-		TimeSaved: 0,
-	}
-}
-
-// newFSTaskCacheStatus returns an ItemStatus with the fields set to indicate a local cache hit
-func newFSTaskCacheStatus(hit bool, timeSaved int) ItemStatus {
-	return ItemStatus{
-		Source:    CacheSourceFS,
-		Hit:       hit,
-		TimeSaved: timeSaved,
-	}
-}
-
-func newRemoteTaskCacheStatus(hit bool, timeSaved int) ItemStatus {
-	return ItemStatus{
-		Source:    CacheSourceRemote,
-		Hit:       hit,
-		TimeSaved: timeSaved,
-	}
-}
-
-const (
-	// CacheSourceFS is a constant to indicate local cache hit
-	CacheSourceFS = "LOCAL"
-	// CacheSourceRemote is a constant to indicate remote cache hit
-	CacheSourceRemote = "REMOTE"
-	// CacheSourceNone is an empty string because there is no source for a cache miss
-	CacheSourceNone = ""
-	// CacheEventHit is a constant to indicate a cache hit
-	CacheEventHit = "HIT"
-	// CacheEventMiss is a constant to indicate a cache miss
-	CacheEventMiss = "MISS"
-)
-
-type CacheEvent struct {
-	Source   string `mapstructure:"source"`
-	Event    string `mapstructure:"event"`
-	Hash     string `mapstructure:"hash"`
-	Duration int    `mapstructure:"duration"`
-}
-
-// DefaultLocation returns the default filesystem cache location, given a repo root
-func DefaultLocation(repoRoot turbopath.AbsoluteSystemPath) turbopath.AbsoluteSystemPath {
-	return repoRoot.UntypedJoin("node_modules", ".cache", "turbo")
-}
-
-// OnCacheRemoved defines a callback that the cache system calls if a particular cache
-// needs to be removed. In practice, this happens when Remote Caching has been disabled
-// but the CLI continues to try to use it.
-type OnCacheRemoved = func(cache Cache, err error)
-
-// ErrNoCachesEnabled is returned when both the filesystem and http cache are unavailable
-var ErrNoCachesEnabled = errors.New("no caches are enabled")
-
-// Opts holds configuration options for the cache
-// TODO(gsoltis): further refactor this into fs cache opts and http cache opts
-type Opts struct {
-	OverrideDir    string
-	SkipRemote     bool
-	SkipFilesystem bool
-	Workers        int
-	Signature      bool
-}
-
-// resolveCacheDir calculates the location turbo should use to cache artifacts,
-// based on the options supplied by the user.
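// As a usage sketch (a hypothetical caller; repoRoot, apiClient, and recorder
// are assumed stand-ins rather than identifiers defined in this file):
//
//	opts := Opts{Workers: 10} // keep both caches, write through asynchronously
//	turboCache, err := New(opts, repoRoot, apiClient, recorder,
//		func(c Cache, err error) {
//			// called if e.g. Remote Caching is disabled mid-run
//		})
//	if errors.Is(err, ErrNoCachesEnabled) {
//		// not fatal: New still hands back a usable noopCache
//	}
//
// With OverrideDir left empty, resolveCacheDir below falls back to
// DefaultLocation, i.e. <repoRoot>/node_modules/.cache/turbo.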
-func (o *Opts) resolveCacheDir(repoRoot turbopath.AbsoluteSystemPath) turbopath.AbsoluteSystemPath { - if o.OverrideDir != "" { - return fs.ResolveUnknownPath(repoRoot, o.OverrideDir) - } - return DefaultLocation(repoRoot) -} - -// New creates a new cache -func New(opts Opts, repoRoot turbopath.AbsoluteSystemPath, client client, recorder analytics.Recorder, onCacheRemoved OnCacheRemoved) (Cache, error) { - c, err := newSyncCache(opts, repoRoot, client, recorder, onCacheRemoved) - if err != nil && !errors.Is(err, ErrNoCachesEnabled) { - return nil, err - } - if opts.Workers > 0 { - return newAsyncCache(c, opts), err - } - return c, err -} - -// newSyncCache can return an error with a usable noopCache. -func newSyncCache(opts Opts, repoRoot turbopath.AbsoluteSystemPath, client client, recorder analytics.Recorder, onCacheRemoved OnCacheRemoved) (Cache, error) { - // Check to see if the user has turned off particular cache implementations. - useFsCache := !opts.SkipFilesystem - useHTTPCache := !opts.SkipRemote - - // Since the above two flags are not mutually exclusive it is possible to configure - // yourself out of having a cache. We should tell you about it but we shouldn't fail - // your build for that reason. - // - // Further, since the httpCache can be removed at runtime, we need to insert a noopCache - // as a backup if you are configured to have *just* an httpCache. - // - // This is reduced from (!useFsCache && !useHTTPCache) || (!useFsCache && useHTTPCache) - useNoopCache := !useFsCache - - // Build up an array of cache implementations, we can only ever have 1 or 2. - cacheImplementations := make([]Cache, 0, 2) - - if useFsCache { - implementation, err := newFsCache(opts, recorder, repoRoot) - if err != nil { - return nil, err - } - cacheImplementations = append(cacheImplementations, implementation) - } - - if useHTTPCache { - implementation := newHTTPCache(opts, client, recorder, repoRoot) - cacheImplementations = append(cacheImplementations, implementation) - } - - if useNoopCache { - implementation := newNoopCache() - cacheImplementations = append(cacheImplementations, implementation) - } - - // Precisely two cache implementations: - // fsCache and httpCache OR httpCache and noopCache - useMultiplexer := len(cacheImplementations) > 1 - if useMultiplexer { - // We have early-returned any possible errors for this scenario. - return &cacheMultiplexer{ - onCacheRemoved: onCacheRemoved, - opts: opts, - caches: cacheImplementations, - }, nil - } - - // Precisely one cache implementation: fsCache OR noopCache - implementation := cacheImplementations[0] - _, isNoopCache := implementation.(*noopCache) - - // We want to let the user know something is wonky, but we don't want - // to trigger their build to fail. - if isNoopCache { - return implementation, ErrNoCachesEnabled - } - return implementation, nil -} - -// A cacheMultiplexer multiplexes several caches into one. -// Used when we have several active (eg. http, dir). -type cacheMultiplexer struct { - caches []Cache - opts Opts - mu sync.RWMutex - onCacheRemoved OnCacheRemoved -} - -func (mplex *cacheMultiplexer) Put(anchor turbopath.AbsoluteSystemPath, key string, duration int, files []turbopath.AnchoredSystemPath) error { - return mplex.storeUntil(anchor, key, duration, files, len(mplex.caches)) -} - -type cacheRemoval struct { - cache Cache - err *util.CacheDisabledError -} - -// storeUntil stores artifacts into higher priority caches than the given one. -// Used after artifact retrieval to ensure we have them in eg. 
the directory cache after -// downloading from the RPC cache. -func (mplex *cacheMultiplexer) storeUntil(anchor turbopath.AbsoluteSystemPath, key string, duration int, files []turbopath.AnchoredSystemPath, stopAt int) error { - // Attempt to store on all caches simultaneously. - toRemove := make([]*cacheRemoval, stopAt) - g := &errgroup.Group{} - mplex.mu.RLock() - for i, cache := range mplex.caches { - if i == stopAt { - break - } - c := cache - i := i - g.Go(func() error { - err := c.Put(anchor, key, duration, files) - if err != nil { - cd := &util.CacheDisabledError{} - if errors.As(err, &cd) { - toRemove[i] = &cacheRemoval{ - cache: c, - err: cd, - } - // we don't want this to cancel other cache actions - return nil - } - return err - } - return nil - }) - } - mplex.mu.RUnlock() - - if err := g.Wait(); err != nil { - return err - } - - for _, removal := range toRemove { - if removal != nil { - mplex.removeCache(removal) - } - } - return nil -} - -// removeCache takes a requested removal and tries to actually remove it. However, -// multiple requests could result in concurrent requests to remove the same cache. -// Let one of them win and propagate the error, the rest will no-op. -func (mplex *cacheMultiplexer) removeCache(removal *cacheRemoval) { - mplex.mu.Lock() - defer mplex.mu.Unlock() - for i, cache := range mplex.caches { - if cache == removal.cache { - mplex.caches = append(mplex.caches[:i], mplex.caches[i+1:]...) - mplex.onCacheRemoved(cache, removal.err) - break - } - } -} - -func (mplex *cacheMultiplexer) Fetch(anchor turbopath.AbsoluteSystemPath, key string, files []string) (ItemStatus, []turbopath.AnchoredSystemPath, error) { - // Make a shallow copy of the caches, since storeUntil can call removeCache - mplex.mu.RLock() - caches := make([]Cache, len(mplex.caches)) - copy(caches, mplex.caches) - mplex.mu.RUnlock() - - // Retrieve from caches sequentially; if we did them simultaneously we could - // easily write the same file from two goroutines at once. - for i, cache := range caches { - itemStatus, actualFiles, err := cache.Fetch(anchor, key, files) - if err != nil { - cd := &util.CacheDisabledError{} - if errors.As(err, &cd) { - mplex.removeCache(&cacheRemoval{ - cache: cache, - err: cd, - }) - } - // We're ignoring the error in the else case, since with this cache - // abstraction, we want to check lower priority caches rather than fail - // the operation. Future work that plumbs UI / Logging into the cache system - // should probably log this at least. - } - - if itemStatus.Hit { - // Store this into other caches. We can ignore errors here because we know - // we have previously successfully stored in a higher-priority cache, and so the overall - // result is a success at fetching. Storing in lower-priority caches is an optimization. - _ = mplex.storeUntil(anchor, key, itemStatus.TimeSaved, actualFiles, i) - - // Return this cache, and exit the for loop, since we don't need to keep looking. 
-			return itemStatus, actualFiles, nil
-		}
-	}
-
-	return NewCacheMiss(), nil, nil
-}
-
-// Exists checks each cache sequentially and returns the first one that has a cache hit
-func (mplex *cacheMultiplexer) Exists(target string) ItemStatus {
-	for _, cache := range mplex.caches {
-		itemStatus := cache.Exists(target)
-		if itemStatus.Hit {
-			return itemStatus
-		}
-	}
-
-	return NewCacheMiss()
-}
-
-func (mplex *cacheMultiplexer) Clean(anchor turbopath.AbsoluteSystemPath) {
-	for _, cache := range mplex.caches {
-		cache.Clean(anchor)
-	}
-}
-
-func (mplex *cacheMultiplexer) CleanAll() {
-	for _, cache := range mplex.caches {
-		cache.CleanAll()
-	}
-}
-
-func (mplex *cacheMultiplexer) Shutdown() {
-	for _, cache := range mplex.caches {
-		cache.Shutdown()
-	}
-}
diff --git a/cli/internal/cache/cache_fs.go b/cli/internal/cache/cache_fs.go
deleted file mode 100644
index de2ad0bc5c800..0000000000000
--- a/cli/internal/cache/cache_fs.go
+++ /dev/null
@@ -1,184 +0,0 @@
-// Adapted from https://github.com/thought-machine/please
-// Copyright Thought Machine, Inc. or its affiliates. All Rights Reserved.
-// SPDX-License-Identifier: Apache-2.0
-
-// Package cache implements our cache abstraction.
-package cache
-
-import (
-	"encoding/json"
-	"fmt"
-
-	"github.com/vercel/turbo/cli/internal/analytics"
-	"github.com/vercel/turbo/cli/internal/cacheitem"
-	"github.com/vercel/turbo/cli/internal/turbopath"
-)
-
-// fsCache is a local filesystem cache
-type fsCache struct {
-	cacheDirectory turbopath.AbsoluteSystemPath
-	recorder       analytics.Recorder
-}
-
-// newFsCache creates a new filesystem cache
-func newFsCache(opts Opts, recorder analytics.Recorder, repoRoot turbopath.AbsoluteSystemPath) (*fsCache, error) {
-	cacheDir := opts.resolveCacheDir(repoRoot)
-	if err := cacheDir.MkdirAll(0775); err != nil {
-		return nil, err
-	}
-	return &fsCache{
-		cacheDirectory: cacheDir,
-		recorder:       recorder,
-	}, nil
-}
-
-// Fetch returns true if items are cached. It moves them into position as a side effect.
-func (f *fsCache) Fetch(anchor turbopath.AbsoluteSystemPath, hash string, _ []string) (ItemStatus, []turbopath.AnchoredSystemPath, error) {
-	uncompressedCachePath := f.cacheDirectory.UntypedJoin(hash + ".tar")
-	compressedCachePath := f.cacheDirectory.UntypedJoin(hash + ".tar.zst")
-
-	var actualCachePath turbopath.AbsoluteSystemPath
-	if uncompressedCachePath.FileExists() {
-		actualCachePath = uncompressedCachePath
-	} else if compressedCachePath.FileExists() {
-		actualCachePath = compressedCachePath
-	} else {
-		// It's not in the cache, bail now
-		f.logFetch(false, hash, 0)
-		return newFSTaskCacheStatus(false, 0), nil, nil
-	}
-
-	cacheItem, openErr := cacheitem.Open(actualCachePath)
-	if openErr != nil {
-		return newFSTaskCacheStatus(false, 0), nil, openErr
-	}
-
-	restoredFiles, restoreErr := cacheItem.Restore(anchor)
-	if restoreErr != nil {
-		_ = cacheItem.Close()
-		return newFSTaskCacheStatus(false, 0), nil, restoreErr
-	}
-
-	meta, err := ReadCacheMetaFile(f.cacheDirectory.UntypedJoin(hash + "-meta.json"))
-	if err != nil {
-		_ = cacheItem.Close()
-		return newFSTaskCacheStatus(false, 0), nil, fmt.Errorf("error reading cache metadata: %w", err)
-	}
-	f.logFetch(true, hash, meta.Duration)
-
-	// Wait to see what happens with close.
- closeErr := cacheItem.Close() - if closeErr != nil { - return newFSTaskCacheStatus(false, 0), restoredFiles, closeErr - } - return newFSTaskCacheStatus(true, meta.Duration), restoredFiles, nil -} - -// Exists returns the ItemStatus and the timeSaved -func (f *fsCache) Exists(hash string) ItemStatus { - uncompressedCachePath := f.cacheDirectory.UntypedJoin(hash + ".tar") - compressedCachePath := f.cacheDirectory.UntypedJoin(hash + ".tar.zst") - - status := newFSTaskCacheStatus(false, 0) - if compressedCachePath.FileExists() || uncompressedCachePath.FileExists() { - status.Hit = true - } - - // Swallow the error - if meta, err := ReadCacheMetaFile(f.cacheDirectory.UntypedJoin(hash + "-meta.json")); err != nil { - status.TimeSaved = 0 - } else { - status.TimeSaved = meta.Duration - } - - return status - -} - -func (f *fsCache) logFetch(hit bool, hash string, duration int) { - var event string - if hit { - event = CacheEventHit - } else { - event = CacheEventMiss - } - payload := &CacheEvent{ - Source: CacheSourceFS, - Event: event, - Hash: hash, - Duration: duration, - } - f.recorder.LogEvent(payload) -} - -func (f *fsCache) Put(anchor turbopath.AbsoluteSystemPath, hash string, duration int, files []turbopath.AnchoredSystemPath) error { - cachePath := f.cacheDirectory.UntypedJoin(hash + ".tar.zst") - cacheItem, err := cacheitem.Create(cachePath) - if err != nil { - return err - } - - for _, file := range files { - err := cacheItem.AddFile(anchor, file) - if err != nil { - _ = cacheItem.Close() - return err - } - } - - writeErr := WriteCacheMetaFile(f.cacheDirectory.UntypedJoin(hash+"-meta.json"), &CacheMetadata{ - Duration: duration, - Hash: hash, - }) - - if writeErr != nil { - _ = cacheItem.Close() - return writeErr - } - - return cacheItem.Close() -} - -func (f *fsCache) Clean(_ turbopath.AbsoluteSystemPath) { - fmt.Println("Not implemented yet") -} - -func (f *fsCache) CleanAll() { - fmt.Println("Not implemented yet") -} - -func (f *fsCache) Shutdown() {} - -// CacheMetadata stores duration and hash information for a cache entry so that aggregate Time Saved calculations -// can be made from artifacts from various caches -type CacheMetadata struct { - Hash string `json:"hash"` - Duration int `json:"duration"` -} - -// WriteCacheMetaFile writes cache metadata file at a path -func WriteCacheMetaFile(path turbopath.AbsoluteSystemPath, config *CacheMetadata) error { - jsonBytes, marshalErr := json.Marshal(config) - if marshalErr != nil { - return marshalErr - } - writeFilErr := path.WriteFile(jsonBytes, 0644) - if writeFilErr != nil { - return writeFilErr - } - return nil -} - -// ReadCacheMetaFile reads cache metadata file at a path -func ReadCacheMetaFile(path turbopath.AbsoluteSystemPath) (*CacheMetadata, error) { - jsonBytes, readFileErr := path.ReadFile() - if readFileErr != nil { - return nil, readFileErr - } - var config CacheMetadata - marshalErr := json.Unmarshal(jsonBytes, &config) - if marshalErr != nil { - return nil, marshalErr - } - return &config, nil -} diff --git a/cli/internal/cache/cache_fs_test.go b/cli/internal/cache/cache_fs_test.go deleted file mode 100644 index 19ec937892102..0000000000000 --- a/cli/internal/cache/cache_fs_test.go +++ /dev/null @@ -1,253 +0,0 @@ -package cache - -import ( - "path/filepath" - "testing" - - "github.com/vercel/turbo/cli/internal/analytics" - "github.com/vercel/turbo/cli/internal/cacheitem" - "github.com/vercel/turbo/cli/internal/turbopath" - "gotest.tools/v3/assert" -) - -type dummyRecorder struct{} - -func (dr *dummyRecorder) 
LogEvent(payload analytics.EventPayload) {} - -func TestPut(t *testing.T) { - // Set up a test source and cache directory - // The "source" directory simulates a package - // - // / - // b - // child/ - // a - // link -> ../b - // broken -> missing - // - // Ensure we end up with a matching directory under a - // "cache" directory: - // - // /the-hash//... - - src := turbopath.AbsoluteSystemPath(t.TempDir()) - childDir := src.UntypedJoin("child") - err := childDir.MkdirAll(0775) - assert.NilError(t, err, "Mkdir") - aPath := childDir.UntypedJoin("a") - aFile, err := aPath.Create() - assert.NilError(t, err, "Create") - _, err = aFile.WriteString("hello") - assert.NilError(t, err, "WriteString") - assert.NilError(t, aFile.Close(), "Close") - - bPath := src.UntypedJoin("b") - bFile, err := bPath.Create() - assert.NilError(t, err, "Create") - _, err = bFile.WriteString("bFile") - assert.NilError(t, err, "WriteString") - assert.NilError(t, bFile.Close(), "Close") - - srcLinkPath := childDir.UntypedJoin("link") - linkTarget := filepath.FromSlash("../b") - assert.NilError(t, srcLinkPath.Symlink(linkTarget), "Symlink") - - srcBrokenLinkPath := childDir.Join("broken") - assert.NilError(t, srcBrokenLinkPath.Symlink("missing"), "Symlink") - circlePath := childDir.Join("circle") - assert.NilError(t, circlePath.Symlink(filepath.FromSlash("../child")), "Symlink") - - files := []turbopath.AnchoredSystemPath{ - turbopath.AnchoredUnixPath("child/").ToSystemPath(), // childDir - turbopath.AnchoredUnixPath("child/a").ToSystemPath(), // aPath, - turbopath.AnchoredUnixPath("b").ToSystemPath(), // bPath, - turbopath.AnchoredUnixPath("child/link").ToSystemPath(), // srcLinkPath, - turbopath.AnchoredUnixPath("child/broken").ToSystemPath(), // srcBrokenLinkPath, - turbopath.AnchoredUnixPath("child/circle").ToSystemPath(), // circlePath - } - - dst := turbopath.AbsoluteSystemPath(t.TempDir()) - dr := &dummyRecorder{} - - cache := &fsCache{ - cacheDirectory: dst, - recorder: dr, - } - - hash := "the-hash" - duration := 0 - putErr := cache.Put(src, hash, duration, files) - assert.NilError(t, putErr, "Put") - - // Verify that we got the files that we're expecting - dstCachePath := dst.UntypedJoin(hash) - - // This test checks outputs, so we go ahead and pull things back out. - // Attempting to satisfy our beliefs that the change is viable with - // as few changes to the tests as possible. 
- cacheItem, openErr := cacheitem.Open(dst.UntypedJoin(hash + ".tar.zst")) - assert.NilError(t, openErr, "Open") - - _, restoreErr := cacheItem.Restore(dstCachePath) - assert.NilError(t, restoreErr, "Restore") - - dstAPath := dstCachePath.UntypedJoin("child", "a") - assertFileMatches(t, aPath, dstAPath) - - dstBPath := dstCachePath.UntypedJoin("b") - assertFileMatches(t, bPath, dstBPath) - - dstLinkPath := dstCachePath.UntypedJoin("child", "link") - target, err := dstLinkPath.Readlink() - assert.NilError(t, err, "Readlink") - if target != linkTarget { - t.Errorf("Readlink got %v, want %v", target, linkTarget) - } - - dstBrokenLinkPath := dstCachePath.UntypedJoin("child", "broken") - target, err = dstBrokenLinkPath.Readlink() - assert.NilError(t, err, "Readlink") - if target != "missing" { - t.Errorf("Readlink got %v, want missing", target) - } - - dstCirclePath := dstCachePath.UntypedJoin("child", "circle") - circleLinkDest, err := dstCirclePath.Readlink() - assert.NilError(t, err, "Readlink") - expectedCircleLinkDest := filepath.FromSlash("../child") - if circleLinkDest != expectedCircleLinkDest { - t.Errorf("Cache link got %v, want %v", circleLinkDest, expectedCircleLinkDest) - } - - assert.NilError(t, cacheItem.Close(), "Close") -} - -func assertFileMatches(t *testing.T, orig turbopath.AbsoluteSystemPath, copy turbopath.AbsoluteSystemPath) { - t.Helper() - origBytes, err := orig.ReadFile() - assert.NilError(t, err, "ReadFile") - copyBytes, err := copy.ReadFile() - assert.NilError(t, err, "ReadFile") - assert.DeepEqual(t, origBytes, copyBytes) - origStat, err := orig.Lstat() - assert.NilError(t, err, "Lstat") - copyStat, err := copy.Lstat() - assert.NilError(t, err, "Lstat") - assert.Equal(t, origStat.Mode(), copyStat.Mode()) -} - -func TestFetch(t *testing.T) { - // Set up a test cache directory and target output directory - // The "cacheDir" directory simulates a cached package - // - // / - // the-hash-meta.json - // the-hash/ - // some-package/ - // b - // child/ - // a - // link -> ../b - // broken -> missing - // circle -> ../child - // - // Ensure we end up with a matching directory under a - // "some-package" directory: - // - // "some-package"/... 
- - cacheDir := turbopath.AbsoluteSystemPath(t.TempDir()) - hash := "the-hash" - src := cacheDir.UntypedJoin(hash, "some-package") - err := src.MkdirAll(0775) - assert.NilError(t, err, "mkdirAll") - - childDir := src.UntypedJoin("child") - err = childDir.MkdirAll(0775) - assert.NilError(t, err, "Mkdir") - aPath := childDir.UntypedJoin("a") - aFile, err := aPath.Create() - assert.NilError(t, err, "Create") - _, err = aFile.WriteString("hello") - assert.NilError(t, err, "WriteString") - assert.NilError(t, aFile.Close(), "Close") - - bPath := src.UntypedJoin("b") - bFile, err := bPath.Create() - assert.NilError(t, err, "Create") - _, err = bFile.WriteString("bFile") - assert.NilError(t, err, "WriteString") - assert.NilError(t, bFile.Close(), "Close") - - srcLinkPath := childDir.UntypedJoin("link") - linkTarget := filepath.FromSlash("../b") - assert.NilError(t, srcLinkPath.Symlink(linkTarget), "Symlink") - - srcBrokenLinkPath := childDir.UntypedJoin("broken") - srcBrokenLinkTarget := turbopath.AnchoredUnixPath("missing").ToSystemPath() - assert.NilError(t, srcBrokenLinkPath.Symlink(srcBrokenLinkTarget.ToString()), "Symlink") - - circlePath := childDir.Join("circle") - srcCircleLinkTarget := turbopath.AnchoredUnixPath("../child").ToSystemPath() - assert.NilError(t, circlePath.Symlink(srcCircleLinkTarget.ToString()), "Symlink") - - metadataPath := cacheDir.UntypedJoin("the-hash-meta.json") - err = metadataPath.WriteFile([]byte(`{"hash":"the-hash","duration":0}`), 0777) - assert.NilError(t, err, "WriteFile") - - dr := &dummyRecorder{} - - cache := &fsCache{ - cacheDirectory: cacheDir, - recorder: dr, - } - - inputFiles := []turbopath.AnchoredSystemPath{ - turbopath.AnchoredUnixPath("some-package/child/").ToSystemPath(), // childDir - turbopath.AnchoredUnixPath("some-package/child/a").ToSystemPath(), // aPath, - turbopath.AnchoredUnixPath("some-package/b").ToSystemPath(), // bPath, - turbopath.AnchoredUnixPath("some-package/child/link").ToSystemPath(), // srcLinkPath, - turbopath.AnchoredUnixPath("some-package/child/broken").ToSystemPath(), // srcBrokenLinkPath, - turbopath.AnchoredUnixPath("some-package/child/circle").ToSystemPath(), // circlePath - } - - putErr := cache.Put(cacheDir.UntypedJoin(hash), hash, 0, inputFiles) - assert.NilError(t, putErr, "Put") - - outputDir := turbopath.AbsoluteSystemPath(t.TempDir()) - dstOutputPath := "some-package" - cacheStatus, files, err := cache.Fetch(outputDir, "the-hash", []string{}) - assert.NilError(t, err, "Fetch") - hit := cacheStatus.Hit - if !hit { - t.Error("Fetch got false, want true") - } - if len(files) != len(inputFiles) { - t.Errorf("len(files) got %v, want %v", len(files), len(inputFiles)) - } - - dstAPath := outputDir.UntypedJoin(dstOutputPath, "child", "a") - assertFileMatches(t, aPath, dstAPath) - - dstBPath := outputDir.UntypedJoin(dstOutputPath, "b") - assertFileMatches(t, bPath, dstBPath) - - dstLinkPath := outputDir.UntypedJoin(dstOutputPath, "child", "link") - target, err := dstLinkPath.Readlink() - assert.NilError(t, err, "Readlink") - if target != linkTarget { - t.Errorf("Readlink got %v, want %v", target, linkTarget) - } - - // Assert that we restore broken symlinks correctly - dstBrokenLinkPath := outputDir.UntypedJoin(dstOutputPath, "child", "broken") - target, readlinkErr := dstBrokenLinkPath.Readlink() - assert.NilError(t, readlinkErr, "Readlink") - assert.Equal(t, target, srcBrokenLinkTarget.ToString()) - - // Assert that we restore symlinks to directories correctly - dstCirclePath := outputDir.UntypedJoin(dstOutputPath, 
"child", "circle") - circleTarget, circleReadlinkErr := dstCirclePath.Readlink() - assert.NilError(t, circleReadlinkErr, "Circle Readlink") - assert.Equal(t, circleTarget, srcCircleLinkTarget.ToString()) -} diff --git a/cli/internal/cache/cache_http.go b/cli/internal/cache/cache_http.go deleted file mode 100644 index 7bf48d230417b..0000000000000 --- a/cli/internal/cache/cache_http.go +++ /dev/null @@ -1,250 +0,0 @@ -// Adapted from https://github.com/thought-machine/please -// Copyright Thought Machine, Inc. or its affiliates. All Rights Reserved. -// SPDX-License-Identifier: Apache-2.0 -package cache - -import ( - "bytes" - "errors" - "fmt" - "io" - "io/ioutil" - "net/http" - "strconv" - - "github.com/vercel/turbo/cli/internal/analytics" - "github.com/vercel/turbo/cli/internal/cacheitem" - "github.com/vercel/turbo/cli/internal/turbopath" -) - -type client interface { - PutArtifact(hash string, body []byte, duration int, tag string) error - FetchArtifact(hash string) (*http.Response, error) - ArtifactExists(hash string) (*http.Response, error) - GetTeamID() string -} - -type httpCache struct { - writable bool - client client - requestLimiter limiter - recorder analytics.Recorder - signerVerifier *ArtifactSignatureAuthentication - repoRoot turbopath.AbsoluteSystemPath -} - -type limiter chan struct{} - -func (l limiter) acquire() { - l <- struct{}{} -} - -func (l limiter) release() { - <-l -} - -func (cache *httpCache) Put(anchor turbopath.AbsoluteSystemPath, hash string, duration int, files []turbopath.AnchoredSystemPath) error { - // if cache.writable { - cache.requestLimiter.acquire() - defer cache.requestLimiter.release() - - r, w := io.Pipe() - - cacheErrorChan := make(chan error, 1) - go cache.write(w, anchor, files, cacheErrorChan) - - // Read the entire artifact tar into memory so we can easily compute the signature. - // Note: retryablehttp.NewRequest reads the files into memory anyways so there's no - // additional overhead by doing the ioutil.ReadAll here instead. - artifactBody, err := ioutil.ReadAll(r) - if err != nil { - return fmt.Errorf("failed to store files in HTTP cache: %w", err) - } - tag := "" - if cache.signerVerifier.isEnabled() { - tag, err = cache.signerVerifier.generateTag(hash, artifactBody) - if err != nil { - return fmt.Errorf("failed to store files in HTTP cache: %w", err) - } - } - - cacheCreateError := <-cacheErrorChan - if cacheCreateError != nil { - return cacheCreateError - } - - return cache.client.PutArtifact(hash, artifactBody, duration, tag) -} - -// write writes a series of files into the given Writer. -func (cache *httpCache) write(w io.WriteCloser, anchor turbopath.AbsoluteSystemPath, files []turbopath.AnchoredSystemPath, cacheErrorChan chan error) { - cacheItem := cacheitem.CreateWriter(w) - - for _, file := range files { - err := cacheItem.AddFile(anchor, file) - if err != nil { - _ = cacheItem.Close() - cacheErrorChan <- err - return - } - } - - cacheErrorChan <- cacheItem.Close() -} - -func (cache *httpCache) Fetch(_ turbopath.AbsoluteSystemPath, key string, _ []string) (ItemStatus, []turbopath.AnchoredSystemPath, error) { - cache.requestLimiter.acquire() - defer cache.requestLimiter.release() - hit, files, duration, err := cache.retrieve(key) - if err != nil { - // TODO: analytics event? 
- return newRemoteTaskCacheStatus(false, duration), files, fmt.Errorf("failed to retrieve files from HTTP cache: %w", err) - } - cache.logFetch(hit, key, duration) - return newRemoteTaskCacheStatus(hit, duration), files, err -} - -func (cache *httpCache) Exists(key string) ItemStatus { - cache.requestLimiter.acquire() - defer cache.requestLimiter.release() - hit, timeSaved, err := cache.exists(key) - if err != nil { - return newRemoteTaskCacheStatus(false, 0) - } - return newRemoteTaskCacheStatus(hit, timeSaved) -} - -func (cache *httpCache) logFetch(hit bool, hash string, duration int) { - var event string - if hit { - event = CacheEventHit - } else { - event = CacheEventMiss - } - payload := &CacheEvent{ - Source: CacheSourceRemote, - Event: event, - Hash: hash, - Duration: duration, - } - cache.recorder.LogEvent(payload) -} - -func (cache *httpCache) exists(hash string) (bool, int, error) { - resp, err := cache.client.ArtifactExists(hash) - if err != nil { - return false, 0, nil - } - - defer func() { err = resp.Body.Close() }() - - if resp.StatusCode == http.StatusNotFound { - return false, 0, nil - } else if resp.StatusCode != http.StatusOK { - return false, 0, fmt.Errorf("%s", strconv.Itoa(resp.StatusCode)) - } - - duration, err := getDurationFromResponse(resp) - if err != nil { - return false, 0, err - } - - return true, duration, err -} - -func (cache *httpCache) retrieve(hash string) (bool, []turbopath.AnchoredSystemPath, int, error) { - resp, err := cache.client.FetchArtifact(hash) - if err != nil { - return false, nil, 0, err - } - defer resp.Body.Close() - if resp.StatusCode == http.StatusNotFound { - return false, nil, 0, nil // doesn't exist - not an error - } else if resp.StatusCode != http.StatusOK { - b, _ := ioutil.ReadAll(resp.Body) - return false, nil, 0, fmt.Errorf("%s", string(b)) - } - - duration, err := getDurationFromResponse(resp) - if err != nil { - return false, nil, 0, err - } - - var tarReader io.Reader - - defer func() { _ = resp.Body.Close() }() - if cache.signerVerifier.isEnabled() { - expectedTag := resp.Header.Get("x-artifact-tag") - if expectedTag == "" { - // If the verifier is enabled all incoming artifact downloads must have a signature - return false, nil, 0, errors.New("artifact verification failed: Downloaded artifact is missing required x-artifact-tag header") - } - b, err := ioutil.ReadAll(resp.Body) - if err != nil { - return false, nil, 0, fmt.Errorf("artifact verification failed: %w", err) - } - isValid, err := cache.signerVerifier.validate(hash, b, expectedTag) - if err != nil { - return false, nil, 0, fmt.Errorf("artifact verification failed: %w", err) - } - if !isValid { - err = fmt.Errorf("artifact verification failed: artifact tag does not match expected tag %s", expectedTag) - return false, nil, 0, err - } - // The artifact has been verified and the body can be read and untarred - tarReader = bytes.NewReader(b) - } else { - tarReader = resp.Body - } - files, err := restoreTar(cache.repoRoot, tarReader) - if err != nil { - return false, nil, 0, err - } - return true, files, duration, nil -} - -// getDurationFromResponse extracts the duration from the response header -func getDurationFromResponse(resp *http.Response) (int, error) { - duration := 0 - if resp.Header.Get("x-artifact-duration") != "" { - // If we had an error reading the duration header, just swallow it for now. 
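One wrinkle in `getDurationFromResponse` just above: the inline comment says a bad header is swallowed, but the code that follows returns an error for a malformed value and only treats a missing header as zero duration. A self-contained restatement of the actual behavior (the `parseDuration` name is hypothetical):

```go
package main

import (
	"fmt"
	"net/http"
	"strconv"
)

// parseDuration mirrors getDurationFromResponse: a missing
// x-artifact-duration header means zero duration, while a malformed
// one is propagated as an error, despite the stale comment.
func parseDuration(resp *http.Response) (int, error) {
	raw := resp.Header.Get("x-artifact-duration")
	if raw == "" {
		return 0, nil
	}
	duration, err := strconv.Atoi(raw)
	if err != nil {
		return 0, fmt.Errorf("invalid x-artifact-duration header: %w", err)
	}
	return duration, nil
}

func main() {
	resp := &http.Response{Header: http.Header{}}
	resp.Header.Set("x-artifact-duration", "5000")
	fmt.Println(parseDuration(resp)) // 5000 <nil>
}
```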
- intVar, err := strconv.Atoi(resp.Header.Get("x-artifact-duration")) - if err != nil { - return 0, fmt.Errorf("invalid x-artifact-duration header: %w", err) - } - duration = intVar - } - - return duration, nil -} - -func restoreTar(root turbopath.AbsoluteSystemPath, reader io.Reader) ([]turbopath.AnchoredSystemPath, error) { - cache := cacheitem.FromReader(reader, true) - return cache.Restore(root) -} - -func (cache *httpCache) Clean(_ turbopath.AbsoluteSystemPath) { - // Not possible; this implementation can only clean for a hash. -} - -func (cache *httpCache) CleanAll() { - // Also not possible. -} - -func (cache *httpCache) Shutdown() {} - -func newHTTPCache(opts Opts, client client, recorder analytics.Recorder, repoRoot turbopath.AbsoluteSystemPath) *httpCache { - return &httpCache{ - writable: true, - client: client, - requestLimiter: make(limiter, 20), - recorder: recorder, - repoRoot: repoRoot, - signerVerifier: &ArtifactSignatureAuthentication{ - // TODO(Gaspar): this should use RemoteCacheOptions.TeamId once we start - // enforcing team restrictions for repositories. - teamID: client.GetTeamID(), - enabled: opts.Signature, - }, - } -} diff --git a/cli/internal/cache/cache_http_test.go b/cli/internal/cache/cache_http_test.go deleted file mode 100644 index bdd9bd00c0988..0000000000000 --- a/cli/internal/cache/cache_http_test.go +++ /dev/null @@ -1,286 +0,0 @@ -package cache - -import ( - "archive/tar" - "bytes" - "errors" - "net/http" - "os" - "testing" - - "github.com/DataDog/zstd" - "github.com/vercel/turbo/cli/internal/cacheitem" - "github.com/vercel/turbo/cli/internal/fs" - "github.com/vercel/turbo/cli/internal/turbopath" - "github.com/vercel/turbo/cli/internal/util" - "gotest.tools/v3/assert" -) - -type errorResp struct { - err error - t *testing.T -} - -func (sr *errorResp) PutArtifact(hash string, body []byte, duration int, tag string) error { - sr.t.Helper() - outdir := turbopath.AbsoluteSystemPathFromUpstream(sr.t.TempDir()) - cache := cacheitem.FromReader(bytes.NewReader(body), true) - restored, err := cache.Restore(outdir) - - sr.t.Log(restored) - assert.Equal(sr.t, restored[0].ToString(), "one") - assert.Equal(sr.t, restored[1].ToString(), "two") - assert.Equal(sr.t, len(restored), 2) - assert.NilError(sr.t, err, "Restoration was successful.") - - return sr.err -} - -func (sr *errorResp) FetchArtifact(hash string) (*http.Response, error) { - return nil, sr.err -} - -func (sr *errorResp) ArtifactExists(hash string) (*http.Response, error) { - return nil, sr.err -} - -func (sr *errorResp) GetTeamID() string { - return "" -} - -func TestRemoteCachingDisabled(t *testing.T) { - clientErr := &util.CacheDisabledError{ - Status: util.CachingStatusDisabled, - Message: "Remote Caching has been disabled for this team. 
A team owner can enable it here: $URL", - } - client := &errorResp{err: clientErr} - cache := &httpCache{ - client: client, - requestLimiter: make(limiter, 20), - } - cd := &util.CacheDisabledError{} - _, _, err := cache.Fetch("unused-target", "some-hash", []string{"unused", "outputs"}) - if !errors.As(err, &cd) { - t.Errorf("cache.Fetch err got %v, want a CacheDisabled error", err) - } - if cd.Status != util.CachingStatusDisabled { - t.Errorf("CacheDisabled.Status got %v, want %v", cd.Status, util.CachingStatusDisabled) - } -} - -func makeValidTar(t *testing.T) *bytes.Buffer { - // - // my-pkg/ - // some-file - // link-to-extra-file -> ../extra-file - // broken-link -> ../../global-dep - // extra-file - - t.Helper() - buf := &bytes.Buffer{} - zw := zstd.NewWriter(buf) - defer func() { - if err := zw.Close(); err != nil { - t.Fatalf("failed to close gzip: %v", err) - } - }() - tw := tar.NewWriter(zw) - defer func() { - if err := tw.Close(); err != nil { - t.Fatalf("failed to close tar: %v", err) - } - }() - - // my-pkg - h := &tar.Header{ - Name: "my-pkg/", - Mode: int64(0755), - Typeflag: tar.TypeDir, - } - if err := tw.WriteHeader(h); err != nil { - t.Fatalf("failed to write header: %v", err) - } - // my-pkg/some-file - contents := []byte("some-file-contents") - h = &tar.Header{ - Name: "my-pkg/some-file", - Mode: int64(0644), - Typeflag: tar.TypeReg, - Size: int64(len(contents)), - } - if err := tw.WriteHeader(h); err != nil { - t.Fatalf("failed to write header: %v", err) - } - if _, err := tw.Write(contents); err != nil { - t.Fatalf("failed to write file: %v", err) - } - // my-pkg/link-to-extra-file - h = &tar.Header{ - Name: "my-pkg/link-to-extra-file", - Mode: int64(0644), - Typeflag: tar.TypeSymlink, - Linkname: "../extra-file", - } - if err := tw.WriteHeader(h); err != nil { - t.Fatalf("failed to write header: %v", err) - } - // my-pkg/broken-link - h = &tar.Header{ - Name: "my-pkg/broken-link", - Mode: int64(0644), - Typeflag: tar.TypeSymlink, - Linkname: "../../global-dep", - } - if err := tw.WriteHeader(h); err != nil { - t.Fatalf("failed to write header: %v", err) - } - // extra-file - contents = []byte("extra-file-contents") - h = &tar.Header{ - Name: "extra-file", - Mode: int64(0644), - Typeflag: tar.TypeReg, - Size: int64(len(contents)), - } - if err := tw.WriteHeader(h); err != nil { - t.Fatalf("failed to write header: %v", err) - } - if _, err := tw.Write(contents); err != nil { - t.Fatalf("failed to write file: %v", err) - } - - return buf -} - -func makeInvalidTar(t *testing.T) *bytes.Buffer { - // contains a single file that traverses out - // ../some-file - - t.Helper() - buf := &bytes.Buffer{} - zw := zstd.NewWriter(buf) - defer func() { - if err := zw.Close(); err != nil { - t.Fatalf("failed to close gzip: %v", err) - } - }() - tw := tar.NewWriter(zw) - defer func() { - if err := tw.Close(); err != nil { - t.Fatalf("failed to close tar: %v", err) - } - }() - - // my-pkg/some-file - contents := []byte("some-file-contents") - h := &tar.Header{ - Name: "../some-file", - Mode: int64(0644), - Typeflag: tar.TypeReg, - Size: int64(len(contents)), - } - if err := tw.WriteHeader(h); err != nil { - t.Fatalf("failed to write header: %v", err) - } - if _, err := tw.Write(contents); err != nil { - t.Fatalf("failed to write file: %v", err) - } - return buf -} - -func TestRestoreTar(t *testing.T) { - root := fs.AbsoluteSystemPathFromUpstream(t.TempDir()) - - tar := makeValidTar(t) - - expectedFiles := []turbopath.AnchoredSystemPath{ - 
turbopath.AnchoredUnixPath("extra-file").ToSystemPath(), - turbopath.AnchoredUnixPath("my-pkg").ToSystemPath(), - turbopath.AnchoredUnixPath("my-pkg/some-file").ToSystemPath(), - turbopath.AnchoredUnixPath("my-pkg/link-to-extra-file").ToSystemPath(), - turbopath.AnchoredUnixPath("my-pkg/broken-link").ToSystemPath(), - } - files, err := restoreTar(root, tar) - assert.NilError(t, err, "readTar") - - expectedSet := make(util.Set) - for _, file := range expectedFiles { - expectedSet.Add(file.ToString()) - } - gotSet := make(util.Set) - for _, file := range files { - gotSet.Add(file.ToString()) - } - extraFiles := gotSet.Difference(expectedSet) - if extraFiles.Len() > 0 { - t.Errorf("got extra files: %v", extraFiles.UnsafeListOfStrings()) - } - missingFiles := expectedSet.Difference(gotSet) - if missingFiles.Len() > 0 { - t.Errorf("missing expected files: %v", missingFiles.UnsafeListOfStrings()) - } - - // Verify file contents - extraFile := root.UntypedJoin("extra-file") - contents, err := extraFile.ReadFile() - assert.NilError(t, err, "ReadFile") - assert.DeepEqual(t, contents, []byte("extra-file-contents")) - - someFile := root.UntypedJoin("my-pkg", "some-file") - contents, err = someFile.ReadFile() - assert.NilError(t, err, "ReadFile") - assert.DeepEqual(t, contents, []byte("some-file-contents")) -} - -func TestRestoreInvalidTar(t *testing.T) { - root := fs.AbsoluteSystemPathFromUpstream(t.TempDir()) - expectedContents := []byte("important-data") - someFile := root.UntypedJoin("some-file") - err := someFile.WriteFile(expectedContents, 0644) - assert.NilError(t, err, "WriteFile") - - tar := makeInvalidTar(t) - // use a child directory so that blindly untarring will squash the file - // that we just wrote above. - repoRoot := root.UntypedJoin("repo") - _, err = restoreTar(repoRoot, tar) - if err == nil { - t.Error("expected error untarring invalid tar") - } - - contents, err := someFile.ReadFile() - assert.NilError(t, err, "ReadFile") - assert.Equal(t, string(contents), string(expectedContents), "expected to not overwrite file") -} - -func Test_httpCache_Put(t *testing.T) { - root := fs.AbsoluteSystemPathFromUpstream(t.TempDir()) - _ = root.Join("one").WriteFile(nil, 0644) - _ = root.Join("two").WriteFile(nil, 0644) - - clientErr := errors.New("PutArtifact") - client := &errorResp{err: clientErr, t: t} - - cache := newHTTPCache(Opts{}, client, nil, root) - - assert.ErrorIs( - t, - cache.Put(root, "000", 10, []turbopath.AnchoredSystemPath{"one", "two"}), - clientErr, - "Succeeds at writing, cache item is successfully passed through.", - ) - - assert.ErrorIs( - t, - cache.Put(root, "000", 10, []turbopath.AnchoredSystemPath{"one", "two", "missing"}), - os.ErrNotExist, - "Errors with missing file.", - ) - - assert.ErrorIs( - t, - cache.Put(root, "000", 10, []turbopath.AnchoredSystemPath{"missing", "one", "two"}), - os.ErrNotExist, - "Errors with missing file at first load.", - ) -} diff --git a/cli/internal/cache/cache_noop.go b/cli/internal/cache/cache_noop.go deleted file mode 100644 index 86b37677ec447..0000000000000 --- a/cli/internal/cache/cache_noop.go +++ /dev/null @@ -1,24 +0,0 @@ -package cache - -import "github.com/vercel/turbo/cli/internal/turbopath" - -type noopCache struct{} - -func newNoopCache() *noopCache { - return &noopCache{} -} - -func (c *noopCache) Put(_ turbopath.AbsoluteSystemPath, _ string, _ int, _ []turbopath.AnchoredSystemPath) error { - return nil -} -func (c *noopCache) Fetch(_ turbopath.AbsoluteSystemPath, _ string, _ []string) (ItemStatus, 
[]turbopath.AnchoredSystemPath, error) { - return NewCacheMiss(), nil, nil -} - -func (c *noopCache) Exists(_ string) ItemStatus { - return NewCacheMiss() -} - -func (c *noopCache) Clean(_ turbopath.AbsoluteSystemPath) {} -func (c *noopCache) CleanAll() {} -func (c *noopCache) Shutdown() {} diff --git a/cli/internal/cache/cache_signature_authentication.go b/cli/internal/cache/cache_signature_authentication.go deleted file mode 100644 index 315197047fa94..0000000000000 --- a/cli/internal/cache/cache_signature_authentication.go +++ /dev/null @@ -1,86 +0,0 @@ -// Adapted from https://github.com/thought-machine/please -// Copyright Thought Machine, Inc. or its affiliates. All Rights Reserved. -// SPDX-License-Identifier: Apache-2.0 -package cache - -import ( - "crypto/hmac" - "crypto/sha256" - "encoding/base64" - "errors" - "fmt" - "hash" - "os" -) - -type ArtifactSignatureAuthentication struct { - teamID string - // Used for testing purposes - secretKeyOverride []byte - enabled bool -} - -func (asa *ArtifactSignatureAuthentication) isEnabled() bool { - return asa.enabled -} - -// If the secret key is not found or the secret key length is 0, an error is returned -// Preference is given to the environment specified secret key. -func (asa *ArtifactSignatureAuthentication) getSecretKey() ([]byte, error) { - var secret []byte - if asa.secretKeyOverride != nil { - secret = asa.secretKeyOverride - } else { - secret = []byte(os.Getenv("TURBO_REMOTE_CACHE_SIGNATURE_KEY")) - } - - if len(secret) == 0 { - return nil, errors.New("signature secret key not found. You must specify a secret key in the TURBO_REMOTE_CACHE_SIGNATURE_KEY environment variable") - } - return secret, nil -} - -func (asa *ArtifactSignatureAuthentication) generateTag(hash string, artifactBody []byte) (string, error) { - tag, err := asa.getTagGenerator(hash) - if err != nil { - return "", err - } - tag.Write(artifactBody) - return base64.StdEncoding.EncodeToString(tag.Sum(nil)), nil -} - -func (asa *ArtifactSignatureAuthentication) getTagGenerator(hash string) (hash.Hash, error) { - teamID := asa.teamID - secret, err := asa.getSecretKey() - if err != nil { - return nil, err - } - metadata := []byte(hash) - metadata = append(metadata, []byte(teamID)...) - - // TODO(Gaspar) Support additional signing algorithms here - h := hmac.New(sha256.New, secret) - h.Write(metadata) - return h, nil -} - -func (asa *ArtifactSignatureAuthentication) validate(hash string, artifactBody []byte, expectedTag string) (bool, error) { - computedTag, err := asa.generateTag(hash, artifactBody) - if err != nil { - return false, fmt.Errorf("failed to verify artifact tag: %w", err) - } - return hmac.Equal([]byte(computedTag), []byte(expectedTag)), nil -} - -type StreamValidator struct { - currentHash hash.Hash -} - -func (sv *StreamValidator) Validate(expectedTag string) bool { - computedTag := base64.StdEncoding.EncodeToString(sv.currentHash.Sum(nil)) - return hmac.Equal([]byte(computedTag), []byte(expectedTag)) -} - -func (sv *StreamValidator) CurrentValue() string { - return base64.StdEncoding.EncodeToString(sv.currentHash.Sum(nil)) -} diff --git a/cli/internal/cache/cache_signature_authentication_test.go b/cli/internal/cache/cache_signature_authentication_test.go deleted file mode 100644 index f2599154211c7..0000000000000 --- a/cli/internal/cache/cache_signature_authentication_test.go +++ /dev/null @@ -1,267 +0,0 @@ -// Adapted from https://github.com/thought-machine/please -// Copyright Thought Machine, Inc. or its affiliates. All Rights Reserved. 
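The signing scheme deleted in `cache_signature_authentication.go` above is HMAC-SHA256 over `hash || teamID || artifactBody`, base64-encoded. A standalone sketch of the same computation; the printed value is the exact tag that `Test_Utils` (later in this diff) asserts for these inputs:

```go
package main

import (
	"crypto/hmac"
	"crypto/sha256"
	"encoding/base64"
	"fmt"
)

// generateTag mirrors ArtifactSignatureAuthentication.generateTag:
// key = secret, message = artifact hash, then team ID, then body.
func generateTag(hash, teamID string, artifactBody, secret []byte) string {
	h := hmac.New(sha256.New, secret)
	h.Write([]byte(hash))
	h.Write([]byte(teamID))
	h.Write(artifactBody)
	return base64.StdEncoding.EncodeToString(h.Sum(nil))
}

func main() {
	tag := generateTag("the-artifact-hash", "team_someid",
		[]byte("the artifact body as bytes"), []byte("my-secret"))
	fmt.Println(tag) // mh3PI05JSXRfAy3hL0Dz3Gjq0UhZYKalu1HwmLNvYjs=
}
```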
-// SPDX-License-Identifier: Apache-2.0 -package cache - -import ( - "crypto/hmac" - "crypto/sha256" - "encoding/base64" - "encoding/hex" - "fmt" - "math/rand" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/vercel/turbo/cli/internal/edgecases" - "github.com/vercel/turbo/cli/internal/ffi" - "github.com/vercel/turbo/cli/internal/xxhash" -) - -func Test_SecretKeySuccess(t *testing.T) { - teamID := "team_someid" - secretKeyEnvName := "TURBO_REMOTE_CACHE_SIGNATURE_KEY" - secretKeyEnvValue := "my-secret-key-env" - t.Setenv(secretKeyEnvName, secretKeyEnvValue) - - cases := []struct { - name string - asa *ArtifactSignatureAuthentication - expectedSecretKey string - expectedSecretKeyError bool - }{ - { - name: "Accepts secret key", - asa: &ArtifactSignatureAuthentication{ - teamID: teamID, - enabled: true, - }, - expectedSecretKey: secretKeyEnvValue, - expectedSecretKeyError: false, - }, - } - - for _, tc := range cases { - t.Run(tc.name, func(t *testing.T) { - secretKey, err := tc.asa.getSecretKey() - if tc.expectedSecretKeyError { - assert.Error(t, err) - } else { - assert.NoError(t, err) - assert.Equal(t, tc.expectedSecretKey, string(secretKey)) - } - }) - } -} -func Test_SecretKeyErrors(t *testing.T) { - teamID := "team_someid" - - // Env secret key TURBO_REMOTE_CACHE_SIGNATURE_KEY is not set - - cases := []struct { - name string - asa *ArtifactSignatureAuthentication - expectedSecretKey string - expectedSecretKeyError bool - }{ - { - name: "Secret key not defined errors", - asa: &ArtifactSignatureAuthentication{ - teamID: teamID, - enabled: true, - }, - expectedSecretKey: "", - expectedSecretKeyError: true, - }, - { - name: "Secret key is empty errors", - asa: &ArtifactSignatureAuthentication{ - teamID: teamID, - enabled: true, - }, - expectedSecretKey: "", - expectedSecretKeyError: true, - }, - } - - for _, tc := range cases { - t.Run(tc.name, func(t *testing.T) { - secretKey, err := tc.asa.getSecretKey() - if tc.expectedSecretKeyError { - assert.Error(t, err) - } else { - assert.NoError(t, err) - assert.Equal(t, tc.expectedSecretKey, string(secretKey)) - } - }) - } -} - -var MinimumLength = 10 - -func generateRandomBytes() []byte { - length := MinimumLength + rand.Intn(250) - b := make([]byte, length) - rand.Read(b) - return b -} - -func generateRandomHash() (string, error) { - bytes := generateRandomBytes() - hash := xxhash.New() - - _, err := hash.Write(bytes) - - return hex.EncodeToString(hash.Sum(nil)), err -} - -func getRandomEdgecase() string { - return edgecases.Strings[rand.Intn(len(edgecases.Strings))] -} - -func Test_EdgecaseStrings(t *testing.T) { - TestCases := 1000 - for i := 0; i < TestCases; i++ { - teamID := getRandomEdgecase() - hash := getRandomEdgecase() - artifactBody := getRandomEdgecase() - secretKey := getRandomEdgecase() - asa := &ArtifactSignatureAuthentication{ - teamID: teamID, - secretKeyOverride: []byte(secretKey), - } - - tag, err := asa.generateTag(hash, []byte(artifactBody)) - assert.NoError(t, err) - - isValid, err := ffi.VerifySignature([]byte(teamID), hash, []byte(artifactBody), tag, []byte(secretKey)) - assert.NoError(t, err) - assert.True(t, isValid) - } -} - -func Test_RandomlyGenerateCases(t *testing.T) { - TestCases := 1000 - - for i := 0; i < TestCases; i++ { - t.Run(fmt.Sprintf("Case %v", i), func(t *testing.T) { - teamID := generateRandomBytes() - hash, err := generateRandomHash() - assert.NoError(t, err) - artifactBody := generateRandomBytes() - secretKey := generateRandomBytes() - - asa := &ArtifactSignatureAuthentication{ - 
teamID: string(teamID), - secretKeyOverride: secretKey, - } - - tag, err := asa.generateTag(hash, artifactBody) - assert.NoError(t, err) - - isValid, err := ffi.VerifySignature(teamID, hash, artifactBody, tag, secretKey) - assert.NoError(t, err) - assert.True(t, isValid) - }) - } -} - -func Test_GenerateTagAndValidate(t *testing.T) { - teamID := "team_someid" - hash := "the-artifact-hash" - artifactBody := []byte("the artifact body as bytes") - secretKeyEnvName := "TURBO_REMOTE_CACHE_SIGNATURE_KEY" - secretKeyEnvValue := "my-secret-key-env" - t.Setenv(secretKeyEnvName, secretKeyEnvValue) - - cases := []struct { - name string - asa *ArtifactSignatureAuthentication - expectedTagMatches string - expectedTagDoesNotMatch string - }{ - { - name: "Uses hash to generate tag", - asa: &ArtifactSignatureAuthentication{ - teamID: teamID, - enabled: true, - }, - expectedTagMatches: testUtilGetHMACTag(hash, teamID, artifactBody, secretKeyEnvValue), - expectedTagDoesNotMatch: testUtilGetHMACTag("wrong-hash", teamID, artifactBody, secretKeyEnvValue), - }, - { - name: "Uses teamID to generate tag", - asa: &ArtifactSignatureAuthentication{ - teamID: teamID, - enabled: true, - }, - expectedTagMatches: testUtilGetHMACTag(hash, teamID, artifactBody, secretKeyEnvValue), - expectedTagDoesNotMatch: testUtilGetHMACTag(hash, "wrong-teamID", artifactBody, secretKeyEnvValue), - }, - { - name: "Uses artifactBody to generate tag", - asa: &ArtifactSignatureAuthentication{ - teamID: teamID, - enabled: true, - }, - expectedTagMatches: testUtilGetHMACTag(hash, teamID, artifactBody, secretKeyEnvValue), - expectedTagDoesNotMatch: testUtilGetHMACTag(hash, teamID, []byte("wrong-artifact-body"), secretKeyEnvValue), - }, - { - name: "Uses secret to generate tag", - asa: &ArtifactSignatureAuthentication{ - teamID: teamID, - enabled: true, - }, - expectedTagMatches: testUtilGetHMACTag(hash, teamID, artifactBody, secretKeyEnvValue), - expectedTagDoesNotMatch: testUtilGetHMACTag(hash, teamID, artifactBody, "wrong-secret"), - }, - } - - for _, tc := range cases { - t.Run(tc.name, func(t *testing.T) { - tag, err := tc.asa.generateTag(hash, artifactBody) - assert.NoError(t, err) - - // validates the tag - assert.Equal(t, tc.expectedTagMatches, tag) - isValid, err := tc.asa.validate(hash, artifactBody, tc.expectedTagMatches) - assert.NoError(t, err) - assert.True(t, isValid) - - isValid, err = ffi.VerifySignature([]byte(teamID), hash, artifactBody, tag, nil) - assert.NoError(t, err) - assert.True(t, isValid) - - // does not validate the tag - assert.NotEqual(t, tc.expectedTagDoesNotMatch, tag) - isValid, err = tc.asa.validate(hash, artifactBody, tc.expectedTagDoesNotMatch) - assert.NoError(t, err) - assert.False(t, isValid) - - }) - } -} - -// Test utils - -// Return the Base64 encoded HMAC given the artifact metadata and artifact body -func testUtilGetHMACTag(hash string, teamID string, artifactBody []byte, secret string) string { - metadata := []byte(hash) - metadata = append(metadata, []byte(teamID)...) 
- h := hmac.New(sha256.New, []byte(secret)) - h.Write(metadata) - h.Write(artifactBody) - return base64.StdEncoding.EncodeToString(h.Sum(nil)) -} - -func Test_Utils(t *testing.T) { - teamID := "team_someid" - secret := "my-secret" - hash := "the-artifact-hash" - artifactBody := []byte("the artifact body as bytes") - testTag := testUtilGetHMACTag(hash, teamID, artifactBody, secret) - fmt.Println(testTag) - expectedTag := "mh3PI05JSXRfAy3hL0Dz3Gjq0UhZYKalu1HwmLNvYjs=" - assert.True(t, hmac.Equal([]byte(testTag), []byte(expectedTag))) -} diff --git a/cli/internal/cache/cache_test.go b/cli/internal/cache/cache_test.go deleted file mode 100644 index 44b1bffa2793f..0000000000000 --- a/cli/internal/cache/cache_test.go +++ /dev/null @@ -1,314 +0,0 @@ -package cache - -import ( - "net/http" - "reflect" - "sync/atomic" - "testing" - - "github.com/vercel/turbo/cli/internal/analytics" - "github.com/vercel/turbo/cli/internal/fs" - "github.com/vercel/turbo/cli/internal/turbopath" - "github.com/vercel/turbo/cli/internal/util" -) - -type testCache struct { - disabledErr *util.CacheDisabledError - entries map[string][]turbopath.AnchoredSystemPath -} - -func (tc *testCache) Fetch(_ turbopath.AbsoluteSystemPath, hash string, _ []string) (ItemStatus, []turbopath.AnchoredSystemPath, error) { - if tc.disabledErr != nil { - return ItemStatus{}, nil, tc.disabledErr - } - foundFiles, ok := tc.entries[hash] - if ok { - duration := 5 - return newFSTaskCacheStatus(true, duration), foundFiles, nil - } - return NewCacheMiss(), nil, nil -} - -func (tc *testCache) Exists(hash string) ItemStatus { - if tc.disabledErr != nil { - return ItemStatus{} - } - _, ok := tc.entries[hash] - if ok { - return newFSTaskCacheStatus(true, 0) - } - return ItemStatus{} -} - -func (tc *testCache) Put(_ turbopath.AbsoluteSystemPath, hash string, _ int, files []turbopath.AnchoredSystemPath) error { - if tc.disabledErr != nil { - return tc.disabledErr - } - tc.entries[hash] = files - return nil -} - -func (tc *testCache) Clean(_ turbopath.AbsoluteSystemPath) {} -func (tc *testCache) CleanAll() {} -func (tc *testCache) Shutdown() {} - -func newEnabledCache() *testCache { - return &testCache{ - entries: make(map[string][]turbopath.AnchoredSystemPath), - } -} - -func newDisabledCache() *testCache { - return &testCache{ - disabledErr: &util.CacheDisabledError{ - Status: util.CachingStatusDisabled, - Message: "remote caching is disabled", - }, - } -} - -func TestPutCachingDisabled(t *testing.T) { - disabledCache := newDisabledCache() - caches := []Cache{ - newEnabledCache(), - disabledCache, - newEnabledCache(), - newEnabledCache(), - } - var removeCalled uint64 - mplex := &cacheMultiplexer{ - caches: caches, - onCacheRemoved: func(cache Cache, err error) { - atomic.AddUint64(&removeCalled, 1) - }, - } - - err := mplex.Put("unused-target", "some-hash", 5, []turbopath.AnchoredSystemPath{"a-file"}) - if err != nil { - // don't leak the cache removal - t.Errorf("Put got error %v, want ", err) - } - - removes := atomic.LoadUint64(&removeCalled) - if removes != 1 { - t.Errorf("removes count: %v, want 1", removes) - } - - mplex.mu.RLock() - if len(mplex.caches) != 3 { - t.Errorf("found %v caches, expected to have 3 after one was removed", len(mplex.caches)) - } - for _, cache := range mplex.caches { - if cache == disabledCache { - t.Error("found disabled cache, expected it to be removed") - } - } - mplex.mu.RUnlock() - - // subsequent Fetch should still work - cacheStatus, _, err := mplex.Fetch("unused-target", "some-hash", []string{"unused", "files"}) 
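The multiplexer tests around here hinge on typed error detection: a backend that returns a `util.CacheDisabledError` gets removed from the rotation. The mechanism is `errors.As`, which walks the wrap chain. A minimal sketch with a stand-in error type (the real one also carries a caching status code):

```go
package main

import (
	"errors"
	"fmt"
)

// CacheDisabledError stands in for util.CacheDisabledError.
type CacheDisabledError struct{ Message string }

func (e *CacheDisabledError) Error() string { return e.Message }

func fetch() error {
	// Even when the error is wrapped, errors.As can still find it.
	return fmt.Errorf("fetch failed: %w",
		&CacheDisabledError{Message: "remote caching is disabled"})
}

func main() {
	err := fetch()
	cd := &CacheDisabledError{}
	if errors.As(err, &cd) {
		fmt.Println("dropping disabled cache:", cd.Message)
	}
}
```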
- if err != nil { - t.Errorf("got error fetching files: %v", err) - } - hit := cacheStatus.Hit - if !hit { - t.Error("failed to find previously stored files") - } - - removes = atomic.LoadUint64(&removeCalled) - if removes != 1 { - t.Errorf("removes count: %v, want 1", removes) - } -} - -func TestExists(t *testing.T) { - caches := []Cache{ - newEnabledCache(), - } - - mplex := &cacheMultiplexer{ - caches: caches, - } - - itemStatus := mplex.Exists("some-hash") - if itemStatus.Hit { - t.Error("did not expect file to exist") - } - - err := mplex.Put("unused-target", "some-hash", 5, []turbopath.AnchoredSystemPath{"a-file"}) - if err != nil { - // don't leak the cache removal - t.Errorf("Put got error %v, want ", err) - } - - itemStatus = mplex.Exists("some-hash") - if !itemStatus.Hit { - t.Error("failed to find previously stored files") - } -} - -type fakeClient struct{} - -// FetchArtifact implements client -func (*fakeClient) FetchArtifact(hash string) (*http.Response, error) { - panic("unimplemented") -} - -func (*fakeClient) ArtifactExists(hash string) (*http.Response, error) { - panic("unimplemented") -} - -// GetTeamID implements client -func (*fakeClient) GetTeamID() string { - return "fake-team-id" -} - -// PutArtifact implements client -func (*fakeClient) PutArtifact(hash string, body []byte, duration int, tag string) error { - panic("unimplemented") -} - -var _ client = &fakeClient{} - -func TestFetchCachingDisabled(t *testing.T) { - disabledCache := newDisabledCache() - caches := []Cache{ - newEnabledCache(), - disabledCache, - newEnabledCache(), - newEnabledCache(), - } - var removeCalled uint64 - mplex := &cacheMultiplexer{ - caches: caches, - onCacheRemoved: func(cache Cache, err error) { - atomic.AddUint64(&removeCalled, 1) - }, - } - - cacheStatus, _, err := mplex.Fetch("unused-target", "some-hash", []string{"unused", "files"}) - if err != nil { - // don't leak the cache removal - t.Errorf("Fetch got error %v, want ", err) - } - hit := cacheStatus.Hit - if hit { - t.Error("hit on empty cache, expected miss") - } - - removes := atomic.LoadUint64(&removeCalled) - if removes != 1 { - t.Errorf("removes count: %v, want 1", removes) - } - - mplex.mu.RLock() - if len(mplex.caches) != 3 { - t.Errorf("found %v caches, expected to have 3 after one was removed", len(mplex.caches)) - } - for _, cache := range mplex.caches { - if cache == disabledCache { - t.Error("found disabled cache, expected it to be removed") - } - } - mplex.mu.RUnlock() -} - -type nullRecorder struct{} - -func (nullRecorder) LogEvent(analytics.EventPayload) {} - -func TestNew(t *testing.T) { - // Test will bomb if this fails, no need to specially handle the error - repoRoot := fs.AbsoluteSystemPathFromUpstream(t.TempDir()) - type args struct { - opts Opts - recorder analytics.Recorder - onCacheRemoved OnCacheRemoved - client fakeClient - } - tests := []struct { - name string - args args - want Cache - wantErr bool - }{ - { - name: "With no caches configured, new returns a noopCache and an error", - args: args{ - opts: Opts{ - SkipFilesystem: true, - SkipRemote: true, - }, - recorder: &nullRecorder{}, - onCacheRemoved: func(Cache, error) {}, - }, - want: &noopCache{}, - wantErr: true, - }, - { - name: "With just httpCache configured, new returns an httpCache and a noopCache", - args: args{ - opts: Opts{ - SkipFilesystem: true, - Signature: true, - }, - recorder: &nullRecorder{}, - onCacheRemoved: func(Cache, error) {}, - }, - want: &cacheMultiplexer{ - caches: []Cache{&httpCache{}, &noopCache{}}, - }, - wantErr: 
false, - }, - { - name: "With just fsCache configured, new returns only an fsCache", - args: args{ - opts: Opts{ - SkipRemote: true, - }, - recorder: &nullRecorder{}, - onCacheRemoved: func(Cache, error) {}, - }, - want: &fsCache{}, - }, - { - name: "With both configured, new returns an fsCache and httpCache", - args: args{ - opts: Opts{ - Signature: true, - }, - recorder: &nullRecorder{}, - onCacheRemoved: func(Cache, error) {}, - }, - want: &cacheMultiplexer{ - caches: []Cache{&fsCache{}, &httpCache{}}, - }, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - got, err := New(tt.args.opts, repoRoot, &tt.args.client, tt.args.recorder, tt.args.onCacheRemoved) - if (err != nil) != tt.wantErr { - t.Errorf("New() error = %v, wantErr %v", err, tt.wantErr) - return - } - switch multiplexer := got.(type) { - case *cacheMultiplexer: - want := tt.want.(*cacheMultiplexer) - for i := range multiplexer.caches { - if reflect.TypeOf(multiplexer.caches[i]) != reflect.TypeOf(want.caches[i]) { - t.Errorf("New() = %v, want %v", reflect.TypeOf(multiplexer.caches[i]), reflect.TypeOf(want.caches[i])) - } - } - case *fsCache: - if reflect.TypeOf(got) != reflect.TypeOf(tt.want) { - t.Errorf("New() = %v, want %v", reflect.TypeOf(got), reflect.TypeOf(tt.want)) - } - case *noopCache: - if reflect.TypeOf(got) != reflect.TypeOf(tt.want) { - t.Errorf("New() = %v, want %v", reflect.TypeOf(got), reflect.TypeOf(tt.want)) - } - } - }) - } -} diff --git a/cli/internal/cacheitem/cacheitem.go b/cli/internal/cacheitem/cacheitem.go deleted file mode 100644 index 839171ee904a7..0000000000000 --- a/cli/internal/cacheitem/cacheitem.go +++ /dev/null @@ -1,86 +0,0 @@ -// Package cacheitem is an abstraction over the creation and restoration of a cache -package cacheitem - -import ( - "archive/tar" - "bufio" - "crypto/sha512" - "errors" - "io" - - "github.com/vercel/turbo/cli/internal/turbopath" -) - -var ( - errMissingSymlinkTarget = errors.New("symlink restoration is delayed") - errCycleDetected = errors.New("links in the cache are cyclic") - errTraversal = errors.New("tar attempts to write outside of directory") - errNameMalformed = errors.New("file name is malformed") - errNameWindowsUnsafe = errors.New("file name is not Windows-safe") - errUnsupportedFileType = errors.New("attempted to restore unsupported file type") -) - -// CacheItem is a `tar` utility with a little bit extra. -type CacheItem struct { - // Path is the location on disk for the CacheItem. - Path turbopath.AbsoluteSystemPath - // Anchor is the position on disk at which the CacheItem will be restored. - Anchor turbopath.AbsoluteSystemPath - - // For creation. - tw *tar.Writer - zw io.WriteCloser - fileBuffer *bufio.Writer - handle interface{} - compressed bool -} - -// Close any open pipes -func (ci *CacheItem) Close() error { - if ci.tw != nil { - if err := ci.tw.Close(); err != nil { - return err - } - } - - if ci.zw != nil { - if err := ci.zw.Close(); err != nil { - return err - } - } - - if ci.fileBuffer != nil { - if err := ci.fileBuffer.Flush(); err != nil { - return err - } - } - - if ci.handle != nil { - closer, isCloser := ci.handle.(io.Closer) - - if isCloser { - if err := closer.Close(); err != nil { - return err - } - } - - } - - return nil -} - -// GetSha returns the SHA-512 hash for the CacheItem. 
-func (ci *CacheItem) GetSha() ([]byte, error) { - sha := sha512.New() - - reader, isReader := ci.handle.(io.Reader) - if !isReader { - panic("can't read from this cache item") - } - - if _, err := io.Copy(sha, reader); err != nil { - return nil, err - } - - return sha.Sum(nil), nil -} diff --git a/cli/internal/cacheitem/create.go b/cli/internal/cacheitem/create.go deleted file mode 100644 index a4ba158e05a1c..0000000000000 --- a/cli/internal/cacheitem/create.go +++ /dev/null @@ -1,135 +0,0 @@ -package cacheitem - -import ( - "archive/tar" - "bufio" - "io" - "os" - "strings" - "time" - - "github.com/DataDog/zstd" - - "github.com/moby/sys/sequential" - "github.com/vercel/turbo/cli/internal/tarpatch" - "github.com/vercel/turbo/cli/internal/turbopath" -) - -// Create makes a new CacheItem at the specified path. -func Create(path turbopath.AbsoluteSystemPath) (*CacheItem, error) { - handle, err := path.OpenFile(os.O_WRONLY|os.O_CREATE|os.O_TRUNC|os.O_APPEND, 0644) - if err != nil { - return nil, err - } - - cacheItem := &CacheItem{ - Path: path, - handle: handle, - compressed: strings.HasSuffix(path.ToString(), ".zst"), - } - - cacheItem.init() - return cacheItem, nil -} - -// CreateWriter makes a new CacheItem using the specified writer. -func CreateWriter(writer io.WriteCloser) *CacheItem { - cacheItem := &CacheItem{ - handle: writer, - compressed: true, - } - - cacheItem.init() - return cacheItem -} - -// init prepares the CacheItem for writing. -// Wires all the writers end-to-end: -// tar.Writer -> zstd.Writer -> fileBuffer -> file -func (ci *CacheItem) init() { - writer, isWriter := ci.handle.(io.Writer) - if !isWriter { - panic("can't write to this cache item") - } - - fileBuffer := bufio.NewWriterSize(writer, 2^20) // Flush to disk in 1mb chunks. - - var tw *tar.Writer - if ci.compressed { - zw := zstd.NewWriter(fileBuffer) - tw = tar.NewWriter(zw) - ci.zw = zw - } else { - tw = tar.NewWriter(fileBuffer) - } - - ci.tw = tw - ci.fileBuffer = fileBuffer -} - -// AddFile adds a user-cached item to the tar. -func (ci *CacheItem) AddFile(fsAnchor turbopath.AbsoluteSystemPath, filePath turbopath.AnchoredSystemPath) error { - // Calculate the fully-qualified path to the file to read it. - sourcePath := filePath.RestoreAnchor(fsAnchor) - - // We grab the FileInfo which tar.FileInfoHeader accepts. - fileInfo, lstatErr := sourcePath.Lstat() - if lstatErr != nil { - return lstatErr - } - - // Determine if we need to populate the additional link argument to tar.FileInfoHeader. - var link string - if fileInfo.Mode()&os.ModeSymlink != 0 { - linkTarget, readlinkErr := sourcePath.Readlink() - if readlinkErr != nil { - return readlinkErr - } - link = linkTarget - } - - // Normalize the path within the cache. - cacheDestinationName := filePath.ToUnixPath() - - // Generate the the header. - // We do not use header generation from stdlib because it can throw an error. - header, headerErr := tarpatch.FileInfoHeader(cacheDestinationName, fileInfo, link) - if headerErr != nil { - return headerErr - } - - // Throw an error if trying to create a cache that contains a type we don't support. - if (header.Typeflag != tar.TypeReg) && (header.Typeflag != tar.TypeDir) && (header.Typeflag != tar.TypeSymlink) { - return errUnsupportedFileType - } - - // Consistent creation. - header.Uid = 0 - header.Gid = 0 - header.AccessTime = time.Unix(0, 0) - header.ModTime = time.Unix(0, 0) - header.ChangeTime = time.Unix(0, 0) - - // Always write the header. 
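One genuine bug worth flagging in the deleted `init` above: Go has no exponentiation operator, so `bufio.NewWriterSize(writer, 2^20)` computes `2 XOR 20 == 22` and allocates a 22-byte buffer, not the 1 MiB the comment promises. The intended expression is `1 << 20`. A quick demonstration:

```go
package main

import (
	"bufio"
	"fmt"
	"io"
)

func main() {
	fmt.Println(2 ^ 20)  // 22: ^ is bitwise XOR in Go
	fmt.Println(1 << 20) // 1048576: the intended 1 MiB

	small := bufio.NewWriterSize(io.Discard, 2^20)
	big := bufio.NewWriterSize(io.Discard, 1<<20)
	fmt.Println(small.Size(), big.Size()) // 22 1048576
}
```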
- if err := ci.tw.WriteHeader(header); err != nil { - return err - } - - // If there is a body to be written, do so. - if header.Typeflag == tar.TypeReg && header.Size > 0 { - // Windows has a distinct "sequential read" opening mode. - // We use a library that will switch to this mode for Windows. - sourceFile, sourceErr := sequential.OpenFile(sourcePath.ToString(), os.O_RDONLY, 0777) - if sourceErr != nil { - return sourceErr - } - - if _, err := io.Copy(ci.tw, sourceFile); err != nil { - return err - } - - return sourceFile.Close() - } - - return nil -} diff --git a/cli/internal/cacheitem/create_test.go b/cli/internal/cacheitem/create_test.go deleted file mode 100644 index 97eeb01950a56..0000000000000 --- a/cli/internal/cacheitem/create_test.go +++ /dev/null @@ -1,205 +0,0 @@ -package cacheitem - -import ( - "encoding/hex" - "io/fs" - "os" - "runtime" - "testing" - - "github.com/vercel/turbo/cli/internal/turbopath" - "gotest.tools/v3/assert" -) - -type createFileDefinition struct { - Path turbopath.AnchoredSystemPath - Linkname string - fs.FileMode -} - -func createEntry(t *testing.T, anchor turbopath.AbsoluteSystemPath, fileDefinition createFileDefinition) error { - t.Helper() - if fileDefinition.FileMode.IsDir() { - return createDir(t, anchor, fileDefinition) - } else if fileDefinition.FileMode&os.ModeSymlink != 0 { - return createSymlink(t, anchor, fileDefinition) - } else if fileDefinition.FileMode&os.ModeNamedPipe != 0 { - return createFifo(t, anchor, fileDefinition) - } else { - return createFile(t, anchor, fileDefinition) - } -} - -func createDir(t *testing.T, anchor turbopath.AbsoluteSystemPath, fileDefinition createFileDefinition) error { - t.Helper() - path := fileDefinition.Path.RestoreAnchor(anchor) - mkdirAllErr := path.MkdirAllMode(fileDefinition.FileMode & 0777) - assert.NilError(t, mkdirAllErr, "MkdirAll") - return mkdirAllErr -} -func createFile(t *testing.T, anchor turbopath.AbsoluteSystemPath, fileDefinition createFileDefinition) error { - t.Helper() - path := fileDefinition.Path.RestoreAnchor(anchor) - writeErr := path.WriteFile([]byte("file contents"), fileDefinition.FileMode&0777) - assert.NilError(t, writeErr, "WriteFile") - return writeErr -} -func createSymlink(t *testing.T, anchor turbopath.AbsoluteSystemPath, fileDefinition createFileDefinition) error { - t.Helper() - path := fileDefinition.Path.RestoreAnchor(anchor) - symlinkErr := path.Symlink(fileDefinition.Linkname) - assert.NilError(t, symlinkErr, "Symlink") - lchmodErr := path.Lchmod(fileDefinition.FileMode & 0777) - assert.NilError(t, lchmodErr, "Lchmod") - return symlinkErr -} - -func TestCreate(t *testing.T) { - tests := []struct { - name string - files []createFileDefinition - wantDarwin string - wantUnix string - wantWindows string - wantErr error - }{ - { - name: "hello world", - files: []createFileDefinition{ - { - Path: turbopath.AnchoredSystemPath("hello world.txt"), - FileMode: 0 | 0644, - }, - }, - wantDarwin: "4f39f1cab23906f3b89f313392ef7c26f2586e1c15fa6b577cce640c4781d082817927b4875a5413bc23e1248f0b198218998d70e7336e8b1244542ba446ca07", - wantUnix: "4f39f1cab23906f3b89f313392ef7c26f2586e1c15fa6b577cce640c4781d082817927b4875a5413bc23e1248f0b198218998d70e7336e8b1244542ba446ca07", - wantWindows: "e304d1ba8c51209f97bd11dabf27ca06996b70a850db592343942c49480de47bcbb4b7131fb3dd4d7564021d3bc0e648919e4876572b46ac1da97fca92b009c5", - }, - { - name: "links", - files: []createFileDefinition{ - { - Path: turbopath.AnchoredSystemPath("one"), - Linkname: "two", - FileMode: 0 | os.ModeSymlink | 0777, - }, - 
{ - Path: turbopath.AnchoredSystemPath("two"), - Linkname: "three", - FileMode: 0 | os.ModeSymlink | 0777, - }, - { - Path: turbopath.AnchoredSystemPath("three"), - Linkname: "real", - FileMode: 0 | os.ModeSymlink | 0777, - }, - { - Path: turbopath.AnchoredSystemPath("real"), - FileMode: 0 | 0644, - }, - }, - wantDarwin: "07278fdf37db4b212352367f391377bd6bac8f361dd834ae5522d809539bcf3b34d046873c1b45876d7372251446bb12c32f9fa9824914c4a1a01f6d7a206702", - wantUnix: "07278fdf37db4b212352367f391377bd6bac8f361dd834ae5522d809539bcf3b34d046873c1b45876d7372251446bb12c32f9fa9824914c4a1a01f6d7a206702", - wantWindows: "d4dac527e40860ee1ba3fdf2b9b12a1eba385050cf4f5877558dd531f0ecf2a06952fd5f88b852ad99e010943ed7b7f1437b727796369524e85f0c06f25d62c9", - }, - { - name: "subdirectory", - files: []createFileDefinition{ - { - Path: turbopath.AnchoredSystemPath("parent"), - FileMode: 0 | os.ModeDir | 0755, - }, - { - Path: turbopath.AnchoredSystemPath("parent/child"), - FileMode: 0 | 0644, - }, - }, - wantDarwin: "b513eea231daa84245d1d23d99fc398ccf17166ca49754ffbdcc1a3269cd75b7ad176a9c7095ff2481f71dca9fc350189747035f13d53b3a864e4fe35165233f", - wantUnix: "b513eea231daa84245d1d23d99fc398ccf17166ca49754ffbdcc1a3269cd75b7ad176a9c7095ff2481f71dca9fc350189747035f13d53b3a864e4fe35165233f", - wantWindows: "a8c3cba54e4dc214d3b21c3fa284d4032fe317d2f88943159efd5d16f3551ab53fae5c92ebf8acdd1bdb85d1238510b7938772cb11a0daa1b72b5e0f2700b5c7", - }, - { - name: "symlink permissions", - files: []createFileDefinition{ - { - Path: turbopath.AnchoredSystemPath("one"), - Linkname: "two", - FileMode: 0 | os.ModeSymlink | 0644, - }, - }, - wantDarwin: "3ea9d8a4581a0c2ba77557c72447b240c5ac622edcdac570a0bf597c276c2917b4ea73e6c373bbac593a480e396845651fa4b51e049531ff5d44c0adb807c2d9", - wantUnix: "99d953cbe1c0d8545e6f8382208fcefe14bcbefe39872f7b6310da14ac195b9a1b04b6d7b4b56f01a27216176193344a92488f99e124fcd68693f313f7137a1c", - wantWindows: "a4b1dc5c296f8ac4c9124727c1d84d70f72872c7bb4ced6d83ee312889e822baf1eaa72f88e624fb1aac4339d0a1f766ede77eabd2e4524eb26e89f883dc479d", - }, - { - name: "unsupported types error", - files: []createFileDefinition{ - { - Path: turbopath.AnchoredSystemPath("fifo"), - FileMode: 0 | os.ModeNamedPipe | 0644, - }, - }, - wantErr: errUnsupportedFileType, - }, - } - for _, tt := range tests { - getTestFunc := func(compressed bool) func(t *testing.T) { - return func(t *testing.T) { - inputDir := turbopath.AbsoluteSystemPath(t.TempDir()) - archiveDir := turbopath.AbsoluteSystemPath(t.TempDir()) - var archivePath turbopath.AbsoluteSystemPath - if compressed { - archivePath = turbopath.AnchoredSystemPath("out.tar.zst").RestoreAnchor(archiveDir) - } else { - archivePath = turbopath.AnchoredSystemPath("out.tar").RestoreAnchor(archiveDir) - } - - cacheItem, cacheCreateErr := Create(archivePath) - assert.NilError(t, cacheCreateErr, "Cache Create") - - for _, file := range tt.files { - createErr := createEntry(t, inputDir, file) - if createErr != nil { - assert.ErrorIs(t, createErr, tt.wantErr) - assert.NilError(t, cacheItem.Close(), "Close") - return - } - - addFileError := cacheItem.AddFile(inputDir, file.Path) - if addFileError != nil { - assert.ErrorIs(t, addFileError, tt.wantErr) - assert.NilError(t, cacheItem.Close(), "Close") - return - } - } - - assert.NilError(t, cacheItem.Close(), "Cache Close") - - // We only check for repeatability on compressed caches. 
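The per-platform SHA-512 constants in `TestCreate` above only work because `AddFile` scrubs every nondeterministic header field: uid/gid zeroed and all timestamps pinned to the Unix epoch. A minimal sketch of why that yields byte-identical archives; `buildDeterministicTar` is illustrative, and the real code additionally routes header creation through `tarpatch.FileInfoHeader` and zeroes atime/ctime:

```go
package main

import (
	"archive/tar"
	"bytes"
	"crypto/sha512"
	"fmt"
	"time"
)

// buildDeterministicTar writes one regular file with normalized
// metadata. With ownership and times fixed, the archive bytes depend
// only on the file name and contents.
func buildDeterministicTar(name string, contents []byte) []byte {
	buf := &bytes.Buffer{}
	tw := tar.NewWriter(buf)
	hdr := &tar.Header{
		Name:     name,
		Mode:     0644,
		Typeflag: tar.TypeReg,
		Size:     int64(len(contents)),
		Uid:      0,
		Gid:      0,
		ModTime:  time.Unix(0, 0), // pinned, like AddFile above
	}
	if err := tw.WriteHeader(hdr); err != nil {
		panic(err)
	}
	if _, err := tw.Write(contents); err != nil {
		panic(err)
	}
	if err := tw.Close(); err != nil {
		panic(err)
	}
	return buf.Bytes()
}

func main() {
	a := sha512.Sum512(buildDeterministicTar("hello world.txt", []byte("file contents")))
	b := sha512.Sum512(buildDeterministicTar("hello world.txt", []byte("file contents")))
	fmt.Println(a == b) // true: snapshot hashes are stable across runs
}
```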
- if compressed { - openedCacheItem, openedCacheItemErr := Open(archivePath) - assert.NilError(t, openedCacheItemErr, "Cache Open") - - // We actually only need to compare the generated SHA. - // That ensures we got the same output. (Effectively snapshots.) - // This must be called after `Close` because both `tar` and `gzip` have footers. - shaOne, shaOneErr := openedCacheItem.GetSha() - assert.NilError(t, shaOneErr, "GetSha") - snapshot := hex.EncodeToString(shaOne) - - switch runtime.GOOS { - case "darwin": - assert.Equal(t, snapshot, tt.wantDarwin, "Got expected hash.") - case "windows": - assert.Equal(t, snapshot, tt.wantWindows, "Got expected hash.") - default: - assert.Equal(t, snapshot, tt.wantUnix, "Got expected hash.") - } - assert.NilError(t, openedCacheItem.Close(), "Close") - } - } - } - t.Run(tt.name, getTestFunc(false)) - t.Run(tt.name+"zst", getTestFunc(true)) - } -} diff --git a/cli/internal/cacheitem/create_unix_test.go b/cli/internal/cacheitem/create_unix_test.go deleted file mode 100644 index 812d1eb45f5f3..0000000000000 --- a/cli/internal/cacheitem/create_unix_test.go +++ /dev/null @@ -1,20 +0,0 @@ -//go:build darwin || linux -// +build darwin linux - -package cacheitem - -import ( - "syscall" - "testing" - - "github.com/vercel/turbo/cli/internal/turbopath" - "gotest.tools/v3/assert" -) - -func createFifo(t *testing.T, anchor turbopath.AbsoluteSystemPath, fileDefinition createFileDefinition) error { - t.Helper() - path := fileDefinition.Path.RestoreAnchor(anchor) - fifoErr := syscall.Mknod(path.ToString(), syscall.S_IFIFO|0666, 0) - assert.NilError(t, fifoErr, "FIFO") - return fifoErr -} diff --git a/cli/internal/cacheitem/create_windows_test.go b/cli/internal/cacheitem/create_windows_test.go deleted file mode 100644 index 2cbb8b948a4f4..0000000000000 --- a/cli/internal/cacheitem/create_windows_test.go +++ /dev/null @@ -1,14 +0,0 @@ -//go:build windows -// +build windows - -package cacheitem - -import ( - "testing" - - "github.com/vercel/turbo/cli/internal/turbopath" -) - -func createFifo(t *testing.T, anchor turbopath.AbsoluteSystemPath, fileDefinition createFileDefinition) error { - return errUnsupportedFileType -} diff --git a/cli/internal/cacheitem/filepath.go b/cli/internal/cacheitem/filepath.go deleted file mode 100644 index 4fd1681d0ec2d..0000000000000 --- a/cli/internal/cacheitem/filepath.go +++ /dev/null @@ -1,162 +0,0 @@ -// Copyright 2009 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package cacheitem - -import "os" - -const _separator = os.PathSeparator - -// A lazybuf is a lazily constructed path buffer. -// It supports append, reading previously appended bytes, -// and retrieving the final string. It does not allocate a buffer -// to hold the output until that output diverges from s. -type lazybuf struct { - path string - buf []byte - w int - volAndPath string - volLen int -} - -func (b *lazybuf) index(i int) byte { - if b.buf != nil { - return b.buf[i] - } - return b.path[i] -} - -func (b *lazybuf) append(c byte) { - if b.buf == nil { - if b.w < len(b.path) && b.path[b.w] == c { - b.w++ - return - } - b.buf = make([]byte, len(b.path)) - copy(b.buf, b.path[:b.w]) - } - b.buf[b.w] = c - b.w++ -} - -func (b *lazybuf) string() string { - if b.buf == nil { - return b.volAndPath[:b.volLen+b.w] - } - return b.volAndPath[:b.volLen] + string(b.buf[:b.w]) -} - -// Clean is extracted from stdlib and removes `FromSlash` processing -// of the stdlib version. 
-// -// Clean returns the shortest path name equivalent to path -// by purely lexical processing. It applies the following rules -// iteratively until no further processing can be done: -// -// 1. Replace multiple Separator elements with a single one. -// 2. Eliminate each . path name element (the current directory). -// 3. Eliminate each inner .. path name element (the parent directory) -// along with the non-.. element that precedes it. -// 4. Eliminate .. elements that begin a rooted path: -// that is, replace "/.." by "/" at the beginning of a path, -// assuming Separator is '/'. -// -// The returned path ends in a slash only if it represents a root directory, -// such as "/" on Unix or `C:\` on Windows. -// -// Finally, any occurrences of slash are replaced by Separator. -// -// If the result of this process is an empty string, Clean -// returns the string ".". -// -// See also Rob Pike, “Lexical File Names in Plan 9 or -// Getting Dot-Dot Right,” -// https://9p.io/sys/doc/lexnames.html -func Clean(path string) string { - originalPath := path - volLen := volumeNameLen(path) - path = path[volLen:] - if path == "" { - if volLen > 1 && originalPath[1] != ':' { - // should be UNC - // ORIGINAL: return FromSlash(originalPath) - return originalPath - } - return originalPath + "." - } - rooted := os.IsPathSeparator(path[0]) - - // Invariants: - // reading from path; r is index of next byte to process. - // writing to buf; w is index of next byte to write. - // dotdot is index in buf where .. must stop, either because - // it is the leading slash or it is a leading ../../.. prefix. - n := len(path) - out := lazybuf{path: path, volAndPath: originalPath, volLen: volLen} - r, dotdot := 0, 0 - if rooted { - out.append(_separator) - r, dotdot = 1, 1 - } - - for r < n { - switch { - case os.IsPathSeparator(path[r]): - // empty path element - r++ - case path[r] == '.' && r+1 == n: - // . element - r++ - case path[r] == '.' && os.IsPathSeparator(path[r+1]): - // ./ element - r++ - - for r < len(path) && os.IsPathSeparator(path[r]) { - r++ - } - if out.w == 0 && volumeNameLen(path[r:]) > 0 { - // When joining prefix "." and an absolute path on Windows, - // the prefix should not be removed. - out.append('.') - } - case path[r] == '.' && path[r+1] == '.' && (r+2 == n || os.IsPathSeparator(path[r+2])): - // .. element: remove to last separator - r += 2 - switch { - case out.w > dotdot: - // can backtrack - out.w-- - for out.w > dotdot && !os.IsPathSeparator(out.index(out.w)) { - out.w-- - } - case !rooted: - // cannot backtrack, but not rooted, so append .. element. - if out.w > 0 { - out.append(_separator) - } - out.append('.') - out.append('.') - dotdot = out.w - } - default: - // real path element. - // add slash if needed - if rooted && out.w != 1 || !rooted && out.w != 0 { - out.append(_separator) - } - // copy element - for ; r < n && !os.IsPathSeparator(path[r]); r++ { - out.append(path[r]) - } - } - } - - // Turn empty string into "." - if out.w == 0 { - out.append('.') - } - - // ORIGINAL: return FromSlash(out.string()) - return out.string() -} diff --git a/cli/internal/cacheitem/filepath_unix.go b/cli/internal/cacheitem/filepath_unix.go deleted file mode 100644 index d0f67861b83e6..0000000000000 --- a/cli/internal/cacheitem/filepath_unix.go +++ /dev/null @@ -1,14 +0,0 @@ -//go:build !windows -// +build !windows - -// Copyright 2010 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package cacheitem - -// volumeNameLen returns length of the leading volume name on Windows. -// It returns 0 elsewhere. -func volumeNameLen(path string) int { - return 0 -} diff --git a/cli/internal/cacheitem/filepath_windows.go b/cli/internal/cacheitem/filepath_windows.go deleted file mode 100644 index 2c3b852677a2b..0000000000000 --- a/cli/internal/cacheitem/filepath_windows.go +++ /dev/null @@ -1,50 +0,0 @@ -//go:build windows -// +build windows - -// Copyright 2010 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package cacheitem - -func isSlash(c uint8) bool { - return c == '\\' || c == '/' -} - -// volumeNameLen returns length of the leading volume name on Windows. -// It returns 0 elsewhere. -func volumeNameLen(path string) int { - if len(path) < 2 { - return 0 - } - // with drive letter - c := path[0] - if path[1] == ':' && ('a' <= c && c <= 'z' || 'A' <= c && c <= 'Z') { - return 2 - } - // is it UNC? https://msdn.microsoft.com/en-us/library/windows/desktop/aa365247(v=vs.85).aspx - if l := len(path); l >= 5 && isSlash(path[0]) && isSlash(path[1]) && - !isSlash(path[2]) && path[2] != '.' { - // first, leading `\\` and next shouldn't be `\`. its server name. - for n := 3; n < l-1; n++ { - // second, next '\' shouldn't be repeated. - if isSlash(path[n]) { - n++ - // third, following something characters. its share name. - if !isSlash(path[n]) { - if path[n] == '.' { - break - } - for ; n < l; n++ { - if isSlash(path[n]) { - break - } - } - return n - } - break - } - } - } - return 0 -} diff --git a/cli/internal/cacheitem/restore.go b/cli/internal/cacheitem/restore.go deleted file mode 100644 index be60000f778af..0000000000000 --- a/cli/internal/cacheitem/restore.go +++ /dev/null @@ -1,213 +0,0 @@ -package cacheitem - -import ( - "archive/tar" - "errors" - "io" - "os" - "runtime" - "strings" - - "github.com/DataDog/zstd" - - "github.com/moby/sys/sequential" - "github.com/vercel/turbo/cli/internal/turbopath" -) - -// FromReader returns an existing CacheItem at the specified path. -func FromReader(reader io.Reader, compressed bool) *CacheItem { - return &CacheItem{ - handle: reader, - compressed: compressed, - } -} - -// Open returns an existing CacheItem at the specified path. -func Open(path turbopath.AbsoluteSystemPath) (*CacheItem, error) { - handle, err := sequential.OpenFile(path.ToString(), os.O_RDONLY, 0777) - if err != nil { - return nil, err - } - - return &CacheItem{ - Path: path, - handle: handle, - compressed: strings.HasSuffix(path.ToString(), ".zst"), - }, nil -} - -// Restore extracts a cache to a specified disk location. -func (ci *CacheItem) Restore(anchor turbopath.AbsoluteSystemPath) ([]turbopath.AnchoredSystemPath, error) { - var tr *tar.Reader - var closeError error - - reader, isReader := ci.handle.(io.Reader) - if !isReader { - panic("can't read from this cache item") - } - - // We're reading a tar, possibly wrapped in zstd. - if ci.compressed { - zr := zstd.NewReader(reader) - - // The `Close` function for compression effectively just returns the singular - // error field on the decompressor instance. This is extremely unlikely to be - // set without triggering one of the numerous other errors, but we should still - // handle that possible edge case. - defer func() { closeError = zr.Close() }() - tr = tar.NewReader(zr) - } else { - tr = tar.NewReader(reader) - } - - // On first attempt to restore it's possible that a link target doesn't exist. 
- // Save them and topsort them. - var symlinks []*tar.Header - - restored := make([]turbopath.AnchoredSystemPath, 0) - - restorePointErr := anchor.MkdirAll(0755) - if restorePointErr != nil { - return nil, restorePointErr - } - - // We're going to make the following two assumptions here for "fast" path restoration: - // - All directories are enumerated in the `tar`. - // - The contents of the tar are enumerated depth-first. - // - // This allows us to avoid: - // - Attempts at recursive creation of directories. - // - Repetitive `lstat` on restore of a file. - // - // Violating these assumptions won't cause things to break but we're only going to maintain - // an `lstat` cache for the current tree. If you violate these assumptions and the current - // cache does not apply for your path, it will clobber and re-start from the common - // shared prefix. - dirCache := &cachedDirTree{ - anchorAtDepth: []turbopath.AbsoluteSystemPath{anchor}, - } - - for { - header, trErr := tr.Next() - if trErr == io.EOF { - // The end, time to restore any missing links. - symlinksRestored, symlinksErr := topologicallyRestoreSymlinks(dirCache, anchor, symlinks, tr) - restored = append(restored, symlinksRestored...) - if symlinksErr != nil { - return restored, symlinksErr - } - - break - } - if trErr != nil { - return restored, trErr - } - - // The reader will not advance until tr.Next is called. - // We can treat this as file metadata + body reader. - - // Attempt to place the file on disk. - file, restoreErr := restoreEntry(dirCache, anchor, header, tr) - if restoreErr != nil { - if errors.Is(restoreErr, errMissingSymlinkTarget) { - // Links get one shot to be valid, then they're accumulated, DAG'd, and restored on delay. - symlinks = append(symlinks, header) - continue - } - return restored, restoreErr - } - restored = append(restored, file) - } - - return restored, closeError -} - -// restoreRegular is the entry point for all things read from the tar. -func restoreEntry(dirCache *cachedDirTree, anchor turbopath.AbsoluteSystemPath, header *tar.Header, reader *tar.Reader) (turbopath.AnchoredSystemPath, error) { - // We're permissive on creation, but restrictive on restoration. - // There is no need to prevent the cache creation in any case. - // And on restoration, if we fail, we simply run the task. - switch header.Typeflag { - case tar.TypeDir: - return restoreDirectory(dirCache, anchor, header) - case tar.TypeReg: - return restoreRegular(dirCache, anchor, header, reader) - case tar.TypeSymlink: - return restoreSymlink(dirCache, anchor, header) - default: - return "", errUnsupportedFileType - } -} - -// canonicalizeName returns either an AnchoredSystemPath or an error. -func canonicalizeName(name string) (turbopath.AnchoredSystemPath, error) { - // Assuming this was a `turbo`-created input, we currently have an AnchoredUnixPath. - // Assuming this is malicious input we don't really care if we do the wrong thing. - wellFormed, windowsSafe := checkName(name) - - // Determine if the future filename is a well-formed AnchoredUnixPath - if !wellFormed { - return "", errNameMalformed - } - - // Determine if the AnchoredUnixPath is safe to be used on Windows - if runtime.GOOS == "windows" && !windowsSafe { - return "", errNameWindowsUnsafe - } - - // Directories will have a trailing slash. Remove it. - noTrailingSlash := strings.TrimSuffix(name, "/") - - // Okay, we're all set here. 
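`Restore`'s two-phase handling of links deserves a gloss: a symlink whose target is not on disk yet fails with `errMissingSymlinkTarget`, gets queued, and `topologicallyRestoreSymlinks` (not shown in this diff) replays the queue once everything else exists. Below is a simplified retry-until-fixed sketch of the same idea, using plain maps instead of tar headers; the real code builds and topologically sorts a DAG, but cycle detection falls out the same way when a pass makes no progress:

```go
package main

import "fmt"

// restoreLinks retries deferred links until every target exists.
func restoreLinks(links map[string]string, exists map[string]bool) error {
	pending := links
	for len(pending) > 0 {
		progress := false
		next := map[string]string{}
		for link, target := range pending {
			if exists[target] {
				exists[link] = true // stand-in for actually creating the symlink
				progress = true
				continue
			}
			next[link] = target
		}
		if !progress {
			// Mirrors errCycleDetected above.
			return fmt.Errorf("links in the cache are cyclic")
		}
		pending = next
	}
	return nil
}

func main() {
	exists := map[string]bool{"real": true}
	links := map[string]string{"one": "two", "two": "three", "three": "real"}
	fmt.Println(restoreLinks(links, exists), exists["one"]) // <nil> true
}
```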
- return turbopath.AnchoredUnixPathFromUpstream(noTrailingSlash).ToSystemPath(), nil -} - -// checkName returns `wellFormed, windowsSafe` via inspection of separators and traversal -func checkName(name string) (bool, bool) { - length := len(name) - - // Name is of length 0. - if length == 0 { - return false, false - } - - wellFormed := true - windowsSafe := true - - // Name is: - // - "." - // - ".." - if wellFormed && (name == "." || name == "..") { - wellFormed = false - } - - // Name starts with: - // - `/` - // - `./` - // - `../` - if wellFormed && (strings.HasPrefix(name, "/") || strings.HasPrefix(name, "./") || strings.HasPrefix(name, "../")) { - wellFormed = false - } - - // Name ends in: - // - `/.` - // - `/..` - if wellFormed && (strings.HasSuffix(name, "/.") || strings.HasSuffix(name, "/..")) { - wellFormed = false - } - - // Name contains: - // - `//` - // - `/./` - // - `/../` - if wellFormed && (strings.Contains(name, "//") || strings.Contains(name, "/./") || strings.Contains(name, "/../")) { - wellFormed = false - } - - // Name contains: `\` - if strings.ContainsRune(name, '\\') { - windowsSafe = false - } - - return wellFormed, windowsSafe -} diff --git a/cli/internal/cacheitem/restore_directory.go b/cli/internal/cacheitem/restore_directory.go deleted file mode 100644 index bba366d9ff7b4..0000000000000 --- a/cli/internal/cacheitem/restore_directory.go +++ /dev/null @@ -1,144 +0,0 @@ -package cacheitem - -import ( - "archive/tar" - "os" - "path/filepath" - "strings" - - "github.com/vercel/turbo/cli/internal/turbopath" -) - -// restoreDirectory restores a directory. -func restoreDirectory(dirCache *cachedDirTree, anchor turbopath.AbsoluteSystemPath, header *tar.Header) (turbopath.AnchoredSystemPath, error) { - processedName, err := canonicalizeName(header.Name) - if err != nil { - return "", err - } - - // We need to traverse `processedName` from base to root split at - // `os.Separator` to make sure we don't end up following a symlink - // outside of the restore path. - - // Create the directory. - if err := safeMkdirAll(dirCache, anchor, processedName, header.Mode); err != nil { - return "", err - } - - return processedName, nil -} - -type cachedDirTree struct { - anchorAtDepth []turbopath.AbsoluteSystemPath - prefix []turbopath.RelativeSystemPath -} - -func (cr *cachedDirTree) getStartingPoint(path turbopath.AnchoredSystemPath) (turbopath.AbsoluteSystemPath, []turbopath.RelativeSystemPath) { - pathSegmentStrings := strings.Split(path.ToString(), string(os.PathSeparator)) - pathSegments := make([]turbopath.RelativeSystemPath, len(pathSegmentStrings)) - for index, pathSegmentString := range pathSegmentStrings { - pathSegments[index] = turbopath.RelativeSystemPathFromUpstream(pathSegmentString) - } - - i := 0 - for i = 0; i < len(cr.prefix) && i < len(pathSegments); i++ { - if pathSegments[i] != cr.prefix[i] { - break - } - } - - // 0: root anchor, can't remove it. - cr.anchorAtDepth = cr.anchorAtDepth[:i+1] - - // 0: first prefix. - cr.prefix = cr.prefix[:i] - - return cr.anchorAtDepth[i], pathSegments[i:] -} - -func (cr *cachedDirTree) Update(anchor turbopath.AbsoluteSystemPath, newSegment turbopath.RelativeSystemPath) { - cr.anchorAtDepth = append(cr.anchorAtDepth, anchor) - cr.prefix = append(cr.prefix, newSegment) -} - -// safeMkdirAll creates all directories, assuming that the leaf node is a directory. -// FIXME: Recheck the symlink cache before creating a directory. 
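// A worked example of the cache consumed by safeMkdirAll below (these mirror
// the cases in restore_directory_test.go): with anchorAtDepth [anchor,
// anchor/hello] and prefix ["hello"] already recorded, getStartingPoint on
// "hello/world" returns (anchor/hello, ["world"]), so only "world" gets
// re-checked; for "somewhere/else" the shared prefix is empty and it falls
// back to (anchor, ["somewhere", "else"]).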
-func safeMkdirAll(dirCache *cachedDirTree, anchor turbopath.AbsoluteSystemPath, processedName turbopath.AnchoredSystemPath, mode int64) error { - // Iterate through path segments by os.Separator, appending them onto the anchor. - // Check to see if that path segment is a symlink with a target outside of anchor. - - // Pull the iteration starting point from the directory cache. - calculatedAnchor, pathSegments := dirCache.getStartingPoint(processedName) - var checkPathErr error - for _, segment := range pathSegments { - // Plain assignment (not :=) so calculatedAnchor accumulates across segments instead of being shadowed on every iteration. - calculatedAnchor, checkPathErr = checkPath(anchor, calculatedAnchor, segment) - // We hit an existing directory or absolute path that was invalid. - if checkPathErr != nil { - return checkPathErr - } - - // Otherwise we continue and check the next segment. - dirCache.Update(calculatedAnchor, segment) - } - - // If we have made it here we know that it is safe to call os.MkdirAll - // on the Join of anchor and processedName. - // - // This could _still_ error, but we don't care. - return processedName.RestoreAnchor(anchor).MkdirAll(os.FileMode(mode)) -} - -// checkPath ensures that the resolved path (after following any symlinks within it) is still valid. -// It makes sure to never traverse outside of the anchor. -func checkPath(originalAnchor turbopath.AbsoluteSystemPath, accumulatedAnchor turbopath.AbsoluteSystemPath, segment turbopath.RelativeSystemPath) (turbopath.AbsoluteSystemPath, error) { - // Check if the segment itself is sneakily an absolute path... - // (looking at you, Windows. CON, AUX...) - if filepath.IsAbs(segment.ToString()) { - return "", errTraversal - } - - // Find out if this portion of the path is a symlink. - combinedPath := accumulatedAnchor.Join(segment) - fileInfo, err := combinedPath.Lstat() - - // Getting an error here means we failed to stat the path. - // Assume that means we're safe and continue. - if err != nil { - return combinedPath, nil - } - - // Find out if we have a symlink. - isSymlink := fileInfo.Mode()&os.ModeSymlink != 0 - - // If we don't have a symlink it's safe. - if !isSymlink { - return combinedPath, nil - } - - // Check to see if the symlink targets outside of the originalAnchor. - // We don't do eval symlinks because we could find ourselves in a totally - // different place. - - // 1. Get the target. - linkTarget, readLinkErr := combinedPath.Readlink() - if readLinkErr != nil { - return "", readLinkErr - } - - // 2. See if the target is absolute. An absolute target must live inside the anchor. - if filepath.IsAbs(linkTarget) { - absoluteLinkTarget := turbopath.AbsoluteSystemPathFromUpstream(linkTarget) - if absoluteLinkTarget.HasPrefix(originalAnchor) { - return absoluteLinkTarget, nil - } - return "", errTraversal - } - - // 3. Target is relative (or absolute Windows on a Unix device) - relativeLinkTarget := turbopath.RelativeSystemPathFromUpstream(linkTarget) - computedTarget := accumulatedAnchor.UntypedJoin(linkTarget) - if computedTarget.HasPrefix(originalAnchor) { - // Need to recurse and make sure the target doesn't link out.
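// For example, in the "Double indirection" cases in restore_test.go, where
// link -> "up" and up -> "../": checking the "link" segment computes the
// target anchor/up, which is still inside the anchor, so we recurse on "up";
// that recursion computes anchor/.., which no longer has the anchor as a
// prefix, and the walk bottoms out in errTraversal.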
- return checkPath(originalAnchor, accumulatedAnchor, relativeLinkTarget) - } - return "", errTraversal -} diff --git a/cli/internal/cacheitem/restore_directory_test.go b/cli/internal/cacheitem/restore_directory_test.go deleted file mode 100644 index f75bd47e2dab1..0000000000000 --- a/cli/internal/cacheitem/restore_directory_test.go +++ /dev/null @@ -1,103 +0,0 @@ -package cacheitem - -import ( - "reflect" - "testing" - - "github.com/vercel/turbo/cli/internal/turbopath" -) - -func Test_cachedDirTree_getStartingPoint(t *testing.T) { - testDir := turbopath.AbsoluteSystemPath("") - tests := []struct { - name string - - // STATE - cachedDirTree cachedDirTree - - // INPUT - path turbopath.AnchoredSystemPath - - // OUTPUT - calculatedAnchor turbopath.AbsoluteSystemPath - pathSegments []turbopath.RelativeSystemPath - }{ - { - name: "hello world", - cachedDirTree: cachedDirTree{ - anchorAtDepth: []turbopath.AbsoluteSystemPath{testDir}, - prefix: []turbopath.RelativeSystemPath{}, - }, - path: turbopath.AnchoredUnixPath("hello/world").ToSystemPath(), - calculatedAnchor: testDir, - pathSegments: []turbopath.RelativeSystemPath{"hello", "world"}, - }, - { - name: "has a cache", - cachedDirTree: cachedDirTree{ - anchorAtDepth: []turbopath.AbsoluteSystemPath{ - testDir, - testDir.UntypedJoin("hello"), - }, - prefix: []turbopath.RelativeSystemPath{"hello"}, - }, - path: turbopath.AnchoredUnixPath("hello/world").ToSystemPath(), - calculatedAnchor: testDir.UntypedJoin("hello"), - pathSegments: []turbopath.RelativeSystemPath{"world"}, - }, - { - name: "ask for yourself", - cachedDirTree: cachedDirTree{ - anchorAtDepth: []turbopath.AbsoluteSystemPath{ - testDir, - testDir.UntypedJoin("hello"), - testDir.UntypedJoin("hello", "world"), - }, - prefix: []turbopath.RelativeSystemPath{"hello", "world"}, - }, - path: turbopath.AnchoredUnixPath("hello/world").ToSystemPath(), - calculatedAnchor: testDir.UntypedJoin("hello", "world"), - pathSegments: []turbopath.RelativeSystemPath{}, - }, - { - name: "three layer cake", - cachedDirTree: cachedDirTree{ - anchorAtDepth: []turbopath.AbsoluteSystemPath{ - testDir, - testDir.UntypedJoin("hello"), - testDir.UntypedJoin("hello", "world"), - }, - prefix: []turbopath.RelativeSystemPath{"hello", "world"}, - }, - path: turbopath.AnchoredUnixPath("hello/world/again").ToSystemPath(), - calculatedAnchor: testDir.UntypedJoin("hello", "world"), - pathSegments: []turbopath.RelativeSystemPath{"again"}, - }, - { - name: "outside of cache hierarchy", - cachedDirTree: cachedDirTree{ - anchorAtDepth: []turbopath.AbsoluteSystemPath{ - testDir, - testDir.UntypedJoin("hello"), - testDir.UntypedJoin("hello", "world"), - }, - prefix: []turbopath.RelativeSystemPath{"hello", "world"}, - }, - path: turbopath.AnchoredUnixPath("somewhere/else").ToSystemPath(), - calculatedAnchor: testDir, - pathSegments: []turbopath.RelativeSystemPath{"somewhere", "else"}, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - cr := tt.cachedDirTree - calculatedAnchor, pathSegments := cr.getStartingPoint(tt.path) - if !reflect.DeepEqual(calculatedAnchor, tt.calculatedAnchor) { - t.Errorf("cachedDirTree.getStartingPoint() calculatedAnchor = %v, want %v", calculatedAnchor, tt.calculatedAnchor) - } - if !reflect.DeepEqual(pathSegments, tt.pathSegments) { - t.Errorf("cachedDirTree.getStartingPoint() pathSegments = %v, want %v", pathSegments, tt.pathSegments) - } - }) - } -} diff --git a/cli/internal/cacheitem/restore_regular.go b/cli/internal/cacheitem/restore_regular.go deleted file mode 100644 
index ed8946eb3c91f..0000000000000 --- a/cli/internal/cacheitem/restore_regular.go +++ /dev/null @@ -1,46 +0,0 @@ -package cacheitem - -import ( - "archive/tar" - "io" - "os" - - "github.com/vercel/turbo/cli/internal/turbopath" -) - -// restoreRegular restores a file. -func restoreRegular(dirCache *cachedDirTree, anchor turbopath.AbsoluteSystemPath, header *tar.Header, reader *tar.Reader) (turbopath.AnchoredSystemPath, error) { - // Assuming this was a `turbo`-created input, we currently have an AnchoredUnixPath. - // Assuming this is malicious input we don't really care if we do the wrong thing. - processedName, err := canonicalizeName(header.Name) - if err != nil { - return "", err - } - - // We need to traverse `processedName` from base to root split at - // `os.Separator` to make sure we don't end up following a symlink - // outside of the restore path. - if err := safeMkdirFile(dirCache, anchor, processedName, header.Mode); err != nil { - return "", err - } - - // Create the file. - f, createErr := processedName.RestoreAnchor(anchor).OpenFile(os.O_WRONLY|os.O_TRUNC|os.O_CREATE, os.FileMode(header.Mode)) - if createErr != nil { - return "", createErr - } - if _, copyErr := io.Copy(f, reader); copyErr != nil { - // Close the handle rather than leaking it when the copy fails. - _ = f.Close() - return "", copyErr - } - if closeErr := f.Close(); closeErr != nil { - return "", closeErr - } - return processedName, nil -} - -// safeMkdirFile creates all directories, assuming that the leaf node is a file. -func safeMkdirFile(dirCache *cachedDirTree, anchor turbopath.AbsoluteSystemPath, processedName turbopath.AnchoredSystemPath, mode int64) error { - isRootFile := processedName.Dir() == "." - if !isRootFile { - return safeMkdirAll(dirCache, anchor, processedName.Dir(), 0755) - } - - return nil -} diff --git a/cli/internal/cacheitem/restore_symlink.go b/cli/internal/cacheitem/restore_symlink.go deleted file mode 100644 index 4cb29f5196046..0000000000000 --- a/cli/internal/cacheitem/restore_symlink.go +++ /dev/null @@ -1,180 +0,0 @@ -package cacheitem - -import ( - "archive/tar" - "io/fs" - "os" - "path/filepath" - - "github.com/pyr-sh/dag" - "github.com/vercel/turbo/cli/internal/turbopath" -) - -// restoreSymlink restores a symlink and errors if the target is missing. -func restoreSymlink(dirCache *cachedDirTree, anchor turbopath.AbsoluteSystemPath, header *tar.Header) (turbopath.AnchoredSystemPath, error) { - processedName, canonicalizeNameErr := canonicalizeName(header.Name) - if canonicalizeNameErr != nil { - return "", canonicalizeNameErr - } - - // Check to see if the target exists. - processedLinkname := canonicalizeLinkname(anchor, processedName, header.Linkname) - if _, err := os.Lstat(processedLinkname); err != nil { - return "", errMissingSymlinkTarget - } - - return actuallyRestoreSymlink(dirCache, anchor, processedName, header) -} - -// restoreSymlinkMissingTarget restores a symlink and does not error if the target is missing.
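// Together, restoreSymlink and restoreSymlinkMissingTarget implement the
// two-pass strategy used by Restore: links are first attempted eagerly and
// rejected with errMissingSymlinkTarget when their target does not exist
// yet, then the deferred headers are replayed in dependency order by
// topologicallyRestoreSymlinks below via this target-agnostic variant.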
-func restoreSymlinkMissingTarget(dirCache *cachedDirTree, anchor turbopath.AbsoluteSystemPath, header *tar.Header) (turbopath.AnchoredSystemPath, error) { - processedName, canonicalizeNameErr := canonicalizeName(header.Name) - if canonicalizeNameErr != nil { - return "", canonicalizeNameErr - } - - return actuallyRestoreSymlink(dirCache, anchor, processedName, header) -} - -func actuallyRestoreSymlink(dirCache *cachedDirTree, anchor turbopath.AbsoluteSystemPath, processedName turbopath.AnchoredSystemPath, header *tar.Header) (turbopath.AnchoredSystemPath, error) { - // We need to traverse `processedName` from base to root split at - // `os.Separator` to make sure we don't end up following a symlink - // outside of the restore path. - if err := safeMkdirFile(dirCache, anchor, processedName, header.Mode); err != nil { - return "", err - } - - // Determine where we're restoring this symlink. - symlinkFrom := processedName.RestoreAnchor(anchor) - - // Remove any existing object at that location. - // If it errors we'll catch it on creation. - _ = symlinkFrom.Remove() - - // Create the symlink. - // Explicitly uses the _original_ header.Linkname as the target. - // This does not support file names with `\` in them in a cross-platform manner. - symlinkErr := symlinkFrom.Symlink(header.Linkname) - if symlinkErr != nil { - return "", symlinkErr - } - - // Darwin allows you to change the permissions of a symlink. - lchmodErr := symlinkFrom.Lchmod(fs.FileMode(header.Mode)) - if lchmodErr != nil { - return "", lchmodErr - } - - return processedName, nil -} - -// topologicallyRestoreSymlinks ensures that targets of symlinks are created in advance -// of the things that link to them. It does this by topologically sorting all -// of the symlinks. This also enables us to ensure we do not create cycles. -func topologicallyRestoreSymlinks(dirCache *cachedDirTree, anchor turbopath.AbsoluteSystemPath, symlinks []*tar.Header, tr *tar.Reader) ([]turbopath.AnchoredSystemPath, error) { - restored := make([]turbopath.AnchoredSystemPath, 0) - lookup := make(map[string]*tar.Header) - - var g dag.AcyclicGraph - for _, header := range symlinks { - processedName, err := canonicalizeName(header.Name) - if err != nil { - return nil, err - } - - // Only derive the graph keys once the name is known to be valid. - processedSourcename := canonicalizeLinkname(anchor, processedName, processedName.ToString()) - processedLinkname := canonicalizeLinkname(anchor, processedName, header.Linkname) - g.Add(processedSourcename) - g.Add(processedLinkname) - g.Connect(dag.BasicEdge(processedLinkname, processedSourcename)) - lookup[processedSourcename] = header - } - - cycles := g.Cycles() - if cycles != nil { - return restored, errCycleDetected - } - - roots := make(dag.Set) - for _, v := range g.Vertices() { - if g.UpEdges(v).Len() == 0 { - roots.Add(v) - } - } - - walkFunc := func(vertex dag.Vertex, depth int) error { - key, ok := vertex.(string) - if !ok { - return nil - } - header, exists := lookup[key] - if !exists { - return nil - } - - file, restoreErr := restoreSymlinkMissingTarget(dirCache, anchor, header) - if restoreErr != nil { - return restoreErr - } - - restored = append(restored, file) - return nil - } - - walkError := g.DepthFirstWalk(roots, walkFunc) - if walkError != nil { - return restored, walkError - } - - return restored, nil -} - -// canonicalizeLinkname determines (lexically) what the resolved path on the -// system will be when linkname is restored verbatim.
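// For example (mirroring Test_canonicalizeLinkname): with an anchor of
// path/to/anchor and a processedName of child/source, the linkname
// "../sibling/target" canonicalizes to path/to/anchor/sibling/target,
// while the Windows-separated "..\sibling\target" only collapses that way
// on Windows; on Unix it stays one odd filename under child/
// (path/to/anchor/child/..\sibling\target).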
-func canonicalizeLinkname(anchor turbopath.AbsoluteSystemPath, processedName turbopath.AnchoredSystemPath, linkname string) string { - // We don't know _anything_ about linkname. It could be any of: - // - // - Absolute Unix Path - // - Absolute Windows Path - // - Relative Unix Path - // - Relative Windows Path - // - // We also can't _truly_ distinguish if the path is Unix or Windows. - // Take for example: `/Users/turbobot/weird-filenames/\foo\/lol` - // It is a valid file on Unix, but if we do slash conversion it breaks. - // Or `i\am\a\normal\unix\file\but\super\nested\on\windows`. - // - // We also can't safely assume that paths in link targets on one platform - // should be treated as targets for that platform. The author may be - // generating an artifact that should work on Windows on a Unix device. - // - // Given all of that, our best option is to restore link targets _verbatim_. - // No modification, no slash conversion. - // - // In order to DAG sort them, however, we do need to canonicalize them. - // We canonicalize them as if we're restoring them verbatim. - // - // 0. We've extracted a version of `Clean` from stdlib which does nothing but - // separator and traversal collapsing. - cleanedLinkname := Clean(linkname) - - // 1. Check to see if the link target is absolute _on the current platform_. - // If it is an absolute path it's canonical by rule. - if filepath.IsAbs(cleanedLinkname) { - return cleanedLinkname - } - - // Remaining options: - // - Absolute (other platform) Path - // - Relative Unix Path - // - Relative Windows Path - // - // At this point we simply assume that it's a relative path—no matter - // which separators appear in it and where they appear, We can't do - // anything else because the OS will also treat it like that when it is - // a link target. - // - // We manually join these to avoid calls to stdlib's `Clean`. - source := processedName.RestoreAnchor(anchor) - canonicalized := source.Dir().ToString() + string(os.PathSeparator) + cleanedLinkname - return Clean(canonicalized) -} diff --git a/cli/internal/cacheitem/restore_test.go b/cli/internal/cacheitem/restore_test.go deleted file mode 100644 index a0a33d6472edd..0000000000000 --- a/cli/internal/cacheitem/restore_test.go +++ /dev/null @@ -1,1493 +0,0 @@ -package cacheitem - -import ( - "archive/tar" - "errors" - "fmt" - "io" - "io/fs" - "os" - "path/filepath" - "reflect" - "runtime" - "syscall" - "testing" - - "github.com/DataDog/zstd" - "github.com/vercel/turbo/cli/internal/turbopath" - "gotest.tools/v3/assert" -) - -type tarFile struct { - Body string - *tar.Header -} - -type restoreFile struct { - Name turbopath.AnchoredUnixPath - Linkname string - fs.FileMode -} - -// generateTar is used specifically to generate tar files that Turborepo would -// rarely or never encounter without malicious or pathological inputs. We use it -// to make sure that we respond well in these scenarios during restore attempts. 
-func generateTar(t *testing.T, files []tarFile) turbopath.AbsoluteSystemPath { - t.Helper() - testDir := turbopath.AbsoluteSystemPath(t.TempDir()) - testArchivePath := testDir.UntypedJoin("out.tar") - - handle, handleCreateErr := testArchivePath.Create() - assert.NilError(t, handleCreateErr, "os.Create") - - tw := tar.NewWriter(handle) - - for _, file := range files { - if file.Header.Typeflag == tar.TypeReg { - file.Header.Size = int64(len(file.Body)) - } - - writeHeaderErr := tw.WriteHeader(file.Header) - assert.NilError(t, writeHeaderErr, "tw.WriteHeader") - - _, writeErr := tw.Write([]byte(file.Body)) - assert.NilError(t, writeErr, "tw.Write") - } - - twCloseErr := tw.Close() - assert.NilError(t, twCloseErr, "tw.Close") - - handleCloseErr := handle.Close() - assert.NilError(t, handleCloseErr, "handle.Close") - - return testArchivePath -} - -// compressTar splits the compression of a tar file so that we don't -// accidentally diverge in tar creation while still being able to test -// restoration from tar and from .tar.zst. -func compressTar(t *testing.T, archivePath turbopath.AbsoluteSystemPath) turbopath.AbsoluteSystemPath { - t.Helper() - - inputHandle, inputHandleOpenErr := archivePath.Open() - assert.NilError(t, inputHandleOpenErr, "os.Open") - - outputPath := archivePath + ".zst" - outputHandle, outputHandleCreateErr := outputPath.Create() - assert.NilError(t, outputHandleCreateErr, "os.Create") - - zw := zstd.NewWriter(outputHandle) - _, copyError := io.Copy(zw, inputHandle) - assert.NilError(t, copyError, "io.Copy") - - zwCloseErr := zw.Close() - assert.NilError(t, zwCloseErr, "zw.Close") - - inputHandleCloseErr := inputHandle.Close() - assert.NilError(t, inputHandleCloseErr, "inputHandle.Close") - - outputHandleCloseErr := outputHandle.Close() - assert.NilError(t, outputHandleCloseErr, "outputHandle.Close") - - return outputPath -} - -func generateAnchor(t *testing.T) turbopath.AbsoluteSystemPath { - t.Helper() - testDir := turbopath.AbsoluteSystemPath(t.TempDir()) - anchorPoint := testDir.UntypedJoin("anchor") - - mkdirErr := anchorPoint.Mkdir(0777) - assert.NilError(t, mkdirErr, "Mkdir") - - return anchorPoint -} - -func assertFileExists(t *testing.T, anchor turbopath.AbsoluteSystemPath, diskFile restoreFile) { - t.Helper() - // If we have gotten here we can assume this to be true. - processedName := diskFile.Name.ToSystemPath() - fullName := processedName.RestoreAnchor(anchor) - fileInfo, err := fullName.Lstat() - assert.NilError(t, err, "Lstat") - - assert.Equal(t, fileInfo.Mode()&fs.ModePerm, diskFile.FileMode&fs.ModePerm, "File has the expected permissions: "+processedName) - assert.Equal(t, fileInfo.Mode()|fs.ModePerm, diskFile.FileMode|fs.ModePerm, "File has the expected mode.") - - if diskFile.FileMode&os.ModeSymlink != 0 { - linkname, err := fullName.Readlink() - assert.NilError(t, err, "Readlink") - - // We restore Linkname verbatim. 
- assert.Equal(t, linkname, diskFile.Linkname, "Link target matches.") - } -} - -func TestOpen(t *testing.T) { - type wantErr struct { - unix error - windows error - } - type wantOutput struct { - unix []turbopath.AnchoredSystemPath - windows []turbopath.AnchoredSystemPath - } - type wantFiles struct { - unix []restoreFile - windows []restoreFile - } - tests := []struct { - name string - tarFiles []tarFile - wantOutput wantOutput - wantFiles wantFiles - wantErr wantErr - }{ - { - name: "cache optimized", - tarFiles: []tarFile{ - { - Header: &tar.Header{ - Name: "one/", - Typeflag: tar.TypeDir, - Mode: 0755, - }, - }, - { - Header: &tar.Header{ - Name: "one/two/", - Typeflag: tar.TypeDir, - Mode: 0755, - }, - }, - { - Header: &tar.Header{ - Name: "one/two/three/", - Typeflag: tar.TypeDir, - Mode: 0755, - }, - }, - { - Header: &tar.Header{ - Name: "one/two/three/file-one", - Typeflag: tar.TypeReg, - Mode: 0644, - }, - }, - { - Header: &tar.Header{ - Name: "one/two/three/file-two", - Typeflag: tar.TypeReg, - Mode: 0644, - }, - }, - { - Header: &tar.Header{ - Name: "one/two/a/", - Typeflag: tar.TypeDir, - Mode: 0755, - }, - }, - { - Header: &tar.Header{ - Name: "one/two/a/file", - Typeflag: tar.TypeReg, - Mode: 0644, - }, - }, - { - Header: &tar.Header{ - Name: "one/two/b/", - Typeflag: tar.TypeDir, - Mode: 0755, - }, - }, - { - Header: &tar.Header{ - Name: "one/two/b/file", - Typeflag: tar.TypeReg, - Mode: 0644, - }, - }, - }, - wantFiles: wantFiles{ - unix: []restoreFile{ - { - Name: "one", - FileMode: 0 | os.ModeDir | 0755, - }, - { - Name: "one/two", - FileMode: 0 | os.ModeDir | 0755, - }, - { - Name: "one/two/three", - FileMode: 0 | os.ModeDir | 0755, - }, - { - Name: "one/two/three/file-one", - FileMode: 0644, - }, - { - Name: "one/two/three/file-two", - FileMode: 0644, - }, - { - Name: "one/two/a", - FileMode: 0 | os.ModeDir | 0755, - }, - { - Name: "one/two/a/file", - FileMode: 0644, - }, - { - Name: "one/two/b", - FileMode: 0 | os.ModeDir | 0755, - }, - { - Name: "one/two/b/file", - FileMode: 0644, - }, - }, - windows: []restoreFile{ - { - Name: "one", - FileMode: 0 | os.ModeDir | 0777, - }, - { - Name: "one/two", - FileMode: 0 | os.ModeDir | 0777, - }, - { - Name: "one/two/three", - FileMode: 0 | os.ModeDir | 0777, - }, - { - Name: "one/two/three/file-one", - FileMode: 0666, - }, - { - Name: "one/two/three/file-two", - FileMode: 0666, - }, - { - Name: "one/two/a", - FileMode: 0 | os.ModeDir | 0777, - }, - { - Name: "one/two/a/file", - FileMode: 0666, - }, - { - Name: "one/two/b", - FileMode: 0 | os.ModeDir | 0777, - }, - { - Name: "one/two/b/file", - FileMode: 0666, - }, - }, - }, - wantOutput: wantOutput{ - unix: turbopath.AnchoredUnixPathArray{ - "one", - "one/two", - "one/two/three", - "one/two/three/file-one", - "one/two/three/file-two", - "one/two/a", - "one/two/a/file", - "one/two/b", - "one/two/b/file", - }.ToSystemPathArray(), - }, - }, - { - name: "pathological cache works", - tarFiles: []tarFile{ - { - Header: &tar.Header{ - Name: "one/", - Typeflag: tar.TypeDir, - Mode: 0755, - }, - }, - { - Header: &tar.Header{ - Name: "one/two/", - Typeflag: tar.TypeDir, - Mode: 0755, - }, - }, - { - Header: &tar.Header{ - Name: "one/two/a/", - Typeflag: tar.TypeDir, - Mode: 0755, - }, - }, - { - Header: &tar.Header{ - Name: "one/two/b/", - Typeflag: tar.TypeDir, - Mode: 0755, - }, - }, - { - Header: &tar.Header{ - Name: "one/two/three/", - Typeflag: tar.TypeDir, - Mode: 0755, - }, - }, - { - Header: &tar.Header{ - Name: "one/two/a/file", - Typeflag: tar.TypeReg, - Mode: 0644, - }, - }, 
- { - Header: &tar.Header{ - Name: "one/two/b/file", - Typeflag: tar.TypeReg, - Mode: 0644, - }, - }, - { - Header: &tar.Header{ - Name: "one/two/three/file-one", - Typeflag: tar.TypeReg, - Mode: 0644, - }, - }, - { - Header: &tar.Header{ - Name: "one/two/three/file-two", - Typeflag: tar.TypeReg, - Mode: 0644, - }, - }, - }, - wantFiles: wantFiles{ - unix: []restoreFile{ - { - Name: "one", - FileMode: 0 | os.ModeDir | 0755, - }, - { - Name: "one/two", - FileMode: 0 | os.ModeDir | 0755, - }, - { - Name: "one/two/three", - FileMode: 0 | os.ModeDir | 0755, - }, - { - Name: "one/two/three/file-one", - FileMode: 0644, - }, - { - Name: "one/two/three/file-two", - FileMode: 0644, - }, - { - Name: "one/two/a", - FileMode: 0 | os.ModeDir | 0755, - }, - { - Name: "one/two/a/file", - FileMode: 0644, - }, - { - Name: "one/two/b", - FileMode: 0 | os.ModeDir | 0755, - }, - { - Name: "one/two/b/file", - FileMode: 0644, - }, - }, - windows: []restoreFile{ - { - Name: "one", - FileMode: 0 | os.ModeDir | 0777, - }, - { - Name: "one/two", - FileMode: 0 | os.ModeDir | 0777, - }, - { - Name: "one/two/three", - FileMode: 0 | os.ModeDir | 0777, - }, - { - Name: "one/two/three/file-one", - FileMode: 0666, - }, - { - Name: "one/two/three/file-two", - FileMode: 0666, - }, - { - Name: "one/two/a", - FileMode: 0 | os.ModeDir | 0777, - }, - { - Name: "one/two/a/file", - FileMode: 0666, - }, - { - Name: "one/two/b", - FileMode: 0 | os.ModeDir | 0777, - }, - { - Name: "one/two/b/file", - FileMode: 0666, - }, - }, - }, - wantOutput: wantOutput{ - unix: turbopath.AnchoredUnixPathArray{ - "one", - "one/two", - "one/two/a", - "one/two/b", - "one/two/three", - "one/two/a/file", - "one/two/b/file", - "one/two/three/file-one", - "one/two/three/file-two", - }.ToSystemPathArray(), - }, - }, - { - name: "hello world", - tarFiles: []tarFile{ - { - Header: &tar.Header{ - Name: "target", - Typeflag: tar.TypeReg, - Mode: 0644, - }, - Body: "target", - }, - { - Header: &tar.Header{ - Name: "source", - Linkname: "target", - Typeflag: tar.TypeSymlink, - Mode: 0777, - }, - }, - }, - wantFiles: wantFiles{ - unix: []restoreFile{ - { - Name: "source", - Linkname: "target", - FileMode: 0 | os.ModeSymlink | 0777, - }, - { - Name: "target", - FileMode: 0644, - }, - }, - windows: []restoreFile{ - { - Name: "source", - Linkname: "target", - FileMode: 0 | os.ModeSymlink | 0666, - }, - { - Name: "target", - FileMode: 0666, - }, - }, - }, - wantOutput: wantOutput{ - unix: turbopath.AnchoredUnixPathArray{"target", "source"}.ToSystemPathArray(), - }, - }, - { - name: "nested file", - tarFiles: []tarFile{ - { - Header: &tar.Header{ - Name: "folder/", - Typeflag: tar.TypeDir, - Mode: 0755, - }, - }, - { - Header: &tar.Header{ - Name: "folder/file", - Typeflag: tar.TypeReg, - Mode: 0644, - }, - Body: "file", - }, - }, - wantFiles: wantFiles{ - unix: []restoreFile{ - { - Name: "folder", - FileMode: 0 | os.ModeDir | 0755, - }, - { - Name: "folder/file", - FileMode: 0644, - }, - }, - windows: []restoreFile{ - { - Name: "folder", - FileMode: 0 | os.ModeDir | 0777, - }, - { - Name: "folder/file", - FileMode: 0666, - }, - }, - }, - wantOutput: wantOutput{ - unix: turbopath.AnchoredUnixPathArray{"folder", "folder/file"}.ToSystemPathArray(), - }, - }, - { - name: "nested symlink", - tarFiles: []tarFile{ - { - Header: &tar.Header{ - Name: "folder/", - Typeflag: tar.TypeDir, - Mode: 0755, - }, - }, - { - Header: &tar.Header{ - Name: "folder/symlink", - Linkname: "../", - Typeflag: tar.TypeSymlink, - Mode: 0777, - }, - }, - { - Header: &tar.Header{ - Name: 
"folder/symlink/folder-sibling", - Typeflag: tar.TypeReg, - Mode: 0644, - }, - Body: "folder-sibling", - }, - }, - wantFiles: wantFiles{ - unix: []restoreFile{ - { - Name: "folder", - FileMode: 0 | os.ModeDir | 0755, - }, - { - Name: "folder/symlink", - FileMode: 0 | os.ModeSymlink | 0777, - Linkname: "../", - }, - { - Name: "folder/symlink/folder-sibling", - FileMode: 0644, - }, - { - Name: "folder-sibling", - FileMode: 0644, - }, - }, - windows: []restoreFile{ - { - Name: "folder", - FileMode: 0 | os.ModeDir | 0777, - }, - { - Name: "folder/symlink", - FileMode: 0 | os.ModeSymlink | 0666, - Linkname: "..\\", - }, - { - Name: "folder/symlink/folder-sibling", - FileMode: 0666, - }, - { - Name: "folder-sibling", - FileMode: 0666, - }, - }, - }, - wantOutput: wantOutput{ - unix: turbopath.AnchoredUnixPathArray{"folder", "folder/symlink", "folder/symlink/folder-sibling"}.ToSystemPathArray(), - }, - }, - { - name: "pathological symlinks", - tarFiles: []tarFile{ - { - Header: &tar.Header{ - Name: "one", - Linkname: "two", - Typeflag: tar.TypeSymlink, - Mode: 0777, - }, - }, - { - Header: &tar.Header{ - Name: "two", - Linkname: "three", - Typeflag: tar.TypeSymlink, - Mode: 0777, - }, - }, - { - Header: &tar.Header{ - Name: "three", - Linkname: "real", - Typeflag: tar.TypeSymlink, - Mode: 0777, - }, - }, - { - Header: &tar.Header{ - Name: "real", - Typeflag: tar.TypeReg, - Mode: 0755, - }, - Body: "real", - }, - }, - wantFiles: wantFiles{ - unix: []restoreFile{ - { - Name: "one", - Linkname: "two", - FileMode: 0 | os.ModeSymlink | 0777, - }, - { - Name: "two", - Linkname: "three", - FileMode: 0 | os.ModeSymlink | 0777, - }, - { - Name: "three", - Linkname: "real", - FileMode: 0 | os.ModeSymlink | 0777, - }, - { - Name: "real", - FileMode: 0 | 0755, - }, - }, - windows: []restoreFile{ - { - Name: "one", - Linkname: "two", - FileMode: 0 | os.ModeSymlink | 0666, - }, - { - Name: "two", - Linkname: "three", - FileMode: 0 | os.ModeSymlink | 0666, - }, - { - Name: "three", - Linkname: "real", - FileMode: 0 | os.ModeSymlink | 0666, - }, - { - Name: "real", - FileMode: 0 | 0666, - }, - }, - }, - wantOutput: wantOutput{ - unix: turbopath.AnchoredUnixPathArray{"real", "three", "two", "one"}.ToSystemPathArray(), - }, - }, - { - name: "place file at dir location", - tarFiles: []tarFile{ - { - Header: &tar.Header{ - Name: "folder-not-file/", - Typeflag: tar.TypeDir, - Mode: 0755, - }, - }, - { - Header: &tar.Header{ - Name: "folder-not-file/subfile", - Typeflag: tar.TypeReg, - Mode: 0755, - }, - Body: "subfile", - }, - { - Header: &tar.Header{ - Name: "folder-not-file", - Typeflag: tar.TypeReg, - Mode: 0755, - }, - Body: "this shouldn't work", - }, - }, - wantFiles: wantFiles{ - unix: []restoreFile{ - { - Name: "folder-not-file", - FileMode: 0 | os.ModeDir | 0755, - }, - { - Name: "folder-not-file/subfile", - FileMode: 0755, - }, - }, - windows: []restoreFile{ - { - Name: "folder-not-file", - FileMode: 0 | os.ModeDir | 0777, - }, - { - Name: "folder-not-file/subfile", - FileMode: 0666, - }, - }, - }, - wantOutput: wantOutput{ - unix: turbopath.AnchoredUnixPathArray{"folder-not-file", "folder-not-file/subfile"}.ToSystemPathArray(), - }, - wantErr: wantErr{ - unix: syscall.EISDIR, - windows: syscall.EISDIR, - }, - }, - // { - // name: "missing symlink with file at subdir", - // tarFiles: []tarFile{ - // { - // Header: &tar.Header{ - // Name: "one", - // Linkname: "two", - // Typeflag: tar.TypeSymlink, - // Mode: 0777, - // }, - // }, - // { - // Header: &tar.Header{ - // Name: "one/file", - // Typeflag: 
tar.TypeReg, - // Mode: 0755, - // }, - // Body: "file", - // }, - // }, - // wantFiles: wantFiles{ - // unix: []restoreFile{ - // { - // Name: "one", - // Linkname: "two", - // FileMode: 0 | os.ModeSymlink | 0777, - // }, - // }, - // }, - // wantOutput: wantOutput{ - // unix: turbopath.AnchoredUnixPathArray{"one"}.ToSystemPathArray(), - // windows: nil, - // }, - // wantErr: wantErr{ - // unix: os.ErrExist, - // windows: os.ErrExist, - // }, - // }, - { - name: "symlink cycle", - tarFiles: []tarFile{ - { - Header: &tar.Header{ - Name: "one", - Linkname: "two", - Typeflag: tar.TypeSymlink, - Mode: 0777, - }, - }, - { - Header: &tar.Header{ - Name: "two", - Linkname: "three", - Typeflag: tar.TypeSymlink, - Mode: 0777, - }, - }, - { - Header: &tar.Header{ - Name: "three", - Linkname: "one", - Typeflag: tar.TypeSymlink, - Mode: 0777, - }, - }, - }, - wantFiles: wantFiles{ - unix: []restoreFile{}, - }, - wantOutput: wantOutput{ - unix: []turbopath.AnchoredSystemPath{}, - }, - wantErr: wantErr{ - unix: errCycleDetected, - windows: errCycleDetected, - }, - }, - { - name: "symlink clobber", - tarFiles: []tarFile{ - { - Header: &tar.Header{ - Name: "one", - Linkname: "two", - Typeflag: tar.TypeSymlink, - Mode: 0777, - }, - }, - { - Header: &tar.Header{ - Name: "one", - Linkname: "three", - Typeflag: tar.TypeSymlink, - Mode: 0777, - }, - }, - { - Header: &tar.Header{ - Name: "one", - Linkname: "real", - Typeflag: tar.TypeSymlink, - Mode: 0777, - }, - }, - { - Header: &tar.Header{ - Name: "real", - Typeflag: tar.TypeReg, - Mode: 0755, - }, - Body: "real", - }, - }, - wantFiles: wantFiles{ - unix: []restoreFile{ - { - Name: "one", - Linkname: "real", - FileMode: 0 | os.ModeSymlink | 0777, - }, - { - Name: "real", - FileMode: 0755, - }, - }, - windows: []restoreFile{ - { - Name: "one", - Linkname: "real", - FileMode: 0 | os.ModeSymlink | 0666, - }, - { - Name: "real", - FileMode: 0666, - }, - }, - }, - wantOutput: wantOutput{ - unix: turbopath.AnchoredUnixPathArray{"real", "one"}.ToSystemPathArray(), - }, - }, - { - name: "symlink traversal", - tarFiles: []tarFile{ - { - Header: &tar.Header{ - Name: "escape", - Linkname: "../", - Typeflag: tar.TypeSymlink, - Mode: 0777, - }, - }, - { - Header: &tar.Header{ - Name: "escape/file", - Typeflag: tar.TypeReg, - Mode: 0644, - }, - Body: "file", - }, - }, - wantFiles: wantFiles{ - unix: []restoreFile{ - { - Name: "escape", - Linkname: "../", - FileMode: 0 | os.ModeSymlink | 0777, - }, - }, - windows: []restoreFile{ - { - Name: "escape", - Linkname: "..\\", - FileMode: 0 | os.ModeSymlink | 0666, - }, - }, - }, - wantOutput: wantOutput{ - unix: turbopath.AnchoredUnixPathArray{"escape"}.ToSystemPathArray(), - }, - wantErr: wantErr{ - unix: errTraversal, - windows: errTraversal, - }, - }, - { - name: "Double indirection: file", - tarFiles: []tarFile{ - { - Header: &tar.Header{ - Name: "up", - Linkname: "../", - Typeflag: tar.TypeSymlink, - Mode: 0777, - }, - }, - { - Header: &tar.Header{ - Name: "link", - Linkname: "up", - Typeflag: tar.TypeSymlink, - Mode: 0777, - }, - }, - { - Header: &tar.Header{ - Name: "link/outside-file", - Typeflag: tar.TypeReg, - Mode: 0755, - }, - }, - }, - wantErr: wantErr{unix: errTraversal, windows: errTraversal}, - wantOutput: wantOutput{ - unix: turbopath.AnchoredUnixPathArray{ - "up", - "link", - }.ToSystemPathArray(), - }, - }, - { - name: "Double indirection: folder", - tarFiles: []tarFile{ - { - Header: &tar.Header{ - Name: "up", - Linkname: "../", - Typeflag: tar.TypeSymlink, - Mode: 0777, - }, - }, - { - Header: &tar.Header{ 
- Name: "link", - Linkname: "up", - Typeflag: tar.TypeSymlink, - Mode: 0777, - }, - }, - { - Header: &tar.Header{ - Name: "link/level-one/level-two/", - Typeflag: tar.TypeDir, - Mode: 0755, - }, - }, - }, - wantErr: wantErr{unix: errTraversal, windows: errTraversal}, - wantOutput: wantOutput{ - unix: turbopath.AnchoredUnixPathArray{ - "up", - "link", - }.ToSystemPathArray(), - }, - }, - { - name: "name traversal", - tarFiles: []tarFile{ - { - Header: &tar.Header{ - Name: "../escape", - Typeflag: tar.TypeReg, - Mode: 0644, - }, - Body: "file", - }, - }, - wantFiles: wantFiles{ - unix: []restoreFile{}, - }, - wantOutput: wantOutput{ - unix: []turbopath.AnchoredSystemPath{}, - }, - wantErr: wantErr{ - unix: errNameMalformed, - windows: errNameMalformed, - }, - }, - { - name: "windows unsafe", - tarFiles: []tarFile{ - { - Header: &tar.Header{ - Name: "back\\slash\\file", - Typeflag: tar.TypeReg, - Mode: 0644, - }, - Body: "file", - }, - }, - wantFiles: wantFiles{ - unix: []restoreFile{ - { - Name: "back\\slash\\file", - FileMode: 0644, - }, - }, - windows: []restoreFile{}, - }, - wantOutput: wantOutput{ - unix: turbopath.AnchoredUnixPathArray{"back\\slash\\file"}.ToSystemPathArray(), - windows: turbopath.AnchoredUnixPathArray{}.ToSystemPathArray(), - }, - wantErr: wantErr{ - unix: nil, - windows: errNameWindowsUnsafe, - }, - }, - { - name: "fifo (and others) unsupported", - tarFiles: []tarFile{ - { - Header: &tar.Header{ - Name: "fifo", - Typeflag: tar.TypeFifo, - }, - }, - }, - wantFiles: wantFiles{ - unix: []restoreFile{}, - }, - wantOutput: wantOutput{ - unix: []turbopath.AnchoredSystemPath{}, - }, - wantErr: wantErr{ - unix: errUnsupportedFileType, - windows: errUnsupportedFileType, - }, - }, - } - for _, tt := range tests { - getTestFunc := func(compressed bool) func(t *testing.T) { - return func(t *testing.T) { - var archivePath turbopath.AbsoluteSystemPath - if compressed { - archivePath = compressTar(t, generateTar(t, tt.tarFiles)) - } else { - archivePath = generateTar(t, tt.tarFiles) - } - anchor := generateAnchor(t) - - cacheItem, err := Open(archivePath) - assert.NilError(t, err, "Open") - - restoreOutput, restoreErr := cacheItem.Restore(anchor) - var desiredErr error - if runtime.GOOS == "windows" { - desiredErr = tt.wantErr.windows - } else { - desiredErr = tt.wantErr.unix - } - if desiredErr != nil { - if !errors.Is(restoreErr, desiredErr) { - t.Errorf("wanted err: %v, got err: %v", tt.wantErr, restoreErr) - } - } else { - assert.NilError(t, restoreErr, "Restore") - } - - outputComparison := tt.wantOutput.unix - if runtime.GOOS == "windows" && tt.wantOutput.windows != nil { - outputComparison = tt.wantOutput.windows - } - - if !reflect.DeepEqual(restoreOutput, outputComparison) { - t.Errorf("Restore() = %v, want %v", restoreOutput, outputComparison) - } - - // Check files on disk. 
- filesComparison := tt.wantFiles.unix - if runtime.GOOS == "windows" && tt.wantFiles.windows != nil { - filesComparison = tt.wantFiles.windows - } - for _, diskFile := range filesComparison { - assertFileExists(t, anchor, diskFile) - } - - assert.NilError(t, cacheItem.Close(), "Close") - } - } - t.Run(tt.name+"zst", getTestFunc(true)) - t.Run(tt.name, getTestFunc(false)) - } -} - -func Test_checkName(t *testing.T) { - tests := []struct { - path string - wellFormed bool - windowsSafe bool - }{ - // Empty - { - path: "", - wellFormed: false, - windowsSafe: false, - }, - // Bad prefix - { - path: ".", - wellFormed: false, - windowsSafe: true, - }, - { - path: "..", - wellFormed: false, - windowsSafe: true, - }, - { - path: "/", - wellFormed: false, - windowsSafe: true, - }, - { - path: "./", - wellFormed: false, - windowsSafe: true, - }, - { - path: "../", - wellFormed: false, - windowsSafe: true, - }, - // Bad prefix, suffixed - { - path: "/a", - wellFormed: false, - windowsSafe: true, - }, - { - path: "./a", - wellFormed: false, - windowsSafe: true, - }, - { - path: "../a", - wellFormed: false, - windowsSafe: true, - }, - // Bad Suffix - { - path: "/.", - wellFormed: false, - windowsSafe: true, - }, - { - path: "/..", - wellFormed: false, - windowsSafe: true, - }, - // Bad Suffix, with prefix - { - path: "a/.", - wellFormed: false, - windowsSafe: true, - }, - { - path: "a/..", - wellFormed: false, - windowsSafe: true, - }, - // Bad middle - { - path: "//", - wellFormed: false, - windowsSafe: true, - }, - { - path: "/./", - wellFormed: false, - windowsSafe: true, - }, - { - path: "/../", - wellFormed: false, - windowsSafe: true, - }, - // Bad middle, prefixed - { - path: "a//", - wellFormed: false, - windowsSafe: true, - }, - { - path: "a/./", - wellFormed: false, - windowsSafe: true, - }, - { - path: "a/../", - wellFormed: false, - windowsSafe: true, - }, - // Bad middle, suffixed - { - path: "//a", - wellFormed: false, - windowsSafe: true, - }, - { - path: "/./a", - wellFormed: false, - windowsSafe: true, - }, - { - path: "/../a", - wellFormed: false, - windowsSafe: true, - }, - // Bad middle, wrapped - { - path: "a//a", - wellFormed: false, - windowsSafe: true, - }, - { - path: "a/./a", - wellFormed: false, - windowsSafe: true, - }, - { - path: "a/../a", - wellFormed: false, - windowsSafe: true, - }, - // False positive tests - { - path: "...", - wellFormed: true, - windowsSafe: true, - }, - { - path: ".../a", - wellFormed: true, - windowsSafe: true, - }, - { - path: "a/...", - wellFormed: true, - windowsSafe: true, - }, - { - path: "a/.../a", - wellFormed: true, - windowsSafe: true, - }, - { - path: ".../...", - wellFormed: true, - windowsSafe: true, - }, - } - for _, tt := range tests { - t.Run(fmt.Sprintf("Path: \"%v\"", tt.path), func(t *testing.T) { - wellFormed, windowsSafe := checkName(tt.path) - if wellFormed != tt.wellFormed || windowsSafe != tt.windowsSafe { - t.Errorf("\nwantOutput: checkName(\"%v\") wellFormed = %v, windowsSafe %v\ngot: checkName(\"%v\") wellFormed = %v, windowsSafe %v", tt.path, tt.wellFormed, tt.windowsSafe, tt.path, wellFormed, windowsSafe) - } - }) - } -} - -func Test_canonicalizeLinkname(t *testing.T) { - // We're lying that this thing is absolute, but that's not relevant for tests. 
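// filepath.Join produces path/to/anchor on Unix and path\to\anchor on
// Windows, which is what drives the separate canonicalUnix and
// canonicalWindows expectations below.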
- anchor := turbopath.AbsoluteSystemPath(filepath.Join("path", "to", "anchor")) - - tests := []struct { - name string - processedName turbopath.AnchoredSystemPath - linkname string - canonicalUnix string - canonicalWindows string - }{ - { - name: "hello world", - processedName: turbopath.AnchoredSystemPath("source"), - linkname: "target", - canonicalUnix: "path/to/anchor/target", - canonicalWindows: "path\\to\\anchor\\target", - }, - { - name: "Unix path subdirectory traversal", - processedName: turbopath.AnchoredUnixPath("child/source").ToSystemPath(), - linkname: "../sibling/target", - canonicalUnix: "path/to/anchor/sibling/target", - canonicalWindows: "path\\to\\anchor\\sibling\\target", - }, - { - name: "Windows path subdirectory traversal", - processedName: turbopath.AnchoredUnixPath("child/source").ToSystemPath(), - linkname: "..\\sibling\\target", - canonicalUnix: "path/to/anchor/child/..\\sibling\\target", - canonicalWindows: "path\\to\\anchor\\sibling\\target", - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - canonical := tt.canonicalUnix - if runtime.GOOS == "windows" { - canonical = tt.canonicalWindows - } - if got := canonicalizeLinkname(anchor, tt.processedName, tt.linkname); got != canonical { - t.Errorf("canonicalizeLinkname() = %v, want %v", got, canonical) - } - }) - } -} - -func Test_canonicalizeName(t *testing.T) { - tests := []struct { - name string - fileName string - want turbopath.AnchoredSystemPath - wantErr error - }{ - { - name: "hello world", - fileName: "test.txt", - want: "test.txt", - }, - { - name: "directory", - fileName: "something/", - want: "something", - }, - { - name: "malformed name", - fileName: "//", - want: "", - wantErr: errNameMalformed, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - got, err := canonicalizeName(tt.fileName) - if tt.wantErr != nil && !errors.Is(err, tt.wantErr) { - t.Errorf("canonicalizeName() error = %v, wantErr %v", err, tt.wantErr) - return - } - if !reflect.DeepEqual(got, tt.want) { - t.Errorf("canonicalizeName() = %v, want %v", got, tt.want) - } - }) - } -} - -func TestCacheItem_Restore(t *testing.T) { - tests := []struct { - name string - tarFiles []tarFile - want []turbopath.AnchoredSystemPath - }{ - { - name: "duplicate restores", - tarFiles: []tarFile{ - { - Header: &tar.Header{ - Name: "target", - Typeflag: tar.TypeReg, - Mode: 0644, - }, - Body: "target", - }, - { - Header: &tar.Header{ - Name: "source", - Linkname: "target", - Typeflag: tar.TypeSymlink, - Mode: 0777, - }, - }, - { - Header: &tar.Header{ - Name: "one/", - Typeflag: tar.TypeDir, - Mode: 0755, - }, - }, - { - Header: &tar.Header{ - Name: "one/two/", - Typeflag: tar.TypeDir, - Mode: 0755, - }, - }, - }, - want: turbopath.AnchoredUnixPathArray{"target", "source", "one", "one/two"}.ToSystemPathArray(), - }, - } - for _, tt := range tests { - getTestFunc := func(compressed bool) func(t *testing.T) { - return func(t *testing.T) { - var archivePath turbopath.AbsoluteSystemPath - if compressed { - archivePath = compressTar(t, generateTar(t, tt.tarFiles)) - } else { - archivePath = generateTar(t, tt.tarFiles) - } - anchor := generateAnchor(t) - - cacheItem, err := Open(archivePath) - assert.NilError(t, err, "Open") - - restoreOutput, restoreErr := cacheItem.Restore(anchor) - if !reflect.DeepEqual(restoreOutput, tt.want) { - t.Errorf("#1 CacheItem.Restore() = %v, want %v", restoreOutput, tt.want) - } - assert.NilError(t, restoreErr, "Restore #1") - assert.NilError(t, cacheItem.Close(), "Close") - - 
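// The second Open/Restore pass below deliberately reuses the same anchor:
// restoring over existing output must succeed, since regular files are
// reopened with O_TRUNC and actuallyRestoreSymlink removes any existing
// object before re-creating the link.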
cacheItem2, err2 := Open(archivePath) - assert.NilError(t, err2, "Open") - - restoreOutput2, restoreErr2 := cacheItem2.Restore(anchor) - if !reflect.DeepEqual(restoreOutput2, tt.want) { - t.Errorf("#2 CacheItem.Restore() = %v, want %v", restoreOutput2, tt.want) - } - assert.NilError(t, restoreErr2, "Restore #2") - assert.NilError(t, cacheItem2.Close(), "Close") - } - } - t.Run(tt.name+"zst", getTestFunc(true)) - t.Run(tt.name, getTestFunc(false)) - } -} diff --git a/cli/internal/chrometracing/chrometracing.go b/cli/internal/chrometracing/chrometracing.go deleted file mode 100644 index d9325fdd5706e..0000000000000 --- a/cli/internal/chrometracing/chrometracing.go +++ /dev/null @@ -1,227 +0,0 @@ -// Copyright 2020 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Package chrometracing writes per-process Chrome trace_event files that can be -// loaded into chrome://tracing. -package chrometracing - -import ( - "encoding/json" - "fmt" - "os" - "path/filepath" - "strings" - "sync" - "time" - - "github.com/google/chrometracing/traceinternal" -) - -var trace = struct { - start time.Time - pid uint64 - - fileMu sync.Mutex - file *os.File -}{ - pid: uint64(os.Getpid()), -} - -var out = setup(false) - -// Path returns the full path of the chrome://tracing trace_event file for -// display in log messages. -func Path() string { return out } - -// EnableTracing turns on tracing, regardless of running in a test or -// not. Tracing is enabled by default if the CHROMETRACING_DIR environment -// variable is present and non-empty. -func EnableTracing() { - trace.fileMu.Lock() - alreadyEnabled := trace.file != nil - trace.fileMu.Unlock() - if alreadyEnabled { - return - } - out = setup(true) -} - -func setup(overrideEnable bool) string { - inTest := os.Getenv("TEST_TMPDIR") != "" - explicitlyEnabled := os.Getenv("CHROMETRACING_DIR") != "" - enableTracing := inTest || explicitlyEnabled || overrideEnable - if !enableTracing { - return "" - } - - var err error - dir := os.Getenv("TEST_UNDECLARED_OUTPUTS_DIR") - if dir == "" { - dir = os.Getenv("CHROMETRACING_DIR") - } - if dir == "" { - dir = os.TempDir() - } - fn := filepath.Join(dir, fmt.Sprintf("%s.%d.trace", filepath.Base(os.Args[0]), trace.pid)) - trace.file, err = os.OpenFile(fn, os.O_WRONLY|os.O_CREATE|os.O_TRUNC|os.O_EXCL, 0644) - if err != nil { - // Using the log package from func init results in an error message - // being printed. - fmt.Fprintf(os.Stderr, "continuing without tracing: %v\n", err) - return "" - } - - // We only ever open a JSON array. Ending the array is optional as per - // go/trace_event so that not cleanly finished traces can still be read. 
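// Sketch of the resulting file (field names per the Chrome trace_event
// format; the values are illustrative): an opening '[', then one object
// plus ",\n" per writeEvent call, e.g.
//
//	[{"name":"process_name","ph":"M",...},
//	{"name":"some work","ph":"B","pid":42,"tid":0,...},
//	{"name":"some work","ph":"E","pid":42,"tid":0,...},
//
// Close in chrometracing_close.go later seeks back over the final ",\n"
// and writes ']' so the array parses cleanly.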
- trace.file.Write([]byte{'['}) - trace.start = time.Now() - - writeEvent(&traceinternal.ViewerEvent{ - Name: "process_name", - Phase: "M", // Metadata Event - Pid: trace.pid, - Tid: trace.pid, - Arg: struct { - Name string `json:"name"` - }{ - Name: strings.Join(os.Args, " "), - }, - }) - return fn -} - -func writeEvent(ev *traceinternal.ViewerEvent) { - b, err := json.Marshal(&ev) - if err != nil { - fmt.Fprintf(os.Stderr, "%v\n", err) - return - } - trace.fileMu.Lock() - defer trace.fileMu.Unlock() - if _, err = trace.file.Write(b); err != nil { - fmt.Fprintf(os.Stderr, "%v\n", err) - return - } - if _, err = trace.file.Write([]byte{',', '\n'}); err != nil { - fmt.Fprintf(os.Stderr, "%v\n", err) - return - } -} - -const ( - begin = "B" - end = "E" -) - -// A PendingEvent represents an ongoing unit of work. The begin trace event has -// already been written, and calling Done will write the end trace event. -type PendingEvent struct { - name string - tid uint64 -} - -// Done writes the end trace event for this unit of work. -func (pe *PendingEvent) Done() { - if pe == nil || pe.name == "" || trace.file == nil { - return - } - writeEvent(&traceinternal.ViewerEvent{ - Name: pe.name, - Phase: end, - Pid: trace.pid, - Tid: pe.tid, - Time: float64(time.Since(trace.start).Microseconds()), - }) - releaseTid(pe.tid) -} - -// Event logs a unit of work. To instrument a Go function, use e.g.: -// -// func calcPi() { -// defer chrometracing.Event("calculate pi").Done() -// // … -// } -// -// For more finely-granular traces, use e.g.: -// -// for _, cmd := range commands { -// ev := chrometracing.Event("initialize " + cmd.Name) -// cmd.Init() -// ev.Done() -// } -func Event(name string) *PendingEvent { - if trace.file == nil { - return &PendingEvent{} - } - tid := tid() - writeEvent(&traceinternal.ViewerEvent{ - Name: name, - Phase: begin, - Pid: trace.pid, - Tid: tid, - Time: float64(time.Since(trace.start).Microseconds()), - }) - return &PendingEvent{ - name: name, - tid: tid, - } -} - -// tids is a chrome://tracing thread id pool. Go does not permit accessing the -// goroutine id, so we need to maintain our own identifier. The chrome://tracing -// file format requires a numeric thread id, so we just increment whenever we -// need a thread id, and reuse the ones no longer in use. -// -// In practice, parallelized sections of the code (many goroutines) end up using -// only as few thread ids as are concurrently in use, and the rest of the events -// mirror the code call stack nicely. See e.g. http://screen/7MPcAcvXQNUE3JZ -var tids struct { - sync.Mutex - - // We allocate chrome://tracing thread ids based on the index of the - // corresponding entry in the used slice. - used []bool - - // next points to the earliest unused tid to consider for the next tid to - // hand out. This is purely a performance optimization to avoid O(n) slice - // iteration. 
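// Example: events holding tids 0, 1, and 2 are in flight; releasing tid 1
// sets used[1] = false and moves next back to 1, so the following Event
// reuses tid 1 instead of growing the pool to 3.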
- next int -} - -func tid() uint64 { - tids.Lock() - defer tids.Unlock() - // re-use released tids if any - for t := tids.next; t < len(tids.used); t++ { - if !tids.used[t] { - tids.used[t] = true - tids.next = t + 1 - return uint64(t) - } - } - // allocate a new tid - t := len(tids.used) - tids.used = append(tids.used, true) - tids.next = t + 1 - return uint64(t) -} - -func releaseTid(t uint64) { - tids.Lock() - defer tids.Unlock() - tids.used[int(t)] = false - if tids.next > int(t) { - tids.next = int(t) - } -} diff --git a/cli/internal/chrometracing/chrometracing_close.go b/cli/internal/chrometracing/chrometracing_close.go deleted file mode 100644 index 1b3a7b949a6b3..0000000000000 --- a/cli/internal/chrometracing/chrometracing_close.go +++ /dev/null @@ -1,26 +0,0 @@ -package chrometracing - -// Close overwrites the trailing (,\n) with (]\n) and closes the trace file. -// Close is implemented in a separate file to keep a separation between custom -// code and upstream from github.com/google/chrometracing. Additionally, we can -// enable linting for code we author, while leaving upstream code alone. -func Close() error { - trace.fileMu.Lock() - defer trace.fileMu.Unlock() - // If tracing was never enabled there is no file to finalize. - if trace.file == nil { - return nil - } - // Seek backwards two bytes (,\n) - if _, err := trace.file.Seek(-2, 1); err != nil { - return err - } - // Write 1 byte, ']', leaving the trailing '\n' in place - if _, err := trace.file.Write([]byte{']'}); err != nil { - return err - } - // Force the filesystem to write to disk - if err := trace.file.Sync(); err != nil { - return err - } - if err := trace.file.Close(); err != nil { - return err - } - return nil -} diff --git a/cli/internal/ci/ci.go b/cli/internal/ci/ci.go deleted file mode 100644 index a22ad78d2d534..0000000000000 --- a/cli/internal/ci/ci.go +++ /dev/null @@ -1,58 +0,0 @@ -// Package ci is a simple utility to check if a program is being executed in common CI/CD/PaaS vendors.
-// This is a partial port of https://github.com/watson/ci-info -package ci - -import "os" - -var isCI = os.Getenv("BUILD_ID") != "" || os.Getenv("BUILD_NUMBER") != "" || os.Getenv("CI") != "" || os.Getenv("CI_APP_ID") != "" || os.Getenv("CI_BUILD_ID") != "" || os.Getenv("CI_BUILD_NUMBER") != "" || os.Getenv("CI_NAME") != "" || os.Getenv("CONTINUOUS_INTEGRATION") != "" || os.Getenv("RUN_ID") != "" || os.Getenv("TEAMCITY_VERSION") != "" || false - -// IsCi returns true if the program is executing in a CI/CD environment -func IsCi() bool { - return isCI -} - -// Name returns the name of the CI vendor -func Name() string { - return Info().Name -} - -// Constant returns the name of the CI vendor as a constant -func Constant() string { - return Info().Constant -} - -// Info returns information about a CI vendor -func Info() Vendor { - // check both the env var key and value - for _, env := range Vendors { - if env.EvalEnv != nil { - for name, value := range env.EvalEnv { - if os.Getenv(name) == value { - return env - } - } - } else { - // check for any of the listed env var keys, with any value - if env.Env.Any != nil && len(env.Env.Any) > 0 { - for _, envVar := range env.Env.Any { - if os.Getenv(envVar) != "" { - return env - } - } - // check for all of the listed env var keys, with any value - } else if env.Env.All != nil && len(env.Env.All) > 0 { - all := true - for _, envVar := range env.Env.All { - if os.Getenv(envVar) == "" { - all = false - break - } - } - if all { - return env - } - } - } - } - return Vendor{} -} diff --git a/cli/internal/ci/ci_test.go b/cli/internal/ci/ci_test.go deleted file mode 100644 index 333ff610c528d..0000000000000 --- a/cli/internal/ci/ci_test.go +++ /dev/null @@ -1,105 +0,0 @@ -package ci - -import ( - "os" - "reflect" - "strings" - "testing" -) - -func getVendor(name string) Vendor { - for _, v := range Vendors { - if v.Name == name { - return v - } - } - return Vendor{} -} - -func TestInfo(t *testing.T) { - tests := []struct { - name string - setEnv []string - want Vendor - }{ - { - name: "AppVeyor", - setEnv: []string{"APPVEYOR"}, - want: getVendor("AppVeyor"), - }, - { - name: "Vercel", - setEnv: []string{"VERCEL", "NOW_BUILDER"}, - want: getVendor("Vercel"), - }, - { - name: "Render", - setEnv: []string{"RENDER"}, - want: getVendor("Render"), - }, - { - name: "Netlify", - setEnv: []string{"NETLIFY"}, - want: getVendor("Netlify CI"), - }, - { - name: "Jenkins", - setEnv: []string{"BUILD_ID", "JENKINS_URL"}, - want: getVendor("Jenkins"), - }, - { - name: "Jenkins - failing", - setEnv: []string{"BUILD_ID"}, - want: getVendor(""), - }, - { - name: "GitHub Actions", - setEnv: []string{"GITHUB_ACTIONS"}, - want: getVendor("GitHub Actions"), - }, - { - name: "Codeship", - setEnv: []string{"CI_NAME=codeship"}, - want: getVendor("Codeship"), - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - // unset existing envs - liveCi := "" - if Name() == "GitHub Actions" { - liveCi = os.Getenv("GITHUB_ACTIONS") - err := os.Unsetenv("GITHUB_ACTIONS") - if err != nil { - t.Errorf("Error un-setting GITHUB_ACTIONS env: %s", err) - } - } - // set envs - for _, env := range tt.setEnv { - envParts := strings.Split(env, "=") - val := "some value" - if len(envParts) > 1 { - val = envParts[1] - } - err := os.Setenv(envParts[0], val) - if err != nil { - t.Errorf("Error setting %s for %s test", envParts[0], tt.name) - } - defer os.Unsetenv(envParts[0]) //nolint errcheck - - } - // run test - if got := Info(); !reflect.DeepEqual(got, tt.want) { - 
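// For each vendor in order, Info requires an exact key=value match when
// EvalEnv is declared (e.g. CI_NAME=codeship); otherwise any variable in
// Env.Any, or every variable in Env.All, must be set. That is why the
// "Jenkins - failing" case above expects the zero Vendor: Jenkins lists
// both JENKINS_URL and BUILD_ID in Env.All, and only BUILD_ID is set.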
t.Errorf("Info() = %v, want %v", got, tt.want) - } - - // reset env - if Name() == "GitHub Actions" { - err := os.Setenv("GITHUB_ACTIONS", liveCi) - if err != nil { - t.Errorf("Error re-setting GITHUB_ACTIONS env: %s", err) - } - } - }) - } -} diff --git a/cli/internal/ci/vendors.go b/cli/internal/ci/vendors.go deleted file mode 100644 index 66fdc2cf22946..0000000000000 --- a/cli/internal/ci/vendors.go +++ /dev/null @@ -1,270 +0,0 @@ -package ci - -type vendorEnvs struct { - Any []string - All []string -} - -// Vendor describes a CI/CD vendor execution environment -type Vendor struct { - // Name is the name of the vendor - Name string - // Constant is the environment variable prefix used by the vendor - Constant string - // Env is one or many environment variables that can be used to quickly determine the vendor (using simple os.Getenv(env) check) - Env vendorEnvs - // EvalEnv is key/value map of environment variables that can be used to quickly determine the vendor - EvalEnv map[string]string - - // The name of the environment variable that contains the current git sha - ShaEnvVar string - - // The name of the environment variable that contains the current checked out branch - BranchEnvVar string - - // The name of the environment variable that contains the user using turbo - UsernameEnvVar string -} - -// Vendors is a list of common CI/CD vendors (from https://github.com/watson/ci-info/blob/master/vendors.json) -var Vendors = []Vendor{ - { - Name: "Appcircle", - Constant: "APPCIRCLE", - Env: vendorEnvs{Any: []string{"AC_APPCIRCLE"}}, - }, - { - Name: "AppVeyor", - Constant: "APPVEYOR", - Env: vendorEnvs{Any: []string{"APPVEYOR"}}, - }, - { - Name: "AWS CodeBuild", - Constant: "CODEBUILD", - Env: vendorEnvs{Any: []string{"CODEBUILD_BUILD_ARN"}}, - }, - { - Name: "Azure Pipelines", - Constant: "AZURE_PIPELINES", - Env: vendorEnvs{Any: []string{"SYSTEM_TEAMFOUNDATIONCOLLECTIONURI"}}, - }, - { - Name: "Bamboo", - Constant: "BAMBOO", - Env: vendorEnvs{Any: []string{"bamboo_planKey"}}, - }, - { - Name: "Bitbucket Pipelines", - Constant: "BITBUCKET", - Env: vendorEnvs{Any: []string{"BITBUCKET_COMMIT"}}, - }, - { - Name: "Bitrise", - Constant: "BITRISE", - Env: vendorEnvs{Any: []string{"BITRISE_IO"}}, - }, - { - Name: "Buddy", - Constant: "BUDDY", - Env: vendorEnvs{Any: []string{"BUDDY_WORKSPACE_ID"}}, - }, - { - Name: "Buildkite", - Constant: "BUILDKITE", - Env: vendorEnvs{Any: []string{"BUILDKITE"}}, - }, - { - Name: "CircleCI", - Constant: "CIRCLE", - Env: vendorEnvs{Any: []string{"CIRCLECI"}}, - }, - { - Name: "Cirrus CI", - Constant: "CIRRUS", - Env: vendorEnvs{Any: []string{"CIRRUS_CI"}}, - }, - { - Name: "Codefresh", - Constant: "CODEFRESH", - Env: vendorEnvs{Any: []string{"CF_BUILD_ID"}}, - }, - { - Name: "Codemagic", - Constant: "CODEMAGIC", - Env: vendorEnvs{Any: []string{"CM_BUILD_ID"}}, - }, - { - Name: "Codeship", - Constant: "CODESHIP", - EvalEnv: map[string]string{ - "CI_NAME": "codeship", - }, - }, - { - Name: "Drone", - Constant: "DRONE", - Env: vendorEnvs{Any: []string{"DRONE"}}, - }, - { - Name: "dsari", - Constant: "DSARI", - Env: vendorEnvs{Any: []string{"DSARI"}}, - }, - { - Name: "Expo Application Services", - Constant: "EAS", - Env: vendorEnvs{Any: []string{"EAS_BUILD"}}, - }, - // https://docs.github.com/en/actions/learn-github-actions/variables#default-environment-variables - { - Name: "GitHub Actions", - Constant: "GITHUB_ACTIONS", - Env: vendorEnvs{Any: []string{"GITHUB_ACTIONS"}}, - ShaEnvVar: "GITHUB_SHA", - BranchEnvVar: "GITHUB_REF_NAME", - UsernameEnvVar: 
"GITHUB_ACTOR", - }, - { - Name: "GitLab CI", - Constant: "GITLAB", - Env: vendorEnvs{Any: []string{"GITLAB_CI"}}, - }, - { - Name: "GoCD", - Constant: "GOCD", - Env: vendorEnvs{Any: []string{"GO_PIPELINE_LABEL"}}, - }, - { - Name: "Google Cloud Build", - Constant: "GOOGLE_CLOUD_BUILD", - Env: vendorEnvs{Any: []string{"BUILDER_OUTPUT"}}, - }, - { - Name: "LayerCI", - Constant: "LAYERCI", - Env: vendorEnvs{Any: []string{"LAYERCI"}}, - }, - { - Name: "Gerrit", - Constant: "GERRIT", - Env: vendorEnvs{Any: []string{"GERRIT_PROJECT"}}, - }, - { - Name: "Hudson", - Constant: "HUDSON", - Env: vendorEnvs{Any: []string{"HUDSON"}}, - }, - { - Name: "Jenkins", - Constant: "JENKINS", - Env: vendorEnvs{All: []string{"JENKINS_URL", "BUILD_ID"}}, - }, - { - Name: "Magnum CI", - Constant: "MAGNUM", - Env: vendorEnvs{Any: []string{"MAGNUM"}}, - }, - { - Name: "Netlify CI", - Constant: "NETLIFY", - Env: vendorEnvs{Any: []string{"NETLIFY"}}, - }, - { - Name: "Nevercode", - Constant: "NEVERCODE", - Env: vendorEnvs{Any: []string{"NEVERCODE"}}, - }, - { - Name: "ReleaseHub", - Constant: "RELEASEHUB", - Env: vendorEnvs{Any: []string{"RELEASE_BUILD_ID"}}, - }, - { - Name: "Render", - Constant: "RENDER", - Env: vendorEnvs{Any: []string{"RENDER"}}, - }, - { - Name: "Sail CI", - Constant: "SAIL", - Env: vendorEnvs{Any: []string{"SAILCI"}}, - }, - { - Name: "Screwdriver", - Constant: "SCREWDRIVER", - Env: vendorEnvs{Any: []string{"SCREWDRIVER"}}, - }, - { - Name: "Semaphore", - Constant: "SEMAPHORE", - Env: vendorEnvs{Any: []string{"SEMAPHORE"}}, - }, - { - Name: "Shippable", - Constant: "SHIPPABLE", - Env: vendorEnvs{Any: []string{"SHIPPABLE"}}, - }, - { - Name: "Solano CI", - Constant: "SOLANO", - Env: vendorEnvs{Any: []string{"TDDIUM"}}, - }, - { - Name: "Sourcehut", - Constant: "SOURCEHUT", - EvalEnv: map[string]string{ - "CI_NAME": "sourcehut", - }, - }, - { - Name: "Strider CD", - Constant: "STRIDER", - Env: vendorEnvs{Any: []string{"STRIDER"}}, - }, - { - Name: "TaskCluster", - Constant: "TASKCLUSTER", - Env: vendorEnvs{All: []string{"TASK_ID", "RUN_ID"}}, - }, - { - Name: "TeamCity", - Constant: "TEAMCITY", - Env: vendorEnvs{Any: []string{"TEAMCITY_VERSION"}}, - }, - { - Name: "Travis CI", - Constant: "TRAVIS", - Env: vendorEnvs{Any: []string{"TRAVIS"}}, - }, - // https://vercel.com/docs/concepts/projects/environment-variables/system-environment-variables - { - Name: "Vercel", - Constant: "VERCEL", - Env: vendorEnvs{Any: []string{"NOW_BUILDER", "VERCEL"}}, - ShaEnvVar: "VERCEL_GIT_COMMIT_SHA", - BranchEnvVar: "VERCEL_GIT_COMMIT_REF", - UsernameEnvVar: "VERCEL_GIT_COMMIT_AUTHOR_LOGIN", - }, - { - Name: "Visual Studio App Center", - Constant: "APPCENTER", - Env: vendorEnvs{Any: []string{"APPCENTER"}}, - }, - { - Name: "Woodpecker", - Constant: "WOODPECKER", - EvalEnv: map[string]string{ - "CI": "woodpecker", - }, - }, - { - Name: "Xcode Cloud", - Constant: "XCODE_CLOUD", - Env: vendorEnvs{Any: []string{"CI_XCODE_PROJECT"}}, - }, - { - Name: "Xcode Server", - Constant: "XCODE_SERVER", - Env: vendorEnvs{Any: []string{"XCS"}}, - }, -} diff --git a/cli/internal/client/analytics.go b/cli/internal/client/analytics.go deleted file mode 100644 index 30a250d01aa48..0000000000000 --- a/cli/internal/client/analytics.go +++ /dev/null @@ -1,22 +0,0 @@ -package client - -import ( - "context" - "encoding/json" -) - -// RecordAnalyticsEvents is a specific method for POSTing events to Vercel -func (c *APIClient) RecordAnalyticsEvents(ctx context.Context, events []map[string]interface{}) error { - body, err := 
json.Marshal(events) - if err != nil { - return err - - } - - // We don't care about the response here - if _, err := c.JSONPost(ctx, "/v8/artifacts/events", body); err != nil { - return err - } - - return nil -} diff --git a/cli/internal/client/cache.go b/cli/internal/client/cache.go deleted file mode 100644 index 533d89c5b8983..0000000000000 --- a/cli/internal/client/cache.go +++ /dev/null @@ -1,169 +0,0 @@ -package client - -import ( - "encoding/json" - "fmt" - "io" - "io/ioutil" - "net/http" - "net/url" - "strings" - - "github.com/hashicorp/go-retryablehttp" - "github.com/vercel/turbo/cli/internal/ci" - "github.com/vercel/turbo/cli/internal/util" -) - -// PutArtifact uploads an artifact associated with a given hash string to the remote cache -func (c *APIClient) PutArtifact(hash string, artifactBody []byte, duration int, tag string) error { - if err := c.okToRequest(); err != nil { - return err - } - params := url.Values{} - c.addTeamParam(&params) - // only add a ? if it's actually needed (makes logging cleaner) - encoded := params.Encode() - if encoded != "" { - encoded = "?" + encoded - } - - requestURL := c.makeURL("/v8/artifacts/" + hash + encoded) - allowAuth := true - if c.usePreflight { - resp, latestRequestURL, err := c.doPreflight(requestURL, - http.MethodPut, - "Content-Type, x-artifact-duration, Authorization, User-Agent, x-artifact-tag") - if err != nil { - return fmt.Errorf("pre-flight request failed before trying to store in HTTP cache: %w", err) - } - requestURL = latestRequestURL - headers := resp.Header.Get("Access-Control-Allow-Headers") - allowAuth = strings.Contains(strings.ToLower(headers), strings.ToLower("Authorization")) - } - - req, err := retryablehttp.NewRequest(http.MethodPut, requestURL, artifactBody) - req.Header.Set("Content-Type", "application/octet-stream") - req.Header.Set("x-artifact-duration", fmt.Sprintf("%v", duration)) - if allowAuth { - req.Header.Set("Authorization", "Bearer "+c.token) - } - req.Header.Set("User-Agent", c.userAgent()) - if ci.IsCi() { - req.Header.Set("x-artifact-client-ci", ci.Constant()) - } - if tag != "" { - req.Header.Set("x-artifact-tag", tag) - } - if err != nil { - return fmt.Errorf("[WARNING] Invalid cache URL: %w", err) - } - - resp, err := c.HTTPClient.Do(req) - if err != nil { - return fmt.Errorf("[ERROR] Failed to store files in HTTP cache: %w", err) - } - defer func() { _ = resp.Body.Close() }() - if resp.StatusCode == http.StatusForbidden { - return c.handle403(resp.Body) - } - if resp.StatusCode != http.StatusOK { - return fmt.Errorf("[ERROR] Failed to store files in HTTP cache: %s against URL %s", resp.Status, requestURL) - } - return nil -} - -// FetchArtifact attempts to retrieve the build artifact with the given hash from the remote cache -func (c *APIClient) FetchArtifact(hash string) (*http.Response, error) { - return c.getArtifact(hash, http.MethodGet) -} - -// ArtifactExists attempts to determine if the build artifact with the given hash exists in the Remote Caching server -func (c *APIClient) ArtifactExists(hash string) (*http.Response, error) { - return c.getArtifact(hash, http.MethodHead) -} - -// getArtifact attempts to retrieve the build artifact with the given hash from the remote cache -func (c *APIClient) getArtifact(hash string, httpMethod string) (*http.Response, error) { - if httpMethod != http.MethodHead && httpMethod != http.MethodGet { - return nil, fmt.Errorf("invalid httpMethod %v, expected GET or HEAD", httpMethod) - } - - if err := c.okToRequest(); err != nil { - return nil, err - } - 
params := url.Values{} - c.addTeamParam(&params) - // only add a ? if it's actually needed (makes logging cleaner) - encoded := params.Encode() - if encoded != "" { - encoded = "?" + encoded - } - - requestURL := c.makeURL("/v8/artifacts/" + hash + encoded) - allowAuth := true - if c.usePreflight { - resp, latestRequestURL, err := c.doPreflight(requestURL, http.MethodGet, "Authorization, User-Agent") - if err != nil { - return nil, fmt.Errorf("pre-flight request failed before trying to fetch files in HTTP cache: %w", err) - } - requestURL = latestRequestURL - headers := resp.Header.Get("Access-Control-Allow-Headers") - allowAuth = strings.Contains(strings.ToLower(headers), strings.ToLower("Authorization")) - } - - req, err := retryablehttp.NewRequest(httpMethod, requestURL, nil) - if allowAuth { - req.Header.Set("Authorization", "Bearer "+c.token) - } - req.Header.Set("User-Agent", c.userAgent()) - if err != nil { - return nil, fmt.Errorf("invalid cache URL: %w", err) - } - - resp, err := c.HTTPClient.Do(req) - if err != nil { - return nil, fmt.Errorf("failed to fetch artifact: %v", err) - } else if resp.StatusCode == http.StatusForbidden { - err = c.handle403(resp.Body) - _ = resp.Body.Close() - return nil, err - } - return resp, nil -} - -func (c *APIClient) handle403(body io.Reader) error { - raw, err := ioutil.ReadAll(body) - if err != nil { - return fmt.Errorf("failed to read response %v", err) - } - apiError := &apiError{} - err = json.Unmarshal(raw, apiError) - if err != nil { - return fmt.Errorf("failed to read response (%v): %v", string(raw), err) - } - disabledErr, err := apiError.cacheDisabled() - if err != nil { - return err - } - return disabledErr -} - -type apiError struct { - Code string `json:"code"` - Message string `json:"message"` -} - -func (ae *apiError) cacheDisabled() (*util.CacheDisabledError, error) { - if strings.HasPrefix(ae.Code, "remote_caching_") { - statusString := ae.Code[len("remote_caching_"):] - status, err := util.CachingStatusFromString(statusString) - if err != nil { - return nil, err - } - return &util.CacheDisabledError{ - Status: status, - Message: ae.Message, - }, nil - } - return nil, fmt.Errorf("unknown status %v: %v", ae.Code, ae.Message) -} diff --git a/cli/internal/client/client.go b/cli/internal/client/client.go deleted file mode 100644 index e94cd38efd2fa..0000000000000 --- a/cli/internal/client/client.go +++ /dev/null @@ -1,276 +0,0 @@ -// Package client implements some interfaces and convenience methods to interact with Vercel APIs and Remote Cache -package client - -import ( - "context" - "crypto/x509" - "errors" - "fmt" - "io/ioutil" - "net/http" - "net/url" - "runtime" - "strings" - "sync/atomic" - "time" - - "github.com/hashicorp/go-hclog" - "github.com/hashicorp/go-retryablehttp" - "github.com/vercel/turbo/cli/internal/ci" - "github.com/vercel/turbo/cli/internal/turbostate" -) - -// APIClient is the main interface for making network requests to Vercel -type APIClient struct { - // The api's base URL - baseURL string - token string - turboVersion string - - // Must be used via atomic package - currentFailCount uint64 - HTTPClient *retryablehttp.Client - teamID string - teamSlug string - // Whether or not to send preflight requests before uploads - usePreflight bool -} - -// ErrTooManyFailures is returned from remote cache API methods after `maxRemoteFailCount` errors have occurred -var ErrTooManyFailures = errors.New("skipping HTTP Request, too many failures have occurred") - -// _maxRemoteFailCount is the number of failed requests before 
we stop trying to upload/download -// artifacts to the remote cache -const _maxRemoteFailCount = uint64(3) - -// SetToken updates the APIClient's Token -func (c *APIClient) SetToken(token string) { - c.token = token -} - -// NewClient creates a new APIClient -func NewClient(config turbostate.APIClientConfig, logger hclog.Logger, turboVersion string) *APIClient { - client := &APIClient{ - baseURL: config.APIURL, - turboVersion: turboVersion, - HTTPClient: &retryablehttp.Client{ - HTTPClient: &http.Client{ - Timeout: time.Duration(config.Timeout) * time.Second, - }, - RetryWaitMin: 2 * time.Second, - RetryWaitMax: 10 * time.Second, - RetryMax: 2, - Backoff: retryablehttp.DefaultBackoff, - Logger: logger, - }, - token: config.Token, - teamID: config.TeamID, - teamSlug: config.TeamSlug, - usePreflight: config.UsePreflight, - } - client.HTTPClient.CheckRetry = client.checkRetry - return client -} - -// hasUser returns true if we have credentials for a user -func (c *APIClient) hasUser() bool { - return c.token != "" -} - -// IsLinked returns true if we have a user and linked team -func (c *APIClient) IsLinked() bool { - return c.hasUser() && (c.teamID != "" || c.teamSlug != "") -} - -// GetTeamID returns the currently configured team id -func (c *APIClient) GetTeamID() string { - return c.teamID -} - -func (c *APIClient) retryCachePolicy(resp *http.Response, err error) (bool, error) { - if err != nil { - if errors.As(err, &x509.UnknownAuthorityError{}) { - // Don't retry if the error was due to TLS cert verification failure. - atomic.AddUint64(&c.currentFailCount, 1) - return false, err - } - atomic.AddUint64(&c.currentFailCount, 1) - return true, nil - } - - // 429 Too Many Requests is recoverable. Sometimes the server puts - // a Retry-After response header to indicate when the server is - // available to start processing requests from the client. - if resp.StatusCode == http.StatusTooManyRequests { - atomic.AddUint64(&c.currentFailCount, 1) - return true, nil - } - - // Check the response code. We retry on 500-range responses to allow - // the server time to recover, as 500's are typically not permanent - // errors and may relate to outages on the server side. This will catch - // invalid response codes as well, like 0 and 999. - if resp.StatusCode == 0 || (resp.StatusCode >= 500 && resp.StatusCode != 501) { - atomic.AddUint64(&c.currentFailCount, 1) - return true, fmt.Errorf("unexpected HTTP status %s", resp.Status) - } - - // swallow the error and stop retrying - return false, nil -} - -func (c *APIClient) checkRetry(ctx context.Context, resp *http.Response, err error) (bool, error) { - // do not retry on context.Canceled or context.DeadlineExceeded - if ctx.Err() != nil { - atomic.AddUint64(&c.currentFailCount, 1) - return false, ctx.Err() - } - - // we're squashing the error from the request and substituting any error that might come - // from our retry policy.
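The retry policy above is a small circuit breaker: every failure bumps an atomic counter, and once the counter crosses _maxRemoteFailCount, okToRequest refuses all further requests. A reduced sketch of that pattern under assumed names (breaker and maxFails are illustrative, not from the original):

    package main

    import (
    	"errors"
    	"fmt"
    	"sync/atomic"
    )

    const maxFails = 3

    type breaker struct{ fails uint64 }

    // ok mirrors okToRequest: allow the call only while the
    // failure count is below the threshold.
    func (b *breaker) ok() error {
    	if atomic.LoadUint64(&b.fails) < maxFails {
    		return nil
    	}
    	return errors.New("skipping request, too many failures have occurred")
    }

    // record mirrors the atomic.AddUint64 calls in retryCachePolicy.
    func (b *breaker) record() { atomic.AddUint64(&b.fails, 1) }

    func main() {
    	b := &breaker{}
    	for i := 0; i < 5; i++ {
    		if err := b.ok(); err != nil {
    			fmt.Println(i, err)
    			continue
    		}
    		b.record() // pretend every request failed
    		fmt.Println(i, "request attempted")
    	}
    }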
- shouldRetry, err := c.retryCachePolicy(resp, err) - if shouldRetry { - // Our policy says it's ok to retry, but we need to check the failure count - if retryErr := c.okToRequest(); retryErr != nil { - return false, retryErr - } - } - return shouldRetry, err -} - -// okToRequest returns nil if it's ok to make a request, and returns the error to -// return to the caller if a request is not allowed -func (c *APIClient) okToRequest() error { - if atomic.LoadUint64(&c.currentFailCount) < _maxRemoteFailCount { - return nil - } - return ErrTooManyFailures -} - -func (c *APIClient) makeURL(endpoint string) string { - return fmt.Sprintf("%v%v", c.baseURL, endpoint) -} - -func (c *APIClient) userAgent() string { - return fmt.Sprintf("turbo %v %v %v (%v)", c.turboVersion, runtime.Version(), runtime.GOOS, runtime.GOARCH) -} - -// doPreflight returns response with closed body, latest request url, and any errors to the caller -func (c *APIClient) doPreflight(requestURL string, requestMethod string, requestHeaders string) (*http.Response, string, error) { - req, err := retryablehttp.NewRequest(http.MethodOptions, requestURL, nil) - req.Header.Set("User-Agent", c.userAgent()) - req.Header.Set("Access-Control-Request-Method", requestMethod) - req.Header.Set("Access-Control-Request-Headers", requestHeaders) - req.Header.Set("Authorization", "Bearer "+c.token) - if err != nil { - return nil, requestURL, fmt.Errorf("[WARNING] Invalid cache URL: %w", err) - } - - // If resp is not nil, ignore any errors - // because they are most likely unimportant for preflight to handle. - // Let the follow-up request handle potential errors. - resp, err := c.HTTPClient.Do(req) - if resp == nil { - return resp, requestURL, err - } - defer resp.Body.Close() //nolint:golint,errcheck // nothing to do - // The client will continue following 307, 308 redirects until it hits - // max redirects, gets an error, or gets a normal response. - // Get the url from the Location header or get the url used in the last - // request (could have changed after following redirects). - // Note that net/http client does not continue redirecting the preflight - // request with the OPTIONS method for 301, 302, and 303 redirects. - // See golang/go Issue 18570. - if locationURL, err := resp.Location(); err == nil { - requestURL = locationURL.String() - } else { - requestURL = resp.Request.URL.String() - } - return resp, requestURL, nil -} - -func (c *APIClient) addTeamParam(params *url.Values) { - if c.teamID != "" && strings.HasPrefix(c.teamID, "team_") { - params.Add("teamId", c.teamID) - } - if c.teamSlug != "" { - params.Add("slug", c.teamSlug) - } -} - -// JSONPatch sends a byte array (json.marshalled payload) to a given endpoint with PATCH -func (c *APIClient) JSONPatch(ctx context.Context, endpoint string, body []byte) ([]byte, error) { - return c.request(ctx, endpoint, http.MethodPatch, body) -} - -// JSONPost sends a byte array (json.marshalled payload) to a given endpoint with POST -func (c *APIClient) JSONPost(ctx context.Context, endpoint string, body []byte) ([]byte, error) { - return c.request(ctx, endpoint, http.MethodPost, body) -} - -func (c *APIClient) request(ctx context.Context, endpoint string, method string, body []byte) ([]byte, error) { - if err := c.okToRequest(); err != nil { - return nil, err - } - - params := url.Values{} - c.addTeamParam(&params) - encoded := params.Encode() - if encoded != "" { - encoded = "?" 
+ encoded - } - - requestURL := c.makeURL(endpoint + encoded) - - allowAuth := true - if c.usePreflight { - resp, latestRequestURL, err := c.doPreflight(requestURL, method, "Authorization, User-Agent") - if err != nil { - return nil, fmt.Errorf("pre-flight request failed before trying to fetch files in HTTP cache: %w", err) - } - - requestURL = latestRequestURL - headers := resp.Header.Get("Access-Control-Allow-Headers") - allowAuth = strings.Contains(strings.ToLower(headers), strings.ToLower("Authorization")) - } - - req, err := retryablehttp.NewRequest(method, requestURL, body) - if err != nil { - return nil, err - } - req = req.WithContext(ctx) - - // Set headers - req.Header.Set("Content-Type", "application/json") - req.Header.Set("User-Agent", c.userAgent()) - - if allowAuth { - req.Header.Set("Authorization", "Bearer "+c.token) - } - - if ci.IsCi() { - req.Header.Set("x-artifact-client-ci", ci.Constant()) - } - - resp, err := c.HTTPClient.Do(req) - if err != nil { - return nil, err - } - - // If there isn't a response, something else probably went wrong - if resp == nil { - return nil, fmt.Errorf("response from %s is nil, something went wrong", requestURL) - } - - rawResponse, err := ioutil.ReadAll(resp.Body) - if err != nil { - return nil, fmt.Errorf("failed to read response %v", err) - } - - // For non 200/201 status codes, return the response body as an error - if resp.StatusCode != http.StatusOK && resp.StatusCode != http.StatusCreated { - return nil, fmt.Errorf("%s", string(rawResponse)) - } - - return rawResponse, nil -} diff --git a/cli/internal/client/client_test.go b/cli/internal/client/client_test.go deleted file mode 100644 index 0dc3a9491ca7d..0000000000000 --- a/cli/internal/client/client_test.go +++ /dev/null @@ -1,186 +0,0 @@ -package client - -import ( - "bytes" - "context" - "encoding/json" - "errors" - "io/ioutil" - "net/http" - "net/http/httptest" - "reflect" - "testing" - "time" - - "github.com/google/uuid" - "github.com/hashicorp/go-hclog" - "github.com/vercel/turbo/cli/internal/turbostate" - "github.com/vercel/turbo/cli/internal/util" - "gotest.tools/v3/assert" -) - -func Test_sendToServer(t *testing.T) { - ch := make(chan []byte, 1) - ts := httptest.NewServer( - http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) { - defer req.Body.Close() - b, err := ioutil.ReadAll(req.Body) - if err != nil { - t.Errorf("failed to read request %v", err) - } - ch <- b - w.WriteHeader(200) - w.Write([]byte{}) - })) - defer ts.Close() - - apiClientConfig := turbostate.APIClientConfig{ - TeamSlug: "my-team-slug", - APIURL: ts.URL, - Token: "my-token", - } - apiClient := NewClient(apiClientConfig, hclog.Default(), "v1") - - myUUID, err := uuid.NewUUID() - if err != nil { - t.Errorf("failed to create uuid %v", err) - } - events := []map[string]interface{}{ - { - "sessionId": myUUID.String(), - "hash": "foo", - "source": "LOCAL", - "event": "hit", - }, - { - "sessionId": myUUID.String(), - "hash": "bar", - "source": "REMOTE", - "event": "MISS", - }, - } - ctx := context.Background() - err = apiClient.RecordAnalyticsEvents(ctx, events) - assert.NilError(t, err, "RecordAnalyticsEvent") - - body := <-ch - - result := []map[string]interface{}{} - err = json.Unmarshal(body, &result) - if err != nil { - t.Errorf("unmarshalling body %v", err) - } - if !reflect.DeepEqual(events, result) { - t.Errorf("roundtrip got %v, want %v", result, events) - } -} - -func Test_PutArtifact(t *testing.T) { - ch := make(chan []byte, 1) - ts := httptest.NewServer(http.HandlerFunc(func(w 
http.ResponseWriter, req *http.Request) { - defer req.Body.Close() - b, err := ioutil.ReadAll(req.Body) - if err != nil { - t.Errorf("failed to read request %v", err) - } - ch <- b - w.WriteHeader(200) - w.Write([]byte{}) - })) - defer ts.Close() - - // Set up test expected values - apiClientConfig := turbostate.APIClientConfig{ - TeamSlug: "my-team-slug", - APIURL: ts.URL, - Token: "my-token", - } - apiClient := NewClient(apiClientConfig, hclog.Default(), "v1") - expectedArtifactBody := []byte("My string artifact") - - // Test Put Artifact - apiClient.PutArtifact("hash", expectedArtifactBody, 500, "") - testBody := <-ch - if !bytes.Equal(expectedArtifactBody, testBody) { - t.Errorf("Handler read '%v', wants '%v'", testBody, expectedArtifactBody) - } - -} - -func Test_PutWhenCachingDisabled(t *testing.T) { - ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) { - defer func() { _ = req.Body.Close() }() - w.WriteHeader(403) - _, _ = w.Write([]byte("{\"code\": \"remote_caching_disabled\",\"message\":\"caching disabled\"}")) - })) - defer ts.Close() - - // Set up test expected values - apiClientConfig := turbostate.APIClientConfig{ - TeamSlug: "my-team-slug", - APIURL: ts.URL, - Token: "my-token", - } - apiClient := NewClient(apiClientConfig, hclog.Default(), "v1") - expectedArtifactBody := []byte("My string artifact") - // Test Put Artifact - err := apiClient.PutArtifact("hash", expectedArtifactBody, 500, "") - cd := &util.CacheDisabledError{} - if !errors.As(err, &cd) { - t.Errorf("expected cache disabled error, got %v", err) - } - if cd.Status != util.CachingStatusDisabled { - t.Errorf("caching status: expected %v, got %v", util.CachingStatusDisabled, cd.Status) - } -} - -func Test_FetchWhenCachingDisabled(t *testing.T) { - ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) { - defer func() { _ = req.Body.Close() }() - w.WriteHeader(403) - _, _ = w.Write([]byte("{\"code\": \"remote_caching_disabled\",\"message\":\"caching disabled\"}")) - })) - defer ts.Close() - - // Set up test expected values - apiClientConfig := turbostate.APIClientConfig{ - TeamSlug: "my-team-slug", - APIURL: ts.URL, - Token: "my-token", - } - apiClient := NewClient(apiClientConfig, hclog.Default(), "v1") - // Test Fetch Artifact - resp, err := apiClient.FetchArtifact("hash") - cd := &util.CacheDisabledError{} - if !errors.As(err, &cd) { - t.Errorf("expected cache disabled error, got %v", err) - } - if cd.Status != util.CachingStatusDisabled { - t.Errorf("caching status: expected %v, got %v", util.CachingStatusDisabled, cd.Status) - } - if resp != nil { - t.Errorf("response got %v, want nil", resp) - } -} - -func Test_Timeout(t *testing.T) { - ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) { - <-time.After(50 * time.Millisecond) - })) - defer ts.Close() - - // Set up test expected values - apiClientConfig := turbostate.APIClientConfig{ - TeamSlug: "my-team-slug", - APIURL: ts.URL, - Token: "my-token", - } - apiClient := NewClient(apiClientConfig, hclog.Default(), "v1") - - ctx, cancel := context.WithTimeout(context.Background(), 1*time.Millisecond) - defer cancel() - _, err := apiClient.JSONPost(ctx, "/", []byte{}) - if !errors.Is(err, context.DeadlineExceeded) { - t.Errorf("JSONPost got %v, want DeadlineExceeded", err) - } -} diff --git a/cli/internal/cmd/root.go b/cli/internal/cmd/root.go deleted file mode 100644 index e5651b6226dd3..0000000000000 --- a/cli/internal/cmd/root.go +++ /dev/null @@ -1,153 +0,0 
@@ -// Package cmd holds the root cobra command for turbo -package cmd - -import ( - "context" - "fmt" - "os" - "runtime/pprof" - "runtime/trace" - - "github.com/pkg/errors" - "github.com/vercel/turbo/cli/internal/cmdutil" - "github.com/vercel/turbo/cli/internal/daemon" - "github.com/vercel/turbo/cli/internal/process" - "github.com/vercel/turbo/cli/internal/run" - "github.com/vercel/turbo/cli/internal/signals" - "github.com/vercel/turbo/cli/internal/turbostate" - "github.com/vercel/turbo/cli/internal/util" -) - -func initializeOutputFiles(helper *cmdutil.Helper, parsedArgs *turbostate.ParsedArgsFromRust) error { - if parsedArgs.Trace != "" { - cleanup, err := createTraceFile(parsedArgs.Trace) - if err != nil { - return fmt.Errorf("failed to create trace file: %v", err) - } - helper.RegisterCleanup(cleanup) - } - if parsedArgs.Heap != "" { - cleanup, err := createHeapFile(parsedArgs.Heap) - if err != nil { - return fmt.Errorf("failed to create heap file: %v", err) - } - helper.RegisterCleanup(cleanup) - } - if parsedArgs.CPUProfile != "" { - cleanup, err := createCpuprofileFile(parsedArgs.CPUProfile) - if err != nil { - return fmt.Errorf("failed to create CPU profile file: %v", err) - } - helper.RegisterCleanup(cleanup) - } - - return nil -} - -// RunWithExecutionState runs turbo with the ParsedArgsFromRust that is passed from the Rust side. -func RunWithExecutionState(executionState *turbostate.ExecutionState, turboVersion string) int { - util.InitPrintf() - // TODO: replace this with a context - signalWatcher := signals.NewWatcher() - helper := cmdutil.NewHelper(turboVersion, &executionState.CLIArgs) - ctx := context.Background() - - err := initializeOutputFiles(helper, &executionState.CLIArgs) - if err != nil { - fmt.Printf("%v", err) - return 1 - } - defer helper.Cleanup(&executionState.CLIArgs) - - doneCh := make(chan struct{}) - var execErr error - go func() { - command := executionState.CLIArgs.Command - if command.Daemon != nil { - execErr = daemon.ExecuteDaemon(ctx, helper, signalWatcher, executionState) - } else if command.Run != nil { - execErr = run.ExecuteRun(ctx, helper, signalWatcher, executionState) - } else { - execErr = fmt.Errorf("unknown command: %v", command) - } - - close(doneCh) - }() - - // Wait for either our command to finish, in which case we need to clean up, - // or to receive a signal, in which case the signal handler above does the cleanup - select { - case <-doneCh: - // We finished whatever task we were running - signalWatcher.Close() - exitErr := &process.ChildExit{} - if errors.As(execErr, &exitErr) { - return exitErr.ExitCode - } else if execErr != nil { - return 1 - } - return 0 - case <-signalWatcher.Done(): - // We caught a signal, which already called the close handlers - return 1 - } -} - -type profileCleanup func() error - -// Close implements io.Closer for profileCleanup -func (pc profileCleanup) Close() error { - return pc() -} - -// To view a CPU trace, use "go tool trace [file]". Note that the trace -// viewer doesn't work under Windows Subsystem for Linux for some reason. -func createTraceFile(traceFile string) (profileCleanup, error) { - f, err := os.Create(traceFile) - if err != nil { - return nil, errors.Wrapf(err, "failed to create trace file: %v", traceFile) - } - if err := trace.Start(f); err != nil { - return nil, errors.Wrap(err, "failed to start tracing") - } - return func() error { - trace.Stop() - return f.Close() - }, nil -} - -// To view a heap trace, use "go tool pprof [file]" and type "top". 
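profileCleanup above is a function type given a Close method, so a bare closure can be stored wherever an io.Closer is expected; that is how the trace, heap, and CPU-profile teardown hooks all flow through the same cleanup list. A minimal sketch of the idiom (names are illustrative):

    package main

    import (
    	"fmt"
    	"io"
    )

    type cleanupFunc func() error

    // Close makes cleanupFunc satisfy io.Closer.
    func (f cleanupFunc) Close() error { return f() }

    func main() {
    	var closers []io.Closer
    	closers = append(closers, cleanupFunc(func() error {
    		fmt.Println("stopping profiler")
    		return nil
    	}))
    	for _, c := range closers {
    		_ = c.Close()
    	}
    }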
You can -// also drop it into https://speedscope.app and use the "left heavy" or -// "sandwich" view modes. -func createHeapFile(heapFile string) (profileCleanup, error) { - f, err := os.Create(heapFile) - if err != nil { - return nil, errors.Wrapf(err, "failed to create heap file: %v", heapFile) - } - return func() error { - if err := pprof.WriteHeapProfile(f); err != nil { - // we don't care if we fail to close the file we just failed to write to - _ = f.Close() - return errors.Wrapf(err, "failed to write heap file: %v", heapFile) - } - return f.Close() - }, nil -} - -// To view a CPU profile, drop the file into https://speedscope.app. -// Note: Running the CPU profiler doesn't work under Windows subsystem for -// Linux. The profiler has to be built for native Windows and run using the -// command prompt instead. -func createCpuprofileFile(cpuprofileFile string) (profileCleanup, error) { - f, err := os.Create(cpuprofileFile) - if err != nil { - return nil, errors.Wrapf(err, "failed to create cpuprofile file: %v", cpuprofileFile) - } - if err := pprof.StartCPUProfile(f); err != nil { - return nil, errors.Wrap(err, "failed to start CPU profiling") - } - return func() error { - pprof.StopCPUProfile() - return f.Close() - }, nil -} diff --git a/cli/internal/cmdutil/cmdutil.go b/cli/internal/cmdutil/cmdutil.go deleted file mode 100644 index 60ec15d9f209b..0000000000000 --- a/cli/internal/cmdutil/cmdutil.go +++ /dev/null @@ -1,218 +0,0 @@ -// Package cmdutil holds functionality to run turbo via cobra. That includes flag parsing and configuration -// of components common to all subcommands -package cmdutil - -import ( - "fmt" - "io" - "io/ioutil" - "os" - "sync" - - "github.com/hashicorp/go-hclog" - - "github.com/fatih/color" - "github.com/mitchellh/cli" - "github.com/vercel/turbo/cli/internal/client" - "github.com/vercel/turbo/cli/internal/fs" - "github.com/vercel/turbo/cli/internal/turbopath" - "github.com/vercel/turbo/cli/internal/turbostate" - "github.com/vercel/turbo/cli/internal/ui" -) - -const ( - // _envLogLevel is the environment log level - _envLogLevel = "TURBO_LOG_LEVEL" -) - -// Helper is a struct used to hold configuration values passed via flag, env vars, -// config files, etc. It is not intended for direct use by turbo commands, it drives -// the creation of CmdBase, which is then used by the commands themselves. -type Helper struct { - // TurboVersion is the version of turbo that is currently executing - TurboVersion string - - // for logging - verbosity int - - rawRepoRoot string - - cleanupsMu sync.Mutex - cleanups []io.Closer -} - -// RegisterCleanup saves a function to be run after turbo execution, -// even if the command that runs returns an error -func (h *Helper) RegisterCleanup(cleanup io.Closer) { - h.cleanupsMu.Lock() - defer h.cleanupsMu.Unlock() - h.cleanups = append(h.cleanups, cleanup) -} - -// Cleanup runs the registered cleanup handlers. 
It requires the flags -// to the root command so that it can construct a UI if necessary -func (h *Helper) Cleanup(cliConfig *turbostate.ParsedArgsFromRust) { - h.cleanupsMu.Lock() - defer h.cleanupsMu.Unlock() - var ui cli.Ui - for _, cleanup := range h.cleanups { - if err := cleanup.Close(); err != nil { - if ui == nil { - ui = h.getUI(cliConfig) - } - ui.Warn(fmt.Sprintf("failed cleanup: %v", err)) - } - } -} - -func (h *Helper) getUI(cliArgs *turbostate.ParsedArgsFromRust) cli.Ui { - factory := h.getUIFactory(cliArgs) - return factory.Build(os.Stdout, os.Stdin, os.Stderr) -} - -func (h *Helper) getUIFactory(cliArgs *turbostate.ParsedArgsFromRust) ui.Factory { - colorMode := ui.GetColorModeFromEnv() - if cliArgs.NoColor { - colorMode = ui.ColorModeSuppressed - } - if cliArgs.Color { - colorMode = ui.ColorModeForced - } - return &ui.ColoredUIFactory{ - ColorMode: colorMode, - Base: &ui.BasicUIFactory{}, - } -} - -func (h *Helper) getLogger() (hclog.Logger, error) { - var level hclog.Level - switch h.verbosity { - case 0: - if v := os.Getenv(_envLogLevel); v != "" { - level = hclog.LevelFromString(v) - if level == hclog.NoLevel { - return nil, fmt.Errorf("%s value %q is not a valid log level", _envLogLevel, v) - } - } else { - level = hclog.NoLevel - } - case 1: - level = hclog.Info - case 2: - level = hclog.Debug - case 3: - level = hclog.Trace - default: - level = hclog.Trace - } - // Default output is nowhere unless we enable logging. - output := ioutil.Discard - color := hclog.ColorOff - if level != hclog.NoLevel { - output = os.Stderr - color = hclog.AutoColor - } - - return hclog.New(&hclog.LoggerOptions{ - Name: "turbo", - Level: level, - Color: color, - Output: output, - }), nil -} - -// NewHelper returns a new helper instance to hold configuration values for the root -// turbo command. -func NewHelper(turboVersion string, args *turbostate.ParsedArgsFromRust) *Helper { - return &Helper{ - TurboVersion: turboVersion, - verbosity: args.Verbosity, - } -} - -// GetCmdBase returns a CmdBase instance configured with values from this helper. -// It additionally returns a mechanism to set an error, so -func (h *Helper) GetCmdBase(executionState *turbostate.ExecutionState) (*CmdBase, error) { - // terminal is for color/no-color output - uiFactory := h.getUIFactory(&executionState.CLIArgs) - terminal := uiFactory.Build(os.Stdin, os.Stdout, os.Stderr) - // logger is configured with verbosity level using --verbosity flag from end users - logger, err := h.getLogger() - if err != nil { - return nil, err - } - cwdRaw := executionState.CLIArgs.CWD - if err != nil { - return nil, err - } - cwd, err := fs.GetCwd(cwdRaw) - if err != nil { - return nil, err - } - repoRoot := fs.ResolveUnknownPath(cwd, h.rawRepoRoot) - repoRoot, err = repoRoot.EvalSymlinks() - if err != nil { - return nil, err - } - apiClientConfig := executionState.APIClientConfig - spacesAPIClientConfig := executionState.SpacesAPIClientConfig - - apiClient := client.NewClient( - apiClientConfig, - logger, - h.TurboVersion, - ) - - spacesClient := client.NewClient( - spacesAPIClientConfig, - logger, - h.TurboVersion, - ) - - return &CmdBase{ - UI: terminal, - UIFactory: uiFactory, - Logger: logger, - RepoRoot: repoRoot, - Config: executionState.Config, - APIClient: apiClient, - SpacesAPIClient: spacesClient, - TurboVersion: h.TurboVersion, - }, nil -} - -// CmdBase encompasses configured components common to all turbo commands. 
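getLogger above maps the -v count onto hclog levels, with TURBO_LOG_LEVEL consulted only when no -v flags were passed. A sketch of that mapping in isolation (levelFor is a hypothetical helper, but the hclog calls are the ones used above):

    package main

    import (
    	"fmt"

    	"github.com/hashicorp/go-hclog"
    )

    func levelFor(verbosity int, envValue string) hclog.Level {
    	switch verbosity {
    	case 0:
    		if envValue != "" {
    			// hclog.NoLevel signals an unrecognized value.
    			return hclog.LevelFromString(envValue)
    		}
    		return hclog.NoLevel // logging disabled
    	case 1:
    		return hclog.Info
    	case 2:
    		return hclog.Debug
    	default: // 3 and above
    		return hclog.Trace
    	}
    }

    func main() {
    	fmt.Println(levelFor(0, "warn"), levelFor(2, ""))
    }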
-type CmdBase struct { - UI cli.Ui - UIFactory ui.Factory - Logger hclog.Logger - RepoRoot turbopath.AbsoluteSystemPath - Config turbostate.Config - APIClient *client.APIClient - SpacesAPIClient *client.APIClient - TurboVersion string -} - -// LogError prints an error to the UI -func (b *CmdBase) LogError(format string, args ...interface{}) { - err := fmt.Errorf(format, args...) - b.Logger.Error(fmt.Sprintf("error: %v", err)) - b.UI.Error(fmt.Sprintf("%s%s", ui.ERROR_PREFIX, color.RedString(" %v", err))) -} - -// LogWarning logs an error and outputs it to the UI. -func (b *CmdBase) LogWarning(prefix string, err error) { - b.Logger.Warn(prefix, "warning", err) - - if prefix != "" { - prefix = " " + prefix + ": " - } - - b.UI.Warn(fmt.Sprintf("%s%s%s", ui.WARNING_PREFIX, prefix, color.YellowString(" %v", err))) -} - -// LogInfo logs a message and outputs it to the UI. -func (b *CmdBase) LogInfo(msg string) { - b.Logger.Info(msg) - b.UI.Info(fmt.Sprintf("%s%s", ui.InfoPrefix, color.WhiteString(" %v", msg))) -} diff --git a/cli/internal/cmdutil/cmdutil_test.go b/cli/internal/cmdutil/cmdutil_test.go deleted file mode 100644 index bf54d82952b8d..0000000000000 --- a/cli/internal/cmdutil/cmdutil_test.go +++ /dev/null @@ -1,64 +0,0 @@ -package cmdutil - -import ( - "os" - "testing" - "time" - - "github.com/vercel/turbo/cli/internal/turbostate" - "gotest.tools/v3/assert" -) - -func TestRemoteCacheTimeoutFlag(t *testing.T) { - args := turbostate.ParsedArgsFromRust{ - CWD: "", - } - - executionState := turbostate.ExecutionState{ - APIClientConfig: turbostate.APIClientConfig{ - Timeout: 599, - }, - CLIArgs: args, - } - - h := NewHelper("test-version", &args) - - base, err := h.GetCmdBase(&executionState) - if err != nil { - t.Fatalf("failed to get command base %v", err) - } - - assert.Equal(t, base.APIClient.HTTPClient.HTTPClient.Timeout, time.Duration(599)*time.Second) -} - -func TestRemoteCacheTimeoutPrimacy(t *testing.T) { - key := "TURBO_REMOTE_CACHE_TIMEOUT" - value := "2" - - t.Run(key, func(t *testing.T) { - t.Cleanup(func() { - _ = os.Unsetenv(key) - }) - args := turbostate.ParsedArgsFromRust{ - CWD: "", - } - executionState := turbostate.ExecutionState{ - APIClientConfig: turbostate.APIClientConfig{ - Timeout: 1, - }, - CLIArgs: args, - } - h := NewHelper("test-version", &args) - - err := os.Setenv(key, value) - if err != nil { - t.Fatalf("setenv %v", err) - } - - base, err := h.GetCmdBase(&executionState) - if err != nil { - t.Fatalf("failed to get command base %v", err) - } - assert.Equal(t, base.APIClient.HTTPClient.HTTPClient.Timeout, time.Duration(1)*time.Second) - }) -} diff --git a/cli/internal/colorcache/colorcache.go b/cli/internal/colorcache/colorcache.go deleted file mode 100644 index 08a15e83f6f7b..0000000000000 --- a/cli/internal/colorcache/colorcache.go +++ /dev/null @@ -1,56 +0,0 @@ -package colorcache - -import ( - "sync" - - "github.com/vercel/turbo/cli/internal/util" - - "github.com/fatih/color" -) - -type colorFn = func(format string, a ...interface{}) string - -func getTerminalPackageColors() []colorFn { - return []colorFn{color.CyanString, color.MagentaString, color.GreenString, color.YellowString, color.BlueString} -} - -type ColorCache struct { - mu sync.Mutex - index int - TermColors []colorFn - Cache map[interface{}]colorFn -} - -// New creates an instance of ColorCache with helpers for adding colors to task outputs -func New() *ColorCache { - return &ColorCache{ - TermColors: getTerminalPackageColors(), - index: 0, - Cache: make(map[interface{}]colorFn), - } -} - -// 
colorForKey returns a color function for a given package name -func (c *ColorCache) colorForKey(key string) colorFn { - c.mu.Lock() - defer c.mu.Unlock() - colorFn, ok := c.Cache[key] - if ok { - return colorFn - } - c.index++ - colorFn = c.TermColors[util.PositiveMod(c.index, len(c.TermColors))] // 5 possible colors - c.Cache[key] = colorFn - return colorFn -} - -// PrefixWithColor returns a string consisting of the provided prefix in a consistent -// color based on the cacheKey -func (c *ColorCache) PrefixWithColor(cacheKey string, prefix string) string { - colorFn := c.colorForKey(cacheKey) - if prefix != "" { - return colorFn("%s: ", prefix) - } - - return "" -} diff --git a/cli/internal/context/context.go b/cli/internal/context/context.go deleted file mode 100644 index 9c100c10fc0c6..0000000000000 --- a/cli/internal/context/context.go +++ /dev/null @@ -1,493 +0,0 @@ -package context - -import ( - "fmt" - "path/filepath" - "sort" - "strings" - "sync" - - "github.com/hashicorp/go-multierror" - "github.com/vercel/turbo/cli/internal/core" - "github.com/vercel/turbo/cli/internal/fs" - "github.com/vercel/turbo/cli/internal/lockfile" - "github.com/vercel/turbo/cli/internal/packagemanager" - "github.com/vercel/turbo/cli/internal/turbopath" - "github.com/vercel/turbo/cli/internal/util" - "github.com/vercel/turbo/cli/internal/workspace" - - "github.com/Masterminds/semver" - "github.com/pyr-sh/dag" - "golang.org/x/sync/errgroup" -) - -// Warnings Error type for errors that don't prevent the creation of a functional Context -type Warnings struct { - warns *multierror.Error - mu sync.Mutex -} - -var _ error = (*Warnings)(nil) - -func (w *Warnings) Error() string { - return w.warns.Error() -} - -func (w *Warnings) errorOrNil() error { - if w.warns != nil { - return w - } - return nil -} - -func (w *Warnings) append(err error) { - w.mu.Lock() - defer w.mu.Unlock() - w.warns = multierror.Append(w.warns, err) -} - -// Context of the CLI -type Context struct { - // WorkspaceInfos contains the contents of package.json for every workspace - // TODO(gsoltis): should the RootPackageJSON be included in WorkspaceInfos? - WorkspaceInfos workspace.Catalog - - // WorkspaceNames is all the names of the workspaces - WorkspaceNames []string - - // WorkspaceGraph is a graph of workspace dependencies - // (based on package.json dependencies and devDependencies) - WorkspaceGraph dag.AcyclicGraph - - // RootNode is a sigil identifying the root workspace - RootNode string - - // Lockfile is a struct to read the lockfile based on the package manager - Lockfile lockfile.Lockfile - - // PackageManager is an abstraction for all the info a package manager - // can give us about the repo. - PackageManager *packagemanager.PackageManager - - // Used to arbitrate access to the graph. We parallelise most build operations - // and Go maps aren't natively threadsafe so this is needed. - mutex sync.Mutex -} - -// Splits "npm:^1.2.3" and "github:foo/bar.git" into a protocol part and a version part. -func parseDependencyProtocol(version string) (string, string) { - parts := strings.Split(version, ":") - if len(parts) == 1 { - return "", parts[0] - } - - return parts[0], strings.Join(parts[1:], ":") -} - -func isProtocolExternal(protocol string) bool { - // The npm protocol for yarn by default still uses the workspace package if the workspace - // version is in a compatible semver range. 
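parseDependencyProtocol splits only on the first colon and rejoins the rest, so specifiers whose version part itself contains a colon survive intact. A small demonstration with a hypothetical copy of the function:

    package main

    import (
    	"fmt"
    	"strings"
    )

    func splitProtocol(version string) (string, string) {
    	parts := strings.Split(version, ":")
    	if len(parts) == 1 {
    		return "", parts[0]
    	}
    	return parts[0], strings.Join(parts[1:], ":")
    }

    func main() {
    	fmt.Println(splitProtocol("^1.2.3"))             // "" "^1.2.3"
    	fmt.Println(splitProtocol("npm:^1.2.3"))         // "npm" "^1.2.3"
    	fmt.Println(splitProtocol("github:foo/bar.git")) // "github" "foo/bar.git"
    }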
See https://github.com/yarnpkg/berry/discussions/4015 - // For now, we will just assume if the npm protocol is being used and the version matches - // it's an internal dependency, which matches the existing behavior before this additional - // logic was added. - - // TODO: extend this to support the `enableTransparentWorkspaces` yarn option - return protocol != "" && protocol != "npm" -} - -func isWorkspaceReference(packageVersion string, dependencyVersion string, cwd string, rootpath string) bool { - protocol, dependencyVersion := parseDependencyProtocol(dependencyVersion) - - if protocol == "workspace" { - // TODO: Since support at the moment is non-existent for workspaces that contain multiple - // versions of the same package name, just assume it's a match and don't check the range - // for an exact match. - return true - } else if protocol == "file" || protocol == "link" { - abs, err := filepath.Abs(filepath.Join(cwd, dependencyVersion)) - if err != nil { - // Default to internal if we have the package but somehow cannot get the path - // TODO(gsoltis): log this? - return true - } - isWithinRepo, err := fs.DirContainsPath(rootpath, filepath.FromSlash(abs)) - if err != nil { - // Default to internal if we have the package but somehow cannot get the path - // TODO(gsoltis): log this? - return true - } - return isWithinRepo - } else if isProtocolExternal(protocol) { - // Other protocols are assumed to be external references ("github:", etc) - return false - } else if dependencyVersion == "*" { - return true - } - - // If we got this far, then we need to check the workspace package version to see if it satisfies - // the dependency's range to determine whether it's an internal or external dependency. - - constraint, constraintErr := semver.NewConstraint(dependencyVersion) - pkgVersion, packageVersionErr := semver.NewVersion(packageVersion) - if constraintErr != nil || packageVersionErr != nil { - // For backwards compatibility with existing behavior, if we can't parse the version then we - // treat the dependency as an internal package reference and swallow the error. - - // TODO: some package managers also support tags like "latest". Does extra handling need to be - // added for this corner-case? - return true - } - - return constraint.Check(pkgVersion) -} - -// SinglePackageGraph constructs a Context instance from a single package. 
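When none of the protocol shortcuts apply, isWorkspaceReference falls through to a plain semver check with the same Masterminds/semver package imported above; unparseable versions deliberately count as internal. A sketch of just that final step (satisfies is a hypothetical wrapper):

    package main

    import (
    	"fmt"

    	"github.com/Masterminds/semver"
    )

    func satisfies(packageVersion, dependencyRange string) bool {
    	c, constraintErr := semver.NewConstraint(dependencyRange)
    	v, versionErr := semver.NewVersion(packageVersion)
    	if constraintErr != nil || versionErr != nil {
    		return true // treat unparseable versions as internal, as above
    	}
    	return c.Check(v)
    }

    func main() {
    	fmt.Println(satisfies("1.2.3", "^1.0.0"))  // true: internal
    	fmt.Println(satisfies("2.3.4", "^1.0.0"))  // false: external
    	fmt.Println(satisfies("sometag", "1.2.3")) // true: unparseable tag
    }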
-func SinglePackageGraph(rootPackageJSON *fs.PackageJSON, packageManagerName string) (*Context, error) { - workspaceInfos := workspace.Catalog{ - PackageJSONs: map[string]*fs.PackageJSON{util.RootPkgName: rootPackageJSON}, - TurboConfigs: map[string]*fs.TurboJSON{}, - } - c := &Context{ - WorkspaceInfos: workspaceInfos, - RootNode: core.ROOT_NODE_NAME, - } - c.WorkspaceGraph.Connect(dag.BasicEdge(util.RootPkgName, core.ROOT_NODE_NAME)) - packageManager, err := packagemanager.GetPackageManager(packageManagerName) - if err != nil { - return nil, err - } - c.PackageManager = packageManager - return c, nil -} - -// BuildPackageGraph constructs a Context instance with information about the package dependency graph -func BuildPackageGraph(repoRoot turbopath.AbsoluteSystemPath, rootPackageJSON *fs.PackageJSON, packageManagerName string) (*Context, error) { - c := &Context{} - rootpath := repoRoot.ToStringDuringMigration() - c.WorkspaceInfos = workspace.Catalog{ - PackageJSONs: map[string]*fs.PackageJSON{}, - TurboConfigs: map[string]*fs.TurboJSON{}, - } - c.RootNode = core.ROOT_NODE_NAME - - var warnings Warnings - - packageManager, err := packagemanager.GetPackageManager(packageManagerName) - if err != nil { - return nil, err - } - c.PackageManager = packageManager - - if err := c.resolveWorkspaceRootDeps(rootPackageJSON, &warnings); err != nil { - // TODO(Gaspar) was this the intended return error? - return nil, fmt.Errorf("could not resolve workspaces: %w", err) - } - - // Get the workspaces from the package manager. - // workspaces are absolute paths - workspaces, err := c.PackageManager.GetWorkspaces(repoRoot) - - if err != nil { - return nil, fmt.Errorf("workspace configuration error: %w", err) - } - - // We will parse all package.json's simultaneously. We use a - // wait group because we cannot fully populate the graph (the next step) - // until all parsing is complete - parseJSONWaitGroup := &errgroup.Group{} - for _, workspace := range workspaces { - pkgJSONPath := fs.UnsafeToAbsoluteSystemPath(workspace) - parseJSONWaitGroup.Go(func() error { - return c.parsePackageJSON(repoRoot, pkgJSONPath) - }) - } - - if err := parseJSONWaitGroup.Wait(); err != nil { - return nil, err - } - populateGraphWaitGroup := &errgroup.Group{} - for _, pkg := range c.WorkspaceInfos.PackageJSONs { - pkg := pkg - populateGraphWaitGroup.Go(func() error { - return c.populateWorkspaceGraphForPackageJSON(pkg, rootpath, pkg.Name, &warnings) - }) - } - - if err := populateGraphWaitGroup.Wait(); err != nil { - return nil, err - } - // Resolve dependencies for the root package. We override the vertexName in the graph - // for the root package, since it can have an arbitrary name. We need it to have our - // RootPkgName so that we can identify it as the root later on. 
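BuildPackageGraph fans the package.json parsing out through errgroup: one goroutine per workspace, with Wait collapsing the first error. A reduced sketch of that fan-out (the paths are hypothetical; note the per-iteration copy, which pre-Go 1.22 loop semantics require, just as in the loops above):

    package main

    import (
    	"fmt"

    	"golang.org/x/sync/errgroup"
    )

    func main() {
    	paths := []string{"packages/a/package.json", "packages/b/package.json"}
    	g := &errgroup.Group{}
    	for _, p := range paths {
    		p := p // capture a per-iteration copy before handing it to the goroutine
    		g.Go(func() error {
    			fmt.Println("parsing", p)
    			return nil // a real parser would return any read/parse error
    		})
    	}
    	if err := g.Wait(); err != nil {
    		fmt.Println("parse failed:", err)
    	}
    }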
- err = c.populateWorkspaceGraphForPackageJSON(rootPackageJSON, rootpath, util.RootPkgName, &warnings) - if err != nil { - return nil, fmt.Errorf("failed to resolve dependencies for root package: %v", err) - } - c.WorkspaceInfos.PackageJSONs[util.RootPkgName] = rootPackageJSON - - if err := c.populateExternalDeps(repoRoot, rootPackageJSON, &warnings); err != nil { - return nil, err - } - - return c, warnings.errorOrNil() -} - -func (c *Context) resolveWorkspaceRootDeps(rootPackageJSON *fs.PackageJSON, warnings *Warnings) error { - pkg := rootPackageJSON - pkg.UnresolvedExternalDeps = make(map[string]string) - for dep, version := range pkg.DevDependencies { - pkg.UnresolvedExternalDeps[dep] = version - } - for dep, version := range pkg.OptionalDependencies { - pkg.UnresolvedExternalDeps[dep] = version - } - for dep, version := range pkg.Dependencies { - pkg.UnresolvedExternalDeps[dep] = version - } - return nil -} - -type dependencySplitter struct { - workspaces map[string]*fs.PackageJSON - pkgDir string - rootPath string -} - -func (d *dependencySplitter) isInternal(name, version string) (string, bool) { - resolvedName := name - if withoutProtocol := strings.TrimPrefix(version, "workspace:"); withoutProtocol != version { - parts := strings.Split(withoutProtocol, "@") - lastIndex := len(parts) - 1 - if len(parts) > 1 && (parts[lastIndex] == "*" || parts[lastIndex] == "^" || parts[lastIndex] == "~") { - resolvedName = strings.Join(parts[:lastIndex], "@") - } - } - item, ok := d.workspaces[resolvedName] - if ok && isWorkspaceReference(item.Version, version, d.pkgDir, d.rootPath) { - return resolvedName, true - } - return "", false -} - -// populateWorkspaceGraphForPackageJSON fills in the edges for the dependencies of the given package -// that are within the monorepo, as well as collecting and hashing the dependencies of the package -// that are not within the monorepo. The vertexName is used to override the package name in the graph. -// This can happen when adding the root package, which can have an arbitrary name. -func (c *Context) populateWorkspaceGraphForPackageJSON(pkg *fs.PackageJSON, rootpath string, vertexName string, warnings *Warnings) error { - c.mutex.Lock() - defer c.mutex.Unlock() - depMap := make(map[string]string) - internalDepsSet := make(dag.Set) - externalUnresolvedDepsSet := make(dag.Set) - pkg.UnresolvedExternalDeps = make(map[string]string) - - for dep, version := range pkg.DevDependencies { - depMap[dep] = version - } - - for dep, version := range pkg.OptionalDependencies { - depMap[dep] = version - } - - for dep, version := range pkg.Dependencies { - depMap[dep] = version - } - - splitter := dependencySplitter{ - workspaces: c.WorkspaceInfos.PackageJSONs, - pkgDir: pkg.Dir.ToStringDuringMigration(), - rootPath: rootpath, - } - - // split out internal vs. 
external deps - for depName, depVersion := range depMap { - if name, internal := splitter.isInternal(depName, depVersion); internal { - internalDepsSet.Add(name) - c.WorkspaceGraph.Connect(dag.BasicEdge(vertexName, name)) - } else { - externalUnresolvedDepsSet.Add(depName) - } - } - - for _, name := range externalUnresolvedDepsSet.List() { - name := name.(string) - if item, ok := pkg.DevDependencies[name]; ok { - pkg.UnresolvedExternalDeps[name] = item - } - - if item, ok := pkg.OptionalDependencies[name]; ok { - pkg.UnresolvedExternalDeps[name] = item - } - - if item, ok := pkg.Dependencies[name]; ok { - pkg.UnresolvedExternalDeps[name] = item - } - } - - // when there are no internal dependencies, we still need to add these leaves to the graph - if internalDepsSet.Len() == 0 { - c.WorkspaceGraph.Connect(dag.BasicEdge(pkg.Name, core.ROOT_NODE_NAME)) - } - - pkg.InternalDeps = make([]string, 0, internalDepsSet.Len()) - for _, v := range internalDepsSet.List() { - pkg.InternalDeps = append(pkg.InternalDeps, fmt.Sprintf("%v", v)) - } - - sort.Strings(pkg.InternalDeps) - - return nil -} - -func (c *Context) parsePackageJSON(repoRoot turbopath.AbsoluteSystemPath, pkgJSONPath turbopath.AbsoluteSystemPath) error { - c.mutex.Lock() - defer c.mutex.Unlock() - - if pkgJSONPath.FileExists() { - pkg, err := fs.ReadPackageJSON(pkgJSONPath) - if err != nil { - return fmt.Errorf("parsing %s: %w", pkgJSONPath, err) - } - - relativePkgJSONPath, err := repoRoot.PathTo(pkgJSONPath) - if err != nil { - return err - } - c.WorkspaceGraph.Add(pkg.Name) - pkg.PackageJSONPath = turbopath.AnchoredSystemPathFromUpstream(relativePkgJSONPath) - pkg.Dir = turbopath.AnchoredSystemPathFromUpstream(filepath.Dir(relativePkgJSONPath)) - if c.WorkspaceInfos.PackageJSONs[pkg.Name] != nil { - existing := c.WorkspaceInfos.PackageJSONs[pkg.Name] - return fmt.Errorf("Failed to add workspace \"%s\" from %s, it already exists at %s", pkg.Name, pkg.Dir, existing.Dir) - } - c.WorkspaceInfos.PackageJSONs[pkg.Name] = pkg - c.WorkspaceNames = append(c.WorkspaceNames, pkg.Name) - } - return nil -} - -func (c *Context) externalWorkspaceDeps() map[turbopath.AnchoredUnixPath]map[string]string { - workspaces := make(map[turbopath.AnchoredUnixPath]map[string]string, len(c.WorkspaceInfos.PackageJSONs)) - for _, pkg := range c.WorkspaceInfos.PackageJSONs { - workspaces[pkg.Dir.ToUnixPath()] = pkg.UnresolvedExternalDeps - } - return workspaces -} - -func (c *Context) populateExternalDeps(repoRoot turbopath.AbsoluteSystemPath, rootPackageJSON *fs.PackageJSON, warnings *Warnings) error { - if lockFile, err := c.PackageManager.ReadLockfile(repoRoot, rootPackageJSON); err != nil { - warnings.append(err) - rootPackageJSON.TransitiveDeps = nil - rootPackageJSON.ExternalDepsHash = "" - } else { - c.Lockfile = lockFile - if closures, err := lockfile.AllTransitiveClosures(c.externalWorkspaceDeps(), c.Lockfile); err != nil { - warnings.append(err) - } else { - for _, pkg := range c.WorkspaceInfos.PackageJSONs { - if closure, ok := closures[pkg.Dir.ToUnixPath()]; ok { - if err := pkg.SetExternalDeps(closure); err != nil { - return err - } - } else { - return fmt.Errorf("Unable to calculate closure for workspace %s", pkg.Dir.ToString()) - } - } - } - } - - return nil -} - -// InternalDependencies finds all dependencies required by the slice of starting -// packages, as well as the starting packages themselves. 
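InternalDependencies is, in effect, a depth-first reachability query over the workspace graph followed by a sort. The same computation reduced to a plain adjacency map (the dag walk above does the real work; names here are illustrative):

    package main

    import (
    	"fmt"
    	"sort"
    )

    func transitive(deps map[string][]string, start []string) []string {
    	seen := map[string]bool{}
    	var walk func(string)
    	walk = func(n string) {
    		if seen[n] {
    			return
    		}
    		seen[n] = true
    		for _, d := range deps[n] {
    			walk(d)
    		}
    	}
    	for _, s := range start {
    		walk(s)
    	}
    	out := make([]string, 0, len(seen))
    	for n := range seen {
    		out = append(out, n)
    	}
    	sort.Strings(out) // sorted, starting packages included, as above
    	return out
    }

    func main() {
    	deps := map[string][]string{"web": {"ui"}, "ui": {"utils"}}
    	fmt.Println(transitive(deps, []string{"web"})) // [ui utils web]
    }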
-func (c *Context) InternalDependencies(start []string) ([]string, error) { - vertices := make(dag.Set) - for _, v := range start { - vertices.Add(v) - } - s := make(dag.Set) - memoFunc := func(v dag.Vertex, d int) error { - s.Add(v) - return nil - } - - if err := c.WorkspaceGraph.DepthFirstWalk(vertices, memoFunc); err != nil { - return nil, err - } - - // Use a for loop so we can coerce to string - // .List() returns a list of interface{} types, but - // we know they are strings. - targets := make([]string, 0, s.Len()) - for _, dep := range s.List() { - targets = append(targets, dep.(string)) - } - sort.Strings(targets) - - return targets, nil -} - -// ChangedPackages returns a list of changed packages based on the contents of a previous lockfile. -// This assumes that none of the package.json files in the workspace have changed; it is -// the responsibility of the caller to verify this. -func (c *Context) ChangedPackages(previousLockfile lockfile.Lockfile) ([]string, error) { - if lockfile.IsNil(previousLockfile) || lockfile.IsNil(c.Lockfile) { - return nil, fmt.Errorf("Cannot detect changed packages without previous and current lockfile") - } - - closures, err := lockfile.AllTransitiveClosures(c.externalWorkspaceDeps(), previousLockfile) - if err != nil { - return nil, err - } - - didPackageChange := func(pkgName string, pkg *fs.PackageJSON) bool { - previousDeps, ok := closures[pkg.Dir.ToUnixPath()] - if !ok || previousDeps.Cardinality() != len(pkg.TransitiveDeps) { - return true - } - - prevExternalDeps := make([]lockfile.Package, 0, previousDeps.Cardinality()) - for _, d := range previousDeps.ToSlice() { - prevExternalDeps = append(prevExternalDeps, d.(lockfile.Package)) - } - sort.Sort(lockfile.ByKey(prevExternalDeps)) - - for i := range prevExternalDeps { - if prevExternalDeps[i] != pkg.TransitiveDeps[i] { - return true - } - } - return false - } - - changedPkgs := make([]string, 0, len(c.WorkspaceInfos.PackageJSONs)) - - // check if prev and current have "global" changes e.g. 
lockfile bump - globalChange := c.Lockfile.GlobalChange(previousLockfile) - - for pkgName, pkg := range c.WorkspaceInfos.PackageJSONs { - if globalChange { - break - } - if didPackageChange(pkgName, pkg) { - if pkgName == util.RootPkgName { - globalChange = true - } else { - changedPkgs = append(changedPkgs, pkgName) - } - } - } - - if globalChange { - changedPkgs = make([]string, 0, len(c.WorkspaceInfos.PackageJSONs)) - for pkgName := range c.WorkspaceInfos.PackageJSONs { - changedPkgs = append(changedPkgs, pkgName) - } - sort.Strings(changedPkgs) - return changedPkgs, nil - } - - sort.Strings(changedPkgs) - return changedPkgs, nil -} diff --git a/cli/internal/context/context_test.go b/cli/internal/context/context_test.go deleted file mode 100644 index d6191fe86ec3a..0000000000000 --- a/cli/internal/context/context_test.go +++ /dev/null @@ -1,250 +0,0 @@ -package context - -import ( - "errors" - "os" - "path/filepath" - "regexp" - "sync" - "testing" - - testifyAssert "github.com/stretchr/testify/assert" - "github.com/vercel/turbo/cli/internal/fs" - "github.com/vercel/turbo/cli/internal/lockfile" - "github.com/vercel/turbo/cli/internal/packagemanager" - "github.com/vercel/turbo/cli/internal/turbopath" - "github.com/vercel/turbo/cli/internal/workspace" - "gotest.tools/v3/assert" -) - -func Test_isInternal(t *testing.T) { - rootpath, err := filepath.Abs(filepath.FromSlash("/some/repo")) - if err != nil { - t.Fatalf("failed to create absolute root path %v", err) - } - pkgDir, err := filepath.Abs(filepath.FromSlash("/some/repo/packages/libA")) - if err != nil { - t.Fatalf("failed to create absolute pkgDir %v", err) - } - tests := []struct { - name string - packageVersion string - dependencyVersion string - depName string - want bool - wantDepName string - }{ - { - name: "handles exact match", - packageVersion: "1.2.3", - dependencyVersion: "1.2.3", - want: true, - wantDepName: "@scope/foo", - }, - { - name: "handles semver range satisfied", - packageVersion: "1.2.3", - dependencyVersion: "^1.0.0", - want: true, - wantDepName: "@scope/foo", - }, - { - name: "handles semver range not-satisfied", - packageVersion: "2.3.4", - dependencyVersion: "^1.0.0", - want: false, - }, - { - name: "handles workspace protocol with version", - packageVersion: "1.2.3", - dependencyVersion: "workspace:1.2.3", - want: true, - wantDepName: "@scope/foo", - }, - { - name: "handles workspace protocol with relative path", - packageVersion: "1.2.3", - dependencyVersion: "workspace:../other-package/", - want: true, - wantDepName: "@scope/foo", - }, - { - name: "handles workspace protocol with relative path", - packageVersion: "1.2.3", - dependencyVersion: "workspace:../@scope/foo", - want: true, - wantDepName: "@scope/foo", - }, - { - name: "handles npm protocol with satisfied semver range", - packageVersion: "1.2.3", - dependencyVersion: "npm:^1.2.3", - want: true, // default in yarn is to use the workspace version unless `enableTransparentWorkspaces: true`. This isn't currently being checked. 
- wantDepName: "@scope/foo",
- },
- {
- name: "handles npm protocol with non-satisfied semver range",
- packageVersion: "2.3.4",
- dependencyVersion: "npm:^1.2.3",
- want: false,
- },
- {
- name: "handles pre-release versions",
- packageVersion: "1.2.3",
- dependencyVersion: "1.2.2-alpha-1234abcd.0",
- want: false,
- },
- {
- name: "handles non-semver package version",
- packageVersion: "sometag",
- dependencyVersion: "1.2.3",
- want: true, // for backwards compatibility with the code before versions were verified
- wantDepName: "@scope/foo",
- },
- {
- name: "handles non-semver dependency version",
- packageVersion: "1.2.3",
- dependencyVersion: "sometag",
- want: true, // for backwards compatibility with the code before versions were verified
- wantDepName: "@scope/foo",
- },
- {
- name: "handles file:... inside repo",
- packageVersion: "1.2.3",
- dependencyVersion: "file:../libB",
- want: true, // this is a sibling package
- wantDepName: "@scope/foo",
- },
- {
- name: "handles file:... outside repo",
- packageVersion: "1.2.3",
- dependencyVersion: "file:../../../otherproject",
- want: false, // this is not within the repo root
- },
- {
- name: "handles link:... inside repo",
- packageVersion: "1.2.3",
- dependencyVersion: "link:../libB",
- want: true, // this is a sibling package
- wantDepName: "@scope/foo",
- },
- {
- name: "handles link:... outside repo",
- packageVersion: "1.2.3",
- dependencyVersion: "link:../../../otherproject",
- want: false, // this is not within the repo root
- },
- {
- name: "handles development versions",
- packageVersion: "0.0.0-development",
- dependencyVersion: "*",
- want: true, // "*" should always match
- wantDepName: "@scope/foo",
- },
- {
- name: "handles pnpm alias star",
- packageVersion: "1.2.3",
- depName: "foo",
- dependencyVersion: "workspace:@scope/foo@*",
- want: true,
- wantDepName: "@scope/foo",
- },
- {
- name: "handles pnpm alias tilde",
- packageVersion: "1.2.3",
- depName: "foo",
- dependencyVersion: "workspace:@scope/foo@~",
- want: true,
- wantDepName: "@scope/foo",
- },
- {
- name: "handles pnpm alias caret",
- packageVersion: "1.2.3",
- depName: "foo",
- dependencyVersion: "workspace:@scope/foo@^",
- want: true,
- wantDepName: "@scope/foo",
- },
- }
-
- for _, tt := range tests {
- t.Run(tt.name, func(t *testing.T) {
- splitter := dependencySplitter{
- workspaces: map[string]*fs.PackageJSON{"@scope/foo": {Version: tt.packageVersion}},
- pkgDir: pkgDir,
- rootPath: rootpath,
- }
- depName := tt.depName
- if depName == "" {
- depName = "@scope/foo"
- }
- name, got := splitter.isInternal(depName, tt.dependencyVersion)
- assert.Equal(t, got, tt.want, tt.name)
- assert.Equal(t, name, tt.wantDepName, tt.name)
- })
- }
-}
-
-func TestBuildPackageGraph_DuplicateNames(t *testing.T) {
- path := getTestDir(t, "dupe-workspace-names")
- pkgJSON := &fs.PackageJSON{
- Name: "dupe-workspace-names",
- PackageManager: "pnpm@7.15.0",
- }
-
- _, actualErr := BuildPackageGraph(path, pkgJSON, "pnpm")
-
- // Not asserting the full error message, because it includes a path with slashes and backslashes,
- // and getting the regex incantation to check that is not worth it.
- // We have to use regex because the actual error may be different depending on which workspace was
- // added first and which one was second, causing the error.
- testifyAssert.Regexp(t, regexp.MustCompile("^Failed to add workspace \"same-name\".+$"), actualErr) -} - -func Test_populateExternalDeps_NoTransitiveDepsWithoutLockfile(t *testing.T) { - path := getTestDir(t, "dupe-workspace-names") - pkgJSON := &fs.PackageJSON{ - Name: "dupe-workspace-names", - PackageManager: "pnpm@7.15.0", - } - - pm, err := packagemanager.GetPackageManager("pnpm") - assert.NilError(t, err) - pm.UnmarshalLockfile = func(rootPackageJSON *fs.PackageJSON, contents []byte) (lockfile.Lockfile, error) { - return nil, errors.New("bad lockfile") - } - context := Context{ - WorkspaceInfos: workspace.Catalog{ - PackageJSONs: map[string]*fs.PackageJSON{ - "a": {}, - }, - }, - WorkspaceNames: []string{}, - PackageManager: pm, - mutex: sync.Mutex{}, - } - var warnings Warnings - err = context.populateExternalDeps(path, pkgJSON, &warnings) - assert.NilError(t, err) - - assert.DeepEqual(t, pkgJSON.ExternalDepsHash, "") - assert.DeepEqual(t, context.WorkspaceInfos.PackageJSONs["a"].ExternalDepsHash, "") - assert.Assert(t, warnings.errorOrNil() != nil) -} - -// This is duplicated from fs.turbo_json_test.go. -// I wasn't able to pull it into a helper file/package because -// it requires the `fs` package and it would cause cyclical dependencies -// when used in turbo_json_test.go and would require more changes to fix that. -func getTestDir(t *testing.T, testName string) turbopath.AbsoluteSystemPath { - defaultCwd, err := os.Getwd() - if err != nil { - t.Errorf("failed to get cwd: %v", err) - } - cwd, err := fs.CheckedToAbsoluteSystemPath(defaultCwd) - if err != nil { - t.Fatalf("cwd is not an absolute directory %v: %v", defaultCwd, err) - } - - return cwd.UntypedJoin("testdata", testName) -} diff --git a/cli/internal/context/testdata/dupe-workspace-names/apps/a/package.json b/cli/internal/context/testdata/dupe-workspace-names/apps/a/package.json deleted file mode 100644 index 94301a36730bc..0000000000000 --- a/cli/internal/context/testdata/dupe-workspace-names/apps/a/package.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "name": "same-name", - "dependencies": { - "ui": "workspace:*" - } -} diff --git a/cli/internal/context/testdata/dupe-workspace-names/apps/b/package.json b/cli/internal/context/testdata/dupe-workspace-names/apps/b/package.json deleted file mode 100644 index 94301a36730bc..0000000000000 --- a/cli/internal/context/testdata/dupe-workspace-names/apps/b/package.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "name": "same-name", - "dependencies": { - "ui": "workspace:*" - } -} diff --git a/cli/internal/context/testdata/dupe-workspace-names/package.json b/cli/internal/context/testdata/dupe-workspace-names/package.json deleted file mode 100644 index 3bf7403d277f1..0000000000000 --- a/cli/internal/context/testdata/dupe-workspace-names/package.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "name": "dupe-workspace-names", - "workspaces": [ - "apps/*" - ], - "packageManager": "pnpm@7.15.0" -} diff --git a/cli/internal/context/testdata/dupe-workspace-names/packages/ui/package.json b/cli/internal/context/testdata/dupe-workspace-names/packages/ui/package.json deleted file mode 100644 index 1cd75b539ecc2..0000000000000 --- a/cli/internal/context/testdata/dupe-workspace-names/packages/ui/package.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "name": "ui" -} diff --git a/cli/internal/context/testdata/dupe-workspace-names/pnpm-lock.yaml b/cli/internal/context/testdata/dupe-workspace-names/pnpm-lock.yaml deleted file mode 100644 index 0909cde8effa7..0000000000000 --- 
a/cli/internal/context/testdata/dupe-workspace-names/pnpm-lock.yaml +++ /dev/null @@ -1,21 +0,0 @@ -lockfileVersion: 5.4 - -importers: - - .: - specifiers: {} - - apps/a: - specifiers: - ui: workspace:* - dependencies: - ui: link:../../packages/ui - - apps/b: - specifiers: - ui: workspace:* - dependencies: - ui: link:../../packages/ui - - packages/ui: - specifiers: {} diff --git a/cli/internal/context/testdata/dupe-workspace-names/pnpm-workspace.yaml b/cli/internal/context/testdata/dupe-workspace-names/pnpm-workspace.yaml deleted file mode 100644 index 3ff5faaaf5f13..0000000000000 --- a/cli/internal/context/testdata/dupe-workspace-names/pnpm-workspace.yaml +++ /dev/null @@ -1,3 +0,0 @@ -packages: - - "apps/*" - - "packages/*" diff --git a/cli/internal/core/engine.go b/cli/internal/core/engine.go deleted file mode 100644 index d176ba83fad1f..0000000000000 --- a/cli/internal/core/engine.go +++ /dev/null @@ -1,645 +0,0 @@ -package core - -import ( - "errors" - "fmt" - "os" - "sort" - "strings" - "sync" - "sync/atomic" - - "github.com/vercel/turbo/cli/internal/fs" - "github.com/vercel/turbo/cli/internal/graph" - "github.com/vercel/turbo/cli/internal/util" - - "github.com/pyr-sh/dag" -) - -const ROOT_NODE_NAME = "___ROOT___" - -// Task is a higher level struct that contains the underlying TaskDefinition -// but also some adjustments to it, based on business logic. -type Task struct { - Name string - // TaskDefinition contains the config for the task from turbo.json - TaskDefinition fs.TaskDefinition -} - -type Visitor = func(taskID string) error - -// Engine contains both the DAG for the packages and the tasks and implements the methods to execute tasks in them -type Engine struct { - // TaskGraph is a graph of package-tasks - TaskGraph *dag.AcyclicGraph - PackageTaskDeps map[string][]string - rootEnabledTasks util.Set - - // completeGraph is the CompleteGraph. We need this to look up the Pipeline, etc. - completeGraph *graph.CompleteGraph - // isSinglePackage is used to load turbo.json correctly - isSinglePackage bool -} - -// NewEngine creates a new engine given a topologic graph of workspace package names -func NewEngine( - completeGraph *graph.CompleteGraph, - isSinglePackage bool, -) *Engine { - return &Engine{ - completeGraph: completeGraph, - TaskGraph: &dag.AcyclicGraph{}, - PackageTaskDeps: map[string][]string{}, - rootEnabledTasks: make(util.Set), - isSinglePackage: isSinglePackage, - } -} - -// EngineBuildingOptions help construct the TaskGraph -type EngineBuildingOptions struct { - // Packages in the execution scope, if nil, all packages will be considered in scope - Packages []string - // TaskNames in the execution scope, if nil, all tasks will be executed - TaskNames []string - // Restrict execution to only the listed task names - TasksOnly bool -} - -// EngineExecutionOptions controls a single walk of the task graph -type EngineExecutionOptions struct { - // Parallel is whether to run tasks in parallel - Parallel bool - // Concurrency is the number of concurrent tasks that can be executed - Concurrency int -} - -// StopExecutionSentinel is used to return an error from a graph Walk that indicates that -// all further walking should stop. -type StopExecutionSentinel struct { - err error -} - -// StopExecution wraps the given error in a sentinel error indicating that -// graph traversal should stop. Note that this will stop all tasks, not just -// downstream tasks. 
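StopExecution, defined next, implements a common Go sentinel-wrapper pattern: the cause is wrapped in a dedicated type that callers detect by type assertion rather than errors.Is. Here is a small stand-alone sketch of that pattern; the type and the task name are invented for illustration, not taken from this change.

```go
package main

import (
	"errors"
	"fmt"
)

// stopSentinel mirrors the shape of StopExecutionSentinel: it wraps a
// cause without exposing it through Unwrap, so callers detect it by
// type rather than with errors.Is.
type stopSentinel struct{ err error }

func (s *stopSentinel) Error() string {
	return fmt.Sprintf("execution stopped due to error: %v", s.err)
}

func main() {
	cause := errors.New("b#build exited with code 1")
	var err error = &stopSentinel{err: cause}

	// Type assertion, as in the corresponding check inside Engine.Execute.
	if se, ok := err.(*stopSentinel); ok {
		fmt.Println("stop requested; recording only the cause:", se.err)
	}
}
```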
-func StopExecution(reason error) *StopExecutionSentinel { - return &StopExecutionSentinel{ - err: reason, - } -} - -// Error implements error.Error for StopExecutionSentinel -func (se *StopExecutionSentinel) Error() string { - return fmt.Sprintf("Execution stopped due to error: %v", se.err) -} - -// Execute executes the pipeline, constructing an internal task graph and walking it accordingly. -func (e *Engine) Execute(visitor Visitor, opts EngineExecutionOptions) []error { - var sema = util.NewSemaphore(opts.Concurrency) - var errored int32 - - // The dag library's behavior is that returning an error from the Walk callback cancels downstream - // tasks, but not unrelated tasks. - // The behavior we want is to either cancel everything or nothing (--continue). So, we do our own - // error handling. Collect any errors that occur in "errors", and report them as the result of - // Execute. panic on any other error returned by Walk. - var errorMu sync.Mutex - var errors []error - recordErr := func(err error) { - errorMu.Lock() - defer errorMu.Unlock() - errors = append(errors, err) - } - unusedErrs := e.TaskGraph.Walk(func(v dag.Vertex) error { - // Use an extra func() to ensure that we are not returning any errors to Walk - func() { - // If something has already errored, short-circuit. - // There is a race here between concurrent tasks. However, if there is not a - // dependency edge between them, we are not required to have a strict order - // between them, so a failed task can fail to short-circuit a concurrent - // task that happened to be starting at the same time. - if atomic.LoadInt32(&errored) != 0 { - return - } - // Each vertex in the graph is a taskID (package#task format) - taskID := dag.VertexName(v) - - // Always return if it is the root node - if strings.Contains(taskID, ROOT_NODE_NAME) { - return - } - - // Acquire the semaphore unless parallel - if !opts.Parallel { - sema.Acquire() - defer sema.Release() - } - - if err := visitor(taskID); err != nil { - if se, ok := err.(*StopExecutionSentinel); ok { - // We only ever flip from false to true, so we don't need to compare and swap the atomic - atomic.StoreInt32(&errored, 1) - recordErr(se.err) - // Note: returning an error here would cancel execution of downstream tasks only, and show - // up in the errors returned from Walk. However, we are doing our own error collection - // and intentionally ignoring errors from walk, so fallthrough and use the "errored" mechanism - // to skip downstream tasks - } else { - recordErr(err) - } - } - }() - return nil - }) - if len(unusedErrs) > 0 { - panic("we should be handling execution errors via our own errors + errored mechanism") - } - return errors -} - -// MissingTaskError is a specialized Error thrown in the case that we can't find a task. -// We want to allow this error when getting task definitions, so we have to special case it. 
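The short-circuit mechanism in Execute above reduces to an atomic flag plus a mutex-guarded error slice. This sketch isolates just that mechanism; the task names and the simulated failure are invented. It also exhibits the same benign race the comment above describes: a concurrent task that checks the flag before it flips will still run.

```go
package main

import (
	"fmt"
	"sync"
	"sync/atomic"
)

func main() {
	var errored int32
	var mu sync.Mutex
	var collected []error
	var wg sync.WaitGroup

	run := func(name string, fail bool) {
		defer wg.Done()
		// Short-circuit: once anything has failed, later tasks return
		// before doing any work.
		if atomic.LoadInt32(&errored) != 0 {
			return
		}
		if fail {
			atomic.StoreInt32(&errored, 1)
			mu.Lock()
			collected = append(collected, fmt.Errorf("%s failed", name))
			mu.Unlock()
		}
	}

	wg.Add(2)
	go run("b#build", true)
	go run("a#build", false)
	wg.Wait()
	fmt.Println(collected)
}
```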
-type MissingTaskError struct { - workspaceName string - taskID string - taskName string -} - -func (m *MissingTaskError) Error() string { - return fmt.Sprintf("Could not find \"%s\" or \"%s\" in workspace \"%s\"", m.taskName, m.taskID, m.workspaceName) -} - -func (e *Engine) getTaskDefinition(pkg string, taskName string, taskID string) (*Task, error) { - pipeline, err := e.completeGraph.GetPipelineFromWorkspace(pkg, e.isSinglePackage) - - if err != nil { - if pkg != util.RootPkgName { - // If there was no turbo.json in the workspace, fallback to the root turbo.json - if errors.Is(err, os.ErrNotExist) { - return e.getTaskDefinition(util.RootPkgName, taskName, taskID) - } - - // otherwise bubble it up - return nil, err - } - - return nil, err - } - - if task, ok := pipeline[taskID]; ok { - return &Task{ - Name: taskName, - TaskDefinition: task.GetTaskDefinition(), - }, nil - } - - if task, ok := pipeline[taskName]; ok { - return &Task{ - Name: taskName, - TaskDefinition: task.GetTaskDefinition(), - }, nil - } - - // An error here means turbo.json exists, but didn't define the task. - // Fallback to the root pipeline to find the task. - if pkg != util.RootPkgName { - return e.getTaskDefinition(util.RootPkgName, taskName, taskID) - } - - // Return this as a custom type so we can ignore it specifically - return nil, &MissingTaskError{ - taskName: taskName, - taskID: taskID, - workspaceName: pkg, - } -} - -// Prepare constructs the Task Graph for a list of packages and tasks -func (e *Engine) Prepare(options *EngineBuildingOptions) error { - pkgs := options.Packages - taskNames := options.TaskNames - tasksOnly := options.TasksOnly - - // If there are no affected packages, we don't need to go through all this work - // we can just exit early. - // TODO(mehulkar): but we still need to validate bad task names? - if len(pkgs) == 0 { - return nil - } - - traversalQueue := []string{} - - // get a set of taskNames passed in. we'll remove the ones that have a definition - missing := util.SetFromStrings(taskNames) - - // Get a list of entry points into our TaskGraph. - // We do this by taking the input taskNames, and pkgs - // and creating a queue of taskIDs that we can traverse and gather dependencies from. - for _, pkg := range pkgs { - for _, taskName := range taskNames { - taskID := util.GetTaskId(pkg, taskName) - - // Look up the task in the package - foundTask, err := e.getTaskDefinition(pkg, taskName, taskID) - - // We can skip MissingTaskErrors because we'll validate against them later - // Return all other errors - if err != nil { - var e *MissingTaskError - if errors.As(err, &e) { - // Initially, non-package tasks are not required to exist, as long as some - // package in the list packages defines it as a package-task. Dependencies - // *are* required to have a definition. - continue - } - - return err - } - - // If we found a task definition, remove it from the missing list - if foundTask != nil { - // delete taskName if it was found - missing.Delete(taskName) - - // Even if a task definition was found, we _only_ want to add it as an entry point to - // the task graph (i.e. the traversalQueue), if it's: - // - A task from the non-root workspace (i.e. 
tasks from every other workspace)
- // - A task that we *know* is a rootEnabled task (in which case, the root workspace is acceptable)
- isRootPkg := pkg == util.RootPkgName
- if !isRootPkg || e.rootEnabledTasks.Includes(taskName) {
- traversalQueue = append(traversalQueue, taskID)
- }
- }
- }
- }
-
- visited := make(util.Set)
-
- // validate that all tasks passed were found
- missingList := missing.UnsafeListOfStrings()
- sort.Strings(missingList)
-
- if len(missingList) > 0 {
- return fmt.Errorf("Could not find the following tasks in project: %s", strings.Join(missingList, ", "))
- }
-
- // Things get appended to traversalQueue inside this loop, so we use the len() check instead of range.
- for len(traversalQueue) > 0 {
- // pop off the first item from the traversalQueue
- taskID := traversalQueue[0]
- traversalQueue = traversalQueue[1:]
-
- pkg, taskName := util.GetPackageTaskFromId(taskID)
-
- if pkg == util.RootPkgName && !e.rootEnabledTasks.Includes(taskName) {
- return fmt.Errorf("%v needs an entry in turbo.json before it can be depended on because it is a task run from the root package", taskID)
- }
-
- if pkg != ROOT_NODE_NAME {
- if _, ok := e.completeGraph.WorkspaceInfos.PackageJSONs[pkg]; !ok {
- // If we have a pkg it should be in WorkspaceInfos.
- // If we're hitting this error something has gone wrong earlier when building WorkspaceInfos
- // or the workspace really doesn't exist and turbo.json is misconfigured.
- return fmt.Errorf("Could not find workspace \"%s\" from task \"%s\" in project", pkg, taskID)
- }
- }
-
- taskDefinitions, err := e.getTaskDefinitionChain(taskID, taskName)
- if err != nil {
- return err
- }
-
- taskDefinition, err := fs.MergeTaskDefinitions(taskDefinitions)
- if err != nil {
- return err
- }
-
- // Skip this iteration of the loop if we've already seen this taskID
- if visited.Includes(taskID) {
- continue
- }
-
- visited.Add(taskID)
-
- // Put this taskDefinition into the Graph so we can look it up later during execution.
- e.completeGraph.TaskDefinitions[taskID] = taskDefinition
-
- topoDeps := util.SetFromStrings(taskDefinition.TopologicalDependencies)
- deps := make(util.Set)
- isPackageTask := util.IsPackageTask(taskName)
-
- for _, dependency := range taskDefinition.TaskDependencies {
- // If the current task is a workspace-specific task (including root Task)
- // and its dependency is _also_ a workspace-specific task, we need to add
- // a reference to this dependency directly into the engine.
- // TODO @mehulkar: Why do we need this?
- if isPackageTask && util.IsPackageTask(dependency) {
- if err := e.AddDep(dependency, taskName); err != nil {
- return err
- }
- } else {
- // For non-workspace-specific dependencies, we attach a reference to
- // the task that is added into the engine.
- deps.Add(dependency)
- }
- }
-
- // Filter down the tasks if there's a filter in place
- // https://turbo.build/repo/docs/reference/command-line-reference/run#--only
- if tasksOnly {
- deps = deps.Filter(func(d interface{}) bool {
- for _, target := range taskNames {
- return fmt.Sprintf("%v", d) == target
- }
- return false
- })
- topoDeps = topoDeps.Filter(func(d interface{}) bool {
- for _, target := range taskNames {
- return fmt.Sprintf("%v", d) == target
- }
- return false
- })
- }
-
- toTaskID := taskID
-
- // hasTopoDeps will be true if the task depends on any tasks from dependency packages
- // E.g.
`dev: { dependsOn: [^dev] }` - nonRootDepPkgs := e.completeGraph.WorkspaceGraph.DownEdges(pkg).Filter(func(node interface{}) bool { - if packageName, ok := node.(string); ok { - return packageName != ROOT_NODE_NAME - } - return true - }) - hasTopoDeps := topoDeps.Len() > 0 && nonRootDepPkgs.Len() > 0 - - // hasDeps will be true if the task depends on any tasks from its own package - // E.g. `build: { dependsOn: [dev] }` - hasDeps := deps.Len() > 0 - - // hasPackageTaskDeps will be true if this is a workspace-specific task, and - // it depends on another workspace-specific tasks - // E.g. `my-package#build: { dependsOn: [my-package#beforebuild] }`. - hasPackageTaskDeps := false - if _, ok := e.PackageTaskDeps[toTaskID]; ok { - hasPackageTaskDeps = true - } - - if hasTopoDeps { - for _, from := range topoDeps.UnsafeListOfStrings() { - // add task dep from all the package deps within repo - for depPkg := range nonRootDepPkgs { - fromTaskID := util.GetTaskId(depPkg, from) - e.TaskGraph.Add(fromTaskID) - e.TaskGraph.Add(toTaskID) - e.TaskGraph.Connect(dag.BasicEdge(toTaskID, fromTaskID)) - traversalQueue = append(traversalQueue, fromTaskID) - } - } - } - - if hasDeps { - for _, from := range deps.UnsafeListOfStrings() { - fromTaskID := util.GetTaskId(pkg, from) - e.TaskGraph.Add(fromTaskID) - e.TaskGraph.Add(toTaskID) - e.TaskGraph.Connect(dag.BasicEdge(toTaskID, fromTaskID)) - traversalQueue = append(traversalQueue, fromTaskID) - } - } - - if hasPackageTaskDeps { - if pkgTaskDeps, ok := e.PackageTaskDeps[toTaskID]; ok { - for _, fromTaskID := range pkgTaskDeps { - e.TaskGraph.Add(fromTaskID) - e.TaskGraph.Add(toTaskID) - e.TaskGraph.Connect(dag.BasicEdge(toTaskID, fromTaskID)) - traversalQueue = append(traversalQueue, fromTaskID) - } - } - } - - // Add the root node into the graph - if !hasDeps && !hasTopoDeps && !hasPackageTaskDeps { - e.TaskGraph.Add(ROOT_NODE_NAME) - e.TaskGraph.Add(toTaskID) - e.TaskGraph.Connect(dag.BasicEdge(toTaskID, ROOT_NODE_NAME)) - } - } - - return nil -} - -// AddTask adds root tasks to the engine so they can be looked up later. -func (e *Engine) AddTask(taskName string) { - if util.IsPackageTask(taskName) { - pkg, taskName := util.GetPackageTaskFromId(taskName) - if pkg == util.RootPkgName { - e.rootEnabledTasks.Add(taskName) - } - } -} - -// AddDep adds tuples from+to task ID combos in tuple format so they can be looked up later. 
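The wiring loop above produces a task graph keyed by "package#task" IDs, with dependency-free tasks anchored to the root node. Below is a small sketch of the resulting shape for two packages where `build` declares `dependsOn: ["^build"]`. The package names are invented, and printing relies on the String method of the underlying dag graph; this is an illustration, not code from this change.

```go
package main

import (
	"fmt"

	"github.com/pyr-sh/dag"
)

const rootNodeName = "___ROOT___"

func taskID(pkg, task string) string { return pkg + "#" + task }

func main() {
	var g dag.AcyclicGraph

	// app depends on ui, and build declares dependsOn: ["^build"],
	// so app#build gets an edge to ui#build.
	appBuild := taskID("app", "build")
	uiBuild := taskID("ui", "build")
	g.Add(appBuild)
	g.Add(uiBuild)
	g.Connect(dag.BasicEdge(appBuild, uiBuild))

	// ui#build has no remaining dependencies, so it anchors to the
	// root node, as in the final branch of the loop above.
	g.Add(rootNodeName)
	g.Connect(dag.BasicEdge(uiBuild, rootNodeName))

	fmt.Println(g.String())
}
```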
-func (e *Engine) AddDep(fromTaskID string, toTaskID string) error { - fromPkg, _ := util.GetPackageTaskFromId(fromTaskID) - if fromPkg != ROOT_NODE_NAME && fromPkg != util.RootPkgName && !e.completeGraph.WorkspaceGraph.HasVertex(fromPkg) { - return fmt.Errorf("found reference to unknown package: %v in task %v", fromPkg, fromTaskID) - } - - if _, ok := e.PackageTaskDeps[toTaskID]; !ok { - e.PackageTaskDeps[toTaskID] = []string{} - } - - e.PackageTaskDeps[toTaskID] = append(e.PackageTaskDeps[toTaskID], fromTaskID) - - return nil -} - -// ValidatePersistentDependencies checks if any task dependsOn persistent tasks and throws -// an error if that task is actually implemented -func (e *Engine) ValidatePersistentDependencies(graph *graph.CompleteGraph, concurrency int) error { - var validationErrors []string - persistentCount := 0 - - // Adding in a lock because otherwise walking the graph can introduce a data race - // (reproducible with `go test -race`) - var mu sync.Mutex - - errs := e.TaskGraph.Walk(func(v dag.Vertex) error { - vertexName := dag.VertexName(v) // vertexName is a taskID - - // No need to check the root node if that's where we are. - if strings.Contains(vertexName, ROOT_NODE_NAME) { - return nil - } - - currentTaskDefinition, currentTaskExists := e.completeGraph.TaskDefinitions[vertexName] - if currentTaskExists && currentTaskDefinition.Persistent { - persistentCount++ - } - - currentPackageName, currentTaskName := util.GetPackageTaskFromId(vertexName) - - // For each "downEdge" (i.e. each task that _this_ task dependsOn) - // check if the downEdge is a Persistent task, and if it actually has the script implemented - // in that package's package.json - for dep := range e.TaskGraph.DownEdges(vertexName) { - depTaskID := dep.(string) - // No need to check the root node - if strings.Contains(depTaskID, ROOT_NODE_NAME) { - return nil - } - - // Parse the taskID of this dependency task - packageName, taskName := util.GetPackageTaskFromId(depTaskID) - - // Get the Task Definition so we can check if it is Persistent - depTaskDefinition, taskExists := e.completeGraph.TaskDefinitions[depTaskID] - - if !taskExists { - return fmt.Errorf("Cannot find task definition for %v in package %v", depTaskID, packageName) - } - - // Get information about the package - pkg, pkgExists := graph.WorkspaceInfos.PackageJSONs[packageName] - if !pkgExists { - return fmt.Errorf("Cannot find package %v", packageName) - } - _, hasScript := pkg.Scripts[taskName] - - // If both conditions are true set a value and break out of checking the dependencies - if depTaskDefinition.Persistent && hasScript { - // Aquire a lock, because otherwise walking this group can cause a race condition - // writing to the same validationErrors var defined outside the Walk(). This shows - // up when running tests with the `-race` flag. - mu.Lock() - defer mu.Unlock() - validationErrors = append(validationErrors, fmt.Sprintf( - "\"%s\" is a persistent task, \"%s\" cannot depend on it", - util.GetTaskId(packageName, taskName), - util.GetTaskId(currentPackageName, currentTaskName), - )) - - break - } - } - - return nil - }) - - for _, err := range errs { - return fmt.Errorf("Validation failed: %v", err) - } - - if len(validationErrors) > 0 { - sort.Strings(validationErrors) - return fmt.Errorf("%s", strings.Join(validationErrors, "\n")) - } else if persistentCount >= concurrency { - return fmt.Errorf("You have %v persistent tasks but `turbo` is configured for concurrency of %v. 
Set --concurrency to at least %v", persistentCount, concurrency, persistentCount+1) - } - - return nil -} - -// getTaskDefinitionChain gets a set of TaskDefinitions that apply to the taskID. -// These definitions should be merged by the consumer. -func (e *Engine) getTaskDefinitionChain(taskID string, taskName string) ([]fs.BookkeepingTaskDefinition, error) { - // Start a list of TaskDefinitions we've found for this TaskID - taskDefinitions := []fs.BookkeepingTaskDefinition{} - - rootPipeline, err := e.completeGraph.GetPipelineFromWorkspace(util.RootPkgName, e.isSinglePackage) - if err != nil { - // It should be very unlikely that we can't find a root pipeline. Even for single package repos - // the pipeline is synthesized from package.json, so there should be _something_ here. - return nil, err - } - - // Look for the taskDefinition in the root pipeline. - if rootTaskDefinition, err := rootPipeline.GetTask(taskID, taskName); err == nil { - taskDefinitions = append(taskDefinitions, *rootTaskDefinition) - } - - // If we're in a single package repo, we can just exit with the TaskDefinition in the root pipeline - // since there are no workspaces, and we don't need to follow any extends keys. - if e.isSinglePackage { - if len(taskDefinitions) == 0 { - return nil, fmt.Errorf("Could not find \"%s\" in root turbo.json", taskID) - } - return taskDefinitions, nil - } - - // If the taskID is a root task (e.g. //#build), we don't need to look - // for a workspace task, since these can only be defined in the root turbo.json. - taskIDPackage, _ := util.GetPackageTaskFromId(taskID) - if taskIDPackage != util.RootPkgName && taskIDPackage != ROOT_NODE_NAME { - // If there is an error, we can ignore it, since turbo.json config is not required in the workspace. - if workspaceTurboJSON, err := e.completeGraph.GetTurboConfigFromWorkspace(taskIDPackage, e.isSinglePackage); err != nil { - // swallow the error where the config file doesn't exist, but bubble up other things - if !errors.Is(err, os.ErrNotExist) { - return nil, err - } - } else { - // Run some validations on a workspace turbo.json. Note that these validations are on - // the whole struct, and not relevant to the taskID we're looking at right now. - validationErrors := workspaceTurboJSON.Validate([]fs.TurboJSONValidation{ - validateNoPackageTaskSyntax, - validateExtends, - }) - - if len(validationErrors) > 0 { - fullError := errors.New("Invalid turbo.json") - for _, validationErr := range validationErrors { - fullError = fmt.Errorf("%w\n - %s", fullError, validationErr) - } - - return nil, fullError - } - - // If there are no errors, we can (try to) add the TaskDefinition to our list. - if workspaceDefinition, ok := workspaceTurboJSON.Pipeline[taskName]; ok { - taskDefinitions = append(taskDefinitions, workspaceDefinition) - } - } - } - - if len(taskDefinitions) == 0 { - return nil, fmt.Errorf("Could not find \"%s\" in root turbo.json or \"%s\" in workspace", taskID, taskName) - } - - return taskDefinitions, nil -} - -func validateNoPackageTaskSyntax(turboJSON *fs.TurboJSON) []error { - errors := []error{} - - for taskIDOrName := range turboJSON.Pipeline { - if util.IsPackageTask(taskIDOrName) { - taskName := util.StripPackageName(taskIDOrName) - errors = append(errors, fmt.Errorf("\"%s\". Use \"%s\" instead", taskIDOrName, taskName)) - } - } - - return errors -} - -func validateExtends(turboJSON *fs.TurboJSON) []error { - extendErrors := []error{} - extends := turboJSON.Extends - // TODO(mehulkar): Enable extending from more than one workspace. 
- if len(extends) > 1 { - extendErrors = append(extendErrors, fmt.Errorf("You can only extend from the root workspace")) - } - - // We don't support this right now - if len(extends) == 0 { - extendErrors = append(extendErrors, fmt.Errorf("No \"extends\" key found")) - } - - // TODO(mehulkar): Enable extending from non-root workspace. - if len(extends) == 1 && extends[0] != util.RootPkgName { - extendErrors = append(extendErrors, fmt.Errorf("You can only extend from the root workspace")) - } - - return extendErrors -} diff --git a/cli/internal/core/engine_test.go b/cli/internal/core/engine_test.go deleted file mode 100644 index 3ce00bf44573f..0000000000000 --- a/cli/internal/core/engine_test.go +++ /dev/null @@ -1,88 +0,0 @@ -package core - -import ( - "errors" - "testing" - - "github.com/vercel/turbo/cli/internal/fs" - "github.com/vercel/turbo/cli/internal/graph" - "github.com/vercel/turbo/cli/internal/workspace" - "gotest.tools/v3/assert" - - "github.com/pyr-sh/dag" -) - -func TestShortCircuiting(t *testing.T) { - var workspaceGraph dag.AcyclicGraph - workspaceGraph.Add("a") - workspaceGraph.Add("b") - workspaceGraph.Add("c") - // Dependencies: a -> b -> c - workspaceGraph.Connect(dag.BasicEdge("a", "b")) - workspaceGraph.Connect(dag.BasicEdge("b", "c")) - - buildTask := &fs.BookkeepingTaskDefinition{} - err := buildTask.UnmarshalJSON([]byte("{\"dependsOn\": [\"^build\"]}")) - assert.NilError(t, err, "BookkeepingTaskDefinition unmarshall") - - pipeline := map[string]fs.BookkeepingTaskDefinition{ - "build": *buildTask, - } - - p := NewEngine(&graph.CompleteGraph{ - WorkspaceGraph: workspaceGraph, - Pipeline: pipeline, - TaskDefinitions: map[string]*fs.TaskDefinition{}, - WorkspaceInfos: workspace.Catalog{ - PackageJSONs: map[string]*fs.PackageJSON{ - "//": {}, - "a": {}, - "b": {}, - "c": {}, - }, - TurboConfigs: map[string]*fs.TurboJSON{ - "//": { - Pipeline: pipeline, - }, - }, - }, - }, false) - - p.AddTask("build") - - err = p.Prepare(&EngineBuildingOptions{ - Packages: []string{"a", "b", "c"}, - TaskNames: []string{"build"}, - TasksOnly: false, - }) - - if err != nil { - t.Fatalf("%v", err) - } - - executed := map[string]bool{ - "a#build": false, - "b#build": false, - "c#build": false, - } - expectedErr := errors.New("an error occurred") - // b#build is going to error, we expect to not execute a#build, which depends on b - testVisitor := func(taskID string) error { - println(taskID) - executed[taskID] = true - if taskID == "b#build" { - return StopExecution(expectedErr) - } - return nil - } - - errs := p.Execute(testVisitor, EngineExecutionOptions{ - Concurrency: 10, - }) - assert.Equal(t, len(errs), 1) - assert.Equal(t, errs[0], expectedErr) - - assert.Equal(t, executed["c#build"], true) - assert.Equal(t, executed["b#build"], true) - assert.Equal(t, executed["a#build"], false) -} diff --git a/cli/internal/daemon/connector/connector.go b/cli/internal/daemon/connector/connector.go deleted file mode 100644 index 14bdd7a5272c6..0000000000000 --- a/cli/internal/daemon/connector/connector.go +++ /dev/null @@ -1,408 +0,0 @@ -package connector - -import ( - "context" - "fmt" - "io/fs" - "os" - "os/exec" - "time" - - "github.com/cenkalti/backoff/v4" - "github.com/hashicorp/go-hclog" - "github.com/nightlyone/lockfile" - "github.com/pkg/errors" - "github.com/vercel/turbo/cli/internal/turbodprotocol" - "github.com/vercel/turbo/cli/internal/turbopath" - "google.golang.org/grpc" - "google.golang.org/grpc/codes" - "google.golang.org/grpc/credentials/insecure" - "google.golang.org/grpc/status" -) - 
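The connector defined below ultimately reaches the daemon by dialing a `unix:` target with insecure transport credentials (see getClientConn and addr later in this file). A minimal sketch of that dial path follows; the socket path is invented for the example, and note that grpc.Dial is lazy, so no I/O happens until the first RPC.

```go
package main

import (
	"fmt"

	"google.golang.org/grpc"
	"google.golang.org/grpc/credentials/insecure"
)

func main() {
	// grpc accepts "unix:<absolute path>" targets directly, which is
	// how the connector reaches the daemon on every platform.
	addr := fmt.Sprintf("unix:%v", "/tmp/turbod/0123456789abcdef/turbod.sock")
	conn, err := grpc.Dial(addr, grpc.WithTransportCredentials(insecure.NewCredentials()))
	if err != nil {
		panic(err)
	}
	defer func() { _ = conn.Close() }()
	fmt.Println("client connection created (lazy; no I/O until first RPC)")
}
```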
-var (
- // ErrFailedToStart is returned when the daemon process cannot be started
- ErrFailedToStart = errors.New("daemon could not be started")
- // ErrVersionMismatch is returned when the daemon process was spawned by a different version than the connecting client
- ErrVersionMismatch = errors.New("daemon version does not match client version")
- errConnectionFailure = errors.New("could not connect to daemon")
- errUnavailable = errors.New("the server is not ready yet")
- // ErrTooManyAttempts is returned when the client fails to connect too many times
- ErrTooManyAttempts = errors.New("reached maximum number of attempts contacting daemon")
- // ErrDaemonNotRunning is returned when the client cannot contact the daemon and has
- // been instructed not to attempt to start a new daemon
- ErrDaemonNotRunning = errors.New("the daemon is not running")
-)
-
-// Opts is the set of configurable options for the client connection,
-// including some options to be passed through to the daemon process if
-// it needs to be started.
-type Opts struct {
- ServerTimeout time.Duration
- DontStart bool // if true, don't attempt to start the daemon
- DontKill bool // if true, don't attempt to kill the daemon
-}
-
-// Client represents a connection to the daemon process
-type Client struct {
- turbodprotocol.TurbodClient
- *grpc.ClientConn
- SockPath turbopath.AbsoluteSystemPath
- PidPath turbopath.AbsoluteSystemPath
- LogPath turbopath.AbsoluteSystemPath
-}
-
-// Connector instances are used to create a connection to turbo's daemon process.
-// The daemon will be started, or killed and restarted, if necessary.
-type Connector struct {
- Logger hclog.Logger
- Bin string
- Opts Opts
- SockPath turbopath.AbsoluteSystemPath
- PidPath turbopath.AbsoluteSystemPath
- LogPath turbopath.AbsoluteSystemPath
- TurboVersion string
-}
-
-// ConnectionError is returned in the error case from connect. It wraps the underlying
-// cause and adds a message with the relevant files for the user to check.
-type ConnectionError struct {
- SockPath turbopath.AbsoluteSystemPath
- PidPath turbopath.AbsoluteSystemPath
- LogPath turbopath.AbsoluteSystemPath
- cause error
-}
-
-func (ce *ConnectionError) Error() string {
- return fmt.Sprintf(`connection to turbo daemon process failed.
- To quickly resolve the issue, try running:
-
- $ turbo daemon clean
-
- To debug further - please ensure the following:
- - the process identified by the pid in the file at %v is not running, and remove %v
- - check the logs at %v
- - the unix domain socket at %v has been removed
-
- You can also run without the daemon process by passing --no-daemon`, ce.PidPath, ce.PidPath, ce.LogPath, ce.SockPath)
-}
-
-// Unwrap allows a connection error to work with standard library "errors" and compatible packages
-func (ce *ConnectionError) Unwrap() error {
- return ce.cause
-}
-
-func (c *Connector) wrapConnectionError(err error) error {
- return &ConnectionError{
- SockPath: c.SockPath,
- PidPath: c.PidPath,
- LogPath: c.LogPath,
- cause: err,
- }
-}
-
-// lockFile returns a pointer to where a lockfile should be.
-// lockfile.New does not perform IO and the only error it produces
-// is in the case a non-absolute path was provided. We're guaranteeing a
-// turbopath.AbsoluteSystemPath, so an error here is an indication of a bug and
-// we should crash.
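The pid-file locking below leans on the nightlyone/lockfile package. This sketch exercises the same calls used in this file (New, TryLock, GetOwner, Unlock) against a throwaway path. The path is invented for the example; New only errors on non-absolute paths, which is why lockFile can treat an error from it as a bug.

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"

	"github.com/nightlyone/lockfile"
)

func main() {
	// filepath.Join over os.TempDir yields an absolute path, which
	// lockfile.New requires.
	pidPath := filepath.Join(os.TempDir(), "turbod-example.pid")
	lock, err := lockfile.New(pidPath)
	if err != nil {
		panic(err) // only possible for non-absolute paths
	}
	if err := lock.TryLock(); err != nil {
		fmt.Println("another process owns the pid file:", err)
		return
	}
	defer func() { _ = lock.Unlock() }()

	// GetOwner reports the process currently holding the lock; after a
	// successful TryLock that is this process.
	owner, err := lock.GetOwner()
	if err == nil {
		fmt.Println("pid file owned by", owner.Pid)
	}
}
```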
-func (c *Connector) lockFile() lockfile.Lockfile {
- lockFile, err := lockfile.New(c.PidPath.ToString())
- if err != nil {
- panic(err)
- }
- return lockFile
-}
-
-func (c *Connector) addr() string {
- // grpc special-cases parsing of unix: urls
- // to avoid url.Parse. This lets us pass through our absolute
- // paths unmodified, even on windows.
- // See code here: https://github.com/grpc/grpc-go/blob/d83070ec0d9043f713b6a63e1963c593b447208c/internal/transport/http_util.go#L392
- return fmt.Sprintf("unix:%v", c.SockPath.ToString())
-}
-
-// We defer to the daemon's pid file as the locking mechanism.
-// If it doesn't exist, we will attempt to start the daemon.
-// If the daemon has a different version, ask it to shut down.
-// If the pid file exists but we can't connect, try to kill
-// the daemon.
-// If we can't cause the daemon to remove the pid file, report
-// an error to the user that includes the file location so that
-// they can resolve it.
-const (
- _maxAttempts = 3
- _shutdownTimeout = 1 * time.Second
- _socketPollTimeout = 1 * time.Second
- _notReadyTimeout = 3 * time.Millisecond
-)
-
-// killLiveServer tells a running server to shut down. This method is also responsible
-// for closing this client connection.
-func (c *Connector) killLiveServer(ctx context.Context, client *Client, serverPid int) error {
- defer func() { _ = client.Close() }()
-
- _, err := client.Shutdown(ctx, &turbodprotocol.ShutdownRequest{})
- if err != nil {
- c.Logger.Error(fmt.Sprintf("failed to shutdown running daemon. attempting to force it closed: %v", err))
- return c.killDeadServer(serverPid)
- }
- // Wait for the server to gracefully exit
- err = backoff.Retry(func() error {
- lockFile := c.lockFile()
- owner, err := lockFile.GetOwner()
- if os.IsNotExist(err) {
- // If there is no pid file anymore, we can conclude that the daemon successfully
- // exited and cleaned up after itself.
- return nil
- } else if err != nil {
- // some other error occurred getting the lockfile owner
- return backoff.Permanent(err)
- } else if owner.Pid == serverPid {
- // We're still waiting for the server to shut down
- return errNeedsRetry
- }
- // if there's no error and the lockfile has a new pid, someone else must've started a new daemon.
- // Consider the old one killed and move on.
- return nil
- }, backoffWithTimeout(_shutdownTimeout))
- if errors.Is(err, errNeedsRetry) {
- c.Logger.Error(fmt.Sprintf("daemon did not exit after %v, attempting to force it closed", _shutdownTimeout.String()))
- return c.killDeadServer(serverPid)
- } else if err != nil {
- return err
- }
- return nil
-}
-
-func (c *Connector) killDeadServer(pid int) error {
- // currently the only error that this constructor returns is
- // in the case that you don't provide an absolute path.
- // Given that we require an absolute path as input, this should
- // hopefully never happen.
- lockFile := c.lockFile()
- process, err := lockFile.GetOwner()
- if err == nil {
- // Check that this is the same process that we failed to connect to.
- // Otherwise, connectInternal will loop around again and start with whatever
- // new process has the pid file.
- if process.Pid == pid {
- // we have a process that we need to kill
- // TODO(gsoltis): graceful kill? the process is already not responding to requests,
- // but it could be in the middle of a graceful shutdown. Probably should let it clean
- // itself up, and report an error and defer to a force-kill by the user
- if err := process.Kill(); err != nil {
- return err
- }
- }
- return nil
- } else if errors.Is(err, os.ErrNotExist) {
- // There's no pid file. Someone else killed it. Returning no error will cause the
- // connectInternal to loop around and try the connection again.
- return nil
- }
- return err
-}
-
-// Connect attempts to create a connection to a turbo daemon.
-// Retries and daemon restarts are built in. If this fails,
-// it is unlikely to succeed after an automated retry.
-func (c *Connector) Connect(ctx context.Context) (*Client, error) {
- client, err := c.connectInternal(ctx)
- if err != nil {
- return nil, c.wrapConnectionError(err)
- }
- return client, nil
-}
-
-func (c *Connector) connectInternal(ctx context.Context) (*Client, error) {
- // for each attempt, we:
- // 1. try to find or start a daemon process, getting its pid
- // 2. wait for the unix domain socket file to appear
- // 3. connect to the unix domain socket. Note that this connection is not validated
- // 4. send a hello message. This validates the connection as a side effect of
- // negotiating versions, which currently requires an exact match.
- // In the event of a live, but incompatible server, we attempt to shut it down and start
- // a new one. In the event of an unresponsive server, we attempt to kill the process
- // identified by the pid file, with the hope that it will clean up after itself.
- // Failures include details about where to find logs, the pid file, and the socket file.
- for i := 0; i < _maxAttempts; i++ {
- serverPid, err := c.getOrStartDaemon()
- if err != nil {
- // If we fail to even start the daemon process, return immediately, we're unlikely
- // to succeed without user intervention
- return nil, err
- }
- if err := c.waitForSocket(); errors.Is(err, ErrFailedToStart) {
- // If we didn't see the socket file, try again. It's possible that
- // the daemon encountered a transitory error
- continue
- } else if err != nil {
- return nil, err
- }
- client, err := c.getClientConn()
- if err != nil {
- return nil, err
- }
- if err := c.sendHello(ctx, client); err == nil {
- // We connected and negotiated a version, we're all set
- return client, nil
- } else if errors.Is(err, ErrVersionMismatch) {
- // We don't want to knock down a perfectly fine daemon in a status check.
- if c.Opts.DontKill {
- return nil, err
- }
-
- // We now know we aren't going to return this client,
- // but killLiveServer still needs it to send the Shutdown request.
- // killLiveServer will close the client when it is done with it.
- if err := c.killLiveServer(ctx, client, serverPid); err != nil {
- return nil, err
- }
- // Loops back around and tries again.
- } else if errors.Is(err, errUnavailable) {
- // The rust daemon will open the socket a few ms before it's ready to accept connections.
- // If we get here, we know that the socket exists, but the server isn't ready yet.
- // We'll wait a few ms and try again.
- c.Logger.Debug("server not ready yet")
- time.Sleep(_notReadyTimeout)
- } else if err != nil {
- // Some other error occurred, close the client and
- // report the error to the user
- if closeErr := client.Close(); closeErr != nil {
- // In the event that we fail to close the client, bundle that error along also.
- // Keep the original error in the error chain, as it's more likely to be useful
- // or needed for matching on later.
- err = errors.Wrapf(err, "also failed to close client connection: %v", closeErr)
- }
- return nil, err
- }
- }
- return nil, ErrTooManyAttempts
-}
-
-// getOrStartDaemon returns the PID of the daemon process on success. It may start
-// the daemon if it doesn't find one running.
-func (c *Connector) getOrStartDaemon() (int, error) {
- lockFile := c.lockFile()
- daemonProcess, getDaemonProcessErr := lockFile.GetOwner()
- if getDaemonProcessErr != nil {
- // We expect the daemon to write the pid file, so a non-existent or stale
- // pid file is fine. The daemon will write its own, after verifying that it
- // doesn't exist or is stale.
- if errors.Is(getDaemonProcessErr, fs.ErrNotExist) || errors.Is(getDaemonProcessErr, lockfile.ErrDeadOwner) {
- if c.Opts.DontStart {
- return 0, ErrDaemonNotRunning
- }
- pid, startDaemonErr := c.startDaemon()
- if startDaemonErr != nil {
- return 0, startDaemonErr
- }
- return pid, nil
- }
-
- // We could have hit any number of errors.
- // - Failed to read the file for permission reasons.
- // - User emptied the file's contents.
- // - etc.
- return 0, errors.Wrapf(getDaemonProcessErr, "An issue was encountered with the pid file. Please remove it and try again: %v", c.PidPath)
- }
-
- return daemonProcess.Pid, nil
-}
-
-func (c *Connector) getClientConn() (*Client, error) {
- creds := insecure.NewCredentials()
- conn, err := grpc.Dial(c.addr(), grpc.WithTransportCredentials(creds))
- if err != nil {
- return nil, err
- }
- tc := turbodprotocol.NewTurbodClient(conn)
- return &Client{
- TurbodClient: tc,
- ClientConn: conn,
- SockPath: c.SockPath,
- PidPath: c.PidPath,
- LogPath: c.LogPath,
- }, nil
-}
-
-func (c *Connector) sendHello(ctx context.Context, client turbodprotocol.TurbodClient) error {
- _, err := client.Hello(ctx, &turbodprotocol.HelloRequest{
- Version: c.TurboVersion,
- // TODO: add session id
- })
- status := status.Convert(err)
- switch status.Code() {
- case codes.OK:
- return nil
- case codes.Unimplemented:
- fallthrough // some versions of the rust daemon return Unimplemented rather than FailedPrecondition
- case codes.FailedPrecondition:
- return ErrVersionMismatch
- case codes.Unavailable:
- return errUnavailable
- default:
- return err
- }
-}
-
-var errNeedsRetry = errors.New("retry the operation")
-
-// backoffWithTimeout returns an exponential backoff, starting at 2ms and doubling until
-// the specified timeout has elapsed. Note that backoff instances are stateful, so we need
-// a new one each time we do a Retry.
-func backoffWithTimeout(timeout time.Duration) *backoff.ExponentialBackOff {
- return &backoff.ExponentialBackOff{
- Multiplier: 2,
- InitialInterval: 2 * time.Millisecond,
- MaxElapsedTime: timeout,
- Clock: backoff.SystemClock,
- Stop: backoff.Stop,
- }
-}
-
-// waitForSocket waits for the unix domain socket to appear
-func (c *Connector) waitForSocket() error {
- // Note that we don't care if this is our daemon
- // or not. We started a process, but someone else could beat
- // us to listening. That's fine, we'll check the version
- // later.
However, we need to ensure that _some_ pid file - // exists to protect against stale .sock files - if err := waitForFile(c.PidPath); err != nil { - return err - } - return waitForFile(c.SockPath) -} - -func waitForFile(file turbopath.AbsoluteSystemPath) error { - err := backoff.Retry(func() error { - if !file.FileExists() { - return errNeedsRetry - } - return nil - }, backoffWithTimeout(_socketPollTimeout)) - if errors.Is(err, errNeedsRetry) { - return ErrFailedToStart - } else if err != nil { - return err - } - return nil -} - -// startDaemon starts the daemon and returns the pid for the new process -func (c *Connector) startDaemon() (int, error) { - args := []string{"--skip-infer", "daemon"} - if c.Opts.ServerTimeout != 0 { - args = append(args, fmt.Sprintf("--idle-time=%v", c.Opts.ServerTimeout.String())) - } - c.Logger.Debug(fmt.Sprintf("starting turbod binary %v", c.Bin)) - cmd := exec.Command(c.Bin, args...) - // For the daemon to have its own process group id so that any attempts - // to kill it and its process tree don't kill this client. - cmd.SysProcAttr = getSysProcAttrs() - err := cmd.Start() - if err != nil { - return 0, err - } - return cmd.Process.Pid, nil -} diff --git a/cli/internal/daemon/connector/connector_test.go b/cli/internal/daemon/connector/connector_test.go deleted file mode 100644 index 62b450491b00a..0000000000000 --- a/cli/internal/daemon/connector/connector_test.go +++ /dev/null @@ -1,256 +0,0 @@ -package connector - -import ( - "context" - "errors" - "net" - "os/exec" - "runtime" - "strconv" - "testing" - - "github.com/hashicorp/go-hclog" - "github.com/nightlyone/lockfile" - "github.com/vercel/turbo/cli/internal/fs" - "github.com/vercel/turbo/cli/internal/turbodprotocol" - "github.com/vercel/turbo/cli/internal/turbopath" - "google.golang.org/grpc" - "google.golang.org/grpc/codes" - "google.golang.org/grpc/credentials/insecure" - "google.golang.org/grpc/status" - "google.golang.org/grpc/test/bufconn" - "gotest.tools/v3/assert" -) - -// testBin returns a platform-appropriate executable to run node. -// Node works here as an arbitrary process to start, since it's -// required for turbo development. It will obviously not implement -// our grpc service, use a mockServer instance where that's needed. 
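waitForFile above is a poll loop built from cenkalti/backoff/v4, constructing a fresh ExponentialBackOff for each Retry exactly as backoffWithTimeout does. Here is a self-contained sketch of that pattern; the socket path is invented for the example.

```go
package main

import (
	"errors"
	"fmt"
	"os"
	"time"

	"github.com/cenkalti/backoff/v4"
)

var errRetry = errors.New("retry the operation")

func main() {
	// Same literal construction as backoffWithTimeout above: start at
	// 2ms, double each attempt, give up after the elapsed-time cap.
	b := &backoff.ExponentialBackOff{
		Multiplier:      2,
		InitialInterval: 2 * time.Millisecond,
		MaxElapsedTime:  time.Second,
		Clock:           backoff.SystemClock,
		Stop:            backoff.Stop,
	}
	err := backoff.Retry(func() error {
		if _, statErr := os.Stat("/tmp/turbod-example.sock"); statErr != nil {
			return errRetry // file not there yet; poll again
		}
		return nil
	}, b)
	if err != nil {
		fmt.Println("socket never appeared before the timeout:", err)
		return
	}
	fmt.Println("socket is present")
}
```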
-func testBin() string { - if runtime.GOOS == "windows" { - return "node.exe" - } - return "node" -} - -func getUnixSocket(dir turbopath.AbsoluteSystemPath) turbopath.AbsoluteSystemPath { - return dir.UntypedJoin("turbod-test.sock") -} - -func getPidFile(dir turbopath.AbsoluteSystemPath) turbopath.AbsoluteSystemPath { - return dir.UntypedJoin("turbod-test.pid") -} - -func TestGetOrStartDaemonInvalidPIDFile(t *testing.T) { - logger := hclog.Default() - dir := t.TempDir() - dirPath := fs.AbsoluteSystemPathFromUpstream(dir) - - pidPath := getPidFile(dirPath) - writeFileErr := pidPath.WriteFile(nil, 0777) - assert.NilError(t, writeFileErr, "WriteFile") - - c := &Connector{ - Logger: logger, - Opts: Opts{}, - PidPath: pidPath, - } - - pid, err := c.getOrStartDaemon() - assert.Equal(t, pid, 0) - assert.ErrorContains(t, err, "issue was encountered with the pid file") -} - -func TestConnectFailsWithoutGrpcServer(t *testing.T) { - // We aren't starting a server that is going to write - // to our socket file, so we should see a series of connection - // failures, followed by ErrTooManyAttempts - logger := hclog.Default() - dir := t.TempDir() - dirPath := fs.AbsoluteSystemPathFromUpstream(dir) - - sockPath := getUnixSocket(dirPath) - pidPath := getPidFile(dirPath) - ctx := context.Background() - bin := testBin() - c := &Connector{ - Logger: logger, - Bin: bin, - Opts: Opts{}, - SockPath: sockPath, - PidPath: pidPath, - } - // Note that we expect ~3s here, for 3 attempts with a timeout of 1s - _, err := c.connectInternal(ctx) - assert.ErrorIs(t, err, ErrTooManyAttempts) -} - -func TestKillDeadServerNoPid(t *testing.T) { - logger := hclog.Default() - dir := t.TempDir() - dirPath := fs.AbsoluteSystemPathFromUpstream(dir) - - sockPath := getUnixSocket(dirPath) - pidPath := getPidFile(dirPath) - c := &Connector{ - Logger: logger, - Bin: "nonexistent", - Opts: Opts{}, - SockPath: sockPath, - PidPath: pidPath, - } - - err := c.killDeadServer(99999) - assert.NilError(t, err, "killDeadServer") -} - -func TestKillDeadServerNoProcess(t *testing.T) { - logger := hclog.Default() - dir := t.TempDir() - dirPath := fs.AbsoluteSystemPathFromUpstream(dir) - - sockPath := getUnixSocket(dirPath) - pidPath := getPidFile(dirPath) - // Simulate the socket already existing, with no live daemon - err := sockPath.WriteFile([]byte("junk"), 0644) - assert.NilError(t, err, "WriteFile") - err = pidPath.WriteFile([]byte("99999"), 0644) - assert.NilError(t, err, "WriteFile") - c := &Connector{ - Logger: logger, - Bin: "nonexistent", - Opts: Opts{}, - SockPath: sockPath, - PidPath: pidPath, - } - - err = c.killDeadServer(99999) - assert.ErrorIs(t, err, lockfile.ErrDeadOwner) - stillExists := pidPath.FileExists() - if !stillExists { - t.Error("pidPath should still exist, expected the user to clean it up") - } -} - -func TestKillDeadServerWithProcess(t *testing.T) { - logger := hclog.Default() - dir := t.TempDir() - dirPath := fs.AbsoluteSystemPathFromUpstream(dir) - - sockPath := getUnixSocket(dirPath) - pidPath := getPidFile(dirPath) - // Simulate the socket already existing, with no live daemon - err := sockPath.WriteFile([]byte("junk"), 0644) - assert.NilError(t, err, "WriteFile") - bin := testBin() - cmd := exec.Command(bin) - err = cmd.Start() - assert.NilError(t, err, "cmd.Start") - pid := cmd.Process.Pid - if pid == 0 { - t.Fatalf("failed to start process %v", bin) - } - - err = pidPath.WriteFile([]byte(strconv.Itoa(pid)), 0644) - assert.NilError(t, err, "WriteFile") - c := &Connector{ - Logger: logger, - Bin: "nonexistent", - 
Opts: Opts{}, - SockPath: sockPath, - PidPath: pidPath, - } - - err = c.killDeadServer(pid) - assert.NilError(t, err, "killDeadServer") - stillExists := pidPath.FileExists() - if !stillExists { - t.Error("pidPath no longer exists, expected client to not clean it up") - } - err = cmd.Wait() - exitErr := &exec.ExitError{} - if !errors.As(err, &exitErr) { - t.Errorf("expected an exit error from %v, got %v", bin, err) - } -} - -type mockServer struct { - turbodprotocol.UnimplementedTurbodServer - helloErr error - shutdownResp *turbodprotocol.ShutdownResponse - pidFile turbopath.AbsoluteSystemPath -} - -// Simulates server exiting by cleaning up the pid file -func (s *mockServer) Shutdown(ctx context.Context, req *turbodprotocol.ShutdownRequest) (*turbodprotocol.ShutdownResponse, error) { - if err := s.pidFile.Remove(); err != nil { - return nil, err - } - return s.shutdownResp, nil -} - -func (s *mockServer) Hello(ctx context.Context, req *turbodprotocol.HelloRequest) (*turbodprotocol.HelloResponse, error) { - if req.Version == "" { - return nil, errors.New("missing version") - } - return nil, s.helloErr -} - -func TestKillLiveServer(t *testing.T) { - logger := hclog.Default() - dir := t.TempDir() - dirPath := fs.AbsoluteSystemPathFromUpstream(dir) - - sockPath := getUnixSocket(dirPath) - pidPath := getPidFile(dirPath) - err := pidPath.WriteFile([]byte("99999"), 0644) - assert.NilError(t, err, "WriteFile") - - ctx := context.Background() - c := &Connector{ - Logger: logger, - Bin: "nonexistent", - Opts: Opts{}, - SockPath: sockPath, - PidPath: pidPath, - TurboVersion: "some-version", - } - - st := status.New(codes.FailedPrecondition, "version mismatch") - mock := &mockServer{ - shutdownResp: &turbodprotocol.ShutdownResponse{}, - helloErr: st.Err(), - pidFile: pidPath, - } - lis := bufconn.Listen(1024 * 1024) - grpcServer := grpc.NewServer() - turbodprotocol.RegisterTurbodServer(grpcServer, mock) - go func(t *testing.T) { - if err := grpcServer.Serve(lis); err != nil { - t.Logf("server closed: %v", err) - } - }(t) - - conn, err := grpc.DialContext(ctx, "bufnet", grpc.WithContextDialer(func(ctx context.Context, s string) (net.Conn, error) { - return lis.Dial() - }), grpc.WithTransportCredentials(insecure.NewCredentials())) - assert.NilError(t, err, "DialContext") - turboClient := turbodprotocol.NewTurbodClient(conn) - client := &Client{ - TurbodClient: turboClient, - ClientConn: conn, - } - err = c.sendHello(ctx, client) - if !errors.Is(err, ErrVersionMismatch) { - t.Errorf("sendHello error got %v, want %v", err, ErrVersionMismatch) - } - err = c.killLiveServer(ctx, client, 99999) - assert.NilError(t, err, "killLiveServer") - // Expect the pid file and socket files to have been cleaned up - if pidPath.FileExists() { - t.Errorf("expected pid file to have been deleted: %v", pidPath) - } - if sockPath.FileExists() { - t.Errorf("expected socket file to have been deleted: %v", sockPath) - } -} diff --git a/cli/internal/daemon/connector/fork.go b/cli/internal/daemon/connector/fork.go deleted file mode 100644 index 8a6d01da558ce..0000000000000 --- a/cli/internal/daemon/connector/fork.go +++ /dev/null @@ -1,15 +0,0 @@ -//go:build !windows -// +build !windows - -package connector - -import "syscall" - -// getSysProcAttrs returns the platform-specific attributes we want to -// use while forking the daemon process. 
Currently this is limited to -// forcing a new process group -func getSysProcAttrs() *syscall.SysProcAttr { - return &syscall.SysProcAttr{ - Setpgid: true, - } -} diff --git a/cli/internal/daemon/connector/fork_windows.go b/cli/internal/daemon/connector/fork_windows.go deleted file mode 100644 index b9d6e77908397..0000000000000 --- a/cli/internal/daemon/connector/fork_windows.go +++ /dev/null @@ -1,15 +0,0 @@ -//go:build windows -// +build windows - -package connector - -import "syscall" - -// getSysProcAttrs returns the platform-specific attributes we want to -// use while forking the daemon process. Currently this is limited to -// forcing a new process group -func getSysProcAttrs() *syscall.SysProcAttr { - return &syscall.SysProcAttr{ - CreationFlags: syscall.CREATE_NEW_PROCESS_GROUP, - } -} diff --git a/cli/internal/daemon/daemon.go b/cli/internal/daemon/daemon.go deleted file mode 100644 index 2d3b63abdd7c1..0000000000000 --- a/cli/internal/daemon/daemon.go +++ /dev/null @@ -1,307 +0,0 @@ -package daemon - -import ( - "context" - "crypto/sha256" - "encoding/hex" - "fmt" - "io" - "net" - "os" - "path/filepath" - "strings" - "time" - - grpc_recovery "github.com/grpc-ecosystem/go-grpc-middleware/recovery" - "github.com/hashicorp/go-hclog" - "github.com/nightlyone/lockfile" - "github.com/pkg/errors" - "github.com/vercel/turbo/cli/internal/cmdutil" - "github.com/vercel/turbo/cli/internal/daemon/connector" - "github.com/vercel/turbo/cli/internal/fs" - "github.com/vercel/turbo/cli/internal/server" - "github.com/vercel/turbo/cli/internal/signals" - "github.com/vercel/turbo/cli/internal/turbopath" - "github.com/vercel/turbo/cli/internal/turbostate" - "google.golang.org/grpc" - "google.golang.org/grpc/codes" - "google.golang.org/grpc/status" -) - -type daemon struct { - logger hclog.Logger - repoRoot turbopath.AbsoluteSystemPath - timeout time.Duration - reqCh chan struct{} - timedOutCh chan struct{} -} - -func getRepoHash(repoRoot turbopath.AbsoluteSystemPath) string { - pathHash := sha256.Sum256([]byte(repoRoot.ToString())) - // We grab a substring of the hash because there is a 108-character limit on the length - // of a filepath for unix domain socket. - return hex.EncodeToString(pathHash[:])[:16] -} - -func getDaemonFileRoot(repoRoot turbopath.AbsoluteSystemPath) turbopath.AbsoluteSystemPath { - tempDir := fs.TempDir("turbod") - hexHash := getRepoHash(repoRoot) - return tempDir.UntypedJoin(hexHash) -} - -func getLogFilePath(repoRoot turbopath.AbsoluteSystemPath) (turbopath.AbsoluteSystemPath, error) { - hexHash := getRepoHash(repoRoot) - base := repoRoot.Base() - logFilename := fmt.Sprintf("%v-%v.log", hexHash, base) - - logsDir := fs.GetTurboDataDir().UntypedJoin("logs") - return logsDir.UntypedJoin(logFilename), nil -} - -func getUnixSocket(repoRoot turbopath.AbsoluteSystemPath) turbopath.AbsoluteSystemPath { - root := getDaemonFileRoot(repoRoot) - return root.UntypedJoin("turbod.sock") -} - -func getPidFile(repoRoot turbopath.AbsoluteSystemPath) turbopath.AbsoluteSystemPath { - root := getDaemonFileRoot(repoRoot) - return root.UntypedJoin("turbod.pid") -} - -// logError logs an error and outputs it to the UI. -func (d *daemon) logError(err error) { - d.logger.Error(fmt.Sprintf("error %v", err)) -} - -// we're only appending, and we're creating the file if it doesn't exist. -// we do not need to read the log file. 
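// ----------------------------------------------------------------------
// Illustrative sketch (editor's addition, not from the deleted files):
// getRepoHash/getUnixSocket above exist because unix domain socket
// paths are limited to roughly 104-108 bytes (sun_path), so the daemon
// keys its on-disk files off a short hash of the repo root rather than
// the root path itself. Standalone version; socketPathFor is a
// hypothetical name.
package main

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"
	"path/filepath"
)

func socketPathFor(repoRoot, tempDir string) string {
	sum := sha256.Sum256([]byte(repoRoot))
	short := hex.EncodeToString(sum[:])[:16] // 16 hex chars, as above
	return filepath.Join(tempDir, short, "turbod.sock")
}

func main() {
	fmt.Println(socketPathFor("/home/me/very/long/repo/path", "/tmp/turbod"))
	// -> /tmp/turbod/<16 hex chars>/turbod.sock, short for any repo path
}
// ----------------------------------------------------------------------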
-var _logFileFlags = os.O_WRONLY | os.O_APPEND | os.O_CREATE - -// ExecuteDaemon executes the root daemon command -func ExecuteDaemon(ctx context.Context, helper *cmdutil.Helper, signalWatcher *signals.Watcher, executionState *turbostate.ExecutionState) error { - base, err := helper.GetCmdBase(executionState) - if err != nil { - return err - } - if executionState.CLIArgs.TestRun { - base.UI.Info("Daemon test run successful") - return nil - } - - idleTimeout := 4 * time.Hour - if executionState.CLIArgs.Command.Daemon.IdleTimeout != "" { - idleTimeout, err = time.ParseDuration(executionState.CLIArgs.Command.Daemon.IdleTimeout) - if err != nil { - return err - } - } - - logFilePath, err := getLogFilePath(base.RepoRoot) - if err != nil { - return err - } - if err := logFilePath.EnsureDir(); err != nil { - return err - } - logFile, err := logFilePath.OpenFile(_logFileFlags, 0644) - if err != nil { - return err - } - defer func() { _ = logFile.Close() }() - logger := hclog.New(&hclog.LoggerOptions{ - Output: io.MultiWriter(logFile, os.Stdout), - Level: hclog.Info, - Color: hclog.ColorOff, - Name: "turbod", - }) - - d := &daemon{ - logger: logger, - repoRoot: base.RepoRoot, - timeout: idleTimeout, - reqCh: make(chan struct{}), - timedOutCh: make(chan struct{}), - } - serverName := getRepoHash(base.RepoRoot) - turboServer, err := server.New(serverName, d.logger.Named("rpc server"), base.RepoRoot, base.TurboVersion, logFilePath) - if err != nil { - d.logError(err) - return err - } - defer func() { _ = turboServer.Close() }() - err = d.runTurboServer(ctx, turboServer, signalWatcher) - if err != nil { - d.logError(err) - return err - } - return nil -} - -var errInactivityTimeout = errors.New("turbod shut down from inactivity") - -// tryAcquirePidfileLock attempts to ensure that only one daemon is running from the given pid file path -// at a time. If this process fails to write its PID to the lockfile, it must exit. -func tryAcquirePidfileLock(pidPath turbopath.AbsoluteSystemPath) (lockfile.Lockfile, error) { - if err := pidPath.EnsureDir(); err != nil { - return "", err - } - lockFile, err := lockfile.New(pidPath.ToString()) - if err != nil { - // lockfile.New should only return an error if it wasn't given an absolute path. - // We are attempting to use the type system to enforce that we are passing an - // absolute path. An error here likely means a bug, and we should crash. - panic(err) - } - if err := lockFile.TryLock(); err != nil { - return "", err - } - return lockFile, nil -} - -type rpcServer interface { - Register(grpcServer server.GRPCServer) -} - -func (d *daemon) runTurboServer(parentContext context.Context, rpcServer rpcServer, signalWatcher *signals.Watcher) error { - ctx, cancel := context.WithCancel(parentContext) - defer cancel() - pidPath := getPidFile(d.repoRoot) - lock, err := tryAcquirePidfileLock(pidPath) - if err != nil { - return errors.Wrapf(err, "failed to lock the pid file at %v. Is another turbo daemon running?", lock) - } - // When we're done serving, clean up the pid file. - // Also, if *this* goroutine panics, make sure we unlock the pid file. - defer func() { - if err := lock.Unlock(); err != nil { - d.logger.Error(errors.Wrapf(err, "failed unlocking pid file at %v", lock).Error()) - } - }() - // This handler runs in request goroutines. If a request causes a panic, - // this handler will get called after a call to recover(), meaning we are - // no longer panicking. We return a server error and cancel our context, - // which triggers a shutdown of the server. 
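// ----------------------------------------------------------------------
// Illustrative sketch (editor's addition, not from the deleted files):
// the single-instance pattern behind tryAcquirePidfileLock above, shown
// standalone. TryLock records our pid; a second TryLock from another
// process fails with lockfile.ErrBusy while the owner is alive, and
// succeeds again once the owner is dead, because the library treats
// pids of dead processes as stale.
package main

import (
	"errors"
	"fmt"

	"github.com/nightlyone/lockfile"
)

func main() {
	lock, err := lockfile.New("/tmp/turbod-example.pid") // path must be absolute
	if err != nil {
		panic(err)
	}
	if err := lock.TryLock(); err != nil {
		if errors.Is(err, lockfile.ErrBusy) {
			fmt.Println("another daemon already owns the pid file")
			return
		}
		panic(err)
	}
	defer func() { _ = lock.Unlock() }()
	fmt.Println("acquired pid file lock")
}
// ----------------------------------------------------------------------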
- panicHandler := func(thePanic interface{}) error { - cancel() - d.logger.Error(fmt.Sprintf("Caught panic %v", thePanic)) - return status.Error(codes.Internal, "server panicked") - } - - // If we have the lock, assume that we are the owners of the socket file, - // whether it already exists or not. That means we are free to remove it. - sockPath := getUnixSocket(d.repoRoot) - if err := sockPath.Remove(); err != nil && !errors.Is(err, os.ErrNotExist) { - return err - } - d.logger.Debug(fmt.Sprintf("Using socket path %v (%v)\n", sockPath, len(sockPath))) - lis, err := net.Listen("unix", sockPath.ToString()) - if err != nil { - return err - } - // We don't need to explicitly close 'lis', the grpc server will handle that - s := grpc.NewServer( - grpc.ChainUnaryInterceptor( - d.onRequest, - grpc_recovery.UnaryServerInterceptor(grpc_recovery.WithRecoveryHandler(panicHandler)), - ), - ) - go d.timeoutLoop(ctx) - - rpcServer.Register(s) - errCh := make(chan error) - go func(errCh chan<- error) { - if err := s.Serve(lis); err != nil { - errCh <- err - } - close(errCh) - }(errCh) - - // Note that we aren't deferring s.GracefulStop here because we also need - // to drain the error channel, which isn't guaranteed to happen until - // the server has stopped. That in turn may depend on GracefulStop being - // called. - // Future work could restructure this to make that simpler. - var exitErr error - select { - case err, ok := <-errCh: - // The server exited - if ok { - exitErr = err - } - case <-d.timedOutCh: - // This is the inactivity timeout case - exitErr = errInactivityTimeout - s.GracefulStop() - case <-ctx.Done(): - // If a request handler panics, it will cancel this context - s.GracefulStop() - case <-signalWatcher.Done(): - // This is fired if caught a signal - s.GracefulStop() - } - // Wait for the server to exit, if it hasn't already. - // When it does, this channel will close. We don't - // care about the error in this scenario because we've - // either requested a close via cancelling the context, - // an inactivity timeout, or caught a signal. 
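// ----------------------------------------------------------------------
// Illustrative sketch (editor's addition, not from the deleted files):
// the serve/stop/drain pattern used below, reduced to its essentials.
// Serve runs in a goroutine that writes its result to errCh and then
// closes it; whichever shutdown condition fires first requests a stop,
// and the final drain loop guarantees the server goroutine has finished
// before we return. All names here are hypothetical.
package main

import (
	"context"
	"fmt"
	"time"
)

func serve(stop <-chan struct{}) error {
	<-stop // stand-in for grpc.Server.Serve
	return nil
}

func run(ctx context.Context, timedOut <-chan struct{}) error {
	stop := make(chan struct{})
	errCh := make(chan error)
	go func() {
		if err := serve(stop); err != nil {
			errCh <- err
		}
		close(errCh)
	}()

	var exitErr error
	select {
	case err, ok := <-errCh:
		if ok {
			exitErr = err // the server exited on its own
		}
	case <-timedOut:
		exitErr = fmt.Errorf("shut down from inactivity")
		close(stop)
	case <-ctx.Done():
		close(stop)
	}
	// Drain: wait for serve to finish before returning.
	for range errCh {
	}
	return exitErr
}

func main() {
	timedOut := make(chan struct{})
	go func() { time.Sleep(10 * time.Millisecond); close(timedOut) }()
	fmt.Println(run(context.Background(), timedOut))
}
// ----------------------------------------------------------------------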
- for range errCh { - } - return exitErr -} - -func (d *daemon) onRequest(ctx context.Context, req interface{}, info *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (resp interface{}, err error) { - d.reqCh <- struct{}{} - return handler(ctx, req) -} - -func (d *daemon) timeoutLoop(ctx context.Context) { - timeoutCh := time.After(d.timeout) -outer: - for { - select { - case <-d.reqCh: - timeoutCh = time.After(d.timeout) - case <-timeoutCh: - close(d.timedOutCh) - break outer - case <-ctx.Done(): - break outer - } - } -} - -// ClientOpts re-exports connector.Ops to encapsulate the connector package -type ClientOpts = connector.Opts - -// Client re-exports connector.Client to encapsulate the connector package -type Client = connector.Client - -// GetClient returns a client that can be used to interact with the daemon -func GetClient(ctx context.Context, repoRoot turbopath.AbsoluteSystemPath, logger hclog.Logger, turboVersion string, opts ClientOpts) (*Client, error) { - sockPath := getUnixSocket(repoRoot) - pidPath := getPidFile(repoRoot) - logPath, err := getLogFilePath(repoRoot) - if err != nil { - return nil, err - } - bin, err := os.Executable() - if err != nil { - return nil, err - } - // The Go binary can no longer be called directly, so we need to route back to the rust wrapper - if strings.HasSuffix(bin, "go-turbo") { - bin = filepath.Join(filepath.Dir(bin), "turbo") - } else if strings.HasSuffix(bin, "go-turbo.exe") { - bin = filepath.Join(filepath.Dir(bin), "turbo.exe") - } - c := &connector.Connector{ - Logger: logger.Named("TurbodClient"), - Bin: bin, - Opts: opts, - SockPath: sockPath, - PidPath: pidPath, - LogPath: logPath, - TurboVersion: turboVersion, - } - return c.Connect(ctx) -} diff --git a/cli/internal/daemon/daemon_test.go b/cli/internal/daemon/daemon_test.go deleted file mode 100644 index 66a714d3eea55..0000000000000 --- a/cli/internal/daemon/daemon_test.go +++ /dev/null @@ -1,262 +0,0 @@ -package daemon - -import ( - "context" - "errors" - "os/exec" - "runtime" - "strconv" - "sync" - "testing" - "time" - - "github.com/hashicorp/go-hclog" - "github.com/nightlyone/lockfile" - "github.com/vercel/turbo/cli/internal/fs" - "github.com/vercel/turbo/cli/internal/server" - "github.com/vercel/turbo/cli/internal/signals" - "github.com/vercel/turbo/cli/internal/turbopath" - "google.golang.org/grpc" - "google.golang.org/grpc/credentials/insecure" - "google.golang.org/grpc/test/grpc_testing" - "gotest.tools/v3/assert" -) - -// testBin returns a platform-appropriate node binary. -// We need some process to be running and findable by the -// lockfile library, and we don't particularly care what it is. -// Since node is required for turbo development, it makes a decent -// candidate. -func testBin() string { - if runtime.GOOS == "windows" { - return "node.exe" - } - return "node" -} - -func TestPidFileLock(t *testing.T) { - repoRoot := fs.AbsoluteSystemPathFromUpstream(t.TempDir()) - - pidPath := getPidFile(repoRoot) - // the lockfile library handles removing pids from dead owners - _, err := tryAcquirePidfileLock(pidPath) - assert.NilError(t, err, "acquirePidLock") - - // Start up a node process and fake a pid file for it. 
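// ----------------------------------------------------------------------
// Illustrative sketch (editor's addition, not from the deleted files):
// the idle-timeout loop above, shown standalone. Every request resets
// the timer; if the timer wins the race, the daemon announces the
// timeout and stops watching. Note that time.After allocates a fresh
// timer per reset, which the original accepts for simplicity.
package main

import (
	"context"
	"fmt"
	"time"
)

func timeoutLoop(ctx context.Context, timeout time.Duration, reqCh <-chan struct{}, timedOutCh chan<- struct{}) {
	timer := time.After(timeout)
	for {
		select {
		case <-reqCh:
			timer = time.After(timeout) // activity: start a fresh timer
		case <-timer:
			close(timedOutCh)
			return
		case <-ctx.Done():
			return
		}
	}
}

func main() {
	reqCh := make(chan struct{})
	timedOut := make(chan struct{})
	go timeoutLoop(context.Background(), 50*time.Millisecond, reqCh, timedOut)
	reqCh <- struct{}{} // one request, then silence
	<-timedOut
	fmt.Println("idle timeout fired")
}
// ----------------------------------------------------------------------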
- // Ensure that we can't start the daemon while the node process is live - bin := testBin() - node := exec.Command(bin) - err = node.Start() - assert.NilError(t, err, "Start") - stopNode := func() error { - if err := node.Process.Kill(); err != nil { - return err - } - // We expect an error from node, we just sent a kill signal - _ = node.Wait() - return nil - } - // In case we fail the test, still try to kill the node process - t.Cleanup(func() { _ = stopNode() }) - nodePid := node.Process.Pid - err = pidPath.WriteFile([]byte(strconv.Itoa(nodePid)), 0644) - assert.NilError(t, err, "WriteFile") - - _, err = tryAcquirePidfileLock(pidPath) - assert.ErrorIs(t, err, lockfile.ErrBusy) - - // Stop the node process, but leave the pid file there - // This simulates a crash - err = stopNode() - assert.NilError(t, err, "stopNode") - // the lockfile library handles removing pids from dead owners - _, err = tryAcquirePidfileLock(pidPath) - assert.NilError(t, err, "acquirePidLock") -} - -type testRPCServer struct { - grpc_testing.UnimplementedTestServiceServer - registered chan struct{} -} - -func (ts *testRPCServer) EmptyCall(ctx context.Context, req *grpc_testing.Empty) (*grpc_testing.Empty, error) { - panic("intended to panic") -} - -func (ts *testRPCServer) Register(grpcServer server.GRPCServer) { - grpc_testing.RegisterTestServiceServer(grpcServer, ts) - ts.registered <- struct{}{} -} - -func newTestRPCServer() *testRPCServer { - return &testRPCServer{ - registered: make(chan struct{}, 1), - } -} - -func waitForFile(t *testing.T, filename turbopath.AbsoluteSystemPath, timeout time.Duration) { - t.Helper() - deadline := time.After(timeout) -outer: - for !filename.FileExists() { - select { - case <-deadline: - break outer - case <-time.After(10 * time.Millisecond): - } - } - if !filename.FileExists() { - t.Errorf("timed out waiting for %v to exist after %v", filename, timeout) - } -} - -func TestDaemonLifecycle(t *testing.T) { - logger := hclog.Default() - logger.SetLevel(hclog.Debug) - repoRoot := fs.AbsoluteSystemPathFromUpstream(t.TempDir()) - - ts := newTestRPCServer() - watcher := signals.NewWatcher() - ctx, cancel := context.WithCancel(context.Background()) - - d := &daemon{ - logger: logger, - repoRoot: repoRoot, - timeout: 10 * time.Second, - reqCh: make(chan struct{}), - timedOutCh: make(chan struct{}), - } - - var serverErr error - wg := &sync.WaitGroup{} - wg.Add(1) - go func() { - serverErr = d.runTurboServer(ctx, ts, watcher) - wg.Done() - }() - - sockPath := getUnixSocket(repoRoot) - waitForFile(t, sockPath, 30*time.Second) - pidPath := getPidFile(repoRoot) - waitForFile(t, pidPath, 1*time.Second) - cancel() - wg.Wait() - assert.NilError(t, serverErr, "runTurboServer") - if sockPath.FileExists() { - t.Errorf("%v still exists, should have been cleaned up", sockPath) - } - if pidPath.FileExists() { - t.Errorf("%v still exists, should have been cleaned up", sockPath) - } -} - -func TestTimeout(t *testing.T) { - logger := hclog.Default() - logger.SetLevel(hclog.Debug) - repoRoot := fs.AbsoluteSystemPathFromUpstream(t.TempDir()) - - ts := newTestRPCServer() - watcher := signals.NewWatcher() - ctx := context.Background() - - d := &daemon{ - logger: logger, - repoRoot: repoRoot, - timeout: 5 * time.Millisecond, - reqCh: make(chan struct{}), - timedOutCh: make(chan struct{}), - } - err := d.runTurboServer(ctx, ts, watcher) - if !errors.Is(err, errInactivityTimeout) { - t.Errorf("server error got %v, want %v", err, errInactivityTimeout) - } -} - -func TestCaughtSignal(t *testing.T) { - logger 
:= hclog.Default() - logger.SetLevel(hclog.Debug) - repoRoot := fs.AbsoluteSystemPathFromUpstream(t.TempDir()) - - ts := newTestRPCServer() - watcher := signals.NewWatcher() - ctx := context.Background() - - d := &daemon{ - logger: logger, - repoRoot: repoRoot, - timeout: 5 * time.Second, - reqCh: make(chan struct{}), - timedOutCh: make(chan struct{}), - } - errCh := make(chan error) - go func() { - err := d.runTurboServer(ctx, ts, watcher) - errCh <- err - }() - <-ts.registered - // grpc doesn't provide a signal to know when the server is serving. - // So while this call to Close can race with the call to grpc.Server.Serve, if we've - // registered with the turboserver, we've registered all of our - // signal handlers as well. We just may or may not be serving when Close() - // is called. It shouldn't matter for the purposes of this test: - // Either we are serving, and Serve will return with nil when GracefulStop is - // called, or we aren't serving yet, and the subsequent call to Serve will - // immediately return with grpc.ErrServerStopped. So, both nil and grpc.ErrServerStopped - // are acceptable outcomes for runTurboServer. Any other error, or a timeout, is a - // failure. - watcher.Close() - - err := <-errCh - pidPath := getPidFile(repoRoot) - if pidPath.FileExists() { - t.Errorf("expected to clean up %v, but it still exists", pidPath) - } - // We'll either get nil or ErrServerStopped, depending on whether - // or not we close the signal watcher before grpc.Server.Serve was - // called. - if err != nil && !errors.Is(err, grpc.ErrServerStopped) { - t.Errorf("runTurboServer got err %v, want nil or ErrServerStopped", err) - } -} - -func TestCleanupOnPanic(t *testing.T) { - logger := hclog.Default() - logger.SetLevel(hclog.Debug) - repoRoot := fs.AbsoluteSystemPathFromUpstream(t.TempDir()) - - ts := newTestRPCServer() - watcher := signals.NewWatcher() - ctx := context.Background() - - d := &daemon{ - logger: logger, - repoRoot: repoRoot, - timeout: 5 * time.Second, - reqCh: make(chan struct{}), - timedOutCh: make(chan struct{}), - } - errCh := make(chan error) - go func() { - err := d.runTurboServer(ctx, ts, watcher) - errCh <- err - }() - <-ts.registered - - creds := insecure.NewCredentials() - sockFile := getUnixSocket(repoRoot) - conn, err := grpc.Dial("unix://"+sockFile.ToString(), grpc.WithTransportCredentials(creds)) - assert.NilError(t, err, "Dial") - - client := grpc_testing.NewTestServiceClient(conn) - _, err = client.EmptyCall(ctx, &grpc_testing.Empty{}) - if err == nil { - t.Error("nil error") - } - // wait for the server to finish - <-errCh - - pidPath := getPidFile(repoRoot) - if pidPath.FileExists() { - t.Errorf("expected to clean up %v, but it still exists", pidPath) - } -} diff --git a/cli/internal/daemonclient/daemonclient.go b/cli/internal/daemonclient/daemonclient.go deleted file mode 100644 index 23a3fb53481f1..0000000000000 --- a/cli/internal/daemonclient/daemonclient.go +++ /dev/null @@ -1,107 +0,0 @@ -// Package daemonclient is a wrapper around a grpc client -// to talk to turbod -package daemonclient - -import ( - "context" - "path/filepath" - "runtime" - "strings" - - "github.com/vercel/turbo/cli/internal/daemon/connector" - "github.com/vercel/turbo/cli/internal/fs/hash" - "github.com/vercel/turbo/cli/internal/turbodprotocol" - "github.com/vercel/turbo/cli/internal/turbopath" -) - -// DaemonClient provides access to higher-level functionality from the daemon to a turbo run. 
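// ----------------------------------------------------------------------
// Illustrative sketch (editor's addition, not from the deleted files):
// the recovery setup that TestCleanupOnPanic above exercises. A recovery
// interceptor turns a handler panic into a gRPC Internal error for the
// client instead of crashing the process, while the daemon's handler
// also triggers a shutdown (via onPanic, e.g. cancelling the server
// context). newServerWithRecovery is a hypothetical name.
package main

import (
	grpc_recovery "github.com/grpc-ecosystem/go-grpc-middleware/recovery"
	"google.golang.org/grpc"
	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

func newServerWithRecovery(onPanic func()) *grpc.Server {
	handler := func(p interface{}) error {
		onPanic() // e.g. cancel the daemon's context to begin shutdown
		return status.Error(codes.Internal, "server panicked")
	}
	return grpc.NewServer(
		grpc.ChainUnaryInterceptor(
			grpc_recovery.UnaryServerInterceptor(grpc_recovery.WithRecoveryHandler(handler)),
		),
	)
}
// ----------------------------------------------------------------------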
-type DaemonClient struct { - client *connector.Client -} - -// Status provides details about the daemon's status -type Status struct { - UptimeMs uint64 `json:"uptimeMs"` - LogFile turbopath.AbsoluteSystemPath `json:"logFile"` - PidFile turbopath.AbsoluteSystemPath `json:"pidFile"` - SockFile turbopath.AbsoluteSystemPath `json:"sockFile"` -} - -// New creates a new instance of a DaemonClient. -func New(client *connector.Client) *DaemonClient { - return &DaemonClient{ - client: client, - } -} - -// formats a repo-relative glob to unix format with ':' characters handled. -// On windows, ':' is an invalid path character, but you can, and Turborepo does, -// read to and write from files that contain alternate data streams denoted by ':'. -// In the case of windows and an alternate data stream, we want change notifications just -// for the root file. Note that since ':' denotes a data stream for a _file_, it cannot -// appear in a directory name. Thus, if we find one, we know it's in the filename. -// See https://learn.microsoft.com/en-us/sysinternals/downloads/streams -func formatRepoRelativeGlob(input string) string { - unixInput := filepath.ToSlash(input) - if runtime.GOOS == "windows" { - colonIndex := strings.Index(input, ":") - if colonIndex > -1 { - // we found an alternate data stream - unixInput = unixInput[:colonIndex] - } - return unixInput - } - return strings.ReplaceAll(unixInput, ":", "\\:") -} - -// GetChangedOutputs implements runcache.OutputWatcher.GetChangedOutputs -func (d *DaemonClient) GetChangedOutputs(ctx context.Context, hash string, repoRelativeOutputGlobs []string) ([]string, int, error) { - // The daemon expects globs to be unix paths - var outputGlobs []string - for _, outputGlob := range repoRelativeOutputGlobs { - outputGlobs = append(outputGlobs, formatRepoRelativeGlob(outputGlob)) - } - resp, err := d.client.GetChangedOutputs(ctx, &turbodprotocol.GetChangedOutputsRequest{ - Hash: hash, - OutputGlobs: outputGlobs, - }) - if err != nil { - return nil, 0, err - } - return resp.ChangedOutputGlobs, int(resp.TimeSaved), nil -} - -// NotifyOutputsWritten implements runcache.OutputWatcher.NotifyOutputsWritten -func (d *DaemonClient) NotifyOutputsWritten(ctx context.Context, hash string, repoRelativeOutputGlobs hash.TaskOutputs, timeSaved int) error { - // The daemon expects globs to be unix paths - var inclusions []string - var exclusions []string - for _, inclusion := range repoRelativeOutputGlobs.Inclusions { - inclusions = append(inclusions, formatRepoRelativeGlob(inclusion)) - } - for _, exclusion := range repoRelativeOutputGlobs.Exclusions { - exclusions = append(exclusions, formatRepoRelativeGlob(exclusion)) - } - _, err := d.client.NotifyOutputsWritten(ctx, &turbodprotocol.NotifyOutputsWrittenRequest{ - Hash: hash, - OutputGlobs: inclusions, - OutputExclusionGlobs: exclusions, - TimeSaved: uint64(timeSaved), - }) - return err -} - -// Status returns the DaemonStatus from the daemon -func (d *DaemonClient) Status(ctx context.Context) (*Status, error) { - resp, err := d.client.Status(ctx, &turbodprotocol.StatusRequest{}) - if err != nil { - return nil, err - } - daemonStatus := resp.DaemonStatus - return &Status{ - UptimeMs: daemonStatus.UptimeMsec, - LogFile: d.client.LogPath, - PidFile: d.client.PidPath, - SockFile: d.client.SockPath, - }, nil -} diff --git a/cli/internal/daemonclient/daemonclient_test.go b/cli/internal/daemonclient/daemonclient_test.go deleted file mode 100644 index 634f0bfc79c94..0000000000000 --- 
a/cli/internal/daemonclient/daemonclient_test.go +++ /dev/null @@ -1,23 +0,0 @@ -package daemonclient - -import ( - "path/filepath" - "runtime" - "testing" -) - -func TestFormatRepoRelativeGlob(t *testing.T) { - rawGlob := filepath.Join("some", ".turbo", "turbo-foo:bar.log") - // Note that we expect unix slashes whether or not we are on Windows - var expected string - if runtime.GOOS == "windows" { - expected = "some/.turbo/turbo-foo" - } else { - expected = "some/.turbo/turbo-foo\\:bar.log" - } - - result := formatRepoRelativeGlob(rawGlob) - if result != expected { - t.Errorf("formatRepoRelativeGlob(%v) got %v, want %v", rawGlob, result, expected) - } -} diff --git a/cli/internal/doublestar/doublestar.go b/cli/internal/doublestar/doublestar.go deleted file mode 100644 index 6fa05f115cb49..0000000000000 --- a/cli/internal/doublestar/doublestar.go +++ /dev/null @@ -1,11 +0,0 @@ -// Package doublestar is adapted from https://github.com/bmatcuk/doublestar -// Copyright Bob Matcuk. All Rights Reserved. -// SPDX-License-Identifier: MIT -package doublestar - -import ( - "path" -) - -// ErrBadPattern indicates a pattern was malformed. -var ErrBadPattern = path.ErrBadPattern diff --git a/cli/internal/doublestar/doublestar_test.go b/cli/internal/doublestar/doublestar_test.go deleted file mode 100644 index 512f8b7ad7265..0000000000000 --- a/cli/internal/doublestar/doublestar_test.go +++ /dev/null @@ -1,557 +0,0 @@ -// Package doublestar is adapted from https://github.com/bmatcuk/doublestar -// Copyright Bob Matcuk. All Rights Reserved. -// SPDX-License-Identifier: MIT - -// This file is mostly copied from Go's path/match_test.go - -package doublestar - -import ( - "io/fs" - "log" - "os" - "path" - "path/filepath" - "runtime" - "strings" - "testing" -) - -type MatchTest struct { - pattern, testPath string // a pattern and path to test the pattern on - shouldMatch bool // true if the pattern should match the path - expectedErr error // an expected error - isStandard bool // pattern doesn't use any doublestar features - testOnDisk bool // true: test pattern against files in "test" directory - numResults int // number of glob results if testing on disk - winNumResults int // number of glob results on Windows -} - -// Tests which contain escapes and symlinks will not work on Windows -var onWindows = runtime.GOOS == "windows" - -var matchTests = []MatchTest{ - {"*", "", true, nil, true, false, 0, 0}, - {"*", "/", false, nil, true, false, 0, 0}, - {"/*", "/", true, nil, true, false, 0, 0}, - {"/*", "/debug/", false, nil, true, false, 0, 0}, - {"/*", "//", false, nil, true, false, 0, 0}, - {"abc", "abc", true, nil, true, true, 1, 1}, - {"*", "abc", true, nil, true, true, 19, 15}, - {"*c", "abc", true, nil, true, true, 2, 2}, - {"*/", "a/", true, nil, true, false, 0, 0}, - {"a*", "a", true, nil, true, true, 9, 9}, - {"a*", "abc", true, nil, true, true, 9, 9}, - {"a*", "ab/c", false, nil, true, true, 9, 9}, - {"a*/b", "abc/b", true, nil, true, true, 2, 2}, - {"a*/b", "a/c/b", false, nil, true, true, 2, 2}, - {"a*b*c*d*e*", "axbxcxdxe", true, nil, true, true, 3, 3}, - {"a*b*c*d*e*/f", "axbxcxdxe/f", true, nil, true, true, 2, 2}, - {"a*b*c*d*e*/f", "axbxcxdxexxx/f", true, nil, true, true, 2, 2}, - {"a*b*c*d*e*/f", "axbxcxdxe/xxx/f", false, nil, true, true, 2, 2}, - {"a*b*c*d*e*/f", "axbxcxdxexxx/fff", false, nil, true, true, 2, 2}, - {"a*b?c*x", "abxbbxdbxebxczzx", true, nil, true, true, 2, 2}, - {"a*b?c*x", "abxbbxdbxebxczzy", false, nil, true, true, 2, 2}, - {"ab[c]", "abc", true, nil, true, true, 1, 1}, - 
{"ab[b-d]", "abc", true, nil, true, true, 1, 1}, - {"ab[e-g]", "abc", false, nil, true, true, 0, 0}, - {"ab[^c]", "abc", false, nil, true, true, 0, 0}, - {"ab[^b-d]", "abc", false, nil, true, true, 0, 0}, - {"ab[^e-g]", "abc", true, nil, true, true, 1, 1}, - {"a\\*b", "ab", false, nil, true, true, 0, 0}, - {"a?b", "a☺b", true, nil, true, true, 1, 1}, - {"a[^a]b", "a☺b", true, nil, true, true, 1, 1}, - {"a[!a]b", "a☺b", true, nil, false, true, 1, 1}, - {"a???b", "a☺b", false, nil, true, true, 0, 0}, - {"a[^a][^a][^a]b", "a☺b", false, nil, true, true, 0, 0}, - {"[a-ζ]*", "α", true, nil, true, true, 17, 15}, - {"*[a-ζ]", "A", false, nil, true, true, 17, 15}, - {"a?b", "a/b", false, nil, true, true, 1, 1}, - {"a*b", "a/b", false, nil, true, true, 1, 1}, - {"[\\]a]", "]", true, nil, true, !onWindows, 2, 2}, - {"[\\-]", "-", true, nil, true, !onWindows, 1, 1}, - {"[x\\-]", "x", true, nil, true, !onWindows, 2, 2}, - {"[x\\-]", "-", true, nil, true, !onWindows, 2, 2}, - {"[x\\-]", "z", false, nil, true, !onWindows, 2, 2}, - {"[\\-x]", "x", true, nil, true, !onWindows, 2, 2}, - {"[\\-x]", "-", true, nil, true, !onWindows, 2, 2}, - {"[\\-x]", "a", false, nil, true, !onWindows, 2, 2}, - {"[]a]", "]", false, ErrBadPattern, true, true, 0, 0}, - // doublestar, like bash, allows these when path.Match() does not - {"[-]", "-", true, nil, false, !onWindows, 1, 0}, - {"[x-]", "x", true, nil, false, true, 2, 1}, - {"[x-]", "-", true, nil, false, !onWindows, 2, 1}, - {"[x-]", "z", false, nil, false, true, 2, 1}, - {"[-x]", "x", true, nil, false, true, 2, 1}, - {"[-x]", "-", true, nil, false, !onWindows, 2, 1}, - {"[-x]", "a", false, nil, false, true, 2, 1}, - {"[a-b-d]", "a", true, nil, false, true, 3, 2}, - {"[a-b-d]", "b", true, nil, false, true, 3, 2}, - {"[a-b-d]", "-", true, nil, false, !onWindows, 3, 2}, - {"[a-b-d]", "c", false, nil, false, true, 3, 2}, - {"[a-b-x]", "x", true, nil, false, true, 4, 3}, - {"\\", "a", false, ErrBadPattern, true, !onWindows, 0, 0}, - {"[", "a", false, ErrBadPattern, true, true, 0, 0}, - {"[^", "a", false, ErrBadPattern, true, true, 0, 0}, - {"[^bc", "a", false, ErrBadPattern, true, true, 0, 0}, - {"a[", "a", false, ErrBadPattern, true, true, 0, 0}, - {"a[", "ab", false, ErrBadPattern, true, true, 0, 0}, - {"ad[", "ab", false, ErrBadPattern, true, true, 0, 0}, - {"*x", "xxx", true, nil, true, true, 4, 4}, - {"[abc]", "b", true, nil, true, true, 3, 3}, - {"**", "", true, nil, false, false, 38, 38}, - {"a/**", "a", true, nil, false, true, 7, 7}, - {"a/**", "a/", true, nil, false, false, 7, 7}, - {"a/**", "a/b", true, nil, false, true, 7, 7}, - {"a/**", "a/b/c", true, nil, false, true, 7, 7}, - // These tests differ since we've disabled walking symlinks - {"**/c", "c", true, nil, false, true, 4, 4}, - {"**/c", "b/c", true, nil, false, true, 4, 4}, - {"**/c", "a/b/c", true, nil, false, true, 4, 4}, - {"**/c", "a/b", false, nil, false, true, 4, 4}, - {"**/c", "abcd", false, nil, false, true, 4, 4}, - {"**/c", "a/abc", false, nil, false, true, 4, 4}, - {"a/**/b", "a/b", true, nil, false, true, 2, 2}, - {"a/**/c", "a/b/c", true, nil, false, true, 2, 2}, - {"a/**/d", "a/b/c/d", true, nil, false, true, 1, 1}, - {"a/\\**", "a/b/c", false, nil, false, !onWindows, 0, 0}, - {"a/\\[*\\]", "a/bc", false, nil, true, !onWindows, 0, 0}, - // this is an odd case: filepath.Glob() will return results - {"a//b/c", "a/b/c", false, nil, true, false, 0, 0}, - {"a/b/c", "a/b//c", false, nil, true, true, 1, 1}, - // also odd: Glob + filepath.Glob return results - {"a/", "a", false, nil, true, false, 
0, 0}, - {"ab{c,d}", "abc", true, nil, false, true, 1, 1}, - {"ab{c,d,*}", "abcde", true, nil, false, true, 5, 5}, - {"ab{c,d}[", "abcd", false, ErrBadPattern, false, true, 0, 0}, - {"a{,bc}", "a", true, nil, false, true, 2, 2}, - {"a{,bc}", "abc", true, nil, false, true, 2, 2}, - {"a/{b/c,c/b}", "a/b/c", true, nil, false, true, 2, 2}, - {"a/{b/c,c/b}", "a/c/b", true, nil, false, true, 2, 2}, - {"{a/{b,c},abc}", "a/b", true, nil, false, true, 3, 3}, - {"{a/{b,c},abc}", "a/c", true, nil, false, true, 3, 3}, - {"{a/{b,c},abc}", "abc", true, nil, false, true, 3, 3}, - {"{a/{b,c},abc}", "a/b/c", false, nil, false, true, 3, 3}, - {"{a/ab*}", "a/abc", true, nil, false, true, 1, 1}, - {"{a/*}", "a/b", true, nil, false, true, 3, 3}, - {"{a/abc}", "a/abc", true, nil, false, true, 1, 1}, - {"{a/b,a/c}", "a/c", true, nil, false, true, 2, 2}, - {"abc/**", "abc/b", true, nil, false, true, 3, 3}, - {"**/abc", "abc", true, nil, false, true, 2, 2}, - {"abc**", "abc/b", false, nil, false, true, 3, 3}, - {"**/*.txt", "abc/【test】.txt", true, nil, false, true, 1, 1}, - {"**/【*", "abc/【test】.txt", true, nil, false, true, 1, 1}, - // unfortunately, io/fs can't handle this, so neither can Glob =( - {"broken-symlink", "broken-symlink", true, nil, true, false, 1, 1}, - // We don't care about matching a particular file, we want to verify - // that we don't traverse the symlink - {"working-symlink/c/*", "working-symlink/c/d", true, nil, true, !onWindows, 1, 1}, - {"working-sym*/*", "irrelevant", false, nil, false, !onWindows, 0, 0}, - {"b/**/f", "irrelevant", false, nil, false, !onWindows, 0, 0}, -} - -func TestValidatePattern(t *testing.T) { - for idx, tt := range matchTests { - testValidatePatternWith(t, idx, tt) - } -} - -func testValidatePatternWith(t *testing.T, idx int, tt MatchTest) { - defer func() { - if r := recover(); r != nil { - t.Errorf("#%v. Validate(%#q) panicked: %#v", idx, tt.pattern, r) - } - }() - - result := ValidatePattern(tt.pattern) - if result != (tt.expectedErr == nil) { - t.Errorf("#%v. ValidatePattern(%#q) = %v want %v", idx, tt.pattern, result, !result) - } -} - -func TestMatch(t *testing.T) { - for idx, tt := range matchTests { - // Since Match() always uses "/" as the separator, we - // don't need to worry about the tt.testOnDisk flag - testMatchWith(t, idx, tt) - } -} - -func testMatchWith(t *testing.T, idx int, tt MatchTest) { - defer func() { - if r := recover(); r != nil { - t.Errorf("#%v. Match(%#q, %#q) panicked: %#v", idx, tt.pattern, tt.testPath, r) - } - }() - - // Match() always uses "/" as the separator - ok, err := Match(tt.pattern, tt.testPath) - if ok != tt.shouldMatch || err != tt.expectedErr { - t.Errorf("#%v. Match(%#q, %#q) = %v, %v want %v, %v", idx, tt.pattern, tt.testPath, ok, err, tt.shouldMatch, tt.expectedErr) - } - - if tt.isStandard { - stdOk, stdErr := path.Match(tt.pattern, tt.testPath) - if ok != stdOk || !compareErrors(err, stdErr) { - t.Errorf("#%v. Match(%#q, %#q) != path.Match(...). 
Got %v, %v want %v, %v", idx, tt.pattern, tt.testPath, ok, err, stdOk, stdErr) - } - } -} - -func BenchmarkMatch(b *testing.B) { - b.ReportAllocs() - for i := 0; i < b.N; i++ { - for _, tt := range matchTests { - if tt.isStandard { - _, _ = Match(tt.pattern, tt.testPath) - } - } - } -} - -func BenchmarkGoMatch(b *testing.B) { - b.ReportAllocs() - for i := 0; i < b.N; i++ { - for _, tt := range matchTests { - if tt.isStandard { - _, _ = path.Match(tt.pattern, tt.testPath) - } - } - } -} - -func TestPathMatch(t *testing.T) { - for idx, tt := range matchTests { - // Even though we aren't actually matching paths on disk, we are using - // PathMatch() which will use the system's separator. As a result, any - // patterns that might cause problems on-disk need to also be avoided - // here in this test. - if tt.testOnDisk { - testPathMatchWith(t, idx, tt) - } - } -} - -func testPathMatchWith(t *testing.T, idx int, tt MatchTest) { - defer func() { - if r := recover(); r != nil { - t.Errorf("#%v. Match(%#q, %#q) panicked: %#v", idx, tt.pattern, tt.testPath, r) - } - }() - - pattern := filepath.FromSlash(tt.pattern) - testPath := filepath.FromSlash(tt.testPath) - ok, err := PathMatch(pattern, testPath) - if ok != tt.shouldMatch || err != tt.expectedErr { - t.Errorf("#%v. PathMatch(%#q, %#q) = %v, %v want %v, %v", idx, pattern, testPath, ok, err, tt.shouldMatch, tt.expectedErr) - } - - if tt.isStandard { - stdOk, stdErr := filepath.Match(pattern, testPath) - if ok != stdOk || !compareErrors(err, stdErr) { - t.Errorf("#%v. PathMatch(%#q, %#q) != filepath.Match(...). Got %v, %v want %v, %v", idx, pattern, testPath, ok, err, stdOk, stdErr) - } - } -} - -func TestPathMatchFake(t *testing.T) { - // This test fakes that our path separator is `\\` so we can test what it - // would be like on Windows - obviously, we don't need to do that if we - // actually _are_ on Windows, since TestPathMatch will cover it. - if onWindows { - return - } - - for idx, tt := range matchTests { - // Even though we aren't actually matching paths on disk, we are using - // PathMatch() which will use the system's separator. As a result, any - // patterns that might cause problems on-disk need to also be avoided - // here in this test. - if tt.testOnDisk && tt.pattern != "\\" { - testPathMatchFakeWith(t, idx, tt) - } - } -} - -func testPathMatchFakeWith(t *testing.T, idx int, tt MatchTest) { - defer func() { - if r := recover(); r != nil { - t.Errorf("#%v. Match(%#q, %#q) panicked: %#v", idx, tt.pattern, tt.testPath, r) - } - }() - - pattern := strings.ReplaceAll(tt.pattern, "/", "\\") - testPath := strings.ReplaceAll(tt.testPath, "/", "\\") - ok, err := matchWithSeparator(pattern, testPath, '\\', true) - if ok != tt.shouldMatch || err != tt.expectedErr { - t.Errorf("#%v. 
PathMatch(%#q, %#q) = %v, %v want %v, %v", idx, pattern, testPath, ok, err, tt.shouldMatch, tt.expectedErr) - } -} - -func BenchmarkPathMatch(b *testing.B) { - b.ReportAllocs() - for i := 0; i < b.N; i++ { - for _, tt := range matchTests { - if tt.isStandard && tt.testOnDisk { - pattern := filepath.FromSlash(tt.pattern) - testPath := filepath.FromSlash(tt.testPath) - _, _ = PathMatch(pattern, testPath) - } - } - } -} - -func BenchmarkGoPathMatch(b *testing.B) { - b.ReportAllocs() - for i := 0; i < b.N; i++ { - for _, tt := range matchTests { - if tt.isStandard && tt.testOnDisk { - pattern := filepath.FromSlash(tt.pattern) - testPath := filepath.FromSlash(tt.testPath) - _, _ = filepath.Match(pattern, testPath) - } - } - } -} - -func TestGlob(t *testing.T) { - fsys := os.DirFS("test") - for idx, tt := range matchTests { - if tt.testOnDisk { - testGlobWith(t, idx, tt, fsys) - } - } -} - -func testGlobWith(t *testing.T, idx int, tt MatchTest, fsys fs.FS) { - defer func() { - if r := recover(); r != nil { - t.Errorf("#%v. Glob(%#q) panicked: %#v", idx, tt.pattern, r) - } - }() - - matches, err := Glob(fsys, tt.pattern) - verifyGlobResults(t, idx, "Glob", tt, fsys, matches, err) -} - -func TestGlobWalk(t *testing.T) { - fsys := os.DirFS("test") - for idx, tt := range matchTests { - if tt.testOnDisk { - testGlobWalkWith(t, idx, tt, fsys) - } - } -} - -func testGlobWalkWith(t *testing.T, idx int, tt MatchTest, fsys fs.FS) { - defer func() { - if r := recover(); r != nil { - t.Errorf("#%v. Glob(%#q) panicked: %#v", idx, tt.pattern, r) - } - }() - - var matches []string - err := GlobWalk(fsys, tt.pattern, func(p string, d fs.DirEntry) error { - matches = append(matches, p) - return nil - }) - verifyGlobResults(t, idx, "GlobWalk", tt, fsys, matches, err) -} - -func verifyGlobResults(t *testing.T, idx int, fn string, tt MatchTest, fsys fs.FS, matches []string, err error) { - numResults := tt.numResults - if onWindows { - numResults = tt.winNumResults - } - if len(matches) != numResults { - t.Errorf("#%v. %v(%#q) = %#v - should have %#v results", idx, fn, tt.pattern, matches, tt.numResults) - } - if inSlice(tt.testPath, matches) != tt.shouldMatch { - if tt.shouldMatch { - t.Errorf("#%v. %v(%#q) = %#v - doesn't contain %v, but should", idx, fn, tt.pattern, matches, tt.testPath) - } else { - t.Errorf("#%v. %v(%#q) = %#v - contains %v, but shouldn't", idx, fn, tt.pattern, matches, tt.testPath) - } - } - if err != tt.expectedErr { - t.Errorf("#%v. %v(%#q) has error %v, but should be %v", idx, fn, tt.pattern, err, tt.expectedErr) - } - - if tt.isStandard { - stdMatches, stdErr := fs.Glob(fsys, tt.pattern) - if !compareSlices(matches, stdMatches) || !compareErrors(err, stdErr) { - t.Errorf("#%v. %v(%#q) != fs.Glob(...). 
Got %#v, %v want %#v, %v", idx, fn, tt.pattern, matches, err, stdMatches, stdErr) - } - } -} - -func BenchmarkGlob(b *testing.B) { - fsys := os.DirFS("test") - b.ReportAllocs() - for i := 0; i < b.N; i++ { - for _, tt := range matchTests { - if tt.isStandard && tt.testOnDisk { - _, _ = Glob(fsys, tt.pattern) - } - } - } -} - -func BenchmarkGlobWalk(b *testing.B) { - fsys := os.DirFS("test") - b.ReportAllocs() - for i := 0; i < b.N; i++ { - for _, tt := range matchTests { - if tt.isStandard && tt.testOnDisk { - _ = GlobWalk(fsys, tt.pattern, func(p string, d fs.DirEntry) error { - return nil - }) - } - } - } -} - -func BenchmarkGoGlob(b *testing.B) { - fsys := os.DirFS("test") - b.ReportAllocs() - for i := 0; i < b.N; i++ { - for _, tt := range matchTests { - if tt.isStandard && tt.testOnDisk { - _, _ = fs.Glob(fsys, tt.pattern) - } - } - } -} - -func compareErrors(a, b error) bool { - if a == nil { - return b == nil - } - return b != nil -} - -func inSlice(s string, a []string) bool { - for _, i := range a { - if i == s { - return true - } - } - return false -} - -func compareSlices(a, b []string) bool { - if len(a) != len(b) { - return false - } - - diff := make(map[string]int, len(a)) - - for _, x := range a { - diff[x]++ - } - - for _, y := range b { - if _, ok := diff[y]; !ok { - return false - } - - diff[y]-- - if diff[y] == 0 { - delete(diff, y) - } - } - - return len(diff) == 0 -} - -func mkdirp(parts ...string) { - dirs := path.Join(parts...) - err := os.MkdirAll(dirs, 0755) - if err != nil { - log.Fatalf("Could not create test directories %v: %v\n", dirs, err) - } -} - -func touch(parts ...string) { - filename := path.Join(parts...) - f, err := os.Create(filename) - if err != nil { - log.Fatalf("Could not create test file %v: %v\n", filename, err) - } - _ = f.Close() -} - -func symlink(oldname, newname string) { - // since this will only run on non-windows, we can assume "/" as path separator - err := os.Symlink(oldname, newname) - if err != nil && !os.IsExist(err) { - log.Fatalf("Could not create symlink %v -> %v: %v\n", oldname, newname, err) - } -} - -func TestGlobSorted(t *testing.T) { - fsys := os.DirFS("test") - expected := []string{"a", "abc", "abcd", "abcde", "abxbbxdbxebxczzx", "abxbbxdbxebxczzy", "axbxcxdxe", "axbxcxdxexxx", "a☺b"} - matches, err := Glob(fsys, "a*") - if err != nil { - t.Errorf("Unexpected error %v", err) - return - } - - if len(matches) != len(expected) { - t.Errorf("Glob returned %#v; expected %#v", matches, expected) - return - } - for idx, match := range matches { - if match != expected[idx] { - t.Errorf("Glob returned %#v; expected %#v", matches, expected) - return - } - } -} - -func TestMain(m *testing.M) { - // create the test directory - mkdirp("test", "a", "b", "c") - mkdirp("test", "a", "c") - mkdirp("test", "abc") - mkdirp("test", "axbxcxdxe", "xxx") - mkdirp("test", "axbxcxdxexxx") - mkdirp("test", "b") - - // create test files - touch("test", "a", "abc") - touch("test", "a", "b", "c", "d") - touch("test", "a", "c", "b") - touch("test", "abc", "b") - touch("test", "abcd") - touch("test", "abcde") - touch("test", "abxbbxdbxebxczzx") - touch("test", "abxbbxdbxebxczzy") - touch("test", "axbxcxdxe", "f") - touch("test", "axbxcxdxe", "xxx", "f") - touch("test", "axbxcxdxexxx", "f") - touch("test", "axbxcxdxexxx", "fff") - touch("test", "a☺b") - touch("test", "b", "c") - touch("test", "c") - touch("test", "x") - touch("test", "xxx") - touch("test", "z") - touch("test", "α") - touch("test", "abc", "【test】.txt") - - if !onWindows { - // these 
files/symlinks won't work on Windows - touch("test", "-") - touch("test", "]") - symlink("../axbxcxdxe/", "test/b/symlink-dir") - symlink("/tmp/nonexistant-file-20160902155705", "test/broken-symlink") - symlink("a/b", "test/working-symlink") - } - - // os.Exit(m.Run()) - exitCode := m.Run() - _ = os.RemoveAll("test") - os.Exit(exitCode) -} diff --git a/cli/internal/doublestar/glob.go b/cli/internal/doublestar/glob.go deleted file mode 100644 index eee8920952d49..0000000000000 --- a/cli/internal/doublestar/glob.go +++ /dev/null @@ -1,393 +0,0 @@ -// Package doublestar is adapted from https://github.com/bmatcuk/doublestar -// Copyright Bob Matcuk. All Rights Reserved. -// SPDX-License-Identifier: MIT -package doublestar - -import ( - "io/fs" - "path" -) - -// Glob returns the names of all files matching pattern or nil if there is no -// matching file. The syntax of pattern is the same as in Match(). The pattern -// may describe hierarchical names such as usr/*/bin/ed. -// -// Glob ignores file system errors such as I/O errors reading directories. -// The only possible returned error is ErrBadPattern, reporting that the -// pattern is malformed. -// -// Note: this is meant as a drop-in replacement for io/fs.Glob(). Like -// io/fs.Glob(), this function assumes that your pattern uses `/` as the path -// separator even if that's not correct for your OS (like Windows). If you -// aren't sure if that's the case, you can use filepath.ToSlash() on your -// pattern before calling Glob(). -// -// Like `io/fs.Glob()`, patterns containing `/./`, `/../`, or starting with `/` -// will return no results and no errors. You can use SplitPattern to divide a -// pattern into a base path (to initialize an `FS` object) and pattern. -func Glob(fsys fs.FS, pattern string) ([]string, error) { - if !ValidatePattern(pattern) { - return nil, ErrBadPattern - } - if hasMidDoubleStar(pattern) { - // If the pattern has a `**` anywhere but the very end, GlobWalk is more - // performant because it can get away with less allocations. If the pattern - // ends in a `**`, both methods are pretty much the same, but Glob has a - // _very_ slight advantage because of lower function call overhead. - var matches []string - err := doGlobWalk(fsys, pattern, true, func(p string, d fs.DirEntry) error { - matches = append(matches, p) - return nil - }) - return matches, err - } - return doGlob(fsys, pattern, nil, true) -} - -// Does the actual globbin' -func doGlob(fsys fs.FS, pattern string, m []string, firstSegment bool) ([]string, error) { - matches := m - patternStart := indexMeta(pattern) - if patternStart == -1 { - // pattern doesn't contain any meta characters - does a file matching the - // pattern exist? - if exists(fsys, pattern) { - matches = append(matches, pattern) - } - return matches, nil - } - - dir := "." - splitIdx := lastIndexSlashOrAlt(pattern) - if splitIdx != -1 { - if pattern[splitIdx] == '}' { - openingIdx := indexMatchedOpeningAlt(pattern[:splitIdx]) - if openingIdx == -1 { - // if there's no matching opening index, technically Match() will treat - // an unmatched `}` as nothing special, so... we will, too! - splitIdx = lastIndexSlash(pattern[:splitIdx]) - } else { - // otherwise, we have to handle the alts: - return globAlts(fsys, pattern, openingIdx, splitIdx, matches, firstSegment) - } - } - - dir = pattern[:splitIdx] - pattern = pattern[splitIdx+1:] - } - - // if `splitIdx` is less than `patternStart`, we know `dir` has no meta - // characters. 
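// ----------------------------------------------------------------------
// Illustrative sketch (editor's addition, not from the deleted files):
// a deliberately simplified worked example of the dir/pattern split
// performed in doGlob. For patterns without braces or escapes, the
// split point is the last '/' before the first meta character, leaving
// a literal directory prefix plus a pattern remainder. naiveSplit is a
// hypothetical name and ignores the alt and escape handling above.
package main

import (
	"fmt"
	"strings"
)

func naiveSplit(pattern string) (dir, rest string) {
	meta := strings.IndexAny(pattern, "*?[{")
	if meta == -1 {
		return ".", pattern // no meta characters at all
	}
	if slash := strings.LastIndex(pattern[:meta], "/"); slash != -1 {
		return pattern[:slash], pattern[slash+1:]
	}
	return ".", pattern
}

func main() {
	fmt.Println(naiveSplit("a/b/*.txt")) // a/b *.txt
	fmt.Println(naiveSplit("**/c"))      // . **/c
}
// ----------------------------------------------------------------------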
They would be equal if they are both -1, which means `dir` - // will be ".", and we know that doesn't have meta characters either. - if splitIdx <= patternStart { - return globDir(fsys, dir, pattern, matches, firstSegment) - } - - var dirs []string - var err error - dirs, err = doGlob(fsys, dir, matches, false) - if err != nil { - return nil, err - } - for _, d := range dirs { - matches, err = globDir(fsys, d, pattern, matches, firstSegment) - if err != nil { - return nil, err - } - } - - return matches, nil -} - -// handle alts in the glob pattern - `openingIdx` and `closingIdx` are the -// indexes of `{` and `}`, respectively -func globAlts(fsys fs.FS, pattern string, openingIdx, closingIdx int, m []string, firstSegment bool) ([]string, error) { - matches := m - - var dirs []string - startIdx := 0 - afterIdx := closingIdx + 1 - splitIdx := lastIndexSlashOrAlt(pattern[:openingIdx]) - if splitIdx == -1 || pattern[splitIdx] == '}' { - // no common prefix - dirs = []string{""} - } else { - // our alts have a common prefix that we can process first - var err error - dirs, err = doGlob(fsys, pattern[:splitIdx], matches, false) - if err != nil { - return nil, err - } - - startIdx = splitIdx + 1 - } - - for _, d := range dirs { - patIdx := openingIdx + 1 - altResultsStartIdx := len(matches) - thisResultStartIdx := altResultsStartIdx - for patIdx < closingIdx { - nextIdx := indexNextAlt(pattern[patIdx:closingIdx], true) - if nextIdx == -1 { - nextIdx = closingIdx - } else { - nextIdx += patIdx - } - - alt := buildAlt(d, pattern, startIdx, openingIdx, patIdx, nextIdx, afterIdx) - var err error - matches, err = doGlob(fsys, alt, matches, firstSegment) - if err != nil { - return nil, err - } - - matchesLen := len(matches) - if altResultsStartIdx != thisResultStartIdx && thisResultStartIdx != matchesLen { - // Alts can result in matches that aren't sorted, or, worse, duplicates - // (consider the trivial pattern `path/to/{a,*}`). Since doGlob returns - // sorted results, we can do a sort of in-place merge and remove - // duplicates. 
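// ----------------------------------------------------------------------
// Illustrative sketch (editor's addition, not from the deleted files):
// alt handling seen from the outside. A pattern like "a/{b/c,c/b}"
// behaves as if each alternative were globbed separately and the
// results merged. expandAlts is a hypothetical, non-nested rewrite of
// what buildAlt does: prefix{x,y}suffix becomes prefix+x+suffix and
// prefix+y+suffix.
package main

import (
	"fmt"
	"sort"
)

func expandAlts(prefix string, alts []string, suffix string) []string {
	out := make([]string, 0, len(alts))
	for _, a := range alts {
		out = append(out, prefix+a+suffix)
	}
	sort.Strings(out) // doGlob returns sorted results per run
	return out
}

func main() {
	fmt.Println(expandAlts("a/", []string{"b/c", "c/b"}, ""))
	// [a/b/c a/c/b]
}
// ----------------------------------------------------------------------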
But, we only need to do this if this isn't the first alt - // (ie, `altResultsStartIdx != thisResultsStartIdx`) and if the latest - // alt actually added some matches (`thisResultStartIdx != - // len(matches)`) - matches = sortAndRemoveDups(matches, altResultsStartIdx, thisResultStartIdx, matchesLen) - - // length of matches may have changed - thisResultStartIdx = len(matches) - } else { - thisResultStartIdx = matchesLen - } - - patIdx = nextIdx + 1 - } - } - - return matches, nil -} - -// find files/subdirectories in the given `dir` that match `pattern` -func globDir(fsys fs.FS, dir, pattern string, matches []string, canMatchFiles bool) ([]string, error) { - m := matches - - if pattern == "" { - // pattern can be an empty string if the original pattern ended in a slash, - // in which case, we should just return dir, but only if it actually exists - // and it's a directory (or a symlink to a directory) - if isPathDir(fsys, dir) { - m = append(m, dir) - } - return m, nil - } - - if pattern == "**" { - m = globDoubleStar(fsys, dir, m, canMatchFiles) - return m, nil - } - - dirs, err := fs.ReadDir(fsys, dir) - if err != nil { - // ignore IO errors - return m, nil - } - - var matched bool - for _, info := range dirs { - name := info.Name() - if canMatchFiles || isDir(fsys, dir, name, info) { - matched, err = matchWithSeparator(pattern, name, '/', false) - if err != nil { - return nil, err - } - if matched { - m = append(m, path.Join(dir, name)) - } - } - } - - return m, nil -} - -func globDoubleStar(fsys fs.FS, dir string, matches []string, canMatchFiles bool) []string { - dirs, err := fs.ReadDir(fsys, dir) - if err != nil { - // ignore IO errors - return matches - } - - // `**` can match *this* dir, so add it - matches = append(matches, dir) - for _, info := range dirs { - name := info.Name() - if isDir(fsys, dir, name, info) { - matches = globDoubleStar(fsys, path.Join(dir, name), matches, canMatchFiles) - } else if canMatchFiles { - matches = append(matches, path.Join(dir, name)) - } - } - - return matches -} - -// Returns true if the pattern has a doublestar in the middle of the pattern. -// In this case, GlobWalk is faster because it can get away with less -// allocations. However, Glob has a _very_ slight edge if the pattern ends in -// `**`. -func hasMidDoubleStar(p string) bool { - // subtract 3: 2 because we want to return false if the pattern ends in `**` - // (Glob is _very_ slightly faster in that case), and the extra 1 because our - // loop checks p[i] and p[i+1]. - l := len(p) - 3 - for i := 0; i < l; i++ { - if p[i] == '\\' { - // escape next byte - i++ - } else if p[i] == '*' && p[i+1] == '*' { - return true - } - } - return false -} - -// Returns the index of the first unescaped meta character, or negative 1. -func indexMeta(s string) int { - var c byte - l := len(s) - for i := 0; i < l; i++ { - c = s[i] - if c == '*' || c == '?' || c == '[' || c == '{' { - return i - } else if c == '\\' { - // skip next byte - i++ - } - } - return -1 -} - -// Returns the index of the last unescaped slash or closing alt (`}`) in the -// string, or negative 1. -func lastIndexSlashOrAlt(s string) int { - for i := len(s) - 1; i >= 0; i-- { - if (s[i] == '/' || s[i] == '}') && (i == 0 || s[i-1] != '\\') { - return i - } - } - return -1 -} - -// Returns the index of the last unescaped slash in the string, or negative 1. 
-func lastIndexSlash(s string) int { - for i := len(s) - 1; i >= 0; i-- { - if s[i] == '/' && (i == 0 || s[i-1] != '\\') { - return i - } - } - return -1 -} - -// Assuming the byte after the end of `s` is a closing `}`, this function will -// find the index of the matching `{`. That is, it'll skip over any nested `{}` -// and account for escaping. -func indexMatchedOpeningAlt(s string) int { - alts := 1 - for i := len(s) - 1; i >= 0; i-- { - if s[i] == '}' && (i == 0 || s[i-1] != '\\') { - alts++ - } else if s[i] == '{' && (i == 0 || s[i-1] != '\\') { - if alts--; alts == 0 { - return i - } - } - } - return -1 -} - -// Returns true if the path exists -func exists(fsys fs.FS, name string) bool { - if _, err := fs.Stat(fsys, name); err != nil { - return false - } - return true -} - -// Returns true if the path is a directory, or a symlink to a directory -func isPathDir(fsys fs.FS, name string) bool { - info, err := fs.Stat(fsys, name) - if err != nil { - return false - } - return info.IsDir() -} - -// Returns whether or not the given DirEntry is a directory. If the DirEntry -// represents a symbolic link, return false -func isDir(fsys fs.FS, dir string, name string, info fs.DirEntry) bool { - if (info.Type() & fs.ModeSymlink) > 0 { - return false - } - return info.IsDir() -} - -// Builds a string from an alt -func buildAlt(prefix, pattern string, startIdx, openingIdx, currentIdx, nextIdx, afterIdx int) string { - // pattern: - // ignored/start{alts,go,here}remaining - len = 36 - // | | | | ^--- afterIdx = 27 - // | | | \--------- nextIdx = 21 - // | | \----------- currentIdx = 19 - // | \----------------- openingIdx = 13 - // \---------------------- startIdx = 8 - // - // result: - // prefix/startgoremaining - len = 7 + 5 + 2 + 9 = 23 - var buf []byte - patLen := len(pattern) - size := (openingIdx - startIdx) + (nextIdx - currentIdx) + (patLen - afterIdx) - if prefix != "" { - buf = make([]byte, 0, size+len(prefix)+1) - buf = append(buf, prefix...) - buf = append(buf, '/') - } else { - buf = make([]byte, 0, size) - } - buf = append(buf, pattern[startIdx:openingIdx]...) - buf = append(buf, pattern[currentIdx:nextIdx]...) - if afterIdx < patLen { - buf = append(buf, pattern[afterIdx:]...) - } - return string(buf) -} - -// Running alts can produce results that are not sorted, and, worse, can cause -// duplicates (consider the trivial pattern `path/to/{a,*}`). Since we know -// each run of doGlob is sorted, we can basically do the "merge" step of a -// merge sort in-place. -func sortAndRemoveDups(matches []string, idx1, idx2, l int) []string { - var tmp string - for ; idx1 < idx2; idx1++ { - if matches[idx1] < matches[idx2] { - // order is correct - continue - } else if matches[idx1] > matches[idx2] { - // need to swap and then re-sort matches above idx2 - tmp = matches[idx1] - matches[idx1] = matches[idx2] - - shft := idx2 + 1 - for ; shft < l && matches[shft] < tmp; shft++ { - matches[shft-1] = matches[shft] - } - matches[shft-1] = tmp - } else { - // duplicate - shift matches above idx2 down one and decrement l - for shft := idx2 + 1; shft < l; shft++ { - matches[shft-1] = matches[shft] - } - if l--; idx2 == l { - // nothing left to do... 
matches[idx2:] must have been full of dups - break - } - } - } - return matches[:l] -} diff --git a/cli/internal/doublestar/globwalk.go b/cli/internal/doublestar/globwalk.go deleted file mode 100644 index 6caec3e379560..0000000000000 --- a/cli/internal/doublestar/globwalk.go +++ /dev/null @@ -1,277 +0,0 @@ -// Package doublestar is adapted from https://github.com/bmatcuk/doublestar -// Copyright Bob Matcuk. All Rights Reserved. -// SPDX-License-Identifier: MIT -package doublestar - -import ( - "io/fs" - "path" -) - -// GlobWalkFunc is a callback function for GlobWalk(). If the function returns an error, GlobWalk -// will end immediately and return the same error. -type GlobWalkFunc func(path string, d fs.DirEntry) error - -// GlobWalk calls the callback function `fn` for every file matching pattern. -// The syntax of pattern is the same as in Match() and the behavior is the same -// as Glob(), with regard to limitations (such as patterns containing `/./`, -// `/../`, or starting with `/`). The pattern may describe hierarchical names -// such as usr/*/bin/ed. -// -// GlobWalk may have a small performance benefit over Glob if you do not need a -// slice of matches because it can avoid allocating memory for the matches. -// Additionally, GlobWalk gives you access to the `fs.DirEntry` objects for -// each match, and lets you quit early by returning a non-nil error from your -// callback function. -// -// GlobWalk ignores file system errors such as I/O errors reading directories. -// GlobWalk may return ErrBadPattern, reporting that the pattern is malformed. -// Additionally, if the callback function `fn` returns an error, GlobWalk will -// exit immediately and return that error. -// -// Like Glob(), this function assumes that your pattern uses `/` as the path -// separator even if that's not correct for your OS (like Windows). If you -// aren't sure if that's the case, you can use filepath.ToSlash() on your -// pattern before calling GlobWalk(). -func GlobWalk(fsys fs.FS, pattern string, fn GlobWalkFunc) error { - if !ValidatePattern(pattern) { - return ErrBadPattern - } - return doGlobWalk(fsys, pattern, true, fn) -} - -// Actually execute GlobWalk -func doGlobWalk(fsys fs.FS, pattern string, firstSegment bool, fn GlobWalkFunc) error { - patternStart := indexMeta(pattern) - if patternStart == -1 { - // pattern doesn't contain any meta characters - does a file matching the - // pattern exist? - info, err := fs.Stat(fsys, pattern) - if err == nil { - err = fn(pattern, newDirEntryFromFileInfo(info)) - return err - } - // ignore IO errors - return nil - } - - dir := "." - splitIdx := lastIndexSlashOrAlt(pattern) - if splitIdx != -1 { - if pattern[splitIdx] == '}' { - openingIdx := indexMatchedOpeningAlt(pattern[:splitIdx]) - if openingIdx == -1 { - // if there's no matching opening index, technically Match() will treat - // an unmatched `}` as nothing special, so... we will, too! - splitIdx = lastIndexSlash(pattern[:splitIdx]) - } else { - // otherwise, we have to handle the alts: - return globAltsWalk(fsys, pattern, openingIdx, splitIdx, firstSegment, fn) - } - } - - dir = pattern[:splitIdx] - pattern = pattern[splitIdx+1:] - } - - // if `splitIdx` is less than `patternStart`, we know `dir` has no meta - // characters. They would be equal if they are both -1, which means `dir` - // will be ".", and we know that doesn't have meta characters either. 
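// ----------------------------------------------------------------------
// Illustrative sketch (editor's addition, not from the deleted files):
// caller-side use of the early-exit behavior documented on GlobWalk.
// Returning a non-nil error from the callback stops the walk at once.
// This assumes it sits alongside the doublestar package (it calls
// GlobWalk directly) and imports "errors" and "io/fs"; errFoundEnough
// and firstN are hypothetical names.

var errFoundEnough = errors.New("found enough matches")

// firstN returns at most n matches of pattern, stopping the underlying
// walk as soon as the quota is reached.
func firstN(fsys fs.FS, pattern string, n int) ([]string, error) {
	var matches []string
	err := GlobWalk(fsys, pattern, func(p string, d fs.DirEntry) error {
		matches = append(matches, p)
		if len(matches) == n {
			return errFoundEnough // GlobWalk returns this immediately
		}
		return nil
	})
	if err != nil && err != errFoundEnough {
		return nil, err
	}
	return matches, nil
}
// ----------------------------------------------------------------------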
- if splitIdx <= patternStart { - return globDirWalk(fsys, dir, pattern, firstSegment, fn) - } - - return doGlobWalk(fsys, dir, false, func(p string, d fs.DirEntry) error { - if err := globDirWalk(fsys, p, pattern, firstSegment, fn); err != nil { - return err - } - return nil - }) -} - -// handle alts in the glob pattern - `openingIdx` and `closingIdx` are the -// indexes of `{` and `}`, respectively -func globAltsWalk(fsys fs.FS, pattern string, openingIdx, closingIdx int, firstSegment bool, fn GlobWalkFunc) error { - var matches []dirEntryWithFullPath - startIdx := 0 - afterIdx := closingIdx + 1 - splitIdx := lastIndexSlashOrAlt(pattern[:openingIdx]) - if splitIdx == -1 || pattern[splitIdx] == '}' { - // no common prefix - var err error - matches, err = doGlobAltsWalk(fsys, "", pattern, startIdx, openingIdx, closingIdx, afterIdx, firstSegment, matches) - if err != nil { - return err - } - } else { - // our alts have a common prefix that we can process first - startIdx = splitIdx + 1 - err := doGlobWalk(fsys, pattern[:splitIdx], false, func(p string, d fs.DirEntry) (e error) { - matches, e = doGlobAltsWalk(fsys, p, pattern, startIdx, openingIdx, closingIdx, afterIdx, firstSegment, matches) - return e - }) - if err != nil { - return err - } - } - - for _, m := range matches { - if err := fn(m.Path, m.Entry); err != nil { - return err - } - } - - return nil -} - -// runs actual matching for alts -func doGlobAltsWalk(fsys fs.FS, d, pattern string, startIdx, openingIdx, closingIdx, afterIdx int, firstSegment bool, m []dirEntryWithFullPath) ([]dirEntryWithFullPath, error) { - matches := m - matchesLen := len(m) - patIdx := openingIdx + 1 - for patIdx < closingIdx { - nextIdx := indexNextAlt(pattern[patIdx:closingIdx], true) - if nextIdx == -1 { - nextIdx = closingIdx - } else { - nextIdx += patIdx - } - - alt := buildAlt(d, pattern, startIdx, openingIdx, patIdx, nextIdx, afterIdx) - err := doGlobWalk(fsys, alt, firstSegment, func(p string, d fs.DirEntry) error { - // insertion sort, ignoring dups - insertIdx := matchesLen - for insertIdx > 0 && matches[insertIdx-1].Path > p { - insertIdx-- - } - if insertIdx > 0 && matches[insertIdx-1].Path == p { - // dup - return nil - } - - // append to grow the slice, then insert - entry := dirEntryWithFullPath{d, p} - matches = append(matches, entry) - for i := matchesLen; i > insertIdx; i-- { - matches[i] = matches[i-1] - } - matches[insertIdx] = entry - matchesLen++ - - return nil - }) - if err != nil { - return nil, err - } - - patIdx = nextIdx + 1 - } - - return matches, nil -} - -func globDirWalk(fsys fs.FS, dir, pattern string, canMatchFiles bool, fn GlobWalkFunc) error { - if pattern == "" { - // pattern can be an empty string if the original pattern ended in a slash, - // in which case, we should just return dir, but only if it actually exists - // and it's a directory (or a symlink to a directory) - info, err := fs.Stat(fsys, dir) - if err != nil || !info.IsDir() { - return nil - } - return fn(dir, newDirEntryFromFileInfo(info)) - } - - if pattern == "**" { - // `**` can match *this* dir - info, err := fs.Stat(fsys, dir) - if err != nil || !info.IsDir() { - return nil - } - if err = fn(dir, newDirEntryFromFileInfo(info)); err != nil { - return err - } - return globDoubleStarWalk(fsys, dir, canMatchFiles, fn) - } - - dirs, err := fs.ReadDir(fsys, dir) - if err != nil { - // ignore IO errors - return nil - } - - var matched bool - for _, info := range dirs { - name := info.Name() - if canMatchFiles || isDir(fsys, dir, name, info) { - matched, err = 
matchWithSeparator(pattern, name, '/', false) - if err != nil { - return err - } - if matched { - if err = fn(path.Join(dir, name), info); err != nil { - return err - } - } - } - } - - return nil -} - -func globDoubleStarWalk(fsys fs.FS, dir string, canMatchFiles bool, fn GlobWalkFunc) error { - dirs, err := fs.ReadDir(fsys, dir) - if err != nil { - // ignore IO errors - return nil - } - - // `**` can match *this* dir, so add it - for _, info := range dirs { - name := info.Name() - if isDir(fsys, dir, name, info) { - p := path.Join(dir, name) - if e := fn(p, info); e != nil { - return e - } - if e := globDoubleStarWalk(fsys, p, canMatchFiles, fn); e != nil { - return e - } - } else if canMatchFiles { - if e := fn(path.Join(dir, name), info); e != nil { - return e - } - } - } - - return nil -} - -type dirEntryFromFileInfo struct { - fi fs.FileInfo -} - -func (d *dirEntryFromFileInfo) Name() string { - return d.fi.Name() -} - -func (d *dirEntryFromFileInfo) IsDir() bool { - return d.fi.IsDir() -} - -func (d *dirEntryFromFileInfo) Type() fs.FileMode { - return d.fi.Mode().Type() -} - -func (d *dirEntryFromFileInfo) Info() (fs.FileInfo, error) { - return d.fi, nil -} - -func newDirEntryFromFileInfo(fi fs.FileInfo) fs.DirEntry { - return &dirEntryFromFileInfo{fi} -} - -type dirEntryWithFullPath struct { - Entry fs.DirEntry - Path string -} diff --git a/cli/internal/doublestar/match.go b/cli/internal/doublestar/match.go deleted file mode 100644 index d8c953676a65c..0000000000000 --- a/cli/internal/doublestar/match.go +++ /dev/null @@ -1,377 +0,0 @@ -// Package doublestar is adapted from https://github.com/bmatcuk/doublestar -// Copyright Bob Matcuk. All Rights Reserved. -// SPDX-License-Identifier: MIT -package doublestar - -import ( - "path/filepath" - "unicode/utf8" -) - -// Match reports whether name matches the shell pattern. -// The pattern syntax is: -// -// pattern: -// { term } -// term: -// '*' matches any sequence of non-path-separators -// '/**/' matches zero or more directories -// '?' matches any single non-path-separator character -// '[' [ '^' '!' ] { character-range } ']' -// character class (must be non-empty) -// starting with `^` or `!` negates the class -// '{' { term } [ ',' { term } ... ] '}' -// alternatives -// c matches character c (c != '*', '?', '\\', '[') -// '\\' c matches character c -// -// character-range: -// c matches character c (c != '\\', '-', ']') -// '\\' c matches character c -// lo '-' hi matches character c for lo <= c <= hi -// -// Match returns true if `name` matches the file name `pattern`. `name` and -// `pattern` are split on forward slash (`/`) characters and may be relative or -// absolute. -// -// Match requires pattern to match all of name, not just a substring. -// The only possible returned error is ErrBadPattern, when pattern -// is malformed. -// -// A doublestar (`**`) should appear surrounded by path separators such as -// `/**/`. A mid-pattern doublestar (`**`) behaves like bash's globstar -// option: a pattern such as `path/to/**.txt` would return the same results as -// `path/to/*.txt`. The pattern you're looking for is `path/to/**/*.txt`. -// -// Note: this is meant as a drop-in replacement for path.Match() which -// always uses '/' as the path separator. If you want to support systems -// which use a different path separator (such as Windows), what you want -// is PathMatch(). Alternatively, you can run filepath.ToSlash() on both -// pattern and name and then use this function. 
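To make the semantics above concrete, here is a brief usage sketch (hedged: it assumes `fmt` is imported and that the package is importable under the name `doublestar`; the paths are invented):

// matchExamples is a hypothetical demonstration, not part of this package.
func matchExamples() {
	ok, _ := doublestar.Match("src/**/*.go", "src/a/b/c.go")
	fmt.Println(ok) // true: `/**/` matches zero or more directories

	ok, _ = doublestar.Match("path/to/**.txt", "path/to/a/file.txt")
	fmt.Println(ok) // false: a mid-pattern `**` behaves like a single `*`

	ok, _ = doublestar.Match("path/to/**/*.txt", "path/to/a/file.txt")
	fmt.Println(ok) // true: the recursive form the docs above recommend
}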
-func Match(pattern, name string) (bool, error) { - return matchWithSeparator(pattern, name, '/', true) -} - -// PathMatch returns true if `name` matches the file name `pattern`. The -// difference between Match and PathMatch is that PathMatch will automatically -// use your system's path separator to split `name` and `pattern`. On systems -// where the path separator is `'\'`, escaping will be disabled. -// -// Note: this is meant as a drop-in replacement for filepath.Match(). It -// assumes that both `pattern` and `name` are using the system's path -// separator. If you can't be sure of that, use filepath.ToSlash() on both -// `pattern` and `name`, and then use the Match() function instead. -func PathMatch(pattern, name string) (bool, error) { - return matchWithSeparator(pattern, name, filepath.Separator, true) -} - -func matchWithSeparator(pattern, name string, separator rune, validate bool) (matched bool, err error) { - doublestarPatternBacktrack := -1 - doublestarNameBacktrack := -1 - starPatternBacktrack := -1 - starNameBacktrack := -1 - patIdx := 0 - nameIdx := 0 - patLen := len(pattern) - nameLen := len(name) - startOfSegment := true -MATCH: - for nameIdx < nameLen { - if patIdx < patLen { - switch pattern[patIdx] { - case '*': - if patIdx++; patIdx < patLen && pattern[patIdx] == '*' { - // doublestar - must begin with a path separator, otherwise we'll - // treat it like a single star like bash - patIdx++ - if startOfSegment { - if patIdx >= patLen { - // pattern ends in `/**`: return true - return true, nil - } - - // doublestar must also end with a path separator, otherwise we're - // just going to treat the doublestar as a single star like bash - patRune, patRuneLen := utf8.DecodeRuneInString(pattern[patIdx:]) - if patRune == separator { - patIdx += patRuneLen - - doublestarPatternBacktrack = patIdx - doublestarNameBacktrack = nameIdx - starPatternBacktrack = -1 - starNameBacktrack = -1 - continue - } - } - } - startOfSegment = false - - starPatternBacktrack = patIdx - starNameBacktrack = nameIdx - continue - - case '?': - startOfSegment = false - nameRune, nameRuneLen := utf8.DecodeRuneInString(name[nameIdx:]) - if nameRune == separator { - // `?` cannot match the separator - break - } - - patIdx++ - nameIdx += nameRuneLen - continue - - case '[': - startOfSegment = false - if patIdx++; patIdx >= patLen { - // class didn't end - return false, ErrBadPattern - } - nameRune, nameRuneLen := utf8.DecodeRuneInString(name[nameIdx:]) - - matched := false - negate := pattern[patIdx] == '!' 
|| pattern[patIdx] == '^' - if negate { - patIdx++ - } - - if patIdx >= patLen || pattern[patIdx] == ']' { - // class didn't end or empty character class - return false, ErrBadPattern - } - - last := utf8.MaxRune - for patIdx < patLen && pattern[patIdx] != ']' { - patRune, patRuneLen := utf8.DecodeRuneInString(pattern[patIdx:]) - patIdx += patRuneLen - - // match a range - if last < utf8.MaxRune && patRune == '-' && patIdx < patLen && pattern[patIdx] != ']' { - if pattern[patIdx] == '\\' { - // next character is escaped - patIdx++ - } - patRune, patRuneLen = utf8.DecodeRuneInString(pattern[patIdx:]) - patIdx += patRuneLen - - if last <= nameRune && nameRune <= patRune { - matched = true - break - } - - // didn't match range - reset `last` - last = utf8.MaxRune - continue - } - - // not a range - check if the next rune is escaped - if patRune == '\\' { - patRune, patRuneLen = utf8.DecodeRuneInString(pattern[patIdx:]) - patIdx += patRuneLen - } - - // check if the rune matches - if patRune == nameRune { - matched = true - break - } - - // no matches yet - last = patRune - } - - if matched == negate { - // failed to match - if we reached the end of the pattern, that means - // we never found a closing `]` - if patIdx >= patLen { - return false, ErrBadPattern - } - break - } - - closingIdx := indexUnescapedByte(pattern[patIdx:], ']', true) - if closingIdx == -1 { - // no closing `]` - return false, ErrBadPattern - } - - patIdx += closingIdx + 1 - nameIdx += nameRuneLen - continue - - case '{': - // Note: removed 'startOfSegment = false' here. - // This block is guaranteed to return, so assigning it was useless - // and triggering a lint error - patIdx++ - closingIdx := indexMatchedClosingAlt(pattern[patIdx:], separator != '\\') - if closingIdx == -1 { - // no closing `}` - return false, ErrBadPattern - } - closingIdx += patIdx - - for { - commaIdx := indexNextAlt(pattern[patIdx:closingIdx], separator != '\\') - if commaIdx == -1 { - break - } - commaIdx += patIdx - - result, err := matchWithSeparator(pattern[patIdx:commaIdx]+pattern[closingIdx+1:], name[nameIdx:], separator, validate) - if result || err != nil { - return result, err - } - - patIdx = commaIdx + 1 - } - return matchWithSeparator(pattern[patIdx:closingIdx]+pattern[closingIdx+1:], name[nameIdx:], separator, validate) - - case '\\': - if separator != '\\' { - // next rune is "escaped" in the pattern - literal match - if patIdx++; patIdx >= patLen { - // pattern ended - return false, ErrBadPattern - } - } - fallthrough - - default: - patRune, patRuneLen := utf8.DecodeRuneInString(pattern[patIdx:]) - nameRune, nameRuneLen := utf8.DecodeRuneInString(name[nameIdx:]) - if patRune != nameRune { - if separator != '\\' && patIdx > 0 && pattern[patIdx-1] == '\\' { - // if this rune was meant to be escaped, we need to move patIdx - // back to the backslash before backtracking or validating below - patIdx-- - } - break - } - - patIdx += patRuneLen - nameIdx += nameRuneLen - startOfSegment = patRune == separator - continue - } - } - - if starPatternBacktrack >= 0 { - // `*` backtrack, but only if the `name` rune isn't the separator - nameRune, nameRuneLen := utf8.DecodeRuneInString(name[starNameBacktrack:]) - if nameRune != separator { - starNameBacktrack += nameRuneLen - patIdx = starPatternBacktrack - nameIdx = starNameBacktrack - startOfSegment = false - continue - } - } - - if doublestarPatternBacktrack >= 0 { - // `**` backtrack, advance `name` past next separator - nameIdx = doublestarNameBacktrack - for nameIdx < nameLen { - nameRune, 
nameRuneLen := utf8.DecodeRuneInString(name[nameIdx:]) - nameIdx += nameRuneLen - if nameRune == separator { - doublestarNameBacktrack = nameIdx - patIdx = doublestarPatternBacktrack - startOfSegment = true - continue MATCH - } - } - } - - if validate && patIdx < patLen && !doValidatePattern(pattern[patIdx:], separator) { - return false, ErrBadPattern - } - return false, nil - } - - if nameIdx < nameLen { - // we reached the end of `pattern` before the end of `name` - return false, nil - } - - // we've reached the end of `name`; we've successfully matched if we've also - // reached the end of `pattern`, or if the rest of `pattern` can match a - // zero-length string - return isZeroLengthPattern(pattern[patIdx:], separator) -} - -func isZeroLengthPattern(pattern string, separator rune) (ret bool, err error) { - // `/**` is a special case - a pattern such as `path/to/a/**` *should* match - // `path/to/a` because `a` might be a directory - if pattern == "" || pattern == "*" || pattern == "**" || pattern == string(separator)+"**" { - return true, nil - } - - if pattern[0] == '{' { - closingIdx := indexMatchedClosingAlt(pattern[1:], separator != '\\') - if closingIdx == -1 { - // no closing '}' - return false, ErrBadPattern - } - closingIdx++ - - patIdx := 1 - for { - commaIdx := indexNextAlt(pattern[patIdx:closingIdx], separator != '\\') - if commaIdx == -1 { - break - } - commaIdx += patIdx - - ret, err = isZeroLengthPattern(pattern[patIdx:commaIdx]+pattern[closingIdx+1:], separator) - if ret || err != nil { - return - } - - patIdx = commaIdx + 1 - } - return isZeroLengthPattern(pattern[patIdx:closingIdx]+pattern[closingIdx+1:], separator) - } - - // no luck - validate the rest of the pattern - if !doValidatePattern(pattern, separator) { - return false, ErrBadPattern - } - return false, nil -} - -// Finds the index of the first unescaped byte `c`, or negative 1. -func indexUnescapedByte(s string, c byte, allowEscaping bool) int { - l := len(s) - for i := 0; i < l; i++ { - if allowEscaping && s[i] == '\\' { - // skip next byte - i++ - } else if s[i] == c { - return i - } - } - return -1 -} - -// Assuming the byte before the beginning of `s` is an opening `{`, this -// function will find the index of the matching `}`. That is, it'll skip over -// any nested `{}` and account for escaping -func indexMatchedClosingAlt(s string, allowEscaping bool) int { - alts := 1 - l := len(s) - for i := 0; i < l; i++ { - if allowEscaping && s[i] == '\\' { - // skip next byte - i++ - } else if s[i] == '{' { - alts++ - } else if s[i] == '}' { - if alts--; alts == 0 { - return i - } - } - } - return -1 -} diff --git a/cli/internal/doublestar/utils.go b/cli/internal/doublestar/utils.go deleted file mode 100644 index 7236cd0701560..0000000000000 --- a/cli/internal/doublestar/utils.go +++ /dev/null @@ -1,71 +0,0 @@ -// Package doublestar is adapted from https://github.com/bmatcuk/doublestar -// Copyright Bob Matcuk. All Rights Reserved. -// SPDX-License-Identifier: MIT -package doublestar - -// SplitPattern is a utility function. Given a pattern, SplitPattern will -// return two strings: the first string is everything up to the last slash -// (`/`) that appears _before_ any unescaped "meta" characters (ie, `*?[{`). -// The second string is everything after that slash. For example, given the -// pattern: -// -// ../../path/to/meta*/** -// ^----------- split here -// -// SplitPattern returns "../../path/to" and "meta*/**". 
This is useful for -// initializing os.DirFS() to call Glob() because Glob() will silently fail if -// your pattern includes `/./` or `/../`. For example: -// -// base, pattern := SplitPattern("../../path/to/meta*/**") -// fsys := os.DirFS(base) -// matches, err := Glob(fsys, pattern) -// -// If SplitPattern cannot find somewhere to split the pattern (for example, -// `meta*/**`), it will return "." and the unaltered pattern (`meta*/**` in -// this example). -// -// Of course, it is your responsibility to decide if the returned base path is -// "safe" in the context of your application. Perhaps you could use Match() to -// validate against a list of approved base directories? -func SplitPattern(p string) (string, string) { - base := "." - pattern := p - - splitIdx := -1 - for i := 0; i < len(p); i++ { - c := p[i] - if c == '\\' { - i++ - } else if c == '/' { - splitIdx = i - } else if c == '*' || c == '?' || c == '[' || c == '{' { - break - } - } - - if splitIdx >= 0 { - return p[:splitIdx], p[splitIdx+1:] - } - - return base, pattern -} - -// Finds the next comma, but ignores any commas that appear inside nested `{}`. -// Assumes that each opening bracket has a corresponding closing bracket. -func indexNextAlt(s string, allowEscaping bool) int { - alts := 1 - l := len(s) - for i := 0; i < l; i++ { - if allowEscaping && s[i] == '\\' { - // skip next byte - i++ - } else if s[i] == '{' { - alts++ - } else if s[i] == '}' { - alts-- - } else if s[i] == ',' && alts == 1 { - return i - } - } - return -1 -} diff --git a/cli/internal/doublestar/validate.go b/cli/internal/doublestar/validate.go deleted file mode 100644 index 225fc5e8accd4..0000000000000 --- a/cli/internal/doublestar/validate.go +++ /dev/null @@ -1,83 +0,0 @@ -// Package doublestar is adapted from https://github.com/bmatcuk/doublestar -// Copyright Bob Matcuk. All Rights Reserved. -// SPDX-License-Identifier: MIT -package doublestar - -import "path/filepath" - -// ValidatePattern validates a pattern. Patterns are validated while they run in Match(), -// PathMatch(), and Glob(), so, you normally wouldn't need to call this. -// However, there are cases where this might be useful: for example, if your -// program allows a user to enter a pattern that you'll run at a later time, -// you might want to validate it. -// -// ValidatePattern assumes your pattern uses '/' as the path separator. -func ValidatePattern(s string) bool { - return doValidatePattern(s, '/') -} - -// ValidatePathPattern only uses your OS path separator. In other words, use -// ValidatePattern if you would normally use Match() or Glob(). Use -// ValidatePathPattern if you would normally use PathMatch(). Keep in mind, -// Glob() requires '/' separators, even if your OS uses something else. -func ValidatePathPattern(s string) bool { - return doValidatePattern(s, filepath.Separator) -} - -func doValidatePattern(s string, separator rune) bool { - altDepth := 0 - l := len(s) -VALIDATE: - for i := 0; i < l; i++ { - switch s[i] { - case '\\': - if separator != '\\' { - // skip the next byte - return false if there is no next byte - if i++; i >= l { - return false - } - } - continue - - case '[': - if i++; i >= l { - // class didn't end - return false - } - if s[i] == '^' || s[i] == '!' 
{ - i++ - } - if i >= l || s[i] == ']' { - // class didn't end or empty character class - return false - } - - for ; i < l; i++ { - if separator != '\\' && s[i] == '\\' { - i++ - } else if s[i] == ']' { - // looks good - continue VALIDATE - } - } - - // class didn't end - return false - - case '{': - altDepth++ - continue - - case '}': - if altDepth == 0 { - // alt end without a corresponding start - return false - } - altDepth-- - continue - } - } - - // valid as long as all alts are closed - return altDepth == 0 -} diff --git a/cli/internal/edgecases/strings.go b/cli/internal/edgecases/strings.go deleted file mode 100644 index f9fb0d4c81c0b..0000000000000 --- a/cli/internal/edgecases/strings.go +++ /dev/null @@ -1,517 +0,0 @@ -// Package edgecases is a bunch of edge cases for testing purposes -package edgecases - -// Strings is a bunch of edge cases for various things courtesy of https://github.com/minimaxir/big-list-of-naughty-strings -var Strings = []string{ - "undefined", - "undef", - "null", - "NULL", - "(null)", - "nil", - "NIL", - "true", - "false", - "True", - "False", - "TRUE", - "FALSE", - "None", - "hasOwnProperty", - "then", - "\\", - "\\\\", - "0", - "1", - "1.00", - "$1.00", - "1/2", - "1E2", - "1E02", - "1E+02", - "-1", - "-1.00", - "-$1.00", - "-1/2", - "-1E2", - "-1E02", - "-1E+02", - "1/0", - "0/0", - "-2147483648/-1", - "-9223372036854775808/-1", - "-0", - "-0.0", - "+0", - "+0.0", - "0.00", - "0..0", - ".", - "0.0.0", - "0,00", - "0,,0", - ",", - "0,0,0", - "0.0/0", - "1.0/0.0", - "0.0/0.0", - "1,0/0,0", - "0,0/0,0", - "--1", - "-", - "-.", - "-,", - "999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999", - "NaN", - "Infinity", - "-Infinity", - "INF", - "1#INF", - "-1#IND", - "1#QNAN", - "1#SNAN", - "1#IND", - "0x0", - "0xffffffff", - "0xffffffffffffffff", - "0xabad1dea", - "123456789012345678901234567890123456789", - "1,000.00", - "1 000.00", - "1'000.00", - "1,000,000.00", - "1 000 000.00", - "1'000'000.00", - "1.000,00", - "1 000,00", - "1'000,00", - "1.000.000,00", - "1 000 000,00", - "1'000'000,00", - "01000", - "08", - "09", - "2.2250738585072011e-308", - ",./;'[]\\-=", - "<>?:\"{}|_+", - "!@#$%^&*()`~", - "\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\u000e\u000f\u0010\u0011\u0012\u0013\u0014\u0015\u0016\u0017\u0018\u0019\u001a\u001b\u001c\u001d\u001e\u001f", - "€‚ƒ„†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ", - "\t\u000b\f …             ​

   ", - "￾", - "Ω≈ç√∫˜µ≤≥÷", - "åß∂ƒ©˙∆˚¬…æ", - "œ∑´®†¥¨ˆøπ“‘", - "¡™£¢∞§¶•ªº–≠", - "¸˛Ç◊ı˜Â¯˘¿", - "ÅÍÎÏ˝ÓÔÒÚÆ☃", - "Œ„´‰ˇÁ¨ˆØ∏”’", - "`⁄€‹›fifl‡°·‚—±", - "⅛⅜⅝⅞", - "ЁЂЃЄЅІЇЈЉЊЋЌЍЎЏАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюя", - "٠١٢٣٤٥٦٧٨٩", - "⁰⁴⁵", - "₀₁₂", - "⁰⁴⁵₀₁₂", - "ด้้้้้็็็็็้้้้้็็็็็้้้้้้้้็็็็็้้้้้็็็็็้้้้้้้้็็็็็้้้้้็็็็็้้้้้้้้็็็็็้้้้้็็็็ ด้้้้้็็็็็้้้้้็็็็็้้้้้้้้็็็็็้้้้้็็็็็้้้้้้้้็็็็็้้้้้็็็็็้้้้้้้้็็็็็้้้้้็็็็ ด้้้้้็็็็็้้้้้็็็็็้้้้้้้้็็็็็้้้้้็็็็็้้้้้้้้็็็็็้้้้้็็็็็้้้้้้้้็็็็็้้้้้็็็็", - "'", - "\"", - "''", - "\"\"", - "'\"'", - "\"''''\"'\"", - "\"'\"'\"''''\"", - "", - "", - "", - "", - "田中さんにあげて下さい", - "パーティーへ行かないか", - "和製漢語", - "部落格", - "사회과학원 어학연구소", - "찦차를 타고 온 펲시맨과 쑛다리 똠방각하", - "社會科學院語學研究所", - "울란바토르", - "𠜎𠜱𠝹𠱓𠱸𠲖𠳏", - "𐐜 𐐔𐐇𐐝𐐀𐐡𐐇𐐓 𐐙𐐊𐐡𐐝𐐓/𐐝𐐇𐐗𐐊𐐤𐐔 𐐒𐐋𐐗 𐐒𐐌 𐐜 𐐡𐐀𐐖𐐇𐐤𐐓𐐝 𐐱𐑂 𐑄 𐐔𐐇𐐝𐐀𐐡𐐇𐐓 𐐏𐐆𐐅𐐤𐐆𐐚𐐊𐐡𐐝𐐆𐐓𐐆", - "表ポあA鷗ŒéB逍Üߪąñ丂㐀𠀀", - "Ⱥ", - "Ⱦ", - "ヽ༼ຈل͜ຈ༽ノ ヽ༼ຈل͜ຈ༽ノ", - "(。◕ ∀ ◕。)", - "`ィ(´∀`∩", - "__ロ(,_,*)", - "・( ̄∀ ̄)・:*:", - "゚・✿ヾ╲(。◕‿◕。)╱✿・゚", - ",。・:*:・゜’( ☻ ω ☻ )。・:*:・゜’", - "(╯°□°)╯︵ ┻━┻)", - "┬─┬ノ( º _ ºノ)", - "( ͡° ͜ʖ ͡°)", - "¯\\_(ツ)_/¯", - "😍", - "👩🏽", - "👨‍🦰 👨🏿‍🦰 👨‍🦱 👨🏿‍🦱 🦹🏿‍♂️", - "👾 🙇 💁 🙅 🙆 🙋 🙎 🙍", - "🐵 🙈 🙉 🙊", - "❤️ 💔 💌 💕 💞 💓 💗 💖 💘 💝 💟 💜 💛 💚 💙", - "✋🏿 💪🏿 👐🏿 🙌🏿 👏🏿 🙏🏿", - "👨‍👩‍👦 👨‍👩‍👧‍👦 👨‍👨‍👦 👩‍👩‍👧 👨‍👦 👨‍👧‍👦 👩‍👦 👩‍👧‍👦", - "🚾 🆒 🆓 🆕 🆖 🆗 🆙 🏧", - "0️⃣ 1️⃣ 2️⃣ 3️⃣ 4️⃣ 5️⃣ 6️⃣ 7️⃣ 8️⃣ 9️⃣ 🔟", - "🇺🇸🇷🇺🇸 🇦🇫🇦🇲🇸", - "🇺🇸🇷🇺🇸🇦🇫🇦🇲", - "🇺🇸🇷🇺🇸🇦", - "123", - "١٢٣", - "ثم نفس سقطت وبالتحديد،, جزيرتي باستخدام أن دنو. إذ هنا؟ الستار وتنصيب كان. أهّل ايطاليا، بريطانيا-فرنسا قد أخذ. سليمان، إتفاقية بين ما, يذكر الحدود أي بعد, معاملة بولندا، الإطلاق عل إيو.", - "בְּרֵאשִׁית, בָּרָא אֱלֹהִים, אֵת הַשָּׁמַיִם, וְאֵת הָאָרֶץ", - "הָיְתָהtestالصفحات التّحول", - "﷽", - "ﷺ", - "مُنَاقَشَةُ سُبُلِ اِسْتِخْدَامِ اللُّغَةِ فِي النُّظُمِ الْقَائِمَةِ وَفِيم يَخُصَّ التَّطْبِيقَاتُ الْحاسُوبِيَّةُ، ", - "᚛ᚄᚓᚐᚋᚒᚄ ᚑᚄᚂᚑᚏᚅ᚜‪‪‪", - "‪‪᚛                 ᚜‪", - "‪‪test‪", - "‫test‫", - "
test
", - "test⁠test‫", - "⁦test⁧", - "Ṱ̺̺̕o͞ ̷i̲̬͇̪͙n̝̗͕v̟̜̘̦͟o̶̙̰̠kè͚̮̺̪̹̱̤ ̖t̝͕̳̣̻̪͞h̼͓̲̦̳̘̲e͇̣̰̦̬͎ ̢̼̻̱̘h͚͎͙̜̣̲ͅi̦̲̣̰̤v̻͍e̺̭̳̪̰-m̢iͅn̖̺̞̲̯̰d̵̼̟͙̩̼̘̳ ̞̥̱̳̭r̛̗̘e͙p͠r̼̞̻̭̗e̺̠̣͟s̘͇̳͍̝͉e͉̥̯̞̲͚̬͜ǹ̬͎͎̟̖͇̤t͍̬̤͓̼̭͘ͅi̪̱n͠g̴͉ ͏͉ͅc̬̟h͡a̫̻̯͘o̫̟̖͍̙̝͉s̗̦̲.̨̹͈̣", - "̡͓̞ͅI̗̘̦͝n͇͇͙v̮̫ok̲̫̙͈i̖͙̭̹̠̞n̡̻̮̣̺g̲͈͙̭͙̬͎ ̰t͔̦h̞̲e̢̤ ͍̬̲͖f̴̘͕̣è͖ẹ̥̩l͖͔͚i͓͚̦͠n͖͍̗͓̳̮g͍ ̨o͚̪͡f̘̣̬ ̖̘͖̟͙̮c҉͔̫͖͓͇͖ͅh̵̤̣͚͔á̗̼͕ͅo̼̣̥s̱͈̺̖̦̻͢.̛̖̞̠̫̰", - "̗̺͖̹̯͓Ṯ̤͍̥͇͈h̲́e͏͓̼̗̙̼̣͔ ͇̜̱̠͓͍ͅN͕͠e̗̱z̘̝̜̺͙p̤̺̹͍̯͚e̠̻̠͜r̨̤͍̺̖͔̖̖d̠̟̭̬̝͟i̦͖̩͓͔̤a̠̗̬͉̙n͚͜ ̻̞̰͚ͅh̵͉i̳̞v̢͇ḙ͎͟-҉̭̩̼͔m̤̭̫i͕͇̝̦n̗͙ḍ̟ ̯̲͕͞ǫ̟̯̰̲͙̻̝f ̪̰̰̗̖̭̘͘c̦͍̲̞͍̩̙ḥ͚a̮͎̟̙͜ơ̩̹͎s̤.̝̝ ҉Z̡̖̜͖̰̣͉̜a͖̰͙̬͡l̲̫̳͍̩g̡̟̼̱͚̞̬ͅo̗͜.̟", - "̦H̬̤̗̤͝e͜ ̜̥̝̻͍̟́w̕h̖̯͓o̝͙̖͎̱̮ ҉̺̙̞̟͈W̷̼̭a̺̪͍į͈͕̭͙̯̜t̶̼̮s̘͙͖̕ ̠̫̠B̻͍͙͉̳ͅe̵h̵̬͇̫͙i̹͓̳̳̮͎̫̕n͟d̴̪̜̖ ̰͉̩͇͙̲͞ͅT͖̼͓̪͢h͏͓̮̻e̬̝̟ͅ ̤̹̝W͙̞̝͔͇͝ͅa͏͓͔̹̼̣l̴͔̰̤̟͔ḽ̫.͕", - "Z̮̞̠͙͔ͅḀ̗̞͈̻̗Ḷ͙͎̯̹̞͓G̻O̭̗̮", - "˙ɐnbᴉlɐ ɐuƃɐɯ ǝɹolop ʇǝ ǝɹoqɐl ʇn ʇunpᴉpᴉɔuᴉ ɹodɯǝʇ poɯsnᴉǝ op pǝs 'ʇᴉlǝ ƃuᴉɔsᴉdᴉpɐ ɹnʇǝʇɔǝsuoɔ 'ʇǝɯɐ ʇᴉs ɹolop ɯnsdᴉ ɯǝɹo˥", - "00˙Ɩ$-", - "The quick brown fox jumps over the lazy dog", - "𝐓𝐡𝐞 𝐪𝐮𝐢𝐜𝐤 𝐛𝐫𝐨𝐰𝐧 𝐟𝐨𝐱 𝐣𝐮𝐦𝐩𝐬 𝐨𝐯𝐞𝐫 𝐭𝐡𝐞 𝐥𝐚𝐳𝐲 𝐝𝐨𝐠", - "𝕿𝖍𝖊 𝖖𝖚𝖎𝖈𝖐 𝖇𝖗𝖔𝖜𝖓 𝖋𝖔𝖝 𝖏𝖚𝖒𝖕𝖘 𝖔𝖛𝖊𝖗 𝖙𝖍𝖊 𝖑𝖆𝖟𝖞 𝖉𝖔𝖌", - "𝑻𝒉𝒆 𝒒𝒖𝒊𝒄𝒌 𝒃𝒓𝒐𝒘𝒏 𝒇𝒐𝒙 𝒋𝒖𝒎𝒑𝒔 𝒐𝒗𝒆𝒓 𝒕𝒉𝒆 𝒍𝒂𝒛𝒚 𝒅𝒐𝒈", - "𝓣𝓱𝓮 𝓺𝓾𝓲𝓬𝓴 𝓫𝓻𝓸𝔀𝓷 𝓯𝓸𝔁 𝓳𝓾𝓶𝓹𝓼 𝓸𝓿𝓮𝓻 𝓽𝓱𝓮 𝓵𝓪𝔃𝔂 𝓭𝓸𝓰", - "𝕋𝕙𝕖 𝕢𝕦𝕚𝕔𝕜 𝕓𝕣𝕠𝕨𝕟 𝕗𝕠𝕩 𝕛𝕦𝕞𝕡𝕤 𝕠𝕧𝕖𝕣 𝕥𝕙𝕖 𝕝𝕒𝕫𝕪 𝕕𝕠𝕘", - "𝚃𝚑𝚎 𝚚𝚞𝚒𝚌𝚔 𝚋𝚛𝚘𝚠𝚗 𝚏𝚘𝚡 𝚓𝚞𝚖𝚙𝚜 𝚘𝚟𝚎𝚛 𝚝𝚑𝚎 𝚕𝚊𝚣𝚢 𝚍𝚘𝚐", - "⒯⒣⒠ ⒬⒰⒤⒞⒦ ⒝⒭⒪⒲⒩ ⒡⒪⒳ ⒥⒰⒨⒫⒮ ⒪⒱⒠⒭ ⒯⒣⒠ ⒧⒜⒵⒴ ⒟⒪⒢", - "", - "<script>alert('123');</script>", - "", - "", - "\">", - "'>", - ">", - "", - "< / script >< script >alert(123)< / script >", - " onfocus=JaVaSCript:alert(123) autofocus", - "\" onfocus=JaVaSCript:alert(123) autofocus", - "' onfocus=JaVaSCript:alert(123) autofocus", - "<script>alert(123)</script>", - "ript>alert(123)ript>", - "-->", - "\";alert(123);t=\"", - "';alert(123);t='", - "JavaSCript:alert(123)", - ";alert(123);", - "src=JaVaSCript:prompt(132)", - "\"><\\x3Cscript>javascript:alert(1)", - "'`\"><\\x00script>javascript:alert(1)", - "ABC
DEF", - "ABC
DEF", - "ABC
DEF", - "ABC
DEF", - "ABC
DEF", - "ABC
DEF", - "ABC
DEF", - "ABC
DEF", - "ABC
DEF", - "ABC
DEF", - "ABC
DEF", - "ABC
DEF", - "ABC
DEF", - "ABC
DEF", - "ABC
DEF", - "ABC
DEF", - "ABC
DEF", - "ABC
DEF", - "ABC
DEF", - "ABC
DEF", - "ABC
DEF", - "ABC
DEF", - "ABC
DEF", - "ABC
DEF", - "ABC
DEF", - "ABC
DEF", - "ABC
DEF", - "test", - "test", - "test", - "test", - "test", - "test", - "test", - "test", - "test", - "test", - "test", - "test", - "test", - "test", - "test", - "test", - "test", - "test", - "test", - "test", - "test", - "test", - "test", - "test", - "test", - "test", - "test", - "test", - "test", - "test", - "test", - "test", - "test", - "test", - "test", - "test", - "test", - "test", - "test", - "test", - "test", - "test", - "test", - "test", - "test", - "test", - "test", - "test", - "test", - "test", - "test", - "test", - "test", - "test", - "test", - "test", - "test", - "`\"'>", - "`\"'>", - "`\"'>", - "`\"'>", - "`\"'>", - "`\"'>", - "`\"'>", - "`\"'>", - "`\"'>", - "`\"'>", - "\"`'>", - "\"`'>", - "\"`'>", - "\"`'>", - "\"`'>", - "\"`'>", - "\"`'>", - "\"`'>", - "\"`'>", - "\"`'>", - "\"`'>", - "\"`'>", - "\"`'>", - "\"`'>", - "\"`'>", - "\"`'>", - "\"`'>", - "\"`'>", - "\"`'>", - "\"`'>", - "\"`'>", - "\"`'>", - "\"`'>", - "\"`'>", - "\"`'>", - "\"`'>", - "\"`'>", - "\"`'>", - "\"`'>", - "\"`'>", - "\"`'>", - "\"`'>", - "\"`'>", - "\"`'>", - "\"`'>", - "\"`'>", - "\"`'>", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "XXX", - "javascript:alert(1)\"` `>", - "", - "", - "<a href=http://foo.bar/#x=`y></a><img alt=\"`><img src=x:x onerror=javascript:alert(1)></a>\">", - "<!--[if]><script>javascript:alert(1)</script -->", - "<!--[if<img src=x onerror=javascript:alert(1)//]> -->", - "<script src=\"/\\%(jscript)s\"></script>", - "<script src=\"\\\\%(jscript)s\"></script>", - "<IMG \"\"\"><SCRIPT>alert(\"XSS\")</SCRIPT>\">", - "<IMG SRC=javascript:alert(String.fromCharCode(88,83,83))>", - "<IMG SRC=# onmouseover=\"alert('xxs')\">", - "<IMG SRC= onmouseover=\"alert('xxs')\">", - "<IMG onmouseover=\"alert('xxs')\">", - "<IMG SRC=javascript:alert('XSS')>", - "<IMG SRC=javascript:alert('XSS')>", - "<IMG SRC=javascript:alert('XSS')>", - "<IMG SRC=\"jav ascript:alert('XSS');\">", - "<IMG SRC=\"jav ascript:alert('XSS');\">", - "<IMG SRC=\"jav ascript:alert('XSS');\">", - "<IMG SRC=\"jav ascript:alert('XSS');\">", - "perl -e 'print \"<IMG SRC=java\\0script:alert(\\\"XSS\\\")>\";' > out", - "<IMG SRC=\"  javascript:alert('XSS');\">", - "<SCRIPT/XSS SRC=\"http://ha.ckers.org/xss.js\"></SCRIPT>", - "<BODY onload!#$%&()*~+-_.,:;?@[/|\\]^`=alert(\"XSS\")>", - "<SCRIPT/SRC=\"http://ha.ckers.org/xss.js\"></SCRIPT>", - "<<SCRIPT>alert(\"XSS\");//<</SCRIPT>", - "<SCRIPT SRC=http://ha.ckers.org/xss.js?< B >", - "<SCRIPT SRC=//ha.ckers.org/.j>", - "<IMG SRC=\"javascript:alert('XSS')\"", - "<iframe src=http://ha.ckers.org/scriptlet.html <", - "\\\";alert('XSS');//", - "<u oncopy=alert()> Copy me</u>", - "<i onwheel=alert(1)> Scroll over me </i>", - "<plaintext>", - "http://a/%%30%30", - "</textarea><script>alert(123)</script>", - "1;DROP TABLE users", - "1'; DROP TABLE users-- 1", - "' OR 1=1 -- 1", - "' OR '1'='1", - "'; EXEC sp_MSForEachTable 'DROP TABLE ?'; --", - " ", - "%", - "_", - "-", - "--", - "--version", - "--help", - "$USER", - "/dev/null; touch /tmp/blns.fail ; echo", - "`touch /tmp/blns.fail`", - "$(touch /tmp/blns.fail)", - "@{[system \"touch /tmp/blns.fail\"]}", - "eval(\"puts 'hello world'\")", - "System(\"ls -al /\")", - "`ls -al /`", - "Kernel.exec(\"ls -al /\")", - "Kernel.exit(1)", - "%x('ls -al /')", - "<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?><!DOCTYPE foo [ <!ELEMENT foo ANY ><!ENTITY xxe SYSTEM \"file:///etc/passwd\" 
>]><foo>&xxe;</foo>", - "$HOME", - "$ENV{'HOME'}", - "%d", - "%s%s%s%s%s", - "{0}", - "%*.*s", - "%@", - "%n", - "File:///", - "../../../../../../../../../../../etc/passwd%00", - "../../../../../../../../../../../etc/hosts", - "() { 0; }; touch /tmp/blns.shellshock1.fail;", - "() { _; } >_[$($())] { touch /tmp/blns.shellshock2.fail; }", - "<<< %s(un='%s') = %u", - "+++ATH0", - "CON", - "PRN", - "AUX", - "CLOCK$", - "NUL", - "A:", - "ZZ:", - "COM1", - "LPT1", - "LPT2", - "LPT3", - "COM2", - "COM3", - "COM4", - "DCC SEND STARTKEYLOGGER 0 0 0", - "Scunthorpe General Hospital", - "Penistone Community Church", - "Lightwater Country Park", - "Jimmy Clitheroe", - "Horniman Museum", - "shitake mushrooms", - "RomansInSussex.co.uk", - "http://www.cum.qc.ca/", - "Craig Cockburn, Software Specialist", - "Linda Callahan", - "Dr. Herman I. Libshitz", - "magna cum laude", - "Super Bowl XXX", - "medieval erection of parapets", - "evaluate", - "mocha", - "expression", - "Arsenal canal", - "classic", - "Tyson Gay", - "Dick Van Dyke", - "basement", - "If you're reading this, you've been in a coma for almost 20 years now. We're trying a new technique. We don't know where this message will end up in your dream, but we hope it works. Please wake up, we miss you.", - "Roses are \u001b[0;31mred\u001b[0m, violets are \u001b[0;34mblue. Hope you enjoy terminal hue", - "But now...\u001b[20Cfor my greatest trick...\u001b[8m", - "The quic\b\b\b\b\b\bk brown fo\u0007\u0007\u0007\u0007\u0007\u0007\u0007\u0007\u0007\u0007\u0007x... [Beeeep]", - "Powerلُلُصّبُلُلصّبُررً ॣ ॣh ॣ ॣ冗", - "🏳0🌈️", - "జ్ఞ‌ా", - "گچپژ", - "{% print 'x' * 64 * 1024**3 %}", - `{{ \"\".__class__.__mro__[2].__subclasses__()[40](\"/etc/passwd\").read() }}`, -} diff --git a/cli/internal/encoding/gitoutput/gitoutput.go b/cli/internal/encoding/gitoutput/gitoutput.go deleted file mode 100644 index 1c2ad4ff6345f..0000000000000 --- a/cli/internal/encoding/gitoutput/gitoutput.go +++ /dev/null @@ -1,345 +0,0 @@ -// Package gitoutput reads the output of calls to `git`. -package gitoutput - -import ( - "bufio" - "bytes" - "errors" - "fmt" - "io" -) - -// These describe the structure of fields in the output of `git` commands. -var ( - LsTreeFields = []Field{ObjectMode, ObjectType, ObjectName, Path} - LsFilesFields = []Field{ObjectMode, ObjectName, ObjectStage, Path} - StatusFields = []Field{StatusX, StatusY, Path} -) - -var _lsTreeFieldToIndex = map[Field]int{ - ObjectMode: 0, - ObjectType: 1, - ObjectName: 2, - Path: 3, -} - -var _lsFilesFieldToIndex = map[Field]int{ - ObjectMode: 0, - ObjectName: 1, - ObjectStage: 2, - Path: 3, -} - -var _statusFieldToIndex = map[Field]int{ - StatusX: 0, - StatusY: 1, - Path: 2, -} - -// Field is the type for fields available in outputs to `git`. -// Used for naming and sensible call sites. -type Field int - -const ( - // ObjectMode is the mode field from `git` outputs. e.g. 100644 - ObjectMode Field = iota + 1 - // ObjectType is the set of allowed types from `git` outputs: blob, tree, commit - ObjectType - // ObjectName is the 40-character SHA hash - ObjectName - // ObjectStage is a value 0-3. - ObjectStage - // StatusX is the first character of the two-character output from `git status`. - StatusX - // StatusY is the second character of the two-character output from `git status`. - StatusY - // Path is the file path under version control in `git`. 
- Path
-)
-
-// LsTreeEntry is the result from calling `git ls-tree`.
-type LsTreeEntry []string
-
-// LsFilesEntry is the result from calling `git ls-files`.
-type LsFilesEntry []string
-
-// StatusEntry is the result from calling `git status`.
-type StatusEntry []string
-
-// GetField returns the value of the specified field.
-func (e LsTreeEntry) GetField(field Field) string {
-	value, exists := _lsTreeFieldToIndex[field]
-	if !exists {
-		panic("Received an invalid field for LsTreeEntry.")
-	}
-	return e[value]
-}
-
-// GetField returns the value of the specified field.
-func (e LsFilesEntry) GetField(field Field) string {
-	value, exists := _lsFilesFieldToIndex[field]
-	if !exists {
-		panic("Received an invalid field for LsFilesEntry.")
-	}
-	return e[value]
-}
-
-// GetField returns the value of the specified field.
-func (e StatusEntry) GetField(field Field) string {
-	value, exists := _statusFieldToIndex[field]
-	if !exists {
-		panic("Received an invalid field for StatusEntry.")
-	}
-	return e[value]
-}
-
-// Separators that appear in the output of `git` commands.
-const (
-	_space = ' '
-	_tab   = '\t'
-	_nul   = '\000'
-)
-
-// A ParseError is returned for parsing errors.
-// Entries and columns are both 1-indexed.
-type ParseError struct {
-	Entry  int   // Entry where the error occurred
-	Column int   // Column where the error occurred
-	Err    error // The actual error
-}
-
-// Error creates a string for a parse error.
-func (e *ParseError) Error() string {
-	return fmt.Sprintf("parse error on entry %d, column %d: %v", e.Entry, e.Column, e.Err)
-}
-
-// Unwrap returns the raw error.
-func (e *ParseError) Unwrap() error { return e.Err }
-
-// These are the errors that can be returned in ParseError.Err.
-var (
-	ErrInvalidObjectMode    = errors.New("object mode is not valid")
-	ErrInvalidObjectType    = errors.New("object type is not valid")
-	ErrInvalidObjectName    = errors.New("object name is not valid")
-	ErrInvalidObjectStage   = errors.New("object stage is not valid")
-	ErrInvalidObjectStatusX = errors.New("object status x is not valid")
-	ErrInvalidObjectStatusY = errors.New("object status y is not valid")
-	ErrInvalidPath          = errors.New("path is not valid")
-	ErrUnknownField         = errors.New("unknown field")
-)
-
-// A Reader reads records from `git`'s output.
-type Reader struct {
-	// ReuseRecord controls whether calls to Read may return a slice sharing
-	// the backing array of the previous call's returned slice for performance.
-	// By default, each call to Read returns newly allocated memory owned by the caller.
-	ReuseRecord bool
-
-	// Fields specifies the type of each field.
-	Fields []Field
-
-	reader *bufio.Reader
-
-	// numEntry is the current entry being read in the `git` output.
-	numEntry int
-
-	// rawBuffer is an entry buffer only used by the readEntry method.
-	rawBuffer []byte
-
-	// recordBuffer holds the unescaped fields, one after another.
-	// The fields can be accessed by using the indexes in fieldIndexes.
-	recordBuffer []byte
-
-	// fieldIndexes is an index of fields inside recordBuffer.
-	// The i'th field ends at offset fieldIndexes[i] in recordBuffer.
-	fieldIndexes []int
-
-	// fieldPositions is an index of field positions for the
-	// last record returned by Read.
-	fieldPositions []position
-
-	// lastRecord is a record cache and is only used when ReuseRecord == true.
-	lastRecord []string
-}
-
-// NewLSTreeReader returns a new Reader that reads from reader.
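A brief usage sketch tying the constructor, Read, and GetField together (hedged: `pathOfFirstEntry` is a hypothetical helper, the record below is an invented `git ls-tree -z` sample, and the snippet assumes `strings` is imported and the package is used as `gitoutput`):

func pathOfFirstEntry(raw string) (string, error) {
	r := gitoutput.NewLSTreeReader(strings.NewReader(raw))
	// Read returns one NUL-terminated record as a []string of fields.
	record, err := r.Read()
	if err != nil {
		return "", err
	}
	// LsTreeEntry gives named access to the positional fields.
	return gitoutput.LsTreeEntry(record).GetField(gitoutput.Path), nil
}

Given "100644 blob e69de29bb2d1d6434b8b29ae775ad8c2e48c5391\tpackage.json\000", this returns "package.json".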
-func NewLSTreeReader(reader io.Reader) *Reader { - return &Reader{ - reader: bufio.NewReader(reader), - Fields: LsTreeFields, - } -} - -// NewLSFilesReader returns a new Reader that reads from reader. -func NewLSFilesReader(reader io.Reader) *Reader { - return &Reader{ - reader: bufio.NewReader(reader), - Fields: LsFilesFields, - } -} - -// NewStatusReader returns a new Reader that reads from reader. -func NewStatusReader(reader io.Reader) *Reader { - return &Reader{ - reader: bufio.NewReader(reader), - Fields: StatusFields, - } -} - -// Read reads one record from `reader`. -// Read always returns either a non-nil record or a non-nil error, -// but not both. -// -// If there is no data left to be read, Read returns nil, io.EOF. -// -// If ReuseRecord is true, the returned slice may be shared -// between multiple calls to Read. -func (r *Reader) Read() (record []string, err error) { - if r.ReuseRecord { - record, err = r.readRecord(r.lastRecord) - r.lastRecord = record - } else { - record, err = r.readRecord(nil) - } - return record, err -} - -// FieldPos returns the entry and column corresponding to -// the start of the field with the given index in the slice most recently -// returned by Read. Numbering of entries and columns starts at 1; -// columns are counted in bytes, not runes. -// -// If this is called with an out-of-bounds index, it panics. -func (r *Reader) FieldPos(field int) (entry int, column int) { - if field < 0 || field >= len(r.fieldPositions) { - panic("out of range index passed to FieldPos") - } - p := &r.fieldPositions[field] - return p.entry, p.col -} - -// pos holds the position of a field in the current entry. -type position struct { - entry, col int -} - -// ReadAll reads all the records from reader until EOF. -// -// A successful call returns err == nil, not err == io.EOF. Because ReadAll is -// defined to read until EOF, it does not treat end of file as an error to be -// reported. -func (r *Reader) ReadAll() (records [][]string, err error) { - for { - record, err := r.readRecord(nil) - if err == io.EOF { - return records, nil - } - if err != nil { - return nil, err - } - records = append(records, record) - } -} - -// readEntry reads the next entry (with the trailing NUL). -// If EOF is hit without a trailing NUL, it will be omitted. -// If some bytes were read then the error is never io.EOF. -// The result is only valid until the next call to readEntry. -func (r *Reader) readEntry() ([]byte, error) { - entry, err := r.reader.ReadSlice('\000') - if err == bufio.ErrBufferFull { - r.rawBuffer = append(r.rawBuffer[:0], entry...) - for err == bufio.ErrBufferFull { - entry, err = r.reader.ReadSlice('\000') - r.rawBuffer = append(r.rawBuffer, entry...) - } - entry = r.rawBuffer - } - if len(entry) > 0 && err == io.EOF { - entry = append(entry, '\000') - err = nil - } - r.numEntry++ - - return entry, err -} - -// getFieldLength returns the field length and the separator length for advancing. -func getFieldLength(fieldType Field, fieldNumber int, fieldCount int, entry *[]byte) (int, int) { - switch fieldType { - case StatusX: - return 1, 0 - case StatusY: - return 1, 1 - default: - return bytes.IndexRune(*entry, getSeparator(fieldNumber, fieldCount)), 1 - } -} - -// getSeparator returns the separator between the current field and the next field. -// Since fields separators are regular it doesn't hard code them. 
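// For example, ls-tree output has four fields, so the remaining counts
// seen here are 4, 3, 2 and 1, yielding space, space, tab and NUL:
// `mode<SP>type<SP>name<TAB>path<NUL>`.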
-func getSeparator(fieldNumber int, fieldCount int) rune { - remaining := fieldCount - fieldNumber - - switch remaining { - default: - return _space - case 2: - return _tab - case 1: - return _nul - } -} - -// readRecord reads a single record. -func (r *Reader) readRecord(dst []string) ([]string, error) { - entry, errRead := r.readEntry() - if errRead == io.EOF { - return nil, errRead - } - - // Parse each field in the record. - r.recordBuffer = r.recordBuffer[:0] - r.fieldIndexes = r.fieldIndexes[:0] - r.fieldPositions = r.fieldPositions[:0] - pos := position{entry: r.numEntry, col: 1} - - fieldCount := len(r.Fields) - - for fieldNumber, fieldType := range r.Fields { - length, advance := getFieldLength(fieldType, fieldNumber, fieldCount, &entry) - field := entry[:length] - - fieldError := checkValid(fieldType, field) - if fieldError != nil { - return nil, &ParseError{ - Entry: pos.entry, - Column: pos.col, - Err: fieldError, - } - } - - offset := length + advance - entry = entry[offset:] - r.recordBuffer = append(r.recordBuffer, field...) - r.fieldIndexes = append(r.fieldIndexes, len(r.recordBuffer)) - r.fieldPositions = append(r.fieldPositions, pos) - pos.col += offset - } - - // Create a single string and create slices out of it. - // This pins the memory of the fields together, but allocates once. - str := string(r.recordBuffer) // Convert to string once to batch allocations - dst = dst[:0] - if cap(dst) < len(r.fieldIndexes) { - dst = make([]string, len(r.fieldIndexes)) - } - dst = dst[:len(r.fieldIndexes)] - var preIdx int - for i, idx := range r.fieldIndexes { - dst[i] = str[preIdx:idx] - preIdx = idx - } - - return dst, nil -} diff --git a/cli/internal/encoding/gitoutput/gitoutput_test.go b/cli/internal/encoding/gitoutput/gitoutput_test.go deleted file mode 100644 index 19ab056565b4a..0000000000000 --- a/cli/internal/encoding/gitoutput/gitoutput_test.go +++ /dev/null @@ -1,377 +0,0 @@ -package gitoutput - -import ( - "fmt" - "io" - "reflect" - "strings" - "testing" - "unicode/utf8" -) - -type readTest struct { - Name string - Input string - Output [][]string - Reader func(io.Reader) *Reader - Positions [][][2]int - Errors []error - - // These fields are copied into the Reader - ReuseRecord bool -} - -// In these tests, the § and ∑ characters in readTest.Input are used to denote -// the start of a field and the position of an error respectively. -// They are removed before parsing and are used to verify the position -// information reported by FieldPos. 
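// For example, in the status input "§A§D §package.json\000" the three
// markers record that the fields of entry 1 start at byte columns 1, 2
// and 4 respectively.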
- -var lsTreeTests = []readTest{ - { - Name: "simple", - Input: "§100644 §blob §e69de29bb2d1d6434b8b29ae775ad8c2e48c5391\t§package.json\000", - Output: [][]string{{"100644", "blob", "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391", "package.json"}}, - Reader: NewLSTreeReader, - }, - { - Name: "no trailing nul", - Input: "§100644 §blob §e69de29bb2d1d6434b8b29ae775ad8c2e48c5391\t§package.json", - Output: [][]string{{"100644", "blob", "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391", "package.json"}}, - Reader: NewLSTreeReader, - }, - { - Name: "weird file names", - Input: "§100644 §blob §e69de29bb2d1d6434b8b29ae775ad8c2e48c5391\t§\t\000§100644 §blob §e69de29bb2d1d6434b8b29ae775ad8c2e48c5391\t§\"\000§100644 §blob §5b999efa470b056e329b4c23a73904e0794bdc2f\t§\n\000§100644 §blob §f44f57fff95196c5f7139dfa0b96875f1e9650a9\t§.gitignore\000§100644 §blob §33dbaf21275ca2a5f460249d941cbc27d5da3121\t§README.md\000§040000 §tree §7360f2d292aec95907cebdcbb412a6bf2bd10f8a\t§apps\000§100644 §blob §9ec2879b24ce2c817296eebe2cb3846f8e4751ea\t§package.json\000§040000 §tree §5759aadaea2cde55468a61e7104eb0a9d86c1d30\t§packages\000§100644 §blob §33d0621ee2f4da4a2f6f6bdd51a42618d181e337\t§turbo.json\000", - Output: [][]string{ - {"100644", "blob", "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391", "\t"}, - {"100644", "blob", "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391", "\""}, - {"100644", "blob", "5b999efa470b056e329b4c23a73904e0794bdc2f", "\n"}, - {"100644", "blob", "f44f57fff95196c5f7139dfa0b96875f1e9650a9", ".gitignore"}, - {"100644", "blob", "33dbaf21275ca2a5f460249d941cbc27d5da3121", "README.md"}, - {"040000", "tree", "7360f2d292aec95907cebdcbb412a6bf2bd10f8a", "apps"}, - {"100644", "blob", "9ec2879b24ce2c817296eebe2cb3846f8e4751ea", "package.json"}, - {"040000", "tree", "5759aadaea2cde55468a61e7104eb0a9d86c1d30", "packages"}, - {"100644", "blob", "33d0621ee2f4da4a2f6f6bdd51a42618d181e337", "turbo.json"}, - }, - Reader: NewLSTreeReader, - }, - { - Name: "invalid object mode", - Input: "∑888888 §blob §5b999efa470b056e329b4c23a73904e0794bdc2f\t§.eslintrc.js\000", - Output: [][]string{}, - Reader: NewLSTreeReader, - Errors: []error{&ParseError{Err: ErrInvalidObjectMode}}, - }, - { - Name: "invalid object type", - Input: "§100644 ∑bush §5b999efa470b056e329b4c23a73904e0794bdc2f\t§.eslintrc.js\000", - Output: [][]string{}, - Reader: NewLSTreeReader, - Errors: []error{&ParseError{Err: ErrInvalidObjectType}}, - }, - { - Name: "invalid object name", - Input: "§100644 §blob ∑Zb999efa470b056e329b4c23a73904e0794bdc2f\t§.eslintrc.js\000", - Output: [][]string{}, - Reader: NewLSTreeReader, - Errors: []error{&ParseError{Err: ErrInvalidObjectName}}, - }, - { - Name: "invalid path", - Input: "§100644 §blob §5b999efa470b056e329b4c23a73904e0794bdc2f\t∑\000", - Output: [][]string{}, - Reader: NewLSTreeReader, - Errors: []error{&ParseError{Err: ErrInvalidPath}}, - }, -} - -var lsFilesTests = []readTest{ - { - Name: "simple", - Input: "§100644 §e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 §0\t§package.json\000", - Output: [][]string{{"100644", "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391", "0", "package.json"}}, - Reader: NewLSFilesReader, - }, - { - Name: "no trailing nul", - Input: "§100644 §e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 §0\t§package.json", - Output: [][]string{{"100644", "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391", "0", "package.json"}}, - Reader: NewLSFilesReader, - }, - { - Name: "invalid object mode", - Input: "∑888888 §e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 §0\t§package.json", - Output: [][]string{}, - Reader: NewLSFilesReader, - Errors: 
[]error{&ParseError{Err: ErrInvalidObjectMode}}, - }, - { - Name: "invalid object name", - Input: "§100644 ∑Z69de29bb2d1d6434b8b29ae775ad8c2e48c5391 §0\t§package.json", - Output: [][]string{}, - Reader: NewLSFilesReader, - Errors: []error{&ParseError{Err: ErrInvalidObjectName}}, - }, - { - Name: "invalid object stage", - Input: "§100644 §e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 ∑4\t§package.json", - Output: [][]string{}, - Reader: NewLSFilesReader, - Errors: []error{&ParseError{Err: ErrInvalidObjectStage}}, - }, - { - Name: "invalid path", - Input: "§100644 §e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 §0\t∑", - Output: [][]string{}, - Reader: NewLSFilesReader, - Errors: []error{&ParseError{Err: ErrInvalidPath}}, - }, -} - -var statusTests = []readTest{ - { - Name: "simple", - Input: "§A§D §package.json\000", - Output: [][]string{{"A", "D", "package.json"}}, - Reader: NewStatusReader, - }, - { - Name: "no trailing nul", - Input: "§A§D §package.json", - Output: [][]string{{"A", "D", "package.json"}}, - Reader: NewStatusReader, - }, - { - Name: "invalid status X", - Input: "∑~§D §package.json\000", - Output: [][]string{}, - Reader: NewStatusReader, - Errors: []error{&ParseError{Err: ErrInvalidObjectStatusX}}, - }, - { - Name: "invalid status Y", - Input: "§D∑~ §package.json\000", - Output: [][]string{}, - Reader: NewStatusReader, - Errors: []error{&ParseError{Err: ErrInvalidObjectStatusY}}, - }, - { - Name: "invalid path", - Input: "§A§D ∑\000", - Output: [][]string{}, - Reader: NewStatusReader, - Errors: []error{&ParseError{Err: ErrInvalidPath}}, - }, -} - -func TestRead(t *testing.T) { - newReader := func(tt readTest) (*Reader, [][][2]int, map[int][2]int) { - positions, errPositions, input := makePositions(tt.Input) - r := tt.Reader(strings.NewReader(input)) - - r.ReuseRecord = tt.ReuseRecord - return r, positions, errPositions - } - - allTests := []readTest{} - allTests = append(allTests, lsTreeTests...) - allTests = append(allTests, lsFilesTests...) - allTests = append(allTests, statusTests...) - - for _, tt := range allTests { - t.Run(tt.Name, func(t *testing.T) { - r, positions, errPositions := newReader(tt) - out, err := r.ReadAll() - if wantErr := firstError(tt.Errors, positions, errPositions); wantErr != nil { - if !reflect.DeepEqual(err, wantErr) { - t.Fatalf("ReadAll() error mismatch:\ngot %v (%#v)\nwant %v (%#v)", err, err, wantErr, wantErr) - } - if out != nil { - t.Fatalf("ReadAll() output:\ngot %q\nwant nil", out) - } - } else { - if err != nil { - t.Fatalf("unexpected Readall() error: %v", err) - } - if !reflect.DeepEqual(out, tt.Output) { - t.Fatalf("ReadAll() output:\ngot %q\nwant %q", out, tt.Output) - } - } - - // Check field and error positions. 
- r, _, _ = newReader(tt) - for recNum := 0; ; recNum++ { - rec, err := r.Read() - var wantErr error - if recNum < len(tt.Errors) && tt.Errors[recNum] != nil { - wantErr = errorWithPosition(tt.Errors[recNum], recNum, positions, errPositions) - } else if recNum >= len(tt.Output) { - wantErr = io.EOF - } - if !reflect.DeepEqual(err, wantErr) { - t.Fatalf("Read() error at record %d:\ngot %v (%#v)\nwant %v (%#v)", recNum, err, err, wantErr, wantErr) - } - if err != nil { - if recNum < len(tt.Output) { - t.Fatalf("need more records; got %d want %d", recNum, len(tt.Output)) - } - break - } - if got, want := rec, tt.Output[recNum]; !reflect.DeepEqual(got, want) { - t.Errorf("Read vs ReadAll mismatch;\ngot %q\nwant %q", got, want) - } - pos := positions[recNum] - if len(pos) != len(rec) { - t.Fatalf("mismatched position length at record %d", recNum) - } - for i := range rec { - entry, col := r.FieldPos(i) - if got, want := [2]int{entry, col}, pos[i]; got != want { - t.Errorf("position mismatch at record %d, field %d;\ngot %v\nwant %v", recNum, i, got, want) - } - } - } - }) - } -} - -// firstError returns the first non-nil error in errs, -// with the position adjusted according to the error's -// index inside positions. -func firstError(errs []error, positions [][][2]int, errPositions map[int][2]int) error { - for i, err := range errs { - if err != nil { - return errorWithPosition(err, i, positions, errPositions) - } - } - return nil -} - -func errorWithPosition(err error, recNum int, positions [][][2]int, errPositions map[int][2]int) error { - parseErr, ok := err.(*ParseError) - if !ok { - return err - } - if recNum >= len(positions) { - panic(fmt.Errorf("no positions found for error at record %d", recNum)) - } - errPos, ok := errPositions[recNum] - if !ok { - panic(fmt.Errorf("no error position found for error at record %d", recNum)) - } - parseErr1 := *parseErr - parseErr1.Entry = errPos[0] - parseErr1.Column = errPos[1] - return &parseErr1 -} - -// makePositions returns the expected field positions of all the fields in text, -// the positions of any errors, and the text with the position markers removed. -// -// The start of each field is marked with a § symbol; -// Error positions are marked with ∑ symbols. -func makePositions(text string) ([][][2]int, map[int][2]int, string) { - buf := make([]byte, 0, len(text)) - var positions [][][2]int - errPositions := make(map[int][2]int) - entry, col := 1, 1 - recNum := 0 - - for len(text) > 0 { - r, size := utf8.DecodeRuneInString(text) - switch r { - case '\000': - col = 1 - buf = append(buf, '\000') - positions = append(positions, [][2]int{}) - entry++ - recNum++ - case '§': - if len(positions) == 0 { - positions = append(positions, [][2]int{}) - } - positions[len(positions)-1] = append(positions[len(positions)-1], [2]int{entry, col}) - case '∑': - errPositions[recNum] = [2]int{entry, col} - default: - buf = append(buf, text[:size]...) - col += size - } - text = text[size:] - } - return positions, errPositions, string(buf) -} - -// nTimes is an io.Reader which yields the string s n times. -type nTimes struct { - s string - n int - off int -} - -func (r *nTimes) Read(p []byte) (n int, err error) { - for { - if r.n <= 0 || r.s == "" { - return n, io.EOF - } - n0 := copy(p, r.s[r.off:]) - p = p[n0:] - n += n0 - r.off += n0 - if r.off == len(r.s) { - r.off = 0 - r.n-- - } - if len(p) == 0 { - return - } - } -} - -// TODO: track other types. -// benchmarkRead measures reading the provided ls-tree data. 
-// initReader, if non-nil, modifies the Reader before it's used. -func benchmarkRead(b *testing.B, getReader func(reader io.Reader) *Reader, initReader func(*Reader), rows string) { - b.ReportAllocs() - r := getReader(&nTimes{s: rows, n: b.N}) - if initReader != nil { - initReader(r) - } - for { - _, err := r.Read() - if err == io.EOF { - break - } - if err != nil { - b.Fatal(err) - } - } -} - -const benchmarkLSTreeData = `100644 blob e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 \000100644 blob e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 "\000100644 blob 5b999efa470b056e329b4c23a73904e0794bdc2f .eslintrc.js\000100644 blob f44f57fff95196c5f7139dfa0b96875f1e9650a9 .gitignore\000100644 blob 33dbaf21275ca2a5f460249d941cbc27d5da3121 README.md\000040000 tree 7360f2d292aec95907cebdcbb412a6bf2bd10f8a apps\000100644 blob 9ec2879b24ce2c817296eebe2cb3846f8e4751ea package.json\000040000 tree 5759aadaea2cde55468a61e7104eb0a9d86c1d30 packages\000100644 blob 33d0621ee2f4da4a2f6f6bdd51a42618d181e337 turbo.json\000` -const benchmarkLSFilesData = `100644 13e399637190f1edb7f034b4281ecfafb5dab9e2 0 Makefile\000100644 6c1c500409989499db51f1eff37b38b857547fdc 0 cmd/turbo/main.go\000100644 2d2b9a2c3ba82f6b806f58c7f7d5eb55fefa837e 0 cmd/turbo/main_utils.go\000100644 3329c8a7f6edee487caeeaf56c600f7c85fc69e7 0 cmd/turbo/signals.go\000100644 e81df7b6ed9a277c30dd35e3524d00e8b13cf584 0 cmd/turbo/version.go\000100644 8992ebf37df05fc5ff64c0f811a3259adff10d70 0 go.mod\000100644 3da872301c79986673d6a12914fbd48c924f5999 0 go.sum\000100644 d7b2d20a037aa9bf8b48eef451eb5f9ba5904237 0 internal/analytics/analytics.go\000` -const benchmarkStatusData = ` M cli/internal/encoding/gitoutput/gitoutput.go\000 M cli/internal/encoding/gitoutput/gitoutput_test.go\000?? NOTICES.md\000 M cli/internal/encoding/gitoutput/gitoutput.go\000 M cli/internal/encoding/gitoutput/gitoutput_test.go\000?? NOTICES.md\000 M cli/internal/encoding/gitoutput/gitoutput.go\000 M cli/internal/encoding/gitoutput/gitoutput_test.go\000?? NOTICES.md\000 M cli/internal/encoding/gitoutput/gitoutput.go\000 M cli/internal/encoding/gitoutput/gitoutput_test.go\000?? NOTICES.md\000 M cli/internal/encoding/gitoutput/gitoutput.go\000 M cli/internal/encoding/gitoutput/gitoutput_test.go\000` - -func BenchmarkLSTreeRead(b *testing.B) { - benchmarkRead(b, NewLSTreeReader, nil, benchmarkLSTreeData) -} - -func BenchmarkLSTreeReadReuseRecord(b *testing.B) { - benchmarkRead(b, NewLSTreeReader, func(r *Reader) { r.ReuseRecord = true }, benchmarkLSTreeData) -} - -func BenchmarkLSFilesRead(b *testing.B) { - benchmarkRead(b, NewLSFilesReader, nil, benchmarkLSFilesData) -} - -func BenchmarkLSFilesReadReuseRecord(b *testing.B) { - benchmarkRead(b, NewLSFilesReader, func(r *Reader) { r.ReuseRecord = true }, benchmarkLSFilesData) -} - -func BenchmarkStatusRead(b *testing.B) { - benchmarkRead(b, NewStatusReader, nil, benchmarkStatusData) -} - -func BenchmarkStatusReadReuseRecord(b *testing.B) { - benchmarkRead(b, NewStatusReader, func(r *Reader) { r.ReuseRecord = true }, benchmarkStatusData) -} diff --git a/cli/internal/encoding/gitoutput/validators.go b/cli/internal/encoding/gitoutput/validators.go deleted file mode 100644 index e13c2d5a25744..0000000000000 --- a/cli/internal/encoding/gitoutput/validators.go +++ /dev/null @@ -1,148 +0,0 @@ -package gitoutput - -import "bytes" - -var _allowedObjectType = []byte(" blob tree commit ") -var _allowedStatusChars = []byte(" MTADRCU?!") - -// checkValid provides a uniform interface for calling `gitoutput` validators. 
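The validators below favor small byte tricks over regexes; for instance, XOR-ing an ASCII byte with 0x30 range-checks an octal digit in a single comparison. A standalone sketch of that idea (hypothetical helpers, not part of the original package):

// isOctalDigit reports whether b is one of the ASCII bytes '0' through '7'.
// Bytes 0x30..0x37 XOR 0x30 map to 0..7; every other byte maps above 7.
func isOctalDigit(b byte) bool {
	return b^0x30 <= 7
}

// validMode mirrors the shape of checkObjectMode below: a file mode such
// as "100644" must be exactly six octal digits.
func validMode(value []byte) bool {
	if len(value) != 6 {
		return false
	}
	for _, b := range value {
		if !isOctalDigit(b) {
			return false
		}
	}
	return true
}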
-func checkValid(fieldType Field, value []byte) error { - switch fieldType { - case ObjectMode: - return checkObjectMode(value) - case ObjectType: - return checkObjectType(value) - case ObjectName: - return CheckObjectName(value) - case ObjectStage: - return checkObjectStage(value) - case StatusX: - return checkStatusX(value) - case StatusY: - return checkStatusY(value) - case Path: - return checkPath(value) - default: - return ErrUnknownField - } -} - -// checkObjectMode asserts that a byte slice is a six digit octal string (100644). -// It does not attempt to ensure that the values in particular positions are reasonable. -func checkObjectMode(value []byte) error { - if len(value) != 6 { - return ErrInvalidObjectMode - } - - // 0-7 are 0x30 - 0x37 - for _, currentByte := range value { - if (currentByte ^ 0x30) > 7 { - return ErrInvalidObjectMode - } - } - - // length of 6, 0-7 - return nil -} - -// checkObjectType asserts that a byte slice is a valid possibility (blob, tree, commit). -func checkObjectType(value []byte) error { - typeLength := len(value) - // Based upon: - // min(len("blob"), len("tree"), len("commit")) - // max(len("blob"), len("tree"), len("commit")) - if typeLength < 4 || typeLength > 6 { - return ErrInvalidObjectType - } - - // Because of the space separator there is no way to pass in a space. - // We use that trick to enable fast lookups in _allowedObjectType. - index := bytes.Index(_allowedObjectType, value) - - // Impossible to match at 0, not found is -1. - if index < 1 { - return ErrInvalidObjectType - } - - // Followed by a space. - if _allowedObjectType[index-1] != byte(_space) { - return ErrInvalidObjectType - } - - // Preceded by a space. - if _allowedObjectType[index+typeLength] != byte(_space) { - return ErrInvalidObjectType - } - return nil -} - -// CheckObjectName asserts that a byte slice looks like a SHA hash. -func CheckObjectName(value []byte) error { - if len(value) != 40 { - return ErrInvalidObjectName - } - - // 0-9 are 0x30 - 0x39 - // a-f are 0x61 - 0x66 - for _, currentByte := range value { - isNumber := (currentByte ^ 0x30) < 10 - numericAlpha := (currentByte ^ 0x60) - isAlpha := (numericAlpha < 7) && (numericAlpha > 0) - if !(isNumber || isAlpha) { - return ErrInvalidObjectName - } - } - - // length of 40, hex - return nil -} - -// checkObjectStage asserts that a byte slice is a valid possibility (0-3). -func checkObjectStage(value []byte) error { - // 0-3 are 0x30 - 0x33 - if len(value) != 1 { - return ErrInvalidObjectStage - } - - currentByte := value[0] - if (currentByte ^ 0x30) >= 4 { - return ErrInvalidObjectStage - } - - return nil -} - -// checkStatusX asserts that a byte slice is a valid possibility (" MTADRCU?!"). -func checkStatusX(value []byte) error { - if len(value) != 1 { - return ErrInvalidObjectStatusX - } - - index := bytes.Index(_allowedStatusChars, value) - if index == -1 { - return ErrInvalidObjectStatusX - } - return nil -} - -// checkStatusY asserts that a byte slice is a valid possibility (" MTADRCU?!"). -func checkStatusY(value []byte) error { - if len(value) != 1 { - return ErrInvalidObjectStatusY - } - - index := bytes.Index(_allowedStatusChars, value) - if index == -1 { - return ErrInvalidObjectStatusY - } - return nil -} - -// checkPath asserts that a byte slice is non-empty. -func checkPath(value []byte) error { - // Exists at all. This is best effort as trying to be fully-compatible is silly. 
- if len(value) == 0 { - return ErrInvalidPath - } - return nil -} diff --git a/cli/internal/encoding/gitoutput/validators_test.go b/cli/internal/encoding/gitoutput/validators_test.go deleted file mode 100644 index 29e12742517d8..0000000000000 --- a/cli/internal/encoding/gitoutput/validators_test.go +++ /dev/null @@ -1,514 +0,0 @@ -package gitoutput - -import ( - "testing" -) - -func Test_checkValid(t *testing.T) { - type args struct { - fieldType Field - value []byte - } - tests := []struct { - name string - args args - wantErr bool - }{ - { - name: "ObjectMode", - args: args{ - fieldType: ObjectMode, - value: []byte("100644"), - }, - wantErr: false, - }, - { - name: "ObjectType", - args: args{ - fieldType: ObjectType, - value: []byte("blob"), - }, - wantErr: false, - }, - { - name: "ObjectName", - args: args{ - fieldType: ObjectName, - value: []byte("8992ebf37df05fc5ff64c0f811a3259adff10d70"), - }, - wantErr: false, - }, - { - name: "ObjectStage", - args: args{ - fieldType: ObjectStage, - value: []byte("0"), - }, - wantErr: false, - }, - { - name: "StatusX", - args: args{ - fieldType: StatusX, - value: []byte("!"), - }, - wantErr: false, - }, - { - name: "StatusY", - args: args{ - fieldType: StatusY, - value: []byte("?"), - }, - wantErr: false, - }, - { - name: "Path", - args: args{ - fieldType: Path, - value: []byte("/hello/world"), - }, - wantErr: false, - }, - { - name: "Unknown", - args: args{ - fieldType: Field(12), - value: []byte("unused"), - }, - wantErr: true, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - if err := checkValid(tt.args.fieldType, tt.args.value); (err != nil) != tt.wantErr { - t.Errorf("checkValid() error = %v, wantErr %v", err, tt.wantErr) - } - }) - } -} - -func Test_checkObjectMode(t *testing.T) { - type args struct { - value []byte - } - tests := []struct { - name string - args args - wantErr bool - }{ - { - name: "Simple", - args: args{ - value: []byte("100644"), - }, - wantErr: false, - }, - { - name: "All sevens", - args: args{ - value: []byte("777777"), - }, - wantErr: false, - }, - { - name: "All zeroes", - args: args{ - value: []byte("000000"), - }, - wantErr: false, - }, - { - name: "Non-octal chars", - args: args{ - value: []byte("sixsix"), - }, - wantErr: true, - }, - { - name: "nul", - args: args{ - value: []byte("\000\000\000\000\000\000"), - }, - wantErr: true, - }, - { - name: "too long", - args: args{ - value: []byte("1234567"), - }, - wantErr: true, - }, - { - name: "off by plus one", - args: args{ - value: []byte("888888"), - }, - wantErr: true, - }, - { - name: "off by minus one", - args: args{ - value: []byte("//////"), - }, - wantErr: true, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - if err := checkObjectMode(tt.args.value); (err != nil) != tt.wantErr { - t.Errorf("checkObjectMode() error = %v, wantErr %v", err, tt.wantErr) - } - }) - } -} - -func Test_checkObjectType(t *testing.T) { - type args struct { - value []byte - } - tests := []struct { - name string - args args - wantErr bool - }{ - { - name: "Finds blob", - args: args{ - value: []byte("blob"), - }, - wantErr: false, - }, - { - name: "Finds tree", - args: args{ - value: []byte("tree"), - }, - wantErr: false, - }, - { - name: "Finds commit", - args: args{ - value: []byte("commit"), - }, - wantErr: false, - }, - { - name: "nonsense input", - args: args{ - value: []byte("input"), - }, - wantErr: true, - }, - { - name: "Knows too much about the implementation details (all 3)", - args: args{ - value: []byte("blob tree 
commit"), - }, - wantErr: true, - }, - { - name: "Knows too much about the implementation details (first two)", - args: args{ - value: []byte("blob tree"), - }, - wantErr: true, - }, - { - name: "Knows too much about the implementation details (last two)", - args: args{ - value: []byte("tree commit"), - }, - wantErr: true, - }, - { - name: "Knows too much about the implementation details (arbitrary substring)", - args: args{ - value: []byte("tree c"), - }, - wantErr: true, - }, - { - name: "Knows too much about the implementation details (space)", - args: args{ - value: []byte(" "), - }, - wantErr: true, - }, - { - name: "Knows too much about the implementation details (empty string)", - args: args{ - value: []byte(""), - }, - wantErr: true, - }, - { - name: "Knows too much about the implementation details (leading space)", - args: args{ - value: []byte(" tree"), - }, - wantErr: true, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - if err := checkObjectType(tt.args.value); (err != nil) != tt.wantErr { - t.Errorf("checkObjectType() error = %v, wantErr %v", err, tt.wantErr) - } - }) - } -} - -func TestCheckObjectName(t *testing.T) { - type args struct { - value []byte - } - tests := []struct { - name string - args args - wantErr bool - }{ - { - name: "Simple", - args: args{ - value: []byte("8992ebf37df05fc5ff64c0f811a3259adff10d70"), - }, - wantErr: false, - }, - { - name: "Too short", - args: args{ - value: []byte("8992ebf37df05fc5ff64"), - }, - wantErr: true, - }, - { - name: "Too long", - args: args{ - value: []byte("8992ebf37df05fc5ff64c0f811a3259adff10d708992ebf37df05fc5ff64c0f811a3259adff10d70"), - }, - wantErr: true, - }, - { - name: "Not hex", - args: args{ - value: []byte("z992ebf37df05fc5ff64c0f811a3259adff10d70"), - }, - wantErr: true, - }, - { - name: "Not lowercase", - args: args{ - value: []byte("8992EBF37DF05FC5FF64C0F811A3259ADFF10D70"), - }, - wantErr: true, - }, - { - name: "Off by plus one in the ASCII table (a-f).", - args: args{ - value: []byte("gggggggggggggggggggggggggggggggggggggggg"), - }, - wantErr: true, - }, - { - name: "Off by minus one in the ASCII table (a-f).", - args: args{ - value: []byte("````````````````````````````````````````"), - }, - wantErr: true, - }, - { - name: "Off by minus one in the ASCII table (0-9).", - args: args{ - value: []byte("////////////////////////////////////////"), - }, - wantErr: true, - }, - { - name: "Off by plus one in the ASCII table (0-9).", - args: args{ - value: []byte("::::::::::::::::::::::::::::::::::::::::"), - }, - wantErr: true, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - if err := CheckObjectName(tt.args.value); (err != nil) != tt.wantErr { - t.Errorf("CheckObjectName() error = %v, wantErr %v", err, tt.wantErr) - } - }) - } -} - -func Test_checkObjectStage(t *testing.T) { - type args struct { - value []byte - } - tests := []struct { - name string - args args - wantErr bool - }{ - { - name: "0", - args: args{ - value: []byte("0"), - }, - wantErr: false, - }, - { - name: "1", - args: args{ - value: []byte("1"), - }, - wantErr: false, - }, - { - name: "2", - args: args{ - value: []byte("2"), - }, - wantErr: false, - }, - { - name: "3", - args: args{ - value: []byte("3"), - }, - wantErr: false, - }, - { - name: "/", - args: args{ - value: []byte("/"), - }, - wantErr: true, - }, - { - name: "4", - args: args{ - value: []byte("4"), - }, - wantErr: true, - }, - { - name: "00", - args: args{ - value: []byte("00"), - }, - wantErr: true, - }, - } - for _, tt := 
range tests { - t.Run(tt.name, func(t *testing.T) { - if err := checkObjectStage(tt.args.value); (err != nil) != tt.wantErr { - t.Errorf("checkObjectStage() error = %v, wantErr %v", err, tt.wantErr) - } - }) - } -} - -func Test_checkStatus(t *testing.T) { - type args struct { - value []byte - } - tests := []struct { - name string - args args - wantErr bool - }{ - { - name: "Simple", - args: args{ - value: []byte("D"), - }, - wantErr: false, - }, - { - name: "Space", - args: args{ - value: []byte(" "), - }, - wantErr: false, - }, - { - name: "Empty", - args: args{ - value: []byte(""), - }, - wantErr: true, - }, - { - name: "Too long", - args: args{ - value: []byte("?!"), - }, - wantErr: true, - }, - { - name: "nul", - args: args{ - value: []byte("\000"), - }, - wantErr: true, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - if err := checkStatusX(tt.args.value); (err != nil) != tt.wantErr { - t.Errorf("checkStatusX() error = %v, wantErr %v", err, tt.wantErr) - } - if err := checkStatusY(tt.args.value); (err != nil) != tt.wantErr { - t.Errorf("checkStatusY() error = %v, wantErr %v", err, tt.wantErr) - } - }) - } -} - -func Test_checkPath(t *testing.T) { - type args struct { - value []byte - } - tests := []struct { - name string - args args - wantErr bool - }{ - { - name: "Simple", - args: args{ - value: []byte("./"), - }, - wantErr: false, - }, - { - name: "newline", - args: args{ - value: []byte("has\nnewline"), - }, - wantErr: false, - }, - { - name: "Empty", - args: args{ - value: []byte(""), - }, - wantErr: true, - }, - { - name: "newline only", - args: args{ - value: []byte("\n"), - }, - wantErr: false, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - if err := checkPath(tt.args.value); (err != nil) != tt.wantErr { - t.Errorf("checkPath() error = %v, wantErr %v", err, tt.wantErr) - } - }) - } -} diff --git a/cli/internal/env/env.go b/cli/internal/env/env.go deleted file mode 100644 index 878cd5e006b19..0000000000000 --- a/cli/internal/env/env.go +++ /dev/null @@ -1,245 +0,0 @@ -package env - -import ( - "crypto/sha256" - "fmt" - "os" - "regexp" - "sort" - "strings" -) - -// EnvironmentVariableMap is a map of env variables and their values -type EnvironmentVariableMap map[string]string - -// BySource contains a map of environment variables broken down by the source -type BySource struct { - Explicit EnvironmentVariableMap - Matching EnvironmentVariableMap -} - -// DetailedMap contains the composite and the detailed maps of environment variables -// All is used as a taskhash input (taskhash.CalculateTaskHash) -// BySource is used to print out a Dry Run Summary -type DetailedMap struct { - All EnvironmentVariableMap - BySource BySource -} - -// EnvironmentVariablePairs is a list of "k=v" strings for env variables and their values -type EnvironmentVariablePairs []string - -// WildcardMaps is a pair of EnvironmentVariableMaps. -type WildcardMaps struct { - Inclusions EnvironmentVariableMap - Exclusions EnvironmentVariableMap -} - -// Resolve collapses a WildcardMaps into a single EnvironmentVariableMap.
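A small illustrative example (not from the original source) of the set semantics Resolve implements below: exclusions are applied after inclusions, so a key present on both sides is dropped.

```go
// Sketch, assuming package env; the values are placeholders.
func ExampleWildcardMaps_Resolve() {
	wm := WildcardMaps{
		Inclusions: EnvironmentVariableMap{"API_URL": "https://example.test", "API_KEY": "hunter2"},
		Exclusions: EnvironmentVariableMap{"API_KEY": "hunter2"},
	}
	fmt.Println(wm.Resolve())
	// Output: map[API_URL:https://example.test]
}
```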
-func (ws WildcardMaps) Resolve() EnvironmentVariableMap { - output := EnvironmentVariableMap{} - output.Union(ws.Inclusions) - output.Difference(ws.Exclusions) - return output -} - -// GetEnvMap returns a map of env vars and their values from os.Environ -func GetEnvMap() EnvironmentVariableMap { - envMap := make(map[string]string) - for _, envVar := range os.Environ() { - if i := strings.Index(envVar, "="); i >= 0 { - parts := strings.SplitN(envVar, "=", 2) - envMap[parts[0]] = parts[1] - } - } - return envMap -} - -// Union takes another EnvironmentVariableMap and adds it into the receiver -// It overwrites values if they already exist. -func (evm EnvironmentVariableMap) Union(another EnvironmentVariableMap) { - for k, v := range another { - evm[k] = v - } -} - -// Difference takes another EnvironmentVariableMap and removes matching keys from the receiver -func (evm EnvironmentVariableMap) Difference(another EnvironmentVariableMap) { - for k := range another { - delete(evm, k) - } -} - -// Add creates one new environment variable. -func (evm EnvironmentVariableMap) Add(key string, value string) { - evm[key] = value -} - -// Names returns a sorted list of env var names for the EnvironmentVariableMap -func (evm EnvironmentVariableMap) Names() []string { - names := []string{} - for k := range evm { - names = append(names, k) - } - sort.Strings(names) - return names -} - -// mapToPair returns a deterministically sorted set of EnvironmentVariablePairs from an EnvironmentVariableMap -// It takes a transformer function that turns each key-value pair into a string -func (evm EnvironmentVariableMap) mapToPair(transformer func(k string, v string) string) EnvironmentVariablePairs { - if evm == nil { - return nil - } - - // map keys are already unique, so the pairs need no deduplication - pairs := make([]string, 0, len(evm)) - for k, v := range evm { - paired := transformer(k, v) - pairs = append(pairs, paired) - } - - // sort it so it's deterministic - sort.Strings(pairs) - - return pairs -} - -// ToSecretHashable returns a deterministically sorted set of EnvironmentVariablePairs from an EnvironmentVariableMap -// This is the value used to print out the task hash input, so the values are cryptographically hashed -func (evm EnvironmentVariableMap) ToSecretHashable() EnvironmentVariablePairs { - return evm.mapToPair(func(k, v string) string { - if v != "" { - hashedValue := sha256.Sum256([]byte(v)) - return fmt.Sprintf("%v=%x", k, hashedValue) - } - - return fmt.Sprintf("%v=%s", k, "") - }) -} - -// ToHashable returns a deterministically sorted set of EnvironmentVariablePairs from an EnvironmentVariableMap -// This is the value that is used upstream as a task hash input, so we need it to be deterministic -func (evm EnvironmentVariableMap) ToHashable() EnvironmentVariablePairs { - return evm.mapToPair(func(k, v string) string { - return fmt.Sprintf("%v=%v", k, v) - }) -} - -const wildcard = '*' -const wildcardEscape = '\\' -const regexWildcardSegment = ".*" - -func wildcardToRegexPattern(pattern string) string { - var regexString []string - - var previousIndex int - var previousRune rune - - for i, char := range pattern { - if char == wildcard { - if previousRune == wildcardEscape { - // Found a literal * - - // Replace the trailing "\*" with just "*" before adding the segment. - regexString = append(regexString, regexp.QuoteMeta(pattern[previousIndex:i-1]+"*")) - } else { - // Found a wildcard - - // Add in the static segment since the last wildcard. Can be zero length.
- regexString = append(regexString, regexp.QuoteMeta(pattern[previousIndex:i])) - - // Add a dynamic segment if it isn't adjacent to another dynamic segment. - if regexString[len(regexString)-1] != regexWildcardSegment { - regexString = append(regexString, regexWildcardSegment) - } - } - - // Advance the pointer. - previousIndex = i + 1 - } - previousRune = char - } - - // Add the last static segment. Can be zero length. - regexString = append(regexString, regexp.QuoteMeta(pattern[previousIndex:])) - - return strings.Join(regexString, "") -} - -// fromWildcards returns a WildcardMaps after processing the wildcard patterns against the receiver. -func (evm EnvironmentVariableMap) fromWildcards(wildcardPatterns []string) (WildcardMaps, error) { - output := WildcardMaps{ - Inclusions: EnvironmentVariableMap{}, - Exclusions: EnvironmentVariableMap{}, - } - - includePatterns := make([]string, 0) - excludePatterns := make([]string, 0) - - for _, wildcardPattern := range wildcardPatterns { - isExclude := strings.HasPrefix(wildcardPattern, "!") - isLiteralLeadingExclamation := strings.HasPrefix(wildcardPattern, "\\!") - - if isExclude { - excludePattern := wildcardToRegexPattern(wildcardPattern[1:]) - excludePatterns = append(excludePatterns, excludePattern) - } else if isLiteralLeadingExclamation { - includePattern := wildcardToRegexPattern(wildcardPattern[1:]) - includePatterns = append(includePatterns, includePattern) - } else { - includePattern := wildcardToRegexPattern(wildcardPattern) - includePatterns = append(includePatterns, includePattern) - } - } - - includeRegexString := "^(" + strings.Join(includePatterns, "|") + ")$" - excludeRegexString := "^(" + strings.Join(excludePatterns, "|") + ")$" - - includeRegex, err := regexp.Compile(includeRegexString) - if err != nil { - return output, err - } - - excludeRegex, err := regexp.Compile(excludeRegexString) - if err != nil { - return output, err - } - - for envVar, envValue := range evm { - if len(includePatterns) > 0 && includeRegex.MatchString(envVar) { - output.Inclusions[envVar] = envValue - } - if len(excludePatterns) > 0 && excludeRegex.MatchString(envVar) { - output.Exclusions[envVar] = envValue - } - } - - return output, nil -} - -// FromWildcards returns an EnvironmentVariableMap containing the variables -// in the environment which match an array of wildcard patterns. -func (evm EnvironmentVariableMap) FromWildcards(wildcardPatterns []string) (EnvironmentVariableMap, error) { - if wildcardPatterns == nil { - return nil, nil - } - - resolvedSet, err := evm.fromWildcards(wildcardPatterns) - if err != nil { - return nil, err - } - - return resolvedSet.Resolve(), nil -} - -// FromWildcardsUnresolved returns a WildcardMaps specifying the inclusions and -// exclusions discovered from a set of wildcard patterns. This is used to ensure -// that user exclusions have primacy over inferred inclusions.
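An illustrative example (not from the deleted file) of the exclusion primacy described above: a leading `!` turns a pattern into an exclusion, and exclusions beat any inclusion match.

```go
// Sketch, assuming package env; the variable names are hypothetical.
func ExampleEnvironmentVariableMap_FromWildcards() {
	vars := EnvironmentVariableMap{
		"NEXT_PUBLIC_API": "a",
		"NEXT_PUBLIC_CDN": "b",
		"NEXT_SECRET":     "c",
	}
	matched, err := vars.FromWildcards([]string{"NEXT_*", "!NEXT_SECRET"})
	if err != nil {
		panic(err)
	}
	fmt.Println(matched)
	// Output: map[NEXT_PUBLIC_API:a NEXT_PUBLIC_CDN:b]
}
```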
-func (evm EnvironmentVariableMap) FromWildcardsUnresolved(wildcardPatterns []string) (WildcardMaps, error) { - if wildcardPatterns == nil { - return WildcardMaps{}, nil - } - - return evm.fromWildcards(wildcardPatterns) -} diff --git a/cli/internal/env/env_test.go b/cli/internal/env/env_test.go deleted file mode 100644 index 2ee385cfb2711..0000000000000 --- a/cli/internal/env/env_test.go +++ /dev/null @@ -1,215 +0,0 @@ -package env - -import ( - "reflect" - "testing" - - "github.com/vercel/turbo/cli/internal/ffi" - "gotest.tools/v3/assert" -) - -func TestGetEnvVarsFromWildcards(t *testing.T) { - tests := []struct { - name string - self EnvironmentVariableMap - wildcardPatterns []string - want EnvironmentVariableMap - }{ - { - name: "nil wildcard patterns", - self: EnvironmentVariableMap{}, - wildcardPatterns: nil, - want: nil, - }, - { - name: "empty wildcard patterns", - self: EnvironmentVariableMap{}, - wildcardPatterns: []string{}, - want: EnvironmentVariableMap{}, - }, - { - name: "leading wildcard", - self: EnvironmentVariableMap{ - "STATIC": "VALUE", - "_STATIC": "VALUE", - "FOO_STATIC": "VALUE", - }, - wildcardPatterns: []string{"*_STATIC"}, - want: EnvironmentVariableMap{ - "_STATIC": "VALUE", - "FOO_STATIC": "VALUE", - }, - }, - { - name: "trailing wildcard", - self: EnvironmentVariableMap{ - "STATIC": "VALUE", - "STATIC_": "VALUE", - "STATIC_TRAILER": "VALUE", - }, - wildcardPatterns: []string{"STATIC_*"}, - want: EnvironmentVariableMap{ - "STATIC_": "VALUE", - "STATIC_TRAILER": "VALUE", - }, - }, - { - name: "leading & trailing wildcard", - self: EnvironmentVariableMap{ - "STATIC": "VALUE", - "STATIC_": "VALUE", - "_STATIC": "VALUE", - "_STATIC_": "VALUE", - "_STATIC_B": "VALUE", - "A_STATIC_": "VALUE", - "A_STATIC_B": "VALUE", - }, - wildcardPatterns: []string{"*_STATIC_*"}, - want: EnvironmentVariableMap{ - "_STATIC_": "VALUE", - "_STATIC_B": "VALUE", - "A_STATIC_": "VALUE", - "A_STATIC_B": "VALUE", - }, - }, - { - name: "adjacent wildcard", - self: EnvironmentVariableMap{ - "FOO__BAR": "VALUE", - "FOO_1_BAR": "VALUE", - "FOO_12_BAR": "VALUE", - }, - wildcardPatterns: []string{"FOO_**_BAR"}, - want: EnvironmentVariableMap{ - "FOO__BAR": "VALUE", - "FOO_1_BAR": "VALUE", - "FOO_12_BAR": "VALUE", - }, - }, - { - name: "literal *", - self: EnvironmentVariableMap{ - "LITERAL_*": "VALUE", - }, - wildcardPatterns: []string{"LITERAL_\\*"}, - want: EnvironmentVariableMap{ - "LITERAL_*": "VALUE", - }, - }, - { - name: "literal *, then wildcard", - self: EnvironmentVariableMap{ - "LITERAL_*": "VALUE", - "LITERAL_*_ANYTHING": "VALUE", - }, - wildcardPatterns: []string{"LITERAL_\\**"}, - want: EnvironmentVariableMap{ - "LITERAL_*": "VALUE", - "LITERAL_*_ANYTHING": "VALUE", - }, - }, - // Check ! for exclusion. - { - name: "literal leading !", - self: EnvironmentVariableMap{ - "!LITERAL": "VALUE", - }, - wildcardPatterns: []string{"\\!LITERAL"}, - want: EnvironmentVariableMap{ - "!LITERAL": "VALUE", - }, - }, - { - name: "literal ! anywhere else", - self: EnvironmentVariableMap{ - "ANYWHERE!ELSE": "VALUE", - }, - wildcardPatterns: []string{"ANYWHERE!ELSE"}, - want: EnvironmentVariableMap{ - "ANYWHERE!ELSE": "VALUE", - }, - }, - // The following tests are to confirm exclusion behavior. - // They're focused on set difference, not wildcard behavior. - // Wildcard regex construction is identical to inclusions. 
- { - name: "include everything", - self: EnvironmentVariableMap{ - "ALL": "VALUE", - "OF": "VALUE", - "THESE": "VALUE", - "ARE": "VALUE", - "INCLUDED": "VALUE", - }, - wildcardPatterns: []string{"*"}, - want: EnvironmentVariableMap{ - "ALL": "VALUE", - "OF": "VALUE", - "THESE": "VALUE", - "ARE": "VALUE", - "INCLUDED": "VALUE", - }, - }, - { - name: "include everything, exclude everything", - self: EnvironmentVariableMap{ - "ALL": "VALUE", - "OF": "VALUE", - "THESE": "VALUE", - "ARE": "VALUE", - "EXCLUDED": "VALUE", - }, - wildcardPatterns: []string{"*", "!*"}, - want: EnvironmentVariableMap{}, - }, - { - name: "include everything, exclude one", - self: EnvironmentVariableMap{ - "ONE": "VALUE", - "OF": "VALUE", - "THESE": "VALUE", - "IS": "VALUE", - "EXCLUDED": "VALUE", - }, - wildcardPatterns: []string{"*", "!EXCLUDED"}, - want: EnvironmentVariableMap{ - "ONE": "VALUE", - "OF": "VALUE", - "THESE": "VALUE", - "IS": "VALUE", - }, - }, - { - name: "include everything, exclude a prefix", - self: EnvironmentVariableMap{ - "EXCLUDED_SHA": "VALUE", - "EXCLUDED_URL": "VALUE", - "EXCLUDED_USER": "VALUE", - "EXCLUDED_PASS": "VALUE", - "THIS": "VALUE", - "IS": "VALUE", - "INCLUDED": "VALUE", - }, - wildcardPatterns: []string{"*", "!EXCLUDED_*"}, - want: EnvironmentVariableMap{ - "THIS": "VALUE", - "IS": "VALUE", - "INCLUDED": "VALUE", - }, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - got, err := tt.self.FromWildcards(tt.wildcardPatterns) - var rustResult EnvironmentVariableMap - rustResult, rustErr := ffi.FromWildcards(tt.self, tt.wildcardPatterns) - assert.NilError(t, rustErr, "Rust implementation failed.") - assert.NilError(t, err, "Did not fail regexp compile.") - - assert.DeepEqual(t, got, rustResult) - if !reflect.DeepEqual(got, tt.want) { - t.Errorf("GetEnvVarsFromWildcards() = %v, want %v", got, tt.want) - } - }) - } -} diff --git a/cli/internal/ffi/bindings.h b/cli/internal/ffi/bindings.h deleted file mode 100644 index a24e069ee2f96..0000000000000 --- a/cli/internal/ffi/bindings.h +++ /dev/null @@ -1,39 +0,0 @@ -#include <stdarg.h> -#include <stdbool.h> -#include <stdint.h> -#include <stdlib.h> - -typedef struct Buffer { - uint32_t len; - uint8_t *data; -} Buffer; - -void free_buffer(struct Buffer buffer); - -struct Buffer get_turbo_data_dir(void); - -struct Buffer changed_files(struct Buffer buffer); - -struct Buffer previous_content(struct Buffer buffer); - -struct Buffer recursive_copy(struct Buffer buffer); - -struct Buffer verify_signature(struct Buffer buffer); - -struct Buffer get_package_file_hashes(struct Buffer buffer); - -struct Buffer get_hashes_for_files(struct Buffer buffer); - -struct Buffer glob(struct Buffer buffer); - -struct Buffer from_wildcards(struct Buffer buffer); - -struct Buffer get_global_hashable_env_vars(struct Buffer buffer); - -struct Buffer transitive_closure(struct Buffer buf); - -struct Buffer subgraph(struct Buffer buf); - -struct Buffer patches(struct Buffer buf); - -struct Buffer global_change(struct Buffer buf); diff --git a/cli/internal/ffi/ffi.go b/cli/internal/ffi/ffi.go deleted file mode 100644 index 5f50e6e926500..0000000000000 --- a/cli/internal/ffi/ffi.go +++ /dev/null @@ -1,380 +0,0 @@ -package ffi - -// ffi -// -// Please read the notes about safety (marked with `SAFETY`) in both this file, -// and in turborepo-ffi/lib.rs before modifying this file. 
- -// #include "bindings.h" -// -// #cgo darwin,arm64 LDFLAGS: -L${SRCDIR} -lturborepo_ffi_darwin_arm64 -lz -liconv -framework Security -framework CoreFoundation -framework SystemConfiguration -// #cgo darwin,amd64 LDFLAGS: -L${SRCDIR} -lturborepo_ffi_darwin_amd64 -lz -liconv -framework Security -framework CoreFoundation -framework SystemConfiguration -// #cgo linux,arm64,staticbinary LDFLAGS: -L${SRCDIR} -lturborepo_ffi_linux_arm64 -lunwind -lm -// #cgo linux,amd64,staticbinary LDFLAGS: -L${SRCDIR} -lturborepo_ffi_linux_amd64 -lunwind -lm -// #cgo linux,arm64,!staticbinary LDFLAGS: -L${SRCDIR} -lturborepo_ffi_linux_arm64 -lz -lm -// #cgo linux,amd64,!staticbinary LDFLAGS: -L${SRCDIR} -lturborepo_ffi_linux_amd64 -lz -lm -// #cgo windows,amd64 LDFLAGS: -L${SRCDIR} -lturborepo_ffi_windows_amd64 -lole32 -lbcrypt -lws2_32 -luserenv -lntdll -import "C" - -import ( - "errors" - "fmt" - "reflect" - "unsafe" - - ffi_proto "github.com/vercel/turbo/cli/internal/ffi/proto" - "google.golang.org/protobuf/proto" -) - -// Unmarshal consumes a buffer and parses it into a proto.Message -func Unmarshal[M proto.Message](b C.Buffer, c M) error { - bytes := toBytes(b) - if err := proto.Unmarshal(bytes, c); err != nil { - return err - } - - // free the buffer on the rust side - // - // SAFETY: do not use `C.free_buffer` to free a buffer that has been allocated - // on the go side. If you happen to accidentally use the wrong one, you can - // expect a segfault on some platforms. This is the only valid callsite. - C.free_buffer(b) - - return nil -} - -// Marshal consumes a proto.Message and returns a buffer -// -// NOTE: the buffer must be freed by calling `Free` on it -func Marshal[M proto.Message](c M) C.Buffer { - bytes, err := proto.Marshal(c) - if err != nil { - panic(err) - } - - return toBuffer(bytes) -} - -// Free frees a buffer that has been allocated *on the go side*. - // - // SAFETY: this is not the same as `C.free_buffer`, which frees a buffer that -// has been allocated *on the rust side*. If you happen to accidentally use -// the wrong one, you can expect a segfault on some platforms. - // - // EXAMPLE: it is recommended to use this function via a `defer` statement, like so: -// -// reqBuf := Marshal(&req) -// defer reqBuf.Free() -func (c C.Buffer) Free() { - C.free(unsafe.Pointer(c.data)) -} - -// rather than use C.GoBytes, we use this function to avoid copying the bytes, -// since it is going to be immediately Unmarshalled into a proto.Message -// -// SAFETY: go slices contain a pointer to an underlying buffer with a length. -// if the buffer is known to the garbage collector, dropping the last slice will
this memory is owned by the rust side (and is -// not known to the garbage collector), so dropping the slice will do nothing -func toBytes(b C.Buffer) []byte { - var out []byte - - bufLen := (uint32)(b.len) - - sh := (*reflect.SliceHeader)(unsafe.Pointer(&out)) - sh.Data = uintptr(unsafe.Pointer(b.data)) - sh.Len = int(bufLen) - sh.Cap = int(bufLen) - - return out -} - -func toBuffer(bytes []byte) C.Buffer { - b := C.Buffer{} - b.len = C.uint(len(bytes)) - b.data = (*C.uchar)(C.CBytes(bytes)) - return b -} - -// GetTurboDataDir returns the path to the Turbo data directory -func GetTurboDataDir() string { - buffer := C.get_turbo_data_dir() - resp := ffi_proto.TurboDataDirResp{} - if err := Unmarshal(buffer, resp.ProtoReflect().Interface()); err != nil { - panic(err) - } - return resp.Dir -} - -// Go convention is to use an empty string for an uninitialized or null-valued -// string. Rust convention is to use an Option<String> for the same purpose, which -// is encoded on the Go side as *string. This converts between the two. -func stringToRef(s string) *string { - if s == "" { - return nil - } - return &s -} - -// ChangedFiles returns the files changed in between two commits, the workdir and the index, and optionally untracked files -func ChangedFiles(gitRoot string, turboRoot string, fromCommit string, toCommit string) ([]string, error) { - fromCommitRef := stringToRef(fromCommit) - - req := ffi_proto.ChangedFilesReq{ - GitRoot: gitRoot, - FromCommit: fromCommitRef, - ToCommit: toCommit, - TurboRoot: turboRoot, - } - - reqBuf := Marshal(&req) - defer reqBuf.Free() - - respBuf := C.changed_files(reqBuf) - - resp := ffi_proto.ChangedFilesResp{} - if err := Unmarshal(respBuf, resp.ProtoReflect().Interface()); err != nil { - panic(err) - } - if err := resp.GetError(); err != "" { - return nil, errors.New(err) - } - - return resp.GetFiles().GetFiles(), nil -} - -// PreviousContent returns the content of a file at a previous commit -func PreviousContent(gitRoot, fromCommit, filePath string) ([]byte, error) { - req := ffi_proto.PreviousContentReq{ - GitRoot: gitRoot, - FromCommit: fromCommit, - FilePath: filePath, - } - - reqBuf := Marshal(&req) - defer reqBuf.Free() - - respBuf := C.previous_content(reqBuf) - - resp := ffi_proto.PreviousContentResp{} - if err := Unmarshal(respBuf, resp.ProtoReflect().Interface()); err != nil { - panic(err) - } - content := resp.GetContent() - if err := resp.GetError(); err != "" { - return nil, errors.New(err) - } - - return []byte(content), nil -} - -// TransitiveDeps returns the transitive external deps for all provided workspaces -func TransitiveDeps(content []byte, packageManager string, workspaces map[string]map[string]string, resolutions map[string]string) (map[string]*ffi_proto.LockfilePackageList, error) { - var additionalData *ffi_proto.AdditionalBerryData - if resolutions != nil { - additionalData = &ffi_proto.AdditionalBerryData{Resolutions: resolutions} - } - flatWorkspaces := make(map[string]*ffi_proto.PackageDependencyList) - for workspace, deps := range workspaces { - packageDependencyList := make([]*ffi_proto.PackageDependency, len(deps)) - i := 0 - for name, version := range deps { - packageDependencyList[i] = &ffi_proto.PackageDependency{ - Name: name, - Range: version, - } - i++ - } - flatWorkspaces[workspace] = &ffi_proto.PackageDependencyList{List: packageDependencyList} - } - req := ffi_proto.TransitiveDepsRequest{ - Contents: content, - PackageManager: toPackageManager(packageManager), - Workspaces: flatWorkspaces, - Resolutions: additionalData, - } -
reqBuf := Marshal(&req) - resBuf := C.transitive_closure(reqBuf) - reqBuf.Free() - - resp := ffi_proto.TransitiveDepsResponse{} - if err := Unmarshal(resBuf, resp.ProtoReflect().Interface()); err != nil { - panic(err) - } - - if err := resp.GetError(); err != "" { - return nil, errors.New(err) - } - - dependencies := resp.GetDependencies() - return dependencies.GetDependencies(), nil -} - -func toPackageManager(packageManager string) ffi_proto.PackageManager { - switch packageManager { - case "npm": - return ffi_proto.PackageManager_NPM - case "berry": - return ffi_proto.PackageManager_BERRY - case "pnpm": - return ffi_proto.PackageManager_PNPM - case "yarn": - return ffi_proto.PackageManager_YARN - case "bun": - return ffi_proto.PackageManager_BUN - default: - panic(fmt.Sprintf("Invalid package manager string: %s", packageManager)) - } -} - -// GlobalChange checks if there are any differences between lockfiles that would completely invalidate -// the cache. -func GlobalChange(packageManager string, prevContents []byte, currContents []byte) bool { - req := ffi_proto.GlobalChangeRequest{ - PackageManager: toPackageManager(packageManager), - PrevContents: prevContents, - CurrContents: currContents, - } - reqBuf := Marshal(&req) - resBuf := C.global_change(reqBuf) - reqBuf.Free() - - resp := ffi_proto.GlobalChangeResponse{} - if err := Unmarshal(resBuf, resp.ProtoReflect().Interface()); err != nil { - panic(err) - } - - return resp.GetGlobalChange() -} - -// VerifySignature checks that the signature of an artifact matches the expected tag -func VerifySignature(teamID []byte, hash string, artifactBody []byte, expectedTag string, secretKeyOverride []byte) (bool, error) { - req := ffi_proto.VerifySignatureRequest{ - TeamId: teamID, - Hash: hash, - ArtifactBody: artifactBody, - ExpectedTag: expectedTag, - SecretKeyOverride: secretKeyOverride, - } - reqBuf := Marshal(&req) - resBuf := C.verify_signature(reqBuf) - reqBuf.Free() - - resp := ffi_proto.VerifySignatureResponse{} - if err := Unmarshal(resBuf, resp.ProtoReflect().Interface()); err != nil { - panic(err) - } - - if err := resp.GetError(); err != "" { - return false, errors.New(err) - } - - return resp.GetVerified(), nil -} - -// GetPackageFileHashes proxies to rust for hashing the files in a package -func GetPackageFileHashes(rootPath string, packagePath string, inputs []string) (map[string]string, error) { - req := ffi_proto.GetPackageFileHashesRequest{ - TurboRoot: rootPath, - PackagePath: packagePath, - Inputs: inputs, - } - reqBuf := Marshal(&req) - resBuf := C.get_package_file_hashes(reqBuf) - reqBuf.Free() - - resp := ffi_proto.GetPackageFileHashesResponse{} - if err := Unmarshal(resBuf, resp.ProtoReflect().Interface()); err != nil { - panic(err) - } - - if err := resp.GetError(); err != "" { - return nil, errors.New(err) - } - - hashes := resp.GetHashes() - return hashes.GetHashes(), nil -} - -// GetHashesForFiles proxies to rust for hashing a given set of files -func GetHashesForFiles(rootPath string, files []string, allowMissing bool) (map[string]string, error) { - req := ffi_proto.GetHashesForFilesRequest{ - TurboRoot: rootPath, - Files: files, - AllowMissing: allowMissing, - } - reqBuf := Marshal(&req) - resBuf := C.get_hashes_for_files(reqBuf) - reqBuf.Free() - - resp := ffi_proto.GetHashesForFilesResponse{} - if err := Unmarshal(resBuf, resp.ProtoReflect().Interface()); err != nil { - panic(err) - } - - if err := resp.GetError(); err != "" { - return nil, errors.New(err) - } - hashes := resp.GetHashes() - return 
hashes.GetHashes(), nil -} - -// FromWildcards returns an EnvironmentVariableMap containing the variables -// in the environment which match an array of wildcard patterns. -func FromWildcards(environmentMap map[string]string, wildcardPatterns []string) (map[string]string, error) { - if wildcardPatterns == nil { - return nil, nil - } - req := ffi_proto.FromWildcardsRequest{ - EnvVars: &ffi_proto.EnvVarMap{ - Map: environmentMap, - }, - WildcardPatterns: wildcardPatterns, - } - reqBuf := Marshal(&req) - resBuf := C.from_wildcards(reqBuf) - reqBuf.Free() - - resp := ffi_proto.FromWildcardsResponse{} - if err := Unmarshal(resBuf, resp.ProtoReflect().Interface()); err != nil { - panic(err) - } - - if err := resp.GetError(); err != "" { - return nil, errors.New(err) - } - envVarMap := resp.GetEnvVars().GetMap() - // If the map is nil, return an empty map instead of nil - // to match with existing Go code. - if envVarMap == nil { - return map[string]string{}, nil - } - return envVarMap, nil -} - -// GetGlobalHashableEnvVars calculates env var dependencies -func GetGlobalHashableEnvVars(envAtExecutionStart map[string]string, globalEnv []string) (*ffi_proto.DetailedMap, error) { - req := ffi_proto.GetGlobalHashableEnvVarsRequest{ - EnvAtExecutionStart: &ffi_proto.EnvVarMap{Map: envAtExecutionStart}, - GlobalEnv: globalEnv, - } - reqBuf := Marshal(&req) - resBuf := C.get_global_hashable_env_vars(reqBuf) - reqBuf.Free() - - resp := ffi_proto.GetGlobalHashableEnvVarsResponse{} - if err := Unmarshal(resBuf, resp.ProtoReflect().Interface()); err != nil { - panic(err) - } - - if err := resp.GetError(); err != "" { - return nil, errors.New(err) - } - - respDetailedMap := resp.GetDetailedMap() - if respDetailedMap == nil { - return nil, nil - } - - return respDetailedMap, nil -} diff --git a/cli/internal/ffi/ffi_test.go b/cli/internal/ffi/ffi_test.go deleted file mode 100644 index e3441d8ea61cd..0000000000000 --- a/cli/internal/ffi/ffi_test.go +++ /dev/null @@ -1,15 +0,0 @@ -package ffi - -import ( - "testing" - - "gotest.tools/v3/assert" -) - -// This test is here to verify that we correctly handle zero length buffers -// with null data pointers. -func Test_EmptyBuffer(t *testing.T) { - buffer := toBuffer(nil) - bytes := toBytes(buffer) - assert.DeepEqual(t, bytes, []byte{}) -} diff --git a/cli/internal/ffi/proto/messages.pb.go b/cli/internal/ffi/proto/messages.pb.go deleted file mode 100644 index 989c237f4f08c..0000000000000 --- a/cli/internal/ffi/proto/messages.pb.go +++ /dev/null @@ -1,3566 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// versions: -// protoc-gen-go v1.31.0 -// protoc v4.23.4 -// source: turborepo-ffi/messages.proto - -package proto - -import ( - protoreflect "google.golang.org/protobuf/reflect/protoreflect" - protoimpl "google.golang.org/protobuf/runtime/protoimpl" - reflect "reflect" - sync "sync" -) - -const ( - // Verify that this generated code is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) - // Verify that runtime/protoimpl is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) -) - -type PackageManager int32 - -const ( - PackageManager_NPM PackageManager = 0 - PackageManager_BERRY PackageManager = 1 - PackageManager_PNPM PackageManager = 2 - PackageManager_YARN PackageManager = 3 - PackageManager_BUN PackageManager = 4 -) - -// Enum value maps for PackageManager. 
-var ( - PackageManager_name = map[int32]string{ - 0: "NPM", - 1: "BERRY", - 2: "PNPM", - 3: "YARN", - 4: "BUN", - } - PackageManager_value = map[string]int32{ - "NPM": 0, - "BERRY": 1, - "PNPM": 2, - "YARN": 3, - "BUN": 4, - } -) - -func (x PackageManager) Enum() *PackageManager { - p := new(PackageManager) - *p = x - return p -} - -func (x PackageManager) String() string { - return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) -} - -func (PackageManager) Descriptor() protoreflect.EnumDescriptor { - return file_turborepo_ffi_messages_proto_enumTypes[0].Descriptor() -} - -func (PackageManager) Type() protoreflect.EnumType { - return &file_turborepo_ffi_messages_proto_enumTypes[0] -} - -func (x PackageManager) Number() protoreflect.EnumNumber { - return protoreflect.EnumNumber(x) -} - -// Deprecated: Use PackageManager.Descriptor instead. -func (PackageManager) EnumDescriptor() ([]byte, []int) { - return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{0} -} - -type TurboDataDirResp struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Dir string `protobuf:"bytes,1,opt,name=dir,proto3" json:"dir,omitempty"` -} - -func (x *TurboDataDirResp) Reset() { - *x = TurboDataDirResp{} - if protoimpl.UnsafeEnabled { - mi := &file_turborepo_ffi_messages_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *TurboDataDirResp) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*TurboDataDirResp) ProtoMessage() {} - -func (x *TurboDataDirResp) ProtoReflect() protoreflect.Message { - mi := &file_turborepo_ffi_messages_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use TurboDataDirResp.ProtoReflect.Descriptor instead. 
-func (*TurboDataDirResp) Descriptor() ([]byte, []int) { - return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{0} -} - -func (x *TurboDataDirResp) GetDir() string { - if x != nil { - return x.Dir - } - return "" -} - -type GlobReq struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - BasePath string `protobuf:"bytes,1,opt,name=base_path,json=basePath,proto3" json:"base_path,omitempty"` - IncludePatterns []string `protobuf:"bytes,2,rep,name=include_patterns,json=includePatterns,proto3" json:"include_patterns,omitempty"` - ExcludePatterns []string `protobuf:"bytes,3,rep,name=exclude_patterns,json=excludePatterns,proto3" json:"exclude_patterns,omitempty"` - FilesOnly bool `protobuf:"varint,4,opt,name=files_only,json=filesOnly,proto3" json:"files_only,omitempty"` // note that the default for a bool is false -} - -func (x *GlobReq) Reset() { - *x = GlobReq{} - if protoimpl.UnsafeEnabled { - mi := &file_turborepo_ffi_messages_proto_msgTypes[1] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GlobReq) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GlobReq) ProtoMessage() {} - -func (x *GlobReq) ProtoReflect() protoreflect.Message { - mi := &file_turborepo_ffi_messages_proto_msgTypes[1] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GlobReq.ProtoReflect.Descriptor instead. -func (*GlobReq) Descriptor() ([]byte, []int) { - return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{1} -} - -func (x *GlobReq) GetBasePath() string { - if x != nil { - return x.BasePath - } - return "" -} - -func (x *GlobReq) GetIncludePatterns() []string { - if x != nil { - return x.IncludePatterns - } - return nil -} - -func (x *GlobReq) GetExcludePatterns() []string { - if x != nil { - return x.ExcludePatterns - } - return nil -} - -func (x *GlobReq) GetFilesOnly() bool { - if x != nil { - return x.FilesOnly - } - return false -} - -type GlobResp struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Types that are assignable to Response: - // - // *GlobResp_Files - // *GlobResp_Error - Response isGlobResp_Response `protobuf_oneof:"response"` -} - -func (x *GlobResp) Reset() { - *x = GlobResp{} - if protoimpl.UnsafeEnabled { - mi := &file_turborepo_ffi_messages_proto_msgTypes[2] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GlobResp) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GlobResp) ProtoMessage() {} - -func (x *GlobResp) ProtoReflect() protoreflect.Message { - mi := &file_turborepo_ffi_messages_proto_msgTypes[2] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GlobResp.ProtoReflect.Descriptor instead. 
-func (*GlobResp) Descriptor() ([]byte, []int) { - return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{2} -} - -func (m *GlobResp) GetResponse() isGlobResp_Response { - if m != nil { - return m.Response - } - return nil -} - -func (x *GlobResp) GetFiles() *GlobRespList { - if x, ok := x.GetResponse().(*GlobResp_Files); ok { - return x.Files - } - return nil -} - -func (x *GlobResp) GetError() string { - if x, ok := x.GetResponse().(*GlobResp_Error); ok { - return x.Error - } - return "" -} - -type isGlobResp_Response interface { - isGlobResp_Response() -} - -type GlobResp_Files struct { - Files *GlobRespList `protobuf:"bytes,1,opt,name=files,proto3,oneof"` -} - -type GlobResp_Error struct { - Error string `protobuf:"bytes,2,opt,name=error,proto3,oneof"` -} - -func (*GlobResp_Files) isGlobResp_Response() {} - -func (*GlobResp_Error) isGlobResp_Response() {} - -type GlobRespList struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Files []string `protobuf:"bytes,1,rep,name=files,proto3" json:"files,omitempty"` -} - -func (x *GlobRespList) Reset() { - *x = GlobRespList{} - if protoimpl.UnsafeEnabled { - mi := &file_turborepo_ffi_messages_proto_msgTypes[3] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GlobRespList) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GlobRespList) ProtoMessage() {} - -func (x *GlobRespList) ProtoReflect() protoreflect.Message { - mi := &file_turborepo_ffi_messages_proto_msgTypes[3] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GlobRespList.ProtoReflect.Descriptor instead. -func (*GlobRespList) Descriptor() ([]byte, []int) { - return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{3} -} - -func (x *GlobRespList) GetFiles() []string { - if x != nil { - return x.Files - } - return nil -} - -type ChangedFilesReq struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - GitRoot string `protobuf:"bytes,1,opt,name=git_root,json=gitRoot,proto3" json:"git_root,omitempty"` - TurboRoot string `protobuf:"bytes,2,opt,name=turbo_root,json=turboRoot,proto3" json:"turbo_root,omitempty"` - FromCommit *string `protobuf:"bytes,3,opt,name=from_commit,json=fromCommit,proto3,oneof" json:"from_commit,omitempty"` - ToCommit string `protobuf:"bytes,4,opt,name=to_commit,json=toCommit,proto3" json:"to_commit,omitempty"` -} - -func (x *ChangedFilesReq) Reset() { - *x = ChangedFilesReq{} - if protoimpl.UnsafeEnabled { - mi := &file_turborepo_ffi_messages_proto_msgTypes[4] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *ChangedFilesReq) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*ChangedFilesReq) ProtoMessage() {} - -func (x *ChangedFilesReq) ProtoReflect() protoreflect.Message { - mi := &file_turborepo_ffi_messages_proto_msgTypes[4] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use ChangedFilesReq.ProtoReflect.Descriptor instead. 
-func (*ChangedFilesReq) Descriptor() ([]byte, []int) { - return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{4} -} - -func (x *ChangedFilesReq) GetGitRoot() string { - if x != nil { - return x.GitRoot - } - return "" -} - -func (x *ChangedFilesReq) GetTurboRoot() string { - if x != nil { - return x.TurboRoot - } - return "" -} - -func (x *ChangedFilesReq) GetFromCommit() string { - if x != nil && x.FromCommit != nil { - return *x.FromCommit - } - return "" -} - -func (x *ChangedFilesReq) GetToCommit() string { - if x != nil { - return x.ToCommit - } - return "" -} - -type ChangedFilesResp struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Types that are assignable to Response: - // - // *ChangedFilesResp_Files - // *ChangedFilesResp_Error - Response isChangedFilesResp_Response `protobuf_oneof:"response"` -} - -func (x *ChangedFilesResp) Reset() { - *x = ChangedFilesResp{} - if protoimpl.UnsafeEnabled { - mi := &file_turborepo_ffi_messages_proto_msgTypes[5] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *ChangedFilesResp) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*ChangedFilesResp) ProtoMessage() {} - -func (x *ChangedFilesResp) ProtoReflect() protoreflect.Message { - mi := &file_turborepo_ffi_messages_proto_msgTypes[5] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use ChangedFilesResp.ProtoReflect.Descriptor instead. -func (*ChangedFilesResp) Descriptor() ([]byte, []int) { - return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{5} -} - -func (m *ChangedFilesResp) GetResponse() isChangedFilesResp_Response { - if m != nil { - return m.Response - } - return nil -} - -func (x *ChangedFilesResp) GetFiles() *ChangedFilesList { - if x, ok := x.GetResponse().(*ChangedFilesResp_Files); ok { - return x.Files - } - return nil -} - -func (x *ChangedFilesResp) GetError() string { - if x, ok := x.GetResponse().(*ChangedFilesResp_Error); ok { - return x.Error - } - return "" -} - -type isChangedFilesResp_Response interface { - isChangedFilesResp_Response() -} - -type ChangedFilesResp_Files struct { - Files *ChangedFilesList `protobuf:"bytes,1,opt,name=files,proto3,oneof"` -} - -type ChangedFilesResp_Error struct { - Error string `protobuf:"bytes,2,opt,name=error,proto3,oneof"` -} - -func (*ChangedFilesResp_Files) isChangedFilesResp_Response() {} - -func (*ChangedFilesResp_Error) isChangedFilesResp_Response() {} - -type ChangedFilesList struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Files []string `protobuf:"bytes,1,rep,name=files,proto3" json:"files,omitempty"` -} - -func (x *ChangedFilesList) Reset() { - *x = ChangedFilesList{} - if protoimpl.UnsafeEnabled { - mi := &file_turborepo_ffi_messages_proto_msgTypes[6] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *ChangedFilesList) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*ChangedFilesList) ProtoMessage() {} - -func (x *ChangedFilesList) ProtoReflect() protoreflect.Message { - mi := &file_turborepo_ffi_messages_proto_msgTypes[6] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - 
ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use ChangedFilesList.ProtoReflect.Descriptor instead. -func (*ChangedFilesList) Descriptor() ([]byte, []int) { - return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{6} -} - -func (x *ChangedFilesList) GetFiles() []string { - if x != nil { - return x.Files - } - return nil -} - -type PreviousContentReq struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - GitRoot string `protobuf:"bytes,1,opt,name=git_root,json=gitRoot,proto3" json:"git_root,omitempty"` - FromCommit string `protobuf:"bytes,2,opt,name=from_commit,json=fromCommit,proto3" json:"from_commit,omitempty"` - FilePath string `protobuf:"bytes,3,opt,name=file_path,json=filePath,proto3" json:"file_path,omitempty"` -} - -func (x *PreviousContentReq) Reset() { - *x = PreviousContentReq{} - if protoimpl.UnsafeEnabled { - mi := &file_turborepo_ffi_messages_proto_msgTypes[7] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *PreviousContentReq) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*PreviousContentReq) ProtoMessage() {} - -func (x *PreviousContentReq) ProtoReflect() protoreflect.Message { - mi := &file_turborepo_ffi_messages_proto_msgTypes[7] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use PreviousContentReq.ProtoReflect.Descriptor instead. -func (*PreviousContentReq) Descriptor() ([]byte, []int) { - return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{7} -} - -func (x *PreviousContentReq) GetGitRoot() string { - if x != nil { - return x.GitRoot - } - return "" -} - -func (x *PreviousContentReq) GetFromCommit() string { - if x != nil { - return x.FromCommit - } - return "" -} - -func (x *PreviousContentReq) GetFilePath() string { - if x != nil { - return x.FilePath - } - return "" -} - -type PreviousContentResp struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Types that are assignable to Response: - // - // *PreviousContentResp_Content - // *PreviousContentResp_Error - Response isPreviousContentResp_Response `protobuf_oneof:"response"` -} - -func (x *PreviousContentResp) Reset() { - *x = PreviousContentResp{} - if protoimpl.UnsafeEnabled { - mi := &file_turborepo_ffi_messages_proto_msgTypes[8] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *PreviousContentResp) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*PreviousContentResp) ProtoMessage() {} - -func (x *PreviousContentResp) ProtoReflect() protoreflect.Message { - mi := &file_turborepo_ffi_messages_proto_msgTypes[8] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use PreviousContentResp.ProtoReflect.Descriptor instead. 
-func (*PreviousContentResp) Descriptor() ([]byte, []int) { - return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{8} -} - -func (m *PreviousContentResp) GetResponse() isPreviousContentResp_Response { - if m != nil { - return m.Response - } - return nil -} - -func (x *PreviousContentResp) GetContent() []byte { - if x, ok := x.GetResponse().(*PreviousContentResp_Content); ok { - return x.Content - } - return nil -} - -func (x *PreviousContentResp) GetError() string { - if x, ok := x.GetResponse().(*PreviousContentResp_Error); ok { - return x.Error - } - return "" -} - -type isPreviousContentResp_Response interface { - isPreviousContentResp_Response() -} - -type PreviousContentResp_Content struct { - Content []byte `protobuf:"bytes,1,opt,name=content,proto3,oneof"` -} - -type PreviousContentResp_Error struct { - Error string `protobuf:"bytes,2,opt,name=error,proto3,oneof"` -} - -func (*PreviousContentResp_Content) isPreviousContentResp_Response() {} - -func (*PreviousContentResp_Error) isPreviousContentResp_Response() {} - -type PackageDependency struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` - Range string `protobuf:"bytes,2,opt,name=range,proto3" json:"range,omitempty"` -} - -func (x *PackageDependency) Reset() { - *x = PackageDependency{} - if protoimpl.UnsafeEnabled { - mi := &file_turborepo_ffi_messages_proto_msgTypes[9] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *PackageDependency) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*PackageDependency) ProtoMessage() {} - -func (x *PackageDependency) ProtoReflect() protoreflect.Message { - mi := &file_turborepo_ffi_messages_proto_msgTypes[9] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use PackageDependency.ProtoReflect.Descriptor instead. -func (*PackageDependency) Descriptor() ([]byte, []int) { - return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{9} -} - -func (x *PackageDependency) GetName() string { - if x != nil { - return x.Name - } - return "" -} - -func (x *PackageDependency) GetRange() string { - if x != nil { - return x.Range - } - return "" -} - -type PackageDependencyList struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - List []*PackageDependency `protobuf:"bytes,1,rep,name=list,proto3" json:"list,omitempty"` -} - -func (x *PackageDependencyList) Reset() { - *x = PackageDependencyList{} - if protoimpl.UnsafeEnabled { - mi := &file_turborepo_ffi_messages_proto_msgTypes[10] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *PackageDependencyList) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*PackageDependencyList) ProtoMessage() {} - -func (x *PackageDependencyList) ProtoReflect() protoreflect.Message { - mi := &file_turborepo_ffi_messages_proto_msgTypes[10] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use PackageDependencyList.ProtoReflect.Descriptor instead. 
-func (*PackageDependencyList) Descriptor() ([]byte, []int) { - return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{10} -} - -func (x *PackageDependencyList) GetList() []*PackageDependency { - if x != nil { - return x.List - } - return nil -} - -type WorkspaceDependencies struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Dependencies map[string]*LockfilePackageList `protobuf:"bytes,1,rep,name=dependencies,proto3" json:"dependencies,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` -} - -func (x *WorkspaceDependencies) Reset() { - *x = WorkspaceDependencies{} - if protoimpl.UnsafeEnabled { - mi := &file_turborepo_ffi_messages_proto_msgTypes[11] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *WorkspaceDependencies) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*WorkspaceDependencies) ProtoMessage() {} - -func (x *WorkspaceDependencies) ProtoReflect() protoreflect.Message { - mi := &file_turborepo_ffi_messages_proto_msgTypes[11] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use WorkspaceDependencies.ProtoReflect.Descriptor instead. -func (*WorkspaceDependencies) Descriptor() ([]byte, []int) { - return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{11} -} - -func (x *WorkspaceDependencies) GetDependencies() map[string]*LockfilePackageList { - if x != nil { - return x.Dependencies - } - return nil -} - -type TransitiveDepsRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Contents []byte `protobuf:"bytes,1,opt,name=contents,proto3" json:"contents,omitempty"` - PackageManager PackageManager `protobuf:"varint,2,opt,name=package_manager,json=packageManager,proto3,enum=PackageManager" json:"package_manager,omitempty"` - Workspaces map[string]*PackageDependencyList `protobuf:"bytes,3,rep,name=workspaces,proto3" json:"workspaces,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - Resolutions *AdditionalBerryData `protobuf:"bytes,4,opt,name=resolutions,proto3,oneof" json:"resolutions,omitempty"` -} - -func (x *TransitiveDepsRequest) Reset() { - *x = TransitiveDepsRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_turborepo_ffi_messages_proto_msgTypes[12] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *TransitiveDepsRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*TransitiveDepsRequest) ProtoMessage() {} - -func (x *TransitiveDepsRequest) ProtoReflect() protoreflect.Message { - mi := &file_turborepo_ffi_messages_proto_msgTypes[12] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use TransitiveDepsRequest.ProtoReflect.Descriptor instead. 
-func (*TransitiveDepsRequest) Descriptor() ([]byte, []int) { - return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{12} -} - -func (x *TransitiveDepsRequest) GetContents() []byte { - if x != nil { - return x.Contents - } - return nil -} - -func (x *TransitiveDepsRequest) GetPackageManager() PackageManager { - if x != nil { - return x.PackageManager - } - return PackageManager_NPM -} - -func (x *TransitiveDepsRequest) GetWorkspaces() map[string]*PackageDependencyList { - if x != nil { - return x.Workspaces - } - return nil -} - -func (x *TransitiveDepsRequest) GetResolutions() *AdditionalBerryData { - if x != nil { - return x.Resolutions - } - return nil -} - -type TransitiveDepsResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Types that are assignable to Response: - // - // *TransitiveDepsResponse_Dependencies - // *TransitiveDepsResponse_Error - Response isTransitiveDepsResponse_Response `protobuf_oneof:"response"` -} - -func (x *TransitiveDepsResponse) Reset() { - *x = TransitiveDepsResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_turborepo_ffi_messages_proto_msgTypes[13] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *TransitiveDepsResponse) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*TransitiveDepsResponse) ProtoMessage() {} - -func (x *TransitiveDepsResponse) ProtoReflect() protoreflect.Message { - mi := &file_turborepo_ffi_messages_proto_msgTypes[13] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use TransitiveDepsResponse.ProtoReflect.Descriptor instead. 
-func (*TransitiveDepsResponse) Descriptor() ([]byte, []int) { - return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{13} -} - -func (m *TransitiveDepsResponse) GetResponse() isTransitiveDepsResponse_Response { - if m != nil { - return m.Response - } - return nil -} - -func (x *TransitiveDepsResponse) GetDependencies() *WorkspaceDependencies { - if x, ok := x.GetResponse().(*TransitiveDepsResponse_Dependencies); ok { - return x.Dependencies - } - return nil -} - -func (x *TransitiveDepsResponse) GetError() string { - if x, ok := x.GetResponse().(*TransitiveDepsResponse_Error); ok { - return x.Error - } - return "" -} - -type isTransitiveDepsResponse_Response interface { - isTransitiveDepsResponse_Response() -} - -type TransitiveDepsResponse_Dependencies struct { - Dependencies *WorkspaceDependencies `protobuf:"bytes,1,opt,name=dependencies,proto3,oneof"` -} - -type TransitiveDepsResponse_Error struct { - Error string `protobuf:"bytes,2,opt,name=error,proto3,oneof"` -} - -func (*TransitiveDepsResponse_Dependencies) isTransitiveDepsResponse_Response() {} - -func (*TransitiveDepsResponse_Error) isTransitiveDepsResponse_Response() {} - -type AdditionalBerryData struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Resolutions map[string]string `protobuf:"bytes,1,rep,name=resolutions,proto3" json:"resolutions,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` -} - -func (x *AdditionalBerryData) Reset() { - *x = AdditionalBerryData{} - if protoimpl.UnsafeEnabled { - mi := &file_turborepo_ffi_messages_proto_msgTypes[14] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *AdditionalBerryData) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*AdditionalBerryData) ProtoMessage() {} - -func (x *AdditionalBerryData) ProtoReflect() protoreflect.Message { - mi := &file_turborepo_ffi_messages_proto_msgTypes[14] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use AdditionalBerryData.ProtoReflect.Descriptor instead. 
-func (*AdditionalBerryData) Descriptor() ([]byte, []int) { - return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{14} -} - -func (x *AdditionalBerryData) GetResolutions() map[string]string { - if x != nil { - return x.Resolutions - } - return nil -} - -type LockfilePackage struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Key string `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"` - Version string `protobuf:"bytes,2,opt,name=version,proto3" json:"version,omitempty"` - Found bool `protobuf:"varint,3,opt,name=found,proto3" json:"found,omitempty"` -} - -func (x *LockfilePackage) Reset() { - *x = LockfilePackage{} - if protoimpl.UnsafeEnabled { - mi := &file_turborepo_ffi_messages_proto_msgTypes[15] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *LockfilePackage) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*LockfilePackage) ProtoMessage() {} - -func (x *LockfilePackage) ProtoReflect() protoreflect.Message { - mi := &file_turborepo_ffi_messages_proto_msgTypes[15] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use LockfilePackage.ProtoReflect.Descriptor instead. -func (*LockfilePackage) Descriptor() ([]byte, []int) { - return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{15} -} - -func (x *LockfilePackage) GetKey() string { - if x != nil { - return x.Key - } - return "" -} - -func (x *LockfilePackage) GetVersion() string { - if x != nil { - return x.Version - } - return "" -} - -func (x *LockfilePackage) GetFound() bool { - if x != nil { - return x.Found - } - return false -} - -type LockfilePackageList struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - List []*LockfilePackage `protobuf:"bytes,1,rep,name=list,proto3" json:"list,omitempty"` -} - -func (x *LockfilePackageList) Reset() { - *x = LockfilePackageList{} - if protoimpl.UnsafeEnabled { - mi := &file_turborepo_ffi_messages_proto_msgTypes[16] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *LockfilePackageList) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*LockfilePackageList) ProtoMessage() {} - -func (x *LockfilePackageList) ProtoReflect() protoreflect.Message { - mi := &file_turborepo_ffi_messages_proto_msgTypes[16] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use LockfilePackageList.ProtoReflect.Descriptor instead. 
-func (*LockfilePackageList) Descriptor() ([]byte, []int) { - return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{16} -} - -func (x *LockfilePackageList) GetList() []*LockfilePackage { - if x != nil { - return x.List - } - return nil -} - -type SubgraphRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Contents []byte `protobuf:"bytes,1,opt,name=contents,proto3" json:"contents,omitempty"` - PackageManager PackageManager `protobuf:"varint,2,opt,name=package_manager,json=packageManager,proto3,enum=PackageManager" json:"package_manager,omitempty"` - Workspaces []string `protobuf:"bytes,3,rep,name=workspaces,proto3" json:"workspaces,omitempty"` - Packages []string `protobuf:"bytes,4,rep,name=packages,proto3" json:"packages,omitempty"` - Resolutions *AdditionalBerryData `protobuf:"bytes,5,opt,name=resolutions,proto3,oneof" json:"resolutions,omitempty"` -} - -func (x *SubgraphRequest) Reset() { - *x = SubgraphRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_turborepo_ffi_messages_proto_msgTypes[17] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *SubgraphRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*SubgraphRequest) ProtoMessage() {} - -func (x *SubgraphRequest) ProtoReflect() protoreflect.Message { - mi := &file_turborepo_ffi_messages_proto_msgTypes[17] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use SubgraphRequest.ProtoReflect.Descriptor instead. -func (*SubgraphRequest) Descriptor() ([]byte, []int) { - return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{17} -} - -func (x *SubgraphRequest) GetContents() []byte { - if x != nil { - return x.Contents - } - return nil -} - -func (x *SubgraphRequest) GetPackageManager() PackageManager { - if x != nil { - return x.PackageManager - } - return PackageManager_NPM -} - -func (x *SubgraphRequest) GetWorkspaces() []string { - if x != nil { - return x.Workspaces - } - return nil -} - -func (x *SubgraphRequest) GetPackages() []string { - if x != nil { - return x.Packages - } - return nil -} - -func (x *SubgraphRequest) GetResolutions() *AdditionalBerryData { - if x != nil { - return x.Resolutions - } - return nil -} - -type SubgraphResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Types that are assignable to Response: - // - // *SubgraphResponse_Contents - // *SubgraphResponse_Error - Response isSubgraphResponse_Response `protobuf_oneof:"response"` -} - -func (x *SubgraphResponse) Reset() { - *x = SubgraphResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_turborepo_ffi_messages_proto_msgTypes[18] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *SubgraphResponse) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*SubgraphResponse) ProtoMessage() {} - -func (x *SubgraphResponse) ProtoReflect() protoreflect.Message { - mi := &file_turborepo_ffi_messages_proto_msgTypes[18] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use SubgraphResponse.ProtoReflect.Descriptor 
instead. -func (*SubgraphResponse) Descriptor() ([]byte, []int) { - return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{18} -} - -func (m *SubgraphResponse) GetResponse() isSubgraphResponse_Response { - if m != nil { - return m.Response - } - return nil -} - -func (x *SubgraphResponse) GetContents() []byte { - if x, ok := x.GetResponse().(*SubgraphResponse_Contents); ok { - return x.Contents - } - return nil -} - -func (x *SubgraphResponse) GetError() string { - if x, ok := x.GetResponse().(*SubgraphResponse_Error); ok { - return x.Error - } - return "" -} - -type isSubgraphResponse_Response interface { - isSubgraphResponse_Response() -} - -type SubgraphResponse_Contents struct { - Contents []byte `protobuf:"bytes,1,opt,name=contents,proto3,oneof"` -} - -type SubgraphResponse_Error struct { - Error string `protobuf:"bytes,2,opt,name=error,proto3,oneof"` -} - -func (*SubgraphResponse_Contents) isSubgraphResponse_Response() {} - -func (*SubgraphResponse_Error) isSubgraphResponse_Response() {} - -type PatchesRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Contents []byte `protobuf:"bytes,1,opt,name=contents,proto3" json:"contents,omitempty"` - PackageManager PackageManager `protobuf:"varint,2,opt,name=package_manager,json=packageManager,proto3,enum=PackageManager" json:"package_manager,omitempty"` -} - -func (x *PatchesRequest) Reset() { - *x = PatchesRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_turborepo_ffi_messages_proto_msgTypes[19] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *PatchesRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*PatchesRequest) ProtoMessage() {} - -func (x *PatchesRequest) ProtoReflect() protoreflect.Message { - mi := &file_turborepo_ffi_messages_proto_msgTypes[19] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use PatchesRequest.ProtoReflect.Descriptor instead. 
-func (*PatchesRequest) Descriptor() ([]byte, []int) { - return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{19} -} - -func (x *PatchesRequest) GetContents() []byte { - if x != nil { - return x.Contents - } - return nil -} - -func (x *PatchesRequest) GetPackageManager() PackageManager { - if x != nil { - return x.PackageManager - } - return PackageManager_NPM -} - -type PatchesResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Types that are assignable to Response: - // - // *PatchesResponse_Patches - // *PatchesResponse_Error - Response isPatchesResponse_Response `protobuf_oneof:"response"` -} - -func (x *PatchesResponse) Reset() { - *x = PatchesResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_turborepo_ffi_messages_proto_msgTypes[20] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *PatchesResponse) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*PatchesResponse) ProtoMessage() {} - -func (x *PatchesResponse) ProtoReflect() protoreflect.Message { - mi := &file_turborepo_ffi_messages_proto_msgTypes[20] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use PatchesResponse.ProtoReflect.Descriptor instead. -func (*PatchesResponse) Descriptor() ([]byte, []int) { - return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{20} -} - -func (m *PatchesResponse) GetResponse() isPatchesResponse_Response { - if m != nil { - return m.Response - } - return nil -} - -func (x *PatchesResponse) GetPatches() *Patches { - if x, ok := x.GetResponse().(*PatchesResponse_Patches); ok { - return x.Patches - } - return nil -} - -func (x *PatchesResponse) GetError() string { - if x, ok := x.GetResponse().(*PatchesResponse_Error); ok { - return x.Error - } - return "" -} - -type isPatchesResponse_Response interface { - isPatchesResponse_Response() -} - -type PatchesResponse_Patches struct { - Patches *Patches `protobuf:"bytes,1,opt,name=patches,proto3,oneof"` -} - -type PatchesResponse_Error struct { - Error string `protobuf:"bytes,2,opt,name=error,proto3,oneof"` -} - -func (*PatchesResponse_Patches) isPatchesResponse_Response() {} - -func (*PatchesResponse_Error) isPatchesResponse_Response() {} - -type Patches struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Patches []string `protobuf:"bytes,1,rep,name=patches,proto3" json:"patches,omitempty"` -} - -func (x *Patches) Reset() { - *x = Patches{} - if protoimpl.UnsafeEnabled { - mi := &file_turborepo_ffi_messages_proto_msgTypes[21] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *Patches) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*Patches) ProtoMessage() {} - -func (x *Patches) ProtoReflect() protoreflect.Message { - mi := &file_turborepo_ffi_messages_proto_msgTypes[21] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use Patches.ProtoReflect.Descriptor instead. 
-func (*Patches) Descriptor() ([]byte, []int) { - return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{21} -} - -func (x *Patches) GetPatches() []string { - if x != nil { - return x.Patches - } - return nil -} - -type GlobalChangeRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - PackageManager PackageManager `protobuf:"varint,1,opt,name=package_manager,json=packageManager,proto3,enum=PackageManager" json:"package_manager,omitempty"` - PrevContents []byte `protobuf:"bytes,2,opt,name=prev_contents,json=prevContents,proto3" json:"prev_contents,omitempty"` - CurrContents []byte `protobuf:"bytes,3,opt,name=curr_contents,json=currContents,proto3" json:"curr_contents,omitempty"` -} - -func (x *GlobalChangeRequest) Reset() { - *x = GlobalChangeRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_turborepo_ffi_messages_proto_msgTypes[22] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GlobalChangeRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GlobalChangeRequest) ProtoMessage() {} - -func (x *GlobalChangeRequest) ProtoReflect() protoreflect.Message { - mi := &file_turborepo_ffi_messages_proto_msgTypes[22] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GlobalChangeRequest.ProtoReflect.Descriptor instead. -func (*GlobalChangeRequest) Descriptor() ([]byte, []int) { - return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{22} -} - -func (x *GlobalChangeRequest) GetPackageManager() PackageManager { - if x != nil { - return x.PackageManager - } - return PackageManager_NPM -} - -func (x *GlobalChangeRequest) GetPrevContents() []byte { - if x != nil { - return x.PrevContents - } - return nil -} - -func (x *GlobalChangeRequest) GetCurrContents() []byte { - if x != nil { - return x.CurrContents - } - return nil -} - -type GlobalChangeResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - GlobalChange bool `protobuf:"varint,1,opt,name=global_change,json=globalChange,proto3" json:"global_change,omitempty"` -} - -func (x *GlobalChangeResponse) Reset() { - *x = GlobalChangeResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_turborepo_ffi_messages_proto_msgTypes[23] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GlobalChangeResponse) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GlobalChangeResponse) ProtoMessage() {} - -func (x *GlobalChangeResponse) ProtoReflect() protoreflect.Message { - mi := &file_turborepo_ffi_messages_proto_msgTypes[23] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GlobalChangeResponse.ProtoReflect.Descriptor instead. 
-func (*GlobalChangeResponse) Descriptor() ([]byte, []int) { - return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{23} -} - -func (x *GlobalChangeResponse) GetGlobalChange() bool { - if x != nil { - return x.GlobalChange - } - return false -} - -type RecursiveCopyRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Src string `protobuf:"bytes,1,opt,name=src,proto3" json:"src,omitempty"` - Dst string `protobuf:"bytes,2,opt,name=dst,proto3" json:"dst,omitempty"` -} - -func (x *RecursiveCopyRequest) Reset() { - *x = RecursiveCopyRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_turborepo_ffi_messages_proto_msgTypes[24] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *RecursiveCopyRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*RecursiveCopyRequest) ProtoMessage() {} - -func (x *RecursiveCopyRequest) ProtoReflect() protoreflect.Message { - mi := &file_turborepo_ffi_messages_proto_msgTypes[24] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use RecursiveCopyRequest.ProtoReflect.Descriptor instead. -func (*RecursiveCopyRequest) Descriptor() ([]byte, []int) { - return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{24} -} - -func (x *RecursiveCopyRequest) GetSrc() string { - if x != nil { - return x.Src - } - return "" -} - -func (x *RecursiveCopyRequest) GetDst() string { - if x != nil { - return x.Dst - } - return "" -} - -type RecursiveCopyResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Error *string `protobuf:"bytes,1,opt,name=error,proto3,oneof" json:"error,omitempty"` -} - -func (x *RecursiveCopyResponse) Reset() { - *x = RecursiveCopyResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_turborepo_ffi_messages_proto_msgTypes[25] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *RecursiveCopyResponse) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*RecursiveCopyResponse) ProtoMessage() {} - -func (x *RecursiveCopyResponse) ProtoReflect() protoreflect.Message { - mi := &file_turborepo_ffi_messages_proto_msgTypes[25] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use RecursiveCopyResponse.ProtoReflect.Descriptor instead. 
-func (*RecursiveCopyResponse) Descriptor() ([]byte, []int) { - return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{25} -} - -func (x *RecursiveCopyResponse) GetError() string { - if x != nil && x.Error != nil { - return *x.Error - } - return "" -} - -type VerifySignatureRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Hash string `protobuf:"bytes,1,opt,name=hash,proto3" json:"hash,omitempty"` - ArtifactBody []byte `protobuf:"bytes,2,opt,name=artifact_body,json=artifactBody,proto3" json:"artifact_body,omitempty"` - TeamId []byte `protobuf:"bytes,3,opt,name=team_id,json=teamId,proto3" json:"team_id,omitempty"` - ExpectedTag string `protobuf:"bytes,4,opt,name=expected_tag,json=expectedTag,proto3" json:"expected_tag,omitempty"` - SecretKeyOverride []byte `protobuf:"bytes,5,opt,name=secret_key_override,json=secretKeyOverride,proto3,oneof" json:"secret_key_override,omitempty"` -} - -func (x *VerifySignatureRequest) Reset() { - *x = VerifySignatureRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_turborepo_ffi_messages_proto_msgTypes[26] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *VerifySignatureRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*VerifySignatureRequest) ProtoMessage() {} - -func (x *VerifySignatureRequest) ProtoReflect() protoreflect.Message { - mi := &file_turborepo_ffi_messages_proto_msgTypes[26] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use VerifySignatureRequest.ProtoReflect.Descriptor instead. 
-func (*VerifySignatureRequest) Descriptor() ([]byte, []int) { - return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{26} -} - -func (x *VerifySignatureRequest) GetHash() string { - if x != nil { - return x.Hash - } - return "" -} - -func (x *VerifySignatureRequest) GetArtifactBody() []byte { - if x != nil { - return x.ArtifactBody - } - return nil -} - -func (x *VerifySignatureRequest) GetTeamId() []byte { - if x != nil { - return x.TeamId - } - return nil -} - -func (x *VerifySignatureRequest) GetExpectedTag() string { - if x != nil { - return x.ExpectedTag - } - return "" -} - -func (x *VerifySignatureRequest) GetSecretKeyOverride() []byte { - if x != nil { - return x.SecretKeyOverride - } - return nil -} - -type VerifySignatureResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Types that are assignable to Response: - // - // *VerifySignatureResponse_Verified - // *VerifySignatureResponse_Error - Response isVerifySignatureResponse_Response `protobuf_oneof:"response"` -} - -func (x *VerifySignatureResponse) Reset() { - *x = VerifySignatureResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_turborepo_ffi_messages_proto_msgTypes[27] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *VerifySignatureResponse) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*VerifySignatureResponse) ProtoMessage() {} - -func (x *VerifySignatureResponse) ProtoReflect() protoreflect.Message { - mi := &file_turborepo_ffi_messages_proto_msgTypes[27] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use VerifySignatureResponse.ProtoReflect.Descriptor instead. 
-func (*VerifySignatureResponse) Descriptor() ([]byte, []int) { - return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{27} -} - -func (m *VerifySignatureResponse) GetResponse() isVerifySignatureResponse_Response { - if m != nil { - return m.Response - } - return nil -} - -func (x *VerifySignatureResponse) GetVerified() bool { - if x, ok := x.GetResponse().(*VerifySignatureResponse_Verified); ok { - return x.Verified - } - return false -} - -func (x *VerifySignatureResponse) GetError() string { - if x, ok := x.GetResponse().(*VerifySignatureResponse_Error); ok { - return x.Error - } - return "" -} - -type isVerifySignatureResponse_Response interface { - isVerifySignatureResponse_Response() -} - -type VerifySignatureResponse_Verified struct { - Verified bool `protobuf:"varint,1,opt,name=verified,proto3,oneof"` -} - -type VerifySignatureResponse_Error struct { - Error string `protobuf:"bytes,2,opt,name=error,proto3,oneof"` -} - -func (*VerifySignatureResponse_Verified) isVerifySignatureResponse_Response() {} - -func (*VerifySignatureResponse_Error) isVerifySignatureResponse_Response() {} - -type GetPackageFileHashesRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - TurboRoot string `protobuf:"bytes,1,opt,name=turbo_root,json=turboRoot,proto3" json:"turbo_root,omitempty"` - PackagePath string `protobuf:"bytes,2,opt,name=package_path,json=packagePath,proto3" json:"package_path,omitempty"` - Inputs []string `protobuf:"bytes,3,rep,name=inputs,proto3" json:"inputs,omitempty"` -} - -func (x *GetPackageFileHashesRequest) Reset() { - *x = GetPackageFileHashesRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_turborepo_ffi_messages_proto_msgTypes[28] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GetPackageFileHashesRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GetPackageFileHashesRequest) ProtoMessage() {} - -func (x *GetPackageFileHashesRequest) ProtoReflect() protoreflect.Message { - mi := &file_turborepo_ffi_messages_proto_msgTypes[28] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GetPackageFileHashesRequest.ProtoReflect.Descriptor instead. 
-func (*GetPackageFileHashesRequest) Descriptor() ([]byte, []int) { - return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{28} -} - -func (x *GetPackageFileHashesRequest) GetTurboRoot() string { - if x != nil { - return x.TurboRoot - } - return "" -} - -func (x *GetPackageFileHashesRequest) GetPackagePath() string { - if x != nil { - return x.PackagePath - } - return "" -} - -func (x *GetPackageFileHashesRequest) GetInputs() []string { - if x != nil { - return x.Inputs - } - return nil -} - -type GetPackageFileHashesResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Types that are assignable to Response: - // - // *GetPackageFileHashesResponse_Hashes - // *GetPackageFileHashesResponse_Error - Response isGetPackageFileHashesResponse_Response `protobuf_oneof:"response"` -} - -func (x *GetPackageFileHashesResponse) Reset() { - *x = GetPackageFileHashesResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_turborepo_ffi_messages_proto_msgTypes[29] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GetPackageFileHashesResponse) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GetPackageFileHashesResponse) ProtoMessage() {} - -func (x *GetPackageFileHashesResponse) ProtoReflect() protoreflect.Message { - mi := &file_turborepo_ffi_messages_proto_msgTypes[29] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GetPackageFileHashesResponse.ProtoReflect.Descriptor instead. -func (*GetPackageFileHashesResponse) Descriptor() ([]byte, []int) { - return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{29} -} - -func (m *GetPackageFileHashesResponse) GetResponse() isGetPackageFileHashesResponse_Response { - if m != nil { - return m.Response - } - return nil -} - -func (x *GetPackageFileHashesResponse) GetHashes() *FileHashes { - if x, ok := x.GetResponse().(*GetPackageFileHashesResponse_Hashes); ok { - return x.Hashes - } - return nil -} - -func (x *GetPackageFileHashesResponse) GetError() string { - if x, ok := x.GetResponse().(*GetPackageFileHashesResponse_Error); ok { - return x.Error - } - return "" -} - -type isGetPackageFileHashesResponse_Response interface { - isGetPackageFileHashesResponse_Response() -} - -type GetPackageFileHashesResponse_Hashes struct { - Hashes *FileHashes `protobuf:"bytes,1,opt,name=hashes,proto3,oneof"` -} - -type GetPackageFileHashesResponse_Error struct { - Error string `protobuf:"bytes,2,opt,name=error,proto3,oneof"` -} - -func (*GetPackageFileHashesResponse_Hashes) isGetPackageFileHashesResponse_Response() {} - -func (*GetPackageFileHashesResponse_Error) isGetPackageFileHashesResponse_Response() {} - -type GetHashesForFilesRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - TurboRoot string `protobuf:"bytes,1,opt,name=turbo_root,json=turboRoot,proto3" json:"turbo_root,omitempty"` - Files []string `protobuf:"bytes,2,rep,name=files,proto3" json:"files,omitempty"` - AllowMissing bool `protobuf:"varint,3,opt,name=allow_missing,json=allowMissing,proto3" json:"allow_missing,omitempty"` -} - -func (x *GetHashesForFilesRequest) Reset() { - *x = GetHashesForFilesRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_turborepo_ffi_messages_proto_msgTypes[30] - ms := 
protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GetHashesForFilesRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GetHashesForFilesRequest) ProtoMessage() {} - -func (x *GetHashesForFilesRequest) ProtoReflect() protoreflect.Message { - mi := &file_turborepo_ffi_messages_proto_msgTypes[30] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GetHashesForFilesRequest.ProtoReflect.Descriptor instead. -func (*GetHashesForFilesRequest) Descriptor() ([]byte, []int) { - return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{30} -} - -func (x *GetHashesForFilesRequest) GetTurboRoot() string { - if x != nil { - return x.TurboRoot - } - return "" -} - -func (x *GetHashesForFilesRequest) GetFiles() []string { - if x != nil { - return x.Files - } - return nil -} - -func (x *GetHashesForFilesRequest) GetAllowMissing() bool { - if x != nil { - return x.AllowMissing - } - return false -} - -type GetHashesForFilesResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Types that are assignable to Response: - // - // *GetHashesForFilesResponse_Hashes - // *GetHashesForFilesResponse_Error - Response isGetHashesForFilesResponse_Response `protobuf_oneof:"response"` -} - -func (x *GetHashesForFilesResponse) Reset() { - *x = GetHashesForFilesResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_turborepo_ffi_messages_proto_msgTypes[31] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GetHashesForFilesResponse) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GetHashesForFilesResponse) ProtoMessage() {} - -func (x *GetHashesForFilesResponse) ProtoReflect() protoreflect.Message { - mi := &file_turborepo_ffi_messages_proto_msgTypes[31] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GetHashesForFilesResponse.ProtoReflect.Descriptor instead. 
-func (*GetHashesForFilesResponse) Descriptor() ([]byte, []int) { - return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{31} -} - -func (m *GetHashesForFilesResponse) GetResponse() isGetHashesForFilesResponse_Response { - if m != nil { - return m.Response - } - return nil -} - -func (x *GetHashesForFilesResponse) GetHashes() *FileHashes { - if x, ok := x.GetResponse().(*GetHashesForFilesResponse_Hashes); ok { - return x.Hashes - } - return nil -} - -func (x *GetHashesForFilesResponse) GetError() string { - if x, ok := x.GetResponse().(*GetHashesForFilesResponse_Error); ok { - return x.Error - } - return "" -} - -type isGetHashesForFilesResponse_Response interface { - isGetHashesForFilesResponse_Response() -} - -type GetHashesForFilesResponse_Hashes struct { - Hashes *FileHashes `protobuf:"bytes,1,opt,name=hashes,proto3,oneof"` -} - -type GetHashesForFilesResponse_Error struct { - Error string `protobuf:"bytes,2,opt,name=error,proto3,oneof"` -} - -func (*GetHashesForFilesResponse_Hashes) isGetHashesForFilesResponse_Response() {} - -func (*GetHashesForFilesResponse_Error) isGetHashesForFilesResponse_Response() {} - -type FileHashes struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Hashes map[string]string `protobuf:"bytes,1,rep,name=hashes,proto3" json:"hashes,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` -} - -func (x *FileHashes) Reset() { - *x = FileHashes{} - if protoimpl.UnsafeEnabled { - mi := &file_turborepo_ffi_messages_proto_msgTypes[32] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *FileHashes) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*FileHashes) ProtoMessage() {} - -func (x *FileHashes) ProtoReflect() protoreflect.Message { - mi := &file_turborepo_ffi_messages_proto_msgTypes[32] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use FileHashes.ProtoReflect.Descriptor instead. 
-func (*FileHashes) Descriptor() ([]byte, []int) { - return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{32} -} - -func (x *FileHashes) GetHashes() map[string]string { - if x != nil { - return x.Hashes - } - return nil -} - -type FromWildcardsRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - EnvVars *EnvVarMap `protobuf:"bytes,1,opt,name=env_vars,json=envVars,proto3" json:"env_vars,omitempty"` - WildcardPatterns []string `protobuf:"bytes,2,rep,name=wildcard_patterns,json=wildcardPatterns,proto3" json:"wildcard_patterns,omitempty"` -} - -func (x *FromWildcardsRequest) Reset() { - *x = FromWildcardsRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_turborepo_ffi_messages_proto_msgTypes[33] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *FromWildcardsRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*FromWildcardsRequest) ProtoMessage() {} - -func (x *FromWildcardsRequest) ProtoReflect() protoreflect.Message { - mi := &file_turborepo_ffi_messages_proto_msgTypes[33] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use FromWildcardsRequest.ProtoReflect.Descriptor instead. -func (*FromWildcardsRequest) Descriptor() ([]byte, []int) { - return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{33} -} - -func (x *FromWildcardsRequest) GetEnvVars() *EnvVarMap { - if x != nil { - return x.EnvVars - } - return nil -} - -func (x *FromWildcardsRequest) GetWildcardPatterns() []string { - if x != nil { - return x.WildcardPatterns - } - return nil -} - -type FromWildcardsResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Types that are assignable to Response: - // - // *FromWildcardsResponse_EnvVars - // *FromWildcardsResponse_Error - Response isFromWildcardsResponse_Response `protobuf_oneof:"response"` -} - -func (x *FromWildcardsResponse) Reset() { - *x = FromWildcardsResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_turborepo_ffi_messages_proto_msgTypes[34] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *FromWildcardsResponse) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*FromWildcardsResponse) ProtoMessage() {} - -func (x *FromWildcardsResponse) ProtoReflect() protoreflect.Message { - mi := &file_turborepo_ffi_messages_proto_msgTypes[34] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use FromWildcardsResponse.ProtoReflect.Descriptor instead. 
-func (*FromWildcardsResponse) Descriptor() ([]byte, []int) { - return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{34} -} - -func (m *FromWildcardsResponse) GetResponse() isFromWildcardsResponse_Response { - if m != nil { - return m.Response - } - return nil -} - -func (x *FromWildcardsResponse) GetEnvVars() *EnvVarMap { - if x, ok := x.GetResponse().(*FromWildcardsResponse_EnvVars); ok { - return x.EnvVars - } - return nil -} - -func (x *FromWildcardsResponse) GetError() string { - if x, ok := x.GetResponse().(*FromWildcardsResponse_Error); ok { - return x.Error - } - return "" -} - -type isFromWildcardsResponse_Response interface { - isFromWildcardsResponse_Response() -} - -type FromWildcardsResponse_EnvVars struct { - EnvVars *EnvVarMap `protobuf:"bytes,1,opt,name=env_vars,json=envVars,proto3,oneof"` -} - -type FromWildcardsResponse_Error struct { - Error string `protobuf:"bytes,2,opt,name=error,proto3,oneof"` -} - -func (*FromWildcardsResponse_EnvVars) isFromWildcardsResponse_Response() {} - -func (*FromWildcardsResponse_Error) isFromWildcardsResponse_Response() {} - -type EnvVarMap struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Map map[string]string `protobuf:"bytes,1,rep,name=map,proto3" json:"map,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` -} - -func (x *EnvVarMap) Reset() { - *x = EnvVarMap{} - if protoimpl.UnsafeEnabled { - mi := &file_turborepo_ffi_messages_proto_msgTypes[35] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *EnvVarMap) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*EnvVarMap) ProtoMessage() {} - -func (x *EnvVarMap) ProtoReflect() protoreflect.Message { - mi := &file_turborepo_ffi_messages_proto_msgTypes[35] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use EnvVarMap.ProtoReflect.Descriptor instead. -func (*EnvVarMap) Descriptor() ([]byte, []int) { - return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{35} -} - -func (x *EnvVarMap) GetMap() map[string]string { - if x != nil { - return x.Map - } - return nil -} - -type DetailedMap struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - All map[string]string `protobuf:"bytes,1,rep,name=all,proto3" json:"all,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - BySource *BySource `protobuf:"bytes,2,opt,name=by_source,json=bySource,proto3" json:"by_source,omitempty"` -} - -func (x *DetailedMap) Reset() { - *x = DetailedMap{} - if protoimpl.UnsafeEnabled { - mi := &file_turborepo_ffi_messages_proto_msgTypes[36] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *DetailedMap) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*DetailedMap) ProtoMessage() {} - -func (x *DetailedMap) ProtoReflect() protoreflect.Message { - mi := &file_turborepo_ffi_messages_proto_msgTypes[36] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use DetailedMap.ProtoReflect.Descriptor instead. 
-func (*DetailedMap) Descriptor() ([]byte, []int) { - return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{36} -} - -func (x *DetailedMap) GetAll() map[string]string { - if x != nil { - return x.All - } - return nil -} - -func (x *DetailedMap) GetBySource() *BySource { - if x != nil { - return x.BySource - } - return nil -} - -type BySource struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Explicit map[string]string `protobuf:"bytes,1,rep,name=explicit,proto3" json:"explicit,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - Matching map[string]string `protobuf:"bytes,2,rep,name=matching,proto3" json:"matching,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` -} - -func (x *BySource) Reset() { - *x = BySource{} - if protoimpl.UnsafeEnabled { - mi := &file_turborepo_ffi_messages_proto_msgTypes[37] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *BySource) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*BySource) ProtoMessage() {} - -func (x *BySource) ProtoReflect() protoreflect.Message { - mi := &file_turborepo_ffi_messages_proto_msgTypes[37] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use BySource.ProtoReflect.Descriptor instead. -func (*BySource) Descriptor() ([]byte, []int) { - return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{37} -} - -func (x *BySource) GetExplicit() map[string]string { - if x != nil { - return x.Explicit - } - return nil -} - -func (x *BySource) GetMatching() map[string]string { - if x != nil { - return x.Matching - } - return nil -} - -type GetGlobalHashableEnvVarsRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - EnvAtExecutionStart *EnvVarMap `protobuf:"bytes,1,opt,name=env_at_execution_start,json=envAtExecutionStart,proto3" json:"env_at_execution_start,omitempty"` - GlobalEnv []string `protobuf:"bytes,2,rep,name=global_env,json=globalEnv,proto3" json:"global_env,omitempty"` -} - -func (x *GetGlobalHashableEnvVarsRequest) Reset() { - *x = GetGlobalHashableEnvVarsRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_turborepo_ffi_messages_proto_msgTypes[38] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GetGlobalHashableEnvVarsRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GetGlobalHashableEnvVarsRequest) ProtoMessage() {} - -func (x *GetGlobalHashableEnvVarsRequest) ProtoReflect() protoreflect.Message { - mi := &file_turborepo_ffi_messages_proto_msgTypes[38] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GetGlobalHashableEnvVarsRequest.ProtoReflect.Descriptor instead. 
-func (*GetGlobalHashableEnvVarsRequest) Descriptor() ([]byte, []int) { - return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{38} -} - -func (x *GetGlobalHashableEnvVarsRequest) GetEnvAtExecutionStart() *EnvVarMap { - if x != nil { - return x.EnvAtExecutionStart - } - return nil -} - -func (x *GetGlobalHashableEnvVarsRequest) GetGlobalEnv() []string { - if x != nil { - return x.GlobalEnv - } - return nil -} - -type GetGlobalHashableEnvVarsResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Types that are assignable to Response: - // - // *GetGlobalHashableEnvVarsResponse_DetailedMap - // *GetGlobalHashableEnvVarsResponse_Error - Response isGetGlobalHashableEnvVarsResponse_Response `protobuf_oneof:"response"` -} - -func (x *GetGlobalHashableEnvVarsResponse) Reset() { - *x = GetGlobalHashableEnvVarsResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_turborepo_ffi_messages_proto_msgTypes[39] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GetGlobalHashableEnvVarsResponse) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GetGlobalHashableEnvVarsResponse) ProtoMessage() {} - -func (x *GetGlobalHashableEnvVarsResponse) ProtoReflect() protoreflect.Message { - mi := &file_turborepo_ffi_messages_proto_msgTypes[39] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GetGlobalHashableEnvVarsResponse.ProtoReflect.Descriptor instead. -func (*GetGlobalHashableEnvVarsResponse) Descriptor() ([]byte, []int) { - return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{39} -} - -func (m *GetGlobalHashableEnvVarsResponse) GetResponse() isGetGlobalHashableEnvVarsResponse_Response { - if m != nil { - return m.Response - } - return nil -} - -func (x *GetGlobalHashableEnvVarsResponse) GetDetailedMap() *DetailedMap { - if x, ok := x.GetResponse().(*GetGlobalHashableEnvVarsResponse_DetailedMap); ok { - return x.DetailedMap - } - return nil -} - -func (x *GetGlobalHashableEnvVarsResponse) GetError() string { - if x, ok := x.GetResponse().(*GetGlobalHashableEnvVarsResponse_Error); ok { - return x.Error - } - return "" -} - -type isGetGlobalHashableEnvVarsResponse_Response interface { - isGetGlobalHashableEnvVarsResponse_Response() -} - -type GetGlobalHashableEnvVarsResponse_DetailedMap struct { - DetailedMap *DetailedMap `protobuf:"bytes,1,opt,name=detailed_map,json=detailedMap,proto3,oneof"` -} - -type GetGlobalHashableEnvVarsResponse_Error struct { - Error string `protobuf:"bytes,2,opt,name=error,proto3,oneof"` -} - -func (*GetGlobalHashableEnvVarsResponse_DetailedMap) isGetGlobalHashableEnvVarsResponse_Response() {} - -func (*GetGlobalHashableEnvVarsResponse_Error) isGetGlobalHashableEnvVarsResponse_Response() {} - -var File_turborepo_ffi_messages_proto protoreflect.FileDescriptor - -var file_turborepo_ffi_messages_proto_rawDesc = []byte{ - 0x0a, 0x1c, 0x74, 0x75, 0x72, 0x62, 0x6f, 0x72, 0x65, 0x70, 0x6f, 0x2d, 0x66, 0x66, 0x69, 0x2f, - 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x24, - 0x0a, 0x10, 0x54, 0x75, 0x72, 0x62, 0x6f, 0x44, 0x61, 0x74, 0x61, 0x44, 0x69, 0x72, 0x52, 0x65, - 0x73, 0x70, 0x12, 0x10, 0x0a, 0x03, 0x64, 0x69, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x03, 0x64, 0x69, 0x72, 0x22, 0x9b, 0x01, 
0x0a, 0x07, 0x47, 0x6c, 0x6f, 0x62, 0x52, 0x65, 0x71, - 0x12, 0x1b, 0x0a, 0x09, 0x62, 0x61, 0x73, 0x65, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x08, 0x62, 0x61, 0x73, 0x65, 0x50, 0x61, 0x74, 0x68, 0x12, 0x29, 0x0a, - 0x10, 0x69, 0x6e, 0x63, 0x6c, 0x75, 0x64, 0x65, 0x5f, 0x70, 0x61, 0x74, 0x74, 0x65, 0x72, 0x6e, - 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0f, 0x69, 0x6e, 0x63, 0x6c, 0x75, 0x64, 0x65, - 0x50, 0x61, 0x74, 0x74, 0x65, 0x72, 0x6e, 0x73, 0x12, 0x29, 0x0a, 0x10, 0x65, 0x78, 0x63, 0x6c, - 0x75, 0x64, 0x65, 0x5f, 0x70, 0x61, 0x74, 0x74, 0x65, 0x72, 0x6e, 0x73, 0x18, 0x03, 0x20, 0x03, - 0x28, 0x09, 0x52, 0x0f, 0x65, 0x78, 0x63, 0x6c, 0x75, 0x64, 0x65, 0x50, 0x61, 0x74, 0x74, 0x65, - 0x72, 0x6e, 0x73, 0x12, 0x1d, 0x0a, 0x0a, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x5f, 0x6f, 0x6e, 0x6c, - 0x79, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x52, 0x09, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x4f, 0x6e, - 0x6c, 0x79, 0x22, 0x55, 0x0a, 0x08, 0x47, 0x6c, 0x6f, 0x62, 0x52, 0x65, 0x73, 0x70, 0x12, 0x25, - 0x0a, 0x05, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0d, 0x2e, - 0x47, 0x6c, 0x6f, 0x62, 0x52, 0x65, 0x73, 0x70, 0x4c, 0x69, 0x73, 0x74, 0x48, 0x00, 0x52, 0x05, - 0x66, 0x69, 0x6c, 0x65, 0x73, 0x12, 0x16, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x42, 0x0a, 0x0a, - 0x08, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x24, 0x0a, 0x0c, 0x47, 0x6c, 0x6f, - 0x62, 0x52, 0x65, 0x73, 0x70, 0x4c, 0x69, 0x73, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x66, 0x69, 0x6c, - 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x05, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x22, - 0x9e, 0x01, 0x0a, 0x0f, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x64, 0x46, 0x69, 0x6c, 0x65, 0x73, - 0x52, 0x65, 0x71, 0x12, 0x19, 0x0a, 0x08, 0x67, 0x69, 0x74, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x67, 0x69, 0x74, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x1d, - 0x0a, 0x0a, 0x74, 0x75, 0x72, 0x62, 0x6f, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x09, 0x74, 0x75, 0x72, 0x62, 0x6f, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x24, 0x0a, - 0x0b, 0x66, 0x72, 0x6f, 0x6d, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x18, 0x03, 0x20, 0x01, - 0x28, 0x09, 0x48, 0x00, 0x52, 0x0a, 0x66, 0x72, 0x6f, 0x6d, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, - 0x88, 0x01, 0x01, 0x12, 0x1b, 0x0a, 0x09, 0x74, 0x6f, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, - 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x74, 0x6f, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, - 0x42, 0x0e, 0x0a, 0x0c, 0x5f, 0x66, 0x72, 0x6f, 0x6d, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, - 0x22, 0x61, 0x0a, 0x10, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x64, 0x46, 0x69, 0x6c, 0x65, 0x73, - 0x52, 0x65, 0x73, 0x70, 0x12, 0x29, 0x0a, 0x05, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x64, 0x46, 0x69, 0x6c, - 0x65, 0x73, 0x4c, 0x69, 0x73, 0x74, 0x48, 0x00, 0x52, 0x05, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x12, - 0x16, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, - 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x42, 0x0a, 0x0a, 0x08, 0x72, 0x65, 0x73, 0x70, 0x6f, - 0x6e, 0x73, 0x65, 0x22, 0x28, 0x0a, 0x10, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x64, 0x46, 0x69, - 0x6c, 0x65, 0x73, 0x4c, 0x69, 0x73, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x66, 0x69, 0x6c, 0x65, 0x73, - 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x05, 0x66, 0x69, 0x6c, 
0x65, 0x73, 0x22, 0x6d, 0x0a, - 0x12, 0x50, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, - 0x52, 0x65, 0x71, 0x12, 0x19, 0x0a, 0x08, 0x67, 0x69, 0x74, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x67, 0x69, 0x74, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x1f, - 0x0a, 0x0b, 0x66, 0x72, 0x6f, 0x6d, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x0a, 0x66, 0x72, 0x6f, 0x6d, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x12, - 0x1b, 0x0a, 0x09, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x03, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x08, 0x66, 0x69, 0x6c, 0x65, 0x50, 0x61, 0x74, 0x68, 0x22, 0x55, 0x0a, 0x13, - 0x50, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x52, - 0x65, 0x73, 0x70, 0x12, 0x1a, 0x0a, 0x07, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x0c, 0x48, 0x00, 0x52, 0x07, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x12, - 0x16, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, - 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x42, 0x0a, 0x0a, 0x08, 0x72, 0x65, 0x73, 0x70, 0x6f, - 0x6e, 0x73, 0x65, 0x22, 0x3d, 0x0a, 0x11, 0x50, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x44, 0x65, - 0x70, 0x65, 0x6e, 0x64, 0x65, 0x6e, 0x63, 0x79, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, - 0x72, 0x61, 0x6e, 0x67, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x72, 0x61, 0x6e, - 0x67, 0x65, 0x22, 0x3f, 0x0a, 0x15, 0x50, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x44, 0x65, 0x70, - 0x65, 0x6e, 0x64, 0x65, 0x6e, 0x63, 0x79, 0x4c, 0x69, 0x73, 0x74, 0x12, 0x26, 0x0a, 0x04, 0x6c, - 0x69, 0x73, 0x74, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x50, 0x61, 0x63, 0x6b, - 0x61, 0x67, 0x65, 0x44, 0x65, 0x70, 0x65, 0x6e, 0x64, 0x65, 0x6e, 0x63, 0x79, 0x52, 0x04, 0x6c, - 0x69, 0x73, 0x74, 0x22, 0xbc, 0x01, 0x0a, 0x15, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, - 0x65, 0x44, 0x65, 0x70, 0x65, 0x6e, 0x64, 0x65, 0x6e, 0x63, 0x69, 0x65, 0x73, 0x12, 0x4c, 0x0a, - 0x0c, 0x64, 0x65, 0x70, 0x65, 0x6e, 0x64, 0x65, 0x6e, 0x63, 0x69, 0x65, 0x73, 0x18, 0x01, 0x20, - 0x03, 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x44, - 0x65, 0x70, 0x65, 0x6e, 0x64, 0x65, 0x6e, 0x63, 0x69, 0x65, 0x73, 0x2e, 0x44, 0x65, 0x70, 0x65, - 0x6e, 0x64, 0x65, 0x6e, 0x63, 0x69, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0c, 0x64, - 0x65, 0x70, 0x65, 0x6e, 0x64, 0x65, 0x6e, 0x63, 0x69, 0x65, 0x73, 0x1a, 0x55, 0x0a, 0x11, 0x44, - 0x65, 0x70, 0x65, 0x6e, 0x64, 0x65, 0x6e, 0x63, 0x69, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, - 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, - 0x65, 0x79, 0x12, 0x2a, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x0b, 0x32, 0x14, 0x2e, 0x4c, 0x6f, 0x63, 0x6b, 0x66, 0x69, 0x6c, 0x65, 0x50, 0x61, 0x63, 0x6b, - 0x61, 0x67, 0x65, 0x4c, 0x69, 0x73, 0x74, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, - 0x38, 0x01, 0x22, 0xd9, 0x02, 0x0a, 0x15, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x69, 0x74, 0x69, 0x76, - 0x65, 0x44, 0x65, 0x70, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1a, 0x0a, 0x08, - 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x08, - 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x38, 0x0a, 0x0f, 0x70, 0x61, 0x63, 
0x6b, - 0x61, 0x67, 0x65, 0x5f, 0x6d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x0e, 0x32, 0x0f, 0x2e, 0x50, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x4d, 0x61, 0x6e, 0x61, 0x67, - 0x65, 0x72, 0x52, 0x0e, 0x70, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x4d, 0x61, 0x6e, 0x61, 0x67, - 0x65, 0x72, 0x12, 0x46, 0x0a, 0x0a, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x73, - 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x69, 0x74, - 0x69, 0x76, 0x65, 0x44, 0x65, 0x70, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x57, - 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0a, - 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x73, 0x12, 0x3b, 0x0a, 0x0b, 0x72, 0x65, - 0x73, 0x6f, 0x6c, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x14, 0x2e, 0x41, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x42, 0x65, 0x72, 0x72, - 0x79, 0x44, 0x61, 0x74, 0x61, 0x48, 0x00, 0x52, 0x0b, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x75, 0x74, - 0x69, 0x6f, 0x6e, 0x73, 0x88, 0x01, 0x01, 0x1a, 0x55, 0x0a, 0x0f, 0x57, 0x6f, 0x72, 0x6b, 0x73, - 0x70, 0x61, 0x63, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, - 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x2c, 0x0a, 0x05, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x50, 0x61, - 0x63, 0x6b, 0x61, 0x67, 0x65, 0x44, 0x65, 0x70, 0x65, 0x6e, 0x64, 0x65, 0x6e, 0x63, 0x79, 0x4c, - 0x69, 0x73, 0x74, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x42, 0x0e, - 0x0a, 0x0c, 0x5f, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x22, 0x7a, - 0x0a, 0x16, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x69, 0x74, 0x69, 0x76, 0x65, 0x44, 0x65, 0x70, 0x73, - 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x3c, 0x0a, 0x0c, 0x64, 0x65, 0x70, 0x65, - 0x6e, 0x64, 0x65, 0x6e, 0x63, 0x69, 0x65, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, - 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x44, 0x65, 0x70, 0x65, 0x6e, 0x64, - 0x65, 0x6e, 0x63, 0x69, 0x65, 0x73, 0x48, 0x00, 0x52, 0x0c, 0x64, 0x65, 0x70, 0x65, 0x6e, 0x64, - 0x65, 0x6e, 0x63, 0x69, 0x65, 0x73, 0x12, 0x16, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, - 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x42, 0x0a, - 0x0a, 0x08, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x9e, 0x01, 0x0a, 0x13, 0x41, - 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x42, 0x65, 0x72, 0x72, 0x79, 0x44, 0x61, - 0x74, 0x61, 0x12, 0x47, 0x0a, 0x0b, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x75, 0x74, 0x69, 0x6f, 0x6e, - 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x25, 0x2e, 0x41, 0x64, 0x64, 0x69, 0x74, 0x69, - 0x6f, 0x6e, 0x61, 0x6c, 0x42, 0x65, 0x72, 0x72, 0x79, 0x44, 0x61, 0x74, 0x61, 0x2e, 0x52, 0x65, - 0x73, 0x6f, 0x6c, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0b, - 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x1a, 0x3e, 0x0a, 0x10, 0x52, - 0x65, 0x73, 0x6f, 0x6c, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, - 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, - 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x53, 0x0a, 0x0f, 0x4c, - 0x6f, 0x63, 0x6b, 
0x66, 0x69, 0x6c, 0x65, 0x50, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x12, 0x10, - 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, - 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x14, 0x0a, 0x05, 0x66, 0x6f, - 0x75, 0x6e, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x05, 0x66, 0x6f, 0x75, 0x6e, 0x64, - 0x22, 0x3b, 0x0a, 0x13, 0x4c, 0x6f, 0x63, 0x6b, 0x66, 0x69, 0x6c, 0x65, 0x50, 0x61, 0x63, 0x6b, - 0x61, 0x67, 0x65, 0x4c, 0x69, 0x73, 0x74, 0x12, 0x24, 0x0a, 0x04, 0x6c, 0x69, 0x73, 0x74, 0x18, - 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x4c, 0x6f, 0x63, 0x6b, 0x66, 0x69, 0x6c, 0x65, - 0x50, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x52, 0x04, 0x6c, 0x69, 0x73, 0x74, 0x22, 0xf0, 0x01, - 0x0a, 0x0f, 0x53, 0x75, 0x62, 0x67, 0x72, 0x61, 0x70, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x12, 0x1a, 0x0a, 0x08, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x0c, 0x52, 0x08, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x38, 0x0a, - 0x0f, 0x70, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x5f, 0x6d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x72, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x0f, 0x2e, 0x50, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, - 0x4d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x72, 0x52, 0x0e, 0x70, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, - 0x4d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x72, 0x12, 0x1e, 0x0a, 0x0a, 0x77, 0x6f, 0x72, 0x6b, 0x73, - 0x70, 0x61, 0x63, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0a, 0x77, 0x6f, 0x72, - 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x73, 0x12, 0x1a, 0x0a, 0x08, 0x70, 0x61, 0x63, 0x6b, 0x61, - 0x67, 0x65, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x09, 0x52, 0x08, 0x70, 0x61, 0x63, 0x6b, 0x61, - 0x67, 0x65, 0x73, 0x12, 0x3b, 0x0a, 0x0b, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x75, 0x74, 0x69, 0x6f, - 0x6e, 0x73, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x41, 0x64, 0x64, 0x69, 0x74, - 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x42, 0x65, 0x72, 0x72, 0x79, 0x44, 0x61, 0x74, 0x61, 0x48, 0x00, - 0x52, 0x0b, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x88, 0x01, 0x01, - 0x42, 0x0e, 0x0a, 0x0c, 0x5f, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x73, - 0x22, 0x54, 0x0a, 0x10, 0x53, 0x75, 0x62, 0x67, 0x72, 0x61, 0x70, 0x68, 0x52, 0x65, 0x73, 0x70, - 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1c, 0x0a, 0x08, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x48, 0x00, 0x52, 0x08, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, - 0x74, 0x73, 0x12, 0x16, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x09, 0x48, 0x00, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x42, 0x0a, 0x0a, 0x08, 0x72, 0x65, - 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x66, 0x0a, 0x0e, 0x50, 0x61, 0x74, 0x63, 0x68, 0x65, - 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1a, 0x0a, 0x08, 0x63, 0x6f, 0x6e, 0x74, - 0x65, 0x6e, 0x74, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x08, 0x63, 0x6f, 0x6e, 0x74, - 0x65, 0x6e, 0x74, 0x73, 0x12, 0x38, 0x0a, 0x0f, 0x70, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x5f, - 0x6d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x0f, 0x2e, - 0x50, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x4d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x72, 0x52, 0x0e, - 0x70, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x4d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x72, 0x22, 0x5b, - 0x0a, 0x0f, 0x50, 0x61, 0x74, 0x63, 0x68, 
0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, - 0x65, 0x12, 0x24, 0x0a, 0x07, 0x70, 0x61, 0x74, 0x63, 0x68, 0x65, 0x73, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x08, 0x2e, 0x50, 0x61, 0x74, 0x63, 0x68, 0x65, 0x73, 0x48, 0x00, 0x52, 0x07, - 0x70, 0x61, 0x74, 0x63, 0x68, 0x65, 0x73, 0x12, 0x16, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x42, - 0x0a, 0x0a, 0x08, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x23, 0x0a, 0x07, 0x50, - 0x61, 0x74, 0x63, 0x68, 0x65, 0x73, 0x12, 0x18, 0x0a, 0x07, 0x70, 0x61, 0x74, 0x63, 0x68, 0x65, - 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x07, 0x70, 0x61, 0x74, 0x63, 0x68, 0x65, 0x73, - 0x22, 0x99, 0x01, 0x0a, 0x13, 0x47, 0x6c, 0x6f, 0x62, 0x61, 0x6c, 0x43, 0x68, 0x61, 0x6e, 0x67, - 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x38, 0x0a, 0x0f, 0x70, 0x61, 0x63, 0x6b, - 0x61, 0x67, 0x65, 0x5f, 0x6d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x0e, 0x32, 0x0f, 0x2e, 0x50, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x4d, 0x61, 0x6e, 0x61, 0x67, - 0x65, 0x72, 0x52, 0x0e, 0x70, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x4d, 0x61, 0x6e, 0x61, 0x67, - 0x65, 0x72, 0x12, 0x23, 0x0a, 0x0d, 0x70, 0x72, 0x65, 0x76, 0x5f, 0x63, 0x6f, 0x6e, 0x74, 0x65, - 0x6e, 0x74, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0c, 0x70, 0x72, 0x65, 0x76, 0x43, - 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x23, 0x0a, 0x0d, 0x63, 0x75, 0x72, 0x72, 0x5f, - 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0c, - 0x63, 0x75, 0x72, 0x72, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x22, 0x3b, 0x0a, 0x14, - 0x47, 0x6c, 0x6f, 0x62, 0x61, 0x6c, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x52, 0x65, 0x73, 0x70, - 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x23, 0x0a, 0x0d, 0x67, 0x6c, 0x6f, 0x62, 0x61, 0x6c, 0x5f, 0x63, - 0x68, 0x61, 0x6e, 0x67, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0c, 0x67, 0x6c, 0x6f, - 0x62, 0x61, 0x6c, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x22, 0x3a, 0x0a, 0x14, 0x52, 0x65, 0x63, - 0x75, 0x72, 0x73, 0x69, 0x76, 0x65, 0x43, 0x6f, 0x70, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x12, 0x10, 0x0a, 0x03, 0x73, 0x72, 0x63, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, - 0x73, 0x72, 0x63, 0x12, 0x10, 0x0a, 0x03, 0x64, 0x73, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x03, 0x64, 0x73, 0x74, 0x22, 0x3c, 0x0a, 0x15, 0x52, 0x65, 0x63, 0x75, 0x72, 0x73, 0x69, - 0x76, 0x65, 0x43, 0x6f, 0x70, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x19, - 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, - 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x88, 0x01, 0x01, 0x42, 0x08, 0x0a, 0x06, 0x5f, 0x65, 0x72, - 0x72, 0x6f, 0x72, 0x22, 0xda, 0x01, 0x0a, 0x16, 0x56, 0x65, 0x72, 0x69, 0x66, 0x79, 0x53, 0x69, - 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x12, - 0x0a, 0x04, 0x68, 0x61, 0x73, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x68, 0x61, - 0x73, 0x68, 0x12, 0x23, 0x0a, 0x0d, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x5f, 0x62, - 0x6f, 0x64, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0c, 0x61, 0x72, 0x74, 0x69, 0x66, - 0x61, 0x63, 0x74, 0x42, 0x6f, 0x64, 0x79, 0x12, 0x17, 0x0a, 0x07, 0x74, 0x65, 0x61, 0x6d, 0x5f, - 0x69, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x06, 0x74, 0x65, 0x61, 0x6d, 0x49, 0x64, - 0x12, 0x21, 0x0a, 0x0c, 0x65, 0x78, 0x70, 0x65, 0x63, 0x74, 0x65, 
0x64, 0x5f, 0x74, 0x61, 0x67, - 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x65, 0x78, 0x70, 0x65, 0x63, 0x74, 0x65, 0x64, - 0x54, 0x61, 0x67, 0x12, 0x33, 0x0a, 0x13, 0x73, 0x65, 0x63, 0x72, 0x65, 0x74, 0x5f, 0x6b, 0x65, - 0x79, 0x5f, 0x6f, 0x76, 0x65, 0x72, 0x72, 0x69, 0x64, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0c, - 0x48, 0x00, 0x52, 0x11, 0x73, 0x65, 0x63, 0x72, 0x65, 0x74, 0x4b, 0x65, 0x79, 0x4f, 0x76, 0x65, - 0x72, 0x72, 0x69, 0x64, 0x65, 0x88, 0x01, 0x01, 0x42, 0x16, 0x0a, 0x14, 0x5f, 0x73, 0x65, 0x63, - 0x72, 0x65, 0x74, 0x5f, 0x6b, 0x65, 0x79, 0x5f, 0x6f, 0x76, 0x65, 0x72, 0x72, 0x69, 0x64, 0x65, - 0x22, 0x5b, 0x0a, 0x17, 0x56, 0x65, 0x72, 0x69, 0x66, 0x79, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x74, - 0x75, 0x72, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1c, 0x0a, 0x08, 0x76, - 0x65, 0x72, 0x69, 0x66, 0x69, 0x65, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x48, 0x00, 0x52, - 0x08, 0x76, 0x65, 0x72, 0x69, 0x66, 0x69, 0x65, 0x64, 0x12, 0x16, 0x0a, 0x05, 0x65, 0x72, 0x72, - 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, - 0x72, 0x42, 0x0a, 0x0a, 0x08, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x77, 0x0a, - 0x1b, 0x47, 0x65, 0x74, 0x50, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x46, 0x69, 0x6c, 0x65, 0x48, - 0x61, 0x73, 0x68, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1d, 0x0a, 0x0a, - 0x74, 0x75, 0x72, 0x62, 0x6f, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x09, 0x74, 0x75, 0x72, 0x62, 0x6f, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x21, 0x0a, 0x0c, 0x70, - 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x0b, 0x70, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x50, 0x61, 0x74, 0x68, 0x12, 0x16, - 0x0a, 0x06, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x09, 0x52, 0x06, - 0x69, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x22, 0x69, 0x0a, 0x1c, 0x47, 0x65, 0x74, 0x50, 0x61, 0x63, - 0x6b, 0x61, 0x67, 0x65, 0x46, 0x69, 0x6c, 0x65, 0x48, 0x61, 0x73, 0x68, 0x65, 0x73, 0x52, 0x65, - 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x25, 0x0a, 0x06, 0x68, 0x61, 0x73, 0x68, 0x65, 0x73, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x46, 0x69, 0x6c, 0x65, 0x48, 0x61, 0x73, - 0x68, 0x65, 0x73, 0x48, 0x00, 0x52, 0x06, 0x68, 0x61, 0x73, 0x68, 0x65, 0x73, 0x12, 0x16, 0x0a, - 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x05, - 0x65, 0x72, 0x72, 0x6f, 0x72, 0x42, 0x0a, 0x0a, 0x08, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, - 0x65, 0x22, 0x74, 0x0a, 0x18, 0x47, 0x65, 0x74, 0x48, 0x61, 0x73, 0x68, 0x65, 0x73, 0x46, 0x6f, - 0x72, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1d, 0x0a, - 0x0a, 0x74, 0x75, 0x72, 0x62, 0x6f, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x09, 0x74, 0x75, 0x72, 0x62, 0x6f, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x14, 0x0a, 0x05, - 0x66, 0x69, 0x6c, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x05, 0x66, 0x69, 0x6c, - 0x65, 0x73, 0x12, 0x23, 0x0a, 0x0d, 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x5f, 0x6d, 0x69, 0x73, 0x73, - 0x69, 0x6e, 0x67, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0c, 0x61, 0x6c, 0x6c, 0x6f, 0x77, - 0x4d, 0x69, 0x73, 0x73, 0x69, 0x6e, 0x67, 0x22, 0x66, 0x0a, 0x19, 0x47, 0x65, 0x74, 0x48, 0x61, - 0x73, 0x68, 0x65, 0x73, 0x46, 0x6f, 0x72, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, - 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x25, 0x0a, 0x06, 0x68, 0x61, 0x73, 0x68, 0x65, 0x73, 0x18, 
0x01, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x46, 0x69, 0x6c, 0x65, 0x48, 0x61, 0x73, 0x68, 0x65, - 0x73, 0x48, 0x00, 0x52, 0x06, 0x68, 0x61, 0x73, 0x68, 0x65, 0x73, 0x12, 0x16, 0x0a, 0x05, 0x65, - 0x72, 0x72, 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x05, 0x65, 0x72, - 0x72, 0x6f, 0x72, 0x42, 0x0a, 0x0a, 0x08, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, - 0x78, 0x0a, 0x0a, 0x46, 0x69, 0x6c, 0x65, 0x48, 0x61, 0x73, 0x68, 0x65, 0x73, 0x12, 0x2f, 0x0a, - 0x06, 0x68, 0x61, 0x73, 0x68, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x17, 0x2e, - 0x46, 0x69, 0x6c, 0x65, 0x48, 0x61, 0x73, 0x68, 0x65, 0x73, 0x2e, 0x48, 0x61, 0x73, 0x68, 0x65, - 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x06, 0x68, 0x61, 0x73, 0x68, 0x65, 0x73, 0x1a, 0x39, - 0x0a, 0x0b, 0x48, 0x61, 0x73, 0x68, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, - 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, - 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x6a, 0x0a, 0x14, 0x46, 0x72, 0x6f, - 0x6d, 0x57, 0x69, 0x6c, 0x64, 0x63, 0x61, 0x72, 0x64, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x12, 0x25, 0x0a, 0x08, 0x65, 0x6e, 0x76, 0x5f, 0x76, 0x61, 0x72, 0x73, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x0a, 0x2e, 0x45, 0x6e, 0x76, 0x56, 0x61, 0x72, 0x4d, 0x61, 0x70, 0x52, - 0x07, 0x65, 0x6e, 0x76, 0x56, 0x61, 0x72, 0x73, 0x12, 0x2b, 0x0a, 0x11, 0x77, 0x69, 0x6c, 0x64, - 0x63, 0x61, 0x72, 0x64, 0x5f, 0x70, 0x61, 0x74, 0x74, 0x65, 0x72, 0x6e, 0x73, 0x18, 0x02, 0x20, - 0x03, 0x28, 0x09, 0x52, 0x10, 0x77, 0x69, 0x6c, 0x64, 0x63, 0x61, 0x72, 0x64, 0x50, 0x61, 0x74, - 0x74, 0x65, 0x72, 0x6e, 0x73, 0x22, 0x64, 0x0a, 0x15, 0x46, 0x72, 0x6f, 0x6d, 0x57, 0x69, 0x6c, - 0x64, 0x63, 0x61, 0x72, 0x64, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x27, - 0x0a, 0x08, 0x65, 0x6e, 0x76, 0x5f, 0x76, 0x61, 0x72, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, - 0x32, 0x0a, 0x2e, 0x45, 0x6e, 0x76, 0x56, 0x61, 0x72, 0x4d, 0x61, 0x70, 0x48, 0x00, 0x52, 0x07, - 0x65, 0x6e, 0x76, 0x56, 0x61, 0x72, 0x73, 0x12, 0x16, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x42, - 0x0a, 0x0a, 0x08, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x6a, 0x0a, 0x09, 0x45, - 0x6e, 0x76, 0x56, 0x61, 0x72, 0x4d, 0x61, 0x70, 0x12, 0x25, 0x0a, 0x03, 0x6d, 0x61, 0x70, 0x18, - 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x45, 0x6e, 0x76, 0x56, 0x61, 0x72, 0x4d, 0x61, - 0x70, 0x2e, 0x4d, 0x61, 0x70, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x03, 0x6d, 0x61, 0x70, 0x1a, - 0x36, 0x0a, 0x08, 0x4d, 0x61, 0x70, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, - 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, - 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x96, 0x01, 0x0a, 0x0b, 0x44, 0x65, 0x74, 0x61, - 0x69, 0x6c, 0x65, 0x64, 0x4d, 0x61, 0x70, 0x12, 0x27, 0x0a, 0x03, 0x61, 0x6c, 0x6c, 0x18, 0x01, - 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x4d, - 0x61, 0x70, 0x2e, 0x41, 0x6c, 0x6c, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x03, 0x61, 0x6c, 0x6c, - 0x12, 0x26, 0x0a, 0x09, 0x62, 0x79, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x0b, 
0x32, 0x09, 0x2e, 0x42, 0x79, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x08, - 0x62, 0x79, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x1a, 0x36, 0x0a, 0x08, 0x41, 0x6c, 0x6c, 0x45, - 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, - 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, - 0x22, 0xee, 0x01, 0x0a, 0x08, 0x42, 0x79, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x33, 0x0a, - 0x08, 0x65, 0x78, 0x70, 0x6c, 0x69, 0x63, 0x69, 0x74, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, - 0x17, 0x2e, 0x42, 0x79, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x45, 0x78, 0x70, 0x6c, 0x69, - 0x63, 0x69, 0x74, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x08, 0x65, 0x78, 0x70, 0x6c, 0x69, 0x63, - 0x69, 0x74, 0x12, 0x33, 0x0a, 0x08, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x69, 0x6e, 0x67, 0x18, 0x02, - 0x20, 0x03, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x42, 0x79, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, - 0x4d, 0x61, 0x74, 0x63, 0x68, 0x69, 0x6e, 0x67, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x08, 0x6d, - 0x61, 0x74, 0x63, 0x68, 0x69, 0x6e, 0x67, 0x1a, 0x3b, 0x0a, 0x0d, 0x45, 0x78, 0x70, 0x6c, 0x69, - 0x63, 0x69, 0x74, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x3b, 0x0a, 0x0d, 0x4d, 0x61, 0x74, 0x63, 0x68, 0x69, 0x6e, 0x67, - 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, - 0x01, 0x22, 0x81, 0x01, 0x0a, 0x1f, 0x47, 0x65, 0x74, 0x47, 0x6c, 0x6f, 0x62, 0x61, 0x6c, 0x48, - 0x61, 0x73, 0x68, 0x61, 0x62, 0x6c, 0x65, 0x45, 0x6e, 0x76, 0x56, 0x61, 0x72, 0x73, 0x52, 0x65, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x3f, 0x0a, 0x16, 0x65, 0x6e, 0x76, 0x5f, 0x61, 0x74, 0x5f, - 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x74, 0x61, 0x72, 0x74, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0a, 0x2e, 0x45, 0x6e, 0x76, 0x56, 0x61, 0x72, 0x4d, 0x61, - 0x70, 0x52, 0x13, 0x65, 0x6e, 0x76, 0x41, 0x74, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, - 0x6e, 0x53, 0x74, 0x61, 0x72, 0x74, 0x12, 0x1d, 0x0a, 0x0a, 0x67, 0x6c, 0x6f, 0x62, 0x61, 0x6c, - 0x5f, 0x65, 0x6e, 0x76, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x09, 0x67, 0x6c, 0x6f, 0x62, - 0x61, 0x6c, 0x45, 0x6e, 0x76, 0x22, 0x79, 0x0a, 0x20, 0x47, 0x65, 0x74, 0x47, 0x6c, 0x6f, 0x62, - 0x61, 0x6c, 0x48, 0x61, 0x73, 0x68, 0x61, 0x62, 0x6c, 0x65, 0x45, 0x6e, 0x76, 0x56, 0x61, 0x72, - 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x31, 0x0a, 0x0c, 0x64, 0x65, 0x74, - 0x61, 0x69, 0x6c, 0x65, 0x64, 0x5f, 0x6d, 0x61, 0x70, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x0c, 0x2e, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x4d, 0x61, 0x70, 0x48, 0x00, 0x52, - 0x0b, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x4d, 0x61, 0x70, 0x12, 0x16, 0x0a, 0x05, - 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x05, 0x65, - 0x72, 0x72, 0x6f, 0x72, 0x42, 0x0a, 0x0a, 0x08, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, - 0x2a, 0x41, 0x0a, 0x0e, 0x50, 0x61, 0x63, 
0x6b, 0x61, 0x67, 0x65, 0x4d, 0x61, 0x6e, 0x61, 0x67, - 0x65, 0x72, 0x12, 0x07, 0x0a, 0x03, 0x4e, 0x50, 0x4d, 0x10, 0x00, 0x12, 0x09, 0x0a, 0x05, 0x42, - 0x45, 0x52, 0x52, 0x59, 0x10, 0x01, 0x12, 0x08, 0x0a, 0x04, 0x50, 0x4e, 0x50, 0x4d, 0x10, 0x02, - 0x12, 0x08, 0x0a, 0x04, 0x59, 0x41, 0x52, 0x4e, 0x10, 0x03, 0x12, 0x07, 0x0a, 0x03, 0x42, 0x55, - 0x4e, 0x10, 0x04, 0x42, 0x0b, 0x5a, 0x09, 0x66, 0x66, 0x69, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, -} - -var ( - file_turborepo_ffi_messages_proto_rawDescOnce sync.Once - file_turborepo_ffi_messages_proto_rawDescData = file_turborepo_ffi_messages_proto_rawDesc -) - -func file_turborepo_ffi_messages_proto_rawDescGZIP() []byte { - file_turborepo_ffi_messages_proto_rawDescOnce.Do(func() { - file_turborepo_ffi_messages_proto_rawDescData = protoimpl.X.CompressGZIP(file_turborepo_ffi_messages_proto_rawDescData) - }) - return file_turborepo_ffi_messages_proto_rawDescData -} - -var file_turborepo_ffi_messages_proto_enumTypes = make([]protoimpl.EnumInfo, 1) -var file_turborepo_ffi_messages_proto_msgTypes = make([]protoimpl.MessageInfo, 48) -var file_turborepo_ffi_messages_proto_goTypes = []interface{}{ - (PackageManager)(0), // 0: PackageManager - (*TurboDataDirResp)(nil), // 1: TurboDataDirResp - (*GlobReq)(nil), // 2: GlobReq - (*GlobResp)(nil), // 3: GlobResp - (*GlobRespList)(nil), // 4: GlobRespList - (*ChangedFilesReq)(nil), // 5: ChangedFilesReq - (*ChangedFilesResp)(nil), // 6: ChangedFilesResp - (*ChangedFilesList)(nil), // 7: ChangedFilesList - (*PreviousContentReq)(nil), // 8: PreviousContentReq - (*PreviousContentResp)(nil), // 9: PreviousContentResp - (*PackageDependency)(nil), // 10: PackageDependency - (*PackageDependencyList)(nil), // 11: PackageDependencyList - (*WorkspaceDependencies)(nil), // 12: WorkspaceDependencies - (*TransitiveDepsRequest)(nil), // 13: TransitiveDepsRequest - (*TransitiveDepsResponse)(nil), // 14: TransitiveDepsResponse - (*AdditionalBerryData)(nil), // 15: AdditionalBerryData - (*LockfilePackage)(nil), // 16: LockfilePackage - (*LockfilePackageList)(nil), // 17: LockfilePackageList - (*SubgraphRequest)(nil), // 18: SubgraphRequest - (*SubgraphResponse)(nil), // 19: SubgraphResponse - (*PatchesRequest)(nil), // 20: PatchesRequest - (*PatchesResponse)(nil), // 21: PatchesResponse - (*Patches)(nil), // 22: Patches - (*GlobalChangeRequest)(nil), // 23: GlobalChangeRequest - (*GlobalChangeResponse)(nil), // 24: GlobalChangeResponse - (*RecursiveCopyRequest)(nil), // 25: RecursiveCopyRequest - (*RecursiveCopyResponse)(nil), // 26: RecursiveCopyResponse - (*VerifySignatureRequest)(nil), // 27: VerifySignatureRequest - (*VerifySignatureResponse)(nil), // 28: VerifySignatureResponse - (*GetPackageFileHashesRequest)(nil), // 29: GetPackageFileHashesRequest - (*GetPackageFileHashesResponse)(nil), // 30: GetPackageFileHashesResponse - (*GetHashesForFilesRequest)(nil), // 31: GetHashesForFilesRequest - (*GetHashesForFilesResponse)(nil), // 32: GetHashesForFilesResponse - (*FileHashes)(nil), // 33: FileHashes - (*FromWildcardsRequest)(nil), // 34: FromWildcardsRequest - (*FromWildcardsResponse)(nil), // 35: FromWildcardsResponse - (*EnvVarMap)(nil), // 36: EnvVarMap - (*DetailedMap)(nil), // 37: DetailedMap - (*BySource)(nil), // 38: BySource - (*GetGlobalHashableEnvVarsRequest)(nil), // 39: GetGlobalHashableEnvVarsRequest - (*GetGlobalHashableEnvVarsResponse)(nil), // 40: GetGlobalHashableEnvVarsResponse - nil, // 41: WorkspaceDependencies.DependenciesEntry - nil, // 42: 
TransitiveDepsRequest.WorkspacesEntry - nil, // 43: AdditionalBerryData.ResolutionsEntry - nil, // 44: FileHashes.HashesEntry - nil, // 45: EnvVarMap.MapEntry - nil, // 46: DetailedMap.AllEntry - nil, // 47: BySource.ExplicitEntry - nil, // 48: BySource.MatchingEntry -} -var file_turborepo_ffi_messages_proto_depIdxs = []int32{ - 4, // 0: GlobResp.files:type_name -> GlobRespList - 7, // 1: ChangedFilesResp.files:type_name -> ChangedFilesList - 10, // 2: PackageDependencyList.list:type_name -> PackageDependency - 41, // 3: WorkspaceDependencies.dependencies:type_name -> WorkspaceDependencies.DependenciesEntry - 0, // 4: TransitiveDepsRequest.package_manager:type_name -> PackageManager - 42, // 5: TransitiveDepsRequest.workspaces:type_name -> TransitiveDepsRequest.WorkspacesEntry - 15, // 6: TransitiveDepsRequest.resolutions:type_name -> AdditionalBerryData - 12, // 7: TransitiveDepsResponse.dependencies:type_name -> WorkspaceDependencies - 43, // 8: AdditionalBerryData.resolutions:type_name -> AdditionalBerryData.ResolutionsEntry - 16, // 9: LockfilePackageList.list:type_name -> LockfilePackage - 0, // 10: SubgraphRequest.package_manager:type_name -> PackageManager - 15, // 11: SubgraphRequest.resolutions:type_name -> AdditionalBerryData - 0, // 12: PatchesRequest.package_manager:type_name -> PackageManager - 22, // 13: PatchesResponse.patches:type_name -> Patches - 0, // 14: GlobalChangeRequest.package_manager:type_name -> PackageManager - 33, // 15: GetPackageFileHashesResponse.hashes:type_name -> FileHashes - 33, // 16: GetHashesForFilesResponse.hashes:type_name -> FileHashes - 44, // 17: FileHashes.hashes:type_name -> FileHashes.HashesEntry - 36, // 18: FromWildcardsRequest.env_vars:type_name -> EnvVarMap - 36, // 19: FromWildcardsResponse.env_vars:type_name -> EnvVarMap - 45, // 20: EnvVarMap.map:type_name -> EnvVarMap.MapEntry - 46, // 21: DetailedMap.all:type_name -> DetailedMap.AllEntry - 38, // 22: DetailedMap.by_source:type_name -> BySource - 47, // 23: BySource.explicit:type_name -> BySource.ExplicitEntry - 48, // 24: BySource.matching:type_name -> BySource.MatchingEntry - 36, // 25: GetGlobalHashableEnvVarsRequest.env_at_execution_start:type_name -> EnvVarMap - 37, // 26: GetGlobalHashableEnvVarsResponse.detailed_map:type_name -> DetailedMap - 17, // 27: WorkspaceDependencies.DependenciesEntry.value:type_name -> LockfilePackageList - 11, // 28: TransitiveDepsRequest.WorkspacesEntry.value:type_name -> PackageDependencyList - 29, // [29:29] is the sub-list for method output_type - 29, // [29:29] is the sub-list for method input_type - 29, // [29:29] is the sub-list for extension type_name - 29, // [29:29] is the sub-list for extension extendee - 0, // [0:29] is the sub-list for field type_name -} - -func init() { file_turborepo_ffi_messages_proto_init() } -func file_turborepo_ffi_messages_proto_init() { - if File_turborepo_ffi_messages_proto != nil { - return - } - if !protoimpl.UnsafeEnabled { - file_turborepo_ffi_messages_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*TurboDataDirResp); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_turborepo_ffi_messages_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GlobReq); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_turborepo_ffi_messages_proto_msgTypes[2].Exporter = func(v 
interface{}, i int) interface{} { - switch v := v.(*GlobResp); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_turborepo_ffi_messages_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GlobRespList); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_turborepo_ffi_messages_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ChangedFilesReq); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_turborepo_ffi_messages_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ChangedFilesResp); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_turborepo_ffi_messages_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ChangedFilesList); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_turborepo_ffi_messages_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*PreviousContentReq); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_turborepo_ffi_messages_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*PreviousContentResp); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_turborepo_ffi_messages_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*PackageDependency); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_turborepo_ffi_messages_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*PackageDependencyList); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_turborepo_ffi_messages_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*WorkspaceDependencies); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_turborepo_ffi_messages_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*TransitiveDepsRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_turborepo_ffi_messages_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*TransitiveDepsResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_turborepo_ffi_messages_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*AdditionalBerryData); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_turborepo_ffi_messages_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} { 
- switch v := v.(*LockfilePackage); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_turborepo_ffi_messages_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*LockfilePackageList); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_turborepo_ffi_messages_proto_msgTypes[17].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*SubgraphRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_turborepo_ffi_messages_proto_msgTypes[18].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*SubgraphResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_turborepo_ffi_messages_proto_msgTypes[19].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*PatchesRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_turborepo_ffi_messages_proto_msgTypes[20].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*PatchesResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_turborepo_ffi_messages_proto_msgTypes[21].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Patches); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_turborepo_ffi_messages_proto_msgTypes[22].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GlobalChangeRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_turborepo_ffi_messages_proto_msgTypes[23].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GlobalChangeResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_turborepo_ffi_messages_proto_msgTypes[24].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*RecursiveCopyRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_turborepo_ffi_messages_proto_msgTypes[25].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*RecursiveCopyResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_turborepo_ffi_messages_proto_msgTypes[26].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*VerifySignatureRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_turborepo_ffi_messages_proto_msgTypes[27].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*VerifySignatureResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_turborepo_ffi_messages_proto_msgTypes[28].Exporter = func(v interface{}, i int) interface{} { - switch v := 
v.(*GetPackageFileHashesRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_turborepo_ffi_messages_proto_msgTypes[29].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetPackageFileHashesResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_turborepo_ffi_messages_proto_msgTypes[30].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetHashesForFilesRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_turborepo_ffi_messages_proto_msgTypes[31].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetHashesForFilesResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_turborepo_ffi_messages_proto_msgTypes[32].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*FileHashes); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_turborepo_ffi_messages_proto_msgTypes[33].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*FromWildcardsRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_turborepo_ffi_messages_proto_msgTypes[34].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*FromWildcardsResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_turborepo_ffi_messages_proto_msgTypes[35].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*EnvVarMap); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_turborepo_ffi_messages_proto_msgTypes[36].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*DetailedMap); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_turborepo_ffi_messages_proto_msgTypes[37].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*BySource); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_turborepo_ffi_messages_proto_msgTypes[38].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetGlobalHashableEnvVarsRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_turborepo_ffi_messages_proto_msgTypes[39].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetGlobalHashableEnvVarsResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } - file_turborepo_ffi_messages_proto_msgTypes[2].OneofWrappers = []interface{}{ - (*GlobResp_Files)(nil), - (*GlobResp_Error)(nil), - } - file_turborepo_ffi_messages_proto_msgTypes[4].OneofWrappers = []interface{}{} - file_turborepo_ffi_messages_proto_msgTypes[5].OneofWrappers = []interface{}{ - (*ChangedFilesResp_Files)(nil), - (*ChangedFilesResp_Error)(nil), - } - 
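// [Illustrative aside from the editor — a sketch, not part of the original sources.]
// The OneofWrappers registrations here correspond to proto3 `oneof` groups in the
// deleted messages.proto. Each response message pairs a success branch with an
// error branch; GlobResp, for example, has roughly this shape (field numbers are
// illustrative):
//
//	message GlobResp {
//	  oneof response {
//	    GlobRespList files = 1;
//	    string error = 2;
//	  }
//	}
//
// protoc-gen-go emits one wrapper struct per branch (*GlobResp_Files,
// *GlobResp_Error), and Go callers dispatch on the oneof with a type switch:
//
//	switch r := resp.Response.(type) {
//	case *GlobResp_Files:
//		// success: r.Files is a *GlobRespList
//	case *GlobResp_Error:
//		// failure: r.Error holds the error string
//	}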
file_turborepo_ffi_messages_proto_msgTypes[8].OneofWrappers = []interface{}{ - (*PreviousContentResp_Content)(nil), - (*PreviousContentResp_Error)(nil), - } - file_turborepo_ffi_messages_proto_msgTypes[12].OneofWrappers = []interface{}{} - file_turborepo_ffi_messages_proto_msgTypes[13].OneofWrappers = []interface{}{ - (*TransitiveDepsResponse_Dependencies)(nil), - (*TransitiveDepsResponse_Error)(nil), - } - file_turborepo_ffi_messages_proto_msgTypes[17].OneofWrappers = []interface{}{} - file_turborepo_ffi_messages_proto_msgTypes[18].OneofWrappers = []interface{}{ - (*SubgraphResponse_Contents)(nil), - (*SubgraphResponse_Error)(nil), - } - file_turborepo_ffi_messages_proto_msgTypes[20].OneofWrappers = []interface{}{ - (*PatchesResponse_Patches)(nil), - (*PatchesResponse_Error)(nil), - } - file_turborepo_ffi_messages_proto_msgTypes[25].OneofWrappers = []interface{}{} - file_turborepo_ffi_messages_proto_msgTypes[26].OneofWrappers = []interface{}{} - file_turborepo_ffi_messages_proto_msgTypes[27].OneofWrappers = []interface{}{ - (*VerifySignatureResponse_Verified)(nil), - (*VerifySignatureResponse_Error)(nil), - } - file_turborepo_ffi_messages_proto_msgTypes[29].OneofWrappers = []interface{}{ - (*GetPackageFileHashesResponse_Hashes)(nil), - (*GetPackageFileHashesResponse_Error)(nil), - } - file_turborepo_ffi_messages_proto_msgTypes[31].OneofWrappers = []interface{}{ - (*GetHashesForFilesResponse_Hashes)(nil), - (*GetHashesForFilesResponse_Error)(nil), - } - file_turborepo_ffi_messages_proto_msgTypes[34].OneofWrappers = []interface{}{ - (*FromWildcardsResponse_EnvVars)(nil), - (*FromWildcardsResponse_Error)(nil), - } - file_turborepo_ffi_messages_proto_msgTypes[39].OneofWrappers = []interface{}{ - (*GetGlobalHashableEnvVarsResponse_DetailedMap)(nil), - (*GetGlobalHashableEnvVarsResponse_Error)(nil), - } - type x struct{} - out := protoimpl.TypeBuilder{ - File: protoimpl.DescBuilder{ - GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_turborepo_ffi_messages_proto_rawDesc, - NumEnums: 1, - NumMessages: 48, - NumExtensions: 0, - NumServices: 0, - }, - GoTypes: file_turborepo_ffi_messages_proto_goTypes, - DependencyIndexes: file_turborepo_ffi_messages_proto_depIdxs, - EnumInfos: file_turborepo_ffi_messages_proto_enumTypes, - MessageInfos: file_turborepo_ffi_messages_proto_msgTypes, - }.Build() - File_turborepo_ffi_messages_proto = out.File - file_turborepo_ffi_messages_proto_rawDesc = nil - file_turborepo_ffi_messages_proto_goTypes = nil - file_turborepo_ffi_messages_proto_depIdxs = nil -} diff --git a/cli/internal/filewatcher/backend.go b/cli/internal/filewatcher/backend.go deleted file mode 100644 index 81c39373c1157..0000000000000 --- a/cli/internal/filewatcher/backend.go +++ /dev/null @@ -1,213 +0,0 @@ -//go:build !darwin -// +build !darwin - -package filewatcher - -import ( - "fmt" - "os" - "path/filepath" - "sync" - - "github.com/fsnotify/fsnotify" - "github.com/hashicorp/go-hclog" - "github.com/karrick/godirwalk" - "github.com/pkg/errors" - "github.com/vercel/turbo/cli/internal/doublestar" - "github.com/vercel/turbo/cli/internal/fs" - "github.com/vercel/turbo/cli/internal/turbopath" -) - -// watchAddMode is used to indicate whether watchRecursively should synthesize events -// for existing files. 
-type watchAddMode int - -const ( - dontSynthesizeEvents watchAddMode = iota - synthesizeEvents -) - -type fsNotifyBackend struct { - watcher *fsnotify.Watcher - events chan Event - errors chan error - logger hclog.Logger - - mu sync.Mutex - allExcludes []string - closed bool -} - -func (f *fsNotifyBackend) Events() <-chan Event { - return f.events -} - -func (f *fsNotifyBackend) Errors() <-chan error { - return f.errors -} - -func (f *fsNotifyBackend) Close() error { - f.mu.Lock() - defer f.mu.Unlock() - if f.closed { - return ErrFilewatchingClosed - } - f.closed = true - close(f.events) - close(f.errors) - if err := f.watcher.Close(); err != nil { - return err - } - return nil -} - -// onFileAdded helps us paper over cross-platform inconsistencies in fsnotify. -// Some fsnotify backends automatically add the contents of directories. Some do -// not. Adding a watch is idempotent, so anytime any file we care about gets added, -// watch it. -func (f *fsNotifyBackend) onFileAdded(name turbopath.AbsoluteSystemPath) error { - info, err := name.Lstat() - if err != nil { - if errors.Is(err, os.ErrNotExist) { - // We can race with a file being added and removed. Ignore it - return nil - } - return errors.Wrapf(err, "error checking lstat of new file %v", name) - } - if info.IsDir() { - // If a directory has been added, we need to synthesize events for everything it contains - if err := f.watchRecursively(name, []string{}, synthesizeEvents); err != nil { - return errors.Wrapf(err, "failed recursive watch of %v", name) - } - } - // Note that for symlinks and regular files, we don't add any watches, including traversing links - return nil -} - -func (f *fsNotifyBackend) watchRecursively(root turbopath.AbsoluteSystemPath, excludePatterns []string, addMode watchAddMode) error { - f.mu.Lock() - defer f.mu.Unlock() - err := fs.WalkMode(root.ToString(), func(name string, isDir bool, info os.FileMode) error { - for _, excludePattern := range excludePatterns { - excluded, err := doublestar.Match(excludePattern, filepath.ToSlash(name)) - if err != nil { - return err - } - if excluded { - return godirwalk.SkipThis - } - } - if info.IsDir() && (info&os.ModeSymlink == 0) { - if err := f.watcher.Add(name); err != nil { - return errors.Wrapf(err, "failed adding watch to %v", name) - } - f.logger.Debug(fmt.Sprintf("watching directory %v", name)) - } - if addMode == synthesizeEvents { - f.events <- Event{ - Path: fs.AbsoluteSystemPathFromUpstream(name), - EventType: FileAdded, - } - } - return nil - }) - if err != nil { - return err - } - f.allExcludes = append(f.allExcludes, excludePatterns...)
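// [Illustrative aside from the editor — a sketch, not part of the original sources;
// the literal pattern and path below are hypothetical.]
// The WalkMode callback above skips excluded subtrees using doublestar-style globs
// matched against slash-normalized paths, so the same patterns behave identically
// on Windows and Unix:
//
//	excluded, err := doublestar.Match("/repo/node_modules/**", "/repo/node_modules/foo/index.js")
//	// err is non-nil only for a malformed pattern; here excluded == true,
//	// so the walk returns godirwalk.SkipThis and never descends into the subtree.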
- - return nil -} - -func (f *fsNotifyBackend) watch() { -outer: - for { - select { - case ev, ok := <-f.watcher.Events: - if !ok { - break outer - } - eventType := toFileEvent(ev.Op) - path := fs.AbsoluteSystemPathFromUpstream(ev.Name) - if eventType == FileAdded { - if err := f.onFileAdded(path); err != nil { - f.errors <- err - } - } - if eventType == FileRenamed { - // synthesize a delete event for a rename - f.events <- Event{ - Path: path, - EventType: FileDeleted, - } - } - f.events <- Event{ - Path: path, - EventType: eventType, - } - case err, ok := <-f.watcher.Errors: - if !ok { - break outer - } - f.errors <- err - } - } -} - -var _modifiedMask = fsnotify.Chmod | fsnotify.Write - -func toFileEvent(op fsnotify.Op) FileEvent { - if op&fsnotify.Create != 0 { - return FileAdded - } else if op&fsnotify.Remove != 0 { - return FileDeleted - } else if op&_modifiedMask != 0 { - return FileModified - } else if op&fsnotify.Rename != 0 { - return FileRenamed - } - return FileOther -} - -func (f *fsNotifyBackend) Start() error { - f.mu.Lock() - defer f.mu.Unlock() - if f.closed { - return ErrFilewatchingClosed - } - for _, dir := range f.watcher.WatchList() { - for _, excludePattern := range f.allExcludes { - excluded, err := doublestar.Match(excludePattern, filepath.ToSlash(dir)) - if err != nil { - return err - } - if excluded { - if err := f.watcher.Remove(dir); err != nil { - return err - } - } - } - } - go f.watch() - return nil -} - -func (f *fsNotifyBackend) AddRoot(root turbopath.AbsoluteSystemPath, excludePatterns ...string) error { - // We don't synthesize events for the initial watch - return f.watchRecursively(root, excludePatterns, dontSynthesizeEvents) -} - -// GetPlatformSpecificBackend returns a filewatching backend appropriate for the OS we are -// running on. -func GetPlatformSpecificBackend(logger hclog.Logger) (Backend, error) { - watcher, err := fsnotify.NewWatcher() - if err != nil { - return nil, err - } - return &fsNotifyBackend{ - watcher: watcher, - events: make(chan Event), - errors: make(chan error), - logger: logger.Named("fsnotify"), - }, nil -} diff --git a/cli/internal/filewatcher/backend_darwin.go b/cli/internal/filewatcher/backend_darwin.go deleted file mode 100644 index 06977ff4a3a06..0000000000000 --- a/cli/internal/filewatcher/backend_darwin.go +++ /dev/null @@ -1,225 +0,0 @@ -//go:build darwin -// +build darwin - -package filewatcher - -import ( - "fmt" - "strings" - "sync" - "time" - - "github.com/pkg/errors" - "github.com/yookoala/realpath" - - "github.com/fsnotify/fsevents" - "github.com/hashicorp/go-hclog" - "github.com/vercel/turbo/cli/internal/doublestar" - "github.com/vercel/turbo/cli/internal/fs" - "github.com/vercel/turbo/cli/internal/turbopath" -) - -type fseventsBackend struct { - events chan Event - errors chan error - logger hclog.Logger - mu sync.Mutex - streams []*fsevents.EventStream - closed bool -} - -func (f *fseventsBackend) Events() <-chan Event { - return f.events -} - -func (f *fseventsBackend) Errors() <-chan error { - return f.errors -} - -func (f *fseventsBackend) Close() error { - f.mu.Lock() - defer f.mu.Unlock() - if f.closed { - return ErrFilewatchingClosed - } - f.closed = true - for _, stream := range f.streams { - stream.Stop() - } - close(f.events) - close(f.errors) - return nil -} - -func (f *fseventsBackend) Start() error { - return nil -} - -var ( - _eventLatency = 10 * time.Millisecond - _cookieTimeout = 500 * time.Millisecond -) - -// AddRoot starts watching a new directory hierarchy. 
Events matching the provided excludePatterns -// will not be forwarded. -func (f *fseventsBackend) AddRoot(someRoot turbopath.AbsoluteSystemPath, excludePatterns ...string) error { - // We need to resolve the real path to the hierarchy that we are going to watch - realRoot, err := realpath.Realpath(someRoot.ToString()) - if err != nil { - return err - } - root := fs.AbsoluteSystemPathFromUpstream(realRoot) - dev, err := fsevents.DeviceForPath(root.ToString()) - if err != nil { - return err - } - - // Optimistically set up and start a stream, assuming the watch is still valid. - s := &fsevents.EventStream{ - Paths: []string{root.ToString()}, - Latency: _eventLatency, - Device: dev, - Flags: fsevents.FileEvents | fsevents.WatchRoot, - } - s.Start() - events := s.Events - - // fsevents delivers events for all existing files first, so use a cookie to detect when we're ready for new events - if err := waitForCookie(root, events, _cookieTimeout); err != nil { - s.Stop() - return err - } - - // Now try to persist the stream. - f.mu.Lock() - defer f.mu.Unlock() - if f.closed { - s.Stop() - return ErrFilewatchingClosed - } - f.streams = append(f.streams, s) - f.logger.Debug(fmt.Sprintf("watching root %v, excluding %v", root, excludePatterns)) - - go func() { - for evs := range events { - for _, ev := range evs { - isExcluded := false - - // 1. Ensure that we have a `/`-prefixed path from the event. - var eventPath string - if !strings.HasPrefix(ev.Path, "/") { - eventPath = "/" + ev.Path - } else { - eventPath = ev.Path - } - - // 2. We're getting events from the real path, but we need to translate - // back to the path we were provided since that's what the caller will - // expect in terms of event paths. - watchRootRelativePath := eventPath[len(realRoot):] - processedEventPath := someRoot.UntypedJoin(watchRootRelativePath) - - // 3. Compare the event to all exclude patterns, short-circuit if we know - // we are not watching this file. - processedPathString := processedEventPath.ToString() // loop invariant - for _, pattern := range excludePatterns { - matches, err := doublestar.Match(pattern, processedPathString) - if err != nil { - f.errors <- err - } else if matches { - isExcluded = true - break - } - } - - // 4. Report the file events we care about. - if !isExcluded { - f.events <- Event{ - Path: processedEventPath, - EventType: toFileEvent(ev.Flags, processedEventPath), - } - } - } - } - }() - - return nil -} - -func waitForCookie(root turbopath.AbsoluteSystemPath, events <-chan []fsevents.Event, timeout time.Duration) error { - // This cookie needs to be in a location that we're watching, and at this point we can't guarantee - // what the root is, or if something like "node_modules/.cache/turbo" would make sense. As a compromise, ensure - // that we clean it up even in the event of a failure. - cookiePath := root.UntypedJoin(".turbo-cookie") - if err := cookiePath.WriteFile([]byte("cookie"), 0755); err != nil { - return err - } - expected := cookiePath.ToString()[1:] // trim leading slash - if err := waitForEvent(events, expected, fsevents.ItemCreated, timeout); err != nil { - // Attempt to not leave the cookie file lying around. - // Ignore the error, since there's not much we can do with it.
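// [Illustrative aside from the editor — a restatement of the cookie protocol
// implemented by this function, not part of the original sources.]
// The cookie round trip gives AddRoot a positive signal that fsevents has finished
// replaying events for pre-existing files:
//
//	1. write .turbo-cookie  -> wait for ItemCreated within _cookieTimeout
//	2. remove .turbo-cookie -> wait for ItemRemoved within _cookieTimeout
//	3. the stream is now "live"; everything after this point is a genuinely new event
//
// If step 1 times out, the code below removes the cookie on a best-effort basis.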
- _ = cookiePath.Remove() - return err - } - if err := cookiePath.Remove(); err != nil { - return err - } - if err := waitForEvent(events, expected, fsevents.ItemRemoved, timeout); err != nil { - return err - } - return nil -} - -func waitForEvent(events <-chan []fsevents.Event, path string, flag fsevents.EventFlags, timeout time.Duration) error { - ch := make(chan struct{}) - go func() { - for evs := range events { - for _, ev := range evs { - if ev.Path == path && ev.Flags&flag != 0 { - close(ch) - return - } - } - } - }() - select { - case <-time.After(timeout): - return errors.Wrap(ErrFailedToStart, "timed out waiting for initial fsevents cookie") - case <-ch: - return nil - } -} - -var _modifiedMask = fsevents.ItemModified | fsevents.ItemInodeMetaMod | fsevents.ItemFinderInfoMod | fsevents.ItemChangeOwner | fsevents.ItemXattrMod - -func toFileEvent(flags fsevents.EventFlags, path turbopath.AbsoluteSystemPath) FileEvent { - if flags&fsevents.ItemCreated != 0 { - return FileAdded - } else if flags&fsevents.ItemRemoved != 0 { - return FileDeleted - } else if flags&_modifiedMask != 0 { - return FileModified - } else if flags&fsevents.ItemRenamed != 0 { - // FSEvents sends ItemRenamed for both the old and new files, - // and does not send delete / create events - if path.Exists() { - return FileAdded - } - return FileDeleted - } else if flags&fsevents.RootChanged != 0 { - // count this as a delete, something affected the path to the root - // of the stream - return FileDeleted - } - return FileOther -} - -// GetPlatformSpecificBackend returns a filewatching backend appropriate for the OS we are -// running on. -func GetPlatformSpecificBackend(logger hclog.Logger) (Backend, error) { - return &fseventsBackend{ - events: make(chan Event), - errors: make(chan error), - logger: logger.Named("fsevents"), - }, nil -} diff --git a/cli/internal/filewatcher/cookie.go b/cli/internal/filewatcher/cookie.go deleted file mode 100644 index 7a4931eea011d..0000000000000 --- a/cli/internal/filewatcher/cookie.go +++ /dev/null @@ -1,160 +0,0 @@ -package filewatcher - -import ( - "fmt" - "os" - "sync" - "sync/atomic" - "time" - - "github.com/pkg/errors" - "github.com/vercel/turbo/cli/internal/fs" - "github.com/vercel/turbo/cli/internal/turbopath" -) - -// CookieWaiter is the interface used by clients that need to wait -// for a roundtrip through the filewatching API. -type CookieWaiter interface { - WaitForCookie() error -} - -var ( - // ErrCookieTimeout is returned when we did not see our cookie file within the given time constraints - ErrCookieTimeout = errors.New("timed out waiting for cookie") - // ErrCookieWatchingClosed is returned when the underlying filewatching has been closed. - ErrCookieWatchingClosed = errors.New("filewatching has closed, cannot watch cookies") -) - -// CookieJar is used for tracking roundtrips through the filesystem watching API -type CookieJar struct { - timeout time.Duration - dir turbopath.AbsoluteSystemPath - serial uint64 - mu sync.Mutex - cookies map[turbopath.AbsoluteSystemPath]chan error - closed bool -} - -// NewCookieJar returns a new instance of a CookieJar. There should only ever be a single -// instance live per cookieDir, since they expect to have full control over that directory. 
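// [Illustrative aside from the editor — a usage sketch, not part of the original
// sources; it mirrors the round trip exercised by TestWaitForCookie later in this
// diff, with error handling elided.]
// A client wires a CookieJar into a FileWatcher, then calls WaitForCookie to put
// an upper bound on how stale its view of the filesystem can be:
//
//	jar, _ := NewCookieJar(cookieDir, 5*time.Second)
//	backend, _ := GetPlatformSpecificBackend(logger)
//	fw := New(logger, repoRoot, backend)
//	_ = fw.Start()
//	fw.AddClient(jar)          // the jar observes FileAdded events for its cookies
//	_ = fw.AddRoot(cookieDir)  // the cookie directory itself must be watched
//	err := jar.WaitForCookie() // returns once a fresh cookie event round-trips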
-func NewCookieJar(cookieDir turbopath.AbsoluteSystemPath, timeout time.Duration) (*CookieJar, error) { - if err := cookieDir.RemoveAll(); err != nil { - return nil, err - } - if err := cookieDir.MkdirAll(0775); err != nil { - return nil, err - } - return &CookieJar{ - timeout: timeout, - dir: cookieDir, - cookies: make(map[turbopath.AbsoluteSystemPath]chan error), - }, nil -} - -// removeAllCookiesWithError sends the error to every channel, closes every channel, -// and attempts to remove every cookie file. Must be called while the cj.mu is held. -// If the cookie jar is going to be reused afterwards, the cookies map must be reinitialized. -func (cj *CookieJar) removeAllCookiesWithError(err error) { - for p, ch := range cj.cookies { - _ = p.Remove() - ch <- err - close(ch) - } - // Drop all of the references so they can be cleaned up - cj.cookies = nil -} - -// OnFileWatchClosed handles the case where filewatching had to close for some reason -// We send an error to all of our cookies and stop accepting new ones. -func (cj *CookieJar) OnFileWatchClosed() { - cj.mu.Lock() - defer cj.mu.Unlock() - cj.closed = true - cj.removeAllCookiesWithError(ErrCookieWatchingClosed) - -} - -// OnFileWatchError handles when filewatching has encountered an error. -// In the error case, we remove all cookies and send them errors. We remain -// available for later cookies. -func (cj *CookieJar) OnFileWatchError(err error) { - // We are now in an inconsistent state. Drop all of our cookies, - // but we still allow new ones to be created - cj.mu.Lock() - defer cj.mu.Unlock() - cj.removeAllCookiesWithError(err) - cj.cookies = make(map[turbopath.AbsoluteSystemPath]chan error) -} - -// OnFileWatchEvent determines if the specified event is relevant -// for cookie watching and notifies the appropriate cookie if so. -func (cj *CookieJar) OnFileWatchEvent(ev Event) { - if ev.EventType == FileAdded { - isCookie, err := fs.DirContainsPath(cj.dir.ToStringDuringMigration(), ev.Path.ToStringDuringMigration()) - if err != nil { - cj.OnFileWatchError(errors.Wrapf(err, "failed to determine if path is a cookie: %v", ev.Path)) - } else if isCookie { - cj.notifyCookie(ev.Path, nil) - } - } -} - -// WaitForCookie touches a unique file, then waits for it to show up in filesystem notifications. -// This provides a theoretical bound on filesystem operations, although it's possible -// that underlying filewatch mechanisms don't respect this ordering. -func (cj *CookieJar) WaitForCookie() error { - // we're only ever going to send a single error on the channel, add a buffer so that we never - // block sending it. - ch := make(chan error, 1) - serial := atomic.AddUint64(&cj.serial, 1) - cookiePath := cj.dir.UntypedJoin(fmt.Sprintf("%v.cookie", serial)) - cj.mu.Lock() - if cj.closed { - cj.mu.Unlock() - return ErrCookieWatchingClosed - } - cj.cookies[cookiePath] = ch - cj.mu.Unlock() - if err := touchCookieFile(cookiePath); err != nil { - cj.notifyCookie(cookiePath, err) - return err - } - select { - case <-time.After(cj.timeout): - return ErrCookieTimeout - case err, ok := <-ch: - if !ok { - // the channel closed without an error, we're all set - return nil - } - // the channel didn't close, meaning we got some error. - // We don't need to wait on channel close, it's going to be closed - // immediately by whoever sent the error. 
Return the error directly - return err - } -} - -func (cj *CookieJar) notifyCookie(cookie turbopath.AbsoluteSystemPath, err error) { - cj.mu.Lock() - ch, ok := cj.cookies[cookie] - // delete is a no-op if the key doesn't exist - delete(cj.cookies, cookie) - cj.mu.Unlock() - if ok { - if err != nil { - ch <- err - } - close(ch) - } -} - -func touchCookieFile(cookie turbopath.AbsoluteSystemPath) error { - f, err := cookie.OpenFile(os.O_CREATE|os.O_TRUNC|os.O_WRONLY, 0700) - if err != nil { - return err - } - if err := f.Close(); err != nil { - return err - } - return nil -} diff --git a/cli/internal/filewatcher/cookie_test.go b/cli/internal/filewatcher/cookie_test.go deleted file mode 100644 index 96241b45a41c1..0000000000000 --- a/cli/internal/filewatcher/cookie_test.go +++ /dev/null @@ -1,130 +0,0 @@ -package filewatcher - -import ( - "testing" - "time" - - "github.com/hashicorp/go-hclog" - "github.com/pkg/errors" - "github.com/vercel/turbo/cli/internal/fs" - "gotest.tools/v3/assert" -) - -func TestWaitForCookie(t *testing.T) { - logger := hclog.Default() - cookieDir := fs.AbsoluteSystemPathFromUpstream(t.TempDir()) - repoRoot := fs.AbsoluteSystemPathFromUpstream(t.TempDir()) - - jar, err := NewCookieJar(cookieDir, 5*time.Second) - assert.NilError(t, err, "NewCookieJar") - - watcher, err := GetPlatformSpecificBackend(logger) - assert.NilError(t, err, "NewWatcher") - fw := New(logger, repoRoot, watcher) - err = fw.Start() - assert.NilError(t, err, "Start") - fw.AddClient(jar) - err = fw.AddRoot(cookieDir) - assert.NilError(t, err, "Add") - - err = jar.WaitForCookie() - assert.NilError(t, err, "failed to roundtrip cookie") -} - -func TestWaitForCookieAfterClose(t *testing.T) { - logger := hclog.Default() - cookieDir := fs.AbsoluteSystemPathFromUpstream(t.TempDir()) - repoRoot := fs.AbsoluteSystemPathFromUpstream(t.TempDir()) - - jar, err := NewCookieJar(cookieDir, 5*time.Second) - assert.NilError(t, err, "NewCookieJar") - - watcher, err := GetPlatformSpecificBackend(logger) - assert.NilError(t, err, "NewWatcher") - fw := New(logger, repoRoot, watcher) - err = fw.Start() - assert.NilError(t, err, "Start") - fw.AddClient(jar) - err = fw.AddRoot(cookieDir) - assert.NilError(t, err, "Add") - - err = fw.Close() - assert.NilError(t, err, "Close") - err = jar.WaitForCookie() - assert.ErrorIs(t, err, ErrCookieWatchingClosed) -} - -func TestWaitForCookieTimeout(t *testing.T) { - logger := hclog.Default() - cookieDir := fs.AbsoluteSystemPathFromUpstream(t.TempDir()) - repoRoot := fs.AbsoluteSystemPathFromUpstream(t.TempDir()) - - jar, err := NewCookieJar(cookieDir, 10*time.Millisecond) - assert.NilError(t, err, "NewCookieJar") - - watcher, err := GetPlatformSpecificBackend(logger) - assert.NilError(t, err, "NewWatcher") - fw := New(logger, repoRoot, watcher) - err = fw.Start() - assert.NilError(t, err, "Start") - fw.AddClient(jar) - - // NOTE: don't call fw.Add here so that no file event gets delivered - - err = jar.WaitForCookie() - assert.ErrorIs(t, err, ErrCookieTimeout) -} - -func TestWaitForCookieWithError(t *testing.T) { - logger := hclog.Default() - cookieDir := fs.AbsoluteSystemPathFromUpstream(t.TempDir()) - repoRoot := fs.AbsoluteSystemPathFromUpstream(t.TempDir()) - - jar, err := NewCookieJar(cookieDir, 10*time.Second) - assert.NilError(t, err, "NewCookieJar") - - watcher, err := GetPlatformSpecificBackend(logger) - assert.NilError(t, err, "NewWatcher") - fw := New(logger, repoRoot, watcher) - err = fw.Start() - assert.NilError(t, err, "Start") - fw.AddClient(jar) - - // NOTE: don't call 
fw.Add here so that no file event gets delivered - myErr := errors.New("an error") - ch := make(chan error) - go func() { - if err := jar.WaitForCookie(); err != nil { - ch <- err - } - close(ch) - }() - // wait for the cookie to be registered in the jar - for { - found := false - jar.mu.Lock() - if len(jar.cookies) == 1 { - found = true - } - jar.mu.Unlock() - if found { - break - } - <-time.After(10 * time.Millisecond) - } - jar.OnFileWatchError(myErr) - - err, ok := <-ch - if !ok { - t.Error("expected to get an error from cookie watching") - } - assert.ErrorIs(t, err, myErr) - - // ensure waiting for a new cookie still works. - // Add the filewatch to allow cookies to work normally - err = fw.AddRoot(cookieDir) - assert.NilError(t, err, "Add") - - err = jar.WaitForCookie() - assert.NilError(t, err, "WaitForCookie") -} diff --git a/cli/internal/filewatcher/filewatcher.go b/cli/internal/filewatcher/filewatcher.go deleted file mode 100644 index 4f79495f2231f..0000000000000 --- a/cli/internal/filewatcher/filewatcher.go +++ /dev/null @@ -1,167 +0,0 @@ -// Package filewatcher is used to handle watching for file changes inside the monorepo -package filewatcher - -import ( - "path/filepath" - "strings" - "sync" - - "github.com/hashicorp/go-hclog" - "github.com/pkg/errors" - "github.com/vercel/turbo/cli/internal/turbopath" -) - -// _ignores is the set of paths we exempt from file-watching -var _ignores = []string{".git", "node_modules"} - -// FileWatchClient defines the callbacks used by the file watching loop. -// All methods are called from the same goroutine so they: -// 1) do not need synchronization -// 2) should minimize the work they are doing when called, if possible -type FileWatchClient interface { - OnFileWatchEvent(ev Event) - OnFileWatchError(err error) - OnFileWatchClosed() -} - -// FileEvent is an enum covering the kinds of things that can happen -// to files that we might be interested in -type FileEvent int - -const ( - // FileAdded - this is a new file - FileAdded FileEvent = iota + 1 - // FileDeleted - this file has been removed - FileDeleted - // FileModified - this file has been changed in some way - FileModified - // FileRenamed - a file's name has changed - FileRenamed - // FileOther - some other backend-specific event has happened - FileOther -) - -var ( - // ErrFilewatchingClosed is returned when filewatching has been closed - ErrFilewatchingClosed = errors.New("Close() has already been called for filewatching") - // ErrFailedToStart is returned when filewatching fails to start up - ErrFailedToStart = errors.New("filewatching failed to start") -) - -// Event is the backend-independent information about a file change -type Event struct { - Path turbopath.AbsoluteSystemPath - EventType FileEvent -} - -// Backend is the interface that describes what an underlying filesystem watching backend -// must provide. -type Backend interface { - AddRoot(root turbopath.AbsoluteSystemPath, excludePatterns ...string) error - Events() <-chan Event - Errors() <-chan error - Close() error - Start() error -} - -// FileWatcher handles watching all of the files in the monorepo. -// We currently ignore .git and top-level node_modules. We can revisit -// if necessary.
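The FileWatchClient contract being deleted here is small enough to illustrate. Below is a minimal, hypothetical sketch of a client (not part of the removed code): it reacts only to FileAdded events and keeps the callback cheap, since all callbacks share the watch loop's goroutine.

```go
// addTracker is a hypothetical FileWatchClient that forwards newly added
// paths to its own channel. Callbacks run on the watch loop's goroutine,
// so no real work happens here.
type addTracker struct {
	added chan turbopath.AbsoluteSystemPath
}

func (a *addTracker) OnFileWatchEvent(ev Event) {
	if ev.EventType == FileAdded {
		a.added <- ev.Path // a buffered channel keeps the watch loop from stalling
	}
}

func (a *addTracker) OnFileWatchError(err error) {}

func (a *addTracker) OnFileWatchClosed() { close(a.added) }
```

A caller would register it with fw.AddClient(&addTracker{added: make(chan turbopath.AbsoluteSystemPath, 16)}), mirroring how the CookieJar hooks in above.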
-type FileWatcher struct { - backend Backend - - logger hclog.Logger - repoRoot turbopath.AbsoluteSystemPath - excludePattern string - - clientsMu sync.RWMutex - clients []FileWatchClient - closed bool -} - -// New returns a new FileWatcher instance -func New(logger hclog.Logger, repoRoot turbopath.AbsoluteSystemPath, backend Backend) *FileWatcher { - excludes := make([]string, len(_ignores)) - for i, ignore := range _ignores { - excludes[i] = filepath.ToSlash(repoRoot.UntypedJoin(ignore).ToString() + "/**") - } - excludePattern := "{" + strings.Join(excludes, ",") + "}" - return &FileWatcher{ - backend: backend, - logger: logger, - repoRoot: repoRoot, - excludePattern: excludePattern, - } -} - -// Close shuts down filewatching -func (fw *FileWatcher) Close() error { - return fw.backend.Close() -} - -// Start recursively adds all directories from the repo root, skipping the excluded ones, -// then fires off a goroutine to respond to filesystem events -func (fw *FileWatcher) Start() error { - if err := fw.backend.AddRoot(fw.repoRoot, fw.excludePattern); err != nil { - return err - } - if err := fw.backend.Start(); err != nil { - return err - } - go fw.watch() - return nil -} - -// AddRoot registers the root of a filesystem hierarchy to be watched for changes. Events are *not* -// fired for existing files when AddRoot is called, only for subsequent changes. -// NOTE: if it appears helpful, we could change this behavior so that we provide a stream of initial -// events. -func (fw *FileWatcher) AddRoot(root turbopath.AbsoluteSystemPath, excludePatterns ...string) error { - return fw.backend.AddRoot(root, excludePatterns...) -} - -// watch is the main file-watching loop. Watching is not recursive, -// so when new directories are added, they are manually recursively watched. -func (fw *FileWatcher) watch() { -outer: - for { - select { - case ev, ok := <-fw.backend.Events(): - if !ok { - fw.logger.Info("Events channel closed. Exiting watch loop") - break outer - } - fw.clientsMu.RLock() - for _, client := range fw.clients { - client.OnFileWatchEvent(ev) - } - fw.clientsMu.RUnlock() - case err, ok := <-fw.backend.Errors(): - if !ok { - fw.logger.Info("Errors channel closed.
Exiting watch loop") - break outer - } - fw.clientsMu.RLock() - for _, client := range fw.clients { - client.OnFileWatchError(err) - } - fw.clientsMu.RUnlock() - } - } - fw.clientsMu.Lock() - fw.closed = true - for _, client := range fw.clients { - client.OnFileWatchClosed() - } - fw.clientsMu.Unlock() -} - -// AddClient registers a client for filesystem events -func (fw *FileWatcher) AddClient(client FileWatchClient) { - fw.clientsMu.Lock() - defer fw.clientsMu.Unlock() - fw.clients = append(fw.clients, client) - if fw.closed { - client.OnFileWatchClosed() - } -} diff --git a/cli/internal/filewatcher/filewatcher_test.go b/cli/internal/filewatcher/filewatcher_test.go deleted file mode 100644 index 9d1725e977882..0000000000000 --- a/cli/internal/filewatcher/filewatcher_test.go +++ /dev/null @@ -1,707 +0,0 @@ -package filewatcher - -import ( - "fmt" - "runtime" - "sync" - "testing" - "time" - - "github.com/hashicorp/go-hclog" - "github.com/vercel/turbo/cli/internal/fs" - "github.com/vercel/turbo/cli/internal/turbopath" - "gotest.tools/v3/assert" -) - -type testClient struct { - mu sync.Mutex - createEvents []Event - notify chan Event -} - -func (c *testClient) OnFileWatchEvent(ev Event) { - if ev.EventType == FileAdded { - c.mu.Lock() - defer c.mu.Unlock() - c.createEvents = append(c.createEvents, ev) - } - if ev.EventType != FileModified { - c.notify <- ev - } -} - -func (c *testClient) OnFileWatchError(err error) {} - -func (c *testClient) OnFileWatchClosed() {} - -func expectFilesystemEvent(t *testing.T, ch <-chan Event, expected Event) { - // mark this method as a helper - t.Helper() - timeout := time.After(10 * time.Second) - for { - select { - case ev := <-ch: - t.Logf("got event %v", ev) - if ev.Path == expected.Path && ev.EventType == expected.EventType { - return - } - case <-timeout: - t.Fatalf("Timed out waiting for filesystem event at %v %v", expected.EventType, expected.Path) - return - } - } -} - -func expectNoFilesystemEvent(t *testing.T, ch <-chan Event) { - // mark this method as a helper - t.Helper() - select { - case ev, ok := <-ch: - if ok { - t.Errorf("got unexpected filesystem event %v", ev) - } else { - t.Error("filewatching closed unexpectedly") - } - case <-time.After(500 * time.Millisecond): - return - } -} - -// Hack to avoid duplicate filenames. Count the number of test files we create. 
-// Not thread-safe -var testFileCount = 0 - -func expectWatching(t *testing.T, c *testClient, dirs []turbopath.AbsoluteSystemPath) { - t.Helper() - thisFileCount := testFileCount - testFileCount++ - filename := fmt.Sprintf("test-%v", thisFileCount) - for _, dir := range dirs { - file := dir.UntypedJoin(filename) - err := file.WriteFile([]byte("hello"), 0755) - assert.NilError(t, err, "WriteFile") - expectFilesystemEvent(t, c.notify, Event{ - Path: file, - EventType: FileAdded, - }) - } -} - -func TestFileWatching(t *testing.T) { - logger := hclog.Default() - logger.SetLevel(hclog.Debug) - repoRoot := fs.AbsoluteSystemPathFromUpstream(t.TempDir()) - err := repoRoot.UntypedJoin(".git").MkdirAll(0775) - assert.NilError(t, err, "MkdirAll") - err = repoRoot.UntypedJoin("node_modules", "some-dep").MkdirAll(0775) - assert.NilError(t, err, "MkdirAll") - err = repoRoot.UntypedJoin("parent", "child").MkdirAll(0775) - assert.NilError(t, err, "MkdirAll") - err = repoRoot.UntypedJoin("parent", "sibling").MkdirAll(0775) - assert.NilError(t, err, "MkdirAll") - - // Directory layout: - // <repoRoot>/ - //   .git/ - //   node_modules/ - //     some-dep/ - //   parent/ - //     child/ - //     sibling/ - - watcher, err := GetPlatformSpecificBackend(logger) - assert.NilError(t, err, "GetPlatformSpecificBackend") - fw := New(logger, repoRoot, watcher) - err = fw.Start() - assert.NilError(t, err, "fw.Start") - - // Add a client - ch := make(chan Event, 1) - c := &testClient{ - notify: ch, - } - fw.AddClient(c) - expectedWatching := []turbopath.AbsoluteSystemPath{ - repoRoot, - repoRoot.UntypedJoin("parent"), - repoRoot.UntypedJoin("parent", "child"), - repoRoot.UntypedJoin("parent", "sibling"), - } - expectWatching(t, c, expectedWatching) - - fooPath := repoRoot.UntypedJoin("parent", "child", "foo") - err = fooPath.WriteFile([]byte("hello"), 0644) - assert.NilError(t, err, "WriteFile") - expectFilesystemEvent(t, ch, Event{ - EventType: FileAdded, - Path: fooPath, - }) - - deepPath := repoRoot.UntypedJoin("parent", "sibling", "deep", "path") - err = deepPath.MkdirAll(0775) - assert.NilError(t, err, "MkdirAll") - // We'll catch an event for "deep", but not "deep/path" since - // we don't have a recursive watch - expectFilesystemEvent(t, ch, Event{ - Path: repoRoot.UntypedJoin("parent", "sibling", "deep"), - EventType: FileAdded, - }) - expectFilesystemEvent(t, ch, Event{ - Path: repoRoot.UntypedJoin("parent", "sibling", "deep", "path"), - EventType: FileAdded, - }) - expectedWatching = append(expectedWatching, deepPath, repoRoot.UntypedJoin("parent", "sibling", "deep")) - expectWatching(t, c, expectedWatching) - - gitFilePath := repoRoot.UntypedJoin(".git", "git-file") - err = gitFilePath.WriteFile([]byte("nope"), 0644) - assert.NilError(t, err, "WriteFile") - expectNoFilesystemEvent(t, ch) -} - -// TestFileWatchingSubfolderDeletion tests that when a repo subfolder is deleted, -// recursive watching will still work for new folders -// -// ✅ macOS -// ✅ Linux -// ✅ Windows -func TestFileWatchingSubfolderDeletion(t *testing.T) { - logger := hclog.Default() - logger.SetLevel(hclog.Debug) - repoRoot := fs.AbsoluteSystemPathFromUpstream(t.TempDir()) - err := repoRoot.UntypedJoin(".git").MkdirAll(0775) - assert.NilError(t, err, "MkdirAll") - err = repoRoot.UntypedJoin("node_modules", "some-dep").MkdirAll(0775) - assert.NilError(t, err, "MkdirAll") - err = repoRoot.UntypedJoin("parent", "child").MkdirAll(0775) - assert.NilError(t, err, "MkdirAll") - - // Directory layout: - // <repoRoot>/ - //   .git/ - //   node_modules/ - //     some-dep/ - //
parent/ - //     child/ - - watcher, err := GetPlatformSpecificBackend(logger) - assert.NilError(t, err, "GetPlatformSpecificBackend") - fw := New(logger, repoRoot, watcher) - err = fw.Start() - assert.NilError(t, err, "fw.Start") - - // Add a client - ch := make(chan Event, 1) - c := &testClient{ - notify: ch, - } - fw.AddClient(c) - expectedWatching := []turbopath.AbsoluteSystemPath{ - repoRoot, - repoRoot.UntypedJoin("parent"), - repoRoot.UntypedJoin("parent", "child"), - } - expectWatching(t, c, expectedWatching) - - // Delete parent folder during file watching - err = repoRoot.UntypedJoin("parent").RemoveAll() - assert.NilError(t, err, "RemoveAll") - - // Recreate the deleted directory and ensure we get events for it again - folder := repoRoot.UntypedJoin("parent", "child") - err = folder.MkdirAllMode(0755) - assert.NilError(t, err, "MkdirAll") - - expectFilesystemEvent(t, ch, Event{ - EventType: FileAdded, - Path: repoRoot.UntypedJoin("parent"), - }) - - expectFilesystemEvent(t, ch, Event{ - EventType: FileAdded, - Path: folder, - }) - - fooPath := folder.UntypedJoin("foo") - err = fooPath.WriteFile([]byte("hello"), 0644) - assert.NilError(t, err, "WriteFile") - - expectFilesystemEvent(t, ch, Event{ - EventType: FileAdded, - Path: folder.UntypedJoin("foo"), - }) - // We cannot guarantee no more events, windows sends multiple delete events -} - -// TestFileWatchingRootDeletion tests that when the root is deleted, -// we get a deleted event at the root. -// -// ✅ macOS -// ✅ Linux -// ❌ Windows - we do not get an event when the root is recreated -func TestFileWatchingRootDeletion(t *testing.T) { - logger := hclog.Default() - logger.SetLevel(hclog.Debug) - repoRoot := fs.AbsoluteSystemPathFromUpstream(t.TempDir()) - err := repoRoot.UntypedJoin(".git").MkdirAll(0775) - assert.NilError(t, err, "MkdirAll") - err = repoRoot.UntypedJoin("node_modules", "some-dep").MkdirAll(0775) - assert.NilError(t, err, "MkdirAll") - err = repoRoot.UntypedJoin("parent", "child").MkdirAll(0775) - assert.NilError(t, err, "MkdirAll") - - // Directory layout: - // <repoRoot>/ - //   .git/ - //   node_modules/ - //     some-dep/ - //   parent/ - //     child/ - - watcher, err := GetPlatformSpecificBackend(logger) - assert.NilError(t, err, "GetPlatformSpecificBackend") - fw := New(logger, repoRoot, watcher) - err = fw.Start() - assert.NilError(t, err, "fw.Start") - - // Add a client - ch := make(chan Event, 1) - c := &testClient{ - notify: ch, - } - fw.AddClient(c) - expectedWatching := []turbopath.AbsoluteSystemPath{ - repoRoot, - repoRoot.UntypedJoin("parent"), - repoRoot.UntypedJoin("parent", "child"), - } - expectWatching(t, c, expectedWatching) - - // Delete the root folder during file watching - err = repoRoot.RemoveAll() - assert.NilError(t, err, "RemoveAll") - - expectFilesystemEvent(t, ch, Event{ - EventType: FileDeleted, - Path: repoRoot, - }) -} - -// TestFileWatchingSubfolderRename tests that when a repo subfolder is renamed, -// file watching will continue, and a rename event will be sent.
-// -// ✅ macOS -// ✅ Linux -// ❌ Windows - you cannot rename a watched folder (see https://github.com/fsnotify/fsnotify/issues/356) -func TestFileWatchingSubfolderRename(t *testing.T) { - if runtime.GOOS == "windows" { - return - } - logger := hclog.Default() - logger.SetLevel(hclog.Debug) - repoRoot := fs.AbsoluteSystemPathFromUpstream(t.TempDir()) - err := repoRoot.UntypedJoin(".git").MkdirAll(0775) - assert.NilError(t, err, "MkdirAll") - err = repoRoot.UntypedJoin("node_modules", "some-dep").MkdirAll(0775) - assert.NilError(t, err, "MkdirAll") - err = repoRoot.UntypedJoin("parent", "child").MkdirAll(0775) - assert.NilError(t, err, "MkdirAll") - - // Directory layout: - // <repoRoot>/ - // .git/ - // node_modules/ - // some-dep/ - // parent/ - // child/ - - watcher, err := GetPlatformSpecificBackend(logger) - assert.NilError(t, err, "GetPlatformSpecificBackend") - fw := New(logger, repoRoot, watcher) - err = fw.Start() - assert.NilError(t, err, "fw.Start") - - // Add a client - ch := make(chan Event, 1) - c := &testClient{ - notify: ch, - } - fw.AddClient(c) - expectedWatching := []turbopath.AbsoluteSystemPath{ - repoRoot, - repoRoot.UntypedJoin("parent"), - repoRoot.UntypedJoin("parent", "child"), - } - expectWatching(t, c, expectedWatching) - - // Rename parent folder during file watching - err = repoRoot.UntypedJoin("parent").Rename(repoRoot.UntypedJoin("new_parent")) - assert.NilError(t, err, "Rename") - expectFilesystemEvent(t, ch, Event{ - EventType: FileDeleted, - Path: repoRoot.UntypedJoin("parent"), - }) - expectFilesystemEvent(t, ch, Event{ - EventType: FileAdded, - Path: repoRoot.UntypedJoin("new_parent"), - }) - - // Ensure we get an event when creating a file in renamed directory - fooPath := repoRoot.UntypedJoin("new_parent", "child", "foo") - err = fooPath.WriteFile([]byte("hello"), 0644) - assert.NilError(t, err, "WriteFile") - expectFilesystemEvent(t, ch, Event{ - EventType: FileAdded, - Path: fooPath, - }) -} - -// TestFileWatchingRootRename tests that when the root is renamed, -// a delete event will be sent -// -// ✅ macOS -// ✅ Linux -// ❌ Windows - you cannot rename a watched folder (see https://github.com/fsnotify/fsnotify/issues/356) -func TestFileWatchingRootRename(t *testing.T) { - if runtime.GOOS == "windows" { - return - } - logger := hclog.Default() - logger.SetLevel(hclog.Debug) - oldRepoRoot := fs.AbsoluteSystemPathFromUpstream(t.TempDir()) - err := oldRepoRoot.UntypedJoin(".git").MkdirAll(0775) - assert.NilError(t, err, "MkdirAll") - err = oldRepoRoot.UntypedJoin("node_modules", "some-dep").MkdirAll(0775) - assert.NilError(t, err, "MkdirAll") - err = oldRepoRoot.UntypedJoin("parent", "child").MkdirAll(0775) - assert.NilError(t, err, "MkdirAll") - - // Directory layout: - // <oldRepoRoot>/ - // .git/ - // node_modules/ - // some-dep/ - // parent/ - // child/ - - watcher, err := GetPlatformSpecificBackend(logger) - assert.NilError(t, err, "GetPlatformSpecificBackend") - fw := New(logger, oldRepoRoot, watcher) - err = fw.Start() - assert.NilError(t, err, "fw.Start") - - // Add a client - ch := make(chan Event, 1) - c := &testClient{ - notify: ch, - } - fw.AddClient(c) - expectedWatching := []turbopath.AbsoluteSystemPath{ - oldRepoRoot, - oldRepoRoot.UntypedJoin("parent"), - oldRepoRoot.UntypedJoin("parent", "child"), - } - expectWatching(t, c, expectedWatching) - - // Rename root folder during file watching - newRepoRoot := oldRepoRoot.Dir().UntypedJoin("new_repo_root") - err = oldRepoRoot.Rename(newRepoRoot) - assert.NilError(t, err, "Rename") - - 
expectFilesystemEvent(t, ch, Event{ - EventType: FileDeleted, - Path: oldRepoRoot, - }) - // We got the root delete event, no guarantees about what happens after that -} - -// TestFileWatchSymlinkCreate tests that when a symlink is created, -// file watching will continue, and a file create event is sent. -// it also validates that new files in the symlinked directory will -// be watched, and raise events with the original path. -// -// ✅ macOS -// ✅ Linux -// ✅ Windows - requires admin permissions -func TestFileWatchSymlinkCreate(t *testing.T) { - logger := hclog.Default() - logger.SetLevel(hclog.Debug) - repoRoot := fs.AbsoluteSystemPathFromUpstream(t.TempDir()) - err := repoRoot.UntypedJoin(".git").MkdirAll(0775) - assert.NilError(t, err, "MkdirAll") - err = repoRoot.UntypedJoin("node_modules", "some-dep").MkdirAll(0775) - assert.NilError(t, err, "MkdirAll") - err = repoRoot.UntypedJoin("parent", "child").MkdirAll(0775) - assert.NilError(t, err, "MkdirAll") - - // Directory layout: - // <repoRoot>/ - // .git/ - // node_modules/ - // some-dep/ - // parent/ - // child/ - - watcher, err := GetPlatformSpecificBackend(logger) - assert.NilError(t, err, "GetPlatformSpecificBackend") - fw := New(logger, repoRoot, watcher) - err = fw.Start() - assert.NilError(t, err, "fw.Start") - - // Add a client - ch := make(chan Event, 1) - c := &testClient{ - notify: ch, - } - fw.AddClient(c) - expectedWatching := []turbopath.AbsoluteSystemPath{ - repoRoot, - repoRoot.UntypedJoin("parent"), - repoRoot.UntypedJoin("parent", "child"), - } - expectWatching(t, c, expectedWatching) - - // Create symlink during file watching - symlinkPath := repoRoot.UntypedJoin("symlink") - err = symlinkPath.Symlink(repoRoot.UntypedJoin("parent", "child").ToString()) - assert.NilError(t, err, "Symlink") - expectFilesystemEvent(t, ch, - Event{ - EventType: FileAdded, - Path: symlinkPath, - }, - ) - - // we expect that events in the symlinked directory will be raised with the original path - symlinkSubfile := symlinkPath.UntypedJoin("symlink_subfile") - err = symlinkSubfile.WriteFile([]byte("hello"), 0644) - assert.NilError(t, err, "WriteFile") - expectFilesystemEvent(t, ch, - Event{ - EventType: FileAdded, - Path: repoRoot.UntypedJoin("parent", "child", "symlink_subfile"), - }, - ) -} - -// TestFileWatchSymlinkDelete tests that when a symlink is deleted, -// file watching raises no events for the virtual path -// -// ✅ macOS -// ✅ Linux -// ✅ Windows - requires admin permissions -func TestFileWatchSymlinkDelete(t *testing.T) { - logger := hclog.Default() - logger.SetLevel(hclog.Debug) - repoRoot := fs.AbsoluteSystemPathFromUpstream(t.TempDir()) - err := repoRoot.UntypedJoin(".git").MkdirAll(0775) - assert.NilError(t, err, "MkdirAll") - err = repoRoot.UntypedJoin("node_modules", "some-dep").MkdirAll(0775) - assert.NilError(t, err, "MkdirAll") - err = repoRoot.UntypedJoin("parent", "child").MkdirAll(0775) - assert.NilError(t, err, "MkdirAll") - symlinkPath := repoRoot.UntypedJoin("symlink") - err = symlinkPath.Symlink(repoRoot.UntypedJoin("parent", "child").ToString()) - assert.NilError(t, err, "Symlink") - - // Directory layout: - // <repoRoot>/ - // .git/ - // node_modules/ - // some-dep/ - // parent/ - // child/ - // symlink -> parent/child - - watcher, err := GetPlatformSpecificBackend(logger) - assert.NilError(t, err, "GetPlatformSpecificBackend") - fw := New(logger, repoRoot, watcher) - err = fw.Start() - assert.NilError(t, err, "fw.Start") - - // Add a client - ch := make(chan Event, 1) - c := &testClient{ - notify: ch, - } 
- fw.AddClient(c) - expectedWatching := []turbopath.AbsoluteSystemPath{ - repoRoot, - repoRoot.UntypedJoin("parent"), - repoRoot.UntypedJoin("parent", "child"), - } - expectWatching(t, c, expectedWatching) - - // Delete symlink during file watching - err = symlinkPath.Remove() - assert.NilError(t, err, "Remove") - expectFilesystemEvent(t, ch, Event{ - EventType: FileDeleted, - Path: symlinkPath, - }) -} - -// TestFileWatchSymlinkRename tests that when a symlink is renamed, -// file watching raises a create event for the virtual path -// -// ✅ macOS -// ✅ Linux -// ❌ Windows - raises an event for creating the file -func TestFileWatchSymlinkRename(t *testing.T) { - logger := hclog.Default() - logger.SetLevel(hclog.Debug) - repoRoot := fs.AbsoluteSystemPathFromUpstream(t.TempDir()) - err := repoRoot.UntypedJoin(".git").MkdirAll(0775) - assert.NilError(t, err, "MkdirAll") - err = repoRoot.UntypedJoin("node_modules", "some-dep").MkdirAll(0775) - assert.NilError(t, err, "MkdirAll") - err = repoRoot.UntypedJoin("parent", "child").MkdirAll(0775) - assert.NilError(t, err, "MkdirAll") - symlinkPath := repoRoot.UntypedJoin("symlink") - err = symlinkPath.Symlink(repoRoot.UntypedJoin("parent", "child").ToString()) - assert.NilError(t, err, "Symlink") - - // Directory layout: - // <repoRoot>/ - //   .git/ - //   node_modules/ - //     some-dep/ - //   parent/ - //     child/ - //   symlink -> parent/child - - watcher, err := GetPlatformSpecificBackend(logger) - assert.NilError(t, err, "GetPlatformSpecificBackend") - fw := New(logger, repoRoot, watcher) - err = fw.Start() - assert.NilError(t, err, "fw.Start") - - // Add a client - ch := make(chan Event, 1) - c := &testClient{ - notify: ch, - } - fw.AddClient(c) - expectedWatching := []turbopath.AbsoluteSystemPath{ - repoRoot, - repoRoot.UntypedJoin("parent"), - repoRoot.UntypedJoin("parent", "child"), - } - expectWatching(t, c, expectedWatching) - - // Rename symlink during file watching - newSymlinkPath := repoRoot.UntypedJoin("new_symlink") - err = symlinkPath.Rename(newSymlinkPath) - assert.NilError(t, err, "Rename") - - expectFilesystemEvent(t, ch, Event{ - EventType: FileDeleted, - Path: symlinkPath, - }) - - expectFilesystemEvent(t, ch, Event{ - EventType: FileAdded, - Path: newSymlinkPath, - }) - -} - -// TestFileWatchRootParentRename tests that when the parent directory of the root is renamed, -// file watching stops reporting events -// -// additionally, renaming the root parent directory back to its original name should cause file watching -// to start reporting events again -// -// ✅ macOS -// ✅ Linux -// ❌ Windows -func TestFileWatchRootParentRename(t *testing.T) { - if runtime.GOOS == "windows" { - return - } - logger := hclog.Default() - logger.SetLevel(hclog.Debug) - - parent := fs.AbsoluteSystemPathFromUpstream(t.TempDir()) - repoRoot := parent.UntypedJoin("repo") - err := repoRoot.UntypedJoin(".git").MkdirAll(0775) - assert.NilError(t, err, "MkdirAll") - - // Directory layout: - // <parent>/ - //   repo/ - //     .git/ - - watcher, err := GetPlatformSpecificBackend(logger) - assert.NilError(t, err, "GetPlatformSpecificBackend") - fw := New(logger, repoRoot, watcher) - err = fw.Start() - assert.NilError(t, err, "fw.Start") - - // Add a client - ch := make(chan Event, 1) - c := &testClient{ - notify: ch, - } - fw.AddClient(c) - expectedWatching := []turbopath.AbsoluteSystemPath{ - repoRoot, - } - expectWatching(t, c, expectedWatching) - - // Rename the repo root during file watching - newRepoRoot := parent.UntypedJoin("new_repo") - err =
repoRoot.Rename(newRepoRoot) - assert.NilError(t, err, "Rename") - expectFilesystemEvent(t, ch, Event{ - EventType: FileDeleted, - Path: repoRoot, - }) - // We got the root delete event, no guarantees about what happens after that -} - -// TestFileWatchRootParentDelete tests that when the parent directory of the root is deleted, -// a delete event is reported for the root -// -// ✅ macOS -// ✅ Linux -// ❌ Windows - no create event is emitted -func TestFileWatchRootParentDelete(t *testing.T) { - logger := hclog.Default() - logger.SetLevel(hclog.Debug) - - parent := fs.AbsoluteSystemPathFromUpstream(t.TempDir()) - repoRoot := parent.UntypedJoin("repo") - err := repoRoot.UntypedJoin(".git").MkdirAll(0775) - assert.NilError(t, err, "MkdirAll") - - // Directory layout: - // <parent>/ - //   repo/ - //     .git/ - - watcher, err := GetPlatformSpecificBackend(logger) - assert.NilError(t, err, "GetPlatformSpecificBackend") - fw := New(logger, repoRoot, watcher) - err = fw.Start() - assert.NilError(t, err, "fw.Start") - - // Add a client - ch := make(chan Event, 1) - c := &testClient{ - notify: ch, - } - fw.AddClient(c) - expectedWatching := []turbopath.AbsoluteSystemPath{ - repoRoot, - } - expectWatching(t, c, expectedWatching) - - // Delete parent directory during file watching - err = parent.RemoveAll() - assert.NilError(t, err, "RemoveAll") - expectFilesystemEvent(t, ch, Event{ - EventType: FileDeleted, - Path: repoRoot, - }) - // We got the root delete event, no guarantees about what happens after that -} diff --git a/cli/internal/fs/copy_file.go b/cli/internal/fs/copy_file.go deleted file mode 100644 index 9b9617659035c..0000000000000 --- a/cli/internal/fs/copy_file.go +++ /dev/null @@ -1,52 +0,0 @@ -// Adapted from https://github.com/thought-machine/please -// Copyright Thought Machine, Inc. or its affiliates. All Rights Reserved. -// SPDX-License-Identifier: Apache-2.0 -package fs - -import ( - "errors" - "os" - - "github.com/karrick/godirwalk" -) - -// Walk implements an equivalent to filepath.Walk. -// It's implemented over github.com/karrick/godirwalk but the provided interface doesn't use that -// to make it a little easier to handle. -func Walk(rootPath string, callback func(name string, isDir bool) error) error { - return WalkMode(rootPath, func(name string, isDir bool, mode os.FileMode) error { - return callback(name, isDir) - }) -} - -// WalkMode is like Walk but the callback receives an additional type specifying the file mode type. -// N.B. This only includes the bits of the mode that determine the mode type, not the permissions.
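Before the WalkMode implementation below, a quick usage sketch for the Walk wrapper. This is a hypothetical example (it assumes the deleted fs package and "path/filepath" are imported): collect every package.json under a root.

```go
// findPackageJSONs walks root and collects package.json files. The callback
// receives each entry's path and a directory flag; symlinked directories are
// not followed (see the godirwalk options below).
func findPackageJSONs(root string) ([]string, error) {
	var found []string
	err := fs.Walk(root, func(name string, isDir bool) error {
		if !isDir && filepath.Base(name) == "package.json" {
			found = append(found, name)
		}
		return nil
	})
	return found, err
}
```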
-func WalkMode(rootPath string, callback func(name string, isDir bool, mode os.FileMode) error) error { - return godirwalk.Walk(rootPath, &godirwalk.Options{ - Callback: func(name string, info *godirwalk.Dirent) error { - // currently we support symlinked files, but not symlinked directories: - // For copying, we Mkdir and bail if we encounter a symlink to a directory - // For finding packages, we enumerate the symlink, but don't follow inside - isDir, err := info.IsDirOrSymlinkToDir() - if err != nil { - pathErr := &os.PathError{} - if errors.As(err, &pathErr) { - // If we have a broken link, skip this entry - return godirwalk.SkipThis - } - return err - } - return callback(name, isDir, info.ModeType()) - }, - ErrorCallback: func(pathname string, err error) godirwalk.ErrorAction { - pathErr := &os.PathError{} - if errors.As(err, &pathErr) { - return godirwalk.SkipNode - } - return godirwalk.Halt - }, - Unsorted: true, - AllowNonDirectory: true, - FollowSymbolicLinks: false, - }) -} diff --git a/cli/internal/fs/copy_file_test.go b/cli/internal/fs/copy_file_test.go deleted file mode 100644 index 7e36af17d47a6..0000000000000 --- a/cli/internal/fs/copy_file_test.go +++ /dev/null @@ -1,99 +0,0 @@ -package fs - -import ( - "errors" - "os" - "testing" - - "github.com/vercel/turbo/cli/internal/turbopath" - "gotest.tools/v3/assert" -) - -func TestCopyFile(t *testing.T) { - srcTmpDir := turbopath.AbsoluteSystemPath(t.TempDir()) - destTmpDir := turbopath.AbsoluteSystemPath(t.TempDir()) - srcFilePath := srcTmpDir.UntypedJoin("src") - destFilePath := destTmpDir.UntypedJoin("dest") - from := &LstatCachedFile{Path: srcFilePath} - - // The src file doesn't exist, will error. - err := CopyFile(from, destFilePath.ToString()) - pathErr := &os.PathError{} - if !errors.As(err, &pathErr) { - t.Errorf("got %v, want PathError", err) - } - - // Create the src file. - srcFile, err := srcFilePath.Create() - assert.NilError(t, err, "Create") - _, err = srcFile.WriteString("src") - assert.NilError(t, err, "WriteString") - assert.NilError(t, srcFile.Close(), "Close") - - // Copy the src to the dest. - err = CopyFile(from, destFilePath.ToString()) - assert.NilError(t, err, "src exists dest does not, should not error.") - - // Now test for symlinks. - symlinkSrcDir := turbopath.AbsoluteSystemPath(t.TempDir()) - symlinkTargetDir := turbopath.AbsoluteSystemPath(t.TempDir()) - symlinkDestDir := turbopath.AbsoluteSystemPath(t.TempDir()) - symlinkSrcPath := symlinkSrcDir.UntypedJoin("symlink") - symlinkTargetPath := symlinkTargetDir.UntypedJoin("target") - symlinkDestPath := symlinkDestDir.UntypedJoin("dest") - fromSymlink := &LstatCachedFile{Path: symlinkSrcPath} - - // Create the symlink target. - symlinkTargetFile, err := symlinkTargetPath.Create() - assert.NilError(t, err, "Create") - _, err = symlinkTargetFile.WriteString("Target") - assert.NilError(t, err, "WriteString") - assert.NilError(t, symlinkTargetFile.Close(), "Close") - - // Link things up. - err = symlinkSrcPath.Symlink(symlinkTargetPath.ToString()) - assert.NilError(t, err, "Symlink") - - // Run the test. - err = CopyFile(fromSymlink, symlinkDestPath.ToString()) - assert.NilError(t, err, "Copying a valid symlink does not error.") - - // Break the symlink. - err = symlinkTargetPath.Remove() - assert.NilError(t, err, "breaking the symlink") - - // Remove the existing copy. - err = symlinkDestPath.Remove() - assert.NilError(t, err, "existing copy is removed") - - // Try copying the now-broken symlink.
- err = CopyFile(fromSymlink, symlinkDestPath.ToString()) - assert.NilError(t, err, "CopyFile") - - // Confirm that it copied - target, err := symlinkDestPath.Readlink() - assert.NilError(t, err, "Readlink") - assert.Equal(t, target, symlinkTargetPath.ToString()) -} - -func TestCopyOrLinkFileWithPerms(t *testing.T) { - // Directory layout: - // - // <src>/ - // foo - readonlyMode := os.FileMode(0444) - srcDir := turbopath.AbsoluteSystemPath(t.TempDir()) - dstDir := turbopath.AbsoluteSystemPath(t.TempDir()) - srcFilePath := srcDir.UntypedJoin("src") - dstFilePath := dstDir.UntypedJoin("dst") - srcFile, err := srcFilePath.Create() - defer func() { _ = srcFile.Close() }() - assert.NilError(t, err, "Create") - err = srcFile.Chmod(readonlyMode) - assert.NilError(t, err, "Chmod") - err = CopyFile(&LstatCachedFile{Path: srcFilePath}, dstFilePath.ToStringDuringMigration()) - assert.NilError(t, err, "CopyOrLinkFile") - info, err := dstFilePath.Lstat() - assert.NilError(t, err, "Lstat") - assert.Equal(t, info.Mode(), readonlyMode, "expected dest to have matching permissions") -} diff --git a/cli/internal/fs/fs.go b/cli/internal/fs/fs.go deleted file mode 100644 index 77804c0708010..0000000000000 --- a/cli/internal/fs/fs.go +++ /dev/null @@ -1,191 +0,0 @@ -package fs - -import ( - "io" - "io/ioutil" - "log" - "os" - "path/filepath" - "runtime" - "strings" - - "github.com/pkg/errors" - "github.com/vercel/turbo/cli/internal/util" -) - -// https://github.com/thought-machine/please/blob/master/src/fs/fs.go - -// DirPermissions are the default permission bits we apply to directories. -const DirPermissions = os.ModeDir | 0775 - -// EnsureDir ensures that the directory of the given file has been created. -func EnsureDir(filename string) error { - dir := filepath.Dir(filename) - err := os.MkdirAll(dir, DirPermissions) - if err != nil && FileExists(dir) { - // It looks like this is a file and not a directory. Attempt to remove it; this can - // happen in some cases if you change a rule from outputting a file to a directory. - log.Printf("Attempting to remove file %s; a subdirectory is required", dir) - if err2 := os.Remove(dir); err2 == nil { - err = os.MkdirAll(dir, DirPermissions) - } else { - return err - } - } - return err -} - -var nonRelativeSentinel string = ".." + string(filepath.Separator) - -// DirContainsPath returns true if the path 'target' is contained within 'dir' -// Expects both paths to be absolute and does not verify that either path exists. -func DirContainsPath(dir string, target string) (bool, error) { - // On windows, trying to get a relative path between files on different volumes - // is an error. We don't care about the error, it's good enough for us to say - // that one path doesn't contain the other if they're on different volumes. - if runtime.GOOS == "windows" && filepath.VolumeName(dir) != filepath.VolumeName(target) { - return false, nil - } - // In Go, filepath.Rel can return a path that starts with "../" or equivalent. - // Checking filesystem-level contains can get extremely complicated - // (see https://github.com/golang/dep/blob/f13583b555deaa6742f141a9c1185af947720d60/internal/fs/fs.go#L33) - // As a compromise, rely on the stdlib to generate a relative path and then check - // if the first step is "../". - rel, err := filepath.Rel(dir, target) - if err != nil { - return false, err - } - return !strings.HasPrefix(rel, nonRelativeSentinel), nil -} - -// PathExists returns true if the given path exists, as a file or a directory. 
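The DirContainsPath helper above leans on filepath.Rel rather than inspecting the filesystem. Here is a self-contained sketch of the same idea (a standalone illustration, not the deleted implementation; it omits the Windows volume check):

```go
package main

import (
	"fmt"
	"path/filepath"
	"strings"
)

// contains reports whether target sits inside dir: compute a relative path
// and reject any result that has to climb out through "../".
func contains(dir, target string) (bool, error) {
	rel, err := filepath.Rel(dir, target)
	if err != nil {
		return false, err
	}
	return !strings.HasPrefix(rel, ".."+string(filepath.Separator)), nil
}

func main() {
	fmt.Println(contains("/repo", "/repo/apps/web"))     // true <nil>
	fmt.Println(contains("/repo", "/repo/../elsewhere")) // false <nil>
}
```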
-func PathExists(filename string) bool { - _, err := os.Lstat(filename) - return err == nil -} - -// FileExists returns true if the given path exists and is a file. -func FileExists(filename string) bool { - info, err := os.Lstat(filename) - return err == nil && !info.IsDir() -} - -// CopyFile copies a file from 'from' to 'to', with an attempt to perform a copy & rename -// to avoid chaos if anything goes wrong partway. -func CopyFile(from *LstatCachedFile, to string) error { - fromMode, err := from.GetMode() - if err != nil { - return errors.Wrapf(err, "getting mode for %v", from.Path) - } - if fromMode&os.ModeSymlink != 0 { - target, err := from.Path.Readlink() - if err != nil { - return errors.Wrapf(err, "reading link target for %v", from.Path) - } - if err := EnsureDir(to); err != nil { - return err - } - if _, err := os.Lstat(to); err == nil { - // target link file exist, should remove it first - err := os.Remove(to) - if err != nil { - return err - } - } - return os.Symlink(target, to) - } - fromFile, err := from.Path.Open() - if err != nil { - return err - } - defer util.CloseAndIgnoreError(fromFile) - return writeFileFromStream(fromFile, to, fromMode) -} - -// writeFileFromStream writes data from a reader to the file named 'to', with an attempt to perform -// a copy & rename to avoid chaos if anything goes wrong partway. -func writeFileFromStream(fromFile io.Reader, to string, mode os.FileMode) error { - dir, file := filepath.Split(to) - if dir != "" { - if err := os.MkdirAll(dir, DirPermissions); err != nil { - return err - } - } - tempFile, err := ioutil.TempFile(dir, file) - if err != nil { - return err - } - if _, err := io.Copy(tempFile, fromFile); err != nil { - return err - } - if err := tempFile.Close(); err != nil { - return err - } - // OK, now file is written; adjust permissions appropriately. - if mode == 0 { - mode = 0664 - } - if err := os.Chmod(tempFile.Name(), mode); err != nil { - return err - } - // And move it to its final destination. 
- return renameFile(tempFile.Name(), to) -} - -// IsDirectory checks if a given path is a directory -func IsDirectory(path string) bool { - info, err := os.Stat(path) - return err == nil && info.IsDir() -} - -// Try to gracefully rename the file as the os.Rename does not work across -// filesystems and on most Linux systems /tmp is mounted as tmpfs -func renameFile(from, to string) (err error) { - err = os.Rename(from, to) - if err == nil { - return nil - } - err = copyFile(from, to) - if err != nil { - return err - } - err = os.RemoveAll(from) - if err != nil { - return err - } - return nil -} - -func copyFile(from, to string) (err error) { - in, err := os.Open(from) - if err != nil { - return err - } - defer in.Close() - - out, err := os.Create(to) - if err != nil { - return err - } - defer func() { - if e := out.Close(); e != nil { - err = e - } - }() - - _, err = io.Copy(out, in) - if err != nil { - return err - } - - si, err := os.Stat(from) - if err != nil { - return err - } - err = os.Chmod(to, si.Mode()) - if err != nil { - return err - } - - return nil -} diff --git a/cli/internal/fs/fs_test.go b/cli/internal/fs/fs_test.go deleted file mode 100644 index 0598d430d1401..0000000000000 --- a/cli/internal/fs/fs_test.go +++ /dev/null @@ -1,60 +0,0 @@ -package fs - -import ( - "path/filepath" - "testing" -) - -func Test_DirContainsPath(t *testing.T) { - parent, err := filepath.Abs(filepath.Join("some", "path")) - if err != nil { - t.Fatalf("failed to construct parent path %v", err) - } - testcases := []struct { - target []string - want bool - }{ - { - []string{"..", "elsewhere"}, - false, - }, - { - []string{"sibling"}, - false, - }, - { - // The same path as parent - []string{"some", "path"}, - true, - }, - { - []string{"some", "path", "..", "path", "inside", "parent"}, - true, - }, - { - []string{"some", "path", "inside", "..", "inside", "parent"}, - true, - }, - { - []string{"some", "path", "inside", "..", "..", "outside", "parent"}, - false, - }, - { - []string{"some", "pathprefix"}, - false, - }, - } - for _, tc := range testcases { - target, err := filepath.Abs(filepath.Join(tc.target...)) - if err != nil { - t.Fatalf("failed to construct path for %v: %v", tc.target, err) - } - got, err := DirContainsPath(parent, target) - if err != nil { - t.Fatalf("failed to check DirContainsPath: %v", err) - } - if got != tc.want { - t.Errorf("DirContainsPath(%v, %v) got %v, want %v", parent, target, got, tc.want) - } - } -} diff --git a/cli/internal/fs/fs_windows_test.go b/cli/internal/fs/fs_windows_test.go deleted file mode 100644 index 4e71e2c816b61..0000000000000 --- a/cli/internal/fs/fs_windows_test.go +++ /dev/null @@ -1,18 +0,0 @@ -//go:build windows -// +build windows - -package fs - -import "testing" - -func TestDifferentVolumes(t *testing.T) { - p1 := "C:\\some\\path" - p2 := "D:\\other\\path" - contains, err := DirContainsPath(p1, p2) - if err != nil { - t.Errorf("DirContainsPath got error %v, want <nil>", err) - } - if contains { - t.Errorf("DirContainsPath got true, want false") - } -} diff --git a/cli/internal/fs/get_turbo_data_dir_go.go b/cli/internal/fs/get_turbo_data_dir_go.go deleted file mode 100644 index 2cf459aa1810e..0000000000000 --- a/cli/internal/fs/get_turbo_data_dir_go.go +++ /dev/null @@ -1,16 +0,0 @@ -//go:build go || !rust -// +build go !rust - -package fs - -import ( - "github.com/adrg/xdg" - "github.com/vercel/turbo/cli/internal/turbopath" -) - -// GetTurboDataDir returns a directory outside of the repo -// where turbo can store data files related to turbo.
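The go-tagged variant below resolves this directory through the adrg/xdg package. As a rough illustration (hypothetical paths, based on adrg/xdg's documented platform defaults), the result is a per-user directory outside the repo:

```go
package main

import (
	"fmt"
	"path/filepath"

	"github.com/adrg/xdg"
)

func main() {
	// xdg.DataHome is the platform's user data directory, e.g.
	// ~/.local/share on Linux or ~/Library/Application Support on macOS.
	fmt.Println(filepath.Join(xdg.DataHome, "turborepo"))
}
```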
-func GetTurboDataDir() turbopath.AbsoluteSystemPath { - dataHome := AbsoluteSystemPathFromUpstream(xdg.DataHome) - return dataHome.UntypedJoin("turborepo") -} diff --git a/cli/internal/fs/get_turbo_data_dir_rust.go b/cli/internal/fs/get_turbo_data_dir_rust.go deleted file mode 100644 index dbc80f3eefad1..0000000000000 --- a/cli/internal/fs/get_turbo_data_dir_rust.go +++ /dev/null @@ -1,16 +0,0 @@ -//go:build rust -// +build rust - -package fs - -import ( - "github.com/vercel/turbo/cli/internal/ffi" - "github.com/vercel/turbo/cli/internal/turbopath" -) - -// GetTurboDataDir returns a directory outside of the repo -// where turbo can store data files related to turbo. -func GetTurboDataDir() turbopath.AbsoluteSystemPath { - dir := ffi.GetTurboDataDir() - return turbopath.AbsoluteSystemPathFromUpstream(dir) -} diff --git a/cli/internal/fs/hash.go b/cli/internal/fs/hash.go deleted file mode 100644 index 79905aae6b2c5..0000000000000 --- a/cli/internal/fs/hash.go +++ /dev/null @@ -1,78 +0,0 @@ -package fs - -import ( - "crypto/sha1" - "encoding/hex" - "fmt" - "io" - "strconv" - - "github.com/vercel/turbo/cli/internal/fs/hash" - - "github.com/vercel/turbo/cli/internal/lockfile" - "github.com/vercel/turbo/cli/internal/turbopath" - "github.com/vercel/turbo/cli/internal/xxhash" -) - -// LockfilePackages is a hashable list of packages -type LockfilePackages []lockfile.Package - -// FileHashes is a hashable map of files to the hash of their contents -type FileHashes map[turbopath.AnchoredUnixPath]string - -// HashLockfilePackages hashes a list of packages -func HashLockfilePackages(packages LockfilePackages) (string, error) { - return hash.HashLockfilePackages(packages) -} - -// HashFileHashes produces a single hash for a set of file hashes -func HashFileHashes(hashes FileHashes) (string, error) { - return hash.HashFileHashes(hashes) -} - -// HashTask produces the hash for a particular task -func HashTask(task *hash.TaskHashable) (string, error) { - return hash.HashTaskHashable(task) -} - -// HashGlobal produces the global hash value to be incorporated in every task hash -func HashGlobal(global hash.GlobalHashable) (string, error) { - return hash.HashGlobalHashable(&global) -} - -// hashObject is the internal generic hash function. It should not be used directly, -// but instead via a helper above to ensure that we are properly enumerating all of the -// kinds of data that we hash.
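This file also carries GitLikeHashFile, which reproduces git's blob hashing: SHA-1 over the header "blob <size>\0" followed by the file contents. A standalone sketch of that scheme (independent of the deleted helpers):

```go
package main

import (
	"crypto/sha1"
	"fmt"
)

func main() {
	content := []byte("hello\n")
	h := sha1.New()
	// git hashes blobs as: "blob" SP <decimal size> NUL <content>
	fmt.Fprintf(h, "blob %d\x00", len(content))
	h.Write(content)
	fmt.Printf("%x\n", h.Sum(nil))
	// ce013625030ba8dba906f756967f9e9ca394464a, matching
	// `echo hello | git hash-object --stdin`
}
```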
-func hashObject(i interface{}) (string, error) { - hash := xxhash.New() - - _, err := hash.Write([]byte(fmt.Sprintf("%v", i))) - - return hex.EncodeToString(hash.Sum(nil)), err -} - -// GitLikeHashFile is a function that mimics how Git -// calculates the SHA1 for a file (or, in Git terms, a "blob") (without git) -func GitLikeHashFile(filePath turbopath.AbsoluteSystemPath) (string, error) { - file, err := filePath.Open() - if err != nil { - return "", err - } - defer file.Close() - - stat, err := file.Stat() - if err != nil { - return "", err - } - hash := sha1.New() - hash.Write([]byte("blob")) - hash.Write([]byte(" ")) - hash.Write([]byte(strconv.FormatInt(stat.Size(), 10))) - hash.Write([]byte{0}) - - if _, err := io.Copy(hash, file); err != nil { - return "", err - } - - return hex.EncodeToString(hash.Sum(nil)), nil -} diff --git a/cli/internal/fs/hash/capnp.go b/cli/internal/fs/hash/capnp.go deleted file mode 100644 index ab105b15cb0d2..0000000000000 --- a/cli/internal/fs/hash/capnp.go +++ /dev/null @@ -1,461 +0,0 @@ -// Package hash contains the capnp schema and hashing functions for the turbo cache -// -// it depends on the generated capnp schema in ./capnp. to regenerate the schema, -// you need the capnp binary as well as capnpc-go available in your path. then run: -// -// capnp compile -I std -ogo proto.capnp -// -// in crates/turborepo-lib/src/hash or run `make turbo-capnp` in the `cli` directory. -package hash - -import ( - "encoding/hex" - "sort" - - capnp "capnproto.org/go/capnp/v3" - "github.com/vercel/turbo/cli/internal/env" - turbo_capnp "github.com/vercel/turbo/cli/internal/fs/hash/capnp" - "github.com/vercel/turbo/cli/internal/lockfile" - "github.com/vercel/turbo/cli/internal/turbopath" - "github.com/vercel/turbo/cli/internal/util" - "github.com/vercel/turbo/cli/internal/xxhash" -) - -// TaskHashable is a hashable representation of a task to be run -type TaskHashable struct { - GlobalHash string - TaskDependencyHashes []string - HashOfFiles string - ExternalDepsHash string - - PackageDir turbopath.AnchoredUnixPath - Task string - Outputs TaskOutputs - PassThruArgs []string - - Env []string - ResolvedEnvVars env.EnvironmentVariablePairs - PassThroughEnv []string - EnvMode util.EnvMode - DotEnv turbopath.AnchoredUnixPathArray -} - -// GlobalHashable is a hashable representation of global dependencies for tasks -type GlobalHashable struct { - GlobalCacheKey string - GlobalFileHashMap map[turbopath.AnchoredUnixPath]string - RootExternalDepsHash string - Env []string - ResolvedEnvVars env.EnvironmentVariablePairs - PassThroughEnv []string - EnvMode util.EnvMode - FrameworkInference bool - - // NOTE! This field is _explicitly_ ordered and should not be sorted. - DotEnv turbopath.AnchoredUnixPathArray -} - -// TaskOutputs represents the patterns for including and excluding files from outputs -type TaskOutputs struct { - Inclusions []string - Exclusions []string -} - -// Sort contents of task outputs -func (to *TaskOutputs) Sort() { - sort.Strings(to.Inclusions) - sort.Strings(to.Exclusions) -} - -// HashTaskHashable performs the hash for a TaskHashable, using capnproto for stable cross platform / language hashing -// -// NOTE: This function is _explicitly_ ordered and should not be sorted. 
-// -// Order is important for the hash, and is as follows: -// - GlobalHash -// - PackageDir -// - HashOfFiles -// - ExternalDepsHash -// - Task -// - EnvMode -// - Outputs -// - TaskDependencyHashes -// - PassThruArgs -// - Env -// - PassThroughEnv -// - DotEnv -// - ResolvedEnvVars -func HashTaskHashable(task *TaskHashable) (string, error) { - arena := capnp.SingleSegment(nil) - - _, seg, err := capnp.NewMessage(arena) - if err != nil { - return "", err - } - - taskMsg, err := turbo_capnp.NewRootTaskHashable(seg) - if err != nil { - return "", err - } - - err = taskMsg.SetGlobalHash(task.GlobalHash) - if err != nil { - return "", err - } - - err = taskMsg.SetPackageDir(task.PackageDir.ToString()) - if err != nil { - return "", err - } - - err = taskMsg.SetHashOfFiles(task.HashOfFiles) - if err != nil { - return "", err - } - - err = taskMsg.SetExternalDepsHash(task.ExternalDepsHash) - if err != nil { - return "", err - } - - err = taskMsg.SetTask(task.Task) - if err != nil { - return "", err - } - - { - var envMode turbo_capnp.TaskHashable_EnvMode - switch task.EnvMode { - case util.Infer: - panic("task inferred status should have already been resolved") - case util.Loose: - envMode = turbo_capnp.TaskHashable_EnvMode_loose - case util.Strict: - envMode = turbo_capnp.TaskHashable_EnvMode_strict - } - - taskMsg.SetEnvMode(envMode) - } - - { - deps, err := taskMsg.NewOutputs() - if err != nil { - return "", err - } - - err = assignList(task.Outputs.Inclusions, deps.SetInclusions, seg) - if err != nil { - return "", err - } - - err = assignList(task.Outputs.Exclusions, deps.SetExclusions, seg) - if err != nil { - return "", err - } - - err = taskMsg.SetOutputs(deps) - if err != nil { - return "", err - } - } - - err = assignList(task.TaskDependencyHashes, taskMsg.SetTaskDependencyHashes, seg) - if err != nil { - return "", err - } - - err = assignList(task.PassThruArgs, taskMsg.SetPassThruArgs, seg) - if err != nil { - return "", err - } - - err = assignList(task.Env, taskMsg.SetEnv, seg) - if err != nil { - return "", err - } - - err = assignList(task.PassThroughEnv, taskMsg.SetPassThruEnv, seg) - if err != nil { - return "", err - } - - err = assignAnchoredUnixArray(task.DotEnv, taskMsg.SetDotEnv, seg) - if err != nil { - return "", err - } - - err = assignList(task.ResolvedEnvVars, taskMsg.SetResolvedEnvVars, seg) - if err != nil { - return "", err - } - - return HashMessage(taskMsg.Message()) -} - -// HashGlobalHashable performs the hash for a GlobalHashable, using capnproto for stable cross platform / language hashing -// -// NOTE: This function is _explicitly_ ordered and should not be sorted. 
-// -// Order is important for the hash, and is as follows: -// - GlobalCacheKey -// - GlobalFileHashMap -// - RootExternalDepsHash -// - Env -// - ResolvedEnvVars -// - PassThroughEnv -// - EnvMode -// - FrameworkInference -// - DotEnv -func HashGlobalHashable(global *GlobalHashable) (string, error) { - arena := capnp.SingleSegment(nil) - - _, seg, err := capnp.NewMessage(arena) - if err != nil { - return "", err - } - - globalMsg, err := turbo_capnp.NewRootGlobalHashable(seg) - if err != nil { - return "", err - } - - err = globalMsg.SetGlobalCacheKey(global.GlobalCacheKey) - if err != nil { - return "", err - } - - { - entries, err := globalMsg.NewGlobalFileHashMap(int32(len(global.GlobalFileHashMap))) - if err != nil { - return "", err - } - - err = assignSortedHashMap(global.GlobalFileHashMap, func(i int, key string, value string) error { - entry := entries.At(i) - - err = entry.SetKey(key) - if err != nil { - return err - } - - err = entry.SetValue(value) - if err != nil { - return err - } - - return nil - }) - if err != nil { - return "", err - } - } - - err = globalMsg.SetRootExternalDepsHash(global.RootExternalDepsHash) - if err != nil { - return "", err - } - - err = assignList(global.Env, globalMsg.SetEnv, seg) - if err != nil { - return "", err - } - - err = assignList(global.ResolvedEnvVars, globalMsg.SetResolvedEnvVars, seg) - if err != nil { - return "", err - } - - err = assignList(global.PassThroughEnv, globalMsg.SetPassThroughEnv, seg) - if err != nil { - return "", err - } - - { - var envMode turbo_capnp.GlobalHashable_EnvMode - switch global.EnvMode { - case util.Infer: - envMode = turbo_capnp.GlobalHashable_EnvMode_infer - case util.Loose: - envMode = turbo_capnp.GlobalHashable_EnvMode_loose - case util.Strict: - envMode = turbo_capnp.GlobalHashable_EnvMode_strict - } - - globalMsg.SetEnvMode(envMode) - } - - globalMsg.SetFrameworkInference(global.FrameworkInference) - - err = assignAnchoredUnixArray(global.DotEnv, globalMsg.SetDotEnv, seg) - if err != nil { - return "", err - } - - return HashMessage(globalMsg.Message()) -} - -// HashLockfilePackages hashes lockfile packages -func HashLockfilePackages(packages []lockfile.Package) (string, error) { - arena := capnp.SingleSegment(nil) - - _, seg, err := capnp.NewMessage(arena) - if err != nil { - return "", err - } - - globalMsg, err := turbo_capnp.NewRootLockFilePackages(seg) - if err != nil { - return "", err - } - - entries, err := globalMsg.NewPackages(int32(len(packages))) - if err != nil { - return "", err - } - for i, pkg := range packages { - entry := entries.At(i) - - err = entry.SetKey(pkg.Key) - if err != nil { - return "", err - } - - // We explicitly write Version to match Rust behavior when writing empty strings - // The Go library will emit a null pointer if the string is empty instead - // of a zero length list. 
- err = capnp.Struct(entry).SetNewText(1, pkg.Version) - if err != nil { - return "", err - } - - entry.SetFound(pkg.Found) - } - - return HashMessage(globalMsg.Message()) -} - -// HashFileHashes hashes files -func HashFileHashes(fileHashes map[turbopath.AnchoredUnixPath]string) (string, error) { - arena := capnp.SingleSegment(nil) - - _, seg, err := capnp.NewMessage(arena) - if err != nil { - return "", err - } - - globalMsg, err := turbo_capnp.NewRootFileHashes(seg) - if err != nil { - return "", err - } - - { - entries, err := globalMsg.NewFileHashes(int32(len(fileHashes))) - if err != nil { - return "", err - } - - err = assignSortedHashMap(fileHashes, func(i int, key string, value string) error { - entry := entries.At(i) - - err = entry.SetKey(key) - if err != nil { - return err - } - - err = entry.SetValue(value) - if err != nil { - return err - } - - return nil - }) - if err != nil { - return "", err - } - } - - return HashMessage(globalMsg.Message()) -} - -// HashMessage hashes a capnp message using xxhash -func HashMessage(msg *capnp.Message) (string, error) { - root, err := msg.Root() - if err != nil { - return "", err - } - - bytes, err := capnp.Canonicalize(root.Struct()) - if err != nil { - return "", err - } - - // _ = turbopath.AbsoluteSystemPath(".turbo/go-hash").WriteFile(bytes, 0644) - - digest := xxhash.New() - _, err = digest.Write(bytes) - if err != nil { - return "", err - } - - out := digest.Sum(nil) - - return hex.EncodeToString(out), nil -} - -// assignSortedHashMap gets a list of key value pairs and then sort them by key -// to do this we need three lists, one for the keys, one for the string representation of the keys, -// and one for the indices of the keys -func assignSortedHashMap(packages map[turbopath.AnchoredUnixPath]string, setEntry func(int, string, string) error) error { - keys := make([]turbopath.AnchoredUnixPath, len(packages)) - keyStrs := make([]string, len(packages)) - keyIndices := make([]int, len(packages)) - - i := 0 - for k := range packages { - keys[i] = k - keyStrs[i] = k.ToString() - keyIndices[i] = i - i++ - } - - sort.Slice(keyIndices, func(i, j int) bool { - return keyStrs[keyIndices[i]] < keyStrs[keyIndices[j]] - }) - - for i, idx := range keyIndices { - err := setEntry(i, keyStrs[idx], packages[keys[idx]]) - if err != nil { - return err - } - } - - return nil -} - -func assignList(list []string, fn func(capnp.TextList) error, seg *capnp.Segment) error { - textList, err := capnp.NewTextList(seg, int32(len(list))) - if err != nil { - return err - } - for i, v := range list { - err = textList.Set(i, v) - if err != nil { - return err - } - } - return fn(textList) -} - -func assignAnchoredUnixArray(paths turbopath.AnchoredUnixPathArray, fn func(capnp.TextList) error, seg *capnp.Segment) error { - textList, err := capnp.NewTextList(seg, int32(len(paths))) - if err != nil { - return err - } - for i, v := range paths { - err = textList.Set(i, v.ToString()) - if err != nil { - return err - } - } - return fn(textList) -} diff --git a/cli/internal/fs/hash/capnp/proto.capnp.go b/cli/internal/fs/hash/capnp/proto.capnp.go deleted file mode 100644 index 33dbd555208c3..0000000000000 --- a/cli/internal/fs/hash/capnp/proto.capnp.go +++ /dev/null @@ -1,1357 +0,0 @@ -// Code generated by capnpc-go. DO NOT EDIT. 
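The HashMessage helper above is what makes these hashes stable across the Go and Rust implementations: the capnp message is canonicalized before hashing. A condensed sketch of that recipe (assuming the capnproto.org/go/capnp/v3 API shown in this file, and any xxhash that implements hash.Hash, e.g. github.com/cespare/xxhash/v2 as a stand-in for the internal package):

```go
import (
	"encoding/hex"

	capnp "capnproto.org/go/capnp/v3"
	xxhash "github.com/cespare/xxhash/v2"
)

// hashMessage canonicalizes the root struct so the byte layout is
// deterministic, then hashes those bytes with xxhash.
func hashMessage(msg *capnp.Message) (string, error) {
	root, err := msg.Root()
	if err != nil {
		return "", err
	}
	data, err := capnp.Canonicalize(root.Struct())
	if err != nil {
		return "", err
	}
	digest := xxhash.New()
	if _, err := digest.Write(data); err != nil {
		return "", err
	}
	return hex.EncodeToString(digest.Sum(nil)), nil
}
```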
- -package capnp - -import ( - capnp "capnproto.org/go/capnp/v3" - text "capnproto.org/go/capnp/v3/encoding/text" - schemas "capnproto.org/go/capnp/v3/schemas" -) - -type TaskHashable capnp.Struct - -// TaskHashable_TypeID is the unique identifier for the type TaskHashable. -const TaskHashable_TypeID = 0xe1f09ceb4ef5e479 - -func NewTaskHashable(s *capnp.Segment) (TaskHashable, error) { - st, err := capnp.NewStruct(s, capnp.ObjectSize{DataSize: 8, PointerCount: 12}) - return TaskHashable(st), err -} - -func NewRootTaskHashable(s *capnp.Segment) (TaskHashable, error) { - st, err := capnp.NewRootStruct(s, capnp.ObjectSize{DataSize: 8, PointerCount: 12}) - return TaskHashable(st), err -} - -func ReadRootTaskHashable(msg *capnp.Message) (TaskHashable, error) { - root, err := msg.Root() - return TaskHashable(root.Struct()), err -} - -func (s TaskHashable) String() string { - str, _ := text.Marshal(0xe1f09ceb4ef5e479, capnp.Struct(s)) - return str -} - -func (s TaskHashable) EncodeAsPtr(seg *capnp.Segment) capnp.Ptr { - return capnp.Struct(s).EncodeAsPtr(seg) -} - -func (TaskHashable) DecodeFromPtr(p capnp.Ptr) TaskHashable { - return TaskHashable(capnp.Struct{}.DecodeFromPtr(p)) -} - -func (s TaskHashable) ToPtr() capnp.Ptr { - return capnp.Struct(s).ToPtr() -} -func (s TaskHashable) IsValid() bool { - return capnp.Struct(s).IsValid() -} - -func (s TaskHashable) Message() *capnp.Message { - return capnp.Struct(s).Message() -} - -func (s TaskHashable) Segment() *capnp.Segment { - return capnp.Struct(s).Segment() -} -func (s TaskHashable) GlobalHash() (string, error) { - p, err := capnp.Struct(s).Ptr(0) - return p.Text(), err -} - -func (s TaskHashable) HasGlobalHash() bool { - return capnp.Struct(s).HasPtr(0) -} - -func (s TaskHashable) GlobalHashBytes() ([]byte, error) { - p, err := capnp.Struct(s).Ptr(0) - return p.TextBytes(), err -} - -func (s TaskHashable) SetGlobalHash(v string) error { - return capnp.Struct(s).SetText(0, v) -} - -func (s TaskHashable) TaskDependencyHashes() (capnp.TextList, error) { - p, err := capnp.Struct(s).Ptr(1) - return capnp.TextList(p.List()), err -} - -func (s TaskHashable) HasTaskDependencyHashes() bool { - return capnp.Struct(s).HasPtr(1) -} - -func (s TaskHashable) SetTaskDependencyHashes(v capnp.TextList) error { - return capnp.Struct(s).SetPtr(1, v.ToPtr()) -} - -// NewTaskDependencyHashes sets the taskDependencyHashes field to a newly -// allocated capnp.TextList, preferring placement in s's segment. 
-func (s TaskHashable) NewTaskDependencyHashes(n int32) (capnp.TextList, error) { - l, err := capnp.NewTextList(capnp.Struct(s).Segment(), n) - if err != nil { - return capnp.TextList{}, err - } - err = capnp.Struct(s).SetPtr(1, l.ToPtr()) - return l, err -} -func (s TaskHashable) PackageDir() (string, error) { - p, err := capnp.Struct(s).Ptr(2) - return p.Text(), err -} - -func (s TaskHashable) HasPackageDir() bool { - return capnp.Struct(s).HasPtr(2) -} - -func (s TaskHashable) PackageDirBytes() ([]byte, error) { - p, err := capnp.Struct(s).Ptr(2) - return p.TextBytes(), err -} - -func (s TaskHashable) SetPackageDir(v string) error { - return capnp.Struct(s).SetText(2, v) -} - -func (s TaskHashable) HashOfFiles() (string, error) { - p, err := capnp.Struct(s).Ptr(3) - return p.Text(), err -} - -func (s TaskHashable) HasHashOfFiles() bool { - return capnp.Struct(s).HasPtr(3) -} - -func (s TaskHashable) HashOfFilesBytes() ([]byte, error) { - p, err := capnp.Struct(s).Ptr(3) - return p.TextBytes(), err -} - -func (s TaskHashable) SetHashOfFiles(v string) error { - return capnp.Struct(s).SetText(3, v) -} - -func (s TaskHashable) ExternalDepsHash() (string, error) { - p, err := capnp.Struct(s).Ptr(4) - return p.Text(), err -} - -func (s TaskHashable) HasExternalDepsHash() bool { - return capnp.Struct(s).HasPtr(4) -} - -func (s TaskHashable) ExternalDepsHashBytes() ([]byte, error) { - p, err := capnp.Struct(s).Ptr(4) - return p.TextBytes(), err -} - -func (s TaskHashable) SetExternalDepsHash(v string) error { - return capnp.Struct(s).SetText(4, v) -} - -func (s TaskHashable) Task() (string, error) { - p, err := capnp.Struct(s).Ptr(5) - return p.Text(), err -} - -func (s TaskHashable) HasTask() bool { - return capnp.Struct(s).HasPtr(5) -} - -func (s TaskHashable) TaskBytes() ([]byte, error) { - p, err := capnp.Struct(s).Ptr(5) - return p.TextBytes(), err -} - -func (s TaskHashable) SetTask(v string) error { - return capnp.Struct(s).SetText(5, v) -} - -func (s TaskHashable) Outputs() (TaskOutputs, error) { - p, err := capnp.Struct(s).Ptr(6) - return TaskOutputs(p.Struct()), err -} - -func (s TaskHashable) HasOutputs() bool { - return capnp.Struct(s).HasPtr(6) -} - -func (s TaskHashable) SetOutputs(v TaskOutputs) error { - return capnp.Struct(s).SetPtr(6, capnp.Struct(v).ToPtr()) -} - -// NewOutputs sets the outputs field to a newly -// allocated TaskOutputs struct, preferring placement in s's segment. -func (s TaskHashable) NewOutputs() (TaskOutputs, error) { - ss, err := NewTaskOutputs(capnp.Struct(s).Segment()) - if err != nil { - return TaskOutputs{}, err - } - err = capnp.Struct(s).SetPtr(6, capnp.Struct(ss).ToPtr()) - return ss, err -} - -func (s TaskHashable) PassThruArgs() (capnp.TextList, error) { - p, err := capnp.Struct(s).Ptr(7) - return capnp.TextList(p.List()), err -} - -func (s TaskHashable) HasPassThruArgs() bool { - return capnp.Struct(s).HasPtr(7) -} - -func (s TaskHashable) SetPassThruArgs(v capnp.TextList) error { - return capnp.Struct(s).SetPtr(7, v.ToPtr()) -} - -// NewPassThruArgs sets the passThruArgs field to a newly -// allocated capnp.TextList, preferring placement in s's segment. 
-func (s TaskHashable) NewPassThruArgs(n int32) (capnp.TextList, error) { - l, err := capnp.NewTextList(capnp.Struct(s).Segment(), n) - if err != nil { - return capnp.TextList{}, err - } - err = capnp.Struct(s).SetPtr(7, l.ToPtr()) - return l, err -} -func (s TaskHashable) Env() (capnp.TextList, error) { - p, err := capnp.Struct(s).Ptr(8) - return capnp.TextList(p.List()), err -} - -func (s TaskHashable) HasEnv() bool { - return capnp.Struct(s).HasPtr(8) -} - -func (s TaskHashable) SetEnv(v capnp.TextList) error { - return capnp.Struct(s).SetPtr(8, v.ToPtr()) -} - -// NewEnv sets the env field to a newly -// allocated capnp.TextList, preferring placement in s's segment. -func (s TaskHashable) NewEnv(n int32) (capnp.TextList, error) { - l, err := capnp.NewTextList(capnp.Struct(s).Segment(), n) - if err != nil { - return capnp.TextList{}, err - } - err = capnp.Struct(s).SetPtr(8, l.ToPtr()) - return l, err -} -func (s TaskHashable) ResolvedEnvVars() (capnp.TextList, error) { - p, err := capnp.Struct(s).Ptr(9) - return capnp.TextList(p.List()), err -} - -func (s TaskHashable) HasResolvedEnvVars() bool { - return capnp.Struct(s).HasPtr(9) -} - -func (s TaskHashable) SetResolvedEnvVars(v capnp.TextList) error { - return capnp.Struct(s).SetPtr(9, v.ToPtr()) -} - -// NewResolvedEnvVars sets the resolvedEnvVars field to a newly -// allocated capnp.TextList, preferring placement in s's segment. -func (s TaskHashable) NewResolvedEnvVars(n int32) (capnp.TextList, error) { - l, err := capnp.NewTextList(capnp.Struct(s).Segment(), n) - if err != nil { - return capnp.TextList{}, err - } - err = capnp.Struct(s).SetPtr(9, l.ToPtr()) - return l, err -} -func (s TaskHashable) PassThruEnv() (capnp.TextList, error) { - p, err := capnp.Struct(s).Ptr(10) - return capnp.TextList(p.List()), err -} - -func (s TaskHashable) HasPassThruEnv() bool { - return capnp.Struct(s).HasPtr(10) -} - -func (s TaskHashable) SetPassThruEnv(v capnp.TextList) error { - return capnp.Struct(s).SetPtr(10, v.ToPtr()) -} - -// NewPassThruEnv sets the passThruEnv field to a newly -// allocated capnp.TextList, preferring placement in s's segment. -func (s TaskHashable) NewPassThruEnv(n int32) (capnp.TextList, error) { - l, err := capnp.NewTextList(capnp.Struct(s).Segment(), n) - if err != nil { - return capnp.TextList{}, err - } - err = capnp.Struct(s).SetPtr(10, l.ToPtr()) - return l, err -} -func (s TaskHashable) EnvMode() TaskHashable_EnvMode { - return TaskHashable_EnvMode(capnp.Struct(s).Uint16(0)) -} - -func (s TaskHashable) SetEnvMode(v TaskHashable_EnvMode) { - capnp.Struct(s).SetUint16(0, uint16(v)) -} - -func (s TaskHashable) DotEnv() (capnp.TextList, error) { - p, err := capnp.Struct(s).Ptr(11) - return capnp.TextList(p.List()), err -} - -func (s TaskHashable) HasDotEnv() bool { - return capnp.Struct(s).HasPtr(11) -} - -func (s TaskHashable) SetDotEnv(v capnp.TextList) error { - return capnp.Struct(s).SetPtr(11, v.ToPtr()) -} - -// NewDotEnv sets the dotEnv field to a newly -// allocated capnp.TextList, preferring placement in s's segment. -func (s TaskHashable) NewDotEnv(n int32) (capnp.TextList, error) { - l, err := capnp.NewTextList(capnp.Struct(s).Segment(), n) - if err != nil { - return capnp.TextList{}, err - } - err = capnp.Struct(s).SetPtr(11, l.ToPtr()) - return l, err -} - -// TaskHashable_List is a list of TaskHashable. -type TaskHashable_List = capnp.StructList[TaskHashable] - -// NewTaskHashable creates a new list of TaskHashable. 
-func NewTaskHashable_List(s *capnp.Segment, sz int32) (TaskHashable_List, error) { - l, err := capnp.NewCompositeList(s, capnp.ObjectSize{DataSize: 8, PointerCount: 12}, sz) - return capnp.StructList[TaskHashable](l), err -} - -// TaskHashable_Future is a wrapper for a TaskHashable promised by a client call. -type TaskHashable_Future struct{ *capnp.Future } - -func (f TaskHashable_Future) Struct() (TaskHashable, error) { - p, err := f.Future.Ptr() - return TaskHashable(p.Struct()), err -} -func (p TaskHashable_Future) Outputs() TaskOutputs_Future { - return TaskOutputs_Future{Future: p.Future.Field(6, nil)} -} - -type TaskHashable_EnvMode uint16 - -// TaskHashable_EnvMode_TypeID is the unique identifier for the type TaskHashable_EnvMode. -const TaskHashable_EnvMode_TypeID = 0x8dc08a1d29f69b16 - -// Values of TaskHashable_EnvMode. -const ( - TaskHashable_EnvMode_loose TaskHashable_EnvMode = 0 - TaskHashable_EnvMode_strict TaskHashable_EnvMode = 1 -) - -// String returns the enum's constant name. -func (c TaskHashable_EnvMode) String() string { - switch c { - case TaskHashable_EnvMode_loose: - return "loose" - case TaskHashable_EnvMode_strict: - return "strict" - - default: - return "" - } -} - -// TaskHashable_EnvModeFromString returns the enum value with a name, -// or the zero value if there's no such value. -func TaskHashable_EnvModeFromString(c string) TaskHashable_EnvMode { - switch c { - case "loose": - return TaskHashable_EnvMode_loose - case "strict": - return TaskHashable_EnvMode_strict - - default: - return 0 - } -} - -type TaskHashable_EnvMode_List = capnp.EnumList[TaskHashable_EnvMode] - -func NewTaskHashable_EnvMode_List(s *capnp.Segment, sz int32) (TaskHashable_EnvMode_List, error) { - return capnp.NewEnumList[TaskHashable_EnvMode](s, sz) -} - -type TaskOutputs capnp.Struct - -// TaskOutputs_TypeID is the unique identifier for the type TaskOutputs. 
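
One subtlety in the generated `TaskHashable_EnvMode` mapping above: `TaskHashable_EnvModeFromString` returns the zero value for any unrecognized name, and the zero value here is `loose`, so a misspelled mode string is silently treated as loose rather than rejected. A small standalone illustration of that fallback, using hypothetical local names rather than the generated ones:

```go
package main

import "fmt"

// EnvMode mirrors the generated enum: the zero value doubles as "loose".
type EnvMode uint16

const (
	Loose  EnvMode = 0
	Strict EnvMode = 1
)

// envModeFromString mirrors TaskHashable_EnvModeFromString: unknown names
// fall through to the zero value instead of producing an error.
func envModeFromString(s string) EnvMode {
	switch s {
	case "loose":
		return Loose
	case "strict":
		return Strict
	default:
		return 0
	}
}

func main() {
	fmt.Println(envModeFromString("strict")) // 1
	fmt.Println(envModeFromString("sloppy")) // 0, indistinguishable from "loose"
}
```
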
-const TaskOutputs_TypeID = 0xd58300fce8aba267 - -func NewTaskOutputs(s *capnp.Segment) (TaskOutputs, error) { - st, err := capnp.NewStruct(s, capnp.ObjectSize{DataSize: 0, PointerCount: 2}) - return TaskOutputs(st), err -} - -func NewRootTaskOutputs(s *capnp.Segment) (TaskOutputs, error) { - st, err := capnp.NewRootStruct(s, capnp.ObjectSize{DataSize: 0, PointerCount: 2}) - return TaskOutputs(st), err -} - -func ReadRootTaskOutputs(msg *capnp.Message) (TaskOutputs, error) { - root, err := msg.Root() - return TaskOutputs(root.Struct()), err -} - -func (s TaskOutputs) String() string { - str, _ := text.Marshal(0xd58300fce8aba267, capnp.Struct(s)) - return str -} - -func (s TaskOutputs) EncodeAsPtr(seg *capnp.Segment) capnp.Ptr { - return capnp.Struct(s).EncodeAsPtr(seg) -} - -func (TaskOutputs) DecodeFromPtr(p capnp.Ptr) TaskOutputs { - return TaskOutputs(capnp.Struct{}.DecodeFromPtr(p)) -} - -func (s TaskOutputs) ToPtr() capnp.Ptr { - return capnp.Struct(s).ToPtr() -} -func (s TaskOutputs) IsValid() bool { - return capnp.Struct(s).IsValid() -} - -func (s TaskOutputs) Message() *capnp.Message { - return capnp.Struct(s).Message() -} - -func (s TaskOutputs) Segment() *capnp.Segment { - return capnp.Struct(s).Segment() -} -func (s TaskOutputs) Inclusions() (capnp.TextList, error) { - p, err := capnp.Struct(s).Ptr(0) - return capnp.TextList(p.List()), err -} - -func (s TaskOutputs) HasInclusions() bool { - return capnp.Struct(s).HasPtr(0) -} - -func (s TaskOutputs) SetInclusions(v capnp.TextList) error { - return capnp.Struct(s).SetPtr(0, v.ToPtr()) -} - -// NewInclusions sets the inclusions field to a newly -// allocated capnp.TextList, preferring placement in s's segment. -func (s TaskOutputs) NewInclusions(n int32) (capnp.TextList, error) { - l, err := capnp.NewTextList(capnp.Struct(s).Segment(), n) - if err != nil { - return capnp.TextList{}, err - } - err = capnp.Struct(s).SetPtr(0, l.ToPtr()) - return l, err -} -func (s TaskOutputs) Exclusions() (capnp.TextList, error) { - p, err := capnp.Struct(s).Ptr(1) - return capnp.TextList(p.List()), err -} - -func (s TaskOutputs) HasExclusions() bool { - return capnp.Struct(s).HasPtr(1) -} - -func (s TaskOutputs) SetExclusions(v capnp.TextList) error { - return capnp.Struct(s).SetPtr(1, v.ToPtr()) -} - -// NewExclusions sets the exclusions field to a newly -// allocated capnp.TextList, preferring placement in s's segment. -func (s TaskOutputs) NewExclusions(n int32) (capnp.TextList, error) { - l, err := capnp.NewTextList(capnp.Struct(s).Segment(), n) - if err != nil { - return capnp.TextList{}, err - } - err = capnp.Struct(s).SetPtr(1, l.ToPtr()) - return l, err -} - -// TaskOutputs_List is a list of TaskOutputs. -type TaskOutputs_List = capnp.StructList[TaskOutputs] - -// NewTaskOutputs creates a new list of TaskOutputs. -func NewTaskOutputs_List(s *capnp.Segment, sz int32) (TaskOutputs_List, error) { - l, err := capnp.NewCompositeList(s, capnp.ObjectSize{DataSize: 0, PointerCount: 2}, sz) - return capnp.StructList[TaskOutputs](l), err -} - -// TaskOutputs_Future is a wrapper for a TaskOutputs promised by a client call. -type TaskOutputs_Future struct{ *capnp.Future } - -func (f TaskOutputs_Future) Struct() (TaskOutputs, error) { - p, err := f.Future.Ptr() - return TaskOutputs(p.Struct()), err -} - -type GlobalHashable capnp.Struct - -// GlobalHashable_TypeID is the unique identifier for the type GlobalHashable. 
-const GlobalHashable_TypeID = 0xea0b3688577e30b4 - -func NewGlobalHashable(s *capnp.Segment) (GlobalHashable, error) { - st, err := capnp.NewStruct(s, capnp.ObjectSize{DataSize: 8, PointerCount: 7}) - return GlobalHashable(st), err -} - -func NewRootGlobalHashable(s *capnp.Segment) (GlobalHashable, error) { - st, err := capnp.NewRootStruct(s, capnp.ObjectSize{DataSize: 8, PointerCount: 7}) - return GlobalHashable(st), err -} - -func ReadRootGlobalHashable(msg *capnp.Message) (GlobalHashable, error) { - root, err := msg.Root() - return GlobalHashable(root.Struct()), err -} - -func (s GlobalHashable) String() string { - str, _ := text.Marshal(0xea0b3688577e30b4, capnp.Struct(s)) - return str -} - -func (s GlobalHashable) EncodeAsPtr(seg *capnp.Segment) capnp.Ptr { - return capnp.Struct(s).EncodeAsPtr(seg) -} - -func (GlobalHashable) DecodeFromPtr(p capnp.Ptr) GlobalHashable { - return GlobalHashable(capnp.Struct{}.DecodeFromPtr(p)) -} - -func (s GlobalHashable) ToPtr() capnp.Ptr { - return capnp.Struct(s).ToPtr() -} -func (s GlobalHashable) IsValid() bool { - return capnp.Struct(s).IsValid() -} - -func (s GlobalHashable) Message() *capnp.Message { - return capnp.Struct(s).Message() -} - -func (s GlobalHashable) Segment() *capnp.Segment { - return capnp.Struct(s).Segment() -} -func (s GlobalHashable) GlobalCacheKey() (string, error) { - p, err := capnp.Struct(s).Ptr(0) - return p.Text(), err -} - -func (s GlobalHashable) HasGlobalCacheKey() bool { - return capnp.Struct(s).HasPtr(0) -} - -func (s GlobalHashable) GlobalCacheKeyBytes() ([]byte, error) { - p, err := capnp.Struct(s).Ptr(0) - return p.TextBytes(), err -} - -func (s GlobalHashable) SetGlobalCacheKey(v string) error { - return capnp.Struct(s).SetText(0, v) -} - -func (s GlobalHashable) GlobalFileHashMap() (GlobalHashable_Entry_List, error) { - p, err := capnp.Struct(s).Ptr(1) - return GlobalHashable_Entry_List(p.List()), err -} - -func (s GlobalHashable) HasGlobalFileHashMap() bool { - return capnp.Struct(s).HasPtr(1) -} - -func (s GlobalHashable) SetGlobalFileHashMap(v GlobalHashable_Entry_List) error { - return capnp.Struct(s).SetPtr(1, v.ToPtr()) -} - -// NewGlobalFileHashMap sets the globalFileHashMap field to a newly -// allocated GlobalHashable_Entry_List, preferring placement in s's segment. -func (s GlobalHashable) NewGlobalFileHashMap(n int32) (GlobalHashable_Entry_List, error) { - l, err := NewGlobalHashable_Entry_List(capnp.Struct(s).Segment(), n) - if err != nil { - return GlobalHashable_Entry_List{}, err - } - err = capnp.Struct(s).SetPtr(1, l.ToPtr()) - return l, err -} -func (s GlobalHashable) RootExternalDepsHash() (string, error) { - p, err := capnp.Struct(s).Ptr(2) - return p.Text(), err -} - -func (s GlobalHashable) HasRootExternalDepsHash() bool { - return capnp.Struct(s).HasPtr(2) -} - -func (s GlobalHashable) RootExternalDepsHashBytes() ([]byte, error) { - p, err := capnp.Struct(s).Ptr(2) - return p.TextBytes(), err -} - -func (s GlobalHashable) SetRootExternalDepsHash(v string) error { - return capnp.Struct(s).SetText(2, v) -} - -func (s GlobalHashable) Env() (capnp.TextList, error) { - p, err := capnp.Struct(s).Ptr(3) - return capnp.TextList(p.List()), err -} - -func (s GlobalHashable) HasEnv() bool { - return capnp.Struct(s).HasPtr(3) -} - -func (s GlobalHashable) SetEnv(v capnp.TextList) error { - return capnp.Struct(s).SetPtr(3, v.ToPtr()) -} - -// NewEnv sets the env field to a newly -// allocated capnp.TextList, preferring placement in s's segment. 
-func (s GlobalHashable) NewEnv(n int32) (capnp.TextList, error) { - l, err := capnp.NewTextList(capnp.Struct(s).Segment(), n) - if err != nil { - return capnp.TextList{}, err - } - err = capnp.Struct(s).SetPtr(3, l.ToPtr()) - return l, err -} -func (s GlobalHashable) ResolvedEnvVars() (capnp.TextList, error) { - p, err := capnp.Struct(s).Ptr(4) - return capnp.TextList(p.List()), err -} - -func (s GlobalHashable) HasResolvedEnvVars() bool { - return capnp.Struct(s).HasPtr(4) -} - -func (s GlobalHashable) SetResolvedEnvVars(v capnp.TextList) error { - return capnp.Struct(s).SetPtr(4, v.ToPtr()) -} - -// NewResolvedEnvVars sets the resolvedEnvVars field to a newly -// allocated capnp.TextList, preferring placement in s's segment. -func (s GlobalHashable) NewResolvedEnvVars(n int32) (capnp.TextList, error) { - l, err := capnp.NewTextList(capnp.Struct(s).Segment(), n) - if err != nil { - return capnp.TextList{}, err - } - err = capnp.Struct(s).SetPtr(4, l.ToPtr()) - return l, err -} -func (s GlobalHashable) PassThroughEnv() (capnp.TextList, error) { - p, err := capnp.Struct(s).Ptr(5) - return capnp.TextList(p.List()), err -} - -func (s GlobalHashable) HasPassThroughEnv() bool { - return capnp.Struct(s).HasPtr(5) -} - -func (s GlobalHashable) SetPassThroughEnv(v capnp.TextList) error { - return capnp.Struct(s).SetPtr(5, v.ToPtr()) -} - -// NewPassThroughEnv sets the passThroughEnv field to a newly -// allocated capnp.TextList, preferring placement in s's segment. -func (s GlobalHashable) NewPassThroughEnv(n int32) (capnp.TextList, error) { - l, err := capnp.NewTextList(capnp.Struct(s).Segment(), n) - if err != nil { - return capnp.TextList{}, err - } - err = capnp.Struct(s).SetPtr(5, l.ToPtr()) - return l, err -} -func (s GlobalHashable) EnvMode() GlobalHashable_EnvMode { - return GlobalHashable_EnvMode(capnp.Struct(s).Uint16(0)) -} - -func (s GlobalHashable) SetEnvMode(v GlobalHashable_EnvMode) { - capnp.Struct(s).SetUint16(0, uint16(v)) -} - -func (s GlobalHashable) FrameworkInference() bool { - return capnp.Struct(s).Bit(16) -} - -func (s GlobalHashable) SetFrameworkInference(v bool) { - capnp.Struct(s).SetBit(16, v) -} - -func (s GlobalHashable) DotEnv() (capnp.TextList, error) { - p, err := capnp.Struct(s).Ptr(6) - return capnp.TextList(p.List()), err -} - -func (s GlobalHashable) HasDotEnv() bool { - return capnp.Struct(s).HasPtr(6) -} - -func (s GlobalHashable) SetDotEnv(v capnp.TextList) error { - return capnp.Struct(s).SetPtr(6, v.ToPtr()) -} - -// NewDotEnv sets the dotEnv field to a newly -// allocated capnp.TextList, preferring placement in s's segment. -func (s GlobalHashable) NewDotEnv(n int32) (capnp.TextList, error) { - l, err := capnp.NewTextList(capnp.Struct(s).Segment(), n) - if err != nil { - return capnp.TextList{}, err - } - err = capnp.Struct(s).SetPtr(6, l.ToPtr()) - return l, err -} - -// GlobalHashable_List is a list of GlobalHashable. -type GlobalHashable_List = capnp.StructList[GlobalHashable] - -// NewGlobalHashable creates a new list of GlobalHashable. -func NewGlobalHashable_List(s *capnp.Segment, sz int32) (GlobalHashable_List, error) { - l, err := capnp.NewCompositeList(s, capnp.ObjectSize{DataSize: 8, PointerCount: 7}, sz) - return capnp.StructList[GlobalHashable](l), err -} - -// GlobalHashable_Future is a wrapper for a GlobalHashable promised by a client call. 
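
Note how the fixed-size fields of `GlobalHashable` above share a single data word: `envMode` occupies the uint16 at offset 0, and `frameworkInference` is stored at bit 16, the first bit after it, while text and list fields live behind pointers. A sketch of that packing with plain bit arithmetic (the offsets mirror the generated accessors; the helper names are made up):

```go
package main

import "fmt"

func main() {
	var word uint64

	// setEnvMode writes a uint16 into bits 0-15, like SetUint16(0, v).
	setEnvMode := func(v uint16) { word = word&^0xFFFF | uint64(v) }

	// setFrameworkInference writes bit 16, like SetBit(16, v).
	setFrameworkInference := func(v bool) {
		if v {
			word |= 1 << 16
		} else {
			word &^= 1 << 16
		}
	}

	setEnvMode(2) // strict
	setFrameworkInference(true)
	fmt.Printf("%#x\n", word) // 0x10002
}
```
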
-type GlobalHashable_Future struct{ *capnp.Future } - -func (f GlobalHashable_Future) Struct() (GlobalHashable, error) { - p, err := f.Future.Ptr() - return GlobalHashable(p.Struct()), err -} - -type GlobalHashable_EnvMode uint16 - -// GlobalHashable_EnvMode_TypeID is the unique identifier for the type GlobalHashable_EnvMode. -const GlobalHashable_EnvMode_TypeID = 0xab4200df8263c5d4 - -// Values of GlobalHashable_EnvMode. -const ( - GlobalHashable_EnvMode_infer GlobalHashable_EnvMode = 0 - GlobalHashable_EnvMode_loose GlobalHashable_EnvMode = 1 - GlobalHashable_EnvMode_strict GlobalHashable_EnvMode = 2 -) - -// String returns the enum's constant name. -func (c GlobalHashable_EnvMode) String() string { - switch c { - case GlobalHashable_EnvMode_infer: - return "infer" - case GlobalHashable_EnvMode_loose: - return "loose" - case GlobalHashable_EnvMode_strict: - return "strict" - - default: - return "" - } -} - -// GlobalHashable_EnvModeFromString returns the enum value with a name, -// or the zero value if there's no such value. -func GlobalHashable_EnvModeFromString(c string) GlobalHashable_EnvMode { - switch c { - case "infer": - return GlobalHashable_EnvMode_infer - case "loose": - return GlobalHashable_EnvMode_loose - case "strict": - return GlobalHashable_EnvMode_strict - - default: - return 0 - } -} - -type GlobalHashable_EnvMode_List = capnp.EnumList[GlobalHashable_EnvMode] - -func NewGlobalHashable_EnvMode_List(s *capnp.Segment, sz int32) (GlobalHashable_EnvMode_List, error) { - return capnp.NewEnumList[GlobalHashable_EnvMode](s, sz) -} - -type GlobalHashable_Entry capnp.Struct - -// GlobalHashable_Entry_TypeID is the unique identifier for the type GlobalHashable_Entry. -const GlobalHashable_Entry_TypeID = 0xdd6c3d394436cf49 - -func NewGlobalHashable_Entry(s *capnp.Segment) (GlobalHashable_Entry, error) { - st, err := capnp.NewStruct(s, capnp.ObjectSize{DataSize: 0, PointerCount: 2}) - return GlobalHashable_Entry(st), err -} - -func NewRootGlobalHashable_Entry(s *capnp.Segment) (GlobalHashable_Entry, error) { - st, err := capnp.NewRootStruct(s, capnp.ObjectSize{DataSize: 0, PointerCount: 2}) - return GlobalHashable_Entry(st), err -} - -func ReadRootGlobalHashable_Entry(msg *capnp.Message) (GlobalHashable_Entry, error) { - root, err := msg.Root() - return GlobalHashable_Entry(root.Struct()), err -} - -func (s GlobalHashable_Entry) String() string { - str, _ := text.Marshal(0xdd6c3d394436cf49, capnp.Struct(s)) - return str -} - -func (s GlobalHashable_Entry) EncodeAsPtr(seg *capnp.Segment) capnp.Ptr { - return capnp.Struct(s).EncodeAsPtr(seg) -} - -func (GlobalHashable_Entry) DecodeFromPtr(p capnp.Ptr) GlobalHashable_Entry { - return GlobalHashable_Entry(capnp.Struct{}.DecodeFromPtr(p)) -} - -func (s GlobalHashable_Entry) ToPtr() capnp.Ptr { - return capnp.Struct(s).ToPtr() -} -func (s GlobalHashable_Entry) IsValid() bool { - return capnp.Struct(s).IsValid() -} - -func (s GlobalHashable_Entry) Message() *capnp.Message { - return capnp.Struct(s).Message() -} - -func (s GlobalHashable_Entry) Segment() *capnp.Segment { - return capnp.Struct(s).Segment() -} -func (s GlobalHashable_Entry) Key() (string, error) { - p, err := capnp.Struct(s).Ptr(0) - return p.Text(), err -} - -func (s GlobalHashable_Entry) HasKey() bool { - return capnp.Struct(s).HasPtr(0) -} - -func (s GlobalHashable_Entry) KeyBytes() ([]byte, error) { - p, err := capnp.Struct(s).Ptr(0) - return p.TextBytes(), err -} - -func (s GlobalHashable_Entry) SetKey(v string) error { - return capnp.Struct(s).SetText(0, v) -} - -func 
(s GlobalHashable_Entry) Value() (string, error) { - p, err := capnp.Struct(s).Ptr(1) - return p.Text(), err -} - -func (s GlobalHashable_Entry) HasValue() bool { - return capnp.Struct(s).HasPtr(1) -} - -func (s GlobalHashable_Entry) ValueBytes() ([]byte, error) { - p, err := capnp.Struct(s).Ptr(1) - return p.TextBytes(), err -} - -func (s GlobalHashable_Entry) SetValue(v string) error { - return capnp.Struct(s).SetText(1, v) -} - -// GlobalHashable_Entry_List is a list of GlobalHashable_Entry. -type GlobalHashable_Entry_List = capnp.StructList[GlobalHashable_Entry] - -// NewGlobalHashable_Entry creates a new list of GlobalHashable_Entry. -func NewGlobalHashable_Entry_List(s *capnp.Segment, sz int32) (GlobalHashable_Entry_List, error) { - l, err := capnp.NewCompositeList(s, capnp.ObjectSize{DataSize: 0, PointerCount: 2}, sz) - return capnp.StructList[GlobalHashable_Entry](l), err -} - -// GlobalHashable_Entry_Future is a wrapper for a GlobalHashable_Entry promised by a client call. -type GlobalHashable_Entry_Future struct{ *capnp.Future } - -func (f GlobalHashable_Entry_Future) Struct() (GlobalHashable_Entry, error) { - p, err := f.Future.Ptr() - return GlobalHashable_Entry(p.Struct()), err -} - -type LockFilePackages capnp.Struct - -// LockFilePackages_TypeID is the unique identifier for the type LockFilePackages. -const LockFilePackages_TypeID = 0xb470b49a14912305 - -func NewLockFilePackages(s *capnp.Segment) (LockFilePackages, error) { - st, err := capnp.NewStruct(s, capnp.ObjectSize{DataSize: 0, PointerCount: 1}) - return LockFilePackages(st), err -} - -func NewRootLockFilePackages(s *capnp.Segment) (LockFilePackages, error) { - st, err := capnp.NewRootStruct(s, capnp.ObjectSize{DataSize: 0, PointerCount: 1}) - return LockFilePackages(st), err -} - -func ReadRootLockFilePackages(msg *capnp.Message) (LockFilePackages, error) { - root, err := msg.Root() - return LockFilePackages(root.Struct()), err -} - -func (s LockFilePackages) String() string { - str, _ := text.Marshal(0xb470b49a14912305, capnp.Struct(s)) - return str -} - -func (s LockFilePackages) EncodeAsPtr(seg *capnp.Segment) capnp.Ptr { - return capnp.Struct(s).EncodeAsPtr(seg) -} - -func (LockFilePackages) DecodeFromPtr(p capnp.Ptr) LockFilePackages { - return LockFilePackages(capnp.Struct{}.DecodeFromPtr(p)) -} - -func (s LockFilePackages) ToPtr() capnp.Ptr { - return capnp.Struct(s).ToPtr() -} -func (s LockFilePackages) IsValid() bool { - return capnp.Struct(s).IsValid() -} - -func (s LockFilePackages) Message() *capnp.Message { - return capnp.Struct(s).Message() -} - -func (s LockFilePackages) Segment() *capnp.Segment { - return capnp.Struct(s).Segment() -} -func (s LockFilePackages) Packages() (Package_List, error) { - p, err := capnp.Struct(s).Ptr(0) - return Package_List(p.List()), err -} - -func (s LockFilePackages) HasPackages() bool { - return capnp.Struct(s).HasPtr(0) -} - -func (s LockFilePackages) SetPackages(v Package_List) error { - return capnp.Struct(s).SetPtr(0, v.ToPtr()) -} - -// NewPackages sets the packages field to a newly -// allocated Package_List, preferring placement in s's segment. -func (s LockFilePackages) NewPackages(n int32) (Package_List, error) { - l, err := NewPackage_List(capnp.Struct(s).Segment(), n) - if err != nil { - return Package_List{}, err - } - err = capnp.Struct(s).SetPtr(0, l.ToPtr()) - return l, err -} - -// LockFilePackages_List is a list of LockFilePackages. 
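
Cap'n Proto has no native map type, so the generated code above models maps as lists of key/value entry structs (`GlobalHashable_Entry` here, `FileHashes_Entry` further down). Combined with `assignSortedHashMap` from the hand-written file, every map is serialized as an entry list in sorted key order, which is what makes the digests independent of Go's map iteration order. A standard-library-only sketch of the same flattening:

```go
package main

import (
	"fmt"
	"sort"
)

// entry mirrors the generated key/value entry structs.
type entry struct{ Key, Value string }

// toSortedEntries flattens a map into entries ordered by key, so any
// downstream serialization sees a deterministic sequence.
func toSortedEntries(m map[string]string) []entry {
	keys := make([]string, 0, len(m))
	for k := range m {
		keys = append(keys, k)
	}
	sort.Strings(keys)

	entries := make([]entry, 0, len(m))
	for _, k := range keys {
		entries = append(entries, entry{Key: k, Value: m[k]})
	}
	return entries
}

func main() {
	// Always [{a b} {c d}], regardless of Go's randomized iteration order.
	fmt.Println(toSortedEntries(map[string]string{"c": "d", "a": "b"}))
}
```
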
-type LockFilePackages_List = capnp.StructList[LockFilePackages] - -// NewLockFilePackages creates a new list of LockFilePackages. -func NewLockFilePackages_List(s *capnp.Segment, sz int32) (LockFilePackages_List, error) { - l, err := capnp.NewCompositeList(s, capnp.ObjectSize{DataSize: 0, PointerCount: 1}, sz) - return capnp.StructList[LockFilePackages](l), err -} - -// LockFilePackages_Future is a wrapper for a LockFilePackages promised by a client call. -type LockFilePackages_Future struct{ *capnp.Future } - -func (f LockFilePackages_Future) Struct() (LockFilePackages, error) { - p, err := f.Future.Ptr() - return LockFilePackages(p.Struct()), err -} - -type Package capnp.Struct - -// Package_TypeID is the unique identifier for the type Package. -const Package_TypeID = 0xc2d08935698f1b78 - -func NewPackage(s *capnp.Segment) (Package, error) { - st, err := capnp.NewStruct(s, capnp.ObjectSize{DataSize: 8, PointerCount: 2}) - return Package(st), err -} - -func NewRootPackage(s *capnp.Segment) (Package, error) { - st, err := capnp.NewRootStruct(s, capnp.ObjectSize{DataSize: 8, PointerCount: 2}) - return Package(st), err -} - -func ReadRootPackage(msg *capnp.Message) (Package, error) { - root, err := msg.Root() - return Package(root.Struct()), err -} - -func (s Package) String() string { - str, _ := text.Marshal(0xc2d08935698f1b78, capnp.Struct(s)) - return str -} - -func (s Package) EncodeAsPtr(seg *capnp.Segment) capnp.Ptr { - return capnp.Struct(s).EncodeAsPtr(seg) -} - -func (Package) DecodeFromPtr(p capnp.Ptr) Package { - return Package(capnp.Struct{}.DecodeFromPtr(p)) -} - -func (s Package) ToPtr() capnp.Ptr { - return capnp.Struct(s).ToPtr() -} -func (s Package) IsValid() bool { - return capnp.Struct(s).IsValid() -} - -func (s Package) Message() *capnp.Message { - return capnp.Struct(s).Message() -} - -func (s Package) Segment() *capnp.Segment { - return capnp.Struct(s).Segment() -} -func (s Package) Key() (string, error) { - p, err := capnp.Struct(s).Ptr(0) - return p.Text(), err -} - -func (s Package) HasKey() bool { - return capnp.Struct(s).HasPtr(0) -} - -func (s Package) KeyBytes() ([]byte, error) { - p, err := capnp.Struct(s).Ptr(0) - return p.TextBytes(), err -} - -func (s Package) SetKey(v string) error { - return capnp.Struct(s).SetText(0, v) -} - -func (s Package) Version() (string, error) { - p, err := capnp.Struct(s).Ptr(1) - return p.Text(), err -} - -func (s Package) HasVersion() bool { - return capnp.Struct(s).HasPtr(1) -} - -func (s Package) VersionBytes() ([]byte, error) { - p, err := capnp.Struct(s).Ptr(1) - return p.TextBytes(), err -} - -func (s Package) SetVersion(v string) error { - return capnp.Struct(s).SetText(1, v) -} - -func (s Package) Found() bool { - return capnp.Struct(s).Bit(0) -} - -func (s Package) SetFound(v bool) { - capnp.Struct(s).SetBit(0, v) -} - -// Package_List is a list of Package. -type Package_List = capnp.StructList[Package] - -// NewPackage creates a new list of Package. -func NewPackage_List(s *capnp.Segment, sz int32) (Package_List, error) { - l, err := capnp.NewCompositeList(s, capnp.ObjectSize{DataSize: 8, PointerCount: 2}, sz) - return capnp.StructList[Package](l), err -} - -// Package_Future is a wrapper for a Package promised by a client call. -type Package_Future struct{ *capnp.Future } - -func (f Package_Future) Struct() (Package, error) { - p, err := f.Future.Ptr() - return Package(p.Struct()), err -} - -type FileHashes capnp.Struct - -// FileHashes_TypeID is the unique identifier for the type FileHashes. 
-const FileHashes_TypeID = 0xed110dc172c21b8f - -func NewFileHashes(s *capnp.Segment) (FileHashes, error) { - st, err := capnp.NewStruct(s, capnp.ObjectSize{DataSize: 0, PointerCount: 1}) - return FileHashes(st), err -} - -func NewRootFileHashes(s *capnp.Segment) (FileHashes, error) { - st, err := capnp.NewRootStruct(s, capnp.ObjectSize{DataSize: 0, PointerCount: 1}) - return FileHashes(st), err -} - -func ReadRootFileHashes(msg *capnp.Message) (FileHashes, error) { - root, err := msg.Root() - return FileHashes(root.Struct()), err -} - -func (s FileHashes) String() string { - str, _ := text.Marshal(0xed110dc172c21b8f, capnp.Struct(s)) - return str -} - -func (s FileHashes) EncodeAsPtr(seg *capnp.Segment) capnp.Ptr { - return capnp.Struct(s).EncodeAsPtr(seg) -} - -func (FileHashes) DecodeFromPtr(p capnp.Ptr) FileHashes { - return FileHashes(capnp.Struct{}.DecodeFromPtr(p)) -} - -func (s FileHashes) ToPtr() capnp.Ptr { - return capnp.Struct(s).ToPtr() -} -func (s FileHashes) IsValid() bool { - return capnp.Struct(s).IsValid() -} - -func (s FileHashes) Message() *capnp.Message { - return capnp.Struct(s).Message() -} - -func (s FileHashes) Segment() *capnp.Segment { - return capnp.Struct(s).Segment() -} -func (s FileHashes) FileHashes() (FileHashes_Entry_List, error) { - p, err := capnp.Struct(s).Ptr(0) - return FileHashes_Entry_List(p.List()), err -} - -func (s FileHashes) HasFileHashes() bool { - return capnp.Struct(s).HasPtr(0) -} - -func (s FileHashes) SetFileHashes(v FileHashes_Entry_List) error { - return capnp.Struct(s).SetPtr(0, v.ToPtr()) -} - -// NewFileHashes sets the fileHashes field to a newly -// allocated FileHashes_Entry_List, preferring placement in s's segment. -func (s FileHashes) NewFileHashes(n int32) (FileHashes_Entry_List, error) { - l, err := NewFileHashes_Entry_List(capnp.Struct(s).Segment(), n) - if err != nil { - return FileHashes_Entry_List{}, err - } - err = capnp.Struct(s).SetPtr(0, l.ToPtr()) - return l, err -} - -// FileHashes_List is a list of FileHashes. -type FileHashes_List = capnp.StructList[FileHashes] - -// NewFileHashes creates a new list of FileHashes. -func NewFileHashes_List(s *capnp.Segment, sz int32) (FileHashes_List, error) { - l, err := capnp.NewCompositeList(s, capnp.ObjectSize{DataSize: 0, PointerCount: 1}, sz) - return capnp.StructList[FileHashes](l), err -} - -// FileHashes_Future is a wrapper for a FileHashes promised by a client call. -type FileHashes_Future struct{ *capnp.Future } - -func (f FileHashes_Future) Struct() (FileHashes, error) { - p, err := f.Future.Ptr() - return FileHashes(p.Struct()), err -} - -type FileHashes_Entry capnp.Struct - -// FileHashes_Entry_TypeID is the unique identifier for the type FileHashes_Entry. 
-const FileHashes_Entry_TypeID = 0x8932f7433db89d99 - -func NewFileHashes_Entry(s *capnp.Segment) (FileHashes_Entry, error) { - st, err := capnp.NewStruct(s, capnp.ObjectSize{DataSize: 0, PointerCount: 2}) - return FileHashes_Entry(st), err -} - -func NewRootFileHashes_Entry(s *capnp.Segment) (FileHashes_Entry, error) { - st, err := capnp.NewRootStruct(s, capnp.ObjectSize{DataSize: 0, PointerCount: 2}) - return FileHashes_Entry(st), err -} - -func ReadRootFileHashes_Entry(msg *capnp.Message) (FileHashes_Entry, error) { - root, err := msg.Root() - return FileHashes_Entry(root.Struct()), err -} - -func (s FileHashes_Entry) String() string { - str, _ := text.Marshal(0x8932f7433db89d99, capnp.Struct(s)) - return str -} - -func (s FileHashes_Entry) EncodeAsPtr(seg *capnp.Segment) capnp.Ptr { - return capnp.Struct(s).EncodeAsPtr(seg) -} - -func (FileHashes_Entry) DecodeFromPtr(p capnp.Ptr) FileHashes_Entry { - return FileHashes_Entry(capnp.Struct{}.DecodeFromPtr(p)) -} - -func (s FileHashes_Entry) ToPtr() capnp.Ptr { - return capnp.Struct(s).ToPtr() -} -func (s FileHashes_Entry) IsValid() bool { - return capnp.Struct(s).IsValid() -} - -func (s FileHashes_Entry) Message() *capnp.Message { - return capnp.Struct(s).Message() -} - -func (s FileHashes_Entry) Segment() *capnp.Segment { - return capnp.Struct(s).Segment() -} -func (s FileHashes_Entry) Key() (string, error) { - p, err := capnp.Struct(s).Ptr(0) - return p.Text(), err -} - -func (s FileHashes_Entry) HasKey() bool { - return capnp.Struct(s).HasPtr(0) -} - -func (s FileHashes_Entry) KeyBytes() ([]byte, error) { - p, err := capnp.Struct(s).Ptr(0) - return p.TextBytes(), err -} - -func (s FileHashes_Entry) SetKey(v string) error { - return capnp.Struct(s).SetText(0, v) -} - -func (s FileHashes_Entry) Value() (string, error) { - p, err := capnp.Struct(s).Ptr(1) - return p.Text(), err -} - -func (s FileHashes_Entry) HasValue() bool { - return capnp.Struct(s).HasPtr(1) -} - -func (s FileHashes_Entry) ValueBytes() ([]byte, error) { - p, err := capnp.Struct(s).Ptr(1) - return p.TextBytes(), err -} - -func (s FileHashes_Entry) SetValue(v string) error { - return capnp.Struct(s).SetText(1, v) -} - -// FileHashes_Entry_List is a list of FileHashes_Entry. -type FileHashes_Entry_List = capnp.StructList[FileHashes_Entry] - -// NewFileHashes_Entry creates a new list of FileHashes_Entry. -func NewFileHashes_Entry_List(s *capnp.Segment, sz int32) (FileHashes_Entry_List, error) { - l, err := capnp.NewCompositeList(s, capnp.ObjectSize{DataSize: 0, PointerCount: 2}, sz) - return capnp.StructList[FileHashes_Entry](l), err -} - -// FileHashes_Entry_Future is a wrapper for a FileHashes_Entry promised by a client call. 
-type FileHashes_Entry_Future struct{ *capnp.Future } - -func (f FileHashes_Entry_Future) Struct() (FileHashes_Entry, error) { - p, err := f.Future.Ptr() - return FileHashes_Entry(p.Struct()), err -} - -const schema_e1dde60149aeb063 = "x\xda\xacV]l\x14U\x14\xfe\xce\xbd\xb3\xdd\xedv" + - "\xebv3\xa3\xa8\x95\xa0\x88\x11L \xa5E\x94&X" + - "(]l\x15\xa4\xe3\xa2\x86\x06\x1e\x86\xedmw\xed:" + - "\xb3\xce\xec\x96\x96\x07Q\xf4\x01\x88\x86`xP\x84D" + - "\x09&F\xe2\xcf\x03&hbBI\xfabb\xc4\x04" + - "bbB\x1a\x125\x88\x90\xe8\x03\xca\x03\xc9\x983\xfb" + - "7m\x11\x1f\xf4\xa9\xd3o\xbe\xf9\xce\xb9\xe7~\xe7\x9c" + - "\xedxB\xae\xd3V\xb6\xfe\xa0A\x98\x8fG\x9a\xfcw" + - "\x8f\x9d^\xbb\xe1\xaf\xce\xfdH- \xff`\xfbYw" + - "\xaa5u\x0d\x11\x11\x05tS\\\xd1w\x04O\xdb\xc4" + - ".\x90\x7f\xd7{\x7f.[x\xe0\xcc[H\xdd#\xfc" + - "\xc9\x9f\xae?\xf3\xdb\xd1\xdf/\x01\xd4\xf5\x85\x88\x93>" + - "\x1d0\xa7\xc4c\xa0\x99\xf3\xd3\xd9\xbd3\xbd'S\xed" + - "\xc2?\xd5\xf1\xca\x0b\xfbV\xb7\\a\xde\xb4\xb8\x97\xf4" + - "\x0b\x01\xef{\xb1\x05\xe4G\x1e<d\x1c9U<\x85" + - "T\x82\xfc\xec\xe7\x9f\x0e\xd0/\x17/!B\xcc\xf8Y" + - "\\\xd1\xff\x08\xb8\xd7D\x0f\xc8\x9fh?\x98\x7ft\xff" + - "\xb9\xb30\x13\x14&\x07\x94\x07\xe4q}\x99\xe4\xa7\x87" + - "\xe4g\xa0\x99\xd1\xe3'/\xdf|\xfd\xc2l\xd9\x80\xf9" + - "\xb5<\xabO\x07\xcc)\xc9\x87\x1a\xf8nu\xdf\x9a\xb5" + - "\x85\x8bH\xb5S#\xd9\x80\xdb\x95\xd6\xe2\xa4?\xa7\x05" + - "\xb5\xd0\x98\\?\xf4\x9c\x1c\x12L\xf9D\xfbF\xffR" + - "[\x00tMi3\x02!\xb59\xec(\xb3\xd7\xc4~" + - "\xd4\xd3\xb1\x87\x81\xaem1\x9f\x10*\xfe-j\xf1m" + - "\xfc+\xfdB|\x01\xa0_\x8c\xf7`\xb9_t\x9d\x92" + - "\xb3\"k\x89\xa2]\xec\xde\x98/\xa8~\xcb\xcb)o" + - "E:i\x97\xdc\xc9A\"3&5@# \xb5l" + - "1`.\x91dv\x08J\x11\x19\xc4\xe0\xf2N\xc0\\" + - "*\xc9\\%(:\xa6&)\x01A\x09\xd0\xa2q\xab" + - "PV\xb5\xff\xeaq$\xc7\xd9jyc\x1c\xc7\xdaY" + - "P+\xd2\xf6\xf8fgX\x01A0\x12@*\xd5\x09" + - "\x10\xa5\x9a\xbb\x81E\x05\xc7\xf1T\x8fWr\xf3\xd9\xd2" + - "l\x95'\x0b\xceN\xab0[G\x0e+\x96I\x042" + - "\x0b+2w\xf2\x1f\x91je\xb5\xbc=\xa2\xdc[k" + - "\x06\x15\xd8\xe4d\xc7\xb8\x0a\x83Vv,i\x8d*\x8f" + - "\xd5\xb4z\x05Z\x9f\x02\xcc\x84$s\xa9 \xbfhe" + - "\xc7\x98\x02\x80\xee\x00\x0dJ\xa2\xb6\x86\xb7@\x0c\xd6\xd5" + - "\x89\xd5Y5j\x8dVR\xac\x8b\xa6\xb9\xac\xeb$\x99" + - "\x9bBe\x1d\xe8\x05\xcc>I\xe6\xa0 \x12Fp\x9e" + - "\xcd\\\xea~I\xe6\xd6\xd9\xa5\xde3\xae\\/\xef\xd8" + - "\xf5\xd2\x8f8e{\x98\x08\x82(\x94\x82\xa8\x95~K" + - "\xb9T,\x97<\xcc\xb9\xdd\xa1\xeaE\xf6\x85\xd2X?" 
+ - "T\xcdm\xbb ?og\x0be/\xef@\xda^\xed" + - "\xc4\x1c\x92\xcf\xa9&\xfe\xf9\xdd\xedo\xad\xe4N\x02\xff" + - "\xbf\xd1\xc4\\\xa3\x01\xa6F\xe1\xd1C\xbd{\xaa\xde3" + - "W\xd5B\xeb;h\x08\xc8l'I\x99\x1c5\xa2\xeb" + - "\x8a>\x0629\xc6\xdf`\\T\xeeD\x7f-\xe0\xbf" + - "\xca\xf8\x9b\x8cKi\x90\x04\xf4\xfd\xb4\x13\xc8\xecc\xfc" + - "0\xe3\x9af\x90\x06\xe8\x87\xe8\x00\x909\xcc\xf8\xfb\x8c" + - "G\"\x06E\x00\xfd\x18=\x02d\xdea\xfc\x04\xe3M" + - "M\x065\x01\xfa\x07\xd4\x0bd\x8e2\xfe\x11\xe3\xd1\xa8" + - "\x11\xf4\xf1\x87\xf4\"\x909\xc1\xf8\x19\xc6c1\x83b" + - "<\x94h1\x909\xcd\xf8y\xc6\x9b\x9b\x0dj\xe6)" + - "I{\x81\xcc9\xc6\xaf2\x1e\x8f\x1b\x14\x07\xf4_\x83" + - "</3\xae\x09A\xd4bP\x0b\xa0\x93\xe0\xb07\x19" + - "\x8e\x09A\xa9D\x8bA\x09@\x8f\x88n\xe0Y!)" + - "s\x9f\x10\xe4\x8fV/\x13\xd2\xcb\xd5o\xa0dyc" + - "}\xaa\xa8\xc8\x1eVvv\xb2\xdfJ\xf2D\x99g\x8a" + - "J\xf7\xf4A\xe6\xdd\xfa\xa79\xcb\xcbm\x19\xd9\x98G" + - "\xb4\xa0\xbc:\xaa&J\xca\xb5\xad\x02\xf5\xa9\xa2\xc7\xd7" + - "\x09\xd4\xde%9X\xbd\x0f\x9c\x8a\xb1\xa9\xad6\xb5A" + - "\xd4\x16\xc4\xf2\xbc\xad9\xb7\x8c\xe4zwtn&Q" + - "e\x8f\xcfM\xceU\x9eS\x18W\xc3\x94\xb6\xc7\x9f\xb7" + - "\\\x0f\xf3\xb3\xaf*F\xd3\xf3\xbe\xde\xa3*\xae\xa2d" + - "\xc3l J\x82z\x86\x9d\xd2|\xfel\xcb\x86\xfbC" + - "\x16\x94\x19\xa3\xc6\x12l\xee\x0d-\x99Hg\xcd\xbe\x8b" + - "\x82&2\x97\xd4\xfb\xe7\xdan\xc0\xbc*\xc9\xbc\x11\xea" + - "\x9f\xebo\x03\xe6\x0dI\x19#\xec\xdfT\xe0k\x83/" + - "\xfa\xfe\xb0\x7f\x17\x06>\xba\x9b\xf1Ua\xff\xae\x0c|" + - "\xd4\xc1\xf8\xa6\xb0\x7f\x07h7\x90\xe9g|\x98\x04Q" + - "\xd5\xbeV`\xdfF;E\xdb*\xf6Ut\xa4\xd6N" + - "\xa5\xc0\xbeM\x15\xfb\xbeL\xdd@\xa6\xc0\xf8>\xaa\x1b" + - "l\x83\x85\x9elN=\xddh\xfa\xea\x8b\x8dy\xaal" + - "\xac\xcd\x16\x15\x1bC\xb8^\xa4\xea\x10v\x1d\xa7\x94\x9e" + - "()b\x17\xb1\x89\x92\xfcMM\xeb\xbf\x18\xc0AO" + - "y4w;\x0f\xd4.\xaf\xe2\x00\x7f\xc4\xb5^R\xbb" + - "\x1c\x97\xc6\x06x\x15)[fUmR\xff\x8b=\xa8" + - "\xb6\xa2{*;:\x18g\x8d_]\xd4Y\xf5Ah" + - "]\x0d\x85\xd6\xd5Hu\xb9C6\x9a\xb1\xad\xf1}\xa5" + - "T\x7f\x07\x00\x00\xff\xffy\xdeB\xa6" - -func RegisterSchema(reg *schemas.Registry) { - reg.Register(&schemas.Schema{ - String: schema_e1dde60149aeb063, - Nodes: []uint64{ - 0x8932f7433db89d99, - 0x8dc08a1d29f69b16, - 0xab4200df8263c5d4, - 0xb470b49a14912305, - 0xc2d08935698f1b78, - 0xd58300fce8aba267, - 0xdd6c3d394436cf49, - 0xe1f09ceb4ef5e479, - 0xea0b3688577e30b4, - 0xed110dc172c21b8f, - }, - Compressed: true, - }) -} diff --git a/cli/internal/fs/hash/capnp_test.go b/cli/internal/fs/hash/capnp_test.go deleted file mode 100644 index 34dac6eecf01a..0000000000000 --- a/cli/internal/fs/hash/capnp_test.go +++ /dev/null @@ -1,193 +0,0 @@ -package hash - -import ( - "testing" - - "github.com/stretchr/testify/assert" - "github.com/vercel/turbo/cli/internal/env" - "github.com/vercel/turbo/cli/internal/lockfile" - "github.com/vercel/turbo/cli/internal/turbopath" - "github.com/vercel/turbo/cli/internal/util" -) - -// Code generated by capnpc-go. DO NOT EDIT. 
- -func Test_CapnpHash(t *testing.T) { - taskHashable := TaskHashable{ - GlobalHash: "global_hash", - TaskDependencyHashes: []string{"task_dependency_hash"}, - PackageDir: turbopath.AnchoredUnixPath("package_dir"), - HashOfFiles: "hash_of_files", - ExternalDepsHash: "external_deps_hash", - Task: "task", - Outputs: TaskOutputs{ - Inclusions: []string{"inclusions"}, - Exclusions: []string{"exclusions"}, - }, - PassThruArgs: []string{"pass_thru_args"}, - Env: []string{"env"}, - ResolvedEnvVars: env.EnvironmentVariablePairs{}, - PassThroughEnv: []string{"pass_thru_env"}, - EnvMode: util.Loose, - DotEnv: []turbopath.AnchoredUnixPath{"dotenv"}, - } - - hash, err := HashTaskHashable(&taskHashable) - if err != nil { - t.Fatal(err) - } - - assert.Equal(t, "ff765ee2f83bc034", hash) -} - -func Test_CapnpHashGlobal(t *testing.T) { - globalHashable := GlobalHashable{ - GlobalCacheKey: "global_cache_key", - GlobalFileHashMap: map[turbopath.AnchoredUnixPath]string{ - turbopath.AnchoredUnixPath("global_file_hash_map"): "global_file_hash_map", - }, - RootExternalDepsHash: "root_external_deps_hash", - Env: []string{"env"}, - ResolvedEnvVars: env.EnvironmentVariablePairs{}, - PassThroughEnv: []string{"pass_through_env"}, - EnvMode: util.Infer, - FrameworkInference: true, - - DotEnv: []turbopath.AnchoredUnixPath{"dotenv"}, - } - - hash, err := HashGlobalHashable(&globalHashable) - if err != nil { - t.Fatal(err) - } - - assert.Equal(t, "1d13a81d4c129bed", hash) -} - -func Test_LockfilePackagesEmpty(t *testing.T) { - packages := []lockfile.Package{} - - hash, err := HashLockfilePackages(packages) - if err != nil { - t.Fatal(err) - } - - assert.Equal(t, "459c029558afe716", hash) -} - -func Test_LockfilePackagesNonEmpty(t *testing.T) { - packages := []lockfile.Package{ - { - Key: "key", - Version: "version", - Found: false, - }, - } - - hash, err := HashLockfilePackages(packages) - if err != nil { - t.Fatal(err) - } - - assert.Equal(t, "9e60782f386d8ff1", hash) -} - -func Test_LockfilePackagesEmptyVersion(t *testing.T) { - packages := []lockfile.Package{ - { - Key: "key", - Version: "", - Found: true, - }, - } - - hash, err := HashLockfilePackages(packages) - if err != nil { - t.Fatal(err) - } - - assert.Equal(t, "bde280722f61644a", hash) -} - -func Test_CapnpLockfilePackages_InOrder(t *testing.T) { - packages := []lockfile.Package{ - { - Key: "key", - Version: "version", - Found: false, - }, - { - Key: "zey", - Version: "version", - Found: false, - }, - } - - hash, err := HashLockfilePackages(packages) - if err != nil { - t.Fatal(err) - } - - assert.Equal(t, "765a46fa6c11f363", hash) -} - -func Test_CapnpLockfilePackages_OutOfOrder(t *testing.T) { - packages := []lockfile.Package{ - { - Key: "zey", - Version: "version", - Found: false, - }, - { - Key: "key", - Version: "version", - Found: false, - }, - } - - hash, err := HashLockfilePackages(packages) - if err != nil { - t.Fatal(err) - } - - assert.Equal(t, "1f5d2d372b4398db", hash) -} - -func Test_CapnpFileHashes_Empty(t *testing.T) { - fileHashes := map[turbopath.AnchoredUnixPath]string{} - - hash, err := HashFileHashes(fileHashes) - if err != nil { - t.Fatal(err) - } - - assert.Equal(t, "459c029558afe716", hash) -} - -func Test_CapnpFileHashes_NonEmpty(t *testing.T) { - fileHashes := map[turbopath.AnchoredUnixPath]string{ - turbopath.AnchoredUnixPath("a"): "b", - turbopath.AnchoredUnixPath("c"): "d", - } - - hash, err := HashFileHashes(fileHashes) - if err != nil { - t.Fatal(err) - } - - assert.Equal(t, "c9301c0bf1899c07", hash) -} - -func 
Test_CapnpFileHashes_OrderResistant(t *testing.T) { - fileHashes := map[turbopath.AnchoredUnixPath]string{ - turbopath.AnchoredUnixPath("c"): "d", - turbopath.AnchoredUnixPath("a"): "b", - } - - hash, err := HashFileHashes(fileHashes) - if err != nil { - t.Fatal(err) - } - - assert.Equal(t, "c9301c0bf1899c07", hash) -} diff --git a/cli/internal/fs/hash_test.go b/cli/internal/fs/hash_test.go deleted file mode 100644 index f8f24b4b9625f..0000000000000 --- a/cli/internal/fs/hash_test.go +++ /dev/null @@ -1,54 +0,0 @@ -package fs - -import ( - "testing" - - "github.com/vercel/turbo/cli/internal/fs/hash" - "gotest.tools/v3/assert" -) - -const _numOfRuns = 20 - -func Test_HashObjectStability(t *testing.T) { - type TestCase struct { - name string - obj interface{} - } - type complexStruct struct { - nested hash.TaskOutputs - foo string - bar []string - } - - testCases := []TestCase{ - { - name: "task object", - obj: hash.TaskOutputs{ - Inclusions: []string{"foo", "bar"}, - Exclusions: []string{"baz"}, - }, - }, - { - name: "complex struct", - obj: complexStruct{ - nested: hash.TaskOutputs{ - Exclusions: []string{"bar", "baz"}, - Inclusions: []string{"foo"}, - }, - foo: "a", - bar: []string{"b", "c"}, - }, - }, - } - - for _, tc := range testCases { - expectedHash, err := hashObject(tc.obj) - assert.NilError(t, err, tc.name) - - for n := 0; n < _numOfRuns; n++ { - hash, err := hashObject(tc.obj) - assert.NilError(t, err, tc.name) - assert.Equal(t, expectedHash, hash, tc.name) - } - } -} diff --git a/cli/internal/fs/lstat.go b/cli/internal/fs/lstat.go deleted file mode 100644 index eff081005baed..0000000000000 --- a/cli/internal/fs/lstat.go +++ /dev/null @@ -1,74 +0,0 @@ -package fs - -import ( - "io/fs" - "os" - - "github.com/vercel/turbo/cli/internal/turbopath" -) - -// LstatCachedFile maintains a cache of file info, mode and type for the given Path -type LstatCachedFile struct { - Path turbopath.AbsoluteSystemPath - fileInfo fs.FileInfo - fileMode *fs.FileMode - fileType *fs.FileMode -} - -// GetInfo returns, and caches the file info for the LstatCachedFile.Path -func (file *LstatCachedFile) GetInfo() (fs.FileInfo, error) { - if file.fileInfo != nil { - return file.fileInfo, nil - } - - err := file.lstat() - if err != nil { - return nil, err - } - - return file.fileInfo, nil -} - -// GetMode returns, and caches the file mode for the LstatCachedFile.Path -func (file *LstatCachedFile) GetMode() (fs.FileMode, error) { - if file.fileMode != nil { - return *file.fileMode, nil - } - - err := file.lstat() - if err != nil { - return 0, err - } - - return *file.fileMode, nil -} - -// GetType returns, and caches the type bits of (FileMode & os.ModeType) for the LstatCachedFile.Path -func (file *LstatCachedFile) GetType() (fs.FileMode, error) { - if file.fileType != nil { - return *file.fileType, nil - } - - err := file.lstat() - if err != nil { - return 0, err - } - - return *file.fileType, nil -} - -func (file *LstatCachedFile) lstat() error { - fileInfo, err := file.Path.Lstat() - if err != nil { - return err - } - - fileMode := fileInfo.Mode() - fileModeType := fileMode & os.ModeType - - file.fileInfo = fileInfo - file.fileMode = &fileMode - file.fileType = &fileModeType - - return nil -} diff --git a/cli/internal/fs/package_json.go b/cli/internal/fs/package_json.go deleted file mode 100644 index adc7b05adb1a2..0000000000000 --- a/cli/internal/fs/package_json.go +++ /dev/null @@ -1,162 +0,0 @@ -package fs - -import ( - "bytes" - "encoding/json" - "sort" - "sync" - - mapset 
"github.com/deckarep/golang-set" - "github.com/vercel/turbo/cli/internal/lockfile" - "github.com/vercel/turbo/cli/internal/turbopath" -) - -// PackageJSON represents NodeJS package.json -type PackageJSON struct { - Name string `json:"name"` - Version string `json:"version"` - Scripts map[string]string `json:"scripts"` - Dependencies map[string]string `json:"dependencies"` - DevDependencies map[string]string `json:"devDependencies"` - OptionalDependencies map[string]string `json:"optionalDependencies"` - PeerDependencies map[string]string `json:"peerDependencies"` - PackageManager string `json:"packageManager"` - Os []string `json:"os"` - Workspaces Workspaces `json:"workspaces"` - Private bool `json:"private"` - // Exact JSON object stored in package.json including unknown fields - // During marshalling struct fields will take priority over raw fields - RawJSON map[string]interface{} `json:"-"` - - // relative path from repo root to the package.json file - PackageJSONPath turbopath.AnchoredSystemPath `json:"-"` - // relative path from repo root to the package - Dir turbopath.AnchoredSystemPath `json:"-"` - InternalDeps []string `json:"-"` - UnresolvedExternalDeps map[string]string `json:"-"` - TransitiveDeps []lockfile.Package `json:"-"` - LegacyTurboConfig *TurboJSON `json:"turbo"` - Mu sync.Mutex `json:"-"` - ExternalDepsHash string `json:"-"` -} - -type Workspaces []string - -type WorkspacesAlt struct { - Packages []string `json:"packages,omitempty"` -} - -func (r *Workspaces) UnmarshalJSON(data []byte) error { - var tmp = &WorkspacesAlt{} - if err := json.Unmarshal(data, tmp); err == nil { - *r = Workspaces(tmp.Packages) - return nil - } - var tempstr = []string{} - if err := json.Unmarshal(data, &tempstr); err != nil { - return err - } - *r = tempstr - return nil -} - -// ReadPackageJSON returns a struct of package.json -func ReadPackageJSON(path turbopath.AbsoluteSystemPath) (*PackageJSON, error) { - b, err := path.ReadFile() - if err != nil { - return nil, err - } - return UnmarshalPackageJSON(b) -} - -// UnmarshalPackageJSON decodes a byte slice into a PackageJSON struct -func UnmarshalPackageJSON(data []byte) (*PackageJSON, error) { - var rawJSON map[string]interface{} - if err := json.Unmarshal(data, &rawJSON); err != nil { - return nil, err - } - - pkgJSON := &PackageJSON{} - if err := json.Unmarshal(data, &pkgJSON); err != nil { - return nil, err - } - pkgJSON.RawJSON = rawJSON - - return pkgJSON, nil -} - -// MarshalPackageJSON Serialize PackageJSON to a slice of bytes -func MarshalPackageJSON(pkgJSON *PackageJSON) ([]byte, error) { - structuredContent, err := json.Marshal(pkgJSON) - if err != nil { - return nil, err - } - var structuredFields map[string]interface{} - if err := json.Unmarshal(structuredContent, &structuredFields); err != nil { - return nil, err - } - - fieldsToSerialize := make(map[string]interface{}, len(pkgJSON.RawJSON)) - - // copy pkgJSON.RawJSON - for key, value := range pkgJSON.RawJSON { - fieldsToSerialize[key] = value - } - - for key, value := range structuredFields { - if isEmpty(value) { - delete(fieldsToSerialize, key) - } else { - fieldsToSerialize[key] = value - } - } - - var b bytes.Buffer - encoder := json.NewEncoder(&b) - encoder.SetEscapeHTML(false) - encoder.SetIndent("", " ") - if err := encoder.Encode(fieldsToSerialize); err != nil { - return nil, err - } - - return b.Bytes(), nil -} - -// SetExternalDeps sets TransitiveDeps and populates ExternalDepsHash -func (p *PackageJSON) SetExternalDeps(externalDeps mapset.Set) error { - p.Mu.Lock() 
- defer p.Mu.Unlock() - p.TransitiveDeps = make([]lockfile.Package, 0, externalDeps.Cardinality()) - for _, dependency := range externalDeps.ToSlice() { - dependency := dependency.(lockfile.Package) - p.TransitiveDeps = append(p.TransitiveDeps, dependency) - } - sort.Sort(lockfile.ByKey(p.TransitiveDeps)) - hashOfExternalDeps, err := HashLockfilePackages(p.TransitiveDeps) - if err != nil { - return err - } - p.ExternalDepsHash = hashOfExternalDeps - return nil -} - -func isEmpty(value interface{}) bool { - if value == nil { - return true - } - switch s := value.(type) { - case string: - return s == "" - case bool: - return !s - case []string: - return len(s) == 0 - case map[string]interface{}: - return len(s) == 0 - case Workspaces: - return len(s) == 0 - default: - // Assume any unknown types aren't empty - return false - } -} diff --git a/cli/internal/fs/package_json_test.go b/cli/internal/fs/package_json_test.go deleted file mode 100644 index e40649f498922..0000000000000 --- a/cli/internal/fs/package_json_test.go +++ /dev/null @@ -1,184 +0,0 @@ -package fs - -import ( - "testing" - - mapset "github.com/deckarep/golang-set" - "gotest.tools/v3/assert" -) - -func Test_UnmarshalPackageJSON(t *testing.T) { - type Case struct { - name string - json string - expectedFields *PackageJSON - } - - testCases := []Case{ - { - name: "basic types are in raw and processed", - json: `{"name":"foo","version":"1.2.3"}`, - expectedFields: &PackageJSON{ - Name: "foo", - Version: "1.2.3", - RawJSON: map[string]interface{}{ - "name": "foo", - "version": "1.2.3", - }, - }, - }, - { - name: "map types get copied", - json: `{"dependencies":{"foo":"1.2.3"},"devDependencies":{"bar": "^1.0.0"}}`, - expectedFields: &PackageJSON{ - Dependencies: map[string]string{"foo": "1.2.3"}, - DevDependencies: map[string]string{"bar": "^1.0.0"}, - RawJSON: map[string]interface{}{ - "dependencies": map[string]interface{}{"foo": "1.2.3"}, - "devDependencies": map[string]interface{}{"bar": "^1.0.0"}, - }, - }, - }, - { - name: "array types get copied", - json: `{"os":["linux", "windows"]}`, - expectedFields: &PackageJSON{ - Os: []string{"linux", "windows"}, - RawJSON: map[string]interface{}{ - "os": []interface{}{"linux", "windows"}, - }, - }, - }, - } - - for _, testCase := range testCases { - actual, err := UnmarshalPackageJSON([]byte(testCase.json)) - assert.NilError(t, err, testCase.name) - assertPackageJSONEqual(t, actual, testCase.expectedFields) - } -} - -func Test_MarshalPackageJSON(t *testing.T) { - type TestCase struct { - name string - input *PackageJSON - expected *PackageJSON - } - - testCases := []TestCase{ - { - name: "roundtrip should have no effect", - input: &PackageJSON{ - Name: "foo", - Version: "1.2.3", - RawJSON: map[string]interface{}{ - "name": "foo", - "version": "1.2.3", - }, - }, - expected: &PackageJSON{ - Name: "foo", - Version: "1.2.3", - RawJSON: map[string]interface{}{ - "name": "foo", - "version": "1.2.3", - }, - }, - }, - { - name: "structured fields should take priority over raw values", - input: &PackageJSON{ - Name: "foo", - Version: "2.3.4", - RawJSON: map[string]interface{}{ - "name": "foo", - "version": "1.2.3", - }, - }, - expected: &PackageJSON{ - Name: "foo", - Version: "2.3.4", - RawJSON: map[string]interface{}{ - "name": "foo", - "version": "2.3.4", - }, - }, - }, - { - name: "empty structured fields don't get serialized", - input: &PackageJSON{ - Name: "foo", - Version: "", - RawJSON: map[string]interface{}{ - "name": "foo", - "version": "1.2.3", - }, - }, - expected: &PackageJSON{ - 
Name: "foo", - Version: "", - RawJSON: map[string]interface{}{ - "name": "foo", - }, - }, - }, - { - name: "unstructured fields survive the round trip", - input: &PackageJSON{ - Name: "foo", - RawJSON: map[string]interface{}{ - "name": "foo", - "special-field": "special-value", - "special-config": map[string]interface{}{ - "flag": true, - "value": "toggled", - }, - }, - }, - expected: &PackageJSON{ - Name: "foo", - RawJSON: map[string]interface{}{ - "name": "foo", - "special-field": "special-value", - "special-config": map[string]interface{}{ - "flag": true, - "value": "toggled", - }, - }, - }, - }, - } - - for _, testCase := range testCases { - serializedInput, err := MarshalPackageJSON(testCase.input) - assert.NilError(t, err, testCase.name) - actual, err := UnmarshalPackageJSON(serializedInput) - assert.NilError(t, err, testCase.name) - assertPackageJSONEqual(t, actual, testCase.expected) - } -} - -func Test_SetExternalDepsWithEmptySet(t *testing.T) { - pkg := &PackageJSON{} - err := pkg.SetExternalDeps(mapset.NewSet()) - assert.NilError(t, err) - assert.Assert(t, pkg.TransitiveDeps != nil) - assert.Equal(t, len(pkg.TransitiveDeps), 0) - assert.DeepEqual(t, pkg.ExternalDepsHash, "459c029558afe716") -} - -// Asserts that the data section of two PackageJSON structs are equal -func assertPackageJSONEqual(t *testing.T, x *PackageJSON, y *PackageJSON) { - t.Helper() - assert.Equal(t, x.Name, y.Name) - assert.Equal(t, x.Version, y.Version) - assert.DeepEqual(t, x.Scripts, y.Scripts) - assert.DeepEqual(t, x.Dependencies, y.Dependencies) - assert.DeepEqual(t, x.DevDependencies, y.DevDependencies) - assert.DeepEqual(t, x.OptionalDependencies, y.OptionalDependencies) - assert.DeepEqual(t, x.PeerDependencies, y.PeerDependencies) - assert.Equal(t, x.PackageManager, y.PackageManager) - assert.DeepEqual(t, x.Workspaces, y.Workspaces) - assert.DeepEqual(t, x.Private, y.Private) - assert.DeepEqual(t, x.RawJSON, y.RawJSON) -} diff --git a/cli/internal/fs/path.go b/cli/internal/fs/path.go deleted file mode 100644 index 2023d69fd04fe..0000000000000 --- a/cli/internal/fs/path.go +++ /dev/null @@ -1,113 +0,0 @@ -package fs - -import ( - "fmt" - iofs "io/fs" - "os" - "path/filepath" - "reflect" - - "github.com/adrg/xdg" - "github.com/vercel/turbo/cli/internal/turbopath" -) - -// CheckedToAbsoluteSystemPath inspects a string and determines if it is an absolute path. -func CheckedToAbsoluteSystemPath(s string) (turbopath.AbsoluteSystemPath, error) { - if filepath.IsAbs(s) { - return turbopath.AbsoluteSystemPath(s), nil - } - return "", fmt.Errorf("%v is not an absolute path", s) -} - -// ResolveUnknownPath returns unknown if it is an absolute path, otherwise, it -// assumes unknown is a path relative to the given root. -func ResolveUnknownPath(root turbopath.AbsoluteSystemPath, unknown string) turbopath.AbsoluteSystemPath { - if filepath.IsAbs(unknown) { - return turbopath.AbsoluteSystemPath(unknown) - } - return root.UntypedJoin(unknown) -} - -// UnsafeToAbsoluteSystemPath directly converts a string to an AbsoluteSystemPath -func UnsafeToAbsoluteSystemPath(s string) turbopath.AbsoluteSystemPath { - return turbopath.AbsoluteSystemPath(s) -} - -// UnsafeToAnchoredSystemPath directly converts a string to an AbsoluteSystemPath -func UnsafeToAnchoredSystemPath(s string) turbopath.AnchoredSystemPath { - return turbopath.AnchoredSystemPath(s) -} - -// AbsoluteSystemPathFromUpstream is used to mark return values from APIs that we -// expect to give us absolute paths. No checking is performed. 
-// Prefer to use this over a cast to maintain the search-ability of interfaces -// into and out of the turbopath.AbsoluteSystemPath type. -func AbsoluteSystemPathFromUpstream(s string) turbopath.AbsoluteSystemPath { - return turbopath.AbsoluteSystemPath(s) -} - -// GetCwd returns the calculated working directory after traversing symlinks. -func GetCwd(cwdRaw string) (turbopath.AbsoluteSystemPath, error) { - if cwdRaw == "" { - var err error - cwdRaw, err = os.Getwd() - if err != nil { - return "", err - } - } - // We evaluate symlinks here because the package managers - // we support do the same. - cwdRaw, err := filepath.EvalSymlinks(cwdRaw) - if err != nil { - return "", fmt.Errorf("evaluating symlinks in cwd: %w", err) - } - cwd, err := CheckedToAbsoluteSystemPath(cwdRaw) - if err != nil { - return "", fmt.Errorf("cwd is not an absolute path %v: %v", cwdRaw, err) - } - return cwd, nil -} - -// GetVolumeRoot returns the root directory given an absolute path. -func GetVolumeRoot(absolutePath string) string { - return filepath.VolumeName(absolutePath) + string(os.PathSeparator) -} - -// CreateDirFSAtRoot creates an `os.dirFS` instance at the root of the -// volume containing the specified path. -func CreateDirFSAtRoot(absolutePath string) iofs.FS { - return os.DirFS(GetVolumeRoot(absolutePath)) -} - -// GetDirFSRootPath returns the root path of an os.dirFS. -func GetDirFSRootPath(fsys iofs.FS) string { - // We can't typecheck fsys to enforce using an `os.dirFS` because the - // type isn't exported from `os`. So instead, reflection. 🤷‍♂️ - - fsysType := reflect.TypeOf(fsys).Name() - if fsysType != "dirFS" { - // This is not a user error, fail fast - panic("GetDirFSRootPath must receive an os.dirFS") - } - - // The underlying type is a string; this is the original path passed in. - return reflect.ValueOf(fsys).String() -} - -// IofsRelativePath calculates an `os.dirFS`-friendly path from an absolute system path. -func IofsRelativePath(fsysRoot string, absolutePath string) (string, error) { - return filepath.Rel(fsysRoot, absolutePath) -} - -// TempDir returns the absolute path of a directory with the given name -// under the system's default temp directory location -func TempDir(subDir string) turbopath.AbsoluteSystemPath { - return turbopath.AbsoluteSystemPath(os.TempDir()).UntypedJoin(subDir) -} - -// GetUserConfigDir returns the platform-specific common location -// for configuration files that belong to a user.
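Editor's note: the `os.dirFS` helpers deleted above are designed to compose. A minimal sketch, assuming a hypothetical `main` package (internal packages are not importable outside the module, so this is illustrative only; the example path is also an assumption):

```go
package main

import (
	"fmt"

	"github.com/vercel/turbo/cli/internal/fs"
)

func main() {
	// Hypothetical absolute path; on Windows the volume root would be e.g. `C:\`.
	absolutePath := "/repo/apps/web/package.json"

	// Create an fs.FS rooted at the volume containing the path...
	fsys := fs.CreateDirFSAtRoot(absolutePath)
	// ...recover the root it was created with (via the reflection trick above)...
	fsysRoot := fs.GetDirFSRootPath(fsys)
	// ...and convert the absolute path into an io/fs-friendly relative path.
	relative, err := fs.IofsRelativePath(fsysRoot, absolutePath)
	if err != nil {
		panic(err)
	}
	fmt.Println(relative) // "repo/apps/web/package.json" on a Unix-like system
}
```

This pairing matters because `io/fs` rejects paths with leading separators, so every absolute path must be re-expressed relative to the `os.DirFS` root before globbing.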
-func GetUserConfigDir() turbopath.AbsoluteSystemPath { - configHome := AbsoluteSystemPathFromUpstream(xdg.ConfigHome) - return configHome.UntypedJoin("turborepo") -} diff --git a/cli/internal/fs/testdata/both/package.json b/cli/internal/fs/testdata/both/package.json deleted file mode 100644 index 03534b72145e1..0000000000000 --- a/cli/internal/fs/testdata/both/package.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "turbo": { - "pipeline": { - "build": {} - } - } -} diff --git a/cli/internal/fs/testdata/both/turbo.json b/cli/internal/fs/testdata/both/turbo.json deleted file mode 100644 index 721e897f10b61..0000000000000 --- a/cli/internal/fs/testdata/both/turbo.json +++ /dev/null @@ -1,18 +0,0 @@ -// mocked test comment -{ - "pipeline": { - "build": { - // mocked test comment - "dependsOn": [ - // mocked test comment - "^build" - ], - "outputs": ["dist/**", ".next/**", "!dist/assets/**"], - "outputMode": "new-only" - } // mocked test comment - }, - "remoteCache": { - "teamId": "team_id", - "signature": true - } -} diff --git a/cli/internal/fs/testdata/correct/turbo.json b/cli/internal/fs/testdata/correct/turbo.json deleted file mode 100644 index a35221d7b3ba0..0000000000000 --- a/cli/internal/fs/testdata/correct/turbo.json +++ /dev/null @@ -1,50 +0,0 @@ -// mocked test comment -{ - "pipeline": { - "build": { - "passThroughEnv": ["GITHUB_TOKEN"], - // mocked test comment - "dependsOn": [ - // mocked test comment - "^build" - ], - "outputs": ["dist/**", "!dist/assets/**", ".next/**"], - "outputMode": "new-only" - }, // mocked test comment - "lint": { - "outputs": [], - "dependsOn": ["$MY_VAR"], - "cache": true, - "outputMode": "new-only" - }, - "dev": { - "cache": false, - "outputMode": "full", - "passThroughEnv": [] - }, - /* mocked test comment */ - "publish": { - "outputs": ["dist/**"], - "inputs": [ - /* - mocked test comment - */ - "build/**/*" - ], - "dependsOn": [ - /* mocked test comment */ "^publish", - "^build", - "build", - "admin#lint" - ], - "cache": false - } - }, - "globalDependencies": ["some-file", "../another-dir/**", "$GLOBAL_ENV_VAR"], - "globlaEnv": ["SOME_VAR", "ANOTHER_VAR"], - "globalPassThroughEnv": ["AWS_SECRET_KEY"], - "remoteCache": { - "teamId": "team_id", - "signature": true - } -} diff --git a/cli/internal/fs/testdata/dotenv-empty/turbo.json b/cli/internal/fs/testdata/dotenv-empty/turbo.json deleted file mode 100644 index 9d6efe855eac1..0000000000000 --- a/cli/internal/fs/testdata/dotenv-empty/turbo.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "globalDotEnv": [], - "pipeline": { - "build": { - "dotEnv": [] - } - } -} diff --git a/cli/internal/fs/testdata/dotenv-null/turbo.json b/cli/internal/fs/testdata/dotenv-null/turbo.json deleted file mode 100644 index 7b11852313a03..0000000000000 --- a/cli/internal/fs/testdata/dotenv-null/turbo.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "globalDotEnv": null, - "pipeline": { - "build": { - "dotEnv": null - } - } -} diff --git a/cli/internal/fs/testdata/dotenv-populated/turbo.json b/cli/internal/fs/testdata/dotenv-populated/turbo.json deleted file mode 100644 index 407a5d0963bc9..0000000000000 --- a/cli/internal/fs/testdata/dotenv-populated/turbo.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "globalDotEnv": ["z", "y", "x"], - "pipeline": { - "build": { - "dotEnv": ["3", "2", "1"] - } - } -} diff --git a/cli/internal/fs/testdata/dotenv-undefined/turbo.json b/cli/internal/fs/testdata/dotenv-undefined/turbo.json deleted file mode 100644 index f27ee6c9b5ebf..0000000000000 --- a/cli/internal/fs/testdata/dotenv-undefined/turbo.json +++ /dev/null @@ 
-1,5 +0,0 @@ -{ - "pipeline": { - "build": {} - } -} diff --git a/cli/internal/fs/testdata/invalid-env-1/turbo.json b/cli/internal/fs/testdata/invalid-env-1/turbo.json deleted file mode 100644 index e4a6517a00487..0000000000000 --- a/cli/internal/fs/testdata/invalid-env-1/turbo.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "pipeline": { - "task1": { - // all invalid value - "env": ["$A", "$B"] - } - } -} diff --git a/cli/internal/fs/testdata/invalid-env-2/turbo.json b/cli/internal/fs/testdata/invalid-env-2/turbo.json deleted file mode 100644 index 92eec96f607a9..0000000000000 --- a/cli/internal/fs/testdata/invalid-env-2/turbo.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "pipeline": { - "task1": { - // Mixed values - "env": ["$A", "B"] - } - } -} diff --git a/cli/internal/fs/testdata/invalid-global-env/turbo.json b/cli/internal/fs/testdata/invalid-global-env/turbo.json deleted file mode 100644 index 2ae9ff9c7d42b..0000000000000 --- a/cli/internal/fs/testdata/invalid-global-env/turbo.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - // Both global declarations with duplicates - "globalDependencies": ["$FOO", "$BAR", "somefile.txt", "somefile.txt"], - // some invalid values - "globalEnv": ["FOO", "BAZ", "$QUX"], - "pipeline": { - "task1": { - "dependsOn": ["$A"] - } - } -} diff --git a/cli/internal/fs/testdata/legacy-env/turbo.json b/cli/internal/fs/testdata/legacy-env/turbo.json deleted file mode 100644 index 6b082c4926bae..0000000000000 --- a/cli/internal/fs/testdata/legacy-env/turbo.json +++ /dev/null @@ -1,34 +0,0 @@ -// mocked test comment -{ - // Both global declarations with duplicates and with - "globalDependencies": ["$FOO", "$BAR", "somefile.txt", "somefile.txt"], - "globalEnv": ["FOO", "BAZ", "QUX"], - "pipeline": { - // Only legacy declaration - "task1": { - "dependsOn": ["$A"] - }, - // Only new declaration - "task2": { - "env": ["A"] - }, - // Same var declared in both - "task3": { - "dependsOn": ["$A"], - "env": ["A"] - }, - // Different vars declared in both - "task4": { - "dependsOn": ["$A"], - "env": ["B"] - }, - - // some edge cases - "task6": { "env": ["A", "B", "C"], "dependsOn": ["$D", "$E", "$F"] }, - "task7": { "env": ["A", "B", "C"], "dependsOn": ["$A", "$B", "$C"] }, - "task8": { "env": ["A", "B", "C"], "dependsOn": ["A", "B", "C"] }, - "task9": { "env": [], "dependsOn": ["$A"] }, - "task10": { "env": ["A", "A"], "dependsOn": ["$A", "$A"] }, - "task11": { "env": ["A", "A"], "dependsOn": ["$B", "$B"] } - } -} diff --git a/cli/internal/fs/testdata/legacy-only/package.json b/cli/internal/fs/testdata/legacy-only/package.json deleted file mode 100644 index 03534b72145e1..0000000000000 --- a/cli/internal/fs/testdata/legacy-only/package.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "turbo": { - "pipeline": { - "build": {} - } - } -} diff --git a/cli/internal/fs/testdata/passthrough-empty/turbo.json b/cli/internal/fs/testdata/passthrough-empty/turbo.json deleted file mode 100644 index 086a4c2ad5043..0000000000000 --- a/cli/internal/fs/testdata/passthrough-empty/turbo.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "globalPassThroughEnv": [], - "pipeline": { - "build": { - "passThroughEnv": [] - } - } -} diff --git a/cli/internal/fs/testdata/passthrough-null/turbo.json b/cli/internal/fs/testdata/passthrough-null/turbo.json deleted file mode 100644 index 8e2ba9c093c1e..0000000000000 --- a/cli/internal/fs/testdata/passthrough-null/turbo.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "globalPassThroughEnv": null, - "pipeline": { - "build": { - "passThroughEnv": null - } - } -} diff --git 
a/cli/internal/fs/testdata/passthrough-populated/turbo.json b/cli/internal/fs/testdata/passthrough-populated/turbo.json deleted file mode 100644 index 693d877e66f75..0000000000000 --- a/cli/internal/fs/testdata/passthrough-populated/turbo.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "globalPassThroughEnv": ["C", "B", "A"], - "pipeline": { - "build": { - "passThroughEnv": ["Z", "Y", "X"] - } - } -} diff --git a/cli/internal/fs/testdata/passthrough-undefined/turbo.json b/cli/internal/fs/testdata/passthrough-undefined/turbo.json deleted file mode 100644 index f27ee6c9b5ebf..0000000000000 --- a/cli/internal/fs/testdata/passthrough-undefined/turbo.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "pipeline": { - "build": {} - } -} diff --git a/cli/internal/fs/turbo_json.go b/cli/internal/fs/turbo_json.go deleted file mode 100644 index 911ee2ddb6a46..0000000000000 --- a/cli/internal/fs/turbo_json.go +++ /dev/null @@ -1,839 +0,0 @@ -package fs - -import ( - "encoding/json" - "fmt" - "io/ioutil" - "log" - "os" - "path/filepath" - "sort" - "strings" - - "github.com/muhammadmuzzammil1998/jsonc" - "github.com/pkg/errors" - "github.com/vercel/turbo/cli/internal/fs/hash" - "github.com/vercel/turbo/cli/internal/turbopath" - "github.com/vercel/turbo/cli/internal/util" -) - -const ( - configFile = "turbo.json" - envPipelineDelimiter = "$" - topologicalPipelineDelimiter = "^" -) - -// SpaceConfig is used to marshal and unmarshal the -// `experimentalSpaceId` field in a turbo.json -type SpaceConfig struct { - ID string `json:"id"` -} - -type rawTurboJSON struct { - // Global root filesystem dependencies - GlobalDependencies []string `json:"globalDependencies,omitempty"` - // Global env - GlobalEnv []string `json:"globalEnv,omitempty"` - - // Global passthrough env - GlobalPassThroughEnv []string `json:"globalPassThroughEnv,omitempty"` - - // .env files to consider, in order. - GlobalDotEnv []string `json:"globalDotEnv,omitempty"` - - // Pipeline is a map of Turbo pipeline entries which define the task graph - // and cache behavior on a per task or per package-task basis. - Pipeline Pipeline `json:"pipeline"` - // Configuration options when interfacing with the remote cache - RemoteCacheOptions *RemoteCacheOptions `json:"remoteCache,omitempty"` - - // Extends can be the name of another workspace - Extends []string `json:"extends,omitempty"` - - // Configuration for the space - Space *SpaceConfig `json:"experimentalSpaces,omitempty"` -} - -// pristineTurboJSON is used when marshaling a TurboJSON object into a json string -// Notably, it includes a PristinePipeline instead of the regular Pipeline. (i.e. TaskDefinition -// instead of BookkeepingTaskDefinition.) 
-type pristineTurboJSON struct { - GlobalDependencies []string `json:"globalDependencies,omitempty"` - GlobalEnv []string `json:"globalEnv,omitempty"` - GlobalPassThroughEnv []string `json:"globalPassThroughEnv"` - GlobalDotEnv turbopath.AnchoredUnixPathArray `json:"globalDotEnv"` - Pipeline PristinePipeline `json:"pipeline"` - RemoteCacheOptions RemoteCacheOptions `json:"remoteCache,omitempty"` - Extends []string `json:"extends,omitempty"` - Space *SpaceConfig `json:"experimentalSpaces,omitempty"` -} - -// TurboJSON represents a turbo.json configuration file -type TurboJSON struct { - GlobalDeps []string - GlobalEnv []string - GlobalPassThroughEnv []string - GlobalDotEnv turbopath.AnchoredUnixPathArray - Pipeline Pipeline - RemoteCacheOptions RemoteCacheOptions - Extends []string // A list of Workspace names - SpaceID string -} - -// RemoteCacheOptions is a struct for deserializing .remoteCache of configFile -type RemoteCacheOptions struct { - TeamID string `json:"teamId,omitempty"` - Signature bool `json:"signature,omitempty"` - Enabled bool `json:"enabled,omitempty"` -} - -// rawTaskWithDefaults exists to Marshal (i.e. turn a TaskDefinition into json). -// We use this for printing ResolvedTaskConfiguration, because we _want_ to show -// the user the default values for keys they have not configured. -type rawTaskWithDefaults struct { - Outputs []string `json:"outputs"` - Cache *bool `json:"cache"` - DependsOn []string `json:"dependsOn"` - Inputs []string `json:"inputs"` - OutputMode util.TaskOutputMode `json:"outputMode"` - Persistent bool `json:"persistent"` - Env []string `json:"env"` - PassThroughEnv []string `json:"passThroughEnv"` - DotEnv turbopath.AnchoredUnixPathArray `json:"dotEnv"` -} - -// rawTask exists to Unmarshal from json. When fields are omitted, we _want_ -// them to be missing, so that we can distinguish missing from an empty value. -type rawTask struct { - Outputs []string `json:"outputs,omitempty"` - Cache *bool `json:"cache,omitempty"` - DependsOn []string `json:"dependsOn,omitempty"` - Inputs []string `json:"inputs,omitempty"` - OutputMode *util.TaskOutputMode `json:"outputMode,omitempty"` - Persistent *bool `json:"persistent,omitempty"` - Env []string `json:"env,omitempty"` - PassThroughEnv []string `json:"passThroughEnv,omitempty"` - DotEnv []string `json:"dotEnv,omitempty"` -} - -// taskDefinitionHashable exists as a definition for PristinePipeline, which is used down -// stream for calculating the global hash. We want to exclude experimental fields here -// because we don't want experimental fields to be part of the global hash. -type taskDefinitionHashable struct { - Outputs hash.TaskOutputs - Cache bool - TopologicalDependencies []string - TaskDependencies []string - Inputs []string - OutputMode util.TaskOutputMode - Persistent bool - Env []string - PassThroughEnv []string - DotEnv turbopath.AnchoredUnixPathArray -} - -// taskDefinitionExperiments is a list of config fields in a task definition that are considered -// experimental. We keep these separated so we can compute a global hash without these. -type taskDefinitionExperiments struct { -} - -// PristinePipeline is a map of task names to TaskDefinition or taskDefinitionHashable. -// Depending on whether any experimental fields are defined, we will use either struct. -// The purpose is to omit experimental fields when making a pristine version, so that -// they don't show up in --dry/--summarize output or affect the global hash.
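Editor's note: the pointer fields in `rawTask` are what let the bookkeeping code distinguish a key that was omitted from one explicitly set to its zero value. A standalone sketch of the pattern, independent of the turbo types (the `task` type here is illustrative):

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Pointer and slice fields report whether a key was present at all,
// which a plain bool cannot do.
type task struct {
	Cache   *bool    `json:"cache,omitempty"`
	Outputs []string `json:"outputs,omitempty"`
}

func main() {
	var missing, explicit task
	_ = json.Unmarshal([]byte(`{}`), &missing)
	_ = json.Unmarshal([]byte(`{"cache": false, "outputs": []}`), &explicit)

	fmt.Println(missing.Cache == nil)    // true: "cache" was omitted
	fmt.Println(*explicit.Cache)         // false: "cache" was set explicitly
	fmt.Println(explicit.Outputs != nil) // true: "outputs" present but empty
}
```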
-type PristinePipeline map[string]interface{} - -// Pipeline is a map for deserializing .pipeline in configFile -type Pipeline map[string]BookkeepingTaskDefinition - -// BookkeepingTaskDefinition holds the underlying TaskDefinition and some bookkeeping data -// about the TaskDefinition. This wrapper struct allows us to leave TaskDefinition untouched. -type BookkeepingTaskDefinition struct { - definedFields util.Set - experimentalFields util.Set - experimental taskDefinitionExperiments - TaskDefinition taskDefinitionHashable -} - -// TaskDefinition is a representation of the configFile pipeline for further computation. -type TaskDefinition struct { - Outputs hash.TaskOutputs - Cache bool - - // TopologicalDependencies are tasks from package dependencies. - // E.g. "build" is a topological dependency in: - // dependsOn: ['^build']. - // This field is custom-marshalled from rawTask.DependsOn - TopologicalDependencies []string - - // TaskDependencies are anything that is not a topological dependency - // E.g. both something and //whatever are TaskDependencies in: - // dependsOn: ['something', '//whatever'] - // This field is custom-marshalled from rawTask.DependsOn - TaskDependencies []string - - // Inputs indicate the list of files this Task depends on. If any of those files change - // we can conclude that any cached outputs or logs for this Task should be invalidated. - Inputs []string - - // OutputMode determines how we should log the output. - OutputMode util.TaskOutputMode - - // Persistent indicates whether the Task is expected to exit or not - // Tasks marked Persistent do not exit (e.g. --watch mode or dev servers) - Persistent bool - - // This field is custom-marshalled from rawTask.Env and rawTask.DependsOn - Env []string - - // rawTask.PassThroughEnv - PassThroughEnv []string - - // rawTask.DotEnv - DotEnv turbopath.AnchoredUnixPathArray -} - -// GetTask returns a BookkeepingTaskDefinition based on the ID (package#task format) or name (e.g. "build") -func (pc Pipeline) GetTask(taskID string, taskName string) (*BookkeepingTaskDefinition, error) { - // first check for package-tasks - taskDefinition, ok := pc[taskID] - if !ok { - // then check for regular tasks - fallbackTaskDefinition, ok := pc[taskName] - // if neither, then bail - if !ok { - // Return an empty TaskDefinition - return nil, fmt.Errorf("Could not find task \"%s\" in pipeline", taskID) - } - - // override if we need to... - taskDefinition = fallbackTaskDefinition - } - - return &taskDefinition, nil -} - -// LoadTurboConfig loads, or optionally synthesizes, a TurboJSON instance -func LoadTurboConfig(dir turbopath.AbsoluteSystemPath, rootPackageJSON *PackageJSON, includeSynthesizedFromRootPackageJSON bool) (*TurboJSON, error) { - // If the root package.json still has a `turbo` key, log a warning and remove it. - if rootPackageJSON.LegacyTurboConfig != nil { - log.Printf("[WARNING] \"turbo\" in package.json is no longer supported. Migrate to %s by running \"npx @turbo/codemod create-turbo-config\"\n", configFile) - rootPackageJSON.LegacyTurboConfig = nil - } - - var turboJSON *TurboJSON - turboFromFiles, err := readTurboConfig(dir.UntypedJoin(configFile)) - - if !includeSynthesizedFromRootPackageJSON && err != nil { - // If the file didn't exist, throw a custom error here instead of propagating - if errors.Is(err, os.ErrNotExist) { - return nil, errors.Wrap(err, fmt.Sprintf("Could not find %s. 
Follow directions at https://turbo.build/repo/docs to create one", configFile)) - - } - - // There was an error, and we don't have any chance of recovering - // because we aren't synthesizing anything - return nil, err - } else if !includeSynthesizedFromRootPackageJSON { - // We're not synthesizing anything and there was no error, we're done - return turboFromFiles, nil - } else if errors.Is(err, os.ErrNotExist) { - // turbo.json doesn't exist, but we're going to try to synthesize something - turboJSON = &TurboJSON{ - GlobalEnv: []string{}, - Pipeline: make(Pipeline), - } - } else if err != nil { - // some other error happened, we can't recover - return nil, err - } else { - // we're synthesizing, but we have a starting point - // Note: this will have to change to support task inference in a monorepo - // for now, we're going to error on any "root" tasks and turn non-root tasks into root tasks - pipeline := make(Pipeline) - for taskID, taskDefinition := range turboFromFiles.Pipeline { - if util.IsPackageTask(taskID) { - return nil, fmt.Errorf("Package tasks (<package>#<task>) are not allowed in single-package repositories: found %v", taskID) - } - pipeline[util.RootTaskID(taskID)] = taskDefinition - } - turboJSON = turboFromFiles - turboJSON.Pipeline = pipeline - } - - for scriptName := range rootPackageJSON.Scripts { - if !turboJSON.Pipeline.HasTask(scriptName) { - taskName := util.RootTaskID(scriptName) - // Explicitly set Cache to false in this definition and add the bookkeeping fields - // so downstream we can pretend that it was set on purpose (as if read from a config file) - // rather than defaulting to the 0-value of a boolean field. - turboJSON.Pipeline[taskName] = BookkeepingTaskDefinition{ - definedFields: util.SetFromStrings([]string{"Cache"}), - TaskDefinition: taskDefinitionHashable{ - Cache: false, - }, - } - } - } - return turboJSON, nil -} - -// TurboJSONValidation is the signature for a validation function passed to Validate() -type TurboJSONValidation func(*TurboJSON) []error - -// Validate calls an array of validation functions on the TurboJSON struct. -// The validations can be customized by the caller. -func (tj *TurboJSON) Validate(validations []TurboJSONValidation) []error { - allErrors := []error{} - for _, validation := range validations { - errors := validation(tj) - allErrors = append(allErrors, errors...) - } - - return allErrors -} - -// readTurboConfig reads turbo.json from a provided path -func readTurboConfig(turboJSONPath turbopath.AbsoluteSystemPath) (*TurboJSON, error) { - // If the configFile exists, use that - if turboJSONPath.FileExists() { - turboJSON, err := readTurboJSON(turboJSONPath) - if err != nil { - return nil, fmt.Errorf("%s: %w", configFile, err) - } - - return turboJSON, nil - } - - // If there's no turbo.json, return an error. 
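Editor's note: because `Validate` above accepts caller-supplied functions, consumers can bundle arbitrary checks and collect every failure in one pass. A minimal in-package sketch; the rule and both function names are hypothetical, not part of the deleted file:

```go
package fs

import (
	"fmt"
	"log"
)

// validatePipelineNonEmpty is a hypothetical caller-supplied check.
func validatePipelineNonEmpty(tj *TurboJSON) []error {
	if len(tj.Pipeline) == 0 {
		return []error{fmt.Errorf("turbo.json must define at least one pipeline task")}
	}
	return nil
}

// reportConfigErrors runs the checks and logs every failure instead of
// stopping at the first one.
func reportConfigErrors(tj *TurboJSON) {
	for _, err := range tj.Validate([]TurboJSONValidation{validatePipelineNonEmpty}) {
		log.Println(err)
	}
}
```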
- return nil, os.ErrNotExist -} - -// readTurboJSON reads the configFile in to a struct -func readTurboJSON(path turbopath.AbsoluteSystemPath) (*TurboJSON, error) { - file, err := path.Open() - if err != nil { - return nil, err - } - var turboJSON *TurboJSON - data, err := ioutil.ReadAll(file) - if err != nil { - return nil, err - } - - err = jsonc.Unmarshal(data, &turboJSON) - - if err != nil { - return nil, err - } - - return turboJSON, nil -} - -// GetTaskDefinition returns a TaskDefinition from a serialized definition in configFile -func (pc Pipeline) GetTaskDefinition(taskID string) (TaskDefinition, bool) { - if entry, ok := pc[taskID]; ok { - return entry.GetTaskDefinition(), true - } - _, task := util.GetPackageTaskFromId(taskID) - entry, ok := pc[task] - return entry.GetTaskDefinition(), ok -} - -// HasTask returns true if the given task is defined in the pipeline, either directly or -// via a package task (`pkg#task`) -func (pc Pipeline) HasTask(task string) bool { - for key := range pc { - if key == task { - return true - } - if util.IsPackageTask(key) { - _, taskName := util.GetPackageTaskFromId(key) - if taskName == task { - return true - } - } - } - return false -} - -// Pristine returns a PristinePipeline, this is used for printing to console and pruning -func (pc Pipeline) Pristine() PristinePipeline { - pristine := PristinePipeline{} - for taskName, taskDef := range pc { - // If there are any experimental fields, we will include them with 0-values - // if there aren't, we will omit them entirely - if taskDef.hasExperimentalFields() { - pristine[taskName] = taskDef.GetTaskDefinition() // merges experimental fields in - } else { - pristine[taskName] = taskDef.TaskDefinition // has no experimental fields - } - } - return pristine -} - -// hasField checks the internal bookkeeping definedFields field to -// see whether a field was actually in the underlying turbo.json -// or whether it was initialized with its 0-value. -func (btd BookkeepingTaskDefinition) hasField(fieldName string) bool { - return btd.definedFields.Includes(fieldName) || btd.experimentalFields.Includes(fieldName) -} - -// hasExperimentalFields keeps track of whether any experimental fields were found -func (btd BookkeepingTaskDefinition) hasExperimentalFields() bool { - return len(btd.experimentalFields) > 0 -} - -// GetTaskDefinition gets a TaskDefinition by merging the experimental and non-experimental fields -// into a single representation to use downstream. -func (btd BookkeepingTaskDefinition) GetTaskDefinition() TaskDefinition { - return TaskDefinition{ - Outputs: btd.TaskDefinition.Outputs, - Cache: btd.TaskDefinition.Cache, - TopologicalDependencies: btd.TaskDefinition.TopologicalDependencies, - TaskDependencies: btd.TaskDefinition.TaskDependencies, - Inputs: btd.TaskDefinition.Inputs, - OutputMode: btd.TaskDefinition.OutputMode, - Persistent: btd.TaskDefinition.Persistent, - Env: btd.TaskDefinition.Env, - DotEnv: btd.TaskDefinition.DotEnv, - PassThroughEnv: btd.TaskDefinition.PassThroughEnv, - } -} - -// MergeTaskDefinitions accepts an array of BookkeepingTaskDefinitions and merges them into -// a single TaskDefinition. It uses the bookkeeping definedFields to determine which fields should -// be overwritten and when 0-values should be respected. 
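Editor's note: the doc comment above describes the contract the `MergeTaskDefinitions` function defined next honors: later definitions win per field, but only for fields their bookkeeping marks as defined, so a zero value never clobbers an earlier explicit choice. A small in-package sketch of that behavior (the function name and values are illustrative, and it relies on package-private fields):

```go
package fs

import "github.com/vercel/turbo/cli/internal/util"

// mergeExample shows last-writer-wins merging gated by definedFields.
// `base` explicitly sets Cache=false; `override` defines nothing, so its
// zero-valued fields must be ignored by the merge.
func mergeExample() (*TaskDefinition, error) {
	base := BookkeepingTaskDefinition{
		definedFields:  util.SetFromStrings([]string{"Cache"}),
		TaskDefinition: taskDefinitionHashable{Cache: false},
	}
	override := BookkeepingTaskDefinition{
		// No definedFields entries: Cache here is just a zero-ish value
		// that MergeTaskDefinitions should not apply.
		TaskDefinition: taskDefinitionHashable{Cache: true},
	}
	// Expected result: Cache remains false, from `base`.
	return MergeTaskDefinitions([]BookkeepingTaskDefinition{base, override})
}
```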
-func MergeTaskDefinitions(taskDefinitions []BookkeepingTaskDefinition) (*TaskDefinition, error) { - // Start with an empty definition - mergedTaskDefinition := &TaskDefinition{} - - // Set the default, because the 0-value will be false, and if no turbo.jsons had - // this field set for this task, we want it to be true. - mergedTaskDefinition.Cache = true - - // For each of the TaskDefinitions we know of, merge them in - for _, bookkeepingTaskDef := range taskDefinitions { - taskDef := bookkeepingTaskDef.GetTaskDefinition() - - if bookkeepingTaskDef.hasField("Outputs") { - mergedTaskDefinition.Outputs = taskDef.Outputs - } - - if bookkeepingTaskDef.hasField("Cache") { - mergedTaskDefinition.Cache = taskDef.Cache - } - - if bookkeepingTaskDef.hasField("DependsOn") { - mergedTaskDefinition.TopologicalDependencies = taskDef.TopologicalDependencies - } - - if bookkeepingTaskDef.hasField("DependsOn") { - mergedTaskDefinition.TaskDependencies = taskDef.TaskDependencies - } - - if bookkeepingTaskDef.hasField("Inputs") { - mergedTaskDefinition.Inputs = taskDef.Inputs - } - - if bookkeepingTaskDef.hasField("OutputMode") { - mergedTaskDefinition.OutputMode = taskDef.OutputMode - } - - if bookkeepingTaskDef.hasField("Persistent") { - mergedTaskDefinition.Persistent = taskDef.Persistent - } - - if bookkeepingTaskDef.hasField("Env") { - mergedTaskDefinition.Env = taskDef.Env - } - - if bookkeepingTaskDef.hasField("PassThroughEnv") { - mergedTaskDefinition.PassThroughEnv = taskDef.PassThroughEnv - } - - if bookkeepingTaskDef.hasField("DotEnv") { - mergedTaskDefinition.DotEnv = taskDef.DotEnv - } - } - - return mergedTaskDefinition, nil -} - -// UnmarshalJSON deserializes a single task definition from -// turbo.json into a TaskDefinition struct -func (btd *BookkeepingTaskDefinition) UnmarshalJSON(data []byte) error { - task := rawTask{} - if err := json.Unmarshal(data, &task); err != nil { - return err - } - - btd.definedFields = util.Set{} - btd.experimentalFields = util.Set{} - - if task.Outputs != nil { - var inclusions []string - var exclusions []string - // Assign a bookkeeping field so we know that there really were - // outputs configured in the underlying config file. 
- btd.definedFields.Add("Outputs") - - for _, glob := range task.Outputs { - if strings.HasPrefix(glob, "!") { - if filepath.IsAbs(glob[1:]) { - log.Printf("[WARNING] Using an absolute path in \"outputs\" (%v) will not work and will be an error in a future version", glob) - } - exclusions = append(exclusions, glob[1:]) - } else { - if filepath.IsAbs(glob) { - log.Printf("[WARNING] Using an absolute path in \"outputs\" (%v) will not work and will be an error in a future version", glob) - } - inclusions = append(inclusions, glob) - } - } - - btd.TaskDefinition.Outputs = hash.TaskOutputs{ - Inclusions: inclusions, - Exclusions: exclusions, - } - - sort.Strings(btd.TaskDefinition.Outputs.Inclusions) - sort.Strings(btd.TaskDefinition.Outputs.Exclusions) - } - - if task.Cache == nil { - btd.TaskDefinition.Cache = true - } else { - btd.definedFields.Add("Cache") - btd.TaskDefinition.Cache = *task.Cache - } - - envVarDependencies := make(util.Set) - envVarPassThroughs := make(util.Set) - - btd.TaskDefinition.TopologicalDependencies = []string{} // TODO @mehulkar: this should be a set - btd.TaskDefinition.TaskDependencies = []string{} // TODO @mehulkar: this should be a set - - // If there was a dependsOn field, add the bookkeeping - // we don't care what's in the field, just that it was there - // We'll use this marker to overwrite while merging TaskDefinitions. - if task.DependsOn != nil { - btd.definedFields.Add("DependsOn") - } - - for _, dependency := range task.DependsOn { - if strings.HasPrefix(dependency, envPipelineDelimiter) { - log.Printf("[DEPRECATED] Declaring an environment variable in \"dependsOn\" is deprecated, found %s. Use the \"env\" key or use `npx @turbo/codemod migrate-env-var-dependencies`.\n", dependency) - btd.definedFields.Add("Env") - envVarDependencies.Add(strings.TrimPrefix(dependency, envPipelineDelimiter)) - } else if strings.HasPrefix(dependency, topologicalPipelineDelimiter) { - // Note: This will get assigned multiple times in the loop, but we only care that it's true - btd.TaskDefinition.TopologicalDependencies = append(btd.TaskDefinition.TopologicalDependencies, strings.TrimPrefix(dependency, topologicalPipelineDelimiter)) - } else { - btd.TaskDefinition.TaskDependencies = append(btd.TaskDefinition.TaskDependencies, dependency) - } - } - - sort.Strings(btd.TaskDefinition.TaskDependencies) - sort.Strings(btd.TaskDefinition.TopologicalDependencies) - - // Append env key into Env - if task.Env != nil { - btd.definedFields.Add("Env") - if err := gatherEnvVars(task.Env, "env", &envVarDependencies); err != nil { - return err - } - } - - btd.TaskDefinition.Env = envVarDependencies.UnsafeListOfStrings() - sort.Strings(btd.TaskDefinition.Env) - - if task.PassThroughEnv != nil { - btd.definedFields.Add("PassThroughEnv") - if err := gatherEnvVars(task.PassThroughEnv, "passThroughEnv", &envVarPassThroughs); err != nil { - return err - } - - btd.TaskDefinition.PassThroughEnv = envVarPassThroughs.UnsafeListOfStrings() - sort.Strings(btd.TaskDefinition.PassThroughEnv) - } - - if task.DotEnv != nil { - btd.definedFields.Add("DotEnv") - - // Going to _at least_ be an empty array. - btd.TaskDefinition.DotEnv = make(turbopath.AnchoredUnixPathArray, 0, len(task.DotEnv)) - - // Port the raw dotEnv values in. - for _, dotEnvPath := range task.DotEnv { - typeCheckedPath, err := turbopath.CheckedToAnchoredUnixPath(dotEnvPath) - if err != nil { - return err - } - - // These are _explicitly_ not sorted. 
- btd.TaskDefinition.DotEnv = append(btd.TaskDefinition.DotEnv, typeCheckedPath) - } - } - - if task.Inputs != nil { - // Note that we don't require Inputs to be sorted, we're going to - // hash the resulting files and sort that instead - btd.definedFields.Add("Inputs") - // TODO: during rust port, this should be moved to a post-parse validation step - for _, input := range task.Inputs { - if filepath.IsAbs(input) { - log.Printf("[WARNING] Using an absolute path in \"inputs\" (%v) will not work and will be an error in a future version", input) - } - } - btd.TaskDefinition.Inputs = task.Inputs - } - - if task.OutputMode != nil { - btd.definedFields.Add("OutputMode") - btd.TaskDefinition.OutputMode = *task.OutputMode - } - - if task.Persistent != nil { - btd.definedFields.Add("Persistent") - btd.TaskDefinition.Persistent = *task.Persistent - } else { - btd.TaskDefinition.Persistent = false - } - return nil -} - -// MarshalJSON serializes taskDefinitionHashable struct into json -func (c taskDefinitionHashable) MarshalJSON() ([]byte, error) { - task := makeRawTask( - c.Outputs, - c.Cache, - c.TopologicalDependencies, - c.TaskDependencies, - c.Inputs, - c.OutputMode, - c.Persistent, - c.Env, - c.PassThroughEnv, - c.DotEnv, - ) - return json.Marshal(task) -} - -// MarshalJSON serializes TaskDefinition struct into json -func (c TaskDefinition) MarshalJSON() ([]byte, error) { - task := makeRawTask( - c.Outputs, - c.Cache, - c.TopologicalDependencies, - c.TaskDependencies, - c.Inputs, - c.OutputMode, - c.Persistent, - c.Env, - c.PassThroughEnv, - c.DotEnv, - ) - return json.Marshal(task) -} - -// UnmarshalJSON deserializes the contents of turbo.json into a TurboJSON struct -func (tj *TurboJSON) UnmarshalJSON(data []byte) error { - raw := &rawTurboJSON{} - if err := json.Unmarshal(data, &raw); err != nil { - return err - } - - globalEnv := make(util.Set) - globalPassThroughEnv := make(util.Set) - globalFileDependencies := make(util.Set) - - if err := gatherEnvVars(raw.GlobalEnv, "globalEnv", &globalEnv); err != nil { - return err - } - if err := gatherEnvVars(raw.GlobalPassThroughEnv, "globalPassThroughEnv", &globalPassThroughEnv); err != nil { - return err - } - - // TODO: In the rust port, warnings should be refactored to a post-parse validation step - for _, value := range raw.GlobalDependencies { - if strings.HasPrefix(value, envPipelineDelimiter) { - log.Printf("[DEPRECATED] Declaring an environment variable in \"globalDependencies\" is deprecated, found %s. Use the \"globalEnv\" key or use `npx @turbo/codemod migrate-env-var-dependencies`.\n", value) - globalEnv.Add(strings.TrimPrefix(value, envPipelineDelimiter)) - } else { - if filepath.IsAbs(value) { - log.Printf("[WARNING] Using an absolute path in \"globalDependencies\" (%v) will not work and will be an error in a future version", value) - } - globalFileDependencies.Add(value) - } - } - - // turn the set into an array and assign to the TurboJSON struct fields. - tj.GlobalEnv = globalEnv.UnsafeListOfStrings() - sort.Strings(tj.GlobalEnv) - - if raw.GlobalPassThroughEnv != nil { - tj.GlobalPassThroughEnv = globalPassThroughEnv.UnsafeListOfStrings() - sort.Strings(tj.GlobalPassThroughEnv) - } - - tj.GlobalDeps = globalFileDependencies.UnsafeListOfStrings() - sort.Strings(tj.GlobalDeps) - - // Port the raw globalDotEnv values in. 
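Editor's note: the deprecation handling just above folds legacy `$VAR` entries from `globalDependencies` into `GlobalEnv` (after a warning), while plain entries stay file dependencies. A hypothetical in-package test sketching the observable behavior; the test name is illustrative:

```go
package fs

import "testing"

func TestLegacyGlobalDependencyMigration(t *testing.T) {
	tj := &TurboJSON{}
	input := []byte(`{"globalDependencies": ["$FOO", "somefile.txt"], "pipeline": {}}`)
	if err := tj.UnmarshalJSON(input); err != nil {
		t.Fatal(err)
	}
	// "$FOO" is stripped of its "$" prefix and moved to GlobalEnv.
	if len(tj.GlobalEnv) != 1 || tj.GlobalEnv[0] != "FOO" {
		t.Errorf("got GlobalEnv %v, want [FOO]", tj.GlobalEnv)
	}
	// "somefile.txt" remains a file dependency.
	if len(tj.GlobalDeps) != 1 || tj.GlobalDeps[0] != "somefile.txt" {
		t.Errorf("got GlobalDeps %v, want [somefile.txt]", tj.GlobalDeps)
	}
}
```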
- if raw.GlobalDotEnv != nil { - tj.GlobalDotEnv = make(turbopath.AnchoredUnixPathArray, 0, len(raw.GlobalDotEnv)) - - for _, dotEnvPath := range raw.GlobalDotEnv { - typeCheckedPath, err := turbopath.CheckedToAnchoredUnixPath(dotEnvPath) - if err != nil { - return err - } - tj.GlobalDotEnv = append(tj.GlobalDotEnv, typeCheckedPath) - } - } - - // copy these over, we don't need any changes here. - tj.Pipeline = raw.Pipeline - tj.Extends = raw.Extends - // Directly to SpaceID, we don't need to keep the struct - if raw.Space != nil { - tj.SpaceID = raw.Space.ID - } - - if raw.RemoteCacheOptions == nil { - tj.RemoteCacheOptions = RemoteCacheOptions{ - Enabled: true, - } - } else { - tj.RemoteCacheOptions = *raw.RemoteCacheOptions - } - - return nil -} - -// UnmarshalJSON deserializes the remoteCache key from turbo.json into an in-memory struct -func (rc *RemoteCacheOptions) UnmarshalJSON(data []byte) error { - type Alias RemoteCacheOptions // alias type to prevent infinite recursion - - // tmp struct with the same fields, but replacing the field we care about (Enabled) with a pointer - type Tmp struct { - *Alias - Enabled *bool `json:"enabled"` - } - // initialize this tmp struct with the rc options we have - tmp := Tmp{Alias: (*Alias)(rc)} - - if err := json.Unmarshal(data, &tmp); err != nil { - return err - } - - // If the Enabled field is missing in the JSON, default it to true; - // otherwise, copy the parsed value through to rc (the pointer field - // shadows the aliased one, so rc.Enabled is not set by Unmarshal itself). - if tmp.Enabled == nil { - rc.Enabled = true - } else { - rc.Enabled = *tmp.Enabled - } - - return nil -} - -// MarshalJSON converts a TurboJSON into the equivalent json object in bytes -// note: we go via rawTurboJSON so that the output format is correct. -// This is used by `turbo prune` to generate a pruned turbo.json -// and also by --summarize & --dry=json to serialize the known config -// into something we can print to screen -func (tj *TurboJSON) MarshalJSON() ([]byte, error) { - raw := pristineTurboJSON{} - raw.GlobalDependencies = tj.GlobalDeps - raw.GlobalEnv = tj.GlobalEnv - raw.GlobalDotEnv = tj.GlobalDotEnv - raw.GlobalPassThroughEnv = tj.GlobalPassThroughEnv - raw.Pipeline = tj.Pipeline.Pristine() - raw.RemoteCacheOptions = tj.RemoteCacheOptions - - if tj.SpaceID != "" { - raw.Space = &SpaceConfig{ID: tj.SpaceID} - } - - return json.Marshal(&raw) -} - -func makeRawTask( - outputs hash.TaskOutputs, - shouldCache bool, - topologicalDependencies []string, - taskDependencies []string, - inputs []string, - outputMode util.TaskOutputMode, - persistent bool, - env []string, - passThroughEnv []string, - dotEnv turbopath.AnchoredUnixPathArray, -) *rawTaskWithDefaults { - // Initialize with empty arrays, so we get empty arrays serialized into JSON - task := &rawTaskWithDefaults{ - Outputs: []string{}, - Inputs: []string{}, - Env: []string{}, - DependsOn: []string{}, - } - - task.Persistent = persistent - task.Cache = &shouldCache - task.OutputMode = outputMode - - // This should _not_ be sorted. - task.DotEnv = dotEnv - - if len(inputs) > 0 { - task.Inputs = inputs - } - - if len(env) > 0 { - task.Env = append(task.Env, env...) - } - - if len(outputs.Inclusions) > 0 { - task.Outputs = append(task.Outputs, outputs.Inclusions...) - } - - for _, i := range outputs.Exclusions { - task.Outputs = append(task.Outputs, "!"+i) - } - - if len(taskDependencies) > 0 { - task.DependsOn = append(task.DependsOn, taskDependencies...) 
- } - - for _, i := range topologicalDependencies { - task.DependsOn = append(task.DependsOn, "^"+i) - } - - if passThroughEnv != nil { - task.PassThroughEnv = passThroughEnv - sort.Strings(task.PassThroughEnv) - } - - // These _should_ already be sorted when the TaskDefinition struct was unmarshaled, - // but we want to ensure they're sorted on the way out also, just in case something - // in the middle mutates the items. - sort.Strings(task.DependsOn) - sort.Strings(task.Outputs) - sort.Strings(task.Env) - sort.Strings(task.Inputs) - return task -} - -// gatherEnvVars puts env vars into the provided set as long as they don't have an invalid value. -func gatherEnvVars(vars []string, key string, into *util.Set) error { - for _, value := range vars { - if strings.HasPrefix(value, envPipelineDelimiter) { - // Hard error to help people specify this correctly during migration. - // TODO: Remove this error after we have run summary. - return fmt.Errorf("You specified \"%s\" in the \"%s\" key. You should not prefix your environment variables with \"%s\"", value, key, envPipelineDelimiter) - } - - into.Add(value) - } - - return nil -} diff --git a/cli/internal/fs/turbo_json_test.go b/cli/internal/fs/turbo_json_test.go deleted file mode 100644 index dd2f4cb401cd7..0000000000000 --- a/cli/internal/fs/turbo_json_test.go +++ /dev/null @@ -1,549 +0,0 @@ -package fs - -import ( - "os" - "reflect" - "sort" - "strings" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/vercel/turbo/cli/internal/fs/hash" - "github.com/vercel/turbo/cli/internal/turbopath" - "github.com/vercel/turbo/cli/internal/util" - "gotest.tools/v3/assert/cmp" -) - -func assertIsSorted(t *testing.T, arr []string, msg string) { - t.Helper() - if arr == nil { - return - } - - copied := make([]string, len(arr)) - copy(copied, arr) - sort.Strings(copied) - if !reflect.DeepEqual(arr, copied) { - t.Errorf("Expected sorted, got %v: %v", arr, msg) - } -} - -func Test_ReadTurboConfigDotEnvUndefined(t *testing.T) { - testDir := getTestDir(t, "dotenv-undefined") - turboJSON, turboJSONReadErr := readTurboConfig(testDir.UntypedJoin("turbo.json")) - if turboJSONReadErr != nil { - t.Fatalf("invalid parse: %#v", turboJSONReadErr) - } - - // Undefined is nil. - var typedNil turbopath.AnchoredUnixPathArray - - assert.Equal(t, typedNil, turboJSON.GlobalDotEnv) - - pipelineExpected := Pipeline{ - "build": { - definedFields: util.SetFromStrings([]string{}), - experimentalFields: util.SetFromStrings([]string{}), - experimental: taskDefinitionExperiments{}, - TaskDefinition: taskDefinitionHashable{ - Outputs: hash.TaskOutputs{}, - Cache: true, - TopologicalDependencies: []string{}, - TaskDependencies: []string{}, - OutputMode: util.FullTaskOutput, - Env: []string{}, - }, - }, - } - - assert.Equal(t, pipelineExpected, turboJSON.Pipeline) - - // Snapshot test of serialization. - bytes, _ := turboJSON.MarshalJSON() - assert.Equal(t, "{\"globalPassThroughEnv\":null,\"globalDotEnv\":null,\"pipeline\":{\"build\":{\"outputs\":[],\"cache\":true,\"dependsOn\":[],\"inputs\":[],\"outputMode\":\"full\",\"persistent\":false,\"env\":[],\"passThroughEnv\":null,\"dotEnv\":null}},\"remoteCache\":{\"enabled\":true}}", string(bytes)) -} - -func Test_ReadTurboConfigDotEnvNull(t *testing.T) { - testDir := getTestDir(t, "dotenv-null") - turboJSON, turboJSONReadErr := readTurboConfig(testDir.UntypedJoin("turbo.json")) - if turboJSONReadErr != nil { - t.Fatalf("invalid parse: %#v", turboJSONReadErr) - } - - // Undefined is nil. 
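Editor's note: the "undefined is nil" assertions in these tests rest on how Go's `encoding/json` treats nil versus empty slices. A standalone sketch of that distinction:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// A nil slice (the "undefined"/"null" cases in the tests) marshals to
// JSON null, while a non-nil empty slice marshals to [].
func main() {
	var undefined []string     // typed nil
	empty := make([]string, 0) // present but empty

	a, _ := json.Marshal(undefined)
	b, _ := json.Marshal(empty)
	fmt.Println(string(a)) // null
	fmt.Println(string(b)) // []
}
```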
- var typedNil turbopath.AnchoredUnixPathArray - - assert.Equal(t, typedNil, turboJSON.GlobalDotEnv) - - pipelineExpected := Pipeline{ - "build": { - definedFields: util.SetFromStrings([]string{}), - experimentalFields: util.SetFromStrings([]string{}), - experimental: taskDefinitionExperiments{}, - TaskDefinition: taskDefinitionHashable{ - Outputs: hash.TaskOutputs{}, - Cache: true, - TopologicalDependencies: []string{}, - TaskDependencies: []string{}, - OutputMode: util.FullTaskOutput, - Env: []string{}, - }, - }, - } - - assert.Equal(t, pipelineExpected, turboJSON.Pipeline) - - // Snapshot test of serialization. - bytes, _ := turboJSON.MarshalJSON() - assert.Equal(t, "{\"globalPassThroughEnv\":null,\"globalDotEnv\":null,\"pipeline\":{\"build\":{\"outputs\":[],\"cache\":true,\"dependsOn\":[],\"inputs\":[],\"outputMode\":\"full\",\"persistent\":false,\"env\":[],\"passThroughEnv\":null,\"dotEnv\":null}},\"remoteCache\":{\"enabled\":true}}", string(bytes)) -} - -func Test_ReadTurboConfigDotEnvEmpty(t *testing.T) { - testDir := getTestDir(t, "dotenv-empty") - turboJSON, turboJSONReadErr := readTurboConfig(testDir.UntypedJoin("turbo.json")) - if turboJSONReadErr != nil { - t.Fatalf("invalid parse: %#v", turboJSONReadErr) - } - - assert.Equal(t, make(turbopath.AnchoredUnixPathArray, 0), turboJSON.GlobalDotEnv) - - pipelineExpected := Pipeline{ - "build": { - definedFields: util.SetFromStrings([]string{"DotEnv"}), - experimentalFields: util.SetFromStrings([]string{}), - experimental: taskDefinitionExperiments{}, - TaskDefinition: taskDefinitionHashable{ - Outputs: hash.TaskOutputs{}, - Cache: true, - TopologicalDependencies: []string{}, - TaskDependencies: []string{}, - OutputMode: util.FullTaskOutput, - Env: []string{}, - DotEnv: make(turbopath.AnchoredUnixPathArray, 0), - }, - }, - } - - assert.Equal(t, pipelineExpected, turboJSON.Pipeline) - - // Snapshot test of serialization. - bytes, _ := turboJSON.MarshalJSON() - assert.Equal(t, "{\"globalPassThroughEnv\":null,\"globalDotEnv\":[],\"pipeline\":{\"build\":{\"outputs\":[],\"cache\":true,\"dependsOn\":[],\"inputs\":[],\"outputMode\":\"full\",\"persistent\":false,\"env\":[],\"passThroughEnv\":null,\"dotEnv\":[]}},\"remoteCache\":{\"enabled\":true}}", string(bytes)) -} - -func Test_ReadTurboConfigDotEnvPopulated(t *testing.T) { - testDir := getTestDir(t, "dotenv-populated") - turboJSON, turboJSONReadErr := readTurboConfig(testDir.UntypedJoin("turbo.json")) - if turboJSONReadErr != nil { - t.Fatalf("invalid parse: %#v", turboJSONReadErr) - } - - assert.Equal(t, turbopath.AnchoredUnixPathArray{"z", "y", "x"}, turboJSON.GlobalDotEnv) - - pipelineExpected := Pipeline{ - "build": { - definedFields: util.SetFromStrings([]string{"DotEnv"}), - experimentalFields: util.SetFromStrings([]string{}), - experimental: taskDefinitionExperiments{}, - TaskDefinition: taskDefinitionHashable{ - Outputs: hash.TaskOutputs{}, - Cache: true, - TopologicalDependencies: []string{}, - TaskDependencies: []string{}, - OutputMode: util.FullTaskOutput, - Env: []string{}, - DotEnv: turbopath.AnchoredUnixPathArray{"3", "2", "1"}, - }, - }, - } - - assert.Equal(t, pipelineExpected, turboJSON.Pipeline) - - // Snapshot test of serialization. 
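Editor's note: the null/undefined behavior exercised here ultimately depends on the alias-and-shadow trick from `RemoteCacheOptions.UnmarshalJSON` above. A self-contained sketch of that pattern with a toy type (`options` is illustrative, not part of the codebase):

```go
package main

import (
	"encoding/json"
	"fmt"
)

type options struct {
	Enabled bool `json:"enabled"`
}

func (o *options) UnmarshalJSON(data []byte) error {
	// The alias type drops the custom UnmarshalJSON method, preventing
	// infinite recursion when we call json.Unmarshal below.
	type alias options
	tmp := struct {
		*alias
		// The shallower field shadows alias.Enabled, revealing whether
		// the "enabled" key was present at all.
		Enabled *bool `json:"enabled"`
	}{alias: (*alias)(o)}
	if err := json.Unmarshal(data, &tmp); err != nil {
		return err
	}
	if tmp.Enabled == nil {
		o.Enabled = true // default when the key is missing
	} else {
		o.Enabled = *tmp.Enabled
	}
	return nil
}

func main() {
	var a, b options
	_ = json.Unmarshal([]byte(`{}`), &a)
	_ = json.Unmarshal([]byte(`{"enabled": false}`), &b)
	fmt.Println(a.Enabled, b.Enabled) // true false
}
```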
- bytes, _ := turboJSON.MarshalJSON() - assert.Equal(t, "{\"globalPassThroughEnv\":null,\"globalDotEnv\":[\"z\",\"y\",\"x\"],\"pipeline\":{\"build\":{\"outputs\":[],\"cache\":true,\"dependsOn\":[],\"inputs\":[],\"outputMode\":\"full\",\"persistent\":false,\"env\":[],\"passThroughEnv\":null,\"dotEnv\":[\"3\",\"2\",\"1\"]}},\"remoteCache\":{\"enabled\":true}}", string(bytes)) -} - -func Test_ReadTurboConfigPassThroughEnvUndefined(t *testing.T) { - testDir := getTestDir(t, "passthrough-undefined") - turboJSON, turboJSONReadErr := readTurboConfig(testDir.UntypedJoin("turbo.json")) - if turboJSONReadErr != nil { - t.Fatalf("invalid parse: %#v", turboJSONReadErr) - } - - // Undefined is nil. - var typedNil []string - - assert.Equal(t, typedNil, turboJSON.GlobalPassThroughEnv) - - pipelineExpected := Pipeline{ - "build": { - definedFields: util.SetFromStrings([]string{}), - experimentalFields: util.SetFromStrings([]string{}), - experimental: taskDefinitionExperiments{}, - TaskDefinition: taskDefinitionHashable{ - Outputs: hash.TaskOutputs{}, - Cache: true, - TopologicalDependencies: []string{}, - TaskDependencies: []string{}, - OutputMode: util.FullTaskOutput, - Env: []string{}, - PassThroughEnv: typedNil, - }, - }, - } - - assert.Equal(t, pipelineExpected, turboJSON.Pipeline) - - // Snapshot test of serialization. - bytes, _ := turboJSON.MarshalJSON() - assert.Equal(t, "{\"globalPassThroughEnv\":null,\"globalDotEnv\":null,\"pipeline\":{\"build\":{\"outputs\":[],\"cache\":true,\"dependsOn\":[],\"inputs\":[],\"outputMode\":\"full\",\"persistent\":false,\"env\":[],\"passThroughEnv\":null,\"dotEnv\":null}},\"remoteCache\":{\"enabled\":true}}", string(bytes)) -} - -func Test_ReadTurboConfigPassThroughEnvNull(t *testing.T) { - testDir := getTestDir(t, "passthrough-null") - turboJSON, turboJSONReadErr := readTurboConfig(testDir.UntypedJoin("turbo.json")) - if turboJSONReadErr != nil { - t.Fatalf("invalid parse: %#v", turboJSONReadErr) - } - - // Undefined is nil. - var typedNil []string - - assert.Equal(t, typedNil, turboJSON.GlobalPassThroughEnv) - - pipelineExpected := Pipeline{ - "build": { - definedFields: util.SetFromStrings([]string{}), - experimentalFields: util.SetFromStrings([]string{}), - experimental: taskDefinitionExperiments{}, - TaskDefinition: taskDefinitionHashable{ - Outputs: hash.TaskOutputs{}, - Cache: true, - TopologicalDependencies: []string{}, - TaskDependencies: []string{}, - OutputMode: util.FullTaskOutput, - Env: []string{}, - PassThroughEnv: typedNil, - }, - }, - } - - assert.Equal(t, pipelineExpected, turboJSON.Pipeline) - - // Snapshot test of serialization. 
- bytes, _ := turboJSON.MarshalJSON() - assert.Equal(t, "{\"globalPassThroughEnv\":null,\"globalDotEnv\":null,\"pipeline\":{\"build\":{\"outputs\":[],\"cache\":true,\"dependsOn\":[],\"inputs\":[],\"outputMode\":\"full\",\"persistent\":false,\"env\":[],\"passThroughEnv\":null,\"dotEnv\":null}},\"remoteCache\":{\"enabled\":true}}", string(bytes)) -} - -func Test_ReadTurboConfigPassThroughEnvEmpty(t *testing.T) { - testDir := getTestDir(t, "passthrough-empty") - turboJSON, turboJSONReadErr := readTurboConfig(testDir.UntypedJoin("turbo.json")) - if turboJSONReadErr != nil { - t.Fatalf("invalid parse: %#v", turboJSONReadErr) - } - - assert.Equal(t, []string{}, turboJSON.GlobalPassThroughEnv) - - pipelineExpected := Pipeline{ - "build": { - definedFields: util.SetFromStrings([]string{"PassThroughEnv"}), - experimentalFields: util.SetFromStrings([]string{}), - experimental: taskDefinitionExperiments{}, - TaskDefinition: taskDefinitionHashable{ - Outputs: hash.TaskOutputs{}, - Cache: true, - TopologicalDependencies: []string{}, - TaskDependencies: []string{}, - OutputMode: util.FullTaskOutput, - Env: []string{}, - PassThroughEnv: []string{}, - }, - }, - } - - assert.Equal(t, pipelineExpected, turboJSON.Pipeline) - - // Snapshot test of serialization. - bytes, _ := turboJSON.MarshalJSON() - assert.Equal(t, "{\"globalPassThroughEnv\":[],\"globalDotEnv\":null,\"pipeline\":{\"build\":{\"outputs\":[],\"cache\":true,\"dependsOn\":[],\"inputs\":[],\"outputMode\":\"full\",\"persistent\":false,\"env\":[],\"passThroughEnv\":[],\"dotEnv\":null}},\"remoteCache\":{\"enabled\":true}}", string(bytes)) -} - -func Test_ReadTurboConfigPassThroughEnvPopulated(t *testing.T) { - testDir := getTestDir(t, "passthrough-populated") - turboJSON, turboJSONReadErr := readTurboConfig(testDir.UntypedJoin("turbo.json")) - if turboJSONReadErr != nil { - t.Fatalf("invalid parse: %#v", turboJSONReadErr) - } - - assert.Equal(t, []string{"A", "B", "C"}, turboJSON.GlobalPassThroughEnv) - - pipelineExpected := Pipeline{ - "build": { - definedFields: util.SetFromStrings([]string{"PassThroughEnv"}), - experimentalFields: util.SetFromStrings([]string{}), - experimental: taskDefinitionExperiments{}, - TaskDefinition: taskDefinitionHashable{ - Outputs: hash.TaskOutputs{}, - Cache: true, - TopologicalDependencies: []string{}, - TaskDependencies: []string{}, - OutputMode: util.FullTaskOutput, - Env: []string{}, - PassThroughEnv: []string{"X", "Y", "Z"}, - }, - }, - } - - assert.Equal(t, pipelineExpected, turboJSON.Pipeline) - - // Snapshot test of serialization. 
- bytes, _ := turboJSON.MarshalJSON() - assert.Equal(t, "{\"globalPassThroughEnv\":[\"A\",\"B\",\"C\"],\"globalDotEnv\":null,\"pipeline\":{\"build\":{\"outputs\":[],\"cache\":true,\"dependsOn\":[],\"inputs\":[],\"outputMode\":\"full\",\"persistent\":false,\"env\":[],\"passThroughEnv\":[\"X\",\"Y\",\"Z\"],\"dotEnv\":null}},\"remoteCache\":{\"enabled\":true}}", string(bytes)) -} - -func Test_ReadTurboConfig(t *testing.T) { - testDir := getTestDir(t, "correct") - turboJSON, turboJSONReadErr := readTurboConfig(testDir.UntypedJoin("turbo.json")) - - if turboJSONReadErr != nil { - t.Fatalf("invalid parse: %#v", turboJSONReadErr) - } - - assert.EqualValues(t, []string{"AWS_SECRET_KEY"}, turboJSON.GlobalPassThroughEnv) - - pipelineExpected := map[string]BookkeepingTaskDefinition{ - "build": { - definedFields: util.SetFromStrings([]string{"Outputs", "OutputMode", "DependsOn", "PassThroughEnv"}), - experimentalFields: util.SetFromStrings([]string{}), - experimental: taskDefinitionExperiments{}, - TaskDefinition: taskDefinitionHashable{ - Outputs: hash.TaskOutputs{Inclusions: []string{".next/**", "dist/**"}, Exclusions: []string{"dist/assets/**"}}, - Cache: true, - TopologicalDependencies: []string{"build"}, - TaskDependencies: []string{}, - OutputMode: util.NewTaskOutput, - Env: []string{}, - PassThroughEnv: []string{"GITHUB_TOKEN"}, - }, - }, - "lint": { - definedFields: util.SetFromStrings([]string{"Outputs", "OutputMode", "Cache", "DependsOn", "Env"}), - experimentalFields: util.SetFromStrings([]string{}), - experimental: taskDefinitionExperiments{}, - TaskDefinition: taskDefinitionHashable{ - Outputs: hash.TaskOutputs{}, - Cache: true, - TopologicalDependencies: []string{}, - TaskDependencies: []string{}, - OutputMode: util.NewTaskOutput, - Env: []string{"MY_VAR"}, - PassThroughEnv: nil, - }, - }, - "dev": { - definedFields: util.SetFromStrings([]string{"OutputMode", "Cache", "PassThroughEnv"}), - experimentalFields: util.SetFromStrings([]string{}), - experimental: taskDefinitionExperiments{}, - TaskDefinition: taskDefinitionHashable{ - Outputs: hash.TaskOutputs{}, - Cache: false, - TopologicalDependencies: []string{}, - TaskDependencies: []string{}, - OutputMode: util.FullTaskOutput, - Env: []string{}, - PassThroughEnv: []string{}, - }, - }, - "publish": { - definedFields: util.SetFromStrings([]string{"Inputs", "Outputs", "DependsOn", "Cache"}), - experimentalFields: util.SetFromStrings([]string{}), - experimental: taskDefinitionExperiments{}, - TaskDefinition: taskDefinitionHashable{ - Outputs: hash.TaskOutputs{Inclusions: []string{"dist/**"}}, - Cache: false, - TopologicalDependencies: []string{"build", "publish"}, - TaskDependencies: []string{"admin#lint", "build"}, - Inputs: []string{"build/**/*"}, - OutputMode: util.FullTaskOutput, - Env: []string{}, - PassThroughEnv: nil, - }, - }, - } - - validateOutput(t, turboJSON, pipelineExpected) - remoteCacheOptionsExpected := RemoteCacheOptions{"team_id", true, true} - assert.EqualValues(t, remoteCacheOptionsExpected, turboJSON.RemoteCacheOptions) -} - -func Test_LoadTurboConfig_Legacy(t *testing.T) { - testDir := getTestDir(t, "legacy-only") - packageJSONPath := testDir.UntypedJoin("package.json") - rootPackageJSON, pkgJSONReadErr := ReadPackageJSON(packageJSONPath) - - if pkgJSONReadErr != nil { - t.Fatalf("invalid parse: %#v", pkgJSONReadErr) - } - - _, turboJSONReadErr := LoadTurboConfig(testDir, rootPackageJSON, false) - expectedErrorMsg := "Could not find turbo.json. 
Follow directions at https://turbo.build/repo/docs to create one: file does not exist" - assert.EqualErrorf(t, turboJSONReadErr, expectedErrorMsg, "Error should be: %v, got: %v", expectedErrorMsg, turboJSONReadErr) -} - -func Test_LoadTurboConfig_BothCorrectAndLegacy(t *testing.T) { - testDir := getTestDir(t, "both") - - packageJSONPath := testDir.UntypedJoin("package.json") - rootPackageJSON, pkgJSONReadErr := ReadPackageJSON(packageJSONPath) - - if pkgJSONReadErr != nil { - t.Fatalf("invalid parse: %#v", pkgJSONReadErr) - } - - turboJSON, turboJSONReadErr := LoadTurboConfig(testDir, rootPackageJSON, false) - - if turboJSONReadErr != nil { - t.Fatalf("invalid parse: %#v", turboJSONReadErr) - } - - pipelineExpected := map[string]BookkeepingTaskDefinition{ - "build": { - definedFields: util.SetFromStrings([]string{"Outputs", "OutputMode", "DependsOn"}), - experimentalFields: util.SetFromStrings([]string{}), - experimental: taskDefinitionExperiments{}, - TaskDefinition: taskDefinitionHashable{ - Outputs: hash.TaskOutputs{Inclusions: []string{".next/**", "dist/**"}, Exclusions: []string{"dist/assets/**"}}, - Cache: true, - TopologicalDependencies: []string{"build"}, - TaskDependencies: []string{}, - OutputMode: util.NewTaskOutput, - Env: []string{}, - PassThroughEnv: nil, - }, - }, - } - - validateOutput(t, turboJSON, pipelineExpected) - - remoteCacheOptionsExpected := RemoteCacheOptions{"team_id", true, true} - assert.EqualValues(t, remoteCacheOptionsExpected, turboJSON.RemoteCacheOptions) - assert.Equal(t, rootPackageJSON.LegacyTurboConfig == nil, true) -} - -func Test_ReadTurboConfig_InvalidEnvDeclarations1(t *testing.T) { - testDir := getTestDir(t, "invalid-env-1") - _, turboJSONReadErr := readTurboConfig(testDir.UntypedJoin("turbo.json")) - - expectedErrorMsg := "turbo.json: You specified \"$A\" in the \"env\" key. You should not prefix your environment variables with \"$\"" - assert.EqualErrorf(t, turboJSONReadErr, expectedErrorMsg, "Error should be: %v, got: %v", expectedErrorMsg, turboJSONReadErr) -} - -func Test_ReadTurboConfig_InvalidEnvDeclarations2(t *testing.T) { - testDir := getTestDir(t, "invalid-env-2") - _, turboJSONReadErr := readTurboConfig(testDir.UntypedJoin("turbo.json")) - expectedErrorMsg := "turbo.json: You specified \"$A\" in the \"env\" key. You should not prefix your environment variables with \"$\"" - assert.EqualErrorf(t, turboJSONReadErr, expectedErrorMsg, "Error should be: %v, got: %v", expectedErrorMsg, turboJSONReadErr) -} - -func Test_ReadTurboConfig_InvalidGlobalEnvDeclarations(t *testing.T) { - testDir := getTestDir(t, "invalid-global-env") - _, turboJSONReadErr := readTurboConfig(testDir.UntypedJoin("turbo.json")) - expectedErrorMsg := "turbo.json: You specified \"$QUX\" in the \"globalEnv\" key. 
You should not prefix your environment variables with \"$\"" - assert.EqualErrorf(t, turboJSONReadErr, expectedErrorMsg, "Error should be: %v, got: %v", expectedErrorMsg, turboJSONReadErr) -} - -func Test_ReadTurboConfig_EnvDeclarations(t *testing.T) { - testDir := getTestDir(t, "legacy-env") - turboJSON, turboJSONReadErr := readTurboConfig(testDir.UntypedJoin("turbo.json")) - - if turboJSONReadErr != nil { - t.Fatalf("invalid parse: %#v", turboJSONReadErr) - } - - pipeline := turboJSON.Pipeline - assert.EqualValues(t, pipeline["task1"].TaskDefinition.Env, sortedArray([]string{"A"})) - assert.EqualValues(t, pipeline["task2"].TaskDefinition.Env, sortedArray([]string{"A"})) - assert.EqualValues(t, pipeline["task3"].TaskDefinition.Env, sortedArray([]string{"A"})) - assert.EqualValues(t, pipeline["task4"].TaskDefinition.Env, sortedArray([]string{"A", "B"})) - assert.EqualValues(t, pipeline["task6"].TaskDefinition.Env, sortedArray([]string{"A", "B", "C", "D", "E", "F"})) - assert.EqualValues(t, pipeline["task7"].TaskDefinition.Env, sortedArray([]string{"A", "B", "C"})) - assert.EqualValues(t, pipeline["task8"].TaskDefinition.Env, sortedArray([]string{"A", "B", "C"})) - assert.EqualValues(t, pipeline["task9"].TaskDefinition.Env, sortedArray([]string{"A"})) - assert.EqualValues(t, pipeline["task10"].TaskDefinition.Env, sortedArray([]string{"A"})) - assert.EqualValues(t, pipeline["task11"].TaskDefinition.Env, sortedArray([]string{"A", "B"})) - - // check global env vars also - assert.EqualValues(t, sortedArray([]string{"FOO", "BAR", "BAZ", "QUX"}), sortedArray(turboJSON.GlobalEnv)) - assert.EqualValues(t, sortedArray([]string{"somefile.txt"}), sortedArray(turboJSON.GlobalDeps)) -} - -func Test_TaskOutputsSort(t *testing.T) { - inclusions := []string{"foo/**", "bar"} - exclusions := []string{"special-file", ".hidden/**"} - taskOutputs := hash.TaskOutputs{Inclusions: inclusions, Exclusions: exclusions} - taskOutputs.Sort() - assertIsSorted(t, taskOutputs.Inclusions, "Inclusions") - assertIsSorted(t, taskOutputs.Exclusions, "Exclusions") - - assert.True(t, cmp.DeepEqual(taskOutputs, hash.TaskOutputs{Inclusions: []string{"bar", "foo/**"}, Exclusions: []string{".hidden/**", "special-file"}})().Success()) -} - -// Helpers -func validateOutput(t *testing.T, turboJSON *TurboJSON, expectedPipeline Pipeline) { - t.Helper() - assertIsSorted(t, turboJSON.GlobalDeps, "Global Deps") - assertIsSorted(t, turboJSON.GlobalEnv, "Global Env") - assertIsSorted(t, turboJSON.GlobalPassThroughEnv, "Global Pass Through Env") - validatePipeline(t, turboJSON.Pipeline, expectedPipeline) -} - -func validatePipeline(t *testing.T, actual Pipeline, expected Pipeline) { - t.Helper() - // check top level keys - if len(actual) != len(expected) { - expectedKeys := []string{} - for k := range expected { - expectedKeys = append(expectedKeys, k) - } - actualKeys := []string{} - for k := range actual { - actualKeys = append(actualKeys, k) - } - t.Errorf("pipeline tasks mismatch. 
got %v, want %v", strings.Join(actualKeys, ","), strings.Join(expectedKeys, ",")) - } - - // check individual task definitions - for taskName, expectedTaskDefinition := range expected { - bookkeepingTaskDef, ok := actual[taskName] - if !ok { - t.Errorf("missing expected task: %v", taskName) - } - actualTaskDefinition := bookkeepingTaskDef.GetTaskDefinition() - assertIsSorted(t, actualTaskDefinition.Outputs.Inclusions, "Task output inclusions") - assertIsSorted(t, actualTaskDefinition.Outputs.Exclusions, "Task output exclusions") - assertIsSorted(t, actualTaskDefinition.Env, "Task env vars") - assertIsSorted(t, actualTaskDefinition.PassThroughEnv, "Task passthrough env vars") - assertIsSorted(t, actualTaskDefinition.TopologicalDependencies, "Topo deps") - assertIsSorted(t, actualTaskDefinition.TaskDependencies, "Task deps") - assert.EqualValuesf(t, expectedTaskDefinition, bookkeepingTaskDef, "task definition mismatch for %v", taskName) - } -} - -func getTestDir(t *testing.T, testName string) turbopath.AbsoluteSystemPath { - defaultCwd, err := os.Getwd() - if err != nil { - t.Errorf("failed to get cwd: %v", err) - } - cwd, err := CheckedToAbsoluteSystemPath(defaultCwd) - if err != nil { - t.Fatalf("cwd is not an absolute directory %v: %v", defaultCwd, err) - } - - return cwd.UntypedJoin("testdata", testName) -} - -func sortedArray(arr []string) []string { - sort.Strings(arr) - return arr -} diff --git a/cli/internal/globby/globby.go b/cli/internal/globby/globby.go deleted file mode 100644 index 14c40d92c00eb..0000000000000 --- a/cli/internal/globby/globby.go +++ /dev/null @@ -1,187 +0,0 @@ -package globby - -import ( - "fmt" - "path/filepath" - "sort" - "strings" - - iofs "io/fs" - - "github.com/vercel/turbo/cli/internal/fs" - - "github.com/vercel/turbo/cli/internal/doublestar" - "github.com/vercel/turbo/cli/internal/util" -) - -// GlobAll returns an array of files and folders that match the specified set of glob patterns. -// The returned files and folders are absolute paths, assuming that basePath is an absolute path. -func GlobAll(basePath string, includePatterns []string, excludePatterns []string) ([]string, error) { - fsys := fs.CreateDirFSAtRoot(basePath) - fsysRoot := fs.GetDirFSRootPath(fsys) - output, err := globAllFs(fsys, fsysRoot, basePath, includePatterns, excludePatterns) - - // Because this is coming out of a map output is in no way ordered. - // Sorting will put the files in a depth-first order. - sort.Strings(output) - return output, err -} - -// GlobFiles returns an array of files that match the specified set of glob patterns. -// The return files are absolute paths, assuming that basePath is an absolute path. -func GlobFiles(basePath string, includePatterns []string, excludePatterns []string) ([]string, error) { - fsys := fs.CreateDirFSAtRoot(basePath) - fsysRoot := fs.GetDirFSRootPath(fsys) - output, err := globFilesFs(fsys, fsysRoot, basePath, includePatterns, excludePatterns) - - // Because this is coming out of a map output is in no way ordered. - // Sorting will put the files in a depth-first order. - sort.Strings(output) - return output, err -} - -// checkRelativePath ensures that the the requested file path is a child of `from`. 
-func checkRelativePath(from string, to string) error { - relativePath, err := filepath.Rel(from, to) - - if err != nil { - return err - } - - if strings.HasPrefix(relativePath, "..") { - return fmt.Errorf("the path you are attempting to specify (%s) is outside of the root", to) - } - - return nil -} - -// globFilesFs searches the specified file system to enumerate all files to include. -func globFilesFs(fsys iofs.FS, fsysRoot string, basePath string, includePatterns []string, excludePatterns []string) ([]string, error) { - return globWalkFs(fsys, fsysRoot, basePath, includePatterns, excludePatterns, false) -} - -// globAllFs searches the specified file system to enumerate all files to include. -func globAllFs(fsys iofs.FS, fsysRoot string, basePath string, includePatterns []string, excludePatterns []string) ([]string, error) { - return globWalkFs(fsys, fsysRoot, basePath, includePatterns, excludePatterns, true) -} - -// globWalkFs searches the specified file system to enumerate all files and folders to include. -func globWalkFs(fsys iofs.FS, fsysRoot string, basePath string, includePatterns []string, excludePatterns []string, includeDirs bool) ([]string, error) { - var processedIncludes []string - var processedExcludes []string - result := make(util.Set) - - for _, includePattern := range includePatterns { - includePath := filepath.Join(basePath, includePattern) - err := checkRelativePath(basePath, includePath) - - if err != nil { - return nil, err - } - - // fs.FS paths may not include leading separators. Calculate the - // correct path for this relative to the filesystem root. - // This will not error as it follows the call to checkRelativePath. - iofsRelativePath, _ := fs.IofsRelativePath(fsysRoot, includePath) - - // Includes only operate on files. - processedIncludes = append(processedIncludes, iofsRelativePath) - } - - for _, excludePattern := range excludePatterns { - excludePath := filepath.Join(basePath, excludePattern) - err := checkRelativePath(basePath, excludePath) - - if err != nil { - return nil, err - } - - // fs.FS paths may not include leading separators. Calculate the - // correct path for this relative to the filesystem root. - // This will not error as it follows the call to checkRelativePath. - iofsRelativePath, _ := fs.IofsRelativePath(fsysRoot, excludePath) - - // In case this is a file pattern and not a directory, add the exact pattern. - // In the event that the user has already specified /**, - if !strings.HasSuffix(iofsRelativePath, string(filepath.Separator)+"**") { - processedExcludes = append(processedExcludes, iofsRelativePath) - } - // TODO: we need to either document or change this behavior - // Excludes operate on entire folders, so we also exclude everything under this in case it represents a directory - processedExcludes = append(processedExcludes, filepath.Join(iofsRelativePath, "**")) - } - - // We start from a naive includePattern - includePattern := "" - includeCount := len(processedIncludes) - - // Do not use alternation if unnecessary. - if includeCount == 1 { - includePattern = processedIncludes[0] - } else if includeCount > 1 { - // We use alternation from the very root of the path. This avoids fs.Stat of the basePath. - includePattern = "{" + strings.Join(processedIncludes, ",") + "}" - } - - // We start with an empty string excludePattern which we only use if excludeCount > 0. - excludePattern := "" - excludeCount := len(processedExcludes) - - // Do not use alternation if unnecessary. 
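For reference while reviewing this removal: both the include patterns above and the exclude patterns below are collapsed into a single brace alternation (`{a,b}`) so the filesystem is walked once from the base path rather than once per pattern. A minimal sketch of that composition, using the public github.com/bmatcuk/doublestar/v4 module as a stand-in for the vendored cli/internal/doublestar fork:

```go
package main

import (
	"fmt"
	"strings"

	"github.com/bmatcuk/doublestar/v4" // stand-in for the vendored fork
)

func main() {
	// Several include patterns collapse into one brace alternation, so a
	// single rooted walk can service all of them at once.
	includes := []string{
		"repos/some-app/packages/*/package.json",
		"repos/some-app/apps/*/package.json",
	}
	pattern := "{" + strings.Join(includes, ",") + "}"

	for _, path := range []string{
		"repos/some-app/packages/colors/package.json",    // matches
		"repos/some-app/node_modules/react/package.json", // does not match
	} {
		matched, err := doublestar.Match(pattern, path)
		fmt.Println(path, matched, err)
	}
}
```

The single-pattern fast path in the removed code exists because a one-element alternation is pure overhead; the braces are only added when there are two or more patterns.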
- if excludeCount == 1 { - excludePattern = processedExcludes[0] - } else if excludeCount > 1 { - // We use alternation from the very root of the path. This avoids fs.Stat of the basePath. - excludePattern = "{" + strings.Join(processedExcludes, ",") + "}" - } - - // GlobWalk expects that everything uses Unix path conventions. - includePattern = filepath.ToSlash(includePattern) - excludePattern = filepath.ToSlash(excludePattern) - - err := doublestar.GlobWalk(fsys, includePattern, func(path string, dirEntry iofs.DirEntry) error { - if !includeDirs && dirEntry.IsDir() { - return nil - } - - // All files that are returned by doublestar.GlobWalk are relative to - // the fsys root. Go, however, has decided that `fs.FS` filesystems do - // not address the root of the file system using `/` and instead use - // paths without leading separators. - // - // We need to track where the `fsys` root is so that when we hand paths back - // we hand them back as the path addressable in the actual OS filesystem. - // - // As a consequence, when processing, we need to *restore* the original - // root to the file path after returning. This works because when we create - // the `os.dirFS` filesystem we do so at the root of the current volume. - if excludeCount == 0 { - // Reconstruct via string concatenation since the root is already pre-composed. - result.Add(fsysRoot + path) - return nil - } - - isExcluded, err := doublestar.Match(excludePattern, filepath.ToSlash(path)) - if err != nil { - return err - } - - if !isExcluded { - // Reconstruct via string concatenation since the root is already pre-composed. - result.Add(fsysRoot + path) - } - - return nil - }) - - // GlobWalk threw an error. - if err != nil { - return nil, err - } - - // Never actually capture the root folder. - // This is a risk because of how we rework the globs. - result.Delete(strings.TrimSuffix(basePath, "/")) - - return result.UnsafeListOfStrings(), nil -} diff --git a/cli/internal/globby/globby_test.go b/cli/internal/globby/globby_test.go deleted file mode 100644 index 2fdd613b93625..0000000000000 --- a/cli/internal/globby/globby_test.go +++ /dev/null @@ -1,832 +0,0 @@ -package globby - -import ( - "io/fs" - "path/filepath" - "reflect" - "sort" - "testing" - - "testing/fstest" -) - -// setup prepares the test file system contents and returns the file system. -func setup(fsysRoot string, files []string) fs.FS { - fsys := fstest.MapFS{} - for _, file := range files { - // We're populating a `fs.FS` filesystem which requires paths to have no - // leading slash. As a consequence we strip it during creation.
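The leading-slash stripping in this test setup follows directly from the standard library's `fs.FS` contract: names are always unrooted. A quick self-contained illustration of the convention the removed tests rely on:

```go
package main

import (
	"fmt"
	"io/fs"
	"strings"
	"testing/fstest"
)

func main() {
	// fs.FS names are always unrooted ("a/b.txt", never "/a/b.txt"),
	// which is why setup trims the leading slash before inserting keys.
	files := []string{"/repos/some-app/package.json", "/repos/some-app/dist/index.html"}

	fsys := fstest.MapFS{}
	for _, file := range files {
		fsys[strings.TrimPrefix(file, "/")] = &fstest.MapFile{Mode: 0666}
	}

	// Walking from "." enumerates everything; MapFS synthesizes the
	// intermediate directories automatically.
	_ = fs.WalkDir(fsys, ".", func(path string, d fs.DirEntry, err error) error {
		fmt.Println(path, d.IsDir())
		return err
	})
}
```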
- iofsRelativePath := file[1:] - - fsys[iofsRelativePath] = &fstest.MapFile{Mode: 0666} - } - - return fsys -} - -func TestGlobFilesFs(t *testing.T) { - type args struct { - basePath string - includePatterns []string - excludePatterns []string - } - tests := []struct { - name string - files []string - args args - wantAll []string - wantFiles []string - wantErr bool - }{ - { - name: "hello world", - files: []string{"/test.txt"}, - args: args{ - basePath: "/", - includePatterns: []string{"*.txt"}, - excludePatterns: []string{}, - }, - wantAll: []string{"/test.txt"}, - wantFiles: []string{"/test.txt"}, - }, - { - name: "bullet files", - files: []string{ - "/test.txt", - "/subdir/test.txt", - "/other/test.txt", - }, - args: args{ - basePath: "/", - includePatterns: []string{"subdir/test.txt", "test.txt"}, - excludePatterns: []string{}, - }, - wantAll: []string{ - "/subdir/test.txt", - "/test.txt", - }, - wantFiles: []string{ - "/subdir/test.txt", - "/test.txt", - }, - }, - { - name: "finding workspace package.json files", - files: []string{ - "/external/file.txt", - "/repos/some-app/apps/docs/package.json", - "/repos/some-app/apps/web/package.json", - "/repos/some-app/bower_components/readline/package.json", - "/repos/some-app/examples/package.json", - "/repos/some-app/node_modules/gulp/bower_components/readline/package.json", - "/repos/some-app/node_modules/react/package.json", - "/repos/some-app/package.json", - "/repos/some-app/packages/colors/package.json", - "/repos/some-app/packages/faker/package.json", - "/repos/some-app/packages/left-pad/package.json", - "/repos/some-app/test/mocks/kitchen-sink/package.json", - "/repos/some-app/tests/mocks/kitchen-sink/package.json", - }, - args: args{ - basePath: "/repos/some-app/", - includePatterns: []string{"packages/*/package.json", "apps/*/package.json"}, - excludePatterns: []string{"**/node_modules/", "**/bower_components/", "**/test/", "**/tests/"}, - }, - wantAll: []string{ - "/repos/some-app/apps/docs/package.json", - "/repos/some-app/apps/web/package.json", - "/repos/some-app/packages/colors/package.json", - "/repos/some-app/packages/faker/package.json", - "/repos/some-app/packages/left-pad/package.json", - }, - wantFiles: []string{ - "/repos/some-app/apps/docs/package.json", - "/repos/some-app/apps/web/package.json", - "/repos/some-app/packages/colors/package.json", - "/repos/some-app/packages/faker/package.json", - "/repos/some-app/packages/left-pad/package.json", - }, - }, - { - name: "excludes unexpected workspace package.json files", - files: []string{ - "/external/file.txt", - "/repos/some-app/apps/docs/package.json", - "/repos/some-app/apps/web/package.json", - "/repos/some-app/bower_components/readline/package.json", - "/repos/some-app/examples/package.json", - "/repos/some-app/node_modules/gulp/bower_components/readline/package.json", - "/repos/some-app/node_modules/react/package.json", - "/repos/some-app/package.json", - "/repos/some-app/packages/colors/package.json", - "/repos/some-app/packages/faker/package.json", - "/repos/some-app/packages/left-pad/package.json", - "/repos/some-app/test/mocks/spanish-inquisition/package.json", - "/repos/some-app/tests/mocks/spanish-inquisition/package.json", - }, - args: args{ - basePath: "/repos/some-app/", - includePatterns: []string{"**/package.json"}, - excludePatterns: []string{"**/node_modules/", "**/bower_components/", "**/test/", "**/tests/"}, - }, - wantAll: []string{ - "/repos/some-app/apps/docs/package.json", - "/repos/some-app/apps/web/package.json", - 
"/repos/some-app/examples/package.json", - "/repos/some-app/package.json", - "/repos/some-app/packages/colors/package.json", - "/repos/some-app/packages/faker/package.json", - "/repos/some-app/packages/left-pad/package.json", - }, - wantFiles: []string{ - "/repos/some-app/apps/docs/package.json", - "/repos/some-app/apps/web/package.json", - "/repos/some-app/examples/package.json", - "/repos/some-app/package.json", - "/repos/some-app/packages/colors/package.json", - "/repos/some-app/packages/faker/package.json", - "/repos/some-app/packages/left-pad/package.json", - }, - }, - { - name: "nested packages work", - files: []string{ - "/external/file.txt", - "/repos/some-app/apps/docs/package.json", - "/repos/some-app/apps/web/package.json", - "/repos/some-app/bower_components/readline/package.json", - "/repos/some-app/examples/package.json", - "/repos/some-app/node_modules/gulp/bower_components/readline/package.json", - "/repos/some-app/node_modules/react/package.json", - "/repos/some-app/package.json", - "/repos/some-app/packages/xzibit/package.json", - "/repos/some-app/packages/xzibit/node_modules/street-legal/package.json", - "/repos/some-app/packages/xzibit/node_modules/paint-colors/package.json", - "/repos/some-app/packages/xzibit/packages/yo-dawg/package.json", - "/repos/some-app/packages/xzibit/packages/yo-dawg/node_modules/meme/package.json", - "/repos/some-app/packages/xzibit/packages/yo-dawg/node_modules/yo-dawg/package.json", - "/repos/some-app/packages/colors/package.json", - "/repos/some-app/packages/faker/package.json", - "/repos/some-app/packages/left-pad/package.json", - "/repos/some-app/test/mocks/spanish-inquisition/package.json", - "/repos/some-app/tests/mocks/spanish-inquisition/package.json", - }, - args: args{ - basePath: "/repos/some-app/", - includePatterns: []string{"packages/**/package.json"}, - excludePatterns: []string{"**/node_modules/", "**/bower_components/", "**/test/", "**/tests/"}, - }, - wantAll: []string{ - "/repos/some-app/packages/colors/package.json", - "/repos/some-app/packages/faker/package.json", - "/repos/some-app/packages/left-pad/package.json", - "/repos/some-app/packages/xzibit/package.json", - "/repos/some-app/packages/xzibit/packages/yo-dawg/package.json", - }, - wantFiles: []string{ - "/repos/some-app/packages/colors/package.json", - "/repos/some-app/packages/faker/package.json", - "/repos/some-app/packages/left-pad/package.json", - "/repos/some-app/packages/xzibit/package.json", - "/repos/some-app/packages/xzibit/packages/yo-dawg/package.json", - }, - }, - { - name: "includes do not override excludes", - files: []string{ - "/external/file.txt", - "/repos/some-app/apps/docs/package.json", - "/repos/some-app/apps/web/package.json", - "/repos/some-app/bower_components/readline/package.json", - "/repos/some-app/examples/package.json", - "/repos/some-app/node_modules/gulp/bower_components/readline/package.json", - "/repos/some-app/node_modules/react/package.json", - "/repos/some-app/package.json", - "/repos/some-app/packages/xzibit/package.json", - "/repos/some-app/packages/xzibit/node_modules/street-legal/package.json", - "/repos/some-app/packages/xzibit/node_modules/paint-colors/package.json", - "/repos/some-app/packages/xzibit/packages/yo-dawg/package.json", - "/repos/some-app/packages/xzibit/packages/yo-dawg/node_modules/meme/package.json", - "/repos/some-app/packages/xzibit/packages/yo-dawg/node_modules/yo-dawg/package.json", - "/repos/some-app/packages/colors/package.json", - "/repos/some-app/packages/faker/package.json", - 
"/repos/some-app/packages/left-pad/package.json", - "/repos/some-app/test/mocks/spanish-inquisition/package.json", - "/repos/some-app/tests/mocks/spanish-inquisition/package.json", - }, - args: args{ - basePath: "/repos/some-app/", - includePatterns: []string{"packages/**/package.json", "tests/mocks/*/package.json"}, - excludePatterns: []string{"**/node_modules/", "**/bower_components/", "**/test/", "**/tests/"}, - }, - wantAll: []string{ - "/repos/some-app/packages/colors/package.json", - "/repos/some-app/packages/faker/package.json", - "/repos/some-app/packages/left-pad/package.json", - "/repos/some-app/packages/xzibit/package.json", - "/repos/some-app/packages/xzibit/packages/yo-dawg/package.json", - }, - wantFiles: []string{ - "/repos/some-app/packages/colors/package.json", - "/repos/some-app/packages/faker/package.json", - "/repos/some-app/packages/left-pad/package.json", - "/repos/some-app/packages/xzibit/package.json", - "/repos/some-app/packages/xzibit/packages/yo-dawg/package.json", - }, - }, - { - name: "output globbing grabs the desired content", - files: []string{ - "/external/file.txt", - "/repos/some-app/src/index.js", - "/repos/some-app/public/src/css/index.css", - "/repos/some-app/.turbo/turbo-build.log", - "/repos/some-app/.turbo/somebody-touched-this-file-into-existence.txt", - "/repos/some-app/.next/log.txt", - "/repos/some-app/.next/cache/db6a76a62043520e7aaadd0bb2104e78.txt", - "/repos/some-app/dist/index.html", - "/repos/some-app/dist/js/index.js", - "/repos/some-app/dist/js/lib.js", - "/repos/some-app/dist/js/node_modules/browserify.js", - "/repos/some-app/public/dist/css/index.css", - "/repos/some-app/public/dist/images/rick_astley.jpg", - }, - args: args{ - basePath: "/repos/some-app/", - includePatterns: []string{".turbo/turbo-build.log", "dist/**", ".next/**", "public/dist/**"}, - excludePatterns: []string{}, - }, - wantAll: []string{ - "/repos/some-app/.next", - "/repos/some-app/.next/cache", - "/repos/some-app/.next/cache/db6a76a62043520e7aaadd0bb2104e78.txt", - "/repos/some-app/.next/log.txt", - "/repos/some-app/.turbo/turbo-build.log", - "/repos/some-app/dist", - "/repos/some-app/dist/index.html", - "/repos/some-app/dist/js", - "/repos/some-app/dist/js/index.js", - "/repos/some-app/dist/js/lib.js", - "/repos/some-app/dist/js/node_modules", - "/repos/some-app/dist/js/node_modules/browserify.js", - "/repos/some-app/public/dist", - "/repos/some-app/public/dist/css", - "/repos/some-app/public/dist/css/index.css", - "/repos/some-app/public/dist/images", - "/repos/some-app/public/dist/images/rick_astley.jpg", - }, - wantFiles: []string{ - "/repos/some-app/.next/cache/db6a76a62043520e7aaadd0bb2104e78.txt", - "/repos/some-app/.next/log.txt", - "/repos/some-app/.turbo/turbo-build.log", - "/repos/some-app/dist/index.html", - "/repos/some-app/dist/js/index.js", - "/repos/some-app/dist/js/lib.js", - "/repos/some-app/dist/js/node_modules/browserify.js", - "/repos/some-app/public/dist/css/index.css", - "/repos/some-app/public/dist/images/rick_astley.jpg", - }, - }, - { - name: "passing ** captures all children", - files: []string{ - "/repos/some-app/dist/index.html", - "/repos/some-app/dist/js/index.js", - "/repos/some-app/dist/js/lib.js", - "/repos/some-app/dist/js/node_modules/browserify.js", - }, - args: args{ - basePath: "/repos/some-app/", - includePatterns: []string{"dist/**"}, - excludePatterns: []string{}, - }, - wantAll: []string{ - "/repos/some-app/dist", - "/repos/some-app/dist/index.html", - "/repos/some-app/dist/js", - "/repos/some-app/dist/js/index.js", - 
"/repos/some-app/dist/js/lib.js", - "/repos/some-app/dist/js/node_modules", - "/repos/some-app/dist/js/node_modules/browserify.js", - }, - wantFiles: []string{ - "/repos/some-app/dist/index.html", - "/repos/some-app/dist/js/index.js", - "/repos/some-app/dist/js/lib.js", - "/repos/some-app/dist/js/node_modules/browserify.js", - }, - }, - { - name: "passing just a directory captures no children", - files: []string{ - "/repos/some-app/dist/index.html", - "/repos/some-app/dist/js/index.js", - "/repos/some-app/dist/js/lib.js", - "/repos/some-app/dist/js/node_modules/browserify.js", - }, - args: args{ - basePath: "/repos/some-app/", - includePatterns: []string{"dist"}, - excludePatterns: []string{}, - }, - wantAll: []string{"/repos/some-app/dist"}, - wantFiles: []string{}, - }, - { - name: "redundant includes do not duplicate", - files: []string{ - "/repos/some-app/dist/index.html", - "/repos/some-app/dist/js/index.js", - "/repos/some-app/dist/js/lib.js", - "/repos/some-app/dist/js/node_modules/browserify.js", - }, - args: args{ - basePath: "/repos/some-app/", - includePatterns: []string{"**/*", "dist/**"}, - excludePatterns: []string{}, - }, - wantAll: []string{ - "/repos/some-app/dist", - "/repos/some-app/dist/index.html", - "/repos/some-app/dist/js", - "/repos/some-app/dist/js/index.js", - "/repos/some-app/dist/js/lib.js", - "/repos/some-app/dist/js/node_modules", - "/repos/some-app/dist/js/node_modules/browserify.js", - }, - wantFiles: []string{ - "/repos/some-app/dist/index.html", - "/repos/some-app/dist/js/index.js", - "/repos/some-app/dist/js/lib.js", - "/repos/some-app/dist/js/node_modules/browserify.js", - }, - }, - { - name: "exclude everything, include everything", - files: []string{ - "/repos/some-app/dist/index.html", - "/repos/some-app/dist/js/index.js", - "/repos/some-app/dist/js/lib.js", - "/repos/some-app/dist/js/node_modules/browserify.js", - }, - args: args{ - basePath: "/repos/some-app/", - includePatterns: []string{"**"}, - excludePatterns: []string{"**"}, - }, - wantAll: []string{}, - wantFiles: []string{}, - }, - { - name: "passing just a directory to exclude prevents capture of children", - files: []string{ - "/repos/some-app/dist/index.html", - "/repos/some-app/dist/js/index.js", - "/repos/some-app/dist/js/lib.js", - "/repos/some-app/dist/js/node_modules/browserify.js", - }, - args: args{ - basePath: "/repos/some-app/", - includePatterns: []string{"dist/**"}, - excludePatterns: []string{"dist/js"}, - }, - wantAll: []string{ - "/repos/some-app/dist", - "/repos/some-app/dist/index.html", - }, - wantFiles: []string{ - "/repos/some-app/dist/index.html", - }, - }, - { - name: "passing ** to exclude prevents capture of children", - files: []string{ - "/repos/some-app/dist/index.html", - "/repos/some-app/dist/js/index.js", - "/repos/some-app/dist/js/lib.js", - "/repos/some-app/dist/js/node_modules/browserify.js", - }, - args: args{ - basePath: "/repos/some-app/", - includePatterns: []string{"dist/**"}, - excludePatterns: []string{"dist/js/**"}, - }, - wantAll: []string{ - "/repos/some-app/dist", - "/repos/some-app/dist/index.html", - "/repos/some-app/dist/js", - }, - wantFiles: []string{ - "/repos/some-app/dist/index.html", - }, - }, - { - name: "exclude everything with folder . 
applies at base path", - files: []string{ - "/repos/some-app/dist/index.html", - "/repos/some-app/dist/js/index.js", - "/repos/some-app/dist/js/lib.js", - "/repos/some-app/dist/js/node_modules/browserify.js", - }, - args: args{ - basePath: "/repos/some-app/", - includePatterns: []string{"**"}, - excludePatterns: []string{"./"}, - }, - wantAll: []string{}, - wantFiles: []string{}, - }, - { - name: "exclude everything with traversal applies at a non-base path", - files: []string{ - "/repos/some-app/dist/index.html", - "/repos/some-app/dist/js/index.js", - "/repos/some-app/dist/js/lib.js", - "/repos/some-app/dist/js/node_modules/browserify.js", - }, - args: args{ - basePath: "/repos/some-app/", - includePatterns: []string{"**"}, - excludePatterns: []string{"./dist"}, - }, - wantAll: []string{}, - wantFiles: []string{}, - }, - { - name: "exclude everything with folder traversal (..) applies at base path", - files: []string{ - "/repos/some-app/dist/index.html", - "/repos/some-app/dist/js/index.js", - "/repos/some-app/dist/js/lib.js", - "/repos/some-app/dist/js/node_modules/browserify.js", - }, - args: args{ - basePath: "/repos/some-app/", - includePatterns: []string{"**"}, - excludePatterns: []string{"dist/../"}, - }, - wantAll: []string{}, - wantFiles: []string{}, - }, - { - name: "how do globs even work bad glob microformat", - files: []string{ - "/repos/some-app/dist/index.html", - "/repos/some-app/dist/js/index.js", - "/repos/some-app/dist/js/lib.js", - "/repos/some-app/dist/js/node_modules/browserify.js", - }, - args: args{ - basePath: "/repos/some-app/", - includePatterns: []string{"**/**/**"}, - excludePatterns: []string{}, - }, - wantAll: []string{ - "/repos/some-app/dist", - "/repos/some-app/dist/index.html", - "/repos/some-app/dist/js", - "/repos/some-app/dist/js/index.js", - "/repos/some-app/dist/js/lib.js", - "/repos/some-app/dist/js/node_modules", - "/repos/some-app/dist/js/node_modules/browserify.js", - }, - wantFiles: []string{ - "/repos/some-app/dist/index.html", - "/repos/some-app/dist/js/index.js", - "/repos/some-app/dist/js/lib.js", - "/repos/some-app/dist/js/node_modules/browserify.js", - }, - }, - { - name: "directory traversal stops at base path", - files: []string{ - "/repos/spanish-inquisition/index.html", - "/repos/some-app/dist/index.html", - "/repos/some-app/dist/js/index.js", - "/repos/some-app/dist/js/lib.js", - "/repos/some-app/dist/js/node_modules/browserify.js", - }, - args: args{ - basePath: "/repos/some-app/", - includePatterns: []string{"../spanish-inquisition/**", "dist/**"}, - excludePatterns: []string{}, - }, - wantAll: []string{}, - wantFiles: []string{}, - wantErr: true, - }, - { - name: "globs and traversal and globs do not cross base path", - files: []string{ - "/repos/spanish-inquisition/index.html", - "/repos/some-app/dist/index.html", - "/repos/some-app/dist/js/index.js", - "/repos/some-app/dist/js/lib.js", - "/repos/some-app/dist/js/node_modules/browserify.js", - }, - args: args{ - basePath: "/repos/some-app/", - includePatterns: []string{"**/../../spanish-inquisition/**"}, - excludePatterns: []string{}, - }, - wantAll: []string{}, - wantFiles: []string{}, - wantErr: true, - }, - { - name: "traversal works within base path", - files: []string{ - "/repos/some-app/dist/index.html", - "/repos/some-app/dist/js/index.js", - "/repos/some-app/dist/js/lib.js", - "/repos/some-app/dist/js/node_modules/browserify.js", - }, - args: args{ - basePath: "/repos/some-app/", - includePatterns: []string{"dist/js/../**"}, - excludePatterns: []string{}, - }, - wantAll: 
[]string{ - "/repos/some-app/dist", - "/repos/some-app/dist/index.html", - "/repos/some-app/dist/js", - "/repos/some-app/dist/js/index.js", - "/repos/some-app/dist/js/lib.js", - "/repos/some-app/dist/js/node_modules", - "/repos/some-app/dist/js/node_modules/browserify.js", - }, - wantFiles: []string{ - "/repos/some-app/dist/index.html", - "/repos/some-app/dist/js/index.js", - "/repos/some-app/dist/js/lib.js", - "/repos/some-app/dist/js/node_modules/browserify.js", - }, - }, - { - name: "self-references (.) work", - files: []string{ - "/repos/some-app/dist/index.html", - "/repos/some-app/dist/js/index.js", - "/repos/some-app/dist/js/lib.js", - "/repos/some-app/dist/js/node_modules/browserify.js", - }, - args: args{ - basePath: "/repos/some-app/", - includePatterns: []string{"dist/./././**"}, - excludePatterns: []string{}, - }, - wantAll: []string{ - "/repos/some-app/dist", - "/repos/some-app/dist/index.html", - "/repos/some-app/dist/js", - "/repos/some-app/dist/js/index.js", - "/repos/some-app/dist/js/lib.js", - "/repos/some-app/dist/js/node_modules", - "/repos/some-app/dist/js/node_modules/browserify.js", - }, - wantFiles: []string{ - "/repos/some-app/dist/index.html", - "/repos/some-app/dist/js/index.js", - "/repos/some-app/dist/js/lib.js", - "/repos/some-app/dist/js/node_modules/browserify.js", - }, - }, - { - name: "depth of 1 includes handles folders properly", - files: []string{ - "/repos/some-app/package.json", - "/repos/some-app/dist/index.html", - "/repos/some-app/dist/js/index.js", - "/repos/some-app/dist/js/lib.js", - "/repos/some-app/dist/js/node_modules/browserify.js", - }, - args: args{ - basePath: "/repos/some-app/", - includePatterns: []string{"*"}, - excludePatterns: []string{}, - }, - wantAll: []string{ - "/repos/some-app/dist", - "/repos/some-app/package.json", - }, - wantFiles: []string{"/repos/some-app/package.json"}, - }, - { - name: "depth of 1 excludes prevents capturing folders", - files: []string{ - "/repos/some-app/package.json", - "/repos/some-app/dist/index.html", - "/repos/some-app/dist/js/index.js", - "/repos/some-app/dist/js/lib.js", - "/repos/some-app/dist/js/node_modules/browserify.js", - }, - args: args{ - basePath: "/repos/some-app/", - includePatterns: []string{"**"}, - excludePatterns: []string{"dist/*"}, - }, - wantAll: []string{ - "/repos/some-app/dist", - "/repos/some-app/package.json", - }, - wantFiles: []string{"/repos/some-app/package.json"}, - }, - { - name: "No-trailing slash basePath works", - files: []string{ - "/repos/some-app/dist/index.html", - "/repos/some-app/dist/js/index.js", - "/repos/some-app/dist/js/lib.js", - "/repos/some-app/dist/js/node_modules/browserify.js", - }, - args: args{ - basePath: "/repos/some-app", - includePatterns: []string{"dist/**"}, - excludePatterns: []string{}, - }, - wantAll: []string{ - "/repos/some-app/dist", - "/repos/some-app/dist/index.html", - "/repos/some-app/dist/js", - "/repos/some-app/dist/js/index.js", - "/repos/some-app/dist/js/lib.js", - "/repos/some-app/dist/js/node_modules", - "/repos/some-app/dist/js/node_modules/browserify.js", - }, - wantFiles: []string{ - "/repos/some-app/dist/index.html", - "/repos/some-app/dist/js/index.js", - "/repos/some-app/dist/js/lib.js", - "/repos/some-app/dist/js/node_modules/browserify.js", - }, - }, - { - name: "exclude single file", - files: []string{ - "/repos/some-app/included.txt", - "/repos/some-app/excluded.txt", - }, - args: args{ - basePath: "/repos/some-app", - includePatterns: []string{"*.txt"}, - excludePatterns: []string{"excluded.txt"}, - }, - wantAll: 
[]string{ - "/repos/some-app/included.txt", - }, - wantFiles: []string{ - "/repos/some-app/included.txt", - }, - }, - { - name: "exclude nested single file", - files: []string{ - "/repos/some-app/one/included.txt", - "/repos/some-app/one/two/included.txt", - "/repos/some-app/one/two/three/included.txt", - "/repos/some-app/one/excluded.txt", - "/repos/some-app/one/two/excluded.txt", - "/repos/some-app/one/two/three/excluded.txt", - }, - args: args{ - basePath: "/repos/some-app", - includePatterns: []string{"**"}, - excludePatterns: []string{"**/excluded.txt"}, - }, - wantAll: []string{ - "/repos/some-app/one/included.txt", - "/repos/some-app/one/two/included.txt", - "/repos/some-app/one/two/three/included.txt", - "/repos/some-app/one", - "/repos/some-app/one/two", - "/repos/some-app/one/two/three", - }, - wantFiles: []string{ - "/repos/some-app/one/included.txt", - "/repos/some-app/one/two/included.txt", - "/repos/some-app/one/two/three/included.txt", - }, - }, - { - name: "exclude everything", - files: []string{ - "/repos/some-app/one/included.txt", - "/repos/some-app/one/two/included.txt", - "/repos/some-app/one/two/three/included.txt", - "/repos/some-app/one/excluded.txt", - "/repos/some-app/one/two/excluded.txt", - "/repos/some-app/one/two/three/excluded.txt", - }, - args: args{ - basePath: "/repos/some-app", - includePatterns: []string{"**"}, - excludePatterns: []string{"**"}, - }, - wantAll: []string{}, - wantFiles: []string{}, - }, - { - name: "exclude everything with slash", - files: []string{ - "/repos/some-app/one/included.txt", - "/repos/some-app/one/two/included.txt", - "/repos/some-app/one/two/three/included.txt", - "/repos/some-app/one/excluded.txt", - "/repos/some-app/one/two/excluded.txt", - "/repos/some-app/one/two/three/excluded.txt", - }, - args: args{ - basePath: "/repos/some-app", - includePatterns: []string{"**"}, - excludePatterns: []string{"**/"}, - }, - wantAll: []string{}, - wantFiles: []string{}, - }, - { - name: "exclude everything with leading **", - files: []string{ - "/repos/some-app/foo/bar", - "/repos/some-app/some-foo", - "/repos/some-app/some-foo/bar", - "/repos/some-app/included", - }, - args: args{ - basePath: "/repos/some-app", - includePatterns: []string{"**"}, - excludePatterns: []string{"**foo"}, - }, - wantAll: []string{ - "/repos/some-app/included", - }, - wantFiles: []string{ - "/repos/some-app/included", - }, - }, - { - name: "exclude everything with trailing **", - files: []string{ - "/repos/some-app/foo/bar", - "/repos/some-app/foo-file", - "/repos/some-app/foo-dir/bar", - "/repos/some-app/included", - }, - args: args{ - basePath: "/repos/some-app", - includePatterns: []string{"**"}, - excludePatterns: []string{"foo**"}, - }, - wantAll: []string{ - "/repos/some-app/included", - }, - wantFiles: []string{ - "/repos/some-app/included", - }, - }, - } - for _, tt := range tests { - fsysRoot := "/" - fsys := setup(fsysRoot, tt.files) - - t.Run(tt.name, func(t *testing.T) { - got, err := globFilesFs(fsys, fsysRoot, tt.args.basePath, tt.args.includePatterns, tt.args.excludePatterns) - - if (err != nil) != tt.wantErr { - t.Errorf("globFilesFs() error = %v, wantErr %v", err, tt.wantErr) - return - } - - gotToSlash := make([]string, len(got)) - for index, path := range got { - gotToSlash[index] = filepath.ToSlash(path) - } - - sort.Strings(gotToSlash) - - if !reflect.DeepEqual(gotToSlash, tt.wantFiles) { - t.Errorf("globFilesFs() = %v, want %v", gotToSlash, tt.wantFiles) - } - }) - - t.Run(tt.name, func(t *testing.T) { - got, err := globAllFs(fsys, 
fsysRoot, tt.args.basePath, tt.args.includePatterns, tt.args.excludePatterns) - - if (err != nil) != tt.wantErr { - t.Errorf("globAllFs() error = %v, wantErr %v", err, tt.wantErr) - return - } - - gotToSlash := make([]string, len(got)) - for index, path := range got { - gotToSlash[index] = filepath.ToSlash(path) - } - - sort.Strings(gotToSlash) - sort.Strings(tt.wantAll) - - if !reflect.DeepEqual(gotToSlash, tt.wantAll) { - t.Errorf("globAllFs() = %v, want %v", gotToSlash, tt.wantAll) - } - }) - } -} diff --git a/cli/internal/globwatcher/globwatcher.go b/cli/internal/globwatcher/globwatcher.go deleted file mode 100644 index 4463c93f0c5c3..0000000000000 --- a/cli/internal/globwatcher/globwatcher.go +++ /dev/null @@ -1,210 +0,0 @@ -package globwatcher - -import ( - "errors" - "fmt" - "path/filepath" - "sync" - - "github.com/hashicorp/go-hclog" - "github.com/vercel/turbo/cli/internal/doublestar" - "github.com/vercel/turbo/cli/internal/filewatcher" - "github.com/vercel/turbo/cli/internal/fs/hash" - "github.com/vercel/turbo/cli/internal/turbopath" - "github.com/vercel/turbo/cli/internal/util" -) - -// ErrClosed is returned when attempting to get changed globs after glob watching has closed -var ErrClosed = errors.New("glob watching is closed") - -type globs struct { - Inclusions util.Set - Exclusions util.Set -} - -// GlobWatcher is used to track unchanged globs by hash. Once a glob registers a file change -// it is no longer tracked until a new hash requests it. Once all globs for a particular hash -// have changed, that hash is no longer tracked. -type GlobWatcher struct { - logger hclog.Logger - repoRoot turbopath.AbsoluteSystemPath - cookieWaiter filewatcher.CookieWaiter - - mu sync.RWMutex // protects field below - hashGlobs map[string]globs - globStatus map[string]util.Set // glob -> hashes where this glob hasn't changed - - closed bool -} - -// New returns a new GlobWatcher instance -func New(logger hclog.Logger, repoRoot turbopath.AbsoluteSystemPath, cookieWaiter filewatcher.CookieWaiter) *GlobWatcher { - return &GlobWatcher{ - logger: logger, - repoRoot: repoRoot, - cookieWaiter: cookieWaiter, - hashGlobs: make(map[string]globs), - globStatus: make(map[string]util.Set), - } -} - -func (g *GlobWatcher) setClosed() { - g.mu.Lock() - g.closed = true - g.mu.Unlock() -} - -func (g *GlobWatcher) isClosed() bool { - g.mu.RLock() - defer g.mu.RUnlock() - return g.closed -} - -// WatchGlobs registers the given set of globs to be watched for changes and grouped -// under the given hash. This method pairs with GetChangedGlobs to determine which globs -// out of a set of candidates have changed since WatchGlobs was called for the same hash. -func (g *GlobWatcher) WatchGlobs(hash string, globsToWatch hash.TaskOutputs) error { - if g.isClosed() { - return ErrClosed - } - // Wait for a cookie here - // that will ensure that we have seen all filesystem writes - // *by the calling client*. Other tasks _could_ write to the - // same output directories, however we are relying on task - // execution dependencies to prevent that. 
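The "cookie" comment above is the core synchronization idea of this watcher. The real implementation lives in cli/internal/filewatcher and is not shown in this diff; only the `WaitForCookie() error` contract is assumed in the hypothetical sketch below, which illustrates one plausible shape of such a waiter:

```go
package cookies

import (
	"errors"
	"time"
)

// CookieWaiter is the only contract GlobWatcher relies on: WaitForCookie
// must not return until every filesystem write made by the caller has been
// observed by the file watcher.
type CookieWaiter interface {
	WaitForCookie() error
}

// channelCookieWaiter is a hypothetical implementation: the caller drops a
// sentinel ("cookie") file into a watched directory, and the watcher closes
// seen when the event for that sentinel arrives, proving the event stream
// has caught up with all earlier writes.
type channelCookieWaiter struct {
	seen <-chan struct{}
}

func (w *channelCookieWaiter) WaitForCookie() error {
	select {
	case <-w.seen:
		return nil
	case <-time.After(5 * time.Second):
		return errors.New("timed out waiting for filesystem cookie")
	}
}
```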
- if err := g.cookieWaiter.WaitForCookie(); err != nil { - return err - } - g.mu.Lock() - defer g.mu.Unlock() - g.hashGlobs[hash] = globs{ - Inclusions: util.SetFromStrings(globsToWatch.Inclusions), - Exclusions: util.SetFromStrings(globsToWatch.Exclusions), - } - - for _, glob := range globsToWatch.Inclusions { - existing, ok := g.globStatus[glob] - if !ok { - existing = make(util.Set) - } - existing.Add(hash) - g.globStatus[glob] = existing - } - return nil -} - -// GetChangedGlobs returns the subset of the given candidates that we are not currently -// tracking as "unchanged". -func (g *GlobWatcher) GetChangedGlobs(hash string, candidates []string) ([]string, error) { - if g.isClosed() { - // If filewatching has crashed, return all candidates as changed. - return candidates, nil - } - // Wait for a cookie here - // that will ensure that we have seen all filesystem writes - // *by the calling client*. Other tasks _could_ write to the - // same output directories, however we are relying on task - // execution dependencies to prevent that. - if err := g.cookieWaiter.WaitForCookie(); err != nil { - return nil, err - } - // hashGlobs tracks all of the unchanged globs for a given hash - // If hashGlobs doesn't have our hash, either everything has changed, - // or we were never tracking it. Either way, consider all the candidates - // to be changed globs. - g.mu.RLock() - defer g.mu.RUnlock() - globsToCheck, ok := g.hashGlobs[hash] - if !ok { - return candidates, nil - } - allGlobs := util.SetFromStrings(candidates) - diff := allGlobs.Difference(globsToCheck.Inclusions) - - return diff.UnsafeListOfStrings(), nil -} - -// OnFileWatchEvent implements FileWatchClient.OnFileWatchEvent -// On a file change, check if we have a glob that matches this file. Invalidate -// any matching globs, and remove them from the set of unchanged globs for the corresponding -// hashes. If this is the last glob for a hash, remove the hash from being tracked. -func (g *GlobWatcher) OnFileWatchEvent(ev filewatcher.Event) { - // At this point, we don't care what the Op is, any Op represents a change - // that should invalidate matching globs - g.logger.Trace(fmt.Sprintf("Got fsnotify event %v", ev)) - absolutePath := ev.Path - repoRelativePath, err := g.repoRoot.RelativePathString(absolutePath.ToStringDuringMigration()) - if err != nil { - g.logger.Debug(fmt.Sprintf("could not get relative path from %v to %v: %v", g.repoRoot, absolutePath, err)) - return - } - g.mu.Lock() - defer g.mu.Unlock() - for glob, hashStatus := range g.globStatus { - matches, err := doublestar.Match(glob, filepath.ToSlash(repoRelativePath)) - if err != nil { - g.logger.Error(fmt.Sprintf("failed to check path %v against glob %v: %v", repoRelativePath, glob, err)) - continue - } - // If this glob matches, we know that it has changed for every hash that included this glob - // and is not excluded by a hash's exclusion globs. - // So, we can delete this glob from every hash tracking it as well as stop watching this glob. - // To stop watching, we unref each of the directories corresponding to this glob. 
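The inclusion/exclusion matching that follows condenses to a small predicate. A sketch with a hypothetical helper name, `invalidates`, again using the public doublestar module in place of the vendored fork:

```go
package main

import (
	"fmt"
	"path/filepath"

	"github.com/bmatcuk/doublestar/v4" // stand-in for the vendored fork
)

// invalidates reports whether a repo-relative change invalidates a watched
// inclusion glob: the path must match the inclusion and escape every
// exclusion, mirroring the loop in OnFileWatchEvent.
func invalidates(inclusion string, exclusions []string, changed string) (bool, error) {
	slashPath := filepath.ToSlash(changed)
	matched, err := doublestar.Match(inclusion, slashPath)
	if err != nil || !matched {
		return false, err
	}
	for _, exclusion := range exclusions {
		excluded, err := doublestar.Match(exclusion, slashPath)
		if err != nil {
			return false, err
		}
		if excluded {
			return false, nil
		}
	}
	return true, nil
}

func main() {
	// An excluded cache write does not invalidate the glob...
	fmt.Println(invalidates("my-pkg/.next/**", []string{"my-pkg/.next/cache/**"}, "my-pkg/.next/cache/foo"))
	// ...but an ordinary output write does.
	fmt.Println(invalidates("my-pkg/.next/**", []string{"my-pkg/.next/cache/**"}, "my-pkg/.next/log.txt"))
}
```

This is exactly the behavior exercised by TestTrackMultipleHashes further down: a write under `.next/cache/` leaves the hash with exclusions untouched while invalidating the one without them.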
- if matches { - for hashUntyped := range hashStatus { - hash := hashUntyped.(string) - hashGlobs, ok := g.hashGlobs[hash] - - if !ok { - g.logger.Warn(fmt.Sprintf("failed to find hash %v referenced from glob %v", hash, glob)) - continue - } - - isExcluded := false - // Check if we've excluded this path by going through exclusion globs - for exclusionGlob := range hashGlobs.Exclusions { - matches, err := doublestar.Match(exclusionGlob.(string), filepath.ToSlash(repoRelativePath)) - if err != nil { - g.logger.Error(fmt.Sprintf("failed to check path %v against glob %v: %v", repoRelativePath, glob, err)) - continue - } - - if matches { - isExcluded = true - break - } - } - - // If we have excluded this path, then we skip it - if isExcluded { - continue - } - - // We delete hash from the globStatus entry - g.globStatus[glob].Delete(hash) - - // If we've deleted the last hash for a glob in globStatus, delete the whole glob entry - if len(g.globStatus[glob]) == 0 { - delete(g.globStatus, glob) - } - - hashGlobs.Inclusions.Delete(glob) - // If we've deleted the last glob for a hash, delete the whole hash entry - if hashGlobs.Inclusions.Len() == 0 { - delete(g.hashGlobs, hash) - } - } - } - } -} - -// OnFileWatchError implements FileWatchClient.OnFileWatchError -func (g *GlobWatcher) OnFileWatchError(err error) { - g.logger.Error(fmt.Sprintf("file watching received an error: %v", err)) -} - -// OnFileWatchClosed implements FileWatchClient.OnFileWatchClosed -func (g *GlobWatcher) OnFileWatchClosed() { - g.setClosed() - g.logger.Warn("GlobWatching is closing due to file watching closing") -} diff --git a/cli/internal/globwatcher/globwatcher_test.go b/cli/internal/globwatcher/globwatcher_test.go deleted file mode 100644 index e93327fc1c2b0..0000000000000 --- a/cli/internal/globwatcher/globwatcher_test.go +++ /dev/null @@ -1,233 +0,0 @@ -package globwatcher - -import ( - "testing" - - "github.com/hashicorp/go-hclog" - "github.com/vercel/turbo/cli/internal/filewatcher" - "github.com/vercel/turbo/cli/internal/fs" - "github.com/vercel/turbo/cli/internal/fs/hash" - "github.com/vercel/turbo/cli/internal/turbopath" - "gotest.tools/v3/assert" -) - -func setup(t *testing.T, repoRoot turbopath.AbsoluteSystemPath) { - // Directory layout: - // <repoRoot>/ - // my-pkg/ - // irrelevant - // dist/ - // dist-file - // distChild/ - // child-file - // .next/ - // next-file - distPath := repoRoot.UntypedJoin("my-pkg", "dist") - childFilePath := distPath.UntypedJoin("distChild", "child-file") - err := childFilePath.EnsureDir() - assert.NilError(t, err, "EnsureDir") - f, err := childFilePath.Create() - assert.NilError(t, err, "Create") - err = f.Close() - assert.NilError(t, err, "Close") - distFilePath := repoRoot.UntypedJoin("my-pkg", "dist", "dist-file") - f, err = distFilePath.Create() - assert.NilError(t, err, "Create") - err = f.Close() - assert.NilError(t, err, "Close") - nextFilePath := repoRoot.UntypedJoin("my-pkg", ".next", "next-file") - err = nextFilePath.EnsureDir() - assert.NilError(t, err, "EnsureDir") - f, err = nextFilePath.Create() - assert.NilError(t, err, "Create") - err = f.Close() - assert.NilError(t, err, "Close") - irrelevantPath := repoRoot.UntypedJoin("my-pkg", "irrelevant") - f, err = irrelevantPath.Create() - assert.NilError(t, err, "Create") - err = f.Close() - assert.NilError(t, err, "Close") -} - -type noopCookieWaiter struct{} - -func (*noopCookieWaiter) WaitForCookie() error { - return nil -} - -var _noopCookieWaiter = &noopCookieWaiter{} - -func TestTrackOutputs(t *testing.T) { - logger 
:= hclog.Default() - - repoRootRaw := t.TempDir() - repoRoot := fs.AbsoluteSystemPathFromUpstream(repoRootRaw) - - setup(t, repoRoot) - - globWatcher := New(logger, repoRoot, _noopCookieWaiter) - - globs := hash.TaskOutputs{ - Inclusions: []string{ - "my-pkg/dist/**", - "my-pkg/.next/**", - }, - Exclusions: []string{"my-pkg/.next/cache/**"}, - } - - hash := "the-hash" - err := globWatcher.WatchGlobs(hash, globs) - assert.NilError(t, err, "WatchGlobs") - - changed, err := globWatcher.GetChangedGlobs(hash, globs.Inclusions) - assert.NilError(t, err, "GetChangedGlobs") - assert.Equal(t, 0, len(changed), "Expected no changed paths") - - // Make an irrelevant change - globWatcher.OnFileWatchEvent(filewatcher.Event{ - EventType: filewatcher.FileAdded, - Path: repoRoot.UntypedJoin("my-pkg", "irrelevant"), - }) - - changed, err = globWatcher.GetChangedGlobs(hash, globs.Inclusions) - assert.NilError(t, err, "GetChangedGlobs") - assert.Equal(t, 0, len(changed), "Expected no changed paths") - - // Make an excluded change - globWatcher.OnFileWatchEvent(filewatcher.Event{ - EventType: filewatcher.FileAdded, - Path: repoRoot.Join("my-pkg", ".next", "cache", "foo"), - }) - - changed, err = globWatcher.GetChangedGlobs(hash, globs.Inclusions) - assert.NilError(t, err, "GetChangedGlobs") - assert.Equal(t, 0, len(changed), "Expected no changed paths") - - // Make a relevant change - globWatcher.OnFileWatchEvent(filewatcher.Event{ - EventType: filewatcher.FileAdded, - Path: repoRoot.UntypedJoin("my-pkg", "dist", "foo"), - }) - - changed, err = globWatcher.GetChangedGlobs(hash, globs.Inclusions) - assert.NilError(t, err, "GetChangedGlobs") - assert.Equal(t, 1, len(changed), "Expected one changed path remaining") - expected := "my-pkg/dist/**" - assert.Equal(t, expected, changed[0], "Expected dist glob to have changed") - - // Change a file matching the other glob - globWatcher.OnFileWatchEvent(filewatcher.Event{ - EventType: filewatcher.FileAdded, - Path: repoRoot.UntypedJoin("my-pkg", ".next", "foo"), - }) - // We should no longer be watching anything, since both globs have - // registered changes - if len(globWatcher.hashGlobs) != 0 { - t.Errorf("expected to not track any hashes, found %v", globWatcher.hashGlobs) - } - - // Both globs have changed, we should have stopped tracking - // this hash - changed, err = globWatcher.GetChangedGlobs(hash, globs.Inclusions) - assert.NilError(t, err, "GetChangedGlobs") - assert.DeepEqual(t, globs.Inclusions, changed) -} - -func TestTrackMultipleHashes(t *testing.T) { - logger := hclog.Default() - - repoRootRaw := t.TempDir() - repoRoot := fs.AbsoluteSystemPathFromUpstream(repoRootRaw) - - setup(t, repoRoot) - - globWatcher := New(logger, repoRoot, _noopCookieWaiter) - - globs := hash.TaskOutputs{ - Inclusions: []string{ - "my-pkg/dist/**", - "my-pkg/.next/**", - }, - } - - hashToWatch := "the-hash" - err := globWatcher.WatchGlobs(hashToWatch, globs) - assert.NilError(t, err, "WatchGlobs") - - secondGlobs := hash.TaskOutputs{ - Inclusions: []string{ - "my-pkg/.next/**", - }, - Exclusions: []string{"my-pkg/.next/cache/**"}, - } - - secondHash := "the-second-hash" - err = globWatcher.WatchGlobs(secondHash, secondGlobs) - assert.NilError(t, err, "WatchGlobs") - - changed, err := globWatcher.GetChangedGlobs(hashToWatch, globs.Inclusions) - assert.NilError(t, err, "GetChangedGlobs") - assert.Equal(t, 0, len(changed), "Expected no changed paths") - - changed, err = globWatcher.GetChangedGlobs(secondHash, secondGlobs.Inclusions) - assert.NilError(t, err, "GetChangedGlobs") - 
assert.Equal(t, 0, len(changed), "Expected no changed paths") - - // Make a change that is excluded in one of the hashes but not in the other - globWatcher.OnFileWatchEvent(filewatcher.Event{ - EventType: filewatcher.FileAdded, - Path: repoRoot.UntypedJoin("my-pkg", ".next", "cache", "foo"), - }) - - changed, err = globWatcher.GetChangedGlobs(hashToWatch, globs.Inclusions) - assert.NilError(t, err, "GetChangedGlobs") - assert.Equal(t, 1, len(changed), "Expected one changed path remaining") - - changed, err = globWatcher.GetChangedGlobs(secondHash, secondGlobs.Inclusions) - assert.NilError(t, err, "GetChangedGlobs") - assert.Equal(t, 0, len(changed), "Expected no changed paths") - - assert.Equal(t, 1, len(globWatcher.globStatus["my-pkg/.next/**"]), "Expected to be still watching `my-pkg/.next/**`") - - // Make a change for secondHash - globWatcher.OnFileWatchEvent(filewatcher.Event{ - EventType: filewatcher.FileAdded, - Path: repoRoot.UntypedJoin("my-pkg", ".next", "bar"), - }) - - assert.Equal(t, 0, len(globWatcher.globStatus["my-pkg/.next/**"]), "Expected to be no longer watching `my-pkg/.next/**`") -} - -func TestWatchSingleFile(t *testing.T) { - logger := hclog.Default() - - repoRoot := fs.AbsoluteSystemPathFromUpstream(t.TempDir()) - - setup(t, repoRoot) - - //watcher := newTestWatcher() - globWatcher := New(logger, repoRoot, _noopCookieWaiter) - globs := hash.TaskOutputs{ - Inclusions: []string{"my-pkg/.next/next-file"}, - Exclusions: []string{}, - } - hash := "the-hash" - err := globWatcher.WatchGlobs(hash, globs) - assert.NilError(t, err, "WatchGlobs") - - assert.Equal(t, 1, len(globWatcher.hashGlobs)) - - // A change to an irrelevant file - globWatcher.OnFileWatchEvent(filewatcher.Event{ - EventType: filewatcher.FileAdded, - Path: repoRoot.UntypedJoin("my-pkg", ".next", "foo"), - }) - assert.Equal(t, 1, len(globWatcher.hashGlobs)) - - // Change the watched file - globWatcher.OnFileWatchEvent(filewatcher.Event{ - EventType: filewatcher.FileAdded, - Path: repoRoot.UntypedJoin("my-pkg", ".next", "next-file"), - }) - assert.Equal(t, 0, len(globWatcher.hashGlobs)) -} diff --git a/cli/internal/graph/graph.go b/cli/internal/graph/graph.go deleted file mode 100644 index 5c69d3d0b3c0b..0000000000000 --- a/cli/internal/graph/graph.go +++ /dev/null @@ -1,270 +0,0 @@ -// Package graph contains the CompleteGraph struct and some methods around it -package graph - -import ( - gocontext "context" - "fmt" - "regexp" - "sort" - "strings" - - "github.com/hashicorp/go-hclog" - "github.com/pyr-sh/dag" - "github.com/vercel/turbo/cli/internal/env" - "github.com/vercel/turbo/cli/internal/fs" - "github.com/vercel/turbo/cli/internal/nodes" - "github.com/vercel/turbo/cli/internal/runsummary" - "github.com/vercel/turbo/cli/internal/taskhash" - "github.com/vercel/turbo/cli/internal/turbopath" - "github.com/vercel/turbo/cli/internal/util" - "github.com/vercel/turbo/cli/internal/workspace" -) - -// CompleteGraph represents the common state inferred from the filesystem and pipeline. -// It is not intended to include information specific to a particular run. 
-type CompleteGraph struct { - // WorkspaceGraph expresses the dependencies between packages - WorkspaceGraph dag.AcyclicGraph - - // Pipeline is config from turbo.json - Pipeline fs.Pipeline - - // WorkspaceInfos stores the package.json contents by package name - WorkspaceInfos workspace.Catalog - - // GlobalHash is the hash of all global dependencies - GlobalHash string - - RootNode string - - // Map of TaskDefinitions by taskID - TaskDefinitions map[string]*fs.TaskDefinition - RepoRoot turbopath.AbsoluteSystemPath - - TaskHashTracker *taskhash.Tracker -} - -// GetPackageTaskVisitor wraps a `visitor` function that is used for walking the TaskGraph -// during execution (or dry-runs). The function returned here does not execute any tasks itself, -// but it helps curry some data from the Complete Graph and pass it into the visitor function. -func (g *CompleteGraph) GetPackageTaskVisitor( - ctx gocontext.Context, - taskGraph *dag.AcyclicGraph, - frameworkInference bool, - globalEnvMode util.EnvMode, - getArgs func(taskID string) []string, - logger hclog.Logger, - execFunc func(ctx gocontext.Context, packageTask *nodes.PackageTask, taskSummary *runsummary.TaskSummary) error, -) func(taskID string) error { - return func(taskID string) error { - packageName, taskName := util.GetPackageTaskFromId(taskID) - pkg, ok := g.WorkspaceInfos.PackageJSONs[packageName] - if !ok { - return fmt.Errorf("cannot find package %v for task %v", packageName, taskID) - } - - // Check for root task - var command string - if cmd, ok := pkg.Scripts[taskName]; ok { - command = cmd - } - - if packageName == util.RootPkgName && commandLooksLikeTurbo(command) { - return fmt.Errorf("root task %v (%v) looks like it invokes turbo and might cause a loop", taskName, command) - } - - taskDefinition, ok := g.TaskDefinitions[taskID] - if !ok { - return fmt.Errorf("Could not find definition for task") - } - - // Task env mode is only independent when global env mode is `infer`. - taskEnvMode := globalEnvMode - if taskEnvMode == util.Infer { - if taskDefinition.PassThroughEnv != nil { - taskEnvMode = util.Strict - } else { - // If we're in infer mode we have just detected non-usage of strict env vars. - // But our behavior's actual meaning of this state is `loose`. 
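The env-mode inference in this visitor reduces to a small pure function. A self-contained sketch for reference; the local EnvMode type and constants here merely mirror the three util.EnvMode states the removed code references:

```go
package main

import "fmt"

// EnvMode mirrors the three util.EnvMode states referenced above.
type EnvMode int

const (
	Infer EnvMode = iota
	Loose
	Strict
)

// resolveTaskEnvMode distills the branch above: explicit global modes pass
// through unchanged, while Infer resolves per task — Strict when the task
// declares passThroughEnv, Loose otherwise.
func resolveTaskEnvMode(global EnvMode, passThroughEnv []string) EnvMode {
	if global != Infer {
		return global
	}
	if passThroughEnv != nil {
		return Strict
	}
	return Loose
}

func main() {
	fmt.Println(resolveTaskEnvMode(Infer, []string{"VERCEL_*"})) // Strict
	fmt.Println(resolveTaskEnvMode(Infer, nil))                  // Loose
	fmt.Println(resolveTaskEnvMode(Strict, nil))                 // Strict
}
```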
- taskEnvMode = util.Loose - } - } - - // TODO: maybe we can remove this PackageTask struct at some point - packageTask := &nodes.PackageTask{ - TaskID: taskID, - Task: taskName, - PackageName: packageName, - Pkg: pkg, - EnvMode: taskEnvMode, - Dir: pkg.Dir.ToString(), - TaskDefinition: taskDefinition, - Outputs: taskDefinition.Outputs.Inclusions, - ExcludedOutputs: taskDefinition.Outputs.Exclusions, - } - - passThruArgs := getArgs(taskName) - hash, err := g.TaskHashTracker.CalculateTaskHash( - logger, - packageTask, - taskGraph.DownEdges(taskID), - frameworkInference, - passThruArgs, - ) - - // Not being able to construct the task hash is a hard error - if err != nil { - return fmt.Errorf("Hashing error: %v", err) - } - - pkgDir := pkg.Dir - packageTask.Hash = hash - envVars := g.TaskHashTracker.GetEnvVars(taskID) - expandedInputs := g.TaskHashTracker.GetExpandedInputs(packageTask) - framework := g.TaskHashTracker.GetFramework(taskID) - - packageTask.Command = command - - envVarPassThroughMap, err := g.TaskHashTracker.EnvAtExecutionStart.FromWildcards(taskDefinition.PassThroughEnv) - if err != nil { - return err - } - - specifiedEnvVarsPresentation := []string{} - if taskDefinition.Env != nil { - specifiedEnvVarsPresentation = taskDefinition.Env - } - - summary := &runsummary.TaskSummary{ - TaskID: taskID, - Task: taskName, - Hash: hash, - Package: packageName, - Dir: pkgDir.ToString(), - Outputs: taskDefinition.Outputs.Inclusions, - ExcludedOutputs: taskDefinition.Outputs.Exclusions, - LogFileRelativePath: packageTask.RepoRelativeSystemLogFile(), - ResolvedTaskDefinition: taskDefinition, - ExpandedInputs: expandedInputs, - ExpandedOutputs: []turbopath.AnchoredSystemPath{}, - Command: command, - CommandArguments: passThruArgs, - Framework: framework, - EnvMode: taskEnvMode, - EnvVars: runsummary.TaskEnvVarSummary{ - Specified: runsummary.TaskEnvConfiguration{ - Env: specifiedEnvVarsPresentation, - PassThroughEnv: taskDefinition.PassThroughEnv, - }, - Configured: env.EnvironmentVariableMap(envVars.BySource.Explicit).ToSecretHashable(), - Inferred: env.EnvironmentVariableMap(envVars.BySource.Matching).ToSecretHashable(), - PassThrough: envVarPassThroughMap.ToSecretHashable(), - }, - DotEnv: taskDefinition.DotEnv, - ExternalDepsHash: pkg.ExternalDepsHash, - } - - if ancestors, err := g.getTaskGraphAncestors(taskGraph, packageTask.TaskID); err == nil { - summary.Dependencies = ancestors - } - if descendents, err := g.getTaskGraphDescendants(taskGraph, packageTask.TaskID); err == nil { - summary.Dependents = descendents - } - - return execFunc(ctx, packageTask, summary) - } -} - -// GetPipelineFromWorkspace returns the Unmarshaled fs.Pipeline struct from turbo.json in the given workspace. -func (g *CompleteGraph) GetPipelineFromWorkspace(workspaceName string, isSinglePackage bool) (fs.Pipeline, error) { - turboConfig, err := g.GetTurboConfigFromWorkspace(workspaceName, isSinglePackage) - - if err != nil { - return nil, err - } - - return turboConfig.Pipeline, nil -} - -// GetTurboConfigFromWorkspace returns the Unmarshaled fs.TurboJSON from turbo.json in the given workspace. 
-func (g *CompleteGraph) GetTurboConfigFromWorkspace(workspaceName string, isSinglePackage bool) (*fs.TurboJSON, error) { - cachedTurboConfig, ok := g.WorkspaceInfos.TurboConfigs[workspaceName] - - if ok { - return cachedTurboConfig, nil - } - - var workspacePackageJSON *fs.PackageJSON - if pkgJSON, err := g.GetPackageJSONFromWorkspace(workspaceName); err == nil { - workspacePackageJSON = pkgJSON - } else { - return nil, err - } - - // Note: pkgJSON.Dir for the root workspace will be an empty string, and for - // other workspaces, it will be a relative path. - workspaceAbsolutePath := workspacePackageJSON.Dir.RestoreAnchor(g.RepoRoot) - turboConfig, err := fs.LoadTurboConfig(workspaceAbsolutePath, workspacePackageJSON, isSinglePackage) - - // If we failed to load a TurboConfig, bubble up the error - if err != nil { - return nil, err - } - - // add to cache - g.WorkspaceInfos.TurboConfigs[workspaceName] = turboConfig - - return g.WorkspaceInfos.TurboConfigs[workspaceName], nil -} - -// GetPackageJSONFromWorkspace returns an Unmarshaled struct of the package.json in the given workspace -func (g *CompleteGraph) GetPackageJSONFromWorkspace(workspaceName string) (*fs.PackageJSON, error) { - if pkgJSON, ok := g.WorkspaceInfos.PackageJSONs[workspaceName]; ok { - return pkgJSON, nil - } - - return nil, fmt.Errorf("No package.json for %s", workspaceName) -} - -// getTaskGraphAncestors gets all the ancestors for a given task in the graph. -// "ancestors" are all tasks that the given task depends on. -func (g *CompleteGraph) getTaskGraphAncestors(taskGraph *dag.AcyclicGraph, taskID string) ([]string, error) { - ancestors, err := taskGraph.Ancestors(taskID) - if err != nil { - return nil, err - } - stringAncestors := []string{} - for _, dep := range ancestors { - // Don't leak out internal root node name, which are just placeholders - if !strings.Contains(dep.(string), g.RootNode) { - stringAncestors = append(stringAncestors, dep.(string)) - } - } - - sort.Strings(stringAncestors) - return stringAncestors, nil -} - -// getTaskGraphDescendants gets all the descendants for a given task in the graph. -// "descendants" are all tasks that depend on the given taskID. 
-func (g *CompleteGraph) getTaskGraphDescendants(taskGraph *dag.AcyclicGraph, taskID string) ([]string, error) { - descendents, err := taskGraph.Descendents(taskID) - if err != nil { - return nil, err - } - stringDescendents := []string{} - for _, dep := range descendents { - // Don't leak out internal root node name, which are just placeholders - if !strings.Contains(dep.(string), g.RootNode) { - stringDescendents = append(stringDescendents, dep.(string)) - } - } - sort.Strings(stringDescendents) - return stringDescendents, nil -} - -var _isTurbo = regexp.MustCompile(`(?:^|\s)turbo(?:$|\s)`) - -func commandLooksLikeTurbo(command string) bool { - return _isTurbo.MatchString(command) -} diff --git a/cli/internal/graph/graph_test.go b/cli/internal/graph/graph_test.go deleted file mode 100644 index 9323e19465444..0000000000000 --- a/cli/internal/graph/graph_test.go +++ /dev/null @@ -1,50 +0,0 @@ -package graph - -import ( - "testing" - - "gotest.tools/v3/assert" -) - -func Test_CommandsInvokingTurbo(t *testing.T) { - type testCase struct { - command string - match bool - } - testCases := []testCase{ - { - "turbo run foo", - true, - }, - { - "rm -rf ~/Library/Caches/pnpm && turbo run foo && rm -rf ~/.npm", - true, - }, - { - "FLAG=true turbo run foo", - true, - }, - { - "npx turbo run foo", - true, - }, - { - "echo starting; turbo foo; echo done", - true, - }, - // We don't catch this as if people are going to try to invoke the turbo - // binary directly, they'll always be able to work around us. - { - "./node_modules/.bin/turbo foo", - false, - }, - { - "rm -rf ~/Library/Caches/pnpm && rm -rf ~/Library/Caches/turbo && rm -rf ~/.npm && rm -rf ~/.pnpm-store && rm -rf ~/.turbo", - false, - }, - } - - for _, tc := range testCases { - assert.Equal(t, commandLooksLikeTurbo(tc.command), tc.match, tc.command) - } -} diff --git a/cli/internal/graphvisualizer/graphvisualizer.go b/cli/internal/graphvisualizer/graphvisualizer.go deleted file mode 100644 index 8142ab62f4847..0000000000000 --- a/cli/internal/graphvisualizer/graphvisualizer.go +++ /dev/null @@ -1,206 +0,0 @@ -package graphvisualizer - -import ( - "fmt" - "io" - "math/rand" - "os/exec" - "path/filepath" - "sort" - "strings" - - "github.com/fatih/color" - "github.com/mitchellh/cli" - "github.com/pyr-sh/dag" - "github.com/vercel/turbo/cli/internal/turbopath" - "github.com/vercel/turbo/cli/internal/ui" - "github.com/vercel/turbo/cli/internal/util" - "github.com/vercel/turbo/cli/internal/util/browser" -) - -// GraphVisualizer requirements -type GraphVisualizer struct { - repoRoot turbopath.AbsoluteSystemPath - ui cli.Ui - TaskGraph *dag.AcyclicGraph -} - -// hasGraphViz checks for the presence of https://graphviz.org/ -func hasGraphViz() bool { - err := exec.Command("dot", "-V").Run() - return err == nil -} - -func getRandChar() string { - i := rand.Intn(25) + 65 - return string(rune(i)) -} - -func getRandID() string { - return getRandChar() + getRandChar() + getRandChar() + getRandChar() -} - -// New creates an instance of ColorCache with helpers for adding colors to task outputs -func New(repoRoot turbopath.AbsoluteSystemPath, ui cli.Ui, TaskGraph *dag.AcyclicGraph) *GraphVisualizer { - return &GraphVisualizer{ - repoRoot: repoRoot, - ui: ui, - TaskGraph: TaskGraph, - } -} - -// Converts the TaskGraph dag into a string -func (g *GraphVisualizer) generateDotString() string { - return string(g.TaskGraph.Dot(&dag.DotOpts{ - Verbose: true, - DrawCycles: true, - })) -} - -// Outputs a warning when a file was requested, but graphviz is not available 
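The _isTurbo regexp above, together with the deleted graph_test.go table, pins down the matching rule: `turbo` must appear as a whitespace-delimited word. A runnable demonstration:

```go
package main

import (
	"fmt"
	"regexp"
)

var isTurbo = regexp.MustCompile(`(?:^|\s)turbo(?:$|\s)`)

func main() {
	for _, cmd := range []string{
		"turbo run foo",                 // matches: starts the command
		"npx turbo run foo",             // matches: whitespace on both sides
		"./node_modules/.bin/turbo foo", // no match: preceded by '/', not whitespace
	} {
		fmt.Printf("%-32s %v\n", cmd, isTurbo.MatchString(cmd))
	}
}
```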
-func (g *GraphVisualizer) graphVizWarnUI() { - g.ui.Warn(color.New(color.FgYellow, color.Bold, color.ReverseVideo).Sprint(" WARNING ") + color.YellowString(" `turbo` uses Graphviz to generate an image of your\ngraph, but Graphviz isn't installed on this machine.\n\nYou can download Graphviz from https://graphviz.org/download.\n\nIn the meantime, you can use this string output with an\nonline Dot graph viewer.")) -} - -// RenderDotGraph renders a dot graph string for the current TaskGraph -func (g *GraphVisualizer) RenderDotGraph() { - g.ui.Output("") - g.ui.Output(g.generateDotString()) -} - -type nameCache map[string]string - -func (nc nameCache) getName(in string) string { - if existing, ok := nc[in]; ok { - return existing - } - newName := getRandID() - nc[in] = newName - return newName -} - -type sortableEdge dag.Edge -type sortableEdges []sortableEdge - -// methods mostly copied from marshalEdges in the dag library -func (e sortableEdges) Less(i, j int) bool { - iSrc := dag.VertexName(e[i].Source()) - jSrc := dag.VertexName(e[j].Source()) - if iSrc < jSrc { - return true - } else if iSrc > jSrc { - return false - } - return dag.VertexName(e[i].Target()) < dag.VertexName(e[j].Target()) -} -func (e sortableEdges) Len() int { return len(e) } -func (e sortableEdges) Swap(i, j int) { e[i], e[j] = e[j], e[i] } - -func (g *GraphVisualizer) generateMermaid(out io.StringWriter) error { - if _, err := out.WriteString("graph TD\n"); err != nil { - return err - } - cache := make(nameCache) - // cast edges to our custom type so we can sort them - // this allows us to generate the same graph every time - var edges sortableEdges - for _, edge := range g.TaskGraph.Edges() { - edges = append(edges, sortableEdge(edge)) - } - sort.Sort(edges) - for _, edge := range edges { - left := dag.VertexName(edge.Source()) - right := dag.VertexName(edge.Target()) - leftName := cache.getName(left) - rightName := cache.getName(right) - if _, err := out.WriteString(fmt.Sprintf("\t%v(\"%v\") --> %v(\"%v\")\n", leftName, left, rightName, right)); err != nil { - return err - } - } - return nil -} - -// GenerateGraphFile saves a visualization of the TaskGraph to a file (or renders a DotGraph as a fallback) -func (g *GraphVisualizer) GenerateGraphFile(outputName string) error { - outputFilename := g.repoRoot.UntypedJoin(outputName) - ext := outputFilename.Ext() - // use .jpg as default extension if none is provided - if ext == "" { - ext = ".jpg" - outputFilename = g.repoRoot.UntypedJoin(outputName + ext) - } - if ext == ".mermaid" { - f, err := outputFilename.Create() - if err != nil { - return fmt.Errorf("error creating file: %w", err) - } - defer util.CloseAndIgnoreError(f) - if err := g.generateMermaid(f); err != nil { - return err - } - g.ui.Output("") - g.ui.Output(fmt.Sprintf("✔ Generated task graph in %s", ui.Bold(outputFilename.ToString()))) - return nil - } - graphString := g.generateDotString() - if ext == ".html" { - f, err := outputFilename.Create() - if err != nil { - return fmt.Errorf("error creating file: %w", err) - } - defer f.Close() //nolint errcheck - _, writeErr1 := f.WriteString(`<!DOCTYPE html> - <html> - <head> - <meta charset="utf-8"> - <title>Graph</title> - </head> - <body> - <script src="https://cdn.jsdelivr.net/npm/viz.js@2.1.2/viz.js"></script> - <script src="https://cdn.jsdelivr.net/npm/viz.js@2.1.2/full.render.js"></script> - <script>`) - if writeErr1 != nil { - return fmt.Errorf("error creating file: %w", writeErr1) - } - _, writeErr2 := f.WriteString("const s = `" + graphString + "`;") - if writeErr2 != nil { - return fmt.Errorf("error creating file: %w", writeErr2) - } - _, writeErr3 := f.WriteString(`new Viz().renderSVGElement(s).then((element) => document.body.appendChild(element)); - </script> - </body> - </html> - `) - if writeErr3 != nil { - return fmt.Errorf("error creating file: %w", writeErr3) - } - - g.ui.Output("") - g.ui.Output(fmt.Sprintf("✔ Generated task graph in %s", ui.Bold(outputFilename.ToString()))) - if ui.IsTTY { - if err := browser.OpenBrowser(outputFilename.ToString()); err != nil { - g.ui.Warn(color.New(color.FgYellow,
color.Bold, color.ReverseVideo).Sprintf("failed to open browser. Please navigate to file://%v", filepath.ToSlash(outputFilename.ToString()))) - } - } - return nil - } - hasDot := hasGraphViz() - if hasDot { - dotArgs := []string{"-T" + ext[1:], "-o", outputFilename.ToString()} - cmd := exec.Command("dot", dotArgs...) - cmd.Stdin = strings.NewReader(graphString) - if err := cmd.Run(); err != nil { - return fmt.Errorf("could not generate task graphfile %v: %w", outputFilename, err) - } - g.ui.Output("") - g.ui.Output(fmt.Sprintf("✔ Generated task graph in %s", ui.Bold(outputFilename.ToString()))) - - } else { - g.ui.Output("") - // User requested a file, but we're falling back to console here so warn about installing graphViz correctly - g.graphVizWarnUI() - g.RenderDotGraph() - } - return nil -} diff --git a/cli/internal/hashing/package_deps_hash_go.go b/cli/internal/hashing/package_deps_hash_go.go deleted file mode 100644 index f0d323aee6ba0..0000000000000 --- a/cli/internal/hashing/package_deps_hash_go.go +++ /dev/null @@ -1,562 +0,0 @@ -//go:build go || !rust -// +build go !rust - -package hashing - -import ( - "bufio" - "fmt" - "io" - "os" - "os/exec" - "path/filepath" - "strings" - "sync" - - "github.com/pkg/errors" - gitignore "github.com/sabhiram/go-gitignore" - "github.com/vercel/turbo/cli/internal/doublestar" - "github.com/vercel/turbo/cli/internal/encoding/gitoutput" - "github.com/vercel/turbo/cli/internal/fs" - "github.com/vercel/turbo/cli/internal/globby" - "github.com/vercel/turbo/cli/internal/turbopath" - "github.com/vercel/turbo/cli/internal/util" -) - -// GetPackageFileHashes Builds an object containing git hashes for the files under the specified `packagePath` folder. -func GetPackageFileHashes(rootPath turbopath.AbsoluteSystemPath, packagePath turbopath.AnchoredSystemPath, inputs []string) (map[turbopath.AnchoredUnixPath]string, error) { - if len(inputs) == 0 { - result, err := getPackageFileHashesFromGitIndex(rootPath, packagePath) - if err != nil { - return getPackageFileHashesFromProcessingGitIgnore(rootPath, packagePath, nil) - } - return result, nil - } - - result, err := getPackageFileHashesFromInputs(rootPath, packagePath, inputs) - if err != nil { - return getPackageFileHashesFromProcessingGitIgnore(rootPath, packagePath, inputs) - } - return result, nil -} - -// GetHashesForFiles hashes the list of given files, then returns a map of normalized path to hash. -// This map is suitable for cross-platform caching. -func GetHashesForFiles(rootPath turbopath.AbsoluteSystemPath, files []turbopath.AnchoredSystemPath) (map[turbopath.AnchoredUnixPath]string, error) { - // Try to use `git` first. - gitHashedFiles, err := gitHashObject(rootPath, files) - if err == nil { - return gitHashedFiles, nil - } - - // Fall back to manual hashing. - return manuallyHashFiles(rootPath, files, false) -} - -// GetHashesForExistingFiles hashes the list of given files, -// does not error if a file does not exist, then -// returns a map of normalized path to hash. -// This map is suitable for cross-platform caching. -func GetHashesForExistingFiles(rootPath turbopath.AbsoluteSystemPath, files []turbopath.AnchoredSystemPath) (map[turbopath.AnchoredUnixPath]string, error) { - return manuallyHashFiles(rootPath, files, true) -} - -// gitHashObject returns a map of paths to their SHA hashes calculated by passing the paths to `git hash-object`. -// `git hash-object` expects paths to use Unix separators, even on Windows. 
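For any extension other than .mermaid and .html, GenerateGraphFile above hands the DOT source to Graphviz over stdin. The core of that invocation as a self-contained sketch (renderDot is a hypothetical name; assumes the `dot` binary is on $PATH):

```go
package main

import (
	"log"
	"os/exec"
	"path/filepath"
	"strings"
)

// renderDot pipes DOT source into `dot`, requesting the format implied by the
// output file's extension (e.g. ".svg" -> -Tsvg), as the deleted code does.
func renderDot(dotSource, outPath string) error {
	format := strings.TrimPrefix(filepath.Ext(outPath), ".")
	cmd := exec.Command("dot", "-T"+format, "-o", outPath)
	cmd.Stdin = strings.NewReader(dotSource)
	return cmd.Run()
}

func main() {
	src := `digraph { "web#build" -> "web#lint" }`
	if err := renderDot(src, "graph.svg"); err != nil {
		log.Fatalf("could not render graph: %v", err)
	}
}
```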
-// -// Note: paths of files to hash passed to `git hash-object` are processed as relative to the given anchor. -// For that reason we convert all input paths and make them relative to the anchor prior to passing them -// to `git hash-object`. -func gitHashObject(anchor turbopath.AbsoluteSystemPath, filesToHash []turbopath.AnchoredSystemPath) (map[turbopath.AnchoredUnixPath]string, error) { - fileCount := len(filesToHash) - output := make(map[turbopath.AnchoredUnixPath]string, fileCount) - - if fileCount > 0 { - cmd := exec.Command( - "git", // Using `git` from $PATH, - "hash-object", // hash a file, - "--stdin-paths", // using a list of newline-separated paths from stdin. - ) - cmd.Dir = anchor.ToString() // Start at this directory. - - // The functionality for gitHashObject is different enough that it isn't reasonable to - // generalize the behavior for `runGitCmd`. In fact, it doesn't even use the `gitoutput` - // encoding library, instead relying on its own separate `bufio.Scanner`. - - // We're going to send the list of files in via `stdin`, so we grab that pipe. - // This prevents a huge number of encoding issues and shell compatibility issues - // before they even start. - stdinPipe, stdinPipeError := cmd.StdinPipe() - if stdinPipeError != nil { - return nil, stdinPipeError - } - - // Kick the processing off in a goroutine so while that is doing its thing we can go ahead - // and wire up the consumer of `stdout`. - go func() { - defer util.CloseAndIgnoreError(stdinPipe) - - // `git hash-object` understands all relative paths to be relative to the repository. - // This function's result needs to be relative to `rootPath`. - // We convert all files to absolute paths and assume that they will be inside of the repository. - for _, file := range filesToHash { - converted := file.RestoreAnchor(anchor) - - // `git hash-object` expects paths to use Unix separators, even on Windows. - // `git hash-object` expects paths to be one per line so we must escape newlines. - // In order to understand the escapes, the path must be quoted. - // In order to quote the path, the quotes in the path must be escaped. - // Other than that, we just write everything with full Unicode. - stringPath := converted.ToString() - toSlashed := filepath.ToSlash(stringPath) - escapedNewLines := strings.ReplaceAll(toSlashed, "\n", "\\n") - escapedQuotes := strings.ReplaceAll(escapedNewLines, "\"", "\\\"") - prepared := fmt.Sprintf("\"%s\"\n", escapedQuotes) - _, err := io.WriteString(stdinPipe, prepared) - if err != nil { - return - } - } - }() - - // This gives us an io.ReadCloser so that we never have to read the entire input in - // at a single time. It is doing stream processing instead of string processing. - stdoutPipe, stdoutPipeError := cmd.StdoutPipe() - if stdoutPipeError != nil { - return nil, fmt.Errorf("failed to read `git hash-object`: %w", stdoutPipeError) - } - - startError := cmd.Start() - if startError != nil { - return nil, fmt.Errorf("failed to read `git hash-object`: %w", startError) - } - - // The output of `git hash-object` is a 40-character SHA per input, then a newline. - // We need to track the SHA that corresponds to the input file path. - index := 0 - hashes := make([]string, len(filesToHash)) - scanner := bufio.NewScanner(stdoutPipe) - - // Read the output line-by-line (which is our separator) until exhausted. 
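The goroutine above writes one path per line to `git hash-object --stdin-paths`, and the escaping order matters: forward slashes first, then newlines, then quotes, then wrap the whole path in quotes. The same transformation as a pure function (prepareStdinPath is a hypothetical name):

```go
package main

import (
	"fmt"
	"path/filepath"
	"strings"
)

// prepareStdinPath mirrors the escaping in gitHashObject: git reads one quoted,
// newline-terminated path per line, with Unix separators everywhere.
func prepareStdinPath(absPath string) string {
	slashed := filepath.ToSlash(absPath)
	escapedNewlines := strings.ReplaceAll(slashed, "\n", "\\n")
	escapedQuotes := strings.ReplaceAll(escapedNewlines, "\"", "\\\"")
	return fmt.Sprintf("\"%s\"\n", escapedQuotes)
}

func main() {
	fmt.Print(prepareStdinPath("my-pkg/new\nline")) // "my-pkg/new\nline"
}
```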
- for scanner.Scan() { - bytes := scanner.Bytes() - - scanError := scanner.Err() - if scanError != nil { - return nil, fmt.Errorf("failed to read `git hash-object`: %w", scanError) - } - - hashError := gitoutput.CheckObjectName(bytes) - if hashError != nil { - return nil, fmt.Errorf("failed to read `git hash-object`: %s", "invalid hash received") - } - - // Worked, save it off. - hashes[index] = string(bytes) - index++ - } - - // Waits until stdout is closed before proceeding. - waitErr := cmd.Wait() - if waitErr != nil { - return nil, fmt.Errorf("failed to read `git hash-object`: %w", waitErr) - } - - // Make sure we end up with a matching number of files and hashes. - hashCount := len(hashes) - if fileCount != hashCount { - return nil, fmt.Errorf("failed to read `git hash-object`: %d files %d hashes", fileCount, hashCount) - } - - // The API of this method specifies that we return a `map[turbopath.AnchoredUnixPath]string`. - for i, hash := range hashes { - filePath := filesToHash[i] - output[filePath.ToUnixPath()] = hash - } - } - - return output, nil -} - -func manuallyHashFiles(rootPath turbopath.AbsoluteSystemPath, files []turbopath.AnchoredSystemPath, allowMissing bool) (map[turbopath.AnchoredUnixPath]string, error) { - hashObject := make(map[turbopath.AnchoredUnixPath]string, len(files)) - for _, file := range files { - hash, err := fs.GitLikeHashFile(file.RestoreAnchor(rootPath)) - if allowMissing && errors.Is(err, os.ErrNotExist) { - continue - } - if err != nil { - return nil, fmt.Errorf("could not hash file %v. \n%w", file.ToString(), err) - } - - hashObject[file.ToUnixPath()] = hash - } - return hashObject, nil -} - -func getPackageFileHashesFromGitIndex(rootPath turbopath.AbsoluteSystemPath, packagePath turbopath.AnchoredSystemPath) (map[turbopath.AnchoredUnixPath]string, error) { - var result map[turbopath.AnchoredUnixPath]string - absolutePackagePath := packagePath.RestoreAnchor(rootPath) - - // Get the state of the git index. - gitLsTreeOutput, err := gitLsTree(absolutePackagePath) - if err != nil { - return nil, fmt.Errorf("could not get git hashes for files in package %s: %w", packagePath, err) - } - result = gitLsTreeOutput - - // Update the result with the state of the working directory. - // The paths returned from this call are anchored at the package directory - gitStatusOutput, err := gitStatus(absolutePackagePath) - if err != nil { - return nil, fmt.Errorf("Could not get git hashes from git status: %v", err) - } - - // Review status output to identify the delta. - var filesToHash []turbopath.AnchoredSystemPath - for filePath, status := range gitStatusOutput { - if status.isDelete() { - delete(result, filePath) - } else { - filesToHash = append(filesToHash, filePath.ToSystemPath()) - } - } - - // Get the hashes for any modified files in the working directory. - hashes, err := GetHashesForFiles(absolutePackagePath, filesToHash) - if err != nil { - return nil, err - } - - // Zip up file paths and hashes together - for filePath, hash := range hashes { - result[filePath] = hash - } - - return result, nil -} - -// gitStatus returns a map of paths to their `git` status code. This can be used to identify what should -// be done with files that do not currently match what is in the index. -// -// Note: `git status -z`'s relative path results are relative to the repository's location.
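getPackageFileHashesFromGitIndex above merges two views: `git ls-tree` supplies committed hashes, and `git status` says which paths to drop (deletes) or rehash (everything else). A simplified sketch of that merge with plain string keys:

```go
package main

import "fmt"

// mergeWorktreeState applies status deltas to index hashes: deleted paths are
// removed, changed/untracked paths are rehashed via the supplied function.
func mergeWorktreeState(
	indexHashes map[string]string,
	status map[string]bool, // path -> isDelete
	rehash func(paths []string) map[string]string,
) map[string]string {
	var toHash []string
	for path, isDelete := range status {
		if isDelete {
			delete(indexHashes, path)
		} else {
			toHash = append(toHash, path)
		}
	}
	for path, hash := range rehash(toHash) {
		indexHashes[path] = hash
	}
	return indexHashes
}

func main() {
	index := map[string]string{"a.txt": "aaa", "gone.txt": "bbb"}
	status := map[string]bool{"gone.txt": true, "new.txt": false}
	rehash := func(paths []string) map[string]string {
		out := map[string]string{}
		for _, p := range paths {
			out[p] = "fresh"
		}
		return out
	}
	fmt.Println(mergeWorktreeState(index, status, rehash)) // map[a.txt:aaa new.txt:fresh]
}
```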
-// We need to calculate where the repository's location is in order to determine what the full path is -// before we can return those paths relative to the calling directory, normalizing to the behavior of -// `ls-files` and `ls-tree`. -func gitStatus(rootPath turbopath.AbsoluteSystemPath) (map[turbopath.AnchoredUnixPath]statusCode, error) { - cmd := exec.Command( - "git", // Using `git` from $PATH, - "status", // tell me about the status of the working tree, - "--untracked-files", // including information about untracked files, - "--no-renames", // do not detect renames, - "-z", // with each file path relative to the repository root and \000-terminated, - "--", // and any additional argument you see is a path, promise. - ) - cmd.Args = append(cmd.Args, ".") // Operate in the current directory instead of the root of the working tree. - cmd.Dir = rootPath.ToString() // Include files only from this directory. - - entries, err := runGitCommand(cmd, "status", gitoutput.NewStatusReader) - if err != nil { - return nil, err - } - - output := make(map[turbopath.AnchoredUnixPath]statusCode, len(entries)) - convertedRootPath := turbopath.AbsoluteSystemPathFromUpstream(rootPath.ToString()) - - traversePath, err := memoizedGetTraversePath(convertedRootPath) - if err != nil { - return nil, err - } - - for _, entry := range entries { - statusEntry := gitoutput.StatusEntry(entry) - // Anchored at repository. - pathFromStatus := turbopath.AnchoredUnixPathFromUpstream(statusEntry.GetField(gitoutput.Path)) - var outputPath turbopath.AnchoredUnixPath - - if len(traversePath) > 0 { - repositoryPath := convertedRootPath.Join(traversePath.ToSystemPath()) - fileFullPath := pathFromStatus.ToSystemPath().RestoreAnchor(repositoryPath) - - relativePath, err := fileFullPath.RelativeTo(convertedRootPath) - if err != nil { - return nil, err - } - - outputPath = relativePath.ToUnixPath() - } else { - outputPath = pathFromStatus - } - - output[outputPath] = statusCode{x: statusEntry.GetField(gitoutput.StatusX), y: statusEntry.GetField(gitoutput.StatusY)} - } - - return output, nil -} - -func safeCompileIgnoreFile(filepath turbopath.AbsoluteSystemPath) (*gitignore.GitIgnore, error) { - if filepath.FileExists() { - return gitignore.CompileIgnoreFile(filepath.ToString()) - } - // no op - return gitignore.CompileIgnoreLines([]string{}...), nil -} - -func getPackageFileHashesFromProcessingGitIgnore(rootPath turbopath.AbsoluteSystemPath, packagePath turbopath.AnchoredSystemPath, inputs []string) (map[turbopath.AnchoredUnixPath]string, error) { - result := make(map[turbopath.AnchoredUnixPath]string) - absolutePackagePath := packagePath.RestoreAnchor(rootPath) - - // Instead of implementing all gitignore properly, we hack it. We only respect .gitignore in the root and in - // the directory of a package. - ignore, err := safeCompileIgnoreFile(rootPath.UntypedJoin(".gitignore")) - if err != nil { - return nil, err - } - - ignorePkg, err := safeCompileIgnoreFile(absolutePackagePath.UntypedJoin(".gitignore")) - if err != nil { - return nil, err - } - - includePattern := "" - excludePattern := "" - if len(inputs) > 0 { - var includePatterns []string - var excludePatterns []string - for _, pattern := range inputs { - if len(pattern) > 0 && pattern[0] == '!' 
{ - excludePatterns = append(excludePatterns, absolutePackagePath.UntypedJoin(pattern[1:]).ToString()) - } else { - includePatterns = append(includePatterns, absolutePackagePath.UntypedJoin(pattern).ToString()) - } - } - if len(includePatterns) > 0 { - includePattern = "{" + strings.Join(includePatterns, ",") + "}" - } - if len(excludePatterns) > 0 { - excludePattern = "{" + strings.Join(excludePatterns, ",") + "}" - } - } - - err = fs.Walk(absolutePackagePath.ToStringDuringMigration(), func(name string, isDir bool) error { - convertedName := turbopath.AbsoluteSystemPathFromUpstream(name) - rootMatch := ignore.MatchesPath(convertedName.ToString()) - otherMatch := ignorePkg.MatchesPath(convertedName.ToString()) - if !rootMatch && !otherMatch { - if !isDir { - if includePattern != "" { - val, err := doublestar.PathMatch(includePattern, convertedName.ToString()) - if err != nil { - return err - } - if !val { - return nil - } - } - if excludePattern != "" { - val, err := doublestar.PathMatch(excludePattern, convertedName.ToString()) - if err != nil { - return err - } - if val { - return nil - } - } - hash, err := fs.GitLikeHashFile(convertedName) - if err != nil { - return fmt.Errorf("could not hash file %v. \n%w", convertedName.ToString(), err) - } - - relativePath, err := convertedName.RelativeTo(absolutePackagePath) - if err != nil { - return fmt.Errorf("File path cannot be made relative: %w", err) - } - result[relativePath.ToUnixPath()] = hash - } - } - return nil - }) - if err != nil { - return nil, err - } - return result, nil -} - -// gitLsTree returns a map of paths to their SHA hashes starting at a particular directory -// that are present in the `git` index at a particular revision. -func gitLsTree(rootPath turbopath.AbsoluteSystemPath) (map[turbopath.AnchoredUnixPath]string, error) { - cmd := exec.Command( - "git", // Using `git` from $PATH, - "ls-tree", // list the contents of the git index, - "-r", // recursively, - "-z", // with each file path relative to the invocation directory and \000-terminated, - "HEAD", // at this specified version. - ) - cmd.Dir = rootPath.ToString() // Include files only from this directory. - - entries, err := runGitCommand(cmd, "ls-tree", gitoutput.NewLSTreeReader) - if err != nil { - return nil, err - } - - output := make(map[turbopath.AnchoredUnixPath]string, len(entries)) - - for _, entry := range entries { - lsTreeEntry := gitoutput.LsTreeEntry(entry) - output[turbopath.AnchoredUnixPathFromUpstream(lsTreeEntry.GetField(gitoutput.Path))] = lsTreeEntry[2] - } - - return output, nil -} - -// statusCode represents the two-letter status code from `git status` with two "named" fields, x & y. -// They have different meanings based upon the actual state of the working tree. Using x & y maps -// to upstream behavior. -type statusCode struct { - x string - y string -} - -func (s statusCode) isDelete() bool { - return s.x == "D" || s.y == "D" -} - -func getPackageFileHashesFromInputs(rootPath turbopath.AbsoluteSystemPath, packagePath turbopath.AnchoredSystemPath, inputs []string) (map[turbopath.AnchoredUnixPath]string, error) { - absolutePackagePath := packagePath.RestoreAnchor(rootPath) - // Add all the checked in hashes. - - // make a copy of the inputPatterns array, because we may be appending to it later. - calculatedInputs := make([]string, len(inputs)) - copy(calculatedInputs, inputs) - - // Add in package.json and turbo.json to input patterns. 
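Both hashing paths in this file treat a leading `!` on an input pattern as an exclusion and collapse each side into a single `{a,b,...}` brace glob. That recurring step as a sketch (splitInputPatterns is a hypothetical helper; the real code anchors patterns with turbopath joins rather than filepath.Join):

```go
package main

import (
	"fmt"
	"path/filepath"
	"strings"
)

// splitInputPatterns reproduces the '!'-prefix convention: a leading '!' marks
// an exclusion; both sides are re-anchored at the package directory and then
// collapsed into one brace pattern each for the glob matcher.
func splitInputPatterns(pkgDir string, inputs []string) (include, exclude string) {
	var inc, exc []string
	for _, pattern := range inputs {
		if strings.HasPrefix(pattern, "!") {
			exc = append(exc, filepath.Join(pkgDir, pattern[1:]))
		} else {
			inc = append(inc, filepath.Join(pkgDir, pattern))
		}
	}
	if len(inc) > 0 {
		include = "{" + strings.Join(inc, ",") + "}"
	}
	if len(exc) > 0 {
		exclude = "{" + strings.Join(exc, ",") + "}"
	}
	return include, exclude
}

func main() {
	inc, exc := splitInputPatterns("/repo/my-pkg", []string{"src/**", "!dist/**"})
	fmt.Println(inc) // {/repo/my-pkg/src/**}
	fmt.Println(exc) // {/repo/my-pkg/dist/**}
}
```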
Both file paths are relative to pkgPath - // - // - package.json is an input because if the `scripts` in - // the package.json change (i.e. the tasks that turbo executes), we want - // a cache miss, since any existing cache could be invalid. - // - turbo.json because it's the definition of the tasks themselves. The root turbo.json - // is similarly included in the global hash. This file may not exist in the workspace, but - // that is ok, because it will get ignored downstream. - calculatedInputs = append(calculatedInputs, "package.json") - calculatedInputs = append(calculatedInputs, "turbo.json") - - // The input patterns are relative to the package. - // However, we need to change the globbing to be relative to the repo root. - // Prepend the package path to each of the input patterns. - prefixedInputPatterns := []string{} - prefixedExcludePatterns := []string{} - for _, pattern := range calculatedInputs { - if len(pattern) > 0 && pattern[0] == '!' { - rerooted, err := rootPath.PathTo(absolutePackagePath.UntypedJoin(pattern[1:])) - if err != nil { - return nil, err - } - prefixedExcludePatterns = append(prefixedExcludePatterns, rerooted) - } else { - rerooted, err := rootPath.PathTo(absolutePackagePath.UntypedJoin(pattern)) - if err != nil { - return nil, err - } - prefixedInputPatterns = append(prefixedInputPatterns, rerooted) - } - } - absoluteFilesToHash, err := globby.GlobFiles(rootPath.ToStringDuringMigration(), prefixedInputPatterns, prefixedExcludePatterns) - - if err != nil { - return nil, errors.Wrapf(err, "failed to resolve input globs %v", calculatedInputs) - } - - filesToHash := make([]turbopath.AnchoredSystemPath, len(absoluteFilesToHash)) - for i, rawPath := range absoluteFilesToHash { - relativePathString, err := absolutePackagePath.RelativePathString(rawPath) - - if err != nil { - return nil, errors.Wrapf(err, "not relative to package: %v", rawPath) - } - - filesToHash[i] = turbopath.AnchoredSystemPathFromUpstream(relativePathString) - } - - // Note that in this scenario, we don't need to check git status. - // We're hashing the current state, not state at a commit. - result, err := GetHashesForFiles(absolutePackagePath, filesToHash) - if err != nil { - return nil, errors.Wrap(err, "failed hashing resolved inputs globs") - } - - return result, nil -} - -// runGitCommand provides boilerplate command handling for `ls-tree`, `ls-files`, and `status` -// Rather than doing string processing, it does stream processing of `stdout`. -func runGitCommand(cmd *exec.Cmd, commandName string, handler func(io.Reader) *gitoutput.Reader) ([][]string, error) { - stdoutPipe, pipeError := cmd.StdoutPipe() - if pipeError != nil { - return nil, fmt.Errorf("failed to read `git %s`: %w", commandName, pipeError) - } - - startError := cmd.Start() - if startError != nil { - return nil, fmt.Errorf("failed to read `git %s`: %w", commandName, startError) - } - - reader := handler(stdoutPipe) - entries, readErr := reader.ReadAll() - if readErr != nil { - return nil, fmt.Errorf("failed to read `git %s`: %w", commandName, readErr) - } - - waitErr := cmd.Wait() - if waitErr != nil { - return nil, fmt.Errorf("failed to read `git %s`: %w", commandName, waitErr) - } - - return entries, nil -} - -// getTraversePath gets the distance of the current working directory to the repository root. -// This is used to convert repo-relative paths to cwd-relative paths. -// -// `git rev-parse --show-cdup` always returns Unix paths, even on Windows. 
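The memoizeGetTraversePath helper in the next hunk hand-rolls a cache for a single function, and its comment notes that memoize is a good candidate for generics. A hedged generic rendering of the same idea (Go 1.18+):

```go
package main

import (
	"fmt"
	"sync"
)

// memoize wraps fn with a concurrency-safe cache — the generics variant the
// deleted comment hints at. Both the value and the error are cached, exactly
// like the hand-rolled memoizeGetTraversePath.
func memoize[K comparable, V any](fn func(K) (V, error)) func(K) (V, error) {
	var mu sync.RWMutex
	values := map[K]V{}
	errs := map[K]error{}
	return func(key K) (V, error) {
		mu.RLock()
		v, seen := values[key]
		err := errs[key]
		mu.RUnlock()
		if seen {
			return v, err
		}
		v, err = fn(key)
		mu.Lock()
		values[key] = v
		errs[key] = err
		mu.Unlock()
		return v, err
	}
}

func main() {
	calls := 0
	cached := memoize(func(s string) (int, error) { calls++; return len(s), nil })
	cached("repo")
	cached("repo")
	fmt.Println(calls) // 1
}
```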
-func getTraversePath(rootPath turbopath.AbsoluteSystemPath) (turbopath.RelativeUnixPath, error) { - cmd := exec.Command("git", "rev-parse", "--show-cdup") - cmd.Dir = rootPath.ToString() - - traversePath, err := cmd.Output() - if err != nil { - return "", err - } - - trimmedTraversePath := strings.TrimSuffix(string(traversePath), "\n") - - return turbopath.RelativeUnixPathFromUpstream(trimmedTraversePath), nil -} - -// Don't shell out if we already know where you are in the repository. -// `memoize` is a good candidate for generics. -func memoizeGetTraversePath() func(turbopath.AbsoluteSystemPath) (turbopath.RelativeUnixPath, error) { - cacheMutex := &sync.RWMutex{} - cachedResult := map[turbopath.AbsoluteSystemPath]turbopath.RelativeUnixPath{} - cachedError := map[turbopath.AbsoluteSystemPath]error{} - - return func(rootPath turbopath.AbsoluteSystemPath) (turbopath.RelativeUnixPath, error) { - cacheMutex.RLock() - result, resultExists := cachedResult[rootPath] - err, errExists := cachedError[rootPath] - cacheMutex.RUnlock() - - if resultExists && errExists { - return result, err - } - - invokedResult, invokedErr := getTraversePath(rootPath) - cacheMutex.Lock() - cachedResult[rootPath] = invokedResult - cachedError[rootPath] = invokedErr - cacheMutex.Unlock() - - return invokedResult, invokedErr - } -} - -var memoizedGetTraversePath = memoizeGetTraversePath() diff --git a/cli/internal/hashing/package_deps_hash_go_test.go b/cli/internal/hashing/package_deps_hash_go_test.go deleted file mode 100644 index 1463bb167e280..0000000000000 --- a/cli/internal/hashing/package_deps_hash_go_test.go +++ /dev/null @@ -1,57 +0,0 @@ -//go:build go || !rust -// +build go !rust - -package hashing - -import ( - "reflect" - "testing" - - "github.com/vercel/turbo/cli/internal/turbopath" - "gotest.tools/v3/assert" -) - -func Test_memoizedGetTraversePath(t *testing.T) { - fixturePath := getFixture(1) - - gotOne, _ := memoizedGetTraversePath(fixturePath) - gotTwo, _ := memoizedGetTraversePath(fixturePath) - - assert.Check(t, gotOne == gotTwo, "The strings are identical.") -} - -func Test_getTraversePath(t *testing.T) { - fixturePath := getFixture(1) - - tests := []struct { - name string - rootPath turbopath.AbsoluteSystemPath - want turbopath.RelativeUnixPath - wantErr bool - }{ - { - name: "From fixture location", - rootPath: fixturePath, - want: turbopath.RelativeUnixPath("../../../"), - wantErr: false, - }, - { - name: "Traverse out of git repo", - rootPath: fixturePath.UntypedJoin("..", "..", "..", ".."), - want: "", - wantErr: true, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - got, err := getTraversePath(tt.rootPath) - if (err != nil) != tt.wantErr { - t.Errorf("getTraversePath() error = %v, wantErr %v", err, tt.wantErr) - return - } - if !reflect.DeepEqual(got, tt.want) { - t.Errorf("getTraversePath() = %v, want %v", got, tt.want) - } - }) - } -} diff --git a/cli/internal/hashing/package_deps_hash_rust.go b/cli/internal/hashing/package_deps_hash_rust.go deleted file mode 100644 index 202e073bf062d..0000000000000 --- a/cli/internal/hashing/package_deps_hash_rust.go +++ /dev/null @@ -1,56 +0,0 @@ -//go:build rust -// +build rust - -package hashing - -import ( - "github.com/vercel/turbo/cli/internal/ffi" - "github.com/vercel/turbo/cli/internal/turbopath" -) - -func GetPackageFileHashes(rootPath turbopath.AbsoluteSystemPath, packagePath turbopath.AnchoredSystemPath, inputs []string) (map[turbopath.AnchoredUnixPath]string, error) { - rawHashes, err := 
ffi.GetPackageFileHashes(rootPath.ToString(), packagePath.ToString(), inputs) - if err != nil { - return nil, err - } - - hashes := make(map[turbopath.AnchoredUnixPath]string, len(rawHashes)) - for rawPath, hash := range rawHashes { - hashes[turbopath.AnchoredUnixPathFromUpstream(rawPath)] = hash - } - return hashes, nil -} - -func GetHashesForFiles(rootPath turbopath.AbsoluteSystemPath, files []turbopath.AnchoredSystemPath) (map[turbopath.AnchoredUnixPath]string, error) { - rawFiles := make([]string, len(files)) - for i, file := range files { - rawFiles[i] = file.ToString() - } - rawHashes, err := ffi.GetHashesForFiles(rootPath.ToString(), rawFiles, false) - if err != nil { - return nil, err - } - - hashes := make(map[turbopath.AnchoredUnixPath]string, len(rawHashes)) - for rawPath, hash := range rawHashes { - hashes[turbopath.AnchoredUnixPathFromUpstream(rawPath)] = hash - } - return hashes, nil -} - -func GetHashesForExistingFiles(rootPath turbopath.AbsoluteSystemPath, files []turbopath.AnchoredSystemPath) (map[turbopath.AnchoredUnixPath]string, error) { - rawFiles := make([]string, len(files)) - for i, file := range files { - rawFiles[i] = file.ToString() - } - rawHashes, err := ffi.GetHashesForFiles(rootPath.ToString(), rawFiles, true) - if err != nil { - return nil, err - } - - hashes := make(map[turbopath.AnchoredUnixPath]string, len(rawHashes)) - for rawPath, hash := range rawHashes { - hashes[turbopath.AnchoredUnixPathFromUpstream(rawPath)] = hash - } - return hashes, nil -} diff --git a/cli/internal/hashing/package_deps_hash_test.go b/cli/internal/hashing/package_deps_hash_test.go deleted file mode 100644 index 6298ad99a4632..0000000000000 --- a/cli/internal/hashing/package_deps_hash_test.go +++ /dev/null @@ -1,623 +0,0 @@ -package hashing - -import ( - "errors" - "fmt" - "os" - "os/exec" - "path/filepath" - "reflect" - "runtime" - "strings" - "testing" - - "github.com/vercel/turbo/cli/internal/fs" - "github.com/vercel/turbo/cli/internal/turbopath" - "gotest.tools/v3/assert" -) - -func getFixture(id int) turbopath.AbsoluteSystemPath { - cwd, _ := os.Getwd() - root := turbopath.AbsoluteSystemPath(filepath.VolumeName(cwd) + string(os.PathSeparator)) - checking := turbopath.AbsoluteSystemPath(cwd) - - for checking != root { - fixtureDirectory := checking.Join("fixtures") - _, err := os.Stat(fixtureDirectory.ToString()) - if !errors.Is(err, os.ErrNotExist) { - // Found the fixture directory! - files, _ := os.ReadDir(fixtureDirectory.ToString()) - - // Grab the specified fixture. 
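Earlier in this hunk, the Rust-backed package_deps_hash_rust.go repeats the same FFI boundary conversion in all three wrappers: raw string keys come back from ffi and are re-typed as anchored Unix paths. Factored out as a sketch (AnchoredUnixPath is a local stand-in for the turbopath type so the example is self-contained):

```go
package main

import "fmt"

// AnchoredUnixPath stands in for turbopath.AnchoredUnixPath; the deleted file
// performs this map re-typing in all three of its FFI wrappers.
type AnchoredUnixPath string

func anchorHashes(raw map[string]string) map[AnchoredUnixPath]string {
	hashes := make(map[AnchoredUnixPath]string, len(raw))
	for rawPath, hash := range raw {
		hashes[AnchoredUnixPath(rawPath)] = hash
	}
	return hashes
}

func main() {
	fmt.Println(anchorHashes(map[string]string{"src/main.go": "abc123"}))
}
```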
- for _, file := range files { - fileName := turbopath.RelativeSystemPath(file.Name()) - if strings.Index(fileName.ToString(), fmt.Sprintf("%02d-", id)) == 0 { - return turbopath.AbsoluteSystemPath(fixtureDirectory.Join(fileName)) - } - } - } - checking = checking.Join("..") - } - - panic("fixtures not found!") -} - -func TestSpecialCharacters(t *testing.T) { - if runtime.GOOS == "windows" { - return - } - - fixturePath := getFixture(1) - newlinePath := turbopath.AnchoredUnixPath("new\nline").ToSystemPath() - quotePath := turbopath.AnchoredUnixPath("\"quote\"").ToSystemPath() - newline := newlinePath.RestoreAnchor(fixturePath) - quote := quotePath.RestoreAnchor(fixturePath) - - // Setup - one := os.WriteFile(newline.ToString(), []byte{}, 0644) - two := os.WriteFile(quote.ToString(), []byte{}, 0644) - - // Cleanup - defer func() { - one := os.Remove(newline.ToString()) - two := os.Remove(quote.ToString()) - - if one != nil || two != nil { - return - } - }() - - // Setup error check - if one != nil || two != nil { - return - } - - tests := []struct { - name string - rootPath turbopath.AbsoluteSystemPath - filesToHash []turbopath.AnchoredSystemPath - want map[turbopath.AnchoredUnixPath]string - wantErr bool - }{ - { - name: "Quotes", - rootPath: fixturePath, - filesToHash: []turbopath.AnchoredSystemPath{ - quotePath, - }, - want: map[turbopath.AnchoredUnixPath]string{ - quotePath.ToUnixPath(): "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391", - }, - }, - { - name: "Newlines", - rootPath: fixturePath, - filesToHash: []turbopath.AnchoredSystemPath{ - newlinePath, - }, - want: map[turbopath.AnchoredUnixPath]string{ - newlinePath.ToUnixPath(): "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391", - }, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - got, err := GetHashesForFiles(tt.rootPath, tt.filesToHash) - if (err != nil) != tt.wantErr { - t.Errorf("gitHashObject() error = %v, wantErr %v", err, tt.wantErr) - return - } - if !reflect.DeepEqual(got, tt.want) { - t.Errorf("gitHashObject() = %v, want %v", got, tt.want) - } - }) - } -} - -// getTraversePath gets the distance of the current working directory to the repository root. -// This is used to convert repo-relative paths to cwd-relative paths. -// -// `git rev-parse --show-cdup` always returns Unix paths, even on Windows. 
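The hash e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 that recurs through these tests is git's well-known hash of the empty blob: `git hash-object` digests a "blob <size>\x00" header plus the content with SHA-1, which is also what fs.GitLikeHashFile must reproduce for the manual fallback to agree with git. A few lines verify it:

```go
package main

import (
	"crypto/sha1"
	"fmt"
)

// gitBlobSHA hashes content the way `git hash-object` does: a "blob <size>\x00"
// header followed by the bytes, digested with SHA-1.
func gitBlobSHA(content []byte) string {
	h := sha1.New()
	fmt.Fprintf(h, "blob %d\x00", len(content))
	h.Write(content)
	return fmt.Sprintf("%x", h.Sum(nil))
}

func main() {
	fmt.Println(gitBlobSHA(nil)) // e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
}
```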
-func getTraversePathInTests(rootPath turbopath.AbsoluteSystemPath) (turbopath.RelativeUnixPath, error) { - cmd := exec.Command("git", "rev-parse", "--show-cdup") - cmd.Dir = rootPath.ToString() - - traversePath, err := cmd.Output() - if err != nil { - return "", err - } - - trimmedTraversePath := strings.TrimSuffix(string(traversePath), "\n") - - return turbopath.RelativeUnixPathFromUpstream(trimmedTraversePath), nil -} - -func Test_gitHashObject(t *testing.T) { - fixturePath := getFixture(1) - traversePath, err := getTraversePathInTests(fixturePath) - if err != nil { - return - } - - tests := []struct { - name string - rootPath turbopath.AbsoluteSystemPath - filesToHash []turbopath.AnchoredSystemPath - want map[turbopath.AnchoredUnixPath]string - wantErr bool - }{ - { - name: "No paths", - rootPath: fixturePath, - filesToHash: []turbopath.AnchoredSystemPath{}, - want: map[turbopath.AnchoredUnixPath]string{}, - }, - { - name: "Absolute paths come back relative to rootPath", - rootPath: fixturePath.Join("child"), - filesToHash: []turbopath.AnchoredSystemPath{ - turbopath.AnchoredUnixPath("../root.json").ToSystemPath(), - turbopath.AnchoredUnixPath("child.json").ToSystemPath(), - turbopath.AnchoredUnixPath("grandchild/grandchild.json").ToSystemPath(), - }, - want: map[turbopath.AnchoredUnixPath]string{ - "../root.json": "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391", - "child.json": "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391", - "grandchild/grandchild.json": "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391", - }, - }, - { - name: "Traverse outside of the repo", - rootPath: fixturePath.Join(traversePath.ToSystemPath(), ".."), - filesToHash: []turbopath.AnchoredSystemPath{ - turbopath.AnchoredUnixPath("null.json").ToSystemPath(), - }, - want: nil, - wantErr: true, - }, - { - name: "Nonexistent file", - rootPath: fixturePath, - filesToHash: []turbopath.AnchoredSystemPath{ - turbopath.AnchoredUnixPath("nonexistent.json").ToSystemPath(), - }, - want: nil, - wantErr: true, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - got, err := GetHashesForFiles(tt.rootPath, tt.filesToHash) - if (err != nil) != tt.wantErr { - t.Errorf("gitHashObject() error = %v, wantErr %v", err, tt.wantErr) - return - } - if !reflect.DeepEqual(got, tt.want) { - t.Errorf("gitHashObject() = %v, want %v", got, tt.want) - } - }) - } -} - -func requireGitCmd(t *testing.T, repoRoot turbopath.AbsoluteSystemPath, args ...string) { - t.Helper() - cmd := exec.Command("git", args...) 
- cmd.Dir = repoRoot.ToString() - out, err := cmd.CombinedOutput() - if err != nil { - t.Fatalf("git commit failed: %v %v", err, string(out)) - } -} - -func TestGetPackageDeps(t *testing.T) { - // Directory structure: - // / - // new-root-file <- new file not added to git - // my-pkg/ - // committed-file - // deleted-file - // uncommitted-file <- new file not added to git - // dir/ - // nested-file - - repoRoot := fs.AbsoluteSystemPathFromUpstream(t.TempDir()) - myPkgDir := repoRoot.UntypedJoin("my-pkg") - - // create the dir first - err := myPkgDir.MkdirAll(0775) - assert.NilError(t, err, "CreateDir") - - // create file 1 - committedFilePath := myPkgDir.UntypedJoin("committed-file") - err = committedFilePath.WriteFile([]byte("committed bytes"), 0644) - assert.NilError(t, err, "WriteFile") - - // create file 2 - deletedFilePath := myPkgDir.UntypedJoin("deleted-file") - err = deletedFilePath.WriteFile([]byte("delete-me"), 0644) - assert.NilError(t, err, "WriteFile") - - // create file 3 - nestedPath := myPkgDir.UntypedJoin("dir", "nested-file") - assert.NilError(t, nestedPath.EnsureDir(), "EnsureDir") - assert.NilError(t, nestedPath.WriteFile([]byte("nested"), 0644), "WriteFile") - - // create a package.json - packageJSONPath := myPkgDir.UntypedJoin("package.json") - err = packageJSONPath.WriteFile([]byte("{}"), 0644) - assert.NilError(t, err, "WriteFile") - - // set up git repo and commit all - requireGitCmd(t, repoRoot, "init", ".") - requireGitCmd(t, repoRoot, "config", "--local", "user.name", "test") - requireGitCmd(t, repoRoot, "config", "--local", "user.email", "test@example.com") - requireGitCmd(t, repoRoot, "add", ".") - requireGitCmd(t, repoRoot, "commit", "-m", "foo") - - // remove a file - err = deletedFilePath.Remove() - assert.NilError(t, err, "Remove") - - // create another untracked file in git - uncommittedFilePath := myPkgDir.UntypedJoin("uncommitted-file") - err = uncommittedFilePath.WriteFile([]byte("uncommitted bytes"), 0644) - assert.NilError(t, err, "WriteFile") - - // create an untracked file in git up a level - rootFilePath := repoRoot.UntypedJoin("new-root-file") - err = rootFilePath.WriteFile([]byte("new-root bytes"), 0644) - assert.NilError(t, err, "WriteFile") - - // PackageDepsOptions are parameters for getting git hashes for a filesystem - type PackageDepsOptions struct { - // PackagePath is the folder path to derive the package dependencies from. This is typically the folder - // containing package.json. If omitted, the default value is the current working directory. - PackagePath turbopath.AnchoredSystemPath - - InputPatterns []string - } - - tests := []struct { - opts *PackageDepsOptions - expected map[turbopath.AnchoredUnixPath]string - }{ - // base case. 
when inputs aren't specified, all files hashes are computed - { - opts: &PackageDepsOptions{ - PackagePath: "my-pkg", - }, - expected: map[turbopath.AnchoredUnixPath]string{ - "committed-file": "3a29e62ea9ba15c4a4009d1f605d391cdd262033", - "uncommitted-file": "4e56ad89387e6379e4e91ddfe9872cf6a72c9976", - "package.json": "9e26dfeeb6e641a33dae4961196235bdb965b21b", - "dir/nested-file": "bfe53d766e64d78f80050b73cd1c88095bc70abb", - }, - }, - // with inputs, only the specified inputs are hashed - { - opts: &PackageDepsOptions{ - PackagePath: "my-pkg", - InputPatterns: []string{"uncommitted-file"}, - }, - expected: map[turbopath.AnchoredUnixPath]string{ - "package.json": "9e26dfeeb6e641a33dae4961196235bdb965b21b", - "uncommitted-file": "4e56ad89387e6379e4e91ddfe9872cf6a72c9976", - }, - }, - // inputs with glob pattern also works - { - opts: &PackageDepsOptions{ - PackagePath: "my-pkg", - InputPatterns: []string{"**/*-file"}, - }, - expected: map[turbopath.AnchoredUnixPath]string{ - "committed-file": "3a29e62ea9ba15c4a4009d1f605d391cdd262033", - "uncommitted-file": "4e56ad89387e6379e4e91ddfe9872cf6a72c9976", - "package.json": "9e26dfeeb6e641a33dae4961196235bdb965b21b", - "dir/nested-file": "bfe53d766e64d78f80050b73cd1c88095bc70abb", - }, - }, - // inputs with traversal work - { - opts: &PackageDepsOptions{ - PackagePath: "my-pkg", - InputPatterns: []string{"../**/*-file"}, - }, - expected: map[turbopath.AnchoredUnixPath]string{ - "../new-root-file": "8906ddcdd634706188bd8ef1c98ac07b9be3425e", - "committed-file": "3a29e62ea9ba15c4a4009d1f605d391cdd262033", - "uncommitted-file": "4e56ad89387e6379e4e91ddfe9872cf6a72c9976", - "package.json": "9e26dfeeb6e641a33dae4961196235bdb965b21b", - "dir/nested-file": "bfe53d766e64d78f80050b73cd1c88095bc70abb", - }, - }, - // inputs with another glob pattern works - { - opts: &PackageDepsOptions{ - PackagePath: "my-pkg", - InputPatterns: []string{"**/{uncommitted,committed}-file"}, - }, - expected: map[turbopath.AnchoredUnixPath]string{ - "committed-file": "3a29e62ea9ba15c4a4009d1f605d391cdd262033", - "package.json": "9e26dfeeb6e641a33dae4961196235bdb965b21b", - "uncommitted-file": "4e56ad89387e6379e4e91ddfe9872cf6a72c9976", - }, - }, - // inputs with another glob pattern + traversal work - { - opts: &PackageDepsOptions{ - PackagePath: "my-pkg", - InputPatterns: []string{"../**/{new-root,uncommitted,committed}-file"}, - }, - expected: map[turbopath.AnchoredUnixPath]string{ - "../new-root-file": "8906ddcdd634706188bd8ef1c98ac07b9be3425e", - "committed-file": "3a29e62ea9ba15c4a4009d1f605d391cdd262033", - "package.json": "9e26dfeeb6e641a33dae4961196235bdb965b21b", - "uncommitted-file": "4e56ad89387e6379e4e91ddfe9872cf6a72c9976", - }, - }, - } - for _, tt := range tests { - got, _ := GetPackageFileHashes(repoRoot, tt.opts.PackagePath, tt.opts.InputPatterns) - assert.DeepEqual(t, got, tt.expected) - } -} - -func Test_getPackageFileHashesFromProcessingGitIgnore(t *testing.T) { - rootIgnore := strings.Join([]string{ - "ignoreme", - "ignorethisdir/", - }, "\n") - pkgIgnore := strings.Join([]string{ - "pkgignoreme", - "pkgignorethisdir/", - }, "\n") - root := t.TempDir() - repoRoot := turbopath.AbsoluteSystemPathFromUpstream(root) - pkgName := turbopath.AnchoredUnixPath("child-dir/libA").ToSystemPath() - type fileHash struct { - contents string - hash string - } - files := map[turbopath.AnchoredUnixPath]fileHash{ - "top-level-file": {"top-level-file-contents", ""}, - "other-dir/other-dir-file": {"other-dir-file-contents", ""}, - "ignoreme": {"anything", ""}, - 
"child-dir/libA/some-file": {"some-file-contents", "7e59c6a6ea9098c6d3beb00e753e2c54ea502311"}, - "child-dir/libA/some-dir/other-file": {"some-file-contents", "7e59c6a6ea9098c6d3beb00e753e2c54ea502311"}, - "child-dir/libA/some-dir/another-one": {"some-file-contents", "7e59c6a6ea9098c6d3beb00e753e2c54ea502311"}, - "child-dir/libA/some-dir/excluded-file": {"some-file-contents", "7e59c6a6ea9098c6d3beb00e753e2c54ea502311"}, - "child-dir/libA/ignoreme": {"anything", ""}, - "child-dir/libA/ignorethisdir/anything": {"anything", ""}, - "child-dir/libA/pkgignoreme": {"anything", ""}, - "child-dir/libA/pkgignorethisdir/file": {"anything", ""}, - } - - rootIgnoreFile, err := repoRoot.Join(".gitignore").Create() - if err != nil { - t.Fatalf("failed to create .gitignore: %v", err) - } - _, err = rootIgnoreFile.WriteString(rootIgnore) - if err != nil { - t.Fatalf("failed to write contents to .gitignore: %v", err) - } - err = rootIgnoreFile.Close() - if err != nil { - t.Fatalf("failed to close root ignore file") - } - pkgIgnoreFilename := pkgName.RestoreAnchor(repoRoot).Join(".gitignore") - err = pkgIgnoreFilename.EnsureDir() - if err != nil { - t.Fatalf("failed to ensure directories for %v: %v", pkgIgnoreFilename, err) - } - pkgIgnoreFile, err := pkgIgnoreFilename.Create() - if err != nil { - t.Fatalf("failed to create libA/.gitignore: %v", err) - } - _, err = pkgIgnoreFile.WriteString(pkgIgnore) - if err != nil { - t.Fatalf("failed to write contents to libA/.gitignore: %v", err) - } - err = pkgIgnoreFile.Close() - if err != nil { - t.Fatalf("failed to close package ignore file") - } - for path, spec := range files { - filename := path.ToSystemPath().RestoreAnchor(repoRoot) - err = filename.EnsureDir() - if err != nil { - t.Fatalf("failed to ensure directories for %v: %v", filename, err) - } - f, err := filename.Create() - if err != nil { - t.Fatalf("failed to create file: %v: %v", filename, err) - } - _, err = f.WriteString(spec.contents) - if err != nil { - t.Fatalf("failed to write contents to %v: %v", filename, err) - } - err = f.Close() - if err != nil { - t.Fatalf("failed to close package ignore file") - } - } - // now that we've created the repo, expect our .gitignore file too - files[turbopath.AnchoredUnixPath("child-dir/libA/.gitignore")] = fileHash{contents: "", hash: "3237694bc3312ded18386964a855074af7b066af"} - - pkg := &fs.PackageJSON{ - Dir: pkgName, - } - hashes, err := GetPackageFileHashes(repoRoot, pkg.Dir, []string{}) - if err != nil { - t.Fatalf("failed to calculate manual hashes: %v", err) - } - - count := 0 - for path, spec := range files { - systemPath := path.ToSystemPath() - if systemPath.HasPrefix(pkgName) { - relPath := systemPath[len(pkgName)+1:] - got, ok := hashes[relPath.ToUnixPath()] - if !ok { - if spec.hash != "" { - t.Errorf("did not find hash for %v, but wanted one", path) - } - } else if got != spec.hash { - t.Errorf("hash of %v, got %v want %v", path, got, spec.hash) - } else { - count++ - } - } - } - if count != len(hashes) { - t.Errorf("found extra hashes in %v", hashes) - } - - // expect the ignored file for manual hashing - files[turbopath.AnchoredUnixPath("child-dir/libA/pkgignorethisdir/file")] = fileHash{contents: "anything", hash: "67aed78ea231bdee3de45b6d47d8f32a0a792f6d"} - - count = 0 - justFileHashes, err := GetPackageFileHashes(repoRoot, pkg.Dir, []string{filepath.FromSlash("**/*file"), "!" 
+ filepath.FromSlash("some-dir/excluded-file")}) - if err != nil { - t.Fatalf("failed to calculate manual hashes: %v", err) - } - for path, spec := range files { - systemPath := path.ToSystemPath() - if systemPath.HasPrefix(pkgName) { - shouldInclude := strings.HasSuffix(systemPath.ToString(), "file") && !strings.HasSuffix(systemPath.ToString(), "excluded-file") - relPath := systemPath[len(pkgName)+1:] - got, ok := justFileHashes[relPath.ToUnixPath()] - if !ok && shouldInclude { - if spec.hash != "" { - t.Errorf("did not find hash for %v, but wanted one", path) - } - } else if shouldInclude && got != spec.hash { - t.Errorf("hash of %v, got %v want %v", path, got, spec.hash) - } else if shouldInclude { - count++ - } - } - } - if count != len(justFileHashes) { - t.Errorf("found extra hashes in %v", hashes) - } - -} - -func Test_manuallyHashFiles(t *testing.T) { - testDir := turbopath.AbsoluteSystemPath(t.TempDir()) - - testFile := testDir.UntypedJoin("existing-file.txt") - assert.NilError(t, testFile.WriteFile([]byte(""), 0644)) - - type args struct { - rootPath turbopath.AbsoluteSystemPath - files []turbopath.AnchoredSystemPath - allowMissing bool - } - tests := []struct { - name string - args args - want map[turbopath.AnchoredUnixPath]string - wantErr bool - }{ - // Tests for allowMissing = true - { - name: "allowMissing, all missing", - args: args{ - rootPath: testDir, - files: []turbopath.AnchoredSystemPath{"non-existent-file.txt"}, - allowMissing: true, - }, - want: map[turbopath.AnchoredUnixPath]string{}, - wantErr: false, - }, - { - name: "allowMissing, some missing, some not", - args: args{ - rootPath: testDir, - files: []turbopath.AnchoredSystemPath{ - "existing-file.txt", - "non-existent-file.txt", - }, - allowMissing: true, - }, - want: map[turbopath.AnchoredUnixPath]string{ - "existing-file.txt": "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391", - }, - wantErr: false, - }, - { - name: "allowMissing, none missing", - args: args{ - rootPath: testDir, - files: []turbopath.AnchoredSystemPath{ - "existing-file.txt", - }, - allowMissing: true, - }, - want: map[turbopath.AnchoredUnixPath]string{ - "existing-file.txt": "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391", - }, - wantErr: false, - }, - - // Tests for allowMissing = false - { - name: "don't allowMissing, all missing", - args: args{ - rootPath: testDir, - files: []turbopath.AnchoredSystemPath{"non-existent-file.txt"}, - allowMissing: false, - }, - want: nil, - wantErr: true, - }, - { - name: "don't allowMissing, some missing, some not", - args: args{ - rootPath: testDir, - files: []turbopath.AnchoredSystemPath{ - "existing-file.txt", - "non-existent-file.txt", - }, - allowMissing: false, - }, - want: nil, - wantErr: true, - }, - { - name: "don't allowMissing, none missing", - args: args{ - rootPath: testDir, - files: []turbopath.AnchoredSystemPath{ - "existing-file.txt", - }, - allowMissing: false, - }, - want: map[turbopath.AnchoredUnixPath]string{ - "existing-file.txt": "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391", - }, - wantErr: false, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - var got map[turbopath.AnchoredUnixPath]string - var err error - if tt.args.allowMissing { - got, err = GetHashesForExistingFiles(tt.args.rootPath, tt.args.files) - } else { - got, err = GetHashesForFiles(tt.args.rootPath, tt.args.files) - } - //got, err := manuallyHashFiles(tt.args.rootPath, tt.args.files, tt.args.allowMissing) - if (err != nil) != tt.wantErr { - t.Errorf("manuallyHashFiles() error = %v, wantErr %v", err, 
tt.wantErr) - return - } - if !reflect.DeepEqual(got, tt.want) { - t.Errorf("manuallyHashFiles() = %v, want %v", got, tt.want) - } - }) - } -} diff --git a/cli/internal/inference/inference.go b/cli/internal/inference/inference.go deleted file mode 100644 index c69e9927388cc..0000000000000 --- a/cli/internal/inference/inference.go +++ /dev/null @@ -1,167 +0,0 @@ -package inference - -import "github.com/vercel/turbo/cli/internal/fs" - -// Framework is an identifier for something that we wish to inference against. -type Framework struct { - Slug string - EnvWildcards []string - DependencyMatch matcher -} - -type matcher struct { - strategy matchStrategy - dependencies []string -} - -type matchStrategy int - -const ( - all matchStrategy = iota + 1 - some -) - -var _frameworks = []Framework{ - { - Slug: "blitzjs", - EnvWildcards: []string{"NEXT_PUBLIC_*"}, - DependencyMatch: matcher{ - strategy: all, - dependencies: []string{"blitz"}, - }, - }, - { - Slug: "nextjs", - EnvWildcards: []string{"NEXT_PUBLIC_*"}, - DependencyMatch: matcher{ - strategy: all, - dependencies: []string{"next"}, - }, - }, - { - Slug: "gatsby", - EnvWildcards: []string{"GATSBY_*"}, - DependencyMatch: matcher{ - strategy: all, - dependencies: []string{"gatsby"}, - }, - }, - { - Slug: "astro", - EnvWildcards: []string{"PUBLIC_*"}, - DependencyMatch: matcher{ - strategy: all, - dependencies: []string{"astro"}, - }, - }, - { - Slug: "solidstart", - EnvWildcards: []string{"VITE_*"}, - DependencyMatch: matcher{ - strategy: all, - dependencies: []string{"solid-js", "solid-start"}, - }, - }, - { - Slug: "vue", - EnvWildcards: []string{"VUE_APP_*"}, - DependencyMatch: matcher{ - strategy: all, - dependencies: []string{"@vue/cli-service"}, - }, - }, - { - Slug: "sveltekit", - EnvWildcards: []string{"VITE_*"}, - DependencyMatch: matcher{ - strategy: all, - dependencies: []string{"@sveltejs/kit"}, - }, - }, - { - Slug: "create-react-app", - EnvWildcards: []string{"REACT_APP_*"}, - DependencyMatch: matcher{ - strategy: some, - dependencies: []string{"react-scripts", "react-dev-utils"}, - }, - }, - { - Slug: "nuxtjs", - EnvWildcards: []string{"NUXT_ENV_*"}, - DependencyMatch: matcher{ - strategy: some, - dependencies: []string{"nuxt", "nuxt-edge", "nuxt3", "nuxt3-edge"}, - }, - }, - { - Slug: "redwoodjs", - EnvWildcards: []string{"REDWOOD_ENV_*"}, - DependencyMatch: matcher{ - strategy: all, - dependencies: []string{"@redwoodjs/core"}, - }, - }, - { - Slug: "vite", - EnvWildcards: []string{"VITE_*"}, - DependencyMatch: matcher{ - strategy: all, - dependencies: []string{"vite"}, - }, - }, - { - Slug: "sanity", - EnvWildcards: []string{"SANITY_STUDIO_*"}, - DependencyMatch: matcher{ - strategy: all, - dependencies: []string{"@sanity/cli"}, - }, - }, -} - -func (m matcher) match(pkg *fs.PackageJSON) bool { - deps := pkg.UnresolvedExternalDeps - // only check dependencies if we're in a non-monorepo - if pkg.Workspaces != nil && len(pkg.Workspaces) == 0 { - deps = pkg.Dependencies - } - - if m.strategy == all { - for _, dependency := range m.dependencies { - _, exists := deps[dependency] - if !exists { - return false - } - } - return true - } - - // m.strategy == some - for _, dependency := range m.dependencies { - _, exists := deps[dependency] - if exists { - return true - } - } - return false -} - -func (f Framework) match(pkg *fs.PackageJSON) bool { - return f.DependencyMatch.match(pkg) -} - -// InferFramework returns a reference to a matched framework -func InferFramework(pkg *fs.PackageJSON) *Framework { - if pkg == nil { - return nil 
- } - - for _, candidateFramework := range _frameworks { - if candidateFramework.match(pkg) { - return &candidateFramework - } - } - - return nil -} diff --git a/cli/internal/inference/inference_test.go b/cli/internal/inference/inference_test.go deleted file mode 100644 index ed82ecc39a112..0000000000000 --- a/cli/internal/inference/inference_test.go +++ /dev/null @@ -1,97 +0,0 @@ -package inference - -import ( - "reflect" - "testing" - - "github.com/vercel/turbo/cli/internal/fs" -) - -func getFrameworkBySlug(slug string) *Framework { - for _, framework := range _frameworks { - if framework.Slug == slug { - return &framework - } - } - panic("that framework doesn't exist") -} - -func TestInferFramework(t *testing.T) { - tests := []struct { - name string - pkg *fs.PackageJSON - want *Framework - }{ - { - name: "Hello world", - pkg: nil, - want: nil, - }, - { - name: "Empty dependencies", - pkg: &fs.PackageJSON{UnresolvedExternalDeps: map[string]string{}}, - want: nil, - }, - { - name: "Finds Blitz", - pkg: &fs.PackageJSON{UnresolvedExternalDeps: map[string]string{ - "blitz": "*", - }}, - want: getFrameworkBySlug("blitzjs"), - }, - { - name: "Order is preserved (returns blitz, not next)", - pkg: &fs.PackageJSON{UnresolvedExternalDeps: map[string]string{ - "blitz": "*", - "next": "*", - }}, - want: getFrameworkBySlug("blitzjs"), - }, - { - name: "Finds next without blitz", - pkg: &fs.PackageJSON{UnresolvedExternalDeps: map[string]string{ - "next": "*", - }}, - want: getFrameworkBySlug("nextjs"), - }, - { - name: "match strategy of all works (solid)", - pkg: &fs.PackageJSON{UnresolvedExternalDeps: map[string]string{ - "solid-js": "*", - "solid-start": "*", - }}, - want: getFrameworkBySlug("solidstart"), - }, - { - name: "match strategy of some works (nuxt)", - pkg: &fs.PackageJSON{UnresolvedExternalDeps: map[string]string{ - "nuxt3": "*", - }}, - want: getFrameworkBySlug("nuxtjs"), - }, - { - name: "match strategy of some works (c-r-a)", - pkg: &fs.PackageJSON{UnresolvedExternalDeps: map[string]string{ - "react-scripts": "*", - }}, - want: getFrameworkBySlug("create-react-app"), - }, - { - name: "Finds next in non monorepo", - pkg: &fs.PackageJSON{ - Dependencies: map[string]string{ - "next": "*", - }, - Workspaces: []string{}, - }, - want: getFrameworkBySlug("nextjs"), - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - if got := InferFramework(tt.pkg); !reflect.DeepEqual(got, tt.want) { - t.Errorf("InferFramework() = %v, want %v", got, tt.want) - } - }) - } -} diff --git a/cli/internal/lockfile/berry_lockfile.go b/cli/internal/lockfile/berry_lockfile.go deleted file mode 100644 index f6e02a04a0456..0000000000000 --- a/cli/internal/lockfile/berry_lockfile.go +++ /dev/null @@ -1,46 +0,0 @@ -package lockfile - -import ( - "github.com/vercel/turbo/cli/internal/ffi" - "github.com/vercel/turbo/cli/internal/turbopath" -) - -// BerryLockfile representation of berry lockfile -type BerryLockfile struct { - contents []byte - resolutions map[string]string -} - -// BerryDependencyMetaEntry Structure for holding if a package is optional or not -type BerryDependencyMetaEntry struct { - Optional bool `yaml:"optional,omitempty"` - Unplugged bool `yaml:"unplugged,omitempty"` -} - -var _ Lockfile = (*BerryLockfile)(nil) - -// ResolvePackage Given a package and version returns the key, resolved version, and if it was found -func (l *BerryLockfile) ResolvePackage(_workspace turbopath.AnchoredUnixPath, name string, version string) (Package, error) { - panic("Should use Rust 
implementation") -} - -// AllDependencies Given a lockfile key return all (dev/optional/peer) dependencies of that package -func (l *BerryLockfile) AllDependencies(key string) (map[string]string, bool) { - panic("Should use Rust implementation") -} - -// DecodeBerryLockfile Takes the contents of a berry lockfile and returns a struct representation -func DecodeBerryLockfile(contents []byte, resolutions map[string]string) (*BerryLockfile, error) { - return &BerryLockfile{contents: contents, resolutions: resolutions}, nil -} - -// GlobalChange checks if there are any differences between lockfiles that would completely invalidate -// the cache. -func (l *BerryLockfile) GlobalChange(other Lockfile) bool { - o, ok := other.(*BerryLockfile) - if !ok { - return true - } - - return ffi.GlobalChange("berry", o.contents, l.contents) -} diff --git a/cli/internal/lockfile/berry_lockfile_test.go b/cli/internal/lockfile/berry_lockfile_test.go deleted file mode 100644 index cf941b06fa0e7..0000000000000 --- a/cli/internal/lockfile/berry_lockfile_test.go +++ /dev/null @@ -1,31 +0,0 @@ -package lockfile - -import ( - "testing" - - "github.com/vercel/turbo/cli/internal/turbopath" - "gotest.tools/v3/assert" -) - -func Test_BerryTransitiveClosure(t *testing.T) { - contents := getRustFixture(t, "berry.lock") - lf, err := DecodeBerryLockfile(contents, map[string]string{"lodash@^4.17.21": "patch:lodash@npm%3A4.17.21#./.yarn/patches/lodash-npm-4.17.21-6382451519.patch"}) - assert.NilError(t, err) - closures, err := AllTransitiveClosures(map[turbopath.AnchoredUnixPath]map[string]string{ - turbopath.AnchoredUnixPath(""): {}, - turbopath.AnchoredUnixPath("apps/web"): {}, - turbopath.AnchoredUnixPath("apps/docs"): { - "lodash": "^4.17.21", - }, - }, lf) - assert.NilError(t, err) - assert.Equal(t, len(closures), 3) - - lodash := Package{ - Key: "lodash@npm:4.17.21", - Version: "4.17.21", - Found: true, - } - assert.Assert(t, !closures[turbopath.AnchoredUnixPath("apps/web")].Contains(lodash)) - assert.Assert(t, closures[turbopath.AnchoredUnixPath("apps/docs")].Contains(lodash)) -} diff --git a/cli/internal/lockfile/bun_lockfile.go b/cli/internal/lockfile/bun_lockfile.go deleted file mode 100644 index 058c4b3dd493e..0000000000000 --- a/cli/internal/lockfile/bun_lockfile.go +++ /dev/null @@ -1,38 +0,0 @@ -package lockfile - -import ( - "github.com/vercel/turbo/cli/internal/turbopath" -) - -// BunLockfile representation of bun lockfile -type BunLockfile struct { - contents []byte -} - -var _ Lockfile = (*BunLockfile)(nil) - -// ResolvePackage Given a package and version returns the key, resolved version, and if it was found -func (l *BunLockfile) ResolvePackage(_ turbopath.AnchoredUnixPath, _ string, _ string) (Package, error) { - // This is only used when doing calculating the transitive deps, but Rust - // implementations do this calculation on the Rust side. - panic("Unreachable") -} - -// AllDependencies Given a lockfile key return all (dev/optional/peer) dependencies of that package -func (l *BunLockfile) AllDependencies(_ string) (map[string]string, bool) { - // This is only used when doing calculating the transitive deps, but Rust - // implementations do this calculation on the Rust side. 
- panic("Unreachable") -} - -// DecodeBunLockfile Takes the contents of a bun lockfile and returns a struct representation -func DecodeBunLockfile(contents []byte) (*BunLockfile, error) { - return &BunLockfile{contents: contents}, nil -} - -// GlobalChange checks if there are any differences between lockfiles that would completely invalidate -// the cache. -func (l *BunLockfile) GlobalChange(other Lockfile) bool { - _, ok := other.(*BunLockfile) - return !ok -} diff --git a/cli/internal/lockfile/lockfile.go b/cli/internal/lockfile/lockfile.go deleted file mode 100644 index 846814124d02e..0000000000000 --- a/cli/internal/lockfile/lockfile.go +++ /dev/null @@ -1,198 +0,0 @@ -// Package lockfile provides the lockfile interface and implementations for the various package managers -package lockfile - -import ( - "fmt" - "reflect" - "sort" - - mapset "github.com/deckarep/golang-set" - "github.com/vercel/turbo/cli/internal/ffi" - "github.com/vercel/turbo/cli/internal/turbopath" - "golang.org/x/sync/errgroup" -) - -// Lockfile Interface for general operations that work across all lockfiles -type Lockfile interface { - // ResolvePackage Given a workspace, a package it imports and version returns the key, resolved version, and if it was found - ResolvePackage(workspacePath turbopath.AnchoredUnixPath, name string, version string) (Package, error) - // AllDependencies Given a lockfile key return all (dev/optional/peer) dependencies of that package - AllDependencies(key string) (map[string]string, bool) - // GlobalChange checks if there are any differences between lockfiles that would completely invalidate - // the cache. - GlobalChange(other Lockfile) bool -} - -// IsNil checks if lockfile is nil -func IsNil(l Lockfile) bool { - return l == nil || reflect.ValueOf(l).IsNil() -} - -// Package Structure representing a possible Pack -type Package struct { - // Key used to lookup a package in the lockfile - Key string `json:"key"` - // The resolved version of a package as it appears in the lockfile - Version string `json:"version"` - // Set to true iff Key and Version are set - Found bool `json:"-"` -} - -// ByKey sort package structures by key -type ByKey []Package - -func (p ByKey) Len() int { - return len(p) -} - -func (p ByKey) Swap(i, j int) { - p[i], p[j] = p[j], p[i] -} - -func (p ByKey) Less(i, j int) bool { - if p[i].Key == p[j].Key { - return p[i].Version < p[j].Version - } - - return p[i].Key < p[j].Key -} - -var _ (sort.Interface) = (*ByKey)(nil) - -type closureMsg struct { - workspace turbopath.AnchoredUnixPath - closure mapset.Set -} - -// AllTransitiveClosures computes closures for all workspaces -func AllTransitiveClosures( - workspaces map[turbopath.AnchoredUnixPath]map[string]string, - lockFile Lockfile, -) (map[turbopath.AnchoredUnixPath]mapset.Set, error) { - // We special case as Rust implementations have their own dep crawl - if lf, ok := lockFile.(*NpmLockfile); ok { - return rustTransitiveDeps(lf.contents, "npm", workspaces, nil) - } - if lf, ok := lockFile.(*BerryLockfile); ok { - return rustTransitiveDeps(lf.contents, "berry", workspaces, lf.resolutions) - } - if lf, ok := lockFile.(*PnpmLockfile); ok { - return rustTransitiveDeps(lf.contents, "pnpm", workspaces, nil) - } - if lf, ok := lockFile.(*YarnLockfile); ok { - return rustTransitiveDeps(lf.contents, "yarn", workspaces, nil) - } - if lf, ok := lockFile.(*BunLockfile); ok { - return rustTransitiveDeps(lf.contents, "bun", workspaces, nil) - } - - g := new(errgroup.Group) - c := make(chan closureMsg, len(workspaces)) - 
closures := make(map[turbopath.AnchoredUnixPath]mapset.Set, len(workspaces)) - for workspace, deps := range workspaces { - workspace := workspace - deps := deps - g.Go(func() error { - closure, err := transitiveClosure(workspace, deps, lockFile) - if err != nil { - return err - } - c <- closureMsg{workspace: workspace, closure: closure} - return nil - }) - } - err := g.Wait() - close(c) - if err != nil { - return nil, err - } - for msg := range c { - closures[msg.workspace] = msg.closure - } - return closures, nil -} - -func transitiveClosure( - workspaceDir turbopath.AnchoredUnixPath, - unresolvedDeps map[string]string, - lockFile Lockfile, -) (mapset.Set, error) { - if IsNil(lockFile) { - return nil, fmt.Errorf("No lockfile available to do analysis on") - } - - resolvedPkgs := mapset.NewSet() - lockfileEg := &errgroup.Group{} - - transitiveClosureHelper(lockfileEg, workspaceDir, lockFile, unresolvedDeps, resolvedPkgs) - - if err := lockfileEg.Wait(); err != nil { - return nil, err - } - - return resolvedPkgs, nil -} - -func transitiveClosureHelper( - wg *errgroup.Group, - workspacePath turbopath.AnchoredUnixPath, - lockfile Lockfile, - unresolvedDirectDeps map[string]string, - resolvedDeps mapset.Set, -) { - for directDepName, unresolvedVersion := range unresolvedDirectDeps { - directDepName := directDepName - unresolvedVersion := unresolvedVersion - wg.Go(func() error { - - lockfilePkg, err := lockfile.ResolvePackage(workspacePath, directDepName, unresolvedVersion) - - if err != nil { - return err - } - - if !lockfilePkg.Found || resolvedDeps.Contains(lockfilePkg) { - return nil - } - - resolvedDeps.Add(lockfilePkg) - - allDeps, ok := lockfile.AllDependencies(lockfilePkg.Key) - - if !ok { - panic(fmt.Sprintf("Unable to find entry for %s", lockfilePkg.Key)) - } - - if len(allDeps) > 0 { - transitiveClosureHelper(wg, workspacePath, lockfile, allDeps, resolvedDeps) - } - - return nil - }) - } -} - -func rustTransitiveDeps(content []byte, packageManager string, workspaces map[turbopath.AnchoredUnixPath]map[string]string, resolutions map[string]string) (map[turbopath.AnchoredUnixPath]mapset.Set, error) { - processedWorkspaces := make(map[string]map[string]string, len(workspaces)) - for workspacePath, workspace := range workspaces { - processedWorkspaces[workspacePath.ToString()] = workspace - } - workspaceDeps, err := ffi.TransitiveDeps(content, packageManager, processedWorkspaces, resolutions) - if err != nil { - return nil, err - } - resolvedWorkspaces := make(map[turbopath.AnchoredUnixPath]mapset.Set, len(workspaceDeps)) - for workspace, dependencies := range workspaceDeps { - depsSet := mapset.NewSet() - for _, pkg := range dependencies.GetList() { - depsSet.Add(Package{ - Found: pkg.Found, - Key: pkg.Key, - Version: pkg.Version, - }) - } - workspacePath := turbopath.AnchoredUnixPath(workspace) - resolvedWorkspaces[workspacePath] = depsSet - } - return resolvedWorkspaces, nil -} diff --git a/cli/internal/lockfile/lockfile_test.go b/cli/internal/lockfile/lockfile_test.go deleted file mode 100644 index 1ffedc58b1a2d..0000000000000 --- a/cli/internal/lockfile/lockfile_test.go +++ /dev/null @@ -1,70 +0,0 @@ -package lockfile - -import ( - "io" - "reflect" - "sort" - "testing" - - "github.com/vercel/turbo/cli/internal/turbopath" - "gotest.tools/v3/assert" -) - -func Test_ByKeySortIsStable(t *testing.T) { - packagesA := []Package{ - {"/foo/1.2.3", "1.2.3", true}, - {"/baz/1.0.9", "/baz/1.0.9", true}, - {"/bar/1.2.3", "1.2.3", true}, - {"/foo/1.2.3", "/foo/1.2.3", true}, - {"/baz/1.0.9", 
"1.0.9", true}, - } - packagesB := make([]Package, len(packagesA)) - copy(packagesB, packagesA) - - sort.Sort(ByKey(packagesA)) - sort.Sort(ByKey(packagesB)) - - assert.DeepEqual(t, packagesA, packagesB) -} - -type mockLockfile struct{} - -func (m *mockLockfile) ResolvePackage(_ turbopath.AnchoredUnixPath, _ string, _ string) (Package, error) { - panic("unimplemented") -} - -func (m *mockLockfile) AllDependencies(_ string) (map[string]string, bool) { - panic("unimplemented") -} - -func (m *mockLockfile) Subgraph(_ []turbopath.AnchoredSystemPath, _ []string) (Lockfile, error) { - panic("unimplemented") -} - -func (m *mockLockfile) Encode(_ io.Writer) error { - panic("unimplemented") -} - -func (m *mockLockfile) Patches() []turbopath.AnchoredUnixPath { - panic("unimplemented") -} - -func (m *mockLockfile) GlobalChange(_ Lockfile) bool { - panic("unimplemented") -} - -var _ (Lockfile) = (*mockLockfile)(nil) - -func Test_AllTransitiveClosureReturnsEmptySets(t *testing.T) { - closures, err := AllTransitiveClosures(map[turbopath.AnchoredUnixPath]map[string]string{ - turbopath.AnchoredUnixPath("."): {}, - turbopath.AnchoredUnixPath("a"): {}, - turbopath.AnchoredUnixPath("b"): {}, - }, &mockLockfile{}) - assert.NilError(t, err) - assert.Assert(t, len(closures) == 3) - for _, closure := range closures { - assert.Assert(t, closure != nil && !reflect.ValueOf(closure).IsNil()) - assert.Equal(t, closure.Cardinality(), 0) - } -} diff --git a/cli/internal/lockfile/npm_lockfile.go b/cli/internal/lockfile/npm_lockfile.go deleted file mode 100644 index d7414544c3168..0000000000000 --- a/cli/internal/lockfile/npm_lockfile.go +++ /dev/null @@ -1,44 +0,0 @@ -package lockfile - -import ( - "github.com/vercel/turbo/cli/internal/ffi" - "github.com/vercel/turbo/cli/internal/turbopath" -) - -// NpmLockfile representation of package-lock.json -type NpmLockfile struct { - // We just story the entire lockfile in memory and pass it for every call - contents []byte -} - -// ResolvePackage Given a workspace, a package it imports and version returns the key, resolved version, and if it was found -func (l *NpmLockfile) ResolvePackage(workspacePath turbopath.AnchoredUnixPath, name string, version string) (Package, error) { - // This is only used when doing calculating the transitive deps, but Rust - // implementations do this calculation on the Rust side. - panic("Unreachable") -} - -// AllDependencies Given a lockfile key return all (dev/optional/peer) dependencies of that package -func (l *NpmLockfile) AllDependencies(key string) (map[string]string, bool) { - // This is only used when doing calculating the transitive deps, but Rust - // implementations do this calculation on the Rust side. - panic("Unreachable") -} - -// GlobalChange checks if there are any differences between lockfiles that would completely invalidate -// the cache. 
diff --git a/cli/internal/lockfile/npm_lockfile.go b/cli/internal/lockfile/npm_lockfile.go deleted file mode 100644 index d7414544c3168..0000000000000 --- a/cli/internal/lockfile/npm_lockfile.go +++ /dev/null @@ -1,44 +0,0 @@ -package lockfile - -import ( - "github.com/vercel/turbo/cli/internal/ffi" - "github.com/vercel/turbo/cli/internal/turbopath" -) - -// NpmLockfile representation of package-lock.json -type NpmLockfile struct { - // We just store the entire lockfile in memory and pass it for every call - contents []byte -} - -// ResolvePackage Given a workspace, a package it imports and version returns the key, resolved version, and if it was found -func (l *NpmLockfile) ResolvePackage(workspacePath turbopath.AnchoredUnixPath, name string, version string) (Package, error) { - // This is only used when calculating the transitive deps, but Rust - // implementations do this calculation on the Rust side. - panic("Unreachable") -} - -// AllDependencies Given a lockfile key return all (dev/optional/peer) dependencies of that package -func (l *NpmLockfile) AllDependencies(key string) (map[string]string, bool) { - // This is only used when calculating the transitive deps, but Rust - // implementations do this calculation on the Rust side. - panic("Unreachable") -} - -// GlobalChange checks if there are any differences between lockfiles that would completely invalidate -// the cache. -func (l *NpmLockfile) GlobalChange(other Lockfile) bool { - o, ok := other.(*NpmLockfile) - if !ok { - return true - } - - return ffi.GlobalChange("npm", o.contents, l.contents) -} - -var _ (Lockfile) = (*NpmLockfile)(nil) - -// DecodeNpmLockfile Parses contents of package-lock.json into NpmLockfile -func DecodeNpmLockfile(contents []byte) (Lockfile, error) { - return &NpmLockfile{contents: contents}, nil -} diff --git a/cli/internal/lockfile/npm_lockfile_test.go b/cli/internal/lockfile/npm_lockfile_test.go deleted file mode 100644 index 5dfd327b08f7b..0000000000000 --- a/cli/internal/lockfile/npm_lockfile_test.go +++ /dev/null @@ -1,74 +0,0 @@ -package lockfile - -import ( - "os" - "testing" - - "github.com/vercel/turbo/cli/internal/turbopath" - "gotest.tools/v3/assert" -) - -func getRustFixture(t *testing.T, fixture string) []byte { - defaultCwd, err := os.Getwd() - assert.NilError(t, err, "getRustFixture") - cwd := turbopath.AbsoluteSystemPath(defaultCwd) - lockfilePath := cwd.UntypedJoin("../../../crates/turborepo-lockfiles/fixtures", fixture) - if !lockfilePath.FileExists() { - t.Errorf("unable to find 'turborepo-lockfiles/fixtures/%s'", fixture) - } - bytes, err := os.ReadFile(lockfilePath.ToString()) - assert.NilError(t, err, "unable to read fixture") - return bytes -} - -func getNpmFixture(t *testing.T, fixture string) Lockfile { - bytes := getRustFixture(t, fixture) - lf, err := DecodeNpmLockfile(bytes) - assert.NilError(t, err) - return lf -} - -func TestAllDependenciesNpm(t *testing.T) { - lf := getNpmFixture(t, "npm-lock.json") - closures, err := AllTransitiveClosures(map[turbopath.AnchoredUnixPath]map[string]string{ - turbopath.AnchoredUnixPath(""): { - "turbo": "latest", - "prettier": "latest", - }, - turbopath.AnchoredUnixPath("apps/web"): { - "lodash": "^4.17.21", - "next": "12.3.0", - }, - }, lf) - assert.NilError(t, err) - assert.Equal(t, len(closures), 2) - rootClosure := closures[turbopath.AnchoredUnixPath("")] - webClosure := closures[turbopath.AnchoredUnixPath("apps/web")] - - assert.Assert(t, rootClosure.Contains(Package{ - Key: "node_modules/turbo", - Version: "1.5.5", - Found: true, - })) - assert.Assert(t, rootClosure.Contains(Package{ - Key: "node_modules/turbo-darwin-64", - Version: "1.5.5", - Found: true, - })) - - assert.Assert(t, webClosure.Contains(Package{ - Key: "apps/web/node_modules/lodash", - Version: "4.17.21", - Found: true, - })) - assert.Assert(t, webClosure.Contains(Package{ - Key: "node_modules/next", - Version: "12.3.0", - Found: true, - })) - assert.Assert(t, webClosure.Contains(Package{ - Key: "node_modules/postcss", - Version: "8.4.14", - Found: true, - })) -}
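The berry, bun, and npm types above (and the pnpm one below) share a thin-wrapper pattern: Go holds the raw bytes, Rust does the real work over FFI. As a sketch, a hypothetical new package manager would plug in like this; the "acme" slug and ffi support for it are assumptions, and AllTransitiveClosures would also need a matching special case.

package lockfile

import (
	"github.com/vercel/turbo/cli/internal/ffi"
	"github.com/vercel/turbo/cli/internal/turbopath"
)

// AcmeLockfile is a hypothetical FFI-backed lockfile, shown only to
// illustrate the wrapper pattern used by the real implementations.
type AcmeLockfile struct {
	contents []byte
}

var _ Lockfile = (*AcmeLockfile)(nil)

// ResolvePackage and AllDependencies are dead code for FFI-backed lockfiles;
// the transitive-dep crawl happens on the Rust side.
func (l *AcmeLockfile) ResolvePackage(_ turbopath.AnchoredUnixPath, _ string, _ string) (Package, error) {
	panic("Unreachable")
}

func (l *AcmeLockfile) AllDependencies(_ string) (map[string]string, bool) {
	panic("Unreachable")
}

// GlobalChange defers the comparison to Rust; "acme" support in the ffi
// package is an assumption of this sketch.
func (l *AcmeLockfile) GlobalChange(other Lockfile) bool {
	o, ok := other.(*AcmeLockfile)
	if !ok {
		return true
	}
	return ffi.GlobalChange("acme", o.contents, l.contents)
}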
diff --git a/cli/internal/lockfile/pnpm_lockfile.go b/cli/internal/lockfile/pnpm_lockfile.go deleted file mode 100644 index 8af887b22b1dc..0000000000000 --- a/cli/internal/lockfile/pnpm_lockfile.go +++ /dev/null @@ -1,44 +0,0 @@ -package lockfile - -import ( - "github.com/vercel/turbo/cli/internal/ffi" - "github.com/vercel/turbo/cli/internal/turbopath" -) - -// PnpmLockfile Go representation of the contents of 'pnpm-lock.yaml' -// Reference https://github.com/pnpm/pnpm/blob/main/packages/lockfile-types/src/index.ts -type PnpmLockfile struct { - contents []byte -} - -var _ Lockfile = (*PnpmLockfile)(nil) - -// DecodePnpmLockfile parses a pnpm lockfile -func DecodePnpmLockfile(contents []byte) (*PnpmLockfile, error) { - return &PnpmLockfile{contents: contents}, nil -} - -// ResolvePackage Given a package and version returns the key, resolved version, and if it was found -func (p *PnpmLockfile) ResolvePackage(workspacePath turbopath.AnchoredUnixPath, name string, version string) (Package, error) { - // This is only used when calculating the transitive deps, but Rust - // implementations do this calculation on the Rust side. - panic("Unreachable") -} - -// AllDependencies Given a lockfile key return all (dev/optional/peer) dependencies of that package -func (p *PnpmLockfile) AllDependencies(key string) (map[string]string, bool) { - // This is only used when calculating the transitive deps, but Rust - // implementations do this calculation on the Rust side. - panic("Unreachable") -} - -// GlobalChange checks if there are any differences between lockfiles that would completely invalidate -// the cache. -func (p *PnpmLockfile) GlobalChange(other Lockfile) bool { - o, ok := other.(*PnpmLockfile) - if !ok { - return true - } - - return ffi.GlobalChange("pnpm", o.contents, p.contents) -} diff --git a/cli/internal/lockfile/pnpm_lockfile_test.go b/cli/internal/lockfile/pnpm_lockfile_test.go deleted file mode 100644 index e39720508f8a2..0000000000000 --- a/cli/internal/lockfile/pnpm_lockfile_test.go +++ /dev/null @@ -1,53 +0,0 @@ -package lockfile - -import ( - "os" - "sort" - "testing" - - "github.com/pkg/errors" - "github.com/vercel/turbo/cli/internal/turbopath" - "gotest.tools/v3/assert" -) - -func getFixture(t *testing.T, name string) ([]byte, error) { - defaultCwd, err := os.Getwd() - if err != nil { - t.Errorf("failed to get cwd: %v", err) - } - cwd := turbopath.AbsoluteSystemPath(defaultCwd) - lockfilePath := cwd.UntypedJoin("testdata", name) - if !lockfilePath.FileExists() { - return nil, errors.Errorf("unable to find 'testdata/%s'", name) - } - return os.ReadFile(lockfilePath.ToString()) -} - -func Test_PnpmAliasesOverlap(t *testing.T) { - contents, err := getFixture(t, "pnpm-absolute.yaml") - assert.NilError(t, err) - - lockfile, err := DecodePnpmLockfile(contents) - assert.NilError(t, err) - - closures, err := AllTransitiveClosures(map[turbopath.AnchoredUnixPath]map[string]string{"packages/a": {"@scope/parent": "^1.0.0", "another": "^1.0.0", "special": "npm:Special@1.2.3"}}, lockfile) - assert.NilError(t, err) - - closure, ok := closures[turbopath.AnchoredUnixPath("packages/a")] - assert.Assert(t, ok) - - deps := []Package{} - for _, v := range closure.ToSlice() { - dep := v.(Package) - deps = append(deps, dep) - } - sort.Sort(ByKey(deps)) - - assert.DeepEqual(t, deps, []Package{ - {"/@scope/child/1.0.0", "1.0.0", true}, - {"/@scope/parent/1.0.0", "1.0.0", true}, - {"/Special/1.2.3", "1.2.3", true}, - {"/another/1.0.0", "1.0.0", true}, - {"/foo/1.0.0", "1.0.0", true}, - }) -} diff --git a/cli/internal/lockfile/testdata/berry.lock b/cli/internal/lockfile/testdata/berry.lock deleted file mode 100644 index f4436e4aeb2f6..0000000000000 --- a/cli/internal/lockfile/testdata/berry.lock +++ /dev/null @@ -1,3283 +0,0 @@ -# This file is generated by running "yarn install" inside your project. -# Manual changes might be lost - proceed with caution!
- -__metadata: - version: 6 - cacheKey: 8c0 - -"@ampproject/remapping@npm:^2.1.0": - version: 2.2.0 - resolution: "@ampproject/remapping@npm:2.2.0" - dependencies: - "@jridgewell/gen-mapping": ^0.1.0 - "@jridgewell/trace-mapping": ^0.3.9 - checksum: d74d170d06468913921d72430259424b7e4c826b5a7d39ff839a29d547efb97dc577caa8ba3fb5cf023624e9af9d09651afc3d4112a45e2050328abc9b3a2292 - languageName: node - linkType: hard - -"@babel/code-frame@npm:7.12.11": - version: 7.12.11 - resolution: "@babel/code-frame@npm:7.12.11" - dependencies: - "@babel/highlight": ^7.10.4 - checksum: 3963eff3ebfb0e091c7e6f99596ef4b258683e4ba8a134e4e95f77afe85be5c931e184fff6435fb4885d12eba04a5e25532f7fbc292ca13b48e7da943474e2f3 - languageName: node - linkType: hard - -"@babel/code-frame@npm:^7.18.6": - version: 7.18.6 - resolution: "@babel/code-frame@npm:7.18.6" - dependencies: - "@babel/highlight": ^7.18.6 - checksum: 195e2be3172d7684bf95cff69ae3b7a15a9841ea9d27d3c843662d50cdd7d6470fd9c8e64be84d031117e4a4083486effba39f9aef6bbb2c89f7f21bcfba33ba - languageName: node - linkType: hard - -"@babel/compat-data@npm:^7.19.1": - version: 7.19.1 - resolution: "@babel/compat-data@npm:7.19.1" - checksum: f985887ea08a140e4af87a94d3fb17af0345491eb97f5a85b1840255c2e2a97429f32a8fd12a7aae9218af5f1024f1eb12a5cd280d2d69b2337583c17ea506ba - languageName: node - linkType: hard - -"@babel/core@npm:^7.0.0": - version: 7.19.1 - resolution: "@babel/core@npm:7.19.1" - dependencies: - "@ampproject/remapping": ^2.1.0 - "@babel/code-frame": ^7.18.6 - "@babel/generator": ^7.19.0 - "@babel/helper-compilation-targets": ^7.19.1 - "@babel/helper-module-transforms": ^7.19.0 - "@babel/helpers": ^7.19.0 - "@babel/parser": ^7.19.1 - "@babel/template": ^7.18.10 - "@babel/traverse": ^7.19.1 - "@babel/types": ^7.19.0 - convert-source-map: ^1.7.0 - debug: ^4.1.0 - gensync: ^1.0.0-beta.2 - json5: ^2.2.1 - semver: ^6.3.0 - checksum: 941c8c119b80bdba5fafc80bbaa424d51146b6d3c30b8fae35879358dd37c11d3d0926bc7e970a0861229656eedaa8c884d4a3a25cc904086eb73b827a2f1168 - languageName: node - linkType: hard - -"@babel/generator@npm:^7.19.0": - version: 7.19.0 - resolution: "@babel/generator@npm:7.19.0" - dependencies: - "@babel/types": ^7.19.0 - "@jridgewell/gen-mapping": ^0.3.2 - jsesc: ^2.5.1 - checksum: aa3d5785cf8f8e81672dcc61aef351188efeadb20d9f66d79113d82cbcf3bbbdeb829989fa14582108572ddbc4e4027bdceb06ccaf5ec40fa93c2dda8fbcd4aa - languageName: node - linkType: hard - -"@babel/helper-compilation-targets@npm:^7.19.1": - version: 7.19.1 - resolution: "@babel/helper-compilation-targets@npm:7.19.1" - dependencies: - "@babel/compat-data": ^7.19.1 - "@babel/helper-validator-option": ^7.18.6 - browserslist: ^4.21.3 - semver: ^6.3.0 - peerDependencies: - "@babel/core": ^7.0.0 - checksum: c2d3039265e498b341a6b597f855f2fcef02659050fefedf36ad4e6815e6aafe1011a761214cc80d98260ed07ab15a8cbe959a0458e97bec5f05a450e1b1741b - languageName: node - linkType: hard - -"@babel/helper-environment-visitor@npm:^7.18.9": - version: 7.18.9 - resolution: "@babel/helper-environment-visitor@npm:7.18.9" - checksum: b25101f6162ddca2d12da73942c08ad203d7668e06663df685634a8fde54a98bc015f6f62938e8554457a592a024108d45b8f3e651fd6dcdb877275b73cc4420 - languageName: node - linkType: hard - -"@babel/helper-function-name@npm:^7.19.0": - version: 7.19.0 - resolution: "@babel/helper-function-name@npm:7.19.0" - dependencies: - "@babel/template": ^7.18.10 - "@babel/types": ^7.19.0 - checksum: 
eac1f5db428ba546270c2b8d750c24eb528b8fcfe50c81de2e0bdebf0e20f24bec688d4331533b782e4a907fad435244621ca2193cfcf80a86731299840e0f6e - languageName: node - linkType: hard - -"@babel/helper-hoist-variables@npm:^7.18.6": - version: 7.18.6 - resolution: "@babel/helper-hoist-variables@npm:7.18.6" - dependencies: - "@babel/types": ^7.18.6 - checksum: fd9c35bb435fda802bf9ff7b6f2df06308a21277c6dec2120a35b09f9de68f68a33972e2c15505c1a1a04b36ec64c9ace97d4a9e26d6097b76b4396b7c5fa20f - languageName: node - linkType: hard - -"@babel/helper-module-imports@npm:^7.18.6": - version: 7.18.6 - resolution: "@babel/helper-module-imports@npm:7.18.6" - dependencies: - "@babel/types": ^7.18.6 - checksum: f393f8a3b3304b1b7a288a38c10989de754f01d29caf62ce7c4e5835daf0a27b81f3ac687d9d2780d39685aae7b55267324b512150e7b2be967b0c493b6a1def - languageName: node - linkType: hard - -"@babel/helper-module-transforms@npm:^7.19.0": - version: 7.19.0 - resolution: "@babel/helper-module-transforms@npm:7.19.0" - dependencies: - "@babel/helper-environment-visitor": ^7.18.9 - "@babel/helper-module-imports": ^7.18.6 - "@babel/helper-simple-access": ^7.18.6 - "@babel/helper-split-export-declaration": ^7.18.6 - "@babel/helper-validator-identifier": ^7.18.6 - "@babel/template": ^7.18.10 - "@babel/traverse": ^7.19.0 - "@babel/types": ^7.19.0 - checksum: 4483276c66f56cf3b5b063634092ad9438c2593725de5c143ba277dda82f1501e6d73b311c1b28036f181dbe36eaeff29f24726cde37a599d4e735af294e5359 - languageName: node - linkType: hard - -"@babel/helper-simple-access@npm:^7.18.6": - version: 7.18.6 - resolution: "@babel/helper-simple-access@npm:7.18.6" - dependencies: - "@babel/types": ^7.18.6 - checksum: 37cd36eef199e0517845763c1e6ff6ea5e7876d6d707a6f59c9267c547a50aa0e84260ba9285d49acfaf2cfa0a74a772d92967f32ac1024c961517d40b6c16a5 - languageName: node - linkType: hard - -"@babel/helper-split-export-declaration@npm:^7.18.6": - version: 7.18.6 - resolution: "@babel/helper-split-export-declaration@npm:7.18.6" - dependencies: - "@babel/types": ^7.18.6 - checksum: c6d3dede53878f6be1d869e03e9ffbbb36f4897c7cc1527dc96c56d127d834ffe4520a6f7e467f5b6f3c2843ea0e81a7819d66ae02f707f6ac057f3d57943a2b - languageName: node - linkType: hard - -"@babel/helper-string-parser@npm:^7.18.10": - version: 7.18.10 - resolution: "@babel/helper-string-parser@npm:7.18.10" - checksum: d554a4393365b624916b5c00a4cc21c990c6617e7f3fe30be7d9731f107f12c33229a7a3db9d829bfa110d2eb9f04790745d421640e3bd245bb412dc0ea123c1 - languageName: node - linkType: hard - -"@babel/helper-validator-identifier@npm:^7.18.6": - version: 7.19.1 - resolution: "@babel/helper-validator-identifier@npm:7.19.1" - checksum: 0eca5e86a729162af569b46c6c41a63e18b43dbe09fda1d2a3c8924f7d617116af39cac5e4cd5d431bb760b4dca3c0970e0c444789b1db42bcf1fa41fbad0a3a - languageName: node - linkType: hard - -"@babel/helper-validator-option@npm:^7.18.6": - version: 7.18.6 - resolution: "@babel/helper-validator-option@npm:7.18.6" - checksum: f9cc6eb7cc5d759c5abf006402180f8d5e4251e9198197428a97e05d65eb2f8ae5a0ce73b1dfd2d35af41d0eb780627a64edf98a4e71f064eeeacef8de58f2cf - languageName: node - linkType: hard - -"@babel/helpers@npm:^7.19.0": - version: 7.19.0 - resolution: "@babel/helpers@npm:7.19.0" - dependencies: - "@babel/template": ^7.18.10 - "@babel/traverse": ^7.19.0 - "@babel/types": ^7.19.0 - checksum: e50e78e0dbb0435075fa3f85021a6bcae529589800bca0292721afd7f7c874bea54508d6dc57eca16e5b8224f8142c6b0e32e3b0140029dc09865da747da4623 - languageName: node - linkType: hard - -"@babel/highlight@npm:^7.10.4, @babel/highlight@npm:^7.18.6": - 
version: 7.18.6 - resolution: "@babel/highlight@npm:7.18.6" - dependencies: - "@babel/helper-validator-identifier": ^7.18.6 - chalk: ^2.0.0 - js-tokens: ^4.0.0 - checksum: 92d8ee61549de5ff5120e945e774728e5ccd57fd3b2ed6eace020ec744823d4a98e242be1453d21764a30a14769ecd62170fba28539b211799bbaf232bbb2789 - languageName: node - linkType: hard - -"@babel/parser@npm:^7.18.10, @babel/parser@npm:^7.19.1": - version: 7.19.1 - resolution: "@babel/parser@npm:7.19.1" - bin: - parser: ./bin/babel-parser.js - checksum: b1e0acb346b2a533c857e1e97ac0886cdcbd76aafef67835a2b23f760c10568eb53ad8a27dd5f862d8ba4e583742e6067f107281ccbd68959d61bc61e4ddaa51 - languageName: node - linkType: hard - -"@babel/runtime-corejs3@npm:^7.10.2": - version: 7.19.1 - resolution: "@babel/runtime-corejs3@npm:7.19.1" - dependencies: - core-js-pure: ^3.25.1 - regenerator-runtime: ^0.13.4 - checksum: 38a1e8fcd2ba1f76c951259c98a5a11052123923adbf30ec8b2fec202dbbe38c6db61658ef9398e00c30f799e2e54ea036e56a09f43229261918bf5ec1b7d03a - languageName: node - linkType: hard - -"@babel/runtime@npm:^7.10.2, @babel/runtime@npm:^7.18.9": - version: 7.19.0 - resolution: "@babel/runtime@npm:7.19.0" - dependencies: - regenerator-runtime: ^0.13.4 - checksum: fa69c351bb05e1db3ceb9a02fdcf620c234180af68cdda02152d3561015f6d55277265d3109815992f96d910f3db709458cae4f8df1c3def66f32e0867d82294 - languageName: node - linkType: hard - -"@babel/template@npm:^7.18.10": - version: 7.18.10 - resolution: "@babel/template@npm:7.18.10" - dependencies: - "@babel/code-frame": ^7.18.6 - "@babel/parser": ^7.18.10 - "@babel/types": ^7.18.10 - checksum: 93a6aa094af5f355a72bd55f67fa1828a046c70e46f01b1606e6118fa1802b6df535ca06be83cc5a5e834022be95c7b714f0a268b5f20af984465a71e28f1473 - languageName: node - linkType: hard - -"@babel/traverse@npm:^7.19.0, @babel/traverse@npm:^7.19.1": - version: 7.19.1 - resolution: "@babel/traverse@npm:7.19.1" - dependencies: - "@babel/code-frame": ^7.18.6 - "@babel/generator": ^7.19.0 - "@babel/helper-environment-visitor": ^7.18.9 - "@babel/helper-function-name": ^7.19.0 - "@babel/helper-hoist-variables": ^7.18.6 - "@babel/helper-split-export-declaration": ^7.18.6 - "@babel/parser": ^7.19.1 - "@babel/types": ^7.19.0 - debug: ^4.1.0 - globals: ^11.1.0 - checksum: 9d782b5089ebc989e54c2406814ed1206cb745ed2734e6602dee3e23d4b6ebbb703ff86e536276630f8de83fda6cde99f0634e3c3d847ddb40572d0303ba8800 - languageName: node - linkType: hard - -"@babel/types@npm:^7.18.10, @babel/types@npm:^7.18.6, @babel/types@npm:^7.19.0, @babel/types@npm:^7.8.3": - version: 7.19.0 - resolution: "@babel/types@npm:7.19.0" - dependencies: - "@babel/helper-string-parser": ^7.18.10 - "@babel/helper-validator-identifier": ^7.18.6 - to-fast-properties: ^2.0.0 - checksum: 9b346715a68aeede70ba9c685a144b0b26c53bcd595d448e24c8fa8df4d5956a5712e56ebadb7c85dcc32f218ee42788e37b93d50d3295c992072224cb3ef3fe - languageName: node - linkType: hard - -"@eslint/eslintrc@npm:^0.4.3": - version: 0.4.3 - resolution: "@eslint/eslintrc@npm:0.4.3" - dependencies: - ajv: ^6.12.4 - debug: ^4.1.1 - espree: ^7.3.0 - globals: ^13.9.0 - ignore: ^4.0.6 - import-fresh: ^3.2.1 - js-yaml: ^3.13.1 - minimatch: ^3.0.4 - strip-json-comments: ^3.1.1 - checksum: 03a7704150b868c318aab6a94d87a33d30dc2ec579d27374575014f06237ba1370ae11178db772f985ef680d469dc237e7b16a1c5d8edaaeb8c3733e7a95a6d3 - languageName: node - linkType: hard - -"@humanwhocodes/config-array@npm:^0.5.0": - version: 0.5.0 - resolution: "@humanwhocodes/config-array@npm:0.5.0" - dependencies: - "@humanwhocodes/object-schema": ^1.2.0 - debug: ^4.1.1 - 
minimatch: ^3.0.4 - checksum: 44ee6a9f05d93dd9d5935a006b17572328ba9caff8002442f601736cbda79c580cc0f5a49ce9eb88fbacc5c3a6b62098357c2e95326cd17bb9f1a6c61d6e95e7 - languageName: node - linkType: hard - -"@humanwhocodes/object-schema@npm:^1.2.0": - version: 1.2.1 - resolution: "@humanwhocodes/object-schema@npm:1.2.1" - checksum: a824a1ec31591231e4bad5787641f59e9633827d0a2eaae131a288d33c9ef0290bd16fda8da6f7c0fcb014147865d12118df10db57f27f41e20da92369fcb3f1 - languageName: node - linkType: hard - -"@jridgewell/gen-mapping@npm:^0.1.0": - version: 0.1.1 - resolution: "@jridgewell/gen-mapping@npm:0.1.1" - dependencies: - "@jridgewell/set-array": ^1.0.0 - "@jridgewell/sourcemap-codec": ^1.4.10 - checksum: 3bcc21fe786de6ffbf35c399a174faab05eb23ce6a03e8769569de28abbf4facc2db36a9ddb0150545ae23a8d35a7cf7237b2aa9e9356a7c626fb4698287d5cc - languageName: node - linkType: hard - -"@jridgewell/gen-mapping@npm:^0.3.2": - version: 0.3.2 - resolution: "@jridgewell/gen-mapping@npm:0.3.2" - dependencies: - "@jridgewell/set-array": ^1.0.1 - "@jridgewell/sourcemap-codec": ^1.4.10 - "@jridgewell/trace-mapping": ^0.3.9 - checksum: 1832707a1c476afebe4d0fbbd4b9434fdb51a4c3e009ab1e9938648e21b7a97049fa6009393bdf05cab7504108413441df26d8a3c12193996e65493a4efb6882 - languageName: node - linkType: hard - -"@jridgewell/resolve-uri@npm:^3.0.3": - version: 3.1.0 - resolution: "@jridgewell/resolve-uri@npm:3.1.0" - checksum: b5ceaaf9a110fcb2780d1d8f8d4a0bfd216702f31c988d8042e5f8fbe353c55d9b0f55a1733afdc64806f8e79c485d2464680ac48a0d9fcadb9548ee6b81d267 - languageName: node - linkType: hard - -"@jridgewell/set-array@npm:^1.0.0, @jridgewell/set-array@npm:^1.0.1": - version: 1.1.2 - resolution: "@jridgewell/set-array@npm:1.1.2" - checksum: 69a84d5980385f396ff60a175f7177af0b8da4ddb81824cb7016a9ef914eee9806c72b6b65942003c63f7983d4f39a5c6c27185bbca88eb4690b62075602e28e - languageName: node - linkType: hard - -"@jridgewell/sourcemap-codec@npm:^1.4.10": - version: 1.4.14 - resolution: "@jridgewell/sourcemap-codec@npm:1.4.14" - checksum: 61100637b6d173d3ba786a5dff019e1a74b1f394f323c1fee337ff390239f053b87266c7a948777f4b1ee68c01a8ad0ab61e5ff4abb5a012a0b091bec391ab97 - languageName: node - linkType: hard - -"@jridgewell/trace-mapping@npm:^0.3.9": - version: 0.3.15 - resolution: "@jridgewell/trace-mapping@npm:0.3.15" - dependencies: - "@jridgewell/resolve-uri": ^3.0.3 - "@jridgewell/sourcemap-codec": ^1.4.10 - checksum: 38917e9c2b014d469a9f51c016ed506acbe44dd16ec2f6f99b553ebf3764d22abadbf992f2367b6d2b3511f3eae8ed3a8963f6c1030093fda23efd35ecab2bae - languageName: node - linkType: hard - -"@next/env@npm:12.2.5": - version: 12.2.5 - resolution: "@next/env@npm:12.2.5" - checksum: a44939e59b46d5951831529a43dba9daa2e4e467e8680ea96e21ae127d1bf7f11757aaf3a6cff8a51273abfe7af782903e1304405a481361c7ba3e66d47e3238 - languageName: node - linkType: hard - -"@next/eslint-plugin-next@npm:12.3.0": - version: 12.3.0 - resolution: "@next/eslint-plugin-next@npm:12.3.0" - dependencies: - glob: 7.1.7 - checksum: f08582b36ff01a776183b3c33d6d81be3a110c1c3c39c81a33aff91277ea822aa4a952d4f2271a08ce56692ca5c58c9e958aaf4e08348c10cc45a85213b208f0 - languageName: node - linkType: hard - -"@next/swc-android-arm-eabi@npm:12.2.5": - version: 12.2.5 - resolution: "@next/swc-android-arm-eabi@npm:12.2.5" - conditions: os=android & cpu=arm - languageName: node - linkType: hard - -"@next/swc-android-arm64@npm:12.2.5": - version: 12.2.5 - resolution: "@next/swc-android-arm64@npm:12.2.5" - conditions: os=android & cpu=arm64 - languageName: node - linkType: hard - 
-"@next/swc-darwin-arm64@npm:12.2.5": - version: 12.2.5 - resolution: "@next/swc-darwin-arm64@npm:12.2.5" - conditions: os=darwin & cpu=arm64 - languageName: node - linkType: hard - -"@next/swc-darwin-x64@npm:12.2.5": - version: 12.2.5 - resolution: "@next/swc-darwin-x64@npm:12.2.5" - conditions: os=darwin & cpu=x64 - languageName: node - linkType: hard - -"@next/swc-freebsd-x64@npm:12.2.5": - version: 12.2.5 - resolution: "@next/swc-freebsd-x64@npm:12.2.5" - conditions: os=freebsd & cpu=x64 - languageName: node - linkType: hard - -"@next/swc-linux-arm-gnueabihf@npm:12.2.5": - version: 12.2.5 - resolution: "@next/swc-linux-arm-gnueabihf@npm:12.2.5" - conditions: os=linux & cpu=arm - languageName: node - linkType: hard - -"@next/swc-linux-arm64-gnu@npm:12.2.5": - version: 12.2.5 - resolution: "@next/swc-linux-arm64-gnu@npm:12.2.5" - conditions: os=linux & cpu=arm64 & libc=glibc - languageName: node - linkType: hard - -"@next/swc-linux-arm64-musl@npm:12.2.5": - version: 12.2.5 - resolution: "@next/swc-linux-arm64-musl@npm:12.2.5" - conditions: os=linux & cpu=arm64 & libc=musl - languageName: node - linkType: hard - -"@next/swc-linux-x64-gnu@npm:12.2.5": - version: 12.2.5 - resolution: "@next/swc-linux-x64-gnu@npm:12.2.5" - conditions: os=linux & cpu=x64 & libc=glibc - languageName: node - linkType: hard - -"@next/swc-linux-x64-musl@npm:12.2.5": - version: 12.2.5 - resolution: "@next/swc-linux-x64-musl@npm:12.2.5" - conditions: os=linux & cpu=x64 & libc=musl - languageName: node - linkType: hard - -"@next/swc-win32-arm64-msvc@npm:12.2.5": - version: 12.2.5 - resolution: "@next/swc-win32-arm64-msvc@npm:12.2.5" - conditions: os=win32 & cpu=arm64 - languageName: node - linkType: hard - -"@next/swc-win32-ia32-msvc@npm:12.2.5": - version: 12.2.5 - resolution: "@next/swc-win32-ia32-msvc@npm:12.2.5" - conditions: os=win32 & cpu=ia32 - languageName: node - linkType: hard - -"@next/swc-win32-x64-msvc@npm:12.2.5": - version: 12.2.5 - resolution: "@next/swc-win32-x64-msvc@npm:12.2.5" - conditions: os=win32 & cpu=x64 - languageName: node - linkType: hard - -"@nodelib/fs.scandir@npm:2.1.5": - version: 2.1.5 - resolution: "@nodelib/fs.scandir@npm:2.1.5" - dependencies: - "@nodelib/fs.stat": 2.0.5 - run-parallel: ^1.1.9 - checksum: a970d595bd23c66c880e0ef1817791432dbb7acbb8d44b7e7d0e7a22f4521260d4a83f7f9fd61d44fda4610105577f8f58a60718105fb38352baed612fd79e59 - languageName: node - linkType: hard - -"@nodelib/fs.stat@npm:2.0.5, @nodelib/fs.stat@npm:^2.0.2": - version: 2.0.5 - resolution: "@nodelib/fs.stat@npm:2.0.5" - checksum: 012480b5ca9d97bff9261571dbbec7bbc6033f69cc92908bc1ecfad0792361a5a1994bc48674b9ef76419d056a03efadfce5a6cf6dbc0a36559571a7a483f6f0 - languageName: node - linkType: hard - -"@nodelib/fs.walk@npm:^1.2.3": - version: 1.2.8 - resolution: "@nodelib/fs.walk@npm:1.2.8" - dependencies: - "@nodelib/fs.scandir": 2.1.5 - fastq: ^1.6.0 - checksum: 190c643f156d8f8f277bf2a6078af1ffde1fd43f498f187c2db24d35b4b4b5785c02c7dc52e356497b9a1b65b13edc996de08de0b961c32844364da02986dc53 - languageName: node - linkType: hard - -"@rushstack/eslint-patch@npm:^1.1.3": - version: 1.2.0 - resolution: "@rushstack/eslint-patch@npm:1.2.0" - checksum: faa749faae0e83c26ae9eb00ad36a897ac78f3cf27da8e8ff21c00bcf7973b598d823d8f2b3957ef66079288bcf577f94df831eae2d65f3f68d8ca32f18b6aff - languageName: node - linkType: hard - -"@swc/helpers@npm:0.4.3": - version: 0.4.3 - resolution: "@swc/helpers@npm:0.4.3" - dependencies: - tslib: ^2.4.0 - checksum: 
5c2f173e950dd3929d84ae48b3586a274d5a874e7cf2013b3d8081e4f8c723fa3a4d4e63b263e84bb7f06431f87b640e91a12655410463c81a3dc2bbc15eceda - languageName: node - linkType: hard - -"@types/json5@npm:^0.0.29": - version: 0.0.29 - resolution: "@types/json5@npm:0.0.29" - checksum: e60b153664572116dfea673c5bda7778dbff150498f44f998e34b5886d8afc47f16799280e4b6e241c0472aef1bc36add771c569c68fc5125fc2ae519a3eb9ac - languageName: node - linkType: hard - -"@types/node@npm:^17.0.12": - version: 17.0.45 - resolution: "@types/node@npm:17.0.45" - checksum: aa04366b9103b7d6cfd6b2ef64182e0eaa7d4462c3f817618486ea0422984c51fc69fd0d436eae6c9e696ddfdbec9ccaa27a917f7c2e8c75c5d57827fe3d95e8 - languageName: node - linkType: hard - -"@types/prop-types@npm:*": - version: 15.7.5 - resolution: "@types/prop-types@npm:15.7.5" - checksum: 5b43b8b15415e1f298243165f1d44390403bb2bd42e662bca3b5b5633fdd39c938e91b7fce3a9483699db0f7a715d08cef220c121f723a634972fdf596aec980 - languageName: node - linkType: hard - -"@types/react-dom@npm:^17.0.11": - version: 17.0.17 - resolution: "@types/react-dom@npm:17.0.17" - dependencies: - "@types/react": ^17 - checksum: 23caf98aa03e968811560f92a2c8f451694253ebe16b670929b24eaf0e7fa62ba549abe9db0ac028a9d8a9086acd6ab9c6c773f163fa21224845edbc00ba6232 - languageName: node - linkType: hard - -"@types/react@npm:18.0.17": - version: 18.0.17 - resolution: "@types/react@npm:18.0.17" - dependencies: - "@types/prop-types": "*" - "@types/scheduler": "*" - csstype: ^3.0.2 - checksum: 18cae64f5bfd6bb58fbd8ee2ba52ec82de844f114254e26de7b513e4b86621f643f9b71d7066958cd571b0d78cb86cbceda449c5289f9349ca573df29ab69252 - languageName: node - linkType: hard - -"@types/react@npm:^17, @types/react@npm:^17.0.37": - version: 17.0.50 - resolution: "@types/react@npm:17.0.50" - dependencies: - "@types/prop-types": "*" - "@types/scheduler": "*" - csstype: ^3.0.2 - checksum: b5629dff7c2f3e9fcba95a19b2b3bfd78d7cacc33ba5fc26413dba653d34afcac3b93ddabe563e8062382688a1eac7db68e93739bb8e712d27637a03aaafbbb8 - languageName: node - linkType: hard - -"@types/scheduler@npm:*": - version: 0.16.2 - resolution: "@types/scheduler@npm:0.16.2" - checksum: b6b4dcfeae6deba2e06a70941860fb1435730576d3689225a421280b7742318d1548b3d22c1f66ab68e414f346a9542f29240bc955b6332c5b11e561077583bc - languageName: node - linkType: hard - -"@typescript-eslint/parser@npm:^5.21.0": - version: 5.37.0 - resolution: "@typescript-eslint/parser@npm:5.37.0" - dependencies: - "@typescript-eslint/scope-manager": 5.37.0 - "@typescript-eslint/types": 5.37.0 - "@typescript-eslint/typescript-estree": 5.37.0 - debug: ^4.3.4 - peerDependencies: - eslint: ^6.0.0 || ^7.0.0 || ^8.0.0 - peerDependenciesMeta: - typescript: - optional: true - checksum: 33343e27c9602820d43ee12de9797365d97a5cf3f716e750fa44de760f2a2c6800f3bc4fa54931ac70c0e0ede77a92224f8151da7f30fed3bf692a029d6659af - languageName: node - linkType: hard - -"@typescript-eslint/scope-manager@npm:5.37.0": - version: 5.37.0 - resolution: "@typescript-eslint/scope-manager@npm:5.37.0" - dependencies: - "@typescript-eslint/types": 5.37.0 - "@typescript-eslint/visitor-keys": 5.37.0 - checksum: 1c439e21ffa63ebaadb8c8363e9d668132a835a28203e5b779366bfa56772f332e5dedb50d63dffb836839b9d9c4e66aa9e3ea47b8c59465b18a0cbd063ec7a3 - languageName: node - linkType: hard - -"@typescript-eslint/types@npm:5.37.0": - version: 5.37.0 - resolution: "@typescript-eslint/types@npm:5.37.0" - checksum: 899e59e7775fa95c2d9fcac5cc02cc49d83af5f1ffc706df495046c3b3733f79d5489568b01bfaf8c9ae4636e057056866adc783113036f774580086d0189f21 - languageName: node - 
linkType: hard - -"@typescript-eslint/typescript-estree@npm:5.37.0": - version: 5.37.0 - resolution: "@typescript-eslint/typescript-estree@npm:5.37.0" - dependencies: - "@typescript-eslint/types": 5.37.0 - "@typescript-eslint/visitor-keys": 5.37.0 - debug: ^4.3.4 - globby: ^11.1.0 - is-glob: ^4.0.3 - semver: ^7.3.7 - tsutils: ^3.21.0 - peerDependenciesMeta: - typescript: - optional: true - checksum: 80365a50fa11ed39bf54d9ef06e264fbbf3bdbcc55b7d7d555ef0be915edae40ec30e98d08b3f6ef048e1874450cbcb1e7d9f429d4f420dacbbde45d3376a7bc - languageName: node - linkType: hard - -"@typescript-eslint/visitor-keys@npm:5.37.0": - version: 5.37.0 - resolution: "@typescript-eslint/visitor-keys@npm:5.37.0" - dependencies: - "@typescript-eslint/types": 5.37.0 - eslint-visitor-keys: ^3.3.0 - checksum: d6193550f77413aead0cb267e058df80b80a488c8fb4e39beb5f0a70b971c41682a6391903fbc5f3dd859a872016288c434d631b8efc3ac5a04edbdb7b63b5f6 - languageName: node - linkType: hard - -"acorn-jsx@npm:^5.3.1": - version: 5.3.2 - resolution: "acorn-jsx@npm:5.3.2" - peerDependencies: - acorn: ^6.0.0 || ^7.0.0 || ^8.0.0 - checksum: c3d3b2a89c9a056b205b69530a37b972b404ee46ec8e5b341666f9513d3163e2a4f214a71f4dfc7370f5a9c07472d2fd1c11c91c3f03d093e37637d95da98950 - languageName: node - linkType: hard - -"acorn@npm:^7.4.0": - version: 7.4.1 - resolution: "acorn@npm:7.4.1" - bin: - acorn: bin/acorn - checksum: 1860f23c2107c910c6177b7b7be71be350db9e1080d814493fae143ae37605189504152d1ba8743ba3178d0b37269ce1ffc42b101547fdc1827078f82671e407 - languageName: node - linkType: hard - -"ajv@npm:^6.10.0, ajv@npm:^6.12.4": - version: 6.12.6 - resolution: "ajv@npm:6.12.6" - dependencies: - fast-deep-equal: ^3.1.1 - fast-json-stable-stringify: ^2.0.0 - json-schema-traverse: ^0.4.1 - uri-js: ^4.2.2 - checksum: 874972efe5c4202ab0a68379481fbd3d1b5d0a7bd6d3cc21d40d3536ebff3352a2a1fabb632d4fd2cc7fe4cbdcd5ed6782084c9bbf7f32a1536d18f9da5007d4 - languageName: node - linkType: hard - -"ajv@npm:^8.0.1": - version: 8.11.0 - resolution: "ajv@npm:8.11.0" - dependencies: - fast-deep-equal: ^3.1.1 - json-schema-traverse: ^1.0.0 - require-from-string: ^2.0.2 - uri-js: ^4.2.2 - checksum: 5e0ff226806763be73e93dd7805b634f6f5921e3e90ca04acdf8db81eed9d8d3f0d4c5f1213047f45ebbf8047ffe0c840fa1ef2ec42c3a644899f69aa72b5bef - languageName: node - linkType: hard - -"ansi-colors@npm:^4.1.1": - version: 4.1.3 - resolution: "ansi-colors@npm:4.1.3" - checksum: a9c2ec842038a1fabc7db9ece7d3177e2fe1c5dc6f0c51ecfbf5f39911427b89c00b5dc6b8bd95f82a26e9b16aaae2e83d45f060e98070ce4d1333038edceb0e - languageName: node - linkType: hard - -"ansi-regex@npm:^5.0.1": - version: 5.0.1 - resolution: "ansi-regex@npm:5.0.1" - checksum: 2aa4bb54caf2d622f1afdad09441695af2a83aa3fe8b8afa581d205e57ed4261c183c4d3877cee25794443fde5876417d859c108078ab788d6af7e4fe52eb66b - languageName: node - linkType: hard - -"ansi-styles@npm:^3.2.1": - version: 3.2.1 - resolution: "ansi-styles@npm:3.2.1" - dependencies: - color-convert: ^1.9.0 - checksum: d85ade01c10e5dd77b6c89f34ed7531da5830d2cb5882c645f330079975b716438cd7ebb81d0d6e6b4f9c577f19ae41ab55f07f19786b02f9dfd9e0377395665 - languageName: node - linkType: hard - -"ansi-styles@npm:^4.0.0, ansi-styles@npm:^4.1.0": - version: 4.3.0 - resolution: "ansi-styles@npm:4.3.0" - dependencies: - color-convert: ^2.0.1 - checksum: 513b44c3b2105dd14cc42a19271e80f386466c4be574bccf60b627432f9198571ebf4ab1e4c3ba17347658f4ee1711c163d574248c0c1cdc2d5917a0ad582ec4 - languageName: node - linkType: hard - -"argparse@npm:^1.0.7": - version: 1.0.10 - resolution: "argparse@npm:1.0.10" - 
dependencies: - sprintf-js: ~1.0.2 - checksum: 7ca6e45583a28de7258e39e13d81e925cfa25d7d4aacbf806a382d3c02fcb13403a07fb8aeef949f10a7cfe4a62da0e2e807b348a5980554cc28ee573ef95945 - languageName: node - linkType: hard - -"aria-query@npm:^4.2.2": - version: 4.2.2 - resolution: "aria-query@npm:4.2.2" - dependencies: - "@babel/runtime": ^7.10.2 - "@babel/runtime-corejs3": ^7.10.2 - checksum: 38401a9a400f26f3dcc24b84997461a16b32869a9893d323602bed8da40a8bcc0243b8d2880e942249a1496cea7a7de769e93d21c0baa439f01e1ee936fed665 - languageName: node - linkType: hard - -"array-includes@npm:^3.1.4, array-includes@npm:^3.1.5": - version: 3.1.5 - resolution: "array-includes@npm:3.1.5" - dependencies: - call-bind: ^1.0.2 - define-properties: ^1.1.4 - es-abstract: ^1.19.5 - get-intrinsic: ^1.1.1 - is-string: ^1.0.7 - checksum: f6f24d834179604656b7bec3e047251d5cc87e9e87fab7c175c61af48e80e75acd296017abcde21fb52292ab6a2a449ab2ee37213ee48c8709f004d75983f9c5 - languageName: node - linkType: hard - -"array-union@npm:^2.1.0": - version: 2.1.0 - resolution: "array-union@npm:2.1.0" - checksum: 5bee12395cba82da674931df6d0fea23c4aa4660cb3b338ced9f828782a65caa232573e6bf3968f23e0c5eb301764a382cef2f128b170a9dc59de0e36c39f98d - languageName: node - linkType: hard - -"array.prototype.flat@npm:^1.2.5": - version: 1.3.0 - resolution: "array.prototype.flat@npm:1.3.0" - dependencies: - call-bind: ^1.0.2 - define-properties: ^1.1.3 - es-abstract: ^1.19.2 - es-shim-unscopables: ^1.0.0 - checksum: 2a652b3e8dc0bebb6117e42a5ab5738af0203a14c27341d7bb2431467bdb4b348e2c5dc555dfcda8af0a5e4075c400b85311ded73861c87290a71a17c3e0a257 - languageName: node - linkType: hard - -"array.prototype.flatmap@npm:^1.3.0": - version: 1.3.0 - resolution: "array.prototype.flatmap@npm:1.3.0" - dependencies: - call-bind: ^1.0.2 - define-properties: ^1.1.3 - es-abstract: ^1.19.2 - es-shim-unscopables: ^1.0.0 - checksum: 818538f39409c4045d874be85df0dbd195e1446b14d22f95bdcfefea44ae77db44e42dcd89a559254ec5a7c8b338cfc986cc6d641e3472f9a5326b21eb2976a2 - languageName: node - linkType: hard - -"ast-types-flow@npm:^0.0.7": - version: 0.0.7 - resolution: "ast-types-flow@npm:0.0.7" - checksum: a26dcc2182ffee111cad7c471759b0bda22d3b7ebacf27c348b22c55f16896b18ab0a4d03b85b4020dce7f3e634b8f00b593888f622915096ea1927fa51866c4 - languageName: node - linkType: hard - -"astral-regex@npm:^2.0.0": - version: 2.0.0 - resolution: "astral-regex@npm:2.0.0" - checksum: 876231688c66400473ba505731df37ea436e574dd524520294cc3bbc54ea40334865e01fa0d074d74d036ee874ee7e62f486ea38bc421ee8e6a871c06f011766 - languageName: node - linkType: hard - -"axe-core@npm:^4.4.3": - version: 4.4.3 - resolution: "axe-core@npm:4.4.3" - checksum: c3ea000d9ace3ba0bc747c8feafc24b0de62a0f7d93021d0f77b19c73fca15341843510f6170da563d51535d6cfb7a46c5fc0ea36170549dbb44b170208450a2 - languageName: node - linkType: hard - -"axobject-query@npm:^2.2.0": - version: 2.2.0 - resolution: "axobject-query@npm:2.2.0" - checksum: 96b8c7d807ca525f41ad9b286186e2089b561ba63a6d36c3e7d73dc08150714660995c7ad19cda05784458446a0793b45246db45894631e13853f48c1aa3117f - languageName: node - linkType: hard - -"balanced-match@npm:^1.0.0": - version: 1.0.2 - resolution: "balanced-match@npm:1.0.2" - checksum: 9706c088a283058a8a99e0bf91b0a2f75497f185980d9ffa8b304de1d9e58ebda7c72c07ebf01dadedaac5b2907b2c6f566f660d62bd336c3468e960403b9d65 - languageName: node - linkType: hard - -"berry-patch@workspace:.": - version: 0.0.0-use.local - resolution: "berry-patch@workspace:." 
- dependencies: - eslint-config-custom: "*" - prettier: latest - turbo: latest - languageName: unknown - linkType: soft - -"brace-expansion@npm:^1.1.7": - version: 1.1.11 - resolution: "brace-expansion@npm:1.1.11" - dependencies: - balanced-match: ^1.0.0 - concat-map: 0.0.1 - checksum: faf34a7bb0c3fcf4b59c7808bc5d2a96a40988addf2e7e09dfbb67a2251800e0d14cd2bfc1aa79174f2f5095c54ff27f46fb1289fe2d77dac755b5eb3434cc07 - languageName: node - linkType: hard - -"braces@npm:^3.0.2": - version: 3.0.2 - resolution: "braces@npm:3.0.2" - dependencies: - fill-range: ^7.0.1 - checksum: e2a8e769a863f3d4ee887b5fe21f63193a891c68b612ddb4b68d82d1b5f3ff9073af066c343e9867a393fe4c2555dcb33e89b937195feb9c1613d259edfcd459 - languageName: node - linkType: hard - -"browserslist@npm:^4.21.3": - version: 4.21.4 - resolution: "browserslist@npm:4.21.4" - dependencies: - caniuse-lite: ^1.0.30001400 - electron-to-chromium: ^1.4.251 - node-releases: ^2.0.6 - update-browserslist-db: ^1.0.9 - bin: - browserslist: cli.js - checksum: 4af3793704dbb4615bcd29059ab472344dc7961c8680aa6c4bb84f05340e14038d06a5aead58724eae69455b8fade8b8c69f1638016e87e5578969d74c078b79 - languageName: node - linkType: hard - -"call-bind@npm:^1.0.0, call-bind@npm:^1.0.2": - version: 1.0.2 - resolution: "call-bind@npm:1.0.2" - dependencies: - function-bind: ^1.1.1 - get-intrinsic: ^1.0.2 - checksum: f8e31de9d19988a4b80f3e704788c4a2d6b6f3d17cfec4f57dc29ced450c53a49270dc66bf0fbd693329ee948dd33e6c90a329519aef17474a4d961e8d6426b0 - languageName: node - linkType: hard - -"callsites@npm:^3.0.0": - version: 3.1.0 - resolution: "callsites@npm:3.1.0" - checksum: 072d17b6abb459c2ba96598918b55868af677154bec7e73d222ef95a8fdb9bbf7dae96a8421085cdad8cd190d86653b5b6dc55a4484f2e5b2e27d5e0c3fc15b3 - languageName: node - linkType: hard - -"caniuse-lite@npm:^1.0.30001332, caniuse-lite@npm:^1.0.30001400": - version: 1.0.30001400 - resolution: "caniuse-lite@npm:1.0.30001400" - checksum: 984e29d3c02fd02a59cc92ef4a5e9390fce250de3791056362347cf901f0d91041246961a57cfa8fed800538d03ee341bc4f7eaed19bf7be0ef8a181d94cd848 - languageName: node - linkType: hard - -"chalk@npm:^2.0.0": - version: 2.4.2 - resolution: "chalk@npm:2.4.2" - dependencies: - ansi-styles: ^3.2.1 - escape-string-regexp: ^1.0.5 - supports-color: ^5.3.0 - checksum: ec3661d38fe77f681200f878edbd9448821924e0f93a9cefc0e26a33b145f1027a2084bf19967160d11e1f03bfe4eaffcabf5493b89098b2782c3fe0b03d80c2 - languageName: node - linkType: hard - -"chalk@npm:^4.0.0": - version: 4.1.2 - resolution: "chalk@npm:4.1.2" - dependencies: - ansi-styles: ^4.1.0 - supports-color: ^7.1.0 - checksum: fe75c9d5c76a7a98d45495b91b2172fa3b7a09e0cc9370e5c8feb1c567b85c4288e2b3fded7cfdd7359ac28d6b3844feb8b82b8686842e93d23c827c417e83fc - languageName: node - linkType: hard - -"color-convert@npm:^1.9.0": - version: 1.9.3 - resolution: "color-convert@npm:1.9.3" - dependencies: - color-name: 1.1.3 - checksum: fd7a64a17cde98fb923b1dd05c5f2e6f7aefda1b60d67e8d449f9328b4e53b228a428fd38bfeaeb2db2ff6b6503a776a996150b80cdf224062af08a5c8a3a203 - languageName: node - linkType: hard - -"color-convert@npm:^2.0.1": - version: 2.0.1 - resolution: "color-convert@npm:2.0.1" - dependencies: - color-name: ~1.1.4 - checksum: 79e6bdb9fd479a205c71d89574fccfb22bd9053bd98c6c4d870d65c132e5e904e6034978e55b43d69fcaa7433af2016ee203ce76eeba9cfa554b373e7f7db336 - languageName: node - linkType: hard - -"color-name@npm:1.1.3": - version: 1.1.3 - resolution: "color-name@npm:1.1.3" - checksum: 
09c5d3e33d2105850153b14466501f2bfb30324a2f76568a408763a3b7433b0e50e5b4ab1947868e65cb101bb7cb75029553f2c333b6d4b8138a73fcc133d69d - languageName: node - linkType: hard - -"color-name@npm:~1.1.4": - version: 1.1.4 - resolution: "color-name@npm:1.1.4" - checksum: b0445859521eb4021cd0fb0cc1a75cecf67fceecae89b63f62b201cca8d345baf8b952c966862a9d9a2632987d4f6581f0ec8d957dfacece86f0a7919316f610 - languageName: node - linkType: hard - -"concat-map@npm:0.0.1": - version: 0.0.1 - resolution: "concat-map@npm:0.0.1" - checksum: 902a9f5d8967a3e2faf138d5cb784b9979bad2e6db5357c5b21c568df4ebe62bcb15108af1b2253744844eb964fc023fbd9afbbbb6ddd0bcc204c6fb5b7bf3af - languageName: node - linkType: hard - -"convert-source-map@npm:^1.7.0": - version: 1.8.0 - resolution: "convert-source-map@npm:1.8.0" - dependencies: - safe-buffer: ~5.1.1 - checksum: 985d974a2d33e1a2543ada51c93e1ba2f73eaed608dc39f229afc78f71dcc4c8b7d7c684aa647e3c6a3a204027444d69e53e169ce94e8d1fa8d7dee80c9c8fed - languageName: node - linkType: hard - -"core-js-pure@npm:^3.25.1": - version: 3.25.1 - resolution: "core-js-pure@npm:3.25.1" - checksum: 0123131ec7ab3a1e56f0b4df4ae659de03d9c245ce281637d4d0f18f9839d8e0cfbfa989bd577ce1b67826f889a7dcc734421f697cf1bbe59f605f29c537a678 - languageName: node - linkType: hard - -"cross-spawn@npm:^7.0.2": - version: 7.0.3 - resolution: "cross-spawn@npm:7.0.3" - dependencies: - path-key: ^3.1.0 - shebang-command: ^2.0.0 - which: ^2.0.1 - checksum: 671cc7c7288c3a8406f3c69a3ae2fc85555c04169e9d611def9a675635472614f1c0ed0ef80955d5b6d4e724f6ced67f0ad1bb006c2ea643488fcfef994d7f52 - languageName: node - linkType: hard - -"csstype@npm:^3.0.2": - version: 3.1.1 - resolution: "csstype@npm:3.1.1" - checksum: 1f7b4f5fdd955b7444b18ebdddf3f5c699159f13e9cf8ac9027ae4a60ae226aef9bbb14a6e12ca7dba3358b007cee6354b116e720262867c398de6c955ea451d - languageName: node - linkType: hard - -"damerau-levenshtein@npm:^1.0.8": - version: 1.0.8 - resolution: "damerau-levenshtein@npm:1.0.8" - checksum: d240b7757544460ae0586a341a53110ab0a61126570ef2d8c731e3eab3f0cb6e488e2609e6a69b46727635de49be20b071688698744417ff1b6c1d7ccd03e0de - languageName: node - linkType: hard - -"debug@npm:^2.6.9": - version: 2.6.9 - resolution: "debug@npm:2.6.9" - dependencies: - ms: 2.0.0 - checksum: d2f51589ca66df60bf36e1fa6e4386b318c3f1e06772280eea5b1ae9fd3d05e9c2b7fd8a7d862457d00853c75b00451aa2d7459b924629ee385287a650f58fe6 - languageName: node - linkType: hard - -"debug@npm:^3.2.7": - version: 3.2.7 - resolution: "debug@npm:3.2.7" - dependencies: - ms: ^2.1.1 - checksum: b3d8c5940799914d30314b7c3304a43305fd0715581a919dacb8b3176d024a782062368405b47491516d2091d6462d4d11f2f4974a405048094f8bfebfa3071c - languageName: node - linkType: hard - -"debug@npm:^4.0.1, debug@npm:^4.1.0, debug@npm:^4.1.1, debug@npm:^4.3.4": - version: 4.3.4 - resolution: "debug@npm:4.3.4" - dependencies: - ms: 2.1.2 - peerDependenciesMeta: - supports-color: - optional: true - checksum: 3dbad3f94ea64f34431a9cbf0bafb61853eda57bff2880036153438f50fb5a84f27683ba0d8e5426bf41a8c6ff03879488120cf5b3a761e77953169c0600a708 - languageName: node - linkType: hard - -"deep-is@npm:^0.1.3": - version: 0.1.4 - resolution: "deep-is@npm:0.1.4" - checksum: edb65dd0d7d1b9c40b2f50219aef30e116cedd6fc79290e740972c132c09106d2e80aa0bc8826673dd5a00222d4179c84b36a790eef63a4c4bca75a37ef90804 - languageName: node - linkType: hard - -"define-properties@npm:^1.1.3, define-properties@npm:^1.1.4": - version: 1.1.4 - resolution: "define-properties@npm:1.1.4" - dependencies: - has-property-descriptors: ^1.0.0 - object-keys: ^1.1.1 - 
checksum: ce0aef3f9eb193562b5cfb79b2d2c86b6a109dfc9fdcb5f45d680631a1a908c06824ddcdb72b7573b54e26ace07f0a23420aaba0d5c627b34d2c1de8ef527e2b - languageName: node - linkType: hard - -"dir-glob@npm:^3.0.1": - version: 3.0.1 - resolution: "dir-glob@npm:3.0.1" - dependencies: - path-type: ^4.0.0 - checksum: fa05e18324510d7283f55862f3161c6759a3f2f8dbce491a2fc14c8324c498286c54282c1f0e933cb930da8419b30679389499b919122952a4f8592362ef4615 - languageName: node - linkType: hard - -"docs@workspace:apps/docs": - version: 0.0.0-use.local - resolution: "docs@workspace:apps/docs" - dependencies: - "@babel/core": ^7.0.0 - "@types/node": ^17.0.12 - "@types/react": 18.0.17 - eslint: 7.32.0 - eslint-config-custom: "*" - lodash: ^4.17.21 - next: 12.2.5 - next-transpile-modules: 9.0.0 - react: 18.2.0 - react-dom: 18.2.0 - tsconfig: "*" - typescript: ^4.5.3 - ui: "*" - languageName: unknown - linkType: soft - -"doctrine@npm:^2.1.0": - version: 2.1.0 - resolution: "doctrine@npm:2.1.0" - dependencies: - esutils: ^2.0.2 - checksum: a45e277f7feaed309fe658ace1ff286c6e2002ac515af0aaf37145b8baa96e49899638c7cd47dccf84c3d32abfc113246625b3ac8f552d1046072adee13b0dc8 - languageName: node - linkType: hard - -"doctrine@npm:^3.0.0": - version: 3.0.0 - resolution: "doctrine@npm:3.0.0" - dependencies: - esutils: ^2.0.2 - checksum: fd7673ca77fe26cd5cba38d816bc72d641f500f1f9b25b83e8ce28827fe2da7ad583a8da26ab6af85f834138cf8dae9f69b0cd6ab925f52ddab1754db44d99ce - languageName: node - linkType: hard - -"electron-to-chromium@npm:^1.4.251": - version: 1.4.251 - resolution: "electron-to-chromium@npm:1.4.251" - checksum: 470a04dfe1d34814f8bc7e1dde606851b6f787a6d78655a57df063844fc71feb64ce793c52a3a130ceac1fc368b8d3e25a4c55c847a1e9c02c3090f9dcbf40ac - languageName: node - linkType: hard - -"emoji-regex@npm:^8.0.0": - version: 8.0.0 - resolution: "emoji-regex@npm:8.0.0" - checksum: d4c5c39d5a9868b5fa152f00cada8a936868fd3367f33f71be515ecee4c803132d11b31a6222b2571b1e5f7e13890156a94880345594d0ce7e3c9895f560f192 - languageName: node - linkType: hard - -"emoji-regex@npm:^9.2.2": - version: 9.2.2 - resolution: "emoji-regex@npm:9.2.2" - checksum: 8487182da74aabd810ac6d6f1994111dfc0e331b01271ae01ec1eb0ad7b5ecc2bbbbd2f053c05cb55a1ac30449527d819bbfbf0e3de1023db308cbcb47f86601 - languageName: node - linkType: hard - -"enhanced-resolve@npm:^5.7.0": - version: 5.10.0 - resolution: "enhanced-resolve@npm:5.10.0" - dependencies: - graceful-fs: ^4.2.4 - tapable: ^2.2.0 - checksum: 0bb9830704db271610f900e8d79d70a740ea16f251263362b0c91af545576d09fe50103496606c1300a05e588372d6f9780a9bc2e30ce8ef9b827ec8f44687ff - languageName: node - linkType: hard - -"enquirer@npm:^2.3.5": - version: 2.3.6 - resolution: "enquirer@npm:2.3.6" - dependencies: - ansi-colors: ^4.1.1 - checksum: 1c0911e14a6f8d26721c91e01db06092a5f7675159f0261d69c403396a385afd13dd76825e7678f66daffa930cfaa8d45f506fb35f818a2788463d022af1b884 - languageName: node - linkType: hard - -"es-abstract@npm:^1.19.0, es-abstract@npm:^1.19.1, es-abstract@npm:^1.19.2, es-abstract@npm:^1.19.5": - version: 1.20.2 - resolution: "es-abstract@npm:1.20.2" - dependencies: - call-bind: ^1.0.2 - es-to-primitive: ^1.2.1 - function-bind: ^1.1.1 - function.prototype.name: ^1.1.5 - get-intrinsic: ^1.1.2 - get-symbol-description: ^1.0.0 - has: ^1.0.3 - has-property-descriptors: ^1.0.0 - has-symbols: ^1.0.3 - internal-slot: ^1.0.3 - is-callable: ^1.2.4 - is-negative-zero: ^2.0.2 - is-regex: ^1.1.4 - is-shared-array-buffer: ^1.0.2 - is-string: ^1.0.7 - is-weakref: ^1.0.2 - object-inspect: ^1.12.2 - object-keys: ^1.1.1 - 
object.assign: ^4.1.4 - regexp.prototype.flags: ^1.4.3 - string.prototype.trimend: ^1.0.5 - string.prototype.trimstart: ^1.0.5 - unbox-primitive: ^1.0.2 - checksum: ab893dd1f849250f5a2da82656b4e21b511f76429b25a4aea5c8b2a3007ff01cb8e112987d0dd7693b9ad9e6399f8f7be133285d6196a5ebd1b13a4ee2258f70 - languageName: node - linkType: hard - -"es-shim-unscopables@npm:^1.0.0": - version: 1.0.0 - resolution: "es-shim-unscopables@npm:1.0.0" - dependencies: - has: ^1.0.3 - checksum: 83e95cadbb6ee44d3644dfad60dcad7929edbc42c85e66c3e99aefd68a3a5c5665f2686885cddb47dfeabfd77bd5ea5a7060f2092a955a729bbd8834f0d86fa1 - languageName: node - linkType: hard - -"es-to-primitive@npm:^1.2.1": - version: 1.2.1 - resolution: "es-to-primitive@npm:1.2.1" - dependencies: - is-callable: ^1.1.4 - is-date-object: ^1.0.1 - is-symbol: ^1.0.2 - checksum: 4ead6671a2c1402619bdd77f3503991232ca15e17e46222b0a41a5d81aebc8740a77822f5b3c965008e631153e9ef0580540007744521e72de8e33599fca2eed - languageName: node - linkType: hard - -"escalade@npm:^3.1.1": - version: 3.1.1 - resolution: "escalade@npm:3.1.1" - checksum: a3e2a99f07acb74b3ad4989c48ca0c3140f69f923e56d0cba0526240ee470b91010f9d39001f2a4a313841d237ede70a729e92125191ba5d21e74b106800b133 - languageName: node - linkType: hard - -"escape-string-regexp@npm:^1.0.5": - version: 1.0.5 - resolution: "escape-string-regexp@npm:1.0.5" - checksum: 6092fda75c63b110c706b6a9bfde8a612ad595b628f0bd2147eea1d3406723020810e591effc7db1da91d80a71a737a313567c5abb3813e8d9c71f4aa595b410 - languageName: node - linkType: hard - -"escape-string-regexp@npm:^4.0.0": - version: 4.0.0 - resolution: "escape-string-regexp@npm:4.0.0" - checksum: 98b48897d93060f2322108bf29db0feba7dd774be96cd069458d1453347b25ce8682ecc39859d4bca2203cc0ab19c237bcc71755eff49a0f8d90beadeeba5cc5 - languageName: node - linkType: hard - -"eslint-config-custom@*, eslint-config-custom@workspace:packages/eslint-config-custom": - version: 0.0.0-use.local - resolution: "eslint-config-custom@workspace:packages/eslint-config-custom" - dependencies: - eslint: ^7.23.0 - eslint-config-next: ^12.0.8 - eslint-config-prettier: ^8.3.0 - eslint-config-turbo: latest - eslint-plugin-react: 7.31.7 - typescript: ^4.7.4 - languageName: unknown - linkType: soft - -"eslint-config-next@npm:^12.0.8": - version: 12.3.0 - resolution: "eslint-config-next@npm:12.3.0" - dependencies: - "@next/eslint-plugin-next": 12.3.0 - "@rushstack/eslint-patch": ^1.1.3 - "@typescript-eslint/parser": ^5.21.0 - eslint-import-resolver-node: ^0.3.6 - eslint-import-resolver-typescript: ^2.7.1 - eslint-plugin-import: ^2.26.0 - eslint-plugin-jsx-a11y: ^6.5.1 - eslint-plugin-react: ^7.29.4 - eslint-plugin-react-hooks: ^4.5.0 - peerDependencies: - eslint: ^7.23.0 || ^8.0.0 - typescript: ">=3.3.1" - peerDependenciesMeta: - typescript: - optional: true - checksum: 50a2e43c515350c689cd848973b953c1d058303b84e05ecba5b5bf0f8feffe3935011de3b574ba35d48de8a5d7d5c42567d21d1a17f02189a701edeb6d76a8e0 - languageName: node - linkType: hard - -"eslint-config-prettier@npm:^8.3.0": - version: 8.5.0 - resolution: "eslint-config-prettier@npm:8.5.0" - peerDependencies: - eslint: ">=7.0.0" - bin: - eslint-config-prettier: bin/cli.js - checksum: 0d0f5c32e7a0ad91249467ce71ca92394ccd343178277d318baf32063b79ea90216f4c81d1065d60f96366fdc60f151d4d68ae7811a58bd37228b84c2083f893 - languageName: node - linkType: hard - -eslint-config-turbo@latest: - version: 0.0.3 - resolution: "eslint-config-turbo@npm:0.0.3" - dependencies: - eslint-plugin-turbo: 0.0.3 - peerDependencies: - eslint: ^7.23.0 || ^8.0.0 - checksum: 
c92255e91dd0865faeebc857eb3a862e8ca2ccb37fc54ffce93b73cd41e95ad456826ae6634772450dfa9c705b67c288f476e8e413fab3d8194dc271754528e2 - languageName: node - linkType: hard - -"eslint-import-resolver-node@npm:^0.3.6": - version: 0.3.6 - resolution: "eslint-import-resolver-node@npm:0.3.6" - dependencies: - debug: ^3.2.7 - resolve: ^1.20.0 - checksum: 6266733af1e112970e855a5bcc2d2058fb5ae16ad2a6d400705a86b29552b36131ffc5581b744c23d550de844206fb55e9193691619ee4dbf225c4bde526b1c8 - languageName: node - linkType: hard - -"eslint-import-resolver-typescript@npm:^2.7.1": - version: 2.7.1 - resolution: "eslint-import-resolver-typescript@npm:2.7.1" - dependencies: - debug: ^4.3.4 - glob: ^7.2.0 - is-glob: ^4.0.3 - resolve: ^1.22.0 - tsconfig-paths: ^3.14.1 - peerDependencies: - eslint: "*" - eslint-plugin-import: "*" - checksum: 1d81b657b1f73bf95b8f0b745c0305574b91630c1db340318f3ca8918e206fce20a933b95e7c419338cc4452cb80bb2b2d92acaf01b6aa315c78a332d832545c - languageName: node - linkType: hard - -"eslint-module-utils@npm:^2.7.3": - version: 2.7.4 - resolution: "eslint-module-utils@npm:2.7.4" - dependencies: - debug: ^3.2.7 - dependenciesMeta: - debug@4.3.4: - unplugged: true - peerDependenciesMeta: - eslint: - optional: true - checksum: 5da13645daff145a5c922896b258f8bba560722c3767254e458d894ff5fbb505d6dfd945bffa932a5b0ae06714da2379bd41011c4c20d2d59cc83e23895360f7 - languageName: node - linkType: hard - -"eslint-plugin-import@npm:^2.26.0": - version: 2.26.0 - resolution: "eslint-plugin-import@npm:2.26.0" - dependencies: - array-includes: ^3.1.4 - array.prototype.flat: ^1.2.5 - debug: ^2.6.9 - doctrine: ^2.1.0 - eslint-import-resolver-node: ^0.3.6 - eslint-module-utils: ^2.7.3 - has: ^1.0.3 - is-core-module: ^2.8.1 - is-glob: ^4.0.3 - minimatch: ^3.1.2 - object.values: ^1.1.5 - resolve: ^1.22.0 - tsconfig-paths: ^3.14.1 - peerDependencies: - eslint: ^2 || ^3 || ^4 || ^5 || ^6 || ^7.2.0 || ^8 - checksum: 0bf77ad80339554481eafa2b1967449e1f816b94c7a6f9614ce33fb4083c4e6c050f10d241dd50b4975d47922880a34de1e42ea9d8e6fd663ebb768baa67e655 - languageName: node - linkType: hard - -"eslint-plugin-jsx-a11y@npm:^6.5.1": - version: 6.6.1 - resolution: "eslint-plugin-jsx-a11y@npm:6.6.1" - dependencies: - "@babel/runtime": ^7.18.9 - aria-query: ^4.2.2 - array-includes: ^3.1.5 - ast-types-flow: ^0.0.7 - axe-core: ^4.4.3 - axobject-query: ^2.2.0 - damerau-levenshtein: ^1.0.8 - emoji-regex: ^9.2.2 - has: ^1.0.3 - jsx-ast-utils: ^3.3.2 - language-tags: ^1.0.5 - minimatch: ^3.1.2 - semver: ^6.3.0 - peerDependencies: - eslint: ^3 || ^4 || ^5 || ^6 || ^7 || ^8 - checksum: baae7377f0e25a0cc9b34dc333a3dc6ead9ee8365e445451eff554c3ca267a0a6cb88127fe90395c578ab1b92cfed246aef7dc8d2b48b603389e10181799e144 - languageName: node - linkType: hard - -"eslint-plugin-react-hooks@npm:^4.5.0": - version: 4.6.0 - resolution: "eslint-plugin-react-hooks@npm:4.6.0" - peerDependencies: - eslint: ^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0-0 - checksum: 23001801f14c1d16bf0a837ca7970d9dd94e7b560384b41db378b49b6e32dc43d6e2790de1bd737a652a86f81a08d6a91f402525061b47719328f586a57e86c3 - languageName: node - linkType: hard - -"eslint-plugin-react@npm:7.31.7": - version: 7.31.7 - resolution: "eslint-plugin-react@npm:7.31.7" - dependencies: - array-includes: ^3.1.5 - array.prototype.flatmap: ^1.3.0 - doctrine: ^2.1.0 - estraverse: ^5.3.0 - jsx-ast-utils: ^2.4.1 || ^3.0.0 - minimatch: ^3.1.2 - object.entries: ^1.1.5 - object.fromentries: ^2.0.5 - object.hasown: ^1.1.1 - object.values: ^1.1.5 - prop-types: ^15.8.1 - resolve: ^2.0.0-next.3 - semver: 
^6.3.0 - string.prototype.matchall: ^4.0.7 - peerDependencies: - eslint: ^3 || ^4 || ^5 || ^6 || ^7 || ^8 - checksum: 582d422f531d7d3894fc09ac941ef8b6ad595782cfca5e1d52af5895ce117def7a0ff8afeea0166bff7b6ceae8baec2313614b1571754f539575cfa9351cd2da - languageName: node - linkType: hard - -"eslint-plugin-react@npm:^7.29.4": - version: 7.31.8 - resolution: "eslint-plugin-react@npm:7.31.8" - dependencies: - array-includes: ^3.1.5 - array.prototype.flatmap: ^1.3.0 - doctrine: ^2.1.0 - estraverse: ^5.3.0 - jsx-ast-utils: ^2.4.1 || ^3.0.0 - minimatch: ^3.1.2 - object.entries: ^1.1.5 - object.fromentries: ^2.0.5 - object.hasown: ^1.1.1 - object.values: ^1.1.5 - prop-types: ^15.8.1 - resolve: ^2.0.0-next.3 - semver: ^6.3.0 - string.prototype.matchall: ^4.0.7 - peerDependencies: - eslint: ^3 || ^4 || ^5 || ^6 || ^7 || ^8 - checksum: 0683e2a624a4df6f08264a3f6bc614a81e8f961c83173bdf2d8d3523f84ed5d234cddc976dbc6815913e007c5984df742ba61be0c0592b27c3daabe0f68165a3 - languageName: node - linkType: hard - -"eslint-plugin-turbo@npm:0.0.3": - version: 0.0.3 - resolution: "eslint-plugin-turbo@npm:0.0.3" - peerDependencies: - eslint: ^7.23.0 || ^8.0.0 - checksum: 18e2b13ede03eee7635d0c67ca792cf46483e90443143bdc06555bf231045fb5f70b2f6f1d67492365b7fe47620408eea22f7548879f3afcb07ccc070aec5c15 - languageName: node - linkType: hard - -"eslint-scope@npm:^5.1.1": - version: 5.1.1 - resolution: "eslint-scope@npm:5.1.1" - dependencies: - esrecurse: ^4.3.0 - estraverse: ^4.1.1 - checksum: 47e4b6a3f0cc29c7feedee6c67b225a2da7e155802c6ea13bbef4ac6b9e10c66cd2dcb987867ef176292bf4e64eccc680a49e35e9e9c669f4a02bac17e86abdb - languageName: node - linkType: hard - -"eslint-utils@npm:^2.1.0": - version: 2.1.0 - resolution: "eslint-utils@npm:2.1.0" - dependencies: - eslint-visitor-keys: ^1.1.0 - checksum: 27500938f348da42100d9e6ad03ae29b3de19ba757ae1a7f4a087bdcf83ac60949bbb54286492ca61fac1f5f3ac8692dd21537ce6214240bf95ad0122f24d71d - languageName: node - linkType: hard - -"eslint-visitor-keys@npm:^1.1.0, eslint-visitor-keys@npm:^1.3.0": - version: 1.3.0 - resolution: "eslint-visitor-keys@npm:1.3.0" - checksum: 37a19b712f42f4c9027e8ba98c2b06031c17e0c0a4c696cd429bd9ee04eb43889c446f2cd545e1ff51bef9593fcec94ecd2c2ef89129fcbbf3adadbef520376a - languageName: node - linkType: hard - -"eslint-visitor-keys@npm:^2.0.0": - version: 2.1.0 - resolution: "eslint-visitor-keys@npm:2.1.0" - checksum: e3081d7dd2611a35f0388bbdc2f5da60b3a3c5b8b6e928daffff7391146b434d691577aa95064c8b7faad0b8a680266bcda0a42439c18c717b80e6718d7e267d - languageName: node - linkType: hard - -"eslint-visitor-keys@npm:^3.3.0": - version: 3.3.0 - resolution: "eslint-visitor-keys@npm:3.3.0" - checksum: d59e68a7c5a6d0146526b0eec16ce87fbf97fe46b8281e0d41384224375c4e52f5ffb9e16d48f4ea50785cde93f766b0c898e31ab89978d88b0e1720fbfb7808 - languageName: node - linkType: hard - -"eslint@npm:7.32.0, eslint@npm:^7.23.0, eslint@npm:^7.32.0": - version: 7.32.0 - resolution: "eslint@npm:7.32.0" - dependencies: - "@babel/code-frame": 7.12.11 - "@eslint/eslintrc": ^0.4.3 - "@humanwhocodes/config-array": ^0.5.0 - ajv: ^6.10.0 - chalk: ^4.0.0 - cross-spawn: ^7.0.2 - debug: ^4.0.1 - doctrine: ^3.0.0 - enquirer: ^2.3.5 - escape-string-regexp: ^4.0.0 - eslint-scope: ^5.1.1 - eslint-utils: ^2.1.0 - eslint-visitor-keys: ^2.0.0 - espree: ^7.3.1 - esquery: ^1.4.0 - esutils: ^2.0.2 - fast-deep-equal: ^3.1.3 - file-entry-cache: ^6.0.1 - functional-red-black-tree: ^1.0.1 - glob-parent: ^5.1.2 - globals: ^13.6.0 - ignore: ^4.0.6 - import-fresh: ^3.0.0 - imurmurhash: ^0.1.4 - is-glob: ^4.0.0 - js-yaml: 
^3.13.1 - json-stable-stringify-without-jsonify: ^1.0.1 - levn: ^0.4.1 - lodash.merge: ^4.6.2 - minimatch: ^3.0.4 - natural-compare: ^1.4.0 - optionator: ^0.9.1 - progress: ^2.0.0 - regexpp: ^3.1.0 - semver: ^7.2.1 - strip-ansi: ^6.0.0 - strip-json-comments: ^3.1.0 - table: ^6.0.9 - text-table: ^0.2.0 - v8-compile-cache: ^2.0.3 - bin: - eslint: bin/eslint.js - checksum: cc85af9985a3a11085c011f3d27abe8111006d34cc274291b3c4d7bea51a4e2ff6135780249becd919ba7f6d6d1ecc38a6b73dacb6a7be08d38453b344dc8d37 - languageName: node - linkType: hard - -"espree@npm:^7.3.0, espree@npm:^7.3.1": - version: 7.3.1 - resolution: "espree@npm:7.3.1" - dependencies: - acorn: ^7.4.0 - acorn-jsx: ^5.3.1 - eslint-visitor-keys: ^1.3.0 - checksum: aa9b50dcce883449af2e23bc2b8d9abb77118f96f4cb313935d6b220f77137eaef7724a83c3f6243b96bc0e4ab14766198e60818caad99f9519ae5a336a39b45 - languageName: node - linkType: hard - -"esprima@npm:^4.0.0": - version: 4.0.1 - resolution: "esprima@npm:4.0.1" - bin: - esparse: ./bin/esparse.js - esvalidate: ./bin/esvalidate.js - checksum: b45bc805a613dbea2835278c306b91aff6173c8d034223fa81498c77dcbce3b2931bf6006db816f62eacd9fd4ea975dfd85a5b7f3c6402cfd050d4ca3c13a628 - languageName: node - linkType: hard - -"esquery@npm:^1.4.0": - version: 1.4.0 - resolution: "esquery@npm:1.4.0" - dependencies: - estraverse: ^5.1.0 - checksum: a0807e17abd7fbe5fbd4fab673038d6d8a50675cdae6b04fbaa520c34581be0c5fa24582990e8acd8854f671dd291c78bb2efb9e0ed5b62f33bac4f9cf820210 - languageName: node - linkType: hard - -"esrecurse@npm:^4.3.0": - version: 4.3.0 - resolution: "esrecurse@npm:4.3.0" - dependencies: - estraverse: ^5.2.0 - checksum: ebc17b1a33c51cef46fdc28b958994b1dc43cd2e86237515cbc3b4e5d2be6a811b2315d0a1a4d9d340b6d2308b15322f5c8291059521cc5f4802f65e7ec32837 - languageName: node - linkType: hard - -"estraverse@npm:^4.1.1": - version: 4.3.0 - resolution: "estraverse@npm:4.3.0" - checksum: a6299491f9940bb246124a8d44b7b7a413a8336f5436f9837aaa9330209bd9ee8af7e91a654a3545aee9c54b3308e78ee360cef1d777d37cfef77d2fa33b5827 - languageName: node - linkType: hard - -"estraverse@npm:^5.1.0, estraverse@npm:^5.2.0, estraverse@npm:^5.3.0": - version: 5.3.0 - resolution: "estraverse@npm:5.3.0" - checksum: 072780882dc8416ad144f8fe199628d2b3e7bbc9989d9ed43795d2c90309a2047e6bc5979d7e2322a341163d22cfad9e21f4110597fe487519697389497e4e2b - languageName: node - linkType: hard - -"esutils@npm:^2.0.2": - version: 2.0.3 - resolution: "esutils@npm:2.0.3" - checksum: 22b5b08f74737379a840b8ed2036a5fb35826c709ab000683b092d9054e5c2a82c27818f12604bfc2a9a76b90b6834ef081edbc1c7ae30d1627012e067c6ec87 - languageName: node - linkType: hard - -"fast-deep-equal@npm:^3.1.1, fast-deep-equal@npm:^3.1.3": - version: 3.1.3 - resolution: "fast-deep-equal@npm:3.1.3" - checksum: e21a9d8d84f53493b6aa15efc9cfd53dd5b714a1f23f67fb5dc8f574af80df889b3bce25dc081887c6d25457cce704e636395333abad896ccdec03abaf1f3f9d - languageName: node - linkType: hard - -"fast-glob@npm:^3.2.9": - version: 3.2.12 - resolution: "fast-glob@npm:3.2.12" - dependencies: - "@nodelib/fs.stat": ^2.0.2 - "@nodelib/fs.walk": ^1.2.3 - glob-parent: ^5.1.2 - merge2: ^1.3.0 - micromatch: ^4.0.4 - checksum: 0b1990f6ce831c7e28c4d505edcdaad8e27e88ab9fa65eedadb730438cfc7cde4910d6c975d6b7b8dc8a73da4773702ebcfcd6e3518e73938bb1383badfe01c2 - languageName: node - linkType: hard - -"fast-json-stable-stringify@npm:^2.0.0": - version: 2.1.0 - resolution: "fast-json-stable-stringify@npm:2.1.0" - checksum: 
b191531e36c607977e5b1c47811158733c34ccb3bfde92c44798929e9b4154884378536d26ad90dfecd32e1ffc09c545d23535ad91b3161a27ddbb8ebe0cbecb - languageName: node - linkType: hard - -"fast-levenshtein@npm:^2.0.6": - version: 2.0.6 - resolution: "fast-levenshtein@npm:2.0.6" - checksum: 92cfec0a8dfafd9c7a15fba8f2cc29cd0b62b85f056d99ce448bbcd9f708e18ab2764bda4dd5158364f4145a7c72788538994f0d1787b956ef0d1062b0f7c24c - languageName: node - linkType: hard - -"fastq@npm:^1.6.0": - version: 1.13.0 - resolution: "fastq@npm:1.13.0" - dependencies: - reusify: ^1.0.4 - checksum: 32cf15c29afe622af187d12fc9cd93e160a0cb7c31a3bb6ace86b7dea3b28e7b72acde89c882663f307b2184e14782c6c664fa315973c03626c7d4bff070bb0b - languageName: node - linkType: hard - -"file-entry-cache@npm:^6.0.1": - version: 6.0.1 - resolution: "file-entry-cache@npm:6.0.1" - dependencies: - flat-cache: ^3.0.4 - checksum: f49701feaa6314c8127c3c2f6173cfefff17612f5ed2daaafc6da13b5c91fd43e3b2a58fd0d63f9f94478a501b167615931e7200e31485e320f74a33885a9c74 - languageName: node - linkType: hard - -"fill-range@npm:^7.0.1": - version: 7.0.1 - resolution: "fill-range@npm:7.0.1" - dependencies: - to-regex-range: ^5.0.1 - checksum: cc283f4e65b504259e64fd969bcf4def4eb08d85565e906b7d36516e87819db52029a76b6363d0f02d0d532f0033c9603b9e2d943d56ee3b0d4f7ad3328ff917 - languageName: node - linkType: hard - -"flat-cache@npm:^3.0.4": - version: 3.0.4 - resolution: "flat-cache@npm:3.0.4" - dependencies: - flatted: ^3.1.0 - rimraf: ^3.0.2 - checksum: 4fdd10ecbcbf7d520f9040dd1340eb5dfe951e6f0ecf2252edeec03ee68d989ec8b9a20f4434270e71bcfd57800dc09b3344fca3966b2eb8f613072c7d9a2365 - languageName: node - linkType: hard - -"flatted@npm:^3.1.0": - version: 3.2.7 - resolution: "flatted@npm:3.2.7" - checksum: 427633049d55bdb80201c68f7eb1cbd533e03eac541f97d3aecab8c5526f12a20ccecaeede08b57503e772c769e7f8680b37e8d482d1e5f8d7e2194687f9ea35 - languageName: node - linkType: hard - -"fs.realpath@npm:^1.0.0": - version: 1.0.0 - resolution: "fs.realpath@npm:1.0.0" - checksum: 99ddea01a7e75aa276c250a04eedeffe5662bce66c65c07164ad6264f9de18fb21be9433ead460e54cff20e31721c811f4fb5d70591799df5f85dce6d6746fd0 - languageName: node - linkType: hard - -"function-bind@npm:^1.1.1": - version: 1.1.1 - resolution: "function-bind@npm:1.1.1" - checksum: b32fbaebb3f8ec4969f033073b43f5c8befbb58f1a79e12f1d7490358150359ebd92f49e72ff0144f65f2c48ea2a605bff2d07965f548f6474fd8efd95bf361a - languageName: node - linkType: hard - -"function.prototype.name@npm:^1.1.5": - version: 1.1.5 - resolution: "function.prototype.name@npm:1.1.5" - dependencies: - call-bind: ^1.0.2 - define-properties: ^1.1.3 - es-abstract: ^1.19.0 - functions-have-names: ^1.2.2 - checksum: acd21d733a9b649c2c442f067567743214af5fa248dbeee69d8278ce7df3329ea5abac572be9f7470b4ec1cd4d8f1040e3c5caccf98ebf2bf861a0deab735c27 - languageName: node - linkType: hard - -"functional-red-black-tree@npm:^1.0.1": - version: 1.0.1 - resolution: "functional-red-black-tree@npm:1.0.1" - checksum: ca6c170f37640e2d94297da8bb4bf27a1d12bea3e00e6a3e007fd7aa32e37e000f5772acf941b4e4f3cf1c95c3752033d0c509af157ad8f526e7f00723b9eb9f - languageName: node - linkType: hard - -"functions-have-names@npm:^1.2.2": - version: 1.2.3 - resolution: "functions-have-names@npm:1.2.3" - checksum: c3f1f5ba20f4e962efb71344ce0a40722163e85bee2101ce25f88214e78182d2d2476aa85ef37950c579eb6cf6ee811c17b3101bb84004bb75655f3e33f3fdb5 - languageName: node - linkType: hard - -"gensync@npm:^1.0.0-beta.2": - version: 1.0.0-beta.2 - resolution: "gensync@npm:1.0.0-beta.2" - checksum: 
a7437e58c6be12aa6c90f7730eac7fa9833dc78872b4ad2963d2031b00a3367a93f98aec75f9aaac7220848e4026d67a8655e870b24f20a543d103c0d65952ec - languageName: node - linkType: hard - -"get-intrinsic@npm:^1.0.2, get-intrinsic@npm:^1.1.0, get-intrinsic@npm:^1.1.1, get-intrinsic@npm:^1.1.2": - version: 1.1.3 - resolution: "get-intrinsic@npm:1.1.3" - dependencies: - function-bind: ^1.1.1 - has: ^1.0.3 - has-symbols: ^1.0.3 - checksum: 152d79e87251d536cf880ba75cfc3d6c6c50e12b3a64e1ea960e73a3752b47c69f46034456eae1b0894359ce3bc64c55c186f2811f8a788b75b638b06fab228a - languageName: node - linkType: hard - -"get-symbol-description@npm:^1.0.0": - version: 1.0.0 - resolution: "get-symbol-description@npm:1.0.0" - dependencies: - call-bind: ^1.0.2 - get-intrinsic: ^1.1.1 - checksum: 9ceff8fe968f9270a37a1f73bf3f1f7bda69ca80f4f80850670e0e7b9444ff99323f7ac52f96567f8b5f5fbe7ac717a0d81d3407c7313e82810c6199446a5247 - languageName: node - linkType: hard - -"glob-parent@npm:^5.1.2": - version: 5.1.2 - resolution: "glob-parent@npm:5.1.2" - dependencies: - is-glob: ^4.0.1 - checksum: f4f2bfe2425296e8a47e36864e4f42be38a996db40420fe434565e4480e3322f18eb37589617a98640c5dc8fdec1a387007ee18dbb1f3f5553409c34d17f425e - languageName: node - linkType: hard - -"glob@npm:7.1.7": - version: 7.1.7 - resolution: "glob@npm:7.1.7" - dependencies: - fs.realpath: ^1.0.0 - inflight: ^1.0.4 - inherits: 2 - minimatch: ^3.0.4 - once: ^1.3.0 - path-is-absolute: ^1.0.0 - checksum: b61f48973bbdcf5159997b0874a2165db572b368b931135832599875919c237fc05c12984e38fe828e69aa8a921eb0e8a4997266211c517c9cfaae8a93988bb8 - languageName: node - linkType: hard - -"glob@npm:^7.1.3, glob@npm:^7.2.0": - version: 7.2.3 - resolution: "glob@npm:7.2.3" - dependencies: - fs.realpath: ^1.0.0 - inflight: ^1.0.4 - inherits: 2 - minimatch: ^3.1.1 - once: ^1.3.0 - path-is-absolute: ^1.0.0 - checksum: 29452e97b38fa704dabb1d1045350fb2467cf0277e155aa9ff7077e90ad81d1ea9d53d3ee63bd37c05b09a065e90f16aec4a65f5b8de401d1dac40bc5605d133 - languageName: node - linkType: hard - -"globals@npm:^11.1.0": - version: 11.12.0 - resolution: "globals@npm:11.12.0" - checksum: 67051a45eca3db904aee189dfc7cd53c20c7d881679c93f6146ddd4c9f4ab2268e68a919df740d39c71f4445d2b38ee360fc234428baea1dbdfe68bbcb46979e - languageName: node - linkType: hard - -"globals@npm:^13.6.0, globals@npm:^13.9.0": - version: 13.17.0 - resolution: "globals@npm:13.17.0" - dependencies: - type-fest: ^0.20.2 - checksum: fbaf4112e59b92c9f5575e85ce65e9e17c0b82711196ec5f58beb08599bbd92fd72703d6dfc9b080381fd35b644e1b11dcf25b38cc2341ec21df942594cbc8ce - languageName: node - linkType: hard - -"globby@npm:^11.1.0": - version: 11.1.0 - resolution: "globby@npm:11.1.0" - dependencies: - array-union: ^2.1.0 - dir-glob: ^3.0.1 - fast-glob: ^3.2.9 - ignore: ^5.2.0 - merge2: ^1.4.1 - slash: ^3.0.0 - checksum: b4be8885e0cfa018fc783792942d53926c35c50b3aefd3fdcfb9d22c627639dc26bd2327a40a0b74b074100ce95bb7187bfeae2f236856aa3de183af7a02aea6 - languageName: node - linkType: hard - -"graceful-fs@npm:^4.2.4": - version: 4.2.10 - resolution: "graceful-fs@npm:4.2.10" - checksum: 3f109d70ae123951905d85032ebeae3c2a5a7a997430df00ea30df0e3a6c60cf6689b109654d6fdacd28810a053348c4d14642da1d075049e6be1ba5216218da - languageName: node - linkType: hard - -"has-bigints@npm:^1.0.1, has-bigints@npm:^1.0.2": - version: 1.0.2 - resolution: "has-bigints@npm:1.0.2" - checksum: 390e31e7be7e5c6fe68b81babb73dfc35d413604d7ee5f56da101417027a4b4ce6a27e46eff97ad040c835b5d228676eae99a9b5c3bc0e23c8e81a49241ff45b - languageName: node - linkType: hard - -"has-flag@npm:^3.0.0": - 
version: 3.0.0 - resolution: "has-flag@npm:3.0.0" - checksum: 4a15638b454bf086c8148979aae044dd6e39d63904cd452d970374fa6a87623423da485dfb814e7be882e05c096a7ccf1ebd48e7e7501d0208d8384ff4dea73b - languageName: node - linkType: hard - -"has-flag@npm:^4.0.0": - version: 4.0.0 - resolution: "has-flag@npm:4.0.0" - checksum: 261a1357037ead75e338156b1f9452c016a37dcd3283a972a30d9e4a87441ba372c8b81f818cd0fbcd9c0354b4ae7e18b9e1afa1971164aef6d18c2b6095a8ad - languageName: node - linkType: hard - -"has-property-descriptors@npm:^1.0.0": - version: 1.0.0 - resolution: "has-property-descriptors@npm:1.0.0" - dependencies: - get-intrinsic: ^1.1.1 - checksum: a6d3f0a266d0294d972e354782e872e2fe1b6495b321e6ef678c9b7a06a40408a6891817350c62e752adced73a94ac903c54734fee05bf65b1905ee1368194bb - languageName: node - linkType: hard - -"has-symbols@npm:^1.0.2, has-symbols@npm:^1.0.3": - version: 1.0.3 - resolution: "has-symbols@npm:1.0.3" - checksum: a054c40c631c0d5741a8285010a0777ea0c068f99ed43e5d6eb12972da223f8af553a455132fdb0801bdcfa0e0f443c0c03a68d8555aa529b3144b446c3f2410 - languageName: node - linkType: hard - -"has-tostringtag@npm:^1.0.0": - version: 1.0.0 - resolution: "has-tostringtag@npm:1.0.0" - dependencies: - has-symbols: ^1.0.2 - checksum: cc12eb28cb6ae22369ebaad3a8ab0799ed61270991be88f208d508076a1e99abe4198c965935ce85ea90b60c94ddda73693b0920b58e7ead048b4a391b502c1c - languageName: node - linkType: hard - -"has@npm:^1.0.3": - version: 1.0.3 - resolution: "has@npm:1.0.3" - dependencies: - function-bind: ^1.1.1 - checksum: b9ad53d53be4af90ce5d1c38331e712522417d017d5ef1ebd0507e07c2fbad8686fffb8e12ddecd4c39ca9b9b47431afbb975b8abf7f3c3b82c98e9aad052792 - languageName: node - linkType: hard - -"ignore@npm:^4.0.6": - version: 4.0.6 - resolution: "ignore@npm:4.0.6" - checksum: 248f82e50a430906f9ee7f35e1158e3ec4c3971451dd9f99c9bc1548261b4db2b99709f60ac6c6cac9333494384176cc4cc9b07acbe42d52ac6a09cad734d800 - languageName: node - linkType: hard - -"ignore@npm:^5.2.0": - version: 5.2.0 - resolution: "ignore@npm:5.2.0" - checksum: 6b1f926792d614f64c6c83da3a1f9c83f6196c2839aa41e1e32dd7b8d174cef2e329d75caabb62cb61ce9dc432f75e67d07d122a037312db7caa73166a1bdb77 - languageName: node - linkType: hard - -"import-fresh@npm:^3.0.0, import-fresh@npm:^3.2.1": - version: 3.3.0 - resolution: "import-fresh@npm:3.3.0" - dependencies: - parent-module: ^1.0.0 - resolve-from: ^4.0.0 - checksum: 2cacfad06e652b1edc50be650f7ec3be08c5e5a6f6d12d035c440a42a8cc028e60a5b99ca08a77ab4d6b1346da7d971915828f33cdab730d3d42f08242d09baa - languageName: node - linkType: hard - -"imurmurhash@npm:^0.1.4": - version: 0.1.4 - resolution: "imurmurhash@npm:0.1.4" - checksum: 7cae75c8cd9a50f57dadd77482359f659eaebac0319dd9368bcd1714f55e65badd6929ca58569da2b6494ef13fdd5598cd700b1eba23f8b79c5f19d195a3ecf7 - languageName: node - linkType: hard - -"inflight@npm:^1.0.4": - version: 1.0.6 - resolution: "inflight@npm:1.0.6" - dependencies: - once: ^1.3.0 - wrappy: 1 - checksum: f4f76aa072ce19fae87ce1ef7d221e709afb59d445e05d47fba710e85470923a75de35bfae47da6de1b18afc3ce83d70facf44cfb0aff89f0a3f45c0a0244dfd - languageName: node - linkType: hard - -"inherits@npm:2": - version: 2.0.4 - resolution: "inherits@npm:2.0.4" - checksum: 4a48a733847879d6cf6691860a6b1e3f0f4754176e4d71494c41f3475553768b10f84b5ce1d40fbd0e34e6bfbb864ee35858ad4dd2cf31e02fc4a154b724d7f1 - languageName: node - linkType: hard - -"internal-slot@npm:^1.0.3": - version: 1.0.3 - resolution: "internal-slot@npm:1.0.3" - dependencies: - get-intrinsic: ^1.1.0 - has: ^1.0.3 - side-channel: ^1.0.4 - checksum: 
1944f92e981e47aebc98a88ff0db579fd90543d937806104d0b96557b10c1f170c51fb777b97740a8b6ddeec585fca8c39ae99fd08a8e058dfc8ab70937238bf - languageName: node - linkType: hard - -"is-bigint@npm:^1.0.1": - version: 1.0.4 - resolution: "is-bigint@npm:1.0.4" - dependencies: - has-bigints: ^1.0.1 - checksum: c56edfe09b1154f8668e53ebe8252b6f185ee852a50f9b41e8d921cb2bed425652049fbe438723f6cb48a63ca1aa051e948e7e401e093477c99c84eba244f666 - languageName: node - linkType: hard - -"is-boolean-object@npm:^1.1.0": - version: 1.1.2 - resolution: "is-boolean-object@npm:1.1.2" - dependencies: - call-bind: ^1.0.2 - has-tostringtag: ^1.0.0 - checksum: c03b23dbaacadc18940defb12c1c0e3aaece7553ef58b162a0f6bba0c2a7e1551b59f365b91e00d2dbac0522392d576ef322628cb1d036a0fe51eb466db67222 - languageName: node - linkType: hard - -"is-callable@npm:^1.1.4, is-callable@npm:^1.2.4": - version: 1.2.6 - resolution: "is-callable@npm:1.2.6" - checksum: 7667d6a6be66df00741cfa18c657877c46a00139ea7ea7765251e9db0182745c9ee173506941a329d6914e34e59e9cc80029fb3f68bbf8c22a6c155ee6ea77b3 - languageName: node - linkType: hard - -"is-core-module@npm:^2.8.1, is-core-module@npm:^2.9.0": - version: 2.10.0 - resolution: "is-core-module@npm:2.10.0" - dependencies: - has: ^1.0.3 - checksum: 0f3f77811f430af3256fa7bbc806f9639534b140f8ee69476f632c3e1eb4e28a38be0b9d1b8ecf596179c841b53576129279df95e7051d694dac4ceb6f967593 - languageName: node - linkType: hard - -"is-date-object@npm:^1.0.1": - version: 1.0.5 - resolution: "is-date-object@npm:1.0.5" - dependencies: - has-tostringtag: ^1.0.0 - checksum: baa9077cdf15eb7b58c79398604ca57379b2fc4cf9aa7a9b9e295278648f628c9b201400c01c5e0f7afae56507d741185730307cbe7cad3b9f90a77e5ee342fc - languageName: node - linkType: hard - -"is-extglob@npm:^2.1.1": - version: 2.1.1 - resolution: "is-extglob@npm:2.1.1" - checksum: df033653d06d0eb567461e58a7a8c9f940bd8c22274b94bf7671ab36df5719791aae15eef6d83bbb5e23283967f2f984b8914559d4449efda578c775c4be6f85 - languageName: node - linkType: hard - -"is-fullwidth-code-point@npm:^3.0.0": - version: 3.0.0 - resolution: "is-fullwidth-code-point@npm:3.0.0" - checksum: 44a30c29457c7fb8f00297bce733f0a64cd22eca270f83e58c105e0d015e45c019491a4ab2faef91ab51d4738c670daff901c799f6a700e27f7314029e99e348 - languageName: node - linkType: hard - -"is-glob@npm:^4.0.0, is-glob@npm:^4.0.1, is-glob@npm:^4.0.3": - version: 4.0.3 - resolution: "is-glob@npm:4.0.3" - dependencies: - is-extglob: ^2.1.1 - checksum: d381c1319fcb69d341cc6e6c7cd588e17cd94722d9a32dbd60660b993c4fb7d0f19438674e68dfec686d09b7c73139c9166b47597f846af387450224a8101ab4 - languageName: node - linkType: hard - -"is-negative-zero@npm:^2.0.2": - version: 2.0.2 - resolution: "is-negative-zero@npm:2.0.2" - checksum: f3232194c47a549da60c3d509c9a09be442507616b69454716692e37ae9f37c4dea264fb208ad0c9f3efd15a796a46b79df07c7e53c6227c32170608b809149a - languageName: node - linkType: hard - -"is-number-object@npm:^1.0.4": - version: 1.0.7 - resolution: "is-number-object@npm:1.0.7" - dependencies: - has-tostringtag: ^1.0.0 - checksum: d1e8d01bb0a7134c74649c4e62da0c6118a0bfc6771ea3c560914d52a627873e6920dd0fd0ebc0e12ad2ff4687eac4c308f7e80320b973b2c8a2c8f97a7524f7 - languageName: node - linkType: hard - -"is-number@npm:^7.0.0": - version: 7.0.0 - resolution: "is-number@npm:7.0.0" - checksum: 456ac6f8e0f3111ed34668a624e45315201dff921e5ac181f8ec24923b99e9f32ca1a194912dc79d539c97d33dba17dc635202ff0b2cf98326f608323276d27a - languageName: node - linkType: hard - -"is-regex@npm:^1.1.4": - version: 1.1.4 - resolution: "is-regex@npm:1.1.4" - dependencies: - 
call-bind: ^1.0.2 - has-tostringtag: ^1.0.0 - checksum: 362399b33535bc8f386d96c45c9feb04cf7f8b41c182f54174c1a45c9abbbe5e31290bbad09a458583ff6bf3b2048672cdb1881b13289569a7c548370856a652 - languageName: node - linkType: hard - -"is-shared-array-buffer@npm:^1.0.2": - version: 1.0.2 - resolution: "is-shared-array-buffer@npm:1.0.2" - dependencies: - call-bind: ^1.0.2 - checksum: 9508929cf14fdc1afc9d61d723c6e8d34f5e117f0bffda4d97e7a5d88c3a8681f633a74f8e3ad1fe92d5113f9b921dc5ca44356492079612f9a247efbce7032a - languageName: node - linkType: hard - -"is-string@npm:^1.0.5, is-string@npm:^1.0.7": - version: 1.0.7 - resolution: "is-string@npm:1.0.7" - dependencies: - has-tostringtag: ^1.0.0 - checksum: 323b3d04622f78d45077cf89aab783b2f49d24dc641aa89b5ad1a72114cfeff2585efc8c12ef42466dff32bde93d839ad321b26884cf75e5a7892a938b089989 - languageName: node - linkType: hard - -"is-symbol@npm:^1.0.2, is-symbol@npm:^1.0.3": - version: 1.0.4 - resolution: "is-symbol@npm:1.0.4" - dependencies: - has-symbols: ^1.0.2 - checksum: 92805812ef590738d9de49d677cd17dfd486794773fb6fa0032d16452af46e9b91bb43ffe82c983570f015b37136f4b53b28b8523bfb10b0ece7a66c31a54510 - languageName: node - linkType: hard - -"is-weakref@npm:^1.0.2": - version: 1.0.2 - resolution: "is-weakref@npm:1.0.2" - dependencies: - call-bind: ^1.0.2 - checksum: 95bd9a57cdcb58c63b1c401c60a474b0f45b94719c30f548c891860f051bc2231575c290a6b420c6bc6e7ed99459d424c652bd5bf9a1d5259505dc35b4bf83de - languageName: node - linkType: hard - -"isexe@npm:^2.0.0": - version: 2.0.0 - resolution: "isexe@npm:2.0.0" - checksum: 26bf6c5480dda5161c820c5b5c751ae1e766c587b1f951ea3fcfc973bafb7831ae5b54a31a69bd670220e42e99ec154475025a468eae58ea262f813fdc8d1c62 - languageName: node - linkType: hard - -"js-tokens@npm:^3.0.0 || ^4.0.0, js-tokens@npm:^4.0.0": - version: 4.0.0 - resolution: "js-tokens@npm:4.0.0" - checksum: 8a95213a5a77deb6cbe94d86340e8d9ace2b93bc367790b260101d2f36a2eaf4e4e22d9fa9cf459b38af3a32fb4190e638024cf82ec95ef708680e405ea7cc78 - languageName: node - linkType: hard - -"js-yaml@npm:^3.13.1": - version: 3.14.1 - resolution: "js-yaml@npm:3.14.1" - dependencies: - argparse: ^1.0.7 - esprima: ^4.0.0 - bin: - js-yaml: bin/js-yaml.js - checksum: bef146085f472d44dee30ec34e5cf36bf89164f5d585435a3d3da89e52622dff0b188a580e4ad091c3341889e14cb88cac6e4deb16dc5b1e9623bb0601fc255c - languageName: node - linkType: hard - -"jsesc@npm:^2.5.1": - version: 2.5.2 - resolution: "jsesc@npm:2.5.2" - bin: - jsesc: bin/jsesc - checksum: 4dc190771129e12023f729ce20e1e0bfceac84d73a85bc3119f7f938843fe25a4aeccb54b6494dce26fcf263d815f5f31acdefac7cc9329efb8422a4f4d9fa9d - languageName: node - linkType: hard - -"json-schema-traverse@npm:^0.4.1": - version: 0.4.1 - resolution: "json-schema-traverse@npm:0.4.1" - checksum: 7486074d3ba247769fda17d5181b345c9fb7d12e0da98b22d1d71a5db9698d8b4bd900a3ec1a4ffdd60846fc2556274a5c894d0c48795f14cb03aeae7b55260b - languageName: node - linkType: hard - -"json-schema-traverse@npm:^1.0.0": - version: 1.0.0 - resolution: "json-schema-traverse@npm:1.0.0" - checksum: 02f2f466cdb0362558b2f1fd5e15cce82ef55d60cd7f8fa828cf35ba74330f8d767fcae5c5c2adb7851fa811766c694b9405810879bc4e1ddd78a7c0e03658ad - languageName: node - linkType: hard - -"json-stable-stringify-without-jsonify@npm:^1.0.1": - version: 1.0.1 - resolution: "json-stable-stringify-without-jsonify@npm:1.0.1" - checksum: cff44156ddce9c67c44386ad5cddf91925fe06b1d217f2da9c4910d01f358c6e3989c4d5a02683c7a5667f9727ff05831f7aa8ae66c8ff691c556f0884d49215 - languageName: node - linkType: hard - -"json5@npm:^1.0.1": - 
version: 1.0.1 - resolution: "json5@npm:1.0.1" - dependencies: - minimist: ^1.2.0 - bin: - json5: lib/cli.js - checksum: e76ea23dbb8fc1348c143da628134a98adf4c5a4e8ea2adaa74a80c455fc2cdf0e2e13e6398ef819bfe92306b610ebb2002668ed9fc1af386d593691ef346fc3 - languageName: node - linkType: hard - -"json5@npm:^2.2.1": - version: 2.2.1 - resolution: "json5@npm:2.2.1" - bin: - json5: lib/cli.js - checksum: 74b8a23b102a6f2bf2d224797ae553a75488b5adbaee9c9b6e5ab8b510a2fc6e38f876d4c77dea672d4014a44b2399e15f2051ac2b37b87f74c0c7602003543b - languageName: node - linkType: hard - -"jsx-ast-utils@npm:^2.4.1 || ^3.0.0, jsx-ast-utils@npm:^3.3.2": - version: 3.3.3 - resolution: "jsx-ast-utils@npm:3.3.3" - dependencies: - array-includes: ^3.1.5 - object.assign: ^4.1.3 - checksum: a2ed78cac49a0f0c4be8b1eafe3c5257a1411341d8e7f1ac740debae003de04e5f6372bfcfbd9d082e954ffd99aac85bcda85b7c6bc11609992483f4cdc0f745 - languageName: node - linkType: hard - -"language-subtag-registry@npm:~0.3.2": - version: 0.3.22 - resolution: "language-subtag-registry@npm:0.3.22" - checksum: 8ab70a7e0e055fe977ac16ea4c261faec7205ac43db5e806f72e5b59606939a3b972c4bd1e10e323b35d6ffa97c3e1c4c99f6553069dad2dfdd22020fa3eb56a - languageName: node - linkType: hard - -"language-tags@npm:^1.0.5": - version: 1.0.5 - resolution: "language-tags@npm:1.0.5" - dependencies: - language-subtag-registry: ~0.3.2 - checksum: c81b5d8b9f5f9cfd06ee71ada6ddfe1cf83044dd5eeefcd1e420ad491944da8957688db4a0a9bc562df4afdc2783425cbbdfd152c01d93179cf86888903123cf - languageName: node - linkType: hard - -"levn@npm:^0.4.1": - version: 0.4.1 - resolution: "levn@npm:0.4.1" - dependencies: - prelude-ls: ^1.2.1 - type-check: ~0.4.0 - checksum: 12c5021c859bd0f5248561bf139121f0358285ec545ebf48bb3d346820d5c61a4309535c7f387ed7d84361cf821e124ce346c6b7cef8ee09a67c1473b46d0fc4 - languageName: node - linkType: hard - -"lodash.merge@npm:^4.6.2": - version: 4.6.2 - resolution: "lodash.merge@npm:4.6.2" - checksum: ad580b4bdbb7ca1f7abf7e1bce63a9a0b98e370cf40194b03380a46b4ed799c9573029599caebc1b14e3f24b111aef72b96674a56cfa105e0f5ac70546cdc005 - languageName: node - linkType: hard - -"lodash.truncate@npm:^4.4.2": - version: 4.4.2 - resolution: "lodash.truncate@npm:4.4.2" - checksum: b463d8a382cfb5f0e71c504dcb6f807a7bd379ff1ea216669aa42c52fc28c54e404bfbd96791aa09e6df0de2c1d7b8f1b7f4b1a61f324d38fe98bc535aeee4f5 - languageName: node - linkType: hard - -"lodash@npm:4.17.21": - version: 4.17.21 - resolution: "lodash@npm:4.17.21" - checksum: eb835a2e51d381e561e508ce932ea50a8e5a68f4ebdd771ea240d3048244a8d13658acbd502cd4829768c56f2e16bdd4340b9ea141297d472517b83868e677f7 - languageName: node - linkType: hard - -"lodash@patch:lodash@npm%3A4.17.21#./.yarn/patches/lodash-npm-4.17.21-6382451519.patch::locator=berry-patch%40workspace%3A.": - version: 4.17.21 - resolution: "lodash@patch:lodash@npm%3A4.17.21#./.yarn/patches/lodash-npm-4.17.21-6382451519.patch::version=4.17.21&hash=2c6e9e&locator=berry-patch%40workspace%3A." 
- checksum: 0f54b5291a5cfa3322cc3cb85716df4e23503535b79a341f12a41231513baaa6285fd9808d9894100dcea8b36bf91644360c4f783db1814719a4e103a04f59f3 - languageName: node - linkType: hard - -"loose-envify@npm:^1.1.0, loose-envify@npm:^1.4.0": - version: 1.4.0 - resolution: "loose-envify@npm:1.4.0" - dependencies: - js-tokens: ^3.0.0 || ^4.0.0 - bin: - loose-envify: cli.js - checksum: 6517e24e0cad87ec9888f500c5b5947032cdfe6ef65e1c1936a0c48a524b81e65542c9c3edc91c97d5bddc806ee2a985dbc79be89215d613b1de5db6d1cfe6f4 - languageName: node - linkType: hard - -"lru-cache@npm:^6.0.0": - version: 6.0.0 - resolution: "lru-cache@npm:6.0.0" - dependencies: - yallist: ^4.0.0 - checksum: f97f499f898f23e4585742138a22f22526254fdba6d75d41a1c2526b3b6cc5747ef59c5612ba7375f42aca4f8461950e925ba08c991ead0651b4918b7c978297 - languageName: node - linkType: hard - -"merge2@npm:^1.3.0, merge2@npm:^1.4.1": - version: 1.4.1 - resolution: "merge2@npm:1.4.1" - checksum: 7268db63ed5169466540b6fb947aec313200bcf6d40c5ab722c22e242f651994619bcd85601602972d3c85bd2cc45a358a4c61937e9f11a061919a1da569b0c2 - languageName: node - linkType: hard - -"micromatch@npm:^4.0.4": - version: 4.0.5 - resolution: "micromatch@npm:4.0.5" - dependencies: - braces: ^3.0.2 - picomatch: ^2.3.1 - checksum: 02a17b671c06e8fefeeb6ef996119c1e597c942e632a21ef589154f23898c9c6a9858526246abb14f8bca6e77734aa9dcf65476fca47cedfb80d9577d52843fc - languageName: node - linkType: hard - -"minimatch@npm:^3.0.4, minimatch@npm:^3.1.1, minimatch@npm:^3.1.2": - version: 3.1.2 - resolution: "minimatch@npm:3.1.2" - dependencies: - brace-expansion: ^1.1.7 - checksum: c154e566406683e7bcb746e000b84d74465b3a832c45d59912b9b55cd50dee66e5c4b1e5566dba26154040e51672f9aa450a9aef0c97cfc7336b78b7afb9540a - languageName: node - linkType: hard - -"minimist@npm:^1.2.0, minimist@npm:^1.2.6": - version: 1.2.6 - resolution: "minimist@npm:1.2.6" - checksum: d15428cd1e11eb14e1233bcfb88ae07ed7a147de251441d61158619dfb32c4d7e9061d09cab4825fdee18ecd6fce323228c8c47b5ba7cd20af378ca4048fb3fb - languageName: node - linkType: hard - -"ms@npm:2.0.0": - version: 2.0.0 - resolution: "ms@npm:2.0.0" - checksum: 0e6a22b8b746d2e0b65a430519934fefd41b6db0682e3477c10f60c76e947c4c0ad06f63ffdf1d78d335f83edee8c0aa928aa66a36c7cd95b69b26f468d527f4 - languageName: node - linkType: hard - -"ms@npm:2.1.2": - version: 2.1.2 - resolution: "ms@npm:2.1.2" - checksum: 673cdb2c3133eb050c745908d8ce632ed2c02d85640e2edb3ace856a2266a813b30c613569bf3354fdf4ea7d1a1494add3bfa95e2713baa27d0c2c71fc44f58f - languageName: node - linkType: hard - -"ms@npm:^2.1.1": - version: 2.1.3 - resolution: "ms@npm:2.1.3" - checksum: aa92de608021b242401676e35cfa5aa42dd70cbdc082b916da7fb925c542173e36bce97ea3e804923fe92c0ad991434e4a38327e15a1b5b5f945d66df615ae6d - languageName: node - linkType: hard - -"nanoid@npm:^3.3.4": - version: 3.3.4 - resolution: "nanoid@npm:3.3.4" - bin: - nanoid: bin/nanoid.cjs - checksum: 2fddd6dee994b7676f008d3ffa4ab16035a754f4bb586c61df5a22cf8c8c94017aadd360368f47d653829e0569a92b129979152ff97af23a558331e47e37cd9c - languageName: node - linkType: hard - -"natural-compare@npm:^1.4.0": - version: 1.4.0 - resolution: "natural-compare@npm:1.4.0" - checksum: 23ad088b08f898fc9b53011d7bb78ec48e79de7627e01ab5518e806033861bef68d5b0cd0e2205c2f36690ac9571ff6bcb05eb777ced2eeda8d4ac5b44592c3d - languageName: node - linkType: hard - -"next-transpile-modules@npm:9.0.0": - version: 9.0.0 - resolution: "next-transpile-modules@npm:9.0.0" - dependencies: - enhanced-resolve: ^5.7.0 - escalade: ^3.1.1 - checksum: 
9a5d86d80cedc2404b2b1d5bd4994f2f7bf60e5e20f24e8cc5cfec34da1418b4a439916f37a95ca336bcf6d81094c3647354ac6a0c6737b3df59e62b6380507d - languageName: node - linkType: hard - -"next@npm:12.2.5": - version: 12.2.5 - resolution: "next@npm:12.2.5" - dependencies: - "@next/env": 12.2.5 - "@next/swc-android-arm-eabi": 12.2.5 - "@next/swc-android-arm64": 12.2.5 - "@next/swc-darwin-arm64": 12.2.5 - "@next/swc-darwin-x64": 12.2.5 - "@next/swc-freebsd-x64": 12.2.5 - "@next/swc-linux-arm-gnueabihf": 12.2.5 - "@next/swc-linux-arm64-gnu": 12.2.5 - "@next/swc-linux-arm64-musl": 12.2.5 - "@next/swc-linux-x64-gnu": 12.2.5 - "@next/swc-linux-x64-musl": 12.2.5 - "@next/swc-win32-arm64-msvc": 12.2.5 - "@next/swc-win32-ia32-msvc": 12.2.5 - "@next/swc-win32-x64-msvc": 12.2.5 - "@swc/helpers": 0.4.3 - caniuse-lite: ^1.0.30001332 - postcss: 8.4.14 - styled-jsx: 5.0.4 - use-sync-external-store: 1.2.0 - peerDependencies: - fibers: ">= 3.1.0" - node-sass: ^6.0.0 || ^7.0.0 - react: ^17.0.2 || ^18.0.0-0 - react-dom: ^17.0.2 || ^18.0.0-0 - sass: ^1.3.0 - dependenciesMeta: - "@next/swc-android-arm-eabi": - optional: true - "@next/swc-android-arm64": - optional: true - "@next/swc-darwin-arm64": - optional: true - "@next/swc-darwin-x64": - optional: true - "@next/swc-freebsd-x64": - optional: true - "@next/swc-linux-arm-gnueabihf": - optional: true - "@next/swc-linux-arm64-gnu": - optional: true - "@next/swc-linux-arm64-musl": - optional: true - "@next/swc-linux-x64-gnu": - optional: true - "@next/swc-linux-x64-musl": - optional: true - "@next/swc-win32-arm64-msvc": - optional: true - "@next/swc-win32-ia32-msvc": - optional: true - "@next/swc-win32-x64-msvc": - optional: true - peerDependenciesMeta: - fibers: - optional: true - node-sass: - optional: true - sass: - optional: true - bin: - next: dist/bin/next - checksum: e8fcbd93d74fda81640fd174a9d380f22db404d3ce0893730db3db806317ae18c86d1dbb502e63e47c92fb21a93812de62639c2f1204330cb569fdac4d3d0573 - languageName: node - linkType: hard - -"node-releases@npm:^2.0.6": - version: 2.0.6 - resolution: "node-releases@npm:2.0.6" - checksum: e86a926dc9fbb3b41b4c4a89d998afdf140e20a4e8dbe6c0a807f7b2948b42ea97d7fd3ad4868041487b6e9ee98409829c6e4d84a734a4215dff060a7fbeb4bf - languageName: node - linkType: hard - -"object-assign@npm:^4.1.1": - version: 4.1.1 - resolution: "object-assign@npm:4.1.1" - checksum: fcc6e4ea8c7fe48abfbb552578b1c53e0d194086e2e6bbbf59e0a536381a292f39943c6e9628af05b5528aa5e3318bb30d6b2e53cadaf5b8fe9e12c4b69af23f - languageName: node - linkType: hard - -"object-inspect@npm:^1.12.2, object-inspect@npm:^1.9.0": - version: 1.12.2 - resolution: "object-inspect@npm:1.12.2" - checksum: a534fc1b8534284ed71f25ce3a496013b7ea030f3d1b77118f6b7b1713829262be9e6243acbcb3ef8c626e2b64186112cb7f6db74e37b2789b9c789ca23048b2 - languageName: node - linkType: hard - -"object-keys@npm:^1.1.1": - version: 1.1.1 - resolution: "object-keys@npm:1.1.1" - checksum: b363c5e7644b1e1b04aa507e88dcb8e3a2f52b6ffd0ea801e4c7a62d5aa559affe21c55a07fd4b1fd55fc03a33c610d73426664b20032405d7b92a1414c34d6a - languageName: node - linkType: hard - -"object.assign@npm:^4.1.3, object.assign@npm:^4.1.4": - version: 4.1.4 - resolution: "object.assign@npm:4.1.4" - dependencies: - call-bind: ^1.0.2 - define-properties: ^1.1.4 - has-symbols: ^1.0.3 - object-keys: ^1.1.1 - checksum: 76cab513a5999acbfe0ff355f15a6a125e71805fcf53de4e9d4e082e1989bdb81d1e329291e1e4e0ae7719f0e4ef80e88fb2d367ae60500d79d25a6224ac8864 - languageName: node - linkType: hard - -"object.entries@npm:^1.1.5": - version: 1.1.5 - resolution: 
"object.entries@npm:1.1.5" - dependencies: - call-bind: ^1.0.2 - define-properties: ^1.1.3 - es-abstract: ^1.19.1 - checksum: d658696f74fd222060d8428d2a9fda2ce736b700cb06f6bdf4a16a1892d145afb746f453502b2fa55d1dca8ead6f14ddbcf66c545df45adadea757a6c4cd86c7 - languageName: node - linkType: hard - -"object.fromentries@npm:^2.0.5": - version: 2.0.5 - resolution: "object.fromentries@npm:2.0.5" - dependencies: - call-bind: ^1.0.2 - define-properties: ^1.1.3 - es-abstract: ^1.19.1 - checksum: 61a0b565ded97b76df9e30b569729866e1824cce902f98e90bb106e84f378aea20163366f66dc75c9000e2aad2ed0caf65c6f530cb2abc4c0c0f6c982102db4b - languageName: node - linkType: hard - -"object.hasown@npm:^1.1.1": - version: 1.1.1 - resolution: "object.hasown@npm:1.1.1" - dependencies: - define-properties: ^1.1.4 - es-abstract: ^1.19.5 - checksum: d8ed4907ce57f48b93e3b53c418fd6787bf226a51e8d698c91e39b78e80fe5b124cb6282f6a9d5be21cf9e2c7829ab10206dcc6112b7748860eefe641880c793 - languageName: node - linkType: hard - -"object.values@npm:^1.1.5": - version: 1.1.5 - resolution: "object.values@npm:1.1.5" - dependencies: - call-bind: ^1.0.2 - define-properties: ^1.1.3 - es-abstract: ^1.19.1 - checksum: 0f17e99741ebfbd0fa55ce942f6184743d3070c61bd39221afc929c8422c4907618c8da694c6915bc04a83ab3224260c779ba37fc07bb668bdc5f33b66a902a4 - languageName: node - linkType: hard - -"once@npm:^1.3.0": - version: 1.4.0 - resolution: "once@npm:1.4.0" - dependencies: - wrappy: 1 - checksum: cd0a88501333edd640d95f0d2700fbde6bff20b3d4d9bdc521bdd31af0656b5706570d6c6afe532045a20bb8dc0849f8332d6f2a416e0ba6d3d3b98806c7db68 - languageName: node - linkType: hard - -"optionator@npm:^0.9.1": - version: 0.9.1 - resolution: "optionator@npm:0.9.1" - dependencies: - deep-is: ^0.1.3 - fast-levenshtein: ^2.0.6 - levn: ^0.4.1 - prelude-ls: ^1.2.1 - type-check: ^0.4.0 - word-wrap: ^1.2.3 - checksum: dbc6fa065604b24ea57d734261914e697bd73b69eff7f18e967e8912aa2a40a19a9f599a507fa805be6c13c24c4eae8c71306c239d517d42d4c041c942f508a0 - languageName: node - linkType: hard - -"parent-module@npm:^1.0.0": - version: 1.0.1 - resolution: "parent-module@npm:1.0.1" - dependencies: - callsites: ^3.0.0 - checksum: 6ba8b255145cae9470cf5551eb74be2d22281587af787a2626683a6c20fbb464978784661478dd2a3f1dad74d1e802d403e1b03c1a31fab310259eec8ac560ff - languageName: node - linkType: hard - -"path-is-absolute@npm:^1.0.0": - version: 1.0.1 - resolution: "path-is-absolute@npm:1.0.1" - checksum: 060840f92cf8effa293bcc1bea81281bd7d363731d214cbe5c227df207c34cd727430f70c6037b5159c8a870b9157cba65e775446b0ab06fd5ecc7e54615a3b8 - languageName: node - linkType: hard - -"path-key@npm:^3.1.0": - version: 3.1.1 - resolution: "path-key@npm:3.1.1" - checksum: 55cd7a9dd4b343412a8386a743f9c746ef196e57c823d90ca3ab917f90ab9f13dd0ded27252ba49dbdfcab2b091d998bc446f6220cd3cea65db407502a740020 - languageName: node - linkType: hard - -"path-parse@npm:^1.0.7": - version: 1.0.7 - resolution: "path-parse@npm:1.0.7" - checksum: 49abf3d81115642938a8700ec580da6e830dde670be21893c62f4e10bd7dd4c3742ddc603fe24f898cba7eb0c6bc1777f8d9ac14185d34540c6d4d80cd9cae8a - languageName: node - linkType: hard - -"path-type@npm:^4.0.0": - version: 4.0.0 - resolution: "path-type@npm:4.0.0" - checksum: 5b1e2daa247062061325b8fdbfd1fb56dde0a448fb1455453276ea18c60685bdad23a445dc148cf87bc216be1573357509b7d4060494a6fd768c7efad833ee45 - languageName: node - linkType: hard - -"picocolors@npm:^1.0.0": - version: 1.0.0 - resolution: "picocolors@npm:1.0.0" - checksum: 
a2e8092dd86c8396bdba9f2b5481032848525b3dc295ce9b57896f931e63fc16f79805144321f72976383fc249584672a75cc18d6777c6b757603f372f745981 - languageName: node - linkType: hard - -"picomatch@npm:^2.3.1": - version: 2.3.1 - resolution: "picomatch@npm:2.3.1" - checksum: 050c865ce81119c4822c45d3c84f1ced46f93a0126febae20737bd05ca20589c564d6e9226977df859ed5e03dc73f02584a2b0faad36e896936238238b0446cf - languageName: node - linkType: hard - -"postcss@npm:8.4.14": - version: 8.4.14 - resolution: "postcss@npm:8.4.14" - dependencies: - nanoid: ^3.3.4 - picocolors: ^1.0.0 - source-map-js: ^1.0.2 - checksum: fe58766ff32e4becf65a7d57678995cfd239df6deed2fe0557f038b47c94e4132e7e5f68b5aa820c13adfec32e523b693efaeb65798efb995ce49ccd83953816 - languageName: node - linkType: hard - -"prelude-ls@npm:^1.2.1": - version: 1.2.1 - resolution: "prelude-ls@npm:1.2.1" - checksum: cd192ec0d0a8e4c6da3bb80e4f62afe336df3f76271ac6deb0e6a36187133b6073a19e9727a1ff108cd8b9982e4768850d413baa71214dd80c7979617dca827a - languageName: node - linkType: hard - -prettier@latest: - version: 2.7.1 - resolution: "prettier@npm:2.7.1" - bin: - prettier: bin-prettier.js - checksum: 55a4409182260866ab31284d929b3cb961e5fdb91fe0d2e099dac92eaecec890f36e524b4c19e6ceae839c99c6d7195817579cdffc8e2c80da0cb794463a748b - languageName: node - linkType: hard - -"progress@npm:^2.0.0": - version: 2.0.3 - resolution: "progress@npm:2.0.3" - checksum: f67403fe7b34912148d9252cb7481266a354bd99ce82c835f79070643bb3c6583d10dbcfda4d41e04bbc1d8437e9af0fb1e1f2135727878f5308682a579429b7 - languageName: node - linkType: hard - -"prop-types@npm:^15.8.1": - version: 15.8.1 - resolution: "prop-types@npm:15.8.1" - dependencies: - loose-envify: ^1.4.0 - object-assign: ^4.1.1 - react-is: ^16.13.1 - checksum: c056d3f1c057cb7ff8344c645450e14f088a915d078dcda795041765047fa080d38e5d626560ccaac94a4e16e3aa15f3557c1a9a8d1174530955e992c675e459 - languageName: node - linkType: hard - -"punycode@npm:^2.1.0": - version: 2.1.1 - resolution: "punycode@npm:2.1.1" - checksum: 823bf443c6dd14f669984dea25757b37993f67e8d94698996064035edd43bed8a5a17a9f12e439c2b35df1078c6bec05a6c86e336209eb1061e8025c481168e8 - languageName: node - linkType: hard - -"queue-microtask@npm:^1.2.2": - version: 1.2.3 - resolution: "queue-microtask@npm:1.2.3" - checksum: b676f8c040cdc5b12723ad2f91414d267605b26419d5c821ff03befa817ddd10e238d22b25d604920340fd73efd8ba795465a0377c4adf45a4a41e4234e42dc4 - languageName: node - linkType: hard - -"react-dom@npm:18.2.0": - version: 18.2.0 - resolution: "react-dom@npm:18.2.0" - dependencies: - loose-envify: ^1.1.0 - scheduler: ^0.23.0 - peerDependencies: - react: ^18.2.0 - checksum: 7d323310bea3a91be2965f9468d552f201b1c27891e45ddc2d6b8f717680c95a75ae0bc1e3f5cf41472446a2589a75aed4483aee8169287909fcd59ad149e8cc - languageName: node - linkType: hard - -"react-is@npm:^16.13.1": - version: 16.13.1 - resolution: "react-is@npm:16.13.1" - checksum: f7a19ac3496de32ca9ae12aa030f00f14a3d45374f1ceca0af707c831b2a6098ef0d6bdae51bd437b0a306d7f01d4677fcc8de7c0d331eb47ad0f46130e53c5f - languageName: node - linkType: hard - -"react@npm:18.2.0, react@npm:^18.2.0": - version: 18.2.0 - resolution: "react@npm:18.2.0" - dependencies: - loose-envify: ^1.1.0 - checksum: 88e38092da8839b830cda6feef2e8505dec8ace60579e46aa5490fc3dc9bba0bd50336507dc166f43e3afc1c42939c09fe33b25fae889d6f402721dcd78fca1b - languageName: node - linkType: hard - -"regenerator-runtime@npm:^0.13.4": - version: 0.13.9 - resolution: "regenerator-runtime@npm:0.13.9" - checksum: 
65ed455fe5afd799e2897baf691ca21c2772e1a969d19bb0c4695757c2d96249eb74ee3553ea34a91062b2a676beedf630b4c1551cc6299afb937be1426ec55e - languageName: node - linkType: hard - -"regexp.prototype.flags@npm:^1.4.1, regexp.prototype.flags@npm:^1.4.3": - version: 1.4.3 - resolution: "regexp.prototype.flags@npm:1.4.3" - dependencies: - call-bind: ^1.0.2 - define-properties: ^1.1.3 - functions-have-names: ^1.2.2 - checksum: 51228bae732592adb3ededd5e15426be25f289e9c4ef15212f4da73f4ec3919b6140806374b8894036a86020d054a8d2657d3fee6bb9b4d35d8939c20030b7a6 - languageName: node - linkType: hard - -"regexpp@npm:^3.1.0": - version: 3.2.0 - resolution: "regexpp@npm:3.2.0" - checksum: a78dc5c7158ad9ddcfe01aa9144f46e192ddbfa7b263895a70a5c6c73edd9ce85faf7c0430e59ac38839e1734e275b9c3de5c57ee3ab6edc0e0b1bdebefccef8 - languageName: node - linkType: hard - -"require-from-string@npm:^2.0.2": - version: 2.0.2 - resolution: "require-from-string@npm:2.0.2" - checksum: a03ef6895445f33a4015300c426699bc66b2b044ba7b670aa238610381b56d3f07c686251740d575e22f4c87531ba662d06937508f0f3c0f1ddc04db3130560b - languageName: node - linkType: hard - -"resolve-from@npm:^4.0.0": - version: 4.0.0 - resolution: "resolve-from@npm:4.0.0" - checksum: f4ba0b8494846a5066328ad33ef8ac173801a51739eb4d63408c847da9a2e1c1de1e6cbbf72699211f3d13f8fc1325648b169bd15eb7da35688e30a5fb0e4a7f - languageName: node - linkType: hard - -"resolve@npm:^1.20.0, resolve@npm:^1.22.0": - version: 1.22.1 - resolution: "resolve@npm:1.22.1" - dependencies: - is-core-module: ^2.9.0 - path-parse: ^1.0.7 - supports-preserve-symlinks-flag: ^1.0.0 - bin: - resolve: bin/resolve - checksum: 07af5fc1e81aa1d866cbc9e9460fbb67318a10fa3c4deadc35c3ad8a898ee9a71a86a65e4755ac3195e0ea0cfbe201eb323ebe655ce90526fd61917313a34e4e - languageName: node - linkType: hard - -"resolve@npm:^2.0.0-next.3": - version: 2.0.0-next.4 - resolution: "resolve@npm:2.0.0-next.4" - dependencies: - is-core-module: ^2.9.0 - path-parse: ^1.0.7 - supports-preserve-symlinks-flag: ^1.0.0 - bin: - resolve: bin/resolve - checksum: c438ac9a650f2030fd074219d7f12ceb983b475da2d89ad3d6dd05fbf6b7a0a8cd37d4d10b43cb1f632bc19f22246ab7f36ebda54d84a29bfb2910a0680906d3 - languageName: node - linkType: hard - -"resolve@patch:resolve@^1.20.0#~builtin, resolve@patch:resolve@^1.22.0#~builtin": - version: 1.22.1 - resolution: "resolve@patch:resolve@npm%3A1.22.1#~builtin::version=1.22.1&hash=07638b" - dependencies: - is-core-module: ^2.9.0 - path-parse: ^1.0.7 - supports-preserve-symlinks-flag: ^1.0.0 - bin: - resolve: bin/resolve - checksum: 5656f4d0bedcf8eb52685c1abdf8fbe73a1603bb1160a24d716e27a57f6cecbe2432ff9c89c2bd57542c3a7b9d14b1882b73bfe2e9d7849c9a4c0b8b39f02b8b - languageName: node - linkType: hard - -"resolve@patch:resolve@^2.0.0-next.3#~builtin": - version: 2.0.0-next.4 - resolution: "resolve@patch:resolve@npm%3A2.0.0-next.4#~builtin::version=2.0.0-next.4&hash=07638b" - dependencies: - is-core-module: ^2.9.0 - path-parse: ^1.0.7 - supports-preserve-symlinks-flag: ^1.0.0 - bin: - resolve: bin/resolve - checksum: 4bf9f4f8a458607af90518ff73c67a4bc1a38b5a23fef2bb0ccbd45e8be89820a1639b637b0ba377eb2be9eedfb1739a84cde24fe4cd670c8207d8fea922b011 - languageName: node - linkType: hard - -"reusify@npm:^1.0.4": - version: 1.0.4 - resolution: "reusify@npm:1.0.4" - checksum: c3076ebcc22a6bc252cb0b9c77561795256c22b757f40c0d8110b1300723f15ec0fc8685e8d4ea6d7666f36c79ccc793b1939c748bf36f18f542744a4e379fcc - languageName: node - linkType: hard - -"rimraf@npm:^3.0.2": - version: 3.0.2 - resolution: "rimraf@npm:3.0.2" - dependencies: - glob: 
^7.1.3 - bin: - rimraf: bin.js - checksum: 87f4164e396f0171b0a3386cc1877a817f572148ee13a7e113b238e48e8a9f2f31d009a92ec38a591ff1567d9662c6b67fd8818a2dbbaed74bc26a87a2a4a9a0 - languageName: node - linkType: hard - -"run-parallel@npm:^1.1.9": - version: 1.2.0 - resolution: "run-parallel@npm:1.2.0" - dependencies: - queue-microtask: ^1.2.2 - checksum: cb4f97ad25a75ebc11a8ef4e33bb962f8af8516bb2001082ceabd8902e15b98f4b84b4f8a9b222e5d57fc3bd1379c483886ed4619367a7680dad65316993021d - languageName: node - linkType: hard - -"safe-buffer@npm:~5.1.1": - version: 5.1.2 - resolution: "safe-buffer@npm:5.1.2" - checksum: f2f1f7943ca44a594893a852894055cf619c1fbcb611237fc39e461ae751187e7baf4dc391a72125e0ac4fb2d8c5c0b3c71529622e6a58f46b960211e704903c - languageName: node - linkType: hard - -"scheduler@npm:^0.23.0": - version: 0.23.0 - resolution: "scheduler@npm:0.23.0" - dependencies: - loose-envify: ^1.1.0 - checksum: d79192eeaa12abef860c195ea45d37cbf2bbf5f66e3c4dcd16f54a7da53b17788a70d109ee3d3dde1a0fd50e6a8fc171f4300356c5aee4fc0171de526bf35f8a - languageName: node - linkType: hard - -"semver@npm:^6.3.0": - version: 6.3.0 - resolution: "semver@npm:6.3.0" - bin: - semver: ./bin/semver.js - checksum: 1b26ecf6db9e8292dd90df4e781d91875c0dcc1b1909e70f5d12959a23c7eebb8f01ea581c00783bbee72ceeaad9505797c381756326073850dc36ed284b21b9 - languageName: node - linkType: hard - -"semver@npm:^7.2.1, semver@npm:^7.3.7": - version: 7.3.7 - resolution: "semver@npm:7.3.7" - dependencies: - lru-cache: ^6.0.0 - bin: - semver: bin/semver.js - checksum: 2fa3e877568cd6ce769c75c211beaed1f9fce80b28338cadd9d0b6c40f2e2862bafd62c19a6cff42f3d54292b7c623277bcab8816a2b5521cf15210d43e75232 - languageName: node - linkType: hard - -"shebang-command@npm:^2.0.0": - version: 2.0.0 - resolution: "shebang-command@npm:2.0.0" - dependencies: - shebang-regex: ^3.0.0 - checksum: 6b52fe87271c12968f6a054e60f6bde5f0f3d2db483a1e5c3e12d657c488a15474121a1d55cd958f6df026a54374ec38a4a963988c213b7570e1d51575cea7fa - languageName: node - linkType: hard - -"shebang-regex@npm:^3.0.0": - version: 3.0.0 - resolution: "shebang-regex@npm:3.0.0" - checksum: 1a2bcae50de99034fcd92ad4212d8e01eedf52c7ec7830eedcf886622804fe36884278f2be8be0ea5fde3fd1c23911643a4e0f726c8685b61871c8908af01222 - languageName: node - linkType: hard - -"side-channel@npm:^1.0.4": - version: 1.0.4 - resolution: "side-channel@npm:1.0.4" - dependencies: - call-bind: ^1.0.0 - get-intrinsic: ^1.0.2 - object-inspect: ^1.9.0 - checksum: 351e41b947079c10bd0858364f32bb3a7379514c399edb64ab3dce683933483fc63fb5e4efe0a15a2e8a7e3c436b6a91736ddb8d8c6591b0460a24bb4a1ee245 - languageName: node - linkType: hard - -"slash@npm:^3.0.0": - version: 3.0.0 - resolution: "slash@npm:3.0.0" - checksum: 94a93fff615f25a999ad4b83c9d5e257a7280c90a32a7cb8b4a87996e4babf322e469c42b7f649fd5796edd8687652f3fb452a86dc97a816f01113183393f11c - languageName: node - linkType: hard - -"slice-ansi@npm:^4.0.0": - version: 4.0.0 - resolution: "slice-ansi@npm:4.0.0" - dependencies: - ansi-styles: ^4.0.0 - astral-regex: ^2.0.0 - is-fullwidth-code-point: ^3.0.0 - checksum: 4a82d7f085b0e1b070e004941ada3c40d3818563ac44766cca4ceadd2080427d337554f9f99a13aaeb3b4a94d9964d9466c807b3d7b7541d1ec37ee32d308756 - languageName: node - linkType: hard - -"source-map-js@npm:^1.0.2": - version: 1.0.2 - resolution: "source-map-js@npm:1.0.2" - checksum: c049a7fc4deb9a7e9b481ae3d424cc793cb4845daa690bc5a05d428bf41bf231ced49b4cf0c9e77f9d42fdb3d20d6187619fc586605f5eabe995a316da8d377c - languageName: node - linkType: hard - -"sprintf-js@npm:~1.0.2": - version: 1.0.3 - 
resolution: "sprintf-js@npm:1.0.3" - checksum: 19d79aec211f09b99ec3099b5b2ae2f6e9cdefe50bc91ac4c69144b6d3928a640bb6ae5b3def70c2e85a2c3d9f5ec2719921e3a59d3ca3ef4b2fd1a4656a0df3 - languageName: node - linkType: hard - -"string-width@npm:^4.2.3": - version: 4.2.3 - resolution: "string-width@npm:4.2.3" - dependencies: - emoji-regex: ^8.0.0 - is-fullwidth-code-point: ^3.0.0 - strip-ansi: ^6.0.1 - checksum: e52c10dc3fbfcd6c3a15f159f54a90024241d0f149cf8aed2982a2d801d2e64df0bf1dc351cf8e95c3319323f9f220c16e740b06faecd53e2462df1d2b5443fb - languageName: node - linkType: hard - -"string.prototype.matchall@npm:^4.0.7": - version: 4.0.7 - resolution: "string.prototype.matchall@npm:4.0.7" - dependencies: - call-bind: ^1.0.2 - define-properties: ^1.1.3 - es-abstract: ^1.19.1 - get-intrinsic: ^1.1.1 - has-symbols: ^1.0.3 - internal-slot: ^1.0.3 - regexp.prototype.flags: ^1.4.1 - side-channel: ^1.0.4 - checksum: fc09f3ccbfb325de0472bcc87a6be0598a7499e0b4a31db5789676155b15754a4cc4bb83924f15fc9ed48934dac7366ee52c8b9bd160bed6fd072c93b489e75c - languageName: node - linkType: hard - -"string.prototype.trimend@npm:^1.0.5": - version: 1.0.5 - resolution: "string.prototype.trimend@npm:1.0.5" - dependencies: - call-bind: ^1.0.2 - define-properties: ^1.1.4 - es-abstract: ^1.19.5 - checksum: d44f543833112f57224e79182debadc9f4f3bf9d48a0414d6f0cbd2a86f2b3e8c0ca1f95c3f8e5b32ae83e91554d79d932fc746b411895f03f93d89ed3dfb6bc - languageName: node - linkType: hard - -"string.prototype.trimstart@npm:^1.0.5": - version: 1.0.5 - resolution: "string.prototype.trimstart@npm:1.0.5" - dependencies: - call-bind: ^1.0.2 - define-properties: ^1.1.4 - es-abstract: ^1.19.5 - checksum: a4857c5399ad709d159a77371eeaa8f9cc284469a0b5e1bfe405de16f1fd4166a8ea6f4180e55032f348d1b679b1599fd4301fbc7a8b72bdb3e795e43f7b1048 - languageName: node - linkType: hard - -"strip-ansi@npm:^6.0.0, strip-ansi@npm:^6.0.1": - version: 6.0.1 - resolution: "strip-ansi@npm:6.0.1" - dependencies: - ansi-regex: ^5.0.1 - checksum: f3cd25890aef3ba6e1a74e20896c21a46f482e93df4a06567cebf2b57edabb15133f1f94e57434e0a958d61186087b1008e89c94875d019910a213181a14fc8c - languageName: node - linkType: hard - -"strip-bom@npm:^3.0.0": - version: 3.0.0 - resolution: "strip-bom@npm:3.0.0" - checksum: 8d50ff27b7ebe5ecc78f1fe1e00fcdff7af014e73cf724b46fb81ef889eeb1015fc5184b64e81a2efe002180f3ba431bdd77e300da5c6685d702780fbf0c8d5b - languageName: node - linkType: hard - -"strip-json-comments@npm:^3.1.0, strip-json-comments@npm:^3.1.1": - version: 3.1.1 - resolution: "strip-json-comments@npm:3.1.1" - checksum: 492f73e27268f9b1c122733f28ecb0e7e8d8a531a6662efbd08e22cccb3f9475e90a1b82cab06a392f6afae6d2de636f977e231296400d0ec5304ba70f166443 - languageName: node - linkType: hard - -"styled-jsx@npm:5.0.4": - version: 5.0.4 - resolution: "styled-jsx@npm:5.0.4" - peerDependencies: - react: ">= 16.8.0 || 17.x.x || ^18.0.0-0" - peerDependenciesMeta: - "@babel/core": - optional: true - babel-plugin-macros: - optional: true - checksum: db7530155626e5eebc9d80ca117ea5aed6219b0a65469196b0b5727550fbe743117d7eea1499d80511ccb312d31f4a1027a58d1f94a83f0986c9acfdcce8bdd1 - languageName: node - linkType: hard - -"supports-color@npm:^5.3.0": - version: 5.5.0 - resolution: "supports-color@npm:5.5.0" - dependencies: - has-flag: ^3.0.0 - checksum: 95f6f4ba5afdf92f495b5a912d4abee8dcba766ae719b975c56c084f5004845f6f5a5f7769f52d53f40e21952a6d87411bafe34af4a01e65f9926002e38e1dac - languageName: node - linkType: hard - -"supports-color@npm:^7.1.0": - version: 7.2.0 - resolution: "supports-color@npm:7.2.0" - dependencies: 
- has-flag: ^4.0.0 - checksum: 3dda818de06ebbe5b9653e07842d9479f3555ebc77e9a0280caf5a14fb877ffee9ed57007c3b78f5a6324b8dbeec648d9e97a24e2ed9fdb81ddc69ea07100f4a - languageName: node - linkType: hard - -"supports-preserve-symlinks-flag@npm:^1.0.0": - version: 1.0.0 - resolution: "supports-preserve-symlinks-flag@npm:1.0.0" - checksum: 53b1e247e68e05db7b3808b99b892bd36fb096e6fba213a06da7fab22045e97597db425c724f2bbd6c99a3c295e1e73f3e4de78592289f38431049e1277ca0ae - languageName: node - linkType: hard - -"table@npm:^6.0.9": - version: 6.8.0 - resolution: "table@npm:6.8.0" - dependencies: - ajv: ^8.0.1 - lodash.truncate: ^4.4.2 - slice-ansi: ^4.0.0 - string-width: ^4.2.3 - strip-ansi: ^6.0.1 - checksum: 5b07fe462ee03d2e1fac02cbb578efd2e0b55ac07e3d3db2e950aa9570ade5a4a2b8d3c15e9f25c89e4e50b646bc4269934601ee1eef4ca7968ad31960977690 - languageName: node - linkType: hard - -"tapable@npm:^2.2.0": - version: 2.2.1 - resolution: "tapable@npm:2.2.1" - checksum: 3b7a1b4d86fa940aad46d9e73d1e8739335efd4c48322cb37d073eb6f80f5281889bf0320c6d8ffcfa1a0dd5bfdbd0f9d037e252ef972aca595330538aac4d51 - languageName: node - linkType: hard - -"text-table@npm:^0.2.0": - version: 0.2.0 - resolution: "text-table@npm:0.2.0" - checksum: b6937a38c80c7f84d9c11dd75e49d5c44f71d95e810a3250bd1f1797fc7117c57698204adf676b71497acc205d769d65c16ae8fa10afad832ae1322630aef10a - languageName: node - linkType: hard - -"to-fast-properties@npm:^2.0.0": - version: 2.0.0 - resolution: "to-fast-properties@npm:2.0.0" - checksum: be2de62fe58ead94e3e592680052683b1ec986c72d589e7b21e5697f8744cdbf48c266fa72f6c15932894c10187b5f54573a3bcf7da0bfd964d5caf23d436168 - languageName: node - linkType: hard - -"to-regex-range@npm:^5.0.1": - version: 5.0.1 - resolution: "to-regex-range@npm:5.0.1" - dependencies: - is-number: ^7.0.0 - checksum: f76fa01b3d5be85db6a2a143e24df9f60dd047d151062d0ba3df62953f2f697b16fe5dad9b0ac6191c7efc7b1d9dcaa4b768174b7b29da89d4428e64bc0a20ed - languageName: node - linkType: hard - -"tsconfig-paths@npm:^3.14.1": - version: 3.14.1 - resolution: "tsconfig-paths@npm:3.14.1" - dependencies: - "@types/json5": ^0.0.29 - json5: ^1.0.1 - minimist: ^1.2.6 - strip-bom: ^3.0.0 - checksum: 8afa01c673ebb4782ba53d3a12df97fa837ce524f8ad38ee4e2b2fd57f5ac79abc21c574e9e9eb014d93efe7fe8214001b96233b5c6ea75bd1ea82afe17a4c6d - languageName: node - linkType: hard - -"tsconfig@*, tsconfig@workspace:packages/tsconfig": - version: 0.0.0-use.local - resolution: "tsconfig@workspace:packages/tsconfig" - languageName: unknown - linkType: soft - -"tslib@npm:^1.8.1": - version: 1.14.1 - resolution: "tslib@npm:1.14.1" - checksum: dbe628ef87f66691d5d2959b3e41b9ca0045c3ee3c7c7b906cc1e328b39f199bb1ad9e671c39025bd56122ac57dfbf7385a94843b1cc07c60a4db74795829acd - languageName: node - linkType: hard - -"tslib@npm:^2.4.0": - version: 2.4.0 - resolution: "tslib@npm:2.4.0" - checksum: 8c4aa6a3c5a754bf76aefc38026134180c053b7bd2f81338cb5e5ebf96fefa0f417bff221592bf801077f5bf990562f6264fecbc42cd3309b33872cb6fc3b113 - languageName: node - linkType: hard - -"tsutils@npm:^3.21.0": - version: 3.21.0 - resolution: "tsutils@npm:3.21.0" - dependencies: - tslib: ^1.8.1 - peerDependencies: - typescript: ">=2.8.0 || >= 3.2.0-dev || >= 3.3.0-dev || >= 3.4.0-dev || >= 3.5.0-dev || >= 3.6.0-dev || >= 3.6.0-beta || >= 3.7.0-dev || >= 3.7.0-beta" - checksum: 1843f4c1b2e0f975e08c4c21caa4af4f7f65a12ac1b81b3b8489366826259323feb3fc7a243123453d2d1a02314205a7634e048d4a8009921da19f99755cdc48 - languageName: node - linkType: hard - -"turbo-android-arm64@npm:1.4.6": - version: 1.4.6 - resolution: 
"turbo-android-arm64@npm:1.4.6" - conditions: os=android & cpu=arm64 - languageName: node - linkType: hard - -"turbo-darwin-64@npm:1.4.6": - version: 1.4.6 - resolution: "turbo-darwin-64@npm:1.4.6" - conditions: os=darwin & cpu=x64 - languageName: node - linkType: hard - -"turbo-darwin-arm64@npm:1.4.6": - version: 1.4.6 - resolution: "turbo-darwin-arm64@npm:1.4.6" - conditions: os=darwin & cpu=arm64 - languageName: node - linkType: hard - -"turbo-freebsd-64@npm:1.4.6": - version: 1.4.6 - resolution: "turbo-freebsd-64@npm:1.4.6" - conditions: os=freebsd & cpu=x64 - languageName: node - linkType: hard - -"turbo-freebsd-arm64@npm:1.4.6": - version: 1.4.6 - resolution: "turbo-freebsd-arm64@npm:1.4.6" - conditions: os=freebsd & cpu=arm64 - languageName: node - linkType: hard - -"turbo-linux-32@npm:1.4.6": - version: 1.4.6 - resolution: "turbo-linux-32@npm:1.4.6" - conditions: os=linux & cpu=ia32 - languageName: node - linkType: hard - -"turbo-linux-64@npm:1.4.6": - version: 1.4.6 - resolution: "turbo-linux-64@npm:1.4.6" - conditions: os=linux & cpu=x64 - languageName: node - linkType: hard - -"turbo-linux-arm64@npm:1.4.6": - version: 1.4.6 - resolution: "turbo-linux-arm64@npm:1.4.6" - conditions: os=linux & cpu=arm64 - languageName: node - linkType: hard - -"turbo-linux-arm@npm:1.4.6": - version: 1.4.6 - resolution: "turbo-linux-arm@npm:1.4.6" - conditions: os=linux & cpu=arm - languageName: node - linkType: hard - -"turbo-linux-mips64le@npm:1.4.6": - version: 1.4.6 - resolution: "turbo-linux-mips64le@npm:1.4.6" - conditions: os=linux & cpu=mipsel - languageName: node - linkType: hard - -"turbo-linux-ppc64le@npm:1.4.6": - version: 1.4.6 - resolution: "turbo-linux-ppc64le@npm:1.4.6" - conditions: os=linux & cpu=ppc64 - languageName: node - linkType: hard - -"turbo-windows-32@npm:1.4.6": - version: 1.4.6 - resolution: "turbo-windows-32@npm:1.4.6" - conditions: os=win32 & cpu=ia32 - languageName: node - linkType: hard - -"turbo-windows-64@npm:1.4.6": - version: 1.4.6 - resolution: "turbo-windows-64@npm:1.4.6" - conditions: os=win32 & cpu=x64 - languageName: node - linkType: hard - -"turbo-windows-arm64@npm:1.4.6": - version: 1.4.6 - resolution: "turbo-windows-arm64@npm:1.4.6" - conditions: os=win32 & cpu=arm64 - languageName: node - linkType: hard - -turbo@latest: - version: 1.4.6 - resolution: "turbo@npm:1.4.6" - dependencies: - turbo-android-arm64: 1.4.6 - turbo-darwin-64: 1.4.6 - turbo-darwin-arm64: 1.4.6 - turbo-freebsd-64: 1.4.6 - turbo-freebsd-arm64: 1.4.6 - turbo-linux-32: 1.4.6 - turbo-linux-64: 1.4.6 - turbo-linux-arm: 1.4.6 - turbo-linux-arm64: 1.4.6 - turbo-linux-mips64le: 1.4.6 - turbo-linux-ppc64le: 1.4.6 - turbo-windows-32: 1.4.6 - turbo-windows-64: 1.4.6 - turbo-windows-arm64: 1.4.6 - dependenciesMeta: - turbo-android-arm64: - optional: true - turbo-darwin-64: - optional: true - turbo-darwin-arm64: - optional: true - turbo-freebsd-64: - optional: true - turbo-freebsd-arm64: - optional: true - turbo-linux-32: - optional: true - turbo-linux-64: - optional: true - turbo-linux-arm: - optional: true - turbo-linux-arm64: - optional: true - turbo-linux-mips64le: - optional: true - turbo-linux-ppc64le: - optional: true - turbo-windows-32: - optional: true - turbo-windows-64: - optional: true - turbo-windows-arm64: - optional: true - bin: - turbo: bin/turbo - checksum: f7191f36e0abddf6dc88eb9a83a007a8616ebed1edd44c37f9b19e0451f3ce90c4406699f6166a99c0a6f8d39cc1f24d96513b7ef16b21747863827538b9c966 - languageName: node - linkType: hard - -"type-check@npm:^0.4.0, type-check@npm:~0.4.0": - 
version: 0.4.0 - resolution: "type-check@npm:0.4.0" - dependencies: - prelude-ls: ^1.2.1 - checksum: ec688ebfc9c45d0c30412e41ca9c0cdbd704580eb3a9ccf07b9b576094d7b86a012baebc95681999dd38f4f444afd28504cb3a89f2ef16b31d4ab61a0739025a - languageName: node - linkType: hard - -"type-fest@npm:^0.20.2": - version: 0.20.2 - resolution: "type-fest@npm:0.20.2" - checksum: 4fb3272df21ad1c552486f8a2f8e115c09a521ad7a8db3d56d53718d0c907b62c6e9141ba5f584af3f6830d0872c521357e512381f24f7c44acae583ad517d73 - languageName: node - linkType: hard - -"typescript@npm:^4.5.2, typescript@npm:^4.5.3, typescript@npm:^4.7.4": - version: 4.8.3 - resolution: "typescript@npm:4.8.3" - bin: - tsc: bin/tsc - tsserver: bin/tsserver - checksum: 8286a5edcaf3d68e65c451aa1e7150ad1cf53ee0813c07ec35b7abdfdb10f355ecaa13c6a226a694ae7a67785fd7eeebf89f845da0b4f7e4a35561ddc459aba0 - languageName: node - linkType: hard - -"typescript@patch:typescript@^4.5.2#~builtin, typescript@patch:typescript@^4.5.3#~builtin, typescript@patch:typescript@^4.7.4#~builtin": - version: 4.8.3 - resolution: "typescript@patch:typescript@npm%3A4.8.3#~builtin::version=4.8.3&hash=a1c5e5" - bin: - tsc: bin/tsc - tsserver: bin/tsserver - checksum: 2222d2382fb3146089b1d27ce2b55e9d1f99cc64118f1aba75809b693b856c5d3c324f052f60c75b577947fc538bc1c27bad0eb76cbdba9a63a253489504ba7e - languageName: node - linkType: hard - -"ui@*, ui@workspace:packages/ui": - version: 0.0.0-use.local - resolution: "ui@workspace:packages/ui" - dependencies: - "@types/react": ^17.0.37 - "@types/react-dom": ^17.0.11 - eslint: ^7.32.0 - eslint-config-custom: "*" - react: ^18.2.0 - tsconfig: "*" - typescript: ^4.5.2 - languageName: unknown - linkType: soft - -"unbox-primitive@npm:^1.0.2": - version: 1.0.2 - resolution: "unbox-primitive@npm:1.0.2" - dependencies: - call-bind: ^1.0.2 - has-bigints: ^1.0.2 - has-symbols: ^1.0.3 - which-boxed-primitive: ^1.0.2 - checksum: b7a1cf5862b5e4b5deb091672ffa579aa274f648410009c81cca63fed3b62b610c4f3b773f912ce545bb4e31edc3138975b5bc777fc6e4817dca51affb6380e9 - languageName: node - linkType: hard - -"update-browserslist-db@npm:^1.0.9": - version: 1.0.9 - resolution: "update-browserslist-db@npm:1.0.9" - dependencies: - escalade: ^3.1.1 - picocolors: ^1.0.0 - peerDependencies: - browserslist: ">= 4.21.0" - bin: - browserslist-lint: cli.js - checksum: f625899b236f6a4d7f62b56be1b8da230c5563d1fef84d3ef148f2e1a3f11a5a4b3be4fd7e3703e51274c116194017775b10afb4de09eb2c0d09d36b90f1f578 - languageName: node - linkType: hard - -"uri-js@npm:^4.2.2": - version: 4.4.1 - resolution: "uri-js@npm:4.4.1" - dependencies: - punycode: ^2.1.0 - checksum: 7167432de6817fe8e9e0c9684f1d2de2bb688c94388f7569f7dbdb1587c9f4ca2a77962f134ec90be0cc4d004c939ff0d05acc9f34a0db39a3c797dada262633 - languageName: node - linkType: hard - -"use-sync-external-store@npm:1.2.0": - version: 1.2.0 - resolution: "use-sync-external-store@npm:1.2.0" - peerDependencies: - react: ^16.8.0 || ^17.0.0 || ^18.0.0 - checksum: 5c639e0f8da3521d605f59ce5be9e094ca772bd44a4ce7322b055a6f58eeed8dda3c94cabd90c7a41fb6fa852210092008afe48f7038792fd47501f33299116a - languageName: node - linkType: hard - -"v8-compile-cache@npm:^2.0.3": - version: 2.3.0 - resolution: "v8-compile-cache@npm:2.3.0" - checksum: adb0a271eaa2297f2f4c536acbfee872d0dd26ec2d76f66921aa7fc437319132773483344207bdbeee169225f4739016d8d2dbf0553913a52bb34da6d0334f8e - languageName: node - linkType: hard - -"web@workspace:apps/web": - version: 0.0.0-use.local - resolution: "web@workspace:apps/web" - dependencies: - "@babel/core": ^7.0.0 - "@types/node": ^17.0.12 - 
"@types/react": 18.0.17 - eslint: 7.32.0 - eslint-config-custom: "*" - next: 12.2.5 - next-transpile-modules: 9.0.0 - react: 18.2.0 - react-dom: 18.2.0 - tsconfig: "*" - typescript: ^4.5.3 - ui: "*" - languageName: unknown - linkType: soft - -"which-boxed-primitive@npm:^1.0.2": - version: 1.0.2 - resolution: "which-boxed-primitive@npm:1.0.2" - dependencies: - is-bigint: ^1.0.1 - is-boolean-object: ^1.1.0 - is-number-object: ^1.0.4 - is-string: ^1.0.5 - is-symbol: ^1.0.3 - checksum: 53ce774c7379071729533922adcca47220228405e1895f26673bbd71bdf7fb09bee38c1d6399395927c6289476b5ae0629863427fd151491b71c4b6cb04f3a5e - languageName: node - linkType: hard - -"which@npm:^2.0.1": - version: 2.0.2 - resolution: "which@npm:2.0.2" - dependencies: - isexe: ^2.0.0 - bin: - node-which: ./bin/node-which - checksum: 1a5c563d3c1b52d5f893c8b61afe11abc3bab4afac492e8da5bde69d550de701cf9806235f20a47b5c8fa8a1d6a9135841de2596535e998027a54589000e66d1 - languageName: node - linkType: hard - -"word-wrap@npm:^1.2.3": - version: 1.2.3 - resolution: "word-wrap@npm:1.2.3" - checksum: 30b48f91fcf12106ed3186ae4fa86a6a1842416df425be7b60485de14bec665a54a68e4b5156647dec3a70f25e84d270ca8bc8cd23182ed095f5c7206a938c1f - languageName: node - linkType: hard - -"wrappy@npm:1": - version: 1.0.2 - resolution: "wrappy@npm:1.0.2" - checksum: 159da4805f7e84a3d003d8841557196034155008f817172d4e986bd591f74aa82aa7db55929a54222309e01079a65a92a9e6414da5a6aa4b01ee44a511ac3ee5 - languageName: node - linkType: hard - -"yallist@npm:^4.0.0": - version: 4.0.0 - resolution: "yallist@npm:4.0.0" - checksum: 343617202af32df2a15a3be36a5a8c0c8545208f3d3dfbc6bb7c3e3b7e8c6f8e7485432e4f3b88da3031a6e20afa7c711eded32ddfb122896ac5d914e75848d5 - languageName: node - linkType: hard diff --git a/cli/internal/lockfile/testdata/minimal-berry.lock b/cli/internal/lockfile/testdata/minimal-berry.lock deleted file mode 100644 index 3844ce3067698..0000000000000 --- a/cli/internal/lockfile/testdata/minimal-berry.lock +++ /dev/null @@ -1,45 +0,0 @@ -# This file is generated by running "yarn install" inside your project. -# Manual changes might be lost - proceed with caution! - -__metadata: - version: 6 - cacheKey: 8c8 - -"a@workspace:packages/a": - version: 0.0.0-use.local - resolution: "a@workspace:packages/a" - dependencies: - c: "*" - lodash: ^4.17.0 - peerDependencies: - lodash: ^3.0.0 || ^4.0.0 - languageName: unknown - linkType: soft - -"b@workspace:packages/b": - version: 0.0.0-use.local - resolution: "b@workspace:packages/b" - dependencies: - c: "*" - lodash: ^3.0.0 || ^4.0.0 - languageName: unknown - linkType: soft - -"c@*, c@workspace:packages/c": - version: 0.0.0-use.local - resolution: "c@workspace:packages/c" - languageName: unknown - linkType: soft - -"lodash@npm:^3.0.0 || ^4.0.0, lodash@npm:^4.17.0": - version: 4.17.21 - resolution: "lodash@npm:4.17.21" - checksum: eb835a2e51d381e561e508ce932ea50a8e5a68f4ebdd771ea240d3048244a8d13658acbd502cd4829768c56f2e16bdd4340b9ea141297d472517b83868e677f7 - languageName: node - linkType: hard - -"minimal-berry@workspace:.": - version: 0.0.0-use.local - resolution: "minimal-berry@workspace:." 
- languageName: unknown - linkType: soft diff --git a/cli/internal/lockfile/testdata/npm-lock-workspace-variation.json b/cli/internal/lockfile/testdata/npm-lock-workspace-variation.json deleted file mode 100644 index 4dcfc2da8a18e..0000000000000 --- a/cli/internal/lockfile/testdata/npm-lock-workspace-variation.json +++ /dev/null @@ -1,186 +0,0 @@ -{ - "name": "npm-prune-workspace-variation", - "version": "0.0.0", - "lockfileVersion": 2, - "requires": true, - "packages": { - "": { - "name": "npm-prune", - "version": "0.0.0", - "workspaces": { "packages": ["apps/*", "packages/*"] }, - "devDependencies": { - "eslint-config-custom": "*", - "prettier": "latest", - "turbo": "latest" - }, - "engines": { - "node": ">=14.0.0" - } - }, - "apps/docs": { - "version": "0.0.0", - "dependencies": { - "lodash": "^3.0.0", - "next": "12.3.0", - "react": "18.2.0", - "react-dom": "18.2.0", - "ui": "*" - }, - "devDependencies": { - "@babel/core": "^7.0.0", - "@types/node": "^17.0.12", - "@types/react": "18.0.17", - "eslint": "7.32.0", - "eslint-config-custom": "*", - "next-transpile-modules": "9.0.0", - "tsconfig": "*", - "typescript": "^4.5.3" - } - }, - "apps/web": { - "version": "0.0.0", - "dependencies": { - "lodash": "^4.17.21", - "next": "12.3.0", - "react": "18.2.0", - "react-dom": "18.2.0", - "ui": "*" - }, - "devDependencies": { - "@babel/core": "^7.0.0", - "@types/node": "^17.0.12", - "@types/react": "18.0.17", - "eslint": "7.32.0", - "eslint-config-custom": "*", - "next-transpile-modules": "9.0.0", - "tsconfig": "*", - "typescript": "^4.5.3" - } - }, - "apps/web/node_modules/lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "engines": ["node >= 0.8.0"] - }, - "node_modules/@ampproject/remapping": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.2.0.tgz", - "integrity": "sha512-qRmjj8nj9qmLTQXXmaR1cck3UXSRMPrbsLJAasZpF+t3riI71BXed5ebIOYwQntykeZuhjsdweEc9BxH5Jc26w==", - "dev": true, - "dependencies": { - "@jridgewell/gen-mapping": "^0.1.0", - "@jridgewell/trace-mapping": "^0.3.9" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@babel/code-frame": { - "version": "7.18.6", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.18.6.tgz", - "integrity": "sha512-TDCmlK5eOvH+eH7cdAFlNXeVJqWIQ7gW9tY1GJIpUtFb6CmjVyq2VM3u71bOyR8CRihcCgMUYoDNyLXao3+70Q==", - "dev": true, - "dependencies": { - "@babel/highlight": "^7.18.6" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/compat-data": { - "version": "7.19.3", - "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.19.3.tgz", - "integrity": "sha512-prBHMK4JYYK+wDjJF1q99KK4JLL+egWS4nmNqdlMUgCExMZ+iZW0hGhyC3VEbsPjvaN0TBhW//VIFwBrk8sEiw==", - "dev": true, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/core": { - "version": "7.19.3", - "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.19.3.tgz", - "integrity": "sha512-WneDJxdsjEvyKtXKsaBGbDeiyOjR5vYq4HcShxnIbG0qixpoHjI3MqeZM9NDvsojNCEBItQE4juOo/bU6e72gQ==", - "dev": true, - "dependencies": { - "@ampproject/remapping": "^2.1.0", - "@babel/code-frame": "^7.18.6", - "@babel/generator": "^7.19.3", - "@babel/helper-compilation-targets": "^7.19.3", - "@babel/helper-module-transforms": "^7.19.0", - "@babel/helpers": "^7.19.0", - "@babel/parser": "^7.19.3", - "@babel/template": "^7.18.10", - 
"@babel/traverse": "^7.19.3", - "@babel/types": "^7.19.3", - "convert-source-map": "^1.7.0", - "debug": "^4.1.0", - "gensync": "^1.0.0-beta.2", - "json5": "^2.2.1", - "semver": "^6.3.0" - }, - "engines": { - "node": ">=6.9.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/babel" - } - }, - "node_modules/@babel/generator": { - "version": "7.19.3", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.19.3.tgz", - "integrity": "sha512-fqVZnmp1ncvZU757UzDheKZpfPgatqY59XtW2/j/18H7u76akb8xqvjw82f+i2UKd/ksYsSick/BCLQUUtJ/qQ==", - "dev": true, - "dependencies": { - "@babel/types": "^7.19.3", - "@jridgewell/gen-mapping": "^0.3.2", - "jsesc": "^2.5.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/generator/node_modules/@jridgewell/gen-mapping": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.2.tgz", - "integrity": "sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A==", - "dev": true, - "dependencies": { - "@jridgewell/set-array": "^1.0.1", - "@jridgewell/sourcemap-codec": "^1.4.10", - "@jridgewell/trace-mapping": "^0.3.9" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@babel/helper-compilation-targets": { - "version": "7.19.3", - "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.19.3.tgz", - "integrity": "sha512-65ESqLGyGmLvgR0mst5AdW1FkNlj9rQsCKduzEoEPhBCDFGXvz2jW6bXFG6i0/MrV2s7hhXjjb2yAzcPuQlLwg==", - "dev": true, - "dependencies": { - "@babel/compat-data": "^7.19.3", - "@babel/helper-validator-option": "^7.18.6", - "browserslist": "^4.21.3", - "semver": "^6.3.0" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/helper-environment-visitor": { - "version": "7.18.9", - "resolved": "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.18.9.tgz", - "integrity": "sha512-3r/aACDJ3fhQ/EVgFy0hpj8oHyHpQc+LPtJoY9SzTThAsStm4Ptegq92vqKoE3vD706ZVFWITnMnxucw+S9Ipg==", - "dev": true, - "engines": { - "node": ">=6.9.0" - } - } - } -} diff --git a/cli/internal/lockfile/testdata/npm-lock.json b/cli/internal/lockfile/testdata/npm-lock.json deleted file mode 100644 index c5607f1a72c72..0000000000000 --- a/cli/internal/lockfile/testdata/npm-lock.json +++ /dev/null @@ -1,6472 +0,0 @@ -{ - "name": "npm-prune", - "version": "0.0.0", - "lockfileVersion": 2, - "requires": true, - "packages": { - "": { - "name": "npm-prune", - "version": "0.0.0", - "workspaces": ["apps/*", "packages/*"], - "devDependencies": { - "eslint-config-custom": "*", - "prettier": "latest", - "turbo": "latest" - }, - "engines": { - "node": ">=14.0.0" - } - }, - "apps/docs": { - "version": "0.0.0", - "dependencies": { - "lodash": "^3.0.0", - "next": "12.3.0", - "react": "18.2.0", - "react-dom": "18.2.0", - "ui": "*" - }, - "devDependencies": { - "@babel/core": "^7.0.0", - "@types/node": "^17.0.12", - "@types/react": "18.0.17", - "eslint": "7.32.0", - "eslint-config-custom": "*", - "next-transpile-modules": "9.0.0", - "tsconfig": "*", - "typescript": "^4.5.3" - } - }, - "apps/web": { - "version": "0.0.0", - "dependencies": { - "lodash": "^4.17.21", - "next": "12.3.0", - "react": "18.2.0", - "react-dom": "18.2.0", - "ui": "*" - }, - "devDependencies": { - "@babel/core": "^7.0.0", - "@types/node": "^17.0.12", - "@types/react": "18.0.17", - "eslint": "7.32.0", - "eslint-config-custom": 
"*", - "next-transpile-modules": "9.0.0", - "tsconfig": "*", - "typescript": "^4.5.3" - } - }, - "apps/web/node_modules/lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "engines": ["node >= 0.8.0"] - }, - "node_modules/@ampproject/remapping": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.2.0.tgz", - "integrity": "sha512-qRmjj8nj9qmLTQXXmaR1cck3UXSRMPrbsLJAasZpF+t3riI71BXed5ebIOYwQntykeZuhjsdweEc9BxH5Jc26w==", - "dev": true, - "dependencies": { - "@jridgewell/gen-mapping": "^0.1.0", - "@jridgewell/trace-mapping": "^0.3.9" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@babel/code-frame": { - "version": "7.18.6", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.18.6.tgz", - "integrity": "sha512-TDCmlK5eOvH+eH7cdAFlNXeVJqWIQ7gW9tY1GJIpUtFb6CmjVyq2VM3u71bOyR8CRihcCgMUYoDNyLXao3+70Q==", - "dev": true, - "dependencies": { - "@babel/highlight": "^7.18.6" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/compat-data": { - "version": "7.19.3", - "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.19.3.tgz", - "integrity": "sha512-prBHMK4JYYK+wDjJF1q99KK4JLL+egWS4nmNqdlMUgCExMZ+iZW0hGhyC3VEbsPjvaN0TBhW//VIFwBrk8sEiw==", - "dev": true, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/core": { - "version": "7.19.3", - "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.19.3.tgz", - "integrity": "sha512-WneDJxdsjEvyKtXKsaBGbDeiyOjR5vYq4HcShxnIbG0qixpoHjI3MqeZM9NDvsojNCEBItQE4juOo/bU6e72gQ==", - "dev": true, - "dependencies": { - "@ampproject/remapping": "^2.1.0", - "@babel/code-frame": "^7.18.6", - "@babel/generator": "^7.19.3", - "@babel/helper-compilation-targets": "^7.19.3", - "@babel/helper-module-transforms": "^7.19.0", - "@babel/helpers": "^7.19.0", - "@babel/parser": "^7.19.3", - "@babel/template": "^7.18.10", - "@babel/traverse": "^7.19.3", - "@babel/types": "^7.19.3", - "convert-source-map": "^1.7.0", - "debug": "^4.1.0", - "gensync": "^1.0.0-beta.2", - "json5": "^2.2.1", - "semver": "^6.3.0" - }, - "engines": { - "node": ">=6.9.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/babel" - } - }, - "node_modules/@babel/generator": { - "version": "7.19.3", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.19.3.tgz", - "integrity": "sha512-fqVZnmp1ncvZU757UzDheKZpfPgatqY59XtW2/j/18H7u76akb8xqvjw82f+i2UKd/ksYsSick/BCLQUUtJ/qQ==", - "dev": true, - "dependencies": { - "@babel/types": "^7.19.3", - "@jridgewell/gen-mapping": "^0.3.2", - "jsesc": "^2.5.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/generator/node_modules/@jridgewell/gen-mapping": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.2.tgz", - "integrity": "sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A==", - "dev": true, - "dependencies": { - "@jridgewell/set-array": "^1.0.1", - "@jridgewell/sourcemap-codec": "^1.4.10", - "@jridgewell/trace-mapping": "^0.3.9" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@babel/helper-compilation-targets": { - "version": "7.19.3", - "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.19.3.tgz", - 
"integrity": "sha512-65ESqLGyGmLvgR0mst5AdW1FkNlj9rQsCKduzEoEPhBCDFGXvz2jW6bXFG6i0/MrV2s7hhXjjb2yAzcPuQlLwg==", - "dev": true, - "dependencies": { - "@babel/compat-data": "^7.19.3", - "@babel/helper-validator-option": "^7.18.6", - "browserslist": "^4.21.3", - "semver": "^6.3.0" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/helper-environment-visitor": { - "version": "7.18.9", - "resolved": "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.18.9.tgz", - "integrity": "sha512-3r/aACDJ3fhQ/EVgFy0hpj8oHyHpQc+LPtJoY9SzTThAsStm4Ptegq92vqKoE3vD706ZVFWITnMnxucw+S9Ipg==", - "dev": true, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-function-name": { - "version": "7.19.0", - "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.19.0.tgz", - "integrity": "sha512-WAwHBINyrpqywkUH0nTnNgI5ina5TFn85HKS0pbPDfxFfhyR/aNQEn4hGi1P1JyT//I0t4OgXUlofzWILRvS5w==", - "dev": true, - "dependencies": { - "@babel/template": "^7.18.10", - "@babel/types": "^7.19.0" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-hoist-variables": { - "version": "7.18.6", - "resolved": "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.18.6.tgz", - "integrity": "sha512-UlJQPkFqFULIcyW5sbzgbkxn2FKRgwWiRexcuaR8RNJRy8+LLveqPjwZV/bwrLZCN0eUHD/x8D0heK1ozuoo6Q==", - "dev": true, - "dependencies": { - "@babel/types": "^7.18.6" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-module-imports": { - "version": "7.18.6", - "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.18.6.tgz", - "integrity": "sha512-0NFvs3VkuSYbFi1x2Vd6tKrywq+z/cLeYC/RJNFrIX/30Bf5aiGYbtvGXolEktzJH8o5E5KJ3tT+nkxuuZFVlA==", - "dev": true, - "dependencies": { - "@babel/types": "^7.18.6" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-module-transforms": { - "version": "7.19.0", - "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.19.0.tgz", - "integrity": "sha512-3HBZ377Fe14RbLIA+ac3sY4PTgpxHVkFrESaWhoI5PuyXPBBX8+C34qblV9G89ZtycGJCmCI/Ut+VUDK4bltNQ==", - "dev": true, - "dependencies": { - "@babel/helper-environment-visitor": "^7.18.9", - "@babel/helper-module-imports": "^7.18.6", - "@babel/helper-simple-access": "^7.18.6", - "@babel/helper-split-export-declaration": "^7.18.6", - "@babel/helper-validator-identifier": "^7.18.6", - "@babel/template": "^7.18.10", - "@babel/traverse": "^7.19.0", - "@babel/types": "^7.19.0" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-simple-access": { - "version": "7.18.6", - "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.18.6.tgz", - "integrity": "sha512-iNpIgTgyAvDQpDj76POqg+YEt8fPxx3yaNBg3S30dxNKm2SWfYhD0TGrK/Eu9wHpUW63VQU894TsTg+GLbUa1g==", - "dev": true, - "dependencies": { - "@babel/types": "^7.18.6" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-split-export-declaration": { - "version": "7.18.6", - "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.18.6.tgz", - "integrity": "sha512-bde1etTx6ZyTmobl9LLMMQsaizFVZrquTEHOqKeQESMKo4PlObf+8+JA25ZsIpZhT/WEd39+vOdLXAFG/nELpA==", - "dev": true, - "dependencies": { - "@babel/types": "^7.18.6" - }, - "engines": { - "node": 
">=6.9.0" - } - }, - "node_modules/@babel/helper-string-parser": { - "version": "7.18.10", - "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.18.10.tgz", - "integrity": "sha512-XtIfWmeNY3i4t7t4D2t02q50HvqHybPqW2ki1kosnvWCwuCMeo81Jf0gwr85jy/neUdg5XDdeFE/80DXiO+njw==", - "dev": true, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-validator-identifier": { - "version": "7.19.1", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.19.1.tgz", - "integrity": "sha512-awrNfaMtnHUr653GgGEs++LlAvW6w+DcPrOliSMXWCKo597CwL5Acf/wWdNkf/tfEQE3mjkeD1YOVZOUV/od1w==", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-validator-option": { - "version": "7.18.6", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.18.6.tgz", - "integrity": "sha512-XO7gESt5ouv/LRJdrVjkShckw6STTaB7l9BrpBaAHDeF5YZT+01PCwmR0SJHnkW6i8OwW/EVWRShfi4j2x+KQw==", - "dev": true, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helpers": { - "version": "7.19.0", - "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.19.0.tgz", - "integrity": "sha512-DRBCKGwIEdqY3+rPJgG/dKfQy9+08rHIAJx8q2p+HSWP87s2HCrQmaAMMyMll2kIXKCW0cO1RdQskx15Xakftg==", - "dev": true, - "dependencies": { - "@babel/template": "^7.18.10", - "@babel/traverse": "^7.19.0", - "@babel/types": "^7.19.0" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/highlight": { - "version": "7.18.6", - "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.18.6.tgz", - "integrity": "sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g==", - "dependencies": { - "@babel/helper-validator-identifier": "^7.18.6", - "chalk": "^2.0.0", - "js-tokens": "^4.0.0" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/parser": { - "version": "7.19.3", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.19.3.tgz", - "integrity": "sha512-pJ9xOlNWHiy9+FuFP09DEAFbAn4JskgRsVcc169w2xRBC3FRGuQEwjeIMMND9L2zc0iEhO/tGv4Zq+km+hxNpQ==", - "dev": true, - "bin": { - "parser": "bin/babel-parser.js" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@babel/runtime": { - "version": "7.19.0", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.19.0.tgz", - "integrity": "sha512-eR8Lo9hnDS7tqkO7NsV+mKvCmv5boaXFSZ70DnfhcgiEne8hv9oCEd36Klw74EtizEqLsy4YnW8UWwpBVolHZA==", - "dependencies": { - "regenerator-runtime": "^0.13.4" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/runtime-corejs3": { - "version": "7.19.1", - "resolved": "https://registry.npmjs.org/@babel/runtime-corejs3/-/runtime-corejs3-7.19.1.tgz", - "integrity": "sha512-j2vJGnkopRzH+ykJ8h68wrHnEUmtK//E723jjixiAl/PPf6FhqY/vYRcMVlNydRKQjQsTsYEjpx+DZMIvnGk/g==", - "dependencies": { - "core-js-pure": "^3.25.1", - "regenerator-runtime": "^0.13.4" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/template": { - "version": "7.18.10", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.18.10.tgz", - "integrity": "sha512-TI+rCtooWHr3QJ27kJxfjutghu44DLnasDMwpDqCXVTal9RLp3RSYNh4NdBrRP2cQAoG9A8juOQl6P6oZG4JxA==", - "dev": true, - "dependencies": { - "@babel/code-frame": "^7.18.6", - "@babel/parser": "^7.18.10", - "@babel/types": "^7.18.10" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/traverse": { - "version": "7.19.3", 
- "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.19.3.tgz", - "integrity": "sha512-qh5yf6149zhq2sgIXmwjnsvmnNQC2iw70UFjp4olxucKrWd/dvlUsBI88VSLUsnMNF7/vnOiA+nk1+yLoCqROQ==", - "dev": true, - "dependencies": { - "@babel/code-frame": "^7.18.6", - "@babel/generator": "^7.19.3", - "@babel/helper-environment-visitor": "^7.18.9", - "@babel/helper-function-name": "^7.19.0", - "@babel/helper-hoist-variables": "^7.18.6", - "@babel/helper-split-export-declaration": "^7.18.6", - "@babel/parser": "^7.19.3", - "@babel/types": "^7.19.3", - "debug": "^4.1.0", - "globals": "^11.1.0" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/types": { - "version": "7.19.3", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.19.3.tgz", - "integrity": "sha512-hGCaQzIY22DJlDh9CH7NOxgKkFjBk0Cw9xDO1Xmh2151ti7wiGfQ3LauXzL4HP1fmFlTX6XjpRETTpUcv7wQLw==", - "dev": true, - "dependencies": { - "@babel/helper-string-parser": "^7.18.10", - "@babel/helper-validator-identifier": "^7.19.1", - "to-fast-properties": "^2.0.0" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@eslint/eslintrc": { - "version": "0.4.3", - "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-0.4.3.tgz", - "integrity": "sha512-J6KFFz5QCYUJq3pf0mjEcCJVERbzv71PUIDczuh9JkwGEzced6CO5ADLHB1rbf/+oPBtoPfMYNOpGDzCANlbXw==", - "dependencies": { - "ajv": "^6.12.4", - "debug": "^4.1.1", - "espree": "^7.3.0", - "globals": "^13.9.0", - "ignore": "^4.0.6", - "import-fresh": "^3.2.1", - "js-yaml": "^3.13.1", - "minimatch": "^3.0.4", - "strip-json-comments": "^3.1.1" - }, - "engines": { - "node": "^10.12.0 || >=12.0.0" - } - }, - "node_modules/@eslint/eslintrc/node_modules/globals": { - "version": "13.17.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-13.17.0.tgz", - "integrity": "sha512-1C+6nQRb1GwGMKm2dH/E7enFAMxGTmGI7/dEdhy/DNelv85w9B72t3uc5frtMNXIbzrarJJ/lTCjcaZwbLJmyw==", - "dependencies": { - "type-fest": "^0.20.2" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/@humanwhocodes/config-array": { - "version": "0.5.0", - "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.5.0.tgz", - "integrity": "sha512-FagtKFz74XrTl7y6HCzQpwDfXP0yhxe9lHLD1UZxjvZIcbyRz8zTFF/yYNfSfzU414eDwZ1SrO0Qvtyf+wFMQg==", - "dependencies": { - "@humanwhocodes/object-schema": "^1.2.0", - "debug": "^4.1.1", - "minimatch": "^3.0.4" - }, - "engines": { - "node": ">=10.10.0" - } - }, - "node_modules/@humanwhocodes/object-schema": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz", - "integrity": "sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==" - }, - "node_modules/@jridgewell/gen-mapping": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.1.1.tgz", - "integrity": "sha512-sQXCasFk+U8lWYEe66WxRDOE9PjVz4vSM51fTu3Hw+ClTpUSQb718772vH3pyS5pShp6lvQM7SxgIDXXXmOX7w==", - "dev": true, - "dependencies": { - "@jridgewell/set-array": "^1.0.0", - "@jridgewell/sourcemap-codec": "^1.4.10" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@jridgewell/resolve-uri": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz", - "integrity": "sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w==", - "dev": true, - 
"engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@jridgewell/set-array": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.1.2.tgz", - "integrity": "sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw==", - "dev": true, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@jridgewell/sourcemap-codec": { - "version": "1.4.14", - "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz", - "integrity": "sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw==", - "dev": true - }, - "node_modules/@jridgewell/trace-mapping": { - "version": "0.3.15", - "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.15.tgz", - "integrity": "sha512-oWZNOULl+UbhsgB51uuZzglikfIKSUBO/M9W2OfEjn7cmqoAiCgmv9lyACTUacZwBz0ITnJ2NqjU8Tx0DHL88g==", - "dev": true, - "dependencies": { - "@jridgewell/resolve-uri": "^3.0.3", - "@jridgewell/sourcemap-codec": "^1.4.10" - } - }, - "node_modules/@next/env": { - "version": "12.3.0", - "resolved": "https://registry.npmjs.org/@next/env/-/env-12.3.0.tgz", - "integrity": "sha512-PTJpjAFVbzBQ9xXpzMTroShvD5YDIIy46jQ7d4LrWpY+/5a8H90Tm8hE3Hvkc5RBRspVo7kvEOnqQms0A+2Q6w==" - }, - "node_modules/@next/eslint-plugin-next": { - "version": "12.3.1", - "resolved": "https://registry.npmjs.org/@next/eslint-plugin-next/-/eslint-plugin-next-12.3.1.tgz", - "integrity": "sha512-sw+lTf6r6P0j+g/n9y4qdWWI2syPqZx+uc0+B/fRENqfR3KpSid6MIKqc9gNwGhJASazEQ5b3w8h4cAET213jw==", - "dependencies": { - "glob": "7.1.7" - } - }, - "node_modules/@next/swc-android-arm-eabi": { - "version": "12.3.0", - "resolved": "https://registry.npmjs.org/@next/swc-android-arm-eabi/-/swc-android-arm-eabi-12.3.0.tgz", - "integrity": "sha512-/PuirPnAKsYBw93w/7Q9hqy+KGOU9mjYprZ/faxMUJh/dc6v3rYLxkZKNG9nFPIW4QKNTCnhP40xF9hLnxO+xg==", - "cpu": ["arm"], - "optional": true, - "os": ["android"], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@next/swc-android-arm64": { - "version": "12.3.0", - "resolved": "https://registry.npmjs.org/@next/swc-android-arm64/-/swc-android-arm64-12.3.0.tgz", - "integrity": "sha512-OaI+FhAM6P9B6Ybwbn0Zl8YwWido0lLwhDBi9WiYCh4RQmIXAyVIoIJPHo4fP05+mXaJ/k1trvDvuURvHOq2qw==", - "cpu": ["arm64"], - "optional": true, - "os": ["android"], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@next/swc-darwin-arm64": { - "version": "12.3.0", - "resolved": "https://registry.npmjs.org/@next/swc-darwin-arm64/-/swc-darwin-arm64-12.3.0.tgz", - "integrity": "sha512-9s4d3Mhii+WFce8o8Jok7WC3Bawkr9wEUU++SJRptjU1L5tsfYJMrSYCACHLhZujziNDLyExe4Hwwsccps1sfg==", - "cpu": ["arm64"], - "optional": true, - "os": ["darwin"], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@next/swc-darwin-x64": { - "version": "12.3.0", - "resolved": "https://registry.npmjs.org/@next/swc-darwin-x64/-/swc-darwin-x64-12.3.0.tgz", - "integrity": "sha512-2scC4MqUTwGwok+wpVxP+zWp7WcCAVOtutki2E1n99rBOTnUOX6qXkgxSy083yBN6GqwuC/dzHeN7hIKjavfRA==", - "cpu": ["x64"], - "optional": true, - "os": ["darwin"], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@next/swc-freebsd-x64": { - "version": "12.3.0", - "resolved": "https://registry.npmjs.org/@next/swc-freebsd-x64/-/swc-freebsd-x64-12.3.0.tgz", - "integrity": "sha512-xAlruUREij/bFa+qsE1tmsP28t7vz02N4ZDHt2lh3uJUniE0Ne9idyIDLc1Ed0IF2RjfgOp4ZVunuS3OM0sngw==", - "cpu": ["x64"], - "optional": true, - "os": ["freebsd"], - "engines": { - "node": ">= 10" - } - }, - 
"node_modules/@next/swc-linux-arm-gnueabihf": { - "version": "12.3.0", - "resolved": "https://registry.npmjs.org/@next/swc-linux-arm-gnueabihf/-/swc-linux-arm-gnueabihf-12.3.0.tgz", - "integrity": "sha512-jin2S4VT/cugc2dSZEUIabhYDJNgrUh7fufbdsaAezgcQzqfdfJqfxl4E9GuafzB4cbRPTaqA0V5uqbp0IyGkQ==", - "cpu": ["arm"], - "optional": true, - "os": ["linux"], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@next/swc-linux-arm64-gnu": { - "version": "12.3.0", - "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-12.3.0.tgz", - "integrity": "sha512-RqJHDKe0WImeUrdR0kayTkRWgp4vD/MS7g0r6Xuf8+ellOFH7JAAJffDW3ayuVZeMYOa7RvgNFcOoWnrTUl9Nw==", - "cpu": ["arm64"], - "optional": true, - "os": ["linux"], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@next/swc-linux-arm64-musl": { - "version": "12.3.0", - "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-12.3.0.tgz", - "integrity": "sha512-nvNWoUieMjvDjpYJ/4SQe9lQs2xMj6ZRs8N+bmTrVu9leY2Fg3WD6W9p/1uU9hGO8u+OdF13wc4iRShu/WYIHg==", - "cpu": ["arm64"], - "optional": true, - "os": ["linux"], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@next/swc-linux-x64-gnu": { - "version": "12.3.0", - "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-12.3.0.tgz", - "integrity": "sha512-4ajhIuVU9PeQCMMhdDgZTLrHmjbOUFuIyg6J19hZqwEwDTSqQyrSLkbJs2Nd7IRiM6Ul/XyrtEFCpk4k+xD2+w==", - "cpu": ["x64"], - "optional": true, - "os": ["linux"], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@next/swc-linux-x64-musl": { - "version": "12.3.0", - "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-12.3.0.tgz", - "integrity": "sha512-U092RBYbaGxoMAwpauePJEu2PuZSEoUCGJBvsptQr2/2XIMwAJDYM4c/M5NfYEsBr+yjvsYNsOpYfeQ88D82Yg==", - "cpu": ["x64"], - "optional": true, - "os": ["linux"], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@next/swc-win32-arm64-msvc": { - "version": "12.3.0", - "resolved": "https://registry.npmjs.org/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-12.3.0.tgz", - "integrity": "sha512-pzSzaxjDEJe67bUok9Nxf9rykbJfHXW0owICFsPBsqHyc+cr8vpF7g9e2APTCddtVhvjkga9ILoZJ9NxWS7Yiw==", - "cpu": ["arm64"], - "optional": true, - "os": ["win32"], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@next/swc-win32-ia32-msvc": { - "version": "12.3.0", - "resolved": "https://registry.npmjs.org/@next/swc-win32-ia32-msvc/-/swc-win32-ia32-msvc-12.3.0.tgz", - "integrity": "sha512-MQGUpMbYhQmTZ06a9e0hPQJnxFMwETo2WtyAotY3GEzbNCQVbCGhsvqEKcl+ZEHgShlHXUWvSffq1ZscY6gK7A==", - "cpu": ["ia32"], - "optional": true, - "os": ["win32"], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@next/swc-win32-x64-msvc": { - "version": "12.3.0", - "resolved": "https://registry.npmjs.org/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-12.3.0.tgz", - "integrity": "sha512-C/nw6OgQpEULWqs+wgMHXGvlJLguPRFFGqR2TAqWBerQ8J+Sg3z1ZTqwelkSi4FoqStGuZ2UdFHIDN1ySmR1xA==", - "cpu": ["x64"], - "optional": true, - "os": ["win32"], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@nodelib/fs.scandir": { - "version": "2.1.5", - "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", - "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", - "dependencies": { - "@nodelib/fs.stat": "2.0.5", - "run-parallel": "^1.1.9" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/@nodelib/fs.stat": { - "version": "2.0.5", - "resolved": 
"https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", - "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", - "engines": { - "node": ">= 8" - } - }, - "node_modules/@nodelib/fs.walk": { - "version": "1.2.8", - "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", - "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", - "dependencies": { - "@nodelib/fs.scandir": "2.1.5", - "fastq": "^1.6.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/@rushstack/eslint-patch": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/@rushstack/eslint-patch/-/eslint-patch-1.2.0.tgz", - "integrity": "sha512-sXo/qW2/pAcmT43VoRKOJbDOfV3cYpq3szSVfIThQXNt+E4DfKj361vaAt3c88U5tPUxzEswam7GW48PJqtKAg==" - }, - "node_modules/@swc/helpers": { - "version": "0.4.11", - "resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.4.11.tgz", - "integrity": "sha512-rEUrBSGIoSFuYxwBYtlUFMlE2CwGhmW+w9355/5oduSw8e5h2+Tj4UrAGNNgP9915++wj5vkQo0UuOBqOAq4nw==", - "dependencies": { - "tslib": "^2.4.0" - } - }, - "node_modules/@types/json5": { - "version": "0.0.29", - "resolved": "https://registry.npmjs.org/@types/json5/-/json5-0.0.29.tgz", - "integrity": "sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==" - }, - "node_modules/@types/node": { - "version": "17.0.45", - "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.45.tgz", - "integrity": "sha512-w+tIMs3rq2afQdsPJlODhoUEKzFP1ayaoyl1CcnwtIlsVe7K7bA1NGm4s3PraqTLlXnbIN84zuBlxBWo1u9BLw==", - "dev": true - }, - "node_modules/@types/prop-types": { - "version": "15.7.5", - "resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.5.tgz", - "integrity": "sha512-JCB8C6SnDoQf0cNycqd/35A7MjcnK+ZTqE7judS6o7utxUCg6imJg3QK2qzHKszlTjcj2cn+NwMB2i96ubpj7w==", - "dev": true - }, - "node_modules/@types/react": { - "version": "18.0.17", - "resolved": "https://registry.npmjs.org/@types/react/-/react-18.0.17.tgz", - "integrity": "sha512-38ETy4tL+rn4uQQi7mB81G7V1g0u2ryquNmsVIOKUAEIDK+3CUjZ6rSRpdvS99dNBnkLFL83qfmtLacGOTIhwQ==", - "dev": true, - "dependencies": { - "@types/prop-types": "*", - "@types/scheduler": "*", - "csstype": "^3.0.2" - } - }, - "node_modules/@types/react-dom": { - "version": "17.0.17", - "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-17.0.17.tgz", - "integrity": "sha512-VjnqEmqGnasQKV0CWLevqMTXBYG9GbwuE6x3VetERLh0cq2LTptFE73MrQi2S7GkKXCf2GgwItB/melLnxfnsg==", - "dev": true, - "dependencies": { - "@types/react": "^17" - } - }, - "node_modules/@types/react-dom/node_modules/@types/react": { - "version": "17.0.50", - "resolved": "https://registry.npmjs.org/@types/react/-/react-17.0.50.tgz", - "integrity": "sha512-ZCBHzpDb5skMnc1zFXAXnL3l1FAdi+xZvwxK+PkglMmBrwjpp9nKaWuEvrGnSifCJmBFGxZOOFuwC6KH/s0NuA==", - "dev": true, - "dependencies": { - "@types/prop-types": "*", - "@types/scheduler": "*", - "csstype": "^3.0.2" - } - }, - "node_modules/@types/scheduler": { - "version": "0.16.2", - "resolved": "https://registry.npmjs.org/@types/scheduler/-/scheduler-0.16.2.tgz", - "integrity": "sha512-hppQEBDmlwhFAXKJX2KnWLYu5yMfi91yazPb2l+lbJiwW+wdo1gNeRA+3RgNSO39WYX2euey41KEwnqesU2Jew==", - "dev": true - }, - "node_modules/@typescript-eslint/parser": { - "version": "5.39.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-5.39.0.tgz", - "integrity": 
"sha512-PhxLjrZnHShe431sBAGHaNe6BDdxAASDySgsBCGxcBecVCi8NQWxQZMcizNA4g0pN51bBAn/FUfkWG3SDVcGlA==", - "dependencies": { - "@typescript-eslint/scope-manager": "5.39.0", - "@typescript-eslint/types": "5.39.0", - "@typescript-eslint/typescript-estree": "5.39.0", - "debug": "^4.3.4" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependencies": { - "eslint": "^6.0.0 || ^7.0.0 || ^8.0.0" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "node_modules/@typescript-eslint/scope-manager": { - "version": "5.39.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-5.39.0.tgz", - "integrity": "sha512-/I13vAqmG3dyqMVSZPjsbuNQlYS082Y7OMkwhCfLXYsmlI0ca4nkL7wJ/4gjX70LD4P8Hnw1JywUVVAwepURBw==", - "dependencies": { - "@typescript-eslint/types": "5.39.0", - "@typescript-eslint/visitor-keys": "5.39.0" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/types": { - "version": "5.39.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-5.39.0.tgz", - "integrity": "sha512-gQMZrnfEBFXK38hYqt8Lkwt8f4U6yq+2H5VDSgP/qiTzC8Nw8JO3OuSUOQ2qW37S/dlwdkHDntkZM6SQhKyPhw==", - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/typescript-estree": { - "version": "5.39.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-5.39.0.tgz", - "integrity": "sha512-qLFQP0f398sdnogJoLtd43pUgB18Q50QSA+BTE5h3sUxySzbWDpTSdgt4UyxNSozY/oDK2ta6HVAzvGgq8JYnA==", - "dependencies": { - "@typescript-eslint/types": "5.39.0", - "@typescript-eslint/visitor-keys": "5.39.0", - "debug": "^4.3.4", - "globby": "^11.1.0", - "is-glob": "^4.0.3", - "semver": "^7.3.7", - "tsutils": "^3.21.0" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "node_modules/@typescript-eslint/typescript-estree/node_modules/semver": { - "version": "7.3.7", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.7.tgz", - "integrity": "sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g==", - "dependencies": { - "lru-cache": "^6.0.0" - }, - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/@typescript-eslint/visitor-keys": { - "version": "5.39.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-5.39.0.tgz", - "integrity": "sha512-yyE3RPwOG+XJBLrhvsxAidUgybJVQ/hG8BhiJo0k8JSAYfk/CshVcxf0HwP4Jt7WZZ6vLmxdo1p6EyN3tzFTkg==", - "dependencies": { - "@typescript-eslint/types": "5.39.0", - "eslint-visitor-keys": "^3.3.0" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/visitor-keys/node_modules/eslint-visitor-keys": { - "version": "3.3.0", - "resolved": 
"https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.3.0.tgz", - "integrity": "sha512-mQ+suqKJVyeuwGYHAdjMFqjCyfl8+Ldnxuyp3ldiMBFKkvytrXUZWaiPCEav8qDHKty44bD+qV1IP4T+w+xXRA==", - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - } - }, - "node_modules/acorn": { - "version": "7.4.1", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz", - "integrity": "sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==", - "bin": { - "acorn": "bin/acorn" - }, - "engines": { - "node": ">=0.4.0" - } - }, - "node_modules/acorn-jsx": { - "version": "5.3.2", - "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", - "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", - "peerDependencies": { - "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" - } - }, - "node_modules/ajv": { - "version": "6.12.6", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", - "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", - "dependencies": { - "fast-deep-equal": "^3.1.1", - "fast-json-stable-stringify": "^2.0.0", - "json-schema-traverse": "^0.4.1", - "uri-js": "^4.2.2" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/epoberezkin" - } - }, - "node_modules/ansi-colors": { - "version": "4.1.3", - "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.3.tgz", - "integrity": "sha512-/6w/C21Pm1A7aZitlI5Ni/2J6FFQN8i1Cvz3kHABAAbw93v/NlvKdVOqz7CCWz/3iv/JplRSEEZ83XION15ovw==", - "engines": { - "node": ">=6" - } - }, - "node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "engines": { - "node": ">=8" - } - }, - "node_modules/ansi-styles": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", - "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", - "dependencies": { - "color-convert": "^1.9.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/argparse": { - "version": "1.0.10", - "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", - "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", - "dependencies": { - "sprintf-js": "~1.0.2" - } - }, - "node_modules/aria-query": { - "version": "4.2.2", - "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-4.2.2.tgz", - "integrity": "sha512-o/HelwhuKpTj/frsOsbNLNgnNGVIFsVP/SW2BSF14gVl7kAfMOJ6/8wUAUvG1R1NHKrfG+2sHZTu0yauT1qBrA==", - "dependencies": { - "@babel/runtime": "^7.10.2", - "@babel/runtime-corejs3": "^7.10.2" - }, - "engines": { - "node": ">=6.0" - } - }, - "node_modules/array-includes": { - "version": "3.1.5", - "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.5.tgz", - "integrity": "sha512-iSDYZMMyTPkiFasVqfuAQnWAYcvO/SeBSCGKePoEthjp4LEMTe4uLc7b025o4jAZpHhihh8xPo99TNWUWWkGDQ==", - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.19.5", - "get-intrinsic": "^1.1.1", - "is-string": "^1.0.7" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/array-union": { - "version": "2.1.0", - 
"resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", - "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", - "engines": { - "node": ">=8" - } - }, - "node_modules/array.prototype.flat": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.3.0.tgz", - "integrity": "sha512-12IUEkHsAhA4DY5s0FPgNXIdc8VRSqD9Zp78a5au9abH/SOBrsp082JOWFNTjkMozh8mqcdiKuaLGhPeYztxSw==", - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.3", - "es-abstract": "^1.19.2", - "es-shim-unscopables": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/array.prototype.flatmap": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/array.prototype.flatmap/-/array.prototype.flatmap-1.3.0.tgz", - "integrity": "sha512-PZC9/8TKAIxcWKdyeb77EzULHPrIX/tIZebLJUQOMR1OwYosT8yggdfWScfTBCDj5utONvOuPQQumYsU2ULbkg==", - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.3", - "es-abstract": "^1.19.2", - "es-shim-unscopables": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/ast-types-flow": { - "version": "0.0.7", - "resolved": "https://registry.npmjs.org/ast-types-flow/-/ast-types-flow-0.0.7.tgz", - "integrity": "sha512-eBvWn1lvIApYMhzQMsu9ciLfkBY499mFZlNqG+/9WR7PVlroQw0vG30cOQQbaKz3sCEc44TAOu2ykzqXSNnwag==" - }, - "node_modules/astral-regex": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/astral-regex/-/astral-regex-2.0.0.tgz", - "integrity": "sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ==", - "engines": { - "node": ">=8" - } - }, - "node_modules/axe-core": { - "version": "4.4.3", - "resolved": "https://registry.npmjs.org/axe-core/-/axe-core-4.4.3.tgz", - "integrity": "sha512-32+ub6kkdhhWick/UjvEwRchgoetXqTK14INLqbGm5U2TzBkBNF3nQtLYm8ovxSkQWArjEQvftCKryjZaATu3w==", - "engines": { - "node": ">=4" - } - }, - "node_modules/axobject-query": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/axobject-query/-/axobject-query-2.2.0.tgz", - "integrity": "sha512-Td525n+iPOOyUQIeBfcASuG6uJsDOITl7Mds5gFyerkWiX7qhUTdYUBlSgNMyVqtSJqwpt1kXGLdUt6SykLMRA==" - }, - "node_modules/balanced-match": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", - "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" - }, - "node_modules/brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", - "dependencies": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - "node_modules/braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "dependencies": { - "fill-range": "^7.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/browserslist": { - "version": "4.21.4", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.21.4.tgz", - "integrity": 
"sha512-CBHJJdDmgjl3daYjN5Cp5kbTf1mUhZoS+beLklHIvkOWscs83YAhLlF3Wsh/lciQYAcbBJgTOD44VtG31ZM4Hw==", - "dev": true, - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/browserslist" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/browserslist" - } - ], - "dependencies": { - "caniuse-lite": "^1.0.30001400", - "electron-to-chromium": "^1.4.251", - "node-releases": "^2.0.6", - "update-browserslist-db": "^1.0.9" - }, - "bin": { - "browserslist": "cli.js" - }, - "engines": { - "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" - } - }, - "node_modules/call-bind": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", - "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==", - "dependencies": { - "function-bind": "^1.1.1", - "get-intrinsic": "^1.0.2" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/callsites": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", - "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", - "engines": { - "node": ">=6" - } - }, - "node_modules/caniuse-lite": { - "version": "1.0.30001414", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001414.tgz", - "integrity": "sha512-t55jfSaWjCdocnFdKQoO+d2ct9C59UZg4dY3OnUlSZ447r8pUtIKdp0hpAzrGFultmTC+Us+KpKi4GZl/LXlFg==", - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/browserslist" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/caniuse-lite" - } - ] - }, - "node_modules/chalk": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", - "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", - "dependencies": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/color-convert": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", - "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", - "dependencies": { - "color-name": "1.1.3" - } - }, - "node_modules/color-name": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" - }, - "node_modules/concat-map": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", - "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==" - }, - "node_modules/convert-source-map": { - "version": "1.8.0", - "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.8.0.tgz", - "integrity": "sha512-+OQdjP49zViI/6i7nIJpA8rAl4sV/JdPfU9nZs3VqOwGIgizICvuN2ru6fMd+4llL0tar18UYJXfZ/TWtmhUjA==", - "dev": true, - "dependencies": { - "safe-buffer": "~5.1.1" - } - }, - "node_modules/core-js-pure": { - "version": "3.25.5", - "resolved": "https://registry.npmjs.org/core-js-pure/-/core-js-pure-3.25.5.tgz", - "integrity": "sha512-oml3M22pHM+igfWHDfdLVq2ShWmjM2V4L+dQEBs0DWVIqEm9WHCwGAlZ6BmyBQGy5sFrJmcx+856D9lVKyGWYg==", - 
"hasInstallScript": true, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/core-js" - } - }, - "node_modules/cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", - "dependencies": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/csstype": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.1.tgz", - "integrity": "sha512-DJR/VvkAvSZW9bTouZue2sSxDwdTN92uHjqeKVm+0dAqdfNykRzQ95tay8aXMBAAPpUiq4Qcug2L7neoRh2Egw==", - "dev": true - }, - "node_modules/damerau-levenshtein": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/damerau-levenshtein/-/damerau-levenshtein-1.0.8.tgz", - "integrity": "sha512-sdQSFB7+llfUcQHUQO3+B8ERRj0Oa4w9POWMI/puGtuf7gFywGmkaLCElnudfTiKZV+NvHqL0ifzdrI8Ro7ESA==" - }, - "node_modules/debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", - "dependencies": { - "ms": "2.1.2" - }, - "engines": { - "node": ">=6.0" - }, - "peerDependenciesMeta": { - "supports-color": { - "optional": true - } - } - }, - "node_modules/deep-is": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", - "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==" - }, - "node_modules/define-properties": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.4.tgz", - "integrity": "sha512-uckOqKcfaVvtBdsVkdPv3XjveQJsNQqmhXgRi8uhvWWuPYZCNlzT8qAyblUgNoXdHdjMTzAqeGjAoli8f+bzPA==", - "dependencies": { - "has-property-descriptors": "^1.0.0", - "object-keys": "^1.1.1" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/dir-glob": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", - "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", - "dependencies": { - "path-type": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/docs": { - "resolved": "apps/docs", - "link": true - }, - "node_modules/doctrine": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", - "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", - "dependencies": { - "esutils": "^2.0.2" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/electron-to-chromium": { - "version": "1.4.270", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.270.tgz", - "integrity": "sha512-KNhIzgLiJmDDC444dj9vEOpZEgsV96ult9Iff98Vanumn+ShJHd5se8aX6KeVxdc0YQeqdrezBZv89rleDbvSg==", - "dev": true - }, - "node_modules/emoji-regex": { - "version": "9.2.2", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", - "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==" - }, - "node_modules/enhanced-resolve": { - "version": "5.10.0", - "resolved": 
"https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.10.0.tgz", - "integrity": "sha512-T0yTFjdpldGY8PmuXXR0PyQ1ufZpEGiHVrp7zHKB7jdR4qlmZHhONVM5AQOAWXuF/w3dnHbEQVrNptJgt7F+cQ==", - "dev": true, - "dependencies": { - "graceful-fs": "^4.2.4", - "tapable": "^2.2.0" - }, - "engines": { - "node": ">=10.13.0" - } - }, - "node_modules/enquirer": { - "version": "2.3.6", - "resolved": "https://registry.npmjs.org/enquirer/-/enquirer-2.3.6.tgz", - "integrity": "sha512-yjNnPr315/FjS4zIsUxYguYUPP2e1NK4d7E7ZOLiyYCcbFBiTMyID+2wvm2w6+pZ/odMA7cRkjhsPbltwBOrLg==", - "dependencies": { - "ansi-colors": "^4.1.1" - }, - "engines": { - "node": ">=8.6" - } - }, - "node_modules/es-abstract": { - "version": "1.20.3", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.20.3.tgz", - "integrity": "sha512-AyrnaKVpMzljIdwjzrj+LxGmj8ik2LckwXacHqrJJ/jxz6dDDBcZ7I7nlHM0FvEW8MfbWJwOd+yT2XzYW49Frw==", - "dependencies": { - "call-bind": "^1.0.2", - "es-to-primitive": "^1.2.1", - "function-bind": "^1.1.1", - "function.prototype.name": "^1.1.5", - "get-intrinsic": "^1.1.3", - "get-symbol-description": "^1.0.0", - "has": "^1.0.3", - "has-property-descriptors": "^1.0.0", - "has-symbols": "^1.0.3", - "internal-slot": "^1.0.3", - "is-callable": "^1.2.6", - "is-negative-zero": "^2.0.2", - "is-regex": "^1.1.4", - "is-shared-array-buffer": "^1.0.2", - "is-string": "^1.0.7", - "is-weakref": "^1.0.2", - "object-inspect": "^1.12.2", - "object-keys": "^1.1.1", - "object.assign": "^4.1.4", - "regexp.prototype.flags": "^1.4.3", - "safe-regex-test": "^1.0.0", - "string.prototype.trimend": "^1.0.5", - "string.prototype.trimstart": "^1.0.5", - "unbox-primitive": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/es-shim-unscopables": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/es-shim-unscopables/-/es-shim-unscopables-1.0.0.tgz", - "integrity": "sha512-Jm6GPcCdC30eMLbZ2x8z2WuRwAws3zTBBKuusffYVUrNj/GVSUAZ+xKMaUpfNDR5IbyNA5LJbaecoUVbmUcB1w==", - "dependencies": { - "has": "^1.0.3" - } - }, - "node_modules/es-to-primitive": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", - "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", - "dependencies": { - "is-callable": "^1.1.4", - "is-date-object": "^1.0.1", - "is-symbol": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/escalade": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", - "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", - "dev": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/escape-string-regexp": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", - "engines": { - "node": ">=0.8.0" - } - }, - "node_modules/eslint": { - "version": "7.32.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-7.32.0.tgz", - "integrity": "sha512-VHZ8gX+EDfz+97jGcgyGCyRia/dPOd6Xh9yPv8Bl1+SoaIwD+a/vlrOmGRUyOYu7MwUhc7CxqeaDZU13S4+EpA==", - "dependencies": { - "@babel/code-frame": "7.12.11", - "@eslint/eslintrc": "^0.4.3", - "@humanwhocodes/config-array": 
"^0.5.0", - "ajv": "^6.10.0", - "chalk": "^4.0.0", - "cross-spawn": "^7.0.2", - "debug": "^4.0.1", - "doctrine": "^3.0.0", - "enquirer": "^2.3.5", - "escape-string-regexp": "^4.0.0", - "eslint-scope": "^5.1.1", - "eslint-utils": "^2.1.0", - "eslint-visitor-keys": "^2.0.0", - "espree": "^7.3.1", - "esquery": "^1.4.0", - "esutils": "^2.0.2", - "fast-deep-equal": "^3.1.3", - "file-entry-cache": "^6.0.1", - "functional-red-black-tree": "^1.0.1", - "glob-parent": "^5.1.2", - "globals": "^13.6.0", - "ignore": "^4.0.6", - "import-fresh": "^3.0.0", - "imurmurhash": "^0.1.4", - "is-glob": "^4.0.0", - "js-yaml": "^3.13.1", - "json-stable-stringify-without-jsonify": "^1.0.1", - "levn": "^0.4.1", - "lodash.merge": "^4.6.2", - "minimatch": "^3.0.4", - "natural-compare": "^1.4.0", - "optionator": "^0.9.1", - "progress": "^2.0.0", - "regexpp": "^3.1.0", - "semver": "^7.2.1", - "strip-ansi": "^6.0.0", - "strip-json-comments": "^3.1.0", - "table": "^6.0.9", - "text-table": "^0.2.0", - "v8-compile-cache": "^2.0.3" - }, - "bin": { - "eslint": "bin/eslint.js" - }, - "engines": { - "node": "^10.12.0 || >=12.0.0" - }, - "funding": { - "url": "https://opencollective.com/eslint" - } - }, - "node_modules/eslint-config-custom": { - "resolved": "packages/eslint-config-custom", - "link": true - }, - "node_modules/eslint-config-next": { - "version": "12.3.1", - "resolved": "https://registry.npmjs.org/eslint-config-next/-/eslint-config-next-12.3.1.tgz", - "integrity": "sha512-EN/xwKPU6jz1G0Qi6Bd/BqMnHLyRAL0VsaQaWA7F3KkjAgZHi4f1uL1JKGWNxdQpHTW/sdGONBd0bzxUka/DJg==", - "dependencies": { - "@next/eslint-plugin-next": "12.3.1", - "@rushstack/eslint-patch": "^1.1.3", - "@typescript-eslint/parser": "^5.21.0", - "eslint-import-resolver-node": "^0.3.6", - "eslint-import-resolver-typescript": "^2.7.1", - "eslint-plugin-import": "^2.26.0", - "eslint-plugin-jsx-a11y": "^6.5.1", - "eslint-plugin-react": "^7.31.7", - "eslint-plugin-react-hooks": "^4.5.0" - }, - "peerDependencies": { - "eslint": "^7.23.0 || ^8.0.0", - "typescript": ">=3.3.1" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "node_modules/eslint-config-prettier": { - "version": "8.5.0", - "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-8.5.0.tgz", - "integrity": "sha512-obmWKLUNCnhtQRKc+tmnYuQl0pFU1ibYJQ5BGhTVB08bHe9wC8qUeG7c08dj9XX+AuPj1YSGSQIHl1pnDHZR0Q==", - "bin": { - "eslint-config-prettier": "bin/cli.js" - }, - "peerDependencies": { - "eslint": ">=7.0.0" - } - }, - "node_modules/eslint-config-turbo": { - "version": "0.0.4", - "resolved": "https://registry.npmjs.org/eslint-config-turbo/-/eslint-config-turbo-0.0.4.tgz", - "integrity": "sha512-HErPS/wfWkSdV9Yd2dDkhZt3W2B78Ih/aWPFfaHmCMjzPalh+5KxRRGTf8MOBQLCebcWJX0lP1Zvc1rZIHlXGg==", - "dependencies": { - "eslint-plugin-turbo": "0.0.4" - }, - "peerDependencies": { - "eslint": "^7.23.0 || ^8.0.0" - } - }, - "node_modules/eslint-import-resolver-node": { - "version": "0.3.6", - "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.6.tgz", - "integrity": "sha512-0En0w03NRVMn9Uiyn8YRPDKvWjxCWkslUEhGNTdGx15RvPJYQ+lbOlqrlNI2vEAs4pDYK4f/HN2TbDmk5TP0iw==", - "dependencies": { - "debug": "^3.2.7", - "resolve": "^1.20.0" - } - }, - "node_modules/eslint-import-resolver-node/node_modules/debug": { - "version": "3.2.7", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", - "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", - 
"dependencies": { - "ms": "^2.1.1" - } - }, - "node_modules/eslint-import-resolver-typescript": { - "version": "2.7.1", - "resolved": "https://registry.npmjs.org/eslint-import-resolver-typescript/-/eslint-import-resolver-typescript-2.7.1.tgz", - "integrity": "sha512-00UbgGwV8bSgUv34igBDbTOtKhqoRMy9bFjNehT40bXg6585PNIct8HhXZ0SybqB9rWtXj9crcku8ndDn/gIqQ==", - "dependencies": { - "debug": "^4.3.4", - "glob": "^7.2.0", - "is-glob": "^4.0.3", - "resolve": "^1.22.0", - "tsconfig-paths": "^3.14.1" - }, - "engines": { - "node": ">=4" - }, - "peerDependencies": { - "eslint": "*", - "eslint-plugin-import": "*" - } - }, - "node_modules/eslint-import-resolver-typescript/node_modules/glob": { - "version": "7.2.3", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", - "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", - "dependencies": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.1.1", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - }, - "engines": { - "node": "*" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/eslint-module-utils": { - "version": "2.7.4", - "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.7.4.tgz", - "integrity": "sha512-j4GT+rqzCoRKHwURX7pddtIPGySnX9Si/cgMI5ztrcqOPtk5dDEeZ34CQVPphnqkJytlc97Vuk05Um2mJ3gEQA==", - "dependencies": { - "debug": "^3.2.7" - }, - "engines": { - "node": ">=4" - }, - "peerDependenciesMeta": { - "eslint": { - "optional": true - } - } - }, - "node_modules/eslint-module-utils/node_modules/debug": { - "version": "3.2.7", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", - "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", - "dependencies": { - "ms": "^2.1.1" - } - }, - "node_modules/eslint-plugin-import": { - "version": "2.26.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.26.0.tgz", - "integrity": "sha512-hYfi3FXaM8WPLf4S1cikh/r4IxnO6zrhZbEGz2b660EJRbuxgpDS5gkCuYgGWg2xxh2rBuIr4Pvhve/7c31koA==", - "dependencies": { - "array-includes": "^3.1.4", - "array.prototype.flat": "^1.2.5", - "debug": "^2.6.9", - "doctrine": "^2.1.0", - "eslint-import-resolver-node": "^0.3.6", - "eslint-module-utils": "^2.7.3", - "has": "^1.0.3", - "is-core-module": "^2.8.1", - "is-glob": "^4.0.3", - "minimatch": "^3.1.2", - "object.values": "^1.1.5", - "resolve": "^1.22.0", - "tsconfig-paths": "^3.14.1" - }, - "engines": { - "node": ">=4" - }, - "peerDependencies": { - "eslint": "^2 || ^3 || ^4 || ^5 || ^6 || ^7.2.0 || ^8" - } - }, - "node_modules/eslint-plugin-import/node_modules/debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "dependencies": { - "ms": "2.0.0" - } - }, - "node_modules/eslint-plugin-import/node_modules/doctrine": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", - "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==", - "dependencies": { - "esutils": "^2.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/eslint-plugin-import/node_modules/ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": 
"sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" - }, - "node_modules/eslint-plugin-jsx-a11y": { - "version": "6.6.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-jsx-a11y/-/eslint-plugin-jsx-a11y-6.6.1.tgz", - "integrity": "sha512-sXgFVNHiWffBq23uiS/JaP6eVR622DqwB4yTzKvGZGcPq6/yZ3WmOZfuBks/vHWo9GaFOqC2ZK4i6+C35knx7Q==", - "dependencies": { - "@babel/runtime": "^7.18.9", - "aria-query": "^4.2.2", - "array-includes": "^3.1.5", - "ast-types-flow": "^0.0.7", - "axe-core": "^4.4.3", - "axobject-query": "^2.2.0", - "damerau-levenshtein": "^1.0.8", - "emoji-regex": "^9.2.2", - "has": "^1.0.3", - "jsx-ast-utils": "^3.3.2", - "language-tags": "^1.0.5", - "minimatch": "^3.1.2", - "semver": "^6.3.0" - }, - "engines": { - "node": ">=4.0" - }, - "peerDependencies": { - "eslint": "^3 || ^4 || ^5 || ^6 || ^7 || ^8" - } - }, - "node_modules/eslint-plugin-react": { - "version": "7.31.8", - "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.31.8.tgz", - "integrity": "sha512-5lBTZmgQmARLLSYiwI71tiGVTLUuqXantZM6vlSY39OaDSV0M7+32K5DnLkmFrwTe+Ksz0ffuLUC91RUviVZfw==", - "dependencies": { - "array-includes": "^3.1.5", - "array.prototype.flatmap": "^1.3.0", - "doctrine": "^2.1.0", - "estraverse": "^5.3.0", - "jsx-ast-utils": "^2.4.1 || ^3.0.0", - "minimatch": "^3.1.2", - "object.entries": "^1.1.5", - "object.fromentries": "^2.0.5", - "object.hasown": "^1.1.1", - "object.values": "^1.1.5", - "prop-types": "^15.8.1", - "resolve": "^2.0.0-next.3", - "semver": "^6.3.0", - "string.prototype.matchall": "^4.0.7" - }, - "engines": { - "node": ">=4" - }, - "peerDependencies": { - "eslint": "^3 || ^4 || ^5 || ^6 || ^7 || ^8" - } - }, - "node_modules/eslint-plugin-react-hooks": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-4.6.0.tgz", - "integrity": "sha512-oFc7Itz9Qxh2x4gNHStv3BqJq54ExXmfC+a1NjAta66IAN87Wu0R/QArgIS9qKzX3dXKPI9H5crl9QchNMY9+g==", - "engines": { - "node": ">=10" - }, - "peerDependencies": { - "eslint": "^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0-0" - } - }, - "node_modules/eslint-plugin-react/node_modules/doctrine": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", - "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==", - "dependencies": { - "esutils": "^2.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/eslint-plugin-react/node_modules/resolve": { - "version": "2.0.0-next.4", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-2.0.0-next.4.tgz", - "integrity": "sha512-iMDbmAWtfU+MHpxt/I5iWI7cY6YVEZUQ3MBgPQ++XD1PELuJHIl82xBmObyP2KyQmkNB2dsqF7seoQQiAn5yDQ==", - "dependencies": { - "is-core-module": "^2.9.0", - "path-parse": "^1.0.7", - "supports-preserve-symlinks-flag": "^1.0.0" - }, - "bin": { - "resolve": "bin/resolve" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/eslint-plugin-turbo": { - "version": "0.0.4", - "resolved": "https://registry.npmjs.org/eslint-plugin-turbo/-/eslint-plugin-turbo-0.0.4.tgz", - "integrity": "sha512-dfmYE/iPvoJInQq+5E/0mj140y/rYwKtzZkn3uVK8+nvwC5zmWKQ6ehMWrL4bYBkGzSgpOndZM+jOXhPQ2m8Cg==", - "peerDependencies": { - "eslint": "^7.23.0 || ^8.0.0" - } - }, - "node_modules/eslint-scope": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz", - "integrity": 
"sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==", - "dependencies": { - "esrecurse": "^4.3.0", - "estraverse": "^4.1.1" - }, - "engines": { - "node": ">=8.0.0" - } - }, - "node_modules/eslint-scope/node_modules/estraverse": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", - "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", - "engines": { - "node": ">=4.0" - } - }, - "node_modules/eslint-utils": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-2.1.0.tgz", - "integrity": "sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg==", - "dependencies": { - "eslint-visitor-keys": "^1.1.0" - }, - "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/mysticatea" - } - }, - "node_modules/eslint-utils/node_modules/eslint-visitor-keys": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz", - "integrity": "sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==", - "engines": { - "node": ">=4" - } - }, - "node_modules/eslint-visitor-keys": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz", - "integrity": "sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw==", - "engines": { - "node": ">=10" - } - }, - "node_modules/eslint/node_modules/@babel/code-frame": { - "version": "7.12.11", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.12.11.tgz", - "integrity": "sha512-Zt1yodBx1UcyiePMSkWnU4hPqhwq7hGi2nFL1LeA3EUl+q2LQx16MISgJ0+z7dnmgvP9QtIleuETGOiOH1RcIw==", - "dependencies": { - "@babel/highlight": "^7.10.4" - } - }, - "node_modules/eslint/node_modules/ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "dependencies": { - "color-convert": "^2.0.1" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/eslint/node_modules/chalk": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", - "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", - "dependencies": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/chalk?sponsor=1" - } - }, - "node_modules/eslint/node_modules/color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "dependencies": { - "color-name": "~1.1.4" - }, - "engines": { - "node": ">=7.0.0" - } - }, - "node_modules/eslint/node_modules/color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" - }, - "node_modules/eslint/node_modules/escape-string-regexp": { - "version": "4.0.0", - 
"resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", - "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/eslint/node_modules/globals": { - "version": "13.17.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-13.17.0.tgz", - "integrity": "sha512-1C+6nQRb1GwGMKm2dH/E7enFAMxGTmGI7/dEdhy/DNelv85w9B72t3uc5frtMNXIbzrarJJ/lTCjcaZwbLJmyw==", - "dependencies": { - "type-fest": "^0.20.2" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/eslint/node_modules/has-flag": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", - "engines": { - "node": ">=8" - } - }, - "node_modules/eslint/node_modules/semver": { - "version": "7.3.7", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.7.tgz", - "integrity": "sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g==", - "dependencies": { - "lru-cache": "^6.0.0" - }, - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/eslint/node_modules/supports-color": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", - "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", - "dependencies": { - "has-flag": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/espree": { - "version": "7.3.1", - "resolved": "https://registry.npmjs.org/espree/-/espree-7.3.1.tgz", - "integrity": "sha512-v3JCNCE64umkFpmkFGqzVKsOT0tN1Zr+ueqLZfpV1Ob8e+CEgPWa+OxCoGH3tnhimMKIaBm4m/vaRpJ/krRz2g==", - "dependencies": { - "acorn": "^7.4.0", - "acorn-jsx": "^5.3.1", - "eslint-visitor-keys": "^1.3.0" - }, - "engines": { - "node": "^10.12.0 || >=12.0.0" - } - }, - "node_modules/espree/node_modules/eslint-visitor-keys": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz", - "integrity": "sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==", - "engines": { - "node": ">=4" - } - }, - "node_modules/esprima": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", - "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", - "bin": { - "esparse": "bin/esparse.js", - "esvalidate": "bin/esvalidate.js" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/esquery": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.4.0.tgz", - "integrity": "sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w==", - "dependencies": { - "estraverse": "^5.1.0" - }, - "engines": { - "node": ">=0.10" - } - }, - "node_modules/esrecurse": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", - "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", - "dependencies": { - "estraverse": "^5.2.0" - }, - "engines": { - "node": ">=4.0" - } - }, - 
"node_modules/estraverse": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", - "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", - "engines": { - "node": ">=4.0" - } - }, - "node_modules/esutils": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", - "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/fast-deep-equal": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", - "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==" - }, - "node_modules/fast-glob": { - "version": "3.2.12", - "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.12.tgz", - "integrity": "sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w==", - "dependencies": { - "@nodelib/fs.stat": "^2.0.2", - "@nodelib/fs.walk": "^1.2.3", - "glob-parent": "^5.1.2", - "merge2": "^1.3.0", - "micromatch": "^4.0.4" - }, - "engines": { - "node": ">=8.6.0" - } - }, - "node_modules/fast-json-stable-stringify": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", - "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==" - }, - "node_modules/fast-levenshtein": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", - "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==" - }, - "node_modules/fastq": { - "version": "1.13.0", - "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.13.0.tgz", - "integrity": "sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw==", - "dependencies": { - "reusify": "^1.0.4" - } - }, - "node_modules/file-entry-cache": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", - "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==", - "dependencies": { - "flat-cache": "^3.0.4" - }, - "engines": { - "node": "^10.12.0 || >=12.0.0" - } - }, - "node_modules/fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "dependencies": { - "to-regex-range": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/flat-cache": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.0.4.tgz", - "integrity": "sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg==", - "dependencies": { - "flatted": "^3.1.0", - "rimraf": "^3.0.2" - }, - "engines": { - "node": "^10.12.0 || >=12.0.0" - } - }, - "node_modules/flatted": { - "version": "3.2.7", - "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.2.7.tgz", - "integrity": "sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ==" - }, - "node_modules/fs.realpath": { - "version": "1.0.0", - "resolved": 
"https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==" - }, - "node_modules/function-bind": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", - "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" - }, - "node_modules/function.prototype.name": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.5.tgz", - "integrity": "sha512-uN7m/BzVKQnCUF/iW8jYea67v++2u7m5UgENbHRtdDVclOUP+FMPlCNdmk0h/ysGyo2tavMJEDqJAkJdRa1vMA==", - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.3", - "es-abstract": "^1.19.0", - "functions-have-names": "^1.2.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/functional-red-black-tree": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz", - "integrity": "sha512-dsKNQNdj6xA3T+QlADDA7mOSlX0qiMINjn0cgr+eGHGsbSHzTabcIogz2+p/iqP1Xs6EP/sS2SbqH+brGTbq0g==" - }, - "node_modules/functions-have-names": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/functions-have-names/-/functions-have-names-1.2.3.tgz", - "integrity": "sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==", - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/gensync": { - "version": "1.0.0-beta.2", - "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", - "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", - "dev": true, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/get-intrinsic": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.3.tgz", - "integrity": "sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A==", - "dependencies": { - "function-bind": "^1.1.1", - "has": "^1.0.3", - "has-symbols": "^1.0.3" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/get-symbol-description": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.0.tgz", - "integrity": "sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw==", - "dependencies": { - "call-bind": "^1.0.2", - "get-intrinsic": "^1.1.1" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/glob": { - "version": "7.1.7", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.7.tgz", - "integrity": "sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ==", - "dependencies": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.0.4", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - }, - "engines": { - "node": "*" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/glob-parent": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", - "integrity": 
"sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", - "dependencies": { - "is-glob": "^4.0.1" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/globals": { - "version": "11.12.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", - "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/globby": { - "version": "11.1.0", - "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", - "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==", - "dependencies": { - "array-union": "^2.1.0", - "dir-glob": "^3.0.1", - "fast-glob": "^3.2.9", - "ignore": "^5.2.0", - "merge2": "^1.4.1", - "slash": "^3.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/globby/node_modules/ignore": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.2.0.tgz", - "integrity": "sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ==", - "engines": { - "node": ">= 4" - } - }, - "node_modules/graceful-fs": { - "version": "4.2.10", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.10.tgz", - "integrity": "sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==", - "dev": true - }, - "node_modules/has": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", - "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", - "dependencies": { - "function-bind": "^1.1.1" - }, - "engines": { - "node": ">= 0.4.0" - } - }, - "node_modules/has-bigints": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.2.tgz", - "integrity": "sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ==", - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/has-flag": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", - "engines": { - "node": ">=4" - } - }, - "node_modules/has-property-descriptors": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz", - "integrity": "sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ==", - "dependencies": { - "get-intrinsic": "^1.1.1" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/has-symbols": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", - "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/has-tostringtag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.0.tgz", - "integrity": "sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==", - "dependencies": { - "has-symbols": "^1.0.2" - }, 
- "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/ignore": { - "version": "4.0.6", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-4.0.6.tgz", - "integrity": "sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==", - "engines": { - "node": ">= 4" - } - }, - "node_modules/import-fresh": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", - "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==", - "dependencies": { - "parent-module": "^1.0.0", - "resolve-from": "^4.0.0" - }, - "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/imurmurhash": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", - "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", - "engines": { - "node": ">=0.8.19" - } - }, - "node_modules/inflight": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", - "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", - "dependencies": { - "once": "^1.3.0", - "wrappy": "1" - } - }, - "node_modules/inherits": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" - }, - "node_modules/internal-slot": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.3.tgz", - "integrity": "sha512-O0DB1JC/sPyZl7cIo78n5dR7eUSwwpYPiXRhTzNxZVAMUuB8vlnRFyLxdrVToks6XPLVnFfbzaVd5WLjhgg+vA==", - "dependencies": { - "get-intrinsic": "^1.1.0", - "has": "^1.0.3", - "side-channel": "^1.0.4" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/is-bigint": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.4.tgz", - "integrity": "sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg==", - "dependencies": { - "has-bigints": "^1.0.1" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-boolean-object": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.1.2.tgz", - "integrity": "sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA==", - "dependencies": { - "call-bind": "^1.0.2", - "has-tostringtag": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-callable": { - "version": "1.2.7", - "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz", - "integrity": "sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-core-module": { - "version": "2.10.0", - "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.10.0.tgz", - "integrity": "sha512-Erxj2n/LDAZ7H8WNJXd9tw38GYM3dv8rk8Zcs+jJuxYTW7sozH+SS8NtrSjVL1/vpLvWi1hxy96IzjJ3EHTJJg==", - "dependencies": { - "has": "^1.0.3" - }, - "funding": 
{ - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-date-object": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.5.tgz", - "integrity": "sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ==", - "dependencies": { - "has-tostringtag": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-extglob": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", - "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-fullwidth-code-point": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", - "engines": { - "node": ">=8" - } - }, - "node_modules/is-glob": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", - "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", - "dependencies": { - "is-extglob": "^2.1.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-negative-zero": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.2.tgz", - "integrity": "sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA==", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "engines": { - "node": ">=0.12.0" - } - }, - "node_modules/is-number-object": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.7.tgz", - "integrity": "sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ==", - "dependencies": { - "has-tostringtag": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-regex": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", - "integrity": "sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==", - "dependencies": { - "call-bind": "^1.0.2", - "has-tostringtag": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-shared-array-buffer": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz", - "integrity": "sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA==", - "dependencies": { - "call-bind": "^1.0.2" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-string": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.7.tgz", - "integrity": 
"sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==", - "dependencies": { - "has-tostringtag": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-symbol": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.4.tgz", - "integrity": "sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg==", - "dependencies": { - "has-symbols": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-weakref": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.0.2.tgz", - "integrity": "sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ==", - "dependencies": { - "call-bind": "^1.0.2" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/isexe": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==" - }, - "node_modules/js-tokens": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", - "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==" - }, - "node_modules/js-yaml": { - "version": "3.14.1", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", - "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", - "dependencies": { - "argparse": "^1.0.7", - "esprima": "^4.0.0" - }, - "bin": { - "js-yaml": "bin/js-yaml.js" - } - }, - "node_modules/jsesc": { - "version": "2.5.2", - "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz", - "integrity": "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==", - "dev": true, - "bin": { - "jsesc": "bin/jsesc" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/json-schema-traverse": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", - "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==" - }, - "node_modules/json-stable-stringify-without-jsonify": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", - "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==" - }, - "node_modules/json5": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.1.tgz", - "integrity": "sha512-1hqLFMSrGHRHxav9q9gNjJ5EXznIxGVO09xQRrwplcS8qs28pZ8s8hupZAmqDwZUmVZ2Qb2jnyPOWcDH8m8dlA==", - "dev": true, - "bin": { - "json5": "lib/cli.js" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/jsx-ast-utils": { - "version": "3.3.3", - "resolved": "https://registry.npmjs.org/jsx-ast-utils/-/jsx-ast-utils-3.3.3.tgz", - "integrity": "sha512-fYQHZTZ8jSfmWZ0iyzfwiU4WDX4HpHbMCZ3gPlWYiCl3BoeOTsqKBqnTVfH2rYT7eP5c3sVbeSPHnnJOaTrWiw==", - "dependencies": { - "array-includes": "^3.1.5", - "object.assign": "^4.1.3" - }, - "engines": { - "node": ">=4.0" - } - }, - "node_modules/language-subtag-registry": { - 
"version": "0.3.22", - "resolved": "https://registry.npmjs.org/language-subtag-registry/-/language-subtag-registry-0.3.22.tgz", - "integrity": "sha512-tN0MCzyWnoz/4nHS6uxdlFWoUZT7ABptwKPQ52Ea7URk6vll88bWBVhodtnlfEuCcKWNGoc+uGbw1cwa9IKh/w==" - }, - "node_modules/language-tags": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/language-tags/-/language-tags-1.0.5.tgz", - "integrity": "sha512-qJhlO9cGXi6hBGKoxEG/sKZDAHD5Hnu9Hs4WbOY3pCWXDhw0N8x1NenNzm2EnNLkLkk7J2SdxAkDSbb6ftT+UQ==", - "dependencies": { - "language-subtag-registry": "~0.3.2" - } - }, - "node_modules/levn": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", - "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", - "dependencies": { - "prelude-ls": "^1.2.1", - "type-check": "~0.4.0" - }, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/lodash": { - "version": "3.10.1", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-3.10.1.tgz", - "integrity": "sha512-9mDDwqVIma6OZX79ZlDACZl8sBm0TEnkf99zV3iMA4GzkIT/9hiqP5mY0HoT1iNLCrKc/R1HByV+yJfRWVJryQ==" - }, - "node_modules/lodash.merge": { - "version": "4.6.2", - "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", - "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==" - }, - "node_modules/lodash.truncate": { - "version": "4.4.2", - "resolved": "https://registry.npmjs.org/lodash.truncate/-/lodash.truncate-4.4.2.tgz", - "integrity": "sha512-jttmRe7bRse52OsWIMDLaXxWqRAmtIUccAQ3garviCqJjafXOfNMO0yMfNpdD6zbGaTU0P5Nz7e7gAT6cKmJRw==" - }, - "node_modules/loose-envify": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", - "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", - "dependencies": { - "js-tokens": "^3.0.0 || ^4.0.0" - }, - "bin": { - "loose-envify": "cli.js" - } - }, - "node_modules/lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/merge2": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", - "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", - "engines": { - "node": ">= 8" - } - }, - "node_modules/micromatch": { - "version": "4.0.5", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz", - "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==", - "dependencies": { - "braces": "^3.0.2", - "picomatch": "^2.3.1" - }, - "engines": { - "node": ">=8.6" - } - }, - "node_modules/minimatch": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", - "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", - "dependencies": { - "brace-expansion": "^1.1.7" - }, - "engines": { - "node": "*" - } - }, - "node_modules/minimist": { - "version": "1.2.6", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz", - "integrity": 
"sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==" - }, - "node_modules/ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" - }, - "node_modules/nanoid": { - "version": "3.3.4", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.4.tgz", - "integrity": "sha512-MqBkQh/OHTS2egovRtLk45wEyNXwF+cokD+1YPf9u5VfJiRdAiRwB2froX5Co9Rh20xs4siNPm8naNotSD6RBw==", - "bin": { - "nanoid": "bin/nanoid.cjs" - }, - "engines": { - "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" - } - }, - "node_modules/natural-compare": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", - "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==" - }, - "node_modules/next": { - "version": "12.3.0", - "resolved": "https://registry.npmjs.org/next/-/next-12.3.0.tgz", - "integrity": "sha512-GpzI6me9V1+XYtfK0Ae9WD0mKqHyzQlGq1xH1rzNIYMASo4Tkl4rTe9jSqtBpXFhOS33KohXs9ZY38Akkhdciw==", - "dependencies": { - "@next/env": "12.3.0", - "@swc/helpers": "0.4.11", - "caniuse-lite": "^1.0.30001332", - "postcss": "8.4.14", - "styled-jsx": "5.0.6", - "use-sync-external-store": "1.2.0" - }, - "bin": { - "next": "dist/bin/next" - }, - "engines": { - "node": ">=12.22.0" - }, - "optionalDependencies": { - "@next/swc-android-arm-eabi": "12.3.0", - "@next/swc-android-arm64": "12.3.0", - "@next/swc-darwin-arm64": "12.3.0", - "@next/swc-darwin-x64": "12.3.0", - "@next/swc-freebsd-x64": "12.3.0", - "@next/swc-linux-arm-gnueabihf": "12.3.0", - "@next/swc-linux-arm64-gnu": "12.3.0", - "@next/swc-linux-arm64-musl": "12.3.0", - "@next/swc-linux-x64-gnu": "12.3.0", - "@next/swc-linux-x64-musl": "12.3.0", - "@next/swc-win32-arm64-msvc": "12.3.0", - "@next/swc-win32-ia32-msvc": "12.3.0", - "@next/swc-win32-x64-msvc": "12.3.0" - }, - "peerDependencies": { - "fibers": ">= 3.1.0", - "node-sass": "^6.0.0 || ^7.0.0", - "react": "^17.0.2 || ^18.0.0-0", - "react-dom": "^17.0.2 || ^18.0.0-0", - "sass": "^1.3.0" - }, - "peerDependenciesMeta": { - "fibers": { - "optional": true - }, - "node-sass": { - "optional": true - }, - "sass": { - "optional": true - } - } - }, - "node_modules/next-transpile-modules": { - "version": "9.0.0", - "resolved": "https://registry.npmjs.org/next-transpile-modules/-/next-transpile-modules-9.0.0.tgz", - "integrity": "sha512-VCNFOazIAnXn1hvgYYSTYMnoWgKgwlYh4lm1pKbSfiB3kj5ZYLcKVhfh3jkPOg1cnd9DP+pte9yCUocdPEUBTQ==", - "dev": true, - "dependencies": { - "enhanced-resolve": "^5.7.0", - "escalade": "^3.1.1" - } - }, - "node_modules/node-releases": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.6.tgz", - "integrity": "sha512-PiVXnNuFm5+iYkLBNeq5211hvO38y63T0i2KKh2KnUs3RpzJ+JtODFjkD8yjLwnDkTYF1eKXheUwdssR+NRZdg==", - "dev": true - }, - "node_modules/object-assign": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", - "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/object-inspect": { - "version": "1.12.2", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.2.tgz", - "integrity": "sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ==", - 
"funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/object-keys": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", - "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/object.assign": { - "version": "4.1.4", - "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.4.tgz", - "integrity": "sha512-1mxKf0e58bvyjSCtKYY4sRe9itRk3PJpquJOjeIkz885CczcI4IvJJDLPS72oowuSh+pBxUFROpX+TU++hxhZQ==", - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "has-symbols": "^1.0.3", - "object-keys": "^1.1.1" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/object.entries": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/object.entries/-/object.entries-1.1.5.tgz", - "integrity": "sha512-TyxmjUoZggd4OrrU1W66FMDG6CuqJxsFvymeyXI51+vQLN67zYfZseptRge703kKQdo4uccgAKebXFcRCzk4+g==", - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.3", - "es-abstract": "^1.19.1" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/object.fromentries": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/object.fromentries/-/object.fromentries-2.0.5.tgz", - "integrity": "sha512-CAyG5mWQRRiBU57Re4FKoTBjXfDoNwdFVH2Y1tS9PqCsfUTymAohOkEMSG3aRNKmv4lV3O7p1et7c187q6bynw==", - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.3", - "es-abstract": "^1.19.1" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/object.hasown": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/object.hasown/-/object.hasown-1.1.1.tgz", - "integrity": "sha512-LYLe4tivNQzq4JdaWW6WO3HMZZJWzkkH8fnI6EebWl0VZth2wL2Lovm74ep2/gZzlaTdV62JZHEqHQ2yVn8Q/A==", - "dependencies": { - "define-properties": "^1.1.4", - "es-abstract": "^1.19.5" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/object.values": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.5.tgz", - "integrity": "sha512-QUZRW0ilQ3PnPpbNtgdNV1PDbEqLIiSFB3l+EnGtBQ/8SUTLj1PZwtQHABZtLgwpJZTSZhuGLOGk57Drx2IvYg==", - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.3", - "es-abstract": "^1.19.1" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/once": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", - "dependencies": { - "wrappy": "1" - } - }, - "node_modules/optionator": { - "version": "0.9.1", - "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.1.tgz", - "integrity": "sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw==", - "dependencies": { - "deep-is": "^0.1.3", - "fast-levenshtein": "^2.0.6", - "levn": "^0.4.1", - "prelude-ls": "^1.2.1", - "type-check": "^0.4.0", - "word-wrap": "^1.2.3" - }, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/parent-module": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", - "integrity": 
"sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", - "dependencies": { - "callsites": "^3.0.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/path-is-absolute": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", - "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "engines": { - "node": ">=8" - } - }, - "node_modules/path-parse": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", - "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==" - }, - "node_modules/path-type": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", - "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", - "engines": { - "node": ">=8" - } - }, - "node_modules/picocolors": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", - "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==" - }, - "node_modules/picomatch": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", - "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", - "engines": { - "node": ">=8.6" - }, - "funding": { - "url": "https://github.com/sponsors/jonschlinkert" - } - }, - "node_modules/postcss": { - "version": "8.4.14", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.14.tgz", - "integrity": "sha512-E398TUmfAYFPBSdzgeieK2Y1+1cpdxJx8yXbK/m57nRhKSmk1GB2tO4lbLBtlkfPQTDKfe4Xqv1ASWPpayPEig==", - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/postcss/" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/postcss" - } - ], - "dependencies": { - "nanoid": "^3.3.4", - "picocolors": "^1.0.0", - "source-map-js": "^1.0.2" - }, - "engines": { - "node": "^10 || ^12 || >=14" - } - }, - "node_modules/prelude-ls": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", - "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/prettier": { - "version": "2.7.1", - "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.7.1.tgz", - "integrity": "sha512-ujppO+MkdPqoVINuDFDRLClm7D78qbDt0/NR+wp5FqEZOoTNAjPHWj17QRhu7geIHJfcNhRk1XVQmF8Bp3ye+g==", - "dev": true, - "bin": { - "prettier": "bin-prettier.js" - }, - "engines": { - "node": ">=10.13.0" - }, - "funding": { - "url": "https://github.com/prettier/prettier?sponsor=1" - } - }, - "node_modules/progress": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/progress/-/progress-2.0.3.tgz", - "integrity": "sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==", - "engines": { - "node": ">=0.4.0" - } - }, - "node_modules/prop-types": { - "version": "15.8.1", - 
"resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.8.1.tgz", - "integrity": "sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==", - "dependencies": { - "loose-envify": "^1.4.0", - "object-assign": "^4.1.1", - "react-is": "^16.13.1" - } - }, - "node_modules/punycode": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", - "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==", - "engines": { - "node": ">=6" - } - }, - "node_modules/queue-microtask": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", - "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ] - }, - "node_modules/react": { - "version": "18.2.0", - "resolved": "https://registry.npmjs.org/react/-/react-18.2.0.tgz", - "integrity": "sha512-/3IjMdb2L9QbBdWiW5e3P2/npwMBaU9mHCSCUzNln0ZCYbcfTsGbTJrU/kGemdH2IWmB2ioZ+zkxtmq6g09fGQ==", - "dependencies": { - "loose-envify": "^1.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/react-dom": { - "version": "18.2.0", - "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-18.2.0.tgz", - "integrity": "sha512-6IMTriUmvsjHUjNtEDudZfuDQUoWXVxKHhlEGSk81n4YFS+r/Kl99wXiwlVXtPBtJenozv2P+hxDsw9eA7Xo6g==", - "dependencies": { - "loose-envify": "^1.1.0", - "scheduler": "^0.23.0" - }, - "peerDependencies": { - "react": "^18.2.0" - } - }, - "node_modules/react-is": { - "version": "16.13.1", - "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz", - "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==" - }, - "node_modules/regenerator-runtime": { - "version": "0.13.9", - "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz", - "integrity": "sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA==" - }, - "node_modules/regexp.prototype.flags": { - "version": "1.4.3", - "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.4.3.tgz", - "integrity": "sha512-fjggEOO3slI6Wvgjwflkc4NFRCTZAu5CnNfBd5qOMYhWdn67nJBBu34/TkD++eeFmd8C9r9jfXJ27+nSiRkSUA==", - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.3", - "functions-have-names": "^1.2.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/regexpp": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-3.2.0.tgz", - "integrity": "sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg==", - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/mysticatea" - } - }, - "node_modules/require-from-string": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", - "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/resolve": { - "version": "1.22.1", - "resolved": 
"https://registry.npmjs.org/resolve/-/resolve-1.22.1.tgz", - "integrity": "sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw==", - "dependencies": { - "is-core-module": "^2.9.0", - "path-parse": "^1.0.7", - "supports-preserve-symlinks-flag": "^1.0.0" - }, - "bin": { - "resolve": "bin/resolve" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/resolve-from": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", - "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", - "engines": { - "node": ">=4" - } - }, - "node_modules/reusify": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", - "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", - "engines": { - "iojs": ">=1.0.0", - "node": ">=0.10.0" - } - }, - "node_modules/rimraf": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", - "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", - "dependencies": { - "glob": "^7.1.3" - }, - "bin": { - "rimraf": "bin.js" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/run-parallel": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", - "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "dependencies": { - "queue-microtask": "^1.2.2" - } - }, - "node_modules/safe-buffer": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", - "dev": true - }, - "node_modules/safe-regex-test": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.0.0.tgz", - "integrity": "sha512-JBUUzyOgEwXQY1NuPtvcj/qcBDbDmEvWufhlnXZIm75DEHp+afM1r1ujJpJsV/gSM4t59tpDyPi1sd6ZaPFfsA==", - "dependencies": { - "call-bind": "^1.0.2", - "get-intrinsic": "^1.1.3", - "is-regex": "^1.1.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/scheduler": { - "version": "0.23.0", - "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.23.0.tgz", - "integrity": "sha512-CtuThmgHNg7zIZWAXi3AsyIzA3n4xx7aNyjwC2VJldO2LMVDhFK+63xGqq6CsJH4rTAt6/M+N4GhZiDYPx9eUw==", - "dependencies": { - "loose-envify": "^1.1.0" - } - }, - "node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", - "bin": { - "semver": "bin/semver.js" - } - }, - "node_modules/shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "dependencies": { - "shebang-regex": "^3.0.0" - }, - "engines": { - "node": 
">=8" - } - }, - "node_modules/shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "engines": { - "node": ">=8" - } - }, - "node_modules/side-channel": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", - "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", - "dependencies": { - "call-bind": "^1.0.0", - "get-intrinsic": "^1.0.2", - "object-inspect": "^1.9.0" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/slash": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", - "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", - "engines": { - "node": ">=8" - } - }, - "node_modules/slice-ansi": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-4.0.0.tgz", - "integrity": "sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ==", - "dependencies": { - "ansi-styles": "^4.0.0", - "astral-regex": "^2.0.0", - "is-fullwidth-code-point": "^3.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/slice-ansi?sponsor=1" - } - }, - "node_modules/slice-ansi/node_modules/ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "dependencies": { - "color-convert": "^2.0.1" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/slice-ansi/node_modules/color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "dependencies": { - "color-name": "~1.1.4" - }, - "engines": { - "node": ">=7.0.0" - } - }, - "node_modules/slice-ansi/node_modules/color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" - }, - "node_modules/source-map-js": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.0.2.tgz", - "integrity": "sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/sprintf-js": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", - "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==" - }, - "node_modules/string-width": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, - 
"node_modules/string-width/node_modules/emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" - }, - "node_modules/string.prototype.matchall": { - "version": "4.0.7", - "resolved": "https://registry.npmjs.org/string.prototype.matchall/-/string.prototype.matchall-4.0.7.tgz", - "integrity": "sha512-f48okCX7JiwVi1NXCVWcFnZgADDC/n2vePlQ/KUCNqCikLLilQvwjMO8+BHVKvgzH0JB0J9LEPgxOGT02RoETg==", - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.3", - "es-abstract": "^1.19.1", - "get-intrinsic": "^1.1.1", - "has-symbols": "^1.0.3", - "internal-slot": "^1.0.3", - "regexp.prototype.flags": "^1.4.1", - "side-channel": "^1.0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/string.prototype.trimend": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.5.tgz", - "integrity": "sha512-I7RGvmjV4pJ7O3kdf+LXFpVfdNOxtCW/2C8f6jNiW4+PQchwxkCDzlk1/7p+Wl4bqFIZeF47qAHXLuHHWKAxog==", - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.19.5" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/string.prototype.trimstart": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.5.tgz", - "integrity": "sha512-THx16TJCGlsN0o6dl2o6ncWUsdgnLRSA23rRE5pyGBw/mLr3Ej/R2LaqCtgP8VNMGZsvMWnf9ooZPyY2bHvUFg==", - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.19.5" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/strip-bom": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", - "integrity": "sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==", - "engines": { - "node": ">=4" - } - }, - "node_modules/strip-json-comments": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", - "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/styled-jsx": { - "version": "5.0.6", - "resolved": "https://registry.npmjs.org/styled-jsx/-/styled-jsx-5.0.6.tgz", - "integrity": "sha512-xOeROtkK5MGMDimBQ3J6iPId8q0t/BDoG5XN6oKkZClVz9ISF/hihN8OCn2LggMU6N32aXnrXBdn3auSqNS9fA==", - "engines": { - "node": ">= 12.0.0" - }, - "peerDependencies": { - "react": ">= 16.8.0 || 17.x.x || ^18.0.0-0" - }, - "peerDependenciesMeta": { - "@babel/core": { - "optional": true - }, - "babel-plugin-macros": { - "optional": true - } - } - }, - "node_modules/supports-color": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", - "integrity": 
"sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", - "dependencies": { - "has-flag": "^3.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/supports-preserve-symlinks-flag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", - "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/table": { - "version": "6.8.0", - "resolved": "https://registry.npmjs.org/table/-/table-6.8.0.tgz", - "integrity": "sha512-s/fitrbVeEyHKFa7mFdkuQMWlH1Wgw/yEXMt5xACT4ZpzWFluehAxRtUUQKPuWhaLAWhFcVx6w3oC8VKaUfPGA==", - "dependencies": { - "ajv": "^8.0.1", - "lodash.truncate": "^4.4.2", - "slice-ansi": "^4.0.0", - "string-width": "^4.2.3", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=10.0.0" - } - }, - "node_modules/table/node_modules/ajv": { - "version": "8.11.0", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.11.0.tgz", - "integrity": "sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg==", - "dependencies": { - "fast-deep-equal": "^3.1.1", - "json-schema-traverse": "^1.0.0", - "require-from-string": "^2.0.2", - "uri-js": "^4.2.2" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/epoberezkin" - } - }, - "node_modules/table/node_modules/json-schema-traverse": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", - "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==" - }, - "node_modules/tapable": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.2.1.tgz", - "integrity": "sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ==", - "dev": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/text-table": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", - "integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==" - }, - "node_modules/to-fast-properties": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", - "integrity": "sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dependencies": { - "is-number": "^7.0.0" - }, - "engines": { - "node": ">=8.0" - } - }, - "node_modules/tsconfig": { - "resolved": "packages/tsconfig", - "link": true - }, - "node_modules/tsconfig-paths": { - "version": "3.14.1", - "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.14.1.tgz", - "integrity": "sha512-fxDhWnFSLt3VuTwtvJt5fpwxBHg5AdKWMsgcPOOIilyjymcYVZoCQF8fvFRezCNfblEXmi+PcM1eYHeOAgXCOQ==", - "dependencies": { - "@types/json5": "^0.0.29", - "json5": "^1.0.1", - "minimist": "^1.2.6", - "strip-bom": "^3.0.0" - } - }, - "node_modules/tsconfig-paths/node_modules/json5": { 
- "version": "1.0.1", - "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.1.tgz", - "integrity": "sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow==", - "dependencies": { - "minimist": "^1.2.0" - }, - "bin": { - "json5": "lib/cli.js" - } - }, - "node_modules/tslib": { - "version": "2.4.0", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", - "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" - }, - "node_modules/tsutils": { - "version": "3.21.0", - "resolved": "https://registry.npmjs.org/tsutils/-/tsutils-3.21.0.tgz", - "integrity": "sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA==", - "dependencies": { - "tslib": "^1.8.1" - }, - "engines": { - "node": ">= 6" - }, - "peerDependencies": { - "typescript": ">=2.8.0 || >= 3.2.0-dev || >= 3.3.0-dev || >= 3.4.0-dev || >= 3.5.0-dev || >= 3.6.0-dev || >= 3.6.0-beta || >= 3.7.0-dev || >= 3.7.0-beta" - } - }, - "node_modules/tsutils/node_modules/tslib": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", - "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==" - }, - "node_modules/turbo": { - "version": "1.5.5", - "resolved": "https://registry.npmjs.org/turbo/-/turbo-1.5.5.tgz", - "integrity": "sha512-PVQSDl0STC9WXIyHcYUWs9gXsf8JjQig/FuHfuB8N6+XlgCGB3mPbfMEE6zrChGz2hufH4/guKRX1XJuNL6XTA==", - "dev": true, - "hasInstallScript": true, - "bin": { - "turbo": "bin/turbo" - }, - "optionalDependencies": { - "turbo-darwin-64": "1.5.5", - "turbo-darwin-arm64": "1.5.5", - "turbo-linux-64": "1.5.5", - "turbo-linux-arm64": "1.5.5", - "turbo-windows-64": "1.5.5", - "turbo-windows-arm64": "1.5.5" - } - }, - "node_modules/turbo-darwin-64": { - "version": "1.5.5", - "resolved": "https://registry.npmjs.org/turbo-darwin-64/-/turbo-darwin-64-1.5.5.tgz", - "integrity": "sha512-HvEn6P2B+NXDekq9LRpRgUjcT9/oygLTcK47U0qsAJZXRBSq/2hvD7lx4nAwgY/4W3rhYJeWtHTzbhoN6BXqGQ==", - "cpu": ["x64"], - "dev": true, - "optional": true, - "os": ["darwin"] - }, - "node_modules/turbo-darwin-arm64": { - "version": "1.5.5", - "resolved": "https://registry.npmjs.org/turbo-darwin-arm64/-/turbo-darwin-arm64-1.5.5.tgz", - "integrity": "sha512-Dmxr09IUy6M0nc7/xWod9galIO2DD500B75sJSkHeT+CCdJOWnlinux0ZPF8CSygNqymwYO8AO2l15/6yxcycg==", - "cpu": ["arm64"], - "dev": true, - "optional": true, - "os": ["darwin"] - }, - "node_modules/turbo-linux-64": { - "version": "1.5.5", - "resolved": "https://registry.npmjs.org/turbo-linux-64/-/turbo-linux-64-1.5.5.tgz", - "integrity": "sha512-wd07TZ4zXXWjzZE00FcFMLmkybQQK/NV9ff66vvAV0vdiuacSMBCNLrD6Mm4ncfrUPW/rwFW5kU/7hyuEqqtDw==", - "cpu": ["x64"], - "dev": true, - "optional": true, - "os": ["linux"] - }, - "node_modules/turbo-linux-arm64": { - "version": "1.5.5", - "resolved": "https://registry.npmjs.org/turbo-linux-arm64/-/turbo-linux-arm64-1.5.5.tgz", - "integrity": "sha512-q3q33tuo74R7gicnfvFbnZZvqmlq7Vakcvx0eshifnJw4PR+oMnTCb4w8ElVFx070zsb8DVTibq99y8NJH8T1Q==", - "cpu": ["arm64"], - "dev": true, - "optional": true, - "os": ["linux"] - }, - "node_modules/turbo-windows-64": { - "version": "1.5.5", - "resolved": "https://registry.npmjs.org/turbo-windows-64/-/turbo-windows-64-1.5.5.tgz", - "integrity": "sha512-lPp9kHonNFfqgovbaW+UAPO5cLmoAN+m3G3FzqcrRPnlzt97vXYsDhDd/4Zy3oAKoAcprtP4CGy0ddisqsKTVw==", - "cpu": ["x64"], - "dev": true, - "optional": true, - "os": ["win32"] - }, - 
"node_modules/turbo-windows-arm64": { - "version": "1.5.5", - "resolved": "https://registry.npmjs.org/turbo-windows-arm64/-/turbo-windows-arm64-1.5.5.tgz", - "integrity": "sha512-3AfGULKNZiZVrEzsIE+W79ZRW1+f5r4nM4wLlJ1PTBHyRxBZdD6KTH1tijGfy/uTlcV5acYnKHEkDc6Q9PAXGQ==", - "cpu": ["arm64"], - "dev": true, - "optional": true, - "os": ["win32"] - }, - "node_modules/type-check": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", - "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", - "dependencies": { - "prelude-ls": "^1.2.1" - }, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/type-fest": { - "version": "0.20.2", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", - "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/typescript": { - "version": "4.8.4", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.8.4.tgz", - "integrity": "sha512-QCh+85mCy+h0IGff8r5XWzOVSbBO+KfeYrMQh7NJ58QujwcE22u+NUSmUxqF+un70P9GXKxa2HCNiTTMJknyjQ==", - "bin": { - "tsc": "bin/tsc", - "tsserver": "bin/tsserver" - }, - "engines": { - "node": ">=4.2.0" - } - }, - "node_modules/ui": { - "resolved": "packages/ui", - "link": true - }, - "node_modules/unbox-primitive": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.2.tgz", - "integrity": "sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw==", - "dependencies": { - "call-bind": "^1.0.2", - "has-bigints": "^1.0.2", - "has-symbols": "^1.0.3", - "which-boxed-primitive": "^1.0.2" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/update-browserslist-db": { - "version": "1.0.9", - "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.0.9.tgz", - "integrity": "sha512-/xsqn21EGVdXI3EXSum1Yckj3ZVZugqyOZQ/CxYPBD/R+ko9NSUScf8tFF4dOKY+2pvSSJA/S+5B8s4Zr4kyvg==", - "dev": true, - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/browserslist" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/browserslist" - } - ], - "dependencies": { - "escalade": "^3.1.1", - "picocolors": "^1.0.0" - }, - "bin": { - "browserslist-lint": "cli.js" - }, - "peerDependencies": { - "browserslist": ">= 4.21.0" - } - }, - "node_modules/uri-js": { - "version": "4.4.1", - "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", - "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", - "dependencies": { - "punycode": "^2.1.0" - } - }, - "node_modules/use-sync-external-store": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/use-sync-external-store/-/use-sync-external-store-1.2.0.tgz", - "integrity": "sha512-eEgnFxGQ1Ife9bzYs6VLi8/4X6CObHMw9Qr9tPY43iKwsPw8xE8+EFsf/2cFZ5S3esXgpWgtSCtLNS41F+sKPA==", - "peerDependencies": { - "react": "^16.8.0 || ^17.0.0 || ^18.0.0" - } - }, - "node_modules/v8-compile-cache": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz", - "integrity": "sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA==" - }, - 
"node_modules/web": { - "resolved": "apps/web", - "link": true - }, - "node_modules/which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/node-which" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/which-boxed-primitive": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz", - "integrity": "sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==", - "dependencies": { - "is-bigint": "^1.0.1", - "is-boolean-object": "^1.1.0", - "is-number-object": "^1.0.4", - "is-string": "^1.0.5", - "is-symbol": "^1.0.3" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/word-wrap": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", - "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/wrappy": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" - }, - "node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" - }, - "packages/eslint-config-custom": { - "version": "0.0.0", - "license": "MIT", - "dependencies": { - "eslint": "^7.23.0", - "eslint-config-next": "^12.0.8", - "eslint-config-prettier": "^8.3.0", - "eslint-config-turbo": "latest", - "eslint-plugin-react": "7.31.8" - }, - "devDependencies": { - "typescript": "^4.7.4" - } - }, - "packages/tsconfig": { - "version": "0.0.0" - }, - "packages/ui": { - "version": "0.0.0", - "license": "MIT", - "devDependencies": { - "@types/react": "^17.0.37", - "@types/react-dom": "^17.0.11", - "eslint": "^7.32.0", - "eslint-config-custom": "*", - "react": "^18.2.0", - "tsconfig": "*", - "typescript": "^4.5.2" - } - }, - "packages/ui/node_modules/@types/react": { - "version": "17.0.50", - "resolved": "https://registry.npmjs.org/@types/react/-/react-17.0.50.tgz", - "integrity": "sha512-ZCBHzpDb5skMnc1zFXAXnL3l1FAdi+xZvwxK+PkglMmBrwjpp9nKaWuEvrGnSifCJmBFGxZOOFuwC6KH/s0NuA==", - "dev": true, - "dependencies": { - "@types/prop-types": "*", - "@types/scheduler": "*", - "csstype": "^3.0.2" - } - } - }, - "dependencies": { - "@ampproject/remapping": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.2.0.tgz", - "integrity": "sha512-qRmjj8nj9qmLTQXXmaR1cck3UXSRMPrbsLJAasZpF+t3riI71BXed5ebIOYwQntykeZuhjsdweEc9BxH5Jc26w==", - "dev": true, - "requires": { - "@jridgewell/gen-mapping": "^0.1.0", - "@jridgewell/trace-mapping": "^0.3.9" - } - }, - "@babel/code-frame": { - "version": "7.18.6", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.18.6.tgz", - "integrity": "sha512-TDCmlK5eOvH+eH7cdAFlNXeVJqWIQ7gW9tY1GJIpUtFb6CmjVyq2VM3u71bOyR8CRihcCgMUYoDNyLXao3+70Q==", - "dev": true, - "requires": { - "@babel/highlight": "^7.18.6" - } - }, - "@babel/compat-data": { - "version": "7.19.3", - "resolved": 
"https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.19.3.tgz", - "integrity": "sha512-prBHMK4JYYK+wDjJF1q99KK4JLL+egWS4nmNqdlMUgCExMZ+iZW0hGhyC3VEbsPjvaN0TBhW//VIFwBrk8sEiw==", - "dev": true - }, - "@babel/core": { - "version": "7.19.3", - "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.19.3.tgz", - "integrity": "sha512-WneDJxdsjEvyKtXKsaBGbDeiyOjR5vYq4HcShxnIbG0qixpoHjI3MqeZM9NDvsojNCEBItQE4juOo/bU6e72gQ==", - "dev": true, - "requires": { - "@ampproject/remapping": "^2.1.0", - "@babel/code-frame": "^7.18.6", - "@babel/generator": "^7.19.3", - "@babel/helper-compilation-targets": "^7.19.3", - "@babel/helper-module-transforms": "^7.19.0", - "@babel/helpers": "^7.19.0", - "@babel/parser": "^7.19.3", - "@babel/template": "^7.18.10", - "@babel/traverse": "^7.19.3", - "@babel/types": "^7.19.3", - "convert-source-map": "^1.7.0", - "debug": "^4.1.0", - "gensync": "^1.0.0-beta.2", - "json5": "^2.2.1", - "semver": "^6.3.0" - } - }, - "@babel/generator": { - "version": "7.19.3", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.19.3.tgz", - "integrity": "sha512-fqVZnmp1ncvZU757UzDheKZpfPgatqY59XtW2/j/18H7u76akb8xqvjw82f+i2UKd/ksYsSick/BCLQUUtJ/qQ==", - "dev": true, - "requires": { - "@babel/types": "^7.19.3", - "@jridgewell/gen-mapping": "^0.3.2", - "jsesc": "^2.5.1" - }, - "dependencies": { - "@jridgewell/gen-mapping": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.2.tgz", - "integrity": "sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A==", - "dev": true, - "requires": { - "@jridgewell/set-array": "^1.0.1", - "@jridgewell/sourcemap-codec": "^1.4.10", - "@jridgewell/trace-mapping": "^0.3.9" - } - } - } - }, - "@babel/helper-compilation-targets": { - "version": "7.19.3", - "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.19.3.tgz", - "integrity": "sha512-65ESqLGyGmLvgR0mst5AdW1FkNlj9rQsCKduzEoEPhBCDFGXvz2jW6bXFG6i0/MrV2s7hhXjjb2yAzcPuQlLwg==", - "dev": true, - "requires": { - "@babel/compat-data": "^7.19.3", - "@babel/helper-validator-option": "^7.18.6", - "browserslist": "^4.21.3", - "semver": "^6.3.0" - } - }, - "@babel/helper-environment-visitor": { - "version": "7.18.9", - "resolved": "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.18.9.tgz", - "integrity": "sha512-3r/aACDJ3fhQ/EVgFy0hpj8oHyHpQc+LPtJoY9SzTThAsStm4Ptegq92vqKoE3vD706ZVFWITnMnxucw+S9Ipg==", - "dev": true - }, - "@babel/helper-function-name": { - "version": "7.19.0", - "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.19.0.tgz", - "integrity": "sha512-WAwHBINyrpqywkUH0nTnNgI5ina5TFn85HKS0pbPDfxFfhyR/aNQEn4hGi1P1JyT//I0t4OgXUlofzWILRvS5w==", - "dev": true, - "requires": { - "@babel/template": "^7.18.10", - "@babel/types": "^7.19.0" - } - }, - "@babel/helper-hoist-variables": { - "version": "7.18.6", - "resolved": "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.18.6.tgz", - "integrity": "sha512-UlJQPkFqFULIcyW5sbzgbkxn2FKRgwWiRexcuaR8RNJRy8+LLveqPjwZV/bwrLZCN0eUHD/x8D0heK1ozuoo6Q==", - "dev": true, - "requires": { - "@babel/types": "^7.18.6" - } - }, - "@babel/helper-module-imports": { - "version": "7.18.6", - "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.18.6.tgz", - "integrity": 
"sha512-0NFvs3VkuSYbFi1x2Vd6tKrywq+z/cLeYC/RJNFrIX/30Bf5aiGYbtvGXolEktzJH8o5E5KJ3tT+nkxuuZFVlA==", - "dev": true, - "requires": { - "@babel/types": "^7.18.6" - } - }, - "@babel/helper-module-transforms": { - "version": "7.19.0", - "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.19.0.tgz", - "integrity": "sha512-3HBZ377Fe14RbLIA+ac3sY4PTgpxHVkFrESaWhoI5PuyXPBBX8+C34qblV9G89ZtycGJCmCI/Ut+VUDK4bltNQ==", - "dev": true, - "requires": { - "@babel/helper-environment-visitor": "^7.18.9", - "@babel/helper-module-imports": "^7.18.6", - "@babel/helper-simple-access": "^7.18.6", - "@babel/helper-split-export-declaration": "^7.18.6", - "@babel/helper-validator-identifier": "^7.18.6", - "@babel/template": "^7.18.10", - "@babel/traverse": "^7.19.0", - "@babel/types": "^7.19.0" - } - }, - "@babel/helper-simple-access": { - "version": "7.18.6", - "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.18.6.tgz", - "integrity": "sha512-iNpIgTgyAvDQpDj76POqg+YEt8fPxx3yaNBg3S30dxNKm2SWfYhD0TGrK/Eu9wHpUW63VQU894TsTg+GLbUa1g==", - "dev": true, - "requires": { - "@babel/types": "^7.18.6" - } - }, - "@babel/helper-split-export-declaration": { - "version": "7.18.6", - "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.18.6.tgz", - "integrity": "sha512-bde1etTx6ZyTmobl9LLMMQsaizFVZrquTEHOqKeQESMKo4PlObf+8+JA25ZsIpZhT/WEd39+vOdLXAFG/nELpA==", - "dev": true, - "requires": { - "@babel/types": "^7.18.6" - } - }, - "@babel/helper-string-parser": { - "version": "7.18.10", - "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.18.10.tgz", - "integrity": "sha512-XtIfWmeNY3i4t7t4D2t02q50HvqHybPqW2ki1kosnvWCwuCMeo81Jf0gwr85jy/neUdg5XDdeFE/80DXiO+njw==", - "dev": true - }, - "@babel/helper-validator-identifier": { - "version": "7.19.1", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.19.1.tgz", - "integrity": "sha512-awrNfaMtnHUr653GgGEs++LlAvW6w+DcPrOliSMXWCKo597CwL5Acf/wWdNkf/tfEQE3mjkeD1YOVZOUV/od1w==" - }, - "@babel/helper-validator-option": { - "version": "7.18.6", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.18.6.tgz", - "integrity": "sha512-XO7gESt5ouv/LRJdrVjkShckw6STTaB7l9BrpBaAHDeF5YZT+01PCwmR0SJHnkW6i8OwW/EVWRShfi4j2x+KQw==", - "dev": true - }, - "@babel/helpers": { - "version": "7.19.0", - "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.19.0.tgz", - "integrity": "sha512-DRBCKGwIEdqY3+rPJgG/dKfQy9+08rHIAJx8q2p+HSWP87s2HCrQmaAMMyMll2kIXKCW0cO1RdQskx15Xakftg==", - "dev": true, - "requires": { - "@babel/template": "^7.18.10", - "@babel/traverse": "^7.19.0", - "@babel/types": "^7.19.0" - } - }, - "@babel/highlight": { - "version": "7.18.6", - "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.18.6.tgz", - "integrity": "sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g==", - "requires": { - "@babel/helper-validator-identifier": "^7.18.6", - "chalk": "^2.0.0", - "js-tokens": "^4.0.0" - } - }, - "@babel/parser": { - "version": "7.19.3", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.19.3.tgz", - "integrity": "sha512-pJ9xOlNWHiy9+FuFP09DEAFbAn4JskgRsVcc169w2xRBC3FRGuQEwjeIMMND9L2zc0iEhO/tGv4Zq+km+hxNpQ==", - "dev": true - }, - "@babel/runtime": { - "version": "7.19.0", - "resolved": 
"https://registry.npmjs.org/@babel/runtime/-/runtime-7.19.0.tgz", - "integrity": "sha512-eR8Lo9hnDS7tqkO7NsV+mKvCmv5boaXFSZ70DnfhcgiEne8hv9oCEd36Klw74EtizEqLsy4YnW8UWwpBVolHZA==", - "requires": { - "regenerator-runtime": "^0.13.4" - } - }, - "@babel/runtime-corejs3": { - "version": "7.19.1", - "resolved": "https://registry.npmjs.org/@babel/runtime-corejs3/-/runtime-corejs3-7.19.1.tgz", - "integrity": "sha512-j2vJGnkopRzH+ykJ8h68wrHnEUmtK//E723jjixiAl/PPf6FhqY/vYRcMVlNydRKQjQsTsYEjpx+DZMIvnGk/g==", - "requires": { - "core-js-pure": "^3.25.1", - "regenerator-runtime": "^0.13.4" - } - }, - "@babel/template": { - "version": "7.18.10", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.18.10.tgz", - "integrity": "sha512-TI+rCtooWHr3QJ27kJxfjutghu44DLnasDMwpDqCXVTal9RLp3RSYNh4NdBrRP2cQAoG9A8juOQl6P6oZG4JxA==", - "dev": true, - "requires": { - "@babel/code-frame": "^7.18.6", - "@babel/parser": "^7.18.10", - "@babel/types": "^7.18.10" - } - }, - "@babel/traverse": { - "version": "7.19.3", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.19.3.tgz", - "integrity": "sha512-qh5yf6149zhq2sgIXmwjnsvmnNQC2iw70UFjp4olxucKrWd/dvlUsBI88VSLUsnMNF7/vnOiA+nk1+yLoCqROQ==", - "dev": true, - "requires": { - "@babel/code-frame": "^7.18.6", - "@babel/generator": "^7.19.3", - "@babel/helper-environment-visitor": "^7.18.9", - "@babel/helper-function-name": "^7.19.0", - "@babel/helper-hoist-variables": "^7.18.6", - "@babel/helper-split-export-declaration": "^7.18.6", - "@babel/parser": "^7.19.3", - "@babel/types": "^7.19.3", - "debug": "^4.1.0", - "globals": "^11.1.0" - } - }, - "@babel/types": { - "version": "7.19.3", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.19.3.tgz", - "integrity": "sha512-hGCaQzIY22DJlDh9CH7NOxgKkFjBk0Cw9xDO1Xmh2151ti7wiGfQ3LauXzL4HP1fmFlTX6XjpRETTpUcv7wQLw==", - "dev": true, - "requires": { - "@babel/helper-string-parser": "^7.18.10", - "@babel/helper-validator-identifier": "^7.19.1", - "to-fast-properties": "^2.0.0" - } - }, - "@eslint/eslintrc": { - "version": "0.4.3", - "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-0.4.3.tgz", - "integrity": "sha512-J6KFFz5QCYUJq3pf0mjEcCJVERbzv71PUIDczuh9JkwGEzced6CO5ADLHB1rbf/+oPBtoPfMYNOpGDzCANlbXw==", - "requires": { - "ajv": "^6.12.4", - "debug": "^4.1.1", - "espree": "^7.3.0", - "globals": "^13.9.0", - "ignore": "^4.0.6", - "import-fresh": "^3.2.1", - "js-yaml": "^3.13.1", - "minimatch": "^3.0.4", - "strip-json-comments": "^3.1.1" - }, - "dependencies": { - "globals": { - "version": "13.17.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-13.17.0.tgz", - "integrity": "sha512-1C+6nQRb1GwGMKm2dH/E7enFAMxGTmGI7/dEdhy/DNelv85w9B72t3uc5frtMNXIbzrarJJ/lTCjcaZwbLJmyw==", - "requires": { - "type-fest": "^0.20.2" - } - } - } - }, - "@humanwhocodes/config-array": { - "version": "0.5.0", - "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.5.0.tgz", - "integrity": "sha512-FagtKFz74XrTl7y6HCzQpwDfXP0yhxe9lHLD1UZxjvZIcbyRz8zTFF/yYNfSfzU414eDwZ1SrO0Qvtyf+wFMQg==", - "requires": { - "@humanwhocodes/object-schema": "^1.2.0", - "debug": "^4.1.1", - "minimatch": "^3.0.4" - } - }, - "@humanwhocodes/object-schema": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz", - "integrity": "sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==" - }, - "@jridgewell/gen-mapping": { - "version": "0.1.1", - "resolved": 
"https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.1.1.tgz", - "integrity": "sha512-sQXCasFk+U8lWYEe66WxRDOE9PjVz4vSM51fTu3Hw+ClTpUSQb718772vH3pyS5pShp6lvQM7SxgIDXXXmOX7w==", - "dev": true, - "requires": { - "@jridgewell/set-array": "^1.0.0", - "@jridgewell/sourcemap-codec": "^1.4.10" - } - }, - "@jridgewell/resolve-uri": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz", - "integrity": "sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w==", - "dev": true - }, - "@jridgewell/set-array": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.1.2.tgz", - "integrity": "sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw==", - "dev": true - }, - "@jridgewell/sourcemap-codec": { - "version": "1.4.14", - "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz", - "integrity": "sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw==", - "dev": true - }, - "@jridgewell/trace-mapping": { - "version": "0.3.15", - "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.15.tgz", - "integrity": "sha512-oWZNOULl+UbhsgB51uuZzglikfIKSUBO/M9W2OfEjn7cmqoAiCgmv9lyACTUacZwBz0ITnJ2NqjU8Tx0DHL88g==", - "dev": true, - "requires": { - "@jridgewell/resolve-uri": "^3.0.3", - "@jridgewell/sourcemap-codec": "^1.4.10" - } - }, - "@next/env": { - "version": "12.3.0", - "resolved": "https://registry.npmjs.org/@next/env/-/env-12.3.0.tgz", - "integrity": "sha512-PTJpjAFVbzBQ9xXpzMTroShvD5YDIIy46jQ7d4LrWpY+/5a8H90Tm8hE3Hvkc5RBRspVo7kvEOnqQms0A+2Q6w==" - }, - "@next/eslint-plugin-next": { - "version": "12.3.1", - "resolved": "https://registry.npmjs.org/@next/eslint-plugin-next/-/eslint-plugin-next-12.3.1.tgz", - "integrity": "sha512-sw+lTf6r6P0j+g/n9y4qdWWI2syPqZx+uc0+B/fRENqfR3KpSid6MIKqc9gNwGhJASazEQ5b3w8h4cAET213jw==", - "requires": { - "glob": "7.1.7" - } - }, - "@next/swc-android-arm-eabi": { - "version": "12.3.0", - "resolved": "https://registry.npmjs.org/@next/swc-android-arm-eabi/-/swc-android-arm-eabi-12.3.0.tgz", - "integrity": "sha512-/PuirPnAKsYBw93w/7Q9hqy+KGOU9mjYprZ/faxMUJh/dc6v3rYLxkZKNG9nFPIW4QKNTCnhP40xF9hLnxO+xg==", - "optional": true - }, - "@next/swc-android-arm64": { - "version": "12.3.0", - "resolved": "https://registry.npmjs.org/@next/swc-android-arm64/-/swc-android-arm64-12.3.0.tgz", - "integrity": "sha512-OaI+FhAM6P9B6Ybwbn0Zl8YwWido0lLwhDBi9WiYCh4RQmIXAyVIoIJPHo4fP05+mXaJ/k1trvDvuURvHOq2qw==", - "optional": true - }, - "@next/swc-darwin-arm64": { - "version": "12.3.0", - "resolved": "https://registry.npmjs.org/@next/swc-darwin-arm64/-/swc-darwin-arm64-12.3.0.tgz", - "integrity": "sha512-9s4d3Mhii+WFce8o8Jok7WC3Bawkr9wEUU++SJRptjU1L5tsfYJMrSYCACHLhZujziNDLyExe4Hwwsccps1sfg==", - "optional": true - }, - "@next/swc-darwin-x64": { - "version": "12.3.0", - "resolved": "https://registry.npmjs.org/@next/swc-darwin-x64/-/swc-darwin-x64-12.3.0.tgz", - "integrity": "sha512-2scC4MqUTwGwok+wpVxP+zWp7WcCAVOtutki2E1n99rBOTnUOX6qXkgxSy083yBN6GqwuC/dzHeN7hIKjavfRA==", - "optional": true - }, - "@next/swc-freebsd-x64": { - "version": "12.3.0", - "resolved": "https://registry.npmjs.org/@next/swc-freebsd-x64/-/swc-freebsd-x64-12.3.0.tgz", - "integrity": "sha512-xAlruUREij/bFa+qsE1tmsP28t7vz02N4ZDHt2lh3uJUniE0Ne9idyIDLc1Ed0IF2RjfgOp4ZVunuS3OM0sngw==", - "optional": true - }, - 
"@next/swc-linux-arm-gnueabihf": { - "version": "12.3.0", - "resolved": "https://registry.npmjs.org/@next/swc-linux-arm-gnueabihf/-/swc-linux-arm-gnueabihf-12.3.0.tgz", - "integrity": "sha512-jin2S4VT/cugc2dSZEUIabhYDJNgrUh7fufbdsaAezgcQzqfdfJqfxl4E9GuafzB4cbRPTaqA0V5uqbp0IyGkQ==", - "optional": true - }, - "@next/swc-linux-arm64-gnu": { - "version": "12.3.0", - "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-12.3.0.tgz", - "integrity": "sha512-RqJHDKe0WImeUrdR0kayTkRWgp4vD/MS7g0r6Xuf8+ellOFH7JAAJffDW3ayuVZeMYOa7RvgNFcOoWnrTUl9Nw==", - "optional": true - }, - "@next/swc-linux-arm64-musl": { - "version": "12.3.0", - "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-12.3.0.tgz", - "integrity": "sha512-nvNWoUieMjvDjpYJ/4SQe9lQs2xMj6ZRs8N+bmTrVu9leY2Fg3WD6W9p/1uU9hGO8u+OdF13wc4iRShu/WYIHg==", - "optional": true - }, - "@next/swc-linux-x64-gnu": { - "version": "12.3.0", - "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-12.3.0.tgz", - "integrity": "sha512-4ajhIuVU9PeQCMMhdDgZTLrHmjbOUFuIyg6J19hZqwEwDTSqQyrSLkbJs2Nd7IRiM6Ul/XyrtEFCpk4k+xD2+w==", - "optional": true - }, - "@next/swc-linux-x64-musl": { - "version": "12.3.0", - "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-12.3.0.tgz", - "integrity": "sha512-U092RBYbaGxoMAwpauePJEu2PuZSEoUCGJBvsptQr2/2XIMwAJDYM4c/M5NfYEsBr+yjvsYNsOpYfeQ88D82Yg==", - "optional": true - }, - "@next/swc-win32-arm64-msvc": { - "version": "12.3.0", - "resolved": "https://registry.npmjs.org/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-12.3.0.tgz", - "integrity": "sha512-pzSzaxjDEJe67bUok9Nxf9rykbJfHXW0owICFsPBsqHyc+cr8vpF7g9e2APTCddtVhvjkga9ILoZJ9NxWS7Yiw==", - "optional": true - }, - "@next/swc-win32-ia32-msvc": { - "version": "12.3.0", - "resolved": "https://registry.npmjs.org/@next/swc-win32-ia32-msvc/-/swc-win32-ia32-msvc-12.3.0.tgz", - "integrity": "sha512-MQGUpMbYhQmTZ06a9e0hPQJnxFMwETo2WtyAotY3GEzbNCQVbCGhsvqEKcl+ZEHgShlHXUWvSffq1ZscY6gK7A==", - "optional": true - }, - "@next/swc-win32-x64-msvc": { - "version": "12.3.0", - "resolved": "https://registry.npmjs.org/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-12.3.0.tgz", - "integrity": "sha512-C/nw6OgQpEULWqs+wgMHXGvlJLguPRFFGqR2TAqWBerQ8J+Sg3z1ZTqwelkSi4FoqStGuZ2UdFHIDN1ySmR1xA==", - "optional": true - }, - "@nodelib/fs.scandir": { - "version": "2.1.5", - "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", - "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", - "requires": { - "@nodelib/fs.stat": "2.0.5", - "run-parallel": "^1.1.9" - } - }, - "@nodelib/fs.stat": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", - "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==" - }, - "@nodelib/fs.walk": { - "version": "1.2.8", - "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", - "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", - "requires": { - "@nodelib/fs.scandir": "2.1.5", - "fastq": "^1.6.0" - } - }, - "@rushstack/eslint-patch": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/@rushstack/eslint-patch/-/eslint-patch-1.2.0.tgz", - "integrity": "sha512-sXo/qW2/pAcmT43VoRKOJbDOfV3cYpq3szSVfIThQXNt+E4DfKj361vaAt3c88U5tPUxzEswam7GW48PJqtKAg==" - }, - 
"@swc/helpers": { - "version": "0.4.11", - "resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.4.11.tgz", - "integrity": "sha512-rEUrBSGIoSFuYxwBYtlUFMlE2CwGhmW+w9355/5oduSw8e5h2+Tj4UrAGNNgP9915++wj5vkQo0UuOBqOAq4nw==", - "requires": { - "tslib": "^2.4.0" - } - }, - "@types/json5": { - "version": "0.0.29", - "resolved": "https://registry.npmjs.org/@types/json5/-/json5-0.0.29.tgz", - "integrity": "sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==" - }, - "@types/node": { - "version": "17.0.45", - "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.45.tgz", - "integrity": "sha512-w+tIMs3rq2afQdsPJlODhoUEKzFP1ayaoyl1CcnwtIlsVe7K7bA1NGm4s3PraqTLlXnbIN84zuBlxBWo1u9BLw==", - "dev": true - }, - "@types/prop-types": { - "version": "15.7.5", - "resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.5.tgz", - "integrity": "sha512-JCB8C6SnDoQf0cNycqd/35A7MjcnK+ZTqE7judS6o7utxUCg6imJg3QK2qzHKszlTjcj2cn+NwMB2i96ubpj7w==", - "dev": true - }, - "@types/react": { - "version": "18.0.17", - "resolved": "https://registry.npmjs.org/@types/react/-/react-18.0.17.tgz", - "integrity": "sha512-38ETy4tL+rn4uQQi7mB81G7V1g0u2ryquNmsVIOKUAEIDK+3CUjZ6rSRpdvS99dNBnkLFL83qfmtLacGOTIhwQ==", - "dev": true, - "requires": { - "@types/prop-types": "*", - "@types/scheduler": "*", - "csstype": "^3.0.2" - } - }, - "@types/react-dom": { - "version": "17.0.17", - "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-17.0.17.tgz", - "integrity": "sha512-VjnqEmqGnasQKV0CWLevqMTXBYG9GbwuE6x3VetERLh0cq2LTptFE73MrQi2S7GkKXCf2GgwItB/melLnxfnsg==", - "dev": true, - "requires": { - "@types/react": "^17" - }, - "dependencies": { - "@types/react": { - "version": "17.0.50", - "resolved": "https://registry.npmjs.org/@types/react/-/react-17.0.50.tgz", - "integrity": "sha512-ZCBHzpDb5skMnc1zFXAXnL3l1FAdi+xZvwxK+PkglMmBrwjpp9nKaWuEvrGnSifCJmBFGxZOOFuwC6KH/s0NuA==", - "dev": true, - "requires": { - "@types/prop-types": "*", - "@types/scheduler": "*", - "csstype": "^3.0.2" - } - } - } - }, - "@types/scheduler": { - "version": "0.16.2", - "resolved": "https://registry.npmjs.org/@types/scheduler/-/scheduler-0.16.2.tgz", - "integrity": "sha512-hppQEBDmlwhFAXKJX2KnWLYu5yMfi91yazPb2l+lbJiwW+wdo1gNeRA+3RgNSO39WYX2euey41KEwnqesU2Jew==", - "dev": true - }, - "@typescript-eslint/parser": { - "version": "5.39.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-5.39.0.tgz", - "integrity": "sha512-PhxLjrZnHShe431sBAGHaNe6BDdxAASDySgsBCGxcBecVCi8NQWxQZMcizNA4g0pN51bBAn/FUfkWG3SDVcGlA==", - "requires": { - "@typescript-eslint/scope-manager": "5.39.0", - "@typescript-eslint/types": "5.39.0", - "@typescript-eslint/typescript-estree": "5.39.0", - "debug": "^4.3.4" - } - }, - "@typescript-eslint/scope-manager": { - "version": "5.39.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-5.39.0.tgz", - "integrity": "sha512-/I13vAqmG3dyqMVSZPjsbuNQlYS082Y7OMkwhCfLXYsmlI0ca4nkL7wJ/4gjX70LD4P8Hnw1JywUVVAwepURBw==", - "requires": { - "@typescript-eslint/types": "5.39.0", - "@typescript-eslint/visitor-keys": "5.39.0" - } - }, - "@typescript-eslint/types": { - "version": "5.39.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-5.39.0.tgz", - "integrity": "sha512-gQMZrnfEBFXK38hYqt8Lkwt8f4U6yq+2H5VDSgP/qiTzC8Nw8JO3OuSUOQ2qW37S/dlwdkHDntkZM6SQhKyPhw==" - }, - "@typescript-eslint/typescript-estree": { - "version": "5.39.0", - "resolved": 
"https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-5.39.0.tgz", - "integrity": "sha512-qLFQP0f398sdnogJoLtd43pUgB18Q50QSA+BTE5h3sUxySzbWDpTSdgt4UyxNSozY/oDK2ta6HVAzvGgq8JYnA==", - "requires": { - "@typescript-eslint/types": "5.39.0", - "@typescript-eslint/visitor-keys": "5.39.0", - "debug": "^4.3.4", - "globby": "^11.1.0", - "is-glob": "^4.0.3", - "semver": "^7.3.7", - "tsutils": "^3.21.0" - }, - "dependencies": { - "semver": { - "version": "7.3.7", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.7.tgz", - "integrity": "sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g==", - "requires": { - "lru-cache": "^6.0.0" - } - } - } - }, - "@typescript-eslint/visitor-keys": { - "version": "5.39.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-5.39.0.tgz", - "integrity": "sha512-yyE3RPwOG+XJBLrhvsxAidUgybJVQ/hG8BhiJo0k8JSAYfk/CshVcxf0HwP4Jt7WZZ6vLmxdo1p6EyN3tzFTkg==", - "requires": { - "@typescript-eslint/types": "5.39.0", - "eslint-visitor-keys": "^3.3.0" - }, - "dependencies": { - "eslint-visitor-keys": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.3.0.tgz", - "integrity": "sha512-mQ+suqKJVyeuwGYHAdjMFqjCyfl8+Ldnxuyp3ldiMBFKkvytrXUZWaiPCEav8qDHKty44bD+qV1IP4T+w+xXRA==" - } - } - }, - "acorn": { - "version": "7.4.1", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz", - "integrity": "sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==" - }, - "acorn-jsx": { - "version": "5.3.2", - "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", - "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", - "requires": {} - }, - "ajv": { - "version": "6.12.6", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", - "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", - "requires": { - "fast-deep-equal": "^3.1.1", - "fast-json-stable-stringify": "^2.0.0", - "json-schema-traverse": "^0.4.1", - "uri-js": "^4.2.2" - } - }, - "ansi-colors": { - "version": "4.1.3", - "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.3.tgz", - "integrity": "sha512-/6w/C21Pm1A7aZitlI5Ni/2J6FFQN8i1Cvz3kHABAAbw93v/NlvKdVOqz7CCWz/3iv/JplRSEEZ83XION15ovw==" - }, - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" - }, - "ansi-styles": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", - "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", - "requires": { - "color-convert": "^1.9.0" - } - }, - "argparse": { - "version": "1.0.10", - "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", - "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", - "requires": { - "sprintf-js": "~1.0.2" - } - }, - "aria-query": { - "version": "4.2.2", - "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-4.2.2.tgz", - "integrity": "sha512-o/HelwhuKpTj/frsOsbNLNgnNGVIFsVP/SW2BSF14gVl7kAfMOJ6/8wUAUvG1R1NHKrfG+2sHZTu0yauT1qBrA==", - "requires": { - "@babel/runtime": "^7.10.2", - 
"@babel/runtime-corejs3": "^7.10.2" - } - }, - "array-includes": { - "version": "3.1.5", - "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.5.tgz", - "integrity": "sha512-iSDYZMMyTPkiFasVqfuAQnWAYcvO/SeBSCGKePoEthjp4LEMTe4uLc7b025o4jAZpHhihh8xPo99TNWUWWkGDQ==", - "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.19.5", - "get-intrinsic": "^1.1.1", - "is-string": "^1.0.7" - } - }, - "array-union": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", - "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==" - }, - "array.prototype.flat": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.3.0.tgz", - "integrity": "sha512-12IUEkHsAhA4DY5s0FPgNXIdc8VRSqD9Zp78a5au9abH/SOBrsp082JOWFNTjkMozh8mqcdiKuaLGhPeYztxSw==", - "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.3", - "es-abstract": "^1.19.2", - "es-shim-unscopables": "^1.0.0" - } - }, - "array.prototype.flatmap": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/array.prototype.flatmap/-/array.prototype.flatmap-1.3.0.tgz", - "integrity": "sha512-PZC9/8TKAIxcWKdyeb77EzULHPrIX/tIZebLJUQOMR1OwYosT8yggdfWScfTBCDj5utONvOuPQQumYsU2ULbkg==", - "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.3", - "es-abstract": "^1.19.2", - "es-shim-unscopables": "^1.0.0" - } - }, - "ast-types-flow": { - "version": "0.0.7", - "resolved": "https://registry.npmjs.org/ast-types-flow/-/ast-types-flow-0.0.7.tgz", - "integrity": "sha512-eBvWn1lvIApYMhzQMsu9ciLfkBY499mFZlNqG+/9WR7PVlroQw0vG30cOQQbaKz3sCEc44TAOu2ykzqXSNnwag==" - }, - "astral-regex": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/astral-regex/-/astral-regex-2.0.0.tgz", - "integrity": "sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ==" - }, - "axe-core": { - "version": "4.4.3", - "resolved": "https://registry.npmjs.org/axe-core/-/axe-core-4.4.3.tgz", - "integrity": "sha512-32+ub6kkdhhWick/UjvEwRchgoetXqTK14INLqbGm5U2TzBkBNF3nQtLYm8ovxSkQWArjEQvftCKryjZaATu3w==" - }, - "axobject-query": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/axobject-query/-/axobject-query-2.2.0.tgz", - "integrity": "sha512-Td525n+iPOOyUQIeBfcASuG6uJsDOITl7Mds5gFyerkWiX7qhUTdYUBlSgNMyVqtSJqwpt1kXGLdUt6SykLMRA==" - }, - "balanced-match": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", - "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" - }, - "brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", - "requires": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - "braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "requires": { - "fill-range": "^7.0.1" - } - }, - "browserslist": { - "version": "4.21.4", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.21.4.tgz", - "integrity": 
"sha512-CBHJJdDmgjl3daYjN5Cp5kbTf1mUhZoS+beLklHIvkOWscs83YAhLlF3Wsh/lciQYAcbBJgTOD44VtG31ZM4Hw==", - "dev": true, - "requires": { - "caniuse-lite": "^1.0.30001400", - "electron-to-chromium": "^1.4.251", - "node-releases": "^2.0.6", - "update-browserslist-db": "^1.0.9" - } - }, - "call-bind": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", - "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==", - "requires": { - "function-bind": "^1.1.1", - "get-intrinsic": "^1.0.2" - } - }, - "callsites": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", - "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==" - }, - "caniuse-lite": { - "version": "1.0.30001414", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001414.tgz", - "integrity": "sha512-t55jfSaWjCdocnFdKQoO+d2ct9C59UZg4dY3OnUlSZ447r8pUtIKdp0hpAzrGFultmTC+Us+KpKi4GZl/LXlFg==" - }, - "chalk": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", - "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", - "requires": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" - } - }, - "color-convert": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", - "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", - "requires": { - "color-name": "1.1.3" - } - }, - "color-name": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" - }, - "concat-map": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", - "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==" - }, - "convert-source-map": { - "version": "1.8.0", - "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.8.0.tgz", - "integrity": "sha512-+OQdjP49zViI/6i7nIJpA8rAl4sV/JdPfU9nZs3VqOwGIgizICvuN2ru6fMd+4llL0tar18UYJXfZ/TWtmhUjA==", - "dev": true, - "requires": { - "safe-buffer": "~5.1.1" - } - }, - "core-js-pure": { - "version": "3.25.5", - "resolved": "https://registry.npmjs.org/core-js-pure/-/core-js-pure-3.25.5.tgz", - "integrity": "sha512-oml3M22pHM+igfWHDfdLVq2ShWmjM2V4L+dQEBs0DWVIqEm9WHCwGAlZ6BmyBQGy5sFrJmcx+856D9lVKyGWYg==" - }, - "cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", - "requires": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - } - }, - "csstype": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.1.tgz", - "integrity": "sha512-DJR/VvkAvSZW9bTouZue2sSxDwdTN92uHjqeKVm+0dAqdfNykRzQ95tay8aXMBAAPpUiq4Qcug2L7neoRh2Egw==", - "dev": true - }, - "damerau-levenshtein": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/damerau-levenshtein/-/damerau-levenshtein-1.0.8.tgz", - "integrity": 
"sha512-sdQSFB7+llfUcQHUQO3+B8ERRj0Oa4w9POWMI/puGtuf7gFywGmkaLCElnudfTiKZV+NvHqL0ifzdrI8Ro7ESA==" - }, - "debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", - "requires": { - "ms": "2.1.2" - } - }, - "deep-is": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", - "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==" - }, - "define-properties": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.4.tgz", - "integrity": "sha512-uckOqKcfaVvtBdsVkdPv3XjveQJsNQqmhXgRi8uhvWWuPYZCNlzT8qAyblUgNoXdHdjMTzAqeGjAoli8f+bzPA==", - "requires": { - "has-property-descriptors": "^1.0.0", - "object-keys": "^1.1.1" - } - }, - "dir-glob": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", - "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", - "requires": { - "path-type": "^4.0.0" - } - }, - "docs": { - "version": "file:apps/docs", - "requires": { - "@babel/core": "^7.0.0", - "@types/node": "^17.0.12", - "@types/react": "18.0.17", - "eslint": "7.32.0", - "eslint-config-custom": "*", - "lodash": "^3.0.0", - "next": "12.3.0", - "next-transpile-modules": "9.0.0", - "react": "18.2.0", - "react-dom": "18.2.0", - "tsconfig": "*", - "typescript": "^4.5.3", - "ui": "*" - } - }, - "doctrine": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", - "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", - "requires": { - "esutils": "^2.0.2" - } - }, - "electron-to-chromium": { - "version": "1.4.270", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.270.tgz", - "integrity": "sha512-KNhIzgLiJmDDC444dj9vEOpZEgsV96ult9Iff98Vanumn+ShJHd5se8aX6KeVxdc0YQeqdrezBZv89rleDbvSg==", - "dev": true - }, - "emoji-regex": { - "version": "9.2.2", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", - "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==" - }, - "enhanced-resolve": { - "version": "5.10.0", - "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.10.0.tgz", - "integrity": "sha512-T0yTFjdpldGY8PmuXXR0PyQ1ufZpEGiHVrp7zHKB7jdR4qlmZHhONVM5AQOAWXuF/w3dnHbEQVrNptJgt7F+cQ==", - "dev": true, - "requires": { - "graceful-fs": "^4.2.4", - "tapable": "^2.2.0" - } - }, - "enquirer": { - "version": "2.3.6", - "resolved": "https://registry.npmjs.org/enquirer/-/enquirer-2.3.6.tgz", - "integrity": "sha512-yjNnPr315/FjS4zIsUxYguYUPP2e1NK4d7E7ZOLiyYCcbFBiTMyID+2wvm2w6+pZ/odMA7cRkjhsPbltwBOrLg==", - "requires": { - "ansi-colors": "^4.1.1" - } - }, - "es-abstract": { - "version": "1.20.3", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.20.3.tgz", - "integrity": "sha512-AyrnaKVpMzljIdwjzrj+LxGmj8ik2LckwXacHqrJJ/jxz6dDDBcZ7I7nlHM0FvEW8MfbWJwOd+yT2XzYW49Frw==", - "requires": { - "call-bind": "^1.0.2", - "es-to-primitive": "^1.2.1", - "function-bind": "^1.1.1", - "function.prototype.name": "^1.1.5", - "get-intrinsic": "^1.1.3", - "get-symbol-description": "^1.0.0", - "has": "^1.0.3", - "has-property-descriptors": "^1.0.0", - "has-symbols": "^1.0.3", - 
"internal-slot": "^1.0.3", - "is-callable": "^1.2.6", - "is-negative-zero": "^2.0.2", - "is-regex": "^1.1.4", - "is-shared-array-buffer": "^1.0.2", - "is-string": "^1.0.7", - "is-weakref": "^1.0.2", - "object-inspect": "^1.12.2", - "object-keys": "^1.1.1", - "object.assign": "^4.1.4", - "regexp.prototype.flags": "^1.4.3", - "safe-regex-test": "^1.0.0", - "string.prototype.trimend": "^1.0.5", - "string.prototype.trimstart": "^1.0.5", - "unbox-primitive": "^1.0.2" - } - }, - "es-shim-unscopables": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/es-shim-unscopables/-/es-shim-unscopables-1.0.0.tgz", - "integrity": "sha512-Jm6GPcCdC30eMLbZ2x8z2WuRwAws3zTBBKuusffYVUrNj/GVSUAZ+xKMaUpfNDR5IbyNA5LJbaecoUVbmUcB1w==", - "requires": { - "has": "^1.0.3" - } - }, - "es-to-primitive": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", - "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", - "requires": { - "is-callable": "^1.1.4", - "is-date-object": "^1.0.1", - "is-symbol": "^1.0.2" - } - }, - "escalade": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", - "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", - "dev": true - }, - "escape-string-regexp": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==" - }, - "eslint": { - "version": "7.32.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-7.32.0.tgz", - "integrity": "sha512-VHZ8gX+EDfz+97jGcgyGCyRia/dPOd6Xh9yPv8Bl1+SoaIwD+a/vlrOmGRUyOYu7MwUhc7CxqeaDZU13S4+EpA==", - "requires": { - "@babel/code-frame": "7.12.11", - "@eslint/eslintrc": "^0.4.3", - "@humanwhocodes/config-array": "^0.5.0", - "ajv": "^6.10.0", - "chalk": "^4.0.0", - "cross-spawn": "^7.0.2", - "debug": "^4.0.1", - "doctrine": "^3.0.0", - "enquirer": "^2.3.5", - "escape-string-regexp": "^4.0.0", - "eslint-scope": "^5.1.1", - "eslint-utils": "^2.1.0", - "eslint-visitor-keys": "^2.0.0", - "espree": "^7.3.1", - "esquery": "^1.4.0", - "esutils": "^2.0.2", - "fast-deep-equal": "^3.1.3", - "file-entry-cache": "^6.0.1", - "functional-red-black-tree": "^1.0.1", - "glob-parent": "^5.1.2", - "globals": "^13.6.0", - "ignore": "^4.0.6", - "import-fresh": "^3.0.0", - "imurmurhash": "^0.1.4", - "is-glob": "^4.0.0", - "js-yaml": "^3.13.1", - "json-stable-stringify-without-jsonify": "^1.0.1", - "levn": "^0.4.1", - "lodash.merge": "^4.6.2", - "minimatch": "^3.0.4", - "natural-compare": "^1.4.0", - "optionator": "^0.9.1", - "progress": "^2.0.0", - "regexpp": "^3.1.0", - "semver": "^7.2.1", - "strip-ansi": "^6.0.0", - "strip-json-comments": "^3.1.0", - "table": "^6.0.9", - "text-table": "^0.2.0", - "v8-compile-cache": "^2.0.3" - }, - "dependencies": { - "@babel/code-frame": { - "version": "7.12.11", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.12.11.tgz", - "integrity": "sha512-Zt1yodBx1UcyiePMSkWnU4hPqhwq7hGi2nFL1LeA3EUl+q2LQx16MISgJ0+z7dnmgvP9QtIleuETGOiOH1RcIw==", - "requires": { - "@babel/highlight": "^7.10.4" - } - }, - "ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": 
"sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "requires": { - "color-convert": "^2.0.1" - } - }, - "chalk": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", - "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", - "requires": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" - } - }, - "color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "requires": { - "color-name": "~1.1.4" - } - }, - "color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" - }, - "escape-string-regexp": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", - "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==" - }, - "globals": { - "version": "13.17.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-13.17.0.tgz", - "integrity": "sha512-1C+6nQRb1GwGMKm2dH/E7enFAMxGTmGI7/dEdhy/DNelv85w9B72t3uc5frtMNXIbzrarJJ/lTCjcaZwbLJmyw==", - "requires": { - "type-fest": "^0.20.2" - } - }, - "has-flag": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==" - }, - "semver": { - "version": "7.3.7", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.7.tgz", - "integrity": "sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g==", - "requires": { - "lru-cache": "^6.0.0" - } - }, - "supports-color": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", - "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", - "requires": { - "has-flag": "^4.0.0" - } - } - } - }, - "eslint-config-custom": { - "version": "file:packages/eslint-config-custom", - "requires": { - "eslint": "^7.23.0", - "eslint-config-next": "^12.0.8", - "eslint-config-prettier": "^8.3.0", - "eslint-config-turbo": "latest", - "eslint-plugin-react": "7.31.8", - "typescript": "^4.7.4" - } - }, - "eslint-config-next": { - "version": "12.3.1", - "resolved": "https://registry.npmjs.org/eslint-config-next/-/eslint-config-next-12.3.1.tgz", - "integrity": "sha512-EN/xwKPU6jz1G0Qi6Bd/BqMnHLyRAL0VsaQaWA7F3KkjAgZHi4f1uL1JKGWNxdQpHTW/sdGONBd0bzxUka/DJg==", - "requires": { - "@next/eslint-plugin-next": "12.3.1", - "@rushstack/eslint-patch": "^1.1.3", - "@typescript-eslint/parser": "^5.21.0", - "eslint-import-resolver-node": "^0.3.6", - "eslint-import-resolver-typescript": "^2.7.1", - "eslint-plugin-import": "^2.26.0", - "eslint-plugin-jsx-a11y": "^6.5.1", - "eslint-plugin-react": "^7.31.7", - "eslint-plugin-react-hooks": "^4.5.0" - } - }, - "eslint-config-prettier": { - "version": "8.5.0", - "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-8.5.0.tgz", - "integrity": "sha512-obmWKLUNCnhtQRKc+tmnYuQl0pFU1ibYJQ5BGhTVB08bHe9wC8qUeG7c08dj9XX+AuPj1YSGSQIHl1pnDHZR0Q==", - "requires": {} - }, - 
"eslint-config-turbo": { - "version": "0.0.4", - "resolved": "https://registry.npmjs.org/eslint-config-turbo/-/eslint-config-turbo-0.0.4.tgz", - "integrity": "sha512-HErPS/wfWkSdV9Yd2dDkhZt3W2B78Ih/aWPFfaHmCMjzPalh+5KxRRGTf8MOBQLCebcWJX0lP1Zvc1rZIHlXGg==", - "requires": { - "eslint-plugin-turbo": "0.0.4" - } - }, - "eslint-import-resolver-node": { - "version": "0.3.6", - "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.6.tgz", - "integrity": "sha512-0En0w03NRVMn9Uiyn8YRPDKvWjxCWkslUEhGNTdGx15RvPJYQ+lbOlqrlNI2vEAs4pDYK4f/HN2TbDmk5TP0iw==", - "requires": { - "debug": "^3.2.7", - "resolve": "^1.20.0" - }, - "dependencies": { - "debug": { - "version": "3.2.7", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", - "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", - "requires": { - "ms": "^2.1.1" - } - } - } - }, - "eslint-import-resolver-typescript": { - "version": "2.7.1", - "resolved": "https://registry.npmjs.org/eslint-import-resolver-typescript/-/eslint-import-resolver-typescript-2.7.1.tgz", - "integrity": "sha512-00UbgGwV8bSgUv34igBDbTOtKhqoRMy9bFjNehT40bXg6585PNIct8HhXZ0SybqB9rWtXj9crcku8ndDn/gIqQ==", - "requires": { - "debug": "^4.3.4", - "glob": "^7.2.0", - "is-glob": "^4.0.3", - "resolve": "^1.22.0", - "tsconfig-paths": "^3.14.1" - }, - "dependencies": { - "glob": { - "version": "7.2.3", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", - "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", - "requires": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.1.1", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - } - } - } - }, - "eslint-module-utils": { - "version": "2.7.4", - "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.7.4.tgz", - "integrity": "sha512-j4GT+rqzCoRKHwURX7pddtIPGySnX9Si/cgMI5ztrcqOPtk5dDEeZ34CQVPphnqkJytlc97Vuk05Um2mJ3gEQA==", - "requires": { - "debug": "^3.2.7" - }, - "dependencies": { - "debug": { - "version": "3.2.7", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", - "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", - "requires": { - "ms": "^2.1.1" - } - } - } - }, - "eslint-plugin-import": { - "version": "2.26.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.26.0.tgz", - "integrity": "sha512-hYfi3FXaM8WPLf4S1cikh/r4IxnO6zrhZbEGz2b660EJRbuxgpDS5gkCuYgGWg2xxh2rBuIr4Pvhve/7c31koA==", - "requires": { - "array-includes": "^3.1.4", - "array.prototype.flat": "^1.2.5", - "debug": "^2.6.9", - "doctrine": "^2.1.0", - "eslint-import-resolver-node": "^0.3.6", - "eslint-module-utils": "^2.7.3", - "has": "^1.0.3", - "is-core-module": "^2.8.1", - "is-glob": "^4.0.3", - "minimatch": "^3.1.2", - "object.values": "^1.1.5", - "resolve": "^1.22.0", - "tsconfig-paths": "^3.14.1" - }, - "dependencies": { - "debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "requires": { - "ms": "2.0.0" - } - }, - "doctrine": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", - "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==", - 
"requires": { - "esutils": "^2.0.2" - } - }, - "ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" - } - } - }, - "eslint-plugin-jsx-a11y": { - "version": "6.6.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-jsx-a11y/-/eslint-plugin-jsx-a11y-6.6.1.tgz", - "integrity": "sha512-sXgFVNHiWffBq23uiS/JaP6eVR622DqwB4yTzKvGZGcPq6/yZ3WmOZfuBks/vHWo9GaFOqC2ZK4i6+C35knx7Q==", - "requires": { - "@babel/runtime": "^7.18.9", - "aria-query": "^4.2.2", - "array-includes": "^3.1.5", - "ast-types-flow": "^0.0.7", - "axe-core": "^4.4.3", - "axobject-query": "^2.2.0", - "damerau-levenshtein": "^1.0.8", - "emoji-regex": "^9.2.2", - "has": "^1.0.3", - "jsx-ast-utils": "^3.3.2", - "language-tags": "^1.0.5", - "minimatch": "^3.1.2", - "semver": "^6.3.0" - } - }, - "eslint-plugin-react": { - "version": "7.31.8", - "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.31.8.tgz", - "integrity": "sha512-5lBTZmgQmARLLSYiwI71tiGVTLUuqXantZM6vlSY39OaDSV0M7+32K5DnLkmFrwTe+Ksz0ffuLUC91RUviVZfw==", - "requires": { - "array-includes": "^3.1.5", - "array.prototype.flatmap": "^1.3.0", - "doctrine": "^2.1.0", - "estraverse": "^5.3.0", - "jsx-ast-utils": "^2.4.1 || ^3.0.0", - "minimatch": "^3.1.2", - "object.entries": "^1.1.5", - "object.fromentries": "^2.0.5", - "object.hasown": "^1.1.1", - "object.values": "^1.1.5", - "prop-types": "^15.8.1", - "resolve": "^2.0.0-next.3", - "semver": "^6.3.0", - "string.prototype.matchall": "^4.0.7" - }, - "dependencies": { - "doctrine": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", - "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==", - "requires": { - "esutils": "^2.0.2" - } - }, - "resolve": { - "version": "2.0.0-next.4", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-2.0.0-next.4.tgz", - "integrity": "sha512-iMDbmAWtfU+MHpxt/I5iWI7cY6YVEZUQ3MBgPQ++XD1PELuJHIl82xBmObyP2KyQmkNB2dsqF7seoQQiAn5yDQ==", - "requires": { - "is-core-module": "^2.9.0", - "path-parse": "^1.0.7", - "supports-preserve-symlinks-flag": "^1.0.0" - } - } - } - }, - "eslint-plugin-react-hooks": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-4.6.0.tgz", - "integrity": "sha512-oFc7Itz9Qxh2x4gNHStv3BqJq54ExXmfC+a1NjAta66IAN87Wu0R/QArgIS9qKzX3dXKPI9H5crl9QchNMY9+g==", - "requires": {} - }, - "eslint-plugin-turbo": { - "version": "0.0.4", - "resolved": "https://registry.npmjs.org/eslint-plugin-turbo/-/eslint-plugin-turbo-0.0.4.tgz", - "integrity": "sha512-dfmYE/iPvoJInQq+5E/0mj140y/rYwKtzZkn3uVK8+nvwC5zmWKQ6ehMWrL4bYBkGzSgpOndZM+jOXhPQ2m8Cg==", - "requires": {} - }, - "eslint-scope": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz", - "integrity": "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==", - "requires": { - "esrecurse": "^4.3.0", - "estraverse": "^4.1.1" - }, - "dependencies": { - "estraverse": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", - "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==" - } - } - }, - "eslint-utils": { - "version": "2.1.0", - "resolved": 
"https://registry.npmjs.org/eslint-utils/-/eslint-utils-2.1.0.tgz", - "integrity": "sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg==", - "requires": { - "eslint-visitor-keys": "^1.1.0" - }, - "dependencies": { - "eslint-visitor-keys": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz", - "integrity": "sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==" - } - } - }, - "eslint-visitor-keys": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz", - "integrity": "sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw==" - }, - "espree": { - "version": "7.3.1", - "resolved": "https://registry.npmjs.org/espree/-/espree-7.3.1.tgz", - "integrity": "sha512-v3JCNCE64umkFpmkFGqzVKsOT0tN1Zr+ueqLZfpV1Ob8e+CEgPWa+OxCoGH3tnhimMKIaBm4m/vaRpJ/krRz2g==", - "requires": { - "acorn": "^7.4.0", - "acorn-jsx": "^5.3.1", - "eslint-visitor-keys": "^1.3.0" - }, - "dependencies": { - "eslint-visitor-keys": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz", - "integrity": "sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==" - } - } - }, - "esprima": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", - "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==" - }, - "esquery": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.4.0.tgz", - "integrity": "sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w==", - "requires": { - "estraverse": "^5.1.0" - } - }, - "esrecurse": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", - "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", - "requires": { - "estraverse": "^5.2.0" - } - }, - "estraverse": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", - "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==" - }, - "esutils": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", - "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==" - }, - "fast-deep-equal": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", - "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==" - }, - "fast-glob": { - "version": "3.2.12", - "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.12.tgz", - "integrity": "sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w==", - "requires": { - "@nodelib/fs.stat": "^2.0.2", - "@nodelib/fs.walk": "^1.2.3", - "glob-parent": "^5.1.2", - "merge2": "^1.3.0", - "micromatch": "^4.0.4" - } - }, - "fast-json-stable-stringify": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", - "integrity": 
"sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==" - }, - "fast-levenshtein": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", - "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==" - }, - "fastq": { - "version": "1.13.0", - "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.13.0.tgz", - "integrity": "sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw==", - "requires": { - "reusify": "^1.0.4" - } - }, - "file-entry-cache": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", - "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==", - "requires": { - "flat-cache": "^3.0.4" - } - }, - "fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "requires": { - "to-regex-range": "^5.0.1" - } - }, - "flat-cache": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.0.4.tgz", - "integrity": "sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg==", - "requires": { - "flatted": "^3.1.0", - "rimraf": "^3.0.2" - } - }, - "flatted": { - "version": "3.2.7", - "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.2.7.tgz", - "integrity": "sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ==" - }, - "fs.realpath": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==" - }, - "function-bind": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", - "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" - }, - "function.prototype.name": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.5.tgz", - "integrity": "sha512-uN7m/BzVKQnCUF/iW8jYea67v++2u7m5UgENbHRtdDVclOUP+FMPlCNdmk0h/ysGyo2tavMJEDqJAkJdRa1vMA==", - "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.3", - "es-abstract": "^1.19.0", - "functions-have-names": "^1.2.2" - } - }, - "functional-red-black-tree": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz", - "integrity": "sha512-dsKNQNdj6xA3T+QlADDA7mOSlX0qiMINjn0cgr+eGHGsbSHzTabcIogz2+p/iqP1Xs6EP/sS2SbqH+brGTbq0g==" - }, - "functions-have-names": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/functions-have-names/-/functions-have-names-1.2.3.tgz", - "integrity": "sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==" - }, - "gensync": { - "version": "1.0.0-beta.2", - "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", - "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", - "dev": true - }, - "get-intrinsic": { - "version": "1.1.3", - "resolved": 
"https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.3.tgz", - "integrity": "sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A==", - "requires": { - "function-bind": "^1.1.1", - "has": "^1.0.3", - "has-symbols": "^1.0.3" - } - }, - "get-symbol-description": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.0.tgz", - "integrity": "sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw==", - "requires": { - "call-bind": "^1.0.2", - "get-intrinsic": "^1.1.1" - } - }, - "glob": { - "version": "7.1.7", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.7.tgz", - "integrity": "sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ==", - "requires": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.0.4", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - } - }, - "glob-parent": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", - "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", - "requires": { - "is-glob": "^4.0.1" - } - }, - "globals": { - "version": "11.12.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", - "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", - "dev": true - }, - "globby": { - "version": "11.1.0", - "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", - "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==", - "requires": { - "array-union": "^2.1.0", - "dir-glob": "^3.0.1", - "fast-glob": "^3.2.9", - "ignore": "^5.2.0", - "merge2": "^1.4.1", - "slash": "^3.0.0" - }, - "dependencies": { - "ignore": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.2.0.tgz", - "integrity": "sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ==" - } - } - }, - "graceful-fs": { - "version": "4.2.10", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.10.tgz", - "integrity": "sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==", - "dev": true - }, - "has": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", - "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", - "requires": { - "function-bind": "^1.1.1" - } - }, - "has-bigints": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.2.tgz", - "integrity": "sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ==" - }, - "has-flag": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==" - }, - "has-property-descriptors": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz", - "integrity": "sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ==", - "requires": { - "get-intrinsic": "^1.1.1" - } - }, - "has-symbols": { - "version": "1.0.3", - "resolved": 
"https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", - "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==" - }, - "has-tostringtag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.0.tgz", - "integrity": "sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==", - "requires": { - "has-symbols": "^1.0.2" - } - }, - "ignore": { - "version": "4.0.6", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-4.0.6.tgz", - "integrity": "sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==" - }, - "import-fresh": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", - "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==", - "requires": { - "parent-module": "^1.0.0", - "resolve-from": "^4.0.0" - } - }, - "imurmurhash": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", - "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==" - }, - "inflight": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", - "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", - "requires": { - "once": "^1.3.0", - "wrappy": "1" - } - }, - "inherits": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" - }, - "internal-slot": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.3.tgz", - "integrity": "sha512-O0DB1JC/sPyZl7cIo78n5dR7eUSwwpYPiXRhTzNxZVAMUuB8vlnRFyLxdrVToks6XPLVnFfbzaVd5WLjhgg+vA==", - "requires": { - "get-intrinsic": "^1.1.0", - "has": "^1.0.3", - "side-channel": "^1.0.4" - } - }, - "is-bigint": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.4.tgz", - "integrity": "sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg==", - "requires": { - "has-bigints": "^1.0.1" - } - }, - "is-boolean-object": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.1.2.tgz", - "integrity": "sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA==", - "requires": { - "call-bind": "^1.0.2", - "has-tostringtag": "^1.0.0" - } - }, - "is-callable": { - "version": "1.2.7", - "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz", - "integrity": "sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==" - }, - "is-core-module": { - "version": "2.10.0", - "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.10.0.tgz", - "integrity": "sha512-Erxj2n/LDAZ7H8WNJXd9tw38GYM3dv8rk8Zcs+jJuxYTW7sozH+SS8NtrSjVL1/vpLvWi1hxy96IzjJ3EHTJJg==", - "requires": { - "has": "^1.0.3" - } - }, - "is-date-object": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.5.tgz", - "integrity": "sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ==", - "requires": { - "has-tostringtag": "^1.0.0" - 
} - }, - "is-extglob": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", - "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==" - }, - "is-fullwidth-code-point": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==" - }, - "is-glob": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", - "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", - "requires": { - "is-extglob": "^2.1.1" - } - }, - "is-negative-zero": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.2.tgz", - "integrity": "sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA==" - }, - "is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==" - }, - "is-number-object": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.7.tgz", - "integrity": "sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ==", - "requires": { - "has-tostringtag": "^1.0.0" - } - }, - "is-regex": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", - "integrity": "sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==", - "requires": { - "call-bind": "^1.0.2", - "has-tostringtag": "^1.0.0" - } - }, - "is-shared-array-buffer": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz", - "integrity": "sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA==", - "requires": { - "call-bind": "^1.0.2" - } - }, - "is-string": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.7.tgz", - "integrity": "sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==", - "requires": { - "has-tostringtag": "^1.0.0" - } - }, - "is-symbol": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.4.tgz", - "integrity": "sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg==", - "requires": { - "has-symbols": "^1.0.2" - } - }, - "is-weakref": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.0.2.tgz", - "integrity": "sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ==", - "requires": { - "call-bind": "^1.0.2" - } - }, - "isexe": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==" - }, - "js-tokens": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", - "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==" - }, - "js-yaml": { - "version": "3.14.1", - 
"resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", - "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", - "requires": { - "argparse": "^1.0.7", - "esprima": "^4.0.0" - } - }, - "jsesc": { - "version": "2.5.2", - "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz", - "integrity": "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==", - "dev": true - }, - "json-schema-traverse": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", - "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==" - }, - "json-stable-stringify-without-jsonify": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", - "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==" - }, - "json5": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.1.tgz", - "integrity": "sha512-1hqLFMSrGHRHxav9q9gNjJ5EXznIxGVO09xQRrwplcS8qs28pZ8s8hupZAmqDwZUmVZ2Qb2jnyPOWcDH8m8dlA==", - "dev": true - }, - "jsx-ast-utils": { - "version": "3.3.3", - "resolved": "https://registry.npmjs.org/jsx-ast-utils/-/jsx-ast-utils-3.3.3.tgz", - "integrity": "sha512-fYQHZTZ8jSfmWZ0iyzfwiU4WDX4HpHbMCZ3gPlWYiCl3BoeOTsqKBqnTVfH2rYT7eP5c3sVbeSPHnnJOaTrWiw==", - "requires": { - "array-includes": "^3.1.5", - "object.assign": "^4.1.3" - } - }, - "language-subtag-registry": { - "version": "0.3.22", - "resolved": "https://registry.npmjs.org/language-subtag-registry/-/language-subtag-registry-0.3.22.tgz", - "integrity": "sha512-tN0MCzyWnoz/4nHS6uxdlFWoUZT7ABptwKPQ52Ea7URk6vll88bWBVhodtnlfEuCcKWNGoc+uGbw1cwa9IKh/w==" - }, - "language-tags": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/language-tags/-/language-tags-1.0.5.tgz", - "integrity": "sha512-qJhlO9cGXi6hBGKoxEG/sKZDAHD5Hnu9Hs4WbOY3pCWXDhw0N8x1NenNzm2EnNLkLkk7J2SdxAkDSbb6ftT+UQ==", - "requires": { - "language-subtag-registry": "~0.3.2" - } - }, - "levn": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", - "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", - "requires": { - "prelude-ls": "^1.2.1", - "type-check": "~0.4.0" - } - }, - "lodash": { - "version": "3.10.1", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-3.10.1.tgz", - "integrity": "sha512-9mDDwqVIma6OZX79ZlDACZl8sBm0TEnkf99zV3iMA4GzkIT/9hiqP5mY0HoT1iNLCrKc/R1HByV+yJfRWVJryQ==" - }, - "lodash.merge": { - "version": "4.6.2", - "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", - "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==" - }, - "lodash.truncate": { - "version": "4.4.2", - "resolved": "https://registry.npmjs.org/lodash.truncate/-/lodash.truncate-4.4.2.tgz", - "integrity": "sha512-jttmRe7bRse52OsWIMDLaXxWqRAmtIUccAQ3garviCqJjafXOfNMO0yMfNpdD6zbGaTU0P5Nz7e7gAT6cKmJRw==" - }, - "loose-envify": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", - "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", - "requires": { - "js-tokens": "^3.0.0 || ^4.0.0" - } - }, - "lru-cache": { - 
"version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "requires": { - "yallist": "^4.0.0" - } - }, - "merge2": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", - "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==" - }, - "micromatch": { - "version": "4.0.5", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz", - "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==", - "requires": { - "braces": "^3.0.2", - "picomatch": "^2.3.1" - } - }, - "minimatch": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", - "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", - "requires": { - "brace-expansion": "^1.1.7" - } - }, - "minimist": { - "version": "1.2.6", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz", - "integrity": "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==" - }, - "ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" - }, - "nanoid": { - "version": "3.3.4", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.4.tgz", - "integrity": "sha512-MqBkQh/OHTS2egovRtLk45wEyNXwF+cokD+1YPf9u5VfJiRdAiRwB2froX5Co9Rh20xs4siNPm8naNotSD6RBw==" - }, - "natural-compare": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", - "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==" - }, - "next": { - "version": "12.3.0", - "resolved": "https://registry.npmjs.org/next/-/next-12.3.0.tgz", - "integrity": "sha512-GpzI6me9V1+XYtfK0Ae9WD0mKqHyzQlGq1xH1rzNIYMASo4Tkl4rTe9jSqtBpXFhOS33KohXs9ZY38Akkhdciw==", - "requires": { - "@next/env": "12.3.0", - "@next/swc-android-arm-eabi": "12.3.0", - "@next/swc-android-arm64": "12.3.0", - "@next/swc-darwin-arm64": "12.3.0", - "@next/swc-darwin-x64": "12.3.0", - "@next/swc-freebsd-x64": "12.3.0", - "@next/swc-linux-arm-gnueabihf": "12.3.0", - "@next/swc-linux-arm64-gnu": "12.3.0", - "@next/swc-linux-arm64-musl": "12.3.0", - "@next/swc-linux-x64-gnu": "12.3.0", - "@next/swc-linux-x64-musl": "12.3.0", - "@next/swc-win32-arm64-msvc": "12.3.0", - "@next/swc-win32-ia32-msvc": "12.3.0", - "@next/swc-win32-x64-msvc": "12.3.0", - "@swc/helpers": "0.4.11", - "caniuse-lite": "^1.0.30001332", - "postcss": "8.4.14", - "styled-jsx": "5.0.6", - "use-sync-external-store": "1.2.0" - } - }, - "next-transpile-modules": { - "version": "9.0.0", - "resolved": "https://registry.npmjs.org/next-transpile-modules/-/next-transpile-modules-9.0.0.tgz", - "integrity": "sha512-VCNFOazIAnXn1hvgYYSTYMnoWgKgwlYh4lm1pKbSfiB3kj5ZYLcKVhfh3jkPOg1cnd9DP+pte9yCUocdPEUBTQ==", - "dev": true, - "requires": { - "enhanced-resolve": "^5.7.0", - "escalade": "^3.1.1" - } - }, - "node-releases": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.6.tgz", - "integrity": "sha512-PiVXnNuFm5+iYkLBNeq5211hvO38y63T0i2KKh2KnUs3RpzJ+JtODFjkD8yjLwnDkTYF1eKXheUwdssR+NRZdg==", - "dev": true - }, - 
"object-assign": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", - "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==" - }, - "object-inspect": { - "version": "1.12.2", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.2.tgz", - "integrity": "sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ==" - }, - "object-keys": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", - "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==" - }, - "object.assign": { - "version": "4.1.4", - "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.4.tgz", - "integrity": "sha512-1mxKf0e58bvyjSCtKYY4sRe9itRk3PJpquJOjeIkz885CczcI4IvJJDLPS72oowuSh+pBxUFROpX+TU++hxhZQ==", - "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "has-symbols": "^1.0.3", - "object-keys": "^1.1.1" - } - }, - "object.entries": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/object.entries/-/object.entries-1.1.5.tgz", - "integrity": "sha512-TyxmjUoZggd4OrrU1W66FMDG6CuqJxsFvymeyXI51+vQLN67zYfZseptRge703kKQdo4uccgAKebXFcRCzk4+g==", - "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.3", - "es-abstract": "^1.19.1" - } - }, - "object.fromentries": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/object.fromentries/-/object.fromentries-2.0.5.tgz", - "integrity": "sha512-CAyG5mWQRRiBU57Re4FKoTBjXfDoNwdFVH2Y1tS9PqCsfUTymAohOkEMSG3aRNKmv4lV3O7p1et7c187q6bynw==", - "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.3", - "es-abstract": "^1.19.1" - } - }, - "object.hasown": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/object.hasown/-/object.hasown-1.1.1.tgz", - "integrity": "sha512-LYLe4tivNQzq4JdaWW6WO3HMZZJWzkkH8fnI6EebWl0VZth2wL2Lovm74ep2/gZzlaTdV62JZHEqHQ2yVn8Q/A==", - "requires": { - "define-properties": "^1.1.4", - "es-abstract": "^1.19.5" - } - }, - "object.values": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.5.tgz", - "integrity": "sha512-QUZRW0ilQ3PnPpbNtgdNV1PDbEqLIiSFB3l+EnGtBQ/8SUTLj1PZwtQHABZtLgwpJZTSZhuGLOGk57Drx2IvYg==", - "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.3", - "es-abstract": "^1.19.1" - } - }, - "once": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", - "requires": { - "wrappy": "1" - } - }, - "optionator": { - "version": "0.9.1", - "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.1.tgz", - "integrity": "sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw==", - "requires": { - "deep-is": "^0.1.3", - "fast-levenshtein": "^2.0.6", - "levn": "^0.4.1", - "prelude-ls": "^1.2.1", - "type-check": "^0.4.0", - "word-wrap": "^1.2.3" - } - }, - "parent-module": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", - "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", - "requires": { - "callsites": "^3.0.0" - } - }, - "path-is-absolute": { - "version": "1.0.1", - "resolved": 
"https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", - "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==" - }, - "path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==" - }, - "path-parse": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", - "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==" - }, - "path-type": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", - "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==" - }, - "picocolors": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", - "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==" - }, - "picomatch": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", - "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==" - }, - "postcss": { - "version": "8.4.14", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.14.tgz", - "integrity": "sha512-E398TUmfAYFPBSdzgeieK2Y1+1cpdxJx8yXbK/m57nRhKSmk1GB2tO4lbLBtlkfPQTDKfe4Xqv1ASWPpayPEig==", - "requires": { - "nanoid": "^3.3.4", - "picocolors": "^1.0.0", - "source-map-js": "^1.0.2" - } - }, - "prelude-ls": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", - "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==" - }, - "prettier": { - "version": "2.7.1", - "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.7.1.tgz", - "integrity": "sha512-ujppO+MkdPqoVINuDFDRLClm7D78qbDt0/NR+wp5FqEZOoTNAjPHWj17QRhu7geIHJfcNhRk1XVQmF8Bp3ye+g==", - "dev": true - }, - "progress": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/progress/-/progress-2.0.3.tgz", - "integrity": "sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==" - }, - "prop-types": { - "version": "15.8.1", - "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.8.1.tgz", - "integrity": "sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==", - "requires": { - "loose-envify": "^1.4.0", - "object-assign": "^4.1.1", - "react-is": "^16.13.1" - } - }, - "punycode": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", - "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==" - }, - "queue-microtask": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", - "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==" - }, - "react": { - "version": "18.2.0", - "resolved": "https://registry.npmjs.org/react/-/react-18.2.0.tgz", - "integrity": "sha512-/3IjMdb2L9QbBdWiW5e3P2/npwMBaU9mHCSCUzNln0ZCYbcfTsGbTJrU/kGemdH2IWmB2ioZ+zkxtmq6g09fGQ==", - "requires": { - "loose-envify": "^1.1.0" - } - }, - "react-dom": { - "version": "18.2.0", - "resolved": 
"https://registry.npmjs.org/react-dom/-/react-dom-18.2.0.tgz", - "integrity": "sha512-6IMTriUmvsjHUjNtEDudZfuDQUoWXVxKHhlEGSk81n4YFS+r/Kl99wXiwlVXtPBtJenozv2P+hxDsw9eA7Xo6g==", - "requires": { - "loose-envify": "^1.1.0", - "scheduler": "^0.23.0" - } - }, - "react-is": { - "version": "16.13.1", - "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz", - "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==" - }, - "regenerator-runtime": { - "version": "0.13.9", - "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz", - "integrity": "sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA==" - }, - "regexp.prototype.flags": { - "version": "1.4.3", - "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.4.3.tgz", - "integrity": "sha512-fjggEOO3slI6Wvgjwflkc4NFRCTZAu5CnNfBd5qOMYhWdn67nJBBu34/TkD++eeFmd8C9r9jfXJ27+nSiRkSUA==", - "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.3", - "functions-have-names": "^1.2.2" - } - }, - "regexpp": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-3.2.0.tgz", - "integrity": "sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg==" - }, - "require-from-string": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", - "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==" - }, - "resolve": { - "version": "1.22.1", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.1.tgz", - "integrity": "sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw==", - "requires": { - "is-core-module": "^2.9.0", - "path-parse": "^1.0.7", - "supports-preserve-symlinks-flag": "^1.0.0" - } - }, - "resolve-from": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", - "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==" - }, - "reusify": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", - "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==" - }, - "rimraf": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", - "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", - "requires": { - "glob": "^7.1.3" - } - }, - "run-parallel": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", - "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", - "requires": { - "queue-microtask": "^1.2.2" - } - }, - "safe-buffer": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", - "dev": true - }, - "safe-regex-test": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.0.0.tgz", - "integrity": "sha512-JBUUzyOgEwXQY1NuPtvcj/qcBDbDmEvWufhlnXZIm75DEHp+afM1r1ujJpJsV/gSM4t59tpDyPi1sd6ZaPFfsA==", - "requires": { - "call-bind": "^1.0.2", - 
"get-intrinsic": "^1.1.3", - "is-regex": "^1.1.4" - } - }, - "scheduler": { - "version": "0.23.0", - "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.23.0.tgz", - "integrity": "sha512-CtuThmgHNg7zIZWAXi3AsyIzA3n4xx7aNyjwC2VJldO2LMVDhFK+63xGqq6CsJH4rTAt6/M+N4GhZiDYPx9eUw==", - "requires": { - "loose-envify": "^1.1.0" - } - }, - "semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" - }, - "shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "requires": { - "shebang-regex": "^3.0.0" - } - }, - "shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==" - }, - "side-channel": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", - "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", - "requires": { - "call-bind": "^1.0.0", - "get-intrinsic": "^1.0.2", - "object-inspect": "^1.9.0" - } - }, - "slash": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", - "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==" - }, - "slice-ansi": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-4.0.0.tgz", - "integrity": "sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ==", - "requires": { - "ansi-styles": "^4.0.0", - "astral-regex": "^2.0.0", - "is-fullwidth-code-point": "^3.0.0" - }, - "dependencies": { - "ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "requires": { - "color-convert": "^2.0.1" - } - }, - "color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "requires": { - "color-name": "~1.1.4" - } - }, - "color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" - } - } - }, - "source-map-js": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.0.2.tgz", - "integrity": "sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==" - }, - "sprintf-js": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", - "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==" - }, - "string-width": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": 
"sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "requires": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "dependencies": { - "emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" - } - } - }, - "string.prototype.matchall": { - "version": "4.0.7", - "resolved": "https://registry.npmjs.org/string.prototype.matchall/-/string.prototype.matchall-4.0.7.tgz", - "integrity": "sha512-f48okCX7JiwVi1NXCVWcFnZgADDC/n2vePlQ/KUCNqCikLLilQvwjMO8+BHVKvgzH0JB0J9LEPgxOGT02RoETg==", - "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.3", - "es-abstract": "^1.19.1", - "get-intrinsic": "^1.1.1", - "has-symbols": "^1.0.3", - "internal-slot": "^1.0.3", - "regexp.prototype.flags": "^1.4.1", - "side-channel": "^1.0.4" - } - }, - "string.prototype.trimend": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.5.tgz", - "integrity": "sha512-I7RGvmjV4pJ7O3kdf+LXFpVfdNOxtCW/2C8f6jNiW4+PQchwxkCDzlk1/7p+Wl4bqFIZeF47qAHXLuHHWKAxog==", - "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.19.5" - } - }, - "string.prototype.trimstart": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.5.tgz", - "integrity": "sha512-THx16TJCGlsN0o6dl2o6ncWUsdgnLRSA23rRE5pyGBw/mLr3Ej/R2LaqCtgP8VNMGZsvMWnf9ooZPyY2bHvUFg==", - "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.19.5" - } - }, - "strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "requires": { - "ansi-regex": "^5.0.1" - } - }, - "strip-bom": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", - "integrity": "sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==" - }, - "strip-json-comments": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", - "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==" - }, - "styled-jsx": { - "version": "5.0.6", - "resolved": "https://registry.npmjs.org/styled-jsx/-/styled-jsx-5.0.6.tgz", - "integrity": "sha512-xOeROtkK5MGMDimBQ3J6iPId8q0t/BDoG5XN6oKkZClVz9ISF/hihN8OCn2LggMU6N32aXnrXBdn3auSqNS9fA==", - "requires": {} - }, - "supports-color": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", - "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", - "requires": { - "has-flag": "^3.0.0" - } - }, - "supports-preserve-symlinks-flag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", - "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==" - }, - "table": { - "version": "6.8.0", - "resolved": "https://registry.npmjs.org/table/-/table-6.8.0.tgz", - "integrity": 
"sha512-s/fitrbVeEyHKFa7mFdkuQMWlH1Wgw/yEXMt5xACT4ZpzWFluehAxRtUUQKPuWhaLAWhFcVx6w3oC8VKaUfPGA==", - "requires": { - "ajv": "^8.0.1", - "lodash.truncate": "^4.4.2", - "slice-ansi": "^4.0.0", - "string-width": "^4.2.3", - "strip-ansi": "^6.0.1" - }, - "dependencies": { - "ajv": { - "version": "8.11.0", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.11.0.tgz", - "integrity": "sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg==", - "requires": { - "fast-deep-equal": "^3.1.1", - "json-schema-traverse": "^1.0.0", - "require-from-string": "^2.0.2", - "uri-js": "^4.2.2" - } - }, - "json-schema-traverse": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", - "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==" - } - } - }, - "tapable": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.2.1.tgz", - "integrity": "sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ==", - "dev": true - }, - "text-table": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", - "integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==" - }, - "to-fast-properties": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", - "integrity": "sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==", - "dev": true - }, - "to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "requires": { - "is-number": "^7.0.0" - } - }, - "tsconfig": { - "version": "file:packages/tsconfig" - }, - "tsconfig-paths": { - "version": "3.14.1", - "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.14.1.tgz", - "integrity": "sha512-fxDhWnFSLt3VuTwtvJt5fpwxBHg5AdKWMsgcPOOIilyjymcYVZoCQF8fvFRezCNfblEXmi+PcM1eYHeOAgXCOQ==", - "requires": { - "@types/json5": "^0.0.29", - "json5": "^1.0.1", - "minimist": "^1.2.6", - "strip-bom": "^3.0.0" - }, - "dependencies": { - "json5": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.1.tgz", - "integrity": "sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow==", - "requires": { - "minimist": "^1.2.0" - } - } - } - }, - "tslib": { - "version": "2.4.0", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", - "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" - }, - "tsutils": { - "version": "3.21.0", - "resolved": "https://registry.npmjs.org/tsutils/-/tsutils-3.21.0.tgz", - "integrity": "sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA==", - "requires": { - "tslib": "^1.8.1" - }, - "dependencies": { - "tslib": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", - "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==" - } - } - }, - "turbo": { - "version": "1.5.5", - "resolved": "https://registry.npmjs.org/turbo/-/turbo-1.5.5.tgz", - "integrity": 
"sha512-PVQSDl0STC9WXIyHcYUWs9gXsf8JjQig/FuHfuB8N6+XlgCGB3mPbfMEE6zrChGz2hufH4/guKRX1XJuNL6XTA==", - "dev": true, - "requires": { - "turbo-darwin-64": "1.5.5", - "turbo-darwin-arm64": "1.5.5", - "turbo-linux-64": "1.5.5", - "turbo-linux-arm64": "1.5.5", - "turbo-windows-64": "1.5.5", - "turbo-windows-arm64": "1.5.5" - } - }, - "turbo-darwin-64": { - "version": "1.5.5", - "resolved": "https://registry.npmjs.org/turbo-darwin-64/-/turbo-darwin-64-1.5.5.tgz", - "integrity": "sha512-HvEn6P2B+NXDekq9LRpRgUjcT9/oygLTcK47U0qsAJZXRBSq/2hvD7lx4nAwgY/4W3rhYJeWtHTzbhoN6BXqGQ==", - "dev": true, - "optional": true - }, - "turbo-darwin-arm64": { - "version": "1.5.5", - "resolved": "https://registry.npmjs.org/turbo-darwin-arm64/-/turbo-darwin-arm64-1.5.5.tgz", - "integrity": "sha512-Dmxr09IUy6M0nc7/xWod9galIO2DD500B75sJSkHeT+CCdJOWnlinux0ZPF8CSygNqymwYO8AO2l15/6yxcycg==", - "dev": true, - "optional": true - }, - "turbo-linux-64": { - "version": "1.5.5", - "resolved": "https://registry.npmjs.org/turbo-linux-64/-/turbo-linux-64-1.5.5.tgz", - "integrity": "sha512-wd07TZ4zXXWjzZE00FcFMLmkybQQK/NV9ff66vvAV0vdiuacSMBCNLrD6Mm4ncfrUPW/rwFW5kU/7hyuEqqtDw==", - "dev": true, - "optional": true - }, - "turbo-linux-arm64": { - "version": "1.5.5", - "resolved": "https://registry.npmjs.org/turbo-linux-arm64/-/turbo-linux-arm64-1.5.5.tgz", - "integrity": "sha512-q3q33tuo74R7gicnfvFbnZZvqmlq7Vakcvx0eshifnJw4PR+oMnTCb4w8ElVFx070zsb8DVTibq99y8NJH8T1Q==", - "dev": true, - "optional": true - }, - "turbo-windows-64": { - "version": "1.5.5", - "resolved": "https://registry.npmjs.org/turbo-windows-64/-/turbo-windows-64-1.5.5.tgz", - "integrity": "sha512-lPp9kHonNFfqgovbaW+UAPO5cLmoAN+m3G3FzqcrRPnlzt97vXYsDhDd/4Zy3oAKoAcprtP4CGy0ddisqsKTVw==", - "dev": true, - "optional": true - }, - "turbo-windows-arm64": { - "version": "1.5.5", - "resolved": "https://registry.npmjs.org/turbo-windows-arm64/-/turbo-windows-arm64-1.5.5.tgz", - "integrity": "sha512-3AfGULKNZiZVrEzsIE+W79ZRW1+f5r4nM4wLlJ1PTBHyRxBZdD6KTH1tijGfy/uTlcV5acYnKHEkDc6Q9PAXGQ==", - "dev": true, - "optional": true - }, - "type-check": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", - "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", - "requires": { - "prelude-ls": "^1.2.1" - } - }, - "type-fest": { - "version": "0.20.2", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", - "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==" - }, - "typescript": { - "version": "4.8.4", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.8.4.tgz", - "integrity": "sha512-QCh+85mCy+h0IGff8r5XWzOVSbBO+KfeYrMQh7NJ58QujwcE22u+NUSmUxqF+un70P9GXKxa2HCNiTTMJknyjQ==" - }, - "ui": { - "version": "file:packages/ui", - "requires": { - "@types/react": "^17.0.37", - "@types/react-dom": "^17.0.11", - "eslint": "^7.32.0", - "eslint-config-custom": "*", - "react": "^18.2.0", - "tsconfig": "*", - "typescript": "^4.5.2" - }, - "dependencies": { - "@types/react": { - "version": "17.0.50", - "resolved": "https://registry.npmjs.org/@types/react/-/react-17.0.50.tgz", - "integrity": "sha512-ZCBHzpDb5skMnc1zFXAXnL3l1FAdi+xZvwxK+PkglMmBrwjpp9nKaWuEvrGnSifCJmBFGxZOOFuwC6KH/s0NuA==", - "dev": true, - "requires": { - "@types/prop-types": "*", - "@types/scheduler": "*", - "csstype": "^3.0.2" - } - } - } - }, - "unbox-primitive": { - "version": "1.0.2", - "resolved": 
"https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.2.tgz", - "integrity": "sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw==", - "requires": { - "call-bind": "^1.0.2", - "has-bigints": "^1.0.2", - "has-symbols": "^1.0.3", - "which-boxed-primitive": "^1.0.2" - } - }, - "update-browserslist-db": { - "version": "1.0.9", - "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.0.9.tgz", - "integrity": "sha512-/xsqn21EGVdXI3EXSum1Yckj3ZVZugqyOZQ/CxYPBD/R+ko9NSUScf8tFF4dOKY+2pvSSJA/S+5B8s4Zr4kyvg==", - "dev": true, - "requires": { - "escalade": "^3.1.1", - "picocolors": "^1.0.0" - } - }, - "uri-js": { - "version": "4.4.1", - "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", - "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", - "requires": { - "punycode": "^2.1.0" - } - }, - "use-sync-external-store": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/use-sync-external-store/-/use-sync-external-store-1.2.0.tgz", - "integrity": "sha512-eEgnFxGQ1Ife9bzYs6VLi8/4X6CObHMw9Qr9tPY43iKwsPw8xE8+EFsf/2cFZ5S3esXgpWgtSCtLNS41F+sKPA==", - "requires": {} - }, - "v8-compile-cache": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz", - "integrity": "sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA==" - }, - "web": { - "version": "file:apps/web", - "requires": { - "@babel/core": "^7.0.0", - "@types/node": "^17.0.12", - "@types/react": "18.0.17", - "eslint": "7.32.0", - "eslint-config-custom": "*", - "lodash": "^4.17.21", - "next": "12.3.0", - "next-transpile-modules": "9.0.0", - "react": "18.2.0", - "react-dom": "18.2.0", - "tsconfig": "*", - "typescript": "^4.5.3", - "ui": "*" - }, - "dependencies": { - "lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" - } - } - }, - "which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "requires": { - "isexe": "^2.0.0" - } - }, - "which-boxed-primitive": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz", - "integrity": "sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==", - "requires": { - "is-bigint": "^1.0.1", - "is-boolean-object": "^1.1.0", - "is-number-object": "^1.0.4", - "is-string": "^1.0.5", - "is-symbol": "^1.0.3" - } - }, - "word-wrap": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", - "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==" - }, - "wrappy": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" - }, - "yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" - } - } -} diff --git 
a/cli/internal/lockfile/testdata/pnpm-absolute-v6.yaml b/cli/internal/lockfile/testdata/pnpm-absolute-v6.yaml deleted file mode 100644 index dc5d0e626014a..0000000000000 --- a/cli/internal/lockfile/testdata/pnpm-absolute-v6.yaml +++ /dev/null @@ -1,18 +0,0 @@ -lockfileVersion: "6.0" -importers: - packages/a: - dependencies: - "@scope/parent": - specifier: ^1.0.0 - version: 1.0.0 - -packages: - /@scope/parent@1.0.0: - resolution: { integrity: junk } - dependencies: - child: /@scope/child@1.0.0 - dev: false - - /@scope/child@1.0.0: - resolution: { integrity: junk } - dev: false diff --git a/cli/internal/lockfile/testdata/pnpm-absolute.yaml b/cli/internal/lockfile/testdata/pnpm-absolute.yaml deleted file mode 100644 index d39f802da9e7b..0000000000000 --- a/cli/internal/lockfile/testdata/pnpm-absolute.yaml +++ /dev/null @@ -1,38 +0,0 @@ -lockfileVersion: 5.4 -importers: - packages/a: - specifiers: - another: ^1.0.0 - "@scope/parent": ^1.0.0 - special: npm:Special@1.2.3 - dependencies: - another: 1.0.0 - "@scope/parent": 1.0.0 - special: /Special/1.2.3 - -packages: - /@scope/parent/1.0.0: - resolution: { integrity: junk } - dependencies: - child: /@scope/child/1.0.0 - dev: false - - /@scope/child/1.0.0: - resolution: { integrity: junk } - dev: false - - /another/1.0.0: - resolution: { integrity: junk } - dev: false - dependencies: - foo: 1.0.0 - - /foo/1.0.0: - resolution: { integrity: junk } - dev: false - dependencies: - Special: 1.2.3 - - /Special/1.2.3: - resolution: { integrity: junk } - dev: false diff --git a/cli/internal/lockfile/testdata/pnpm-patch-v6.yaml b/cli/internal/lockfile/testdata/pnpm-patch-v6.yaml deleted file mode 100644 index b620472460d97..0000000000000 --- a/cli/internal/lockfile/testdata/pnpm-patch-v6.yaml +++ /dev/null @@ -1,40 +0,0 @@ -lockfileVersion: "6.0" - -patchedDependencies: - lodash@4.17.21: - hash: lgum37zgng4nfkynzh3cs7wdeq - path: patches/lodash@4.17.21.patch - "@babel/helper-string-parser@7.19.4": - hash: wjhgmpzh47qmycrzgpeyoyh3ce - path: patches/@babel__helper-string-parser@7.19.4.patch - -importers: - .: {} - - packages/a: - dependencies: - lodash: - specifier: ^4.17.21 - version: 4.17.21(patch_hash=lgum37zgng4nfkynzh3cs7wdeq) - - packages/b: - dependencies: - "@babel/helper-string-parser": - specifier: ^7.19.4 - version: 7.19.4(patch_hash=wjhgmpzh47qmycrzgpeyoyh3ce)(@babel/core@7.21.0) - -packages: - /@babel/helper-string-parser@7.19.4(patch_hash=wjhgmpzh47qmycrzgpeyoyh3ce)(@babel/core@7.21.0): - resolution: - { - integrity: sha512-nHtDoQcuqFmwYNYPz3Rah5ph2p8PFeFCsZk9A/48dPc/rGocJ5J3hAAZ7pb76VWX3fZKu+uEr/FhH5jLx7umrw==, - } - engines: { node: ">=6.9.0" } - dev: false - - /lodash@4.17.21(patch_hash=lgum37zgng4nfkynzh3cs7wdeq): - resolution: - { - integrity: sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==, - } - dev: false diff --git a/cli/internal/lockfile/testdata/pnpm-patch.yaml b/cli/internal/lockfile/testdata/pnpm-patch.yaml deleted file mode 100644 index ea84d72043b7b..0000000000000 --- a/cli/internal/lockfile/testdata/pnpm-patch.yaml +++ /dev/null @@ -1,63 +0,0 @@ -lockfileVersion: 5.4 - -patchedDependencies: - is-odd@3.0.1: - hash: nrrwwz7lemethtlvvm75r5bmhq - path: patches/is-odd@3.0.1.patch - "@babel/core@7.20.12": - hash: 3hyn7hbvzkemudbydlwjmrb65y - path: patches/@babel__core@7.20.12.patch - moleculer@0.14.28: - hash: 5pk7ojv7qbqha75ozglk4y4f74 - path: patches/moleculer@0.14.28.patch - -importers: - .: - specifiers: {} - - packages/dependency: - specifiers: - is-odd: ^3.0.1 - "@babel/core": 
^7.20.12 - dependencies: - is-odd: 3.0.1_nrrwwz7lemethtlvvm75r5bmhq - "@babel/core": 7.20.12_3hyn7hbvzkemudbydlwjmrb65y - -packages: - /@babel/core/7.20.12_3hyn7hbvzkemudbydlwjmrb65y: - resolution: - { - integrity: sha512-XsMfHovsUYHFMdrIHkZphTN/2Hzzi78R08NuHfDBehym2VsPDL6Zn/JAD/JQdnRvbSsbQc4mVaU1m6JgtTEElg==, - } - engines: { node: ">=6.9.0" } - dev: false - - /is-number/6.0.0: - resolution: - { - integrity: sha512-Wu1VHeILBK8KAWJUAiSZQX94GmOE45Rg6/538fKwiloUu21KncEkYGPqob2oSZ5mUT73vLGrHQjKw3KMPwfDzg==, - } - engines: { node: ">=0.10.0" } - dev: false - - /is-odd/3.0.1_nrrwwz7lemethtlvvm75r5bmhq: - resolution: - { - integrity: sha512-CQpnWPrDwmP1+SMHXZhtLtJv90yiyVfluGsX5iNCVkrhQtU3TQHsUWPG9wkdk9Lgd5yNpAg9jQEo90CBaXgWMA==, - } - engines: { node: ">=4" } - dependencies: - is-number: 6.0.0 - dev: false - patched: true - - /moleculer/0.14.28_5pk7ojv7qbqha75ozglk4y4f74_kumip57h7zlinbhp4gz3jrbqry: - resolution: - { - integrity: sha512-CQpnWPrDwmP1+SMHXZhtLtJv90yiyVfluGsX5iNCVkrhQtU3TQHsUWPG9wkdk9Lgd5yNpAg9jQEo90CBaXgWMA==, - } - engines: { node: ">=4" } - dependencies: - is-number: 6.0.0 - dev: false - patched: true diff --git a/cli/internal/lockfile/testdata/pnpm-peer-v6.yaml b/cli/internal/lockfile/testdata/pnpm-peer-v6.yaml deleted file mode 100644 index feddd0769b09f..0000000000000 --- a/cli/internal/lockfile/testdata/pnpm-peer-v6.yaml +++ /dev/null @@ -1,67 +0,0 @@ -lockfileVersion: "6.0" - -importers: - .: {} - - apps/web: - dependencies: - next: - specifier: 13.0.4 - version: 13.0.4(react-dom@18.2.0)(react@18.2.0) - react: - specifier: 18.2.0 - version: 18.2.0 - react-dom: - specifier: 18.2.0 - version: 18.2.0(react@18.2.0) - - packages/next-config: {} - - packages/package-for-ci: {} - - packages/tsconfig: {} - -packages: - /next@13.0.4: - resolution: - { - integrity: sha512-4P0MvbjPCI1E/UPL1GrTXtYlgFnbBbY3JQ+AMY8jYE2SwyvCWctEJySoRjveznAHjrl6TIjuAJeB8u1c2StYUQ==, - } - engines: { node: ">=14.6.0" } - hasBin: true - peerDependencies: - fibers: ">= 3.1.0" - node-sass: ^6.0.0 || ^7.0.0 - react: ^18.2.0 - react-dom: ^18.2.0 - sass: ^1.3.0 - peerDependenciesMeta: - fibers: - optional: true - node-sass: - optional: true - sass: - optional: true - dev: true - - /next@13.0.4(react-dom@18.2.0)(react@18.2.0): - resolution: - { - integrity: sha512-4P0MvbjPCI1E/UPL1GrTXtYlgFnbBbY3JQ+AMY8jYE2SwyvCWctEJySoRjveznAHjrl6TIjuAJeB8u1c2StYUQ==, - } - engines: { node: ">=14.6.0" } - hasBin: true - peerDependencies: - fibers: ">= 3.1.0" - node-sass: ^6.0.0 || ^7.0.0 - react: ^18.2.0 - react-dom: ^18.2.0 - sass: ^1.3.0 - peerDependenciesMeta: - fibers: - optional: true - node-sass: - optional: true - sass: - optional: true - dev: false diff --git a/cli/internal/lockfile/testdata/pnpm-top-level-dupe.yaml b/cli/internal/lockfile/testdata/pnpm-top-level-dupe.yaml deleted file mode 100644 index 6837f223da853..0000000000000 --- a/cli/internal/lockfile/testdata/pnpm-top-level-dupe.yaml +++ /dev/null @@ -1,36 +0,0 @@ -lockfileVersion: 5.4 - -importers: - packages/a: - specifiers: - ci-info: ^2.0.0 - is-ci: ^3.0.1 - dependencies: - ci-info: 2.0.0 - is-ci: 3.0.1 - -packages: - /ci-info/2.0.0: - resolution: - { - integrity: sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ==, - } - dev: false - - /ci-info/3.7.1: - resolution: - { - integrity: sha512-4jYS4MOAaCIStSRwiuxc4B8MYhIe676yO1sYGzARnjXkWpmzZMMYxY6zu8WYWDhSuth5zhrQ1rhNSibyyvv4/w==, - } - engines: { node: ">=8" } - dev: false - - /is-ci/3.0.1: - resolution: - { - integrity: 
sha512-ZYvCgrefwqoQ6yTyYUbQu64HsITZ3NfKX1lzaEYdkTDcfKzzCI/wthRRYKkdjHKFVgNiXKAKm65Zo1pk2as/QQ==, - } - hasBin: true - dependencies: - ci-info: 3.7.1 - dev: false diff --git a/cli/internal/lockfile/testdata/pnpm6-workspace.yaml b/cli/internal/lockfile/testdata/pnpm6-workspace.yaml deleted file mode 100644 index daf92b79118b0..0000000000000 --- a/cli/internal/lockfile/testdata/pnpm6-workspace.yaml +++ /dev/null @@ -1,1704 +0,0 @@ -lockfileVersion: 5.3 - -importers: - .: - specifiers: - "@pnpm/make-dedicated-lockfile": ^0.3.19 - devDependencies: - "@pnpm/make-dedicated-lockfile": 0.3.19 - - packages/a: - specifiers: - b: workspace:* - express: ^4.18.1 - dependencies: - b: link:../b - express: 4.18.1 - - packages/b: - specifiers: - c: workspace:* - lodash: ^4.17.21 - dependencies: - c: link:../c - lodash: 4.17.21 - - packages/c: - specifiers: - chalk: ^5.0.1 - dependencies: - chalk: 5.0.1 - -packages: - /@babel/code-frame/7.18.6: - resolution: - { - integrity: sha512-TDCmlK5eOvH+eH7cdAFlNXeVJqWIQ7gW9tY1GJIpUtFb6CmjVyq2VM3u71bOyR8CRihcCgMUYoDNyLXao3+70Q==, - } - engines: { node: ">=6.9.0" } - dependencies: - "@babel/highlight": 7.18.6 - dev: true - - /@babel/helper-validator-identifier/7.18.6: - resolution: - { - integrity: sha512-MmetCkz9ej86nJQV+sFCxoGGrUbU3q02kgLciwkrt9QqEB7cP39oKEY0PakknEO0Gu20SskMRi+AYZ3b1TpN9g==, - } - engines: { node: ">=6.9.0" } - dev: true - - /@babel/highlight/7.18.6: - resolution: - { - integrity: sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g==, - } - engines: { node: ">=6.9.0" } - dependencies: - "@babel/helper-validator-identifier": 7.18.6 - chalk: 2.4.2 - js-tokens: 4.0.0 - dev: true - - /@pnpm/constants/6.1.0: - resolution: - { - integrity: sha512-L6AiU3OXv9kjKGTJN9j8n1TeJGDcLX9atQlZvAkthlvbXjvKc5SKNWESc/eXhr5nEfuMWhQhiKHDJCpYejmeCQ==, - } - engines: { node: ">=14.19" } - dev: true - - /@pnpm/crypto.base32-hash/1.0.1: - resolution: - { - integrity: sha512-pzAXNn6KxTA3kbcI3iEnYs4vtH51XEVqmK/1EiD18MaPKylhqy8UvMJK3zKG+jeP82cqQbozcTGm4yOQ8i3vNw==, - } - engines: { node: ">=14.6" } - dependencies: - rfc4648: 1.5.2 - dev: true - - /@pnpm/error/3.0.1: - resolution: - { - integrity: sha512-hMlbWbFcfcfolNfSjKjpeaZFow71kNg438LZ8rAd01swiVIYRUf/sRv8gGySru6AijYfz5UqslpIJRDbYBkgQA==, - } - engines: { node: ">=14.19" } - dependencies: - "@pnpm/constants": 6.1.0 - dev: true - - /@pnpm/exec/2.0.0: - resolution: - { - integrity: sha512-b5ALfWEOFQprWKntN7MF8XWCyslBk2c8u20GEDcDDQOs6c0HyHlWxX5lig8riQKdS000U6YyS4L4b32NOleXAQ==, - } - engines: { node: ">=10" } - dependencies: - "@pnpm/self-installer": 2.2.1 - command-exists: 1.2.9 - cross-spawn: 7.0.3 - dev: true - - /@pnpm/exportable-manifest/3.1.2: - resolution: - { - integrity: sha512-IvTBwt3n73pXsU6iS1Y4OipBg3GBN37I/mUR8t3q5N0c5TkVxj9xAsra5/m7mX4dsYCv9BPL6Rw+MuKSV5P1hA==, - } - engines: { node: ">=14.6" } - dependencies: - "@pnpm/error": 3.0.1 - "@pnpm/read-project-manifest": 3.0.9 - "@pnpm/types": 8.5.0 - ramda: /@pnpm/ramda/0.28.1 - dev: true - - /@pnpm/find-workspace-dir/4.0.2: - resolution: - { - integrity: sha512-gU7ycFSWuEGJh7RE/STa33Ch27geODTXIfc+ntiE1BietxfpJIAk34zz51kTUuCFthBkpHlO6yV7jgHD2Tuc3g==, - } - engines: { node: ">=14.6" } - dependencies: - "@pnpm/error": 3.0.1 - find-up: 5.0.0 - dev: true - - /@pnpm/git-utils/0.1.0: - resolution: - { - integrity: sha512-W3zsG9585cKL+FqgcT+IfTgZX5C+CbNkFjOnJN+qbysT1N30+BbvEByCcDMsTy7QDrAk6oS7WU1Rym3U2xlh2Q==, - } - engines: { node: ">=14.6" } - dependencies: - execa: /safe-execa/0.1.2 - dev: true - - /@pnpm/graceful-fs/2.0.0: - 
resolution: - { - integrity: sha512-ogUZCGf0/UILZt6d8PsO4gA4pXh7f0BumXeFkcCe4AQ65PXPKfAkHC0C30Lheh2EgFOpLZm3twDP1Eiww18gew==, - } - engines: { node: ">=14.19" } - dependencies: - graceful-fs: 4.2.10 - dev: true - - /@pnpm/lockfile-file/5.3.3_@pnpm+logger@4.0.0: - resolution: - { - integrity: sha512-IOvjeMRX+++osG9VsfSd7+hVa/sIzhqdrm/nFcL7AexFhC7wjXbWW3YMlN5Cw4v0fwm93fgRZlikIKJ7BmkBBA==, - } - engines: { node: ">=14.6" } - peerDependencies: - "@pnpm/logger": ^4.0.0 - dependencies: - "@pnpm/constants": 6.1.0 - "@pnpm/error": 3.0.1 - "@pnpm/git-utils": 0.1.0 - "@pnpm/lockfile-types": 4.3.1 - "@pnpm/logger": 4.0.0 - "@pnpm/merge-lockfile-changes": 3.0.9 - "@pnpm/types": 8.5.0 - "@zkochan/rimraf": 2.1.2 - comver-to-semver: 1.0.0 - js-yaml: /@zkochan/js-yaml/0.0.6 - normalize-path: 3.0.0 - ramda: /@pnpm/ramda/0.28.1 - semver: 7.3.7 - sort-keys: 4.2.0 - strip-bom: 4.0.0 - write-file-atomic: 3.0.3 - dev: true - - /@pnpm/lockfile-types/4.3.1: - resolution: - { - integrity: sha512-xoorF+CuuUvpjfi8Uw/xkf8LI9VDzs9W1gjSxkKS8UwK60zU5fu4agILJfVVGlHO1tnjJeGRuspBjp7UZ8ufMA==, - } - engines: { node: ">=14.6" } - dependencies: - "@pnpm/types": 8.5.0 - dev: true - - /@pnpm/logger/4.0.0: - resolution: - { - integrity: sha512-SIShw+k556e7S7tLZFVSIHjCdiVog1qWzcKW2RbLEHPItdisAFVNIe34kYd9fMSswTlSRLS/qRjw3ZblzWmJ9Q==, - } - engines: { node: ">=12.17" } - dependencies: - bole: 4.0.1 - ndjson: 2.0.0 - dev: true - - /@pnpm/make-dedicated-lockfile/0.3.19: - resolution: - { - integrity: sha512-VHllqMh5zviSHds2kOlWSiwmxos3LLGWCVIHpo+HX45D3TXx+oMOgE8k6WB0dSOTVIuGKduoCNTGeSW4p2bD2w==, - } - engines: { node: ">=14.6" } - hasBin: true - dependencies: - "@pnpm/error": 3.0.1 - "@pnpm/exec": 2.0.0 - "@pnpm/exportable-manifest": 3.1.2 - "@pnpm/find-workspace-dir": 4.0.2 - "@pnpm/lockfile-file": 5.3.3_@pnpm+logger@4.0.0 - "@pnpm/logger": 4.0.0 - "@pnpm/prune-lockfile": 4.0.14 - "@pnpm/read-project-manifest": 3.0.9 - "@pnpm/types": 8.5.0 - ramda: /@pnpm/ramda/0.28.1 - rename-overwrite: 4.0.2 - dev: true - - /@pnpm/merge-lockfile-changes/3.0.9: - resolution: - { - integrity: sha512-UOl3AYsi13R8bvQNJPNUml8sZYKBRns0xjAcPQomoX3WTU0dv+KzVyv86Iv86YlApP0aJj9MS8Vq++JOC10RKg==, - } - engines: { node: ">=14.6" } - dependencies: - "@pnpm/lockfile-types": 4.3.1 - comver-to-semver: 1.0.0 - ramda: /@pnpm/ramda/0.28.1 - semver: 7.3.7 - dev: true - - /@pnpm/prune-lockfile/4.0.14: - resolution: - { - integrity: sha512-lICCgm9j3e2Bu75zK4PA1FKjpu9pCcagRbZWruONBf44byyEkHcnTf8b8a9M1MvtoiArhmKOmyOVJ2OFyBBRyA==, - } - engines: { node: ">=14.6" } - dependencies: - "@pnpm/constants": 6.1.0 - "@pnpm/lockfile-types": 4.3.1 - "@pnpm/types": 8.5.0 - dependency-path: 9.2.4 - ramda: /@pnpm/ramda/0.28.1 - dev: true - - /@pnpm/ramda/0.28.1: - resolution: - { - integrity: sha512-zcAG+lvU0fMziNeGXpPyCyCJYp5ZVrPElEE4t14jAmViaihohocZ+dDkcRIyAomox8pQsuZnv1EyHR+pOhmUWw==, - } - dev: true - - /@pnpm/read-project-manifest/3.0.9: - resolution: - { - integrity: sha512-27j40C48hA/tqsCiqk9ApJxp2g6WGrrj2RSs0NKhsSHynxAuA1tIvwatNISQbAiMjZiu1lfhzhq8m1QdblyNmA==, - } - engines: { node: ">=14.6" } - dependencies: - "@pnpm/error": 3.0.1 - "@pnpm/graceful-fs": 2.0.0 - "@pnpm/types": 8.5.0 - "@pnpm/write-project-manifest": 3.0.7 - detect-indent: 6.1.0 - fast-deep-equal: 3.1.3 - is-windows: 1.0.2 - json5: 2.2.1 - parse-json: 5.2.0 - read-yaml-file: 2.1.0 - sort-keys: 4.2.0 - strip-bom: 4.0.0 - dev: true - - /@pnpm/self-installer/2.2.1: - resolution: - { - integrity: sha512-aefLe96wAWghkx6q1PwbVS1Iz1iGE+HKwkTmtzWLFXeGhbknaIdG2voMwaBGIYGCSxm8sDKR1uLO4aRRAYuc+Q==, - } - engines: { node: 
">=4" } - hasBin: true - dev: true - - /@pnpm/types/8.5.0: - resolution: - { - integrity: sha512-PSKnhkwgiZtp9dcWZR9mPz2W9UopmADr9o8FTqazo5kjUSh2xQmDUSJOJ/ZWcfNziO64Ix/VbcxKIZeplhog1Q==, - } - engines: { node: ">=14.6" } - dev: true - - /@pnpm/write-project-manifest/3.0.7: - resolution: - { - integrity: sha512-rMgIWR52asESg1D7Cp/vBi3dBsv18iUWPvvtYNynrcOjRdE3NsH5CAdfZP/XN6HJF6CSY8rS9W4YC5Q3JGtxiw==, - } - engines: { node: ">=14.6" } - dependencies: - "@pnpm/types": 8.5.0 - json5: 2.2.1 - write-file-atomic: 3.0.3 - write-yaml-file: 4.2.0 - dev: true - - /@zkochan/js-yaml/0.0.6: - resolution: - { - integrity: sha512-nzvgl3VfhcELQ8LyVrYOru+UtAy1nrygk2+AGbTm8a5YcO6o8lSjAT+pfg3vJWxIoZKOUhrK6UU7xW/+00kQrg==, - } - hasBin: true - dependencies: - argparse: 2.0.1 - dev: true - - /@zkochan/rimraf/2.1.2: - resolution: - { - integrity: sha512-Lc2oK51J6aQWcLWTloobJun5ZF41BbTDdLvE+aMcexoVWFoFqvZmnZoyXR2IZk6NJEVoZW8tjgtvQLfTsmRs2Q==, - } - engines: { node: ">=12.10" } - dependencies: - rimraf: 3.0.2 - dev: true - - /@zkochan/which/2.0.3: - resolution: - { - integrity: sha512-C1ReN7vt2/2O0fyTsx5xnbQuxBrmG5NMSbcIkPKCCfCTJgpZBsuRYzFXHj3nVq8vTfK7vxHUmzfCpSHgO7j4rg==, - } - engines: { node: ">= 8" } - hasBin: true - dependencies: - isexe: 2.0.0 - dev: true - - /accepts/1.3.8: - resolution: - { - integrity: sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==, - } - engines: { node: ">= 0.6" } - dependencies: - mime-types: 2.1.35 - negotiator: 0.6.3 - dev: false - - /ansi-styles/3.2.1: - resolution: - { - integrity: sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==, - } - engines: { node: ">=4" } - dependencies: - color-convert: 1.9.3 - dev: true - - /argparse/2.0.1: - resolution: - { - integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==, - } - dev: true - - /array-flatten/1.1.1: - resolution: - { - integrity: sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==, - } - dev: false - - /balanced-match/1.0.2: - resolution: - { - integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==, - } - dev: true - - /body-parser/1.20.0: - resolution: - { - integrity: sha512-DfJ+q6EPcGKZD1QWUjSpqp+Q7bDQTsQIF4zfUAtZ6qk+H/3/QRhg9CEp39ss+/T2vw0+HaidC0ecJj/DRLIaKg==, - } - engines: { node: ">= 0.8", npm: 1.2.8000 || >= 1.4.16 } - dependencies: - bytes: 3.1.2 - content-type: 1.0.4 - debug: 2.6.9 - depd: 2.0.0 - destroy: 1.2.0 - http-errors: 2.0.0 - iconv-lite: 0.4.24 - on-finished: 2.4.1 - qs: 6.10.3 - raw-body: 2.5.1 - type-is: 1.6.18 - unpipe: 1.0.0 - dev: false - - /bole/4.0.1: - resolution: - { - integrity: sha512-42r0aSOJFJti2l6LasBHq2BuWJzohGs349olQnH/ETlJo87XnoWw7UT8pGE6UstjxzOKkwz7tjoFcmSr6L16vg==, - } - dependencies: - fast-safe-stringify: 2.1.1 - individual: 3.0.0 - dev: true - - /brace-expansion/1.1.11: - resolution: - { - integrity: sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==, - } - dependencies: - balanced-match: 1.0.2 - concat-map: 0.0.1 - dev: true - - /bytes/3.1.2: - resolution: - { - integrity: sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==, - } - engines: { node: ">= 0.8" } - dev: false - - /call-bind/1.0.2: - resolution: - { - integrity: sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==, - } - dependencies: - function-bind: 1.1.1 - 
get-intrinsic: 1.1.2 - dev: false - - /chalk/2.4.2: - resolution: - { - integrity: sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==, - } - engines: { node: ">=4" } - dependencies: - ansi-styles: 3.2.1 - escape-string-regexp: 1.0.5 - supports-color: 5.5.0 - dev: true - - /chalk/5.0.1: - resolution: - { - integrity: sha512-Fo07WOYGqMfCWHOzSXOt2CxDbC6skS/jO9ynEcmpANMoPrD+W1r1K6Vx7iNm+AQmETU1Xr2t+n8nzkV9t6xh3w==, - } - engines: { node: ^12.17.0 || ^14.13 || >=16.0.0 } - dev: false - - /color-convert/1.9.3: - resolution: - { - integrity: sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==, - } - dependencies: - color-name: 1.1.3 - dev: true - - /color-name/1.1.3: - resolution: - { - integrity: sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==, - } - dev: true - - /command-exists/1.2.9: - resolution: - { - integrity: sha512-LTQ/SGc+s0Xc0Fu5WaKnR0YiygZkm9eKFvyS+fRsU7/ZWFF8ykFM6Pc9aCVf1+xasOOZpO3BAVgVrKvsqKHV7w==, - } - dev: true - - /comver-to-semver/1.0.0: - resolution: - { - integrity: sha512-gcGtbRxjwROQOdXLUWH1fQAXqThUVRZ219aAwgtX3KfYw429/Zv6EIJRf5TBSzWdAGwePmqH7w70WTaX4MDqag==, - } - engines: { node: ">=12.17" } - dev: true - - /concat-map/0.0.1: - resolution: - { - integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==, - } - dev: true - - /content-disposition/0.5.4: - resolution: - { - integrity: sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==, - } - engines: { node: ">= 0.6" } - dependencies: - safe-buffer: 5.2.1 - dev: false - - /content-type/1.0.4: - resolution: - { - integrity: sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==, - } - engines: { node: ">= 0.6" } - dev: false - - /cookie-signature/1.0.6: - resolution: - { - integrity: sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==, - } - dev: false - - /cookie/0.5.0: - resolution: - { - integrity: sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==, - } - engines: { node: ">= 0.6" } - dev: false - - /cross-spawn/7.0.3: - resolution: - { - integrity: sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==, - } - engines: { node: ">= 8" } - dependencies: - path-key: 3.1.1 - shebang-command: 2.0.0 - which: 2.0.2 - dev: true - - /debug/2.6.9: - resolution: - { - integrity: sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==, - } - dependencies: - ms: 2.0.0 - dev: false - - /depd/2.0.0: - resolution: - { - integrity: sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==, - } - engines: { node: ">= 0.8" } - dev: false - - /dependency-path/9.2.4: - resolution: - { - integrity: sha512-bH29ZcKyo/i5nr4SgnVZGksuoZzroOWpHtKbq8fKdKgJDr0SdUIPu2EwjJkjzbw9SqRzWd912e0opHYJTkFf6w==, - } - engines: { node: ">=14.6" } - dependencies: - "@pnpm/crypto.base32-hash": 1.0.1 - "@pnpm/types": 8.5.0 - encode-registry: 3.0.0 - semver: 7.3.7 - dev: true - - /destroy/1.2.0: - resolution: - { - integrity: sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==, - } - engines: { node: ">= 0.8", npm: 1.2.8000 || >= 1.4.16 } - dev: false - - /detect-indent/6.1.0: - resolution: - { - integrity: 
sha512-reYkTUJAZb9gUuZ2RvVCNhVHdg62RHnJ7WJl8ftMi4diZ6NWlciOzQN88pUhSELEwflJht4oQDv0F0BMlwaYtA==, - } - engines: { node: ">=8" } - dev: true - - /ee-first/1.1.1: - resolution: - { - integrity: sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==, - } - dev: false - - /encode-registry/3.0.0: - resolution: - { - integrity: sha512-2fRYji8K6FwYuQ6EPBKR/J9mcqb7kIoNqt1vGvJr3NrvKfncRiNm00Oxo6gi/YJF8R5Sp2bNFSFdGKTG0rje1Q==, - } - engines: { node: ">=10" } - dependencies: - mem: 8.1.1 - dev: true - - /encodeurl/1.0.2: - resolution: - { - integrity: sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==, - } - engines: { node: ">= 0.8" } - dev: false - - /error-ex/1.3.2: - resolution: - { - integrity: sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==, - } - dependencies: - is-arrayish: 0.2.1 - dev: true - - /escape-html/1.0.3: - resolution: - { - integrity: sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==, - } - dev: false - - /escape-string-regexp/1.0.5: - resolution: - { - integrity: sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==, - } - engines: { node: ">=0.8.0" } - dev: true - - /etag/1.8.1: - resolution: - { - integrity: sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==, - } - engines: { node: ">= 0.6" } - dev: false - - /execa/5.1.1: - resolution: - { - integrity: sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==, - } - engines: { node: ">=10" } - dependencies: - cross-spawn: 7.0.3 - get-stream: 6.0.1 - human-signals: 2.1.0 - is-stream: 2.0.1 - merge-stream: 2.0.0 - npm-run-path: 4.0.1 - onetime: 5.1.2 - signal-exit: 3.0.7 - strip-final-newline: 2.0.0 - dev: true - - /express/4.18.1: - resolution: - { - integrity: sha512-zZBcOX9TfehHQhtupq57OF8lFZ3UZi08Y97dwFCkD8p9d/d2Y3M+ykKcwaMDEL+4qyUolgBDX6AblpR3fL212Q==, - } - engines: { node: ">= 0.10.0" } - dependencies: - accepts: 1.3.8 - array-flatten: 1.1.1 - body-parser: 1.20.0 - content-disposition: 0.5.4 - content-type: 1.0.4 - cookie: 0.5.0 - cookie-signature: 1.0.6 - debug: 2.6.9 - depd: 2.0.0 - encodeurl: 1.0.2 - escape-html: 1.0.3 - etag: 1.8.1 - finalhandler: 1.2.0 - fresh: 0.5.2 - http-errors: 2.0.0 - merge-descriptors: 1.0.1 - methods: 1.1.2 - on-finished: 2.4.1 - parseurl: 1.3.3 - path-to-regexp: 0.1.7 - proxy-addr: 2.0.7 - qs: 6.10.3 - range-parser: 1.2.1 - safe-buffer: 5.2.1 - send: 0.18.0 - serve-static: 1.15.0 - setprototypeof: 1.2.0 - statuses: 2.0.1 - type-is: 1.6.18 - utils-merge: 1.0.1 - vary: 1.1.2 - dev: false - - /fast-deep-equal/3.1.3: - resolution: - { - integrity: sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==, - } - dev: true - - /fast-safe-stringify/2.1.1: - resolution: - { - integrity: sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==, - } - dev: true - - /finalhandler/1.2.0: - resolution: - { - integrity: sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg==, - } - engines: { node: ">= 0.8" } - dependencies: - debug: 2.6.9 - encodeurl: 1.0.2 - escape-html: 1.0.3 - on-finished: 2.4.1 - parseurl: 1.3.3 - statuses: 2.0.1 - unpipe: 1.0.0 - dev: false - - /find-up/5.0.0: - resolution: - { - integrity: 
sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==, - } - engines: { node: ">=10" } - dependencies: - locate-path: 6.0.0 - path-exists: 4.0.0 - dev: true - - /forwarded/0.2.0: - resolution: - { - integrity: sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==, - } - engines: { node: ">= 0.6" } - dev: false - - /fresh/0.5.2: - resolution: - { - integrity: sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==, - } - engines: { node: ">= 0.6" } - dev: false - - /fs.realpath/1.0.0: - resolution: - { - integrity: sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==, - } - dev: true - - /function-bind/1.1.1: - resolution: - { - integrity: sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==, - } - dev: false - - /get-intrinsic/1.1.2: - resolution: - { - integrity: sha512-Jfm3OyCxHh9DJyc28qGk+JmfkpO41A4XkneDSujN9MDXrm4oDKdHvndhZ2dN94+ERNfkYJWDclW6k2L/ZGHjXA==, - } - dependencies: - function-bind: 1.1.1 - has: 1.0.3 - has-symbols: 1.0.3 - dev: false - - /get-stream/6.0.1: - resolution: - { - integrity: sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==, - } - engines: { node: ">=10" } - dev: true - - /glob/7.2.3: - resolution: - { - integrity: sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==, - } - dependencies: - fs.realpath: 1.0.0 - inflight: 1.0.6 - inherits: 2.0.4 - minimatch: 3.1.2 - once: 1.4.0 - path-is-absolute: 1.0.1 - dev: true - - /graceful-fs/4.2.10: - resolution: - { - integrity: sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==, - } - dev: true - - /has-flag/3.0.0: - resolution: - { - integrity: sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==, - } - engines: { node: ">=4" } - dev: true - - /has-symbols/1.0.3: - resolution: - { - integrity: sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==, - } - engines: { node: ">= 0.4" } - dev: false - - /has/1.0.3: - resolution: - { - integrity: sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==, - } - engines: { node: ">= 0.4.0" } - dependencies: - function-bind: 1.1.1 - dev: false - - /http-errors/2.0.0: - resolution: - { - integrity: sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==, - } - engines: { node: ">= 0.8" } - dependencies: - depd: 2.0.0 - inherits: 2.0.4 - setprototypeof: 1.2.0 - statuses: 2.0.1 - toidentifier: 1.0.1 - dev: false - - /human-signals/2.1.0: - resolution: - { - integrity: sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==, - } - engines: { node: ">=10.17.0" } - dev: true - - /iconv-lite/0.4.24: - resolution: - { - integrity: sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==, - } - engines: { node: ">=0.10.0" } - dependencies: - safer-buffer: 2.1.2 - dev: false - - /imurmurhash/0.1.4: - resolution: - { - integrity: sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==, - } - engines: { node: ">=0.8.19" } - dev: true - - /individual/3.0.0: - resolution: - { - integrity: 
sha512-rUY5vtT748NMRbEMrTNiFfy29BgGZwGXUi2NFUVMWQrogSLzlJvQV9eeMWi+g1aVaQ53tpyLAQtd5x/JH0Nh1g==, - } - dev: true - - /inflight/1.0.6: - resolution: - { - integrity: sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==, - } - dependencies: - once: 1.4.0 - wrappy: 1.0.2 - dev: true - - /inherits/2.0.4: - resolution: - { - integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==, - } - dev: false - - /ipaddr.js/1.9.1: - resolution: - { - integrity: sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==, - } - engines: { node: ">= 0.10" } - dev: false - - /is-arrayish/0.2.1: - resolution: - { - integrity: sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==, - } - dev: true - - /is-plain-obj/2.1.0: - resolution: - { - integrity: sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA==, - } - engines: { node: ">=8" } - dev: true - - /is-stream/2.0.1: - resolution: - { - integrity: sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==, - } - engines: { node: ">=8" } - dev: true - - /is-typedarray/1.0.0: - resolution: - { - integrity: sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA==, - } - dev: true - - /is-windows/1.0.2: - resolution: - { - integrity: sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA==, - } - engines: { node: ">=0.10.0" } - dev: true - - /isexe/2.0.0: - resolution: - { - integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==, - } - dev: true - - /js-tokens/4.0.0: - resolution: - { - integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==, - } - dev: true - - /js-yaml/4.1.0: - resolution: - { - integrity: sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==, - } - hasBin: true - dependencies: - argparse: 2.0.1 - dev: true - - /json-parse-even-better-errors/2.3.1: - resolution: - { - integrity: sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==, - } - dev: true - - /json-stringify-safe/5.0.1: - resolution: - { - integrity: sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA==, - } - dev: true - - /json5/2.2.1: - resolution: - { - integrity: sha512-1hqLFMSrGHRHxav9q9gNjJ5EXznIxGVO09xQRrwplcS8qs28pZ8s8hupZAmqDwZUmVZ2Qb2jnyPOWcDH8m8dlA==, - } - engines: { node: ">=6" } - hasBin: true - dev: true - - /lines-and-columns/1.2.4: - resolution: - { - integrity: sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==, - } - dev: true - - /locate-path/6.0.0: - resolution: - { - integrity: sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==, - } - engines: { node: ">=10" } - dependencies: - p-locate: 5.0.0 - dev: true - - /lodash/4.17.21: - resolution: - { - integrity: sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==, - } - dev: false - - /lru-cache/6.0.0: - resolution: - { - integrity: sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==, - } - engines: { node: ">=10" } - dependencies: - yallist: 4.0.0 - dev: true - - /map-age-cleaner/0.1.3: - 
resolution: - { - integrity: sha512-bJzx6nMoP6PDLPBFmg7+xRKeFZvFboMrGlxmNj9ClvX53KrmvM5bXFXEWjbz4cz1AFn+jWJ9z/DJSz7hrs0w3w==, - } - engines: { node: ">=6" } - dependencies: - p-defer: 1.0.0 - dev: true - - /media-typer/0.3.0: - resolution: - { - integrity: sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==, - } - engines: { node: ">= 0.6" } - dev: false - - /mem/8.1.1: - resolution: - { - integrity: sha512-qFCFUDs7U3b8mBDPyz5EToEKoAkgCzqquIgi9nkkR9bixxOVOre+09lbuH7+9Kn2NFpm56M3GUWVbU2hQgdACA==, - } - engines: { node: ">=10" } - dependencies: - map-age-cleaner: 0.1.3 - mimic-fn: 3.1.0 - dev: true - - /merge-descriptors/1.0.1: - resolution: - { - integrity: sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w==, - } - dev: false - - /merge-stream/2.0.0: - resolution: - { - integrity: sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==, - } - dev: true - - /methods/1.1.2: - resolution: - { - integrity: sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==, - } - engines: { node: ">= 0.6" } - dev: false - - /mime-db/1.52.0: - resolution: - { - integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==, - } - engines: { node: ">= 0.6" } - dev: false - - /mime-types/2.1.35: - resolution: - { - integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==, - } - engines: { node: ">= 0.6" } - dependencies: - mime-db: 1.52.0 - dev: false - - /mime/1.6.0: - resolution: - { - integrity: sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==, - } - engines: { node: ">=4" } - hasBin: true - dev: false - - /mimic-fn/2.1.0: - resolution: - { - integrity: sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==, - } - engines: { node: ">=6" } - dev: true - - /mimic-fn/3.1.0: - resolution: - { - integrity: sha512-Ysbi9uYW9hFyfrThdDEQuykN4Ey6BuwPD2kpI5ES/nFTDn/98yxYNLZJcgUAKPT/mcrLLKaGzJR9YVxJrIdASQ==, - } - engines: { node: ">=8" } - dev: true - - /minimatch/3.1.2: - resolution: - { - integrity: sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==, - } - dependencies: - brace-expansion: 1.1.11 - dev: true - - /minimist/1.2.6: - resolution: - { - integrity: sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==, - } - dev: true - - /ms/2.0.0: - resolution: - { - integrity: sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==, - } - dev: false - - /ms/2.1.3: - resolution: - { - integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==, - } - dev: false - - /ndjson/2.0.0: - resolution: - { - integrity: sha512-nGl7LRGrzugTtaFcJMhLbpzJM6XdivmbkdlaGcrk/LXg2KL/YBC6z1g70xh0/al+oFuVFP8N8kiWRucmeEH/qQ==, - } - engines: { node: ">=10" } - hasBin: true - dependencies: - json-stringify-safe: 5.0.1 - minimist: 1.2.6 - readable-stream: 3.6.0 - split2: 3.2.2 - through2: 4.0.2 - dev: true - - /negotiator/0.6.3: - resolution: - { - integrity: sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==, - } - engines: { node: ">= 0.6" } - dev: false - - /normalize-path/3.0.0: - resolution: - { - integrity: 
sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==, - } - engines: { node: ">=0.10.0" } - dev: true - - /npm-run-path/4.0.1: - resolution: - { - integrity: sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==, - } - engines: { node: ">=8" } - dependencies: - path-key: 3.1.1 - dev: true - - /object-inspect/1.12.2: - resolution: - { - integrity: sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ==, - } - dev: false - - /on-finished/2.4.1: - resolution: - { - integrity: sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==, - } - engines: { node: ">= 0.8" } - dependencies: - ee-first: 1.1.1 - dev: false - - /once/1.4.0: - resolution: - { - integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==, - } - dependencies: - wrappy: 1.0.2 - dev: true - - /onetime/5.1.2: - resolution: - { - integrity: sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==, - } - engines: { node: ">=6" } - dependencies: - mimic-fn: 2.1.0 - dev: true - - /p-defer/1.0.0: - resolution: - { - integrity: sha512-wB3wfAxZpk2AzOfUMJNL+d36xothRSyj8EXOa4f6GMqYDN9BJaaSISbsk+wS9abmnebVw95C2Kb5t85UmpCxuw==, - } - engines: { node: ">=4" } - dev: true - - /p-limit/3.1.0: - resolution: - { - integrity: sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==, - } - engines: { node: ">=10" } - dependencies: - yocto-queue: 0.1.0 - dev: true - - /p-locate/5.0.0: - resolution: - { - integrity: sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==, - } - engines: { node: ">=10" } - dependencies: - p-limit: 3.1.0 - dev: true - - /parse-json/5.2.0: - resolution: - { - integrity: sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==, - } - engines: { node: ">=8" } - dependencies: - "@babel/code-frame": 7.18.6 - error-ex: 1.3.2 - json-parse-even-better-errors: 2.3.1 - lines-and-columns: 1.2.4 - dev: true - - /parseurl/1.3.3: - resolution: - { - integrity: sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==, - } - engines: { node: ">= 0.8" } - dev: false - - /path-exists/4.0.0: - resolution: - { - integrity: sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==, - } - engines: { node: ">=8" } - dev: true - - /path-is-absolute/1.0.1: - resolution: - { - integrity: sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==, - } - engines: { node: ">=0.10.0" } - dev: true - - /path-key/3.1.1: - resolution: - { - integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==, - } - engines: { node: ">=8" } - dev: true - - /path-name/1.0.0: - resolution: - { - integrity: sha512-/dcAb5vMXH0f51yvMuSUqFpxUcA8JelbRmE5mW/p4CUJxrNgK24IkstnV7ENtg2IDGBOu6izKTG6eilbnbNKWQ==, - } - dev: true - - /path-to-regexp/0.1.7: - resolution: - { - integrity: sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ==, - } - dev: false - - /proxy-addr/2.0.7: - resolution: - { - integrity: sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==, - } - engines: { node: ">= 0.10" } - dependencies: - forwarded: 0.2.0 - ipaddr.js: 1.9.1 - dev: false - - 
/qs/6.10.3: - resolution: - { - integrity: sha512-wr7M2E0OFRfIfJZjKGieI8lBKb7fRCH4Fv5KNPEs7gJ8jadvotdsS08PzOKR7opXhZ/Xkjtt3WF9g38drmyRqQ==, - } - engines: { node: ">=0.6" } - dependencies: - side-channel: 1.0.4 - dev: false - - /range-parser/1.2.1: - resolution: - { - integrity: sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==, - } - engines: { node: ">= 0.6" } - dev: false - - /raw-body/2.5.1: - resolution: - { - integrity: sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig==, - } - engines: { node: ">= 0.8" } - dependencies: - bytes: 3.1.2 - http-errors: 2.0.0 - iconv-lite: 0.4.24 - unpipe: 1.0.0 - dev: false - - /read-yaml-file/2.1.0: - resolution: - { - integrity: sha512-UkRNRIwnhG+y7hpqnycCL/xbTk7+ia9VuVTC0S+zVbwd65DI9eUpRMfsWIGrCWxTU/mi+JW8cHQCrv+zfCbEPQ==, - } - engines: { node: ">=10.13" } - dependencies: - js-yaml: 4.1.0 - strip-bom: 4.0.0 - dev: true - - /readable-stream/3.6.0: - resolution: - { - integrity: sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==, - } - engines: { node: ">= 6" } - dependencies: - inherits: 2.0.4 - string_decoder: 1.3.0 - util-deprecate: 1.0.2 - dev: true - - /rename-overwrite/4.0.2: - resolution: - { - integrity: sha512-L1sgBgagVgOgb1Z6QZr1yJgSMHI4SXQqAH0l/UbeyHnLKxECvKIlyVEmBo4BqsCAZGg0SBSyjCh68lis5PgC7g==, - } - engines: { node: ">=12.10" } - dependencies: - "@zkochan/rimraf": 2.1.2 - dev: true - - /rfc4648/1.5.2: - resolution: - { - integrity: sha512-tLOizhR6YGovrEBLatX1sdcuhoSCXddw3mqNVAcKxGJ+J0hFeJ+SjeWCv5UPA/WU3YzWPPuCVYgXBKZUPGpKtg==, - } - dev: true - - /rimraf/3.0.2: - resolution: - { - integrity: sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==, - } - hasBin: true - dependencies: - glob: 7.2.3 - dev: true - - /safe-buffer/5.2.1: - resolution: - { - integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==, - } - dev: false - - /safe-execa/0.1.2: - resolution: - { - integrity: sha512-vdTshSQ2JsRCgT8eKZWNJIL26C6bVqy1SOmuCMlKHegVeo8KYRobRrefOdUq9OozSPUUiSxrylteeRmLOMFfWg==, - } - engines: { node: ">=12" } - dependencies: - "@zkochan/which": 2.0.3 - execa: 5.1.1 - path-name: 1.0.0 - dev: true - - /safer-buffer/2.1.2: - resolution: - { - integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==, - } - dev: false - - /semver/7.3.7: - resolution: - { - integrity: sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g==, - } - engines: { node: ">=10" } - hasBin: true - dependencies: - lru-cache: 6.0.0 - dev: true - - /send/0.18.0: - resolution: - { - integrity: sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg==, - } - engines: { node: ">= 0.8.0" } - dependencies: - debug: 2.6.9 - depd: 2.0.0 - destroy: 1.2.0 - encodeurl: 1.0.2 - escape-html: 1.0.3 - etag: 1.8.1 - fresh: 0.5.2 - http-errors: 2.0.0 - mime: 1.6.0 - ms: 2.1.3 - on-finished: 2.4.1 - range-parser: 1.2.1 - statuses: 2.0.1 - dev: false - - /serve-static/1.15.0: - resolution: - { - integrity: sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g==, - } - engines: { node: ">= 0.8.0" } - dependencies: - encodeurl: 1.0.2 - escape-html: 1.0.3 - parseurl: 1.3.3 - send: 0.18.0 - dev: false - - /setprototypeof/1.2.0: - resolution: - { - integrity: 
sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==, - } - dev: false - - /shebang-command/2.0.0: - resolution: - { - integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==, - } - engines: { node: ">=8" } - dependencies: - shebang-regex: 3.0.0 - dev: true - - /shebang-regex/3.0.0: - resolution: - { - integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==, - } - engines: { node: ">=8" } - dev: true - - /side-channel/1.0.4: - resolution: - { - integrity: sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==, - } - dependencies: - call-bind: 1.0.2 - get-intrinsic: 1.1.2 - object-inspect: 1.12.2 - dev: false - - /signal-exit/3.0.7: - resolution: - { - integrity: sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==, - } - dev: true - - /sort-keys/4.2.0: - resolution: - { - integrity: sha512-aUYIEU/UviqPgc8mHR6IW1EGxkAXpeRETYcrzg8cLAvUPZcpAlleSXHV2mY7G12GphSH6Gzv+4MMVSSkbdteHg==, - } - engines: { node: ">=8" } - dependencies: - is-plain-obj: 2.1.0 - dev: true - - /split2/3.2.2: - resolution: - { - integrity: sha512-9NThjpgZnifTkJpzTZ7Eue85S49QwpNhZTq6GRJwObb6jnLFNGB7Qm73V5HewTROPyxD0C29xqmaI68bQtV+hg==, - } - dependencies: - readable-stream: 3.6.0 - dev: true - - /statuses/2.0.1: - resolution: - { - integrity: sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==, - } - engines: { node: ">= 0.8" } - dev: false - - /string_decoder/1.3.0: - resolution: - { - integrity: sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==, - } - dependencies: - safe-buffer: 5.2.1 - dev: true - - /strip-bom/4.0.0: - resolution: - { - integrity: sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==, - } - engines: { node: ">=8" } - dev: true - - /strip-final-newline/2.0.0: - resolution: - { - integrity: sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==, - } - engines: { node: ">=6" } - dev: true - - /supports-color/5.5.0: - resolution: - { - integrity: sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==, - } - engines: { node: ">=4" } - dependencies: - has-flag: 3.0.0 - dev: true - - /through2/4.0.2: - resolution: - { - integrity: sha512-iOqSav00cVxEEICeD7TjLB1sueEL+81Wpzp2bY17uZjZN0pWZPuo4suZ/61VujxmqSGFfgOcNuTZ85QJwNZQpw==, - } - dependencies: - readable-stream: 3.6.0 - dev: true - - /toidentifier/1.0.1: - resolution: - { - integrity: sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==, - } - engines: { node: ">=0.6" } - dev: false - - /type-is/1.6.18: - resolution: - { - integrity: sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==, - } - engines: { node: ">= 0.6" } - dependencies: - media-typer: 0.3.0 - mime-types: 2.1.35 - dev: false - - /typedarray-to-buffer/3.1.5: - resolution: - { - integrity: sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q==, - } - dependencies: - is-typedarray: 1.0.0 - dev: true - - /unpipe/1.0.0: - resolution: - { - integrity: sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==, - } - engines: { node: ">= 0.8" } - dev: false - - /util-deprecate/1.0.2: - resolution: - { - 
integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==, - } - dev: true - - /utils-merge/1.0.1: - resolution: - { - integrity: sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==, - } - engines: { node: ">= 0.4.0" } - dev: false - - /vary/1.1.2: - resolution: - { - integrity: sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==, - } - engines: { node: ">= 0.8" } - dev: false - - /which/2.0.2: - resolution: - { - integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==, - } - engines: { node: ">= 8" } - hasBin: true - dependencies: - isexe: 2.0.0 - dev: true - - /wrappy/1.0.2: - resolution: - { - integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==, - } - dev: true - - /write-file-atomic/3.0.3: - resolution: - { - integrity: sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q==, - } - dependencies: - imurmurhash: 0.1.4 - is-typedarray: 1.0.0 - signal-exit: 3.0.7 - typedarray-to-buffer: 3.1.5 - dev: true - - /write-yaml-file/4.2.0: - resolution: - { - integrity: sha512-LwyucHy0uhWqbrOkh9cBluZBeNVxzHjDaE9mwepZG3n3ZlbM4v3ndrFw51zW/NXYFFqP+QWZ72ihtLWTh05e4Q==, - } - engines: { node: ">=10.13" } - dependencies: - js-yaml: 4.1.0 - write-file-atomic: 3.0.3 - dev: true - - /yallist/4.0.0: - resolution: - { - integrity: sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==, - } - dev: true - - /yocto-queue/0.1.0: - resolution: - { - integrity: sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==, - } - engines: { node: ">=10" } - dev: true diff --git a/cli/internal/lockfile/testdata/pnpm7-workspace.yaml b/cli/internal/lockfile/testdata/pnpm7-workspace.yaml deleted file mode 100644 index 2f7b66319bcbc..0000000000000 --- a/cli/internal/lockfile/testdata/pnpm7-workspace.yaml +++ /dev/null @@ -1,3445 +0,0 @@ -lockfileVersion: 5.4 - -patchedDependencies: - lodash@4.17.21: - hash: ehchni3mpmovsvjxesffg2i5a4 - path: patches/lodash@4.17.21.patch - underscore@1.13.4: - hash: 3pbfs36izefyn2uycmknwkvuuy - path: patches/underscore@1.13.4.patch - -importers: - .: - specifiers: - eslint-config-custom: workspace:* - prettier: latest - turbo: latest - devDependencies: - eslint-config-custom: link:packages/eslint-config-custom - prettier: 2.7.1 - turbo: 1.4.6 - - apps/docs: - specifiers: - "@babel/core": ^7.0.0 - "@types/node": ^17.0.12 - "@types/react": 18.0.17 - dashboard-icons: github:peerigon/dashboard-icons - eslint: 7.32.0 - eslint-config-custom: workspace:* - next: 12.2.5 - next-transpile-modules: 9.0.0 - react: 18.2.0 - react-dom: 18.2.0 - tsconfig: workspace:* - typescript: ^4.5.3 - ui: workspace:* - underscore: ^1.13.4 - dependencies: - dashboard-icons: github.com/peerigon/dashboard-icons/ce27ef933144e09cef3911025f3649040a8571b6 - next: 12.2.5_ir3quccc6i62x6qn6jjhyjjiey - react: 18.2.0 - react-dom: 18.2.0_react@18.2.0 - ui: file:packages/ui - underscore: 1.13.4_3pbfs36izefyn2uycmknwkvuuy - devDependencies: - "@babel/core": 7.19.1 - "@types/node": 17.0.45 - "@types/react": 18.0.17 - eslint: 7.32.0 - eslint-config-custom: link:../../packages/eslint-config-custom - next-transpile-modules: 9.0.0 - tsconfig: link:../../packages/tsconfig - typescript: 4.8.3 - dependenciesMeta: - ui: - injected: true - - apps/web: - specifiers: - "@babel/core": 
^7.0.0 - "@types/node": ^17.0.12 - "@types/react": 18.0.17 - eslint: 7.32.0 - eslint-config-custom: workspace:* - lodash: ^4.17.21 - next: 12.2.5 - next-transpile-modules: 9.0.0 - react: 18.2.0 - react-dom: 18.2.0 - tsconfig: workspace:* - typescript: ^4.5.3 - ui: workspace:* - dependencies: - lodash: 4.17.21_ehchni3mpmovsvjxesffg2i5a4 - next: 12.2.5_ir3quccc6i62x6qn6jjhyjjiey - react: 18.2.0 - react-dom: 18.2.0_react@18.2.0 - ui: link:../../packages/ui - devDependencies: - "@babel/core": 7.19.1 - "@types/node": 17.0.45 - "@types/react": 18.0.17 - eslint: 7.32.0 - eslint-config-custom: link:../../packages/eslint-config-custom - next-transpile-modules: 9.0.0 - tsconfig: link:../../packages/tsconfig - typescript: 4.8.3 - - packages/eslint-config-custom: - specifiers: - eslint: ^7.23.0 - eslint-config-next: ^12.0.8 - eslint-config-prettier: ^8.3.0 - eslint-config-turbo: latest - eslint-plugin-react: 7.31.7 - typescript: ^4.7.4 - dependencies: - eslint: 7.32.0 - eslint-config-next: 12.3.0_dyxdave6dwjbccc5dgiifcmuza - eslint-config-prettier: 8.5.0_eslint@7.32.0 - eslint-config-turbo: 0.0.3_eslint@7.32.0 - eslint-plugin-react: 7.31.7_eslint@7.32.0 - devDependencies: - typescript: 4.8.3 - - packages/tsconfig: - specifiers: {} - - packages/ui: - specifiers: - "@types/react": ^18.0.17 - "@types/react-dom": ^18.0.6 - eslint: ^7.32.0 - eslint-config-custom: workspace:* - react: ^18.2.0 - tsconfig: workspace:* - typescript: ^4.5.2 - devDependencies: - "@types/react": 18.0.20 - "@types/react-dom": 18.0.6 - eslint: 7.32.0 - eslint-config-custom: link:../eslint-config-custom - react: 18.2.0 - tsconfig: link:../tsconfig - typescript: 4.8.3 - -packages: - /@ampproject/remapping/2.2.0: - resolution: - { - integrity: sha512-qRmjj8nj9qmLTQXXmaR1cck3UXSRMPrbsLJAasZpF+t3riI71BXed5ebIOYwQntykeZuhjsdweEc9BxH5Jc26w==, - } - engines: { node: ">=6.0.0" } - dependencies: - "@jridgewell/gen-mapping": 0.1.1 - "@jridgewell/trace-mapping": 0.3.15 - - /@babel/code-frame/7.12.11: - resolution: - { - integrity: sha512-Zt1yodBx1UcyiePMSkWnU4hPqhwq7hGi2nFL1LeA3EUl+q2LQx16MISgJ0+z7dnmgvP9QtIleuETGOiOH1RcIw==, - } - dependencies: - "@babel/highlight": 7.18.6 - - /@babel/code-frame/7.18.6: - resolution: - { - integrity: sha512-TDCmlK5eOvH+eH7cdAFlNXeVJqWIQ7gW9tY1GJIpUtFb6CmjVyq2VM3u71bOyR8CRihcCgMUYoDNyLXao3+70Q==, - } - engines: { node: ">=6.9.0" } - dependencies: - "@babel/highlight": 7.18.6 - - /@babel/compat-data/7.19.1: - resolution: - { - integrity: sha512-72a9ghR0gnESIa7jBN53U32FOVCEoztyIlKaNoU05zRhEecduGK9L9c3ww7Mp06JiR+0ls0GBPFJQwwtjn9ksg==, - } - engines: { node: ">=6.9.0" } - - /@babel/core/7.19.1: - resolution: - { - integrity: sha512-1H8VgqXme4UXCRv7/Wa1bq7RVymKOzC7znjyFM8KiEzwFqcKUKYNoQef4GhdklgNvoBXyW4gYhuBNCM5o1zImw==, - } - engines: { node: ">=6.9.0" } - dependencies: - "@ampproject/remapping": 2.2.0 - "@babel/code-frame": 7.18.6 - "@babel/generator": 7.19.0 - "@babel/helper-compilation-targets": 7.19.1_@babel+core@7.19.1 - "@babel/helper-module-transforms": 7.19.0 - "@babel/helpers": 7.19.0 - "@babel/parser": 7.19.1 - "@babel/template": 7.18.10 - "@babel/traverse": 7.19.1 - "@babel/types": 7.19.0 - convert-source-map: 1.8.0 - debug: 4.3.4 - gensync: 1.0.0-beta.2 - json5: 2.2.1 - semver: 6.3.0 - transitivePeerDependencies: - - supports-color - - /@babel/generator/7.19.0: - resolution: - { - integrity: sha512-S1ahxf1gZ2dpoiFgA+ohK9DIpz50bJ0CWs7Zlzb54Z4sG8qmdIrGrVqmy1sAtTVRb+9CU6U8VqT9L0Zj7hxHVg==, - } - engines: { node: ">=6.9.0" } - dependencies: - "@babel/types": 7.19.0 - "@jridgewell/gen-mapping": 0.3.2 - 
jsesc: 2.5.2 - - /@babel/helper-compilation-targets/7.19.1_@babel+core@7.19.1: - resolution: - { - integrity: sha512-LlLkkqhCMyz2lkQPvJNdIYU7O5YjWRgC2R4omjCTpZd8u8KMQzZvX4qce+/BluN1rcQiV7BoGUpmQ0LeHerbhg==, - } - engines: { node: ">=6.9.0" } - peerDependencies: - "@babel/core": ^7.0.0 - dependencies: - "@babel/compat-data": 7.19.1 - "@babel/core": 7.19.1 - "@babel/helper-validator-option": 7.18.6 - browserslist: 4.21.3 - semver: 6.3.0 - - /@babel/helper-environment-visitor/7.18.9: - resolution: - { - integrity: sha512-3r/aACDJ3fhQ/EVgFy0hpj8oHyHpQc+LPtJoY9SzTThAsStm4Ptegq92vqKoE3vD706ZVFWITnMnxucw+S9Ipg==, - } - engines: { node: ">=6.9.0" } - - /@babel/helper-function-name/7.19.0: - resolution: - { - integrity: sha512-WAwHBINyrpqywkUH0nTnNgI5ina5TFn85HKS0pbPDfxFfhyR/aNQEn4hGi1P1JyT//I0t4OgXUlofzWILRvS5w==, - } - engines: { node: ">=6.9.0" } - dependencies: - "@babel/template": 7.18.10 - "@babel/types": 7.19.0 - - /@babel/helper-hoist-variables/7.18.6: - resolution: - { - integrity: sha512-UlJQPkFqFULIcyW5sbzgbkxn2FKRgwWiRexcuaR8RNJRy8+LLveqPjwZV/bwrLZCN0eUHD/x8D0heK1ozuoo6Q==, - } - engines: { node: ">=6.9.0" } - dependencies: - "@babel/types": 7.19.0 - - /@babel/helper-module-imports/7.18.6: - resolution: - { - integrity: sha512-0NFvs3VkuSYbFi1x2Vd6tKrywq+z/cLeYC/RJNFrIX/30Bf5aiGYbtvGXolEktzJH8o5E5KJ3tT+nkxuuZFVlA==, - } - engines: { node: ">=6.9.0" } - dependencies: - "@babel/types": 7.19.0 - - /@babel/helper-module-transforms/7.19.0: - resolution: - { - integrity: sha512-3HBZ377Fe14RbLIA+ac3sY4PTgpxHVkFrESaWhoI5PuyXPBBX8+C34qblV9G89ZtycGJCmCI/Ut+VUDK4bltNQ==, - } - engines: { node: ">=6.9.0" } - dependencies: - "@babel/helper-environment-visitor": 7.18.9 - "@babel/helper-module-imports": 7.18.6 - "@babel/helper-simple-access": 7.18.6 - "@babel/helper-split-export-declaration": 7.18.6 - "@babel/helper-validator-identifier": 7.19.1 - "@babel/template": 7.18.10 - "@babel/traverse": 7.19.1 - "@babel/types": 7.19.0 - transitivePeerDependencies: - - supports-color - - /@babel/helper-simple-access/7.18.6: - resolution: - { - integrity: sha512-iNpIgTgyAvDQpDj76POqg+YEt8fPxx3yaNBg3S30dxNKm2SWfYhD0TGrK/Eu9wHpUW63VQU894TsTg+GLbUa1g==, - } - engines: { node: ">=6.9.0" } - dependencies: - "@babel/types": 7.19.0 - - /@babel/helper-split-export-declaration/7.18.6: - resolution: - { - integrity: sha512-bde1etTx6ZyTmobl9LLMMQsaizFVZrquTEHOqKeQESMKo4PlObf+8+JA25ZsIpZhT/WEd39+vOdLXAFG/nELpA==, - } - engines: { node: ">=6.9.0" } - dependencies: - "@babel/types": 7.19.0 - - /@babel/helper-string-parser/7.18.10: - resolution: - { - integrity: sha512-XtIfWmeNY3i4t7t4D2t02q50HvqHybPqW2ki1kosnvWCwuCMeo81Jf0gwr85jy/neUdg5XDdeFE/80DXiO+njw==, - } - engines: { node: ">=6.9.0" } - - /@babel/helper-validator-identifier/7.19.1: - resolution: - { - integrity: sha512-awrNfaMtnHUr653GgGEs++LlAvW6w+DcPrOliSMXWCKo597CwL5Acf/wWdNkf/tfEQE3mjkeD1YOVZOUV/od1w==, - } - engines: { node: ">=6.9.0" } - - /@babel/helper-validator-option/7.18.6: - resolution: - { - integrity: sha512-XO7gESt5ouv/LRJdrVjkShckw6STTaB7l9BrpBaAHDeF5YZT+01PCwmR0SJHnkW6i8OwW/EVWRShfi4j2x+KQw==, - } - engines: { node: ">=6.9.0" } - - /@babel/helpers/7.19.0: - resolution: - { - integrity: sha512-DRBCKGwIEdqY3+rPJgG/dKfQy9+08rHIAJx8q2p+HSWP87s2HCrQmaAMMyMll2kIXKCW0cO1RdQskx15Xakftg==, - } - engines: { node: ">=6.9.0" } - dependencies: - "@babel/template": 7.18.10 - "@babel/traverse": 7.19.1 - "@babel/types": 7.19.0 - transitivePeerDependencies: - - supports-color - - /@babel/highlight/7.18.6: - resolution: - { - integrity: 
sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g==, - } - engines: { node: ">=6.9.0" } - dependencies: - "@babel/helper-validator-identifier": 7.19.1 - chalk: 2.4.2 - js-tokens: 4.0.0 - - /@babel/parser/7.19.1: - resolution: - { - integrity: sha512-h7RCSorm1DdTVGJf3P2Mhj3kdnkmF/EiysUkzS2TdgAYqyjFdMQJbVuXOBej2SBJaXan/lIVtT6KkGbyyq753A==, - } - engines: { node: ">=6.0.0" } - hasBin: true - dependencies: - "@babel/types": 7.19.0 - - /@babel/runtime-corejs3/7.19.1: - resolution: - { - integrity: sha512-j2vJGnkopRzH+ykJ8h68wrHnEUmtK//E723jjixiAl/PPf6FhqY/vYRcMVlNydRKQjQsTsYEjpx+DZMIvnGk/g==, - } - engines: { node: ">=6.9.0" } - dependencies: - core-js-pure: 3.25.1 - regenerator-runtime: 0.13.9 - dev: false - - /@babel/runtime/7.19.0: - resolution: - { - integrity: sha512-eR8Lo9hnDS7tqkO7NsV+mKvCmv5boaXFSZ70DnfhcgiEne8hv9oCEd36Klw74EtizEqLsy4YnW8UWwpBVolHZA==, - } - engines: { node: ">=6.9.0" } - dependencies: - regenerator-runtime: 0.13.9 - dev: false - - /@babel/template/7.18.10: - resolution: - { - integrity: sha512-TI+rCtooWHr3QJ27kJxfjutghu44DLnasDMwpDqCXVTal9RLp3RSYNh4NdBrRP2cQAoG9A8juOQl6P6oZG4JxA==, - } - engines: { node: ">=6.9.0" } - dependencies: - "@babel/code-frame": 7.18.6 - "@babel/parser": 7.19.1 - "@babel/types": 7.19.0 - - /@babel/traverse/7.19.1: - resolution: - { - integrity: sha512-0j/ZfZMxKukDaag2PtOPDbwuELqIar6lLskVPPJDjXMXjfLb1Obo/1yjxIGqqAJrmfaTIY3z2wFLAQ7qSkLsuA==, - } - engines: { node: ">=6.9.0" } - dependencies: - "@babel/code-frame": 7.18.6 - "@babel/generator": 7.19.0 - "@babel/helper-environment-visitor": 7.18.9 - "@babel/helper-function-name": 7.19.0 - "@babel/helper-hoist-variables": 7.18.6 - "@babel/helper-split-export-declaration": 7.18.6 - "@babel/parser": 7.19.1 - "@babel/types": 7.19.0 - debug: 4.3.4 - globals: 11.12.0 - transitivePeerDependencies: - - supports-color - - /@babel/types/7.19.0: - resolution: - { - integrity: sha512-YuGopBq3ke25BVSiS6fgF49Ul9gH1x70Bcr6bqRLjWCkcX8Hre1/5+z+IiWOIerRMSSEfGZVB9z9kyq7wVs9YA==, - } - engines: { node: ">=6.9.0" } - dependencies: - "@babel/helper-string-parser": 7.18.10 - "@babel/helper-validator-identifier": 7.19.1 - to-fast-properties: 2.0.0 - - /@eslint/eslintrc/0.4.3: - resolution: - { - integrity: sha512-J6KFFz5QCYUJq3pf0mjEcCJVERbzv71PUIDczuh9JkwGEzced6CO5ADLHB1rbf/+oPBtoPfMYNOpGDzCANlbXw==, - } - engines: { node: ^10.12.0 || >=12.0.0 } - dependencies: - ajv: 6.12.6 - debug: 4.3.4 - espree: 7.3.1 - globals: 13.17.0 - ignore: 4.0.6 - import-fresh: 3.3.0 - js-yaml: 3.14.1 - minimatch: 3.1.2 - strip-json-comments: 3.1.1 - transitivePeerDependencies: - - supports-color - - /@humanwhocodes/config-array/0.5.0: - resolution: - { - integrity: sha512-FagtKFz74XrTl7y6HCzQpwDfXP0yhxe9lHLD1UZxjvZIcbyRz8zTFF/yYNfSfzU414eDwZ1SrO0Qvtyf+wFMQg==, - } - engines: { node: ">=10.10.0" } - dependencies: - "@humanwhocodes/object-schema": 1.2.1 - debug: 4.3.4 - minimatch: 3.1.2 - transitivePeerDependencies: - - supports-color - - /@humanwhocodes/object-schema/1.2.1: - resolution: - { - integrity: sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==, - } - - /@jridgewell/gen-mapping/0.1.1: - resolution: - { - integrity: sha512-sQXCasFk+U8lWYEe66WxRDOE9PjVz4vSM51fTu3Hw+ClTpUSQb718772vH3pyS5pShp6lvQM7SxgIDXXXmOX7w==, - } - engines: { node: ">=6.0.0" } - dependencies: - "@jridgewell/set-array": 1.1.2 - "@jridgewell/sourcemap-codec": 1.4.14 - - /@jridgewell/gen-mapping/0.3.2: - resolution: - { - integrity: 
sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A==, - } - engines: { node: ">=6.0.0" } - dependencies: - "@jridgewell/set-array": 1.1.2 - "@jridgewell/sourcemap-codec": 1.4.14 - "@jridgewell/trace-mapping": 0.3.15 - - /@jridgewell/resolve-uri/3.1.0: - resolution: - { - integrity: sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w==, - } - engines: { node: ">=6.0.0" } - - /@jridgewell/set-array/1.1.2: - resolution: - { - integrity: sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw==, - } - engines: { node: ">=6.0.0" } - - /@jridgewell/sourcemap-codec/1.4.14: - resolution: - { - integrity: sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw==, - } - - /@jridgewell/trace-mapping/0.3.15: - resolution: - { - integrity: sha512-oWZNOULl+UbhsgB51uuZzglikfIKSUBO/M9W2OfEjn7cmqoAiCgmv9lyACTUacZwBz0ITnJ2NqjU8Tx0DHL88g==, - } - dependencies: - "@jridgewell/resolve-uri": 3.1.0 - "@jridgewell/sourcemap-codec": 1.4.14 - - /@next/env/12.2.5: - resolution: - { - integrity: sha512-vLPLV3cpPGjUPT3PjgRj7e3nio9t6USkuew3JE/jMeon/9Mvp1WyR18v3iwnCuX7eUAm1HmAbJHHLAbcu/EJcw==, - } - dev: false - - /@next/eslint-plugin-next/12.3.0: - resolution: - { - integrity: sha512-jVdq1qYTNDjUtulnE8/hkPv0pHILV4jMg5La99iaY/FFm20WxVnsAZtbNnMvlPbf8dc010oO304SX9yXbg5PAw==, - } - dependencies: - glob: 7.1.7 - dev: false - - /@next/swc-android-arm-eabi/12.2.5: - resolution: - { - integrity: sha512-cPWClKxGhgn2dLWnspW+7psl3MoLQUcNqJqOHk2BhNcou9ARDtC0IjQkKe5qcn9qg7I7U83Gp1yh2aesZfZJMA==, - } - engines: { node: ">= 10" } - cpu: [arm] - os: [android] - requiresBuild: true - dev: false - optional: true - - /@next/swc-android-arm64/12.2.5: - resolution: - { - integrity: sha512-vMj0efliXmC5b7p+wfcQCX0AfU8IypjkzT64GiKJD9PgiA3IILNiGJr1fw2lyUDHkjeWx/5HMlMEpLnTsQslwg==, - } - engines: { node: ">= 10" } - cpu: [arm64] - os: [android] - requiresBuild: true - dev: false - optional: true - - /@next/swc-darwin-arm64/12.2.5: - resolution: - { - integrity: sha512-VOPWbO5EFr6snla/WcxUKtvzGVShfs302TEMOtzYyWni6f9zuOetijJvVh9CCTzInnXAZMtHyNhefijA4HMYLg==, - } - engines: { node: ">= 10" } - cpu: [arm64] - os: [darwin] - requiresBuild: true - dev: false - optional: true - - /@next/swc-darwin-x64/12.2.5: - resolution: - { - integrity: sha512-5o8bTCgAmtYOgauO/Xd27vW52G2/m3i5PX7MUYePquxXAnX73AAtqA3WgPXBRitEB60plSKZgOTkcpqrsh546A==, - } - engines: { node: ">= 10" } - cpu: [x64] - os: [darwin] - requiresBuild: true - dev: false - optional: true - - /@next/swc-freebsd-x64/12.2.5: - resolution: - { - integrity: sha512-yYUbyup1JnznMtEBRkK4LT56N0lfK5qNTzr6/DEyDw5TbFVwnuy2hhLBzwCBkScFVjpFdfiC6SQAX3FrAZzuuw==, - } - engines: { node: ">= 10" } - cpu: [x64] - os: [freebsd] - requiresBuild: true - dev: false - optional: true - - /@next/swc-linux-arm-gnueabihf/12.2.5: - resolution: - { - integrity: sha512-2ZE2/G921Acks7UopJZVMgKLdm4vN4U0yuzvAMJ6KBavPzqESA2yHJlm85TV/K9gIjKhSk5BVtauIUntFRP8cg==, - } - engines: { node: ">= 10" } - cpu: [arm] - os: [linux] - requiresBuild: true - dev: false - optional: true - - /@next/swc-linux-arm64-gnu/12.2.5: - resolution: - { - integrity: sha512-/I6+PWVlz2wkTdWqhlSYYJ1pWWgUVva6SgX353oqTh8njNQp1SdFQuWDqk8LnM6ulheVfSsgkDzxrDaAQZnzjQ==, - } - engines: { node: ">= 10" } - cpu: [arm64] - os: [linux] - requiresBuild: true - dev: false - optional: true - - /@next/swc-linux-arm64-musl/12.2.5: - resolution: - { - integrity: 
sha512-LPQRelfX6asXyVr59p5sTpx5l+0yh2Vjp/R8Wi4X9pnqcayqT4CUJLiHqCvZuLin3IsFdisJL0rKHMoaZLRfmg==, - } - engines: { node: ">= 10" } - cpu: [arm64] - os: [linux] - requiresBuild: true - dev: false - optional: true - - /@next/swc-linux-x64-gnu/12.2.5: - resolution: - { - integrity: sha512-0szyAo8jMCClkjNK0hknjhmAngUppoRekW6OAezbEYwHXN/VNtsXbfzgYOqjKWxEx3OoAzrT3jLwAF0HdX2MEw==, - } - engines: { node: ">= 10" } - cpu: [x64] - os: [linux] - requiresBuild: true - dev: false - optional: true - - /@next/swc-linux-x64-musl/12.2.5: - resolution: - { - integrity: sha512-zg/Y6oBar1yVnW6Il1I/08/2ukWtOG6s3acdJdEyIdsCzyQi4RLxbbhkD/EGQyhqBvd3QrC6ZXQEXighQUAZ0g==, - } - engines: { node: ">= 10" } - cpu: [x64] - os: [linux] - requiresBuild: true - dev: false - optional: true - - /@next/swc-win32-arm64-msvc/12.2.5: - resolution: - { - integrity: sha512-3/90DRNSqeeSRMMEhj4gHHQlLhhKg5SCCoYfE3kBjGpE63EfnblYUqsszGGZ9ekpKL/R4/SGB40iCQr8tR5Jiw==, - } - engines: { node: ">= 10" } - cpu: [arm64] - os: [win32] - requiresBuild: true - dev: false - optional: true - - /@next/swc-win32-ia32-msvc/12.2.5: - resolution: - { - integrity: sha512-hGLc0ZRAwnaPL4ulwpp4D2RxmkHQLuI8CFOEEHdzZpS63/hMVzv81g8jzYA0UXbb9pus/iTc3VRbVbAM03SRrw==, - } - engines: { node: ">= 10" } - cpu: [ia32] - os: [win32] - requiresBuild: true - dev: false - optional: true - - /@next/swc-win32-x64-msvc/12.2.5: - resolution: - { - integrity: sha512-7h5/ahY7NeaO2xygqVrSG/Y8Vs4cdjxIjowTZ5W6CKoTKn7tmnuxlUc2h74x06FKmbhAd9agOjr/AOKyxYYm9Q==, - } - engines: { node: ">= 10" } - cpu: [x64] - os: [win32] - requiresBuild: true - dev: false - optional: true - - /@nodelib/fs.scandir/2.1.5: - resolution: - { - integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==, - } - engines: { node: ">= 8" } - dependencies: - "@nodelib/fs.stat": 2.0.5 - run-parallel: 1.2.0 - dev: false - - /@nodelib/fs.stat/2.0.5: - resolution: - { - integrity: sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==, - } - engines: { node: ">= 8" } - dev: false - - /@nodelib/fs.walk/1.2.8: - resolution: - { - integrity: sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==, - } - engines: { node: ">= 8" } - dependencies: - "@nodelib/fs.scandir": 2.1.5 - fastq: 1.13.0 - dev: false - - /@rushstack/eslint-patch/1.1.4: - resolution: - { - integrity: sha512-LwzQKA4vzIct1zNZzBmRKI9QuNpLgTQMEjsQLf3BXuGYb3QPTP4Yjf6mkdX+X1mYttZ808QpOwAzZjv28kq7DA==, - } - dev: false - - /@swc/helpers/0.4.3: - resolution: - { - integrity: sha512-6JrF+fdUK2zbGpJIlN7G3v966PQjyx/dPt1T9km2wj+EUBqgrxCk3uX4Kct16MIm9gGxfKRcfax2hVf5jvlTzA==, - } - dependencies: - tslib: 2.4.0 - dev: false - - /@types/json5/0.0.29: - resolution: - { - integrity: sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==, - } - dev: false - - /@types/node/17.0.45: - resolution: - { - integrity: sha512-w+tIMs3rq2afQdsPJlODhoUEKzFP1ayaoyl1CcnwtIlsVe7K7bA1NGm4s3PraqTLlXnbIN84zuBlxBWo1u9BLw==, - } - dev: true - - /@types/prop-types/15.7.5: - resolution: - { - integrity: sha512-JCB8C6SnDoQf0cNycqd/35A7MjcnK+ZTqE7judS6o7utxUCg6imJg3QK2qzHKszlTjcj2cn+NwMB2i96ubpj7w==, - } - dev: true - - /@types/react-dom/18.0.6: - resolution: - { - integrity: sha512-/5OFZgfIPSwy+YuIBP/FgJnQnsxhZhjjrnxudMddeblOouIodEQ75X14Rr4wGSG/bknL+Omy9iWlLo1u/9GzAA==, - } - dependencies: - "@types/react": 18.0.20 - dev: true - - /@types/react/18.0.17: - resolution: - { - integrity: 
sha512-38ETy4tL+rn4uQQi7mB81G7V1g0u2ryquNmsVIOKUAEIDK+3CUjZ6rSRpdvS99dNBnkLFL83qfmtLacGOTIhwQ==, - } - dependencies: - "@types/prop-types": 15.7.5 - "@types/scheduler": 0.16.2 - csstype: 3.1.1 - dev: true - - /@types/react/18.0.20: - resolution: - { - integrity: sha512-MWul1teSPxujEHVwZl4a5HxQ9vVNsjTchVA+xRqv/VYGCuKGAU6UhfrTdF5aBefwD1BHUD8i/zq+O/vyCm/FrA==, - } - dependencies: - "@types/prop-types": 15.7.5 - "@types/scheduler": 0.16.2 - csstype: 3.1.1 - dev: true - - /@types/scheduler/0.16.2: - resolution: - { - integrity: sha512-hppQEBDmlwhFAXKJX2KnWLYu5yMfi91yazPb2l+lbJiwW+wdo1gNeRA+3RgNSO39WYX2euey41KEwnqesU2Jew==, - } - dev: true - - /@typescript-eslint/parser/5.37.0_dyxdave6dwjbccc5dgiifcmuza: - resolution: - { - integrity: sha512-01VzI/ipYKuaG5PkE5+qyJ6m02fVALmMPY3Qq5BHflDx3y4VobbLdHQkSMg9VPRS4KdNt4oYTMaomFoHonBGAw==, - } - engines: { node: ^12.22.0 || ^14.17.0 || >=16.0.0 } - peerDependencies: - eslint: ^6.0.0 || ^7.0.0 || ^8.0.0 - typescript: "*" - peerDependenciesMeta: - typescript: - optional: true - dependencies: - "@typescript-eslint/scope-manager": 5.37.0 - "@typescript-eslint/types": 5.37.0 - "@typescript-eslint/typescript-estree": 5.37.0_typescript@4.8.3 - debug: 4.3.4 - eslint: 7.32.0 - typescript: 4.8.3 - transitivePeerDependencies: - - supports-color - dev: false - - /@typescript-eslint/scope-manager/5.37.0: - resolution: - { - integrity: sha512-F67MqrmSXGd/eZnujjtkPgBQzgespu/iCZ+54Ok9X5tALb9L2v3G+QBSoWkXG0p3lcTJsL+iXz5eLUEdSiJU9Q==, - } - engines: { node: ^12.22.0 || ^14.17.0 || >=16.0.0 } - dependencies: - "@typescript-eslint/types": 5.37.0 - "@typescript-eslint/visitor-keys": 5.37.0 - dev: false - - /@typescript-eslint/types/5.37.0: - resolution: - { - integrity: sha512-3frIJiTa5+tCb2iqR/bf7XwU20lnU05r/sgPJnRpwvfZaqCJBrl8Q/mw9vr3NrNdB/XtVyMA0eppRMMBqdJ1bA==, - } - engines: { node: ^12.22.0 || ^14.17.0 || >=16.0.0 } - dev: false - - /@typescript-eslint/typescript-estree/5.37.0_typescript@4.8.3: - resolution: - { - integrity: sha512-JkFoFIt/cx59iqEDSgIGnQpCTRv96MQnXCYvJi7QhBC24uyuzbD8wVbajMB1b9x4I0octYFJ3OwjAwNqk1AjDA==, - } - engines: { node: ^12.22.0 || ^14.17.0 || >=16.0.0 } - peerDependencies: - typescript: "*" - peerDependenciesMeta: - typescript: - optional: true - dependencies: - "@typescript-eslint/types": 5.37.0 - "@typescript-eslint/visitor-keys": 5.37.0 - debug: 4.3.4 - globby: 11.1.0 - is-glob: 4.0.3 - semver: 7.3.7 - tsutils: 3.21.0_typescript@4.8.3 - typescript: 4.8.3 - transitivePeerDependencies: - - supports-color - dev: false - - /@typescript-eslint/visitor-keys/5.37.0: - resolution: - { - integrity: sha512-Hp7rT4cENBPIzMwrlehLW/28EVCOcE9U1Z1BQTc8EA8v5qpr7GRGuG+U58V5tTY48zvUOA3KHvw3rA8tY9fbdA==, - } - engines: { node: ^12.22.0 || ^14.17.0 || >=16.0.0 } - dependencies: - "@typescript-eslint/types": 5.37.0 - eslint-visitor-keys: 3.3.0 - dev: false - - /acorn-jsx/5.3.2_acorn@7.4.1: - resolution: - { - integrity: sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==, - } - peerDependencies: - acorn: ^6.0.0 || ^7.0.0 || ^8.0.0 - dependencies: - acorn: 7.4.1 - - /acorn/7.4.1: - resolution: - { - integrity: sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==, - } - engines: { node: ">=0.4.0" } - hasBin: true - - /ajv/6.12.6: - resolution: - { - integrity: sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==, - } - dependencies: - fast-deep-equal: 3.1.3 - fast-json-stable-stringify: 2.1.0 - json-schema-traverse: 0.4.1 - uri-js: 4.4.1 - 
- /ajv/8.11.0: - resolution: - { - integrity: sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg==, - } - dependencies: - fast-deep-equal: 3.1.3 - json-schema-traverse: 1.0.0 - require-from-string: 2.0.2 - uri-js: 4.4.1 - - /ansi-colors/4.1.3: - resolution: - { - integrity: sha512-/6w/C21Pm1A7aZitlI5Ni/2J6FFQN8i1Cvz3kHABAAbw93v/NlvKdVOqz7CCWz/3iv/JplRSEEZ83XION15ovw==, - } - engines: { node: ">=6" } - - /ansi-regex/5.0.1: - resolution: - { - integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==, - } - engines: { node: ">=8" } - - /ansi-styles/3.2.1: - resolution: - { - integrity: sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==, - } - engines: { node: ">=4" } - dependencies: - color-convert: 1.9.3 - - /ansi-styles/4.3.0: - resolution: - { - integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==, - } - engines: { node: ">=8" } - dependencies: - color-convert: 2.0.1 - - /argparse/1.0.10: - resolution: - { - integrity: sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==, - } - dependencies: - sprintf-js: 1.0.3 - - /aria-query/4.2.2: - resolution: - { - integrity: sha512-o/HelwhuKpTj/frsOsbNLNgnNGVIFsVP/SW2BSF14gVl7kAfMOJ6/8wUAUvG1R1NHKrfG+2sHZTu0yauT1qBrA==, - } - engines: { node: ">=6.0" } - dependencies: - "@babel/runtime": 7.19.0 - "@babel/runtime-corejs3": 7.19.1 - dev: false - - /array-includes/3.1.5: - resolution: - { - integrity: sha512-iSDYZMMyTPkiFasVqfuAQnWAYcvO/SeBSCGKePoEthjp4LEMTe4uLc7b025o4jAZpHhihh8xPo99TNWUWWkGDQ==, - } - engines: { node: ">= 0.4" } - dependencies: - call-bind: 1.0.2 - define-properties: 1.1.4 - es-abstract: 1.20.2 - get-intrinsic: 1.1.3 - is-string: 1.0.7 - dev: false - - /array-union/2.1.0: - resolution: - { - integrity: sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==, - } - engines: { node: ">=8" } - dev: false - - /array.prototype.flat/1.3.0: - resolution: - { - integrity: sha512-12IUEkHsAhA4DY5s0FPgNXIdc8VRSqD9Zp78a5au9abH/SOBrsp082JOWFNTjkMozh8mqcdiKuaLGhPeYztxSw==, - } - engines: { node: ">= 0.4" } - dependencies: - call-bind: 1.0.2 - define-properties: 1.1.4 - es-abstract: 1.20.2 - es-shim-unscopables: 1.0.0 - dev: false - - /array.prototype.flatmap/1.3.0: - resolution: - { - integrity: sha512-PZC9/8TKAIxcWKdyeb77EzULHPrIX/tIZebLJUQOMR1OwYosT8yggdfWScfTBCDj5utONvOuPQQumYsU2ULbkg==, - } - engines: { node: ">= 0.4" } - dependencies: - call-bind: 1.0.2 - define-properties: 1.1.4 - es-abstract: 1.20.2 - es-shim-unscopables: 1.0.0 - dev: false - - /ast-types-flow/0.0.7: - resolution: - { - integrity: sha512-eBvWn1lvIApYMhzQMsu9ciLfkBY499mFZlNqG+/9WR7PVlroQw0vG30cOQQbaKz3sCEc44TAOu2ykzqXSNnwag==, - } - dev: false - - /astral-regex/2.0.0: - resolution: - { - integrity: sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ==, - } - engines: { node: ">=8" } - - /axe-core/4.4.3: - resolution: - { - integrity: sha512-32+ub6kkdhhWick/UjvEwRchgoetXqTK14INLqbGm5U2TzBkBNF3nQtLYm8ovxSkQWArjEQvftCKryjZaATu3w==, - } - engines: { node: ">=4" } - dev: false - - /axobject-query/2.2.0: - resolution: - { - integrity: sha512-Td525n+iPOOyUQIeBfcASuG6uJsDOITl7Mds5gFyerkWiX7qhUTdYUBlSgNMyVqtSJqwpt1kXGLdUt6SykLMRA==, - } - dev: false - - /balanced-match/1.0.2: - resolution: - { - integrity: 
sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==, - } - - /brace-expansion/1.1.11: - resolution: - { - integrity: sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==, - } - dependencies: - balanced-match: 1.0.2 - concat-map: 0.0.1 - - /braces/3.0.2: - resolution: - { - integrity: sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==, - } - engines: { node: ">=8" } - dependencies: - fill-range: 7.0.1 - dev: false - - /browserslist/4.21.3: - resolution: - { - integrity: sha512-898rgRXLAyRkM1GryrrBHGkqA5hlpkV5MhtZwg9QXeiyLUYs2k00Un05aX5l2/yJIOObYKOpS2JNo8nJDE7fWQ==, - } - engines: { node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7 } - hasBin: true - dependencies: - caniuse-lite: 1.0.30001399 - electron-to-chromium: 1.4.249 - node-releases: 2.0.6 - update-browserslist-db: 1.0.9_browserslist@4.21.3 - - /call-bind/1.0.2: - resolution: - { - integrity: sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==, - } - dependencies: - function-bind: 1.1.1 - get-intrinsic: 1.1.3 - dev: false - - /callsites/3.1.0: - resolution: - { - integrity: sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==, - } - engines: { node: ">=6" } - - /caniuse-lite/1.0.30001399: - resolution: - { - integrity: sha512-4vQ90tMKS+FkvuVWS5/QY1+d805ODxZiKFzsU8o/RsVJz49ZSRR8EjykLJbqhzdPgadbX6wB538wOzle3JniRA==, - } - - /chalk/2.4.2: - resolution: - { - integrity: sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==, - } - engines: { node: ">=4" } - dependencies: - ansi-styles: 3.2.1 - escape-string-regexp: 1.0.5 - supports-color: 5.5.0 - - /chalk/4.1.2: - resolution: - { - integrity: sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==, - } - engines: { node: ">=10" } - dependencies: - ansi-styles: 4.3.0 - supports-color: 7.2.0 - - /color-convert/1.9.3: - resolution: - { - integrity: sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==, - } - dependencies: - color-name: 1.1.3 - - /color-convert/2.0.1: - resolution: - { - integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==, - } - engines: { node: ">=7.0.0" } - dependencies: - color-name: 1.1.4 - - /color-name/1.1.3: - resolution: - { - integrity: sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==, - } - - /color-name/1.1.4: - resolution: - { - integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==, - } - - /concat-map/0.0.1: - resolution: - { - integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==, - } - - /convert-source-map/1.8.0: - resolution: - { - integrity: sha512-+OQdjP49zViI/6i7nIJpA8rAl4sV/JdPfU9nZs3VqOwGIgizICvuN2ru6fMd+4llL0tar18UYJXfZ/TWtmhUjA==, - } - dependencies: - safe-buffer: 5.1.2 - - /core-js-pure/3.25.1: - resolution: - { - integrity: sha512-7Fr74bliUDdeJCBMxkkIuQ4xfxn/SwrVg+HkJUAoNEXVqYLv55l6Af0dJ5Lq2YBUW9yKqSkLXaS5SYPK6MGa/A==, - } - requiresBuild: true - dev: false - - /cross-spawn/7.0.3: - resolution: - { - integrity: sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==, - } - engines: { node: ">= 8" } - dependencies: - path-key: 3.1.1 - shebang-command: 2.0.0 - 
which: 2.0.2 - - /csstype/3.1.1: - resolution: - { - integrity: sha512-DJR/VvkAvSZW9bTouZue2sSxDwdTN92uHjqeKVm+0dAqdfNykRzQ95tay8aXMBAAPpUiq4Qcug2L7neoRh2Egw==, - } - dev: true - - /damerau-levenshtein/1.0.8: - resolution: - { - integrity: sha512-sdQSFB7+llfUcQHUQO3+B8ERRj0Oa4w9POWMI/puGtuf7gFywGmkaLCElnudfTiKZV+NvHqL0ifzdrI8Ro7ESA==, - } - dev: false - - /debug/2.6.9: - resolution: - { - integrity: sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==, - } - peerDependencies: - supports-color: "*" - peerDependenciesMeta: - supports-color: - optional: true - dependencies: - ms: 2.0.0 - dev: false - - /debug/3.2.7: - resolution: - { - integrity: sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==, - } - peerDependencies: - supports-color: "*" - peerDependenciesMeta: - supports-color: - optional: true - dependencies: - ms: 2.1.3 - dev: false - - /debug/4.3.4: - resolution: - { - integrity: sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==, - } - engines: { node: ">=6.0" } - peerDependencies: - supports-color: "*" - peerDependenciesMeta: - supports-color: - optional: true - dependencies: - ms: 2.1.2 - - /deep-is/0.1.4: - resolution: - { - integrity: sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==, - } - - /define-properties/1.1.4: - resolution: - { - integrity: sha512-uckOqKcfaVvtBdsVkdPv3XjveQJsNQqmhXgRi8uhvWWuPYZCNlzT8qAyblUgNoXdHdjMTzAqeGjAoli8f+bzPA==, - } - engines: { node: ">= 0.4" } - dependencies: - has-property-descriptors: 1.0.0 - object-keys: 1.1.1 - dev: false - - /dir-glob/3.0.1: - resolution: - { - integrity: sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==, - } - engines: { node: ">=8" } - dependencies: - path-type: 4.0.0 - dev: false - - /doctrine/2.1.0: - resolution: - { - integrity: sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==, - } - engines: { node: ">=0.10.0" } - dependencies: - esutils: 2.0.3 - dev: false - - /doctrine/3.0.0: - resolution: - { - integrity: sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==, - } - engines: { node: ">=6.0.0" } - dependencies: - esutils: 2.0.3 - - /electron-to-chromium/1.4.249: - resolution: - { - integrity: sha512-GMCxR3p2HQvIw47A599crTKYZprqihoBL4lDSAUmr7IYekXFK5t/WgEBrGJDCa2HWIZFQEkGuMqPCi05ceYqPQ==, - } - - /emoji-regex/8.0.0: - resolution: - { - integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==, - } - - /emoji-regex/9.2.2: - resolution: - { - integrity: sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==, - } - dev: false - - /enhanced-resolve/5.10.0: - resolution: - { - integrity: sha512-T0yTFjdpldGY8PmuXXR0PyQ1ufZpEGiHVrp7zHKB7jdR4qlmZHhONVM5AQOAWXuF/w3dnHbEQVrNptJgt7F+cQ==, - } - engines: { node: ">=10.13.0" } - dependencies: - graceful-fs: 4.2.10 - tapable: 2.2.1 - dev: true - - /enquirer/2.3.6: - resolution: - { - integrity: sha512-yjNnPr315/FjS4zIsUxYguYUPP2e1NK4d7E7ZOLiyYCcbFBiTMyID+2wvm2w6+pZ/odMA7cRkjhsPbltwBOrLg==, - } - engines: { node: ">=8.6" } - dependencies: - ansi-colors: 4.1.3 - - /es-abstract/1.20.2: - resolution: - { - integrity: sha512-XxXQuVNrySBNlEkTYJoDNFe5+s2yIOpzq80sUHEdPdQr0S5nTLz4ZPPPswNIpKseDDUS5yghX1gfLIHQZ1iNuQ==, - } - engines: { node: ">= 0.4" } - dependencies: - call-bind: 1.0.2 - 
es-to-primitive: 1.2.1 - function-bind: 1.1.1 - function.prototype.name: 1.1.5 - get-intrinsic: 1.1.3 - get-symbol-description: 1.0.0 - has: 1.0.3 - has-property-descriptors: 1.0.0 - has-symbols: 1.0.3 - internal-slot: 1.0.3 - is-callable: 1.2.5 - is-negative-zero: 2.0.2 - is-regex: 1.1.4 - is-shared-array-buffer: 1.0.2 - is-string: 1.0.7 - is-weakref: 1.0.2 - object-inspect: 1.12.2 - object-keys: 1.1.1 - object.assign: 4.1.4 - regexp.prototype.flags: 1.4.3 - string.prototype.trimend: 1.0.5 - string.prototype.trimstart: 1.0.5 - unbox-primitive: 1.0.2 - dev: false - - /es-shim-unscopables/1.0.0: - resolution: - { - integrity: sha512-Jm6GPcCdC30eMLbZ2x8z2WuRwAws3zTBBKuusffYVUrNj/GVSUAZ+xKMaUpfNDR5IbyNA5LJbaecoUVbmUcB1w==, - } - dependencies: - has: 1.0.3 - dev: false - - /es-to-primitive/1.2.1: - resolution: - { - integrity: sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==, - } - engines: { node: ">= 0.4" } - dependencies: - is-callable: 1.2.5 - is-date-object: 1.0.5 - is-symbol: 1.0.4 - dev: false - - /escalade/3.1.1: - resolution: - { - integrity: sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==, - } - engines: { node: ">=6" } - - /escape-string-regexp/1.0.5: - resolution: - { - integrity: sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==, - } - engines: { node: ">=0.8.0" } - - /escape-string-regexp/4.0.0: - resolution: - { - integrity: sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==, - } - engines: { node: ">=10" } - - /eslint-config-next/12.3.0_dyxdave6dwjbccc5dgiifcmuza: - resolution: - { - integrity: sha512-guHSkNyKnTBB8HU35COgAMeMV0E026BiYRYvyEVVaTOeFcnU3i1EI8/Da0Rl7H3Sgua5FEvoA0vYd2s8kdIUXg==, - } - peerDependencies: - eslint: ^7.23.0 || ^8.0.0 - typescript: ">=3.3.1" - peerDependenciesMeta: - typescript: - optional: true - dependencies: - "@next/eslint-plugin-next": 12.3.0 - "@rushstack/eslint-patch": 1.1.4 - "@typescript-eslint/parser": 5.37.0_dyxdave6dwjbccc5dgiifcmuza - eslint: 7.32.0 - eslint-import-resolver-node: 0.3.6 - eslint-import-resolver-typescript: 2.7.1_hpmu7kn6tcn2vnxpfzvv33bxmy - eslint-plugin-import: 2.26.0_xag76ci373f5hzfwsxolrbhy4a - eslint-plugin-jsx-a11y: 6.6.1_eslint@7.32.0 - eslint-plugin-react: 7.31.7_eslint@7.32.0 - eslint-plugin-react-hooks: 4.6.0_eslint@7.32.0 - typescript: 4.8.3 - transitivePeerDependencies: - - eslint-import-resolver-webpack - - supports-color - dev: false - - /eslint-config-prettier/8.5.0_eslint@7.32.0: - resolution: - { - integrity: sha512-obmWKLUNCnhtQRKc+tmnYuQl0pFU1ibYJQ5BGhTVB08bHe9wC8qUeG7c08dj9XX+AuPj1YSGSQIHl1pnDHZR0Q==, - } - hasBin: true - peerDependencies: - eslint: ">=7.0.0" - dependencies: - eslint: 7.32.0 - dev: false - - /eslint-config-turbo/0.0.3_eslint@7.32.0: - resolution: - { - integrity: sha512-hK5MlxDugUWZV9ZKcyfNwLXrlMuM2wPgAUk51cUFBC3nXRCVmCA9uSRFBZsyAIurN1wH7mS7G1NBo5F8VkF7lQ==, - } - peerDependencies: - eslint: ^7.23.0 || ^8.0.0 - dependencies: - eslint: 7.32.0 - eslint-plugin-turbo: 0.0.3_eslint@7.32.0 - dev: false - - /eslint-import-resolver-node/0.3.6: - resolution: - { - integrity: sha512-0En0w03NRVMn9Uiyn8YRPDKvWjxCWkslUEhGNTdGx15RvPJYQ+lbOlqrlNI2vEAs4pDYK4f/HN2TbDmk5TP0iw==, - } - dependencies: - debug: 3.2.7 - resolve: 1.22.1 - transitivePeerDependencies: - - supports-color - dev: false - - /eslint-import-resolver-typescript/2.7.1_hpmu7kn6tcn2vnxpfzvv33bxmy: - resolution: - { - integrity: 
sha512-00UbgGwV8bSgUv34igBDbTOtKhqoRMy9bFjNehT40bXg6585PNIct8HhXZ0SybqB9rWtXj9crcku8ndDn/gIqQ==, - } - engines: { node: ">=4" } - peerDependencies: - eslint: "*" - eslint-plugin-import: "*" - dependencies: - debug: 4.3.4 - eslint: 7.32.0 - eslint-plugin-import: 2.26.0_xag76ci373f5hzfwsxolrbhy4a - glob: 7.2.3 - is-glob: 4.0.3 - resolve: 1.22.1 - tsconfig-paths: 3.14.1 - transitivePeerDependencies: - - supports-color - dev: false - - /eslint-module-utils/2.7.4_qk4u2ghovatg5ueomqmuln4u2e: - resolution: - { - integrity: sha512-j4GT+rqzCoRKHwURX7pddtIPGySnX9Si/cgMI5ztrcqOPtk5dDEeZ34CQVPphnqkJytlc97Vuk05Um2mJ3gEQA==, - } - engines: { node: ">=4" } - peerDependencies: - "@typescript-eslint/parser": "*" - eslint: "*" - eslint-import-resolver-node: "*" - eslint-import-resolver-typescript: "*" - eslint-import-resolver-webpack: "*" - peerDependenciesMeta: - "@typescript-eslint/parser": - optional: true - eslint: - optional: true - eslint-import-resolver-node: - optional: true - eslint-import-resolver-typescript: - optional: true - eslint-import-resolver-webpack: - optional: true - dependencies: - "@typescript-eslint/parser": 5.37.0_dyxdave6dwjbccc5dgiifcmuza - debug: 3.2.7 - eslint: 7.32.0 - eslint-import-resolver-node: 0.3.6 - eslint-import-resolver-typescript: 2.7.1_hpmu7kn6tcn2vnxpfzvv33bxmy - transitivePeerDependencies: - - supports-color - dev: false - - /eslint-plugin-import/2.26.0_xag76ci373f5hzfwsxolrbhy4a: - resolution: - { - integrity: sha512-hYfi3FXaM8WPLf4S1cikh/r4IxnO6zrhZbEGz2b660EJRbuxgpDS5gkCuYgGWg2xxh2rBuIr4Pvhve/7c31koA==, - } - engines: { node: ">=4" } - peerDependencies: - "@typescript-eslint/parser": "*" - eslint: ^2 || ^3 || ^4 || ^5 || ^6 || ^7.2.0 || ^8 - peerDependenciesMeta: - "@typescript-eslint/parser": - optional: true - dependencies: - "@typescript-eslint/parser": 5.37.0_dyxdave6dwjbccc5dgiifcmuza - array-includes: 3.1.5 - array.prototype.flat: 1.3.0 - debug: 2.6.9 - doctrine: 2.1.0 - eslint: 7.32.0 - eslint-import-resolver-node: 0.3.6 - eslint-module-utils: 2.7.4_qk4u2ghovatg5ueomqmuln4u2e - has: 1.0.3 - is-core-module: 2.10.0 - is-glob: 4.0.3 - minimatch: 3.1.2 - object.values: 1.1.5 - resolve: 1.22.1 - tsconfig-paths: 3.14.1 - transitivePeerDependencies: - - eslint-import-resolver-typescript - - eslint-import-resolver-webpack - - supports-color - dev: false - - /eslint-plugin-jsx-a11y/6.6.1_eslint@7.32.0: - resolution: - { - integrity: sha512-sXgFVNHiWffBq23uiS/JaP6eVR622DqwB4yTzKvGZGcPq6/yZ3WmOZfuBks/vHWo9GaFOqC2ZK4i6+C35knx7Q==, - } - engines: { node: ">=4.0" } - peerDependencies: - eslint: ^3 || ^4 || ^5 || ^6 || ^7 || ^8 - dependencies: - "@babel/runtime": 7.19.0 - aria-query: 4.2.2 - array-includes: 3.1.5 - ast-types-flow: 0.0.7 - axe-core: 4.4.3 - axobject-query: 2.2.0 - damerau-levenshtein: 1.0.8 - emoji-regex: 9.2.2 - eslint: 7.32.0 - has: 1.0.3 - jsx-ast-utils: 3.3.3 - language-tags: 1.0.5 - minimatch: 3.1.2 - semver: 6.3.0 - dev: false - - /eslint-plugin-react-hooks/4.6.0_eslint@7.32.0: - resolution: - { - integrity: sha512-oFc7Itz9Qxh2x4gNHStv3BqJq54ExXmfC+a1NjAta66IAN87Wu0R/QArgIS9qKzX3dXKPI9H5crl9QchNMY9+g==, - } - engines: { node: ">=10" } - peerDependencies: - eslint: ^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0-0 - dependencies: - eslint: 7.32.0 - dev: false - - /eslint-plugin-react/7.31.7_eslint@7.32.0: - resolution: - { - integrity: sha512-8NldBTeYp/kQoTV1uT0XF6HcmDqbgZ0lNPkN0wlRw8DJKXEnaWu+oh/6gt3xIhzvQ35wB2Y545fJhIbJSZ2NNw==, - } - engines: { node: ">=4" } - peerDependencies: - eslint: ^3 || ^4 || ^5 || ^6 || ^7 || ^8 - dependencies: - 
array-includes: 3.1.5 - array.prototype.flatmap: 1.3.0 - doctrine: 2.1.0 - eslint: 7.32.0 - estraverse: 5.3.0 - jsx-ast-utils: 3.3.3 - minimatch: 3.1.2 - object.entries: 1.1.5 - object.fromentries: 2.0.5 - object.hasown: 1.1.1 - object.values: 1.1.5 - prop-types: 15.8.1 - resolve: 2.0.0-next.4 - semver: 6.3.0 - string.prototype.matchall: 4.0.7 - dev: false - - /eslint-plugin-turbo/0.0.3_eslint@7.32.0: - resolution: - { - integrity: sha512-QjidATGxWtaB9QUrD3NocUySmsgWKZlBMFlw4kX2IIjRLAxMPwukk90h3ZTaNXyRHuaQsrEgh7hhlCZoxP0TTw==, - } - peerDependencies: - eslint: ^7.23.0 || ^8.0.0 - dependencies: - eslint: 7.32.0 - dev: false - - /eslint-scope/5.1.1: - resolution: - { - integrity: sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==, - } - engines: { node: ">=8.0.0" } - dependencies: - esrecurse: 4.3.0 - estraverse: 4.3.0 - - /eslint-utils/2.1.0: - resolution: - { - integrity: sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg==, - } - engines: { node: ">=6" } - dependencies: - eslint-visitor-keys: 1.3.0 - - /eslint-visitor-keys/1.3.0: - resolution: - { - integrity: sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==, - } - engines: { node: ">=4" } - - /eslint-visitor-keys/2.1.0: - resolution: - { - integrity: sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw==, - } - engines: { node: ">=10" } - - /eslint-visitor-keys/3.3.0: - resolution: - { - integrity: sha512-mQ+suqKJVyeuwGYHAdjMFqjCyfl8+Ldnxuyp3ldiMBFKkvytrXUZWaiPCEav8qDHKty44bD+qV1IP4T+w+xXRA==, - } - engines: { node: ^12.22.0 || ^14.17.0 || >=16.0.0 } - dev: false - - /eslint/7.32.0: - resolution: - { - integrity: sha512-VHZ8gX+EDfz+97jGcgyGCyRia/dPOd6Xh9yPv8Bl1+SoaIwD+a/vlrOmGRUyOYu7MwUhc7CxqeaDZU13S4+EpA==, - } - engines: { node: ^10.12.0 || >=12.0.0 } - hasBin: true - dependencies: - "@babel/code-frame": 7.12.11 - "@eslint/eslintrc": 0.4.3 - "@humanwhocodes/config-array": 0.5.0 - ajv: 6.12.6 - chalk: 4.1.2 - cross-spawn: 7.0.3 - debug: 4.3.4 - doctrine: 3.0.0 - enquirer: 2.3.6 - escape-string-regexp: 4.0.0 - eslint-scope: 5.1.1 - eslint-utils: 2.1.0 - eslint-visitor-keys: 2.1.0 - espree: 7.3.1 - esquery: 1.4.0 - esutils: 2.0.3 - fast-deep-equal: 3.1.3 - file-entry-cache: 6.0.1 - functional-red-black-tree: 1.0.1 - glob-parent: 5.1.2 - globals: 13.17.0 - ignore: 4.0.6 - import-fresh: 3.3.0 - imurmurhash: 0.1.4 - is-glob: 4.0.3 - js-yaml: 3.14.1 - json-stable-stringify-without-jsonify: 1.0.1 - levn: 0.4.1 - lodash.merge: 4.6.2 - minimatch: 3.1.2 - natural-compare: 1.4.0 - optionator: 0.9.1 - progress: 2.0.3 - regexpp: 3.2.0 - semver: 7.3.7 - strip-ansi: 6.0.1 - strip-json-comments: 3.1.1 - table: 6.8.0 - text-table: 0.2.0 - v8-compile-cache: 2.3.0 - transitivePeerDependencies: - - supports-color - - /espree/7.3.1: - resolution: - { - integrity: sha512-v3JCNCE64umkFpmkFGqzVKsOT0tN1Zr+ueqLZfpV1Ob8e+CEgPWa+OxCoGH3tnhimMKIaBm4m/vaRpJ/krRz2g==, - } - engines: { node: ^10.12.0 || >=12.0.0 } - dependencies: - acorn: 7.4.1 - acorn-jsx: 5.3.2_acorn@7.4.1 - eslint-visitor-keys: 1.3.0 - - /esprima/4.0.1: - resolution: - { - integrity: sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==, - } - engines: { node: ">=4" } - hasBin: true - - /esquery/1.4.0: - resolution: - { - integrity: sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w==, - } - engines: { node: ">=0.10" } - dependencies: - 
estraverse: 5.3.0 - - /esrecurse/4.3.0: - resolution: - { - integrity: sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==, - } - engines: { node: ">=4.0" } - dependencies: - estraverse: 5.3.0 - - /estraverse/4.3.0: - resolution: - { - integrity: sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==, - } - engines: { node: ">=4.0" } - - /estraverse/5.3.0: - resolution: - { - integrity: sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==, - } - engines: { node: ">=4.0" } - - /esutils/2.0.3: - resolution: - { - integrity: sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==, - } - engines: { node: ">=0.10.0" } - - /fast-deep-equal/3.1.3: - resolution: - { - integrity: sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==, - } - - /fast-glob/3.2.12: - resolution: - { - integrity: sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w==, - } - engines: { node: ">=8.6.0" } - dependencies: - "@nodelib/fs.stat": 2.0.5 - "@nodelib/fs.walk": 1.2.8 - glob-parent: 5.1.2 - merge2: 1.4.1 - micromatch: 4.0.5 - dev: false - - /fast-json-stable-stringify/2.1.0: - resolution: - { - integrity: sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==, - } - - /fast-levenshtein/2.0.6: - resolution: - { - integrity: sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==, - } - - /fastq/1.13.0: - resolution: - { - integrity: sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw==, - } - dependencies: - reusify: 1.0.4 - dev: false - - /file-entry-cache/6.0.1: - resolution: - { - integrity: sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==, - } - engines: { node: ^10.12.0 || >=12.0.0 } - dependencies: - flat-cache: 3.0.4 - - /fill-range/7.0.1: - resolution: - { - integrity: sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==, - } - engines: { node: ">=8" } - dependencies: - to-regex-range: 5.0.1 - dev: false - - /flat-cache/3.0.4: - resolution: - { - integrity: sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg==, - } - engines: { node: ^10.12.0 || >=12.0.0 } - dependencies: - flatted: 3.2.7 - rimraf: 3.0.2 - - /flatted/3.2.7: - resolution: - { - integrity: sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ==, - } - - /fs.realpath/1.0.0: - resolution: - { - integrity: sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==, - } - - /function-bind/1.1.1: - resolution: - { - integrity: sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==, - } - dev: false - - /function.prototype.name/1.1.5: - resolution: - { - integrity: sha512-uN7m/BzVKQnCUF/iW8jYea67v++2u7m5UgENbHRtdDVclOUP+FMPlCNdmk0h/ysGyo2tavMJEDqJAkJdRa1vMA==, - } - engines: { node: ">= 0.4" } - dependencies: - call-bind: 1.0.2 - define-properties: 1.1.4 - es-abstract: 1.20.2 - functions-have-names: 1.2.3 - dev: false - - /functional-red-black-tree/1.0.1: - resolution: - { - integrity: sha512-dsKNQNdj6xA3T+QlADDA7mOSlX0qiMINjn0cgr+eGHGsbSHzTabcIogz2+p/iqP1Xs6EP/sS2SbqH+brGTbq0g==, - } - - /functions-have-names/1.2.3: - resolution: - { - 
integrity: sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==, - } - dev: false - - /gensync/1.0.0-beta.2: - resolution: - { - integrity: sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==, - } - engines: { node: ">=6.9.0" } - - /get-intrinsic/1.1.3: - resolution: - { - integrity: sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A==, - } - dependencies: - function-bind: 1.1.1 - has: 1.0.3 - has-symbols: 1.0.3 - dev: false - - /get-symbol-description/1.0.0: - resolution: - { - integrity: sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw==, - } - engines: { node: ">= 0.4" } - dependencies: - call-bind: 1.0.2 - get-intrinsic: 1.1.3 - dev: false - - /glob-parent/5.1.2: - resolution: - { - integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==, - } - engines: { node: ">= 6" } - dependencies: - is-glob: 4.0.3 - - /glob/7.1.7: - resolution: - { - integrity: sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ==, - } - dependencies: - fs.realpath: 1.0.0 - inflight: 1.0.6 - inherits: 2.0.4 - minimatch: 3.1.2 - once: 1.4.0 - path-is-absolute: 1.0.1 - dev: false - - /glob/7.2.3: - resolution: - { - integrity: sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==, - } - dependencies: - fs.realpath: 1.0.0 - inflight: 1.0.6 - inherits: 2.0.4 - minimatch: 3.1.2 - once: 1.4.0 - path-is-absolute: 1.0.1 - - /globals/11.12.0: - resolution: - { - integrity: sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==, - } - engines: { node: ">=4" } - - /globals/13.17.0: - resolution: - { - integrity: sha512-1C+6nQRb1GwGMKm2dH/E7enFAMxGTmGI7/dEdhy/DNelv85w9B72t3uc5frtMNXIbzrarJJ/lTCjcaZwbLJmyw==, - } - engines: { node: ">=8" } - dependencies: - type-fest: 0.20.2 - - /globby/11.1.0: - resolution: - { - integrity: sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==, - } - engines: { node: ">=10" } - dependencies: - array-union: 2.1.0 - dir-glob: 3.0.1 - fast-glob: 3.2.12 - ignore: 5.2.0 - merge2: 1.4.1 - slash: 3.0.0 - dev: false - - /graceful-fs/4.2.10: - resolution: - { - integrity: sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==, - } - dev: true - - /has-bigints/1.0.2: - resolution: - { - integrity: sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ==, - } - dev: false - - /has-flag/3.0.0: - resolution: - { - integrity: sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==, - } - engines: { node: ">=4" } - - /has-flag/4.0.0: - resolution: - { - integrity: sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==, - } - engines: { node: ">=8" } - - /has-property-descriptors/1.0.0: - resolution: - { - integrity: sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ==, - } - dependencies: - get-intrinsic: 1.1.3 - dev: false - - /has-symbols/1.0.3: - resolution: - { - integrity: sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==, - } - engines: { node: ">= 0.4" } - dev: false - - /has-tostringtag/1.0.0: - resolution: - { - integrity: 
sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==, - } - engines: { node: ">= 0.4" } - dependencies: - has-symbols: 1.0.3 - dev: false - - /has/1.0.3: - resolution: - { - integrity: sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==, - } - engines: { node: ">= 0.4.0" } - dependencies: - function-bind: 1.1.1 - dev: false - - /ignore/4.0.6: - resolution: - { - integrity: sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==, - } - engines: { node: ">= 4" } - - /ignore/5.2.0: - resolution: - { - integrity: sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ==, - } - engines: { node: ">= 4" } - dev: false - - /import-fresh/3.3.0: - resolution: - { - integrity: sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==, - } - engines: { node: ">=6" } - dependencies: - parent-module: 1.0.1 - resolve-from: 4.0.0 - - /imurmurhash/0.1.4: - resolution: - { - integrity: sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==, - } - engines: { node: ">=0.8.19" } - - /inflight/1.0.6: - resolution: - { - integrity: sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==, - } - dependencies: - once: 1.4.0 - wrappy: 1.0.2 - - /inherits/2.0.4: - resolution: - { - integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==, - } - - /internal-slot/1.0.3: - resolution: - { - integrity: sha512-O0DB1JC/sPyZl7cIo78n5dR7eUSwwpYPiXRhTzNxZVAMUuB8vlnRFyLxdrVToks6XPLVnFfbzaVd5WLjhgg+vA==, - } - engines: { node: ">= 0.4" } - dependencies: - get-intrinsic: 1.1.3 - has: 1.0.3 - side-channel: 1.0.4 - dev: false - - /is-bigint/1.0.4: - resolution: - { - integrity: sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg==, - } - dependencies: - has-bigints: 1.0.2 - dev: false - - /is-boolean-object/1.1.2: - resolution: - { - integrity: sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA==, - } - engines: { node: ">= 0.4" } - dependencies: - call-bind: 1.0.2 - has-tostringtag: 1.0.0 - dev: false - - /is-callable/1.2.5: - resolution: - { - integrity: sha512-ZIWRujF6MvYGkEuHMYtFRkL2wAtFw89EHfKlXrkPkjQZZRWeh9L1q3SV13NIfHnqxugjLvAOkEHx9mb1zcMnEw==, - } - engines: { node: ">= 0.4" } - dev: false - - /is-core-module/2.10.0: - resolution: - { - integrity: sha512-Erxj2n/LDAZ7H8WNJXd9tw38GYM3dv8rk8Zcs+jJuxYTW7sozH+SS8NtrSjVL1/vpLvWi1hxy96IzjJ3EHTJJg==, - } - dependencies: - has: 1.0.3 - dev: false - - /is-date-object/1.0.5: - resolution: - { - integrity: sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ==, - } - engines: { node: ">= 0.4" } - dependencies: - has-tostringtag: 1.0.0 - dev: false - - /is-extglob/2.1.1: - resolution: - { - integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==, - } - engines: { node: ">=0.10.0" } - - /is-fullwidth-code-point/3.0.0: - resolution: - { - integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==, - } - engines: { node: ">=8" } - - /is-glob/4.0.3: - resolution: - { - integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==, - } - engines: { node: ">=0.10.0" } - dependencies: - is-extglob: 
2.1.1 - - /is-negative-zero/2.0.2: - resolution: - { - integrity: sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA==, - } - engines: { node: ">= 0.4" } - dev: false - - /is-number-object/1.0.7: - resolution: - { - integrity: sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ==, - } - engines: { node: ">= 0.4" } - dependencies: - has-tostringtag: 1.0.0 - dev: false - - /is-number/7.0.0: - resolution: - { - integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==, - } - engines: { node: ">=0.12.0" } - dev: false - - /is-regex/1.1.4: - resolution: - { - integrity: sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==, - } - engines: { node: ">= 0.4" } - dependencies: - call-bind: 1.0.2 - has-tostringtag: 1.0.0 - dev: false - - /is-shared-array-buffer/1.0.2: - resolution: - { - integrity: sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA==, - } - dependencies: - call-bind: 1.0.2 - dev: false - - /is-string/1.0.7: - resolution: - { - integrity: sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==, - } - engines: { node: ">= 0.4" } - dependencies: - has-tostringtag: 1.0.0 - dev: false - - /is-symbol/1.0.4: - resolution: - { - integrity: sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg==, - } - engines: { node: ">= 0.4" } - dependencies: - has-symbols: 1.0.3 - dev: false - - /is-weakref/1.0.2: - resolution: - { - integrity: sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ==, - } - dependencies: - call-bind: 1.0.2 - dev: false - - /isexe/2.0.0: - resolution: - { - integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==, - } - - /js-tokens/4.0.0: - resolution: - { - integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==, - } - - /js-yaml/3.14.1: - resolution: - { - integrity: sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==, - } - hasBin: true - dependencies: - argparse: 1.0.10 - esprima: 4.0.1 - - /jsesc/2.5.2: - resolution: - { - integrity: sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==, - } - engines: { node: ">=4" } - hasBin: true - - /json-schema-traverse/0.4.1: - resolution: - { - integrity: sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==, - } - - /json-schema-traverse/1.0.0: - resolution: - { - integrity: sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==, - } - - /json-stable-stringify-without-jsonify/1.0.1: - resolution: - { - integrity: sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==, - } - - /json5/1.0.1: - resolution: - { - integrity: sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow==, - } - hasBin: true - dependencies: - minimist: 1.2.6 - dev: false - - /json5/2.2.1: - resolution: - { - integrity: sha512-1hqLFMSrGHRHxav9q9gNjJ5EXznIxGVO09xQRrwplcS8qs28pZ8s8hupZAmqDwZUmVZ2Qb2jnyPOWcDH8m8dlA==, - } - engines: { node: ">=6" } - hasBin: true - - /jsx-ast-utils/3.3.3: - resolution: - { - integrity: 
sha512-fYQHZTZ8jSfmWZ0iyzfwiU4WDX4HpHbMCZ3gPlWYiCl3BoeOTsqKBqnTVfH2rYT7eP5c3sVbeSPHnnJOaTrWiw==, - } - engines: { node: ">=4.0" } - dependencies: - array-includes: 3.1.5 - object.assign: 4.1.4 - dev: false - - /language-subtag-registry/0.3.22: - resolution: - { - integrity: sha512-tN0MCzyWnoz/4nHS6uxdlFWoUZT7ABptwKPQ52Ea7URk6vll88bWBVhodtnlfEuCcKWNGoc+uGbw1cwa9IKh/w==, - } - dev: false - - /language-tags/1.0.5: - resolution: - { - integrity: sha512-qJhlO9cGXi6hBGKoxEG/sKZDAHD5Hnu9Hs4WbOY3pCWXDhw0N8x1NenNzm2EnNLkLkk7J2SdxAkDSbb6ftT+UQ==, - } - dependencies: - language-subtag-registry: 0.3.22 - dev: false - - /levn/0.4.1: - resolution: - { - integrity: sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==, - } - engines: { node: ">= 0.8.0" } - dependencies: - prelude-ls: 1.2.1 - type-check: 0.4.0 - - /lodash.merge/4.6.2: - resolution: - { - integrity: sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==, - } - - /lodash.truncate/4.4.2: - resolution: - { - integrity: sha512-jttmRe7bRse52OsWIMDLaXxWqRAmtIUccAQ3garviCqJjafXOfNMO0yMfNpdD6zbGaTU0P5Nz7e7gAT6cKmJRw==, - } - - /lodash/4.17.21_ehchni3mpmovsvjxesffg2i5a4: - resolution: - { - integrity: sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==, - } - dev: false - patched: true - - /loose-envify/1.4.0: - resolution: - { - integrity: sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==, - } - hasBin: true - dependencies: - js-tokens: 4.0.0 - - /lru-cache/6.0.0: - resolution: - { - integrity: sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==, - } - engines: { node: ">=10" } - dependencies: - yallist: 4.0.0 - - /merge2/1.4.1: - resolution: - { - integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==, - } - engines: { node: ">= 8" } - dev: false - - /micromatch/4.0.5: - resolution: - { - integrity: sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==, - } - engines: { node: ">=8.6" } - dependencies: - braces: 3.0.2 - picomatch: 2.3.1 - dev: false - - /minimatch/3.1.2: - resolution: - { - integrity: sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==, - } - dependencies: - brace-expansion: 1.1.11 - - /minimist/1.2.6: - resolution: - { - integrity: sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==, - } - dev: false - - /ms/2.0.0: - resolution: - { - integrity: sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==, - } - dev: false - - /ms/2.1.2: - resolution: - { - integrity: sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==, - } - - /ms/2.1.3: - resolution: - { - integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==, - } - dev: false - - /nanoid/3.3.4: - resolution: - { - integrity: sha512-MqBkQh/OHTS2egovRtLk45wEyNXwF+cokD+1YPf9u5VfJiRdAiRwB2froX5Co9Rh20xs4siNPm8naNotSD6RBw==, - } - engines: { node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1 } - hasBin: true - dev: false - - /natural-compare/1.4.0: - resolution: - { - integrity: sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==, - } - - /next-transpile-modules/9.0.0: - resolution: - { - integrity: 
sha512-VCNFOazIAnXn1hvgYYSTYMnoWgKgwlYh4lm1pKbSfiB3kj5ZYLcKVhfh3jkPOg1cnd9DP+pte9yCUocdPEUBTQ==, - } - dependencies: - enhanced-resolve: 5.10.0 - escalade: 3.1.1 - dev: true - - /next/12.2.5_ir3quccc6i62x6qn6jjhyjjiey: - resolution: - { - integrity: sha512-tBdjqX5XC/oFs/6gxrZhjmiq90YWizUYU6qOWAfat7zJwrwapJ+BYgX2PmiacunXMaRpeVT4vz5MSPSLgNkrpA==, - } - engines: { node: ">=12.22.0" } - hasBin: true - peerDependencies: - fibers: ">= 3.1.0" - node-sass: ^6.0.0 || ^7.0.0 - react: ^17.0.2 || ^18.0.0-0 - react-dom: ^17.0.2 || ^18.0.0-0 - sass: ^1.3.0 - peerDependenciesMeta: - fibers: - optional: true - node-sass: - optional: true - sass: - optional: true - dependencies: - "@next/env": 12.2.5 - "@swc/helpers": 0.4.3 - caniuse-lite: 1.0.30001399 - postcss: 8.4.14 - react: 18.2.0 - react-dom: 18.2.0_react@18.2.0 - styled-jsx: 5.0.4_3toe27fv7etiytxb5kxc7fxaw4 - use-sync-external-store: 1.2.0_react@18.2.0 - optionalDependencies: - "@next/swc-android-arm-eabi": 12.2.5 - "@next/swc-android-arm64": 12.2.5 - "@next/swc-darwin-arm64": 12.2.5 - "@next/swc-darwin-x64": 12.2.5 - "@next/swc-freebsd-x64": 12.2.5 - "@next/swc-linux-arm-gnueabihf": 12.2.5 - "@next/swc-linux-arm64-gnu": 12.2.5 - "@next/swc-linux-arm64-musl": 12.2.5 - "@next/swc-linux-x64-gnu": 12.2.5 - "@next/swc-linux-x64-musl": 12.2.5 - "@next/swc-win32-arm64-msvc": 12.2.5 - "@next/swc-win32-ia32-msvc": 12.2.5 - "@next/swc-win32-x64-msvc": 12.2.5 - transitivePeerDependencies: - - "@babel/core" - - babel-plugin-macros - dev: false - - /node-releases/2.0.6: - resolution: - { - integrity: sha512-PiVXnNuFm5+iYkLBNeq5211hvO38y63T0i2KKh2KnUs3RpzJ+JtODFjkD8yjLwnDkTYF1eKXheUwdssR+NRZdg==, - } - - /object-assign/4.1.1: - resolution: - { - integrity: sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==, - } - engines: { node: ">=0.10.0" } - dev: false - - /object-inspect/1.12.2: - resolution: - { - integrity: sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ==, - } - dev: false - - /object-keys/1.1.1: - resolution: - { - integrity: sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==, - } - engines: { node: ">= 0.4" } - dev: false - - /object.assign/4.1.4: - resolution: - { - integrity: sha512-1mxKf0e58bvyjSCtKYY4sRe9itRk3PJpquJOjeIkz885CczcI4IvJJDLPS72oowuSh+pBxUFROpX+TU++hxhZQ==, - } - engines: { node: ">= 0.4" } - dependencies: - call-bind: 1.0.2 - define-properties: 1.1.4 - has-symbols: 1.0.3 - object-keys: 1.1.1 - dev: false - - /object.entries/1.1.5: - resolution: - { - integrity: sha512-TyxmjUoZggd4OrrU1W66FMDG6CuqJxsFvymeyXI51+vQLN67zYfZseptRge703kKQdo4uccgAKebXFcRCzk4+g==, - } - engines: { node: ">= 0.4" } - dependencies: - call-bind: 1.0.2 - define-properties: 1.1.4 - es-abstract: 1.20.2 - dev: false - - /object.fromentries/2.0.5: - resolution: - { - integrity: sha512-CAyG5mWQRRiBU57Re4FKoTBjXfDoNwdFVH2Y1tS9PqCsfUTymAohOkEMSG3aRNKmv4lV3O7p1et7c187q6bynw==, - } - engines: { node: ">= 0.4" } - dependencies: - call-bind: 1.0.2 - define-properties: 1.1.4 - es-abstract: 1.20.2 - dev: false - - /object.hasown/1.1.1: - resolution: - { - integrity: sha512-LYLe4tivNQzq4JdaWW6WO3HMZZJWzkkH8fnI6EebWl0VZth2wL2Lovm74ep2/gZzlaTdV62JZHEqHQ2yVn8Q/A==, - } - dependencies: - define-properties: 1.1.4 - es-abstract: 1.20.2 - dev: false - - /object.values/1.1.5: - resolution: - { - integrity: sha512-QUZRW0ilQ3PnPpbNtgdNV1PDbEqLIiSFB3l+EnGtBQ/8SUTLj1PZwtQHABZtLgwpJZTSZhuGLOGk57Drx2IvYg==, - } - engines: { node: ">= 0.4" } - dependencies: - 
call-bind: 1.0.2 - define-properties: 1.1.4 - es-abstract: 1.20.2 - dev: false - - /once/1.4.0: - resolution: - { - integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==, - } - dependencies: - wrappy: 1.0.2 - - /optionator/0.9.1: - resolution: - { - integrity: sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw==, - } - engines: { node: ">= 0.8.0" } - dependencies: - deep-is: 0.1.4 - fast-levenshtein: 2.0.6 - levn: 0.4.1 - prelude-ls: 1.2.1 - type-check: 0.4.0 - word-wrap: 1.2.3 - - /parent-module/1.0.1: - resolution: - { - integrity: sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==, - } - engines: { node: ">=6" } - dependencies: - callsites: 3.1.0 - - /path-is-absolute/1.0.1: - resolution: - { - integrity: sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==, - } - engines: { node: ">=0.10.0" } - - /path-key/3.1.1: - resolution: - { - integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==, - } - engines: { node: ">=8" } - - /path-parse/1.0.7: - resolution: - { - integrity: sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==, - } - dev: false - - /path-type/4.0.0: - resolution: - { - integrity: sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==, - } - engines: { node: ">=8" } - dev: false - - /picocolors/1.0.0: - resolution: - { - integrity: sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==, - } - - /picomatch/2.3.1: - resolution: - { - integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==, - } - engines: { node: ">=8.6" } - dev: false - - /postcss/8.4.14: - resolution: - { - integrity: sha512-E398TUmfAYFPBSdzgeieK2Y1+1cpdxJx8yXbK/m57nRhKSmk1GB2tO4lbLBtlkfPQTDKfe4Xqv1ASWPpayPEig==, - } - engines: { node: ^10 || ^12 || >=14 } - dependencies: - nanoid: 3.3.4 - picocolors: 1.0.0 - source-map-js: 1.0.2 - dev: false - - /prelude-ls/1.2.1: - resolution: - { - integrity: sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==, - } - engines: { node: ">= 0.8.0" } - - /prettier/2.7.1: - resolution: - { - integrity: sha512-ujppO+MkdPqoVINuDFDRLClm7D78qbDt0/NR+wp5FqEZOoTNAjPHWj17QRhu7geIHJfcNhRk1XVQmF8Bp3ye+g==, - } - engines: { node: ">=10.13.0" } - hasBin: true - dev: true - - /progress/2.0.3: - resolution: - { - integrity: sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==, - } - engines: { node: ">=0.4.0" } - - /prop-types/15.8.1: - resolution: - { - integrity: sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==, - } - dependencies: - loose-envify: 1.4.0 - object-assign: 4.1.1 - react-is: 16.13.1 - dev: false - - /punycode/2.1.1: - resolution: - { - integrity: sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==, - } - engines: { node: ">=6" } - - /queue-microtask/1.2.3: - resolution: - { - integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==, - } - dev: false - - /react-dom/18.2.0_react@18.2.0: - resolution: - { - integrity: sha512-6IMTriUmvsjHUjNtEDudZfuDQUoWXVxKHhlEGSk81n4YFS+r/Kl99wXiwlVXtPBtJenozv2P+hxDsw9eA7Xo6g==, - } - peerDependencies: - react: 
^18.2.0 - dependencies: - loose-envify: 1.4.0 - react: 18.2.0 - scheduler: 0.23.0 - dev: false - - /react-is/16.13.1: - resolution: - { - integrity: sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==, - } - dev: false - - /react/18.2.0: - resolution: - { - integrity: sha512-/3IjMdb2L9QbBdWiW5e3P2/npwMBaU9mHCSCUzNln0ZCYbcfTsGbTJrU/kGemdH2IWmB2ioZ+zkxtmq6g09fGQ==, - } - engines: { node: ">=0.10.0" } - dependencies: - loose-envify: 1.4.0 - - /regenerator-runtime/0.13.9: - resolution: - { - integrity: sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA==, - } - dev: false - - /regexp.prototype.flags/1.4.3: - resolution: - { - integrity: sha512-fjggEOO3slI6Wvgjwflkc4NFRCTZAu5CnNfBd5qOMYhWdn67nJBBu34/TkD++eeFmd8C9r9jfXJ27+nSiRkSUA==, - } - engines: { node: ">= 0.4" } - dependencies: - call-bind: 1.0.2 - define-properties: 1.1.4 - functions-have-names: 1.2.3 - dev: false - - /regexpp/3.2.0: - resolution: - { - integrity: sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg==, - } - engines: { node: ">=8" } - - /require-from-string/2.0.2: - resolution: - { - integrity: sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==, - } - engines: { node: ">=0.10.0" } - - /resolve-from/4.0.0: - resolution: - { - integrity: sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==, - } - engines: { node: ">=4" } - - /resolve/1.22.1: - resolution: - { - integrity: sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw==, - } - hasBin: true - dependencies: - is-core-module: 2.10.0 - path-parse: 1.0.7 - supports-preserve-symlinks-flag: 1.0.0 - dev: false - - /resolve/2.0.0-next.4: - resolution: - { - integrity: sha512-iMDbmAWtfU+MHpxt/I5iWI7cY6YVEZUQ3MBgPQ++XD1PELuJHIl82xBmObyP2KyQmkNB2dsqF7seoQQiAn5yDQ==, - } - hasBin: true - dependencies: - is-core-module: 2.10.0 - path-parse: 1.0.7 - supports-preserve-symlinks-flag: 1.0.0 - dev: false - - /reusify/1.0.4: - resolution: - { - integrity: sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==, - } - engines: { iojs: ">=1.0.0", node: ">=0.10.0" } - dev: false - - /rimraf/3.0.2: - resolution: - { - integrity: sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==, - } - hasBin: true - dependencies: - glob: 7.2.3 - - /run-parallel/1.2.0: - resolution: - { - integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==, - } - dependencies: - queue-microtask: 1.2.3 - dev: false - - /safe-buffer/5.1.2: - resolution: - { - integrity: sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==, - } - - /scheduler/0.23.0: - resolution: - { - integrity: sha512-CtuThmgHNg7zIZWAXi3AsyIzA3n4xx7aNyjwC2VJldO2LMVDhFK+63xGqq6CsJH4rTAt6/M+N4GhZiDYPx9eUw==, - } - dependencies: - loose-envify: 1.4.0 - dev: false - - /semver/6.3.0: - resolution: - { - integrity: sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==, - } - hasBin: true - - /semver/7.3.7: - resolution: - { - integrity: sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g==, - } - engines: { node: ">=10" } - hasBin: true - dependencies: - lru-cache: 6.0.0 - - /shebang-command/2.0.0: - resolution: - { - integrity: 
sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==, - } - engines: { node: ">=8" } - dependencies: - shebang-regex: 3.0.0 - - /shebang-regex/3.0.0: - resolution: - { - integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==, - } - engines: { node: ">=8" } - - /side-channel/1.0.4: - resolution: - { - integrity: sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==, - } - dependencies: - call-bind: 1.0.2 - get-intrinsic: 1.1.3 - object-inspect: 1.12.2 - dev: false - - /slash/3.0.0: - resolution: - { - integrity: sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==, - } - engines: { node: ">=8" } - dev: false - - /slice-ansi/4.0.0: - resolution: - { - integrity: sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ==, - } - engines: { node: ">=10" } - dependencies: - ansi-styles: 4.3.0 - astral-regex: 2.0.0 - is-fullwidth-code-point: 3.0.0 - - /source-map-js/1.0.2: - resolution: - { - integrity: sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==, - } - engines: { node: ">=0.10.0" } - dev: false - - /sprintf-js/1.0.3: - resolution: - { - integrity: sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==, - } - - /string-width/4.2.3: - resolution: - { - integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==, - } - engines: { node: ">=8" } - dependencies: - emoji-regex: 8.0.0 - is-fullwidth-code-point: 3.0.0 - strip-ansi: 6.0.1 - - /string.prototype.matchall/4.0.7: - resolution: - { - integrity: sha512-f48okCX7JiwVi1NXCVWcFnZgADDC/n2vePlQ/KUCNqCikLLilQvwjMO8+BHVKvgzH0JB0J9LEPgxOGT02RoETg==, - } - dependencies: - call-bind: 1.0.2 - define-properties: 1.1.4 - es-abstract: 1.20.2 - get-intrinsic: 1.1.3 - has-symbols: 1.0.3 - internal-slot: 1.0.3 - regexp.prototype.flags: 1.4.3 - side-channel: 1.0.4 - dev: false - - /string.prototype.trimend/1.0.5: - resolution: - { - integrity: sha512-I7RGvmjV4pJ7O3kdf+LXFpVfdNOxtCW/2C8f6jNiW4+PQchwxkCDzlk1/7p+Wl4bqFIZeF47qAHXLuHHWKAxog==, - } - dependencies: - call-bind: 1.0.2 - define-properties: 1.1.4 - es-abstract: 1.20.2 - dev: false - - /string.prototype.trimstart/1.0.5: - resolution: - { - integrity: sha512-THx16TJCGlsN0o6dl2o6ncWUsdgnLRSA23rRE5pyGBw/mLr3Ej/R2LaqCtgP8VNMGZsvMWnf9ooZPyY2bHvUFg==, - } - dependencies: - call-bind: 1.0.2 - define-properties: 1.1.4 - es-abstract: 1.20.2 - dev: false - - /strip-ansi/6.0.1: - resolution: - { - integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==, - } - engines: { node: ">=8" } - dependencies: - ansi-regex: 5.0.1 - - /strip-bom/3.0.0: - resolution: - { - integrity: sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==, - } - engines: { node: ">=4" } - dev: false - - /strip-json-comments/3.1.1: - resolution: - { - integrity: sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==, - } - engines: { node: ">=8" } - - /styled-jsx/5.0.4_3toe27fv7etiytxb5kxc7fxaw4: - resolution: - { - integrity: sha512-sDFWLbg4zR+UkNzfk5lPilyIgtpddfxXEULxhujorr5jtePTUqiPDc5BC0v1NRqTr/WaFBGQQUoYToGlF4B2KQ==, - } - engines: { node: ">= 12.0.0" } - peerDependencies: - "@babel/core": "*" - babel-plugin-macros: "*" - react: ">= 16.8.0 || 17.x.x || ^18.0.0-0" 
- peerDependenciesMeta: - "@babel/core": - optional: true - babel-plugin-macros: - optional: true - dependencies: - "@babel/core": 7.19.1 - react: 18.2.0 - dev: false - - /supports-color/5.5.0: - resolution: - { - integrity: sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==, - } - engines: { node: ">=4" } - dependencies: - has-flag: 3.0.0 - - /supports-color/7.2.0: - resolution: - { - integrity: sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==, - } - engines: { node: ">=8" } - dependencies: - has-flag: 4.0.0 - - /supports-preserve-symlinks-flag/1.0.0: - resolution: - { - integrity: sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==, - } - engines: { node: ">= 0.4" } - dev: false - - /table/6.8.0: - resolution: - { - integrity: sha512-s/fitrbVeEyHKFa7mFdkuQMWlH1Wgw/yEXMt5xACT4ZpzWFluehAxRtUUQKPuWhaLAWhFcVx6w3oC8VKaUfPGA==, - } - engines: { node: ">=10.0.0" } - dependencies: - ajv: 8.11.0 - lodash.truncate: 4.4.2 - slice-ansi: 4.0.0 - string-width: 4.2.3 - strip-ansi: 6.0.1 - - /tapable/2.2.1: - resolution: - { - integrity: sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ==, - } - engines: { node: ">=6" } - dev: true - - /text-table/0.2.0: - resolution: - { - integrity: sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==, - } - - /to-fast-properties/2.0.0: - resolution: - { - integrity: sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==, - } - engines: { node: ">=4" } - - /to-regex-range/5.0.1: - resolution: - { - integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==, - } - engines: { node: ">=8.0" } - dependencies: - is-number: 7.0.0 - dev: false - - /tsconfig-paths/3.14.1: - resolution: - { - integrity: sha512-fxDhWnFSLt3VuTwtvJt5fpwxBHg5AdKWMsgcPOOIilyjymcYVZoCQF8fvFRezCNfblEXmi+PcM1eYHeOAgXCOQ==, - } - dependencies: - "@types/json5": 0.0.29 - json5: 1.0.1 - minimist: 1.2.6 - strip-bom: 3.0.0 - dev: false - - /tslib/1.14.1: - resolution: - { - integrity: sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==, - } - dev: false - - /tslib/2.4.0: - resolution: - { - integrity: sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==, - } - dev: false - - /tsutils/3.21.0_typescript@4.8.3: - resolution: - { - integrity: sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA==, - } - engines: { node: ">= 6" } - peerDependencies: - typescript: ">=2.8.0 || >= 3.2.0-dev || >= 3.3.0-dev || >= 3.4.0-dev || >= 3.5.0-dev || >= 3.6.0-dev || >= 3.6.0-beta || >= 3.7.0-dev || >= 3.7.0-beta" - dependencies: - tslib: 1.14.1 - typescript: 4.8.3 - dev: false - - /turbo-android-arm64/1.4.6: - resolution: - { - integrity: sha512-YxSlHc64CF5J7yNUMiLBHkeLyzrpe75Oy7tivWb3z7ySG44BXPikk4HDJZPh0T1ELvukDwuPKkvDukJ2oCLJpA==, - } - cpu: [arm64] - os: [android] - requiresBuild: true - dev: true - optional: true - - /turbo-darwin-64/1.4.6: - resolution: - { - integrity: sha512-f6uto7LLpjwZ6iZSF+8uaDpuiTji6xmnWDxNuW23DBE8iv5mxehHd+6Ys851uKDRrPb3QdCu9ctyigKTAla5Vg==, - } - cpu: [x64] - os: [darwin] - requiresBuild: true - dev: true - optional: true - - /turbo-darwin-arm64/1.4.6: - resolution: - { - integrity: 
sha512-o9C6e5XyuMHQwE0fEhUxfpXxvNr2QXXWX8nxIjygxeF19AqKbk/s08vZBOEmXV6/gx/pRhZ1S2nf0PIUjKBD/Q==, - } - cpu: [arm64] - os: [darwin] - requiresBuild: true - dev: true - optional: true - - /turbo-freebsd-64/1.4.6: - resolution: - { - integrity: sha512-Gg9VOUo6McXYKGevcYjGUSmMryZyZggvpdPh7Dw3QTcT8Tsy6OBtq6WnJ2O4kFDsMigyKtEOJPceD9vDMZt3yQ==, - } - cpu: [x64] - os: [freebsd] - requiresBuild: true - dev: true - optional: true - - /turbo-freebsd-arm64/1.4.6: - resolution: - { - integrity: sha512-W7VrcneWFN1QENKt5cpAPSsf9ArYBBAm3VtPBZEO5tX8kuahGlah1SKdKJXrRxYOY82wyNxDagS/rHpBlrAAzw==, - } - cpu: [arm64] - os: [freebsd] - requiresBuild: true - dev: true - optional: true - - /turbo-linux-32/1.4.6: - resolution: - { - integrity: sha512-76j/zsui6mWPX8pZVMGgF8eiKHPmKuGa2lo0A/Ja0HUvdYCOGUfHsWJGVVIeYbuEp3jsKyVt7OnMDeH9CqO6bg==, - } - cpu: [ia32] - os: [linux] - requiresBuild: true - dev: true - optional: true - - /turbo-linux-64/1.4.6: - resolution: - { - integrity: sha512-z4A37Xm7lZyO9ddtGnvQHWMrsAKX6vFBxdbtb9MY76VRblo7lWSuk4LwCeM+T+ZDJ9LBFiF7aD/diRShlLx9jA==, - } - cpu: [x64] - os: [linux] - requiresBuild: true - dev: true - optional: true - - /turbo-linux-arm/1.4.6: - resolution: - { - integrity: sha512-Uh/V3oaAdhyZW6FKPpKihAxQo3EbvLaVNnzzkBmBnvHRkqoDJHhpuG72V7nn8pzxVbJ1++NEVjvbc2kmKFvGjg==, - } - cpu: [arm] - os: [linux] - requiresBuild: true - dev: true - optional: true - - /turbo-linux-arm64/1.4.6: - resolution: - { - integrity: sha512-FW1jmOpZfOoVVvml338N0MPnYjiMyYWTaMb4T+IosgGYymcUE3xJjfXJcqfU/9/uKTyY8zG0qr9/5rw2kpMS2Q==, - } - cpu: [arm64] - os: [linux] - requiresBuild: true - dev: true - optional: true - - /turbo-linux-mips64le/1.4.6: - resolution: - { - integrity: sha512-iWaL3Pwj52BH3T2M8nXScmbSnq4+x47MYK7lJMG7FsZGAIoT5ToO1Wt1iX3GRHTcnIZYm/kCfJ1ptK/NCossLA==, - } - cpu: [mipsel] - os: [linux] - requiresBuild: true - dev: true - optional: true - - /turbo-linux-ppc64le/1.4.6: - resolution: - { - integrity: sha512-Af/KlUmpiORDyELxT7byXNWl3fefErGQMJfeqXEtAdhs8OCKQWuU+lchcZbiBZYNpL+lZoa3PAmP9Fpx7R4plA==, - } - cpu: [ppc64] - os: [linux] - requiresBuild: true - dev: true - optional: true - - /turbo-windows-32/1.4.6: - resolution: - { - integrity: sha512-NBd+XPlRSaR//lVN13Q9DOqK3CbowSvafIyGsO4jfvMsGTdyNDL6AYtFsvTKW91/G7ZhATmSEkPn2pZRuhP/DA==, - } - cpu: [ia32] - os: [win32] - requiresBuild: true - dev: true - optional: true - - /turbo-windows-64/1.4.6: - resolution: - { - integrity: sha512-86AbmG+CjzVTpn4RGtwU2CYy4zSyAc9bIQ4pDGLIpCJg6JlD11duaiMJh0SCU/HCqWLJjWDI4qD+f9WNbgPsyQ==, - } - cpu: [x64] - os: [win32] - requiresBuild: true - dev: true - optional: true - - /turbo-windows-arm64/1.4.6: - resolution: - { - integrity: sha512-V+pWcqhTtmQQ3ew8qEjYtUwzyW6tO1RgvP+6OKzItYzTnMTr1Fe42Q21V+tqRNxuNfFDKsgVJdk2p5wB87bvyQ==, - } - cpu: [arm64] - os: [win32] - requiresBuild: true - dev: true - optional: true - - /turbo/1.4.6: - resolution: - { - integrity: sha512-FKtBXlOJ7YjSK22yj4sJLCtDcHFElypt7xw9cZN7Wyv9x4XBrTmh5KP6RmcGnRR1/GJlTNwD2AY2T9QTPnHh+g==, - } - hasBin: true - requiresBuild: true - optionalDependencies: - turbo-android-arm64: 1.4.6 - turbo-darwin-64: 1.4.6 - turbo-darwin-arm64: 1.4.6 - turbo-freebsd-64: 1.4.6 - turbo-freebsd-arm64: 1.4.6 - turbo-linux-32: 1.4.6 - turbo-linux-64: 1.4.6 - turbo-linux-arm: 1.4.6 - turbo-linux-arm64: 1.4.6 - turbo-linux-mips64le: 1.4.6 - turbo-linux-ppc64le: 1.4.6 - turbo-windows-32: 1.4.6 - turbo-windows-64: 1.4.6 - turbo-windows-arm64: 1.4.6 - dev: true - - /type-check/0.4.0: - resolution: - { - integrity: 
sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==, - } - engines: { node: ">= 0.8.0" } - dependencies: - prelude-ls: 1.2.1 - - /type-fest/0.20.2: - resolution: - { - integrity: sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==, - } - engines: { node: ">=10" } - - /typescript/4.8.3: - resolution: - { - integrity: sha512-goMHfm00nWPa8UvR/CPSvykqf6dVV8x/dp0c5mFTMTIu0u0FlGWRioyy7Nn0PGAdHxpJZnuO/ut+PpQ8UiHAig==, - } - engines: { node: ">=4.2.0" } - hasBin: true - - /unbox-primitive/1.0.2: - resolution: - { - integrity: sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw==, - } - dependencies: - call-bind: 1.0.2 - has-bigints: 1.0.2 - has-symbols: 1.0.3 - which-boxed-primitive: 1.0.2 - dev: false - - /underscore/1.13.4_3pbfs36izefyn2uycmknwkvuuy: - resolution: - { - integrity: sha512-BQFnUDuAQ4Yf/cYY5LNrK9NCJFKriaRbD9uR1fTeXnBeoa97W0i41qkZfGO9pSo8I5KzjAcSY2XYtdf0oKd7KQ==, - } - dev: false - patched: true - - /update-browserslist-db/1.0.9_browserslist@4.21.3: - resolution: - { - integrity: sha512-/xsqn21EGVdXI3EXSum1Yckj3ZVZugqyOZQ/CxYPBD/R+ko9NSUScf8tFF4dOKY+2pvSSJA/S+5B8s4Zr4kyvg==, - } - hasBin: true - peerDependencies: - browserslist: ">= 4.21.0" - dependencies: - browserslist: 4.21.3 - escalade: 3.1.1 - picocolors: 1.0.0 - - /uri-js/4.4.1: - resolution: - { - integrity: sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==, - } - dependencies: - punycode: 2.1.1 - - /use-sync-external-store/1.2.0_react@18.2.0: - resolution: - { - integrity: sha512-eEgnFxGQ1Ife9bzYs6VLi8/4X6CObHMw9Qr9tPY43iKwsPw8xE8+EFsf/2cFZ5S3esXgpWgtSCtLNS41F+sKPA==, - } - peerDependencies: - react: ^16.8.0 || ^17.0.0 || ^18.0.0 - dependencies: - react: 18.2.0 - dev: false - - /v8-compile-cache/2.3.0: - resolution: - { - integrity: sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA==, - } - - /which-boxed-primitive/1.0.2: - resolution: - { - integrity: sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==, - } - dependencies: - is-bigint: 1.0.4 - is-boolean-object: 1.1.2 - is-number-object: 1.0.7 - is-string: 1.0.7 - is-symbol: 1.0.4 - dev: false - - /which/2.0.2: - resolution: - { - integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==, - } - engines: { node: ">= 8" } - hasBin: true - dependencies: - isexe: 2.0.0 - - /word-wrap/1.2.3: - resolution: - { - integrity: sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==, - } - engines: { node: ">=0.10.0" } - - /wrappy/1.0.2: - resolution: - { - integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==, - } - - /yallist/4.0.0: - resolution: - { - integrity: sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==, - } - - file:packages/ui: - resolution: { directory: packages/ui, type: directory } - name: ui - version: 0.0.0 - dev: false - - github.com/peerigon/dashboard-icons/ce27ef933144e09cef3911025f3649040a8571b6: - resolution: - { - tarball: https://codeload.github.com/peerigon/dashboard-icons/tar.gz/ce27ef933144e09cef3911025f3649040a8571b, - } - name: dashboard-icons - version: 1.0.0 - dev: false diff --git a/cli/internal/lockfile/testdata/pnpm8.yaml b/cli/internal/lockfile/testdata/pnpm8.yaml deleted file mode 100644 index 
d7d9e274d60fd..0000000000000 --- a/cli/internal/lockfile/testdata/pnpm8.yaml +++ /dev/null @@ -1,107 +0,0 @@ -lockfileVersion: "6.0" - -patchedDependencies: - is-even@1.0.0: - hash: trwuddosrpxsvtoqztvint6pca - path: patches/is-even@1.0.0.patch - -importers: - .: {} - - packages/a: - dependencies: - c: - specifier: workspace:* - version: link:../c - is-odd: - specifier: ^3.0.1 - version: 3.0.1 - - packages/b: - dependencies: - c: - specifier: workspace:* - version: link:../c - is-even: - specifier: ^1.0.0 - version: 1.0.0_trwuddosrpxsvtoqztvint6pca - - packages/c: - dependencies: - lodash: - specifier: ^4.17.21 - version: 4.17.21 - -packages: - /is-buffer@1.1.6: - resolution: - { - integrity: sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==, - } - dev: false - - /is-even@1.0.0_trwuddosrpxsvtoqztvint6pca: - resolution: - { - integrity: sha512-LEhnkAdJqic4Dbqn58A0y52IXoHWlsueqQkKfMfdEnIYG8A1sm/GHidKkS6yvXlMoRrkM34csHnXQtOqcb+Jzg==, - } - engines: { node: ">=0.10.0" } - dependencies: - is-odd: 0.1.2 - dev: false - patched: true - - /is-number@3.0.0: - resolution: - { - integrity: sha512-4cboCqIpliH+mAvFNegjZQ4kgKc3ZUhQVr3HvWbSh5q3WH2v82ct+T2Y1hdU5Gdtorx/cLifQjqCbL7bpznLTg==, - } - engines: { node: ">=0.10.0" } - dependencies: - kind-of: 3.2.2 - dev: false - - /is-number@6.0.0: - resolution: - { - integrity: sha512-Wu1VHeILBK8KAWJUAiSZQX94GmOE45Rg6/538fKwiloUu21KncEkYGPqob2oSZ5mUT73vLGrHQjKw3KMPwfDzg==, - } - engines: { node: ">=0.10.0" } - dev: false - - /is-odd@0.1.2: - resolution: - { - integrity: sha512-Ri7C2K7o5IrUU9UEI8losXJCCD/UtsaIrkR5sxIcFg4xQ9cRJXlWA5DQvTE0yDc0krvSNLsRGXN11UPS6KyfBw==, - } - engines: { node: ">=0.10.0" } - dependencies: - is-number: 3.0.0 - dev: false - - /is-odd@3.0.1: - resolution: - { - integrity: sha512-CQpnWPrDwmP1+SMHXZhtLtJv90yiyVfluGsX5iNCVkrhQtU3TQHsUWPG9wkdk9Lgd5yNpAg9jQEo90CBaXgWMA==, - } - engines: { node: ">=4" } - dependencies: - is-number: 6.0.0 - dev: false - - /kind-of@3.2.2: - resolution: - { - integrity: sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==, - } - engines: { node: ">=0.10.0" } - dependencies: - is-buffer: 1.1.6 - dev: false - - /lodash@4.17.21: - resolution: - { - integrity: sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==, - } - dev: false diff --git a/cli/internal/lockfile/testdata/pnpm_override.yaml b/cli/internal/lockfile/testdata/pnpm_override.yaml deleted file mode 100644 index 21021920f574e..0000000000000 --- a/cli/internal/lockfile/testdata/pnpm_override.yaml +++ /dev/null @@ -1,24 +0,0 @@ -lockfileVersion: 5.4 - -overrides: - "@nomiclabs/hardhat-ethers": npm:hardhat-deploy-ethers@^0.3.0-beta.13 - -importers: - config/hardhat: - specifiers: - "@nomiclabs/hardhat-ethers": npm:hardhat-deploy-ethers@^0.3.0-beta.13 - dependencies: - "@nomiclabs/hardhat-ethers": /hardhat-deploy-ethers/0.3.0-beta.13_yab2ug5tvye2kp6e24l5x3z7uy - -packages: - /hardhat-deploy-ethers/0.3.0-beta.13_yab2ug5tvye2kp6e24l5x3z7uy: - resolution: - { - integrity: sha512-PdWVcKB9coqWV1L7JTpfXRCI91Cgwsm7KLmBcwZ8f0COSm1xtABHZTyz3fvF6p42cTnz1VM0QnfDvMFlIRkSNw==, - } - peerDependencies: - ethers: ^5.0.0 - hardhat: ^2.0.0 - dependencies: - ethers: 5.7.2 - hardhat: 2.12.4_typescript@4.9.4 diff --git a/cli/internal/lockfile/yarn_lockfile.go b/cli/internal/lockfile/yarn_lockfile.go deleted file mode 100644 index 5d87348cf7e9d..0000000000000 --- a/cli/internal/lockfile/yarn_lockfile.go +++ /dev/null @@ -1,38 +0,0 @@ -package lockfile 
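As an editorial aside: the deleted pnpm fixtures above (pnpm8.yaml and pnpm_override.yaml) exercise pnpm's key encoding, where a patched or peer-resolved package gets a hash appended to its version with an underscore, e.g. `/is-even@1.0.0_trwuddosrpxsvtoqztvint6pca` next to the `patchedDependencies` hash `trwuddosrpxsvtoqztvint6pca`. A minimal sketch of splitting such a key; `splitPnpmKey` is an illustrative name and not part of the deleted code:

```go
package main

import (
	"fmt"
	"strings"
)

// splitPnpmKey is a hypothetical helper (not turbo's API) that separates a
// pnpm v6 lockfile key such as "/is-even@1.0.0_trwuddosrpxsvtoqztvint6pca"
// into name, version, and the underscore suffix that encodes peer-dependency
// and patch hashes.
func splitPnpmKey(key string) (name, version, suffix string) {
	key = strings.TrimPrefix(key, "/")
	// Use the last "@" so scoped names like "@scope/pkg@1.0.0" stay intact.
	at := strings.LastIndex(key, "@")
	if at <= 0 {
		return key, "", ""
	}
	name, rest := key[:at], key[at+1:]
	// Anything after the first "_" is the peer/patch suffix.
	if under := strings.Index(rest, "_"); under >= 0 {
		return name, rest[:under], rest[under+1:]
	}
	return name, rest, ""
}

func main() {
	name, version, suffix := splitPnpmKey("/is-even@1.0.0_trwuddosrpxsvtoqztvint6pca")
	fmt.Println(name, version, suffix) // is-even 1.0.0 trwuddosrpxsvtoqztvint6pca
}
```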
- -import ( - "github.com/vercel/turbo/cli/internal/turbopath" -) - -// YarnLockfile representation of yarn lockfile -type YarnLockfile struct { - contents []byte -} - -var _ Lockfile = (*YarnLockfile)(nil) - -// ResolvePackage Given a package and version returns the key, resolved version, and if it was found -func (l *YarnLockfile) ResolvePackage(_workspacePath turbopath.AnchoredUnixPath, name string, version string) (Package, error) { - // This is only used when calculating the transitive deps, but Rust - // implementations do this calculation on the Rust side. - panic("Unreachable") -} - -// AllDependencies Given a lockfile key return all (dev/optional/peer) dependencies of that package -func (l *YarnLockfile) AllDependencies(key string) (map[string]string, bool) { - // This is only used when calculating the transitive deps, but Rust - // implementations do this calculation on the Rust side. - panic("Unreachable") -} - -// DecodeYarnLockfile Takes the contents of a yarn lockfile and returns a struct representation -func DecodeYarnLockfile(contents []byte) (*YarnLockfile, error) { - return &YarnLockfile{contents: contents}, nil -} - -// GlobalChange checks if there are any differences between lockfiles that would completely invalidate -// the cache. -func (l *YarnLockfile) GlobalChange(other Lockfile) bool { - _, ok := other.(*YarnLockfile) - return !ok -} diff --git a/cli/internal/logstreamer/logstreamer.go b/cli/internal/logstreamer/logstreamer.go deleted file mode 100644 index 4368a8b44fbac..0000000000000 --- a/cli/internal/logstreamer/logstreamer.go +++ /dev/null @@ -1,159 +0,0 @@ -// Copyright (c) 2013 Kevin van Zonneveld. All rights reserved. -// Source: https://github.com/kvz/logstreamer -// SPDX-License-Identifier: MIT -package logstreamer - -import ( - "bytes" - "io" - "log" - "os" - "strings" -) - -type Logstreamer struct { - Logger *log.Logger - buf *bytes.Buffer - // If prefix == stdout, colors green - // If prefix == stderr, colors red - // Else, prefix is taken as-is, and prepended to anything - // you throw at Write() - prefix string - // if true, saves output in memory - record bool - persist string - - // Adds color to stdout & stderr if terminal supports it - colorOkay string - colorFail string - colorReset string -} - -func NewLogstreamer(logger *log.Logger, prefix string, record bool) *Logstreamer { - streamer := &Logstreamer{ - Logger: logger, - buf: bytes.NewBuffer([]byte("")), - prefix: prefix, - record: record, - persist: "", - colorOkay: "", - colorFail: "", - colorReset: "", - } - - if strings.HasPrefix(os.Getenv("TERM"), "xterm") { - streamer.colorOkay = "\x1b[32m" - streamer.colorFail = "\x1b[31m" - streamer.colorReset = "\x1b[0m" - } - - return streamer -} - -func (l *Logstreamer) Write(p []byte) (n int, err error) { - if n, err = l.buf.Write(p); err != nil { - return - } - - err = l.OutputLines() - return -} - -func (l *Logstreamer) Close() error { - if err := l.Flush(); err != nil { - return err - } - l.buf = bytes.NewBuffer([]byte("")) - return nil -} - -func (l *Logstreamer) Flush() error { - p := make([]byte, l.buf.Len()) - if _, err := l.buf.Read(p); err != nil { - return err - } - - l.out(string(p)) - return nil -} - -func (l *Logstreamer) OutputLines() error { - for { - line, err := l.buf.ReadString('\n') - - if len(line) > 0 { - if strings.HasSuffix(line, "\n") { - l.out(line) - } else { - // put back into buffer, it's not a complete line yet - // Close() or Flush() have to be used to flush out - // the last remaining line if it does not
end with a newline - if _, err := l.buf.WriteString(line); err != nil { - return err - } - } - } - - if err == io.EOF { - break - } - - if err != nil { - return err - } - } - - return nil -} - -func (l *Logstreamer) FlushRecord() string { - buffer := l.persist - l.persist = "" - return buffer -} - -func (l *Logstreamer) out(str string) { - if len(str) < 1 { - return - } - - if l.record { - l.persist = l.persist + str - } - - if l.prefix == "stdout" { - str = l.colorOkay + l.prefix + l.colorReset + " " + str - } else if l.prefix == "stderr" { - str = l.colorFail + l.prefix + l.colorReset + " " + str - } - - l.Logger.Print(str) -} - -// PrettyStdoutWriter wraps an io.Writer so it can add string -// prefixes to every message it writes to stdout. -type PrettyStdoutWriter struct { - w io.Writer - Prefix string -} - -var _ io.Writer = (*PrettyStdoutWriter)(nil) - -// NewPrettyIoWriter returns an instance of PrettyStdoutWriter -func NewPrettyIoWriter(prefix string, ioWriter io.Writer) *PrettyStdoutWriter { - return &PrettyStdoutWriter{ - w: ioWriter, - Prefix: prefix, - } -} - -func (psw *PrettyStdoutWriter) Write(p []byte) (int, error) { - str := psw.Prefix + string(p) - n, err := psw.w.Write([]byte(str)) - - if err != nil { - return n, err - } - - return len(p), nil -} diff --git a/cli/internal/logstreamer/logstreamer_test.go b/cli/internal/logstreamer/logstreamer_test.go deleted file mode 100644 index 94d8a8283716c..0000000000000 --- a/cli/internal/logstreamer/logstreamer_test.go +++ /dev/null @@ -1,114 +0,0 @@ -// Copyright (c) 2013 Kevin van Zonneveld. All rights reserved. -// Source: https://github.com/kvz/logstreamer -// SPDX-License-Identifier: MIT -package logstreamer - -import ( - "bufio" - "bytes" - "fmt" - "log" - "os" - "os/exec" - "strings" - "testing" -) - -func TestLogstreamerOk(t *testing.T) { - // Create a logger (your app probably already has one) - logger := log.New(os.Stdout, "--> ", log.Ldate|log.Ltime) - - // Setup a streamer that we'll pipe cmd.Stdout to - logStreamerOut := NewLogstreamer(logger, "stdout", false) - defer logStreamerOut.Close() - // Setup a streamer that we'll pipe cmd.Stderr to.
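The deleted logstreamer.go above only ever emits whole lines: Write appends to an internal buffer, OutputLines drains complete lines, and a trailing fragment stays buffered until Flush or Close. A minimal sketch of that contract, assuming the package were still importable at its pre-deletion path:

```go
package main

import (
	"log"
	"os"

	"github.com/vercel/turbo/cli/internal/logstreamer" // import path prior to this deletion
)

func main() {
	logger := log.New(os.Stdout, "--> ", 0)
	streamer := logstreamer.NewLogstreamer(logger, "stdout", false)
	defer streamer.Close()

	streamer.Write([]byte("partial "))      // buffered: no newline yet, nothing logged
	streamer.Write([]byte("line one\n"))    // completes the line: logged once, with prefix
	streamer.Write([]byte("trailing text")) // buffered again
	streamer.Flush()                        // emits the trailing partial line
}
```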
- We want to record/buffer anything that's written to this (3rd argument true) - logStreamerErr := NewLogstreamer(logger, "stderr", true) - defer logStreamerErr.Close() - - // Execute something that fails - cmd := exec.Command( - "ls", - "nonexisting", - ) - cmd.Stderr = logStreamerErr - cmd.Stdout = logStreamerOut - - // Reset any error we recorded - logStreamerErr.FlushRecord() - - // Execute command - err := cmd.Start() - - // Failed to spawn? - if err != nil { - logger.Print("ERROR could not spawn command. ") - } - - // Failed to execute? - err = cmd.Wait() - if err != nil { - fmt.Printf("Good. command finished with %s. %s. \n", err.Error(), logStreamerErr.FlushRecord()) - } else { - t.Fatal("This command should have failed") - } -} - -func TestLogstreamerFlush(t *testing.T) { - const text = "Text without newline" - - var buffer bytes.Buffer - byteWriter := bufio.NewWriter(&buffer) - - logger := log.New(byteWriter, "", 0) - logStreamerOut := NewLogstreamer(logger, "", false) - defer logStreamerOut.Close() - - logStreamerOut.Write([]byte(text)) - logStreamerOut.Flush() - byteWriter.Flush() - - s := strings.TrimSpace(buffer.String()) - - if s != text { - t.Fatalf("Expected '%s', got '%s'.", text, s) - } -} diff --git a/cli/internal/nodes/packagetask.go b/cli/internal/nodes/packagetask.go deleted file mode 100644 index 04549f2eb2984..0000000000000 --- a/cli/internal/nodes/packagetask.go +++ /dev/null @@ -1,66 +0,0 @@ -// Package nodes defines the nodes that are present in the execution graph used by turbo. -package nodes - -import ( - "fmt" - "path/filepath" - "strings" - - "github.com/vercel/turbo/cli/internal/fs" - "github.com/vercel/turbo/cli/internal/fs/hash" - "github.com/vercel/turbo/cli/internal/util" -) - -// PackageTask represents running a particular task in a particular package -type PackageTask struct { - TaskID string - Task string - PackageName string - Pkg *fs.PackageJSON - EnvMode util.EnvMode - TaskDefinition *fs.TaskDefinition - Dir string - Command string - Outputs []string - ExcludedOutputs []string - Hash string -} - -const logDir = ".turbo" - -// RepoRelativeSystemLogFile returns the path from the repo root -// to the log file in system format -func (pt *PackageTask) RepoRelativeSystemLogFile() string { - return filepath.Join(pt.Dir, logDir, logFilename(pt.Task)) -} - -func (pt *PackageTask) packageRelativeSharableLogFile() string { - return strings.Join([]string{logDir, logFilename(pt.Task)}, "/") -} - -func logFilename(taskName string) string { - escapedTaskName := strings.ReplaceAll(taskName, ":", "$colon$") - return fmt.Sprintf("turbo-%v.log", escapedTaskName) -} - -// OutputPrefix returns the prefix to be used for logging and ui for this task -func (pt *PackageTask) OutputPrefix(isSinglePackage bool) string { - if isSinglePackage { - return pt.Task - } - return fmt.Sprintf("%v:%v", pt.PackageName, pt.Task) -} - -// HashableOutputs returns the package-relative globs for files to be considered outputs -// of this task -func (pt *PackageTask) HashableOutputs() hash.TaskOutputs { - inclusionOutputs := []string{pt.packageRelativeSharableLogFile()} - inclusionOutputs = append(inclusionOutputs, pt.TaskDefinition.Outputs.Inclusions...)
- - hashable := hash.TaskOutputs{ - Inclusions: inclusionOutputs, - Exclusions: pt.TaskDefinition.Outputs.Exclusions, - } - hashable.Sort() - return hashable -} diff --git a/cli/internal/nodes/packagetask_test.go b/cli/internal/nodes/packagetask_test.go deleted file mode 100644 index eb880511274ea..0000000000000 --- a/cli/internal/nodes/packagetask_test.go +++ /dev/null @@ -1,29 +0,0 @@ -package nodes - -import ( - "testing" - - "gotest.tools/v3/assert" -) - -func TestLogFilename(t *testing.T) { - testCases := []struct{ input, want string }{ - { - "build", - "turbo-build.log", - }, - { - "build:prod", - "turbo-build$colon$prod.log", - }, - { - "build:prod:extra", - "turbo-build$colon$prod$colon$extra.log", - }, - } - - for _, testCase := range testCases { - got := logFilename(testCase.input) - assert.Equal(t, got, testCase.want) - } -} diff --git a/cli/internal/packagemanager/berry.go b/cli/internal/packagemanager/berry.go deleted file mode 100644 index 668a95ebd4ae3..0000000000000 --- a/cli/internal/packagemanager/berry.go +++ /dev/null @@ -1,112 +0,0 @@ -package packagemanager - -import ( - "fmt" - "strings" - - "github.com/vercel/turbo/cli/internal/fs" - "github.com/vercel/turbo/cli/internal/lockfile" - "github.com/vercel/turbo/cli/internal/turbopath" -) - -const berryLockfile = "yarn.lock" - -var nodejsBerry = PackageManager{ - Name: "nodejs-berry", - Slug: "yarn", - Command: "yarn", - Specfile: "package.json", - Lockfile: berryLockfile, - PackageDir: "node_modules", - ArgSeparator: func(_userArgs []string) []string { return nil }, - - getWorkspaceGlobs: func(rootpath turbopath.AbsoluteSystemPath) ([]string, error) { - pkg, err := fs.ReadPackageJSON(rootpath.UntypedJoin("package.json")) - if err != nil { - return nil, fmt.Errorf("package.json: %w", err) - } - if len(pkg.Workspaces) == 0 { - return nil, fmt.Errorf("package.json: no workspaces found. 
Turborepo requires Yarn workspaces to be defined in the root package.json") - } - return pkg.Workspaces, nil - }, - - getWorkspaceIgnores: func(pm PackageManager, rootpath turbopath.AbsoluteSystemPath) ([]string, error) { - // Matches upstream values: - // Key code: https://github.com/yarnpkg/berry/blob/8e0c4b897b0881878a1f901230ea49b7c8113fbe/packages/yarnpkg-core/sources/Workspace.ts#L64-L70 - return []string{ - "**/node_modules", - "**/.git", - "**/.yarn", - }, nil - }, - - canPrune: func(cwd turbopath.AbsoluteSystemPath) (bool, error) { - return true, nil - }, - - GetLockfileName: func(_ turbopath.AbsoluteSystemPath) string { - return berryLockfile - }, - - GetLockfilePath: func(projectDirectory turbopath.AbsoluteSystemPath) turbopath.AbsoluteSystemPath { - return projectDirectory.UntypedJoin(berryLockfile) - }, - - GetLockfileContents: func(projectDirectory turbopath.AbsoluteSystemPath) ([]byte, error) { - return projectDirectory.UntypedJoin(berryLockfile).ReadFile() - }, - - UnmarshalLockfile: func(rootPackageJSON *fs.PackageJSON, contents []byte) (lockfile.Lockfile, error) { - var resolutions map[string]string - if untypedResolutions, ok := rootPackageJSON.RawJSON["resolutions"]; ok { - if untypedResolutions, ok := untypedResolutions.(map[string]interface{}); ok { - resolutions = make(map[string]string, len(untypedResolutions)) - for resolution, reference := range untypedResolutions { - if reference, ok := reference.(string); ok { - resolutions[resolution] = reference - } - } - } - } - - return lockfile.DecodeBerryLockfile(contents, resolutions) - }, - - prunePatches: func(pkgJSON *fs.PackageJSON, patches []turbopath.AnchoredUnixPath) error { - pkgJSON.Mu.Lock() - defer pkgJSON.Mu.Unlock() - - keysToDelete := []string{} - resolutions, ok := pkgJSON.RawJSON["resolutions"].(map[string]interface{}) - if !ok { - return fmt.Errorf("Invalid structure for resolutions field in package.json") - } - - for dependency, untypedPatch := range resolutions { - inPatches := false - patch, ok := untypedPatch.(string) - if !ok { - return fmt.Errorf("Expected value of %s in package.json to be a string, got %v", dependency, untypedPatch) - } - - for _, wantedPatch := range patches { - if strings.HasSuffix(patch, wantedPatch.ToString()) { - inPatches = true - break - } - } - - // We only want to delete unused patches as they are the only ones that throw if unused - if !inPatches && strings.HasSuffix(patch, ".patch") { - keysToDelete = append(keysToDelete, dependency) - } - } - - for _, key := range keysToDelete { - delete(resolutions, key) - } - - return nil - }, -} diff --git a/cli/internal/packagemanager/bun.go b/cli/internal/packagemanager/bun.go deleted file mode 100644 index 4f908ef2f3d2c..0000000000000 --- a/cli/internal/packagemanager/bun.go +++ /dev/null @@ -1,81 +0,0 @@ -package packagemanager - -import ( - "fmt" - "os/exec" - - "github.com/vercel/turbo/cli/internal/fs" - "github.com/vercel/turbo/cli/internal/lockfile" - "github.com/vercel/turbo/cli/internal/turbopath" -) - -const command = "bun" -const bunLockfile = "bun.lockb" - -func getLockfilePath(rootPath turbopath.AbsoluteSystemPath) turbopath.AbsoluteSystemPath { - return rootPath.UntypedJoin(bunLockfile) -} - -var nodejsBun = PackageManager{ - Name: "nodejs-bun", - Slug: "bun", - Command: command, - Specfile: "package.json", - Lockfile: bunLockfile, - PackageDir: "node_modules", - ArgSeparator: func(userArgs []string) []string { - // Bun swallows a single "--" token. 
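Stepping back to the berry.go deletion just above: its prunePatches keeps a `resolutions` entry unless the entry ends in `.patch` and matches none of the patches that survive pruning. A self-contained sketch of that suffix rule; the helper name and sample data are illustrative, not turbo's:

```go
package main

import (
	"fmt"
	"strings"
)

// pruneUnusedPatches mirrors the matching rule in the deleted berry.go
// prunePatches: a resolution is dropped only when it points at a .patch
// file that is not in the kept set. The function name is hypothetical.
func pruneUnusedPatches(resolutions map[string]string, kept []string) {
	for dependency, patch := range resolutions {
		inKept := false
		for _, want := range kept {
			if strings.HasSuffix(patch, want) {
				inKept = true
				break
			}
		}
		// Only unused patches are deleted, since Yarn errors on unused patches.
		if !inKept && strings.HasSuffix(patch, ".patch") {
			delete(resolutions, dependency)
		}
	}
}

func main() {
	// Hypothetical resolutions map from a root package.json.
	resolutions := map[string]string{
		"lodash@^4.17.21": "patch:lodash@npm:4.17.21#.yarn/patches/lodash.patch",
		"is-odd@^3.0.1":   "patch:is-odd@npm:3.0.1#.yarn/patches/is-odd.patch",
		"left-pad@^1.3.0": "npm:1.3.0", // not a .patch entry, always kept
	}
	pruneUnusedPatches(resolutions, []string{".yarn/patches/lodash.patch"})
	fmt.Println(resolutions) // is-odd entry removed; lodash and left-pad kept
}
```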
If the user is passing "--", we need - // to prepend our own so that the user's doesn't get swallowed. If they are not - // passing their own, we don't need the "--" token and can avoid the warning. - for _, arg := range userArgs { - if arg == "--" { - return []string{"--"} - } - } - return nil - }, - - getWorkspaceGlobs: func(rootpath turbopath.AbsoluteSystemPath) ([]string, error) { - pkg, err := fs.ReadPackageJSON(rootpath.UntypedJoin("package.json")) - if err != nil { - return nil, fmt.Errorf("package.json: %w", err) - } - if len(pkg.Workspaces) == 0 { - return nil, fmt.Errorf("package.json: no workspaces found. Turborepo requires Bun workspaces to be defined in the root package.json") - } - return pkg.Workspaces, nil - }, - - getWorkspaceIgnores: func(pm PackageManager, rootpath turbopath.AbsoluteSystemPath) ([]string, error) { - // Matches upstream values: - // Key code: https://github.com/oven-sh/bun/blob/f267c1d097923a2d2992f9f60a6dd365fe706512/src/install/lockfile.zig#L3057 - return []string{ - "**/node_modules", - "**/.git", - }, nil - }, - - canPrune: func(cwd turbopath.AbsoluteSystemPath) (bool, error) { - return false, nil - }, - - GetLockfileName: func(rootPath turbopath.AbsoluteSystemPath) string { - return bunLockfile - }, - - GetLockfilePath: func(rootPath turbopath.AbsoluteSystemPath) turbopath.AbsoluteSystemPath { - return getLockfilePath(rootPath) - }, - - GetLockfileContents: func(projectDirectory turbopath.AbsoluteSystemPath) ([]byte, error) { - lockfilePath := getLockfilePath(projectDirectory) - cmd := exec.Command(command, lockfilePath.ToString()) - cmd.Dir = projectDirectory.ToString() - - return cmd.Output() - }, - - UnmarshalLockfile: func(_rootPackageJSON *fs.PackageJSON, contents []byte) (lockfile.Lockfile, error) { - return lockfile.DecodeBunLockfile(contents) - }, -} diff --git a/cli/internal/packagemanager/fixtures/package.json b/cli/internal/packagemanager/fixtures/package.json deleted file mode 100644 index 6b27f7ce1b17a..0000000000000 --- a/cli/internal/packagemanager/fixtures/package.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "name": "fixture", - "workspaces": [ - "apps/*", - "packages/*" - ] -} diff --git a/cli/internal/packagemanager/fixtures/pnpm-patches.json b/cli/internal/packagemanager/fixtures/pnpm-patches.json deleted file mode 100644 index f772bc35ce483..0000000000000 --- a/cli/internal/packagemanager/fixtures/pnpm-patches.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "name": "turborepo-prune-removes-patched", - "version": "1.0.0", - "packageManager": "pnpm@7.15.0", - "workspaces": ["packages/*"], - "pnpm": { - "patchedDependencies": { - "is-odd@3.0.1": "patches/is-odd@3.0.1.patch" - } - } -} diff --git a/cli/internal/packagemanager/fixtures/pnpm-workspace.yaml b/cli/internal/packagemanager/fixtures/pnpm-workspace.yaml deleted file mode 100644 index 7fbb7701da342..0000000000000 --- a/cli/internal/packagemanager/fixtures/pnpm-workspace.yaml +++ /dev/null @@ -1,3 +0,0 @@ -packages: - - "packages/*" - - "!packages/skip" diff --git a/cli/internal/packagemanager/infer_root.go b/cli/internal/packagemanager/infer_root.go deleted file mode 100644 index 7920f122a2439..0000000000000 --- a/cli/internal/packagemanager/infer_root.go +++ /dev/null @@ -1,146 +0,0 @@ -package packagemanager - -import ( - "path/filepath" - - "github.com/vercel/turbo/cli/internal/doublestar" - "github.com/vercel/turbo/cli/internal/turbopath" -) - -// PackageType represents the mode in which turbo is running. -type PackageType string - -const ( - // Single is for single-package mode. 
- Single PackageType = "single" - // Multi is for monorepo mode. - Multi PackageType = "multi" -) - -func candidateDirectoryWorkspaceGlobs(directory turbopath.AbsoluteSystemPath) []string { - packageManagers := []PackageManager{ - nodejsNpm, - nodejsPnpm, - } - - for _, pm := range packageManagers { - globs, err := pm.getWorkspaceGlobs(directory) - if err != nil { - // Try the other package manager workspace formats. - continue - } - - return globs - } - - return nil -} - -func isOneOfTheWorkspaces(globs []string, nearestPackageJSONDir turbopath.AbsoluteSystemPath, currentPackageJSONDir turbopath.AbsoluteSystemPath) bool { - for _, glob := range globs { - globpattern := currentPackageJSONDir.UntypedJoin(filepath.FromSlash(glob)).ToString() - match, _ := doublestar.PathMatch(globpattern, nearestPackageJSONDir.ToString()) - if match { - return true - } - } - - return false -} - -// InferRoot identifies which directory we should treat as the root, and which mode -// turbo should be in when operating at that directory. -func InferRoot(directory turbopath.AbsoluteSystemPath) (turbopath.AbsoluteSystemPath, PackageType) { - // Go doesn't have iterators, so this is very not-elegant. - - // Scenarios: - // 0. Has a turbo.json but doesn't have a peer package.json. directory + multi - // 1. Nearest turbo.json, check peer package.json/pnpm-workspace.yaml. - // A. Has workspaces, multi package mode. - // B. No workspaces, single package mode. - // 2. If no turbo.json find the closest package.json parent. - // A. No parent package.json, default to current behavior. - // B. Nearest package.json defines workspaces. Can't be in single-package mode, so we bail. (This could be changed in the future.) - // 3. Closest package.json does not define workspaces. Traverse toward the root looking for package.jsons. - // A. No parent package.json with workspaces. nearestPackageJson + single - // B. Stop at the first one that has workspaces. - // i. If we are one of the workspaces, directory + multi. (This could be changed in the future.) - // ii. If we're not one of the workspaces, nearestPackageJson + single. - - nearestTurboJSON, findTurboJSONErr := directory.Findup("turbo.json") - if nearestTurboJSON == "" || findTurboJSONErr != nil { - // We didn't find a turbo.json. We're in situation 2 or 3. - - // Unroll the first loop for Scenario 2 - nearestPackageJSON, nearestPackageJSONErr := directory.Findup("package.json") - - // If we fail to find any package.json files we aren't in single package mode. - // We let things go through our existing failure paths. - // Scenario 2A. - if nearestPackageJSON == "" || nearestPackageJSONErr != nil { - return directory, Multi - } - - // If we find a package.json which has workspaces we aren't in single package mode. - // We let things go through our existing failure paths. - // Scenario 2B. - if candidateDirectoryWorkspaceGlobs(nearestPackageJSON.Dir()) != nil { - // In a future world we could maybe change this behavior. - // return nearestPackageJson.Dir(), Multi - return directory, Multi - } - - // Scenario 3. - // Find the nearest package.json that has workspaces. - // If found _and_ the nearestPackageJson is one of the workspaces, thatPackageJson + multi. - // Else, nearestPackageJson + single - cursor := nearestPackageJSON.Dir().UntypedJoin("..") - for { - nextPackageJSON, nextPackageJSONErr := cursor.Findup("package.json") - if nextPackageJSON == "" || nextPackageJSONErr != nil { - // We haven't found a parent defining workspaces. 
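InferRoot, shown above, returns both the directory turbo should treat as its root and whether to run in single- or multi-package mode. A usage sketch against the deleted API, using its pre-deletion import paths and a hypothetical directory value:

```go
package main

import (
	"fmt"

	"github.com/vercel/turbo/cli/internal/packagemanager" // import path prior to this deletion
	"github.com/vercel/turbo/cli/internal/turbopath"
)

func main() {
	// Hypothetical invocation directory; in turbo this would be the cwd.
	cwd := turbopath.AbsoluteSystemPath("/repo/apps/web")
	root, mode := packagemanager.InferRoot(cwd)

	switch mode {
	case packagemanager.Single:
		fmt.Printf("single-package mode rooted at %s\n", root)
	case packagemanager.Multi:
		fmt.Printf("monorepo mode rooted at %s\n", root)
	}
}
```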
- // So we're single package mode at nearestPackageJson. - // Scenario 3A. - return nearestPackageJSON.Dir(), Single - } - - // Found a package.json file, see if it has workspaces. - // Workspaces are not allowed to be recursive, so we know what to - // return the moment we find something with workspaces. - globs := candidateDirectoryWorkspaceGlobs(nextPackageJSON.Dir()) - if globs != nil { - if isOneOfTheWorkspaces(globs, nearestPackageJSON.Dir(), nextPackageJSON.Dir()) { - // If it has workspaces, and nearestPackageJson is one of them, we're multi. - // We don't infer in this scenario. - // Scenario 3BI. - // TODO: return nextPackageJson.Dir(), Multi - return directory, Multi - } - - // We found a parent with workspaces, but we're not one of them. - // We choose to operate in single package mode. - // Scenario 3BII - return nearestPackageJSON.Dir(), Single - } - - // Loop around and see if we have another parent. - cursor = nextPackageJSON.Dir().UntypedJoin("..") - } - } else { - // If there is no sibling package.json we do no inference. - siblingPackageJSONPath := nearestTurboJSON.Dir().UntypedJoin("package.json") - if !siblingPackageJSONPath.Exists() { - // We do no inference. - // Scenario 0 - return directory, Multi - } - - if candidateDirectoryWorkspaceGlobs(nearestTurboJSON.Dir()) != nil { - // Scenario 1A. - return nearestTurboJSON.Dir(), Multi - } - - // Scenario 1B. - return nearestTurboJSON.Dir(), Single - } -} diff --git a/cli/internal/packagemanager/infer_root_test.go b/cli/internal/packagemanager/infer_root_test.go deleted file mode 100644 index 2e37a80440638..0000000000000 --- a/cli/internal/packagemanager/infer_root_test.go +++ /dev/null @@ -1,347 +0,0 @@ -package packagemanager - -import ( - "reflect" - "testing" - - "github.com/vercel/turbo/cli/internal/turbopath" - "gotest.tools/v3/assert" -) - -func TestInferRoot(t *testing.T) { - type file struct { - path turbopath.AnchoredSystemPath - content []byte - } - - tests := []struct { - name string - fs []file - executionDirectory turbopath.AnchoredSystemPath - rootPath turbopath.AnchoredSystemPath - packageMode PackageType - }{ - // Scenario 0 - { - name: "turbo.json at current dir, no package.json", - fs: []file{ - {path: turbopath.AnchoredUnixPath("turbo.json").ToSystemPath()}, - }, - executionDirectory: turbopath.AnchoredUnixPath("").ToSystemPath(), - rootPath: turbopath.AnchoredUnixPath("").ToSystemPath(), - packageMode: Multi, - }, - { - name: "turbo.json at parent dir, no package.json", - fs: []file{ - {path: turbopath.AnchoredUnixPath("execution/path/subdir/.file").ToSystemPath()}, - {path: turbopath.AnchoredUnixPath("turbo.json").ToSystemPath()}, - }, - executionDirectory: turbopath.AnchoredUnixPath("execution/path/subdir").ToSystemPath(), - // This is "no inference" - rootPath: turbopath.AnchoredUnixPath("execution/path/subdir").ToSystemPath(), - packageMode: Multi, - }, - // Scenario 1A - { - name: "turbo.json at current dir, has package.json, has workspaces key", - fs: []file{ - {path: turbopath.AnchoredUnixPath("turbo.json").ToSystemPath()}, - { - path: turbopath.AnchoredUnixPath("package.json").ToSystemPath(), - content: []byte("{ \"workspaces\": [ \"exists\" ] }"), - }, - }, - executionDirectory: turbopath.AnchoredUnixPath("").ToSystemPath(), - rootPath: turbopath.AnchoredUnixPath("").ToSystemPath(), - packageMode: Multi, - }, - { - name: "turbo.json at parent dir, has package.json, has workspaces key", - fs: []file{ - {path: turbopath.AnchoredUnixPath("execution/path/subdir/.file").ToSystemPath()}, - {path: 
turbopath.AnchoredUnixPath("turbo.json").ToSystemPath()}, - { - path: turbopath.AnchoredUnixPath("package.json").ToSystemPath(), - content: []byte("{ \"workspaces\": [ \"exists\" ] }"), - }, - }, - executionDirectory: turbopath.AnchoredUnixPath("execution/path/subdir").ToSystemPath(), - rootPath: turbopath.AnchoredUnixPath("").ToSystemPath(), - packageMode: Multi, - }, - { - name: "turbo.json at parent dir, has package.json, has pnpm workspaces", - fs: []file{ - {path: turbopath.AnchoredUnixPath("execution/path/subdir/.file").ToSystemPath()}, - {path: turbopath.AnchoredUnixPath("turbo.json").ToSystemPath()}, - { - path: turbopath.AnchoredUnixPath("package.json").ToSystemPath(), - content: []byte("{}"), - }, - { - path: turbopath.AnchoredUnixPath("pnpm-workspace.yaml").ToSystemPath(), - content: []byte("packages:\n - docs"), - }, - }, - executionDirectory: turbopath.AnchoredUnixPath("execution/path/subdir").ToSystemPath(), - rootPath: turbopath.AnchoredUnixPath("").ToSystemPath(), - packageMode: Multi, - }, - // Scenario 1A aware of the weird thing we do for packages. - { - name: "turbo.json at current dir, has package.json, has packages key", - fs: []file{ - {path: turbopath.AnchoredUnixPath("turbo.json").ToSystemPath()}, - { - path: turbopath.AnchoredUnixPath("package.json").ToSystemPath(), - content: []byte("{ \"packages\": [ \"exists\" ] }"), - }, - }, - executionDirectory: turbopath.AnchoredUnixPath("").ToSystemPath(), - rootPath: turbopath.AnchoredUnixPath("").ToSystemPath(), - packageMode: Single, - }, - { - name: "turbo.json at parent dir, has package.json, has packages key", - fs: []file{ - {path: turbopath.AnchoredUnixPath("execution/path/subdir/.file").ToSystemPath()}, - {path: turbopath.AnchoredUnixPath("turbo.json").ToSystemPath()}, - { - path: turbopath.AnchoredUnixPath("package.json").ToSystemPath(), - content: []byte("{ \"packages\": [ \"exists\" ] }"), - }, - }, - executionDirectory: turbopath.AnchoredUnixPath("execution/path/subdir").ToSystemPath(), - rootPath: turbopath.AnchoredUnixPath("").ToSystemPath(), - packageMode: Single, - }, - // Scenario 1A aware of the the weird thing we do for packages when both methods of specification exist. 
- { - name: "turbo.json at current dir, has package.json, has workspace and packages key", - fs: []file{ - {path: turbopath.AnchoredUnixPath("turbo.json").ToSystemPath()}, - { - path: turbopath.AnchoredUnixPath("package.json").ToSystemPath(), - content: []byte("{ \"workspaces\": [ \"clobbered\" ], \"packages\": [ \"exists\" ] }"), - }, - }, - executionDirectory: turbopath.AnchoredUnixPath("").ToSystemPath(), - rootPath: turbopath.AnchoredUnixPath("").ToSystemPath(), - packageMode: Multi, - }, - { - name: "turbo.json at parent dir, has package.json, has workspace and packages key", - fs: []file{ - {path: turbopath.AnchoredUnixPath("execution/path/subdir/.file").ToSystemPath()}, - {path: turbopath.AnchoredUnixPath("turbo.json").ToSystemPath()}, - { - path: turbopath.AnchoredUnixPath("package.json").ToSystemPath(), - content: []byte("{ \"workspaces\": [ \"clobbered\" ], \"packages\": [ \"exists\" ] }"), - }, - }, - executionDirectory: turbopath.AnchoredUnixPath("execution/path/subdir").ToSystemPath(), - rootPath: turbopath.AnchoredUnixPath("").ToSystemPath(), - packageMode: Multi, - }, - // Scenario 1B - { - name: "turbo.json at current dir, has package.json, no workspaces", - fs: []file{ - {path: turbopath.AnchoredUnixPath("turbo.json").ToSystemPath()}, - { - path: turbopath.AnchoredUnixPath("package.json").ToSystemPath(), - content: []byte("{}"), - }, - }, - executionDirectory: turbopath.AnchoredUnixPath("").ToSystemPath(), - rootPath: turbopath.AnchoredUnixPath("").ToSystemPath(), - packageMode: Single, - }, - { - name: "turbo.json at parent dir, has package.json, no workspaces", - fs: []file{ - {path: turbopath.AnchoredUnixPath("execution/path/subdir/.file").ToSystemPath()}, - {path: turbopath.AnchoredUnixPath("turbo.json").ToSystemPath()}, - { - path: turbopath.AnchoredUnixPath("package.json").ToSystemPath(), - content: []byte("{}"), - }, - }, - executionDirectory: turbopath.AnchoredUnixPath("execution/path/subdir").ToSystemPath(), - rootPath: turbopath.AnchoredUnixPath("").ToSystemPath(), - packageMode: Single, - }, - { - name: "turbo.json at parent dir, has package.json, no workspaces, includes pnpm", - fs: []file{ - {path: turbopath.AnchoredUnixPath("execution/path/subdir/.file").ToSystemPath()}, - {path: turbopath.AnchoredUnixPath("turbo.json").ToSystemPath()}, - { - path: turbopath.AnchoredUnixPath("package.json").ToSystemPath(), - content: []byte("{}"), - }, - { - path: turbopath.AnchoredUnixPath("pnpm-workspace.yaml").ToSystemPath(), - content: []byte(""), - }, - }, - executionDirectory: turbopath.AnchoredUnixPath("execution/path/subdir").ToSystemPath(), - rootPath: turbopath.AnchoredUnixPath("").ToSystemPath(), - packageMode: Single, - }, - // Scenario 2A - { - name: "no turbo.json, no package.json at current", - fs: []file{}, - executionDirectory: turbopath.AnchoredUnixPath("").ToSystemPath(), - rootPath: turbopath.AnchoredUnixPath("").ToSystemPath(), - packageMode: Multi, - }, - { - name: "no turbo.json, no package.json at parent", - fs: []file{ - {path: turbopath.AnchoredUnixPath("execution/path/subdir/.file").ToSystemPath()}, - }, - executionDirectory: turbopath.AnchoredUnixPath("execution/path/subdir").ToSystemPath(), - rootPath: turbopath.AnchoredUnixPath("execution/path/subdir").ToSystemPath(), - packageMode: Multi, - }, - // Scenario 2B - { - name: "no turbo.json, has package.json with workspaces at current", - fs: []file{ - { - path: turbopath.AnchoredUnixPath("package.json").ToSystemPath(), - content: []byte("{ \"workspaces\": [ \"exists\" ] }"), - }, - }, - 
executionDirectory: turbopath.AnchoredUnixPath("").ToSystemPath(), - rootPath: turbopath.AnchoredUnixPath("").ToSystemPath(), - packageMode: Multi, - }, - { - name: "no turbo.json, has package.json with workspaces at parent", - fs: []file{ - {path: turbopath.AnchoredUnixPath("execution/path/subdir/.file").ToSystemPath()}, - { - path: turbopath.AnchoredUnixPath("package.json").ToSystemPath(), - content: []byte("{ \"workspaces\": [ \"exists\" ] }"), - }, - }, - executionDirectory: turbopath.AnchoredUnixPath("execution/path/subdir").ToSystemPath(), - rootPath: turbopath.AnchoredUnixPath("execution/path/subdir").ToSystemPath(), - packageMode: Multi, - }, - { - name: "no turbo.json, has package.json with pnpm workspaces at parent", - fs: []file{ - {path: turbopath.AnchoredUnixPath("execution/path/subdir/.file").ToSystemPath()}, - { - path: turbopath.AnchoredUnixPath("package.json").ToSystemPath(), - content: []byte("{ \"workspaces\": [ \"exists\" ] }"), - }, - { - path: turbopath.AnchoredUnixPath("pnpm-workspace.yaml").ToSystemPath(), - content: []byte("packages:\n - docs"), - }, - }, - executionDirectory: turbopath.AnchoredUnixPath("execution/path/subdir").ToSystemPath(), - rootPath: turbopath.AnchoredUnixPath("execution/path/subdir").ToSystemPath(), - packageMode: Multi, - }, - // Scenario 3A - { - name: "no turbo.json, lots of package.json files but no workspaces", - fs: []file{ - { - path: turbopath.AnchoredUnixPath("package.json").ToSystemPath(), - content: []byte("{}"), - }, - { - path: turbopath.AnchoredUnixPath("one/package.json").ToSystemPath(), - content: []byte("{}"), - }, - { - path: turbopath.AnchoredUnixPath("one/two/package.json").ToSystemPath(), - content: []byte("{}"), - }, - { - path: turbopath.AnchoredUnixPath("one/two/three/package.json").ToSystemPath(), - content: []byte("{}"), - }, - }, - executionDirectory: turbopath.AnchoredUnixPath("one/two/three").ToSystemPath(), - rootPath: turbopath.AnchoredUnixPath("one/two/three").ToSystemPath(), - packageMode: Single, - }, - // Scenario 3BI - { - name: "no turbo.json, lots of package.json files, and a workspace at the root that matches execution directory", - fs: []file{ - { - path: turbopath.AnchoredUnixPath("package.json").ToSystemPath(), - content: []byte("{ \"workspaces\": [ \"one/two/three\" ] }"), - }, - { - path: turbopath.AnchoredUnixPath("one/package.json").ToSystemPath(), - content: []byte("{}"), - }, - { - path: turbopath.AnchoredUnixPath("one/two/package.json").ToSystemPath(), - content: []byte("{}"), - }, - { - path: turbopath.AnchoredUnixPath("one/two/three/package.json").ToSystemPath(), - content: []byte("{}"), - }, - }, - executionDirectory: turbopath.AnchoredUnixPath("one/two/three").ToSystemPath(), - rootPath: turbopath.AnchoredUnixPath("one/two/three").ToSystemPath(), - packageMode: Multi, - }, - // Scenario 3BII - { - name: "no turbo.json, lots of package.json files, and a workspace at the root that matches execution directory", - fs: []file{ - { - path: turbopath.AnchoredUnixPath("package.json").ToSystemPath(), - content: []byte("{ \"workspaces\": [ \"does-not-exist\" ] }"), - }, - { - path: turbopath.AnchoredUnixPath("one/package.json").ToSystemPath(), - content: []byte("{}"), - }, - { - path: turbopath.AnchoredUnixPath("one/two/package.json").ToSystemPath(), - content: []byte("{}"), - }, - { - path: turbopath.AnchoredUnixPath("one/two/three/package.json").ToSystemPath(), - content: []byte("{}"), - }, - }, - executionDirectory: turbopath.AnchoredUnixPath("one/two/three").ToSystemPath(), - rootPath: 
turbopath.AnchoredUnixPath("one/two/three").ToSystemPath(), - packageMode: Single, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - fsRoot := turbopath.AbsoluteSystemPath(t.TempDir()) - for _, file := range tt.fs { - path := file.path.RestoreAnchor(fsRoot) - assert.NilError(t, path.Dir().MkdirAll(0777)) - assert.NilError(t, path.WriteFile(file.content, 0777)) - } - - turboRoot, packageMode := InferRoot(tt.executionDirectory.RestoreAnchor(fsRoot)) - if !reflect.DeepEqual(turboRoot, tt.rootPath.RestoreAnchor(fsRoot)) { - t.Errorf("InferRoot() turboRoot = %v, want %v", turboRoot, tt.rootPath.RestoreAnchor(fsRoot)) - } - if packageMode != tt.packageMode { - t.Errorf("InferRoot() packageMode = %v, want %v", packageMode, tt.packageMode) - } - }) - } -} diff --git a/cli/internal/packagemanager/npm.go b/cli/internal/packagemanager/npm.go deleted file mode 100644 index 8c082b962213b..0000000000000 --- a/cli/internal/packagemanager/npm.go +++ /dev/null @@ -1,62 +0,0 @@ -package packagemanager - -import ( - "fmt" - - "github.com/vercel/turbo/cli/internal/fs" - "github.com/vercel/turbo/cli/internal/lockfile" - "github.com/vercel/turbo/cli/internal/turbopath" -) - -const npmLockfile = "package-lock.json" - -var nodejsNpm = PackageManager{ - Name: "nodejs-npm", - Slug: "npm", - Command: "npm", - Specfile: "package.json", - Lockfile: npmLockfile, - PackageDir: "node_modules", - ArgSeparator: func(_userArgs []string) []string { return []string{"--"} }, - - getWorkspaceGlobs: func(rootpath turbopath.AbsoluteSystemPath) ([]string, error) { - pkg, err := fs.ReadPackageJSON(rootpath.UntypedJoin("package.json")) - if err != nil { - return nil, fmt.Errorf("package.json: %w", err) - } - if len(pkg.Workspaces) == 0 { - return nil, fmt.Errorf("package.json: no workspaces found. Turborepo requires npm workspaces to be defined in the root package.json") - } - return pkg.Workspaces, nil - }, - - getWorkspaceIgnores: func(pm PackageManager, rootpath turbopath.AbsoluteSystemPath) ([]string, error) { - // Matches upstream values: - // function: https://github.com/npm/map-workspaces/blob/a46503543982cb35f51cc2d6253d4dcc6bca9b32/lib/index.js#L73 - // key code: https://github.com/npm/map-workspaces/blob/a46503543982cb35f51cc2d6253d4dcc6bca9b32/lib/index.js#L90-L96 - // call site: https://github.com/npm/cli/blob/7a858277171813b37d46a032e49db44c8624f78f/lib/workspaces/get-workspaces.js#L14 - return []string{ - "**/node_modules/**", - }, nil - }, - - canPrune: func(cwd turbopath.AbsoluteSystemPath) (bool, error) { - return true, nil - }, - - GetLockfileName: func(_ turbopath.AbsoluteSystemPath) string { - return npmLockfile - }, - - GetLockfilePath: func(projectDirectory turbopath.AbsoluteSystemPath) turbopath.AbsoluteSystemPath { - return projectDirectory.UntypedJoin(npmLockfile) - }, - - GetLockfileContents: func(projectDirectory turbopath.AbsoluteSystemPath) ([]byte, error) { - return projectDirectory.UntypedJoin(npmLockfile).ReadFile() - }, - - UnmarshalLockfile: func(_rootPackageJSON *fs.PackageJSON, contents []byte) (lockfile.Lockfile, error) { - return lockfile.DecodeNpmLockfile(contents) - }, -} diff --git a/cli/internal/packagemanager/packagemanager.go b/cli/internal/packagemanager/packagemanager.go deleted file mode 100644 index de5c52587889b..0000000000000 --- a/cli/internal/packagemanager/packagemanager.go +++ /dev/null @@ -1,143 +0,0 @@ -// Adapted from https://github.com/replit/upm -// Copyright (c) 2019 Neoreason d/b/a Repl.it. All rights reserved. 
diff --git a/cli/internal/packagemanager/npm.go b/cli/internal/packagemanager/npm.go
deleted file mode 100644
index 8c082b962213b..0000000000000
--- a/cli/internal/packagemanager/npm.go
+++ /dev/null
@@ -1,62 +0,0 @@
-package packagemanager
-
-import (
-	"fmt"
-
-	"github.com/vercel/turbo/cli/internal/fs"
-	"github.com/vercel/turbo/cli/internal/lockfile"
-	"github.com/vercel/turbo/cli/internal/turbopath"
-)
-
-const npmLockfile = "package-lock.json"
-
-var nodejsNpm = PackageManager{
-	Name:         "nodejs-npm",
-	Slug:         "npm",
-	Command:      "npm",
-	Specfile:     "package.json",
-	Lockfile:     npmLockfile,
-	PackageDir:   "node_modules",
-	ArgSeparator: func(_userArgs []string) []string { return []string{"--"} },
-
-	getWorkspaceGlobs: func(rootpath turbopath.AbsoluteSystemPath) ([]string, error) {
-		pkg, err := fs.ReadPackageJSON(rootpath.UntypedJoin("package.json"))
-		if err != nil {
-			return nil, fmt.Errorf("package.json: %w", err)
-		}
-		if len(pkg.Workspaces) == 0 {
-			return nil, fmt.Errorf("package.json: no workspaces found. Turborepo requires npm workspaces to be defined in the root package.json")
-		}
-		return pkg.Workspaces, nil
-	},
-
-	getWorkspaceIgnores: func(pm PackageManager, rootpath turbopath.AbsoluteSystemPath) ([]string, error) {
-		// Matches upstream values:
-		// function: https://github.com/npm/map-workspaces/blob/a46503543982cb35f51cc2d6253d4dcc6bca9b32/lib/index.js#L73
-		// key code: https://github.com/npm/map-workspaces/blob/a46503543982cb35f51cc2d6253d4dcc6bca9b32/lib/index.js#L90-L96
-		// call site: https://github.com/npm/cli/blob/7a858277171813b37d46a032e49db44c8624f78f/lib/workspaces/get-workspaces.js#L14
-		return []string{
-			"**/node_modules/**",
-		}, nil
-	},
-
-	canPrune: func(cwd turbopath.AbsoluteSystemPath) (bool, error) {
-		return true, nil
-	},
-
-	GetLockfileName: func(_ turbopath.AbsoluteSystemPath) string {
-		return npmLockfile
-	},
-
-	GetLockfilePath: func(projectDirectory turbopath.AbsoluteSystemPath) turbopath.AbsoluteSystemPath {
-		return projectDirectory.UntypedJoin(npmLockfile)
-	},
-
-	GetLockfileContents: func(projectDirectory turbopath.AbsoluteSystemPath) ([]byte, error) {
-		return projectDirectory.UntypedJoin(npmLockfile).ReadFile()
-	},
-
-	UnmarshalLockfile: func(_rootPackageJSON *fs.PackageJSON, contents []byte) (lockfile.Lockfile, error) {
-		return lockfile.DecodeNpmLockfile(contents)
-	},
-}
diff --git a/cli/internal/packagemanager/packagemanager.go b/cli/internal/packagemanager/packagemanager.go
deleted file mode 100644
index de5c52587889b..0000000000000
--- a/cli/internal/packagemanager/packagemanager.go
+++ /dev/null
@@ -1,143 +0,0 @@
-// Adapted from https://github.com/replit/upm
-// Copyright (c) 2019 Neoreason d/b/a Repl.it. All rights reserved.
-// SPDX-License-Identifier: MIT
-
-package packagemanager
-
-import (
-	"fmt"
-	"path/filepath"
-
-	"github.com/pkg/errors"
-	"github.com/vercel/turbo/cli/internal/fs"
-	"github.com/vercel/turbo/cli/internal/globby"
-	"github.com/vercel/turbo/cli/internal/lockfile"
-	"github.com/vercel/turbo/cli/internal/turbopath"
-)
-
-// PackageManager is an abstraction across package managers
-type PackageManager struct {
-	// The descriptive name of the Package Manager.
-	Name string
-
-	// The unique identifier of the Package Manager.
-	Slug string
-
-	// The command used to invoke the Package Manager.
-	Command string
-
-	// The location of the package spec file used by the Package Manager.
-	Specfile string
-
-	// The location of the package lock file used by the Package Manager.
-	Lockfile string
-
-	// The directory in which package assets are stored by the Package Manager.
-	PackageDir string
-
-	// The location of the file that defines the workspace. Empty if workspaces are defined in package.json.
-	WorkspaceConfigurationPath string
-
-	// The separator that the Package Manager uses to identify arguments that
-	// should be passed through to the underlying script.
-	ArgSeparator func(userArgs []string) []string
-
-	// Return the list of workspace globs
-	getWorkspaceGlobs func(rootpath turbopath.AbsoluteSystemPath) ([]string, error)
-
-	// Return the list of workspace ignore globs
-	getWorkspaceIgnores func(pm PackageManager, rootpath turbopath.AbsoluteSystemPath) ([]string, error)
-
-	// Detect if Turbo knows how to produce a pruned workspace for the project
-	canPrune func(cwd turbopath.AbsoluteSystemPath) (bool, error)
-
-	// Gets lockfile name.
-	GetLockfileName func(projectDirectory turbopath.AbsoluteSystemPath) string
-
-	// Gets lockfile path.
-	GetLockfilePath func(projectDirectory turbopath.AbsoluteSystemPath) turbopath.AbsoluteSystemPath
-
-	// Read from disk a lockfile for a package manager.
-	GetLockfileContents func(projectDirectory turbopath.AbsoluteSystemPath) ([]byte, error)
-
-	// Read a lockfile for a given package manager
-	UnmarshalLockfile func(rootPackageJSON *fs.PackageJSON, contents []byte) (lockfile.Lockfile, error)
-
-	// Prune the given pkgJSON to only include references to the given patches
-	prunePatches func(pkgJSON *fs.PackageJSON, patches []turbopath.AnchoredUnixPath) error
-}
-
-var packageManagers = []PackageManager{
-	nodejsYarn,
-	nodejsBerry,
-	nodejsNpm,
-	nodejsPnpm,
-	nodejsPnpm6,
-	nodejsBun,
-}
-
-// GetPackageManager reads the package manager name sent by the Rust side
-func GetPackageManager(name string) (packageManager *PackageManager, err error) {
-	switch name {
-	case "yarn":
-		return &nodejsYarn, nil
-	case "bun":
-		return &nodejsBun, nil
-	case "berry":
-		return &nodejsBerry, nil
-	case "npm":
-		return &nodejsNpm, nil
-	case "pnpm":
-		return &nodejsPnpm, nil
-	case "pnpm6":
-		return &nodejsPnpm6, nil
-	default:
-		return nil, errors.New("Unknown package manager")
-	}
-}
-
-// GetWorkspaces returns the list of package.json files for the current repository.
-func (pm PackageManager) GetWorkspaces(rootpath turbopath.AbsoluteSystemPath) ([]string, error) {
-	globs, err := pm.getWorkspaceGlobs(rootpath)
-	if err != nil {
-		return nil, err
-	}
-
-	justJsons := make([]string, len(globs))
-	for i, space := range globs {
-		justJsons[i] = filepath.Join(space, "package.json")
-	}
-
-	ignores, err := pm.getWorkspaceIgnores(pm, rootpath)
-	if err != nil {
-		return nil, err
-	}
-
-	f, err := globby.GlobFiles(rootpath.ToStringDuringMigration(), justJsons, ignores)
-	if err != nil {
-		return nil, err
-	}
-
-	return f, nil
-}
-
-// GetWorkspaceIgnores returns an array of globs not to search for workspaces.
-func (pm PackageManager) GetWorkspaceIgnores(rootpath turbopath.AbsoluteSystemPath) ([]string, error) {
-	return pm.getWorkspaceIgnores(pm, rootpath)
-}
-
-// ReadLockfile will read the applicable lockfile into memory
-func (pm PackageManager) ReadLockfile(projectDirectory turbopath.AbsoluteSystemPath, rootPackageJSON *fs.PackageJSON) (lockfile.Lockfile, error) {
-	if pm.UnmarshalLockfile == nil {
-		return nil, nil
-	}
-	contents, err := pm.GetLockfileContents(projectDirectory)
-	if err != nil {
-		return nil, fmt.Errorf("reading %s: %w", pm.GetLockfilePath(projectDirectory).ToString(), err)
-	}
-	lf, err := pm.UnmarshalLockfile(rootPackageJSON, contents)
-	if err != nil {
-		return nil, errors.Wrapf(err, "error in %v", pm.GetLockfilePath(projectDirectory).ToString())
-	}
-	return lf, nil
-}
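The deleted PackageManager struct is the seam every manager-specific behavior plugs into. As a reference for how callers consumed it, here is a minimal sketch using only the exported API removed above (the `/repo` path and `main` scaffolding are illustrative):

```go
// Sketch of driving the deleted abstraction: resolve a manager by name,
// discover workspace package.json files, and decode the lockfile.
package main

import (
	"fmt"

	"github.com/vercel/turbo/cli/internal/fs"
	"github.com/vercel/turbo/cli/internal/packagemanager"
	"github.com/vercel/turbo/cli/internal/turbopath"
)

func main() {
	root := turbopath.AbsoluteSystemPath("/repo") // illustrative path

	// The Rust side detected the manager and passed its name over the boundary.
	pm, err := packagemanager.GetPackageManager("npm")
	if err != nil {
		panic(err)
	}

	// Workspace discovery: manager-specific globs minus manager-specific ignores.
	workspaces, err := pm.GetWorkspaces(root)
	if err != nil {
		panic(err)
	}
	fmt.Println(workspaces)

	// Lockfile decoding routes through the same abstraction.
	rootPkg, err := fs.ReadPackageJSON(root.UntypedJoin("package.json"))
	if err != nil {
		panic(err)
	}
	if lf, err := pm.ReadLockfile(root, rootPkg); err == nil {
		_ = lf // lockfile.Lockfile, or nil if the manager defines no unmarshaler
	}
}
```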
diff --git a/cli/internal/packagemanager/packagemanager_test.go b/cli/internal/packagemanager/packagemanager_test.go
deleted file mode 100644
index cb70655545ead..0000000000000
--- a/cli/internal/packagemanager/packagemanager_test.go
+++ /dev/null
@@ -1,165 +0,0 @@
-package packagemanager
-
-import (
-	"os"
-	"path/filepath"
-	"reflect"
-	"sort"
-	"testing"
-
-	"github.com/vercel/turbo/cli/internal/fs"
-	"github.com/vercel/turbo/cli/internal/turbopath"
-	"gotest.tools/v3/assert"
-)
-
-func Test_GetWorkspaces(t *testing.T) {
-	type test struct {
-		name     string
-		pm       PackageManager
-		rootPath turbopath.AbsoluteSystemPath
-		want     []string
-		wantErr  bool
-	}
-
-	cwd, _ := os.Getwd()
-
-	repoRoot, err := fs.GetCwd(cwd)
-	assert.NilError(t, err, "GetCwd")
-	rootPath := map[string]turbopath.AbsoluteSystemPath{
-		"nodejs-bun":   repoRoot.UntypedJoin("../../../examples/with-yarn"),
-		"nodejs-npm":   repoRoot.UntypedJoin("../../../examples/with-yarn"),
-		"nodejs-berry": repoRoot.UntypedJoin("../../../examples/with-yarn"),
-		"nodejs-yarn":  repoRoot.UntypedJoin("../../../examples/with-yarn"),
-		"nodejs-pnpm":  repoRoot.UntypedJoin("../../../examples/basic"),
-		"nodejs-pnpm6": repoRoot.UntypedJoin("../../../examples/basic"),
-	}
-
-	want := map[string][]string{
-		"nodejs-bun": {
-			filepath.ToSlash(filepath.Join(cwd, "../../../examples/with-yarn/apps/docs/package.json")),
-			filepath.ToSlash(filepath.Join(cwd, "../../../examples/with-yarn/apps/web/package.json")),
-			filepath.ToSlash(filepath.Join(cwd, "../../../examples/with-yarn/packages/eslint-config/package.json")),
-			filepath.ToSlash(filepath.Join(cwd, "../../../examples/with-yarn/packages/typescript-config/package.json")),
-			filepath.ToSlash(filepath.Join(cwd, "../../../examples/with-yarn/packages/ui/package.json")),
-		},
-		"nodejs-npm": {
-			filepath.ToSlash(filepath.Join(cwd, "../../../examples/with-yarn/apps/docs/package.json")),
-			filepath.ToSlash(filepath.Join(cwd, "../../../examples/with-yarn/apps/web/package.json")),
-			filepath.ToSlash(filepath.Join(cwd, "../../../examples/with-yarn/packages/eslint-config/package.json")),
-			filepath.ToSlash(filepath.Join(cwd, "../../../examples/with-yarn/packages/typescript-config/package.json")),
-			filepath.ToSlash(filepath.Join(cwd, "../../../examples/with-yarn/packages/ui/package.json")),
-		},
-		"nodejs-berry": {
-			filepath.ToSlash(filepath.Join(cwd, "../../../examples/with-yarn/apps/docs/package.json")),
-			filepath.ToSlash(filepath.Join(cwd, "../../../examples/with-yarn/apps/web/package.json")),
-			filepath.ToSlash(filepath.Join(cwd, "../../../examples/with-yarn/packages/eslint-config/package.json")),
-			filepath.ToSlash(filepath.Join(cwd, "../../../examples/with-yarn/packages/typescript-config/package.json")),
-			filepath.ToSlash(filepath.Join(cwd, "../../../examples/with-yarn/packages/ui/package.json")),
-		},
-		"nodejs-yarn": {
-			filepath.ToSlash(filepath.Join(cwd, "../../../examples/with-yarn/apps/docs/package.json")),
-			filepath.ToSlash(filepath.Join(cwd, "../../../examples/with-yarn/apps/web/package.json")),
-			filepath.ToSlash(filepath.Join(cwd, "../../../examples/with-yarn/packages/eslint-config/package.json")),
-			filepath.ToSlash(filepath.Join(cwd, "../../../examples/with-yarn/packages/typescript-config/package.json")),
-			filepath.ToSlash(filepath.Join(cwd, "../../../examples/with-yarn/packages/ui/package.json")),
-		},
-		"nodejs-pnpm": {
-			filepath.ToSlash(filepath.Join(cwd, "../../../examples/basic/apps/docs/package.json")),
-			filepath.ToSlash(filepath.Join(cwd, "../../../examples/basic/apps/web/package.json")),
-			filepath.ToSlash(filepath.Join(cwd, "../../../examples/basic/packages/eslint-config/package.json")),
-			filepath.ToSlash(filepath.Join(cwd, "../../../examples/basic/packages/typescript-config/package.json")),
-			filepath.ToSlash(filepath.Join(cwd, "../../../examples/basic/packages/ui/package.json")),
-		},
-		"nodejs-pnpm6": {
-			filepath.ToSlash(filepath.Join(cwd, "../../../examples/basic/apps/docs/package.json")),
-			filepath.ToSlash(filepath.Join(cwd, "../../../examples/basic/apps/web/package.json")),
-			filepath.ToSlash(filepath.Join(cwd, "../../../examples/basic/packages/eslint-config/package.json")),
-			filepath.ToSlash(filepath.Join(cwd, "../../../examples/basic/packages/typescript-config/package.json")),
-			filepath.ToSlash(filepath.Join(cwd, "../../../examples/basic/packages/ui/package.json")),
-		},
-	}
-
-	tests := make([]test, len(packageManagers))
-	for i, packageManager := range packageManagers {
-		tests[i] = test{
-			name:     packageManager.Name,
-			pm:       packageManager,
-			rootPath: rootPath[packageManager.Name],
-			want:     want[packageManager.Name],
-			wantErr:  false,
-		}
-	}
-
-	for _, tt := range tests {
-		t.Run(tt.name, func(t *testing.T) {
-			gotWorkspaces, err := tt.pm.GetWorkspaces(tt.rootPath)
-
-			gotToSlash := make([]string, len(gotWorkspaces))
-			for index, workspace := range gotWorkspaces {
-				gotToSlash[index] = filepath.ToSlash(workspace)
-			}
-
-			if (err != nil) != tt.wantErr {
-				t.Errorf("GetWorkspaces() error = %v, wantErr %v", err, tt.wantErr)
-				return
-			}
-			sort.Strings(gotToSlash)
-			if !reflect.DeepEqual(gotToSlash, tt.want) {
-				t.Errorf("GetWorkspaces() = %v, want %v", gotToSlash, tt.want)
-			}
-		})
-	}
-}
-
-func Test_GetWorkspaceIgnores(t *testing.T) {
-	type test struct {
-		name     string
-		pm       PackageManager
-		rootPath turbopath.AbsoluteSystemPath
-		want     []string
-		wantErr  bool
-	}
-
-	cwdRaw, err := os.Getwd()
-	assert.NilError(t, err, "os.Getwd")
-	cwd, err := fs.GetCwd(cwdRaw)
-	assert.NilError(t, err, "GetCwd")
-	want := map[string][]string{
-		"nodejs-bun":   {"**/node_modules", "**/.git"},
-		"nodejs-npm":   {"**/node_modules/**"},
-		"nodejs-berry": {"**/node_modules", "**/.git", "**/.yarn"},
-		"nodejs-yarn":  {"apps/*/node_modules/**", "packages/*/node_modules/**"},
-		"nodejs-pnpm":  {"**/node_modules/**", "**/bower_components/**", "packages/skip"},
-		"nodejs-pnpm6": {"**/node_modules/**", "**/bower_components/**", "packages/skip"},
-	}
-
-	tests := make([]test, len(packageManagers))
-	for i, packageManager := range packageManagers {
-		tests[i] = test{
-			name:     packageManager.Name,
-			pm:       packageManager,
-			rootPath: cwd.UntypedJoin("fixtures"),
-			want:     want[packageManager.Name],
-			wantErr:  false,
-		}
-	}
-
-	for _, tt := range tests {
-		t.Run(tt.name, func(t *testing.T) {
-			gotWorkspaceIgnores, err := tt.pm.GetWorkspaceIgnores(tt.rootPath)
-
-			gotToSlash := make([]string, len(gotWorkspaceIgnores))
-			for index, ignore := range gotWorkspaceIgnores {
-				gotToSlash[index] = filepath.ToSlash(ignore)
-			}
-
-			if (err != nil) != tt.wantErr {
-				t.Errorf("GetWorkspaceIgnores() error = %v, wantErr %v", err, tt.wantErr)
-				return
-			}
-			if !reflect.DeepEqual(gotToSlash, tt.want) {
-				t.Errorf("GetWorkspaceIgnores() = %v, want %v", gotToSlash, tt.want)
-			}
-		})
-	}
-}
diff --git a/cli/internal/packagemanager/pnpm.go b/cli/internal/packagemanager/pnpm.go
deleted file mode 100644
index 96d7382592c6c..0000000000000
--- a/cli/internal/packagemanager/pnpm.go
+++ /dev/null
@@ -1,163 +0,0 @@
-package packagemanager
-
-import (
-	"errors"
-	"fmt"
-	stdfs "io/fs"
-	"strings"
-
-	"github.com/vercel/turbo/cli/internal/fs"
-	"github.com/vercel/turbo/cli/internal/lockfile"
-	"github.com/vercel/turbo/cli/internal/turbopath"
-	"github.com/vercel/turbo/cli/internal/yaml"
-)
-
-const pnpmLockfile = "pnpm-lock.yaml"
-
-// PnpmWorkspaces is a representation of workspace package globs found
-// in pnpm-workspace.yaml
-type PnpmWorkspaces struct {
-	Packages []string `yaml:"packages,omitempty"`
-}
-
-func readPnpmWorkspacePackages(workspaceFile turbopath.AbsoluteSystemPath) ([]string, error) {
-	bytes, err := workspaceFile.ReadFile()
-	if err != nil {
-		return nil, fmt.Errorf("%v: %w", workspaceFile, err)
-	}
-	var pnpmWorkspaces PnpmWorkspaces
-	if err := yaml.Unmarshal(bytes, &pnpmWorkspaces); err != nil {
-		return nil, fmt.Errorf("%v: %w", workspaceFile, err)
-	}
-	return pnpmWorkspaces.Packages, nil
-}
-
-func getPnpmWorkspaceGlobs(rootpath turbopath.AbsoluteSystemPath) ([]string, error) {
-	pkgGlobs, err := readPnpmWorkspacePackages(rootpath.UntypedJoin("pnpm-workspace.yaml"))
-	if err != nil {
-		return nil, err
-	}
-
-	if len(pkgGlobs) == 0 {
-		return nil, fmt.Errorf("pnpm-workspace.yaml: no packages found. Turborepo requires pnpm workspaces and thus packages to be defined in the root pnpm-workspace.yaml")
-	}
-
-	filteredPkgGlobs := []string{}
-	for _, pkgGlob := range pkgGlobs {
-		if !strings.HasPrefix(pkgGlob, "!") {
-			filteredPkgGlobs = append(filteredPkgGlobs, pkgGlob)
-		}
-	}
-	return filteredPkgGlobs, nil
-}
-
-func getPnpmWorkspaceIgnores(pm PackageManager, rootpath turbopath.AbsoluteSystemPath) ([]string, error) {
-	// Matches upstream values:
-	// function: https://github.com/pnpm/pnpm/blob/d99daa902442e0c8ab945143ebaf5cdc691a91eb/packages/find-packages/src/index.ts#L27
-	// key code: https://github.com/pnpm/pnpm/blob/d99daa902442e0c8ab945143ebaf5cdc691a91eb/packages/find-packages/src/index.ts#L30
-	// call site: https://github.com/pnpm/pnpm/blob/d99daa902442e0c8ab945143ebaf5cdc691a91eb/packages/find-workspace-packages/src/index.ts#L32-L39
-	ignores := []string{
-		"**/node_modules/**",
-		"**/bower_components/**",
-	}
-	pkgGlobs, err := readPnpmWorkspacePackages(rootpath.UntypedJoin("pnpm-workspace.yaml"))
-	if err != nil {
-		// If the workspace file doesn't exist we shouldn't error, as we might be a single-package repo
-		if errors.Is(err, stdfs.ErrNotExist) {
-			return ignores, nil
-		}
-		return nil, err
-	}
-	for _, pkgGlob := range pkgGlobs {
-		if strings.HasPrefix(pkgGlob, "!") {
-			ignores = append(ignores, pkgGlob[1:])
-		}
-	}
-	return ignores, nil
-}
-
-var nodejsPnpm = PackageManager{
-	Name:       "nodejs-pnpm",
-	Slug:       "pnpm",
-	Command:    "pnpm",
-	Specfile:   "package.json",
-	Lockfile:   pnpmLockfile,
-	PackageDir: "node_modules",
-	// pnpm v7+ changed their handling of '--'. We no longer need to pass it to pass args to
-	// the script being run, and in fact doing so will cause the '--' to be passed through verbatim,
-	// potentially breaking scripts that aren't expecting it.
-	// We are allowed to use nil here because ArgSeparator already has a type, so it's a typed nil.
-	// This could just as easily be []string{}, but the style guide says to prefer
-	// nil for empty slices.
-	ArgSeparator:               func(_userArgs []string) []string { return nil },
-	WorkspaceConfigurationPath: "pnpm-workspace.yaml",
-
-	getWorkspaceGlobs: getPnpmWorkspaceGlobs,
-
-	getWorkspaceIgnores: getPnpmWorkspaceIgnores,
-
-	canPrune: func(cwd turbopath.AbsoluteSystemPath) (bool, error) {
-		return true, nil
-	},
-
-	GetLockfileName: func(_ turbopath.AbsoluteSystemPath) string {
-		return pnpmLockfile
-	},
-
-	GetLockfilePath: func(projectDirectory turbopath.AbsoluteSystemPath) turbopath.AbsoluteSystemPath {
-		return projectDirectory.UntypedJoin(pnpmLockfile)
-	},
-
-	GetLockfileContents: func(projectDirectory turbopath.AbsoluteSystemPath) ([]byte, error) {
-		return projectDirectory.UntypedJoin(pnpmLockfile).ReadFile()
-	},
-
-	UnmarshalLockfile: func(_rootPackageJSON *fs.PackageJSON, contents []byte) (lockfile.Lockfile, error) {
-		return lockfile.DecodePnpmLockfile(contents)
-	},
-
-	prunePatches: func(pkgJSON *fs.PackageJSON, patches []turbopath.AnchoredUnixPath) error {
-		return pnpmPrunePatches(pkgJSON, patches)
-	},
-}
-
-func pnpmPrunePatches(pkgJSON *fs.PackageJSON, patches []turbopath.AnchoredUnixPath) error {
-	pkgJSON.Mu.Lock()
-	defer pkgJSON.Mu.Unlock()
-
-	keysToDelete := []string{}
-	pnpmConfig, ok := pkgJSON.RawJSON["pnpm"].(map[string]interface{})
-	if !ok {
-		return fmt.Errorf("Invalid structure for pnpm field in package.json")
-	}
-	patchedDependencies, ok := pnpmConfig["patchedDependencies"].(map[string]interface{})
-	if !ok {
-		return fmt.Errorf("Invalid structure for patchedDependencies field in package.json")
-	}
-
-	for dependency, untypedPatch := range patchedDependencies {
-		patch, ok := untypedPatch.(string)
-		if !ok {
-			return fmt.Errorf("Expected only strings in patchedDependencies. Got %v", untypedPatch)
-		}
-
-		inPatches := false
-
-		for _, wantedPatch := range patches {
-			if wantedPatch.ToString() == patch {
-				inPatches = true
-				break
-			}
-		}
-
-		if !inPatches {
-			keysToDelete = append(keysToDelete, dependency)
-		}
-	}
-
-	for _, key := range keysToDelete {
-		delete(patchedDependencies, key)
-	}
-
-	return nil
-}
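The pnpm workspace-glob logic above splits `pnpm-workspace.yaml` entries into positive globs and `!`-prefixed ignores. A standalone sketch of the same parse-and-split, using the public `gopkg.in/yaml.v3` in place of the internal yaml fork (the struct and input here are illustrative):

```go
// Minimal sketch of the pnpm-workspace.yaml handling deleted above.
package main

import (
	"fmt"
	"strings"

	"gopkg.in/yaml.v3"
)

type pnpmWorkspaces struct {
	Packages []string `yaml:"packages,omitempty"`
}

func main() {
	raw := []byte("packages:\n  - \"apps/*\"\n  - \"packages/*\"\n  - \"!**/test/**\"\n")

	var ws pnpmWorkspaces
	if err := yaml.Unmarshal(raw, &ws); err != nil {
		panic(err)
	}

	// Positive globs select workspaces; "!" globs become ignore patterns,
	// mirroring getPnpmWorkspaceGlobs / getPnpmWorkspaceIgnores.
	var globs, ignores []string
	for _, g := range ws.Packages {
		if strings.HasPrefix(g, "!") {
			ignores = append(ignores, g[1:])
		} else {
			globs = append(globs, g)
		}
	}
	fmt.Println(globs, ignores) // [apps/* packages/*] [**/test/**]
}
```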
diff --git a/cli/internal/packagemanager/pnpm6.go b/cli/internal/packagemanager/pnpm6.go
deleted file mode 100644
index d19076910283e..0000000000000
--- a/cli/internal/packagemanager/pnpm6.go
+++ /dev/null
@@ -1,50 +0,0 @@
-package packagemanager
-
-import (
-	"github.com/vercel/turbo/cli/internal/fs"
-	"github.com/vercel/turbo/cli/internal/lockfile"
-	"github.com/vercel/turbo/cli/internal/turbopath"
-)
-
-const pnpm6Lockfile = "pnpm-lock.yaml"
-
-// Pnpm6Workspaces is a representation of workspace package globs found
-// in pnpm-workspace.yaml
-type Pnpm6Workspaces struct {
-	Packages []string `yaml:"packages,omitempty"`
-}
-
-var nodejsPnpm6 = PackageManager{
-	Name:                       "nodejs-pnpm6",
-	Slug:                       "pnpm",
-	Command:                    "pnpm",
-	Specfile:                   "package.json",
-	Lockfile:                   pnpm6Lockfile,
-	PackageDir:                 "node_modules",
-	ArgSeparator:               func(_userArgs []string) []string { return []string{"--"} },
-	WorkspaceConfigurationPath: "pnpm-workspace.yaml",
-
-	getWorkspaceGlobs: getPnpmWorkspaceGlobs,
-
-	getWorkspaceIgnores: getPnpmWorkspaceIgnores,
-
-	canPrune: func(cwd turbopath.AbsoluteSystemPath) (bool, error) {
-		return true, nil
-	},
-
-	GetLockfileName: func(_ turbopath.AbsoluteSystemPath) string {
-		return pnpm6Lockfile
-	},
-
-	GetLockfilePath: func(projectDirectory turbopath.AbsoluteSystemPath) turbopath.AbsoluteSystemPath {
-		return projectDirectory.UntypedJoin(pnpm6Lockfile)
-	},
-
-	GetLockfileContents: func(projectDirectory turbopath.AbsoluteSystemPath) ([]byte, error) {
-		return projectDirectory.UntypedJoin(pnpm6Lockfile).ReadFile()
-	},
-
-	UnmarshalLockfile: func(_rootPackageJSON *fs.PackageJSON, contents []byte) (lockfile.Lockfile, error) {
-		return lockfile.DecodePnpmLockfile(contents)
-	},
-}
diff --git a/cli/internal/packagemanager/pnpm_test.go b/cli/internal/packagemanager/pnpm_test.go
deleted file mode 100644
index c05bc4335ce89..0000000000000
--- a/cli/internal/packagemanager/pnpm_test.go
+++ /dev/null
@@ -1,57 +0,0 @@
-package packagemanager
-
-import (
-	"os"
-	"testing"
-
-	"github.com/vercel/turbo/cli/internal/fs"
-	"github.com/vercel/turbo/cli/internal/turbopath"
-	"gotest.tools/v3/assert"
-)
-
-func pnpmPatchesSection(t *testing.T, pkgJSON *fs.PackageJSON) map[string]interface{} {
-	t.Helper()
-	pnpmSection, ok := pkgJSON.RawJSON["pnpm"].(map[string]interface{})
-	assert.Assert(t, ok)
-	patchesSection, ok := pnpmSection["patchedDependencies"].(map[string]interface{})
-	assert.Assert(t, ok)
-	return patchesSection
-}
-
-func getPnpmPackageJSON(t *testing.T) *fs.PackageJSON {
-	t.Helper()
-	rawCwd, err := os.Getwd()
-	assert.NilError(t, err)
-	cwd, err := fs.CheckedToAbsoluteSystemPath(rawCwd)
-	assert.NilError(t, err)
-	pkgJSONPath := cwd.Join("fixtures", "pnpm-patches.json")
-	pkgJSON, err := fs.ReadPackageJSON(pkgJSONPath)
-	assert.NilError(t, err)
-	return pkgJSON
-}
-
-func Test_PnpmPrunePatches_KeepsNecessary(t *testing.T) {
-	pkgJSON := getPnpmPackageJSON(t)
-	initialPatches := pnpmPatchesSection(t, pkgJSON)
-
-	assert.DeepEqual(t, initialPatches, map[string]interface{}{"is-odd@3.0.1": "patches/is-odd@3.0.1.patch"})
-
-	err := pnpmPrunePatches(pkgJSON, []turbopath.AnchoredUnixPath{turbopath.AnchoredUnixPath("patches/is-odd@3.0.1.patch")})
-	assert.NilError(t, err)
-
-	newPatches := pnpmPatchesSection(t, pkgJSON)
-	assert.DeepEqual(t, newPatches, map[string]interface{}{"is-odd@3.0.1": "patches/is-odd@3.0.1.patch"})
-}
-
-func Test_PnpmPrunePatches_RemovesExtra(t *testing.T) {
-	pkgJSON := getPnpmPackageJSON(t)
-	initialPatches := pnpmPatchesSection(t, pkgJSON)
-
-	assert.DeepEqual(t, initialPatches, map[string]interface{}{"is-odd@3.0.1": "patches/is-odd@3.0.1.patch"})
-
-	err := pnpmPrunePatches(pkgJSON, nil)
-	assert.NilError(t, err)
-
-	newPatches := pnpmPatchesSection(t, pkgJSON)
-	assert.DeepEqual(t, newPatches, map[string]interface{}{})
-}
diff --git a/cli/internal/packagemanager/yarn.go b/cli/internal/packagemanager/yarn.go
deleted file mode 100644
index 53241b1194a10..0000000000000
--- a/cli/internal/packagemanager/yarn.go
+++ /dev/null
@@ -1,99 +0,0 @@
-package packagemanager
-
-import (
-	"errors"
-	"fmt"
-	"path/filepath"
-
-	"github.com/vercel/turbo/cli/internal/fs"
-	"github.com/vercel/turbo/cli/internal/lockfile"
-	"github.com/vercel/turbo/cli/internal/turbopath"
-)
-
-// NoWorkspacesFoundError is a custom error used so that upstream implementations can switch on it
-type NoWorkspacesFoundError struct{}
-
-func (e *NoWorkspacesFoundError) Error() string {
-	return "package.json: no workspaces found. Turborepo requires Yarn workspaces to be defined in the root package.json"
-}
-
-const yarnLockfile = "yarn.lock"
-
-var nodejsYarn = PackageManager{
-	Name:       "nodejs-yarn",
-	Slug:       "yarn",
-	Command:    "yarn",
-	Specfile:   "package.json",
-	Lockfile:   yarnLockfile,
-	PackageDir: "node_modules",
-	ArgSeparator: func(userArgs []string) []string {
-		// Yarn warns and swallows a "--" token. If the user is passing "--", we need
-		// to prepend our own so that the user's doesn't get swallowed. If they are not
-		// passing their own, we don't need the "--" token and can avoid the warning.
-		for _, arg := range userArgs {
-			if arg == "--" {
-				return []string{"--"}
-			}
-		}
-		return nil
-	},
-
-	getWorkspaceGlobs: func(rootpath turbopath.AbsoluteSystemPath) ([]string, error) {
-		pkg, err := fs.ReadPackageJSON(rootpath.UntypedJoin("package.json"))
-		if err != nil {
-			return nil, fmt.Errorf("package.json: %w", err)
-		}
-		if len(pkg.Workspaces) == 0 {
-			return nil, &NoWorkspacesFoundError{}
-		}
-		return pkg.Workspaces, nil
-	},
-
-	getWorkspaceIgnores: func(pm PackageManager, rootpath turbopath.AbsoluteSystemPath) ([]string, error) {
-		// function: https://github.com/yarnpkg/yarn/blob/3119382885ea373d3c13d6a846de743eca8c914b/src/config.js#L799
-
-		// Yarn is unique in ignore patterns handling.
-		// The only time it does globbing is for package.json or yarn.json and it scopes the search to each workspace.
-		// For example: `apps/*/node_modules/**/+(package.json|yarn.json)`
-		// The `extglob` `+(package.json|yarn.json)` (from micromatch) after node_modules/** is redundant.
-
-		globs, err := pm.getWorkspaceGlobs(rootpath)
-		if err != nil {
-			// In case of a non-monorepo, the workspaces field is empty and only node_modules in the root should be ignored
-			var e *NoWorkspacesFoundError
-			if errors.As(err, &e) {
-				return []string{"node_modules/**"}, nil
-			}
-
-			return nil, err
-		}
-
-		ignores := make([]string, len(globs))
-
-		for i, glob := range globs {
-			ignores[i] = filepath.Join(glob, "/node_modules/**")
-		}
-
-		return ignores, nil
-	},
-
-	canPrune: func(cwd turbopath.AbsoluteSystemPath) (bool, error) {
-		return true, nil
-	},
-
-	GetLockfileName: func(_ turbopath.AbsoluteSystemPath) string {
-		return yarnLockfile
-	},
-
-	GetLockfilePath: func(projectDirectory turbopath.AbsoluteSystemPath) turbopath.AbsoluteSystemPath {
-		return projectDirectory.UntypedJoin(yarnLockfile)
-	},
-
-	GetLockfileContents: func(projectDirectory turbopath.AbsoluteSystemPath) ([]byte, error) {
-		return projectDirectory.UntypedJoin(yarnLockfile).ReadFile()
-	},
-
-	UnmarshalLockfile: func(_rootPackageJSON *fs.PackageJSON, contents []byte) (lockfile.Lockfile, error) {
-		return lockfile.DecodeYarnLockfile(contents)
-	},
-}
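Yarn's `ArgSeparator` above is the subtlest of the separator hooks: it only re-adds `--` when the user supplied one themselves, because Yarn swallows exactly one `--` token. A standalone sketch of that contract (the function name `yarnArgSeparator` is illustrative; the body mirrors the deleted hook):

```go
// Sketch of the deleted yarn ArgSeparator behavior.
package main

import "fmt"

func yarnArgSeparator(userArgs []string) []string {
	// Yarn swallows one "--"; prepend our own only if the user passed theirs,
	// so that the user's token survives. Otherwise omit it and avoid the warning.
	for _, arg := range userArgs {
		if arg == "--" {
			return []string{"--"}
		}
	}
	return nil
}

func main() {
	fmt.Println(yarnArgSeparator([]string{"--watch"}))       // [] : no separator needed
	fmt.Println(yarnArgSeparator([]string{"--", "--watch"})) // [--] : shield the user's "--"
}
```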
diff --git a/cli/internal/process/child.go b/cli/internal/process/child.go
deleted file mode 100644
index 1c3e6e7d4d92e..0000000000000
--- a/cli/internal/process/child.go
+++ /dev/null
@@ -1,406 +0,0 @@
-package process
-
-/**
- * Code in this file is based on the source code at
- * https://github.com/hashicorp/consul-template/tree/3ea7d99ad8eff17897e0d63dac86d74770170bb8/child/child.go
- *
- * Major changes include removing the ability to restart a child process,
- * requiring a fully-formed exec.Cmd to be passed in, and including cmd.Dir
- * in the description of a child process.
- */
-
-import (
-	"errors"
-	"fmt"
-	"math/rand"
-	"os"
-	"os/exec"
-	"strings"
-	"sync"
-	"syscall"
-	"time"
-
-	"github.com/hashicorp/go-hclog"
-)
-
-func init() {
-	// Seed the default rand Source with current time to produce better random
-	// numbers used with splay
-	rand.Seed(time.Now().UnixNano())
-}
-
-var (
-	// ErrMissingCommand is the error returned when no command is specified
-	// to run.
-	ErrMissingCommand = errors.New("missing command")
-
-	// ExitCodeOK is the default OK exit code.
-	ExitCodeOK = 0
-
-	// ExitCodeError is the default error code returned when the child exits with
-	// an error without a more specific code.
-	ExitCodeError = 127
-)
-
-// Child is a wrapper around a child process which can be used to send signals
-// and manage the process's lifecycle.
-type Child struct {
-	sync.RWMutex
-
-	timeout time.Duration
-
-	killSignal  os.Signal
-	killTimeout time.Duration
-
-	splay time.Duration
-
-	// cmd is the actual child process under management.
-	cmd *exec.Cmd
-
-	// exitCh is the channel where the process's exit code will be returned.
-	exitCh chan int
-
-	// stopLock is the mutex to lock when stopping. stopCh is the circuit breaker
-	// to force-terminate any waiting splays to kill the process now. stopped is
-	// a boolean that tells us if we have previously been stopped.
-	stopLock sync.RWMutex
-	stopCh   chan struct{}
-	stopped  bool
-
-	// whether to set process group id or not (default on)
-	setpgid bool
-
-	Label string
-
-	logger hclog.Logger
-}
-
-// NewInput is input to the NewChild function.
-type NewInput struct {
-	// Cmd is the unstarted, preconfigured command to run
-	Cmd *exec.Cmd
-
-	// Timeout is the maximum amount of time to allow the command to execute. If
-	// set to 0, the command is permitted to run infinitely.
-	Timeout time.Duration
-
-	// KillSignal is the signal to send to gracefully kill this process. This
-	// value may be nil.
-	KillSignal os.Signal
-
-	// KillTimeout is the amount of time to wait for the process to gracefully
-	// terminate before force-killing.
-	KillTimeout time.Duration
-
-	// Splay is the maximum random amount of time to wait before sending signals.
-	// This option helps reduce the thundering herd problem by effectively
-	// sleeping for a random amount of time before sending the signal. This
-	// prevents multiple processes from all signaling at the same time. This value
-	// may be zero (which disables the splay entirely).
-	Splay time.Duration
-
-	// Logger receives debug log lines about the process state and transitions
-	Logger hclog.Logger
-}
-
-// New creates a new child process for management with high-level APIs for
-// sending signals to the child process, restarting the child process, and
-// gracefully terminating the child process.
-func newChild(i NewInput) (*Child, error) {
-	// exec.Command prepends the command to be run to the arguments list, so
-	// we only need the arguments here; it will include the command itself.
-	label := fmt.Sprintf("(%v) %v", i.Cmd.Dir, strings.Join(i.Cmd.Args, " "))
-	child := &Child{
-		cmd:         i.Cmd,
-		timeout:     i.Timeout,
-		killSignal:  i.KillSignal,
-		killTimeout: i.KillTimeout,
-		splay:       i.Splay,
-		stopCh:      make(chan struct{}, 1),
-		setpgid:     true,
-		Label:       label,
-		logger:      i.Logger.Named(label),
-	}
-
-	return child, nil
-}
-
-// ExitCh returns the current exit channel for this child process. This channel
-// may change if the process is restarted, so implementers must not cache this
-// value.
-func (c *Child) ExitCh() <-chan int {
-	c.RLock()
-	defer c.RUnlock()
-	return c.exitCh
-}
-
-// Pid returns the pid of the child process. If no child process exists, 0 is
-// returned.
-func (c *Child) Pid() int {
-	c.RLock()
-	defer c.RUnlock()
-	return c.pid()
-}
-
-// Command returns the human-formatted command with arguments.
-func (c *Child) Command() string {
-	return c.Label
-}
-
-// Start starts and begins execution of the child process. A buffered channel
-// is returned which is where the command's exit code will be returned upon
-// exit. Any errors that occur prior to starting the command will be returned
-// as the second error argument, but any errors returned by the command after
-// execution will be returned as a non-zero value over the exit code channel.
-func (c *Child) Start() error {
-	// log.Printf("[INFO] (child) spawning: %s", c.Command())
-	c.Lock()
-	defer c.Unlock()
-	return c.start()
-}
-
-// Signal sends the signal to the child process, returning any errors that
-// occur.
-func (c *Child) Signal(s os.Signal) error {
-	c.logger.Debug("receiving signal %q", s.String())
-	c.RLock()
-	defer c.RUnlock()
-	return c.signal(s)
-}
-
-// Kill sends the kill signal to the child process and waits for successful
-// termination. If no kill signal is defined, the process is killed with the
-// most aggressive kill signal. If the process does not gracefully stop within
-// the provided KillTimeout, the process is force-killed. If a splay was
-// provided, this function will sleep for a random period of time between 0 and
-// the provided splay value to reduce the thundering herd problem. This function
-// does not return any errors because it guarantees the process will be dead by
-// the return of the function call.
-func (c *Child) Kill() {
-	c.logger.Debug("killing process")
-	c.Lock()
-	defer c.Unlock()
-	c.kill(false)
-}
-
-// Stop behaves almost identically to Kill except it suppresses future processes
-// from being started by this child and it prevents the killing of the child
-// process from sending its value back up the exit channel. This is useful
-// when doing a graceful shutdown of an application.
-func (c *Child) Stop() {
-	c.internalStop(false)
-}
-
-// StopImmediately behaves almost identically to Stop except it does not wait
-// for any random splay if configured. This is used for performing a fast
-// shutdown of consul-template and its children when a kill signal is received.
-func (c *Child) StopImmediately() {
-	c.internalStop(true)
-}
-
-func (c *Child) internalStop(immediately bool) {
-	c.Lock()
-	defer c.Unlock()
-
-	c.stopLock.Lock()
-	defer c.stopLock.Unlock()
-	if c.stopped {
-		return
-	}
-	c.kill(immediately)
-	close(c.stopCh)
-	c.stopped = true
-}
-
-func (c *Child) start() error {
-	setSetpgid(c.cmd, c.setpgid)
-	if err := c.cmd.Start(); err != nil {
-		return err
-	}
-
-	// Create a new exitCh so that previously invoked commands (if any) don't
-	// cause us to exit, and start a goroutine to wait for that process to end.
-	exitCh := make(chan int, 1)
-	go func() {
-		var code int
-		// It's possible that kill is called before we even
-		// manage to get here. Make sure we still have a valid
-		// cmd before waiting on it.
-		c.RLock()
-		var cmd = c.cmd
-		c.RUnlock()
-		var err error
-		if cmd != nil {
-			err = cmd.Wait()
-		}
-		if err == nil {
-			code = ExitCodeOK
-		} else {
-			code = ExitCodeError
-			if exiterr, ok := err.(*exec.ExitError); ok {
-				if status, ok := exiterr.Sys().(syscall.WaitStatus); ok {
-					code = status.ExitStatus()
-				}
-			}
-		}
-
-		// If the child is in the process of killing, do not send a response back
-		// down the exit channel.
-		c.stopLock.RLock()
-		defer c.stopLock.RUnlock()
-		if !c.stopped {
-			select {
-			case <-c.stopCh:
-			case exitCh <- code:
-			}
-		}
-
-		close(exitCh)
-	}()
-
-	c.exitCh = exitCh
-
-	// If a timeout was given, start the timer to wait for the child to exit
-	if c.timeout != 0 {
-		select {
-		case code := <-exitCh:
-			if code != 0 {
-				return fmt.Errorf(
-					"command exited with a non-zero exit status:\n"+
-						"\n"+
-						"  %s\n"+
-						"\n"+
-						"This is assumed to be a failure. Please ensure the command\n"+
-						"exits with a zero exit status.",
-					c.Command(),
-				)
-			}
-		case <-time.After(c.timeout):
-			// Force-kill the process
-			c.stopLock.Lock()
-			defer c.stopLock.Unlock()
-			if c.cmd != nil && c.cmd.Process != nil {
-				c.cmd.Process.Kill()
-			}
-
-			return fmt.Errorf(
-				"command did not exit within %q:\n"+
-					"\n"+
-					"  %s\n"+
-					"\n"+
-					"Commands must exit in a timely manner in order for processing to\n"+
-					"continue. Consider using a process supervisor or utilizing the\n"+
-					"built-in exec mode instead.",
-				c.timeout,
-				c.Command(),
-			)
-		}
-	}
-
-	return nil
-}
-
-func (c *Child) pid() int {
-	if !c.running() {
-		return 0
-	}
-	return c.cmd.Process.Pid
-}
-
-func (c *Child) signal(s os.Signal) error {
-	if !c.running() {
-		return nil
-	}
-
-	sig, ok := s.(syscall.Signal)
-	if !ok {
-		return fmt.Errorf("bad signal: %s", s)
-	}
-	pid := c.cmd.Process.Pid
-	if c.setpgid {
-		// kill takes a negative pid to indicate that you want to use the gpid
-		pid = -(pid)
-	}
-	// cross-platform way to signal a process/process group
-	p, err := os.FindProcess(pid)
-	if err != nil {
-		return err
-	}
-	return p.Signal(sig)
-}
-
-// kill sends the signal to kill the process using the configured signal
-// if set, else the default system signal
-func (c *Child) kill(immediately bool) {
-	if !c.running() {
-		c.logger.Debug("Kill() called but process dead; not waiting for splay.")
-		return
-	} else if immediately {
-		c.logger.Debug("Kill() called but performing immediate shutdown; not waiting for splay.")
-	} else {
-		c.logger.Debug("Kill(%v) called", immediately)
-		select {
-		case <-c.stopCh:
-		case <-c.randomSplay():
-		}
-	}
-
-	var exited bool
-	defer func() {
-		if !exited {
-			c.logger.Debug("PKill")
-			c.cmd.Process.Kill()
-		}
-		c.cmd = nil
-	}()
-
-	if c.killSignal == nil {
-		return
-	}
-
-	if err := c.signal(c.killSignal); err != nil {
-		c.logger.Debug("Kill failed: %s", err)
-		if processNotFoundErr(err) {
-			exited = true // checked in defer
-		}
-		return
-	}
-
-	killCh := make(chan struct{}, 1)
-	go func() {
-		defer close(killCh)
-		c.cmd.Process.Wait()
-	}()
-
-	select {
-	case <-c.stopCh:
-	case <-killCh:
-		exited = true
-	case <-time.After(c.killTimeout):
-		c.logger.Debug("timeout")
-	}
-}
-
-func (c *Child) running() bool {
-	select {
-	case <-c.exitCh:
-		return false
-	default:
-	}
-	return c.cmd != nil && c.cmd.Process != nil
-}
-
-func (c *Child) randomSplay() <-chan time.Time {
-	if c.splay == 0 {
-		return time.After(0)
-	}
-
-	ns := c.splay.Nanoseconds()
-	offset := rand.Int63n(ns)
-	t := time.Duration(offset)
-
-	c.logger.Debug("waiting %.2fs for random splay", t.Seconds())
-
-	return time.After(t)
-}
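The deleted child.go exposes a small lifecycle: construct with `newChild`, `Start`, wait on the buffered `ExitCh`, then `Stop`. Since `newChild` is package-internal, this sketch would only compile inside the deleted `process` package; it mirrors how the tests below drive it:

```go
package process

import (
	"os"
	"os/exec"
	"time"

	"github.com/hashicorp/go-hclog"
)

// runOnce sketches the Child lifecycle (illustrative helper, not deleted code).
func runOnce() (int, error) {
	c, err := newChild(NewInput{
		Cmd:         exec.Command("sh", "-c", "sleep 1"),
		KillSignal:  os.Interrupt,     // graceful signal first
		KillTimeout: 10 * time.Second, // then force-kill after the timeout
		Logger:      hclog.Default(),
	})
	if err != nil {
		return 0, err
	}
	if err := c.Start(); err != nil {
		return 0, err
	}
	code := <-c.ExitCh() // buffered channel delivers the exit code exactly once
	c.Stop()             // idempotent; suppresses further exit notifications
	return code, nil
}
```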
err != nil {
-		t.Fatal(err)
-	}
-	defer c.Stop()
-
-	// For some reason bash doesn't start immediately
-	time.Sleep(fileWaitSleepDelay)
-
-	if err := c.Signal(syscall.SIGUSR1); err != nil {
-		t.Fatal(err)
-	}
-
-	// Give time for the file to flush
-	time.Sleep(fileWaitSleepDelay)
-
-	expected := "one\n"
-	if out.String() != expected {
-		t.Errorf("expected %q to be %q", out.String(), expected)
-	}
-}
-
-func TestStop_childAlreadyDead(t *testing.T) {
-	c := testChild(t)
-	c.cmd = exec.Command("sh", "-c", "exit 1")
-	c.splay = 100 * time.Second
-	c.killSignal = syscall.SIGTERM
-
-	if err := c.Start(); err != nil {
-		t.Fatal(err)
-	}
-
-	// For some reason bash doesn't start immediately
-	time.Sleep(fileWaitSleepDelay)
-
-	killStartTime := time.Now()
-	c.Stop()
-	killEndTime := time.Now()
-
-	if killEndTime.Sub(killStartTime) > fileWaitSleepDelay {
-		t.Error("expected not to wait for splay")
-	}
-}
-
-func TestSignal_noProcess(t *testing.T) {
-	c := testChild(t)
-	if err := c.Signal(syscall.SIGUSR1); err != nil {
-		// Just assert there is no error
-		t.Fatal(err)
-	}
-}
-
-func TestKill_signal(t *testing.T) {
-	c := testChild(t)
-	cmd := exec.Command("sh", "-c", "trap 'echo one; exit' USR1; while true; do sleep 0.2; done")
-	c.killSignal = syscall.SIGUSR1
-
-	out := gatedio.NewByteBuffer()
-	cmd.Stdout = out
-	c.cmd = cmd
-
-	if err := c.Start(); err != nil {
-		t.Fatal(err)
-	}
-	defer c.Stop()
-
-	// For some reason bash doesn't start immediately
-	time.Sleep(fileWaitSleepDelay)
-
-	c.Kill()
-
-	// Give time for the file to flush
-	time.Sleep(fileWaitSleepDelay)
-
-	expected := "one\n"
-	if out.String() != expected {
-		t.Errorf("expected %q to be %q", out.String(), expected)
-	}
-}
-
-func TestKill_noProcess(t *testing.T) {
-	c := testChild(t)
-	c.killSignal = syscall.SIGUSR1
-	c.Kill()
-}
-
-func TestStop_noWaitForSplay(t *testing.T) {
-	c := testChild(t)
-	c.cmd = exec.Command("sh", "-c", "trap 'echo one; exit' USR1; while true; do sleep 0.2; done")
-	c.splay = 100 * time.Second
-	c.killSignal = syscall.SIGUSR1
-
-	out := gatedio.NewByteBuffer()
-	c.cmd.Stdout = out
-
-	if err := c.Start(); err != nil {
-		t.Fatal(err)
-	}
-
-	// For some reason bash doesn't start immediately
-	time.Sleep(fileWaitSleepDelay)
-
-	killStartTime := time.Now()
-	c.StopImmediately()
-	killEndTime := time.Now()
-
-	expected := "one\n"
-	if out.String() != expected {
-		t.Errorf("expected %q to be %q", out.String(), expected)
-	}
-
-	if killEndTime.Sub(killStartTime) > fileWaitSleepDelay {
-		t.Error("expected not to wait for splay")
-	}
-}
-
-func TestSetpgid(t *testing.T) {
-	t.Run("true", func(t *testing.T) {
-		c := testChild(t)
-		c.cmd = exec.Command("sh", "-c", "while true; do sleep 0.2; done")
-		// default, but to be explicit for the test
-		c.setpgid = true
-
-		if err := c.Start(); err != nil {
-			t.Fatal(err)
-		}
-		defer c.Stop()
-
-		// when setpgid is true, the pid and gpid should be the same
-		gpid, err := syscall.Getpgid(c.Pid())
-		if err != nil {
-			t.Fatal("Getpgid error:", err)
-		}
-
-		if c.Pid() != gpid {
-			t.Fatal("pid and gpid should match")
-		}
-	})
-	t.Run("false", func(t *testing.T) {
-		c := testChild(t)
-		c.cmd = exec.Command("sh", "-c", "while true; do sleep 0.2; done")
-		c.setpgid = false
-
-		if err := c.Start(); err != nil {
-			t.Fatal(err)
-		}
-		defer c.Stop()
-
-		// when setpgid is false, the pid and gpid should NOT be the same
-		gpid, err := syscall.Getpgid(c.Pid())
-		if err != nil {
-			t.Fatal("Getpgid error:", err)
-		}
-
-		if c.Pid() == gpid {
-			t.Fatal("pid and gpid should NOT match")
-		}
-	})
-}
diff --git a/cli/internal/process/child_test.go b/cli/internal/process/child_test.go
deleted file mode 100644
index 63dee22c08554..0000000000000
--- a/cli/internal/process/child_test.go
+++ /dev/null
@@ -1,193 +0,0 @@
-package process
-
-/**
- * Code in this file is based on the source code at
- * https://github.com/hashicorp/consul-template/tree/3ea7d99ad8eff17897e0d63dac86d74770170bb8/child/child_test.go
- *
- * Major changes include supporting api changes in child.go and removing
- * tests for reloading, which was removed in child.go
- */
-
-import (
-	"io/ioutil"
-	"os"
-	"os/exec"
-	"strings"
-	"testing"
-	"time"
-
-	"github.com/hashicorp/go-gatedio"
-	"github.com/hashicorp/go-hclog"
-)
-
-const fileWaitSleepDelay = 150 * time.Millisecond
-
-func testChild(t *testing.T) *Child {
-	cmd := exec.Command("echo", "hello", "world")
-	cmd.Stdout = ioutil.Discard
-	cmd.Stderr = ioutil.Discard
-	c, err := newChild(NewInput{
-		Cmd:         cmd,
-		KillSignal:  os.Kill,
-		KillTimeout: 2 * time.Second,
-		Splay:       0 * time.Second,
-		Logger:      hclog.Default(),
-	})
-	if err != nil {
-		t.Fatal(err)
-	}
-	return c
-}
-
-func TestNew(t *testing.T) {
-	stdin := gatedio.NewByteBuffer()
-	stdout := gatedio.NewByteBuffer()
-	stderr := gatedio.NewByteBuffer()
-	command := "echo"
-	args := []string{"hello", "world"}
-	env := []string{"a=b", "c=d"}
-	killSignal := os.Kill
-	killTimeout := fileWaitSleepDelay
-	splay := fileWaitSleepDelay
-
-	cmd := exec.Command(command, args...)
-	cmd.Stdin = stdin
-	cmd.Stderr = stderr
-	cmd.Stdout = stdout
-	cmd.Env = env
-	c, err := newChild(NewInput{
-		Cmd:         cmd,
-		KillSignal:  killSignal,
-		KillTimeout: killTimeout,
-		Splay:       splay,
-		Logger:      hclog.Default(),
-	})
-	if err != nil {
-		t.Fatal(err)
-	}
-
-	if c.killSignal != killSignal {
-		t.Errorf("expected %q to be %q", c.killSignal, killSignal)
-	}
-
-	if c.killTimeout != killTimeout {
-		t.Errorf("expected %q to be %q", c.killTimeout, killTimeout)
-	}
-
-	if c.splay != splay {
-		t.Errorf("expected %q to be %q", c.splay, splay)
-	}
-
-	if c.stopCh == nil {
-		t.Errorf("expected %#v to be", c.stopCh)
-	}
-}
-
-func TestExitCh_noProcess(t *testing.T) {
-	c := testChild(t)
-	ch := c.ExitCh()
-	if ch != nil {
-		t.Errorf("expected %#v to be nil", ch)
-	}
-}
-
-func TestExitCh(t *testing.T) {
-	c := testChild(t)
-	if err := c.Start(); err != nil {
-		t.Fatal(err)
-	}
-	println("Started")
-	defer c.Stop()
-
-	ch := c.ExitCh()
-	if ch == nil {
-		t.Error("expected ch to exist")
-	}
-}
-
-func TestPid_noProcess(t *testing.T) {
-	c := testChild(t)
-	pid := c.Pid()
-	if pid != 0 {
-		t.Errorf("expected %q to be 0", pid)
-	}
-}
-
-func TestPid(t *testing.T) {
-	c := testChild(t)
-	if err := c.Start(); err != nil {
-		t.Fatal(err)
-	}
-	defer c.Stop()
-
-	pid := c.Pid()
-	if pid == 0 {
-		t.Error("expected pid to not be 0")
-	}
-}
-
-func TestStart(t *testing.T) {
-	c := testChild(t)
-
-	// Set our own reader and writer so we can verify they are wired to the child.
-	stdin := gatedio.NewByteBuffer()
-	stdout := gatedio.NewByteBuffer()
-	stderr := gatedio.NewByteBuffer()
-	// Custom env and command
-	env := []string{"a=b", "c=d"}
-	cmd := exec.Command("env")
-	cmd.Stdin = stdin
-	cmd.Stdout = stdout
-	cmd.Stderr = stderr
-	cmd.Env = env
-	c.cmd = cmd
-
-	if err := c.Start(); err != nil {
-		t.Fatal(err)
-	}
-	defer c.Stop()
-
-	select {
-	case <-c.ExitCh():
-	case <-time.After(fileWaitSleepDelay):
-		t.Fatal("process should have exited")
-	}
-
-	output := stdout.String()
-	for _, envVar := range env {
-		if !strings.Contains(output, envVar) {
-			t.Errorf("expected to find %q in %q", envVar, output)
-		}
-	}
-}
-
-func TestKill_noSignal(t *testing.T) {
-	c := testChild(t)
-	c.cmd = exec.Command("sh", "-c", "while true; do sleep 0.2; done")
-	c.killTimeout = 20 * time.Millisecond
-	c.killSignal = nil
-
-	if err := c.Start(); err != nil {
-		t.Fatal(err)
-	}
-	defer c.Stop()
-
-	// For some reason bash doesn't start immediately
-	time.Sleep(fileWaitSleepDelay)
-
-	c.Kill()
-
-	// Give time for the file to flush
-	time.Sleep(fileWaitSleepDelay)
-
-	if c.cmd != nil {
-		t.Errorf("expected cmd to be nil")
-	}
-}
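The TestSetpgid cases above hinge on one Unix fact: with `Setpgid` set, the child becomes its own process-group leader, so its pid equals its pgid and signaling `-pgid` reaches the whole group. A self-contained, Unix-only sketch of that behavior (standard library only; not deleted code):

```go
//go:build !windows

// Sketch of the process-group semantics the deleted setpgid tests assert.
package main

import (
	"fmt"
	"os/exec"
	"syscall"
)

func main() {
	cmd := exec.Command("sleep", "5")
	cmd.SysProcAttr = &syscall.SysProcAttr{Setpgid: true}
	if err := cmd.Start(); err != nil {
		panic(err)
	}
	defer cmd.Process.Kill()

	pgid, err := syscall.Getpgid(cmd.Process.Pid)
	if err != nil {
		panic(err)
	}
	fmt.Println(cmd.Process.Pid == pgid) // true: the child leads its own group

	// Signaling the negative pgid targets every process in the group.
	_ = syscall.Kill(-pgid, syscall.SIGTERM)
}
```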
-func (m *Manager) Exec(cmd *exec.Cmd) error {
-	m.mu.Lock()
-	if m.done {
-		m.mu.Unlock()
-		return ErrClosing
-	}
-
-	child, err := newChild(NewInput{
-		Cmd: cmd,
-		// Run forever by default
-		Timeout: 0,
-		// When it's time to exit, give a 10 second timeout
-		KillTimeout: 10 * time.Second,
-		// Send SIGINT to stop children
-		KillSignal: os.Interrupt,
-		Logger:     m.logger,
-	})
-	if err != nil {
-		return err
-	}
-
-	m.children[child] = struct{}{}
-	m.mu.Unlock()
-	err = child.Start()
-	if err != nil {
-		m.mu.Lock()
-		delete(m.children, child)
-		m.mu.Unlock()
-		return err
-	}
-	err = nil
-	exitCode, ok := <-child.ExitCh()
-	if !ok {
-		err = ErrClosing
-	} else if exitCode != ExitCodeOK {
-		err = &ChildExit{
-			ExitCode: exitCode,
-			Command:  child.Command(),
-		}
-	}
-
-	m.mu.Lock()
-	delete(m.children, child)
-	m.mu.Unlock()
-	return err
-}
-
-// Close sends SIGINT to all child processes if it hasn't been done yet,
-// and in either case blocks until they all exit or timeout
-func (m *Manager) Close() {
-	m.mu.Lock()
-	if m.done {
-		m.mu.Unlock()
-		<-m.doneCh
-		return
-	}
-	wg := sync.WaitGroup{}
-	m.done = true
-	for child := range m.children {
-		child := child
-		wg.Add(1)
-		go func() {
-			child.Stop()
-			wg.Done()
-		}()
-	}
-	m.mu.Unlock()
-	wg.Wait()
-	close(m.doneCh)
-}
diff --git a/cli/internal/process/manager_test.go b/cli/internal/process/manager_test.go
deleted file mode 100644
index fb40ffaca8f0b..0000000000000
--- a/cli/internal/process/manager_test.go
+++ /dev/null
@@ -1,94 +0,0 @@
-package process
-
-import (
-	"errors"
-	"os/exec"
-	"sync"
-	"testing"
-	"time"
-
-	"github.com/hashicorp/go-gatedio"
-	"github.com/hashicorp/go-hclog"
-)
-
-func newManager() *Manager {
-	return NewManager(hclog.Default())
-}
-
-func TestExec_simple(t *testing.T) {
-	mgr := newManager()
-
-	out := gatedio.NewByteBuffer()
-	cmd := exec.Command("env")
-	cmd.Stdout = out
-
-	err := mgr.Exec(cmd)
-	if err != nil {
-		t.Errorf("expected %q to be nil", err)
-	}
-
-	output := out.String()
-	if output == "" {
-		t.Error("expected output from running 'env', got empty string")
-	}
-}
-
-func TestClose(t *testing.T) {
-	mgr := newManager()
-
-	wg := sync.WaitGroup{}
-	tasks := 4
-	errors := make([]error, tasks)
-	start := time.Now()
-	for i := 0; i < tasks; i++ {
-		wg.Add(1)
-		go func(index int) {
-			cmd := exec.Command("sleep", "0.5")
-			err := mgr.Exec(cmd)
-			if err != nil {
-				errors[index] = err
-			}
-			wg.Done()
-		}(i)
-	}
-	// let processes kick off
-	time.Sleep(50 * time.Millisecond)
-	mgr.Close()
-	end := time.Now()
-	wg.Wait()
-	duration := end.Sub(start)
-	if duration >= 500*time.Millisecond {
-		t.Errorf("expected to close, total time was %q", duration)
-	}
-	for _, err := range errors {
-		if err != ErrClosing {
-			t.Errorf("expected manager closing error, found %q", err)
-		}
-	}
-}
-
-func TestClose_alreadyClosed(t *testing.T) {
-	mgr := newManager()
-	mgr.Close()
-
-	// repeated closing does not error
-	mgr.Close()
-
-	err := mgr.Exec(exec.Command("sleep", "1"))
-	if err != ErrClosing {
-		t.Errorf("expected manager closing error, found %q", err)
-	}
-}
-
-func TestExitCode(t *testing.T) {
-	mgr := newManager()
-
-	err := mgr.Exec(exec.Command("ls", "doesnotexist"))
-	exitErr := &ChildExit{}
-	if !errors.As(err, &exitErr) {
-		t.Errorf("expected a ChildExit err, got %q", err)
-	}
-	if exitErr.ExitCode == 0 {
-		t.Error("expected non-zero exit code, got 0")
-	}
-}
diff --git a/cli/internal/process/sys_nix.go b/cli/internal/process/sys_nix.go
deleted file mode 100644
index 0e6c003f4bc6b..0000000000000
--- a/cli/internal/process/sys_nix.go
+++ /dev/null
@@ -1,23 +0,0 @@
-//go:build !windows
-// +build !windows
-
-package process
-
-/**
- * Code in this file is based on the source code at
- * https://github.com/hashicorp/consul-template/tree/3ea7d99ad8eff17897e0d63dac86d74770170bb8/child/sys_nix.go
- */
-
-import (
-	"os/exec"
-	"syscall"
-)
-
-func setSetpgid(cmd *exec.Cmd, value bool) {
-	cmd.SysProcAttr = &syscall.SysProcAttr{Setpgid: value}
-}
-
-func processNotFoundErr(err error) bool {
-	// ESRCH == no such process, ie. already exited
-	return err == syscall.ESRCH
-}
diff --git a/cli/internal/process/sys_windows.go b/cli/internal/process/sys_windows.go
deleted file mode 100644
index c626c22f19579..0000000000000
--- a/cli/internal/process/sys_windows.go
+++ /dev/null
@@ -1,17 +0,0 @@
-//go:build windows
-// +build windows
-
-package process
-
-/**
- * Code in this file is based on the source code at
- * https://github.com/hashicorp/consul-template/tree/3ea7d99ad8eff17897e0d63dac86d74770170bb8/child/sys_windows.go
- */
-
-import "os/exec"
-
-func setSetpgid(cmd *exec.Cmd, value bool) {}
-
-func processNotFoundErr(err error) bool {
-	return false
-}
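The deleted Manager is the public face of this package: `Exec` blocks until the child exits, and `Close` interrupts everything in flight. A sketch of a caller, using only the exported API removed above and the error-handling pattern from its tests:

```go
// Sketch of driving the deleted process.Manager from outside the package.
package main

import (
	"errors"
	"fmt"
	"os/exec"

	"github.com/hashicorp/go-hclog"
	"github.com/vercel/turbo/cli/internal/process"
)

func main() {
	mgr := process.NewManager(hclog.Default())
	defer mgr.Close() // idempotent; blocks until children exit or time out

	err := mgr.Exec(exec.Command("ls", "doesnotexist"))
	exitErr := &process.ChildExit{}
	switch {
	case err == nil:
		fmt.Println("ok")
	case errors.As(err, &exitErr):
		// Non-zero exit codes surface as a typed error with the command label.
		fmt.Printf("child failed: %s (code %d)\n", exitErr.Command, exitErr.ExitCode)
	case errors.Is(err, process.ErrClosing):
		fmt.Println("manager already closing")
	}
}
```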
- // Note: we do not currently attempt to parallelize the graph walking - // (as we do in real execution) - getArgs := func(taskID string) []string { - return rs.ArgsForTask(taskID) - } - - visitorFn := g.GetPackageTaskVisitor(ctx, engine.TaskGraph, rs.Opts.runOpts.FrameworkInference, globalEnvMode, getArgs, base.Logger, execFunc) - execOpts := core.EngineExecutionOptions{ - Concurrency: 1, - Parallel: false, - } - - if errs := engine.Execute(visitorFn, execOpts); len(errs) > 0 { - for _, err := range errs { - base.UI.Error(err.Error()) - } - return errors.New("errors occurred during dry-run graph traversal") - } - - // We walk the graph with no concurrency. - // Populating the cache state is parallelizable. - // Do this _after_ walking the graph. - populateCacheState(turboCache, taskSummaries) - - // Assign the Task Summaries to the main summary - summary.RunSummary.Tasks = taskSummaries - - // The exitCode isn't really used by the Run Summary Close() method for dry runs - // but we pass in a successful value to match Real Runs. - return summary.Close(ctx, 0, g.WorkspaceInfos, base.UI) -} - -func populateCacheState(turboCache cache.Cache, taskSummaries []*runsummary.TaskSummary) { - // We make at most 8 requests at a time for cache state. - maxParallelRequests := 8 - taskCount := len(taskSummaries) - - parallelRequestCount := maxParallelRequests - if taskCount < maxParallelRequests { - parallelRequestCount = taskCount - } - - queue := make(chan int, taskCount) - - wg := &sync.WaitGroup{} - for i := 0; i < parallelRequestCount; i++ { - wg.Add(1) - go func() { - defer wg.Done() - for index := range queue { - task := taskSummaries[index] - itemStatus := turboCache.Exists(task.Hash) - task.CacheSummary = runsummary.NewTaskCacheSummary(itemStatus) - } - }() - } - - for index := range taskSummaries { - queue <- index - } - close(queue) - wg.Wait() -} diff --git a/cli/internal/run/global_hash.go b/cli/internal/run/global_hash.go deleted file mode 100644 index c2c7739b6443b..0000000000000 --- a/cli/internal/run/global_hash.go +++ /dev/null @@ -1,174 +0,0 @@ -package run - -import ( - "fmt" - "path/filepath" - - "github.com/hashicorp/go-hclog" - "github.com/vercel/turbo/cli/internal/env" - "github.com/vercel/turbo/cli/internal/fs" - "github.com/vercel/turbo/cli/internal/fs/hash" - "github.com/vercel/turbo/cli/internal/globby" - "github.com/vercel/turbo/cli/internal/hashing" - "github.com/vercel/turbo/cli/internal/lockfile" - "github.com/vercel/turbo/cli/internal/packagemanager" - "github.com/vercel/turbo/cli/internal/turbopath" - "github.com/vercel/turbo/cli/internal/util" -) - -const _globalCacheKey = "HEY STELLLLLLLAAAAAAAAAAAAA" - -// Variables that we always include -var _defaultEnvVars = []string{ - "VERCEL_ANALYTICS_ID", -} - -// GlobalHashableInputs represents all the things that we use to create the global hash -type GlobalHashableInputs struct { - globalCacheKey string - globalFileHashMap map[turbopath.AnchoredUnixPath]string - rootExternalDepsHash string - env []string - resolvedEnvVars env.DetailedMap - passThroughEnv []string - envMode util.EnvMode - frameworkInference bool - dotEnv turbopath.AnchoredUnixPathArray -} - -// calculateGlobalHash is a transformation of GlobalHashableInputs. -// It's used for the situations where we have an `EnvMode` specified -// as that is not compatible with existing global hashes. 
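[Editor's aside, not part of the patch] populateCacheState above caps cache-existence checks at eight concurrent requests by feeding indexes through a buffered channel to a fixed set of workers. A standalone sketch of that bounded-worker shape (boundedEach is an illustrative name, not a helper from this codebase):

package main

import (
	"fmt"
	"sync"
)

// boundedEach runs fn over n items with at most `workers` goroutines,
// mirroring the queue-plus-WaitGroup shape of populateCacheState.
func boundedEach(n, workers int, fn func(i int)) {
	if workers > n {
		workers = n
	}
	queue := make(chan int, n)
	var wg sync.WaitGroup
	for i := 0; i < workers; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			for idx := range queue {
				fn(idx)
			}
		}()
	}
	for i := 0; i < n; i++ {
		queue <- i
	}
	close(queue)
	wg.Wait()
}

func main() {
	boundedEach(20, 8, func(i int) { fmt.Println("checked cache state for task", i) })
}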
-func calculateGlobalHash(full GlobalHashableInputs) (string, error) { - return fs.HashGlobal(hash.GlobalHashable{ - GlobalCacheKey: full.globalCacheKey, - GlobalFileHashMap: full.globalFileHashMap, - RootExternalDepsHash: full.rootExternalDepsHash, - Env: full.env, - ResolvedEnvVars: full.resolvedEnvVars.All.ToHashable(), - PassThroughEnv: full.passThroughEnv, - EnvMode: full.envMode, - FrameworkInference: full.frameworkInference, - DotEnv: full.dotEnv, - }) -} - -// calculateGlobalHashFromHashableInputs returns a hash string from the GlobalHashableInputs -func calculateGlobalHashFromHashableInputs(full GlobalHashableInputs) (string, error) { - switch full.envMode { - case util.Infer: - if full.passThroughEnv != nil { - // In infer mode, if there is any passThru config (even if it is an empty array) - // we'll hash the whole object, so we can detect changes to that config - // Further, resolve the envMode to the concrete value. - full.envMode = util.Strict - } - - return calculateGlobalHash(full) - case util.Loose: - // Remove the passthroughs from hash consideration if we're explicitly loose. - full.passThroughEnv = nil - return calculateGlobalHash(full) - case util.Strict: - // Collapse `nil` and `[]` in strict mode. - if full.passThroughEnv == nil { - full.passThroughEnv = make([]string, 0) - } - return calculateGlobalHash(full) - default: - panic("unimplemented environment mode") - } -} - -func getGlobalHashInputs( - logger hclog.Logger, - rootpath turbopath.AbsoluteSystemPath, - rootPackageJSON *fs.PackageJSON, - packageManager *packagemanager.PackageManager, - lockFile lockfile.Lockfile, - globalFileDependencies []string, - envAtExecutionStart env.EnvironmentVariableMap, - globalEnv []string, - globalPassThroughEnv []string, - envMode util.EnvMode, - frameworkInference bool, - dotEnv turbopath.AnchoredUnixPathArray, -) (GlobalHashableInputs, error) { - globalHashableEnvVars, err := getGlobalHashableEnvVars(envAtExecutionStart, globalEnv) - if err != nil { - return GlobalHashableInputs{}, err - } - - logger.Debug("global hash env vars", "vars", globalHashableEnvVars.All.Names()) - - // Calculate global file dependencies - globalDeps := make(util.Set) - if len(globalFileDependencies) > 0 { - ignores, err := packageManager.GetWorkspaceIgnores(rootpath) - if err != nil { - return GlobalHashableInputs{}, err - } - - f, err := globby.GlobFiles(rootpath.ToStringDuringMigration(), globalFileDependencies, ignores) - if err != nil { - return GlobalHashableInputs{}, err - } - - for _, val := range f { - globalDeps.Add(val) - } - } - - if lockFile == nil { - // If we don't have lockfile information available, add the specfile and lockfile to global deps - globalDeps.Add(filepath.Join(rootpath.ToStringDuringMigration(), packageManager.Specfile)) - lockfilePath := packageManager.GetLockfilePath(rootpath) - if lockfilePath.Exists() { - globalDeps.Add(lockfilePath.ToString()) - } - } - - // No prefix, global deps already have full paths - globalDepsArray := globalDeps.UnsafeListOfStrings() - globalDepsPaths := make([]turbopath.AnchoredSystemPath, len(globalDepsArray)) - for i, path := range globalDepsArray { - fullyQualifiedPath := turbopath.AbsoluteSystemPathFromUpstream(path) - anchoredPath, err := fullyQualifiedPath.RelativeTo(rootpath) - if err != nil { - return GlobalHashableInputs{}, err - } - - globalDepsPaths[i] = anchoredPath - } - - globalFileHashMap, err := hashing.GetHashesForFiles(rootpath, globalDepsPaths) - if err != nil { - return GlobalHashableInputs{}, fmt.Errorf("error hashing 
files: %w", err) - } - - // Make sure we include specified .env files in the file hash. - // Handled separately because these are not globs! - if len(dotEnv) > 0 { - dotEnvObject, err := hashing.GetHashesForExistingFiles(rootpath, dotEnv.ToSystemPathArray()) - if err != nil { - return GlobalHashableInputs{}, fmt.Errorf("error hashing files: %w", err) - } - - // Add the dotEnv files into the file hash object. - for key, value := range dotEnvObject { - globalFileHashMap[key] = value - } - } - - return GlobalHashableInputs{ - globalCacheKey: _globalCacheKey, - globalFileHashMap: globalFileHashMap, - rootExternalDepsHash: rootPackageJSON.ExternalDepsHash, - env: globalEnv, - resolvedEnvVars: globalHashableEnvVars, - passThroughEnv: globalPassThroughEnv, - envMode: envMode, - frameworkInference: frameworkInference, - dotEnv: dotEnv, - }, nil -} diff --git a/cli/internal/run/global_hash_go.go b/cli/internal/run/global_hash_go.go deleted file mode 100644 index f5fe6f035112f..0000000000000 --- a/cli/internal/run/global_hash_go.go +++ /dev/null @@ -1,42 +0,0 @@ -//go:build go || !rust -// +build go !rust - -package run - -import "github.com/vercel/turbo/cli/internal/env" - -// `getGlobalHashableEnvVars` calculates env var dependencies -func getGlobalHashableEnvVars(envAtExecutionStart env.EnvironmentVariableMap, globalEnv []string) (env.DetailedMap, error) { - // Our "inferred" env var maps - defaultEnvVarMap, err := envAtExecutionStart.FromWildcards(_defaultEnvVars) - if err != nil { - return env.DetailedMap{}, err - } - userEnvVarSet, err := envAtExecutionStart.FromWildcardsUnresolved(globalEnv) - if err != nil { - return env.DetailedMap{}, err - } - - allEnvVarMap := env.EnvironmentVariableMap{} - allEnvVarMap.Union(userEnvVarSet.Inclusions) - allEnvVarMap.Union(defaultEnvVarMap) - allEnvVarMap.Difference(userEnvVarSet.Exclusions) - - explicitEnvVarMap := env.EnvironmentVariableMap{} - explicitEnvVarMap.Union(userEnvVarSet.Inclusions) - explicitEnvVarMap.Difference(userEnvVarSet.Exclusions) - - matchingEnvVarMap := env.EnvironmentVariableMap{} - matchingEnvVarMap.Union(defaultEnvVarMap) - matchingEnvVarMap.Difference(userEnvVarSet.Exclusions) - - globalHashableEnvVars := env.DetailedMap{ - All: allEnvVarMap, - BySource: env.BySource{ - Explicit: explicitEnvVarMap, - Matching: matchingEnvVarMap, - }, - } - - return globalHashableEnvVars, nil -} diff --git a/cli/internal/run/global_hash_rust.go b/cli/internal/run/global_hash_rust.go deleted file mode 100644 index fc9b2c1d36404..0000000000000 --- a/cli/internal/run/global_hash_rust.go +++ /dev/null @@ -1,38 +0,0 @@ -//go:build rust -// +build rust - -package run - -import ( - "github.com/vercel/turbo/cli/internal/env" - - "github.com/vercel/turbo/cli/internal/ffi" -) - -// `getGlobalHashableEnvVars` calculates env var dependencies -func getGlobalHashableEnvVars(envAtExecutionStart env.EnvironmentVariableMap, globalEnv []string) (env.DetailedMap, error) { - respDetailedMap, err := ffi.GetGlobalHashableEnvVars(envAtExecutionStart, globalEnv) - if err != nil { - return env.DetailedMap{}, err - } - - // We set explicit and matching to empty maps if they are nil - // to preserve existing behavior from the Go code - explicit := respDetailedMap.GetBySource().GetExplicit() - if explicit == nil { - explicit = make(map[string]string) - } - - matching := respDetailedMap.GetBySource().GetMatching() - if matching == nil { - matching = make(map[string]string) - } - detailedMap := env.DetailedMap{ - All: respDetailedMap.GetAll(), - BySource: env.BySource{ - 
Explicit: explicit, - Matching: matching, - }, - } - return detailedMap, nil -} diff --git a/cli/internal/run/global_hash_test.go b/cli/internal/run/global_hash_test.go deleted file mode 100644 index c1841a4e4baef..0000000000000 --- a/cli/internal/run/global_hash_test.go +++ /dev/null @@ -1,75 +0,0 @@ -package run - -import ( - "testing" - - "github.com/stretchr/testify/assert" - "github.com/vercel/turbo/cli/internal/env" -) - -func TestGetGlobalHashableEnvVars(t *testing.T) { - testCases := []struct { - name string - envAtExecutionStart env.EnvironmentVariableMap - globalEnv []string - expectedMap env.DetailedMap - }{ - { - name: "has default env var", - envAtExecutionStart: env.EnvironmentVariableMap{ - "VERCEL_ANALYTICS_ID": "123", - }, - globalEnv: []string{}, - expectedMap: env.DetailedMap{ - All: map[string]string{ - "VERCEL_ANALYTICS_ID": "123", - }, - BySource: env.BySource{ - Matching: map[string]string{ - "VERCEL_ANALYTICS_ID": "123", - }, - Explicit: map[string]string{}, - }, - }, - }, - { - name: "has global env wildcard", - envAtExecutionStart: env.EnvironmentVariableMap{ - "FOO_BAR": "123", - }, - globalEnv: []string{"FOO*"}, - expectedMap: env.DetailedMap{ - All: map[string]string{ - "FOO_BAR": "123", - }, - BySource: env.BySource{ - Matching: map[string]string{}, - Explicit: map[string]string{ - "FOO_BAR": "123", - }, - }, - }, - }, - { - name: "has global env wildcard but also excluded", - envAtExecutionStart: env.EnvironmentVariableMap{ - "FOO_BAR": "123", - }, - globalEnv: []string{"FOO*", "!FOO_BAR"}, - expectedMap: env.DetailedMap{ - BySource: env.BySource{ - Matching: map[string]string{}, - Explicit: map[string]string{}, - }, - }, - }, - } - - for _, testCase := range testCases { - t.Run(testCase.name, func(t *testing.T) { - result, err := getGlobalHashableEnvVars(testCase.envAtExecutionStart, testCase.globalEnv) - assert.NoError(t, err) - assert.Equal(t, testCase.expectedMap, result) - }) - } -} diff --git a/cli/internal/run/graph_run.go b/cli/internal/run/graph_run.go deleted file mode 100644 index 85317184d0800..0000000000000 --- a/cli/internal/run/graph_run.go +++ /dev/null @@ -1,46 +0,0 @@ -package run - -import ( - gocontext "context" - - "github.com/pyr-sh/dag" - "github.com/vercel/turbo/cli/internal/cmdutil" - "github.com/vercel/turbo/cli/internal/core" - "github.com/vercel/turbo/cli/internal/graphvisualizer" - "github.com/vercel/turbo/cli/internal/util" -) - -// GraphRun generates a visualization of the task graph rather than executing it. -func GraphRun(ctx gocontext.Context, rs *runSpec, engine *core.Engine, base *cmdutil.CmdBase) error { - graph := engine.TaskGraph - if rs.Opts.runOpts.SinglePackage { - graph = filterSinglePackageGraphForDisplay(engine.TaskGraph) - } - visualizer := graphvisualizer.New(base.RepoRoot, base.UI, graph) - - if rs.Opts.runOpts.GraphDot { - visualizer.RenderDotGraph() - } else { - err := visualizer.GenerateGraphFile(rs.Opts.runOpts.GraphFile) - if err != nil { - return err - } - } - return nil -} - -// filterSinglePackageGraphForDisplay builds an equivalent graph with package names stripped from tasks. -// Given that this should only be used in a single-package context, all of the package names are expected -// to be //. Also, all nodes are always connected to the root node, so we are not concerned with leaving -// behind any unconnected nodes. 
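[Editor's aside, not part of the patch] getGlobalHashableEnvVars above builds the hashable env set from wildcard inclusions ("FOO*") and !-prefixed exclusions via map union and difference, as the test cases confirm. A toy standalone version, using path.Match as a stand-in for turbo's real wildcard matching (the resolve function and its exact semantics are illustrative assumptions, not the deleted implementation):

package main

import (
	"fmt"
	"path"
)

// resolve applies specs in order: "FOO*" includes matching vars,
// "!FOO_BAR" removes them again.
func resolve(env map[string]string, specs []string) map[string]string {
	out := map[string]string{}
	for _, spec := range specs {
		exclude := false
		if len(spec) > 0 && spec[0] == '!' {
			exclude = true
			spec = spec[1:]
		}
		for k, v := range env {
			if ok, _ := path.Match(spec, k); ok {
				if exclude {
					delete(out, k)
				} else {
					out[k] = v
				}
			}
		}
	}
	return out
}

func main() {
	env := map[string]string{"FOO_BAR": "123", "OTHER": "x"}
	fmt.Println(resolve(env, []string{"FOO*"}))             // map[FOO_BAR:123]
	fmt.Println(resolve(env, []string{"FOO*", "!FOO_BAR"})) // map[]
}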
-func filterSinglePackageGraphForDisplay(originalGraph *dag.AcyclicGraph) *dag.AcyclicGraph { - graph := &dag.AcyclicGraph{} - for _, edge := range originalGraph.Edges() { - src := util.StripPackageName(edge.Source().(string)) - tgt := util.StripPackageName(edge.Target().(string)) - graph.Add(src) - graph.Add(tgt) - graph.Connect(dag.BasicEdge(src, tgt)) - } - return graph -} diff --git a/cli/internal/run/log_tag_go.go b/cli/internal/run/log_tag_go.go deleted file mode 100644 index a3e825f37df7d..0000000000000 --- a/cli/internal/run/log_tag_go.go +++ /dev/null @@ -1,11 +0,0 @@ -//go:build go || !rust -// +build go !rust - -package run - -import "github.com/hashicorp/go-hclog" - -// LogTag logs out the build tag (in this case "go") for the current build. -func LogTag(logger hclog.Logger) { - logger.Debug("build tag: go") -} diff --git a/cli/internal/run/log_tag_rust.go b/cli/internal/run/log_tag_rust.go deleted file mode 100644 index 065f438c1ac0a..0000000000000 --- a/cli/internal/run/log_tag_rust.go +++ /dev/null @@ -1,11 +0,0 @@ -//go:build rust -// +build rust - -package run - -import "github.com/hashicorp/go-hclog" - -// LogTag logs out the build tag (in this case "rust") for the current build. -func LogTag(logger hclog.Logger) { - logger.Debug("build tag: rust") -} diff --git a/cli/internal/run/real_run.go b/cli/internal/run/real_run.go deleted file mode 100644 index 62962d22fd872..0000000000000 --- a/cli/internal/run/real_run.go +++ /dev/null @@ -1,619 +0,0 @@ -package run - -import ( - "bytes" - gocontext "context" - "fmt" - "io" - "log" - "os" - "os/exec" - "strings" - "sync" - "time" - - "github.com/fatih/color" - "github.com/hashicorp/go-hclog" - "github.com/mitchellh/cli" - "github.com/pkg/errors" - "github.com/vercel/turbo/cli/internal/cache" - "github.com/vercel/turbo/cli/internal/cmdutil" - "github.com/vercel/turbo/cli/internal/colorcache" - "github.com/vercel/turbo/cli/internal/core" - "github.com/vercel/turbo/cli/internal/env" - "github.com/vercel/turbo/cli/internal/graph" - "github.com/vercel/turbo/cli/internal/logstreamer" - "github.com/vercel/turbo/cli/internal/nodes" - "github.com/vercel/turbo/cli/internal/packagemanager" - "github.com/vercel/turbo/cli/internal/process" - "github.com/vercel/turbo/cli/internal/runcache" - "github.com/vercel/turbo/cli/internal/runsummary" - "github.com/vercel/turbo/cli/internal/spinner" - "github.com/vercel/turbo/cli/internal/taskhash" - "github.com/vercel/turbo/cli/internal/turbopath" - "github.com/vercel/turbo/cli/internal/turbostate" - "github.com/vercel/turbo/cli/internal/ui" - "github.com/vercel/turbo/cli/internal/util" -) - -// threadsafeOutputBuffer implements io.Writer for multiple goroutines -// to write to the same underlying buffer. Child processes use separate -// goroutines to handle reading from stdout and stderr, but for now we -// send both to the same buffer. 
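[Editor's aside, not part of the patch] The comment above describes a buffer shared by the separate stdout and stderr goroutines of a child process, guarded by one mutex; the real type follows in the diff. A standalone distillation with a two-goroutine usage demo (safeBuffer is an illustrative name):

package main

import (
	"bytes"
	"fmt"
	"sync"
)

type safeBuffer struct {
	mu  sync.Mutex
	buf bytes.Buffer
}

// Write serializes concurrent writers onto the one underlying buffer.
func (b *safeBuffer) Write(p []byte) (int, error) {
	b.mu.Lock()
	defer b.mu.Unlock()
	return b.buf.Write(p)
}

func main() {
	var b safeBuffer
	var wg sync.WaitGroup
	for _, stream := range []string{"stdout", "stderr"} {
		stream := stream
		wg.Add(1)
		go func() {
			defer wg.Done()
			// Each stream's reader goroutine writes into the same buffer.
			fmt.Fprintf(&b, "line from %s\n", stream)
		}()
	}
	wg.Wait()
	fmt.Print(b.buf.String()) // two lines; their order may vary
}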
-type threadsafeOutputBuffer struct { - buf bytes.Buffer - mu sync.Mutex -} - -func (tsob *threadsafeOutputBuffer) Write(p []byte) (n int, err error) { - tsob.mu.Lock() - defer tsob.mu.Unlock() - return tsob.buf.Write(p) -} - -func (tsob *threadsafeOutputBuffer) Bytes() []byte { - return tsob.buf.Bytes() -} - -type logLine struct { - isStdout bool - line []byte -} - -// logBuffer holds the log lines for a task, tagged to stdout or stderr -type logBuffer struct { - mu sync.Mutex - lines []logLine -} - -// LogLine appends a log line to the log buffer -func (lb *logBuffer) LogLine(line []byte, isStdout bool) { - lb.mu.Lock() - defer lb.mu.Unlock() - lb.lines = append(lb.lines, logLine{isStdout, line}) -} - -// Drain writes the contents of the logBuffer to the appropriate output stream -func (lb *logBuffer) Drain(stdout io.Writer, stderr io.Writer) error { - for _, line := range lb.lines { - if line.isStdout { - if _, err := stdout.Write(line.line); err != nil { - return err - } - } else { - if _, err := stderr.Write(line.line); err != nil { - return err - } - } - } - return nil -} - -// StdoutWriter returns a writer tagged to stdout -func (lb *logBuffer) StdoutWriter() *logBufferWriter { - return &logBufferWriter{ - isStdout: true, - logBuffer: lb, - } -} - -// StderrWriter returns a writer tagged to stderr -func (lb *logBuffer) StderrWriter() *logBufferWriter { - return &logBufferWriter{ - isStdout: false, - logBuffer: lb, - } -} - -type logBufferWriter struct { - isStdout bool - logBuffer *logBuffer -} - -// Write implements io.Writer.Write for logBufferWriter -func (lbw *logBufferWriter) Write(bytes []byte) (int, error) { - n := len(bytes) - // The io.Writer contract states that we cannot retain the bytes we are passed, - // so we need to make a copy of them - cpy := make([]byte, n) - copy(cpy, bytes) - lbw.logBuffer.LogLine(cpy, lbw.isStdout) - return n, nil -} - -// RealRun executes a set of tasks -func RealRun( - ctx gocontext.Context, - g *graph.CompleteGraph, - rs *runSpec, - engine *core.Engine, - taskHashTracker *taskhash.Tracker, - turboCache cache.Cache, - globalEnvMode util.EnvMode, - globalEnv env.EnvironmentVariableMap, - globalPassThroughEnv env.EnvironmentVariableMap, - packagesInScope []string, - base *cmdutil.CmdBase, - runSummary runsummary.Meta, - packageManager *packagemanager.PackageManager, - processes *process.Manager, - executionState *turbostate.ExecutionState, -) error { - singlePackage := rs.Opts.runOpts.SinglePackage - - if singlePackage { - base.UI.Output(fmt.Sprintf("%s %s", ui.Dim("• Running"), ui.Dim(ui.Bold(strings.Join(rs.Targets, ", "))))) - } else { - base.UI.Output(fmt.Sprintf(ui.Dim("• Packages in scope: %v"), strings.Join(packagesInScope, ", "))) - base.UI.Output(fmt.Sprintf("%s %s %s", ui.Dim("• Running"), ui.Dim(ui.Bold(strings.Join(rs.Targets, ", "))), ui.Dim(fmt.Sprintf("in %v packages", rs.FilteredPkgs.Len())))) - } - - // Log whether remote cache is enabled - useHTTPCache := !rs.Opts.cacheOpts.SkipRemote - if useHTTPCache { - base.UI.Info(ui.Dim("• Remote caching enabled")) - } else { - base.UI.Info(ui.Dim("• Remote caching disabled")) - } - - defer func() { - _ = spinner.WaitFor(ctx, turboCache.Shutdown, base.UI, "...Finishing writing to cache...", 1500*time.Millisecond) - }() - colorCache := colorcache.New() - - runCache := runcache.New(turboCache, base.RepoRoot, rs.Opts.runcacheOpts, colorCache) - - concurrentUIFactory := ui.ConcurrentUIFactory{ - Base: base.UIFactory, - } - - ec := &execContext{ - colorCache: colorCache, - runSummary: 
runSummary, - rs: rs, - ui: concurrentUIFactory.Build(os.Stdin, os.Stdout, os.Stderr), - runCache: runCache, - env: globalEnv, - passThroughEnv: globalPassThroughEnv, - logger: base.Logger, - packageManager: packageManager, - processes: processes, - taskHashTracker: taskHashTracker, - repoRoot: base.RepoRoot, - isSinglePackage: singlePackage, - } - - // run the thing - execOpts := core.EngineExecutionOptions{ - Parallel: rs.Opts.runOpts.Parallel, - Concurrency: rs.Opts.runOpts.Concurrency, - } - - taskCount := len(engine.TaskGraph.Vertices()) - logChan := make(chan *logBuffer, taskCount) - logWaitGroup := sync.WaitGroup{} - isGrouped := rs.Opts.runOpts.LogOrder == "grouped" - - if isGrouped { - logWaitGroup.Add(1) - go func() { - for logBuffer := range logChan { - - err := logBuffer.Drain(os.Stdout, os.Stderr) - if err != nil { - ec.ui.Error(fmt.Sprintf("Failed to output some of the logs: %v", err)) - } - } - logWaitGroup.Done() - }() - } - - taskSummaryMutex := sync.Mutex{} - taskSummaries := []*runsummary.TaskSummary{} - execFunc := func(ctx gocontext.Context, packageTask *nodes.PackageTask, taskSummary *runsummary.TaskSummary) error { - var outWriter io.Writer = os.Stdout - var errWriter io.Writer = os.Stderr - - logBuffer := &logBuffer{} - - if isGrouped { - outWriter = logBuffer.StdoutWriter() - if rs.Opts.runOpts.IsGithubActions { - // If we're running on Github Actions, force everything to stdout - // so as not to have out-of-order log lines - errWriter = outWriter - } else { - errWriter = logBuffer.StderrWriter() - } - } - - var spacesLogBuffer *threadsafeOutputBuffer - if runSummary.SpacesIsEnabled() { - spacesLogBuffer = &threadsafeOutputBuffer{} - outWriter = io.MultiWriter(spacesLogBuffer, outWriter) - errWriter = io.MultiWriter(spacesLogBuffer, errWriter) - } - - ui := concurrentUIFactory.Build(os.Stdin, outWriter, errWriter) - - taskExecutionSummary, err := ec.exec(ctx, packageTask, ui, outWriter) - - // taskExecutionSummary will be nil if the task never executed - // (i.e. if the workspace didn't implement the script corresponding to the task) - // We don't need to collect any of the outputs or execution if the task didn't execute. 
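[Editor's aside, not part of the patch] The grouped log order above gives each task its own buffer and funnels completed buffers through logChan to a single draining goroutine, so one task's lines never interleave with another's. A standalone sketch of that shape:

package main

import (
	"bytes"
	"fmt"
	"sync"
)

func main() {
	logs := make(chan *bytes.Buffer, 3)
	var drained sync.WaitGroup
	drained.Add(1)
	go func() {
		defer drained.Done()
		// One drainer prints each finished buffer whole, keeping tasks grouped.
		for buf := range logs {
			fmt.Print(buf.String())
		}
	}()

	var tasks sync.WaitGroup
	for i := 0; i < 3; i++ {
		i := i
		tasks.Add(1)
		go func() {
			defer tasks.Done()
			buf := &bytes.Buffer{}
			fmt.Fprintf(buf, "task %d: start\n", i)
			fmt.Fprintf(buf, "task %d: done\n", i)
			logs <- buf // hand the completed buffer to the drainer
		}()
	}
	tasks.Wait()
	close(logs)
	drained.Wait()
}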
- if taskExecutionSummary != nil { - taskSummary.ExpandedOutputs = taskHashTracker.GetExpandedOutputs(taskSummary.TaskID) - taskSummary.Execution = taskExecutionSummary - taskSummary.CacheSummary = taskHashTracker.GetCacheStatus(taskSummary.TaskID) - - // lock since multiple things to be appending to this array at the same time - taskSummaryMutex.Lock() - taskSummaries = append(taskSummaries, taskSummary) - // not using defer, just release the lock - taskSummaryMutex.Unlock() - - var logBytes []byte - if spacesLogBuffer != nil { - logBytes = spacesLogBuffer.Bytes() - } - runSummary.CloseTask(taskSummary, logBytes) - } - if isGrouped { - logChan <- logBuffer - } - - // Return the error when there is one - if err != nil { - return err - } - - return nil - } - - getArgs := func(taskID string) []string { - return rs.ArgsForTask(taskID) - } - - visitorFn := g.GetPackageTaskVisitor(ctx, engine.TaskGraph, rs.Opts.runOpts.FrameworkInference, globalEnvMode, getArgs, base.Logger, execFunc) - errs := engine.Execute(visitorFn, execOpts) - if isGrouped { - close(logChan) - logWaitGroup.Wait() - } - - // Track if we saw any child with a non-zero exit code - exitCode := 0 - exitCodeErr := &process.ChildExit{} - - // Assign tasks after execution - runSummary.RunSummary.Tasks = taskSummaries - - terminal := base.UI - if rs.Opts.runOpts.IsGithubActions { - terminal = &cli.PrefixedUi{ - Ui: terminal, - ErrorPrefix: "::error::", - WarnPrefix: "::warn::", - } - } - for _, err := range errs { - if errors.As(err, &exitCodeErr) { - // If a process gets killed via a signal, Go reports it's exit code as -1. - // We take the absolute value of the exit code so we don't select '0' as - // the greatest exit code. - childExit := exitCodeErr.ExitCode - if childExit < 0 { - childExit = -childExit - } - if childExit > exitCode { - exitCode = childExit - } - } else if exitCode == 0 { - // We hit some error, it shouldn't be exit code 0 - exitCode = 1 - } - terminal.Error(err.Error()) - } - - // When continue on error is enabled don't register failed tasks as errors - // and instead must inspect the task summaries. - if ec.rs.Opts.runOpts.ContinueOnError { - for _, summary := range runSummary.RunSummary.Tasks { - if childExit := summary.Execution.ExitCode(); childExit != nil { - childExit := *childExit - if childExit < 0 { - childExit = -childExit - } - if childExit > exitCode { - exitCode = childExit - } - } - } - } - - if err := runSummary.Close(ctx, exitCode, g.WorkspaceInfos, base.UI); err != nil { - // We don't need to throw an error, but we can warn on this. - // Note: this method doesn't actually return an error for Real Runs at the time of writing. 
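[Editor's aside, not part of the patch] The loop above reduces many task errors to one exit code: ChildExit errors contribute their absolute exit code (a signal-killed child reports -1), any other error forces at least 1, and the maximum wins. A standalone sketch (aggregate is an illustrative name):

package main

import (
	"errors"
	"fmt"
)

type ChildExit struct{ ExitCode int }

func (e *ChildExit) Error() string { return fmt.Sprintf("exit %d", e.ExitCode) }

func aggregate(errs []error) int {
	exitCode := 0
	for _, err := range errs {
		var ce *ChildExit
		if errors.As(err, &ce) {
			code := ce.ExitCode
			if code < 0 {
				code = -code // signal kills report -1; don't let 0 win
			}
			if code > exitCode {
				exitCode = code
			}
		} else if exitCode == 0 {
			exitCode = 1 // some error happened, so never report success
		}
	}
	return exitCode
}

func main() {
	fmt.Println(aggregate([]error{&ChildExit{ExitCode: -1}, errors.New("other")})) // 1
}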
- base.UI.Info(fmt.Sprintf("Failed to close Run Summary %v", err)) - } - - if exitCode != 0 { - return &process.ChildExit{ - ExitCode: exitCode, - } - } - return nil -} - -type execContext struct { - colorCache *colorcache.ColorCache - runSummary runsummary.Meta - rs *runSpec - ui cli.Ui - runCache *runcache.RunCache - env env.EnvironmentVariableMap - passThroughEnv env.EnvironmentVariableMap - logger hclog.Logger - packageManager *packagemanager.PackageManager - processes *process.Manager - taskHashTracker *taskhash.Tracker - repoRoot turbopath.AbsoluteSystemPath - isSinglePackage bool -} - -func (ec *execContext) logError(prefix string, err error) { - ec.logger.Error(prefix, "error", err) - - if prefix != "" { - prefix += ": " - } - - ec.ui.Error(fmt.Sprintf("%s%s%s", ui.ERROR_PREFIX, prefix, color.RedString(" %v", err))) -} - -func (ec *execContext) exec(ctx gocontext.Context, packageTask *nodes.PackageTask, ui cli.Ui, outWriter io.Writer) (*runsummary.TaskExecutionSummary, error) { - // Setup tracer. Every time tracer() is called the taskExecutionSummary's duration is updated - // So make sure to call it before returning. - successExitCode := 0 // We won't use this till later - - tracer, taskExecutionSummary := ec.runSummary.RunSummary.TrackTask(packageTask.TaskID) - progressLogger := ec.logger.Named("") - progressLogger.Debug("start") - - passThroughArgs := ec.rs.ArgsForTask(packageTask.Task) - hash := packageTask.Hash - ec.logger.Debug("task hash", "value", hash) - // TODO(gsoltis): if/when we fix https://github.com/vercel/turbo/issues/937 - // the following block should never get hit. In the meantime, keep it after hashing - // so that downstream tasks can count on the hash existing - // - // bail if the script doesn't exist - if packageTask.Command == "" { - progressLogger.Debug("no task in package, skipping") - progressLogger.Debug("done", "status", "skipped", "duration", taskExecutionSummary.Duration) - // Return nil here because there was no execution, so there is no task execution summary - return nil, nil - } - - // Set building status now that we know it's going to run. - tracer(runsummary.TargetBuilding, nil, &successExitCode) - - var prefix string - var prettyPrefix string - if ec.rs.Opts.runOpts.LogPrefix == "none" { - prefix = "" - } else { - prefix = packageTask.OutputPrefix(ec.isSinglePackage) - } - - prettyPrefix = ec.colorCache.PrefixWithColor(packageTask.PackageName, prefix) - - // Cache --------------------------------------------- - taskCache := ec.runCache.TaskCache(packageTask, hash) - // Create a logger for replaying - prefixedUI := &cli.PrefixedUi{ - Ui: ui, - OutputPrefix: prettyPrefix, - InfoPrefix: prettyPrefix, - ErrorPrefix: prettyPrefix + "ERROR: ", - WarnPrefix: prettyPrefix, - } - - if ec.rs.Opts.runOpts.IsGithubActions { - ui.Output(fmt.Sprintf("::group::%s", packageTask.OutputPrefix(ec.isSinglePackage))) - prefixedUI.WarnPrefix = "[WARN] " - prefixedUI.ErrorPrefix = "[ERROR] " - defer func() { - // We don't use the prefixedUI here because the prefix in this case would include - // the ::group::, and we explicitly want to close the github group - ui.Output("::endgroup::") - }() - } - - cacheStatus, err := taskCache.RestoreOutputs(ctx, prefixedUI, progressLogger) - - // It's safe to set the CacheStatus even if there's an error, because if there's - // an error, the 0 values are actually what we want. We save cacheStatus and timeSaved - // for the task, so that even if there's an error, we have those values for the taskSummary. 
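[Editor's aside, not part of the patch] The ::group:: / ::endgroup:: markers emitted above are GitHub Actions workflow commands that collapse the enclosed log lines in the Actions UI. A standalone sketch of the same convention (withGithubGroup is an illustrative helper, not from this codebase):

package main

import "fmt"

// withGithubGroup wraps fn's output in a collapsible Actions log group.
func withGithubGroup(title string, fn func()) {
	fmt.Printf("::group::%s\n", title)
	defer fmt.Println("::endgroup::")
	fn()
}

func main() {
	withGithubGroup("my-app:build", func() {
		fmt.Println("task output here")
	})
}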
- ec.taskHashTracker.SetCacheStatus( - packageTask.TaskID, - runsummary.NewTaskCacheSummary(cacheStatus), - ) - - if err != nil { - prefixedUI.Error(fmt.Sprintf("error fetching from cache: %s", err)) - } else if cacheStatus.Hit { // If there was a cache hit - ec.taskHashTracker.SetExpandedOutputs(packageTask.TaskID, taskCache.ExpandedOutputs) - // We only cache successful executions, so we can assume this is a successExitCode exit. - tracer(runsummary.TargetCached, nil, &successExitCode) - return taskExecutionSummary, nil - } - - // Setup command execution - argsactual := append([]string{"run"}, packageTask.Task) - if len(passThroughArgs) > 0 { - // This will be either '--' or a typed nil - argsactual = append(argsactual, ec.packageManager.ArgSeparator(passThroughArgs)...) - argsactual = append(argsactual, passThroughArgs...) - } - - cmd := exec.Command(ec.packageManager.Command, argsactual...) - cmd.Dir = packageTask.Pkg.Dir.ToSystemPath().RestoreAnchor(ec.repoRoot).ToString() - - passThroughEnv := env.EnvironmentVariableMap{} - - if packageTask.EnvMode == util.Strict { - defaultPassThroughEnvVarMap, err := ec.taskHashTracker.EnvAtExecutionStart.FromWildcards([]string{ - "PATH", - "SHELL", - "SYSTEMROOT", // Go will always include this on Windows, but we're being explicit here - }) - if err != nil { - return nil, err - } - - envVarPassThroughMap, err := ec.taskHashTracker.EnvAtExecutionStart.FromWildcards(packageTask.TaskDefinition.PassThroughEnv) - if err != nil { - return nil, err - } - - passThroughEnv.Union(defaultPassThroughEnvVarMap) - passThroughEnv.Union(ec.env) - passThroughEnv.Union(ec.passThroughEnv) - passThroughEnv.Union(ec.taskHashTracker.GetEnvVars(packageTask.TaskID).All) - passThroughEnv.Union(envVarPassThroughMap) - } else { - passThroughEnv.Union(ec.taskHashTracker.EnvAtExecutionStart) - } - - // Always last to make sure it clobbers. - passThroughEnv.Add("TURBO_HASH", hash) - - cmd.Env = passThroughEnv.ToHashable() - - // Setup stdout/stderr - // If we are not caching anything, then we don't need to write logs to disk - // be careful about this conditional given the default of cache = true - writer, err := taskCache.OutputWriter(prettyPrefix, outWriter) - if err != nil { - tracer(runsummary.TargetBuildFailed, err, nil) - - ec.logError(prettyPrefix, err) - if !ec.rs.Opts.runOpts.ContinueOnError { - return nil, core.StopExecution(errors.Wrapf(err, "failed to capture outputs for \"%v\"", packageTask.TaskID)) - } - } - - // Create a logger - logger := log.New(writer, "", 0) - // Setup a streamer that we'll pipe cmd.Stdout to - logStreamerOut := logstreamer.NewLogstreamer(logger, prettyPrefix, false) - // Setup a streamer that we'll pipe cmd.Stderr to. 
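[Editor's aside, not part of the patch] In strict mode above, the pass-through environment is assembled from successive unions, and TURBO_HASH is added last so it always clobbers. A toy illustration of that last-write-wins ordering (envMap is an illustrative type, not the env package's EnvironmentVariableMap):

package main

import (
	"fmt"
	"sort"
)

type envMap map[string]string

// union copies other into m; later unions overwrite earlier keys.
func (m envMap) union(other envMap) {
	for k, v := range other {
		m[k] = v
	}
}

func main() {
	pass := envMap{}
	pass.union(envMap{"PATH": "/usr/bin", "TURBO_HASH": "stale-user-value"})
	pass.union(envMap{"MY_VAR": "1"})
	pass["TURBO_HASH"] = "abc123" // always added last so it clobbers

	keys := make([]string, 0, len(pass))
	for k := range pass {
		keys = append(keys, k)
	}
	sort.Strings(keys)
	for _, k := range keys {
		fmt.Printf("%s=%s\n", k, pass[k])
	}
}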
- logStreamerErr := logstreamer.NewLogstreamer(logger, prettyPrefix, false) - cmd.Stderr = logStreamerErr - cmd.Stdout = logStreamerOut - // Flush/Reset any error we recorded - logStreamerErr.FlushRecord() - logStreamerOut.FlushRecord() - - closeOutputs := func() error { - var closeErrors []error - - if err := logStreamerOut.Close(); err != nil { - closeErrors = append(closeErrors, errors.Wrap(err, "log stdout")) - } - if err := logStreamerErr.Close(); err != nil { - closeErrors = append(closeErrors, errors.Wrap(err, "log stderr")) - } - - if err := writer.Close(); err != nil { - closeErrors = append(closeErrors, errors.Wrap(err, "log file")) - } - if len(closeErrors) > 0 { - msgs := make([]string, len(closeErrors)) - for i, err := range closeErrors { - msgs[i] = err.Error() - } - return fmt.Errorf("could not flush log output: %v", strings.Join(msgs, ", ")) - } - return nil - } - - // Run the command - if err := ec.processes.Exec(cmd); err != nil { - // ensure we close off our outputs. We errored, so we mostly don't care if we fail to close - // We don't close them directly because we're potentially going to output some errors or - // warnings that we want grouped with the task output. - _ = closeOutputs() - // if we already know we're in the process of exiting, - // we don't need to record an error to that effect. - if errors.Is(err, process.ErrClosing) { - return taskExecutionSummary, nil - } - - // If the error we got is a ChildExit, it will have an ExitCode field - // Pass that along into the tracer. - var e *process.ChildExit - if errors.As(err, &e) { - tracer(runsummary.TargetBuildFailed, err, &e.ExitCode) - } else { - // If it wasn't a ChildExit, and something else went wrong, we don't have an exitCode - tracer(runsummary.TargetBuildFailed, err, nil) - } - taskIDDisplay := packageTask.TaskID - if ec.isSinglePackage { - taskIDDisplay = packageTask.Task - } - taskErr := &TaskError{ - cause: err, - taskIDDisplay: taskIDDisplay, - } - // If there was an error, flush the buffered output - taskCache.OnError(prefixedUI, progressLogger) - progressLogger.Error(fmt.Sprintf("Error: command finished with error: %v", err)) - if !ec.rs.Opts.runOpts.ContinueOnError { - prefixedUI.Error(fmt.Sprintf("command finished with error: %s", err)) - ec.processes.Close() - // We're not continuing, stop graph traversal - err = core.StopExecution(taskErr) - } else { - prefixedUI.Warn("command finished with error, but continuing...") - err = taskErr - } - - return taskExecutionSummary, err - } - - // Add another timestamp into the tracer, so we have an accurate timestamp for how long the task took. 
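[Editor's aside, not part of the patch] closeOutputs above attempts every Close call, collects the failures, and reports them as a single error instead of bailing at the first. A standalone sketch of that pattern (closeAll and failing are illustrative names):

package main

import (
	"fmt"
	"io"
	"strings"
)

// closeAll closes everything and joins any failures into one error.
func closeAll(closers map[string]io.Closer) error {
	var msgs []string
	for name, c := range closers {
		if err := c.Close(); err != nil {
			msgs = append(msgs, fmt.Sprintf("%s: %v", name, err))
		}
	}
	if len(msgs) > 0 {
		return fmt.Errorf("could not flush log output: %s", strings.Join(msgs, ", "))
	}
	return nil
}

type failing struct{ name string }

func (f failing) Close() error { return fmt.Errorf("%s is busy", f.name) }

func main() {
	err := closeAll(map[string]io.Closer{"log file": failing{"log file"}})
	fmt.Println(err)
}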
- tracer(runsummary.TargetExecuted, nil, nil) - - // Close off our outputs and cache them - if err := closeOutputs(); err != nil { - ec.logError("", err) - } else { - if err = taskCache.SaveOutputs(ctx, progressLogger, prefixedUI, int(taskExecutionSummary.Duration.Milliseconds())); err != nil { - ec.logError("", fmt.Errorf("error caching output: %w", err)) - } else { - ec.taskHashTracker.SetExpandedOutputs(packageTask.TaskID, taskCache.ExpandedOutputs) - } - } - - // Clean up tracing - - tracer(runsummary.TargetBuilt, nil, &successExitCode) - progressLogger.Debug("done", "status", "complete", "duration", taskExecutionSummary.Duration) - return taskExecutionSummary, nil -} - -// TaskError wraps an error encountered running the given task -type TaskError struct { - cause error - taskIDDisplay string -} - -// Unwrap allows for interoperation with standard library error wrapping -func (te *TaskError) Unwrap() error { return te.cause } - -func (te *TaskError) Error() string { - return fmt.Sprintf("%v: %v", te.taskIDDisplay, te.cause) -} diff --git a/cli/internal/run/run.go b/cli/internal/run/run.go deleted file mode 100644 index 16be6bd80fa17..0000000000000 --- a/cli/internal/run/run.go +++ /dev/null @@ -1,508 +0,0 @@ -package run - -import ( - gocontext "context" - "fmt" - "sort" - "sync" - "time" - - "github.com/vercel/turbo/cli/internal/analytics" - "github.com/vercel/turbo/cli/internal/cache" - "github.com/vercel/turbo/cli/internal/ci" - "github.com/vercel/turbo/cli/internal/cmdutil" - "github.com/vercel/turbo/cli/internal/context" - "github.com/vercel/turbo/cli/internal/core" - "github.com/vercel/turbo/cli/internal/daemon" - "github.com/vercel/turbo/cli/internal/daemonclient" - "github.com/vercel/turbo/cli/internal/env" - "github.com/vercel/turbo/cli/internal/fs" - "github.com/vercel/turbo/cli/internal/graph" - "github.com/vercel/turbo/cli/internal/process" - "github.com/vercel/turbo/cli/internal/runsummary" - "github.com/vercel/turbo/cli/internal/scm" - "github.com/vercel/turbo/cli/internal/scope" - "github.com/vercel/turbo/cli/internal/signals" - "github.com/vercel/turbo/cli/internal/taskhash" - "github.com/vercel/turbo/cli/internal/turbostate" - "github.com/vercel/turbo/cli/internal/ui" - "github.com/vercel/turbo/cli/internal/util" - - "github.com/pkg/errors" -) - -// ExecuteRun executes the run command -func ExecuteRun(ctx gocontext.Context, helper *cmdutil.Helper, signalWatcher *signals.Watcher, executionState *turbostate.ExecutionState) error { - base, err := helper.GetCmdBase(executionState) - if err != nil { - return err - } - LogTag(base.Logger) - tasks := executionState.CLIArgs.Command.Run.Tasks - passThroughArgs := executionState.CLIArgs.Command.Run.PassThroughArgs - opts, err := optsFromArgs(&executionState.CLIArgs) - if err != nil { - return err - } - - opts.runOpts.PassThroughArgs = passThroughArgs - run := configureRun(base, opts, signalWatcher) - if err := run.run(ctx, tasks, executionState); err != nil { - base.LogError("run failed: %v", err) - return err - } - return nil -} - -func optsFromArgs(args *turbostate.ParsedArgsFromRust) (*Opts, error) { - runPayload := args.Command.Run - - opts := getDefaultOptions() - // aliases := make(map[string]string) - if err := scope.OptsFromArgs(&opts.scopeOpts, args); err != nil { - return nil, err - } - - opts.cacheOpts.SkipFilesystem = runPayload.RemoteOnly - opts.cacheOpts.OverrideDir = runPayload.CacheDir - opts.cacheOpts.Workers = runPayload.CacheWorkers - - // Run flags - opts.runOpts.LogPrefix = runPayload.LogPrefix - 
opts.runOpts.Summarize = runPayload.Summarize - opts.runOpts.ExperimentalSpaceID = runPayload.ExperimentalSpaceID - opts.runOpts.EnvMode = runPayload.EnvMode - opts.runOpts.FrameworkInference = runPayload.FrameworkInference - - // Runcache flags - opts.runcacheOpts.SkipReads = runPayload.Force - opts.runcacheOpts.SkipWrites = runPayload.NoCache - - if runPayload.OutputLogs != "" { - err := opts.runcacheOpts.SetTaskOutputMode(runPayload.OutputLogs) - if err != nil { - return nil, err - } - } - - // Run flags - if runPayload.Concurrency != "" { - concurrency, err := util.ParseConcurrency(runPayload.Concurrency) - if err != nil { - return nil, err - } - opts.runOpts.Concurrency = concurrency - } - - opts.runOpts.Parallel = runPayload.Parallel - opts.runOpts.Profile = runPayload.Profile - opts.runOpts.ContinueOnError = runPayload.ContinueExecution - opts.runOpts.Only = runPayload.Only - opts.runOpts.NoDaemon = runPayload.NoDaemon - opts.runOpts.SinglePackage = args.Command.Run.SinglePackage - opts.runOpts.LogOrder = args.Command.Run.LogOrder - - // See comment on Graph in turbostate.go for an explanation on Graph's representation. - // If flag is passed... - if runPayload.Graph != nil { - // If no value is attached, we print to stdout - if *runPayload.Graph == "" { - opts.runOpts.GraphDot = true - } else { - // Otherwise, we emit to the file name attached as value - opts.runOpts.GraphDot = false - opts.runOpts.GraphFile = *runPayload.Graph - } - } - - if runPayload.DryRun != "" { - opts.runOpts.DryRunJSON = runPayload.DryRun == _dryRunJSONValue - - if runPayload.DryRun == _dryRunTextValue || runPayload.DryRun == _dryRunJSONValue { - opts.runOpts.DryRun = true - } else { - return nil, fmt.Errorf("invalid dry-run mode: %v", runPayload.DryRun) - } - } - - return opts, nil -} - -func configureRun(base *cmdutil.CmdBase, opts *Opts, signalWatcher *signals.Watcher) *run { - if opts.runOpts.LogOrder == "auto" && ci.Constant() == "GITHUB_ACTIONS" { - opts.runOpts.LogOrder = "grouped" - if opts.runOpts.LogPrefix != "task" { - opts.runOpts.LogPrefix = "none" - } - opts.runOpts.IsGithubActions = true - } - - processes := process.NewManager(base.Logger.Named("processes")) - signalWatcher.AddOnClose(processes.Close) - return &run{ - base: base, - opts: opts, - processes: processes, - } -} - -type run struct { - base *cmdutil.CmdBase - opts *Opts - processes *process.Manager -} - -func (r *run) run(ctx gocontext.Context, targets []string, executionState *turbostate.ExecutionState) error { - startAt := time.Now() - packageJSONPath := r.base.RepoRoot.UntypedJoin("package.json") - rootPackageJSON, err := fs.ReadPackageJSON(packageJSONPath) - if err != nil { - return fmt.Errorf("failed to read package.json: %w", err) - } - - var pkgDepGraph *context.Context - if r.opts.runOpts.SinglePackage { - pkgDepGraph, err = context.SinglePackageGraph(rootPackageJSON, executionState.PackageManager) - } else { - pkgDepGraph, err = context.BuildPackageGraph(r.base.RepoRoot, rootPackageJSON, executionState.PackageManager) - } - - if err != nil { - var warnings *context.Warnings - if errors.As(err, &warnings) { - r.base.LogWarning("Issues occurred when constructing package graph. 
Turbo will function, but some features may not be available", err) - } else { - return err - } - } - - if ui.IsCI && !r.opts.runOpts.NoDaemon { - r.base.Logger.Debug("skipping turbod since we appear to be in a non-interactive context") - } else if !r.opts.runOpts.NoDaemon { - turbodClient, err := daemon.GetClient(ctx, r.base.RepoRoot, r.base.Logger, r.base.TurboVersion, daemon.ClientOpts{}) - if err != nil { - r.base.LogWarning("", errors.Wrap(err, "failed to contact turbod. Continuing in standalone mode")) - } else { - defer func() { _ = turbodClient.Close() }() - r.base.Logger.Debug("running in daemon mode") - daemonClient := daemonclient.New(turbodClient) - r.opts.runcacheOpts.OutputWatcher = daemonClient - } - } - - if err := util.ValidateGraph(&pkgDepGraph.WorkspaceGraph); err != nil { - return errors.Wrap(err, "Invalid package dependency graph") - } - - // TODO: consolidate some of these arguments - // Note: not all properties are set here. GlobalHash and Pipeline keys are set later - g := &graph.CompleteGraph{ - WorkspaceGraph: pkgDepGraph.WorkspaceGraph, - WorkspaceInfos: pkgDepGraph.WorkspaceInfos, - RootNode: pkgDepGraph.RootNode, - TaskDefinitions: map[string]*fs.TaskDefinition{}, - RepoRoot: r.base.RepoRoot, - } - - turboJSON, err := g.GetTurboConfigFromWorkspace(util.RootPkgName, r.opts.runOpts.SinglePackage) - if err != nil { - return err - } - - r.opts.cacheOpts.Signature = r.base.Config.Signature - - // If a spaceID wasn't passed as a flag, read it from the turbo.json config. - // If that is not set either, we'll still end up with a blank string. - if r.opts.runOpts.ExperimentalSpaceID == "" { - r.opts.runOpts.ExperimentalSpaceID = turboJSON.SpaceID - } - - pipeline := turboJSON.Pipeline - g.Pipeline = pipeline - scmInstance, err := scm.FromInRepo(r.base.RepoRoot) - if err != nil { - if errors.Is(err, scm.ErrFallback) { - r.base.Logger.Debug("", err) - } else { - return errors.Wrap(err, "failed to create SCM") - } - } - filteredPkgs, isAllPackages, err := scope.ResolvePackages(&r.opts.scopeOpts, r.base.RepoRoot, scmInstance, pkgDepGraph, r.base.UI, r.base.Logger) - if err != nil { - return errors.Wrap(err, "failed to resolve packages to run") - } - if isAllPackages { - // if there is a root task for any of our targets, we need to add it - for _, target := range targets { - key := util.RootTaskID(target) - if _, ok := pipeline[key]; ok { - filteredPkgs.Add(util.RootPkgName) - // we only need to know we're running a root task once to add it for consideration - break - } - } - } - - envAtExecutionStart := env.GetEnvMap() - - globalHashInputs, err := getGlobalHashInputs( - r.base.Logger, - r.base.RepoRoot, - rootPackageJSON, - pkgDepGraph.PackageManager, - pkgDepGraph.Lockfile, - turboJSON.GlobalDeps, - envAtExecutionStart, - turboJSON.GlobalEnv, - turboJSON.GlobalPassThroughEnv, - r.opts.runOpts.EnvMode, - r.opts.runOpts.FrameworkInference, - turboJSON.GlobalDotEnv, - ) - - if err != nil { - return fmt.Errorf("failed to collect global hash inputs: %v", err) - } - - if globalHash, err := calculateGlobalHashFromHashableInputs(globalHashInputs); err == nil { - r.base.Logger.Debug("global hash", "value", globalHash) - g.GlobalHash = globalHash - } else { - return fmt.Errorf("failed to calculate global hash: %v", err) - } - - r.base.Logger.Debug("local cache folder", "path", r.opts.cacheOpts.OverrideDir) - - rs := &runSpec{ - Targets: targets, - FilteredPkgs: filteredPkgs, - Opts: r.opts, - } - packageManager := pkgDepGraph.PackageManager - - engine, err := buildTaskGraphEngine( - 
g, - rs, - r.opts.runOpts.SinglePackage, - ) - - if err != nil { - return errors.Wrap(err, "error preparing engine") - } - - taskHashTracker := taskhash.NewTracker( - g.RootNode, - g.GlobalHash, - envAtExecutionStart, - // TODO(mehulkar): remove g.Pipeline, because we need to get task definitions from CompleteGraph instead - g.Pipeline, - ) - - g.TaskHashTracker = taskHashTracker - - // CalculateFileHashes assigns PackageInputsExpandedHashes as a side-effect - err = taskHashTracker.CalculateFileHashes( - engine.TaskGraph.Vertices(), - rs.Opts.runOpts.Concurrency, - g.WorkspaceInfos, - g.TaskDefinitions, - r.base.RepoRoot, - ) - - if err != nil { - return errors.Wrap(err, "error hashing package files") - } - - // If we are running in parallel, then we remove all the edges in the graph - // except for the root. Rebuild the task graph for backwards compatibility. - // We still use dependencies specified by the pipeline configuration. - if rs.Opts.runOpts.Parallel { - for _, edge := range g.WorkspaceGraph.Edges() { - if edge.Target() != g.RootNode { - g.WorkspaceGraph.RemoveEdge(edge) - } - } - engine, err = buildTaskGraphEngine( - g, - rs, - r.opts.runOpts.SinglePackage, - ) - if err != nil { - return errors.Wrap(err, "error preparing engine") - } - } - - // Graph Run - if rs.Opts.runOpts.GraphFile != "" || rs.Opts.runOpts.GraphDot { - return GraphRun(ctx, rs, engine, r.base) - } - - packagesInScope := rs.FilteredPkgs.UnsafeListOfStrings() - sort.Strings(packagesInScope) - // Initiate analytics and cache - analyticsClient := r.initAnalyticsClient(ctx) - defer analyticsClient.CloseWithTimeout(50 * time.Millisecond) - turboCache, err := r.initCache(ctx, rs, analyticsClient) - - if err != nil { - if errors.Is(err, cache.ErrNoCachesEnabled) { - r.base.UI.Warn("No caches are enabled. You can try \"turbo login\", \"turbo link\", or ensuring you are not passing --remote-only to enable caching") - } else { - return errors.Wrap(err, "failed to set up caching") - } - } - - resolvedPassThroughEnvVars, err := envAtExecutionStart.FromWildcards(globalHashInputs.passThroughEnv) - if err != nil { - return err - } - - globalEnvMode := rs.Opts.runOpts.EnvMode - if globalEnvMode == util.Infer && turboJSON.GlobalPassThroughEnv != nil { - globalEnvMode = util.Strict - } - - // RunSummary contains information that is statically analyzable about - // the tasks that we expect to run based on the user command. 
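[Editor's aside, not part of the patch] For --parallel above, the run drops every workspace-graph edge whose target is not the root node before rebuilding the engine, so tasks no longer wait on each other. A toy adjacency-map version of that rewrite (the deleted code does this through the dag package's Edges/RemoveEdge):

package main

import "fmt"

func main() {
	const root = "___ROOT___"
	// source task -> the tasks it depends on
	edges := map[string][]string{
		"app": {"lib", root},
		"lib": {root},
	}
	// Keep only edges pointing at the root, mirroring the --parallel rewrite.
	for src, targets := range edges {
		kept := targets[:0]
		for _, tgt := range targets {
			if tgt == root {
				kept = append(kept, tgt)
			}
		}
		edges[src] = kept
	}
	fmt.Println(edges) // map[app:[___ROOT___] lib:[___ROOT___]]
}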
- summary := runsummary.NewRunSummary( - startAt, - r.base.RepoRoot, - rs.Opts.scopeOpts.PackageInferenceRoot, - r.base.TurboVersion, - r.base.APIClient, - r.base.SpacesAPIClient, - rs.Opts.runOpts, - packagesInScope, - globalEnvMode, - envAtExecutionStart, - runsummary.NewGlobalHashSummary( - globalHashInputs.globalCacheKey, - globalHashInputs.globalFileHashMap, - globalHashInputs.rootExternalDepsHash, - globalHashInputs.env, - globalHashInputs.passThroughEnv, - globalHashInputs.dotEnv, - globalHashInputs.resolvedEnvVars, - resolvedPassThroughEnvVars, - ), - rs.Opts.SynthesizeCommand(rs.Targets), - ) - - // Dry Run - if rs.Opts.runOpts.DryRun { - return DryRun( - ctx, - g, - rs, - engine, - taskHashTracker, - turboCache, - globalEnvMode, - globalHashInputs.resolvedEnvVars.All, - resolvedPassThroughEnvVars, - r.base, - summary, - ) - } - - // Regular run - return RealRun( - ctx, - g, - rs, - engine, - taskHashTracker, - turboCache, - globalEnvMode, - globalHashInputs.resolvedEnvVars.All, - resolvedPassThroughEnvVars, - packagesInScope, - r.base, - summary, - // Extra arg only for regular runs, dry-run doesn't get this - packageManager, - r.processes, - executionState, - ) -} - -func (r *run) initAnalyticsClient(ctx gocontext.Context) analytics.Client { - apiClient := r.base.APIClient - var analyticsSink analytics.Sink - - if apiClient.IsLinked() { - analyticsSink = apiClient - } else { - r.opts.cacheOpts.SkipRemote = true - analyticsSink = analytics.NullSink - } - - // After we know if its _possible_ to enable remote cache, check the config - // and disable it if wanted. - if r.base.Config.Enabled != nil { - r.opts.cacheOpts.SkipRemote = !*r.base.Config.Enabled - } - - analyticsClient := analytics.NewClient(ctx, analyticsSink, r.base.Logger.Named("analytics")) - return analyticsClient -} - -func (r *run) initCache(ctx gocontext.Context, rs *runSpec, analyticsClient analytics.Client) (cache.Cache, error) { - apiClient := r.base.APIClient - // Theoretically this is overkill, but bias towards not spamming the console - once := &sync.Once{} - - return cache.New(rs.Opts.cacheOpts, r.base.RepoRoot, apiClient, analyticsClient, func(_cache cache.Cache, err error) { - // Currently the HTTP Cache is the only one that can be disabled. - // With a cache system refactor, we might consider giving names to the caches so - // we can accurately report them here. - once.Do(func() { - r.base.LogWarning("Remote Caching is unavailable", err) - }) - }) -} - -func buildTaskGraphEngine( - g *graph.CompleteGraph, - rs *runSpec, - isSinglePackage bool, -) (*core.Engine, error) { - engine := core.NewEngine(g, isSinglePackage) - - // Note: g.Pipeline is a map, but this for loop only cares about the keys - for taskName := range g.Pipeline { - engine.AddTask(taskName) - } - - if err := engine.Prepare(&core.EngineBuildingOptions{ - Packages: rs.FilteredPkgs.UnsafeListOfStrings(), - TaskNames: rs.Targets, - TasksOnly: rs.Opts.runOpts.Only, - }); err != nil { - return nil, err - } - - // Check for cycles in the DAG. - if err := util.ValidateGraph(engine.TaskGraph); err != nil { - return nil, fmt.Errorf("Invalid task dependency graph:\n%v", err) - } - - // Check that no tasks would be blocked by a persistent task. Note that the - // parallel flag ignores both concurrency and dependencies, so in that scenario - // we don't need to validate. 
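[Editor's aside, not part of the patch] initCache above wraps its warning callback in sync.Once so that repeated remote-cache failures surface the "Remote Caching is unavailable" message only once rather than spamming the console. A standalone sketch:

package main

import (
	"fmt"
	"sync"
)

func main() {
	once := &sync.Once{}
	warn := func(err error) {
		// Many errors may arrive; only the first produces output.
		once.Do(func() { fmt.Println("warning:", err) })
	}
	for i := 0; i < 3; i++ {
		warn(fmt.Errorf("request %d failed", i))
	}
}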
- if !rs.Opts.runOpts.Parallel { - if err := engine.ValidatePersistentDependencies(g, rs.Opts.runOpts.Concurrency); err != nil { - return nil, fmt.Errorf("Invalid persistent task configuration:\n%v", err) - } - } - - return engine, nil -} - -// dry run custom flag -// NOTE: These *must* be kept in sync with the corresponding Rust -// enum definitions in shim/src/commands/mod.rs -const ( - _dryRunJSONValue = "Json" - _dryRunTextValue = "Text" -) diff --git a/cli/internal/run/run_spec.go b/cli/internal/run/run_spec.go deleted file mode 100644 index d1faa37309b5f..0000000000000 --- a/cli/internal/run/run_spec.go +++ /dev/null @@ -1,88 +0,0 @@ -// Package run implements `turbo run` -// This file implements some structs for options -package run - -import ( - "strings" - - "github.com/vercel/turbo/cli/internal/cache" - "github.com/vercel/turbo/cli/internal/runcache" - "github.com/vercel/turbo/cli/internal/scope" - "github.com/vercel/turbo/cli/internal/util" -) - -// runSpec contains the run-specific configuration elements that come from a particular -// invocation of turbo. -type runSpec struct { - // Target is a list of task that are going to run this time - // E.g. in `turbo run build lint` Targets will be ["build", "lint"] - Targets []string - - // FilteredPkgs is the list of packages that are relevant for this run. - FilteredPkgs util.Set - - // Opts contains various opts, gathered from CLI flags, - // but bucketed in smaller structs based on what they mean. - Opts *Opts -} - -// ArgsForTask returns the set of args that need to be passed through to the task -func (rs *runSpec) ArgsForTask(task string) []string { - passThroughArgs := make([]string, 0, len(rs.Opts.runOpts.PassThroughArgs)) - for _, target := range rs.Targets { - if target == task { - passThroughArgs = append(passThroughArgs, rs.Opts.runOpts.PassThroughArgs...) - } - } - return passThroughArgs -} - -// Opts holds the current run operations configuration -type Opts struct { - runOpts util.RunOpts - cacheOpts cache.Opts - runcacheOpts runcache.Opts - scopeOpts scope.Opts -} - -// SynthesizeCommand produces a command that produces an equivalent set of packages, tasks, -// and task arguments to what the current set of opts selects. 
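[Editor's aside, not part of the patch] ArgsForTask above attaches the run's pass-through args only to tasks that were named as targets; other tasks get an empty slice. A standalone sketch with a usage demo (argsForTask is an illustrative free function mirroring the method):

package main

import "fmt"

func argsForTask(targets, passThrough []string, task string) []string {
	out := make([]string, 0, len(passThrough))
	for _, target := range targets {
		if target == task {
			out = append(out, passThrough...)
		}
	}
	return out
}

func main() {
	fmt.Println(argsForTask([]string{"build", "lint"}, []string{"--watch"}, "build")) // [--watch]
	fmt.Println(argsForTask([]string{"build", "lint"}, []string{"--watch"}, "test"))  // []
}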
-func (o *Opts) SynthesizeCommand(tasks []string) string { - cmd := "turbo run" - cmd += " " + strings.Join(tasks, " ") - for _, filterPattern := range o.scopeOpts.FilterPatterns { - cmd += " --filter=" + filterPattern - } - for _, filterPattern := range o.scopeOpts.LegacyFilter.AsFilterPatterns() { - cmd += " --filter=" + filterPattern - } - if o.runOpts.Parallel { - cmd += " --parallel" - } - if o.runOpts.ContinueOnError { - cmd += " --continue" - } - if o.runOpts.DryRun { - if o.runOpts.DryRunJSON { - cmd += " --dry=json" - } else { - cmd += " --dry" - } - } - if o.runOpts.Only { - cmd += " --only" - } - if len(o.runOpts.PassThroughArgs) > 0 { - cmd += " -- " + strings.Join(o.runOpts.PassThroughArgs, " ") - } - return cmd -} - -// getDefaultOptions returns the default set of Opts for every run -func getDefaultOptions() *Opts { - return &Opts{ - runOpts: util.RunOpts{ - Concurrency: 10, - }, - } -} diff --git a/cli/internal/run/run_spec_test.go b/cli/internal/run/run_spec_test.go deleted file mode 100644 index 51cdd2917e2c1..0000000000000 --- a/cli/internal/run/run_spec_test.go +++ /dev/null @@ -1,120 +0,0 @@ -package run - -import ( - "testing" - - "github.com/vercel/turbo/cli/internal/scope" - "github.com/vercel/turbo/cli/internal/util" -) - -func TestSynthesizeCommand(t *testing.T) { - testCases := []struct { - filterPatterns []string - legacyFilter scope.LegacyFilter - only bool - passThroughArgs []string - parallel bool - continueOnError bool - dryRun bool - dryRunJSON bool - tasks []string - expected string - }{ - { - filterPatterns: []string{"my-app"}, - tasks: []string{"build"}, - expected: "turbo run build --filter=my-app", - }, - { - tasks: []string{"build"}, - only: true, - expected: "turbo run build --only", - }, - { - filterPatterns: []string{"my-app"}, - tasks: []string{"build"}, - - expected: "turbo run build --filter=my-app", - }, - { - filterPatterns: []string{"my-app"}, - tasks: []string{"build"}, - passThroughArgs: []string{"-v", "--foo=bar"}, - expected: "turbo run build --filter=my-app -- -v --foo=bar", - }, - { - legacyFilter: scope.LegacyFilter{ - Entrypoints: []string{"my-app"}, - SkipDependents: true, - }, - tasks: []string{"build"}, - passThroughArgs: []string{"-v", "--foo=bar"}, - expected: "turbo run build --filter=my-app -- -v --foo=bar", - }, - { - legacyFilter: scope.LegacyFilter{ - Entrypoints: []string{"my-app"}, - SkipDependents: true, - }, - filterPatterns: []string{"other-app"}, - tasks: []string{"build"}, - passThroughArgs: []string{"-v", "--foo=bar"}, - expected: "turbo run build --filter=other-app --filter=my-app -- -v --foo=bar", - }, - { - legacyFilter: scope.LegacyFilter{ - Entrypoints: []string{"my-app"}, - IncludeDependencies: true, - Since: "some-ref", - }, - filterPatterns: []string{"other-app"}, - tasks: []string{"build"}, - expected: "turbo run build --filter=other-app --filter=...my-app...[some-ref]...", - }, - { - filterPatterns: []string{"my-app"}, - tasks: []string{"build"}, - parallel: true, - continueOnError: true, - expected: "turbo run build --filter=my-app --parallel --continue", - }, - { - filterPatterns: []string{"my-app"}, - tasks: []string{"build"}, - dryRun: true, - expected: "turbo run build --filter=my-app --dry", - }, - { - filterPatterns: []string{"my-app"}, - tasks: []string{"build"}, - dryRun: true, - dryRunJSON: true, - expected: "turbo run build --filter=my-app --dry=json", - }, - } - - for _, testCase := range testCases { - testCase := testCase - t.Run(testCase.expected, func(t *testing.T) { - o := Opts{ - scopeOpts: 
scope.Opts{ - FilterPatterns: testCase.filterPatterns, - LegacyFilter: testCase.legacyFilter, - }, - runOpts: util.RunOpts{ - PassThroughArgs: testCase.passThroughArgs, - Parallel: testCase.parallel, - ContinueOnError: testCase.continueOnError, - DryRun: testCase.dryRun, - DryRunJSON: testCase.dryRunJSON, - Only: testCase.only, - }, - } - cmd := o.SynthesizeCommand(testCase.tasks) - if cmd != testCase.expected { - t.Errorf("SynthesizeCommand() got %v, want %v", cmd, testCase.expected) - } - }) - } - -} diff --git a/cli/internal/runcache/output_watcher.go b/cli/internal/runcache/output_watcher.go deleted file mode 100644 index 0eddce998d1a5..0000000000000 --- a/cli/internal/runcache/output_watcher.go +++ /dev/null @@ -1,32 +0,0 @@ -package runcache - -import ( - "context" - - "github.com/vercel/turbo/cli/internal/fs/hash" -) - -// OutputWatcher instances are responsible for tracking changes to task outputs -type OutputWatcher interface { - // GetChangedOutputs returns which of the given globs have changed since the specified hash was last run - GetChangedOutputs(ctx context.Context, hash string, repoRelativeOutputGlobs []string) ([]string, int, error) - // NotifyOutputsWritten tells the watcher that the given globs have been cached with the specified hash - NotifyOutputsWritten(ctx context.Context, hash string, repoRelativeOutputGlobs hash.TaskOutputs, timeSaved int) error -} - -// NoOpOutputWatcher implements OutputWatcher, but always considers every glob to have changed -type NoOpOutputWatcher struct{} - -var _ OutputWatcher = (*NoOpOutputWatcher)(nil) - -// GetChangedOutputs implements OutputWatcher.GetChangedOutputs. -// Since this is a no-op watcher, no tracking is done. -func (NoOpOutputWatcher) GetChangedOutputs(_ context.Context, _ string, repoRelativeOutputGlobs []string) ([]string, int, error) { - return repoRelativeOutputGlobs, 0, nil -} - -// NotifyOutputsWritten implements OutputWatcher.NotifyOutputsWritten. 
-// Since this is a no-op watcher, consider all globs to have changed -func (NoOpOutputWatcher) NotifyOutputsWritten(_ context.Context, _ string, _ hash.TaskOutputs, _ int) error { - return nil -} diff --git a/cli/internal/runcache/runcache.go b/cli/internal/runcache/runcache.go deleted file mode 100644 index 5d300572c05e1..0000000000000 --- a/cli/internal/runcache/runcache.go +++ /dev/null @@ -1,362 +0,0 @@ -package runcache - -import ( - "bufio" - "context" - "fmt" - "io" - "os" - "path/filepath" - "strings" - - "github.com/fatih/color" - "github.com/hashicorp/go-hclog" - "github.com/mitchellh/cli" - "github.com/vercel/turbo/cli/internal/cache" - "github.com/vercel/turbo/cli/internal/colorcache" - "github.com/vercel/turbo/cli/internal/fs" - "github.com/vercel/turbo/cli/internal/fs/hash" - "github.com/vercel/turbo/cli/internal/globby" - "github.com/vercel/turbo/cli/internal/logstreamer" - "github.com/vercel/turbo/cli/internal/nodes" - "github.com/vercel/turbo/cli/internal/turbopath" - "github.com/vercel/turbo/cli/internal/ui" - "github.com/vercel/turbo/cli/internal/util" -) - -// LogReplayer is a function that is responsible for replaying the contents of a given log file -type LogReplayer = func(logger hclog.Logger, output *cli.PrefixedUi, logFile turbopath.AbsoluteSystemPath) - -// Opts holds the configurable options for a RunCache instance -type Opts struct { - SkipReads bool - SkipWrites bool - TaskOutputModeOverride *util.TaskOutputMode - LogReplayer LogReplayer - OutputWatcher OutputWatcher -} - -// SetTaskOutputMode parses the task output mode from string and then sets it in opts -func (opts *Opts) SetTaskOutputMode(value string) error { - outputMode, err := util.FromTaskOutputModeString(value) - if err != nil { - return fmt.Errorf("must be one of \"%v\"", TaskOutputModes()) - } - opts.TaskOutputModeOverride = &outputMode - return nil -} - -// TaskOutputModes creates the description string for task outputs -func TaskOutputModes() string { - var builder strings.Builder - - first := true - for _, mode := range util.TaskOutputModeStrings { - if !first { - builder.WriteString("|") - } - first = false - builder.WriteString(string(mode)) - } - return builder.String() -} - -// RunCache represents the interface to the cache for a single `turbo run` -type RunCache struct { - taskOutputModeOverride *util.TaskOutputMode - cache cache.Cache - readsDisabled bool - writesDisabled bool - repoRoot turbopath.AbsoluteSystemPath - logReplayer LogReplayer - outputWatcher OutputWatcher - colorCache *colorcache.ColorCache -} - -// New returns a new instance of RunCache, wrapping the given cache -func New(cache cache.Cache, repoRoot turbopath.AbsoluteSystemPath, opts Opts, colorCache *colorcache.ColorCache) *RunCache { - rc := &RunCache{ - taskOutputModeOverride: opts.TaskOutputModeOverride, - cache: cache, - readsDisabled: opts.SkipReads, - writesDisabled: opts.SkipWrites, - repoRoot: repoRoot, - logReplayer: opts.LogReplayer, - outputWatcher: opts.OutputWatcher, - colorCache: colorCache, - } - - if rc.logReplayer == nil { - rc.logReplayer = defaultLogReplayer - } - if rc.outputWatcher == nil { - rc.outputWatcher = &NoOpOutputWatcher{} - } - return rc -} - -// TaskCache represents a single task's (package-task?) 
interface to the RunCache -// and controls access to the task's outputs -type TaskCache struct { - ExpandedOutputs []turbopath.AnchoredSystemPath - rc *RunCache - repoRelativeGlobs hash.TaskOutputs - hash string - pt *nodes.PackageTask - taskOutputMode util.TaskOutputMode - cachingDisabled bool - LogFileName turbopath.AbsoluteSystemPath -} - -// RestoreOutputs attempts to restore output for the corresponding task from the cache. -// It returns the cache status (which carries the time saved) and an error, so the consumer -// can understand what happened. -func (tc *TaskCache) RestoreOutputs(ctx context.Context, prefixedUI *cli.PrefixedUi, progressLogger hclog.Logger) (cache.ItemStatus, error) { - if tc.cachingDisabled || tc.rc.readsDisabled { - if tc.taskOutputMode != util.NoTaskOutput && tc.taskOutputMode != util.ErrorTaskOutput { - prefixedUI.Output(fmt.Sprintf("cache bypass, force executing %s", ui.Dim(tc.hash))) - } - return cache.NewCacheMiss(), nil - } - - changedOutputGlobs, timeSavedFromDaemon, err := tc.rc.outputWatcher.GetChangedOutputs(ctx, tc.hash, tc.repoRelativeGlobs.Inclusions) - - if err != nil { - progressLogger.Warn(fmt.Sprintf("Failed to check if we can skip restoring outputs for %v: %v. Proceeding to check cache", tc.pt.TaskID, err)) - changedOutputGlobs = tc.repoRelativeGlobs.Inclusions - } - - hasChangedOutputs := len(changedOutputGlobs) > 0 - var cacheStatus cache.ItemStatus - if hasChangedOutputs { - // Note that we currently don't use the output globs when restoring, but we could in the - // future to avoid doing unnecessary file I/O. We also need to pass along the exclusion - // globs as well. - itemStatus, restoredFiles, err := tc.rc.cache.Fetch(tc.rc.repoRoot, tc.hash, nil) - // Assign to the variable declared outside this block so we can return it at the end of the function - cacheStatus = itemStatus - tc.ExpandedOutputs = restoredFiles - if err != nil { - // If there was an error fetching from cache, we'll say there was no cache hit - return cache.NewCacheMiss(), err - } else if !itemStatus.Hit { - if tc.taskOutputMode != util.NoTaskOutput && tc.taskOutputMode != util.ErrorTaskOutput { - prefixedUI.Output(fmt.Sprintf("cache miss, executing %s", ui.Dim(tc.hash))) - } - // If there was no hit, report the miss to the caller - return cache.NewCacheMiss(), nil - } - - if err := tc.rc.outputWatcher.NotifyOutputsWritten(ctx, tc.hash, tc.repoRelativeGlobs, cacheStatus.TimeSaved); err != nil { - // Don't fail the whole operation just because we failed to watch the outputs - prefixedUI.Warn(ui.Dim(fmt.Sprintf("Failed to mark outputs as cached for %v: %v", tc.pt.TaskID, err))) - } - } else { - // If no outputs have changed, that means we have a local cache hit. - cacheStatus = cache.ItemStatus{ - Hit: true, - Source: cache.CacheSourceFS, - TimeSaved: timeSavedFromDaemon, - } - } - - // Some more context to add into the cache hit messages. - // This isn't the cleanest way to update the log message, so we should revisit during Rust port.
- moreContext := "" - if !hasChangedOutputs { - moreContext = " (outputs already on disk)" - } - - switch tc.taskOutputMode { - // When only showing new task output, cached output should only show the computed hash - case util.NewTaskOutput: - fallthrough - case util.HashTaskOutput: - prefixedUI.Info(fmt.Sprintf("cache hit%s, suppressing logs %s", moreContext, ui.Dim(tc.hash))) - case util.FullTaskOutput: - progressLogger.Debug("log file", "path", tc.LogFileName) - prefixedUI.Info(fmt.Sprintf("cache hit%s, replaying logs %s", moreContext, ui.Dim(tc.hash))) - tc.ReplayLogFile(prefixedUI, progressLogger) - case util.ErrorTaskOutput: - // The task succeeded, so we don't output anything in this case - default: - // NoLogs, do not output anything - } - - return cacheStatus, nil -} - -// ReplayLogFile writes out the stored logfile to the terminal -func (tc TaskCache) ReplayLogFile(prefixedUI *cli.PrefixedUi, progressLogger hclog.Logger) { - if tc.LogFileName.FileExists() { - tc.rc.logReplayer(progressLogger, prefixedUI, tc.LogFileName) - } -} - -// OnError replays the logfile if --output-mode=errors-only. -// This is called if the task exited with a non-zero error code. -func (tc TaskCache) OnError(terminal *cli.PrefixedUi, logger hclog.Logger) { - if tc.taskOutputMode == util.ErrorTaskOutput { - terminal.Output(fmt.Sprintf("cache miss, executing %s", ui.Dim(tc.hash))) - tc.ReplayLogFile(terminal, logger) - } -} - -// nopWriteCloser is modeled after io.NopCloser, which is for Readers -type nopWriteCloser struct { - io.Writer -} - -func (nopWriteCloser) Close() error { return nil } - -type fileWriterCloser struct { - io.Writer - file *os.File - bufio *bufio.Writer -} - -func (fwc *fileWriterCloser) Close() error { - if err := fwc.bufio.Flush(); err != nil { - return err - } - return fwc.file.Close() -} - -// OutputWriter creates a sink suitable for handling the output of the command associated -// with this task.
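The OutputWriter implementation that follows tees task output into a buffered log file and, depending on the output mode, also to the terminal; fileWriterCloser above exists so that Close flushes the bufio.Writer before closing the file, otherwise the tail of the log would be lost. A minimal runnable sketch of the same tee-and-flush pattern, with illustrative names (logTee, newLogTee) that are not part of this codebase:

package main

import (
	"bufio"
	"fmt"
	"io"
	"os"
)

// logTee mirrors fileWriterCloser: writes fan out to the terminal and to a
// buffered log file; Close flushes the buffer before closing the file.
type logTee struct {
	io.Writer
	file *os.File
	buf  *bufio.Writer
}

func (t *logTee) Close() error {
	if err := t.buf.Flush(); err != nil {
		return err
	}
	return t.file.Close()
}

func newLogTee(path string, terminal io.Writer) (*logTee, error) {
	f, err := os.Create(path)
	if err != nil {
		return nil, err
	}
	buf := bufio.NewWriter(f)
	return &logTee{Writer: io.MultiWriter(terminal, buf), file: f, buf: buf}, nil
}

func main() {
	tee, err := newLogTee("task.log", os.Stdout)
	if err != nil {
		panic(err)
	}
	fmt.Fprintln(tee, "hello from the task") // reaches both stdout and task.log
	_ = tee.Close()
}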
-func (tc TaskCache) OutputWriter(prefix string, ioWriter io.Writer) (io.WriteCloser, error) { - // an os.Stdout wrapper that will add prefixes before printing to stdout - prettyIoWriter := logstreamer.NewPrettyIoWriter(prefix, ioWriter) - - if tc.cachingDisabled || tc.rc.writesDisabled { - return nopWriteCloser{prettyIoWriter}, nil - } - // Set up the log file - if err := tc.LogFileName.EnsureDir(); err != nil { - return nil, err - } - - output, err := tc.LogFileName.Create() - if err != nil { - return nil, err - } - - bufWriter := bufio.NewWriter(output) - fwc := &fileWriterCloser{ - file: output, - bufio: bufWriter, - } - if tc.taskOutputMode == util.NoTaskOutput || tc.taskOutputMode == util.HashTaskOutput || tc.taskOutputMode == util.ErrorTaskOutput { - // only write to log file, not to stdout - fwc.Writer = bufWriter - } else { - fwc.Writer = io.MultiWriter(prettyIoWriter, bufWriter) - } - - return fwc, nil -} - -var _emptyIgnore []string - -// SaveOutputs is responsible for saving the outputs of the task to the cache, after the task has completed -func (tc *TaskCache) SaveOutputs(ctx context.Context, logger hclog.Logger, terminal cli.Ui, duration int) error { - if tc.cachingDisabled || tc.rc.writesDisabled { - return nil - } - - logger.Debug("caching output", "outputs", tc.repoRelativeGlobs) - - filesToBeCached, err := globby.GlobAll(tc.rc.repoRoot.ToStringDuringMigration(), tc.repoRelativeGlobs.Inclusions, tc.repoRelativeGlobs.Exclusions) - if err != nil { - return err - } - - relativePaths := make([]turbopath.AnchoredSystemPath, len(filesToBeCached)) - - for index, value := range filesToBeCached { - relativePath, err := tc.rc.repoRoot.RelativePathString(value) - if err != nil { - logger.Error(fmt.Sprintf("error: %v", err)) - terminal.Error(fmt.Sprintf("%s%s", ui.ERROR_PREFIX, color.RedString(" %v", fmt.Errorf("File path cannot be made relative: %w", err)))) - continue - } - relativePaths[index] = fs.UnsafeToAnchoredSystemPath(relativePath) - } - - if err = tc.rc.cache.Put(tc.rc.repoRoot, tc.hash, duration, relativePaths); err != nil { - return err - } - err = tc.rc.outputWatcher.NotifyOutputsWritten(ctx, tc.hash, tc.repoRelativeGlobs, duration) - if err != nil { - // Don't fail the cache write because we also failed to record it, we will just do - // extra I/O in the future restoring files that haven't changed from cache - logger.Warn(fmt.Sprintf("Failed to mark outputs as cached for %v: %v", tc.pt.TaskID, err)) - terminal.Warn(ui.Dim(fmt.Sprintf("Failed to mark outputs as cached for %v: %v", tc.pt.TaskID, err))) - } - - tc.ExpandedOutputs = relativePaths - - return nil -} - -// TaskCache returns a TaskCache instance, providing an interface to the underlying cache specific -// to this run and the given PackageTask -func (rc *RunCache) TaskCache(pt *nodes.PackageTask, packageTaskHash string) TaskCache { - logFileName := rc.repoRoot.UntypedJoin(pt.RepoRelativeSystemLogFile()) - hashableOutputs := pt.HashableOutputs() - repoRelativeGlobs := hash.TaskOutputs{ - Inclusions: make([]string, len(hashableOutputs.Inclusions)), - Exclusions: make([]string, len(hashableOutputs.Exclusions)), - } - - for index, output := range hashableOutputs.Inclusions { - repoRelativeGlobs.Inclusions[index] = filepath.Join(pt.Pkg.Dir.ToStringDuringMigration(), output) - } - for index, output := range hashableOutputs.Exclusions { - repoRelativeGlobs.Exclusions[index] = filepath.Join(pt.Pkg.Dir.ToStringDuringMigration(), output) - } - - taskOutputMode := pt.TaskDefinition.OutputMode - if rc.taskOutputModeOverride != nil
{ - taskOutputMode = *rc.taskOutputModeOverride - } - - return TaskCache{ - ExpandedOutputs: []turbopath.AnchoredSystemPath{}, - rc: rc, - repoRelativeGlobs: repoRelativeGlobs, - hash: packageTaskHash, - pt: pt, - taskOutputMode: taskOutputMode, - cachingDisabled: !pt.TaskDefinition.Cache, - LogFileName: logFileName, - } -} - -// defaultLogReplayer will try to replay logs back to the given Ui instance -func defaultLogReplayer(logger hclog.Logger, output *cli.PrefixedUi, logFileName turbopath.AbsoluteSystemPath) { - logger.Debug("start replaying logs") - f, err := logFileName.Open() - if err != nil { - output.Warn(fmt.Sprintf("error reading logs: %v", err)) - logger.Error(fmt.Sprintf("error reading logs: %v", err.Error())) - // Nothing to replay if the log file could not be opened - return - } - defer func() { _ = f.Close() }() - scan := bufio.NewScanner(f) - for scan.Scan() { - str := string(scan.Bytes()) - // cli.PrefixedUi won't prefix empty strings (it'll just print them as empty strings). - // So if we have a blank string, we'll just output the string here, instead of passing - // it onto the PrefixedUi. - if str == "" { - // Just output the prefix if the current line is a blank string - // Note: output.OutputPrefix is also a colored prefix already - output.Ui.Output(output.OutputPrefix) - } else { - // Writing to Stdout - output.Output(str) - } - - } - logger.Debug("finish replaying logs") -} diff --git a/cli/internal/runsummary/execution_summary.go b/cli/internal/runsummary/execution_summary.go deleted file mode 100644 index d60b6158c83b0..0000000000000 --- a/cli/internal/runsummary/execution_summary.go +++ /dev/null @@ -1,291 +0,0 @@ -package runsummary - -import ( - "encoding/json" - "fmt" - "os" - "sync" - "time" - - "github.com/vercel/turbo/cli/internal/chrometracing" - "github.com/vercel/turbo/cli/internal/fs" - "github.com/vercel/turbo/cli/internal/turbopath" - - "github.com/mitchellh/cli" -) - -// executionEvent represents a single event in the build process, i.e. a target starting or finishing -// building, or reaching some milestone within those steps. -type executionEvent struct { - // Timestamp of this event - Time time.Time - // Duration of this event - Duration time.Duration - // Target which has just changed - Label string - // Its current status - Status executionEventName - // Error, only populated for failure statuses - Err string - - exitCode *int -} - -// executionEventName represents the status of a target when we log a build result. -type executionEventName int - -// The collection of expected build result statuses. -const ( - targetInitialized executionEventName = iota - TargetBuilding - TargetBuildStopped - TargetExecuted - TargetBuilt - TargetCached - TargetBuildFailed -) - -func (en executionEventName) toString() string { - switch en { - case targetInitialized: - return "initialized" - case TargetBuilding: - return "building" - case TargetBuildStopped: - return "buildStopped" - case TargetExecuted: - return "executed" - case TargetBuilt: - return "built" - case TargetCached: - return "cached" - case TargetBuildFailed: - return "buildFailed" - } - - return "" -} - -// TaskExecutionSummary contains data about the state of a single task in a turbo run. -// Some fields are updated over time as the task prepares to execute and finishes execution.
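The executionEventName values above use the standard iota enum plus a string method. A compact runnable sketch of that pattern, with an illustrative phase type that is not from this package:

package main

import "fmt"

// phase mirrors executionEventName: an int enum declared with iota.
type phase int

const (
	phaseQueued phase = iota
	phaseRunning
	phaseDone
)

// String gives each value a stable label, as toString does above; because it
// satisfies fmt.Stringer, fmt picks it up automatically.
func (p phase) String() string {
	switch p {
	case phaseQueued:
		return "queued"
	case phaseRunning:
		return "running"
	case phaseDone:
		return "done"
	}
	return ""
}

func main() {
	fmt.Println(phaseQueued, phaseRunning, phaseDone) // queued running done
}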
-type TaskExecutionSummary struct { - startAt time.Time // set once - status executionEventName // current status, updated during execution - err string // only populated for failure statuses - Duration time.Duration // updated during the task execution - exitCode *int // pointer so we can distinguish between 0 and unknown. -} - -func (ts *TaskExecutionSummary) endTime() time.Time { - return ts.startAt.Add(ts.Duration) -} - -// MarshalJSON munges the TaskExecutionSummary into a format we want -// We'll use an anonymous, private struct for this, so it's not confusingly duplicated -func (ts *TaskExecutionSummary) MarshalJSON() ([]byte, error) { - serializable := struct { - Start int64 `json:"startTime"` - End int64 `json:"endTime"` - Err string `json:"error,omitempty"` - ExitCode *int `json:"exitCode"` - }{ - Start: ts.startAt.UnixMilli(), - End: ts.endTime().UnixMilli(), - Err: ts.err, - ExitCode: ts.exitCode, - } - - return json.Marshal(&serializable) -} - -// ExitCode returns a copy of the exit code; nil means no exit code was received -func (ts *TaskExecutionSummary) ExitCode() *int { - var exitCode int - if ts.exitCode == nil { - return nil - } - exitCode = *ts.exitCode - return &exitCode -} - -// executionSummary is the state of the entire `turbo run`. Individual task state is in the `tasks` field -type executionSummary struct { - // mu guards reads/writes to the `tasks` map - mu sync.Mutex - tasks map[string]*TaskExecutionSummary // key is a taskID - profileFilename string - - // These get serialized to JSON - command string // a synthesized turbo command to produce this invocation - repoPath turbopath.RelativeSystemPath // the (possibly empty) path from the turborepo root to where the command was run - success int // number of tasks that exited successfully (does not include cache hits) - failure int // number of tasks that exited with failure - cached int // number of tasks that had a cache hit - attempted int // number of tasks that started - startedAt time.Time - endedAt time.Time - exitCode int -} - -func (es *executionSummary) Duration() time.Duration { - ended := es.endedAt - if ended.IsZero() { - ended = time.Now() - } - - return ended.Sub(es.startedAt) -} - -// MarshalJSON munges the executionSummary into a format we want -// We'll use an anonymous, private struct for this, so it's not confusingly duplicated.
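That anonymous-struct approach keeps fields unexported while still controlling the JSON wire format. A self-contained sketch of the pattern, using an illustrative taskTiming type rather than anything from this package:

package main

import (
	"encoding/json"
	"fmt"
	"time"
)

// taskTiming keeps its fields unexported; the wire format is defined only
// inside MarshalJSON.
type taskTiming struct {
	startAt  time.Time
	duration time.Duration
}

func (t *taskTiming) MarshalJSON() ([]byte, error) {
	// The anonymous struct is the single place that names the JSON keys.
	return json.Marshal(struct {
		Start int64 `json:"startTime"`
		End   int64 `json:"endTime"`
	}{
		Start: t.startAt.UnixMilli(),
		End:   t.startAt.Add(t.duration).UnixMilli(),
	})
}

func main() {
	b, err := json.Marshal(&taskTiming{startAt: time.Now(), duration: 2 * time.Second})
	if err != nil {
		panic(err)
	}
	fmt.Println(string(b)) // {"startTime":...,"endTime":...}
}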
-func (es *executionSummary) MarshalJSON() ([]byte, error) { - serializable := struct { - Command string `json:"command"` - RepoPath string `json:"repoPath"` - Success int `json:"success"` - Failure int `json:"failed"` - Cached int `json:"cached"` - Attempted int `json:"attempted"` - StartTime int64 `json:"startTime"` - EndTime int64 `json:"endTime"` - ExitCode int `json:"exitCode"` - }{ - Command: es.command, - RepoPath: es.repoPath.ToString(), - StartTime: es.startedAt.UnixMilli(), - EndTime: es.endedAt.UnixMilli(), - Success: es.success, - Failure: es.failure, - Cached: es.cached, - Attempted: es.attempted, - ExitCode: es.exitCode, - } - - return json.Marshal(&serializable) -} - -// newExecutionSummary creates an executionSummary instance to track events in a `turbo run`. -func newExecutionSummary(command string, repoPath turbopath.RelativeSystemPath, start time.Time, tracingProfile string) *executionSummary { - if tracingProfile != "" { - chrometracing.EnableTracing() - } - - return &executionSummary{ - command: command, - repoPath: repoPath, - success: 0, - failure: 0, - cached: 0, - attempted: 0, - tasks: make(map[string]*TaskExecutionSummary), - startedAt: start, - profileFilename: tracingProfile, - } -} - -// run starts the execution of a single task. It returns a function that can -// be used to update the state of a given taskID with the executionEventName enum -func (es *executionSummary) run(taskID string) (func(outcome executionEventName, err error, exitCode *int), *TaskExecutionSummary) { - start := time.Now() - taskExecutionSummary := es.add(&executionEvent{ - Time: start, - Label: taskID, - Status: targetInitialized, - }) - - tracer := chrometracing.Event(taskID) - - // This function can be called with an enum and an optional error to update - // the state of a given taskID. - tracerFn := func(outcome executionEventName, err error, exitCode *int) { - defer tracer.Done() - now := time.Now() - result := &executionEvent{ - Time: now, - Duration: now.Sub(start), - Label: taskID, - Status: outcome, - // We'll assign this here regardless of whether it is nil, but we'll check for nil - // when we assign it to the taskExecutionSummary. - exitCode: exitCode, - } - - if err != nil { - result.Err = err.Error() - } - - // Ignore the return value here - es.add(result) - } - - return tracerFn, taskExecutionSummary -} - -func (es *executionSummary) add(event *executionEvent) *TaskExecutionSummary { - es.mu.Lock() - defer es.mu.Unlock() - - var taskExecSummary *TaskExecutionSummary - if ts, ok := es.tasks[event.Label]; ok { - // If we already know about this task, we'll update it with the new event - taskExecSummary = ts - } else { - // If we don't know about it yet, init and add it into the parent struct - // (event.Status should always be `targetInitialized` here.)
- taskExecSummary = &TaskExecutionSummary{startAt: event.Time} - es.tasks[event.Label] = taskExecSummary - } - - // Update the Status, Duration, and Err fields - taskExecSummary.status = event.Status - taskExecSummary.err = event.Err - taskExecSummary.Duration = event.Duration - - if event.exitCode != nil { - taskExecSummary.exitCode = event.exitCode - } - - switch { - case event.Status == TargetBuilding: - es.attempted++ - case event.Status == TargetBuildFailed: - es.failure++ - case event.Status == TargetCached: - es.cached++ - case event.Status == TargetBuilt: - es.success++ - } - - return es.tasks[event.Label] -} - -// writeChrometracing writes to a profile name if the `--profile` flag was passed to turbo run -func writeChrometracing(filename string, terminal cli.Ui) error { - outputPath := chrometracing.Path() - if outputPath == "" { - // tracing wasn't enabled - return nil - } - - name := fmt.Sprintf("turbo-%s.trace", time.Now().Format(time.RFC3339)) - if filename != "" { - name = filename - } - if err := chrometracing.Close(); err != nil { - terminal.Warn(fmt.Sprintf("Failed to flush tracing data: %v", err)) - } - cwdRaw, err := os.Getwd() - if err != nil { - return err - } - root, err := fs.GetCwd(cwdRaw) - if err != nil { - return err - } - // chrometracing.Path() is absolute by default, but can still be relative if overridden via $CHROMETRACING_DIR - // so we have to account for that before converting to turbopath.AbsoluteSystemPath - if err := fs.CopyFile(&fs.LstatCachedFile{Path: fs.ResolveUnknownPath(root, outputPath)}, name); err != nil { - return err - } - return nil -} diff --git a/cli/internal/runsummary/format_execution_summary.go b/cli/internal/runsummary/format_execution_summary.go deleted file mode 100644 index f3867ee9819a7..0000000000000 --- a/cli/internal/runsummary/format_execution_summary.go +++ /dev/null @@ -1,92 +0,0 @@ -package runsummary - -import ( - "fmt" - "os" - "sort" - "strings" - "time" - - "github.com/fatih/color" - "github.com/mitchellh/cli" - internalUI "github.com/vercel/turbo/cli/internal/ui" - "github.com/vercel/turbo/cli/internal/util" -) - -func (rsm *Meta) printExecutionSummary(ui cli.Ui) { - maybeFullTurbo := "" - summary := rsm.RunSummary - - attempted := summary.ExecutionSummary.attempted - successful := summary.ExecutionSummary.cached + summary.ExecutionSummary.success - failed := rsm.RunSummary.getFailedTasks() // Note: ExecutionSummary.failure exists, but we need the task names - cached := summary.ExecutionSummary.cached - duration := summary.ExecutionSummary.Duration().Truncate(time.Millisecond) - - if cached == attempted && attempted > 0 { - terminalProgram := os.Getenv("TERM_PROGRAM") - // On the macOS Terminal, the rainbow colors show up as a magenta background - // with a gray background on a single letter.
Instead, we print in bold magenta - if terminalProgram == "Apple_Terminal" { - fallbackTurboColor := color.New(color.FgHiMagenta, color.Bold).SprintFunc() - maybeFullTurbo = fallbackTurboColor(">>> FULL TURBO") - } else { - maybeFullTurbo = internalUI.Rainbow(">>> FULL TURBO") - } - } - - lineData := []summaryLine{ - {header: "Tasks", trailer: util.Sprintf("${BOLD_GREEN}%v successful${RESET}${GRAY}, %v total", successful, attempted)}, - {header: "Cached", trailer: util.Sprintf("%v cached${RESET}${GRAY}, %v total", cached, attempted)}, - {header: "Time", trailer: util.Sprintf("%v${RESET} %v", duration, maybeFullTurbo)}, - } - - if rsm.getPath().FileExists() { - l := summaryLine{header: "Summary", trailer: util.Sprintf("%s", rsm.getPath())} - lineData = append(lineData, l) - } - - if len(failed) > 0 { - formatted := []string{} - for _, t := range failed { - formatted = append(formatted, util.Sprintf("${BOLD_RED}%s${RESET}", t.TaskID)) - } - sort.Strings(formatted) // To make the order deterministic - l := summaryLine{header: "Failed", trailer: strings.Join(formatted, ", ")} - lineData = append(lineData, l) - } - - // Some info we need for left padding - maxlength := 0 - for _, sl := range lineData { - if len(sl.header) > maxlength { - maxlength = len(sl.header) - } - } - - lines := []string{} - for _, sl := range lineData { - paddedHeader := fmt.Sprintf("%*s", maxlength, sl.header) - line := util.Sprintf("${BOLD}%s: %s${RESET}", paddedHeader, sl.trailer) - lines = append(lines, line) - } - - // Print the lines to terminal - if attempted == 0 { - ui.Output("") // Clear the line - ui.Warn("No tasks were executed as part of this run.") - } - - ui.Output("") // Clear the line - - for _, line := range lines { - ui.Output(line) - } - - ui.Output("") -} - -type summaryLine struct { - header string - trailer string -} diff --git a/cli/internal/runsummary/format_json.go b/cli/internal/runsummary/format_json.go deleted file mode 100644 index fcbd578ab422e..0000000000000 --- a/cli/internal/runsummary/format_json.go +++ /dev/null @@ -1,80 +0,0 @@ -package runsummary - -import ( - "bytes" - "encoding/json" - "sort" - - "github.com/pkg/errors" - "github.com/segmentio/ksuid" - "github.com/vercel/turbo/cli/internal/util" -) - -// FormatJSON returns a json string representing a RunSummary -func (rsm *Meta) FormatJSON() ([]byte, error) { - rsm.normalize() // normalize data - - var err error - var buffer bytes.Buffer - - encoder := json.NewEncoder(&buffer) - encoder.SetEscapeHTML(false) - encoder.SetIndent("", " ") - - if rsm.singlePackage { - err = encoder.Encode(nonMonorepoRunSummary(*rsm.RunSummary)) - } else { - err = encoder.Encode(rsm.RunSummary) - } - - if err != nil { - return nil, errors.Wrap(err, "failed to render JSON") - } - - return buffer.Bytes(), nil -} - -func (rsm *Meta) normalize() { - // Remove execution summary for dry runs - if rsm.runType == runTypeDryJSON { - rsm.RunSummary.ExecutionSummary = nil - } - - // For single packages, we don't need the Packages - // and each task summary needs some cleaning. 
- if rsm.singlePackage { - rsm.RunSummary.Packages = []string{} - - for _, task := range rsm.RunSummary.Tasks { - task.cleanForSinglePackage() - } - } - - sort.Sort(byTaskID(rsm.RunSummary.Tasks)) -} - -type byTaskID []*TaskSummary - -func (a byTaskID) Len() int { return len(a) } -func (a byTaskID) Swap(i, j int) { a[i], a[j] = a[j], a[i] } -func (a byTaskID) Less(i, j int) bool { return a[i].TaskID < a[j].TaskID } - -// nonMonorepoRunSummary is an exact copy of RunSummary, but the JSON tags are structured -// for rendering a single-package run of turbo. Notably, we want to always omit packages -// since there is no concept of packages in a single-workspace repo. -// This struct exists solely for the purpose of serializing to JSON and should not be -// used anywhere else. -type nonMonorepoRunSummary struct { - ID ksuid.KSUID `json:"id"` - Version string `json:"version"` - TurboVersion string `json:"turboVersion"` - Monorepo bool `json:"monorepo"` - GlobalHashSummary *GlobalHashSummary `json:"globalCacheInputs"` - Packages []string `json:"-"` - EnvMode util.EnvMode `json:"envMode"` - FrameworkInference bool `json:"frameworkInference"` - ExecutionSummary *executionSummary `json:"execution,omitempty"` - Tasks []*TaskSummary `json:"tasks"` - User string `json:"user"` - SCM *scmState `json:"scm"` -} diff --git a/cli/internal/runsummary/format_text.go b/cli/internal/runsummary/format_text.go deleted file mode 100644 index 7fcb91366f5cb..0000000000000 --- a/cli/internal/runsummary/format_text.go +++ /dev/null @@ -1,110 +0,0 @@ -package runsummary - -import ( - "encoding/json" - "fmt" - "os" - "strconv" - "strings" - "text/tabwriter" - - "github.com/mitchellh/cli" - "github.com/vercel/turbo/cli/internal/util" - "github.com/vercel/turbo/cli/internal/workspace" -) - -// FormatAndPrintText prints a Run Summary to the Terminal UI -func (rsm Meta) FormatAndPrintText(workspaceInfos workspace.Catalog, ui cli.Ui) error { - summary := rsm.RunSummary - - rsm.normalize() // normalize data - - if !rsm.singlePackage { - ui.Output("") - ui.Info(util.Sprintf("${CYAN}${BOLD}Packages in Scope${RESET}")) - p := tabwriter.NewWriter(os.Stdout, 0, 0, 1, ' ', 0) - fmt.Fprintln(p, "Name\tPath\t") - for _, pkg := range summary.Packages { - fmt.Fprintf(p, "%s\t%s\t\n", pkg, workspaceInfos.PackageJSONs[pkg].Dir) - } - if err := p.Flush(); err != nil { - return err - } - } - - fileCount := 0 - for range summary.GlobalHashSummary.GlobalFileHashMap { - fileCount = fileCount + 1 - } - w1 := tabwriter.NewWriter(os.Stdout, 0, 0, 1, ' ', 0) - ui.Output("") - ui.Info(util.Sprintf("${CYAN}${BOLD}Global Hash Inputs${RESET}")) - fmt.Fprintln(w1, util.Sprintf(" ${GREY}Global Files\t=\t%d${RESET}", fileCount)) - fmt.Fprintln(w1, util.Sprintf(" ${GREY}External Dependencies Hash\t=\t%s${RESET}", summary.GlobalHashSummary.RootExternalDepsHash)) - fmt.Fprintln(w1, util.Sprintf(" ${GREY}Global Cache Key\t=\t%s${RESET}", summary.GlobalHashSummary.GlobalCacheKey)) - - fmt.Fprintln(w1, util.Sprintf(" ${GREY}Global .env Files Considered\t=\t%d${RESET}", len(summary.GlobalHashSummary.DotEnv))) - - fmt.Fprintln(w1, util.Sprintf(" ${GREY}Global Env Vars\t=\t%s${RESET}", strings.Join(summary.GlobalHashSummary.EnvVars.Specified.Env, ", "))) - fmt.Fprintln(w1, util.Sprintf(" ${GREY}Global Env Vars Values\t=\t%s${RESET}", strings.Join(summary.GlobalHashSummary.EnvVars.Configured, ", "))) - fmt.Fprintln(w1, util.Sprintf(" ${GREY}Inferred Global Env Vars Values\t=\t%s${RESET}", strings.Join(summary.GlobalHashSummary.EnvVars.Inferred, ", "))) - - 
fmt.Fprintln(w1, util.Sprintf(" ${GREY}Global Passed Through Env Vars\t=\t%s${RESET}", strings.Join(summary.GlobalHashSummary.EnvVars.Specified.PassThroughEnv, ", "))) - fmt.Fprintln(w1, util.Sprintf(" ${GREY}Global Passed Through Env Vars Values\t=\t%s${RESET}", strings.Join(summary.GlobalHashSummary.EnvVars.PassThrough, ", "))) - if err := w1.Flush(); err != nil { - return err - } - - ui.Output("") - ui.Info(util.Sprintf("${CYAN}${BOLD}Tasks to Run${RESET}")) - - for _, task := range summary.Tasks { - taskName := task.TaskID - - if rsm.singlePackage { - taskName = task.Task - } - - ui.Info(util.Sprintf("${BOLD}%s${RESET}", taskName)) - w := tabwriter.NewWriter(os.Stdout, 0, 0, 1, ' ', 0) - fmt.Fprintln(w, util.Sprintf(" ${GREY}Task\t=\t%s\t${RESET}", task.Task)) - - if !rsm.singlePackage { - fmt.Fprintln(w, util.Sprintf(" ${GREY}Package\t=\t%s\t${RESET}", task.Package)) - } - fmt.Fprintln(w, util.Sprintf(" ${GREY}Hash\t=\t%s\t${RESET}", task.Hash)) - fmt.Fprintln(w, util.Sprintf(" ${GREY}Cached (Local)\t=\t%s\t${RESET}", strconv.FormatBool(task.CacheSummary.Local))) - fmt.Fprintln(w, util.Sprintf(" ${GREY}Cached (Remote)\t=\t%s\t${RESET}", strconv.FormatBool(task.CacheSummary.Remote))) - - if !rsm.singlePackage { - fmt.Fprintln(w, util.Sprintf(" ${GREY}Directory\t=\t%s\t${RESET}", task.Dir)) - } - - fmt.Fprintln(w, util.Sprintf(" ${GREY}Command\t=\t%s\t${RESET}", task.Command)) - fmt.Fprintln(w, util.Sprintf(" ${GREY}Outputs\t=\t%s\t${RESET}", strings.Join(task.Outputs, ", "))) - fmt.Fprintln(w, util.Sprintf(" ${GREY}Log File\t=\t%s\t${RESET}", task.LogFileRelativePath)) - fmt.Fprintln(w, util.Sprintf(" ${GREY}Dependencies\t=\t%s\t${RESET}", strings.Join(task.Dependencies, ", "))) - fmt.Fprintln(w, util.Sprintf(" ${GREY}Dependents\t=\t%s\t${RESET}", strings.Join(task.Dependents, ", "))) - fmt.Fprintln(w, util.Sprintf(" ${GREY}Inputs Files Considered\t=\t%d\t${RESET}", len(task.ExpandedInputs))) - fmt.Fprintln(w, util.Sprintf(" ${GREY}.env Files Considered\t=\t%d\t${RESET}", len(task.DotEnv))) - - fmt.Fprintln(w, util.Sprintf(" ${GREY}Env Vars\t=\t%s\t${RESET}", strings.Join(task.EnvVars.Specified.Env, ", "))) - fmt.Fprintln(w, util.Sprintf(" ${GREY}Env Vars Values\t=\t%s\t${RESET}", strings.Join(task.EnvVars.Configured, ", "))) - fmt.Fprintln(w, util.Sprintf(" ${GREY}Inferred Env Vars Values\t=\t%s\t${RESET}", strings.Join(task.EnvVars.Inferred, ", "))) - - fmt.Fprintln(w, util.Sprintf(" ${GREY}Passed Through Env Vars\t=\t%s\t${RESET}", strings.Join(task.EnvVars.Specified.PassThroughEnv, ", "))) - fmt.Fprintln(w, util.Sprintf(" ${GREY}Passed Through Env Vars Values\t=\t%s\t${RESET}", strings.Join(task.EnvVars.PassThrough, ", "))) - - bytes, err := json.Marshal(task.ResolvedTaskDefinition) - // If there's an error, we can silently ignore it, we don't need to block the entire print. 
- if err == nil { - fmt.Fprintln(w, util.Sprintf(" ${GREY}Resolved Task Definition\t=\t%s\t${RESET}", string(bytes))) - } - - fmt.Fprintln(w, util.Sprintf(" ${GREY}Framework\t=\t%s\t${RESET}", task.Framework)) - if err := w.Flush(); err != nil { - return err - } - } - return nil -} diff --git a/cli/internal/runsummary/globalhash_summary.go b/cli/internal/runsummary/globalhash_summary.go deleted file mode 100644 index 8fcf35f58b876..0000000000000 --- a/cli/internal/runsummary/globalhash_summary.go +++ /dev/null @@ -1,60 +0,0 @@ -package runsummary - -import ( - "github.com/vercel/turbo/cli/internal/env" - "github.com/vercel/turbo/cli/internal/turbopath" -) - -// GlobalEnvConfiguration contains the environment variable inputs for the global hash -type GlobalEnvConfiguration struct { - Env []string `json:"env"` - PassThroughEnv []string `json:"passThroughEnv"` -} - -// GlobalEnvVarSummary contains the environment variables that impacted the global hash -type GlobalEnvVarSummary struct { - Specified GlobalEnvConfiguration `json:"specified"` - - Configured env.EnvironmentVariablePairs `json:"configured"` - Inferred env.EnvironmentVariablePairs `json:"inferred"` - PassThrough env.EnvironmentVariablePairs `json:"passthrough"` -} - -// GlobalHashSummary contains the pieces of data that impacted the global hash (which then impacted the task hash) -type GlobalHashSummary struct { - GlobalCacheKey string `json:"rootKey"` - GlobalFileHashMap map[turbopath.AnchoredUnixPath]string `json:"files"` - RootExternalDepsHash string `json:"hashOfExternalDependencies"` - DotEnv turbopath.AnchoredUnixPathArray `json:"globalDotEnv"` - EnvVars GlobalEnvVarSummary `json:"environmentVariables"` -} - -// NewGlobalHashSummary creates a GlobalHashSummary struct from a set of fields.
-func NewGlobalHashSummary( - globalCacheKey string, - fileHashMap map[turbopath.AnchoredUnixPath]string, - rootExternalDepsHash string, - globalEnv []string, - globalPassThroughEnv []string, - globalDotEnv turbopath.AnchoredUnixPathArray, - resolvedEnvVars env.DetailedMap, - resolvedPassThroughEnvVars env.EnvironmentVariableMap, -) *GlobalHashSummary { - return &GlobalHashSummary{ - GlobalCacheKey: globalCacheKey, - GlobalFileHashMap: fileHashMap, - RootExternalDepsHash: rootExternalDepsHash, - - EnvVars: GlobalEnvVarSummary{ - Specified: GlobalEnvConfiguration{ - Env: globalEnv, - PassThroughEnv: globalPassThroughEnv, - }, - Configured: resolvedEnvVars.BySource.Explicit.ToSecretHashable(), - Inferred: resolvedEnvVars.BySource.Matching.ToSecretHashable(), - PassThrough: resolvedPassThroughEnvVars.ToSecretHashable(), - }, - - DotEnv: globalDotEnv, - } -} diff --git a/cli/internal/runsummary/run_summary.go b/cli/internal/runsummary/run_summary.go deleted file mode 100644 index 8e837a304e62a..0000000000000 --- a/cli/internal/runsummary/run_summary.go +++ /dev/null @@ -1,264 +0,0 @@ -// Package runsummary implements structs that report on a `turbo run` and `turbo run --dry` -package runsummary - -import ( - "context" - "fmt" - "path/filepath" - "time" - - "github.com/mitchellh/cli" - "github.com/segmentio/ksuid" - "github.com/vercel/turbo/cli/internal/ci" - "github.com/vercel/turbo/cli/internal/client" - "github.com/vercel/turbo/cli/internal/env" - "github.com/vercel/turbo/cli/internal/spinner" - "github.com/vercel/turbo/cli/internal/turbopath" - "github.com/vercel/turbo/cli/internal/util" - "github.com/vercel/turbo/cli/internal/workspace" -) - -// MissingTaskLabel is printed when a package is missing a definition for a task that is supposed to run -// E.g. if `turbo run build --dry` is run, and package-a doesn't define a `build` script in package.json, -// the RunSummary will print this, instead of the script (e.g. `next build`). -const MissingTaskLabel = "<NONEXISTENT>" - -// NOTE: When changing this, please ensure that the server side is updated to handle the new version on vercel.com -// this is required to ensure safe handling of env vars (unknown run summary versions will be ignored on the server) -const runSummarySchemaVersion = "1" - -type runType int - -const ( - runTypeReal runType = iota - runTypeDryText - runTypeDryJSON -) - -// Meta is a wrapper around the serializable RunSummary, with some extra information -// about the Run and references to other things that we need. -type Meta struct { - RunSummary *RunSummary - repoRoot turbopath.AbsoluteSystemPath // used to write run summary - repoPath turbopath.RelativeSystemPath - singlePackage bool - shouldSave bool - spacesClient *spacesClient - runType runType - synthesizedCommand string -} - -// RunSummary contains a summary of what happens in the `turbo run` command and why.
-type RunSummary struct { - ID ksuid.KSUID `json:"id"` - Version string `json:"version"` - TurboVersion string `json:"turboVersion"` - Monorepo bool `json:"monorepo"` - GlobalHashSummary *GlobalHashSummary `json:"globalCacheInputs"` - Packages []string `json:"packages"` - EnvMode util.EnvMode `json:"envMode"` - FrameworkInference bool `json:"frameworkInference"` - ExecutionSummary *executionSummary `json:"execution,omitempty"` - Tasks []*TaskSummary `json:"tasks"` - User string `json:"user"` - SCM *scmState `json:"scm"` -} - -// NewRunSummary returns a RunSummary instance -func NewRunSummary( - startAt time.Time, - repoRoot turbopath.AbsoluteSystemPath, - repoPath turbopath.RelativeSystemPath, - turboVersion string, - apiClient *client.APIClient, - spacesClient *client.APIClient, - runOpts util.RunOpts, - packages []string, - globalEnvMode util.EnvMode, - envAtExecutionStart env.EnvironmentVariableMap, - globalHashSummary *GlobalHashSummary, - synthesizedCommand string, -) Meta { - singlePackage := runOpts.SinglePackage - profile := runOpts.Profile - shouldSave := runOpts.Summarize - spaceID := runOpts.ExperimentalSpaceID - - runType := runTypeReal - if runOpts.DryRun { - runType = runTypeDryText - if runOpts.DryRunJSON { - runType = runTypeDryJSON - } - } - - executionSummary := newExecutionSummary(synthesizedCommand, repoPath, startAt, profile) - - rsm := Meta{ - RunSummary: &RunSummary{ - ID: ksuid.New(), - Version: runSummarySchemaVersion, - ExecutionSummary: executionSummary, - TurboVersion: turboVersion, - Packages: packages, - EnvMode: globalEnvMode, - FrameworkInference: runOpts.FrameworkInference, - Tasks: []*TaskSummary{}, - GlobalHashSummary: globalHashSummary, - SCM: getSCMState(envAtExecutionStart, repoRoot), - User: getUser(envAtExecutionStart, repoRoot), - Monorepo: !singlePackage, - }, - runType: runType, - repoRoot: repoRoot, - singlePackage: singlePackage, - shouldSave: shouldSave, - synthesizedCommand: synthesizedCommand, - } - - rsm.spacesClient = newSpacesClient(spaceID, spacesClient) - if rsm.spacesClient.enabled { - go rsm.spacesClient.start() - payload := newSpacesRunCreatePayload(&rsm) - rsm.spacesClient.createRun(payload) - } - - return rsm -} - -// SpacesIsEnabled returns true if this run summary will be sent to a -// spaces backend -func (rsm *Meta) SpacesIsEnabled() bool { - return rsm.spacesClient.enabled -} - -// getPath returns a path to where the runSummary is written. -// The returned path will always be relative to the dir passed in. -// We don't do a lot of validation, so `../../` paths are allowed. -func (rsm *Meta) getPath() turbopath.AbsoluteSystemPath { - filename := fmt.Sprintf("%s.json", rsm.RunSummary.ID) - return rsm.repoRoot.UntypedJoin(filepath.Join(".turbo", "runs"), filename) -} - -// Close wraps up the RunSummary at the end of a `turbo run`.
-func (rsm *Meta) Close(ctx context.Context, exitCode int, workspaceInfos workspace.Catalog, ui cli.Ui) error { - if rsm.runType == runTypeDryJSON || rsm.runType == runTypeDryText { - return rsm.closeDryRun(workspaceInfos, ui) - } - - rsm.RunSummary.ExecutionSummary.exitCode = exitCode - rsm.RunSummary.ExecutionSummary.endedAt = time.Now() - - summary := rsm.RunSummary - if err := writeChrometracing(summary.ExecutionSummary.profileFilename, ui); err != nil { - ui.Error(fmt.Sprintf("Error writing tracing data: %v", err)) - } - - // TODO: printing summary to local, writing to disk, and sending to API - // are all the same thing, we should use a strategy similar to cache save/upload to - // do this in parallel. - - // Otherwise, attempt to save the summary - // Warn on the error, but we don't need to throw an error - if rsm.shouldSave { - if err := rsm.save(); err != nil { - ui.Warn(fmt.Sprintf("Error writing run summary: %v", err)) - } - } - - rsm.printExecutionSummary(ui) - if rsm.spacesClient.enabled { - rsm.sendToSpace(ctx, ui) - } else { - // Print any errors if the client is not enabled, since it could have - // been disabled at runtime due to an issue. - rsm.spacesClient.printErrors(ui) - } - - return nil -} - -func (rsm *Meta) sendToSpace(ctx context.Context, ui cli.Ui) { - rsm.spacesClient.finishRun(rsm) - func() { - _ = spinner.WaitFor(ctx, rsm.spacesClient.Close, ui, "...sending run summary...", 1000*time.Millisecond) - }() - - rsm.spacesClient.printErrors(ui) - - url := rsm.spacesClient.run.URL - if url != "" { - ui.Output(fmt.Sprintf("Run: %s", url)) - ui.Output("") - } -} - -// closeDryRun wraps up the Run Summary at the end of `turbo run --dry`. -// Ideally this should be inlined into Close(), but RunSummary doesn't currently -// have context about whether a run was real or dry. -func (rsm *Meta) closeDryRun(workspaceInfos workspace.Catalog, ui cli.Ui) error { - // Render the dry run as json - if rsm.runType == runTypeDryJSON { - rendered, err := rsm.FormatJSON() - if err != nil { - return err - } - - ui.Output(string(rendered)) - return nil - } - - return rsm.FormatAndPrintText(workspaceInfos, ui) -} - -// TrackTask makes it possible for the consumer to send information about the execution of a task. -func (summary *RunSummary) TrackTask(taskID string) (func(outcome executionEventName, err error, exitCode *int), *TaskExecutionSummary) { - return summary.ExecutionSummary.run(taskID) -} - -func (summary *RunSummary) getFailedTasks() []*TaskSummary { - failed := []*TaskSummary{} - - for _, t := range summary.Tasks { - if t.Execution.exitCode != nil && *t.Execution.exitCode != 0 { - failed = append(failed, t) - } - } - return failed -} - -// save writes the run summary to a file -func (rsm *Meta) save() error { - json, err := rsm.FormatJSON() - if err != nil { - return err - } - - // summaryPath will always be relative to the dir passed in.
- // We don't do a lot of validation, so `../../` paths are allowed - summaryPath := rsm.getPath() - - if err := summaryPath.EnsureDir(); err != nil { - return err - } - - return summaryPath.WriteFile(json, 0644) -} - -// CloseTask posts the result of the Task to Spaces -func (rsm *Meta) CloseTask(task *TaskSummary, logs []byte) { - if rsm.spacesClient.enabled { - rsm.spacesClient.postTask(task, logs) - } -} - -func getUser(envVars env.EnvironmentVariableMap, dir turbopath.AbsoluteSystemPath) string { - var username string - - if ci.IsCi() { - vendor := ci.Info() - username = envVars[vendor.UsernameEnvVar] - } - - return username -} diff --git a/cli/internal/runsummary/scm_summary.go b/cli/internal/runsummary/scm_summary.go deleted file mode 100644 index e38b23d79a7f2..0000000000000 --- a/cli/internal/runsummary/scm_summary.go +++ /dev/null @@ -1,40 +0,0 @@ -package runsummary - -import ( - "github.com/vercel/turbo/cli/internal/ci" - "github.com/vercel/turbo/cli/internal/env" - "github.com/vercel/turbo/cli/internal/scm" - "github.com/vercel/turbo/cli/internal/turbopath" -) - -type scmState struct { - Type string `json:"type"` - Sha string `json:"sha"` - Branch string `json:"branch"` -} - -// getSCMState returns the sha and branch when in a git repo -// Otherwise it currently returns empty strings. -// We may add handling of other SCMs and non-git tracking in the future. -func getSCMState(envVars env.EnvironmentVariableMap, dir turbopath.AbsoluteSystemPath) *scmState { - - state := &scmState{Type: "git"} - - // If we're in CI, try to get the values we need from environment variables - if ci.IsCi() { - vendor := ci.Info() - state.Sha = envVars[vendor.ShaEnvVar] - state.Branch = envVars[vendor.BranchEnvVar] - } - - // Otherwise fall back to using `git` - if state.Branch == "" { - state.Branch = scm.GetCurrentBranch(dir) - } - - if state.Sha == "" { - state.Sha = scm.GetCurrentSha(dir) - } - - return state -} diff --git a/cli/internal/runsummary/spaces.go b/cli/internal/runsummary/spaces.go deleted file mode 100644 index 3476161e0bf93..0000000000000 --- a/cli/internal/runsummary/spaces.go +++ /dev/null @@ -1,361 +0,0 @@ -package runsummary - -import ( - "context" - "encoding/json" - "fmt" - "sync" - "time" - - "github.com/mitchellh/cli" - "github.com/pkg/errors" - "github.com/vercel/turbo/cli/internal/ci" -) - -const runsEndpoint = "/v0/spaces/%s/runs" -const runsPatchEndpoint = "/v0/spaces/%s/runs/%s" -const tasksEndpoint = "/v0/spaces/%s/runs/%s/tasks" - -// spaceRequest contains all the information for a single request to Spaces -// This will be an enum in Rust with all the relevant information to construct the url. -// We'll pattern match and call the correct API client method.
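The client plumbing that follows queues every request behind the initial POST /run call, because that response carries the run ID needed to build later URLs. A runnable sketch of just that gating, with all names illustrative and the network replaced by a print:

package main

import (
	"fmt"
	"sync"
)

func main() {
	requests := make(chan string)
	runCreated := make(chan struct{})
	var wg sync.WaitGroup

	send := func(r string) { defer wg.Done(); fmt.Println("sent:", r) }

	go func() {
		var pending []string
	FirstRequest:
		for {
			select {
			case r, ok := <-requests:
				if !ok {
					break FirstRequest
				}
				pending = append(pending, r) // queue behind the create-run call
			case <-runCreated:
				for _, r := range pending { // create-run finished: flush the queue
					go send(r)
				}
				break FirstRequest
			}
		}
		for r := range requests { // steady state: dispatch as requests arrive
			go send(r)
		}
	}()

	wg.Add(2)
	requests <- "POST /tasks one"
	close(runCreated) // simulate the create-run response arriving
	requests <- "POST /tasks two"
	wg.Wait()
	close(requests)
}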
-type spaceRequest struct { - method string - url string - body interface{} - makeURL func(self *spaceRequest, r *spaceRun) error // Should set url on self - //onDone func(self *spaceRequest, response []byte, err error) // Handler for when request completes -} - -type spacesAPIClient interface { - JSONPost(ctx context.Context, url string, payload []byte) ([]byte, error) - JSONPatch(ctx context.Context, url string, payload []byte) ([]byte, error) - IsLinked() bool -} - -type spacesClient struct { - requests chan *spaceRequest - api spacesAPIClient - run *spaceRun - runCreated chan struct{} - runCreateError error - wg sync.WaitGroup - spaceID string - enabled bool - requestTimeout time.Duration - - errMu sync.Mutex - errors []error -} - -type spaceRun struct { - ID string - URL string -} - -func newSpacesClient(spaceID string, api spacesAPIClient) *spacesClient { - c := &spacesClient{ - api: api, - spaceID: spaceID, - enabled: false, // Start with disabled - requests: make(chan *spaceRequest), // TODO: give this a size based on tasks - runCreated: make(chan struct{}, 1), - run: &spaceRun{}, - requestTimeout: 10 * time.Second, - } - - if spaceID == "" { - return c - } - - if !c.api.IsLinked() { - c.errors = append(c.errors, fmt.Errorf("Error: experimentalSpaceId is enabled, but repo is not linked to API. Run `turbo link` or `turbo login` first")) - return c - } - - // Explicitly enable if all conditions are met - c.enabled = true - - return c -} - -// start receives and processes requests, dispatching each one in its own goroutine. -// There is an additional marker (the runCreated channel) that indicates -// when the first request is done. All other requests are blocked on that one. -// This first request is the POST /run request. We need to block on it because -// the response contains the run ID from the server, which we need to construct the -// URLs of subsequent requests. -func (c *spacesClient) start() { - // start runs in its own goroutine and listens for requests coming in from a channel - pending := []*spaceRequest{} - - // Create a labeled statement so we can break out of the for loop more easily - - // Set up a for loop that goes infinitely until we break out of it -FirstRequest: - for { - // A select statement that can listen for messages from multiple channels - select { - // listen for new requests coming in - case req, isOpen := <-c.requests: - // If we read from the channel and it's already closed, it means - // something went wrong and we are done with the run, but the first - // request either never happened or didn't write to the c.runCreated channel - // to signal that it's done. In this case, we need to break out of the forever loop. - if !isOpen { - break FirstRequest - } - // Queue everything. When the first request is done, - // we'll get a message on the other channel and break out of this loop - pending = append(pending, req) - // Wait for c.runCreated channel to be closed and: - case <-c.runCreated: - // 1. flush pending requests - for _, req := range pending { - go c.dequeueRequest(req) - } - - // 2. break out of the forever loop. - break FirstRequest - } - } - - // and then continue listening for more requests as they come in until the channel is closed - for req := range c.requests { - go c.dequeueRequest(req) - } -} - -func makeRequest(ctx context.Context, api spacesAPIClient, req *spaceRequest, run *spaceRun) ([]byte, error) { - // The runID is required for POST task requests and PATCH run request URLs, - // so we have to construct these URLs lazily with a `makeURL` affordance.
- // - // We are assuming that if makeURL is defined, this is NOT the first request. - // This is not a great assumption, and will fail if our endpoint URLs change later. - // - // Secondly, if makeURL _is_ defined, we call it, and if there are any errors, we exit early. - // We are doing this check before any of the other basic checks (e.g. the existence of a spaceID) - // because in the case the repo is not linked to a space, we don't want to print those errors - // for every request that fails. On the other hand, if that POST /run request fails, and N - // requests fail after that as a consequence, it is ok to print all of those errors. - // - // We're going to remove this in the Rust version because closures are kinda messy, especially closures that mutate. - // Instead we'll directly store the id in the struct and use that. - if req.makeURL != nil { - if err := req.makeURL(req, run); err != nil { - return nil, err - } - } - - payload, err := json.Marshal(req.body) - if err != nil { - return nil, err - } - - // Make the request - if req.method == "POST" { - return api.JSONPost(ctx, req.url, payload) - } else if req.method == "PATCH" { - return api.JSONPatch(ctx, req.url, payload) - } - panic(fmt.Sprintf("Unsupported method %v", req.method)) -} - -func (c *spacesClient) createRun(payload *spacesRunPayload) { - c.wg.Add(1) - // We don't need the goroutine here since tokio will schedule the threads for us. - // If needed we can do `spawn` - go func() { - defer c.wg.Done() - defer close(c.runCreated) - ctx, cancel := context.WithTimeout(context.Background(), c.requestTimeout) - defer cancel() - - // Because `makeURL` is not defined, this is the first request. - req := &spaceRequest{ - method: "POST", - url: fmt.Sprintf(runsEndpoint, c.spaceID), - body: payload, - } - // In the Rust version we'll have a specific `create_spaces_run` method - resp, err := makeRequest(ctx, c.api, req, c.run) - // We don't need to store this error in the Rust version. Instead - // if this fails, we send a message via a oneshot channel to indicate - // that we don't need to send any more requests.
- if err != nil { - c.runCreateError = err - return - } - if err := json.Unmarshal(resp, c.run); err != nil { - c.runCreateError = errors.Wrap(err, "failed to unmarshal create run response") - } - }() -} - -func (c *spacesClient) postTask(task *TaskSummary, logs []byte) { - c.queueRequest(&spaceRequest{ - method: "POST", - makeURL: func(self *spaceRequest, run *spaceRun) error { - if run.ID == "" { - return fmt.Errorf("No Run ID found to post task %s", task.TaskID) - } - self.url = fmt.Sprintf(tasksEndpoint, c.spaceID, run.ID) - return nil - }, - body: newSpacesTaskPayload(task, logs), - }) -} - -func (c *spacesClient) finishRun(rsm *Meta) { - c.queueRequest(&spaceRequest{ - method: "PATCH", - makeURL: func(self *spaceRequest, run *spaceRun) error { - if run.ID == "" { - return fmt.Errorf("No Run ID found to send PATCH request") - } - self.url = fmt.Sprintf(runsPatchEndpoint, c.spaceID, run.ID) - return nil - }, - body: newSpacesDonePayload(rsm.RunSummary), - }) -} - -// queueRequest adds the given request to the requests channel and increments the waitGroup counter -func (c *spacesClient) queueRequest(req *spaceRequest) { - c.wg.Add(1) - c.requests <- req -} - -// dequeueRequest makes the request in a go routine and decrements the waitGroup counter -func (c *spacesClient) dequeueRequest(req *spaceRequest) { - defer c.wg.Done() - // Only send requests if we successfully created the Run - if c.runCreateError != nil { - return - } - ctx, cancel := context.WithTimeout(context.Background(), c.requestTimeout) - defer cancel() - _, err := makeRequest(ctx, c.api, req, c.run) - if err != nil { - c.errMu.Lock() - defer c.errMu.Unlock() - c.errors = append(c.errors, err) - } -} - -func (c *spacesClient) printErrors(ui cli.Ui) { - // Print any errors - if len(c.errors) > 0 { - for _, err := range c.errors { - ui.Warn(fmt.Sprintf("%s", err)) - } - } -} - -// Close will wait for all requests to finish and then close the channel listening for them -func (c *spacesClient) Close() { - // wait for all requests to finish. - c.wg.Wait() - - // close out the channel, since there should be no more requests. - close(c.requests) -} - -type spacesClientSummary struct { - ID string `json:"id"` - Name string `json:"name"` - Version string `json:"version"` -} - -type spacesRunPayload struct { - StartTime int64 `json:"startTime,omitempty"` // when the run was started - EndTime int64 `json:"endTime,omitempty"` // when the run ended. we should never submit start and end at the same time. - Status string `json:"status,omitempty"` // Status is "running" or "completed" - Type string `json:"type,omitempty"` // hardcoded to "TURBO" - ExitCode *int `json:"exitCode,omitempty"` // exit code for the full run - Command string `json:"command,omitempty"` // the thing that kicked off the turbo run - RepositoryPath string `json:"repositoryPath,omitempty"` // where the command was invoked from - Context string `json:"context,omitempty"` // the host on which this Run was executed (e.g. 
Github Action, Vercel, etc) - Client spacesClientSummary `json:"client"` // Details about the turbo client - GitBranch string `json:"gitBranch"` - GitSha string `json:"gitSha"` - User string `json:"originationUser,omitempty"` -} - -// spacesCacheStatus is the same as TaskCacheSummary so we can convert -// spacesCacheStatus(cacheSummary), but change the json tags, to omit local and remote fields -type spacesCacheStatus struct { - // omitted fields, but here so we can convert from TaskCacheSummary easily - Local bool `json:"-"` - Remote bool `json:"-"` - Status string `json:"status"` // should always be there - Source string `json:"source,omitempty"` - TimeSaved int `json:"timeSaved"` -} - -type spacesTask struct { - Key string `json:"key,omitempty"` - Name string `json:"name,omitempty"` - Workspace string `json:"workspace,omitempty"` - Hash string `json:"hash,omitempty"` - StartTime int64 `json:"startTime,omitempty"` - EndTime int64 `json:"endTime,omitempty"` - Cache spacesCacheStatus `json:"cache,omitempty"` - ExitCode *int `json:"exitCode,omitempty"` - Dependencies []string `json:"dependencies,omitempty"` - Dependents []string `json:"dependents,omitempty"` - Logs string `json:"log"` -} - -func newSpacesRunCreatePayload(rsm *Meta) *spacesRunPayload { - startTime := rsm.RunSummary.ExecutionSummary.startedAt.UnixMilli() - context := "LOCAL" - if name := ci.Constant(); name != "" { - context = name - } - - return &spacesRunPayload{ - StartTime: startTime, - Status: "running", - Command: rsm.synthesizedCommand, - RepositoryPath: rsm.repoPath.ToString(), - Type: "TURBO", - Context: context, - GitBranch: rsm.RunSummary.SCM.Branch, - GitSha: rsm.RunSummary.SCM.Sha, - User: rsm.RunSummary.User, - Client: spacesClientSummary{ - ID: "turbo", - Name: "Turbo", - Version: rsm.RunSummary.TurboVersion, - }, - } -} - -func newSpacesDonePayload(runsummary *RunSummary) *spacesRunPayload { - endTime := runsummary.ExecutionSummary.endedAt.UnixMilli() - return &spacesRunPayload{ - Status: "completed", - EndTime: endTime, - ExitCode: &runsummary.ExecutionSummary.exitCode, - } -} - -func newSpacesTaskPayload(taskSummary *TaskSummary, logs []byte) *spacesTask { - startTime := taskSummary.Execution.startAt.UnixMilli() - endTime := taskSummary.Execution.endTime().UnixMilli() - - return &spacesTask{ - Key: taskSummary.TaskID, - Name: taskSummary.Task, - Workspace: taskSummary.Package, - Hash: taskSummary.Hash, - StartTime: startTime, - EndTime: endTime, - Cache: spacesCacheStatus(taskSummary.CacheSummary), // wrapped so we can remove fields - ExitCode: taskSummary.Execution.exitCode, - Dependencies: taskSummary.Dependencies, - Dependents: taskSummary.Dependents, - Logs: string(logs), - } -} diff --git a/cli/internal/runsummary/spaces_test.go b/cli/internal/runsummary/spaces_test.go deleted file mode 100644 index f8d5cf26c95cc..0000000000000 --- a/cli/internal/runsummary/spaces_test.go +++ /dev/null @@ -1,69 +0,0 @@ -package runsummary - -import ( - "context" - "errors" - "sync" - "testing" - "time" - - "github.com/stretchr/testify/assert" -) - -type failFirstClient struct { - mu sync.Mutex - sawFirst bool - additionalRequests int -} - -func (f *failFirstClient) IsLinked() bool { - return true -} - -func (f *failFirstClient) request() ([]byte, error) { - f.mu.Lock() - defer f.mu.Unlock() - if f.sawFirst { - f.additionalRequests++ - return []byte("some response"), nil - } - f.sawFirst = true - return nil, errors.New("failed request") -} - -func (f *failFirstClient) JSONPost(_ context.Context, _ string, _ []byte) 
([]byte, error) { - return f.request() -} - -func (f *failFirstClient) JSONPatch(_ context.Context, _ string, _ []byte) ([]byte, error) { - return f.request() -} - -func TestFailToCreateRun(t *testing.T) { - api := &failFirstClient{} - - c := newSpacesClient("my-space-id", api) - go c.start() - payload := &spacesRunPayload{} - c.createRun(payload) - exitCode := 1 - ts := &TaskSummary{ - TaskID: "my-id", - Task: "task", - Package: "package", - Hash: "hash", - Execution: &TaskExecutionSummary{ - startAt: time.Now(), - Duration: 3 * time.Second, - exitCode: &exitCode, - }, - } - var logs []byte - c.postTask(ts, logs) - c.postTask(ts, logs) - c.postTask(ts, logs) - c.Close() - - assert.True(t, api.sawFirst) - assert.Equal(t, api.additionalRequests, 0) -} diff --git a/cli/internal/runsummary/task_summary.go b/cli/internal/runsummary/task_summary.go deleted file mode 100644 index 92680adaf7a2b..0000000000000 --- a/cli/internal/runsummary/task_summary.go +++ /dev/null @@ -1,116 +0,0 @@ -package runsummary - -import ( - "github.com/vercel/turbo/cli/internal/cache" - "github.com/vercel/turbo/cli/internal/fs" - "github.com/vercel/turbo/cli/internal/turbopath" - "github.com/vercel/turbo/cli/internal/util" -) - -// TaskCacheSummary is an extended version of cache.ItemStatus -// that includes TimeSaved and some better data. -type TaskCacheSummary struct { - Local bool `json:"local"` // Deprecated, but keeping around for --dry=json - Remote bool `json:"remote"` // Deprecated, but keeping around for --dry=json - Status string `json:"status"` // should always be there - Source string `json:"source,omitempty"` // can be empty on status:miss - TimeSaved int `json:"timeSaved"` // always include, but can be 0 -} - -// NewTaskCacheSummary decorates a cache.ItemStatus into a TaskCacheSummary -// Importantly, it adds the derived keys of `source` and `status` based on -// the local/remote booleans. It would be nice if these were just included -// from upstream, but that is a more invasive change. -func NewTaskCacheSummary(itemStatus cache.ItemStatus) TaskCacheSummary { - status := cache.CacheEventMiss - if itemStatus.Hit { - status = cache.CacheEventHit - } - var source string - if itemStatus.Hit { - source = itemStatus.Source - } - - cs := TaskCacheSummary{ - Status: status, - Source: source, - TimeSaved: itemStatus.TimeSaved, - } - - // Assign these deprecated fields Local and Remote based on the information available - // in the itemStatus. Note that these fields are problematic, because an ItemStatus isn't always - // the composite of both local and remote caches. That means that an ItemStatus might say it - // was a local cache hit, and we return remote: false here. That's misleading because it does - // not mean that there is no remote cache hit, it _could_ mean that we never checked the remote - // cache. These fields are being deprecated for this reason. - cs.Local = itemStatus.Hit && itemStatus.Source == cache.CacheSourceFS - cs.Remote = itemStatus.Hit && itemStatus.Source == cache.CacheSourceRemote - - return cs -} - -// TaskSummary contains information about the task that was about to run -// TODO(mehulkar): `Outputs` and `ExcludedOutputs` are slightly redundant -// as the information is also available in ResolvedTaskDefinition. We could remove them -// and favor a version of Outputs that is the fully expanded list of files. 
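Before the TaskSummary type: a minimal sketch of how NewTaskCacheSummary above behaves at a call site. The values are hypothetical, and it assumes the constants of the internal cache package referenced by the code:

    // Local filesystem hit: Status and Source are derived from the ItemStatus
    // booleans, and the deprecated Local flag is true only because Source is
    // the filesystem cache.
    hit := NewTaskCacheSummary(cache.ItemStatus{
        Hit:       true,
        Source:    cache.CacheSourceFS,
        TimeSaved: 1200, // carried through unchanged
    })
    // hit.Status == cache.CacheEventHit, hit.Local == true, hit.Remote == false

    // Miss: Source stays empty and both deprecated flags are false, even
    // though the remote cache may simply never have been checked.
    miss := NewTaskCacheSummary(cache.ItemStatus{Hit: false})
    // miss.Status == cache.CacheEventMiss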
-type TaskSummary struct { - TaskID string `json:"taskId,omitempty"` - Task string `json:"task"` - Package string `json:"package,omitempty"` - Hash string `json:"hash"` - ExpandedInputs map[turbopath.AnchoredUnixPath]string `json:"inputs"` - ExternalDepsHash string `json:"hashOfExternalDependencies"` - CacheSummary TaskCacheSummary `json:"cache"` - Command string `json:"command"` - CommandArguments []string `json:"cliArguments"` - Outputs []string `json:"outputs"` - ExcludedOutputs []string `json:"excludedOutputs"` - // Repo-relative, relative system path - LogFileRelativePath string `json:"logFile"` - // Repo-relative, relative system path - Dir string `json:"directory,omitempty"` - Dependencies []string `json:"dependencies"` - Dependents []string `json:"dependents"` - ResolvedTaskDefinition *fs.TaskDefinition `json:"resolvedTaskDefinition"` - ExpandedOutputs []turbopath.AnchoredSystemPath `json:"expandedOutputs"` - Framework string `json:"framework"` - EnvMode util.EnvMode `json:"envMode"` - EnvVars TaskEnvVarSummary `json:"environmentVariables"` - DotEnv turbopath.AnchoredUnixPathArray `json:"dotEnv"` - Execution *TaskExecutionSummary `json:"execution,omitempty"` // omit when it's not set -} - -// TaskEnvConfiguration contains the environment variable inputs for a task -type TaskEnvConfiguration struct { - Env []string `json:"env"` - PassThroughEnv []string `json:"passThroughEnv"` -} - -// TaskEnvVarSummary contains the environment variables that impacted a task's hash -type TaskEnvVarSummary struct { - Specified TaskEnvConfiguration `json:"specified"` - - Configured []string `json:"configured"` - Inferred []string `json:"inferred"` - PassThrough []string `json:"passthrough"` -} - -// cleanForSinglePackage converts a TaskSummary to remove references to workspaces -func (ts *TaskSummary) cleanForSinglePackage() { - dependencies := make([]string, len(ts.Dependencies)) - for i, dependency := range ts.Dependencies { - dependencies[i] = util.StripPackageName(dependency) - } - dependents := make([]string, len(ts.Dependents)) - for i, dependent := range ts.Dependents { - dependents[i] = util.StripPackageName(dependent) - } - task := util.StripPackageName(ts.TaskID) - - ts.TaskID = task - ts.Task = task - ts.Dependencies = dependencies - ts.Dependents = dependents - ts.Dir = "" - ts.Package = "" -} diff --git a/cli/internal/scm/git.go b/cli/internal/scm/git.go deleted file mode 100644 index 58518b5b54072..0000000000000 --- a/cli/internal/scm/git.go +++ /dev/null @@ -1,32 +0,0 @@ -// Package scm abstracts operations on various tools like git -// Currently, only git is supported. -// -// Adapted from https://github.com/thought-machine/please/tree/master/src/scm -// Copyright Thought Machine, Inc. or its affiliates. All Rights Reserved. -// SPDX-License-Identifier: Apache-2.0 -package scm - -import ( - "fmt" - - "github.com/vercel/turbo/cli/internal/ffi" - "github.com/vercel/turbo/cli/internal/turbopath" -) - -// git implements operations on a git repository. -type git struct { - repoRoot turbopath.AbsoluteSystemPath -} - -// ChangedFiles returns a list of modified files since the given commit, optionally including untracked files. 
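Returning to cleanForSinglePackage above: a hedged sketch, written as if it lived inside the runsummary package and using hypothetical "<package>#<task>" IDs, of what the method strips:

    ts := &TaskSummary{
        TaskID:       "my-app#build", // workspace-qualified "<package>#<task>" ID
        Task:         "my-app#build",
        Package:      "my-app",
        Dir:          "apps/my-app",
        Dependencies: []string{"my-lib#build"},
    }
    ts.cleanForSinglePackage()
    // TaskID and Task are now both "build", Dependencies is []string{"build"},
    // and Package and Dir are cleared: a single-package repo has no workspaces.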
-func (g *git) ChangedFiles(fromCommit string, toCommit string, monorepoRoot string) ([]string, error) { - return ffi.ChangedFiles(g.repoRoot.ToString(), monorepoRoot, fromCommit, toCommit) -} - -func (g *git) PreviousContent(fromCommit string, filePath string) ([]byte, error) { - if fromCommit == "" { - return nil, fmt.Errorf("need commit sha to inspect file contents") - } - - return ffi.PreviousContent(g.repoRoot.ToString(), fromCommit, filePath) -} diff --git a/cli/internal/scm/scm.go b/cli/internal/scm/scm.go deleted file mode 100644 index 825401b79faf3..0000000000000 --- a/cli/internal/scm/scm.go +++ /dev/null @@ -1,80 +0,0 @@ -// Package scm abstracts operations on various tools like git -// Currently, only git is supported. -// -// Adapted from https://github.com/thought-machine/please/tree/master/src/scm -// Copyright Thought Machine, Inc. or its affiliates. All Rights Reserved. -// SPDX-License-Identifier: Apache-2.0 -package scm - -import ( - "os/exec" - "strings" - - "github.com/pkg/errors" - - "github.com/vercel/turbo/cli/internal/turbopath" -) - -// ErrFallback is returned alongside a stub SCM when no .git folder can be found -// and turbo has to fall back to manual file hashing. -var ErrFallback = errors.New("cannot find a .git folder. Falling back to manual file hashing (which may be slower). If you are running this build in a pruned directory, you can ignore this message. Otherwise, please initialize a git repository in the root of your monorepo") - -// An SCM represents an SCM implementation that we can ask for various things. -type SCM interface { - // ChangedFiles returns a list of modified files since the given commit, including untracked files - ChangedFiles(fromCommit string, toCommit string, relativeTo string) ([]string, error) - // PreviousContent returns the content of the file at fromCommit - PreviousContent(fromCommit string, filePath string) ([]byte, error) -} - -// newGitSCM returns a new SCM instance for this repo root. -// It returns nil if there is no known implementation there. -func newGitSCM(repoRoot turbopath.AbsoluteSystemPath) SCM { - if repoRoot.UntypedJoin(".git").Exists() { - return &git{repoRoot: repoRoot} - } - return nil -} - -// newFallback returns a new SCM instance for this repo root. -// If there is no known implementation it returns a stub. -func newFallback(repoRoot turbopath.AbsoluteSystemPath) (SCM, error) { - if scm := newGitSCM(repoRoot); scm != nil { - return scm, nil - } - - return &stub{}, ErrFallback -} - -// FromInRepo produces an SCM instance, given a path within a -// repository. It does not need to be a git repository, and if -// it is not, the given path is assumed to be the root. -func FromInRepo(repoRoot turbopath.AbsoluteSystemPath) (SCM, error) { - dotGitDir, err := repoRoot.Findup(".git") - if err != nil { - return nil, err - } - return newFallback(dotGitDir.Dir()) -} - -// GetCurrentBranch returns the current branch -func GetCurrentBranch(dir turbopath.AbsoluteSystemPath) string { - cmd := exec.Command("git", "branch", "--show-current") - cmd.Dir = dir.ToString() - - out, err := cmd.Output() - if err != nil { - return "" - } - return strings.TrimRight(string(out), "\n") -} - -// GetCurrentSha returns the current SHA -func GetCurrentSha(dir turbopath.AbsoluteSystemPath) string { - cmd := exec.Command("git", "rev-parse", "HEAD")
- cmd.Dir = dir.ToString() - - out, err := cmd.Output() - if err != nil { - return "" - } - return strings.TrimRight(string(out), "\n") -} diff --git a/cli/internal/scm/scm_test.go b/cli/internal/scm/scm_test.go deleted file mode 100644 index e0982aecc259e..0000000000000 --- a/cli/internal/scm/scm_test.go +++ /dev/null @@ -1,136 +0,0 @@ -package scm - -import ( - "os" - "os/exec" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/vercel/turbo/cli/internal/fs" - "github.com/vercel/turbo/cli/internal/turbopath" -) - -func TestGetCurrentBranchMain(t *testing.T) { - targetbranch := "main" - testDir := getTestDir(t, "myrepo") - originalName, originalEmail := getOriginalConfig(testDir) - - // Setup git - gitCommand(t, testDir, []string{"config", "--global", "user.email", "turbo@vercel.com"}) - gitCommand(t, testDir, []string{"config", "--global", "user.name", "Turbobot"}) - gitCommand(t, testDir, []string{"init"}) - - gitCommand(t, testDir, []string{"checkout", "-B", targetbranch}) - branch := GetCurrentBranch(testDir) - assert.Equal(t, branch, targetbranch) - - // cleanup - gitRm(t, testDir) - gitCommand(t, testDir, []string{"config", "--global", "user.email", originalEmail}) - gitCommand(t, testDir, []string{"config", "--global", "user.name", originalName}) -} - -func TestGetCurrentBranchNonMain(t *testing.T) { - targetbranch := "mybranch" - testDir := getTestDir(t, "myrepo") - - originalName, originalEmail := getOriginalConfig(testDir) - - // Setup git - gitCommand(t, testDir, []string{"config", "--global", "user.email", "turbo@vercel.com"}) - gitCommand(t, testDir, []string{"config", "--global", "user.name", "Turbobot"}) - gitCommand(t, testDir, []string{"init"}) - gitCommand(t, testDir, []string{"checkout", "-B", targetbranch}) - - branch := GetCurrentBranch(testDir) - assert.Equal(t, branch, targetbranch) - - // cleanup - gitRm(t, testDir) - gitCommand(t, testDir, []string{"config", "--global", "user.email", originalEmail}) - gitCommand(t, testDir, []string{"config", "--global", "user.name", originalName}) -} - -func TestGetCurrentSHA(t *testing.T) { - testDir := getTestDir(t, "myrepo") - originalName, originalEmail := getOriginalConfig(testDir) - - // Setup git - gitCommand(t, testDir, []string{"config", "--global", "user.email", "turbo@vercel.com"}) - gitCommand(t, testDir, []string{"config", "--global", "user.name", "Turbobot"}) - gitCommand(t, testDir, []string{"init"}) - - // initial sha is blank because there are no commits - initSha := GetCurrentSha(testDir) - assert.True(t, initSha == "", "initial sha is empty") - - // first commit - gitCommand(t, testDir, []string{"commit", "--allow-empty", "-am", "new commit"}) - sha1 := GetCurrentSha(testDir) - assert.True(t, sha1 != "", "sha on commit 1 is not empty") - - // second commit - gitCommand(t, testDir, []string{"commit", "--allow-empty", "-am", "new commit"}) - sha2 := GetCurrentSha(testDir) - assert.True(t, sha2 != "", "sha on commit 2 is not empty") - assert.True(t, sha2 != sha1, "sha on commit 2 changes from commit 1") - - // cleanup - gitRm(t, testDir) - gitCommand(t, testDir, []string{"config", "--global", "user.email", originalEmail}) - gitCommand(t, testDir, []string{"config", "--global", "user.name", originalName}) -} - -// Helper functions -func getTestDir(t *testing.T, testName string) turbopath.AbsoluteSystemPath { - defaultCwd, err := os.Getwd() - if err != nil { - t.Errorf("failed to get cwd: %v", err) - } - cwd, err := fs.CheckedToAbsoluteSystemPath(defaultCwd) - if err != nil { - t.Fatalf("cwd is not an
absolute directory %v: %v", defaultCwd, err) - } - - return cwd.UntypedJoin("testdata", testName) -} - -func gitRm(t *testing.T, dir turbopath.AbsoluteSystemPath) { - cmd := exec.Command("rm", "-rf", ".git") - cmd.Dir = dir.ToString() - if out, err := cmd.Output(); err != nil { - t.Fatalf("Failed to cleanup git dir: %s\n%v", out, err) - } -} - -func getOriginalConfig(cwd turbopath.AbsoluteSystemPath) (string, string) { - // Ignore errors. If there was an error, it's likely because there was no value for these - // configs (e.g. in CI), so git is returning non-zero status code. This is ok, and we'll use the - // zero-value empty strings. - name, _ := _gitCommand(cwd, []string{"config", "--global", "user.name"}) - email, _ := _gitCommand(cwd, []string{"config", "--global", "user.email"}) - - return name, email -} - -func gitCommand(t *testing.T, cwd turbopath.AbsoluteSystemPath, args []string) string { - out, err := _gitCommand(cwd, args) - - if err != nil { - t.Fatalf("Failed git command %s: %s\n%v", args, out, err) - } - - return string(out) -} - -func _gitCommand(cwd turbopath.AbsoluteSystemPath, args []string) (string, error) { - cmd := exec.Command("git", args...) - cmd.Dir = cwd.ToString() - out, err := cmd.CombinedOutput() - - if err != nil { - return "", err - } - - return string(out), nil -} diff --git a/cli/internal/scm/stub.go b/cli/internal/scm/stub.go deleted file mode 100644 index 2e356c5b36f08..0000000000000 --- a/cli/internal/scm/stub.go +++ /dev/null @@ -1,14 +0,0 @@ -// Adapted from https://github.com/thought-machine/please/tree/master/src/scm -// Copyright Thought Machine, Inc. or its affiliates. All Rights Reserved. -// SPDX-License-Identifier: Apache-2.0 -package scm - -type stub struct{} - -func (s *stub) ChangedFiles(fromCommit string, toCommit string, relativeTo string) ([]string, error) { - return nil, nil -} - -func (s *stub) PreviousContent(fromCommit string, filePath string) ([]byte, error) { - return nil, nil -} diff --git a/cli/internal/scm/testdata/myrepo/foo b/cli/internal/scm/testdata/myrepo/foo deleted file mode 100644 index e69de29bb2d1d..0000000000000 diff --git a/cli/internal/scope/filter/filter.go b/cli/internal/scope/filter/filter.go deleted file mode 100644 index bdaca8d486f0e..0000000000000 --- a/cli/internal/scope/filter/filter.go +++ /dev/null @@ -1,428 +0,0 @@ -package filter - -import ( - "fmt" - "strings" - - "github.com/hashicorp/go-hclog" - "github.com/pkg/errors" - "github.com/pyr-sh/dag" - "github.com/vercel/turbo/cli/internal/doublestar" - "github.com/vercel/turbo/cli/internal/turbopath" - "github.com/vercel/turbo/cli/internal/util" - "github.com/vercel/turbo/cli/internal/workspace" -) - -type SelectedPackages struct { - pkgs util.Set - unusedFilters []*TargetSelector -} - -// PackagesChangedInRange is the signature of a function to provide the set of -// packages that have changed in a particular range of git refs. -type PackagesChangedInRange = func(fromRef string, toRef string) (util.Set, error) - -// PackageInference holds the information we have inferred from the working-directory -// (really --infer-filter-root flag) about which packages are of interest. -type PackageInference struct { - // PackageName, if set, means that we have determined that filters without a package-specifier - // should get this package name - PackageName string - // DirectoryRoot is used to infer a "parentDir" for the filter in the event that we haven't - // identified a specific package.
If the filter already contains a parentDir, this acts as - // a prefix. If the filter does not contain a parentDir, we consider this to be a glob for - // all subdirectories - DirectoryRoot turbopath.RelativeSystemPath -} - -type Resolver struct { - Graph *dag.AcyclicGraph - WorkspaceInfos workspace.Catalog - Cwd turbopath.AbsoluteSystemPath - Inference *PackageInference - PackagesChangedInRange PackagesChangedInRange - Logger hclog.Logger -} - -// GetPackagesFromPatterns compiles filter patterns and applies them, returning -// the selected packages -func (r *Resolver) GetPackagesFromPatterns(patterns []string) (util.Set, error) { - selectors := []*TargetSelector{} - for _, pattern := range patterns { - selector, err := ParseTargetSelector(pattern) - if err != nil { - return nil, err - } - r.Logger.Debug("Parsed selector", "selector", hclog.Fmt("%+v", selector)) - selectors = append(selectors, selector) - } - selected, err := r.getFilteredPackages(selectors) - if err != nil { - return nil, err - } - return selected.pkgs, nil -} - -func (pi *PackageInference) apply(selector *TargetSelector) error { - if selector.namePattern != "" { - // The selector references a package name, don't apply inference - return nil - } - if pi.PackageName != "" { - selector.namePattern = pi.PackageName - } - if selector.parentDir != "" { - parentDir := pi.DirectoryRoot.Join(selector.parentDir) - selector.parentDir = parentDir - } else if pi.PackageName == "" { - // The user didn't set a parent directory and we didn't find a single package, - // so use the directory we inferred and select all subdirectories - selector.parentDir = pi.DirectoryRoot.Join("**") - } - return nil -} - -func (r *Resolver) applyInference(selectors []*TargetSelector) ([]*TargetSelector, error) { - if r.Inference == nil { - return selectors, nil - } - // If there are existing patterns, use inference on those. 
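To make apply concrete, a small in-package sketch (the directory is hypothetical; turbopath is the helper package imported above):

    pi := &PackageInference{DirectoryRoot: turbopath.MakeRelativeSystemPath("packages")}

    bare := &TargetSelector{} // synthesized when --infer-filter-root is set but no --filter is given
    _ = pi.apply(bare)
    // bare.parentDir is now "packages/**": no single package was identified,
    // so every workspace under the inferred directory is selected.

    named := &TargetSelector{namePattern: "my-pkg"}
    _ = pi.apply(named)
    // named is unchanged: selectors that already name a package skip inference.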
If there are no - // patterns, but there is a directory supplied, synthesize a selector - if len(selectors) == 0 { - selectors = append(selectors, &TargetSelector{}) - } - for _, selector := range selectors { - if err := r.Inference.apply(selector); err != nil { - return nil, err - } - } - return selectors, nil -} - -func (r *Resolver) getFilteredPackages(selectors []*TargetSelector) (*SelectedPackages, error) { - selectors, err := r.applyInference(selectors) - if err != nil { - return nil, err - } - prodPackageSelectors := []*TargetSelector{} - allPackageSelectors := []*TargetSelector{} - for _, selector := range selectors { - if selector.followProdDepsOnly { - prodPackageSelectors = append(prodPackageSelectors, selector) - } else { - allPackageSelectors = append(allPackageSelectors, selector) - } - } - r.Logger.Debug("Filtering packages", "allPackageSelectors", allPackageSelectors) - if len(allPackageSelectors) > 0 || len(prodPackageSelectors) > 0 { - if len(allPackageSelectors) > 0 { - selected, err := r.filterGraph(allPackageSelectors) - if err != nil { - return nil, err - } - return selected, nil - } - } - return &SelectedPackages{ - pkgs: make(util.Set), - }, nil -} - -func (r *Resolver) filterGraph(selectors []*TargetSelector) (*SelectedPackages, error) { - includeSelectors := []*TargetSelector{} - excludeSelectors := []*TargetSelector{} - for _, selector := range selectors { - if selector.exclude { - excludeSelectors = append(excludeSelectors, selector) - } else { - includeSelectors = append(includeSelectors, selector) - } - } - var include *SelectedPackages - if len(includeSelectors) > 0 { - found, err := r.filterGraphWithSelectors(includeSelectors) - if err != nil { - return nil, err - } - include = found - } else { - vertexSet := make(util.Set) - for _, v := range r.Graph.Vertices() { - vertexSet.Add(v) - } - include = &SelectedPackages{ - pkgs: vertexSet, - } - } - exclude, err := r.filterGraphWithSelectors(excludeSelectors) - if err != nil { - return nil, err - } - return &SelectedPackages{ - pkgs: include.pkgs.Difference(exclude.pkgs), - unusedFilters: append(include.unusedFilters, exclude.unusedFilters...), - }, nil -} - -func (r *Resolver) filterGraphWithSelectors(selectors []*TargetSelector) (*SelectedPackages, error) { - unmatchedSelectors := []*TargetSelector{} - - cherryPickedPackages := make(dag.Set) - walkedDependencies := make(dag.Set) - walkedDependents := make(dag.Set) - walkedDependentsDependencies := make(dag.Set) - - for _, selector := range selectors { - // TODO(gsoltis): this should be a list? 
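As a worked example of the include/exclude algebra in filterGraph above, a hedged in-package sketch whose selector values mirror the test fixture later in this diff:

    selected, err := r.filterGraph([]*TargetSelector{
        {parentDir: turbopath.MakeRelativeSystemPath("packages", "*")}, // include
        {exclude: true, namePattern: "*-1"},                            // exclude
    })
    if err != nil {
        return err
    }
    // selected.pkgs is the include set minus the exclude set ({project-0} in
    // that fixture); with no include selectors, include defaults to every
    // vertex in the graph.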
- entryPackages, err := r.filterGraphWithSelector(selector) - r.Logger.Debug("Filtered packages", "selector", hclog.Fmt("%+v", selector), "entryPackages", entryPackages) - if err != nil { - return nil, err - } - if entryPackages.Len() == 0 { - unmatchedSelectors = append(unmatchedSelectors, selector) - } - for _, pkg := range entryPackages { - if selector.includeDependencies { - dependencies, err := r.Graph.Ancestors(pkg) - if err != nil { - return nil, errors.Wrapf(err, "failed to get dependencies of package %v", pkg) - } - for dep := range dependencies { - walkedDependencies.Add(dep) - } - if !selector.excludeSelf { - walkedDependencies.Add(pkg) - } - } - if selector.includeDependents { - dependents, err := r.Graph.Descendents(pkg) - if err != nil { - return nil, errors.Wrapf(err, "failed to get dependents of package %v", pkg) - } - for dep := range dependents { - walkedDependents.Add(dep) - if selector.includeDependencies { - dependentDeps, err := r.Graph.Ancestors(dep) - if err != nil { - return nil, errors.Wrapf(err, "failed to get dependencies of dependent %v", dep) - } - for dependentDep := range dependentDeps { - walkedDependentsDependencies.Add(dependentDep) - } - } - } - if !selector.excludeSelf { - walkedDependents.Add(pkg) - } - } - if !selector.includeDependencies && !selector.includeDependents { - cherryPickedPackages.Add(pkg) - } - } - } - - r.Logger.Debug("Filtered packages", "cherryPickedPackages", cherryPickedPackages, "walkedDependencies", walkedDependencies, "walkedDependents", walkedDependents, "walkedDependentsDependencies", walkedDependentsDependencies) - allPkgs := make(util.Set) - for pkg := range cherryPickedPackages { - allPkgs.Add(pkg) - } - for pkg := range walkedDependencies { - allPkgs.Add(pkg) - } - for pkg := range walkedDependents { - allPkgs.Add(pkg) - } - for pkg := range walkedDependentsDependencies { - allPkgs.Add(pkg) - } - return &SelectedPackages{ - pkgs: allPkgs, - unusedFilters: unmatchedSelectors, - }, nil -} - -func (r *Resolver) filterGraphWithSelector(selector *TargetSelector) (util.Set, error) { - if selector.matchDependencies { - return r.filterSubtreesWithSelector(selector) - } - return r.filterNodesWithSelector(selector) -} - -// filterNodesWithSelector returns the set of nodes that match a given selector -func (r *Resolver) filterNodesWithSelector(selector *TargetSelector) (util.Set, error) { - entryPackages := make(util.Set) - selectorWasUsed := false - if selector.fromRef != "" { - // get changed packages - selectorWasUsed = true - changedPkgs, err := r.PackagesChangedInRange(selector.fromRef, selector.getToRef()) - if err != nil { - return nil, err - } - parentDir := selector.parentDir - for pkgName := range changedPkgs { - if parentDir != "" { - // Type assert/coerce to string here because we want to use - // this value in a map that has string keys. - // TODO(mehulkar) `changedPkgs` is a util.Set, we could make a `util.PackageNamesSet` - // or something similar that is all strings.
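The graph walk above is easiest to trace on the fixture used by the tests below (project-0 depends on project-1 and project-5; project-1 depends on project-2 and project-4). For the selector `...project-1...`, dependents and dependencies with self included, the walk unions:

    entry                   {project-1}
    Ancestors(project-1)    {project-2, project-4}                          dependencies
    Descendents(project-1)  {project-0}                                     dependents
    Ancestors(project-0)    {project-1, project-2, project-4, project-5}    dependents' dependencies
    result                  {project-0, project-1, project-2, project-4, project-5}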
- pkgNameStr := pkgName.(string) - if pkgName == util.RootPkgName { - // The root package changed, only add it if - // the parentDir is equivalent to the root - if matches, err := doublestar.PathMatch(r.Cwd.Join(parentDir).ToString(), r.Cwd.ToString()); err != nil { - return nil, fmt.Errorf("failed to resolve directory relationship %v contains %v: %v", parentDir, r.Cwd, err) - } else if matches { - entryPackages.Add(pkgName) - } - } else if pkg, ok := r.WorkspaceInfos.PackageJSONs[pkgNameStr]; !ok { - return nil, fmt.Errorf("missing info for package %v", pkgName) - } else if matches, err := doublestar.PathMatch(r.Cwd.Join(parentDir).ToString(), pkg.Dir.RestoreAnchor(r.Cwd).ToString()); err != nil { - return nil, fmt.Errorf("failed to resolve directory relationship %v contains %v: %v", selector.parentDir, pkg.Dir, err) - } else if matches { - entryPackages.Add(pkgName) - } - } else { - entryPackages.Add(pkgName) - } - } - } else if selector.parentDir != "" { - // get packages by path - selectorWasUsed = true - parentDir := selector.parentDir - if parentDir == "." { - entryPackages.Add(util.RootPkgName) - } else { - for name, pkg := range r.WorkspaceInfos.PackageJSONs { - if matches, err := doublestar.PathMatch(r.Cwd.Join(parentDir).ToString(), pkg.Dir.RestoreAnchor(r.Cwd).ToString()); err != nil { - return nil, fmt.Errorf("failed to resolve directory relationship %v contains %v: %v", selector.parentDir, pkg.Dir, err) - } else if matches { - entryPackages.Add(name) - } - } - } - } - if selector.namePattern != "" { - // find packages that match name - if !selectorWasUsed { - matched, err := matchPackageNamesToVertices(selector.namePattern, r.Graph.Vertices()) - if err != nil { - return nil, err - } - entryPackages = matched - selectorWasUsed = true - } else { - matched, err := matchPackageNames(selector.namePattern, entryPackages) - if err != nil { - return nil, err - } - entryPackages = matched - } - } - // TODO(gsoltis): we can do this earlier - // Check if the selector specified anything - if !selectorWasUsed { - return nil, fmt.Errorf("invalid selector: %v", selector.raw) - } - return entryPackages, nil -} - -// filterSubtreesWithSelector returns the set of nodes where the node or any of its dependencies -// match a selector -func (r *Resolver) filterSubtreesWithSelector(selector *TargetSelector) (util.Set, error) { - // foreach package that matches parentDir && namePattern, check if any dependency is in changed packages - changedPkgs, err := r.PackagesChangedInRange(selector.fromRef, selector.getToRef()) - if err != nil { - return nil, err - } - - parentDir := selector.parentDir - entryPackages := make(util.Set) - for name, pkg := range r.WorkspaceInfos.PackageJSONs { - if parentDir == "" { - entryPackages.Add(name) - } else if matches, err := doublestar.PathMatch(r.Cwd.Join(parentDir).ToString(), pkg.Dir.RestoreAnchor(r.Cwd).ToString()); err != nil { - return nil, fmt.Errorf("failed to resolve directory relationship %v contains %v: %v", selector.parentDir, pkg.Dir, err) - } else if matches { - entryPackages.Add(name) - } - } - if selector.namePattern != "" { - matched, err := matchPackageNames(selector.namePattern, entryPackages) - if err != nil { - return nil, err - } - entryPackages = matched - } - roots := make(util.Set) - matched := make(util.Set) - for pkg := range entryPackages { - if matched.Includes(pkg) { - roots.Add(pkg) - continue - } - deps, err := r.Graph.Ancestors(pkg) - if err != nil { - return nil, err - } - for changedPkg := range changedPkgs { - if 
!selector.excludeSelf && pkg == changedPkg { - roots.Add(pkg) - break - } - if deps.Include(changedPkg) { - roots.Add(pkg) - matched.Add(changedPkg) - break - } - } - } - return roots, nil -} - -func matchPackageNamesToVertices(pattern string, vertices []dag.Vertex) (util.Set, error) { - packages := make(util.Set) - for _, v := range vertices { - packages.Add(v) - } - packages.Add(util.RootPkgName) - return matchPackageNames(pattern, packages) -} - -func matchPackageNames(pattern string, packages util.Set) (util.Set, error) { - matcher, err := matcherFromPattern(pattern) - if err != nil { - return nil, err - } - matched := make(util.Set) - for _, pkg := range packages { - pkg := pkg.(string) - if matcher(pkg) { - matched.Add(pkg) - } - } - if matched.Len() == 0 && !strings.HasPrefix(pattern, "@") && !strings.Contains(pattern, "/") { - // we got no matches and the pattern isn't a scoped package. - // Check if we have exactly one scoped package that does match - scopedPattern := fmt.Sprintf("@*/%v", pattern) - matcher, err = matcherFromPattern(scopedPattern) - if err != nil { - return nil, err - } - foundScopedPkg := false - for _, pkg := range packages { - pkg := pkg.(string) - if matcher(pkg) { - if foundScopedPkg { - // we found a second scoped package. Return the empty set, we can't - // disambiguate - return make(util.Set), nil - } - foundScopedPkg = true - matched.Add(pkg) - } - } - } - return matched, nil -} diff --git a/cli/internal/scope/filter/filter_test.go b/cli/internal/scope/filter/filter_test.go deleted file mode 100644 index 268999af615db..0000000000000 --- a/cli/internal/scope/filter/filter_test.go +++ /dev/null @@ -1,640 +0,0 @@ -package filter - -import ( - "fmt" - "os" - "strings" - "testing" - - "github.com/hashicorp/go-hclog" - "github.com/pyr-sh/dag" - "github.com/vercel/turbo/cli/internal/fs" - "github.com/vercel/turbo/cli/internal/turbopath" - "github.com/vercel/turbo/cli/internal/util" - "github.com/vercel/turbo/cli/internal/workspace" -) - -func setMatches(t *testing.T, name string, s util.Set, expected []string) { - expectedSet := make(util.Set) - for _, item := range expected { - expectedSet.Add(item) - } - missing := s.Difference(expectedSet) - if missing.Len() > 0 { - t.Errorf("%v set has extra elements: %v", name, strings.Join(missing.UnsafeListOfStrings(), ", ")) - } - extra := expectedSet.Difference(s) - if extra.Len() > 0 { - t.Errorf("%v set missing elements: %v", name, strings.Join(extra.UnsafeListOfStrings(), ", ")) - } -} - -func Test_filter(t *testing.T) { - rawCwd, err := os.Getwd() - if err != nil { - t.Fatalf("failed to get working directory: %v", err) - } - root, err := fs.GetCwd(rawCwd) - if err != nil { - t.Fatalf("failed to get working directory: %v", err) - } - workspaceInfos := workspace.Catalog{ - PackageJSONs: make(map[string]*fs.PackageJSON), - } - packageJSONs := workspaceInfos.PackageJSONs - graph := &dag.AcyclicGraph{} - graph.Add("project-0") - packageJSONs["project-0"] = &fs.PackageJSON{ - Name: "project-0", - Dir: turbopath.AnchoredUnixPath("packages/project-0").ToSystemPath(), - } - graph.Add("project-1") - packageJSONs["project-1"] = &fs.PackageJSON{ - Name: "project-1", - Dir: turbopath.AnchoredUnixPath("packages/project-1").ToSystemPath(), - } - graph.Add("project-2") - packageJSONs["project-2"] = &fs.PackageJSON{ - Name: "project-2", - Dir: "project-2", - } - graph.Add("project-3") - packageJSONs["project-3"] = &fs.PackageJSON{ - Name: "project-3", - Dir: "project-3", - } - graph.Add("project-4") - packageJSONs["project-4"] = 
&fs.PackageJSON{ - Name: "project-4", - Dir: "project-4", - } - graph.Add("project-5") - packageJSONs["project-5"] = &fs.PackageJSON{ - Name: "project-5", - Dir: "project-5", - } - // Note: inside project-5 - graph.Add("project-6") - packageJSONs["project-6"] = &fs.PackageJSON{ - Name: "project-6", - Dir: turbopath.AnchoredUnixPath("project-5/packages/project-6").ToSystemPath(), - } - // Add dependencies - graph.Connect(dag.BasicEdge("project-0", "project-1")) - graph.Connect(dag.BasicEdge("project-0", "project-5")) - graph.Connect(dag.BasicEdge("project-1", "project-2")) - graph.Connect(dag.BasicEdge("project-1", "project-4")) - - testCases := []struct { - Name string - Selectors []*TargetSelector - PackageInference *PackageInference - Expected []string - }{ - { - "select root package", - []*TargetSelector{ - { - namePattern: util.RootPkgName, - }, - }, - nil, - []string{util.RootPkgName}, - }, - { - "select only package dependencies (excluding the package itself)", - []*TargetSelector{ - { - excludeSelf: true, - includeDependencies: true, - namePattern: "project-1", - }, - }, - nil, - []string{"project-2", "project-4"}, - }, - { - "select package with dependencies", - []*TargetSelector{ - { - excludeSelf: false, - includeDependencies: true, - namePattern: "project-1", - }, - }, - nil, - []string{"project-1", "project-2", "project-4"}, - }, - { - "select package with dependencies and dependents, including dependent dependencies", - []*TargetSelector{ - { - excludeSelf: true, - includeDependencies: true, - includeDependents: true, - namePattern: "project-1", - }, - }, - nil, - []string{"project-0", "project-1", "project-2", "project-4", "project-5"}, - }, - { - "select package with dependents", - []*TargetSelector{ - { - includeDependents: true, - namePattern: "project-2", - }, - }, - nil, - []string{"project-1", "project-2", "project-0"}, - }, - { - "select dependents excluding package itself", - []*TargetSelector{ - { - excludeSelf: true, - includeDependents: true, - namePattern: "project-2", - }, - }, - nil, - []string{"project-0", "project-1"}, - }, - { - "filter using two selectors: one selects dependencies another selects dependents", - []*TargetSelector{ - { - excludeSelf: true, - includeDependents: true, - namePattern: "project-2", - }, - { - excludeSelf: true, - includeDependencies: true, - namePattern: "project-1", - }, - }, - nil, - []string{"project-0", "project-1", "project-2", "project-4"}, - }, - { - "select just a package by name", - []*TargetSelector{ - { - namePattern: "project-2", - }, - }, - nil, - []string{"project-2"}, - }, - // Note: we don't support the option to switch path prefix mode - // { - // "select by parentDir", - // []*TargetSelector{ - // { - // parentDir: "/packages", - // }, - // }, - // []string{"project-0", "project-1"}, - // }, - { - "select by parentDir using glob", - []*TargetSelector{ - { - parentDir: turbopath.MakeRelativeSystemPath("packages", "*"), - }, - }, - nil, - []string{"project-0", "project-1"}, - }, - { - "select sibling directory", - []*TargetSelector{{parentDir: turbopath.MakeRelativeSystemPath("..", "packages", "*")}}, - &PackageInference{ - DirectoryRoot: turbopath.MakeRelativeSystemPath("project-5"), - }, - []string{"project-0", "project-1"}, - }, - { - "select by parentDir using globstar", - []*TargetSelector{ - { - parentDir: turbopath.MakeRelativeSystemPath("project-5", "**"), - }, - }, - nil, - []string{"project-5", "project-6"}, - }, - { - "select by parentDir with no glob", - []*TargetSelector{ - { - parentDir: 
turbopath.MakeRelativeSystemPath("project-5"), - }, - }, - nil, - []string{"project-5"}, - }, - { - "select all packages except one", - []*TargetSelector{ - { - exclude: true, - namePattern: "project-1", - }, - }, - nil, - []string{"project-0", "project-2", "project-3", "project-4", "project-5", "project-6"}, - }, - { - "select by parentDir and exclude one package by pattern", - []*TargetSelector{ - { - parentDir: turbopath.MakeRelativeSystemPath("packages", "*"), - }, - { - exclude: true, - namePattern: "*-1", - }, - }, - nil, - []string{"project-0"}, - }, - { - "select root package by directory", - []*TargetSelector{ - { - parentDir: turbopath.MakeRelativeSystemPath("."), // input . gets cleaned to "" - }, - }, - nil, - []string{util.RootPkgName}, - }, - { - "select packages directory", - []*TargetSelector{}, - &PackageInference{ - DirectoryRoot: turbopath.MakeRelativeSystemPath("packages"), - }, - []string{"project-0", "project-1"}, - }, - { - "infer single package", - []*TargetSelector{}, - &PackageInference{ - DirectoryRoot: turbopath.MakeRelativeSystemPath("packages", "project-0"), - PackageName: "project-0", - }, - []string{"project-0"}, - }, - { - "infer single package from subdirectory", - []*TargetSelector{}, - &PackageInference{ - DirectoryRoot: turbopath.MakeRelativeSystemPath("packages", "project-0", "src"), - PackageName: "project-0", - }, - []string{"project-0"}, - }, - } - - for _, tc := range testCases { - t.Run(tc.Name, func(t *testing.T) { - r := &Resolver{ - Graph: graph, - WorkspaceInfos: workspaceInfos, - Cwd: root, - Inference: tc.PackageInference, - Logger: hclog.Default(), - } - pkgs, err := r.getFilteredPackages(tc.Selectors) - if err != nil { - t.Fatalf("%v failed to filter packages: %v", tc.Name, err) - } - setMatches(t, tc.Name, pkgs.pkgs, tc.Expected) - }) - } - - t.Run("report unmatched filters", func(t *testing.T) { - r := &Resolver{ - Graph: graph, - WorkspaceInfos: workspaceInfos, - Cwd: root, - Logger: hclog.Default(), - } - pkgs, err := r.getFilteredPackages([]*TargetSelector{ - { - excludeSelf: true, - includeDependencies: true, - namePattern: "project-7", - }, - }) - if err != nil { - t.Fatalf("unmatched filter failed to filter packages: %v", err) - } - if pkgs.pkgs.Len() != 0 { - t.Errorf("unmatched filter expected no packages, got %v", strings.Join(pkgs.pkgs.UnsafeListOfStrings(), ", ")) - } - if len(pkgs.unusedFilters) != 1 { - t.Errorf("unmatched filter expected to report one unused filter, got %v", len(pkgs.unusedFilters)) - } - }) -} - -func Test_matchScopedPackage(t *testing.T) { - rawCwd, err := os.Getwd() - if err != nil { - t.Fatalf("failed to get working directory: %v", err) - } - root, err := fs.GetCwd(rawCwd) - if err != nil { - t.Fatalf("failed to get working directory: %v", err) - } - - workspaceInfos := workspace.Catalog{ - PackageJSONs: make(map[string]*fs.PackageJSON), - } - packageJSONs := workspaceInfos.PackageJSONs - graph := &dag.AcyclicGraph{} - graph.Add("@foo/bar") - packageJSONs["@foo/bar"] = &fs.PackageJSON{ - Name: "@foo/bar", - Dir: turbopath.AnchoredUnixPath("packages/bar").ToSystemPath(), - } - r := &Resolver{ - Graph: graph, - WorkspaceInfos: workspaceInfos, - Cwd: root, - Logger: hclog.Default(), - } - pkgs, err := r.getFilteredPackages([]*TargetSelector{ - { - namePattern: "bar", - }, - }) - if err != nil { - t.Fatalf("failed to filter packages: %v", err) - } - setMatches(t, "match scoped package", pkgs.pkgs, []string{"@foo/bar"}) -} - -func Test_matchExactPackages(t *testing.T) { - rawCwd, err := os.Getwd() - if err 
!= nil { - t.Fatalf("failed to get working directory: %v", err) - } - root, err := fs.GetCwd(rawCwd) - if err != nil { - t.Fatalf("failed to get working directory: %v", err) - } - - workspaceInfos := workspace.Catalog{ - PackageJSONs: make(map[string]*fs.PackageJSON), - } - packageJSONs := workspaceInfos.PackageJSONs - graph := &dag.AcyclicGraph{} - graph.Add("@foo/bar") - packageJSONs["@foo/bar"] = &fs.PackageJSON{ - Name: "@foo/bar", - Dir: turbopath.AnchoredUnixPath("packages/@foo/bar").ToSystemPath(), - } - graph.Add("bar") - packageJSONs["bar"] = &fs.PackageJSON{ - Name: "bar", - Dir: turbopath.AnchoredUnixPath("packages/bar").ToSystemPath(), - } - r := &Resolver{ - Graph: graph, - WorkspaceInfos: workspaceInfos, - Cwd: root, - Logger: hclog.Default(), - } - pkgs, err := r.getFilteredPackages([]*TargetSelector{ - { - namePattern: "bar", - }, - }) - if err != nil { - t.Fatalf("failed to filter packages: %v", err) - } - setMatches(t, "match exact package", pkgs.pkgs, []string{"bar"}) -} - -func Test_matchMultipleScopedPackages(t *testing.T) { - rawCwd, err := os.Getwd() - if err != nil { - t.Fatalf("failed to get working directory: %v", err) - } - root, err := fs.GetCwd(rawCwd) - if err != nil { - t.Fatalf("failed to get working directory: %v", err) - } - - workspaceInfos := workspace.Catalog{ - PackageJSONs: make(map[string]*fs.PackageJSON), - } - packageJSONs := workspaceInfos.PackageJSONs - graph := &dag.AcyclicGraph{} - graph.Add("@foo/bar") - packageJSONs["@foo/bar"] = &fs.PackageJSON{ - Name: "@foo/bar", - Dir: turbopath.AnchoredUnixPath("packages/@foo/bar").ToSystemPath(), - } - graph.Add("@types/bar") - packageJSONs["@types/bar"] = &fs.PackageJSON{ - Name: "@types/bar", - Dir: turbopath.AnchoredUnixPath("packages/@types/bar").ToSystemPath(), - } - r := &Resolver{ - Graph: graph, - WorkspaceInfos: workspaceInfos, - Cwd: root, - Logger: hclog.Default(), - } - pkgs, err := r.getFilteredPackages([]*TargetSelector{ - { - namePattern: "bar", - }, - }) - if err != nil { - t.Fatalf("failed to filter packages: %v", err) - } - setMatches(t, "match nothing with multiple scoped packages", pkgs.pkgs, []string{}) -} - -func Test_SCM(t *testing.T) { - rawCwd, err := os.Getwd() - if err != nil { - t.Fatalf("failed to get working directory: %v", err) - } - root, err := fs.GetCwd(rawCwd) - if err != nil { - t.Fatalf("failed to get working directory: %v", err) - } - head1Changed := make(util.Set) - head1Changed.Add("package-1") - head1Changed.Add("package-2") - head1Changed.Add(util.RootPkgName) - head2Changed := make(util.Set) - head2Changed.Add("package-3") - workspaceInfos := workspace.Catalog{ - PackageJSONs: make(map[string]*fs.PackageJSON), - } - packageJSONs := workspaceInfos.PackageJSONs - graph := &dag.AcyclicGraph{} - graph.Add("package-1") - packageJSONs["package-1"] = &fs.PackageJSON{ - Name: "package-1", - Dir: "package-1", - } - graph.Add("package-2") - packageJSONs["package-2"] = &fs.PackageJSON{ - Name: "package-2", - Dir: "package-2", - } - graph.Add("package-3") - packageJSONs["package-3"] = &fs.PackageJSON{ - Name: "package-3", - Dir: "package-3", - } - graph.Add("package-20") - packageJSONs["package-20"] = &fs.PackageJSON{ - Name: "package-20", - Dir: "package-20", - } - - graph.Connect(dag.BasicEdge("package-3", "package-20")) - - r := &Resolver{ - Graph: graph, - WorkspaceInfos: workspaceInfos, - Cwd: root, - PackagesChangedInRange: func(fromRef string, toRef string) (util.Set, error) { - if fromRef == "HEAD~1" && toRef == "HEAD" { - return head1Changed, nil - } else if 
fromRef == "HEAD~2" && toRef == "HEAD" { - union := head1Changed.Copy() - for val := range head2Changed { - union.Add(val) - } - return union, nil - } else if fromRef == "HEAD~2" && toRef == "HEAD~1" { - return head2Changed, nil - } - panic(fmt.Sprintf("unsupported commit range %v...%v", fromRef, toRef)) - }, - Logger: hclog.Default(), - } - - testCases := []struct { - Name string - Selectors []*TargetSelector - Expected []string - }{ - { - "all changed packages", - []*TargetSelector{ - { - fromRef: "HEAD~1", - }, - }, - []string{"package-1", "package-2", util.RootPkgName}, - }, - { - "all changed packages with parent dir exact match", - []*TargetSelector{ - { - fromRef: "HEAD~1", - parentDir: ".", - }, - }, - []string{util.RootPkgName}, - }, - { - "changed packages in directory", - []*TargetSelector{ - { - fromRef: "HEAD~1", - parentDir: "package-2", - }, - }, - []string{"package-2"}, - }, - { - "changed packages matching pattern", - []*TargetSelector{ - { - fromRef: "HEAD~1", - namePattern: "package-2*", - }, - }, - []string{"package-2"}, - }, - { - "changed packages matching pattern", - []*TargetSelector{ - { - fromRef: "HEAD~1", - namePattern: "package-2*", - }, - }, - []string{"package-2"}, - }, - // Note: missing test here that takes advantage of automatically exempting - // test-only changes from pulling in dependents - // - // turbo-specific tests below here - { - "changed package was requested scope, and we're matching dependencies", - []*TargetSelector{ - { - fromRef: "HEAD~1", - namePattern: "package-1", - matchDependencies: true, - }, - }, - []string{"package-1"}, - }, - { - "older commit", - []*TargetSelector{ - { - fromRef: "HEAD~2", - }, - }, - []string{"package-1", "package-2", "package-3", util.RootPkgName}, - }, - { - "commit range", - []*TargetSelector{ - { - fromRef: "HEAD~2", - toRefOverride: "HEAD~1", - }, - }, - []string{"package-3"}, - }, - { - "match dependency subtree", - []*TargetSelector{ - { - fromRef: "HEAD~1", - parentDir: "package-*", - matchDependencies: true, - }, - }, - []string{"package-1", "package-2"}, - }, - } - - for _, tc := range testCases { - t.Run(tc.Name, func(t *testing.T) { - pkgs, err := r.getFilteredPackages(tc.Selectors) - if err != nil { - t.Fatalf("%v failed to filter packages: %v", tc.Name, err) - } - setMatches(t, tc.Name, pkgs.pkgs, tc.Expected) - }) - } -} diff --git a/cli/internal/scope/filter/matcher.go b/cli/internal/scope/filter/matcher.go deleted file mode 100644 index 2460326619b74..0000000000000 --- a/cli/internal/scope/filter/matcher.go +++ /dev/null @@ -1,32 +0,0 @@ -package filter - -import ( - "regexp" - "strings" - - "github.com/pkg/errors" -) - -type Matcher = func(pkgName string) bool - -func matchAll(pkgName string) bool { - return true -} - -func matcherFromPattern(pattern string) (Matcher, error) { - if pattern == "*" { - return matchAll, nil - } - - escaped := regexp.QuoteMeta(pattern) - // replace escaped '*' with regex '.*' - normalized := strings.ReplaceAll(escaped, "\\*", ".*") - if normalized == pattern { - return func(pkgName string) bool { return pkgName == pattern }, nil - } - regex, err := regexp.Compile("^" + normalized + "$") - if err != nil { - return nil, errors.Wrapf(err, "failed to compile filter pattern to regex: %v", pattern) - } - return func(pkgName string) bool { return regex.Match([]byte(pkgName)) }, nil -} diff --git a/cli/internal/scope/filter/matcher_test.go b/cli/internal/scope/filter/matcher_test.go deleted file mode 100644 index 966be2b8abfe6..0000000000000 --- 
a/cli/internal/scope/filter/matcher_test.go +++ /dev/null @@ -1,65 +0,0 @@ -package filter - -import "testing" - -func TestMatcher(t *testing.T) { - testCases := map[string][]struct { - test string - want bool - }{ - "*": { - { - test: "@eslint/plugin-foo", - want: true, - }, - { - test: "express", - want: true, - }, - }, - "eslint-*": { - { - test: "eslint-plugin-foo", - want: true, - }, - { - test: "express", - want: false, - }, - }, - "*plugin*": { - { - test: "@eslint/plugin-foo", - want: true, - }, - { - test: "express", - want: false, - }, - }, - "a*c": { - { - test: "abc", - want: true, - }, - }, - "*-positive": { - { - test: "is-positive", - want: true, - }, - }, - } - for pattern, tests := range testCases { - matcher, err := matcherFromPattern(pattern) - if err != nil { - t.Fatalf("failed to compile match pattern %v, %v", pattern, err) - } - for _, testCase := range tests { - got := matcher(testCase.test) - if got != testCase.want { - t.Errorf("%v.match(%v) got %v, want %v", pattern, testCase.test, got, testCase.want) - } - } - } -} diff --git a/cli/internal/scope/filter/parse_target_selector.go b/cli/internal/scope/filter/parse_target_selector.go deleted file mode 100644 index 4f5c90f0e19eb..0000000000000 --- a/cli/internal/scope/filter/parse_target_selector.go +++ /dev/null @@ -1,165 +0,0 @@ -package filter - -import ( - "regexp" - "strings" - - "github.com/pkg/errors" - "github.com/vercel/turbo/cli/internal/turbopath" -) - -type TargetSelector struct { - includeDependencies bool - matchDependencies bool - includeDependents bool - exclude bool - excludeSelf bool - followProdDepsOnly bool - parentDir turbopath.RelativeSystemPath - namePattern string - fromRef string - toRefOverride string - raw string -} - -func (ts *TargetSelector) IsValid() bool { - return ts.fromRef != "" || ts.parentDir != "" || ts.namePattern != "" -} - -// getToRef returns the git ref to use for upper bound of the comparison when finding changed -// packages. -func (ts *TargetSelector) getToRef() string { - if ts.toRefOverride == "" { - return "HEAD" - } - return ts.toRefOverride -} - -var errCantMatchDependencies = errors.New("cannot use match dependencies without specifying either a directory or package") - -var targetSelectorRegex = regexp.MustCompile(`^(?P<name>[^.](?:[^{}[\]]*[^{}[\].])?)?(?P<directory>\{[^}]*\})?(?P<commits>(?:\.{3})?\[[^\]]+\])?$`) - -// ParseTargetSelector parses a pnpm-compatible --filter selector string into a TargetSelector -func ParseTargetSelector(rawSelector string) (*TargetSelector, error) { - exclude := false - firstChar := rawSelector[0] - selector := rawSelector - if firstChar == '!'
{ - selector = selector[1:] - exclude = true - } - excludeSelf := false - includeDependencies := strings.HasSuffix(selector, "...") - if includeDependencies { - selector = selector[:len(selector)-3] - if strings.HasSuffix(selector, "^") { - excludeSelf = true - selector = selector[:len(selector)-1] - } - } - includeDependents := strings.HasPrefix(selector, "...") - if includeDependents { - selector = selector[3:] - if strings.HasPrefix(selector, "^") { - excludeSelf = true - selector = selector[1:] - } - } - - matches := targetSelectorRegex.FindAllStringSubmatch(selector, -1) - - if len(matches) == 0 { - if relativePath, ok := isSelectorByLocation(selector); ok { - return &TargetSelector{ - exclude: exclude, - includeDependencies: includeDependencies, - includeDependents: includeDependents, - parentDir: relativePath, - raw: rawSelector, - }, nil - } - return &TargetSelector{ - exclude: exclude, - excludeSelf: excludeSelf, - includeDependencies: includeDependencies, - includeDependents: includeDependents, - namePattern: selector, - raw: rawSelector, - }, nil - } - - fromRef := "" - toRefOverride := "" - var parentDir turbopath.RelativeSystemPath - namePattern := "" - preAddDependencies := false - if len(matches) > 0 && len(matches[0]) > 0 { - match := matches[0] - namePattern = match[targetSelectorRegex.SubexpIndex("name")] - rawParentDir := match[targetSelectorRegex.SubexpIndex("directory")] - if len(rawParentDir) > 0 { - // trim {} - rawParentDir = rawParentDir[1 : len(rawParentDir)-1] - if rawParentDir == "" { - return nil, errors.New("empty path specification") - } else if relPath, err := turbopath.CheckedToRelativeSystemPath(rawParentDir); err == nil { - parentDir = relPath - } else { - return nil, errors.Wrapf(err, "invalid path specification: %v", rawParentDir) - } - } - rawCommits := match[targetSelectorRegex.SubexpIndex("commits")] - if len(rawCommits) > 0 { - fromRef = rawCommits - if strings.HasPrefix(fromRef, "...") { - if parentDir == "" && namePattern == "" { - return &TargetSelector{}, errCantMatchDependencies - } - preAddDependencies = true - fromRef = fromRef[3:] - } - // strip [] - fromRef = fromRef[1 : len(fromRef)-1] - refs := strings.Split(fromRef, "...") - if len(refs) == 2 { - fromRef = refs[0] - toRefOverride = refs[1] - } - } - } - - return &TargetSelector{ - fromRef: fromRef, - toRefOverride: toRefOverride, - exclude: exclude, - excludeSelf: excludeSelf, - includeDependencies: includeDependencies, - matchDependencies: preAddDependencies, - includeDependents: includeDependents, - namePattern: namePattern, - parentDir: parentDir, - raw: rawSelector, - }, nil -} - -// isSelectorByLocation returns true if the selector is by filesystem location -func isSelectorByLocation(rawSelector string) (turbopath.RelativeSystemPath, bool) { - if rawSelector[0:1] != "." { - return "", false - } - - // . or ./ or .\ - if len(rawSelector) == 1 || rawSelector[1:2] == "/" || rawSelector[1:2] == "\\" { - return turbopath.MakeRelativeSystemPath(rawSelector), true - } - - if rawSelector[1:2] != "." { - return "", false - } - - // ..
or ../ or ..\ - if len(rawSelector) == 2 || rawSelector[2:3] == "/" || rawSelector[2:3] == "\\" { - return turbopath.MakeRelativeSystemPath(rawSelector), true - } - return "", false -} diff --git a/cli/internal/scope/filter/parse_target_selector_test.go b/cli/internal/scope/filter/parse_target_selector_test.go deleted file mode 100644 index 255a6160f503e..0000000000000 --- a/cli/internal/scope/filter/parse_target_selector_test.go +++ /dev/null @@ -1,324 +0,0 @@ -package filter - -import ( - "reflect" - "testing" - - "github.com/vercel/turbo/cli/internal/turbopath" -) - -func TestParseTargetSelector(t *testing.T) { - tests := []struct { - rawSelector string - want *TargetSelector - wantErr bool - }{ - { - "{}", - &TargetSelector{}, - true, - }, - { - "foo", - &TargetSelector{ - fromRef: "", - exclude: false, - excludeSelf: false, - includeDependencies: false, - includeDependents: false, - namePattern: "foo", - parentDir: "", - }, - false, - }, - { - "foo...", - &TargetSelector{ - fromRef: "", - exclude: false, - excludeSelf: false, - includeDependencies: true, - includeDependents: false, - namePattern: "foo", - parentDir: "", - }, - false, - }, - { - "...foo", - &TargetSelector{ - fromRef: "", - exclude: false, - excludeSelf: false, - includeDependencies: false, - includeDependents: true, - namePattern: "foo", - parentDir: "", - }, - false, - }, - { - "...foo...", - &TargetSelector{ - fromRef: "", - exclude: false, - excludeSelf: false, - includeDependencies: true, - includeDependents: true, - namePattern: "foo", - parentDir: "", - }, - false, - }, - { - "foo^...", - &TargetSelector{ - fromRef: "", - exclude: false, - excludeSelf: true, - includeDependencies: true, - includeDependents: false, - namePattern: "foo", - parentDir: "", - }, - false, - }, - { - "...^foo", - &TargetSelector{ - fromRef: "", - exclude: false, - excludeSelf: true, - includeDependencies: false, - includeDependents: true, - namePattern: "foo", - parentDir: "", - }, - false, - }, - { - "./foo", - &TargetSelector{ - fromRef: "", - exclude: false, - excludeSelf: false, - includeDependencies: false, - includeDependents: false, - namePattern: "", - parentDir: "foo", - }, - false, - }, - { - "./foo/*", - &TargetSelector{ - fromRef: "", - exclude: false, - excludeSelf: false, - includeDependencies: false, - includeDependents: false, - namePattern: "", - parentDir: turbopath.MakeRelativeSystemPath("foo/*"), - }, - false, - }, - { - "../foo", - &TargetSelector{ - fromRef: "", - exclude: false, - excludeSelf: false, - includeDependencies: false, - includeDependents: false, - namePattern: "", - parentDir: turbopath.MakeRelativeSystemPath("..", "foo"), - }, - false, - }, - { - "...{./foo}", - &TargetSelector{ - fromRef: "", - exclude: false, - excludeSelf: false, - includeDependencies: false, - includeDependents: true, - namePattern: "", - parentDir: "foo", - }, - false, - }, - { - ".", - &TargetSelector{ - fromRef: "", - exclude: false, - excludeSelf: false, - includeDependencies: false, - includeDependents: false, - namePattern: "", - parentDir: ".", - }, - false, - }, - { - "..", - &TargetSelector{ - fromRef: "", - exclude: false, - excludeSelf: false, - includeDependencies: false, - includeDependents: false, - namePattern: "", - parentDir: "..", - }, - false, - }, - { - "[master]", - &TargetSelector{ - fromRef: "master", - exclude: false, - excludeSelf: false, - includeDependencies: false, - includeDependents: false, - namePattern: "", - parentDir: "", - }, - false, - }, - { - "[from...to]", - &TargetSelector{ - fromRef: 
"from", - toRefOverride: "to", - }, - false, - }, - { - "{foo}[master]", - &TargetSelector{ - fromRef: "master", - exclude: false, - excludeSelf: false, - includeDependencies: false, - includeDependents: false, - namePattern: "", - parentDir: "foo", - }, - false, - }, - { - "pattern{foo}[master]", - &TargetSelector{ - fromRef: "master", - exclude: false, - excludeSelf: false, - includeDependencies: false, - includeDependents: false, - namePattern: "pattern", - parentDir: "foo", - }, - false, - }, - { - "[master]...", - &TargetSelector{ - fromRef: "master", - exclude: false, - excludeSelf: false, - includeDependencies: true, - includeDependents: false, - namePattern: "", - parentDir: "", - }, - false, - }, - { - "...[master]", - &TargetSelector{ - fromRef: "master", - exclude: false, - excludeSelf: false, - includeDependencies: false, - includeDependents: true, - namePattern: "", - parentDir: "", - }, - false, - }, - { - "...[master]...", - &TargetSelector{ - fromRef: "master", - exclude: false, - excludeSelf: false, - includeDependencies: true, - includeDependents: true, - namePattern: "", - parentDir: "", - }, - false, - }, - { - "...[from...to]...", - &TargetSelector{ - fromRef: "from", - toRefOverride: "to", - includeDependencies: true, - includeDependents: true, - }, - false, - }, - { - "foo...[master]", - &TargetSelector{ - fromRef: "master", - namePattern: "foo", - matchDependencies: true, - }, - false, - }, - { - "foo...[master]...", - &TargetSelector{ - fromRef: "master", - namePattern: "foo", - matchDependencies: true, - includeDependencies: true, - }, - false, - }, - { - "{foo}...[master]", - &TargetSelector{ - fromRef: "master", - parentDir: "foo", - matchDependencies: true, - }, - false, - }, - { - "......[master]", - &TargetSelector{}, - true, - }, - } - for _, tt := range tests { - t.Run(tt.rawSelector, func(t *testing.T) { - got, err := ParseTargetSelector(tt.rawSelector) - if tt.wantErr { - if err == nil { - t.Errorf("ParseTargetSelector() error = %#v, wantErr %#v", err, tt.wantErr) - } - } else { - // copy the raw selector from the args into what we want. This value is used - // for reporting errors in the case of a malformed selector - tt.want.raw = tt.rawSelector - if !reflect.DeepEqual(got, tt.want) { - t.Errorf("ParseTargetSelector() = %#v, want %#v", got, tt.want) - } - } - }) - } -} diff --git a/cli/internal/scope/scope.go b/cli/internal/scope/scope.go deleted file mode 100644 index 423df0bc6b479..0000000000000 --- a/cli/internal/scope/scope.go +++ /dev/null @@ -1,390 +0,0 @@ -package scope - -import ( - "fmt" - "os" - "path/filepath" - "sort" - "strings" - - "github.com/hashicorp/go-hclog" - "github.com/mitchellh/cli" - "github.com/pkg/errors" - "github.com/vercel/turbo/cli/internal/context" - "github.com/vercel/turbo/cli/internal/lockfile" - "github.com/vercel/turbo/cli/internal/scm" - scope_filter "github.com/vercel/turbo/cli/internal/scope/filter" - "github.com/vercel/turbo/cli/internal/turbopath" - "github.com/vercel/turbo/cli/internal/turbostate" - "github.com/vercel/turbo/cli/internal/util" - "github.com/vercel/turbo/cli/internal/util/filter" - "github.com/vercel/turbo/cli/internal/workspace" -) - -// LegacyFilter holds the options in use before the filter syntax. They have their own rules -// for how they are compiled into filter expressions. 
-type LegacyFilter struct { - // IncludeDependencies is whether to include pkg.dependencies in execution (defaults to false) - IncludeDependencies bool - // SkipDependents is whether to skip dependent impacted consumers in execution (defaults to false) - SkipDependents bool - // Entrypoints is a list of package entrypoints - Entrypoints []string - // Since is the git ref used to calculate changed packages - Since string -} - -var _sinceHelp = `Limit/Set scope to changed packages since a -mergebase. This uses the git diff ${target_branch}... -mechanism to identify which packages have changed.` - -func addLegacyFlagsFromArgs(opts *LegacyFilter, args *turbostate.ParsedArgsFromRust) { - opts.IncludeDependencies = args.Command.Run.IncludeDependencies - opts.SkipDependents = args.Command.Run.NoDeps - opts.Entrypoints = args.Command.Run.Scope - opts.Since = args.Command.Run.Since -} - -// Opts holds the options for how to select the entrypoint packages for a turbo run -type Opts struct { - LegacyFilter LegacyFilter - // IgnorePatterns is the list of globs of file paths to ignore from execution scope calculation - IgnorePatterns []string - // GlobalDepPatterns is a list of globs to global files whose contents will be included in the global hash calculation - GlobalDepPatterns []string - // Patterns are the filter patterns supplied to --filter on the commandline - FilterPatterns []string - - PackageInferenceRoot turbopath.RelativeSystemPath -} - -var ( - _filterHelp = `Use the given selector to specify package(s) to act as -entry points. The syntax mirrors pnpm's syntax, and -additional documentation and examples can be found in -turbo's documentation https://turbo.build/repo/docs/reference/command-line-reference/run#--filter ---filter can be specified multiple times. Packages that -match any filter will be included.` - _ignoreHelp = `Files to ignore when calculating changed files (i.e. --since). Supports globs.` - _globalDepHelp = `Specify glob of global filesystem dependencies to be hashed. Useful for .env and files -in the root directory. Includes turbo.json, root package.json, and the root lockfile by default.` -) - -// normalize package inference path. We compare against "" in several places, so maintain -// that behavior. In a post-rust-port world, this should more properly be an Option -func resolvePackageInferencePath(raw string) (turbopath.RelativeSystemPath, error) { - pkgInferenceRoot, err := turbopath.CheckedToRelativeSystemPath(raw) - if err != nil { - return "", errors.Wrapf(err, "invalid package inference root %v", raw) - } - if pkgInferenceRoot == "." { - return "", nil - } - return pkgInferenceRoot, nil -} - -// OptsFromArgs adds the settings relevant to this package to the given Opts -func OptsFromArgs(opts *Opts, args *turbostate.ParsedArgsFromRust) error { - opts.FilterPatterns = args.Command.Run.Filter - opts.IgnorePatterns = args.Command.Run.Ignore - opts.GlobalDepPatterns = args.Command.Run.GlobalDeps - pkgInferenceRoot, err := resolvePackageInferencePath(args.Command.Run.PkgInferenceRoot) - if err != nil { - return err - } - opts.PackageInferenceRoot = pkgInferenceRoot - addLegacyFlagsFromArgs(&opts.LegacyFilter, args) - return nil -} - -// AsFilterPatterns normalizes legacy selectors to filter syntax -func (l *LegacyFilter) AsFilterPatterns() []string { - var patterns []string - prefix := "" - if !l.SkipDependents { - prefix = "..." - } - suffix := "" - if l.IncludeDependencies { - suffix = "..." 
- } - since := "" - if l.Since != "" { - since = fmt.Sprintf("[%v]", l.Since) - } - if len(l.Entrypoints) > 0 { - // --scope implies our tweaked syntax to see if any dependency matches - if since != "" { - since = "..." + since - } - for _, pattern := range l.Entrypoints { - if strings.HasPrefix(pattern, "!") { - patterns = append(patterns, pattern) - } else { - filterPattern := fmt.Sprintf("%v%v%v%v", prefix, pattern, since, suffix) - patterns = append(patterns, filterPattern) - } - } - } else if since != "" { - // no scopes specified, but --since was provided - filterPattern := fmt.Sprintf("%v%v%v", prefix, since, suffix) - patterns = append(patterns, filterPattern) - } - return patterns -} - -// ResolvePackages translates specified flags to a set of entry point packages for -// the selected tasks. Returns the selected packages and whether or not the selected -// packages represents a default "all packages". -func ResolvePackages(opts *Opts, repoRoot turbopath.AbsoluteSystemPath, scm scm.SCM, ctx *context.Context, tui cli.Ui, logger hclog.Logger) (util.Set, bool, error) { - inferenceBase, err := calculateInference(repoRoot, opts.PackageInferenceRoot, ctx.WorkspaceInfos, logger) - if err != nil { - return nil, false, err - } - filterResolver := &scope_filter.Resolver{ - Graph: &ctx.WorkspaceGraph, - WorkspaceInfos: ctx.WorkspaceInfos, - Cwd: repoRoot, - Inference: inferenceBase, - PackagesChangedInRange: opts.getPackageChangeFunc(scm, repoRoot, ctx, logger), - Logger: logger, - } - filterPatterns := opts.FilterPatterns - legacyFilterPatterns := opts.LegacyFilter.AsFilterPatterns() - filterPatterns = append(filterPatterns, legacyFilterPatterns...) - logger.Debug("filter patterns", "patterns", filterPatterns) - isAllPackages := len(filterPatterns) == 0 && opts.PackageInferenceRoot == "" - filteredPkgs, err := filterResolver.GetPackagesFromPatterns(filterPatterns) - if err != nil { - return nil, false, err - } - - if isAllPackages { - logger.Debug("No filters specified, running all packages") - // no filters specified, run every package - for _, f := range ctx.WorkspaceNames { - filteredPkgs.Add(f) - } - } - filteredPkgs.Delete(ctx.RootNode) - logger.Debug("filtered packages", "packages", filteredPkgs) - return filteredPkgs, isAllPackages, nil -} - -func calculateInference(repoRoot turbopath.AbsoluteSystemPath, pkgInferencePath turbopath.RelativeSystemPath, packageInfos workspace.Catalog, logger hclog.Logger) (*scope_filter.PackageInference, error) { - if pkgInferencePath == "" { - // No inference specified, no need to calculate anything - return nil, nil - } - logger.Debug(fmt.Sprintf("Using %v as a basis for selecting packages", pkgInferencePath)) - fullInferencePath := repoRoot.Join(pkgInferencePath) - for _, pkgInfo := range packageInfos.PackageJSONs { - pkgPath := pkgInfo.Dir.RestoreAnchor(repoRoot) - inferredPathIsBelow, err := pkgPath.ContainsPath(fullInferencePath) - if err != nil { - return nil, err - } - // We skip over the root package as the inferred path will always be below it - if inferredPathIsBelow && pkgPath != repoRoot { - // set both. 
The user might have set a parent directory filter, - // in which case we *should* fail to find any packages, but we should - // do so in a consistent manner - return &scope_filter.PackageInference{ - PackageName: pkgInfo.Name, - DirectoryRoot: pkgInferencePath, - }, nil - } - inferredPathIsBetweenRootAndPkg, err := fullInferencePath.ContainsPath(pkgPath) - if err != nil { - return nil, err - } - if inferredPathIsBetweenRootAndPkg { - // we've found *some* package below our inference directory. We can stop now and conclude - // that we're looking for all packages in a subdirectory - break - } - } - return &scope_filter.PackageInference{ - DirectoryRoot: pkgInferencePath, - }, nil -} - -func (o *Opts) getPackageChangeFunc(scm scm.SCM, repoRoot turbopath.AbsoluteSystemPath, ctx *context.Context, logger hclog.Logger) scope_filter.PackagesChangedInRange { - return func(fromRef string, toRef string) (util.Set, error) { - // We could filter changed files at the git level, since it's possible - // that the changes we're interested in are scoped, but we need to handle - // global dependencies changing as well. A future optimization might be to - // scope changed files more deeply if we know there are no global dependencies. - var changedFiles []string - if fromRef != "" { - logger.Debug("Getting changed files", "from", fromRef, "to", toRef) - scmChangedFiles, err := scm.ChangedFiles(fromRef, toRef, repoRoot.ToStringDuringMigration()) - if err != nil { - return nil, err - } - sort.Strings(scmChangedFiles) - logger.Debug("Changed files", "files", scmChangedFiles) - changedFiles = scmChangedFiles - } - makeAllPkgs := func() util.Set { - allPkgs := make(util.Set) - for pkg := range ctx.WorkspaceInfos.PackageJSONs { - allPkgs.Add(pkg) - } - return allPkgs - } - if hasRepoGlobalFileChanged, err := repoGlobalFileHasChanged(o, getDefaultGlobalDeps(), changedFiles); err != nil { - return nil, err - } else if hasRepoGlobalFileChanged { - logger.Debug("Global dependencies have changed, running all packages") - return makeAllPkgs(), nil - } - - filteredChangedFiles, err := filterIgnoredFiles(o, changedFiles) - if err != nil { - return nil, err - } - changedPkgs := getChangedPackages(filteredChangedFiles, ctx.WorkspaceInfos) - logger.Debug("Changed packages", "packages", changedPkgs) - - if lockfileChanges, fullChanges := getChangesFromLockfile(repoRoot, scm, ctx, changedFiles, fromRef); !fullChanges { - for _, pkg := range lockfileChanges { - logger.Debug("adding package from lockfile", "package", pkg) - changedPkgs.Add(pkg) - } - } else { - return makeAllPkgs(), nil - } - - return changedPkgs, nil - } -} - -func getChangesFromLockfile(repoRoot turbopath.AbsoluteSystemPath, scm scm.SCM, ctx *context.Context, changedFiles []string, fromRef string) ([]string, bool) { - lockfileFilter, err := filter.Compile([]string{ctx.PackageManager.GetLockfileName(repoRoot)}) - if err != nil { - panic(fmt.Sprintf("Lockfile is invalid glob: %v", err)) - } - match := false - for _, file := range changedFiles { - if lockfileFilter.Match(file) { - match = true - break - } - } - if !match { - return nil, false - } - - if lockfile.IsNil(ctx.Lockfile) { - return nil, true - } - - // FIXME: If you move your bun lockfile then we don't track that move into the history. 
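// The scm.PreviousContent call below reconstructs the lockfile as it existed
// at fromRef. A minimal standalone equivalent, assuming the git CLI is on
// PATH, is `git show <ref>:<path>` (illustrative; the real logic lives in the
// scm package):
package main

import (
	"fmt"
	"os/exec"
)

func previousContent(repoRoot, fromRef, relPath string) ([]byte, error) {
	cmd := exec.Command("git", "show", fmt.Sprintf("%s:%s", fromRef, relPath))
	cmd.Dir = repoRoot
	return cmd.Output() // errors if the file did not exist at fromRef
}

func main() {
	contents, err := previousContent(".", "HEAD~1", "package.json")
	fmt.Println(len(contents), err)
}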
- prevContents, err := scm.PreviousContent(fromRef, ctx.PackageManager.GetLockfileName(repoRoot)) - if err != nil { - // unable to reconstruct old lockfile, assume everything changed - return nil, true - } - prevLockfile, err := ctx.PackageManager.UnmarshalLockfile(ctx.WorkspaceInfos.PackageJSONs[util.RootPkgName], prevContents) - if err != nil { - // unable to parse old lockfile, assume everything changed - return nil, true - } - additionalPkgs, err := ctx.ChangedPackages(prevLockfile) - if err != nil { - // missing at least one lockfile, assume everything changed - return nil, true - } - - return additionalPkgs, false -} - -func getDefaultGlobalDeps() []string { - // include turbo.json and root package.json as implicit global dependencies - defaultGlobalDeps := []string{ - "turbo.json", - "package.json", - } - return defaultGlobalDeps -} - -func repoGlobalFileHasChanged(opts *Opts, defaultGlobalDeps []string, changedFiles []string) (bool, error) { - globalDepsGlob, err := filter.Compile(append(opts.GlobalDepPatterns, defaultGlobalDeps...)) - if err != nil { - return false, errors.Wrap(err, "invalid global deps glob") - } - - if globalDepsGlob != nil { - for _, file := range changedFiles { - if globalDepsGlob.Match(filepath.ToSlash(file)) { - return true, nil - } - } - } - return false, nil -} - -func filterIgnoredFiles(opts *Opts, changedFiles []string) ([]string, error) { - // changedFiles is an array of repo-relative system paths. - // opts.IgnorePatterns is an array of unix-separator glob paths. - ignoreGlob, err := filter.Compile(opts.IgnorePatterns) - if err != nil { - return nil, errors.Wrap(err, "invalid ignore globs") - } - filteredChanges := []string{} - for _, file := range changedFiles { - // If we don't have anything to ignore, or if this file doesn't match the ignore pattern, - // keep it as a changed file. - if ignoreGlob == nil || !ignoreGlob.Match(filepath.ToSlash(file)) { - filteredChanges = append(filteredChanges, file) - } - } - return filteredChanges, nil -} - -func fileInPackage(changedFile string, packagePath string) bool { - // This whole method is basically this regex: /^.*\/?$/ - // The regex is more-expensive, so we don't do it. - - // If it has the prefix, it might be in the package. - if strings.HasPrefix(changedFile, packagePath) { - // Now we need to see if the prefix stopped at a reasonable boundary. - prefixLen := len(packagePath) - changedFileLen := len(changedFile) - - // Same path. - if prefixLen == changedFileLen { - return true - } - - // We know changedFile is longer than packagePath. - // We can safely directly index into it. - // Look ahead one byte and see if it's the separator. - if changedFile[prefixLen] == os.PathSeparator { - return true - } - } - - // If it does not have the prefix, it's definitely not in the package. 
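// The whole check, restated as a standalone sketch to show why the separator
// byte matters: without it, a change in app/app2-a would also be attributed to
// app/app2 (cf. the "overlapping prefix" test case in scope_test.go below).
// Example paths use unix separators.
package main

import (
	"fmt"
	"os"
	"strings"
)

func fileInPackage(changedFile, packagePath string) bool {
	if !strings.HasPrefix(changedFile, packagePath) {
		return false
	}
	if len(changedFile) == len(packagePath) {
		return true // same path
	}
	// The prefix must stop at a path boundary.
	return changedFile[len(packagePath)] == os.PathSeparator
}

func main() {
	fmt.Println(fileInPackage("app/app2/src/index.js", "app/app2"))   // true (on unix)
	fmt.Println(fileInPackage("app/app2-a/src/index.js", "app/app2")) // false
}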
- return false -} - -func getChangedPackages(changedFiles []string, packageInfos workspace.Catalog) util.Set { - changedPackages := make(util.Set) - for _, changedFile := range changedFiles { - found := false - for pkgName, pkgInfo := range packageInfos.PackageJSONs { - if pkgName != util.RootPkgName && fileInPackage(changedFile, pkgInfo.Dir.ToStringDuringMigration()) { - changedPackages.Add(pkgName) - found = true - break - } - } - if !found { - // Consider the root package to have changed - changedPackages.Add(util.RootPkgName) - } - } - return changedPackages -} diff --git a/cli/internal/scope/scope_test.go b/cli/internal/scope/scope_test.go deleted file mode 100644 index 5f0525b244b92..0000000000000 --- a/cli/internal/scope/scope_test.go +++ /dev/null @@ -1,553 +0,0 @@ -package scope - -import ( - "fmt" - "io" - "os" - "path/filepath" - "reflect" - "testing" - - "github.com/hashicorp/go-hclog" - "github.com/pyr-sh/dag" - "github.com/vercel/turbo/cli/internal/context" - "github.com/vercel/turbo/cli/internal/fs" - "github.com/vercel/turbo/cli/internal/lockfile" - "github.com/vercel/turbo/cli/internal/packagemanager" - "github.com/vercel/turbo/cli/internal/turbopath" - "github.com/vercel/turbo/cli/internal/ui" - "github.com/vercel/turbo/cli/internal/util" - "github.com/vercel/turbo/cli/internal/workspace" -) - -type mockSCM struct { - changed []string - contents map[string][]byte -} - -func (m *mockSCM) ChangedFiles(_fromCommit string, _toCommit string, _relativeTo string) ([]string, error) { - return m.changed, nil -} - -func (m *mockSCM) PreviousContent(fromCommit string, filePath string) ([]byte, error) { - contents, ok := m.contents[filePath] - if !ok { - return nil, fmt.Errorf("No contents found") - } - return contents, nil -} - -type mockLockfile struct { - globalChange bool - versions map[string]string - allDeps map[string]map[string]string -} - -func (m *mockLockfile) ResolvePackage(workspacePath turbopath.AnchoredUnixPath, name string, version string) (lockfile.Package, error) { - resolvedVersion, ok := m.versions[name] - if ok { - key := fmt.Sprintf("%s%s", name, version) - return lockfile.Package{Key: key, Version: resolvedVersion, Found: true}, nil - } - return lockfile.Package{Found: false}, nil -} - -func (m *mockLockfile) AllDependencies(key string) (map[string]string, bool) { - deps, ok := m.allDeps[key] - return deps, ok -} - -func (m *mockLockfile) Encode(w io.Writer) error { - return nil -} - -func (m *mockLockfile) GlobalChange(other lockfile.Lockfile) bool { - return m.globalChange || (other != nil && other.(*mockLockfile).globalChange) -} - -func (m *mockLockfile) Patches() []turbopath.AnchoredUnixPath { - return nil -} - -func (m *mockLockfile) Subgraph(workspaces []turbopath.AnchoredSystemPath, packages []string) (lockfile.Lockfile, error) { - return nil, nil -} - -var _ (lockfile.Lockfile) = (*mockLockfile)(nil) - -func TestResolvePackages(t *testing.T) { - cwd, err := os.Getwd() - if err != nil { - t.Fatalf("cwd: %v", err) - } - root, err := fs.GetCwd(cwd) - if err != nil { - t.Fatalf("cwd: %v", err) - } - defaultUIFactory := ui.ColoredUIFactory{ - Base: &ui.BasicUIFactory{}, - } - tui := defaultUIFactory.Build(os.Stdin, os.Stdout, os.Stderr) - logger := hclog.Default() - // Dependency graph: - // - // app0 - - // \ - // app1 -> libA - // \ - // > libB -> libD - // / - // app2 < - // \ - // > libC - // / - // app2-a < - // - // Filesystem layout: - // - // app/ - // app0 - // app1 - // app2 - // app2-a - // libs/ - // libA - // libB - // libC - // libD - graph := 
dag.AcyclicGraph{} - graph.Add("app0") - graph.Add("app1") - graph.Add("app2") - graph.Add("app2-a") - graph.Add("libA") - graph.Add("libB") - graph.Add("libC") - graph.Add("libD") - graph.Connect(dag.BasicEdge("libA", "libB")) - graph.Connect(dag.BasicEdge("libB", "libD")) - graph.Connect(dag.BasicEdge("app0", "libA")) - graph.Connect(dag.BasicEdge("app1", "libA")) - graph.Connect(dag.BasicEdge("app2", "libB")) - graph.Connect(dag.BasicEdge("app2", "libC")) - graph.Connect(dag.BasicEdge("app2-a", "libC")) - workspaceInfos := workspace.Catalog{ - PackageJSONs: map[string]*fs.PackageJSON{ - "//": { - Dir: turbopath.AnchoredSystemPath("").ToSystemPath(), - UnresolvedExternalDeps: map[string]string{"global": "2"}, - TransitiveDeps: []lockfile.Package{{Key: "global2", Version: "2", Found: true}}, - }, - "app0": { - Dir: turbopath.AnchoredUnixPath("app/app0").ToSystemPath(), - Name: "app0", - UnresolvedExternalDeps: map[string]string{"app0-dep": "2"}, - TransitiveDeps: []lockfile.Package{ - {Key: "app0-dep2", Version: "2", Found: true}, - {Key: "app0-util2", Version: "2", Found: true}, - }, - }, - "app1": { - Dir: turbopath.AnchoredUnixPath("app/app1").ToSystemPath(), - Name: "app1", - }, - "app2": { - Dir: turbopath.AnchoredUnixPath("app/app2").ToSystemPath(), - Name: "app2", - }, - "app2-a": { - Dir: turbopath.AnchoredUnixPath("app/app2-a").ToSystemPath(), - Name: "app2-a", - }, - "libA": { - Dir: turbopath.AnchoredUnixPath("libs/libA").ToSystemPath(), - Name: "libA", - }, - "libB": { - Dir: turbopath.AnchoredUnixPath("libs/libB").ToSystemPath(), - Name: "libB", - UnresolvedExternalDeps: map[string]string{"external": "1"}, - TransitiveDeps: []lockfile.Package{ - {Key: "external-dep-a1", Version: "1", Found: true}, - {Key: "external-dep-b1", Version: "1", Found: true}, - {Key: "external1", Version: "1", Found: true}, - }, - }, - "libC": { - Dir: turbopath.AnchoredUnixPath("libs/libC").ToSystemPath(), - Name: "libC", - }, - "libD": { - Dir: turbopath.AnchoredUnixPath("libs/libD").ToSystemPath(), - Name: "libD", - }, - }, - } - packageNames := []string{} - for name := range workspaceInfos.PackageJSONs { - packageNames = append(packageNames, name) - } - - // global -> globalDep - // app0-dep -> app0-dep :) - - makeLockfile := func(f func(*mockLockfile)) *mockLockfile { - l := mockLockfile{ - globalChange: false, - versions: map[string]string{ - "global": "2", - "app0-dep": "2", - "app0-util": "2", - "external": "1", - "external-dep-a": "1", - "external-dep-b": "1", - }, - allDeps: map[string]map[string]string{ - "global2": map[string]string{}, - "app0-dep2": map[string]string{ - "app0-util": "2", - }, - "app0-util2": map[string]string{}, - "external1": map[string]string{ - "external-dep-a": "1", - "external-dep-b": "1", - }, - "external-dep-a1": map[string]string{}, - "external-dep-b1": map[string]string{}, - }, - } - if f != nil { - f(&l) - } - return &l - } - - testCases := []struct { - name string - changed []string - expected []string - expectAllPackages bool - scope []string - since string - ignore string - globalDeps []string - includeDependencies bool - includeDependents bool - lockfile string - currLockfile *mockLockfile - prevLockfile *mockLockfile - inferPkgPath string - }{ - { - name: "Just scope and dependencies", - changed: []string{}, - includeDependencies: true, - scope: []string{"app2"}, - expected: []string{"app2", "libB", "libC", "libD"}, - }, - { - name: "Only turbo.json changed", - changed: []string{"turbo.json"}, - expected: []string{"//", "app0", "app1", "app2", "app2-a", 
"libA", "libB", "libC", "libD"}, - since: "dummy", - includeDependencies: true, - }, - { - name: "Only root package.json changed", - changed: []string{"package.json"}, - expected: []string{"//", "app0", "app1", "app2", "app2-a", "libA", "libB", "libC", "libD"}, - since: "dummy", - includeDependencies: true, - }, - { - name: "Only package-lock.json changed", - changed: []string{"package-lock.json"}, - expected: []string{"//", "app0", "app1", "app2", "app2-a", "libA", "libB", "libC", "libD"}, - since: "dummy", - includeDependencies: true, - lockfile: "package-lock.json", - }, - { - name: "Only yarn.lock changed", - changed: []string{"yarn.lock"}, - expected: []string{"//", "app0", "app1", "app2", "app2-a", "libA", "libB", "libC", "libD"}, - since: "dummy", - includeDependencies: true, - lockfile: "yarn.lock", - }, - { - name: "Only pnpm-lock.yaml changed", - changed: []string{"pnpm-lock.yaml"}, - expected: []string{"//", "app0", "app1", "app2", "app2-a", "libA", "libB", "libC", "libD"}, - since: "dummy", - includeDependencies: true, - lockfile: "pnpm-lock.yaml", - }, - { - name: "One package changed", - changed: []string{"libs/libB/src/index.ts"}, - expected: []string{"libB"}, - since: "dummy", - }, - { - name: "One package manifest changed", - changed: []string{"libs/libB/package.json"}, - expected: []string{"libB"}, - since: "dummy", - }, - { - name: "An ignored package changed", - changed: []string{"libs/libB/src/index.ts"}, - expected: []string{}, - since: "dummy", - ignore: "libs/libB/**/*.ts", - }, - { - // nothing in scope depends on the change - name: "unrelated library changed", - changed: []string{"libs/libC/src/index.ts"}, - expected: []string{}, - since: "dummy", - scope: []string{"app1"}, - includeDependencies: true, // scope implies include-dependencies - }, - { - // a dependent lib changed, scope implies include-dependencies, - // so all deps of app1 get built - name: "dependency of scope changed", - changed: []string{"libs/libA/src/index.ts"}, - expected: []string{"libA", "libB", "libD", "app1"}, - since: "dummy", - scope: []string{"app1"}, - includeDependencies: true, // scope implies include-dependencies - }, - { - // a dependent lib changed, user explicitly asked to not build dependencies. - // Since the package matching the scope had a changed dependency, we run it. - // We don't include its dependencies because the user asked for no dependencies. 
- // note: this is not yet supported by the CLI, as you cannot specify --include-dependencies=false - name: "dependency of scope changed, user asked to not include depedencies", - changed: []string{"libs/libA/src/index.ts"}, - expected: []string{"app1"}, - since: "dummy", - scope: []string{"app1"}, - includeDependencies: false, - }, - { - // a nested dependent lib changed, user explicitly asked to not build dependencies - // note: this is not yet supported by the CLI, as you cannot specify --include-dependencies=false - name: "nested dependency of scope changed, user asked to not include dependencies", - changed: []string{"libs/libB/src/index.ts"}, - expected: []string{"app1"}, - since: "dummy", - scope: []string{"app1"}, - includeDependencies: false, - }, - { - name: "global dependency changed, even though it was ignored, forcing a build of everything", - changed: []string{"libs/libB/src/index.ts"}, - expected: []string{"//", "app0", "app1", "app2", "app2-a", "libA", "libB", "libC", "libD"}, - since: "dummy", - ignore: "libs/libB/**/*.ts", - globalDeps: []string{"libs/**/*.ts"}, - }, - { - name: "an app changed, user asked for dependencies to build", - changed: []string{"app/app2/src/index.ts"}, - since: "dummy", - includeDependencies: true, - expected: []string{"app2", "libB", "libC", "libD"}, - }, - { - name: "a library changed, user asked for dependents to be built", - changed: []string{"libs/libB"}, - since: "dummy", - includeDependents: true, - expected: []string{"app0", "app1", "app2", "libA", "libB"}, - }, - { - // no changes, no base to compare against, defaults to everything - name: "no changes or scope specified, build everything", - since: "", - expected: []string{"//", "app0", "app1", "app2", "app2-a", "libA", "libB", "libC", "libD"}, - expectAllPackages: true, - }, - { - // a dependent library changed, no deps beyond the scope are build - // "libB" is still built because it is a dependent within the scope, but libB's dependents - // are skipped - name: "a dependent library changed, build up to scope", - changed: []string{"libs/libD/src/index.ts"}, - since: "dummy", - scope: []string{"libB"}, - expected: []string{"libB", "libD"}, - includeDependencies: true, // scope implies include-dependencies - }, - { - name: "library change, no scope", - changed: []string{"libs/libA/src/index.ts"}, - expected: []string{"libA", "app0", "app1"}, - includeDependents: true, - since: "dummy", - }, - { - // make sure multiple apps with the same prefix are handled separately. 
- // prevents this issue: https://github.com/vercel/turbo/issues/1528 - name: "Two apps with an overlapping prefix changed", - changed: []string{"app/app2/src/index.js", "app/app2-a/src/index.js"}, - expected: []string{"app2", "app2-a"}, - since: "dummy", - }, - { - name: "Global lockfile change invalidates all packages", - changed: []string{"dummy.lock"}, - expected: []string{"//", "app0", "app1", "app2", "app2-a", "libA", "libB", "libC", "libD"}, - lockfile: "dummy.lock", - currLockfile: makeLockfile(nil), - prevLockfile: makeLockfile(func(ml *mockLockfile) { - ml.globalChange = true - }), - since: "dummy", - }, - { - name: "Dependency of workspace root change invalidates all packages", - changed: []string{"dummy.lock"}, - expected: []string{"//", "app0", "app1", "app2", "app2-a", "libA", "libB", "libC", "libD"}, - lockfile: "dummy.lock", - currLockfile: makeLockfile(nil), - prevLockfile: makeLockfile(func(ml *mockLockfile) { - ml.versions["global"] = "3" - ml.allDeps["global3"] = map[string]string{} - }), - since: "dummy", - }, - { - name: "Version change invalidates package", - changed: []string{"dummy.lock"}, - expected: []string{"//", "app0"}, - lockfile: "dummy.lock", - currLockfile: makeLockfile(nil), - prevLockfile: makeLockfile(func(ml *mockLockfile) { - ml.versions["app0-util"] = "3" - ml.allDeps["app0-dep2"] = map[string]string{"app0-util": "3"} - ml.allDeps["app0-util3"] = map[string]string{} - }), - since: "dummy", - }, - { - name: "Transitive dep invalidates package", - changed: []string{"dummy.lock"}, - expected: []string{"//", "libB"}, - lockfile: "dummy.lock", - currLockfile: makeLockfile(nil), - prevLockfile: makeLockfile(func(ml *mockLockfile) { - ml.versions["external-dep-a"] = "2" - ml.allDeps["external1"] = map[string]string{"external-dep-a": "2", "external-dep-b": "1"} - ml.allDeps["external-dep-a2"] = map[string]string{} - }), - since: "dummy", - }, - { - name: "Transitive dep invalidates package and dependents", - changed: []string{"dummy.lock"}, - expected: []string{"//", "app0", "app1", "app2", "libA", "libB"}, - lockfile: "dummy.lock", - includeDependents: true, - currLockfile: makeLockfile(nil), - prevLockfile: makeLockfile(func(ml *mockLockfile) { - ml.versions["external-dep-a"] = "2" - ml.allDeps["external1"] = map[string]string{"external-dep-a": "2", "external-dep-b": "1"} - ml.allDeps["external-dep-a2"] = map[string]string{} - }), - since: "dummy", - }, - { - name: "Infer app2 from directory", - inferPkgPath: "app/app2", - expected: []string{"app2"}, - }, - { - name: "Infer app2 from a subdirectory", - inferPkgPath: "app/app2/src", - expected: []string{"app2"}, - }, - { - name: "Infer from a directory with no packages", - inferPkgPath: "wrong", - expected: []string{}, - }, - { - name: "Infer from a parent directory", - inferPkgPath: "app", - expected: []string{"app0", "app1", "app2", "app2-a"}, - }, - { - name: "library change, no scope, inferred libs", - changed: []string{"libs/libA/src/index.ts"}, - expected: []string{"libA"}, - since: "dummy", - inferPkgPath: "libs", - }, - { - name: "library change, no scope, inferred app", - changed: []string{"libs/libA/src/index.ts"}, - expected: []string{}, - since: "dummy", - inferPkgPath: "app", - }, - } - for i, tc := range testCases { - t.Run(fmt.Sprintf("test #%v %v", i, tc.name), func(t *testing.T) { - // Convert test data to system separators. 
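// filepath.FromSlash is the conversion being applied here: a no-op on unix,
// "/" -> "\" on Windows, which is why the fixtures above are written with
// forward slashes (standalone sketch):
package main

import (
	"fmt"
	"path/filepath"
)

func main() {
	fmt.Println(filepath.FromSlash("libs/libB/src/index.ts"))
	// unix:    libs/libB/src/index.ts
	// windows: libs\libB\src\index.ts
}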
- systemSeparatorChanged := make([]string, len(tc.changed)) - for index, path := range tc.changed { - systemSeparatorChanged[index] = filepath.FromSlash(path) - } - scm := &mockSCM{ - changed: systemSeparatorChanged, - contents: make(map[string][]byte, len(systemSeparatorChanged)), - } - for _, path := range systemSeparatorChanged { - scm.contents[path] = nil - } - readLockfile := func(_rootPackageJSON *fs.PackageJSON, content []byte) (lockfile.Lockfile, error) { - return tc.prevLockfile, nil - } - pkgInferenceRoot, err := resolvePackageInferencePath(tc.inferPkgPath) - if err != nil { - t.Errorf("bad inference path (%v): %v", tc.inferPkgPath, err) - } - pkgs, isAllPackages, err := ResolvePackages(&Opts{ - LegacyFilter: LegacyFilter{ - Entrypoints: tc.scope, - Since: tc.since, - IncludeDependencies: tc.includeDependencies, - SkipDependents: !tc.includeDependents, - }, - IgnorePatterns: []string{tc.ignore}, - GlobalDepPatterns: tc.globalDeps, - PackageInferenceRoot: pkgInferenceRoot, - }, root, scm, &context.Context{ - WorkspaceInfos: workspaceInfos, - WorkspaceNames: packageNames, - PackageManager: &packagemanager.PackageManager{Lockfile: tc.lockfile, UnmarshalLockfile: readLockfile, GetLockfileName: func(_ turbopath.AbsoluteSystemPath) string { return tc.lockfile }}, - WorkspaceGraph: graph, - RootNode: "root", - Lockfile: tc.currLockfile, - }, tui, logger) - if err != nil { - t.Errorf("expected no error, got %v", err) - } - expected := make(util.Set) - for _, pkg := range tc.expected { - expected.Add(pkg) - } - if !reflect.DeepEqual(pkgs, expected) { - t.Errorf("ResolvePackages got %v, want %v", pkgs, expected) - } - if isAllPackages != tc.expectAllPackages { - t.Errorf("isAllPackages got %v, want %v", isAllPackages, tc.expectAllPackages) - } - }) - } -} diff --git a/cli/internal/server/server.go b/cli/internal/server/server.go deleted file mode 100644 index 4c7c42c8548b6..0000000000000 --- a/cli/internal/server/server.go +++ /dev/null @@ -1,203 +0,0 @@ -package server - -import ( - "context" - "sync" - "time" - - "github.com/hashicorp/go-hclog" - "github.com/pkg/errors" - "github.com/vercel/turbo/cli/internal/filewatcher" - "github.com/vercel/turbo/cli/internal/fs" - "github.com/vercel/turbo/cli/internal/fs/hash" - "github.com/vercel/turbo/cli/internal/globwatcher" - "github.com/vercel/turbo/cli/internal/turbodprotocol" - "github.com/vercel/turbo/cli/internal/turbopath" - "google.golang.org/grpc" - codes "google.golang.org/grpc/codes" - status "google.golang.org/grpc/status" -) - -// Server implements the GRPC serverside of TurbodServer -// Note for the future: we don't yet make use of turbo.json -// or the package graph in the server. Once we do, we may need a -// layer of indirection between "the thing that responds to grpc requests" -// and "the thing that holds our persistent data structures" to handle -// changes in the underlying configuration. -type Server struct { - turbodprotocol.UnimplementedTurbodServer - watcher *filewatcher.FileWatcher - globWatcher *globwatcher.GlobWatcher - turboVersion string - started time.Time - logFilePath turbopath.AbsoluteSystemPath - repoRoot turbopath.AbsoluteSystemPath - closerMu sync.Mutex - closer *closer - timeSavedMu sync.Mutex - timesSaved map[string]uint64 -} - -// GRPCServer is the interface that the turbo server needs to the underlying -// GRPC server. This lets the turbo server register itself, as well as provides -// a hook for shutting down the server. 
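// Both the production *grpc.Server and the mockGrpc test double in
// server_test.go below satisfy this two-method surface. A compile-time
// check (standalone sketch; the interface is restated so it compiles alone):
package main

import "google.golang.org/grpc"

type GRPCServer interface {
	grpc.ServiceRegistrar
	GracefulStop()
}

// Fails to compile if *grpc.Server ever stops satisfying the interface.
var _ GRPCServer = (*grpc.Server)(nil)

func main() {}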
-type GRPCServer interface { - grpc.ServiceRegistrar - GracefulStop() -} - -type closer struct { - grpcServer GRPCServer - once sync.Once -} - -func (c *closer) close() { - // This can get triggered from a request handler (Shutdown). Since - // calling GracefulStop blocks until all request handlers complete, - // we need to run it in a goroutine to let the Shutdown handler complete - // and avoid deadlocking. - c.once.Do(func() { - go func() { - c.grpcServer.GracefulStop() - }() - }) -} - -var _defaultCookieTimeout = 500 * time.Millisecond - -// New returns a new instance of Server -func New(serverName string, logger hclog.Logger, repoRoot turbopath.AbsoluteSystemPath, turboVersion string, logFilePath turbopath.AbsoluteSystemPath) (*Server, error) { - cookieDir := fs.GetTurboDataDir().UntypedJoin("cookies", serverName) - cookieJar, err := filewatcher.NewCookieJar(cookieDir, _defaultCookieTimeout) - if err != nil { - return nil, err - } - watcher, err := filewatcher.GetPlatformSpecificBackend(logger) - if err != nil { - return nil, err - } - fileWatcher := filewatcher.New(logger.Named("FileWatcher"), repoRoot, watcher) - globWatcher := globwatcher.New(logger.Named("GlobWatcher"), repoRoot, cookieJar) - server := &Server{ - watcher: fileWatcher, - globWatcher: globWatcher, - turboVersion: turboVersion, - started: time.Now(), - logFilePath: logFilePath, - repoRoot: repoRoot, - timesSaved: map[string]uint64{}, - } - server.watcher.AddClient(cookieJar) - server.watcher.AddClient(globWatcher) - server.watcher.AddClient(server) - if err := server.watcher.Start(); err != nil { - return nil, errors.Wrapf(err, "watching %v", repoRoot) - } - if err := server.watcher.AddRoot(cookieDir); err != nil { - _ = server.watcher.Close() - return nil, errors.Wrapf(err, "failed to watch cookie directory: %v", cookieDir) - } - return server, nil -} - -func (s *Server) tryClose() bool { - s.closerMu.Lock() - defer s.closerMu.Unlock() - if s.closer != nil { - s.closer.close() - return true - } - return false -} - -// OnFileWatchEvent implements filewatcher.FileWatchClient.OnFileWatchEvent -// In the event that the root of the monorepo is deleted, shut down the server. 
-func (s *Server) OnFileWatchEvent(ev filewatcher.Event) { - if ev.EventType == filewatcher.FileDeleted && ev.Path == s.repoRoot { - _ = s.tryClose() - } -} - -// OnFileWatchError implements filewatcher.FileWatchClient.OnFileWatchError -func (s *Server) OnFileWatchError(err error) {} - -// OnFileWatchClosed implements filewatcher.FileWatchClient.OnFileWatchClosed -func (s *Server) OnFileWatchClosed() {} - -// Close is used for shutting down this copy of the server -func (s *Server) Close() error { - return s.watcher.Close() -} - -// Register registers this server to respond to GRPC requests -func (s *Server) Register(grpcServer GRPCServer) { - s.closerMu.Lock() - s.closer = &closer{ - grpcServer: grpcServer, - } - s.closerMu.Unlock() - turbodprotocol.RegisterTurbodServer(grpcServer, s) -} - -// NotifyOutputsWritten implements the NotifyOutputsWritten rpc from turbo.proto -func (s *Server) NotifyOutputsWritten(ctx context.Context, req *turbodprotocol.NotifyOutputsWrittenRequest) (*turbodprotocol.NotifyOutputsWrittenResponse, error) { - s.timeSavedMu.Lock() - s.timesSaved[req.Hash] = req.TimeSaved - s.timeSavedMu.Unlock() - outputs := hash.TaskOutputs{ - Inclusions: req.OutputGlobs, - Exclusions: req.OutputExclusionGlobs, - } - - err := s.globWatcher.WatchGlobs(req.Hash, outputs) - if err != nil { - return nil, err - } - return &turbodprotocol.NotifyOutputsWrittenResponse{}, nil -} - -// GetChangedOutputs implements the GetChangedOutputs rpc from turbo.proto -func (s *Server) GetChangedOutputs(ctx context.Context, req *turbodprotocol.GetChangedOutputsRequest) (*turbodprotocol.GetChangedOutputsResponse, error) { - s.timeSavedMu.Lock() - timeSaved := s.timesSaved[req.Hash] - s.timeSavedMu.Unlock() - - changedGlobs, err := s.globWatcher.GetChangedGlobs(req.Hash, req.OutputGlobs) - if err != nil { - return nil, err - } - return &turbodprotocol.GetChangedOutputsResponse{ - ChangedOutputGlobs: changedGlobs, - TimeSaved: timeSaved, - }, nil -} - -// Hello implements the Hello rpc from turbo.proto -func (s *Server) Hello(ctx context.Context, req *turbodprotocol.HelloRequest) (*turbodprotocol.HelloResponse, error) { - clientVersion := req.Version - if clientVersion != s.turboVersion { - err := status.Errorf(codes.FailedPrecondition, "version mismatch. 
Client %v Server %v", clientVersion, s.turboVersion) - return nil, err - } - return &turbodprotocol.HelloResponse{}, nil -} - -// Shutdown implements the Shutdown rpc from turbo.proto -func (s *Server) Shutdown(ctx context.Context, req *turbodprotocol.ShutdownRequest) (*turbodprotocol.ShutdownResponse, error) { - if s.tryClose() { - return &turbodprotocol.ShutdownResponse{}, nil - } - err := status.Error(codes.NotFound, "shutdown mechanism not found") - return nil, err -} - -// Status implements the Status rpc from turbo.proto -func (s *Server) Status(ctx context.Context, req *turbodprotocol.StatusRequest) (*turbodprotocol.StatusResponse, error) { - uptime := uint64(time.Since(s.started).Milliseconds()) - return &turbodprotocol.StatusResponse{ - DaemonStatus: &turbodprotocol.DaemonStatus{ - LogFile: s.logFilePath.ToString(), - UptimeMsec: uptime, - }, - }, nil -} diff --git a/cli/internal/server/server_test.go b/cli/internal/server/server_test.go deleted file mode 100644 index b7dcf3a15bcbe..0000000000000 --- a/cli/internal/server/server_test.go +++ /dev/null @@ -1,73 +0,0 @@ -package server - -import ( - "context" - "testing" - "time" - - "github.com/hashicorp/go-hclog" - "google.golang.org/grpc" - "gotest.tools/v3/assert" - - turbofs "github.com/vercel/turbo/cli/internal/fs" - "github.com/vercel/turbo/cli/internal/turbodprotocol" -) - -type mockGrpc struct { - stopped chan struct{} -} - -func (m *mockGrpc) GracefulStop() { - close(m.stopped) -} - -func (m *mockGrpc) RegisterService(desc *grpc.ServiceDesc, impl interface{}) {} - -func TestDeleteRepoRoot(t *testing.T) { - logger := hclog.Default() - logger.SetLevel(hclog.Debug) - repoRootRaw := t.TempDir() - repoRoot := turbofs.AbsoluteSystemPathFromUpstream(repoRootRaw) - - grpcServer := &mockGrpc{ - stopped: make(chan struct{}), - } - - s, err := New("testServer", logger, repoRoot, "some-version", "/log/file/path") - assert.NilError(t, err, "New") - s.Register(grpcServer) - - // Delete the repo root, ensure that GracefulStop got called - err = repoRoot.Remove() - assert.NilError(t, err, "Remove") - - select { - case <-grpcServer.stopped: - case <-time.After(2 * time.Second): - t.Error("timed out waiting for graceful stop to be called") - } -} - -func TestShutdown(t *testing.T) { - logger := hclog.Default() - repoRootRaw := t.TempDir() - repoRoot := turbofs.AbsoluteSystemPathFromUpstream(repoRootRaw) - - grpcServer := &mockGrpc{ - stopped: make(chan struct{}), - } - - s, err := New("testServer", logger, repoRoot, "some-version", "/log/file/path") - assert.NilError(t, err, "New") - s.Register(grpcServer) - - ctx := context.Background() - _, err = s.Shutdown(ctx, &turbodprotocol.ShutdownRequest{}) - assert.NilError(t, err, "Shutdown") - // Ensure that graceful stop gets called - select { - case <-grpcServer.stopped: - case <-time.After(2 * time.Second): - t.Error("timed out waiting for graceful stop to be called") - } -} diff --git a/cli/internal/signals/signals.go b/cli/internal/signals/signals.go deleted file mode 100644 index 8634144f02b49..0000000000000 --- a/cli/internal/signals/signals.go +++ /dev/null @@ -1,60 +0,0 @@ -package signals - -import ( - "os" - "os/signal" - "sync" - "syscall" -) - -// Watcher watches for signals delivered to this process and provides -// the opportunity for turbo to run cleanup -type Watcher struct { - doneCh chan struct{} - closed bool - mu sync.Mutex - closers []func() -} - -// AddOnClose registers a cleanup handler to run when a signal is received -func (w *Watcher) AddOnClose(closer func()) { - 
w.mu.Lock() - defer w.mu.Unlock() - w.closers = append(w.closers, closer) -} - -// Close runs the cleanup handlers registered with this watcher -func (w *Watcher) Close() { - w.mu.Lock() - defer w.mu.Unlock() - if w.closed { - return - } - w.closed = true - for _, closer := range w.closers { - closer() - } - w.closers = nil - close(w.doneCh) -} - -// Done returns a channel that will be closed after all of the cleanup -// handlers have been run. -func (w *Watcher) Done() <-chan struct{} { - return w.doneCh -} - -// NewWatcher returns a new Watcher instance for watching signals. -func NewWatcher() *Watcher { - // TODO: platform specific signals to watch for? - signalCh := make(chan os.Signal, 1) - signal.Notify(signalCh, os.Interrupt, syscall.SIGTERM, syscall.SIGQUIT) - w := &Watcher{ - doneCh: make(chan struct{}), - } - go func() { - <-signalCh - w.Close() - }() - return w -} diff --git a/cli/internal/spinner/spinner.go b/cli/internal/spinner/spinner.go deleted file mode 100644 index 812e53bc0cfe8..0000000000000 --- a/cli/internal/spinner/spinner.go +++ /dev/null @@ -1,91 +0,0 @@ -package spinner - -import ( - "context" - "fmt" - "io" - "time" - - "github.com/mitchellh/cli" - progressbar "github.com/schollz/progressbar/v3" - "github.com/vercel/turbo/cli/internal/ui" -) - -// getWriterAndColor unwraps cli.Ui instances until it gets to a BasicUi. -// If it happens to spot a ColoredUi along the way, it marks that color is -// enabled. -func getWriterAndColor(terminal cli.Ui, useColor bool) (io.Writer, bool) { - switch terminal := terminal.(type) { - case *cli.BasicUi: - return terminal.Writer, useColor - case *ui.BasicUI: - return terminal.Writer, useColor - case *cli.ColoredUi: - return getWriterAndColor(terminal.Ui, true) - case *cli.ConcurrentUi: - return getWriterAndColor(terminal.Ui, useColor) - case *cli.PrefixedUi: - return getWriterAndColor(terminal.Ui, useColor) - case *cli.MockUi: - return terminal.OutputWriter, false - default: - panic(fmt.Sprintf("unknown Ui: %v", terminal)) - } -} - -// WaitFor runs fn, and prints msg to the terminal if it takes longer -// than initialDelay to complete. Depending on the terminal configuration, it may -// display a single instance of msg, or an infinite spinner, updated every 250ms. 
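// Hypothetical call site, as a standalone sketch: the message prints only if
// the wrapped work outlives the delay. A plain BasicUi is enough for a demo;
// the sleep stands in for, e.g., a slow daemon connection.
package main

import (
	"context"
	"os"
	"time"

	"github.com/mitchellh/cli"
	"github.com/vercel/turbo/cli/internal/spinner"
)

func main() {
	terminal := &cli.BasicUi{Writer: os.Stdout, ErrorWriter: os.Stderr}
	_ = spinner.WaitFor(context.Background(), func() {
		time.Sleep(2 * time.Second)
	}, terminal, "still waiting...", 500*time.Millisecond)
}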
-func WaitFor(ctx context.Context, fn func(), terminal cli.Ui, msg string, initialDelay time.Duration) error { - doneCh := make(chan struct{}) - go func() { - fn() - close(doneCh) - }() - if ui.IsTTY { - select { - case <-ctx.Done(): - return nil - case <-time.After(initialDelay): - writer, useColor := getWriterAndColor(terminal, false) - bar := progressbar.NewOptions( - -1, - progressbar.OptionEnableColorCodes(useColor), - progressbar.OptionSetDescription(fmt.Sprintf("[yellow]%v[reset]", msg)), - progressbar.OptionSpinnerType(14), - progressbar.OptionSetWriter(writer), - ) - for { - select { - case <-doneCh: - err := bar.Finish() - terminal.Output("") - return err - case <-time.After(250 * time.Millisecond): - if err := bar.Add(1); err != nil { - return err - } - case <-ctx.Done(): - return nil - } - } - case <-doneCh: - return nil - } - } else { - // wait for the timeout before displaying a message, even with no tty - select { - case <-ctx.Done(): - return nil - case <-doneCh: - return nil - case <-time.After(initialDelay): - terminal.Output(msg) - } - select { - case <-ctx.Done(): - case <-doneCh: - } - return nil - } -} diff --git a/cli/internal/tarpatch/tar.go b/cli/internal/tarpatch/tar.go deleted file mode 100644 index a4dab233d34a0..0000000000000 --- a/cli/internal/tarpatch/tar.go +++ /dev/null @@ -1,92 +0,0 @@ -// Adapted from https://github.com/moby/moby/blob/924edb948c2731df3b77697a8fcc85da3f6eef57/pkg/archive/archive.go -// Copyright Docker, Inc. -// SPDX-License-Identifier: Apache-2.0 - -// Package tarpatch addresses an issue with stdlib throwing an error in some environments. -package tarpatch - -import ( - "archive/tar" - "io/fs" - "os" - "strings" - "time" - - "github.com/vercel/turbo/cli/internal/turbopath" -) - -// nosysFileInfo hides the system-dependent info of the wrapped FileInfo to -// prevent tar.FileInfoHeader from introspecting it and potentially calling into -// glibc. -type nosysFileInfo struct { - os.FileInfo -} - -func (fi nosysFileInfo) Sys() interface{} { - // A Sys value of type *tar.Header is safe as it is system-independent. - // The tar.FileInfoHeader function copies the fields into the returned - // header without performing any OS lookups. - if sys, ok := fi.FileInfo.Sys().(*tar.Header); ok { - return sys - } - return nil -} - -// FileInfoHeaderNoLookups creates a partially-populated tar.Header from fi. -// -// Compared to the archive/tar.FileInfoHeader function, this function is safe to -// call from a chrooted process as it does not populate fields which would -// require operating system lookups. It behaves identically to -// tar.FileInfoHeader when fi is a FileInfo value returned from -// tar.Header.FileInfo(). -// -// When fi is a FileInfo for a native file, such as returned from os.Stat() and -// os.Lstat(), the returned Header value differs from one returned from -// tar.FileInfoHeader in the following ways. The Uname and Gname fields are not -// set as OS lookups would be required to populate them. The AccessTime and -// ChangeTime fields are not currently set (not yet implemented) although that -// is subject to change. Callers which require the AccessTime or ChangeTime -// fields to be zeroed should explicitly zero them out in the returned Header -// value to avoid any compatibility issues in the future. 
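// Usage sketch: build a tar header without any OS user/group lookups, so it
// is safe inside a chroot. Uname/Gname stay empty by design ("go.mod" is just
// an arbitrary existing file for the demo):
package main

import (
	"fmt"
	"os"

	"github.com/vercel/turbo/cli/internal/tarpatch"
)

func main() {
	fi, err := os.Lstat("go.mod")
	if err != nil {
		panic(err)
	}
	hdr, err := tarpatch.FileInfoHeaderNoLookups(fi, "")
	if err != nil {
		panic(err)
	}
	fmt.Println(hdr.Name, hdr.Size, hdr.Uname == "", hdr.Gname == "") // go.mod <size> true true
}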
-func FileInfoHeaderNoLookups(fi fs.FileInfo, link string) (*tar.Header, error) { - hdr, err := tar.FileInfoHeader(nosysFileInfo{fi}, link) - if err != nil { - return nil, err - } - return hdr, sysStat(fi, hdr) -} - -// FileInfoHeader creates a populated Header from fi. -// -// Compared to the archive/tar package, this function fills in less information -// but is safe to call from a chrooted process. The AccessTime and ChangeTime -// fields are not set in the returned header, ModTime is truncated to one-second -// precision, and the Uname and Gname fields are only set when fi is a FileInfo -// value returned from tar.Header.FileInfo(). -func FileInfoHeader(fullPath turbopath.AnchoredUnixPath, fileInfo fs.FileInfo, link string) (*tar.Header, error) { - hdr, err := FileInfoHeaderNoLookups(fileInfo, link) - if err != nil { - return nil, err - } - hdr.Format = tar.FormatPAX - hdr.ModTime = hdr.ModTime.Truncate(time.Second) - hdr.AccessTime = time.Time{} - hdr.ChangeTime = time.Time{} - hdr.Mode = int64(chmodTarEntry(os.FileMode(hdr.Mode))) - hdr.Name = canonicalTarName(fullPath, fileInfo.IsDir()) - return hdr, nil -} - -// canonicalTarName provides a platform-independent and consistent posix-style -// path for files and directories to be archived regardless of the platform. -func canonicalTarName(fullPath turbopath.AnchoredUnixPath, isDir bool) string { - nameString := fullPath.ToString() - if isDir { - // Append '/' if not already present. - if !strings.HasSuffix(nameString, "/") { - nameString += "/" - } - } - - return nameString -} diff --git a/cli/internal/tarpatch/tar_unix.go b/cli/internal/tarpatch/tar_unix.go deleted file mode 100644 index 3020c0e4c7c00..0000000000000 --- a/cli/internal/tarpatch/tar_unix.go +++ /dev/null @@ -1,42 +0,0 @@ -//go:build !windows -// +build !windows - -// Adapted from https://github.com/moby/moby/blob/924edb948c2731df3b77697a8fcc85da3f6eef57/pkg/archive/archive_unix.go -// Copyright Docker, Inc. -// SPDX-License-Identifier: Apache-2.0 - -package tarpatch - -import ( - "archive/tar" - "os" - "syscall" - - "golang.org/x/sys/unix" -) - -// chmodTarEntry is used to adjust the file permissions used in tar header based -// on the platform the archival is done. -func chmodTarEntry(perm os.FileMode) os.FileMode { - return perm // noop for unix as golang APIs provide perm bits correctly -} - -// sysStat populates hdr from system-dependent fields of fi without performing -// any OS lookups. -func sysStat(fi os.FileInfo, hdr *tar.Header) error { - s, ok := fi.Sys().(*syscall.Stat_t) - if !ok { - return nil - } - - hdr.Uid = int(s.Uid) - hdr.Gid = int(s.Gid) - - if s.Mode&unix.S_IFBLK != 0 || - s.Mode&unix.S_IFCHR != 0 { - hdr.Devmajor = int64(unix.Major(uint64(s.Rdev))) //nolint: unconvert - hdr.Devminor = int64(unix.Minor(uint64(s.Rdev))) //nolint: unconvert - } - - return nil -} diff --git a/cli/internal/tarpatch/tar_windows.go b/cli/internal/tarpatch/tar_windows.go deleted file mode 100644 index 486e6fdc748d1..0000000000000 --- a/cli/internal/tarpatch/tar_windows.go +++ /dev/null @@ -1,27 +0,0 @@ -//go:build windows -// +build windows - -// Adapted from https://github.com/moby/moby/blob/924edb948c2731df3b77697a8fcc85da3f6eef57/pkg/archive/archive_windows.go -// Copyright Docker, Inc. -// SPDX-License-Identifier: Apache-2.0 - -package tarpatch - -import ( - "archive/tar" - "os" -) - -// chmodTarEntry is used to adjust the file permissions used in tar header based -// on the platform the archival is done. 
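// Worked examples for the Windows normalization below: strip group- and
// world-write bits, then force the execute bits (standalone sketch):
//   0o644 -> 0o755    0o666 -> 0o755    0o600 -> 0o711
package main

import (
	"fmt"
	"os"
)

func chmodTarEntry(perm os.FileMode) os.FileMode {
	perm &= 0o755       // drop group/world write
	return perm | 0o111 // make everything +x on Windows
}

func main() {
	for _, p := range []os.FileMode{0o644, 0o666, 0o600} {
		fmt.Printf("%o -> %o\n", uint32(p), uint32(chmodTarEntry(p)))
	}
}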
-func chmodTarEntry(perm os.FileMode) os.FileMode { - // Remove group- and world-writable bits. - perm &= 0o755 - - // Add the x bit: make everything +x on Windows - return perm | 0o111 -} - -func sysStat(fi os.FileInfo, hdr *tar.Header) error { - return nil -} diff --git a/cli/internal/taskhash/taskhash.go b/cli/internal/taskhash/taskhash.go deleted file mode 100644 index d249eedbd09c0..0000000000000 --- a/cli/internal/taskhash/taskhash.go +++ /dev/null @@ -1,431 +0,0 @@ -// Package taskhash handles calculating dependency hashes for nodes in the task execution graph. -package taskhash - -import ( - "context" - "fmt" - "sort" - "strings" - "sync" - - "github.com/hashicorp/go-hclog" - "github.com/pyr-sh/dag" - "github.com/vercel/turbo/cli/internal/env" - "github.com/vercel/turbo/cli/internal/fs" - "github.com/vercel/turbo/cli/internal/fs/hash" - "github.com/vercel/turbo/cli/internal/hashing" - "github.com/vercel/turbo/cli/internal/inference" - "github.com/vercel/turbo/cli/internal/nodes" - "github.com/vercel/turbo/cli/internal/runsummary" - "github.com/vercel/turbo/cli/internal/turbopath" - "github.com/vercel/turbo/cli/internal/util" - "github.com/vercel/turbo/cli/internal/workspace" - "golang.org/x/sync/errgroup" -) - -// Tracker caches package-inputs hashes, as well as package-task hashes. -// package-inputs hashes must be calculated before package-task hashes, -// and package-task hashes must be calculated in topographical order. -// package-task hashing is threadsafe, provided topographical order is -// respected. -type Tracker struct { - rootNode string - globalHash string - EnvAtExecutionStart env.EnvironmentVariableMap - pipeline fs.Pipeline - - PackageInputsHashes map[string]string - - // PackageInputsExpandedHashes is a map of a hashkey to a list of files that are inputs to the task. - // Writes to this map happen during CalculateFileHash(). Since this happens synchronously - // before walking the task graph, it does not need to be protected by a mutex. - PackageInputsExpandedHashes map[string]map[turbopath.AnchoredUnixPath]string - - // mu is a mutex that we can lock/unlock to read/write from maps - // the fields below should be protected by the mutex. - mu sync.RWMutex - packageTaskEnvVars map[string]env.DetailedMap // taskId -> envvar pairs that affect the hash. - packageTaskHashes map[string]string // taskID -> hash - packageTaskFramework map[string]string // taskID -> inferred framework for package - packageTaskOutputs map[string][]turbopath.AnchoredSystemPath - packageTaskCacheStatus map[string]runsummary.TaskCacheSummary -} - -// NewTracker creates a tracker for package-inputs combinations and package-task combinations. 
-func NewTracker(rootNode string, globalHash string, envAtExecutionStart env.EnvironmentVariableMap, pipeline fs.Pipeline) *Tracker { - return &Tracker{ - rootNode: rootNode, - globalHash: globalHash, - EnvAtExecutionStart: envAtExecutionStart, - pipeline: pipeline, - packageTaskHashes: make(map[string]string), - packageTaskFramework: make(map[string]string), - packageTaskEnvVars: make(map[string]env.DetailedMap), - packageTaskOutputs: make(map[string][]turbopath.AnchoredSystemPath), - packageTaskCacheStatus: make(map[string]runsummary.TaskCacheSummary), - } -} - -// packageFileHashInputs defines a combination of a package and optional set of input globs -type packageFileHashInputs struct { - taskID string - taskDefinition *fs.TaskDefinition - packageName string -} - -// CalculateFileHashes hashes each unique package-inputs combination that is present -// in the task graph. Must be called before calculating task hashes. -func (th *Tracker) CalculateFileHashes( - allTasks []dag.Vertex, - workerCount int, - workspaceInfos workspace.Catalog, - taskDefinitions map[string]*fs.TaskDefinition, - repoRoot turbopath.AbsoluteSystemPath, -) error { - hashTasks := make(util.Set) - - for _, v := range allTasks { - taskID, ok := v.(string) - if !ok { - return fmt.Errorf("unknown task %v", taskID) - } - if taskID == th.rootNode { - continue - } - - packageName, _ := util.GetPackageTaskFromId(taskID) - if packageName == th.rootNode { - continue - } - - taskDefinition, ok := taskDefinitions[taskID] - if !ok { - return fmt.Errorf("missing pipeline entry %v", taskID) - } - - pfs := &packageFileHashInputs{ - taskID, - taskDefinition, - packageName, - } - - hashTasks.Add(pfs) - } - - hashes := make(map[string]string, len(hashTasks)) - hashObjects := make(map[string]map[turbopath.AnchoredUnixPath]string, len(hashTasks)) - hashQueue := make(chan *packageFileHashInputs, workerCount) - hashErrs, ctx := errgroup.WithContext(context.Background()) - - for i := 0; i < workerCount; i++ { - hashErrs.Go(func() error { - for packageFileHashInputs := range hashQueue { - pkg, ok := workspaceInfos.PackageJSONs[packageFileHashInputs.packageName] - if !ok { - return fmt.Errorf("cannot find package %v", packageFileHashInputs.packageName) - } - - // Get the hashes of each file, keyed by the path. - hashObject, err := hashing.GetPackageFileHashes(repoRoot, pkg.Dir, packageFileHashInputs.taskDefinition.Inputs) - if err != nil { - return err - } - - // Make sure we include specified .env files in the file hash. - // Handled separately because these are not globs! - if len(packageFileHashInputs.taskDefinition.DotEnv) > 0 { - packagePath := pkg.Dir.RestoreAnchor(repoRoot) - dotEnvObject, err := hashing.GetHashesForExistingFiles(packagePath, packageFileHashInputs.taskDefinition.DotEnv.ToSystemPathArray()) - if err != nil { - return err - } - - // Add the dotEnv files into the file hash object. - for key, value := range dotEnvObject { - hashObject[key] = value - } - } - - // Get the combined hash of all the files. - hash, err := fs.HashFileHashes(hashObject) - if err != nil { - return err - } - - // Save off the hash information, keyed by package task. 
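// The fan-out around this write, distilled to a standalone sketch: a bounded
// worker pool fed from a channel, with errgroup cancelling the feeder on the
// first error (the fmt call stands in for GetPackageFileHashes):
package main

import (
	"context"
	"fmt"

	"golang.org/x/sync/errgroup"
)

func main() {
	jobs := []string{"app0#build", "app1#build", "libA#build"}
	queue := make(chan string, 2)
	g, ctx := errgroup.WithContext(context.Background())
	for i := 0; i < 2; i++ { // workerCount = 2
		g.Go(func() error {
			for job := range queue {
				fmt.Println("hashing inputs for", job)
			}
			return nil
		})
	}
outer:
	for _, job := range jobs {
		select {
		case queue <- job:
		case <-ctx.Done(): // a worker failed; stop feeding
			break outer
		}
	}
	close(queue)
	if err := g.Wait(); err != nil {
		fmt.Println("hashing failed:", err)
	}
}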
- th.mu.Lock() - hashes[packageFileHashInputs.taskID] = hash - hashObjects[packageFileHashInputs.taskID] = hashObject - th.mu.Unlock() - } - return nil - }) - } -outer: - for ht := range hashTasks { - select { - case hashQueue <- ht.(*packageFileHashInputs): - // If we return an error, stop sending more work - case <-ctx.Done(): - break outer - } - } - close(hashQueue) - err := hashErrs.Wait() - if err != nil { - return err - } - th.PackageInputsHashes = hashes - th.PackageInputsExpandedHashes = hashObjects - return nil -} - -// type taskHashable struct { -// globalHash string -// taskDependencyHashes []string -// packageDir turbopath.AnchoredUnixPath -// hashOfFiles string -// externalDepsHash string -// task string -// outputs hash.TaskOutputs -// passThruArgs []string -// env []string -// resolvedEnvVars env.EnvironmentVariablePairs -// passThroughEnv []string -// envMode util.EnvMode -// dotEnv turbopath.AnchoredUnixPathArray -// } - -// calculateTaskHashFromHashable returns a hash string from the taskHashable -func calculateTaskHashFromHashable(full *hash.TaskHashable) (string, error) { - switch full.EnvMode { - case util.Loose: - // Remove the passthroughs from hash consideration if we're explicitly loose. - full.PassThroughEnv = nil - return fs.HashTask(full) - case util.Strict: - // Collapse `nil` and `[]` in strict mode. - if full.PassThroughEnv == nil { - full.PassThroughEnv = make([]string, 0) - } - return fs.HashTask(full) - case util.Infer: - panic("task inferred status should have already been resolved") - default: - panic("unimplemented environment mode") - } -} - -func (th *Tracker) calculateDependencyHashes(dependencySet dag.Set) ([]string, error) { - dependencyHashSet := make(util.Set) - - rootPrefix := th.rootNode + util.TaskDelimiter - th.mu.RLock() - defer th.mu.RUnlock() - for _, dependency := range dependencySet { - if dependency == th.rootNode { - continue - } - dependencyTask, ok := dependency.(string) - if !ok { - return nil, fmt.Errorf("unknown task: %v", dependency) - } - if strings.HasPrefix(dependencyTask, rootPrefix) { - continue - } - dependencyHash, ok := th.packageTaskHashes[dependencyTask] - if !ok { - return nil, fmt.Errorf("missing hash for dependent task: %v", dependencyTask) - } - dependencyHashSet.Add(dependencyHash) - } - dependenciesHashList := dependencyHashSet.UnsafeListOfStrings() - sort.Strings(dependenciesHashList) - return dependenciesHashList, nil -} - -// CalculateTaskHash calculates the hash for package-task combination. It is threadsafe, provided -// that it has previously been called on its task-graph dependencies. File hashes must be calculated -// first. -func (th *Tracker) CalculateTaskHash(logger hclog.Logger, packageTask *nodes.PackageTask, dependencySet dag.Set, frameworkInference bool, args []string) (string, error) { - hashOfFiles, ok := th.PackageInputsHashes[packageTask.TaskID] - if !ok { - return "", fmt.Errorf("cannot find package-file hash for %v", packageTask.TaskID) - } - - allEnvVarMap := env.EnvironmentVariableMap{} - explicitEnvVarMap := env.EnvironmentVariableMap{} - matchingEnvVarMap := env.EnvironmentVariableMap{} - - var framework *inference.Framework - if frameworkInference { - // See if we infer a framework. 
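// Toy model of the wildcard filtering used by the inference branch below:
// env.FromWildcards takes patterns like "SOME_PREFIX_*", and a leading "!"
// marks an exclusion (exclusions win). Variable names here are illustrative.
package main

import (
	"fmt"
	"strings"
)

func matchWildcards(vars map[string]string, wildcards []string) map[string]string {
	out := map[string]string{}
	for _, w := range wildcards { // inclusions listed first, exclusions after
		neg := strings.HasPrefix(w, "!")
		prefix := strings.TrimSuffix(strings.TrimPrefix(w, "!"), "*")
		for k, v := range vars {
			if strings.HasPrefix(k, prefix) {
				if neg {
					delete(out, k)
				} else {
					out[k] = v
				}
			}
		}
	}
	return out
}

func main() {
	vars := map[string]string{"MY_APP_URL": "u", "MY_APP_VENDOR_TOKEN": "t"}
	// A framework-style include plus a TURBO_CI_VENDOR_ENV_KEY-style exclude:
	fmt.Println(matchWildcards(vars, []string{"MY_APP_*", "!MY_APP_VENDOR*"}))
	// => map[MY_APP_URL:u]
}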
-		framework = inference.InferFramework(packageTask.Pkg)
-		if framework != nil {
-			logger.Debug(fmt.Sprintf("auto detected framework for %s", packageTask.PackageName), "framework", framework.Slug, "env_prefix", framework.EnvWildcards)
-
-			computedWildcards := []string{}
-			computedWildcards = append(computedWildcards, framework.EnvWildcards...)
-
-			// Vendor excludes are only applied against inferred includes.
-			excludePrefix, exists := th.EnvAtExecutionStart["TURBO_CI_VENDOR_ENV_KEY"]
-			if exists && excludePrefix != "" {
-				computedExclude := "!" + excludePrefix + "*"
-				logger.Debug(fmt.Sprintf("excluding environment variables matching wildcard %s", computedExclude))
-				computedWildcards = append(computedWildcards, computedExclude)
-			}
-
-			inferenceEnvVarMap, err := th.EnvAtExecutionStart.FromWildcards(computedWildcards)
-			if err != nil {
-				return "", err
-			}
-
-			userEnvVarSet, err := th.EnvAtExecutionStart.FromWildcardsUnresolved(packageTask.TaskDefinition.Env)
-			if err != nil {
-				return "", err
-			}
-
-			allEnvVarMap.Union(userEnvVarSet.Inclusions)
-			allEnvVarMap.Union(inferenceEnvVarMap)
-			allEnvVarMap.Difference(userEnvVarSet.Exclusions)
-
-			explicitEnvVarMap.Union(userEnvVarSet.Inclusions)
-			explicitEnvVarMap.Difference(userEnvVarSet.Exclusions)
-
-			matchingEnvVarMap.Union(inferenceEnvVarMap)
-			matchingEnvVarMap.Difference(userEnvVarSet.Exclusions)
-		} else {
-			var err error
-			allEnvVarMap, err = th.EnvAtExecutionStart.FromWildcards(packageTask.TaskDefinition.Env)
-			if err != nil {
-				return "", err
-			}
-			explicitEnvVarMap.Union(allEnvVarMap)
-		}
-	} else {
-		var err error
-		allEnvVarMap, err = th.EnvAtExecutionStart.FromWildcards(packageTask.TaskDefinition.Env)
-		if err != nil {
-			return "", err
-		}
-
-		explicitEnvVarMap.Union(allEnvVarMap)
-	}
-
-	envVars := env.DetailedMap{
-		All: allEnvVarMap,
-		BySource: env.BySource{
-			Explicit: explicitEnvVarMap,
-			Matching: matchingEnvVarMap,
-		},
-	}
-
-	hashableEnvPairs := envVars.All.ToHashable()
-	outputs := packageTask.HashableOutputs()
-	taskDependencyHashes, err := th.calculateDependencyHashes(dependencySet)
-	if err != nil {
-		return "", err
-	}
-	// log any auto detected env vars
-	logger.Debug(fmt.Sprintf("task hash env vars for %s:%s", packageTask.PackageName, packageTask.Task), "vars", hashableEnvPairs)
-
-	hash, err := calculateTaskHashFromHashable(&hash.TaskHashable{
-		GlobalHash:           th.globalHash,
-		TaskDependencyHashes: taskDependencyHashes,
-		PackageDir:           packageTask.Pkg.Dir.ToUnixPath(),
-		HashOfFiles:          hashOfFiles,
-		ExternalDepsHash:     packageTask.Pkg.ExternalDepsHash,
-		Task:                 packageTask.Task,
-		Outputs:              outputs,
-		PassThruArgs:         args,
-		Env:                  packageTask.TaskDefinition.Env,
-		ResolvedEnvVars:      hashableEnvPairs,
-		PassThroughEnv:       packageTask.TaskDefinition.PassThroughEnv,
-		EnvMode:              packageTask.EnvMode,
-		DotEnv:               packageTask.TaskDefinition.DotEnv,
-	})
-	if err != nil {
-		return "", fmt.Errorf("failed to hash task %v: %v", packageTask.TaskID, err)
-	}
-	th.mu.Lock()
-	th.packageTaskEnvVars[packageTask.TaskID] = envVars
-	th.packageTaskHashes[packageTask.TaskID] = hash
-	if framework != nil {
-		th.packageTaskFramework[packageTask.TaskID] = framework.Slug
-	}
-	th.mu.Unlock()
-	return hash, nil
-}
-
-// GetExpandedInputs gets the expanded set of inputs for a given PackageTask
-func (th *Tracker) GetExpandedInputs(packageTask *nodes.PackageTask) map[turbopath.AnchoredUnixPath]string {
-	expandedInputs := th.PackageInputsExpandedHashes[packageTask.TaskID]
-	inputsCopy := make(map[turbopath.AnchoredUnixPath]string, len(expandedInputs))
-
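-	// Return a copy so that callers cannot mutate the tracker's internal map.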
-	for path, hash := range expandedInputs {
-		inputsCopy[path] = hash
-	}
-
-	return inputsCopy
-}
-
-// GetEnvVars returns the hashed env vars for a given taskID
-func (th *Tracker) GetEnvVars(taskID string) env.DetailedMap {
-	th.mu.RLock()
-	defer th.mu.RUnlock()
-	return th.packageTaskEnvVars[taskID]
-}
-
-// GetFramework returns the inferred framework for a given taskID
-func (th *Tracker) GetFramework(taskID string) string {
-	th.mu.RLock()
-	defer th.mu.RUnlock()
-	return th.packageTaskFramework[taskID]
-}
-
-// GetExpandedOutputs returns a list of outputs for a given taskID
-func (th *Tracker) GetExpandedOutputs(taskID string) []turbopath.AnchoredSystemPath {
-	th.mu.RLock()
-	defer th.mu.RUnlock()
-	outputs, ok := th.packageTaskOutputs[taskID]
-
-	if !ok {
-		return []turbopath.AnchoredSystemPath{}
-	}
-
-	return outputs
-}
-
-// SetExpandedOutputs stores a list of outputs for a given taskID so it can be read later
-func (th *Tracker) SetExpandedOutputs(taskID string, outputs []turbopath.AnchoredSystemPath) {
-	th.mu.Lock()
-	defer th.mu.Unlock()
-	th.packageTaskOutputs[taskID] = outputs
-}
-
-// GetTaskHashes gets the package task hashes
-func (th *Tracker) GetTaskHashes() map[string]string {
-	th.mu.RLock()
-	defer th.mu.RUnlock()
-	return th.packageTaskHashes
-}
-
-// SetCacheStatus records the task status for the given taskID
-func (th *Tracker) SetCacheStatus(taskID string, cacheSummary runsummary.TaskCacheSummary) {
-	th.mu.Lock()
-	defer th.mu.Unlock()
-	th.packageTaskCacheStatus[taskID] = cacheSummary
-}
-
-// GetCacheStatus returns the recorded task status for the given taskID
-func (th *Tracker) GetCacheStatus(taskID string) runsummary.TaskCacheSummary {
-	th.mu.Lock()
-	defer th.mu.Unlock()
-
-	if status, ok := th.packageTaskCacheStatus[taskID]; ok {
-		return status
-	}
-
-	// Return an empty one, all the fields will be false and 0
-	return runsummary.TaskCacheSummary{}
-}
diff --git a/cli/internal/turbopath/absolute_system_path.go b/cli/internal/turbopath/absolute_system_path.go
deleted file mode 100644
index df65827c5cd97..0000000000000
--- a/cli/internal/turbopath/absolute_system_path.go
+++ /dev/null
@@ -1,258 +0,0 @@
-package turbopath
-
-import (
-	"io/ioutil"
-	"os"
-	"path/filepath"
-	"strings"
-)
-
-// AbsoluteSystemPath is a root-relative path using system separators.
-type AbsoluteSystemPath string
-
-// _dirPermissions are the default permission bits we apply to directories.
-const _dirPermissions = os.ModeDir | 0775
-
-// _nonRelativeSentinel is the leading sentinel that indicates traversal.
-const _nonRelativeSentinel = ".."
-
-// ToString returns a string representation of this Path.
-// Used for interfacing with APIs that require a string.
-func (p AbsoluteSystemPath) ToString() string {
-	return string(p)
-}
-
-// RelativeTo calculates the relative path between two `AbsoluteSystemPath`s.
-func (p AbsoluteSystemPath) RelativeTo(basePath AbsoluteSystemPath) (AnchoredSystemPath, error) {
-	processed, err := filepath.Rel(basePath.ToString(), p.ToString())
-	return AnchoredSystemPath(processed), err
-}
-
-// Join appends relative path segments to this AbsoluteSystemPath.
-func (p AbsoluteSystemPath) Join(additional ...RelativeSystemPath) AbsoluteSystemPath {
-	cast := RelativeSystemPathArray(additional)
-	return AbsoluteSystemPath(filepath.Join(p.ToString(), filepath.Join(cast.ToStringArray()...)))
-}
-
-// ToStringDuringMigration returns a string representation of this path.
-// These instances should eventually be removed.
-func (p AbsoluteSystemPath) ToStringDuringMigration() string {
-	return p.ToString()
-}
-
-// UntypedJoin is a Join that does not constrain the type of the arguments.
-// This enables you to pass in strings, but does not protect you from garbage in.
-func (p AbsoluteSystemPath) UntypedJoin(args ...string) AbsoluteSystemPath {
-	return AbsoluteSystemPath(filepath.Join(p.ToString(), filepath.Join(args...)))
-}
-
-// Dir implements filepath.Dir() for an AbsoluteSystemPath
-func (p AbsoluteSystemPath) Dir() AbsoluteSystemPath {
-	return AbsoluteSystemPath(filepath.Dir(p.ToString()))
-}
-
-// Mkdir implements os.Mkdir(p, perm)
-func (p AbsoluteSystemPath) Mkdir(perm os.FileMode) error {
-	return os.Mkdir(p.ToString(), perm)
-}
-
-// MkdirAll implements os.MkdirAll(p, perm)
-func (p AbsoluteSystemPath) MkdirAll(perm os.FileMode) error {
-	return os.MkdirAll(p.ToString(), perm)
-}
-
-// Open implements os.Open(p) for an AbsoluteSystemPath
-func (p AbsoluteSystemPath) Open() (*os.File, error) {
-	return os.Open(p.ToString())
-}
-
-// OpenFile implements os.OpenFile for an absolute path
-func (p AbsoluteSystemPath) OpenFile(flags int, mode os.FileMode) (*os.File, error) {
-	return os.OpenFile(p.ToString(), flags, mode)
-}
-
-// Lstat implements os.Lstat for absolute path
-func (p AbsoluteSystemPath) Lstat() (os.FileInfo, error) {
-	return os.Lstat(p.ToString())
-}
-
-// Stat implements os.Stat for absolute path
-func (p AbsoluteSystemPath) Stat() (os.FileInfo, error) {
-	return os.Stat(p.ToString())
-}
-
-// Findup checks all parent directories for a file.
-func (p AbsoluteSystemPath) Findup(name RelativeSystemPath) (AbsoluteSystemPath, error) {
-	path, err := FindupFrom(name.ToString(), p.ToString())
-	return AbsoluteSystemPath(path), err
-}
-
-// Exists returns true if the given path exists.
-func (p AbsoluteSystemPath) Exists() bool {
-	_, err := p.Lstat()
-	return err == nil
-}
-
-// DirExists returns true if the given path exists and is a directory.
-func (p AbsoluteSystemPath) DirExists() bool {
-	info, err := p.Lstat()
-	return err == nil && info.IsDir()
-}
-
-// FileExists returns true if the given path exists and is a file.
-func (p AbsoluteSystemPath) FileExists() bool {
-	info, err := os.Lstat(p.ToString())
-	return err == nil && !info.IsDir()
-}
-
-// ContainsPath returns true if this absolute path is a parent of the
-// argument.
-func (p AbsoluteSystemPath) ContainsPath(other AbsoluteSystemPath) (bool, error) {
-	// In Go, filepath.Rel can return a path that starts with "../" or equivalent.
-	// Checking filesystem-level contains can get extremely complicated
-	// (see https://github.com/golang/dep/blob/f13583b555deaa6742f141a9c1185af947720d60/internal/fs/fs.go#L33)
-	// As a compromise, rely on the stdlib to generate a relative path and then check
-	// if the first step is "../".
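-	// For example (illustrative): filepath.Rel("/repo", "/repo/apps/web") is "apps/web"
-	// (contained), while filepath.Rel("/repo", "/repo-cache/x") is "../repo-cache/x"
-	// (not contained), a case a naive string-prefix check would get wrong.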
-	rel, err := filepath.Rel(p.ToString(), other.ToString())
-	if err != nil {
-		return false, err
-	}
-	return !strings.HasPrefix(rel, _nonRelativeSentinel), nil
-}
-
-// ReadFile reads the contents of the specified file
-func (p AbsoluteSystemPath) ReadFile() ([]byte, error) {
-	return ioutil.ReadFile(p.ToString())
-}
-
-// VolumeName returns the volume of the specified path
-func (p AbsoluteSystemPath) VolumeName() string {
-	return filepath.VolumeName(p.ToString())
-}
-
-// WriteFile writes the contents of the specified file
-func (p AbsoluteSystemPath) WriteFile(contents []byte, mode os.FileMode) error {
-	return ioutil.WriteFile(p.ToString(), contents, mode)
-}
-
-// EnsureDir ensures that the directory containing this file exists
-func (p AbsoluteSystemPath) EnsureDir() error {
-	dir := p.Dir()
-	err := os.MkdirAll(dir.ToString(), _dirPermissions)
-	if err != nil && dir.FileExists() {
-		// It looks like this is a file and not a directory. Attempt to remove it; this can
-		// happen in some cases if you change a rule from outputting a file to a directory.
-		if err2 := dir.Remove(); err2 == nil {
-			err = os.MkdirAll(dir.ToString(), _dirPermissions)
-		} else {
-			return err
-		}
-	}
-	return err
-}
-
-// MkdirAllMode creates the directory at path, along with any necessary parents, ensuring that path ends up with the given mode
-func (p AbsoluteSystemPath) MkdirAllMode(mode os.FileMode) error {
-	info, err := p.Lstat()
-	if err == nil {
-		if info.IsDir() && info.Mode() == mode {
-			// Dir exists with the correct mode
-			return nil
-		} else if info.IsDir() {
-			// Dir exists with incorrect mode
-			return os.Chmod(p.ToString(), mode)
-		} else {
-			// Path exists as file, remove it
-			if err := p.Remove(); err != nil {
-				return err
-			}
-		}
-	}
-	if err := os.MkdirAll(p.ToString(), mode); err != nil {
-		return err
-	}
-	// This is necessary only when umask results in creating a directory with permissions different from the one passed by the user
-	return os.Chmod(p.ToString(), mode)
-}
-
-// Create is the AbsoluteSystemPath wrapper for os.Create
-func (p AbsoluteSystemPath) Create() (*os.File, error) {
-	return os.Create(p.ToString())
-}
-
-// Ext implements filepath.Ext(p) for an absolute path
-func (p AbsoluteSystemPath) Ext() string {
-	return filepath.Ext(p.ToString())
-}
-
-// RelativePathString returns the relative path from this AbsoluteSystemPath to another absolute path, as a string
-func (p AbsoluteSystemPath) RelativePathString(path string) (string, error) {
-	return filepath.Rel(p.ToString(), path)
-}
-
-// PathTo returns the relative path between two absolute paths
-// This should likely eventually return an AnchoredSystemPath
-func (p AbsoluteSystemPath) PathTo(other AbsoluteSystemPath) (string, error) {
-	return p.RelativePathString(other.ToString())
-}
-
-// Symlink implements os.Symlink(target, p) for absolute path
-func (p AbsoluteSystemPath) Symlink(target string) error {
-	return os.Symlink(target, p.ToString())
-}
-
-// Readlink implements os.Readlink(p) for an absolute path
-func (p AbsoluteSystemPath) Readlink() (string, error) {
-	return os.Readlink(p.ToString())
-}
-
-// Remove removes the file or (empty) directory at the given path
-func (p AbsoluteSystemPath) Remove() error {
-	return os.Remove(p.ToString())
-}
-
-// RemoveAll implements os.RemoveAll for absolute paths.
-func (p AbsoluteSystemPath) RemoveAll() error {
-	return os.RemoveAll(p.ToString())
-}
-
-// Base implements filepath.Base for an absolute path
-func (p AbsoluteSystemPath) Base() string {
-	return filepath.Base(p.ToString())
-}
-
-// Rename implements os.Rename(p, dest) for absolute paths
-func (p AbsoluteSystemPath) Rename(dest AbsoluteSystemPath) error {
-	return os.Rename(p.ToString(), dest.ToString())
-}
-
-// EvalSymlinks implements filepath.EvalSymlinks for absolute path
-func (p AbsoluteSystemPath) EvalSymlinks() (AbsoluteSystemPath, error) {
-	result, err := filepath.EvalSymlinks(p.ToString())
-	if err != nil {
-		return "", err
-	}
-	return AbsoluteSystemPath(result), nil
-}
-
-// HasPrefix is strings.HasPrefix for paths, ensuring that it matches on separator boundaries.
-// This does NOT perform Clean in advance.
-func (p AbsoluteSystemPath) HasPrefix(prefix AbsoluteSystemPath) bool {
-	prefixLen := len(prefix)
-	pathLen := len(p)
-
-	if prefixLen > pathLen {
-		// Can't be a prefix if longer.
-		return false
-	} else if prefixLen == pathLen {
-		// Can be a prefix if they're equal, but otherwise no.
-		return p == prefix
-	}
-
-	// prefix is definitely shorter than p.
-	// We need to confirm that p[len(prefix)] is a system separator.
-	return strings.HasPrefix(p.ToString(), prefix.ToString()) && os.IsPathSeparator(p[prefixLen])
-}
diff --git a/cli/internal/turbopath/absolute_system_path_darwin.go b/cli/internal/turbopath/absolute_system_path_darwin.go
deleted file mode 100644
index e2c3bfffc89b1..0000000000000
--- a/cli/internal/turbopath/absolute_system_path_darwin.go
+++ /dev/null
@@ -1,23 +0,0 @@
-//go:build darwin
-// +build darwin
-
-// Adapted from https://github.com/containerd/continuity/blob/b4ca35286886296377de39e6eafd1affae019fc3/driver/lchmod_unix.go
-// Copyright The containerd Authors
-// SPDX-License-Identifier: Apache-2.0
-
-package turbopath
-
-import (
-	"os"
-
-	"golang.org/x/sys/unix"
-)
-
-// Lchmod changes the mode of a file not following symlinks.
-func (p AbsoluteSystemPath) Lchmod(mode os.FileMode) error {
-	err := unix.Fchmodat(unix.AT_FDCWD, p.ToString(), uint32(mode), unix.AT_SYMLINK_NOFOLLOW)
-	if err != nil {
-		err = &os.PathError{Op: "lchmod", Path: p.ToString(), Err: err}
-	}
-	return err
-}
diff --git a/cli/internal/turbopath/absolute_system_path_notdarwin.go b/cli/internal/turbopath/absolute_system_path_notdarwin.go
deleted file mode 100644
index 11958886cad9b..0000000000000
--- a/cli/internal/turbopath/absolute_system_path_notdarwin.go
+++ /dev/null
@@ -1,13 +0,0 @@
-//go:build !darwin
-// +build !darwin
-
-package turbopath
-
-import (
-	"os"
-)
-
-// Lchmod changes the mode of a file not following symlinks.
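-// On non-darwin platforms this is a no-op; the darwin-only file above carries
-// the real implementation via unix.Fchmodat.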
-func (p AbsoluteSystemPath) Lchmod(mode os.FileMode) error {
-	return nil
-}
diff --git a/cli/internal/turbopath/absolute_system_path_test.go b/cli/internal/turbopath/absolute_system_path_test.go
deleted file mode 100644
index 4ca36f9b91f93..0000000000000
--- a/cli/internal/turbopath/absolute_system_path_test.go
+++ /dev/null
@@ -1,174 +0,0 @@
-package turbopath
-
-import (
-	"os"
-	"runtime"
-	"testing"
-
-	"gotest.tools/v3/assert"
-	"gotest.tools/v3/fs"
-)
-
-func Test_Mkdir(t *testing.T) {
-	type Case struct {
-		name         string
-		isDir        bool
-		exists       bool
-		mode         os.FileMode
-		expectedMode os.FileMode
-	}
-
-	cases := []Case{
-		{
-			name:         "dir doesn't exist",
-			exists:       false,
-			expectedMode: os.ModeDir | 0777,
-		},
-		{
-			name:         "path exists as file",
-			exists:       true,
-			isDir:        false,
-			mode:         0666,
-			expectedMode: os.ModeDir | 0755,
-		},
-		{
-			name:         "dir exists with incorrect mode",
-			exists:       true,
-			isDir:        true,
-			mode:         os.ModeDir | 0755,
-			expectedMode: os.ModeDir | 0655,
-		},
-		{
-			name:         "dir exists with correct mode",
-			exists:       true,
-			isDir:        true,
-			mode:         os.ModeDir | 0755,
-			expectedMode: os.ModeDir | 0755,
-		},
-	}
-
-	for _, testCase := range cases {
-		testDir := fs.NewDir(t, "system-path-mkdir-test")
-		testName := testCase.name
-		path := testDir.Join("foo")
-		if testCase.isDir {
-			err := os.Mkdir(path, testCase.mode)
-			assert.NilError(t, err, "%s: Mkdir", testName)
-		} else if testCase.exists {
-			file, err := os.Create(path)
-			assert.NilError(t, err, "%s: Create", testName)
-			err = file.Chmod(testCase.mode)
-			assert.NilError(t, err, "%s: Chmod", testName)
-			err = file.Close()
-			assert.NilError(t, err, "%s: Close", testName)
-		}
-
-		testPath := AbsoluteSystemPath(path)
-		err := testPath.MkdirAllMode(testCase.expectedMode)
-		assert.NilError(t, err, "%s: Mkdir", testName)
-
-		stat, err := testPath.Lstat()
-		assert.NilError(t, err, "%s: Lstat", testName)
-		assert.Assert(t, stat.IsDir(), testName)
-
-		if runtime.GOOS == "windows" {
-			// For windows os.Chmod will only change the writable bit so that's all we check
-			assert.Equal(t, stat.Mode().Perm()&0200, testCase.expectedMode.Perm()&0200, testName)
-		} else {
-			assert.Equal(t, stat.Mode(), testCase.expectedMode, testName)
-		}
-	}
-}
-
-func TestAbsoluteSystemPath_Findup(t *testing.T) {
-	tests := []struct {
-		name               string
-		fs                 []AnchoredSystemPath
-		executionDirectory AnchoredSystemPath
-		fileName           RelativeSystemPath
-		want               AnchoredSystemPath
-		wantErr            bool
-	}{
-		{
-			name: "hello world",
-			fs: []AnchoredSystemPath{
-				AnchoredUnixPath("one/two/three/four/.file").ToSystemPath(),
-				AnchoredUnixPath("one/two/three/four/.target").ToSystemPath(),
-			},
-			executionDirectory: AnchoredUnixPath("one/two/three/four").ToSystemPath(),
-			fileName:           RelativeUnixPath(".target").ToSystemPath(),
-			want:               AnchoredUnixPath("one/two/three/four/.target").ToSystemPath(),
-		},
-		{
-			name: "parent",
-			fs: []AnchoredSystemPath{
-				AnchoredUnixPath("one/two/three/four/.file").ToSystemPath(),
-				AnchoredUnixPath("one/two/three/.target").ToSystemPath(),
-			},
-			executionDirectory: AnchoredUnixPath("one/two/three/four").ToSystemPath(),
-			fileName:           RelativeUnixPath(".target").ToSystemPath(),
-			want:               AnchoredUnixPath("one/two/three/.target").ToSystemPath(),
-		},
-		{
-			name: "gets the closest",
-			fs: []AnchoredSystemPath{
-				AnchoredUnixPath("one/two/three/four/.file").ToSystemPath(),
-				AnchoredUnixPath("one/two/three/.target").ToSystemPath(),
-				AnchoredUnixPath("one/two/.target").ToSystemPath(),
-			},
-			executionDirectory: AnchoredUnixPath("one/two/three/four").ToSystemPath(),
-			fileName:           RelativeUnixPath(".target").ToSystemPath(),
-			want:               AnchoredUnixPath("one/two/three/.target").ToSystemPath(),
-		},
-		{
-			name: "nonexistent",
-			fs: []AnchoredSystemPath{
-				AnchoredUnixPath("one/two/three/four/.file").ToSystemPath(),
-			},
-			executionDirectory: AnchoredUnixPath("one/two/three/four").ToSystemPath(),
-			fileName:           RelativeUnixPath(".nonexistent").ToSystemPath(),
-			want:               "",
-		},
-	}
-	for _, tt := range tests {
-		t.Run(tt.name, func(t *testing.T) {
-			fsRoot := AbsoluteSystemPath(t.TempDir())
-			for _, file := range tt.fs {
-				path := file.RestoreAnchor(fsRoot)
-				assert.NilError(t, path.Dir().MkdirAll(0777))
-				assert.NilError(t, path.WriteFile(nil, 0777))
-			}
-
-			got, err := tt.executionDirectory.RestoreAnchor(fsRoot).Findup(tt.fileName)
-			if tt.wantErr {
-				assert.ErrorIs(t, err, os.ErrNotExist)
-				return
-			}
-			if got != "" && got != tt.want.RestoreAnchor(fsRoot) {
-				t.Errorf("AbsoluteSystemPath.Findup() = %v, want %v", got, tt.want)
-			}
-		})
-	}
-}
-
-func TestJoin(t *testing.T) {
-	rawRoot, err := os.Getwd()
-	if err != nil {
-		t.Fatalf("cwd %v", err)
-	}
-	root := AbsoluteSystemPathFromUpstream(rawRoot)
-	testRoot := root.Join("a", "b", "c")
-	dot := testRoot.Join(".")
-	if dot != testRoot {
-		t.Errorf(". path got %v, want %v", dot, testRoot)
-	}
-
-	doubleDot := testRoot.Join("..")
-	expectedDoubleDot := root.Join("a", "b")
-	if doubleDot != expectedDoubleDot {
-		t.Errorf(".. path got %v, want %v", doubleDot, expectedDoubleDot)
-	}
-}
diff --git a/cli/internal/turbopath/anchored_system_path.go b/cli/internal/turbopath/anchored_system_path.go
deleted file mode 100644
index 0957ead6f0a47..0000000000000
--- a/cli/internal/turbopath/anchored_system_path.go
+++ /dev/null
@@ -1,75 +0,0 @@
-package turbopath
-
-import (
-	"os"
-	"path/filepath"
-	"strings"
-)
-
-// AnchoredSystemPath is a path stemming from a specified root using system separators.
-type AnchoredSystemPath string
-
-// ToString returns a string representation of this Path.
-// Used for interfacing with APIs that require a string.
-func (p AnchoredSystemPath) ToString() string {
-	return string(p)
-}
-
-// ToStringDuringMigration returns the string representation of this path, and is for
-// use in situations where we expect a future path migration to remove the need for the
-// string representation
-func (p AnchoredSystemPath) ToStringDuringMigration() string {
-	return string(p)
-}
-
-// ToSystemPath returns itself.
-func (p AnchoredSystemPath) ToSystemPath() AnchoredSystemPath {
-	return p
-}
-
-// ToUnixPath converts an AnchoredSystemPath to an AnchoredUnixPath.
-func (p AnchoredSystemPath) ToUnixPath() AnchoredUnixPath {
-	return AnchoredUnixPath(filepath.ToSlash(p.ToString()))
-}
-
-// RelativeTo calculates the relative path between two `AnchoredSystemPath`s.
-func (p AnchoredSystemPath) RelativeTo(basePath AnchoredSystemPath) (AnchoredSystemPath, error) {
-	processed, err := filepath.Rel(basePath.ToString(), p.ToString())
-	return AnchoredSystemPath(processed), err
-}
-
-// RestoreAnchor prefixes the AnchoredSystemPath with its anchor to return an AbsoluteSystemPath.
-func (p AnchoredSystemPath) RestoreAnchor(anchor AbsoluteSystemPath) AbsoluteSystemPath {
-	return AbsoluteSystemPath(filepath.Join(anchor.ToString(), p.ToString()))
-}
-
-// Dir returns filepath.Dir for the path.
-func (p AnchoredSystemPath) Dir() AnchoredSystemPath {
-	return AnchoredSystemPath(filepath.Dir(p.ToString()))
-}
-
-// Join appends relative path segments to this AnchoredSystemPath.
-func (p AnchoredSystemPath) Join(additional ...RelativeSystemPath) AnchoredSystemPath {
-	cast := RelativeSystemPathArray(additional)
-	return AnchoredSystemPath(filepath.Join(p.ToString(), filepath.Join(cast.ToStringArray()...)))
-}
-
-// HasPrefix is strings.HasPrefix for paths, ensuring that it matches on separator boundaries.
-// This does NOT perform Clean in advance.
-func (p AnchoredSystemPath) HasPrefix(prefix AnchoredSystemPath) bool {
-	prefixLen := len(prefix)
-	pathLen := len(p)
-
-	if prefixLen > pathLen {
-		// Can't be a prefix if longer.
-		return false
-	} else if prefixLen == pathLen {
-		// Can be a prefix if they're equal, but otherwise no.
-		return p == prefix
-	}
-
-	// prefix is definitely shorter than p.
-	// We need to confirm that p[len(prefix)] is a system separator.
-	return strings.HasPrefix(p.ToString(), prefix.ToString()) && os.IsPathSeparator(p[prefixLen])
-}
diff --git a/cli/internal/turbopath/anchored_unix_path.go b/cli/internal/turbopath/anchored_unix_path.go
deleted file mode 100644
index e2fa579040ffd..0000000000000
--- a/cli/internal/turbopath/anchored_unix_path.go
+++ /dev/null
@@ -1,40 +0,0 @@
-package turbopath
-
-import (
-	"fmt"
-	"path"
-	"path/filepath"
-)
-
-// AnchoredUnixPath is a path stemming from a specified root using Unix `/` separators.
-type AnchoredUnixPath string
-
-// CheckedToAnchoredUnixPath inspects a string and determines if it is a relative path.
-func CheckedToAnchoredUnixPath(s string) (AnchoredUnixPath, error) {
-	if filepath.IsAbs(s) {
-		return "", fmt.Errorf("%v is not a relative path", s)
-	}
-	return AnchoredUnixPath(s), nil
-}
-
-// ToString returns a string representation of this Path.
-// Used for interfacing with APIs that require a string.
-func (p AnchoredUnixPath) ToString() string {
-	return string(p)
-}
-
-// ToSystemPath converts an AnchoredUnixPath to an AnchoredSystemPath.
-func (p AnchoredUnixPath) ToSystemPath() AnchoredSystemPath {
-	return AnchoredSystemPath(filepath.FromSlash(p.ToString()))
-}
-
-// ToUnixPath returns itself.
-func (p AnchoredUnixPath) ToUnixPath() AnchoredUnixPath {
-	return p
-}
-
-// Join appends relative path segments to this AnchoredUnixPath.
-func (p AnchoredUnixPath) Join(additional ...RelativeUnixPath) AnchoredUnixPath {
-	cast := RelativeUnixPathArray(additional)
-	return AnchoredUnixPath(path.Join(p.ToString(), path.Join(cast.ToStringArray()...)))
-}
diff --git a/cli/internal/turbopath/find_up.go b/cli/internal/turbopath/find_up.go
deleted file mode 100644
index bf7c39c9e43b8..0000000000000
--- a/cli/internal/turbopath/find_up.go
+++ /dev/null
@@ -1,50 +0,0 @@
-package turbopath
-
-import (
-	"os"
-	"path/filepath"
-)
-
-func hasFile(name, dir string) (bool, error) {
-	files, err := os.ReadDir(dir)
-	if err != nil {
-		return false, err
-	}
-
-	for _, f := range files {
-		if name == f.Name() {
-			return true, nil
-		}
-	}
-
-	return false, nil
-}
-
-func findupFrom(name, dir string) (string, error) {
-	for {
-		found, err := hasFile(name, dir)
-		if err != nil {
-			return "", err
-		}
-
-		if found {
-			return filepath.Join(dir, name), nil
-		}
-
-		parent := filepath.Dir(dir)
-		if parent == dir {
-			return "", nil
-		}
-
-		dir = parent
-	}
-}
-
-// FindupFrom recursively finds a file by walking up parents in the file tree
-// starting from a specific directory.
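-// A hypothetical usage sketch (paths illustrative): FindupFrom("turbo.json", "/repo/apps/web")
-// returns "/repo/turbo.json" when the file only exists at the repo root, and "" with a nil
-// error when no ancestor contains the file.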
-func FindupFrom(name, dir string) (string, error) {
-	return findupFrom(name, dir)
-}
diff --git a/cli/internal/turbopath/relative_system_path.go b/cli/internal/turbopath/relative_system_path.go
deleted file mode 100644
index d6115dbb539b1..0000000000000
--- a/cli/internal/turbopath/relative_system_path.go
+++ /dev/null
@@ -1,44 +0,0 @@
-package turbopath
-
-import (
-	"fmt"
-	"path/filepath"
-)
-
-// RelativeSystemPath is a relative path using system separators.
-type RelativeSystemPath string
-
-// CheckedToRelativeSystemPath inspects a string and determines if it is a relative path.
-func CheckedToRelativeSystemPath(s string) (RelativeSystemPath, error) {
-	if filepath.IsAbs(s) {
-		return "", fmt.Errorf("%v is not a relative path", s)
-	}
-	return RelativeSystemPath(filepath.Clean(s)), nil
-}
-
-// MakeRelativeSystemPath joins the given segments in a system-appropriate way
-func MakeRelativeSystemPath(segments ...string) RelativeSystemPath {
-	return RelativeSystemPath(filepath.Join(segments...))
-}
-
-// ToString returns a string representation of this Path.
-// Used for interfacing with APIs that require a string.
-func (p RelativeSystemPath) ToString() string {
-	return string(p)
-}
-
-// ToSystemPath returns itself.
-func (p RelativeSystemPath) ToSystemPath() RelativeSystemPath {
-	return p
-}
-
-// ToUnixPath converts from RelativeSystemPath to RelativeUnixPath.
-func (p RelativeSystemPath) ToUnixPath() RelativeUnixPath {
-	return RelativeUnixPath(filepath.ToSlash(p.ToString()))
-}
-
-// Join appends relative path segments to this RelativeSystemPath.
-func (p RelativeSystemPath) Join(additional ...RelativeSystemPath) RelativeSystemPath {
-	cast := RelativeSystemPathArray(additional)
-	return RelativeSystemPath(filepath.Join(p.ToString(), filepath.Join(cast.ToStringArray()...)))
-}
diff --git a/cli/internal/turbopath/relative_unix_path.go b/cli/internal/turbopath/relative_unix_path.go
deleted file mode 100644
index 05829e20c1e20..0000000000000
--- a/cli/internal/turbopath/relative_unix_path.go
+++ /dev/null
@@ -1,31 +0,0 @@
-package turbopath
-
-import (
-	"path"
-	"path/filepath"
-)
-
-// RelativeUnixPath is a relative path using Unix `/` separators.
-type RelativeUnixPath string
-
-// ToString returns a string representation of this Path.
-// Used for interfacing with APIs that require a string.
-func (p RelativeUnixPath) ToString() string {
-	return string(p)
-}
-
-// ToSystemPath converts a RelativeUnixPath to a RelativeSystemPath.
-func (p RelativeUnixPath) ToSystemPath() RelativeSystemPath {
-	return RelativeSystemPath(filepath.FromSlash(p.ToString()))
-}
-
-// ToUnixPath returns itself.
-func (p RelativeUnixPath) ToUnixPath() RelativeUnixPath {
-	return p
-}
-
-// Join appends relative path segments to this RelativeUnixPath.
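-// For example (illustrative): RelativeUnixPath("packages").Join(RelativeUnixPath("ui")) == "packages/ui".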
-func (p RelativeUnixPath) Join(additional ...RelativeUnixPath) RelativeUnixPath { - cast := RelativeUnixPathArray(additional) - return RelativeUnixPath(path.Join(p.ToString(), path.Join(cast.ToStringArray()...))) -} diff --git a/cli/internal/turbopath/turbopath.go b/cli/internal/turbopath/turbopath.go deleted file mode 100644 index f50b75f31c338..0000000000000 --- a/cli/internal/turbopath/turbopath.go +++ /dev/null @@ -1,112 +0,0 @@ -// Package turbopath teaches the Go type system about six -// different types of paths: -// - AbsoluteSystemPath -// - RelativeSystemPath -// - AnchoredSystemPath -// - AbsoluteUnixPath -// - RelativeUnixPath -// - AnchoredUnixPath -// -// Between these two things it is assumed that we will be able to -// reasonably describe file paths being used within the system and -// have the type system enforce correctness instead of relying upon -// runtime code to accomplish the task. -// -// Absolute paths are, "absolute, including volume root." They are not -// portable between System and Unix. -// -// Relative paths are simply arbitrary path segments using a particular -// path delimiter. They are portable between System and Unix. -// -// Anchored paths are, "absolute, starting at a particular root." -// They are not aware of *what* their anchor is. It could be a repository, -// an `os.dirFS`, a package, `cwd`, or more. They are stored *without* -// a preceding delimiter for compatibility with `io/fs`. They are portable -// between System and Unix. -// -// In some future world everything in here can be optimized out at compile time. -// Everything is either `string` or `[]string` -// -// Much of this is dreadfully repetitive because of intentional -// limitations in the Go type system. -package turbopath - -// AnchoredUnixPathArray is a type used to enable transform operations on arrays of paths. -type AnchoredUnixPathArray []AnchoredUnixPath - -// RelativeSystemPathArray is a type used to enable transform operations on arrays of paths. -type RelativeSystemPathArray []RelativeSystemPath - -// RelativeUnixPathArray is a type used to enable transform operations on arrays of paths. -type RelativeUnixPathArray []RelativeUnixPath - -// ToStringArray enables ergonomic operations on arrays of RelativeSystemPath -func (source RelativeSystemPathArray) ToStringArray() []string { - output := make([]string, len(source)) - for index, path := range source { - output[index] = path.ToString() - } - return output -} - -// ToStringArray enables ergonomic operations on arrays of RelativeUnixPath -func (source RelativeUnixPathArray) ToStringArray() []string { - output := make([]string, len(source)) - for index, path := range source { - output[index] = path.ToString() - } - return output -} - -// ToSystemPathArray enables ergonomic operations on arrays of AnchoredUnixPath -func (source AnchoredUnixPathArray) ToSystemPathArray() []AnchoredSystemPath { - output := make([]AnchoredSystemPath, len(source)) - for index, path := range source { - output[index] = path.ToSystemPath() - } - return output -} - -// The following methods exist to import a path string and cast it to the appropriate -// type. They exist to communicate intent and make it explicit that this is an -// intentional action, not a "helpful" insertion by the IDE. -// -// This is intended to map closely to the `unsafe` keyword, without the denotative -// meaning of `unsafe` in English. 
-// These are "trust me, I've checked it" places; they mark the places where we smuggle
-// paths from outside the world of safe path handling into the world where we carefully
-// consider the path to ensure safety.
-
-// AbsoluteSystemPathFromUpstream takes a path string and casts it to an
-// AbsoluteSystemPath without checking. If the input to this function is
-// not an AbsoluteSystemPath it will result in downstream errors.
-func AbsoluteSystemPathFromUpstream(path string) AbsoluteSystemPath {
-	return AbsoluteSystemPath(path)
-}
-
-// AnchoredSystemPathFromUpstream takes a path string and casts it to an
-// AnchoredSystemPath without checking. If the input to this function is
-// not an AnchoredSystemPath it will result in downstream errors.
-func AnchoredSystemPathFromUpstream(path string) AnchoredSystemPath {
-	return AnchoredSystemPath(path)
-}
-
-// AnchoredUnixPathFromUpstream takes a path string and casts it to an
-// AnchoredUnixPath without checking. If the input to this function is
-// not an AnchoredUnixPath it will result in downstream errors.
-func AnchoredUnixPathFromUpstream(path string) AnchoredUnixPath {
-	return AnchoredUnixPath(path)
-}
-
-// RelativeSystemPathFromUpstream takes a path string and casts it to a
-// RelativeSystemPath without checking. If the input to this function is
-// not a RelativeSystemPath it will result in downstream errors.
-func RelativeSystemPathFromUpstream(path string) RelativeSystemPath {
-	return RelativeSystemPath(path)
-}
-
-// RelativeUnixPathFromUpstream takes a path string and casts it to a
-// RelativeUnixPath without checking. If the input to this function is
-// not a RelativeUnixPath it will result in downstream errors.
-func RelativeUnixPathFromUpstream(path string) RelativeUnixPath {
-	return RelativeUnixPath(path)
-}
diff --git a/cli/internal/turbostate/turbostate.go b/cli/internal/turbostate/turbostate.go
deleted file mode 100644
index 40e8244d9a7c3..0000000000000
--- a/cli/internal/turbostate/turbostate.go
+++ /dev/null
@@ -1,130 +0,0 @@
-// Package turbostate holds all of the state given from the Rust CLI
-// that is necessary to execute turbo. We transfer this state from Rust
-// to Go via a JSON payload.
-package turbostate
-
-import (
-	"github.com/vercel/turbo/cli/internal/util"
-)
-
-// DaemonPayload is the extra flags and command that are
-// passed for the `daemon` subcommand
-type DaemonPayload struct {
-	IdleTimeout string `json:"idle_time"`
-	JSON        bool   `json:"json"`
-}
-
-// PrunePayload is the extra flags passed for the `prune` subcommand
-type PrunePayload struct {
-	Scope     []string `json:"scope"`
-	Docker    bool     `json:"docker"`
-	OutputDir string   `json:"output_dir"`
-}
-
-// RunPayload is the extra flags passed for the `run` subcommand
-type RunPayload struct {
-	CacheDir           string       `json:"cache_dir"`
-	CacheWorkers       int          `json:"cache_workers"`
-	Concurrency        string       `json:"concurrency"`
-	ContinueExecution  bool         `json:"continue_execution"`
-	DryRun             string       `json:"dry_run"`
-	Filter             []string     `json:"filter"`
-	Force              bool         `json:"force"`
-	FrameworkInference bool         `json:"framework_inference"`
-	GlobalDeps         []string     `json:"global_deps"`
-	EnvMode            util.EnvMode `json:"env_mode"`
-	// NOTE: Graph has three effective states that are modeled using a *string:
-	// nil -> no flag passed
-	// "" -> flag passed but no file name attached: print to stdout
-	// "foo" -> flag passed and file name attached: emit to file
-	// The mirror for this in Rust is `Option<String>` with the default value
-	// for the flag being `Some("")`.
- Graph *string `json:"graph"` - Ignore []string `json:"ignore"` - IncludeDependencies bool `json:"include_dependencies"` - NoCache bool `json:"no_cache"` - Daemon bool `json:"daemon"` - NoDaemon bool `json:"no_daemon"` - NoDeps bool `json:"no_deps"` - Only bool `json:"only"` - OutputLogs string `json:"output_logs"` - LogOrder string `json:"log_order"` - PassThroughArgs []string `json:"pass_through_args"` - Parallel bool `json:"parallel"` - Profile string `json:"profile"` - RemoteOnly bool `json:"remote_only"` - Scope []string `json:"scope"` - Since string `json:"since"` - SinglePackage bool `json:"single_package"` - Summarize bool `json:"summarize"` - Tasks []string `json:"tasks"` - PkgInferenceRoot string `json:"pkg_inference_root"` - LogPrefix string `json:"log_prefix"` - ExperimentalSpaceID string `json:"experimental_space_id"` - ExperimentalRustCodepath bool `json:"experimental_rust_codepath"` -} - -// Command consists of the data necessary to run a command. -// Only one of these fields should be initialized at a time. -type Command struct { - Daemon *DaemonPayload `json:"daemon"` - Prune *PrunePayload `json:"prune"` - Run *RunPayload `json:"run"` -} - -// ParsedArgsFromRust are the parsed command line arguments passed -// from the Rust shim -type ParsedArgsFromRust struct { - API string `json:"api"` - Color bool `json:"color"` - CPUProfile string `json:"cpu_profile"` - CWD string `json:"cwd"` - Heap string `json:"heap"` - Login string `json:"login"` - NoColor bool `json:"no_color"` - Preflight bool `json:"preflight"` - RemoteCacheTimeout uint64 `json:"remote_cache_timeout"` - Team string `json:"team"` - Token string `json:"token"` - Trace string `json:"trace"` - Verbosity int `json:"verbosity"` - TestRun bool `json:"test_run"` - Command Command `json:"command"` -} - -// TaskHashTracker stores the hashes calculated in Rust -type TaskHashTracker struct { - PackageTaskHashes map[string]string `json:"package_task_hashes"` -} - -// ExecutionState is the entire state of a turbo execution that is passed from the Rust shim. -type ExecutionState struct { - Config Config `json:"config"` - APIClientConfig APIClientConfig `json:"api_client_config"` - SpacesAPIClientConfig APIClientConfig `json:"spaces_api_client_config"` - PackageManager string `json:"package_manager"` - CLIArgs ParsedArgsFromRust `json:"cli_args"` -} - -// Config holds the resolved configuration from the combination of all sources. 
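-// A hypothetical resolved payload (field values illustrative, not from a real run):
-//
-//	{"apiUrl": "https://api.example.com", "loginUrl": "https://login.example.com",
-//	 "teamSlug": "acme", "teamId": "team_123", "token": "<redacted>",
-//	 "signature": false, "preflight": false, "timeout": 30, "enabled": null}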
-type Config struct {
-	APIURL    string `json:"apiUrl"`
-	LoginURL  string `json:"loginUrl"`
-	TeamSlug  string `json:"teamSlug"`
-	TeamID    string `json:"teamId"`
-	Token     string `json:"token"`
-	Signature bool   `json:"signature"`
-	Preflight bool   `json:"preflight"`
-	Timeout   uint64 `json:"timeout"`
-	Enabled   *bool  `json:"enabled"`
-}
-
-// APIClientConfig holds the authentication and endpoint details for the API client
-type APIClientConfig struct {
-	Token        string `json:"token"`
-	TeamID       string `json:"team_id"`
-	TeamSlug     string `json:"team_slug"`
-	APIURL       string `json:"api_url"`
-	UsePreflight bool   `json:"use_preflight"`
-	Timeout      uint64 `json:"timeout"`
-}
diff --git a/cli/internal/ui/charset.go b/cli/internal/ui/charset.go
deleted file mode 100644
index 0207c10ec2c6f..0000000000000
--- a/cli/internal/ui/charset.go
+++ /dev/null
@@ -1,3 +0,0 @@
-package ui
-
-var charset = []string{" ", "> ", ">> ", ">>>"}
diff --git a/cli/internal/ui/colors.go b/cli/internal/ui/colors.go
deleted file mode 100644
index 4b2eccd7e262a..0000000000000
--- a/cli/internal/ui/colors.go
+++ /dev/null
@@ -1,54 +0,0 @@
-package ui
-
-import (
-	"os"
-
-	"github.com/fatih/color"
-)
-
-type ColorMode int
-
-const (
-	ColorModeUndefined ColorMode = iota + 1
-	ColorModeSuppressed
-	ColorModeForced
-)
-
-func GetColorModeFromEnv() ColorMode {
-	// The FORCE_COLOR behavior and accepted values are taken from the supports-color NodeJS package:
-	// The accepted values as documented are "0" to disable, and "1", "2", or "3" to force-enable color
-	// at the specified support level (1 = 16 colors, 2 = 256 colors, 3 = 16M colors).
-	// We don't currently use the level for anything specific, and just treat things as on and off.
-	//
-	// Note: while "false" and "true" aren't documented, the library coerces these values to 0 and 1
-	// respectively, so that behavior is reproduced here as well.
-	// https://www.npmjs.com/package/supports-color
-
-	switch forceColor := os.Getenv("FORCE_COLOR"); {
-	case forceColor == "false" || forceColor == "0":
-		return ColorModeSuppressed
-	case forceColor == "true" || forceColor == "1" || forceColor == "2" || forceColor == "3":
-		return ColorModeForced
-	default:
-		return ColorModeUndefined
-	}
-}
-
-func applyColorMode(colorMode ColorMode) ColorMode {
-	switch colorMode {
-	case ColorModeForced:
-		color.NoColor = false
-	case ColorModeSuppressed:
-		color.NoColor = true
-	case ColorModeUndefined:
-	default:
-		// color.NoColor already gets its default value based on
-		// isTTY and/or the presence of the NO_COLOR env variable.
-	}
-
-	if color.NoColor {
-		return ColorModeSuppressed
-	}
-	return ColorModeForced
-}
diff --git a/cli/internal/ui/spinner.go b/cli/internal/ui/spinner.go
deleted file mode 100644
index 6e47d2d689c8b..0000000000000
--- a/cli/internal/ui/spinner.go
+++ /dev/null
@@ -1,80 +0,0 @@
-// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
-// SPDX-License-Identifier: Apache-2.0
-package ui
-
-import (
-	"fmt"
-	"io"
-	"os"
-	"time"
-
-	"github.com/briandowns/spinner"
-)
-
-// startStopper is the interface to interact with the spinner.
-type startStopper interface {
-	Start()
-	Stop()
-}
-
-// Spinner represents an indicator that an asynchronous operation is taking place.
-//
-// For short operations, less than 4 seconds, display only the spinner with the Start and Stop methods.
-// For longer operations, update the label between Start and Stop calls to surface intermediate progress.
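-// A minimal usage sketch (label text illustrative):
-//
-//	s := NewSpinner(os.Stderr)
-//	s.Start("Resolving packages")
-//	// ... long-running work ...
-//	s.Stop("Resolved packages\n")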
-type Spinner struct { - spin startStopper -} - -// NewSpinner returns a spinner that outputs to w. -func NewSpinner(w io.Writer) *Spinner { - interval := 125 * time.Millisecond - if os.Getenv("CI") == "true" { - interval = 30 * time.Second - } - s := spinner.New(charset, interval, spinner.WithHiddenCursor(true)) - s.Writer = w - s.Color("faint") - return &Spinner{ - spin: s, - } -} - -// Start starts the spinner suffixed with a label. -func (s *Spinner) Start(label string) { - s.suffix(fmt.Sprintf(" %s", label)) - s.spin.Start() -} - -// Stop stops the spinner and replaces it with a label. -func (s *Spinner) Stop(label string) { - s.finalMSG(fmt.Sprint(label)) - s.spin.Stop() -} - -func (s *Spinner) lock() { - if spinner, ok := s.spin.(*spinner.Spinner); ok { - spinner.Lock() - } -} - -func (s *Spinner) unlock() { - if spinner, ok := s.spin.(*spinner.Spinner); ok { - spinner.Unlock() - } -} - -func (s *Spinner) suffix(label string) { - s.lock() - defer s.unlock() - if spinner, ok := s.spin.(*spinner.Spinner); ok { - spinner.Suffix = label - } -} - -func (s *Spinner) finalMSG(label string) { - s.lock() - defer s.unlock() - if spinner, ok := s.spin.(*spinner.Spinner); ok { - spinner.FinalMSG = label - } -} diff --git a/cli/internal/ui/term/cursor.go b/cli/internal/ui/term/cursor.go deleted file mode 100644 index 253f0430c7881..0000000000000 --- a/cli/internal/ui/term/cursor.go +++ /dev/null @@ -1,73 +0,0 @@ -// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -// SPDX-License-Identifier: Apache-2.0 - -// Package cursor provides functionality to interact with the terminal cursor. -package cursor - -import ( - "io" - "os" - - "github.com/AlecAivazis/survey/v2/terminal" -) - -type cursor interface { - Up(n int) error - Down(n int) error - Hide() error - Show() error -} - -// fakeFileWriter is a terminal.FileWriter. -// If the underlying writer w does not implement Fd() then a dummy value is returned. -type fakeFileWriter struct { - w io.Writer -} - -// Write delegates to the internal writer. -func (w *fakeFileWriter) Write(p []byte) (int, error) { - return w.w.Write(p) -} - -// Fd is required to be implemented to satisfy the terminal.FileWriter interface. -// If the underlying writer is a file, like os.Stdout, then invoke it. Otherwise, this method allows us to create -// a Cursor that can write to any io.Writer like a bytes.Buffer by returning a dummy value. -func (w *fakeFileWriter) Fd() uintptr { - if v, ok := w.w.(terminal.FileWriter); ok { - return v.Fd() - } - return 0 -} - -// Cursor represents the terminal's cursor. -type Cursor struct { - c cursor -} - -// New creates a new cursor that writes to stderr. -func New() *Cursor { - return &Cursor{ - c: &terminal.Cursor{ - Out: os.Stderr, - }, - } -} - -// EraseLine erases a line from a FileWriter. -func EraseLine(fw terminal.FileWriter) { - terminal.EraseLine(fw, terminal.ERASE_LINE_ALL) -} - -// EraseLinesAbove erases a line and moves the cursor up from fw, repeated n times. -func EraseLinesAbove(fw terminal.FileWriter, n int) { - c := Cursor{ - c: &terminal.Cursor{ - Out: fw, - }, - } - for i := 0; i < n; i += 1 { - EraseLine(fw) - c.c.Up(1) - } - EraseLine(fw) // Erase the nth line as well. -} diff --git a/cli/internal/ui/term/cursor_test.go b/cli/internal/ui/term/cursor_test.go deleted file mode 100644 index 270ebe8b9fcbb..0000000000000 --- a/cli/internal/ui/term/cursor_test.go +++ /dev/null @@ -1,43 +0,0 @@ -//go:build !windows -// +build !windows - -// Copyright Amazon.com, Inc. or its affiliates. 
-// All Rights Reserved.
-// SPDX-License-Identifier: Apache-2.0
-package cursor
-
-import (
-	"io"
-	"strings"
-	"testing"
-
-	"github.com/AlecAivazis/survey/v2/terminal"
-	"github.com/stretchr/testify/require"
-)
-
-func TestEraseLine(t *testing.T) {
-	testCases := map[string]struct {
-		inWriter    func(writer io.Writer) terminal.FileWriter
-		shouldErase bool
-	}{
-		"should erase a line if the writer is a file": {
-			inWriter: func(writer io.Writer) terminal.FileWriter {
-				return &fakeFileWriter{w: writer}
-			},
-			shouldErase: true,
-		},
-	}
-
-	for name, tc := range testCases {
-		t.Run(name, func(t *testing.T) {
-			// GIVEN
-			buf := new(strings.Builder)
-
-			// WHEN
-			EraseLine(tc.inWriter(buf))
-
-			// THEN
-			isErased := buf.String() != ""
-			require.Equal(t, tc.shouldErase, isErased)
-		})
-	}
-}
diff --git a/cli/internal/ui/ui.go b/cli/internal/ui/ui.go
deleted file mode 100644
index f4deaf7e80ad1..0000000000000
--- a/cli/internal/ui/ui.go
+++ /dev/null
@@ -1,89 +0,0 @@
-package ui
-
-import (
-	"fmt"
-	"io"
-	"math"
-	"os"
-	"regexp"
-	"strings"
-
-	"github.com/fatih/color"
-	"github.com/mattn/go-isatty"
-	"github.com/vercel/turbo/cli/internal/ci"
-)
-
-const ansiEscapeStr = "[\u001B\u009B][[\\]()#;?]*(?:(?:(?:[a-zA-Z\\d]*(?:;[a-zA-Z\\d]*)*)?\u0007)|(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PRZcf-ntqry=><~]))"
-
-// IsTTY is true when stdout appears to be a tty
-var IsTTY = isatty.IsTerminal(os.Stdout.Fd()) || isatty.IsCygwinTerminal(os.Stdout.Fd())
-
-// IsCI is true when we appear to be running in a non-interactive context.
-var IsCI = !IsTTY || ci.IsCi()
-var gray = color.New(color.Faint)
-var bold = color.New(color.Bold)
-var ERROR_PREFIX = color.New(color.Bold, color.FgRed, color.ReverseVideo).Sprint(" ERROR ")
-var WARNING_PREFIX = color.New(color.Bold, color.FgYellow, color.ReverseVideo).Sprint(" WARNING ")
-
-// InfoPrefix is a colored string for info level log messages
-var InfoPrefix = color.New(color.Bold, color.FgWhite, color.ReverseVideo).Sprint(" INFO ")
-
-var ansiRegex = regexp.MustCompile(ansiEscapeStr)
-
-// Dim prints out dimmed text
-func Dim(str string) string {
-	return gray.Sprint(str)
-}
-
-// Bold prints out bolded text
-func Bold(str string) string {
-	return bold.Sprint(str)
-}
-
-// Adapted from go-rainbow
-// Copyright (c) 2017 Raphael Amorim
-// Source: https://github.com/raphamorim/go-rainbow
-// SPDX-License-Identifier: MIT
-func rgb(i int) (int, int, int) {
-	var f = 0.275
-
-	return int(math.Sin(f*float64(i)+4*math.Pi/3)*127 + 128),
-		// int(math.Sin(f*float64(i)+2*math.Pi/3)*127 + 128),
-		int(45),
-		int(math.Sin(f*float64(i)+0)*127 + 128)
-}
-
-// Rainbow returns a formatted, colorized string ready to print to the shell/terminal
-//
-// Adapted from go-rainbow
-// Copyright (c) 2017 Raphael Amorim
-// Source: https://github.com/raphamorim/go-rainbow
-// SPDX-License-Identifier: MIT
-func Rainbow(text string) string {
-	var rainbowStr []string
-	for index, value := range text {
-		r, g, b := rgb(index)
-		str := fmt.Sprintf("\033[1m\033[38;2;%d;%d;%dm%c\033[0m\033[0;1m", r, g, b, value)
-		rainbowStr = append(rainbowStr, str)
-	}
-
-	return strings.Join(rainbowStr, "")
-}
-
-type stripAnsiWriter struct {
-	wrappedWriter io.Writer
-}
-
-func (into *stripAnsiWriter) Write(p []byte) (int, error) {
-	n, err := into.wrappedWriter.Write(ansiRegex.ReplaceAll(p, []byte{}))
-	if err != nil {
-		// The number of bytes returned here isn't directly related to the input bytes
-		// if ansi color codes were being stripped out, but we are counting on Stdout.Write
-		// not failing under typical operation as well.
-		return n, err
-	}
-
-	// Write must return a non-nil error if it returns n < len(p). Consequently, if the
-	// wrappedWriter.Write call succeeded we will return len(p) as the number of bytes
-	// written.
-	return len(p), nil
-}
diff --git a/cli/internal/ui/ui_factory.go b/cli/internal/ui/ui_factory.go
deleted file mode 100644
index 6fb7daaec3848..0000000000000
--- a/cli/internal/ui/ui_factory.go
+++ /dev/null
@@ -1,191 +0,0 @@
-package ui
-
-import (
-	"bufio"
-	"errors"
-	"fmt"
-	"io"
-	"os"
-	"os/signal"
-	"strings"
-
-	"github.com/bgentry/speakeasy"
-	"github.com/fatih/color"
-	"github.com/mattn/go-isatty"
-	"github.com/mitchellh/cli"
-)
-
-// Factory provides an interface for creating cli.Ui instances from input, output and error IOs
-type Factory interface {
-	Build(in io.Reader, out io.Writer, err io.Writer) cli.Ui
-}
-
-// BasicUIFactory provides a method for creating a cli.BasicUi from input, output and error IOs
-type BasicUIFactory struct {
-}
-
-// BasicUI is an implementation of Ui that just outputs to the given
-// writer. This UI is not threadsafe by default, but you can wrap it
-// in a ConcurrentUi to make it safe.
-//
-// Inlined from cli.Ui to fuse newlines to lines being logged. This is
-// probably not the optimal way to do it, but it works for now.
-type BasicUI struct {
-	Reader      io.Reader
-	Writer      io.Writer
-	ErrorWriter io.Writer
-}
-
-// Ask implements ui.Cli.Ask for BasicUi
-func (u *BasicUI) Ask(query string) (string, error) {
-	return u.ask(query, false)
-}
-
-// AskSecret implements ui.Cli.AskSecret for BasicUi
-func (u *BasicUI) AskSecret(query string) (string, error) {
-	return u.ask(query, true)
-}
-
-func (u *BasicUI) ask(query string, secret bool) (string, error) {
-	if _, err := fmt.Fprint(u.Writer, query+" "); err != nil {
-		return "", err
-	}
-
-	// Register for interrupts so that we can catch them and immediately
-	// return...
-	sigCh := make(chan os.Signal, 1)
-	signal.Notify(sigCh, os.Interrupt)
-	defer signal.Stop(sigCh)
-
-	// Ask for input in a goroutine so that we can abandon the read on interrupt.
-	errCh := make(chan error, 1)
-	lineCh := make(chan string, 1)
-	go func() {
-		var line string
-		var err error
-		if secret && isatty.IsTerminal(os.Stdin.Fd()) {
-			line, err = speakeasy.Ask("")
-		} else {
-			r := bufio.NewReader(u.Reader)
-			line, err = r.ReadString('\n')
-		}
-		if err != nil {
-			errCh <- err
-			return
-		}
-
-		lineCh <- strings.TrimRight(line, "\r\n")
-	}()
-
-	select {
-	case err := <-errCh:
-		return "", err
-	case line := <-lineCh:
-		return line, nil
-	case <-sigCh:
-		// Print a newline so that any further output starts properly
-		// on a new line.
- fmt.Fprintln(u.Writer) - - return "", errors.New("interrupted") - } -} - -// Error implements ui.Cli.Error for BasicUi -func (u *BasicUI) Error(message string) { - w := u.Writer - if u.ErrorWriter != nil { - w = u.ErrorWriter - } - - fmt.Fprintf(w, "%v\n", message) -} - -// Info implements ui.Cli.Info for BasicUi -func (u *BasicUI) Info(message string) { - u.Output(message) -} - -// Output implements ui.Cli.Output for BasicUi -func (u *BasicUI) Output(message string) { - fmt.Fprintf(u.Writer, "%v\n", message) -} - -// Warn implements ui.Cli.Warn for BasicUi -func (u *BasicUI) Warn(message string) { - u.Error(message) -} - -// Build builds a cli.BasicUi from input, output and error IOs -func (factory *BasicUIFactory) Build(in io.Reader, out io.Writer, err io.Writer) cli.Ui { - return &BasicUI{ - Reader: in, - Writer: out, - ErrorWriter: err, - } -} - -// ColoredUIFactory provides a method for creating a cli.ColoredUi from input, output and error IOs -type ColoredUIFactory struct { - ColorMode ColorMode - Base Factory -} - -// Build builds a cli.ColoredUi from input, output and error IOs -func (factory *ColoredUIFactory) Build(in io.Reader, out io.Writer, err io.Writer) cli.Ui { - factory.ColorMode = applyColorMode(factory.ColorMode) - - var outWriter, errWriter io.Writer - - if factory.ColorMode == ColorModeSuppressed { - outWriter = &stripAnsiWriter{wrappedWriter: out} - errWriter = &stripAnsiWriter{wrappedWriter: err} - } else { - outWriter = out - errWriter = err - } - - return &cli.ColoredUi{ - Ui: factory.Base.Build(in, outWriter, errWriter), - OutputColor: cli.UiColorNone, - InfoColor: cli.UiColorNone, - WarnColor: cli.UiColor{Code: int(color.FgYellow), Bold: false}, - ErrorColor: cli.UiColorRed, - } -} - -// ConcurrentUIFactory provides a method for creating a cli.ConcurrentUi from input, output and error IOs -type ConcurrentUIFactory struct { - Base Factory -} - -// Build builds a cli.ConcurrentUi from input, output and error IOs -func (factory *ConcurrentUIFactory) Build(in io.Reader, out io.Writer, err io.Writer) cli.Ui { - return &cli.ConcurrentUi{ - Ui: factory.Base.Build(in, out, err), - } -} - -// PrefixedUIFactory provides a method for creating a cli.PrefixedUi from input, output and error IOs -type PrefixedUIFactory struct { - Base Factory - AskPrefix string - AskSecretPrefix string - OutputPrefix string - InfoPrefix string - ErrorPrefix string - WarnPrefix string -} - -// Build builds a cli.PrefixedUi from input, output and error IOs -func (factory *PrefixedUIFactory) Build(in io.Reader, out io.Writer, err io.Writer) cli.Ui { - return &cli.PrefixedUi{ - AskPrefix: factory.AskPrefix, - AskSecretPrefix: factory.AskSecretPrefix, - OutputPrefix: factory.OutputPrefix, - InfoPrefix: factory.InfoPrefix, - ErrorPrefix: factory.ErrorPrefix, - WarnPrefix: factory.WarnPrefix, - Ui: factory.Base.Build(in, out, err), - } -} diff --git a/cli/internal/util/browser/open.go b/cli/internal/util/browser/open.go deleted file mode 100644 index a6171e93396b6..0000000000000 --- a/cli/internal/util/browser/open.go +++ /dev/null @@ -1,37 +0,0 @@ -package browser - -import ( - "fmt" - "os/exec" - "runtime" -) - -// OpenBrowser attempts to interactively open a browser window at the given URL -func OpenBrowser(url string) error { - var err error - - switch runtime.GOOS { - case "linux": - if posixBinExists("wslview") { - err = exec.Command("wslview", url).Start() - } else { - err = exec.Command("xdg-open", url).Start() - } - case "windows": - err = exec.Command("rundll32", 
"url.dll,FileProtocolHandler", url).Start() - case "darwin": - err = exec.Command("open", url).Start() - default: - err = fmt.Errorf("unsupported platform") - } - if err != nil { - return err - } - return nil -} - -func posixBinExists(bin string) bool { - err := exec.Command("which", bin).Run() - // we mostly don't care what the error is, it suggests the binary is not usable - return err == nil -} diff --git a/cli/internal/util/closer.go b/cli/internal/util/closer.go deleted file mode 100644 index 996760b31b40e..0000000000000 --- a/cli/internal/util/closer.go +++ /dev/null @@ -1,15 +0,0 @@ -package util - -// CloseAndIgnoreError is a utility to tell our linter that we explicitly deem it okay -// to not check a particular error on closing of a resource. -// -// We use `errcheck` as a linter, which is super-opinionated about checking errors, -// even in places where we don't necessarily care to check the error. -// -// `golangci-lint` has a default ignore list for this lint problem (EXC0001) which -// can be used to sidestep this problem but it's possibly a little too-heavy-handed -// in exclusion. At the expense of discoverability, this utility function forces -// opt-in to ignoring errors on closing of things that can be `Close`d. -func CloseAndIgnoreError(closer interface{ Close() error }) { - _ = closer.Close() -} diff --git a/cli/internal/util/cmd.go b/cli/internal/util/cmd.go deleted file mode 100644 index ae79aa071c508..0000000000000 --- a/cli/internal/util/cmd.go +++ /dev/null @@ -1,24 +0,0 @@ -package util - -import ( - "bytes" - - "github.com/spf13/cobra" -) - -// ExitCodeError is a specific error that is returned by the command to specify the exit code -type ExitCodeError struct { - ExitCode int -} - -func (e *ExitCodeError) Error() string { return "exit code error" } - -// HelpForCobraCmd returns the help string for a given command -// Note that this overwrites the output for the command -func HelpForCobraCmd(cmd *cobra.Command) string { - f := cmd.HelpFunc() - buf := bytes.NewBufferString("") - cmd.SetOut(buf) - f(cmd, []string{}) - return buf.String() -} diff --git a/cli/internal/util/filter/filter.go b/cli/internal/util/filter/filter.go deleted file mode 100644 index fbc475d9ac275..0000000000000 --- a/cli/internal/util/filter/filter.go +++ /dev/null @@ -1,133 +0,0 @@ -// Copyright (c) 2015-2020 InfluxData Inc. MIT License (MIT) -// https://github.com/influxdata/telegraf -package filter - -import ( - "strings" - - "github.com/gobwas/glob" -) - -type Filter interface { - Match(string) bool -} - -// Compile takes a list of string filters and returns a Filter interface -// for matching a given string against the filter list. The filter list -// supports glob matching too, ie: -// -// f, _ := Compile([]string{"cpu", "mem", "net*"}) -// f.Match("cpu") // true -// f.Match("network") // true -// f.Match("memory") // false -func Compile(filters []string) (Filter, error) { - // return if there is nothing to compile - if len(filters) == 0 { - return nil, nil - } - - // check if we can compile a non-glob filter - noGlob := true - for _, filter := range filters { - if hasMeta(filter) { - noGlob = false - break - } - } - - switch { - case noGlob: - // return non-globbing filter if not needed. - return compileFilterNoGlob(filters), nil - case len(filters) == 1: - return glob.Compile(filters[0]) - default: - return glob.Compile("{" + strings.Join(filters, ",") + "}") - } -} - -// hasMeta reports whether path contains any magic glob characters. 
-func hasMeta(s string) bool { - return strings.ContainsAny(s, "*?[") -} - -type filter struct { - m map[string]struct{} -} - -func (f *filter) Match(s string) bool { - _, ok := f.m[s] - return ok -} - -type filtersingle struct { - s string -} - -func (f *filtersingle) Match(s string) bool { - return f.s == s -} - -func compileFilterNoGlob(filters []string) Filter { - if len(filters) == 1 { - return &filtersingle{s: filters[0]} - } - out := filter{m: make(map[string]struct{})} - for _, filter := range filters { - out.m[filter] = struct{}{} - } - return &out -} - -type IncludeExcludeFilter struct { - include Filter - exclude Filter - includeDefault bool - excludeDefault bool -} - -func NewIncludeExcludeFilter( - include []string, - exclude []string, -) (Filter, error) { - return NewIncludeExcludeFilterDefaults(include, exclude, true, false) -} - -func NewIncludeExcludeFilterDefaults( - include []string, - exclude []string, - includeDefault bool, - excludeDefault bool, -) (Filter, error) { - in, err := Compile(include) - if err != nil { - return nil, err - } - - ex, err := Compile(exclude) - if err != nil { - return nil, err - } - - return &IncludeExcludeFilter{in, ex, includeDefault, excludeDefault}, nil -} - -func (f *IncludeExcludeFilter) Match(s string) bool { - if f.include != nil { - if !f.include.Match(s) { - return false - } - } else if !f.includeDefault { - return false - } - - if f.exclude != nil { - if f.exclude.Match(s) { - return false - } - } else if f.excludeDefault { - return false - } - - return true -} diff --git a/cli/internal/util/filter/filter_test.go b/cli/internal/util/filter/filter_test.go deleted file mode 100644 index 727a4b685d9ff..0000000000000 --- a/cli/internal/util/filter/filter_test.go +++ /dev/null @@ -1,116 +0,0 @@ -// Copyright (c) 2015-2020 InfluxData Inc. 
MIT License (MIT) -// https://github.com/influxdata/telegraf -package filter - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestCompile(t *testing.T) { - f, err := Compile([]string{}) - assert.NoError(t, err) - assert.Nil(t, f) - - f, err = Compile([]string{"cpu"}) - assert.NoError(t, err) - assert.True(t, f.Match("cpu")) - assert.False(t, f.Match("cpu0")) - assert.False(t, f.Match("mem")) - - f, err = Compile([]string{"cpu*"}) - assert.NoError(t, err) - assert.True(t, f.Match("cpu")) - assert.True(t, f.Match("cpu0")) - assert.False(t, f.Match("mem")) - - f, err = Compile([]string{"cpu", "mem"}) - assert.NoError(t, err) - assert.True(t, f.Match("cpu")) - assert.False(t, f.Match("cpu0")) - assert.True(t, f.Match("mem")) - - f, err = Compile([]string{"cpu", "mem", "net*"}) - assert.NoError(t, err) - assert.True(t, f.Match("cpu")) - assert.False(t, f.Match("cpu0")) - assert.True(t, f.Match("mem")) - assert.True(t, f.Match("network")) -} - -func TestIncludeExclude(t *testing.T) { - tags := []string{} - labels := []string{"best", "com_influxdata", "timeseries", "com_influxdata_telegraf", "ever"} - - filter, err := NewIncludeExcludeFilter([]string{}, []string{"com_influx*"}) - if err != nil { - t.Fatalf("Failed to create include/exclude filter - %v", err) - } - - for i := range labels { - if filter.Match(labels[i]) { - tags = append(tags, labels[i]) - } - } - - assert.Equal(t, []string{"best", "timeseries", "ever"}, tags) -} - -var benchbool bool - -func BenchmarkFilterSingleNoGlobFalse(b *testing.B) { - f, _ := Compile([]string{"cpu"}) - var tmp bool - for n := 0; n < b.N; n++ { - tmp = f.Match("network") - } - benchbool = tmp -} - -func BenchmarkFilterSingleNoGlobTrue(b *testing.B) { - f, _ := Compile([]string{"cpu"}) - var tmp bool - for n := 0; n < b.N; n++ { - tmp = f.Match("cpu") - } - benchbool = tmp -} - -func BenchmarkFilter(b *testing.B) { - f, _ := Compile([]string{"cpu", "mem", "net*"}) - var tmp bool - for n := 0; n < b.N; n++ { - tmp = f.Match("network") - } - benchbool = tmp -} - -func BenchmarkFilterNoGlob(b *testing.B) { - f, _ := Compile([]string{"cpu", "mem", "net"}) - var tmp bool - for n := 0; n < b.N; n++ { - tmp = f.Match("net") - } - benchbool = tmp -} - -func BenchmarkFilter2(b *testing.B) { - f, _ := Compile([]string{"aa", "bb", "c", "ad", "ar", "at", "aq", - "aw", "az", "axxx", "ab", "cpu", "mem", "net*"}) - var tmp bool - for n := 0; n < b.N; n++ { - tmp = f.Match("network") - } - benchbool = tmp -} - -func BenchmarkFilter2NoGlob(b *testing.B) { - f, _ := Compile([]string{"aa", "bb", "c", "ad", "ar", "at", "aq", - "aw", "az", "axxx", "ab", "cpu", "mem", "net"}) - var tmp bool - for n := 0; n < b.N; n++ { - tmp = f.Match("net") - } - benchbool = tmp -} diff --git a/cli/internal/util/graph.go b/cli/internal/util/graph.go deleted file mode 100644 index 89de18c7f1a28..0000000000000 --- a/cli/internal/util/graph.go +++ /dev/null @@ -1,35 +0,0 @@ -package util - -import ( - "fmt" - "strings" - - "github.com/pyr-sh/dag" -) - -// ValidateGraph checks that a given DAG has no cycles and no self-referential edges. -// We differ from the underlying DAG Validate method in that we allow multiple roots. -func ValidateGraph(graph *dag.AcyclicGraph) error { - // We use Cycles instead of Validate because - // our DAG has multiple roots (entrypoints). - // Validate mandates that there is only a single root node. 
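-// For example, a build graph in which apps A and B both depend on a -// shared library C is acyclic and has two roots (A and B); Cycles() -// accepts it, while a single-root Validate would reject it.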
- cycles := graph.Cycles() - if len(cycles) > 0 { - cycleLines := make([]string, len(cycles)) - for i, cycle := range cycles { - vertices := make([]string, len(cycle)) - for j, vertex := range cycle { - vertices[j] = vertex.(string) - } - cycleLines[i] = "\t" + strings.Join(vertices, ",") - } - return fmt.Errorf("cyclic dependency detected:\n%s", strings.Join(cycleLines, "\n")) - } - - for _, e := range graph.Edges() { - if e.Source() == e.Target() { - return fmt.Errorf("%s depends on itself", e.Source()) - } - } - return nil -} diff --git a/cli/internal/util/modulo.go b/cli/internal/util/modulo.go deleted file mode 100644 index ec2957ad2a377..0000000000000 --- a/cli/internal/util/modulo.go +++ /dev/null @@ -1,13 +0,0 @@ -package util - -// PositiveMod returns x modulo d, normalized to a non-negative result (a JavaScript-style positive modulo) -func PositiveMod(x, d int) int { - x = x % d - if x >= 0 { - return x - } - if d < 0 { - return x - d - } - return x + d -} diff --git a/cli/internal/util/parse_concurrency.go b/cli/internal/util/parse_concurrency.go deleted file mode 100644 index 69176001416a4..0000000000000 --- a/cli/internal/util/parse_concurrency.go +++ /dev/null @@ -1,39 +0,0 @@ -package util - -import ( - "fmt" - "math" - "runtime" - "strconv" - "strings" -) - -var ( - // alias so we can mock in tests - runtimeNumCPU = runtime.NumCPU - // positive values check for +Inf - _positiveInfinity = 1 -) - -// ParseConcurrency parses a concurrency value, which can be a number (e.g. 2) or a percentage (e.g. 50%). -func ParseConcurrency(concurrencyRaw string) (int, error) { - if strings.HasSuffix(concurrencyRaw, "%") { - if percent, err := strconv.ParseFloat(concurrencyRaw[:len(concurrencyRaw)-1], 64); err != nil { - return 0, fmt.Errorf("invalid value for --concurrency CLI flag. This should be a number --concurrency=4 or percentage of CPU cores --concurrency=50%% : %w", err) - } else { - if percent > 0 && !math.IsInf(percent, _positiveInfinity) { - return int(math.Max(1, float64(runtimeNumCPU())*percent/100)), nil - } else { - // err is nil on this branch, so it must not be wrapped - return 0, fmt.Errorf("invalid percentage value for --concurrency CLI flag. This should be a percentage of CPU cores of at least 1%%") - } - } - } else if i, err := strconv.Atoi(concurrencyRaw); err != nil { - return 0, fmt.Errorf("invalid value for --concurrency CLI flag. This should be a positive integer greater than or equal to 1: %w", err) - } else { - if i >= 1 { - return i, nil - } else { - return 0, fmt.Errorf("invalid value %v for --concurrency CLI flag.
This should be a positive integer greater than or equal to 1", i) - } - } -} diff --git a/cli/internal/util/parse_concurrency_test.go b/cli/internal/util/parse_concurrency_test.go deleted file mode 100644 index b732724bd7701..0000000000000 --- a/cli/internal/util/parse_concurrency_test.go +++ /dev/null @@ -1,79 +0,0 @@ -package util - -import ( - "fmt" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestParseConcurrency(t *testing.T) { - cases := []struct { - Input string - Expected int - }{ - { - "12", - 12, - }, - { - "200%", - 20, - }, - { - "100%", - 10, - }, - { - "50%", - 5, - }, - { - "25%", - 2, - }, - { - "1%", - 1, - }, - { - "0644", // we parse in base 10 - 644, - }, - } - - // mock runtime.NumCPU() to 10 - runtimeNumCPU = func() int { - return 10 - } - - for i, tc := range cases { - t.Run(fmt.Sprintf("%d) '%s' should parse to '%d'", i, tc.Input, tc.Expected), func(t *testing.T) { - if result, err := ParseConcurrency(tc.Input); err != nil { - t.Fatalf("invalid parse: %#v", err) - } else { - assert.EqualValues(t, tc.Expected, result) - } - }) - } -} - -func TestInvalidPercents(t *testing.T) { - inputs := []string{ - "asdf", - "-1", - "-l%", - "infinity%", - "-infinity%", - "nan%", - "0b01", - "0o644", - "0xFF", - } - for _, tc := range inputs { - t.Run(tc, func(t *testing.T) { - val, err := ParseConcurrency(tc) - assert.Error(t, err, "input %v got %v", tc, val) - }) - } -} diff --git a/cli/internal/util/printf.go b/cli/internal/util/printf.go deleted file mode 100644 index 9cd6dce44de81..0000000000000 --- a/cli/internal/util/printf.go +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright Thought Machine, Inc. or its affiliates. All Rights Reserved. -// SPDX-License-Identifier: Apache-2.0 -package util - -import ( - "fmt" - "io" - "os" - - "github.com/vercel/turbo/cli/internal/ui" -) - -// InitPrintf sets up the replacements used by the printf-style helpers below, -// clearing the ANSI codes when output is not a TTY. -func InitPrintf() { - if !ui.IsTTY { - replacements = map[string]string{} - } -} - -// Sprintf formats a string, expanding pseudo-shell variables (e.g. ${BOLD}) -// into ANSI formatting codes. -func Sprintf(format string, args ...interface{}) string { - return os.Expand(fmt.Sprintf(format, args...), replace) -} - -// Printf prints to stderr with the same expansions. -func Printf(format string, args ...interface{}) { - fmt.Fprint(os.Stderr, os.Expand(fmt.Sprintf(format, args...), replace)) -} - -// Fprintf prints to the given writer with the same expansions. -func Fprintf(writer io.Writer, format string, args ...interface{}) { - fmt.Fprint(writer, os.Expand(fmt.Sprintf(format, args...), replace)) -} - -func replace(s string) string { - return replacements[s] -}
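As a brief, assumed usage sketch (the call site below is hypothetical, but the keys come from the replacements table that follows):

```go
package main

import "github.com/vercel/turbo/cli/internal/util"

func main() {
	// On a TTY, ${BOLD_GREEN} and ${RESET} expand to ANSI codes from the
	// table below; after InitPrintf on a non-TTY they expand to "".
	util.InitPrintf()
	util.Printf("${BOLD_GREEN}success${RESET} in %dms\n", 42)
}
```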
-// This is the standard set of replacements we use. -var replacements = map[string]string{ - "BOLD": "\x1b[1m", - "BOLD_GREY": "\x1b[30;1m", - "BOLD_RED": "\x1b[31;1m", - "BOLD_GREEN": "\x1b[32;1m", - "BOLD_YELLOW": "\x1b[33;1m", - "BOLD_BLUE": "\x1b[34;1m", - "BOLD_MAGENTA": "\x1b[35;1m", - "BOLD_CYAN": "\x1b[36;1m", - "BOLD_WHITE": "\x1b[37;1m", - "UNDERLINE": "\x1b[4m", - "GREY": "\x1b[2m", - "RED": "\x1b[31m", - "GREEN": "\x1b[32m", - "YELLOW": "\x1b[33m", - "BLUE": "\x1b[34m", - "MAGENTA": "\x1b[35m", - "CYAN": "\x1b[36m", - "WHITE": "\x1b[37m", - "WHITE_ON_RED": "\x1b[37;41;1m", - "RED_NO_BG": "\x1b[31;49;1m", - "RESET": "\x1b[0m", - "ERASE_AFTER": "\x1b[K", - "CLEAR_END": "\x1b[0J", -} diff --git a/cli/internal/util/run_opts.go b/cli/internal/util/run_opts.go deleted file mode 100644 index 269b1bfd4b86d..0000000000000 --- a/cli/internal/util/run_opts.go +++ /dev/null @@ -1,62 +0,0 @@ -package util - -import "strings" - -// EnvMode specifies if we will be using strict env vars -type EnvMode string - -const ( - // Infer - infer environment variable constraints from turbo.json - Infer EnvMode = "Infer" - // Loose - environment variables are unconstrained - Loose EnvMode = "Loose" - // Strict - environment variables are limited - Strict EnvMode = "Strict" -) - -// MarshalText implements encoding.TextMarshaler for EnvMode. -func (s EnvMode) MarshalText() (text []byte, err error) { - return []byte(strings.ToLower(string(s))), nil -} - -// RunOpts holds the options that control the execution of a turbo run -type RunOpts struct { - // Concurrency is the maximum number of tasks to run at once; 1 forces serial execution - Concurrency int - // Whether to execute in parallel (defaults to false) - Parallel bool - - EnvMode EnvMode - // Whether or not to infer the framework for each workspace. - FrameworkInference bool - // Profile is the filename to write a perf profile to. - Profile string - // If true, continue task executions even if a task fails. - ContinueOnError bool - PassThroughArgs []string - // Restrict execution to only the listed task names. Defaults to false - Only bool - // Dry run flags - DryRun bool - DryRunJSON bool - // Graph flags - GraphDot bool - GraphFile string - Daemon bool - NoDaemon bool - SinglePackage bool - - // LogPrefix controls whether we should print a prefix in task logs - LogPrefix string - - // The order of the logs, either 'grouped' or 'stream' - LogOrder string - - // Whether turbo should create a run summary - Summarize bool - - ExperimentalSpaceID string - - // Whether this run is in GitHub Actions - IsGithubActions bool -} diff --git a/cli/internal/util/semaphore.go b/cli/internal/util/semaphore.go deleted file mode 100644 index ef29df0c25596..0000000000000 --- a/cli/internal/util/semaphore.go +++ /dev/null @@ -1,43 +0,0 @@ -package util - -// Semaphore is a wrapper around a channel to provide -// utility methods to clarify that we are treating the -// channel as a semaphore -type Semaphore chan struct{} - -// NewSemaphore creates a semaphore that allows up -// to a given limit of simultaneous acquisitions -func NewSemaphore(n int) Semaphore { - if n <= 0 { - panic("semaphore with limit <=0") - } - ch := make(chan struct{}, n) - return Semaphore(ch) -} - -// Acquire is used to acquire an available slot. -// Blocks until available. -func (s Semaphore) Acquire() { - s <- struct{}{} -} - -// TryAcquire is used to do a non-blocking acquire. -// Returns a bool indicating success -func (s Semaphore) TryAcquire() bool { - select { - case s <- struct{}{}: - return true - default: - return false - } -} - -// Release is used to return a slot.
Acquire must -// be called as a pre-condition. -func (s Semaphore) Release() { - select { - case <-s: - default: - panic("release without an acquire") - } -} diff --git a/cli/internal/util/set.go b/cli/internal/util/set.go deleted file mode 100644 index b6c5f8613bc55..0000000000000 --- a/cli/internal/util/set.go +++ /dev/null @@ -1,147 +0,0 @@ -package util - -// Set is a set data structure. -type Set map[interface{}]interface{} - -// SetFromStrings creates a Set containing the strings from the given slice -func SetFromStrings(sl []string) Set { - set := make(Set, len(sl)) - for _, item := range sl { - set.Add(item) - } - return set -} - -// Hashable is the interface used by set to get the hash code of a value. -// If this isn't given, then the value of the item being added to the set -// itself is used as the comparison value. -type Hashable interface { - Hashcode() interface{} -} - -// hashcode returns the hashcode used for set elements. -func hashcode(v interface{}) interface{} { - if h, ok := v.(Hashable); ok { - return h.Hashcode() - } - - return v -} - -// Add adds an item to the set -func (s Set) Add(v interface{}) { - s[hashcode(v)] = v -} - -// Delete removes an item from the set. -func (s Set) Delete(v interface{}) { - delete(s, hashcode(v)) -} - -// Includes reports whether a value is in the set. -func (s Set) Includes(v interface{}) bool { - _, ok := s[hashcode(v)] - return ok -} - -// Intersection computes the set intersection with other. -func (s Set) Intersection(other Set) Set { - result := make(Set) - if s == nil || other == nil { - return result - } - // Iteration over a smaller set has better performance. - if other.Len() < s.Len() { - s, other = other, s - } - for _, v := range s { - if other.Includes(v) { - result.Add(v) - } - } - return result -} - -// Difference returns a set with the elements that s has but -// other doesn't. -func (s Set) Difference(other Set) Set { - result := make(Set) - for k, v := range s { - var ok bool - if other != nil { - _, ok = other[k] - } - if !ok { - result.Add(v) - } - } - - return result -} - -// Some reports whether at least one element in the set passes the test -// implemented by the provided function. -func (s Set) Some(cb func(interface{}) bool) bool { - for _, v := range s { - if cb(v) { - return true - } - } - return false -} - -// Filter returns a set that contains the elements from the receiver -// where the given callback returns true. -func (s Set) Filter(cb func(interface{}) bool) Set { - result := make(Set) - - for _, v := range s { - if cb(v) { - result.Add(v) - } - } - - return result -} - -// Len is the number of items in the set. -func (s Set) Len() int { - return len(s) -} - -// List returns the list of set elements. -func (s Set) List() []interface{} { - if s == nil { - return nil - } - - r := make([]interface{}, 0, len(s)) - for _, v := range s { - r = append(r, v) - } - - return r -} - -// UnsafeListOfStrings dangerously casts the set's elements to strings -func (s Set) UnsafeListOfStrings() []string { - if s == nil { - return nil - } - - r := make([]string, 0, len(s)) - for _, v := range s { - r = append(r, v.(string)) - } - - return r -} - -// Copy returns a shallow copy of the set.
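A short, assumed usage sketch of the Set operations above. Values are stored untyped, so string-only sets are read back out with UnsafeListOfStrings:

```go
package main

import (
	"fmt"

	"github.com/vercel/turbo/cli/internal/util"
)

func main() {
	a := util.SetFromStrings([]string{"web", "docs", "api"})
	b := util.SetFromStrings([]string{"docs", "api", "cli"})

	// Intersection and Difference allocate fresh sets; a and b are unchanged.
	fmt.Println(a.Intersection(b).Len())               // 2 ("docs" and "api")
	fmt.Println(a.Difference(b).UnsafeListOfStrings()) // [web] (iteration order is not guaranteed)
}
```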
-func (s Set) Copy() Set { - c := make(Set) - for k, v := range s { - c[k] = v - } - return c -} diff --git a/cli/internal/util/set_test.go b/cli/internal/util/set_test.go deleted file mode 100644 index 52736b484d424..0000000000000 --- a/cli/internal/util/set_test.go +++ /dev/null @@ -1,149 +0,0 @@ -package util - -import ( - "fmt" - "testing" -) - -func TestSetDifference(t *testing.T) { - cases := []struct { - Name string - A, B []interface{} - Expected []interface{} - }{ - { - "same", - []interface{}{1, 2, 3}, - []interface{}{3, 1, 2}, - []interface{}{}, - }, - - { - "A has extra elements", - []interface{}{1, 2, 3}, - []interface{}{3, 2}, - []interface{}{1}, - }, - - { - "B has extra elements", - []interface{}{1, 2, 3}, - []interface{}{3, 2, 1, 4}, - []interface{}{}, - }, - } - - for i, tc := range cases { - t.Run(fmt.Sprintf("%d-%s", i, tc.Name), func(t *testing.T) { - one := make(Set) - two := make(Set) - expected := make(Set) - for _, v := range tc.A { - one.Add(v) - } - for _, v := range tc.B { - two.Add(v) - } - for _, v := range tc.Expected { - expected.Add(v) - } - - actual := one.Difference(two) - match := actual.Intersection(expected) - if match.Len() != expected.Len() { - t.Fatalf("bad: %#v", actual.List()) - } - }) - } -} - -func TestSetFilter(t *testing.T) { - cases := []struct { - Input []interface{} - Expected []interface{} - }{ - { - []interface{}{1, 2, 3}, - []interface{}{1, 2, 3}, - }, - - { - []interface{}{4, 5, 6}, - []interface{}{4}, - }, - - { - []interface{}{7, 8, 9}, - []interface{}{}, - }, - } - - for i, tc := range cases { - t.Run(fmt.Sprintf("%d-%#v", i, tc.Input), func(t *testing.T) { - input := make(Set) - expected := make(Set) - for _, v := range tc.Input { - input.Add(v) - } - for _, v := range tc.Expected { - expected.Add(v) - } - - actual := input.Filter(func(v interface{}) bool { - return v.(int) < 5 - }) - match := actual.Intersection(expected) - if match.Len() != expected.Len() { - t.Fatalf("bad: %#v", actual.List()) - } - }) - } -} - -func TestSetCopy(t *testing.T) { - a := make(Set) - a.Add(1) - a.Add(2) - - b := a.Copy() - b.Add(3) - - diff := b.Difference(a) - - if diff.Len() != 1 { - t.Fatalf("expected single diff value, got %#v", diff) - } - - if !diff.Includes(3) { - t.Fatalf("diff does not contain 3, got %#v", diff) - } - -} - -func makeSet(n int) Set { - ret := make(Set, n) - for i := 0; i < n; i++ { - ret.Add(i) - } - return ret -} - -func BenchmarkSetIntersection_100_100000(b *testing.B) { - small := makeSet(100) - large := makeSet(100000) - - b.ResetTimer() - for n := 0; n < b.N; n++ { - small.Intersection(large) - } -} - -func BenchmarkSetIntersection_100000_100(b *testing.B) { - small := makeSet(100) - large := makeSet(100000) - - b.ResetTimer() - for n := 0; n < b.N; n++ { - large.Intersection(small) - } -} diff --git a/cli/internal/util/status.go b/cli/internal/util/status.go deleted file mode 100644 index 23ae16528ea9d..0000000000000 --- a/cli/internal/util/status.go +++ /dev/null @@ -1,47 +0,0 @@ -package util - -import "fmt" - -// CachingStatus represents the api server's perspective -// on whether remote caching should be allowed -type CachingStatus int - -const ( - // CachingStatusDisabled indicates that the server will not accept or serve artifacts - CachingStatusDisabled CachingStatus = iota - // CachingStatusEnabled indicates that the server will accept and serve artifacts - CachingStatusEnabled - // CachingStatusOverLimit indicates that a usage limit has been hit and the - // server will temporarily not accept or serve artifacts 
- CachingStatusOverLimit - // CachingStatusPaused indicates that a customer's spending has been paused and the - // server will temporarily not accept or serve artifacts - CachingStatusPaused -) - -// CachingStatusFromString parses a raw string to a caching status enum value -func CachingStatusFromString(raw string) (CachingStatus, error) { - switch raw { - case "disabled": - return CachingStatusDisabled, nil - case "enabled": - return CachingStatusEnabled, nil - case "over_limit": - return CachingStatusOverLimit, nil - case "paused": - return CachingStatusPaused, nil - default: - return CachingStatusDisabled, fmt.Errorf("unknown caching status: %v", raw) - } -} - -// CacheDisabledError is an error used to indicate that remote caching -// is not available. -type CacheDisabledError struct { - Status CachingStatus - Message string -} - -func (cd *CacheDisabledError) Error() string { - return cd.Message -} diff --git a/cli/internal/util/task_id.go b/cli/internal/util/task_id.go deleted file mode 100644 index e4415b614b2fe..0000000000000 --- a/cli/internal/util/task_id.go +++ /dev/null @@ -1,66 +0,0 @@ -package util - -import ( - "fmt" - "strings" -) - -const ( - // TaskDelimiter separates a package name from a task name in a task id - TaskDelimiter = "#" - // RootPkgName is the reserved name that specifies the root package - RootPkgName = "//" -) - -// GetTaskId returns a package-task identifier (e.g @feed/thing#build). -func GetTaskId(pkgName interface{}, target string) string { - if IsPackageTask(target) { - return target - } - return fmt.Sprintf("%v%v%v", pkgName, TaskDelimiter, target) -} - -// RootTaskID returns the task id for running the given task in the root package -func RootTaskID(target string) string { - return GetTaskId(RootPkgName, target) -} - -// GetPackageTaskFromId returns a tuple of the package name and target task -func GetPackageTaskFromId(taskId string) (packageName string, task string) { - arr := strings.Split(taskId, TaskDelimiter) - return arr[0], arr[1] -} - -// RootTaskTaskName returns the task portion of a root task taskID -func RootTaskTaskName(taskID string) string { - return strings.TrimPrefix(taskID, RootPkgName+TaskDelimiter) -} - -// IsPackageTask returns true if input is a package-specific task -// whose name has a length greater than 0. -// -// Accepted: myapp#build -// Rejected: #build, build -func IsPackageTask(task string) bool { - return strings.Index(task, TaskDelimiter) > 0 -} - -// IsTaskInPackage returns true if the task does not belong to a different package -// note that this means unscoped tasks will always return true -func IsTaskInPackage(task string, packageName string) bool { - if !IsPackageTask(task) { - return true - } - packageNameExpected, _ := GetPackageTaskFromId(task) - return packageNameExpected == packageName -} - -// StripPackageName removes the package portion of a taskID if it -// is a package task. 
Non-package tasks are returned unmodified -func StripPackageName(taskID string) string { - if IsPackageTask(taskID) { - _, task := GetPackageTaskFromId(taskID) - return task - } - return taskID -} diff --git a/cli/internal/util/task_output_mode.go b/cli/internal/util/task_output_mode.go deleted file mode 100644 index eee42e0bc50d1..0000000000000 --- a/cli/internal/util/task_output_mode.go +++ /dev/null @@ -1,100 +0,0 @@ -package util - -import ( - "encoding/json" - "fmt" -) - -// TaskOutputMode defines the ways turbo can display task output during a run -type TaskOutputMode int - -const ( - // FullTaskOutput will show all task output - FullTaskOutput TaskOutputMode = iota - // NoTaskOutput will hide all task output - NoTaskOutput - // HashTaskOutput will display turbo-computed task hashes - HashTaskOutput - // NewTaskOutput will show all new task output and turbo-computed task hashes for cached output - NewTaskOutput - // ErrorTaskOutput will show task output for failures only; no cache miss/hit messages are emitted - ErrorTaskOutput -) - -const ( - fullTaskOutputString = "full" - noTaskOutputString = "none" - hashTaskOutputString = "hash-only" - newTaskOutputString = "new-only" - errorTaskOutputString = "errors-only" -) - -// TaskOutputModeStrings is an array containing the string representations for task output modes -var TaskOutputModeStrings = []string{ - fullTaskOutputString, - noTaskOutputString, - hashTaskOutputString, - newTaskOutputString, - errorTaskOutputString, -} - -// FromTaskOutputModeString converts a task output mode's string representation into the enum value -func FromTaskOutputModeString(value string) (TaskOutputMode, error) { - switch value { - case fullTaskOutputString: - return FullTaskOutput, nil - case noTaskOutputString: - return NoTaskOutput, nil - case hashTaskOutputString: - return HashTaskOutput, nil - case newTaskOutputString: - return NewTaskOutput, nil - case errorTaskOutputString: - return ErrorTaskOutput, nil - } - - return FullTaskOutput, fmt.Errorf("invalid task output mode: %v", value) -} - -// ToTaskOutputModeString converts a task output mode enum value into the string representation -func ToTaskOutputModeString(value TaskOutputMode) (string, error) { - switch value { - case FullTaskOutput: - return fullTaskOutputString, nil - case NoTaskOutput: - return noTaskOutputString, nil - case HashTaskOutput: - return hashTaskOutputString, nil - case NewTaskOutput: - return newTaskOutputString, nil - case ErrorTaskOutput: - return errorTaskOutputString, nil - } - - return "", fmt.Errorf("invalid task output mode: %v", value) -} - -// UnmarshalJSON converts a task output mode string representation into an enum -func (c *TaskOutputMode) UnmarshalJSON(data []byte) error { - var rawTaskOutputMode string - if err := json.Unmarshal(data, &rawTaskOutputMode); err != nil { - return err - } - - taskOutputMode, err := FromTaskOutputModeString(rawTaskOutputMode) - if err != nil { - return err - } - - *c = taskOutputMode - return nil -} - -// MarshalJSON converts a task output mode to its string representation -func (c TaskOutputMode) MarshalJSON() ([]byte, error) { - outputModeString, err := ToTaskOutputModeString(c) - if err != nil { - return nil, err - } - return json.Marshal(outputModeString) -} diff --git a/cli/internal/workspace/workspace.go b/cli/internal/workspace/workspace.go deleted file mode 100644 index fcd1eb8340c7a..0000000000000 --- a/cli/internal/workspace/workspace.go +++ /dev/null @@ -1,10 +0,0 @@ -// Package workspace contains some utilities around 
managing workspaces -package workspace - -import "github.com/vercel/turbo/cli/internal/fs" - -// Catalog holds information about each workspace in the monorepo. -type Catalog struct { - PackageJSONs map[string]*fs.PackageJSON - TurboConfigs map[string]*fs.TurboJSON -} diff --git a/cli/internal/xxhash/xxhash.go b/cli/internal/xxhash/xxhash.go deleted file mode 100644 index 642ac738e3fc2..0000000000000 --- a/cli/internal/xxhash/xxhash.go +++ /dev/null @@ -1,202 +0,0 @@ -// Package xxhash implements the 64-bit variant of xxHash (XXH64) as described -// at http://cyan4973.github.io/xxHash/. - -// Adapted from https://cs.github.com/evanw/esbuild/blob/0c9ced59c8b3ea3bd8dd5feebafed1f47ed279dd/internal/xxhash -// Copyright (c) 2016 Caleb Spare. All rights reserved. -// SPDX-License-Identifier: MIT -package xxhash - -import ( - "encoding/binary" - "math/bits" -) - -const ( - prime1 uint64 = 11400714785074694791 - prime2 uint64 = 14029467366897019727 - prime3 uint64 = 1609587929392839161 - prime4 uint64 = 9650029242287828579 - prime5 uint64 = 2870177450012600261 -) - -// NOTE(caleb): I'm using both consts and vars of the primes. Using consts where -// possible in the Go code is worth a small (but measurable) performance boost -// by avoiding some MOVQs. Vars are needed for the asm and also are useful for -// convenience in the Go code in a few places where we need to intentionally -// avoid constant arithmetic (e.g., v1 := prime1 + prime2 fails because the -// result overflows a uint64). -var prime1v = prime1 - -// Digest implements hash.Hash64. -type Digest struct { - v1 uint64 - v2 uint64 - v3 uint64 - v4 uint64 - total uint64 - mem [32]byte - n int // how much of mem is used -} - -// New creates a new Digest that computes the 64-bit xxHash algorithm. -func New() *Digest { - var d Digest - d.Reset() - return &d -} - -// Reset clears the Digest's state so that it can be reused. -func (d *Digest) Reset() { - d.v1 = prime1v + prime2 - d.v2 = prime2 - d.v3 = 0 - d.v4 = -prime1v - d.total = 0 - d.n = 0 -} - -// Size always returns 8 bytes. -func (d *Digest) Size() int { return 8 } - -// BlockSize always returns 32 bytes. -func (d *Digest) BlockSize() int { return 32 } - -// Write adds more data to d. It always returns len(b), nil. -func (d *Digest) Write(b []byte) (n int, err error) { - n = len(b) - d.total += uint64(n) - - if d.n+n < 32 { - // This new data doesn't even fill the current block. - copy(d.mem[d.n:], b) - d.n += n - return - } - - if d.n > 0 { - // Finish off the partial block. - copy(d.mem[d.n:], b) - d.v1 = round(d.v1, u64(d.mem[0:8])) - d.v2 = round(d.v2, u64(d.mem[8:16])) - d.v3 = round(d.v3, u64(d.mem[16:24])) - d.v4 = round(d.v4, u64(d.mem[24:32])) - b = b[32-d.n:] - d.n = 0 - } - - if len(b) >= 32 { - // One or more full blocks left. - nw := writeBlocks(d, b) - b = b[nw:] - } - - // Store any remaining partial block. - copy(d.mem[:], b) - d.n = len(b) - - return -} - -// Sum appends the current hash to b and returns the resulting slice. -func (d *Digest) Sum(b []byte) []byte { - s := d.Sum64() - return append( - b, - byte(s>>56), - byte(s>>48), - byte(s>>40), - byte(s>>32), - byte(s>>24), - byte(s>>16), - byte(s>>8), - byte(s), - ) -} - -// Sum64 returns the current hash. 
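Digest implements hash.Hash64, so the streaming API can be exercised with a short sketch like this (assumed usage, shown here for illustration):

```go
package main

import (
	"fmt"

	"github.com/vercel/turbo/cli/internal/xxhash"
)

func main() {
	d := xxhash.New()
	// Write never fails: it always returns len(b), nil, so the error can
	// be ignored. Hashing in chunks gives the same result as one call.
	_, _ = d.Write([]byte("hello "))
	_, _ = d.Write([]byte("world"))
	fmt.Printf("%016x\n", d.Sum64())
}
```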
-func (d *Digest) Sum64() uint64 { - var h uint64 - - if d.total >= 32 { - v1, v2, v3, v4 := d.v1, d.v2, d.v3, d.v4 - h = rol1(v1) + rol7(v2) + rol12(v3) + rol18(v4) - h = mergeRound(h, v1) - h = mergeRound(h, v2) - h = mergeRound(h, v3) - h = mergeRound(h, v4) - } else { - h = d.v3 + prime5 - } - - h += d.total - - i, end := 0, d.n - for ; i+8 <= end; i += 8 { - k1 := round(0, u64(d.mem[i:i+8])) - h ^= k1 - h = rol27(h)*prime1 + prime4 - } - if i+4 <= end { - h ^= uint64(u32(d.mem[i:i+4])) * prime1 - h = rol23(h)*prime2 + prime3 - i += 4 - } - for i < end { - h ^= uint64(d.mem[i]) * prime5 - h = rol11(h) * prime1 - i++ - } - - h ^= h >> 33 - h *= prime2 - h ^= h >> 29 - h *= prime3 - h ^= h >> 32 - - return h -} - -const ( - magic = "xxh\x06" - marshaledSize = len(magic) + 8*5 + 32 -) - -func u64(b []byte) uint64 { return binary.LittleEndian.Uint64(b) } -func u32(b []byte) uint32 { return binary.LittleEndian.Uint32(b) } - -func round(acc, input uint64) uint64 { - acc += input * prime2 - acc = rol31(acc) - acc *= prime1 - return acc -} - -func mergeRound(acc, val uint64) uint64 { - val = round(0, val) - acc ^= val - acc = acc*prime1 + prime4 - return acc -} - -func rol1(x uint64) uint64 { return bits.RotateLeft64(x, 1) } -func rol7(x uint64) uint64 { return bits.RotateLeft64(x, 7) } -func rol11(x uint64) uint64 { return bits.RotateLeft64(x, 11) } -func rol12(x uint64) uint64 { return bits.RotateLeft64(x, 12) } -func rol18(x uint64) uint64 { return bits.RotateLeft64(x, 18) } -func rol23(x uint64) uint64 { return bits.RotateLeft64(x, 23) } -func rol27(x uint64) uint64 { return bits.RotateLeft64(x, 27) } -func rol31(x uint64) uint64 { return bits.RotateLeft64(x, 31) } - -func writeBlocks(d *Digest, b []byte) int { - v1, v2, v3, v4 := d.v1, d.v2, d.v3, d.v4 - n := len(b) - for len(b) >= 32 { - v1 = round(v1, u64(b[0:8:len(b)])) - v2 = round(v2, u64(b[8:16:len(b)])) - v3 = round(v3, u64(b[16:24:len(b)])) - v4 = round(v4, u64(b[24:32:len(b)])) - b = b[32:len(b):len(b)] - } - d.v1, d.v2, d.v3, d.v4 = v1, v2, v3, v4 - return n - len(b) -} diff --git a/cli/internal/yaml/apic.go b/cli/internal/yaml/apic.go deleted file mode 100644 index 05fd305da1658..0000000000000 --- a/cli/internal/yaml/apic.go +++ /dev/null @@ -1,747 +0,0 @@ -// -// Copyright (c) 2011-2019 Canonical Ltd -// Copyright (c) 2006-2010 Kirill Simonov -// -// Permission is hereby granted, free of charge, to any person obtaining a copy of -// this software and associated documentation files (the "Software"), to deal in -// the Software without restriction, including without limitation the rights to -// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -// of the Software, and to permit persons to whom the Software is furnished to do -// so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in all -// copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -// SOFTWARE. 
- -package yaml - -import ( - "io" -) - -func yaml_insert_token(parser *yaml_parser_t, pos int, token *yaml_token_t) { - //fmt.Println("yaml_insert_token", "pos:", pos, "typ:", token.typ, "head:", parser.tokens_head, "len:", len(parser.tokens)) - - // Check if we can move the queue at the beginning of the buffer. - if parser.tokens_head > 0 && len(parser.tokens) == cap(parser.tokens) { - if parser.tokens_head != len(parser.tokens) { - copy(parser.tokens, parser.tokens[parser.tokens_head:]) - } - parser.tokens = parser.tokens[:len(parser.tokens)-parser.tokens_head] - parser.tokens_head = 0 - } - parser.tokens = append(parser.tokens, *token) - if pos < 0 { - return - } - copy(parser.tokens[parser.tokens_head+pos+1:], parser.tokens[parser.tokens_head+pos:]) - parser.tokens[parser.tokens_head+pos] = *token -} - -// Create a new parser object. -func yaml_parser_initialize(parser *yaml_parser_t) bool { - *parser = yaml_parser_t{ - raw_buffer: make([]byte, 0, input_raw_buffer_size), - buffer: make([]byte, 0, input_buffer_size), - } - return true -} - -// Destroy a parser object. -func yaml_parser_delete(parser *yaml_parser_t) { - *parser = yaml_parser_t{} -} - -// String read handler. -func yaml_string_read_handler(parser *yaml_parser_t, buffer []byte) (n int, err error) { - if parser.input_pos == len(parser.input) { - return 0, io.EOF - } - n = copy(buffer, parser.input[parser.input_pos:]) - parser.input_pos += n - return n, nil -} - -// Reader read handler. -func yaml_reader_read_handler(parser *yaml_parser_t, buffer []byte) (n int, err error) { - return parser.input_reader.Read(buffer) -} - -// Set a string input. -func yaml_parser_set_input_string(parser *yaml_parser_t, input []byte) { - if parser.read_handler != nil { - panic("must set the input source only once") - } - parser.read_handler = yaml_string_read_handler - parser.input = input - parser.input_pos = 0 -} - -// Set a file input. -func yaml_parser_set_input_reader(parser *yaml_parser_t, r io.Reader) { - if parser.read_handler != nil { - panic("must set the input source only once") - } - parser.read_handler = yaml_reader_read_handler - parser.input_reader = r -} - -// Set the source encoding. -func yaml_parser_set_encoding(parser *yaml_parser_t, encoding yaml_encoding_t) { - if parser.encoding != yaml_ANY_ENCODING { - panic("must set the encoding only once") - } - parser.encoding = encoding -} - -// Create a new emitter object. -func yaml_emitter_initialize(emitter *yaml_emitter_t) { - *emitter = yaml_emitter_t{ - buffer: make([]byte, output_buffer_size), - raw_buffer: make([]byte, 0, output_raw_buffer_size), - states: make([]yaml_emitter_state_t, 0, initial_stack_size), - events: make([]yaml_event_t, 0, initial_queue_size), - best_width: -1, - } -} - -// Destroy an emitter object. -func yaml_emitter_delete(emitter *yaml_emitter_t) { - *emitter = yaml_emitter_t{} -} - -// String write handler. -func yaml_string_write_handler(emitter *yaml_emitter_t, buffer []byte) error { - *emitter.output_buffer = append(*emitter.output_buffer, buffer...) - return nil -} - -// yaml_writer_write_handler uses emitter.output_writer to write the -// emitted text. -func yaml_writer_write_handler(emitter *yaml_emitter_t, buffer []byte) error { - _, err := emitter.output_writer.Write(buffer) - return err -} - -// Set a string output. 
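The read-handler indirection above is what lets string and io.Reader inputs share one buffering path: the parser only ever calls a function of a single shape. A standalone sketch of that pattern (the names here are illustrative, not the package's own):

```go
package main

import (
	"fmt"
	"io"
	"strings"
)

// readHandler mirrors the shape of the parser's read handlers: fill buf,
// report how many bytes were produced.
type readHandler func(buf []byte) (int, error)

// fromString captures a position, as yaml_string_read_handler does with
// parser.input_pos.
func fromString(s string) readHandler {
	pos := 0
	return func(buf []byte) (int, error) {
		if pos == len(s) {
			return 0, io.EOF
		}
		n := copy(buf, s[pos:])
		pos += n
		return n, nil
	}
}

// fromReader simply forwards to Read, as yaml_reader_read_handler does.
func fromReader(r io.Reader) readHandler { return r.Read }

func main() {
	buf := make([]byte, 8)
	for _, h := range []readHandler{fromString("a: 1"), fromReader(strings.NewReader("b: 2"))} {
		n, _ := h(buf)
		fmt.Printf("%q\n", buf[:n])
	}
}
```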
-func yaml_emitter_set_output_string(emitter *yaml_emitter_t, output_buffer *[]byte) { - if emitter.write_handler != nil { - panic("must set the output target only once") - } - emitter.write_handler = yaml_string_write_handler - emitter.output_buffer = output_buffer -} - -// Set a file output. -func yaml_emitter_set_output_writer(emitter *yaml_emitter_t, w io.Writer) { - if emitter.write_handler != nil { - panic("must set the output target only once") - } - emitter.write_handler = yaml_writer_write_handler - emitter.output_writer = w -} - -// Set the output encoding. -func yaml_emitter_set_encoding(emitter *yaml_emitter_t, encoding yaml_encoding_t) { - if emitter.encoding != yaml_ANY_ENCODING { - panic("must set the output encoding only once") - } - emitter.encoding = encoding -} - -// Set the canonical output style. -func yaml_emitter_set_canonical(emitter *yaml_emitter_t, canonical bool) { - emitter.canonical = canonical -} - -// Set the indentation increment. -func yaml_emitter_set_indent(emitter *yaml_emitter_t, indent int) { - if indent < 2 || indent > 9 { - indent = 2 - } - emitter.best_indent = indent -} - -// Set the preferred line width. -func yaml_emitter_set_width(emitter *yaml_emitter_t, width int) { - if width < 0 { - width = -1 - } - emitter.best_width = width -} - -// Set if unescaped non-ASCII characters are allowed. -func yaml_emitter_set_unicode(emitter *yaml_emitter_t, unicode bool) { - emitter.unicode = unicode -} - -// Set the preferred line break character. -func yaml_emitter_set_break(emitter *yaml_emitter_t, line_break yaml_break_t) { - emitter.line_break = line_break -} - -///* -// * Destroy a token object. -// */ -// -//YAML_DECLARE(void) -//yaml_token_delete(yaml_token_t *token) -//{ -// assert(token); // Non-NULL token object expected. -// -// switch (token.type) -// { -// case YAML_TAG_DIRECTIVE_TOKEN: -// yaml_free(token.data.tag_directive.handle); -// yaml_free(token.data.tag_directive.prefix); -// break; -// -// case YAML_ALIAS_TOKEN: -// yaml_free(token.data.alias.value); -// break; -// -// case YAML_ANCHOR_TOKEN: -// yaml_free(token.data.anchor.value); -// break; -// -// case YAML_TAG_TOKEN: -// yaml_free(token.data.tag.handle); -// yaml_free(token.data.tag.suffix); -// break; -// -// case YAML_SCALAR_TOKEN: -// yaml_free(token.data.scalar.value); -// break; -// -// default: -// break; -// } -// -// memset(token, 0, sizeof(yaml_token_t)); -//} -// -///* -// * Check if a string is a valid UTF-8 sequence. -// * -// * Check 'reader.c' for more details on UTF-8 encoding. -// */ -// -//static int -//yaml_check_utf8(yaml_char_t *start, size_t length) -//{ -// yaml_char_t *end = start+length; -// yaml_char_t *pointer = start; -// -// while (pointer < end) { -// unsigned char octet; -// unsigned int width; -// unsigned int value; -// size_t k; -// -// octet = pointer[0]; -// width = (octet & 0x80) == 0x00 ? 1 : -// (octet & 0xE0) == 0xC0 ? 2 : -// (octet & 0xF0) == 0xE0 ? 3 : -// (octet & 0xF8) == 0xF0 ? 4 : 0; -// value = (octet & 0x80) == 0x00 ? octet & 0x7F : -// (octet & 0xE0) == 0xC0 ? octet & 0x1F : -// (octet & 0xF0) == 0xE0 ? octet & 0x0F : -// (octet & 0xF8) == 0xF0 ? 
octet & 0x07 : 0; -// if (!width) return 0; -// if (pointer+width > end) return 0; -// for (k = 1; k < width; k ++) { -// octet = pointer[k]; -// if ((octet & 0xC0) != 0x80) return 0; -// value = (value << 6) + (octet & 0x3F); -// } -// if (!((width == 1) || -// (width == 2 && value >= 0x80) || -// (width == 3 && value >= 0x800) || -// (width == 4 && value >= 0x10000))) return 0; -// -// pointer += width; -// } -// -// return 1; -//} -// - -// Create STREAM-START. -func yaml_stream_start_event_initialize(event *yaml_event_t, encoding yaml_encoding_t) { - *event = yaml_event_t{ - typ: yaml_STREAM_START_EVENT, - encoding: encoding, - } -} - -// Create STREAM-END. -func yaml_stream_end_event_initialize(event *yaml_event_t) { - *event = yaml_event_t{ - typ: yaml_STREAM_END_EVENT, - } -} - -// Create DOCUMENT-START. -func yaml_document_start_event_initialize( - event *yaml_event_t, - version_directive *yaml_version_directive_t, - tag_directives []yaml_tag_directive_t, - implicit bool, -) { - *event = yaml_event_t{ - typ: yaml_DOCUMENT_START_EVENT, - version_directive: version_directive, - tag_directives: tag_directives, - implicit: implicit, - } -} - -// Create DOCUMENT-END. -func yaml_document_end_event_initialize(event *yaml_event_t, implicit bool) { - *event = yaml_event_t{ - typ: yaml_DOCUMENT_END_EVENT, - implicit: implicit, - } -} - -// Create ALIAS. -func yaml_alias_event_initialize(event *yaml_event_t, anchor []byte) bool { - *event = yaml_event_t{ - typ: yaml_ALIAS_EVENT, - anchor: anchor, - } - return true -} - -// Create SCALAR. -func yaml_scalar_event_initialize(event *yaml_event_t, anchor, tag, value []byte, plain_implicit, quoted_implicit bool, style yaml_scalar_style_t) bool { - *event = yaml_event_t{ - typ: yaml_SCALAR_EVENT, - anchor: anchor, - tag: tag, - value: value, - implicit: plain_implicit, - quoted_implicit: quoted_implicit, - style: yaml_style_t(style), - } - return true -} - -// Create SEQUENCE-START. -func yaml_sequence_start_event_initialize(event *yaml_event_t, anchor, tag []byte, implicit bool, style yaml_sequence_style_t) bool { - *event = yaml_event_t{ - typ: yaml_SEQUENCE_START_EVENT, - anchor: anchor, - tag: tag, - implicit: implicit, - style: yaml_style_t(style), - } - return true -} - -// Create SEQUENCE-END. -func yaml_sequence_end_event_initialize(event *yaml_event_t) bool { - *event = yaml_event_t{ - typ: yaml_SEQUENCE_END_EVENT, - } - return true -} - -// Create MAPPING-START. -func yaml_mapping_start_event_initialize(event *yaml_event_t, anchor, tag []byte, implicit bool, style yaml_mapping_style_t) { - *event = yaml_event_t{ - typ: yaml_MAPPING_START_EVENT, - anchor: anchor, - tag: tag, - implicit: implicit, - style: yaml_style_t(style), - } -} - -// Create MAPPING-END. -func yaml_mapping_end_event_initialize(event *yaml_event_t) { - *event = yaml_event_t{ - typ: yaml_MAPPING_END_EVENT, - } -} - -// Destroy an event object. -func yaml_event_delete(event *yaml_event_t) { - *event = yaml_event_t{} -} - -///* -// * Create a document object. 
-// */ -// -//YAML_DECLARE(int) -//yaml_document_initialize(document *yaml_document_t, -// version_directive *yaml_version_directive_t, -// tag_directives_start *yaml_tag_directive_t, -// tag_directives_end *yaml_tag_directive_t, -// start_implicit int, end_implicit int) -//{ -// struct { -// error yaml_error_type_t -// } context -// struct { -// start *yaml_node_t -// end *yaml_node_t -// top *yaml_node_t -// } nodes = { NULL, NULL, NULL } -// version_directive_copy *yaml_version_directive_t = NULL -// struct { -// start *yaml_tag_directive_t -// end *yaml_tag_directive_t -// top *yaml_tag_directive_t -// } tag_directives_copy = { NULL, NULL, NULL } -// value yaml_tag_directive_t = { NULL, NULL } -// mark yaml_mark_t = { 0, 0, 0 } -// -// assert(document) // Non-NULL document object is expected. -// assert((tag_directives_start && tag_directives_end) || -// (tag_directives_start == tag_directives_end)) -// // Valid tag directives are expected. -// -// if (!STACK_INIT(&context, nodes, INITIAL_STACK_SIZE)) goto error -// -// if (version_directive) { -// version_directive_copy = yaml_malloc(sizeof(yaml_version_directive_t)) -// if (!version_directive_copy) goto error -// version_directive_copy.major = version_directive.major -// version_directive_copy.minor = version_directive.minor -// } -// -// if (tag_directives_start != tag_directives_end) { -// tag_directive *yaml_tag_directive_t -// if (!STACK_INIT(&context, tag_directives_copy, INITIAL_STACK_SIZE)) -// goto error -// for (tag_directive = tag_directives_start -// tag_directive != tag_directives_end; tag_directive ++) { -// assert(tag_directive.handle) -// assert(tag_directive.prefix) -// if (!yaml_check_utf8(tag_directive.handle, -// strlen((char *)tag_directive.handle))) -// goto error -// if (!yaml_check_utf8(tag_directive.prefix, -// strlen((char *)tag_directive.prefix))) -// goto error -// value.handle = yaml_strdup(tag_directive.handle) -// value.prefix = yaml_strdup(tag_directive.prefix) -// if (!value.handle || !value.prefix) goto error -// if (!PUSH(&context, tag_directives_copy, value)) -// goto error -// value.handle = NULL -// value.prefix = NULL -// } -// } -// -// DOCUMENT_INIT(*document, nodes.start, nodes.end, version_directive_copy, -// tag_directives_copy.start, tag_directives_copy.top, -// start_implicit, end_implicit, mark, mark) -// -// return 1 -// -//error: -// STACK_DEL(&context, nodes) -// yaml_free(version_directive_copy) -// while (!STACK_EMPTY(&context, tag_directives_copy)) { -// value yaml_tag_directive_t = POP(&context, tag_directives_copy) -// yaml_free(value.handle) -// yaml_free(value.prefix) -// } -// STACK_DEL(&context, tag_directives_copy) -// yaml_free(value.handle) -// yaml_free(value.prefix) -// -// return 0 -//} -// -///* -// * Destroy a document object. -// */ -// -//YAML_DECLARE(void) -//yaml_document_delete(document *yaml_document_t) -//{ -// struct { -// error yaml_error_type_t -// } context -// tag_directive *yaml_tag_directive_t -// -// context.error = YAML_NO_ERROR // Eliminate a compiler warning. -// -// assert(document) // Non-NULL document object is expected. 
-// -// while (!STACK_EMPTY(&context, document.nodes)) { -// node yaml_node_t = POP(&context, document.nodes) -// yaml_free(node.tag) -// switch (node.type) { -// case YAML_SCALAR_NODE: -// yaml_free(node.data.scalar.value) -// break -// case YAML_SEQUENCE_NODE: -// STACK_DEL(&context, node.data.sequence.items) -// break -// case YAML_MAPPING_NODE: -// STACK_DEL(&context, node.data.mapping.pairs) -// break -// default: -// assert(0) // Should not happen. -// } -// } -// STACK_DEL(&context, document.nodes) -// -// yaml_free(document.version_directive) -// for (tag_directive = document.tag_directives.start -// tag_directive != document.tag_directives.end -// tag_directive++) { -// yaml_free(tag_directive.handle) -// yaml_free(tag_directive.prefix) -// } -// yaml_free(document.tag_directives.start) -// -// memset(document, 0, sizeof(yaml_document_t)) -//} -// -///** -// * Get a document node. -// */ -// -//YAML_DECLARE(yaml_node_t *) -//yaml_document_get_node(document *yaml_document_t, index int) -//{ -// assert(document) // Non-NULL document object is expected. -// -// if (index > 0 && document.nodes.start + index <= document.nodes.top) { -// return document.nodes.start + index - 1 -// } -// return NULL -//} -// -///** -// * Get the root object. -// */ -// -//YAML_DECLARE(yaml_node_t *) -//yaml_document_get_root_node(document *yaml_document_t) -//{ -// assert(document) // Non-NULL document object is expected. -// -// if (document.nodes.top != document.nodes.start) { -// return document.nodes.start -// } -// return NULL -//} -// -///* -// * Add a scalar node to a document. -// */ -// -//YAML_DECLARE(int) -//yaml_document_add_scalar(document *yaml_document_t, -// tag *yaml_char_t, value *yaml_char_t, length int, -// style yaml_scalar_style_t) -//{ -// struct { -// error yaml_error_type_t -// } context -// mark yaml_mark_t = { 0, 0, 0 } -// tag_copy *yaml_char_t = NULL -// value_copy *yaml_char_t = NULL -// node yaml_node_t -// -// assert(document) // Non-NULL document object is expected. -// assert(value) // Non-NULL value is expected. -// -// if (!tag) { -// tag = (yaml_char_t *)YAML_DEFAULT_SCALAR_TAG -// } -// -// if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error -// tag_copy = yaml_strdup(tag) -// if (!tag_copy) goto error -// -// if (length < 0) { -// length = strlen((char *)value) -// } -// -// if (!yaml_check_utf8(value, length)) goto error -// value_copy = yaml_malloc(length+1) -// if (!value_copy) goto error -// memcpy(value_copy, value, length) -// value_copy[length] = '\0' -// -// SCALAR_NODE_INIT(node, tag_copy, value_copy, length, style, mark, mark) -// if (!PUSH(&context, document.nodes, node)) goto error -// -// return document.nodes.top - document.nodes.start -// -//error: -// yaml_free(tag_copy) -// yaml_free(value_copy) -// -// return 0 -//} -// -///* -// * Add a sequence node to a document. -// */ -// -//YAML_DECLARE(int) -//yaml_document_add_sequence(document *yaml_document_t, -// tag *yaml_char_t, style yaml_sequence_style_t) -//{ -// struct { -// error yaml_error_type_t -// } context -// mark yaml_mark_t = { 0, 0, 0 } -// tag_copy *yaml_char_t = NULL -// struct { -// start *yaml_node_item_t -// end *yaml_node_item_t -// top *yaml_node_item_t -// } items = { NULL, NULL, NULL } -// node yaml_node_t -// -// assert(document) // Non-NULL document object is expected. 
-// -// if (!tag) { -// tag = (yaml_char_t *)YAML_DEFAULT_SEQUENCE_TAG -// } -// -// if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error -// tag_copy = yaml_strdup(tag) -// if (!tag_copy) goto error -// -// if (!STACK_INIT(&context, items, INITIAL_STACK_SIZE)) goto error -// -// SEQUENCE_NODE_INIT(node, tag_copy, items.start, items.end, -// style, mark, mark) -// if (!PUSH(&context, document.nodes, node)) goto error -// -// return document.nodes.top - document.nodes.start -// -//error: -// STACK_DEL(&context, items) -// yaml_free(tag_copy) -// -// return 0 -//} -// -///* -// * Add a mapping node to a document. -// */ -// -//YAML_DECLARE(int) -//yaml_document_add_mapping(document *yaml_document_t, -// tag *yaml_char_t, style yaml_mapping_style_t) -//{ -// struct { -// error yaml_error_type_t -// } context -// mark yaml_mark_t = { 0, 0, 0 } -// tag_copy *yaml_char_t = NULL -// struct { -// start *yaml_node_pair_t -// end *yaml_node_pair_t -// top *yaml_node_pair_t -// } pairs = { NULL, NULL, NULL } -// node yaml_node_t -// -// assert(document) // Non-NULL document object is expected. -// -// if (!tag) { -// tag = (yaml_char_t *)YAML_DEFAULT_MAPPING_TAG -// } -// -// if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error -// tag_copy = yaml_strdup(tag) -// if (!tag_copy) goto error -// -// if (!STACK_INIT(&context, pairs, INITIAL_STACK_SIZE)) goto error -// -// MAPPING_NODE_INIT(node, tag_copy, pairs.start, pairs.end, -// style, mark, mark) -// if (!PUSH(&context, document.nodes, node)) goto error -// -// return document.nodes.top - document.nodes.start -// -//error: -// STACK_DEL(&context, pairs) -// yaml_free(tag_copy) -// -// return 0 -//} -// -///* -// * Append an item to a sequence node. -// */ -// -//YAML_DECLARE(int) -//yaml_document_append_sequence_item(document *yaml_document_t, -// sequence int, item int) -//{ -// struct { -// error yaml_error_type_t -// } context -// -// assert(document) // Non-NULL document is required. -// assert(sequence > 0 -// && document.nodes.start + sequence <= document.nodes.top) -// // Valid sequence id is required. -// assert(document.nodes.start[sequence-1].type == YAML_SEQUENCE_NODE) -// // A sequence node is required. -// assert(item > 0 && document.nodes.start + item <= document.nodes.top) -// // Valid item id is required. -// -// if (!PUSH(&context, -// document.nodes.start[sequence-1].data.sequence.items, item)) -// return 0 -// -// return 1 -//} -// -///* -// * Append a pair of a key and a value to a mapping node. -// */ -// -//YAML_DECLARE(int) -//yaml_document_append_mapping_pair(document *yaml_document_t, -// mapping int, key int, value int) -//{ -// struct { -// error yaml_error_type_t -// } context -// -// pair yaml_node_pair_t -// -// assert(document) // Non-NULL document is required. -// assert(mapping > 0 -// && document.nodes.start + mapping <= document.nodes.top) -// // Valid mapping id is required. -// assert(document.nodes.start[mapping-1].type == YAML_MAPPING_NODE) -// // A mapping node is required. -// assert(key > 0 && document.nodes.start + key <= document.nodes.top) -// // Valid key id is required. -// assert(value > 0 && document.nodes.start + value <= document.nodes.top) -// // Valid value id is required. 
-// -// pair.key = key -// pair.value = value -// -// if (!PUSH(&context, -// document.nodes.start[mapping-1].data.mapping.pairs, pair)) -// return 0 -// -// return 1 -//} -// -// diff --git a/cli/internal/yaml/decode.go b/cli/internal/yaml/decode.go deleted file mode 100644 index 0173b6982e843..0000000000000 --- a/cli/internal/yaml/decode.go +++ /dev/null @@ -1,1000 +0,0 @@ -// -// Copyright (c) 2011-2019 Canonical Ltd -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package yaml - -import ( - "encoding" - "encoding/base64" - "fmt" - "io" - "math" - "reflect" - "strconv" - "time" -) - -// ---------------------------------------------------------------------------- -// Parser, produces a node tree out of a libyaml event stream. - -type parser struct { - parser yaml_parser_t - event yaml_event_t - doc *Node - anchors map[string]*Node - doneInit bool - textless bool -} - -func newParser(b []byte) *parser { - p := parser{} - if !yaml_parser_initialize(&p.parser) { - panic("failed to initialize YAML parser") - } - if len(b) == 0 { - b = []byte{'\n'} - } - yaml_parser_set_input_string(&p.parser, b) - return &p -} - -func newParserFromReader(r io.Reader) *parser { - p := parser{} - if !yaml_parser_initialize(&p.parser) { - panic("failed to initialize YAML parser") - } - yaml_parser_set_input_reader(&p.parser, r) - return &p -} - -func (p *parser) init() { - if p.doneInit { - return - } - p.anchors = make(map[string]*Node) - p.expect(yaml_STREAM_START_EVENT) - p.doneInit = true -} - -func (p *parser) destroy() { - if p.event.typ != yaml_NO_EVENT { - yaml_event_delete(&p.event) - } - yaml_parser_delete(&p.parser) -} - -// expect consumes an event from the event stream and -// checks that it's of the expected type. -func (p *parser) expect(e yaml_event_type_t) { - if p.event.typ == yaml_NO_EVENT { - if !yaml_parser_parse(&p.parser, &p.event) { - p.fail() - } - } - if p.event.typ == yaml_STREAM_END_EVENT { - failf("attempted to go past the end of stream; corrupted value?") - } - if p.event.typ != e { - p.parser.problem = fmt.Sprintf("expected %s event but got %s", e, p.event.typ) - p.fail() - } - yaml_event_delete(&p.event) - p.event.typ = yaml_NO_EVENT -} - -// peek peeks at the next event in the event stream, -// puts the results into p.event and returns the event type. -func (p *parser) peek() yaml_event_type_t { - if p.event.typ != yaml_NO_EVENT { - return p.event.typ - } - // It's a curious choice of the underlying API to generally return a - // positive result on success, but in this case to return true in an error - // scenario. This was the source of bugs in the past (issue #666).
- if !yaml_parser_parse(&p.parser, &p.event) || p.parser.error != yaml_NO_ERROR { - p.fail() - } - return p.event.typ -} - -func (p *parser) fail() { - var where string - var line int - if p.parser.context_mark.line != 0 { - line = p.parser.context_mark.line - // Scanner errors don't iterate line before returning error - if p.parser.error == yaml_SCANNER_ERROR { - line++ - } - } else if p.parser.problem_mark.line != 0 { - line = p.parser.problem_mark.line - // Scanner errors don't iterate line before returning error - if p.parser.error == yaml_SCANNER_ERROR { - line++ - } - } - if line != 0 { - where = "line " + strconv.Itoa(line) + ": " - } - var msg string - if len(p.parser.problem) > 0 { - msg = p.parser.problem - } else { - msg = "unknown problem parsing YAML content" - } - failf("%s%s", where, msg) -} - -func (p *parser) anchor(n *Node, anchor []byte) { - if anchor != nil { - n.Anchor = string(anchor) - p.anchors[n.Anchor] = n - } -} - -func (p *parser) parse() *Node { - p.init() - switch p.peek() { - case yaml_SCALAR_EVENT: - return p.scalar() - case yaml_ALIAS_EVENT: - return p.alias() - case yaml_MAPPING_START_EVENT: - return p.mapping() - case yaml_SEQUENCE_START_EVENT: - return p.sequence() - case yaml_DOCUMENT_START_EVENT: - return p.document() - case yaml_STREAM_END_EVENT: - // Happens when attempting to decode an empty buffer. - return nil - case yaml_TAIL_COMMENT_EVENT: - panic("internal error: unexpected tail comment event (please report)") - default: - panic("internal error: attempted to parse unknown event (please report): " + p.event.typ.String()) - } -} - -func (p *parser) node(kind Kind, defaultTag, tag, value string) *Node { - var style Style - if tag != "" && tag != "!" { - tag = shortTag(tag) - style = TaggedStyle - } else if defaultTag != "" { - tag = defaultTag - } else if kind == ScalarNode { - tag, _ = resolve("", value) - } - n := &Node{ - Kind: kind, - Tag: tag, - Value: value, - Style: style, - } - if !p.textless { - n.Line = p.event.start_mark.line + 1 - n.Column = p.event.start_mark.column + 1 - n.HeadComment = string(p.event.head_comment) - n.LineComment = string(p.event.line_comment) - n.FootComment = string(p.event.foot_comment) - } - return n -} - -func (p *parser) parseChild(parent *Node) *Node { - child := p.parse() - parent.Content = append(parent.Content, child) - return child -} - -func (p *parser) document() *Node { - n := p.node(DocumentNode, "", "", "") - p.doc = n - p.expect(yaml_DOCUMENT_START_EVENT) - p.parseChild(n) - if p.peek() == yaml_DOCUMENT_END_EVENT { - n.FootComment = string(p.event.foot_comment) - } - p.expect(yaml_DOCUMENT_END_EVENT) - return n -} - -func (p *parser) alias() *Node { - n := p.node(AliasNode, "", "", string(p.event.anchor)) - n.Alias = p.anchors[n.Value] - if n.Alias == nil { - failf("unknown anchor '%s' referenced", n.Value) - } - p.expect(yaml_ALIAS_EVENT) - return n -} - -func (p *parser) scalar() *Node { - var parsedStyle = p.event.scalar_style() - var nodeStyle Style - switch { - case parsedStyle&yaml_DOUBLE_QUOTED_SCALAR_STYLE != 0: - nodeStyle = DoubleQuotedStyle - case parsedStyle&yaml_SINGLE_QUOTED_SCALAR_STYLE != 0: - nodeStyle = SingleQuotedStyle - case parsedStyle&yaml_LITERAL_SCALAR_STYLE != 0: - nodeStyle = LiteralStyle - case parsedStyle&yaml_FOLDED_SCALAR_STYLE != 0: - nodeStyle = FoldedStyle - } - var nodeValue = string(p.event.value) - var nodeTag = string(p.event.tag) - var defaultTag string - if nodeStyle == 0 { - if nodeValue == "<<" { - defaultTag = mergeTag - } - } else { - defaultTag = strTag - 
} - n := p.node(ScalarNode, defaultTag, nodeTag, nodeValue) - n.Style |= nodeStyle - p.anchor(n, p.event.anchor) - p.expect(yaml_SCALAR_EVENT) - return n -} - -func (p *parser) sequence() *Node { - n := p.node(SequenceNode, seqTag, string(p.event.tag), "") - if p.event.sequence_style()&yaml_FLOW_SEQUENCE_STYLE != 0 { - n.Style |= FlowStyle - } - p.anchor(n, p.event.anchor) - p.expect(yaml_SEQUENCE_START_EVENT) - for p.peek() != yaml_SEQUENCE_END_EVENT { - p.parseChild(n) - } - n.LineComment = string(p.event.line_comment) - n.FootComment = string(p.event.foot_comment) - p.expect(yaml_SEQUENCE_END_EVENT) - return n -} - -func (p *parser) mapping() *Node { - n := p.node(MappingNode, mapTag, string(p.event.tag), "") - block := true - if p.event.mapping_style()&yaml_FLOW_MAPPING_STYLE != 0 { - block = false - n.Style |= FlowStyle - } - p.anchor(n, p.event.anchor) - p.expect(yaml_MAPPING_START_EVENT) - for p.peek() != yaml_MAPPING_END_EVENT { - k := p.parseChild(n) - if block && k.FootComment != "" { - // Must be a foot comment for the prior value when being dedented. - if len(n.Content) > 2 { - n.Content[len(n.Content)-3].FootComment = k.FootComment - k.FootComment = "" - } - } - v := p.parseChild(n) - if k.FootComment == "" && v.FootComment != "" { - k.FootComment = v.FootComment - v.FootComment = "" - } - if p.peek() == yaml_TAIL_COMMENT_EVENT { - if k.FootComment == "" { - k.FootComment = string(p.event.foot_comment) - } - p.expect(yaml_TAIL_COMMENT_EVENT) - } - } - n.LineComment = string(p.event.line_comment) - n.FootComment = string(p.event.foot_comment) - if n.Style&FlowStyle == 0 && n.FootComment != "" && len(n.Content) > 1 { - n.Content[len(n.Content)-2].FootComment = n.FootComment - n.FootComment = "" - } - p.expect(yaml_MAPPING_END_EVENT) - return n -} - -// ---------------------------------------------------------------------------- -// Decoder, unmarshals a node into a provided value. - -type decoder struct { - doc *Node - aliases map[*Node]bool - terrors []string - - stringMapType reflect.Type - generalMapType reflect.Type - - knownFields bool - uniqueKeys bool - decodeCount int - aliasCount int - aliasDepth int - - mergedFields map[interface{}]bool -} - -var ( - nodeType = reflect.TypeOf(Node{}) - durationType = reflect.TypeOf(time.Duration(0)) - stringMapType = reflect.TypeOf(map[string]interface{}{}) - generalMapType = reflect.TypeOf(map[interface{}]interface{}{}) - ifaceType = generalMapType.Elem() - timeType = reflect.TypeOf(time.Time{}) - ptrTimeType = reflect.TypeOf(&time.Time{}) -) - -func newDecoder() *decoder { - d := &decoder{ - stringMapType: stringMapType, - generalMapType: generalMapType, - uniqueKeys: true, - } - d.aliases = make(map[*Node]bool) - return d -} - -func (d *decoder) terror(n *Node, tag string, out reflect.Value) { - if n.Tag != "" { - tag = n.Tag - } - value := n.Value - if tag != seqTag && tag != mapTag { - if len(value) > 10 { - value = " `" + value[:7] + "...`" - } else { - value = " `" + value + "`" - } - } - d.terrors = append(d.terrors, fmt.Sprintf("line %d: cannot unmarshal %s%s into %s", n.Line, shortTag(tag), value, out.Type())) -} - -func (d *decoder) callUnmarshaler(n *Node, u Unmarshaler) (good bool) { - err := u.UnmarshalYAML(n) - if e, ok := err.(*TypeError); ok { - d.terrors = append(d.terrors, e.Errors...) 
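terror and callUnmarshaler above accumulate per-field problems in d.terrors instead of aborting, so decoding continues past bad values. Through the public API (assuming upstream gopkg.in/yaml.v3 behavior) these surface as a single *yaml.TypeError:

```go
package main

import (
	"errors"
	"fmt"

	"gopkg.in/yaml.v3"
)

type Config struct {
	Name  string `yaml:"name"`
	Port  int    `yaml:"port"`
	Debug bool   `yaml:"debug"`
}

func main() {
	src := "name: api\nport: not-a-number\ndebug: 42\n"

	var c Config
	err := yaml.Unmarshal([]byte(src), &c)

	var te *yaml.TypeError
	if errors.As(err, &te) {
		// One entry per field that failed; fields that did decode are kept.
		for _, msg := range te.Errors {
			fmt.Println(msg)
		}
	}
	fmt.Println(c.Name) // "api" survived the partial failure
}
```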
- return false - } - if err != nil { - fail(err) - } - return true -} - -func (d *decoder) callObsoleteUnmarshaler(n *Node, u obsoleteUnmarshaler) (good bool) { - terrlen := len(d.terrors) - err := u.UnmarshalYAML(func(v interface{}) (err error) { - defer handleErr(&err) - d.unmarshal(n, reflect.ValueOf(v)) - if len(d.terrors) > terrlen { - issues := d.terrors[terrlen:] - d.terrors = d.terrors[:terrlen] - return &TypeError{issues} - } - return nil - }) - if e, ok := err.(*TypeError); ok { - d.terrors = append(d.terrors, e.Errors...) - return false - } - if err != nil { - fail(err) - } - return true -} - -// d.prepare initializes and dereferences pointers and calls UnmarshalYAML -// if a value is found to implement it. -// It returns the initialized and dereferenced out value, whether -// unmarshalling was already done by UnmarshalYAML, and if so whether -// its types unmarshalled appropriately. -// -// If n holds a null value, prepare returns before doing anything. -func (d *decoder) prepare(n *Node, out reflect.Value) (newout reflect.Value, unmarshaled, good bool) { - if n.ShortTag() == nullTag { - return out, false, false - } - again := true - for again { - again = false - if out.Kind() == reflect.Ptr { - if out.IsNil() { - out.Set(reflect.New(out.Type().Elem())) - } - out = out.Elem() - again = true - } - if out.CanAddr() { - outi := out.Addr().Interface() - if u, ok := outi.(Unmarshaler); ok { - good = d.callUnmarshaler(n, u) - return out, true, good - } - if u, ok := outi.(obsoleteUnmarshaler); ok { - good = d.callObsoleteUnmarshaler(n, u) - return out, true, good - } - } - } - return out, false, false -} - -func (d *decoder) fieldByIndex(n *Node, v reflect.Value, index []int) (field reflect.Value) { - if n.ShortTag() == nullTag { - return reflect.Value{} - } - for _, num := range index { - for { - if v.Kind() == reflect.Ptr { - if v.IsNil() { - v.Set(reflect.New(v.Type().Elem())) - } - v = v.Elem() - continue - } - break - } - v = v.Field(num) - } - return v -} - -const ( - // 400,000 decode operations is ~500kb of dense object declarations, or - // ~5kb of dense object declarations with 10000% alias expansion - alias_ratio_range_low = 400000 - - // 4,000,000 decode operations is ~5MB of dense object declarations, or - // ~4.5MB of dense object declarations with 10% alias expansion - alias_ratio_range_high = 4000000 - - // alias_ratio_range is the range over which we scale allowed alias ratios - alias_ratio_range = float64(alias_ratio_range_high - alias_ratio_range_low) -) - -func allowedAliasRatio(decodeCount int) float64 { - switch { - case decodeCount <= alias_ratio_range_low: - // allow 99% to come from alias expansion for small-to-medium documents - return 0.99 - case decodeCount >= alias_ratio_range_high: - // allow 10% to come from alias expansion for very large documents - return 0.10 - default: - // scale smoothly from 99% down to 10% over the range. - // this maps to 396,000 - 400,000 allowed alias-driven decodes over the range. - // 400,000 decode operations is ~100MB of allocations in worst-case scenarios (single-item maps). 
- return 0.99 - 0.89*(float64(decodeCount-alias_ratio_range_low)/alias_ratio_range) - } -} - -func (d *decoder) unmarshal(n *Node, out reflect.Value) (good bool) { - d.decodeCount++ - if d.aliasDepth > 0 { - d.aliasCount++ - } - if d.aliasCount > 100 && d.decodeCount > 1000 && float64(d.aliasCount)/float64(d.decodeCount) > allowedAliasRatio(d.decodeCount) { - failf("document contains excessive aliasing") - } - if out.Type() == nodeType { - out.Set(reflect.ValueOf(n).Elem()) - return true - } - switch n.Kind { - case DocumentNode: - return d.document(n, out) - case AliasNode: - return d.alias(n, out) - } - out, unmarshaled, good := d.prepare(n, out) - if unmarshaled { - return good - } - switch n.Kind { - case ScalarNode: - good = d.scalar(n, out) - case MappingNode: - good = d.mapping(n, out) - case SequenceNode: - good = d.sequence(n, out) - case 0: - if n.IsZero() { - return d.null(out) - } - fallthrough - default: - failf("cannot decode node with unknown kind %d", n.Kind) - } - return good -} - -func (d *decoder) document(n *Node, out reflect.Value) (good bool) { - if len(n.Content) == 1 { - d.doc = n - d.unmarshal(n.Content[0], out) - return true - } - return false -} - -func (d *decoder) alias(n *Node, out reflect.Value) (good bool) { - if d.aliases[n] { - // TODO this could actually be allowed in some circumstances. - failf("anchor '%s' value contains itself", n.Value) - } - d.aliases[n] = true - d.aliasDepth++ - good = d.unmarshal(n.Alias, out) - d.aliasDepth-- - delete(d.aliases, n) - return good -} - -var zeroValue reflect.Value - -func resetMap(out reflect.Value) { - for _, k := range out.MapKeys() { - out.SetMapIndex(k, zeroValue) - } -} - -func (d *decoder) null(out reflect.Value) bool { - if out.CanAddr() { - switch out.Kind() { - case reflect.Interface, reflect.Ptr, reflect.Map, reflect.Slice: - out.Set(reflect.Zero(out.Type())) - return true - } - } - return false -} - -func (d *decoder) scalar(n *Node, out reflect.Value) bool { - var tag string - var resolved interface{} - if n.indicatedString() { - tag = strTag - resolved = n.Value - } else { - tag, resolved = resolve(n.Tag, n.Value) - if tag == binaryTag { - data, err := base64.StdEncoding.DecodeString(resolved.(string)) - if err != nil { - failf("!!binary value contains invalid base64 data") - } - resolved = string(data) - } - } - if resolved == nil { - return d.null(out) - } - if resolvedv := reflect.ValueOf(resolved); out.Type() == resolvedv.Type() { - // We've resolved to exactly the type we want, so use that. - out.Set(resolvedv) - return true - } - // Perhaps we can use the value as a TextUnmarshaler to - // set its value. - if out.CanAddr() { - u, ok := out.Addr().Interface().(encoding.TextUnmarshaler) - if ok { - var text []byte - if tag == binaryTag { - text = []byte(resolved.(string)) - } else { - // We let any value be unmarshaled into TextUnmarshaler. - // That might be more lax than we'd like, but the - // TextUnmarshaler itself should bowl out any dubious values. - text = []byte(n.Value) - } - err := u.UnmarshalText(text) - if err != nil { - fail(err) - } - return true - } - } - switch out.Kind() { - case reflect.String: - if tag == binaryTag { - out.SetString(resolved.(string)) - return true - } - out.SetString(n.Value) - return true - case reflect.Interface: - out.Set(reflect.ValueOf(resolved)) - return true - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - // This used to work in v2, but it's very unfriendly. 
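allowedAliasRatio above scales the tolerated share of alias-driven decode work from 99% down to 10% as documents grow, and unmarshal fails once aliasCount crosses it. The sketch below, assuming the vendored copy behaves like upstream gopkg.in/yaml.v3, trips that guard with a classic alias-expansion bomb:

```go
package main

import (
	"fmt"
	"strings"

	"gopkg.in/yaml.v3"
)

func main() {
	// Each level references the previous one ten times, so the expanded
	// document grows as 10^depth while the source stays tiny.
	var b strings.Builder
	b.WriteString("l0: &l0 [x, x, x, x, x, x, x, x, x, x]\n")
	for i := 1; i <= 6; i++ {
		fmt.Fprintf(&b, "l%d: &l%d [", i, i)
		for j := 0; j < 10; j++ {
			if j > 0 {
				b.WriteString(", ")
			}
			fmt.Fprintf(&b, "*l%d", i-1)
		}
		b.WriteString("]\n")
	}

	var out map[string]interface{}
	err := yaml.Unmarshal([]byte(b.String()), &out)
	fmt.Println(err) // expected: yaml: document contains excessive aliasing
}
```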
- isDuration := out.Type() == durationType - - switch resolved := resolved.(type) { - case int: - if !isDuration && !out.OverflowInt(int64(resolved)) { - out.SetInt(int64(resolved)) - return true - } - case int64: - if !isDuration && !out.OverflowInt(resolved) { - out.SetInt(resolved) - return true - } - case uint64: - if !isDuration && resolved <= math.MaxInt64 && !out.OverflowInt(int64(resolved)) { - out.SetInt(int64(resolved)) - return true - } - case float64: - if !isDuration && resolved <= math.MaxInt64 && !out.OverflowInt(int64(resolved)) { - out.SetInt(int64(resolved)) - return true - } - case string: - if out.Type() == durationType { - d, err := time.ParseDuration(resolved) - if err == nil { - out.SetInt(int64(d)) - return true - } - } - } - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: - switch resolved := resolved.(type) { - case int: - if resolved >= 0 && !out.OverflowUint(uint64(resolved)) { - out.SetUint(uint64(resolved)) - return true - } - case int64: - if resolved >= 0 && !out.OverflowUint(uint64(resolved)) { - out.SetUint(uint64(resolved)) - return true - } - case uint64: - if !out.OverflowUint(uint64(resolved)) { - out.SetUint(uint64(resolved)) - return true - } - case float64: - if resolved <= math.MaxUint64 && !out.OverflowUint(uint64(resolved)) { - out.SetUint(uint64(resolved)) - return true - } - } - case reflect.Bool: - switch resolved := resolved.(type) { - case bool: - out.SetBool(resolved) - return true - case string: - // This offers some compatibility with the 1.1 spec (https://yaml.org/type/bool.html). - // It only works if explicitly attempting to unmarshal into a typed bool value. - switch resolved { - case "y", "Y", "yes", "Yes", "YES", "on", "On", "ON": - out.SetBool(true) - return true - case "n", "N", "no", "No", "NO", "off", "Off", "OFF": - out.SetBool(false) - return true - } - } - case reflect.Float32, reflect.Float64: - switch resolved := resolved.(type) { - case int: - out.SetFloat(float64(resolved)) - return true - case int64: - out.SetFloat(float64(resolved)) - return true - case uint64: - out.SetFloat(float64(resolved)) - return true - case float64: - out.SetFloat(resolved) - return true - } - case reflect.Struct: - if resolvedv := reflect.ValueOf(resolved); out.Type() == resolvedv.Type() { - out.Set(resolvedv) - return true - } - case reflect.Ptr: - panic("yaml internal error: please report the issue") - } - d.terror(n, tag, out) - return false -} - -func settableValueOf(i interface{}) reflect.Value { - v := reflect.ValueOf(i) - sv := reflect.New(v.Type()).Elem() - sv.Set(v) - return sv -} - -func (d *decoder) sequence(n *Node, out reflect.Value) (good bool) { - l := len(n.Content) - - var iface reflect.Value - switch out.Kind() { - case reflect.Slice: - out.Set(reflect.MakeSlice(out.Type(), l, l)) - case reflect.Array: - if l != out.Len() { - failf("invalid array: want %d elements but got %d", out.Len(), l) - } - case reflect.Interface: - // No type hints. Will have to use a generic sequence. 
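The int case above deliberately refuses to pour bare integers into time.Duration (only strings accepted by time.ParseDuration get through), and the bool case re-admits YAML 1.1 words like "yes" and "on" when the target is a typed bool. A sketch of both behaviors, assuming upstream gopkg.in/yaml.v3:

```go
package main

import (
	"fmt"
	"time"

	"gopkg.in/yaml.v3"
)

type Settings struct {
	Timeout time.Duration `yaml:"timeout"`
	Enabled bool          `yaml:"enabled"`
}

func main() {
	var ok Settings
	// "1m30s" goes through time.ParseDuration; "yes" is accepted only
	// because the target field is a typed bool (YAML 1.1 compatibility).
	if err := yaml.Unmarshal([]byte("timeout: 1m30s\nenabled: yes\n"), &ok); err != nil {
		panic(err)
	}
	fmt.Println(ok.Timeout, ok.Enabled) // 1m30s true

	var bad Settings
	// A bare integer is rejected for time.Duration; the second-vs-nanosecond
	// ambiguity is what the "very unfriendly" comment above refers to.
	err := yaml.Unmarshal([]byte("timeout: 90\n"), &bad)
	fmt.Println(err != nil) // true
}
```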
- iface = out - out = settableValueOf(make([]interface{}, l)) - default: - d.terror(n, seqTag, out) - return false - } - et := out.Type().Elem() - - j := 0 - for i := 0; i < l; i++ { - e := reflect.New(et).Elem() - if ok := d.unmarshal(n.Content[i], e); ok { - out.Index(j).Set(e) - j++ - } - } - if out.Kind() != reflect.Array { - out.Set(out.Slice(0, j)) - } - if iface.IsValid() { - iface.Set(out) - } - return true -} - -func (d *decoder) mapping(n *Node, out reflect.Value) (good bool) { - l := len(n.Content) - if d.uniqueKeys { - nerrs := len(d.terrors) - for i := 0; i < l; i += 2 { - ni := n.Content[i] - for j := i + 2; j < l; j += 2 { - nj := n.Content[j] - if ni.Kind == nj.Kind && ni.Value == nj.Value { - d.terrors = append(d.terrors, fmt.Sprintf("line %d: mapping key %#v already defined at line %d", nj.Line, nj.Value, ni.Line)) - } - } - } - if len(d.terrors) > nerrs { - return false - } - } - switch out.Kind() { - case reflect.Struct: - return d.mappingStruct(n, out) - case reflect.Map: - // okay - case reflect.Interface: - iface := out - if isStringMap(n) { - out = reflect.MakeMap(d.stringMapType) - } else { - out = reflect.MakeMap(d.generalMapType) - } - iface.Set(out) - default: - d.terror(n, mapTag, out) - return false - } - - outt := out.Type() - kt := outt.Key() - et := outt.Elem() - - stringMapType := d.stringMapType - generalMapType := d.generalMapType - if outt.Elem() == ifaceType { - if outt.Key().Kind() == reflect.String { - d.stringMapType = outt - } else if outt.Key() == ifaceType { - d.generalMapType = outt - } - } - - mergedFields := d.mergedFields - d.mergedFields = nil - - var mergeNode *Node - - mapIsNew := false - if out.IsNil() { - out.Set(reflect.MakeMap(outt)) - mapIsNew = true - } - for i := 0; i < l; i += 2 { - if isMerge(n.Content[i]) { - mergeNode = n.Content[i+1] - continue - } - k := reflect.New(kt).Elem() - if d.unmarshal(n.Content[i], k) { - if mergedFields != nil { - ki := k.Interface() - if mergedFields[ki] { - continue - } - mergedFields[ki] = true - } - kkind := k.Kind() - if kkind == reflect.Interface { - kkind = k.Elem().Kind() - } - if kkind == reflect.Map || kkind == reflect.Slice { - failf("invalid map key: %#v", k.Interface()) - } - e := reflect.New(et).Elem() - if d.unmarshal(n.Content[i+1], e) || n.Content[i+1].ShortTag() == nullTag && (mapIsNew || !out.MapIndex(k).IsValid()) { - out.SetMapIndex(k, e) - } - } - } - - d.mergedFields = mergedFields - if mergeNode != nil { - d.merge(n, mergeNode, out) - } - - d.stringMapType = stringMapType - d.generalMapType = generalMapType - return true -} - -func isStringMap(n *Node) bool { - if n.Kind != MappingNode { - return false - } - l := len(n.Content) - for i := 0; i < l; i += 2 { - shortTag := n.Content[i].ShortTag() - if shortTag != strTag && shortTag != mergeTag { - return false - } - } - return true -} - -func (d *decoder) mappingStruct(n *Node, out reflect.Value) (good bool) { - sinfo, err := getStructInfo(out.Type()) - if err != nil { - panic(err) - } - - var inlineMap reflect.Value - var elemType reflect.Type - if sinfo.InlineMap != -1 { - inlineMap = out.Field(sinfo.InlineMap) - elemType = inlineMap.Type().Elem() - } - - for _, index := range sinfo.InlineUnmarshalers { - field := d.fieldByIndex(n, out, index) - d.prepare(n, field) - } - - mergedFields := d.mergedFields - d.mergedFields = nil - var mergeNode *Node - var doneFields []bool - if d.uniqueKeys { - doneFields = make([]bool, len(sinfo.FieldsList)) - } - name := settableValueOf("") - l := len(n.Content) - for i := 0; i < l; i += 2 { - 
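The mapping decoder above recognizes the "<<" merge key (isMerge, mergeNode) and replays the merged maps afterwards, with mergedFields ensuring explicitly set keys are never overridden. A short merge-key example, assuming upstream gopkg.in/yaml.v3 behavior:

```go
package main

import (
	"fmt"

	"gopkg.in/yaml.v3"
)

func main() {
	src := `
defaults: &defaults
  retries: 3
  timeout: 30

production:
  <<: *defaults
  timeout: 60
`
	var out map[string]map[string]int
	if err := yaml.Unmarshal([]byte(src), &out); err != nil {
		panic(err)
	}

	// Keys set explicitly win over merged ones, as mergedFields enforces.
	fmt.Println(out["production"]["timeout"]) // 60
	fmt.Println(out["production"]["retries"]) // 3
}
```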
ni := n.Content[i] - if isMerge(ni) { - mergeNode = n.Content[i+1] - continue - } - if !d.unmarshal(ni, name) { - continue - } - sname := name.String() - if mergedFields != nil { - if mergedFields[sname] { - continue - } - mergedFields[sname] = true - } - if info, ok := sinfo.FieldsMap[sname]; ok { - if d.uniqueKeys { - if doneFields[info.Id] { - d.terrors = append(d.terrors, fmt.Sprintf("line %d: field %s already set in type %s", ni.Line, name.String(), out.Type())) - continue - } - doneFields[info.Id] = true - } - var field reflect.Value - if info.Inline == nil { - field = out.Field(info.Num) - } else { - field = d.fieldByIndex(n, out, info.Inline) - } - d.unmarshal(n.Content[i+1], field) - } else if sinfo.InlineMap != -1 { - if inlineMap.IsNil() { - inlineMap.Set(reflect.MakeMap(inlineMap.Type())) - } - value := reflect.New(elemType).Elem() - d.unmarshal(n.Content[i+1], value) - inlineMap.SetMapIndex(name, value) - } else if d.knownFields { - d.terrors = append(d.terrors, fmt.Sprintf("line %d: field %s not found in type %s", ni.Line, name.String(), out.Type())) - } - } - - d.mergedFields = mergedFields - if mergeNode != nil { - d.merge(n, mergeNode, out) - } - return true -} - -func failWantMap() { - failf("map merge requires map or sequence of maps as the value") -} - -func (d *decoder) merge(parent *Node, merge *Node, out reflect.Value) { - mergedFields := d.mergedFields - if mergedFields == nil { - d.mergedFields = make(map[interface{}]bool) - for i := 0; i < len(parent.Content); i += 2 { - k := reflect.New(ifaceType).Elem() - if d.unmarshal(parent.Content[i], k) { - d.mergedFields[k.Interface()] = true - } - } - } - - switch merge.Kind { - case MappingNode: - d.unmarshal(merge, out) - case AliasNode: - if merge.Alias != nil && merge.Alias.Kind != MappingNode { - failWantMap() - } - d.unmarshal(merge, out) - case SequenceNode: - for i := 0; i < len(merge.Content); i++ { - ni := merge.Content[i] - if ni.Kind == AliasNode { - if ni.Alias != nil && ni.Alias.Kind != MappingNode { - failWantMap() - } - } else if ni.Kind != MappingNode { - failWantMap() - } - d.unmarshal(ni, out) - } - default: - failWantMap() - } - - d.mergedFields = mergedFields -} - -func isMerge(n *Node) bool { - return n.Kind == ScalarNode && n.Value == "<<" && (n.Tag == "" || n.Tag == "!" || shortTag(n.Tag) == mergeTag) -} diff --git a/cli/internal/yaml/emitterc.go b/cli/internal/yaml/emitterc.go deleted file mode 100644 index dde20e5079dd8..0000000000000 --- a/cli/internal/yaml/emitterc.go +++ /dev/null @@ -1,2019 +0,0 @@ -// -// Copyright (c) 2011-2019 Canonical Ltd -// Copyright (c) 2006-2010 Kirill Simonov -// -// Permission is hereby granted, free of charge, to any person obtaining a copy of -// this software and associated documentation files (the "Software"), to deal in -// the Software without restriction, including without limitation the rights to -// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -// of the Software, and to permit persons to whom the Software is furnished to do -// so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in all -// copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -// SOFTWARE. - -package yaml - -import ( - "bytes" - "fmt" -) - -// Flush the buffer if needed. -func flush(emitter *yaml_emitter_t) bool { - if emitter.buffer_pos+5 >= len(emitter.buffer) { - return yaml_emitter_flush(emitter) - } - return true -} - -// Put a character to the output buffer. -func put(emitter *yaml_emitter_t, value byte) bool { - if emitter.buffer_pos+5 >= len(emitter.buffer) && !yaml_emitter_flush(emitter) { - return false - } - emitter.buffer[emitter.buffer_pos] = value - emitter.buffer_pos++ - emitter.column++ - return true -} - -// Put a line break to the output buffer. -func put_break(emitter *yaml_emitter_t) bool { - if emitter.buffer_pos+5 >= len(emitter.buffer) && !yaml_emitter_flush(emitter) { - return false - } - switch emitter.line_break { - case yaml_CR_BREAK: - emitter.buffer[emitter.buffer_pos] = '\r' - emitter.buffer_pos += 1 - case yaml_LN_BREAK: - emitter.buffer[emitter.buffer_pos] = '\n' - emitter.buffer_pos += 1 - case yaml_CRLN_BREAK: - emitter.buffer[emitter.buffer_pos+0] = '\r' - emitter.buffer[emitter.buffer_pos+1] = '\n' - emitter.buffer_pos += 2 - default: - panic("unknown line break setting") - } - if emitter.column == 0 { - emitter.space_above = true - } - emitter.column = 0 - emitter.line++ - // [Go] Do this here and below and drop from everywhere else (see commented lines). - emitter.indention = true - return true -} - -// Copy a character from a string into buffer. -func write(emitter *yaml_emitter_t, s []byte, i *int) bool { - if emitter.buffer_pos+5 >= len(emitter.buffer) && !yaml_emitter_flush(emitter) { - return false - } - p := emitter.buffer_pos - w := width(s[*i]) - switch w { - case 4: - emitter.buffer[p+3] = s[*i+3] - fallthrough - case 3: - emitter.buffer[p+2] = s[*i+2] - fallthrough - case 2: - emitter.buffer[p+1] = s[*i+1] - fallthrough - case 1: - emitter.buffer[p+0] = s[*i+0] - default: - panic("unknown character width") - } - emitter.column++ - emitter.buffer_pos += w - *i += w - return true -} - -// Write a whole string into buffer. -func write_all(emitter *yaml_emitter_t, s []byte) bool { - for i := 0; i < len(s); { - if !write(emitter, s, &i) { - return false - } - } - return true -} - -// Copy a line break character from a string into buffer. -func write_break(emitter *yaml_emitter_t, s []byte, i *int) bool { - if s[*i] == '\n' { - if !put_break(emitter) { - return false - } - *i++ - } else { - if !write(emitter, s, i) { - return false - } - if emitter.column == 0 { - emitter.space_above = true - } - emitter.column = 0 - emitter.line++ - // [Go] Do this here and above and drop from everywhere else (see commented lines). - emitter.indention = true - } - return true -} - -// Set an emitter error and return false. -func yaml_emitter_set_emitter_error(emitter *yaml_emitter_t, problem string) bool { - emitter.error = yaml_EMITTER_ERROR - emitter.problem = problem - return false -} - -// Emit an event. 
-func yaml_emitter_emit(emitter *yaml_emitter_t, event *yaml_event_t) bool { - emitter.events = append(emitter.events, *event) - for !yaml_emitter_need_more_events(emitter) { - event := &emitter.events[emitter.events_head] - if !yaml_emitter_analyze_event(emitter, event) { - return false - } - if !yaml_emitter_state_machine(emitter, event) { - return false - } - yaml_event_delete(event) - emitter.events_head++ - } - return true -} - -// Check if we need to accumulate more events before emitting. -// -// We accumulate extra -// - 1 event for DOCUMENT-START -// - 2 events for SEQUENCE-START -// - 3 events for MAPPING-START -func yaml_emitter_need_more_events(emitter *yaml_emitter_t) bool { - if emitter.events_head == len(emitter.events) { - return true - } - var accumulate int - switch emitter.events[emitter.events_head].typ { - case yaml_DOCUMENT_START_EVENT: - accumulate = 1 - break - case yaml_SEQUENCE_START_EVENT: - accumulate = 2 - break - case yaml_MAPPING_START_EVENT: - accumulate = 3 - break - default: - return false - } - if len(emitter.events)-emitter.events_head > accumulate { - return false - } - var level int - for i := emitter.events_head; i < len(emitter.events); i++ { - switch emitter.events[i].typ { - case yaml_STREAM_START_EVENT, yaml_DOCUMENT_START_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT: - level++ - case yaml_STREAM_END_EVENT, yaml_DOCUMENT_END_EVENT, yaml_SEQUENCE_END_EVENT, yaml_MAPPING_END_EVENT: - level-- - } - if level == 0 { - return false - } - } - return true -} - -// Append a directive to the directives stack. -func yaml_emitter_append_tag_directive(emitter *yaml_emitter_t, value *yaml_tag_directive_t, allow_duplicates bool) bool { - for i := 0; i < len(emitter.tag_directives); i++ { - if bytes.Equal(value.handle, emitter.tag_directives[i].handle) { - if allow_duplicates { - return true - } - return yaml_emitter_set_emitter_error(emitter, "duplicate %TAG directive") - } - } - - // [Go] Do we actually need to copy this given garbage collection - // and the lack of deallocating destructors? - tag_copy := yaml_tag_directive_t{ - handle: make([]byte, len(value.handle)), - prefix: make([]byte, len(value.prefix)), - } - copy(tag_copy.handle, value.handle) - copy(tag_copy.prefix, value.prefix) - emitter.tag_directives = append(emitter.tag_directives, tag_copy) - return true -} - -// Increase the indentation level. -func yaml_emitter_increase_indent(emitter *yaml_emitter_t, flow, indentless bool) bool { - emitter.indents = append(emitter.indents, emitter.indent) - if emitter.indent < 0 { - if flow { - emitter.indent = emitter.best_indent - } else { - emitter.indent = 0 - } - } else if !indentless { - // [Go] This was changed so that indentations are more regular. - if emitter.states[len(emitter.states)-1] == yaml_EMIT_BLOCK_SEQUENCE_ITEM_STATE { - // The first indent inside a sequence will just skip the "- " indicator. - emitter.indent += 2 - } else { - // Everything else aligns to the chosen indentation. - emitter.indent = emitter.best_indent * ((emitter.indent + emitter.best_indent) / emitter.best_indent) - } - } - return true -} - -// State dispatcher. 
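yaml_emitter_need_more_events above holds events back until one full level of nesting has closed (subject to the 1/2/3-event lookahead budget for document, sequence, and mapping starts), which is what lets the emitter decide up front whether a collection is empty or fits a compact form. A simplified, self-contained restatement of that scan; the Event/eventKind names here are illustrative and not the package's own, and the lookahead cap is omitted:

```go
package main

import "fmt"

type eventKind int

const (
	seqStart eventKind = iota
	seqEnd
	scalar
)

// needMore reports whether the buffered events starting at head still form an
// open construct: the emitter keeps accumulating until nesting returns to zero.
func needMore(events []eventKind, head int) bool {
	level := 0
	for i := head; i < len(events); i++ {
		switch events[i] {
		case seqStart:
			level++
		case seqEnd:
			level--
		}
		if level == 0 {
			return false // a complete unit is buffered; safe to emit
		}
	}
	return true
}

func main() {
	fmt.Println(needMore([]eventKind{seqStart, scalar}, 0))         // true: end not seen yet
	fmt.Println(needMore([]eventKind{seqStart, scalar, seqEnd}, 0)) // false
}
```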
-func yaml_emitter_state_machine(emitter *yaml_emitter_t, event *yaml_event_t) bool { - switch emitter.state { - default: - case yaml_EMIT_STREAM_START_STATE: - return yaml_emitter_emit_stream_start(emitter, event) - - case yaml_EMIT_FIRST_DOCUMENT_START_STATE: - return yaml_emitter_emit_document_start(emitter, event, true) - - case yaml_EMIT_DOCUMENT_START_STATE: - return yaml_emitter_emit_document_start(emitter, event, false) - - case yaml_EMIT_DOCUMENT_CONTENT_STATE: - return yaml_emitter_emit_document_content(emitter, event) - - case yaml_EMIT_DOCUMENT_END_STATE: - return yaml_emitter_emit_document_end(emitter, event) - - case yaml_EMIT_FLOW_SEQUENCE_FIRST_ITEM_STATE: - return yaml_emitter_emit_flow_sequence_item(emitter, event, true, false) - - case yaml_EMIT_FLOW_SEQUENCE_TRAIL_ITEM_STATE: - return yaml_emitter_emit_flow_sequence_item(emitter, event, false, true) - - case yaml_EMIT_FLOW_SEQUENCE_ITEM_STATE: - return yaml_emitter_emit_flow_sequence_item(emitter, event, false, false) - - case yaml_EMIT_FLOW_MAPPING_FIRST_KEY_STATE: - return yaml_emitter_emit_flow_mapping_key(emitter, event, true, false) - - case yaml_EMIT_FLOW_MAPPING_TRAIL_KEY_STATE: - return yaml_emitter_emit_flow_mapping_key(emitter, event, false, true) - - case yaml_EMIT_FLOW_MAPPING_KEY_STATE: - return yaml_emitter_emit_flow_mapping_key(emitter, event, false, false) - - case yaml_EMIT_FLOW_MAPPING_SIMPLE_VALUE_STATE: - return yaml_emitter_emit_flow_mapping_value(emitter, event, true) - - case yaml_EMIT_FLOW_MAPPING_VALUE_STATE: - return yaml_emitter_emit_flow_mapping_value(emitter, event, false) - - case yaml_EMIT_BLOCK_SEQUENCE_FIRST_ITEM_STATE: - return yaml_emitter_emit_block_sequence_item(emitter, event, true) - - case yaml_EMIT_BLOCK_SEQUENCE_ITEM_STATE: - return yaml_emitter_emit_block_sequence_item(emitter, event, false) - - case yaml_EMIT_BLOCK_MAPPING_FIRST_KEY_STATE: - return yaml_emitter_emit_block_mapping_key(emitter, event, true) - - case yaml_EMIT_BLOCK_MAPPING_KEY_STATE: - return yaml_emitter_emit_block_mapping_key(emitter, event, false) - - case yaml_EMIT_BLOCK_MAPPING_SIMPLE_VALUE_STATE: - return yaml_emitter_emit_block_mapping_value(emitter, event, true) - - case yaml_EMIT_BLOCK_MAPPING_VALUE_STATE: - return yaml_emitter_emit_block_mapping_value(emitter, event, false) - - case yaml_EMIT_END_STATE: - return yaml_emitter_set_emitter_error(emitter, "expected nothing after STREAM-END") - } - panic("invalid emitter state") -} - -// Expect STREAM-START. 
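The stream-start handler below clamps best_indent to the 2..9 range and best_width to 80 before anything is written, so out-of-range indents fall back silently. From the public API (assuming upstream gopkg.in/yaml.v3) that is reachable through Encoder.SetIndent:

```go
package main

import (
	"bytes"
	"fmt"

	"gopkg.in/yaml.v3"
)

func main() {
	doc := map[string]map[string]string{"outer": {"inner": "v"}}

	for _, indent := range []int{4, 12} {
		var buf bytes.Buffer
		enc := yaml.NewEncoder(&buf)
		enc.SetIndent(indent) // 12 is outside the 2..9 window and falls back to 2
		if err := enc.Encode(doc); err != nil {
			panic(err)
		}
		_ = enc.Close()
		fmt.Printf("indent=%d:\n%s", indent, buf.String())
	}
}
```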
-func yaml_emitter_emit_stream_start(emitter *yaml_emitter_t, event *yaml_event_t) bool { - if event.typ != yaml_STREAM_START_EVENT { - return yaml_emitter_set_emitter_error(emitter, "expected STREAM-START") - } - if emitter.encoding == yaml_ANY_ENCODING { - emitter.encoding = event.encoding - if emitter.encoding == yaml_ANY_ENCODING { - emitter.encoding = yaml_UTF8_ENCODING - } - } - if emitter.best_indent < 2 || emitter.best_indent > 9 { - emitter.best_indent = 2 - } - if emitter.best_width >= 0 && emitter.best_width <= emitter.best_indent*2 { - emitter.best_width = 80 - } - if emitter.best_width < 0 { - emitter.best_width = 1<<31 - 1 - } - if emitter.line_break == yaml_ANY_BREAK { - emitter.line_break = yaml_LN_BREAK - } - - emitter.indent = -1 - emitter.line = 0 - emitter.column = 0 - emitter.whitespace = true - emitter.indention = true - emitter.space_above = true - emitter.foot_indent = -1 - - if emitter.encoding != yaml_UTF8_ENCODING { - if !yaml_emitter_write_bom(emitter) { - return false - } - } - emitter.state = yaml_EMIT_FIRST_DOCUMENT_START_STATE - return true -} - -// Expect DOCUMENT-START or STREAM-END. -func yaml_emitter_emit_document_start(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool { - - if event.typ == yaml_DOCUMENT_START_EVENT { - - if event.version_directive != nil { - if !yaml_emitter_analyze_version_directive(emitter, event.version_directive) { - return false - } - } - - for i := 0; i < len(event.tag_directives); i++ { - tag_directive := &event.tag_directives[i] - if !yaml_emitter_analyze_tag_directive(emitter, tag_directive) { - return false - } - if !yaml_emitter_append_tag_directive(emitter, tag_directive, false) { - return false - } - } - - for i := 0; i < len(default_tag_directives); i++ { - tag_directive := &default_tag_directives[i] - if !yaml_emitter_append_tag_directive(emitter, tag_directive, true) { - return false - } - } - - implicit := event.implicit - if !first || emitter.canonical { - implicit = false - } - - if emitter.open_ended && (event.version_directive != nil || len(event.tag_directives) > 0) { - if !yaml_emitter_write_indicator(emitter, []byte("..."), true, false, false) { - return false - } - if !yaml_emitter_write_indent(emitter) { - return false - } - } - - if event.version_directive != nil { - implicit = false - if !yaml_emitter_write_indicator(emitter, []byte("%YAML"), true, false, false) { - return false - } - if !yaml_emitter_write_indicator(emitter, []byte("1.1"), true, false, false) { - return false - } - if !yaml_emitter_write_indent(emitter) { - return false - } - } - - if len(event.tag_directives) > 0 { - implicit = false - for i := 0; i < len(event.tag_directives); i++ { - tag_directive := &event.tag_directives[i] - if !yaml_emitter_write_indicator(emitter, []byte("%TAG"), true, false, false) { - return false - } - if !yaml_emitter_write_tag_handle(emitter, tag_directive.handle) { - return false - } - if !yaml_emitter_write_tag_content(emitter, tag_directive.prefix, true) { - return false - } - if !yaml_emitter_write_indent(emitter) { - return false - } - } - } - - if yaml_emitter_check_empty_document(emitter) { - implicit = false - } - if !implicit { - if !yaml_emitter_write_indent(emitter) { - return false - } - if !yaml_emitter_write_indicator(emitter, []byte("---"), true, false, false) { - return false - } - if emitter.canonical || true { - if !yaml_emitter_write_indent(emitter) { - return false - } - } - } - - if len(emitter.head_comment) > 0 { - if !yaml_emitter_process_head_comment(emitter) { - return false 
- } - if !put_break(emitter) { - return false - } - } - - emitter.state = yaml_EMIT_DOCUMENT_CONTENT_STATE - return true - } - - if event.typ == yaml_STREAM_END_EVENT { - if emitter.open_ended { - if !yaml_emitter_write_indicator(emitter, []byte("..."), true, false, false) { - return false - } - if !yaml_emitter_write_indent(emitter) { - return false - } - } - if !yaml_emitter_flush(emitter) { - return false - } - emitter.state = yaml_EMIT_END_STATE - return true - } - - return yaml_emitter_set_emitter_error(emitter, "expected DOCUMENT-START or STREAM-END") -} - -// Expect the root node. -func yaml_emitter_emit_document_content(emitter *yaml_emitter_t, event *yaml_event_t) bool { - emitter.states = append(emitter.states, yaml_EMIT_DOCUMENT_END_STATE) - - if !yaml_emitter_process_head_comment(emitter) { - return false - } - if !yaml_emitter_emit_node(emitter, event, true, false, false, false) { - return false - } - if !yaml_emitter_process_line_comment(emitter) { - return false - } - if !yaml_emitter_process_foot_comment(emitter) { - return false - } - return true -} - -// Expect DOCUMENT-END. -func yaml_emitter_emit_document_end(emitter *yaml_emitter_t, event *yaml_event_t) bool { - if event.typ != yaml_DOCUMENT_END_EVENT { - return yaml_emitter_set_emitter_error(emitter, "expected DOCUMENT-END") - } - // [Go] Force document foot separation. - emitter.foot_indent = 0 - if !yaml_emitter_process_foot_comment(emitter) { - return false - } - emitter.foot_indent = -1 - if !yaml_emitter_write_indent(emitter) { - return false - } - if !event.implicit { - // [Go] Allocate the slice elsewhere. - if !yaml_emitter_write_indicator(emitter, []byte("..."), true, false, false) { - return false - } - if !yaml_emitter_write_indent(emitter) { - return false - } - } - if !yaml_emitter_flush(emitter) { - return false - } - emitter.state = yaml_EMIT_DOCUMENT_START_STATE - emitter.tag_directives = emitter.tag_directives[:0] - return true -} - -// Expect a flow item node. 
-func yaml_emitter_emit_flow_sequence_item(emitter *yaml_emitter_t, event *yaml_event_t, first, trail bool) bool { - if first { - if !yaml_emitter_write_indicator(emitter, []byte{'['}, true, true, false) { - return false - } - if !yaml_emitter_increase_indent(emitter, true, false) { - return false - } - emitter.flow_level++ - } - - if event.typ == yaml_SEQUENCE_END_EVENT { - if emitter.canonical && !first && !trail { - if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) { - return false - } - } - emitter.flow_level-- - emitter.indent = emitter.indents[len(emitter.indents)-1] - emitter.indents = emitter.indents[:len(emitter.indents)-1] - if emitter.column == 0 || emitter.canonical && !first { - if !yaml_emitter_write_indent(emitter) { - return false - } - } - if !yaml_emitter_write_indicator(emitter, []byte{']'}, false, false, false) { - return false - } - if !yaml_emitter_process_line_comment(emitter) { - return false - } - if !yaml_emitter_process_foot_comment(emitter) { - return false - } - emitter.state = emitter.states[len(emitter.states)-1] - emitter.states = emitter.states[:len(emitter.states)-1] - - return true - } - - if !first && !trail { - if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) { - return false - } - } - - if !yaml_emitter_process_head_comment(emitter) { - return false - } - if emitter.column == 0 { - if !yaml_emitter_write_indent(emitter) { - return false - } - } - - if emitter.canonical || emitter.column > emitter.best_width { - if !yaml_emitter_write_indent(emitter) { - return false - } - } - if len(emitter.line_comment)+len(emitter.foot_comment)+len(emitter.tail_comment) > 0 { - emitter.states = append(emitter.states, yaml_EMIT_FLOW_SEQUENCE_TRAIL_ITEM_STATE) - } else { - emitter.states = append(emitter.states, yaml_EMIT_FLOW_SEQUENCE_ITEM_STATE) - } - if !yaml_emitter_emit_node(emitter, event, false, true, false, false) { - return false - } - if len(emitter.line_comment)+len(emitter.foot_comment)+len(emitter.tail_comment) > 0 { - if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) { - return false - } - } - if !yaml_emitter_process_line_comment(emitter) { - return false - } - if !yaml_emitter_process_foot_comment(emitter) { - return false - } - return true -} - -// Expect a flow key node. 
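The flow-item handler above writes the "[", comma, and "]" indicators when flow style is in effect; from the public API you reach it by setting Node.Style. A sketch, assuming upstream gopkg.in/yaml.v3:

```go
package main

import (
	"fmt"

	"gopkg.in/yaml.v3"
)

func main() {
	item := func(v string) *yaml.Node {
		return &yaml.Node{Kind: yaml.ScalarNode, Value: v}
	}

	seq := &yaml.Node{
		Kind:    yaml.SequenceNode,
		Style:   yaml.FlowStyle, // emitted as [a, b, c] instead of "- " items
		Content: []*yaml.Node{item("a"), item("b"), item("c")},
	}

	out, err := yaml.Marshal(seq)
	if err != nil {
		panic(err)
	}
	fmt.Print(string(out)) // [a, b, c]
}
```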
-func yaml_emitter_emit_flow_mapping_key(emitter *yaml_emitter_t, event *yaml_event_t, first, trail bool) bool { - if first { - if !yaml_emitter_write_indicator(emitter, []byte{'{'}, true, true, false) { - return false - } - if !yaml_emitter_increase_indent(emitter, true, false) { - return false - } - emitter.flow_level++ - } - - if event.typ == yaml_MAPPING_END_EVENT { - if (emitter.canonical || len(emitter.head_comment)+len(emitter.foot_comment)+len(emitter.tail_comment) > 0) && !first && !trail { - if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) { - return false - } - } - if !yaml_emitter_process_head_comment(emitter) { - return false - } - emitter.flow_level-- - emitter.indent = emitter.indents[len(emitter.indents)-1] - emitter.indents = emitter.indents[:len(emitter.indents)-1] - if emitter.canonical && !first { - if !yaml_emitter_write_indent(emitter) { - return false - } - } - if !yaml_emitter_write_indicator(emitter, []byte{'}'}, false, false, false) { - return false - } - if !yaml_emitter_process_line_comment(emitter) { - return false - } - if !yaml_emitter_process_foot_comment(emitter) { - return false - } - emitter.state = emitter.states[len(emitter.states)-1] - emitter.states = emitter.states[:len(emitter.states)-1] - return true - } - - if !first && !trail { - if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) { - return false - } - } - - if !yaml_emitter_process_head_comment(emitter) { - return false - } - - if emitter.column == 0 { - if !yaml_emitter_write_indent(emitter) { - return false - } - } - - if emitter.canonical || emitter.column > emitter.best_width { - if !yaml_emitter_write_indent(emitter) { - return false - } - } - - if !emitter.canonical && yaml_emitter_check_simple_key(emitter) { - emitter.states = append(emitter.states, yaml_EMIT_FLOW_MAPPING_SIMPLE_VALUE_STATE) - return yaml_emitter_emit_node(emitter, event, false, false, true, true) - } - if !yaml_emitter_write_indicator(emitter, []byte{'?'}, true, false, false) { - return false - } - emitter.states = append(emitter.states, yaml_EMIT_FLOW_MAPPING_VALUE_STATE) - return yaml_emitter_emit_node(emitter, event, false, false, true, false) -} - -// Expect a flow value node. -func yaml_emitter_emit_flow_mapping_value(emitter *yaml_emitter_t, event *yaml_event_t, simple bool) bool { - if simple { - if !yaml_emitter_write_indicator(emitter, []byte{':'}, false, false, false) { - return false - } - } else { - if emitter.canonical || emitter.column > emitter.best_width { - if !yaml_emitter_write_indent(emitter) { - return false - } - } - if !yaml_emitter_write_indicator(emitter, []byte{':'}, true, false, false) { - return false - } - } - if len(emitter.line_comment)+len(emitter.foot_comment)+len(emitter.tail_comment) > 0 { - emitter.states = append(emitter.states, yaml_EMIT_FLOW_MAPPING_TRAIL_KEY_STATE) - } else { - emitter.states = append(emitter.states, yaml_EMIT_FLOW_MAPPING_KEY_STATE) - } - if !yaml_emitter_emit_node(emitter, event, false, false, true, false) { - return false - } - if len(emitter.line_comment)+len(emitter.foot_comment)+len(emitter.tail_comment) > 0 { - if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) { - return false - } - } - if !yaml_emitter_process_line_comment(emitter) { - return false - } - if !yaml_emitter_process_foot_comment(emitter) { - return false - } - return true -} - -// Expect a block item node. 
-func yaml_emitter_emit_block_sequence_item(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool { - if first { - if !yaml_emitter_increase_indent(emitter, false, false) { - return false - } - } - if event.typ == yaml_SEQUENCE_END_EVENT { - emitter.indent = emitter.indents[len(emitter.indents)-1] - emitter.indents = emitter.indents[:len(emitter.indents)-1] - emitter.state = emitter.states[len(emitter.states)-1] - emitter.states = emitter.states[:len(emitter.states)-1] - return true - } - if !yaml_emitter_process_head_comment(emitter) { - return false - } - if !yaml_emitter_write_indent(emitter) { - return false - } - if !yaml_emitter_write_indicator(emitter, []byte{'-'}, true, false, true) { - return false - } - emitter.states = append(emitter.states, yaml_EMIT_BLOCK_SEQUENCE_ITEM_STATE) - if !yaml_emitter_emit_node(emitter, event, false, true, false, false) { - return false - } - if !yaml_emitter_process_line_comment(emitter) { - return false - } - if !yaml_emitter_process_foot_comment(emitter) { - return false - } - return true -} - -// Expect a block key node. -func yaml_emitter_emit_block_mapping_key(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool { - if first { - if !yaml_emitter_increase_indent(emitter, false, false) { - return false - } - } - if !yaml_emitter_process_head_comment(emitter) { - return false - } - if event.typ == yaml_MAPPING_END_EVENT { - emitter.indent = emitter.indents[len(emitter.indents)-1] - emitter.indents = emitter.indents[:len(emitter.indents)-1] - emitter.state = emitter.states[len(emitter.states)-1] - emitter.states = emitter.states[:len(emitter.states)-1] - return true - } - if !yaml_emitter_write_indent(emitter) { - return false - } - if len(emitter.line_comment) > 0 { - // [Go] A line comment was provided for the key. That's unusual as the - // scanner associates line comments with the value. Either way, - // save the line comment and render it appropriately later. - emitter.key_line_comment = emitter.line_comment - emitter.line_comment = nil - } - if yaml_emitter_check_simple_key(emitter) { - emitter.states = append(emitter.states, yaml_EMIT_BLOCK_MAPPING_SIMPLE_VALUE_STATE) - return yaml_emitter_emit_node(emitter, event, false, false, true, true) - } - if !yaml_emitter_write_indicator(emitter, []byte{'?'}, true, false, true) { - return false - } - emitter.states = append(emitter.states, yaml_EMIT_BLOCK_MAPPING_VALUE_STATE) - return yaml_emitter_emit_node(emitter, event, false, false, true, false) -} - -// Expect a block value node. -func yaml_emitter_emit_block_mapping_value(emitter *yaml_emitter_t, event *yaml_event_t, simple bool) bool { - if simple { - if !yaml_emitter_write_indicator(emitter, []byte{':'}, false, false, false) { - return false - } - } else { - if !yaml_emitter_write_indent(emitter) { - return false - } - if !yaml_emitter_write_indicator(emitter, []byte{':'}, true, false, true) { - return false - } - } - if len(emitter.key_line_comment) > 0 { - // [Go] Line comments are generally associated with the value, but when there's - // no value on the same line as a mapping key they end up attached to the - // key itself. - if event.typ == yaml_SCALAR_EVENT { - if len(emitter.line_comment) == 0 { - // A scalar is coming and it has no line comments by itself yet, - // so just let it handle the line comment as usual. If it has a - // line comment, we can't have both so the one from the key is lost. 
- emitter.line_comment = emitter.key_line_comment - emitter.key_line_comment = nil - } - } else if event.sequence_style() != yaml_FLOW_SEQUENCE_STYLE && (event.typ == yaml_MAPPING_START_EVENT || event.typ == yaml_SEQUENCE_START_EVENT) { - // An indented block follows, so write the comment right now. - emitter.line_comment, emitter.key_line_comment = emitter.key_line_comment, emitter.line_comment - if !yaml_emitter_process_line_comment(emitter) { - return false - } - emitter.line_comment, emitter.key_line_comment = emitter.key_line_comment, emitter.line_comment - } - } - emitter.states = append(emitter.states, yaml_EMIT_BLOCK_MAPPING_KEY_STATE) - if !yaml_emitter_emit_node(emitter, event, false, false, true, false) { - return false - } - if !yaml_emitter_process_line_comment(emitter) { - return false - } - if !yaml_emitter_process_foot_comment(emitter) { - return false - } - return true -} - -func yaml_emitter_silent_nil_event(emitter *yaml_emitter_t, event *yaml_event_t) bool { - return event.typ == yaml_SCALAR_EVENT && event.implicit && !emitter.canonical && len(emitter.scalar_data.value) == 0 -} - -// Expect a node. -func yaml_emitter_emit_node(emitter *yaml_emitter_t, event *yaml_event_t, - root bool, sequence bool, mapping bool, simple_key bool) bool { - - emitter.root_context = root - emitter.sequence_context = sequence - emitter.mapping_context = mapping - emitter.simple_key_context = simple_key - - switch event.typ { - case yaml_ALIAS_EVENT: - return yaml_emitter_emit_alias(emitter, event) - case yaml_SCALAR_EVENT: - return yaml_emitter_emit_scalar(emitter, event) - case yaml_SEQUENCE_START_EVENT: - return yaml_emitter_emit_sequence_start(emitter, event) - case yaml_MAPPING_START_EVENT: - return yaml_emitter_emit_mapping_start(emitter, event) - default: - return yaml_emitter_set_emitter_error(emitter, - fmt.Sprintf("expected SCALAR, SEQUENCE-START, MAPPING-START, or ALIAS, but got %v", event.typ)) - } -} - -// Expect ALIAS. -func yaml_emitter_emit_alias(emitter *yaml_emitter_t, event *yaml_event_t) bool { - if !yaml_emitter_process_anchor(emitter) { - return false - } - emitter.state = emitter.states[len(emitter.states)-1] - emitter.states = emitter.states[:len(emitter.states)-1] - return true -} - -// Expect SCALAR. -func yaml_emitter_emit_scalar(emitter *yaml_emitter_t, event *yaml_event_t) bool { - if !yaml_emitter_select_scalar_style(emitter, event) { - return false - } - if !yaml_emitter_process_anchor(emitter) { - return false - } - if !yaml_emitter_process_tag(emitter) { - return false - } - if !yaml_emitter_increase_indent(emitter, true, false) { - return false - } - if !yaml_emitter_process_scalar(emitter) { - return false - } - emitter.indent = emitter.indents[len(emitter.indents)-1] - emitter.indents = emitter.indents[:len(emitter.indents)-1] - emitter.state = emitter.states[len(emitter.states)-1] - emitter.states = emitter.states[:len(emitter.states)-1] - return true -} - -// Expect SEQUENCE-START. -func yaml_emitter_emit_sequence_start(emitter *yaml_emitter_t, event *yaml_event_t) bool { - if !yaml_emitter_process_anchor(emitter) { - return false - } - if !yaml_emitter_process_tag(emitter) { - return false - } - if emitter.flow_level > 0 || emitter.canonical || event.sequence_style() == yaml_FLOW_SEQUENCE_STYLE || - yaml_emitter_check_empty_sequence(emitter) { - emitter.state = yaml_EMIT_FLOW_SEQUENCE_FIRST_ITEM_STATE - } else { - emitter.state = yaml_EMIT_BLOCK_SEQUENCE_FIRST_ITEM_STATE - } - return true -} - -// Expect MAPPING-START. 
-func yaml_emitter_emit_mapping_start(emitter *yaml_emitter_t, event *yaml_event_t) bool { - if !yaml_emitter_process_anchor(emitter) { - return false - } - if !yaml_emitter_process_tag(emitter) { - return false - } - if emitter.flow_level > 0 || emitter.canonical || event.mapping_style() == yaml_FLOW_MAPPING_STYLE || - yaml_emitter_check_empty_mapping(emitter) { - emitter.state = yaml_EMIT_FLOW_MAPPING_FIRST_KEY_STATE - } else { - emitter.state = yaml_EMIT_BLOCK_MAPPING_FIRST_KEY_STATE - } - return true -} - -// Check if the document content is an empty scalar. -func yaml_emitter_check_empty_document(emitter *yaml_emitter_t) bool { - return false // [Go] Huh? -} - -// Check if the next events represent an empty sequence. -func yaml_emitter_check_empty_sequence(emitter *yaml_emitter_t) bool { - if len(emitter.events)-emitter.events_head < 2 { - return false - } - return emitter.events[emitter.events_head].typ == yaml_SEQUENCE_START_EVENT && - emitter.events[emitter.events_head+1].typ == yaml_SEQUENCE_END_EVENT -} - -// Check if the next events represent an empty mapping. -func yaml_emitter_check_empty_mapping(emitter *yaml_emitter_t) bool { - if len(emitter.events)-emitter.events_head < 2 { - return false - } - return emitter.events[emitter.events_head].typ == yaml_MAPPING_START_EVENT && - emitter.events[emitter.events_head+1].typ == yaml_MAPPING_END_EVENT -} - -// Check if the next node can be expressed as a simple key. -func yaml_emitter_check_simple_key(emitter *yaml_emitter_t) bool { - length := 0 - switch emitter.events[emitter.events_head].typ { - case yaml_ALIAS_EVENT: - length += len(emitter.anchor_data.anchor) - case yaml_SCALAR_EVENT: - if emitter.scalar_data.multiline { - return false - } - length += len(emitter.anchor_data.anchor) + - len(emitter.tag_data.handle) + - len(emitter.tag_data.suffix) + - len(emitter.scalar_data.value) - case yaml_SEQUENCE_START_EVENT: - if !yaml_emitter_check_empty_sequence(emitter) { - return false - } - length += len(emitter.anchor_data.anchor) + - len(emitter.tag_data.handle) + - len(emitter.tag_data.suffix) - case yaml_MAPPING_START_EVENT: - if !yaml_emitter_check_empty_mapping(emitter) { - return false - } - length += len(emitter.anchor_data.anchor) + - len(emitter.tag_data.handle) + - len(emitter.tag_data.suffix) - default: - return false - } - return length <= 128 -} - -// Determine an acceptable scalar style. 
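yaml_emitter_check_simple_key above caps simple keys at 128 bytes and rejects multiline scalars; beyond that, the block-mapping writer falls back to the explicit "? key / : value" form. A hedged sketch of the expected effect, assuming upstream gopkg.in/yaml.v3:

```go
package main

import (
	"fmt"
	"strings"

	"gopkg.in/yaml.v3"
)

func main() {
	short := map[string]string{"key": "v"}
	long := map[string]string{strings.Repeat("k", 200): "v"}

	a, _ := yaml.Marshal(short)
	b, _ := yaml.Marshal(long)

	fmt.Print(string(a)) // key: v
	// Expected to use the explicit "? " key form, per the 128-byte cap above.
	fmt.Println(strings.HasPrefix(string(b), "? "))
}
```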
-func yaml_emitter_select_scalar_style(emitter *yaml_emitter_t, event *yaml_event_t) bool { - - no_tag := len(emitter.tag_data.handle) == 0 && len(emitter.tag_data.suffix) == 0 - if no_tag && !event.implicit && !event.quoted_implicit { - return yaml_emitter_set_emitter_error(emitter, "neither tag nor implicit flags are specified") - } - - style := event.scalar_style() - if style == yaml_ANY_SCALAR_STYLE { - style = yaml_PLAIN_SCALAR_STYLE - } - if emitter.canonical { - style = yaml_DOUBLE_QUOTED_SCALAR_STYLE - } - if emitter.simple_key_context && emitter.scalar_data.multiline { - style = yaml_DOUBLE_QUOTED_SCALAR_STYLE - } - - if style == yaml_PLAIN_SCALAR_STYLE { - if emitter.flow_level > 0 && !emitter.scalar_data.flow_plain_allowed || - emitter.flow_level == 0 && !emitter.scalar_data.block_plain_allowed { - style = yaml_SINGLE_QUOTED_SCALAR_STYLE - } - if len(emitter.scalar_data.value) == 0 && (emitter.flow_level > 0 || emitter.simple_key_context) { - style = yaml_SINGLE_QUOTED_SCALAR_STYLE - } - if no_tag && !event.implicit { - style = yaml_SINGLE_QUOTED_SCALAR_STYLE - } - } - if style == yaml_SINGLE_QUOTED_SCALAR_STYLE { - if !emitter.scalar_data.single_quoted_allowed { - style = yaml_DOUBLE_QUOTED_SCALAR_STYLE - } - } - if style == yaml_LITERAL_SCALAR_STYLE || style == yaml_FOLDED_SCALAR_STYLE { - if !emitter.scalar_data.block_allowed || emitter.flow_level > 0 || emitter.simple_key_context { - style = yaml_DOUBLE_QUOTED_SCALAR_STYLE - } - } - - if no_tag && !event.quoted_implicit && style != yaml_PLAIN_SCALAR_STYLE { - emitter.tag_data.handle = []byte{'!'} - } - emitter.scalar_data.style = style - return true -} - -// Write an anchor. -func yaml_emitter_process_anchor(emitter *yaml_emitter_t) bool { - if emitter.anchor_data.anchor == nil { - return true - } - c := []byte{'&'} - if emitter.anchor_data.alias { - c[0] = '*' - } - if !yaml_emitter_write_indicator(emitter, c, true, false, false) { - return false - } - return yaml_emitter_write_anchor(emitter, emitter.anchor_data.anchor) -} - -// Write a tag. -func yaml_emitter_process_tag(emitter *yaml_emitter_t) bool { - if len(emitter.tag_data.handle) == 0 && len(emitter.tag_data.suffix) == 0 { - return true - } - if len(emitter.tag_data.handle) > 0 { - if !yaml_emitter_write_tag_handle(emitter, emitter.tag_data.handle) { - return false - } - if len(emitter.tag_data.suffix) > 0 { - if !yaml_emitter_write_tag_content(emitter, emitter.tag_data.suffix, false) { - return false - } - } - } else { - // [Go] Allocate these slices elsewhere. - if !yaml_emitter_write_indicator(emitter, []byte("!<"), true, false, false) { - return false - } - if !yaml_emitter_write_tag_content(emitter, emitter.tag_data.suffix, false) { - return false - } - if !yaml_emitter_write_indicator(emitter, []byte{'>'}, false, false, false) { - return false - } - } - return true -} - -// Write a scalar. 
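yaml_emitter_process_anchor above writes "&name" for anchor definitions and "*name" for aliases. Both are reachable from the Node API; in this sketch (assuming upstream gopkg.in/yaml.v3), an AliasNode carries the anchor name in Value and a pointer to its target in Alias:

```go
package main

import (
	"fmt"

	"gopkg.in/yaml.v3"
)

func main() {
	shared := &yaml.Node{Kind: yaml.ScalarNode, Value: "shared", Anchor: "a"}
	alias := &yaml.Node{Kind: yaml.AliasNode, Value: "a", Alias: shared}

	seq := &yaml.Node{
		Kind:    yaml.SequenceNode,
		Content: []*yaml.Node{shared, alias},
	}

	out, err := yaml.Marshal(seq)
	if err != nil {
		panic(err)
	}
	fmt.Print(string(out))
	// expected:
	// - &a shared
	// - *a
}
```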
-func yaml_emitter_process_scalar(emitter *yaml_emitter_t) bool { - switch emitter.scalar_data.style { - case yaml_PLAIN_SCALAR_STYLE: - return yaml_emitter_write_plain_scalar(emitter, emitter.scalar_data.value, !emitter.simple_key_context) - - case yaml_SINGLE_QUOTED_SCALAR_STYLE: - return yaml_emitter_write_single_quoted_scalar(emitter, emitter.scalar_data.value, !emitter.simple_key_context) - - case yaml_DOUBLE_QUOTED_SCALAR_STYLE: - return yaml_emitter_write_double_quoted_scalar(emitter, emitter.scalar_data.value, !emitter.simple_key_context) - - case yaml_LITERAL_SCALAR_STYLE: - return yaml_emitter_write_literal_scalar(emitter, emitter.scalar_data.value) - - case yaml_FOLDED_SCALAR_STYLE: - return yaml_emitter_write_folded_scalar(emitter, emitter.scalar_data.value) - } - panic("unknown scalar style") -} - -// Write a head comment. -func yaml_emitter_process_head_comment(emitter *yaml_emitter_t) bool { - if len(emitter.tail_comment) > 0 { - if !yaml_emitter_write_indent(emitter) { - return false - } - if !yaml_emitter_write_comment(emitter, emitter.tail_comment) { - return false - } - emitter.tail_comment = emitter.tail_comment[:0] - emitter.foot_indent = emitter.indent - if emitter.foot_indent < 0 { - emitter.foot_indent = 0 - } - } - - if len(emitter.head_comment) == 0 { - return true - } - if !yaml_emitter_write_indent(emitter) { - return false - } - if !yaml_emitter_write_comment(emitter, emitter.head_comment) { - return false - } - emitter.head_comment = emitter.head_comment[:0] - return true -} - -// Write a line comment. -func yaml_emitter_process_line_comment(emitter *yaml_emitter_t) bool { - if len(emitter.line_comment) == 0 { - return true - } - if !emitter.whitespace { - if !put(emitter, ' ') { - return false - } - } - if !yaml_emitter_write_comment(emitter, emitter.line_comment) { - return false - } - emitter.line_comment = emitter.line_comment[:0] - return true -} - -// Write a foot comment. -func yaml_emitter_process_foot_comment(emitter *yaml_emitter_t) bool { - if len(emitter.foot_comment) == 0 { - return true - } - if !yaml_emitter_write_indent(emitter) { - return false - } - if !yaml_emitter_write_comment(emitter, emitter.foot_comment) { - return false - } - emitter.foot_comment = emitter.foot_comment[:0] - emitter.foot_indent = emitter.indent - if emitter.foot_indent < 0 { - emitter.foot_indent = 0 - } - return true -} - -// Check if a %YAML directive is valid. -func yaml_emitter_analyze_version_directive(emitter *yaml_emitter_t, version_directive *yaml_version_directive_t) bool { - if version_directive.major != 1 || version_directive.minor != 1 { - return yaml_emitter_set_emitter_error(emitter, "incompatible %YAML directive") - } - return true -} - -// Check if a %TAG directive is valid. -func yaml_emitter_analyze_tag_directive(emitter *yaml_emitter_t, tag_directive *yaml_tag_directive_t) bool { - handle := tag_directive.handle - prefix := tag_directive.prefix - if len(handle) == 0 { - return yaml_emitter_set_emitter_error(emitter, "tag handle must not be empty") - } - if handle[0] != '!' { - return yaml_emitter_set_emitter_error(emitter, "tag handle must start with '!'") - } - if handle[len(handle)-1] != '!' 
{ - return yaml_emitter_set_emitter_error(emitter, "tag handle must end with '!'") - } - for i := 1; i < len(handle)-1; i += width(handle[i]) { - if !is_alpha(handle, i) { - return yaml_emitter_set_emitter_error(emitter, "tag handle must contain alphanumerical characters only") - } - } - if len(prefix) == 0 { - return yaml_emitter_set_emitter_error(emitter, "tag prefix must not be empty") - } - return true -} - -// Check if an anchor is valid. -func yaml_emitter_analyze_anchor(emitter *yaml_emitter_t, anchor []byte, alias bool) bool { - if len(anchor) == 0 { - problem := "anchor value must not be empty" - if alias { - problem = "alias value must not be empty" - } - return yaml_emitter_set_emitter_error(emitter, problem) - } - for i := 0; i < len(anchor); i += width(anchor[i]) { - if !is_alpha(anchor, i) { - problem := "anchor value must contain alphanumerical characters only" - if alias { - problem = "alias value must contain alphanumerical characters only" - } - return yaml_emitter_set_emitter_error(emitter, problem) - } - } - emitter.anchor_data.anchor = anchor - emitter.anchor_data.alias = alias - return true -} - -// Check if a tag is valid. -func yaml_emitter_analyze_tag(emitter *yaml_emitter_t, tag []byte) bool { - if len(tag) == 0 { - return yaml_emitter_set_emitter_error(emitter, "tag value must not be empty") - } - for i := 0; i < len(emitter.tag_directives); i++ { - tag_directive := &emitter.tag_directives[i] - if bytes.HasPrefix(tag, tag_directive.prefix) { - emitter.tag_data.handle = tag_directive.handle - emitter.tag_data.suffix = tag[len(tag_directive.prefix):] - return true - } - } - emitter.tag_data.suffix = tag - return true -} - -// Check if a scalar is valid. -func yaml_emitter_analyze_scalar(emitter *yaml_emitter_t, value []byte) bool { - var ( - block_indicators = false - flow_indicators = false - line_breaks = false - special_characters = false - tab_characters = false - - leading_space = false - leading_break = false - trailing_space = false - trailing_break = false - break_space = false - space_break = false - - preceded_by_whitespace = false - followed_by_whitespace = false - previous_space = false - previous_break = false - ) - - emitter.scalar_data.value = value - - if len(value) == 0 { - emitter.scalar_data.multiline = false - emitter.scalar_data.flow_plain_allowed = false - emitter.scalar_data.block_plain_allowed = true - emitter.scalar_data.single_quoted_allowed = true - emitter.scalar_data.block_allowed = false - return true - } - - if len(value) >= 3 && ((value[0] == '-' && value[1] == '-' && value[2] == '-') || (value[0] == '.' && value[1] == '.' 
&& value[2] == '.')) { - block_indicators = true - flow_indicators = true - } - - preceded_by_whitespace = true - for i, w := 0, 0; i < len(value); i += w { - w = width(value[i]) - followed_by_whitespace = i+w >= len(value) || is_blank(value, i+w) - - if i == 0 { - switch value[i] { - case '#', ',', '[', ']', '{', '}', '&', '*', '!', '|', '>', '\'', '"', '%', '@', '`': - flow_indicators = true - block_indicators = true - case '?', ':': - flow_indicators = true - if followed_by_whitespace { - block_indicators = true - } - case '-': - if followed_by_whitespace { - flow_indicators = true - block_indicators = true - } - } - } else { - switch value[i] { - case ',', '?', '[', ']', '{', '}': - flow_indicators = true - case ':': - flow_indicators = true - if followed_by_whitespace { - block_indicators = true - } - case '#': - if preceded_by_whitespace { - flow_indicators = true - block_indicators = true - } - } - } - - if value[i] == '\t' { - tab_characters = true - } else if !is_printable(value, i) || !is_ascii(value, i) && !emitter.unicode { - special_characters = true - } - if is_space(value, i) { - if i == 0 { - leading_space = true - } - if i+width(value[i]) == len(value) { - trailing_space = true - } - if previous_break { - break_space = true - } - previous_space = true - previous_break = false - } else if is_break(value, i) { - line_breaks = true - if i == 0 { - leading_break = true - } - if i+width(value[i]) == len(value) { - trailing_break = true - } - if previous_space { - space_break = true - } - previous_space = false - previous_break = true - } else { - previous_space = false - previous_break = false - } - - // [Go]: Why 'z'? Couldn't be the end of the string as that's the loop condition. - preceded_by_whitespace = is_blankz(value, i) - } - - emitter.scalar_data.multiline = line_breaks - emitter.scalar_data.flow_plain_allowed = true - emitter.scalar_data.block_plain_allowed = true - emitter.scalar_data.single_quoted_allowed = true - emitter.scalar_data.block_allowed = true - - if leading_space || leading_break || trailing_space || trailing_break { - emitter.scalar_data.flow_plain_allowed = false - emitter.scalar_data.block_plain_allowed = false - } - if trailing_space { - emitter.scalar_data.block_allowed = false - } - if break_space { - emitter.scalar_data.flow_plain_allowed = false - emitter.scalar_data.block_plain_allowed = false - emitter.scalar_data.single_quoted_allowed = false - } - if space_break || tab_characters || special_characters { - emitter.scalar_data.flow_plain_allowed = false - emitter.scalar_data.block_plain_allowed = false - emitter.scalar_data.single_quoted_allowed = false - } - if space_break || special_characters { - emitter.scalar_data.block_allowed = false - } - if line_breaks { - emitter.scalar_data.flow_plain_allowed = false - emitter.scalar_data.block_plain_allowed = false - } - if flow_indicators { - emitter.scalar_data.flow_plain_allowed = false - } - if block_indicators { - emitter.scalar_data.block_plain_allowed = false - } - return true -} - -// Check if the event data is valid. 
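The style flags computed by yaml_emitter_analyze_scalar above surface directly in marshalled output: a leading space rules out the plain styles, a tab additionally rules out single quotes, and a line break forces a block or quoted form. A minimal sketch of that mapping, written against the upstream gopkg.in/yaml.v3 API that this vendored copy appears to mirror (the internal package itself is not importable from outside the CLI):

package main

import (
	"fmt"

	"gopkg.in/yaml.v3"
)

func main() {
	// Assumes the public yaml.v3 package, not this internal fork.
	// " leading" -> single-quoted, "tab\there" -> double-quoted,
	// "multi\nline" -> literal block scalar, "plain" -> plain.
	for _, s := range []string{" leading", "tab\there", "multi\nline", "plain"} {
		out, err := yaml.Marshal(map[string]string{"v": s})
		if err != nil {
			panic(err)
		}
		fmt.Printf("%q -> %s", s, out)
	}
}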
-func yaml_emitter_analyze_event(emitter *yaml_emitter_t, event *yaml_event_t) bool { - - emitter.anchor_data.anchor = nil - emitter.tag_data.handle = nil - emitter.tag_data.suffix = nil - emitter.scalar_data.value = nil - - if len(event.head_comment) > 0 { - emitter.head_comment = event.head_comment - } - if len(event.line_comment) > 0 { - emitter.line_comment = event.line_comment - } - if len(event.foot_comment) > 0 { - emitter.foot_comment = event.foot_comment - } - if len(event.tail_comment) > 0 { - emitter.tail_comment = event.tail_comment - } - - switch event.typ { - case yaml_ALIAS_EVENT: - if !yaml_emitter_analyze_anchor(emitter, event.anchor, true) { - return false - } - - case yaml_SCALAR_EVENT: - if len(event.anchor) > 0 { - if !yaml_emitter_analyze_anchor(emitter, event.anchor, false) { - return false - } - } - if len(event.tag) > 0 && (emitter.canonical || (!event.implicit && !event.quoted_implicit)) { - if !yaml_emitter_analyze_tag(emitter, event.tag) { - return false - } - } - if !yaml_emitter_analyze_scalar(emitter, event.value) { - return false - } - - case yaml_SEQUENCE_START_EVENT: - if len(event.anchor) > 0 { - if !yaml_emitter_analyze_anchor(emitter, event.anchor, false) { - return false - } - } - if len(event.tag) > 0 && (emitter.canonical || !event.implicit) { - if !yaml_emitter_analyze_tag(emitter, event.tag) { - return false - } - } - - case yaml_MAPPING_START_EVENT: - if len(event.anchor) > 0 { - if !yaml_emitter_analyze_anchor(emitter, event.anchor, false) { - return false - } - } - if len(event.tag) > 0 && (emitter.canonical || !event.implicit) { - if !yaml_emitter_analyze_tag(emitter, event.tag) { - return false - } - } - } - return true -} - -// Write the BOM character. -func yaml_emitter_write_bom(emitter *yaml_emitter_t) bool { - if !flush(emitter) { - return false - } - pos := emitter.buffer_pos - emitter.buffer[pos+0] = '\xEF' - emitter.buffer[pos+1] = '\xBB' - emitter.buffer[pos+2] = '\xBF' - emitter.buffer_pos += 3 - return true -} - -func yaml_emitter_write_indent(emitter *yaml_emitter_t) bool { - indent := emitter.indent - if indent < 0 { - indent = 0 - } - if !emitter.indention || emitter.column > indent || (emitter.column == indent && !emitter.whitespace) { - if !put_break(emitter) { - return false - } - } - if emitter.foot_indent == indent { - if !put_break(emitter) { - return false - } - } - for emitter.column < indent { - if !put(emitter, ' ') { - return false - } - } - emitter.whitespace = true - //emitter.indention = true - emitter.space_above = false - emitter.foot_indent = -1 - return true -} - -func yaml_emitter_write_indicator(emitter *yaml_emitter_t, indicator []byte, need_whitespace, is_whitespace, is_indention bool) bool { - if need_whitespace && !emitter.whitespace { - if !put(emitter, ' ') { - return false - } - } - if !write_all(emitter, indicator) { - return false - } - emitter.whitespace = is_whitespace - emitter.indention = (emitter.indention && is_indention) - emitter.open_ended = false - return true -} - -func yaml_emitter_write_anchor(emitter *yaml_emitter_t, value []byte) bool { - if !write_all(emitter, value) { - return false - } - emitter.whitespace = false - emitter.indention = false - return true -} - -func yaml_emitter_write_tag_handle(emitter *yaml_emitter_t, value []byte) bool { - if !emitter.whitespace { - if !put(emitter, ' ') { - return false - } - } - if !write_all(emitter, value) { - return false - } - emitter.whitespace = false - emitter.indention = false - return true -} - -func 
yaml_emitter_write_tag_content(emitter *yaml_emitter_t, value []byte, need_whitespace bool) bool { - if need_whitespace && !emitter.whitespace { - if !put(emitter, ' ') { - return false - } - } - for i := 0; i < len(value); { - var must_write bool - switch value[i] { - case ';', '/', '?', ':', '@', '&', '=', '+', '$', ',', '_', '.', '~', '*', '\'', '(', ')', '[', ']': - must_write = true - default: - must_write = is_alpha(value, i) - } - if must_write { - if !write(emitter, value, &i) { - return false - } - } else { - w := width(value[i]) - for k := 0; k < w; k++ { - octet := value[i] - i++ - if !put(emitter, '%') { - return false - } - - c := octet >> 4 - if c < 10 { - c += '0' - } else { - c += 'A' - 10 - } - if !put(emitter, c) { - return false - } - - c = octet & 0x0f - if c < 10 { - c += '0' - } else { - c += 'A' - 10 - } - if !put(emitter, c) { - return false - } - } - } - } - emitter.whitespace = false - emitter.indention = false - return true -} - -func yaml_emitter_write_plain_scalar(emitter *yaml_emitter_t, value []byte, allow_breaks bool) bool { - if len(value) > 0 && !emitter.whitespace { - if !put(emitter, ' ') { - return false - } - } - - spaces := false - breaks := false - for i := 0; i < len(value); { - if is_space(value, i) { - if allow_breaks && !spaces && emitter.column > emitter.best_width && !is_space(value, i+1) { - if !yaml_emitter_write_indent(emitter) { - return false - } - i += width(value[i]) - } else { - if !write(emitter, value, &i) { - return false - } - } - spaces = true - } else if is_break(value, i) { - if !breaks && value[i] == '\n' { - if !put_break(emitter) { - return false - } - } - if !write_break(emitter, value, &i) { - return false - } - //emitter.indention = true - breaks = true - } else { - if breaks { - if !yaml_emitter_write_indent(emitter) { - return false - } - } - if !write(emitter, value, &i) { - return false - } - emitter.indention = false - spaces = false - breaks = false - } - } - - if len(value) > 0 { - emitter.whitespace = false - } - emitter.indention = false - if emitter.root_context { - emitter.open_ended = true - } - - return true -} - -func yaml_emitter_write_single_quoted_scalar(emitter *yaml_emitter_t, value []byte, allow_breaks bool) bool { - - if !yaml_emitter_write_indicator(emitter, []byte{'\''}, true, false, false) { - return false - } - - spaces := false - breaks := false - for i := 0; i < len(value); { - if is_space(value, i) { - if allow_breaks && !spaces && emitter.column > emitter.best_width && i > 0 && i < len(value)-1 && !is_space(value, i+1) { - if !yaml_emitter_write_indent(emitter) { - return false - } - i += width(value[i]) - } else { - if !write(emitter, value, &i) { - return false - } - } - spaces = true - } else if is_break(value, i) { - if !breaks && value[i] == '\n' { - if !put_break(emitter) { - return false - } - } - if !write_break(emitter, value, &i) { - return false - } - //emitter.indention = true - breaks = true - } else { - if breaks { - if !yaml_emitter_write_indent(emitter) { - return false - } - } - if value[i] == '\'' { - if !put(emitter, '\'') { - return false - } - } - if !write(emitter, value, &i) { - return false - } - emitter.indention = false - spaces = false - breaks = false - } - } - if !yaml_emitter_write_indicator(emitter, []byte{'\''}, false, false, false) { - return false - } - emitter.whitespace = false - emitter.indention = false - return true -} - -func yaml_emitter_write_double_quoted_scalar(emitter *yaml_emitter_t, value []byte, allow_breaks bool) bool { - spaces := false - if 
!yaml_emitter_write_indicator(emitter, []byte{'"'}, true, false, false) { - return false - } - - for i := 0; i < len(value); { - if !is_printable(value, i) || (!emitter.unicode && !is_ascii(value, i)) || - is_bom(value, i) || is_break(value, i) || - value[i] == '"' || value[i] == '\\' { - - octet := value[i] - - var w int - var v rune - switch { - case octet&0x80 == 0x00: - w, v = 1, rune(octet&0x7F) - case octet&0xE0 == 0xC0: - w, v = 2, rune(octet&0x1F) - case octet&0xF0 == 0xE0: - w, v = 3, rune(octet&0x0F) - case octet&0xF8 == 0xF0: - w, v = 4, rune(octet&0x07) - } - for k := 1; k < w; k++ { - octet = value[i+k] - v = (v << 6) + (rune(octet) & 0x3F) - } - i += w - - if !put(emitter, '\\') { - return false - } - - var ok bool - switch v { - case 0x00: - ok = put(emitter, '0') - case 0x07: - ok = put(emitter, 'a') - case 0x08: - ok = put(emitter, 'b') - case 0x09: - ok = put(emitter, 't') - case 0x0A: - ok = put(emitter, 'n') - case 0x0b: - ok = put(emitter, 'v') - case 0x0c: - ok = put(emitter, 'f') - case 0x0d: - ok = put(emitter, 'r') - case 0x1b: - ok = put(emitter, 'e') - case 0x22: - ok = put(emitter, '"') - case 0x5c: - ok = put(emitter, '\\') - case 0x85: - ok = put(emitter, 'N') - case 0xA0: - ok = put(emitter, '_') - case 0x2028: - ok = put(emitter, 'L') - case 0x2029: - ok = put(emitter, 'P') - default: - if v <= 0xFF { - ok = put(emitter, 'x') - w = 2 - } else if v <= 0xFFFF { - ok = put(emitter, 'u') - w = 4 - } else { - ok = put(emitter, 'U') - w = 8 - } - for k := (w - 1) * 4; ok && k >= 0; k -= 4 { - digit := byte((v >> uint(k)) & 0x0F) - if digit < 10 { - ok = put(emitter, digit+'0') - } else { - ok = put(emitter, digit+'A'-10) - } - } - } - if !ok { - return false - } - spaces = false - } else if is_space(value, i) { - if allow_breaks && !spaces && emitter.column > emitter.best_width && i > 0 && i < len(value)-1 { - if !yaml_emitter_write_indent(emitter) { - return false - } - if is_space(value, i+1) { - if !put(emitter, '\\') { - return false - } - } - i += width(value[i]) - } else if !write(emitter, value, &i) { - return false - } - spaces = true - } else { - if !write(emitter, value, &i) { - return false - } - spaces = false - } - } - if !yaml_emitter_write_indicator(emitter, []byte{'"'}, false, false, false) { - return false - } - emitter.whitespace = false - emitter.indention = false - return true -} - -func yaml_emitter_write_block_scalar_hints(emitter *yaml_emitter_t, value []byte) bool { - if is_space(value, 0) || is_break(value, 0) { - indent_hint := []byte{'0' + byte(emitter.best_indent)} - if !yaml_emitter_write_indicator(emitter, indent_hint, false, false, false) { - return false - } - } - - emitter.open_ended = false - - var chomp_hint [1]byte - if len(value) == 0 { - chomp_hint[0] = '-' - } else { - i := len(value) - 1 - for value[i]&0xC0 == 0x80 { - i-- - } - if !is_break(value, i) { - chomp_hint[0] = '-' - } else if i == 0 { - chomp_hint[0] = '+' - emitter.open_ended = true - } else { - i-- - for value[i]&0xC0 == 0x80 { - i-- - } - if is_break(value, i) { - chomp_hint[0] = '+' - emitter.open_ended = true - } - } - } - if chomp_hint[0] != 0 { - if !yaml_emitter_write_indicator(emitter, chomp_hint[:], false, false, false) { - return false - } - } - return true -} - -func yaml_emitter_write_literal_scalar(emitter *yaml_emitter_t, value []byte) bool { - if !yaml_emitter_write_indicator(emitter, []byte{'|'}, true, false, false) { - return false - } - if !yaml_emitter_write_block_scalar_hints(emitter, value) { - return false - } - if 
!yaml_emitter_process_line_comment(emitter) { - return false - } - //emitter.indention = true - emitter.whitespace = true - breaks := true - for i := 0; i < len(value); { - if is_break(value, i) { - if !write_break(emitter, value, &i) { - return false - } - //emitter.indention = true - breaks = true - } else { - if breaks { - if !yaml_emitter_write_indent(emitter) { - return false - } - } - if !write(emitter, value, &i) { - return false - } - emitter.indention = false - breaks = false - } - } - - return true -} - -func yaml_emitter_write_folded_scalar(emitter *yaml_emitter_t, value []byte) bool { - if !yaml_emitter_write_indicator(emitter, []byte{'>'}, true, false, false) { - return false - } - if !yaml_emitter_write_block_scalar_hints(emitter, value) { - return false - } - if !yaml_emitter_process_line_comment(emitter) { - return false - } - - //emitter.indention = true - emitter.whitespace = true - - breaks := true - leading_spaces := true - for i := 0; i < len(value); { - if is_break(value, i) { - if !breaks && !leading_spaces && value[i] == '\n' { - k := 0 - for is_break(value, k) { - k += width(value[k]) - } - if !is_blankz(value, k) { - if !put_break(emitter) { - return false - } - } - } - if !write_break(emitter, value, &i) { - return false - } - //emitter.indention = true - breaks = true - } else { - if breaks { - if !yaml_emitter_write_indent(emitter) { - return false - } - leading_spaces = is_blank(value, i) - } - if !breaks && is_space(value, i) && !is_space(value, i+1) && emitter.column > emitter.best_width { - if !yaml_emitter_write_indent(emitter) { - return false - } - i += width(value[i]) - } else { - if !write(emitter, value, &i) { - return false - } - } - emitter.indention = false - breaks = false - } - } - return true -} - -func yaml_emitter_write_comment(emitter *yaml_emitter_t, comment []byte) bool { - breaks := false - pound := false - for i := 0; i < len(comment); { - if is_break(comment, i) { - if !write_break(emitter, comment, &i) { - return false - } - //emitter.indention = true - breaks = true - pound = false - } else { - if breaks && !yaml_emitter_write_indent(emitter) { - return false - } - if !pound { - if comment[i] != '#' && (!put(emitter, '#') || !put(emitter, ' ')) { - return false - } - pound = true - } - if !write(emitter, comment, &i) { - return false - } - emitter.indention = false - breaks = false - } - } - if !breaks && !put_break(emitter) { - return false - } - - emitter.whitespace = true - //emitter.indention = true - return true -} diff --git a/cli/internal/yaml/encode.go b/cli/internal/yaml/encode.go deleted file mode 100644 index de9e72a3e638d..0000000000000 --- a/cli/internal/yaml/encode.go +++ /dev/null @@ -1,577 +0,0 @@ -// -// Copyright (c) 2011-2019 Canonical Ltd -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
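The chomping hints written by yaml_emitter_write_block_scalar_hints above are easiest to observe from the public API: no trailing newline yields "|-", exactly one yields "|", and several yield "|+". A short sketch, assuming the upstream gopkg.in/yaml.v3 API that this vendored copy appears to mirror:

package main

import (
	"fmt"

	"gopkg.in/yaml.v3"
)

func main() {
	// Assumes the public yaml.v3 package, not this internal fork.
	// "a\nb" -> "|-", "a\nb\n" -> "|", "a\nb\n\n\n" -> "|+".
	for _, s := range []string{"a\nb", "a\nb\n", "a\nb\n\n\n"} {
		out, err := yaml.Marshal(map[string]string{"v": s})
		if err != nil {
			panic(err)
		}
		fmt.Printf("%q ->\n%s\n", s, out)
	}
}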
- -package yaml - -import ( - "encoding" - "fmt" - "io" - "reflect" - "regexp" - "sort" - "strconv" - "strings" - "time" - "unicode/utf8" -) - -type encoder struct { - emitter yaml_emitter_t - event yaml_event_t - out []byte - flow bool - indent int - doneInit bool -} - -func newEncoder() *encoder { - e := &encoder{} - yaml_emitter_initialize(&e.emitter) - yaml_emitter_set_output_string(&e.emitter, &e.out) - yaml_emitter_set_unicode(&e.emitter, true) - return e -} - -func newEncoderWithWriter(w io.Writer) *encoder { - e := &encoder{} - yaml_emitter_initialize(&e.emitter) - yaml_emitter_set_output_writer(&e.emitter, w) - yaml_emitter_set_unicode(&e.emitter, true) - return e -} - -func (e *encoder) init() { - if e.doneInit { - return - } - if e.indent == 0 { - e.indent = 4 - } - e.emitter.best_indent = e.indent - yaml_stream_start_event_initialize(&e.event, yaml_UTF8_ENCODING) - e.emit() - e.doneInit = true -} - -func (e *encoder) finish() { - e.emitter.open_ended = false - yaml_stream_end_event_initialize(&e.event) - e.emit() -} - -func (e *encoder) destroy() { - yaml_emitter_delete(&e.emitter) -} - -func (e *encoder) emit() { - // This will internally delete the e.event value. - e.must(yaml_emitter_emit(&e.emitter, &e.event)) -} - -func (e *encoder) must(ok bool) { - if !ok { - msg := e.emitter.problem - if msg == "" { - msg = "unknown problem generating YAML content" - } - failf("%s", msg) - } -} - -func (e *encoder) marshalDoc(tag string, in reflect.Value) { - e.init() - var node *Node - if in.IsValid() { - node, _ = in.Interface().(*Node) - } - if node != nil && node.Kind == DocumentNode { - e.nodev(in) - } else { - yaml_document_start_event_initialize(&e.event, nil, nil, true) - e.emit() - e.marshal(tag, in) - yaml_document_end_event_initialize(&e.event, true) - e.emit() - } -} - -func (e *encoder) marshal(tag string, in reflect.Value) { - tag = shortTag(tag) - if !in.IsValid() || in.Kind() == reflect.Ptr && in.IsNil() { - e.nilv() - return - } - iface := in.Interface() - switch value := iface.(type) { - case *Node: - e.nodev(in) - return - case Node: - if !in.CanAddr() { - var n = reflect.New(in.Type()).Elem() - n.Set(in) - in = n - } - e.nodev(in.Addr()) - return - case time.Time: - e.timev(tag, in) - return - case *time.Time: - e.timev(tag, in.Elem()) - return - case time.Duration: - e.stringv(tag, reflect.ValueOf(value.String())) - return - case Marshaler: - v, err := value.MarshalYAML() - if err != nil { - fail(err) - } - if v == nil { - e.nilv() - return - } - e.marshal(tag, reflect.ValueOf(v)) - return - case encoding.TextMarshaler: - text, err := value.MarshalText() - if err != nil { - fail(err) - } - in = reflect.ValueOf(string(text)) - case nil: - e.nilv() - return - } - switch in.Kind() { - case reflect.Interface: - e.marshal(tag, in.Elem()) - case reflect.Map: - e.mapv(tag, in) - case reflect.Ptr: - e.marshal(tag, in.Elem()) - case reflect.Struct: - e.structv(tag, in) - case reflect.Slice, reflect.Array: - e.slicev(tag, in) - case reflect.String: - e.stringv(tag, in) - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - e.intv(tag, in) - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: - e.uintv(tag, in) - case reflect.Float32, reflect.Float64: - e.floatv(tag, in) - case reflect.Bool: - e.boolv(tag, in) - default: - panic("cannot marshal type: " + in.Type().String()) - } -} - -func (e *encoder) mapv(tag string, in reflect.Value) { - e.mappingv(tag, func() { - keys := keyList(in.MapKeys()) - 
sort.Sort(keys) - for _, k := range keys { - e.marshal("", k) - e.marshal("", in.MapIndex(k)) - } - }) -} - -func (e *encoder) fieldByIndex(v reflect.Value, index []int) (field reflect.Value) { - for _, num := range index { - for { - if v.Kind() == reflect.Ptr { - if v.IsNil() { - return reflect.Value{} - } - v = v.Elem() - continue - } - break - } - v = v.Field(num) - } - return v -} - -func (e *encoder) structv(tag string, in reflect.Value) { - sinfo, err := getStructInfo(in.Type()) - if err != nil { - panic(err) - } - e.mappingv(tag, func() { - for _, info := range sinfo.FieldsList { - var value reflect.Value - if info.Inline == nil { - value = in.Field(info.Num) - } else { - value = e.fieldByIndex(in, info.Inline) - if !value.IsValid() { - continue - } - } - if info.OmitEmpty && isZero(value) { - continue - } - e.marshal("", reflect.ValueOf(info.Key)) - e.flow = info.Flow - e.marshal("", value) - } - if sinfo.InlineMap >= 0 { - m := in.Field(sinfo.InlineMap) - if m.Len() > 0 { - e.flow = false - keys := keyList(m.MapKeys()) - sort.Sort(keys) - for _, k := range keys { - if _, found := sinfo.FieldsMap[k.String()]; found { - panic(fmt.Sprintf("cannot have key %q in inlined map: conflicts with struct field", k.String())) - } - e.marshal("", k) - e.flow = false - e.marshal("", m.MapIndex(k)) - } - } - } - }) -} - -func (e *encoder) mappingv(tag string, f func()) { - implicit := tag == "" - style := yaml_BLOCK_MAPPING_STYLE - if e.flow { - e.flow = false - style = yaml_FLOW_MAPPING_STYLE - } - yaml_mapping_start_event_initialize(&e.event, nil, []byte(tag), implicit, style) - e.emit() - f() - yaml_mapping_end_event_initialize(&e.event) - e.emit() -} - -func (e *encoder) slicev(tag string, in reflect.Value) { - implicit := tag == "" - style := yaml_BLOCK_SEQUENCE_STYLE - if e.flow { - e.flow = false - style = yaml_FLOW_SEQUENCE_STYLE - } - e.must(yaml_sequence_start_event_initialize(&e.event, nil, []byte(tag), implicit, style)) - e.emit() - n := in.Len() - for i := 0; i < n; i++ { - e.marshal("", in.Index(i)) - } - e.must(yaml_sequence_end_event_initialize(&e.event)) - e.emit() -} - -// isBase60Float returns whether s is in base 60 notation as defined in YAML 1.1. -// -// The base 60 float notation in YAML 1.1 is a terrible idea and is unsupported -// in YAML 1.2 and by this package, but these should be marshalled quoted for -// the time being for compatibility with other parsers. -func isBase60Float(s string) (result bool) { - // Fast path. - if s == "" { - return false - } - c := s[0] - if !(c == '+' || c == '-' || c >= '0' && c <= '9') || strings.IndexByte(s, ':') < 0 { - return false - } - // Do the full match. - return base60float.MatchString(s) -} - -// From http://yaml.org/type/float.html, except the regular expression there -// is bogus. In practice parsers do not enforce the "\.[0-9_]*" suffix. -var base60float = regexp.MustCompile(`^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+(?:\.[0-9_]*)?$`) - -// isOldBool returns whether s is bool notation as defined in YAML 1.1. -// -// We continue to force strings that YAML 1.1 would interpret as booleans to be -// rendered as quoted strings so that the marshalled output is valid for YAML 1.1 -// parsing.
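These two checks, isBase60Float above and isOldBool just below, are what keep YAML 1.1-style literals quoted in the output. A quick illustration, assuming the upstream gopkg.in/yaml.v3 API that this vendored copy appears to mirror:

package main

import (
	"fmt"

	"gopkg.in/yaml.v3"
)

func main() {
	// Assumes the public yaml.v3 package, not this internal fork.
	// "on" and "1:20" are plain strings under YAML 1.2, but a YAML 1.1
	// parser would read them as a boolean and a base-60 float, so the
	// encoder double-quotes them; an ordinary word stays plain.
	for _, s := range []string{"on", "1:20", "hello"} {
		out, err := yaml.Marshal(s)
		if err != nil {
			panic(err)
		}
		fmt.Printf("%q -> %s", s, out)
	}
}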
-func isOldBool(s string) (result bool) { - switch s { - case "y", "Y", "yes", "Yes", "YES", "on", "On", "ON", - "n", "N", "no", "No", "NO", "off", "Off", "OFF": - return true - default: - return false - } -} - -func (e *encoder) stringv(tag string, in reflect.Value) { - var style yaml_scalar_style_t - s := in.String() - canUsePlain := true - switch { - case !utf8.ValidString(s): - if tag == binaryTag { - failf("explicitly tagged !!binary data must be base64-encoded") - } - if tag != "" { - failf("cannot marshal invalid UTF-8 data as %s", shortTag(tag)) - } - // It can't be encoded directly as YAML so use a binary tag - // and encode it as base64. - tag = binaryTag - s = encodeBase64(s) - case tag == "": - // Check to see if it would resolve to a specific - // tag when encoded unquoted. If it doesn't, - // there's no need to quote it. - rtag, _ := resolve("", s) - canUsePlain = rtag == strTag && !(isBase60Float(s) || isOldBool(s)) - } - // Note: it's possible for user code to emit invalid YAML - // if they explicitly specify a tag and a string containing - // text that's incompatible with that tag. - switch { - case strings.Contains(s, "\n"): - if e.flow { - style = yaml_DOUBLE_QUOTED_SCALAR_STYLE - } else { - style = yaml_LITERAL_SCALAR_STYLE - } - case canUsePlain: - style = yaml_PLAIN_SCALAR_STYLE - default: - style = yaml_DOUBLE_QUOTED_SCALAR_STYLE - } - e.emitScalar(s, "", tag, style, nil, nil, nil, nil) -} - -func (e *encoder) boolv(tag string, in reflect.Value) { - var s string - if in.Bool() { - s = "true" - } else { - s = "false" - } - e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE, nil, nil, nil, nil) -} - -func (e *encoder) intv(tag string, in reflect.Value) { - s := strconv.FormatInt(in.Int(), 10) - e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE, nil, nil, nil, nil) -} - -func (e *encoder) uintv(tag string, in reflect.Value) { - s := strconv.FormatUint(in.Uint(), 10) - e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE, nil, nil, nil, nil) -} - -func (e *encoder) timev(tag string, in reflect.Value) { - t := in.Interface().(time.Time) - s := t.Format(time.RFC3339Nano) - e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE, nil, nil, nil, nil) -} - -func (e *encoder) floatv(tag string, in reflect.Value) { - // Issue #352: When formatting, use the precision of the underlying value - precision := 64 - if in.Kind() == reflect.Float32 { - precision = 32 - } - - s := strconv.FormatFloat(in.Float(), 'g', -1, precision) - switch s { - case "+Inf": - s = ".inf" - case "-Inf": - s = "-.inf" - case "NaN": - s = ".nan" - } - e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE, nil, nil, nil, nil) -} - -func (e *encoder) nilv() { - e.emitScalar("null", "", "", yaml_PLAIN_SCALAR_STYLE, nil, nil, nil, nil) -} - -func (e *encoder) emitScalar(value, anchor, tag string, style yaml_scalar_style_t, head, line, foot, tail []byte) { - // TODO Kill this function. Replace all initialize calls by their underlying Go literals. - implicit := tag == "" - if !implicit { - tag = longTag(tag) - } - e.must(yaml_scalar_event_initialize(&e.event, []byte(anchor), []byte(tag), []byte(value), implicit, implicit, style)) - e.event.head_comment = head - e.event.line_comment = line - e.event.foot_comment = foot - e.event.tail_comment = tail - e.emit() -} - -func (e *encoder) nodev(in reflect.Value) { - e.node(in.Interface().(*Node), "") -} - -func (e *encoder) node(node *Node, tail string) { - // Zero nodes behave as nil.
- if node.Kind == 0 && node.IsZero() { - e.nilv() - return - } - - // If the tag was not explicitly requested, and dropping it won't change the - // implicit tag of the value, don't include it in the presentation. - var tag = node.Tag - var stag = shortTag(tag) - var forceQuoting bool - if tag != "" && node.Style&TaggedStyle == 0 { - if node.Kind == ScalarNode { - if stag == strTag && node.Style&(SingleQuotedStyle|DoubleQuotedStyle|LiteralStyle|FoldedStyle) != 0 { - tag = "" - } else { - rtag, _ := resolve("", node.Value) - if rtag == stag { - tag = "" - } else if stag == strTag { - tag = "" - forceQuoting = true - } - } - } else { - var rtag string - switch node.Kind { - case MappingNode: - rtag = mapTag - case SequenceNode: - rtag = seqTag - } - if rtag == stag { - tag = "" - } - } - } - - switch node.Kind { - case DocumentNode: - yaml_document_start_event_initialize(&e.event, nil, nil, true) - e.event.head_comment = []byte(node.HeadComment) - e.emit() - for _, node := range node.Content { - e.node(node, "") - } - yaml_document_end_event_initialize(&e.event, true) - e.event.foot_comment = []byte(node.FootComment) - e.emit() - - case SequenceNode: - style := yaml_BLOCK_SEQUENCE_STYLE - if node.Style&FlowStyle != 0 { - style = yaml_FLOW_SEQUENCE_STYLE - } - e.must(yaml_sequence_start_event_initialize(&e.event, []byte(node.Anchor), []byte(longTag(tag)), tag == "", style)) - e.event.head_comment = []byte(node.HeadComment) - e.emit() - for _, node := range node.Content { - e.node(node, "") - } - e.must(yaml_sequence_end_event_initialize(&e.event)) - e.event.line_comment = []byte(node.LineComment) - e.event.foot_comment = []byte(node.FootComment) - e.emit() - - case MappingNode: - style := yaml_BLOCK_MAPPING_STYLE - if node.Style&FlowStyle != 0 { - style = yaml_FLOW_MAPPING_STYLE - } - yaml_mapping_start_event_initialize(&e.event, []byte(node.Anchor), []byte(longTag(tag)), tag == "", style) - e.event.tail_comment = []byte(tail) - e.event.head_comment = []byte(node.HeadComment) - e.emit() - - // The tail logic below moves the foot comment of prior keys to the following key, - // since the value for each key may be a nested structure and the foot needs to be - // processed only after the entirety of the value is streamed. The last tail is processed - // with the mapping end event. - var tail string - for i := 0; i+1 < len(node.Content); i += 2 { - k := node.Content[i] - foot := k.FootComment - if foot != "" { - kopy := *k - kopy.FootComment = "" - k = &kopy - } - e.node(k, tail) - tail = foot - - v := node.Content[i+1] - e.node(v, "") - } - - yaml_mapping_end_event_initialize(&e.event) - e.event.tail_comment = []byte(tail) - e.event.line_comment = []byte(node.LineComment) - e.event.foot_comment = []byte(node.FootComment) - e.emit() - - case AliasNode: - yaml_alias_event_initialize(&e.event, []byte(node.Value)) - e.event.head_comment = []byte(node.HeadComment) - e.event.line_comment = []byte(node.LineComment) - e.event.foot_comment = []byte(node.FootComment) - e.emit() - - case ScalarNode: - value := node.Value - if !utf8.ValidString(value) { - if stag == binaryTag { - failf("explicitly tagged !!binary data must be base64-encoded") - } - if stag != "" { - failf("cannot marshal invalid UTF-8 data as %s", stag) - } - // It can't be encoded directly as YAML so use a binary tag - // and encode it as base64.
- tag = binaryTag - value = encodeBase64(value) - } - - style := yaml_PLAIN_SCALAR_STYLE - switch { - case node.Style&DoubleQuotedStyle != 0: - style = yaml_DOUBLE_QUOTED_SCALAR_STYLE - case node.Style&SingleQuotedStyle != 0: - style = yaml_SINGLE_QUOTED_SCALAR_STYLE - case node.Style&LiteralStyle != 0: - style = yaml_LITERAL_SCALAR_STYLE - case node.Style&FoldedStyle != 0: - style = yaml_FOLDED_SCALAR_STYLE - case strings.Contains(value, "\n"): - style = yaml_LITERAL_SCALAR_STYLE - case forceQuoting: - style = yaml_DOUBLE_QUOTED_SCALAR_STYLE - } - - e.emitScalar(value, node.Anchor, tag, style, []byte(node.HeadComment), []byte(node.LineComment), []byte(node.FootComment), []byte(tail)) - default: - failf("cannot encode node with unknown kind %d", node.Kind) - } -} diff --git a/cli/internal/yaml/parserc.go b/cli/internal/yaml/parserc.go deleted file mode 100644 index 25fe823637abc..0000000000000 --- a/cli/internal/yaml/parserc.go +++ /dev/null @@ -1,1274 +0,0 @@ -// -// Copyright (c) 2011-2019 Canonical Ltd -// Copyright (c) 2006-2010 Kirill Simonov -// -// Permission is hereby granted, free of charge, to any person obtaining a copy of -// this software and associated documentation files (the "Software"), to deal in -// the Software without restriction, including without limitation the rights to -// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -// of the Software, and to permit persons to whom the Software is furnished to do -// so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in all -// copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -// SOFTWARE. - -package yaml - -import ( - "bytes" -) - -// The parser implements the following grammar: -// -// stream ::= STREAM-START implicit_document? explicit_document* STREAM-END -// implicit_document ::= block_node DOCUMENT-END* -// explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END* -// block_node_or_indentless_sequence ::= -// ALIAS -// | properties (block_content | indentless_block_sequence)? -// | block_content -// | indentless_block_sequence -// block_node ::= ALIAS -// | properties block_content? -// | block_content -// flow_node ::= ALIAS -// | properties flow_content? -// | flow_content -// properties ::= TAG ANCHOR? | ANCHOR TAG? -// block_content ::= block_collection | flow_collection | SCALAR -// flow_content ::= flow_collection | SCALAR -// block_collection ::= block_sequence | block_mapping -// flow_collection ::= flow_sequence | flow_mapping -// block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END -// indentless_sequence ::= (BLOCK-ENTRY block_node?)+ -// block_mapping ::= BLOCK-MAPPING_START -// ((KEY block_node_or_indentless_sequence?)? -// (VALUE block_node_or_indentless_sequence?)?)* -// BLOCK-END -// flow_sequence ::= FLOW-SEQUENCE-START -// (flow_sequence_entry FLOW-ENTRY)* -// flow_sequence_entry? -// FLOW-SEQUENCE-END -// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? 
-// flow_mapping ::= FLOW-MAPPING-START -// (flow_mapping_entry FLOW-ENTRY)* -// flow_mapping_entry? -// FLOW-MAPPING-END -// flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? - -// Peek the next token in the token queue. -func peek_token(parser *yaml_parser_t) *yaml_token_t { - if parser.token_available || yaml_parser_fetch_more_tokens(parser) { - token := &parser.tokens[parser.tokens_head] - yaml_parser_unfold_comments(parser, token) - return token - } - return nil -} - -// yaml_parser_unfold_comments walks through the comments queue and joins all -// comments behind the position of the provided token into the respective -// top-level comment slices in the parser. -func yaml_parser_unfold_comments(parser *yaml_parser_t, token *yaml_token_t) { - for parser.comments_head < len(parser.comments) && token.start_mark.index >= parser.comments[parser.comments_head].token_mark.index { - comment := &parser.comments[parser.comments_head] - if len(comment.head) > 0 { - if token.typ == yaml_BLOCK_END_TOKEN { - // No heads on ends, so keep comment.head for a follow up token. - break - } - if len(parser.head_comment) > 0 { - parser.head_comment = append(parser.head_comment, '\n') - } - parser.head_comment = append(parser.head_comment, comment.head...) - } - if len(comment.foot) > 0 { - if len(parser.foot_comment) > 0 { - parser.foot_comment = append(parser.foot_comment, '\n') - } - parser.foot_comment = append(parser.foot_comment, comment.foot...) - } - if len(comment.line) > 0 { - if len(parser.line_comment) > 0 { - parser.line_comment = append(parser.line_comment, '\n') - } - parser.line_comment = append(parser.line_comment, comment.line...) - } - *comment = yaml_comment_t{} - parser.comments_head++ - } -} - -// Remove the next token from the queue (must be called after peek_token). -func skip_token(parser *yaml_parser_t) { - parser.token_available = false - parser.tokens_parsed++ - parser.stream_end_produced = parser.tokens[parser.tokens_head].typ == yaml_STREAM_END_TOKEN - parser.tokens_head++ -} - -// Get the next event. -func yaml_parser_parse(parser *yaml_parser_t, event *yaml_event_t) bool { - // Erase the event object. - *event = yaml_event_t{} - - // No events after the end of the stream or error. - if parser.stream_end_produced || parser.error != yaml_NO_ERROR || parser.state == yaml_PARSE_END_STATE { - return true - } - - // Generate the next event. - return yaml_parser_state_machine(parser, event) -} - -// Set parser error. -func yaml_parser_set_parser_error(parser *yaml_parser_t, problem string, problem_mark yaml_mark_t) bool { - parser.error = yaml_PARSER_ERROR - parser.problem = problem - parser.problem_mark = problem_mark - return false -} - -func yaml_parser_set_parser_error_context(parser *yaml_parser_t, context string, context_mark yaml_mark_t, problem string, problem_mark yaml_mark_t) bool { - parser.error = yaml_PARSER_ERROR - parser.context = context - parser.context_mark = context_mark - parser.problem = problem - parser.problem_mark = problem_mark - return false -} - -// State dispatcher. 
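The document start/end states dispatched below are what make multi-document streams work; the public Decoder surfaces each document as a separate Decode call. A sketch assuming the upstream gopkg.in/yaml.v3 API that this vendored copy appears to mirror:

package main

import (
	"errors"
	"fmt"
	"io"
	"strings"

	"gopkg.in/yaml.v3"
)

func main() {
	// Assumes the public yaml.v3 package, not this internal fork.
	// Two documents separated by an explicit DOCUMENT-START ("---").
	dec := yaml.NewDecoder(strings.NewReader("a: 1\n---\na: 2\n"))
	for {
		var v map[string]int
		err := dec.Decode(&v)
		if errors.Is(err, io.EOF) {
			break
		}
		if err != nil {
			panic(err)
		}
		fmt.Println(v)
	}
}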
-func yaml_parser_state_machine(parser *yaml_parser_t, event *yaml_event_t) bool { - //trace("yaml_parser_state_machine", "state:", parser.state.String()) - - switch parser.state { - case yaml_PARSE_STREAM_START_STATE: - return yaml_parser_parse_stream_start(parser, event) - - case yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE: - return yaml_parser_parse_document_start(parser, event, true) - - case yaml_PARSE_DOCUMENT_START_STATE: - return yaml_parser_parse_document_start(parser, event, false) - - case yaml_PARSE_DOCUMENT_CONTENT_STATE: - return yaml_parser_parse_document_content(parser, event) - - case yaml_PARSE_DOCUMENT_END_STATE: - return yaml_parser_parse_document_end(parser, event) - - case yaml_PARSE_BLOCK_NODE_STATE: - return yaml_parser_parse_node(parser, event, true, false) - - case yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE: - return yaml_parser_parse_node(parser, event, true, true) - - case yaml_PARSE_FLOW_NODE_STATE: - return yaml_parser_parse_node(parser, event, false, false) - - case yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE: - return yaml_parser_parse_block_sequence_entry(parser, event, true) - - case yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE: - return yaml_parser_parse_block_sequence_entry(parser, event, false) - - case yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE: - return yaml_parser_parse_indentless_sequence_entry(parser, event) - - case yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE: - return yaml_parser_parse_block_mapping_key(parser, event, true) - - case yaml_PARSE_BLOCK_MAPPING_KEY_STATE: - return yaml_parser_parse_block_mapping_key(parser, event, false) - - case yaml_PARSE_BLOCK_MAPPING_VALUE_STATE: - return yaml_parser_parse_block_mapping_value(parser, event) - - case yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE: - return yaml_parser_parse_flow_sequence_entry(parser, event, true) - - case yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE: - return yaml_parser_parse_flow_sequence_entry(parser, event, false) - - case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE: - return yaml_parser_parse_flow_sequence_entry_mapping_key(parser, event) - - case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE: - return yaml_parser_parse_flow_sequence_entry_mapping_value(parser, event) - - case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE: - return yaml_parser_parse_flow_sequence_entry_mapping_end(parser, event) - - case yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE: - return yaml_parser_parse_flow_mapping_key(parser, event, true) - - case yaml_PARSE_FLOW_MAPPING_KEY_STATE: - return yaml_parser_parse_flow_mapping_key(parser, event, false) - - case yaml_PARSE_FLOW_MAPPING_VALUE_STATE: - return yaml_parser_parse_flow_mapping_value(parser, event, false) - - case yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE: - return yaml_parser_parse_flow_mapping_value(parser, event, true) - - default: - panic("invalid parser state") - } -} - -// Parse the production: -// stream ::= STREAM-START implicit_document? 
explicit_document* STREAM-END -// -// ************ -func yaml_parser_parse_stream_start(parser *yaml_parser_t, event *yaml_event_t) bool { - token := peek_token(parser) - if token == nil { - return false - } - if token.typ != yaml_STREAM_START_TOKEN { - return yaml_parser_set_parser_error(parser, "did not find expected <stream-start>", token.start_mark) - } - parser.state = yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE - *event = yaml_event_t{ - typ: yaml_STREAM_START_EVENT, - start_mark: token.start_mark, - end_mark: token.end_mark, - encoding: token.encoding, - } - skip_token(parser) - return true -} - -// Parse the productions: -// implicit_document ::= block_node DOCUMENT-END* -// -// * -// -// explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END* -// -// ************************* -func yaml_parser_parse_document_start(parser *yaml_parser_t, event *yaml_event_t, implicit bool) bool { - - token := peek_token(parser) - if token == nil { - return false - } - - // Parse extra document end indicators. - if !implicit { - for token.typ == yaml_DOCUMENT_END_TOKEN { - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - } - } - - if implicit && token.typ != yaml_VERSION_DIRECTIVE_TOKEN && - token.typ != yaml_TAG_DIRECTIVE_TOKEN && - token.typ != yaml_DOCUMENT_START_TOKEN && - token.typ != yaml_STREAM_END_TOKEN { - // Parse an implicit document. - if !yaml_parser_process_directives(parser, nil, nil) { - return false - } - parser.states = append(parser.states, yaml_PARSE_DOCUMENT_END_STATE) - parser.state = yaml_PARSE_BLOCK_NODE_STATE - - var head_comment []byte - if len(parser.head_comment) > 0 { - // [Go] Scan the header comment backwards, and if an empty line is found, break - // the header so the part before the last empty line goes into the - // document header, while the bottom of it goes into a follow up event. - for i := len(parser.head_comment) - 1; i > 0; i-- { - if parser.head_comment[i] == '\n' { - if i == len(parser.head_comment)-1 { - head_comment = parser.head_comment[:i] - parser.head_comment = parser.head_comment[i+1:] - break - } else if parser.head_comment[i-1] == '\n' { - head_comment = parser.head_comment[:i-1] - parser.head_comment = parser.head_comment[i+1:] - break - } - } - } - } - - *event = yaml_event_t{ - typ: yaml_DOCUMENT_START_EVENT, - start_mark: token.start_mark, - end_mark: token.end_mark, - - head_comment: head_comment, - } - - } else if token.typ != yaml_STREAM_END_TOKEN { - // Parse an explicit document. - var version_directive *yaml_version_directive_t - var tag_directives []yaml_tag_directive_t - start_mark := token.start_mark - if !yaml_parser_process_directives(parser, &version_directive, &tag_directives) { - return false - } - token = peek_token(parser) - if token == nil { - return false - } - if token.typ != yaml_DOCUMENT_START_TOKEN { - yaml_parser_set_parser_error(parser, - "did not find expected <document start>", token.start_mark) - return false - } - parser.states = append(parser.states, yaml_PARSE_DOCUMENT_END_STATE) - parser.state = yaml_PARSE_DOCUMENT_CONTENT_STATE - end_mark := token.end_mark - - *event = yaml_event_t{ - typ: yaml_DOCUMENT_START_EVENT, - start_mark: start_mark, - end_mark: end_mark, - version_directive: version_directive, - tag_directives: tag_directives, - implicit: false, - } - skip_token(parser) - - } else { - // Parse the stream end.
- parser.state = yaml_PARSE_END_STATE - *event = yaml_event_t{ - typ: yaml_STREAM_END_EVENT, - start_mark: token.start_mark, - end_mark: token.end_mark, - } - skip_token(parser) - } - - return true -} - -// Parse the productions: -// explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END* -// -// *********** -func yaml_parser_parse_document_content(parser *yaml_parser_t, event *yaml_event_t) bool { - token := peek_token(parser) - if token == nil { - return false - } - - if token.typ == yaml_VERSION_DIRECTIVE_TOKEN || - token.typ == yaml_TAG_DIRECTIVE_TOKEN || - token.typ == yaml_DOCUMENT_START_TOKEN || - token.typ == yaml_DOCUMENT_END_TOKEN || - token.typ == yaml_STREAM_END_TOKEN { - parser.state = parser.states[len(parser.states)-1] - parser.states = parser.states[:len(parser.states)-1] - return yaml_parser_process_empty_scalar(parser, event, - token.start_mark) - } - return yaml_parser_parse_node(parser, event, true, false) -} - -// Parse the productions: -// implicit_document ::= block_node DOCUMENT-END* -// -// ************* -// -// explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END* -func yaml_parser_parse_document_end(parser *yaml_parser_t, event *yaml_event_t) bool { - token := peek_token(parser) - if token == nil { - return false - } - - start_mark := token.start_mark - end_mark := token.start_mark - - implicit := true - if token.typ == yaml_DOCUMENT_END_TOKEN { - end_mark = token.end_mark - skip_token(parser) - implicit = false - } - - parser.tag_directives = parser.tag_directives[:0] - - parser.state = yaml_PARSE_DOCUMENT_START_STATE - *event = yaml_event_t{ - typ: yaml_DOCUMENT_END_EVENT, - start_mark: start_mark, - end_mark: end_mark, - implicit: implicit, - } - yaml_parser_set_event_comments(parser, event) - if len(event.head_comment) > 0 && len(event.foot_comment) == 0 { - event.foot_comment = event.head_comment - event.head_comment = nil - } - return true -} - -func yaml_parser_set_event_comments(parser *yaml_parser_t, event *yaml_event_t) { - event.head_comment = parser.head_comment - event.line_comment = parser.line_comment - event.foot_comment = parser.foot_comment - parser.head_comment = nil - parser.line_comment = nil - parser.foot_comment = nil - parser.tail_comment = nil - parser.stem_comment = nil -} - -// Parse the productions: -// block_node_or_indentless_sequence ::= -// -// ALIAS -// ***** -// | properties (block_content | indentless_block_sequence)? -// ********** * -// | block_content | indentless_block_sequence -// * -// -// block_node ::= ALIAS -// -// ***** -// | properties block_content? -// ********** * -// | block_content -// * -// -// flow_node ::= ALIAS -// -// ***** -// | properties flow_content? -// ********** * -// | flow_content -// * -// -// properties ::= TAG ANCHOR? | ANCHOR TAG? 
-// -// ************************* -// -// block_content ::= block_collection | flow_collection | SCALAR -// -// ****** -// -// flow_content ::= flow_collection | SCALAR -// -// ****** -func yaml_parser_parse_node(parser *yaml_parser_t, event *yaml_event_t, block, indentless_sequence bool) bool { - //defer trace("yaml_parser_parse_node", "block:", block, "indentless_sequence:", indentless_sequence)() - - token := peek_token(parser) - if token == nil { - return false - } - - if token.typ == yaml_ALIAS_TOKEN { - parser.state = parser.states[len(parser.states)-1] - parser.states = parser.states[:len(parser.states)-1] - *event = yaml_event_t{ - typ: yaml_ALIAS_EVENT, - start_mark: token.start_mark, - end_mark: token.end_mark, - anchor: token.value, - } - yaml_parser_set_event_comments(parser, event) - skip_token(parser) - return true - } - - start_mark := token.start_mark - end_mark := token.start_mark - - var tag_token bool - var tag_handle, tag_suffix, anchor []byte - var tag_mark yaml_mark_t - if token.typ == yaml_ANCHOR_TOKEN { - anchor = token.value - start_mark = token.start_mark - end_mark = token.end_mark - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - if token.typ == yaml_TAG_TOKEN { - tag_token = true - tag_handle = token.value - tag_suffix = token.suffix - tag_mark = token.start_mark - end_mark = token.end_mark - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - } - } else if token.typ == yaml_TAG_TOKEN { - tag_token = true - tag_handle = token.value - tag_suffix = token.suffix - start_mark = token.start_mark - tag_mark = token.start_mark - end_mark = token.end_mark - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - if token.typ == yaml_ANCHOR_TOKEN { - anchor = token.value - end_mark = token.end_mark - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - } - } - - var tag []byte - if tag_token { - if len(tag_handle) == 0 { - tag = tag_suffix - tag_suffix = nil - } else { - for i := range parser.tag_directives { - if bytes.Equal(parser.tag_directives[i].handle, tag_handle) { - tag = append([]byte(nil), parser.tag_directives[i].prefix...) - tag = append(tag, tag_suffix...) 
- break - } - } - if len(tag) == 0 { - yaml_parser_set_parser_error_context(parser, - "while parsing a node", start_mark, - "found undefined tag handle", tag_mark) - return false - } - } - } - - implicit := len(tag) == 0 - if indentless_sequence && token.typ == yaml_BLOCK_ENTRY_TOKEN { - end_mark = token.end_mark - parser.state = yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE - *event = yaml_event_t{ - typ: yaml_SEQUENCE_START_EVENT, - start_mark: start_mark, - end_mark: end_mark, - anchor: anchor, - tag: tag, - implicit: implicit, - style: yaml_style_t(yaml_BLOCK_SEQUENCE_STYLE), - } - return true - } - if token.typ == yaml_SCALAR_TOKEN { - var plain_implicit, quoted_implicit bool - end_mark = token.end_mark - if (len(tag) == 0 && token.style == yaml_PLAIN_SCALAR_STYLE) || (len(tag) == 1 && tag[0] == '!') { - plain_implicit = true - } else if len(tag) == 0 { - quoted_implicit = true - } - parser.state = parser.states[len(parser.states)-1] - parser.states = parser.states[:len(parser.states)-1] - - *event = yaml_event_t{ - typ: yaml_SCALAR_EVENT, - start_mark: start_mark, - end_mark: end_mark, - anchor: anchor, - tag: tag, - value: token.value, - implicit: plain_implicit, - quoted_implicit: quoted_implicit, - style: yaml_style_t(token.style), - } - yaml_parser_set_event_comments(parser, event) - skip_token(parser) - return true - } - if token.typ == yaml_FLOW_SEQUENCE_START_TOKEN { - // [Go] Some of the events below can be merged as they differ only on style. - end_mark = token.end_mark - parser.state = yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE - *event = yaml_event_t{ - typ: yaml_SEQUENCE_START_EVENT, - start_mark: start_mark, - end_mark: end_mark, - anchor: anchor, - tag: tag, - implicit: implicit, - style: yaml_style_t(yaml_FLOW_SEQUENCE_STYLE), - } - yaml_parser_set_event_comments(parser, event) - return true - } - if token.typ == yaml_FLOW_MAPPING_START_TOKEN { - end_mark = token.end_mark - parser.state = yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE - *event = yaml_event_t{ - typ: yaml_MAPPING_START_EVENT, - start_mark: start_mark, - end_mark: end_mark, - anchor: anchor, - tag: tag, - implicit: implicit, - style: yaml_style_t(yaml_FLOW_MAPPING_STYLE), - } - yaml_parser_set_event_comments(parser, event) - return true - } - if block && token.typ == yaml_BLOCK_SEQUENCE_START_TOKEN { - end_mark = token.end_mark - parser.state = yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE - *event = yaml_event_t{ - typ: yaml_SEQUENCE_START_EVENT, - start_mark: start_mark, - end_mark: end_mark, - anchor: anchor, - tag: tag, - implicit: implicit, - style: yaml_style_t(yaml_BLOCK_SEQUENCE_STYLE), - } - if parser.stem_comment != nil { - event.head_comment = parser.stem_comment - parser.stem_comment = nil - } - return true - } - if block && token.typ == yaml_BLOCK_MAPPING_START_TOKEN { - end_mark = token.end_mark - parser.state = yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE - *event = yaml_event_t{ - typ: yaml_MAPPING_START_EVENT, - start_mark: start_mark, - end_mark: end_mark, - anchor: anchor, - tag: tag, - implicit: implicit, - style: yaml_style_t(yaml_BLOCK_MAPPING_STYLE), - } - if parser.stem_comment != nil { - event.head_comment = parser.stem_comment - parser.stem_comment = nil - } - return true - } - if len(anchor) > 0 || len(tag) > 0 { - parser.state = parser.states[len(parser.states)-1] - parser.states = parser.states[:len(parser.states)-1] - - *event = yaml_event_t{ - typ: yaml_SCALAR_EVENT, - start_mark: start_mark, - end_mark: end_mark, - anchor: anchor, - tag: tag, - implicit: implicit, - quoted_implicit: 
false, - style: yaml_style_t(yaml_PLAIN_SCALAR_STYLE), - } - return true - } - - context := "while parsing a flow node" - if block { - context = "while parsing a block node" - } - yaml_parser_set_parser_error_context(parser, context, start_mark, - "did not find expected node content", token.start_mark) - return false -} - -// Parse the productions: -// block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END -// -// ******************** *********** * ********* -func yaml_parser_parse_block_sequence_entry(parser *yaml_parser_t, event *yaml_event_t, first bool) bool { - if first { - token := peek_token(parser) - if token == nil { - return false - } - parser.marks = append(parser.marks, token.start_mark) - skip_token(parser) - } - - token := peek_token(parser) - if token == nil { - return false - } - - if token.typ == yaml_BLOCK_ENTRY_TOKEN { - mark := token.end_mark - prior_head_len := len(parser.head_comment) - skip_token(parser) - yaml_parser_split_stem_comment(parser, prior_head_len) - token = peek_token(parser) - if token == nil { - return false - } - if token.typ != yaml_BLOCK_ENTRY_TOKEN && token.typ != yaml_BLOCK_END_TOKEN { - parser.states = append(parser.states, yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE) - return yaml_parser_parse_node(parser, event, true, false) - } else { - parser.state = yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE - return yaml_parser_process_empty_scalar(parser, event, mark) - } - } - if token.typ == yaml_BLOCK_END_TOKEN { - parser.state = parser.states[len(parser.states)-1] - parser.states = parser.states[:len(parser.states)-1] - parser.marks = parser.marks[:len(parser.marks)-1] - - *event = yaml_event_t{ - typ: yaml_SEQUENCE_END_EVENT, - start_mark: token.start_mark, - end_mark: token.end_mark, - } - - skip_token(parser) - return true - } - - context_mark := parser.marks[len(parser.marks)-1] - parser.marks = parser.marks[:len(parser.marks)-1] - return yaml_parser_set_parser_error_context(parser, - "while parsing a block collection", context_mark, - "did not find expected '-' indicator", token.start_mark) -} - -// Parse the productions: -// indentless_sequence ::= (BLOCK-ENTRY block_node?)+ -// -// *********** * -func yaml_parser_parse_indentless_sequence_entry(parser *yaml_parser_t, event *yaml_event_t) bool { - token := peek_token(parser) - if token == nil { - return false - } - - if token.typ == yaml_BLOCK_ENTRY_TOKEN { - mark := token.end_mark - prior_head_len := len(parser.head_comment) - skip_token(parser) - yaml_parser_split_stem_comment(parser, prior_head_len) - token = peek_token(parser) - if token == nil { - return false - } - if token.typ != yaml_BLOCK_ENTRY_TOKEN && - token.typ != yaml_KEY_TOKEN && - token.typ != yaml_VALUE_TOKEN && - token.typ != yaml_BLOCK_END_TOKEN { - parser.states = append(parser.states, yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE) - return yaml_parser_parse_node(parser, event, true, false) - } - parser.state = yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE - return yaml_parser_process_empty_scalar(parser, event, mark) - } - parser.state = parser.states[len(parser.states)-1] - parser.states = parser.states[:len(parser.states)-1] - - *event = yaml_event_t{ - typ: yaml_SEQUENCE_END_EVENT, - start_mark: token.start_mark, - end_mark: token.start_mark, // [Go] Shouldn't this be token.end_mark? - } - return true -} - -// Split stem comment from head comment. 
-// -// When a sequence or map is found under a sequence entry, the former head comment -// is assigned to the underlying sequence or map as a whole, not the individual -// sequence or map entry as would be expected otherwise. To handle this case the -// previous head comment is moved aside as the stem comment. -func yaml_parser_split_stem_comment(parser *yaml_parser_t, stem_len int) { - if stem_len == 0 { - return - } - - token := peek_token(parser) - if token == nil || token.typ != yaml_BLOCK_SEQUENCE_START_TOKEN && token.typ != yaml_BLOCK_MAPPING_START_TOKEN { - return - } - - parser.stem_comment = parser.head_comment[:stem_len] - if len(parser.head_comment) == stem_len { - parser.head_comment = nil - } else { - // Copy suffix to prevent very strange bugs if someone ever appends - // further bytes to the prefix in the stem_comment slice above. - parser.head_comment = append([]byte(nil), parser.head_comment[stem_len+1:]...) - } -} - -// Parse the productions: -// block_mapping ::= BLOCK-MAPPING_START -// -// ******************* -// ((KEY block_node_or_indentless_sequence?)? -// *** * -// (VALUE block_node_or_indentless_sequence?)?)* -// -// BLOCK-END -// ********* -func yaml_parser_parse_block_mapping_key(parser *yaml_parser_t, event *yaml_event_t, first bool) bool { - if first { - token := peek_token(parser) - if token == nil { - return false - } - parser.marks = append(parser.marks, token.start_mark) - skip_token(parser) - } - - token := peek_token(parser) - if token == nil { - return false - } - - // [Go] A tail comment was left from the prior mapping value processed. Emit an event - // as it needs to be processed with that value and not the following key. - if len(parser.tail_comment) > 0 { - *event = yaml_event_t{ - typ: yaml_TAIL_COMMENT_EVENT, - start_mark: token.start_mark, - end_mark: token.end_mark, - foot_comment: parser.tail_comment, - } - parser.tail_comment = nil - return true - } - - if token.typ == yaml_KEY_TOKEN { - mark := token.end_mark - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - if token.typ != yaml_KEY_TOKEN && - token.typ != yaml_VALUE_TOKEN && - token.typ != yaml_BLOCK_END_TOKEN { - parser.states = append(parser.states, yaml_PARSE_BLOCK_MAPPING_VALUE_STATE) - return yaml_parser_parse_node(parser, event, true, true) - } else { - parser.state = yaml_PARSE_BLOCK_MAPPING_VALUE_STATE - return yaml_parser_process_empty_scalar(parser, event, mark) - } - } else if token.typ == yaml_BLOCK_END_TOKEN { - parser.state = parser.states[len(parser.states)-1] - parser.states = parser.states[:len(parser.states)-1] - parser.marks = parser.marks[:len(parser.marks)-1] - *event = yaml_event_t{ - typ: yaml_MAPPING_END_EVENT, - start_mark: token.start_mark, - end_mark: token.end_mark, - } - yaml_parser_set_event_comments(parser, event) - skip_token(parser) - return true - } - - context_mark := parser.marks[len(parser.marks)-1] - parser.marks = parser.marks[:len(parser.marks)-1] - return yaml_parser_set_parser_error_context(parser, - "while parsing a block mapping", context_mark, - "did not find expected key", token.start_mark) -} - -// Parse the productions: -// block_mapping ::= BLOCK-MAPPING_START -// -// ((KEY block_node_or_indentless_sequence?)? 
-// -// (VALUE block_node_or_indentless_sequence?)?)* -// ***** * -// BLOCK-END -func yaml_parser_parse_block_mapping_value(parser *yaml_parser_t, event *yaml_event_t) bool { - token := peek_token(parser) - if token == nil { - return false - } - if token.typ == yaml_VALUE_TOKEN { - mark := token.end_mark - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - if token.typ != yaml_KEY_TOKEN && - token.typ != yaml_VALUE_TOKEN && - token.typ != yaml_BLOCK_END_TOKEN { - parser.states = append(parser.states, yaml_PARSE_BLOCK_MAPPING_KEY_STATE) - return yaml_parser_parse_node(parser, event, true, true) - } - parser.state = yaml_PARSE_BLOCK_MAPPING_KEY_STATE - return yaml_parser_process_empty_scalar(parser, event, mark) - } - parser.state = yaml_PARSE_BLOCK_MAPPING_KEY_STATE - return yaml_parser_process_empty_scalar(parser, event, token.start_mark) -} - -// Parse the productions: -// flow_sequence ::= FLOW-SEQUENCE-START -// -// ******************* -// (flow_sequence_entry FLOW-ENTRY)* -// * ********** -// flow_sequence_entry? -// * -// FLOW-SEQUENCE-END -// ***************** -// -// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? -// -// * -func yaml_parser_parse_flow_sequence_entry(parser *yaml_parser_t, event *yaml_event_t, first bool) bool { - if first { - token := peek_token(parser) - if token == nil { - return false - } - parser.marks = append(parser.marks, token.start_mark) - skip_token(parser) - } - token := peek_token(parser) - if token == nil { - return false - } - if token.typ != yaml_FLOW_SEQUENCE_END_TOKEN { - if !first { - if token.typ == yaml_FLOW_ENTRY_TOKEN { - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - } else { - context_mark := parser.marks[len(parser.marks)-1] - parser.marks = parser.marks[:len(parser.marks)-1] - return yaml_parser_set_parser_error_context(parser, - "while parsing a flow sequence", context_mark, - "did not find expected ',' or ']'", token.start_mark) - } - } - - if token.typ == yaml_KEY_TOKEN { - parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE - *event = yaml_event_t{ - typ: yaml_MAPPING_START_EVENT, - start_mark: token.start_mark, - end_mark: token.end_mark, - implicit: true, - style: yaml_style_t(yaml_FLOW_MAPPING_STYLE), - } - skip_token(parser) - return true - } else if token.typ != yaml_FLOW_SEQUENCE_END_TOKEN { - parser.states = append(parser.states, yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE) - return yaml_parser_parse_node(parser, event, false, false) - } - } - - parser.state = parser.states[len(parser.states)-1] - parser.states = parser.states[:len(parser.states)-1] - parser.marks = parser.marks[:len(parser.marks)-1] - - *event = yaml_event_t{ - typ: yaml_SEQUENCE_END_EVENT, - start_mark: token.start_mark, - end_mark: token.end_mark, - } - yaml_parser_set_event_comments(parser, event) - - skip_token(parser) - return true -} - -// Parse the productions: -// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? 
-// -// *** * -func yaml_parser_parse_flow_sequence_entry_mapping_key(parser *yaml_parser_t, event *yaml_event_t) bool { - token := peek_token(parser) - if token == nil { - return false - } - if token.typ != yaml_VALUE_TOKEN && - token.typ != yaml_FLOW_ENTRY_TOKEN && - token.typ != yaml_FLOW_SEQUENCE_END_TOKEN { - parser.states = append(parser.states, yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE) - return yaml_parser_parse_node(parser, event, false, false) - } - mark := token.end_mark - skip_token(parser) - parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE - return yaml_parser_process_empty_scalar(parser, event, mark) -} - -// Parse the productions: -// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? -// -// ***** * -func yaml_parser_parse_flow_sequence_entry_mapping_value(parser *yaml_parser_t, event *yaml_event_t) bool { - token := peek_token(parser) - if token == nil { - return false - } - if token.typ == yaml_VALUE_TOKEN { - skip_token(parser) - token := peek_token(parser) - if token == nil { - return false - } - if token.typ != yaml_FLOW_ENTRY_TOKEN && token.typ != yaml_FLOW_SEQUENCE_END_TOKEN { - parser.states = append(parser.states, yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE) - return yaml_parser_parse_node(parser, event, false, false) - } - } - parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE - return yaml_parser_process_empty_scalar(parser, event, token.start_mark) -} - -// Parse the productions: -// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? -// -// * -func yaml_parser_parse_flow_sequence_entry_mapping_end(parser *yaml_parser_t, event *yaml_event_t) bool { - token := peek_token(parser) - if token == nil { - return false - } - parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE - *event = yaml_event_t{ - typ: yaml_MAPPING_END_EVENT, - start_mark: token.start_mark, - end_mark: token.start_mark, // [Go] Shouldn't this be end_mark? - } - return true -} - -// Parse the productions: -// flow_mapping ::= FLOW-MAPPING-START -// -// ****************** -// (flow_mapping_entry FLOW-ENTRY)* -// * ********** -// flow_mapping_entry? -// ****************** -// FLOW-MAPPING-END -// **************** -// -// flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? 
-// - *** * -func yaml_parser_parse_flow_mapping_key(parser *yaml_parser_t, event *yaml_event_t, first bool) bool { - if first { - token := peek_token(parser) - parser.marks = append(parser.marks, token.start_mark) - skip_token(parser) - } - - token := peek_token(parser) - if token == nil { - return false - } - - if token.typ != yaml_FLOW_MAPPING_END_TOKEN { - if !first { - if token.typ == yaml_FLOW_ENTRY_TOKEN { - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - } else { - context_mark := parser.marks[len(parser.marks)-1] - parser.marks = parser.marks[:len(parser.marks)-1] - return yaml_parser_set_parser_error_context(parser, - "while parsing a flow mapping", context_mark, - "did not find expected ',' or '}'", token.start_mark) - } - } - - if token.typ == yaml_KEY_TOKEN { - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - if token.typ != yaml_VALUE_TOKEN && - token.typ != yaml_FLOW_ENTRY_TOKEN && - token.typ != yaml_FLOW_MAPPING_END_TOKEN { - parser.states = append(parser.states, yaml_PARSE_FLOW_MAPPING_VALUE_STATE) - return yaml_parser_parse_node(parser, event, false, false) - } else { - parser.state = yaml_PARSE_FLOW_MAPPING_VALUE_STATE - return yaml_parser_process_empty_scalar(parser, event, token.start_mark) - } - } else if token.typ != yaml_FLOW_MAPPING_END_TOKEN { - parser.states = append(parser.states, yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE) - return yaml_parser_parse_node(parser, event, false, false) - } - } - - parser.state = parser.states[len(parser.states)-1] - parser.states = parser.states[:len(parser.states)-1] - parser.marks = parser.marks[:len(parser.marks)-1] - *event = yaml_event_t{ - typ: yaml_MAPPING_END_EVENT, - start_mark: token.start_mark, - end_mark: token.end_mark, - } - yaml_parser_set_event_comments(parser, event) - skip_token(parser) - return true -} - -// Parse the productions: -// flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? -// - ***** * -func yaml_parser_parse_flow_mapping_value(parser *yaml_parser_t, event *yaml_event_t, empty bool) bool { - token := peek_token(parser) - if token == nil { - return false - } - if empty { - parser.state = yaml_PARSE_FLOW_MAPPING_KEY_STATE - return yaml_parser_process_empty_scalar(parser, event, token.start_mark) - } - if token.typ == yaml_VALUE_TOKEN { - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - if token.typ != yaml_FLOW_ENTRY_TOKEN && token.typ != yaml_FLOW_MAPPING_END_TOKEN { - parser.states = append(parser.states, yaml_PARSE_FLOW_MAPPING_KEY_STATE) - return yaml_parser_parse_node(parser, event, false, false) - } - } - parser.state = yaml_PARSE_FLOW_MAPPING_KEY_STATE - return yaml_parser_process_empty_scalar(parser, event, token.start_mark) -} - -// Generate an empty scalar event. -func yaml_parser_process_empty_scalar(parser *yaml_parser_t, event *yaml_event_t, mark yaml_mark_t) bool { - *event = yaml_event_t{ - typ: yaml_SCALAR_EVENT, - start_mark: mark, - end_mark: mark, - value: nil, // Empty - implicit: true, - style: yaml_style_t(yaml_PLAIN_SCALAR_STYLE), - } - return true -} - -var default_tag_directives = []yaml_tag_directive_t{ - {[]byte("!"), []byte("!")}, - {[]byte("!!"), []byte("tag:yaml.org,2002:")}, -} - -// Parse directives. 
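yaml_parser_process_empty_scalar above is why an omitted value decodes as a null rather than failing: the parser synthesizes an implicit empty SCALAR event at the mark where the value should have been. A small demonstration, again assuming the upstream gopkg.in/yaml.v3 API:

package main

import (
    "fmt"

    "gopkg.in/yaml.v3"
)

func main() {
    // The missing value for "a" becomes an implicit empty scalar event,
    // which decodes to nil.
    src := "{a: , b: 1}"

    var doc map[string]interface{}
    if err := yaml.Unmarshal([]byte(src), &doc); err != nil {
        panic(err)
    }
    fmt.Println(doc["a"], doc["b"]) // <nil> 1
}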
-func yaml_parser_process_directives(parser *yaml_parser_t, - version_directive_ref **yaml_version_directive_t, - tag_directives_ref *[]yaml_tag_directive_t) bool { - - var version_directive *yaml_version_directive_t - var tag_directives []yaml_tag_directive_t - - token := peek_token(parser) - if token == nil { - return false - } - - for token.typ == yaml_VERSION_DIRECTIVE_TOKEN || token.typ == yaml_TAG_DIRECTIVE_TOKEN { - if token.typ == yaml_VERSION_DIRECTIVE_TOKEN { - if version_directive != nil { - yaml_parser_set_parser_error(parser, - "found duplicate %YAML directive", token.start_mark) - return false - } - if token.major != 1 || token.minor != 1 { - yaml_parser_set_parser_error(parser, - "found incompatible YAML document", token.start_mark) - return false - } - version_directive = &yaml_version_directive_t{ - major: token.major, - minor: token.minor, - } - } else if token.typ == yaml_TAG_DIRECTIVE_TOKEN { - value := yaml_tag_directive_t{ - handle: token.value, - prefix: token.prefix, - } - if !yaml_parser_append_tag_directive(parser, value, false, token.start_mark) { - return false - } - tag_directives = append(tag_directives, value) - } - - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - } - - for i := range default_tag_directives { - if !yaml_parser_append_tag_directive(parser, default_tag_directives[i], true, token.start_mark) { - return false - } - } - - if version_directive_ref != nil { - *version_directive_ref = version_directive - } - if tag_directives_ref != nil { - *tag_directives_ref = tag_directives - } - return true -} - -// Append a tag directive to the directives stack. -func yaml_parser_append_tag_directive(parser *yaml_parser_t, value yaml_tag_directive_t, allow_duplicates bool, mark yaml_mark_t) bool { - for i := range parser.tag_directives { - if bytes.Equal(value.handle, parser.tag_directives[i].handle) { - if allow_duplicates { - return true - } - return yaml_parser_set_parser_error(parser, "found duplicate %TAG directive", mark) - } - } - - // [Go] I suspect the copy is unnecessary. This was likely done - // because there was no way to track ownership of the data. - value_copy := yaml_tag_directive_t{ - handle: make([]byte, len(value.handle)), - prefix: make([]byte, len(value.prefix)), - } - copy(value_copy.handle, value.handle) - copy(value_copy.prefix, value.prefix) - parser.tag_directives = append(parser.tag_directives, value_copy) - return true -} diff --git a/cli/internal/yaml/readerc.go b/cli/internal/yaml/readerc.go deleted file mode 100644 index 56af245366f28..0000000000000 --- a/cli/internal/yaml/readerc.go +++ /dev/null @@ -1,434 +0,0 @@ -// -// Copyright (c) 2011-2019 Canonical Ltd -// Copyright (c) 2006-2010 Kirill Simonov -// -// Permission is hereby granted, free of charge, to any person obtaining a copy of -// this software and associated documentation files (the "Software"), to deal in -// the Software without restriction, including without limitation the rights to -// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -// of the Software, and to permit persons to whom the Software is furnished to do -// so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in all -// copies or substantial portions of the Software. 
-// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -// SOFTWARE. - -package yaml - -import ( - "io" -) - -// Set the reader error and return 0. -func yaml_parser_set_reader_error(parser *yaml_parser_t, problem string, offset int, value int) bool { - parser.error = yaml_READER_ERROR - parser.problem = problem - parser.problem_offset = offset - parser.problem_value = value - return false -} - -// Byte order marks. -const ( - bom_UTF8 = "\xef\xbb\xbf" - bom_UTF16LE = "\xff\xfe" - bom_UTF16BE = "\xfe\xff" -) - -// Determine the input stream encoding by checking the BOM symbol. If no BOM is -// found, the UTF-8 encoding is assumed. Return 1 on success, 0 on failure. -func yaml_parser_determine_encoding(parser *yaml_parser_t) bool { - // Ensure that we had enough bytes in the raw buffer. - for !parser.eof && len(parser.raw_buffer)-parser.raw_buffer_pos < 3 { - if !yaml_parser_update_raw_buffer(parser) { - return false - } - } - - // Determine the encoding. - buf := parser.raw_buffer - pos := parser.raw_buffer_pos - avail := len(buf) - pos - if avail >= 2 && buf[pos] == bom_UTF16LE[0] && buf[pos+1] == bom_UTF16LE[1] { - parser.encoding = yaml_UTF16LE_ENCODING - parser.raw_buffer_pos += 2 - parser.offset += 2 - } else if avail >= 2 && buf[pos] == bom_UTF16BE[0] && buf[pos+1] == bom_UTF16BE[1] { - parser.encoding = yaml_UTF16BE_ENCODING - parser.raw_buffer_pos += 2 - parser.offset += 2 - } else if avail >= 3 && buf[pos] == bom_UTF8[0] && buf[pos+1] == bom_UTF8[1] && buf[pos+2] == bom_UTF8[2] { - parser.encoding = yaml_UTF8_ENCODING - parser.raw_buffer_pos += 3 - parser.offset += 3 - } else { - parser.encoding = yaml_UTF8_ENCODING - } - return true -} - -// Update the raw buffer. -func yaml_parser_update_raw_buffer(parser *yaml_parser_t) bool { - size_read := 0 - - // Return if the raw buffer is full. - if parser.raw_buffer_pos == 0 && len(parser.raw_buffer) == cap(parser.raw_buffer) { - return true - } - - // Return on EOF. - if parser.eof { - return true - } - - // Move the remaining bytes in the raw buffer to the beginning. - if parser.raw_buffer_pos > 0 && parser.raw_buffer_pos < len(parser.raw_buffer) { - copy(parser.raw_buffer, parser.raw_buffer[parser.raw_buffer_pos:]) - } - parser.raw_buffer = parser.raw_buffer[:len(parser.raw_buffer)-parser.raw_buffer_pos] - parser.raw_buffer_pos = 0 - - // Call the read handler to fill the buffer. - size_read, err := parser.read_handler(parser, parser.raw_buffer[len(parser.raw_buffer):cap(parser.raw_buffer)]) - parser.raw_buffer = parser.raw_buffer[:len(parser.raw_buffer)+size_read] - if err == io.EOF { - parser.eof = true - } else if err != nil { - return yaml_parser_set_reader_error(parser, "input error: "+err.Error(), parser.offset, -1) - } - return true -} - -// Ensure that the buffer contains at least `length` characters. -// Return true on success, false on failure. -// -// The length is supposed to be significantly less that the buffer size. 
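The BOM sniffing in yaml_parser_determine_encoding above reduces to three prefix checks with a UTF-8 fallback. A standalone sketch (the function name and the string return values are illustrative, not part of this file):

package main

import (
    "bytes"
    "fmt"
)

// detectEncoding mirrors yaml_parser_determine_encoding: sniff a BOM and
// fall back to UTF-8 when none is present, reporting how many BOM bytes
// to skip.
func detectEncoding(data []byte) (encoding string, bomLen int) {
    switch {
    case bytes.HasPrefix(data, []byte("\xef\xbb\xbf")):
        return "UTF-8", 3
    case bytes.HasPrefix(data, []byte("\xff\xfe")):
        return "UTF-16LE", 2
    case bytes.HasPrefix(data, []byte("\xfe\xff")):
        return "UTF-16BE", 2
    }
    return "UTF-8", 0 // no BOM: assume UTF-8, consume nothing
}

func main() {
    enc, n := detectEncoding([]byte("\xff\xfek"))
    fmt.Println(enc, n) // UTF-16LE 2
}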
-func yaml_parser_update_buffer(parser *yaml_parser_t, length int) bool { - if parser.read_handler == nil { - panic("read handler must be set") - } - - // [Go] This function was changed to guarantee the requested length size at EOF. - // The fact we need to do this is pretty awful, but the description above implies - // for that to be the case, and there are tests - - // If the EOF flag is set and the raw buffer is empty, do nothing. - if parser.eof && parser.raw_buffer_pos == len(parser.raw_buffer) { - // [Go] ACTUALLY! Read the documentation of this function above. - // This is just broken. To return true, we need to have the - // given length in the buffer. Not doing that means every single - // check that calls this function to make sure the buffer has a - // given length is Go) panicking; or C) accessing invalid memory. - //return true - } - - // Return if the buffer contains enough characters. - if parser.unread >= length { - return true - } - - // Determine the input encoding if it is not known yet. - if parser.encoding == yaml_ANY_ENCODING { - if !yaml_parser_determine_encoding(parser) { - return false - } - } - - // Move the unread characters to the beginning of the buffer. - buffer_len := len(parser.buffer) - if parser.buffer_pos > 0 && parser.buffer_pos < buffer_len { - copy(parser.buffer, parser.buffer[parser.buffer_pos:]) - buffer_len -= parser.buffer_pos - parser.buffer_pos = 0 - } else if parser.buffer_pos == buffer_len { - buffer_len = 0 - parser.buffer_pos = 0 - } - - // Open the whole buffer for writing, and cut it before returning. - parser.buffer = parser.buffer[:cap(parser.buffer)] - - // Fill the buffer until it has enough characters. - first := true - for parser.unread < length { - - // Fill the raw buffer if necessary. - if !first || parser.raw_buffer_pos == len(parser.raw_buffer) { - if !yaml_parser_update_raw_buffer(parser) { - parser.buffer = parser.buffer[:buffer_len] - return false - } - } - first = false - - // Decode the raw buffer. - inner: - for parser.raw_buffer_pos != len(parser.raw_buffer) { - var value rune - var width int - - raw_unread := len(parser.raw_buffer) - parser.raw_buffer_pos - - // Decode the next character. - switch parser.encoding { - case yaml_UTF8_ENCODING: - // Decode a UTF-8 character. Check RFC 3629 - // (http://www.ietf.org/rfc/rfc3629.txt) for more details. - // - // The following table (taken from the RFC) is used for - // decoding. - // - // Char. number range | UTF-8 octet sequence - // (hexadecimal) | (binary) - // --------------------+------------------------------------ - // 0000 0000-0000 007F | 0xxxxxxx - // 0000 0080-0000 07FF | 110xxxxx 10xxxxxx - // 0000 0800-0000 FFFF | 1110xxxx 10xxxxxx 10xxxxxx - // 0001 0000-0010 FFFF | 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx - // - // Additionally, the characters in the range 0xD800-0xDFFF - // are prohibited as they are reserved for use with UTF-16 - // surrogate pairs. - - // Determine the length of the UTF-8 sequence. - octet := parser.raw_buffer[parser.raw_buffer_pos] - switch { - case octet&0x80 == 0x00: - width = 1 - case octet&0xE0 == 0xC0: - width = 2 - case octet&0xF0 == 0xE0: - width = 3 - case octet&0xF8 == 0xF0: - width = 4 - default: - // The leading octet is invalid. - return yaml_parser_set_reader_error(parser, - "invalid leading UTF-8 octet", - parser.offset, int(octet)) - } - - // Check if the raw buffer contains an incomplete character. 
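The leading-octet dispatch above encodes the RFC 3629 length table. As a standalone helper (the name is illustrative):

package main

import "fmt"

// utf8Width mirrors the table above: the pattern of high bits in the
// first byte determines the total sequence length. It returns 0 for a
// byte that cannot start a sequence.
func utf8Width(octet byte) int {
    switch {
    case octet&0x80 == 0x00:
        return 1 // 0xxxxxxx
    case octet&0xE0 == 0xC0:
        return 2 // 110xxxxx
    case octet&0xF0 == 0xE0:
        return 3 // 1110xxxx
    case octet&0xF8 == 0xF0:
        return 4 // 11110xxx
    }
    return 0 // continuation bytes (10xxxxxx) and 0xF8+ are invalid leads
}

func main() {
    fmt.Println(utf8Width('a'), utf8Width(0xC3), utf8Width(0xE2), utf8Width(0xF0)) // 1 2 3 4
}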
- if width > raw_unread { - if parser.eof { - return yaml_parser_set_reader_error(parser, - "incomplete UTF-8 octet sequence", - parser.offset, -1) - } - break inner - } - - // Decode the leading octet. - switch { - case octet&0x80 == 0x00: - value = rune(octet & 0x7F) - case octet&0xE0 == 0xC0: - value = rune(octet & 0x1F) - case octet&0xF0 == 0xE0: - value = rune(octet & 0x0F) - case octet&0xF8 == 0xF0: - value = rune(octet & 0x07) - default: - value = 0 - } - - // Check and decode the trailing octets. - for k := 1; k < width; k++ { - octet = parser.raw_buffer[parser.raw_buffer_pos+k] - - // Check if the octet is valid. - if (octet & 0xC0) != 0x80 { - return yaml_parser_set_reader_error(parser, - "invalid trailing UTF-8 octet", - parser.offset+k, int(octet)) - } - - // Decode the octet. - value = (value << 6) + rune(octet&0x3F) - } - - // Check the length of the sequence against the value. - switch { - case width == 1: - case width == 2 && value >= 0x80: - case width == 3 && value >= 0x800: - case width == 4 && value >= 0x10000: - default: - return yaml_parser_set_reader_error(parser, - "invalid length of a UTF-8 sequence", - parser.offset, -1) - } - - // Check the range of the value. - if value >= 0xD800 && value <= 0xDFFF || value > 0x10FFFF { - return yaml_parser_set_reader_error(parser, - "invalid Unicode character", - parser.offset, int(value)) - } - - case yaml_UTF16LE_ENCODING, yaml_UTF16BE_ENCODING: - var low, high int - if parser.encoding == yaml_UTF16LE_ENCODING { - low, high = 0, 1 - } else { - low, high = 1, 0 - } - - // The UTF-16 encoding is not as simple as one might - // naively think. Check RFC 2781 - // (http://www.ietf.org/rfc/rfc2781.txt). - // - // Normally, two subsequent bytes describe a Unicode - // character. However a special technique (called a - // surrogate pair) is used for specifying character - // values larger than 0xFFFF. - // - // A surrogate pair consists of two pseudo-characters: - // high surrogate area (0xD800-0xDBFF) - // low surrogate area (0xDC00-0xDFFF) - // - // The following formulas are used for decoding - // and encoding characters using surrogate pairs: - // - // U = U' + 0x10000 (0x01 00 00 <= U <= 0x10 FF FF) - // U' = yyyyyyyyyyxxxxxxxxxx (0 <= U' <= 0x0F FF FF) - // W1 = 110110yyyyyyyyyy - // W2 = 110111xxxxxxxxxx - // - // where U is the character value, W1 is the high surrogate - // area, W2 is the low surrogate area. - - // Check for incomplete UTF-16 character. - if raw_unread < 2 { - if parser.eof { - return yaml_parser_set_reader_error(parser, - "incomplete UTF-16 character", - parser.offset, -1) - } - break inner - } - - // Get the character. - value = rune(parser.raw_buffer[parser.raw_buffer_pos+low]) + - (rune(parser.raw_buffer[parser.raw_buffer_pos+high]) << 8) - - // Check for unexpected low surrogate area. - if value&0xFC00 == 0xDC00 { - return yaml_parser_set_reader_error(parser, - "unexpected low surrogate area", - parser.offset, int(value)) - } - - // Check for a high surrogate area. - if value&0xFC00 == 0xD800 { - width = 4 - - // Check for incomplete surrogate pair. - if raw_unread < 4 { - if parser.eof { - return yaml_parser_set_reader_error(parser, - "incomplete UTF-16 surrogate pair", - parser.offset, -1) - } - break inner - } - - // Get the next character. - value2 := rune(parser.raw_buffer[parser.raw_buffer_pos+low+2]) + - (rune(parser.raw_buffer[parser.raw_buffer_pos+high+2]) << 8) - - // Check for a low surrogate area. 
- if value2&0xFC00 != 0xDC00 { - return yaml_parser_set_reader_error(parser, - "expected low surrogate area", - parser.offset+2, int(value2)) - } - - // Generate the value of the surrogate pair. - value = 0x10000 + ((value & 0x3FF) << 10) + (value2 & 0x3FF) - } else { - width = 2 - } - - default: - panic("impossible") - } - - // Check if the character is in the allowed range: - // #x9 | #xA | #xD | [#x20-#x7E] (8 bit) - // | #x85 | [#xA0-#xD7FF] | [#xE000-#xFFFD] (16 bit) - // | [#x10000-#x10FFFF] (32 bit) - switch { - case value == 0x09: - case value == 0x0A: - case value == 0x0D: - case value >= 0x20 && value <= 0x7E: - case value == 0x85: - case value >= 0xA0 && value <= 0xD7FF: - case value >= 0xE000 && value <= 0xFFFD: - case value >= 0x10000 && value <= 0x10FFFF: - default: - return yaml_parser_set_reader_error(parser, - "control characters are not allowed", - parser.offset, int(value)) - } - - // Move the raw pointers. - parser.raw_buffer_pos += width - parser.offset += width - - // Finally put the character into the buffer. - if value <= 0x7F { - // 0000 0000-0000 007F . 0xxxxxxx - parser.buffer[buffer_len+0] = byte(value) - buffer_len += 1 - } else if value <= 0x7FF { - // 0000 0080-0000 07FF . 110xxxxx 10xxxxxx - parser.buffer[buffer_len+0] = byte(0xC0 + (value >> 6)) - parser.buffer[buffer_len+1] = byte(0x80 + (value & 0x3F)) - buffer_len += 2 - } else if value <= 0xFFFF { - // 0000 0800-0000 FFFF . 1110xxxx 10xxxxxx 10xxxxxx - parser.buffer[buffer_len+0] = byte(0xE0 + (value >> 12)) - parser.buffer[buffer_len+1] = byte(0x80 + ((value >> 6) & 0x3F)) - parser.buffer[buffer_len+2] = byte(0x80 + (value & 0x3F)) - buffer_len += 3 - } else { - // 0001 0000-0010 FFFF . 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx - parser.buffer[buffer_len+0] = byte(0xF0 + (value >> 18)) - parser.buffer[buffer_len+1] = byte(0x80 + ((value >> 12) & 0x3F)) - parser.buffer[buffer_len+2] = byte(0x80 + ((value >> 6) & 0x3F)) - parser.buffer[buffer_len+3] = byte(0x80 + (value & 0x3F)) - buffer_len += 4 - } - - parser.unread++ - } - - // On EOF, put NUL into the buffer and return. - if parser.eof { - parser.buffer[buffer_len] = 0 - buffer_len++ - parser.unread++ - break - } - } - // [Go] Read the documentation of this function above. To return true, - // we need to have the given length in the buffer. Not doing that means - // every single check that calls this function to make sure the buffer - // has a given length is Go) panicking; or C) accessing invalid memory. - // This happens here due to the EOF above breaking early. - for buffer_len < length { - parser.buffer[buffer_len] = 0 - buffer_len++ - } - parser.buffer = parser.buffer[:buffer_len] - return true -} diff --git a/cli/internal/yaml/resolve.go b/cli/internal/yaml/resolve.go deleted file mode 100644 index 64ae888057a5a..0000000000000 --- a/cli/internal/yaml/resolve.go +++ /dev/null @@ -1,326 +0,0 @@ -// -// Copyright (c) 2011-2019 Canonical Ltd -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
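The surrogate-pair arithmetic above follows the RFC 2781 formula U = 0x10000 + ((W1 & 0x3FF) << 10) + (W2 & 0x3FF). A self-contained sketch (the helper name is illustrative):

package main

import "fmt"

// decodeSurrogatePair combines a high surrogate (0xD800-0xDBFF) and a
// low surrogate (0xDC00-0xDFFF) into a code point, as done above.
func decodeSurrogatePair(w1, w2 uint16) (rune, bool) {
    if w1&0xFC00 != 0xD800 || w2&0xFC00 != 0xDC00 {
        return 0, false
    }
    return 0x10000 + (rune(w1&0x3FF) << 10) + rune(w2&0x3FF), true
}

func main() {
    // U+1F600 is encoded in UTF-16 as the pair D83D DE00.
    r, ok := decodeSurrogatePair(0xD83D, 0xDE00)
    fmt.Printf("%U %v\n", r, ok) // U+1F600 true
}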
- -package yaml - -import ( - "encoding/base64" - "math" - "regexp" - "strconv" - "strings" - "time" -) - -type resolveMapItem struct { - value interface{} - tag string -} - -var resolveTable = make([]byte, 256) -var resolveMap = make(map[string]resolveMapItem) - -func init() { - t := resolveTable - t[int('+')] = 'S' // Sign - t[int('-')] = 'S' - for _, c := range "0123456789" { - t[int(c)] = 'D' // Digit - } - for _, c := range "yYnNtTfFoO~" { - t[int(c)] = 'M' // In map - } - t[int('.')] = '.' // Float (potentially in map) - - var resolveMapList = []struct { - v interface{} - tag string - l []string - }{ - {true, boolTag, []string{"true", "True", "TRUE"}}, - {false, boolTag, []string{"false", "False", "FALSE"}}, - {nil, nullTag, []string{"", "~", "null", "Null", "NULL"}}, - {math.NaN(), floatTag, []string{".nan", ".NaN", ".NAN"}}, - {math.Inf(+1), floatTag, []string{".inf", ".Inf", ".INF"}}, - {math.Inf(+1), floatTag, []string{"+.inf", "+.Inf", "+.INF"}}, - {math.Inf(-1), floatTag, []string{"-.inf", "-.Inf", "-.INF"}}, - {"<<", mergeTag, []string{"<<"}}, - } - - m := resolveMap - for _, item := range resolveMapList { - for _, s := range item.l { - m[s] = resolveMapItem{item.v, item.tag} - } - } -} - -const ( - nullTag = "!!null" - boolTag = "!!bool" - strTag = "!!str" - intTag = "!!int" - floatTag = "!!float" - timestampTag = "!!timestamp" - seqTag = "!!seq" - mapTag = "!!map" - binaryTag = "!!binary" - mergeTag = "!!merge" -) - -var longTags = make(map[string]string) -var shortTags = make(map[string]string) - -func init() { - for _, stag := range []string{nullTag, boolTag, strTag, intTag, floatTag, timestampTag, seqTag, mapTag, binaryTag, mergeTag} { - ltag := longTag(stag) - longTags[stag] = ltag - shortTags[ltag] = stag - } -} - -const longTagPrefix = "tag:yaml.org,2002:" - -func shortTag(tag string) string { - if strings.HasPrefix(tag, longTagPrefix) { - if stag, ok := shortTags[tag]; ok { - return stag - } - return "!!" + tag[len(longTagPrefix):] - } - return tag -} - -func longTag(tag string) string { - if strings.HasPrefix(tag, "!!") { - if ltag, ok := longTags[tag]; ok { - return ltag - } - return longTagPrefix + tag[2:] - } - return tag -} - -func resolvableTag(tag string) bool { - switch tag { - case "", strTag, boolTag, intTag, floatTag, nullTag, timestampTag: - return true - } - return false -} - -var yamlStyleFloat = regexp.MustCompile(`^[-+]?(\.[0-9]+|[0-9]+(\.[0-9]*)?)([eE][-+]?[0-9]+)?$`) - -func resolve(tag string, in string) (rtag string, out interface{}) { - tag = shortTag(tag) - if !resolvableTag(tag) { - return tag, in - } - - defer func() { - switch tag { - case "", rtag, strTag, binaryTag: - return - case floatTag: - if rtag == intTag { - switch v := out.(type) { - case int64: - rtag = floatTag - out = float64(v) - return - case int: - rtag = floatTag - out = float64(v) - return - } - } - } - failf("cannot decode %s `%s` as a %s", shortTag(rtag), in, shortTag(tag)) - }() - - // Any data is accepted as a !!str or !!binary. - // Otherwise, the prefix is enough of a hint about what it might be. - hint := byte('N') - if in != "" { - hint = resolveTable[in[0]] - } - if hint != 0 && tag != strTag && tag != binaryTag { - // Handle things we can lookup in a map. - if item, ok := resolveMap[in]; ok { - return item.tag, item.value - } - - // Base 60 floats are a bad idea, were dropped in YAML 1.2, and - // are purposefully unsupported here. They're still quoted on - // the way out for compatibility with other parser, though. 
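shortTag and longTag above form a two-way mapping between the '!!' shorthand and the full 'tag:yaml.org,2002:' namespace. A simplified mirror without the caching maps:

package main

import (
    "fmt"
    "strings"
)

const longTagPrefix = "tag:yaml.org,2002:"

// Pared-down versions of shortTag/longTag above; the real code also
// consults the longTags/shortTags maps populated in init.
func shortTag(tag string) string {
    if strings.HasPrefix(tag, longTagPrefix) {
        return "!!" + tag[len(longTagPrefix):]
    }
    return tag
}

func longTag(tag string) string {
    if strings.HasPrefix(tag, "!!") {
        return longTagPrefix + tag[2:]
    }
    return tag
}

func main() {
    fmt.Println(longTag("!!str"))                  // tag:yaml.org,2002:str
    fmt.Println(shortTag("tag:yaml.org,2002:int")) // !!int
}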
- - switch hint { - case 'M': - // We've already checked the map above. - - case '.': - // Not in the map, so maybe a normal float. - floatv, err := strconv.ParseFloat(in, 64) - if err == nil { - return floatTag, floatv - } - - case 'D', 'S': - // Int, float, or timestamp. - // Only try values as a timestamp if the value is unquoted or there's an explicit - // !!timestamp tag. - if tag == "" || tag == timestampTag { - t, ok := parseTimestamp(in) - if ok { - return timestampTag, t - } - } - - plain := strings.Replace(in, "_", "", -1) - intv, err := strconv.ParseInt(plain, 0, 64) - if err == nil { - if intv == int64(int(intv)) { - return intTag, int(intv) - } else { - return intTag, intv - } - } - uintv, err := strconv.ParseUint(plain, 0, 64) - if err == nil { - return intTag, uintv - } - if yamlStyleFloat.MatchString(plain) { - floatv, err := strconv.ParseFloat(plain, 64) - if err == nil { - return floatTag, floatv - } - } - if strings.HasPrefix(plain, "0b") { - intv, err := strconv.ParseInt(plain[2:], 2, 64) - if err == nil { - if intv == int64(int(intv)) { - return intTag, int(intv) - } else { - return intTag, intv - } - } - uintv, err := strconv.ParseUint(plain[2:], 2, 64) - if err == nil { - return intTag, uintv - } - } else if strings.HasPrefix(plain, "-0b") { - intv, err := strconv.ParseInt("-"+plain[3:], 2, 64) - if err == nil { - if true || intv == int64(int(intv)) { - return intTag, int(intv) - } else { - return intTag, intv - } - } - } - // Octals as introduced in version 1.2 of the spec. - // Octals from the 1.1 spec, spelled as 0777, are still - // decoded by default in v3 as well for compatibility. - // May be dropped in v4 depending on how usage evolves. - if strings.HasPrefix(plain, "0o") { - intv, err := strconv.ParseInt(plain[2:], 8, 64) - if err == nil { - if intv == int64(int(intv)) { - return intTag, int(intv) - } else { - return intTag, intv - } - } - uintv, err := strconv.ParseUint(plain[2:], 8, 64) - if err == nil { - return intTag, uintv - } - } else if strings.HasPrefix(plain, "-0o") { - intv, err := strconv.ParseInt("-"+plain[3:], 8, 64) - if err == nil { - if true || intv == int64(int(intv)) { - return intTag, int(intv) - } else { - return intTag, intv - } - } - } - default: - panic("internal error: missing handler for resolver table: " + string(rune(hint)) + " (with " + in + ")") - } - } - return strTag, in -} - -// encodeBase64 encodes s as base64 that is broken up into multiple lines -// as appropriate for the resulting length. -func encodeBase64(s string) string { - const lineLen = 70 - encLen := base64.StdEncoding.EncodedLen(len(s)) - lines := encLen/lineLen + 1 - buf := make([]byte, encLen*2+lines) - in := buf[0:encLen] - out := buf[encLen:] - base64.StdEncoding.Encode(in, []byte(s)) - k := 0 - for i := 0; i < len(in); i += lineLen { - j := i + lineLen - if j > len(in) { - j = len(in) - } - k += copy(out[k:], in[i:j]) - if lines > 1 { - out[k] = '\n' - k++ - } - } - return string(out[:k]) -} - -// This is a subset of the formats allowed by the regular expression -// defined at http://yaml.org/type/timestamp.html. -var allowedTimestampFormats = []string{ - "2006-1-2T15:4:5.999999999Z07:00", // RCF3339Nano with short date fields. - "2006-1-2t15:4:5.999999999Z07:00", // RFC3339Nano with short date fields and lower-case "t". - "2006-1-2 15:4:5.999999999", // space separated with no time zone - "2006-1-2", // date only - // Notable exception: time.Parse cannot handle: "2001-12-14 21:59:43.10 -5" - // from the set of examples. 
-} - -// parseTimestamp parses s as a timestamp string and -// returns the timestamp, reporting whether it succeeded. -// Timestamp formats are defined at http://yaml.org/type/timestamp.html -func parseTimestamp(s string) (time.Time, bool) { - // TODO write code to check all the formats supported by - // http://yaml.org/type/timestamp.html instead of using time.Parse. - - // Quick check: all date formats start with YYYY-. - i := 0 - for ; i < len(s); i++ { - if c := s[i]; c < '0' || c > '9' { - break - } - } - if i != 4 || i == len(s) || s[i] != '-' { - return time.Time{}, false - } - for _, format := range allowedTimestampFormats { - if t, err := time.Parse(format, s); err == nil { - return t, true - } - } - return time.Time{}, false -} diff --git a/cli/internal/yaml/scannerc.go b/cli/internal/yaml/scannerc.go deleted file mode 100644 index 87e46ef70cbdd..0000000000000 --- a/cli/internal/yaml/scannerc.go +++ /dev/null @@ -1,3040 +0,0 @@ -// -// Copyright (c) 2011-2019 Canonical Ltd -// Copyright (c) 2006-2010 Kirill Simonov -// -// Permission is hereby granted, free of charge, to any person obtaining a copy of -// this software and associated documentation files (the "Software"), to deal in -// the Software without restriction, including without limitation the rights to -// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -// of the Software, and to permit persons to whom the Software is furnished to do -// so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in all -// copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -// SOFTWARE. - -package yaml - -import ( - "bytes" - "fmt" -) - -// Introduction -// ************ -// -// The following notes assume that you are familiar with the YAML specification -// (http://yaml.org/spec/1.2/spec.html). We mostly follow it, although in -// some cases we are less restrictive than it requires. -// -// The process of transforming a YAML stream into a sequence of events is -// divided into two steps: Scanning and Parsing. -// -// The Scanner transforms the input stream into a sequence of tokens, while the -// parser transforms the sequence of tokens produced by the Scanner into a -// sequence of parsing events. -// -// The Scanner is rather clever and complicated. The Parser, on the contrary, -// is a straightforward implementation of a recursive descent parser (or an -// LL(1) parser, as it is usually called). -// -// Actually, there are two aspects of Scanning that might be called "clever"; the -// rest is quite straightforward. They are "block collection start" and -// "simple keys". Both are explained below in detail. -// -// Here the Scanning step is explained and implemented. We start with the list -// of all the tokens produced by the Scanner together with short descriptions. -// -// Now, tokens: -// -// STREAM-START(encoding) # The stream start. -// STREAM-END # The stream end. -// VERSION-DIRECTIVE(major,minor) # The '%YAML' directive.
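As a quick check of the parseTimestamp logic above: time.Parse accepts the short-field layouts from allowedTimestampFormats directly, so a usage sketch looks like this:

package main

import (
    "fmt"
    "time"
)

func main() {
    // Same layouts as allowedTimestampFormats above; single-digit date
    // fields are tolerated because the layouts use short fields.
    layouts := []string{
        "2006-1-2T15:4:5.999999999Z07:00",
        "2006-1-2t15:4:5.999999999Z07:00",
        "2006-1-2 15:4:5.999999999",
        "2006-1-2",
    }
    for _, in := range []string{"2001-12-14", "2001-12-14 21:59:43.1"} {
        for _, layout := range layouts {
            if t, err := time.Parse(layout, in); err == nil {
                fmt.Println(in, "->", t.UTC())
                break
            }
        }
    }
}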
-// TAG-DIRECTIVE(handle,prefix) # The '%TAG' directive. -// DOCUMENT-START # '---' -// DOCUMENT-END # '...' -// BLOCK-SEQUENCE-START # Indentation increase denoting a block -// BLOCK-MAPPING-START # sequence or a block mapping. -// BLOCK-END # Indentation decrease. -// FLOW-SEQUENCE-START # '[' -// FLOW-SEQUENCE-END # ']' -// FLOW-MAPPING-START # '{' -// FLOW-MAPPING-END # '}' -// BLOCK-ENTRY # '-' -// FLOW-ENTRY # ',' -// KEY # '?' or nothing (simple keys). -// VALUE # ':' -// ALIAS(anchor) # '*anchor' -// ANCHOR(anchor) # '&anchor' -// TAG(handle,suffix) # '!handle!suffix' -// SCALAR(value,style) # A scalar. -// -// The following two tokens are "virtual" tokens denoting the beginning and the -// end of the stream: -// -// STREAM-START(encoding) -// STREAM-END -// -// We pass the information about the input stream encoding with the -// STREAM-START token. -// -// The next two tokens are responsible for directives: -// -// VERSION-DIRECTIVE(major,minor) -// TAG-DIRECTIVE(handle,prefix) -// -// Example: -// -// %YAML 1.1 -// %TAG ! !foo -// %TAG !yaml! tag:yaml.org,2002: -// --- -// -// The corresponding sequence of tokens: -// -// STREAM-START(utf-8) -// VERSION-DIRECTIVE(1,1) -// TAG-DIRECTIVE("!","!foo") -// TAG-DIRECTIVE("!yaml","tag:yaml.org,2002:") -// DOCUMENT-START -// STREAM-END -// -// Note that the VERSION-DIRECTIVE and TAG-DIRECTIVE tokens occupy a whole -// line. -// -// The document start and end indicators are represented by: -// -// DOCUMENT-START -// DOCUMENT-END -// -// Note that if a YAML stream contains an implicit document (without '---' -// and '...' indicators), no DOCUMENT-START and DOCUMENT-END tokens will be -// produced. -// -// In the following examples, we present whole documents together with the -// produced tokens. -// -// 1. An implicit document: -// -// 'a scalar' -// -// Tokens: -// -// STREAM-START(utf-8) -// SCALAR("a scalar",single-quoted) -// STREAM-END -// -// 2. An explicit document: -// -// --- -// 'a scalar' -// ... -// -// Tokens: -// -// STREAM-START(utf-8) -// DOCUMENT-START -// SCALAR("a scalar",single-quoted) -// DOCUMENT-END -// STREAM-END -// -// 3. Several documents in a stream: -// -// 'a scalar' -// --- -// 'another scalar' -// --- -// 'yet another scalar' -// -// Tokens: -// -// STREAM-START(utf-8) -// SCALAR("a scalar",single-quoted) -// DOCUMENT-START -// SCALAR("another scalar",single-quoted) -// DOCUMENT-START -// SCALAR("yet another scalar",single-quoted) -// STREAM-END -// -// We have already introduced the SCALAR token above. The following tokens are -// used to describe aliases, anchors, tags, and scalars: -// -// ALIAS(anchor) -// ANCHOR(anchor) -// TAG(handle,suffix) -// SCALAR(value,style) -// -// The following series of examples illustrates the usage of these tokens: -// -// 1. A recursive sequence: -// -// &A [ *A ] -// -// Tokens: -// -// STREAM-START(utf-8) -// ANCHOR("A") -// FLOW-SEQUENCE-START -// ALIAS("A") -// FLOW-SEQUENCE-END -// STREAM-END -// -// 2. A tagged scalar: -// -// !!float "3.14" # A good approximation. -// -// Tokens: -// -// STREAM-START(utf-8) -// TAG("!!","float") -// SCALAR("3.14",double-quoted) -// STREAM-END -// -// 3. Various scalar styles: -// -// --- # Implicit empty plain scalars do not produce tokens.
-// --- a plain scalar -// --- 'a single-quoted scalar' -// --- "a double-quoted scalar" -// --- |- -// a literal scalar -// --- >- -// a folded -// scalar -// -// Tokens: -// -// STREAM-START(utf-8) -// DOCUMENT-START -// DOCUMENT-START -// SCALAR("a plain scalar",plain) -// DOCUMENT-START -// SCALAR("a single-quoted scalar",single-quoted) -// DOCUMENT-START -// SCALAR("a double-quoted scalar",double-quoted) -// DOCUMENT-START -// SCALAR("a literal scalar",literal) -// DOCUMENT-START -// SCALAR("a folded scalar",folded) -// STREAM-END -// -// Now it's time to review collection-related tokens. We will start with -// flow collections: -// -// FLOW-SEQUENCE-START -// FLOW-SEQUENCE-END -// FLOW-MAPPING-START -// FLOW-MAPPING-END -// FLOW-ENTRY -// KEY -// VALUE -// -// The tokens FLOW-SEQUENCE-START, FLOW-SEQUENCE-END, FLOW-MAPPING-START, and -// FLOW-MAPPING-END represent the indicators '[', ']', '{', and '}' -// respectively. FLOW-ENTRY represents the ',' indicator. Finally, the -// indicators '?' and ':', which are used for denoting mapping keys and values, -// are represented by the KEY and VALUE tokens. -// -// The following examples show flow collections: -// -// 1. A flow sequence: -// -// [item 1, item 2, item 3] -// -// Tokens: -// -// STREAM-START(utf-8) -// FLOW-SEQUENCE-START -// SCALAR("item 1",plain) -// FLOW-ENTRY -// SCALAR("item 2",plain) -// FLOW-ENTRY -// SCALAR("item 3",plain) -// FLOW-SEQUENCE-END -// STREAM-END -// -// 2. A flow mapping: -// -// { -// a simple key: a value, # Note that the KEY token is produced. -// ? a complex key: another value, -// } -// -// Tokens: -// -// STREAM-START(utf-8) -// FLOW-MAPPING-START -// KEY -// SCALAR("a simple key",plain) -// VALUE -// SCALAR("a value",plain) -// FLOW-ENTRY -// KEY -// SCALAR("a complex key",plain) -// VALUE -// SCALAR("another value",plain) -// FLOW-ENTRY -// FLOW-MAPPING-END -// STREAM-END -// -// A simple key is a key that is not denoted by the '?' indicator. Note that -// the Scanner still produces the KEY token whenever it encounters a simple key. -// -// For scanning block collections, the following tokens are used (note that we -// repeat KEY and VALUE here): -// -// BLOCK-SEQUENCE-START -// BLOCK-MAPPING-START -// BLOCK-END -// BLOCK-ENTRY -// KEY -// VALUE -// -// The tokens BLOCK-SEQUENCE-START and BLOCK-MAPPING-START denote the indentation -// increase that precedes a block collection (cf. the INDENT token in Python). -// The token BLOCK-END denotes the indentation decrease that ends a block collection -// (cf. the DEDENT token in Python). However, YAML has some syntax peculiarities -// that make detection of these tokens more complex. -// -// The tokens BLOCK-ENTRY, KEY, and VALUE are used to represent the indicators -// '-', '?', and ':' respectively. -// -// The following examples show how the tokens BLOCK-SEQUENCE-START, -// BLOCK-MAPPING-START, and BLOCK-END are emitted by the Scanner: -// -// 1.
Block sequences: -// -// - item 1 -// - item 2 -// - -// - item 3.1 -// - item 3.2 -// - -// key 1: value 1 -// key 2: value 2 -// -// Tokens: -// -// STREAM-START(utf-8) -// BLOCK-SEQUENCE-START -// BLOCK-ENTRY -// SCALAR("item 1",plain) -// BLOCK-ENTRY -// SCALAR("item 2",plain) -// BLOCK-ENTRY -// BLOCK-SEQUENCE-START -// BLOCK-ENTRY -// SCALAR("item 3.1",plain) -// BLOCK-ENTRY -// SCALAR("item 3.2",plain) -// BLOCK-END -// BLOCK-ENTRY -// BLOCK-MAPPING-START -// KEY -// SCALAR("key 1",plain) -// VALUE -// SCALAR("value 1",plain) -// KEY -// SCALAR("key 2",plain) -// VALUE -// SCALAR("value 2",plain) -// BLOCK-END -// BLOCK-END -// STREAM-END -// -// 2. Block mappings: -// -// a simple key: a value # The KEY token is produced here. -// ? a complex key -// : another value -// a mapping: -// key 1: value 1 -// key 2: value 2 -// a sequence: -// - item 1 -// - item 2 -// -// Tokens: -// -// STREAM-START(utf-8) -// BLOCK-MAPPING-START -// KEY -// SCALAR("a simple key",plain) -// VALUE -// SCALAR("a value",plain) -// KEY -// SCALAR("a complex key",plain) -// VALUE -// SCALAR("another value",plain) -// KEY -// SCALAR("a mapping",plain) -// BLOCK-MAPPING-START -// KEY -// SCALAR("key 1",plain) -// VALUE -// SCALAR("value 1",plain) -// KEY -// SCALAR("key 2",plain) -// VALUE -// SCALAR("value 2",plain) -// BLOCK-END -// KEY -// SCALAR("a sequence",plain) -// VALUE -// BLOCK-SEQUENCE-START -// BLOCK-ENTRY -// SCALAR("item 1",plain) -// BLOCK-ENTRY -// SCALAR("item 2",plain) -// BLOCK-END -// BLOCK-END -// STREAM-END -// -// YAML does not always require to start a new block collection from a new -// line. If the current line contains only '-', '?', and ':' indicators, a new -// block collection may start at the current line. The following examples -// illustrate this case: -// -// 1. Collections in a sequence: -// -// - - item 1 -// - item 2 -// - key 1: value 1 -// key 2: value 2 -// - ? complex key -// : complex value -// -// Tokens: -// -// STREAM-START(utf-8) -// BLOCK-SEQUENCE-START -// BLOCK-ENTRY -// BLOCK-SEQUENCE-START -// BLOCK-ENTRY -// SCALAR("item 1",plain) -// BLOCK-ENTRY -// SCALAR("item 2",plain) -// BLOCK-END -// BLOCK-ENTRY -// BLOCK-MAPPING-START -// KEY -// SCALAR("key 1",plain) -// VALUE -// SCALAR("value 1",plain) -// KEY -// SCALAR("key 2",plain) -// VALUE -// SCALAR("value 2",plain) -// BLOCK-END -// BLOCK-ENTRY -// BLOCK-MAPPING-START -// KEY -// SCALAR("complex key") -// VALUE -// SCALAR("complex value") -// BLOCK-END -// BLOCK-END -// STREAM-END -// -// 2. Collections in a mapping: -// -// ? a sequence -// : - item 1 -// - item 2 -// ? a mapping -// : key 1: value 1 -// key 2: value 2 -// -// Tokens: -// -// STREAM-START(utf-8) -// BLOCK-MAPPING-START -// KEY -// SCALAR("a sequence",plain) -// VALUE -// BLOCK-SEQUENCE-START -// BLOCK-ENTRY -// SCALAR("item 1",plain) -// BLOCK-ENTRY -// SCALAR("item 2",plain) -// BLOCK-END -// KEY -// SCALAR("a mapping",plain) -// VALUE -// BLOCK-MAPPING-START -// KEY -// SCALAR("key 1",plain) -// VALUE -// SCALAR("value 1",plain) -// KEY -// SCALAR("key 2",plain) -// VALUE -// SCALAR("value 2",plain) -// BLOCK-END -// BLOCK-END -// STREAM-END -// -// YAML also permits non-indented sequences if they are included into a block -// mapping. In this case, the token BLOCK-SEQUENCE-START is not produced: -// -// key: -// - item 1 # BLOCK-SEQUENCE-START is NOT produced here. 
-// - item 2 -// -// Tokens: -// -// STREAM-START(utf-8) -// BLOCK-MAPPING-START -// KEY -// SCALAR("key",plain) -// VALUE -// BLOCK-ENTRY -// SCALAR("item 1",plain) -// BLOCK-ENTRY -// SCALAR("item 2",plain) -// BLOCK-END -// - -// Ensure that the buffer contains the required number of characters. -// Return true on success, false on failure (reader error or memory error). -func cache(parser *yaml_parser_t, length int) bool { - // [Go] This was inlined: !cache(A, B) -> unread < B && !update(A, B) - return parser.unread >= length || yaml_parser_update_buffer(parser, length) -} - -// Advance the buffer pointer. -func skip(parser *yaml_parser_t) { - if !is_blank(parser.buffer, parser.buffer_pos) { - parser.newlines = 0 - } - parser.mark.index++ - parser.mark.column++ - parser.unread-- - parser.buffer_pos += width(parser.buffer[parser.buffer_pos]) -} - -func skip_line(parser *yaml_parser_t) { - if is_crlf(parser.buffer, parser.buffer_pos) { - parser.mark.index += 2 - parser.mark.column = 0 - parser.mark.line++ - parser.unread -= 2 - parser.buffer_pos += 2 - parser.newlines++ - } else if is_break(parser.buffer, parser.buffer_pos) { - parser.mark.index++ - parser.mark.column = 0 - parser.mark.line++ - parser.unread-- - parser.buffer_pos += width(parser.buffer[parser.buffer_pos]) - parser.newlines++ - } -} - -// Copy a character to a string buffer and advance pointers. -func read(parser *yaml_parser_t, s []byte) []byte { - if !is_blank(parser.buffer, parser.buffer_pos) { - parser.newlines = 0 - } - w := width(parser.buffer[parser.buffer_pos]) - if w == 0 { - panic("invalid character sequence") - } - if len(s) == 0 { - s = make([]byte, 0, 32) - } - if w == 1 && len(s)+w <= cap(s) { - s = s[:len(s)+1] - s[len(s)-1] = parser.buffer[parser.buffer_pos] - parser.buffer_pos++ - } else { - s = append(s, parser.buffer[parser.buffer_pos:parser.buffer_pos+w]...) - parser.buffer_pos += w - } - parser.mark.index++ - parser.mark.column++ - parser.unread-- - return s -} - -// Copy a line break character to a string buffer and advance pointers. -func read_line(parser *yaml_parser_t, s []byte) []byte { - buf := parser.buffer - pos := parser.buffer_pos - switch { - case buf[pos] == '\r' && buf[pos+1] == '\n': - // CR LF . LF - s = append(s, '\n') - parser.buffer_pos += 2 - parser.mark.index++ - parser.unread-- - case buf[pos] == '\r' || buf[pos] == '\n': - // CR|LF . LF - s = append(s, '\n') - parser.buffer_pos += 1 - case buf[pos] == '\xC2' && buf[pos+1] == '\x85': - // NEL . LF - s = append(s, '\n') - parser.buffer_pos += 2 - case buf[pos] == '\xE2' && buf[pos+1] == '\x80' && (buf[pos+2] == '\xA8' || buf[pos+2] == '\xA9'): - // LS|PS . LS|PS - s = append(s, buf[parser.buffer_pos:pos+3]...) - parser.buffer_pos += 3 - default: - return s - } - parser.mark.index++ - parser.mark.column = 0 - parser.mark.line++ - parser.unread-- - parser.newlines++ - return s -} - -// Get the next token. -func yaml_parser_scan(parser *yaml_parser_t, token *yaml_token_t) bool { - // Erase the token object. - *token = yaml_token_t{} // [Go] Is this necessary? - - // No tokens after STREAM-END or error. - if parser.stream_end_produced || parser.error != yaml_NO_ERROR { - return true - } - - // Ensure that the tokens queue contains enough tokens. - if !parser.token_available { - if !yaml_parser_fetch_more_tokens(parser) { - return false - } - } - - // Fetch the next token from the queue. 
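read_line above folds the YAML line-break forms CR, LF, CR LF and NEL into a single '\n', while keeping LS and PS verbatim. A standalone sketch of that normalization (the name is illustrative, and bounds checks are added since this version has no guaranteed lookahead):

package main

import "fmt"

// normalizeBreak returns the normalized break and the number of input
// bytes consumed, or (nil, 0) if pos is not at a line break.
func normalizeBreak(buf []byte, pos int) ([]byte, int) {
    switch {
    case pos+1 < len(buf) && buf[pos] == '\r' && buf[pos+1] == '\n':
        return []byte{'\n'}, 2 // CR LF -> LF
    case buf[pos] == '\r' || buf[pos] == '\n':
        return []byte{'\n'}, 1 // CR | LF -> LF
    case pos+1 < len(buf) && buf[pos] == 0xC2 && buf[pos+1] == 0x85:
        return []byte{'\n'}, 2 // NEL -> LF
    case pos+2 < len(buf) && buf[pos] == 0xE2 && buf[pos+1] == 0x80 &&
        (buf[pos+2] == 0xA8 || buf[pos+2] == 0xA9):
        return buf[pos : pos+3], 3 // LS | PS kept as-is
    }
    return nil, 0
}

func main() {
    out, n := normalizeBreak([]byte("\r\nnext"), 0)
    fmt.Printf("%q %d\n", out, n) // "\n" 2
}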
- *token = parser.tokens[parser.tokens_head] - parser.tokens_head++ - parser.tokens_parsed++ - parser.token_available = false - - if token.typ == yaml_STREAM_END_TOKEN { - parser.stream_end_produced = true - } - return true -} - -// Set the scanner error and return false. -func yaml_parser_set_scanner_error(parser *yaml_parser_t, context string, context_mark yaml_mark_t, problem string) bool { - parser.error = yaml_SCANNER_ERROR - parser.context = context - parser.context_mark = context_mark - parser.problem = problem - parser.problem_mark = parser.mark - return false -} - -func yaml_parser_set_scanner_tag_error(parser *yaml_parser_t, directive bool, context_mark yaml_mark_t, problem string) bool { - context := "while parsing a tag" - if directive { - context = "while parsing a %TAG directive" - } - return yaml_parser_set_scanner_error(parser, context, context_mark, problem) -} - -func trace(args ...interface{}) func() { - pargs := append([]interface{}{"+++"}, args...) - fmt.Println(pargs...) - pargs = append([]interface{}{"---"}, args...) - return func() { fmt.Println(pargs...) } -} - -// Ensure that the tokens queue contains at least one token which can be -// returned to the Parser. -func yaml_parser_fetch_more_tokens(parser *yaml_parser_t) bool { - // While we need more tokens to fetch, do it. - for { - // [Go] The comment parsing logic requires a lookahead of two tokens - // so that foot comments may be parsed in time of associating them - // with the tokens that are parsed before them, and also for line - // comments to be transformed into head comments in some edge cases. - if parser.tokens_head < len(parser.tokens)-2 { - // If a potential simple key is at the head position, we need to fetch - // the next token to disambiguate it. - head_tok_idx, ok := parser.simple_keys_by_tok[parser.tokens_parsed] - if !ok { - break - } else if valid, ok := yaml_simple_key_is_valid(parser, &parser.simple_keys[head_tok_idx]); !ok { - return false - } else if !valid { - break - } - } - // Fetch the next token. - if !yaml_parser_fetch_next_token(parser) { - return false - } - } - - parser.token_available = true - return true -} - -// The dispatcher for token fetchers. -func yaml_parser_fetch_next_token(parser *yaml_parser_t) (ok bool) { - // Ensure that the buffer is initialized. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - - // Check if we just started scanning. Fetch STREAM-START then. - if !parser.stream_start_produced { - return yaml_parser_fetch_stream_start(parser) - } - - scan_mark := parser.mark - - // Eat whitespaces and comments until we reach the next token. - if !yaml_parser_scan_to_next_token(parser) { - return false - } - - // [Go] While unrolling indents, transform the head comments of prior - // indentation levels observed after scan_start into foot comments at - // the respective indexes. - - // Check the indentation level against the current column. - if !yaml_parser_unroll_indent(parser, parser.mark.column, scan_mark) { - return false - } - - // Ensure that the buffer contains at least 4 characters. 4 is the length - // of the longest indicators ('--- ' and '... '). - if parser.unread < 4 && !yaml_parser_update_buffer(parser, 4) { - return false - } - - // Is it the end of the stream? - if is_z(parser.buffer, parser.buffer_pos) { - return yaml_parser_fetch_stream_end(parser) - } - - // Is it a directive? 
- if parser.mark.column == 0 && parser.buffer[parser.buffer_pos] == '%' { - return yaml_parser_fetch_directive(parser) - } - - buf := parser.buffer - pos := parser.buffer_pos - - // Is it the document start indicator? - if parser.mark.column == 0 && buf[pos] == '-' && buf[pos+1] == '-' && buf[pos+2] == '-' && is_blankz(buf, pos+3) { - return yaml_parser_fetch_document_indicator(parser, yaml_DOCUMENT_START_TOKEN) - } - - // Is it the document end indicator? - if parser.mark.column == 0 && buf[pos] == '.' && buf[pos+1] == '.' && buf[pos+2] == '.' && is_blankz(buf, pos+3) { - return yaml_parser_fetch_document_indicator(parser, yaml_DOCUMENT_END_TOKEN) - } - - comment_mark := parser.mark - if len(parser.tokens) > 0 && (parser.flow_level == 0 && buf[pos] == ':' || parser.flow_level > 0 && buf[pos] == ',') { - // Associate any following comments with the prior token. - comment_mark = parser.tokens[len(parser.tokens)-1].start_mark - } - defer func() { - if !ok { - return - } - if len(parser.tokens) > 0 && parser.tokens[len(parser.tokens)-1].typ == yaml_BLOCK_ENTRY_TOKEN { - // Sequence indicators alone have no line comments. It becomes - // a head comment for whatever follows. - return - } - if !yaml_parser_scan_line_comment(parser, comment_mark) { - ok = false - return - } - }() - - // Is it the flow sequence start indicator? - if buf[pos] == '[' { - return yaml_parser_fetch_flow_collection_start(parser, yaml_FLOW_SEQUENCE_START_TOKEN) - } - - // Is it the flow mapping start indicator? - if parser.buffer[parser.buffer_pos] == '{' { - return yaml_parser_fetch_flow_collection_start(parser, yaml_FLOW_MAPPING_START_TOKEN) - } - - // Is it the flow sequence end indicator? - if parser.buffer[parser.buffer_pos] == ']' { - return yaml_parser_fetch_flow_collection_end(parser, - yaml_FLOW_SEQUENCE_END_TOKEN) - } - - // Is it the flow mapping end indicator? - if parser.buffer[parser.buffer_pos] == '}' { - return yaml_parser_fetch_flow_collection_end(parser, - yaml_FLOW_MAPPING_END_TOKEN) - } - - // Is it the flow entry indicator? - if parser.buffer[parser.buffer_pos] == ',' { - return yaml_parser_fetch_flow_entry(parser) - } - - // Is it the block entry indicator? - if parser.buffer[parser.buffer_pos] == '-' && is_blankz(parser.buffer, parser.buffer_pos+1) { - return yaml_parser_fetch_block_entry(parser) - } - - // Is it the key indicator? - if parser.buffer[parser.buffer_pos] == '?' && (parser.flow_level > 0 || is_blankz(parser.buffer, parser.buffer_pos+1)) { - return yaml_parser_fetch_key(parser) - } - - // Is it the value indicator? - if parser.buffer[parser.buffer_pos] == ':' && (parser.flow_level > 0 || is_blankz(parser.buffer, parser.buffer_pos+1)) { - return yaml_parser_fetch_value(parser) - } - - // Is it an alias? - if parser.buffer[parser.buffer_pos] == '*' { - return yaml_parser_fetch_anchor(parser, yaml_ALIAS_TOKEN) - } - - // Is it an anchor? - if parser.buffer[parser.buffer_pos] == '&' { - return yaml_parser_fetch_anchor(parser, yaml_ANCHOR_TOKEN) - } - - // Is it a tag? - if parser.buffer[parser.buffer_pos] == '!' { - return yaml_parser_fetch_tag(parser) - } - - // Is it a literal scalar? - if parser.buffer[parser.buffer_pos] == '|' && parser.flow_level == 0 { - return yaml_parser_fetch_block_scalar(parser, true) - } - - // Is it a folded scalar? - if parser.buffer[parser.buffer_pos] == '>' && parser.flow_level == 0 { - return yaml_parser_fetch_block_scalar(parser, false) - } - - // Is it a single-quoted scalar? 
- if parser.buffer[parser.buffer_pos] == '\'' { - return yaml_parser_fetch_flow_scalar(parser, true) - } - - // Is it a double-quoted scalar? - if parser.buffer[parser.buffer_pos] == '"' { - return yaml_parser_fetch_flow_scalar(parser, false) - } - - // Is it a plain scalar? - // - // A plain scalar may start with any non-blank characters except - // - // '-', '?', ':', ',', '[', ']', '{', '}', - // '#', '&', '*', '!', '|', '>', '\'', '\"', - // '%', '@', '`'. - // - // In the block context (and, for the '-' indicator, in the flow context - // too), it may also start with the characters - // - // '-', '?', ':' - // - // if it is followed by a non-space character. - // - // The last rule is more restrictive than the specification requires. - // [Go] TODO Make this logic more reasonable. - //switch parser.buffer[parser.buffer_pos] { - //case '-', '?', ':', ',', '?', '-', ',', ':', ']', '[', '}', '{', '&', '#', '!', '*', '>', '|', '"', '\'', '@', '%', '-', '`': - //} - if !(is_blankz(parser.buffer, parser.buffer_pos) || parser.buffer[parser.buffer_pos] == '-' || - parser.buffer[parser.buffer_pos] == '?' || parser.buffer[parser.buffer_pos] == ':' || - parser.buffer[parser.buffer_pos] == ',' || parser.buffer[parser.buffer_pos] == '[' || - parser.buffer[parser.buffer_pos] == ']' || parser.buffer[parser.buffer_pos] == '{' || - parser.buffer[parser.buffer_pos] == '}' || parser.buffer[parser.buffer_pos] == '#' || - parser.buffer[parser.buffer_pos] == '&' || parser.buffer[parser.buffer_pos] == '*' || - parser.buffer[parser.buffer_pos] == '!' || parser.buffer[parser.buffer_pos] == '|' || - parser.buffer[parser.buffer_pos] == '>' || parser.buffer[parser.buffer_pos] == '\'' || - parser.buffer[parser.buffer_pos] == '"' || parser.buffer[parser.buffer_pos] == '%' || - parser.buffer[parser.buffer_pos] == '@' || parser.buffer[parser.buffer_pos] == '`') || - (parser.buffer[parser.buffer_pos] == '-' && !is_blank(parser.buffer, parser.buffer_pos+1)) || - (parser.flow_level == 0 && - (parser.buffer[parser.buffer_pos] == '?' || parser.buffer[parser.buffer_pos] == ':') && - !is_blankz(parser.buffer, parser.buffer_pos+1)) { - return yaml_parser_fetch_plain_scalar(parser) - } - - // If we don't determine the token type so far, it is an error. - return yaml_parser_set_scanner_error(parser, - "while scanning for the next token", parser.mark, - "found character that cannot start any token") -} - -func yaml_simple_key_is_valid(parser *yaml_parser_t, simple_key *yaml_simple_key_t) (valid, ok bool) { - if !simple_key.possible { - return false, true - } - - // The 1.2 specification says: - // - // "If the ? indicator is omitted, parsing needs to see past the - // implicit key to recognize it as such. To limit the amount of - // lookahead required, the “:” indicator must appear at most 1024 - // Unicode characters beyond the start of the key. In addition, the key - // is restricted to a single line." - // - if simple_key.mark.line < parser.mark.line || simple_key.mark.index+1024 < parser.mark.index { - // Check if the potential simple key to be removed is required. - if simple_key.required { - return false, yaml_parser_set_scanner_error(parser, - "while scanning a simple key", simple_key.mark, - "could not find expected ':'") - } - simple_key.possible = false - return false, true - } - return true, true -} - -// Check if a simple key may start at the current position and add it if -// needed. 
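The lookahead bound quoted above (the ':' of an implicit key must appear within 1024 characters of the key, on the same line) reduces to a small predicate. A sketch with a pared-down mark type (both names are illustrative):

package main

import "fmt"

// mark is a stand-in for yaml_mark_t, keeping only index and line.
type mark struct{ index, line int }

// simpleKeyStillValid mirrors the check in yaml_simple_key_is_valid:
// the key stays a candidate only while scanning remains on its line and
// within 1024 characters of its start.
func simpleKeyStillValid(key, current mark) bool {
    return key.line == current.line && key.index+1024 >= current.index
}

func main() {
    fmt.Println(simpleKeyStillValid(mark{0, 1}, mark{500, 1}))  // true
    fmt.Println(simpleKeyStillValid(mark{0, 1}, mark{2000, 1})) // false: too far
    fmt.Println(simpleKeyStillValid(mark{0, 1}, mark{10, 2}))   // false: next line
}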
-func yaml_parser_save_simple_key(parser *yaml_parser_t) bool { - // A simple key is required at the current position if the scanner is in - // the block context and the current column coincides with the indentation - // level. - - required := parser.flow_level == 0 && parser.indent == parser.mark.column - - // - // If the current position may start a simple key, save it. - // - if parser.simple_key_allowed { - simple_key := yaml_simple_key_t{ - possible: true, - required: required, - token_number: parser.tokens_parsed + (len(parser.tokens) - parser.tokens_head), - mark: parser.mark, - } - - if !yaml_parser_remove_simple_key(parser) { - return false - } - parser.simple_keys[len(parser.simple_keys)-1] = simple_key - parser.simple_keys_by_tok[simple_key.token_number] = len(parser.simple_keys) - 1 - } - return true -} - -// Remove a potential simple key at the current flow level. -func yaml_parser_remove_simple_key(parser *yaml_parser_t) bool { - i := len(parser.simple_keys) - 1 - if parser.simple_keys[i].possible { - // If the key is required, it is an error. - if parser.simple_keys[i].required { - return yaml_parser_set_scanner_error(parser, - "while scanning a simple key", parser.simple_keys[i].mark, - "could not find expected ':'") - } - // Remove the key from the stack. - parser.simple_keys[i].possible = false - delete(parser.simple_keys_by_tok, parser.simple_keys[i].token_number) - } - return true -} - -// max_flow_level limits the flow_level -const max_flow_level = 10000 - -// Increase the flow level and resize the simple key list if needed. -func yaml_parser_increase_flow_level(parser *yaml_parser_t) bool { - // Reset the simple key on the next level. - parser.simple_keys = append(parser.simple_keys, yaml_simple_key_t{ - possible: false, - required: false, - token_number: parser.tokens_parsed + (len(parser.tokens) - parser.tokens_head), - mark: parser.mark, - }) - - // Increase the flow level. - parser.flow_level++ - if parser.flow_level > max_flow_level { - return yaml_parser_set_scanner_error(parser, - "while increasing flow level", parser.simple_keys[len(parser.simple_keys)-1].mark, - fmt.Sprintf("exceeded max depth of %d", max_flow_level)) - } - return true -} - -// Decrease the flow level. -func yaml_parser_decrease_flow_level(parser *yaml_parser_t) bool { - if parser.flow_level > 0 { - parser.flow_level-- - last := len(parser.simple_keys) - 1 - delete(parser.simple_keys_by_tok, parser.simple_keys[last].token_number) - parser.simple_keys = parser.simple_keys[:last] - } - return true -} - -// max_indents limits the indents stack size -const max_indents = 10000 - -// Push the current indentation level to the stack and set the new level -// if the current column is greater than the indentation level. In this case, -// append or insert the specified token into the token queue. -func yaml_parser_roll_indent(parser *yaml_parser_t, column, number int, typ yaml_token_type_t, mark yaml_mark_t) bool { - // In the flow context, do nothing. - if parser.flow_level > 0 { - return true - } - - if parser.indent < column { - // Push the current indentation level to the stack and set the new - // indentation level. - parser.indents = append(parser.indents, parser.indent) - parser.indent = column - if len(parser.indents) > max_indents { - return yaml_parser_set_scanner_error(parser, - "while increasing indent level", parser.simple_keys[len(parser.simple_keys)-1].mark, - fmt.Sprintf("exceeded max depth of %d", max_indents)) - } - - // Create a token and insert it into the queue.
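// The indents slice is a plain stack: yaml_parser_roll_indent pushes the
// old level when a deeper column opens a block collection, and the matching
// yaml_parser_unroll_indent below pops levels, emitting one BLOCK-END token
// per pop. A stand-alone sketch of that discipline (hypothetical names,
// illustrative only):
//
//	indents, indent := []int{}, -1
//
//	roll := func(column int) { // deeper column: push
//		if indent < column {
//			indents = append(indents, indent)
//			indent = column // a BLOCK-*-START token is queued here
//		}
//	}
//	unroll := func(column int) { // shallower column: pop until it fits
//		for indent > column {
//			indent = indents[len(indents)-1]
//			indents = indents[:len(indents)-1] // queue one BLOCK-END
//		}
//	}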
- token := yaml_token_t{ - typ: typ, - start_mark: mark, - end_mark: mark, - } - if number > -1 { - number -= parser.tokens_parsed - } - yaml_insert_token(parser, number, &token) - } - return true -} - -// Pop indentation levels from the indents stack until the current level -// becomes less than or equal to the column. For each indentation level, append -// the BLOCK-END token. -func yaml_parser_unroll_indent(parser *yaml_parser_t, column int, scan_mark yaml_mark_t) bool { - // In the flow context, do nothing. - if parser.flow_level > 0 { - return true - } - - block_mark := scan_mark - block_mark.index-- - - // Loop through the indentation levels in the stack. - for parser.indent > column { - - // [Go] Reposition the end token before potential following - // foot comments of parent blocks. For that, search - // backwards for recent comments that were at the same - // indent as the block that is ending now. - stop_index := block_mark.index - for i := len(parser.comments) - 1; i >= 0; i-- { - comment := &parser.comments[i] - - if comment.end_mark.index < stop_index { - // Don't go back beyond the start of the comment/whitespace scan, unless column < 0. - // If requested indent column is < 0, then the document is over and everything else - // is a foot anyway. - break - } - if comment.start_mark.column == parser.indent+1 { - // This is a good match. But maybe there's a former comment - // at that same indent level, so keep searching. - block_mark = comment.start_mark - } - - // While the end of the former comment matches with - // the start of the following one, we know there's - // nothing in between and scanning is still safe. - stop_index = comment.scan_mark.index - } - - // Create a token and append it to the queue. - token := yaml_token_t{ - typ: yaml_BLOCK_END_TOKEN, - start_mark: block_mark, - end_mark: block_mark, - } - yaml_insert_token(parser, -1, &token) - - // Pop the indentation level. - parser.indent = parser.indents[len(parser.indents)-1] - parser.indents = parser.indents[:len(parser.indents)-1] - } - return true -} - -// Initialize the scanner and produce the STREAM-START token. -func yaml_parser_fetch_stream_start(parser *yaml_parser_t) bool { - - // Set the initial indentation. - parser.indent = -1 - - // Initialize the simple key stack. - parser.simple_keys = append(parser.simple_keys, yaml_simple_key_t{}) - - parser.simple_keys_by_tok = make(map[int]int) - - // A simple key is allowed at the beginning of the stream. - parser.simple_key_allowed = true - - // We have started. - parser.stream_start_produced = true - - // Create the STREAM-START token and append it to the queue. - token := yaml_token_t{ - typ: yaml_STREAM_START_TOKEN, - start_mark: parser.mark, - end_mark: parser.mark, - encoding: parser.encoding, - } - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the STREAM-END token and shut down the scanner. -func yaml_parser_fetch_stream_end(parser *yaml_parser_t) bool { - - // Force new line. - if parser.mark.column != 0 { - parser.mark.column = 0 - parser.mark.line++ - } - - // Reset the indentation level. - if !yaml_parser_unroll_indent(parser, -1, parser.mark) { - return false - } - - // Reset simple keys. - if !yaml_parser_remove_simple_key(parser) { - return false - } - - parser.simple_key_allowed = false - - // Create the STREAM-END token and append it to the queue.
- token := yaml_token_t{ - typ: yaml_STREAM_END_TOKEN, - start_mark: parser.mark, - end_mark: parser.mark, - } - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce a VERSION-DIRECTIVE or TAG-DIRECTIVE token. -func yaml_parser_fetch_directive(parser *yaml_parser_t) bool { - // Reset the indentation level. - if !yaml_parser_unroll_indent(parser, -1, parser.mark) { - return false - } - - // Reset simple keys. - if !yaml_parser_remove_simple_key(parser) { - return false - } - - parser.simple_key_allowed = false - - // Create the YAML-DIRECTIVE or TAG-DIRECTIVE token. - token := yaml_token_t{} - if !yaml_parser_scan_directive(parser, &token) { - return false - } - // Append the token to the queue. - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the DOCUMENT-START or DOCUMENT-END token. -func yaml_parser_fetch_document_indicator(parser *yaml_parser_t, typ yaml_token_type_t) bool { - // Reset the indentation level. - if !yaml_parser_unroll_indent(parser, -1, parser.mark) { - return false - } - - // Reset simple keys. - if !yaml_parser_remove_simple_key(parser) { - return false - } - - parser.simple_key_allowed = false - - // Consume the token. - start_mark := parser.mark - - skip(parser) - skip(parser) - skip(parser) - - end_mark := parser.mark - - // Create the DOCUMENT-START or DOCUMENT-END token. - token := yaml_token_t{ - typ: typ, - start_mark: start_mark, - end_mark: end_mark, - } - // Append the token to the queue. - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the FLOW-SEQUENCE-START or FLOW-MAPPING-START token. -func yaml_parser_fetch_flow_collection_start(parser *yaml_parser_t, typ yaml_token_type_t) bool { - - // The indicators '[' and '{' may start a simple key. - if !yaml_parser_save_simple_key(parser) { - return false - } - - // Increase the flow level. - if !yaml_parser_increase_flow_level(parser) { - return false - } - - // A simple key may follow the indicators '[' and '{'. - parser.simple_key_allowed = true - - // Consume the token. - start_mark := parser.mark - skip(parser) - end_mark := parser.mark - - // Create the FLOW-SEQUENCE-START or FLOW-MAPPING-START token. - token := yaml_token_t{ - typ: typ, - start_mark: start_mark, - end_mark: end_mark, - } - // Append the token to the queue. - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the FLOW-SEQUENCE-END or FLOW-MAPPING-END token. -func yaml_parser_fetch_flow_collection_end(parser *yaml_parser_t, typ yaml_token_type_t) bool { - // Reset any potential simple key on the current flow level. - if !yaml_parser_remove_simple_key(parser) { - return false - } - - // Decrease the flow level. - if !yaml_parser_decrease_flow_level(parser) { - return false - } - - // No simple keys after the indicators ']' and '}'. - parser.simple_key_allowed = false - - // Consume the token. - - start_mark := parser.mark - skip(parser) - end_mark := parser.mark - - // Create the FLOW-SEQUENCE-END or FLOW-MAPPING-END token. - token := yaml_token_t{ - typ: typ, - start_mark: start_mark, - end_mark: end_mark, - } - // Append the token to the queue. - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the FLOW-ENTRY token. -func yaml_parser_fetch_flow_entry(parser *yaml_parser_t) bool { - // Reset any potential simple keys on the current flow level. - if !yaml_parser_remove_simple_key(parser) { - return false - } - - // Simple keys are allowed after ','. - parser.simple_key_allowed = true - - // Consume the token.
- start_mark := parser.mark - skip(parser) - end_mark := parser.mark - - // Create the FLOW-ENTRY token and append it to the queue. - token := yaml_token_t{ - typ: yaml_FLOW_ENTRY_TOKEN, - start_mark: start_mark, - end_mark: end_mark, - } - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the BLOCK-ENTRY token. -func yaml_parser_fetch_block_entry(parser *yaml_parser_t) bool { - // Check if the scanner is in the block context. - if parser.flow_level == 0 { - // Check if we are allowed to start a new entry. - if !parser.simple_key_allowed { - return yaml_parser_set_scanner_error(parser, "", parser.mark, - "block sequence entries are not allowed in this context") - } - // Add the BLOCK-SEQUENCE-START token if needed. - if !yaml_parser_roll_indent(parser, parser.mark.column, -1, yaml_BLOCK_SEQUENCE_START_TOKEN, parser.mark) { - return false - } - } else { - // It is an error for the '-' indicator to occur in the flow context, - // but we let the Parser detect and report it because the Parser - // is able to point to the context. - } - - // Reset any potential simple keys on the current flow level. - if !yaml_parser_remove_simple_key(parser) { - return false - } - - // Simple keys are allowed after '-'. - parser.simple_key_allowed = true - - // Consume the token. - start_mark := parser.mark - skip(parser) - end_mark := parser.mark - - // Create the BLOCK-ENTRY token and append it to the queue. - token := yaml_token_t{ - typ: yaml_BLOCK_ENTRY_TOKEN, - start_mark: start_mark, - end_mark: end_mark, - } - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the KEY token. -func yaml_parser_fetch_key(parser *yaml_parser_t) bool { - - // In the block context, additional checks are required. - if parser.flow_level == 0 { - // Check if we are allowed to start a new key (not necessarily simple). - if !parser.simple_key_allowed { - return yaml_parser_set_scanner_error(parser, "", parser.mark, - "mapping keys are not allowed in this context") - } - // Add the BLOCK-MAPPING-START token if needed. - if !yaml_parser_roll_indent(parser, parser.mark.column, -1, yaml_BLOCK_MAPPING_START_TOKEN, parser.mark) { - return false - } - } - - // Reset any potential simple keys on the current flow level. - if !yaml_parser_remove_simple_key(parser) { - return false - } - - // Simple keys are allowed after '?' in the block context. - parser.simple_key_allowed = parser.flow_level == 0 - - // Consume the token. - start_mark := parser.mark - skip(parser) - end_mark := parser.mark - - // Create the KEY token and append it to the queue. - token := yaml_token_t{ - typ: yaml_KEY_TOKEN, - start_mark: start_mark, - end_mark: end_mark, - } - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the VALUE token. -func yaml_parser_fetch_value(parser *yaml_parser_t) bool { - - simple_key := &parser.simple_keys[len(parser.simple_keys)-1] - - // Have we found a simple key? - if valid, ok := yaml_simple_key_is_valid(parser, simple_key); !ok { - return false - - } else if valid { - - // Create the KEY token and insert it into the queue. - token := yaml_token_t{ - typ: yaml_KEY_TOKEN, - start_mark: simple_key.mark, - end_mark: simple_key.mark, - } - yaml_insert_token(parser, simple_key.token_number-parser.tokens_parsed, &token) - - // In the block context, we may need to add the BLOCK-MAPPING-START token.
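// Concretely, for the input "a: 1" the scanner emits SCALAR("a") before it
// can know that "a" is a key; only when ':' arrives does this branch insert
// a KEY token in front of the already-queued scalar, at the queue position
// remembered in simple_key.token_number:
//
//	tokens after scanning "a":  [SCALAR("a")]
//	tokens after scanning ":":  [KEY, SCALAR("a"), VALUE]
//
// This retroactive insert is why yaml_insert_token takes a position instead
// of always appending, and why token_number had to be saved at all.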
- if !yaml_parser_roll_indent(parser, simple_key.mark.column, - simple_key.token_number, - yaml_BLOCK_MAPPING_START_TOKEN, simple_key.mark) { - return false - } - - // Remove the simple key. - simple_key.possible = false - delete(parser.simple_keys_by_tok, simple_key.token_number) - - // A simple key cannot follow another simple key. - parser.simple_key_allowed = false - - } else { - // The ':' indicator follows a complex key. - - // In the block context, extra checks are required. - if parser.flow_level == 0 { - - // Check if we are allowed to start a complex value. - if !parser.simple_key_allowed { - return yaml_parser_set_scanner_error(parser, "", parser.mark, - "mapping values are not allowed in this context") - } - - // Add the BLOCK-MAPPING-START token if needed. - if !yaml_parser_roll_indent(parser, parser.mark.column, -1, yaml_BLOCK_MAPPING_START_TOKEN, parser.mark) { - return false - } - } - - // Simple keys after ':' are allowed in the block context. - parser.simple_key_allowed = parser.flow_level == 0 - } - - // Consume the token. - start_mark := parser.mark - skip(parser) - end_mark := parser.mark - - // Create the VALUE token and append it to the queue. - token := yaml_token_t{ - typ: yaml_VALUE_TOKEN, - start_mark: start_mark, - end_mark: end_mark, - } - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the ALIAS or ANCHOR token. -func yaml_parser_fetch_anchor(parser *yaml_parser_t, typ yaml_token_type_t) bool { - // An anchor or an alias could be a simple key. - if !yaml_parser_save_simple_key(parser) { - return false - } - - // A simple key cannot follow an anchor or an alias. - parser.simple_key_allowed = false - - // Create the ALIAS or ANCHOR token and append it to the queue. - var token yaml_token_t - if !yaml_parser_scan_anchor(parser, &token, typ) { - return false - } - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the TAG token. -func yaml_parser_fetch_tag(parser *yaml_parser_t) bool { - // A tag could be a simple key. - if !yaml_parser_save_simple_key(parser) { - return false - } - - // A simple key cannot follow a tag. - parser.simple_key_allowed = false - - // Create the TAG token and append it to the queue. - var token yaml_token_t - if !yaml_parser_scan_tag(parser, &token) { - return false - } - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the SCALAR(...,literal) or SCALAR(...,folded) tokens. -func yaml_parser_fetch_block_scalar(parser *yaml_parser_t, literal bool) bool { - // Remove any potential simple keys. - if !yaml_parser_remove_simple_key(parser) { - return false - } - - // A simple key may follow a block scalar. - parser.simple_key_allowed = true - - // Create the SCALAR token and append it to the queue. - var token yaml_token_t - if !yaml_parser_scan_block_scalar(parser, &token, literal) { - return false - } - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the SCALAR(...,single-quoted) or SCALAR(...,double-quoted) tokens. -func yaml_parser_fetch_flow_scalar(parser *yaml_parser_t, single bool) bool { - // A plain scalar could be a simple key. - if !yaml_parser_save_simple_key(parser) { - return false - } - - // A simple key cannot follow a flow scalar. - parser.simple_key_allowed = false - - // Create the SCALAR token and append it to the queue. - var token yaml_token_t - if !yaml_parser_scan_flow_scalar(parser, &token, single) { - return false - } - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the SCALAR(...,plain) token. 
-func yaml_parser_fetch_plain_scalar(parser *yaml_parser_t) bool { - // A plain scalar could be a simple key. - if !yaml_parser_save_simple_key(parser) { - return false - } - - // A simple key cannot follow a plain scalar. - parser.simple_key_allowed = false - - // Create the SCALAR token and append it to the queue. - var token yaml_token_t - if !yaml_parser_scan_plain_scalar(parser, &token) { - return false - } - yaml_insert_token(parser, -1, &token) - return true -} - -// Eat whitespaces and comments until the next token is found. -func yaml_parser_scan_to_next_token(parser *yaml_parser_t) bool { - - scan_mark := parser.mark - - // Loop until the next token is found. - for { - // Allow the BOM mark to start a line. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - if parser.mark.column == 0 && is_bom(parser.buffer, parser.buffer_pos) { - skip(parser) - } - - // Eat whitespaces. - // Tabs are allowed: - // - in the flow context - // - in the block context, but not at the beginning of the line or - // after '-', '?', or ':' (complex value). - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - - for parser.buffer[parser.buffer_pos] == ' ' || ((parser.flow_level > 0 || !parser.simple_key_allowed) && parser.buffer[parser.buffer_pos] == '\t') { - skip(parser) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - // Check if we just had a line comment under a sequence entry that - // looks more like a header to the following content. Similar to this: - // - // - # The comment - // - Some data - // - // If so, transform the line comment to a head comment and reposition. - if len(parser.comments) > 0 && len(parser.tokens) > 1 { - tokenA := parser.tokens[len(parser.tokens)-2] - tokenB := parser.tokens[len(parser.tokens)-1] - comment := &parser.comments[len(parser.comments)-1] - if tokenA.typ == yaml_BLOCK_SEQUENCE_START_TOKEN && tokenB.typ == yaml_BLOCK_ENTRY_TOKEN && len(comment.line) > 0 && !is_break(parser.buffer, parser.buffer_pos) { - // If it was in the prior line, reposition so it becomes a - // header of the follow-up token. Otherwise, keep it in place - // so it becomes a header of the former. - comment.head = comment.line - comment.line = nil - if comment.start_mark.line == parser.mark.line-1 { - comment.token_mark = parser.mark - } - } - } - - // Eat a comment until a line break. - if parser.buffer[parser.buffer_pos] == '#' { - if !yaml_parser_scan_comments(parser, scan_mark) { - return false - } - } - - // If it is a line break, eat it. - if is_break(parser.buffer, parser.buffer_pos) { - if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { - return false - } - skip_line(parser) - - // In the block context, a new line may start a simple key. - if parser.flow_level == 0 { - parser.simple_key_allowed = true - } - } else { - break // We have found a token. - } - } - - return true -} - -// Scan a YAML-DIRECTIVE or TAG-DIRECTIVE token. -// -// Scope: -// -// %YAML 1.1 # a comment \n -// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -// %TAG !yaml! tag:yaml.org,2002: \n -// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -func yaml_parser_scan_directive(parser *yaml_parser_t, token *yaml_token_t) bool { - // Eat '%'. - start_mark := parser.mark - skip(parser) - - // Scan the directive name. - var name []byte - if !yaml_parser_scan_directive_name(parser, start_mark, &name) { - return false - } - - // Is it a YAML directive?
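// Only two directive names are recognized; anything else (for example a
// hypothetical "%FOO bar") falls through to the "found unknown directive
// name" error below. The two accepted shapes are:
//
//	%YAML 1.2                            # scanned into major=1, minor=2
//	%TAG !e! tag:example.com,2000:app/   # scanned into handle and prefix
//
// (The "!e!" handle and example.com prefix are example input only.)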
- if bytes.Equal(name, []byte("YAML")) { - // Scan the VERSION directive value. - var major, minor int8 - if !yaml_parser_scan_version_directive_value(parser, start_mark, &major, &minor) { - return false - } - end_mark := parser.mark - - // Create a VERSION-DIRECTIVE token. - *token = yaml_token_t{ - typ: yaml_VERSION_DIRECTIVE_TOKEN, - start_mark: start_mark, - end_mark: end_mark, - major: major, - minor: minor, - } - - // Is it a TAG directive? - } else if bytes.Equal(name, []byte("TAG")) { - // Scan the TAG directive value. - var handle, prefix []byte - if !yaml_parser_scan_tag_directive_value(parser, start_mark, &handle, &prefix) { - return false - } - end_mark := parser.mark - - // Create a TAG-DIRECTIVE token. - *token = yaml_token_t{ - typ: yaml_TAG_DIRECTIVE_TOKEN, - start_mark: start_mark, - end_mark: end_mark, - value: handle, - prefix: prefix, - } - - // Unknown directive. - } else { - yaml_parser_set_scanner_error(parser, "while scanning a directive", - start_mark, "found unknown directive name") - return false - } - - // Eat the rest of the line including any comments. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - - for is_blank(parser.buffer, parser.buffer_pos) { - skip(parser) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - if parser.buffer[parser.buffer_pos] == '#' { - // [Go] Discard this inline comment for the time being. - //if !yaml_parser_scan_line_comment(parser, start_mark) { - // return false - //} - for !is_breakz(parser.buffer, parser.buffer_pos) { - skip(parser) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - } - - // Check if we are at the end of the line. - if !is_breakz(parser.buffer, parser.buffer_pos) { - yaml_parser_set_scanner_error(parser, "while scanning a directive", - start_mark, "did not find expected comment or line break") - return false - } - - // Eat a line break. - if is_break(parser.buffer, parser.buffer_pos) { - if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { - return false - } - skip_line(parser) - } - - return true -} - -// Scan the directive name. -// -// Scope: -// -// %YAML 1.1 # a comment \n -// ^^^^ -// %TAG !yaml! tag:yaml.org,2002: \n -// ^^^ -func yaml_parser_scan_directive_name(parser *yaml_parser_t, start_mark yaml_mark_t, name *[]byte) bool { - // Consume the directive name. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - - var s []byte - for is_alpha(parser.buffer, parser.buffer_pos) { - s = read(parser, s) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - // Check if the name is empty. - if len(s) == 0 { - yaml_parser_set_scanner_error(parser, "while scanning a directive", - start_mark, "could not find expected directive name") - return false - } - - // Check for a blank character after the name. - if !is_blankz(parser.buffer, parser.buffer_pos) { - yaml_parser_set_scanner_error(parser, "while scanning a directive", - start_mark, "found unexpected non-alphabetical character") - return false - } - *name = s - return true -} - -// Scan the value of VERSION-DIRECTIVE. -// -// Scope: -// -// %YAML 1.1 # a comment \n -// ^^^^^^ -func yaml_parser_scan_version_directive_value(parser *yaml_parser_t, start_mark yaml_mark_t, major, minor *int8) bool { - // Eat whitespaces.
- if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - for is_blank(parser.buffer, parser.buffer_pos) { - skip(parser) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - // Consume the major version number. - if !yaml_parser_scan_version_directive_number(parser, start_mark, major) { - return false - } - - // Eat '.'. - if parser.buffer[parser.buffer_pos] != '.' { - return yaml_parser_set_scanner_error(parser, "while scanning a %YAML directive", - start_mark, "did not find expected digit or '.' character") - } - - skip(parser) - - // Consume the minor version number. - if !yaml_parser_scan_version_directive_number(parser, start_mark, minor) { - return false - } - return true -} - -const max_number_length = 2 - -// Scan the version number of VERSION-DIRECTIVE. -// -// Scope: -// -// %YAML 1.1 # a comment \n -// ^ -// %YAML 1.1 # a comment \n -// ^ -func yaml_parser_scan_version_directive_number(parser *yaml_parser_t, start_mark yaml_mark_t, number *int8) bool { - - // Repeat while the next character is a digit. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - var value, length int8 - for is_digit(parser.buffer, parser.buffer_pos) { - // Check if the number is too long. - length++ - if length > max_number_length { - return yaml_parser_set_scanner_error(parser, "while scanning a %YAML directive", - start_mark, "found extremely long version number") - } - value = value*10 + int8(as_digit(parser.buffer, parser.buffer_pos)) - skip(parser) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - // Check if the number was present. - if length == 0 { - return yaml_parser_set_scanner_error(parser, "while scanning a %YAML directive", - start_mark, "did not find expected version number") - } - *number = value - return true -} - -// Scan the value of a TAG-DIRECTIVE token. -// -// Scope: -// -// %TAG !yaml! tag:yaml.org,2002: \n -// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -func yaml_parser_scan_tag_directive_value(parser *yaml_parser_t, start_mark yaml_mark_t, handle, prefix *[]byte) bool { - var handle_value, prefix_value []byte - - // Eat whitespaces. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - - for is_blank(parser.buffer, parser.buffer_pos) { - skip(parser) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - // Scan a handle. - if !yaml_parser_scan_tag_handle(parser, true, start_mark, &handle_value) { - return false - } - - // Expect a whitespace. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - if !is_blank(parser.buffer, parser.buffer_pos) { - yaml_parser_set_scanner_error(parser, "while scanning a %TAG directive", - start_mark, "did not find expected whitespace") - return false - } - - // Eat whitespaces. - for is_blank(parser.buffer, parser.buffer_pos) { - skip(parser) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - // Scan a prefix. - if !yaml_parser_scan_tag_uri(parser, true, nil, start_mark, &prefix_value) { - return false - } - - // Expect a whitespace or line break.
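// A %TAG directive binds the scanned handle to the scanned prefix for the
// rest of the document, so with the example directive
//
//	%TAG !e! tag:example.com,2000:app/
//
// a node tagged "!e!foo" later resolves to the full tag
// "tag:example.com,2000:app/foo". The handle and prefix here are example
// input only; any pair matching the grammar behaves the same way.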
- if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - if !is_blankz(parser.buffer, parser.buffer_pos) { - yaml_parser_set_scanner_error(parser, "while scanning a %TAG directive", - start_mark, "did not find expected whitespace or line break") - return false - } - - *handle = handle_value - *prefix = prefix_value - return true -} - -func yaml_parser_scan_anchor(parser *yaml_parser_t, token *yaml_token_t, typ yaml_token_type_t) bool { - var s []byte - - // Eat the indicator character. - start_mark := parser.mark - skip(parser) - - // Consume the value. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - - for is_alpha(parser.buffer, parser.buffer_pos) { - s = read(parser, s) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - end_mark := parser.mark - - /* - * Check if length of the anchor is greater than 0 and it is followed by - * a whitespace character or one of the indicators: - * - * '?', ':', ',', ']', '}', '%', '@', '`'. - */ - - if len(s) == 0 || - !(is_blankz(parser.buffer, parser.buffer_pos) || parser.buffer[parser.buffer_pos] == '?' || - parser.buffer[parser.buffer_pos] == ':' || parser.buffer[parser.buffer_pos] == ',' || - parser.buffer[parser.buffer_pos] == ']' || parser.buffer[parser.buffer_pos] == '}' || - parser.buffer[parser.buffer_pos] == '%' || parser.buffer[parser.buffer_pos] == '@' || - parser.buffer[parser.buffer_pos] == '`') { - context := "while scanning an alias" - if typ == yaml_ANCHOR_TOKEN { - context = "while scanning an anchor" - } - yaml_parser_set_scanner_error(parser, context, start_mark, - "did not find expected alphabetic or numeric character") - return false - } - - // Create a token. - *token = yaml_token_t{ - typ: typ, - start_mark: start_mark, - end_mark: end_mark, - value: s, - } - - return true -} - -/* - * Scan a TAG token. - */ - -func yaml_parser_scan_tag(parser *yaml_parser_t, token *yaml_token_t) bool { - var handle, suffix []byte - - start_mark := parser.mark - - // Check if the tag is in the canonical form. - if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { - return false - } - - if parser.buffer[parser.buffer_pos+1] == '<' { - // Keep the handle as '' - - // Eat '!<' - skip(parser) - skip(parser) - - // Consume the tag value. - if !yaml_parser_scan_tag_uri(parser, false, nil, start_mark, &suffix) { - return false - } - - // Check for '>' and eat it. - if parser.buffer[parser.buffer_pos] != '>' { - yaml_parser_set_scanner_error(parser, "while scanning a tag", - start_mark, "did not find the expected '>'") - return false - } - - skip(parser) - } else { - // The tag has either the '!suffix' or the '!handle!suffix' form. - - // First, try to scan a handle. - if !yaml_parser_scan_tag_handle(parser, false, start_mark, &handle) { - return false - } - - // Check if it is, indeed, a handle. - if handle[0] == '!' && len(handle) > 1 && handle[len(handle)-1] == '!' { - // Scan the suffix now. - if !yaml_parser_scan_tag_uri(parser, false, nil, start_mark, &suffix) { - return false - } - } else { - // It wasn't a handle after all. Scan the rest of the tag. - if !yaml_parser_scan_tag_uri(parser, false, handle, start_mark, &suffix) { - return false - } - - // Set the handle to '!'. - handle = []byte{'!'} - - // A special case: the '!' tag. Set the handle to '' and the - // suffix to '!'. - if len(suffix) == 0 { - handle, suffix = suffix, handle - } - } - } - - // Check the character which ends the tag.
- if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - if !is_blankz(parser.buffer, parser.buffer_pos) { - yaml_parser_set_scanner_error(parser, "while scanning a tag", - start_mark, "did not find expected whitespace or line break") - return false - } - - end_mark := parser.mark - - // Create a token. - *token = yaml_token_t{ - typ: yaml_TAG_TOKEN, - start_mark: start_mark, - end_mark: end_mark, - value: handle, - suffix: suffix, - } - return true -} - -// Scan a tag handle. -func yaml_parser_scan_tag_handle(parser *yaml_parser_t, directive bool, start_mark yaml_mark_t, handle *[]byte) bool { - // Check the initial '!' character. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - if parser.buffer[parser.buffer_pos] != '!' { - yaml_parser_set_scanner_tag_error(parser, directive, - start_mark, "did not find expected '!'") - return false - } - - var s []byte - - // Copy the '!' character. - s = read(parser, s) - - // Copy all subsequent alphabetical and numerical characters. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - for is_alpha(parser.buffer, parser.buffer_pos) { - s = read(parser, s) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - // Check if the trailing character is '!' and copy it. - if parser.buffer[parser.buffer_pos] == '!' { - s = read(parser, s) - } else { - // It's either the '!' tag or not really a tag handle. If it's a %TAG - // directive, it's an error. If it's a tag token, it must be part of the URI. - if directive && string(s) != "!" { - yaml_parser_set_scanner_tag_error(parser, directive, - start_mark, "did not find expected '!'") - return false - } - } - - *handle = s - return true -} - -// Scan a tag. -func yaml_parser_scan_tag_uri(parser *yaml_parser_t, directive bool, head []byte, start_mark yaml_mark_t, uri *[]byte) bool { - //size_t length = head ? strlen((char *)head) : 0 - var s []byte - hasTag := len(head) > 0 - - // Copy the head if needed. - // - // Note that we don't copy the leading '!' character. - if len(head) > 1 { - s = append(s, head[1:]...) - } - - // Scan the tag. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - - // The set of characters that may appear in URI is as follows: - // - // '0'-'9', 'A'-'Z', 'a'-'z', '_', '-', ';', '/', '?', ':', '@', '&', - // '=', '+', '$', ',', '.', '!', '~', '*', '\'', '(', ')', '[', ']', - // '%'. - // [Go] TODO Convert this into more reasonable logic. - for is_alpha(parser.buffer, parser.buffer_pos) || parser.buffer[parser.buffer_pos] == ';' || - parser.buffer[parser.buffer_pos] == '/' || parser.buffer[parser.buffer_pos] == '?' || - parser.buffer[parser.buffer_pos] == ':' || parser.buffer[parser.buffer_pos] == '@' || - parser.buffer[parser.buffer_pos] == '&' || parser.buffer[parser.buffer_pos] == '=' || - parser.buffer[parser.buffer_pos] == '+' || parser.buffer[parser.buffer_pos] == '$' || - parser.buffer[parser.buffer_pos] == ',' || parser.buffer[parser.buffer_pos] == '.' || - parser.buffer[parser.buffer_pos] == '!' || parser.buffer[parser.buffer_pos] == '~' || - parser.buffer[parser.buffer_pos] == '*' || parser.buffer[parser.buffer_pos] == '\'' || - parser.buffer[parser.buffer_pos] == '(' || parser.buffer[parser.buffer_pos] == ')' || - parser.buffer[parser.buffer_pos] == '[' || parser.buffer[parser.buffer_pos] == ']' || - parser.buffer[parser.buffer_pos] == '%' { - // Check if it is a URI-escape sequence.
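// A URI escape is '%' followed by two hex digits, and a multi-byte UTF-8
// character arrives as several consecutive escapes. For instance a tag
// suffix written "caf%C3%A9" decodes to "café": 0xC3 announces a two-byte
// sequence and 0xA9 must then be a continuation octet of the form
// 10xxxxxx. The decoder below enforces exactly that shape; for comparison,
// net/url's PathUnescape would perform the percent-decoding step but not
// the UTF-8 well-formedness check done here.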
- if parser.buffer[parser.buffer_pos] == '%' { - if !yaml_parser_scan_uri_escapes(parser, directive, start_mark, &s) { - return false - } - } else { - s = read(parser, s) - } - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - hasTag = true - } - - if !hasTag { - yaml_parser_set_scanner_tag_error(parser, directive, - start_mark, "did not find expected tag URI") - return false - } - *uri = s - return true -} - -// Decode a URI-escape sequence corresponding to a single UTF-8 character. -func yaml_parser_scan_uri_escapes(parser *yaml_parser_t, directive bool, start_mark yaml_mark_t, s *[]byte) bool { - - // Decode the required number of characters. - w := 1024 - for w > 0 { - // Check for a URI-escaped octet. - if parser.unread < 3 && !yaml_parser_update_buffer(parser, 3) { - return false - } - - if !(parser.buffer[parser.buffer_pos] == '%' && - is_hex(parser.buffer, parser.buffer_pos+1) && - is_hex(parser.buffer, parser.buffer_pos+2)) { - return yaml_parser_set_scanner_tag_error(parser, directive, - start_mark, "did not find URI escaped octet") - } - - // Get the octet. - octet := byte((as_hex(parser.buffer, parser.buffer_pos+1) << 4) + as_hex(parser.buffer, parser.buffer_pos+2)) - - // If it is the leading octet, determine the length of the UTF-8 sequence. - if w == 1024 { - w = width(octet) - if w == 0 { - return yaml_parser_set_scanner_tag_error(parser, directive, - start_mark, "found an incorrect leading UTF-8 octet") - } - } else { - // Check if the trailing octet is correct. - if octet&0xC0 != 0x80 { - return yaml_parser_set_scanner_tag_error(parser, directive, - start_mark, "found an incorrect trailing UTF-8 octet") - } - } - - // Copy the octet and move the pointers. - *s = append(*s, octet) - skip(parser) - skip(parser) - skip(parser) - w-- - } - return true -} - -// Scan a block scalar. -func yaml_parser_scan_block_scalar(parser *yaml_parser_t, token *yaml_token_t, literal bool) bool { - // Eat the indicator '|' or '>'. - start_mark := parser.mark - skip(parser) - - // Scan the additional block scalar indicators. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - - // Check for a chomping indicator. - var chomping, increment int - if parser.buffer[parser.buffer_pos] == '+' || parser.buffer[parser.buffer_pos] == '-' { - // Set the chomping method and eat the indicator. - if parser.buffer[parser.buffer_pos] == '+' { - chomping = +1 - } else { - chomping = -1 - } - skip(parser) - - // Check for an indentation indicator. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - if is_digit(parser.buffer, parser.buffer_pos) { - // Check that the indentation is greater than 0. - if parser.buffer[parser.buffer_pos] == '0' { - yaml_parser_set_scanner_error(parser, "while scanning a block scalar", - start_mark, "found an indentation indicator equal to 0") - return false - } - - // Get the indentation level and eat the indicator. - increment = as_digit(parser.buffer, parser.buffer_pos) - skip(parser) - } - - } else if is_digit(parser.buffer, parser.buffer_pos) { - // Do the same as above, but in the opposite order.
- - if parser.buffer[parser.buffer_pos] == '0' { - yaml_parser_set_scanner_error(parser, "while scanning a block scalar", - start_mark, "found an indentation indicator equal to 0") - return false - } - increment = as_digit(parser.buffer, parser.buffer_pos) - skip(parser) - - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - if parser.buffer[parser.buffer_pos] == '+' || parser.buffer[parser.buffer_pos] == '-' { - if parser.buffer[parser.buffer_pos] == '+' { - chomping = +1 - } else { - chomping = -1 - } - skip(parser) - } - } - - // Eat whitespaces and comments to the end of the line. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - for is_blank(parser.buffer, parser.buffer_pos) { - skip(parser) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - if parser.buffer[parser.buffer_pos] == '#' { - if !yaml_parser_scan_line_comment(parser, start_mark) { - return false - } - for !is_breakz(parser.buffer, parser.buffer_pos) { - skip(parser) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - } - - // Check if we are at the end of the line. - if !is_breakz(parser.buffer, parser.buffer_pos) { - yaml_parser_set_scanner_error(parser, "while scanning a block scalar", - start_mark, "did not find expected comment or line break") - return false - } - - // Eat a line break. - if is_break(parser.buffer, parser.buffer_pos) { - if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { - return false - } - skip_line(parser) - } - - end_mark := parser.mark - - // Set the indentation level if it was specified. - var indent int - if increment > 0 { - if parser.indent >= 0 { - indent = parser.indent + increment - } else { - indent = increment - } - } - - // Scan the leading line breaks and determine the indentation level if needed. - var s, leading_break, trailing_breaks []byte - if !yaml_parser_scan_block_scalar_breaks(parser, &indent, &trailing_breaks, start_mark, &end_mark) { - return false - } - - // Scan the block scalar content. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - var leading_blank, trailing_blank bool - for parser.mark.column == indent && !is_z(parser.buffer, parser.buffer_pos) { - // We are at the beginning of a non-empty line. - - // Is it a trailing whitespace? - trailing_blank = is_blank(parser.buffer, parser.buffer_pos) - - // Check if we need to fold the leading line break. - if !literal && !leading_blank && !trailing_blank && len(leading_break) > 0 && leading_break[0] == '\n' { - // Do we need to join the lines by space? - if len(trailing_breaks) == 0 { - s = append(s, ' ') - } - } else { - s = append(s, leading_break...) - } - leading_break = leading_break[:0] - - // Append the remaining line breaks. - s = append(s, trailing_breaks...) - trailing_breaks = trailing_breaks[:0] - - // Is it a leading whitespace? - leading_blank = is_blank(parser.buffer, parser.buffer_pos) - - // Consume the current line. - for !is_breakz(parser.buffer, parser.buffer_pos) { - s = read(parser, s) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - // Consume the line break. - if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { - return false - } - - leading_break = read_line(parser, leading_break) - - // Eat the following indentation spaces and line breaks. 
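// The header indicators parsed above combine freely: '|' vs '>' picks the
// literal or folded style, a digit fixes the indentation increment, and
// '+'/'-' selects the chomping mode that the tail handling after this loop
// applies:
//
//	out: |-   # strip: drop all trailing newlines   (chomping == -1)
//	out: |    # clip: keep one trailing newline     (chomping ==  0)
//	out: |+   # keep: keep every trailing newline   (chomping == +1)
//	out: >2   # folded, content indented 2 columns past the parent
//
// Clip is the zero value of chomping, which is why it never needs to be
// set explicitly.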
- if !yaml_parser_scan_block_scalar_breaks(parser, &indent, &trailing_breaks, start_mark, &end_mark) { - return false - } - } - - // Chomp the tail. - if chomping != -1 { - s = append(s, leading_break...) - } - if chomping == 1 { - s = append(s, trailing_breaks...) - } - - // Create a token. - *token = yaml_token_t{ - typ: yaml_SCALAR_TOKEN, - start_mark: start_mark, - end_mark: end_mark, - value: s, - style: yaml_LITERAL_SCALAR_STYLE, - } - if !literal { - token.style = yaml_FOLDED_SCALAR_STYLE - } - return true -} - -// Scan indentation spaces and line breaks for a block scalar. Determine the -// indentation level if needed. -func yaml_parser_scan_block_scalar_breaks(parser *yaml_parser_t, indent *int, breaks *[]byte, start_mark yaml_mark_t, end_mark *yaml_mark_t) bool { - *end_mark = parser.mark - - // Eat the indentation spaces and line breaks. - max_indent := 0 - for { - // Eat the indentation spaces. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - for (*indent == 0 || parser.mark.column < *indent) && is_space(parser.buffer, parser.buffer_pos) { - skip(parser) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - if parser.mark.column > max_indent { - max_indent = parser.mark.column - } - - // Check for a tab character messing the indentation. - if (*indent == 0 || parser.mark.column < *indent) && is_tab(parser.buffer, parser.buffer_pos) { - return yaml_parser_set_scanner_error(parser, "while scanning a block scalar", - start_mark, "found a tab character where an indentation space is expected") - } - - // Have we found a non-empty line? - if !is_break(parser.buffer, parser.buffer_pos) { - break - } - - // Consume the line break. - if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { - return false - } - // [Go] Should really be returning breaks instead. - *breaks = read_line(parser, *breaks) - *end_mark = parser.mark - } - - // Determine the indentation level if needed. - if *indent == 0 { - *indent = max_indent - if *indent < parser.indent+1 { - *indent = parser.indent + 1 - } - if *indent < 1 { - *indent = 1 - } - } - return true -} - -// Scan a quoted scalar. -func yaml_parser_scan_flow_scalar(parser *yaml_parser_t, token *yaml_token_t, single bool) bool { - // Eat the left quote. - start_mark := parser.mark - skip(parser) - - // Consume the content of the quoted scalar. - var s, leading_break, trailing_breaks, whitespaces []byte - for { - // Check that there are no document indicators at the beginning of the line. - if parser.unread < 4 && !yaml_parser_update_buffer(parser, 4) { - return false - } - - if parser.mark.column == 0 && - ((parser.buffer[parser.buffer_pos+0] == '-' && - parser.buffer[parser.buffer_pos+1] == '-' && - parser.buffer[parser.buffer_pos+2] == '-') || - (parser.buffer[parser.buffer_pos+0] == '.' && - parser.buffer[parser.buffer_pos+1] == '.' && - parser.buffer[parser.buffer_pos+2] == '.')) && - is_blankz(parser.buffer, parser.buffer_pos+3) { - yaml_parser_set_scanner_error(parser, "while scanning a quoted scalar", - start_mark, "found unexpected document indicator") - return false - } - - // Check for EOF. - if is_z(parser.buffer, parser.buffer_pos) { - yaml_parser_set_scanner_error(parser, "while scanning a quoted scalar", - start_mark, "found unexpected end of stream") - return false - } - - // Consume non-blank characters. 
- leading_blanks := false - for !is_blankz(parser.buffer, parser.buffer_pos) { - if single && parser.buffer[parser.buffer_pos] == '\'' && parser.buffer[parser.buffer_pos+1] == '\'' { - // It is an escaped single quote. - s = append(s, '\'') - skip(parser) - skip(parser) - - } else if single && parser.buffer[parser.buffer_pos] == '\'' { - // It is a right single quote. - break - } else if !single && parser.buffer[parser.buffer_pos] == '"' { - // It is a right double quote. - break - - } else if !single && parser.buffer[parser.buffer_pos] == '\\' && is_break(parser.buffer, parser.buffer_pos+1) { - // It is an escaped line break. - if parser.unread < 3 && !yaml_parser_update_buffer(parser, 3) { - return false - } - skip(parser) - skip_line(parser) - leading_blanks = true - break - - } else if !single && parser.buffer[parser.buffer_pos] == '\\' { - // It is an escape sequence. - code_length := 0 - - // Check the escape character. - switch parser.buffer[parser.buffer_pos+1] { - case '0': - s = append(s, 0) - case 'a': - s = append(s, '\x07') - case 'b': - s = append(s, '\x08') - case 't', '\t': - s = append(s, '\x09') - case 'n': - s = append(s, '\x0A') - case 'v': - s = append(s, '\x0B') - case 'f': - s = append(s, '\x0C') - case 'r': - s = append(s, '\x0D') - case 'e': - s = append(s, '\x1B') - case ' ': - s = append(s, '\x20') - case '"': - s = append(s, '"') - case '\'': - s = append(s, '\'') - case '\\': - s = append(s, '\\') - case 'N': // NEL (#x85) - s = append(s, '\xC2') - s = append(s, '\x85') - case '_': // #xA0 - s = append(s, '\xC2') - s = append(s, '\xA0') - case 'L': // LS (#x2028) - s = append(s, '\xE2') - s = append(s, '\x80') - s = append(s, '\xA8') - case 'P': // PS (#x2029) - s = append(s, '\xE2') - s = append(s, '\x80') - s = append(s, '\xA9') - case 'x': - code_length = 2 - case 'u': - code_length = 4 - case 'U': - code_length = 8 - default: - yaml_parser_set_scanner_error(parser, "while parsing a quoted scalar", - start_mark, "found unknown escape character") - return false - } - - skip(parser) - skip(parser) - - // Consume an arbitrary escape code. - if code_length > 0 { - var value int - - // Scan the character value. - if parser.unread < code_length && !yaml_parser_update_buffer(parser, code_length) { - return false - } - for k := 0; k < code_length; k++ { - if !is_hex(parser.buffer, parser.buffer_pos+k) { - yaml_parser_set_scanner_error(parser, "while parsing a quoted scalar", - start_mark, "did not find expected hexadecimal number") - return false - } - value = (value << 4) + as_hex(parser.buffer, parser.buffer_pos+k) - } - - // Check the value and write the character. - if (value >= 0xD800 && value <= 0xDFFF) || value > 0x10FFFF { - yaml_parser_set_scanner_error(parser, "while parsing a quoted scalar", - start_mark, "found invalid Unicode character escape code") - return false - } - if value <= 0x7F { - s = append(s, byte(value)) - } else if value <= 0x7FF { - s = append(s, byte(0xC0+(value>>6))) - s = append(s, byte(0x80+(value&0x3F))) - } else if value <= 0xFFFF { - s = append(s, byte(0xE0+(value>>12))) - s = append(s, byte(0x80+((value>>6)&0x3F))) - s = append(s, byte(0x80+(value&0x3F))) - } else { - s = append(s, byte(0xF0+(value>>18))) - s = append(s, byte(0x80+((value>>12)&0x3F))) - s = append(s, byte(0x80+((value>>6)&0x3F))) - s = append(s, byte(0x80+(value&0x3F))) - } - - // Advance the pointer. - for k := 0; k < code_length; k++ { - skip(parser) - } - } - } else { - // It is a non-escaped non-blank character.
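// The \x, \u and \U branches above hand-encode the decoded code point as
// UTF-8; the four size cases mirror the standard library. A sketch of the
// equivalent using unicode/utf8 (illustrative only, not what this vendored
// code does):
//
//	buf := make([]byte, utf8.UTFMax)
//	n := utf8.EncodeRune(buf, rune(value)) // value was range-checked above
//	s = append(s, buf[:n]...)
//
// The explicit surrogate rejection (0xD800..0xDFFF) must still happen
// first, since surrogates are not valid Unicode scalar values.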
- s = read(parser, s) - } - if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { - return false - } - } - - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - - // Check if we are at the end of the scalar. - if single { - if parser.buffer[parser.buffer_pos] == '\'' { - break - } - } else { - if parser.buffer[parser.buffer_pos] == '"' { - break - } - } - - // Consume blank characters. - for is_blank(parser.buffer, parser.buffer_pos) || is_break(parser.buffer, parser.buffer_pos) { - if is_blank(parser.buffer, parser.buffer_pos) { - // Consume a space or a tab character. - if !leading_blanks { - whitespaces = read(parser, whitespaces) - } else { - skip(parser) - } - } else { - if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { - return false - } - - // Check if it is a first line break. - if !leading_blanks { - whitespaces = whitespaces[:0] - leading_break = read_line(parser, leading_break) - leading_blanks = true - } else { - trailing_breaks = read_line(parser, trailing_breaks) - } - } - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - // Join the whitespaces or fold line breaks. - if leading_blanks { - // Do we need to fold line breaks? - if len(leading_break) > 0 && leading_break[0] == '\n' { - if len(trailing_breaks) == 0 { - s = append(s, ' ') - } else { - s = append(s, trailing_breaks...) - } - } else { - s = append(s, leading_break...) - s = append(s, trailing_breaks...) - } - trailing_breaks = trailing_breaks[:0] - leading_break = leading_break[:0] - } else { - s = append(s, whitespaces...) - whitespaces = whitespaces[:0] - } - } - - // Eat the right quote. - skip(parser) - end_mark := parser.mark - - // Create a token. - *token = yaml_token_t{ - typ: yaml_SCALAR_TOKEN, - start_mark: start_mark, - end_mark: end_mark, - value: s, - style: yaml_SINGLE_QUOTED_SCALAR_STYLE, - } - if !single { - token.style = yaml_DOUBLE_QUOTED_SCALAR_STYLE - } - return true -} - -// Scan a plain scalar. -func yaml_parser_scan_plain_scalar(parser *yaml_parser_t, token *yaml_token_t) bool { - - var s, leading_break, trailing_breaks, whitespaces []byte - var leading_blanks bool - var indent = parser.indent + 1 - - start_mark := parser.mark - end_mark := parser.mark - - // Consume the content of the plain scalar. - for { - // Check for a document indicator. - if parser.unread < 4 && !yaml_parser_update_buffer(parser, 4) { - return false - } - if parser.mark.column == 0 && - ((parser.buffer[parser.buffer_pos+0] == '-' && - parser.buffer[parser.buffer_pos+1] == '-' && - parser.buffer[parser.buffer_pos+2] == '-') || - (parser.buffer[parser.buffer_pos+0] == '.' && - parser.buffer[parser.buffer_pos+1] == '.' && - parser.buffer[parser.buffer_pos+2] == '.')) && - is_blankz(parser.buffer, parser.buffer_pos+3) { - break - } - - // Check for a comment. - if parser.buffer[parser.buffer_pos] == '#' { - break - } - - // Consume non-blank characters. - for !is_blankz(parser.buffer, parser.buffer_pos) { - - // Check for indicators that may end a plain scalar. - if (parser.buffer[parser.buffer_pos] == ':' && is_blankz(parser.buffer, parser.buffer_pos+1)) || - (parser.flow_level > 0 && - (parser.buffer[parser.buffer_pos] == ',' || - parser.buffer[parser.buffer_pos] == '[' || - parser.buffer[parser.buffer_pos] == ']' || parser.buffer[parser.buffer_pos] == '{' || - parser.buffer[parser.buffer_pos] == '}')) { - break - } - - // Check if we need to join whitespaces and breaks. 
- if leading_blanks || len(whitespaces) > 0 { - if leading_blanks { - // Do we need to fold line breaks? - if leading_break[0] == '\n' { - if len(trailing_breaks) == 0 { - s = append(s, ' ') - } else { - s = append(s, trailing_breaks...) - } - } else { - s = append(s, leading_break...) - s = append(s, trailing_breaks...) - } - trailing_breaks = trailing_breaks[:0] - leading_break = leading_break[:0] - leading_blanks = false - } else { - s = append(s, whitespaces...) - whitespaces = whitespaces[:0] - } - } - - // Copy the character. - s = read(parser, s) - - end_mark = parser.mark - if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { - return false - } - } - - // Is it the end? - if !(is_blank(parser.buffer, parser.buffer_pos) || is_break(parser.buffer, parser.buffer_pos)) { - break - } - - // Consume blank characters. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - - for is_blank(parser.buffer, parser.buffer_pos) || is_break(parser.buffer, parser.buffer_pos) { - if is_blank(parser.buffer, parser.buffer_pos) { - - // Check for tab characters that abuse indentation. - if leading_blanks && parser.mark.column < indent && is_tab(parser.buffer, parser.buffer_pos) { - yaml_parser_set_scanner_error(parser, "while scanning a plain scalar", - start_mark, "found a tab character that violates indentation") - return false - } - - // Consume a space or a tab character. - if !leading_blanks { - whitespaces = read(parser, whitespaces) - } else { - skip(parser) - } - } else { - if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { - return false - } - - // Check if it is a first line break. - if !leading_blanks { - whitespaces = whitespaces[:0] - leading_break = read_line(parser, leading_break) - leading_blanks = true - } else { - trailing_breaks = read_line(parser, trailing_breaks) - } - } - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - // Check indentation level. - if parser.flow_level == 0 && parser.mark.column < indent { - break - } - } - - // Create a token. - *token = yaml_token_t{ - typ: yaml_SCALAR_TOKEN, - start_mark: start_mark, - end_mark: end_mark, - value: s, - style: yaml_PLAIN_SCALAR_STYLE, - } - - // Note that we change the 'simple_key_allowed' flag. 
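// A multi-line plain scalar folds its line breaks, e.g.
//
//	key: one
//	  two
//
// scans to the single SCALAR "one two": a lone break becomes a space,
// while an empty line contributes a real '\n', exactly as in the quoted
// case above. And only a scalar that actually consumed leading line
// breaks may be followed by a new simple key, hence the flag change below.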
- if leading_blanks { - parser.simple_key_allowed = true - } - return true -} - -func yaml_parser_scan_line_comment(parser *yaml_parser_t, token_mark yaml_mark_t) bool { - if parser.newlines > 0 { - return true - } - - var start_mark yaml_mark_t - var text []byte - - for peek := 0; peek < 512; peek++ { - if parser.unread < peek+1 && !yaml_parser_update_buffer(parser, peek+1) { - break - } - if is_blank(parser.buffer, parser.buffer_pos+peek) { - continue - } - if parser.buffer[parser.buffer_pos+peek] == '#' { - seen := parser.mark.index + peek - for { - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - if is_breakz(parser.buffer, parser.buffer_pos) { - if parser.mark.index >= seen { - break - } - if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { - return false - } - skip_line(parser) - } else if parser.mark.index >= seen { - if len(text) == 0 { - start_mark = parser.mark - } - text = read(parser, text) - } else { - skip(parser) - } - } - } - break - } - if len(text) > 0 { - parser.comments = append(parser.comments, yaml_comment_t{ - token_mark: token_mark, - start_mark: start_mark, - line: text, - }) - } - return true -} - -func yaml_parser_scan_comments(parser *yaml_parser_t, scan_mark yaml_mark_t) bool { - token := parser.tokens[len(parser.tokens)-1] - - if token.typ == yaml_FLOW_ENTRY_TOKEN && len(parser.tokens) > 1 { - token = parser.tokens[len(parser.tokens)-2] - } - - var token_mark = token.start_mark - var start_mark yaml_mark_t - var next_indent = parser.indent - if next_indent < 0 { - next_indent = 0 - } - - var recent_empty = false - var first_empty = parser.newlines <= 1 - - var line = parser.mark.line - var column = parser.mark.column - - var text []byte - - // The foot line is the place where a comment must start to - // still be considered as a foot of the prior content. - // If there's some content in the currently parsed line, then - // the foot is the line below it. - var foot_line = -1 - if scan_mark.line > 0 { - foot_line = parser.mark.line - parser.newlines + 1 - if parser.newlines == 0 && parser.mark.column > 1 { - foot_line++ - } - } - - var peek = 0 - for ; peek < 512; peek++ { - if parser.unread < peek+1 && !yaml_parser_update_buffer(parser, peek+1) { - break - } - column++ - if is_blank(parser.buffer, parser.buffer_pos+peek) { - continue - } - c := parser.buffer[parser.buffer_pos+peek] - var close_flow = parser.flow_level > 0 && (c == ']' || c == '}') - if close_flow || is_breakz(parser.buffer, parser.buffer_pos+peek) { - // Got line break or terminator. - if close_flow || !recent_empty { - if close_flow || first_empty && (start_mark.line == foot_line && token.typ != yaml_VALUE_TOKEN || start_mark.column-1 < next_indent) { - // This is the first empty line and there were no empty lines before, - // so this initial part of the comment is a foot of the prior token - // instead of being a head for the following one. Split it up. - // Alternatively, this might also be the last comment inside a flow - // scope, so it must be a footer. - if len(text) > 0 { - if start_mark.column-1 < next_indent { - // If dedented it's unrelated to the prior token. 
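// yaml.v3 classifies every comment by position: a head comment precedes a
// node, a line comment shares its line, and a foot comment trails at the
// same or lesser indentation, e.g.
//
//	# head comment of "a"
//	a: 1  # line comment of "a"
//	# foot comment of "a" (separated from "b" by the blank line)
//
//	b: 2
//
// The column and line comparisons in this loop are what decide the bucket
// for each scanned comment.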
- token_mark = start_mark - } - parser.comments = append(parser.comments, yaml_comment_t{ - scan_mark: scan_mark, - token_mark: token_mark, - start_mark: start_mark, - end_mark: yaml_mark_t{parser.mark.index + peek, line, column}, - foot: text, - }) - scan_mark = yaml_mark_t{parser.mark.index + peek, line, column} - token_mark = scan_mark - text = nil - } - } else { - if len(text) > 0 && parser.buffer[parser.buffer_pos+peek] != 0 { - text = append(text, '\n') - } - } - } - if !is_break(parser.buffer, parser.buffer_pos+peek) { - break - } - first_empty = false - recent_empty = true - column = 0 - line++ - continue - } - - if len(text) > 0 && (close_flow || column-1 < next_indent && column != start_mark.column) { - // The comment at the different indentation is a foot of the - // preceding data rather than a head of the upcoming one. - parser.comments = append(parser.comments, yaml_comment_t{ - scan_mark: scan_mark, - token_mark: token_mark, - start_mark: start_mark, - end_mark: yaml_mark_t{parser.mark.index + peek, line, column}, - foot: text, - }) - scan_mark = yaml_mark_t{parser.mark.index + peek, line, column} - token_mark = scan_mark - text = nil - } - - if parser.buffer[parser.buffer_pos+peek] != '#' { - break - } - - if len(text) == 0 { - start_mark = yaml_mark_t{parser.mark.index + peek, line, column} - } else { - text = append(text, '\n') - } - - recent_empty = false - - // Consume until after the consumed comment line. - seen := parser.mark.index + peek - for { - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - if is_breakz(parser.buffer, parser.buffer_pos) { - if parser.mark.index >= seen { - break - } - if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { - return false - } - skip_line(parser) - } else if parser.mark.index >= seen { - text = read(parser, text) - } else { - skip(parser) - } - } - - peek = 0 - column = 0 - line = parser.mark.line - next_indent = parser.indent - if next_indent < 0 { - next_indent = 0 - } - } - - if len(text) > 0 { - parser.comments = append(parser.comments, yaml_comment_t{ - scan_mark: scan_mark, - token_mark: start_mark, - start_mark: start_mark, - end_mark: yaml_mark_t{parser.mark.index + peek - 1, line, column}, - head: text, - }) - } - return true -} diff --git a/cli/internal/yaml/sorter.go b/cli/internal/yaml/sorter.go deleted file mode 100644 index 9210ece7e9723..0000000000000 --- a/cli/internal/yaml/sorter.go +++ /dev/null @@ -1,134 +0,0 @@ -// -// Copyright (c) 2011-2019 Canonical Ltd -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
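The sorter.go file deleted here (its body follows below) supplies the digit-aware key ordering that Marshal applies to map keys, so "item2" sorts before "item10". A quick illustration, assuming the upstream gopkg.in/yaml.v3 behavior this vendored copy tracks:

```go
package main

import (
	"fmt"

	"gopkg.in/yaml.v3"
)

func main() {
	// Map keys are emitted using the digit-aware comparator defined below,
	// not plain lexicographic order (which would put "item10" before "item2").
	out, err := yaml.Marshal(map[string]int{"item10": 3, "item2": 2, "item1": 1})
	if err != nil {
		panic(err)
	}
	fmt.Print(string(out))
	// item1: 1
	// item2: 2
	// item10: 3
}
```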
- -package yaml - -import ( - "reflect" - "unicode" -) - -type keyList []reflect.Value - -func (l keyList) Len() int { return len(l) } -func (l keyList) Swap(i, j int) { l[i], l[j] = l[j], l[i] } -func (l keyList) Less(i, j int) bool { - a := l[i] - b := l[j] - ak := a.Kind() - bk := b.Kind() - for (ak == reflect.Interface || ak == reflect.Ptr) && !a.IsNil() { - a = a.Elem() - ak = a.Kind() - } - for (bk == reflect.Interface || bk == reflect.Ptr) && !b.IsNil() { - b = b.Elem() - bk = b.Kind() - } - af, aok := keyFloat(a) - bf, bok := keyFloat(b) - if aok && bok { - if af != bf { - return af < bf - } - if ak != bk { - return ak < bk - } - return numLess(a, b) - } - if ak != reflect.String || bk != reflect.String { - return ak < bk - } - ar, br := []rune(a.String()), []rune(b.String()) - digits := false - for i := 0; i < len(ar) && i < len(br); i++ { - if ar[i] == br[i] { - digits = unicode.IsDigit(ar[i]) - continue - } - al := unicode.IsLetter(ar[i]) - bl := unicode.IsLetter(br[i]) - if al && bl { - return ar[i] < br[i] - } - if al || bl { - if digits { - return al - } else { - return bl - } - } - var ai, bi int - var an, bn int64 - if ar[i] == '0' || br[i] == '0' { - for j := i - 1; j >= 0 && unicode.IsDigit(ar[j]); j-- { - if ar[j] != '0' { - an = 1 - bn = 1 - break - } - } - } - for ai = i; ai < len(ar) && unicode.IsDigit(ar[ai]); ai++ { - an = an*10 + int64(ar[ai]-'0') - } - for bi = i; bi < len(br) && unicode.IsDigit(br[bi]); bi++ { - bn = bn*10 + int64(br[bi]-'0') - } - if an != bn { - return an < bn - } - if ai != bi { - return ai < bi - } - return ar[i] < br[i] - } - return len(ar) < len(br) -} - -// keyFloat returns a float value for v if it is a number/bool -// and whether it is a number/bool or not. -func keyFloat(v reflect.Value) (f float64, ok bool) { - switch v.Kind() { - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - return float64(v.Int()), true - case reflect.Float32, reflect.Float64: - return v.Float(), true - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: - return float64(v.Uint()), true - case reflect.Bool: - if v.Bool() { - return 1, true - } - return 0, true - } - return 0, false -} - -// numLess returns whether a < b. -// a and b must necessarily have the same kind. 
-func numLess(a, b reflect.Value) bool { - switch a.Kind() { - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - return a.Int() < b.Int() - case reflect.Float32, reflect.Float64: - return a.Float() < b.Float() - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: - return a.Uint() < b.Uint() - case reflect.Bool: - return !a.Bool() && b.Bool() - } - panic("not a number") -} diff --git a/cli/internal/yaml/writerc.go b/cli/internal/yaml/writerc.go deleted file mode 100644 index 266d0b092c031..0000000000000 --- a/cli/internal/yaml/writerc.go +++ /dev/null @@ -1,48 +0,0 @@ -// -// Copyright (c) 2011-2019 Canonical Ltd -// Copyright (c) 2006-2010 Kirill Simonov -// -// Permission is hereby granted, free of charge, to any person obtaining a copy of -// this software and associated documentation files (the "Software"), to deal in -// the Software without restriction, including without limitation the rights to -// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -// of the Software, and to permit persons to whom the Software is furnished to do -// so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in all -// copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -// SOFTWARE. - -package yaml - -// Set the writer error and return false. -func yaml_emitter_set_writer_error(emitter *yaml_emitter_t, problem string) bool { - emitter.error = yaml_WRITER_ERROR - emitter.problem = problem - return false -} - -// Flush the output buffer. -func yaml_emitter_flush(emitter *yaml_emitter_t) bool { - if emitter.write_handler == nil { - panic("write handler not set") - } - - // Check if the buffer is empty. - if emitter.buffer_pos == 0 { - return true - } - - if err := emitter.write_handler(emitter, emitter.buffer[:emitter.buffer_pos]); err != nil { - return yaml_emitter_set_writer_error(emitter, "write error: "+err.Error()) - } - emitter.buffer_pos = 0 - return true -} diff --git a/cli/internal/yaml/yaml.go b/cli/internal/yaml/yaml.go deleted file mode 100644 index f0bedf3d63c96..0000000000000 --- a/cli/internal/yaml/yaml.go +++ /dev/null @@ -1,693 +0,0 @@ -// -// Copyright (c) 2011-2019 Canonical Ltd -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Package yaml implements YAML support for the Go language. 
-// -// Source code and other details for the project are available at GitHub: -// -// https://github.com/go-yaml/yaml -package yaml - -import ( - "errors" - "fmt" - "io" - "reflect" - "strings" - "sync" - "unicode/utf8" -) - -// The Unmarshaler interface may be implemented by types to customize their -// behavior when being unmarshaled from a YAML document. -type Unmarshaler interface { - UnmarshalYAML(value *Node) error -} - -type obsoleteUnmarshaler interface { - UnmarshalYAML(unmarshal func(interface{}) error) error -} - -// The Marshaler interface may be implemented by types to customize their -// behavior when being marshaled into a YAML document. The returned value -// is marshaled in place of the original value implementing Marshaler. -// -// If an error is returned by MarshalYAML, the marshaling procedure stops -// and returns with the provided error. -type Marshaler interface { - MarshalYAML() (interface{}, error) -} - -// Unmarshal decodes the first document found within the in byte slice -// and assigns decoded values into the out value. -// -// Maps and pointers (to a struct, string, int, etc) are accepted as out -// values. If an internal pointer within a struct is not initialized, -// the yaml package will initialize it if necessary for unmarshalling -// the provided data. The out parameter must not be nil. -// -// The type of the decoded values should be compatible with the respective -// values in out. If one or more values cannot be decoded due to type -// mismatches, decoding continues partially until the end of the YAML -// content, and a *yaml.TypeError is returned with details for all -// missed values. -// -// Struct fields are only unmarshalled if they are exported (have an -// upper case first letter), and are unmarshalled using the field name -// lowercased as the default key. Custom keys may be defined via the -// "yaml" name in the field tag: the content preceding the first comma -// is used as the key, and the following comma-separated options are -// used to tweak the marshalling process (see Marshal). -// Conflicting names result in a runtime error. -// -// For example: -// -// type T struct { -// F int `yaml:"a,omitempty"` -// B int -// } -// var t T -// yaml.Unmarshal([]byte("a: 1\nb: 2"), &t) -// -// See the documentation of Marshal for the format of tags and a list of -// supported tag options. -func Unmarshal(in []byte, out interface{}) (err error) { - return unmarshal(in, out, false) -} - -// A Decoder reads and decodes YAML values from an input stream. -type Decoder struct { - parser *parser - knownFields bool -} - -// NewDecoder returns a new decoder that reads from r. -// -// The decoder introduces its own buffering and may read -// data from r beyond the YAML values requested. -func NewDecoder(r io.Reader) *Decoder { - return &Decoder{ - parser: newParserFromReader(r), - } -} - -// KnownFields ensures that the keys in decoded mappings -// exist as fields in the struct being decoded into. -func (dec *Decoder) KnownFields(enable bool) { - dec.knownFields = enable -} - -// Decode reads the next YAML-encoded value from its input -// and stores it in the value pointed to by v. -// -// See the documentation for Unmarshal for details about the -// conversion of YAML into a Go value.
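For reference, a minimal usage sketch of the streaming Decoder and the KnownFields toggle documented above, written against the upstream gopkg.in/yaml.v3 API that this vendored copy mirrors (the Config type is illustrative):

```go
package main

import (
	"fmt"
	"strings"

	"gopkg.in/yaml.v3"
)

type Config struct {
	Name string `yaml:"name"`
}

func main() {
	dec := yaml.NewDecoder(strings.NewReader("name: turbo\n"))
	dec.KnownFields(true) // unknown mapping keys now error instead of being dropped
	var c Config
	if err := dec.Decode(&c); err != nil { // returns io.EOF once the stream is exhausted
		panic(err)
	}
	fmt.Println(c.Name) // turbo
}
```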
-func (dec *Decoder) Decode(v interface{}) (err error) { - d := newDecoder() - d.knownFields = dec.knownFields - defer handleErr(&err) - node := dec.parser.parse() - if node == nil { - return io.EOF - } - out := reflect.ValueOf(v) - if out.Kind() == reflect.Ptr && !out.IsNil() { - out = out.Elem() - } - d.unmarshal(node, out) - if len(d.terrors) > 0 { - return &TypeError{d.terrors} - } - return nil -} - -// Decode decodes the node and stores its data into the value pointed to by v. -// -// See the documentation for Unmarshal for details about the -// conversion of YAML into a Go value. -func (n *Node) Decode(v interface{}) (err error) { - d := newDecoder() - defer handleErr(&err) - out := reflect.ValueOf(v) - if out.Kind() == reflect.Ptr && !out.IsNil() { - out = out.Elem() - } - d.unmarshal(n, out) - if len(d.terrors) > 0 { - return &TypeError{d.terrors} - } - return nil -} - -func unmarshal(in []byte, out interface{}, strict bool) (err error) { - defer handleErr(&err) - d := newDecoder() - p := newParser(in) - defer p.destroy() - node := p.parse() - if node != nil { - v := reflect.ValueOf(out) - if v.Kind() == reflect.Ptr && !v.IsNil() { - v = v.Elem() - } - d.unmarshal(node, v) - } - if len(d.terrors) > 0 { - return &TypeError{d.terrors} - } - return nil -} - -// Marshal serializes the value provided into a YAML document. The structure -// of the generated document will reflect the structure of the value itself. -// Maps and pointers (to struct, string, int, etc) are accepted as the in value. -// -// Struct fields are only marshalled if they are exported (have an upper case -// first letter), and are marshalled using the field name lowercased as the -// default key. Custom keys may be defined via the "yaml" name in the field -// tag: the content preceding the first comma is used as the key, and the -// following comma-separated options are used to tweak the marshalling process. -// Conflicting names result in a runtime error. -// -// The field tag format accepted is: -// -// `(...) yaml:"[<key>][,<flag1>[,<flag2>]]" (...)` -// -// The following flags are currently supported: -// -// omitempty Only include the field if it's not set to the zero -// value for the type or to empty slices or maps. -// Zero valued structs will be omitted if all their public -// fields are zero, unless they implement an IsZero -// method (see the IsZeroer interface type), in which -// case the field will be excluded if IsZero returns true. -// -// flow Marshal using a flow style (useful for structs, -// sequences and maps). -// -// inline Inline the field, which must be a struct or a map, -// causing all of its fields or keys to be processed as if -// they were part of the outer struct. For maps, keys must -// not conflict with the yaml keys of other struct fields. -// -// In addition, if the key is "-", the field is ignored. -// -// For example: -// -// type T struct { -// F int `yaml:"a,omitempty"` -// B int -// } -// yaml.Marshal(&T{B: 2}) // Returns "b: 2\n" -// yaml.Marshal(&T{F: 1}) // Returns "a: 1\nb: 0\n" -func Marshal(in interface{}) (out []byte, err error) { - defer handleErr(&err) - e := newEncoder() - defer e.destroy() - e.marshalDoc("", reflect.ValueOf(in)) - e.finish() - out = e.out - return -} - -// An Encoder writes YAML values to an output stream. -type Encoder struct { - encoder *encoder -} - -// NewEncoder returns a new encoder that writes to w. -// The Encoder should be closed after use to flush all data -// to w.
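The omitempty, flow, and inline flags described in the Marshal documentation above compose as follows; a small sketch against the upstream gopkg.in/yaml.v3 API (the Service and Listen types are illustrative):

```go
package main

import (
	"fmt"

	"gopkg.in/yaml.v3"
)

type Listen struct {
	Port int `yaml:"port"`
}

type Service struct {
	Name   string   `yaml:"name,omitempty"` // dropped when zero
	Tags   []string `yaml:"tags,flow"`      // emitted as [a, b]
	Listen `yaml:",inline"`                 // fields hoisted into the parent mapping
}

func main() {
	out, err := yaml.Marshal(&Service{Tags: []string{"a", "b"}, Listen: Listen{Port: 80}})
	if err != nil {
		panic(err)
	}
	fmt.Print(string(out))
	// tags: [a, b]
	// port: 80
}
```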
-func NewEncoder(w io.Writer) *Encoder { - return &Encoder{ - encoder: newEncoderWithWriter(w), - } -} - -// Encode writes the YAML encoding of v to the stream. -// If multiple items are encoded to the stream, the -// second and subsequent documents will be preceded -// with a "---" document separator, but the first will not. -// -// See the documentation for Marshal for details about the conversion of Go -// values to YAML. -func (e *Encoder) Encode(v interface{}) (err error) { - defer handleErr(&err) - e.encoder.marshalDoc("", reflect.ValueOf(v)) - return nil -} - -// Encode encodes value v and stores its representation in n. -// -// See the documentation for Marshal for details about the -// conversion of Go values into YAML. -func (n *Node) Encode(v interface{}) (err error) { - defer handleErr(&err) - e := newEncoder() - defer e.destroy() - e.marshalDoc("", reflect.ValueOf(v)) - e.finish() - p := newParser(e.out) - p.textless = true - defer p.destroy() - doc := p.parse() - *n = *doc.Content[0] - return nil -} - -// SetIndent changes the indentation used when encoding. -func (e *Encoder) SetIndent(spaces int) { - if spaces < 0 { - panic("yaml: cannot indent to a negative number of spaces") - } - e.encoder.indent = spaces -} - -// Close closes the encoder by writing any remaining data. -// It does not write a stream terminating string "...". -func (e *Encoder) Close() (err error) { - defer handleErr(&err) - e.encoder.finish() - return nil -} - -func handleErr(err *error) { - if v := recover(); v != nil { - if e, ok := v.(yamlError); ok { - *err = e.err - } else { - panic(v) - } - } -} - -type yamlError struct { - err error -} - -func fail(err error) { - panic(yamlError{err}) -} - -func failf(format string, args ...interface{}) { - panic(yamlError{fmt.Errorf("yaml: "+format, args...)}) -} - -// A TypeError is returned by Unmarshal when one or more fields in -// the YAML document cannot be properly decoded into the requested -// types. When this error is returned, the value is still -// unmarshaled partially. -type TypeError struct { - Errors []string -} - -func (e *TypeError) Error() string { - return fmt.Sprintf("yaml: unmarshal errors:\n %s", strings.Join(e.Errors, "\n ")) -} - -type Kind uint32 - -const ( - DocumentNode Kind = 1 << iota - SequenceNode - MappingNode - ScalarNode - AliasNode -) - -type Style uint32 - -const ( - TaggedStyle Style = 1 << iota - DoubleQuotedStyle - SingleQuotedStyle - LiteralStyle - FoldedStyle - FlowStyle -) - -// Node represents an element in the YAML document hierarchy. While documents -// are typically encoded and decoded into higher level types, such as structs -// and maps, Node is an intermediate representation that allows detailed -// control over the content being decoded or encoded. -// -// It's worth noting that although Node offers access into details such as -// line numbers, columns, and comments, the content when re-encoded will not -// have its original textual representation preserved. An effort is made to -// render the data pleasantly, and to preserve comments near the data they -// describe, though. -// -// Values that make use of the Node type interact with the yaml package in the -// same way any other type would do, by encoding and decoding yaml data -// directly or indirectly into them.
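Before the Node discussion continues below, here is a short sketch tying together the Encoder behaviors documented above: multi-document streams separated by "---", SetIndent, and Close (upstream gopkg.in/yaml.v3 API assumed):

```go
package main

import (
	"bytes"
	"fmt"

	"gopkg.in/yaml.v3"
)

func main() {
	var buf bytes.Buffer
	enc := yaml.NewEncoder(&buf)
	enc.SetIndent(2) // the upstream default is 4 spaces
	_ = enc.Encode(map[string][]string{"pipeline": {"build", "test"}})
	_ = enc.Encode(map[string]string{"doc": "two"}) // preceded by "---"
	_ = enc.Close()                                 // flushes; does not write "..."
	fmt.Print(buf.String())
}
```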
-// -// For example: -// -// var person struct { -// Name string -// Address yaml.Node -// } -// err := yaml.Unmarshal(data, &person) -// -// Or by itself: -// -// var person Node -// err := yaml.Unmarshal(data, &person) -type Node struct { - // Kind defines whether the node is a document, a mapping, a sequence, - // a scalar value, or an alias to another node. The specific data type of - // scalar nodes may be obtained via the ShortTag and LongTag methods. - Kind Kind - - // Style allows customizing the appearance of the node in the tree. - Style Style - - // Tag holds the YAML tag defining the data type for the value. - // When decoding, this field will always be set to the resolved tag, - // even when it wasn't explicitly provided in the YAML content. - // When encoding, if this field is unset the value type will be - // implied from the node properties, and if it is set, it will only - // be serialized into the representation if TaggedStyle is used or - // the implicit tag diverges from the provided one. - Tag string - - // Value holds the unescaped and unquoted representation of the value. - Value string - - // Anchor holds the anchor name for this node, which allows aliases to point to it. - Anchor string - - // Alias holds the node that this alias points to. Only valid when Kind is AliasNode. - Alias *Node - - // Content holds contained nodes for documents, mappings, and sequences. - Content []*Node - - // HeadComment holds any comments in the lines preceding the node and - // not separated by an empty line. - HeadComment string - - // LineComment holds any comments at the end of the line the node is in. - LineComment string - - // FootComment holds any comments following the node and before empty lines. - FootComment string - - // Line and Column hold the node position in the decoded YAML text. - // These fields are not respected when encoding the node. - Line int - Column int -} - -// IsZero returns whether the node has all of its fields unset. -func (n *Node) IsZero() bool { - return n.Kind == 0 && n.Style == 0 && n.Tag == "" && n.Value == "" && n.Anchor == "" && n.Alias == nil && n.Content == nil && - n.HeadComment == "" && n.LineComment == "" && n.FootComment == "" && n.Line == 0 && n.Column == 0 -} - -// LongTag returns the long form of the tag that indicates the data type for -// the node. If the Tag field isn't explicitly defined, one will be computed -// based on the node properties. -func (n *Node) LongTag() string { - return longTag(n.ShortTag()) -} - -// ShortTag returns the short form of the YAML tag that indicates data type for -// the node. If the Tag field isn't explicitly defined, one will be computed -// based on the node properties. -func (n *Node) ShortTag() string { - if n.indicatedString() { - return strTag - } - if n.Tag == "" || n.Tag == "!" { - switch n.Kind { - case MappingNode: - return mapTag - case SequenceNode: - return seqTag - case AliasNode: - if n.Alias != nil { - return n.Alias.ShortTag() - } - case ScalarNode: - tag, _ := resolve("", n.Value) - return tag - case 0: - // Special case to make the zero value convenient.
- if n.IsZero() { - return nullTag - } - } - return "" - } - return shortTag(n.Tag) -} - -func (n *Node) indicatedString() bool { - return n.Kind == ScalarNode && - (shortTag(n.Tag) == strTag || - (n.Tag == "" || n.Tag == "!") && n.Style&(SingleQuotedStyle|DoubleQuotedStyle|LiteralStyle|FoldedStyle) != 0) -} - -// SetString is a convenience function that sets the node to a string value -// and defines its style in a pleasant way depending on its content. -func (n *Node) SetString(s string) { - n.Kind = ScalarNode - if utf8.ValidString(s) { - n.Value = s - n.Tag = strTag - } else { - n.Value = encodeBase64(s) - n.Tag = binaryTag - } - if strings.Contains(n.Value, "\n") { - n.Style = LiteralStyle - } -} - -// -------------------------------------------------------------------------- -// Maintain a mapping of keys to structure field indexes - -// The code in this section was copied from mgo/bson. - -// structInfo holds details for the serialization of fields of -// a given struct. -type structInfo struct { - FieldsMap map[string]fieldInfo - FieldsList []fieldInfo - - // InlineMap is the number of the field in the struct that - // contains an ,inline map, or -1 if there's none. - InlineMap int - - // InlineUnmarshalers holds indexes to inlined fields that - // contain unmarshaler values. - InlineUnmarshalers [][]int -} - -type fieldInfo struct { - Key string - Num int - OmitEmpty bool - Flow bool - // Id holds the unique field identifier, so we can cheaply - // check for field duplicates without maintaining an extra map. - Id int - - // Inline holds the field index if the field is part of an inlined struct. - Inline []int -} - -var structMap = make(map[reflect.Type]*structInfo) -var fieldMapMutex sync.RWMutex -var unmarshalerType reflect.Type - -func init() { - var v Unmarshaler - unmarshalerType = reflect.ValueOf(&v).Elem().Type() -} - -func getStructInfo(st reflect.Type) (*structInfo, error) { - fieldMapMutex.RLock() - sinfo, found := structMap[st] - fieldMapMutex.RUnlock() - if found { - return sinfo, nil - } - - n := st.NumField() - fieldsMap := make(map[string]fieldInfo) - fieldsList := make([]fieldInfo, 0, n) - inlineMap := -1 - inlineUnmarshalers := [][]int(nil) - for i := 0; i != n; i++ { - field := st.Field(i) - if field.PkgPath != "" && !field.Anonymous { - continue // Private field - } - - info := fieldInfo{Num: i} - - tag := field.Tag.Get("yaml") - if tag == "" && strings.Index(string(field.Tag), ":") < 0 { - tag = string(field.Tag) - } - if tag == "-" { - continue - } - - inline := false - fields := strings.Split(tag, ",") - if len(fields) > 1 { - for _, flag := range fields[1:] { - switch flag { - case "omitempty": - info.OmitEmpty = true - case "flow": - info.Flow = true - case "inline": - inline = true - default: - return nil, errors.New(fmt.Sprintf("unsupported flag %q in tag %q of type %s", flag, tag, st)) - } - } - tag = fields[0] - } - - if inline { - switch field.Type.Kind() { - case reflect.Map: - if inlineMap >= 0 { - return nil, errors.New("multiple ,inline maps in struct " + st.String()) - } - if field.Type.Key() != reflect.TypeOf("") { - return nil, errors.New("option ,inline needs a map with string keys in struct " + st.String()) - } - inlineMap = info.Num - case reflect.Struct, reflect.Ptr: - ftype := field.Type - for ftype.Kind() == reflect.Ptr { - ftype = ftype.Elem() - } - if ftype.Kind() != reflect.Struct { - return nil, errors.New("option ,inline may only be used on a struct or map field") - } - if reflect.PtrTo(ftype).Implements(unmarshalerType) { - 
inlineUnmarshalers = append(inlineUnmarshalers, []int{i}) - } else { - sinfo, err := getStructInfo(ftype) - if err != nil { - return nil, err - } - for _, index := range sinfo.InlineUnmarshalers { - inlineUnmarshalers = append(inlineUnmarshalers, append([]int{i}, index...)) - } - for _, finfo := range sinfo.FieldsList { - if _, found := fieldsMap[finfo.Key]; found { - msg := "duplicated key '" + finfo.Key + "' in struct " + st.String() - return nil, errors.New(msg) - } - if finfo.Inline == nil { - finfo.Inline = []int{i, finfo.Num} - } else { - finfo.Inline = append([]int{i}, finfo.Inline...) - } - finfo.Id = len(fieldsList) - fieldsMap[finfo.Key] = finfo - fieldsList = append(fieldsList, finfo) - } - } - default: - return nil, errors.New("option ,inline may only be used on a struct or map field") - } - continue - } - - if tag != "" { - info.Key = tag - } else { - info.Key = strings.ToLower(field.Name) - } - - if _, found = fieldsMap[info.Key]; found { - msg := "duplicated key '" + info.Key + "' in struct " + st.String() - return nil, errors.New(msg) - } - - info.Id = len(fieldsList) - fieldsList = append(fieldsList, info) - fieldsMap[info.Key] = info - } - - sinfo = &structInfo{ - FieldsMap: fieldsMap, - FieldsList: fieldsList, - InlineMap: inlineMap, - InlineUnmarshalers: inlineUnmarshalers, - } - - fieldMapMutex.Lock() - structMap[st] = sinfo - fieldMapMutex.Unlock() - return sinfo, nil -} - -// IsZeroer is used to check whether an object is zero to -// determine whether it should be omitted when marshaling -// with the omitempty flag. One notable implementation -// is time.Time. -type IsZeroer interface { - IsZero() bool -} - -func isZero(v reflect.Value) bool { - kind := v.Kind() - if z, ok := v.Interface().(IsZeroer); ok { - if (kind == reflect.Ptr || kind == reflect.Interface) && v.IsNil() { - return true - } - return z.IsZero() - } - switch kind { - case reflect.String: - return len(v.String()) == 0 - case reflect.Interface, reflect.Ptr: - return v.IsNil() - case reflect.Slice: - return v.Len() == 0 - case reflect.Map: - return v.Len() == 0 - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - return v.Int() == 0 - case reflect.Float32, reflect.Float64: - return v.Float() == 0 - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: - return v.Uint() == 0 - case reflect.Bool: - return !v.Bool() - case reflect.Struct: - vt := v.Type() - for i := v.NumField() - 1; i >= 0; i-- { - if vt.Field(i).PkgPath != "" { - continue // Private field - } - if !isZero(v.Field(i)) { - return false - } - } - return true - } - return false -} diff --git a/cli/internal/yaml/yamlh.go b/cli/internal/yaml/yamlh.go deleted file mode 100644 index ddcd5513ba770..0000000000000 --- a/cli/internal/yaml/yamlh.go +++ /dev/null @@ -1,809 +0,0 @@ -// -// Copyright (c) 2011-2019 Canonical Ltd -// Copyright (c) 2006-2010 Kirill Simonov -// -// Permission is hereby granted, free of charge, to any person obtaining a copy of -// this software and associated documentation files (the "Software"), to deal in -// the Software without restriction, including without limitation the rights to -// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -// of the Software, and to permit persons to whom the Software is furnished to do -// so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in all -// copies or substantial portions of the Software. 
-// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -// SOFTWARE. - -package yaml - -import ( - "fmt" - "io" -) - -// The version directive data. -type yaml_version_directive_t struct { - major int8 // The major version number. - minor int8 // The minor version number. -} - -// The tag directive data. -type yaml_tag_directive_t struct { - handle []byte // The tag handle. - prefix []byte // The tag prefix. -} - -type yaml_encoding_t int - -// The stream encoding. -const ( - // Let the parser choose the encoding. - yaml_ANY_ENCODING yaml_encoding_t = iota - - yaml_UTF8_ENCODING // The default UTF-8 encoding. - yaml_UTF16LE_ENCODING // The UTF-16-LE encoding with BOM. - yaml_UTF16BE_ENCODING // The UTF-16-BE encoding with BOM. -) - -type yaml_break_t int - -// Line break types. -const ( - // Let the parser choose the break type. - yaml_ANY_BREAK yaml_break_t = iota - - yaml_CR_BREAK // Use CR for line breaks (Mac style). - yaml_LN_BREAK // Use LN for line breaks (Unix style). - yaml_CRLN_BREAK // Use CR LN for line breaks (DOS style). -) - -type yaml_error_type_t int - -// Many bad things could happen with the parser and emitter. -const ( - // No error is produced. - yaml_NO_ERROR yaml_error_type_t = iota - - yaml_MEMORY_ERROR // Cannot allocate or reallocate a block of memory. - yaml_READER_ERROR // Cannot read or decode the input stream. - yaml_SCANNER_ERROR // Cannot scan the input stream. - yaml_PARSER_ERROR // Cannot parse the input stream. - yaml_COMPOSER_ERROR // Cannot compose a YAML document. - yaml_WRITER_ERROR // Cannot write to the output stream. - yaml_EMITTER_ERROR // Cannot emit a YAML stream. -) - -// The pointer position. -type yaml_mark_t struct { - index int // The position index. - line int // The position line. - column int // The position column. -} - -// Node Styles - -type yaml_style_t int8 - -type yaml_scalar_style_t yaml_style_t - -// Scalar styles. -const ( - // Let the emitter choose the style. - yaml_ANY_SCALAR_STYLE yaml_scalar_style_t = 0 - - yaml_PLAIN_SCALAR_STYLE yaml_scalar_style_t = 1 << iota // The plain scalar style. - yaml_SINGLE_QUOTED_SCALAR_STYLE // The single-quoted scalar style. - yaml_DOUBLE_QUOTED_SCALAR_STYLE // The double-quoted scalar style. - yaml_LITERAL_SCALAR_STYLE // The literal scalar style. - yaml_FOLDED_SCALAR_STYLE // The folded scalar style. -) - -type yaml_sequence_style_t yaml_style_t - -// Sequence styles. -const ( - // Let the emitter choose the style. - yaml_ANY_SEQUENCE_STYLE yaml_sequence_style_t = iota - - yaml_BLOCK_SEQUENCE_STYLE // The block sequence style. - yaml_FLOW_SEQUENCE_STYLE // The flow sequence style. -) - -type yaml_mapping_style_t yaml_style_t - -// Mapping styles. -const ( - // Let the emitter choose the style. - yaml_ANY_MAPPING_STYLE yaml_mapping_style_t = iota - - yaml_BLOCK_MAPPING_STYLE // The block mapping style. - yaml_FLOW_MAPPING_STYLE // The flow mapping style. -) - -// Tokens - -type yaml_token_type_t int - -// Token types. -const ( - // An empty token. - yaml_NO_TOKEN yaml_token_type_t = iota - - yaml_STREAM_START_TOKEN // A STREAM-START token. 
- yaml_STREAM_END_TOKEN // A STREAM-END token. - - yaml_VERSION_DIRECTIVE_TOKEN // A VERSION-DIRECTIVE token. - yaml_TAG_DIRECTIVE_TOKEN // A TAG-DIRECTIVE token. - yaml_DOCUMENT_START_TOKEN // A DOCUMENT-START token. - yaml_DOCUMENT_END_TOKEN // A DOCUMENT-END token. - - yaml_BLOCK_SEQUENCE_START_TOKEN // A BLOCK-SEQUENCE-START token. - yaml_BLOCK_MAPPING_START_TOKEN // A BLOCK-MAPPING-START token. - yaml_BLOCK_END_TOKEN // A BLOCK-END token. - - yaml_FLOW_SEQUENCE_START_TOKEN // A FLOW-SEQUENCE-START token. - yaml_FLOW_SEQUENCE_END_TOKEN // A FLOW-SEQUENCE-END token. - yaml_FLOW_MAPPING_START_TOKEN // A FLOW-MAPPING-START token. - yaml_FLOW_MAPPING_END_TOKEN // A FLOW-MAPPING-END token. - - yaml_BLOCK_ENTRY_TOKEN // A BLOCK-ENTRY token. - yaml_FLOW_ENTRY_TOKEN // A FLOW-ENTRY token. - yaml_KEY_TOKEN // A KEY token. - yaml_VALUE_TOKEN // A VALUE token. - - yaml_ALIAS_TOKEN // An ALIAS token. - yaml_ANCHOR_TOKEN // An ANCHOR token. - yaml_TAG_TOKEN // A TAG token. - yaml_SCALAR_TOKEN // A SCALAR token. -) - -func (tt yaml_token_type_t) String() string { - switch tt { - case yaml_NO_TOKEN: - return "yaml_NO_TOKEN" - case yaml_STREAM_START_TOKEN: - return "yaml_STREAM_START_TOKEN" - case yaml_STREAM_END_TOKEN: - return "yaml_STREAM_END_TOKEN" - case yaml_VERSION_DIRECTIVE_TOKEN: - return "yaml_VERSION_DIRECTIVE_TOKEN" - case yaml_TAG_DIRECTIVE_TOKEN: - return "yaml_TAG_DIRECTIVE_TOKEN" - case yaml_DOCUMENT_START_TOKEN: - return "yaml_DOCUMENT_START_TOKEN" - case yaml_DOCUMENT_END_TOKEN: - return "yaml_DOCUMENT_END_TOKEN" - case yaml_BLOCK_SEQUENCE_START_TOKEN: - return "yaml_BLOCK_SEQUENCE_START_TOKEN" - case yaml_BLOCK_MAPPING_START_TOKEN: - return "yaml_BLOCK_MAPPING_START_TOKEN" - case yaml_BLOCK_END_TOKEN: - return "yaml_BLOCK_END_TOKEN" - case yaml_FLOW_SEQUENCE_START_TOKEN: - return "yaml_FLOW_SEQUENCE_START_TOKEN" - case yaml_FLOW_SEQUENCE_END_TOKEN: - return "yaml_FLOW_SEQUENCE_END_TOKEN" - case yaml_FLOW_MAPPING_START_TOKEN: - return "yaml_FLOW_MAPPING_START_TOKEN" - case yaml_FLOW_MAPPING_END_TOKEN: - return "yaml_FLOW_MAPPING_END_TOKEN" - case yaml_BLOCK_ENTRY_TOKEN: - return "yaml_BLOCK_ENTRY_TOKEN" - case yaml_FLOW_ENTRY_TOKEN: - return "yaml_FLOW_ENTRY_TOKEN" - case yaml_KEY_TOKEN: - return "yaml_KEY_TOKEN" - case yaml_VALUE_TOKEN: - return "yaml_VALUE_TOKEN" - case yaml_ALIAS_TOKEN: - return "yaml_ALIAS_TOKEN" - case yaml_ANCHOR_TOKEN: - return "yaml_ANCHOR_TOKEN" - case yaml_TAG_TOKEN: - return "yaml_TAG_TOKEN" - case yaml_SCALAR_TOKEN: - return "yaml_SCALAR_TOKEN" - } - return "" -} - -// The token structure. -type yaml_token_t struct { - // The token type. - typ yaml_token_type_t - - // The start/end of the token. - start_mark, end_mark yaml_mark_t - - // The stream encoding (for yaml_STREAM_START_TOKEN). - encoding yaml_encoding_t - - // The alias/anchor/scalar value or tag/tag directive handle - // (for yaml_ALIAS_TOKEN, yaml_ANCHOR_TOKEN, yaml_SCALAR_TOKEN, yaml_TAG_TOKEN, yaml_TAG_DIRECTIVE_TOKEN). - value []byte - - // The tag suffix (for yaml_TAG_TOKEN). - suffix []byte - - // The tag directive prefix (for yaml_TAG_DIRECTIVE_TOKEN). - prefix []byte - - // The scalar style (for yaml_SCALAR_TOKEN). - style yaml_scalar_style_t - - // The version directive major/minor (for yaml_VERSION_DIRECTIVE_TOKEN). - major, minor int8 -} - -// Events - -type yaml_event_type_t int8 - -// Event types. -const ( - // An empty event. - yaml_NO_EVENT yaml_event_type_t = iota - - yaml_STREAM_START_EVENT // A STREAM-START event. - yaml_STREAM_END_EVENT // A STREAM-END event.
- yaml_DOCUMENT_START_EVENT // A DOCUMENT-START event. - yaml_DOCUMENT_END_EVENT // A DOCUMENT-END event. - yaml_ALIAS_EVENT // An ALIAS event. - yaml_SCALAR_EVENT // A SCALAR event. - yaml_SEQUENCE_START_EVENT // A SEQUENCE-START event. - yaml_SEQUENCE_END_EVENT // A SEQUENCE-END event. - yaml_MAPPING_START_EVENT // A MAPPING-START event. - yaml_MAPPING_END_EVENT // A MAPPING-END event. - yaml_TAIL_COMMENT_EVENT -) - -var eventStrings = []string{ - yaml_NO_EVENT: "none", - yaml_STREAM_START_EVENT: "stream start", - yaml_STREAM_END_EVENT: "stream end", - yaml_DOCUMENT_START_EVENT: "document start", - yaml_DOCUMENT_END_EVENT: "document end", - yaml_ALIAS_EVENT: "alias", - yaml_SCALAR_EVENT: "scalar", - yaml_SEQUENCE_START_EVENT: "sequence start", - yaml_SEQUENCE_END_EVENT: "sequence end", - yaml_MAPPING_START_EVENT: "mapping start", - yaml_MAPPING_END_EVENT: "mapping end", - yaml_TAIL_COMMENT_EVENT: "tail comment", -} - -func (e yaml_event_type_t) String() string { - if e < 0 || int(e) >= len(eventStrings) { - return fmt.Sprintf("unknown event %d", e) - } - return eventStrings[e] -} - -// The event structure. -type yaml_event_t struct { - - // The event type. - typ yaml_event_type_t - - // The start and end of the event. - start_mark, end_mark yaml_mark_t - - // The document encoding (for yaml_STREAM_START_EVENT). - encoding yaml_encoding_t - - // The version directive (for yaml_DOCUMENT_START_EVENT). - version_directive *yaml_version_directive_t - - // The list of tag directives (for yaml_DOCUMENT_START_EVENT). - tag_directives []yaml_tag_directive_t - - // The comments - head_comment []byte - line_comment []byte - foot_comment []byte - tail_comment []byte - - // The anchor (for yaml_SCALAR_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT, yaml_ALIAS_EVENT). - anchor []byte - - // The tag (for yaml_SCALAR_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT). - tag []byte - - // The scalar value (for yaml_SCALAR_EVENT). - value []byte - - // Is the document start/end indicator implicit, or the tag optional? - // (for yaml_DOCUMENT_START_EVENT, yaml_DOCUMENT_END_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT, yaml_SCALAR_EVENT). - implicit bool - - // Is the tag optional for any non-plain style? (for yaml_SCALAR_EVENT). - quoted_implicit bool - - // The style (for yaml_SCALAR_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT). - style yaml_style_t -} - -func (e *yaml_event_t) scalar_style() yaml_scalar_style_t { return yaml_scalar_style_t(e.style) } -func (e *yaml_event_t) sequence_style() yaml_sequence_style_t { return yaml_sequence_style_t(e.style) } -func (e *yaml_event_t) mapping_style() yaml_mapping_style_t { return yaml_mapping_style_t(e.style) } - -// Nodes - -const ( - yaml_NULL_TAG = "tag:yaml.org,2002:null" // The tag !!null with the only possible value: null. - yaml_BOOL_TAG = "tag:yaml.org,2002:bool" // The tag !!bool with the values: true and false. - yaml_STR_TAG = "tag:yaml.org,2002:str" // The tag !!str for string values. - yaml_INT_TAG = "tag:yaml.org,2002:int" // The tag !!int for integer values. - yaml_FLOAT_TAG = "tag:yaml.org,2002:float" // The tag !!float for float values. - yaml_TIMESTAMP_TAG = "tag:yaml.org,2002:timestamp" // The tag !!timestamp for date and time values. - - yaml_SEQ_TAG = "tag:yaml.org,2002:seq" // The tag !!seq is used to denote sequences. - yaml_MAP_TAG = "tag:yaml.org,2002:map" // The tag !!map is used to denote mapping. - - // Not in original libyaml. 
- yaml_BINARY_TAG = "tag:yaml.org,2002:binary" - yaml_MERGE_TAG = "tag:yaml.org,2002:merge" - - yaml_DEFAULT_SCALAR_TAG = yaml_STR_TAG // The default scalar tag is !!str. - yaml_DEFAULT_SEQUENCE_TAG = yaml_SEQ_TAG // The default sequence tag is !!seq. - yaml_DEFAULT_MAPPING_TAG = yaml_MAP_TAG // The default mapping tag is !!map. -) - -type yaml_node_type_t int - -// Node types. -const ( - // An empty node. - yaml_NO_NODE yaml_node_type_t = iota - - yaml_SCALAR_NODE // A scalar node. - yaml_SEQUENCE_NODE // A sequence node. - yaml_MAPPING_NODE // A mapping node. -) - -// An element of a sequence node. -type yaml_node_item_t int - -// An element of a mapping node. -type yaml_node_pair_t struct { - key int // The key of the element. - value int // The value of the element. -} - -// The node structure. -type yaml_node_t struct { - typ yaml_node_type_t // The node type. - tag []byte // The node tag. - - // The node data. - - // The scalar parameters (for yaml_SCALAR_NODE). - scalar struct { - value []byte // The scalar value. - length int // The length of the scalar value. - style yaml_scalar_style_t // The scalar style. - } - - // The sequence parameters (for YAML_SEQUENCE_NODE). - sequence struct { - items_data []yaml_node_item_t // The stack of sequence items. - style yaml_sequence_style_t // The sequence style. - } - - // The mapping parameters (for yaml_MAPPING_NODE). - mapping struct { - pairs_data []yaml_node_pair_t // The stack of mapping pairs (key, value). - pairs_start *yaml_node_pair_t // The beginning of the stack. - pairs_end *yaml_node_pair_t // The end of the stack. - pairs_top *yaml_node_pair_t // The top of the stack. - style yaml_mapping_style_t // The mapping style. - } - - start_mark yaml_mark_t // The beginning of the node. - end_mark yaml_mark_t // The end of the node. - -} - -// The document structure. -type yaml_document_t struct { - - // The document nodes. - nodes []yaml_node_t - - // The version directive. - version_directive *yaml_version_directive_t - - // The list of tag directives. - tag_directives_data []yaml_tag_directive_t - tag_directives_start int // The beginning of the tag directives list. - tag_directives_end int // The end of the tag directives list. - - start_implicit int // Is the document start indicator implicit? - end_implicit int // Is the document end indicator implicit? - - // The start/end of the document. - start_mark, end_mark yaml_mark_t -} - -// The prototype of a read handler. -// -// The read handler is called when the parser needs to read more bytes from the -// source. The handler should write not more than size bytes to the buffer. -// The number of written bytes should be set to the size_read variable. -// -// [in,out] data A pointer to an application data specified by -// -// yaml_parser_set_input(). -// -// [out] buffer The buffer to write the data from the source. -// [in] size The size of the buffer. -// [out] size_read The actual number of bytes read from the source. -// -// On success, the handler should return 1. If the handler failed, -// the returned value should be 0. On EOF, the handler should set the -// size_read to 0 and return 1. -type yaml_read_handler_t func(parser *yaml_parser_t, buffer []byte) (n int, err error) - -// This structure holds information about a potential simple key. -type yaml_simple_key_t struct { - possible bool // Is a simple key possible? - required bool // Is a simple key required? - token_number int // The number of the token. - mark yaml_mark_t // The position mark. 
-} - -// The states of the parser. -type yaml_parser_state_t int - -const ( - yaml_PARSE_STREAM_START_STATE yaml_parser_state_t = iota - - yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE // Expect the beginning of an implicit document. - yaml_PARSE_DOCUMENT_START_STATE // Expect DOCUMENT-START. - yaml_PARSE_DOCUMENT_CONTENT_STATE // Expect the content of a document. - yaml_PARSE_DOCUMENT_END_STATE // Expect DOCUMENT-END. - yaml_PARSE_BLOCK_NODE_STATE // Expect a block node. - yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE // Expect a block node or indentless sequence. - yaml_PARSE_FLOW_NODE_STATE // Expect a flow node. - yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE // Expect the first entry of a block sequence. - yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE // Expect an entry of a block sequence. - yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE // Expect an entry of an indentless sequence. - yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE // Expect the first key of a block mapping. - yaml_PARSE_BLOCK_MAPPING_KEY_STATE // Expect a block mapping key. - yaml_PARSE_BLOCK_MAPPING_VALUE_STATE // Expect a block mapping value. - yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE // Expect the first entry of a flow sequence. - yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE // Expect an entry of a flow sequence. - yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE // Expect a key of an ordered mapping. - yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE // Expect a value of an ordered mapping. - yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE // Expect the end of an ordered mapping entry. - yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE // Expect the first key of a flow mapping. - yaml_PARSE_FLOW_MAPPING_KEY_STATE // Expect a key of a flow mapping. - yaml_PARSE_FLOW_MAPPING_VALUE_STATE // Expect a value of a flow mapping. - yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE // Expect an empty value of a flow mapping. - yaml_PARSE_END_STATE // Expect nothing.
-) - -func (ps yaml_parser_state_t) String() string { - switch ps { - case yaml_PARSE_STREAM_START_STATE: - return "yaml_PARSE_STREAM_START_STATE" - case yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE: - return "yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE" - case yaml_PARSE_DOCUMENT_START_STATE: - return "yaml_PARSE_DOCUMENT_START_STATE" - case yaml_PARSE_DOCUMENT_CONTENT_STATE: - return "yaml_PARSE_DOCUMENT_CONTENT_STATE" - case yaml_PARSE_DOCUMENT_END_STATE: - return "yaml_PARSE_DOCUMENT_END_STATE" - case yaml_PARSE_BLOCK_NODE_STATE: - return "yaml_PARSE_BLOCK_NODE_STATE" - case yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE: - return "yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE" - case yaml_PARSE_FLOW_NODE_STATE: - return "yaml_PARSE_FLOW_NODE_STATE" - case yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE: - return "yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE" - case yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE: - return "yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE" - case yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE: - return "yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE" - case yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE: - return "yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE" - case yaml_PARSE_BLOCK_MAPPING_KEY_STATE: - return "yaml_PARSE_BLOCK_MAPPING_KEY_STATE" - case yaml_PARSE_BLOCK_MAPPING_VALUE_STATE: - return "yaml_PARSE_BLOCK_MAPPING_VALUE_STATE" - case yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE: - return "yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE" - case yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE: - return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE" - case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE: - return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE" - case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE: - return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE" - case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE: - return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE" - case yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE: - return "yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE" - case yaml_PARSE_FLOW_MAPPING_KEY_STATE: - return "yaml_PARSE_FLOW_MAPPING_KEY_STATE" - case yaml_PARSE_FLOW_MAPPING_VALUE_STATE: - return "yaml_PARSE_FLOW_MAPPING_VALUE_STATE" - case yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE: - return "yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE" - case yaml_PARSE_END_STATE: - return "yaml_PARSE_END_STATE" - } - return "" -} - -// This structure holds aliases data. -type yaml_alias_data_t struct { - anchor []byte // The anchor. - index int // The node id. - mark yaml_mark_t // The anchor mark. -} - -// The parser structure. -// -// All members are internal. Manage the structure using the -// yaml_parser_ family of functions. -type yaml_parser_t struct { - - // Error handling - - error yaml_error_type_t // Error type. - - problem string // Error description. - - // The byte about which the problem occurred. - problem_offset int - problem_value int - problem_mark yaml_mark_t - - // The error context. - context string - context_mark yaml_mark_t - - // Reader stuff - - read_handler yaml_read_handler_t // Read handler. - - input_reader io.Reader // File input data. - input []byte // String input data. - input_pos int - - eof bool // EOF flag - - buffer []byte // The working buffer. - buffer_pos int // The current position of the buffer. - - unread int // The number of unread characters in the buffer. - - newlines int // The number of line breaks since last non-break/non-blank character - - raw_buffer []byte // The raw buffer. - raw_buffer_pos int // The current position of the buffer. 
- - encoding yaml_encoding_t // The input encoding. - - offset int // The offset of the current position (in bytes). - mark yaml_mark_t // The mark of the current position. - - // Comments - - head_comment []byte // The current head comments - line_comment []byte // The current line comments - foot_comment []byte // The current foot comments - tail_comment []byte // Foot comment that happens at the end of a block. - stem_comment []byte // Comment in item preceding a nested structure (list inside list item, etc) - - comments []yaml_comment_t // The folded comments for all parsed tokens - comments_head int - - // Scanner stuff - - stream_start_produced bool // Have we started to scan the input stream? - stream_end_produced bool // Have we reached the end of the input stream? - - flow_level int // The number of unclosed '[' and '{' indicators. - - tokens []yaml_token_t // The tokens queue. - tokens_head int // The head of the tokens queue. - tokens_parsed int // The number of tokens fetched from the queue. - token_available bool // Does the tokens queue contain a token ready for dequeueing. - - indent int // The current indentation level. - indents []int // The indentation levels stack. - - simple_key_allowed bool // May a simple key occur at the current position? - simple_keys []yaml_simple_key_t // The stack of simple keys. - simple_keys_by_tok map[int]int // possible simple_key indexes indexed by token_number - - // Parser stuff - - state yaml_parser_state_t // The current parser state. - states []yaml_parser_state_t // The parser states stack. - marks []yaml_mark_t // The stack of marks. - tag_directives []yaml_tag_directive_t // The list of TAG directives. - - // Dumper stuff - - aliases []yaml_alias_data_t // The alias data. - - document *yaml_document_t // The currently parsed document. -} - -type yaml_comment_t struct { - scan_mark yaml_mark_t // Position where scanning for comments started - token_mark yaml_mark_t // Position after which tokens will be associated with this comment - start_mark yaml_mark_t // Position of '#' comment mark - end_mark yaml_mark_t // Position where comment terminated - - head []byte - line []byte - foot []byte -} - -// Emitter Definitions - -// The prototype of a write handler. -// -// The write handler is called when the emitter needs to flush the accumulated -// characters to the output. The handler should write @a size bytes of the -// @a buffer to the output. -// -// @param[in,out] data A pointer to an application data specified by -// -// yaml_emitter_set_output(). -// -// @param[in] buffer The buffer with bytes to be written. -// @param[in] size The size of the buffer. -// -// @returns On success, the handler should return @c 1. If the handler failed, -// the returned value should be @c 0. -type yaml_write_handler_t func(emitter *yaml_emitter_t, buffer []byte) error - -type yaml_emitter_state_t int - -// The emitter states. -const ( - // Expect STREAM-START. - yaml_EMIT_STREAM_START_STATE yaml_emitter_state_t = iota - - yaml_EMIT_FIRST_DOCUMENT_START_STATE // Expect the first DOCUMENT-START or STREAM-END. - yaml_EMIT_DOCUMENT_START_STATE // Expect DOCUMENT-START or STREAM-END. - yaml_EMIT_DOCUMENT_CONTENT_STATE // Expect the content of a document. - yaml_EMIT_DOCUMENT_END_STATE // Expect DOCUMENT-END. - yaml_EMIT_FLOW_SEQUENCE_FIRST_ITEM_STATE // Expect the first item of a flow sequence. 
- yaml_EMIT_FLOW_SEQUENCE_TRAIL_ITEM_STATE // Expect the next item of a flow sequence, with the comma already written out - yaml_EMIT_FLOW_SEQUENCE_ITEM_STATE // Expect an item of a flow sequence. - yaml_EMIT_FLOW_MAPPING_FIRST_KEY_STATE // Expect the first key of a flow mapping. - yaml_EMIT_FLOW_MAPPING_TRAIL_KEY_STATE // Expect the next key of a flow mapping, with the comma already written out - yaml_EMIT_FLOW_MAPPING_KEY_STATE // Expect a key of a flow mapping. - yaml_EMIT_FLOW_MAPPING_SIMPLE_VALUE_STATE // Expect a value for a simple key of a flow mapping. - yaml_EMIT_FLOW_MAPPING_VALUE_STATE // Expect a value of a flow mapping. - yaml_EMIT_BLOCK_SEQUENCE_FIRST_ITEM_STATE // Expect the first item of a block sequence. - yaml_EMIT_BLOCK_SEQUENCE_ITEM_STATE // Expect an item of a block sequence. - yaml_EMIT_BLOCK_MAPPING_FIRST_KEY_STATE // Expect the first key of a block mapping. - yaml_EMIT_BLOCK_MAPPING_KEY_STATE // Expect the key of a block mapping. - yaml_EMIT_BLOCK_MAPPING_SIMPLE_VALUE_STATE // Expect a value for a simple key of a block mapping. - yaml_EMIT_BLOCK_MAPPING_VALUE_STATE // Expect a value of a block mapping. - yaml_EMIT_END_STATE // Expect nothing. -) - -// The emitter structure. -// -// All members are internal. Manage the structure using the @c yaml_emitter_ -// family of functions. -type yaml_emitter_t struct { - - // Error handling - - error yaml_error_type_t // Error type. - problem string // Error description. - - // Writer stuff - - write_handler yaml_write_handler_t // Write handler. - - output_buffer *[]byte // String output data. - output_writer io.Writer // File output data. - - buffer []byte // The working buffer. - buffer_pos int // The current position of the buffer. - - raw_buffer []byte // The raw buffer. - raw_buffer_pos int // The current position of the buffer. - - encoding yaml_encoding_t // The stream encoding. - - // Emitter stuff - - canonical bool // If the output is in the canonical style? - best_indent int // The number of indentation spaces. - best_width int // The preferred width of the output lines. - unicode bool // Allow unescaped non-ASCII characters? - line_break yaml_break_t // The preferred line break. - - state yaml_emitter_state_t // The current emitter state. - states []yaml_emitter_state_t // The stack of states. - - events []yaml_event_t // The event queue. - events_head int // The head of the event queue. - - indents []int // The stack of indentation levels. - - tag_directives []yaml_tag_directive_t // The list of tag directives. - - indent int // The current indentation level. - - flow_level int // The current flow level. - - root_context bool // Is it the document root context? - sequence_context bool // Is it a sequence context? - mapping_context bool // Is it a mapping context? - simple_key_context bool // Is it a simple mapping key context? - - line int // The current line. - column int // The current column. - whitespace bool // If the last character was a whitespace? - indention bool // If the last character was an indentation character (' ', '-', '?', ':')? - open_ended bool // If an explicit document end is required? - - space_above bool // Is there an empty line above? - foot_indent int // The indent used to write the foot comment above, or -1 if none. - - // Anchor analysis. - anchor_data struct { - anchor []byte // The anchor value. - alias bool // Is it an alias? - } - - // Tag analysis. - tag_data struct { - handle []byte // The tag handle. - suffix []byte // The tag suffix. - } - - // Scalar analysis.
- scalar_data struct { - value []byte // The scalar value. - multiline bool // Does the scalar contain line breaks? - flow_plain_allowed bool // Can the scalar be expressed in the flow plain style? - block_plain_allowed bool // Can the scalar be expressed in the block plain style? - single_quoted_allowed bool // Can the scalar be expressed in the single quoted style? - block_allowed bool // Can the scalar be expressed in the literal or folded styles? - style yaml_scalar_style_t // The output style. - } - - // Comments - head_comment []byte - line_comment []byte - foot_comment []byte - tail_comment []byte - - key_line_comment []byte - - // Dumper stuff - - opened bool // If the stream was already opened? - closed bool // If the stream was already closed? - - // The information associated with the document nodes. - anchors *struct { - references int // The number of references. - anchor int // The anchor id. - serialized bool // If the node has been emitted? - } - - last_anchor_id int // The last assigned anchor id. - - document *yaml_document_t // The currently emitted document. -} diff --git a/cli/internal/yaml/yamlprivateh.go b/cli/internal/yaml/yamlprivateh.go deleted file mode 100644 index dea1ba9610dfd..0000000000000 --- a/cli/internal/yaml/yamlprivateh.go +++ /dev/null @@ -1,198 +0,0 @@ -// -// Copyright (c) 2011-2019 Canonical Ltd -// Copyright (c) 2006-2010 Kirill Simonov -// -// Permission is hereby granted, free of charge, to any person obtaining a copy of -// this software and associated documentation files (the "Software"), to deal in -// the Software without restriction, including without limitation the rights to -// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -// of the Software, and to permit persons to whom the Software is furnished to do -// so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in all -// copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -// SOFTWARE. - -package yaml - -const ( - // The size of the input raw buffer. - input_raw_buffer_size = 512 - - // The size of the input buffer. - // It should be possible to decode the whole raw buffer. - input_buffer_size = input_raw_buffer_size * 3 - - // The size of the output buffer. - output_buffer_size = 128 - - // The size of the output raw buffer. - // It should be possible to encode the whole output buffer. - output_raw_buffer_size = (output_buffer_size*2 + 2) - - // The size of other stacks and queues. - initial_stack_size = 16 - initial_queue_size = 16 - initial_string_size = 16 -) - -// Check if the character at the specified position is an alphabetical -// character, a digit, '_', or '-'. -func is_alpha(b []byte, i int) bool { - return b[i] >= '0' && b[i] <= '9' || b[i] >= 'A' && b[i] <= 'Z' || b[i] >= 'a' && b[i] <= 'z' || b[i] == '_' || b[i] == '-' -} - -// Check if the character at the specified position is a digit. -func is_digit(b []byte, i int) bool { - return b[i] >= '0' && b[i] <= '9' -} - -// Get the value of a digit.
-func as_digit(b []byte, i int) int { - return int(b[i]) - '0' -} - -// Check if the character at the specified position is a hex-digit. -func is_hex(b []byte, i int) bool { - return b[i] >= '0' && b[i] <= '9' || b[i] >= 'A' && b[i] <= 'F' || b[i] >= 'a' && b[i] <= 'f' -} - -// Get the value of a hex-digit. -func as_hex(b []byte, i int) int { - bi := b[i] - if bi >= 'A' && bi <= 'F' { - return int(bi) - 'A' + 10 - } - if bi >= 'a' && bi <= 'f' { - return int(bi) - 'a' + 10 - } - return int(bi) - '0' -} - -// Check if the character is ASCII. -func is_ascii(b []byte, i int) bool { - return b[i] <= 0x7F -} - -// Check if the character at the start of the buffer can be printed unescaped. -func is_printable(b []byte, i int) bool { - return ((b[i] == 0x0A) || // . == #x0A - (b[i] >= 0x20 && b[i] <= 0x7E) || // #x20 <= . <= #x7E - (b[i] == 0xC2 && b[i+1] >= 0xA0) || // #0xA0 <= . <= #xD7FF - (b[i] > 0xC2 && b[i] < 0xED) || - (b[i] == 0xED && b[i+1] < 0xA0) || - (b[i] == 0xEE) || - (b[i] == 0xEF && // #xE000 <= . <= #xFFFD - !(b[i+1] == 0xBB && b[i+2] == 0xBF) && // && . != #xFEFF - !(b[i+1] == 0xBF && (b[i+2] == 0xBE || b[i+2] == 0xBF)))) -} - -// Check if the character at the specified position is NUL. -func is_z(b []byte, i int) bool { - return b[i] == 0x00 -} - -// Check if the beginning of the buffer is a BOM. -func is_bom(b []byte, i int) bool { - return b[0] == 0xEF && b[1] == 0xBB && b[2] == 0xBF -} - -// Check if the character at the specified position is space. -func is_space(b []byte, i int) bool { - return b[i] == ' ' -} - -// Check if the character at the specified position is tab. -func is_tab(b []byte, i int) bool { - return b[i] == '\t' -} - -// Check if the character at the specified position is blank (space or tab). -func is_blank(b []byte, i int) bool { - //return is_space(b, i) || is_tab(b, i) - return b[i] == ' ' || b[i] == '\t' -} - -// Check if the character at the specified position is a line break. -func is_break(b []byte, i int) bool { - return (b[i] == '\r' || // CR (#xD) - b[i] == '\n' || // LF (#xA) - b[i] == 0xC2 && b[i+1] == 0x85 || // NEL (#x85) - b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA8 || // LS (#x2028) - b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA9) // PS (#x2029) -} - -func is_crlf(b []byte, i int) bool { - return b[i] == '\r' && b[i+1] == '\n' -} - -// Check if the character is a line break or NUL. -func is_breakz(b []byte, i int) bool { - //return is_break(b, i) || is_z(b, i) - return ( - // is_break: - b[i] == '\r' || // CR (#xD) - b[i] == '\n' || // LF (#xA) - b[i] == 0xC2 && b[i+1] == 0x85 || // NEL (#x85) - b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA8 || // LS (#x2028) - b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA9 || // PS (#x2029) - // is_z: - b[i] == 0) -} - -// Check if the character is a line break, space, or NUL. -func is_spacez(b []byte, i int) bool { - //return is_space(b, i) || is_breakz(b, i) - return ( - // is_space: - b[i] == ' ' || - // is_breakz: - b[i] == '\r' || // CR (#xD) - b[i] == '\n' || // LF (#xA) - b[i] == 0xC2 && b[i+1] == 0x85 || // NEL (#x85) - b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA8 || // LS (#x2028) - b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA9 || // PS (#x2029) - b[i] == 0) -} - -// Check if the character is a line break, space, tab, or NUL. 
-func is_blankz(b []byte, i int) bool { - //return is_blank(b, i) || is_breakz(b, i) - return ( - // is_blank: - b[i] == ' ' || b[i] == '\t' || - // is_breakz: - b[i] == '\r' || // CR (#xD) - b[i] == '\n' || // LF (#xA) - b[i] == 0xC2 && b[i+1] == 0x85 || // NEL (#x85) - b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA8 || // LS (#x2028) - b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA9 || // PS (#x2029) - b[i] == 0) -} - -// Determine the width of the character. -func width(b byte) int { - // Don't replace these by a switch without first - // confirming that it is being inlined. - if b&0x80 == 0x00 { - return 1 - } - if b&0xE0 == 0xC0 { - return 2 - } - if b&0xF0 == 0xE0 { - return 3 - } - if b&0xF8 == 0xF0 { - return 4 - } - return 0 - -} diff --git a/cli/package.json b/cli/package.json index d229b27e25ce1..3078334bc4f2d 100644 --- a/cli/package.json +++ b/cli/package.json @@ -5,9 +5,6 @@ "scripts": { "clean": "make clean", "build": "make", - "test": "make test-go", - "format": "make fmt-go", - "lint": "make lint-go", "lint:prettier": "prettier -c ./**/*.js ./**/*.ts --cache --ignore-path=../.prettierignore" }, "devDependencies": { diff --git a/cli/scripts/npm-native-packages/npm-native-packages.js b/cli/scripts/npm-native-packages/npm-native-packages.js index 06ab67f1dd36d..52b7463b40d1b 100755 --- a/cli/scripts/npm-native-packages/npm-native-packages.js +++ b/cli/scripts/npm-native-packages/npm-native-packages.js @@ -49,9 +49,3 @@ fs.writeFileSync( path.join(outputPath, "package.json"), JSON.stringify(template, null, 2) ); - -const goBin = os === "windows" ? "go-turbo.exe" : "go-turbo"; -fs.copyFileSync( - path.join(__dirname, "..", "..", `dist-${os}-${arch}`, goBin), - path.join(outputPath, "bin", goBin) -); diff --git a/cli/turbo.json b/cli/turbo.json index 0901a19f104fe..0e986cbc543cf 100644 --- a/cli/turbo.json +++ b/cli/turbo.json @@ -5,12 +5,8 @@ "build": { "env": ["RUNNER_OS"], "outputs": [ - "../target/debug/go-turbo", - "../target/debug/go-turbo.exe", "../target/debug/turbo", "../target/debug/turbo.exe", - "../target/release/go-turbo", - "../target/release/go-turbo.exe", "../target/release/turbo", "../target/release/turbo.exe" ], diff --git a/crates/turborepo-ffi/Cargo.toml b/crates/turborepo-ffi/Cargo.toml deleted file mode 100644 index fb07c12298436..0000000000000 --- a/crates/turborepo-ffi/Cargo.toml +++ /dev/null @@ -1,27 +0,0 @@ -[package] -name = "turborepo-ffi" -version = "0.1.0" -edition = "2021" -license = "MPL-2.0" - -[lib] -crate-type = ["staticlib"] - -[lints] -workspace = true - -[dependencies] -directories = "4.0.1" -globwalk = { version = "0.1.0", path = "../turborepo-globwalk" } -prost = "0.11.6" -thiserror = { workspace = true } -turbopath = { workspace = true } -turborepo-cache = { workspace = true } -turborepo-env = { workspace = true } -turborepo-fs = { workspace = true } -turborepo-lockfiles = { workspace = true } -turborepo-scm = { workspace = true } - -[build-dependencies] -cbindgen = "0.24.3" -prost-build = "0.11.6" diff --git a/crates/turborepo-ffi/README.md b/crates/turborepo-ffi/README.md deleted file mode 100644 index 5f0fe90ec2a96..0000000000000 --- a/crates/turborepo-ffi/README.md +++ /dev/null @@ -1,24 +0,0 @@ -# Turborepo FFI - -This crate provides a C-compatible FFI for dependencies that are being -ported from Go to Rust. The dependencies use protobuf to send and receive -values from Go. - -The crate produces a staticlib which is then linked to the Go code -in `cli/internal/ffi/ffi.go` using CGO. 
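The handoff itself is a small, manually managed buffer. As a rough sketch of the pattern (the names here are illustrative only; the crate's real `Buffer` type, `free_buffer`, and prost-encoded payloads appear in the `src/lib.rs` hunk further down this diff), Rust leaks ownership of a heap allocation across the C ABI, and the caller must hand it back to Rust to be freed:

```rust
/// `#[repr(C)]` pins the struct layout so CGO sees exactly these two fields.
#[repr(C)]
pub struct ExampleBuffer {
    len: u32,
    data: *mut u8,
}

/// Allocate a payload on the Rust heap and leak ownership across the boundary.
#[no_mangle]
pub extern "C" fn example_message() -> ExampleBuffer {
    // Box<[u8]> guarantees len == capacity, which keeps the free side trivial.
    let bytes: Box<[u8]> = b"hello from the staticlib".to_vec().into_boxed_slice();
    let len = bytes.len() as u32;
    let data = Box::into_raw(bytes) as *mut u8;
    ExampleBuffer { len, data }
}

/// Memory allocated in Rust must come back to Rust to be freed.
#[no_mangle]
pub extern "C" fn example_free(buf: ExampleBuffer) {
    if !buf.data.is_null() {
        // SAFETY: the pointer and length originate from example_message above.
        unsafe {
            drop(Box::from_raw(std::ptr::slice_from_raw_parts_mut(
                buf.data,
                buf.len as usize,
            )));
        }
    }
}
```

In the crate itself the bytes are a prost-encoded protobuf message, and every response message pairs its payload with an error string in a `oneof`, so Go can surface Rust errors without widening the C ABI.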
- -## Common Questions - -- Why do I get linker errors in Go when I use this crate? - Can't I link C dependencies in Rust? - -Because this crate produces a staticlib, it cannot link C dependencies. -This is because a staticlib is an _input_ to the linker and therefore -cannot bundle its C dependencies. Instead, you need to pass the libraries -to the CGO linker. You can do so by editing `cli/internal/ffi/ffi.go`, -where the linker flags are defined. - -To find the libraries needed to link against, you can use rustc's `native-static-libs` -feature to print them. - -For more information, read [here](https://users.rust-lang.org/t/solved-statically-linking-rust-library-yields-undefined-references/53815/5) diff --git a/crates/turborepo-ffi/build.rs b/crates/turborepo-ffi/build.rs deleted file mode 100644 index 5cd44b02740dd..0000000000000 --- a/crates/turborepo-ffi/build.rs +++ /dev/null @@ -1,17 +0,0 @@ -use std::io::Result; - -use cbindgen::Language; - -fn main() -> Result<()> { - let crate_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap(); - - cbindgen::Builder::new() - .with_crate(crate_dir) - .with_language(Language::C) - .generate() - .expect("Unable to generate bindings") - .write_to_file("bindings.h"); - - prost_build::compile_protos(&["messages.proto"], &["."])?; - Ok(()) -} diff --git a/crates/turborepo-ffi/messages.proto b/crates/turborepo-ffi/messages.proto deleted file mode 100644 index de27736c5f3d5..0000000000000 --- a/crates/turborepo-ffi/messages.proto +++ /dev/null @@ -1,237 +0,0 @@ -syntax = "proto3"; -option go_package = "ffi/proto"; - -message TurboDataDirResp { - string dir = 1; -} - -message GlobReq { - string base_path = 1; - repeated string include_patterns = 2; - repeated string exclude_patterns = 3; - bool files_only = 4; // note that the default for a bool is false -} - -message GlobResp { - oneof response { - GlobRespList files = 1; - string error = 2; - } -} - -message GlobRespList { - repeated string files = 1; -} - -message ChangedFilesReq { - string git_root = 1; - string turbo_root = 2; - optional string from_commit = 3; - string to_commit = 4; -} - -message ChangedFilesResp { - oneof response { - ChangedFilesList files = 1; - string error = 2; - } -} - -message ChangedFilesList { - repeated string files = 1; -} - -message PreviousContentReq { - string git_root = 1; - string from_commit = 2; - string file_path = 3; -} - -message PreviousContentResp { - oneof response { - bytes content = 1; - string error = 2; - } -} - -enum PackageManager { - NPM = 0; - BERRY = 1; - PNPM = 2; - YARN = 3; - BUN = 4; -} - -message PackageDependency { - string name = 1; - string range = 2; -} - -message PackageDependencyList { - repeated PackageDependency list = 1; -} - -message WorkspaceDependencies { - map<string, LockfilePackageList> dependencies = 1; -} - -message TransitiveDepsRequest { - bytes contents = 1; - PackageManager package_manager = 2; - map<string, PackageDependencyList> workspaces = 3; - optional AdditionalBerryData resolutions = 4; -} - -message TransitiveDepsResponse { - oneof response { - WorkspaceDependencies dependencies = 1; - string error = 2; - } -} - -message AdditionalBerryData { - map<string, string> resolutions = 1; -} - -message LockfilePackage { - string key = 1; - string version = 2; - bool found = 3; -} - -message LockfilePackageList { - repeated LockfilePackage list = 1; -} - -message SubgraphRequest { - bytes contents = 1; - PackageManager package_manager = 2; - repeated string workspaces = 3; - repeated string packages = 4; - optional AdditionalBerryData resolutions = 5; -} - -message SubgraphResponse { - oneof response
{ - bytes contents = 1; - string error = 2; - } -} - -message PatchesRequest { - bytes contents = 1; - PackageManager package_manager = 2; -} - -message PatchesResponse { - oneof response { - Patches patches = 1; - string error = 2; - } -} - -message Patches { - repeated string patches = 1; -} - -message GlobalChangeRequest { - PackageManager package_manager = 1; - bytes prev_contents = 2; - bytes curr_contents = 3; -} - -message GlobalChangeResponse { - bool global_change = 1; -} - -message RecursiveCopyRequest { - string src = 1; - string dst = 2; -} - -message RecursiveCopyResponse { - optional string error = 1; -} - -message VerifySignatureRequest { - string hash = 1; - bytes artifact_body = 2; - bytes team_id = 3; - string expected_tag = 4; - optional bytes secret_key_override = 5; -} - -message VerifySignatureResponse { - oneof response { - bool verified = 1; - string error = 2; - } -} - -message GetPackageFileHashesRequest { - string turbo_root = 1; - string package_path = 2; - repeated string inputs = 3; -} - -message GetPackageFileHashesResponse { - oneof response { - FileHashes hashes = 1; - string error = 2; - } -} - -message GetHashesForFilesRequest { - string turbo_root = 1; - repeated string files = 2; - bool allow_missing = 3; -} - -message GetHashesForFilesResponse { - oneof response { - FileHashes hashes = 1; - string error = 2; - } -} - -message FileHashes { - map<string, string> hashes = 1; -} - -message FromWildcardsRequest { - EnvVarMap env_vars = 1; - repeated string wildcard_patterns = 2; -} - -message FromWildcardsResponse { - oneof response { - EnvVarMap env_vars = 1; - string error = 2; - } -} - -message EnvVarMap { - map<string, string> map = 1; -} - -message DetailedMap { - map<string, string> all = 1; - BySource by_source = 2; -} - -message BySource { - map<string, string> explicit = 1; - map<string, string> matching = 2; -} - -message GetGlobalHashableEnvVarsRequest { - EnvVarMap env_at_execution_start = 1; - repeated string global_env = 2; -} - -message GetGlobalHashableEnvVarsResponse { - oneof response { - DetailedMap detailed_map = 1; - string error = 2; - } -} diff --git a/crates/turborepo-ffi/src/lib.rs b/crates/turborepo-ffi/src/lib.rs deleted file mode 100644 index 80e928cdfc72a..0000000000000 --- a/crates/turborepo-ffi/src/lib.rs +++ /dev/null @@ -1,512 +0,0 @@ -//! turborepo-ffi -//! -//! Please read the notes about safety (marked with `SAFETY`) in both this file, -//! and in ffi.go before modifying this file. -mod lockfile; - -use std::{collections::HashMap, mem::ManuallyDrop, str::FromStr}; - -use globwalk::{globwalk, ValidatedGlob}; -pub use lockfile::{patches, subgraph, transitive_closure}; -use turbopath::{AbsoluteSystemPathBuf, AnchoredSystemPathBuf, PathError}; -use turborepo_env::EnvironmentVariableMap; - -mod proto { - include!(concat!(env!("OUT_DIR"), "/_.rs")); -} - -#[repr(C)] -#[derive(Debug)] -pub struct Buffer { - len: u32, - data: *mut u8, -} - -#[no_mangle] -pub extern "C" fn free_buffer(buffer: Buffer) { - // SAFETY - // it is important that any memory allocated in rust, is freed in rust - // we do this by converting the raw pointer to a Vec and letting it drop - // this is safe because we know that the memory was allocated by rust - // and that the length is correct - let _ = unsafe { Vec::from_raw_parts(buffer.data, buffer.len as usize, buffer.len as usize) }; -} - -impl<T: prost::Message> From<T> for Buffer { - fn from(value: T) -> Self { - let len = value.encoded_len() as u32; - let data = match len { - // Check if the message will have a non-zero length to avoid returning - // a dangling pointer to Go.
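// (a proto3 message whose fields all hold their default values encodes to zero bytes, and the pointer of an empty Vec is dangling rather than null; the null arm below gives Go an unambiguous sentinel, exercised by test_empty_message_has_null_ptr at the bottom of this file)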
- 0 => std::ptr::null_mut(), - _ => { - let mut bytes = ManuallyDrop::new(value.encode_to_vec()); - bytes.as_mut_ptr() - } - }; - Buffer { len, data } - } -} - -impl Buffer { - #[allow(dead_code)] - fn into_proto<T: prost::Message + Default>(self) -> Result<T, prost::DecodeError> { - // SAFETY - // protobuf has a fairly strict schema so overrunning or underrunning the byte - // array will not cause any major issues, that is to say garbage in - // garbage out - let mut slice = unsafe { std::slice::from_raw_parts(self.data, self.len as usize) }; - T::decode(&mut slice) - } -} - -#[no_mangle] -pub extern "C" fn get_turbo_data_dir() -> Buffer { - // note: this is _not_ recommended, but it is the current behaviour go-side - // ideally we should use the platform specific convention - // (which we get from using ProjectDirs::from) - let dirs = - directories::ProjectDirs::from_path("turborepo".into()).expect("user has a home dir"); - - let dir = dirs.data_dir().to_string_lossy().to_string(); - proto::TurboDataDirResp { dir }.into() -} - -#[no_mangle] -pub extern "C" fn changed_files(buffer: Buffer) -> Buffer { - let req: proto::ChangedFilesReq = match buffer.into_proto() { - Ok(req) => req, - Err(err) => { - let resp = proto::ChangedFilesResp { - response: Some(proto::changed_files_resp::Response::Error(err.to_string())), - }; - return resp.into(); - } - }; - - let response = match turborepo_scm::git::changed_files( - req.git_root.into(), - req.turbo_root.into(), - req.from_commit.as_deref(), - &req.to_commit, - ) { - Ok(files) => { - let files: Vec<_> = files.into_iter().collect(); - proto::changed_files_resp::Response::Files(proto::ChangedFilesList { files }) - } - Err(err) => proto::changed_files_resp::Response::Error(err.to_string()), - }; - - let resp = proto::ChangedFilesResp { - response: Some(response), - }; - resp.into() -} - -#[no_mangle] -pub extern "C" fn previous_content(buffer: Buffer) -> Buffer { - let req: proto::PreviousContentReq = match buffer.into_proto() { - Ok(req) => req, - Err(err) => { - let resp = proto::PreviousContentResp { - response: Some(proto::previous_content_resp::Response::Error( - err.to_string(), - )), - }; - return resp.into(); - } - }; - - let response = match turborepo_scm::git::previous_content( - req.git_root.into(), - &req.from_commit, - req.file_path, - ) { - Ok(content) => proto::previous_content_resp::Response::Content(content), - Err(err) => proto::previous_content_resp::Response::Error(err.to_string()), - }; - - let resp = proto::PreviousContentResp { - response: Some(response), - }; - resp.into() -} - -#[no_mangle] -pub extern "C" fn recursive_copy(buffer: Buffer) -> Buffer { - let req: proto::RecursiveCopyRequest = match buffer.into_proto() { - Ok(req) => req, - Err(err) => { - let resp = proto::RecursiveCopyResponse { - error: Some(err.to_string()), - }; - return resp.into(); - } - }; - - let src = match AbsoluteSystemPathBuf::new(req.src) { - Ok(src) => src, - Err(e) => { - let response = proto::RecursiveCopyResponse { - error: Some(e.to_string()), - }; - return response.into(); - } - }; - - let dst = match AbsoluteSystemPathBuf::new(req.dst) { - Ok(dst) => dst, - Err(e) => { - let response = proto::RecursiveCopyResponse { - error: Some(e.to_string()), - }; - return response.into(); - } - }; - - let response = match turborepo_fs::recursive_copy(src, dst) { - Ok(()) => proto::RecursiveCopyResponse { error: None }, - Err(e) => proto::RecursiveCopyResponse { - error: Some(e.to_string()), - }, - }; - response.into() -} - -#[no_mangle] -pub extern "C" fn verify_signature(buffer: Buffer) -> Buffer { - let
req: proto::VerifySignatureRequest = match buffer.into_proto() { - Ok(req) => req, - Err(err) => { - let resp = proto::VerifySignatureResponse { - response: Some(proto::verify_signature_response::Response::Error( - err.to_string(), - )), - }; - return resp.into(); - } - }; - - let authenticator = - turborepo_cache::signature_authentication::ArtifactSignatureAuthenticator::new( - req.team_id, - req.secret_key_override, - ); - - match authenticator.validate(req.hash.as_bytes(), &req.artifact_body, &req.expected_tag) { - Ok(verified) => { - let resp = proto::VerifySignatureResponse { - response: Some(proto::verify_signature_response::Response::Verified( - verified, - )), - }; - resp.into() - } - Err(err) => { - let resp = proto::VerifySignatureResponse { - response: Some(proto::verify_signature_response::Response::Error( - err.to_string(), - )), - }; - resp.into() - } - } -} - -#[no_mangle] -pub extern "C" fn get_package_file_hashes(buffer: Buffer) -> Buffer { - let req: proto::GetPackageFileHashesRequest = match buffer.into_proto() { - Ok(req) => req, - Err(err) => { - let resp = proto::GetPackageFileHashesResponse { - response: Some(proto::get_package_file_hashes_response::Response::Error( - err.to_string(), - )), - }; - return resp.into(); - } - }; - let turbo_root = match AbsoluteSystemPathBuf::new(req.turbo_root) { - Ok(turbo_root) => turbo_root, - Err(err) => { - let resp = proto::GetPackageFileHashesResponse { - response: Some(proto::get_package_file_hashes_response::Response::Error( - err.to_string(), - )), - }; - return resp.into(); - } - }; - let package_path = match AnchoredSystemPathBuf::from_raw(req.package_path) { - Ok(package_path) => package_path, - Err(err) => { - let resp = proto::GetPackageFileHashesResponse { - response: Some(proto::get_package_file_hashes_response::Response::Error( - err.to_string(), - )), - }; - return resp.into(); - } - }; - let inputs = req.inputs.as_slice(); - let hasher = turborepo_scm::SCM::new(&turbo_root); - let response = match hasher.get_package_file_hashes(&turbo_root, &package_path, inputs, None) { - Ok(hashes) => { - let mut to_return = HashMap::new(); - for (filename, hash) in hashes { - let filename = filename.to_string(); - to_return.insert(filename, hash); - } - let file_hashes = proto::FileHashes { hashes: to_return }; - proto::GetPackageFileHashesResponse { - response: Some(proto::get_package_file_hashes_response::Response::Hashes( - file_hashes, - )), - } - } - Err(err) => { - let resp = proto::GetPackageFileHashesResponse { - response: Some(proto::get_package_file_hashes_response::Response::Error( - err.to_string(), - )), - }; - return resp.into(); - } - }; - response.into() -} - -#[no_mangle] -pub extern "C" fn get_hashes_for_files(buffer: Buffer) -> Buffer { - let req: proto::GetHashesForFilesRequest = match buffer.into_proto() { - Ok(req) => req, - Err(err) => { - let resp = proto::GetHashesForFilesResponse { - response: Some(proto::get_hashes_for_files_response::Response::Error( - err.to_string(), - )), - }; - return resp.into(); - } - }; - let turbo_root = match AbsoluteSystemPathBuf::new(req.turbo_root) { - Ok(turbo_root) => turbo_root, - Err(err) => { - let resp = proto::GetHashesForFilesResponse { - response: Some(proto::get_hashes_for_files_response::Response::Error( - err.to_string(), - )), - }; - return resp.into(); - } - }; - let allow_missing = req.allow_missing; - let files = match req - .files - .iter() - .map(AnchoredSystemPathBuf::from_raw) - .collect::<Result<Vec<_>, PathError>>() - { - Ok(files) => files, - Err(err) => { - let
resp = proto::GetHashesForFilesResponse { - response: Some(proto::get_hashes_for_files_response::Response::Error( - err.to_string(), - )), - }; - return resp.into(); - } - }; - let hasher = turborepo_scm::SCM::new(&turbo_root); - - match hasher.get_hashes_for_files(&turbo_root, &files, allow_missing) { - Ok(hashes) => { - let mut to_return = HashMap::new(); - for (filename, hash) in hashes { - let filename = filename.to_string(); - to_return.insert(filename, hash); - } - let file_hashes = proto::FileHashes { hashes: to_return }; - let resp = proto::GetHashesForFilesResponse { - response: Some(proto::get_hashes_for_files_response::Response::Hashes( - file_hashes, - )), - }; - resp.into() - } - Err(err) => { - let resp = proto::GetHashesForFilesResponse { - response: Some(proto::get_hashes_for_files_response::Response::Error( - err.to_string(), - )), - }; - resp.into() - } - } -} - -#[no_mangle] -pub extern "C" fn glob(buffer: Buffer) -> Buffer { - let req: proto::GlobReq = match buffer.into_proto() { - Ok(req) => req, - Err(err) => { - let resp = proto::GlobResp { - response: Some(proto::glob_resp::Response::Error(err.to_string())), - }; - return resp.into(); - } - }; - let walk_type = match req.files_only { - true => globwalk::WalkType::Files, - false => globwalk::WalkType::All, - }; - - let inclusions = match req - .include_patterns - .iter() - .map(|i| ValidatedGlob::from_str(i)) - .collect::<Result<Vec<_>, _>>() - { - Ok(inclusions) => inclusions, - Err(err) => { - let resp = proto::GlobResp { - response: Some(proto::glob_resp::Response::Error(err.to_string())), - }; - return resp.into(); - } - }; - - let exclusions = match req - .exclude_patterns - .iter() - .map(|e| ValidatedGlob::from_str(e)) - .collect::<Result<Vec<_>, _>>() - { - Ok(exclusions) => exclusions, - Err(err) => { - let resp = proto::GlobResp { - response: Some(proto::glob_resp::Response::Error(err.to_string())), - }; - return resp.into(); - } - }; - - let files = match globwalk( - &AbsoluteSystemPathBuf::new(req.base_path).expect("absolute"), - &inclusions, - &exclusions, - walk_type, - ) { - Ok(files) => files, - Err(err) => { - let resp = proto::GlobResp { - response: Some(proto::glob_resp::Response::Error(err.to_string())), - }; - return resp.into(); - } - }; - - let files: Vec<_> = files.into_iter().map(|path| path.to_string()).collect(); - - proto::GlobResp { - response: Some(proto::glob_resp::Response::Files(proto::GlobRespList { - files, - })), - } - .into() -} - -#[no_mangle] -pub extern "C" fn from_wildcards(buffer: Buffer) -> Buffer { - let req: proto::FromWildcardsRequest = match buffer.into_proto() { - Ok(req) => req, - Err(err) => { - let resp = proto::FromWildcardsResponse { - response: Some(proto::from_wildcards_response::Response::Error( - err.to_string(), - )), - }; - return resp.into(); - } - }; - - let env_var_map: EnvironmentVariableMap = req.env_vars.unwrap().map.into(); - match env_var_map.from_wildcards(&req.wildcard_patterns) { - Ok(map) => { - let resp = proto::FromWildcardsResponse { - response: Some(proto::from_wildcards_response::Response::EnvVars( - proto::EnvVarMap { - map: map.into_inner(), - }, - )), - }; - resp.into() - } - Err(err) => { - let resp = proto::FromWildcardsResponse { - response: Some(proto::from_wildcards_response::Response::Error( - err.to_string(), - )), - }; - resp.into() - } - } -} - -#[no_mangle] -pub extern "C" fn get_global_hashable_env_vars(buffer: Buffer) -> Buffer { - let req: proto::GetGlobalHashableEnvVarsRequest = match buffer.into_proto() { - Ok(req) => req, - Err(err) => { - let resp =
proto::GetGlobalHashableEnvVarsResponse { - response: Some( - proto::get_global_hashable_env_vars_response::Response::Error(err.to_string()), - ), - }; - return resp.into(); - } - }; - - match turborepo_env::get_global_hashable_env_vars( - &req.env_at_execution_start.unwrap().map.into(), - &req.global_env, - ) { - Ok(map) => { - let resp = proto::GetGlobalHashableEnvVarsResponse { - response: Some( - proto::get_global_hashable_env_vars_response::Response::DetailedMap( - proto::DetailedMap { - all: map.all.into_inner(), - by_source: Some(proto::BySource { - explicit: map.by_source.explicit.into_inner(), - matching: map.by_source.matching.into_inner(), - }), - }, - ), - ), - }; - resp.into() - } - Err(err) => { - let resp = proto::GetGlobalHashableEnvVarsResponse { - response: Some( - proto::get_global_hashable_env_vars_response::Response::Error(err.to_string()), - ), - }; - resp.into() - } - } -} - -#[cfg(test)] -mod test { - use super::*; - - #[test] - fn test_empty_message_has_null_ptr() { - let message = proto::RecursiveCopyResponse { error: None }; - let buffer = Buffer::from(message); - assert_eq!(buffer.len, 0); - assert_eq!(buffer.data, std::ptr::null_mut()); - } -} diff --git a/crates/turborepo-ffi/src/lockfile.rs b/crates/turborepo-ffi/src/lockfile.rs deleted file mode 100644 index 42f340f208883..0000000000000 --- a/crates/turborepo-ffi/src/lockfile.rs +++ /dev/null @@ -1,300 +0,0 @@ -use std::{ - collections::{HashMap, HashSet}, - fmt, -}; - -use thiserror::Error; -use turborepo_lockfiles::{ - self, BerryLockfile, BunLockfile, Lockfile, LockfileData, NpmLockfile, Package, PnpmLockfile, - Yarn1Lockfile, -}; - -use super::{proto, Buffer}; - -impl From<Package> for proto::LockfilePackage { - fn from(value: Package) -> Self { - let Package { key, version } = value; - proto::LockfilePackage { - key, - version, - found: true, - } - } -} - -#[derive(Debug, Error)] -enum Error { - #[error("error performing lockfile operation: {0}")] - Lockfile(#[from] turborepo_lockfiles::Error), - #[error("error decoding protobuf: {0}")] - Protobuf(#[from] prost::DecodeError), - #[error(transparent)] - BerryParse(#[from] turborepo_lockfiles::BerryError), - #[error("unsupported package manager {0}")] - UnsupportedPackageManager(proto::PackageManager), -} - -#[no_mangle] -pub extern "C" fn transitive_closure(buf: Buffer) -> Buffer { - use proto::transitive_deps_response::Response; - let response = match transitive_closure_inner(buf) { - Ok(list) => Response::Dependencies(list), - Err(err) => Response::Error(err.to_string()), - }; - proto::TransitiveDepsResponse { - response: Some(response), - } - .into() -} - -fn transitive_closure_inner(buf: Buffer) -> Result<proto::WorkspaceDependencies, Error> { - let request: proto::TransitiveDepsRequest = buf.into_proto()?; - - match request.package_manager() { - proto::PackageManager::Npm => npm_transitive_closure_inner(request), - proto::PackageManager::Berry => berry_transitive_closure_inner(request), - proto::PackageManager::Pnpm => pnpm_transitive_closure_inner(request), - proto::PackageManager::Yarn => yarn_transitive_closure_inner(request), - proto::PackageManager::Bun => bun_transitive_closure_inner(request), - } -} - -fn npm_transitive_closure_inner( - request: proto::TransitiveDepsRequest, -) -> Result<proto::WorkspaceDependencies, Error> { - let proto::TransitiveDepsRequest { - contents, - workspaces, - ..
- } = request; - let lockfile = NpmLockfile::load(contents.as_slice())?; - let dependencies = turborepo_lockfiles::all_transitive_closures( - &lockfile, - workspaces.into_iter().map(|(k, v)| (k, v.into())).collect(), - )?; - Ok(dependencies.into()) -} - -fn berry_transitive_closure_inner( - request: proto::TransitiveDepsRequest, -) -> Result<proto::WorkspaceDependencies, Error> { - let proto::TransitiveDepsRequest { - contents, - workspaces, - resolutions, - .. - } = request; - let resolutions = - resolutions.map(|r| turborepo_lockfiles::BerryManifest::with_resolutions(r.resolutions)); - let data = LockfileData::from_bytes(contents.as_slice())?; - let lockfile = BerryLockfile::new(data, resolutions)?; - let dependencies = turborepo_lockfiles::all_transitive_closures( - &lockfile, - workspaces.into_iter().map(|(k, v)| (k, v.into())).collect(), - )?; - Ok(dependencies.into()) -} - -fn pnpm_transitive_closure_inner( - request: proto::TransitiveDepsRequest, -) -> Result<proto::WorkspaceDependencies, Error> { - let proto::TransitiveDepsRequest { - contents, - workspaces, - .. - } = request; - let lockfile = PnpmLockfile::from_bytes(contents.as_slice())?; - let dependencies = turborepo_lockfiles::all_transitive_closures( - &lockfile, - workspaces.into_iter().map(|(k, v)| (k, v.into())).collect(), - )?; - Ok(dependencies.into()) -} - -fn yarn_transitive_closure_inner( - request: proto::TransitiveDepsRequest, -) -> Result<proto::WorkspaceDependencies, Error> { - let proto::TransitiveDepsRequest { - contents, - workspaces, - .. - } = request; - let lockfile = - Yarn1Lockfile::from_bytes(contents.as_slice()).map_err(turborepo_lockfiles::Error::from)?; - let dependencies = turborepo_lockfiles::all_transitive_closures( - &lockfile, - workspaces.into_iter().map(|(k, v)| (k, v.into())).collect(), - )?; - Ok(dependencies.into()) -} - -fn bun_transitive_closure_inner( - request: proto::TransitiveDepsRequest, -) -> Result<proto::WorkspaceDependencies, Error> { - let proto::TransitiveDepsRequest { - contents, - workspaces, - .. - } = request; - let lockfile = - BunLockfile::from_bytes(contents.as_slice()).map_err(turborepo_lockfiles::Error::from)?; - let dependencies = turborepo_lockfiles::all_transitive_closures( - &lockfile, - workspaces.into_iter().map(|(k, v)| (k, v.into())).collect(), - )?; - Ok(dependencies.into()) -} - -#[no_mangle] -pub extern "C" fn subgraph(buf: Buffer) -> Buffer { - use proto::subgraph_response::Response; - proto::SubgraphResponse { - response: Some(match subgraph_inner(buf) { - Ok(contents) => Response::Contents(contents), - Err(err) => Response::Error(err.to_string()), - }), - } - .into() -} - -fn subgraph_inner(buf: Buffer) -> Result<Vec<u8>, Error> { - let request: proto::SubgraphRequest = buf.into_proto()?; - let package_manager = request.package_manager(); - let proto::SubgraphRequest { - contents, - workspaces, - packages, - resolutions, - .. - } = request; - let contents = match package_manager { - proto::PackageManager::Npm => { - turborepo_lockfiles::npm_subgraph(&contents, &workspaces, &packages)? - } - proto::PackageManager::Berry => turborepo_lockfiles::berry_subgraph( - &contents, - &workspaces, - &packages, - resolutions.map(|res| res.resolutions), - )?, - proto::PackageManager::Pnpm => { - turborepo_lockfiles::pnpm_subgraph(&contents, &workspaces, &packages)?
- } - proto::PackageManager::Yarn => turborepo_lockfiles::yarn_subgraph(&contents, &packages)?, - proto::PackageManager::Bun => { - return Err(Error::UnsupportedPackageManager(proto::PackageManager::Bun)) - } - }; - Ok(contents) -} - -#[no_mangle] -pub extern "C" fn patches(buf: Buffer) -> Buffer { - use proto::patches_response::Response; - proto::PatchesResponse { - response: Some(match patches_internal(buf) { - Ok(patches) => Response::Patches(patches), - Err(err) => Response::Error(err.to_string()), - }), - } - .into() -} - -fn patches_internal(buf: Buffer) -> Result<proto::Patches, Error> { - let request: proto::PatchesRequest = buf.into_proto()?; - let patches = match request.package_manager() { - proto::PackageManager::Berry => { - let data = LockfileData::from_bytes(&request.contents)?; - let lockfile = BerryLockfile::new(data, None)?; - Ok(lockfile - .patches()? - .into_iter() - .map(|p| p.to_string()) - .collect::<Vec<_>>()) - } - proto::PackageManager::Pnpm => { - let lockfile = PnpmLockfile::from_bytes(&request.contents)?; - Ok(lockfile - .patches()? - .into_iter() - .map(|p| p.to_string()) - .collect()) - } - pm => Err(Error::UnsupportedPackageManager(pm)), - }?; - Ok(proto::Patches { patches }) -} - -#[no_mangle] -pub extern "C" fn global_change(buf: Buffer) -> Buffer { - // If there's any issue checking if there's been a global lockfile change - // we assume one has changed. - let global_change = global_change_inner(buf).unwrap_or(true); - proto::GlobalChangeResponse { global_change }.into() -} - -fn global_change_inner(buf: Buffer) -> Result<bool, Error> { - let request: proto::GlobalChangeRequest = buf.into_proto()?; - match request.package_manager() { - proto::PackageManager::Npm => Ok(turborepo_lockfiles::npm_global_change( - &request.prev_contents, - &request.curr_contents, - )?), - proto::PackageManager::Berry => Ok(turborepo_lockfiles::berry_global_change( - &request.prev_contents, - &request.curr_contents, - )?), - proto::PackageManager::Pnpm => Ok(turborepo_lockfiles::pnpm_global_change( - &request.prev_contents, - &request.curr_contents, - )?), - proto::PackageManager::Yarn => Ok(false), - proto::PackageManager::Bun => Ok(false), - } -} - -impl From<proto::PackageDependencyList> for HashMap<String, String> { - fn from(other: proto::PackageDependencyList) -> Self { - other - .list - .into_iter() - .map(|proto::PackageDependency { name, range }| (name, range)) - .collect() - } -} - -impl From<HashSet<Package>> for proto::LockfilePackageList { - fn from(value: HashSet<Package>) -> Self { - proto::LockfilePackageList { - list: value - .into_iter() - .map(proto::LockfilePackage::from) - .collect(), - } - } -} - -impl From<HashMap<String, HashSet<Package>>> for proto::WorkspaceDependencies { - fn from(value: HashMap<String, HashSet<Package>>) -> Self { - proto::WorkspaceDependencies { - dependencies: value - .into_iter() - .map(|(workspace, dependencies)| { - (workspace, proto::LockfilePackageList::from(dependencies)) - }) - .collect(), - } - } -} - -impl fmt::Display for proto::PackageManager { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str(match self { - proto::PackageManager::Npm => "npm", - proto::PackageManager::Berry => "berry", - proto::PackageManager::Pnpm => "pnpm", - proto::PackageManager::Yarn => "yarn", - proto::PackageManager::Bun => "bun", - }) - } -} diff --git a/crates/turborepo-lib/build.rs b/crates/turborepo-lib/build.rs index d588329e7e02c..a77750cd58c23 100644 --- a/crates/turborepo-lib/build.rs +++ b/crates/turborepo-lib/build.rs @@ -11,7 +11,7 @@ fn main() -> Result<(), Box<dyn std::error::Error>> { let tonic_build_result = tonic_build::configure() .build_server(true)
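// (the include path below moves to ./src/daemon, where turbod.proto now lives; see the rename out of the deleted cli/internal/turbodprotocol tree further down)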
.file_descriptor_set_path("src/daemon/file_descriptor_set.bin") - .compile(&["turbod.proto"], &["../../cli/internal/turbodprotocol"]); + .compile(&["turbod.proto"], &["./src/daemon"]); let capnpc_result = capnpc::CompilerCommand::new() .file("./src/hash/proto.capnp") .import_path("./src/hash/std") // we need to include the 'stdlib' for capnp-go diff --git a/crates/turborepo-lib/src/cli/mod.rs b/crates/turborepo-lib/src/cli/mod.rs index 05d9079d886ec..d5babe4f7f554 100644 --- a/crates/turborepo-lib/src/cli/mod.rs +++ b/crates/turborepo-lib/src/cli/mod.rs @@ -24,12 +24,12 @@ use turborepo_ui::UI; use crate::{ commands::{ - bin, daemon, generate, info, link, login, logout, prune, telemetry, unlink, CommandBase, + bin, daemon, generate, info, link, login, logout, prune, run, telemetry, unlink, + CommandBase, }, get_version, shim::TurboState, tracing::TurboSubscriber, - Payload, }; mod error; @@ -101,8 +101,6 @@ impl Display for LogOrder { } } -// NOTE: These *must* be kept in sync with the `_dryRunJSONValue` -// and `_dryRunTextValue` constants in run.go. #[derive(Copy, Clone, Debug, PartialEq, Serialize, ValueEnum)] pub enum DryRunMode { Text, @@ -619,10 +617,6 @@ pub struct RunArgs { pub continue_execution: bool, #[clap(alias = "dry", long = "dry-run", num_args = 0..=1, default_missing_value = "text")] pub dry_run: Option<DryRunMode>, - /// Fallback to use Go for task execution - #[serde(skip)] - #[clap(long, conflicts_with = "remote_cache_read_only")] - pub go_fallback: bool, /// Run turbo in single-package mode #[clap(long)] pub single_package: bool, @@ -802,7 +796,6 @@ impl RunArgs { // default to true track_usage!(telemetry, self.continue_execution, |val| val); track_usage!(telemetry, self.include_dependencies, |val| val); - track_usage!(telemetry, self.go_fallback, |val| val); track_usage!(telemetry, self.single_package, |val| val); track_usage!(telemetry, self.no_deps, |val| val); track_usage!(telemetry, self.no_cache, |val| val); @@ -926,7 +919,7 @@ pub async fn run( repo_state: Option<RepoState>, #[allow(unused_variables)] logger: &TurboSubscriber, ui: UI, -) -> Result<Payload, Error> { +) -> Result<i32, Error> { let mut cli_args = Args::new(); let version = get_version(); @@ -1033,7 +1026,7 @@ pub async fn run( .track_call(); bin::run()?; - Ok(Payload::Rust(Ok(0))) + Ok(0) } #[allow(unused_variables)] Command::Daemon { command, idle_time } => { @@ -1044,15 +1037,10 @@ pub async fn run( match command { Some(command) => daemon::daemon_client(command, &base).await, - #[cfg(not(feature = "go-daemon"))] None => daemon::daemon_server(&base, idle_time, logger).await, - #[cfg(feature = "go-daemon")] - None => { - return Ok(Payload::Go(Box::new(base))); - } }?; - Ok(Payload::Rust(Ok(0))) + Ok(0) } Command::Generate { tag, command, @@ -1073,7 +1061,7 @@ pub async fn run( }; let child_event = event.child(); generate::run(tag, command, &args, child_event)?; - Ok(Payload::Rust(Ok(0))) + Ok(0) } Command::Telemetry { command } => { let event = CommandEventBuilder::new("telemetry").with_parent(&root_telemetry); @@ -1081,7 +1069,7 @@ pub async fn run( let mut base = CommandBase::new(cli_args.clone(), repo_root, version, ui); let child_event = event.child(); telemetry::configure(command, &mut base, child_event); - Ok(Payload::Rust(Ok(0))) + Ok(0) } Command::Info { workspace, json } => { CommandEventBuilder::new("info") @@ -1092,7 +1080,7 @@ pub async fn run( let mut base = CommandBase::new(cli_args, repo_root, version, ui); info::run(&mut base, workspace.as_deref(), json).await?; - Ok(Payload::Rust(Ok(0))) + Ok(0) } Command::Link { no_gitignore, @@ -1103,7
+1091,7 @@ pub async fn run( .track_call(); if cli_args.test_run { println!("Link test run successful"); - return Ok(Payload::Rust(Ok(0))); + return Ok(0); } let modify_gitignore = !*no_gitignore; @@ -1114,7 +1102,7 @@ pub async fn run( error!("error: {}", err.to_string()) } - Ok(Payload::Rust(Ok(0))) + Ok(0) } Command::Logout { .. } => { let event = CommandEventBuilder::new("logout").with_parent(&root_telemetry); @@ -1123,14 +1111,14 @@ pub async fn run( let event_child = event.child(); logout::logout(&mut base, event_child)?; - Ok(Payload::Rust(Ok(0))) + Ok(0) } Command::Login { sso_team } => { let event = CommandEventBuilder::new("login").with_parent(&root_telemetry); event.track_call(); if cli_args.test_run { println!("Login test run successful"); - return Ok(Payload::Rust(Ok(0))); + return Ok(0); } let sso_team = sso_team.clone(); @@ -1144,7 +1132,7 @@ pub async fn run( login::login(&mut base, event_child).await?; } - Ok(Payload::Rust(Ok(0))) + Ok(0) } Command::Unlink { target } => { CommandEventBuilder::new("unlink") @@ -1152,7 +1140,7 @@ pub async fn run( .track_call(); if cli_args.test_run { println!("Unlink test run successful"); - return Ok(Payload::Rust(Ok(0))); + return Ok(0); } let from = *target; @@ -1160,7 +1148,7 @@ pub async fn run( unlink::unlink(&mut base, from)?; - Ok(Payload::Rust(Ok(0))) + Ok(0) } Command::Run(args) => { let event = CommandEventBuilder::new("run").with_parent(&root_telemetry); @@ -1177,33 +1165,19 @@ pub async fn run( } let base = CommandBase::new(cli_args.clone(), repo_root, version, ui); - let should_use_go = args.go_fallback - || env::var("EXPERIMENTAL_RUST_CODEPATH").as_deref() == Ok("false"); - args.track(&event); - if should_use_go { - event.track_run_code_path(CodePath::Go); - // we have to clear the telemetry queue before we hand off to go - if telemetry_handle.is_some() { - let handle = telemetry_handle.take().unwrap(); - handle.close_with_timeout().await; - } - Ok(Payload::Go(Box::new(base))) - } else { - use crate::commands::run; - event.track_run_code_path(CodePath::Rust); - let exit_code = run::run(base, event) - .await - .inspect(|code| { - if *code != 0 { - error!("run failed: command exited ({code})"); - } - }) - .inspect_err(|err| { - error!("run failed: {err}"); - })?; - Ok(Payload::Rust(Ok(exit_code))) - } + event.track_run_code_path(CodePath::Rust); + let exit_code = run::run(base, event) + .await + .inspect(|code| { + if *code != 0 { + error!("run failed: command exited ({code})"); + } + }) + .inspect_err(|err| { + error!("run failed: {err}"); + })?; + Ok(exit_code) } Command::Prune { scope, @@ -1223,14 +1197,14 @@ pub async fn run( let base = CommandBase::new(cli_args, repo_root, version, ui); let event_child = event.child(); prune::prune(&base, &scope, docker, &output_dir, event_child).await?; - Ok(Payload::Rust(Ok(0))) + Ok(0) } Command::Completion { shell } => { CommandEventBuilder::new("completion") .with_parent(&root_telemetry) .track_call(); generate(*shell, &mut Args::command(), "turbo", &mut io::stdout()); - Ok(Payload::Rust(Ok(0))) + Ok(0) } }; @@ -2341,47 +2315,6 @@ mod test { ); } - #[test] - fn test_go_fallback_conflicts_with_remote_read_only() { - assert!(Args::try_parse_from([ - "turbo", - "build", - "--remote-cache-read-only", - "--go-fallback", - ]) - .unwrap_err() - .to_string() - .contains( - "the argument '--remote-cache-read-only []' cannot be used with '--go-fallback" - )); - assert!(Args::try_parse_from([ - "turbo", - "--go-fallback", - "--remote-cache-read-only", - "true", - "build", - ]) - .unwrap_err() - 
.to_string() - .contains( - "the argument '--go-fallback' cannot be used with '--remote-cache-read-only []'" - )); - assert!(Args::try_parse_from([ - "turbo", - "run", - "build", - "--remote-cache-read-only", - "--go-fallback", - ]) - .unwrap_err() - .to_string() - .contains( - "the argument '--remote-cache-read-only []' cannot be used with '--go-fallback" - )); - assert!(Args::try_parse_from(["turbo", "build", "--go-fallback"]).is_ok(),); - assert!(Args::try_parse_from(["turbo", "build", "--remote-cache-read-only",]).is_ok(),); - } - #[test] fn test_profile_usage() { assert!(Args::try_parse_from(["turbo", "build", "--profile", ""]).is_err()); diff --git a/cli/internal/turbodprotocol/turbod.proto b/crates/turborepo-lib/src/daemon/turbod.proto similarity index 100% rename from cli/internal/turbodprotocol/turbod.proto rename to crates/turborepo-lib/src/daemon/turbod.proto diff --git a/crates/turborepo-lib/src/lib.rs b/crates/turborepo-lib/src/lib.rs index 629d880152125..10b9822d63147 100644 --- a/crates/turborepo-lib/src/lib.rs +++ b/crates/turborepo-lib/src/lib.rs @@ -16,6 +16,7 @@ mod commands; mod config; mod daemon; mod engine; + mod execution_state; mod framework; pub(crate) mod globwatcher; @@ -32,27 +33,17 @@ mod tracing; mod turbo_json; mod unescape; -pub use child::spawn_child; use miette::Report; -use shim::Error; pub use crate::{ + child::spawn_child, cli::Args, commands::DaemonRootHasher, daemon::{DaemonClient, DaemonConnector}, execution_state::ExecutionState, run::package_discovery::DaemonPackageDiscovery, }; -use crate::{commands::CommandBase, engine::BuilderError}; - -/// The payload from running main, if the program can complete without using Go -/// the Rust variant will be returned. If Go is needed then the execution state -/// that should be passed to Go will be returned. -#[derive(Debug)] -pub enum Payload { - Rust(Result<i32, Error>), - Go(Box<CommandBase>), -} +use crate::{engine::BuilderError, shim::Error}; pub fn get_version() -> &'static str { include_str!("../../../version.txt") @@ -63,9 +54,9 @@ pub fn get_version() -> &'static str { .trim_end() } -pub fn main() -> Payload { +pub fn main() -> Result<i32, Error> { match shim::run() { - Ok(payload) => payload, + Ok(code) => Ok(code), // We only print using miette for some errors because we want to keep // compatibility with Go. When we've deleted the Go code we can // move all errors to miette since it provides slightly nicer @@ -85,16 +76,16 @@ pub fn main() -> Payload { ) => { println!("{:?}", Report::new(err)); - Payload::Rust(Ok(1)) + Ok(1) } // We don't need to print "Turbo error" for Run errors - Err(err @ shim::Error::Cli(cli::Error::Run(_))) => Payload::Rust(Err(err)), + Err(err @ shim::Error::Cli(cli::Error::Run(_))) => Err(err), Err(err) => { // This raw print matches the Go behavior, once we no longer care // about matching formatting we should remove this.
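// (Run errors never reach this print: the match arm above returns them bare, since the run command already reports its own failures)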
println!("Turbo error: {err}"); - Payload::Rust(Err(err)) + Err(err) } } } diff --git a/crates/turborepo-lib/src/shim.rs b/crates/turborepo-lib/src/shim.rs index c102dea3e5833..96e6530370c50 100644 --- a/crates/turborepo-lib/src/shim.rs +++ b/crates/turborepo-lib/src/shim.rs @@ -26,7 +26,7 @@ use turborepo_repository::{ }; use turborepo_ui::UI; -use crate::{cli, get_version, spawn_child, tracing::TurboSubscriber, Payload}; +use crate::{cli, get_version, spawn_child, tracing::TurboSubscriber}; #[derive(Debug, Error, Diagnostic)] #[error("cannot have multiple `--cwd` flags in command")] @@ -561,7 +561,7 @@ fn run_correct_turbo( shim_args: ShimArgs, subscriber: &TurboSubscriber, ui: UI, -) -> Result<Payload, Error> { +) -> Result<i32, Error> { if let Some(turbo_state) = LocalTurboState::infer(&repo_state.root) { try_check_for_updates(&shim_args, &turbo_state.version); @@ -573,11 +573,7 @@ fn run_correct_turbo( debug!("Currently running turbo is local turbo."); Ok(cli::run(Some(repo_state), subscriber, ui)?) } else { - Ok(Payload::Rust(spawn_local_turbo( - &repo_state, - turbo_state, - shim_args, - ))) + spawn_local_turbo(&repo_state, turbo_state, shim_args) } } else { try_check_for_updates(&shim_args, get_version()); @@ -713,7 +709,7 @@ fn try_check_for_updates(args: &ShimArgs, current_version: &str) { } } -pub fn run() -> Result<Payload, Error> { +pub fn run() -> Result<i32, Error> { let args = ShimArgs::parse()?; let ui = args.ui(); if ui.should_strip_ansi { diff --git a/crates/turborepo/Cargo.toml b/crates/turborepo/Cargo.toml index 9312d964a3d77..32ad9ca8ebc25 100644 --- a/crates/turborepo/Cargo.toml +++ b/crates/turborepo/Cargo.toml @@ -9,12 +9,11 @@ license = "MPL-2.0" # This is for the convenience of running daily dev workflows, i.e running # `cargo xxx` without explicitly specifying features, not that we want to # promote this as default backend.
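# (go-binary drops out of the default set below because the build.rs that compiled and staged a companion go-turbo binary is deleted later in this diff, leaving the feature with nothing to gate)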
-default = ["rustls-tls", "go-binary"] +default = ["rustls-tls"] native-tls = ["turborepo-lib/native-tls"] rustls-tls = ["turborepo-lib/rustls-tls"] http = ["turborepo-lib/http"] go-daemon = ["turborepo-lib/go-daemon"] -go-binary = [] pprof = ["turborepo-lib/pprof"] # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html @@ -36,6 +35,7 @@ clap_complete = { workspace = true } command-group = { version = "2.0.1", features = ["with-tokio"] } dunce = { workspace = true } human-panic = "1.2.1" +miette.workspace = true serde = { workspace = true, features = ["derive"] } serde_json = { workspace = true } serde_yaml = { workspace = true } diff --git a/crates/turborepo/build.rs b/crates/turborepo/build.rs deleted file mode 100644 index e7039e63bac7f..0000000000000 --- a/crates/turborepo/build.rs +++ /dev/null @@ -1,72 +0,0 @@ -use std::{env, fs, path::PathBuf, process::Command}; - -fn main() { - println!("cargo:rerun-if-changed=../../cli"); - let profile = env::var("PROFILE").unwrap(); - let is_ci_release = - &profile == "release" && matches!(env::var("RELEASE_TURBO_CLI"), Ok(v) if v == "true"); - - let invocation = std::env::var("RUSTC_WRAPPER").unwrap_or_default(); - if !is_ci_release && !invocation.ends_with("rust-analyzer") { - build_local_go_binary(profile); - } -} - -#[cfg(any(not(feature = "go-binary"), doc))] -fn build_local_go_binary(_: String) {} - -#[cfg(all(feature = "go-binary", not(doc)))] -fn build_local_go_binary(profile: String) { - let cli_path = cli_path(); - let target = build_target::target().unwrap(); - - let go_binary_name = if target.os == build_target::Os::Windows { - "go-turbo.exe" - } else { - "go-turbo" - }; - - #[cfg(not(windows))] - let mut cmd = { - let mut cmd = Command::new("make"); - cmd.current_dir(&cli_path); - cmd.arg(go_binary_name); - cmd - }; - #[cfg(windows)] - let mut cmd = { - let mut cmd = Command::new(cli_path.join("build_go.bat")); - cmd.current_dir(&cli_path); - cmd - }; - - assert!( - cmd.stdout(std::process::Stdio::inherit()) - .status() - .expect("failed to build go binary") - .success(), - "failed to build go binary" - ); - - let go_binary_path = env::var("CARGO_WORKSPACE_DIR") - .map(PathBuf::from) - .unwrap() - .join("cli") - .join(go_binary_name); - - let new_go_binary_path = env::var_os("CARGO_WORKSPACE_DIR") - .map(PathBuf::from) - .unwrap() - .join("target") - .join(profile) - .join(go_binary_name); - - fs::rename(go_binary_path, new_go_binary_path).unwrap(); -} - -fn cli_path() -> PathBuf { - env::var_os("CARGO_WORKSPACE_DIR") - .map(PathBuf::from) - .unwrap() - .join("cli") -} diff --git a/crates/turborepo/src/main.rs b/crates/turborepo/src/main.rs index a77325e68ffd0..592957591f638 100644 --- a/crates/turborepo/src/main.rs +++ b/crates/turborepo/src/main.rs @@ -3,83 +3,18 @@ mod panic_handler; -use std::{ - env::{consts, current_exe}, - process, - process::Stdio, -}; +use std::process; use anyhow::Result; -use dunce::canonicalize as fs_canonicalize; -use tracing::{debug, error, trace}; -use turborepo_lib::{spawn_child, ExecutionState, Payload}; use crate::panic_handler::panic_handler; -fn run_go_binary(execution_state: ExecutionState) -> Result<i32> { - // canonicalize the binary path to ensure we can find go-turbo - let turbo_path = fs_canonicalize(current_exe()?)?; - let mut go_binary_path = turbo_path.clone(); - go_binary_path.pop(); - #[cfg(windows)] - go_binary_path.push("go-turbo.exe"); - #[cfg(not(windows))] - go_binary_path.push("go-turbo"); - - if go_binary_path.exists() { - debug!("Found go
binary at {:?}", go_binary_path); - } else { - error!("Unable to find Go binary. Please report this issue at https://github.com/vercel/turbo/issues and include your package manager and version along with the following information: - os={os} - arch={arch} - turbo-version={turbo_version} - turbo-bin={turbo_bin} - go-turbo-bin={go_turbo_bin} - ", - os = consts::OS, - arch = consts::ARCH, - turbo_version = turborepo_lib::get_version(), - turbo_bin = turbo_path.display(), - go_turbo_bin = go_binary_path.display() - ); - // return an error - return Err(anyhow::anyhow!( - "Failed to execute turbo (Unable to locate Go binary)." - )); - } - - if execution_state.cli_args.test_run { - let serialized_args = serde_json::to_string_pretty(&execution_state)?; - println!("{}", serialized_args); - return Ok(0); - } - - let serialized_args = serde_json::to_string(&execution_state)?; - trace!("Invoking go binary with {}", serialized_args); - let mut command = process::Command::new(go_binary_path); - command - .arg(serialized_args) - .stdout(Stdio::inherit()) - .stderr(Stdio::inherit()); - - let child = spawn_child(command)?; - let exit_code = child.wait()?.code().unwrap_or(2); - - Ok(exit_code) -} - // This function should not expanded. Please add any logic to // `turborepo_lib::main` instead fn main() -> Result<()> { std::panic::set_hook(Box::new(panic_handler)); - let exit_code = match turborepo_lib::main() { - Payload::Rust(res) => res.unwrap_or(1), - Payload::Go(base) => { - let execution_state = (&*base).try_into()?; - run_go_binary(execution_state)? - } - }; + let exit_code = turborepo_lib::main().unwrap_or(1); process::exit(exit_code) } diff --git a/go.work b/go.work deleted file mode 100644 index b2b03060608d8..0000000000000 --- a/go.work +++ /dev/null @@ -1,3 +0,0 @@ -go 1.20 - -use ./cli diff --git a/go.work.sum b/go.work.sum deleted file mode 100644 index eabc9a86acd45..0000000000000 --- a/go.work.sum +++ /dev/null @@ -1,156 +0,0 @@ -cloud.google.com/go v0.99.0 h1:y/cM2iqGgGi5D5DQZl6D9STN/3dR/Vx5Mp8s752oJTY= -cloud.google.com/go/bigquery v1.8.0 h1:PQcPefKFdaIzjQFbiyOgAqyx8q5djaE7x9Sqe712DPA= -cloud.google.com/go/datastore v1.1.0 h1:/May9ojXjRkPBNVrq+oWLqmWCkr4OU5uRY29bu0mRyQ= -cloud.google.com/go/firestore v1.6.1 h1:8rBq3zRjnHx8UtBvaOWqBB1xq9jH6/wltfQLlTMh2Fw= -cloud.google.com/go/pubsub v1.3.1 h1:ukjixP1wl0LpnZ6LWtZJ0mX5tBmjp1f8Sqer8Z2OMUU= -cloud.google.com/go/storage v1.10.0 h1:STgFzyU5/8miMl0//zKh2aQeTyeaUH3WN9bSUiJ09bA= -dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9 h1:VpgP7xuJadIUuKccphEpTJnWhS2jkQyMt6Y7pJCD7fY= -github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ= -github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802 h1:1BDTz0u9nC3//pOCMdNH+CiXJVYJh5UQNCOBG7jbELc= -github.com/DataDog/datadog-go v3.2.0+incompatible h1:qSG2N4FghB1He/r2mFrWKCaL7dXCilEuNEeAn20fdD4= -github.com/Netflix/go-expect v0.0.0-20220104043353-73e0943537d2 h1:+vx7roKuyA63nhn5WAunQHLTznkw5W8b1Xc0dNjp83s= -github.com/OneOfOne/xxhash v1.2.2 h1:KMrpdQIwFcEqXDklaen+P1axHaj9BSKzvpUUfnHldSE= -github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751 h1:JYp7IbQjafoB+tBA3gMyHYHrpOtNuDiK/uB5uXxq5wM= -github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4 h1:Hs82Z41s6SdL1CELW+XaDYmOH4hkBN4/N9og/AsOv7E= -github.com/antihax/optional v1.0.0 h1:xK2lYat7ZLaVVcIuj82J8kIro4V6kDe0AUDFboUCwcg= -github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e h1:QEF07wC0T1rKkctt1RINW/+RMTVmiwxETico2l3gxJA= -github.com/armon/go-metrics v0.3.10 
h1:FR+drcQStOe+32sYyJYyZ7FIdgoGGBnwLl+flodp8Uo= -github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= -github.com/census-instrumentation/opencensus-proto v0.3.0 h1:t/LhUZLVitR1Ow2YOnduCsavhwFUklBMoGVYUCqmCqk= -github.com/cespare/xxhash v1.1.0 h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko= -github.com/cespare/xxhash/v2 v2.1.2 h1:YRXhKfTDauu4ajMg1TPgFO5jnlC2HCbmLXMcTG5cbYE= -github.com/chzyer/logex v1.1.10 h1:Swpa1K6QvQznwJRcfTfQJmTE72DqScAa40E+fbHEXEE= -github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e h1:fY5BOSpyZCqRo5OhCuC+XN+r/bBCmeuuJtjz+bCNIf8= -github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1 h1:q763qf9huN11kDQavWsoZXJNW3xEE4JJyHa5Q25/sd8= -github.com/circonus-labs/circonus-gometrics v2.3.1+incompatible h1:C29Ae4G5GtYyYMm1aztcyj/J5ckgJm2zwdDajFbx1NY= -github.com/circonus-labs/circonusllhist v0.1.3 h1:TJH+oke8D16535+jHExHj4nQvzlZrj7ug5D7I/orNUA= -github.com/client9/misspell v0.3.4 h1:ta993UF76GwbvJcIo3Y68y/M3WxlpEHPWIGDkJYwzJI= -github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4 h1:hzAQntlaYRkVSFEfj9OTWlVV1H155FMD8BTKktLv0QI= -github.com/cncf/xds/go v0.0.0-20211130200136-a8f946100490 h1:KwaoQzs/WeUxxJqiJsZ4euOly1Az/IgZXXSxlD/UBNk= -github.com/coreos/go-semver v0.3.0 h1:wkHLiw0WNATZnSG7epLsujiMCgPAc9xhjJ4tgnAxmfM= -github.com/coreos/go-systemd/v22 v22.3.2 h1:D9/bQk5vlXQFZ6Kwuu6zaiXJ9oTPe68++AzAJc1DzSI= -github.com/cpuguy83/go-md2man/v2 v2.0.1 h1:r/myEWzV9lfsM1tFLgDyu0atFtJ1fXn261LKYj/3DxU= -github.com/creack/pty v1.1.17 h1:QeVUsEDNrLBW4tMgZHvxy18sKtr6VI492kBhUfhDJNI= -github.com/envoyproxy/go-control-plane v0.10.2-0.20220325020618-49ff273808a1 h1:xvqufLtNVwAhN8NMyWklVgxnWohi+wtMGQMhtxexlm0= -github.com/envoyproxy/protoc-gen-validate v0.6.2 h1:JiO+kJTpmYGjEodY7O1Zk8oZcNz1+f30UtwtXoFUPzE= -github.com/frankban/quicktest v1.14.3/go.mod h1:mgiwOwqx65TmIk1wJ6Q7wvnVMocbUorkibMOrVTHZps= -github.com/ghodss/yaml v1.0.0 h1:wQHKEahhL6wmXdzwWG11gIVCkOv05bNOh+Rxn0yngAk= -github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1 h1:QbL/5oDUmRBzO9/Z7Seo6zf912W/a6Sr4Eu0G/3Jho0= -github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4 h1:WtGNWLvXpe6ZudgnXrq0barxBImvnnJoMEhXAzcbM0I= -github.com/go-kit/kit v0.9.0 h1:wDJmvq38kDhkVxi50ni9ykkdUr1PKgqKOoi01fa0Mdk= -github.com/go-logfmt/logfmt v0.4.0 h1:MP4Eh7ZCb31lleYCFuwm0oe4/YGak+5l1vA2NOE80nA= -github.com/go-stack/stack v1.8.0 h1:5SgMzNM5HxrEjV0ww2lTmX6E2Izsfxas4+YHWRs3Lsk= -github.com/godbus/dbus/v5 v5.0.4 h1:9349emZab16e7zQvpmsbtjc18ykshndd8y2PG3sgJbA= -github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= -github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b h1:VKtxabqXZkF25pY9ekfRL6a582T4P37/31XEstQ5p58= -github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE= -github.com/golang/mock v1.6.0 h1:ErTB+efbowRARo13NNdxyJji2egdxLGQhRaY+DUumQc= -github.com/golang/snappy v0.0.3 h1:fHPg5GQYlCeLIPB9BZqMVR5nR9A+IM5zcgeTdjMYmLA= -github.com/google/btree v1.0.0 h1:0udJVsspx3VBr5FwtLhQQtuAsVc79tTq0ocGIPAU6qo= -github.com/google/gofuzz v1.0.0 h1:A8PeW59pxE9IoFRqBp37U+mSNaQoZ46F1f0f863XSXw= -github.com/google/martian v2.1.0+incompatible h1:/CP5g8u/VJHijgedC/Legn3BAbAaWPgecwXBIDzw5no= -github.com/google/martian/v3 v3.2.1 h1:d8MncMlErDFTwQGBK1xhv026j9kqhvw1Qv9IbWT1VLQ= -github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1 h1:K6RDEckDVWvDI9JAJYCmNdQXq6neHJOYx3V6jnqNEec= -github.com/google/renameio v0.1.0 h1:GOZbcHa3HfsPKPlmyPyN2KEohoMXOhdMbHrvbpl2QaA= -github.com/googleapis/gax-go/v2 v2.1.1 
-github.com/grpc-ecosystem/grpc-gateway v1.16.0 h1:gmcG1KaJ57LophUzW0Hy8NmPhnMZb4M0+kPpLofRdBo=
-github.com/hashicorp/consul/api v1.11.0 h1:Hw/G8TtRvOElqxVIhBzXciiSTbapq8hZ2XKZsXk5ZCE=
-github.com/hashicorp/consul/sdk v0.8.0 h1:OJtKBtEjboEZvG6AOUdh4Z1Zbyu0WcxQ0qatRrZHTVU=
-github.com/hashicorp/go-immutable-radix v1.3.1 h1:DKHmCUm2hRBK510BaiZlwvpD40f8bJFeZnpfm2KLowc=
-github.com/hashicorp/go-msgpack v0.5.3 h1:zKjpN5BK/P5lMYrLmBHdBULWbJ0XpYR+7NGzqkZzoD4=
-github.com/hashicorp/go-rootcerts v1.0.2 h1:jzhAVGtqPKbwpyCPELlgNWhE1znq+qwJtW5Oi2viEzc=
-github.com/hashicorp/go-sockaddr v1.0.0 h1:GeH6tui99pF4NJgfnhp+L6+FfobzVW3Ah46sLo0ICXs=
-github.com/hashicorp/go-syslog v1.0.0 h1:KaodqZuhUoZereWVIYmpUgZysurB1kBLX2j0MwMrUAE=
-github.com/hashicorp/go-uuid v1.0.1 h1:fv1ep09latC32wFoVwnqcnKJGnMSdBanPczbHAYm1BE=
-github.com/hashicorp/golang-lru v0.5.4 h1:YDjusn29QI/Das2iO9M0BHnIbxPeyuCHsjMW+lJfyTc=
-github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4=
-github.com/hashicorp/logutils v1.0.0 h1:dLEQVugN8vlakKOUE3ihGLTZJRB4j+M2cdTm/ORI65Y=
-github.com/hashicorp/mdns v1.0.4 h1:sY0CMhFmjIPDMlTB+HfymFHCaYLhgifZ0QhjaYKD/UQ=
-github.com/hashicorp/memberlist v0.3.0 h1:8+567mCcFDnS5ADl7lrpxPMWiFCElyUEeW0gtj34fMA=
-github.com/hashicorp/serf v0.9.6 h1:uuEX1kLR6aoda1TBttmJQKDLZE1Ob7KN0NPdE7EtCDc=
-github.com/hinshun/vt10x v0.0.0-20220119200601-820417d04eec h1:qv2VnGeEQHchGaZ/u7lxST/RaJw+cv273q79D81Xbog=
-github.com/iancoleman/strcase v0.2.0 h1:05I4QRnGpI0m37iZQRuskXh+w77mr6Z41lwQzuHLwW0=
-github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639 h1:mV02weKRL81bEnm8A0HT1/CAelMQDBuQIfLw8n+d6xI=
-github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
-github.com/jstemmer/go-junit-report v0.9.1 h1:6QPYqodiu3GuPL+7mfx+NwDdp2eTkp9IfEUpgAwUN0o=
-github.com/julienschmidt/httprouter v1.2.0 h1:TDTW5Yz1mjftljbcKqRcrYhd4XeOoI98t+9HbQbYf7g=
-github.com/k0kubun/go-ansi v0.0.0-20180517002512-3bf9e2903213 h1:qGQQKEcAR99REcMpsXCp3lJ03zYT1PkRd3kQGPn9GVg=
-github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 h1:Z9n2FFNUXsshfwJMBgNA0RU6/i7WVaAegv3PtuIHPMs=
-github.com/kisielk/errcheck v1.5.0 h1:e8esj/e4R+SAOwFwN+n3zr0nYeCyeweozKfO23MvHzY=
-github.com/kisielk/gotool v1.0.0 h1:AV2c/EiW3KqPNT9ZKl07ehoAGi4C5/01Cfbblndcapg=
-github.com/konsorten/go-windows-terminal-sequences v1.0.1 h1:mweAR1A6xJ3oS2pRaGiHgQ4OO8tzTaLawm8vnODuwDk=
-github.com/kr/fs v0.1.0 h1:Jskdu9ieNAYnjxsi0LbQp1ulIKZV1LAFgK1tWhpZgl8=
-github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515 h1:T+h1c/A9Gawja4Y9mFVWj2vyii2bbUNDw3kt9VxK2EY=
-github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk=
-github.com/kr/pty v1.1.1 h1:VkoXIwSboBpnk99O/KFauAEILuNHv5DVFKZMBN/gUgw=
-github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
-github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc=
-github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw=
-github.com/lyft/protoc-gen-star v0.5.3 h1:zSGLzsUew8RT+ZKPHc3jnf8XLaVyHzTcAFBzHtCNR20=
-github.com/magiconair/properties v1.8.5 h1:b6kJs+EmPFMYGkow9GiUyCyOvIwYetYJ3fSaWak/Gls=
-github.com/matttproud/golang_protobuf_extensions v1.0.1 h1:4hp9jkHxhMHkqkrB3Ix0jegS5sx/RkqARlsWZ6pIwiU=
-github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b h1:j7+1HpAFS1zy5+Q4qx1fWh90gTKwiN4QCGoY9TWyyO4=
-github.com/miekg/dns v1.1.41 h1:WMszZWJG0XmzbK9FEmzH2TVcqYzFesusSIB41b8KHxY=
-github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y=
-github.com/mitchellh/go-testing-interface v1.0.0 h1:fzU/JVNcaqHQEcVFAKeR41fkiLdIPrefOvVG1VZ96U0=
-github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
-github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
-github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223 h1:F9x/1yl3T2AeKLr2AMdilSD8+f9bvMnNN8VS5iDtovc=
-github.com/opentracing/opentracing-go v1.1.0 h1:pWlfV3Bxv7k65HYwkikxat0+s3pV4bsqf19k25Ur8rU=
-github.com/pascaldekloe/goe v0.1.0 h1:cBOtyMzM9HTpWjXfbbunk26uA6nG3a8n06Wieeh0MwY=
-github.com/pelletier/go-toml v1.9.4 h1:tjENF6MfZAg8e4ZmZTeWaWiT2vXtsoO6+iuOjFhECwM=
-github.com/pkg/sftp v1.10.1 h1:VasscCm72135zRysgrJDKsntdmPN+OuU3+nnHYA9wyc=
-github.com/prometheus/client_golang v1.4.0 h1:YVIb/fVcOTMSqtqZWSKnHpSLBxu8DKgxq8z6RuBZwqI=
-github.com/prometheus/client_model v0.2.0 h1:uq5h0d+GuxiXLJLNABMgp2qUWDPiLvgCzz2dUR+/W/M=
-github.com/prometheus/common v0.9.1 h1:KOMtN28tlbam3/7ZKEYKHhKoJZYYj3gMH4uc62x7X7U=
-github.com/prometheus/procfs v0.0.8 h1:+fpWZdT24pJBiqJdAwYBjPSk+5YmQzYNPYzQsdzLkt8=
-github.com/rogpeppe/fastuuid v1.2.0 h1:Ppwyp6VYCF1nvBTXL3trRso7mXMlRrw9ooo375wvi2s=
-github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc=
-github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
-github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f h1:UFr9zpz4xgTnIE5yIMtWAMngCdZ9p/+q6lTbgelo80M=
-github.com/sagikazarmark/crypt v0.3.0 h1:TV5DVog+pihN4Rr0rN1IClv4ePpkzdg9sPrw7WDofZ8=
-github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529 h1:nn5Wsu0esKSJiIVhscUtVbo7ada43DJhG55ua/hjS5I=
-github.com/sirupsen/logrus v1.4.2 h1:SPIRibHv4MatM3XXNO2BJeFLZwZ2LvZgfQ5+UNI2im4=
-github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72 h1:qLC7fQah7D6K1B0ujays3HV9gkFtllcxhzImRR7ArPQ=
-github.com/spf13/afero v1.6.0 h1:xoax2sJ2DT8S8xA2paPFjDCScCNeWsg75VG0DLRreiY=
-github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk=
-github.com/spf13/viper v1.10.0 h1:mXH0UwHS4D2HwWZa75im4xIQynLfblmWV7qcWpfv0yk=
-github.com/stretchr/objx v0.5.0 h1:1zr/of2m5FGMsad5YfcqgdqdWrIhu+EBEJRhR1U7z/c=
-github.com/subosito/gotenv v1.2.0 h1:Slr1R9HxAlEKefgq5jn9U+DnETlIUa6HfgEzj0g5d7s=
-github.com/tj/assert v0.0.3 h1:Df/BlaZ20mq6kuai7f5z2TvPFiwC3xaWJSDQNiIS3Rk=
-github.com/tj/assert v0.0.3/go.mod h1:Ne6X72Q+TB1AteidzQncjw9PabbMp4PBMZ1k+vd1Pvk=
-github.com/tv42/httpunix v0.0.0-20150427012821-b75d8614f926 h1:G3dpKMzFDjgEh2q1Z7zUUtKa8ViPtH+ocF0bE0g00O8=
-github.com/yuin/goldmark v1.4.13 h1:fVcFKWvrslecOb/tg+Cc05dkeYx540o0FuFt3nUVDoE=
-go.etcd.io/etcd/api/v3 v3.5.1 h1:v28cktvBq+7vGyJXF8G+rWJmj+1XUmMtqcLnH8hDocM=
-go.etcd.io/etcd/client/pkg/v3 v3.5.1 h1:XIQcHCFSG53bJETYeRJtIxdLv2EWRGxcfzR8lSnTH4E=
-go.etcd.io/etcd/client/v2 v2.305.1 h1:vtxYCKWA9x31w0WJj7DdqsHFNjhkigdAnziDtkZb/l4=
-go.opencensus.io v0.23.0 h1:gqCw0LfLxScz8irSi8exQc7fyQ0fKQU/qnC/X8+V/1M=
-go.opentelemetry.io/proto/otlp v0.7.0 h1:rwOQPCuKAKmwGKq2aVNnYIibI6wnV7EvzgfTCzcdGg8=
-go.uber.org/atomic v1.7.0 h1:ADUqmZGgLDDfbSL9ZmPxKTybcoEYHgpYfELNoN+7hsw=
-go.uber.org/multierr v1.6.0 h1:y6IPFStTAIT5Ytl7/XYmHvzXQ7S3g/IeZW9hyZ5thw4=
-go.uber.org/zap v1.17.0 h1:MTjgFu6ZLKvY6Pvaqk97GlxNBuMpV4Hy/3P6tRGlI2U=
-golang.org/x/exp v0.0.0-20230321023759-10a507213a29 h1:ooxPy7fPvB4kwsA2h+iBNHkAbp/4JxTSwCmvdjEYmug=
-golang.org/x/exp v0.0.0-20230321023759-10a507213a29/go.mod h1:CxIveKay+FTh1D0yPZemJVgC/95VzuuOLq5Qi4xnoYc=
-golang.org/x/image v0.0.0-20190802002840-cff245a6509b h1:+qEpEAPhDZ1o0x3tHzZTQDArnOixOzGD9HUJfcg0mb4=
-golang.org/x/lint v0.0.0-20210508222113-6edffad5e616 h1:VLliZ0d+/avPrXXH+OakdXhpJuEoBZuwh1m2j7U6Iug=
-golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028 h1:4+4C/Iv2U4fMZBiMCc98MG1In4gJY5YRhtpDNeDeHWs=
-golang.org/x/mod v0.7.0 h1:LapD9S96VoQRhi/GrNTqeBJFrUjs5UHCAtTlgwA5oZA=
-golang.org/x/mod v0.8.0 h1:LUYupSeNrTNCGzR/hVBk2NHZO4hXcVaW1k4Qx7rjPx8=
-golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
-golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
-golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8 h1:RerP+noqYHUQ8CMRcPlC2nvTa4dcBIjegkuWdcUDuqg=
-golang.org/x/time v0.0.0-20191024005414-555d28b269f0 h1:/5xXl8Y5W96D+TtHSlonuFqGHIWVuyCkGJLwGh9JJFs=
-golang.org/x/tools v0.4.0 h1:7mTAgkunk3fr4GAloyyCasadO6h9zSsQZbwvcaIciV4=
-golang.org/x/tools v0.6.0 h1:BOw41kyTf3PuCW1pVQf8+Cyg8pMlkYB1oo9iJ6D/lKM=
-golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
-golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE=
-google.golang.org/api v0.62.0 h1:PhGymJMXfGBzc4lBRmrx9+1w4w2wEzURHNGF/sD/xGc=
-google.golang.org/appengine v1.6.7 h1:FZR1q0exgwxzPzp/aF+VccGrSfxfPpkBqjIIEq3ru6c=
-google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0 h1:M1YKkFIboKNieVO5DLUEVzQfGwJD30Nv2jfUgzb5UcE=
-gopkg.in/alecthomas/kingpin.v2 v2.2.6 h1:jMFz6MfLP0/4fUyZle81rXUoxOBFi19VUFKVDOQfozc=
-gopkg.in/errgo.v2 v2.1.0 h1:0vLT13EuvQ0hNvakwLuFZ/jYrLp5F3kcWHXdRggjCE8=
-gopkg.in/ini.v1 v1.66.2 h1:XfR1dOYubytKy4Shzc2LHrrGhU0lDCfDGG1yLPmpgsI=
-honnef.co/go/tools v0.0.1-2020.1.4 h1:UoveltGrhghAA7ePc+e+QYDHXrBps2PqFZiHkGR/xK8=
-rsc.io/binaryregexp v0.2.0 h1:HfqmD5MEmC0zvwBuF187nq9mdnXjXsSivRiXN7SmRkE=
-rsc.io/quote/v3 v3.1.0 h1:9JKUTTIUgS6kzR9mK1YuGKv6Nl+DijDNIc0ghT58FaY=
-rsc.io/sampler v1.3.0 h1:7uVkIFmeBqHfdjD+gZwtXXI+RODJ2Wc4O7MPEh/QiW4=
diff --git a/package.json b/package.json
index 3b0cb4f5163ec..8635edb06aff7 100644
--- a/package.json
+++ b/package.json
@@ -42,9 +42,6 @@
     "*.{md,mdx,mjs,yml,yaml,css}": [
       "prettier --write"
     ],
-    "*.go": [
-      "pnpm --filter cli format"
-    ],
     "*.toml": [
       "taplo format"
     ],
diff --git a/turborepo-tests/integration-go/package.json b/turborepo-tests/integration-go/package.json
deleted file mode 100644
index 1e67c85718ec2..0000000000000
--- a/turborepo-tests/integration-go/package.json
+++ /dev/null
@@ -1,15 +0,0 @@
-{
-  "name": "turborepo-tests-integration-go",
-  "scripts": {
-    "test:go-fallback": "cross-env EXPERIMENTAL_RUST_CODEPATH=false ./node_modules/.bin/prysk tests"
-  },
-  "dependencies": {
-    "turborepo-tests-helpers": "workspace:*",
-    "@turbo/exe-stub": "workspace:*",
-    "prysk": "workspace:*",
-    "turborepo-tests-integration-common": "workspace:*"
-  },
-  "devDependencies": {
-    "cross-env": "^7.0.3"
-  }
-}
diff --git a/turborepo-tests/integration-go/tests/stub.t b/turborepo-tests/integration-go/tests/stub.t
deleted file mode 100644
index e69de29bb2d1d..0000000000000
diff --git a/turborepo-tests/integration-go/turbo.json b/turborepo-tests/integration-go/turbo.json
deleted file mode 100644
index bc3323beed890..0000000000000
--- a/turborepo-tests/integration-go/turbo.json
+++ /dev/null
@@ -1,19 +0,0 @@
-{
-  "extends": ["//"],
-  "pipeline": {
-    // This is a synthetic task that lets us pull in other workspaces as dependencies,
-    // so changes in internal workspaces that we depend on will trigger this task.
-    "topo": {
-      "dependsOn": ["^topo"]
-    },
-    "test": {
-      "dependsOn": [
-        "cli#build",
-        "topo",
-        "^build",
-        "turborepo-tests-integration-common#test:go-fallback"
-      ],
-      "passThroughEnv": ["CI"]
-    }
-  }
-}
diff --git a/turborepo-tests/integration/tests/no-args.t b/turborepo-tests/integration/tests/no-args.t
index 18e70d6410aba..1466e573728e4 100644
--- a/turborepo-tests/integration/tests/no-args.t
+++ b/turborepo-tests/integration/tests/no-args.t
@@ -50,8 +50,6 @@ Make sure exit code is 2 when no args are passed
           Continue execution even if a task exits with an error or non-zero exit code. The default behavior is to bail
       --dry-run []
           [possible values: text, json]
-      --go-fallback
-          Fallback to use Go for task execution
       --single-package
           Run turbo in single-package mode
       --force []
diff --git a/turborepo-tests/integration/tests/turbo-help.t b/turborepo-tests/integration/tests/turbo-help.t
index 37950fcfe4417..b3acc2250e819 100644
--- a/turborepo-tests/integration/tests/turbo-help.t
+++ b/turborepo-tests/integration/tests/turbo-help.t
@@ -50,8 +50,6 @@ Test help flag
           Continue execution even if a task exits with an error or non-zero exit code. The default behavior is to bail
       --dry-run []
          [possible values: text, json]
-      --go-fallback
-          Fallback to use Go for task execution
       --single-package
           Run turbo in single-package mode
       --force []
@@ -154,8 +152,6 @@ Test help flag
           Continue execution even if a task exits with an error or non-zero exit code. The default behavior is to bail
       --dry-run []
          [possible values: text, json]
-      --go-fallback
-          Fallback to use Go for task execution
       --single-package
           Run turbo in single-package mode
       --force []