diff --git a/.circleci/config.yml b/.circleci/config.yml index 8f552060aa..e9af0aa3c7 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -9,7 +9,7 @@ jobs: # that flag starts the download asynchronously so we'd have a race # condition. # renovate: datasource=github-releases depName=hashicorp/terraform versioning=hashicorp - TERRAFORM_VERSION: 1.7.5 + TERRAFORM_VERSION: 1.8.3 steps: - checkout - run: make build-service diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 0000000000..3f90f6406f --- /dev/null +++ b/.editorconfig @@ -0,0 +1,12 @@ +root = true + +[*] +charset = utf-8 +end_of_line = lf +trim_trailing_whitespace = true +insert_final_newline = true + +[*.md] +indent_style = space +indent_size = 3 +trim_trailing_whitespace = false diff --git a/.github/labeler.yml b/.github/labeler.yml index 7d6cf75daf..bbd0021dde 100644 --- a/.github/labeler.yml +++ b/.github/labeler.yml @@ -40,6 +40,6 @@ provider/gitlab: website: - changed-files: - - any-glob-to-any-file: 'runatlantis.io/.vuepress/**/*' + - any-glob-to-any-file: 'runatlantis.io/.vitepress/**/*' - any-glob-to-any-file: 'package.json' - any-glob-to-any-file: 'pnpm-lock.yaml' diff --git a/.github/renovate.json5 b/.github/renovate.json5 index e520313b2a..a03bfba879 100644 --- a/.github/renovate.json5 +++ b/.github/renovate.json5 @@ -1,108 +1,116 @@ { extends: [ - "config:base", - "schedule:daily", + 'config:best-practices', + ':separateMultipleMajorReleases', + 'schedule:daily', ], - commitMessageSuffix: " in {{packageFile}}", + commitMessageSuffix: ' in {{packageFile}}', dependencyDashboardAutoclose: true, automerge: true, - baseBranches: ["main", "/^release\-.*/"], + baseBranches: [ + 'main', + '/^release-.*/', + ], platformAutomerge: true, - labels: ["dependencies"], + labels: [ + 'dependencies', + ], postUpdateOptions: [ - "gomodTidy", - "gomodUpdateImportPaths", - "pnpmDedupe", + 'gomodTidy', + 'gomodUpdateImportPaths', + 'pnpmDedupe', ], - // needed so e2e tests do not stomp over each other prHourlyLimit: 1, - lockFileMaintenance: { - enabled: true, - }, + osvVulnerabilityAlerts: true, vulnerabilityAlerts: { enabled: true, labels: [ - "security", + 'security', ], }, packageRules: [ - // For vuepress { - "matchPackageNames": ["vuepress", "@vuepress/client", "@vuepress/markdown", "@vuepress/utils"], - "groupName": "vuepress", - "allowedVersions": "!/pre.*$/", + "matchFileNames": ["package.json"], + "enabled": false }, - // e2e test depends on testing/Dockefile testing-image which has conftest specific version. - // to upgrade conftest versions, we need following PRs. - // 1. update testing/Dockerfile conftest version - // 2. update testing-env tag - // 3. update e2e conftest version - // This will allow conftest version updates in testing/Dockefile { - matchPaths: ["testing/**"], - matchPackagePatterns: ["conftest"], - additionalBranchPrefix: "{{baseDir}}-", - groupName: "conftest-testing", - /* - prBodyNotes: [ - ":warning: Upgrade testing-env conftest and then upgrade other conftest versions for e2e :warning:", + matchFileNames: [ + 'testing/**', + ], + matchPackagePatterns: [ + 'conftest', ], - */ + additionalBranchPrefix: '{{packageFileDir}}-', + groupName: 'conftest-testing', }, { - ignorePaths: ["testing/**"], - matchPackagePatterns: ["github-actions"], - groupName: "github-", + ignorePaths: [ + 'testing/**', + ], + matchPackagePatterns: [ + 'github-actions', + ], + groupName: 'github-', }, - /* - // This tag is currently latest so we can skip this check for now unless we need to pin it again. 
{ - // we need to upgrade testing-env on ci quickly - matchPackageNames: ["ghcr.io/runatlantis/testing-env"], - groupName: "testing-env-ci-test", - schedule: ["every 1 hour after 00:00 and before 23:59 every day"], + matchDatasources: [ + 'docker', + ], + matchPackageNames: [ + 'node', + 'cimg/node', + ], + versioning: 'node', }, - */ { - // use LTS node version for node docker image - matchDatasources: ["docker"], - matchPackageNames: ["node", "cimg/node"], - versioning: "node", + matchPackageNames: [ + 'go', + 'golang', + ], + versioning: 'go', + groupName: 'go' }, ], - // https://docs.renovatebot.com/modules/manager/regex/ - regexManagers: [ + customManagers: [ { - fileMatch: ["(^|/)Dockerfile$", "(^|/)Dockerfile\\.[^/]*$"], + customType: 'regex', + fileMatch: [ + '(^|/)Dockerfile$', + '(^|/)Dockerfile\\.[^/]*$', + ], matchStrings: [ // example: - // renovate: datasource=github-releases depName=hashicorp/terraform versioning=hashicorp + // # renovate: datasource=github-releases depName=hashicorp/terraform versioning=hashicorp // ENV DEFAULT_TERRAFORM_VERSION=x.x.x - "renovate: datasource=(?.*?) depName=(?.*?)( versioning=(?.*?))?\\sENV .*?_VERSION=(?.*)\\s", + // # renovate: datasource=github-releases depName=open-policy-agent/conftest + // ARG DEFAULT_CONFTEST_VERSION=x.x.x + "renovate: datasource=(?.*?) depName=(?.*?)( versioning=(?.*?))?\\s(ARG|ENV) .*?_VERSION=(?.*)\\s", ], - versioningTemplate: "{{#if versioning}}{{{versioning}}}{{else}}semver{{/if}}", + versioningTemplate: '{{#if versioning}}{{{versioning}}}{{else}}semver{{/if}}', extractVersionTemplate: '^v(?\\d+\\.\\d+\\.\\d+)', }, { - fileMatch: [".*go$"], + customType: 'regex', + fileMatch: [ + '.*go$', + ], matchStrings: [ - // example: - // const ConftestVersion = "x.x.x" // renovate: datasource=github-releases depName=open-policy-agent/conftest - "\\sconst .*Version = \"(?.*)\"\\s// renovate: datasource=(?.*?) depName=(?.*?)( versioning=(?.*?))?\\s", + '\\sconst .*Version = "(?.*)"\\s// renovate: datasource=(?.*?) depName=(?.*?)( versioning=(?.*?))?\\s', ], - versioningTemplate: "{{#if versioning}}{{{versioning}}}{{else}}semver{{/if}}", + versioningTemplate: '{{#if versioning}}{{{versioning}}}{{else}}semver{{/if}}', extractVersionTemplate: '^v(?\\d+\\.\\d+\\.\\d+)', }, { - fileMatch: [".circleci/config.yml$"], + customType: 'regex', + fileMatch: [ + '.circleci/config.yml$', + '^\\.github/workflows/[^/]+\\.ya?ml$', + ], matchStrings: [ - // example: - // # renovate: datasource=github-releases depName=hashicorp/terraform versioning=hashicorp - // TRRAFORM_VERSION: x.x.x - "renovate: datasource=(?.*?) depName=(?.*?)( versioning=(?.*?))?\\s.*?_VERSION: (?.*)\\s", + 'renovate: datasource=(?.*?) 
depName=(?.*?)( versioning=(?.*?))?\\s.*?_VERSION: (?.*)\\s', ], - versioningTemplate: "{{#if versioning}}{{{versioning}}}{{else}}semver{{/if}}", + versioningTemplate: '{{#if versioning}}{{{versioning}}}{{else}}semver{{/if}}', extractVersionTemplate: '^v(?\\d+\\.\\d+\\.\\d+)', }, - ] + ], } diff --git a/.github/workflows/atlantis-image.yml b/.github/workflows/atlantis-image.yml index 157a5c1bb7..b37e678a94 100644 --- a/.github/workflows/atlantis-image.yml +++ b/.github/workflows/atlantis-image.yml @@ -12,6 +12,11 @@ on: branches: - 'main' - 'release-**' + types: + - opened + - reopened + - synchronize + - ready_for_review workflow_dispatch: concurrency: @@ -25,8 +30,8 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-22.04 steps: - - uses: actions/checkout@v4 - - uses: dorny/paths-filter@v3 + - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 + - uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3 id: changes with: filters: | @@ -52,22 +57,22 @@ jobs: PUSH: ${{ github.event_name != 'pull_request' && (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/')) }} steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 # Lint the Dockerfile first before setting anything up - name: Lint Dockerfile - uses: hadolint/hadolint-action@v3.1.0 + uses: hadolint/hadolint-action@54c9adbab1582c2ef04b2016b760714a4bfde3cf # v3.1.0 with: dockerfile: "Dockerfile" - name: Set up QEMU - uses: docker/setup-qemu-action@v3 + uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3 # v3 with: image: tonistiigi/binfmt:latest platforms: arm64,arm - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 + uses: docker/setup-buildx-action@d70bba72b1f3fd22344832f00baa16ece964efeb # v3 # https://github.com/docker/build-push-action/issues/761#issuecomment-1575006515 with: driver-opts: | @@ -81,7 +86,7 @@ jobs: # if it's v0.10.0 and debian, it will do v0.10.0-debian, latest-debian - name: Docker meta id: meta - uses: docker/metadata-action@v5 + uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81 # v5 env: SUFFIX: ${{ format('-{0}', matrix.image_type) }} with: @@ -113,7 +118,7 @@ jobs: # Suffix is not used here since there's no way to disable it above - name: Login to Packages Container registry - uses: docker/login-action@v3 + uses: docker/login-action@e92390c5fb421da1463c202d546fed0ec5c39f20 # v3 with: registry: ghcr.io username: ${{ github.actor }} @@ -126,7 +131,7 @@ jobs: - name: "Build ${{ env.PUSH == 'true' && 'and push' || '' }} ${{ env.DOCKER_REPO }} image" if: contains(fromJson('["push", "pull_request"]'), github.event_name) - uses: docker/build-push-action@v5 + uses: docker/build-push-action@2cdde995de11925a030ce8070c3d77a52ffcf1c0 # v5 with: cache-from: type=gha cache-to: type=gha,mode=max diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 8be3df50f9..2d99f0907d 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -37,8 +37,8 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-22.04 steps: - - uses: actions/checkout@v4 - - uses: dorny/paths-filter@v3 + - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 + - uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3 id: changes with: filters: | @@ -67,11 +67,11 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v4 + uses: 
actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL - uses: github/codeql-action/init@v3 + uses: github/codeql-action/init@b7cec7526559c32f1616476ff32d17ba4c59b2d6 # v3 with: languages: ${{ matrix.language }} # If you wish to specify custom queries, you can do so here or in a config file. @@ -85,7 +85,7 @@ jobs: # Autobuild attempts to build any compiled languages (C/C++, C#, Go, or Java). # If this step fails, then you should remove it and run the build manually (see below) - name: Autobuild - uses: github/codeql-action/autobuild@v3 + uses: github/codeql-action/autobuild@b7cec7526559c32f1616476ff32d17ba4c59b2d6 # v3 # ℹī¸ Command-line programs to run using the OS shell. # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun @@ -98,7 +98,7 @@ jobs: # ./location_of_script_within_repo/buildscript.sh - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v3 + uses: github/codeql-action/analyze@b7cec7526559c32f1616476ff32d17ba4c59b2d6 # v3 with: category: "/language:${{matrix.language}}" diff --git a/.github/workflows/labeler.yml b/.github/workflows/labeler.yml index aed089def0..40752d22e5 100644 --- a/.github/workflows/labeler.yml +++ b/.github/workflows/labeler.yml @@ -16,4 +16,4 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-22.04 steps: - - uses: actions/labeler@v5 + - uses: actions/labeler@8558fd74291d67161a8a78ce36a881fa63b766a9 # v5 diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 02ab6f7365..22f930d141 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -22,8 +22,8 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-22.04 steps: - - uses: actions/checkout@v4 - - uses: dorny/paths-filter@v3 + - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 + - uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3 id: changes with: filters: | @@ -39,15 +39,15 @@ jobs: name: Linting runs-on: ubuntu-22.04 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 # need to setup go toolchain explicitly - - uses: actions/setup-go@v5 + - uses: actions/setup-go@cdcb36043654635271a94b9a6d1392de5bb323a7 # v5 with: go-version-file: go.mod - name: golangci-lint - uses: reviewdog/action-golangci-lint@v2 + uses: reviewdog/action-golangci-lint@00311c26a97213f93f2fd3a3524d66762e956ae0 # v2 with: tool_name: golangci-lint diff --git a/.github/workflows/pr-lint.yml b/.github/workflows/pr-lint.yml index e3a5b647ac..86eb4d291c 100644 --- a/.github/workflows/pr-lint.yml +++ b/.github/workflows/pr-lint.yml @@ -15,6 +15,6 @@ jobs: name: Validate PR title runs-on: ubuntu-22.04 steps: - - uses: amannn/action-semantic-pull-request@v5 + - uses: amannn/action-semantic-pull-request@e9fabac35e210fea40ca5b14c0da95a099eff26f # v5 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/pr-size-labeler.yml b/.github/workflows/pr-size-labeler.yml new file mode 100644 index 0000000000..8576c5c141 --- /dev/null +++ b/.github/workflows/pr-size-labeler.yml @@ -0,0 +1,28 @@ +name: pr-size + +on: [pull_request] + +jobs: + labeler: + runs-on: ubuntu-latest + name: Label the PR size + steps: + - uses: codelytv/pr-size-labeler@54ef36785e9f4cb5ecf1949cfc9b00dbb621d761 # v1 + with: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + xs_label: 'size/xs' + xs_max_size: '10' + s_label: 'size/s' + 
s_max_size: '200' + m_label: 'size/m' + m_max_size: '1000' + l_label: 'size/l' + l_max_size: '10000' + xl_label: 'size/xl' + fail_if_xl: 'false' + message_if_xl: > + This PR exceeds the recommended size of 1000 lines. + Please make sure you are NOT addressing multiple issues with one PR. + Note this PR might be rejected due to its size. + github_api_url: 'https://api.github.com' + files_to_ignore: '' diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 479e404f69..72b1473330 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -11,16 +11,16 @@ jobs: goreleaser: runs-on: ubuntu-22.04 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 with: submodules: true - - uses: actions/setup-go@v5 + - uses: actions/setup-go@cdcb36043654635271a94b9a6d1392de5bb323a7 # v5 with: go-version-file: go.mod - name: Run GoReleaser for stable release - uses: goreleaser/goreleaser-action@v5 + uses: goreleaser/goreleaser-action@5742e2a039330cbb23ebf35f046f814d4c6ff811 # v5 if: (!contains(github.ref, 'pre')) with: version: v1.16.2 @@ -43,7 +43,7 @@ jobs: GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}} - name: Run GoReleaser for pre-release - uses: goreleaser/goreleaser-action@v5 + uses: goreleaser/goreleaser-action@5742e2a039330cbb23ebf35f046f814d4c6ff811 # v5 if: contains(github.ref, 'pre') with: version: v1.16.2 diff --git a/.github/workflows/renovate-config.yml b/.github/workflows/renovate-config.yml index bb5258df99..ace4bb0609 100644 --- a/.github/workflows/renovate-config.yml +++ b/.github/workflows/renovate-config.yml @@ -16,6 +16,6 @@ jobs: validate: runs-on: ubuntu-22.04 steps: - - uses: actions/checkout@v4 - - uses: actions/setup-node@v4 + - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 + - uses: actions/setup-node@60edb5dd545a775178f52524783378180af0d1f8 # v4 - run: npx --package renovate -c 'renovate-config-validator' diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index c989d76963..5911dea35d 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -6,7 +6,7 @@ jobs: stale: runs-on: ubuntu-22.04 steps: - - uses: actions/stale@v9 + - uses: actions/stale@28ca1036281a5e5922ead5184a1bbf96e5fc984e # v9 with: stale-pr-message: 'This issue is stale because it has been open for 1 month with no activity. Remove stale label or comment or this will be closed in 1 month.' stale-issue-message: This issue is stale because it has been open for 1 month with no activity. Remove stale label or comment or this will be closed in 1 month.' 
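The workflow changes above replace floating action tags such as `actions/checkout@v4` with full commit SHAs, keeping the release tag as a trailing `# v4` comment so the pinned version stays readable. As a minimal sketch of how such a SHA can be looked up (the `actions/checkout` repository and `v4` tag below are only illustrative, not values taken from this diff):

```sh
# List the tag refs of an action and the commits they point to.
# For an annotated tag, the entry suffixed with ^{} is the commit SHA
# to put after the @ in `uses:`, with the tag kept as a trailing comment.
git ls-remote --tags https://github.com/actions/checkout | grep 'refs/tags/v4'
```

Pinning to a SHA protects the workflow against a tag being moved, while the comment lets reviewers and tooling still see which release the SHA corresponds to.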
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index fab30b3b31..b8f0fdc012 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -26,13 +26,14 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-22.04 steps: - - uses: actions/checkout@v4 - - uses: dorny/paths-filter@v3 + - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 + - uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3 id: changes with: filters: | go: - '**.go' + - '**.txt' # golden file test output - 'go.*' - '.github/workflows/test.yml' test: @@ -40,17 +41,18 @@ jobs: if: needs.changes.outputs.should-run-tests == 'true' name: Tests runs-on: ubuntu-22.04 - container: ghcr.io/runatlantis/testing-env:latest + container: ghcr.io/runatlantis/testing-env:latest@sha256:346fd2028603d7c9369f709023ef993faf60a70ef4c91963f5baa7454196df32 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 # need to setup go toolchain explicitly - - uses: actions/setup-go@v5 + - uses: actions/setup-go@cdcb36043654635271a94b9a6d1392de5bb323a7 # v5 with: go-version-file: go.mod - run: make test-all - run: make check-fmt + ########################################################### # Notifying #contributors about test failure on main branch ########################################################### diff --git a/.github/workflows/testing-env-image.yml b/.github/workflows/testing-env-image.yml index 0cf8d5ecf2..a55b31c621 100644 --- a/.github/workflows/testing-env-image.yml +++ b/.github/workflows/testing-env-image.yml @@ -22,8 +22,8 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-22.04 steps: - - uses: actions/checkout@v4 - - uses: dorny/paths-filter@v3 + - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 + - uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3 id: changes with: filters: | @@ -37,19 +37,19 @@ jobs: name: Build Testing Env Image runs-on: ubuntu-22.04 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 - name: Set up QEMU - uses: docker/setup-qemu-action@v3 + uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3 # v3 with: image: tonistiigi/binfmt:latest platforms: arm64,arm - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 + uses: docker/setup-buildx-action@d70bba72b1f3fd22344832f00baa16ece964efeb # v3 - name: Login to Packages Container registry - uses: docker/login-action@v3 + uses: docker/login-action@e92390c5fb421da1463c202d546fed0ec5c39f20 # v3 with: registry: ghcr.io username: ${{ github.actor }} @@ -57,7 +57,7 @@ jobs: - run: echo "TODAY=$(date +"%Y.%m.%d")" >> $GITHUB_ENV - name: Build and push testing-env:${{env.TODAY}} image - uses: docker/build-push-action@v5 + uses: docker/build-push-action@2cdde995de11925a030ce8070c3d77a52ffcf1c0 # v5 with: cache-from: type=gha cache-to: type=gha,mode=max diff --git a/.github/workflows/website.yml b/.github/workflows/website.yml index 8d58751deb..5d9493a9ee 100644 --- a/.github/workflows/website.yml +++ b/.github/workflows/website.yml @@ -26,8 +26,8 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-22.04 steps: - - uses: actions/checkout@v4 - - uses: dorny/paths-filter@v3 + - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 + - uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3 id: changes with: filters: | @@ -46,9 +46,15 @@ jobs: 
name: Website Link Check runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 - - uses: wyvox/action-setup-pnpm@v3 + - name: markdown-lint + uses: DavidAnson/markdownlint-cli2-action@b4c9feab76d8025d1e83c653fa3990936df0e6c8 # v16 + with: + config: .markdownlint.yaml + globs: 'runatlantis.io/**/*.md' + + - uses: wyvox/action-setup-pnpm@6597ef5c1300fe08efa6bc75e6141f7153e2b4cc # v3 with: node-version: 20 @@ -65,7 +71,12 @@ pnpm website:build # start http-server for integration testing - npx http-server runatlantis.io/.vuepress/dist & + npx http-server runatlantis.io/.vitepress/dist & + + - name: Run Playwright E2E tests + run: | + pnpx playwright install --with-deps + pnpm run e2e - name: wait until server listened run: curl --retry-delay 1 --retry 30 --retry-all-error http://localhost:8080 @@ -76,6 +87,7 @@ -e 'https://medium.com/runatlantis' \ -e 'https://github\.com/runatlantis/atlantis/edit/main/.*' \ -e 'https://github.com/runatlantis/helm-charts#customization' \ + -e 'https://github.com/sethvargo/atlantis-on-gke/blob/master/terraform/tls.tf#L64-L84' \ -e 'https://confluence.atlassian.com/*' \ --header 'Accept-Encoding:deflate, gzip' \ --buffer-size 8192 \ diff --git a/.gitignore b/.gitignore index a3040a1ee5..3830273d47 100644 --- a/.gitignore +++ b/.gitignore @@ -8,7 +8,6 @@ output .cover .terraform/ node_modules/ -**/.vuepress/* helm/test-values.yaml *.swp golangci-lint @@ -27,3 +26,14 @@ tmp-CHANGELOG.md # IDE files *.code-workspace + +# draw.io backup files +*.bkp + +# VitePress build output & cache directory +**/.vitepress/cache +**/.vitepress/dist +**/.vitepress/config.ts.timestamp-* + +# playwright +test-results/ diff --git a/.markdownlint.yaml b/.markdownlint.yaml new file mode 100644 index 0000000000..efc157fb61 --- /dev/null +++ b/.markdownlint.yaml @@ -0,0 +1,33 @@ +# MD013/line-length +# +# We're not particular about line length, generally preferring longer +# lines, since tools like Grammarly and other writing assistance tools +# work best with "normal" lines not broken up arbitrarily. +# +# https://github.com/DavidAnson/markdownlint/blob/main/doc/md013.md +MD013: false + +# MD033/no-inline-html +# +# We're fine with inline HTML, there are lots of valid VitePress features +that depend on this. 
+ # + https://github.com/DavidAnson/markdownlint/blob/main/doc/md033.md +MD033: false + +# MD024/no-duplicate-heading + +# VitePress does not follow GitHub heading styling, so duplicate headlines +# are fine as long as they are not siblings (aka same indentation hierarchy) + +# https://github.com/DavidAnson/markdownlint/blob/main/doc/md024.md +MD024: + siblings_only: true + +# MD051/link-fragments + +# VitePress generates these differently than markdownlint expects, so disabling +# for now, and something to improve on later (cc @jippi) + +# https://github.com/DavidAnson/markdownlint/blob/main/doc/md051.md +MD051: false diff --git a/.node-version index 2dbbe00e67..f203ab89b7 100644 --- a/.node-version +++ b/.node-version @@ -1 +1 @@ -20.11.1 +20.13.1 diff --git a/.tool-versions index e2db8c3dfb..f2f739fc02 100644 --- a/.tool-versions +++ b/.tool-versions @@ -1 +1 @@ -pnpm 8.15.5 +pnpm 9.1.1 diff --git a/CONTRIBUTING.md index 3bd4290095..f465fcc0bd 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,9 +1,24 @@ -# Topics -* [Reporting Issues](#reporting-issues) -* [Reporting Security Issues](#reporting-security-issues) -* [Updating The Website](#updating-the-website) -* [Developing](#developing) -* [Releasing](#creating-a-new-release) +# Contributing + +# Table of Contents +- [Reporting Issues](#reporting-issues) +- [Reporting Security Issues](#reporting-security-issues) +- [Updating The Website](#updating-the-website) +- [Developing](#developing) + - [Running Atlantis Locally](#running-atlantis-locally) + - [Running Atlantis With Local Changes](#running-atlantis-with-local-changes) + - [Rebuilding](#rebuilding) + - [Running Tests Locally](#running-tests-locally) + - [Running Tests In Docker](#running-tests-in-docker) + - [Calling Your Local Atlantis From GitHub](#calling-your-local-atlantis-from-github) + - [Code Style](#code-style) + - [Logging](#logging) + - [Errors](#errors) + - [Testing](#testing) + - [Mocks](#mocks) +- [Backporting Fixes](#backporting-fixes) + - [Manual Backporting Fixes](#manual-backporting-fixes) +- [Creating a New Release](#creating-a-new-release) # Reporting Issues * When reporting issues, please include the output of `atlantis version`. @@ -23,11 +38,11 @@ open your browser to http://localhost:8080. ## Running Atlantis Locally * Clone the repo from https://github.com/runatlantis/atlantis/ * Compile Atlantis: - ``` + ```sh go install ``` * Run Atlantis: - ``` + ```sh atlantis server --gh-user --gh-token --repo-allowlist --gh-webhook-secret --log-level debug ``` If you get an error like `command not found: atlantis`, ensure that `$GOPATH/bin` is in your `$PATH`. @@ -36,43 +51,46 @@ open your browser to http://localhost:8080. Docker compose is set up to start an atlantis container and ngrok container in the same network in order to expose the atlantis instance to the internet. In order to do this, create a file in the repository called `atlantis.env` and add the required env vars for the atlantis server configuration. e.g. -``` + +```sh +NGROK_AUTH=1234567890 + ATLANTIS_GH_APP_ID=123 ATLANTIS_GH_APP_KEY_FILE="/.ssh/somekey.pem" ATLANTIS_GH_WEBHOOK_SECRET=12345 ``` -Note: `~/.ssh` is mounted to allow for referencing any local ssh keys +Note: `~/.ssh` is mounted to allow for referencing any local ssh keys. 
Following this just run: -``` +```sh make build-service -docker-compose up +docker-compose up --detach +docker-compose logs --follow ``` ### Rebuilding - If the ngrok container is restarted, the url will change which is a hassle. Fortunately, when we make a code change, we can rebuild and restart the atlantis container easily without disrupting ngrok. e.g. -``` +```sh make build-service docker-compose up --detach --build ``` -## Running Tests Locally: - +## Running Tests Locally `make test`. If you want to run the integration tests that actually run real `terraform` commands, run `make test-all`. -## Running Tests In Docker: -``` +## Running Tests In Docker +```sh docker run --rm -v $(pwd):/go/src/github.com/runatlantis/atlantis -w /go/src/github.com/runatlantis/atlantis ghcr.io/runatlantis/testing-env:latest make test ``` Or to run the integration tests -``` + +```sh docker run --rm -v $(pwd):/go/src/github.com/runatlantis/atlantis -w /go/src/github.com/runatlantis/atlantis ghcr.io/runatlantis/testing-env:latest make test-all ``` @@ -80,18 +98,19 @@ docker run --rm -v $(pwd):/go/src/github.com/runatlantis/atlantis -w /go/src/git - Create a test terraform repository in your GitHub. - Create a personal access token for Atlantis. See [Create a GitHub token](https://github.com/runatlantis/atlantis/tree/main/runatlantis.io/docs/access-credentials.md#generating-an-access-token). - Start Atlantis in server mode using that token: -``` +```sh atlantis server --gh-user --gh-token --repo-allowlist --gh-webhook-secret --log-level debug ``` - Download ngrok from https://ngrok.com/download. This will enable you to expose Atlantis running on your laptop to the internet so GitHub can call it. - When you've downloaded and extracted ngrok, run it on port `4141`: -``` +```sh ngrok http 4141 ``` - Create a Webhook in your repo and use the `https` url that `ngrok` printed out after running `ngrok http 4141`. Be sure to append `/events` so your webhook url looks something like `https://efce3bcd.ngrok.io/events`. See [Add GitHub Webhook](https://github.com/runatlantis/atlantis/blob/main/runatlantis.io/docs/configuring-webhooks.md#configuring-webhooks). - Create a pull request and type `atlantis help`. You should see the request in the `ngrok` and Atlantis logs and you should also see Atlantis comment back. ## Code Style + ### Logging - `ctx.Log` should be available in most methods. If not, pass it down. - levels: @@ -161,12 +180,11 @@ go get github.com/petergtz/pegomock/... ``` # Backporting Fixes - Atlantis now uses a [cherry-pick-bot](https://github.com/googleapis/repo-automation-bots/tree/main/packages/cherry-pick-bot) from Google. The bot assists in maintaining changes across releases branches by easily cherry-picking changes via pull requests. Maintainers and Core Contributors can add a comment to a pull request: -``` +```sh /cherry-pick target-branch-name ``` @@ -175,7 +193,6 @@ target-branch-name is the branch to cherry-pick to. cherry-pick-bot will cherry- The bot will immediately try to cherry-pick a merged PR. On unmerged pull request, it will not do anything immediately, but wait until merge. You can comment multiple times on a PR for multiple release branches. ## Manual Backporting Fixes - The bot will fail to cherry-pick if the feature branches' git history is not linear (merge commits instead of rebase). In that case, you will need to manually cherry-pick the squashed merged commit from main to the release branch 1. Switch to the release branch intended for the fix. 
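The manual backport described above comes down to cherry-picking the squashed merge commit from `main` onto the release branch. A hedged sketch of that flow (the `release-0.27` branch name and the `SQUASHED_COMMIT_SHA` variable are placeholders, not values from this diff):

```sh
# Hypothetical manual backport: cherry-pick a squashed commit from main
# onto a release branch when the cherry-pick bot cannot do it automatically.
git fetch origin
git switch release-0.27                  # the release branch intended for the fix
git cherry-pick "$SQUASHED_COMMIT_SHA"   # the squashed merge commit from main
git push origin release-0.27
```

If the cherry-pick conflicts, resolve the conflicts, `git add` the affected files, and finish with `git cherry-pick --continue` before pushing.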
diff --git a/Dockerfile b/Dockerfile index 1102b06a16..b839a76197 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,19 +1,19 @@ -# syntax=docker/dockerfile:1 +# syntax=docker/dockerfile:1@sha256:a57df69d0ea827fb7266491f2813635de6f17269be881f696fbfdf2d83dda33e # what distro is the image being built for -ARG ALPINE_TAG=3.19.1 -ARG DEBIAN_TAG=12.5-slim -ARG GOLANG_VERSION=1.22.1 +ARG ALPINE_TAG=3.19.1@sha256:c5b1261d6d3e43071626931fc004f70149baeba2c8ec672bd4f27761f8e1ad6b +ARG DEBIAN_TAG=12.5-slim@sha256:804194b909ef23fb995d9412c9378fb3505fe2427b70f3cc425339e48a828fca +ARG GOLANG_TAG=1.22.1-alpine # renovate: datasource=github-releases depName=hashicorp/terraform versioning=hashicorp -ARG DEFAULT_TERRAFORM_VERSION=1.7.2 -# renovate: datasource=github-releases depName=hashicorp/terraform versioning=hashicorp -ARG DEFAULT_OPENTOFU_VERSION=1.6.2 +ARG DEFAULT_TERRAFORM_VERSION=1.8.3 +# renovate: datasource=github-releases depName=opentofu/opentofu versioning=hashicorp +ARG DEFAULT_OPENTOFU_VERSION=1.7.1 # renovate: datasource=github-releases depName=open-policy-agent/conftest -ARG DEFAULT_CONFTEST_VERSION=0.49.1 +ARG DEFAULT_CONFTEST_VERSION=0.52.0 # Stage 1: build artifact and download deps -FROM golang:${GOLANG_VERSION}-alpine AS builder +FROM golang:${GOLANG_TAG} AS builder ARG ATLANTIS_VERSION=dev ENV ATLANTIS_VERSION=${ATLANTIS_VERSION} @@ -122,7 +122,7 @@ RUN ./download-release.sh \ "terraform" \ "${TARGETPLATFORM}" \ "${DEFAULT_TERRAFORM_VERSION}" \ - "1.4.7 1.5.7 1.6.6 ${DEFAULT_TERRAFORM_VERSION}" \ + "1.5.7 1.6.6 1.7.5 ${DEFAULT_TERRAFORM_VERSION}" \ && ./download-release.sh \ "tofu" \ "${TARGETPLATFORM}" \ @@ -147,8 +147,8 @@ RUN addgroup atlantis && \ # copy atlantis binary COPY --from=builder /app/atlantis /usr/local/bin/atlantis # copy terraform binaries -COPY --from=deps /usr/local/bin/terraform* /usr/local/bin/ -COPY --from=deps /usr/local/bin/tofu* /usr/local/bin/ +COPY --from=deps /usr/local/bin/terraform/terraform* /usr/local/bin/ +COPY --from=deps /usr/local/bin/tofu/tofu* /usr/local/bin/ # copy dependencies COPY --from=deps /usr/local/bin/conftest /usr/local/bin/conftest COPY --from=deps /usr/bin/git-lfs /usr/bin/git-lfs @@ -187,8 +187,8 @@ RUN useradd --create-home --user-group --shell /bin/bash atlantis && \ # copy atlantis binary COPY --from=builder /app/atlantis /usr/local/bin/atlantis # copy terraform binaries -COPY --from=deps /usr/local/bin/terraform* /usr/local/bin/ -COPY --from=deps /usr/local/bin/tofu* /usr/local/bin/ +COPY --from=deps /usr/local/bin/terraform/terraform* /usr/local/bin/ +COPY --from=deps /usr/local/bin/tofu/tofu* /usr/local/bin/ # copy dependencies COPY --from=deps /usr/local/bin/conftest /usr/local/bin/conftest COPY --from=deps /usr/bin/git-lfs /usr/bin/git-lfs diff --git a/Dockerfile.dev b/Dockerfile.dev index f85a5555e2..7b399372c2 100644 --- a/Dockerfile.dev +++ b/Dockerfile.dev @@ -1,3 +1,3 @@ -FROM ghcr.io/runatlantis/atlantis:latest +FROM ghcr.io/runatlantis/atlantis:latest@sha256:5ad2e3fe752104a614374490d111ed1230f8ebf5409552d0ad3b716de356dc45 COPY atlantis /usr/local/bin/atlantis WORKDIR /atlantis/src diff --git a/Makefile b/Makefile index 1678ef588e..ee9e22589f 100644 --- a/Makefile +++ b/Makefile @@ -98,7 +98,7 @@ check-lint: ## Run linter in CI/CD. If running locally use 'lint' .PHONY: check-fmt check-fmt: ## Fail if not formatted - if [[ $$(goimports -l $$(find . -type f -name '*.go' ! -path "./vendor/*" ! 
-path "**/mocks/*")) ]]; then exit 1; fi + ./scripts/fmt.sh .PHONY: end-to-end-deps end-to-end-deps: ## Install e2e dependencies diff --git a/README.md b/README.md index e60bfba133..142d1799f5 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@ # Atlantis [![Latest Release](https://img.shields.io/github/release/runatlantis/atlantis.svg)](https://github.com/runatlantis/atlantis/releases/latest) -[![SuperDopeBadge](./runatlantis.io/.vuepress/public/hightower-super-dope.svg)](https://twitter.com/kelseyhightower/status/893260922222813184) +[![SuperDopeBadge](./runatlantis.io/.vitepress/public/hightower-super-dope.svg)](https://twitter.com/kelseyhightower/status/893260922222813184) [![Go Report Card](https://goreportcard.com/badge/github.com/runatlantis/atlantis)](https://goreportcard.com/report/github.com/runatlantis/atlantis) [![Go Reference](https://pkg.go.dev/badge/github.com/runatlantis/atlantis.svg)](https://pkg.go.dev/github.com/runatlantis/atlantis) [![codecov](https://codecov.io/gh/runatlantis/atlantis/branch/main/graph/badge.svg)](https://codecov.io/gh/runatlantis/atlantis) @@ -9,7 +9,7 @@ [![Slack](https://img.shields.io/badge/Join-Atlantis%20Community%20Slack-red)](https://join.slack.com/t/atlantis-community/shared_invite/zt-9xlxtxtc-CUSKB1ATt_sQy6um~LDPNw)

- Atlantis Logo

+ Atlantis Logo

Terraform Pull Request Automation

@@ -40,3 +40,7 @@ Runs `terraform plan`, `import`, `apply` remotely and comments back on the pull ## Stargazers over time [![Stargazers over time](https://starchart.cc/runatlantis/atlantis.svg)](https://starchart.cc/runatlantis/atlantis) + +## User Survey + +In April 2024, the Core Atlantis Team put together an anonymous survey for Atlantis users to help us understand the community needs and prioritize our roadmap. If you are a user of Atlantis, please take 5 minutes to fill it out diff --git a/docker-compose.yml b/docker-compose.yml index ab2b2f1cab..a4331c61a7 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,42 +1,41 @@ # Note: This file is only used for Atlantis local development -version: "3.8" services: - ngrok: - image: wernight/ngrok:latest - ports: - - 4040:4040 - environment: - # https://dashboard.ngrok.com/get-started/your-authtoken - # NGROK_AUTH: REPLACE-WITH-YOUR-TOKEN // set this in atlantis.env - NGROK_PROTOCOL: http - NGROK_PORT: atlantis:4141 - env_file: - - ./atlantis.env - depends_on: - - atlantis - redis: - image: redis:7.2-alpine - restart: always - ports: - - '6379:6379' - command: redis-server --save 20 1 --loglevel warning --requirepass test123 - volumes: - - redis:/data - atlantis: - depends_on: - - redis - build: - context: . - dockerfile: Dockerfile.dev - ports: - - 4141:4141 - volumes: - - ~/.ssh:/.ssh - - ./:/atlantis/src - # Contains the flags that atlantis uses in env var form - env_file: - - ./atlantis.env + ngrok: + image: wernight/ngrok:latest@sha256:d211f29ebcfe5f4e72df4fa8bdd9a667886e127d7fcb1be4a1af5ad83a8a1b77 + ports: + - 4040:4040 + environment: + # https://dashboard.ngrok.com/get-started/your-authtoken + # NGROK_AUTH: REPLACE-WITH-YOUR-TOKEN // set this in atlantis.env + NGROK_PROTOCOL: http + NGROK_PORT: atlantis:4141 + env_file: + - atlantis.env + depends_on: + - atlantis + redis: + image: redis:7.2-alpine@sha256:a40e29800d387e3cf9431902e1e7a362e4d819233d68ae39380532c3310091ac + restart: always + ports: + - 6379:6379 + command: redis-server --save 20 1 --loglevel warning --requirepass test123 + volumes: + - redis:/data + atlantis: + depends_on: + - redis + build: + context: . 
+ dockerfile: Dockerfile.dev + ports: + - 4141:4141 + volumes: + - ${HOME}/.ssh:/.ssh:ro + - ${PWD}:/atlantis/src:ro + # Contains the flags that atlantis uses in env var form + env_file: + - atlantis.env volumes: - redis: - driver: local + redis: + driver: local diff --git a/go.mod b/go.mod index 1816de9dc9..eb29f982c4 100644 --- a/go.mod +++ b/go.mod @@ -6,7 +6,7 @@ require ( code.gitea.io/sdk/gitea v0.17.1 github.com/Masterminds/sprig/v3 v3.2.3 github.com/alicebob/miniredis/v2 v2.32.1 - github.com/bradleyfalzon/ghinstallation/v2 v2.9.0 + github.com/bradleyfalzon/ghinstallation/v2 v2.10.0 github.com/briandowns/spinner v1.23.0 github.com/cactus/go-statsd-client/v5 v5.1.0 github.com/go-ozzo/ozzo-validation v3.6.0+incompatible @@ -22,7 +22,8 @@ require ( github.com/hashicorp/go-multierror v1.1.1 github.com/hashicorp/go-version v1.6.0 github.com/hashicorp/golang-lru/v2 v2.0.7 - github.com/hashicorp/terraform-config-inspect v0.0.0-20231204233900-a34142ec2a72 + github.com/hashicorp/terraform-config-inspect v0.0.0-20240509232506-4708120f8f30 + github.com/jpillora/backoff v1.0.0 github.com/kr/pretty v0.3.1 github.com/mcdafydd/go-azuredevops v0.12.1 github.com/microcosm-cc/bluemonday v1.0.26 @@ -34,7 +35,7 @@ require ( github.com/pkg/errors v0.9.1 github.com/redis/go-redis/v9 v9.5.1 github.com/remeh/sizedwaitgroup v1.0.0 - github.com/shurcooL/githubv4 v0.0.0-20240120211514-18a1ae0e79dc + github.com/shurcooL/githubv4 v0.0.0-20240429030203-be2daab69064 github.com/slack-go/slack v0.12.5 github.com/spf13/cobra v1.8.0 github.com/spf13/pflag v1.0.5 @@ -42,11 +43,11 @@ require ( github.com/stretchr/testify v1.9.0 github.com/uber-go/tally/v4 v4.1.10 github.com/urfave/negroni/v3 v3.1.0 - github.com/warrensbox/terraform-switcher v0.1.1-0.20230206012955-d7dfd1b44605 - github.com/xanzy/go-gitlab v0.100.0 - go.etcd.io/bbolt v1.3.9 + github.com/warrensbox/terraform-switcher v0.1.1-0.20240413181427-4d66b260d90c + github.com/xanzy/go-gitlab v0.102.0 + go.etcd.io/bbolt v1.3.10 go.uber.org/zap v1.27.0 - golang.org/x/term v0.18.0 + golang.org/x/term v0.19.0 golang.org/x/text v0.14.0 gopkg.in/yaml.v3 v3.0.1 ) @@ -56,7 +57,7 @@ require ( github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 // indirect github.com/go-playground/locales v0.14.1 // indirect github.com/go-playground/universal-translator v0.18.1 // indirect - github.com/hashicorp/hcl/v2 v2.20.0 + github.com/hashicorp/hcl/v2 v2.20.1 github.com/inconshreveable/mousetrap v1.1.0 // indirect github.com/leodido/go-urn v1.4.0 // indirect github.com/shurcooL/graphql v0.0.0-20220606043923-3cf50f8a0a29 // indirect @@ -65,13 +66,10 @@ require ( require github.com/twmb/murmur3 v1.1.8 // indirect -require github.com/google/go-github/v57 v57.0.0 // indirect - require ( github.com/Masterminds/goutils v1.1.1 // indirect github.com/Masterminds/semver/v3 v3.2.1 // indirect github.com/alicebob/gopher-json v0.0.0-20200520072559-a9ecdc9d1d3a // indirect - github.com/apparentlymart/go-textseg/v13 v13.0.0 // indirect github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect github.com/aymerick/douceur v0.2.0 // indirect github.com/beorn7/perks v1.0.1 // indirect @@ -87,7 +85,12 @@ require ( github.com/golang-jwt/jwt/v4 v4.5.0 // indirect github.com/golang/protobuf v1.5.3 // indirect github.com/google/go-cmp v0.6.0 // indirect + github.com/google/go-github/v60 v60.0.0 // indirect github.com/google/go-querystring v1.1.0 // indirect + github.com/gookit/color v1.5.4 // indirect + github.com/gookit/goutil v0.6.15 // indirect + github.com/gookit/gsr v0.1.0 // 
indirect + github.com/gookit/slog v0.5.5 // indirect github.com/gorilla/css v1.0.0 // indirect github.com/hashicorp/errwrap v1.1.0 // indirect github.com/hashicorp/go-cleanhttp v0.5.2 // indirect @@ -123,17 +126,20 @@ require ( github.com/spf13/cast v1.6.0 // indirect github.com/subosito/gotenv v1.6.0 // indirect github.com/ulikunitz/xz v0.5.11 // indirect + github.com/valyala/bytebufferpool v1.0.0 // indirect + github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect github.com/yuin/gopher-lua v1.1.1 // indirect - github.com/zclconf/go-cty v1.13.2 // indirect + github.com/zclconf/go-cty v1.14.4 // indirect go.uber.org/multierr v1.11.0 // indirect - golang.org/x/crypto v0.19.0 // indirect - golang.org/x/exp v0.0.0-20230905200255-921286631fa9 // indirect - golang.org/x/mod v0.12.0 // indirect - golang.org/x/net v0.21.0 // indirect + golang.org/x/crypto v0.22.0 // indirect + golang.org/x/exp v0.0.0-20231006140011-7918f672742d // indirect + golang.org/x/mod v0.13.0 // indirect + golang.org/x/net v0.23.0 // indirect golang.org/x/oauth2 v0.15.0 // indirect - golang.org/x/sys v0.18.0 // indirect + golang.org/x/sync v0.5.0 // indirect + golang.org/x/sys v0.19.0 // indirect golang.org/x/time v0.5.0 // indirect - golang.org/x/tools v0.13.0 // indirect + golang.org/x/tools v0.14.0 // indirect google.golang.org/appengine v1.6.7 // indirect google.golang.org/protobuf v1.33.0 // indirect gopkg.in/ini.v1 v1.67.0 // indirect diff --git a/go.sum b/go.sum index 015159aa40..24c36a6e56 100644 --- a/go.sum +++ b/go.sum @@ -53,8 +53,6 @@ github.com/alicebob/gopher-json v0.0.0-20200520072559-a9ecdc9d1d3a h1:HbKu58rmZp github.com/alicebob/gopher-json v0.0.0-20200520072559-a9ecdc9d1d3a/go.mod h1:SGnFV6hVsYE877CKEZ6tDNTjaSXYUk6QqoIK6PrAtcc= github.com/alicebob/miniredis/v2 v2.32.1 h1:Bz7CciDnYSaa0mX5xODh6GUITRSx+cVhjNoOR4JssBo= github.com/alicebob/miniredis/v2 v2.32.1/go.mod h1:AqkLNAfUm0K07J28hnAyyQKf/x0YkCY/g5DCtuL01Mw= -github.com/apparentlymart/go-textseg/v13 v13.0.0 h1:Y+KvPE1NYz0xl601PVImeQfFyEy6iT90AvPUL1NNfNw= -github.com/apparentlymart/go-textseg/v13 v13.0.0/go.mod h1:ZK2fH7c4NqDTLtiYLvIkEghdlcqw7yxLeM89kiTRPUo= github.com/apparentlymart/go-textseg/v15 v15.0.0 h1:uYvfpb3DyLSCGWnctWKGj857c6ew1u1fNQOlOtuGxQY= github.com/apparentlymart/go-textseg/v15 v15.0.0/go.mod h1:K8XmNZdhEBkdlyDdvbmmsvpAG721bKi0joRfFdHIWJ4= github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 h1:DklsrG3dyBCFEj5IhUbnKptjxatkF07cF2ak3yi77so= @@ -67,8 +65,8 @@ github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d h1:xDfNPAt8lFiC1UJrqV3uuy861HCTo708pDMbjHHdCas= github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d/go.mod h1:6QX/PXZ00z/TKoufEY6K/a0k6AhaJrQKdFe6OfVXsa4= -github.com/bradleyfalzon/ghinstallation/v2 v2.9.0 h1:HmxIYqnxubRYcYGRc5v3wUekmo5Wv2uX3gukmWJ0AFk= -github.com/bradleyfalzon/ghinstallation/v2 v2.9.0/go.mod h1:wmkTDJf8CmVypxE8ijIStFnKoTa6solK5QfdmJrP9KI= +github.com/bradleyfalzon/ghinstallation/v2 v2.10.0 h1:XWuWBRFEpqVrHepQob9yPS3Xg4K3Wr9QCx4fu8HbUNg= +github.com/bradleyfalzon/ghinstallation/v2 v2.10.0/go.mod h1:qoGA4DxWPaYTgVCrmEspVSjlTu4WYAiSxMIhorMRXXc= github.com/briandowns/spinner v1.23.0 h1:alDF2guRWqa/FOZZYWjlMIx2L6H0wyewPxo/CH4Pt2A= github.com/briandowns/spinner v1.23.0/go.mod h1:rPG4gmXeN3wQV/TsAY4w8lPdIM6RX3yqeBQJSrbXjuE= github.com/bsm/ginkgo/v2 v2.12.0 h1:Ny8MWAHyOepLGlLKYmXG4IEkioBysk6GpaRTLC8zwWs= @@ 
-190,10 +188,10 @@ github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= -github.com/google/go-github/v57 v57.0.0 h1:L+Y3UPTY8ALM8x+TV0lg+IEBI+upibemtBD8Q9u7zHs= -github.com/google/go-github/v57 v57.0.0/go.mod h1:s0omdnye0hvK/ecLvpsGfJMiRt85PimQh4oygmLIxHw= github.com/google/go-github/v59 v59.0.0 h1:7h6bgpF5as0YQLLkEiVqpgtJqjimMYhBkD4jT5aN3VA= github.com/google/go-github/v59 v59.0.0/go.mod h1:rJU4R0rQHFVFDOkqGWxfLNo6vEk4dv40oDjhV/gH6wM= +github.com/google/go-github/v60 v60.0.0 h1:oLG98PsLauFvvu4D/YPxq374jhSxFYdzQGNCyONLfn8= +github.com/google/go-github/v60 v60.0.0/go.mod h1:ByhX2dP9XT9o/ll2yXAu2VD8l5eNVg8hD4Cr0S/LmQk= github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck= github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8= github.com/google/go-querystring v1.1.0/go.mod h1:Kcdr2DB4koayq7X8pmAG4sNG59So17icRSOU623lUBU= @@ -217,6 +215,14 @@ github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= +github.com/gookit/color v1.5.4 h1:FZmqs7XOyGgCAxmWyPslpiok1k05wmY3SJTytgvYFs0= +github.com/gookit/color v1.5.4/go.mod h1:pZJOeOS8DM43rXbp4AZo1n9zCU2qjpcRko0b6/QJi9w= +github.com/gookit/goutil v0.6.15 h1:mMQ0ElojNZoyPD0eVROk5QXJPh2uKR4g06slgPDF5Jo= +github.com/gookit/goutil v0.6.15/go.mod h1:qdKdYEHQdEtyH+4fNdQNZfJHhI0jUZzHxQVAV3DaMDY= +github.com/gookit/gsr v0.1.0 h1:0gadWaYGU4phMs0bma38t+Do5OZowRMEVlHv31p0Zig= +github.com/gookit/gsr v0.1.0/go.mod h1:7wv4Y4WCnil8+DlDYHBjidzrEzfHhXEoFjEA0pPPWpI= +github.com/gookit/slog v0.5.5 h1:XoyK3NilKzuC/umvnqTQDHTOnpC8R6pvlr/ht9PyfgU= +github.com/gookit/slog v0.5.5/go.mod h1:RfIwzoaQ8wZbKdcqG7+3EzbkMqcp2TUn3mcaSZAw2EQ= github.com/gorilla/css v1.0.0 h1:BQqNyPTi50JCFMTw/b67hByjMVXZRwGha6wxVGkeihY= github.com/gorilla/css v1.0.0/go.mod h1:Dn721qIggHpt4+EFCcTLTU/vk5ySda2ReITrtgBl60c= github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY= @@ -248,10 +254,10 @@ github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM= github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= -github.com/hashicorp/hcl/v2 v2.20.0 h1:l++cRs/5jQOiKVvqXZm/P1ZEfVXJmvLS9WSVxkaeTb4= -github.com/hashicorp/hcl/v2 v2.20.0/go.mod h1:WmcD/Ym72MDOOx5F62Ly+leloeu6H7m0pG7VBiU6pQk= -github.com/hashicorp/terraform-config-inspect v0.0.0-20231204233900-a34142ec2a72 h1:nZ5gGjbe5o7XUu1d7j+Y5Ztcxlp+yaumTKH9i0D3wlg= -github.com/hashicorp/terraform-config-inspect v0.0.0-20231204233900-a34142ec2a72/go.mod h1:l8HcFPm9cQh6Q0KSWoYPiePqMvRFenybP1CH2MjKdlg= +github.com/hashicorp/hcl/v2 v2.20.1 h1:M6hgdyz7HYt1UN9e61j+qKJBqR3orTWbI1HKBJEdxtc= +github.com/hashicorp/hcl/v2 v2.20.1/go.mod h1:TZDqQ4kNKCbh1iJp99FdPiUaVDDUPivbqxZulxDYqL4= +github.com/hashicorp/terraform-config-inspect v0.0.0-20240509232506-4708120f8f30 
h1:0qwr2oZy9mIIJMWh7W9NTHLWGMbEF5KEQ+QqM9hym34= +github.com/hashicorp/terraform-config-inspect v0.0.0-20240509232506-4708120f8f30/go.mod h1:Gz/z9Hbn+4KSp8A2FBtNszfLSdT2Tn/uAKGuVqqWmDI= github.com/huandu/xstrings v1.3.3/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= github.com/huandu/xstrings v1.4.0 h1:D17IlohoQq4UcpqD7fDk80P7l+lwAmlFaBHgOipl2FU= github.com/huandu/xstrings v1.4.0/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= @@ -262,6 +268,7 @@ github.com/imdario/mergo v0.3.16/go.mod h1:WBLT9ZmE3lPoWsEzCh9LPo3TiwVN+ZKEjmz+h github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI= +github.com/jpillora/backoff v1.0.0 h1:uvFg412JmmHBHw7iwprIxkPMI+sGQ4kzOWsMeHnm2EA= github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4= github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= @@ -284,8 +291,6 @@ github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= -github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= -github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ= github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI= github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY= @@ -383,13 +388,11 @@ github.com/sagikazarmark/slog-shim v0.1.0 h1:diDBnUNK9N/354PgrxMywXnAwEr1QZcOr6g github.com/sagikazarmark/slog-shim v0.1.0/go.mod h1:SrcSrq8aKtyuqEI1uvTDTK1arOWRIczQRv+GVI1AkeQ= github.com/samber/lo v1.38.1 h1:j2XEAqXKb09Am4ebOg31SpvzUTTs6EN3VfgeLUhPdXM= github.com/samber/lo v1.38.1/go.mod h1:+m/ZKRl6ClXCE2Lgf3MsQlWfh4bn1bz6CXEOxnEXnEA= -github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ= -github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= github.com/shopspring/decimal v1.3.1 h1:2Usl1nmF/WZucqkFZhnfFYxxxu8LG21F6nPQBE5gKV8= github.com/shopspring/decimal v1.3.1/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= -github.com/shurcooL/githubv4 v0.0.0-20240120211514-18a1ae0e79dc h1:vH0NQbIDk+mJLvBliNGfcQgUmhlniWBDXC79oRxfZA0= -github.com/shurcooL/githubv4 v0.0.0-20240120211514-18a1ae0e79dc/go.mod h1:zqMwyHmnN/eDOZOdiTohqIUKUrTFX62PNlu7IJdu0q8= +github.com/shurcooL/githubv4 v0.0.0-20240429030203-be2daab69064 h1:RCQBSFx5JrsbHltqTtJ+kN3U0Y3a/N/GlVdmRSoxzyE= +github.com/shurcooL/githubv4 v0.0.0-20240429030203-be2daab69064/go.mod h1:zqMwyHmnN/eDOZOdiTohqIUKUrTFX62PNlu7IJdu0q8= github.com/shurcooL/graphql v0.0.0-20220606043923-3cf50f8a0a29 h1:B1PEwpArrNp4dkQrfxh/abbBAOZBVp0ds+fBEOUOqOc= github.com/shurcooL/graphql v0.0.0-20220606043923-3cf50f8a0a29/go.mod h1:AuYgA5Kyo4c7HfUmvRGs/6rGlMMV/6B1bVnB9JxJEEg= github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= @@ -435,20 
+438,26 @@ github.com/ulikunitz/xz v0.5.11 h1:kpFauv27b6ynzBNT/Xy+1k+fK4WswhN/6PN5WhFAGw8= github.com/ulikunitz/xz v0.5.11/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14= github.com/urfave/negroni/v3 v3.1.0 h1:lzmuxGSpnJCT/ujgIAjkU3+LW3NX8alCglO/L6KjIGQ= github.com/urfave/negroni/v3 v3.1.0/go.mod h1:jWvnX03kcSjDBl/ShB0iHvx5uOs7mAzZXW+JvJ5XYAs= -github.com/warrensbox/terraform-switcher v0.1.1-0.20230206012955-d7dfd1b44605 h1:bRt3KvPapqnO3s9XenyU4COpU9X7cNW3BMELyHRxuSs= -github.com/warrensbox/terraform-switcher v0.1.1-0.20230206012955-d7dfd1b44605/go.mod h1:saryXNaL624mlulV138FP+HhVw7IpvETUXLS3nTvH1g= -github.com/xanzy/go-gitlab v0.100.0 h1:jaOtYj5nWI19+9oVVmgy233pax2oYqucwetogYU46ks= -github.com/xanzy/go-gitlab v0.100.0/go.mod h1:ETg8tcj4OhrB84UEgeE8dSuV/0h4BBL1uOV/qK0vlyI= +github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= +github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= +github.com/warrensbox/terraform-switcher v0.1.1-0.20240413181427-4d66b260d90c h1:gQw6llCIsW/RGSiKT7BfV22CNtyqPKbMX6GE0eaU2e4= +github.com/warrensbox/terraform-switcher v0.1.1-0.20240413181427-4d66b260d90c/go.mod h1:g/BtIOjGxYaOe1HMyvl740MMkOoGi3Ib0dv0P6ihiVI= +github.com/xanzy/go-gitlab v0.102.0 h1:ExHuJ1OTQ2yt25zBMMj0G96ChBirGYv8U7HyUiYkZ+4= +github.com/xanzy/go-gitlab v0.102.0/go.mod h1:ETg8tcj4OhrB84UEgeE8dSuV/0h4BBL1uOV/qK0vlyI= +github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e h1:JVG44RsyaB9T2KIHavMF/ppJZNG9ZpyihvCd0w101no= +github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e/go.mod h1:RbqR21r5mrJuqunuUZ/Dhy/avygyECGrLceyNeo4LiM= github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= github.com/yuin/gopher-lua v1.1.1 h1:kYKnWBjvbNP4XLT3+bPEwAXJx262OhaHDWDVOPjL46M= github.com/yuin/gopher-lua v1.1.1/go.mod h1:GBR0iDaNXjAgGg9zfCvksxSRnQx76gclCIb7kdAd1Pw= -github.com/zclconf/go-cty v1.13.2 h1:4GvrUxe/QUDYuJKAav4EYqdM47/kZa672LwmXFmEKT0= -github.com/zclconf/go-cty v1.13.2/go.mod h1:YKQzy/7pZ7iq2jNFzy5go57xdxdWoLLpaEp4u238AE0= -go.etcd.io/bbolt v1.3.9 h1:8x7aARPEXiXbHmtUwAIv7eV2fQFHrLLavdiJ3uzJXoI= -go.etcd.io/bbolt v1.3.9/go.mod h1:zaO32+Ti0PK1ivdPtgMESzuzL2VPoIG1PCQNvOdo/dE= +github.com/zclconf/go-cty v1.14.4 h1:uXXczd9QDGsgu0i/QFR/hzI5NYCHLf6NQw/atrbnhq8= +github.com/zclconf/go-cty v1.14.4/go.mod h1:VvMs5i0vgZdhYawQNq5kePSpLAoz8u1xvZgrPIxfnZE= +github.com/zclconf/go-cty-debug v0.0.0-20191215020915-b22d67c1ba0b h1:FosyBZYxY34Wul7O/MSKey3txpPYyCqVO5ZyceuQJEI= +github.com/zclconf/go-cty-debug v0.0.0-20191215020915-b22d67c1ba0b/go.mod h1:ZRKQfBXbGkpdV6QMzT3rU1kSTAnfu1dO8dPKjYprgj8= +go.etcd.io/bbolt v1.3.10 h1:+BqfJTcCzTItrop8mq/lbzL8wSGtj94UO/3U31shqG0= +go.etcd.io/bbolt v1.3.10/go.mod h1:bK3UQLPJZly7IlNmV7uVHJDxfe5aK9Ll93e/74Y9oEQ= go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= @@ -475,8 +484,8 @@ golang.org/x/crypto v0.0.0-20210513164829-c07d793c2f9a/go.mod h1:P+XmwS30IXTQdn5 golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.3.0/go.mod 
h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4= golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4= -golang.org/x/crypto v0.19.0 h1:ENy+Az/9Y1vSrlrvBSyna3PITt4tiZLf7sgCjZBX7Wo= -golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= +golang.org/x/crypto v0.22.0 h1:g1v0xeRhjcugydODzvb3mEM9SQ0HGp9s/nh3COQ/C30= +golang.org/x/crypto v0.22.0/go.mod h1:vr6Su+7cTlO45qkww3VDJlzDn0ctJvRgYbC2NvXHt+M= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -487,8 +496,8 @@ golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u0 golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= -golang.org/x/exp v0.0.0-20230905200255-921286631fa9 h1:GoHiUyI/Tp2nVkLI2mCxVkOjsbSXD66ic0XW0js0R9g= -golang.org/x/exp v0.0.0-20230905200255-921286631fa9/go.mod h1:S2oDrQGGwySpoQPVqRShND87VCbxmc6bL1Yd2oYrm6k= +golang.org/x/exp v0.0.0-20231006140011-7918f672742d h1:jtJma62tbqLibJ5sFQz8bKtEM8rJBtfilJ2qTU199MI= +golang.org/x/exp v0.0.0-20231006140011-7918f672742d/go.mod h1:ldy0pHrwJyGW56pPQzzkH36rKxoZW1tw7ZJpeKx+hdo= golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= @@ -510,8 +519,9 @@ golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzB golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= -golang.org/x/mod v0.12.0 h1:rmsUpXtvNzj340zd98LZ4KntptpfRHwpFOHG188oHXc= -golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.13.0 h1:I/DsJXRlw/8l/0c24sM9yb0T4z9liZTduXvdAWYiysY= +golang.org/x/mod v0.13.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -548,8 +558,8 @@ golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY= golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= -golang.org/x/net v0.21.0 h1:AQyQV4dYCvJ7vGmJyKki9+PBdyvhkSd8EIx/qb0AYv4= -golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= +golang.org/x/net v0.23.0 
h1:7EYJ93RZ9vYSZAIb2x3lnuvqO5zneoD6IvWjuhfxjTs= +golang.org/x/net v0.23.0/go.mod h1:JKghWKKOSdJwpW2GEx0Ja7fmaKnMsbu+MWVZTokSYmg= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190402181905-9f3314589c9a/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -570,6 +580,7 @@ golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.5.0 h1:60k92dhOjHxJkrqnwsfl8KuaHbn/5dl0lUPUklKo3qE= golang.org/x/sync v0.5.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -619,20 +630,16 @@ golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/sys v0.17.0 h1:25cE3gD+tdBA7lp7QfhuV+rJiE9YXTcS3VG1SqssI/Y= -golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/sys v0.18.0 h1:DBdB3niSjOA/O0blCZBqDefyWNYveAYMNF1Wum0DYQ4= -golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.19.0 h1:q5f1RH2jigJ1MoAWp2KTp3gm5zAGFUTarQZ5U386+4o= +golang.org/x/sys v0.19.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc= golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= golang.org/x/term v0.15.0/go.mod h1:BDl952bC7+uMoWR75FIrCDx79TPU9oHkTZ9yRbYOrX0= -golang.org/x/term v0.17.0 h1:mkTF7LCd6WGJNL3K1Ad7kwxNfYAW6a8a8QqtMblp/4U= -golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= -golang.org/x/term v0.18.0 h1:FcHjZXDMxI8mM3nwhX9HlKop4C0YQvCVCdwYl2wOtE8= -golang.org/x/term v0.18.0/go.mod h1:ILwASektA3OnRv7amZ1xhE/KTR+u50pbXfZ03+6Nx58= +golang.org/x/term v0.19.0 h1:+ThwsDv+tYfnJFhF4L8jITxu1tdTWRTZpdsWgEgjL6Q= +golang.org/x/term v0.19.0/go.mod h1:2CuTdWZ7KHSQwUzKva0cbMg6q2DMI3Mmxp+gKJbskEk= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -692,8 +699,8 @@ golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= 
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= -golang.org/x/tools v0.13.0 h1:Iey4qkscZuv0VvIt8E0neZjtPVQFSc870HQ448QgEmQ= -golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58= +golang.org/x/tools v0.14.0 h1:jvNa2pY0M4r62jkRQ6RwEZZyPcymeL9XZMLBbV7U2nc= +golang.org/x/tools v0.14.0/go.mod h1:uYBEerGOWcJyEORxN+Ek8+TT266gXkNlHdJBwexUsBg= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= diff --git a/netlify.toml b/netlify.toml new file mode 100644 index 0000000000..d19fe178dc --- /dev/null +++ b/netlify.toml @@ -0,0 +1,33 @@ +# Netlify Config, https://www.netlify.com/docs/netlify-toml-reference/ +[build] + base = "/" + publish = "runatlantis.io/.vitepress/dist/" + command = "pnpm website:build" + +[[redirects]] + from = "/guide/getting-started.html" + to = "/guide/" + status = 301 + force = true + +[[redirects]] + from = "/docs/atlantis-yaml-reference.html" + to = "/docs/repo-level-atlantis-yaml.html" + status = 301 + force = true + +[[headers]] + for = "/*" + [headers.values] + X-Frame-Options = "DENY" + X-XSS-Protection = "1; mode=block" + Content-Security-Policy = "default-src 'self'; frame-src 'self' https://app.netlify.com; script-src 'self' https://*.google-analytics.com https://www.googletagmanager.com https://*.algolianet.com https://*.algolia.net 'unsafe-inline'; style-src 'self' https://fonts.googleapis.com https://*.algolianet.com https://*.algolia.net 'unsafe-inline'; img-src 'self' https://*.google-analytics.com data:; font-src 'self' data: https://fonts.googleapis.com https://fonts.gstatic.com; connect-src 'self' https://*.google-analytics.com https://www.googletagmanager.com https://*.algolianet.com https://*.algolia.net" + Cache-Control = "public, max-age=86400, must-revalidate" + Strict-Transport-Security = "max-age=86400; includeSubDomains; preload" + Referrer-Policy = "no-referrer" + X-Content-Type-Options = "nosniff" + +[[headers]] + for = "*.html" + [headers.values] + Content-Type = "text/html; charset=UTF-8" diff --git a/package.json b/package.json index 6874b9f97f..0ae58e56c2 100644 --- a/package.json +++ b/package.json @@ -1,16 +1,21 @@ { "license": "Apache-2.0", + "type": "module", "devDependencies": { - "@vuepress/client": "2.0.0-rc.0", - "@vuepress/plugin-docsearch": "2.0.0-rc.0", - "@vuepress/plugin-google-analytics": "2.0.0-rc.15", - "@vuepress/utils": "2.0.0-rc.0", - "vue": "^3.3.11", - "vuepress": "2.0.0-rc.0", - "vuepress-plugin-sitemap2": "2.0.0-rc.4" + "@playwright/test": "^1.44.0", + "@types/node": "^20.12.10", + "@vueuse/core": "^10.9.0", + "sass": "^1.77.0", + "sitemap-ts": "^1.6.1", + "vitepress": "^1.1.4", + "vue": "^3.4.27", + "markdownlint-cli": "^0.40.0" }, "scripts": { - "website:dev": "vuepress dev runatlantis.io", - "website:build": "NODE_OPTIONS=--openssl-legacy-provider vuepress build runatlantis.io" + "website:dev": "vitepress dev --host localhost --port 8080 runatlantis.io", + "website:lint": "markdownlint runatlantis.io", + "website:lint-fix": "markdownlint --fix runatlantis.io", + "website:build": "vitepress build runatlantis.io", + "e2e": "playwright test" } } diff --git a/playwright.config.cjs b/playwright.config.cjs new 
file mode 100644 index 0000000000..e3411787bd --- /dev/null +++ b/playwright.config.cjs @@ -0,0 +1,3 @@ +module.exports = { + testDir: './runatlantis.io/e2e' +}; diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 5a075817d2..5b6f01364f 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -1,214 +1,133 @@ -lockfileVersion: '6.0' +lockfileVersion: '9.0' settings: autoInstallPeers: true excludeLinksFromLockfile: false -devDependencies: - '@vuepress/client': - specifier: 2.0.0-rc.0 - version: 2.0.0-rc.0 - '@vuepress/plugin-docsearch': - specifier: 2.0.0-rc.0 - version: 2.0.0-rc.0(@algolia/client-search@4.21.1)(search-insights@2.13.0) - '@vuepress/plugin-google-analytics': - specifier: 2.0.0-rc.15 - version: 2.0.0-rc.15(vuepress@2.0.0-rc.0) - '@vuepress/utils': - specifier: 2.0.0-rc.0 - version: 2.0.0-rc.0 - vue: - specifier: ^3.3.11 - version: 3.3.11 - vuepress: - specifier: 2.0.0-rc.0 - version: 2.0.0-rc.0(@vuepress/client@2.0.0-rc.0)(vue@3.3.11) - vuepress-plugin-sitemap2: - specifier: 2.0.0-rc.4 - version: 2.0.0-rc.4(vuepress@2.0.0-rc.0) +importers: + + .: + devDependencies: + '@playwright/test': + specifier: ^1.44.0 + version: 1.44.0 + '@types/node': + specifier: ^20.12.10 + version: 20.12.10 + '@vueuse/core': + specifier: ^10.9.0 + version: 10.9.0(vue@3.4.27) + markdownlint-cli: + specifier: ^0.40.0 + version: 0.40.0 + sass: + specifier: ^1.77.0 + version: 1.77.0 + sitemap-ts: + specifier: ^1.6.1 + version: 1.6.1 + vitepress: + specifier: ^1.1.4 + version: 1.1.4(@algolia/client-search@4.23.3)(@types/node@20.12.10)(postcss@8.4.38)(sass@1.77.0)(search-insights@2.13.0) + vue: + specifier: ^3.4.27 + version: 3.4.27 packages: - /@algolia/autocomplete-core@1.9.3(@algolia/client-search@4.21.1)(algoliasearch@4.21.1)(search-insights@2.13.0): + '@algolia/autocomplete-core@1.9.3': resolution: {integrity: sha512-009HdfugtGCdC4JdXUbVJClA0q0zh24yyePn+KUGk3rP7j8FEe/m5Yo/z65gn6nP/cM39PxpzqKrL7A6fP6PPw==} - dependencies: - '@algolia/autocomplete-plugin-algolia-insights': 1.9.3(@algolia/client-search@4.21.1)(algoliasearch@4.21.1)(search-insights@2.13.0) - '@algolia/autocomplete-shared': 1.9.3(@algolia/client-search@4.21.1)(algoliasearch@4.21.1) - transitivePeerDependencies: - - '@algolia/client-search' - - algoliasearch - - search-insights - dev: true - /@algolia/autocomplete-plugin-algolia-insights@1.9.3(@algolia/client-search@4.21.1)(algoliasearch@4.21.1)(search-insights@2.13.0): + '@algolia/autocomplete-plugin-algolia-insights@1.9.3': resolution: {integrity: sha512-a/yTUkcO/Vyy+JffmAnTWbr4/90cLzw+CC3bRbhnULr/EM0fGNvM13oQQ14f2moLMcVDyAx/leczLlAOovhSZg==} peerDependencies: search-insights: '>= 1 < 3' - dependencies: - '@algolia/autocomplete-shared': 1.9.3(@algolia/client-search@4.21.1)(algoliasearch@4.21.1) - search-insights: 2.13.0 - transitivePeerDependencies: - - '@algolia/client-search' - - algoliasearch - dev: true - /@algolia/autocomplete-preset-algolia@1.9.3(@algolia/client-search@4.21.1)(algoliasearch@4.21.1): + '@algolia/autocomplete-preset-algolia@1.9.3': resolution: {integrity: sha512-d4qlt6YmrLMYy95n5TB52wtNDr6EgAIPH81dvvvW8UmuWRgxEtY0NJiPwl/h95JtG2vmRM804M0DSwMCNZlzRA==} peerDependencies: '@algolia/client-search': '>= 4.9.1 < 6' algoliasearch: '>= 4.9.1 < 6' - dependencies: - '@algolia/autocomplete-shared': 1.9.3(@algolia/client-search@4.21.1)(algoliasearch@4.21.1) - '@algolia/client-search': 4.21.1 - algoliasearch: 4.21.1 - dev: true - /@algolia/autocomplete-shared@1.9.3(@algolia/client-search@4.21.1)(algoliasearch@4.21.1): + '@algolia/autocomplete-shared@1.9.3': resolution: {integrity: 
sha512-Wnm9E4Ye6Rl6sTTqjoymD+l8DjSTHsHboVRYrKgEt8Q7UHm9nYbqhN/i0fhUYA3OAEH7WA8x3jfpnmJm3rKvaQ==} peerDependencies: '@algolia/client-search': '>= 4.9.1 < 6' algoliasearch: '>= 4.9.1 < 6' - dependencies: - '@algolia/client-search': 4.21.1 - algoliasearch: 4.21.1 - dev: true - /@algolia/cache-browser-local-storage@4.21.1: - resolution: {integrity: sha512-vUkac/vgj8inyGR/IgunRjTOQ6IlBwl7afFkIfUZRqbqKKXBs+A/g5wgH+UnAlCSW8wjFRAIfCzuvSRb1/qjsQ==} - dependencies: - '@algolia/cache-common': 4.21.1 - dev: true + '@algolia/cache-browser-local-storage@4.23.3': + resolution: {integrity: sha512-vRHXYCpPlTDE7i6UOy2xE03zHF2C8MEFjPN2v7fRbqVpcOvAUQK81x3Kc21xyb5aSIpYCjWCZbYZuz8Glyzyyg==} - /@algolia/cache-common@4.21.1: - resolution: {integrity: sha512-HUo4fRk8KXFMyCASW0k+Kl8iXBoRPdqAjV9OVaFibTNg1dbwnpe6eIxbSTM6AJ2X82ic/8x3GuAO8zF/E515PA==} - dev: true + '@algolia/cache-common@4.23.3': + resolution: {integrity: sha512-h9XcNI6lxYStaw32pHpB1TMm0RuxphF+Ik4o7tcQiodEdpKK+wKufY6QXtba7t3k8eseirEMVB83uFFF3Nu54A==} - /@algolia/cache-in-memory@4.21.1: - resolution: {integrity: sha512-+l2pLg6yIwRaGNtv41pGF/f/e9Qk80FeYE41f4OXS9lb5vpyrxzqM5nUaffWk/ZSFrPDuw5J2E226c//tIIffA==} - dependencies: - '@algolia/cache-common': 4.21.1 - dev: true + '@algolia/cache-in-memory@4.23.3': + resolution: {integrity: sha512-yvpbuUXg/+0rbcagxNT7un0eo3czx2Uf0y4eiR4z4SD7SiptwYTpbuS0IHxcLHG3lq22ukx1T6Kjtk/rT+mqNg==} - /@algolia/client-account@4.21.1: - resolution: {integrity: sha512-AC6SjA9n38th73gAUqcjsuxNUChpwaflaAhPL0qO9cUICN67njpQrnYaoSVZ/yx0opG5zQFRKbpEcuPGj0XjhQ==} - dependencies: - '@algolia/client-common': 4.21.1 - '@algolia/client-search': 4.21.1 - '@algolia/transporter': 4.21.1 - dev: true + '@algolia/client-account@4.23.3': + resolution: {integrity: sha512-hpa6S5d7iQmretHHF40QGq6hz0anWEHGlULcTIT9tbUssWUriN9AUXIFQ8Ei4w9azD0hc1rUok9/DeQQobhQMA==} - /@algolia/client-analytics@4.21.1: - resolution: {integrity: sha512-q6AxvAcBl4fNZXZsMwRRQXcsxUv0PK5eUAz/lHDvgkMWAg6cP7Fl+WIq0fHcG7cJA4EHf2sT5fV6Z+yUlf7NfA==} - dependencies: - '@algolia/client-common': 4.21.1 - '@algolia/client-search': 4.21.1 - '@algolia/requester-common': 4.21.1 - '@algolia/transporter': 4.21.1 - dev: true + '@algolia/client-analytics@4.23.3': + resolution: {integrity: sha512-LBsEARGS9cj8VkTAVEZphjxTjMVCci+zIIiRhpFun9jGDUlS1XmhCW7CTrnaWeIuCQS/2iPyRqSy1nXPjcBLRA==} - /@algolia/client-common@4.21.1: - resolution: {integrity: sha512-LOH7ncYwY/x7epOgxc/MIuV7m3qzl00wIjDG5/9rgImFpkV0X+D/ndJI9DmPsIx7yaTLd5xv/XYuKLcvrUR0eQ==} - dependencies: - '@algolia/requester-common': 4.21.1 - '@algolia/transporter': 4.21.1 - dev: true + '@algolia/client-common@4.23.3': + resolution: {integrity: sha512-l6EiPxdAlg8CYhroqS5ybfIczsGUIAC47slLPOMDeKSVXYG1n0qGiz4RjAHLw2aD0xzh2EXZ7aRguPfz7UKDKw==} - /@algolia/client-personalization@4.21.1: - resolution: {integrity: sha512-u2CyQjHbyVwPqM5eSXd/o+rh1Pk949P/MO6s+OxyEGg6/R2YpYvmsafVZl9Q+xqT8pFaf5QygfcqlSdMUDHV5Q==} - dependencies: - '@algolia/client-common': 4.21.1 - '@algolia/requester-common': 4.21.1 - '@algolia/transporter': 4.21.1 - dev: true + '@algolia/client-personalization@4.23.3': + resolution: {integrity: sha512-3E3yF3Ocr1tB/xOZiuC3doHQBQ2zu2MPTYZ0d4lpfWads2WTKG7ZzmGnsHmm63RflvDeLK/UVx7j2b3QuwKQ2g==} - /@algolia/client-search@4.21.1: - resolution: {integrity: sha512-3KqSmMkQmF+ACY/Ms5TdcvrcK8iqgQP/N0EPnNUUP4LMUzAACpLLTdzA+AtCuc6oaz5ITtGJBVdPUljj5Jf/Lg==} - dependencies: - '@algolia/client-common': 4.21.1 - '@algolia/requester-common': 4.21.1 - '@algolia/transporter': 4.21.1 - dev: true + '@algolia/client-search@4.23.3': + resolution: {integrity: 
sha512-P4VAKFHqU0wx9O+q29Q8YVuaowaZ5EM77rxfmGnkHUJggh28useXQdopokgwMeYw2XUht49WX5RcTQ40rZIabw==} - /@algolia/logger-common@4.21.1: - resolution: {integrity: sha512-9AyYpR2OO9vPkkDlpTtW2/6nX+RmMd7LUwzJiAF3uN+BYUiQqgXEp+oGaH8UC0dgetmK7wJO6hw4b39cnTdEpw==} - dev: true + '@algolia/logger-common@4.23.3': + resolution: {integrity: sha512-y9kBtmJwiZ9ZZ+1Ek66P0M68mHQzKRxkW5kAAXYN/rdzgDN0d2COsViEFufxJ0pb45K4FRcfC7+33YB4BLrZ+g==} - /@algolia/logger-console@4.21.1: - resolution: {integrity: sha512-9wizQiQ8kL4DiBmT82i403UwacNuv+0hpfsfaWYZQrGjpzG+yvXETWM4AgwFZLj007esuKQiGfOPUoYFZNkGGA==} - dependencies: - '@algolia/logger-common': 4.21.1 - dev: true + '@algolia/logger-console@4.23.3': + resolution: {integrity: sha512-8xoiseoWDKuCVnWP8jHthgaeobDLolh00KJAdMe9XPrWPuf1by732jSpgy2BlsLTaT9m32pHI8CRfrOqQzHv3A==} - /@algolia/requester-browser-xhr@4.21.1: - resolution: {integrity: sha512-9NudesJLuXtRHV+JD8fTkrsdVj/oAPQbtLnxBbSQeMduzV6+a7W+G9VuWo5fwFymCdXR8/Hb6jy8D1owQIq5Gw==} - dependencies: - '@algolia/requester-common': 4.21.1 - dev: true + '@algolia/recommend@4.23.3': + resolution: {integrity: sha512-9fK4nXZF0bFkdcLBRDexsnGzVmu4TSYZqxdpgBW2tEyfuSSY54D4qSRkLmNkrrz4YFvdh2GM1gA8vSsnZPR73w==} - /@algolia/requester-common@4.21.1: - resolution: {integrity: sha512-KtX2Ep3C43XxoN3xKw755cdf9enE6gPgzh6ufZQRJBl4rYCOoXbiREU6noDYX/Nq+Q+sl03V37WAp0YgtIlh9g==} - dev: true + '@algolia/requester-browser-xhr@4.23.3': + resolution: {integrity: sha512-jDWGIQ96BhXbmONAQsasIpTYWslyjkiGu0Quydjlowe+ciqySpiDUrJHERIRfELE5+wFc7hc1Q5hqjGoV7yghw==} - /@algolia/requester-node-http@4.21.1: - resolution: {integrity: sha512-EcD8cY6Bh2iMySpqXglTKU9+pt+km1ws3xF0V7CGMIUzW1HmN/ZVhi4apCBY4tEMytbyARv0XRTPsolSC4gSSw==} - dependencies: - '@algolia/requester-common': 4.21.1 - dev: true + '@algolia/requester-common@4.23.3': + resolution: {integrity: sha512-xloIdr/bedtYEGcXCiF2muajyvRhwop4cMZo+K2qzNht0CMzlRkm8YsDdj5IaBhshqfgmBb3rTg4sL4/PpvLYw==} - /@algolia/transporter@4.21.1: - resolution: {integrity: sha512-KGLFKz8krzOWRwcbR4FT49Grh1dES/mG8dHABEojbvrfUb6kUFxkAee/aezp2GIxuNx+gpQjRn1IzOsqbUZL0A==} - dependencies: - '@algolia/cache-common': 4.21.1 - '@algolia/logger-common': 4.21.1 - '@algolia/requester-common': 4.21.1 - dev: true + '@algolia/requester-node-http@4.23.3': + resolution: {integrity: sha512-zgu++8Uj03IWDEJM3fuNl34s746JnZOWn1Uz5taV1dFyJhVM/kTNw9Ik7YJWiUNHJQXcaD8IXD1eCb0nq/aByA==} + + '@algolia/transporter@4.23.3': + resolution: {integrity: sha512-Wjl5gttqnf/gQKJA+dafnD0Y6Yw97yvfY8R9h0dQltX1GXTgNs1zWgvtWW0tHl1EgMdhAyw189uWiZMnL3QebQ==} + + '@antfu/utils@0.7.6': + resolution: {integrity: sha512-pvFiLP2BeOKA/ZOS6jxx4XhKzdVLHDhGlFEaZ2flWWYf2xOqVniqpk38I04DFRyz+L0ASggl7SkItTc+ZLju4w==} - /@babel/helper-string-parser@7.23.4: - resolution: {integrity: sha512-803gmbQdqwdf4olxrX4AJyFBV/RTr3rSmOj0rKwesmzlfhYNDEs+/iOcznzpNWlJlIlTJC2QfPFcHB6DlzdVLQ==} + '@babel/helper-string-parser@7.24.1': + resolution: {integrity: sha512-2ofRCjnnA9y+wk8b9IAREroeUP02KHp431N2mhKniy2yKIDKpbrHv9eXwm8cBeWQYcJmzv5qKCu65P47eCF7CQ==} engines: {node: '>=6.9.0'} - dev: true - /@babel/helper-validator-identifier@7.22.20: + '@babel/helper-validator-identifier@7.22.20': resolution: {integrity: sha512-Y4OZ+ytlatR8AI+8KZfKuL5urKp7qey08ha31L8b3BwewJAoJamTzyvxPR/5D+KkdJCGPq/+8TukHBlY10FX9A==} engines: {node: '>=6.9.0'} - dev: true - /@babel/parser@7.23.6: - resolution: {integrity: sha512-Z2uID7YJ7oNvAI20O9X0bblw7Qqs8Q2hFy0R9tAfnfLkp5MW0UH9eUvnDSnFwKZ0AvgS1ucqR4KzvVHgnke1VQ==} + '@babel/parser@7.24.5': + resolution: {integrity: 
sha512-EOv5IK8arwh3LI47dz1b0tKUb/1uhHAnHJOrjgtQMIpu1uXd9mlFrJg9IUgGUgZ41Ch0K8REPTYpO7B76b4vJg==} engines: {node: '>=6.0.0'} hasBin: true - dependencies: - '@babel/types': 7.23.6 - dev: true - /@babel/types@7.23.6: - resolution: {integrity: sha512-+uarb83brBzPKN38NX1MkB6vb6+mwvR6amUulqAE7ccQw1pEl+bCia9TbdG1lsnFP7lZySvUn37CHyXQdfTwzg==} + '@babel/types@7.24.0': + resolution: {integrity: sha512-+j7a5c253RfKh8iABBhywc8NSfP5LURe7Uh4qpsh6jc+aLJguvmIUBdjSdEMQv2bENrCR5MfRdjGo7vzS/ob7w==} engines: {node: '>=6.9.0'} - dependencies: - '@babel/helper-string-parser': 7.23.4 - '@babel/helper-validator-identifier': 7.22.20 - to-fast-properties: 2.0.0 - dev: true - /@docsearch/css@3.5.2: - resolution: {integrity: sha512-SPiDHaWKQZpwR2siD0KQUwlStvIAnEyK6tAE2h2Wuoq8ue9skzhlyVQ1ddzOxX6khULnAALDiR/isSF3bnuciA==} - dev: true + '@docsearch/css@3.6.0': + resolution: {integrity: sha512-+sbxb71sWre+PwDK7X2T8+bhS6clcVMLwBPznX45Qu6opJcgRjAp7gYSDzVFp187J+feSj5dNBN1mJoi6ckkUQ==} - /@docsearch/js@3.5.2(@algolia/client-search@4.21.1)(search-insights@2.13.0): - resolution: {integrity: sha512-p1YFTCDflk8ieHgFJYfmyHBki1D61+U9idwrLh+GQQMrBSP3DLGKpy0XUJtPjAOPltcVbqsTjiPFfH7JImjUNg==} - dependencies: - '@docsearch/react': 3.5.2(@algolia/client-search@4.21.1)(search-insights@2.13.0) - preact: 10.19.3 - transitivePeerDependencies: - - '@algolia/client-search' - - '@types/react' - - react - - react-dom - - search-insights - dev: true + '@docsearch/js@3.6.0': + resolution: {integrity: sha512-QujhqINEElrkIfKwyyyTfbsfMAYCkylInLYMRqHy7PHc8xTBQCow73tlo/Kc7oIwBrCLf0P3YhjlOeV4v8hevQ==} - /@docsearch/react@3.5.2(@algolia/client-search@4.21.1)(search-insights@2.13.0): - resolution: {integrity: sha512-9Ahcrs5z2jq/DcAvYtvlqEBHImbm4YJI8M9y0x6Tqg598P40HTEkX7hsMcIuThI+hTFxRGZ9hll0Wygm2yEjng==} + '@docsearch/react@3.6.0': + resolution: {integrity: sha512-HUFut4ztcVNmqy9gp/wxNbC7pTOHhgVVkHVGCACTuLhUKUhKAF9KYHJtMiLUJxEqiFLQiuri1fWF8zqwM/cu1w==} peerDependencies: '@types/react': '>= 16.8.0 < 19.0.0' react: '>= 16.8.0 < 19.0.0' @@ -223,1945 +142,780 @@ packages: optional: true search-insights: optional: true - dependencies: - '@algolia/autocomplete-core': 1.9.3(@algolia/client-search@4.21.1)(algoliasearch@4.21.1)(search-insights@2.13.0) - '@algolia/autocomplete-preset-algolia': 1.9.3(@algolia/client-search@4.21.1)(algoliasearch@4.21.1) - '@docsearch/css': 3.5.2 - algoliasearch: 4.21.1 - search-insights: 2.13.0 - transitivePeerDependencies: - - '@algolia/client-search' - dev: true - /@esbuild/android-arm64@0.19.9: - resolution: {integrity: sha512-q4cR+6ZD0938R19MyEW3jEsMzbb/1rulLXiNAJQADD/XYp7pT+rOS5JGxvpRW8dFDEfjW4wLgC/3FXIw4zYglQ==} + '@esbuild/aix-ppc64@0.20.2': + resolution: {integrity: sha512-D+EBOJHXdNZcLJRBkhENNG8Wji2kgc9AZ9KiPr1JuZjsNtyHzrsfLRrY0tk2H2aoFu6RANO1y1iPPUCDYWkb5g==} + engines: {node: '>=12'} + cpu: [ppc64] + os: [aix] + + '@esbuild/android-arm64@0.20.2': + resolution: {integrity: sha512-mRzjLacRtl/tWU0SvD8lUEwb61yP9cqQo6noDZP/O8VkwafSYwZ4yWy24kan8jE/IMERpYncRt2dw438LP3Xmg==} engines: {node: '>=12'} cpu: [arm64] os: [android] - requiresBuild: true - dev: true - optional: true - /@esbuild/android-arm@0.19.9: - resolution: {integrity: sha512-jkYjjq7SdsWuNI6b5quymW0oC83NN5FdRPuCbs9HZ02mfVdAP8B8eeqLSYU3gb6OJEaY5CQabtTFbqBf26H3GA==} + '@esbuild/android-arm@0.20.2': + resolution: {integrity: sha512-t98Ra6pw2VaDhqNWO2Oph2LXbz/EJcnLmKLGBJwEwXX/JAN83Fym1rU8l0JUWK6HkIbWONCSSatf4sf2NBRx/w==} engines: {node: '>=12'} cpu: [arm] os: [android] - requiresBuild: true - dev: true - optional: true - /@esbuild/android-x64@0.19.9: - resolution: 
{integrity: sha512-KOqoPntWAH6ZxDwx1D6mRntIgZh9KodzgNOy5Ebt9ghzffOk9X2c1sPwtM9P+0eXbefnDhqYfkh5PLP5ULtWFA==} + '@esbuild/android-x64@0.20.2': + resolution: {integrity: sha512-btzExgV+/lMGDDa194CcUQm53ncxzeBrWJcncOBxuC6ndBkKxnHdFJn86mCIgTELsooUmwUm9FkhSp5HYu00Rg==} engines: {node: '>=12'} cpu: [x64] os: [android] - requiresBuild: true - dev: true - optional: true - /@esbuild/darwin-arm64@0.19.9: - resolution: {integrity: sha512-KBJ9S0AFyLVx2E5D8W0vExqRW01WqRtczUZ8NRu+Pi+87opZn5tL4Y0xT0mA4FtHctd0ZgwNoN639fUUGlNIWw==} + '@esbuild/darwin-arm64@0.20.2': + resolution: {integrity: sha512-4J6IRT+10J3aJH3l1yzEg9y3wkTDgDk7TSDFX+wKFiWjqWp/iCfLIYzGyasx9l0SAFPT1HwSCR+0w/h1ES/MjA==} engines: {node: '>=12'} cpu: [arm64] os: [darwin] - requiresBuild: true - dev: true - optional: true - /@esbuild/darwin-x64@0.19.9: - resolution: {integrity: sha512-vE0VotmNTQaTdX0Q9dOHmMTao6ObjyPm58CHZr1UK7qpNleQyxlFlNCaHsHx6Uqv86VgPmR4o2wdNq3dP1qyDQ==} + '@esbuild/darwin-x64@0.20.2': + resolution: {integrity: sha512-tBcXp9KNphnNH0dfhv8KYkZhjc+H3XBkF5DKtswJblV7KlT9EI2+jeA8DgBjp908WEuYll6pF+UStUCfEpdysA==} engines: {node: '>=12'} cpu: [x64] os: [darwin] - requiresBuild: true - dev: true - optional: true - /@esbuild/freebsd-arm64@0.19.9: - resolution: {integrity: sha512-uFQyd/o1IjiEk3rUHSwUKkqZwqdvuD8GevWF065eqgYfexcVkxh+IJgwTaGZVu59XczZGcN/YMh9uF1fWD8j1g==} + '@esbuild/freebsd-arm64@0.20.2': + resolution: {integrity: sha512-d3qI41G4SuLiCGCFGUrKsSeTXyWG6yem1KcGZVS+3FYlYhtNoNgYrWcvkOoaqMhwXSMrZRl69ArHsGJ9mYdbbw==} engines: {node: '>=12'} cpu: [arm64] os: [freebsd] - requiresBuild: true - dev: true - optional: true - /@esbuild/freebsd-x64@0.19.9: - resolution: {integrity: sha512-WMLgWAtkdTbTu1AWacY7uoj/YtHthgqrqhf1OaEWnZb7PQgpt8eaA/F3LkV0E6K/Lc0cUr/uaVP/49iE4M4asA==} + '@esbuild/freebsd-x64@0.20.2': + resolution: {integrity: sha512-d+DipyvHRuqEeM5zDivKV1KuXn9WeRX6vqSqIDgwIfPQtwMP4jaDsQsDncjTDDsExT4lR/91OLjRo8bmC1e+Cw==} engines: {node: '>=12'} cpu: [x64] os: [freebsd] - requiresBuild: true - dev: true - optional: true - /@esbuild/linux-arm64@0.19.9: - resolution: {integrity: sha512-PiPblfe1BjK7WDAKR1Cr9O7VVPqVNpwFcPWgfn4xu0eMemzRp442hXyzF/fSwgrufI66FpHOEJk0yYdPInsmyQ==} + '@esbuild/linux-arm64@0.20.2': + resolution: {integrity: sha512-9pb6rBjGvTFNira2FLIWqDk/uaf42sSyLE8j1rnUpuzsODBq7FvpwHYZxQ/It/8b+QOS1RYfqgGFNLRI+qlq2A==} engines: {node: '>=12'} cpu: [arm64] os: [linux] - requiresBuild: true - dev: true - optional: true - /@esbuild/linux-arm@0.19.9: - resolution: {integrity: sha512-C/ChPohUYoyUaqn1h17m/6yt6OB14hbXvT8EgM1ZWaiiTYz7nWZR0SYmMnB5BzQA4GXl3BgBO1l8MYqL/He3qw==} + '@esbuild/linux-arm@0.20.2': + resolution: {integrity: sha512-VhLPeR8HTMPccbuWWcEUD1Az68TqaTYyj6nfE4QByZIQEQVWBB8vup8PpR7y1QHL3CpcF6xd5WVBU/+SBEvGTg==} engines: {node: '>=12'} cpu: [arm] os: [linux] - requiresBuild: true - dev: true - optional: true - /@esbuild/linux-ia32@0.19.9: - resolution: {integrity: sha512-f37i/0zE0MjDxijkPSQw1CO/7C27Eojqb+r3BbHVxMLkj8GCa78TrBZzvPyA/FNLUMzP3eyHCVkAopkKVja+6Q==} + '@esbuild/linux-ia32@0.20.2': + resolution: {integrity: sha512-o10utieEkNPFDZFQm9CoP7Tvb33UutoJqg3qKf1PWVeeJhJw0Q347PxMvBgVVFgouYLGIhFYG0UGdBumROyiig==} engines: {node: '>=12'} cpu: [ia32] os: [linux] - requiresBuild: true - dev: true - optional: true - /@esbuild/linux-loong64@0.19.9: - resolution: {integrity: sha512-t6mN147pUIf3t6wUt3FeumoOTPfmv9Cc6DQlsVBpB7eCpLOqQDyWBP1ymXn1lDw4fNUSb/gBcKAmvTP49oIkaA==} + '@esbuild/linux-loong64@0.20.2': + resolution: {integrity: sha512-PR7sp6R/UC4CFVomVINKJ80pMFlfDfMQMYynX7t1tNTeivQ6XdX5r2XovMmha/VjR1YN/HgHWsVcTRIMkymrgQ==} 
engines: {node: '>=12'} cpu: [loong64] os: [linux] - requiresBuild: true - dev: true - optional: true - /@esbuild/linux-mips64el@0.19.9: - resolution: {integrity: sha512-jg9fujJTNTQBuDXdmAg1eeJUL4Jds7BklOTkkH80ZgQIoCTdQrDaHYgbFZyeTq8zbY+axgptncko3v9p5hLZtw==} + '@esbuild/linux-mips64el@0.20.2': + resolution: {integrity: sha512-4BlTqeutE/KnOiTG5Y6Sb/Hw6hsBOZapOVF6njAESHInhlQAghVVZL1ZpIctBOoTFbQyGW+LsVYZ8lSSB3wkjA==} engines: {node: '>=12'} cpu: [mips64el] os: [linux] - requiresBuild: true - dev: true - optional: true - /@esbuild/linux-ppc64@0.19.9: - resolution: {integrity: sha512-tkV0xUX0pUUgY4ha7z5BbDS85uI7ABw3V1d0RNTii7E9lbmV8Z37Pup2tsLV46SQWzjOeyDi1Q7Wx2+QM8WaCQ==} + '@esbuild/linux-ppc64@0.20.2': + resolution: {integrity: sha512-rD3KsaDprDcfajSKdn25ooz5J5/fWBylaaXkuotBDGnMnDP1Uv5DLAN/45qfnf3JDYyJv/ytGHQaziHUdyzaAg==} engines: {node: '>=12'} cpu: [ppc64] os: [linux] - requiresBuild: true - dev: true - optional: true - /@esbuild/linux-riscv64@0.19.9: - resolution: {integrity: sha512-DfLp8dj91cufgPZDXr9p3FoR++m3ZJ6uIXsXrIvJdOjXVREtXuQCjfMfvmc3LScAVmLjcfloyVtpn43D56JFHg==} + '@esbuild/linux-riscv64@0.20.2': + resolution: {integrity: sha512-snwmBKacKmwTMmhLlz/3aH1Q9T8v45bKYGE3j26TsaOVtjIag4wLfWSiZykXzXuE1kbCE+zJRmwp+ZbIHinnVg==} engines: {node: '>=12'} cpu: [riscv64] os: [linux] - requiresBuild: true - dev: true - optional: true - /@esbuild/linux-s390x@0.19.9: - resolution: {integrity: sha512-zHbglfEdC88KMgCWpOl/zc6dDYJvWGLiUtmPRsr1OgCViu3z5GncvNVdf+6/56O2Ca8jUU+t1BW261V6kp8qdw==} + '@esbuild/linux-s390x@0.20.2': + resolution: {integrity: sha512-wcWISOobRWNm3cezm5HOZcYz1sKoHLd8VL1dl309DiixxVFoFe/o8HnwuIwn6sXre88Nwj+VwZUvJf4AFxkyrQ==} engines: {node: '>=12'} cpu: [s390x] os: [linux] - requiresBuild: true - dev: true - optional: true - /@esbuild/linux-x64@0.19.9: - resolution: {integrity: sha512-JUjpystGFFmNrEHQnIVG8hKwvA2DN5o7RqiO1CVX8EN/F/gkCjkUMgVn6hzScpwnJtl2mPR6I9XV1oW8k9O+0A==} + '@esbuild/linux-x64@0.20.2': + resolution: {integrity: sha512-1MdwI6OOTsfQfek8sLwgyjOXAu+wKhLEoaOLTjbijk6E2WONYpH9ZU2mNtR+lZ2B4uwr+usqGuVfFT9tMtGvGw==} engines: {node: '>=12'} cpu: [x64] os: [linux] - requiresBuild: true - dev: true - optional: true - /@esbuild/netbsd-x64@0.19.9: - resolution: {integrity: sha512-GThgZPAwOBOsheA2RUlW5UeroRfESwMq/guy8uEe3wJlAOjpOXuSevLRd70NZ37ZrpO6RHGHgEHvPg1h3S1Jug==} + '@esbuild/netbsd-x64@0.20.2': + resolution: {integrity: sha512-K8/DhBxcVQkzYc43yJXDSyjlFeHQJBiowJ0uVL6Tor3jGQfSGHNNJcWxNbOI8v5k82prYqzPuwkzHt3J1T1iZQ==} engines: {node: '>=12'} cpu: [x64] os: [netbsd] - requiresBuild: true - dev: true - optional: true - /@esbuild/openbsd-x64@0.19.9: - resolution: {integrity: sha512-Ki6PlzppaFVbLnD8PtlVQfsYw4S9n3eQl87cqgeIw+O3sRr9IghpfSKY62mggdt1yCSZ8QWvTZ9jo9fjDSg9uw==} + '@esbuild/openbsd-x64@0.20.2': + resolution: {integrity: sha512-eMpKlV0SThJmmJgiVyN9jTPJ2VBPquf6Kt/nAoo6DgHAoN57K15ZghiHaMvqjCye/uU4X5u3YSMgVBI1h3vKrQ==} engines: {node: '>=12'} cpu: [x64] os: [openbsd] - requiresBuild: true - dev: true - optional: true - /@esbuild/sunos-x64@0.19.9: - resolution: {integrity: sha512-MLHj7k9hWh4y1ddkBpvRj2b9NCBhfgBt3VpWbHQnXRedVun/hC7sIyTGDGTfsGuXo4ebik2+3ShjcPbhtFwWDw==} + '@esbuild/sunos-x64@0.20.2': + resolution: {integrity: sha512-2UyFtRC6cXLyejf/YEld4Hajo7UHILetzE1vsRcGL3earZEW77JxrFjH4Ez2qaTiEfMgAXxfAZCm1fvM/G/o8w==} engines: {node: '>=12'} cpu: [x64] os: [sunos] - requiresBuild: true - dev: true - optional: true - /@esbuild/win32-arm64@0.19.9: - resolution: {integrity: sha512-GQoa6OrQ8G08guMFgeXPH7yE/8Dt0IfOGWJSfSH4uafwdC7rWwrfE6P9N8AtPGIjUzdo2+7bN8Xo3qC578olhg==} + 
'@esbuild/win32-arm64@0.20.2': + resolution: {integrity: sha512-GRibxoawM9ZCnDxnP3usoUDO9vUkpAxIIZ6GQI+IlVmr5kP3zUq+l17xELTHMWTWzjxa2guPNyrpq1GWmPvcGQ==} engines: {node: '>=12'} cpu: [arm64] os: [win32] - requiresBuild: true - dev: true - optional: true - /@esbuild/win32-ia32@0.19.9: - resolution: {integrity: sha512-UOozV7Ntykvr5tSOlGCrqU3NBr3d8JqPes0QWN2WOXfvkWVGRajC+Ym0/Wj88fUgecUCLDdJPDF0Nna2UK3Qtg==} + '@esbuild/win32-ia32@0.20.2': + resolution: {integrity: sha512-HfLOfn9YWmkSKRQqovpnITazdtquEW8/SoHW7pWpuEeguaZI4QnCRW6b+oZTztdBnZOS2hqJ6im/D5cPzBTTlQ==} engines: {node: '>=12'} cpu: [ia32] os: [win32] - requiresBuild: true - dev: true - optional: true - /@esbuild/win32-x64@0.19.9: - resolution: {integrity: sha512-oxoQgglOP7RH6iasDrhY+R/3cHrfwIDvRlT4CGChflq6twk8iENeVvMJjmvBb94Ik1Z+93iGO27err7w6l54GQ==} + '@esbuild/win32-x64@0.20.2': + resolution: {integrity: sha512-N49X4lJX27+l9jbLKSqZ6bKNjzQvHaT8IIFUy+YIqmXQdjYCToGWwOItDrfby14c78aDd5NHQl29xingXfCdLQ==} engines: {node: '>=12'} cpu: [x64] os: [win32] - requiresBuild: true - dev: true - optional: true - - /@jridgewell/sourcemap-codec@1.4.15: - resolution: {integrity: sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==} - dev: true - - /@mdit-vue/plugin-component@1.0.0: - resolution: {integrity: sha512-ZXsJwxkG5yyTHARIYbR74cT4AZ0SfMokFFjiHYCbypHIeYWgJhso4+CZ8+3V9EWFG3EHlGoKNGqKp9chHnqntQ==} - dependencies: - '@types/markdown-it': 13.0.7 - markdown-it: 13.0.2 - dev: true - - /@mdit-vue/plugin-frontmatter@1.0.0: - resolution: {integrity: sha512-MMA7Ny+YPZA7eDOY1t4E+rKuEWO39mzDdP/M68fKdXJU6VfcGkPr7gnpnJfW2QBJ5qIvMrK/3lDAA2JBy5TfpA==} - dependencies: - '@mdit-vue/types': 1.0.0 - '@types/markdown-it': 13.0.7 - gray-matter: 4.0.3 - markdown-it: 13.0.2 - dev: true - - /@mdit-vue/plugin-headers@1.0.0: - resolution: {integrity: sha512-0rK/iKy6x13d/Pp5XxdLBshTD0+YjZvtHIaIV+JO+/H2WnOv7oaRgs48G5d44z3XJVUE2u6fNnTlI169fef0/A==} - dependencies: - '@mdit-vue/shared': 1.0.0 - '@mdit-vue/types': 1.0.0 - '@types/markdown-it': 13.0.7 - markdown-it: 13.0.2 - dev: true - - /@mdit-vue/plugin-sfc@1.0.0: - resolution: {integrity: sha512-agMUe0fY4YHxsZivSvplBwRwrFvsIf/JNUJCAYq1+2Sg9+2hviTBZwjZDxYqHDHOVLtiNr+wuo68tE24mAx3AQ==} - dependencies: - '@mdit-vue/types': 1.0.0 - '@types/markdown-it': 13.0.7 - markdown-it: 13.0.2 - dev: true - - /@mdit-vue/plugin-title@1.0.0: - resolution: {integrity: sha512-8yC60fCZ95xcJ/cvJH4Lv43Rs4k+33UGyKrRWj5J8TNyMwUyGcwur0XyPM+ffJH4/Bzq4myZLsj/TTFSkXRxvw==} - dependencies: - '@mdit-vue/shared': 1.0.0 - '@mdit-vue/types': 1.0.0 - '@types/markdown-it': 13.0.7 - markdown-it: 13.0.2 - dev: true - /@mdit-vue/plugin-toc@1.0.0: - resolution: {integrity: sha512-WN8blfX0X/5Nolic0ClDWP7eVo9IB+U4g0jbycX3lolIZX5Bai1UpsD3QYZr5VVsPbQJMKMGvTrCEtCNTGvyWQ==} - dependencies: - '@mdit-vue/shared': 1.0.0 - '@mdit-vue/types': 1.0.0 - '@types/markdown-it': 13.0.7 - markdown-it: 13.0.2 - dev: true - - /@mdit-vue/shared@1.0.0: - resolution: {integrity: sha512-nbYBfmEi+pR2Lm0Z6TMVX2/iBjfr/kGEsHW8CC0rQw+3+sG5dY6VG094HuFAkiAmmvZx9DZZb+7ZMWp9vkwCRw==} - dependencies: - '@mdit-vue/types': 1.0.0 - '@types/markdown-it': 13.0.7 - markdown-it: 13.0.2 - dev: true + '@isaacs/cliui@8.0.2': + resolution: {integrity: sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==} + engines: {node: '>=12'} - /@mdit-vue/types@1.0.0: - resolution: {integrity: sha512-xeF5+sHLzRNF7plbksywKCph4qli20l72of2fMlZQQ7RECvXYrRkE9+bjRFQCyULC7B8ydUYbpbkux5xJlVWyw==} - dev: true + 
'@jridgewell/sourcemap-codec@1.4.15': + resolution: {integrity: sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==} - /@nodelib/fs.scandir@2.1.5: + '@nodelib/fs.scandir@2.1.5': resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==} engines: {node: '>= 8'} - dependencies: - '@nodelib/fs.stat': 2.0.5 - run-parallel: 1.2.0 - dev: true - /@nodelib/fs.stat@2.0.5: + '@nodelib/fs.stat@2.0.5': resolution: {integrity: sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==} engines: {node: '>= 8'} - dev: true - /@nodelib/fs.walk@1.2.8: + '@nodelib/fs.walk@1.2.8': resolution: {integrity: sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==} engines: {node: '>= 8'} - dependencies: - '@nodelib/fs.scandir': 2.1.5 - fastq: 1.15.0 - dev: true - /@rollup/rollup-android-arm-eabi@4.8.0: - resolution: {integrity: sha512-zdTObFRoNENrdPpnTNnhOljYIcOX7aI7+7wyrSpPFFIOf/nRdedE6IYsjaBE7tjukphh1tMTojgJ7p3lKY8x6Q==} + '@pkgjs/parseargs@0.11.0': + resolution: {integrity: sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==} + engines: {node: '>=14'} + + '@playwright/test@1.44.0': + resolution: {integrity: sha512-rNX5lbNidamSUorBhB4XZ9SQTjAqfe5M+p37Z8ic0jPFBMo5iCtQz1kRWkEMg+rYOKSlVycpQmpqjSFq7LXOfg==} + engines: {node: '>=16'} + hasBin: true + + '@rollup/rollup-android-arm-eabi@4.17.2': + resolution: {integrity: sha512-NM0jFxY8bB8QLkoKxIQeObCaDlJKewVlIEkuyYKm5An1tdVZ966w2+MPQ2l8LBZLjR+SgyV+nRkTIunzOYBMLQ==} cpu: [arm] os: [android] - requiresBuild: true - dev: true - optional: true - /@rollup/rollup-android-arm64@4.8.0: - resolution: {integrity: sha512-aiItwP48BiGpMFS9Znjo/xCNQVwTQVcRKkFKsO81m8exrGjHkCBDvm9PHay2kpa8RPnZzzKcD1iQ9KaLY4fPQQ==} + '@rollup/rollup-android-arm64@4.17.2': + resolution: {integrity: sha512-yeX/Usk7daNIVwkq2uGoq2BYJKZY1JfyLTaHO/jaiSwi/lsf8fTFoQW/n6IdAsx5tx+iotu2zCJwz8MxI6D/Bw==} cpu: [arm64] os: [android] - requiresBuild: true - dev: true - optional: true - /@rollup/rollup-darwin-arm64@4.8.0: - resolution: {integrity: sha512-zhNIS+L4ZYkYQUjIQUR6Zl0RXhbbA0huvNIWjmPc2SL0cB1h5Djkcy+RZ3/Bwszfb6vgwUvcVJYD6e6Zkpsi8g==} + '@rollup/rollup-darwin-arm64@4.17.2': + resolution: {integrity: sha512-kcMLpE6uCwls023+kknm71ug7MZOrtXo+y5p/tsg6jltpDtgQY1Eq5sGfHcQfb+lfuKwhBmEURDga9N0ol4YPw==} cpu: [arm64] os: [darwin] - requiresBuild: true - dev: true - optional: true - /@rollup/rollup-darwin-x64@4.8.0: - resolution: {integrity: sha512-A/FAHFRNQYrELrb/JHncRWzTTXB2ticiRFztP4ggIUAfa9Up1qfW8aG2w/mN9jNiZ+HB0t0u0jpJgFXG6BfRTA==} + '@rollup/rollup-darwin-x64@4.17.2': + resolution: {integrity: sha512-AtKwD0VEx0zWkL0ZjixEkp5tbNLzX+FCqGG1SvOu993HnSz4qDI6S4kGzubrEJAljpVkhRSlg5bzpV//E6ysTQ==} cpu: [x64] os: [darwin] - requiresBuild: true - dev: true - optional: true - /@rollup/rollup-linux-arm-gnueabihf@4.8.0: - resolution: {integrity: sha512-JsidBnh3p2IJJA4/2xOF2puAYqbaczB3elZDT0qHxn362EIoIkq7hrR43Xa8RisgI6/WPfvb2umbGsuvf7E37A==} + '@rollup/rollup-linux-arm-gnueabihf@4.17.2': + resolution: {integrity: sha512-3reX2fUHqN7sffBNqmEyMQVj/CKhIHZd4y631duy0hZqI8Qoqf6lTtmAKvJFYa6bhU95B1D0WgzHkmTg33In0A==} cpu: [arm] os: [linux] - requiresBuild: true - dev: true - optional: true - /@rollup/rollup-linux-arm64-gnu@4.8.0: - resolution: {integrity: sha512-hBNCnqw3EVCkaPB0Oqd24bv8SklETptQWcJz06kb9OtiShn9jK1VuTgi7o4zPSt6rNGWQOTDEAccbk0OqJmS+g==} + '@rollup/rollup-linux-arm-musleabihf@4.17.2': + resolution: 
{integrity: sha512-uSqpsp91mheRgw96xtyAGP9FW5ChctTFEoXP0r5FAzj/3ZRv3Uxjtc7taRQSaQM/q85KEKjKsZuiZM3GyUivRg==} + cpu: [arm] + os: [linux] + + '@rollup/rollup-linux-arm64-gnu@4.17.2': + resolution: {integrity: sha512-EMMPHkiCRtE8Wdk3Qhtciq6BndLtstqZIroHiiGzB3C5LDJmIZcSzVtLRbwuXuUft1Cnv+9fxuDtDxz3k3EW2A==} cpu: [arm64] os: [linux] - requiresBuild: true - dev: true - optional: true - /@rollup/rollup-linux-arm64-musl@4.8.0: - resolution: {integrity: sha512-Fw9ChYfJPdltvi9ALJ9wzdCdxGw4wtq4t1qY028b2O7GwB5qLNSGtqMsAel1lfWTZvf4b6/+4HKp0GlSYg0ahA==} + '@rollup/rollup-linux-arm64-musl@4.17.2': + resolution: {integrity: sha512-NMPylUUZ1i0z/xJUIx6VUhISZDRT+uTWpBcjdv0/zkp7b/bQDF+NfnfdzuTiB1G6HTodgoFa93hp0O1xl+/UbA==} cpu: [arm64] os: [linux] - requiresBuild: true - dev: true - optional: true - /@rollup/rollup-linux-riscv64-gnu@4.8.0: - resolution: {integrity: sha512-BH5xIh7tOzS9yBi8dFrCTG8Z6iNIGWGltd3IpTSKp6+pNWWO6qy8eKoRxOtwFbMrid5NZaidLYN6rHh9aB8bEw==} + '@rollup/rollup-linux-powerpc64le-gnu@4.17.2': + resolution: {integrity: sha512-T19My13y8uYXPw/L/k0JYaX1fJKFT/PWdXiHr8mTbXWxjVF1t+8Xl31DgBBvEKclw+1b00Chg0hxE2O7bTG7GQ==} + cpu: [ppc64] + os: [linux] + + '@rollup/rollup-linux-riscv64-gnu@4.17.2': + resolution: {integrity: sha512-BOaNfthf3X3fOWAB+IJ9kxTgPmMqPPH5f5k2DcCsRrBIbWnaJCgX2ll77dV1TdSy9SaXTR5iDXRL8n7AnoP5cg==} cpu: [riscv64] os: [linux] - requiresBuild: true - dev: true - optional: true - /@rollup/rollup-linux-x64-gnu@4.8.0: - resolution: {integrity: sha512-PmvAj8k6EuWiyLbkNpd6BLv5XeYFpqWuRvRNRl80xVfpGXK/z6KYXmAgbI4ogz7uFiJxCnYcqyvZVD0dgFog7Q==} + '@rollup/rollup-linux-s390x-gnu@4.17.2': + resolution: {integrity: sha512-W0UP/x7bnn3xN2eYMql2T/+wpASLE5SjObXILTMPUBDB/Fg/FxC+gX4nvCfPBCbNhz51C+HcqQp2qQ4u25ok6g==} + cpu: [s390x] + os: [linux] + + '@rollup/rollup-linux-x64-gnu@4.17.2': + resolution: {integrity: sha512-Hy7pLwByUOuyaFC6mAr7m+oMC+V7qyifzs/nW2OJfC8H4hbCzOX07Ov0VFk/zP3kBsELWNFi7rJtgbKYsav9QQ==} cpu: [x64] os: [linux] - requiresBuild: true - dev: true - optional: true - /@rollup/rollup-linux-x64-musl@4.8.0: - resolution: {integrity: sha512-mdxnlW2QUzXwY+95TuxZ+CurrhgrPAMveDWI97EQlA9bfhR8tw3Pt7SUlc/eSlCNxlWktpmT//EAA8UfCHOyXg==} + '@rollup/rollup-linux-x64-musl@4.17.2': + resolution: {integrity: sha512-h1+yTWeYbRdAyJ/jMiVw0l6fOOm/0D1vNLui9iPuqgRGnXA0u21gAqOyB5iHjlM9MMfNOm9RHCQ7zLIzT0x11Q==} cpu: [x64] os: [linux] - requiresBuild: true - dev: true - optional: true - /@rollup/rollup-win32-arm64-msvc@4.8.0: - resolution: {integrity: sha512-ge7saUz38aesM4MA7Cad8CHo0Fyd1+qTaqoIo+Jtk+ipBi4ATSrHWov9/S4u5pbEQmLjgUjB7BJt+MiKG2kzmA==} + '@rollup/rollup-win32-arm64-msvc@4.17.2': + resolution: {integrity: sha512-tmdtXMfKAjy5+IQsVtDiCfqbynAQE/TQRpWdVataHmhMb9DCoJxp9vLcCBjEQWMiUYxO1QprH/HbY9ragCEFLA==} cpu: [arm64] os: [win32] - requiresBuild: true - dev: true - optional: true - /@rollup/rollup-win32-ia32-msvc@4.8.0: - resolution: {integrity: sha512-p9E3PZlzurhlsN5h9g7zIP1DnqKXJe8ZUkFwAazqSvHuWfihlIISPxG9hCHCoA+dOOspL/c7ty1eeEVFTE0UTw==} + '@rollup/rollup-win32-ia32-msvc@4.17.2': + resolution: {integrity: sha512-7II/QCSTAHuE5vdZaQEwJq2ZACkBpQDOmQsE6D6XUbnBHW8IAhm4eTufL6msLJorzrHDFv3CF8oCA/hSIRuZeQ==} cpu: [ia32] os: [win32] - requiresBuild: true - dev: true - optional: true - /@rollup/rollup-win32-x64-msvc@4.8.0: - resolution: {integrity: sha512-kb4/auKXkYKqlUYTE8s40FcJIj5soOyRLHKd4ugR0dCq0G2EfcF54eYcfQiGkHzjidZ40daB4ulsFdtqNKZtBg==} + '@rollup/rollup-win32-x64-msvc@4.17.2': + resolution: {integrity: sha512-TGGO7v7qOq4CYmSBVEYpI1Y5xDuCEnbVC5Vth8mOsW0gDSzxNrVERPc790IGHsrT2dQSimgMr9Ub3Y1Jci5/8w==} cpu: [x64] 
os: [win32] - requiresBuild: true - dev: true - optional: true - - /@sindresorhus/merge-streams@1.0.0: - resolution: {integrity: sha512-rUV5WyJrJLoloD4NDN1V1+LDMDWOa4OTsT4yYJwQNpTU6FWxkxHpL7eu4w+DmiH8x/EAM1otkPE1+LaspIbplw==} - engines: {node: '>=18'} - dev: true - - /@types/debug@4.1.12: - resolution: {integrity: sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==} - dependencies: - '@types/ms': 0.7.34 - dev: true - - /@types/fs-extra@11.0.4: - resolution: {integrity: sha512-yTbItCNreRooED33qjunPthRcSjERP1r4MqCZc7wv0u2sUkzTFp45tgUfS5+r7FrZPdmCCNflLhVSP/o+SemsQ==} - dependencies: - '@types/jsonfile': 6.1.4 - '@types/node': 20.10.4 - dev: true - - /@types/hash-sum@1.0.2: - resolution: {integrity: sha512-UP28RddqY8xcU0SCEp9YKutQICXpaAq9N8U2klqF5hegGha7KzTOL8EdhIIV3bOSGBzjEpN9bU/d+nNZBdJYVw==} - dev: true - /@types/jsonfile@6.1.4: - resolution: {integrity: sha512-D5qGUYwjvnNNextdU59/+fI+spnwtTFmyQP0h+PfIOSkNfpU6AOICUOkm4i0OnSk+NyjdPJrxCDro0sJsWlRpQ==} - dependencies: - '@types/node': 20.10.4 - dev: true + '@shikijs/core@1.4.0': + resolution: {integrity: sha512-CxpKLntAi64h3j+TwWqVIQObPTED0FyXLHTTh3MKXtqiQNn2JGcMQQ362LftDbc9kYbDtrksNMNoVmVXzKFYUQ==} - /@types/linkify-it@3.0.5: - resolution: {integrity: sha512-yg6E+u0/+Zjva+buc3EIb+29XEg4wltq7cSmd4Uc2EE/1nUVmxyzpX6gUXD0V8jIrG0r7YeOGVIbYRkxeooCtw==} - dev: true + '@shikijs/transformers@1.4.0': + resolution: {integrity: sha512-kzvlWmWYYSeaLKRce/kgmFFORUtBtFahfXRKndor0b60ocYiXufBQM6d6w1PlMuUkdk55aor9xLvy9wy7hTEJg==} - /@types/markdown-it-emoji@2.0.4: - resolution: {integrity: sha512-H6ulk/ZmbDxOayPwI/leJzrmoW1YKX1Z+MVSCHXuYhvqckV4I/c+hPTf6UiqJyn2avWugfj30XroheEb6/Ekqg==} - dependencies: - '@types/markdown-it': 13.0.7 - dev: true + '@types/estree@1.0.5': + resolution: {integrity: sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw==} - /@types/markdown-it@13.0.7: - resolution: {integrity: sha512-U/CBi2YUUcTHBt5tjO2r5QV/x0Po6nsYwQU4Y04fBS6vfoImaiZ6f8bi3CjTCxBPQSO1LMyUqkByzi8AidyxfA==} - dependencies: - '@types/linkify-it': 3.0.5 - '@types/mdurl': 1.0.5 - dev: true + '@types/linkify-it@5.0.0': + resolution: {integrity: sha512-sVDA58zAw4eWAffKOaQH5/5j3XeayukzDk+ewSsnv3p4yJEZHCCzMDiZM8e0OUrRvmpGZ85jf4yDHkHsgBNr9Q==} - /@types/mdurl@1.0.5: - resolution: {integrity: sha512-6L6VymKTzYSrEf4Nev4Xa1LCHKrlTlYCBMTlQKFuddo1CvQcE52I0mwfOJayueUC7MJuXOeHTcIU683lzd0cUA==} - dev: true + '@types/markdown-it@14.1.1': + resolution: {integrity: sha512-4NpsnpYl2Gt1ljyBGrKMxFYAYvpqbnnkgP/i/g+NLpjEUa3obn1XJCur9YbEXKDAkaXqsR1LbDnGEJ0MmKFxfg==} - /@types/ms@0.7.34: - resolution: {integrity: sha512-nG96G3Wp6acyAgJqGasjODb+acrI7KltPiRxzHPXnP3NgI28bpQDRv53olbqGXbfcgF5aiiHmO3xpwEpS5Ld9g==} - dev: true + '@types/mdurl@2.0.0': + resolution: {integrity: sha512-RGdgjQUZba5p6QEFAVx2OGb8rQDL/cPRG7GiedRzMcJ1tYnUANBncjbSB1NRGwbvjcPeikRABz2nshyPk1bhWg==} - /@types/node@17.0.45: + '@types/node@17.0.45': resolution: {integrity: sha512-w+tIMs3rq2afQdsPJlODhoUEKzFP1ayaoyl1CcnwtIlsVe7K7bA1NGm4s3PraqTLlXnbIN84zuBlxBWo1u9BLw==} - dev: true - /@types/node@20.10.4: - resolution: {integrity: sha512-D08YG6rr8X90YB56tSIuBaddy/UXAA9RKJoFvrsnogAum/0pmjkgi4+2nx96A330FmioegBWmEYQ+syqCFaveg==} - dependencies: - undici-types: 5.26.5 - dev: true + '@types/node@20.12.10': + resolution: {integrity: sha512-Eem5pH9pmWBHoGAT8Dr5fdc5rYA+4NAovdM4EktRPVAAiJhmWWfQrA0cFhAbOsQdSfIHjAud6YdkbL69+zSKjw==} - /@types/sax@1.2.7: + '@types/sax@1.2.7': resolution: {integrity: 
sha512-rO73L89PJxeYM3s3pPPjiPgVVcymqU490g0YO5n5By0k2Erzj6tay/4lr1CHAAU4JyOWd1rpQ8bCf6cZfHU96A==} - dependencies: - '@types/node': 20.10.4 - dev: true - /@types/web-bluetooth@0.0.20: + '@types/web-bluetooth@0.0.20': resolution: {integrity: sha512-g9gZnnXVq7gM7v3tJCWV/qw7w+KeOlSHAhgF9RytFyifW6AF61hdT2ucrYhPq9hLs5JIryeupHV3qGk95dH9ow==} - dev: true - /@vitejs/plugin-vue@4.5.2(vite@5.0.12)(vue@3.3.11): - resolution: {integrity: sha512-UGR3DlzLi/SaVBPX0cnSyE37vqxU3O6chn8l0HJNzQzDia6/Au2A4xKv+iIJW8w2daf80G7TYHhi1pAUjdZ0bQ==} - engines: {node: ^14.18.0 || >=16.0.0} + '@vitejs/plugin-vue@5.0.4': + resolution: {integrity: sha512-WS3hevEszI6CEVEx28F8RjTX97k3KsrcY6kvTg7+Whm5y3oYvcqzVeGCU3hxSAn4uY2CLCkeokkGKpoctccilQ==} + engines: {node: ^18.0.0 || >=20.0.0} peerDependencies: - vite: ^4.0.0 || ^5.0.0 + vite: ^5.0.0 vue: ^3.2.25 - dependencies: - vite: 5.0.12 - vue: 3.3.11 - dev: true - /@vue/compiler-core@3.3.11: - resolution: {integrity: sha512-h97/TGWBilnLuRaj58sxNrsUU66fwdRKLOLQ9N/5iNDfp+DZhYH9Obhe0bXxhedl8fjAgpRANpiZfbgWyruQ0w==} - dependencies: - '@babel/parser': 7.23.6 - '@vue/shared': 3.3.11 - estree-walker: 2.0.2 - source-map-js: 1.0.2 - dev: true + '@vue/compiler-core@3.4.27': + resolution: {integrity: sha512-E+RyqY24KnyDXsCuQrI+mlcdW3ALND6U7Gqa/+bVwbcpcR3BRRIckFoz7Qyd4TTlnugtwuI7YgjbvsLmxb+yvg==} - /@vue/compiler-dom@3.3.11: - resolution: {integrity: sha512-zoAiUIqSKqAJ81WhfPXYmFGwDRuO+loqLxvXmfUdR5fOitPoUiIeFI9cTTyv9MU5O1+ZZglJVTusWzy+wfk5hw==} - dependencies: - '@vue/compiler-core': 3.3.11 - '@vue/shared': 3.3.11 - dev: true + '@vue/compiler-dom@3.4.27': + resolution: {integrity: sha512-kUTvochG/oVgE1w5ViSr3KUBh9X7CWirebA3bezTbB5ZKBQZwR2Mwj9uoSKRMFcz4gSMzzLXBPD6KpCLb9nvWw==} - /@vue/compiler-sfc@3.3.11: - resolution: {integrity: sha512-U4iqPlHO0KQeK1mrsxCN0vZzw43/lL8POxgpzcJweopmqtoYy9nljJzWDIQS3EfjiYhfdtdk9Gtgz7MRXnz3GA==} - dependencies: - '@babel/parser': 7.23.6 - '@vue/compiler-core': 3.3.11 - '@vue/compiler-dom': 3.3.11 - '@vue/compiler-ssr': 3.3.11 - '@vue/reactivity-transform': 3.3.11 - '@vue/shared': 3.3.11 - estree-walker: 2.0.2 - magic-string: 0.30.5 - postcss: 8.4.32 - source-map-js: 1.0.2 - dev: true + '@vue/compiler-sfc@3.4.27': + resolution: {integrity: sha512-nDwntUEADssW8e0rrmE0+OrONwmRlegDA1pD6QhVeXxjIytV03yDqTey9SBDiALsvAd5U4ZrEKbMyVXhX6mCGA==} - /@vue/compiler-ssr@3.3.11: - resolution: {integrity: sha512-Zd66ZwMvndxRTgVPdo+muV4Rv9n9DwQ4SSgWWKWkPFebHQfVYRrVjeygmmDmPewsHyznCNvJ2P2d6iOOhdv8Qg==} - dependencies: - '@vue/compiler-dom': 3.3.11 - '@vue/shared': 3.3.11 - dev: true + '@vue/compiler-ssr@3.4.27': + resolution: {integrity: sha512-CVRzSJIltzMG5FcidsW0jKNQnNRYC8bT21VegyMMtHmhW3UOI7knmUehzswXLrExDLE6lQCZdrhD4ogI7c+vuw==} - /@vue/devtools-api@6.5.1: - resolution: {integrity: sha512-+KpckaAQyfbvshdDW5xQylLni1asvNSGme1JFs8I1+/H5pHEhqUKMEQD/qn3Nx5+/nycBq11qAEi8lk+LXI2dA==} - dev: true + '@vue/devtools-api@7.1.3': + resolution: {integrity: sha512-W8IwFJ/o5iUk78jpqhvScbgCsPiOp2uileDVC0NDtW38gCWhsnu9SeBTjcdu3lbwLdsjc+H1c5Msd/x9ApbcFA==} - /@vue/reactivity-transform@3.3.11: - resolution: {integrity: sha512-fPGjH0wqJo68A0wQ1k158utDq/cRyZNlFoxGwNScE28aUFOKFEnCBsvyD8jHn+0kd0UKVpuGuaZEQ6r9FJRqCg==} - dependencies: - '@babel/parser': 7.23.6 - '@vue/compiler-core': 3.3.11 - '@vue/shared': 3.3.11 - estree-walker: 2.0.2 - magic-string: 0.30.5 - dev: true + '@vue/devtools-kit@7.1.3': + resolution: {integrity: sha512-NFskFSJMVCBXTkByuk2llzI3KD3Blcm7WqiRorWjD6nClHPgkH5BobDH08rfulqq5ocRt5xV+3qOT1Q9FXJrwQ==} + peerDependencies: + vue: ^3.0.0 - /@vue/reactivity@3.3.11: - resolution: 
{integrity: sha512-D5tcw091f0nuu+hXq5XANofD0OXnBmaRqMYl5B3fCR+mX+cXJIGNw/VNawBqkjLNWETrFW0i+xH9NvDbTPVh7g==} - dependencies: - '@vue/shared': 3.3.11 - dev: true + '@vue/devtools-shared@7.1.3': + resolution: {integrity: sha512-KJ3AfgjTn3tJz/XKF+BlVShNPecim3G21oHRue+YQOsooW+0s+qXvm09U09aO7yBza5SivL1QgxSrzAbiKWjhQ==} - /@vue/runtime-core@3.3.11: - resolution: {integrity: sha512-g9ztHGwEbS5RyWaOpXuyIVFTschclnwhqEbdy5AwGhYOgc7m/q3NFwr50MirZwTTzX55JY8pSkeib9BX04NIpw==} - dependencies: - '@vue/reactivity': 3.3.11 - '@vue/shared': 3.3.11 - dev: true + '@vue/reactivity@3.4.27': + resolution: {integrity: sha512-kK0g4NknW6JX2yySLpsm2jlunZJl2/RJGZ0H9ddHdfBVHcNzxmQ0sS0b09ipmBoQpY8JM2KmUw+a6sO8Zo+zIA==} - /@vue/runtime-dom@3.3.11: - resolution: {integrity: sha512-OlhtV1PVpbgk+I2zl+Y5rQtDNcCDs12rsRg71XwaA2/Rbllw6mBLMi57VOn8G0AjOJ4Mdb4k56V37+g8ukShpQ==} - dependencies: - '@vue/runtime-core': 3.3.11 - '@vue/shared': 3.3.11 - csstype: 3.1.3 - dev: true + '@vue/runtime-core@3.4.27': + resolution: {integrity: sha512-7aYA9GEbOOdviqVvcuweTLe5Za4qBZkUY7SvET6vE8kyypxVgaT1ixHLg4urtOlrApdgcdgHoTZCUuTGap/5WA==} + + '@vue/runtime-dom@3.4.27': + resolution: {integrity: sha512-ScOmP70/3NPM+TW9hvVAz6VWWtZJqkbdf7w6ySsws+EsqtHvkhxaWLecrTorFxsawelM5Ys9FnDEMt6BPBDS0Q==} - /@vue/server-renderer@3.3.11(vue@3.3.11): - resolution: {integrity: sha512-AIWk0VwwxCAm4wqtJyxBylRTXSy1wCLOKbWxHaHiu14wjsNYtiRCSgVuqEPVuDpErOlRdNnuRgipQfXRLjLN5A==} + '@vue/server-renderer@3.4.27': + resolution: {integrity: sha512-dlAMEuvmeA3rJsOMJ2J1kXU7o7pOxgsNHVr9K8hB3ImIkSuBrIdy0vF66h8gf8Tuinf1TK3mPAz2+2sqyf3KzA==} peerDependencies: - vue: 3.3.11 - dependencies: - '@vue/compiler-ssr': 3.3.11 - '@vue/shared': 3.3.11 - vue: 3.3.11 - dev: true - - /@vue/shared@3.3.11: - resolution: {integrity: sha512-u2G8ZQ9IhMWTMXaWqZycnK4UthG1fA238CD+DP4Dm4WJi5hdUKKLg0RMRaRpDPNMdkTwIDkp7WtD0Rd9BH9fLw==} - dev: true - - /@vuepress/bundler-vite@2.0.0-rc.0: - resolution: {integrity: sha512-rX8S8IYpqqlJfNPstS/joorpxXx/4WuE7+gDM31i2HUrxOKGZVzq8ZsRRRU2UdoTwHZSd3LpUS4sMtxE5xLK1A==} - dependencies: - '@vitejs/plugin-vue': 4.5.2(vite@5.0.12)(vue@3.3.11) - '@vuepress/client': 2.0.0-rc.0 - '@vuepress/core': 2.0.0-rc.0 - '@vuepress/shared': 2.0.0-rc.0 - '@vuepress/utils': 2.0.0-rc.0 - autoprefixer: 10.4.16(postcss@8.4.32) - connect-history-api-fallback: 2.0.0 - postcss: 8.4.32 - postcss-load-config: 4.0.2(postcss@8.4.32) - rollup: 4.8.0 - vite: 5.0.12 - vue: 3.3.11 - vue-router: 4.2.5(vue@3.3.11) - transitivePeerDependencies: - - '@types/node' - - '@vue/composition-api' - - less - - lightningcss - - sass - - stylus - - sugarss - - supports-color - - terser - - ts-node - - typescript - dev: true + vue: 3.4.27 - /@vuepress/cli@2.0.0-rc.0: - resolution: {integrity: sha512-XWSIFO9iOR7N4O2lXIwS5vZuLjU9WU/aGAtmhMWEMxrdMx7TQaJbgrfpTUEbHMf+cPI1DXBbUbtmkqIvtfOV0w==} - hasBin: true - dependencies: - '@vuepress/core': 2.0.0-rc.0 - '@vuepress/shared': 2.0.0-rc.0 - '@vuepress/utils': 2.0.0-rc.0 - cac: 6.7.14 - chokidar: 3.5.3 - envinfo: 7.11.0 - esbuild: 0.19.9 - transitivePeerDependencies: - - '@vue/composition-api' - - supports-color - - typescript - dev: true + '@vue/shared@3.4.27': + resolution: {integrity: sha512-DL3NmY2OFlqmYYrzp39yi3LDkKxa5vZVwxWdQ3rG0ekuWscHraeIbnI8t+aZK7qhYqEqWKTUdijadunb9pnrgA==} - /@vuepress/client@2.0.0-rc.0: - resolution: {integrity: sha512-TwQx8hJgYONYxX+QltZ2aw9O5Ym6SKelfiUduuIRb555B1gece/jSVap3H/ZwyBhpgJMtG4+/Mrmf8nlDSHjvw==} - dependencies: - '@vue/devtools-api': 6.5.1 - '@vuepress/shared': 2.0.0-rc.0 - '@vueuse/core': 10.7.0(vue@3.3.11) - vue: 3.3.11 - 
vue-router: 4.2.5(vue@3.3.11) - transitivePeerDependencies: - - '@vue/composition-api' - - typescript - dev: true + '@vueuse/core@10.9.0': + resolution: {integrity: sha512-/1vjTol8SXnx6xewDEKfS0Ra//ncg4Hb0DaZiwKf7drgfMsKFExQ+FnnENcN6efPen+1kIzhLQoGSy0eDUVOMg==} - /@vuepress/core@2.0.0-rc.0: - resolution: {integrity: sha512-uoOaZP1MdxZYJIAJcRcmYKKeCIVnxZeOuLMOOB9CPuAKSalT1RvJ1lztw6RX3q9SPnlqtSZPQXDncPAZivw4pA==} - dependencies: - '@vuepress/client': 2.0.0-rc.0 - '@vuepress/markdown': 2.0.0-rc.0 - '@vuepress/shared': 2.0.0-rc.0 - '@vuepress/utils': 2.0.0-rc.0 - vue: 3.3.11 - transitivePeerDependencies: - - '@vue/composition-api' - - supports-color - - typescript - dev: true - - /@vuepress/markdown@2.0.0-rc.0: - resolution: {integrity: sha512-USmqdKKMT6ZFHYRztTjKUlO8qgGfnEygMAAq4AzC/uYXiEfrbMBLAWJhteyGS56P3rGLj0OPAhksE681bX/wOg==} - dependencies: - '@mdit-vue/plugin-component': 1.0.0 - '@mdit-vue/plugin-frontmatter': 1.0.0 - '@mdit-vue/plugin-headers': 1.0.0 - '@mdit-vue/plugin-sfc': 1.0.0 - '@mdit-vue/plugin-title': 1.0.0 - '@mdit-vue/plugin-toc': 1.0.0 - '@mdit-vue/shared': 1.0.0 - '@mdit-vue/types': 1.0.0 - '@types/markdown-it': 13.0.7 - '@types/markdown-it-emoji': 2.0.4 - '@vuepress/shared': 2.0.0-rc.0 - '@vuepress/utils': 2.0.0-rc.0 - markdown-it: 13.0.2 - markdown-it-anchor: 8.6.7(@types/markdown-it@13.0.7)(markdown-it@13.0.2) - markdown-it-emoji: 2.0.2 - mdurl: 1.0.1 - transitivePeerDependencies: - - supports-color - dev: true - - /@vuepress/plugin-active-header-links@2.0.0-rc.0: - resolution: {integrity: sha512-UJdXLYNGL5Wjy5YGY8M2QgqT75bZ95EHebbqGi8twBdIJE9O+bM+dPJyYtAk2PIVqFORiw3Hj+PchsNSxdn9+g==} - dependencies: - '@vuepress/client': 2.0.0-rc.0 - '@vuepress/core': 2.0.0-rc.0 - '@vuepress/utils': 2.0.0-rc.0 - ts-debounce: 4.0.0 - vue: 3.3.11 - vue-router: 4.2.5(vue@3.3.11) - transitivePeerDependencies: - - '@vue/composition-api' - - supports-color - - typescript - dev: true + '@vueuse/integrations@10.9.0': + resolution: {integrity: sha512-acK+A01AYdWSvL4BZmCoJAcyHJ6EqhmkQEXbQLwev1MY7NBnS+hcEMx/BzVoR9zKI+UqEPMD9u6PsyAuiTRT4Q==} + peerDependencies: + async-validator: '*' + axios: '*' + change-case: '*' + drauu: '*' + focus-trap: '*' + fuse.js: '*' + idb-keyval: '*' + jwt-decode: '*' + nprogress: '*' + qrcode: '*' + sortablejs: '*' + universal-cookie: '*' + peerDependenciesMeta: + async-validator: + optional: true + axios: + optional: true + change-case: + optional: true + drauu: + optional: true + focus-trap: + optional: true + fuse.js: + optional: true + idb-keyval: + optional: true + jwt-decode: + optional: true + nprogress: + optional: true + qrcode: + optional: true + sortablejs: + optional: true + universal-cookie: + optional: true - /@vuepress/plugin-back-to-top@2.0.0-rc.0: - resolution: {integrity: sha512-6GPfuzV5lkAnR00BxRUhqMXwMWt741alkq2R6bln4N8BneSOwEpX/7vi19MGf232aKdS/Va4pF5p0/nJ8Sed/g==} - dependencies: - '@vuepress/client': 2.0.0-rc.0 - '@vuepress/core': 2.0.0-rc.0 - '@vuepress/utils': 2.0.0-rc.0 - ts-debounce: 4.0.0 - vue: 3.3.11 - transitivePeerDependencies: - - '@vue/composition-api' - - supports-color - - typescript - dev: true - - /@vuepress/plugin-container@2.0.0-rc.0: - resolution: {integrity: sha512-b7vrLN11YE7qiUDPfA3N9P7Z8fupe9Wbcr9KAE/bmfZ9VT4d6kzpVyoU7XHi99XngitsmnkaXP4aBvBF1c2AnA==} - dependencies: - '@types/markdown-it': 13.0.7 - '@vuepress/core': 2.0.0-rc.0 - '@vuepress/markdown': 2.0.0-rc.0 - '@vuepress/shared': 2.0.0-rc.0 - '@vuepress/utils': 2.0.0-rc.0 - markdown-it: 13.0.2 - markdown-it-container: 3.0.0 - transitivePeerDependencies: - - 
'@vue/composition-api' - - supports-color - - typescript - dev: true - - /@vuepress/plugin-docsearch@2.0.0-rc.0(@algolia/client-search@4.21.1)(search-insights@2.13.0): - resolution: {integrity: sha512-bFbb+RxNyoLVbojv3Fh3UNfMmx9tszdae5ni9nG2xa05giCRwGKT0wFG3Q6n0a9kIQ6V7z3PjCj9x1k4SALPEA==} - dependencies: - '@docsearch/css': 3.5.2 - '@docsearch/js': 3.5.2(@algolia/client-search@4.21.1)(search-insights@2.13.0) - '@docsearch/react': 3.5.2(@algolia/client-search@4.21.1)(search-insights@2.13.0) - '@vuepress/client': 2.0.0-rc.0 - '@vuepress/core': 2.0.0-rc.0 - '@vuepress/shared': 2.0.0-rc.0 - '@vuepress/utils': 2.0.0-rc.0 - '@vueuse/core': 10.7.0(vue@3.3.11) - ts-debounce: 4.0.0 - vue: 3.3.11 - vue-router: 4.2.5(vue@3.3.11) - transitivePeerDependencies: - - '@algolia/client-search' - - '@types/react' - - '@vue/composition-api' - - react - - react-dom - - search-insights - - supports-color - - typescript - dev: true + '@vueuse/metadata@10.9.0': + resolution: {integrity: sha512-iddNbg3yZM0X7qFY2sAotomgdHK7YJ6sKUvQqbvwnf7TmaVPxS4EJydcNsVejNdS8iWCtDk+fYXr7E32nyTnGA==} - /@vuepress/plugin-external-link-icon@2.0.0-rc.0: - resolution: {integrity: sha512-o8bk0oIlj/BkKc02mq91XLDloq1VOz/8iNcRwKAeqBE6svXzdYiyoTGet0J/4iPuAetsCn75S57W6RioDJHMnQ==} - dependencies: - '@vuepress/client': 2.0.0-rc.0 - '@vuepress/core': 2.0.0-rc.0 - '@vuepress/markdown': 2.0.0-rc.0 - '@vuepress/shared': 2.0.0-rc.0 - '@vuepress/utils': 2.0.0-rc.0 - vue: 3.3.11 - transitivePeerDependencies: - - '@vue/composition-api' - - supports-color - - typescript - dev: true + '@vueuse/shared@10.9.0': + resolution: {integrity: sha512-Uud2IWncmAfJvRaFYzv5OHDli+FbOzxiVEQdLCKQKLyhz94PIyFC3CHcH7EDMwIn8NPtD06+PNbC/PiO0LGLtw==} - /@vuepress/plugin-git@2.0.0-rc.0: - resolution: {integrity: sha512-r7UF77vZxaYeJQLygzodKv+15z3/dTLuGp4VcYO21W6BlJZvd4u9zqgiV7A//bZQvK4+3Hprylr0G3KgXqMewA==} - dependencies: - '@vuepress/core': 2.0.0-rc.0 - '@vuepress/utils': 2.0.0-rc.0 - execa: 8.0.1 - transitivePeerDependencies: - - '@vue/composition-api' - - supports-color - - typescript - dev: true + algoliasearch@4.23.3: + resolution: {integrity: sha512-Le/3YgNvjW9zxIQMRhUHuhiUjAlKY/zsdZpfq4dlLqg6mEm0nL6yk+7f2hDOtLpxsgE4jSzDmvHL7nXdBp5feg==} - /@vuepress/plugin-google-analytics@2.0.0-rc.15(vuepress@2.0.0-rc.0): - resolution: {integrity: sha512-ovMpOYz0fFoVcRVgyv+7qnU7LPnovocbtYPk+oPspd9hMedYXjAMeyxOYYnN/MiC6+DSKshDqStTfjVpW3x9DQ==} - peerDependencies: - vuepress: 2.0.0-rc.8 - dependencies: - vuepress: 2.0.0-rc.0(@vuepress/client@2.0.0-rc.0)(vue@3.3.11) - dev: true + ansi-regex@5.0.1: + resolution: {integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==} + engines: {node: '>=8'} - /@vuepress/plugin-medium-zoom@2.0.0-rc.0: - resolution: {integrity: sha512-peU1lYKsmKikIe/0pkJuHzD/k6xW2TuqdvKVhV4I//aOE1WxsREKJ4ACcldmoIsnysoDydAUqKT6xDPGyDsH2g==} - dependencies: - '@vuepress/client': 2.0.0-rc.0 - '@vuepress/core': 2.0.0-rc.0 - '@vuepress/utils': 2.0.0-rc.0 - medium-zoom: 1.1.0 - vue: 3.3.11 - transitivePeerDependencies: - - '@vue/composition-api' - - supports-color - - typescript - dev: true + ansi-regex@6.0.1: + resolution: {integrity: sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==} + engines: {node: '>=12'} - /@vuepress/plugin-nprogress@2.0.0-rc.0: - resolution: {integrity: sha512-rI+eK0Pg1KiZE+7hGmDUeSbgdWCid8Vnw0hFKNmjinDzGVmx4m03M6qfvclsI0SryH+lR7itZGLaR4gbTlrz/w==} - dependencies: - '@vuepress/client': 2.0.0-rc.0 - '@vuepress/core': 2.0.0-rc.0 - '@vuepress/utils': 
2.0.0-rc.0 - vue: 3.3.11 - vue-router: 4.2.5(vue@3.3.11) - transitivePeerDependencies: - - '@vue/composition-api' - - supports-color - - typescript - dev: true + ansi-styles@4.3.0: + resolution: {integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==} + engines: {node: '>=8'} - /@vuepress/plugin-palette@2.0.0-rc.0: - resolution: {integrity: sha512-wW70SCp3/K7s1lln5YQsBGTog2WXaQv5piva5zhXcQ47YGf4aAJpThDa5C/ot4HhkPOKn8Iz5s0ckxXZzW8DIg==} - dependencies: - '@vuepress/core': 2.0.0-rc.0 - '@vuepress/utils': 2.0.0-rc.0 - chokidar: 3.5.3 - transitivePeerDependencies: - - '@vue/composition-api' - - supports-color - - typescript - dev: true + ansi-styles@6.2.1: + resolution: {integrity: sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==} + engines: {node: '>=12'} - /@vuepress/plugin-prismjs@2.0.0-rc.0: - resolution: {integrity: sha512-c5WRI7+FhVjdbymOKQ8F2KY/Bnv7aQtWScVk8vCMUimNi7v7Wff/A/i3KSFNz/tge3LxiAeH/Dc2WS/OnQXwCg==} - dependencies: - '@vuepress/core': 2.0.0-rc.0 - prismjs: 1.29.0 - transitivePeerDependencies: - - '@vue/composition-api' - - supports-color - - typescript - dev: true + anymatch@3.1.3: + resolution: {integrity: sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==} + engines: {node: '>= 8'} - /@vuepress/plugin-theme-data@2.0.0-rc.0: - resolution: {integrity: sha512-FXY3/Ml+rM6gNKvwdBF6vKAcwnSvtXCzKgQwJAw3ppQTKUkLcbOxqM+h4d8bzHWAAvdnEvQFug5uEZgWllBQbA==} - dependencies: - '@vue/devtools-api': 6.5.1 - '@vuepress/client': 2.0.0-rc.0 - '@vuepress/core': 2.0.0-rc.0 - '@vuepress/shared': 2.0.0-rc.0 - '@vuepress/utils': 2.0.0-rc.0 - vue: 3.3.11 - transitivePeerDependencies: - - '@vue/composition-api' - - supports-color - - typescript - dev: true + arg@5.0.2: + resolution: {integrity: sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg==} - /@vuepress/shared@2.0.0-rc.0: - resolution: {integrity: sha512-ikdSfjRv5LGM1iv4HHwF9P6gqTjaFCXKPK+hzlkHFHNZO1GLqk7/BPc4F51tAG1s8TcLhUZc+54LrfgS7PkXXA==} - dependencies: - '@mdit-vue/types': 1.0.0 - '@vue/shared': 3.3.11 - dev: true + argparse@2.0.1: + resolution: {integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==} - /@vuepress/theme-default@2.0.0-rc.0: - resolution: {integrity: sha512-I8Y08evDmMuD1jh3NftPpFFSlCWOizQDJLjN7EQwcg7jiAP4A7c2REo6nBN2EmP24Mi7UrRM+RnytHR5V+pElA==} - peerDependencies: - sass-loader: ^13.3.2 - peerDependenciesMeta: - sass-loader: - optional: true - dependencies: - '@vuepress/client': 2.0.0-rc.0 - '@vuepress/core': 2.0.0-rc.0 - '@vuepress/plugin-active-header-links': 2.0.0-rc.0 - '@vuepress/plugin-back-to-top': 2.0.0-rc.0 - '@vuepress/plugin-container': 2.0.0-rc.0 - '@vuepress/plugin-external-link-icon': 2.0.0-rc.0 - '@vuepress/plugin-git': 2.0.0-rc.0 - '@vuepress/plugin-medium-zoom': 2.0.0-rc.0 - '@vuepress/plugin-nprogress': 2.0.0-rc.0 - '@vuepress/plugin-palette': 2.0.0-rc.0 - '@vuepress/plugin-prismjs': 2.0.0-rc.0 - '@vuepress/plugin-theme-data': 2.0.0-rc.0 - '@vuepress/shared': 2.0.0-rc.0 - '@vuepress/utils': 2.0.0-rc.0 - '@vueuse/core': 10.7.0(vue@3.3.11) - sass: 1.69.5 - vue: 3.3.11 - vue-router: 4.2.5(vue@3.3.11) - transitivePeerDependencies: - - '@vue/composition-api' - - supports-color - - typescript - dev: true - - /@vuepress/utils@2.0.0-rc.0: - resolution: {integrity: sha512-Q1ay/woClDHcW0Qe91KsnHoupdNN0tp/vhjvVLuAYxlv/1Obii7hz9WFcajyyGEhmsYxdvG2sGmcxFA02tuKkw==} - dependencies: - 
'@types/debug': 4.1.12 - '@types/fs-extra': 11.0.4 - '@types/hash-sum': 1.0.2 - '@vuepress/shared': 2.0.0-rc.0 - debug: 4.3.4 - fs-extra: 11.2.0 - globby: 14.0.0 - hash-sum: 2.0.0 - ora: 7.0.1 - picocolors: 1.0.0 - upath: 2.0.1 - transitivePeerDependencies: - - supports-color - dev: true + balanced-match@1.0.2: + resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} - /@vueuse/core@10.7.0(vue@3.3.11): - resolution: {integrity: sha512-4EUDESCHtwu44ZWK3Gc/hZUVhVo/ysvdtwocB5vcauSV4B7NiGY5972WnsojB3vRNdxvAt7kzJWE2h9h7C9d5w==} - dependencies: - '@types/web-bluetooth': 0.0.20 - '@vueuse/metadata': 10.7.0 - '@vueuse/shared': 10.7.0(vue@3.3.11) - vue-demi: 0.14.6(vue@3.3.11) - transitivePeerDependencies: - - '@vue/composition-api' - - vue - dev: true - - /@vueuse/metadata@10.7.0: - resolution: {integrity: sha512-GlaH7tKP2iBCZ3bHNZ6b0cl9g0CJK8lttkBNUX156gWvNYhTKEtbweWLm9rxCPIiwzYcr/5xML6T8ZUEt+DkvA==} - dev: true - - /@vueuse/shared@10.7.0(vue@3.3.11): - resolution: {integrity: sha512-kc00uV6CiaTdc3i1CDC4a3lBxzaBE9AgYNtFN87B5OOscqeWElj/uza8qVDmk7/U8JbqoONLbtqiLJ5LGRuqlw==} - dependencies: - vue-demi: 0.14.6(vue@3.3.11) - transitivePeerDependencies: - - '@vue/composition-api' - - vue - dev: true - - /algoliasearch@4.21.1: - resolution: {integrity: sha512-Ym0MGwOcjQhZ+s1N/j0o94g3vQD0MzNpWsfJLyPVCt0zHflbi0DwYX+9GPmTJ4BzegoxWMyCPgcmpd3R+VlOzQ==} - dependencies: - '@algolia/cache-browser-local-storage': 4.21.1 - '@algolia/cache-common': 4.21.1 - '@algolia/cache-in-memory': 4.21.1 - '@algolia/client-account': 4.21.1 - '@algolia/client-analytics': 4.21.1 - '@algolia/client-common': 4.21.1 - '@algolia/client-personalization': 4.21.1 - '@algolia/client-search': 4.21.1 - '@algolia/logger-common': 4.21.1 - '@algolia/logger-console': 4.21.1 - '@algolia/requester-browser-xhr': 4.21.1 - '@algolia/requester-common': 4.21.1 - '@algolia/requester-node-http': 4.21.1 - '@algolia/transporter': 4.21.1 - dev: true - - /ansi-regex@6.0.1: - resolution: {integrity: sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==} - engines: {node: '>=12'} - dev: true - - /anymatch@3.1.3: - resolution: {integrity: sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==} - engines: {node: '>= 8'} - dependencies: - normalize-path: 3.0.0 - picomatch: 2.3.1 - dev: true - - /arg@5.0.2: - resolution: {integrity: sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg==} - dev: true - - /argparse@1.0.10: - resolution: {integrity: sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==} - dependencies: - sprintf-js: 1.0.3 - dev: true - - /argparse@2.0.1: - resolution: {integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==} - dev: true - - /autoprefixer@10.4.16(postcss@8.4.32): - resolution: {integrity: sha512-7vd3UC6xKp0HLfua5IjZlcXvGAGy7cBAXTg2lyQ/8WpNhd6SiZ8Be+xm3FyBSYJx5GKcpRCzBh7RH4/0dnY+uQ==} - engines: {node: ^10 || ^12 || >=14} - hasBin: true - peerDependencies: - postcss: ^8.1.0 - dependencies: - browserslist: 4.22.2 - caniuse-lite: 1.0.30001568 - fraction.js: 4.3.7 - normalize-range: 0.1.2 - picocolors: 1.0.0 - postcss: 8.4.32 - postcss-value-parser: 4.2.0 - dev: true - - /base64-js@1.5.1: - resolution: {integrity: sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==} - dev: true - - /binary-extensions@2.2.0: - 
resolution: {integrity: sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==} + binary-extensions@2.3.0: + resolution: {integrity: sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==} engines: {node: '>=8'} - dev: true - /bl@5.1.0: - resolution: {integrity: sha512-tv1ZJHLfTDnXE6tMHv73YgSJaWR2AFuPwMntBe7XL/GBFHnT0CLnsHMogfk5+GzCDC5ZWarSCYaIGATZt9dNsQ==} - dependencies: - buffer: 6.0.3 - inherits: 2.0.4 - readable-stream: 3.6.2 - dev: true - - /boolbase@1.0.0: - resolution: {integrity: sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==} - dev: true + brace-expansion@2.0.1: + resolution: {integrity: sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==} - /braces@3.0.2: + braces@3.0.2: resolution: {integrity: sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==} engines: {node: '>=8'} - dependencies: - fill-range: 7.0.1 - dev: true - /browserslist@4.22.2: - resolution: {integrity: sha512-0UgcrvQmBDvZHFGdYUehrCNIazki7/lUP3kkoi/r3YB2amZbFM9J43ZRkJTXBUZK4gmx56+Sqk9+Vs9mwZx9+A==} - engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7} - hasBin: true - dependencies: - caniuse-lite: 1.0.30001568 - electron-to-chromium: 1.4.611 - node-releases: 2.0.14 - update-browserslist-db: 1.0.13(browserslist@4.22.2) - dev: true - - /buffer@6.0.3: - resolution: {integrity: sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==} - dependencies: - base64-js: 1.5.1 - ieee754: 1.2.1 - dev: true - - /cac@6.7.14: - resolution: {integrity: sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==} - engines: {node: '>=8'} - dev: true - - /caniuse-lite@1.0.30001568: - resolution: {integrity: sha512-vSUkH84HontZJ88MiNrOau1EBrCqEQYgkC5gIySiDlpsm8sGVrhU7Kx4V6h0tnqaHzIHZv08HlJIwPbL4XL9+A==} - dev: true - - /chalk@5.3.0: - resolution: {integrity: sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w==} - engines: {node: ^12.17.0 || ^14.13 || >=16.0.0} - dev: true - - /cheerio-select@2.1.0: - resolution: {integrity: sha512-9v9kG0LvzrlcungtnJtpGNxY+fzECQKhK4EGJX2vByejiMX84MFNQw4UxPJl3bFbTMw+Dfs37XaIkCwTZfLh4g==} - dependencies: - boolbase: 1.0.0 - css-select: 5.1.0 - css-what: 6.1.0 - domelementtype: 2.3.0 - domhandler: 5.0.3 - domutils: 3.1.0 - dev: true - - /cheerio@1.0.0-rc.12: - resolution: {integrity: sha512-VqR8m68vM46BNnuZ5NtnGBKIE/DfN0cRIzg9n40EIq9NOv90ayxLBXA8fXC5gquFRGJSTRqBq25Jt2ECLR431Q==} - engines: {node: '>= 6'} - dependencies: - cheerio-select: 2.1.0 - dom-serializer: 2.0.0 - domhandler: 5.0.3 - domutils: 3.1.0 - htmlparser2: 8.0.2 - parse5: 7.1.2 - parse5-htmlparser2-tree-adapter: 7.0.0 - dev: true - - /chokidar@3.5.3: - resolution: {integrity: sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==} + chokidar@3.6.0: + resolution: {integrity: sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==} engines: {node: '>= 8.10.0'} - dependencies: - anymatch: 3.1.3 - braces: 3.0.2 - glob-parent: 5.1.2 - is-binary-path: 2.1.0 - is-glob: 4.0.3 - normalize-path: 3.0.0 - readdirp: 3.6.0 - optionalDependencies: - fsevents: 2.3.3 - dev: true - /cli-cursor@4.0.0: - resolution: {integrity: sha512-VGtlMu3x/4DOtIUwEkRezxUZ2lBacNJCHash0N0WeZDBS+7Ux1dm3XWAgWYxLJFMMdOeXMHXorshEFhbMSGelg==} - engines: 
{node: ^12.20.0 || ^14.13.1 || >=16.0.0} - dependencies: - restore-cursor: 4.0.0 - dev: true + color-convert@2.0.1: + resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==} + engines: {node: '>=7.0.0'} - /cli-spinners@2.9.2: - resolution: {integrity: sha512-ywqV+5MmyL4E7ybXgKys4DugZbX0FC6LnwrhjuykIjnK9k8OQacQ7axGKnjDXWNhns0xot3bZI5h55H8yo9cJg==} - engines: {node: '>=6'} - dev: true + color-name@1.1.4: + resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==} - /connect-history-api-fallback@2.0.0: - resolution: {integrity: sha512-U73+6lQFmfiNPrYbXqr6kZ1i1wiRqXnp2nhMsINseWXO8lDau0LGEffJ8kQi4EjLZympVgRdvqjAgiZ1tgzDDA==} - engines: {node: '>=0.8'} - dev: true + commander@12.0.0: + resolution: {integrity: sha512-MwVNWlYjDTtOjX5PiD7o5pK0UrFU/OYgcJfjjK4RaHZETNtjJqrZa9Y9ds88+A+f+d5lv+561eZ+yCKoS3gbAA==} + engines: {node: '>=18'} - /cross-spawn@7.0.3: + cross-spawn@7.0.3: resolution: {integrity: sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==} engines: {node: '>= 8'} - dependencies: - path-key: 3.1.1 - shebang-command: 2.0.0 - which: 2.0.2 - dev: true - - /css-select@5.1.0: - resolution: {integrity: sha512-nwoRF1rvRRnnCqqY7updORDsuqKzqYJ28+oSMaJMMgOauh3fvwHqMS7EZpIPqK8GL+g9mKxF1vP/ZjSeNjEVHg==} - dependencies: - boolbase: 1.0.0 - css-what: 6.1.0 - domhandler: 5.0.3 - domutils: 3.1.0 - nth-check: 2.1.1 - dev: true - - /css-what@6.1.0: - resolution: {integrity: sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw==} - engines: {node: '>= 6'} - dev: true - /csstype@3.1.3: + csstype@3.1.3: resolution: {integrity: sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==} - dev: true - - /dayjs@1.11.10: - resolution: {integrity: sha512-vjAczensTgRcqDERK0SR2XMwsF/tSvnvlv6VcF2GIhg6Sx4yOIt/irsr1RDJsKiIyBzJDpCoXiWWq28MqH2cnQ==} - dev: true - - /debug@4.3.4: - resolution: {integrity: sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==} - engines: {node: '>=6.0'} - peerDependencies: - supports-color: '*' - peerDependenciesMeta: - supports-color: - optional: true - dependencies: - ms: 2.1.2 - dev: true - - /dom-serializer@2.0.0: - resolution: {integrity: sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==} - dependencies: - domelementtype: 2.3.0 - domhandler: 5.0.3 - entities: 4.5.0 - dev: true - - /domelementtype@2.3.0: - resolution: {integrity: sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==} - dev: true - - /domhandler@5.0.3: - resolution: {integrity: sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==} - engines: {node: '>= 4'} - dependencies: - domelementtype: 2.3.0 - dev: true - /domutils@3.1.0: - resolution: {integrity: sha512-H78uMmQtI2AhgDJjWeQmHwJJ2bLPD3GMmO7Zja/ZZh84wkm+4ut+IUnUdRa8uCGX88DiVx1j6FRe1XfxEgjEZA==} - dependencies: - dom-serializer: 2.0.0 - domelementtype: 2.3.0 - domhandler: 5.0.3 - dev: true + deep-extend@0.6.0: + resolution: {integrity: sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==} + engines: {node: '>=4.0.0'} - /eastasianwidth@0.2.0: + eastasianwidth@0.2.0: resolution: {integrity: sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==} - dev: true - - 
/electron-to-chromium@1.4.611: - resolution: {integrity: sha512-ZtRpDxrjHapOwxtv+nuth5ByB8clyn8crVynmRNGO3wG3LOp8RTcyZDqwaI6Ng6y8FCK2hVZmJoqwCskKbNMaw==} - dev: true - /emoji-regex@10.3.0: - resolution: {integrity: sha512-QpLs9D9v9kArv4lfDEgg1X/gN5XLnf/A6l9cs8SPZLRZR3ZkY9+kwIQTxm+fsSej5UMYGE8fdoaZVIBlqG0XTw==} - dev: true + emoji-regex@8.0.0: + resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==} - /entities@3.0.1: - resolution: {integrity: sha512-WiyBqoomrwMdFG1e0kqvASYfnlb0lp8M5o5Fw2OFq1hNZxxcNk8Ik0Xm7LxzBhuidnZB/UtBqVCgUz3kBOP51Q==} - engines: {node: '>=0.12'} - dev: true + emoji-regex@9.2.2: + resolution: {integrity: sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==} - /entities@4.5.0: + entities@4.5.0: resolution: {integrity: sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==} engines: {node: '>=0.12'} - dev: true - - /envinfo@7.11.0: - resolution: {integrity: sha512-G9/6xF1FPbIw0TtalAMaVPpiq2aDEuKLXM314jPVAO9r2fo2a4BLqMNkmRS7O/xPPZ+COAhGIz3ETvHEV3eUcg==} - engines: {node: '>=4'} - hasBin: true - dev: true - /esbuild@0.19.9: - resolution: {integrity: sha512-U9CHtKSy+EpPsEBa+/A2gMs/h3ylBC0H0KSqIg7tpztHerLi6nrrcoUJAkNCEPumx8yJ+Byic4BVwHgRbN0TBg==} + esbuild@0.20.2: + resolution: {integrity: sha512-WdOOppmUNU+IbZ0PaDiTst80zjnrOkyJNHoKupIcVyU8Lvla3Ugx94VzkQ32Ijqd7UhHJy75gNWDMUekcrSJ6g==} engines: {node: '>=12'} hasBin: true - requiresBuild: true - optionalDependencies: - '@esbuild/android-arm': 0.19.9 - '@esbuild/android-arm64': 0.19.9 - '@esbuild/android-x64': 0.19.9 - '@esbuild/darwin-arm64': 0.19.9 - '@esbuild/darwin-x64': 0.19.9 - '@esbuild/freebsd-arm64': 0.19.9 - '@esbuild/freebsd-x64': 0.19.9 - '@esbuild/linux-arm': 0.19.9 - '@esbuild/linux-arm64': 0.19.9 - '@esbuild/linux-ia32': 0.19.9 - '@esbuild/linux-loong64': 0.19.9 - '@esbuild/linux-mips64el': 0.19.9 - '@esbuild/linux-ppc64': 0.19.9 - '@esbuild/linux-riscv64': 0.19.9 - '@esbuild/linux-s390x': 0.19.9 - '@esbuild/linux-x64': 0.19.9 - '@esbuild/netbsd-x64': 0.19.9 - '@esbuild/openbsd-x64': 0.19.9 - '@esbuild/sunos-x64': 0.19.9 - '@esbuild/win32-arm64': 0.19.9 - '@esbuild/win32-ia32': 0.19.9 - '@esbuild/win32-x64': 0.19.9 - dev: true - - /escalade@3.1.1: - resolution: {integrity: sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==} - engines: {node: '>=6'} - dev: true - - /esprima@4.0.1: - resolution: {integrity: sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==} - engines: {node: '>=4'} - hasBin: true - dev: true - /estree-walker@2.0.2: + estree-walker@2.0.2: resolution: {integrity: sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==} - dev: true - - /execa@8.0.1: - resolution: {integrity: sha512-VyhnebXciFV2DESc+p6B+y0LjSm0krU4OgJN44qFAhBY0TJ+1V61tYD2+wHusZ6F9n5K+vl8k0sTy7PEfV4qpg==} - engines: {node: '>=16.17'} - dependencies: - cross-spawn: 7.0.3 - get-stream: 8.0.1 - human-signals: 5.0.0 - is-stream: 3.0.0 - merge-stream: 2.0.0 - npm-run-path: 5.1.0 - onetime: 6.0.0 - signal-exit: 4.1.0 - strip-final-newline: 3.0.0 - dev: true - - /extend-shallow@2.0.1: - resolution: {integrity: sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==} - engines: {node: '>=0.10.0'} - dependencies: - is-extendable: 0.1.1 - dev: true - /fast-glob@3.3.2: - resolution: {integrity: 
sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==} + fast-glob@3.3.1: + resolution: {integrity: sha512-kNFPyjhh5cKjrUltxs+wFx+ZkbRaxxmZ+X0ZU31SOsxCEtP9VPgtq2teZw1DebupL5GmDaNQ6yKMMVcM41iqDg==} engines: {node: '>=8.6.0'} - dependencies: - '@nodelib/fs.stat': 2.0.5 - '@nodelib/fs.walk': 1.2.8 - glob-parent: 5.1.2 - merge2: 1.4.1 - micromatch: 4.0.5 - dev: true - - /fastq@1.15.0: - resolution: {integrity: sha512-wBrocU2LCXXa+lWBt8RoIRD89Fi8OdABODa/kEnyeyjS5aZO5/GNvI5sEINADqP/h8M29UHTHUb53sUu5Ihqdw==} - dependencies: - reusify: 1.0.4 - dev: true - /fflate@0.8.1: - resolution: {integrity: sha512-/exOvEuc+/iaUm105QIiOt4LpBdMTWsXxqR0HDF35vx3fmaKzw7354gTilCh5rkzEt8WYyG//ku3h3nRmd7CHQ==} - dev: true + fastq@1.17.1: + resolution: {integrity: sha512-sRVD3lWVIXWg6By68ZN7vho9a1pQcN/WBFaAAsDDFzlJjvoGx0P8z7V1t72grFJfJhu3YPZBuu25f7Kaw2jN1w==} - /fill-range@7.0.1: + fill-range@7.0.1: resolution: {integrity: sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==} engines: {node: '>=8'} - dependencies: - to-regex-range: 5.0.1 - dev: true - /fraction.js@4.3.7: - resolution: {integrity: sha512-ZsDfxO51wGAXREY55a7la9LScWpwv9RxIrYABrlvOFBlH/ShPnrtsXeuUIfXKKOVicNxQ+o8JTbJvjS4M89yew==} - dev: true + focus-trap@7.5.4: + resolution: {integrity: sha512-N7kHdlgsO/v+iD/dMoJKtsSqs5Dz/dXZVebRgJw23LDk+jMi/974zyiOYDziY2JPp8xivq9BmUGwIJMiuSBi7w==} - /fs-extra@11.2.0: - resolution: {integrity: sha512-PmDi3uwK5nFuXh7XDTlVnS17xJS7vW36is2+w3xcv8SVxiB4NyATf4ctkVY5bkSjX0Y4nbvZCq1/EjtEyr9ktw==} - engines: {node: '>=14.14'} - dependencies: - graceful-fs: 4.2.11 - jsonfile: 6.1.0 - universalify: 2.0.1 - dev: true + foreground-child@3.1.1: + resolution: {integrity: sha512-TMKDUnIte6bfb5nWv7V/caI169OHgvwjb7V4WkeUvbQQdjr5rWKqHFiKWb/fcOwB+CzBT+qbWjvj+DVwRskpIg==} + engines: {node: '>=14'} + + fsevents@2.3.2: + resolution: {integrity: sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==} + engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} + os: [darwin] - /fsevents@2.3.3: + fsevents@2.3.3: resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==} engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} os: [darwin] - requiresBuild: true - dev: true - optional: true - /get-stream@8.0.1: - resolution: {integrity: sha512-VaUJspBffn/LMCJVoMvSAdmscJyS1auj5Zulnn5UoYcY531UWmdwhRWkcGKnGU93m5HSXP9LP2usOryrBtQowA==} - engines: {node: '>=16'} - dev: true + get-stdin@9.0.0: + resolution: {integrity: sha512-dVKBjfWisLAicarI2Sf+JuBE/DghV4UzNAVe9yhEJuzeREd3JhOTE9cUaJTeSa77fsbQUK3pcOpJfM59+VKZaA==} + engines: {node: '>=12'} - /glob-parent@5.1.2: + glob-parent@5.1.2: resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==} engines: {node: '>= 6'} - dependencies: - is-glob: 4.0.3 - dev: true - - /globby@14.0.0: - resolution: {integrity: sha512-/1WM/LNHRAOH9lZta77uGbq0dAEQM+XjNesWwhlERDVenqothRbnzTrL3/LrIoEPPjeUHC3vrS6TwoyxeHs7MQ==} - engines: {node: '>=18'} - dependencies: - '@sindresorhus/merge-streams': 1.0.0 - fast-glob: 3.3.2 - ignore: 5.3.0 - path-type: 5.0.0 - slash: 5.1.0 - unicorn-magic: 0.1.0 - dev: true - - /graceful-fs@4.2.11: - resolution: {integrity: sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==} - dev: true - - /gray-matter@4.0.3: - resolution: {integrity: sha512-5v6yZd4JK3eMI3FqqCouswVqwugaA9r4dNZB1wwcmrD02QkV5H0y7XBQW8QwQqEaZY1pM9aqORSORhJRdNK44Q==} 
- engines: {node: '>=6.0'} - dependencies: - js-yaml: 3.14.1 - kind-of: 6.0.3 - section-matter: 1.0.0 - strip-bom-string: 1.0.0 - dev: true - - /hash-sum@2.0.0: - resolution: {integrity: sha512-WdZTbAByD+pHfl/g9QSsBIIwy8IT+EsPiKDs0KNX+zSHhdDLFKdZu0BQHljvO+0QI/BasbMSUa8wYNCZTvhslg==} - dev: true - - /htmlparser2@8.0.2: - resolution: {integrity: sha512-GYdjWKDkbRLkZ5geuHs5NY1puJ+PXwP7+fHPRz06Eirsb9ugf6d8kkXav6ADhcODhFFPMIXyxkxSuMf3D6NCFA==} - dependencies: - domelementtype: 2.3.0 - domhandler: 5.0.3 - domutils: 3.1.0 - entities: 4.5.0 - dev: true - /human-signals@5.0.0: - resolution: {integrity: sha512-AXcZb6vzzrFAUE61HnN4mpLqd/cSIwNQjtNWR0euPm6y0iqx3G4gOXaIDdtdDwZmhwe82LA6+zinmW4UBWVePQ==} - engines: {node: '>=16.17.0'} - dev: true + glob@10.3.15: + resolution: {integrity: sha512-0c6RlJt1TICLyvJYIApxb8GsXoai0KUP7AxKKAtsYXdgJR1mGEUa7DgwShbdk1nly0PYoZj01xd4hzbq3fsjpw==} + engines: {node: '>=16 || 14 >=14.18'} + hasBin: true - /ieee754@1.2.1: - resolution: {integrity: sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==} - dev: true + hookable@5.5.3: + resolution: {integrity: sha512-Yc+BQe8SvoXH1643Qez1zqLRmbA5rCL+sSmk6TVos0LWVfNIB7PGncdlId77WzLGSIB5KaWgTaNTs2lNVEI6VQ==} - /ignore@5.3.0: - resolution: {integrity: sha512-g7dmpshy+gD7mh88OC9NwSGTKoc3kyLAZQRU1mt53Aw/vnvfXnbC+F/7F7QoYVKbV+KNvJx8wArewKy1vXMtlg==} + ignore@5.3.1: + resolution: {integrity: sha512-5Fytz/IraMjqpwfd34ke28PTVMjZjJG2MPn5t7OE4eUCUNf8BAa7b5WUS9/Qvr6mwOQS7Mk6vdsMno5he+T8Xw==} engines: {node: '>= 4'} - dev: true - /immutable@4.3.4: - resolution: {integrity: sha512-fsXeu4J4i6WNWSikpI88v/PcVflZz+6kMhUfIwc5SY+poQRPnaf5V7qds6SUyUN3cVxEzuCab7QIoLOQ+DQ1wA==} - dev: true + immutable@4.3.5: + resolution: {integrity: sha512-8eabxkth9gZatlwl5TBuJnCsoTADlL6ftEr7A4qgdaTsPyreilDSnUk57SO+jfKcNtxPa22U5KK6DSeAYhpBJw==} - /inherits@2.0.4: - resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==} - dev: true + ini@4.1.2: + resolution: {integrity: sha512-AMB1mvwR1pyBFY/nSevUX6y8nJWS63/SzUKD3JyQn97s4xgIdgQPT75IRouIiBAN4yLQBUShNYVW0+UG25daCw==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} - /is-binary-path@2.1.0: + is-binary-path@2.1.0: resolution: {integrity: sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==} engines: {node: '>=8'} - dependencies: - binary-extensions: 2.2.0 - dev: true - - /is-extendable@0.1.1: - resolution: {integrity: sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==} - engines: {node: '>=0.10.0'} - dev: true - /is-extglob@2.1.1: + is-extglob@2.1.1: resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==} engines: {node: '>=0.10.0'} - dev: true - /is-glob@4.0.3: + is-fullwidth-code-point@3.0.0: + resolution: {integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==} + engines: {node: '>=8'} + + is-glob@4.0.3: resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==} engines: {node: '>=0.10.0'} - dependencies: - is-extglob: 2.1.1 - dev: true - - /is-interactive@2.0.0: - resolution: {integrity: sha512-qP1vozQRI+BMOPcjFzrjXuQvdak2pHNUMZoeG2eRbiSqyvbEf/wQtEOTOX1guk6E3t36RkaqiSt8A/6YElNxLQ==} - engines: {node: '>=12'} - dev: true - /is-number@7.0.0: + is-number@7.0.0: resolution: {integrity: 
sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==} engines: {node: '>=0.12.0'} - dev: true - - /is-stream@3.0.0: - resolution: {integrity: sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA==} - engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} - dev: true - /is-unicode-supported@1.3.0: - resolution: {integrity: sha512-43r2mRvz+8JRIKnWJ+3j8JtjRKZ6GmjzfaE/qiBJnikNnYv/6bagRJ1kUhNk8R5EX/GkobD+r+sfxCPJsiKBLQ==} - engines: {node: '>=12'} - dev: true - - /isexe@2.0.0: + isexe@2.0.0: resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==} - dev: true - /js-yaml@3.14.1: - resolution: {integrity: sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==} + jackspeak@2.3.6: + resolution: {integrity: sha512-N3yCS/NegsOBokc8GAdM8UcmfsKiSS8cipheD/nivzr700H+nsMOxJjQnvwOcRYVuFkdH0wGUvW2WbXGmrZGbQ==} + engines: {node: '>=14'} + + js-yaml@4.1.0: + resolution: {integrity: sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==} hasBin: true - dependencies: - argparse: 1.0.10 - esprima: 4.0.1 - dev: true - /jsonfile@6.1.0: - resolution: {integrity: sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==} - dependencies: - universalify: 2.0.1 - optionalDependencies: - graceful-fs: 4.2.11 - dev: true + jsonc-parser@3.2.1: + resolution: {integrity: sha512-AilxAyFOAcK5wA1+LeaySVBrHsGQvUFCDWXKpZjzaL0PqW+xfBOttn8GNtWKFWqneyMZj41MWF9Kl6iPWLwgOA==} - /kind-of@6.0.3: - resolution: {integrity: sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==} + jsonpointer@5.0.1: + resolution: {integrity: sha512-p/nXbhSEcu3pZRdkW1OfJhpsVtW1gd4Wa1fnQc9YLiTfAjn0312eMKimbdIQzuZl9aa9xUGaRlP9T/CJE/ditQ==} engines: {node: '>=0.10.0'} - dev: true - - /lilconfig@3.0.0: - resolution: {integrity: sha512-K2U4W2Ff5ibV7j7ydLr+zLAkIg5JJ4lPn1Ltsdt+Tz/IjQ8buJ55pZAxoP34lqIiwtF9iAvtLv3JGv7CAyAg+g==} - engines: {node: '>=14'} - dev: true - - /linkify-it@4.0.1: - resolution: {integrity: sha512-C7bfi1UZmoj8+PQx22XyeXCuBlokoyWQL5pWSP+EI6nzRylyThouddufc2c1NDIcP9k5agmN9fLpA7VNJfIiqw==} - dependencies: - uc.micro: 1.0.6 - dev: true - - /log-symbols@5.1.0: - resolution: {integrity: sha512-l0x2DvrW294C9uDCoQe1VSU4gf529FkSZ6leBl4TiqZH/e+0R7hSfHQBNut2mNygDgHwvYHfFLn6Oxb3VWj2rA==} - engines: {node: '>=12'} - dependencies: - chalk: 5.3.0 - is-unicode-supported: 1.3.0 - dev: true - /lru-cache@6.0.0: - resolution: {integrity: sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==} - engines: {node: '>=10'} - dependencies: - yallist: 4.0.0 - dev: true + linkify-it@5.0.0: + resolution: {integrity: sha512-5aHCbzQRADcdP+ATqnDuhhJ/MRIqDkZX5pyjFHRRysS8vZ5AbqGEoFIb6pYHPZ+L/OC2Lc+xT8uHVVR5CAK/wQ==} - /magic-string@0.30.5: - resolution: {integrity: sha512-7xlpfBaQaP/T6Vh8MO/EqXSW5En6INHEvEXQiuff7Gku0PWjU3uf6w/j9o7O+SpB5fOAkrI5HeoNgwjEO0pFsA==} - engines: {node: '>=12'} - dependencies: - '@jridgewell/sourcemap-codec': 1.4.15 - dev: true + lru-cache@10.2.2: + resolution: {integrity: sha512-9hp3Vp2/hFQUiIwKo8XCeFVnrg8Pk3TYNPIR7tJADKi5YfcF7vEaK7avFHTlSy3kOKYaJQaalfEo6YuXdceBOQ==} + engines: {node: 14 || >=16.14} - /markdown-it-anchor@8.6.7(@types/markdown-it@13.0.7)(markdown-it@13.0.2): - resolution: {integrity: sha512-FlCHFwNnutLgVTflOYHPW2pPcl2AACqVzExlkGQNsi4CJgqOHN7YTgDd4LuhgN1BFO3TS0vLAruV1Td6dwWPJA==} - peerDependencies: 
- '@types/markdown-it': '*' - markdown-it: '*' - dependencies: - '@types/markdown-it': 13.0.7 - markdown-it: 13.0.2 - dev: true + magic-string@0.30.10: + resolution: {integrity: sha512-iIRwTIf0QKV3UAnYK4PU8uiEc4SRh5jX0mwpIwETPpHdhVM4f53RSwS/vXvN1JhGX+Cs7B8qIq3d6AH49O5fAQ==} - /markdown-it-container@3.0.0: - resolution: {integrity: sha512-y6oKTq4BB9OQuY/KLfk/O3ysFhB3IMYoIWhGJEidXt1NQFocFK2sA2t0NYZAMyMShAGL6x5OPIbrmXPIqaN9rw==} - dev: true + mark.js@8.11.1: + resolution: {integrity: sha512-1I+1qpDt4idfgLQG+BNWmrqku+7/2bi5nLf4YwF8y8zXvmfiTBY3PV3ZibfrjBueCByROpuBjLLFCajqkgYoLQ==} - /markdown-it-emoji@2.0.2: - resolution: {integrity: sha512-zLftSaNrKuYl0kR5zm4gxXjHaOI3FAOEaloKmRA5hijmJZvSjmxcokOLlzycb/HXlUFWzXqpIEoyEMCE4i9MvQ==} - dev: true + markdown-it@14.1.0: + resolution: {integrity: sha512-a54IwgWPaeBCAAsv13YgmALOF1elABB08FxO9i+r4VFk5Vl4pKokRPeX8u5TCgSsPi6ec1otfLjdOpVcgbpshg==} + hasBin: true - /markdown-it@13.0.2: - resolution: {integrity: sha512-FtwnEuuK+2yVU7goGn/MJ0WBZMM9ZPgU9spqlFs7/A/pDIUNSOQZhUgOqYCficIuR2QaFnrt8LHqBWsbTAoI5w==} + markdownlint-cli@0.40.0: + resolution: {integrity: sha512-JXhI3dRQcaqwiFYpPz6VJ7aKYheD53GmTz9y4D/d0F1MbZDGOp9pqKlbOfUX/pHP/iAoeiE4wYRmk8/kjLakxA==} + engines: {node: '>=18'} hasBin: true - dependencies: - argparse: 2.0.1 - entities: 3.0.1 - linkify-it: 4.0.1 - mdurl: 1.0.1 - uc.micro: 1.0.6 - dev: true - /mdurl@1.0.1: - resolution: {integrity: sha512-/sKlQJCBYVY9Ers9hqzKou4H6V5UWc/M59TH2dvkt+84itfnq7uFOMLpOiOS4ujvHP4etln18fmIxA5R5fll0g==} - dev: true + markdownlint-micromark@0.1.9: + resolution: {integrity: sha512-5hVs/DzAFa8XqYosbEAEg6ok6MF2smDj89ztn9pKkCtdKHVdPQuGMH7frFfYL9mLkvfFe4pTyAMffLbjf3/EyA==} + engines: {node: '>=18'} - /medium-zoom@1.1.0: - resolution: {integrity: sha512-ewyDsp7k4InCUp3jRmwHBRFGyjBimKps/AJLjRSox+2q/2H4p/PNpQf+pwONWlJiOudkBXtbdmVbFjqyybfTmQ==} - dev: true + markdownlint@0.34.0: + resolution: {integrity: sha512-qwGyuyKwjkEMOJ10XN6OTKNOVYvOIi35RNvDLNxTof5s8UmyGHlCdpngRHoRGNvQVGuxO3BJ7uNSgdeX166WXw==} + engines: {node: '>=18'} - /merge-stream@2.0.0: - resolution: {integrity: sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==} - dev: true + mdurl@2.0.0: + resolution: {integrity: sha512-Lf+9+2r+Tdp5wXDXC4PcIBjTDtq4UKjCPMQhKIuzpJNW0b96kVqSwW0bT7FhRSfmAiFYgP+SCRvdrDozfh0U5w==} - /merge2@1.4.1: + merge2@1.4.1: resolution: {integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==} engines: {node: '>= 8'} - dev: true - /micromatch@4.0.5: + micromatch@4.0.5: resolution: {integrity: sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==} engines: {node: '>=8.6'} - dependencies: - braces: 3.0.2 - picomatch: 2.3.1 - dev: true - /mimic-fn@2.1.0: - resolution: {integrity: sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==} - engines: {node: '>=6'} - dev: true + minimatch@9.0.4: + resolution: {integrity: sha512-KqWh+VchfxcMNRAJjj2tnsSJdNbHsVgnkBhTNrW7AjVo6OvLtxw8zfT9oLw1JSohlFzJ8jCoTgaoXvJ+kHt6fw==} + engines: {node: '>=16 || 14 >=14.17'} - /mimic-fn@4.0.0: - resolution: {integrity: sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw==} - engines: {node: '>=12'} - dev: true + minimist@1.2.8: + resolution: {integrity: sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==} - /ms@2.1.2: - resolution: {integrity: 
sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==} - dev: true + minipass@7.1.1: + resolution: {integrity: sha512-UZ7eQ+h8ywIRAW1hIEl2AqdwzJucU/Kp59+8kkZeSvafXhZjul247BvIJjEVFVeON6d7lM46XX1HXCduKAS8VA==} + engines: {node: '>=16 || 14 >=14.17'} - /nanoid@3.3.7: + minisearch@6.3.0: + resolution: {integrity: sha512-ihFnidEeU8iXzcVHy74dhkxh/dn8Dc08ERl0xwoMMGqp4+LvRSCgicb+zGqWthVokQKvCSxITlh3P08OzdTYCQ==} + + mitt@3.0.1: + resolution: {integrity: sha512-vKivATfr97l2/QBCYAkXYDbrIWPM2IIKEl7YPhjCvKlG3kE2gm+uBo6nEXK3M5/Ffh/FLpKExzOQ3JJoJGFKBw==} + + nanoid@3.3.7: resolution: {integrity: sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==} engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} hasBin: true - dev: true - /node-releases@2.0.14: - resolution: {integrity: sha512-y10wOWt8yZpqXmOgRo77WaHEmhYQYGNA6y421PKsKYWEK8aW+cqAphborZDhqfyKrbZEN92CN1X2KbafY2s7Yw==} - dev: true - - /normalize-path@3.0.0: + normalize-path@3.0.0: resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==} engines: {node: '>=0.10.0'} - dev: true - - /normalize-range@0.1.2: - resolution: {integrity: sha512-bdok/XvKII3nUpklnV6P2hxtMNrCboOjAcyBuQnWEhO665FwrSNRxU+AqpsyvO6LgGYPspN+lu5CLtw4jPRKNA==} - engines: {node: '>=0.10.0'} - dev: true - - /npm-run-path@5.1.0: - resolution: {integrity: sha512-sJOdmRGrY2sjNTRMbSvluQqg+8X7ZK61yvzBEIDhz4f8z1TZFYABsqjjCBd/0PUNE9M6QDgHJXQkGUEm7Q+l9Q==} - engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} - dependencies: - path-key: 4.0.0 - dev: true - - /nth-check@2.1.1: - resolution: {integrity: sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==} - dependencies: - boolbase: 1.0.0 - dev: true - - /onetime@5.1.2: - resolution: {integrity: sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==} - engines: {node: '>=6'} - dependencies: - mimic-fn: 2.1.0 - dev: true - - /onetime@6.0.0: - resolution: {integrity: sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ==} - engines: {node: '>=12'} - dependencies: - mimic-fn: 4.0.0 - dev: true - - /ora@7.0.1: - resolution: {integrity: sha512-0TUxTiFJWv+JnjWm4o9yvuskpEJLXTcng8MJuKd+SzAzp2o+OP3HWqNhB4OdJRt1Vsd9/mR0oyaEYlOnL7XIRw==} - engines: {node: '>=16'} - dependencies: - chalk: 5.3.0 - cli-cursor: 4.0.0 - cli-spinners: 2.9.2 - is-interactive: 2.0.0 - is-unicode-supported: 1.3.0 - log-symbols: 5.1.0 - stdin-discarder: 0.1.0 - string-width: 6.1.0 - strip-ansi: 7.1.0 - dev: true - - /parse5-htmlparser2-tree-adapter@7.0.0: - resolution: {integrity: sha512-B77tOZrqqfUfnVcOrUvfdLbz4pu4RopLD/4vmu3HUPswwTA8OH0EMW9BlWR2B0RCoiZRAHEUu7IxeP1Pd1UU+g==} - dependencies: - domhandler: 5.0.3 - parse5: 7.1.2 - dev: true - - /parse5@7.1.2: - resolution: {integrity: sha512-Czj1WaSVpaoj0wbhMzLmWD69anp2WH7FXMB9n1Sy8/ZFF9jolSQVMu1Ij5WIyGmcBmhk7EOndpO4mIpihVqAXw==} - dependencies: - entities: 4.5.0 - dev: true - /path-key@3.1.1: + path-key@3.1.1: resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==} engines: {node: '>=8'} - dev: true - /path-key@4.0.0: - resolution: {integrity: sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ==} - engines: {node: '>=12'} - dev: true + path-scurry@1.11.1: + resolution: {integrity: 
sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==} + engines: {node: '>=16 || 14 >=14.18'} - /path-type@5.0.0: - resolution: {integrity: sha512-5HviZNaZcfqP95rwpv+1HDgUamezbqdSYTyzjTvwtJSnIH+3vnbmWsItli8OFEndS984VT55M3jduxZbX351gg==} - engines: {node: '>=12'} - dev: true + perfect-debounce@1.0.0: + resolution: {integrity: sha512-xCy9V055GLEqoFaHoC1SoLIaLmWctgCUaBaWxDZ7/Zx4CTyX7cJQLJOok/orfjZAh9kEYpjJa4d0KcJmCbctZA==} - /picocolors@1.0.0: + picocolors@1.0.0: resolution: {integrity: sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==} - dev: true - /picomatch@2.3.1: + picomatch@2.3.1: resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==} engines: {node: '>=8.6'} - dev: true - /postcss-load-config@4.0.2(postcss@8.4.32): - resolution: {integrity: sha512-bSVhyJGL00wMVoPUzAVAnbEoWyqRxkjv64tUl427SKnPrENtq6hJwUojroMz2VB+Q1edmi4IfrAPpami5VVgMQ==} - engines: {node: '>= 14'} - peerDependencies: - postcss: '>=8.0.9' - ts-node: '>=9.0.0' - peerDependenciesMeta: - postcss: - optional: true - ts-node: - optional: true - dependencies: - lilconfig: 3.0.0 - postcss: 8.4.32 - yaml: 2.3.4 - dev: true + playwright-core@1.44.0: + resolution: {integrity: sha512-ZTbkNpFfYcGWohvTTl+xewITm7EOuqIqex0c7dNZ+aXsbrLj0qI8XlGKfPpipjm0Wny/4Lt4CJsWJk1stVS5qQ==} + engines: {node: '>=16'} + hasBin: true - /postcss-value-parser@4.2.0: - resolution: {integrity: sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==} - dev: true + playwright@1.44.0: + resolution: {integrity: sha512-F9b3GUCLQ3Nffrfb6dunPOkE5Mh68tR7zN32L4jCk4FjQamgesGay7/dAAe1WaMEGV04DkdJfcJzjoCKygUaRQ==} + engines: {node: '>=16'} + hasBin: true - /postcss@8.4.32: - resolution: {integrity: sha512-D/kj5JNu6oo2EIy+XL/26JEDTlIbB8hw85G8StOE6L74RQAVVP5rej6wxCNqyMbR4RkPfqvezVbPw81Ngd6Kcw==} + postcss@8.4.38: + resolution: {integrity: sha512-Wglpdk03BSfXkHoQa3b/oulrotAkwrlLDRSOb9D0bN86FdRyE9lppSp33aHNPgBa0JKCoB+drFLZkQoRRYae5A==} engines: {node: ^10 || ^12 || >=14} - dependencies: - nanoid: 3.3.7 - picocolors: 1.0.0 - source-map-js: 1.0.2 - dev: true - /preact@10.19.3: - resolution: {integrity: sha512-nHHTeFVBTHRGxJXKkKu5hT8C/YWBkPso4/Gad6xuj5dbptt9iF9NZr9pHbPhBrnT2klheu7mHTxTZ/LjwJiEiQ==} - dev: true + preact@10.21.0: + resolution: {integrity: sha512-aQAIxtzWEwH8ou+OovWVSVNlFImL7xUCwJX3YMqA3U8iKCNC34999fFOnWjYNsylgfPgMexpbk7WYOLtKr/mxg==} - /prismjs@1.29.0: - resolution: {integrity: sha512-Kx/1w86q/epKcmte75LNrEoT+lX8pBpavuAbvJWRXar7Hz8jrtF+e3vY751p0R8H9HdArwaCTNDDzHg/ScJK1Q==} + punycode.js@2.3.1: + resolution: {integrity: sha512-uxFIHU0YlHYhDQtV4R9J6a52SLx28BCjT+4ieh7IGbgwVJWO+km431c4yRlREUAsAmt/uMjQUyQHNEPf0M39CA==} engines: {node: '>=6'} - dev: true - /queue-microtask@1.2.3: + queue-microtask@1.2.3: resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==} - dev: true - /readable-stream@3.6.2: - resolution: {integrity: sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==} - engines: {node: '>= 6'} - dependencies: - inherits: 2.0.4 - string_decoder: 1.3.0 - util-deprecate: 1.0.2 - dev: true - - /readdirp@3.6.0: + readdirp@3.6.0: resolution: {integrity: sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==} engines: {node: '>=8.10.0'} - dependencies: - picomatch: 2.3.1 - dev: true - - /restore-cursor@4.0.0: - resolution: 
{integrity: sha512-I9fPXU9geO9bHOt9pHHOhOkYerIMsmVaWB0rA2AI9ERh/+x/i7MV5HKBNrg+ljO5eoPVgCcnFuRjJ9uH6I/3eg==} - engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} - dependencies: - onetime: 5.1.2 - signal-exit: 3.0.7 - dev: true - /reusify@1.0.4: + reusify@1.0.4: resolution: {integrity: sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==} engines: {iojs: '>=1.0.0', node: '>=0.10.0'} - dev: true - /rollup@4.8.0: - resolution: {integrity: sha512-NpsklK2fach5CdI+PScmlE5R4Ao/FSWtF7LkoIrHDxPACY/xshNasPsbpG0VVHxUTbf74tJbVT4PrP8JsJ6ZDA==} + rfdc@1.3.1: + resolution: {integrity: sha512-r5a3l5HzYlIC68TpmYKlxWjmOP6wiPJ1vWv2HeLhNsRZMrCkxeqxiHlQ21oXmQ4F3SiryXBHhAD7JZqvOJjFmg==} + + rollup@4.17.2: + resolution: {integrity: sha512-/9ClTJPByC0U4zNLowV1tMBe8yMEAxewtR3cUNX5BoEpGH3dQEWpJLr6CLp0fPdYRF/fzVOgvDb1zXuakwF5kQ==} engines: {node: '>=18.0.0', npm: '>=8.0.0'} hasBin: true - optionalDependencies: - '@rollup/rollup-android-arm-eabi': 4.8.0 - '@rollup/rollup-android-arm64': 4.8.0 - '@rollup/rollup-darwin-arm64': 4.8.0 - '@rollup/rollup-darwin-x64': 4.8.0 - '@rollup/rollup-linux-arm-gnueabihf': 4.8.0 - '@rollup/rollup-linux-arm64-gnu': 4.8.0 - '@rollup/rollup-linux-arm64-musl': 4.8.0 - '@rollup/rollup-linux-riscv64-gnu': 4.8.0 - '@rollup/rollup-linux-x64-gnu': 4.8.0 - '@rollup/rollup-linux-x64-musl': 4.8.0 - '@rollup/rollup-win32-arm64-msvc': 4.8.0 - '@rollup/rollup-win32-ia32-msvc': 4.8.0 - '@rollup/rollup-win32-x64-msvc': 4.8.0 - fsevents: 2.3.3 - dev: true - /run-parallel@1.2.0: - resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==} - dependencies: - queue-microtask: 1.2.3 - dev: true + run-con@1.3.2: + resolution: {integrity: sha512-CcfE+mYiTcKEzg0IqS08+efdnH0oJ3zV0wSUFBNrMHMuxCtXvBCLzCJHatwuXDcu/RlhjTziTo/a1ruQik6/Yg==} + hasBin: true - /safe-buffer@5.2.1: - resolution: {integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==} - dev: true + run-parallel@1.2.0: + resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==} - /sass@1.69.5: - resolution: {integrity: sha512-qg2+UCJibLr2LCVOt3OlPhr/dqVHWOa9XtZf2OjbLs/T4VPSJ00udtgJxH3neXZm+QqX8B+3cU7RaLqp1iVfcQ==} + sass@1.77.0: + resolution: {integrity: sha512-eGj4HNfXqBWtSnvItNkn7B6icqH14i3CiCGbzMKs3BAPTq62pp9NBYsBgyN4cA+qssqo9r26lW4JSvlaUUWbgw==} engines: {node: '>=14.0.0'} hasBin: true - dependencies: - chokidar: 3.5.3 - immutable: 4.3.4 - source-map-js: 1.0.2 - dev: true - /sax@1.3.0: + sax@1.3.0: resolution: {integrity: sha512-0s+oAmw9zLl1V1cS9BtZN7JAd0cW5e0QH4W3LWEK6a4LaLEA2OTpGYWDY+6XasBLtz6wkm3u1xRw95mRuJ59WA==} - dev: true - /search-insights@2.13.0: + search-insights@2.13.0: resolution: {integrity: sha512-Orrsjf9trHHxFRuo9/rzm0KIWmgzE8RMlZMzuhZOJ01Rnz3D0YBAe+V6473t6/H6c7irs6Lt48brULAiRWb3Vw==} - dev: true - /section-matter@1.0.0: - resolution: {integrity: sha512-vfD3pmTzGpufjScBh50YHKzEu2lxBWhVEHsNGoEXmCmn2hKGfeNLYMzCJpe8cD7gqX7TJluOVpBkAequ6dgMmA==} - engines: {node: '>=4'} - dependencies: - extend-shallow: 2.0.1 - kind-of: 6.0.3 - dev: true - - /semver@7.5.4: - resolution: {integrity: sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==} - engines: {node: '>=10'} - hasBin: true - dependencies: - lru-cache: 6.0.0 - dev: true - - /shebang-command@2.0.0: + shebang-command@2.0.0: resolution: {integrity: 
sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==} engines: {node: '>=8'} - dependencies: - shebang-regex: 3.0.0 - dev: true - /shebang-regex@3.0.0: + shebang-regex@3.0.0: resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==} engines: {node: '>=8'} - dev: true - /signal-exit@3.0.7: - resolution: {integrity: sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==} - dev: true + shiki@1.4.0: + resolution: {integrity: sha512-5WIn0OL8PWm7JhnTwRWXniy6eEDY234mRrERVlFa646V2ErQqwIFd2UML7e0Pq9eqSKLoMa3Ke+xbsF+DAuy+Q==} - /signal-exit@4.1.0: + signal-exit@4.1.0: resolution: {integrity: sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==} engines: {node: '>=14'} - dev: true - /sitemap@7.1.1: + sitemap-ts@1.6.1: + resolution: {integrity: sha512-MOvutoHiSUxna/Q8m45Sz33il0aBvChtj3nqExYXEIkVB+CeVrVVKGqSSNCItbfnXYLBG/MG3HxwIOXxmzYf5w==} + + sitemap@7.1.1: resolution: {integrity: sha512-mK3aFtjz4VdJN0igpIJrinf3EO8U8mxOPsTBzSsy06UtjZQJ3YY3o3Xa7zSc5nMqcMrRwlChHZ18Kxg0caiPBg==} engines: {node: '>=12.0.0', npm: '>=5.6.0'} hasBin: true - dependencies: - '@types/node': 17.0.45 - '@types/sax': 1.2.7 - arg: 5.0.2 - sax: 1.3.0 - dev: true - - /slash@5.1.0: - resolution: {integrity: sha512-ZA6oR3T/pEyuqwMgAKT0/hAv8oAXckzbkmR0UkUosQ+Mc4RxGoJkRmwHgHufaenlyAgE1Mxgpdcrf75y6XcnDg==} - engines: {node: '>=14.16'} - dev: true - /source-map-js@1.0.2: - resolution: {integrity: sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==} + source-map-js@1.2.0: + resolution: {integrity: sha512-itJW8lvSA0TXEphiRoawsCksnlf8SyvmFzIhltqAHluXd88pkCd+cXJVHTDwdCr0IzwptSm035IHQktUu1QUMg==} engines: {node: '>=0.10.0'} - dev: true - /sprintf-js@1.0.3: - resolution: {integrity: sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==} - dev: true + speakingurl@14.0.1: + resolution: {integrity: sha512-1POYv7uv2gXoyGFpBCmpDVSNV74IfsWlDW216UPjbWufNf+bSU6GdbDsxdcxtfwb4xlI3yxzOTKClUosxARYrQ==} + engines: {node: '>=0.10.0'} - /stdin-discarder@0.1.0: - resolution: {integrity: sha512-xhV7w8S+bUwlPTb4bAOUQhv8/cSS5offJuX8GQGq32ONF0ZtDWKfkdomM3HMRA+LhX6um/FZ0COqlwsjD53LeQ==} - engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} - dependencies: - bl: 5.1.0 - dev: true + string-width@4.2.3: + resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==} + engines: {node: '>=8'} - /string-width@6.1.0: - resolution: {integrity: sha512-k01swCJAgQmuADB0YIc+7TuatfNvTBVOoaUWJjTB9R4VJzR5vNWzf5t42ESVZFPS8xTySF7CAdV4t/aaIm3UnQ==} - engines: {node: '>=16'} - dependencies: - eastasianwidth: 0.2.0 - emoji-regex: 10.3.0 - strip-ansi: 7.1.0 - dev: true + string-width@5.1.2: + resolution: {integrity: sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==} + engines: {node: '>=12'} - /string_decoder@1.3.0: - resolution: {integrity: sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==} - dependencies: - safe-buffer: 5.2.1 - dev: true + strip-ansi@6.0.1: + resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==} + engines: {node: '>=8'} - /strip-ansi@7.1.0: + strip-ansi@7.1.0: resolution: {integrity: sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==} 
engines: {node: '>=12'} - dependencies: - ansi-regex: 6.0.1 - dev: true - /strip-bom-string@1.0.0: - resolution: {integrity: sha512-uCC2VHvQRYu+lMh4My/sFNmF2klFymLX1wHJeXnbEJERpV/ZsVuonzerjfrGpIGF7LBVa1O7i9kjiWvJiFck8g==} - engines: {node: '>=0.10.0'} - dev: true - - /strip-final-newline@3.0.0: - resolution: {integrity: sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw==} - engines: {node: '>=12'} - dev: true + strip-json-comments@3.1.1: + resolution: {integrity: sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==} + engines: {node: '>=8'} - /striptags@3.2.0: - resolution: {integrity: sha512-g45ZOGzHDMe2bdYMdIvdAfCQkCTDMGBazSw1ypMowwGIee7ZQ5dU0rBJ8Jqgl+jAKIv4dbeE1jscZq9wid1Tkw==} - dev: true + tabbable@6.2.0: + resolution: {integrity: sha512-Cat63mxsVJlzYvN51JmVXIgNoUokrIaT2zLclCXjRd8boZ0004U4KCs/sToJ75C6sdlByWxpYnb5Boif1VSFew==} - /to-fast-properties@2.0.0: + to-fast-properties@2.0.0: resolution: {integrity: sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==} engines: {node: '>=4'} - dev: true - /to-regex-range@5.0.1: + to-regex-range@5.0.1: resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==} engines: {node: '>=8.0'} - dependencies: - is-number: 7.0.0 - dev: true - /ts-debounce@4.0.0: - resolution: {integrity: sha512-+1iDGY6NmOGidq7i7xZGA4cm8DAa6fqdYcvO5Z6yBevH++Bdo9Qt/mN0TzHUgcCcKv1gmh9+W5dHqz8pMWbCbg==} - dev: true + toml@3.0.0: + resolution: {integrity: sha512-y/mWCZinnvxjTKYhJ+pYxwD0mRLVvOtdS2Awbgxln6iEnt4rk0yBxeSBHkGJcPucRiG0e55mwWp+g/05rsrd6w==} - /uc.micro@1.0.6: - resolution: {integrity: sha512-8Y75pvTYkLJW2hWQHXxoqRgV7qb9B+9vFEtidML+7koHUFapnVJAZ6cKs+Qjz5Aw3aZWHMC6u0wJE3At+nSGwA==} - dev: true + uc.micro@2.1.0: + resolution: {integrity: sha512-ARDJmphmdvUk6Glw7y9DQ2bFkKBHwQHLi2lsaH6PPmz/Ka9sFOBsBluozhDltWmnv9u/cF6Rt87znRTPV+yp/A==} - /undici-types@5.26.5: + undici-types@5.26.5: resolution: {integrity: sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==} - dev: true - - /unicorn-magic@0.1.0: - resolution: {integrity: sha512-lRfVq8fE8gz6QMBuDM6a+LO3IAzTi05H6gCVaUpir2E1Rwpo4ZUog45KpNXKC/Mn3Yb9UDuHumeFTo9iV/D9FQ==} - engines: {node: '>=18'} - dev: true - /universalify@2.0.1: - resolution: {integrity: sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==} - engines: {node: '>= 10.0.0'} - dev: true - - /upath@2.0.1: - resolution: {integrity: sha512-1uEe95xksV1O0CYKXo8vQvN1JEbtJp7lb7C5U9HMsIp6IVwntkH/oNUzyVNQSd4S1sYk2FpSSW44FqMc8qee5w==} - engines: {node: '>=4'} - dev: true - - /update-browserslist-db@1.0.13(browserslist@4.22.2): - resolution: {integrity: sha512-xebP81SNcPuNpPP3uzeW1NYXxI3rxyJzF3pD6sH4jE7o/IX+WtSpwnVU+qIsDPyk0d3hmFQ7mjqc6AtV604hbg==} - hasBin: true - peerDependencies: - browserslist: '>= 4.21.0' - dependencies: - browserslist: 4.22.2 - escalade: 3.1.1 - picocolors: 1.0.0 - dev: true - - /util-deprecate@1.0.2: - resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==} - dev: true - - /vite@5.0.12: - resolution: {integrity: sha512-4hsnEkG3q0N4Tzf1+t6NdN9dg/L3BM+q8SWgbSPnJvrgH2kgdyzfVJwbR1ic69/4uMJJ/3dqDZZE5/WwqW8U1w==} - engines: {node: ^18.0.0 || >=20.0.0} + vite@5.2.11: + resolution: {integrity: sha512-HndV31LWW05i1BLPMUCE1B9E9GFbOu1MbenhS58FuK6owSO5qHm7GiCotrNY1YE5rMeQSFBGmT5ZaLEjFizgiQ==} + engines: {node: ^18.0.0 || 
>=20.0.0} hasBin: true peerDependencies: '@types/node': ^18.0.0 || >=20.0.0 @@ -2186,180 +940,989 @@ packages: optional: true terser: optional: true - dependencies: - esbuild: 0.19.9 - postcss: 8.4.32 - rollup: 4.8.0 - optionalDependencies: - fsevents: 2.3.3 - dev: true - /vue-demi@0.14.6(vue@3.3.11): - resolution: {integrity: sha512-8QA7wrYSHKaYgUxDA5ZC24w+eHm3sYCbp0EzcDwKqN3p6HqtTCGR/GVsPyZW92unff4UlcSh++lmqDWN3ZIq4w==} + vitepress@1.1.4: + resolution: {integrity: sha512-bWIzFZXpPB6NIDBuWnS20aMADH+FcFKDfQNYFvbOWij03PR29eImTceQHIzCKordjXYBhM/TjE5VKFTUJ3EheA==} + hasBin: true + peerDependencies: + markdown-it-mathjax3: ^4 + postcss: ^8 + peerDependenciesMeta: + markdown-it-mathjax3: + optional: true + postcss: + optional: true + + vue-demi@0.14.7: + resolution: {integrity: sha512-EOG8KXDQNwkJILkx/gPcoL/7vH+hORoBaKgGe+6W7VFMvCYJfmF2dGbvgDroVnI8LU7/kTu8mbjRZGBU1z9NTA==} engines: {node: '>=12'} hasBin: true - requiresBuild: true peerDependencies: '@vue/composition-api': ^1.0.0-rc.1 vue: ^3.0.0-0 || ^2.6.0 peerDependenciesMeta: '@vue/composition-api': optional: true - dependencies: - vue: 3.3.11 - dev: true - - /vue-router@4.2.5(vue@3.3.11): - resolution: {integrity: sha512-DIUpKcyg4+PTQKfFPX88UWhlagBEBEfJ5A8XDXRJLUnZOvcpMF8o/dnL90vpVkGaPbjvXazV/rC1qBKrZlFugw==} - peerDependencies: - vue: ^3.2.0 - dependencies: - '@vue/devtools-api': 6.5.1 - vue: 3.3.11 - dev: true - /vue@3.3.11: - resolution: {integrity: sha512-d4oBctG92CRO1cQfVBZp6WJAs0n8AK4Xf5fNjQCBeKCvMI1efGQ5E3Alt1slFJS9fZuPcFoiAiqFvQlv1X7t/w==} + vue@3.4.27: + resolution: {integrity: sha512-8s/56uK6r01r1icG/aEOHqyMVxd1bkYcSe9j8HcKtr/xTOFWvnzIVTehNW+5Yt89f+DLBe4A569pnZLS5HzAMA==} peerDependencies: typescript: '*' peerDependenciesMeta: typescript: optional: true + + which@2.0.2: + resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==} + engines: {node: '>= 8'} + hasBin: true + + wrap-ansi@7.0.0: + resolution: {integrity: sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==} + engines: {node: '>=10'} + + wrap-ansi@8.1.0: + resolution: {integrity: sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==} + engines: {node: '>=12'} + + xml-formatter@3.5.0: + resolution: {integrity: sha512-9ij/f2PLIPv+YDywtdztq7U82kYMDa5yPYwpn0TnXnqJRH6Su8RC/oaw91erHe3aSEbfgBaA1hDzReDFb1SVXw==} + engines: {node: '>= 14'} + + xml-parser-xo@4.1.1: + resolution: {integrity: sha512-Ggf2y90+Y6e9IK5hoPuembVHJ03PhDSdhldEmgzbihzu9k0XBo0sfcFxaSi4W1PlUSSI1ok+MJ0JCXUn+U4Ilw==} + engines: {node: '>= 14'} + +snapshots: + + '@algolia/autocomplete-core@1.9.3(@algolia/client-search@4.23.3)(algoliasearch@4.23.3)(search-insights@2.13.0)': dependencies: - '@vue/compiler-dom': 3.3.11 - '@vue/compiler-sfc': 3.3.11 - '@vue/runtime-dom': 3.3.11 - '@vue/server-renderer': 3.3.11(vue@3.3.11) - '@vue/shared': 3.3.11 - dev: true - - /vuepress-plugin-sitemap2@2.0.0-rc.4(vuepress@2.0.0-rc.0): - resolution: {integrity: sha512-zi57grbyAFL54HUZNmmAWELYgwPsqa8p63HkEBSpXiQEa3JbYumAXHPZp4sIBGlBxcF8X34GtddrVw9FDlCtZA==} - engines: {node: '>=18.16.0', npm: '>=8', pnpm: '>=7', yarn: '>=2'} - deprecated: Please use @vuepress/plugin-sitemap@v2 instead - peerDependencies: - vuepress: 2.0.0-rc.0 - vuepress-vite: 2.0.0-rc.0 - vuepress-webpack: 2.0.0-rc.0 - peerDependenciesMeta: - vuepress: - optional: true - vuepress-vite: - optional: true - vuepress-webpack: - optional: true + '@algolia/autocomplete-plugin-algolia-insights': 
1.9.3(@algolia/client-search@4.23.3)(algoliasearch@4.23.3)(search-insights@2.13.0) + '@algolia/autocomplete-shared': 1.9.3(@algolia/client-search@4.23.3)(algoliasearch@4.23.3) + transitivePeerDependencies: + - '@algolia/client-search' + - algoliasearch + - search-insights + + '@algolia/autocomplete-plugin-algolia-insights@1.9.3(@algolia/client-search@4.23.3)(algoliasearch@4.23.3)(search-insights@2.13.0)': dependencies: - '@vuepress/shared': 2.0.0-rc.0 - '@vuepress/utils': 2.0.0-rc.0 - sitemap: 7.1.1 - vuepress: 2.0.0-rc.0(@vuepress/client@2.0.0-rc.0)(vue@3.3.11) - vuepress-shared: 2.0.0-rc.4(vuepress@2.0.0-rc.0) + '@algolia/autocomplete-shared': 1.9.3(@algolia/client-search@4.23.3)(algoliasearch@4.23.3) + search-insights: 2.13.0 + transitivePeerDependencies: + - '@algolia/client-search' + - algoliasearch + + '@algolia/autocomplete-preset-algolia@1.9.3(@algolia/client-search@4.23.3)(algoliasearch@4.23.3)': + dependencies: + '@algolia/autocomplete-shared': 1.9.3(@algolia/client-search@4.23.3)(algoliasearch@4.23.3) + '@algolia/client-search': 4.23.3 + algoliasearch: 4.23.3 + + '@algolia/autocomplete-shared@1.9.3(@algolia/client-search@4.23.3)(algoliasearch@4.23.3)': + dependencies: + '@algolia/client-search': 4.23.3 + algoliasearch: 4.23.3 + + '@algolia/cache-browser-local-storage@4.23.3': + dependencies: + '@algolia/cache-common': 4.23.3 + + '@algolia/cache-common@4.23.3': {} + + '@algolia/cache-in-memory@4.23.3': + dependencies: + '@algolia/cache-common': 4.23.3 + + '@algolia/client-account@4.23.3': + dependencies: + '@algolia/client-common': 4.23.3 + '@algolia/client-search': 4.23.3 + '@algolia/transporter': 4.23.3 + + '@algolia/client-analytics@4.23.3': + dependencies: + '@algolia/client-common': 4.23.3 + '@algolia/client-search': 4.23.3 + '@algolia/requester-common': 4.23.3 + '@algolia/transporter': 4.23.3 + + '@algolia/client-common@4.23.3': + dependencies: + '@algolia/requester-common': 4.23.3 + '@algolia/transporter': 4.23.3 + + '@algolia/client-personalization@4.23.3': + dependencies: + '@algolia/client-common': 4.23.3 + '@algolia/requester-common': 4.23.3 + '@algolia/transporter': 4.23.3 + + '@algolia/client-search@4.23.3': + dependencies: + '@algolia/client-common': 4.23.3 + '@algolia/requester-common': 4.23.3 + '@algolia/transporter': 4.23.3 + + '@algolia/logger-common@4.23.3': {} + + '@algolia/logger-console@4.23.3': + dependencies: + '@algolia/logger-common': 4.23.3 + + '@algolia/recommend@4.23.3': + dependencies: + '@algolia/cache-browser-local-storage': 4.23.3 + '@algolia/cache-common': 4.23.3 + '@algolia/cache-in-memory': 4.23.3 + '@algolia/client-common': 4.23.3 + '@algolia/client-search': 4.23.3 + '@algolia/logger-common': 4.23.3 + '@algolia/logger-console': 4.23.3 + '@algolia/requester-browser-xhr': 4.23.3 + '@algolia/requester-common': 4.23.3 + '@algolia/requester-node-http': 4.23.3 + '@algolia/transporter': 4.23.3 + + '@algolia/requester-browser-xhr@4.23.3': + dependencies: + '@algolia/requester-common': 4.23.3 + + '@algolia/requester-common@4.23.3': {} + + '@algolia/requester-node-http@4.23.3': + dependencies: + '@algolia/requester-common': 4.23.3 + + '@algolia/transporter@4.23.3': + dependencies: + '@algolia/cache-common': 4.23.3 + '@algolia/logger-common': 4.23.3 + '@algolia/requester-common': 4.23.3 + + '@antfu/utils@0.7.6': {} + + '@babel/helper-string-parser@7.24.1': {} + + '@babel/helper-validator-identifier@7.22.20': {} + + '@babel/parser@7.24.5': + dependencies: + '@babel/types': 7.24.0 + + '@babel/types@7.24.0': + dependencies: + '@babel/helper-string-parser': 
7.24.1 + '@babel/helper-validator-identifier': 7.22.20 + to-fast-properties: 2.0.0 + + '@docsearch/css@3.6.0': {} + + '@docsearch/js@3.6.0(@algolia/client-search@4.23.3)(search-insights@2.13.0)': + dependencies: + '@docsearch/react': 3.6.0(@algolia/client-search@4.23.3)(search-insights@2.13.0) + preact: 10.21.0 + transitivePeerDependencies: + - '@algolia/client-search' + - '@types/react' + - react + - react-dom + - search-insights + + '@docsearch/react@3.6.0(@algolia/client-search@4.23.3)(search-insights@2.13.0)': + dependencies: + '@algolia/autocomplete-core': 1.9.3(@algolia/client-search@4.23.3)(algoliasearch@4.23.3)(search-insights@2.13.0) + '@algolia/autocomplete-preset-algolia': 1.9.3(@algolia/client-search@4.23.3)(algoliasearch@4.23.3) + '@docsearch/css': 3.6.0 + algoliasearch: 4.23.3 + optionalDependencies: + search-insights: 2.13.0 + transitivePeerDependencies: + - '@algolia/client-search' + + '@esbuild/aix-ppc64@0.20.2': + optional: true + + '@esbuild/android-arm64@0.20.2': + optional: true + + '@esbuild/android-arm@0.20.2': + optional: true + + '@esbuild/android-x64@0.20.2': + optional: true + + '@esbuild/darwin-arm64@0.20.2': + optional: true + + '@esbuild/darwin-x64@0.20.2': + optional: true + + '@esbuild/freebsd-arm64@0.20.2': + optional: true + + '@esbuild/freebsd-x64@0.20.2': + optional: true + + '@esbuild/linux-arm64@0.20.2': + optional: true + + '@esbuild/linux-arm@0.20.2': + optional: true + + '@esbuild/linux-ia32@0.20.2': + optional: true + + '@esbuild/linux-loong64@0.20.2': + optional: true + + '@esbuild/linux-mips64el@0.20.2': + optional: true + + '@esbuild/linux-ppc64@0.20.2': + optional: true + + '@esbuild/linux-riscv64@0.20.2': + optional: true + + '@esbuild/linux-s390x@0.20.2': + optional: true + + '@esbuild/linux-x64@0.20.2': + optional: true + + '@esbuild/netbsd-x64@0.20.2': + optional: true + + '@esbuild/openbsd-x64@0.20.2': + optional: true + + '@esbuild/sunos-x64@0.20.2': + optional: true + + '@esbuild/win32-arm64@0.20.2': + optional: true + + '@esbuild/win32-ia32@0.20.2': + optional: true + + '@esbuild/win32-x64@0.20.2': + optional: true + + '@isaacs/cliui@8.0.2': + dependencies: + string-width: 5.1.2 + string-width-cjs: string-width@4.2.3 + strip-ansi: 7.1.0 + strip-ansi-cjs: strip-ansi@6.0.1 + wrap-ansi: 8.1.0 + wrap-ansi-cjs: wrap-ansi@7.0.0 + + '@jridgewell/sourcemap-codec@1.4.15': {} + + '@nodelib/fs.scandir@2.1.5': + dependencies: + '@nodelib/fs.stat': 2.0.5 + run-parallel: 1.2.0 + + '@nodelib/fs.stat@2.0.5': {} + + '@nodelib/fs.walk@1.2.8': + dependencies: + '@nodelib/fs.scandir': 2.1.5 + fastq: 1.17.1 + + '@pkgjs/parseargs@0.11.0': + optional: true + + '@playwright/test@1.44.0': + dependencies: + playwright: 1.44.0 + + '@rollup/rollup-android-arm-eabi@4.17.2': + optional: true + + '@rollup/rollup-android-arm64@4.17.2': + optional: true + + '@rollup/rollup-darwin-arm64@4.17.2': + optional: true + + '@rollup/rollup-darwin-x64@4.17.2': + optional: true + + '@rollup/rollup-linux-arm-gnueabihf@4.17.2': + optional: true + + '@rollup/rollup-linux-arm-musleabihf@4.17.2': + optional: true + + '@rollup/rollup-linux-arm64-gnu@4.17.2': + optional: true + + '@rollup/rollup-linux-arm64-musl@4.17.2': + optional: true + + '@rollup/rollup-linux-powerpc64le-gnu@4.17.2': + optional: true + + '@rollup/rollup-linux-riscv64-gnu@4.17.2': + optional: true + + '@rollup/rollup-linux-s390x-gnu@4.17.2': + optional: true + + '@rollup/rollup-linux-x64-gnu@4.17.2': + optional: true + + '@rollup/rollup-linux-x64-musl@4.17.2': + optional: true + + 
'@rollup/rollup-win32-arm64-msvc@4.17.2': + optional: true + + '@rollup/rollup-win32-ia32-msvc@4.17.2': + optional: true + + '@rollup/rollup-win32-x64-msvc@4.17.2': + optional: true + + '@shikijs/core@1.4.0': {} + + '@shikijs/transformers@1.4.0': + dependencies: + shiki: 1.4.0 + + '@types/estree@1.0.5': {} + + '@types/linkify-it@5.0.0': {} + + '@types/markdown-it@14.1.1': + dependencies: + '@types/linkify-it': 5.0.0 + '@types/mdurl': 2.0.0 + + '@types/mdurl@2.0.0': {} + + '@types/node@17.0.45': {} + + '@types/node@20.12.10': + dependencies: + undici-types: 5.26.5 + + '@types/sax@1.2.7': + dependencies: + '@types/node': 20.12.10 + + '@types/web-bluetooth@0.0.20': {} + + '@vitejs/plugin-vue@5.0.4(vite@5.2.11(@types/node@20.12.10)(sass@1.77.0))(vue@3.4.27)': + dependencies: + vite: 5.2.11(@types/node@20.12.10)(sass@1.77.0) + vue: 3.4.27 + + '@vue/compiler-core@3.4.27': + dependencies: + '@babel/parser': 7.24.5 + '@vue/shared': 3.4.27 + entities: 4.5.0 + estree-walker: 2.0.2 + source-map-js: 1.2.0 + + '@vue/compiler-dom@3.4.27': + dependencies: + '@vue/compiler-core': 3.4.27 + '@vue/shared': 3.4.27 + + '@vue/compiler-sfc@3.4.27': + dependencies: + '@babel/parser': 7.24.5 + '@vue/compiler-core': 3.4.27 + '@vue/compiler-dom': 3.4.27 + '@vue/compiler-ssr': 3.4.27 + '@vue/shared': 3.4.27 + estree-walker: 2.0.2 + magic-string: 0.30.10 + postcss: 8.4.38 + source-map-js: 1.2.0 + + '@vue/compiler-ssr@3.4.27': + dependencies: + '@vue/compiler-dom': 3.4.27 + '@vue/shared': 3.4.27 + + '@vue/devtools-api@7.1.3(vue@3.4.27)': + dependencies: + '@vue/devtools-kit': 7.1.3(vue@3.4.27) + transitivePeerDependencies: + - vue + + '@vue/devtools-kit@7.1.3(vue@3.4.27)': + dependencies: + '@vue/devtools-shared': 7.1.3 + hookable: 5.5.3 + mitt: 3.0.1 + perfect-debounce: 1.0.0 + speakingurl: 14.0.1 + vue: 3.4.27 + + '@vue/devtools-shared@7.1.3': + dependencies: + rfdc: 1.3.1 + + '@vue/reactivity@3.4.27': + dependencies: + '@vue/shared': 3.4.27 + + '@vue/runtime-core@3.4.27': + dependencies: + '@vue/reactivity': 3.4.27 + '@vue/shared': 3.4.27 + + '@vue/runtime-dom@3.4.27': + dependencies: + '@vue/runtime-core': 3.4.27 + '@vue/shared': 3.4.27 + csstype: 3.1.3 + + '@vue/server-renderer@3.4.27(vue@3.4.27)': + dependencies: + '@vue/compiler-ssr': 3.4.27 + '@vue/shared': 3.4.27 + vue: 3.4.27 + + '@vue/shared@3.4.27': {} + + '@vueuse/core@10.9.0(vue@3.4.27)': + dependencies: + '@types/web-bluetooth': 0.0.20 + '@vueuse/metadata': 10.9.0 + '@vueuse/shared': 10.9.0(vue@3.4.27) + vue-demi: 0.14.7(vue@3.4.27) transitivePeerDependencies: - '@vue/composition-api' - - supports-color - - typescript - dev: true + - vue - /vuepress-shared@2.0.0-rc.4(vuepress@2.0.0-rc.0): - resolution: {integrity: sha512-YndYftQ9AUdWWESZHFZ7QjuUGXqgVayHzu3Qfar9GWr45NP2ZW7edKN4adU2/bOiokYG1Rfj47dgMUrRxEgqhg==} - engines: {node: '>=18.16.0', npm: '>=8', pnpm: '>=7', yarn: '>=2'} - peerDependencies: - vuepress: 2.0.0-rc.0 - vuepress-vite: 2.0.0-rc.0 - vuepress-webpack: 2.0.0-rc.0 - peerDependenciesMeta: - vuepress: - optional: true - vuepress-vite: - optional: true - vuepress-webpack: - optional: true + '@vueuse/integrations@10.9.0(focus-trap@7.5.4)(vue@3.4.27)': dependencies: - '@vuepress/client': 2.0.0-rc.0 - '@vuepress/shared': 2.0.0-rc.0 - '@vuepress/utils': 2.0.0-rc.0 - '@vueuse/core': 10.7.0(vue@3.3.11) - cheerio: 1.0.0-rc.12 - dayjs: 1.11.10 - execa: 8.0.1 - fflate: 0.8.1 - gray-matter: 4.0.3 - semver: 7.5.4 - striptags: 3.2.0 - vue: 3.3.11 - vue-router: 4.2.5(vue@3.3.11) - vuepress: 2.0.0-rc.0(@vuepress/client@2.0.0-rc.0)(vue@3.3.11) + 
'@vueuse/core': 10.9.0(vue@3.4.27) + '@vueuse/shared': 10.9.0(vue@3.4.27) + vue-demi: 0.14.7(vue@3.4.27) + optionalDependencies: + focus-trap: 7.5.4 transitivePeerDependencies: - '@vue/composition-api' - - supports-color - - typescript - dev: true + - vue - /vuepress-vite@2.0.0-rc.0(@vuepress/client@2.0.0-rc.0)(vue@3.3.11): - resolution: {integrity: sha512-+2XBejeiskPyr2raBeA2o4uDFDsjtadpUVmtio3qqFtQpOhidz/ORuiTLr2UfLtFn1ASIHP6Vy2YjQ0e/TeUVw==} - engines: {node: '>=18.16.0'} - hasBin: true - peerDependencies: - '@vuepress/client': 2.0.0-rc.0 - vue: ^3.3.4 - dependencies: - '@vuepress/bundler-vite': 2.0.0-rc.0 - '@vuepress/cli': 2.0.0-rc.0 - '@vuepress/client': 2.0.0-rc.0 - '@vuepress/core': 2.0.0-rc.0 - '@vuepress/theme-default': 2.0.0-rc.0 - vue: 3.3.11 + '@vueuse/metadata@10.9.0': {} + + '@vueuse/shared@10.9.0(vue@3.4.27)': + dependencies: + vue-demi: 0.14.7(vue@3.4.27) transitivePeerDependencies: - - '@types/node' - '@vue/composition-api' - - less - - lightningcss - - sass - - sass-loader - - stylus - - sugarss - - supports-color - - terser - - ts-node - - typescript - dev: true + - vue - /vuepress@2.0.0-rc.0(@vuepress/client@2.0.0-rc.0)(vue@3.3.11): - resolution: {integrity: sha512-sydt/B7+pIw926G5PntYmptLkC5o2buXKh+WR1+P2KnsvkXU+UGnQrJJ0FBvu/4RNuY99tkUZd59nyPhEmRrCg==} - engines: {node: '>=18.16.0'} - hasBin: true + algoliasearch@4.23.3: + dependencies: + '@algolia/cache-browser-local-storage': 4.23.3 + '@algolia/cache-common': 4.23.3 + '@algolia/cache-in-memory': 4.23.3 + '@algolia/client-account': 4.23.3 + '@algolia/client-analytics': 4.23.3 + '@algolia/client-common': 4.23.3 + '@algolia/client-personalization': 4.23.3 + '@algolia/client-search': 4.23.3 + '@algolia/logger-common': 4.23.3 + '@algolia/logger-console': 4.23.3 + '@algolia/recommend': 4.23.3 + '@algolia/requester-browser-xhr': 4.23.3 + '@algolia/requester-common': 4.23.3 + '@algolia/requester-node-http': 4.23.3 + '@algolia/transporter': 4.23.3 + + ansi-regex@5.0.1: {} + + ansi-regex@6.0.1: {} + + ansi-styles@4.3.0: + dependencies: + color-convert: 2.0.1 + + ansi-styles@6.2.1: {} + + anymatch@3.1.3: + dependencies: + normalize-path: 3.0.0 + picomatch: 2.3.1 + + arg@5.0.2: {} + + argparse@2.0.1: {} + + balanced-match@1.0.2: {} + + binary-extensions@2.3.0: {} + + brace-expansion@2.0.1: + dependencies: + balanced-match: 1.0.2 + + braces@3.0.2: + dependencies: + fill-range: 7.0.1 + + chokidar@3.6.0: + dependencies: + anymatch: 3.1.3 + braces: 3.0.2 + glob-parent: 5.1.2 + is-binary-path: 2.1.0 + is-glob: 4.0.3 + normalize-path: 3.0.0 + readdirp: 3.6.0 + optionalDependencies: + fsevents: 2.3.3 + + color-convert@2.0.1: + dependencies: + color-name: 1.1.4 + + color-name@1.1.4: {} + + commander@12.0.0: {} + + cross-spawn@7.0.3: + dependencies: + path-key: 3.1.1 + shebang-command: 2.0.0 + which: 2.0.2 + + csstype@3.1.3: {} + + deep-extend@0.6.0: {} + + eastasianwidth@0.2.0: {} + + emoji-regex@8.0.0: {} + + emoji-regex@9.2.2: {} + + entities@4.5.0: {} + + esbuild@0.20.2: + optionalDependencies: + '@esbuild/aix-ppc64': 0.20.2 + '@esbuild/android-arm': 0.20.2 + '@esbuild/android-arm64': 0.20.2 + '@esbuild/android-x64': 0.20.2 + '@esbuild/darwin-arm64': 0.20.2 + '@esbuild/darwin-x64': 0.20.2 + '@esbuild/freebsd-arm64': 0.20.2 + '@esbuild/freebsd-x64': 0.20.2 + '@esbuild/linux-arm': 0.20.2 + '@esbuild/linux-arm64': 0.20.2 + '@esbuild/linux-ia32': 0.20.2 + '@esbuild/linux-loong64': 0.20.2 + '@esbuild/linux-mips64el': 0.20.2 + '@esbuild/linux-ppc64': 0.20.2 + '@esbuild/linux-riscv64': 0.20.2 + '@esbuild/linux-s390x': 0.20.2 + 
'@esbuild/linux-x64': 0.20.2 + '@esbuild/netbsd-x64': 0.20.2 + '@esbuild/openbsd-x64': 0.20.2 + '@esbuild/sunos-x64': 0.20.2 + '@esbuild/win32-arm64': 0.20.2 + '@esbuild/win32-ia32': 0.20.2 + '@esbuild/win32-x64': 0.20.2 + + estree-walker@2.0.2: {} + + fast-glob@3.3.1: + dependencies: + '@nodelib/fs.stat': 2.0.5 + '@nodelib/fs.walk': 1.2.8 + glob-parent: 5.1.2 + merge2: 1.4.1 + micromatch: 4.0.5 + + fastq@1.17.1: + dependencies: + reusify: 1.0.4 + + fill-range@7.0.1: + dependencies: + to-regex-range: 5.0.1 + + focus-trap@7.5.4: + dependencies: + tabbable: 6.2.0 + + foreground-child@3.1.1: + dependencies: + cross-spawn: 7.0.3 + signal-exit: 4.1.0 + + fsevents@2.3.2: + optional: true + + fsevents@2.3.3: + optional: true + + get-stdin@9.0.0: {} + + glob-parent@5.1.2: + dependencies: + is-glob: 4.0.3 + + glob@10.3.15: + dependencies: + foreground-child: 3.1.1 + jackspeak: 2.3.6 + minimatch: 9.0.4 + minipass: 7.1.1 + path-scurry: 1.11.1 + + hookable@5.5.3: {} + + ignore@5.3.1: {} + + immutable@4.3.5: {} + + ini@4.1.2: {} + + is-binary-path@2.1.0: + dependencies: + binary-extensions: 2.3.0 + + is-extglob@2.1.1: {} + + is-fullwidth-code-point@3.0.0: {} + + is-glob@4.0.3: + dependencies: + is-extglob: 2.1.1 + + is-number@7.0.0: {} + + isexe@2.0.0: {} + + jackspeak@2.3.6: + dependencies: + '@isaacs/cliui': 8.0.2 + optionalDependencies: + '@pkgjs/parseargs': 0.11.0 + + js-yaml@4.1.0: + dependencies: + argparse: 2.0.1 + + jsonc-parser@3.2.1: {} + + jsonpointer@5.0.1: {} + + linkify-it@5.0.0: + dependencies: + uc.micro: 2.1.0 + + lru-cache@10.2.2: {} + + magic-string@0.30.10: dependencies: - vuepress-vite: 2.0.0-rc.0(@vuepress/client@2.0.0-rc.0)(vue@3.3.11) + '@jridgewell/sourcemap-codec': 1.4.15 + + mark.js@8.11.1: {} + + markdown-it@14.1.0: + dependencies: + argparse: 2.0.1 + entities: 4.5.0 + linkify-it: 5.0.0 + mdurl: 2.0.0 + punycode.js: 2.3.1 + uc.micro: 2.1.0 + + markdownlint-cli@0.40.0: + dependencies: + commander: 12.0.0 + get-stdin: 9.0.0 + glob: 10.3.15 + ignore: 5.3.1 + js-yaml: 4.1.0 + jsonc-parser: 3.2.1 + jsonpointer: 5.0.1 + markdownlint: 0.34.0 + minimatch: 9.0.4 + run-con: 1.3.2 + toml: 3.0.0 + + markdownlint-micromark@0.1.9: {} + + markdownlint@0.34.0: + dependencies: + markdown-it: 14.1.0 + markdownlint-micromark: 0.1.9 + + mdurl@2.0.0: {} + + merge2@1.4.1: {} + + micromatch@4.0.5: + dependencies: + braces: 3.0.2 + picomatch: 2.3.1 + + minimatch@9.0.4: + dependencies: + brace-expansion: 2.0.1 + + minimist@1.2.8: {} + + minipass@7.1.1: {} + + minisearch@6.3.0: {} + + mitt@3.0.1: {} + + nanoid@3.3.7: {} + + normalize-path@3.0.0: {} + + path-key@3.1.1: {} + + path-scurry@1.11.1: + dependencies: + lru-cache: 10.2.2 + minipass: 7.1.1 + + perfect-debounce@1.0.0: {} + + picocolors@1.0.0: {} + + picomatch@2.3.1: {} + + playwright-core@1.44.0: {} + + playwright@1.44.0: + dependencies: + playwright-core: 1.44.0 + optionalDependencies: + fsevents: 2.3.2 + + postcss@8.4.38: + dependencies: + nanoid: 3.3.7 + picocolors: 1.0.0 + source-map-js: 1.2.0 + + preact@10.21.0: {} + + punycode.js@2.3.1: {} + + queue-microtask@1.2.3: {} + + readdirp@3.6.0: + dependencies: + picomatch: 2.3.1 + + reusify@1.0.4: {} + + rfdc@1.3.1: {} + + rollup@4.17.2: + dependencies: + '@types/estree': 1.0.5 + optionalDependencies: + '@rollup/rollup-android-arm-eabi': 4.17.2 + '@rollup/rollup-android-arm64': 4.17.2 + '@rollup/rollup-darwin-arm64': 4.17.2 + '@rollup/rollup-darwin-x64': 4.17.2 + '@rollup/rollup-linux-arm-gnueabihf': 4.17.2 + '@rollup/rollup-linux-arm-musleabihf': 4.17.2 + '@rollup/rollup-linux-arm64-gnu': 
4.17.2 + '@rollup/rollup-linux-arm64-musl': 4.17.2 + '@rollup/rollup-linux-powerpc64le-gnu': 4.17.2 + '@rollup/rollup-linux-riscv64-gnu': 4.17.2 + '@rollup/rollup-linux-s390x-gnu': 4.17.2 + '@rollup/rollup-linux-x64-gnu': 4.17.2 + '@rollup/rollup-linux-x64-musl': 4.17.2 + '@rollup/rollup-win32-arm64-msvc': 4.17.2 + '@rollup/rollup-win32-ia32-msvc': 4.17.2 + '@rollup/rollup-win32-x64-msvc': 4.17.2 + fsevents: 2.3.3 + + run-con@1.3.2: + dependencies: + deep-extend: 0.6.0 + ini: 4.1.2 + minimist: 1.2.8 + strip-json-comments: 3.1.1 + + run-parallel@1.2.0: + dependencies: + queue-microtask: 1.2.3 + + sass@1.77.0: + dependencies: + chokidar: 3.6.0 + immutable: 4.3.5 + source-map-js: 1.2.0 + + sax@1.3.0: {} + + search-insights@2.13.0: {} + + shebang-command@2.0.0: + dependencies: + shebang-regex: 3.0.0 + + shebang-regex@3.0.0: {} + + shiki@1.4.0: + dependencies: + '@shikijs/core': 1.4.0 + + signal-exit@4.1.0: {} + + sitemap-ts@1.6.1: + dependencies: + '@antfu/utils': 0.7.6 + fast-glob: 3.3.1 + sitemap: 7.1.1 + xml-formatter: 3.5.0 + + sitemap@7.1.1: + dependencies: + '@types/node': 17.0.45 + '@types/sax': 1.2.7 + arg: 5.0.2 + sax: 1.3.0 + + source-map-js@1.2.0: {} + + speakingurl@14.0.1: {} + + string-width@4.2.3: + dependencies: + emoji-regex: 8.0.0 + is-fullwidth-code-point: 3.0.0 + strip-ansi: 6.0.1 + + string-width@5.1.2: + dependencies: + eastasianwidth: 0.2.0 + emoji-regex: 9.2.2 + strip-ansi: 7.1.0 + + strip-ansi@6.0.1: + dependencies: + ansi-regex: 5.0.1 + + strip-ansi@7.1.0: + dependencies: + ansi-regex: 6.0.1 + + strip-json-comments@3.1.1: {} + + tabbable@6.2.0: {} + + to-fast-properties@2.0.0: {} + + to-regex-range@5.0.1: + dependencies: + is-number: 7.0.0 + + toml@3.0.0: {} + + uc.micro@2.1.0: {} + + undici-types@5.26.5: {} + + vite@5.2.11(@types/node@20.12.10)(sass@1.77.0): + dependencies: + esbuild: 0.20.2 + postcss: 8.4.38 + rollup: 4.17.2 + optionalDependencies: + '@types/node': 20.12.10 + fsevents: 2.3.3 + sass: 1.77.0 + + vitepress@1.1.4(@algolia/client-search@4.23.3)(@types/node@20.12.10)(postcss@8.4.38)(sass@1.77.0)(search-insights@2.13.0): + dependencies: + '@docsearch/css': 3.6.0 + '@docsearch/js': 3.6.0(@algolia/client-search@4.23.3)(search-insights@2.13.0) + '@shikijs/core': 1.4.0 + '@shikijs/transformers': 1.4.0 + '@types/markdown-it': 14.1.1 + '@vitejs/plugin-vue': 5.0.4(vite@5.2.11(@types/node@20.12.10)(sass@1.77.0))(vue@3.4.27) + '@vue/devtools-api': 7.1.3(vue@3.4.27) + '@vueuse/core': 10.9.0(vue@3.4.27) + '@vueuse/integrations': 10.9.0(focus-trap@7.5.4)(vue@3.4.27) + focus-trap: 7.5.4 + mark.js: 8.11.1 + minisearch: 6.3.0 + shiki: 1.4.0 + vite: 5.2.11(@types/node@20.12.10)(sass@1.77.0) + vue: 3.4.27 + optionalDependencies: + postcss: 8.4.38 transitivePeerDependencies: + - '@algolia/client-search' - '@types/node' + - '@types/react' - '@vue/composition-api' - - '@vuepress/client' + - async-validator + - axios + - change-case + - drauu + - fuse.js + - idb-keyval + - jwt-decode - less - lightningcss + - nprogress + - qrcode + - react + - react-dom - sass - - sass-loader + - search-insights + - sortablejs - stylus - sugarss - - supports-color - terser - - ts-node - typescript - - vue - dev: true + - universal-cookie - /which@2.0.2: - resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==} - engines: {node: '>= 8'} - hasBin: true + vue-demi@0.14.7(vue@3.4.27): + dependencies: + vue: 3.4.27 + + vue@3.4.27: + dependencies: + '@vue/compiler-dom': 3.4.27 + '@vue/compiler-sfc': 3.4.27 + '@vue/runtime-dom': 3.4.27 + 
'@vue/server-renderer': 3.4.27(vue@3.4.27) + '@vue/shared': 3.4.27 + + which@2.0.2: dependencies: isexe: 2.0.0 - dev: true - /yallist@4.0.0: - resolution: {integrity: sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==} - dev: true + wrap-ansi@7.0.0: + dependencies: + ansi-styles: 4.3.0 + string-width: 4.2.3 + strip-ansi: 6.0.1 - /yaml@2.3.4: - resolution: {integrity: sha512-8aAvwVUSHpfEqTQ4w/KMlf3HcRdt50E5ODIQJBw1fQ5RL34xabzxtUlzTXVqc4rkZsPbvrXKWnABCD7kWSmocA==} - engines: {node: '>= 14'} - dev: true + wrap-ansi@8.1.0: + dependencies: + ansi-styles: 6.2.1 + string-width: 5.1.2 + strip-ansi: 7.1.0 + + xml-formatter@3.5.0: + dependencies: + xml-parser-xo: 4.1.1 + + xml-parser-xo@4.1.1: {} diff --git a/runatlantis.io/.vitepress/components/Banner.vue b/runatlantis.io/.vitepress/components/Banner.vue new file mode 100644 index 0000000000..84e212d50e --- /dev/null +++ b/runatlantis.io/.vitepress/components/Banner.vue @@ -0,0 +1,96 @@ + + + + + + + diff --git a/runatlantis.io/.vitepress/components/shims.d.ts b/runatlantis.io/.vitepress/components/shims.d.ts new file mode 100644 index 0000000000..d1f3133128 --- /dev/null +++ b/runatlantis.io/.vitepress/components/shims.d.ts @@ -0,0 +1,5 @@ +declare module '*.vue' { + import type { DefineComponent } from 'vue'; + const component: DefineComponent; + export default component; +} diff --git a/runatlantis.io/.vitepress/config.ts b/runatlantis.io/.vitepress/config.ts new file mode 100644 index 0000000000..9e3e7f7669 --- /dev/null +++ b/runatlantis.io/.vitepress/config.ts @@ -0,0 +1,124 @@ +import { generateSitemap as sitemap } from "sitemap-ts" +import { defineConfig } from 'vitepress'; +import * as navbars from "./navbars"; +import * as sidebars from "./sidebars"; + +// https://vitepress.dev/reference/site-config +export default defineConfig({ + title: 'Atlantis', + description: 'Atlantis: Terraform Pull Request Automation', + lang: 'en-US', + lastUpdated: true, + locales: { + root: { + label: 'English', + lang: 'en-US', + themeConfig: { + nav: navbars.en, + sidebar: sidebars.en, + }, + }, + }, + themeConfig: { + // https://vitepress.dev/reference/default-theme-config + editLink: { + pattern: 'https://github.com/runatlantis/atlantis/edit/main/runatlantis.io/:path' + }, + // headline "depth" the right nav will show for its TOC + // + // https://vitepress.dev/reference/frontmatter-config#outline + outline: [2, 3], + search: { + provider: 'algolia', + options: { + // We internally discussed how this API key is exposed in the code and decided + // that it is a non-issue because this API key can easily be extracted by + // looking at the browser dev tools since the key is used in the API requests. 
+ apiKey: '3b733dff1539ca3a210775860301fa86', + indexName: 'runatlantis', + appId: 'BH4D9OD16A', + locales: { + '/': { + placeholder: 'Search Documentation', + translations: { + button: { + buttonText: 'Search Documentation', + }, + }, + }, + }, + } + }, + socialLinks: [ + { icon: "slack", link: "https://join.slack.com/t/atlantis-community/shared_invite/zt-9xlxtxtc-CUSKB1ATt_sQy6um~LDPNw" }, + { icon: "twitter", link: "https://twitter.com/runatlantis" }, + { icon: "github", link: "https://github.com/runatlantis/atlantis" }, + ], + }, + // SEO Improvement - sitemap.xml & robots.txt + buildEnd: async ({ outDir }) => { + sitemap({ + hostname: "https://www.runatlantis.io/", + outDir: outDir, + generateRobotsTxt: true, + }) + }, + head: [ + ['link', { rel: 'icon', type: 'image/png', href: '/favicon-196x196.png', sizes: '196x196' }], + ['link', { rel: 'icon', type: 'image/png', href: '/favicon-96x96.png', sizes: '96x96' }], + ['link', { rel: 'icon', type: 'image/png', href: '/favicon-32x32.png', sizes: '32x32' }], + ['link', { rel: 'icon', type: 'image/png', href: '/favicon-16x16.png', sizes: '16x16' }], + ['link', { rel: 'icon', type: 'image/png', href: '/favicon-128.png', sizes: '128x128' }], + ['link', { rel: 'apple-touch-icon-precomposed', sizes: '57x57', href: '/apple-touch-icon-57x57.png' }], + ['link', { rel: 'apple-touch-icon-precomposed', sizes: '114x114', href: '/apple-touch-icon-114x114.png' }], + ['link', { rel: 'apple-touch-icon-precomposed', sizes: '72x72', href: '/apple-touch-icon-72x72.png' }], + ['link', { rel: 'apple-touch-icon-precomposed', sizes: '144x144', href: '/apple-touch-icon-144x144.png' }], + ['link', { rel: 'apple-touch-icon-precomposed', sizes: '60x60', href: '/apple-touch-icon-60x60.png' }], + ['link', { rel: 'apple-touch-icon-precomposed', sizes: '120x120', href: '/apple-touch-icon-120x120.png' }], + ['link', { rel: 'apple-touch-icon-precomposed', sizes: '76x76', href: '/apple-touch-icon-76x76.png' }], + ['link', { rel: 'apple-touch-icon-precomposed', sizes: '152x152', href: '/apple-touch-icon-152x152.png' }], + ['meta', { name: 'msapplication-TileColor', content: '#FFFFFF' }], + ['meta', { name: 'msapplication-TileImage', content: '/mstile-144x144.png' }], + ['meta', { name: 'msapplication-square70x70logo', content: '/mstile-70x70.png' }], + ['meta', { name: 'msapplication-square150x150logo', content: '/mstile-150x150.png' }], + ['meta', { name: 'msapplication-wide310x150logo', content: '/mstile-310x150.png' }], + ['meta', { name: 'msapplication-square310x310logo', content: '/mstile-310x310.png' }], + ['link', { rel: 'stylesheet', sizes: '152x152', href: 'https://fonts.googleapis.com/css?family=Lato:400,900' }], + ['meta', { name: 'google-site-verification', content: 'kTnsDBpHqtTNY8oscYxrQeeiNml2d2z-03Ct9wqeCeE' }], + // google analytics + [ + 'script', + { async: '', src: 'https://www.googletagmanager.com/gtag/js?id=UA-6850151-3' } + ], + [ + 'script', + {}, + `window.dataLayer = window.dataLayer || []; + function gtag(){dataLayer.push(arguments);} + gtag('js', new Date()); + + gtag('config', 'UA-6850151-3');` + ], + [ + 'script', + { id: 'restore-banner-preference' }, + ` + (() => { + const restore = (key, cls, def = false) => { + const saved = localStorage.getItem(key); + if (saved ? 
saved !== 'false' && new Date() < saved : def) { + document.documentElement.classList.add(cls); + } + }; + restore('survey-banner', 'banner-dismissed'); + })();`, + ] + ], + vite: { + server: { + fs: { + cachedChecks: false, + }, + } + } +}) diff --git a/runatlantis.io/.vitepress/navbars.ts b/runatlantis.io/.vitepress/navbars.ts new file mode 100644 index 0000000000..84e6cf6034 --- /dev/null +++ b/runatlantis.io/.vitepress/navbars.ts @@ -0,0 +1,9 @@ +const en = [ + { text: "Home", link: "/"}, + { text: "Guide", link: "/guide" }, + { text: "Docs", link: "/docs" }, + { text: "Contributing", link: "/contributing" }, + { text: "Blog", link: "https://medium.com/runatlantis" }, +]; + +export { en }; diff --git a/runatlantis.io/.vitepress/sidebars.ts b/runatlantis.io/.vitepress/sidebars.ts new file mode 100644 index 0000000000..1afacb11c2 --- /dev/null +++ b/runatlantis.io/.vitepress/sidebars.ts @@ -0,0 +1,100 @@ +const en = [ + { + text: "Guide", + link: "/guide", + collapsed: false, + items: [ + { text: "Test Drive", link: "/guide/test-drive" }, + { text: "Testing locally", link: "/guide/testing-locally" }, + ], + }, + { + text: "Docs", + link: "/docs", + collapsed: true, + items: [ + { + text: "Installing Atlantis", + collapsed: true, + items: [ + { text: "Installing Guide", link: "/docs/installation-guide" }, + { text: "Requirements", link: "/docs/requirements" }, + { text: "Git Host Access Credentials", link: "/docs/access-credentials" }, + { text: "Webhook Secrets", link: "/docs/webhook-secrets" }, + { text: "Deployment", link: "/docs/deployment" }, + { text: "Configuring Webhooks", link: "/docs/configuring-webhooks" }, + { text: "Provider Credentials", link: "/docs/provider-credentials" }, + ] + }, + { + text: "Configuring Atlantis", + collapsed: true, + items: [ + { text: "Overview", link: "/docs/configuring-atlantis" }, + { text: "Server Configuration", link: "/docs/server-configuration" }, + { text: "Server Side Repo Config", link: "/docs/server-side-repo-config" }, + { text: "Pre Workflow Hooks", link: "/docs/pre-workflow-hooks" }, + { text: "Post Workflow Hooks", link: "/docs/post-workflow-hooks" }, + { text: "Conftest Policy Checking", link: "/docs/policy-checking" }, + { text: "Custom Workflows", link: "/docs/custom-workflows" }, + { text: "Repo Level atlantis.yaml", link: "/docs/repo-level-atlantis-yaml" }, + { text: "Upgrading atlantis.yaml", link: "/docs/upgrading-atlantis-yaml" }, + { text: "Command Requirements", link: "/docs/command-requirements" }, + { text: "Checkout Strategy", link: "/docs/checkout-strategy" }, + { text: "Terraform Versions", link: "/docs/terraform-versions" }, + { text: "Terraform Cloud", link: "/docs/terraform-cloud" }, + { text: "Using Slack Hooks", link: "/docs/using-slack-hooks" }, + { text: "Stats", link: "/docs/stats" }, + { text: "FAQ", link: "/docs/faq" }, + ] + }, + { + text: "Using Atlantis", + collapsed: true, + items: [ + { text: "Overview", link: "/docs/using-atlantis" }, + { text: "API endpoints", link: "/docs/api-endpoints" }, + ] + }, + { + text: 'How Atlantis Works', + collapsed: true, + items: [ + { text: 'Overview', link: '/docs/how-atlantis-works', }, + { text: 'Locking', link: '/docs/locking', }, + { text: 'Autoplanning', link: '/docs/autoplanning', }, + { text: 'Automerging', link: '/docs/automerging', }, + { text: 'Security', link: '/docs/security', }, + ] + }, + { + text: 'Real-time Terraform Logs', + link: '/docs/streaming-logs', + }, + { + text: 'Troubleshooting', + collapsed: true, + items: [ + { text: 'HTTPS, SSL, TLS', 'link': 
'/docs/troubleshooting-https', }, + ] + }, + ], + }, + { + text: "Contributing", + link: "/contributing", + collapsed: false, + items: [ + { + text: 'Implementation Details', + items: [ + { text: "Events Controller", link: "/contributing/events-controller" }, + ] + }, + { text: "Glossary", link: "/contributing/glossary" }, + ] + + } +] + +export { en } diff --git a/runatlantis.io/.vitepress/theme/index.ts b/runatlantis.io/.vitepress/theme/index.ts new file mode 100644 index 0000000000..395964ae4b --- /dev/null +++ b/runatlantis.io/.vitepress/theme/index.ts @@ -0,0 +1,11 @@ +import DefaultTheme from "vitepress/theme"; +import { defineAsyncComponent, h } from 'vue'; + +export default { + ...DefaultTheme, + Layout() { + return h(DefaultTheme.Layout, null, { + 'layout-top': () => h(defineAsyncComponent(() => import('../components/Banner.vue'))) + }); + } +}; diff --git a/runatlantis.io/.vuepress/config.js b/runatlantis.io/.vuepress/config.js deleted file mode 100644 index 9658fc29a5..0000000000 --- a/runatlantis.io/.vuepress/config.js +++ /dev/null @@ -1,194 +0,0 @@ -import { googleAnalyticsPlugin } from '@vuepress/plugin-google-analytics' -import { docsearchPlugin } from '@vuepress/plugin-docsearch' -import { getDirname, path } from '@vuepress/utils' -import { defaultTheme, defineUserConfig } from 'vuepress' -import { sitemapPlugin } from 'vuepress-plugin-sitemap2'; - -const __dirname = getDirname(import.meta.url) - -export default defineUserConfig({ - alias: { - '@theme/Home.vue': path.resolve(__dirname, './theme/components/Home.vue'), - }, - locales: { - '/': { - lang: 'en-US', - title: 'Atlantis', - description: 'Atlantis: Terraform Pull Request Automation', - }, -/* - '/es/': { - lang: 'es-ES', - title: 'Atlantis', - description: 'Atlantis: AutomatizaciÃŗn de Pull Requests para Terraform', - }, -*/ - }, - plugins: [ - googleAnalyticsPlugin({ - id: 'UA-6850151-3', - }), - sitemapPlugin({ - hostname: 'https://runatlantis.io', - }), - docsearchPlugin({ - // We internally discussed how this API key is exposed in the code and decided - // that it is a non-issue because this API key can easily be extracted by - // looking at the browser dev tools since the key is used in the API requests. 
- apiKey: '3b733dff1539ca3a210775860301fa86', - indexName: 'runatlantis', - appId: 'BH4D9OD16A', - locales: { - '/': { - placeholder: 'Search Documentation', - translations: { - button: { - buttonText: 'Search Documentation', - }, - }, - }, - }, - }), - ], - head: [ - ['link', { rel: 'icon', type: 'image/png', href: '/favicon-196x196.png', sizes: '196x196' }], - ['link', { rel: 'icon', type: 'image/png', href: '/favicon-96x96.png', sizes: '96x96' }], - ['link', { rel: 'icon', type: 'image/png', href: '/favicon-32x32.png', sizes: '32x32' }], - ['link', { rel: 'icon', type: 'image/png', href: '/favicon-16x16.png', sizes: '16x16' }], - ['link', { rel: 'icon', type: 'image/png', href: '/favicon-128.png', sizes: '128x128' }], - ['link', { rel: 'apple-touch-icon-precomposed', sizes: '57x57', href: '/apple-touch-icon-57x57.png' }], - ['link', { rel: 'apple-touch-icon-precomposed', sizes: '114x114', href: '/apple-touch-icon-114x114.png' }], - ['link', { rel: 'apple-touch-icon-precomposed', sizes: '72x72', href: '/apple-touch-icon-72x72.png' }], - ['link', { rel: 'apple-touch-icon-precomposed', sizes: '144x144', href: '/apple-touch-icon-144x144.png' }], - ['link', { rel: 'apple-touch-icon-precomposed', sizes: '60x60', href: '/apple-touch-icon-60x60.png' }], - ['link', { rel: 'apple-touch-icon-precomposed', sizes: '120x120', href: '/apple-touch-icon-120x120.png' }], - ['link', { rel: 'apple-touch-icon-precomposed', sizes: '76x76', href: '/apple-touch-icon-76x76.png' }], - ['link', { rel: 'apple-touch-icon-precomposed', sizes: '152x152', href: '/apple-touch-icon-152x152.png' }], - ['meta', { name: 'msapplication-TileColor', content: '#FFFFFF' }], - ['meta', { name: 'msapplication-TileImage', content: '/mstile-144x144.png' }], - ['meta', { name: 'msapplication-square70x70logo', content: '/mstile-70x70.png' }], - ['meta', { name: 'msapplication-square150x150logo', content: '/mstile-150x150.png' }], - ['meta', { name: 'msapplication-wide310x150logo', content: '/mstile-310x150.png' }], - ['meta', { name: 'msapplication-square310x310logo', content: '/mstile-310x310.png' }], - ['link', { rel: 'stylesheet', sizes: '152x152', href: 'https://fonts.googleapis.com/css?family=Lato:400,900' }], - ['meta', { name: 'google-site-verification', content: 'kTnsDBpHqtTNY8oscYxrQeeiNml2d2z-03Ct9wqeCeE' }], - ], - themePlugins: { - activeHeaderLinks: false, - }, - theme: defaultTheme({ - docsBranch: "main", - logo: '/hero.png', - locales: { - '/': { - selectLanguageName: 'English', - navbar: [ - { text: 'Home', link: '/' }, - { text: 'Guide', link: '/guide/' }, - { text: 'Docs', link: '/docs/' }, - { text: 'Blog', link: 'https://medium.com/runatlantis' }, - ], - }, -/* - '/es/': { - selectLanguageName: 'Spanish', - navbar: [ - { text: 'Home', link: '/es/' }, - { text: 'Guide', link: '/es/guide/' }, - { text: 'Docs', link: '/es/docs/' }, - { text: 'Blog', link: 'https://medium.com/runatlantis' }, - ], - }, -*/ - }, - sidebar: { - '/guide/': [ - '', - 'test-drive', - 'testing-locally', - ], - '/docs/': [ - { - text: 'Installing Atlantis', - collapsible: true, - children: [ - 'installation-guide', - 'requirements', - 'access-credentials', - 'webhook-secrets', - 'deployment', - 'configuring-webhooks', - 'provider-credentials', - ] - }, - { - text: 'Configuring Atlantis', - collapsible: true, - children: [ - { - text: 'Overview', - link: 'configuring-atlantis', - }, - 'server-configuration', - 'server-side-repo-config', - 'pre-workflow-hooks', - 'post-workflow-hooks', - 'policy-checking', - 'custom-workflows', - 
'repo-level-atlantis-yaml', - 'upgrading-atlantis-yaml', - 'command-requirements', - 'checkout-strategy', - 'terraform-versions', - 'terraform-cloud', - 'using-slack-hooks', - 'stats', - 'faq', - ] - }, - { - text: 'Using Atlantis', - collapsible: true, - children: [ - { - text: 'Overview', - link: 'using-atlantis', - }, - 'api-endpoints', - ] - }, - { - text: 'How Atlantis Works', - collapsible: true, - children: [ - { - text: 'Overview', - link: 'how-atlantis-works', - }, - 'locking', - 'autoplanning', - 'automerging', - 'security', - ] - }, - { - text: 'Real-time Terraform Logs', - collapsible: true, - children: [ - 'streaming-logs', - ] - }, - { - text: 'Troubleshooting', - collapsible: true, - children: [ - 'troubleshooting-https', - ] - } - ] - }, - repo: 'runatlantis/atlantis', - docsDir: 'runatlantis.io', - editLink: true, - }) -}) diff --git a/runatlantis.io/.vuepress/public/_redirects b/runatlantis.io/.vuepress/public/_redirects deleted file mode 100644 index a025dc528b..0000000000 --- a/runatlantis.io/.vuepress/public/_redirects +++ /dev/null @@ -1,2 +0,0 @@ -/guide/getting-started.html /guide/ -/docs/atlantis-yaml-reference.html /docs/repo-level-atlantis-yaml.html diff --git a/runatlantis.io/.vuepress/public/certificate.svg b/runatlantis.io/.vuepress/public/certificate.svg deleted file mode 100644 index 17df5278b1..0000000000 --- a/runatlantis.io/.vuepress/public/certificate.svg +++ /dev/null @@ -1,59 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/runatlantis.io/.vuepress/public/checkmark.svg b/runatlantis.io/.vuepress/public/checkmark.svg deleted file mode 100644 index ccdc2f7404..0000000000 --- a/runatlantis.io/.vuepress/public/checkmark.svg +++ /dev/null @@ -1,12 +0,0 @@ - - - - - - - - - diff --git a/runatlantis.io/.vuepress/public/coding.svg b/runatlantis.io/.vuepress/public/coding.svg deleted file mode 100644 index 1f67eec776..0000000000 --- a/runatlantis.io/.vuepress/public/coding.svg +++ /dev/null @@ -1,42 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/runatlantis.io/.vuepress/public/list.svg b/runatlantis.io/.vuepress/public/list.svg deleted file mode 100644 index 8c5735e658..0000000000 --- a/runatlantis.io/.vuepress/public/list.svg +++ /dev/null @@ -1,49 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/runatlantis.io/.vuepress/public/mobile-workflow-min.png b/runatlantis.io/.vuepress/public/mobile-workflow-min.png deleted file mode 100644 index b8eea33cc2..0000000000 Binary files a/runatlantis.io/.vuepress/public/mobile-workflow-min.png and /dev/null differ diff --git a/runatlantis.io/.vuepress/public/powerful.svg b/runatlantis.io/.vuepress/public/powerful.svg deleted file mode 100644 index e179434b71..0000000000 --- a/runatlantis.io/.vuepress/public/powerful.svg +++ /dev/null @@ -1,18 +0,0 @@ - - - - - - - - - - - - - - - - - diff --git a/runatlantis.io/.vuepress/public/workflow-min.png b/runatlantis.io/.vuepress/public/workflow-min.png deleted file mode 100644 index 1c9e383c70..0000000000 Binary files a/runatlantis.io/.vuepress/public/workflow-min.png and /dev/null differ diff --git a/runatlantis.io/.vuepress/styles/index.scss b/runatlantis.io/.vuepress/styles/index.scss deleted file mode 100644 index 6daf19f232..0000000000 --- a/runatlantis.io/.vuepress/styles/index.scss +++ /dev/null @@ -1,243 +0,0 @@ -// 
https://v2.vuepress.vuejs.org/reference/default-theme/styles.html#style-file - -// colors -$textColor: var(--c-text); -$borderColor: var(--c-border); -$buttonTextColor: var(--c-badge-danger-text); -$buttonColor: var(--c-brand); -$buttonHoverColor: var(--c-brand-light); -$darkBackground: var(--c-bg-light); -$darkBackgroundBorder: var(--c-border-dark); - -// layout -$navbarHeight: 3.6rem; -$sidebarWidth: 20rem; -$contentWidth: 740px; - -// responsive breakpoints -$MQNarrow: 959px; -$MQMobile: 719px; -$MQMobileNarrow: 419px; - -$homeWidth: 960px; - -.container { - padding-top: 3.6rem; -} - -.home { - padding: 0 2rem; - max-width: $homeWidth; - margin: 0px auto 80px; - .hero { - text-align: center; - img { - max-height: 280px; - display: block; - margin: 3rem auto 1.5rem; - } - h1 { - font-size: 3rem; - } - h1, .description, .action { - margin: 1.8rem auto; - } - .description { - max-width: 35rem; - font-family: Lato, sans-serif; - font-size: 1.9rem; - line-height: 1.3; - } - .action { - display: inline; - } - .action-button { - display: inline-block; - font-size: 1.2rem; - color: $buttonTextColor; - cursor: pointer; - background-color: $buttonColor; - padding: 0.8rem 1.6rem; - border-radius: 4px; - transition: background-color .1s ease; - box-sizing: border-box; - margin: 0 10px; - &:hover { - background-color: $buttonHoverColor; - } - } - } - h2 { - border-bottom: none; - } - .features { - border-top: 1px solid $borderColor; - padding: 1.2rem 0; - margin-top: 0; - } - .footer { - padding: 2.5rem; - border-top: 1px solid $borderColor; - text-align: center; - } -} - -.getting-started-footer { - padding: 2.5rem 0; - margin: 0 auto; -} - -.workflow-container { - border-top: 2px solid $borderColor; -} - -.workflow { - text-align: center; - margin: 80px auto; - max-width: $homeWidth; - img { - width: 100%; - } - .mobile { - display: none; - } -} - -.benefits-container { - border-top: 1px solid $darkBackgroundBorder; - .benefit-container { - border-bottom: 1px solid $darkBackgroundBorder; - .title { - padding-top: 40px; - text-align: center; - } - &.-dark { - background-color: $darkBackground; - } - .benefit { - max-width: $homeWidth; - margin: 0 auto; - display: flex; - flex-flow: row wrap; - align-items: center; - .item { - flex-basis: 50%; - flex-grow: 1; - min-width: 250px; - .image { - padding: 40px; - text-align: center; - img { - max-height: 200px; - } - } - } - .description { - padding: 40px; - h2 { - border: none; - } - ul { - list-style-type: none; - padding-left: 0; - } - li { - display: flex; - align-items: center; - line-height: 25px; - margin-bottom: 20px; - } - .checkmark { - width: 20px; - margin-right: 10px; - vertical-align: middle; - align-self: baseline; - padding-top: 5px; - } - } - } - } -} - -@media (max-width: $MQMobile) { - .workflow { - .mobile { - display: block; - } - .desktop { - display: none; - } - } - - .benefits-container { - .benefit-container { - .benefit { - flex-direction: column; - .item { - &.image { - order: -1; - } - } - } - } - } -} - -@media (max-width: $MQMobileNarrow) { - .home { - padding-left: 1.5rem; - padding-right: 1.5rem; - .hero { - img { - max-height: 210px; - margin: 2rem auto 1.2rem; - } - h1 { - font-size: 2rem; - } - h1, .description, .action { - margin: 1.2rem auto; - } - .description { - font-size: 1.2rem; - } - .action-button { - font-size: 1rem; - padding: 0.6rem 1.2rem; - } - } - } -} - -.theme-container { - &.home-custom { - .hero { - h1 { - font-size: 64px; - font-family: Lato, sans-serif; - font-weight: 900; - } - img { - height: 
200px; - } - } - p { - &.description { - position: relative; - &:before { - position: absolute; - content: ''; - width: 40px; - height: 3px; - top: -19px; - left: 50%; - margin-left: -20px; - background: #f36; - } - } - } - } -} -.sidebar-heading { - font-size: inherit; -} diff --git a/runatlantis.io/.vuepress/styles/palette.scss b/runatlantis.io/.vuepress/styles/palette.scss deleted file mode 100644 index 7f406d4555..0000000000 --- a/runatlantis.io/.vuepress/styles/palette.scss +++ /dev/null @@ -1,4 +0,0 @@ -$accentColor: #0074db; -$textColor: #2c3e50; -$borderColor: #eaecef; -$codeBgColor: #282c34; diff --git a/runatlantis.io/.vuepress/theme/components/Home.vue b/runatlantis.io/.vuepress/theme/components/Home.vue deleted file mode 100644 index 271a574c85..0000000000 --- a/runatlantis.io/.vuepress/theme/components/Home.vue +++ /dev/null @@ -1,175 +0,0 @@ - - - diff --git a/runatlantis.io/.vuepress/theme/index.js b/runatlantis.io/.vuepress/theme/index.js deleted file mode 100644 index 85ad504429..0000000000 --- a/runatlantis.io/.vuepress/theme/index.js +++ /dev/null @@ -1,6 +0,0 @@ -// introduce custom home with navbar -// https://stackoverflow.com/a/60220684 -// https://vuepress.vuejs.org/theme/inheritance.html#usage -module.exports = { - extend: '@vuepress/theme-default' -} diff --git a/runatlantis.io/README.md b/runatlantis.io/README.md deleted file mode 100644 index 5772c9faf7..0000000000 --- a/runatlantis.io/README.md +++ /dev/null @@ -1,9 +0,0 @@ ---- -home: true -pageClass: home-custom -heroImage: /hero.png -heroText: Atlantis -actionText: Get Started → -actionLink: /guide/ -title: Terraform Pull Request Automation ---- diff --git a/runatlantis.io/contributing.md b/runatlantis.io/contributing.md new file mode 100644 index 0000000000..3d8e24de16 --- /dev/null +++ b/runatlantis.io/contributing.md @@ -0,0 +1,17 @@ +--- +aside: false +--- +# Atlantis Contributing Documentation + +These docs are for users who want to contribute to the Atlantis project. This +can vary from writing documentation, helping the community on Slack, discussing +issues, or writing code. + +:::tip Looking to get started or use Atlantis? +If you're new, check out the [Guide](./guide.md) or the +[Documentation](./docs.md). +::: + +## Next Steps + +- [Events Controller](./contributing/events-controller.md)  â€“  How do the events work? diff --git a/runatlantis.io/contributing/events-controller.md b/runatlantis.io/contributing/events-controller.md new file mode 100644 index 0000000000..8e2fe6a19e --- /dev/null +++ b/runatlantis.io/contributing/events-controller.md @@ -0,0 +1,72 @@ +# Events Controller + +Webhooks are the primary interaction between the Version Control System (VCS) +and Atlantis. Each VCS sends the requests to the `/events` endpoint. The +implementation of this endpoint can be found in the +[events_controller.go](https://github.com/runatlantis/atlantis/blob/main/server/controllers/events/events_controller.go) +file. This file contains the Post function `func (e *VCSEventsController) +Post(w http.ResponseWriter, r *http.Request`)` that parses the request +according to the configured VCS. + +Atlantis currently handles one of the following events: + +- Comment Event +- Pull Request Event + +All the other events are ignored. + +![Events Controller flow](./images/events-controller.png) + +## Comment Event + +This event is triggered whenever a user enters a comment on the Pull Request, +Merge Request, or whatever it's called for the respective VCS. 
After parsing the +VCS-specific request, the code calls the `handleCommentEvent` function, which +then passes the processing to the `handleCommentEvent` function in the +[command_runner.go](https://github.com/runatlantis/atlantis/blob/main/server/events/command_runner.go) +file. This function first calls the pre-workflow hooks, then executes one of the +below-listed commands and, at last, the post-workflow hooks. + +- [plan_command_runner.go](https://github.com/runatlantis/atlantis/blob/main/server/events/plan_command_runner.go) +- [apply_command_runner.go](https://github.com/runatlantis/atlantis/blob/main/server/events/apply_command_runner.go) +- [approve_policies_command_runner.go](https://github.com/runatlantis/atlantis/blob/main/server/events/approve_policies_command_runner.go) +- [unlock_command_runner.go](https://github.com/runatlantis/atlantis/blob/main/server/events/unlock_command_runner.go) +- [version_command_runner.go](https://github.com/runatlantis/atlantis/blob/main/server/events/version_command_runner.go) +- [import_command_runner.go](https://github.com/runatlantis/atlantis/blob/main/server/events/import_command_runner.go) +- [state_command_runner.go](https://github.com/runatlantis/atlantis/blob/main/server/events/state_command_runner.go) + +## Pull Request Event + +To handle comment events on Pull Requests, they must be created first. Atlantis +also allows the running of commands for certain Pull Requests events. + +
+ Pull Request Webhooks + +The list below links to the supported VCSs and their Pull Request Webhook +documentation. + +- [Azure DevOps Pull Request Created](https://learn.microsoft.com/en-us/azure/devops/service-hooks/events?view=azure-devops#pull-request-created) +- [BitBucket Pull Request](https://support.atlassian.com/bitbucket-cloud/docs/event-payloads/#Pull-request-events) +- [GitHub Pull Request](https://docs.github.com/en/webhooks/webhook-events-and-payloads#pull_request) +- [GitLab Merge Request](https://docs.gitlab.com/ee/user/project/integrations/webhook_events.html#merge-request-events) +- [Gitea Webhooks](https://docs.gitea.com/next/usage/webhooks) + +
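For readers following the webhook flow above, a rough sketch of what one of these deliveries looks like on the wire may help. It is illustrative only: `$ATLANTIS_HOST` is a placeholder, the JSON body is heavily trimmed compared to a real GitHub `pull_request` payload, and Atlantis will reject the request when a webhook secret is configured, since nothing here signs the payload.

```shell
# Illustrative only: replay a trimmed GitHub pull_request event against the
# /events endpoint. A real delivery carries the full payload plus signature
# headers (e.g. X-Hub-Signature-256) that Atlantis verifies when a webhook
# secret is configured.
curl -X POST "https://$ATLANTIS_HOST/events" \
  -H "Content-Type: application/json" \
  -H "X-GitHub-Event: pull_request" \
  -d '{"action": "opened", "number": 1}'
```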
+ +The following list shows the supported events: + +- Opened Pull Request +- Updated Pull Request +- Closed Pull Request +- Other Pull Request event + +The `RunAutoPlanCommand` function in the +[command_runner.go](https://github.com/runatlantis/atlantis/blob/main/server/events/command_runner.go) +file is called for the _Open_ and _Update_ Pull Request events. When enabled on +the project, this automatically runs the `plan` for the specific repository. + +Whenever a Pull Request is closed, the `CleanUpPull` function in the +[instrumented_pull_closed_executor.go](https://github.com/runatlantis/atlantis/blob/main/server/events/instrumented_pull_closed_executor.go) +file is called. This function cleans up all the closed Pull Request files, +locks, and other related information. diff --git a/runatlantis.io/contributing/glossary.md b/runatlantis.io/contributing/glossary.md new file mode 100644 index 0000000000..99c1e73287 --- /dev/null +++ b/runatlantis.io/contributing/glossary.md @@ -0,0 +1,26 @@ +# Glossary + +The Atlantis community uses many words and phrases to work more efficiently. +You will find the most common ones and their meaning on this page. + +## Pull / Merge Request Event + +The different VCSs have different names for merging changes. Atlantis uses the +name Pull Request as the abstraction. The VCS provider implements this +abstraction and forwards the call to the respective function. + +## VCS + +VCS stands for Version Control System. + +Atlantis supports only git as a Version Control System. However, there is +support for multiple VCS Providers. Currently, it supports the following +providers: + +- [Azure DevOps](https://azure.microsoft.com/en-us/products/devops) +- [BitBucket](https://bitbucket.org/) +- [GitHub](https://github.com/) +- [GitLab](https://gitlab.com/) +- [Gitea](https://gitea.com/) + +The term VCS is used for both git and the different VCS providers. diff --git a/runatlantis.io/contributing/images/events-controller.png b/runatlantis.io/contributing/images/events-controller.png new file mode 100644 index 0000000000..e81c0c6527 Binary files /dev/null and b/runatlantis.io/contributing/images/events-controller.png differ diff --git a/runatlantis.io/docs.md b/runatlantis.io/docs.md new file mode 100644 index 0000000000..23b27f1c32 --- /dev/null +++ b/runatlantis.io/docs.md @@ -0,0 +1,18 @@ +--- +aside: false +--- +# Atlantis Documentation + +These docs are for users that are ready to get Atlantis installed and start using it. + +:::tip Looking to get started? +If you're new here, check out the [Guide](./guide.md) +where you can try our [Test Drive](./guide/test-drive.md) or [Run Atlantis Locally](./guide/testing-locally.md). +::: + +## Next Steps + +* [Installing Atlantis](./docs/installation-guide.md)  â€“  Get Atlantis up and running +* [Configuring Atlantis](./docs/configuring-atlantis.md)  â€“  Configure how Atlantis works for your specific use-cases +* [Using Atlantis](./docs/using-atlantis.md)  â€“  How do you use Atlantis? +* [How Atlantis Works](./docs/how-atlantis-works.md)  â€“  Internals of what Atlantis is doing diff --git a/runatlantis.io/docs/README.md b/runatlantis.io/docs/README.md deleted file mode 100644 index 5527692cf5..0000000000 --- a/runatlantis.io/docs/README.md +++ /dev/null @@ -1,14 +0,0 @@ -# Atlantis Documentation - -These docs are for users that are ready to get Atlantis installed and start using it. - -:::tip Looking to get started? 
-If you're new here, check out the [Guide](/guide/) -where you can try our [Test Drive](/guide/test-drive.html) or [Run Atlantis Locally](/guide/testing-locally.html). -::: - -### Next Steps -* [Installing Atlantis](/docs/installation-guide.html)  â€“  Get Atlantis up and running -* [Configuring Atlantis](configuring-atlantis.html)  â€“  Configure how Atlantis works for your specific use-cases -* [Using Atlantis](using-atlantis.html)  â€“  How do you use Atlantis? -* [How Atlantis Works](how-atlantis-works.html)  â€“  Internals of what Atlantis is doing diff --git a/runatlantis.io/docs/access-credentials.md b/runatlantis.io/docs/access-credentials.md index da84e34064..7d7410ee03 100644 --- a/runatlantis.io/docs/access-credentials.md +++ b/runatlantis.io/docs/access-credentials.md @@ -1,10 +1,11 @@ # Git Host Access Credentials + This page describes how to create credentials for your Git host (GitHub, GitLab, Gitea, Bitbucket, or Azure DevOps) that Atlantis will use to make API calls. -[[toc]] ## Create an Atlantis user (optional) + We recommend creating a new user named **@atlantis** (or something close) or using a dedicated CI user. This isn't required (you can use an existing user or github app credentials), however all the comments that Atlantis writes @@ -14,8 +15,10 @@ will come from that user so it might be confusing if its coming from a personal

An example comment coming from the @atlantisbot user

## Generating an Access Token + Once you've created a new user (or decided to use an existing one), you need to generate an access token. Read on for the instructions for your specific Git host: + * [GitHub](#github-user) * [GitHub app](#github-app) * [GitLab](#gitlab) @@ -25,9 +28,10 @@ generate an access token. Read on for the instructions for your specific Git hos * [Azure DevOps](#azure-devops) ### GitHub user -- Create a [Personal Access Token](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token#creating-a-fine-grained-personal-access-token) -- Create the token with **repo** scope -- Record the access token + +* Create a [Personal Access Token](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token#creating-a-fine-grained-personal-access-token) +* Create the token with **repo** scope +* Record the access token ::: warning Your Atlantis user must also have "Write permissions" (for repos in an organization) or be a "Collaborator" (for repos in a user account) to be able to set commit statuses: ![Atlantis status](./images/status.png) @@ -36,18 +40,18 @@ Your Atlantis user must also have "Write permissions" (for repos in an organizat ### GitHub app #### Create the GitHub App Using Atlantis + ::: warning Available in Atlantis versions **newer** than 0.13.0. ::: +* Start Atlantis with fake github username and token (`atlantis server --gh-user fake --gh-token fake --repo-allowlist 'github.com/your-org/*' --atlantis-url https://$ATLANTIS_HOST`). If installing as an **Organization**, remember to add `--gh-org your-github-org` to this command. +* Visit `https://$ATLANTIS_HOST/github-app/setup` and click on **Setup** to create the app on GitHub. You'll be redirected back to Atlantis +* A link to install your app, along with its secrets, will be shown on the screen. Record your app's credentials and install your app for your user/org by following said link. +* Create a file with the contents of the GitHub App Key, e.g. `atlantis-app-key.pem` +* Restart Atlantis with new flags: `atlantis server --gh-app-id --gh-app-key-file atlantis-app-key.pem --gh-webhook-secret --write-git-creds --repo-allowlist 'github.com/your-org/*' --atlantis-url https://$ATLANTIS_HOST`. -- Start Atlantis with fake github username and token (`atlantis server --gh-user fake --gh-token fake --repo-allowlist 'github.com/your-org/*' --atlantis-url https://$ATLANTIS_HOST`). If installing as an **Organization**, remember to add `--gh-org your-github-org` to this command. -- Visit `https://$ATLANTIS_HOST/github-app/setup` and click on **Setup** to create the app on GitHub. You'll be redirected back to Atlantis -- A link to install your app, along with its secrets, will be shown on the screen. Record your app's credentials and install your app for your user/org by following said link. -- Create a file with the contents of the GitHub App Key, e.g. `atlantis-app-key.pem` -- Restart Atlantis with new flags: `atlantis server --gh-app-id --gh-app-key-file atlantis-app-key.pem --gh-webhook-secret --write-git-creds --repo-allowlist 'github.com/your-org/*' --atlantis-url https://$ATLANTIS_HOST`. - - NOTE: Instead of using a file for the GitHub App Key you can also pass the key value directly using `--gh-app-key`. You can also create a config file instead of using flags. See [Server Configuration](/docs/server-configuration.html#config-file). 
+ NOTE: Instead of using a file for the GitHub App Key you can also pass the key value directly using `--gh-app-key`. You can also create a config file instead of using flags. See [Server Configuration](server-configuration.md#config-file). ::: warning Only a single installation per GitHub App is supported at the moment. @@ -59,13 +63,13 @@ GitHub App handles the webhook calls by itself, hence there is no need to create #### Manually Creating the GitHub app -- Create the GitHub app as an Administrator - - Ensure the app is registered / installed with the organization / user - - See the GitHub app [documentation](https://docs.github.com/en/apps/creating-github-apps/about-creating-github-apps/about-creating-github-apps) -- Create a file with the contents of the GitHub App Key, e.g. `atlantis-app-key.pem` -- Start Atlantis with the following flags: `atlantis server --gh-app-id --gh-installation-id --gh-app-key-file atlantis-app-key.pem --gh-webhook-secret --write-git-creds --repo-allowlist 'github.com/your-org/*' --atlantis-url https://$ATLANTIS_HOST`. +* Create the GitHub app as an Administrator + * Ensure the app is registered / installed with the organization / user + * See the GitHub app [documentation](https://docs.github.com/en/apps/creating-github-apps/about-creating-github-apps/about-creating-github-apps) +* Create a file with the contents of the GitHub App Key, e.g. `atlantis-app-key.pem` +* Start Atlantis with the following flags: `atlantis server --gh-app-id --gh-installation-id --gh-app-key-file atlantis-app-key.pem --gh-webhook-secret --write-git-creds --repo-allowlist 'github.com/your-org/*' --atlantis-url https://$ATLANTIS_HOST`. - NOTE: Instead of using a file for the GitHub App Key you can also pass the key value directly using `--gh-app-key`. You can also create a config file instead of using flags. See [Server Configuration](/docs/server-configuration.html#config-file). + NOTE: Instead of using a file for the GitHub App Key you can also pass the key value directly using `--gh-app-key`. You can also create a config file instead of using flags. See [Server Configuration](server-configuration.md#config-file). ::: tip NOTE Manually installing the GitHub app means that the credentials can be shared by many Atlantis installations. This has the benefit of centralizing repository access for shared modules / code. @@ -93,56 +97,62 @@ Since v0.19.7, a new permission for `Administration` has been added. If you have Since v0.22.3, a new permission for `Members` has been added, which is required for features that apply permissions to an organizations team members rather than individual users. Like the `Administration` permission above, updating Atlantis will not automatically add this permission, so if you wish to use features that rely on checking team membership you will need to add this manually. 
::: -| Type | Access | -| --------------- | ------------------- | -| Administration | Read-only | -| Checks | Read and write | -| Commit statuses | Read and write | -| Contents | Read and write | -| Issues | Read and write | -| Metadata | Read-only (default) | -| Pull requests | Read and write | -| Webhooks | Read and write | -| Members | Read-only | +| Type | Access | +| --------------- | ------------------- | +| Administration | Read-only | +| Checks | Read and write | +| Commit statuses | Read and write | +| Contents | Read and write | +| Issues | Read and write | +| Metadata | Read-only (default) | +| Pull requests | Read and write | +| Webhooks | Read and write | +| Members | Read-only | ### GitLab -- Follow: [https://docs.gitlab.com/ce/user/profile/personal_access_tokens.html#create-a-personal-access-token](https://docs.gitlab.com/ce/user/profile/personal_access_tokens.html#create-a-personal-access-token) -- Create a token with **api** scope -- Record the access token + +* Follow: [GitLab: Create a personal access token](https://docs.gitlab.com/ce/user/profile/personal_access_tokens.html#create-a-personal-access-token) +* Create a token with **api** scope +* Record the access token ### Gitea -- Go to "Profile and Settings" > "Settings" in Gitea (top-right) -- Go to "Applications" under "User Settings" in Gitea -- Create a token under the "Manage Access Tokens" with the following permissions: - - issue: Read and Write - - repository: Read and Write - - user: Read -- Record the access token + +* Go to "Profile and Settings" > "Settings" in Gitea (top-right) +* Go to "Applications" under "User Settings" in Gitea +* Create a token under the "Manage Access Tokens" with the following permissions: + * issue: Read and Write + * repository: Read and Write + * user: Read +* Record the access token ### Bitbucket Cloud (bitbucket.org) -- Create an App Password by following [https://support.atlassian.com/bitbucket-cloud/docs/create-an-app-password/](https://support.atlassian.com/bitbucket-cloud/docs/create-an-app-password/) -- Label the password "atlantis" -- Select **Pull requests**: **Read** and **Write** so that Atlantis can read your pull requests and write comments to them -- Record the access token + +* Create an App Password by following [BitBucket Cloud: Create an app password](https://support.atlassian.com/bitbucket-cloud/docs/create-an-app-password/) +* Label the password "atlantis" +* Select **Pull requests**: **Read** and **Write** so that Atlantis can read your pull requests and write comments to them +* Record the access token ### Bitbucket Server (aka Stash) -- Click on your avatar in the top right and select **Manage account** -- Click **Personal access tokens** in the sidebar -- Click **Create a token** -- Name the token **atlantis** -- Give the token **Read** Project permissions and **Write** Pull request permissions -- Click **Create** and record the access token + +* Click on your avatar in the top right and select **Manage account** +* Click **Personal access tokens** in the sidebar +* Click **Create a token** +* Name the token **atlantis** +* Give the token **Read** Project permissions and **Write** Pull request permissions +* Click **Create** and record the access token NOTE: Atlantis will send the token as a [Bearer Auth to the Bitbucket API](https://confluence.atlassian.com/bitbucketserver/http-access-tokens-939515499.html#HTTPaccesstokens-UsingHTTPaccesstokens) instead of using Basic Auth. 
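Because Atlantis authenticates to Bitbucket Server with a Bearer header, a quick way to sanity-check the token before handing it to Atlantis is a request like the sketch below. The hostname is a placeholder and `$BITBUCKET_TOKEN` is assumed to hold the access token created above.

```shell
# Sketch: confirm the HTTP access token works with Bearer auth (the same
# scheme Atlantis uses). Replace the hostname with your Bitbucket Server URL.
curl --fail -sS \
  -H "Authorization: Bearer $BITBUCKET_TOKEN" \
  "https://bitbucket.example.com/rest/api/1.0/projects"
```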
### Azure DevOps -- Create a Personal access token by following [https://docs.microsoft.com/en-us/azure/devops/organizations/accounts/use-personal-access-tokens-to-authenticate?view=azure-devops](https://docs.microsoft.com/en-us/azure/devops/organizations/accounts/use-personal-access-tokens-to-authenticate?view=azure-devops) -- Label the password "atlantis" -- The minimum scopes required for this token are: - - Code (Read & Write) - - Code (Status) - - Member Entitlement Management (Read) -- Record the access token + +* Create a Personal access token by following [Azure DevOps: Use personal access tokens to authenticate](https://docs.microsoft.com/en-us/azure/devops/organizations/accounts/use-personal-access-tokens-to-authenticate?view=azure-devops) +* Label the password "atlantis" +* The minimum scopes required for this token are: + * Code (Read & Write) + * Code (Status) + * Member Entitlement Management (Read) +* Record the access token ## Next Steps -Once you've got your user and access token, you're ready to create a webhook secret. See [Creating a Webhook Secret](webhook-secrets.html). + +Once you've got your user and access token, you're ready to create a webhook secret. See [Creating a Webhook Secret](webhook-secrets.md). diff --git a/runatlantis.io/docs/api-endpoints.md b/runatlantis.io/docs/api-endpoints.md index 96dd6d0b51..ce622979da 100644 --- a/runatlantis.io/docs/api-endpoints.md +++ b/runatlantis.io/docs/api-endpoints.md @@ -9,7 +9,7 @@ To enable the API endpoints, `api-secret` should be configured. :::tip Prerequisites -* Set `api-secret` as part of the [Server Configuration](server-configuration.html#api-secret) +* Set `api-secret` as part of the [Server Configuration](server-configuration.md#api-secret) * Pass `X-Atlantis-Token` with the same secret in the request header ::: @@ -17,22 +17,22 @@ To enable the API endpoints, `api-secret` should be configured. #### Description -Execute [atlantis plan](using-atlantis.html#atlantis-plan) on the specified repository. +Execute [atlantis plan](using-atlantis.md#atlantis-plan) on the specified repository. #### Parameters -| Name | Type | Required | Description | -|------------|-------------------------------------|----------|------------------------------------------| -| Repository | string | Yes | Name of the Terraform repository | -| Ref | string | Yes | Git reference, like a branch name | -| Type | string | Yes | Type of the VCS provider (Github/Gitlab) | -| Paths | [ [Path](api-endpoints.html#path) ] | Yes | Paths to the projects to run the plan | -| PR | int | No | Pull Request number | +| Name | Type | Required | Description | +|------------|---------|----------|------------------------------------------| +| Repository | string | Yes | Name of the Terraform repository | +| Ref | string | Yes | Git reference, like a branch name | +| Type | string | Yes | Type of the VCS provider (Github/Gitlab) | +| Paths | Path | Yes | Paths to the projects to run the plan | +| PR | int | No | Pull Request number | -##### Path +#### Path -Similar to the [Options](using-atlantis.html#options) of `atlantis plan`. Path specifies which directory/workspace -within the repository to run the plan. +Similar to the [Options](using-atlantis.md#options) of `atlantis plan`. Path specifies which directory/workspace +within the repository to run the plan. At least one of `Directory` or `Workspace` should be specified. 
| Name | Type | Required | Description | @@ -92,22 +92,22 @@ curl --request POST 'https:///api/plan' \ #### Description -Execute [atlantis apply](using-atlantis.html#atlantis-apply) on the specified repository. +Execute [atlantis apply](using-atlantis.md#atlantis-apply) on the specified repository. #### Parameters -| Name | Type | Required | Description | -|------------|---------------------------------------|----------|------------------------------------------| -| Repository | string | Yes | Name of the Terraform repository | -| Ref | string | Yes | Git reference, like a branch name | -| Type | string | Yes | Type of the VCS provider (Github/Gitlab) | -| Paths | [ [Path](api-endpoints.html#path-1) ] | Yes | Paths to the projects to run the apply | -| PR | int | No | Pull Request number | +| Name | Type | Required | Description | +|------------|--------|----------|------------------------------------------| +| Repository | string | Yes | Name of the Terraform repository | +| Ref | string | Yes | Git reference, like a branch name | +| Type | string | Yes | Type of the VCS provider (Github/Gitlab) | +| Paths | Path | Yes | Paths to the projects to run the apply | +| PR | int | No | Pull Request number | -##### Path +#### Path -Similar to the [Options](using-atlantis.html#options-1) of `atlantis apply`. Path specifies which directory/workspace -within the repository to run the apply. +Similar to the [Options](using-atlantis.md#options-1) of `atlantis apply`. Path specifies which directory/workspace +within the repository to run the apply. At least one of `Directory` or `Workspace` should be specified. | Name | Type | Required | Description | diff --git a/runatlantis.io/docs/apply-requirements.md b/runatlantis.io/docs/apply-requirements.md index 870ac4972e..166931851d 100644 --- a/runatlantis.io/docs/apply-requirements.md +++ b/runatlantis.io/docs/apply-requirements.md @@ -1,5 +1,5 @@ # Apply Requirements :::warning REDIRECT -This page is moved to [Command Requirements](/docs/command-requirements.html). +This page is moved to [Command Requirements](command-requirements.md). ::: diff --git a/runatlantis.io/docs/automerging.md b/runatlantis.io/docs/automerging.md index 1e0b21ba77..2716a572ee 100644 --- a/runatlantis.io/docs/automerging.md +++ b/runatlantis.io/docs/automerging.md @@ -1,44 +1,53 @@ # Automerging + Atlantis can be configured to automatically merge a pull request after all plans have been successfully applied. - ![Automerge](./images/automerge.png) ## How To Enable + Automerging can be enabled either by: + 1. Passing the `--automerge` flag to `atlantis server`. This sets the parameter globally; however, explicit declaration in the repo config will be respected and take priority. 1. Setting `automerge: true` in the repo's `atlantis.yaml` file: + ```yaml version: 3 automerge: true projects: - dir: . ``` + :::tip NOTE If a repo has an `atlantis.yaml` file, then each project in the repo needs to be configured under the `projects` key. ::: ## How to Disable + If automerge is enabled, you can disable it for a single `atlantis apply` command with the `--auto-merge-disabled` option. ## Requirements ### All Plans Must Succeed + When automerge is enabled, **all plans** in a pull request **must succeed** before **any** plans can be applied. For example, imagine this scenario: + 1. I open a pull request that makes changes to two Terraform projects, in `dir1/` and `dir2/`. 1. The plan for `dir2/` fails because my Terraform syntax is wrong. 
In this scenario, I can't run -``` + +```shell atlantis apply -d dir1 ``` + Even though that plan succeeded, because **all** plans must succeed for **any** plans to be saved. @@ -47,8 +56,9 @@ autoplan. Then I will be able to apply both plans. ### All Plans must be applied -If multiple projects/dirs/workspaces are configured to be planned automatically, +If multiple projects/dirs/workspaces are configured to be planned automatically, then they should all be applied before Atlantis automatically merges the PR. ## Permissions + The Atlantis VCS user must have the ability to merge pull requests. diff --git a/runatlantis.io/docs/autoplanning.md b/runatlantis.io/docs/autoplanning.md index 2183219703..b4657d801a 100644 --- a/runatlantis.io/docs/autoplanning.md +++ b/runatlantis.io/docs/autoplanning.md @@ -1,8 +1,10 @@ # Autoplanning + On any **new** pull request or **new commit** to an existing pull request, Atlantis will attempt to run `terraform plan` in the directories it thinks hold modified Terraform projects. The algorithm it uses is as follows: + 1. Get list of all modified files in pull request 1. Filter to those containing `.tf` 1. Get the directories that those files are in @@ -11,8 +13,10 @@ The algorithm it uses is as follows: contains a `main.tf` run plan in that directory, otherwise ignore the change (see below for exceptions). ## Example + Given the directory structure: -``` + +```plain . ├── modules │   └── module1 @@ -26,21 +30,25 @@ Given the directory structure: * If `project1/main.tf` were modified, we would run `plan` in `project1` * If `modules/module1/main.tf` were modified, we would not automatically run `plan` because we couldn't determine the location of the terraform project - * You could use an [atlantis.yaml](repo-level-atlantis-yaml.html#configuring-planning) file to specify which projects to plan when this module changed - * You could enable [module autoplanning](server-configuration.html#autoplan-modules) which indexes projects to their local module dependencies. - * Or you could manually plan with `atlantis plan -d ` + * You could use an [atlantis.yaml](repo-level-atlantis-yaml.md#configuring-planning) file to specify which projects to plan when this module changed + * You could enable [module autoplanning](server-configuration.md#autoplan-modules) which indexes projects to their local module dependencies. + * Or you could manually plan with `atlantis plan -d ` * If `project1/modules/module1/main.tf` were modified, we would look one level above `project1/modules` into `project1/`, see that there was a `main.tf` file and so run plan in `project1/` ## Bitbucket-Specific Notes + Bitbucket does not have a webhook that triggers only upon a new PR or commit. To fix this we cache the last commit to see if it has changed. If the cache is emptied, Atlantis will think your commit is new and you may see extra plans. This scenario can happen if: + * Atlantis restarts * You are running multiple Atlantis instances behind a load balancer ## Customizing + If you would like to customize how Atlantis determines which directory to run in or disable it all together you need to create an `atlantis.yaml` file. 
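For instance, a minimal repo-level `atlantis.yaml` could disable autoplanning for one project and widen the trigger paths for another. This is only a sketch; the directory names follow the example above and should be adjusted to your repo:

```yaml
version: 3
projects:
# Never autoplan this project; it must be planned manually with `atlantis plan -d project1`.
- dir: project1
  autoplan:
    enabled: false
# Also plan this project when anything under the shared modules directory changes.
- dir: project2
  autoplan:
    when_modified: ["*.tf*", "../modules/**/*.tf"]
    enabled: true
```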
See -* [Disabling Autoplanning](repo-level-atlantis-yaml.html#disabling-autoplanning) -* [Configuring Planning](repo-level-atlantis-yaml.html#configuring-planning) + +* [Disabling Autoplanning](repo-level-atlantis-yaml.md#disabling-autoplanning) +* [Configuring Planning](repo-level-atlantis-yaml.md#configuring-planning) diff --git a/runatlantis.io/docs/checkout-strategy.md b/runatlantis.io/docs/checkout-strategy.md index 066f7444f0..5c38586a4c 100644 --- a/runatlantis.io/docs/checkout-strategy.md +++ b/runatlantis.io/docs/checkout-strategy.md @@ -7,6 +7,7 @@ variable that get passed to the `atlantis server` command. Atlantis supports `branch` and `merge` strategies. ## Branch + If set to `branch` (the default), Atlantis will check out the source branch of the pull request. @@ -17,6 +18,7 @@ If the pull request was asking to merge `branch` into `main`, Atlantis would check out `branch` at commit `C3`. ## Merge + The problem with the `branch` strategy, is that if users push branches that are out of date with `main`, then their `terraform plan` could be deleting some resources that were configured in the main branch. @@ -49,9 +51,9 @@ commit is pushed to `main` **after** Atlantis runs `plan`, nothing will happen. To optimize cloning time, Atlantis can perform a shallow clone by specifying the `--checkout-depth` flag. The cloning is performed in a following manner: -- Shallow clone of the default branch is performed with depth of `--checkout-depth` value of zero (full clone). -- `branch` is retrieved, including the same amount of commits. -- Merge base of the default branch and `branch` is checked for existence in the shallow clone. -- If the merge base is not present, it means that either of the branches are ahead of the merge base by more than `--checkout-depth` commits. In this case full repo history is fetched. +* Shallow clone of the default branch is performed with depth of `--checkout-depth` value of zero (full clone). +* `branch` is retrieved, including the same amount of commits. +* Merge base of the default branch and `branch` is checked for existence in the shallow clone. +* If the merge base is not present, it means that either of the branches are ahead of the merge base by more than `--checkout-depth` commits. In this case full repo history is fetched. -If the commit history often diverges by more than the default checkout depth then the `--checkout-depth` flag should be tuned to avoid full fetches. \ No newline at end of file +If the commit history often diverges by more than the default checkout depth then the `--checkout-depth` flag should be tuned to avoid full fetches. diff --git a/runatlantis.io/docs/command-requirements.md b/runatlantis.io/docs/command-requirements.md index e3aea4ea21..bbf12ede00 100644 --- a/runatlantis.io/docs/command-requirements.md +++ b/runatlantis.io/docs/command-requirements.md @@ -1,7 +1,7 @@ # Command Requirements -[[toc]] ## Intro + Atlantis requires certain conditions be satisfied **before** `atlantis apply` and `atlantis import` commands can be run: @@ -10,31 +10,41 @@ commands can be run: * [UnDiverged](#undiverged) - requires pull requests to be ahead of the base branch ## What Happens If The Requirement Is Not Met? + If the requirement is not met, users will see an error if they try to run `atlantis apply`: ![Mergeable Apply Requirement](./images/apply-requirement.png) ## Supported Requirements + ### Approved + The `approved` requirement will prevent applies unless the pull request is approved by at least one person other than the author. 
#### Usage + The `approved` requirement by: + 1. Creating a `repos.yaml` file with the `apply_requirements` key: + ```yaml repos: - id: /.*/ apply_requirements: [approved] ``` + 1. Or by allowing an `atlantis.yaml` file to specify the `apply_requirements` key in the `repos.yaml` config: - #### repos.yaml + + **repos.yaml** + ```yaml repos: - id: /.*/ allowed_overrides: [apply_requirements] ``` - #### atlantis.yaml + **atlantis.yaml** + ```yaml version: 3 projects: @@ -43,7 +53,9 @@ The `approved` requirement by: ``` #### Meaning + Each VCS provider has different rules around who can approve: + * **GitHub** – **Any user with read permissions** to the repo can approve a pull request * **GitLab** – The user who can approve can be set in the [repo settings](https://docs.gitlab.com/ee/user/project/merge_requests/approvals/) * **Bitbucket Cloud (bitbucket.org)** – A user can approve their own pull request but @@ -57,11 +69,15 @@ To require **certain people** to approve the pull request, look at the ::: ### Mergeable + The `mergeable` requirement will prevent applies unless a pull request is able to be merged. #### Usage + Set the `mergeable` requirement by: + 1. Creating a `repos.yaml` file with the `apply_requirements` key: + ```yaml repos: - id: /.*/ @@ -69,14 +85,17 @@ Set the `mergeable` requirement by: ``` 1. Or by allowing an `atlantis.yaml` file to specify `plan_requirements`, `apply_requirements` and `import_requirements` keys in the `repos.yaml` config: - #### repos.yaml + + **repos.yaml** + ```yaml repos: - id: /.*/ allowed_overrides: [plan_requirements, apply_requirements, import_requirements] ``` - #### atlantis.yaml + **atlantis.yaml** + ```yaml version: 3 projects: @@ -87,25 +106,28 @@ Set the `mergeable` requirement by: ``` #### Meaning + Each VCS provider has a different concept of "mergeability": ::: warning Some VCS providers have a feature for branch protection to control "mergeability". To use it, limit the base branch so to not bypass the branch protection. -See also the `branch` keyword in [Server Side Repo Config](server-side-repo-config.html#reference) for more details. +See also the `branch` keyword in [Server Side Repo Config](server-side-repo-config.md#reference) for more details. ::: #### GitHub + In GitHub, if you're not using [Protected Branches](https://docs.github.com/en/repositories/configuring-branches-and-merges-in-your-repository/defining-the-mergeability-of-pull-requests/about-protected-branches) then all pull requests are mergeable unless there is a conflict. If you set up Protected Branches then you can enforce: + * Requiring certain status checks to be passing * Requiring certain people to have reviewed and approved the pull request * Requiring `CODEOWNERS` to have reviewed and approved the pull request -* Requiring that the branch is up to date with `main` +* Requiring that the branch is up-to-date with `main` -See [https://docs.github.com/en/repositories/configuring-branches-and-merges-in-your-repository/defining-the-mergeability-of-pull-requests/about-protected-branches](https://docs.github.com/en/repositories/configuring-branches-and-merges-in-your-repository/defining-the-mergeability-of-pull-requests/about-protected-branches) +See [GitHub: About protected branches](https://docs.github.com/en/repositories/configuring-branches-and-merges-in-your-repository/defining-the-mergeability-of-pull-requests/about-protected-branches) for more details. 
::: warning @@ -119,6 +141,7 @@ If you set `atlantis/apply` to the mergeable requirement, use the `--gh-allow-me ::: #### GitLab + For GitLab, a merge request will be merged if there are no conflicts, no unresolved discussions if it is a project requirement and if all necessary approvers have approved the pull request. For pipelines, if the project requires that pipelines must succeed, all builds except the apply command status will be checked. @@ -126,6 +149,7 @@ For pipelines, if the project requires that pipelines must succeed, all builds e For Jobs with allow_failure setting set to true, will be ignored. If the pipeline has been skipped and the project allows merging, it will be marked as mergeable. #### Bitbucket.org (Bitbucket Cloud) and Bitbucket Server (Stash) + For Bitbucket, we just check if there is a conflict that is preventing a merge. We don't check anything else because Bitbucket's API doesn't support it. @@ -133,9 +157,11 @@ If you need a specific check, please [open an issue](https://github.com/runatlantis/atlantis/issues/new). #### Azure DevOps + In Azure DevOps, all pull requests are mergeable unless there is a conflict. You can set a pull request to "Complete" right away, or set "Auto-Complete", which will merge after all branch policies are met. See [Review code with pull requests](https://docs.microsoft.com/en-us/azure/devops/repos/git/pull-requests?view=azure-devops). [Branch policies](https://docs.microsoft.com/en-us/azure/devops/repos/git/branch-policies?view=azure-devops) can: + * Require a minimum number of reviewers * Allow users to approve their own changes * Allow completion even if some reviewers vote "Waiting" or "Reject" @@ -147,12 +173,16 @@ At this time, the Azure DevOps client only supports merging using the default 'n ::: ### UnDiverged + Prevent applies if there are any changes on the base branch since the most recent plan. Applies to `merge` checkout strategy only which you need to set via `--checkout-strategy` flag. #### Usage + You can set the `undiverged` requirement by: + 1. Creating a `repos.yaml` file with `plan_requirements`, `apply_requirements` and `import_requirements` keys: + ```yaml repos: - id: /.*/ @@ -160,15 +190,19 @@ You can set the `undiverged` requirement by: apply_requirements: [undiverged] import_requirements: [undiverged] ``` + 1. Or by allowing an `atlantis.yaml` file to specify the `plan_requirements`, `apply_requirements` and `import_requirements` keys in your `repos.yaml` config: - #### repos.yaml + + **repos.yaml** + ```yaml repos: - id: /.*/ allowed_overrides: [plan_requirements, apply_requirements, import_requirements] ``` - #### atlantis.yaml + **atlantis.yaml** + ```yaml version: 3 projects: @@ -177,7 +211,9 @@ You can set the `undiverged` requirement by: apply_requirements: [undiverged] import_requirements: [undiverged] ``` + #### Meaning + The `merge` checkout strategy creates a temporary merge commit and runs the `plan` on the Atlantis local version of the PR source and destination branch. The local destination branch can become out of date since changes to the destination branch are not fetched if there are no changes to the source branch. `undiverged` enforces that Atlantis local version of main is up to date @@ -185,16 +221,21 @@ with remote so that the state of the source during the `apply` is identical to t time. ## Setting Command Requirements + As mentioned above, you can set command requirements via flags, in `repos.yaml`, or in `atlantis.yaml` if `repos.yaml` allows the override. 
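For example, a server-side `repos.yaml` that applies the same requirements to every repository could look like the following sketch (pick the requirement lists that match your workflow):

```yaml
repos:
- id: /.*/
  plan_requirements: [approved]
  apply_requirements: [approved, mergeable]
  import_requirements: [approved, mergeable]
```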
### Flags Override + Flags **override** any `repos.yaml` or `atlantis.yaml` settings so they are equivalent to always having that apply requirement set. ### Project-Specific Settings + If you only want some projects/repos to have apply requirements, then you must + 1. Specifying which repos have which requirements via the `repos.yaml` file. + ```yaml repos: - id: /.*/ @@ -221,7 +262,9 @@ If you only want some projects/repos to have apply requirements, then you must config. For example if I have two directories, `staging` and `production`, I might use: - #### repos.yaml + + **repos.yaml:** + ```yaml repos: - id: /.*/ @@ -229,7 +272,8 @@ If you only want some projects/repos to have apply requirements, then you must # Allow any repo to specify apply_requirements in atlantis.yaml ``` - #### atlantis.yaml + **atlantis.yaml:** + ```yaml version: 3 projects: @@ -248,14 +292,17 @@ If you only want some projects/repos to have apply requirements, then you must ``` ### Multiple Requirements + You can set any or all of `approved`, `mergeable`, and `undiverged` requirements. ## Who Can Apply? + Once the apply requirement is satisfied, **anyone** that can comment on the pull request can run the actual `atlantis apply` command. ## Next Steps -* For more information on GitHub pull request reviews and approvals see: [https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/reviewing-changes-in-pull-requests/about-pull-request-reviews](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/reviewing-changes-in-pull-requests/about-pull-request-reviews) -* For more information on GitLab merge request reviews and approvals (only supported on GitLab Enterprise) see: [https://docs.gitlab.com/ee/user/project/merge_requests/approvals/](https://docs.gitlab.com/ee/user/project/merge_requests/approvals/). -* For more information on Bitbucket pull request reviews and approvals see: [https://confluence.atlassian.com/bitbucket/pull-requests-and-code-review-223220593.html](https://confluence.atlassian.com/bitbucket/pull-requests-and-code-review-223220593.html) -* For more information on Azure DevOps pull request reviews and approvals see: [https://docs.microsoft.com/en-us/azure/devops/repos/git/pull-requests?view=azure-devops&tabs=browser](https://docs.microsoft.com/en-us/azure/devops/repos/git/pull-requests?view=azure-devops&tabs=browser) + +* For more information on GitHub pull request reviews and approvals see: [GitHub: About pull request reviews](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/reviewing-changes-in-pull-requests/about-pull-request-reviews) +* For more information on GitLab merge request reviews and approvals (only supported on GitLab Enterprise) see: [GitLab: Merge request approvals](https://docs.gitlab.com/ee/user/project/merge_requests/approvals/). 
+* For more information on Bitbucket pull request reviews and approvals see: [BitBucket: Use pull requests for code review](https://confluence.atlassian.com/bitbucket/pull-requests-and-code-review-223220593.html) +* For more information on Azure DevOps pull request reviews and approvals see: [Azure DevOps: Create pull requests](https://docs.microsoft.com/en-us/azure/devops/repos/git/pull-requests?view=azure-devops&tabs=browser) diff --git a/runatlantis.io/docs/configuring-atlantis.md b/runatlantis.io/docs/configuring-atlantis.md index 46edbbbc3c..c40e55560c 100644 --- a/runatlantis.io/docs/configuring-atlantis.md +++ b/runatlantis.io/docs/configuring-atlantis.md @@ -1,25 +1,29 @@ # Configuring Atlantis There are three methods for configuring Atlantis: + 1. Passing flags to the `atlantis server` command 1. Creating a server-side repo config file and using the `--repo-config` flag 1. Placing an `atlantis.yaml` file at the root of your Terraform repositories ## Flags + Flags to `atlantis server` are used to configure the global operation of Atlantis, for example setting credentials for your Git Host or configuring SSL certs. -See [Server Configuration](server-configuration.html) for more details. +See [Server Configuration](server-configuration.md) for more details. ## Server-Side Repo Config + A Server-Side Repo Config file is used to control per-repo behaviour and what users can do in repo-level `atlantis.yaml` files. -See [Server-Side Repo Config](server-side-repo-config.html) for more details. +See [Server-Side Repo Config](server-side-repo-config.md) for more details. ## Repo-Level `atlantis.yaml` Files + `atlantis.yaml` files placed at the root of your Terraform repos can be used to change the default Atlantis behaviour for each repo. -See [Repo-Level atlantis.yaml Files](repo-level-atlantis-yaml.html) for more details. +See [Repo-Level atlantis.yaml Files](repo-level-atlantis-yaml.md) for more details. diff --git a/runatlantis.io/docs/configuring-webhooks.md b/runatlantis.io/docs/configuring-webhooks.md index 82a6e1d3c3..295b50f437 100644 --- a/runatlantis.io/docs/configuring-webhooks.md +++ b/runatlantis.io/docs/configuring-webhooks.md @@ -1,16 +1,18 @@ # Configuring Webhooks + Atlantis needs to receive Webhooks from your Git host so that it can respond to pull request events. :::tip Prerequisites + * You have created an [access credential](access-credentials.md) * You have created a [webhook secret](webhook-secrets.md) * You have [deployed](deployment.md) Atlantis and have a url for it ::: See the instructions for your specific provider below. -[[toc]] ## GitHub/GitHub Enterprise + You can install your webhook at the [organization](https://docs.github.com/en/get-started/learning-about-github/types-of-github-accounts) level, or for each individual repository. ::: tip NOTE @@ -22,121 +24,130 @@ When authenticating as a GitHub App, Webhooks are automatically created and need If you're installing on the organization, navigate to your organization's page and click **Settings**. If installing on a single repository, navigate to the repository home page and click **Settings**. -- Select **Webhooks** or **Hooks** in the sidebar -- Click **Add webhook** -- set **Payload URL** to `http://$URL/events` (or `https://$URL/events` if you're using SSL) where `$URL` is where Atlantis is hosted. **Be sure to add `/events`** -- double-check you added `/events` to the end of your URL. 
-- set **Content type** to `application/json` -- set **Secret** to the Webhook Secret you generated previously - - **NOTE** If you're adding a webhook to multiple repositories, each repository will need to use the **same** secret. -- select **Let me select individual events** -- check the boxes - - **Pull request reviews** - - **Pushes** - - **Issue comments** - - **Pull requests** -- leave **Active** checked -- click **Add webhook** -- See [Next Steps](#next-steps) + +* Select **Webhooks** or **Hooks** in the sidebar +* Click **Add webhook** +* set **Payload URL** to `http://$URL/events` (or `https://$URL/events` if you're using SSL) where `$URL` is where Atlantis is hosted. **Be sure to add `/events`** +* double-check you added `/events` to the end of your URL. +* set **Content type** to `application/json` +* set **Secret** to the Webhook Secret you generated previously + * **NOTE** If you're adding a webhook to multiple repositories, each repository will need to use the **same** secret. +* select **Let me select individual events** +* check the boxes + * **Pull request reviews** + * **Pushes** + * **Issue comments** + * **Pull requests** +* leave **Active** checked +* click **Add webhook** +* See [Next Steps](#next-steps) ## GitLab + If you're using GitLab, navigate to your project's home page in GitLab -- Click **Settings > Webhooks** in the sidebar -- set **URL** to `http://$URL/events` (or `https://$URL/events` if you're using SSL) where `$URL` is where Atlantis is hosted. **Be sure to add `/events`** -- double-check you added `/events` to the end of your URL. -- set **Secret Token** to the Webhook Secret you generated previously - - **NOTE** If you're adding a webhook to multiple repositories, each repository will need to use the **same** secret. -- check the boxes - - **Push events** - - **Comments** - - **Merge Request events** -- leave **Enable SSL verification** checked -- click **Add webhook** -- See [Next Steps](#next-steps) + +* Click **Settings > Webhooks** in the sidebar +* set **URL** to `http://$URL/events` (or `https://$URL/events` if you're using SSL) where `$URL` is where Atlantis is hosted. **Be sure to add `/events`** +* double-check you added `/events` to the end of your URL. +* set **Secret Token** to the Webhook Secret you generated previously + * **NOTE** If you're adding a webhook to multiple repositories, each repository will need to use the **same** secret. +* check the boxes + * **Push events** + * **Comments** + * **Merge Request events** +* leave **Enable SSL verification** checked +* click **Add webhook** +* See [Next Steps](#next-steps) ## Gitea + If you're using Gitea, navigate to your project's home page in Gitea -- Click **Settings > Webhooks** in the top- and then sidebar -- Click **Add webhook > Gitea** (Gitea webhooks are service specific, but this works) -- set **Target URL** to `http://$URL/events` (or `https://$URL/events` if you're using SSL) where `$URL` is where Atlantis is hosted. **Be sure to add `/events`** -- double-check you added `/events` to the end of your URL. -- set **Secret** to the Webhook Secret you generated previously - - **NOTE** If you're adding a webhook to multiple repositories, each repository will need to use the **same** secret. 
-- Select **Custom Events...** -- Check the boxes - - **Repository events > Push** - - **Issue events > Issue Comment** - - **Pull Request events > Pull Request** - - **Pull Request events > Pull Request Comment** - - **Pull Request events > Pull Request Reviewed** - - **Pull Request events > Pull Request Synchronized** -- Leave **Active** checked -- Click **Add Webhook** -- See [Next Steps](#next-steps) + +* Click **Settings > Webhooks** in the top- and then sidebar +* Click **Add webhook > Gitea** (Gitea webhooks are service specific, but this works) +* set **Target URL** to `http://$URL/events` (or `https://$URL/events` if you're using SSL) where `$URL` is where Atlantis is hosted. **Be sure to add `/events`** +* double-check you added `/events` to the end of your URL. +* set **Secret** to the Webhook Secret you generated previously + * **NOTE** If you're adding a webhook to multiple repositories, each repository will need to use the **same** secret. +* Select **Custom Events...** +* Check the boxes + * **Repository events > Push** + * **Issue events > Issue Comment** + * **Pull Request events > Pull Request** + * **Pull Request events > Pull Request Comment** + * **Pull Request events > Pull Request Reviewed** + * **Pull Request events > Pull Request Synchronized** +* Leave **Active** checked +* Click **Add Webhook** +* See [Next Steps](#next-steps) ## Bitbucket Cloud (bitbucket.org) -- Go to your repo's home page -- Click **Settings** in the sidebar -- Click **Webhooks** under the **WORKFLOW** section -- Click **Add webhook** -- Enter "Atlantis" for **Title** -- set **URL** to `http://$URL/events` (or `https://$URL/events` if you're using SSL) where `$URL` is where Atlantis is hosted. **Be sure to add `/events`** -- double-check you added `/events` to the end of your URL. -- Keep **Status** as Active -- Don't check **Skip certificate validation** because NGROK has a valid cert. -- Select **Choose from a full list of triggers** -- Under **Repository** **un**check everything -- Under **Issues** leave everything **un**checked -- Under **Pull Request**, select: Created, Updated, Merged, Declined and Comment created -- Click **Save** + +* Go to your repo's home page +* Click **Settings** in the sidebar +* Click **Webhooks** under the **WORKFLOW** section +* Click **Add webhook** +* Enter "Atlantis" for **Title** +* set **URL** to `http://$URL/events` (or `https://$URL/events` if you're using SSL) where `$URL` is where Atlantis is hosted. **Be sure to add `/events`** +* double-check you added `/events` to the end of your URL. +* Keep **Status** as Active +* Don't check **Skip certificate validation** because NGROK has a valid cert. +* Select **Choose from a full list of triggers** +* Under **Repository** **un**check everything +* Under **Issues** leave everything **un**checked +* Under **Pull Request**, select: Created, Updated, Merged, Declined and Comment created +* Click **Save** Bitbucket Webhook -- See [Next Steps](#next-steps) +* See [Next Steps](#next-steps) ## Bitbucket Server (aka Stash) -- Go to your repo's home page -- Click **Settings** in the sidebar -- Click **Webhooks** under the **WORKFLOW** section -- Click **Create webhook** -- Enter "Atlantis" for **Name** -- set **URL** to `http://$URL/events` (or `https://$URL/events` if you're using SSL) where `$URL` is where Atlantis is hosted. **Be sure to add `/events`** -- Double-check you added `/events` to the end of your URL. 
-- Set **Secret** to the Webhook Secret you generated previously - - **NOTE** If you're adding a webhook to multiple repositories, each repository will need to use the **same** secret. -- Under **Pull Request**, select: Opened, Source branch updated, Merged, Declined, Deleted and Comment added -- Click **Save**Bitbucket Webhook -- See [Next Steps](#next-steps) + +* Go to your repo's home page +* Click **Settings** in the sidebar +* Click **Webhooks** under the **WORKFLOW** section +* Click **Create webhook** +* Enter "Atlantis" for **Name** +* set **URL** to `http://$URL/events` (or `https://$URL/events` if you're using SSL) where `$URL` is where Atlantis is hosted. **Be sure to add `/events`** +* Double-check you added `/events` to the end of your URL. +* Set **Secret** to the Webhook Secret you generated previously + * **NOTE** If you're adding a webhook to multiple repositories, each repository will need to use the **same** secret. +* Under **Pull Request**, select: Opened, Source branch updated, Merged, Declined, Deleted and Comment added +* Click **Save**Bitbucket Webhook +* See [Next Steps](#next-steps) ## Azure DevOps + Webhooks are installed at the [team project](https://docs.microsoft.com/en-us/azure/devops/organizations/projects/about-projects?view=azure-devops) level, but may be restricted to only fire based on events pertaining to [specific repos](https://docs.microsoft.com/en-us/azure/devops/service-hooks/services/webhooks?view=azure-devops) within the team project. -- Navigate anywhere within a team project, ie: `https://dev.azure.com/orgName/projectName/_git/repoName` -- Select **Project settings** in the lower-left corner -- Select **Service hooks** - - If you see the message "You do not have sufficient permissions to view or configure subscriptions." you need to ensure your user is a member of either the organization's "Project Collection Administrators" group or the project's "Project Administrators" group. - - To add your user to the Project Collection Build Administrators group, navigate to the organization level, click **Organization Settings** and then click **Permissions**. You should be at `https://dev.azure.com//_settings/groups`. Now click on the **\/Project Collection Administrators** group and add your user as a member. - - To add your user to the Project Administrators group, navigate to the project level, click **Project Settings** and then click **Permissions**. You should be at `https://dev.azure.com///_settings/permissions`. Now click on the **\/Project Administrators** group and add your user as a member. -- Click **Create subscription** or the green plus icon to add a new webhook -- Scroll to the bottom of the list and select **Web Hooks** -- Click **Next** -- Under "Trigger on this type of event", select **Pull request created** - - Optionally, select a repository under **Filters** to restrict the scope of this webhook subscription to a specific repository -- Click **Next** -- Set **URL** to `http://$URL/events` where `$URL` is where Atlantis is hosted. Note that SSL, or `https://$URL/events`, is required if you set a Basic username and password for the webhook). 
**Be sure to add `/events`** -- It is strongly recommended to set a Basic Username and Password for all webhooks -- Leave all three drop-down menus for `...to send` set to **All** -- Resource version should be set to **1.0** for `Pull request created` and `Pull request updated` event types and **2.0** for `Pull request commented on` -- **NOTE** If you're adding a webhook to multiple team projects or repositories (using filters), each repository will need to use the **same** basic username and password. -- Click **Finish** +* Navigate anywhere within a team project, ie: `https://dev.azure.com/orgName/projectName/_git/repoName` +* Select **Project settings** in the lower-left corner +* Select **Service hooks** + * If you see the message "You do not have sufficient permissions to view or configure subscriptions." you need to ensure your user is a member of either the organization's "Project Collection Administrators" group or the project's "Project Administrators" group. + * To add your user to the Project Collection Build Administrators group, navigate to the organization level, click **Organization Settings** and then click **Permissions**. You should be at `https://dev.azure.com//_settings/groups`. Now click on the **\/Project Collection Administrators** group and add your user as a member. + * To add your user to the Project Administrators group, navigate to the project level, click **Project Settings** and then click **Permissions**. You should be at `https://dev.azure.com///_settings/permissions`. Now click on the **\/Project Administrators** group and add your user as a member. +* Click **Create subscription** or the green plus icon to add a new webhook +* Scroll to the bottom of the list and select **Web Hooks** +* Click **Next** +* Under "Trigger on this type of event", select **Pull request created** + * Optionally, select a repository under **Filters** to restrict the scope of this webhook subscription to a specific repository +* Click **Next** +* Set **URL** to `http://$URL/events` where `$URL` is where Atlantis is hosted. Note that SSL, or `https://$URL/events`, is required if you set a Basic username and password for the webhook). **Be sure to add `/events`** +* It is strongly recommended to set a Basic Username and Password for all webhooks +* Leave all three drop-down menus for `...to send` set to **All** +* Resource version should be set to **1.0** for `Pull request created` and `Pull request updated` event types and **2.0** for `Pull request commented on` +* **NOTE** If you're adding a webhook to multiple team projects or repositories (using filters), each repository will need to use the **same** basic username and password. +* Click **Finish** Repeat the process above until you have webhook subscriptions for the following event types that will trigger on all repositories Atlantis will manage: -- Pull request created (you just added this one) -- Pull request updated -- Pull request commented on +* Pull request created (you just added this one) +* Pull request updated +* Pull request commented on -- See [Next Steps](#next-steps) +* See [Next Steps](#next-steps) ## Next Steps + * To verify that Atlantis is receiving your webhooks, create a test pull request to your repo. * You should see the request show up in the Atlantis logs at an `INFO` level. 
* You'll now need to configure Atlantis to add your [Provider Credentials](provider-credentials.md) diff --git a/runatlantis.io/docs/custom-policy-checks.md b/runatlantis.io/docs/custom-policy-checks.md index 9c6d362803..4c353335c7 100644 --- a/runatlantis.io/docs/custom-policy-checks.md +++ b/runatlantis.io/docs/custom-policy-checks.md @@ -1,9 +1,11 @@ # Custom Policy Checks -If you want to run custom policy tools or scripts instead of the built-in Conftest integration, you can do so by setting the `custom_policy_check` option and running it in a custom workflow. Note: custom policy tool output is simply parsed for "fail" substrings to determine if the policy set passed. -This option can be configured either at the server-level in a [repos.yaml config file](server-configuration.md) or at the repo-level in an [atlantis.yaml file.](repo-level-atlantis-yaml.md). +If you want to run custom policy tools or scripts instead of the built-in Conftest integration, you can do so by setting the `custom_policy_check` option and running it in a custom workflow. Note: custom policy tool output is simply parsed for "fail" substrings to determine if the policy set passed. + +This option can be configured either at the server-level in a [repos.yaml config file](server-configuration.md) or at the repo-level in an [atlantis.yaml file.](repo-level-atlantis-yaml.md). ## Server-side config example + Set the `policy_check` and `custom_policy_check` options to true, and run the custom tool in the policy check steps as seen below. ```yaml @@ -30,8 +32,8 @@ policies: source: local ``` - ## Repo-level atlantis.yaml example + First, you will need to ensure `custom_policy_check` is within the `allowed_overrides` field of the server-side config. Next, just set the custom option to true on the specific project you want as shown in the example `atlantis.yaml` below: ```yaml diff --git a/runatlantis.io/docs/custom-workflows.md b/runatlantis.io/docs/custom-workflows.md index 3d8da5a566..85c1330a7a 100644 --- a/runatlantis.io/docs/custom-workflows.md +++ b/runatlantis.io/docs/custom-workflows.md @@ -3,23 +3,25 @@ Custom workflows can be defined to override the default commands that Atlantis runs. -[[toc]] - ## Usage + Custom workflows can be specified in the Server-Side Repo Config or in the Repo-Level `atlantis.yaml` files. -**Notes** +**Notes:** + * If you want to allow repos to select their own workflows, they must have the -`allowed_overrides: [workflow]` setting. See [server-side repo config use cases](server-side-repo-config.html#allow-repos-to-choose-a-server-side-workflow) for more details. +`allowed_overrides: [workflow]` setting. See [server-side repo config use cases](server-side-repo-config.md#allow-repos-to-choose-a-server-side-workflow) for more details. * If in addition you also want to allow repos to define their own workflows, they must have the -`allow_custom_workflows: true` setting. See [server-side repo config use cases](server-side-repo-config.html#allow-repos-to-define-their-own-workflows) for more details. - +`allow_custom_workflows: true` setting. See [server-side repo config use cases](server-side-repo-config.md#allow-repos-to-define-their-own-workflows) for more details. ## Use Cases + ### .tfvars files + Given the structure: -``` + +```plain . 
└── project1 ├── main.tf @@ -29,6 +31,7 @@ Given the structure: If you wanted Atlantis to automatically run plan with `-var-file staging.tfvars` and `-var-file production.tfvars` you could define two workflows: + ```yaml # repos.yaml or atlantis.yaml workflows: @@ -40,7 +43,7 @@ workflows: extra_args: ["-var-file", "staging.tfvars"] # NOTE: no need to define the apply stage because it will default # to the normal apply stage. - + production: plan: steps: @@ -62,7 +65,9 @@ workflows: - state_rm: extra_args: ["-lock=false"] ``` + Then in your repo-level `atlantis.yaml` file, you would reference the workflows: + ```yaml # atlantis.yaml version: 3 @@ -80,20 +85,27 @@ workflows: # If you didn't define the workflows in your server-side repos.yaml config, # you would define them here instead. ``` + When you want to apply the plans, you can comment -``` + +```shell atlantis apply -p project1-staging ``` + and -``` + +```shell atlantis apply -p project1-production ``` + Where `-p` refers to the project name. ### Adding extra arguments to Terraform commands + If you need to append flags to `terraform plan` or `apply` temporarily, you can append flags on a comment following `--`, for example commenting: -``` + +```shell atlantis plan -- -lock=false ``` @@ -117,7 +129,7 @@ workflows: extra_args: ["-lock=false"] ``` -If [policy checking](/docs/policy-checking.html#how-it-works) is enabled, `extra_args` can also be used to change the default behaviour of conftest. +If [policy checking](policy-checking.md#how-it-works) is enabled, `extra_args` can also be used to change the default behaviour of conftest. ```yaml workflows: @@ -130,6 +142,7 @@ workflows: ``` ### Custom init/plan/apply Commands + If you want to customize `terraform init`, `plan` or `apply` in ways that aren't supported by `extra_args`, you can completely override those commands. @@ -147,11 +160,11 @@ workflows: - run: command: terraform init -input=false output: hide - + # If you're using workspaces you need to select the workspace using the # $WORKSPACE environment variable. - run: terraform workspace select $WORKSPACE - + # You MUST output the plan using -out $PLANFILE because Atlantis expects # plans to be in a specific location. - run: terraform plan -input=false -refresh -out $PLANFILE @@ -162,14 +175,15 @@ workflows: ``` ### CDKTF + Here are the requirements to enable [CDKTF](https://developer.hashicorp.com/terraform/cdktf) -- A custom image with `CDKTF` installed -- Add `**/cdk.tf.json` to the list of Atlantis autoplan files. -- Set the `atlantis-include-git-untracked-files` flag so that the Terraform files dynamically generated +* A custom image with `CDKTF` installed +* Add `**/cdk.tf.json` to the list of Atlantis autoplan files. +* Set the `atlantis-include-git-untracked-files` flag so that the Terraform files dynamically generated by CDKTF will be add to the Atlantis modified file list. -- Use `pre_workflow_hooks` to run `cdktf synth` -- Optional: There isn't a requirement to use a repo `atlantis.yaml` but one can be leveraged if needed. +* Use `pre_workflow_hooks` to run `cdktf synth` +* Optional: There isn't a requirement to use a repo `atlantis.yaml` but one can be leveraged if needed. 
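If you do opt to keep a repo-level `atlantis.yaml` for a CDKTF repository, a minimal sketch could pin the project to wherever `cdktf synth` writes the generated `cdk.tf.json`. The directory below is illustrative; adjust it to your layout:

```yaml
version: 3
projects:
- dir: .  # or the stack output directory where cdk.tf.json is generated
  autoplan:
    when_modified: ["cdk.tf.json"]
    enabled: true
```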
#### Custom Image @@ -192,6 +206,7 @@ ATLANTIS_INCLUDE_GIT_UNTRACKED_FILES=true OR `atlantis server --config config.yaml` + ```yaml # config.yaml autoplan-file-list: "**/*.tf,**/*.tfvars,**/*.tfvars.json,**/cdk.tf.json" @@ -203,6 +218,7 @@ include-git-untracked-files: true Use `pre_workflow_hooks` `atlantis server --repo-config="repos.yaml"` + ```yaml # repos.yaml repos: @@ -234,7 +250,7 @@ $ tree --gitignore 1. Container orchestrator (k8s/fargate/ecs/etc) uses the custom docker image of atlantis with `cdktf` installed with the `--autoplan-file-list` to trigger on `cdk.tf.json` files and `--include-git-untracked-files` set to include the -CDKTF dynamically generated Terraform files in the Atlantis plan. +CDKTF dynamically generated Terraform files in the Atlantis plan. 1. PR branch is pushed up containing `cdktf` code changes. 1. Atlantis checks out the branch in the repo. 1. Atlantis runs the `npm i && cdktf get && cdktf synth` command in the repo root as a step in `pre_workflow_hooks`, @@ -243,6 +259,7 @@ generating the `cdk.tf.json` Terraform files. 1. Atlantis then runs `terraform` workflows in the respective directories as usual. ### Terragrunt + Atlantis supports running custom commands in place of the default Atlantis commands. We can use this functionality to enable [Terragrunt](https://github.com/gruntwork-io/terragrunt). @@ -250,7 +267,8 @@ commands. We can use this functionality to enable You can either use your repo's `atlantis.yaml` file or the Atlantis server's `repos.yaml` file. Given a directory structure: -``` + +```plain . └── live    ├── prod @@ -315,6 +333,7 @@ workflows: ``` If using the repo's `atlantis.yaml` file you would use the following config: + ```yaml version: 3 projects: @@ -350,10 +369,9 @@ workflows: **NOTE:** If using the repo's `atlantis.yaml` file, you will need to specify each directory that is a Terragrunt project. - ::: warning Atlantis will need to have the `terragrunt` binary in its PATH. -If you're using Docker you can build your own image, see [Customization](/docs/deployment.html#customization). +If you're using Docker you can build your own image, see [Customization](deployment.md#customization). ::: If you don't want to create/manage the repo's `atlantis.yaml` file yourself, you can use the tool [terragrunt-atlantis-config](https://github.com/transcend-io/terragrunt-atlantis-config) to generate it. @@ -361,6 +379,7 @@ If you don't want to create/manage the repo's `atlantis.yaml` file yourself, you The `terragrunt-atlantis-config` tool is a community project and not maintained by the Atlantis team. ### Running custom commands + Atlantis supports running completely custom commands. In this example, we want to run a script after every `apply`: @@ -375,17 +394,19 @@ workflows: ``` ::: tip Notes + * We don't need to write a `plan` key under `myworkflow`. If `plan` isn't set, Atlantis will use the default plan workflow which is what we want in this case. * A custom command will only terminate if all output file descriptors are closed. Therefore a custom command can only be sent to the background (e.g. for an SSH tunnel during the terraform run) when its output is redirected to a different location. For example, Atlantis -will execute a custom script containing the following code to create a SSH tunnel correctly: +will execute a custom script containing the following code to create a SSH tunnel correctly: `ssh -f -M -S /tmp/ssh_tunnel -L 3306:database:3306 -N bastion 1>/dev/null 2>&1`. 
Without the redirect, the script would block the Atlantis workflow. ::: ### Custom Backend Config + If you need to specify the `-backend-config` flag to `terraform init` you'll need to use a custom workflow. In this example, we're using custom backend files to configure two remote states, one for each environment. We're then using `.tfvars` files to load different variables for each environment. @@ -410,12 +431,14 @@ workflows: - plan: extra_args: [-var-file=production.tfvars] ``` + ::: warning NOTE We have to use a custom `run` step to `rm -rf .terraform` because otherwise Terraform will complain in-between commands since the backend config has changed. ::: You would then reference the workflows in your repo-level `atlantis.yaml`: + ```yaml version: 3 projects: @@ -428,7 +451,9 @@ projects: ``` ## Reference + ### Workflow + ```yaml plan: apply: @@ -444,6 +469,7 @@ state_rm: | state_rm | [Stage](#stage) | `steps: [init, state_rm]` | no | How to run state rm for this project. | ### Stage + ```yaml steps: - run: custom-command @@ -457,8 +483,11 @@ steps: | steps | array[[Step](#step)] | `[]` | no | List of steps for this stage. If the steps key is empty, no steps will be run for this stage. | ### Step + #### Built-In Commands + Steps can be a single string for a built-in command. + ```yaml - init - plan @@ -466,12 +495,15 @@ Steps can be a single string for a built-in command. - import - state_rm ``` + | Key | Type | Default | Required | Description | |---------------------------------|--------|---------|----------|------------------------------------------------------------------------------------------------------------------------------| | init/plan/apply/import/state_rm | string | none | no | Use a built-in command without additional configuration. Only `init`, `plan`, `apply`, `import` and `state_rm` are supported | #### Built-In Command With Extra Args + A map from string to `extra_args` for a built-in command with extra arguments. + ```yaml - init: extra_args: [arg1, arg2] @@ -484,79 +516,88 @@ A map from string to `extra_args` for a built-in command with extra arguments. - state_rm: extra_args: [arg1, arg2] ``` + | Key | Type | Default | Required | Description | |---------------------------------|------------------------------------|---------|----------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| init/plan/apply/import/state_rm | map[`extra_args` -> array[string]] | none | no | Use a built-in command and append `extra_args`. Only `init`, `plan`, `apply`, `import` and `state_rm` are supported as keys and only `extra_args` is supported as a value | +| init/plan/apply/import/state_rm | map\[`extra_args` -> array\[string\]\] | none | no | Use a built-in command and append `extra_args`. 
Only `init`, `plan`, `apply`, `import` and `state_rm` are supported as keys and only `extra_args` is supported as a value | #### Custom `run` Command + A custom command can be written in 2 ways Compact: + ```yaml - run: custom-command arg1 arg2 ``` + | Key | Type | Default | Required | Description | |-----|--------|---------|----------|----------------------| | run | string | none | no | Run a custom command | Full + ```yaml -- run: +- run: command: custom-command arg1 arg2 output: show ``` + | Key | Type | Default | Required | Description | |-----|--------------------------------------------------------------|---------|----------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| run | map[string -> string] | none | no | Run a custom command | +| run | map\[string -> string\] | none | no | Run a custom command | | run.command | string | none | yes | Shell command to run | -| run.output | string | "show" | no | How to post-process the output of this command when posted in the PR comment. The options are
* `show` - preserve the full output
* `hide` - hide output from comment (still visible in the real-time streaming output)
* `strip_refreshing` - hide all output up until and including the last line containing "Refreshing...". This matches the behavior of the built-in `plan` command | +| run.output | string | "show" | no | How to post-process the output of this command when posted in the PR comment. The options are
* `show` - preserve the full output
* `hide` - hide output from comment (still visible in the real-time streaming output)
* `strip_refreshing` - hide all output up until and including the last line containing "Refreshing...". This matches the behavior of the built-in `plan` command | ::: tip Notes -* `run` steps in the main `workflow` are executed with the following environment variables: + +* `run` steps in the main `workflow` are executed with the following environment variables: note: these variables are not available to `pre` or `post` workflows - * `WORKSPACE` - The Terraform workspace used for this project, ex. `default`. + * `WORKSPACE` - The Terraform workspace used for this project, ex. `default`. NOTE: if the step is executed before `init` then Atlantis won't have switched to this workspace yet. - * `ATLANTIS_TERRAFORM_VERSION` - The version of Terraform used for this project, ex. `0.11.0`. - * `DIR` - Absolute path to the current directory. - * `PLANFILE` - Absolute path to the location where Atlantis expects the plan to + * `ATLANTIS_TERRAFORM_VERSION` - The version of Terraform used for this project, ex. `0.11.0`. + * `DIR` - Absolute path to the current directory. + * `PLANFILE` - Absolute path to the location where Atlantis expects the plan to either be generated (by plan) or already exist (if running apply). Can be used to override the built-in `plan`/`apply` commands, ex. `run: terraform plan -out $PLANFILE`. - * `SHOWFILE` - Absolute path to the location where Atlantis expects the plan in json format to + * `SHOWFILE` - Absolute path to the location where Atlantis expects the plan in json format to either be generated (by show) or already exist (if running policy checks). Can be used to override the built-in `plan`/`apply` commands, ex. `run: terraform show -json $PLANFILE > $SHOWFILE`. - * `POLICYCHECKFILE` - Absolute path to the location of policy check output if Atlantis runs policy checks. - See [policy checking](/docs/policy-checking.html#data-for-custom-run-steps) for information of data structure. - * `BASE_REPO_NAME` - Name of the repository that the pull request will be merged into, ex. `atlantis`. - * `BASE_REPO_OWNER` - Owner of the repository that the pull request will be merged into, ex. `runatlantis`. - * `HEAD_REPO_NAME` - Name of the repository that is getting merged into the base repository, ex. `atlantis`. - * `HEAD_REPO_OWNER` - Owner of the repository that is getting merged into the base repository, ex. `acme-corp`. - * `HEAD_BRANCH_NAME` - Name of the head branch of the pull request (the branch that is getting merged into the base) - * `HEAD_COMMIT` - The sha256 that points to the head of the branch that is being pull requested into the base. If the pull request is from Bitbucket Cloud the string will only be 12 characters long because Bitbucket Cloud truncates its commit IDs. - * `BASE_BRANCH_NAME` - Name of the base branch of the pull request (the branch that the pull request is getting merged into) - * `PROJECT_NAME` - Name of the project configured in `atlantis.yaml`. If no project name is configured this will be an empty string. - * `PULL_NUM` - Pull request number or ID, ex. `2`. - * `PULL_URL` - Pull request URL, ex. `https://github.com/runatlantis/atlantis/pull/2`. - * `PULL_AUTHOR` - Username of the pull request author, ex. `acme-user`. - * `REPO_REL_DIR` - The relative path of the project in the repository. For example if your project is in `dir1/dir2/` then this will be set to `"dir1/dir2"`. If your project is at the root this will be `"."`. - * `USER_NAME` - Username of the VCS user running command, ex. `acme-user`. 
During an autoplan, the user will be the Atlantis API user, ex. `atlantis`. - * `COMMENT_ARGS` - Any additional flags passed in the comment on the pull request. Flags are separated by commas and + * `POLICYCHECKFILE` - Absolute path to the location of policy check output if Atlantis runs policy checks. + See [policy checking](policy-checking.md#data-for-custom-run-steps) for information of data structure. + * `BASE_REPO_NAME` - Name of the repository that the pull request will be merged into, ex. `atlantis`. + * `BASE_REPO_OWNER` - Owner of the repository that the pull request will be merged into, ex. `runatlantis`. + * `HEAD_REPO_NAME` - Name of the repository that is getting merged into the base repository, ex. `atlantis`. + * `HEAD_REPO_OWNER` - Owner of the repository that is getting merged into the base repository, ex. `acme-corp`. + * `HEAD_BRANCH_NAME` - Name of the head branch of the pull request (the branch that is getting merged into the base) + * `HEAD_COMMIT` - The sha256 that points to the head of the branch that is being pull requested into the base. If the pull request is from Bitbucket Cloud the string will only be 12 characters long because Bitbucket Cloud truncates its commit IDs. + * `BASE_BRANCH_NAME` - Name of the base branch of the pull request (the branch that the pull request is getting merged into) + * `PROJECT_NAME` - Name of the project configured in `atlantis.yaml`. If no project name is configured this will be an empty string. + * `PULL_NUM` - Pull request number or ID, ex. `2`. + * `PULL_URL` - Pull request URL, ex. `https://github.com/runatlantis/atlantis/pull/2`. + * `PULL_AUTHOR` - Username of the pull request author, ex. `acme-user`. + * `REPO_REL_DIR` - The relative path of the project in the repository. For example if your project is in `dir1/dir2/` then this will be set to `"dir1/dir2"`. If your project is at the root this will be `"."`. + * `USER_NAME` - Username of the VCS user running command, ex. `acme-user`. During an autoplan, the user will be the Atlantis API user, ex. `atlantis`. + * `COMMENT_ARGS` - Any additional flags passed in the comment on the pull request. Flags are separated by commas and every character is escaped, ex. `atlantis plan -- arg1 arg2` will result in `COMMENT_ARGS=\a\r\g\1,\a\r\g\2`. * A custom command will only terminate if all output file descriptors are closed. Therefore a custom command can only be sent to the background (e.g. for an SSH tunnel during the terraform run) when its output is redirected to a different location. For example, Atlantis -will execute a custom script containing the following code to create a SSH tunnel correctly: +will execute a custom script containing the following code to create a SSH tunnel correctly: `ssh -f -M -S /tmp/ssh_tunnel -L 3306:database:3306 -N bastion 1>/dev/null 2>&1`. Without the redirect, the script would block the Atlantis workflow. -* If a workflow step returns a non-zero exit code, the workflow will stop. +* If a workflow step returns a non-zero exit code, the workflow will stop. ::: #### Environment Variable `env` Command + The `env` command allows you to set environment variables that will be available to all steps defined **below** the `env` step. You can set hard coded values via the `value` key, or set dynamic values via the `command` key which allows you to run any command and uses the output as the environment variable value. + ```yaml - env: name: ENV_NAME @@ -565,24 +606,29 @@ as the environment variable value. 
name: ENV_NAME_2 command: 'echo "dynamic-value-$(date)"' ``` + | Key | Type | Default | Required | Description | |-----------------|-----------------------|---------|----------|-----------------------------------------------------------------------------------------------------------------| -| env | map[string -> string] | none | no | Set environment variables for subsequent steps | +| env | map\[string -> string\] | none | no | Set environment variables for subsequent steps | | env.name | string | none | yes | Name of the environment variable | | env.value | string | none | no | Set the value of the environment variable to a hard-coded string. Cannot be set at the same time as `command` | | env.command | string | none | no | Set the value of the environment variable to the output of a command. Cannot be set at the same time as `value` | ::: tip Notes + * `env` `command`'s can use any of the built-in environment variables available - to `run` commands. + to `run` commands. ::: #### Multiple Environment Variables `multienv` Command + The `multienv` command allows you to set dynamic number of multiple environment variables that will be available to all steps defined **below** the `multienv` step. + ```yaml - multienv: custom-command ``` + | Key | Type | Default | Required | Description | |----------|--------|---------|----------|--------------------------------------------------------------------------------| | multienv | string | none | no | Run a custom command and add set environment variables according to the result | @@ -593,6 +639,7 @@ EnvVar1Name=value1,EnvVar2Name=value2,EnvVar3Name=value3 The name-value pairs in the result are added as environment variables if success is true otherwise the workflow execution stops with error and the errorMessage is getting displayed. ::: tip Notes + * `multienv` `command`'s can use any of the built-in environment variables available - to `run` commands. + to `run` commands. ::: diff --git a/runatlantis.io/docs/deployment.md b/runatlantis.io/docs/deployment.md index a5f5499026..dfe5ae27cc 100644 --- a/runatlantis.io/docs/deployment.md +++ b/runatlantis.io/docs/deployment.md @@ -1,20 +1,23 @@ # Deployment + This page covers getting Atlantis up and running in your infrastructure. ::: tip Prerequisites -* You have created [access credentials](access-credentials.html) for your Atlantis user -* You have created a [webhook secret](webhook-secrets.html) -::: -[[toc]] +* You have created [access credentials](access-credentials.md) for your Atlantis user +* You have created a [webhook secret](webhook-secrets.md) +::: ## Architecture Overview + ### Runtime + Atlantis is a simple [Go](https://golang.org/) app. It receives webhooks from your Git host and executes Terraform commands locally. There is an official Atlantis [Docker image](https://ghcr.io/runatlantis/atlantis). ### Routing + Atlantis and your Git host need to be able to route and communicate with one another. Your Git host needs to be able to send webhooks to Atlantis and Atlantis needs to be able to make API calls to your Git host. If you're using a public Git host like github.com, gitlab.com, gitea.com, bitbucket.org, or dev.azure.com then you'll need to @@ -24,6 +27,7 @@ If you're using a private Git host like GitHub Enterprise, GitLab Enterprise, se Bitbucket Server, then Atlantis needs to be routable from the private host and Atlantis will need to be able to route to the private host. ### Data + Atlantis has no external database. Atlantis stores Terraform plan files on disk. 
If Atlantis loses that data in between a `plan` and `apply` cycle, then users will have to re-run `plan`. Because of this, you may want to provision a persistent disk @@ -32,6 +36,7 @@ for Atlantis. ## Deployment Pick your deployment type: + * [Kubernetes Helm Chart](#kubernetes-helm-chart) * [Kubernetes Manifests](#kubernetes-manifests) * [Kubernetes Kustomize](#kubernetes-kustomize) @@ -41,21 +46,27 @@ Pick your deployment type: * [Docker](#docker) * [Roll Your Own](#roll-your-own) - ### Kubernetes Helm Chart + Atlantis has an [official Helm chart](https://github.com/runatlantis/helm-charts/tree/main/charts/atlantis) To install: + 1. Add the runatlantis helm chart repository to helm + ```bash helm repo add runatlantis https://runatlantis.github.io/helm-charts ``` + 1. `cd` into a directory where you're going to configure your Atlantis Helm chart 1. Create a `values.yaml` file by running + ```bash helm inspect values runatlantis/atlantis > values.yaml ``` + 1. Edit `values.yaml` and add your access credentials and webhook secret + ```yaml # for example github: @@ -63,28 +74,33 @@ To install: token: bar secret: baz ``` + 1. Edit `values.yaml` and set your `orgAllowlist` (see [Repo Allowlist](server-configuration.md#repo-allowlist) for more information) + ```yaml orgAllowlist: github.com/runatlantis/* ``` - **Note**: For helm chart version < `4.0.2`, `orgWhitelist` must be used instead. -1. Configure any other variables (see [https://github.com/runatlantis/helm-charts#customization](https://github.com/runatlantis/helm-charts#customization) + + **Note**: For helm chart version < `4.0.2`, `orgWhitelist` must be used instead. +1. Configure any other variables (see [Atlantis Helm Chart: Customization](https://github.com/runatlantis/helm-charts#customization) for documentation) 1. Run + ```sh helm install atlantis runatlantis/atlantis -f values.yaml ``` If you are using helm v2, run: + ```sh helm install -f values.yaml runatlantis/atlantis ``` - Atlantis should be up and running in minutes! See [Next Steps](#next-steps) for what to do next. ### Kubernetes Manifests + If you'd like to use a raw Kubernetes manifest, we offer either a [Deployment](https://kubernetes.io/docs/concepts/workloads/controllers/deployment/) or a [Statefulset](https://kubernetes.io/docs/concepts/workloads/controllers/statefulset/) with persistent storage. @@ -94,21 +110,24 @@ or you upgrade Atlantis, you won't lose plans that haven't been applied. If you do lose that data, you just need to run `atlantis plan` again so it's not the end of the world. Regardless of whether you choose a Deployment or StatefulSet, first create a Secret with the webhook secret and access token: + ```bash echo -n "yourtoken" > token echo -n "yoursecret" > webhook-secret kubectl create secret generic atlantis-vcs --from-file=token --from-file=webhook-secret ``` + ::: tip Note If you're using Bitbucket Cloud then there is no webhook secret since it's not supported. ::: Next, edit the manifests below as follows: -1. Replace `` in `image: ghcr.io/runatlantis/atlantis:` with the most recent version from [https://github.com/runatlantis/atlantis/releases/latest](https://github.com/runatlantis/atlantis/releases/latest). + +1. Replace `` in `image: ghcr.io/runatlantis/atlantis:` with the most recent version from [GitHub: Atlantis latest release](https://github.com/runatlantis/atlantis/releases/latest). 
* NOTE: You never want to run with `:latest` because if your Pod moves to a new node, Kubernetes will pull the latest image and you might end up upgrading Atlantis by accident! 2. Replace `value: github.com/yourorg/*` under `name: ATLANTIS_REPO_ALLOWLIST` with the allowlist pattern -for your Terraform repos. See [Repo Allowlist](server-configuration.html#repo-allowlist) for more details. +for your Terraform repos. See [--repo-allowlist](server-configuration.md#repo-allowlist) for more details. 3. If you're using GitHub: 1. Replace `` with the username of your Atlantis GitHub user without the `@`. 2. Delete all the `ATLANTIS_GITLAB_*`, `ATLANTIS_GITEA_*`, `ATLANTIS_BITBUCKET_*`, and `ATLANTIS_AZUREDEVOPS_*` environment variables. @@ -126,6 +145,7 @@ for your Terraform repos. See [Repo Allowlist](server-configuration.html#repo-al 2. Delete all the `ATLANTIS_GH_*`, `ATLANTIS_GITLAB_*`, `ATLANTIS_GITEA_*`, and `ATLANTIS_BITBUCKET_*` environment variables. #### StatefulSet Manifest +
Show... @@ -290,10 +310,11 @@ spec: selector: app.kubernetes.io/name: atlantis ``` -
+ #### Deployment Manifest +
Show... @@ -438,14 +459,16 @@ spec: selector: app.kubernetes.io/name: atlantis ``` +
#### Routing and SSL + The manifests above create a Kubernetes `Service` of `type: ClusterIP` which isn't accessible outside your cluster. Depending on how you're doing routing into Kubernetes, you may want to use a Service of `type: LoadBalancer` so that Atlantis is accessible to GitHub/GitLab and your internal users. -If you want to add SSL you can use something like [https://github.com/jetstack/cert-manager](https://github.com/jetstack/cert-manager) to generate SSL +If you want to add SSL you can use something like [cert-manager](https://github.com/cert-manager/cert-manager) to generate SSL certs and mount them into the Pod. Then set the `ATLANTIS_SSL_CERT_FILE` and `ATLANTIS_SSL_KEY_FILE` environment variables to enable SSL. You could also set up SSL at your LoadBalancer. @@ -458,6 +481,7 @@ A `kustomization.yaml` file is provided in the directory `kustomize/`, so you ma You will need to provide a secret (with the default name of `atlantis-vcs`) to configure Atlantis with access credentials for your remote repositories. Example: + ```yaml bases: - github.com/runatlantis/atlantis//kustomize @@ -483,7 +507,6 @@ patchesStrategicMerge: #### Required - ```yaml ... containers: @@ -572,37 +595,44 @@ containers: ``` ### OpenShift + The Helm chart and Kubernetes manifests above are compatible with OpenShift, however you need to run with an additional environment variable: `HOME=/home/atlantis`. This is required because OpenShift runs Docker images with random user id's that use `/` as their home directory. ### AWS Fargate + If you'd like to run Atlantis on [AWS Fargate](https://aws.amazon.com/fargate/) check out the Atlantis module on the [Terraform Module Registry](https://registry.terraform.io/modules/terraform-aws-modules/atlantis/aws/latest) and then check out the [Next Steps](#next-steps). ### Google Kubernetes Engine (GKE) + You can run Atlantis on GKE using the [Helm chart](#kubernetes-helm-chart) or the [manifests](#kubernetes-manifests). There is also a set of full Terraform configurations that create a GKE Cluster, -Cloud Storage Backend and TLS certs: [https://github.com/sethvargo/atlantis-on-gke](https://github.com/sethvargo/atlantis-on-gke). +Cloud Storage Backend and TLS certs: [sethvargo atlantis-on-gke](https://github.com/sethvargo/atlantis-on-gke). Once you're done, see [Next Steps](#next-steps). ### Google Compute Engine (GCE) -Atlantis can be run on Google Compute Engine using a Terraform module that deploys it as a Docker container on a managed Compute Engine instance. -This [Terraform module](https://registry.terraform.io/modules/bschaatsbergen/atlantis/gce/latest) features the creation of a Cloud load balancer, a Container-Optimized OS-based VM, a persistent data disk, and a managed instance group. +Atlantis can be run on Google Compute Engine using a Terraform module that deploys it as a Docker container on a managed Compute Engine instance. + +This [Terraform module](https://registry.terraform.io/modules/runatlantis/atlantis/gce/latest) features the creation of a Cloud load balancer, a Container-Optimized OS-based VM, a persistent data disk, and a managed instance group. After it is deployed, see [Next Steps](#next-steps). ### Docker + Atlantis has an [official](https://ghcr.io/runatlantis/atlantis) Docker image: `ghcr.io/runatlantis/atlantis`. #### Customization + If you need to modify the Docker image that we provide, for instance to add the terragrunt binary, you can do something like this: 1. 
Create a custom docker file + ```dockerfile FROM ghcr.io/runatlantis/atlantis:{latest version} @@ -616,32 +646,37 @@ Additionally, the /docker-entrypoint.d/ directory offers a flexible option for i **Important Notice**: There is a critical update regarding the data directory in Atlantis. In versions prior to 0.26.0, the directory was configured to be accessible by the root user. However, with the transition to the atlantis user in newer versions, it is imperative to update the directory permissions accordingly in your current deployment when upgrading to a version later than 0.26.0. This step ensures seamless access and functionality for the atlantis user. 1. Build your Docker image + ```bash docker build -t {YOUR_DOCKER_ORG}/atlantis-custom . ``` 1. Run your image + ```bash docker run {YOUR_DOCKER_ORG}/atlantis-custom server --gh-user=GITHUB_USERNAME --gh-token=GITHUB_TOKEN ``` ### Microsoft Azure -The standard [Kubernetes Helm Chart](#kubernetes-helm-chart) should work fine on [Azure Kubernetes Service](https://docs.microsoft.com/en-us/azure/aks/intro-kubernetes). +The standard [Kubernetes Helm Chart](#kubernetes-helm-chart) should work fine on [Azure Kubernetes Service](https://docs.microsoft.com/en-us/azure/aks/intro-kubernetes). -Another option is [Azure Container Instances](https://docs.microsoft.com/en-us/azure/container-instances/). See this community member's [repo](https://github.com/jplane/atlantis-on-aci) or the new and more up-to-date [Terraform module](https://github.com/getindata/terraform-azurerm-atlantis) for install scripts and more information on running Atlantis on ACI. +Another option is [Azure Container Instances](https://docs.microsoft.com/en-us/azure/container-instances/). See this community member's [repo](https://github.com/jplane/atlantis-on-aci) or the new and more up-to-date [Terraform module](https://github.com/getindata/terraform-azurerm-atlantis) for install scripts and more information on running Atlantis on ACI. **Note on ACI Deployment:** Due to a bug in earlier Docker releases, Docker v23.0.0 or later is required for straightforward deployment. Alternatively, the Atlantis Docker image can be pushed to a private registry such as ACR and then used. ### Roll Your Own + If you want to roll your own Atlantis installation, you can get the `atlantis` -binary from [https://github.com/runatlantis/atlantis/releases](https://github.com/runatlantis/atlantis/releases) +binary from [GitHub](https://github.com/runatlantis/atlantis/releases) or use the [official Docker image](https://ghcr.io/runatlantis/atlantis). #### Startup Command + The exact flags to `atlantis server` depends on your Git host: ##### GitHub + ```bash atlantis server \ --atlantis-url="$URL" \ @@ -652,6 +687,7 @@ atlantis server \ ``` ##### GitHub Enterprise + ```bash HOSTNAME=YOUR_GITHUB_ENTERPRISE_HOSTNAME # ex. github.runatlantis.io atlantis server \ @@ -664,6 +700,7 @@ atlantis server \ ``` ##### GitLab + ```bash atlantis server \ --atlantis-url="$URL" \ @@ -674,6 +711,7 @@ atlantis server \ ``` ##### GitLab Enterprise + ```bash HOSTNAME=YOUR_GITLAB_ENTERPRISE_HOSTNAME # ex. gitlab.runatlantis.io atlantis server \ @@ -686,6 +724,7 @@ atlantis server \ ``` ##### Gitea + ```bash atlantis server \ --atlantis-url="$URL" \ @@ -697,6 +736,7 @@ atlantis server \ ``` ##### Bitbucket Cloud (bitbucket.org) + ```bash atlantis server \ --atlantis-url="$URL" \ @@ -706,6 +746,7 @@ atlantis server \ ``` ##### Bitbucket Server (aka Stash) + ```bash BASE_URL=YOUR_BITBUCKET_SERVER_URL # ex. 
http://bitbucket.mycorp:7990 atlantis server \ @@ -734,22 +775,23 @@ atlantis server \ ``` Where -- `$URL` is the URL that Atlantis can be reached at -- `$USERNAME` is the GitHub/GitLab/Gitea/Bitbucket/AzureDevops username you generated the token for -- `$TOKEN` is the access token you created. If you don't want this to be passed + +* `$URL` is the URL that Atlantis can be reached at +* `$USERNAME` is the GitHub/GitLab/Gitea/Bitbucket/AzureDevops username you generated the token for +* `$TOKEN` is the access token you created. If you don't want this to be passed in as an argument for security reasons you can specify it in a config file - (see [Configuration](/docs/server-configuration.html#environment-variables)) + (see [Configuration](server-configuration.md#environment-variables)) or as an environment variable: `ATLANTIS_GH_TOKEN` or `ATLANTIS_GITLAB_TOKEN` or `ATLANTIS_GITEA_TOKEN` or `ATLANTIS_BITBUCKET_TOKEN` or `ATLANTIS_AZUREDEVOPS_TOKEN` -- `$SECRET` is the random key you used for the webhook secret. +* `$SECRET` is the random key you used for the webhook secret. If you don't want this to be passed in as an argument for security reasons you can specify it in a config file - (see [Configuration](/docs/server-configuration.html#environment-variables)) + (see [Configuration](server-configuration.md#environment-variables)) or as an environment variable: `ATLANTIS_GH_WEBHOOK_SECRET` or `ATLANTIS_GITLAB_WEBHOOK_SECRET` or - `ATLANTIS_GITEA_WEBHOOK_SECRET` -- `$REPO_ALLOWLIST` is which repos Atlantis can run on, ex. + `ATLANTIS_GITEA_WEBHOOK_SECRET` +* `$REPO_ALLOWLIST` is which repos Atlantis can run on, ex. `github.com/runatlantis/*` or `github.enterprise.corp.com/*`. - See [Repo Allowlist](server-configuration.html#repo-allowlist) for more details. + See [--repo-allowlist](server-configuration.md#repo-allowlist) for more details. Atlantis is now running! ::: tip @@ -758,5 +800,6 @@ restart it in case of failure. ::: ## Next Steps + * To ensure Atlantis is running, load its UI. By default Atlantis runs on port `4141`. -* Now you're ready to add Webhooks to your repos. See [Configuring Webhooks](configuring-webhooks.html). +* Now you're ready to add Webhooks to your repos. See [Configuring Webhooks](configuring-webhooks.md). diff --git a/runatlantis.io/docs/faq.md b/runatlantis.io/docs/faq.md index 2cea8e8c92..1764719d97 100644 --- a/runatlantis.io/docs/faq.md +++ b/runatlantis.io/docs/faq.md @@ -1,4 +1,5 @@ # FAQ + **Q: Does Atlantis affect Terraform [remote state](https://developer.hashicorp.com/terraform/language/state/remote)?** A: No. Atlantis does not interfere with Terraform remote state in any way. Under the hood, Atlantis is simply executing `terraform plan` and `terraform apply`. diff --git a/runatlantis.io/docs/how-atlantis-works.md b/runatlantis.io/docs/how-atlantis-works.md index ed57d988f5..f486091b3b 100644 --- a/runatlantis.io/docs/how-atlantis-works.md +++ b/runatlantis.io/docs/how-atlantis-works.md @@ -1,7 +1,8 @@ # How Atlantis Works + This section of docs talks about how Atlantis at deeper level. 
-* [Locking](locking.html) -* [Autoplanning](autoplanning.html) -* [Automerging](automerging.html) -* [Security](security.html) +* [Locking](locking.md) +* [Autoplanning](autoplanning.md) +* [Automerging](automerging.md) +* [Security](security.md) diff --git a/runatlantis.io/docs/installation-guide.md b/runatlantis.io/docs/installation-guide.md index ec166f45f4..f5f1bd71d1 100644 --- a/runatlantis.io/docs/installation-guide.md +++ b/runatlantis.io/docs/installation-guide.md @@ -1,20 +1,22 @@ # Installation Guide + This guide is for installing a **production-ready** instance of Atlantis onto your infrastructure: + 1. First, ensure your Terraform setup meets the Atlantis **requirements** - * See [Requirements](requirements.html) + * See [Requirements](requirements.md) 1. Create **access credentials** for your Git host (GitHub, GitLab, Gitea, Bitbucket, Azure DevOps) - * See [Generating Git Host Access Credentials](access-credentials.html) + * See [Generating Git Host Access Credentials](access-credentials.md) 1. Create a **webhook secret** so Atlantis can validate webhooks - * See [Creating a Webhook Secret](webhook-secrets.html) + * See [Creating a Webhook Secret](webhook-secrets.md) 1. **Deploy** Atlantis into your infrastructure - * See [Deployment](deployment.html) + * See [Deployment](deployment.md) 1. Configure **Webhooks** on your Git host so Atlantis can respond to your pull requests - * See [Configuring Webhooks](configuring-webhooks.html) + * See [Configuring Webhooks](configuring-webhooks.md) 1. Configure **provider credentials** so Atlantis can actually run Terraform commands - * See [Provider Credentials](provider-credentials.html) + * See [Provider Credentials](provider-credentials.md) :::tip -If you want to test out Atlantis first, check out [Test Drive](../guide/test-drive.html) -and [Testing Locally](../guide/testing-locally.html). +If you want to test out Atlantis first, check out [Test Drive](../guide/test-drive.md) +and [Testing Locally](../guide/testing-locally.md). ::: diff --git a/runatlantis.io/docs/locking.md b/runatlantis.io/docs/locking.md index 65836d3b70..c75e2b3fce 100644 --- a/runatlantis.io/docs/locking.md +++ b/runatlantis.io/docs/locking.md @@ -1,4 +1,5 @@ # Locking + When `plan` is run, the directory and Terraform workspace are **Locked** until the pull request is merged or closed, or the plan is manually deleted. If another user attempts to `plan` for the same directory and workspace in a different pull request @@ -12,9 +13,8 @@ Which links them to the pull request that holds the lock. Only the directory in the repo and Terraform workspace are locked, not the whole repo. ::: -[[toc]] - ## Why + 1. Because `atlantis apply` is being done before the pull request is merged, after an apply your `main` branch does not represent the most up to date version of your infrastructure anymore. With locking, you can ensure that no other changes will be made until the @@ -30,6 +30,7 @@ but with the added ability to re-plan/apply multiple times if things don't work. will be made invalid after the in-progress plan is applied. ## Viewing Locks + To view locks, go to the URL that Atlantis is hosted at: ![Locks View](./images/locks-ui.png) @@ -41,6 +42,7 @@ You can click on a lock to view its details:

## Unlocking + The project and workspace will be automatically unlocked when the PR is merged or closed. To unlock the project and workspace without completing an `apply` and merging, comment `atlantis unlock` on the PR, @@ -59,6 +61,7 @@ to delete the lock. Once a plan is discarded, you'll need to run `plan` again prior to running `apply` when you go back to that pull request. ## Relationship to Terraform State Locking + Atlantis does not conflict with [Terraform State Locking](https://developer.hashicorp.com/terraform/language/state/locking). Under the hood, all Atlantis is doing is running `terraform plan` and `apply` and so all of the locking built in to those commands by Terraform isn't affected. diff --git a/runatlantis.io/docs/policy-checking.md b/runatlantis.io/docs/policy-checking.md index c996ef7ee0..054a5448ce 100644 --- a/runatlantis.io/docs/policy-checking.md +++ b/runatlantis.io/docs/policy-checking.md @@ -10,7 +10,7 @@ for using this step include: ## How it works? -Enabling "policy checking" in addition to the [mergeable apply requirement](/docs/command-requirements.html#supported-requirements) blocks applies on plans that fail any of the defined conftest policies. +Enabling "policy checking" in addition to the [mergeable apply requirement](command-requirements.md#supported-requirements) blocks applies on plans that fail any of the defined conftest policies. ![Policy Check Apply Failure](./images/policy-check-apply-failure.png) @@ -20,9 +20,9 @@ Any failures need to either be addressed in a successive commit, or approved by ![Policy Check Approval](./images/policy-check-approval.png) - Policy approvals may be cleared either by re-planing, or by issuing the following command: -``` + +```shell atlantis approve_policies --clear-policy-approval ``` @@ -44,11 +44,11 @@ All repositories will have policy checking enabled. ### Step 2: Define the policy configuration -Policy Configuration is defined in the [server-side repo configuration](https://www.runatlantis.io/docs/server-side-repo-config.html#reference). +Policy Configuration is defined in the [server-side repo configuration](server-side-repo-config.md#reference). In this example we will define one policy set with one owner: -``` +```yaml policies: owners: users: @@ -72,11 +72,11 @@ policies: - `owners` - Defines the users/teams which are able to approve a specific policy set. - `approve_count` - Defines the number of approvals needed to bypass policy checks. Defaults to the top-level policies configuration, if not specified. -By default conftest is configured to only run the `main` package. If you wish to run specific/multiple policies consider passing `--namespace` or `--all-namespaces` to conftest with [`extra_args`](https://www.runatlantis.io/docs/custom-workflows.html#adding-extra-arguments-to-terraform-commands) via a custom workflow as shown in the below example. +By default conftest is configured to only run the `main` package. If you wish to run specific/multiple policies consider passing `--namespace` or `--all-namespaces` to conftest with [`extra_args`](custom-workflows.md#adding-extra-arguments-to-terraform-commands) via a custom workflow as shown in the below example. Example Server Side Repo configuration using `--all-namespaces` and a local src dir. 
-``` +```yaml repos: - id: github.com/myorg/example-repo workflow: custom @@ -104,7 +104,7 @@ workflows: Conftest policies are based on [Open Policy Agent (OPA)](https://www.openpolicyagent.org/) and written in [rego](https://www.openpolicyagent.org/docs/latest/policy-language/#what-is-rego). Following our example, simply create a `rego` file in `null_resource_warning` folder with following code, the code below a simple policy that will fail for plans containing newly created `null_resource`s. -``` +```rego package main resource_types = {"null_resource"} @@ -144,7 +144,7 @@ That's it! Now your Atlantis instance is configured to run policies on your Terr ### Pulling policies from a remote location -Conftest supports [pulling policies](https://www.conftest.dev/sharing/#pulling) from remote locations such as S3, git, OCI, and other protocols supported by the [go-getter](https://github.com/hashicorp/go-getter) library. The key [`extra_args`](https://www.runatlantis.io/docs/custom-workflows.html#adding-extra-arguments-to-terraform-commands) can be used to pass in the [`--update`](https://www.conftest.dev/sharing/#-update-flag) flag to tell `conftest` to pull the policies into the project folder before running the policy check. +Conftest supports [pulling policies](https://www.conftest.dev/sharing/#pulling) from remote locations such as S3, git, OCI, and other protocols supported by the [go-getter](https://github.com/hashicorp/go-getter) library. The key [`extra_args`](custom-workflows.md#adding-extra-arguments-to-terraform-commands) can be used to pass in the [`--update`](https://www.conftest.dev/sharing/#-update-flag) flag to tell `conftest` to pull the policies into the project folder before running the policy check. ```yaml workflows: @@ -163,7 +163,7 @@ Note that authentication may need to be configured separately if pulling policie ### Running policy check against Terraform source code -By default, Atlantis runs the policy check against the [`SHOWFILE`](https://www.runatlantis.io/docs/custom-workflows.html#custom-run-command). In order to run the policy test against Terraform files directly, override the default `conftest` command used and pass in `*.tf` as one of the inputs to `conftest`. The `show` step is required so that Atlantis will generate the `SHOWFILE`. +By default, Atlantis runs the policy check against the [`SHOWFILE`](custom-workflows.md#custom-run-command). In order to run the policy test against Terraform files directly, override the default `conftest` command used and pass in `*.tf` as one of the inputs to `conftest`. The `show` step is required so that Atlantis will generate the `SHOWFILE`. ```yaml workflows: @@ -176,8 +176,7 @@ workflows: ### Quiet policy checks -By default, Atlantis will add a comment to all pull requests with the policy check result - both successes and failures. Version 0.21.0 added the [`--quiet-policy-checks`](server-configuration.html#quiet-policy-checks) option, which will instead only add comments when policy checks fail, significantly reducing the number of comments when most policy check results succeed. - +By default, Atlantis will add a comment to all pull requests with the policy check result - both successes and failures. Version 0.21.0 added the [`--quiet-policy-checks`](server-configuration.md#quiet-policy-checks) option, which will instead only add comments when policy checks fail, significantly reducing the number of comments when most policy check results succeed. 
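For reference, a rough sketch of how this option could be turned on outside of the command line, assuming the usual conventions that server config keys and `ATLANTIS_*` environment variables mirror the flag names:

```yaml
# Sketch: server config file passed via `atlantis server --config config.yaml`
# (key assumed to mirror the --quiet-policy-checks flag name)
quiet-policy-checks: true
```

Following the same naming convention, the equivalent environment variable would be `ATLANTIS_QUIET_POLICY_CHECKS=true`.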
### Data for custom run steps @@ -198,9 +197,10 @@ When the policy check workflow runs, a file is created in the working directory ## Running policy check only on some repositories -When policy checking is enabled it will be enforced on all repositories, in order to disable policy checking on some repositories first [enable policy checks](https://www.runatlantis.io/docs/policy-checking.html#getting-started) and then disable it explicitly on each repository with the `policy_check` flag. +When policy checking is enabled it will be enforced on all repositories, in order to disable policy checking on some repositories first [enable policy checks](policy-checking.md#getting-started) and then disable it explicitly on each repository with the `policy_check` flag. For server side config: + ```yml # repos.yaml repos: @@ -216,6 +216,7 @@ repos: ``` For repo level `atlantis.yaml` config: + ```yml version: 3 projects: diff --git a/runatlantis.io/docs/post-workflow-hooks.md b/runatlantis.io/docs/post-workflow-hooks.md index a9f1e05e94..91ba0b7aa7 100644 --- a/runatlantis.io/docs/post-workflow-hooks.md +++ b/runatlantis.io/docs/post-workflow-hooks.md @@ -2,12 +2,10 @@ Post workflow hooks can be defined to run scripts after default or custom workflows are executed. Post workflow hooks differ from [custom -workflows](custom-workflows.html#custom-run-command) in that they are run +workflows](custom-workflows.md#custom-run-command) in that they are run outside of Atlantis commands. Which means they do not surface their output back to the PR as a comment. -[[toc]] - ## Usage Post workflow hooks can only be specified in the Server-Side Repo Config under @@ -44,7 +42,6 @@ have finished. In this example we use a custom workflow to generate cost estimates for each workflow using [Infracost](https://www.infracost.io/docs/integrations/cicd/#cicd-integrations), then create a summary report after all workflows have completed. - ```yaml # repos.yaml workflows: @@ -88,7 +85,7 @@ repos: ### Custom `run` Command This is very similar to [custom workflow run -command](custom-workflows.html#custom-run-command). +command](custom-workflows.md#custom-run-command). ```yaml - run: custom-command @@ -102,6 +99,7 @@ command](custom-workflows.html#custom-run-command). | shellArgs | string | '-c' | no | The shell arguments to use for running the command | ::: tip Notes + * `run` commands are executed with the following environment variables: * `BASE_REPO_NAME` - Name of the repository that the pull request will be merged into, ex. `atlantis`. * `BASE_REPO_OWNER` - Owner of the repository that the pull request will be merged into, ex. `runatlantis`. diff --git a/runatlantis.io/docs/pre-workflow-hooks.md b/runatlantis.io/docs/pre-workflow-hooks.md index 9087be24c7..dce3f2fe7d 100644 --- a/runatlantis.io/docs/pre-workflow-hooks.md +++ b/runatlantis.io/docs/pre-workflow-hooks.md @@ -2,15 +2,13 @@ Pre workflow hooks can be defined to run scripts before default or custom workflows are executed. Pre workflow hooks differ from [custom -workflows](custom-workflows.html#custom-run-command) in several ways. +workflows](custom-workflows.md#custom-run-command) in several ways. 1. Pre workflow hooks do not require the repository configuration to be - present. This can be utilized to [dynamically generate repo configs](pre-workflow-hooks.html#dynamic-repo-config-generation). + present. This can be utilized to [dynamically generate repo configs](pre-workflow-hooks.md#dynamic-repo-config-generation). 2. 
Pre workflow hooks are run outside of Atlantis commands. Which means they do not surface their output back to the PR as a comment. -[[toc]] - ## Usage Pre workflow hooks can only be specified in the Server-Side Repo Config under the @@ -19,9 +17,9 @@ Pre workflow hooks can only be specified in the Server-Side Repo Config under th ::: tip Note By default, `pre-workflow-hooks` do not prevent Atlantis from executing its workflows(`plan`, `apply`) even if a `run` command exits with an error. This -behavior can be changed by setting the [fail-on-pre-workflow-hook-error](server-configuration.html#fail-on-pre-workflow-hook-error) +behavior can be changed by setting the [fail-on-pre-workflow-hook-error](server-configuration.md#fail-on-pre-workflow-hook-error) flag in the Atlantis server configuration. -::: +::: ## Atlantis Command Targetting @@ -84,7 +82,7 @@ repos: ### Custom `run` Command This is very similar to the [custom workflow run -command](custom-workflows.html#custom-run-command). +command](custom-workflows.md#custom-run-command). ```yaml - run: custom-command @@ -98,6 +96,7 @@ command](custom-workflows.html#custom-run-command). | shellArgs | string | '-c' | no | The shell arguments to use for running the command | ::: tip Notes + * `run` commands are executed with the following environment variables: * `BASE_REPO_NAME` - Name of the repository that the pull request will be merged into, ex. `atlantis`. * `BASE_REPO_OWNER` - Owner of the repository that the pull request will be merged into, ex. `runatlantis`. @@ -109,7 +108,7 @@ command](custom-workflows.html#custom-run-command). * `PULL_NUM` - Pull request number or ID, ex. `2`. * `PULL_URL` - Pull request URL, ex. `https://github.com/runatlantis/atlantis/pull/2`. * `PULL_AUTHOR` - Username of the pull request author, ex. `acme-user`. - * `DIR` - The absolute path to the root of the cloned repository. + * `DIR` - The absolute path to the root of the cloned repository. * `USER_NAME` - Username of the VCS user running command, ex. `acme-user`. During an autoplan, the user will be the Atlantis API user, ex. `atlantis`. * `COMMENT_ARGS` - Any additional flags passed in the comment on the pull request. Flags are separated by commas and every character is escaped, ex. `atlantis plan -- arg1 arg2` will result in `COMMENT_ARGS=\a\r\g\1,\a\r\g\2`. diff --git a/runatlantis.io/docs/provider-credentials.md b/runatlantis.io/docs/provider-credentials.md index 793c082e94..09dd289759 100644 --- a/runatlantis.io/docs/provider-credentials.md +++ b/runatlantis.io/docs/provider-credentials.md @@ -1,17 +1,19 @@ # Provider Credentials + Atlantis runs Terraform by simply executing `terraform plan` and `apply` commands on the server Atlantis is hosted on. Just like when you run Terraform locally, Atlantis needs credentials for your specific provider. It's up to you how you provide credentials for your specific provider to Atlantis: -* The Atlantis [Helm Chart](deployment.html#kubernetes-helm-chart) and - [AWS Fargate Module](deployment.html#aws-fargate) have their own mechanisms for provider + +* The Atlantis [Helm Chart](deployment.md#kubernetes-helm-chart) and + [AWS Fargate Module](deployment.md#aws-fargate) have their own mechanisms for provider credentials. Read their docs. 
* If you're running Atlantis in a cloud then many clouds have ways to give cloud API access to applications running on them, ex: - * [AWS EC2 Roles](https://registry.terraform.io/providers/hashicorp/aws/latest/docs) (Search for "EC2 Role") - * [GCE Instance Service Accounts](https://registry.terraform.io/providers/hashicorp/google/latest/docs/guides/provider_reference) + * [AWS EC2 Roles](https://registry.terraform.io/providers/hashicorp/aws/latest/docs) (Search for "EC2 Role") + * [GCE Instance Service Accounts](https://registry.terraform.io/providers/hashicorp/google/latest/docs/guides/provider_reference) * Many users set environment variables, ex. `AWS_ACCESS_KEY`, where Atlantis is running. * Others create the necessary config files, ex. `~/.aws/credentials`, where Atlantis is running. * Use the [HashiCorp Vault Provider](https://registry.terraform.io/providers/hashicorp/vault/latest/docs) @@ -22,10 +24,10 @@ As a general rule, if you can `ssh` or `exec` into the server where Atlantis is running and run `terraform` commands like you would locally, then Atlantis will work. ::: - ## AWS Specific Info ### Multiple AWS Accounts + Atlantis supports multiple AWS accounts through the use of Terraform's [AWS Authentication](https://registry.terraform.io/providers/hashicorp/aws/latest/docs) (Search for "Authentication"). @@ -41,6 +43,7 @@ won't work for multiple accounts since Atlantis wouldn't know which environment Terraform with. ### Assume Role Session Names + If you're using Terraform < 0.12, Atlantis injects 5 Terraform variables that can be used to dynamically name the assume role session name. Setting the `session_name` allows you to trace API calls made through Atlantis back to a specific user and repo via CloudWatch: @@ -89,5 +92,6 @@ You can still set these variables yourself using the `extra_args` configuration. ::: ## Next Steps -* If you want to configure Atlantis further, read [Configuring Atlantis](configuring-atlantis.html) -* If you're ready to use Atlantis, read [Using Atlantis](using-atlantis.html) + +* If you want to configure Atlantis further, read [Configuring Atlantis](configuring-atlantis.md) +* If you're ready to use Atlantis, read [Using Atlantis](using-atlantis.md) diff --git a/runatlantis.io/docs/repo-level-atlantis-yaml.md b/runatlantis.io/docs/repo-level-atlantis-yaml.md index f53fd3363f..0c9af490d6 100644 --- a/runatlantis.io/docs/repo-level-atlantis-yaml.md +++ b/runatlantis.io/docs/repo-level-atlantis-yaml.md @@ -1,31 +1,33 @@ # Repo Level atlantis.yaml Config + An `atlantis.yaml` file specified at the root of a Terraform repo allows you to instruct Atlantis on the structure of your repo and set custom workflows. -[[toc]] - ## Do I need an atlantis.yaml file? + `atlantis.yaml` files are only required if you wish to customize some aspect of Atlantis. The default Atlantis config works for many users without changes. Read through the [use-cases](#use-cases) to determine if you need it. ## Enabling atlantis.yaml + By default, all repos are allowed to have an `atlantis.yaml` file, but some of the keys are restricted by default. Restricted keys can be set in the server-side `repos.yaml` repo config file. You can enable `atlantis.yaml` to override restricted -keys by setting the `allowed_overrides` key there. See the [Server Side Repo Config](server-side-repo-config.html) for +keys by setting the `allowed_overrides` key there. See the [Server Side Repo Config](server-side-repo-config.md) for more details. 
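As a rough illustration of that server-side setting (a minimal sketch with assumed example values; see the Server Side Repo Config page for the authoritative reference):

```yaml
# repos.yaml (server-side repo config) — hypothetical minimal example
repos:
  - id: /.*/                                         # apply to all repos
    allowed_overrides: [workflow, apply_requirements] # keys repos may override in atlantis.yaml
    allow_custom_workflows: true                      # only needed if repos define their own workflows
```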
-**Notes** +**Notes:** + * By default, repo root `atlantis.yaml` file is used. -* You can change this behaviour by setting [Server Side Repo Config](server-side-repo-config.html) +* You can change this behaviour by setting [Server Side Repo Config](server-side-repo-config.md) ::: danger DANGER Atlantis uses the `atlantis.yaml` version from the pull request, similar to other -CI/CD systems. If you're allowing users to [create custom workflows](server-side-repo-config.html#allow-repos-to-define-their-own-workflows) +CI/CD systems. If you're allowing users to [create custom workflows](server-side-repo-config.md#allow-repos-to-define-their-own-workflows) then this means anyone that can create a pull request to your repo can run arbitrary code on the Atlantis server. @@ -149,7 +151,9 @@ grep -P 'backend[\s]+"s3"' **/*.tf | ``` ## Use Cases + ### Disabling Autoplanning + ```yaml version: 3 projects: @@ -157,6 +161,7 @@ projects: autoplan: enabled: false ``` + This will stop Atlantis automatically running plan when `project1/` is updated in a pull request. @@ -180,7 +185,7 @@ Parallel plans and applies work across both multiple directories and multiple wo Given the directory structure: -``` +```plain . ├── modules │   └── module1 @@ -195,7 +200,6 @@ Given the directory structure: If you want Atlantis to plan `project1/` whenever any `.tf` files under `module1/` change or any `.tf` or `.tfvars` files under `project1/` change you could use the following configuration: - ```yaml version: 3 projects: @@ -205,12 +209,14 @@ projects: ``` Note: + * `when_modified` uses the [`.dockerignore` syntax](https://docs.docker.com/engine/reference/builder/#dockerignore-file) * The paths are relative to the project's directory. * `when_modified` will be used by both automatic and manually run plans. * `when_modified` will continue to work for manually run plans even when autoplan is disabled. ### Supporting Terraform Workspaces + ```yaml version: 3 projects: @@ -219,34 +225,44 @@ projects: - dir: project1 workspace: production ``` + With the above config, when Atlantis determines that the configuration for the `project1` dir has changed, it will run plan for both the `staging` and `production` workspaces. 
If you want to `plan` or `apply` for a specific workspace you can use -``` + +```shell atlantis plan -w staging -d project1 ``` + and -``` + +```shell atlantis apply -w staging -d project1 ``` ### Using .tfvars files -See [Custom Workflow Use Cases: Using .tfvars files](custom-workflows.html#tfvars-files) + +See [Custom Workflow Use Cases: Using .tfvars files](custom-workflows.md#tfvars-files) ### Adding extra arguments to Terraform commands -See [Custom Workflow Use Cases: Adding extra arguments to Terraform commands](custom-workflows.html#adding-extra-arguments-to-terraform-commands) + +See [Custom Workflow Use Cases: Adding extra arguments to Terraform commands](custom-workflows.md#adding-extra-arguments-to-terraform-commands) ### Custom init/plan/apply Commands -See [Custom Workflow Use Cases: Custom init/plan/apply Commands](custom-workflows.html#custom-init-plan-apply-commands) + +See [Custom Workflow Use Cases: Custom init/plan/apply Commands](custom-workflows.md#custom-init-plan-apply-commands) ### Terragrunt -See [Custom Workflow Use Cases: Terragrunt](custom-workflows.html#terragrunt) + +See [Custom Workflow Use Cases: Terragrunt](custom-workflows.md#terragrunt) ### Running custom commands -See [Custom Workflow Use Cases: Running custom commands](custom-workflows.html#running-custom-commands) + +See [Custom Workflow Use Cases: Running custom commands](custom-workflows.md#running-custom-commands) ### Terraform Versions + If you'd like to use a different version of Terraform than what is in Atlantis' `PATH` or is set by the `--default-tf-version` flag, then set the `terraform_version` key: @@ -260,7 +276,9 @@ projects: Atlantis will automatically download and use this version. ### Requiring Approvals For Production + In this example, we only want to require `apply` approvals for the `production` directory. + ```yaml version: 3 projects: @@ -270,12 +288,14 @@ projects: apply_requirements: [approved] import_requirements: [approved] ``` + :::warning `plan_requirements`, `apply_requirements` and `import_requirements` are restricted keys so this repo will need to be configured -to be allowed to set this key. See [Server-Side Repo Config Use Cases](server-side-repo-config.html#repos-can-set-their-own-apply-an-applicable-subcommand). +to be allowed to set this key. See [Server-Side Repo Config Use Cases](server-side-repo-config.md#repos-can-set-their-own-apply-an-applicable-subcommand). ::: ### Order of planning/applying + ```yaml version: 3 abort_on_execution_order_fail: true @@ -285,12 +305,13 @@ projects: - dir: project2 execution_order_group: 1 ``` + With this config above, Atlantis runs planning/applying for project2 first, then for project1. Several projects can have same `execution_order_group`. Any order in one group isn't guaranteed. `parallel_plan` and `parallel_apply` respect these order groups, so parallel planning/applying works -in each group one by one. +in each group one by one. -If any plan/apply fails and `abort_on_execution_order_fail` is set to true on a repo level, all the +If any plan/apply fails and `abort_on_execution_order_fail` is set to true on a repo level, all the following groups will be aborted. For this example, if project2 fails then project1 will not run. Execution order groups are useful when you have dependencies between projects. However, they are only applicable in the case where @@ -298,6 +319,7 @@ you initiate a global apply for all of your projects, i.e `atlantis apply`. If y Thus, the `depends_on` key is more useful in this case. 
and can be used in conjunction with execution order groups. The following configuration is an example of how to use execution order groups and depends_on together to enforce dependencies between projects. + ```yaml version: 3 projects: @@ -325,6 +347,7 @@ projects: workspace: production workflow: infra ``` + the `depends_on` feature will make sure that `production` is not applied before `staging` for example. ::: tip @@ -333,11 +356,14 @@ What Happens if one or more project's dependencies are not applied? If there's one or more projects in the dependency list which is not in applied status, users will see an error message like this: `Can't apply your project unless you apply its dependencies` ::: + ### Autodiscovery Config + ```yaml autodiscover: mode: "auto" ``` + The above is the default configuration for `autodiscover.mode`. When `autodiscover.mode` is auto, projects will be discovered only if the repo has no `projects` configured. @@ -345,14 +371,16 @@ projects will be discovered only if the repo has no `projects` configured. autodiscover: mode: "disabled" ``` + With the config above, Atlantis will never try to discover projects, even when there are no `projects` configured. This is useful if dynamically generating Atlantis config in pre_workflow hooks. -See [Dynamic Repo Config Generation](pre-workflow-hooks.html#dynamic-repo-config-generation). +See [Dynamic Repo Config Generation](pre-workflow-hooks.md#dynamic-repo-config-generation). ```yaml autodiscover: mode: "enabled" ``` + With the config above, Atlantis will unconditionally try to discover projects based on modified_files, even when the directory of the project is missing from the configured `projects` in the repo configuration. If a discovered project has the same directory as a project which was manually configured in `projects`, @@ -362,10 +390,13 @@ Use this feature when some projects require specific configuration in a repo wit it's still desirable for Atlantis to plan/apply for projects not enumerated in the config. ### Custom Backend Config -See [Custom Workflow Use Cases: Custom Backend Config](custom-workflows.html#custom-backend-config) + +See [Custom Workflow Use Cases: Custom Backend Config](custom-workflows.md#custom-backend-config) ## Reference + ### Top-Level Keys + ```yaml version: 3 automerge: false @@ -374,16 +405,18 @@ projects: workflows: allowed_regexp_prefixes: ``` -| Key | Type | Default | Required | Description | -|-------------------------------|----------------------------------------------------------|---------|----------|--------------------------------------------------------------------------------------------------------------------------------------| -| version | int | none | **yes** | This key is required and must be set to `3`. | -| automerge | bool | `false` | no | Automatically merges pull request when all plans are applied. | -| delete_source_branch_on_merge | bool | `false` | no | Automatically deletes the source branch on merge. | -| projects | array[[Project](repo-level-atlantis-yaml.html#project)] | `[]` | no | Lists the projects in this repo. | -| workflows
*(restricted)* | map[string: [Workflow](custom-workflows.html#reference)] | `{}` | no | Custom workflows. | -| allowed_regexp_prefixes | array[string] | `[]` | no | Lists the allowed regexp prefixes to use when the [`--enable-regexp-cmd`](server-configuration.html#enable-regexp-cmd) flag is used. | + +| Key | Type | Default | Required | Description | +|-------------------------------|--------------------------------------------------------|---------|----------|------------------------------------------------------------------------------------------------------------------------------------| +| version | int | none | **yes** | This key is required and must be set to `3`. | +| automerge | bool | `false` | no | Automatically merges pull request when all plans are applied. | +| delete_source_branch_on_merge | bool | `false` | no | Automatically deletes the source branch on merge. | +| projects | array[[Project](repo-level-atlantis-yaml.md#project)] | `[]` | no | Lists the projects in this repo. | +| workflows
*(restricted)* | map[string: [Workflow](custom-workflows.md#reference)] | `{}` | no | Custom workflows. | +| allowed_regexp_prefixes | array\[string\] | `[]` | no | Lists the allowed regexp prefixes to use when the [`--enable-regexp-cmd`](server-configuration.md#enable-regexp-cmd) flag is used. | ### Project + ```yaml name: myname branch: /mybranch/ @@ -414,33 +447,37 @@ workflow: myworkflow | repo_locking | bool | `true` | no | (deprecated) Get a repository lock in this project when plan. | | repo_locks | [RepoLocks](#repolocks) | `mode: on_plan` | no | Get a repository lock in this project on plan or apply. See [RepoLocks](#repolocks) for more details. | | custom_policy_check | bool | `false` | no | Enable using policy check tools other than Conftest | -| autoplan | [Autoplan](#autoplan) | none | no | A custom autoplan configuration. If not specified, will use the autoplan config. See [Autoplanning](autoplanning.html). | +| autoplan | [Autoplan](#autoplan) | none | no | A custom autoplan configuration. If not specified, will use the autoplan config. See [Autoplanning](autoplanning.md). | | terraform_version | string | none | no | A specific Terraform version to use when running commands for this project. Must be [Semver compatible](https://semver.org/), ex. `v0.11.0`, `0.12.0-beta1`. | -| plan_requirements
*(restricted)* | array[string] | none | no | Requirements that must be satisfied before `atlantis plan` can be run. Currently the only supported requirements are `approved`, `mergeable`, and `undiverged`. See [Command Requirements](command-requirements.html) for more details. | -| apply_requirements
*(restricted)* | array[string] | none | no | Requirements that must be satisfied before `atlantis apply` can be run. Currently the only supported requirements are `approved`, `mergeable`, and `undiverged`. See [Command Requirements](command-requirements.html) for more details. | -| import_requirements
*(restricted)* | array[string] | none | no | Requirements that must be satisfied before `atlantis import` can be run. Currently the only supported requirements are `approved`, `mergeable`, and `undiverged`. See [Command Requirements](command-requirements.html) for more details. | +| plan_requirements
*(restricted)* | array\[string\] | none | no | Requirements that must be satisfied before `atlantis plan` can be run. Currently the only supported requirements are `approved`, `mergeable`, and `undiverged`. See [Command Requirements](command-requirements.md) for more details. | +| apply_requirements
*(restricted)* | array\[string\] | none | no | Requirements that must be satisfied before `atlantis apply` can be run. Currently the only supported requirements are `approved`, `mergeable`, and `undiverged`. See [Command Requirements](command-requirements.md) for more details. | +| import_requirements
*(restricted)* | array\[string\] | none | no | Requirements that must be satisfied before `atlantis import` can be run. Currently the only supported requirements are `approved`, `mergeable`, and `undiverged`. See [Command Requirements](command-requirements.md) for more details. | | workflow
*(restricted)* | string | none | no | A custom workflow. If not specified, Atlantis will use its default workflow. | ::: tip A project represents a Terraform state. Typically, there is one state per directory and workspace however it's possible to have multiple states in the same directory using `terraform init -backend-config=custom-config.tfvars`. -Atlantis supports this but requires the `name` key to be specified. See [Custom Backend Config](custom-workflows.html#custom-backend-config) for more details. +Atlantis supports this but requires the `name` key to be specified. See [Custom Backend Config](custom-workflows.md#custom-backend-config) for more details. ::: ### Autoplan + ```yaml enabled: true when_modified: ["*.tf", "terragrunt.hcl", ".terraform.lock.hcl"] ``` -| Key | Type | Default | Required | Description | -|-----------------------|---------------|----------------|----------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| enabled | boolean | `true` | no | Whether autoplanning is enabled for this project. | -| when_modified | array[string] | `["**/*.tf*"]` | no | Uses [.dockerignore](https://docs.docker.com/engine/reference/builder/#dockerignore-file) syntax. If any modified file in the pull request matches, this project will be planned. See [Autoplanning](autoplanning.html). Paths are relative to the project's dir. | + +| Key | Type | Default | Required | Description | +|-----------------------|-----------------|----------------|----------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| enabled | boolean | `true` | no | Whether autoplanning is enabled for this project. | +| when_modified | array\[string\] | `["**/*.tf*"]` | no | Uses [.dockerignore](https://docs.docker.com/engine/reference/builder/#dockerignore-file) syntax. If any modified file in the pull request matches, this project will be planned. See [Autoplanning](autoplanning.md). Paths are relative to the project's dir. | ### RepoLocks + ```yaml mode: on_apply ``` + | Key | Type | Default | Required | Description | |------|--------|-----------|----------|---------------------------------------------------------------------------------------------------------------------------------------| | mode | `Mode` | `on_plan` | no | Whether or not repository locks are enabled for this project on plan or apply. Valid values are `disabled`, `on_plan` and `on_apply`. | diff --git a/runatlantis.io/docs/requirements.md b/runatlantis.io/docs/requirements.md index 4630c05fb7..a661ac3508 100644 --- a/runatlantis.io/docs/requirements.md +++ b/runatlantis.io/docs/requirements.md @@ -1,10 +1,10 @@ # Requirements + Atlantis works with most Git hosts and Terraform setups. Read on to confirm it works with yours. -[[toc]] - ## Git Host + Atlantis integrates with the following Git hosts: * GitHub (public, private or enterprise) @@ -15,6 +15,7 @@ Atlantis integrates with the following Git hosts: * Azure DevOps ## Terraform State + Atlantis supports all backend types **except for local state**. 
We don't support local state because Atlantis does not have permanent storage and it doesn't commit the new statefile back to version control. @@ -25,17 +26,20 @@ storage from Terraform Cloud. This is fully supported by Atlantis. ::: ## Repository Structure + Atlantis supports any Terraform repository structure, for example: ### Single Terraform Project At Repo Root -``` + +```plain . ├── main.tf └── ... ``` ### Multiple Project Folders -``` + +```plain . ├── project1 │   ├── main.tf @@ -46,7 +50,8 @@ Atlantis supports any Terraform repository structure, for example: ``` ### Modules -``` + +```plain . ├── project1 │   ├── main.tf @@ -56,35 +61,42 @@ Atlantis supports any Terraform repository structure, for example:    ├── main.tf └── ... ``` + With modules, if you want `project1` automatically planned when `module1` is modified -you need to create an `atlantis.yaml` file. See [atlantis.yaml Use Cases](repo-level-atlantis-yaml.html#configuring-planning) for more details. +you need to create an `atlantis.yaml` file. See [atlantis.yaml Use Cases](repo-level-atlantis-yaml.md#configuring-planning) for more details. + +### Terraform Workspaces -### Terraform Workspaces *See [Terraform's docs](https://developer.hashicorp.com/terraform/language/state/workspaces) if you are unfamiliar with workspaces.* If you're using Terraform `>= 0.9.0`, Atlantis supports workspaces through an `atlantis.yaml` file that tells Atlantis the names of your workspaces -(see [atlantis.yaml Use Cases](repo-level-atlantis-yaml.html#supporting-terraform-workspaces) for more details) +(see [atlantis.yaml Use Cases](repo-level-atlantis-yaml.md#supporting-terraform-workspaces) for more details) ### .tfvars Files -``` + +```plain . ├── production.tfvars │── staging.tfvars └── main.tf ``` + For Atlantis to be able to plan automatically with `.tfvars files`, you need to create an `atlantis.yaml` file to tell it to use `-var-file={YOUR_FILE}`. -See [atlantis.yaml Use Cases](custom-workflows.html#tfvars-files) for more details. +See [atlantis.yaml Use Cases](custom-workflows.md#tfvars-files) for more details. ### Multiple Repos + Atlantis supports multiple repos as well–as long as there is a webhook configured for each repo. ## Terraform Versions + Atlantis supports all Terraform versions (including 0.12) and can be configured -to use different versions for different repositories/projects. See [Terraform Versions](terraform-versions.html). +to use different versions for different repositories/projects. See [Terraform Versions](terraform-versions.md). ## Next Steps + * If your Terraform setup meets the Atlantis requirements, continue the installation - guide and set up your [Git Host Access Credentials](access-credentials.html) + guide and set up your [Git Host Access Credentials](access-credentials.md) diff --git a/runatlantis.io/docs/security.md b/runatlantis.io/docs/security.md index a6bafda5a8..0f5d8df4c6 100644 --- a/runatlantis.io/docs/security.md +++ b/runatlantis.io/docs/security.md @@ -1,13 +1,16 @@ # Security -[[toc]] + ## Exploits + Because you usually run Atlantis on a server with credentials that allow access to your infrastructure it's important that you deploy Atlantis securely. 
Atlantis could be exploited by + * An attacker submitting a pull request that contains a malicious Terraform file that uses a malicious provider or an [`external` data source](https://registry.terraform.io/providers/hashicorp/external/latest/docs/data-sources/data_source) that Atlantis then runs `terraform plan` on (which it does automatically unless you've turned off automatic plans). * Running `terraform apply` on a malicious Terraform file with [local-exec](https://developer.hashicorp.com/terraform/language/resources/provisioners/local-exec) + ```tf resource "null_resource" "null" { provisioner "local-exec" { @@ -15,10 +18,12 @@ Atlantis could be exploited by } } ``` + * Running malicious custom build commands specified in an `atlantis.yaml` file. Atlantis uses the `atlantis.yaml` file from the pull request branch, **not** `main`. * Someone adding `atlantis plan/apply` comments on your valid pull requests causing terraform to run when you don't want it to. ## Bitbucket Cloud (bitbucket.org) + ::: danger Bitbucket Cloud does not support webhook secrets. This could allow attackers to spoof requests from Bitbucket. Ensure you are allowing only Bitbucket IPs. ::: @@ -33,16 +38,21 @@ To prevent this, allowlist [Bitbucket's IP addresses](https://confluence.atlassi (see Outbound IPv4 addresses). ## Mitigations + ### Don't Use On Public Repos + Because anyone can comment on public pull requests, even with all the security mitigations available, it's still dangerous to run Atlantis on public repos without proper configuration of the security settings. ### Don't Use `--allow-fork-prs` + If you're running on a public repo (which isn't recommended, see above) you shouldn't set `--allow-fork-prs` (defaults to false) because anyone can open up a pull request from their fork to your repo. ### `--repo-allowlist` + Atlantis requires you to specify a allowlist of repositories it will accept webhooks from via the `--repo-allowlist` flag. For example: + * Specific repositories: `--repo-allowlist=github.com/runatlantis/atlantis,github.com/runatlantis/atlantis-tests` * Your whole organization: `--repo-allowlist=github.com/runatlantis/*` * Every repository in your GitHub Enterprise install: `--repo-allowlist=github.yourcompany.com/*` @@ -52,19 +62,22 @@ For example: This flag ensures your Atlantis install isn't being used with repositories you don't control. See `atlantis server --help` for more details. ### Protect Terraform Planning + If attackers submitting pull requests with malicious Terraform code is in your threat model then you must be aware that `terraform apply` approvals are not enough. It is possible to run malicious code in a `terraform plan` using the [`external` data source](https://registry.terraform.io/providers/hashicorp/external/latest/docs/data-sources/data_source) or by specifying a malicious provider. This code could then exfiltrate your credentials. To prevent this, you could: + 1. Bake providers into the Atlantis image or host and deny egress in production. 1. Implement the provider registry protocol internally and deny public egress, that way you control who has write access to the registry. -1. Modify your [server-side repo configuration](https://www.runatlantis.io/docs/server-side-repo-config.html)'s `plan` step to validate against the +1. Modify your [server-side repo configuration](server-side-repo-config.md)'s `plan` step to validate against the use of disallowed providers or data sources or PRs from not allowed users. You could also add in extra validation at this point, e.g. 
requiring a "thumbs-up" on the PR before allowing the `plan` to continue. Conftest could be of use here. ### `--var-file-allowlist` + The files on your Atlantis install may be accessible as [variable definition files](https://developer.hashicorp.com/terraform/language/values/variables#variable-definitions-tfvars-files) from pull requests by adding `atlantis plan -- -var-file=/path/to/file` comments. To mitigate this security risk, Atlantis has limited such access @@ -72,6 +85,7 @@ only to the files allowlisted by the `--var-file-allowlist` flag. If this argume Atlantis' data directory. ### Webhook Secrets + Atlantis should be run with Webhook secrets set via the `$ATLANTIS_GH_WEBHOOK_SECRET`/`$ATLANTIS_GITLAB_WEBHOOK_SECRET` environment variables. Even with the `--repo-allowlist` flag set, without a webhook secret, attackers could make requests to Atlantis posing as a repository that is allowlisted. Webhook secrets ensure that the webhook requests are actually coming from your VCS provider (GitHub or GitLab). @@ -81,17 +95,20 @@ If you are using Azure DevOps, instead of webhook secrets add a [basic username ::: ### Azure DevOps Basic Authentication + Azure DevOps supports sending a basic authentication header in all webhook events. This requires using an HTTPS URL for your webhook location. ### SSL/HTTPS + If you're using webhook secrets but your traffic is over HTTP then the webhook secrets could be stolen. Enable SSL/HTTPS using the `--ssl-cert-file` and `--ssl-key-file` flags. ### Enable Authentication on Atlantis Web Server + It is very recommended to enable authentication in the web service. Enable BasicAuth using the `--web-basic-auth=true` and setup a username and a password using `--web-username=yourUsername` and `--web-password=yourPassword` flags. -You can also pass these as environment variables `ATLANTIS_WEB_BASIC_AUTH=true` `ATLANTIS_WEB_USERNAME=yourUsername` and `ATLANTIS_WEB_PASSWORD=yourPassword`. +You can also pass these as environment variables `ATLANTIS_WEB_BASIC_AUTH=true` `ATLANTIS_WEB_USERNAME=yourUsername` and `ATLANTIS_WEB_PASSWORD=yourPassword`. :::tip Tip We do encourage the usage of complex passwords in order to prevent basic bruteforcing attacks. diff --git a/runatlantis.io/docs/server-configuration.md b/runatlantis.io/docs/server-configuration.md index c91be9e9f7..439e55d481 100644 --- a/runatlantis.io/docs/server-configuration.md +++ b/runatlantis.io/docs/server-configuration.md @@ -1,12 +1,12 @@ # Server Configuration + This page explains how to configure the `atlantis server` command. Configuration to `atlantis server` can be specified via command line flags, environment variables, a config file or a mix of the three. -[[toc]] - ## Environment Variables + All flags can be specified as environment variables. 1. Take the flag name, ex. `--gh-user` @@ -24,11 +24,13 @@ The flag `--atlantis-url` is set by the environment variable `ATLANTIS_ATLANTIS_ ::: ## Config File + All flags can also be specified via a YAML config file. To use a YAML config file, run `atlantis server --config /path/to/config.yaml`. The keys of your config file should be the same as the flag names, ex. + ```yaml gh-token: ... log-level: ... @@ -40,40 +42,49 @@ The `--config` config file is only used as an alternate way of setting `atlantis ::: ## Precedence + Values are chosen in this order: + 1. Flags 1. Environment Variables 1. 
Config File - ## Flags + ### `--allow-commands` + ```bash atlantis server --allow-commands=version,plan,apply,unlock,approve_policies # or ATLANTIS_ALLOW_COMMANDS='version,plan,apply,unlock,approve_policies' ``` + List of allowed commands to be run on the Atlantis server, Defaults to `version,plan,apply,unlock,approve_policies` Notes: - * Accepts a comma separated list, ex. `command1,command2`. - * `version`, `plan`, `apply`, `unlock`, `approve_policies`, `import`, `state` and `all` are available. - * `all` is a special keyword that allows all commands. If pass `all` then all other commands will be ignored. + +* Accepts a comma separated list, ex. `command1,command2`. +* `version`, `plan`, `apply`, `unlock`, `approve_policies`, `import`, `state` and `all` are available. +* `all` is a special keyword that allows all commands. If pass `all` then all other commands will be ignored. ### `--allow-draft-prs` + ```bash atlantis server --allow-draft-prs # or ATLANTIS_ALLOW_DRAFT_PRS=true ``` + Respond to pull requests from draft prs. Defaults to `false`. ### `--allow-fork-prs` + ```bash atlantis server --allow-fork-prs # or ATLANTIS_ALLOW_FORK_PRS=true ``` + Respond to pull requests from forks. Defaults to `false`. :::warning SECURITY WARNING @@ -85,33 +96,40 @@ Values are chosen in this order: ::: ### `--api-secret` + ```bash atlantis server --api-secret="secret" # or (recommended) ATLANTIS_API_SECRET="secret" ``` - Required secret used to validate requests made to the [`/api/*` endpoints](api-endpoints.html). + + Required secret used to validate requests made to the [`/api/*` endpoints](api-endpoints.md). ### `--atlantis-url` + ```bash atlantis server --atlantis-url="https://my-domain.com:9090/basepath" # or ATLANTIS_ATLANTIS_URL=https://my-domain.com:9090/basepath ``` + Specify the URL that Atlantis is accessible from. Used in the Atlantis UI and in links from pull request comments. Defaults to `http://$(hostname):$port` where `$port` is from the [`--port`](#port) flag. Supports a basepath if you're hosting Atlantis under a path. Notes: - * If a load balancer with a non http/https port (not the one defined in the `--port` flag) is used, update the URL to include the port like in the example above. - * This URL is used as the `details` link next to each atlantis job to view the job's logs. + +* If a load balancer with a non http/https port (not the one defined in the `--port` flag) is used, update the URL to include the port like in the example above. +* This URL is used as the `details` link next to each atlantis job to view the job's logs. ### `--autodiscover-mode` + ```bash atlantis server --autodiscover-mode="" # or ATLANTIS_AUTODISCOVER_MODE="" ``` + Sets auto discover mode, default is `auto`. When set to `auto`, projects in a repo will be discovered by Atlantis when there are no projects configured in the repo config. If one or more projects are defined in the repo config then auto discovery will be completely disabled. @@ -123,40 +141,45 @@ Values are chosen in this order: When set to `disabled` projects will never be discovered, even if there are no projects configured in the repo config. ### `--automerge` + ```bash atlantis server --automerge # or ATLANTIS_AUTOMERGE=true ``` + Automatically merge pull requests after all plans have been successfully applied. - Defaults to `false`. See [Automerging](automerging.html) for more details. + Defaults to `false`. See [Automerging](automerging.md) for more details. 
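As a quick illustration, the flags covered so far can also be set together in a single YAML config file, since the config-file keys mirror the flag names; a minimal sketch with purely illustrative values:

```yaml
# config.yaml -- illustrative values only
atlantis-url: https://atlantis.example.com
allow-commands: version,plan,apply,unlock,approve_policies
autodiscover-mode: auto
automerge: true
```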
### `--autoplan-file-list` + ```bash # NOTE: Use single quotes to avoid shell expansion of *. atlantis server --autoplan-file-list='**/*.tf,project1/*.pkr.hcl' # or ATLANTIS_AUTOPLAN_FILE_LIST='**/*.tf,project1/*.pkr.hcl' ``` + List of file patterns that Atlantis will use to check if a directory contains modified files that should trigger project planning. Notes: - * Accepts a comma separated list, ex. `pattern1,pattern2`. - * Patterns use the [`.dockerignore` syntax](https://docs.docker.com/engine/reference/builder/#dockerignore-file) - * List of file patterns will be used by both automatic and manually run plans. - * When not set, defaults to all `.tf`, `.tfvars`, `.tfvars.json`, `terragrunt.hcl` and `.terraform.lock.hcl` files + +* Accepts a comma separated list, ex. `pattern1,pattern2`. +* Patterns use the [`.dockerignore` syntax](https://docs.docker.com/engine/reference/builder/#dockerignore-file) +* List of file patterns will be used by both automatic and manually run plans. +* When not set, defaults to all `.tf`, `.tfvars`, `.tfvars.json`, `terragrunt.hcl` and `.terraform.lock.hcl` files (`--autoplan-file-list='**/*.tf,**/*.tfvars,**/*.tfvars.json,**/terragrunt.hcl,**/.terraform.lock.hcl'`). - * Setting `--autoplan-file-list` will override the defaults. You **must** add `**/*.tf` and other defaults if you want to include them. - * A custom [Workflow](repo-level-atlantis-yaml.html#configuring-planning) that uses autoplan `when_modified` will ignore this value. +* Setting `--autoplan-file-list` will override the defaults. You **must** add `**/*.tf` and other defaults if you want to include them. +* A custom [Workflow](repo-level-atlantis-yaml.md#configuring-planning) that uses autoplan `when_modified` will ignore this value. Examples: - * Autoplan when any `*.tf` or `*.tfvars` file is modified. - * `--autoplan-file-list='**/*.tf,**/*.tfvars'` - * Autoplan when any `*.tf` file is modified except in `project2/` directory - * `--autoplan-file-list='**/*.tf,!project2'` - * Autoplan when any `*.tf` files or `.yml` files in subfolder of `project1` is modified. - * `--autoplan-file-list='**/*.tf,project2/**/*.yml'` +* Autoplan when any `*.tf` or `*.tfvars` file is modified. + * `--autoplan-file-list='**/*.tf,**/*.tfvars'` +* Autoplan when any `*.tf` file is modified except in `project2/` directory + * `--autoplan-file-list='**/*.tf,!project2'` +* Autoplan when any `*.tf` files or `.yml` files in subfolder of `project1` is modified. + * `--autoplan-file-list='**/*.tf,project2/**/*.yml'` ::: warning NOTE By default, changes to modules will not trigger autoplanning. See the flags below. @@ -202,8 +225,8 @@ Current default is "" (disabled). Examples: - * `**/*.tf` - will index all projects that have a `.tf` file in their directory, and plan them whenever an in-repo module dependency has changed. - * `**/*.tf,!foo,!bar` - will index all projects containing `.tf` except `foo` and `bar` and plan them whenever an in-repo module dependency has changed. +* `**/*.tf` - will index all projects that have a `.tf` file in their directory, and plan them whenever an in-repo module dependency has changed. +* `**/*.tf,!foo,!bar` - will index all projects containing `.tf` except `foo` and `bar` and plan them whenever an in-repo module dependency has changed. This allows projects to opt-out of auto-planning when a module dependency changes. ::: warning NOTE @@ -217,35 +240,43 @@ and set `--autoplan-modules` to `false`. 
::: ### `--azuredevops-hostname` + ```bash atlantis server --azuredevops-hostname="dev.azure.com" # or ATLANTIS_AZUREDEVOPS_HOSTNAME="dev.azure.com" ``` + Azure DevOps hostname to support cloud and self hosted instances. Defaults to `dev.azure.com`. ### `--azuredevops-token` + ```bash atlantis server --azuredevops-token="RandomStringProducedByAzureDevOps" # or (recommended) ATLANTIS_AZUREDEVOPS_TOKEN="RandomStringProducedByAzureDevOps" ``` + Azure DevOps token of API user. ### `--azuredevops-user` + ```bash atlantis server --azuredevops-user="username@example.com" # or ATLANTIS_AZUREDEVOPS_USER="username@example.com" ``` + Azure DevOps username of API user. ### `--azuredevops-webhook-password` + ```bash atlantis server --azuredevops-webhook-password="password123" # or (recommended) ATLANTIS_AZUREDEVOPS_WEBHOOK_PASSWORD="password123" ``` + Azure DevOps basic authentication password for inbound webhooks (see [docs](https://docs.microsoft.com/en-us/azure/devops/service-hooks/authorize?view=azure-devops)). @@ -258,47 +289,57 @@ and set `--autoplan-modules` to `false`. ::: ### `--azuredevops-webhook-user` + ```bash atlantis server --azuredevops-webhook-user="username@example.com" # or ATLANTIS_AZUREDEVOPS_WEBHOOK_USER="username@example.com" ``` + Azure DevOps basic authentication username for inbound webhooks. ### `--bitbucket-base-url` + ```bash atlantis server --bitbucket-base-url="http://bitbucket.corp:7990/basepath" # or ATLANTIS_BITBUCKET_BASE_URL="http://bitbucket.corp:7990/basepath" ``` + Base URL of Bitbucket Server (aka Stash) installation. Must include `http://` or `https://`. If using Bitbucket Cloud (bitbucket.org), do not set. Defaults to `https://api.bitbucket.org`. ### `--bitbucket-token` + ```bash atlantis server --bitbucket-token="token" # or (recommended) ATLANTIS_BITBUCKET_TOKEN="token" ``` + Bitbucket app password of API user. ### `--bitbucket-user` + ```bash atlantis server --bitbucket-user="myuser" # or ATLANTIS_BITBUCKET_USER="myuser" ``` + Bitbucket username of API user. ### `--bitbucket-webhook-secret` + ```bash atlantis server --bitbucket-webhook-secret="secret" # or (recommended) ATLANTIS_BITBUCKET_WEBHOOK_SECRET="secret" ``` + Secret used to validate Bitbucket webhooks. Only Bitbucket Server supports webhook secrets. - For Bitbucket.org, see [Security](security.html#bitbucket-cloud-bitbucket-org) for mitigations. + For Bitbucket.org, see [Security](security.md#bitbucket-cloud-bitbucket-org) for mitigations. ::: warning SECURITY WARNING If not specified, Atlantis won't be able to validate that the incoming webhook call came from Bitbucket. @@ -306,143 +347,173 @@ and set `--autoplan-modules` to `false`. ::: ### `--checkout-depth` + ```bash atlantis server --checkout-depth=0 # or ATLANTIS_CHECKOUT_DEPTH=0 ``` + The number of commits to fetch from the branch. Used if `--checkout-strategy=merge` since the `--checkout-strategy=branch` (default) checkout strategy always defaults to a shallow clone using a depth of 1. - Defaults to `0`. See [Checkout Strategy](checkout-strategy.html) for more details. + Defaults to `0`. See [Checkout Strategy](checkout-strategy.md) for more details. ### `--checkout-strategy` + ```bash atlantis server --checkout-strategy="" # or ATLANTIS_CHECKOUT_STRATEGY="" ``` + How to check out pull requests. Use either `branch` or `merge`. - Defaults to `branch`. See [Checkout Strategy](checkout-strategy.html) for more details. + Defaults to `branch`. See [Checkout Strategy](checkout-strategy.md) for more details. 
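Because a deeper fetch only matters with the merge strategy, the two flags above are typically set together; a minimal config-file sketch (the depth value is illustrative):

```yaml
# config.yaml
checkout-strategy: merge
checkout-depth: 20
```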
### `--config` + ```bash atlantis server --config="my/config/file.yaml" # or ATLANTIS_CONFIG="my/config/file.yaml" ``` + YAML config file where flags can also be set. See [Config File](#config-file) for more details. ### `--data-dir` + ```bash atlantis server --data-dir="path/to/data/dir" # or ATLANTIS_DATA_DIR="path/to/data/dir" ``` + Directory where Atlantis will store its data. Will be created if it doesn't exist. Defaults to `~/.atlantis`. Atlantis will store its database, checked out repos, Terraform plans and downloaded - Terraform binaries here. If Atlantis loses this directory, [locks](locking.html) + Terraform binaries here. If Atlantis loses this directory, [locks](locking.md) will be lost and unapplied plans will be lost. Note that the atlantis user is restricted to `~/.atlantis`. If you set the `--data-dir` flag to a path outside of Atlantis its home directory, ensure that you grant the atlantis user the correct permissions. ### `--default-tf-version` + ```bash atlantis server --default-tf-version="v0.12.31" # or ATLANTIS_DEFAULT_TF_VERSION="v0.12.31" ``` + Terraform version to default to. Will download to `/bin/terraform` - if not in `PATH`. See [Terraform Versions](terraform-versions.html) for more details. + if not in `PATH`. See [Terraform Versions](terraform-versions.md) for more details. ### `--disable-apply-all` + ```bash atlantis server --disable-apply-all # or ATLANTIS_DISABLE_APPLY_ALL=true ``` + Disable `atlantis apply` command so a specific project/workspace/directory has to be specified for applies. ### `--disable-autoplan` + ```bash atlantis server --disable-autoplan # or ATLANTIS_DISABLE_AUTOPLAN=true ``` + Disable atlantis auto planning. ### `--disable-autoplan-label` + ```bash atlantis server --disable-autoplan-label="no-autoplan" # or ATLANTIS_DISABLE_AUTOPLAN_LABEL="no-autoplan" ``` + Disable atlantis auto planning only on pull requests with the specified label. If `disable-autoplan` property is `true`, this flag has no effect. ### `--disable-markdown-folding` + ```bash atlantis server --disable-markdown-folding # or - ATLANTIS_DISABLE_MARKDOWN_FOLDER=true + ATLANTIS_DISABLE_MARKDOWN_FOLDING=true ``` + Disable folding in markdown output using the `
` html tag. ### `--disable-repo-locking` + ```bash atlantis server --disable-repo-locking # or ATLANTIS_DISABLE_REPO_LOCKING=true ``` + Stops atlantis from locking projects and or workspaces when running terraform. ### `--disable-unlock-label` + ```bash atlantis server --disable-unlock-label do-not-unlock # or ATLANTIS_DISABLE_UNLOCK_LABEL="do-not-unlock" ``` + Stops atlantis from unlocking a pull request with this label. Defaults to "" (feature disabled). ### `--emoji-reaction` + ```bash atlantis server --emoji-reaction thumbsup # or ATLANTIS_EMOJI_REACTION=thumbsup ``` + The emoji reaction to use for marking processed comments. Currently supported on Azure DevOps, GitHub and GitLab. Defaults to `eyes`. ### `--enable-diff-markdown-format` + ```bash atlantis server --enable-diff-markdown-format # or ATLANTIS_ENABLE_DIFF_MARKDOWN_FORMAT=true ``` + Enable Atlantis to format Terraform plan output into a markdown-diff friendly format for color-coding purposes. Useful to enable for use with GitHub. ### `--enable-policy-checks` + ```bash atlantis server --enable-policy-checks # or ATLANTIS_ENABLE_POLICY_CHECKS=true ``` - Enables atlantis to run server side policies on the result of a terraform plan. Policies are defined in [server side repo config](https://www.runatlantis.io/docs/server-side-repo-config.html#reference). + + Enables atlantis to run server side policies on the result of a terraform plan. Policies are defined in [server side repo config](server-side-repo-config.md#reference). ### `--enable-regexp-cmd` + ```bash atlantis server --enable-regexp-cmd # or ATLANTIS_ENABLE_REGEXP_CMD=true ``` + Enable Atlantis to use regular expressions to run plan/apply commands against defined project names when `-p` flag is passed with it. This can be used to run all defined projects (with the `name` key) in `atlantis.yaml` using `atlantis plan -p .*`. - The flag will only allow the regexes listed in the [`allowed_regexp_prefixes`](https://www.runatlantis.io/docs/repo-level-atlantis-yaml.html#reference) key defined in the repo `atlantis.yaml` file. If the key is undefined, its value defaults to `[]` which will allow any regex. + The flag will only allow the regexes listed in the [`allowed_regexp_prefixes`](repo-level-atlantis-yaml.md#reference) key defined in the repo `atlantis.yaml` file. If the key is undefined, its value defaults to `[]` which will allow any regex. This will not work with `-d` yet and to use `-p` the repo projects must be defined in the repo `atlantis.yaml` file. @@ -454,16 +525,19 @@ and set `--autoplan-modules` to `false`. ::: ### `--executable-name` + ```bash atlantis server --executable-name="atlantis" # or ATLANTIS_EXECUTABLE_NAME="atlantis" ``` + Comment command trigger executable name. Defaults to `atlantis`. This is useful when running multiple Atlantis servers against a single repository. ### `--fail-on-pre-workflow-hook-error` + ```bash atlantis server --fail-on-pre-workflow-hook-error # or @@ -473,35 +547,43 @@ and set `--autoplan-modules` to `false`. Fail and do not run the requested Atlantis command if any of the pre workflow hooks error. ### `--gitea-base-url` + ```bash atlantis server --gitea-base-url="http://your-gitea.corp:7990/basepath" # or ATLANTIS_GITEA_BASE_URL="http://your-gitea.corp:7990/basepath" ``` + Base URL of Gitea installation. Must include `http://` or `https://`. Defaults to `https://gitea.com` if left empty/absent. 
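For a self-hosted Gitea instance, the base URL is normally combined with the credential and webhook flags documented in the next sections; a sketch with placeholder values:

```yaml
# config.yaml -- placeholder values; prefer the ATLANTIS_GITEA_* environment variables for secrets
gitea-base-url: https://gitea.corp.example.com
gitea-user: atlantis
gitea-token: "<api-token>"
gitea-webhook-secret: "<secret>"
```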
### `--gitea-token` + ```bash atlantis server --gitea-token="token" # or (recommended) ATLANTIS_GITEA_TOKEN="token" ``` + Gitea app password of API user. ### `--gitea-user` + ```bash atlantis server --gitea-user="myuser" # or ATLANTIS_GITEA_USER="myuser" ``` + Gitea username of API user. ### `--gitea-webhook-secret` + ```bash atlantis server --gitea-webhook-secret="secret" # or (recommended) ATLANTIS_GITEA_WEBHOOK_SECRET="secret" ``` + Secret used to validate Gitea webhooks. ::: warning SECURITY WARNING @@ -510,44 +592,50 @@ and set `--autoplan-modules` to `false`. ::: ### `--gitea-page-size` + ```bash atlantis server --gitea-page-size=30 # or (recommended) ATLANTIS_GITEA_PAGE_SIZE=30 ``` + Number of items on a single page in Gitea paged responses. ::: warning Configuration dependent The default value conforms to the Gitea server's standard config setting: DEFAULT_PAGING_NUM - The highest valid value depends on the Gitea server's config setting: MAX_RESPONSE_ITEMS + The highest valid value depends on the Gitea server's config setting: MAX_RESPONSE_ITEMS ::: ### `--gh-allow-mergeable-bypass-apply` + ```bash atlantis server --gh-allow-mergeable-bypass-apply # or ATLANTIS_GH_ALLOW_MERGEABLE_BYPASS_APPLY=true ``` + Feature flag to enable ability to use `mergeable` mode with required apply status check. ### `--gh-app-id` + ```bash atlantis server --gh-app-id="00000" # or ATLANTIS_GH_APP_ID="00000" ``` + GitHub app ID. If set, GitHub authentication will be performed as [an installation](https://docs.github.com/en/rest/apps/installations). ::: tip A GitHub app can be created by starting Atlantis first, then pointing your browser at - ``` + ```shell $(hostname)/github-app/setup ``` You'll be redirected to GitHub to create a new app, and will then be redirected to - ``` + ```shell $(hostname)/github-app/exchange-code?code=some-code ``` @@ -555,11 +643,13 @@ and set `--autoplan-modules` to `false`. ::: ### `--gh-app-key` + ```bash atlantis server --gh-app-key="-----BEGIN RSA PRIVATE KEY-----(...)" # or ATLANTIS_GH_APP_KEY="-----BEGIN RSA PRIVATE KEY-----(...)" ``` + The PEM encoded private key for the GitHub App. ::: warning SECURITY WARNING @@ -567,44 +657,54 @@ and set `--autoplan-modules` to `false`. ::: ### `--gh-app-key-file` + ```bash atlantis server --gh-app-key-file="path/to/app-key.pem" # or ATLANTIS_GH_APP_KEY_FILE="path/to/app-key.pem" ``` + Path to a GitHub App PEM encoded private key file. If set, GitHub authentication will be performed as [an installation](https://docs.github.com/en/rest/apps/installations). ### `--gh-app-slug` + ```bash atlantis server --gh-app-slug="myappslug" # or ATLANTIS_GH_APP_SLUG="myappslug" ``` + A slugged version of GitHub app name shown in pull requests comments, etc (not `Atlantis App` but something like `atlantis-app`). Atlantis uses the value of this parameter to identify the comments it has left on GitHub pull requests. This is used for functions such as `--hide-prev-plan-comments`. You need to obtain this value from your GitHub app, one way is to go to your App settings and open "Public page" from the left sidebar. Your `--gh-app-slug` value will be the last part of the URL, e.g `https://github.com/apps/`. ### `--gh-hostname` + ```bash atlantis server --gh-hostname="my.github.enterprise.com" # or ATLANTIS_GH_HOSTNAME="my.github.enterprise.com" ``` + Hostname of your GitHub Enterprise installation. If using [GitHub.com](https://github.com), don't set. Defaults to `github.com`. 
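Putting the GitHub App flags above together, a config file for an App-based install on GitHub Enterprise might look roughly like this (IDs, paths and hostnames are illustrative):

```yaml
# config.yaml -- illustrative values
gh-app-id: "12345"
gh-app-key-file: /etc/atlantis/gh-app-key.pem
gh-app-slug: my-atlantis-app
gh-hostname: github.mycompany.com
```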
### `--gh-org` + ```bash atlantis server --gh-org="myorgname" # or ATLANTIS_GH_ORG="myorgname" ``` + GitHub organization name. Set to enable creating a private GitHub app for this organization. ### `--gh-team-allowlist` + ```bash atlantis server --gh-team-allowlist="myteam:plan, secteam:apply, DevOps Team:apply, DevOps Team:import" # or ATLANTIS_GH_TEAM_ALLOWLIST="myteam:plan, secteam:apply, DevOps Team:apply, DevOps Team:import" ``` + In versions v0.21.0 and later, the GitHub team name can be a name or a slug. In versions v0.20.1 and below, the Github team name required the case sensitive team name. @@ -619,28 +719,34 @@ and set `--autoplan-modules` to `false`. ::: ### `--gh-token` + ```bash atlantis server --gh-token="token" # or (recommended) ATLANTIS_GH_TOKEN="token" ``` + GitHub token of API user. ### `--gh-user` + ```bash atlantis server --gh-user="myuser" # or ATLANTIS_GH_USER="myuser" ``` + GitHub username of API user. ### `--gh-webhook-secret` + ```bash atlantis server --gh-webhook-secret="secret" # or (recommended) ATLANTIS_GH_WEBHOOK_SECRET="secret" ``` - Secret used to validate GitHub webhooks (see [https://developer.github.com/webhooks/securing/](https://docs.github.com/en/developers/webhooks-and-events/webhooks/securing-your-webhooks)). + + Secret used to validate GitHub webhooks (see [GitHub: Validating webhook deliveries](https://docs.github.com/en/webhooks/using-webhooks/validating-webhook-deliveries)). ::: warning SECURITY WARNING If not specified, Atlantis won't be able to validate that the incoming webhook call came from GitHub. @@ -648,36 +754,44 @@ and set `--autoplan-modules` to `false`. ::: ### `--gitlab-hostname` + ```bash atlantis server --gitlab-hostname="my.gitlab.enterprise.com" # or ATLANTIS_GITLAB_HOSTNAME="my.gitlab.enterprise.com" ``` + Hostname of your GitLab Enterprise installation. If using [Gitlab.com](https://gitlab.com), don't set. Defaults to `gitlab.com`. ### `--gitlab-token` + ```bash atlantis server --gitlab-token="token" # or (recommended) ATLANTIS_GITLAB_TOKEN="token" ``` + GitLab token of API user. ### `--gitlab-user` + ```bash atlantis server --gitlab-user="myuser" # or ATLANTIS_GITLAB_USER="myuser" ``` + GitLab username of API user. ### `--gitlab-webhook-secret` + ```bash atlantis server --gitlab-webhook-secret="secret" # or (recommended) ATLANTIS_GITLAB_WEBHOOK_SECRET="secret" ``` + Secret used to validate GitLab webhooks. ::: warning SECURITY WARNING @@ -686,71 +800,86 @@ and set `--autoplan-modules` to `false`. ::: ### `--help` + ```bash atlantis server --help ``` + View help. ### `--hide-prev-plan-comments` + ```bash atlantis server --hide-prev-plan-comments # or ATLANTIS_HIDE_PREV_PLAN_COMMENTS=true ``` + Hide previous plan comments to declutter PRs. This is only supported in GitHub and GitLab currently. This is not enabled by default. When using Github App, you need to set `--gh-app-slug` to enable this feature. ### `--hide-unchanged-plan-comments` + ```bash atlantis server --hide-unchanged-plan-comments # or ATLANTIS_HIDE_UNCHANGED_PLAN_COMMENTS=true ``` + Remove no-changes plan comments from the pull request. This is useful when you have many projects and want to keep the pull request clean from useless comments. ### `--include-git-untracked-files` + ```bash atlantis server --include-git-untracked-files # or ATLANTIS_INCLUDE_GIT_UNTRACKED_FILES=true ``` + Include git untracked files in the Atlantis modified file list. Used for example with CDKTF pre-workflow hooks that dynamically generate Terraform files. 
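Taken together, a config-file sketch of the last few flags above (decluttering PR comments and including untracked files) might look like this; the app slug is illustrative and only needed when running as a GitHub App:

```yaml
# config.yaml
hide-prev-plan-comments: true
hide-unchanged-plan-comments: true
include-git-untracked-files: true
gh-app-slug: my-atlantis-app   # required for hide-prev-plan-comments when using a GitHub App
```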
### `--locking-db-type` + ```bash atlantis server --locking-db-type="" # or ATLANTIS_LOCKING_DB_TYPE="" ``` + The locking database type to use for storing plan and apply locks. Defaults to `boltdb`. Notes: - * If set to `boltdb`, only one process may have access to the boltdb instance. - * If set to `redis`, then `--redis-host`, `--redis-port`, and `--redis-password` must be set. + +* If set to `boltdb`, only one process may have access to the boltdb instance. +* If set to `redis`, then `--redis-host`, `--redis-port`, and `--redis-password` must be set. ### `--log-level` + ```bash atlantis server --log-level="" # or ATLANTIS_LOG_LEVEL="" ``` + Log level. Defaults to `info`. ### `--markdown-template-overrides-dir` + ```bash atlantis server --markdown-template-overrides-dir="path/to/templates/" # or ATLANTIS_MARKDOWN_TEMPLATE_OVERRIDES_DIR="path/to/templates/" ``` + This will be available in v0.21.0. Directory where Atlantis will read in overrides for markdown templates used to render comments on pull requests. Markdown template overrides may be specified either in individual files, or all together in a single file. All template - override files _must_ have the `.tmpl` extension, otherwise they will not be parsed. + override files *must* have the `.tmpl` extension, otherwise they will not be parsed. Markdown templates which may have overrides can be found [here](https://github.com/runatlantis/atlantis/tree/main/server/events/templates) @@ -760,67 +889,83 @@ This is useful when you have many projects and want to keep the pull request cle Defaults to the atlantis home directory `/home/atlantis/.markdown_templates/` in `/$HOME/.markdown_templates`. ### `--parallel-apply` + ```bash atlantis server --parallel-apply # or ATLANTIS_PARALLEL_APPLY=true ``` - Whether to run apply operations in parallel. Defaults to `false`. Explicit declaration in [repo config](repo-level-atlantis-yaml.html#run-plans-and-applies-in-parallel) takes precedence. + + Whether to run apply operations in parallel. Defaults to `false`. Explicit declaration in [repo config](repo-level-atlantis-yaml.md#run-plans-and-applies-in-parallel) takes precedence. ### `--parallel-plan` + ```bash atlantis server --parallel-plan # or ATLANTIS_PARALLEL_PLAN=true ``` - Whether to run plan operations in parallel. Defaults to `false`. Explicit declaration in [repo config](repo-level-atlantis-yaml.html#run-plans-and-applies-in-parallel) takes precedence. + + Whether to run plan operations in parallel. Defaults to `false`. Explicit declaration in [repo config](repo-level-atlantis-yaml.md#run-plans-and-applies-in-parallel) takes precedence. ### `--parallel-pool-size` + ```bash atlantis server --parallel-pool-size=100 # or ATLANTIS_PARALLEL_POOL_SIZE=100 ``` + Max size of the wait group that runs parallel plans and applies (if enabled). Defaults to `15` ### `--port` + ```bash atlantis server --port=4141 # or ATLANTIS_PORT=4141 ``` + Port to bind to. Defaults to `4141`. ### `--quiet-policy-checks` + ```bash atlantis server --quiet-policy-checks # or ATLANTIS_QUIET_POLICY_CHECKS=true ``` + Exclude policy check comments from pull requests unless there's an actual error from conftest. This also excludes warnings. Defaults to `false`. ### `--redis-db` + ```bash atlantis server --redis-db=0 # or ATLANTIS_REDIS_DB=0 ``` + The Redis Database to use when using a Locking DB type of `redis`. Defaults to `0`. 
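A minimal Redis-backed locking sketch, combining `--locking-db-type` with the `redis-*` flags (the remaining connection flags are described below; host and port values are illustrative):

```yaml
# config.yaml -- illustrative values
locking-db-type: redis
redis-db: 0
redis-host: redis.example.com
redis-port: 6379
# redis-password is best supplied via ATLANTIS_REDIS_PASSWORD
```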
### `--redis-host` + ```bash atlantis server --redis-host="localhost" # or ATLANTIS_REDIS_HOST="localhost" ``` + The Redis Hostname for when using a Locking DB type of `redis`. ### `--redis-insecure-skip-verify` + ```bash atlantis server --redis-insecure-skip-verify=false # or ATLANTIS_REDIS_INSECURE_SKIP_VERIFY=false ``` + Controls whether the Redis client verifies the Redis server's certificate chain and host name. If true, accepts any certificate presented by the server and any host name in that certificate. Defaults to `false`. ::: warning SECURITY WARNING @@ -828,82 +973,97 @@ This is useful when you have many projects and want to keep the pull request cle ::: ### `--redis-password` + ```bash atlantis server --redis-password="password123" # or (recommended) ATLANTIS_REDIS_PASSWORD="password123" ``` + The Redis Password for when using a Locking DB type of `redis`. ### `--redis-port` + ```bash atlantis server --redis-port=6379 # or ATLANTIS_REDIS_PORT=6379 ``` + The Redis Port for when using a Locking DB type of `redis`. Defaults to `6379`. ### `--redis-tls-enabled` + ```bash atlantis server --redis-tls-enabled=false # or ATLANTIS_REDIS_TLS_ENABLED=false ``` + Enables a TLS connection, with min version of 1.2, to Redis when using a Locking DB type of `redis`. Defaults to `false`. ### `--repo-allowlist` + ```bash # NOTE: Use single quotes to avoid shell expansion of *. atlantis server --repo-allowlist='github.com/myorg/*' # or ATLANTIS_REPO_ALLOWLIST='github.com/myorg/*' ``` + Atlantis requires you to specify an allowlist of repositories it will accept webhooks from. Notes: - * Accepts a comma separated list, ex. `definition1,definition2` - * Format is `{hostname}/{owner}/{repo}`, ex. `github.com/runatlantis/atlantis` - * `*` matches any characters, ex. `github.com/runatlantis/*` will match all repos in the runatlantis organization - * An entry beginning with `!` negates it, ex. `github.com/foo/*,!github.com/foo/bar` will match all github repos in the `foo` owner *except* `bar`. - * For Bitbucket Server: `{hostname}` is the domain without scheme and port, `{owner}` is the name of the project (not the key), and `{repo}` is the repo name - * User (not project) repositories take on the format: `{hostname}/{full name}/{repo}` (e.g., `bitbucket.example.com/Jane Doe/myatlantis` for username `jdoe` and full name `Jane Doe`, which is not very intuitive) - * For Azure DevOps the allowlist takes one of two forms: `{owner}.visualstudio.com/{project}/{repo}` or `dev.azure.com/{owner}/{project}/{repo}` - * Microsoft is in the process of changing Azure DevOps to the latter form, so it may be safest to always specify both formats in your repo allowlist for each repository until the change is complete. + +* Accepts a comma separated list, ex. `definition1,definition2` +* Format is `{hostname}/{owner}/{repo}`, ex. `github.com/runatlantis/atlantis` +* `*` matches any characters, ex. `github.com/runatlantis/*` will match all repos in the runatlantis organization +* An entry beginning with `!` negates it, ex. `github.com/foo/*,!github.com/foo/bar` will match all github repos in the `foo` owner *except* `bar`. 
+* For Bitbucket Server: `{hostname}` is the domain without scheme and port, `{owner}` is the name of the project (not the key), and `{repo}` is the repo name + * User (not project) repositories take on the format: `{hostname}/{full name}/{repo}` (e.g., `bitbucket.example.com/Jane Doe/myatlantis` for username `jdoe` and full name `Jane Doe`, which is not very intuitive) +* For Azure DevOps the allowlist takes one of two forms: `{owner}.visualstudio.com/{project}/{repo}` or `dev.azure.com/{owner}/{project}/{repo}` +* Microsoft is in the process of changing Azure DevOps to the latter form, so it may be safest to always specify both formats in your repo allowlist for each repository until the change is complete. Examples: - * Allowlist `myorg/repo1` and `myorg/repo2` on `github.com` - * `--repo-allowlist=github.com/myorg/repo1,github.com/myorg/repo2` - * Allowlist all repos under `myorg` on `github.com` - * `--repo-allowlist='github.com/myorg/*'` - * Allowlist all repos under `myorg` on `github.com`, excluding `myorg/untrusted-repo` - * `--repo-allowlist='github.com/myorg/*,!github.com/myorg/untrusted-repo'` - * Allowlist all repos in my GitHub Enterprise installation - * `--repo-allowlist='github.yourcompany.com/*'` - * Allowlist all repos under `myorg` project `myproject` on Azure DevOps - * `--repo-allowlist='myorg.visualstudio.com/myproject/*,dev.azure.com/myorg/myproject/*'` - * Allowlist all repositories - * `--repo-allowlist='*'` + +* Allowlist `myorg/repo1` and `myorg/repo2` on `github.com` + * `--repo-allowlist=github.com/myorg/repo1,github.com/myorg/repo2` +* Allowlist all repos under `myorg` on `github.com` + * `--repo-allowlist='github.com/myorg/*'` +* Allowlist all repos under `myorg` on `github.com`, excluding `myorg/untrusted-repo` + * `--repo-allowlist='github.com/myorg/*,!github.com/myorg/untrusted-repo'` +* Allowlist all repos in my GitHub Enterprise installation + * `--repo-allowlist='github.yourcompany.com/*'` +* Allowlist all repos under `myorg` project `myproject` on Azure DevOps + * `--repo-allowlist='myorg.visualstudio.com/myproject/*,dev.azure.com/myorg/myproject/*'` +* Allowlist all repositories + * `--repo-allowlist='*'` ### `--repo-config` + ```bash atlantis server --repo-config="path/to/repos.yaml" # or ATLANTIS_REPO_CONFIG="path/to/repos.yaml" ``` - Path to a YAML server-side repo config file. See [Server Side Repo Config](server-side-repo-config.html). + + Path to a YAML server-side repo config file. See [Server Side Repo Config](server-side-repo-config.md). ### `--repo-config-json` + ```bash atlantis server --repo-config-json='{"repos":[{"id":"/.*/", "apply_requirements":["mergeable"]}]}' # or ATLANTIS_REPO_CONFIG_JSON='{"repos":[{"id":"/.*/", "apply_requirements":["mergeable"]}]}' ``` + Specify server-side repo config as a JSON string. Useful if you don't want to write a config file to disk. - See [Server Side Repo Config](server-side-repo-config.html) for more details. + See [Server Side Repo Config](server-side-repo-config.md) for more details. 
::: tip - If specifying a [Workflow](custom-workflows.html#reference), [step](custom-workflows.html#step)'s + If specifying a [Workflow](custom-workflows.md#reference), [step](custom-workflows.md#step)'s can be specified as follows: + ```json { "repos": [], @@ -926,25 +1086,30 @@ This is useful when you have many projects and want to keep the pull request cle } } ``` + ::: ### `--restrict-file-list` + ```bash atlantis server --restrict-file-list # or (recommended) ATLANTIS_RESTRICT_FILE_LIST=true ``` + `--restrict-file-list` will block plan requests from projects outside the files modified in the pull request. This will not block plan requests with regex if using the `--enable-regexp-cmd` flag, in these cases commands like `atlantis plan -p .*` will still work if used. normal commands will stil be blocked if necessary. Defaults to `false`. ### `--silence-allowlist-errors` + ```bash atlantis server --silence-allowlist-errors # or ATLANTIS_SILENCE_ALLOWLIST_ERRORS=true ``` + Some users use the `--repo-allowlist` flag to control which repos Atlantis responds to. Normally, if Atlantis receives a pull request webhook from a repo not listed in the allowlist, it will comment back with an error. This flag disables that commenting. @@ -953,20 +1118,24 @@ This is useful when you have many projects and want to keep the pull request cle at an organization level rather than on each repo. ### `--silence-fork-pr-errors` + ```bash atlantis server --silence-fork-pr-errors # or ATLANTIS_SILENCE_FORK_PR_ERRORS=true ``` + Normally, if Atlantis receives a pull request webhook from a fork and --allow-fork-prs is not set, it will comment back with an error. This flag disables that commenting. ### `--silence-no-projects` + ```bash atlantis server --silence-no-projects # or ATLANTIS_SILENCE_NO_PROJECTS=true ``` + `--silence-no-projects` will tell Atlantis to ignore PRs if none of the modified files are part of a project defined in the `atlantis.yaml` file. This flag ensures an Atlantis server only responds to its explicitly declared projects. This has no effect if projects are undefined in the repo level `atlantis.yaml`. @@ -976,78 +1145,96 @@ This is useful when you have many projects and want to keep the pull request cle delegate work to each Atlantis server. Also useful when used with pre_workflow_hooks to dynamically generate an `atlantis.yaml` file. ### `--silence-vcs-status-no-plans` + ```bash atlantis server --silence-vcs-status-no-plans # or ATLANTIS_SILENCE_VCS_STATUS_NO_PLANS=true ``` + `--silence-vcs-status-no-plans` will tell Atlantis to ignore setting VCS status on plans if none of the modified files are part of a project defined in the `atlantis.yaml` file. ### `--silence-vcs-status-no-projects` + ```bash atlantis server --silence-vcs-status-no-projects # or ATLANTIS_SILENCE_VCS_STATUS_NO_PROJECTS=true ``` + `--silence-vcs-status-no-projects` will tell Atlantis to ignore setting VCS status on any command if none of the modified files are part of a project defined in the `atlantis.yaml` file. ### `--skip-clone-no-changes` + ```bash atlantis server --skip-clone-no-changes # or ATLANTIS_SKIP_CLONE_NO_CHANGES=true ``` + `--skip-clone-no-changes` will skip cloning the repo during autoplan if there are no changes to Terraform projects. This will only apply for GitHub and GitLab and only for repos that have `atlantis.yaml` file. Defaults to `false`. ### `--slack-token` + ```bash atlantis server --slack-token=token # or (recommended) ATLANTIS_SLACK_TOKEN='token' ``` + API token for Slack notifications. 
Slack is not fully supported. TODO: Slack docs. ### `--ssl-cert-file` + ```bash atlantis server --ssl-cert-file="/etc/ssl/certs/my-cert.crt" # or ATLANTIS_SSL_CERT_FILE="/etc/ssl/certs/my-cert.crt" ``` + File containing x509 Certificate used for serving HTTPS. If the cert is signed by a CA, the file should be the concatenation of the server's certificate, any intermediates, and the CA's certificate. ### `--ssl-key-file` + ```bash atlantis server --ssl-key-file="/etc/ssl/private/my-cert.key" # or ATLANTIS_SSL_KEY_FILE="/etc/ssl/private/my-cert.key" ``` + File containing x509 private key matching `--ssl-cert-file`. ### `--stats-namespace` + ```bash atlantis server --stats-namespace="myatlantis" # or ATLANTIS_STATS_NAMESPACE="myatlantis" ``` - Namespace for emitting stats/metrics. See [stats](stats.html) section. + + Namespace for emitting stats/metrics. See [stats](stats.md) section. ### `--tf-download` + ```bash atlantis server --tf-download=false # or ATLANTIS_TF_DOWNLOAD=false ``` + Defaults to `true`. Allow Atlantis to list and download additional versions of Terraform. Setting this to `false` can be useful in an air-gapped environment where a download mirror is not available. ### `--tf-download-url` + ```bash atlantis server --tf-download-url="https://releases.company.com" # or ATLANTIS_TF_DOWNLOAD_URL="https://releases.company.com" ``` + An alternative URL to download Terraform versions if they are missing. Useful in an airgapped environment where releases.hashicorp.com is not available. Directory structure of the custom endpoint should match that of releases.hashicorp.com. @@ -1055,106 +1242,128 @@ Setting this to `false` can be useful in an air-gapped environment where a downl This has no impact if `--tf-download` is set to `false`. ### `--tfe-hostname` + ```bash atlantis server --tfe-hostname="my-terraform-enterprise.company.com" # or ATLANTIS_TFE_HOSTNAME="my-terraform-enterprise.company.com" ``` + Hostname of your Terraform Enterprise installation to be used in conjunction with - `--tfe-token`. See [Terraform Cloud](terraform-cloud.html) for more details. + `--tfe-token`. See [Terraform Cloud](terraform-cloud.md) for more details. If using Terraform Cloud (i.e. you don't have your own Terraform Enterprise installation) no need to set since it defaults to `app.terraform.io`. ### `--tfe-local-execution-mode` + ```bash atlantis server --tfe-local-execution-mode # or ATLANTIS_TFE_LOCAL_EXECUTION_MODE=true ``` - Enable if you're using local execution mode (instead of TFE/C's remote execution mode). See [Terraform Cloud](terraform-cloud.html) for more details. + + Enable if you're using local execution mode (instead of TFE/C's remote execution mode). See [Terraform Cloud](terraform-cloud.md) for more details. ### `--tfe-token` + ```bash atlantis server --tfe-token="xxx.atlasv1.yyy" # or (recommended) ATLANTIS_TFE_TOKEN='xxx.atlasv1.yyy' ``` - A token for Terraform Cloud/Terraform Enterprise integration. See [Terraform Cloud](terraform-cloud.html) for more details. + + A token for Terraform Cloud/Terraform Enterprise integration. See [Terraform Cloud](terraform-cloud.md) for more details. ### `--use-tf-plugin-cache` + ```bash atlantis server --use-tf-plugin-cache=false # or ATLANTIS_USE_TF_PLUGIN_CACHE=false ``` + Set to false if you want to disable terraform plugin cache. 
This flag is useful when having multiple projects that need to run a plan and apply in the same PR to avoid the race condition of `plugin_cache_dir` concurrently, this is a terraform known issue, more info: -- [plugin_cache_dir concurrently discussion](https://github.com/hashicorp/terraform/issues/31964) -- [PR to improve the situation](https://github.com/hashicorp/terraform/pull/33479) +* [plugin_cache_dir concurrently discussion](https://github.com/hashicorp/terraform/issues/31964) +* [PR to improve the situation](https://github.com/hashicorp/terraform/pull/33479) The effect of the race condition is more evident when using parallel configuration to run plan and apply, by disabling the use of plugin cache will impact in the performance when starting a new plan or apply, but in large atlantis deployments with multiple projects and shared modules the use of `--parallel_plan` and `--parallel_apply` is mandatory for an efficient managment of the PRs. ### `--var-file-allowlist` + ```bash atlantis server --var-file-allowlist='/path/to/tfvars/dir' # or ATLANTIS_VAR_FILE_ALLOWLIST='/path/to/tfvars/dir' ``` + Comma-separated list of additional directory paths where [variable definition files](https://developer.hashicorp.com/terraform/language/values/variables#variable-definitions-tfvars-files) can be read from. The paths in this argument should be absolute paths. Relative paths and globbing are currently not supported. If this argument is not provided, it defaults to Atlantis' data directory, determined by the `--data-dir` argument. ### `--vcs-status-name` + ```bash atlantis server --vcs-status-name="atlantis-dev" # or ATLANTIS_VCS_STATUS_NAME="atlantis-dev" ``` + Name used to identify Atlantis when updating a pull request status. Defaults to `atlantis`. This is useful when running multiple Atlantis servers against a single repository so you can give each Atlantis server its own unique name to prevent the statuses clashing. ### `--web-basic-auth` + ```bash atlantis server --web-basic-auth # or ATLANTIS_WEB_BASIC_AUTH=true ``` + Enable Basic Authentication on the Atlantis web service. ### `--web-password` + ```bash atlantis server --web-password="atlantis" # or ATLANTIS_WEB_PASSWORD="atlantis" ``` + Password used for Basic Authentication on the Atlantis web service. Defaults to `atlantis`. ### `--web-username` + ```bash atlantis server --web-username="atlantis" # or ATLANTIS_WEB_USERNAME="atlantis" ``` + Username used for Basic Authentication on the Atlantis web service. Defaults to `atlantis`. ### `--websocket-check-origin` + ```bash atlantis server --websocket-check-origin # or ATLANTIS_WEBSOCKET_CHECK_ORIGIN=true ``` + Only allow websockets connection when they originate from the running Atlantis web server ### `--write-git-creds` + ```bash atlantis server --write-git-creds # or ATLANTIS_WRITE_GIT_CREDS=true ``` + Write out a .git-credentials file with the provider user and token to allow cloning private modules over HTTPS or SSH. See [here](https://git-scm.com/docs/git-credential-store) for more information. @@ -1171,4 +1380,3 @@ The effect of the race condition is more evident when using parallel configurati ::: warning SECURITY WARNING This does write secrets to disk and should only be enabled in a secure environment. 
::: - diff --git a/runatlantis.io/docs/server-side-repo-config.md b/runatlantis.io/docs/server-side-repo-config.md index 060e910f9e..f7cd73595c 100644 --- a/runatlantis.io/docs/server-side-repo-config.md +++ b/runatlantis.io/docs/server-side-repo-config.md @@ -1,27 +1,29 @@ -# Server Side Config +# Server Side Repo Config + A Server-Side Config file is used for more groups of server config that can't reasonably be expressed through flags. One such usecase is to control per-repo behaviour and what users can do in repo-level `atlantis.yaml` files. -[[toc]] - ## Do I Need A Server-Side Config File? + You do not need a server-side repo config file unless you want to customize some aspect of Atlantis on a per-repo basis. Read through the [use-cases](#use-cases) to determine if you need it. ## Enabling Server Side Config + To use server side repo config create a config file, ex. `repos.yaml`, and pass it to the `atlantis server` command via the `--repo-config` flag, ex. `--repo-config=path/to/repos.yaml`. If you don't wish to write a config file to disk, you can use the `--repo-config-json` flag or `ATLANTIS_REPO_CONFIG_JSON` environment variable -to specify your config as JSON. See [--repo-config-json](server-configuration.html#repo-config-json) +to specify your config as JSON. See [--repo-config-json](server-configuration.md#repo-config-json) for an example. - + ## Example Server Side Repo + ```yaml # repos lists the config for specific repos. repos: @@ -56,7 +58,7 @@ repos: # its atlantis.yaml file. allowed_overrides: [apply_requirements, workflow, delete_source_branch_on_merge, repo_locking, repo_locks, custom_policy_check] - # allowed_workflows specifies which workflows the repos that match + # allowed_workflows specifies which workflows the repos that match # are allowed to select. allowed_workflows: [custom] @@ -84,11 +86,11 @@ repos: custom_policy_check: false # pre_workflow_hooks defines arbitrary list of scripts to execute before workflow execution. - pre_workflow_hooks: + pre_workflow_hooks: - run: my-pre-workflow-hook-command arg1 - + # post_workflow_hooks defines arbitrary list of scripts to execute after workflow execution. - post_workflow_hooks: + post_workflow_hooks: - run: my-post-workflow-hook-command arg1 # policy_check defines if policy checking should be enable on this repository. @@ -118,13 +120,16 @@ workflows: ``` ## Use Cases + Here are some of the reasons you might want to use a repo config. ### Requiring PR Is Approved Before an applicable subcommand + If you want to require that all (or specific) repos must have pull requests approved before Atlantis will allow running `apply` or `import`, use the `plan_requirements`, `apply_requirements` or `import_requirements` keys. For all repos: + ```yaml # repos.yaml repos: @@ -135,6 +140,7 @@ repos: ``` For a specific repo: + ```yaml # repos.yaml repos: @@ -144,13 +150,15 @@ repos: import_requirements: [approved] ``` -See [Command Requirements](command-requirements.html) for more details. +See [Command Requirements](command-requirements.md) for more details. ### Requiring PR Is "Mergeable" Before Apply or Import + If you want to require that all (or specific) repos must have pull requests in a mergeable state before Atlantis will allow running `apply` or `import`, use the `plan_requirements`, `apply_requirements` or `import_requirements` keys. 
For all repos: + ```yaml # repos.yaml repos: @@ -161,6 +169,7 @@ repos: ``` For a specific repo: + ```yaml # repos.yaml repos: @@ -170,13 +179,15 @@ repos: import_requirements: [mergeable] ``` -See [Command Requirements](command-requirements.html) for more details. +See [Command Requirements](command-requirements.md) for more details. ### Repos Can Set Their Own Apply an applicable subcommand + If you want all (or specific) repos to be able to override the default apply requirements, use the `allowed_overrides` key. To allow all repos to override the default: + ```yaml # repos.yaml repos: @@ -189,7 +200,9 @@ repos: # But all repos can set their own using atlantis.yaml allowed_overrides: [plan_requirements, apply_requirements, import_requirements] ``` + To allow only a specific repo to override the default: + ```yaml # repos.yaml repos: @@ -206,6 +219,7 @@ repos: Then each allowed repo can have an `atlantis.yaml` file that sets `plan_requirements`, `apply_requirements` or `import_requirements` to an empty array (disabling the requirement). + ```yaml # atlantis.yaml in the repo root or set repo_config_file in repos.yaml version: 3 @@ -217,6 +231,7 @@ projects: ``` ### Running Scripts Before Atlantis Workflows + If you want to run scripts that would execute before Atlantis can run default or custom workflows, you can create a `pre-workflow-hooks`: @@ -228,10 +243,12 @@ repos: - run: | my bash script inline ``` -See [Pre Workflow Hooks](pre-workflow-hooks.html) for more details on writing + +See [Pre Workflow Hooks](pre-workflow-hooks.md) for more details on writing pre workflow hooks. ### Running Scripts After Atlantis Workflows + If you want to run scripts that would execute after Atlantis runs default or custom workflows, you can create a `post-workflow-hooks`: @@ -243,15 +260,18 @@ repos: - run: | my bash script inline ``` -See [Post Workflow Hooks](post-workflow-hooks.html) for more details on writing + +See [Post Workflow Hooks](post-workflow-hooks.md) for more details on writing post workflow hooks. ### Change The Default Atlantis Workflow + If you want to change the default commands that Atlantis runs during `plan` and `apply` phases, you can create a new `workflow`. If you want to use that workflow by default for all repos, use the workflow key `default`: + ```yaml # repos.yaml # NOTE: the repos key is not required. @@ -267,10 +287,11 @@ workflows: - run: my custom apply command ``` -See [Custom Workflows](custom-workflows.html) for more details on writing +See [Custom Workflows](custom-workflows.md) for more details on writing custom workflows. ### Allow Repos To Choose A Server-Side Workflow + If you want repos to be able to choose their own workflows that are defined in the server-side repo config, you need to create the workflows server-side and then allow each repo to override the `workflow` key: @@ -301,7 +322,8 @@ workflows: steps: - run: another custom command ``` -Or, if you want to restrict what workflows each repo has access to, use the `allowed_workflows` + +Or, if you want to restrict what workflows each repo has access to, use the `allowed_workflows` key: ```yaml @@ -351,13 +373,15 @@ There is always a workflow named `default` that corresponds to Atlantis' default unless you've created your own server-side workflow with that key (overriding it). ::: -See [Custom Workflows](custom-workflows.html) for more details on writing +See [Custom Workflows](custom-workflows.md) for more details on writing custom workflows. 
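With an `allowed_workflows` setup like the one above, an individual repository can then opt into a server-defined workflow from its own `atlantis.yaml`; a sketch where the project name and directory are illustrative:

```yaml
# atlantis.yaml
version: 3
projects:
  - name: my-project
    dir: .
    workflow: custom
```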
### Allow Using Custom Policy Tools + Conftest is the standard policy check application integrated with Atlantis, but custom tools can still be run in custom workflows when the `custom_policy_check` option is set. See the [Custom Policy Checks page](custom-policy-checks.md) for detailed examples. ### Allow Repos To Define Their Own Workflows + If you want repos to be able to define their own workflows you need to allow them to override the `workflow` key and set `allow_custom_workflows` to `true`. @@ -381,6 +405,7 @@ repos: ``` Then each allowed repo can define and use a custom workflow in their `atlantis.yaml` files: + ```yaml # atlantis.yaml version: 3 @@ -398,12 +423,13 @@ workflows: - run: my custom apply command ``` -See [Custom Workflows](custom-workflows.html) for more details on writing +See [Custom Workflows](custom-workflows.md) for more details on writing custom workflows. ### Multiple Atlantis Servers Handle The Same Repository + Running multiple Atlantis servers to handle the same repository can be done to separate permissions for each Atlantis server. -In this case, a different [atlantis.yaml](repo-level-atlantis-yaml.html) repository config file can be used by using different `repos.yaml` files. +In this case, a different [atlantis.yaml](repo-level-atlantis-yaml.md) repository config file can be used by using different `repos.yaml` files. For example, consider a situation where a separate `production-server` atlantis uses repo config `atlantis-production.yaml` and `staging-server` atlantis uses repo config `atlantis-staging.yaml`. @@ -422,7 +448,7 @@ repos: ``` Then, create `atlantis-production.yaml` and `atlantis-staging.yaml` files in the repository. -See the configuration examples in [atlantis.yaml](repo-level-atlantis-yaml.html). +See the configuration examples in [atlantis.yaml](repo-level-atlantis-yaml.md). ```yaml # atlantis-production.yaml @@ -444,25 +470,29 @@ Now, 2 webhook URLs can be setup for the repository, which send events to `produ Each servers handle different repository config files. :::tip Notes -* If `no projects` comments are annoying, set [--silence-no-projects](server-configuration.html#silence-no-projects). -* The command trigger executable name can be reconfigured from `atlantis` to something else by setting [Executable Name](server-configuration.html#executable-name). + +* If `no projects` comments are annoying, set [--silence-no-projects](server-configuration.md#silence-no-projects). +* The command trigger executable name can be reconfigured from `atlantis` to something else by setting [Executable Name](server-configuration.md#executable-name). * When using different atlantis server vcs users such as `@atlantis-staging`, the comment `@atlantis-staging plan` can be used instead `atlantis plan` to call `staging-server` only. ::: ## Reference ### Top-Level Keys -| Key | Type | Default | Required | Description | -|-----------|---------------------------------------------------------|-----------|----------|---------------------------------------------------------------------------------------| -| repos | array[[Repo](#repo)] | see below | no | List of repos to apply settings to. | -| workflows | map[string: [Workflow](custom-workflows.html#workflow)] | see below | no | Map from workflow name to workflow. Workflows override the default Atlantis commands. | -| policies | Policies. | none | no | List of policy sets to run and associated metadata | -| metrics | Metrics. 
| none | no | Map of metric configuration | +| Key | Type | Default | Required | Description | +|-----------|-------------------------------------------------------|-----------|----------|---------------------------------------------------------------------------------------| +| repos | array[[Repo](#repo)] | see below | no | List of repos to apply settings to. | +| workflows | map[string: [Workflow](custom-workflows.md#workflow)] | see below | no | Map from workflow name to workflow. Workflows override the default Atlantis commands. | +| policies | Policies. | none | no | List of policy sets to run and associated metadata | +| metrics | Metrics. | none | no | Map of metric configuration | ::: tip A Note On Defaults + #### `repos` + `repos` always contains a first element with the Atlantis default config: + ```yaml repos: - id: /.*/ @@ -476,7 +506,9 @@ repos: ``` #### `workflows` + `workflows` always contains the Atlantis default workflow under the key `default`: + ```yaml workflows: default: @@ -491,18 +523,19 @@ If you set a workflow with the key `default`, it will override this. ::: ### Repo + | Key | Type | Default | Required | Description | |-------------------------------|-------------------------|-----------------|----------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | id | string | none | yes | Value can be a regular expression when specified as /<regex>/ or an exact string match. Repo IDs are of the form `{vcs hostname}/{org}/{name}`, ex. `github.com/owner/repo`. Hostname is specified without scheme or port. For Bitbucket Server, {org} is the **name** of the project, not the key. | | branch | string | none | no | An regex matching pull requests by base branch (the branch the pull request is getting merged into). By default, all branches are matched | | repo_config_file | string | none | no | Repo config file path in this repo. By default, use `atlantis.yaml` which is located on repository root. When multiple atlantis servers work with the same repo, please set different file names. | | workflow | string | none | no | A custom workflow. | -| plan_requirements | []string | none | no | Requirements that must be satisfied before `atlantis plan` can be run. Currently the only supported requirements are `approved`, `mergeable`, and `undiverged`. See [Command Requirements](command-requirements.html) for more details. | -| apply_requirements | []string | none | no | Requirements that must be satisfied before `atlantis apply` can be run. Currently the only supported requirements are `approved`, `mergeable`, and `undiverged`. See [Command Requirements](command-requirements.html) for more details. | -| import_requirements | []string | none | no | Requirements that must be satisfied before `atlantis import` can be run. Currently the only supported requirements are `approved`, `mergeable`, and `undiverged`. See [Command Requirements](command-requirements.html) for more details. | +| plan_requirements | []string | none | no | Requirements that must be satisfied before `atlantis plan` can be run. Currently the only supported requirements are `approved`, `mergeable`, and `undiverged`. See [Command Requirements](command-requirements.md) for more details. 
| +| apply_requirements | []string | none | no | Requirements that must be satisfied before `atlantis apply` can be run. Currently the only supported requirements are `approved`, `mergeable`, and `undiverged`. See [Command Requirements](command-requirements.md) for more details. | +| import_requirements | []string | none | no | Requirements that must be satisfied before `atlantis import` can be run. Currently the only supported requirements are `approved`, `mergeable`, and `undiverged`. See [Command Requirements](command-requirements.md) for more details. | | allowed_overrides | []string | none | no | A list of restricted keys that `atlantis.yaml` files can override. The only supported keys are `apply_requirements`, `workflow`, `delete_source_branch_on_merge`,`repo_locking`, `repo_locks`, and `custom_policy_check` | | allowed_workflows | []string | none | no | A list of workflows that `atlantis.yaml` files can select from. | -| allow_custom_workflows | bool | false | no | Whether or not to allow [Custom Workflows](custom-workflows.html). | +| allow_custom_workflows | bool | false | no | Whether or not to allow [Custom Workflows](custom-workflows.md). | | delete_source_branch_on_merge | bool | false | no | Whether or not to delete the source branch on merge. | | repo_locking | bool | false | no | (deprecated) Whether or not to get a lock. | | repo_locks | [RepoLocks](#repolocks) | `mode: on_plan` | no | Whether or not repository locks are enabled for this project on plan or apply. See [RepoLocks](#repolocks) for more details. | @@ -511,9 +544,11 @@ If you set a workflow with the key `default`, it will override this. | autodiscover | AutoDiscover | none | no | Auto discover settings for this repo | :::tip Notes + * If multiple repos match, the last match will apply. * If a key isn't defined, it won't override a key that matched from above. For example, given a repo ID `github.com/owner/repo` and a config: + ```yaml repos: - id: /.*/ @@ -524,12 +559,14 @@ If you set a workflow with the key `default`, it will override this. ``` The final config will look like: + ```yaml apply_requirements: [] workflow: default allowed_overrides: [] allow_custom_workflows: true ``` + Where * `apply_requirements` is set from the `id: github.com/owner/repo` config because it overrides the previous matching config from `id: /.*/`. @@ -542,9 +579,11 @@ If you set a workflow with the key `default`, it will override this. ::: ### RepoLocks + ```yaml mode: on_apply ``` + | Key | Type | Default | Required | Description | |------|--------|-----------|----------|---------------------------------------------------------------------------------------------------------------------------------------| | mode | `Mode` | `on_plan` | no | Whether or not repository locks are enabled for this project on plan or apply. Valid values are `disabled`, `on_plan` and `on_apply`. 
| @@ -559,6 +598,7 @@ mode: on_apply | policy_sets | []PolicySet | none | yes | set of policies to run on a plan output | ### Owners + | Key | Type | Default | Required | Description | |-------------|-------------------|---------|------------|---------------------------------------------------------| | users | []string | none | no | list of github users that can approve failing policies | @@ -572,7 +612,6 @@ mode: on_apply | path | string | none | yes | path to the rego policies directory | | source | string | none | yes | only `local` is supported at this time | - ### Metrics | Key | Type | Default | Required | Description | diff --git a/runatlantis.io/docs/stats.md b/runatlantis.io/docs/stats.md index a2980c5634..9c6073ab64 100644 --- a/runatlantis.io/docs/stats.md +++ b/runatlantis.io/docs/stats.md @@ -8,12 +8,11 @@ Currently Statsd and Prometheus is supported. See configuration below for detail ## Configuration -Metrics are configured through the [Server Side Config](server-side-repo-config.html#metrics). +Metrics are configured through the [Server Side Config](server-side-repo-config.md#metrics). ## Available Metrics -Assuming metrics are exposed from the endpoint `/metrics` from the [metrics](server-side-repo-config.html#metrics) server side config e.g. - +Assuming metrics are exposed from the endpoint `/metrics` from the [metrics](server-side-repo-config.md#metrics) server side config e.g. ```yaml metrics: @@ -21,10 +20,8 @@ metrics: endpoint: "/metrics" ``` - To see all the metrics exposed from atlantis service, make a GET request to the `/metrics` endpoint. - ```bash curl localhost:4141/metrics # HELP atlantis_cmd_autoplan_builder_execution_error atlantis_cmd_autoplan_builder_execution_error counter @@ -47,20 +44,19 @@ atlantis_cmd_autoplan_builder_execution_time_count 10 ..... ``` - ::: tip NOTE The output shown above is trimmed, since with every new version release this metric set will need to be updated accordingly as there may be a case if some metrics are added/modified/deprecated, so the output shown above just gives a brief idea of how these metrics look like and rest can be explored. ::: Important metrics to monitor are -| Metric Name | Metric Type | Purpose | -|------------------------------------------------|----------------------------------------------------------------------|--------------------------------------------------------------------------------------------------------------------| -| `atlantis_cmd_autoplan_execution_error` | [counter](https://prometheus.io/docs/concepts/metric_types/#counter) | number of times when [autoplan](autoplanning.html#autoplanning) has thrown error. | -| `atlantis_cmd_comment_plan_execution_error` | [counter](https://prometheus.io/docs/concepts/metric_types/#counter) | number of times when on commenting `atlantis plan` has thrown error. | -| `atlantis_cmd_autoplan_execution_success` | [counter](https://prometheus.io/docs/concepts/metric_types/#counter) | number of times when [autoplan](autoplanning.html#autoplanning) has run successfully. | -| `atlantis_cmd_comment_apply_execution_error` | [counter](https://prometheus.io/docs/concepts/metric_types/#counter) | number of times when on commenting `atlantis apply` has thrown error. | -| `atlantis_cmd_comment_apply_execution_success` | [counter](https://prometheus.io/docs/concepts/metric_types/#counter) | number of times when on commenting `atlantis apply` has run successfully. 
| +| Metric Name | Metric Type | Purpose | +|------------------------------------------------|----------------------------------------------------------------------|-------------------------------------------------------------------------------------| +| `atlantis_cmd_autoplan_execution_error` | [counter](https://prometheus.io/docs/concepts/metric_types/#counter) | number of times when [autoplan](autoplanning.md#autoplanning) has thrown error. | +| `atlantis_cmd_comment_plan_execution_error` | [counter](https://prometheus.io/docs/concepts/metric_types/#counter) | number of times when on commenting `atlantis plan` has thrown error. | +| `atlantis_cmd_autoplan_execution_success` | [counter](https://prometheus.io/docs/concepts/metric_types/#counter) | number of times when [autoplan](autoplanning.md#autoplanning) has run successfully. | +| `atlantis_cmd_comment_apply_execution_error` | [counter](https://prometheus.io/docs/concepts/metric_types/#counter) | number of times when on commenting `atlantis apply` has thrown error. | +| `atlantis_cmd_comment_apply_execution_success` | [counter](https://prometheus.io/docs/concepts/metric_types/#counter) | number of times when on commenting `atlantis apply` has run successfully. | ::: tip NOTE There are plenty of additional metrics exposed by atlantis that are not described above. diff --git a/runatlantis.io/docs/streaming-logs.md b/runatlantis.io/docs/streaming-logs.md index c066d47d47..df936c52f9 100644 --- a/runatlantis.io/docs/streaming-logs.md +++ b/runatlantis.io/docs/streaming-logs.md @@ -20,4 +20,3 @@ This will link to the atlantis UI which provides real-time logging in addition t ::: warning As of now the logs are currently stored in memory and cleared when a given pull request is closed, so this link shouldn't be persisted anywhere. ::: - diff --git a/runatlantis.io/docs/terraform-cloud.md b/runatlantis.io/docs/terraform-cloud.md index bab22a5db0..2e3393d7dd 100644 --- a/runatlantis.io/docs/terraform-cloud.md +++ b/runatlantis.io/docs/terraform-cloud.md @@ -6,15 +6,17 @@ and Private Terraform Enterprise was renamed Terraform Enterprise. ::: Atlantis integrates seamlessly with Terraform Cloud and Terraform Enterprise, whether you're using: + * [Free Remote State Management](https://app.terraform.io) * Terraform Cloud Paid Tiers * A Private Installation of Terraform Enterprise Read the docs below :point_down: depending on your use-case. -[[toc]] ## Using Atlantis With Free Remote State Storage + To use Atlantis with Free Remote State Storage, you need to: + 1. Migrate your state to Terraform Cloud. See [Migrating State from Local Terraform](https://developer.hashicorp.com/terraform/cloud-docs/migrate) 1. Update any projects that are referencing the state you migrated to use the new location 1. [Generate a Terraform Cloud/Enterprise Token](#generating-a-terraform-cloud-enterprise-token) @@ -24,6 +26,7 @@ That's it! Atlantis will run as normal and your state will be stored in Terrafor Cloud. ## Using Atlantis With Terraform Cloud Remote Operations or Terraform Enterprise + Atlantis integrates with the full version of Terraform Cloud and Terraform Enterprise via the [remote backend](https://developer.hashicorp.com/terraform/language/settings/backends/remote). @@ -31,7 +34,9 @@ Atlantis will run `terraform` commands as usual, however those commands will actually be executed *remotely* in Terraform Cloud or Terraform Enterprise. ### Why? 
+ Using Atlantis with Terraform Cloud or Terraform Enterprise gives you access to features like: + * Real-time streaming output * Ability to cancel in-progress commands * Secret variables @@ -40,28 +45,34 @@ Using Atlantis with Terraform Cloud or Terraform Enterprise gives you access to **Without** having to change your pull request workflow. ### Getting Started + To use Atlantis with Terraform Cloud Remote Operations or Terraform Enterprise, you need to: + 1. Migrate your state to Terraform Cloud/Enterprise. See [Migrating State from Local Terraform](https://developer.hashicorp.com/terraform/cloud-docs/migrate) 1. Update any projects that are referencing the state you migrated to use the new location 1. [Generate a Terraform Cloud/Enterprise Token](#generating-a-terraform-cloud-enterprise-token) 1. [Pass the token to Atlantis](#passing-the-token-to-atlantis) ## Generating a Terraform Cloud/Enterprise Token + Atlantis needs a Terraform Cloud/Enterprise Token that it will use to access the API. Using a **Team Token is recommended**, however you can also use a User Token. ### Team Token + To generate a team token, click on **Settings** in the top bar, then **Teams** in the sidebar. Choose an existing team or create a new one. Enable the **Manage Workspaces** permission, then scroll down to **Team API Token**. ### User Token + To generate a user token, click on your avatar, then **User Settings**, then **Tokens** in the sidebar. Ensure the **Manage Workspaces** permission is enabled for this user's team. ## Passing The Token To Atlantis + The token can be passed to Atlantis via the `ATLANTIS_TFE_TOKEN` environment variable. You can also use the `--tfe-token` flag, however your token would then be easily @@ -88,12 +99,14 @@ Under the hood, Atlantis is generating a `~/.terraformrc` file. If you already had a `~/.terraformrc` file where Atlantis is running, then you'll need to manually add the credentials block to that file: -``` + +```hcl ... credentials "app.terraform.io" { token = "xxxx" } ``` + instead of using the `ATLANTIS_TFE_TOKEN` environment variable, since Atlantis won't overwrite your `.terraformrc` file. ::: diff --git a/runatlantis.io/docs/terraform-versions.md b/runatlantis.io/docs/terraform-versions.md index 79fdee0db3..321278d505 100644 --- a/runatlantis.io/docs/terraform-versions.md +++ b/runatlantis.io/docs/terraform-versions.md @@ -4,47 +4,60 @@ You can customize which version of Terraform Atlantis defaults to by setting the `--default-tf-version` flag (ex. `--default-tf-version=v1.3.7`). ## Via `atlantis.yaml` + If you wish to use a different version than the default for a specific repo or project, you need to create an `atlantis.yaml` file and set the `terraform_version` key: + ```yaml version: 3 projects: - dir: . terraform_version: v1.1.5 ``` -See [atlantis.yaml Use Cases](repo-level-atlantis-yaml.html#terraform-versions) for more details. + +See [atlantis.yaml Use Cases](repo-level-atlantis-yaml.md#terraform-versions) for more details. 
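For instance, a repo that pins one project to an older Terraform release while the rest of the repo follows the server-wide `--default-tf-version` could use a repo-level config along these lines (a sketch only; the project names and directories are hypothetical):

```yaml
# atlantis.yaml (illustrative)
version: 3
projects:
  - name: legacy             # hypothetical project that stays on an older release
    dir: legacy
    terraform_version: v1.1.5
  - name: current            # hypothetical project with no terraform_version set,
    dir: current             # so it uses the version from --default-tf-version
```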
## Via terraform config + Alternatively, one can use the terraform configuration block's `required_version` key to specify an exact version (`x.y.z` or `= x.y.z`), or as of [atlantis v0.21.0](https://github.com/runatlantis/atlantis/releases/tag/v0.21.0), a comparison or pessimistic [version constraint](https://developer.hashicorp.com/terraform/language/expressions/version-constraints#version-constraint-syntax): -#### Exactly version 1.2.9 + +### Exactly version 1.2.9 + ```tf terraform { required_version = "= 1.2.9" } ``` -#### Any patch/tiny version of minor version 1.2 (1.2.z) + +### Any patch/tiny version of minor version 1.2 (1.2.z) + ```tf terraform { required_version = "~> 1.2.0" } ``` -#### Any minor version of major version 1 (1.y.z) + +### Any minor version of major version 1 (1.y.z) + ```tf terraform { required_version = "~> 1.2" } ``` -#### Any version that is at least 1.2.0 + +### Any version that is at least 1.2.0 + ```tf terraform { required_version = ">= 1.2.0" } ``` + See [Terraform `required_version`](https://developer.hashicorp.com/terraform/language/settings#specifying-a-required-terraform-version) for reference. ::: tip NOTE Atlantis will automatically download the latest version that fulfills the constraint specified. -A `terraform_version` specified in the `atlantis.yaml` file takes precedence over both the [`--default-tf-version`](server-configuration.html#default-tf-version) flag and the `required_version` in the terraform hcl. +A `terraform_version` specified in the `atlantis.yaml` file takes precedence over both the [`--default-tf-version`](server-configuration.md#default-tf-version) flag and the `required_version` in the terraform hcl. ::: ::: tip NOTE diff --git a/runatlantis.io/docs/troubleshooting-https.md b/runatlantis.io/docs/troubleshooting-https.md index 191a4b1242..f59058da1c 100644 --- a/runatlantis.io/docs/troubleshooting-https.md +++ b/runatlantis.io/docs/troubleshooting-https.md @@ -3,25 +3,24 @@ When using a self-signed certificate for Atlantis (with flags `--ssl-cert-file` and `--ssl-key-file`), there are a few considerations. -Atlantis uses the web server from the standard Go library, +Atlantis uses the web server from the standard Go library, the method name is [ListenAndServeTLS](https://pkg.go.dev/net/http#ListenAndServeTLS). `ListenAndServeTLS` acts identically to [ListenAndServe](https://pkg.go.dev/net/http#ListenAndServe), -except that it expects HTTPS connections. -Additionally, files containing a certificate and matching private key for the server must be provided. -If the certificate is signed by a certificate authority, -the file passed to `--ssl-cert-file` should be the concatenation of the server's certificate, any intermediates, and the CA's certificate. +except that it expects HTTPS connections. +Additionally, files containing a certificate and matching private key for the server must be provided. +If the certificate is signed by a certificate authority, +the file passed to `--ssl-cert-file` should be the concatenation of the server's certificate, any intermediates, and the CA's certificate. -If you have this error when specifying a TLS cert with a key: -``` +If you have this error when specifying a TLS cert with a key: + +```plain [ERROR] server.go:413 server: Tls: private key does not match public key ``` Check that the locally signed certificate authority is prepended to the self signed certificate. 
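A minimal sketch of assembling such a bundle before starting Atlantis, using placeholder file names for the server certificate, intermediate, and CA certificate:

```shell
# Order matters: server certificate first, then any intermediates, then the CA certificate.
cat server.crt intermediate.crt ca.crt > atlantis-bundle.crt

# Point Atlantis at the bundle and the matching private key (other flags omitted).
atlantis server \
  --ssl-cert-file=atlantis-bundle.crt \
  --ssl-key-file=server.key
```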
-A good example is shown at [Seth Vargo terraform implementation of atlantis-on-gke](https://github.com/sethvargo/atlantis-on-gke/blob/master/terraform/tls.tf#L64) +A good example is shown at [Seth Vargo terraform implementation of atlantis-on-gke](https://github.com/sethvargo/atlantis-on-gke/blob/master/terraform/tls.tf#L64-L84) For Go specific TLS resources have a look at the repository by [denji called golang-tls](https://github.com/denji/golang-tls). For a complete explanation on PKI, read this [article](https://smallstep.com/blog/everything-pki.html). - - diff --git a/runatlantis.io/docs/upgrading-atlantis-yaml.md b/runatlantis.io/docs/upgrading-atlantis-yaml.md index 1b8fe7aaa0..37e20900e8 100644 --- a/runatlantis.io/docs/upgrading-atlantis-yaml.md +++ b/runatlantis.io/docs/upgrading-atlantis-yaml.md @@ -1,15 +1,17 @@ # Upgrading atlantis.yaml ## Upgrading From v2 To v3 + Atlantis version `v0.7.0` introduced a new version 3 of `atlantis.yaml`. -**If you're not using [custom `run` steps](custom-workflows.html#custom-run-command), +**If you're not using [custom `run` steps](custom-workflows.md#custom-run-command), then you can upgrade from `version: 2` to `version: 3` without any changes.** **NOTE:** Version 2 **is not being deprecated** and there is no need to upgrade your version if you don't wish to do so. The only change from v2 to v3 is that we're parsing custom `run` steps differently. + ```yaml # atlantis.yaml workflows: @@ -18,33 +20,38 @@ workflows: steps: - run: my custom command ``` +
An example workflow using a custom run step
Previously, we used a library that would parse the custom step prior to running it. Now, we just run the step directly. This will only affect your steps if they were using shell escaping of some sort. For example, if your step was previously: + ```yaml # version: 2 - run: "printf \'print me\'" ``` You can now write this in version 3 as: + ```yaml # version: 3 - run: "printf 'print me'" ``` - ## Upgrading From V1 To V3 + If you are upgrading from an **old** Atlantis version `<=v0.3.10` (from before July 4, 2018) you'll need to follow the following steps. ### Single atlantis.yaml + If you had multiple `atlantis.yaml` files per directory then you'll need to consolidate them into a single `atlantis.yaml` file at the root of the repo. For example, if you had a directory structure: -``` + +```plain . ├── project1 │ └── atlantis.yaml @@ -53,7 +60,8 @@ For example, if you had a directory structure: ``` Then your new structure would look like: -``` + +```plain . ├── atlantis.yaml ├── project1 @@ -61,6 +69,7 @@ Then your new structure would look like: ``` And your `atlantis.yaml` would look something like: + ```yaml version: 2 projects: @@ -80,13 +89,16 @@ workflows: We will talk more about `workflows` below. ### Terraform Version + The `terraform_version` key moved from being a top-level key to being per `project` so if before your `atlantis.yaml` was in directory `mydir` and looked like: + ```yaml terraform_version: 0.11.0 ``` Then your new config would be: + ```yaml version: 2 projects: @@ -95,9 +107,11 @@ projects: ``` ### Workflows + Workflows are the new way to set all `pre_*`, `post_*` and `extra_arguments`. Each `project` can have a custom workflow via the `workflow` key. + ```yaml version: 2 projects: @@ -106,6 +120,7 @@ projects: ``` Workflows are defined as a top-level key: + ```yaml version: 2 projects: @@ -118,6 +133,7 @@ workflows: To start with, determine whether you're customizing commands that happen during `plan` or `apply`. You then set that key under the workflow's name: + ```yaml ... workflows: @@ -133,6 +149,7 @@ workflows: If you're not customizing a specific stage then you can omit that key. For example if you're only customizing the commands that happen during `plan` then your config will look like: + ```yaml ... workflows: @@ -143,7 +160,9 @@ workflows: ``` #### Extra Arguments + `extra_arguments` is now specified as follows. Given a previous config: + ```yaml extra_arguments: - command_name: init @@ -158,6 +177,7 @@ extra_arguments: ``` Your config would now look like: + ```yaml ... workflows: @@ -174,8 +194,8 @@ workflows: extra_args: ["-lock=false"] ``` - #### Pre/Post Commands + Instead of using `pre_*` or `post_*`, you now can insert your custom commands before/after the built-in commands. Given a previous config: @@ -202,6 +222,7 @@ post_apply: ``` Your config would now look like: + ```yaml ... workflows: diff --git a/runatlantis.io/docs/using-atlantis.md b/runatlantis.io/docs/using-atlantis.md index 15a0b5a681..61c06e1a21 100644 --- a/runatlantis.io/docs/using-atlantis.md +++ b/runatlantis.io/docs/using-atlantis.md @@ -5,8 +5,9 @@ Atlantis triggers commands via pull request comments. ::: tip You can use following executable names. + * `atlantis help` - * `atlantis` is executable name. You can configure by [Executable Name](/docs/server-configuration.html#executable-name). + * `atlantis` is executable name. You can configure by [Executable Name](server-configuration.md#executable-name). * `run help` * `run` is a global executable name. 
* `@GithubUser help` @@ -14,35 +15,46 @@ You can use following executable names. ::: Currently, Atlantis supports the following commands. -[[toc]] --- + ## atlantis help + ```bash atlantis help ``` + ### Explanation + View help --- + ## atlantis version + ```bash atlantis version ``` ### Explanation + Print the output of 'terraform version'. --- + ## atlantis plan + ```bash atlantis plan [options] -- [terraform plan flags] ``` + ### Explanation + Runs `terraform plan` on the pull request's branch. You may wish to re-run plan after Atlantis has already done so if you've changed some resources manually. ### Examples + ```bash # Runs plan for any projects that Atlantis thinks were modified. # If an `atlantis.yaml` file is specified, runs plan on the projects that @@ -60,9 +72,10 @@ atlantis plan -w staging ``` ### Options + * `-d directory` Which directory to run plan in relative to root of repo. Use `.` for root. - * Ex. `atlantis plan -d child/dir` -* `-p project` Which project to run plan for. Refers to the name of the project configured in the repo's [`atlantis.yaml` file](repo-level-atlantis-yaml.html). Cannot be used at same time as `-d` or `-w` because the project defines this already. + * Ex. `atlantis plan -d child/dir` +* `-p project` Which project to run plan for. Refers to the name of the project configured in the repo's [`atlantis.yaml` file](repo-level-atlantis-yaml.md). Cannot be used at same time as `-d` or `-w` because the project defines this already. * `-w workspace` Switch to this [Terraform workspace](https://developer.hashicorp.com/terraform/language/state/workspaces) before planning. Defaults to `default`. Ignore this if Terraform workspaces are unused. * `--verbose` Append Atlantis log to comment. @@ -74,30 +87,38 @@ A `atlantis plan` (without flags), like autoplans, discards all plans previously If `terraform plan` requires additional arguments, like `-target=resource` or `-var 'foo=bar'` or `-var-file myfile.tfvars` you can append them to the end of the comment after `--`, ex. -``` + +```shell atlantis plan -d dir -- -var foo='bar' ``` -If you always need to append a certain flag, see [Custom Workflow Use Cases](custom-workflows.html#adding-extra-arguments-to-terraform-commands). + +If you always need to append a certain flag, see [Custom Workflow Use Cases](custom-workflows.md#adding-extra-arguments-to-terraform-commands). ### Using the -destroy Flag #### Example + To perform a destructive plan that will destroy resources you can use the `-destroy` flag like this: ```bash atlantis plan -- -destroy atlantis plan -d dir -- -destroy ``` -::: warning NOTE + +::: warning NOTE The `-destroy` flag generates a destroy plan, If this plan is applied it can result in data loss or service disruptions. Ensure that you have thoroughly reviewed your Terraform configuration and intend to remove the specified resources before using this flag. ::: --- + ## atlantis apply + ```bash atlantis apply [options] -- [terraform apply flags] ``` + ### Explanation + Runs `terraform apply` for the plan that matches the directory/project/workspace. ::: tip @@ -106,8 +127,8 @@ This includes all projects that have been planned manually with `atlantis plan` For Atlantis commands to work, Atlantis needs to know the location where the plan file is. For that, you can use $PLANFILE which will contain the path of the plan file to be used in your custom steps. i.e `terraform plan -out $PLANFILE` ::: - ### Examples + ```bash # Runs apply for all unapplied plans from this pull request. 
atlantis apply @@ -123,15 +144,17 @@ atlantis apply -w staging ``` ### Options + * `-d directory` Apply the plan for this directory, relative to root of repo. Use `.` for root. -* `-p project` Apply the plan for this project. Refers to the name of the project configured in the repo's [`atlantis.yaml` file](repo-level-atlantis-yaml.html). Cannot be used at same time as `-d` or `-w`. +* `-p project` Apply the plan for this project. Refers to the name of the project configured in the repo's [`atlantis.yaml` file](repo-level-atlantis-yaml.md). Cannot be used at same time as `-d` or `-w`. * `-w workspace` Apply the plan for this [Terraform workspace](https://developer.hashicorp.com/terraform/language/state/workspaces). Ignore this if Terraform workspaces are unused. -* `--auto-merge-disabled` Disable [automerge](automerging.html) for this apply command. +* `--auto-merge-disabled` Disable [automerge](automerging.md) for this apply command. * `--verbose` Append Atlantis log to comment. ### Additional Terraform flags Because Atlantis under the hood is running `terraform apply plan.tfplan`, any Terraform options that would change the `plan` are ignored, ex: + * `-target=resource` * `-var 'foo=bar'` * `-var-file=myfile.tfvars` @@ -140,17 +163,22 @@ They're ignored because they can't be specified for an already generated planfil If you would like to specify these flags, do it while running `atlantis plan`. --- + ## atlantis import + ```bash atlantis import [options] ADDRESS ID -- [terraform import flags] ``` + ### Explanation + Runs `terraform import` that matches the directory/project/workspace. This command discards the terraform plan result. After an import and before an apply, another `atlantis plan` must be run again. -To allow the `import` command requires [--allow-commands](/docs/server-configuration.html#allow-commands) configuration. +To allow the `import` command requires [--allow-commands](server-configuration.md#allow-commands) configuration. ### Examples + ```bash # Runs import atlantis import ADDRESS ID @@ -166,36 +194,45 @@ atlantis import -w staging ADDRESS ID ``` ::: tip + * If import for_each resources, it requires a single quoted address. * ex. `atlantis import 'aws_instance.example["foo"]' i-1234567890abcdef0` ::: ### Options + * `-d directory` Import a resource for this directory, relative to root of repo. Use `.` for root. -* `-p project` Import a resource for this project. Refers to the name of the project configured in the repo's [`atlantis.yaml`](repo-level-atlantis-yaml.html) repo configuration file. This cannot be used at the same time as `-d` or `-w`. +* `-p project` Import a resource for this project. Refers to the name of the project configured in the repo's [`atlantis.yaml`](repo-level-atlantis-yaml.md) repo configuration file. This cannot be used at the same time as `-d` or `-w`. * `-w workspace` Import a resource for a specific [Terraform workspace](https://developer.hashicorp.com/terraform/language/state/workspaces). Ignore this if Terraform workspaces are unused. ### Additional Terraform flags If `terraform import` requires additional arguments, like `-var 'foo=bar'` or `-var-file myfile.tfvars` append them to the end of the comment after `--`, e.g. -``` + +```shell atlantis import -d dir 'aws_instance.example["foo"]' i-1234567890abcdef0 -- -var foo='bar' ``` -If a flag is needed to be always appended, see [Custom Workflow Use Cases](custom-workflows.html#adding-extra-arguments-to-terraform-commands). 
+ +If a flag is needed to be always appended, see [Custom Workflow Use Cases](custom-workflows.md#adding-extra-arguments-to-terraform-commands). --- + ## atlantis state rm + ```bash atlantis state [options] rm ADDRESS... -- [terraform state rm flags] ``` + ### Explanation + Runs `terraform state rm` that matches the directory/project/workspace. This command discards the terraform plan result. After run state rm and before an apply, another `atlantis plan` must be run again. -To allow the `state` command requires [--allow-commands](/docs/server-configuration.html#allow-commands) configuration. +To allow the `state` command requires [--allow-commands](server-configuration.md#allow-commands) configuration. ### Examples + ```bash # Runs state rm atlantis state rm ADDRESS1 ADDRESS2 @@ -211,44 +248,55 @@ atlantis state -w staging rm ADDRESS ``` ::: tip + * If run state rm to for_each resources, it requires a single quoted address. * ex. `atlantis state rm 'aws_instance.example["foo"]'` ::: ### Options + * `-d directory` Run state rm a resource for this directory, relative to root of repo. Use `.` for root. -* `-p project` Run state rm a resource for this project. Refers to the name of the project configured in the repo's [`atlantis.yaml`](repo-level-atlantis-yaml.html) repo configuration file. This cannot be used at the same time as `-d` or `-w`. +* `-p project` Run state rm a resource for this project. Refers to the name of the project configured in the repo's [`atlantis.yaml`](repo-level-atlantis-yaml.md) repo configuration file. This cannot be used at the same time as `-d` or `-w`. * `-w workspace` Run state rm a resource for a specific [Terraform workspace](https://developer.hashicorp.com/terraform/language/state/workspaces). Ignore this if Terraform workspaces are unused. ### Additional Terraform flags If `terraform state rm` requires additional arguments, like `-lock=false'` append them to the end of the comment after `--`, e.g. -``` + +```shell atlantis state -d dir rm 'aws_instance.example["foo"]' -- -lock=false ``` -If a flag is needed to be always appended, see [Custom Workflow Use Cases](custom-workflows.html#adding-extra-arguments-to-terraform-commands). + +If a flag is needed to be always appended, see [Custom Workflow Use Cases](custom-workflows.md#adding-extra-arguments-to-terraform-commands). --- + ## atlantis unlock + ```bash atlantis unlock ``` ### Explanation + Removes all atlantis locks and discards all plans for this PR. To unlock a specific plan you can use the Atlantis UI. --- + ## atlantis approve_policies + ```bash atlantis approve_policies ``` ### Explanation + Approves all current policy checking failures for the PR. -See also [policy checking](/docs/policy-checking.html). +See also [policy checking](policy-checking.md). ### Options + * `--verbose` Append Atlantis log to comment. diff --git a/runatlantis.io/docs/using-slack-hooks.md b/runatlantis.io/docs/using-slack-hooks.md index c75c243fca..572b0857f8 100644 --- a/runatlantis.io/docs/using-slack-hooks.md +++ b/runatlantis.io/docs/using-slack-hooks.md @@ -13,7 +13,7 @@ For this you'll need to: ## Configuring Slack for Atlantis -* Go to [https://api.slack.com/apps](https://api.slack.com/apps) +* Go to [Slack: Apps](https://api.slack.com/apps) * Click the `Create New App` button * Select `From scratch` in the dialog that opens * Give it a name, e.g. `atlantis-bot`. 
@@ -43,13 +43,12 @@ webhooks: workspace-regex: .* branch-regex: .* kind: slack - channel: my-channel + channel: my-channel-id ``` If you are deploying Atlantis as a Helm chart, this can be implemented via the `config` parameter available for [chart customizations](https://github.com/runatlantis/helm-charts#customization): -``` - +```yaml ## Use Server Side Config, ## ref: https://www.runatlantis.io/docs/server-configuration.html config: | @@ -59,9 +58,7 @@ config: | workspace-regex: .* branch-regex: .* kind: slack - channel: my-channel + channel: my-channel-id ``` - - -The `apply` event information will be sent to the `my-channel` Slack channel. +The `apply` event information will be sent to the `my-channel-id` Slack channel. diff --git a/runatlantis.io/docs/webhook-secrets.md b/runatlantis.io/docs/webhook-secrets.md index 8b66ee8276..4e2ab1a059 100644 --- a/runatlantis.io/docs/webhook-secrets.md +++ b/runatlantis.io/docs/webhook-secrets.md @@ -17,27 +17,30 @@ Azure DevOps uses Basic authentication for webhooks rather than webhook secrets. ::: ::: tip NOTE -An app-wide token is generated during [GitHub App setup](access-credentials.html#github-app). You can recover it by navigating to the [GitHub app settings page](https://github.com/settings/apps) and selecting "Edit" next to your Atlantis app's name. Token appears after clicking "Edit" under the Webhook header. +An app-wide token is generated during [GitHub App setup](access-credentials.md#github-app). You can recover it by navigating to the [GitHub app settings page](https://github.com/settings/apps) and selecting "Edit" next to your Atlantis app's name. Token appears after clicking "Edit" under the Webhook header. ::: ::: warning Bitbucket.org **does not** support webhook secrets. -To mitigate, use repo allowlists and IP allowlists. See [Security](security.html#bitbucket-cloud-bitbucket-org) for more information. +To mitigate, use repo allowlists and IP allowlists. See [Security](security.md#bitbucket-cloud-bitbucket-org) for more information. ::: ## Generating A Webhook Secret + You can use any random string generator to create your Webhook secret. It should be > 24 characters. For example: + * Generate via Ruby with `ruby -rsecurerandom -e 'puts SecureRandom.hex(32)'` -* Generate online with [https://www.browserling.com/tools/random-string](https://www.browserling.com/tools/random-string) +* Generate online with [browserling: Generate Random Strings and Numbers](https://www.browserling.com/tools/random-string) ::: tip NOTE You must use **the same** webhook secret for each repo. 
::: ## Next Steps + * Record your secret -* You'll be using it later to [configure your webhooks](configuring-webhooks.html), however if you're -following the [Installation Guide](installation-guide.html) then your next step is to -[Deploy Atlantis](deployment.html) +* You'll be using it later to [configure your webhooks](configuring-webhooks.md), however if you're +following the [Installation Guide](installation-guide.md) then your next step is to +[Deploy Atlantis](deployment.md) diff --git a/runatlantis.io/e2e/site-check.spec.js b/runatlantis.io/e2e/site-check.spec.js new file mode 100644 index 0000000000..2fbf3b5a3a --- /dev/null +++ b/runatlantis.io/e2e/site-check.spec.js @@ -0,0 +1,12 @@ +import { test } from '@playwright/test'; + +test('page should load without errors', async ({ page }) => { + // Listen for any errors that occur within the page + page.on('pageerror', error => { + console.error('Page error:', error.message); + throw new Error(`Page error: ${error.message}`); + }); + + // Navigate to the URL + await page.goto('http://localhost:8080/'); +}); diff --git a/runatlantis.io/guide/README.md b/runatlantis.io/guide.md similarity index 80% rename from runatlantis.io/guide/README.md rename to runatlantis.io/guide.md index 15472518b8..9d71a3acf1 100644 --- a/runatlantis.io/guide/README.md +++ b/runatlantis.io/guide.md @@ -1,15 +1,17 @@ # Introduction ## Getting Started -* If you'd like to just test out running Atlantis on an **example repo** check out the [Test Drive](test-drive.html). -* If you'd like to test out running Atlantis on **your repos** then read [Testing Locally](testing-locally.html). -* If you're ready to properly install Atlantis on real infrastructure then head over to the [Installation Guide](/docs/installation-guide.html). + +* If you'd like to just test out running Atlantis on an **example repo** check out the [Test Drive](./guide/test-drive.md). +* If you'd like to test out running Atlantis on **your repos** then read [Testing Locally](./guide/testing-locally.md). +* If you're ready to properly install Atlantis on real infrastructure then head over to the [Installation Guide](./docs/installation-guide.md). ::: tip Looking for the full docs? -Go here: [www.runatlantis.io/docs](/docs/) +Go here: [www.runatlantis.io/docs](./docs.md) ::: ## Overview – What Is Atlantis? + Atlantis is an application for automating Terraform via pull requests. It is deployed as a standalone application into your infrastructure. No third-party has access to your credentials. @@ -21,14 +23,18 @@ When you want to apply, comment `atlantis apply` on the pull request and Atlanti will run `terraform apply` and comment back with the output. ## Watch + Check out the video below to see it in action: -[![Atlantis Walkthrough](./images/atlantis-walkthrough-icon.png)](https://www.youtube.com/watch?v=TmIPWda0IKg) +[![Atlantis Walkthrough](./guide/images/atlantis-walkthrough-icon.png)](https://www.youtube.com/watch?v=TmIPWda0IKg) ## Why would you run Atlantis? + ### Increased visibility + When everyone is executing Terraform on their own computers, it's hard to know the current state of your infrastructure: + * Is what's in `main` branch deployed? * Did someone forget to create a pull request for that latest change? * What was the output from that last `terraform apply`? @@ -37,6 +43,7 @@ With Atlantis, everything is visible on the pull request. You can view the histo of everything that was done to your infrastructure. 
### Enable collaboration with everyone + You probably don't want to distribute Terraform credentials to everyone in your engineering organization, but now anyone can open up a Terraform pull request. @@ -44,10 +51,12 @@ You can require approval before the pull request is applied so nothing happens accidentally. ### Review Terraform pull requests better + You can't fully review a Terraform change without seeing the output of `terraform plan`. Now that output is added to the pull request automatically. ### Standardize your workflows + Atlantis locks a directory/workspace until the pull request is merged or the lock is manually deleted. This ensures that changes are applied in the order expected. @@ -55,6 +64,7 @@ The exact commands that Atlantis runs are configurable. You can run custom scrip to construct your ideal workflow. ## Next Steps -* If you'd like to just test out running Atlantis on an **example repo** check out the [Test Drive](test-drive.html). -* If you'd like to test out running Atlantis on **your repos** then read [Testing Locally](testing-locally.html). -* If you're ready to properly install Atlantis on real infrastructure then head over to the [Installation Guide](/docs/installation-guide.html). + +* If you'd like to just test out running Atlantis on an **example repo** check out the [Test Drive](./guide/test-drive.md). +* If you'd like to test out running Atlantis on **your repos** then read [Testing Locally](./guide/testing-locally.md). +* If you're ready to properly install Atlantis on real infrastructure then head over to the [Installation Guide](./docs/installation-guide.md). diff --git a/runatlantis.io/guide/test-drive.md b/runatlantis.io/guide/test-drive.md index 22e8c77f21..8510f0a0e2 100644 --- a/runatlantis.io/guide/test-drive.md +++ b/runatlantis.io/guide/test-drive.md @@ -1,18 +1,22 @@ # Test Drive -To test drive Atlantis on an example repo, download the latest release: -[https://github.com/runatlantis/atlantis/releases](https://github.com/runatlantis/atlantis/releases) + +To test drive Atlantis on an example repo, download the latest release from +[GitHub](https://github.com/runatlantis/atlantis/releases) Once you've extracted the archive, run: + ```bash ./atlantis testdrive ``` This mode sets up Atlantis on a test repo so you can try it out. It will + - Fork an example Terraform project into your GitHub account - Install Terraform (if not already in your PATH) - Install [ngrok](https://ngrok.com/) so we can expose Atlantis to GitHub - Start Atlantis so you can execute commands on the pull request ## Next Steps -* If you're ready to test out running Atlantis on **your repos** then read [Testing Locally](testing-locally.html). -* If you're ready to properly install Atlantis on real infrastructure then head over to the [Installation Guide](/docs/installation-guide.html). + +- If you're ready to test out running Atlantis on **your repos** then read [Testing Locally](testing-locally.md). +- If you're ready to properly install Atlantis on real infrastructure then head over to the [Installation Guide](../docs/installation-guide.md). 
diff --git a/runatlantis.io/guide/testing-locally.md b/runatlantis.io/guide/testing-locally.md index 3c33d9b12e..104d40a942 100644 --- a/runatlantis.io/guide/testing-locally.md +++ b/runatlantis.io/guide/testing-locally.md @@ -1,45 +1,51 @@ # Testing Locally + These instructions are for running Atlantis **locally on your own computer** so you can test it out against your own repositories before deciding whether to install it more permanently. ::: tip -If you want to set up a production-ready Atlantis installation, read [Deployment](../docs/deployment.html). +If you want to set up a production-ready Atlantis installation, read [Deployment](../docs/deployment.md). ::: Steps: -[[toc]] - ## Install Terraform + `terraform` needs to be in the `$PATH` for Atlantis. -Download from [https://developer.hashicorp.com/terraform/downloads](https://developer.hashicorp.com/terraform/downloads) -``` +Download from [Terraform](https://developer.hashicorp.com/terraform/downloads) + +```shell unzip path/to/terraform_*.zip -d /usr/local/bin ``` ## Download Atlantis -Get the latest release from [https://github.com/runatlantis/atlantis/releases](https://github.com/runatlantis/atlantis/releases) + +Get the latest release from [GitHub](https://github.com/runatlantis/atlantis/releases) and unpackage it. ## Download Ngrok + Atlantis needs to be accessible somewhere that github.com/gitlab.com/bitbucket.org or your GitHub/GitLab Enterprise installation can reach. One way to accomplish this is with ngrok, a tool that forwards your local port to a random public hostname. -Go to [https://ngrok.com/download](https://ngrok.com/download), download ngrok and `unzip` it. +[Download](https://ngrok.com/download) ngrok and `unzip` it. Start `ngrok` on port `4141` and take note of the hostname it gives you: + ```bash ./ngrok http 4141 ``` In a new tab (where you'll soon start Atlantis) create an environment variable with ngrok's hostname: + ```bash URL="https://{YOUR_HOSTNAME}.ngrok.io" ``` ## Create a Webhook Secret + GitHub and GitLab use webhook secrets so clients can verify that the webhooks came from them. ::: warning @@ -47,16 +53,19 @@ Bitbucket Cloud (bitbucket.org) doesn't use webhook secrets so if you're using B When you're ready to do a production deploy of Atlantis you should allowlist [Bitbucket IPs](https://confluence.atlassian.com/bitbucket/what-are-the-bitbucket-cloud-ip-addresses-i-should-use-to-configure-my-corporate-firewall-343343385.html) to ensure the webhooks are coming from them. ::: -Create a random string of any length (you can use [https://www.random.org/strings/](https://www.random.org/strings/)) +Create a random string of any length (you can use [random.org](https://www.random.org/strings/)) and set an environment variable: -``` + +```shell SECRET="{YOUR_RANDOM_STRING}" ``` ## Add Webhook + Take the URL that ngrok output and create a webhook in your GitHub, GitLab or Bitbucket repo: ### GitHub or GitHub Enterprise Webhook +
Expand
    @@ -82,6 +91,7 @@ Take the URL that ngrok output and create a webhook in your GitHub, GitLab or Bi
### GitLab or GitLab Enterprise Webhook +
Expand
    @@ -103,6 +113,7 @@ Take the URL that ngrok output and create a webhook in your GitHub, GitLab or Bi
### Bitbucket Cloud (bitbucket.org) Webhook +
Expand
    @@ -124,6 +135,7 @@ Take the URL that ngrok output and create a webhook in your GitHub, GitLab or Bi
### Bitbucket Server (aka Stash) Webhook +
Expand
    @@ -141,6 +153,7 @@ Take the URL that ngrok output and create a webhook in your GitHub, GitLab or Bi
### Gitea Webhook +
Expand
    @@ -148,13 +161,13 @@ Take the URL that ngrok output and create a webhook in your GitHub, GitLab or Bi
  • Click Add webhook > Gitea (Gitea webhooks are service specific, but this works)
  • set Target URL to http://$URL/events (or https://$URL/events if you're using SSL) where $URL is where Atlantis is hosted. Be sure to add /events
  • double-check you added /events to the end of your URL.
  • -
  • set Secret to the Webhook Secret you generated previously +
  • set Secret to the Webhook Secret you generated previously
    • NOTE If you're adding a webhook to multiple repositories, each repository will need to use the same secret.
  • Select Custom Events...
  • -
  • Check the boxes +
  • Check the boxes
    • Repository events > Push
    • Issue events > Issue Comment
    • @@ -170,50 +183,59 @@ Take the URL that ngrok output and create a webhook in your GitHub, GitLab or Bi
- ## Create an access token for Atlantis + We recommend using a dedicated CI user or creating a new user named **@atlantis** that performs all API actions, however for testing, you can use your own user. Here we'll create the access token that Atlantis uses to comment on the pull request and set commit statuses. ### GitHub or GitHub Enterprise Access Token + - Create a [Personal Access Token](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token#creating-a-fine-grained-personal-access-token) - create a token with **repo** scope - set the token as an environment variable -``` + +```shell TOKEN="{YOUR_TOKEN}" ``` ### GitLab or GitLab Enterprise Access Token -- follow [https://docs.gitlab.com/ce/user/profile/personal_access_tokens.html#create-a-personal-access-token](https://docs.gitlab.com/ce/user/profile/personal_access_tokens.html#create-a-personal-access-token) + +- follow [GitLab: Create a personal access token](https://docs.gitlab.com/ce/user/profile/personal_access_tokens.html#create-a-personal-access-token) - create a token with **api** scope - set the token as an environment variable -``` + +```shell TOKEN="{YOUR_TOKEN}" ``` ### Bitbucket Cloud (bitbucket.org) Access Token -- follow [https://support.atlassian.com/bitbucket-cloud/docs/create-an-app-password/](https://support.atlassian.com/bitbucket-cloud/docs/create-an-app-password/) + +- follow [BitBucket Cloud: Create an app password](https://support.atlassian.com/bitbucket-cloud/docs/create-an-app-password/) - Label the password "atlantis" - Select **Pull requests**: **Read** and **Write** so that Atlantis can read your pull requests and write comments to them - set the token as an environment variable -``` + +```shell TOKEN="{YOUR_TOKEN}" ``` ### Bitbucket Server (aka Stash) Access Token + - Click on your avatar in the top right and select **Manage account** - Click **HTTP access tokens** in the sidebar - Click **Create token** - Name the token **atlantis** - Give the token **Read** Project permissions and **Write** Pull request permissions -- Choose an Expiry option **Do not expire** or **Expire automatically** +- Choose an Expiry option **Do not expire** or **Expire automatically** - Click **Create** and set the token as an environment variable -``` + +```shell TOKEN="{YOUR_TOKEN}" ``` ### Gite Access Token + - Go to "Profile and Settings" > "Settings" in Gitea (top-right) - Go to "Applications" under "User Settings" in Gitea - Create a token under the "Manage Access Tokens" with the following permissions: @@ -222,6 +244,7 @@ TOKEN="{YOUR_TOKEN}" - Record the access token ## Start Atlantis + You're almost ready to start Atlantis, just set two more variables: ```bash @@ -232,9 +255,11 @@ REPO_ALLOWLIST="$YOUR_GIT_HOST/$YOUR_USERNAME/$YOUR_REPO" # server without scheme or port and $YOUR_USERNAME will be the name of the **project** the repo # is under, **not the key** of the project. ``` + Now you can start Atlantis. The exact command differs depending on your Git host: ### GitHub Command + ```bash atlantis server \ --atlantis-url="$URL" \ @@ -245,6 +270,7 @@ atlantis server \ ``` ### GitHub Enterprise Command + ```bash HOSTNAME=YOUR_GITHUB_ENTERPRISE_HOSTNAME # ex. github.runatlantis.io atlantis server \ @@ -257,6 +283,7 @@ atlantis server \ ``` ### GitLab Command + ```bash atlantis server \ --atlantis-url="$URL" \ @@ -267,6 +294,7 @@ atlantis server \ ``` ### GitLab Enterprise Command + ```bash HOSTNAME=YOUR_GITLAB_ENTERPRISE_HOSTNAME # ex. 
gitlab.runatlantis.io atlantis server \ @@ -279,6 +307,7 @@ atlantis server \ ``` ### Bitbucket Cloud (bitbucket.org) Command + ```bash atlantis server \ --atlantis-url="$URL" \ @@ -288,6 +317,7 @@ atlantis server \ ``` ### Bitbucket Server (aka Stash) Command + ```bash BASE_URL=YOUR_BITBUCKET_SERVER_URL # ex. http://bitbucket.mycorp:7990 atlantis server \ @@ -315,7 +345,7 @@ atlantis server \ --ssl-key-file=file.key ``` -### Gitea +### Gitea ```bash atlantis server \ @@ -331,45 +361,55 @@ atlantis server \ ``` ## Create a pull request + Create a pull request so you can test Atlantis. ::: tip You could add a null resource as a test: + ```hcl resource "null_resource" "example" {} ``` + Or just modify the whitespace in a file. ::: ### Autoplan + You should see Atlantis logging about receiving the webhook and you should see the output of `terraform plan` on your repo. Atlantis tries to figure out the directory to plan in based on the files modified. If you need to customize the directories that Atlantis runs in or the commands it runs if you're using workspaces -or `.tfvars` files, see [atlantis.yaml Reference](/docs/repo-level-atlantis-yaml.html#reference). +or `.tfvars` files, see [atlantis.yaml Reference](../docs/repo-level-atlantis-yaml.md#reference). ### Manual Plan + To manually `plan` in a specific directory or workspace, comment on the pull request using the `-d` or `-w` flags: -``` + +```shell atlantis plan -d mydir atlantis plan -w staging ``` To add additional arguments to the underlying `terraform plan` you can use: -``` + +```shell atlantis plan -- -target=resource -var 'foo=bar' ``` ### Apply + If you'd like to `apply`, type a comment: `atlantis apply`. You can use the `-d` or `-w` flags to point Atlantis at a specific plan. Otherwise it tries to apply the plan for the root directory. ## Real-time logs -The [real-time terraform output](/docs/streaming-logs.md) for your command can be found by clicking into the status check for a given project in a PR which + +The [real-time terraform output](../docs/streaming-logs.md) for your command can be found by clicking into the status check for a given project in a PR which links to the log-streaming UI. This is a terminal UI where you can view your commands executing in real-time. ## Next Steps -* If things are working as expected you can `Ctrl-C` the `atlantis server` command and the `ngrok` command. -* Hopefully Atlantis is working with your repo and you're ready to move on to a [production-ready deployment](../docs/deployment.html). -* If it's not working as expected, you may need to customize how Atlantis runs with an `atlantis.yaml` file. -See [atlantis.yaml use cases](/docs/repo-level-atlantis-yaml.html#use-cases). -* Check out our [full documentation](../docs/) for more details. + +- If things are working as expected you can `Ctrl-C` the `atlantis server` command and the `ngrok` command. +- Hopefully Atlantis is working with your repo and you're ready to move on to a [production-ready deployment](../docs/deployment.md). +- If it's not working as expected, you may need to customize how Atlantis runs with an `atlantis.yaml` file. +See [atlantis.yaml use cases](../docs/repo-level-atlantis-yaml.md#use-cases). +- Check out our [full documentation](../docs.md) for more details. 
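If you do end up needing an `atlantis.yaml` file for that customization step, a minimal repo-level config might look like the sketch below (directory and workspace names are purely illustrative):

```yaml
# atlantis.yaml (illustrative)
version: 3
projects:
  - dir: project1            # hypothetical directory containing Terraform code
  - dir: project2            # hypothetical directory
    workspace: staging       # only needed if you use Terraform workspaces
```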
diff --git a/runatlantis.io/index.md b/runatlantis.io/index.md new file mode 100644 index 0000000000..e1f3e90d62 --- /dev/null +++ b/runatlantis.io/index.md @@ -0,0 +1,43 @@ +--- +# https://vitepress.dev/reference/default-theme-home-page +layout: home + +pageClass: home-custom + +hero: + name: Atlantis + text: Terraform Pull Request Automation + tagline: Running Terraform Workflows with Ease + image: /hero.png + actions: + - theme: brand + text: Get Started + link: /guide + - theme: alt + text: What is Atlantis? + link: https://medium.com/runatlantis/introducing-atlantis-6570d6de7281 + - theme: alt + text: Join us on Slack + link: https://join.slack.com/t/atlantis-community/shared_invite/zt-9xlxtxtc-CUSKB1ATt_sQy6um~LDPNw + +features: + - title: Fewer Mistakes + details: "Catch errors in Terraform plan output before applying changes. Ensure changes are applied before merging." + icon: ✅ + - title: Empower Developers + details: "Developers can safely submit Terraform pull requests without credentials. Require approvals for applies." + icon: đŸ’ģ + - title: Instant Audit Logs + details: "Detailed logs for infrastructure changes, approvals, and user actions. Configure approvals for production changes." + icon: 📋 + - title: Proven at Scale + details: "Used by top companies to manage over 600 repos with 300 developers. In production since 2017." + icon: 🌍 + - title: Self-Hosted + details: "Your credentials remain secure. Deployable on VMs, Kubernetes, Fargate, etc. Supports GitHub, GitLab, Bitbucket, Azure DevOps." + icon: ⚙ī¸ + - title: Open Source + details: "Atlantis is an open source project with strong community support, powered by volunteer contributions." + icon: 🌐 + +--- diff --git a/runatlantis.io/.vuepress/public/apple-touch-icon-114x114.png b/runatlantis.io/public/apple-touch-icon-114x114.png similarity index 100% rename from runatlantis.io/.vuepress/public/apple-touch-icon-114x114.png rename to runatlantis.io/public/apple-touch-icon-114x114.png diff --git a/runatlantis.io/.vuepress/public/apple-touch-icon-120x120.png b/runatlantis.io/public/apple-touch-icon-120x120.png similarity index 100% rename from runatlantis.io/.vuepress/public/apple-touch-icon-120x120.png rename to runatlantis.io/public/apple-touch-icon-120x120.png diff --git a/runatlantis.io/.vuepress/public/apple-touch-icon-144x144.png b/runatlantis.io/public/apple-touch-icon-144x144.png similarity index 100% rename from runatlantis.io/.vuepress/public/apple-touch-icon-144x144.png rename to runatlantis.io/public/apple-touch-icon-144x144.png diff --git a/runatlantis.io/.vuepress/public/apple-touch-icon-152x152.png b/runatlantis.io/public/apple-touch-icon-152x152.png similarity index 100% rename from runatlantis.io/.vuepress/public/apple-touch-icon-152x152.png rename to runatlantis.io/public/apple-touch-icon-152x152.png diff --git a/runatlantis.io/.vuepress/public/apple-touch-icon-57x57.png b/runatlantis.io/public/apple-touch-icon-57x57.png similarity index 100% rename from runatlantis.io/.vuepress/public/apple-touch-icon-57x57.png rename to runatlantis.io/public/apple-touch-icon-57x57.png diff --git a/runatlantis.io/.vuepress/public/apple-touch-icon-60x60.png b/runatlantis.io/public/apple-touch-icon-60x60.png similarity index 100% rename from runatlantis.io/.vuepress/public/apple-touch-icon-60x60.png rename to runatlantis.io/public/apple-touch-icon-60x60.png diff --git a/runatlantis.io/.vuepress/public/apple-touch-icon-72x72.png b/runatlantis.io/public/apple-touch-icon-72x72.png similarity index 100% rename from 
runatlantis.io/.vuepress/public/apple-touch-icon-72x72.png rename to runatlantis.io/public/apple-touch-icon-72x72.png diff --git a/runatlantis.io/.vuepress/public/apple-touch-icon-76x76.png b/runatlantis.io/public/apple-touch-icon-76x76.png similarity index 100% rename from runatlantis.io/.vuepress/public/apple-touch-icon-76x76.png rename to runatlantis.io/public/apple-touch-icon-76x76.png diff --git a/runatlantis.io/.vuepress/public/favicon-128.png b/runatlantis.io/public/favicon-128.png similarity index 100% rename from runatlantis.io/.vuepress/public/favicon-128.png rename to runatlantis.io/public/favicon-128.png diff --git a/runatlantis.io/.vuepress/public/favicon-16x16.png b/runatlantis.io/public/favicon-16x16.png similarity index 100% rename from runatlantis.io/.vuepress/public/favicon-16x16.png rename to runatlantis.io/public/favicon-16x16.png diff --git a/runatlantis.io/.vuepress/public/favicon-196x196.png b/runatlantis.io/public/favicon-196x196.png similarity index 100% rename from runatlantis.io/.vuepress/public/favicon-196x196.png rename to runatlantis.io/public/favicon-196x196.png diff --git a/runatlantis.io/.vuepress/public/favicon-32x32.png b/runatlantis.io/public/favicon-32x32.png similarity index 100% rename from runatlantis.io/.vuepress/public/favicon-32x32.png rename to runatlantis.io/public/favicon-32x32.png diff --git a/runatlantis.io/.vuepress/public/favicon-96x96.png b/runatlantis.io/public/favicon-96x96.png similarity index 100% rename from runatlantis.io/.vuepress/public/favicon-96x96.png rename to runatlantis.io/public/favicon-96x96.png diff --git a/runatlantis.io/.vuepress/public/favicon.ico b/runatlantis.io/public/favicon.ico similarity index 100% rename from runatlantis.io/.vuepress/public/favicon.ico rename to runatlantis.io/public/favicon.ico diff --git a/runatlantis.io/.vuepress/public/hero.png b/runatlantis.io/public/hero.png similarity index 100% rename from runatlantis.io/.vuepress/public/hero.png rename to runatlantis.io/public/hero.png diff --git a/runatlantis.io/.vuepress/public/hightower-super-dope.svg b/runatlantis.io/public/hightower-super-dope.svg similarity index 100% rename from runatlantis.io/.vuepress/public/hightower-super-dope.svg rename to runatlantis.io/public/hightower-super-dope.svg diff --git a/runatlantis.io/.vuepress/public/mstile-144x144.png b/runatlantis.io/public/mstile-144x144.png similarity index 100% rename from runatlantis.io/.vuepress/public/mstile-144x144.png rename to runatlantis.io/public/mstile-144x144.png diff --git a/runatlantis.io/.vuepress/public/mstile-150x150.png b/runatlantis.io/public/mstile-150x150.png similarity index 100% rename from runatlantis.io/.vuepress/public/mstile-150x150.png rename to runatlantis.io/public/mstile-150x150.png diff --git a/runatlantis.io/.vuepress/public/mstile-310x150.png b/runatlantis.io/public/mstile-310x150.png similarity index 100% rename from runatlantis.io/.vuepress/public/mstile-310x150.png rename to runatlantis.io/public/mstile-310x150.png diff --git a/runatlantis.io/.vuepress/public/mstile-310x310.png b/runatlantis.io/public/mstile-310x310.png similarity index 100% rename from runatlantis.io/.vuepress/public/mstile-310x310.png rename to runatlantis.io/public/mstile-310x310.png diff --git a/runatlantis.io/.vuepress/public/mstile-70x70.png b/runatlantis.io/public/mstile-70x70.png similarity index 100% rename from runatlantis.io/.vuepress/public/mstile-70x70.png rename to runatlantis.io/public/mstile-70x70.png diff --git a/scripts/download-release.sh b/scripts/download-release.sh 
index 9b3ea574d3..8b661bf1a1 100755 --- a/scripts/download-release.sh +++ b/scripts/download-release.sh @@ -1,8 +1,8 @@ #!/bin/sh COMMAND_NAME=${1:-terraform} TARGETPLATFORM=${2:-"linux/amd64"} -DEFAULT_VERSION=${3:-"1.6.2"} -AVAILABLE_VERSIONS=${4:-"1.6.2"} +DEFAULT_VERSION=${3:-"1.8.0"} +AVAILABLE_VERSIONS=${4:-"1.8.0"} case "${TARGETPLATFORM}" in "linux/amd64") ARCH=amd64 ;; "linux/arm64") ARCH=arm64 ;; @@ -13,11 +13,11 @@ for VERSION in ${AVAILABLE_VERSIONS}; do case "${COMMAND_NAME}" in "terraform") DOWNLOAD_URL_FORMAT=$(printf 'https://releases.hashicorp.com/terraform/%s/%s_%s' "$VERSION" "$COMMAND_NAME" "$VERSION") - COMMAND_DIR=/usr/local/bin/tf + COMMAND_DIR=/usr/local/bin/terraform ;; "tofu") DOWNLOAD_URL_FORMAT=$(printf 'https://github.com/opentofu/opentofu/releases/download/v%s/%s_%s' "$VERSION" "$COMMAND_NAME" "$VERSION") - COMMAND_DIR=/usr/local/bin/opentofu + COMMAND_DIR=/usr/local/bin/tofu ;; *) echo "ERROR: 'COMMAND_NAME' value unexpected: ${COMMAND_NAME}"; exit 1 ;; esac @@ -26,8 +26,8 @@ for VERSION in ${AVAILABLE_VERSIONS}; do sed -n "/${COMMAND_NAME}_${VERSION}_linux_${ARCH}.zip/p" "${COMMAND_NAME}_${VERSION}_SHA256SUMS" | sha256sum -c mkdir -p "${COMMAND_DIR}/${VERSION}" unzip "${COMMAND_NAME}_${VERSION}_linux_${ARCH}.zip" -d "${COMMAND_DIR}/${VERSION}" - ln -s "${COMMAND_DIR}/${VERSION}/${COMMAND_NAME}" "${COMMAND_NAME}${VERSION}" + ln -s "${COMMAND_DIR}/${VERSION}/${COMMAND_NAME}" "${COMMAND_DIR}/${COMMAND_NAME}${VERSION}" rm "${COMMAND_NAME}_${VERSION}_linux_${ARCH}.zip" rm "${COMMAND_NAME}_${VERSION}_SHA256SUMS" done -ln -s "${COMMAND_DIR}/${DEFAULT_VERSION}/${COMMAND_NAME}" "${COMMAND_NAME}" +ln -s "${COMMAND_DIR}/${DEFAULT_VERSION}/${COMMAND_NAME}" "${COMMAND_DIR}/${COMMAND_NAME}" diff --git a/scripts/fmt.sh b/scripts/fmt.sh new file mode 100755 index 0000000000..b8a5aef752 --- /dev/null +++ b/scripts/fmt.sh @@ -0,0 +1,23 @@ +#!/usr/bin/env bash + +set -euo pipefail + +go install golang.org/x/tools/cmd/goimports@latest + +gobin="$(go env GOPATH)/bin" +declare -r gobin + +declare -a files +readarray -d '' files < <(find . -type f -name '*.go' ! -name 'mock_*' ! -path './vendor/*' ! 
-path '**/mocks/*' -print0) +declare -r files + +output="$("${gobin}"/goimports -l "${files[@]}")" +declare -r output + +if [[ -n "$output" ]]; then + echo "These files had their 'import' changed - please fix them locally and push a fix" + + echo "$output" + + exit 1 +fi diff --git a/server/controllers/api_controller.go b/server/controllers/api_controller.go index 43e316bbdf..c48c99b41d 100644 --- a/server/controllers/api_controller.go +++ b/server/controllers/api_controller.go @@ -20,16 +20,19 @@ import ( const atlantisTokenHeader = "X-Atlantis-Token" type APIController struct { - APISecret []byte - Locker locking.Locker - Logger logging.SimpleLogging - Parser events.EventParsing - ProjectCommandBuilder events.ProjectCommandBuilder - ProjectPlanCommandRunner events.ProjectPlanCommandRunner - ProjectApplyCommandRunner events.ProjectApplyCommandRunner - RepoAllowlistChecker *events.RepoAllowlistChecker - Scope tally.Scope - VCSClient vcs.Client + APISecret []byte + Locker locking.Locker + Logger logging.SimpleLogging + Parser events.EventParsing + ProjectCommandBuilder events.ProjectCommandBuilder + ProjectPlanCommandRunner events.ProjectPlanCommandRunner + ProjectApplyCommandRunner events.ProjectApplyCommandRunner + FailOnPreWorkflowHookError bool + PreWorkflowHooksCommandRunner events.PreWorkflowHooksCommandRunner + PostWorkflowHooksCommandRunner events.PostWorkflowHooksCommandRunner + RepoAllowlistChecker *events.RepoAllowlistChecker + Scope tally.Scope + VCSClient vcs.Client } type APIRequest struct { @@ -44,7 +47,7 @@ type APIRequest struct { } } -func (a *APIRequest) getCommands(ctx *command.Context, cmdBuilder func(*command.Context, *events.CommentCommand) ([]command.ProjectContext, error)) ([]command.ProjectContext, error) { +func (a *APIRequest) getCommands(ctx *command.Context, cmdBuilder func(*command.Context, *events.CommentCommand) ([]command.ProjectContext, error)) ([]command.ProjectContext, []*events.CommentCommand, error) { cc := make([]*events.CommentCommand, 0) for _, project := range a.Projects { @@ -63,12 +66,12 @@ func (a *APIRequest) getCommands(ctx *command.Context, cmdBuilder func(*command. for _, commentCommand := range cc { projectCmds, err := cmdBuilder(ctx, commentCommand) if err != nil { - return nil, fmt.Errorf("failed to build command: %v", err) + return nil, nil, fmt.Errorf("failed to build command: %v", err) } cmds = append(cmds, projectCmds...) 
} - return cmds, nil + return cmds, cc, nil } func (a *APIController) apiReportError(w http.ResponseWriter, code int, err error) { @@ -142,29 +145,55 @@ func (a *APIController) Apply(w http.ResponseWriter, r *http.Request) { } func (a *APIController) apiPlan(request *APIRequest, ctx *command.Context) (*command.Result, error) { - cmds, err := request.getCommands(ctx, a.ProjectCommandBuilder.BuildPlanCommands) + cmds, cc, err := request.getCommands(ctx, a.ProjectCommandBuilder.BuildPlanCommands) if err != nil { return nil, err } var projectResults []command.ProjectResult - for _, cmd := range cmds { + for i, cmd := range cmds { + err = a.PreWorkflowHooksCommandRunner.RunPreHooks(ctx, cc[i]) + if err != nil { + ctx.Log.Err("Error running pre-workflow hooks %s.", err) + if a.FailOnPreWorkflowHookError { + return nil, err + } + } + res := a.ProjectPlanCommandRunner.Plan(cmd) projectResults = append(projectResults, res) + + err = a.PostWorkflowHooksCommandRunner.RunPostHooks(ctx, cc[i]) + if err != nil { + ctx.Log.Err("Error running post-workflow hooks %s.", err) + } } return &command.Result{ProjectResults: projectResults}, nil } func (a *APIController) apiApply(request *APIRequest, ctx *command.Context) (*command.Result, error) { - cmds, err := request.getCommands(ctx, a.ProjectCommandBuilder.BuildApplyCommands) + cmds, cc, err := request.getCommands(ctx, a.ProjectCommandBuilder.BuildApplyCommands) if err != nil { return nil, err } var projectResults []command.ProjectResult - for _, cmd := range cmds { + for i, cmd := range cmds { + err = a.PreWorkflowHooksCommandRunner.RunPreHooks(ctx, cc[i]) + if err != nil { + ctx.Log.Err("Error running pre-workflow hooks %s.", err) + if a.FailOnPreWorkflowHookError { + return nil, err + } + } + res := a.ProjectApplyCommandRunner.Apply(cmd) projectResults = append(projectResults, res) + + err = a.PostWorkflowHooksCommandRunner.RunPostHooks(ctx, cc[i]) + if err != nil { + ctx.Log.Err("Error running post-workflow hooks %s.", err) + } } return &command.Result{ProjectResults: projectResults}, nil } @@ -223,6 +252,7 @@ func (a *APIController) apiParseAndValidate(r *http.Request) (*APIRequest, *comm }, Scope: a.Scope, Log: a.Logger, + API: true, }, http.StatusOK, nil } diff --git a/server/controllers/api_controller_test.go b/server/controllers/api_controller_test.go index 1f2370ef08..3b3aa520aa 100644 --- a/server/controllers/api_controller_test.go +++ b/server/controllers/api_controller_test.go @@ -86,17 +86,27 @@ func setup(t *testing.T) (controllers.APIController, *MockProjectCommandBuilder, ApplySuccess: "success", }) + preWorkflowHooksCommandRunner := NewMockPreWorkflowHooksCommandRunner() + + When(preWorkflowHooksCommandRunner.RunPreHooks(Any[*command.Context](), Any[*events.CommentCommand]())).ThenReturn(nil) + + postWorkflowHooksCommandRunner := NewMockPostWorkflowHooksCommandRunner() + + When(postWorkflowHooksCommandRunner.RunPostHooks(Any[*command.Context](), Any[*events.CommentCommand]())).ThenReturn(nil) + ac := controllers.APIController{ - APISecret: []byte(atlantisToken), - Locker: locker, - Logger: logger, - Scope: scope, - Parser: parser, - ProjectCommandBuilder: projectCommandBuilder, - ProjectPlanCommandRunner: projectCommandRunner, - ProjectApplyCommandRunner: projectCommandRunner, - VCSClient: vcsClient, - RepoAllowlistChecker: repoAllowlistChecker, + APISecret: []byte(atlantisToken), + Locker: locker, + Logger: logger, + Scope: scope, + Parser: parser, + ProjectCommandBuilder: projectCommandBuilder, + ProjectPlanCommandRunner: 
projectCommandRunner, + ProjectApplyCommandRunner: projectCommandRunner, + PreWorkflowHooksCommandRunner: preWorkflowHooksCommandRunner, + PostWorkflowHooksCommandRunner: postWorkflowHooksCommandRunner, + VCSClient: vcsClient, + RepoAllowlistChecker: repoAllowlistChecker, } return ac, projectCommandBuilder, projectCommandRunner } diff --git a/server/controllers/events/events_controller.go b/server/controllers/events/events_controller.go index 91a7bf2592..ca29053519 100644 --- a/server/controllers/events/events_controller.go +++ b/server/controllers/events/events_controller.go @@ -371,7 +371,7 @@ func (e *VCSEventsController) handleGiteaPullRequestEvent(w http.ResponseWriter, e.respond(w, logging.Debug, http.StatusOK, response.body) } -// HandleGiteaCommentEvent handles comment events from Gitea where Atlantis commands can come from. +// HandleGiteaPullRequestCommentEvent handles comment events from Gitea where Atlantis commands can come from. func (e *VCSEventsController) HandleGiteaPullRequestCommentEvent(w http.ResponseWriter, body []byte, reqID string) { var event gitea.GiteaIssueCommentPayload if err := json.Unmarshal(body, &event); err != nil { diff --git a/server/controllers/events/testdata/test-repos/import-multiple-project/dir1/main.tf b/server/controllers/events/testdata/test-repos/import-multiple-project/dir1/main.tf index 2aa6a6437d..231579dd90 100644 --- a/server/controllers/events/testdata/test-repos/import-multiple-project/dir1/main.tf +++ b/server/controllers/events/testdata/test-repos/import-multiple-project/dir1/main.tf @@ -1,4 +1,3 @@ resource "random_id" "dummy1" { - keepers = {} byte_length = 1 } diff --git a/server/controllers/events/testdata/test-repos/import-multiple-project/dir2/main.tf b/server/controllers/events/testdata/test-repos/import-multiple-project/dir2/main.tf index 5292f29c85..97f93c35e1 100644 --- a/server/controllers/events/testdata/test-repos/import-multiple-project/dir2/main.tf +++ b/server/controllers/events/testdata/test-repos/import-multiple-project/dir2/main.tf @@ -1,4 +1,3 @@ resource "random_id" "dummy2" { - keepers = {} byte_length = 1 } diff --git a/server/controllers/events/testdata/test-repos/import-multiple-project/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/import-multiple-project/exp-output-autoplan.txt index b6116bfde9..d49fde3e8f 100644 --- a/server/controllers/events/testdata/test-repos/import-multiple-project/exp-output-autoplan.txt +++ b/server/controllers/events/testdata/test-repos/import-multiple-project/exp-output-autoplan.txt @@ -22,7 +22,6 @@ Terraform will perform the following actions: + dec = (known after apply) + hex = (known after apply) + id = (known after apply) - + keepers = {} } Plan: 1 to add, 0 to change, 0 to destroy. @@ -59,7 +58,6 @@ Terraform will perform the following actions: + dec = (known after apply) + hex = (known after apply) + id = (known after apply) - + keepers = {} } Plan: 1 to add, 0 to change, 0 to destroy. @@ -89,4 +87,4 @@ Plan: 1 to add, 0 to change, 0 to destroy. 
* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: ```shell atlantis unlock - ``` \ No newline at end of file + ``` diff --git a/server/controllers/events/testdata/test-repos/import-multiple-project/exp-output-plan-again.txt b/server/controllers/events/testdata/test-repos/import-multiple-project/exp-output-plan-again.txt index 9955b4f2c7..f94c8567ed 100644 --- a/server/controllers/events/testdata/test-repos/import-multiple-project/exp-output-plan-again.txt +++ b/server/controllers/events/testdata/test-repos/import-multiple-project/exp-output-plan-again.txt @@ -43,7 +43,6 @@ Terraform will perform the following actions: + dec = (known after apply) + hex = (known after apply) + id = (known after apply) - + keepers = {} } Plan: 1 to add, 0 to change, 0 to destroy. @@ -73,4 +72,4 @@ Plan: 1 to add, 0 to change, 0 to destroy. * :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: ```shell atlantis unlock - ``` \ No newline at end of file + ``` diff --git a/server/controllers/events/testdata/test-repos/import-single-project-var/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/import-single-project-var/exp-output-autoplan.txt index 4d597951a9..ddcccae10a 100644 --- a/server/controllers/events/testdata/test-repos/import-single-project-var/exp-output-autoplan.txt +++ b/server/controllers/events/testdata/test-repos/import-single-project-var/exp-output-autoplan.txt @@ -17,7 +17,6 @@ Terraform will perform the following actions: + dec = (known after apply) + hex = (known after apply) + id = (known after apply) - + keepers = {} } # random_id.for_each["default"] will be created @@ -28,7 +27,6 @@ Terraform will perform the following actions: + dec = (known after apply) + hex = (known after apply) + id = (known after apply) - + keepers = {} } Plan: 2 to add, 0 to change, 0 to destroy. @@ -54,4 +52,4 @@ Plan: 2 to add, 0 to change, 0 to destroy. * :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: ```shell atlantis unlock - ``` \ No newline at end of file + ``` diff --git a/server/controllers/events/testdata/test-repos/import-single-project-var/main.tf b/server/controllers/events/testdata/test-repos/import-single-project-var/main.tf index f7bf7839d0..082a9534c0 100644 --- a/server/controllers/events/testdata/test-repos/import-single-project-var/main.tf +++ b/server/controllers/events/testdata/test-repos/import-single-project-var/main.tf @@ -1,12 +1,10 @@ resource "random_id" "for_each" { for_each = toset([var.var]) - keepers = {} byte_length = 1 } resource "random_id" "count" { count = 1 - keepers = {} byte_length = 1 } diff --git a/server/controllers/events/testdata/test-repos/import-single-project/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/import-single-project/exp-output-autoplan.txt index 45007d2b8f..9c9fa29474 100644 --- a/server/controllers/events/testdata/test-repos/import-single-project/exp-output-autoplan.txt +++ b/server/controllers/events/testdata/test-repos/import-single-project/exp-output-autoplan.txt @@ -17,7 +17,6 @@ Terraform will perform the following actions: + dec = (known after apply) + hex = (known after apply) + id = (known after apply) - + keepers = {} } # random_id.dummy2 will be created @@ -28,7 +27,6 @@ Terraform will perform the following actions: + dec = (known after apply) + hex = (known after apply) + id = (known after apply) - + keepers = {} } Plan: 2 to add, 0 to change, 0 to destroy. 
@@ -54,4 +52,4 @@ Plan: 2 to add, 0 to change, 0 to destroy. * :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: ```shell atlantis unlock - ``` \ No newline at end of file + ``` diff --git a/server/controllers/events/testdata/test-repos/import-single-project/main.tf b/server/controllers/events/testdata/test-repos/import-single-project/main.tf index 2e60a118f5..0a1884fe5e 100644 --- a/server/controllers/events/testdata/test-repos/import-single-project/main.tf +++ b/server/controllers/events/testdata/test-repos/import-single-project/main.tf @@ -1,9 +1,7 @@ resource "random_id" "dummy1" { - keepers = {} byte_length = 1 } resource "random_id" "dummy2" { - keepers = {} byte_length = 1 } diff --git a/server/controllers/events/testdata/test-repos/import-workspace/dir1/main.tf b/server/controllers/events/testdata/test-repos/import-workspace/dir1/main.tf index de0cb8d4a2..0bc18fe1e3 100644 --- a/server/controllers/events/testdata/test-repos/import-workspace/dir1/main.tf +++ b/server/controllers/events/testdata/test-repos/import-workspace/dir1/main.tf @@ -1,14 +1,12 @@ resource "random_id" "dummy1" { count = terraform.workspace == "ops" ? 1 : 0 - keepers = {} byte_length = 1 } resource "random_id" "dummy2" { count = terraform.workspace == "ops" ? 1 : 0 - keepers = {} byte_length = 1 } diff --git a/server/controllers/events/testdata/test-repos/state-rm-multiple-project/dir1/main.tf b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/dir1/main.tf index 1af2266d40..0c4b79e3f8 100644 --- a/server/controllers/events/testdata/test-repos/state-rm-multiple-project/dir1/main.tf +++ b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/dir1/main.tf @@ -1,4 +1,3 @@ resource "random_id" "dummy" { - keepers = {} byte_length = 1 } diff --git a/server/controllers/events/testdata/test-repos/state-rm-multiple-project/dir2/main.tf b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/dir2/main.tf index 1af2266d40..0c4b79e3f8 100644 --- a/server/controllers/events/testdata/test-repos/state-rm-multiple-project/dir2/main.tf +++ b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/dir2/main.tf @@ -1,4 +1,3 @@ resource "random_id" "dummy" { - keepers = {} byte_length = 1 } diff --git a/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-autoplan.txt index fe62683dad..1de0174378 100644 --- a/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-autoplan.txt +++ b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-autoplan.txt @@ -22,7 +22,6 @@ Terraform will perform the following actions: + dec = (known after apply) + hex = (known after apply) + id = (known after apply) - + keepers = {} } Plan: 1 to add, 0 to change, 0 to destroy. @@ -59,7 +58,6 @@ Terraform will perform the following actions: + dec = (known after apply) + hex = (known after apply) + id = (known after apply) - + keepers = {} } Plan: 1 to add, 0 to change, 0 to destroy. @@ -89,4 +87,4 @@ Plan: 1 to add, 0 to change, 0 to destroy. 
* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: ```shell atlantis unlock - ``` \ No newline at end of file + ``` diff --git a/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-plan-again.txt b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-plan-again.txt index fe62683dad..1de0174378 100644 --- a/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-plan-again.txt +++ b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-plan-again.txt @@ -22,7 +22,6 @@ Terraform will perform the following actions: + dec = (known after apply) + hex = (known after apply) + id = (known after apply) - + keepers = {} } Plan: 1 to add, 0 to change, 0 to destroy. @@ -59,7 +58,6 @@ Terraform will perform the following actions: + dec = (known after apply) + hex = (known after apply) + id = (known after apply) - + keepers = {} } Plan: 1 to add, 0 to change, 0 to destroy. @@ -89,4 +87,4 @@ Plan: 1 to add, 0 to change, 0 to destroy. * :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: ```shell atlantis unlock - ``` \ No newline at end of file + ``` diff --git a/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-autoplan.txt index 530e9df755..3728b1b223 100644 --- a/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-autoplan.txt +++ b/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-autoplan.txt @@ -17,7 +17,6 @@ Terraform will perform the following actions: + dec = (known after apply) + hex = (known after apply) + id = (known after apply) - + keepers = {} } # random_id.for_each["default"] will be created @@ -28,7 +27,6 @@ Terraform will perform the following actions: + dec = (known after apply) + hex = (known after apply) + id = (known after apply) - + keepers = {} } # random_id.simple will be created @@ -39,7 +37,6 @@ Terraform will perform the following actions: + dec = (known after apply) + hex = (known after apply) + id = (known after apply) - + keepers = {} } Plan: 3 to add, 0 to change, 0 to destroy. @@ -65,4 +62,4 @@ Plan: 3 to add, 0 to change, 0 to destroy. 
* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: ```shell atlantis unlock - ``` \ No newline at end of file + ``` diff --git a/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-plan-again.txt b/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-plan-again.txt index 548bf843a2..288ee1df89 100644 --- a/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-plan-again.txt +++ b/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-plan-again.txt @@ -17,7 +17,6 @@ Terraform will perform the following actions: + dec = (known after apply) + hex = (known after apply) + id = (known after apply) - + keepers = {} } # random_id.for_each["overridden"] will be created @@ -28,7 +27,6 @@ Terraform will perform the following actions: + dec = (known after apply) + hex = (known after apply) + id = (known after apply) - + keepers = {} } # random_id.simple will be created @@ -39,7 +37,6 @@ Terraform will perform the following actions: + dec = (known after apply) + hex = (known after apply) + id = (known after apply) - + keepers = {} } Plan: 3 to add, 0 to change, 0 to destroy. @@ -65,4 +62,4 @@ Plan: 3 to add, 0 to change, 0 to destroy. * :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: ```shell atlantis unlock - ``` \ No newline at end of file + ``` diff --git a/server/controllers/events/testdata/test-repos/state-rm-single-project/main.tf b/server/controllers/events/testdata/test-repos/state-rm-single-project/main.tf index d434ac8645..05e52a00b2 100644 --- a/server/controllers/events/testdata/test-repos/state-rm-single-project/main.tf +++ b/server/controllers/events/testdata/test-repos/state-rm-single-project/main.tf @@ -1,17 +1,14 @@ resource "random_id" "simple" { - keepers = {} byte_length = 1 } resource "random_id" "for_each" { for_each = toset([var.var]) - keepers = {} byte_length = 1 } resource "random_id" "count" { count = 1 - keepers = {} byte_length = 1 } diff --git a/server/controllers/events/testdata/test-repos/state-rm-workspace/dir1/main.tf b/server/controllers/events/testdata/test-repos/state-rm-workspace/dir1/main.tf index 353cb66e31..3056320d04 100644 --- a/server/controllers/events/testdata/test-repos/state-rm-workspace/dir1/main.tf +++ b/server/controllers/events/testdata/test-repos/state-rm-workspace/dir1/main.tf @@ -1,7 +1,6 @@ resource "random_id" "dummy1" { count = terraform.workspace == "ops" ? 1 : 0 - keepers = {} byte_length = 1 } diff --git a/server/controllers/events/testdata/test-repos/state-rm-workspace/exp-output-plan-again.txt b/server/controllers/events/testdata/test-repos/state-rm-workspace/exp-output-plan-again.txt index e1ea612f2f..632b3cf24c 100644 --- a/server/controllers/events/testdata/test-repos/state-rm-workspace/exp-output-plan-again.txt +++ b/server/controllers/events/testdata/test-repos/state-rm-workspace/exp-output-plan-again.txt @@ -17,7 +17,6 @@ Terraform will perform the following actions: + dec = (known after apply) + hex = (known after apply) + id = (known after apply) - + keepers = {} } Plan: 1 to add, 0 to change, 0 to destroy. @@ -43,4 +42,4 @@ Plan: 1 to add, 0 to change, 0 to destroy. 
* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: ```shell atlantis unlock - ``` \ No newline at end of file + ``` diff --git a/server/controllers/web_templates/mocks/mock_template_writer.go b/server/controllers/web_templates/mocks/mock_template_writer.go index e3fafa580c..5d3e33a2ef 100644 --- a/server/controllers/web_templates/mocks/mock_template_writer.go +++ b/server/controllers/web_templates/mocks/mock_template_writer.go @@ -1,5 +1,5 @@ // Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/controllers/templates (interfaces: TemplateWriter) +// Source: github.com/runatlantis/atlantis/server/controllers/web_templates (interfaces: TemplateWriter) package mocks diff --git a/server/controllers/web_templates/templates/github-app.html.tmpl b/server/controllers/web_templates/templates/github-app.html.tmpl new file mode 100644 index 0000000000..34ce01550d --- /dev/null +++ b/server/controllers/web_templates/templates/github-app.html.tmpl @@ -0,0 +1,81 @@ + + + + + atlantis + + + + + + + + + + + +
+
+ +

atlantis

+ +

+ {{ if .Target }} + Create a github app + {{ else }} + Github app created successfully! + {{ end }} +

+
+
+ {{ if .Target }} +
+ + +
+ {{ else }} +

Visit {{ .URL }}/installations/new to install the app for your user or organization, then update the following values in your config and restart Atlantis:

+ +
    +
  • gh-app-id:
    {{ .ID }}
  • +
  • gh-app-key-file:
    {{ .Key }}
  • +
  • gh-webhook-secret:
    {{ .WebhookSecret }}
  • +
+ {{ end }} +
+
+ + diff --git a/server/controllers/web_templates/templates/index.html.tmpl b/server/controllers/web_templates/templates/index.html.tmpl new file mode 100644 index 0000000000..b9021f9b61 --- /dev/null +++ b/server/controllers/web_templates/templates/index.html.tmpl @@ -0,0 +1,243 @@ + + + + + atlantis + + + + + + + + + + + +
+
+ +

atlantis

+

Plan discarded and unlocked!

+
+
+ {{ if .ApplyLock.GlobalApplyLockEnabled }} + {{ if .ApplyLock.Locked }} +
+
Apply commands are disabled globally
+
Lock Status: Active
+
Active Since: {{ .ApplyLock.TimeFormatted }}
+ Enable Apply Commands +
+ {{ else }} +
+
Apply commands are enabled
+ Disable Apply Commands +
+ {{ end }} + {{ end }} +
+
+
+
+
+

Locks

+ {{ $basePath := .CleanedBasePath }} + {{ if .Locks }} +
+
+ Repository + Project + Workspace + Locked By + Date/Time + Status +
+ {{ range .Locks }} + + {{ end }} +
+ {{ else }} +

No locks found.

+ {{ end }} +
+
+
+
+
+

Jobs

+ {{ if .PullToJobMapping }} +
+
+ Repository + Project + Workspace + Date/Time + Step + Description +
+ {{ range .PullToJobMapping }} +
+ {{ .Pull.RepoFullName }} #{{ .Pull.PullNum }} + {{ if .Pull.Path }}{{ .Pull.Path }}{{ end }} + {{ if .Pull.Workspace }}{{ .Pull.Workspace }}{{ end }} + + {{ range .JobIDInfos }} +
{{ .TimeFormatted }}
+ {{ end }} +
+ + {{ range .JobIDInfos }} + + {{ end }} + + + {{ range .JobIDInfos }} +
{{ .JobDescription }}
+ {{ end }} +
+
+ {{ end }} +
+ {{ else }} +

No jobs found.

+ {{ end }} +
+ + +
+
+{{ .AtlantisVersion }} +
+ + + diff --git a/server/controllers/web_templates/templates/lock.html.tmpl b/server/controllers/web_templates/templates/lock.html.tmpl new file mode 100644 index 0000000000..56bf25a06b --- /dev/null +++ b/server/controllers/web_templates/templates/lock.html.tmpl @@ -0,0 +1,97 @@ + + + + + atlantis + + + + + + + + + + +
+
+ +

atlantis

+

{{.LockKey}} Locked

+
+ +
+
+
+
Repo Owner:
{{.RepoOwner}}
+
Repo Name:
{{.RepoName}}
+
Pull Request Link:
+
Locked By:
{{.LockedBy}}
+
Workspace:
{{.Workspace}}
+
+
+ Discard Plan & Unlock +
+
+ +
+v{{ .AtlantisVersion }} +
+ + + \ No newline at end of file diff --git a/server/controllers/web_templates/templates/project-jobs-error.html.tmpl b/server/controllers/web_templates/templates/project-jobs-error.html.tmpl new file mode 100644 index 0000000000..8eead799b7 --- /dev/null +++ b/server/controllers/web_templates/templates/project-jobs-error.html.tmpl @@ -0,0 +1,59 @@ + + + + + atlantis + + + + + + + + + + + +
+
+ +

atlantis

+

+
+
+
+
+
+
+
+
+
+ + + + + + + + + diff --git a/server/controllers/web_templates/templates/project-jobs.html.tmpl b/server/controllers/web_templates/templates/project-jobs.html.tmpl new file mode 100644 index 0000000000..aaeb222568 --- /dev/null +++ b/server/controllers/web_templates/templates/project-jobs.html.tmpl @@ -0,0 +1,95 @@ + + + + + atlantis + + + + + + + + + + + +
+ +

atlantis

+

+
+
+
+
+ +
Initializing... +
+ + + + + + + + + + + diff --git a/server/controllers/web_templates/web_templates.go b/server/controllers/web_templates/web_templates.go index c3be8e4daa..0794c80fba 100644 --- a/server/controllers/web_templates/web_templates.go +++ b/server/controllers/web_templates/web_templates.go @@ -14,15 +14,31 @@ package web_templates import ( + "embed" "html/template" "io" "time" + "github.com/Masterminds/sprig/v3" "github.com/runatlantis/atlantis/server/jobs" ) //go:generate pegomock generate --package mocks -o mocks/mock_template_writer.go TemplateWriter +//go:embed templates/* +var templatesFS embed.FS + +// Read all the templates from the embedded filesystem +var templates, _ = template.New("").Funcs(sprig.TxtFuncMap()).ParseFS(templatesFS, "templates/*.tmpl") + +var templateFileNames = map[string]string{ + "index": "index.html.tmpl", + "lock": "lock.html.tmpl", + "project-jobs": "project-jobs.html.tmpl", + "project-jobs-error": "project-jobs-error.html.tmpl", + "github-app": "github-app.html.tmpl", +} + // TemplateWriter is an interface over html/template that's used to enable // mocking. type TemplateWriter interface { @@ -64,251 +80,7 @@ type IndexData struct { CleanedBasePath string } -var IndexTemplate = template.Must(template.New("index.html.tmpl").Parse(` - - - - - atlantis - - - - - - - - - - - -
-
- -

atlantis

-

Plan discarded and unlocked!

-
-
- {{ if .ApplyLock.GlobalApplyLockEnabled }} - {{ if .ApplyLock.Locked }} -
-
Apply commands are disabled globally
-
Lock Status: Active
-
Active Since: {{ .ApplyLock.TimeFormatted }}
- Enable Apply Commands -
- {{ else }} -
-
Apply commands are enabled
- Disable Apply Commands -
- {{ end }} - {{ end }} -
-
-
-
-
-

Locks

- {{ $basePath := .CleanedBasePath }} - {{ if .Locks }} -
-
- Repository - Project - Workspace - Locked By - Date/Time - Status -
- {{ range .Locks }} - - {{ end }} -
- {{ else }} -

No locks found.

- {{ end }} -
-
-
-
-
-

Jobs

- {{ if .PullToJobMapping }} -
-
- Repository - Project - Workspace - Date/Time - Step - Description -
- {{ range .PullToJobMapping }} -
- {{ .Pull.RepoFullName }} #{{ .Pull.PullNum }} - {{ if .Pull.Path }}{{ .Pull.Path }}{{ end }} - {{ if .Pull.Workspace }}{{ .Pull.Workspace }}{{ end }} - - {{ range .JobIDInfos }} -
{{ .TimeFormatted }}
- {{ end }} -
- - {{ range .JobIDInfos }} - - {{ end }} - - - {{ range .JobIDInfos }} -
{{ .JobDescription }}
- {{ end }} -
-
- {{ end }} -
- {{ else }} -

No jobs found.

- {{ end }} -
- - -
-
-{{ .AtlantisVersion }} -
- - - -`)) +var IndexTemplate = templates.Lookup(templateFileNames["index"]) // LockDetailData holds the fields needed to display the lock detail view. type LockDetailData struct { @@ -326,105 +98,7 @@ type LockDetailData struct { CleanedBasePath string } -var LockTemplate = template.Must(template.New("lock.html.tmpl").Parse(` - - - - - atlantis - - - - - - - - - - -
-
- -

atlantis

-

{{.LockKey}} Locked

-
- -
-
-
-
Repo Owner:
{{.RepoOwner}}
-
Repo Name:
{{.RepoName}}
-
Pull Request Link:
-
Locked By:
{{.LockedBy}}
-
Workspace:
{{.Workspace}}
-
-
- Discard Plan & Unlock -
-
- -
-v{{ .AtlantisVersion }} -
- - - -`)) +var LockTemplate = templates.Lookup(templateFileNames["lock"]) // ProjectJobData holds the data needed to stream the current PR information type ProjectJobData struct { @@ -433,103 +107,7 @@ type ProjectJobData struct { CleanedBasePath string } -var ProjectJobsTemplate = template.Must(template.New("blank.html.tmpl").Parse(` - - - - - atlantis - - - - - - - - - - - -
- -

atlantis

-

-
-
-
-
- -
Initializing... -
- - - - - - - - - - - -`)) +var ProjectJobsTemplate = templates.Lookup(templateFileNames["project-jobs"]) type ProjectJobsError struct { AtlantisVersion string @@ -537,67 +115,7 @@ type ProjectJobsError struct { CleanedBasePath string } -var ProjectJobsErrorTemplate = template.Must(template.New("blank.html.tmpl").Parse(` - - - - - atlantis - - - - - - - - - - - -
-
- -

atlantis

-

-
-
-
-
-
-
-
-
-
- - - - - - - - - -`)) +var ProjectJobsErrorTemplate = templates.Lookup(templateFileNames["project-jobs-error"]) // GithubSetupData holds the data for rendering the github app setup page type GithubSetupData struct { @@ -610,86 +128,4 @@ type GithubSetupData struct { CleanedBasePath string } -var GithubAppSetupTemplate = template.Must(template.New("github-app.html.tmpl").Parse(` - - - - - atlantis - - - - - - - - - - - -
-
- -

atlantis

- -

- {{ if .Target }} - Create a github app - {{ else }} - Github app created successfully! - {{ end }} -

-
-
- {{ if .Target }} -
- - -
- {{ else }} -

Visit {{ .URL }}/installations/new to install the app for your user or organization, then update the following values in your config and restart Atlantis:

- -
    -
  • gh-app-id:
    {{ .ID }}
  • -
  • gh-app-key-file:
    {{ .Key }}
  • -
  • gh-webhook-secret:
    {{ .WebhookSecret }}
  • -
- {{ end }} -
-
- - -`)) +var GithubAppSetupTemplate = templates.Lookup(templateFileNames["github-app"]) diff --git a/server/controllers/web_templates/web_templates_test.go b/server/controllers/web_templates/web_templates_test.go index 22fd4e90fe..0ce6f00a9a 100644 --- a/server/controllers/web_templates/web_templates_test.go +++ b/server/controllers/web_templates/web_templates_test.go @@ -19,13 +19,13 @@ func TestIndexTemplate(t *testing.T) { Path: "path", Workspace: "workspace", Time: time.Now(), - TimeFormatted: "02-01-2006 15:04:05", + TimeFormatted: "2006-01-02 15:04:05", }, }, ApplyLock: ApplyLockData{ Locked: true, Time: time.Now(), - TimeFormatted: "02-01-2006 15:04:05", + TimeFormatted: "2006-01-02 15:04:05", }, AtlantisVersion: "v0.0.0", CleanedBasePath: "/path", diff --git a/server/core/redis/redis.go b/server/core/redis/redis.go index 030cd15b82..2afe336ce4 100644 --- a/server/core/redis/redis.go +++ b/server/core/redis/redis.go @@ -238,7 +238,7 @@ func (r *RedisDB) CheckCommandLock(cmdName command.Name) (*command.Lock, error) return &cmdLock, err } -// UpdatePullWithResults updates pull's status with the latest project results. +// UpdateProjectStatus updates pull's status with the latest project results. // It returns the new PullStatus object. func (r *RedisDB) UpdateProjectStatus(pull models.PullRequest, workspace string, repoRelDir string, newStatus models.ProjectPlanStatus) error { key, err := r.pullKey(pull) diff --git a/server/core/runtime/apply_step_runner.go b/server/core/runtime/apply_step_runner.go index eb1633eea0..2e223f2996 100644 --- a/server/core/runtime/apply_step_runner.go +++ b/server/core/runtime/apply_step_runner.go @@ -12,6 +12,7 @@ import ( version "github.com/hashicorp/go-version" "github.com/runatlantis/atlantis/server/events/command" "github.com/runatlantis/atlantis/server/events/models" + "github.com/runatlantis/atlantis/server/utils" ) // ApplyStepRunner runs `terraform apply`. @@ -56,7 +57,7 @@ func (a *ApplyStepRunner) Run(ctx command.ProjectContext, extraArgs []string, pa // If the apply was successful, delete the plan. if err == nil { ctx.Log.Info("apply successful, deleting planfile") - if removeErr := os.Remove(planPath); removeErr != nil { + if removeErr := utils.RemoveIgnoreNonExistent(planPath); removeErr != nil { ctx.Log.Warn("failed to delete planfile after successful apply: %s", removeErr) } } @@ -116,7 +117,6 @@ func (a *ApplyStepRunner) runRemoteApply( absPlanPath string, tfVersion *version.Version, envs map[string]string) (string, error) { - // The planfile contents are needed to ensure that the plan didn't change // between plan and apply phases. 
planfileBytes, err := os.ReadFile(absPlanPath) diff --git a/server/core/runtime/import_step_runner.go b/server/core/runtime/import_step_runner.go index 2f4cb8c51c..0d5787a8ad 100644 --- a/server/core/runtime/import_step_runner.go +++ b/server/core/runtime/import_step_runner.go @@ -6,6 +6,7 @@ import ( version "github.com/hashicorp/go-version" "github.com/runatlantis/atlantis/server/events/command" + "github.com/runatlantis/atlantis/server/utils" ) type importStepRunner struct { @@ -37,7 +38,7 @@ func (p *importStepRunner) Run(ctx command.ProjectContext, extraArgs []string, p if err == nil { if _, planPathErr := os.Stat(planPath); !os.IsNotExist(planPathErr) { ctx.Log.Info("import successful, deleting planfile") - if removeErr := os.Remove(planPath); removeErr != nil { + if removeErr := utils.RemoveIgnoreNonExistent(planPath); removeErr != nil { ctx.Log.Warn("failed to delete planfile after successful import: %s", removeErr) } } diff --git a/server/core/runtime/init_step_runner.go b/server/core/runtime/init_step_runner.go index cd3ab32810..0c6de1b013 100644 --- a/server/core/runtime/init_step_runner.go +++ b/server/core/runtime/init_step_runner.go @@ -1,12 +1,12 @@ package runtime import ( - "os" "path/filepath" version "github.com/hashicorp/go-version" "github.com/runatlantis/atlantis/server/core/runtime/common" "github.com/runatlantis/atlantis/server/events/command" + "github.com/runatlantis/atlantis/server/utils" ) // InitStep runs `terraform init`. @@ -21,14 +21,13 @@ func (i *InitStepRunner) Run(ctx command.ProjectContext, extraArgs []string, pat terraformLockFileTracked, err := common.IsFileTracked(path, lockFileName) if err != nil { ctx.Log.Warn("Error checking if %s is tracked in %s", lockFileName, path) - } // If .terraform.lock.hcl is not tracked in git and it exists prior to init // delete it as it probably has been created by a previous run of // terraform init if common.FileExists(terraformLockfilePath) && !terraformLockFileTracked { ctx.Log.Debug("Deleting `%s` that was generated by previous terraform init", terraformLockfilePath) - delErr := os.Remove(terraformLockfilePath) + delErr := utils.RemoveIgnoreNonExistent(terraformLockfilePath) if delErr != nil { ctx.Log.Info("Error Deleting `%s`", lockFileName) } diff --git a/server/core/runtime/mocks/mock_pull_approved_checker.go b/server/core/runtime/mocks/mock_pull_approved_checker.go index 13e1a3a834..fc43172cee 100644 --- a/server/core/runtime/mocks/mock_pull_approved_checker.go +++ b/server/core/runtime/mocks/mock_pull_approved_checker.go @@ -6,6 +6,7 @@ package mocks import ( pegomock "github.com/petergtz/pegomock/v4" models "github.com/runatlantis/atlantis/server/events/models" + logging "github.com/runatlantis/atlantis/server/logging" "reflect" "time" ) @@ -25,11 +26,11 @@ func NewMockPullApprovedChecker(options ...pegomock.Option) *MockPullApprovedChe func (mock *MockPullApprovedChecker) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } func (mock *MockPullApprovedChecker) FailHandler() pegomock.FailHandler { return mock.fail } -func (mock *MockPullApprovedChecker) PullIsApproved(baseRepo models.Repo, pull models.PullRequest) (models.ApprovalStatus, error) { +func (mock *MockPullApprovedChecker) PullIsApproved(logger logging.SimpleLogging, baseRepo models.Repo, pull models.PullRequest) (models.ApprovalStatus, error) { if mock == nil { panic("mock must not be nil. 
Use myMock := NewMockPullApprovedChecker().") } - params := []pegomock.Param{baseRepo, pull} + params := []pegomock.Param{logger, baseRepo, pull} result := pegomock.GetGenericMockFrom(mock).Invoke("PullIsApproved", params, []reflect.Type{reflect.TypeOf((*models.ApprovalStatus)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) var ret0 models.ApprovalStatus var ret1 error @@ -81,8 +82,8 @@ type VerifierMockPullApprovedChecker struct { timeout time.Duration } -func (verifier *VerifierMockPullApprovedChecker) PullIsApproved(baseRepo models.Repo, pull models.PullRequest) *MockPullApprovedChecker_PullIsApproved_OngoingVerification { - params := []pegomock.Param{baseRepo, pull} +func (verifier *VerifierMockPullApprovedChecker) PullIsApproved(logger logging.SimpleLogging, baseRepo models.Repo, pull models.PullRequest) *MockPullApprovedChecker_PullIsApproved_OngoingVerification { + params := []pegomock.Param{logger, baseRepo, pull} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "PullIsApproved", params, verifier.timeout) return &MockPullApprovedChecker_PullIsApproved_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } @@ -92,21 +93,25 @@ type MockPullApprovedChecker_PullIsApproved_OngoingVerification struct { methodInvocations []pegomock.MethodInvocation } -func (c *MockPullApprovedChecker_PullIsApproved_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest) { - baseRepo, pull := c.GetAllCapturedArguments() - return baseRepo[len(baseRepo)-1], pull[len(pull)-1] +func (c *MockPullApprovedChecker_PullIsApproved_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, models.PullRequest) { + logger, baseRepo, pull := c.GetAllCapturedArguments() + return logger[len(logger)-1], baseRepo[len(baseRepo)-1], pull[len(pull)-1] } -func (c *MockPullApprovedChecker_PullIsApproved_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest) { +func (c *MockPullApprovedChecker_PullIsApproved_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 []models.PullRequest) { params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) + _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) for u, param := range params[0] { - _param0[u] = param.(models.Repo) + _param0[u] = param.(logging.SimpleLogging) } - _param1 = make([]models.PullRequest, len(c.methodInvocations)) + _param1 = make([]models.Repo, len(c.methodInvocations)) for u, param := range params[1] { - _param1[u] = param.(models.PullRequest) + _param1[u] = param.(models.Repo) + } + _param2 = make([]models.PullRequest, len(c.methodInvocations)) + for u, param := range params[2] { + _param2[u] = param.(models.PullRequest) } } return diff --git a/server/core/runtime/state_rm_step_runner.go b/server/core/runtime/state_rm_step_runner.go index 74a0d18875..3b4a08f102 100644 --- a/server/core/runtime/state_rm_step_runner.go +++ b/server/core/runtime/state_rm_step_runner.go @@ -6,6 +6,7 @@ import ( version "github.com/hashicorp/go-version" "github.com/runatlantis/atlantis/server/events/command" + "github.com/runatlantis/atlantis/server/utils" ) type stateRmStepRunner struct { @@ -37,7 +38,7 @@ func (p *stateRmStepRunner) Run(ctx command.ProjectContext, extraArgs []string, if err == nil { if _, 
planPathErr := os.Stat(planPath); !os.IsNotExist(planPathErr) { ctx.Log.Info("state rm successful, deleting planfile") - if removeErr := os.Remove(planPath); removeErr != nil { + if removeErr := utils.RemoveIgnoreNonExistent(planPath); removeErr != nil { ctx.Log.Warn("failed to delete planfile after successful state rm: %s", removeErr) } } diff --git a/server/core/terraform/terraform_client.go b/server/core/terraform/terraform_client.go index 7ca6fc7277..89c02c8634 100644 --- a/server/core/terraform/terraform_client.go +++ b/server/core/terraform/terraform_client.go @@ -338,6 +338,11 @@ func (c *DefaultClient) DetectVersion(log logging.SimpleLogging, projectDirector } constraint, _ := version.NewConstraint(requiredVersionSetting) + // Since terraform version 1.8.2, terraform is not a single file download anymore and + // Atlantis fails to download version 1.8.2 and higher. So, as a short-term fix, + // we need to block any version higher than 1.8.1 until proper solution is implemented. + // More details on the issue here - https://github.com/runatlantis/atlantis/issues/4471 + highestSupportedConstraint, _ := version.NewConstraint("<= 1.8.1") versions := make([]*version.Version, len(tfVersions)) for i, tfvals := range tfVersions { @@ -355,7 +360,7 @@ func (c *DefaultClient) DetectVersion(log logging.SimpleLogging, projectDirector sort.Sort(sort.Reverse(version.Collection(versions))) for _, element := range versions { - if constraint.Check(element) { // Validate a version against a constraint + if constraint.Check(element) && highestSupportedConstraint.Check(element) { // Validate a version against a constraint tfversionStr := element.String() if lib.ValidVersionFormat(tfversionStr) { //check if version format is correct tfversion, _ := version.NewVersion(tfversionStr) diff --git a/server/core/terraform/terraform_client_test.go b/server/core/terraform/terraform_client_test.go index 29fccb4579..cfa0f60fb3 100644 --- a/server/core/terraform/terraform_client_test.go +++ b/server/core/terraform/terraform_client_test.go @@ -393,6 +393,12 @@ terraform { // cannot use ~> 1.3 or ~> 1.0 since that is a moving target since it will always // resolve to the latest terraform 1.x "~> 1.3.0": "1.3.10", + // Since terraform version 1.8.2, terraform is not a single file download anymore and + // Atlantis fails to download version 1.8.2 and higher. So, as a short-term fix, + // we need to block any version higher than 1.8.1 until proper solution is implemented. + // More details on the issue here - https://github.com/runatlantis/atlantis/issues/4471 + ">= 1.3.0": "1.8.1", + ">= 1.8.2": "", } type testCase struct { diff --git a/server/events/command/context.go b/server/events/command/context.go index 1d6748915c..623c49588a 100644 --- a/server/events/command/context.go +++ b/server/events/command/context.go @@ -43,4 +43,7 @@ type Context struct { ClearPolicyApproval bool Trigger Trigger + + // API is true if plan/apply by API endpoints + API bool } diff --git a/server/events/event_parser.go b/server/events/event_parser.go index 54abcebb26..8a02f476e1 100644 --- a/server/events/event_parser.go +++ b/server/events/event_parser.go @@ -1088,7 +1088,7 @@ func (e *EventParser) ParseGiteaPullRequestEvent(event giteasdk.PullRequest) (mo return pull, pullEventType, baseRepo, headRepo, user, nil } -// ParseGithubPull parses the response from the GitHub API endpoint (not +// ParseGiteaPull parses the response from the Gitea API endpoint (not // from a webhook) that returns a pull request. 
// See EventParsing for return value docs. func (e *EventParser) ParseGiteaPull(pull *giteasdk.PullRequest) (pullModel models.PullRequest, baseRepo models.Repo, headRepo models.Repo, err error) { diff --git a/server/events/markdown_renderer_test.go b/server/events/markdown_renderer_test.go index 92887db5d1..ace23c443a 100644 --- a/server/events/markdown_renderer_test.go +++ b/server/events/markdown_renderer_test.go @@ -488,7 +488,6 @@ $$$ $$$shell atlantis plan -d path -w workspace $$$ - $$$ policy set: policy1: 2 tests, 1 passed, 0 warnings, 1 failure, 0 exceptions $$$ diff --git a/server/events/mocks/mock_event_parsing.go b/server/events/mocks/mock_event_parsing.go index ad7a75c252..505fadb5fa 100644 --- a/server/events/mocks/mock_event_parsing.go +++ b/server/events/mocks/mock_event_parsing.go @@ -5,11 +5,11 @@ package mocks import ( gitea "code.gitea.io/sdk/gitea" - gitea0 "github.com/runatlantis/atlantis/server/events/vcs/gitea" github "github.com/google/go-github/v59/github" azuredevops "github.com/mcdafydd/go-azuredevops/azuredevops" pegomock "github.com/petergtz/pegomock/v4" models "github.com/runatlantis/atlantis/server/events/models" + gitea0 "github.com/runatlantis/atlantis/server/events/vcs/gitea" logging "github.com/runatlantis/atlantis/server/logging" go_gitlab "github.com/xanzy/go-gitlab" "reflect" diff --git a/server/events/models/models.go b/server/events/models/models.go index 4ff5bc339d..a23410a69b 100644 --- a/server/events/models/models.go +++ b/server/events/models/models.go @@ -653,6 +653,8 @@ type WorkflowHookCommandContext struct { // Workspace is the Terraform workspace this project is in. It will always // be set. Workspace string + // API is true if plan/apply by API endpoints + API bool } // PlanSuccessStats holds stats for a plan. 
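    Note: the step-runner hunks above and the pending plan finder hunk below all swap direct `os.Remove` calls for `utils.RemoveIgnoreNonExistent`, but the helper's own source is not part of this diff. As a hedged sketch only (the real implementation may differ), a helper consistent with how these callers use it — delete the planfile, but treat an already-missing file as success so re-runs don't log spurious warnings — could look like this:

    ```go
    // Sketch of an assumed implementation; the actual utils package in the
    // repository is not shown in this diff.
    package utils

    import (
    	"errors"
    	"io/fs"
    	"os"
    )

    // RemoveIgnoreNonExistent removes the named file but returns nil when the
    // file does not exist, making the delete idempotent for callers that may
    // run after the planfile has already been cleaned up.
    func RemoveIgnoreNonExistent(file string) error {
    	err := os.Remove(file)
    	if err == nil || errors.Is(err, fs.ErrNotExist) {
    		return nil
    	}
    	return err
    }
    ```

    Treating "file not found" as success is what lets the apply/import/state-rm runners and the pending plan finder share one cleanup path without warning when another step has already removed the plan.
    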
diff --git a/server/events/pending_plan_finder.go b/server/events/pending_plan_finder.go index 72a4f2742f..9a26866b1a 100644 --- a/server/events/pending_plan_finder.go +++ b/server/events/pending_plan_finder.go @@ -8,6 +8,7 @@ import ( "github.com/pkg/errors" "github.com/runatlantis/atlantis/server/core/runtime" + "github.com/runatlantis/atlantis/server/utils" ) //go:generate pegomock generate --package mocks -o mocks/mock_pending_plan_finder.go PendingPlanFinder @@ -92,7 +93,7 @@ func (p *DefaultPendingPlanFinder) DeletePlans(pullDir string) error { return err } for _, path := range absPaths { - if err := os.Remove(path); err != nil { + if err := utils.RemoveIgnoreNonExistent(path); err != nil { return errors.Wrapf(err, "delete plan at %s", path) } } diff --git a/server/events/post_workflow_hooks_command_runner.go b/server/events/post_workflow_hooks_command_runner.go index e1f7f10a9d..f5fe0c5245 100644 --- a/server/events/post_workflow_hooks_command_runner.go +++ b/server/events/post_workflow_hooks_command_runner.go @@ -79,6 +79,7 @@ func (w *DefaultPostWorkflowHooksCommandRunner) RunPostHooks(ctx *command.Contex Verbose: false, EscapedCommentArgs: escapedArgs, CommandName: cmd.Name.String(), + API: ctx.API, }, postWorkflowHooks, repoDir) @@ -124,7 +125,7 @@ func (w *DefaultPostWorkflowHooksCommandRunner) runHooks( shellArgs = "-c" } url, err := w.Router.GenerateProjectWorkflowHookURL(ctx.HookID) - if err != nil { + if err != nil && !ctx.API { return err } diff --git a/server/events/pre_workflow_hooks_command_runner.go b/server/events/pre_workflow_hooks_command_runner.go index 0e81f15ab9..70462765a3 100644 --- a/server/events/pre_workflow_hooks_command_runner.go +++ b/server/events/pre_workflow_hooks_command_runner.go @@ -91,6 +91,7 @@ func (w *DefaultPreWorkflowHooksCommandRunner) RunPreHooks(ctx *command.Context, Verbose: false, EscapedCommentArgs: escapedArgs, CommandName: cmd.Name.String(), + API: ctx.API, }, preWorkflowHooks, repoDir) @@ -135,13 +136,17 @@ func (w *DefaultPreWorkflowHooksCommandRunner) runHooks( shellArgs = "-c" } url, err := w.Router.GenerateProjectWorkflowHookURL(ctx.HookID) - if err != nil { + if err != nil && !ctx.API { return err } if err := w.CommitStatusUpdater.UpdatePreWorkflowHook(ctx.Log, ctx.Pull, models.PendingCommitStatus, ctx.HookDescription, "", url); err != nil { ctx.Log.Warn("unable to update pre workflow hook status: %s", err) - return err + ctx.Log.Info("is api? %v", ctx.API) + if !ctx.API { + ctx.Log.Info("is api? 
%v", ctx.API) + return err + } } _, runtimeDesc, err := w.PreWorkflowHookRunner.Run(ctx, hook.RunCommand, shell, shellArgs, repoDir) @@ -155,7 +160,9 @@ func (w *DefaultPreWorkflowHooksCommandRunner) runHooks( if err := w.CommitStatusUpdater.UpdatePreWorkflowHook(ctx.Log, ctx.Pull, models.SuccessCommitStatus, ctx.HookDescription, runtimeDesc, url); err != nil { ctx.Log.Warn("unable to update pre workflow hook status: %s", err) - return err + if !ctx.API { + return err + } } } diff --git a/server/events/project_command_builder.go b/server/events/project_command_builder.go index 41945ca170..d40ae25893 100644 --- a/server/events/project_command_builder.go +++ b/server/events/project_command_builder.go @@ -11,6 +11,7 @@ import ( "github.com/runatlantis/atlantis/server/core/config/valid" "github.com/runatlantis/atlantis/server/core/terraform" + "github.com/runatlantis/atlantis/server/logging" "github.com/runatlantis/atlantis/server/metrics" "github.com/pkg/errors" @@ -35,6 +36,7 @@ const ( ) func NewInstrumentedProjectCommandBuilder( + logger logging.SimpleLogging, policyChecksSupported bool, parserValidator *config.ParserValidator, projectFinder ProjectFinder, @@ -89,7 +91,8 @@ func NewInstrumentedProjectCommandBuilder( scope, terraformClient, ), - scope: scope, + Logger: logger, + scope: scope, } } diff --git a/server/events/vcs/azuredevops_client.go b/server/events/vcs/azuredevops_client.go index 03bc1963c0..cd2ebe52fe 100644 --- a/server/events/vcs/azuredevops_client.go +++ b/server/events/vcs/azuredevops_client.go @@ -316,7 +316,7 @@ func (g *AzureDevopsClient) MergePull(logger logging.SimpleLogging, pull models. return fmt.Errorf("the user %s is not found in the organization %s", g.UserName, owner) } - imageURL := "https://github.com/runatlantis/atlantis/raw/main/runatlantis.io/.vuepress/public/hero.png" + imageURL := "https://raw.githubusercontent.com/runatlantis/atlantis/main/runatlantis.io/public/hero.png" id := azuredevops.IdentityRef{ Descriptor: &descriptor, ID: userID, diff --git a/server/events/vcs/gitlab_client.go b/server/events/vcs/gitlab_client.go index c4cb837a4e..4003f33ca4 100644 --- a/server/events/vcs/gitlab_client.go +++ b/server/events/vcs/gitlab_client.go @@ -22,14 +22,13 @@ import ( "strings" "time" - "github.com/runatlantis/atlantis/server/events/command" - "github.com/runatlantis/atlantis/server/events/vcs/common" - version "github.com/hashicorp/go-version" + "github.com/jpillora/backoff" "github.com/pkg/errors" - "github.com/runatlantis/atlantis/server/logging" - + "github.com/runatlantis/atlantis/server/events/command" "github.com/runatlantis/atlantis/server/events/models" + "github.com/runatlantis/atlantis/server/events/vcs/common" + "github.com/runatlantis/atlantis/server/logging" gitlab "github.com/xanzy/go-gitlab" ) @@ -355,18 +354,28 @@ func (g *GitlabClient) PullIsMergeable(logger logging.SimpleLogging, repo models return false, err } + if supportsDetailedMergeStatus { + logger.Debug("Detailed merge status: '%s'", mr.DetailedMergeStatus) + } else { + logger.Debug("Merge status: '%s'", mr.MergeStatus) //nolint:staticcheck // Need to reference deprecated field for backwards compatibility + } + if ((supportsDetailedMergeStatus && (mr.DetailedMergeStatus == "mergeable" || mr.DetailedMergeStatus == "ci_still_running" || - mr.DetailedMergeStatus == "ci_must_pass")) || + mr.DetailedMergeStatus == "ci_must_pass" || + mr.DetailedMergeStatus == "need_rebase")) || (!supportsDetailedMergeStatus && mr.MergeStatus == "can_be_merged")) && //nolint:staticcheck // Need to 
reference deprecated field for backwards compatibility mr.ApprovalsBeforeMerge <= 0 && mr.BlockingDiscussionsResolved && !mr.WorkInProgress && (allowSkippedPipeline || !isPipelineSkipped) { + + logger.Debug("Merge request is mergeable") return true, nil } + logger.Debug("Merge request is not mergeable") return false, nil } @@ -429,17 +438,63 @@ func (g *GitlabClient) UpdateStatus(logger logging.SimpleLogging, repo models.Re } } - _, resp, err := g.Client.Commits.SetCommitStatus(repo.FullName, pull.HeadCommit, &gitlab.SetCommitStatusOptions{ - State: gitlabState, - Context: gitlab.Ptr(src), - Description: gitlab.Ptr(description), - TargetURL: &url, - Ref: gitlab.Ptr(refTarget), - }) - if resp != nil { - logger.Debug("POST /projects/%s/statuses/%s returned: %d", repo.FullName, pull.HeadCommit, resp.StatusCode) + var ( + resp *gitlab.Response + maxAttempts = 10 + b = &backoff.Backoff{Jitter: true} + ) + + for i := 0; i <= maxAttempts; i++ { + logger := logger.With( + "attempt", i+1, + "max_attempts", maxAttempts, + "repo", repo.FullName, + "commit", pull.HeadCommit, + "state", state.String(), + ) + + _, resp, err = g.Client.Commits.SetCommitStatus(repo.FullName, pull.HeadCommit, &gitlab.SetCommitStatusOptions{ + State: gitlabState, + Context: gitlab.Ptr(src), + Description: gitlab.Ptr(description), + TargetURL: &url, + Ref: gitlab.Ptr(refTarget), + }) + + if resp != nil { + logger.Debug("POST /projects/%s/statuses/%s returned: %d", repo.FullName, pull.HeadCommit, resp.StatusCode) + + // GitLab returns a `409 Conflict` status when the commit pipeline status is being changed/locked by another request, + // which is likely to happen if you use [`--parallel-pool-size > 1`] and [`parallel-plan|apply`]. + // + // The likelihood of this happening is increased when the number of parallel apply jobs is increased. + // + // Returning the [err] without retrying will permanently leave the GitLab commit status in a "running" state, + // which would prevent Atlantis from merging the merge request on [apply]. + // + // GitLab does not allow merge requests to be merged when the pipeline status is "running." + + if resp.StatusCode == http.StatusConflict { + sleep := b.ForAttempt(float64(i)) + + logger.With("retry_in", sleep).Warn("GitLab returned HTTP [409 Conflict] when updating commit status") + time.Sleep(sleep) + + continue + } + } + + // Log we got a 200 OK response from GitLab after at least one retry to help with debugging/understanding delays/errors. 
+ if err == nil && i > 0 { + logger.Info("GitLab returned HTTP [200 OK] after updating commit status") + } + + // Return the err, which might be nil if everything worked out + return err } - return err + + // If we got here, we've exhausted all attempts to update the commit status and still failed, so return the error upstream + return errors.Wrap(err, fmt.Sprintf("failed to update commit status for '%s' @ '%s' to '%s' after %d attempts", repo.FullName, pull.HeadCommit, src, maxAttempts)) } func (g *GitlabClient) GetMergeRequest(logger logging.SimpleLogging, repoFullName string, pullNum int) (*gitlab.MergeRequest, error) { @@ -461,7 +516,7 @@ func (g *GitlabClient) WaitForSuccessPipeline(logger logging.SimpleLogging, ctx case <-ctx.Done(): // validation check time out cancel() - return //ctx.Err() + return // ctx.Err() default: mr, _ := g.GetMergeRequest(logger, pull.BaseRepo.FullName, pull.Num) diff --git a/server/events/vcs/gitlab_client_test.go b/server/events/vcs/gitlab_client_test.go index 5c463e85cf..3853698823 100644 --- a/server/events/vcs/gitlab_client_test.go +++ b/server/events/vcs/gitlab_client_test.go @@ -358,6 +358,7 @@ func TestGitlabClient_PullIsMergeable(t *testing.T) { noHeadPipelineMR := 2 ciMustPassSuccessMR := 3 ciMustPassFailureMR := 4 + needRebaseMR := 5 pipelineSuccess, err := os.ReadFile("testdata/gitlab-pipeline-success.json") Ok(t, err) @@ -368,6 +369,9 @@ func TestGitlabClient_PullIsMergeable(t *testing.T) { detailedMergeStatusCiMustPass, err := os.ReadFile("testdata/gitlab-detailed-merge-status-ci-must-pass.json") Ok(t, err) + detailedMergeStatusNeedRebase, err := os.ReadFile("testdata/gitlab-detailed-merge-status-need-rebase.json") + Ok(t, err) + headPipelineNotAvailable, err := os.ReadFile("testdata/gitlab-head-pipeline-not-available.json") Ok(t, err) @@ -427,6 +431,13 @@ func TestGitlabClient_PullIsMergeable(t *testing.T) { ciMustPassFailureMR, false, }, + { + fmt.Sprintf("%s/apply", vcsStatusName), + models.FailedCommitStatus, + gitlabServerVersions, + needRebaseMR, + true, + }, { fmt.Sprintf("%s/apply: resource/default", vcsStatusName), models.FailedCommitStatus, @@ -491,6 +502,9 @@ func TestGitlabClient_PullIsMergeable(t *testing.T) { case fmt.Sprintf("/api/v4/projects/runatlantis%%2Fatlantis/merge_requests/%v", ciMustPassFailureMR): w.WriteHeader(http.StatusOK) w.Write(detailedMergeStatusCiMustPass) // nolint: errcheck + case fmt.Sprintf("/api/v4/projects/runatlantis%%2Fatlantis/merge_requests/%v", needRebaseMR): + w.WriteHeader(http.StatusOK) + w.Write(detailedMergeStatusNeedRebase) // nolint: errcheck case fmt.Sprintf("/api/v4/projects/%v", projectID): w.WriteHeader(http.StatusOK) w.Write(projectSuccess) // nolint: errcheck diff --git a/server/events/vcs/mocks/mock_github_pull_request_getter.go b/server/events/vcs/mocks/mock_github_pull_request_getter.go index f31a9a7ca9..00b472db75 100644 --- a/server/events/vcs/mocks/mock_github_pull_request_getter.go +++ b/server/events/vcs/mocks/mock_github_pull_request_getter.go @@ -7,6 +7,7 @@ import ( github "github.com/google/go-github/v59/github" pegomock "github.com/petergtz/pegomock/v4" models "github.com/runatlantis/atlantis/server/events/models" + logging "github.com/runatlantis/atlantis/server/logging" "reflect" "time" ) @@ -26,11 +27,11 @@ func NewMockGithubPullRequestGetter(options ...pegomock.Option) *MockGithubPullR func (mock *MockGithubPullRequestGetter) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } func (mock *MockGithubPullRequestGetter) FailHandler() pegomock.FailHandler { return 
mock.fail } -func (mock *MockGithubPullRequestGetter) GetPullRequest(repo models.Repo, pullNum int) (*github.PullRequest, error) { +func (mock *MockGithubPullRequestGetter) GetPullRequest(logger logging.SimpleLogging, repo models.Repo, pullNum int) (*github.PullRequest, error) { if mock == nil { panic("mock must not be nil. Use myMock := NewMockGithubPullRequestGetter().") } - params := []pegomock.Param{repo, pullNum} + params := []pegomock.Param{logger, repo, pullNum} result := pegomock.GetGenericMockFrom(mock).Invoke("GetPullRequest", params, []reflect.Type{reflect.TypeOf((**github.PullRequest)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) var ret0 *github.PullRequest var ret1 error @@ -82,8 +83,8 @@ type VerifierMockGithubPullRequestGetter struct { timeout time.Duration } -func (verifier *VerifierMockGithubPullRequestGetter) GetPullRequest(repo models.Repo, pullNum int) *MockGithubPullRequestGetter_GetPullRequest_OngoingVerification { - params := []pegomock.Param{repo, pullNum} +func (verifier *VerifierMockGithubPullRequestGetter) GetPullRequest(logger logging.SimpleLogging, repo models.Repo, pullNum int) *MockGithubPullRequestGetter_GetPullRequest_OngoingVerification { + params := []pegomock.Param{logger, repo, pullNum} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GetPullRequest", params, verifier.timeout) return &MockGithubPullRequestGetter_GetPullRequest_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } @@ -93,21 +94,25 @@ type MockGithubPullRequestGetter_GetPullRequest_OngoingVerification struct { methodInvocations []pegomock.MethodInvocation } -func (c *MockGithubPullRequestGetter_GetPullRequest_OngoingVerification) GetCapturedArguments() (models.Repo, int) { - repo, pullNum := c.GetAllCapturedArguments() - return repo[len(repo)-1], pullNum[len(pullNum)-1] +func (c *MockGithubPullRequestGetter_GetPullRequest_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, int) { + logger, repo, pullNum := c.GetAllCapturedArguments() + return logger[len(logger)-1], repo[len(repo)-1], pullNum[len(pullNum)-1] } -func (c *MockGithubPullRequestGetter_GetPullRequest_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []int) { +func (c *MockGithubPullRequestGetter_GetPullRequest_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 []int) { params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) + _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) for u, param := range params[0] { - _param0[u] = param.(models.Repo) + _param0[u] = param.(logging.SimpleLogging) } - _param1 = make([]int, len(c.methodInvocations)) + _param1 = make([]models.Repo, len(c.methodInvocations)) for u, param := range params[1] { - _param1[u] = param.(int) + _param1[u] = param.(models.Repo) + } + _param2 = make([]int, len(c.methodInvocations)) + for u, param := range params[2] { + _param2[u] = param.(int) } } return diff --git a/server/events/vcs/testdata/gitlab-detailed-merge-status-need-rebase.json b/server/events/vcs/testdata/gitlab-detailed-merge-status-need-rebase.json new file mode 100644 index 0000000000..a37f0e8577 --- /dev/null +++ b/server/events/vcs/testdata/gitlab-detailed-merge-status-need-rebase.json @@ -0,0 +1,124 @@ +{ + "id": 22461274, + "iid": 13, + 
"project_id": 4580910, + "title": "Update main.tf", + "description": "", + "state": "opened", + "created_at": "2019-01-15T18:27:29.375Z", + "updated_at": "2019-01-25T17:28:01.437Z", + "merged_by": null, + "merged_at": null, + "closed_by": null, + "closed_at": null, + "target_branch": "patch-1", + "source_branch": "patch-1-merger", + "user_notes_count": 0, + "upvotes": 0, + "downvotes": 0, + "author": { + "id": 1755902, + "name": "Luke Kysow", + "username": "lkysow", + "state": "active", + "avatar_url": "https://secure.gravatar.com/avatar/25fd57e71590fe28736624ff24d41c5f?s=80&d=identicon", + "web_url": "https://gitlab.com/lkysow" + }, + "assignee": null, + "reviewers": [], + "source_project_id": 4580910, + "target_project_id": 4580910, + "labels": [], + "work_in_progress": false, + "milestone": null, + "merge_when_pipeline_succeeds": false, + "merge_status": "can_be_merged", + "detailed_merge_status": "need_rebase", + "sha": "cb86d70f464632bdfbe1bb9bc0f2f9d847a774a0", + "merge_commit_sha": null, + "squash_commit_sha": null, + "discussion_locked": null, + "should_remove_source_branch": null, + "force_remove_source_branch": true, + "reference": "!13", + "references": { + "short": "!13", + "relative": "!13", + "full": "lkysow/atlantis-example!13" + }, + "web_url": "https://gitlab.com/lkysow/atlantis-example/merge_requests/13", + "time_stats": { + "time_estimate": 0, + "total_time_spent": 0, + "human_time_estimate": null, + "human_total_time_spent": null + }, + "squash": true, + "task_completion_status": { + "count": 0, + "completed_count": 0 + }, + "has_conflicts": false, + "blocking_discussions_resolved": true, + "approvals_before_merge": null, + "subscribed": false, + "changes_count": "1", + "latest_build_started_at": "2019-01-15T18:27:29.375Z", + "latest_build_finished_at": "2019-01-25T17:28:01.437Z", + "first_deployed_to_production_at": null, + "pipeline": { + "id": 488598, + "sha": "67cb91d3f6198189f433c045154a885784ba6977", + "ref": "patch-1-merger", + "status": "success", + "created_at": "2019-01-15T18:27:29.375Z", + "updated_at": "2019-01-25T17:28:01.437Z", + "web_url": "https://gitlab.com/lkysow/atlantis-example/-/pipelines/488598" + }, + "head_pipeline": { + "id": 488598, + "sha": "67cb91d3f6198189f433c045154a885784ba6977", + "ref": "patch-1-merger", + "status": "success", + "created_at": "2019-01-15T18:27:29.375Z", + "updated_at": "2019-01-25T17:28:01.437Z", + "web_url": "https://gitlab.com/lkysow/atlantis-example/-/pipelines/488598", + "before_sha": "0000000000000000000000000000000000000000", + "tag": false, + "yaml_errors": null, + "user": { + "id": 1755902, + "name": "Luke Kysow", + "username": "lkysow", + "state": "active", + "avatar_url": "https://secure.gravatar.com/avatar/25fd57e71590fe28736624ff24d41c5f?s=80&d=identicon", + "web_url": "https://gitlab.com/lkysow" + }, + "started_at": "2019-01-15T18:27:29.375Z", + "finished_at": "2019-01-25T17:28:01.437Z", + "committed_at": null, + "duration": 31, + "coverage": null, + "detailed_status": { + "icon": "status_success", + "text": "passed", + "label": "passed", + "group": "success", + "tooltip": "passed", + "has_details": true, + "details_path": "/lkysow/atlantis-example/-/pipelines/488598", + "illustration": null, + "favicon": "/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png" + } + }, + "diff_refs": { + "base_sha": "67cb91d3f6198189f433c045154a885784ba6977", + "head_sha": "cb86d70f464632bdfbe1bb9bc0f2f9d847a774a0", + "start_sha": 
"67cb91d3f6198189f433c045154a885784ba6977" + }, + "merge_error": null, + "first_contribution": false, + "user": { + "can_merge": true + } +} diff --git a/server/events/working_dir.go b/server/events/working_dir.go index c3ebe56e80..c2e56d8dc7 100644 --- a/server/events/working_dir.go +++ b/server/events/working_dir.go @@ -26,6 +26,7 @@ import ( "github.com/runatlantis/atlantis/server/core/runtime" "github.com/runatlantis/atlantis/server/events/models" "github.com/runatlantis/atlantis/server/logging" + "github.com/runatlantis/atlantis/server/utils" ) const workingDirPrefix = "repos" @@ -179,7 +180,6 @@ func (w *FileWorkspace) recheckDiverged(logger logging.SimpleLogging, p models.P cmd.Dir = cloneDir output, err := cmd.CombinedOutput() - if err != nil { logger.Warn("getting remote update failed: %s", string(output)) return false @@ -420,7 +420,7 @@ func (w *FileWorkspace) SetCheckForUpstreamChanges() { func (w *FileWorkspace) DeletePlan(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string, projectPath string, projectName string) error { planPath := filepath.Join(w.cloneDir(r, p, workspace), projectPath, runtime.GetPlanFilename(workspace, projectName)) logger.Info("Deleting plan: " + planPath) - return os.Remove(planPath) + return utils.RemoveIgnoreNonExistent(planPath) } // getGitUntrackedFiles returns a list of Git untracked files in the working dir. diff --git a/server/server.go b/server/server.go index 4bae892b0a..215b5dd02b 100644 --- a/server/server.go +++ b/server/server.go @@ -600,6 +600,7 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) { Router: router, } projectCommandBuilder := events.NewInstrumentedProjectCommandBuilder( + logger, policyChecksEnabled, validator, &events.DefaultProjectFinder{}, @@ -875,16 +876,19 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) { StatsScope: statsScope.SubScope("api"), } apiController := &controllers.APIController{ - APISecret: []byte(userConfig.APISecret), - Locker: lockingClient, - Logger: logger, - Parser: eventParser, - ProjectCommandBuilder: projectCommandBuilder, - ProjectPlanCommandRunner: instrumentedProjectCmdRunner, - ProjectApplyCommandRunner: instrumentedProjectCmdRunner, - RepoAllowlistChecker: repoAllowlist, - Scope: statsScope.SubScope("api"), - VCSClient: vcsClient, + APISecret: []byte(userConfig.APISecret), + Locker: lockingClient, + Logger: logger, + Parser: eventParser, + ProjectCommandBuilder: projectCommandBuilder, + ProjectPlanCommandRunner: instrumentedProjectCmdRunner, + ProjectApplyCommandRunner: instrumentedProjectCmdRunner, + FailOnPreWorkflowHookError: userConfig.FailOnPreWorkflowHookError, + PreWorkflowHooksCommandRunner: preWorkflowHooksCommandRunner, + PostWorkflowHooksCommandRunner: postWorkflowHooksCommandRunner, + RepoAllowlistChecker: repoAllowlist, + Scope: statsScope.SubScope("api"), + VCSClient: vcsClient, } eventsController := &events_controllers.VCSEventsController{ @@ -1080,7 +1084,7 @@ func (s *Server) Index(w http.ResponseWriter, _ *http.Request) { Path: v.Project.Path, Workspace: v.Workspace, Time: v.Time, - TimeFormatted: v.Time.Format("02-01-2006 15:04:05"), + TimeFormatted: v.Time.Format("2006-01-02 15:04:05"), }) } @@ -1096,7 +1100,7 @@ func (s *Server) Index(w http.ResponseWriter, _ *http.Request) { Time: applyCmdLock.Time, Locked: applyCmdLock.Locked, GlobalApplyLockEnabled: applyCmdLock.GlobalApplyLockEnabled, - TimeFormatted: applyCmdLock.Time.Format("02-01-2006 15:04:05"), + TimeFormatted: 
applyCmdLock.Time.Format("2006-01-02 15:04:05"), } //Sort by date - newest to oldest. sort.SliceStable(lockResults, func(i, j int) bool { return lockResults[i].Time.After(lockResults[j].Time) }) @@ -1121,7 +1125,7 @@ func preparePullToJobMappings(s *Server) []jobs.PullInfoWithJobIDs { for j := range pullToJobMappings[i].JobIDInfos { jobUrl, _ := s.Router.Get(ProjectJobsViewRouteName).URL("job-id", pullToJobMappings[i].JobIDInfos[j].JobID) pullToJobMappings[i].JobIDInfos[j].JobIDUrl = jobUrl.String() - pullToJobMappings[i].JobIDInfos[j].TimeFormatted = pullToJobMappings[i].JobIDInfos[j].Time.Format("02-01-2006 15:04:05") + pullToJobMappings[i].JobIDInfos[j].TimeFormatted = pullToJobMappings[i].JobIDInfos[j].Time.Format("2006-01-02 15:04:05") } //Sort by date - newest to oldest. diff --git a/server/server_test.go b/server/server_test.go index e96c6aa6b4..692cc8e283 100644 --- a/server/server_test.go +++ b/server/server_test.go @@ -117,7 +117,7 @@ func TestIndex_Success(t *testing.T) { ApplyLock: web_templates.ApplyLockData{ Locked: false, Time: time.Time{}, - TimeFormatted: "01-01-0001 00:00:00", + TimeFormatted: "0001-01-01 00:00:00", }, Locks: []web_templates.LockIndexData{ { @@ -125,7 +125,7 @@ func TestIndex_Success(t *testing.T) { RepoFullName: "lkysow/atlantis-example", PullNum: 9, Time: now, - TimeFormatted: now.Format("02-01-2006 15:04:05"), + TimeFormatted: now.Format("2006-01-02 15:04:05"), }, }, PullToJobMapping: []jobs.PullInfoWithJobIDs{}, diff --git a/server/utils/os.go b/server/utils/os.go new file mode 100644 index 0000000000..2a06d8486e --- /dev/null +++ b/server/utils/os.go @@ -0,0 +1,13 @@ +package utils + +import "os" + +// RemoveIgnoreNonExistent removes a file, ignoring if it doesn't exist. +func RemoveIgnoreNonExistent(file string) error { + err := os.Remove(file) + if err == nil || os.IsNotExist(err) { + return nil + } + + return err +} diff --git a/testdrive/utils.go b/testdrive/utils.go index ba6d8288b4..0657591886 100644 --- a/testdrive/utils.go +++ b/testdrive/utils.go @@ -35,7 +35,7 @@ import ( ) const hashicorpReleasesURL = "https://releases.hashicorp.com" -const terraformVersion = "1.7.5" // renovate: datasource=github-releases depName=hashicorp/terraform versioning=hashicorp +const terraformVersion = "1.8.3" // renovate: datasource=github-releases depName=hashicorp/terraform versioning=hashicorp const ngrokDownloadURL = "https://bin.equinox.io/c/4VmDzA7iaHb" const ngrokAPIURL = "localhost:41414" // We hope this isn't used. 
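For the DeletePlan change above, removing a plan that is already gone is no longer treated as an error thanks to the new server/utils helper. A small, hypothetical test sketch (TestRemoveIgnoreNonExistent is not part of this diff) illustrates the intended contract:

package utils_test

import (
	"os"
	"path/filepath"
	"testing"

	"github.com/runatlantis/atlantis/server/utils"
)

func TestRemoveIgnoreNonExistent(t *testing.T) {
	f := filepath.Join(t.TempDir(), "plan.tfplan")

	// Removing a file that does not exist is not an error.
	if err := utils.RemoveIgnoreNonExistent(f); err != nil {
		t.Fatalf("expected nil for missing file, got %v", err)
	}

	// Removing an existing file behaves like os.Remove.
	if err := os.WriteFile(f, []byte("plan"), 0o600); err != nil {
		t.Fatal(err)
	}
	if err := utils.RemoveIgnoreNonExistent(f); err != nil {
		t.Fatalf("expected nil for existing file, got %v", err)
	}
}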
const atlantisPort = 4141 diff --git a/testing/Dockerfile b/testing/Dockerfile index 1d734c529e..2958f1bfba 100644 --- a/testing/Dockerfile +++ b/testing/Dockerfile @@ -1,4 +1,4 @@ -FROM golang:1.22.1 +FROM golang:1.22.2@sha256:450e3822c7a135e1463cd83e51c8e2eb03b86a02113c89424e6f0f8344bb4168 RUN apt-get update && apt-get --no-install-recommends -y install unzip \ && apt-get clean \ @@ -6,7 +6,7 @@ RUN apt-get update && apt-get --no-install-recommends -y install unzip \ # Install Terraform # renovate: datasource=github-releases depName=hashicorp/terraform versioning=hashicorp -ENV TERRAFORM_VERSION=1.7.5 +ENV TERRAFORM_VERSION=1.8.3 RUN case $(uname -m) in x86_64|amd64) ARCH="amd64" ;; aarch64|arm64|armv7l) ARCH="arm64" ;; esac && \ wget -nv -O terraform.zip https://releases.hashicorp.com/terraform/${TERRAFORM_VERSION}/terraform_${TERRAFORM_VERSION}_linux_${ARCH}.zip && \ mkdir -p /usr/local/bin/tf/versions/${TERRAFORM_VERSION} && \ @@ -16,7 +16,7 @@ RUN case $(uname -m) in x86_64|amd64) ARCH="amd64" ;; aarch64|arm64|armv7l) ARCH # Install conftest # renovate: datasource=github-releases depName=open-policy-agent/conftest -ENV CONFTEST_VERSION=0.50.0 +ENV CONFTEST_VERSION=0.51.0 SHELL ["/bin/bash", "-o", "pipefail", "-c"] RUN case $(uname -m) in x86_64|amd64) ARCH="x86_64" ;; aarch64|arm64|armv7l) ARCH="arm64" ;; esac && \ curl -LOs https://github.com/open-policy-agent/conftest/releases/download/v${CONFTEST_VERSION}/conftest_${CONFTEST_VERSION}_Linux_${ARCH}.tar.gz && \