From ec0bdf6ceae3f1f7c3452c098aa500044e4ad13b Mon Sep 17 00:00:00 2001 From: krrrr38 Date: Tue, 13 Dec 2022 08:41:36 +0900 Subject: [PATCH] feat: atlantis import --- cmd/server.go | 19 +- runatlantis.io/.vuepress/config.js | 2 +- ...equirements.md => command-requirements.md} | 32 +- .../docs/repo-level-atlantis-yaml.md | 36 ++- runatlantis.io/docs/server-configuration.md | 8 +- .../docs/server-side-repo-config.md | 36 ++- runatlantis.io/docs/using-atlantis.md | 28 ++ server/controllers/api_controller_test.go | 4 +- .../events/events_controller_e2e_test.go | 51 +++- .../import-multiple-project/atlantis.yaml | 4 + .../import-multiple-project/dir1/main.tf | 4 + .../import-multiple-project/dir2/main.tf | 4 + .../exp-output-autoplan.txt | 81 ++++++ .../exp-output-import-dummy1.txt | 20 ++ .../exp-output-import-multiple-projects.txt | 1 + .../exp-output-merge.txt | 4 + .../exp-output-plan-again.txt | 63 ++++ .../exp-output-apply-no-projects.txt | 4 + .../exp-output-autoplan.txt | 52 ++++ .../exp-output-import-dummy1.txt | 20 ++ .../exp-output-import-dummy2.txt | 20 ++ .../exp-output-merge.txt | 3 + .../exp-output-plan-again.txt | 22 ++ .../test-repos/import-single-project/main.tf | 9 + server/core/config/parser_validator_test.go | 205 ++++++++----- server/core/config/raw/global_cfg.go | 30 +- server/core/config/raw/project.go | 27 +- server/core/config/raw/project_test.go | 36 ++- server/core/config/raw/repo_cfg_test.go | 39 +-- server/core/config/raw/step.go | 4 +- server/core/config/raw/step_test.go | 23 ++ server/core/config/raw/workflow.go | 3 + server/core/config/raw/workflow_test.go | 15 + server/core/config/valid/global_cfg.go | 69 +++-- server/core/config/valid/global_cfg_test.go | 219 +++++++++----- server/core/config/valid/repo_cfg.go | 2 + server/core/runtime/import_step_runner.go | 38 +++ .../core/runtime/import_step_runner_test.go | 61 ++++ .../mocks/matchers/command_projectcontext.go | 33 +++ server/events/apply_requirement_handler.go | 43 --- server/events/command/name.go | 16 +- server/events/command/name_test.go | 74 +++-- server/events/command/project_context.go | 3 + server/events/command/project_result.go | 1 + server/events/command_requirement_handler.go | 65 +++++ .../command_requirement_handler_test.go | 194 +++++++++++++ server/events/command_runner_test.go | 8 + server/events/comment_parser.go | 25 +- server/events/comment_parser_test.go | 69 ++++- server/events/import_command_runner.go | 44 +++ server/events/markdown_renderer.go | 11 + server/events/markdown_renderer_test.go | 27 ++ server/events/mock_workingdir_test.go | 49 +++- server/events/mocks/matchers/command_name.go | 33 +++ .../mocks/matchers/command_projectcontext.go | 33 +++ .../mocks/matchers/command_projectresult.go | 33 +++ .../mocks/matchers/ptr_to_command_context.go | 33 +++ .../slice_of_command_projectcontext.go | 33 +++ server/events/mocks/mock_apply_handler.go | 114 -------- .../mocks/mock_command_requirement_handler.go | 163 +++++++++++ .../mocks/mock_project_command_builder.go | 50 ++++ .../mocks/mock_project_command_runner.go | 42 +++ server/events/mocks/mock_working_dir.go | 77 ++--- server/events/models/models.go | 8 + server/events/project_command_builder.go | 79 ++++- .../project_command_builder_internal_test.go | 274 +++++++++--------- .../events/project_command_context_builder.go | 6 +- server/events/project_command_runner.go | 103 +++++-- server/events/project_command_runner_test.go | 191 +++++++++--- .../templates/import_success_unwrapped.tmpl | 8 + .../templates/import_success_wrapped.tmpl 
| 9 + .../templates/multi_project_import.tmpl | 3 + .../single_project_import_success.tmpl | 6 + server/scheduled/executor_service.go | 5 +- server/server.go | 31 +- 75 files changed, 2543 insertions(+), 751 deletions(-) rename runatlantis.io/docs/{apply-requirements.md => command-requirements.md} (90%) create mode 100644 server/controllers/events/testfixtures/test-repos/import-multiple-project/atlantis.yaml create mode 100644 server/controllers/events/testfixtures/test-repos/import-multiple-project/dir1/main.tf create mode 100644 server/controllers/events/testfixtures/test-repos/import-multiple-project/dir2/main.tf create mode 100644 server/controllers/events/testfixtures/test-repos/import-multiple-project/exp-output-autoplan.txt create mode 100644 server/controllers/events/testfixtures/test-repos/import-multiple-project/exp-output-import-dummy1.txt create mode 100644 server/controllers/events/testfixtures/test-repos/import-multiple-project/exp-output-import-multiple-projects.txt create mode 100644 server/controllers/events/testfixtures/test-repos/import-multiple-project/exp-output-merge.txt create mode 100644 server/controllers/events/testfixtures/test-repos/import-multiple-project/exp-output-plan-again.txt create mode 100644 server/controllers/events/testfixtures/test-repos/import-single-project/exp-output-apply-no-projects.txt create mode 100644 server/controllers/events/testfixtures/test-repos/import-single-project/exp-output-autoplan.txt create mode 100644 server/controllers/events/testfixtures/test-repos/import-single-project/exp-output-import-dummy1.txt create mode 100644 server/controllers/events/testfixtures/test-repos/import-single-project/exp-output-import-dummy2.txt create mode 100644 server/controllers/events/testfixtures/test-repos/import-single-project/exp-output-merge.txt create mode 100644 server/controllers/events/testfixtures/test-repos/import-single-project/exp-output-plan-again.txt create mode 100644 server/controllers/events/testfixtures/test-repos/import-single-project/main.tf create mode 100644 server/core/runtime/import_step_runner.go create mode 100644 server/core/runtime/import_step_runner_test.go create mode 100644 server/core/terraform/mocks/matchers/command_projectcontext.go delete mode 100644 server/events/apply_requirement_handler.go create mode 100644 server/events/command_requirement_handler.go create mode 100644 server/events/command_requirement_handler_test.go create mode 100644 server/events/import_command_runner.go create mode 100644 server/events/mocks/matchers/command_name.go create mode 100644 server/events/mocks/matchers/command_projectcontext.go create mode 100644 server/events/mocks/matchers/command_projectresult.go create mode 100644 server/events/mocks/matchers/ptr_to_command_context.go create mode 100644 server/events/mocks/matchers/slice_of_command_projectcontext.go delete mode 100644 server/events/mocks/mock_apply_handler.go create mode 100644 server/events/mocks/mock_command_requirement_handler.go create mode 100644 server/events/templates/import_success_unwrapped.tmpl create mode 100644 server/events/templates/import_success_wrapped.tmpl create mode 100644 server/events/templates/multi_project_import.tmpl create mode 100644 server/events/templates/single_project_import_success.tmpl diff --git a/cmd/server.go b/cmd/server.go index 08648cc809..74f9a225fd 100644 --- a/cmd/server.go +++ b/cmd/server.go @@ -985,30 +985,31 @@ func (s *ServerCmd) securityWarnings(userConfig *server.UserConfig) { // being used. 
Right now this only applies to flags that have been made obsolete // due to server-side config. func (s *ServerCmd) deprecationWarnings(userConfig *server.UserConfig) error { - var applyReqs []string + var commandReqs []string var deprecatedFlags []string if userConfig.RequireApproval { deprecatedFlags = append(deprecatedFlags, RequireApprovalFlag) - applyReqs = append(applyReqs, valid.ApprovedApplyReq) + commandReqs = append(commandReqs, valid.ApprovedCommandReq) } if userConfig.RequireMergeable { deprecatedFlags = append(deprecatedFlags, RequireMergeableFlag) - applyReqs = append(applyReqs, valid.MergeableApplyReq) + commandReqs = append(commandReqs, valid.MergeableCommandReq) } // Build up strings with what the recommended yaml and json config should // be instead of using the deprecated flags. yamlCfg := "---\nrepos:\n- id: /.*/" jsonCfg := `{"repos":[{"id":"/.*/"` - if len(applyReqs) > 0 { - yamlCfg += fmt.Sprintf("\n apply_requirements: [%s]", strings.Join(applyReqs, ", ")) - jsonCfg += fmt.Sprintf(`, "apply_requirements":["%s"]`, strings.Join(applyReqs, "\", \"")) - + if len(commandReqs) > 0 { + yamlCfg += fmt.Sprintf("\n apply_requirements: [%s]", strings.Join(commandReqs, ", ")) + yamlCfg += fmt.Sprintf("\n import_requirements: [%s]", strings.Join(commandReqs, ", ")) + jsonCfg += fmt.Sprintf(`, "apply_requirements":["%s"]`, strings.Join(commandReqs, "\", \"")) + jsonCfg += fmt.Sprintf(`, "import_requirements":["%s"]`, strings.Join(commandReqs, "\", \"")) } if userConfig.AllowRepoConfig { deprecatedFlags = append(deprecatedFlags, AllowRepoConfigFlag) - yamlCfg += "\n allowed_overrides: [apply_requirements, workflow]\n allow_custom_workflows: true" - jsonCfg += `, "allowed_overrides":["apply_requirements","workflow"], "allow_custom_workflows":true` + yamlCfg += "\n allowed_overrides: [apply_requirements, import_requirements, workflow]\n allow_custom_workflows: true" + jsonCfg += `, "allowed_overrides":["apply_requirements","import_requirements","workflow"], "allow_custom_workflows":true` } jsonCfg += "}]}" diff --git a/runatlantis.io/.vuepress/config.js b/runatlantis.io/.vuepress/config.js index 6874025bc0..55885b7382 100644 --- a/runatlantis.io/.vuepress/config.js +++ b/runatlantis.io/.vuepress/config.js @@ -72,7 +72,7 @@ module.exports = { 'custom-workflows', 'repo-level-atlantis-yaml', 'upgrading-atlantis-yaml', - 'apply-requirements', + 'command-requirements', 'checkout-strategy', 'terraform-versions', 'terraform-cloud', diff --git a/runatlantis.io/docs/apply-requirements.md b/runatlantis.io/docs/command-requirements.md similarity index 90% rename from runatlantis.io/docs/apply-requirements.md rename to runatlantis.io/docs/command-requirements.md index 4fa7edf3d7..1a3e6309a4 100644 --- a/runatlantis.io/docs/apply-requirements.md +++ b/runatlantis.io/docs/command-requirements.md @@ -1,9 +1,9 @@ -# Apply Requirements +# Command Requirements [[toc]] ## Intro -Atlantis allows you to require certain conditions be satisfied **before** an `atlantis apply` -command can be run: +Atlantis allows you to require certain conditions be satisfied **before** `atlantis apply` and `atlantis import` +commands can be run: * [Approved](#approved) – requires pull requests to be approved by at least one user other than the author * [Mergeable](#mergeable) – requires pull requests to be able to be merged @@ -70,12 +70,12 @@ You can set the `mergeable` requirement by: apply_requirements: [mergeable] ``` -1. 
Or by allowing an `atlantis.yaml` file to specify `apply_requirements` and `import_requirements` keys in your `repos.yaml` config: #### repos.yaml ```yaml repos: - id: /.*/ - allowed_overrides: [apply_requirements] + allowed_overrides: [apply_requirements, import_requirements] ``` #### atlantis.yaml @@ -84,6 +84,7 @@ You can set the `mergeable` requirement by: projects: - dir: . apply_requirements: [mergeable] + import_requirements: [mergeable] #### Meaning @@ -152,18 +153,19 @@ Applies to `merge` checkout strategy only. #### Usage You can set the `undiverged` requirement by: -1. Creating a `repos.yaml` file with the `apply_requirements` key: +1. Creating a `repos.yaml` file with `apply_requirements` and `import_requirements` keys: ```yaml repos: - id: /.*/ apply_requirements: [undiverged] + import_requirements: [undiverged] ``` 1. Or by allowing an `atlantis.yaml` file to specify the `apply_requirements` key in your `repos.yaml` config: #### repos.yaml ```yaml repos: - id: /.*/ - allowed_overrides: [apply_requirements] + allowed_overrides: [apply_requirements, import_requirements] ``` #### atlantis.yaml @@ -172,6 +174,7 @@ You can set the `undiverged` requirement by: projects: - dir: . apply_requirements: [undiverged] + import_requirements: [undiverged] #### Meaning The `merge` checkout strategy creates a temporary merge commit and runs the `plan` on the Atlantis local version of the PR @@ -180,8 +183,8 @@ if there are no changes to the source branch. `undiverged` enforces that Atlanti with remote so that the state of the source during the `apply` is identical to that if you were to merge the PR at that time. -## Setting Apply Requirements -As mentioned above, you can set apply requirements via flags, in `repos.yaml`, or in `atlantis.yaml` if `repos.yaml` +## Setting Command Requirements +As mentioned above, you can set command requirements via flags, in `repos.yaml`, or in `atlantis.yaml` if `repos.yaml` allows the override. ### Flags Override @@ -197,19 +200,22 @@ If you only want some projects/repos to have apply requirements, then you must repos: - id: /.*/ apply_requirements: [approved] + import_requirements: [approved] # Regex that defaults all repos to requiring approval - id: /github.com/runatlantis/.*/ # Regex to match any repo under the atlantis namespace, and not require approval # except for repos that might match later in the chain apply_requirements: [] + import_requirements: [] - id: github.com/runatlantis/atlantis apply_requirements: [approved] + import_requirements: [approved] # Exact string match of the github.com/runatlantis/atlantis repo # that sets apply_requirements to approved ``` 1. Specify which projects have which requirements via an `atlantis.yaml` file, and allowing - `apply_requirements` to be set in in `atlantis.yaml` by the server side `repos.yaml` + `apply_requirements` and `import_requirements` to be set in `atlantis.yaml` by the server side `repos.yaml` config.
For example if I have two directories, `staging` and `production`, I might use: @@ -217,7 +223,7 @@ If you only want some projects/repos to have apply requirements, then you must ```yaml repos: - id: /.*/ - allowed_overrides: [apply_requirements] + allowed_overrides: [apply_requirements, import_requirements] # Allow any repo to specify apply_requirements in atlantis.yaml ``` @@ -226,13 +232,15 @@ If you only want some projects/repos to have apply requirements, then you must version: 3 projects: - dir: staging - # By default, apply_requirements is empty so this + # By default, apply_requirements and import_requirements are empty so this # isn't strictly necessary. apply_requirements: [] + import_requirements: [] - dir: production # This requirement will only apply to the # production directory. apply_requirements: [mergeable] + import_requirements: [mergeable] ### Multiple Requirements diff --git a/runatlantis.io/docs/repo-level-atlantis-yaml.md b/runatlantis.io/docs/repo-level-atlantis-yaml.md index 482d5b8129..837a339df5 100644 --- a/runatlantis.io/docs/repo-level-atlantis-yaml.md +++ b/runatlantis.io/docs/repo-level-atlantis-yaml.md @@ -60,7 +60,8 @@ projects: autoplan: when_modified: ["*.tf", "../modules/**/*.tf"] enabled: true - apply_requirements: [mergeable, approved] + apply_requirements: [mergeable, approved, undiverged] + import_requirements: [mergeable, approved, undiverged] workflow: myworkflow workflows: myworkflow: @@ -215,10 +216,11 @@ projects: - dir: staging - dir: production apply_requirements: [approved] + import_requirements: [approved] ``` :::warning -`apply_requirements` is a restricted key so this repo will need to be configured -to be allowed to set this key. See [Server-Side Repo Config Use Cases](server-side-repo-config.html#repos-can-set-their-own-apply-requirements). +`apply_requirements` and `import_requirements` are restricted keys so this repo will need to be configured +to be allowed to set this key. See [Server-Side Repo Config Use Cases](server-side-repo-config.html#repos-can-set-their-own-apply-or-import-requirements). ::: ### Order of planning/applying @@ -269,22 +271,24 @@ repo_locking: true autoplan: terraform_version: 0.11.0 apply_requirements: ["approved"] +import_requirements: ["approved"] workflow: myworkflow ``` -| Key | Type | Default | Required | Description | -|----------------------------------------|-----------------------|-------------|----------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| name | string | none | maybe | Required if there is more than one project with the same `dir` and `workspace`. This project name can be used with the `-p` flag. | -| branch | string | none | no | Regex matching projects by the base branch of pull request (the branch the pull request is getting merged into). Only projects that match the PR's branch will be considered. By default, all branches are matched. | -| dir | string | none | **yes** | The directory of this project relative to the repo root. For example if the project was under `./project1` then use `project1`. Use `.` to indicate the repo root. | -| workspace | string | `"default"` | no | The [Terraform workspace](https://www.terraform.io/docs/state/workspaces.html) for this project. Atlantis will switch to this workplace when planning/applying and will create it if it doesn't exist. 
| -| execution_order_group | int | `0` | no | Index of execution order group. Projects will be sort by this field before planning/applying. | -| delete_source_branch_on_merge | bool | `false` | no | Automatically deletes the source branch on merge. | -| repo_locking | bool | `true` | no | Get a repository lock in this project when plan. | -| autoplan | [Autoplan](#autoplan) | none | no | A custom autoplan configuration. If not specified, will use the autoplan config. See [Autoplanning](autoplanning.html). | -| terraform_version | string | none | no | A specific Terraform version to use when running commands for this project. Must be [Semver compatible](https://semver.org/), ex. `v0.11.0`, `0.12.0-beta1`. | -| apply_requirements
*(restricted)* | array[string] | none | no | Requirements that must be satisfied before `atlantis apply` can be run. Currently the only supported requirements are `approved`, `mergeable`, and `undiverged`. See [Apply Requirements](apply-requirements.html) for more details. | -| workflow
*(restricted)* | string | none | no | A custom workflow. If not specified, Atlantis will use its default workflow. | +| Key | Type | Default | Required | Description | +|-----------------------------------------|-----------------------|-------------|----------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| name | string | none | maybe | Required if there is more than one project with the same `dir` and `workspace`. This project name can be used with the `-p` flag. | +| branch | string | none | no | Regex matching projects by the base branch of pull request (the branch the pull request is getting merged into). Only projects that match the PR's branch will be considered. By default, all branches are matched. | +| dir | string | none | **yes** | The directory of this project relative to the repo root. For example if the project was under `./project1` then use `project1`. Use `.` to indicate the repo root. | +| workspace | string | `"default"` | no | The [Terraform workspace](https://www.terraform.io/docs/state/workspaces.html) for this project. Atlantis will switch to this workplace when planning/applying and will create it if it doesn't exist. | +| execution_order_group | int | `0` | no | Index of execution order group. Projects will be sort by this field before planning/applying. | +| delete_source_branch_on_merge | bool | `false` | no | Automatically deletes the source branch on merge. | +| repo_locking | bool | `true` | no | Get a repository lock in this project when plan. | +| autoplan | [Autoplan](#autoplan) | none | no | A custom autoplan configuration. If not specified, will use the autoplan config. See [Autoplanning](autoplanning.html). | +| terraform_version | string | none | no | A specific Terraform version to use when running commands for this project. Must be [Semver compatible](https://semver.org/), ex. `v0.11.0`, `0.12.0-beta1`. | +| apply_requirements
*(restricted)* | array[string] | none | no | Requirements that must be satisfied before `atlantis apply` can be run. Currently the only supported requirements are `approved`, `mergeable`, and `undiverged`. See [Command Requirements](command-requirements.html) for more details. | +| import_requirements
*(restricted)* | array[string] | none | no | Requirements that must be satisfied before `atlantis import` can be run. Currently the only supported requirements are `approved`, `mergeable`, and `undiverged`. See [Command Requirements](command-requirements.html) for more details. | +| workflow
*(restricted)* | string | none | no | A custom workflow. If not specified, Atlantis will use its default workflow. | ::: tip A project represents a Terraform state. Typically, there is one state per directory and workspace however it's possible to diff --git a/runatlantis.io/docs/server-configuration.md b/runatlantis.io/docs/server-configuration.md index 0104071dd5..df6041983e 100644 --- a/runatlantis.io/docs/server-configuration.md +++ b/runatlantis.io/docs/server-configuration.md @@ -495,9 +495,9 @@ and set `--autoplan-modules` to `false`. ### `--gh-team-allowlist` ```bash - atlantis server --gh-team-allowlist="myteam:plan, secteam:apply, DevOps Team:apply" + atlantis server --gh-team-allowlist="myteam:plan, secteam:apply, DevOps Team:apply, DevOps Team:import" # or - ATLANTIS_GH_TEAM_ALLOWLIST="myteam:plan, secteam:apply, DevOps Team:apply" + ATLANTIS_GH_TEAM_ALLOWLIST="myteam:plan, secteam:apply, DevOps Team:apply, DevOps Team:import" ``` In versions v0.21.0 and later, the GitHub team name can be a name or a slug. @@ -779,7 +779,7 @@ and set `--autoplan-modules` to `false`. ATLANTIS_REQUIRE_APPROVAL=true ``` This flag is deprecated. It requires all pull requests to be approved - before `atlantis apply` is allowed. See [Apply Requirements](apply-requirements.html) for more details. + before `atlantis apply` is allowed. See [Command Requirements](command-requirements.html) for more details. Instead of using this flag, create a server-side `--repo-config` file: ```yaml @@ -798,7 +798,7 @@ and set `--autoplan-modules` to `false`. ATLANTIS_REQUIRE_MERGEABLE=true ``` This flag is deprecated. It causes all pull requests to be mergeable - before `atlantis apply` is allowed. See [Apply Requirements](apply-requirements.html) for more details. + before `atlantis apply` is allowed. See [Command Requirements](command-requirements.html) for more details. Instead of using this flag, create a server-side `--repo-config` file: ```yaml diff --git a/runatlantis.io/docs/server-side-repo-config.md b/runatlantis.io/docs/server-side-repo-config.md index bc3ee64594..526bcf0ff6 100644 --- a/runatlantis.io/docs/server-side-repo-config.md +++ b/runatlantis.io/docs/server-side-repo-config.md @@ -36,7 +36,10 @@ repos: branch: /.*/ # apply_requirements sets the Apply Requirements for all repos that match. - apply_requirements: [approved, mergeable] + apply_requirements: [approved, mergeable, undiverged] + + # import_requirements sets the Import Requirements for all repos that match. + import_requirements: [approved, mergeable, undiverged] # workflow sets the workflow for all repos that match. # This workflow must be defined in the workflows section. @@ -93,9 +96,9 @@ workflows: ## Use Cases Here are some of the reasons you might want to use a repo config. -### Requiring PR Is Approved Before Apply +### Requiring PR Is Approved Before Apply or Import If you want to require that all (or specific) repos must have pull requests -approved before Atlantis will allow running `apply`, use the `apply_requirements` key. +approved before Atlantis will allow running `apply` or `import`, use the `apply_requirements` or `import_requirements` keys. For all repos: ```yaml @@ -103,6 +106,7 @@ For all repos: repos: - id: /.*/ apply_requirements: [approved] + import_requirements: [approved] ``` For a specific repo: @@ -111,13 +115,14 @@ For a specific repo: repos: - id: github.com/myorg/myrepo apply_requirements: [approved] + import_requirements: [approved] ``` -See [Apply Requirements](apply-requirements.html) for more details. 
+See [Command Requirements](command-requirements.html) for more details. -### Requiring PR Is "Mergeable" Before Apply +### Requiring PR Is "Mergeable" Before Apply or Import If you want to require that all (or specific) repos must have pull requests -in a mergeable state before Atlantis will allow running `apply`, use the `apply_requirements` key. +in a mergeable state before Atlantis will allow running `apply` or `import`, use the `apply_requirements` or `import_requirements` keys. For all repos: ```yaml @@ -125,6 +130,7 @@ For all repos: repos: - id: /.*/ apply_requirements: [mergeable] + import_requirements: [mergeable] ``` For a specific repo: @@ -133,11 +139,12 @@ For a specific repo: repos: - id: github.com/myorg/myrepo apply_requirements: [mergeable] + import_requirements: [mergeable] ``` -See [Apply Requirements](apply-requirements.html) for more details. +See [Command Requirements](command-requirements.html) for more details. -### Repos Can Set Their Own Apply Requirements +### Repos Can Set Their Own Apply or Import Requirements If you want all (or specific) repos to be able to override the default apply requirements, use the `allowed_overrides` key. @@ -148,9 +155,10 @@ repos: - id: /.*/ # The default will be approved. apply_requirements: [approved] + import_requirements: [approved] # But all repos can set their own using atlantis.yaml - allowed_overrides: [apply_requirements] + allowed_overrides: [apply_requirements, import_requirements] ``` To allow only a specific repo to override the default: ```yaml @@ -159,20 +167,22 @@ repos: # Set a default for all repos. - id: /.*/ apply_requirements: [approved] + import_requirements: [approved] # Allow a specific repo to override. - id: github.com/myorg/myrepo - allowed_overrides: [apply_requirements] + allowed_overrides: [apply_requirements, import_requirements] ``` Then each allowed repo can have an `atlantis.yaml` file that -sets `apply_requirements` to an empty array (disabling the requirement). +sets `apply_requirements` or `import_requirements` to an empty array (disabling the requirement). ```yaml # atlantis.yaml in the repo root version: 3 projects: - dir: . apply_requirements: [] + import_requirements: [] ``` ### Running Scripts Before Atlantis Workflows @@ -375,6 +385,7 @@ repos: - id: /.*/ branch: /.*/ apply_requirements: [] + import_requirements: [] workflow: default allowed_overrides: [] allow_custom_workflows: false @@ -401,7 +412,8 @@ If you set a workflow with the key `default`, it will override this. | id | string | none | yes | Value can be a regular expression when specified as /<regex>/ or an exact string match. Repo IDs are of the form `{vcs hostname}/{org}/{name}`, ex. `github.com/owner/repo`. Hostname is specified without scheme or port. For Bitbucket Server, {org} is the **name** of the project, not the key. | | branch | string | none | no | An regex matching pull requests by base branch (the branch the pull request is getting merged into). By default, all branches are matched | | workflow | string | none | no | A custom workflow. | -| apply_requirements | []string | none | no | Requirements that must be satisfied before `atlantis apply` can be run. Currently the only supported requirements are `approved`, `mergeable`, and `undiverged`. See [Apply Requirements](apply-requirements.html) for more details. | +| apply_requirements | []string | none | no | Requirements that must be satisfied before `atlantis apply` can be run. Currently the only supported requirements are `approved`, `mergeable`, and `undiverged`. 
See [Command Requirements](command-requirements.html) for more details. | +| import_requirements | []string | none | no | Requirements that must be satisfied before `atlantis import` can be run. Currently the only supported requirements are `approved`, `mergeable`, and `undiverged`. See [Command Requirements](command-requirements.html) for more details. | | allowed_overrides | []string | none | no | A list of restricted keys that `atlantis.yaml` files can override. The only supported keys are `apply_requirements`, `workflow`, `delete_source_branch_on_merge` and `repo_locking` | | allowed_workflows | []string | none | no | A list of workflows that `atlantis.yaml` files can select from. | | allow_custom_workflows | bool | false | no | Whether or not to allow [Custom Workflows](custom-workflows.html). | diff --git a/runatlantis.io/docs/using-atlantis.md b/runatlantis.io/docs/using-atlantis.md index fe499623b1..4a57f2f103 100644 --- a/runatlantis.io/docs/using-atlantis.md +++ b/runatlantis.io/docs/using-atlantis.md @@ -104,3 +104,31 @@ Because Atlantis under the hood is running `terraform apply plan.tfplan`, any Te They're ignored because they can't be specified for an already generated planfile. If you would like to specify these flags, do it while running `atlantis plan`. +--- +## atlantis import + +```bash +atlantis import [options] -- [terraform import flags] addr id +``` +### Explanation +Runs `terraform import` that matches the directory/project/workspace. + +### Examples +```bash +# Runs import +atlantis import -- addr id + +# Runs import in the root directory of the repo with workspace `default`. +atlantis import -d . -- addr id + +# Runs import in the `project1` directory of the repo with workspace `default` +atlantis import -d project1 -- addr id + +# Runs import in the root directory of the repo with workspace `staging` +atlantis import -w staging -- addr id +``` + +### Options +* `-d directory` Import a resource for this directory, relative to root of repo. Use `.` for root. +* `-p project` Import a resource for this project. Refers to the name of the project configured in the repo's [`atlantis.yaml` file](repo-level-atlantis-yaml.html). Cannot be used at same time as `-d` or `-w`. +* `-w workspace` Import a resource for this [Terraform workspace](https://www.terraform.io/docs/state/workspaces.html). If not using Terraform workspaces you can ignore this. diff --git a/server/controllers/api_controller_test.go b/server/controllers/api_controller_test.go index 8cae678b63..1ba56d02f2 100644 --- a/server/controllers/api_controller_test.go +++ b/server/controllers/api_controller_test.go @@ -3,8 +3,6 @@ package controllers_test import ( "bytes" "encoding/json" - "github.com/runatlantis/atlantis/server/events/command" - "github.com/runatlantis/atlantis/server/events/models" "net/http" "net/http/httptest" "testing" @@ -13,8 +11,10 @@ import ( "github.com/runatlantis/atlantis/server/controllers" . "github.com/runatlantis/atlantis/server/core/locking/mocks" "github.com/runatlantis/atlantis/server/events" + "github.com/runatlantis/atlantis/server/events/command" . "github.com/runatlantis/atlantis/server/events/mocks" . "github.com/runatlantis/atlantis/server/events/mocks/matchers" + "github.com/runatlantis/atlantis/server/events/models" . 
"github.com/runatlantis/atlantis/server/events/vcs/mocks" "github.com/runatlantis/atlantis/server/logging" "github.com/runatlantis/atlantis/server/metrics" diff --git a/server/controllers/events/events_controller_e2e_test.go b/server/controllers/events/events_controller_e2e_test.go index a5236d337c..0fa5e59d67 100644 --- a/server/controllers/events/events_controller_e2e_test.go +++ b/server/controllers/events/events_controller_e2e_test.go @@ -384,6 +384,44 @@ func TestGitHubWorkflow(t *testing.T) { {"exp-output-merge.txt"}, }, }, + { + Description: "import single project", + RepoDir: "import-single-project", + ModifiedFiles: []string{"main.tf"}, + ExpAutoplan: true, + Comments: []string{ + "atlantis import -- random_id.dummy1 AA", + "atlantis apply", + "atlantis import -- random_id.dummy2 BB", + "atlantis plan", + }, + ExpReplies: [][]string{ + {"exp-output-autoplan.txt"}, + {"exp-output-import-dummy1.txt"}, + {"exp-output-apply-no-projects.txt"}, + {"exp-output-import-dummy2.txt"}, + {"exp-output-plan-again.txt"}, + {"exp-output-merge.txt"}, + }, + }, + { + Description: "import multiple project", + RepoDir: "import-multiple-project", + ModifiedFiles: []string{"dir1/main.tf", "dir2/main.tf"}, + ExpAutoplan: true, + Comments: []string{ + "atlantis import -- random_id.dummy1 AA", + "atlantis import -d dir1 -- random_id.dummy1 AA", + "atlantis plan", + }, + ExpReplies: [][]string{ + {"exp-output-autoplan.txt"}, + {"exp-output-import-multiple-projects.txt"}, + {"exp-output-import-dummy1.txt"}, + {"exp-output-plan-again.txt"}, + {"exp-output-merge.txt"}, + }, + }, } for _, c := range cases { t.Run(c.Description, func(t *testing.T) { @@ -1017,6 +1055,10 @@ func setupE2E(t *testing.T, repoDir string) (events_controllers.VCSEventsControl ApplyStepRunner: &runtime.ApplyStepRunner{ TerraformExecutor: terraformClient, }, + ImportStepRunner: &runtime.ImportStepRunner{ + TerraformExecutor: terraformClient, + DefaultTFVersion: defaultTFVersion, + }, RunStepRunner: &runtime.RunStepRunner{ TerraformExecutor: terraformClient, DefaultTFVersion: defaultTFVersion, @@ -1025,7 +1067,7 @@ func setupE2E(t *testing.T, repoDir string) (events_controllers.VCSEventsControl WorkingDir: workingDir, Webhooks: &mockWebhookSender{}, WorkingDirLocker: locker, - AggregateApplyRequirements: &events.AggregateApplyRequirements{ + CommandRequirementHandler: &events.DefaultCommandRequirementHandler{ WorkingDir: workingDir, }, } @@ -1118,12 +1160,19 @@ func setupE2E(t *testing.T, repoDir string) (events_controllers.VCSEventsControl silenceNoProjects, ) + importCommandRunner := events.NewImportCommandRunner( + pullUpdater, + projectCommandBuilder, + projectCommandRunner, + ) + commentCommandRunnerByCmd := map[command.Name]events.CommentCommandRunner{ command.Plan: planCommandRunner, command.Apply: applyCommandRunner, command.ApprovePolicies: approvePoliciesCommandRunner, command.Unlock: unlockCommandRunner, command.Version: versionCommandRunner, + command.Import: importCommandRunner, } commandRunner := &events.DefaultCommandRunner{ diff --git a/server/controllers/events/testfixtures/test-repos/import-multiple-project/atlantis.yaml b/server/controllers/events/testfixtures/test-repos/import-multiple-project/atlantis.yaml new file mode 100644 index 0000000000..006db31ba5 --- /dev/null +++ b/server/controllers/events/testfixtures/test-repos/import-multiple-project/atlantis.yaml @@ -0,0 +1,4 @@ +version: 3 +projects: +- dir: dir1 +- dir: dir2 diff --git 
a/server/controllers/events/testfixtures/test-repos/import-multiple-project/dir1/main.tf b/server/controllers/events/testfixtures/test-repos/import-multiple-project/dir1/main.tf new file mode 100644 index 0000000000..2aa6a6437d --- /dev/null +++ b/server/controllers/events/testfixtures/test-repos/import-multiple-project/dir1/main.tf @@ -0,0 +1,4 @@ +resource "random_id" "dummy1" { + keepers = {} + byte_length = 1 +} diff --git a/server/controllers/events/testfixtures/test-repos/import-multiple-project/dir2/main.tf b/server/controllers/events/testfixtures/test-repos/import-multiple-project/dir2/main.tf new file mode 100644 index 0000000000..5292f29c85 --- /dev/null +++ b/server/controllers/events/testfixtures/test-repos/import-multiple-project/dir2/main.tf @@ -0,0 +1,4 @@ +resource "random_id" "dummy2" { + keepers = {} + byte_length = 1 +} diff --git a/server/controllers/events/testfixtures/test-repos/import-multiple-project/exp-output-autoplan.txt b/server/controllers/events/testfixtures/test-repos/import-multiple-project/exp-output-autoplan.txt new file mode 100644 index 0000000000..7c14657512 --- /dev/null +++ b/server/controllers/events/testfixtures/test-repos/import-multiple-project/exp-output-autoplan.txt @@ -0,0 +1,81 @@ +Ran Plan for 2 projects: + +1. dir: `dir1` workspace: `default` +1. dir: `dir2` workspace: `default` + +### 1. dir: `dir1` workspace: `default` +
Show Output + +```diff + +Terraform used the selected providers to generate the following execution +plan. Resource actions are indicated with the following symbols: ++ create + +Terraform will perform the following actions: + + # random_id.dummy1 will be created ++ resource "random_id" "dummy1" { + + b64_std = (known after apply) + + b64_url = (known after apply) + + byte_length = 1 + + dec = (known after apply) + + hex = (known after apply) + + id = (known after apply) + + keepers = {} + } + +Plan: 1 to add, 0 to change, 0 to destroy. + + +``` + +* :arrow_forward: To **apply** this plan, comment: + * `atlantis apply -d dir1` +* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) +* :repeat: To **plan** this project again, comment: + * `atlantis plan -d dir1` +
+Plan: 1 to add, 0 to change, 0 to destroy. + +--- +### 2. dir: `dir2` workspace: `default` +
Show Output + +```diff + +Terraform used the selected providers to generate the following execution +plan. Resource actions are indicated with the following symbols: ++ create + +Terraform will perform the following actions: + + # random_id.dummy2 will be created ++ resource "random_id" "dummy2" { + + b64_std = (known after apply) + + b64_url = (known after apply) + + byte_length = 1 + + dec = (known after apply) + + hex = (known after apply) + + id = (known after apply) + + keepers = {} + } + +Plan: 1 to add, 0 to change, 0 to destroy. + + +``` + +* :arrow_forward: To **apply** this plan, comment: + * `atlantis apply -d dir2` +* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) +* :repeat: To **plan** this project again, comment: + * `atlantis plan -d dir2` +
+Plan: 1 to add, 0 to change, 0 to destroy. + +--- +* :fast_forward: To **apply** all unapplied plans from this pull request, comment: + * `atlantis apply` +* :put_litter_in_its_place: To delete all plans and locks for the PR, comment: + * `atlantis unlock` diff --git a/server/controllers/events/testfixtures/test-repos/import-multiple-project/exp-output-import-dummy1.txt b/server/controllers/events/testfixtures/test-repos/import-multiple-project/exp-output-import-dummy1.txt new file mode 100644 index 0000000000..131d54c80e --- /dev/null +++ b/server/controllers/events/testfixtures/test-repos/import-multiple-project/exp-output-import-dummy1.txt @@ -0,0 +1,20 @@ +Ran Import for dir: `dir1` workspace: `default` + +```diff +random_id.dummy1: Importing from ID "AA"... +random_id.dummy1: Import prepared! + Prepared random_id for import +random_id.dummy1: Refreshing state... [id=AA] + +Import successful! + +The resources that were imported are shown above. These resources are now in +your Terraform state and will henceforth be managed by Terraform. + + +``` + +* :repeat: To **plan** this project again, comment: + * `atlantis plan -d dir1` + + diff --git a/server/controllers/events/testfixtures/test-repos/import-multiple-project/exp-output-import-multiple-projects.txt b/server/controllers/events/testfixtures/test-repos/import-multiple-project/exp-output-import-multiple-projects.txt new file mode 100644 index 0000000000..023af68ffe --- /dev/null +++ b/server/controllers/events/testfixtures/test-repos/import-multiple-project/exp-output-import-multiple-projects.txt @@ -0,0 +1 @@ +**Import Failed**: import cannot run on multiple projects. please specify one project. diff --git a/server/controllers/events/testfixtures/test-repos/import-multiple-project/exp-output-merge.txt b/server/controllers/events/testfixtures/test-repos/import-multiple-project/exp-output-merge.txt new file mode 100644 index 0000000000..9228a63148 --- /dev/null +++ b/server/controllers/events/testfixtures/test-repos/import-multiple-project/exp-output-merge.txt @@ -0,0 +1,4 @@ +Locks and plans deleted for the projects and workspaces modified in this pull request: + +- dir: `dir1` workspace: `default` +- dir: `dir2` workspace: `default` diff --git a/server/controllers/events/testfixtures/test-repos/import-multiple-project/exp-output-plan-again.txt b/server/controllers/events/testfixtures/test-repos/import-multiple-project/exp-output-plan-again.txt new file mode 100644 index 0000000000..0d52456e5a --- /dev/null +++ b/server/controllers/events/testfixtures/test-repos/import-multiple-project/exp-output-plan-again.txt @@ -0,0 +1,63 @@ +Ran Plan for 2 projects: + +1. dir: `dir1` workspace: `default` +1. dir: `dir2` workspace: `default` + +### 1. dir: `dir1` workspace: `default` +```diff +random_id.dummy1: Refreshing state... [id=AA] + +No changes. Your infrastructure matches the configuration. + +Terraform has compared your real infrastructure against your configuration +and found no differences, so no changes are needed. + +``` + +* :arrow_forward: To **apply** this plan, comment: + * `atlantis apply -d dir1` +* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) +* :repeat: To **plan** this project again, comment: + * `atlantis plan -d dir1` + +--- +### 2. dir: `dir2` workspace: `default` +
Show Output + +```diff + +Terraform used the selected providers to generate the following execution +plan. Resource actions are indicated with the following symbols: ++ create + +Terraform will perform the following actions: + + # random_id.dummy2 will be created ++ resource "random_id" "dummy2" { + + b64_std = (known after apply) + + b64_url = (known after apply) + + byte_length = 1 + + dec = (known after apply) + + hex = (known after apply) + + id = (known after apply) + + keepers = {} + } + +Plan: 1 to add, 0 to change, 0 to destroy. + + +``` + +* :arrow_forward: To **apply** this plan, comment: + * `atlantis apply -d dir2` +* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) +* :repeat: To **plan** this project again, comment: + * `atlantis plan -d dir2` +
+Plan: 1 to add, 0 to change, 0 to destroy. + +--- +* :fast_forward: To **apply** all unapplied plans from this pull request, comment: + * `atlantis apply` +* :put_litter_in_its_place: To delete all plans and locks for the PR, comment: + * `atlantis unlock` diff --git a/server/controllers/events/testfixtures/test-repos/import-single-project/exp-output-apply-no-projects.txt b/server/controllers/events/testfixtures/test-repos/import-single-project/exp-output-apply-no-projects.txt new file mode 100644 index 0000000000..70da860967 --- /dev/null +++ b/server/controllers/events/testfixtures/test-repos/import-single-project/exp-output-apply-no-projects.txt @@ -0,0 +1,4 @@ +Ran Apply for 0 projects: + + + diff --git a/server/controllers/events/testfixtures/test-repos/import-single-project/exp-output-autoplan.txt b/server/controllers/events/testfixtures/test-repos/import-single-project/exp-output-autoplan.txt new file mode 100644 index 0000000000..aa7b149fa7 --- /dev/null +++ b/server/controllers/events/testfixtures/test-repos/import-single-project/exp-output-autoplan.txt @@ -0,0 +1,52 @@ +Ran Plan for dir: `.` workspace: `default` + +
Show Output + +```diff + +Terraform used the selected providers to generate the following execution +plan. Resource actions are indicated with the following symbols: ++ create + +Terraform will perform the following actions: + + # random_id.dummy1 will be created ++ resource "random_id" "dummy1" { + + b64_std = (known after apply) + + b64_url = (known after apply) + + byte_length = 1 + + dec = (known after apply) + + hex = (known after apply) + + id = (known after apply) + + keepers = {} + } + + # random_id.dummy2 will be created ++ resource "random_id" "dummy2" { + + b64_std = (known after apply) + + b64_url = (known after apply) + + byte_length = 1 + + dec = (known after apply) + + hex = (known after apply) + + id = (known after apply) + + keepers = {} + } + +Plan: 2 to add, 0 to change, 0 to destroy. + + +``` + +* :arrow_forward: To **apply** this plan, comment: + * `atlantis apply -d .` +* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) +* :repeat: To **plan** this project again, comment: + * `atlantis plan -d .` +
+Plan: 2 to add, 0 to change, 0 to destroy. + +--- +* :fast_forward: To **apply** all unapplied plans from this pull request, comment: + * `atlantis apply` +* :put_litter_in_its_place: To delete all plans and locks for the PR, comment: + * `atlantis unlock` diff --git a/server/controllers/events/testfixtures/test-repos/import-single-project/exp-output-import-dummy1.txt b/server/controllers/events/testfixtures/test-repos/import-single-project/exp-output-import-dummy1.txt new file mode 100644 index 0000000000..4ea238b8df --- /dev/null +++ b/server/controllers/events/testfixtures/test-repos/import-single-project/exp-output-import-dummy1.txt @@ -0,0 +1,20 @@ +Ran Import for dir: `.` workspace: `default` + +```diff +random_id.dummy1: Importing from ID "AA"... +random_id.dummy1: Import prepared! + Prepared random_id for import +random_id.dummy1: Refreshing state... [id=AA] + +Import successful! + +The resources that were imported are shown above. These resources are now in +your Terraform state and will henceforth be managed by Terraform. + + +``` + +* :repeat: To **plan** this project again, comment: + * `atlantis plan -d .` + + diff --git a/server/controllers/events/testfixtures/test-repos/import-single-project/exp-output-import-dummy2.txt b/server/controllers/events/testfixtures/test-repos/import-single-project/exp-output-import-dummy2.txt new file mode 100644 index 0000000000..c1120ec254 --- /dev/null +++ b/server/controllers/events/testfixtures/test-repos/import-single-project/exp-output-import-dummy2.txt @@ -0,0 +1,20 @@ +Ran Import for dir: `.` workspace: `default` + +```diff +random_id.dummy2: Importing from ID "BB"... +random_id.dummy2: Import prepared! + Prepared random_id for import +random_id.dummy2: Refreshing state... [id=BB] + +Import successful! + +The resources that were imported are shown above. These resources are now in +your Terraform state and will henceforth be managed by Terraform. + + +``` + +* :repeat: To **plan** this project again, comment: + * `atlantis plan -d .` + + diff --git a/server/controllers/events/testfixtures/test-repos/import-single-project/exp-output-merge.txt b/server/controllers/events/testfixtures/test-repos/import-single-project/exp-output-merge.txt new file mode 100644 index 0000000000..872c5ee40c --- /dev/null +++ b/server/controllers/events/testfixtures/test-repos/import-single-project/exp-output-merge.txt @@ -0,0 +1,3 @@ +Locks and plans deleted for the projects and workspaces modified in this pull request: + +- dir: `.` workspace: `default` diff --git a/server/controllers/events/testfixtures/test-repos/import-single-project/exp-output-plan-again.txt b/server/controllers/events/testfixtures/test-repos/import-single-project/exp-output-plan-again.txt new file mode 100644 index 0000000000..035ebff06a --- /dev/null +++ b/server/controllers/events/testfixtures/test-repos/import-single-project/exp-output-plan-again.txt @@ -0,0 +1,22 @@ +Ran Plan for dir: `.` workspace: `default` + +```diff + +No changes. Your infrastructure matches the configuration. + +Terraform has compared your real infrastructure against your configuration +and found no differences, so no changes are needed. 
+ +``` + +* :arrow_forward: To **apply** this plan, comment: + * `atlantis apply -d .` +* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) +* :repeat: To **plan** this project again, comment: + * `atlantis plan -d .` + +--- +* :fast_forward: To **apply** all unapplied plans from this pull request, comment: + * `atlantis apply` +* :put_litter_in_its_place: To delete all plans and locks for the PR, comment: + * `atlantis unlock` diff --git a/server/controllers/events/testfixtures/test-repos/import-single-project/main.tf b/server/controllers/events/testfixtures/test-repos/import-single-project/main.tf new file mode 100644 index 0000000000..2e60a118f5 --- /dev/null +++ b/server/controllers/events/testfixtures/test-repos/import-single-project/main.tf @@ -0,0 +1,9 @@ +resource "random_id" "dummy1" { + keepers = {} + byte_length = 1 +} + +resource "random_id" "dummy2" { + keepers = {} + byte_length = 1 +} diff --git a/server/core/config/parser_validator_test.go b/server/core/config/parser_validator_test.go index 08e07987fc..a9b9ad2781 100644 --- a/server/core/config/parser_validator_test.go +++ b/server/core/config/parser_validator_test.go @@ -174,6 +174,7 @@ workflows: }, }, }, + Import: valid.DefaultImportStage, }, }, }, @@ -340,12 +341,7 @@ workflows: }, }, Workflows: map[string]valid.Workflow{ - "default": { - Name: "default", - Plan: valid.DefaultPlanStage, - Apply: valid.DefaultApplyStage, - PolicyCheck: valid.DefaultPolicyCheckStage, - }, + "default": defaultWorkflow("default"), }, }, }, @@ -377,12 +373,7 @@ workflows: }, }, Workflows: map[string]valid.Workflow{ - "myworkflow": { - Name: "myworkflow", - Apply: valid.DefaultApplyStage, - Plan: valid.DefaultPlanStage, - PolicyCheck: valid.DefaultPolicyCheckStage, - }, + "myworkflow": defaultWorkflow("myworkflow"), }, }, }, @@ -416,12 +407,7 @@ workflows: }, }, Workflows: map[string]valid.Workflow{ - "myworkflow": { - Name: "myworkflow", - Apply: valid.DefaultApplyStage, - Plan: valid.DefaultPlanStage, - PolicyCheck: valid.DefaultPolicyCheckStage, - }, + "myworkflow": defaultWorkflow("myworkflow"), }, }, }, @@ -455,12 +441,7 @@ workflows: }, }, Workflows: map[string]valid.Workflow{ - "myworkflow": { - Name: "myworkflow", - Apply: valid.DefaultApplyStage, - Plan: valid.DefaultPlanStage, - PolicyCheck: valid.DefaultPolicyCheckStage, - }, + "myworkflow": defaultWorkflow("myworkflow"), }, }, }, @@ -494,12 +475,7 @@ workflows: }, }, Workflows: map[string]valid.Workflow{ - "myworkflow": { - Name: "myworkflow", - Apply: valid.DefaultApplyStage, - Plan: valid.DefaultPlanStage, - PolicyCheck: valid.DefaultPolicyCheckStage, - }, + "myworkflow": defaultWorkflow("myworkflow"), }, }, }, @@ -533,12 +509,7 @@ workflows: }, }, Workflows: map[string]valid.Workflow{ - "myworkflow": { - Name: "myworkflow", - Apply: valid.DefaultApplyStage, - Plan: valid.DefaultPlanStage, - PolicyCheck: valid.DefaultPolicyCheckStage, - }, + "myworkflow": defaultWorkflow("myworkflow"), }, }, }, @@ -572,12 +543,7 @@ workflows: }, }, Workflows: map[string]valid.Workflow{ - "myworkflow": { - Name: "myworkflow", - Apply: valid.DefaultApplyStage, - Plan: valid.DefaultPlanStage, - PolicyCheck: valid.DefaultPolicyCheckStage, - }, + "myworkflow": defaultWorkflow("myworkflow"), }, }, }, @@ -611,12 +577,7 @@ workflows: }, }, Workflows: map[string]valid.Workflow{ - "myworkflow": { - Name: "myworkflow", - Apply: valid.DefaultApplyStage, - Plan: valid.DefaultPlanStage, - PolicyCheck: valid.DefaultPolicyCheckStage, - }, + "myworkflow": defaultWorkflow("myworkflow"), }, 
}, }, @@ -650,12 +611,7 @@ workflows: }, }, Workflows: map[string]valid.Workflow{ - "myworkflow": { - Name: "myworkflow", - Apply: valid.DefaultApplyStage, - Plan: valid.DefaultPlanStage, - PolicyCheck: valid.DefaultPolicyCheckStage, - }, + "myworkflow": defaultWorkflow("myworkflow"), }, }, }, @@ -795,6 +751,9 @@ workflows: steps: - plan # NOTE: we don't validate if they make sense - apply + import: + steps: + - import `, exp: valid.RepoCfg{ Version: 3, @@ -841,6 +800,13 @@ workflows: }, }, }, + Import: valid.Stage{ + Steps: []valid.Step{ + { + StepName: "import", + }, + }, + }, }, }, }, @@ -872,6 +838,10 @@ workflows: extra_args: [a, b] - apply: extra_args: ["a", "b"] + import: + steps: + - import: + extra_args: ["a", "b"] `, exp: valid.RepoCfg{ Version: 3, @@ -920,6 +890,14 @@ workflows: }, }, }, + Import: valid.Stage{ + Steps: []valid.Step{ + { + StepName: "import", + ExtraArgs: []string{"a", "b"}, + }, + }, + }, }, }, }, @@ -941,6 +919,9 @@ workflows: apply: steps: - run: echo apply "arg 2" + import: + steps: + - run: echo apply "arg 3" `, exp: valid.RepoCfg{ Version: 3, @@ -981,6 +962,14 @@ workflows: }, }, }, + Import: valid.Stage{ + Steps: []valid.Step{ + { + StepName: "run", + RunCommand: "echo apply \"arg 3\"", + }, + }, + }, }, }, }, @@ -1008,6 +997,11 @@ workflows: - env: name: env_name command: command and args + import: + steps: + - env: + name: env_name + value: env_value `, exp: valid.RepoCfg{ Version: 3, @@ -1051,6 +1045,15 @@ workflows: }, }, }, + Import: valid.Stage{ + Steps: []valid.Step{ + { + StepName: "env", + EnvVarName: "env_name", + EnvVarValue: "env_value", + }, + }, + }, }, }, }, @@ -1179,6 +1182,17 @@ func TestParseGlobalCfg(t *testing.T) { }, }, }, + Import: valid.Stage{ + Steps: []valid.Step{ + { + StepName: "run", + RunCommand: "custom command", + }, + { + StepName: "import", + }, + }, + }, } conftestVersion, _ := version.NewVersion("v1.0.0") @@ -1222,7 +1236,7 @@ func TestParseGlobalCfg(t *testing.T) { input: `repos: - id: /.*/ allowed_overrides: [invalid]`, - expErr: "repos: (0: (allowed_overrides: \"invalid\" is not a valid override, only \"apply_requirements\", \"workflow\", \"delete_source_branch_on_merge\" and \"repo_locking\" are supported.).).", + expErr: "repos: (0: (allowed_overrides: \"invalid\" is not a valid override, only \"apply_requirements\", \"import_requirements\", \"workflow\", \"delete_source_branch_on_merge\" and \"repo_locking\" are supported.).).", }, "invalid apply_requirement": { input: `repos: @@ -1230,6 +1244,12 @@ func TestParseGlobalCfg(t *testing.T) { apply_requirements: [invalid]`, expErr: "repos: (0: (apply_requirements: \"invalid\" is not a valid apply_requirement, only \"approved\", \"mergeable\" and \"undiverged\" are supported.).).", }, + "invalid import_requirement": { + input: `repos: +- id: /.*/ + import_requirements: [invalid]`, + expErr: "repos: (0: (import_requirements: \"invalid\" is not a valid import_requirement, only \"approved\", \"mergeable\" and \"undiverged\" are supported.).).", + }, "no workflows key": { input: `repos: []`, exp: defaultCfg, @@ -1246,12 +1266,7 @@ workflows: Repos: defaultCfg.Repos, Workflows: map[string]valid.Workflow{ "default": defaultCfg.Workflows["default"], - "name": { - Name: "name", - Apply: valid.DefaultApplyStage, - Plan: valid.DefaultPlanStage, - PolicyCheck: valid.DefaultPolicyCheckStage, - }, + "name": defaultWorkflow("name"), }, }, }, @@ -1261,17 +1276,14 @@ workflows: name: apply: plan: + policy_check: + import: `, exp: valid.GlobalCfg{ Repos: defaultCfg.Repos, Workflows: 
map[string]valid.Workflow{ "default": defaultCfg.Workflows["default"], - "name": { - Name: "name", - Apply: valid.DefaultApplyStage, - Plan: valid.DefaultPlanStage, - PolicyCheck: valid.DefaultPolicyCheckStage, - }, + "name": defaultWorkflow("name"), }, }, }, @@ -1282,17 +1294,17 @@ workflows: apply: steps: plan: - steps:`, + steps: + policy_check: + steps: + import: + steps: +`, exp: valid.GlobalCfg{ Repos: defaultCfg.Repos, Workflows: map[string]valid.Workflow{ "default": defaultCfg.Workflows["default"], - "name": { - Name: "name", - Plan: valid.DefaultPlanStage, - PolicyCheck: valid.DefaultPolicyCheckStage, - Apply: valid.DefaultApplyStage, - }, + "name": defaultWorkflow("name"), }, }, }, @@ -1307,7 +1319,7 @@ repos: workflow: custom1 post_workflow_hooks: - run: custom workflow command - allowed_overrides: [apply_requirements, workflow, delete_source_branch_on_merge] + allowed_overrides: [apply_requirements, import_requirements, workflow, delete_source_branch_on_merge] allow_custom_workflows: true - id: /.*/ branch: /(master|main)/ @@ -1333,6 +1345,10 @@ workflows: steps: - run: custom command - apply + import: + steps: + - run: custom command + - import policies: conftest_version: v1.0.0 policy_sets: @@ -1349,7 +1365,7 @@ policies: PreWorkflowHooks: preWorkflowHooks, Workflow: &customWorkflow1, PostWorkflowHooks: postWorkflowHooks, - AllowedOverrides: []string{"apply_requirements", "workflow", "delete_source_branch_on_merge"}, + AllowedOverrides: []string{"apply_requirements", "import_requirements", "workflow", "delete_source_branch_on_merge"}, AllowCustomWorkflows: Bool(true), }, { @@ -1421,14 +1437,17 @@ workflows: policy_check: steps: [] apply: - steps: [] + steps: [] + import: + steps: [] `, exp: valid.GlobalCfg{ Repos: []valid.Repo{ { - IDRegex: regexp.MustCompile(".*"), - BranchRegex: regexp.MustCompile(".*"), - ApplyRequirements: []string{}, + IDRegex: regexp.MustCompile(".*"), + BranchRegex: regexp.MustCompile(".*"), + ApplyRequirements: []string{}, + ImportRequirements: []string{}, Workflow: &valid.Workflow{ Name: "default", Apply: valid.Stage{ @@ -1445,6 +1464,9 @@ workflows: }, }, }, + Import: valid.Stage{ + Steps: nil, + }, }, AllowedWorkflows: []string{}, AllowedOverrides: []string{}, @@ -1555,6 +1577,14 @@ func TestParserValidator_ParseGlobalCfgJSON(t *testing.T) { }, }, }, + Import: valid.Stage{ + Steps: []valid.Step{ + { + StepName: "run", + RunCommand: "custom import", + }, + }, + }, } conftestVersion, _ := version.NewVersion("v1.0.0") @@ -1612,6 +1642,11 @@ func TestParserValidator_ParseGlobalCfgJSON(t *testing.T) { "steps": [ {"run": "my custom command"} ] + }, + "import": { + "steps": [ + {"run": "custom import"} + ] } } }, @@ -1776,3 +1811,13 @@ func String(v string) *string { return &v } // Bool is a helper routine that allocates a new bool value // to store v and returns a pointer to it. 
func Bool(v bool) *bool { return &v } + +func defaultWorkflow(name string) valid.Workflow { + return valid.Workflow{ + Name: name, + Apply: valid.DefaultApplyStage, + Plan: valid.DefaultPlanStage, + PolicyCheck: valid.DefaultPolicyCheckStage, + Import: valid.DefaultImportStage, + } +} diff --git a/server/core/config/raw/global_cfg.go b/server/core/config/raw/global_cfg.go index b998342133..f9d28aef25 100644 --- a/server/core/config/raw/global_cfg.go +++ b/server/core/config/raw/global_cfg.go @@ -23,6 +23,7 @@ type Repo struct { ID string `yaml:"id" json:"id"` Branch string `yaml:"branch" json:"branch"` ApplyRequirements []string `yaml:"apply_requirements" json:"apply_requirements"` + ImportRequirements []string `yaml:"import_requirements" json:"import_requirements"` PreWorkflowHooks []WorkflowHook `yaml:"pre_workflow_hooks" json:"pre_workflow_hooks"` Workflow *string `yaml:"workflow,omitempty" json:"workflow,omitempty"` PostWorkflowHooks []WorkflowHook `yaml:"post_workflow_hooks" json:"post_workflow_hooks"` @@ -95,9 +96,7 @@ func (g GlobalCfg) ToValid(defaultCfg valid.GlobalCfg) valid.GlobalCfg { // assumes: globalcfg is always initialized with one repo .* applyReqs := defaultCfg.Repos[0].ApplyRequirements - var globalApplyReqs []string - for _, req := range applyReqs { for _, nonOverrideableReq := range valid.NonOverrideableApplyReqs { if req == nonOverrideableReq { @@ -105,6 +104,7 @@ func (g GlobalCfg) ToValid(defaultCfg valid.GlobalCfg) valid.GlobalCfg { } } } + globalImportReqs := defaultCfg.Repos[0].ImportRequirements for k, v := range g.Workflows { validatedWorkflow := v.ToValid(k) @@ -125,7 +125,7 @@ func (g GlobalCfg) ToValid(defaultCfg valid.GlobalCfg) valid.GlobalCfg { var repos []valid.Repo for _, r := range g.Repos { - repos = append(repos, r.ToValid(workflows, globalApplyReqs)) + repos = append(repos, r.ToValid(workflows, globalApplyReqs, globalImportReqs)) } repos = append(defaultCfg.Repos, repos...) 
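Aside (not part of the patch): since globalImportReqs above is seeded from defaultCfg.Repos[0].ImportRequirements, here is a minimal Go sketch of what that default now carries. It only uses NewGlobalCfgFromArgs and the Repo fields added in this change; the expected output is inferred from the accompanying tests.

	package main

	import (
		"fmt"

		"github.com/runatlantis/atlantis/server/core/config/valid"
	)

	func main() {
		// The boolean args now seed both requirement lists on the single
		// default ".*" repo that every GlobalCfg starts with.
		cfg := valid.NewGlobalCfgFromArgs(valid.GlobalCfgArgs{
			AllowRepoCfg:  false,
			MergeableReq:  false,
			ApprovedReq:   true,
			UnDivergedReq: false,
		})
		fmt.Println(cfg.Repos[0].ApplyRequirements)  // [approved]
		fmt.Println(cfg.Repos[0].ImportRequirements) // [approved]
	}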
@@ -174,8 +174,8 @@ func (r Repo) Validate() error { overridesValid := func(value interface{}) error { overrides := value.([]string) for _, o := range overrides { - if o != valid.ApplyRequirementsKey && o != valid.WorkflowKey && o != valid.DeleteSourceBranchOnMergeKey && o != valid.RepoLockingKey { - return fmt.Errorf("%q is not a valid override, only %q, %q, %q and %q are supported", o, valid.ApplyRequirementsKey, valid.WorkflowKey, valid.DeleteSourceBranchOnMergeKey, valid.RepoLockingKey) + if o != valid.ApplyRequirementsKey && o != valid.ImportRequirementsKey && o != valid.WorkflowKey && o != valid.DeleteSourceBranchOnMergeKey && o != valid.RepoLockingKey { + return fmt.Errorf("%q is not a valid override, only %q, %q, %q, %q and %q are supported", o, valid.ApplyRequirementsKey, valid.ImportRequirementsKey, valid.WorkflowKey, valid.DeleteSourceBranchOnMergeKey, valid.RepoLockingKey) } } return nil @@ -197,12 +197,13 @@ func (r Repo) Validate() error { validation.Field(&r.Branch, validation.By(branchValid)), validation.Field(&r.AllowedOverrides, validation.By(overridesValid)), validation.Field(&r.ApplyRequirements, validation.By(validApplyReq)), + validation.Field(&r.ImportRequirements, validation.By(validImportReq)), validation.Field(&r.Workflow, validation.By(workflowExists)), validation.Field(&r.DeleteSourceBranchOnMerge, validation.By(deleteSourceBranchOnMergeValid)), ) } -func (r Repo) ToValid(workflows map[string]valid.Workflow, globalApplyReqs []string) valid.Repo { +func (r Repo) ToValid(workflows map[string]valid.Workflow, globalApplyReqs []string, globalImportReqs []string) valid.Repo { var id string var idRegex *regexp.Regexp if r.HasRegexID() { @@ -243,25 +244,36 @@ func (r Repo) ToValid(workflows map[string]valid.Workflow, globalApplyReqs []str } var mergedApplyReqs []string - mergedApplyReqs = append(mergedApplyReqs, r.ApplyRequirements...) + var mergedImportReqs []string + mergedImportReqs = append(mergedImportReqs, r.ImportRequirements...) // only add global reqs if they don't exist already. 
-OUTER: +OuterGlobalApplyReqs: for _, globalReq := range globalApplyReqs { for _, currReq := range r.ApplyRequirements { if globalReq == currReq { - continue OUTER + continue OuterGlobalApplyReqs } } mergedApplyReqs = append(mergedApplyReqs, globalReq) } +OuterGlobalImportReqs: + for _, globalReq := range globalImportReqs { + for _, currReq := range r.ImportRequirements { + if globalReq == currReq { + continue OuterGlobalImportReqs + } + } + mergedImportReqs = append(mergedImportReqs, globalReq) + } return valid.Repo{ ID: id, IDRegex: idRegex, BranchRegex: branchRegex, ApplyRequirements: mergedApplyReqs, + ImportRequirements: mergedImportReqs, PreWorkflowHooks: preWorkflowHooks, Workflow: workflow, PostWorkflowHooks: postWorkflowHooks, diff --git a/server/core/config/raw/project.go b/server/core/config/raw/project.go index 081eb87903..add1db320e 100644 --- a/server/core/config/raw/project.go +++ b/server/core/config/raw/project.go @@ -14,10 +14,10 @@ import ( ) const ( - DefaultWorkspace = "default" - ApprovedApplyRequirement = "approved" - MergeableApplyRequirement = "mergeable" - UnDivergedApplyRequirement = "undiverged" + DefaultWorkspace = "default" + ApprovedRequirement = "approved" + MergeableRequirement = "mergeable" + UnDivergedRequirement = "undiverged" ) type Project struct { @@ -29,6 +29,7 @@ type Project struct { TerraformVersion *string `yaml:"terraform_version,omitempty"` Autoplan *Autoplan `yaml:"autoplan,omitempty"` ApplyRequirements []string `yaml:"apply_requirements,omitempty"` + ImportRequirements []string `yaml:"import_requirements,omitempty"` DeleteSourceBranchOnMerge *bool `yaml:"delete_source_branch_on_merge,omitempty"` RepoLocking *bool `yaml:"repo_locking,omitempty"` ExecutionOrderGroup *int `yaml:"execution_order_group,omitempty"` @@ -73,6 +74,7 @@ func (p Project) Validate() error { return validation.ValidateStruct(&p, validation.Field(&p.Dir, validation.Required, validation.By(hasDotDot)), validation.Field(&p.ApplyRequirements, validation.By(validApplyReq)), + validation.Field(&p.ImportRequirements, validation.By(validImportReq)), validation.Field(&p.TerraformVersion, validation.By(VersionValidator)), validation.Field(&p.Name, validation.By(validName)), validation.Field(&p.Branch, validation.By(branchValid)), @@ -110,8 +112,9 @@ func (p Project) ToValid() valid.Project { v.Autoplan = p.Autoplan.ToValid() } - // There are no default apply requirements. + // There are no default apply/import requirements. 
v.ApplyRequirements = p.ApplyRequirements + v.ImportRequirements = p.ImportRequirements v.Name = p.Name @@ -142,8 +145,18 @@ func validProjectName(name string) bool { func validApplyReq(value interface{}) error { reqs := value.([]string) for _, r := range reqs { - if r != ApprovedApplyRequirement && r != MergeableApplyRequirement && r != UnDivergedApplyRequirement { - return fmt.Errorf("%q is not a valid apply_requirement, only %q, %q and %q are supported", r, ApprovedApplyRequirement, MergeableApplyRequirement, UnDivergedApplyRequirement) + if r != ApprovedRequirement && r != MergeableRequirement && r != UnDivergedRequirement { + return fmt.Errorf("%q is not a valid apply_requirement, only %q, %q and %q are supported", r, ApprovedRequirement, MergeableRequirement, UnDivergedRequirement) + } + } + return nil +} + +func validImportReq(value interface{}) error { + reqs := value.([]string) + for _, r := range reqs { + if r != ApprovedRequirement && r != MergeableRequirement && r != UnDivergedRequirement { + return fmt.Errorf("%q is not a valid import_requirement, only %q, %q and %q are supported", r, ApprovedRequirement, MergeableRequirement, UnDivergedRequirement) } } return nil diff --git a/server/core/config/raw/project_test.go b/server/core/config/raw/project_test.go index 1698a35678..2fa360c882 100644 --- a/server/core/config/raw/project_test.go +++ b/server/core/config/raw/project_test.go @@ -21,14 +21,15 @@ func TestProject_UnmarshalYAML(t *testing.T) { description: "omit unset fields", input: "", exp: raw.Project{ - Dir: nil, - Workspace: nil, - Workflow: nil, - TerraformVersion: nil, - Autoplan: nil, - ApplyRequirements: nil, - Name: nil, - Branch: nil, + Dir: nil, + Workspace: nil, + Workflow: nil, + TerraformVersion: nil, + Autoplan: nil, + ApplyRequirements: nil, + ImportRequirements: nil, + Name: nil, + Branch: nil, }, }, { @@ -45,6 +46,8 @@ autoplan: enabled: false apply_requirements: - mergeable +import_requirements: +- mergeable execution_order_group: 10`, exp: raw.Project{ Name: String("myname"), @@ -58,6 +61,7 @@ execution_order_group: 10`, Enabled: Bool(false), }, ApplyRequirements: []string{"mergeable"}, + ImportRequirements: []string{"mergeable"}, ExecutionOrderGroup: Int(10), }, }, @@ -180,6 +184,22 @@ func TestProject_Validate(t *testing.T) { }, expErr: "", }, + { + description: "import reqs with unsupported", + input: raw.Project{ + Dir: String("."), + ImportRequirements: []string{"unsupported"}, + }, + expErr: "import_requirements: \"unsupported\" is not a valid import_requirement, only \"approved\", \"mergeable\" and \"undiverged\" are supported.", + }, + { + description: "import reqs with undiverged, mergeable and approved requirements", + input: raw.Project{ + Dir: String("."), + ImportRequirements: []string{"undiverged", "mergeable", "approved"}, + }, + expErr: "", + }, { description: "empty tf version string", input: raw.Project{ diff --git a/server/core/config/raw/repo_cfg_test.go b/server/core/config/raw/repo_cfg_test.go index d5493fcc25..6bde9473f0 100644 --- a/server/core/config/raw/repo_cfg_test.go +++ b/server/core/config/raw/repo_cfg_test.go @@ -307,6 +307,7 @@ func TestConfig_ToValid(t *testing.T) { Plan: &raw.Stage{}, Apply: nil, PolicyCheck: nil, + Import: nil, }, }, }, @@ -316,25 +317,11 @@ func TestConfig_ToValid(t *testing.T) { ParallelApply: false, Workflows: map[string]valid.Workflow{ "myworkflow": { - Name: "myworkflow", - Plan: valid.DefaultPlanStage, - PolicyCheck: valid.Stage{ - Steps: []valid.Step{ - { - StepName: "show", - }, - { - StepName: 
"policy_check", - }, - }, - }, - Apply: valid.Stage{ - Steps: []valid.Step{ - { - StepName: "apply", - }, - }, - }, + Name: "myworkflow", + Plan: valid.DefaultPlanStage, + PolicyCheck: valid.DefaultPolicyCheckStage, + Apply: valid.DefaultApplyStage, + Import: valid.DefaultImportStage, }, }, }, @@ -368,6 +355,13 @@ func TestConfig_ToValid(t *testing.T) { }, }, }, + Import: &raw.Stage{ + Steps: []raw.Step{ + { + Key: String("import"), + }, + }, + }, }, }, Projects: []raw.Project{ @@ -404,6 +398,13 @@ func TestConfig_ToValid(t *testing.T) { }, }, }, + Import: valid.Stage{ + Steps: []valid.Step{ + { + StepName: "import", + }, + }, + }, }, }, Projects: []valid.Project{ diff --git a/server/core/config/raw/step.go b/server/core/config/raw/step.go index a32a0e60c4..3d5e41f675 100644 --- a/server/core/config/raw/step.go +++ b/server/core/config/raw/step.go @@ -24,6 +24,7 @@ const ( InitStepName = "init" EnvStepName = "env" MultiEnvStepName = "multienv" + ImportStepName = "import" ) // Step represents a single action/command to perform. In YAML, it can be set as @@ -88,7 +89,8 @@ func (s Step) validStepName(stepName string) bool { stepName == EnvStepName || stepName == MultiEnvStepName || stepName == ShowStepName || - stepName == PolicyCheckStepName + stepName == PolicyCheckStepName || + stepName == ImportStepName } func (s Step) Validate() error { diff --git a/server/core/config/raw/step_test.go b/server/core/config/raw/step_test.go index b45cfd90d9..3321dce360 100644 --- a/server/core/config/raw/step_test.go +++ b/server/core/config/raw/step_test.go @@ -467,6 +467,15 @@ func TestStep_ToValid(t *testing.T) { EnvVarName: "test", }, }, + { + description: "import step", + input: raw.Step{ + Key: String("import"), + }, + exp: valid.Step{ + StepName: "import", + }, + }, { description: "init extra_args", input: raw.Step{ @@ -523,6 +532,20 @@ func TestStep_ToValid(t *testing.T) { ExtraArgs: []string{"arg1", "arg2"}, }, }, + { + description: "import extra_args", + input: raw.Step{ + Map: MapType{ + "import": { + "extra_args": []string{"arg1", "arg2"}, + }, + }, + }, + exp: valid.Step{ + StepName: "import", + ExtraArgs: []string{"arg1", "arg2"}, + }, + }, { description: "run step", input: raw.Step{ diff --git a/server/core/config/raw/workflow.go b/server/core/config/raw/workflow.go index 59050dce2b..d5164a8319 100644 --- a/server/core/config/raw/workflow.go +++ b/server/core/config/raw/workflow.go @@ -9,6 +9,7 @@ type Workflow struct { Apply *Stage `yaml:"apply,omitempty" json:"apply,omitempty"` Plan *Stage `yaml:"plan,omitempty" json:"plan,omitempty"` PolicyCheck *Stage `yaml:"policy_check,omitempty" json:"policy_check,omitempty"` + Import *Stage `yaml:"import,omitempty" json:"import,omitempty"` } func (w Workflow) Validate() error { @@ -16,6 +17,7 @@ func (w Workflow) Validate() error { validation.Field(&w.Apply), validation.Field(&w.Plan), validation.Field(&w.PolicyCheck), + validation.Field(&w.Import), ) } @@ -35,6 +37,7 @@ func (w Workflow) ToValid(name string) valid.Workflow { v.Apply = w.toValidStage(w.Apply, valid.DefaultApplyStage) v.Plan = w.toValidStage(w.Plan, valid.DefaultPlanStage) v.PolicyCheck = w.toValidStage(w.PolicyCheck, valid.DefaultPolicyCheckStage) + v.Import = w.toValidStage(w.Import, valid.DefaultImportStage) return v } diff --git a/server/core/config/raw/workflow_test.go b/server/core/config/raw/workflow_test.go index 8ff25e1a24..04c753a63b 100644 --- a/server/core/config/raw/workflow_test.go +++ b/server/core/config/raw/workflow_test.go @@ -148,6 +148,7 @@ func 
TestWorkflow_ToValid(t *testing.T) { Apply: valid.DefaultApplyStage, Plan: valid.DefaultPlanStage, PolicyCheck: valid.DefaultPolicyCheckStage, + Import: valid.DefaultImportStage, }, }, { @@ -174,6 +175,13 @@ func TestWorkflow_ToValid(t *testing.T) { }, }, }, + Import: &raw.Stage{ + Steps: []raw.Step{ + { + Key: String("import"), + }, + }, + }, }, exp: valid.Workflow{ Apply: valid.Stage{ @@ -197,6 +205,13 @@ func TestWorkflow_ToValid(t *testing.T) { }, }, }, + Import: valid.Stage{ + Steps: []valid.Step{ + { + StepName: "import", + }, + }, + }, }, }, } diff --git a/server/core/config/valid/global_cfg.go b/server/core/config/valid/global_cfg.go index ecf4ec5cf8..d0361a31fb 100644 --- a/server/core/config/valid/global_cfg.go +++ b/server/core/config/valid/global_cfg.go @@ -9,11 +9,12 @@ import ( "github.com/runatlantis/atlantis/server/logging" ) -const MergeableApplyReq = "mergeable" -const ApprovedApplyReq = "approved" -const UnDivergedApplyReq = "undiverged" -const PoliciesPassedApplyReq = "policies_passed" +const MergeableCommandReq = "mergeable" +const ApprovedCommandReq = "approved" +const UnDivergedCommandReq = "undiverged" +const PoliciesPassedCommandReq = "policies_passed" const ApplyRequirementsKey = "apply_requirements" +const ImportRequirementsKey = "import_requirements" const PreWorkflowHooksKey = "pre_workflow_hooks" const WorkflowKey = "workflow" const PostWorkflowHooksKey = "post_workflow_hooks" @@ -29,7 +30,7 @@ const RepoLockingKey = "repo_locking" // TODO: Make this more customizable, not everyone wants this rigid workflow // maybe something along the lines of defining overridable/non-overrideable apply // requirements in the config and removing the flag to enable policy checking. -var NonOverrideableApplyReqs = []string{PoliciesPassedApplyReq} +var NonOverrideableApplyReqs = []string{PoliciesPassedCommandReq} // GlobalCfg is the final parsed version of server-side repo config. type GlobalCfg struct { @@ -63,6 +64,7 @@ type Repo struct { IDRegex *regexp.Regexp BranchRegex *regexp.Regexp ApplyRequirements []string + ImportRequirements []string PreWorkflowHooks []*WorkflowHook Workflow *Workflow PostWorkflowHooks []*WorkflowHook @@ -75,6 +77,7 @@ type Repo struct { type MergedProjectCfg struct { ApplyRequirements []string + ImportRequirements []string Workflow Workflow AllowedWorkflows []string RepoRelDir string @@ -129,6 +132,18 @@ var DefaultPlanStage = Stage{ }, } +// DefaultImportStage is the Atlantis default import stage. +var DefaultImportStage = Stage{ + Steps: []Step{ + { + StepName: "init", + }, + { + StepName: "import", + }, + }, +} + // Deprecated: use NewGlobalCfgFromArgs func NewGlobalCfgWithHooks(allowRepoCfg bool, mergeableReq bool, approvedReq bool, unDivergedReq bool, preWorkflowHooks []*WorkflowHook, postWorkflowHooks []*WorkflowHook) GlobalCfg { return NewGlobalCfgFromArgs(GlobalCfgArgs{ @@ -172,31 +187,31 @@ func NewGlobalCfgFromArgs(args GlobalCfgArgs) GlobalCfg { Apply: DefaultApplyStage, Plan: DefaultPlanStage, PolicyCheck: DefaultPolicyCheckStage, + Import: DefaultImportStage, } // Must construct slices here instead of using a `var` declaration because // we treat nil slices differently. 
- applyReqs := []string{} + commandReqs := []string{} allowedOverrides := []string{} allowedWorkflows := []string{} if args.MergeableReq { - applyReqs = append(applyReqs, MergeableApplyReq) + commandReqs = append(commandReqs, MergeableCommandReq) } if args.ApprovedReq { - applyReqs = append(applyReqs, ApprovedApplyReq) + commandReqs = append(commandReqs, ApprovedCommandReq) } if args.UnDivergedReq { - applyReqs = append(applyReqs, UnDivergedApplyReq) + commandReqs = append(commandReqs, UnDivergedCommandReq) } - if args.PolicyCheckEnabled { - applyReqs = append(applyReqs, PoliciesPassedApplyReq) + commandReqs = append(commandReqs, PoliciesPassedCommandReq) } allowCustomWorkflows := false deleteSourceBranchOnMerge := false repoLockingKey := true if args.AllowRepoCfg { - allowedOverrides = []string{ApplyRequirementsKey, WorkflowKey, DeleteSourceBranchOnMergeKey, RepoLockingKey} + allowedOverrides = []string{ApplyRequirementsKey, ImportRequirementsKey, WorkflowKey, DeleteSourceBranchOnMergeKey, RepoLockingKey} allowCustomWorkflows = true } @@ -205,7 +220,8 @@ func NewGlobalCfgFromArgs(args GlobalCfgArgs) GlobalCfg { { IDRegex: regexp.MustCompile(".*"), BranchRegex: regexp.MustCompile(".*"), - ApplyRequirements: applyReqs, + ApplyRequirements: commandReqs, + ImportRequirements: commandReqs, PreWorkflowHooks: args.PreWorkflowHooks, Workflow: &defaultWorkflow, PostWorkflowHooks: args.PostWorkflowHooks, @@ -250,7 +266,7 @@ func (r Repo) IDString() string { // final config. It assumes that all configs have been validated. func (g GlobalCfg) MergeProjectCfg(log logging.SimpleLogging, repoID string, proj Project, rCfg RepoCfg) MergedProjectCfg { log.Debug("MergeProjectCfg started") - applyReqs, workflow, allowedOverrides, allowCustomWorkflows, deleteSourceBranchOnMerge, repoLocking := g.getMatchingCfg(log, repoID) + applyReqs, importReqs, workflow, allowedOverrides, allowCustomWorkflows, deleteSourceBranchOnMerge, repoLocking := g.getMatchingCfg(log, repoID) // If repos are allowed to override certain keys then override them. for _, key := range allowedOverrides { @@ -260,6 +276,11 @@ func (g GlobalCfg) MergeProjectCfg(log logging.SimpleLogging, repoID string, pro log.Debug("overriding server-defined %s with repo settings: [%s]", ApplyRequirementsKey, strings.Join(proj.ApplyRequirements, ",")) applyReqs = proj.ApplyRequirements } + case ImportRequirementsKey: + if proj.ImportRequirements != nil { + log.Debug("overriding server-defined %s with repo settings: [%s]", ImportRequirementsKey, strings.Join(proj.ImportRequirements, ",")) + importReqs = proj.ImportRequirements + } case WorkflowKey: if proj.WorkflowName != nil { // We iterate over the global workflows first and the repo @@ -305,11 +326,12 @@ func (g GlobalCfg) MergeProjectCfg(log logging.SimpleLogging, repoID string, pro log.Debug("MergeProjectCfg completed") } - log.Debug("final settings: %s: [%s], %s: %s", - ApplyRequirementsKey, strings.Join(applyReqs, ","), WorkflowKey, workflow.Name) + log.Debug("final settings: %s: [%s], %s: [%s], %s: %s", + ApplyRequirementsKey, strings.Join(applyReqs, ","), ImportRequirementsKey, strings.Join(importReqs, ","), WorkflowKey, workflow.Name) return MergedProjectCfg{ ApplyRequirements: applyReqs, + ImportRequirements: importReqs, Workflow: workflow, RepoRelDir: proj.Dir, Workspace: proj.Workspace, @@ -328,9 +350,10 @@ func (g GlobalCfg) MergeProjectCfg(log logging.SimpleLogging, repoID string, pro // repo with id repoID. It is used when there is no repo config. 
func (g GlobalCfg) DefaultProjCfg(log logging.SimpleLogging, repoID string, repoRelDir string, workspace string) MergedProjectCfg { log.Debug("building config based on server-side config") - applyReqs, workflow, _, _, deleteSourceBranchOnMerge, repoLocking := g.getMatchingCfg(log, repoID) + applyReqs, importReqs, workflow, _, _, deleteSourceBranchOnMerge, repoLocking := g.getMatchingCfg(log, repoID) return MergedProjectCfg{ ApplyRequirements: applyReqs, + ImportRequirements: importReqs, Workflow: workflow, RepoRelDir: repoRelDir, Workspace: workspace, @@ -380,6 +403,9 @@ func (g GlobalCfg) ValidateRepoCfg(rCfg RepoCfg, repoID string) error { if p.ApplyRequirements != nil && !sliceContainsF(allowedOverrides, ApplyRequirementsKey) { return fmt.Errorf("repo config not allowed to set '%s' key: server-side config needs '%s: [%s]'", ApplyRequirementsKey, AllowedOverridesKey, ApplyRequirementsKey) } + if p.ImportRequirements != nil && !sliceContainsF(allowedOverrides, ImportRequirementsKey) { + return fmt.Errorf("repo config not allowed to set '%s' key: server-side config needs '%s: [%s]'", ImportRequirementsKey, AllowedOverridesKey, ImportRequirementsKey) + } if p.DeleteSourceBranchOnMerge != nil && !sliceContainsF(allowedOverrides, DeleteSourceBranchOnMergeKey) { return fmt.Errorf("repo config not allowed to set '%s' key: server-side config needs '%s: [%s]'", DeleteSourceBranchOnMergeKey, AllowedOverridesKey, DeleteSourceBranchOnMergeKey) } @@ -444,7 +470,7 @@ func (g GlobalCfg) ValidateRepoCfg(rCfg RepoCfg, repoID string) error { } // getMatchingCfg returns the key settings for repoID. -func (g GlobalCfg) getMatchingCfg(log logging.SimpleLogging, repoID string) (applyReqs []string, workflow Workflow, allowedOverrides []string, allowCustomWorkflows bool, deleteSourceBranchOnMerge bool, repoLocking bool) { +func (g GlobalCfg) getMatchingCfg(log logging.SimpleLogging, repoID string) (applyReqs []string, importReqs []string, workflow Workflow, allowedOverrides []string, allowCustomWorkflows bool, deleteSourceBranchOnMerge bool, repoLocking bool) { toLog := make(map[string]string) traceF := func(repoIdx int, repoID string, key string, val interface{}) string { from := "default server config" @@ -466,7 +492,7 @@ func (g GlobalCfg) getMatchingCfg(log logging.SimpleLogging, repoID string) (app return fmt.Sprintf("setting %s: %s from %s", key, valStr, from) } - for _, key := range []string{ApplyRequirementsKey, WorkflowKey, AllowedOverridesKey, AllowCustomWorkflowsKey, DeleteSourceBranchOnMergeKey, RepoLockingKey} { + for _, key := range []string{ApplyRequirementsKey, ImportRequirementsKey, WorkflowKey, AllowedOverridesKey, AllowCustomWorkflowsKey, DeleteSourceBranchOnMergeKey, RepoLockingKey} { for i, repo := range g.Repos { if repo.IDMatches(repoID) { switch key { @@ -475,6 +501,11 @@ func (g GlobalCfg) getMatchingCfg(log logging.SimpleLogging, repoID string) (app toLog[ApplyRequirementsKey] = traceF(i, repo.IDString(), ApplyRequirementsKey, repo.ApplyRequirements) applyReqs = repo.ApplyRequirements } + case ImportRequirementsKey: + if repo.ImportRequirements != nil { + toLog[ImportRequirementsKey] = traceF(i, repo.IDString(), ImportRequirementsKey, repo.ImportRequirements) + importReqs = repo.ImportRequirements + } case WorkflowKey: if repo.Workflow != nil { toLog[WorkflowKey] = traceF(i, repo.IDString(), WorkflowKey, repo.Workflow.Name) diff --git a/server/core/config/valid/global_cfg_test.go b/server/core/config/valid/global_cfg_test.go index c161a03ecc..4879ee0e85 100644 --- 
a/server/core/config/valid/global_cfg_test.go +++ b/server/core/config/valid/global_cfg_test.go @@ -45,6 +45,16 @@ func TestNewGlobalCfg(t *testing.T) { }, }, }, + Import: valid.Stage{ + Steps: []valid.Step{ + { + StepName: "init", + }, + { + StepName: "import", + }, + }, + }, } baseCfg := valid.GlobalCfg{ Repos: []valid.Repo{ @@ -52,6 +62,7 @@ func TestNewGlobalCfg(t *testing.T) { IDRegex: regexp.MustCompile(".*"), BranchRegex: regexp.MustCompile(".*"), ApplyRequirements: []string{}, + ImportRequirements: []string{}, Workflow: &expDefaultWorkflow, AllowedWorkflows: []string{}, AllowedOverrides: []string{}, @@ -164,16 +175,19 @@ func TestNewGlobalCfg(t *testing.T) { if c.allowRepoCfg { exp.Repos[0].AllowCustomWorkflows = Bool(true) - exp.Repos[0].AllowedOverrides = []string{"apply_requirements", "workflow", "delete_source_branch_on_merge", "repo_locking"} + exp.Repos[0].AllowedOverrides = []string{"apply_requirements", "import_requirements", "workflow", "delete_source_branch_on_merge", "repo_locking"} } if c.mergeableReq { exp.Repos[0].ApplyRequirements = append(exp.Repos[0].ApplyRequirements, "mergeable") + exp.Repos[0].ImportRequirements = append(exp.Repos[0].ImportRequirements, "mergeable") } if c.approvedReq { exp.Repos[0].ApplyRequirements = append(exp.Repos[0].ApplyRequirements, "approved") + exp.Repos[0].ImportRequirements = append(exp.Repos[0].ImportRequirements, "approved") } if c.unDivergedReq { exp.Repos[0].ApplyRequirements = append(exp.Repos[0].ApplyRequirements, "undiverged") + exp.Repos[0].ImportRequirements = append(exp.Repos[0].ImportRequirements, "undiverged") } Equals(t, exp, act) @@ -533,6 +547,25 @@ func TestGlobalCfg_ValidateRepoCfg(t *testing.T) { repoID: "github.com/owner/repo", expErr: "repo config not allowed to set 'apply_requirements' key: server-side config needs 'allowed_overrides: [apply_requirements]'", }, + "import_reqs not allowed": { + gCfg: valid.NewGlobalCfgFromArgs(valid.GlobalCfgArgs{ + AllowRepoCfg: false, + MergeableReq: false, + ApprovedReq: false, + UnDivergedReq: false, + }), + rCfg: valid.RepoCfg{ + Projects: []valid.Project{ + { + Dir: ".", + Workspace: "default", + ImportRequirements: []string{""}, + }, + }, + }, + repoID: "github.com/owner/repo", + expErr: "repo config not allowed to set 'import_requirements' key: server-side config needs 'allowed_overrides: [import_requirements]'", + }, "repo workflow doesn't exist": { gCfg: valid.NewGlobalCfgFromArgs(valid.GlobalCfgArgs{ AllowRepoCfg: true, @@ -590,12 +623,14 @@ policies: WorkflowName: String("custom"), }, exp: valid.MergedProjectCfg{ - ApplyRequirements: []string{}, + ApplyRequirements: []string{}, + ImportRequirements: []string{}, Workflow: valid.Workflow{ Name: "default", Apply: valid.DefaultApplyStage, Plan: valid.DefaultPlanStage, PolicyCheck: valid.DefaultPolicyCheckStage, + Import: valid.DefaultImportStage, }, PolicySets: valid.PolicySets{ Version: nil, @@ -632,12 +667,14 @@ policies: WorkflowName: String("custom"), }, exp: valid.MergedProjectCfg{ - ApplyRequirements: []string{}, + ApplyRequirements: []string{}, + ImportRequirements: []string{}, Workflow: valid.Workflow{ Name: "default", Apply: valid.DefaultApplyStage, Plan: valid.DefaultPlanStage, PolicyCheck: valid.DefaultPolicyCheckStage, + Import: valid.DefaultImportStage, }, PolicySets: valid.PolicySets{ Version: version, @@ -693,6 +730,13 @@ policies: func TestGlobalCfg_MergeProjectCfg(t *testing.T) { var emptyPolicySets valid.PolicySets + defaultWorkflow := valid.Workflow{ + Name: "default", + Apply: valid.DefaultApplyStage, + 
PolicyCheck: valid.DefaultPolicyCheckStage, + Plan: valid.DefaultPlanStage, + Import: valid.DefaultImportStage, + } cases := map[string]struct { gCfg string repoID string @@ -717,7 +761,8 @@ workflows: }, repoWorkflows: nil, exp: valid.MergedProjectCfg{ - ApplyRequirements: []string{}, + ApplyRequirements: []string{}, + ImportRequirements: []string{}, Workflow: valid.Workflow{ Name: "custom", Apply: valid.DefaultApplyStage, @@ -729,6 +774,7 @@ workflows: }, }, }, + Import: valid.DefaultImportStage, }, RepoRelDir: ".", Workspace: "default", @@ -747,25 +793,49 @@ repos: `, repoID: "github.com/owner/repo", proj: valid.Project{ - Dir: ".", - Workspace: "default", - ApplyRequirements: []string{"mergeable"}, + Dir: ".", + Workspace: "default", + ApplyRequirements: []string{"mergeable"}, + ImportRequirements: []string{}, }, repoWorkflows: nil, exp: valid.MergedProjectCfg{ - ApplyRequirements: []string{"mergeable"}, - Workflow: valid.Workflow{ - Name: "default", - Apply: valid.DefaultApplyStage, - PolicyCheck: valid.DefaultPolicyCheckStage, - Plan: valid.DefaultPlanStage, - }, - RepoRelDir: ".", - Workspace: "default", - Name: "", - AutoplanEnabled: false, - PolicySets: emptyPolicySets, - RepoLocking: true, + ApplyRequirements: []string{"mergeable"}, + ImportRequirements: []string{}, + Workflow: defaultWorkflow, + RepoRelDir: ".", + Workspace: "default", + Name: "", + AutoplanEnabled: false, + PolicySets: emptyPolicySets, + RepoLocking: true, + }, + }, + "repo-side import reqs win out if allowed": { + gCfg: ` +repos: +- id: /.*/ + allowed_overrides: [import_requirements] + import_requirements: [approved] +`, + repoID: "github.com/owner/repo", + proj: valid.Project{ + Dir: ".", + Workspace: "default", + ApplyRequirements: []string{}, + ImportRequirements: []string{"mergeable"}, + }, + repoWorkflows: nil, + exp: valid.MergedProjectCfg{ + ApplyRequirements: []string{}, + ImportRequirements: []string{"mergeable"}, + Workflow: defaultWorkflow, + RepoRelDir: ".", + Workspace: "default", + Name: "", + AutoplanEnabled: false, + PolicySets: emptyPolicySets, + RepoLocking: true, }, }, "repo-side repo_locking win out if allowed": { @@ -776,26 +846,23 @@ repos: `, repoID: "github.com/owner/repo", proj: valid.Project{ - Dir: ".", - Workspace: "default", - ApplyRequirements: []string{}, - RepoLocking: Bool(true), + Dir: ".", + Workspace: "default", + ApplyRequirements: []string{}, + ImportRequirements: []string{}, + RepoLocking: Bool(true), }, repoWorkflows: nil, exp: valid.MergedProjectCfg{ - ApplyRequirements: []string{}, - Workflow: valid.Workflow{ - Name: "default", - Apply: valid.DefaultApplyStage, - PolicyCheck: valid.DefaultPolicyCheckStage, - Plan: valid.DefaultPlanStage, - }, - RepoRelDir: ".", - Workspace: "default", - Name: "", - AutoplanEnabled: false, - PolicySets: emptyPolicySets, - RepoLocking: false, + ApplyRequirements: []string{}, + ImportRequirements: []string{}, + Workflow: defaultWorkflow, + RepoRelDir: ".", + Workspace: "default", + Name: "", + AutoplanEnabled: false, + PolicySets: emptyPolicySets, + RepoLocking: false, }, }, "last server-side match wins": { @@ -803,10 +870,13 @@ repos: repos: - id: /.*/ apply_requirements: [approved] + import_requirements: [approved] - id: /github.com/.*/ apply_requirements: [mergeable] + import_requirements: [mergeable] - id: github.com/owner/repo apply_requirements: [approved, mergeable] + import_requirements: [approved, mergeable] `, repoID: "github.com/owner/repo", proj: valid.Project{ @@ -816,19 +886,15 @@ repos: }, repoWorkflows: nil, exp: 
valid.MergedProjectCfg{ - ApplyRequirements: []string{"approved", "mergeable"}, - Workflow: valid.Workflow{ - Name: "default", - Apply: valid.DefaultApplyStage, - PolicyCheck: valid.DefaultPolicyCheckStage, - Plan: valid.DefaultPlanStage, - }, - RepoRelDir: "mydir", - Workspace: "myworkspace", - Name: "myname", - AutoplanEnabled: false, - PolicySets: emptyPolicySets, - RepoLocking: true, + ApplyRequirements: []string{"approved", "mergeable"}, + ImportRequirements: []string{"approved", "mergeable"}, + Workflow: defaultWorkflow, + RepoRelDir: "mydir", + Workspace: "myworkspace", + Name: "myname", + AutoplanEnabled: false, + PolicySets: emptyPolicySets, + RepoLocking: true, }, }, "autoplan is set properly": { @@ -845,19 +911,15 @@ repos: }, repoWorkflows: nil, exp: valid.MergedProjectCfg{ - ApplyRequirements: []string{}, - Workflow: valid.Workflow{ - Name: "default", - Apply: valid.DefaultApplyStage, - PolicyCheck: valid.DefaultPolicyCheckStage, - Plan: valid.DefaultPlanStage, - }, - RepoRelDir: "mydir", - Workspace: "myworkspace", - Name: "myname", - AutoplanEnabled: true, - PolicySets: emptyPolicySets, - RepoLocking: true, + ApplyRequirements: []string{}, + ImportRequirements: []string{}, + Workflow: defaultWorkflow, + RepoRelDir: "mydir", + Workspace: "myworkspace", + Name: "myname", + AutoplanEnabled: true, + PolicySets: emptyPolicySets, + RepoLocking: true, }, }, "execution order group is set": { @@ -875,13 +937,9 @@ repos: }, repoWorkflows: nil, exp: valid.MergedProjectCfg{ - ApplyRequirements: []string{}, - Workflow: valid.Workflow{ - Name: "default", - Apply: valid.DefaultApplyStage, - PolicyCheck: valid.DefaultPolicyCheckStage, - Plan: valid.DefaultPlanStage, - }, + ApplyRequirements: []string{}, + ImportRequirements: []string{}, + Workflow: defaultWorkflow, RepoRelDir: "mydir", Workspace: "myworkspace", Name: "myname", @@ -960,19 +1018,22 @@ func TestRepo_BranchMatches(t *testing.T) { func TestGlobalCfg_MatchingRepo(t *testing.T) { defaultRepo := valid.Repo{ - IDRegex: regexp.MustCompile(".*"), - BranchRegex: regexp.MustCompile(".*"), - ApplyRequirements: []string{}, + IDRegex: regexp.MustCompile(".*"), + BranchRegex: regexp.MustCompile(".*"), + ApplyRequirements: []string{}, + ImportRequirements: []string{}, } repo1 := valid.Repo{ - IDRegex: regexp.MustCompile(".*"), - BranchRegex: regexp.MustCompile("^main$"), - ApplyRequirements: []string{"approved"}, + IDRegex: regexp.MustCompile(".*"), + BranchRegex: regexp.MustCompile("^main$"), + ApplyRequirements: []string{"approved"}, + ImportRequirements: []string{"approved"}, } repo2 := valid.Repo{ - ID: "github.com/owner/repo", - BranchRegex: regexp.MustCompile("^main$"), - ApplyRequirements: []string{"approved", "mergeable"}, + ID: "github.com/owner/repo", + BranchRegex: regexp.MustCompile("^main$"), + ApplyRequirements: []string{"approved", "mergeable"}, + ImportRequirements: []string{"approved", "mergeable"}, } cases := map[string]struct { diff --git a/server/core/config/valid/repo_cfg.go b/server/core/config/valid/repo_cfg.go index 331b4c7ba0..1222f56b4f 100644 --- a/server/core/config/valid/repo_cfg.go +++ b/server/core/config/valid/repo_cfg.go @@ -127,6 +127,7 @@ type Project struct { TerraformVersion *version.Version Autoplan Autoplan ApplyRequirements []string + ImportRequirements []string DeleteSourceBranchOnMerge *bool RepoLocking *bool ExecutionOrderGroup int @@ -168,4 +169,5 @@ type Workflow struct { Apply Stage Plan Stage PolicyCheck Stage + Import Stage } diff --git a/server/core/runtime/import_step_runner.go 
b/server/core/runtime/import_step_runner.go new file mode 100644 index 0000000000..3599ae9794 --- /dev/null +++ b/server/core/runtime/import_step_runner.go @@ -0,0 +1,38 @@ +package runtime + +import ( + "os" + "path/filepath" + + version "github.com/hashicorp/go-version" + "github.com/runatlantis/atlantis/server/events/command" +) + +type ImportStepRunner struct { + TerraformExecutor TerraformExec + DefaultTFVersion *version.Version +} + +func (p *ImportStepRunner) Run(ctx command.ProjectContext, extraArgs []string, path string, envs map[string]string) (string, error) { + tfVersion := p.DefaultTFVersion + if ctx.TerraformVersion != nil { + tfVersion = ctx.TerraformVersion + } + + importCmd := []string{"import"} + importCmd = append(importCmd, extraArgs...) + importCmd = append(importCmd, ctx.EscapedCommentArgs...) + out, err := p.TerraformExecutor.RunCommandWithVersion(ctx, filepath.Clean(path), importCmd, envs, tfVersion, ctx.Workspace) + + // If the import was successful and a plan file exists, delete the plan. + planPath := filepath.Join(path, GetPlanFilename(ctx.Workspace, ctx.ProjectName)) + if err == nil { + if _, planPathErr := os.Stat(planPath); !os.IsNotExist(planPathErr) { + ctx.Log.Info("import successful, deleting planfile") + if removeErr := os.Remove(planPath); removeErr != nil { + ctx.Log.Warn("failed to delete planfile after successful import: %s", removeErr) + } + } + } + return out, err +} diff --git a/server/core/runtime/import_step_runner_test.go b/server/core/runtime/import_step_runner_test.go new file mode 100644 index 0000000000..cbe97099a8 --- /dev/null +++ b/server/core/runtime/import_step_runner_test.go @@ -0,0 +1,61 @@ +package runtime + +import ( + "fmt" + "os" + "path/filepath" + "testing" + + "github.com/hashicorp/go-version" + . "github.com/petergtz/pegomock" + "github.com/runatlantis/atlantis/server/core/runtime/mocks/matchers" + "github.com/runatlantis/atlantis/server/core/terraform/mocks" + matchers2 "github.com/runatlantis/atlantis/server/core/terraform/mocks/matchers" + "github.com/runatlantis/atlantis/server/events/command" + "github.com/runatlantis/atlantis/server/events/models" + "github.com/runatlantis/atlantis/server/logging" + . "github.com/runatlantis/atlantis/testing" +) + +func TestImportStepRunner_Run_Success(t *testing.T) { + logger := logging.NewNoopLogger(t) + workspace := "default" + tmpDir := t.TempDir() + planPath := filepath.Join(tmpDir, fmt.Sprintf("%s.tfplan", workspace)) + err := os.WriteFile(planPath, nil, 0600) + Ok(t, err) + + context := command.ProjectContext{ + Log: logger, + EscapedCommentArgs: []string{"-var", "foo=bar", "addr", "id"}, + Workspace: workspace, + RepoRelDir: ".", + User: models.User{Username: "username"}, + Pull: models.PullRequest{ + Num: 2, + }, + BaseRepo: models.Repo{ + FullName: "owner/repo", + Owner: "owner", + Name: "repo", + }, + } + + RegisterMockTestingT(t) + terraform := mocks.NewMockClient() + tfVersion, _ := version.NewVersion("0.15.0") + s := &ImportStepRunner{ + TerraformExecutor: terraform, + DefaultTFVersion: tfVersion, + } + + When(terraform.RunCommandWithVersion(matchers.AnyModelsProjectCommandContext(), AnyString(), AnyStringSlice(), matchers2.AnyMapOfStringToString(), matchers2.AnyPtrToGoVersionVersion(), AnyString())). 
+ ThenReturn("output", nil) + output, err := s.Run(context, []string{}, tmpDir, map[string]string(nil)) + Ok(t, err) + Equals(t, "output", output) + commands := []string{"import", "-var", "foo=bar", "addr", "id"} + terraform.VerifyWasCalledOnce().RunCommandWithVersion(context, tmpDir, commands, map[string]string(nil), tfVersion, "default") + _, err = os.Stat(planPath) + Assert(t, os.IsNotExist(err), "planfile should be deleted") +} diff --git a/server/core/terraform/mocks/matchers/command_projectcontext.go b/server/core/terraform/mocks/matchers/command_projectcontext.go new file mode 100644 index 0000000000..c25f35d932 --- /dev/null +++ b/server/core/terraform/mocks/matchers/command_projectcontext.go @@ -0,0 +1,33 @@ +// Code generated by pegomock. DO NOT EDIT. +package matchers + +import ( + "github.com/petergtz/pegomock" + "reflect" + + command "github.com/runatlantis/atlantis/server/events/command" +) + +func AnyCommandProjectContext() command.ProjectContext { + pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(command.ProjectContext))(nil)).Elem())) + var nullValue command.ProjectContext + return nullValue +} + +func EqCommandProjectContext(value command.ProjectContext) command.ProjectContext { + pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) + var nullValue command.ProjectContext + return nullValue +} + +func NotEqCommandProjectContext(value command.ProjectContext) command.ProjectContext { + pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) + var nullValue command.ProjectContext + return nullValue +} + +func CommandProjectContextThat(matcher pegomock.ArgumentMatcher) command.ProjectContext { + pegomock.RegisterMatcher(matcher) + var nullValue command.ProjectContext + return nullValue +} diff --git a/server/events/apply_requirement_handler.go b/server/events/apply_requirement_handler.go deleted file mode 100644 index 107b163cab..0000000000 --- a/server/events/apply_requirement_handler.go +++ /dev/null @@ -1,43 +0,0 @@ -package events - -import ( - "github.com/runatlantis/atlantis/server/core/config/raw" - "github.com/runatlantis/atlantis/server/core/config/valid" - "github.com/runatlantis/atlantis/server/events/command" - "github.com/runatlantis/atlantis/server/events/models" -) - -//go:generate pegomock generate -m --package mocks -o mocks/mock_apply_handler.go ApplyRequirement -type ApplyRequirement interface { - ValidateProject(repoDir string, ctx command.ProjectContext) (string, error) -} - -type AggregateApplyRequirements struct { - WorkingDir WorkingDir -} - -func (a *AggregateApplyRequirements) ValidateProject(repoDir string, ctx command.ProjectContext) (failure string, err error) { - for _, req := range ctx.ApplyRequirements { - switch req { - case raw.ApprovedApplyRequirement: - if !ctx.PullReqStatus.ApprovalStatus.IsApproved { - return "Pull request must be approved by at least one person other than the author before running apply.", nil - } - // this should come before mergeability check since mergeability is a superset of this check. 
- case valid.PoliciesPassedApplyReq: - if ctx.ProjectPlanStatus == models.ErroredPolicyCheckStatus { - return "All policies must pass for project before running apply", nil - } - case raw.MergeableApplyRequirement: - if !ctx.PullReqStatus.Mergeable { - return "Pull request must be mergeable before running apply.", nil - } - case raw.UnDivergedApplyRequirement: - if a.WorkingDir.HasDiverged(ctx.Log, repoDir) { - return "Default branch must be rebased onto pull request before running apply.", nil - } - } - } - // Passed all apply requirements configured. - return "", nil -} diff --git a/server/events/command/name.go b/server/events/command/name.go index 9194102da6..1396b41b9a 100644 --- a/server/events/command/name.go +++ b/server/events/command/name.go @@ -21,10 +21,12 @@ const ( PolicyCheck // ApprovePolicies is a command to approve policies with owner check ApprovePolicies - // Autoplan is a command to run terrafor plan on PR open/update if autoplan is enabled + // Autoplan is a command to run terraform plan on PR open/update if autoplan is enabled Autoplan // Version is a command to run terraform version. Version + // Import is a command to run terraform import + Import // Adding more? Don't forget to update String() below ) @@ -49,6 +51,18 @@ func (c Name) String() string { return "approve_policies" case Version: return "version" + case Import: + return "import" } return "" } + +// DefaultUsage returns the command default usage +func (c Name) DefaultUsage() string { + switch c { + case Import: + return "import -- ADDR ID" + default: + return c.String() + } +} diff --git a/server/events/command/name_test.go b/server/events/command/name_test.go index 6f766000c0..461d228856 100644 --- a/server/events/command/name_test.go +++ b/server/events/command/name_test.go @@ -4,29 +4,65 @@ import ( "testing" "github.com/runatlantis/atlantis/server/events/command" - . 
"github.com/runatlantis/atlantis/testing" ) -func TestApplyCommand_String(t *testing.T) { - uc := command.Apply - - Equals(t, "apply", uc.String()) -} - -func TestPlanCommand_String(t *testing.T) { - uc := command.Plan - - Equals(t, "plan", uc.String()) +func TestName_TitleString(t *testing.T) { + tests := []struct { + c command.Name + want string + }{ + {command.Apply, "Apply"}, + {command.PolicyCheck, "Policy Check"}, + } + for _, tt := range tests { + t.Run(tt.want, func(t *testing.T) { + if got := tt.c.TitleString(); got != tt.want { + t.Errorf("TitleString() = %v, want %v", got, tt.want) + } + }) + } } -func TestPolicyCheckCommand_String(t *testing.T) { - uc := command.PolicyCheck - - Equals(t, "policy_check", uc.String()) +func TestName_String(t *testing.T) { + tests := []struct { + c command.Name + want string + }{ + {command.Apply, "apply"}, + {command.Plan, "plan"}, + {command.Unlock, "unlock"}, + {command.PolicyCheck, "policy_check"}, + {command.ApprovePolicies, "approve_policies"}, + {command.Version, "version"}, + {command.Import, "import"}, + } + for _, tt := range tests { + t.Run(tt.want, func(t *testing.T) { + if got := tt.c.String(); got != tt.want { + t.Errorf("String() = %v, want %v", got, tt.want) + } + }) + } } -func TestUnlockCommand_String(t *testing.T) { - uc := command.Unlock - - Equals(t, "unlock", uc.String()) +func TestName_DefaultUsage(t *testing.T) { + tests := []struct { + c command.Name + want string + }{ + {command.Apply, "apply"}, + {command.Plan, "plan"}, + {command.Unlock, "unlock"}, + {command.PolicyCheck, "policy_check"}, + {command.ApprovePolicies, "approve_policies"}, + {command.Version, "version"}, + {command.Import, "import -- ADDR ID"}, + } + for _, tt := range tests { + t.Run(tt.c.String(), func(t *testing.T) { + if got := tt.c.DefaultUsage(); got != tt.want { + t.Errorf("DefaultUsage() = %v, want %v", got, tt.want) + } + }) + } } diff --git a/server/events/command/project_context.go b/server/events/command/project_context.go index 4a772b7158..0b5aa3d52b 100644 --- a/server/events/command/project_context.go +++ b/server/events/command/project_context.go @@ -26,6 +26,9 @@ type ProjectContext struct { // ApplyRequirements is the list of requirements that must be satisfied // before we will run the apply stage. ApplyRequirements []string + // ImportRequirements is the list of requirements that must be satisfied + // before we will run the import stage. + ImportRequirements []string // AutomergeEnabled is true if automerge is enabled for the repo that this // project is in. 
AutomergeEnabled bool diff --git a/server/events/command/project_result.go b/server/events/command/project_result.go index 3fadd00c83..9d5c83ea7c 100644 --- a/server/events/command/project_result.go +++ b/server/events/command/project_result.go @@ -15,6 +15,7 @@ type ProjectResult struct { PolicyCheckSuccess *models.PolicyCheckSuccess ApplySuccess string VersionSuccess string + ImportSuccess *models.ImportSuccess ProjectName string } diff --git a/server/events/command_requirement_handler.go b/server/events/command_requirement_handler.go new file mode 100644 index 0000000000..f67ef531be --- /dev/null +++ b/server/events/command_requirement_handler.go @@ -0,0 +1,65 @@ +package events + +import ( + "github.com/runatlantis/atlantis/server/core/config/raw" + "github.com/runatlantis/atlantis/server/core/config/valid" + "github.com/runatlantis/atlantis/server/events/command" + "github.com/runatlantis/atlantis/server/events/models" +) + +//go:generate pegomock generate -m --package mocks -o mocks/mock_command_requirement_handler.go CommandRequirementHandler +type CommandRequirementHandler interface { + ValidateApplyProject(repoDir string, ctx command.ProjectContext) (string, error) + ValidateImportProject(repoDir string, ctx command.ProjectContext) (string, error) +} + +type DefaultCommandRequirementHandler struct { + WorkingDir WorkingDir +} + +func (a *DefaultCommandRequirementHandler) ValidateApplyProject(repoDir string, ctx command.ProjectContext) (failure string, err error) { + for _, req := range ctx.ApplyRequirements { + switch req { + case raw.ApprovedRequirement: + if !ctx.PullReqStatus.ApprovalStatus.IsApproved { + return "Pull request must be approved by at least one person other than the author before running apply.", nil + } + // this should come before mergeability check since mergeability is a superset of this check. + case valid.PoliciesPassedCommandReq: + if ctx.ProjectPlanStatus == models.ErroredPolicyCheckStatus { + return "All policies must pass for project before running apply", nil + } + case raw.MergeableRequirement: + if !ctx.PullReqStatus.Mergeable { + return "Pull request must be mergeable before running apply.", nil + } + case raw.UnDivergedRequirement: + if a.WorkingDir.HasDiverged(ctx.Log, repoDir) { + return "Default branch must be rebased onto pull request before running apply.", nil + } + } + } + // Passed all apply requirements configured. + return "", nil +} + +func (a *DefaultCommandRequirementHandler) ValidateImportProject(repoDir string, ctx command.ProjectContext) (failure string, err error) { + for _, req := range ctx.ImportRequirements { + switch req { + case raw.ApprovedRequirement: + if !ctx.PullReqStatus.ApprovalStatus.IsApproved { + return "Pull request must be approved by at least one person other than the author before running import.", nil + } + case raw.MergeableRequirement: + if !ctx.PullReqStatus.Mergeable { + return "Pull request must be mergeable before running import.", nil + } + case raw.UnDivergedRequirement: + if a.WorkingDir.HasDiverged(ctx.Log, repoDir) { + return "Default branch must be rebased onto pull request before running import.", nil + } + } + } + // Passed all import requirements configured. 
+ return "", nil +} diff --git a/server/events/command_requirement_handler_test.go b/server/events/command_requirement_handler_test.go new file mode 100644 index 0000000000..d190cd657e --- /dev/null +++ b/server/events/command_requirement_handler_test.go @@ -0,0 +1,194 @@ +package events_test + +import ( + "fmt" + "testing" + + "github.com/runatlantis/atlantis/server/core/config/raw" + "github.com/runatlantis/atlantis/server/core/config/valid" + "github.com/runatlantis/atlantis/server/events" + "github.com/runatlantis/atlantis/server/events/models" + "github.com/runatlantis/atlantis/server/logging/mocks/matchers" + + . "github.com/petergtz/pegomock" + + "github.com/runatlantis/atlantis/server/events/command" + "github.com/runatlantis/atlantis/server/events/mocks" + "github.com/stretchr/testify/assert" +) + +func TestAggregateApplyRequirements_ValidateApplyProject(t *testing.T) { + repoDir := "repoDir" + fullRequirements := []string{ + raw.ApprovedRequirement, + valid.PoliciesPassedCommandReq, + raw.MergeableRequirement, + raw.UnDivergedRequirement, + } + tests := []struct { + name string + ctx command.ProjectContext + setup func(workingDir *mocks.MockWorkingDir) + wantFailure string + wantErr assert.ErrorAssertionFunc + }{ + { + name: "pass no requirements", + ctx: command.ProjectContext{}, + wantErr: assert.NoError, + }, + { + name: "pass full requirements", + ctx: command.ProjectContext{ + ApplyRequirements: fullRequirements, + PullReqStatus: models.PullReqStatus{ + ApprovalStatus: models.ApprovalStatus{IsApproved: true}, + Mergeable: true, + }, + ProjectPlanStatus: models.PassedPolicyCheckStatus, + }, + setup: func(workingDir *mocks.MockWorkingDir) { + When(workingDir.HasDiverged(matchers.AnyLoggingSimpleLogging(), AnyString())).ThenReturn(false) + }, + wantErr: assert.NoError, + }, + { + name: "fail by no approved", + ctx: command.ProjectContext{ + ApplyRequirements: []string{raw.ApprovedRequirement}, + PullReqStatus: models.PullReqStatus{ + ApprovalStatus: models.ApprovalStatus{IsApproved: false}, + }, + }, + wantFailure: "Pull request must be approved by at least one person other than the author before running apply.", + wantErr: assert.NoError, + }, + { + name: "fail by no policy passed", + ctx: command.ProjectContext{ + ApplyRequirements: []string{valid.PoliciesPassedCommandReq}, + ProjectPlanStatus: models.ErroredPolicyCheckStatus, + }, + wantFailure: "All policies must pass for project before running apply", + wantErr: assert.NoError, + }, + { + name: "fail by no mergeable", + ctx: command.ProjectContext{ + ApplyRequirements: []string{raw.MergeableRequirement}, + PullReqStatus: models.PullReqStatus{Mergeable: false}, + }, + wantFailure: "Pull request must be mergeable before running apply.", + wantErr: assert.NoError, + }, + { + name: "fail by diverged", + ctx: command.ProjectContext{ + ApplyRequirements: []string{raw.UnDivergedRequirement}, + }, + setup: func(workingDir *mocks.MockWorkingDir) { + When(workingDir.HasDiverged(matchers.AnyLoggingSimpleLogging(), AnyString())).ThenReturn(true) + }, + wantFailure: "Default branch must be rebased onto pull request before running apply.", + wantErr: assert.NoError, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + RegisterMockTestingT(t) + workingDir := mocks.NewMockWorkingDir() + a := &events.DefaultCommandRequirementHandler{WorkingDir: workingDir} + if tt.setup != nil { + tt.setup(workingDir) + } + gotFailure, err := a.ValidateApplyProject(repoDir, tt.ctx) + if !tt.wantErr(t, err, 
fmt.Sprintf("ValidateApplyProject(%v, %v)", repoDir, tt.ctx)) { + return + } + assert.Equalf(t, tt.wantFailure, gotFailure, "ValidateApplyProject(%v, %v)", repoDir, tt.ctx) + }) + } +} + +func TestAggregateApplyRequirements_ValidateImportProject(t *testing.T) { + repoDir := "repoDir" + fullRequirements := []string{ + raw.ApprovedRequirement, + raw.MergeableRequirement, + raw.UnDivergedRequirement, + } + tests := []struct { + name string + ctx command.ProjectContext + setup func(workingDir *mocks.MockWorkingDir) + wantFailure string + wantErr assert.ErrorAssertionFunc + }{ + { + name: "pass no requirements", + ctx: command.ProjectContext{}, + wantErr: assert.NoError, + }, + { + name: "pass full requirements", + ctx: command.ProjectContext{ + ImportRequirements: fullRequirements, + PullReqStatus: models.PullReqStatus{ + ApprovalStatus: models.ApprovalStatus{IsApproved: true}, + Mergeable: true, + }, + ProjectPlanStatus: models.PassedPolicyCheckStatus, + }, + setup: func(workingDir *mocks.MockWorkingDir) { + When(workingDir.HasDiverged(matchers.AnyLoggingSimpleLogging(), AnyString())).ThenReturn(false) + }, + wantErr: assert.NoError, + }, + { + name: "fail by no approved", + ctx: command.ProjectContext{ + ImportRequirements: []string{raw.ApprovedRequirement}, + PullReqStatus: models.PullReqStatus{ + ApprovalStatus: models.ApprovalStatus{IsApproved: false}, + }, + }, + wantFailure: "Pull request must be approved by at least one person other than the author before running import.", + wantErr: assert.NoError, + }, + { + name: "fail by no mergeable", + ctx: command.ProjectContext{ + ImportRequirements: []string{raw.MergeableRequirement}, + PullReqStatus: models.PullReqStatus{Mergeable: false}, + }, + wantFailure: "Pull request must be mergeable before running import.", + wantErr: assert.NoError, + }, + { + name: "fail by diverged", + ctx: command.ProjectContext{ + ImportRequirements: []string{raw.UnDivergedRequirement}, + }, + setup: func(workingDir *mocks.MockWorkingDir) { + When(workingDir.HasDiverged(matchers.AnyLoggingSimpleLogging(), AnyString())).ThenReturn(true) + }, + wantFailure: "Default branch must be rebased onto pull request before running import.", + wantErr: assert.NoError, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + RegisterMockTestingT(t) + workingDir := mocks.NewMockWorkingDir() + a := &events.DefaultCommandRequirementHandler{WorkingDir: workingDir} + if tt.setup != nil { + tt.setup(workingDir) + } + gotFailure, err := a.ValidateImportProject(repoDir, tt.ctx) + if !tt.wantErr(t, err, fmt.Sprintf("ValidateImportProject(%v, %v)", repoDir, tt.ctx)) { + return + } + assert.Equalf(t, tt.wantFailure, gotFailure, "ValidateImportProject(%v, %v)", repoDir, tt.ctx) + }) + } +} diff --git a/server/events/command_runner_test.go b/server/events/command_runner_test.go index d3f96b1e7c..29086fb403 100644 --- a/server/events/command_runner_test.go +++ b/server/events/command_runner_test.go @@ -65,6 +65,7 @@ var applyLockChecker *lockingmocks.MockApplyLockChecker var lockingLocker *lockingmocks.MockLocker var applyCommandRunner *events.ApplyCommandRunner var unlockCommandRunner *events.UnlockCommandRunner +var importCommandRunner *events.ImportCommandRunner var preWorkflowHooksCommandRunner events.PreWorkflowHooksCommandRunner var postWorkflowHooksCommandRunner events.PostWorkflowHooksCommandRunner @@ -181,12 +182,19 @@ func setup(t *testing.T) *vcsmocks.MockClient { SilenceNoProjects, ) + importCommandRunner = events.NewImportCommandRunner( + pullUpdater, + 
projectCommandBuilder, + projectCommandRunner, + ) + commentCommandRunnerByCmd := map[command.Name]events.CommentCommandRunner{ command.Plan: planCommandRunner, command.Apply: applyCommandRunner, command.ApprovePolicies: approvePoliciesCommandRunner, command.Unlock: unlockCommandRunner, command.Version: versionCommandRunner, + command.Import: importCommandRunner, } preWorkflowHooksCommandRunner = mocks.NewMockPreWorkflowHooksCommandRunner() diff --git a/server/events/comment_parser.go b/server/events/comment_parser.go index 48af12d7bd..c240c005f3 100644 --- a/server/events/comment_parser.go +++ b/server/events/comment_parser.go @@ -68,8 +68,6 @@ type CommentBuilder interface { BuildPlanComment(repoRelDir string, workspace string, project string, commentArgs []string) string // BuildApplyComment builds an apply comment for the specified args. BuildApplyComment(repoRelDir string, workspace string, project string, autoMergeDisabled bool) string - // BuildVersionComment builds a version comment for the specified args. - BuildVersionComment(repoRelDir string, workspace string, project string) string } // CommentParser implements CommentParsing @@ -112,6 +110,7 @@ type CommentParseResult struct { // - atlantis unlock // - atlantis version // - atlantis approve_policies +// - atlantis import -- addr id func (e *CommentParser) Parse(rawComment string, vcsHost models.VCSHostType) CommentParseResult { comment := strings.TrimSpace(rawComment) @@ -172,7 +171,7 @@ func (e *CommentParser) Parse(rawComment string, vcsHost models.VCSHostType) Com } // Need to have a plan, apply, approve_policy or unlock at this point. - if !e.stringInSlice(cmd, []string{command.Plan.String(), command.Apply.String(), command.Unlock.String(), command.ApprovePolicies.String(), command.Version.String()}) { + if !e.stringInSlice(cmd, []string{command.Plan.String(), command.Apply.String(), command.Unlock.String(), command.ApprovePolicies.String(), command.Version.String(), command.Import.String()}) { return CommentParseResult{CommentResponse: fmt.Sprintf("```\nError: unknown command %q.\nRun 'atlantis --help' for usage.\n```", cmd)} } @@ -218,6 +217,14 @@ func (e *CommentParser) Parse(rawComment string, vcsHost models.VCSHostType) Com flagSet.StringVarP(&dir, dirFlagLong, dirFlagShort, "", "Which directory to run version in relative to root of repo, ex. 'child/dir'.") flagSet.StringVarP(&project, projectFlagLong, projectFlagShort, "", fmt.Sprintf("Print the version for this project. Refers to the name of the project configured in %s.", config.AtlantisYAMLFilename)) flagSet.BoolVarP(&verbose, verboseFlagLong, verboseFlagShort, false, "Append Atlantis log to comment.") + case command.Import.String(): + name = command.Import + flagSet = pflag.NewFlagSet(command.Import.String(), pflag.ContinueOnError) + flagSet.SetOutput(io.Discard) + flagSet.StringVarP(&workspace, workspaceFlagLong, workspaceFlagShort, "", "Switch to this Terraform workspace before importing.") + flagSet.StringVarP(&dir, dirFlagLong, dirFlagShort, "", "Which directory to run import in relative to root of repo, ex. 'child/dir'.") + flagSet.StringVarP(&project, projectFlagLong, projectFlagShort, "", fmt.Sprintf("Which project to run import for. Refers to the name of the project configured in %s.
Cannot be used at same time as workspace or dir flags.", config.AtlantisYAMLFilename)) + flagSet.BoolVarP(&verbose, verboseFlagLong, verboseFlagShort, false, "Append Atlantis log to comment.") default: return CommentParseResult{CommentResponse: fmt.Sprintf("Error: unknown command %q – this is a bug", cmd)} } @@ -226,7 +233,7 @@ func (e *CommentParser) Parse(rawComment string, vcsHost models.VCSHostType) Com // It's safe to use [2:] because we know there's at least 2 elements in args. err = flagSet.Parse(args[2:]) if err == pflag.ErrHelp { - return CommentParseResult{CommentResponse: fmt.Sprintf("```\nUsage of %s:\n%s\n```", cmd, flagSet.FlagUsagesWrapped(usagesCols))} + return CommentParseResult{CommentResponse: fmt.Sprintf("```\nUsage of %s:\n%s\n```", name.DefaultUsage(), flagSet.FlagUsagesWrapped(usagesCols))} } if err != nil { if cmd == command.Unlock.String() { @@ -242,7 +249,7 @@ func (e *CommentParser) Parse(rawComment string, vcsHost models.VCSHostType) Com unusedArgs = flagSet.Args()[0:flagSet.ArgsLenAtDash()] } if len(unusedArgs) > 0 { - return CommentParseResult{CommentResponse: e.errMarkdown(fmt.Sprintf("unknown argument(s) – %s", strings.Join(unusedArgs, " ")), cmd, flagSet)} + return CommentParseResult{CommentResponse: e.errMarkdown(fmt.Sprintf("unknown argument(s) – %s", strings.Join(unusedArgs, " ")), name.DefaultUsage(), flagSet)} } var extraArgs []string @@ -299,12 +306,6 @@ func (e *CommentParser) BuildApplyComment(repoRelDir string, workspace string, p return fmt.Sprintf("%s %s%s", atlantisExecutable, command.Apply.String(), flags) } -// BuildVersionComment builds a version comment for the specified args. -func (e *CommentParser) BuildVersionComment(repoRelDir string, workspace string, project string) string { - flags := e.buildFlags(repoRelDir, workspace, project, false) - return fmt.Sprintf("%s %s%s", atlantisExecutable, command.Version.String(), flags) -} - func (e *CommentParser) buildFlags(repoRelDir string, workspace string, project string, autoMergeDisabled bool) string { // Add quotes if dir has spaces. if strings.Contains(repoRelDir, " ") { @@ -412,6 +413,8 @@ Commands: approve_policies Approves all current policy checking failures for the PR. version Print the output of 'terraform version' + import Runs 'terraform import' for the changes in this pull request. + To plan a specific project, use the -d, -w and -p flags. help View help. 
Flags: diff --git a/server/events/comment_parser_test.go b/server/events/comment_parser_test.go index c19c404d34..9161d6a9f5 100644 --- a/server/events/comment_parser_test.go +++ b/server/events/comment_parser_test.go @@ -136,6 +136,11 @@ func TestParse_UnusedArguments(t *testing.T) { "arg arg2 --", "arg arg2", }, + { + command.Import, + "arg arg2 --", + "arg arg2", + }, } for _, c := range cases { comment := fmt.Sprintf("atlantis %s %s", c.Command.String(), c.Args) @@ -149,6 +154,8 @@ func TestParse_UnusedArguments(t *testing.T) { usage = ApplyUsage case command.ApprovePolicies: usage = ApprovePolicyUsage + case command.Import: + usage = ImportUsage } Equals(t, fmt.Sprintf("```\nError: unknown argument(s) – %s.\n%s```", c.Unused, usage), r.CommentResponse) }) @@ -200,17 +207,22 @@ func TestParse_InvalidCommand(t *testing.T) { func TestParse_SubcommandUsage(t *testing.T) { t.Log("given a comment asking for the usage of a subcommand should " + "return help") - comments := []string{ - "atlantis plan -h", - "atlantis plan --help", - "atlantis apply -h", - "atlantis apply --help", - "atlantis approve_policies -h", - "atlantis approve_policies --help", - } - for _, c := range comments { - r := commentParser.Parse(c, models.Github) - exp := "Usage of " + strings.Fields(c)[1] + tests := []struct { + input string + expUsage string + }{ + {"atlantis plan -h", "plan"}, + {"atlantis plan --help", "plan"}, + {"atlantis apply -h", "apply"}, + {"atlantis apply --help", "apply"}, + {"atlantis approve_policies -h", "approve_policies"}, + {"atlantis approve_policies --help", "approve_policies"}, + {"atlantis import -h", "import -- ADDR ID"}, + {"atlantis import --help", "import -- ADDR ID"}, + } + for _, c := range tests { + r := commentParser.Parse(c.input, models.Github) + exp := "Usage of " + c.expUsage Assert(t, strings.Contains(r.CommentResponse, exp), "For comment %q expected CommentResponse %q to contain %q", c, r.CommentResponse, exp) Assert(t, !strings.Contains(r.CommentResponse, "Error:"), @@ -241,6 +253,10 @@ func TestParse_InvalidFlags(t *testing.T) { "atlantis apply --abc", "Error: unknown flag: --abc", }, + { + "atlantis import --abc", + "Error: unknown flag: --abc", + }, } for _, c := range cases { r := commentParser.Parse(c.comment, models.Github) @@ -256,13 +272,17 @@ func TestParse_RelativeDirPath(t *testing.T) { comments := []string{ "atlantis plan -d ..", "atlantis apply -d ..", + "atlantis import -d ..", // These won't return an error because we prepend with . when parsing. 
//"atlantis plan -d /..", //"atlantis apply -d /..", + //"atlantis import -d /..", "atlantis plan -d ./..", "atlantis apply -d ./..", + "atlantis import -d ./..", "atlantis plan -d a/b/../../..", "atlantis apply -d a/../..", + "atlantis import -d a/../..", } for _, c := range comments { r := commentParser.Parse(c, models.Github) @@ -306,12 +326,16 @@ func TestParse_InvalidWorkspace(t *testing.T) { comments := []string{ "atlantis plan -w ..", "atlantis apply -w ..", + "atlantis import -w ..", "atlantis plan -w /", "atlantis apply -w /", + "atlantis import -w /", "atlantis plan -w ..abc", "atlantis apply -w abc..", + "atlantis import -w abc..", "atlantis plan -w abc..abc", "atlantis apply -w ../../../etc/passwd", + "atlantis import -w ../../../etc/passwd", } for _, c := range comments { r := commentParser.Parse(c, models.Github) @@ -557,7 +581,7 @@ func TestParse_Parsing(t *testing.T) { } for _, test := range cases { - for _, cmdName := range []string{"plan", "apply"} { + for _, cmdName := range []string{"plan", "apply", "import"} { comment := fmt.Sprintf("atlantis %s %s", cmdName, test.flags) t.Run(comment, func(t *testing.T) { r := commentParser.Parse(comment, models.Github) @@ -576,6 +600,9 @@ func TestParse_Parsing(t *testing.T) { if cmdName == "approve_policies" { Assert(t, r.Command.Name == command.ApprovePolicies, "did not parse comment %q as approve_policies command", comment) } + if cmdName == "import" { + Assert(t, r.Command.Name == command.Import, "did not parse comment %q as import command", comment) + } }) } } @@ -694,9 +721,6 @@ func TestBuildPlanApplyVersionComment(t *testing.T) { case command.Apply: actComment := commentParser.BuildApplyComment(c.repoRelDir, c.workspace, c.project, c.autoMergeDisabled) Equals(t, fmt.Sprintf("atlantis apply %s", c.expApplyFlags), actComment) - case command.Version: - actComment := commentParser.BuildVersionComment(c.repoRelDir, c.workspace, c.project) - Equals(t, fmt.Sprintf("atlantis version %s", c.expVersionFlags), actComment) } } }) @@ -737,6 +761,8 @@ Commands: approve_policies Approves all current policy checking failures for the PR. version Print the output of 'terraform version' + import Runs 'terraform import' for the changes in this pull request. + To plan a specific project, use the -d, -w and -p flags. help View help. Flags: @@ -766,6 +792,8 @@ Commands: approve_policies Approves all current policy checking failures for the PR. version Print the output of 'terraform version' + import Runs 'terraform import' for the changes in this pull request. + To plan a specific project, use the -d, -w and -p flags. help View help. Flags: @@ -848,6 +876,7 @@ var ApplyUsage = `Usage of apply: var ApprovePolicyUsage = `Usage of approve_policies: --verbose Append Atlantis log to comment. ` + var UnlockUsage = "`Usage of unlock:`\n\n ```cmake\n" + `atlantis unlock @@ -855,3 +884,13 @@ var UnlockUsage = "`Usage of unlock:`\n\n ```cmake\n" + Arguments or flags are not supported at the moment. If you need to unlock a specific project please use the atlantis UI.` + "\n```" + +var ImportUsage = `Usage of import -- ADDR ID: + -d, --dir string Which directory to run plan in relative to root of repo, + ex. 'child/dir'. + -p, --project string Which project to run plan for. Refers to the name of the + project configured in atlantis.yaml. Cannot be used at + same time as workspace or dir flags. + --verbose Append Atlantis log to comment. + -w, --workspace string Switch to this Terraform workspace before planning. 
+` diff --git a/server/events/import_command_runner.go b/server/events/import_command_runner.go new file mode 100644 index 0000000000..fc4d09fbfa --- /dev/null +++ b/server/events/import_command_runner.go @@ -0,0 +1,44 @@ +package events + +import ( + "github.com/runatlantis/atlantis/server/events/command" +) + +func NewImportCommandRunner( + pullUpdater *PullUpdater, + prjCmdBuilder ProjectImportCommandBuilder, + prjCmdRunner ProjectImportCommandRunner, +) *ImportCommandRunner { + return &ImportCommandRunner{ + pullUpdater: pullUpdater, + prjCmdBuilder: prjCmdBuilder, + prjCmdRunner: prjCmdRunner, + } +} + +type ImportCommandRunner struct { + pullUpdater *PullUpdater + prjCmdBuilder ProjectImportCommandBuilder + prjCmdRunner ProjectImportCommandRunner +} + +func (v *ImportCommandRunner) Run(ctx *command.Context, cmd *CommentCommand) { + var err error + var projectCmds []command.ProjectContext + projectCmds, err = v.prjCmdBuilder.BuildImportCommands(ctx, cmd) + if err != nil { + ctx.Log.Warn("Error %s", err) + } + + var result command.Result + if len(projectCmds) > 1 { + // There is no use case for running terraform import against multiple projects at once. + // To avoid an incorrect import, refuse to run when more than one project matches. + result = command.Result{ + Failure: "import cannot run on multiple projects. please specify one project.", + } + } else { + result = runProjectCmds(projectCmds, v.prjCmdRunner.Import) + } + v.pullUpdater.updatePull(ctx, cmd, result) +} diff --git a/server/events/markdown_renderer.go b/server/events/markdown_renderer.go index 621d88be0f..7708adf3ef 100644 --- a/server/events/markdown_renderer.go +++ b/server/events/markdown_renderer.go @@ -33,6 +33,7 @@ var ( policyCheckCommandTitle = command.PolicyCheck.TitleString() approvePoliciesCommandTitle = command.ApprovePolicies.TitleString() versionCommandTitle = command.Version.TitleString() + importCommandTitle = command.Import.TitleString() // maxUnwrappedLines is the maximum number of lines the Terraform output // can be before we wrap it in an expandable template. maxUnwrappedLines = 12 @@ -220,6 +221,12 @@ func (m *MarkdownRenderer) renderProjectResults(results []command.ProjectResult, resultData.Rendered = m.renderTemplate(templates.Lookup("versionUnwrappedSuccess"), struct{ Output string }{result.VersionSuccess}) } numVersionSuccesses++ + } else if result.ImportSuccess != nil { + if m.shouldUseWrappedTmpl(vcsHost, result.ImportSuccess.Output) { + resultData.Rendered = m.renderTemplate(templates.Lookup("importSuccessWrapped"), result.ImportSuccess) + } else { + resultData.Rendered = m.renderTemplate(templates.Lookup("importSuccessUnwrapped"), result.ImportSuccess) + } } else { resultData.Rendered = "Found no template. This is a bug!"
} @@ -242,6 +249,8 @@ func (m *MarkdownRenderer) renderProjectResults(results []command.ProjectResult, tmpl = templates.Lookup("singleProjectVersionUnsuccessful") case len(resultsTmplData) == 1 && common.Command == applyCommandTitle: tmpl = templates.Lookup("singleProjectApply") + case len(resultsTmplData) == 1 && common.Command == importCommandTitle: + tmpl = templates.Lookup("singleProjectImport") case common.Command == planCommandTitle, common.Command == policyCheckCommandTitle: tmpl = templates.Lookup("multiProjectPlan") @@ -251,6 +260,8 @@ func (m *MarkdownRenderer) renderProjectResults(results []command.ProjectResult, tmpl = templates.Lookup("multiProjectApply") case common.Command == versionCommandTitle: tmpl = templates.Lookup("multiProjectVersion") + case common.Command == importCommandTitle: + tmpl = templates.Lookup("multiProjectImport") default: return "no template matched–this is a bug" } diff --git a/server/events/markdown_renderer_test.go b/server/events/markdown_renderer_test.go index 855423a75e..2c943ffa3b 100644 --- a/server/events/markdown_renderer_test.go +++ b/server/events/markdown_renderer_test.go @@ -288,6 +288,33 @@ $$$ * $atlantis apply$ * :put_litter_in_its_place: To delete all plans and locks for the PR, comment: * $atlantis unlock$ +`, + }, + { + "single successful import", + command.Import, + []command.ProjectResult{ + { + ImportSuccess: &models.ImportSuccess{ + Output: "import-output", + RePlanCmd: "atlantis plan -d path -w workspace", + }, + Workspace: "workspace", + RepoRelDir: "path", + ProjectName: "projectname", + }, + }, + models.Github, + `Ran Import for project: $projectname$ dir: $path$ workspace: $workspace$ + +$$$diff +import-output +$$$ + +* :repeat: To **plan** this project again, comment: + * $atlantis plan -d path -w workspace$ + + `, }, { diff --git a/server/events/mock_workingdir_test.go b/server/events/mock_workingdir_test.go index 8fdf24ef7c..21479f89ed 100644 --- a/server/events/mock_workingdir_test.go +++ b/server/events/mock_workingdir_test.go @@ -49,9 +49,6 @@ func (mock *MockWorkingDir) Clone(log logging.SimpleLogging, headRepo models.Rep } return ret0, ret1, ret2 } -func (mock *MockWorkingDir) HasDiverged(log logging.SimpleLogging, cloneDir string) bool { - return false -} func (mock *MockWorkingDir) GetWorkingDir(r models.Repo, p models.PullRequest, workspace string) (string, error) { if mock == nil { @@ -72,6 +69,21 @@ func (mock *MockWorkingDir) GetWorkingDir(r models.Repo, p models.PullRequest, w return ret0, ret1 } +func (mock *MockWorkingDir) HasDiverged(log logging.SimpleLogging, cloneDir string) bool { + if mock == nil { + panic("mock must not be nil. Use myMock := NewMockWorkingDir().") + } + params := []pegomock.Param{log, cloneDir} + result := pegomock.GetGenericMockFrom(mock).Invoke("HasDiverged", params, []reflect.Type{reflect.TypeOf((*bool)(nil)).Elem()}) + var ret0 bool + if len(result) != 0 { + if result[0] != nil { + ret0 = result[0].(bool) + } + } + return ret0 +} + func (mock *MockWorkingDir) GetPullDir(r models.Repo, p models.PullRequest) (string, error) { if mock == nil { panic("mock must not be nil. 
Use myMock := NewMockWorkingDir().") @@ -232,6 +244,37 @@ func (c *MockWorkingDir_GetWorkingDir_OngoingVerification) GetAllCapturedArgumen return } +func (verifier *VerifierMockWorkingDir) HasDiverged(log logging.SimpleLogging, cloneDir string) *MockWorkingDir_HasDiverged_OngoingVerification { + params := []pegomock.Param{log, cloneDir} + methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "HasDiverged", params, verifier.timeout) + return &MockWorkingDir_HasDiverged_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} +} + +type MockWorkingDir_HasDiverged_OngoingVerification struct { + mock *MockWorkingDir + methodInvocations []pegomock.MethodInvocation +} + +func (c *MockWorkingDir_HasDiverged_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, string) { + log, cloneDir := c.GetAllCapturedArguments() + return log[len(log)-1], cloneDir[len(cloneDir)-1] +} + +func (c *MockWorkingDir_HasDiverged_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []string) { + params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) + if len(params) > 0 { + _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) + for u, param := range params[0] { + _param0[u] = param.(logging.SimpleLogging) + } + _param1 = make([]string, len(c.methodInvocations)) + for u, param := range params[1] { + _param1[u] = param.(string) + } + } + return +} + func (verifier *VerifierMockWorkingDir) GetPullDir(r models.Repo, p models.PullRequest) *MockWorkingDir_GetPullDir_OngoingVerification { params := []pegomock.Param{r, p} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GetPullDir", params, verifier.timeout) diff --git a/server/events/mocks/matchers/command_name.go b/server/events/mocks/matchers/command_name.go new file mode 100644 index 0000000000..9dcc26d1e8 --- /dev/null +++ b/server/events/mocks/matchers/command_name.go @@ -0,0 +1,33 @@ +// Code generated by pegomock. DO NOT EDIT. +package matchers + +import ( + "github.com/petergtz/pegomock" + "reflect" + + command "github.com/runatlantis/atlantis/server/events/command" +) + +func AnyCommandName() command.Name { + pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(command.Name))(nil)).Elem())) + var nullValue command.Name + return nullValue +} + +func EqCommandName(value command.Name) command.Name { + pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) + var nullValue command.Name + return nullValue +} + +func NotEqCommandName(value command.Name) command.Name { + pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) + var nullValue command.Name + return nullValue +} + +func CommandNameThat(matcher pegomock.ArgumentMatcher) command.Name { + pegomock.RegisterMatcher(matcher) + var nullValue command.Name + return nullValue +} diff --git a/server/events/mocks/matchers/command_projectcontext.go b/server/events/mocks/matchers/command_projectcontext.go new file mode 100644 index 0000000000..c25f35d932 --- /dev/null +++ b/server/events/mocks/matchers/command_projectcontext.go @@ -0,0 +1,33 @@ +// Code generated by pegomock. DO NOT EDIT. 
+package matchers + +import ( + "github.com/petergtz/pegomock" + "reflect" + + command "github.com/runatlantis/atlantis/server/events/command" +) + +func AnyCommandProjectContext() command.ProjectContext { + pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(command.ProjectContext))(nil)).Elem())) + var nullValue command.ProjectContext + return nullValue +} + +func EqCommandProjectContext(value command.ProjectContext) command.ProjectContext { + pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) + var nullValue command.ProjectContext + return nullValue +} + +func NotEqCommandProjectContext(value command.ProjectContext) command.ProjectContext { + pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) + var nullValue command.ProjectContext + return nullValue +} + +func CommandProjectContextThat(matcher pegomock.ArgumentMatcher) command.ProjectContext { + pegomock.RegisterMatcher(matcher) + var nullValue command.ProjectContext + return nullValue +} diff --git a/server/events/mocks/matchers/command_projectresult.go b/server/events/mocks/matchers/command_projectresult.go new file mode 100644 index 0000000000..6d4c9b2e9d --- /dev/null +++ b/server/events/mocks/matchers/command_projectresult.go @@ -0,0 +1,33 @@ +// Code generated by pegomock. DO NOT EDIT. +package matchers + +import ( + "github.com/petergtz/pegomock" + "reflect" + + command "github.com/runatlantis/atlantis/server/events/command" +) + +func AnyCommandProjectResult() command.ProjectResult { + pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(command.ProjectResult))(nil)).Elem())) + var nullValue command.ProjectResult + return nullValue +} + +func EqCommandProjectResult(value command.ProjectResult) command.ProjectResult { + pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) + var nullValue command.ProjectResult + return nullValue +} + +func NotEqCommandProjectResult(value command.ProjectResult) command.ProjectResult { + pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) + var nullValue command.ProjectResult + return nullValue +} + +func CommandProjectResultThat(matcher pegomock.ArgumentMatcher) command.ProjectResult { + pegomock.RegisterMatcher(matcher) + var nullValue command.ProjectResult + return nullValue +} diff --git a/server/events/mocks/matchers/ptr_to_command_context.go b/server/events/mocks/matchers/ptr_to_command_context.go new file mode 100644 index 0000000000..134e02c1f9 --- /dev/null +++ b/server/events/mocks/matchers/ptr_to_command_context.go @@ -0,0 +1,33 @@ +// Code generated by pegomock. DO NOT EDIT. 
+package matchers + +import ( + "github.com/petergtz/pegomock" + "reflect" + + command "github.com/runatlantis/atlantis/server/events/command" +) + +func AnyPtrToCommandContext() *command.Context { + pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(*command.Context))(nil)).Elem())) + var nullValue *command.Context + return nullValue +} + +func EqPtrToCommandContext(value *command.Context) *command.Context { + pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) + var nullValue *command.Context + return nullValue +} + +func NotEqPtrToCommandContext(value *command.Context) *command.Context { + pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) + var nullValue *command.Context + return nullValue +} + +func PtrToCommandContextThat(matcher pegomock.ArgumentMatcher) *command.Context { + pegomock.RegisterMatcher(matcher) + var nullValue *command.Context + return nullValue +} diff --git a/server/events/mocks/matchers/slice_of_command_projectcontext.go b/server/events/mocks/matchers/slice_of_command_projectcontext.go new file mode 100644 index 0000000000..5f7e93135e --- /dev/null +++ b/server/events/mocks/matchers/slice_of_command_projectcontext.go @@ -0,0 +1,33 @@ +// Code generated by pegomock. DO NOT EDIT. +package matchers + +import ( + "github.com/petergtz/pegomock" + "reflect" + + command "github.com/runatlantis/atlantis/server/events/command" +) + +func AnySliceOfCommandProjectContext() []command.ProjectContext { + pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*([]command.ProjectContext))(nil)).Elem())) + var nullValue []command.ProjectContext + return nullValue +} + +func EqSliceOfCommandProjectContext(value []command.ProjectContext) []command.ProjectContext { + pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) + var nullValue []command.ProjectContext + return nullValue +} + +func NotEqSliceOfCommandProjectContext(value []command.ProjectContext) []command.ProjectContext { + pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) + var nullValue []command.ProjectContext + return nullValue +} + +func SliceOfCommandProjectContextThat(matcher pegomock.ArgumentMatcher) []command.ProjectContext { + pegomock.RegisterMatcher(matcher) + var nullValue []command.ProjectContext + return nullValue +} diff --git a/server/events/mocks/mock_apply_handler.go b/server/events/mocks/mock_apply_handler.go deleted file mode 100644 index 3bb8b2b011..0000000000 --- a/server/events/mocks/mock_apply_handler.go +++ /dev/null @@ -1,114 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/events (interfaces: ApplyRequirement) - -package mocks - -import ( - "reflect" - "time" - - pegomock "github.com/petergtz/pegomock" - "github.com/runatlantis/atlantis/server/events/command" -) - -type MockApplyRequirement struct { - fail func(message string, callerSkip ...int) -} - -func NewMockApplyRequirement(options ...pegomock.Option) *MockApplyRequirement { - mock := &MockApplyRequirement{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockApplyRequirement) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockApplyRequirement) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockApplyRequirement) ValidateProject(_param0 string, _param1 command.ProjectContext) (string, error) { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockApplyRequirement().") - } - params := []pegomock.Param{_param0, _param1} - result := pegomock.GetGenericMockFrom(mock).Invoke("ValidateProject", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 string - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(string) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockApplyRequirement) VerifyWasCalledOnce() *VerifierMockApplyRequirement { - return &VerifierMockApplyRequirement{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockApplyRequirement) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockApplyRequirement { - return &VerifierMockApplyRequirement{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockApplyRequirement) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockApplyRequirement { - return &VerifierMockApplyRequirement{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockApplyRequirement) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockApplyRequirement { - return &VerifierMockApplyRequirement{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockApplyRequirement struct { - mock *MockApplyRequirement - invocationCountMatcher pegomock.InvocationCountMatcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockApplyRequirement) ValidateProject(_param0 string, _param1 command.ProjectContext) *MockApplyRequirement_ValidateProject_OngoingVerification { - params := []pegomock.Param{_param0, _param1} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "ValidateProject", params, verifier.timeout) - return &MockApplyRequirement_ValidateProject_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockApplyRequirement_ValidateProject_OngoingVerification struct { - mock *MockApplyRequirement - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockApplyRequirement_ValidateProject_OngoingVerification) GetCapturedArguments() (string, command.ProjectContext) { - _param0, _param1 := c.GetAllCapturedArguments() - return _param0[len(_param0)-1], _param1[len(_param1)-1] -} - -func (c *MockApplyRequirement_ValidateProject_OngoingVerification) GetAllCapturedArguments() (_param0 []string, _param1 []command.ProjectContext) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]string, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(string) - } - _param1 = make([]command.ProjectContext, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(command.ProjectContext) - } - } - return -} diff --git a/server/events/mocks/mock_command_requirement_handler.go b/server/events/mocks/mock_command_requirement_handler.go new file mode 100644 index 0000000000..e60b8cc5f3 --- /dev/null +++ b/server/events/mocks/mock_command_requirement_handler.go @@ -0,0 +1,163 @@ +// Code generated by pegomock. DO NOT EDIT. 
+// Source: github.com/runatlantis/atlantis/server/events (interfaces: CommandRequirementHandler) + +package mocks + +import ( + pegomock "github.com/petergtz/pegomock" + command "github.com/runatlantis/atlantis/server/events/command" + "reflect" + "time" +) + +type MockCommandRequirementHandler struct { + fail func(message string, callerSkip ...int) +} + +func NewMockCommandRequirementHandler(options ...pegomock.Option) *MockCommandRequirementHandler { + mock := &MockCommandRequirementHandler{} + for _, option := range options { + option.Apply(mock) + } + return mock +} + +func (mock *MockCommandRequirementHandler) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } +func (mock *MockCommandRequirementHandler) FailHandler() pegomock.FailHandler { return mock.fail } + +func (mock *MockCommandRequirementHandler) ValidateApplyProject(_param0 string, _param1 command.ProjectContext) (string, error) { + if mock == nil { + panic("mock must not be nil. Use myMock := NewMockCommandRequirementHandler().") + } + params := []pegomock.Param{_param0, _param1} + result := pegomock.GetGenericMockFrom(mock).Invoke("ValidateApplyProject", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) + var ret0 string + var ret1 error + if len(result) != 0 { + if result[0] != nil { + ret0 = result[0].(string) + } + if result[1] != nil { + ret1 = result[1].(error) + } + } + return ret0, ret1 +} + +func (mock *MockCommandRequirementHandler) ValidateImportProject(_param0 string, _param1 command.ProjectContext) (string, error) { + if mock == nil { + panic("mock must not be nil. Use myMock := NewMockCommandRequirementHandler().") + } + params := []pegomock.Param{_param0, _param1} + result := pegomock.GetGenericMockFrom(mock).Invoke("ValidateImportProject", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) + var ret0 string + var ret1 error + if len(result) != 0 { + if result[0] != nil { + ret0 = result[0].(string) + } + if result[1] != nil { + ret1 = result[1].(error) + } + } + return ret0, ret1 +} + +func (mock *MockCommandRequirementHandler) VerifyWasCalledOnce() *VerifierMockCommandRequirementHandler { + return &VerifierMockCommandRequirementHandler{ + mock: mock, + invocationCountMatcher: pegomock.Times(1), + } +} + +func (mock *MockCommandRequirementHandler) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockCommandRequirementHandler { + return &VerifierMockCommandRequirementHandler{ + mock: mock, + invocationCountMatcher: invocationCountMatcher, + } +} + +func (mock *MockCommandRequirementHandler) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockCommandRequirementHandler { + return &VerifierMockCommandRequirementHandler{ + mock: mock, + invocationCountMatcher: invocationCountMatcher, + inOrderContext: inOrderContext, + } +} + +func (mock *MockCommandRequirementHandler) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockCommandRequirementHandler { + return &VerifierMockCommandRequirementHandler{ + mock: mock, + invocationCountMatcher: invocationCountMatcher, + timeout: timeout, + } +} + +type VerifierMockCommandRequirementHandler struct { + mock *MockCommandRequirementHandler + invocationCountMatcher pegomock.InvocationCountMatcher + inOrderContext *pegomock.InOrderContext + timeout time.Duration +} + +func (verifier 
*VerifierMockCommandRequirementHandler) ValidateApplyProject(_param0 string, _param1 command.ProjectContext) *MockCommandRequirementHandler_ValidateApplyProject_OngoingVerification { + params := []pegomock.Param{_param0, _param1} + methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "ValidateApplyProject", params, verifier.timeout) + return &MockCommandRequirementHandler_ValidateApplyProject_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} +} + +type MockCommandRequirementHandler_ValidateApplyProject_OngoingVerification struct { + mock *MockCommandRequirementHandler + methodInvocations []pegomock.MethodInvocation +} + +func (c *MockCommandRequirementHandler_ValidateApplyProject_OngoingVerification) GetCapturedArguments() (string, command.ProjectContext) { + _param0, _param1 := c.GetAllCapturedArguments() + return _param0[len(_param0)-1], _param1[len(_param1)-1] +} + +func (c *MockCommandRequirementHandler_ValidateApplyProject_OngoingVerification) GetAllCapturedArguments() (_param0 []string, _param1 []command.ProjectContext) { + params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) + if len(params) > 0 { + _param0 = make([]string, len(c.methodInvocations)) + for u, param := range params[0] { + _param0[u] = param.(string) + } + _param1 = make([]command.ProjectContext, len(c.methodInvocations)) + for u, param := range params[1] { + _param1[u] = param.(command.ProjectContext) + } + } + return +} + +func (verifier *VerifierMockCommandRequirementHandler) ValidateImportProject(_param0 string, _param1 command.ProjectContext) *MockCommandRequirementHandler_ValidateImportProject_OngoingVerification { + params := []pegomock.Param{_param0, _param1} + methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "ValidateImportProject", params, verifier.timeout) + return &MockCommandRequirementHandler_ValidateImportProject_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} +} + +type MockCommandRequirementHandler_ValidateImportProject_OngoingVerification struct { + mock *MockCommandRequirementHandler + methodInvocations []pegomock.MethodInvocation +} + +func (c *MockCommandRequirementHandler_ValidateImportProject_OngoingVerification) GetCapturedArguments() (string, command.ProjectContext) { + _param0, _param1 := c.GetAllCapturedArguments() + return _param0[len(_param0)-1], _param1[len(_param1)-1] +} + +func (c *MockCommandRequirementHandler_ValidateImportProject_OngoingVerification) GetAllCapturedArguments() (_param0 []string, _param1 []command.ProjectContext) { + params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) + if len(params) > 0 { + _param0 = make([]string, len(c.methodInvocations)) + for u, param := range params[0] { + _param0[u] = param.(string) + } + _param1 = make([]command.ProjectContext, len(c.methodInvocations)) + for u, param := range params[1] { + _param1[u] = param.(command.ProjectContext) + } + } + return +} diff --git a/server/events/mocks/mock_project_command_builder.go b/server/events/mocks/mock_project_command_builder.go index 1b3482359a..51ddc98295 100644 --- a/server/events/mocks/mock_project_command_builder.go +++ b/server/events/mocks/mock_project_command_builder.go @@ -122,6 +122,25 @@ func (mock *MockProjectCommandBuilder) BuildVersionCommands(ctx *command.Context return ret0, ret1 } +func (mock 
*MockProjectCommandBuilder) BuildImportCommands(ctx *command.Context, comment *events.CommentCommand) ([]command.ProjectContext, error) { + if mock == nil { + panic("mock must not be nil. Use myMock := NewMockProjectCommandBuilder().") + } + params := []pegomock.Param{ctx, comment} + result := pegomock.GetGenericMockFrom(mock).Invoke("BuildImportCommands", params, []reflect.Type{reflect.TypeOf((*[]command.ProjectContext)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) + var ret0 []command.ProjectContext + var ret1 error + if len(result) != 0 { + if result[0] != nil { + ret0 = result[0].([]command.ProjectContext) + } + if result[1] != nil { + ret1 = result[1].(error) + } + } + return ret0, ret1 +} + func (mock *MockProjectCommandBuilder) VerifyWasCalledOnce() *VerifierMockProjectCommandBuilder { return &VerifierMockProjectCommandBuilder{ mock: mock, @@ -309,3 +328,34 @@ func (c *MockProjectCommandBuilder_BuildVersionCommands_OngoingVerification) Get } return } + +func (verifier *VerifierMockProjectCommandBuilder) BuildImportCommands(ctx *command.Context, comment *events.CommentCommand) *MockProjectCommandBuilder_BuildImportCommands_OngoingVerification { + params := []pegomock.Param{ctx, comment} + methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "BuildImportCommands", params, verifier.timeout) + return &MockProjectCommandBuilder_BuildImportCommands_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} +} + +type MockProjectCommandBuilder_BuildImportCommands_OngoingVerification struct { + mock *MockProjectCommandBuilder + methodInvocations []pegomock.MethodInvocation +} + +func (c *MockProjectCommandBuilder_BuildImportCommands_OngoingVerification) GetCapturedArguments() (*command.Context, *events.CommentCommand) { + ctx, comment := c.GetAllCapturedArguments() + return ctx[len(ctx)-1], comment[len(comment)-1] +} + +func (c *MockProjectCommandBuilder_BuildImportCommands_OngoingVerification) GetAllCapturedArguments() (_param0 []*command.Context, _param1 []*events.CommentCommand) { + params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) + if len(params) > 0 { + _param0 = make([]*command.Context, len(c.methodInvocations)) + for u, param := range params[0] { + _param0[u] = param.(*command.Context) + } + _param1 = make([]*events.CommentCommand, len(c.methodInvocations)) + for u, param := range params[1] { + _param1[u] = param.(*events.CommentCommand) + } + } + return +} diff --git a/server/events/mocks/mock_project_command_runner.go b/server/events/mocks/mock_project_command_runner.go index 9f27f89a3d..e4e8d1f637 100644 --- a/server/events/mocks/mock_project_command_runner.go +++ b/server/events/mocks/mock_project_command_runner.go @@ -101,6 +101,21 @@ func (mock *MockProjectCommandRunner) Version(ctx command.ProjectContext) comman return ret0 } +func (mock *MockProjectCommandRunner) Import(ctx command.ProjectContext) command.ProjectResult { + if mock == nil { + panic("mock must not be nil. 
Use myMock := NewMockProjectCommandRunner().") + } + params := []pegomock.Param{ctx} + result := pegomock.GetGenericMockFrom(mock).Invoke("Import", params, []reflect.Type{reflect.TypeOf((*command.ProjectResult)(nil)).Elem()}) + var ret0 command.ProjectResult + if len(result) != 0 { + if result[0] != nil { + ret0 = result[0].(command.ProjectResult) + } + } + return ret0 +} + func (mock *MockProjectCommandRunner) VerifyWasCalledOnce() *VerifierMockProjectCommandRunner { return &VerifierMockProjectCommandRunner{ mock: mock, @@ -272,3 +287,30 @@ func (c *MockProjectCommandRunner_Version_OngoingVerification) GetAllCapturedArg } return } + +func (verifier *VerifierMockProjectCommandRunner) Import(ctx command.ProjectContext) *MockProjectCommandRunner_Import_OngoingVerification { + params := []pegomock.Param{ctx} + methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Import", params, verifier.timeout) + return &MockProjectCommandRunner_Import_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} +} + +type MockProjectCommandRunner_Import_OngoingVerification struct { + mock *MockProjectCommandRunner + methodInvocations []pegomock.MethodInvocation +} + +func (c *MockProjectCommandRunner_Import_OngoingVerification) GetCapturedArguments() command.ProjectContext { + ctx := c.GetAllCapturedArguments() + return ctx[len(ctx)-1] +} + +func (c *MockProjectCommandRunner_Import_OngoingVerification) GetAllCapturedArguments() (_param0 []command.ProjectContext) { + params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) + if len(params) > 0 { + _param0 = make([]command.ProjectContext, len(c.methodInvocations)) + for u, param := range params[0] { + _param0[u] = param.(command.ProjectContext) + } + } + return +} diff --git a/server/events/mocks/mock_working_dir.go b/server/events/mocks/mock_working_dir.go index e5f976a011..bf4585fb2c 100644 --- a/server/events/mocks/mock_working_dir.go +++ b/server/events/mocks/mock_working_dir.go @@ -4,12 +4,11 @@ package mocks import ( - "reflect" - "time" - pegomock "github.com/petergtz/pegomock" models "github.com/runatlantis/atlantis/server/events/models" logging "github.com/runatlantis/atlantis/server/logging" + "reflect" + "time" ) type MockWorkingDir struct { @@ -49,9 +48,6 @@ func (mock *MockWorkingDir) Clone(log logging.SimpleLogging, headRepo models.Rep } return ret0, ret1, ret2 } -func (mock *MockWorkingDir) HasDiverged(log logging.SimpleLogging, cloneDir string) bool { - return true -} func (mock *MockWorkingDir) GetWorkingDir(r models.Repo, p models.PullRequest, workspace string) (string, error) { if mock == nil { @@ -72,6 +68,21 @@ func (mock *MockWorkingDir) GetWorkingDir(r models.Repo, p models.PullRequest, w return ret0, ret1 } +func (mock *MockWorkingDir) HasDiverged(log logging.SimpleLogging, cloneDir string) bool { + if mock == nil { + panic("mock must not be nil. Use myMock := NewMockWorkingDir().") + } + params := []pegomock.Param{log, cloneDir} + result := pegomock.GetGenericMockFrom(mock).Invoke("HasDiverged", params, []reflect.Type{reflect.TypeOf((*bool)(nil)).Elem()}) + var ret0 bool + if len(result) != 0 { + if result[0] != nil { + ret0 = result[0].(bool) + } + } + return ret0 +} + func (mock *MockWorkingDir) GetPullDir(r models.Repo, p models.PullRequest) (string, error) { if mock == nil { panic("mock must not be nil. 
Use myMock := NewMockWorkingDir().") @@ -121,25 +132,6 @@ func (mock *MockWorkingDir) DeleteForWorkspace(r models.Repo, p models.PullReque return ret0 } -func (mock *MockWorkingDir) IsFileTracked(log logging.SimpleLogging, cloneDir string, filename string) (bool, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockWorkingDir().") - } - params := []pegomock.Param{log, cloneDir, filename} - result := pegomock.GetGenericMockFrom(mock).Invoke("IsFileTracked", params, []reflect.Type{reflect.TypeOf((*bool)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 bool - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(bool) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - func (mock *MockWorkingDir) VerifyWasCalledOnce() *VerifierMockWorkingDir { return &VerifierMockWorkingDir{ mock: mock, @@ -378,38 +370,3 @@ func (c *MockWorkingDir_DeleteForWorkspace_OngoingVerification) GetAllCapturedAr } return } - -func (verifier *VerifierMockWorkingDir) IsFileTracked(log logging.SimpleLogging, cloneDir string, filename string) *MockWorkingDir_IsFileTracked_OngoingVerification { - params := []pegomock.Param{log, cloneDir, filename} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "IsFileTracked", params, verifier.timeout) - return &MockWorkingDir_IsFileTracked_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockWorkingDir_IsFileTracked_OngoingVerification struct { - mock *MockWorkingDir - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockWorkingDir_IsFileTracked_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, string, string) { - log, cloneDir, filename := c.GetAllCapturedArguments() - return log[len(log)-1], cloneDir[len(cloneDir)-1], filename[len(filename)-1] -} - -func (c *MockWorkingDir_IsFileTracked_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []string, _param2 []string) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(logging.SimpleLogging) - } - _param1 = make([]string, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(string) - } - _param2 = make([]string, len(c.methodInvocations)) - for u, param := range params[2] { - _param2[u] = param.(string) - } - } - return -} diff --git a/server/events/models/models.go b/server/events/models/models.go index 3c6b8f5b36..f80bf72ffb 100644 --- a/server/events/models/models.go +++ b/server/events/models/models.go @@ -411,6 +411,14 @@ type PolicyCheckSuccess struct { HasDiverged bool } +// ImportSuccess is the result of a successful import run. +type ImportSuccess struct { + // Output is the output from terraform import + Output string + // RePlanCmd is the command that users should run to re-plan this project. + RePlanCmd string +} + // Summary extracts one line summary of policy check. 
func (p *PolicyCheckSuccess) Summary() string { note := "" diff --git a/server/events/project_command_builder.go b/server/events/project_command_builder.go index d616f25a9a..3e8772d657 100644 --- a/server/events/project_command_builder.go +++ b/server/events/project_command_builder.go @@ -145,6 +145,13 @@ type ProjectVersionCommandBuilder interface { BuildVersionCommands(ctx *command.Context, comment *CommentCommand) ([]command.ProjectContext, error) } +type ProjectImportCommandBuilder interface { + // BuildImportCommands builds project Import commands for this ctx and comment. If + // comment doesn't specify one project then there may be multiple commands + // to be run. + BuildImportCommands(ctx *command.Context, comment *CommentCommand) ([]command.ProjectContext, error) +} + //go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_project_command_builder.go ProjectCommandBuilder // ProjectCommandBuilder builds commands that run on individual projects. @@ -153,6 +160,7 @@ type ProjectCommandBuilder interface { ProjectApplyCommandBuilder ProjectApprovePoliciesCommandBuilder ProjectVersionCommandBuilder + ProjectImportCommandBuilder } // DefaultProjectCommandBuilder implements ProjectCommandBuilder. @@ -177,7 +185,7 @@ type DefaultProjectCommandBuilder struct { // See ProjectCommandBuilder.BuildAutoplanCommands. func (p *DefaultProjectCommandBuilder) BuildAutoplanCommands(ctx *command.Context) ([]command.ProjectContext, error) { - projCtxs, err := p.buildPlanAllCommands(ctx, nil, false) + projCtxs, err := p.buildAllCommandsByCfg(ctx, command.Plan, nil, false) if err != nil { return nil, err } @@ -195,7 +203,7 @@ func (p *DefaultProjectCommandBuilder) BuildAutoplanCommands(ctx *command.Contex // See ProjectCommandBuilder.BuildPlanCommands. func (p *DefaultProjectCommandBuilder) BuildPlanCommands(ctx *command.Context, cmd *CommentCommand) ([]command.ProjectContext, error) { if !cmd.IsForSpecificProject() { - return p.buildPlanAllCommands(ctx, cmd.Flags, cmd.Verbose) + return p.buildAllCommandsByCfg(ctx, cmd.CommandName(), cmd.Flags, cmd.Verbose) } pcc, err := p.buildProjectPlanCommand(ctx, cmd) return pcc, err @@ -204,27 +212,35 @@ func (p *DefaultProjectCommandBuilder) BuildPlanCommands(ctx *command.Context, c // See ProjectCommandBuilder.BuildApplyCommands. 
func (p *DefaultProjectCommandBuilder) BuildApplyCommands(ctx *command.Context, cmd *CommentCommand) ([]command.ProjectContext, error) { if !cmd.IsForSpecificProject() { - return p.buildAllProjectCommands(ctx, cmd) + return p.buildAllProjectCommandsByPlan(ctx, cmd) } pac, err := p.buildProjectApplyCommand(ctx, cmd) return pac, err } func (p *DefaultProjectCommandBuilder) BuildApprovePoliciesCommands(ctx *command.Context, cmd *CommentCommand) ([]command.ProjectContext, error) { - return p.buildAllProjectCommands(ctx, cmd) + return p.buildAllProjectCommandsByPlan(ctx, cmd) } func (p *DefaultProjectCommandBuilder) BuildVersionCommands(ctx *command.Context, cmd *CommentCommand) ([]command.ProjectContext, error) { if !cmd.IsForSpecificProject() { - return p.buildAllProjectCommands(ctx, cmd) + return p.buildAllProjectCommandsByPlan(ctx, cmd) } pac, err := p.buildProjectVersionCommand(ctx, cmd) return pac, err } -// buildPlanAllCommands builds plan contexts for all projects we determine were +func (p *DefaultProjectCommandBuilder) BuildImportCommands(ctx *command.Context, cmd *CommentCommand) ([]command.ProjectContext, error) { + if !cmd.IsForSpecificProject() { + // import discards any existing plan files, so use buildAllCommandsByCfg instead of buildAllProjectCommandsByPlan. + return p.buildAllCommandsByCfg(ctx, cmd.CommandName(), cmd.Flags, cmd.Verbose) + } + return p.buildProjectImportCommand(ctx, cmd) +} + +// buildAllCommandsByCfg builds init contexts for all projects we determine were // modified in this ctx. -func (p *DefaultProjectCommandBuilder) buildPlanAllCommands(ctx *command.Context, commentFlags []string, verbose bool) ([]command.ProjectContext, error) { +func (p *DefaultProjectCommandBuilder) buildAllCommandsByCfg(ctx *command.Context, cmdName command.Name, commentFlags []string, verbose bool) ([]command.ProjectContext, error) { // We'll need the list of modified files. modifiedFiles, err := p.VCSClient.GetModifiedFiles(ctx.Pull.BaseRepo, ctx.Pull) if err != nil { @@ -312,7 +328,7 @@ func (p *DefaultProjectCommandBuilder) buildPlanAllCommands(ctx *command.Context projCtxs = append(projCtxs, p.ProjectCommandContextBuilder.BuildProjectContext( ctx, - command.Plan, + cmdName, mergedCfg, commentFlags, repoDir, @@ -349,7 +365,7 @@ func (p *DefaultProjectCommandBuilder) buildPlanAllCommands(ctx *command.Context projCtxs = append(projCtxs, p.ProjectCommandContextBuilder.BuildProjectContext( ctx, - command.Plan, + cmdName, pCfg, commentFlags, repoDir, @@ -517,9 +533,9 @@ func (p *DefaultProjectCommandBuilder) getCfg(ctx *command.Context, projectName return } -// buildAllProjectCommands builds contexts for a command for every project that has +// buildAllProjectCommandsByPlan builds contexts for a command for every project that has // pending plans in this ctx. -func (p *DefaultProjectCommandBuilder) buildAllProjectCommands(ctx *command.Context, commentCmd *CommentCommand) ([]command.ProjectContext, error) { +func (p *DefaultProjectCommandBuilder) buildAllProjectCommandsByPlan(ctx *command.Context, commentCmd *CommentCommand) ([]command.ProjectContext, error) { // Lock all dirs in this pull request (instead of a single dir) because we // don't know how many dirs we'll need to run the command in. unlockFn, err := p.WorkingDirLocker.TryLockPull(ctx.Pull.BaseRepo.FullName, ctx.Pull.Num) @@ -643,6 +659,47 @@ func (p *DefaultProjectCommandBuilder) buildProjectVersionCommand(ctx *command.C ) } +// buildProjectImportCommand builds an import command for the single project +// identified by cmd.
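+// When the -w and -d flags are omitted, the workspace and directory default to
+// the repository's default workspace and root.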
+func (p *DefaultProjectCommandBuilder) buildProjectImportCommand(ctx *command.Context, cmd *CommentCommand) ([]command.ProjectContext, error) { + workspace := DefaultWorkspace + if cmd.Workspace != "" { + workspace = cmd.Workspace + } + + var projCtx []command.ProjectContext + unlockFn, err := p.WorkingDirLocker.TryLock(ctx.Pull.BaseRepo.FullName, ctx.Pull.Num, workspace, DefaultRepoRelDir) + if err != nil { + return projCtx, err + } + defer unlockFn() + + // use the default repository workspace because it is the only one guaranteed to have an atlantis.yaml, + // other workspaces will not have the file if they are using pre_workflow_hooks to generate it dynamically + repoDir, err := p.WorkingDir.GetWorkingDir(ctx.Pull.BaseRepo, ctx.Pull, DefaultWorkspace) + if os.IsNotExist(errors.Cause(err)) { + return projCtx, errors.New("no working directory found–did you run plan?") + } else if err != nil { + return projCtx, err + } + + repoRelDir := DefaultRepoRelDir + if cmd.RepoRelDir != "" { + repoRelDir = cmd.RepoRelDir + } + + return p.buildProjectCommandCtx( + ctx, + command.Import, + cmd.ProjectName, + cmd.Flags, + repoDir, + repoRelDir, + workspace, + cmd.Verbose, + ) +} + // buildProjectCommandCtx builds a context for a single or several projects identified // by the parameters. func (p *DefaultProjectCommandBuilder) buildProjectCommandCtx(ctx *command.Context, diff --git a/server/events/project_command_builder_internal_test.go b/server/events/project_command_builder_internal_test.go index 3a9adbb9a3..2dfdfd2630 100644 --- a/server/events/project_command_builder_internal_test.go +++ b/server/events/project_command_builder_internal_test.go @@ -74,16 +74,17 @@ workflows: PullReqStatus: models.PullReqStatus{ Mergeable: true, }, - Pull: pull, - ProjectName: "", - ApplyRequirements: []string{}, - RePlanCmd: "atlantis plan -d project1 -w myworkspace -- flag", - RepoRelDir: "project1", - User: models.User{}, - Verbose: true, - Workspace: "myworkspace", - PolicySets: emptyPolicySets, - RepoLocking: true, + Pull: pull, + ProjectName: "", + ApplyRequirements: []string{}, + ImportRequirements: []string{}, + RePlanCmd: "atlantis plan -d project1 -w myworkspace -- flag", + RepoRelDir: "project1", + User: models.User{}, + Verbose: true, + Workspace: "myworkspace", + PolicySets: emptyPolicySets, + RepoLocking: true, }, expPlanSteps: []string{"init", "plan"}, expApplySteps: []string{"apply"}, @@ -128,30 +129,32 @@ projects: PullReqStatus: models.PullReqStatus{ Mergeable: true, }, - Pull: pull, - ProjectName: "", - ApplyRequirements: []string{}, - RepoConfigVersion: 3, - RePlanCmd: "atlantis plan -d project1 -w myworkspace -- flag", - RepoRelDir: "project1", - TerraformVersion: mustVersion("10.0"), - User: models.User{}, - Verbose: true, - Workspace: "myworkspace", - PolicySets: emptyPolicySets, - RepoLocking: true, + Pull: pull, + ProjectName: "", + ApplyRequirements: []string{}, + ImportRequirements: []string{}, + RepoConfigVersion: 3, + RePlanCmd: "atlantis plan -d project1 -w myworkspace -- flag", + RepoRelDir: "project1", + TerraformVersion: mustVersion("10.0"), + User: models.User{}, + Verbose: true, + Workspace: "myworkspace", + PolicySets: emptyPolicySets, + RepoLocking: true, }, expPlanSteps: []string{"init", "plan"}, expApplySteps: []string{"apply"}, }, // Set a global apply req that should be used. 
- "global apply_requirements": { + "global requirements": { globalCfg: ` repos: - id: /.*/ workflow: default apply_requirements: [approved, mergeable] + import_requirements: [approved, mergeable] workflows: default: plan: @@ -184,18 +187,19 @@ projects: PullReqStatus: models.PullReqStatus{ Mergeable: true, }, - Pull: pull, - ProjectName: "", - ApplyRequirements: []string{"approved", "mergeable"}, - RepoConfigVersion: 3, - RePlanCmd: "atlantis plan -d project1 -w myworkspace -- flag", - RepoRelDir: "project1", - TerraformVersion: mustVersion("10.0"), - User: models.User{}, - Verbose: true, - Workspace: "myworkspace", - PolicySets: emptyPolicySets, - RepoLocking: true, + Pull: pull, + ProjectName: "", + ApplyRequirements: []string{"approved", "mergeable"}, + ImportRequirements: []string{"approved", "mergeable"}, + RepoConfigVersion: 3, + RePlanCmd: "atlantis plan -d project1 -w myworkspace -- flag", + RepoRelDir: "project1", + TerraformVersion: mustVersion("10.0"), + User: models.User{}, + Verbose: true, + Workspace: "myworkspace", + PolicySets: emptyPolicySets, + RepoLocking: true, }, expPlanSteps: []string{"init", "plan"}, expApplySteps: []string{"apply"}, @@ -210,6 +214,7 @@ repos: - id: github.com/owner/repo workflow: specific apply_requirements: [approved] + import_requirements: [approved] workflows: default: plan: @@ -248,18 +253,19 @@ projects: PullReqStatus: models.PullReqStatus{ Mergeable: true, }, - Pull: pull, - ProjectName: "", - ApplyRequirements: []string{"approved"}, - RepoConfigVersion: 3, - RePlanCmd: "atlantis plan -d project1 -w myworkspace -- flag", - RepoRelDir: "project1", - TerraformVersion: mustVersion("10.0"), - User: models.User{}, - Verbose: true, - Workspace: "myworkspace", - PolicySets: emptyPolicySets, - RepoLocking: true, + Pull: pull, + ProjectName: "", + ApplyRequirements: []string{"approved"}, + ImportRequirements: []string{"approved"}, + RepoConfigVersion: 3, + RePlanCmd: "atlantis plan -d project1 -w myworkspace -- flag", + RepoRelDir: "project1", + TerraformVersion: mustVersion("10.0"), + User: models.User{}, + Verbose: true, + Workspace: "myworkspace", + PolicySets: emptyPolicySets, + RepoLocking: true, }, expPlanSteps: []string{"plan"}, expApplySteps: []string{}, @@ -357,7 +363,8 @@ repos: - id: /.*/ workflow: default apply_requirements: [approved] - allowed_overrides: [apply_requirements, workflow] + import_requirements: [approved] + allowed_overrides: [apply_requirements, import_requirements, workflow] allow_custom_workflows: true workflows: default: @@ -377,6 +384,7 @@ projects: when_modified: [../modules/**/*.tf] terraform_version: v10.0 apply_requirements: [] + import_requirements: [] workflow: custom workflows: custom: @@ -399,18 +407,19 @@ workflows: PullReqStatus: models.PullReqStatus{ Mergeable: true, }, - Pull: pull, - ProjectName: "", - ApplyRequirements: []string{}, - RepoConfigVersion: 3, - RePlanCmd: "atlantis plan -d project1 -w myworkspace -- flag", - RepoRelDir: "project1", - TerraformVersion: mustVersion("10.0"), - User: models.User{}, - Verbose: true, - Workspace: "myworkspace", - PolicySets: emptyPolicySets, - RepoLocking: true, + Pull: pull, + ProjectName: "", + ApplyRequirements: []string{}, + ImportRequirements: []string{}, + RepoConfigVersion: 3, + RePlanCmd: "atlantis plan -d project1 -w myworkspace -- flag", + RepoRelDir: "project1", + TerraformVersion: mustVersion("10.0"), + User: models.User{}, + Verbose: true, + Workspace: "myworkspace", + PolicySets: emptyPolicySets, + RepoLocking: true, }, expPlanSteps: []string{"plan"}, 
expApplySteps: []string{"apply"}, @@ -459,18 +468,19 @@ projects: PullReqStatus: models.PullReqStatus{ Mergeable: true, }, - Pull: pull, - ProjectName: "", - ApplyRequirements: []string{}, - RepoConfigVersion: 3, - RePlanCmd: "atlantis plan -d project1 -w myworkspace -- flag", - RepoRelDir: "project1", - TerraformVersion: mustVersion("10.0"), - User: models.User{}, - Verbose: true, - Workspace: "myworkspace", - PolicySets: emptyPolicySets, - RepoLocking: true, + Pull: pull, + ProjectName: "", + ApplyRequirements: []string{}, + ImportRequirements: []string{}, + RepoConfigVersion: 3, + RePlanCmd: "atlantis plan -d project1 -w myworkspace -- flag", + RepoRelDir: "project1", + TerraformVersion: mustVersion("10.0"), + User: models.User{}, + Verbose: true, + Workspace: "myworkspace", + PolicySets: emptyPolicySets, + RepoLocking: true, }, expPlanSteps: []string{"plan"}, expApplySteps: []string{"apply"}, @@ -522,18 +532,19 @@ workflows: PullReqStatus: models.PullReqStatus{ Mergeable: true, }, - Pull: pull, - ProjectName: "", - ApplyRequirements: []string{}, - RepoConfigVersion: 3, - RePlanCmd: "atlantis plan -d project1 -w myworkspace -- flag", - RepoRelDir: "project1", - TerraformVersion: mustVersion("10.0"), - User: models.User{}, - Verbose: true, - Workspace: "myworkspace", - PolicySets: emptyPolicySets, - RepoLocking: true, + Pull: pull, + ProjectName: "", + ApplyRequirements: []string{}, + ImportRequirements: []string{}, + RepoConfigVersion: 3, + RePlanCmd: "atlantis plan -d project1 -w myworkspace -- flag", + RepoRelDir: "project1", + TerraformVersion: mustVersion("10.0"), + User: models.User{}, + Verbose: true, + Workspace: "myworkspace", + PolicySets: emptyPolicySets, + RepoLocking: true, }, expPlanSteps: []string{}, expApplySteps: []string{}, @@ -544,6 +555,7 @@ workflows: repos: - id: /.*/ apply_requirements: [approved] + import_requirements: [approved] - id: github.com/owner/repo workflow: custom workflows: @@ -569,17 +581,18 @@ projects: PullReqStatus: models.PullReqStatus{ Mergeable: true, }, - Pull: pull, - ProjectName: "", - ApplyRequirements: []string{"approved"}, - RepoConfigVersion: 3, - RePlanCmd: "atlantis plan -d project1 -w myworkspace -- flag", - RepoRelDir: "project1", - User: models.User{}, - Verbose: true, - Workspace: "myworkspace", - PolicySets: emptyPolicySets, - RepoLocking: true, + Pull: pull, + ProjectName: "", + ApplyRequirements: []string{"approved"}, + ImportRequirements: []string{"approved"}, + RepoConfigVersion: 3, + RePlanCmd: "atlantis plan -d project1 -w myworkspace -- flag", + RepoRelDir: "project1", + User: models.User{}, + Verbose: true, + Workspace: "myworkspace", + PolicySets: emptyPolicySets, + RepoLocking: true, }, expPlanSteps: []string{"plan"}, expApplySteps: []string{"apply"}, @@ -775,18 +788,19 @@ projects: PullReqStatus: models.PullReqStatus{ Mergeable: true, }, - Pull: pull, - ProjectName: "myproject_1", - ApplyRequirements: []string{}, - RepoConfigVersion: 3, - RePlanCmd: "atlantis plan -p myproject_1 -- flag", - RepoRelDir: "project1", - TerraformVersion: mustVersion("10.0"), - User: models.User{}, - Verbose: true, - Workspace: "myworkspace", - PolicySets: emptyPolicySets, - RepoLocking: true, + Pull: pull, + ProjectName: "myproject_1", + ApplyRequirements: []string{}, + ImportRequirements: []string{}, + RepoConfigVersion: 3, + RePlanCmd: "atlantis plan -p myproject_1 -- flag", + RepoRelDir: "project1", + TerraformVersion: mustVersion("10.0"), + User: models.User{}, + Verbose: true, + Workspace: "myworkspace", + PolicySets: emptyPolicySets, + 
RepoLocking: true, }, expPlanSteps: []string{"init", "plan"}, expApplySteps: []string{"apply"}, @@ -945,16 +959,17 @@ repos: PullReqStatus: models.PullReqStatus{ Mergeable: true, }, - Pull: pull, - ProjectName: "", - ApplyRequirements: []string{}, - RePlanCmd: "atlantis plan -d project1 -w myworkspace -- flag", - RepoRelDir: "project1", - User: models.User{}, - Verbose: true, - Workspace: "myworkspace", - PolicySets: emptyPolicySets, - RepoLocking: true, + Pull: pull, + ProjectName: "", + ApplyRequirements: []string{}, + ImportRequirements: []string{}, + RePlanCmd: "atlantis plan -d project1 -w myworkspace -- flag", + RepoRelDir: "project1", + User: models.User{}, + Verbose: true, + Workspace: "myworkspace", + PolicySets: emptyPolicySets, + RepoLocking: true, }, expPolicyCheckSteps: []string{"show", "policy_check"}, }, @@ -1004,18 +1019,19 @@ workflows: PullReqStatus: models.PullReqStatus{ Mergeable: true, }, - Pull: pull, - ProjectName: "", - ApplyRequirements: []string{}, - RepoConfigVersion: 3, - RePlanCmd: "atlantis plan -d project1 -w myworkspace -- flag", - RepoRelDir: "project1", - TerraformVersion: mustVersion("10.0"), - User: models.User{}, - Verbose: true, - Workspace: "myworkspace", - PolicySets: emptyPolicySets, - RepoLocking: true, + Pull: pull, + ProjectName: "", + ApplyRequirements: []string{}, + ImportRequirements: []string{}, + RepoConfigVersion: 3, + RePlanCmd: "atlantis plan -d project1 -w myworkspace -- flag", + RepoRelDir: "project1", + TerraformVersion: mustVersion("10.0"), + User: models.User{}, + Verbose: true, + Workspace: "myworkspace", + PolicySets: emptyPolicySets, + RepoLocking: true, }, expPolicyCheckSteps: []string{"policy_check"}, }, diff --git a/server/events/project_command_context_builder.go b/server/events/project_command_context_builder.go index 62b9adbf27..17c472d411 100644 --- a/server/events/project_command_context_builder.go +++ b/server/events/project_command_context_builder.go @@ -110,6 +110,8 @@ func (cb *DefaultProjectCommandContextBuilder) BuildProjectContext( steps = []valid.Step{{ StepName: "version", }} + case command.Import: + steps = prjCfg.Workflow.Import.Steps } // If TerraformVersion not defined in config file look for a @@ -123,7 +125,6 @@ func (cb *DefaultProjectCommandContextBuilder) BuildProjectContext( cmdName, cb.CommentBuilder.BuildApplyComment(prjCfg.RepoRelDir, prjCfg.Workspace, prjCfg.Name, prjCfg.AutoMergeDisabled), cb.CommentBuilder.BuildPlanComment(prjCfg.RepoRelDir, prjCfg.Workspace, prjCfg.Name, commentFlags), - cb.CommentBuilder.BuildVersionComment(prjCfg.RepoRelDir, prjCfg.Workspace, prjCfg.Name), prjCfg, steps, prjCfg.PolicySets, @@ -183,7 +184,6 @@ func (cb *PolicyCheckProjectCommandContextBuilder) BuildProjectContext( command.PolicyCheck, cb.CommentBuilder.BuildApplyComment(prjCfg.RepoRelDir, prjCfg.Workspace, prjCfg.Name, prjCfg.AutoMergeDisabled), cb.CommentBuilder.BuildPlanComment(prjCfg.RepoRelDir, prjCfg.Workspace, prjCfg.Name, commentFlags), - cb.CommentBuilder.BuildVersionComment(prjCfg.RepoRelDir, prjCfg.Workspace, prjCfg.Name), prjCfg, steps, prjCfg.PolicySets, @@ -206,7 +206,6 @@ func newProjectCommandContext(ctx *command.Context, cmd command.Name, applyCmd string, planCmd string, - versionCmd string, projCfg valid.MergedProjectCfg, steps []valid.Step, policySets valid.PolicySets, @@ -257,6 +256,7 @@ func newProjectCommandContext(ctx *command.Context, Pull: ctx.Pull, ProjectName: projCfg.Name, ApplyRequirements: projCfg.ApplyRequirements, + ImportRequirements: projCfg.ImportRequirements, RePlanCmd: planCmd, 
RepoRelDir: projCfg.RepoRelDir, RepoConfigVersion: projCfg.RepoCfgVersion, diff --git a/server/events/project_command_runner.go b/server/events/project_command_runner.go index 246fdae701..24b36d8770 100644 --- a/server/events/project_command_runner.go +++ b/server/events/project_command_runner.go @@ -113,6 +113,11 @@ type ProjectVersionCommandRunner interface { Version(ctx command.ProjectContext) command.ProjectResult } +type ProjectImportCommandRunner interface { + // Import runs terraform import for the project described by ctx. + Import(ctx command.ProjectContext) command.ProjectResult +} + // ProjectCommandRunner runs project commands. A project command is a command // for a specific TF project. type ProjectCommandRunner interface { @@ -121,6 +126,7 @@ type ProjectCommandRunner interface { ProjectPolicyCheckCommandRunner ProjectApprovePoliciesCommandRunner ProjectVersionCommandRunner + ProjectImportCommandRunner } //go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_job_url_setter.go JobURLSetter @@ -185,22 +191,23 @@ func (p *ProjectOutputWrapper) updateProjectPRStatus(commandName command.Name, c // DefaultProjectCommandRunner implements ProjectCommandRunner. type DefaultProjectCommandRunner struct { - Locker ProjectLocker - LockURLGenerator LockURLGenerator - InitStepRunner StepRunner - PlanStepRunner StepRunner - ShowStepRunner StepRunner - ApplyStepRunner StepRunner - PolicyCheckStepRunner StepRunner - VersionStepRunner StepRunner - RunStepRunner CustomStepRunner - EnvStepRunner EnvStepRunner - MultiEnvStepRunner MultiEnvStepRunner - PullApprovedChecker runtime.PullApprovedChecker - WorkingDir WorkingDir - Webhooks WebhooksSender - WorkingDirLocker WorkingDirLocker - AggregateApplyRequirements ApplyRequirement + Locker ProjectLocker + LockURLGenerator LockURLGenerator + InitStepRunner StepRunner + PlanStepRunner StepRunner + ShowStepRunner StepRunner + ApplyStepRunner StepRunner + PolicyCheckStepRunner StepRunner + VersionStepRunner StepRunner + ImportStepRunner StepRunner + RunStepRunner CustomStepRunner + EnvStepRunner EnvStepRunner + MultiEnvStepRunner MultiEnvStepRunner + PullApprovedChecker runtime.PullApprovedChecker + WorkingDir WorkingDir + Webhooks WebhooksSender + WorkingDirLocker WorkingDirLocker + CommandRequirementHandler CommandRequirementHandler } // Plan runs terraform plan for the project described by ctx. @@ -271,6 +278,20 @@ func (p *DefaultProjectCommandRunner) Version(ctx command.ProjectContext) comman } } +// Import runs terraform import for the project described by ctx. 
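With ProjectImportCommandRunner folded into the composite ProjectCommandRunner interface above, any holder of that interface can drive imports per project; the concrete Import implementation follows directly below. A minimal, illustrative fan-out helper (not part of this patch, assuming the patched events and command packages):

```go
package example

import (
	"github.com/runatlantis/atlantis/server/events"
	"github.com/runatlantis/atlantis/server/events/command"
)

// RunImportForProjects shows the fan-out shape enabled by the new
// ProjectImportCommandRunner interface: one Import call per project context,
// one ProjectResult collected per project. Illustrative only; the patch's
// ImportCommandRunner (wired up in server.go below) does the equivalent work.
func RunImportForProjects(runner events.ProjectImportCommandRunner, ctxs []command.ProjectContext) []command.ProjectResult {
	results := make([]command.ProjectResult, 0, len(ctxs))
	for _, ctx := range ctxs {
		results = append(results, runner.Import(ctx))
	}
	return results
}
```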
+func (p *DefaultProjectCommandRunner) Import(ctx command.ProjectContext) command.ProjectResult { + importSuccess, failure, err := p.doImport(ctx) + return command.ProjectResult{ + Command: command.Import, + ImportSuccess: importSuccess, + Error: err, + Failure: failure, + RepoRelDir: ctx.RepoRelDir, + Workspace: ctx.Workspace, + ProjectName: ctx.ProjectName, + } +} + func (p *DefaultProjectCommandRunner) doApprovePolicies(ctx command.ProjectContext) (*models.PolicyCheckSuccess, string, error) { // TODO: Make this a bit smarter @@ -414,7 +435,7 @@ func (p *DefaultProjectCommandRunner) doApply(ctx command.ProjectContext) (apply return "", "", DirNotExistErr{RepoRelDir: ctx.RepoRelDir} } - failure, err = p.AggregateApplyRequirements.ValidateProject(repoDir, ctx) + failure, err = p.CommandRequirementHandler.ValidateApplyProject(repoDir, ctx) if failure != "" || err != nil { return "", failure, err } @@ -472,6 +493,52 @@ func (p *DefaultProjectCommandRunner) doVersion(ctx command.ProjectContext) (ver return strings.Join(outputs, "\n"), "", nil } +func (p *DefaultProjectCommandRunner) doImport(ctx command.ProjectContext) (out *models.ImportSuccess, failure string, err error) { + // Clone is idempotent so okay to run even if the repo was already cloned. + repoDir, _, cloneErr := p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, ctx.Workspace) + if cloneErr != nil { + return nil, "", cloneErr + } + projAbsPath := filepath.Join(repoDir, ctx.RepoRelDir) + if _, err = os.Stat(projAbsPath); os.IsNotExist(err) { + return nil, "", DirNotExistErr{RepoRelDir: ctx.RepoRelDir} + } + + failure, err = p.CommandRequirementHandler.ValidateImportProject(repoDir, ctx) + if failure != "" || err != nil { + return nil, failure, err + } + + // Acquire Atlantis lock for this repo/dir/workspace. + lockAttempt, err := p.Locker.TryLock(ctx.Log, ctx.Pull, ctx.User, ctx.Workspace, models.NewProject(ctx.Pull.BaseRepo.FullName, ctx.RepoRelDir), ctx.RepoLocking) + if err != nil { + return nil, "", errors.Wrap(err, "acquiring lock") + } + if !lockAttempt.LockAcquired { + return nil, lockAttempt.LockFailureReason, nil + } + ctx.Log.Debug("acquired lock for project") + + // Acquire internal lock for the directory we're going to operate in. 
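The ValidateImportProject call above gates the import the same way apply is gated: every entry in ctx.ImportRequirements must pass before any terraform step runs, and doImport continues with the working-directory lock right after this. A stripped-down sketch of the approved check only, not the patch's DefaultCommandRequirementHandler (which the apply tests above also exercise for mergeable and undiverged):

```go
package example

import "github.com/runatlantis/atlantis/server/events/command"

// validateImportApproval sketches the "approved" gate applied to
// ctx.ImportRequirements. The failure string matches the one asserted in the
// import runner test further below; the real handler covers more cases.
func validateImportApproval(ctx command.ProjectContext) (failure string) {
	for _, req := range ctx.ImportRequirements {
		if req == "approved" && !ctx.PullReqStatus.ApprovalStatus.IsApproved {
			return "Pull request must be approved by at least one person other than the author before running import."
		}
	}
	return ""
}
```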
+ unlockFn, err := p.WorkingDirLocker.TryLock(ctx.Pull.BaseRepo.FullName, ctx.Pull.Num, ctx.Workspace, ctx.RepoRelDir) + if err != nil { + return nil, "", err + } + defer unlockFn() + + outputs, err := p.runSteps(ctx.Steps, ctx, projAbsPath) + if err != nil { + return nil, "", fmt.Errorf("%s\n%s", err, strings.Join(outputs, "\n")) + } + + // after import, re-plan command is required without import args + rePlanCmd := strings.TrimSpace(strings.Split(ctx.RePlanCmd, "--")[0]) + return &models.ImportSuccess{ + Output: strings.Join(outputs, "\n"), + RePlanCmd: rePlanCmd, + }, "", nil +} + func (p *DefaultProjectCommandRunner) runSteps(steps []valid.Step, ctx command.ProjectContext, absPath string) ([]string, error) { var outputs []string @@ -492,6 +559,8 @@ func (p *DefaultProjectCommandRunner) runSteps(steps []valid.Step, ctx command.P out, err = p.ApplyStepRunner.Run(ctx, step.ExtraArgs, absPath, envs) case "version": out, err = p.VersionStepRunner.Run(ctx, step.ExtraArgs, absPath, envs) + case "import": + out, err = p.ImportStepRunner.Run(ctx, step.ExtraArgs, absPath, envs) case "run": out, err = p.RunStepRunner.Run(ctx, step.RunCommand, absPath, envs, true) case "env": diff --git a/server/events/project_command_runner_test.go b/server/events/project_command_runner_test.go index 5b54005f88..260dd8f1cf 100644 --- a/server/events/project_command_runner_test.go +++ b/server/events/project_command_runner_test.go @@ -44,21 +44,21 @@ func TestDefaultProjectCommandRunner_Plan(t *testing.T) { realEnv := runtime.EnvStepRunner{} mockWorkingDir := mocks.NewMockWorkingDir() mockLocker := mocks.NewMockProjectLocker() - mockApplyReqHandler := mocks.NewMockApplyRequirement() + mockCommandRequirementHandler := mocks.NewMockCommandRequirementHandler() runner := events.DefaultProjectCommandRunner{ - Locker: mockLocker, - LockURLGenerator: mockURLGenerator{}, - InitStepRunner: mockInit, - PlanStepRunner: mockPlan, - ApplyStepRunner: mockApply, - RunStepRunner: mockRun, - EnvStepRunner: &realEnv, - PullApprovedChecker: nil, - WorkingDir: mockWorkingDir, - Webhooks: nil, - WorkingDirLocker: events.NewDefaultWorkingDirLocker(), - AggregateApplyRequirements: mockApplyReqHandler, + Locker: mockLocker, + LockURLGenerator: mockURLGenerator{}, + InitStepRunner: mockInit, + PlanStepRunner: mockPlan, + ApplyStepRunner: mockApply, + RunStepRunner: mockRun, + EnvStepRunner: &realEnv, + PullApprovedChecker: nil, + WorkingDir: mockWorkingDir, + Webhooks: nil, + WorkingDirLocker: events.NewDefaultWorkingDirLocker(), + CommandRequirementHandler: mockCommandRequirementHandler, } repoDir := t.TempDir() @@ -261,7 +261,7 @@ func TestDefaultProjectCommandRunner_ApplyNotApproved(t *testing.T) { runner := &events.DefaultProjectCommandRunner{ WorkingDir: mockWorkingDir, WorkingDirLocker: events.NewDefaultWorkingDirLocker(), - AggregateApplyRequirements: &events.AggregateApplyRequirements{ + CommandRequirementHandler: &events.DefaultCommandRequirementHandler{ WorkingDir: mockWorkingDir, }, } @@ -282,7 +282,7 @@ func TestDefaultProjectCommandRunner_ApplyNotMergeable(t *testing.T) { runner := &events.DefaultProjectCommandRunner{ WorkingDir: mockWorkingDir, WorkingDirLocker: events.NewDefaultWorkingDirLocker(), - AggregateApplyRequirements: &events.AggregateApplyRequirements{ + CommandRequirementHandler: &events.DefaultCommandRequirementHandler{ WorkingDir: mockWorkingDir, }, } @@ -306,15 +306,18 @@ func TestDefaultProjectCommandRunner_ApplyDiverged(t *testing.T) { runner := &events.DefaultProjectCommandRunner{ WorkingDir: 
mockWorkingDir, WorkingDirLocker: events.NewDefaultWorkingDirLocker(), - AggregateApplyRequirements: &events.AggregateApplyRequirements{ + CommandRequirementHandler: &events.DefaultCommandRequirementHandler{ WorkingDir: mockWorkingDir, }, } + log := logging.NewNoopLogger(t) ctx := command.ProjectContext{ + Log: log, ApplyRequirements: []string{"undiverged"}, } tmp := t.TempDir() When(mockWorkingDir.GetWorkingDir(ctx.BaseRepo, ctx.Pull, ctx.Workspace)).ThenReturn(tmp, nil) + When(mockWorkingDir.HasDiverged(log, tmp)).ThenReturn(true) res := runner.Apply(ctx) Equals(t, "Default branch must be rebased onto pull request before running apply.", res.Failure) @@ -410,22 +413,22 @@ func TestDefaultProjectCommandRunner_Apply(t *testing.T) { mockWorkingDir := mocks.NewMockWorkingDir() mockLocker := mocks.NewMockProjectLocker() mockSender := mocks.NewMockWebhooksSender() - applyReqHandler := &events.AggregateApplyRequirements{ + applyReqHandler := &events.DefaultCommandRequirementHandler{ WorkingDir: mockWorkingDir, } runner := events.DefaultProjectCommandRunner{ - Locker: mockLocker, - LockURLGenerator: mockURLGenerator{}, - InitStepRunner: mockInit, - PlanStepRunner: mockPlan, - ApplyStepRunner: mockApply, - RunStepRunner: mockRun, - EnvStepRunner: mockEnv, - WorkingDir: mockWorkingDir, - Webhooks: mockSender, - WorkingDirLocker: events.NewDefaultWorkingDirLocker(), - AggregateApplyRequirements: applyReqHandler, + Locker: mockLocker, + LockURLGenerator: mockURLGenerator{}, + InitStepRunner: mockInit, + PlanStepRunner: mockPlan, + ApplyStepRunner: mockApply, + RunStepRunner: mockRun, + EnvStepRunner: mockEnv, + WorkingDir: mockWorkingDir, + Webhooks: mockSender, + WorkingDirLocker: events.NewDefaultWorkingDirLocker(), + CommandRequirementHandler: applyReqHandler, } repoDir := t.TempDir() When(mockWorkingDir.GetWorkingDir( @@ -485,18 +488,18 @@ func TestDefaultProjectCommandRunner_ApplyRunStepFailure(t *testing.T) { mockWorkingDir := mocks.NewMockWorkingDir() mockLocker := mocks.NewMockProjectLocker() mockSender := mocks.NewMockWebhooksSender() - applyReqHandler := &events.AggregateApplyRequirements{ + applyReqHandler := &events.DefaultCommandRequirementHandler{ WorkingDir: mockWorkingDir, } runner := events.DefaultProjectCommandRunner{ - Locker: mockLocker, - LockURLGenerator: mockURLGenerator{}, - ApplyStepRunner: mockApply, - WorkingDir: mockWorkingDir, - WorkingDirLocker: events.NewDefaultWorkingDirLocker(), - AggregateApplyRequirements: applyReqHandler, - Webhooks: mockSender, + Locker: mockLocker, + LockURLGenerator: mockURLGenerator{}, + ApplyStepRunner: mockApply, + WorkingDir: mockWorkingDir, + WorkingDirLocker: events.NewDefaultWorkingDirLocker(), + CommandRequirementHandler: applyReqHandler, + Webhooks: mockSender, } repoDir := t.TempDir() When(mockWorkingDir.GetWorkingDir( @@ -618,6 +621,122 @@ func TestDefaultProjectCommandRunner_RunEnvSteps(t *testing.T) { Equals(t, "var=\n\nvar=value\n\ndynamic_var=dynamic_value\n\ndynamic_var=overridden\n", res.PlanSuccess.TerraformOutput) } +// Test that it runs the expected import steps. 
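One detail from doImport above that the test below pins down: the suggested re-plan comment is ctx.RePlanCmd with everything after "--" stripped, so the import address and ID arguments are dropped ("atlantis plan -d . -- addr id" becomes "atlantis plan -d ."). A standalone check using only the standard library:

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	// doImport strips everything after "--" so the suggested re-plan comment
	// no longer carries the import address and ID arguments.
	rePlanCmd := "atlantis plan -d . -- addr id"
	fmt.Println(strings.TrimSpace(strings.Split(rePlanCmd, "--")[0])) // atlantis plan -d .
}
```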
+func TestDefaultProjectCommandRunner_Import(t *testing.T) { + expEnvs := map[string]string{} + cases := []struct { + description string + steps []valid.Step + importReqs []string + pullReqStatus models.PullReqStatus + setup func(repoDir string, ctx command.ProjectContext, mockLocker *mocks.MockProjectLocker, mockInit *mocks.MockStepRunner, mockImport *mocks.MockStepRunner) + + expSteps []string + expOut *models.ImportSuccess + expFailure string + }{ + { + description: "normal workflow", + steps: valid.DefaultImportStage.Steps, + importReqs: []string{"approved"}, + pullReqStatus: models.PullReqStatus{ + ApprovalStatus: models.ApprovalStatus{ + IsApproved: true, + }, + }, + setup: func(repoDir string, ctx command.ProjectContext, mockLocker *mocks.MockProjectLocker, mockInit *mocks.MockStepRunner, mockImport *mocks.MockStepRunner) { + When(mockLocker.TryLock( + matchers.AnyPtrToLoggingSimpleLogger(), + matchers.AnyModelsPullRequest(), + matchers.AnyModelsUser(), + AnyString(), + matchers.AnyModelsProject(), + AnyBool(), + )).ThenReturn(&events.TryLockResponse{ + LockAcquired: true, + LockKey: "lock-key", + }, nil) + + When(mockInit.Run(ctx, nil, repoDir, expEnvs)).ThenReturn("init", nil) + When(mockImport.Run(ctx, nil, repoDir, expEnvs)).ThenReturn("import", nil) + }, + expSteps: []string{"import"}, + expOut: &models.ImportSuccess{ + Output: "init\nimport", + RePlanCmd: "atlantis plan -d .", + }, + }, + { + description: "approval required", + steps: valid.DefaultImportStage.Steps, + importReqs: []string{"approved"}, + pullReqStatus: models.PullReqStatus{ + ApprovalStatus: models.ApprovalStatus{ + IsApproved: false, + }, + }, + expFailure: "Pull request must be approved by at least one person other than the author before running import.", + }, + } + + for _, c := range cases { + t.Run(c.description, func(t *testing.T) { + RegisterMockTestingT(t) + mockInit := mocks.NewMockStepRunner() + mockImport := mocks.NewMockStepRunner() + mockWorkingDir := mocks.NewMockWorkingDir() + mockLocker := mocks.NewMockProjectLocker() + mockSender := mocks.NewMockWebhooksSender() + applyReqHandler := &events.DefaultCommandRequirementHandler{ + WorkingDir: mockWorkingDir, + } + + runner := events.DefaultProjectCommandRunner{ + Locker: mockLocker, + LockURLGenerator: mockURLGenerator{}, + InitStepRunner: mockInit, + ImportStepRunner: mockImport, + WorkingDir: mockWorkingDir, + Webhooks: mockSender, + WorkingDirLocker: events.NewDefaultWorkingDirLocker(), + CommandRequirementHandler: applyReqHandler, + } + ctx := command.ProjectContext{ + Log: logging.NewNoopLogger(t), + Steps: c.steps, + Workspace: "default", + ImportRequirements: c.importReqs, + RepoRelDir: ".", + PullReqStatus: c.pullReqStatus, + RePlanCmd: "atlantis plan -d . 
-- addr id", + } + repoDir := t.TempDir() + When(mockWorkingDir.Clone( + matchers.AnyPtrToLoggingSimpleLogger(), + matchers.AnyModelsRepo(), + matchers.AnyModelsPullRequest(), + AnyString(), + )).ThenReturn(repoDir, false, nil) + if c.setup != nil { + c.setup(repoDir, ctx, mockLocker, mockInit, mockImport) + } + + res := runner.Import(ctx) + Equals(t, c.expOut, res.ImportSuccess) + Equals(t, c.expFailure, res.Failure) + + for _, step := range c.expSteps { + switch step { + case "init": + mockInit.VerifyWasCalledOnce().Run(ctx, nil, repoDir, expEnvs) + case "import": + mockImport.VerifyWasCalledOnce().Run(ctx, nil, repoDir, expEnvs) + } + } + }) + } +} + type mockURLGenerator struct{} func (m mockURLGenerator) GenerateLockURL(lockID string) string { diff --git a/server/events/templates/import_success_unwrapped.tmpl b/server/events/templates/import_success_unwrapped.tmpl new file mode 100644 index 0000000000..3c04fc7b60 --- /dev/null +++ b/server/events/templates/import_success_unwrapped.tmpl @@ -0,0 +1,8 @@ +{{ define "importSuccessUnwrapped" -}} +```diff +{{.Output}} +``` + +* :repeat: To **plan** this project again, comment: + * `{{.RePlanCmd}}` +{{ end }} diff --git a/server/events/templates/import_success_wrapped.tmpl b/server/events/templates/import_success_wrapped.tmpl new file mode 100644 index 0000000000..d4f421177e --- /dev/null +++ b/server/events/templates/import_success_wrapped.tmpl @@ -0,0 +1,9 @@ +{{ define "importSuccessWrapped" -}} +
<details><summary>Show Output</summary> +```diff +{{.Output}} +``` +</details>
+* :repeat: To **plan** this project again, comment: + * `{{.RePlanCmd}}` +{{ end }} diff --git a/server/events/templates/multi_project_import.tmpl b/server/events/templates/multi_project_import.tmpl new file mode 100644 index 0000000000..81dfc6d5f1 --- /dev/null +++ b/server/events/templates/multi_project_import.tmpl @@ -0,0 +1,3 @@ +{{ define "multiProjectImport" -}} +{{ template "multiProjectApply" . }} +{{- end }} diff --git a/server/events/templates/single_project_import_success.tmpl b/server/events/templates/single_project_import_success.tmpl new file mode 100644 index 0000000000..86e4c9b030 --- /dev/null +++ b/server/events/templates/single_project_import_success.tmpl @@ -0,0 +1,6 @@ +{{ define "singleProjectImport" -}} +{{$result := index .Results 0}}Ran {{.Command}} for {{ if $result.ProjectName }}project: `{{$result.ProjectName}}` {{ end }}dir: `{{$result.RepoRelDir}}` workspace: `{{$result.Workspace}}` + +{{$result.Rendered}} +{{ template "log" . }} +{{ end }} diff --git a/server/scheduled/executor_service.go b/server/scheduled/executor_service.go index e4dab1c3e7..2d522d472e 100644 --- a/server/scheduled/executor_service.go +++ b/server/scheduled/executor_service.go @@ -2,13 +2,14 @@ package scheduled import ( "context" - "github.com/runatlantis/atlantis/server/logging" - "github.com/uber-go/tally" "os" "os/signal" "sync" "syscall" "time" + + "github.com/runatlantis/atlantis/server/logging" + "github.com/uber-go/tally" ) type ExecutorService struct { diff --git a/server/server.go b/server/server.go index 579a921597..9da56f90ff 100644 --- a/server/server.go +++ b/server/server.go @@ -556,16 +556,20 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) { return nil, errors.Wrap(err, "initializing show step runner") } - policyCheckRunner, err := runtime.NewPolicyCheckStepRunner( + policyCheckStepRunner, err := runtime.NewPolicyCheckStepRunner( defaultTfVersion, policy.NewConfTestExecutorWorkflow(logger, binDir, &terraform.DefaultDownloader{}), ) if err != nil { - return nil, errors.Wrap(err, "initializing policy check runner") + return nil, errors.Wrap(err, "initializing policy check step runner") } - applyRequirementHandler := &events.AggregateApplyRequirements{ + if err != nil { + return nil, errors.Wrap(err, "initializing import step runner") + } + + applyRequirementHandler := &events.DefaultCommandRequirementHandler{ WorkingDir: workingDir, } @@ -583,7 +587,7 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) { AsyncTFExec: terraformClient, }, ShowStepRunner: showStepRunner, - PolicyCheckStepRunner: policyCheckRunner, + PolicyCheckStepRunner: policyCheckStepRunner, ApplyStepRunner: &runtime.ApplyStepRunner{ TerraformExecutor: terraformClient, DefaultTFVersion: defaultTfVersion, @@ -601,10 +605,14 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) { TerraformExecutor: terraformClient, DefaultTFVersion: defaultTfVersion, }, - WorkingDir: workingDir, - Webhooks: webhooksManager, - WorkingDirLocker: workingDirLocker, - AggregateApplyRequirements: applyRequirementHandler, + ImportStepRunner: &runtime.ImportStepRunner{ + TerraformExecutor: terraformClient, + DefaultTFVersion: defaultTfVersion, + }, + WorkingDir: workingDir, + Webhooks: webhooksManager, + WorkingDirLocker: workingDirLocker, + CommandRequirementHandler: applyRequirementHandler, } dbUpdater := &events.DBUpdater{ @@ -703,12 +711,19 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) { userConfig.SilenceNoProjects, ) + importCommandRunner 
:= events.NewImportCommandRunner( + pullUpdater, + projectCommandBuilder, + instrumentedProjectCmdRunner, + ) + commentCommandRunnerByCmd := map[command.Name]events.CommentCommandRunner{ command.Plan: planCommandRunner, command.Apply: applyCommandRunner, command.ApprovePolicies: approvePoliciesCommandRunner, command.Unlock: unlockCommandRunner, command.Version: versionCommandRunner, + command.Import: importCommandRunner, } githubTeamAllowlistChecker, err := events.NewTeamAllowlistChecker(userConfig.GithubTeamAllowlist)
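For reference, the import_success templates added earlier render like any other command output once the markdown renderer picks them up. A rough stand-alone rendering of the unwrapped body with Go's text/template, using placeholder data shaped like models.ImportSuccess; the real template's exact whitespace is defined in import_success_unwrapped.tmpl, not here:

```go
package main

import (
	"os"
	"text/template"
)

// body approximates import_success_unwrapped.tmpl; illustration only.
const body = "```diff\n{{.Output}}\n```\n\n* :repeat: To **plan** this project again, comment:\n    * `{{.RePlanCmd}}`\n"

func main() {
	t := template.Must(template.New("importSuccessUnwrapped").Parse(body))
	// Placeholder data shaped like models.ImportSuccess (Output, RePlanCmd).
	data := struct{ Output, RePlanCmd string }{
		Output:    "(terraform import output)",
		RePlanCmd: "atlantis plan -d .",
	}
	if err := t.Execute(os.Stdout, data); err != nil {
		panic(err)
	}
}
```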