From 9fa0c91bcb5b3e0b7c44a53abee67e0127167980 Mon Sep 17 00:00:00 2001 From: Nish Krishnan Date: Wed, 14 Oct 2020 15:10:13 -0700 Subject: [PATCH 1/2] [1193] Fix merge strategy for gh apps --- server/events/command_context.go | 2 - server/events/command_runner.go | 28 ++- server/events/command_runner_test.go | 19 +- server/events/github_app_working_dir.go | 12 +- server/events/github_app_working_dir_test.go | 43 ++++- server/events/matchers/models_pullrequest.go | 1 - server/events/matchers/models_repo.go | 1 - .../matchers/ptr_to_logging_simplelogger.go | 1 - server/events/mock_workingdir_test.go | 33 ++-- server/events/mocks/mock_working_dir.go | 28 ++- server/events/project_command_builder.go | 38 ++-- .../project_command_builder_internal_test.go | 25 +-- server/events/project_command_builder_test.go | 14 +- server/events/project_command_runner.go | 14 +- server/events/project_command_runner_test.go | 2 - server/events/vcs/github_credentials.go | 2 + .../vcs/mocks/matchers/ptr_to_http_client.go | 20 +++ .../vcs/mocks/mock_github_credentials.go | 167 ++++++++++++++++++ server/events/working_dir.go | 5 +- server/events/working_dir_test.go | 33 ++-- 20 files changed, 362 insertions(+), 126 deletions(-) create mode 100644 server/events/vcs/mocks/matchers/ptr_to_http_client.go create mode 100644 server/events/vcs/mocks/mock_github_credentials.go diff --git a/server/events/command_context.go b/server/events/command_context.go index 5a2bcfe0b7..72e9bd2dff 100644 --- a/server/events/command_context.go +++ b/server/events/command_context.go @@ -21,8 +21,6 @@ import ( // CommandContext represents the context of a command that should be executed // for a pull request. type CommandContext struct { - // BaseRepo is the repository that the pull request will be merged into. - BaseRepo models.Repo // HeadRepo is the repository that is getting merged into the BaseRepo. // If the pull request branch is from the same repository then HeadRepo will // be the same as BaseRepo. 
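The core of patch 1/2 is moving the base repository off CommandContext and reading it from the pull request instead, so every call site switches from ctx.BaseRepo to ctx.Pull.BaseRepo. A minimal, self-contained sketch of that shape change, using simplified stand-in structs rather than the real models package (illustration only, not part of the patch):

package main

import "fmt"

// Simplified stand-ins for models.Repo and models.PullRequest.
type Repo struct {
    FullName string
    CloneURL string
}

type PullRequest struct {
    Num      int
    BaseRepo Repo // the repo the PR merges into now travels with the pull request
}

// CommandContext no longer carries its own BaseRepo field.
type CommandContext struct {
    HeadRepo Repo
    Pull     PullRequest
}

func main() {
    ctx := CommandContext{
        HeadRepo: Repo{FullName: "fork/atlantis"},
        Pull: PullRequest{
            Num:      1193,
            BaseRepo: Repo{FullName: "runatlantis/atlantis"},
        },
    }
    // Call sites that previously read ctx.BaseRepo now read ctx.Pull.BaseRepo.
    fmt.Println(ctx.Pull.BaseRepo.FullName, ctx.Pull.Num)
}
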
diff --git a/server/events/command_runner.go b/server/events/command_runner.go index e0400054ed..0256bca4ee 100644 --- a/server/events/command_runner.go +++ b/server/events/command_runner.go @@ -126,7 +126,6 @@ func (c *DefaultCommandRunner) RunAutoplanCommand(baseRepo models.Repo, headRepo Log: log, Pull: pull, HeadRepo: headRepo, - BaseRepo: baseRepo, } if !c.validateCtxAndComment(ctx) { return @@ -137,7 +136,7 @@ func (c *DefaultCommandRunner) RunAutoplanCommand(baseRepo models.Repo, headRepo projectCmds, err := c.ProjectCommandBuilder.BuildAutoplanCommands(ctx) if err != nil { - if statusErr := c.CommitStatusUpdater.UpdateCombined(ctx.BaseRepo, ctx.Pull, models.FailedCommitStatus, models.PlanCommand); statusErr != nil { + if statusErr := c.CommitStatusUpdater.UpdateCombined(ctx.Pull.BaseRepo, ctx.Pull, models.FailedCommitStatus, models.PlanCommand); statusErr != nil { ctx.Log.Warn("unable to update commit status: %s", statusErr) } @@ -159,7 +158,7 @@ func (c *DefaultCommandRunner) RunAutoplanCommand(baseRepo models.Repo, headRepo } // At this point we are sure Atlantis has work to do, so set commit status to pending - if err := c.CommitStatusUpdater.UpdateCombined(ctx.BaseRepo, ctx.Pull, models.PendingCommitStatus, models.PlanCommand); err != nil { + if err := c.CommitStatusUpdater.UpdateCombined(ctx.Pull.BaseRepo, ctx.Pull, models.PendingCommitStatus, models.PlanCommand); err != nil { ctx.Log.Warn("unable to update commit status: %s", err) } @@ -246,7 +245,6 @@ func (c *DefaultCommandRunner) RunCommentCommand(baseRepo models.Repo, maybeHead Log: log, Pull: pull, HeadRepo: headRepo, - BaseRepo: baseRepo, } if !c.validateCtxAndComment(ctx) { return @@ -296,7 +294,7 @@ func (c *DefaultCommandRunner) RunCommentCommand(baseRepo models.Repo, maybeHead return } if err != nil { - if statusErr := c.CommitStatusUpdater.UpdateCombined(ctx.BaseRepo, ctx.Pull, models.FailedCommitStatus, cmd.CommandName()); statusErr != nil { + if statusErr := c.CommitStatusUpdater.UpdateCombined(ctx.Pull.BaseRepo, ctx.Pull, models.FailedCommitStatus, cmd.CommandName()); statusErr != nil { ctx.Log.Warn("unable to update commit status: %s", statusErr) } c.updatePull(ctx, cmd, CommandResult{Error: err}) @@ -366,7 +364,7 @@ func (c *DefaultCommandRunner) updateCommitStatus(ctx *CommandContext, cmd model } } - if err := c.CommitStatusUpdater.UpdateCombinedCount(ctx.BaseRepo, ctx.Pull, status, cmd, numSuccess, len(pullStatus.Projects)); err != nil { + if err := c.CommitStatusUpdater.UpdateCombinedCount(ctx.Pull.BaseRepo, ctx.Pull, status, cmd, numSuccess, len(pullStatus.Projects)); err != nil { ctx.Log.Warn("unable to update commit status: %s", err) } } @@ -381,7 +379,7 @@ func (c *DefaultCommandRunner) automerge(ctx *CommandContext, pullStatus models. } // Comment that we're automerging the pull request. - if err := c.VCSClient.CreateComment(ctx.BaseRepo, ctx.Pull.Num, automergeComment, models.ApplyCommand.String()); err != nil { + if err := c.VCSClient.CreateComment(ctx.Pull.BaseRepo, ctx.Pull.Num, automergeComment, models.ApplyCommand.String()); err != nil { ctx.Log.Err("failed to comment about automerge: %s", err) // Commenting isn't required so continue. } @@ -394,7 +392,7 @@ func (c *DefaultCommandRunner) automerge(ctx *CommandContext, pullStatus models. 
ctx.Log.Err("automerging failed: %s", err) failureComment := fmt.Sprintf("Automerging failed:\n```\n%s\n```", err) - if commentErr := c.VCSClient.CreateComment(ctx.BaseRepo, ctx.Pull.Num, failureComment, models.ApplyCommand.String()); commentErr != nil { + if commentErr := c.VCSClient.CreateComment(ctx.Pull.BaseRepo, ctx.Pull.Num, failureComment, models.ApplyCommand.String()); commentErr != nil { ctx.Log.Err("failed to comment about automerge failing: %s", err) } } @@ -498,12 +496,12 @@ func (c *DefaultCommandRunner) buildLogger(repoFullName string, pullNum int) *lo } func (c *DefaultCommandRunner) validateCtxAndComment(ctx *CommandContext) bool { - if !c.AllowForkPRs && ctx.HeadRepo.Owner != ctx.BaseRepo.Owner { + if !c.AllowForkPRs && ctx.HeadRepo.Owner != ctx.Pull.BaseRepo.Owner { if c.SilenceForkPRErrors { return false } ctx.Log.Info("command was run on a fork pull request which is disallowed") - if err := c.VCSClient.CreateComment(ctx.BaseRepo, ctx.Pull.Num, fmt.Sprintf("Atlantis commands can't be run on fork pull requests. To enable, set --%s or, to disable this message, set --%s", c.AllowForkPRsFlag, c.SilenceForkPRErrorsFlag), ""); err != nil { + if err := c.VCSClient.CreateComment(ctx.Pull.BaseRepo, ctx.Pull.Num, fmt.Sprintf("Atlantis commands can't be run on fork pull requests. To enable, set --%s or, to disable this message, set --%s", c.AllowForkPRsFlag, c.SilenceForkPRErrorsFlag), ""); err != nil { ctx.Log.Err("unable to comment: %s", err) } return false @@ -511,7 +509,7 @@ func (c *DefaultCommandRunner) validateCtxAndComment(ctx *CommandContext) bool { if ctx.Pull.State != models.OpenPullState { ctx.Log.Info("command was run on closed pull request") - if err := c.VCSClient.CreateComment(ctx.BaseRepo, ctx.Pull.Num, "Atlantis commands can't be run on closed pull requests", ""); err != nil { + if err := c.VCSClient.CreateComment(ctx.Pull.BaseRepo, ctx.Pull.Num, "Atlantis commands can't be run on closed pull requests", ""); err != nil { ctx.Log.Err("unable to comment: %s", err) } return false @@ -531,13 +529,13 @@ func (c *DefaultCommandRunner) updatePull(ctx *CommandContext, command PullComma // clutter in a pull/merge request. This will not delete the comment, since the // comment trail may be useful in auditing or backtracing problems. if c.HidePrevPlanComments { - if err := c.VCSClient.HidePrevPlanComments(ctx.BaseRepo, ctx.Pull.Num); err != nil { + if err := c.VCSClient.HidePrevPlanComments(ctx.Pull.BaseRepo, ctx.Pull.Num); err != nil { ctx.Log.Err("unable to hide old comments: %s", err) } } - comment := c.MarkdownRenderer.Render(res, command.CommandName(), ctx.Log.History.String(), command.IsVerbose(), ctx.BaseRepo.VCSHost.Type) - if err := c.VCSClient.CreateComment(ctx.BaseRepo, ctx.Pull.Num, comment, command.CommandName().String()); err != nil { + comment := c.MarkdownRenderer.Render(res, command.CommandName(), ctx.Log.History.String(), command.IsVerbose(), ctx.Pull.BaseRepo.VCSHost.Type) + if err := c.VCSClient.CreateComment(ctx.Pull.BaseRepo, ctx.Pull.Num, comment, command.CommandName().String()); err != nil { ctx.Log.Err("unable to comment: %s", err) } } @@ -560,7 +558,7 @@ func (c *DefaultCommandRunner) logPanics(baseRepo models.Repo, pullNum int, logg // deletePlans deletes all plans generated in this ctx. 
func (c *DefaultCommandRunner) deletePlans(ctx *CommandContext) { - pullDir, err := c.WorkingDir.GetPullDir(ctx.BaseRepo, ctx.Pull) + pullDir, err := c.WorkingDir.GetPullDir(ctx.Pull.BaseRepo, ctx.Pull) if err != nil { ctx.Log.Err("getting pull dir: %s", err) } diff --git a/server/events/command_runner_test.go b/server/events/command_runner_test.go index 45d35fa7d9..80244a6236 100644 --- a/server/events/command_runner_test.go +++ b/server/events/command_runner_test.go @@ -155,7 +155,10 @@ func TestRunCommentCommand_ForkPRDisabled(t *testing.T) { ch.AllowForkPRs = false ch.SilenceForkPRErrors = false var pull github.PullRequest - modelPull := models.PullRequest{State: models.OpenPullState} + modelPull := models.PullRequest{ + BaseRepo: fixtures.GithubRepo, + State: models.OpenPullState, + } When(githubGetter.GetPullRequest(fixtures.GithubRepo, fixtures.Pull.Num)).ThenReturn(&pull, nil) headRepo := fixtures.GithubRepo @@ -174,7 +177,7 @@ func TestRunCommentCommand_ForkPRDisabled_SilenceEnabled(t *testing.T) { ch.AllowForkPRs = false // by default it's false so don't need to reset ch.SilenceForkPRErrors = true var pull github.PullRequest - modelPull := models.PullRequest{State: models.OpenPullState} + modelPull := models.PullRequest{BaseRepo: fixtures.GithubRepo, State: models.OpenPullState} When(githubGetter.GetPullRequest(fixtures.GithubRepo, fixtures.Pull.Num)).ThenReturn(&pull, nil) headRepo := fixtures.GithubRepo @@ -191,7 +194,7 @@ func TestRunCommentCommand_DisableApplyAllDisabled(t *testing.T) { " comment saying that this is not allowed") vcsClient := setup(t) ch.DisableApplyAll = true - modelPull := models.PullRequest{State: models.OpenPullState} + modelPull := models.PullRequest{BaseRepo: fixtures.GithubRepo, State: models.OpenPullState} ch.RunCommentCommand(fixtures.GithubRepo, nil, nil, fixtures.User, modelPull.Num, &events.CommentCommand{Name: models.ApplyCommand}) vcsClient.VerifyWasCalledOnce().CreateComment(fixtures.GithubRepo, modelPull.Num, "**Error:** Running `atlantis apply` without flags is disabled. 
You must specify which project to apply via the `-d `, `-w ` or `-p ` flags.", "apply") } @@ -219,7 +222,7 @@ func TestRunCommentCommand_ClosedPull(t *testing.T) { pull := &github.PullRequest{ State: github.String("closed"), } - modelPull := models.PullRequest{State: models.ClosedPullState} + modelPull := models.PullRequest{BaseRepo: fixtures.GithubRepo, State: models.ClosedPullState} When(githubGetter.GetPullRequest(fixtures.GithubRepo, fixtures.Pull.Num)).ThenReturn(pull, nil) When(eventParsing.ParseGithubPull(pull)).ThenReturn(modelPull, modelPull.BaseRepo, fixtures.GithubRepo, nil) @@ -235,7 +238,7 @@ func TestRunUnlockCommand_VCSComment(t *testing.T) { pull := &github.PullRequest{ State: github.String("open"), } - modelPull := models.PullRequest{State: models.OpenPullState} + modelPull := models.PullRequest{BaseRepo: fixtures.GithubRepo, State: models.OpenPullState} When(githubGetter.GetPullRequest(fixtures.GithubRepo, fixtures.Pull.Num)).ThenReturn(pull, nil) When(eventParsing.ParseGithubPull(pull)).ThenReturn(modelPull, modelPull.BaseRepo, fixtures.GithubRepo, nil) @@ -253,7 +256,7 @@ func TestRunUnlockCommandFail_VCSComment(t *testing.T) { pull := &github.PullRequest{ State: github.String("open"), } - modelPull := models.PullRequest{State: models.OpenPullState} + modelPull := models.PullRequest{BaseRepo: fixtures.GithubRepo, State: models.OpenPullState} When(githubGetter.GetPullRequest(fixtures.GithubRepo, fixtures.Pull.Num)).ThenReturn(pull, nil) When(eventParsing.ParseGithubPull(pull)).ThenReturn(modelPull, modelPull.BaseRepo, fixtures.GithubRepo, nil) When(deleteLockCommand.DeleteLocksByPull(fixtures.GithubRepo.FullName, fixtures.Pull.Num)).ThenReturn(errors.New("err")) @@ -301,6 +304,7 @@ func TestRunAutoplanCommand_DeletePlans(t *testing.T) { When(workingDir.GetPullDir(matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest())). 
ThenReturn(tmp, nil) + fixtures.Pull.BaseRepo = fixtures.GithubRepo ch.RunAutoplanCommand(fixtures.GithubRepo, fixtures.GithubRepo, fixtures.Pull, fixtures.User) pendingPlanFinder.VerifyWasCalledOnce().DeletePlans(tmp) } @@ -312,7 +316,7 @@ func TestApplyWithAutoMerge_VSCMerge(t *testing.T) { pull := &github.PullRequest{ State: github.String("open"), } - modelPull := models.PullRequest{State: models.OpenPullState} + modelPull := models.PullRequest{BaseRepo: fixtures.GithubRepo, State: models.OpenPullState} When(githubGetter.GetPullRequest(fixtures.GithubRepo, fixtures.Pull.Num)).ThenReturn(pull, nil) When(eventParsing.ParseGithubPull(pull)).ThenReturn(modelPull, modelPull.BaseRepo, fixtures.GithubRepo, nil) ch.GlobalAutomerge = true @@ -387,6 +391,7 @@ func TestRunAutoplanCommand_DrainOngoing(t *testing.T) { func TestRunAutoplanCommand_DrainNotOngoing(t *testing.T) { t.Log("if drain is not ongoing then remove ongoing operation must be called even if panic occured") setup(t) + fixtures.Pull.BaseRepo = fixtures.GithubRepo When(projectCommandBuilder.BuildAutoplanCommands(matchers.AnyPtrToEventsCommandContext())).ThenPanic("panic test - if you're seeing this in a test failure this isn't the failing test") ch.RunAutoplanCommand(fixtures.GithubRepo, fixtures.GithubRepo, fixtures.Pull, fixtures.User) projectCommandBuilder.VerifyWasCalledOnce().BuildAutoplanCommands(matchers.AnyPtrToEventsCommandContext()) diff --git a/server/events/github_app_working_dir.go b/server/events/github_app_working_dir.go index 2d91fc5293..9e364eb413 100644 --- a/server/events/github_app_working_dir.go +++ b/server/events/github_app_working_dir.go @@ -21,7 +21,7 @@ type GithubAppWorkingDir struct { } // Clone writes a fresh token for Github App authentication -func (g *GithubAppWorkingDir) Clone(log *logging.SimpleLogger, baseRepo models.Repo, headRepo models.Repo, p models.PullRequest, workspace string) (string, bool, error) { +func (g *GithubAppWorkingDir) Clone(log *logging.SimpleLogger, headRepo models.Repo, p models.PullRequest, workspace string) (string, bool, error) { log.Info("Refreshing git tokens for Github App") @@ -40,8 +40,16 @@ func (g *GithubAppWorkingDir) Clone(log *logging.SimpleLogger, baseRepo models.R return "", false, err } + baseRepo := &p.BaseRepo + + // Realistically, this is a super brittle way of supporting clones using gh app installation tokens + // This URL should be built during Repo creation and the struct should be immutable going forward. 
+ // Doing this requires a larger refactor however, and can probably be coupled with supporting > 1 installation authURL := fmt.Sprintf("://x-access-token:%s", token) baseRepo.CloneURL = strings.Replace(baseRepo.CloneURL, "://:", authURL, 1) + baseRepo.SanitizedCloneURL = strings.Replace(baseRepo.SanitizedCloneURL, "://:", "://x-access-token:", 1) headRepo.CloneURL = strings.Replace(headRepo.CloneURL, "://:", authURL, 1) - return g.WorkingDir.Clone(log, baseRepo, headRepo, p, workspace) + headRepo.SanitizedCloneURL = strings.Replace(baseRepo.SanitizedCloneURL, "://:", "://x-access-token:", 1) + + return g.WorkingDir.Clone(log, headRepo, p, workspace) } diff --git a/server/events/github_app_working_dir_test.go b/server/events/github_app_working_dir_test.go index b229d0200b..fef570c51f 100644 --- a/server/events/github_app_working_dir_test.go +++ b/server/events/github_app_working_dir_test.go @@ -5,10 +5,13 @@ import ( "testing" "github.com/runatlantis/atlantis/server/events" + eventMocks "github.com/runatlantis/atlantis/server/events/mocks" "github.com/runatlantis/atlantis/server/events/models" "github.com/runatlantis/atlantis/server/events/vcs" + vcsMocks "github.com/runatlantis/atlantis/server/events/vcs/mocks" "github.com/runatlantis/atlantis/server/events/vcs/fixtures" . "github.com/runatlantis/atlantis/testing" + . "github.com/petergtz/pegomock" ) // Test that if we don't have any existing files, we check out the repo with a github app. @@ -46,7 +49,8 @@ func TestClone_GithubAppNoneExisting(t *testing.T) { GithubHostname: testServer, } - cloneDir, _, err := gwd.Clone(nil, models.Repo{}, models.Repo{}, models.PullRequest{ + cloneDir, _, err := gwd.Clone(nil, models.Repo{}, models.PullRequest{ + BaseRepo: models.Repo{}, HeadBranch: "branch", }, "default") Ok(t, err) @@ -55,3 +59,40 @@ func TestClone_GithubAppNoneExisting(t *testing.T) { actCommit := runCmd(t, cloneDir, "git", "rev-parse", "HEAD") Equals(t, expCommit, actCommit) } + +func TestClone_GithubAppSetsCorrectUrl(t *testing.T) { + workingDir := eventMocks.NewMockWorkingDir() + + credentials := vcsMocks.NewMockGithubCredentials() + + ghAppWorkingDir := events.GithubAppWorkingDir{ + WorkingDir: workingDir, + Credentials: credentials, + GithubHostname: "some-host", + } + + baseRepo, _ := models.NewRepo( + models.Github, + "runatlantis/atlantis", + "https://github.com/runatlantis/atlantis.git", + + // user and token have to be blank otherwise this proxy wouldn't be invoked to begin with + "", + "", + ) + + headRepo := baseRepo + + modifiedBaseRepo := baseRepo + modifiedBaseRepo.CloneURL = "https://x-access-token:token@github.com/runatlantis/atlantis.git" + modifiedBaseRepo.SanitizedCloneURL = "https://x-access-token:@github.com/runatlantis/atlantis.git" + + When(credentials.GetToken()).ThenReturn("token", nil) + When(workingDir.Clone(nil, modifiedBaseRepo, models.PullRequest{BaseRepo: modifiedBaseRepo}, "default")).ThenReturn( + "", true, nil, + ) + + _, success, _ := ghAppWorkingDir.Clone(nil, headRepo, models.PullRequest{BaseRepo: baseRepo}, "default") + + Assert(t, success == true, "clone url mutation error") +} diff --git a/server/events/matchers/models_pullrequest.go b/server/events/matchers/models_pullrequest.go index 37e4780130..dd1fb0d4ee 100644 --- a/server/events/matchers/models_pullrequest.go +++ b/server/events/matchers/models_pullrequest.go @@ -3,7 +3,6 @@ package matchers import ( "reflect" - "github.com/petergtz/pegomock" models "github.com/runatlantis/atlantis/server/events/models" ) diff --git 
a/server/events/matchers/models_repo.go b/server/events/matchers/models_repo.go index e985fd3a90..418f13cfcf 100644 --- a/server/events/matchers/models_repo.go +++ b/server/events/matchers/models_repo.go @@ -3,7 +3,6 @@ package matchers import ( "reflect" - "github.com/petergtz/pegomock" models "github.com/runatlantis/atlantis/server/events/models" ) diff --git a/server/events/matchers/ptr_to_logging_simplelogger.go b/server/events/matchers/ptr_to_logging_simplelogger.go index 095fa65a72..04c72791bc 100644 --- a/server/events/matchers/ptr_to_logging_simplelogger.go +++ b/server/events/matchers/ptr_to_logging_simplelogger.go @@ -3,7 +3,6 @@ package matchers import ( "reflect" - "github.com/petergtz/pegomock" logging "github.com/runatlantis/atlantis/server/logging" ) diff --git a/server/events/mock_workingdir_test.go b/server/events/mock_workingdir_test.go index db1288f599..a0b5cc51af 100644 --- a/server/events/mock_workingdir_test.go +++ b/server/events/mock_workingdir_test.go @@ -4,12 +4,11 @@ package events import ( - "reflect" - "time" - pegomock "github.com/petergtz/pegomock" models "github.com/runatlantis/atlantis/server/events/models" logging "github.com/runatlantis/atlantis/server/logging" + "reflect" + "time" ) type MockWorkingDir struct { @@ -27,11 +26,11 @@ func NewMockWorkingDir(options ...pegomock.Option) *MockWorkingDir { func (mock *MockWorkingDir) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } func (mock *MockWorkingDir) FailHandler() pegomock.FailHandler { return mock.fail } -func (mock *MockWorkingDir) Clone(log *logging.SimpleLogger, baseRepo models.Repo, headRepo models.Repo, p models.PullRequest, workspace string) (string, bool, error) { +func (mock *MockWorkingDir) Clone(log *logging.SimpleLogger, headRepo models.Repo, p models.PullRequest, workspace string) (string, bool, error) { if mock == nil { panic("mock must not be nil. 
Use myMock := NewMockWorkingDir().") } - params := []pegomock.Param{log, baseRepo, headRepo, p, workspace} + params := []pegomock.Param{log, headRepo, p, workspace} result := pegomock.GetGenericMockFrom(mock).Invoke("Clone", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*bool)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) var ret0 string var ret1 bool @@ -155,8 +154,8 @@ type VerifierMockWorkingDir struct { timeout time.Duration } -func (verifier *VerifierMockWorkingDir) Clone(log *logging.SimpleLogger, baseRepo models.Repo, headRepo models.Repo, p models.PullRequest, workspace string) *MockWorkingDir_Clone_OngoingVerification { - params := []pegomock.Param{log, baseRepo, headRepo, p, workspace} +func (verifier *VerifierMockWorkingDir) Clone(log *logging.SimpleLogger, headRepo models.Repo, p models.PullRequest, workspace string) *MockWorkingDir_Clone_OngoingVerification { + params := []pegomock.Param{log, headRepo, p, workspace} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Clone", params, verifier.timeout) return &MockWorkingDir_Clone_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } @@ -166,12 +165,12 @@ type MockWorkingDir_Clone_OngoingVerification struct { methodInvocations []pegomock.MethodInvocation } -func (c *MockWorkingDir_Clone_OngoingVerification) GetCapturedArguments() (*logging.SimpleLogger, models.Repo, models.Repo, models.PullRequest, string) { - log, baseRepo, headRepo, p, workspace := c.GetAllCapturedArguments() - return log[len(log)-1], baseRepo[len(baseRepo)-1], headRepo[len(headRepo)-1], p[len(p)-1], workspace[len(workspace)-1] +func (c *MockWorkingDir_Clone_OngoingVerification) GetCapturedArguments() (*logging.SimpleLogger, models.Repo, models.PullRequest, string) { + log, headRepo, p, workspace := c.GetAllCapturedArguments() + return log[len(log)-1], headRepo[len(headRepo)-1], p[len(p)-1], workspace[len(workspace)-1] } -func (c *MockWorkingDir_Clone_OngoingVerification) GetAllCapturedArguments() (_param0 []*logging.SimpleLogger, _param1 []models.Repo, _param2 []models.Repo, _param3 []models.PullRequest, _param4 []string) { +func (c *MockWorkingDir_Clone_OngoingVerification) GetAllCapturedArguments() (_param0 []*logging.SimpleLogger, _param1 []models.Repo, _param2 []models.PullRequest, _param3 []string) { params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) if len(params) > 0 { _param0 = make([]*logging.SimpleLogger, len(c.methodInvocations)) @@ -182,17 +181,13 @@ func (c *MockWorkingDir_Clone_OngoingVerification) GetAllCapturedArguments() (_p for u, param := range params[1] { _param1[u] = param.(models.Repo) } - _param2 = make([]models.Repo, len(c.methodInvocations)) + _param2 = make([]models.PullRequest, len(c.methodInvocations)) for u, param := range params[2] { - _param2[u] = param.(models.Repo) + _param2[u] = param.(models.PullRequest) } - _param3 = make([]models.PullRequest, len(c.methodInvocations)) + _param3 = make([]string, len(c.methodInvocations)) for u, param := range params[3] { - _param3[u] = param.(models.PullRequest) - } - _param4 = make([]string, len(c.methodInvocations)) - for u, param := range params[4] { - _param4[u] = param.(string) + _param3[u] = param.(string) } } return diff --git a/server/events/mocks/mock_working_dir.go b/server/events/mocks/mock_working_dir.go index 778bdc7cdd..4ad6467fdf 100644 --- a/server/events/mocks/mock_working_dir.go +++ 
b/server/events/mocks/mock_working_dir.go @@ -26,11 +26,11 @@ func NewMockWorkingDir(options ...pegomock.Option) *MockWorkingDir { func (mock *MockWorkingDir) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } func (mock *MockWorkingDir) FailHandler() pegomock.FailHandler { return mock.fail } -func (mock *MockWorkingDir) Clone(log *logging.SimpleLogger, baseRepo models.Repo, headRepo models.Repo, p models.PullRequest, workspace string) (string, bool, error) { +func (mock *MockWorkingDir) Clone(log *logging.SimpleLogger, headRepo models.Repo, p models.PullRequest, workspace string) (string, bool, error) { if mock == nil { panic("mock must not be nil. Use myMock := NewMockWorkingDir().") } - params := []pegomock.Param{log, baseRepo, headRepo, p, workspace} + params := []pegomock.Param{log, headRepo, p, workspace} result := pegomock.GetGenericMockFrom(mock).Invoke("Clone", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*bool)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) var ret0 string var ret1 bool @@ -154,8 +154,8 @@ type VerifierMockWorkingDir struct { timeout time.Duration } -func (verifier *VerifierMockWorkingDir) Clone(log *logging.SimpleLogger, baseRepo models.Repo, headRepo models.Repo, p models.PullRequest, workspace string) *MockWorkingDir_Clone_OngoingVerification { - params := []pegomock.Param{log, baseRepo, headRepo, p, workspace} +func (verifier *VerifierMockWorkingDir) Clone(log *logging.SimpleLogger, headRepo models.Repo, p models.PullRequest, workspace string) *MockWorkingDir_Clone_OngoingVerification { + params := []pegomock.Param{log, headRepo, p, workspace} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Clone", params, verifier.timeout) return &MockWorkingDir_Clone_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } @@ -165,12 +165,12 @@ type MockWorkingDir_Clone_OngoingVerification struct { methodInvocations []pegomock.MethodInvocation } -func (c *MockWorkingDir_Clone_OngoingVerification) GetCapturedArguments() (*logging.SimpleLogger, models.Repo, models.Repo, models.PullRequest, string) { - log, baseRepo, headRepo, p, workspace := c.GetAllCapturedArguments() - return log[len(log)-1], baseRepo[len(baseRepo)-1], headRepo[len(headRepo)-1], p[len(p)-1], workspace[len(workspace)-1] +func (c *MockWorkingDir_Clone_OngoingVerification) GetCapturedArguments() (*logging.SimpleLogger, models.Repo, models.PullRequest, string) { + log, headRepo, p, workspace := c.GetAllCapturedArguments() + return log[len(log)-1], headRepo[len(headRepo)-1], p[len(p)-1], workspace[len(workspace)-1] } -func (c *MockWorkingDir_Clone_OngoingVerification) GetAllCapturedArguments() (_param0 []*logging.SimpleLogger, _param1 []models.Repo, _param2 []models.Repo, _param3 []models.PullRequest, _param4 []string) { +func (c *MockWorkingDir_Clone_OngoingVerification) GetAllCapturedArguments() (_param0 []*logging.SimpleLogger, _param1 []models.Repo, _param2 []models.PullRequest, _param3 []string) { params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) if len(params) > 0 { _param0 = make([]*logging.SimpleLogger, len(c.methodInvocations)) @@ -181,17 +181,13 @@ func (c *MockWorkingDir_Clone_OngoingVerification) GetAllCapturedArguments() (_p for u, param := range params[1] { _param1[u] = param.(models.Repo) } - _param2 = make([]models.Repo, len(c.methodInvocations)) + _param2 = make([]models.PullRequest, len(c.methodInvocations)) 
for u, param := range params[2] { - _param2[u] = param.(models.Repo) + _param2[u] = param.(models.PullRequest) } - _param3 = make([]models.PullRequest, len(c.methodInvocations)) + _param3 = make([]string, len(c.methodInvocations)) for u, param := range params[3] { - _param3[u] = param.(models.PullRequest) - } - _param4 = make([]string, len(c.methodInvocations)) - for u, param := range params[4] { - _param4[u] = param.(string) + _param3[u] = param.(string) } } return diff --git a/server/events/project_command_builder.go b/server/events/project_command_builder.go index 798df7f38e..391987fbda 100644 --- a/server/events/project_command_builder.go +++ b/server/events/project_command_builder.go @@ -103,20 +103,20 @@ func (p *DefaultProjectCommandBuilder) BuildApplyCommands(ctx *CommandContext, c // modified in this ctx. func (p *DefaultProjectCommandBuilder) buildPlanAllCommands(ctx *CommandContext, commentFlags []string, verbose bool) ([]models.ProjectCommandContext, error) { // We'll need the list of modified files. - modifiedFiles, err := p.VCSClient.GetModifiedFiles(ctx.BaseRepo, ctx.Pull) + modifiedFiles, err := p.VCSClient.GetModifiedFiles(ctx.Pull.BaseRepo, ctx.Pull) if err != nil { return nil, err } ctx.Log.Debug("%d files were modified in this pull request", len(modifiedFiles)) - if p.SkipCloneNoChanges && p.VCSClient.SupportsSingleFileDownload(ctx.BaseRepo) { + if p.SkipCloneNoChanges && p.VCSClient.SupportsSingleFileDownload(ctx.Pull.BaseRepo) { hasRepoCfg, repoCfgData, err := p.VCSClient.DownloadRepoConfigFile(ctx.Pull) if err != nil { return nil, errors.Wrapf(err, "downloading %s", yaml.AtlantisYAMLFilename) } if hasRepoCfg { - repoCfg, err := p.ParserValidator.ParseRepoCfgData(repoCfgData, p.GlobalCfg, ctx.BaseRepo.ID()) + repoCfg, err := p.ParserValidator.ParseRepoCfgData(repoCfgData, p.GlobalCfg, ctx.Pull.BaseRepo.ID()) if err != nil { return nil, errors.Wrapf(err, "parsing %s", yaml.AtlantisYAMLFilename) } @@ -139,7 +139,7 @@ func (p *DefaultProjectCommandBuilder) buildPlanAllCommands(ctx *CommandContext, // Need to lock the workspace we're about to clone to. workspace := DefaultWorkspace - unlockFn, err := p.WorkingDirLocker.TryLock(ctx.BaseRepo.FullName, ctx.Pull.Num, workspace) + unlockFn, err := p.WorkingDirLocker.TryLock(ctx.Pull.BaseRepo.FullName, ctx.Pull.Num, workspace) if err != nil { ctx.Log.Warn("workspace was locked") return nil, err @@ -147,7 +147,7 @@ func (p *DefaultProjectCommandBuilder) buildPlanAllCommands(ctx *CommandContext, ctx.Log.Debug("got workspace lock") defer unlockFn() - repoDir, _, err := p.WorkingDir.Clone(ctx.Log, ctx.BaseRepo, ctx.HeadRepo, ctx.Pull, workspace) + repoDir, _, err := p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, workspace) if err != nil { return nil, err } @@ -161,7 +161,7 @@ func (p *DefaultProjectCommandBuilder) buildPlanAllCommands(ctx *CommandContext, if hasRepoCfg { // If there's a repo cfg then we'll use it to figure out which projects // should be planed. 
- repoCfg, err := p.ParserValidator.ParseRepoCfg(repoDir, p.GlobalCfg, ctx.BaseRepo.ID()) + repoCfg, err := p.ParserValidator.ParseRepoCfg(repoDir, p.GlobalCfg, ctx.Pull.BaseRepo.ID()) if err != nil { return nil, errors.Wrapf(err, "parsing %s", yaml.AtlantisYAMLFilename) } @@ -173,18 +173,18 @@ func (p *DefaultProjectCommandBuilder) buildPlanAllCommands(ctx *CommandContext, ctx.Log.Info("%d projects are to be planned based on their when_modified config", len(matchingProjects)) for _, mp := range matchingProjects { ctx.Log.Debug("determining config for project at dir: %q workspace: %q", mp.Dir, mp.Workspace) - mergedCfg := p.GlobalCfg.MergeProjectCfg(ctx.Log, ctx.BaseRepo.ID(), mp, repoCfg) + mergedCfg := p.GlobalCfg.MergeProjectCfg(ctx.Log, ctx.Pull.BaseRepo.ID(), mp, repoCfg) projCtxs = append(projCtxs, p.buildCtx(ctx, models.PlanCommand, mergedCfg, commentFlags, repoCfg.Automerge, repoCfg.ParallelApply, repoCfg.ParallelPlan, verbose, repoDir)) } } else { // If there is no config file, then we'll plan each project that // our algorithm determines was modified. ctx.Log.Info("found no %s file", yaml.AtlantisYAMLFilename) - modifiedProjects := p.ProjectFinder.DetermineProjects(ctx.Log, modifiedFiles, ctx.BaseRepo.FullName, repoDir) + modifiedProjects := p.ProjectFinder.DetermineProjects(ctx.Log, modifiedFiles, ctx.Pull.BaseRepo.FullName, repoDir) ctx.Log.Info("automatically determined that there were %d projects modified in this pull request: %s", len(modifiedProjects), modifiedProjects) for _, mp := range modifiedProjects { ctx.Log.Debug("determining config for project at dir: %q", mp.Path) - pCfg := p.GlobalCfg.DefaultProjCfg(ctx.Log, ctx.BaseRepo.ID(), mp.Path, DefaultWorkspace) + pCfg := p.GlobalCfg.DefaultProjCfg(ctx.Log, ctx.Pull.BaseRepo.ID(), mp.Path, DefaultWorkspace) projCtxs = append(projCtxs, p.buildCtx(ctx, models.PlanCommand, pCfg, commentFlags, DefaultAutomergeEnabled, DefaultParallelApplyEnabled, DefaultParallelPlanEnabled, verbose, repoDir)) } } @@ -202,14 +202,14 @@ func (p *DefaultProjectCommandBuilder) buildProjectPlanCommand(ctx *CommandConte var pcc models.ProjectCommandContext ctx.Log.Debug("building plan command") - unlockFn, err := p.WorkingDirLocker.TryLock(ctx.BaseRepo.FullName, ctx.Pull.Num, workspace) + unlockFn, err := p.WorkingDirLocker.TryLock(ctx.Pull.BaseRepo.FullName, ctx.Pull.Num, workspace) if err != nil { return pcc, err } defer unlockFn() ctx.Log.Debug("cloning repository") - repoDir, _, err := p.WorkingDir.Clone(ctx.Log, ctx.BaseRepo, ctx.HeadRepo, ctx.Pull, workspace) + repoDir, _, err := p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, workspace) if err != nil { return pcc, err } @@ -227,13 +227,13 @@ func (p *DefaultProjectCommandBuilder) buildProjectPlanCommand(ctx *CommandConte func (p *DefaultProjectCommandBuilder) buildApplyAllCommands(ctx *CommandContext, commentCmd *CommentCommand) ([]models.ProjectCommandContext, error) { // Lock all dirs in this pull request (instead of a single dir) because we // don't know how many dirs we'll need to apply in. 
- unlockFn, err := p.WorkingDirLocker.TryLockPull(ctx.BaseRepo.FullName, ctx.Pull.Num) + unlockFn, err := p.WorkingDirLocker.TryLockPull(ctx.Pull.BaseRepo.FullName, ctx.Pull.Num) if err != nil { return nil, err } defer unlockFn() - pullDir, err := p.WorkingDir.GetPullDir(ctx.BaseRepo, ctx.Pull) + pullDir, err := p.WorkingDir.GetPullDir(ctx.Pull.BaseRepo, ctx.Pull) if err != nil { return nil, err } @@ -263,13 +263,13 @@ func (p *DefaultProjectCommandBuilder) buildProjectApplyCommand(ctx *CommandCont } var projCtx models.ProjectCommandContext - unlockFn, err := p.WorkingDirLocker.TryLock(ctx.BaseRepo.FullName, ctx.Pull.Num, workspace) + unlockFn, err := p.WorkingDirLocker.TryLock(ctx.Pull.BaseRepo.FullName, ctx.Pull.Num, workspace) if err != nil { return projCtx, err } defer unlockFn() - repoDir, err := p.WorkingDir.GetWorkingDir(ctx.BaseRepo, ctx.Pull, workspace) + repoDir, err := p.WorkingDir.GetWorkingDir(ctx.Pull.BaseRepo, ctx.Pull, workspace) if os.IsNotExist(errors.Cause(err)) { return projCtx, errors.New("no working directory found–did you run plan?") } else if err != nil { @@ -308,9 +308,9 @@ func (p *DefaultProjectCommandBuilder) buildProjectCommandCtx( // with both project name and dir/workspace. repoRelDir = projCfg.RepoRelDir workspace = projCfg.Workspace - projCfg = p.GlobalCfg.MergeProjectCfg(ctx.Log, ctx.BaseRepo.ID(), *projCfgPtr, *repoCfgPtr) + projCfg = p.GlobalCfg.MergeProjectCfg(ctx.Log, ctx.Pull.BaseRepo.ID(), *projCfgPtr, *repoCfgPtr) } else { - projCfg = p.GlobalCfg.DefaultProjCfg(ctx.Log, ctx.BaseRepo.ID(), repoRelDir, workspace) + projCfg = p.GlobalCfg.DefaultProjCfg(ctx.Log, ctx.Pull.BaseRepo.ID(), repoRelDir, workspace) } if err := p.validateWorkspaceAllowed(repoCfgPtr, repoRelDir, workspace); err != nil { @@ -345,7 +345,7 @@ func (p *DefaultProjectCommandBuilder) getCfg(ctx *CommandContext, projectName s } var repoConfig valid.RepoCfg - repoConfig, err = p.ParserValidator.ParseRepoCfg(repoDir, p.GlobalCfg, ctx.BaseRepo.ID()) + repoConfig, err = p.ParserValidator.ParseRepoCfg(repoDir, p.GlobalCfg, ctx.Pull.BaseRepo.ID()) if err != nil { return } @@ -435,7 +435,7 @@ func (p *DefaultProjectCommandBuilder) buildCtx(ctx *CommandContext, return models.ProjectCommandContext{ ApplyCmd: p.CommentBuilder.BuildApplyComment(projCfg.RepoRelDir, projCfg.Workspace, projCfg.Name), - BaseRepo: ctx.BaseRepo, + BaseRepo: ctx.Pull.BaseRepo, EscapedCommentArgs: p.escapeArgs(commentArgs), AutomergeEnabled: automergeEnabled, ParallelApplyEnabled: parallelApplyEnabled, diff --git a/server/events/project_command_builder_internal_test.go b/server/events/project_command_builder_internal_test.go index 9adfb64606..2ccbec4517 100644 --- a/server/events/project_command_builder_internal_test.go +++ b/server/events/project_command_builder_internal_test.go @@ -23,6 +23,9 @@ func TestBuildProjectCmdCtx(t *testing.T) { Hostname: "github.com", }, } + pull := models.PullRequest{ + BaseRepo: baseRepo, + } cases := map[string]struct { globalCfg string repoCfg string @@ -57,7 +60,7 @@ workflows: HeadRepo: models.Repo{}, Log: nil, PullMergeable: true, - Pull: models.PullRequest{}, + Pull: pull, ProjectName: "", ApplyRequirements: []string{}, RePlanCmd: "atlantis plan -d project1 -w myworkspace -- flag", @@ -106,7 +109,7 @@ projects: HeadRepo: models.Repo{}, Log: nil, PullMergeable: true, - Pull: models.PullRequest{}, + Pull: pull, ProjectName: "", ApplyRequirements: []string{}, RepoConfigVersion: 3, @@ -157,7 +160,7 @@ projects: HeadRepo: models.Repo{}, Log: nil, PullMergeable: true, - Pull: 
models.PullRequest{}, + Pull: pull, ProjectName: "", ApplyRequirements: []string{"approved", "mergeable"}, RepoConfigVersion: 3, @@ -216,7 +219,7 @@ projects: HeadRepo: models.Repo{}, Log: nil, PullMergeable: true, - Pull: models.PullRequest{}, + Pull: pull, ProjectName: "", ApplyRequirements: []string{"approved"}, RepoConfigVersion: 3, @@ -362,7 +365,7 @@ workflows: HeadRepo: models.Repo{}, Log: nil, PullMergeable: true, - Pull: models.PullRequest{}, + Pull: pull, ProjectName: "", ApplyRequirements: []string{}, RepoConfigVersion: 3, @@ -417,7 +420,7 @@ projects: HeadRepo: models.Repo{}, Log: nil, PullMergeable: true, - Pull: models.PullRequest{}, + Pull: pull, ProjectName: "", ApplyRequirements: []string{}, RepoConfigVersion: 3, @@ -475,7 +478,7 @@ workflows: HeadRepo: models.Repo{}, Log: nil, PullMergeable: true, - Pull: models.PullRequest{}, + Pull: pull, ProjectName: "", ApplyRequirements: []string{}, RepoConfigVersion: 3, @@ -517,7 +520,7 @@ projects: HeadRepo: models.Repo{}, Log: nil, PullMergeable: true, - Pull: models.PullRequest{}, + Pull: pull, ProjectName: "", ApplyRequirements: []string{"approved"}, RepoConfigVersion: 3, @@ -547,7 +550,7 @@ projects: defer cleanup() workingDir := NewMockWorkingDir() - When(workingDir.Clone(matchers.AnyPtrToLoggingSimpleLogger(), matchers.AnyModelsRepo(), matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), AnyString())).ThenReturn(tmp, false, nil) + When(workingDir.Clone(matchers.AnyPtrToLoggingSimpleLogger(), matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), AnyString())).ThenReturn(tmp, false, nil) vcsClient := vcsmocks.NewMockClient() When(vcsClient.GetModifiedFiles(matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest())).ThenReturn([]string{"modules/module/main.tf"}, nil) @@ -578,7 +581,9 @@ projects: for _, cmd := range []models.CommandName{models.PlanCommand, models.ApplyCommand} { t.Run(cmd.String(), func(t *testing.T) { ctx, err := builder.buildProjectCommandCtx(&CommandContext{ - BaseRepo: baseRepo, + Pull: models.PullRequest{ + BaseRepo: baseRepo, + }, PullMergeable: true, }, cmd, "", []string{"flag"}, tmp, "project1", "myworkspace", true) diff --git a/server/events/project_command_builder_test.go b/server/events/project_command_builder_test.go index 50bbd51ca5..5cafdc8dbe 100644 --- a/server/events/project_command_builder_test.go +++ b/server/events/project_command_builder_test.go @@ -126,7 +126,7 @@ projects: defer cleanup() workingDir := mocks.NewMockWorkingDir() - When(workingDir.Clone(matchers.AnyPtrToLoggingSimpleLogger(), matchers.AnyModelsRepo(), matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), AnyString())).ThenReturn(tmpDir, false, nil) + When(workingDir.Clone(matchers.AnyPtrToLoggingSimpleLogger(), matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), AnyString())).ThenReturn(tmpDir, false, nil) vcsClient := vcsmocks.NewMockClient() When(vcsClient.GetModifiedFiles(matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest())).ThenReturn([]string{"main.tf"}, nil) if c.AtlantisYAML != "" { @@ -349,7 +349,7 @@ projects: defer cleanup() workingDir := mocks.NewMockWorkingDir() - When(workingDir.Clone(matchers.AnyPtrToLoggingSimpleLogger(), matchers.AnyModelsRepo(), matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), AnyString())).ThenReturn(tmpDir, false, nil) + When(workingDir.Clone(matchers.AnyPtrToLoggingSimpleLogger(), matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), AnyString())).ThenReturn(tmpDir, false, nil) When(workingDir.GetWorkingDir(matchers.AnyModelsRepo(), 
matchers.AnyModelsPullRequest(), AnyString())).ThenReturn(tmpDir, nil) vcsClient := vcsmocks.NewMockClient() When(vcsClient.GetModifiedFiles(matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest())).ThenReturn([]string{"main.tf"}, nil) @@ -483,7 +483,7 @@ projects: defer cleanup() workingDir := mocks.NewMockWorkingDir() - When(workingDir.Clone(matchers.AnyPtrToLoggingSimpleLogger(), matchers.AnyModelsRepo(), matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), AnyString())).ThenReturn(tmpDir, false, nil) + When(workingDir.Clone(matchers.AnyPtrToLoggingSimpleLogger(), matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), AnyString())).ThenReturn(tmpDir, false, nil) When(workingDir.GetWorkingDir(matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), AnyString())).ThenReturn(tmpDir, nil) vcsClient := vcsmocks.NewMockClient() When(vcsClient.GetModifiedFiles(matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest())).ThenReturn(c.ModifiedFiles, nil) @@ -625,7 +625,6 @@ projects: When(workingDir.Clone( matchers.AnyPtrToLoggingSimpleLogger(), matchers.AnyModelsRepo(), - matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), AnyString())).ThenReturn(repoDir, false, nil) When(workingDir.GetWorkingDir( @@ -645,7 +644,6 @@ projects: } ctx := &events.CommandContext{ - BaseRepo: models.Repo{}, HeadRepo: models.Repo{}, Pull: models.PullRequest{}, User: models.User{}, @@ -691,7 +689,7 @@ func TestDefaultProjectCommandBuilder_EscapeArgs(t *testing.T) { defer cleanup() workingDir := mocks.NewMockWorkingDir() - When(workingDir.Clone(matchers.AnyPtrToLoggingSimpleLogger(), matchers.AnyModelsRepo(), matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), AnyString())).ThenReturn(tmpDir, false, nil) + When(workingDir.Clone(matchers.AnyPtrToLoggingSimpleLogger(), matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), AnyString())).ThenReturn(tmpDir, false, nil) When(workingDir.GetWorkingDir(matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), AnyString())).ThenReturn(tmpDir, nil) vcsClient := vcsmocks.NewMockClient() When(vcsClient.GetModifiedFiles(matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest())).ThenReturn([]string{"main.tf"}, nil) @@ -852,7 +850,6 @@ projects: When(workingDir.Clone( matchers.AnyPtrToLoggingSimpleLogger(), matchers.AnyModelsRepo(), - matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), AnyString())).ThenReturn(tmpDir, false, nil) @@ -923,7 +920,6 @@ projects: var actCtxs []models.ProjectCommandContext var err error actCtxs, err = builder.BuildAutoplanCommands(&events.CommandContext{ - BaseRepo: models.Repo{}, HeadRepo: models.Repo{}, Pull: models.PullRequest{}, User: models.User{}, @@ -932,5 +928,5 @@ projects: }) Ok(t, err) Equals(t, 0, len(actCtxs)) - workingDir.VerifyWasCalled(Never()).Clone(matchers.AnyPtrToLoggingSimpleLogger(), matchers.AnyModelsRepo(), matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), AnyString()) + workingDir.VerifyWasCalled(Never()).Clone(matchers.AnyPtrToLoggingSimpleLogger(), matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), AnyString()) } diff --git a/server/events/project_command_runner.go b/server/events/project_command_runner.go index 1c84f2487c..cc3a8b0a4c 100644 --- a/server/events/project_command_runner.go +++ b/server/events/project_command_runner.go @@ -134,7 +134,7 @@ func (p *DefaultProjectCommandRunner) Apply(ctx models.ProjectCommandContext) mo func (p *DefaultProjectCommandRunner) doPlan(ctx models.ProjectCommandContext) (*models.PlanSuccess, string, error) { // Acquire Atlantis lock 
for this repo/dir/workspace. - lockAttempt, err := p.Locker.TryLock(ctx.Log, ctx.Pull, ctx.User, ctx.Workspace, models.NewProject(ctx.BaseRepo.FullName, ctx.RepoRelDir)) + lockAttempt, err := p.Locker.TryLock(ctx.Log, ctx.Pull, ctx.User, ctx.Workspace, models.NewProject(ctx.Pull.BaseRepo.FullName, ctx.RepoRelDir)) if err != nil { return nil, "", errors.Wrap(err, "acquiring lock") } @@ -144,14 +144,14 @@ func (p *DefaultProjectCommandRunner) doPlan(ctx models.ProjectCommandContext) ( ctx.Log.Debug("acquired lock for project") // Acquire internal lock for the directory we're going to operate in. - unlockFn, err := p.WorkingDirLocker.TryLock(ctx.BaseRepo.FullName, ctx.Pull.Num, ctx.Workspace) + unlockFn, err := p.WorkingDirLocker.TryLock(ctx.Pull.BaseRepo.FullName, ctx.Pull.Num, ctx.Workspace) if err != nil { return nil, "", err } defer unlockFn() // Clone is idempotent so okay to run even if the repo was already cloned. - repoDir, hasDiverged, cloneErr := p.WorkingDir.Clone(ctx.Log, ctx.BaseRepo, ctx.HeadRepo, ctx.Pull, ctx.Workspace) + repoDir, hasDiverged, cloneErr := p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, ctx.Workspace) if cloneErr != nil { if unlockErr := lockAttempt.UnlockFn(); unlockErr != nil { ctx.Log.Err("error unlocking state after plan error: %v", unlockErr) @@ -214,7 +214,7 @@ func (p *DefaultProjectCommandRunner) runSteps(steps []valid.Step, ctx models.Pr } func (p *DefaultProjectCommandRunner) doApply(ctx models.ProjectCommandContext) (applyOut string, failure string, err error) { - repoDir, err := p.WorkingDir.GetWorkingDir(ctx.BaseRepo, ctx.Pull, ctx.Workspace) + repoDir, err := p.WorkingDir.GetWorkingDir(ctx.Pull.BaseRepo, ctx.Pull, ctx.Workspace) if err != nil { if os.IsNotExist(err) { return "", "", errors.New("project has not been cloned–did you run plan?") @@ -229,7 +229,7 @@ func (p *DefaultProjectCommandRunner) doApply(ctx models.ProjectCommandContext) for _, req := range ctx.ApplyRequirements { switch req { case raw.ApprovedApplyRequirement: - approved, err := p.PullApprovedChecker.PullIsApproved(ctx.BaseRepo, ctx.Pull) // nolint: vetshadow + approved, err := p.PullApprovedChecker.PullIsApproved(ctx.Pull.BaseRepo, ctx.Pull) // nolint: vetshadow if err != nil { return "", "", errors.Wrap(err, "checking if pull request was approved") } @@ -243,7 +243,7 @@ func (p *DefaultProjectCommandRunner) doApply(ctx models.ProjectCommandContext) } } // Acquire internal lock for the directory we're going to operate in. 
- unlockFn, err := p.WorkingDirLocker.TryLock(ctx.BaseRepo.FullName, ctx.Pull.Num, ctx.Workspace) + unlockFn, err := p.WorkingDirLocker.TryLock(ctx.Pull.BaseRepo.FullName, ctx.Pull.Num, ctx.Workspace) if err != nil { return "", "", err } @@ -253,7 +253,7 @@ func (p *DefaultProjectCommandRunner) doApply(ctx models.ProjectCommandContext) p.Webhooks.Send(ctx.Log, webhooks.ApplyResult{ // nolint: errcheck Workspace: ctx.Workspace, User: ctx.User, - Repo: ctx.BaseRepo, + Repo: ctx.Pull.BaseRepo, Pull: ctx.Pull, Success: err == nil, Directory: ctx.RepoRelDir, diff --git a/server/events/project_command_runner_test.go b/server/events/project_command_runner_test.go index 809a7b553c..b6739d82d6 100644 --- a/server/events/project_command_runner_test.go +++ b/server/events/project_command_runner_test.go @@ -61,7 +61,6 @@ func TestDefaultProjectCommandRunner_Plan(t *testing.T) { When(mockWorkingDir.Clone( matchers.AnyPtrToLoggingSimpleLogger(), matchers.AnyModelsRepo(), - matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), AnyString(), )).ThenReturn(repoDir, false, nil) @@ -373,7 +372,6 @@ func TestDefaultProjectCommandRunner_RunEnvSteps(t *testing.T) { When(mockWorkingDir.Clone( matchers.AnyPtrToLoggingSimpleLogger(), matchers.AnyModelsRepo(), - matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), AnyString(), )).ThenReturn(repoDir, false, nil) diff --git a/server/events/vcs/github_credentials.go b/server/events/vcs/github_credentials.go index 802097c60e..29db764e00 100644 --- a/server/events/vcs/github_credentials.go +++ b/server/events/vcs/github_credentials.go @@ -12,6 +12,8 @@ import ( "github.com/pkg/errors" ) +//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_github_credentials.go GithubCredentials + // GithubCredentials handles creating http.Clients that authenticate. type GithubCredentials interface { Client() (*http.Client, error) diff --git a/server/events/vcs/mocks/matchers/ptr_to_http_client.go b/server/events/vcs/mocks/matchers/ptr_to_http_client.go new file mode 100644 index 0000000000..4ec2550015 --- /dev/null +++ b/server/events/vcs/mocks/matchers/ptr_to_http_client.go @@ -0,0 +1,20 @@ +// Code generated by pegomock. DO NOT EDIT. +package matchers + +import ( + "reflect" + "github.com/petergtz/pegomock" + http "net/http" +) + +func AnyPtrToHttpClient() *http.Client { + pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(*http.Client))(nil)).Elem())) + var nullValue *http.Client + return nullValue +} + +func EqPtrToHttpClient(value *http.Client) *http.Client { + pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) + var nullValue *http.Client + return nullValue +} diff --git a/server/events/vcs/mocks/mock_github_credentials.go b/server/events/vcs/mocks/mock_github_credentials.go new file mode 100644 index 0000000000..2837977c48 --- /dev/null +++ b/server/events/vcs/mocks/mock_github_credentials.go @@ -0,0 +1,167 @@ +// Code generated by pegomock. DO NOT EDIT. 
+// Source: github.com/runatlantis/atlantis/server/events/vcs (interfaces: GithubCredentials) + +package mocks + +import ( + pegomock "github.com/petergtz/pegomock" + http "net/http" + "reflect" + "time" +) + +type MockGithubCredentials struct { + fail func(message string, callerSkip ...int) +} + +func NewMockGithubCredentials(options ...pegomock.Option) *MockGithubCredentials { + mock := &MockGithubCredentials{} + for _, option := range options { + option.Apply(mock) + } + return mock +} + +func (mock *MockGithubCredentials) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } +func (mock *MockGithubCredentials) FailHandler() pegomock.FailHandler { return mock.fail } + +func (mock *MockGithubCredentials) Client() (*http.Client, error) { + if mock == nil { + panic("mock must not be nil. Use myMock := NewMockGithubCredentials().") + } + params := []pegomock.Param{} + result := pegomock.GetGenericMockFrom(mock).Invoke("Client", params, []reflect.Type{reflect.TypeOf((**http.Client)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) + var ret0 *http.Client + var ret1 error + if len(result) != 0 { + if result[0] != nil { + ret0 = result[0].(*http.Client) + } + if result[1] != nil { + ret1 = result[1].(error) + } + } + return ret0, ret1 +} + +func (mock *MockGithubCredentials) GetToken() (string, error) { + if mock == nil { + panic("mock must not be nil. Use myMock := NewMockGithubCredentials().") + } + params := []pegomock.Param{} + result := pegomock.GetGenericMockFrom(mock).Invoke("GetToken", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) + var ret0 string + var ret1 error + if len(result) != 0 { + if result[0] != nil { + ret0 = result[0].(string) + } + if result[1] != nil { + ret1 = result[1].(error) + } + } + return ret0, ret1 +} + +func (mock *MockGithubCredentials) GetUser() string { + if mock == nil { + panic("mock must not be nil. 
Use myMock := NewMockGithubCredentials().") + } + params := []pegomock.Param{} + result := pegomock.GetGenericMockFrom(mock).Invoke("GetUser", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem()}) + var ret0 string + if len(result) != 0 { + if result[0] != nil { + ret0 = result[0].(string) + } + } + return ret0 +} + +func (mock *MockGithubCredentials) VerifyWasCalledOnce() *VerifierMockGithubCredentials { + return &VerifierMockGithubCredentials{ + mock: mock, + invocationCountMatcher: pegomock.Times(1), + } +} + +func (mock *MockGithubCredentials) VerifyWasCalled(invocationCountMatcher pegomock.Matcher) *VerifierMockGithubCredentials { + return &VerifierMockGithubCredentials{ + mock: mock, + invocationCountMatcher: invocationCountMatcher, + } +} + +func (mock *MockGithubCredentials) VerifyWasCalledInOrder(invocationCountMatcher pegomock.Matcher, inOrderContext *pegomock.InOrderContext) *VerifierMockGithubCredentials { + return &VerifierMockGithubCredentials{ + mock: mock, + invocationCountMatcher: invocationCountMatcher, + inOrderContext: inOrderContext, + } +} + +func (mock *MockGithubCredentials) VerifyWasCalledEventually(invocationCountMatcher pegomock.Matcher, timeout time.Duration) *VerifierMockGithubCredentials { + return &VerifierMockGithubCredentials{ + mock: mock, + invocationCountMatcher: invocationCountMatcher, + timeout: timeout, + } +} + +type VerifierMockGithubCredentials struct { + mock *MockGithubCredentials + invocationCountMatcher pegomock.Matcher + inOrderContext *pegomock.InOrderContext + timeout time.Duration +} + +func (verifier *VerifierMockGithubCredentials) Client() *MockGithubCredentials_Client_OngoingVerification { + params := []pegomock.Param{} + methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Client", params, verifier.timeout) + return &MockGithubCredentials_Client_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} +} + +type MockGithubCredentials_Client_OngoingVerification struct { + mock *MockGithubCredentials + methodInvocations []pegomock.MethodInvocation +} + +func (c *MockGithubCredentials_Client_OngoingVerification) GetCapturedArguments() { +} + +func (c *MockGithubCredentials_Client_OngoingVerification) GetAllCapturedArguments() { +} + +func (verifier *VerifierMockGithubCredentials) GetToken() *MockGithubCredentials_GetToken_OngoingVerification { + params := []pegomock.Param{} + methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GetToken", params, verifier.timeout) + return &MockGithubCredentials_GetToken_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} +} + +type MockGithubCredentials_GetToken_OngoingVerification struct { + mock *MockGithubCredentials + methodInvocations []pegomock.MethodInvocation +} + +func (c *MockGithubCredentials_GetToken_OngoingVerification) GetCapturedArguments() { +} + +func (c *MockGithubCredentials_GetToken_OngoingVerification) GetAllCapturedArguments() { +} + +func (verifier *VerifierMockGithubCredentials) GetUser() *MockGithubCredentials_GetUser_OngoingVerification { + params := []pegomock.Param{} + methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GetUser", params, verifier.timeout) + return &MockGithubCredentials_GetUser_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} +} + +type 
MockGithubCredentials_GetUser_OngoingVerification struct { + mock *MockGithubCredentials + methodInvocations []pegomock.MethodInvocation +} + +func (c *MockGithubCredentials_GetUser_OngoingVerification) GetCapturedArguments() { +} + +func (c *MockGithubCredentials_GetUser_OngoingVerification) GetAllCapturedArguments() { +} diff --git a/server/events/working_dir.go b/server/events/working_dir.go index 5811055f0b..2a09bb53af 100644 --- a/server/events/working_dir.go +++ b/server/events/working_dir.go @@ -37,7 +37,7 @@ type WorkingDir interface { // absolute path to the root of the cloned repo. It also returns // a boolean indicating if we should warn users that the branch we're // merging into has been updated since we cloned it. - Clone(log *logging.SimpleLogger, baseRepo models.Repo, headRepo models.Repo, p models.PullRequest, workspace string) (string, bool, error) + Clone(log *logging.SimpleLogger, headRepo models.Repo, p models.PullRequest, workspace string) (string, bool, error) // GetWorkingDir returns the path to the workspace for this repo and pull. // If workspace does not exist on disk, error will be of type os.IsNotExist. GetWorkingDir(r models.Repo, p models.PullRequest, workspace string) (string, error) @@ -72,11 +72,10 @@ type FileWorkspace struct { // multiple dirs of the same repo without deleting existing plans. func (w *FileWorkspace) Clone( log *logging.SimpleLogger, - baseRepo models.Repo, headRepo models.Repo, p models.PullRequest, workspace string) (string, bool, error) { - cloneDir := w.cloneDir(baseRepo, p, workspace) + cloneDir := w.cloneDir(p.BaseRepo, p, workspace) // If the directory already exists, check if it's at the right commit. // If so, then we do nothing. diff --git a/server/events/working_dir_test.go b/server/events/working_dir_test.go index c4f49e837a..c1e01f3e42 100644 --- a/server/events/working_dir_test.go +++ b/server/events/working_dir_test.go @@ -40,7 +40,8 @@ func TestClone_NoneExisting(t *testing.T) { TestingOverrideHeadCloneURL: fmt.Sprintf("file://%s", repoDir), } - cloneDir, _, err := wd.Clone(nil, models.Repo{}, models.Repo{}, models.PullRequest{ + cloneDir, _, err := wd.Clone(nil, models.Repo{}, models.PullRequest{ + BaseRepo: models.Repo{}, HeadBranch: "branch", }, "default") Ok(t, err) @@ -89,7 +90,8 @@ func TestClone_CheckoutMergeNoneExisting(t *testing.T) { TestingOverrideBaseCloneURL: overrideURL, } - cloneDir, hasDiverged, err := wd.Clone(nil, models.Repo{}, models.Repo{}, models.PullRequest{ + cloneDir, hasDiverged, err := wd.Clone(nil, models.Repo{}, models.PullRequest{ + BaseRepo: models.Repo{}, HeadBranch: "branch", BaseBranch: "master", }, "default") @@ -137,7 +139,8 @@ func TestClone_CheckoutMergeNoReclone(t *testing.T) { TestingOverrideBaseCloneURL: overrideURL, } - _, hasDiverged, err := wd.Clone(nil, models.Repo{}, models.Repo{}, models.PullRequest{ + _, hasDiverged, err := wd.Clone(nil, models.Repo{}, models.PullRequest{ + BaseRepo: models.Repo{}, HeadBranch: "branch", BaseBranch: "master", }, "default") @@ -148,7 +151,8 @@ func TestClone_CheckoutMergeNoReclone(t *testing.T) { runCmd(t, dataDir, "touch", "repos/0/default/proof") // Now run the clone again. 
-	cloneDir, hasDiverged, err := wd.Clone(nil, models.Repo{}, models.Repo{}, models.PullRequest{
+	cloneDir, hasDiverged, err := wd.Clone(nil, models.Repo{}, models.PullRequest{
+		BaseRepo: models.Repo{},
 		HeadBranch: "branch",
 		BaseBranch: "master",
 	}, "default")
@@ -185,7 +189,8 @@ func TestClone_CheckoutMergeNoRecloneFastForward(t *testing.T) {
 		TestingOverrideBaseCloneURL: overrideURL,
 	}

-	_, hasDiverged, err := wd.Clone(nil, models.Repo{}, models.Repo{}, models.PullRequest{
+	_, hasDiverged, err := wd.Clone(nil, models.Repo{}, models.PullRequest{
+		BaseRepo: models.Repo{},
 		HeadBranch: "branch",
 		BaseBranch: "master",
 	}, "default")
@@ -196,7 +201,8 @@ func TestClone_CheckoutMergeNoRecloneFastForward(t *testing.T) {
 	runCmd(t, dataDir, "touch", "repos/0/default/proof")

 	// Now run the clone again.
-	cloneDir, hasDiverged, err := wd.Clone(nil, models.Repo{}, models.Repo{}, models.PullRequest{
+	cloneDir, hasDiverged, err := wd.Clone(nil, models.Repo{}, models.PullRequest{
+		BaseRepo: models.Repo{},
 		HeadBranch: "branch",
 		BaseBranch: "master",
 	}, "default")
@@ -238,7 +244,8 @@ func TestClone_CheckoutMergeConflict(t *testing.T) {
 		TestingOverrideBaseCloneURL: overrideURL,
 	}

-	_, _, err := wd.Clone(nil, models.Repo{}, models.Repo{}, models.PullRequest{
+	_, _, err := wd.Clone(nil, models.Repo{}, models.PullRequest{
+		BaseRepo: models.Repo{},
 		HeadBranch: "branch",
 		BaseBranch: "master",
 	}, "default")
@@ -268,7 +275,8 @@ func TestClone_NoReclone(t *testing.T) {
 		CheckoutMerge: false,
 		TestingOverrideHeadCloneURL: fmt.Sprintf("file://%s", repoDir),
 	}
-	cloneDir, hasDiverged, err := wd.Clone(nil, models.Repo{}, models.Repo{}, models.PullRequest{
+	cloneDir, hasDiverged, err := wd.Clone(nil, models.Repo{}, models.PullRequest{
+		BaseRepo: models.Repo{},
 		HeadBranch: "branch",
 	}, "default")
 	Ok(t, err)
@@ -303,7 +311,8 @@ func TestClone_RecloneWrongCommit(t *testing.T) {
 		CheckoutMerge: false,
 		TestingOverrideHeadCloneURL: fmt.Sprintf("file://%s", repoDir),
 	}
-	cloneDir, hasDiverged, err := wd.Clone(nil, models.Repo{}, models.Repo{}, models.PullRequest{
+	cloneDir, hasDiverged, err := wd.Clone(nil, models.Repo{}, models.PullRequest{
+		BaseRepo: models.Repo{},
 		HeadBranch: "branch",
 		HeadCommit: expCommit,
 	}, "default")
@@ -368,7 +377,8 @@ func TestClone_MasterHasDiverged(t *testing.T) {
 		DataDir: repoDir,
 		CheckoutMerge: true,
 	}
-	_, hasDiverged, err := wd.Clone(nil, models.Repo{}, models.Repo{}, models.PullRequest{
+	_, hasDiverged, err := wd.Clone(nil, models.Repo{}, models.PullRequest{
+		BaseRepo: models.Repo{},
 		HeadBranch: "second-pr",
 		BaseBranch: "master",
 	}, "default")
@@ -378,7 +388,8 @@ func TestClone_MasterHasDiverged(t *testing.T) {
 	// Run it again but without the checkout merge strategy. It should return
 	// false.
 	wd.CheckoutMerge = false
-	_, hasDiverged, err = wd.Clone(nil, models.Repo{}, models.Repo{}, models.PullRequest{
+	_, hasDiverged, err = wd.Clone(nil, models.Repo{}, models.PullRequest{
+		BaseRepo: models.Repo{},
 		HeadBranch: "second-pr",
 		BaseBranch: "master",
 	}, "default")
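
With the first patch applied, WorkingDir.Clone no longer takes the base repo as a separate argument; callers put it on the pull request instead. The following is a minimal sketch of the new call shape and is not part of either patch: the data directory, repo values, and branch names are placeholders, while DataDir and CheckoutMerge are the same FileWorkspace fields exercised in the tests above.

    package main

    import (
        "fmt"

        "github.com/runatlantis/atlantis/server/events"
        "github.com/runatlantis/atlantis/server/events/models"
    )

    func main() {
        // Placeholder configuration; only the field names are taken from the patch.
        wd := &events.FileWorkspace{
            DataDir:       "/tmp/atlantis-data",
            CheckoutMerge: true,
        }

        // The base repo now travels on the pull request itself.
        pull := models.PullRequest{
            BaseRepo:   models.Repo{},
            HeadBranch: "branch",
            BaseBranch: "master",
        }

        // New signature: Clone(log, headRepo, pull, workspace); there is no
        // separate baseRepo parameter any more.
        cloneDir, hasDiverged, err := wd.Clone(nil, models.Repo{}, pull, "default")
        fmt.Println(cloneDir, hasDiverged, err)
    }

Carrying BaseRepo on models.PullRequest is presumably also what lets GithubAppWorkingDir keep the same Clone entry point while it substitutes app credentials into the clone URLs, which is what the GitHub-app tests below exercise.
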
From 0a7d6441fd87816d6e697e1f9dc5d8f9c5f56b2d Mon Sep 17 00:00:00 2001
From: Nish Krishnan
Date: Wed, 14 Oct 2020 17:41:37 -0700
Subject: [PATCH 2/2] lint.

---
 server/events/command_runner_test.go         |  2 +-
 server/events/github_app_working_dir_test.go | 10 ++++-----
 server/events/working_dir_test.go            | 22 ++++++++++----------
 3 files changed, 17 insertions(+), 17 deletions(-)

diff --git a/server/events/command_runner_test.go b/server/events/command_runner_test.go
index 80244a6236..5fe53ec711 100644
--- a/server/events/command_runner_test.go
+++ b/server/events/command_runner_test.go
@@ -157,7 +157,7 @@ func TestRunCommentCommand_ForkPRDisabled(t *testing.T) {
 	var pull github.PullRequest
 	modelPull := models.PullRequest{
 		BaseRepo: fixtures.GithubRepo,
-		State: models.OpenPullState,
+		State:    models.OpenPullState,
 	}

 	When(githubGetter.GetPullRequest(fixtures.GithubRepo, fixtures.Pull.Num)).ThenReturn(&pull, nil)
diff --git a/server/events/github_app_working_dir_test.go b/server/events/github_app_working_dir_test.go
index fef570c51f..9dbcbd77bf 100644
--- a/server/events/github_app_working_dir_test.go
+++ b/server/events/github_app_working_dir_test.go
@@ -4,14 +4,14 @@ import (
 	"fmt"
 	"testing"

+	. "github.com/petergtz/pegomock"
 	"github.com/runatlantis/atlantis/server/events"
 	eventMocks "github.com/runatlantis/atlantis/server/events/mocks"
 	"github.com/runatlantis/atlantis/server/events/models"
 	"github.com/runatlantis/atlantis/server/events/vcs"
-	vcsMocks "github.com/runatlantis/atlantis/server/events/vcs/mocks"
 	"github.com/runatlantis/atlantis/server/events/vcs/fixtures"
+	vcsMocks "github.com/runatlantis/atlantis/server/events/vcs/mocks"
 	. "github.com/runatlantis/atlantis/testing"
-	. "github.com/petergtz/pegomock"
 )

 // Test that if we don't have any existing files, we check out the repo with a github app.
@@ -50,7 +50,7 @@ func TestClone_GithubAppNoneExisting(t *testing.T) {
 	}

 	cloneDir, _, err := gwd.Clone(nil, models.Repo{}, models.PullRequest{
-		BaseRepo: models.Repo{},
+		BaseRepo:   models.Repo{},
 		HeadBranch: "branch",
 	}, "default")
 	Ok(t, err)
@@ -66,8 +66,8 @@ func TestClone_GithubAppSetsCorrectUrl(t *testing.T) {
 	credentials := vcsMocks.NewMockGithubCredentials()

 	ghAppWorkingDir := events.GithubAppWorkingDir{
-		WorkingDir: workingDir,
-		Credentials: credentials,
+		WorkingDir:     workingDir,
+		Credentials:    credentials,
 		GithubHostname: "some-host",
 	}

diff --git a/server/events/working_dir_test.go b/server/events/working_dir_test.go
index c1e01f3e42..163b0919e4 100644
--- a/server/events/working_dir_test.go
+++ b/server/events/working_dir_test.go
@@ -41,7 +41,7 @@ func TestClone_NoneExisting(t *testing.T) {
 	}

 	cloneDir, _, err := wd.Clone(nil, models.Repo{}, models.PullRequest{
-		BaseRepo: models.Repo{},
+		BaseRepo:   models.Repo{},
 		HeadBranch: "branch",
 	}, "default")
 	Ok(t, err)
@@ -91,7 +91,7 @@ func TestClone_CheckoutMergeNoneExisting(t *testing.T) {
 	}

 	cloneDir, hasDiverged, err := wd.Clone(nil, models.Repo{}, models.PullRequest{
-		BaseRepo: models.Repo{},
+		BaseRepo:   models.Repo{},
 		HeadBranch: "branch",
 		BaseBranch: "master",
 	}, "default")
@@ -140,7 +140,7 @@ func TestClone_CheckoutMergeNoReclone(t *testing.T) {
 	}

 	_, hasDiverged, err := wd.Clone(nil, models.Repo{}, models.PullRequest{
-		BaseRepo: models.Repo{},
+		BaseRepo:   models.Repo{},
 		HeadBranch: "branch",
 		BaseBranch: "master",
 	}, "default")
@@ -152,7 +152,7 @@ func TestClone_CheckoutMergeNoReclone(t *testing.T) {

 	// Now run the clone again.
 	cloneDir, hasDiverged, err := wd.Clone(nil, models.Repo{}, models.PullRequest{
-		BaseRepo: models.Repo{},
+		BaseRepo:   models.Repo{},
 		HeadBranch: "branch",
 		BaseBranch: "master",
 	}, "default")
@@ -190,7 +190,7 @@ func TestClone_CheckoutMergeNoRecloneFastForward(t *testing.T) {
 	}

 	_, hasDiverged, err := wd.Clone(nil, models.Repo{}, models.PullRequest{
-		BaseRepo: models.Repo{},
+		BaseRepo:   models.Repo{},
 		HeadBranch: "branch",
 		BaseBranch: "master",
 	}, "default")
@@ -202,7 +202,7 @@ func TestClone_CheckoutMergeNoRecloneFastForward(t *testing.T) {

 	// Now run the clone again.
 	cloneDir, hasDiverged, err := wd.Clone(nil, models.Repo{}, models.PullRequest{
-		BaseRepo: models.Repo{},
+		BaseRepo:   models.Repo{},
 		HeadBranch: "branch",
 		BaseBranch: "master",
 	}, "default")
@@ -245,7 +245,7 @@ func TestClone_CheckoutMergeConflict(t *testing.T) {
 	}

 	_, _, err := wd.Clone(nil, models.Repo{}, models.PullRequest{
-		BaseRepo: models.Repo{},
+		BaseRepo:   models.Repo{},
 		HeadBranch: "branch",
 		BaseBranch: "master",
 	}, "default")
@@ -276,7 +276,7 @@ func TestClone_NoReclone(t *testing.T) {
 		TestingOverrideHeadCloneURL: fmt.Sprintf("file://%s", repoDir),
 	}
 	cloneDir, hasDiverged, err := wd.Clone(nil, models.Repo{}, models.PullRequest{
-		BaseRepo: models.Repo{},
+		BaseRepo:   models.Repo{},
 		HeadBranch: "branch",
 	}, "default")
 	Ok(t, err)
@@ -312,7 +312,7 @@ func TestClone_RecloneWrongCommit(t *testing.T) {
 		TestingOverrideHeadCloneURL: fmt.Sprintf("file://%s", repoDir),
 	}
 	cloneDir, hasDiverged, err := wd.Clone(nil, models.Repo{}, models.PullRequest{
-		BaseRepo: models.Repo{},
+		BaseRepo:   models.Repo{},
 		HeadBranch: "branch",
 		HeadCommit: expCommit,
 	}, "default")
@@ -378,7 +378,7 @@ func TestClone_MasterHasDiverged(t *testing.T) {
 		CheckoutMerge: true,
 	}
 	_, hasDiverged, err := wd.Clone(nil, models.Repo{}, models.PullRequest{
-		BaseRepo: models.Repo{},
+		BaseRepo:   models.Repo{},
 		HeadBranch: "second-pr",
 		BaseBranch: "master",
 	}, "default")
@@ -389,7 +389,7 @@ func TestClone_MasterHasDiverged(t *testing.T) {
 	// false.
 	wd.CheckoutMerge = false
 	_, hasDiverged, err = wd.Clone(nil, models.Repo{}, models.PullRequest{
-		BaseRepo: models.Repo{},
+		BaseRepo:   models.Repo{},
 		HeadBranch: "second-pr",
 		BaseBranch: "master",
 	}, "default")
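
For completeness, a rough sketch of how the GitHub-app pieces from github_app_working_dir_test.go fit together under the new signature. This is not taken from the patch: the test name, hostname, and branch values are invented for illustration, the mock constructors are the ones generated in this change, and nothing beyond the call shape is implied.

    package events_test

    import (
        "testing"

        . "github.com/petergtz/pegomock"

        "github.com/runatlantis/atlantis/server/events"
        eventMocks "github.com/runatlantis/atlantis/server/events/mocks"
        "github.com/runatlantis/atlantis/server/events/models"
        vcsMocks "github.com/runatlantis/atlantis/server/events/vcs/mocks"
    )

    func TestClone_GithubAppCallShapeSketch(t *testing.T) {
        RegisterMockTestingT(t)

        // Mocks generated in this PR stand in for the wrapped working dir and
        // the GitHub app credential source.
        workingDir := eventMocks.NewMockWorkingDir()
        credentials := vcsMocks.NewMockGithubCredentials()

        gwd := events.GithubAppWorkingDir{
            WorkingDir:     workingDir,
            Credentials:    credentials,
            GithubHostname: "some-host",
        }

        // Same call shape as FileWorkspace.Clone: the base repo rides on the
        // pull request and there is no separate baseRepo argument.
        if _, _, err := gwd.Clone(nil, models.Repo{}, models.PullRequest{
            BaseRepo:   models.Repo{},
            HeadBranch: "branch",
        }, "default"); err != nil {
            t.Logf("clone returned: %s", err)
        }
    }
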