diff --git a/build/teamcity/cockroach/post-merge/publish-bleeding-edge.sh b/build/teamcity/cockroach/post-merge/publish-bleeding-edge.sh index 0adeebdfb208..8315eaf5eebf 100755 --- a/build/teamcity/cockroach/post-merge/publish-bleeding-edge.sh +++ b/build/teamcity/cockroach/post-merge/publish-bleeding-edge.sh @@ -9,8 +9,17 @@ dir="$(dirname $(dirname $(dirname $(dirname "${0}"))))" source "$dir/teamcity-support.sh" source "$dir/teamcity-bazel-support.sh" -BAZEL_SUPPORT_EXTRA_DOCKER_ARGS="-e AWS_ACCESS_KEY_ID -e AWS_SECRET_ACCESS_KEY -e TC_BUILD_BRANCH" run_bazel << 'EOF' +# s3 pushes to the "cockroach" bucket. There is no test/dev bucket for this build type. +gcs_bucket="cockroach-edge-artifacts-prod" +# export the variable to avoid shell escaping +export gcs_credentials="$GCS_CREDENTIALS_PROD" + +BAZEL_SUPPORT_EXTRA_DOCKER_ARGS="-e AWS_ACCESS_KEY_ID -e AWS_SECRET_ACCESS_KEY -e TC_BUILDTYPE_ID -e TC_BUILD_BRANCH -e gcs_credentials -e gcs_bucket=$gcs_bucket" run_bazel << 'EOF' bazel build --config ci //pkg/cmd/publish-artifacts BAZEL_BIN=$(bazel info bazel-bin --config ci) -$BAZEL_BIN/pkg/cmd/publish-artifacts/publish-artifacts_/publish-artifacts +export google_credentials="$gcs_credentials" +source "build/teamcity-support.sh" # For log_into_gcloud +log_into_gcloud +export GOOGLE_APPLICATION_CREDENTIALS="$PWD/.google-credentials.json" +$BAZEL_BIN/pkg/cmd/publish-artifacts/publish-artifacts_/publish-artifacts --gcs-bucket="$gcs_bucket" EOF diff --git a/build/teamcity/internal/release/process/make-and-publish-build-artifacts.sh b/build/teamcity/internal/release/process/make-and-publish-build-artifacts.sh index 2bcec40c3569..29ffeed9e577 100755 --- a/build/teamcity/internal/release/process/make-and-publish-build-artifacts.sh +++ b/build/teamcity/internal/release/process/make-and-publish-build-artifacts.sh @@ -15,17 +15,23 @@ release_branch="$(echo "$build_name" | grep -Eo "^v[0-9]+\.[0-9]+" || echo"")" is_custom_build="$(echo "$TC_BUILD_BRANCH" | grep -Eo "^custombuild-" || echo "")" if [[ -z "${DRY_RUN}" ]] ; then - bucket="${BUCKET-cockroach-builds}" + bucket="cockroach-builds" + gcs_bucket="cockroach-builds-artifacts-prod" google_credentials=$GOOGLE_COCKROACH_CLOUD_IMAGES_COCKROACHDB_CREDENTIALS gcr_repository="us-docker.pkg.dev/cockroach-cloud-images/cockroachdb/cockroach" # Used for docker login for gcloud gcr_hostname="us-docker.pkg.dev" + # export the variable to avoid shell escaping + export gcs_credentials="$GCS_CREDENTIALS_PROD" else - bucket="${BUCKET:-cockroach-builds-test}" + bucket="cockroach-builds-test" + gcs_bucket="cockroach-builds-artifacts-dryrun" google_credentials="$GOOGLE_COCKROACH_RELEASE_CREDENTIALS" gcr_repository="us.gcr.io/cockroach-release/cockroach-test" build_name="${build_name}.dryrun" gcr_hostname="us.gcr.io" + # export the variable to avoid shell escaping + export gcs_credentials="$GCS_CREDENTIALS_DEV" fi cat << EOF @@ -34,6 +40,7 @@ cat << EOF release_branch: $release_branch is_custom_build: $is_custom_build bucket: $bucket + gcs_bucket: $gcs_bucket gcr_repository: $gcr_repository EOF @@ -47,10 +54,14 @@ git tag "${build_name}" tc_end_block "Tag the release" tc_start_block "Compile and publish S3 artifacts" -BAZEL_SUPPORT_EXTRA_DOCKER_ARGS="-e AWS_ACCESS_KEY_ID -e AWS_SECRET_ACCESS_KEY -e TC_BUILD_BRANCH=$build_name -e bucket=$bucket" run_bazel << 'EOF' +BAZEL_SUPPORT_EXTRA_DOCKER_ARGS="-e AWS_ACCESS_KEY_ID -e AWS_SECRET_ACCESS_KEY -e TC_BUILDTYPE_ID -e TC_BUILD_BRANCH=$build_name -e bucket=$bucket -e gcs_credentials -e gcs_bucket=$gcs_bucket" run_bazel << 
'EOF' bazel build --config ci //pkg/cmd/publish-provisional-artifacts BAZEL_BIN=$(bazel info bazel-bin --config ci) -$BAZEL_BIN/pkg/cmd/publish-provisional-artifacts/publish-provisional-artifacts_/publish-provisional-artifacts -provisional -release -bucket "$bucket" +export google_credentials="$gcs_credentials" +source "build/teamcity-support.sh" # For log_into_gcloud +log_into_gcloud +export GOOGLE_APPLICATION_CREDENTIALS="$PWD/.google-credentials.json" +$BAZEL_BIN/pkg/cmd/publish-provisional-artifacts/publish-provisional-artifacts_/publish-provisional-artifacts -provisional -release -bucket "$bucket" --gcs-bucket="$gcs_bucket" EOF tc_end_block "Compile and publish S3 artifacts" diff --git a/build/teamcity/internal/release/process/publish-cockroach-release.sh b/build/teamcity/internal/release/process/publish-cockroach-release.sh index 9df13c006c45..a7c8305d2cff 100755 --- a/build/teamcity/internal/release/process/publish-cockroach-release.sh +++ b/build/teamcity/internal/release/process/publish-cockroach-release.sh @@ -24,8 +24,11 @@ fi release_branch=$(echo ${build_name} | grep -E -o '^v[0-9]+\.[0-9]+') if [[ -z "${DRY_RUN}" ]] ; then - bucket="${BUCKET:-binaries.cockroachdb.com}" + bucket="binaries.cockroachdb.com" + gcs_bucket="cockroach-release-artifacts-prod" google_credentials="$GOOGLE_COCKROACH_CLOUD_IMAGES_COCKROACHDB_CREDENTIALS" + # export the variable to avoid shell escaping + export gcs_credentials="$GCS_CREDENTIALS_PROD" if [[ -z "${PRE_RELEASE}" ]] ; then dockerhub_repository="docker.io/cockroachdb/cockroach" else @@ -37,8 +40,11 @@ if [[ -z "${DRY_RUN}" ]] ; then s3_download_hostname="${bucket}" git_repo_for_tag="cockroachdb/cockroach" else - bucket="${BUCKET:-cockroach-builds-test}" + bucket="cockroach-builds-test" + gcs_bucket="cockroach-release-artifacts-dryrun" google_credentials="$GOOGLE_COCKROACH_RELEASE_CREDENTIALS" + # export the variable to avoid shell escaping + export gcs_credentials="$GCS_CREDENTIALS_DEV" dockerhub_repository="docker.io/cockroachdb/cockroach-misc" gcr_repository="us.gcr.io/cockroach-release/cockroach-test" gcr_hostname="us.gcr.io" @@ -77,10 +83,14 @@ tc_end_block "Tag the release" tc_start_block "Make and publish release S3 artifacts" # Using publish-provisional-artifacts here is funky. We're directly publishing # the official binaries, not provisional ones. Legacy naming. To clean up... 
-BAZEL_SUPPORT_EXTRA_DOCKER_ARGS="-e AWS_ACCESS_KEY_ID -e AWS_SECRET_ACCESS_KEY -e TC_BUILD_BRANCH=$build_name -e bucket=$bucket" run_bazel << 'EOF' +BAZEL_SUPPORT_EXTRA_DOCKER_ARGS="-e AWS_ACCESS_KEY_ID -e AWS_SECRET_ACCESS_KEY -e TC_BUILDTYPE_ID -e TC_BUILD_BRANCH=$build_name -e bucket=$bucket -e gcs_credentials -e gcs_bucket=$gcs_bucket" run_bazel << 'EOF' bazel build --config ci //pkg/cmd/publish-provisional-artifacts BAZEL_BIN=$(bazel info bazel-bin --config ci) -$BAZEL_BIN/pkg/cmd/publish-provisional-artifacts/publish-provisional-artifacts_/publish-provisional-artifacts -provisional -release -bucket "$bucket" +export google_credentials="$gcs_credentials" +source "build/teamcity-support.sh" # For log_into_gcloud +log_into_gcloud +export GOOGLE_APPLICATION_CREDENTIALS="$PWD/.google-credentials.json" +$BAZEL_BIN/pkg/cmd/publish-provisional-artifacts/publish-provisional-artifacts_/publish-provisional-artifacts -provisional -release -bucket "$bucket" --gcs-bucket="$gcs_bucket" EOF tc_end_block "Make and publish release S3 artifacts" @@ -127,10 +137,10 @@ tc_start_block "Publish S3 binaries and archive as latest" # Only push the "latest" for our most recent release branch. # https://github.com/cockroachdb/cockroach/issues/41067 if [[ -n "${PUBLISH_LATEST}" && -z "${PRE_RELEASE}" ]]; then - BAZEL_SUPPORT_EXTRA_DOCKER_ARGS="-e AWS_ACCESS_KEY_ID -e AWS_SECRET_ACCESS_KEY -e TC_BUILD_BRANCH=$build_name -e bucket=$bucket" run_bazel << 'EOF' + BAZEL_SUPPORT_EXTRA_DOCKER_ARGS="-e AWS_ACCESS_KEY_ID -e AWS_SECRET_ACCESS_KEY -e TC_BUILDTYPE_ID -e TC_BUILD_BRANCH=$build_name -e bucket -e gcs_credentials -e gcs_bucket" run_bazel << 'EOF' bazel build --config ci //pkg/cmd/publish-provisional-artifacts BAZEL_BIN=$(bazel info bazel-bin --config ci) -$BAZEL_BIN/pkg/cmd/publish-provisional-artifacts/publish-provisional-artifacts_/publish-provisional-artifacts -bless -release -bucket "$bucket" +$BAZEL_BIN/pkg/cmd/publish-provisional-artifacts/publish-provisional-artifacts_/publish-provisional-artifacts -bless -release -bucket "$bucket" --gcs-bucket="$gcs_bucket" EOF else @@ -153,7 +163,7 @@ tc_start_block "Tag docker image as latest" # latest unstable release # https://github.com/cockroachdb/cockroach/issues/41067 # https://github.com/cockroachdb/cockroach/issues/48309 -if [[ -n "${PUBLISH_LATEST}" ]]; then +if [[ -n "${PUBLISH_LATEST}" || -n "${PRE_RELEASE}" ]]; then docker push "${dockerhub_repository}:latest" else echo "The ${dockerhub_repository}:latest docker image tag was _not_ pushed." 
@@ -170,7 +180,7 @@ images=( if [[ -z "$PRE_RELEASE" ]]; then images+=("${dockerhub_repository}:latest-${release_branch}") fi -if [[ -n "${PUBLISH_LATEST}" ]]; then +if [[ -n "${PUBLISH_LATEST}" || -n "${PRE_RELEASE}" ]]; then images+=("${dockerhub_repository}:latest") fi diff --git a/pkg/cmd/publish-artifacts/BUILD.bazel b/pkg/cmd/publish-artifacts/BUILD.bazel index 9f1a9704e783..7c58558628d6 100644 --- a/pkg/cmd/publish-artifacts/BUILD.bazel +++ b/pkg/cmd/publish-artifacts/BUILD.bazel @@ -7,9 +7,6 @@ go_library( visibility = ["//visibility:private"], deps = [ "//pkg/release", - "@com_github_aws_aws_sdk_go//aws", - "@com_github_aws_aws_sdk_go//aws/session", - "@com_github_aws_aws_sdk_go//service/s3", "@com_github_kr_pretty//:pretty", ], ) @@ -23,14 +20,12 @@ go_binary( go_test( name = "publish-artifacts_test", size = "small", - srcs = [ - "main_test.go", - "slow_test.go", - ], + srcs = ["main_test.go"], embed = [":publish-artifacts_lib"], deps = [ - "//pkg/testutils/skip", - "@com_github_aws_aws_sdk_go//service/s3", + "//pkg/release", + "//pkg/testutils", + "@com_github_alessio_shellescape//:shellescape", "@com_github_stretchr_testify//require", ], ) diff --git a/pkg/cmd/publish-artifacts/main.go b/pkg/cmd/publish-artifacts/main.go index 9fa45af128d1..e8cc25002659 100644 --- a/pkg/cmd/publish-artifacts/main.go +++ b/pkg/cmd/publish-artifacts/main.go @@ -18,9 +18,6 @@ import ( "os/exec" "path/filepath" - "github.com/aws/aws-sdk-go/aws" - "github.com/aws/aws-sdk-go/aws/session" - "github.com/aws/aws-sdk-go/service/s3" "github.com/cockroachdb/cockroach/pkg/release" "github.com/kr/pretty" ) @@ -31,24 +28,9 @@ const ( teamcityBuildBranchKey = "TC_BUILD_BRANCH" ) -type s3putter interface { - PutObject(*s3.PutObjectInput) (*s3.PutObjectOutput, error) -} - -// Overridden in testing. 
-var testableS3 = func() (s3putter, error) { - sess, err := session.NewSession(&aws.Config{ - Region: aws.String("us-east-1"), - }) - if err != nil { - return nil, err - } - return s3.New(sess), nil -} - -var destBucket = flag.String("bucket", "", "override default bucket") - func main() { + var destBucket = flag.String("bucket", "cockroach", "override default bucket") + var gcsBucket = flag.String("gcs-bucket", "", "override default bucket") flag.Parse() if _, ok := os.LookupEnv(awsAccessKeyIDKey); !ok { @@ -81,73 +63,109 @@ func main() { } versionStr := string(bytes.TrimSpace(out)) - svc, err := testableS3() + var providers []release.ObjectPutGetter + s3, err := release.NewS3("us-east-1", *destBucket) if err != nil { log.Fatalf("Creating AWS S3 session: %s", err) } - - var bucketName string - if len(*destBucket) > 0 { - bucketName = *destBucket - } else { - bucketName = "cockroach" + providers = append(providers, s3) + + if *gcsBucket != "" { + if _, ok := os.LookupEnv("GOOGLE_APPLICATION_CREDENTIALS"); !ok { + log.Fatal("GOOGLE_APPLICATION_CREDENTIALS environment variable is not set") + } + gcs, err := release.NewGCS(*gcsBucket) + if err != nil { + log.Fatalf("Creating GCS session: %s", err) + } + providers = append(providers, gcs) } - log.Printf("Using S3 bucket: %s", bucketName) - releaseVersionStrs := []string{versionStr} + run(providers, runFlags{ + pkgDir: pkg, + branch: branch, + sha: versionStr, + }, release.ExecFn{}) +} + +type runFlags struct { + branch string + sha string + pkgDir string +} +func run(providers []release.ObjectPutGetter, flags runFlags, execFn release.ExecFn) { for _, platform := range []release.Platform{release.PlatformLinux, release.PlatformMacOS, release.PlatformWindows} { var o opts o.Platform = platform - o.ReleaseVersionStrs = releaseVersionStrs - o.PkgDir = pkg - o.Branch = branch - o.VersionStr = versionStr - o.BucketName = bucketName - o.Branch = branch - o.AbsolutePath = filepath.Join(pkg, "cockroach"+release.SuffixFromPlatform(platform)) + o.ReleaseVersions = []string{flags.sha} + o.PkgDir = flags.pkgDir + o.Branch = flags.branch + o.VersionStr = flags.sha + o.AbsolutePath = filepath.Join(flags.pkgDir, "cockroach"+release.SuffixFromPlatform(platform)) + o.CockroachSQLAbsolutePath = filepath.Join(flags.pkgDir, "cockroach-sql"+release.SuffixFromPlatform(platform)) log.Printf("building %s", pretty.Sprint(o)) - - buildOneCockroach(svc, o) + buildOneCockroach(providers, o, execFn) } + // We build workload only for Linux. + var o opts + o.Platform = release.PlatformLinux + o.PkgDir = flags.pkgDir + o.Branch = flags.branch + o.VersionStr = flags.sha + buildAndPublishWorkload(providers, o, execFn) } -func buildOneCockroach(svc s3putter, o opts) { +func buildOneCockroach(providers []release.ObjectPutGetter, o opts, execFn release.ExecFn) { log.Printf("building cockroach %s", pretty.Sprint(o)) - defer func() { - log.Printf("done building cockroach: %s", pretty.Sprint(o)) - }() - - if err := release.MakeRelease(o.Platform, release.BuildOptions{}, o.PkgDir); err != nil { + if err := release.MakeRelease(o.Platform, release.BuildOptions{ExecFn: execFn}, o.PkgDir); err != nil { log.Fatal(err) } - - putNonRelease(svc, o, release.MakeCRDBLibraryNonReleaseFiles(o.PkgDir, o.Platform, o.VersionStr)...) 
+ for _, provider := range providers { + release.PutNonRelease( + provider, + release.PutNonReleaseOptions{ + Branch: o.Branch, + Files: append( + []release.NonReleaseFile{ + release.MakeCRDBBinaryNonReleaseFile(o.AbsolutePath, o.VersionStr), + release.MakeCRDBBinaryNonReleaseFile(o.CockroachSQLAbsolutePath, o.VersionStr), + }, + release.MakeCRDBLibraryNonReleaseFiles(o.PkgDir, o.Platform, o.VersionStr)..., + ), + }, + ) + } + log.Printf("done building cockroach: %s", pretty.Sprint(o)) } -type opts struct { - VersionStr string - Branch string - ReleaseVersionStrs []string - - Platform release.Platform - - BucketName string - AbsolutePath string - PkgDir string +func buildAndPublishWorkload(providers []release.ObjectPutGetter, o opts, execFn release.ExecFn) { + log.Printf("building workload %s", pretty.Sprint(o)) + if err := release.MakeWorkload(release.BuildOptions{ExecFn: execFn}, o.PkgDir); err != nil { + log.Fatal(err) + } + o.AbsolutePath = filepath.Join(o.PkgDir, "bin", "workload") + for _, provider := range providers { + release.PutNonRelease( + provider, + release.PutNonReleaseOptions{ + Branch: o.Branch, + Files: []release.NonReleaseFile{ + release.MakeCRDBBinaryNonReleaseFile(o.AbsolutePath, o.VersionStr), + }, + }, + ) + } + log.Printf("done building workload: %s", pretty.Sprint(o)) } -func putNonRelease(svc s3putter, o opts, additionalNonReleaseFiles ...release.NonReleaseFile) { - release.PutNonRelease( - svc, - release.PutNonReleaseOptions{ - Branch: o.Branch, - BucketName: o.BucketName, - Files: append( - []release.NonReleaseFile{release.MakeCRDBBinaryNonReleaseFile(o.AbsolutePath, o.VersionStr)}, - additionalNonReleaseFiles..., - ), - }, - ) +type opts struct { + VersionStr string + Branch string + ReleaseVersions []string + Platform release.Platform + AbsolutePath string + CockroachSQLAbsolutePath string + PkgDir string } diff --git a/pkg/cmd/publish-artifacts/main_test.go b/pkg/cmd/publish-artifacts/main_test.go index 351361136d63..4eebc7557851 100644 --- a/pkg/cmd/publish-artifacts/main_test.go +++ b/pkg/cmd/publish-artifacts/main_test.go @@ -11,241 +11,246 @@ package main import ( + "fmt" + "io/ioutil" "os" - "regexp" + "os/exec" + "path/filepath" + "strings" "testing" + "unicode/utf8" - "github.com/aws/aws-sdk-go/service/s3" - "github.com/cockroachdb/cockroach/pkg/testutils/skip" + "github.com/alessio/shellescape" + "github.com/cockroachdb/cockroach/pkg/release" + "github.com/cockroachdb/cockroach/pkg/testutils" "github.com/stretchr/testify/require" ) -// Whether to run slow tests. 
-var slow bool +type mockStorage struct { + puts []string +} -func init() { - if err := os.Setenv("AWS_ACCESS_KEY_ID", "testing"); err != nil { - panic(err) - } - if err := os.Setenv("AWS_SECRET_ACCESS_KEY", "hunter2"); err != nil { - panic(err) - } +var _ release.ObjectPutGetter = (*mockStorage)(nil) + +func (s *mockStorage) Bucket() string { + return "cockroach" } -type recorder struct { - reqs []s3.PutObjectInput +func (s mockStorage) URL(key string) string { + return "storage://bucket/" + key } -func (r *recorder) PutObject(req *s3.PutObjectInput) (*s3.PutObjectOutput, error) { - r.reqs = append(r.reqs, *req) - return &s3.PutObjectOutput{}, nil +func (s *mockStorage) GetObject(*release.GetObjectInput) (*release.GetObjectOutput, error) { + return &release.GetObjectOutput{}, nil } -func mockPutter(p s3putter) func() { - origPutter := testableS3 - f := func() { - testableS3 = origPutter +func (s *mockStorage) PutObject(i *release.PutObjectInput) error { + url := fmt.Sprintf(`s3://%s/%s`, s.Bucket(), *i.Key) + if i.CacheControl != nil { + url += `/` + *i.CacheControl } - testableS3 = func() (s3putter, error) { - return p, nil + if i.Body != nil { + bytes, err := ioutil.ReadAll(i.Body) + if err != nil { + return err + } + if strings.HasSuffix(*i.Key, release.ChecksumSuffix) { + // Unfortunately the archive tarball checksum changes every time, + // because we generate tarballs and the copy file modification time from the generated files. + // This makes the checksum not reproducible. + s.puts = append(s.puts, fmt.Sprintf("%s CONTENTS ", url)) + } else if utf8.Valid(bytes) { + s.puts = append(s.puts, fmt.Sprintf("%s CONTENTS %s", url, bytes)) + } else { + s.puts = append(s.puts, fmt.Sprintf("%s CONTENTS ", url)) + } + } else if i.WebsiteRedirectLocation != nil { + s.puts = append(s.puts, fmt.Sprintf("%s REDIRECT %s", url, *i.WebsiteRedirectLocation)) } - return f + return nil } -// NB: the function name TestMain is special as it is expected to be a function -// which takes a *testing.M as a parameter. Somehow `go test` allows TestMain to -// also work if passed a *testing.T, in which case it acts like a normal test -// function. Bazel prohibits this, so give this test function a name which -// doesn't collide with TestMain. -func TestMainF(t *testing.T) { - if !slow { - skip.IgnoreLint(t, "only to be run manually via `./build/builder.sh go test -tags slow -timeout 1h -v ./pkg/cmd/publish-artifacts`") - } - r := &recorder{} - undo := mockPutter(r) - defer undo() - - shaPat := regexp.MustCompile(`[a-f0-9]{40}`) - const shaStub = "" +type mockExecRunner struct { + fakeBazelBin string + cmds []string +} - type testCase struct { - Bucket, ContentDisposition, Key, WebsiteRedirectLocation, CacheControl string +func (r *mockExecRunner) run(c *exec.Cmd) ([]byte, error) { + if r.fakeBazelBin == "" { + panic("r.fakeBazelBin not set") } - exp := []testCase{ - { - Bucket: "cockroach", - ContentDisposition: "attachment; filename=cockroach.darwin-amd64." + shaStub, - Key: "/cockroach/cockroach.darwin-amd64." + shaStub, - }, - { - Bucket: "cockroach", - CacheControl: "no-cache", - Key: "cockroach/cockroach.darwin-amd64.LATEST", - WebsiteRedirectLocation: "/cockroach/cockroach.darwin-amd64." + shaStub, - }, - { - Bucket: "cockroach", - ContentDisposition: "attachment; filename=libgeos.darwin-amd64." + shaStub + ".dylib", - Key: "/cockroach/lib/libgeos.darwin-amd64." 
+ shaStub + ".dylib", - }, - { - Bucket: "cockroach", - CacheControl: "no-cache", - Key: "cockroach/lib/libgeos.darwin-amd64.dylib.LATEST", - WebsiteRedirectLocation: "/cockroach/lib/libgeos.darwin-amd64." + shaStub + ".dylib", - }, - { - Bucket: "cockroach", - ContentDisposition: "attachment; filename=libgeos_c.darwin-amd64." + shaStub + ".dylib", - Key: "/cockroach/lib/libgeos_c.darwin-amd64." + shaStub + ".dylib", - }, - { - Bucket: "cockroach", - CacheControl: "no-cache", - Key: "cockroach/lib/libgeos_c.darwin-amd64.dylib.LATEST", - WebsiteRedirectLocation: "/cockroach/lib/libgeos_c.darwin-amd64." + shaStub + ".dylib", - }, - - { - Bucket: "cockroach", - ContentDisposition: "attachment; filename=cockroach.linux-gnu-amd64." + shaStub, - Key: "/cockroach/cockroach.linux-gnu-amd64." + shaStub, - }, - { - Bucket: "cockroach", - CacheControl: "no-cache", - Key: "cockroach/cockroach.linux-gnu-amd64.LATEST", - WebsiteRedirectLocation: "/cockroach/cockroach.linux-gnu-amd64." + shaStub, - }, - { - Bucket: "cockroach", - ContentDisposition: "attachment; filename=libgeos.linux-gnu-amd64." + shaStub + ".so", - Key: "/cockroach/lib/libgeos.linux-gnu-amd64." + shaStub + ".so", - }, - { - Bucket: "cockroach", - CacheControl: "no-cache", - Key: "cockroach/lib/libgeos.linux-gnu-amd64.so.LATEST", - WebsiteRedirectLocation: "/cockroach/lib/libgeos.linux-gnu-amd64." + shaStub + ".so", - }, - { - Bucket: "cockroach", - ContentDisposition: "attachment; filename=libgeos_c.linux-gnu-amd64." + shaStub + ".so", - Key: "/cockroach/lib/libgeos_c.linux-gnu-amd64." + shaStub + ".so", - }, - { - Bucket: "cockroach", - CacheControl: "no-cache", - Key: "cockroach/lib/libgeos_c.linux-gnu-amd64.so.LATEST", - WebsiteRedirectLocation: "/cockroach/lib/libgeos_c.linux-gnu-amd64." + shaStub + ".so", - }, - - { - Bucket: "cockroach", - ContentDisposition: "attachment; filename=cockroach.race.linux-gnu-amd64." + shaStub, - Key: "/cockroach/cockroach.race.linux-gnu-amd64." + shaStub, - }, - { - Bucket: "cockroach", - CacheControl: "no-cache", - Key: "cockroach/cockroach.race.linux-gnu-amd64.LATEST", - WebsiteRedirectLocation: "/cockroach/cockroach.race.linux-gnu-amd64." + shaStub, - }, - { - Bucket: "cockroach", - ContentDisposition: "attachment; filename=libgeos.race.linux-gnu-amd64." + shaStub + ".so", - Key: "/cockroach/lib/libgeos.race.linux-gnu-amd64." + shaStub + ".so", - }, - { - Bucket: "cockroach", - CacheControl: "no-cache", - Key: "cockroach/lib/libgeos.race.linux-gnu-amd64.so.LATEST", - WebsiteRedirectLocation: "/cockroach/lib/libgeos.race.linux-gnu-amd64." + shaStub + ".so", - }, - { - Bucket: "cockroach", - ContentDisposition: "attachment; filename=libgeos_c.race.linux-gnu-amd64." + shaStub + ".so", - Key: "/cockroach/lib/libgeos_c.race.linux-gnu-amd64." + shaStub + ".so", - }, - { - Bucket: "cockroach", - CacheControl: "no-cache", - Key: "cockroach/lib/libgeos_c.race.linux-gnu-amd64.so.LATEST", - WebsiteRedirectLocation: "/cockroach/lib/libgeos_c.race.linux-gnu-amd64." + shaStub + ".so", - }, - - { - Bucket: "cockroach", - ContentDisposition: "attachment; filename=cockroach.windows-amd64." + shaStub + ".exe", - Key: "/cockroach/cockroach.windows-amd64." + shaStub + ".exe", - }, - { - Bucket: "cockroach", - CacheControl: "no-cache", - Key: "cockroach/cockroach.windows-amd64.LATEST", - WebsiteRedirectLocation: "/cockroach/cockroach.windows-amd64." + shaStub + ".exe", - }, - { - Bucket: "cockroach", - ContentDisposition: "attachment; filename=libgeos.windows-amd64." 
+ shaStub + ".dll", - Key: "/cockroach/lib/libgeos.windows-amd64." + shaStub + ".dll", - }, - { - Bucket: "cockroach", - CacheControl: "no-cache", - Key: "cockroach/lib/libgeos.windows-amd64.dll.LATEST", - WebsiteRedirectLocation: "/cockroach/lib/libgeos.windows-amd64." + shaStub + ".dll", - }, - { - Bucket: "cockroach", - ContentDisposition: "attachment; filename=libgeos_c.windows-amd64." + shaStub + ".dll", - Key: "/cockroach/lib/libgeos_c.windows-amd64." + shaStub + ".dll", - }, - { - Bucket: "cockroach", - CacheControl: "no-cache", - Key: "cockroach/lib/libgeos_c.windows-amd64.dll.LATEST", - WebsiteRedirectLocation: "/cockroach/lib/libgeos_c.windows-amd64." + shaStub + ".dll", - }, - - { - Bucket: "cockroach", - ContentDisposition: "attachment; filename=workload." + shaStub, - Key: "/cockroach/workload." + shaStub, - }, - { - Bucket: "cockroach", - CacheControl: "no-cache", - Key: "cockroach/workload.LATEST", - WebsiteRedirectLocation: "/cockroach/workload." + shaStub, - }, + if c.Dir == `` { + return nil, fmt.Errorf("`Dir` must be specified") } + cmd := fmt.Sprintf("env=%s args=%s", c.Env, shellescape.QuoteCommand(c.Args)) + r.cmds = append(r.cmds, cmd) - if err := os.Setenv("TC_BUILD_BRANCH", "master"); err != nil { - t.Fatal(err) + var paths []string + if c.Args[0] == "bazel" && c.Args[1] == "info" && c.Args[2] == "bazel-bin" { + return []byte(r.fakeBazelBin), nil } - main() - - var acts []testCase - for _, req := range r.reqs { - var act testCase - if req.Bucket != nil { - act.Bucket = *req.Bucket - } - if req.ContentDisposition != nil { - act.ContentDisposition = shaPat.ReplaceAllLiteralString(*req.ContentDisposition, shaStub) + if c.Args[0] == "bazel" && c.Args[1] == "build" && c.Args[2] == "//pkg/cmd/workload" { + paths = append(paths, filepath.Join(r.fakeBazelBin, "pkg", "cmd", "workload", "workload_", "workload")) + } else if c.Args[0] == "bazel" && c.Args[1] == "build" { + path := filepath.Join(r.fakeBazelBin, "pkg", "cmd", "cockroach", "cockroach_", "cockroach") + pathSQL := filepath.Join(r.fakeBazelBin, "pkg", "cmd", "cockroach-sql", "cockroach-sql_", "cockroach-sql") + var platform release.Platform + for _, arg := range c.Args { + if strings.HasPrefix(arg, `--config=`) { + switch strings.TrimPrefix(arg, `--config=`) { + case "crosslinuxbase": + platform = release.PlatformLinux + case "crosslinuxarmbase": + platform = release.PlatformLinuxArm + case "crossmacosbase": + platform = release.PlatformMacOS + case "crosswindowsbase": + platform = release.PlatformWindows + path += ".exe" + pathSQL += ".exe" + case "ci", "with_ui": + default: + panic(fmt.Sprintf("Unexpected configuration %s", arg)) + } + } } - if req.Key != nil { - act.Key = shaPat.ReplaceAllLiteralString(*req.Key, shaStub) + paths = append(paths, path, pathSQL) + ext := release.SharedLibraryExtensionFromPlatform(platform) + for _, lib := range release.CRDBSharedLibraries { + libDir := "lib" + if platform == release.PlatformWindows { + libDir = "bin" + } + paths = append(paths, filepath.Join(r.fakeBazelBin, "c-deps", "libgeos_foreign", libDir, lib+ext)) } - if req.WebsiteRedirectLocation != nil { - act.WebsiteRedirectLocation = shaPat.ReplaceAllLiteralString(*req.WebsiteRedirectLocation, shaStub) + } + + for _, path := range paths { + if err := os.MkdirAll(filepath.Dir(path), 0777); err != nil { + return nil, err } - if req.CacheControl != nil { - act.CacheControl = *req.CacheControl + if err := ioutil.WriteFile(path, []byte(cmd), 0666); err != nil { + return nil, err } - acts = append(acts, act) } - 
require.Equal(t, exp, acts) + var output []byte + return output, nil +} + +func TestPublish(t *testing.T) { + tests := []struct { + name string + flags runFlags + expectedCmds []string + expectedPuts []string + }{ + { + name: `release`, + flags: runFlags{ + branch: "master", + sha: "1234567890abcdef", + }, + expectedCmds: []string{ + "env=[] args=bazel build //pkg/cmd/cockroach //c-deps:libgeos //pkg/cmd/cockroach-sql " + + "'--workspace_status_command=./build/bazelutil/stamp.sh x86_64-pc-linux-gnu official-binary' -c opt --config=ci --config=with_ui --config=crosslinuxbase", + "env=[] args=bazel info bazel-bin -c opt --config=ci --config=with_ui --config=crosslinuxbase", + "env=[MALLOC_CONF=prof:true] args=./cockroach.linux-2.6.32-gnu-amd64 version", + "env=[] args=ldd ./cockroach.linux-2.6.32-gnu-amd64", + "env=[] args=bazel build //pkg/cmd/cockroach //c-deps:libgeos //pkg/cmd/cockroach-sql " + + "'--workspace_status_command=./build/bazelutil/stamp.sh x86_64-apple-darwin19 official-binary' -c opt --config=ci --config=with_ui --config=crossmacosbase", + "env=[] args=bazel info bazel-bin -c opt --config=ci --config=with_ui --config=crossmacosbase", + "env=[] args=bazel build //pkg/cmd/cockroach //c-deps:libgeos //pkg/cmd/cockroach-sql " + + "'--workspace_status_command=." + + "/build/bazelutil/stamp.sh x86_64-w64-mingw32 official-binary' -c opt --config=ci --config=with_ui --config=crosswindowsbase", + "env=[] args=bazel info bazel-bin -c opt --config=ci --config=with_ui --config=crosswindowsbase", + "env=[] args=bazel build //pkg/cmd/workload -c opt --config=crosslinux --config=ci", + "env=[] args=bazel info bazel-bin -c opt --config=crosslinux --config=ci", + }, + expectedPuts: []string{ + "s3://cockroach/cockroach/cockroach.linux-gnu-amd64.1234567890abcdef CONTENTS env=[] args=bazel build " + + "//pkg/cmd/cockroach //c-deps:libgeos //pkg/cmd/cockroach-sql " + + "'--workspace_status_command=./build/bazelutil/stamp.sh x86_64-pc-linux-gnu official-binary' " + + "-c opt --config=ci --config=with_ui --config=crosslinuxbase", + "s3://cockroach/cockroach/cockroach.linux-gnu-amd64.LATEST/no-cache REDIRECT /cockroach/cockroach.linux-gnu-amd64.1234567890abcdef", + "s3://cockroach/cockroach/cockroach-sql.linux-gnu-amd64.1234567890abcdef CONTENTS env=[] args=bazel build " + + "//pkg/cmd/cockroach //c-deps:libgeos //pkg/cmd/cockroach-sql " + + "'--workspace_status_command=./build/bazelutil/stamp.sh x86_64-pc-linux-gnu official-binary' " + + "-c opt --config=ci --config=with_ui --config=crosslinuxbase", + "s3://cockroach/cockroach/cockroach-sql.linux-gnu-amd64.LATEST/no-cache REDIRECT /cockroach/cockroach-sql.linux-gnu-amd64.1234567890abcdef", + "s3://cockroach/cockroach/lib/libgeos.linux-gnu-amd64.1234567890abcdef.so CONTENTS env=[] args=bazel build " + + "//pkg/cmd/cockroach //c-deps:libgeos //pkg/cmd/cockroach-sql " + + "'--workspace_status_command=./build/bazelutil/stamp.sh x86_64-pc-linux-gnu official-binary' " + + "-c opt --config=ci --config=with_ui --config=crosslinuxbase", + "s3://cockroach/cockroach/lib/libgeos.linux-gnu-amd64.so.LATEST/no-cache REDIRECT /cockroach/lib/libgeos.linux-gnu-amd64.1234567890abcdef.so", + "s3://cockroach/cockroach/lib/libgeos_c.linux-gnu-amd64.1234567890abcdef.so CONTENTS env=[] args=bazel build " + + "//pkg/cmd/cockroach //c-deps:libgeos //pkg/cmd/cockroach-sql " + + "'--workspace_status_command=./build/bazelutil/stamp.sh x86_64-pc-linux-gnu official-binary' " + + "-c opt --config=ci --config=with_ui --config=crosslinuxbase", + 
"s3://cockroach/cockroach/lib/libgeos_c.linux-gnu-amd64.so.LATEST/no-cache REDIRECT /cockroach/lib/libgeos_c.linux-gnu-amd64.1234567890abcdef.so", + "s3://cockroach/cockroach/cockroach.darwin-amd64.1234567890abcdef CONTENTS env=[] args=bazel build " + + "//pkg/cmd/cockroach //c-deps:libgeos //pkg/cmd/cockroach-sql " + + "'--workspace_status_command=./build/bazelutil/stamp.sh x86_64-apple-darwin19 official-binary' " + + "-c opt --config=ci --config=with_ui --config=crossmacosbase", + "s3://cockroach/cockroach/cockroach.darwin-amd64.LATEST/no-cache REDIRECT /cockroach/cockroach.darwin-amd64.1234567890abcdef", + "s3://cockroach/cockroach/cockroach-sql.darwin-amd64.1234567890abcdef CONTENTS env=[] args=bazel build " + + "//pkg/cmd/cockroach //c-deps:libgeos //pkg/cmd/cockroach-sql " + + "'--workspace_status_command=./build/bazelutil/stamp.sh x86_64-apple-darwin19 official-binary' " + + "-c opt --config=ci --config=with_ui --config=crossmacosbase", + "s3://cockroach/cockroach/cockroach-sql.darwin-amd64.LATEST/no-cache REDIRECT /cockroach/cockroach-sql.darwin-amd64.1234567890abcdef", + "s3://cockroach/cockroach/lib/libgeos.darwin-amd64.1234567890abcdef.dylib CONTENTS env=[] args=bazel build " + + "//pkg/cmd/cockroach //c-deps:libgeos //pkg/cmd/cockroach-sql " + + "'--workspace_status_command=./build/bazelutil/stamp.sh x86_64-apple-darwin19 official-binary' " + + "-c opt --config=ci --config=with_ui --config=crossmacosbase", + "s3://cockroach/cockroach/lib/libgeos.darwin-amd64.dylib.LATEST/no-cache REDIRECT /cockroach/lib/libgeos.darwin-amd64.1234567890abcdef.dylib", + "s3://cockroach/cockroach/lib/libgeos_c.darwin-amd64.1234567890abcdef.dylib CONTENTS env=[] args=bazel build " + + "//pkg/cmd/cockroach //c-deps:libgeos //pkg/cmd/cockroach-sql " + + "'--workspace_status_command=./build/bazelutil/stamp.sh x86_64-apple-darwin19 official-binary' " + + "-c opt --config=ci --config=with_ui --config=crossmacosbase", + "s3://cockroach/cockroach/lib/libgeos_c.darwin-amd64.dylib.LATEST/no-cache REDIRECT /cockroach/lib/libgeos_c.darwin-amd64.1234567890abcdef.dylib", + "s3://cockroach/cockroach/cockroach.windows-amd64.1234567890abcdef.exe CONTENTS env=[] args=bazel build " + + "//pkg/cmd/cockroach //c-deps:libgeos //pkg/cmd/cockroach-sql " + + "'--workspace_status_command=./build/bazelutil/stamp.sh x86_64-w64-mingw32 official-binary' " + + "-c opt --config=ci --config=with_ui --config=crosswindowsbase", + "s3://cockroach/cockroach/cockroach.windows-amd64.LATEST/no-cache REDIRECT /cockroach/cockroach.windows-amd64.1234567890abcdef.exe", + "s3://cockroach/cockroach/cockroach-sql.windows-amd64.1234567890abcdef.exe CONTENTS env=[] args=bazel build " + + "//pkg/cmd/cockroach //c-deps:libgeos //pkg/cmd/cockroach-sql " + + "'--workspace_status_command=./build/bazelutil/stamp.sh x86_64-w64-mingw32 official-binary' " + + "-c opt --config=ci --config=with_ui --config=crosswindowsbase", + "s3://cockroach/cockroach/cockroach-sql.windows-amd64.LATEST/no-cache REDIRECT /cockroach/cockroach-sql.windows-amd64.1234567890abcdef.exe", + "s3://cockroach/cockroach/lib/libgeos.windows-amd64.1234567890abcdef.dll CONTENTS env=[] args=bazel build " + + "//pkg/cmd/cockroach //c-deps:libgeos //pkg/cmd/cockroach-sql " + + "'--workspace_status_command=./build/bazelutil/stamp.sh x86_64-w64-mingw32 official-binary' " + + "-c opt --config=ci --config=with_ui --config=crosswindowsbase", + "s3://cockroach/cockroach/lib/libgeos.windows-amd64.dll.LATEST/no-cache REDIRECT /cockroach/lib/libgeos.windows-amd64.1234567890abcdef.dll", + 
"s3://cockroach/cockroach/lib/libgeos_c.windows-amd64.1234567890abcdef.dll CONTENTS env=[] args=bazel build " + + "//pkg/cmd/cockroach //c-deps:libgeos //pkg/cmd/cockroach-sql " + + "'--workspace_status_command=./build/bazelutil/stamp.sh x86_64-w64-mingw32 official-binary' " + + "-c opt --config=ci --config=with_ui --config=crosswindowsbase", + "s3://cockroach/cockroach/lib/libgeos_c.windows-amd64.dll.LATEST/no-cache REDIRECT /cockroach/lib/libgeos_c.windows-amd64.1234567890abcdef.dll", + "s3://cockroach/cockroach/workload.1234567890abcdef CONTENTS env=[] args=bazel build //pkg/cmd/workload -c opt --config=crosslinux --config=ci", + "s3://cockroach/cockroach/workload.LATEST/no-cache REDIRECT /cockroach/workload.1234567890abcdef", + }, + }, + } + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + dir, cleanup := testutils.TempDir(t) + defer cleanup() + + var s3 mockStorage + var gcs mockStorage + var runner mockExecRunner + fakeBazelBin, cleanup := testutils.TempDir(t) + defer cleanup() + runner.fakeBazelBin = fakeBazelBin + flags := test.flags + flags.pkgDir = dir + execFn := release.ExecFn{MockExecFn: runner.run} + run([]release.ObjectPutGetter{&s3, &gcs}, flags, execFn) + require.Equal(t, test.expectedCmds, runner.cmds) + require.Equal(t, test.expectedPuts, s3.puts) + require.Equal(t, test.expectedPuts, gcs.puts) + }) + } } diff --git a/pkg/cmd/publish-artifacts/slow_test.go b/pkg/cmd/publish-artifacts/slow_test.go deleted file mode 100644 index 694cdd358405..000000000000 --- a/pkg/cmd/publish-artifacts/slow_test.go +++ /dev/null @@ -1,18 +0,0 @@ -// Copyright 2018 The Cockroach Authors. -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. 
- -//go:build slow -// +build slow - -package main - -func init() { - slow = true -} diff --git a/pkg/cmd/publish-provisional-artifacts/BUILD.bazel b/pkg/cmd/publish-provisional-artifacts/BUILD.bazel index a23da639b176..a6885edf04af 100644 --- a/pkg/cmd/publish-provisional-artifacts/BUILD.bazel +++ b/pkg/cmd/publish-provisional-artifacts/BUILD.bazel @@ -8,9 +8,6 @@ go_library( deps = [ "//pkg/release", "//pkg/util/version", - "@com_github_aws_aws_sdk_go//aws", - "@com_github_aws_aws_sdk_go//aws/session", - "@com_github_aws_aws_sdk_go//service/s3", "@com_github_kr_pretty//:pretty", ], ) @@ -30,8 +27,6 @@ go_test( "//pkg/release", "//pkg/testutils", "@com_github_alessio_shellescape//:shellescape", - "@com_github_aws_aws_sdk_go//service/s3", - "@com_github_cockroachdb_errors//:errors", "@com_github_stretchr_testify//require", ], ) diff --git a/pkg/cmd/publish-provisional-artifacts/main.go b/pkg/cmd/publish-provisional-artifacts/main.go index b73497b5e4cf..dd360c97378e 100644 --- a/pkg/cmd/publish-provisional-artifacts/main.go +++ b/pkg/cmd/publish-provisional-artifacts/main.go @@ -13,16 +13,12 @@ package main import ( "bytes" "flag" - "io" "log" "os" "os/exec" "path/filepath" "regexp" - "github.com/aws/aws-sdk-go/aws" - "github.com/aws/aws-sdk-go/aws/session" - "github.com/aws/aws-sdk-go/service/s3" "github.com/cockroachdb/cockroach/pkg/release" "github.com/cockroachdb/cockroach/pkg/util/version" "github.com/kr/pretty" @@ -36,47 +32,49 @@ const ( var provisionalReleasePrefixRE = regexp.MustCompile(`^provisional_[0-9]{12}_`) -type s3I interface { - GetObject(*s3.GetObjectInput) (*s3.GetObjectOutput, error) - PutObject(*s3.PutObjectInput) (*s3.PutObjectOutput, error) -} - -func makeS3() (s3I, error) { - sess, err := session.NewSession(&aws.Config{ - Region: aws.String("us-east-1"), - }) - if err != nil { - return nil, err - } - return s3.New(sess), nil -} - -var isReleaseF = flag.Bool("release", false, "build in release mode instead of bleeding-edge mode") -var destBucket = flag.String("bucket", "", "override default bucket") -var doProvisionalF = flag.Bool("provisional", false, "publish provisional binaries") -var doBlessF = flag.Bool("bless", false, "bless provisional binaries") - -var ( - // TODO(tamird,benesch,bdarnell): make "latest" a website-redirect - // rather than a full key. This means that the actual artifact will no - // longer be named "-latest". 
- latestStr = "latest" -) - func main() { + var isReleaseF = flag.Bool("release", false, "build in release mode instead of bleeding-edge mode") + var destBucket = flag.String("bucket", "", "override default bucket") + var gcsBucket = flag.String("gcs-bucket", "", "override default bucket") + var doProvisionalF = flag.Bool("provisional", false, "publish provisional binaries") + var doBlessF = flag.Bool("bless", false, "bless provisional binaries") + flag.Parse() log.SetFlags(log.LstdFlags | log.Lshortfile) + var bucketName string + if len(*destBucket) > 0 { + bucketName = *destBucket + } else if *isReleaseF { + bucketName = "binaries.cockroachdb.com" + } else { + bucketName = "cockroach" + } + log.Printf("Using S3 bucket: %s", bucketName) + if _, ok := os.LookupEnv(awsAccessKeyIDKey); !ok { log.Fatalf("AWS access key ID environment variable %s is not set", awsAccessKeyIDKey) } if _, ok := os.LookupEnv(awsSecretAccessKeyKey); !ok { log.Fatalf("AWS secret access key environment variable %s is not set", awsSecretAccessKeyKey) } - s3, err := makeS3() + var providers []release.ObjectPutGetter + s3, err := release.NewS3("us-east-1", bucketName) if err != nil { log.Fatalf("Creating AWS S3 session: %s", err) } + providers = append(providers, s3) + if *gcsBucket != "" { + if _, ok := os.LookupEnv("GOOGLE_APPLICATION_CREDENTIALS"); !ok { + log.Fatal("GOOGLE_APPLICATION_CREDENTIALS environment variable is not set") + } + gcs, err := release.NewGCS(*gcsBucket) + if err != nil { + log.Fatalf("Creating GCS session: %s", err) + } + providers = append(providers, gcs) + } + branch, ok := os.LookupEnv(teamcityBuildBranchKey) if !ok { log.Fatalf("VCS branch environment variable %s is not set", teamcityBuildBranchKey) @@ -99,7 +97,7 @@ func main() { log.Fatalf("%s: out=%q err=%s", cmd.Args, shaOut, err) } - run(s3, runFlags{ + run(providers, runFlags{ doProvisional: *doProvisionalF, doBless: *doBlessF, isRelease: *isReleaseF, @@ -110,13 +108,15 @@ func main() { } type runFlags struct { - doProvisional, doBless bool - isRelease bool - branch, sha string - pkgDir string + doProvisional bool + doBless bool + isRelease bool + branch string + sha string + pkgDir string } -func run(svc s3I, flags runFlags, execFn release.ExecFn) { +func run(providers []release.ObjectPutGetter, flags runFlags, execFn release.ExecFn) { // TODO(dan): non-release builds currently aren't broken into the two // phases. Instead, the provisional phase does them both. if !flags.isRelease { @@ -130,7 +130,7 @@ func run(svc s3I, flags runFlags, execFn release.ExecFn) { // If the tag starts with "provisional_", then we're building a binary // that we hope will be some final release and the tag will be of the // form `provisional__`. If all goes well with the - // long running tests, these bits will be released exactly as-is, so the + // long-running tests, these bits will be released exactly as-is, so the // version is set to by stripping the prefix. 
versionStr = provisionalReleasePrefixRE.ReplaceAllLiteralString(flags.branch, "") @@ -154,24 +154,14 @@ func run(svc s3I, flags runFlags, execFn release.ExecFn) { updateLatest = true } - var bucketName string - if len(*destBucket) > 0 { - bucketName = *destBucket - } else if flags.isRelease { - bucketName = "binaries.cockroachdb.com" - } else { - bucketName = "cockroach" - } - log.Printf("Using S3 bucket: %s", bucketName) - + platforms := []release.Platform{release.PlatformLinux, release.PlatformMacOS, release.PlatformWindows} var cockroachBuildOpts []opts - for _, platform := range []release.Platform{release.PlatformLinux, release.PlatformMacOS, release.PlatformWindows} { + for _, platform := range platforms { var o opts o.Platform = platform o.PkgDir = flags.pkgDir o.Branch = flags.branch o.VersionStr = versionStr - o.BucketName = bucketName o.AbsolutePath = filepath.Join(flags.pkgDir, "cockroach"+release.SuffixFromPlatform(platform)) o.CockroachSQLAbsolutePath = filepath.Join(flags.pkgDir, "cockroach-sql"+release.SuffixFromPlatform(platform)) cockroachBuildOpts = append(cockroachBuildOpts, o) @@ -182,9 +172,41 @@ func run(svc s3I, flags runFlags, execFn release.ExecFn) { buildCockroach(flags, o, execFn) if !flags.isRelease { - putNonRelease(svc, o) + for _, provider := range providers { + release.PutNonRelease( + provider, + release.PutNonReleaseOptions{ + Branch: o.Branch, + Files: append( + []release.NonReleaseFile{ + release.MakeCRDBBinaryNonReleaseFile(o.AbsolutePath, o.VersionStr), + release.MakeCRDBBinaryNonReleaseFile(o.CockroachSQLAbsolutePath, o.VersionStr), + }, + release.MakeCRDBLibraryNonReleaseFiles(o.PkgDir, o.Platform, o.VersionStr)..., + ), + }, + ) + } } else { - putRelease(svc, o) + for _, provider := range providers { + release.PutRelease(provider, release.PutReleaseOptions{ + NoCache: false, + Platform: o.Platform, + VersionStr: o.VersionStr, + ArchivePrefix: "cockroach", + Files: append( + []release.ArchiveFile{release.MakeCRDBBinaryArchiveFile(o.AbsolutePath, "cockroach")}, + release.MakeCRDBLibraryArchiveFiles(o.PkgDir, o.Platform)..., + ), + }) + release.PutRelease(provider, release.PutReleaseOptions{ + NoCache: false, + Platform: o.Platform, + VersionStr: o.VersionStr, + ArchivePrefix: "cockroach-sql", + Files: []release.ArchiveFile{release.MakeCRDBBinaryArchiveFile(o.CockroachSQLAbsolutePath, "cockroach-sql")}, + }) + } } } } @@ -194,7 +216,9 @@ func run(svc s3I, flags runFlags, execFn release.ExecFn) { } if updateLatest { for _, o := range cockroachBuildOpts { - markLatestRelease(svc, o) + for _, provider := range providers { + markLatestRelease(provider, o) + } } } } @@ -219,94 +243,19 @@ func buildCockroach(flags runFlags, o opts, execFn release.ExecFn) { } type opts struct { - VersionStr string - Branch string - - Platform release.Platform - + VersionStr string + Branch string + Platform release.Platform AbsolutePath string CockroachSQLAbsolutePath string - BucketName string PkgDir string } -func putNonRelease(svc s3I, o opts) { - release.PutNonRelease( - svc, - release.PutNonReleaseOptions{ - Branch: o.Branch, - BucketName: o.BucketName, - Files: append( - []release.NonReleaseFile{ - release.MakeCRDBBinaryNonReleaseFile(o.AbsolutePath, o.VersionStr), - release.MakeCRDBBinaryNonReleaseFile(o.CockroachSQLAbsolutePath, o.VersionStr), - }, - release.MakeCRDBLibraryNonReleaseFiles(o.PkgDir, o.Platform, o.VersionStr)..., - ), - }, - ) -} - -func s3KeyRelease(o opts) (string, string) { - return release.S3KeyRelease(o.Platform, o.VersionStr, "cockroach") -} - 
-func putRelease(svc s3I, o opts) { - release.PutRelease(svc, release.PutReleaseOptions{ - BucketName: o.BucketName, - NoCache: false, - Platform: o.Platform, - VersionStr: o.VersionStr, - ArchivePrefix: "cockroach", - Files: append( - []release.ArchiveFile{release.MakeCRDBBinaryArchiveFile(o.AbsolutePath, "cockroach")}, - release.MakeCRDBLibraryArchiveFiles(o.PkgDir, o.Platform)..., - ), - }) - release.PutRelease(svc, release.PutReleaseOptions{ - BucketName: o.BucketName, - NoCache: false, - Platform: o.Platform, - VersionStr: o.VersionStr, - ArchivePrefix: "cockroach-sql", - Files: []release.ArchiveFile{release.MakeCRDBBinaryArchiveFile(o.AbsolutePath, "cockroach-sql")}, - }) -} - -func markLatestRelease(svc s3I, o opts) { - markLatestReleaseWithSuffix(svc, o, "") - markLatestReleaseWithSuffix(svc, o, release.ChecksumSuffix) -} - -func markLatestReleaseWithSuffix(svc s3I, o opts, suffix string) { - _, keyRelease := s3KeyRelease(o) - keyRelease += suffix - log.Printf("Downloading from %s/%s", o.BucketName, keyRelease) - binary, err := svc.GetObject(&s3.GetObjectInput{ - Bucket: &o.BucketName, - Key: &keyRelease, - }) - if err != nil { - log.Fatal(err) - } - defer binary.Body.Close() - var buf bytes.Buffer - if _, err := io.Copy(&buf, binary.Body); err != nil { - log.Fatalf("downloading %s/%s: %s", o.BucketName, keyRelease, err) - } - - oLatest := o - oLatest.VersionStr = latestStr - _, keyLatest := s3KeyRelease(oLatest) - keyLatest += suffix - log.Printf("Uploading to s3://%s/%s", o.BucketName, keyLatest) - putObjectInput := s3.PutObjectInput{ - Bucket: &o.BucketName, - Key: &keyLatest, - Body: bytes.NewReader(buf.Bytes()), - CacheControl: &release.NoCache, - } - if _, err := svc.PutObject(&putObjectInput); err != nil { - log.Fatalf("s3 upload %s: %s", keyLatest, err) +func markLatestRelease(svc release.ObjectPutGetter, o opts) { + latestOpts := release.LatestOpts{ + Platform: o.Platform, + VersionStr: o.VersionStr, } + release.MarkLatestReleaseWithSuffix(svc, latestOpts, "") + release.MarkLatestReleaseWithSuffix(svc, latestOpts, release.ChecksumSuffix) } diff --git a/pkg/cmd/publish-provisional-artifacts/main_test.go b/pkg/cmd/publish-provisional-artifacts/main_test.go index f62df6d16968..2c11c7bcdf0f 100644 --- a/pkg/cmd/publish-provisional-artifacts/main_test.go +++ b/pkg/cmd/publish-provisional-artifacts/main_test.go @@ -22,53 +22,60 @@ import ( "unicode/utf8" "github.com/alessio/shellescape" - "github.com/aws/aws-sdk-go/service/s3" "github.com/cockroachdb/cockroach/pkg/release" "github.com/cockroachdb/cockroach/pkg/testutils" - "github.com/cockroachdb/errors" "github.com/stretchr/testify/require" ) -type mockS3 struct { - gets []string - puts []string +type mockStorage struct { + bucket string + gets []string + puts []string } -var _ s3I = (*mockS3)(nil) +var _ release.ObjectPutGetter = (*mockStorage)(nil) -func (s *mockS3) GetObject(i *s3.GetObjectInput) (*s3.GetObjectOutput, error) { - url := fmt.Sprintf(`s3://%s/%s`, *i.Bucket, *i.Key) +func (s *mockStorage) Bucket() string { + return s.bucket +} + +func (s mockStorage) URL(key string) string { + return "storage://bucket/" + key +} + +func (s *mockStorage) GetObject(i *release.GetObjectInput) (*release.GetObjectOutput, error) { + url := fmt.Sprintf(`s3://%s/%s`, s.Bucket(), *i.Key) s.gets = append(s.gets, url) - o := &s3.GetObjectOutput{ + o := &release.GetObjectOutput{ Body: ioutil.NopCloser(bytes.NewBufferString(url)), } return o, nil } -func (s *mockS3) PutObject(i *s3.PutObjectInput) (*s3.PutObjectOutput, error) { - url := 
fmt.Sprintf(`s3://%s/%s`, *i.Bucket, *i.Key) +func (s *mockStorage) PutObject(i *release.PutObjectInput) error { + url := fmt.Sprintf(`s3://%s/%s`, s.Bucket(), *i.Key) if i.CacheControl != nil { url += `/` + *i.CacheControl } if i.Body != nil { - bytes, err := ioutil.ReadAll(i.Body) + binary, err := ioutil.ReadAll(i.Body) if err != nil { - return nil, err + return err } if strings.HasSuffix(*i.Key, release.ChecksumSuffix) { // Unfortunately the archive tarball checksum changes every time, // because we generate tarballs and the copy file modification time from the generated files. // This makes the checksum not reproducible. s.puts = append(s.puts, fmt.Sprintf("%s CONTENTS ", url)) - } else if utf8.Valid(bytes) { - s.puts = append(s.puts, fmt.Sprintf("%s CONTENTS %s", url, bytes)) + } else if utf8.Valid(binary) { + s.puts = append(s.puts, fmt.Sprintf("%s CONTENTS %s", url, binary)) } else { s.puts = append(s.puts, fmt.Sprintf("%s CONTENTS ", url)) } } else if i.WebsiteRedirectLocation != nil { s.puts = append(s.puts, fmt.Sprintf("%s REDIRECT %s", url, *i.WebsiteRedirectLocation)) } - return &s3.PutObjectOutput{}, nil + return nil } type mockExecRunner struct { @@ -80,8 +87,8 @@ func (r *mockExecRunner) run(c *exec.Cmd) ([]byte, error) { if r.fakeBazelBin == "" { panic("r.fakeBazelBin not set") } - if c.Dir == `` { - return nil, errors.Errorf(`Dir must be specified`) + if c.Dir == "" { + return nil, fmt.Errorf("`Dir` must be specified") } cmd := fmt.Sprintf("env=%s args=%s", c.Env, shellescape.QuoteCommand(c.Args)) r.cmds = append(r.cmds, cmd) @@ -120,7 +127,7 @@ func (r *mockExecRunner) run(c *exec.Cmd) ([]byte, error) { if platform == release.PlatformWindows { libDir = "bin" } - paths = append(paths, filepath.Join(r.fakeBazelBin, "c-deps", "libgeos", libDir, lib+ext)) + paths = append(paths, filepath.Join(r.fakeBazelBin, "c-deps", "libgeos_foreign", libDir, lib+ext)) } } @@ -192,8 +199,7 @@ func TestProvisional(t *testing.T) { }, expectedCmds: []string{ "env=[] args=bazel build //pkg/cmd/cockroach //c-deps:libgeos //pkg/cmd/cockroach-sql " + - "'--workspace_status_command=." + - "/build/bazelutil/stamp.sh x86_64-pc-linux-gnu official-binary' -c opt --config=ci --config=with_ui --config=crosslinuxbase", + "'--workspace_status_command=./build/bazelutil/stamp.sh x86_64-pc-linux-gnu official-binary' -c opt --config=ci --config=with_ui --config=crosslinuxbase", "env=[] args=bazel info bazel-bin -c opt --config=ci --config=with_ui --config=crosslinuxbase", "env=[MALLOC_CONF=prof:true] args=./cockroach.linux-2.6.32-gnu-amd64 version", "env=[] args=ldd ./cockroach.linux-2.6.32-gnu-amd64", @@ -201,57 +207,58 @@ func TestProvisional(t *testing.T) { "'--workspace_status_command=./build/bazelutil/stamp.sh x86_64-apple-darwin19 official-binary' -c opt --config=ci --config=with_ui --config=crossmacosbase", "env=[] args=bazel info bazel-bin -c opt --config=ci --config=with_ui --config=crossmacosbase", "env=[] args=bazel build //pkg/cmd/cockroach //c-deps:libgeos //pkg/cmd/cockroach-sql " + - "'--workspace_status_command=./build/bazelutil/stamp.sh x86_64-w64-mingw32 official-binary' -c opt --config=ci --config=with_ui --config=crosswindowsbase", + "'--workspace_status_command=./build/bazelutil/stamp." 
+ + "sh x86_64-w64-mingw32 official-binary' -c opt --config=ci --config=with_ui --config=crosswindowsbase", "env=[] args=bazel info bazel-bin -c opt --config=ci --config=with_ui --config=crosswindowsbase", }, expectedGets: nil, expectedPuts: []string{ - "s3://cockroach//cockroach/cockroach.linux-gnu-amd64.00SHA00 " + + "s3://cockroach/cockroach/cockroach.linux-gnu-amd64.00SHA00 " + "CONTENTS env=[] args=bazel build //pkg/cmd/cockroach //c-deps:libgeos //pkg/cmd/cockroach-sql " + "'--workspace_status_command=./build/bazelutil/stamp." + "sh x86_64-pc-linux-gnu official-binary' -c opt --config=ci --config=with_ui --config=crosslinuxbase", "s3://cockroach/cockroach/cockroach.linux-gnu-amd64.LATEST/no-cache " + "REDIRECT /cockroach/cockroach.linux-gnu-amd64.00SHA00", - "s3://cockroach//cockroach/cockroach-sql.linux-gnu-amd64.00SHA00 CONTENTS env=[] args=bazel build //pkg/cmd/cockroach //c-deps:libgeos //pkg/cmd/cockroach-sql '--workspace_status_command=./build/bazelutil/stamp.sh x86_64-pc-linux-gnu official-binary' -c opt --config=ci --config=with_ui --config=crosslinuxbase", + "s3://cockroach/cockroach/cockroach-sql.linux-gnu-amd64.00SHA00 CONTENTS env=[] args=bazel build //pkg/cmd/cockroach //c-deps:libgeos //pkg/cmd/cockroach-sql '--workspace_status_command=./build/bazelutil/stamp.sh x86_64-pc-linux-gnu official-binary' -c opt --config=ci --config=with_ui --config=crosslinuxbase", "s3://cockroach/cockroach/cockroach-sql.linux-gnu-amd64.LATEST/no-cache REDIRECT /cockroach/cockroach-sql.linux-gnu-amd64.00SHA00", - "s3://cockroach//cockroach/lib/libgeos.linux-gnu-amd64.00SHA00." + + "s3://cockroach/cockroach/lib/libgeos.linux-gnu-amd64.00SHA00." + "so CONTENTS env=[] args=bazel build //pkg/cmd/cockroach //c-deps:libgeos //pkg/cmd/cockroach-sql " + "'--workspace_status_command=./build/bazelutil/stamp.sh x86_64-pc-linux-gnu official-binary' -c opt --config=ci --config=with_ui --config=crosslinuxbase", "s3://cockroach/cockroach/lib/libgeos.linux-gnu-amd64.so.LATEST/no-cache REDIRECT /cockroach/lib/libgeos.linux-gnu-amd64.00SHA00.so", - "s3://cockroach//cockroach/lib/libgeos_c.linux-gnu-amd64.00SHA00." + + "s3://cockroach/cockroach/lib/libgeos_c.linux-gnu-amd64.00SHA00." 
+ "so CONTENTS env=[] args=bazel build //pkg/cmd/cockroach //c-deps:libgeos //pkg/cmd/cockroach-sql " + "'--workspace_status_command=./build/bazelutil/stamp.sh x86_64-pc-linux-gnu official-binary' -c opt --config=ci --config=with_ui --config=crosslinuxbase", "s3://cockroach/cockroach/lib/libgeos_c.linux-gnu-amd64.so.LATEST/no-cache REDIRECT /cockroach/lib/libgeos_c.linux-gnu-amd64.00SHA00.so", - "s3://cockroach//cockroach/cockroach.darwin-amd64.00SHA00 " + + "s3://cockroach/cockroach/cockroach.darwin-amd64.00SHA00 " + "CONTENTS env=[] args=bazel build //pkg/cmd/cockroach //c-deps:libgeos //pkg/cmd/cockroach-sql " + "'--workspace_status_command=./build/bazelutil/stamp.sh x86_64-apple-darwin19 official-binary' -c opt --config=ci --config=with_ui --config=crossmacosbase", "s3://cockroach/cockroach/cockroach.darwin-amd64.LATEST/no-cache " + "REDIRECT /cockroach/cockroach.darwin-amd64.00SHA00", - "s3://cockroach//cockroach/cockroach-sql.darwin-amd64.00SHA00 CONTENTS env=[] args=bazel build //pkg/cmd/cockroach //c-deps:libgeos //pkg/cmd/cockroach-sql '--workspace_status_command=./build/bazelutil/stamp.sh x86_64-apple-darwin19 official-binary' -c opt --config=ci --config=with_ui --config=crossmacosbase", + "s3://cockroach/cockroach/cockroach-sql.darwin-amd64.00SHA00 CONTENTS env=[] args=bazel build //pkg/cmd/cockroach //c-deps:libgeos //pkg/cmd/cockroach-sql '--workspace_status_command=./build/bazelutil/stamp.sh x86_64-apple-darwin19 official-binary' -c opt --config=ci --config=with_ui --config=crossmacosbase", "s3://cockroach/cockroach/cockroach-sql.darwin-amd64.LATEST/no-cache REDIRECT /cockroach/cockroach-sql." + "darwin-amd64.00SHA00", - "s3://cockroach//cockroach/lib/libgeos.darwin-amd64.00SHA00." + + "s3://cockroach/cockroach/lib/libgeos.darwin-amd64.00SHA00." + "dylib CONTENTS env=[] args=bazel build //pkg/cmd/cockroach //c-deps:libgeos //pkg/cmd/cockroach-sql " + "'--workspace_status_command=./build/bazelutil/stamp.sh x86_64-apple-darwin19 official-binary' -c opt --config=ci --config=with_ui --config=crossmacosbase", "s3://cockroach/cockroach/lib/libgeos.darwin-amd64.dylib.LATEST/no-cache REDIRECT /cockroach/lib/libgeos.darwin-amd64.00SHA00.dylib", - "s3://cockroach//cockroach/lib/libgeos_c.darwin-amd64.00SHA00." + + "s3://cockroach/cockroach/lib/libgeos_c.darwin-amd64.00SHA00." + "dylib CONTENTS env=[] args=bazel build //pkg/cmd/cockroach //c-deps:libgeos //pkg/cmd/cockroach-sql " + "'--workspace_status_command=./build/bazelutil/stamp." + "sh x86_64-apple-darwin19 official-binary' -c opt --config=ci --config=with_ui --config=crossmacosbase", "s3://cockroach/cockroach/lib/libgeos_c.darwin-amd64.dylib.LATEST/no-cache REDIRECT /cockroach/lib/libgeos_c.darwin-amd64.00SHA00.dylib", - "s3://cockroach//cockroach/cockroach.windows-amd64.00SHA00.exe " + + "s3://cockroach/cockroach/cockroach.windows-amd64.00SHA00.exe " + "CONTENTS env=[] args=bazel build //pkg/cmd/cockroach //c-deps:libgeos //pkg/cmd/cockroach-sql " + "'--workspace_status_command=./build/bazelutil/stamp." 
+ "sh x86_64-w64-mingw32 official-binary' -c opt --config=ci --config=with_ui --config=crosswindowsbase", "s3://cockroach/cockroach/cockroach.windows-amd64.LATEST/no-cache " + "REDIRECT /cockroach/cockroach.windows-amd64.00SHA00.exe", - "s3://cockroach//cockroach/cockroach-sql.windows-amd64.00SHA00.exe CONTENTS env=[] args=bazel build //pkg/cmd/cockroach //c-deps:libgeos //pkg/cmd/cockroach-sql '--workspace_status_command=./build/bazelutil/stamp.sh x86_64-w64-mingw32 official-binary' -c opt --config=ci --config=with_ui --config=crosswindowsbase", + "s3://cockroach/cockroach/cockroach-sql.windows-amd64.00SHA00.exe CONTENTS env=[] args=bazel build //pkg/cmd/cockroach //c-deps:libgeos //pkg/cmd/cockroach-sql '--workspace_status_command=./build/bazelutil/stamp.sh x86_64-w64-mingw32 official-binary' -c opt --config=ci --config=with_ui --config=crosswindowsbase", "s3://cockroach/cockroach/cockroach-sql.windows-amd64.LATEST/no-cache REDIRECT /cockroach/cockroach-sql.windows-amd64.00SHA00.exe", - "s3://cockroach//cockroach/lib/libgeos.windows-amd64.00SHA00." + + "s3://cockroach/cockroach/lib/libgeos.windows-amd64.00SHA00." + "dll CONTENTS env=[] args=bazel build //pkg/cmd/cockroach //c-deps:libgeos //pkg/cmd/cockroach-sql " + "'--workspace_status_command=./build/bazelutil/stamp.sh x86_64-w64-mingw32 official-binary' -c opt --config=ci --config=with_ui --config=crosswindowsbase", "s3://cockroach/cockroach/lib/libgeos.windows-amd64.dll.LATEST/no-cache REDIRECT /cockroach/lib/libgeos.windows-amd64.00SHA00.dll", - "s3://cockroach//cockroach/lib/libgeos_c.windows-amd64.00SHA00." + + "s3://cockroach/cockroach/lib/libgeos_c.windows-amd64.00SHA00." + "dll CONTENTS env=[] args=bazel build //pkg/cmd/cockroach //c-deps:libgeos //pkg/cmd/cockroach-sql " + "'--workspace_status_command=./build/bazelutil/stamp.sh x86_64-w64-mingw32 official-binary' -c opt --config=ci --config=with_ui --config=crosswindowsbase", "s3://cockroach/cockroach/lib/libgeos_c.windows-amd64.dll.LATEST/no-cache REDIRECT /cockroach/lib/libgeos_c.windows-amd64.00SHA00.dll", @@ -263,18 +270,29 @@ func TestProvisional(t *testing.T) { dir, cleanup := testutils.TempDir(t) defer cleanup() - var s3 mockS3 - var exec mockExecRunner + var s3 mockStorage + s3.bucket = "cockroach" + if test.flags.isRelease { + s3.bucket = "binaries.cockroachdb.com" + } + var gcs mockStorage + gcs.bucket = "cockroach" + if test.flags.isRelease { + gcs.bucket = "binaries.cockroachdb.com" + } + var runner mockExecRunner fakeBazelBin, cleanup := testutils.TempDir(t) defer cleanup() - exec.fakeBazelBin = fakeBazelBin + runner.fakeBazelBin = fakeBazelBin flags := test.flags flags.pkgDir = dir - execFn := release.ExecFn{MockExecFn: exec.run} - run(&s3, flags, execFn) - require.Equal(t, test.expectedCmds, exec.cmds) + execFn := release.ExecFn{MockExecFn: runner.run} + run([]release.ObjectPutGetter{&s3, &gcs}, flags, execFn) + require.Equal(t, test.expectedCmds, runner.cmds) require.Equal(t, test.expectedGets, s3.gets) require.Equal(t, test.expectedPuts, s3.puts) + require.Equal(t, test.expectedGets, gcs.gets) + require.Equal(t, test.expectedPuts, gcs.puts) }) } } @@ -303,33 +321,30 @@ func TestBless(t *testing.T) { isRelease: true, branch: `provisional_201901010101_v0.0.1`, }, - expectedGets: []string{ - "s3://binaries.cockroachdb.com/cockroach-v0.0.1.linux-amd64.tgz", - "s3://binaries.cockroachdb.com/cockroach-v0.0.1.linux-amd64.tgz.sha256sum", - "s3://binaries.cockroachdb.com/cockroach-v0.0.1.darwin-10.9-amd64.tgz", - 
"s3://binaries.cockroachdb.com/cockroach-v0.0.1.darwin-10.9-amd64.tgz.sha256sum", - "s3://binaries.cockroachdb.com/cockroach-v0.0.1.windows-6.2-amd64.zip", - "s3://binaries.cockroachdb.com/cockroach-v0.0.1.windows-6.2-amd64.zip.sha256sum", - }, + expectedGets: nil, expectedPuts: []string{ "s3://binaries.cockroachdb.com/cockroach-latest.linux-amd64.tgz/no-cache " + - "CONTENTS s3://binaries.cockroachdb.com/cockroach-v0.0.1.linux-amd64.tgz", - "s3://binaries.cockroachdb.com/cockroach-latest.linux-amd64.tgz.sha256sum/no-cache CONTENTS ", + "REDIRECT cockroach-v0.0.1.linux-amd64.tgz", + "s3://binaries.cockroachdb.com/cockroach-latest.linux-amd64.tgz.sha256sum/no-cache " + + "REDIRECT cockroach-v0.0.1.linux-amd64.tgz.sha256sum", "s3://binaries.cockroachdb.com/cockroach-latest.darwin-10.9-amd64.tgz/no-cache " + - "CONTENTS s3://binaries.cockroachdb.com/cockroach-v0.0.1.darwin-10.9-amd64.tgz", - "s3://binaries.cockroachdb.com/cockroach-latest.darwin-10.9-amd64.tgz.sha256sum/no-cache CONTENTS ", + "REDIRECT cockroach-v0.0.1.darwin-10.9-amd64.tgz", + "s3://binaries.cockroachdb.com/cockroach-latest.darwin-10.9-amd64.tgz.sha256sum/no-cache " + + "REDIRECT cockroach-v0.0.1.darwin-10.9-amd64.tgz.sha256sum", "s3://binaries.cockroachdb.com/cockroach-latest.windows-6.2-amd64.zip/no-cache " + - "CONTENTS s3://binaries.cockroachdb.com/cockroach-v0.0.1.windows-6.2-amd64.zip", - "s3://binaries.cockroachdb.com/cockroach-latest.windows-6.2-amd64.zip.sha256sum/no-cache CONTENTS ", + "REDIRECT cockroach-v0.0.1.windows-6.2-amd64.zip", + "s3://binaries.cockroachdb.com/cockroach-latest.windows-6.2-amd64.zip.sha256sum/no-cache " + + "REDIRECT cockroach-v0.0.1.windows-6.2-amd64.zip.sha256sum", }, }, } for _, test := range tests { t.Run(test.name, func(t *testing.T) { - var s3 mockS3 + var s3 mockStorage + s3.bucket = "binaries.cockroachdb.com" var execFn release.ExecFn // bless shouldn't exec anything - run(&s3, test.flags, execFn) + run([]release.ObjectPutGetter{&s3}, test.flags, execFn) require.Equal(t, test.expectedGets, s3.gets) require.Equal(t, test.expectedPuts, s3.puts) }) diff --git a/pkg/release/BUILD.bazel b/pkg/release/BUILD.bazel index 7ed6ef37aa9a..c9fd3a793419 100644 --- a/pkg/release/BUILD.bazel +++ b/pkg/release/BUILD.bazel @@ -2,12 +2,21 @@ load("@io_bazel_rules_go//go:def.bzl", "go_library") go_library( name = "release", - srcs = ["release.go"], + srcs = [ + "build.go", + "gcs.go", + "release.go", + "s3.go", + "upload.go", + ], importpath = "github.com/cockroachdb/cockroach/pkg/release", visibility = ["//visibility:public"], deps = [ "//pkg/build/util", + "@com_github_aws_aws_sdk_go//aws", + "@com_github_aws_aws_sdk_go//aws/session", "@com_github_aws_aws_sdk_go//service/s3", "@com_github_cockroachdb_errors//:errors", + "@com_google_cloud_go_storage//:storage", ], ) diff --git a/pkg/release/build.go b/pkg/release/build.go new file mode 100644 index 000000000000..5e36534ef04a --- /dev/null +++ b/pkg/release/build.go @@ -0,0 +1,321 @@ +// Copyright 2020 The Cockroach Authors. +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package release + +import ( + "bufio" + "bytes" + "fmt" + "io" + "log" + "os" + "os/exec" + "path/filepath" + "regexp" + "strings" + + "github.com/cockroachdb/cockroach/pkg/build/util" + "github.com/cockroachdb/errors" +) + +// BuildOptions is a set of options that may be applied to a build. +type BuildOptions struct { + // True iff this is a release build. + Release bool + // BuildTag must be set if Release is set, and vice-versa. + BuildTag string + + // ExecFn.Run() is called to execute commands for this build. + // The zero value is appropriate in "real" scenarios but for + // tests you can update ExecFn.MockExecFn. + ExecFn ExecFn +} + +// SuffixFromPlatform returns the suffix that will be appended to the +// `cockroach` binary when built with the given platform. The binary +// itself can be found in pkgDir/cockroach$SUFFIX after the build. +func SuffixFromPlatform(platform Platform) string { + switch platform { + case PlatformLinux: + return ".linux-2.6.32-gnu-amd64" + case PlatformLinuxArm: + return ".linux-3.7.10-gnu-aarch64" + case PlatformMacOS: + // TODO(#release): The architecture is at least 10.10 until v20.2 and 10.15 for + // v21.1 and after. Check whether this can be changed. + return ".darwin-10.9-amd64" + case PlatformWindows: + return ".windows-6.2-amd64.exe" + default: + panic(errors.Newf("unknown platform %d", platform)) + } +} + +// CrossConfigFromPlatform returns the cross*base config corresponding +// to the given platform. (See .bazelrc for more details.) +func CrossConfigFromPlatform(platform Platform) string { + switch platform { + case PlatformLinux: + return "crosslinuxbase" + case PlatformLinuxArm: + return "crosslinuxarmbase" + case PlatformMacOS: + return "crossmacosbase" + case PlatformWindows: + return "crosswindowsbase" + default: + panic(errors.Newf("unknown platform %d", platform)) + } +} + +// TargetTripleFromPlatform returns the target triple that will be baked +// into the cockroach binary for the given platform. +func TargetTripleFromPlatform(platform Platform) string { + switch platform { + case PlatformLinux: + return "x86_64-pc-linux-gnu" + case PlatformLinuxArm: + return "aarch64-unknown-linux-gnu" + case PlatformMacOS: + return "x86_64-apple-darwin19" + case PlatformWindows: + return "x86_64-w64-mingw32" + default: + panic(errors.Newf("unknown platform %d", platform)) + } +} + +// SharedLibraryExtensionFromPlatform returns the shared library extensions for a given Platform. +func SharedLibraryExtensionFromPlatform(platform Platform) string { + switch platform { + case PlatformLinux, PlatformLinuxArm: + return ".so" + case PlatformWindows: + return ".dll" + case PlatformMacOS: + return ".dylib" + default: + panic(errors.Newf("unknown platform %d", platform)) + } +} + +// MakeWorkload makes the bin/workload binary. It is only ever built in the +// crosslinux configuration. +func MakeWorkload(opts BuildOptions, pkgDir string) error { + if opts.Release { + return errors.Newf("cannot build workload in Release mode") + } + // NB: workload doesn't need anything stamped so we can use `crosslinux` + // rather than `crosslinuxbase`.
+ cmd := exec.Command("bazel", "build", "//pkg/cmd/workload", "-c", "opt", "--config=crosslinux", "--config=ci") + cmd.Dir = pkgDir + cmd.Stderr = os.Stderr + log.Printf("%s", cmd.Args) + stdoutBytes, err := opts.ExecFn.Run(cmd) + if err != nil { + return errors.Wrapf(err, "failed to run %s: %s", cmd.Args, string(stdoutBytes)) + } + + bazelBin, err := getPathToBazelBin(opts.ExecFn, pkgDir, []string{"-c", "opt", "--config=crosslinux", "--config=ci"}) + if err != nil { + return err + } + return stageBinary("//pkg/cmd/workload", PlatformLinux, bazelBin, filepath.Join(pkgDir, "bin"), false) +} + +// MakeRelease makes the release binary and associated files. +func MakeRelease(platform Platform, opts BuildOptions, pkgDir string) error { + buildArgs := []string{"build", "//pkg/cmd/cockroach", "//c-deps:libgeos", "//pkg/cmd/cockroach-sql"} + targetTriple := TargetTripleFromPlatform(platform) + if opts.Release { + if opts.BuildTag == "" { + return errors.Newf("must set BuildTag if Release is set") + } + buildArgs = append(buildArgs, fmt.Sprintf("--workspace_status_command=./build/bazelutil/stamp.sh %s official-binary %s release", targetTriple, opts.BuildTag)) + } else { + if opts.BuildTag != "" { + return errors.Newf("cannot set BuildTag if Release is not set") + } + buildArgs = append(buildArgs, fmt.Sprintf("--workspace_status_command=./build/bazelutil/stamp.sh %s official-binary", targetTriple)) + } + configs := []string{"-c", "opt", "--config=ci", "--config=with_ui", fmt.Sprintf("--config=%s", CrossConfigFromPlatform(platform))} + buildArgs = append(buildArgs, configs...) + cmd := exec.Command("bazel", buildArgs...) + cmd.Dir = pkgDir + cmd.Stderr = os.Stderr + log.Printf("%s", cmd.Args) + stdoutBytes, err := opts.ExecFn.Run(cmd) + if err != nil { + return errors.Wrapf(err, "failed to run %s: %s", cmd.Args, string(stdoutBytes)) + } + + // Stage binaries from bazel-bin. + bazelBin, err := getPathToBazelBin(opts.ExecFn, pkgDir, configs) + if err != nil { + return err + } + if err := stageBinary("//pkg/cmd/cockroach", platform, bazelBin, pkgDir, true); err != nil { + return err + } + // TODO: strip the binary + if err := stageBinary("//pkg/cmd/cockroach-sql", platform, bazelBin, pkgDir, true); err != nil { + return err + } + if err := stageLibraries(platform, bazelBin, filepath.Join(pkgDir, "lib")); err != nil { + return err + } + + if platform == PlatformLinux { + suffix := SuffixFromPlatform(platform) + binaryName := "./cockroach" + suffix + + cmd := exec.Command(binaryName, "version") + cmd.Dir = pkgDir + cmd.Env = append(cmd.Env, "MALLOC_CONF=prof:true") + cmd.Stderr = os.Stderr + log.Printf("%s %s", cmd.Env, cmd.Args) + stdoutBytes, err := opts.ExecFn.Run(cmd) + if err != nil { + return errors.Wrapf(err, "%s %s: %s", cmd.Env, cmd.Args, string(stdoutBytes)) + } + + cmd = exec.Command("ldd", binaryName) + cmd.Dir = pkgDir + cmd.Stderr = os.Stderr + log.Printf("%s %s", cmd.Env, cmd.Args) + stdoutBytes, err = opts.ExecFn.Run(cmd) + if err != nil { + log.Fatalf("%s %s: out=%s err=%v", cmd.Env, cmd.Args, string(stdoutBytes), err) + } + scanner := bufio.NewScanner(bytes.NewReader(stdoutBytes)) + for scanner.Scan() { + if line := scanner.Text(); !linuxStaticLibsRe.MatchString(line) { + return errors.Newf("%s is not properly statically linked:\n%s", binaryName, line) + } + } + if err := scanner.Err(); err != nil { + return err + } + } + return nil +} + +var ( + // linuxStaticLibsRe returns the regexp of all static libraries.
+ linuxStaticLibsRe = func() *regexp.Regexp { + libs := strings.Join([]string{ + regexp.QuoteMeta("linux-vdso.so."), + regexp.QuoteMeta("librt.so."), + regexp.QuoteMeta("libpthread.so."), + regexp.QuoteMeta("libdl.so."), + regexp.QuoteMeta("libm.so."), + regexp.QuoteMeta("libc.so."), + regexp.QuoteMeta("libresolv.so."), + strings.Replace(regexp.QuoteMeta("ld-linux-ARCH.so."), "ARCH", ".*", -1), + }, "|") + return regexp.MustCompile(libs) + }() + osVersionRe = regexp.MustCompile(`\d+(\.\d+)*-`) +) + +// Platform is an enumeration of the supported platforms for release. +type Platform int + +const ( + // PlatformLinux is the Linux x86_64 target. + PlatformLinux Platform = iota + // PlatformLinuxArm is the Linux aarch64 target. + PlatformLinuxArm + // PlatformMacOS is the Darwin x86_64 target. + PlatformMacOS + // PlatformWindows is the Windows (mingw) x86_64 target. + PlatformWindows +) + +func getPathToBazelBin(execFn ExecFn, pkgDir string, configArgs []string) (string, error) { + args := []string{"info", "bazel-bin"} + args = append(args, configArgs...) + cmd := exec.Command("bazel", args...) + cmd.Dir = pkgDir + cmd.Stderr = os.Stderr + stdoutBytes, err := execFn.Run(cmd) + if err != nil { + return "", errors.Wrapf(err, "failed to run %s: %s", cmd.Args, string(stdoutBytes)) + } + return strings.TrimSpace(string(stdoutBytes)), nil +} + +func stageBinary( + target string, platform Platform, bazelBin string, dir string, includePlatformSuffix bool, +) error { + if err := os.MkdirAll(dir, 0755); err != nil { + return err + } + rel := util.OutputOfBinaryRule(target, platform == PlatformWindows) + src := filepath.Join(bazelBin, rel) + dstBase, _ := TrimDotExe(filepath.Base(rel)) + suffix := "" + if includePlatformSuffix { + suffix = SuffixFromPlatform(platform) + } + dstBase = dstBase + suffix + dst := filepath.Join(dir, dstBase) + srcF, err := os.Open(src) + if err != nil { + return err + } + defer closeFileOrPanic(srcF) + dstF, err := os.OpenFile(dst, os.O_WRONLY|os.O_CREATE, 0755) + if err != nil { + return err + } + defer closeFileOrPanic(dstF) + _, err = io.Copy(dstF, srcF) + return err +} + +func stageLibraries(platform Platform, bazelBin string, dir string) error { + if err := os.MkdirAll(dir, 0755); err != nil { + return err + } + ext := SharedLibraryExtensionFromPlatform(platform) + for _, lib := range CRDBSharedLibraries { + libDir := "lib" + if platform == PlatformWindows { + // NB: On Windows these libs end up in the `bin` subdir. + libDir = "bin" + } + src := filepath.Join(bazelBin, "c-deps", "libgeos_foreign", libDir, lib+ext) + srcF, err := os.Open(src) + if err != nil { + return err + } + defer closeFileOrPanic(srcF) + dst := filepath.Join(dir, filepath.Base(src)) + dstF, err := os.OpenFile(dst, os.O_WRONLY|os.O_CREATE, 0644) + if err != nil { + return err + } + defer closeFileOrPanic(dstF) + _, err = io.Copy(dstF, srcF) + if err != nil { + return err + } + } + return nil +} + +func closeFileOrPanic(f io.Closer) { + err := f.Close() + if err != nil { + panic(errors.Wrapf(err, "could not close file")) + } +} diff --git a/pkg/release/gcs.go b/pkg/release/gcs.go new file mode 100644 index 000000000000..c087d47c5dc8 --- /dev/null +++ b/pkg/release/gcs.go @@ -0,0 +1,112 @@ +// Copyright 2020 The Cockroach Authors. +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. 
+// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package release + +import ( + "context" + "fmt" + "io/ioutil" + "strings" + + "cloud.google.com/go/storage" +) + +// GCSProvider is an implementation of the ObjectPutGetter interface for GCS +type GCSProvider struct { + client *storage.Client + bucket string +} + +// NewGCS creates a new instance of GCSProvider +func NewGCS(bucket string) (*GCSProvider, error) { + ctx := context.Background() + client, err := storage.NewClient(ctx) + if err != nil { + return &GCSProvider{}, err + } + provider := &GCSProvider{ + client: client, + bucket: bucket, + } + return provider, nil +} + +// GetObject implements object retrieval for GCS +func (p *GCSProvider) GetObject(input *GetObjectInput) (*GetObjectOutput, error) { + obj := p.client.Bucket(p.bucket).Object(*input.Key) + ctx := context.Background() + reader, err := obj.NewReader(ctx) + if err != nil { + return &GetObjectOutput{}, err + } + return &GetObjectOutput{ + Body: reader, + }, nil +} + +// Bucket returns bucket name +func (p *GCSProvider) Bucket() string { + return p.bucket +} + +// PutObject implements object upload for GCS +func (p *GCSProvider) PutObject(input *PutObjectInput) error { + obj := p.client.Bucket(p.bucket).Object(*input.Key) + ctx := context.Background() + var body []byte + if input.WebsiteRedirectLocation != nil { + // GCS has no equivalent of S3's website redirect metadata, so copy the + // contents of the redirect target instead. Strip the leading slash to normalize the object name. + copyFrom := strings.TrimPrefix(*input.WebsiteRedirectLocation, "/") + r, err := p.client.Bucket(p.bucket).Object(copyFrom).NewReader(ctx) + if err != nil { + return fmt.Errorf("cannot read %s: %w", copyFrom, err) + } + body, err = ioutil.ReadAll(r) + if err != nil { + return fmt.Errorf("cannot download %s: %w", copyFrom, err) + } + } else { + var err error + body, err = ioutil.ReadAll(input.Body) + if err != nil { + return fmt.Errorf("cannot read content: %w", err) + } + } + wc := obj.NewWriter(ctx) + if _, err := wc.Write(body); err != nil { + return fmt.Errorf("error writing to GCS object %s: %w", *input.Key, err) + } + if err := wc.Close(); err != nil { + return fmt.Errorf("error closing GCS object %s: %w", *input.Key, err) + } + attrs := storage.ObjectAttrsToUpdate{} + updateAttrs := false + if input.ContentDisposition != nil { + updateAttrs = true + attrs.ContentDisposition = *input.ContentDisposition + } + if input.CacheControl != nil { + updateAttrs = true + attrs.CacheControl = *input.CacheControl + } + if updateAttrs { + if _, err := obj.Update(ctx, attrs); err != nil { + return fmt.Errorf("error updating attributes for %s: %w", *input.Key, err) + } + } + return nil +} + +// URL returns key's representation that can be used by gsutil +func (p GCSProvider) URL(key string) string { + return "gcs://" + p.bucket + "/" + strings.TrimPrefix(key, "/") +} diff --git a/pkg/release/release.go b/pkg/release/release.go index bc4d4711d6af..958dc18cdf43 100644 --- a/pkg/release/release.go +++ b/pkg/release/release.go @@ -13,64 +13,22 @@ package release import ( - "archive/tar" - "archive/zip" - "bufio" "bytes" - "compress/gzip" - "crypto/sha256" "fmt" "io" - "log" - "mime" "os" "os/exec" "path/filepath" - "regexp" "strings" - "github.com/aws/aws-sdk-go/service/s3" - bazelutil "github.com/cockroachdb/cockroach/pkg/build/util"
"github.com/cockroachdb/errors" ) -var ( - // linuxStaticLibsRe returns the regexp of all static libraries. - linuxStaticLibsRe = func() *regexp.Regexp { - libs := strings.Join([]string{ - regexp.QuoteMeta("linux-vdso.so."), - regexp.QuoteMeta("librt.so."), - regexp.QuoteMeta("libpthread.so."), - regexp.QuoteMeta("libdl.so."), - regexp.QuoteMeta("libm.so."), - regexp.QuoteMeta("libc.so."), - regexp.QuoteMeta("libresolv.so."), - strings.Replace(regexp.QuoteMeta("ld-linux-ARCH.so."), "ARCH", ".*", -1), - }, "|") - return regexp.MustCompile(libs) - }() - osVersionRe = regexp.MustCompile(`\d+(\.\d+)*-`) -) - var ( // NoCache is a string constant to send no-cache to AWS. NoCache = "no-cache" ) -// Platform is an enumeration of the supported platforms for release. -type Platform int - -const ( - // PlatformLinux is the Linux x86_64 target. - PlatformLinux Platform = iota - // PlatformLinuxArm is the Linux aarch64 target. - PlatformLinuxArm - // PlatformMacOS is the Darwin x86_64 target. - PlatformMacOS - // PlatformWindows is the Windows (mingw) x86_64 target. - PlatformWindows -) - // ChecksumSuffix is a suffix of release tarball checksums const ChecksumSuffix = ".sha256sum" @@ -95,184 +53,6 @@ func (e ExecFn) Run(cmd *exec.Cmd) ([]byte, error) { return e.MockExecFn(cmd) } -// BuildOptions is a set of options that may be applied to a build. -type BuildOptions struct { - // True iff this is a release build. - Release bool - // BuildTag must be set if Release is set, and vice-versea. - BuildTag string - - // ExecFn.Run() is called to execute commands for this build. - // The zero value is appropriate in "real" scenarios but for - // tests you can update ExecFn.MockExecFn. - ExecFn ExecFn -} - -// SuffixFromPlatform returns the suffix that will be appended to the -// `cockroach` binary when built with the given platform. The binary -// itself can be found in pkgDir/cockroach$SUFFIX after the build. -func SuffixFromPlatform(platform Platform) string { - switch platform { - case PlatformLinux: - return ".linux-2.6.32-gnu-amd64" - case PlatformLinuxArm: - return ".linux-3.7.10-gnu-aarch64" - case PlatformMacOS: - // TODO(#release): The architecture is at least 10.10 until v20.2 and 10.15 for - // v21.1 and after. Check whether this can be changed. - return ".darwin-10.9-amd64" - case PlatformWindows: - return ".windows-6.2-amd64.exe" - default: - panic(errors.Newf("unknown platform %d", platform)) - } -} - -// CrossConfigFromPlatform returns the cross*base config corresponding -// to the given platform. (See .bazelrc for more details.) -func CrossConfigFromPlatform(platform Platform) string { - switch platform { - case PlatformLinux: - return "crosslinuxbase" - case PlatformLinuxArm: - return "crosslinuxarmbase" - case PlatformMacOS: - return "crossmacosbase" - case PlatformWindows: - return "crosswindowsbase" - default: - panic(errors.Newf("unknown platform %d", platform)) - } -} - -// TargetTripleFromPlatform returns the target triple that will be baked -// into the cockroach binary for the given platform. -func TargetTripleFromPlatform(platform Platform) string { - switch platform { - case PlatformLinux: - return "x86_64-pc-linux-gnu" - case PlatformLinuxArm: - return "aarch64-unknown-linux-gnu" - case PlatformMacOS: - return "x86_64-apple-darwin19" - case PlatformWindows: - return "x86_64-w64-mingw32" - default: - panic(errors.Newf("unknown platform %d", platform)) - } -} - -// SharedLibraryExtensionFromPlatform returns the shared library extensions for a given Platform. 
-func SharedLibraryExtensionFromPlatform(platform Platform) string { - switch platform { - case PlatformLinux, PlatformLinuxArm: - return ".so" - case PlatformWindows: - return ".dll" - case PlatformMacOS: - return ".dylib" - default: - panic(errors.Newf("unknown platform %d", platform)) - } -} - -// MakeWorkload makes the bin/workload binary. -func MakeWorkload(pkgDir string) error { - // NB: workload doesn't need anything stamped so we can use `crosslinux` - // rather than `crosslinuxbase`. - cmd := exec.Command("bazel", "build", "//pkg/cmd/workload", "--config=crosslinux", "--config=ci") - cmd.Dir = pkgDir - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - log.Printf("%s", cmd.Args) - err := cmd.Run() - if err != nil { - return err - } - bazelBin, err := getPathToBazelBin(ExecFn{}, pkgDir, []string{"--config=crosslinux", "--config=ci"}) - if err != nil { - return err - } - return stageBinary("//pkg/cmd/workload", PlatformLinux, bazelBin, filepath.Join(pkgDir, "bin")) -} - -// MakeRelease makes the release binary and associated files. -func MakeRelease(platform Platform, opts BuildOptions, pkgDir string) error { - buildArgs := []string{"build", "//pkg/cmd/cockroach", "//c-deps:libgeos", "//pkg/cmd/cockroach-sql"} - targetTriple := TargetTripleFromPlatform(platform) - if opts.Release { - if opts.BuildTag == "" { - return errors.Newf("must set BuildTag if Release is set") - } - buildArgs = append(buildArgs, fmt.Sprintf("--workspace_status_command=./build/bazelutil/stamp.sh %s official-binary %s release", targetTriple, opts.BuildTag)) - } else { - if opts.BuildTag != "" { - return errors.Newf("cannot set BuildTag if Release is not set") - } - buildArgs = append(buildArgs, fmt.Sprintf("--workspace_status_command=./build/bazelutil/stamp.sh %s official-binary", targetTriple)) - } - configs := []string{"-c", "opt", "--config=ci", "--config=with_ui", fmt.Sprintf("--config=%s", CrossConfigFromPlatform(platform))} - buildArgs = append(buildArgs, configs...) - cmd := exec.Command("bazel", buildArgs...) - cmd.Dir = pkgDir - cmd.Stderr = os.Stderr - log.Printf("%s", cmd.Args) - stdoutBytes, err := opts.ExecFn.Run(cmd) - if err != nil { - return errors.Wrapf(err, "failed to run %s: %s", cmd.Args, string(stdoutBytes)) - } - - // Stage binaries from bazel-bin. 
- bazelBin, err := getPathToBazelBin(opts.ExecFn, pkgDir, configs) - if err != nil { - return err - } - if err := stageBinary("//pkg/cmd/cockroach", platform, bazelBin, pkgDir); err != nil { - return err - } - // TODO: strip the bianry - if err := stageBinary("//pkg/cmd/cockroach-sql", platform, bazelBin, pkgDir); err != nil { - return err - } - if err := stageLibraries(platform, bazelBin, filepath.Join(pkgDir, "lib")); err != nil { - return err - } - - if platform == PlatformLinux { - suffix := SuffixFromPlatform(platform) - binaryName := "./cockroach" + suffix - - cmd := exec.Command(binaryName, "version") - cmd.Dir = pkgDir - cmd.Env = append(cmd.Env, "MALLOC_CONF=prof:true") - cmd.Stderr = os.Stderr - log.Printf("%s %s", cmd.Env, cmd.Args) - stdoutBytes, err := opts.ExecFn.Run(cmd) - if err != nil { - return errors.Wrapf(err, "%s %s: %s", cmd.Env, cmd.Args, string(stdoutBytes)) - } - - cmd = exec.Command("ldd", binaryName) - cmd.Dir = pkgDir - cmd.Stderr = os.Stderr - log.Printf("%s %s", cmd.Env, cmd.Args) - stdoutBytes, err = opts.ExecFn.Run(cmd) - if err != nil { - log.Fatalf("%s %s: out=%s err=%v", cmd.Env, cmd.Args, string(stdoutBytes), err) - } - scanner := bufio.NewScanner(bytes.NewReader(stdoutBytes)) - for scanner.Scan() { - if line := scanner.Text(); !linuxStaticLibsRe.MatchString(line) { - return errors.Newf("%s is not properly statically linked:\n%s", binaryName, line) - } - } - if err := scanner.Err(); err != nil { - return err - } - } - return nil -} - // TrimDotExe trims '.exe. from `name` and returns the result (and whether any // trimming has occurred). func TrimDotExe(name string) (string, bool) { @@ -280,41 +60,16 @@ func TrimDotExe(name string) (string, bool) { return strings.TrimSuffix(name, dotExe), strings.HasSuffix(name, dotExe) } -// S3Putter is an interface allowing uploads to S3. -type S3Putter interface { - PutObject(*s3.PutObjectInput) (*s3.PutObjectOutput, error) -} - -// S3KeyRelease extracts the target archive base and archive -// name for the given parameters. -func S3KeyRelease(platform Platform, versionStr string, binaryPrefix string) (string, string) { - suffix := SuffixFromPlatform(platform) - targetSuffix, hasExe := TrimDotExe(suffix) - // TODO(tamird): remove this weirdness. Requires updating - // "users" e.g. docs, cockroachdb/cockroach-go, maybe others. - if platform == PlatformLinux { - targetSuffix = strings.Replace(targetSuffix, "gnu-", "", -1) - targetSuffix = osVersionRe.ReplaceAllLiteralString(targetSuffix, "") - } - - archiveBase := fmt.Sprintf("%s-%s", binaryPrefix, versionStr) - targetArchiveBase := archiveBase + targetSuffix - if hasExe { - return targetArchiveBase, targetArchiveBase + ".zip" - } - return targetArchiveBase, targetArchiveBase + ".tgz" -} - // NonReleaseFile is a file to upload when publishing a non-release. type NonReleaseFile struct { - // S3FileName is the name of the file stored in S3. - S3FileName string - // S3FilePath is the path the file should be stored within the Cockroach bucket. - S3FilePath string - // S3RedirectPathPrefix is the prefix of the path that redirects to the S3FilePath. + // FileName is the name of the file stored in the cloud. + FileName string + // FilePath is the path the file should be stored within the Cockroach bucket. + FilePath string + // RedirectPathPrefix is the prefix of the path that redirects to the FilePath. // It is suffixed with .VersionStr or .LATEST, depending on whether the branch is // the master branch. 
- S3RedirectPathPrefix string + RedirectPathPrefix string // LocalAbsolutePath is the location of the file to upload in the local OS. LocalAbsolutePath string @@ -336,10 +91,10 @@ func MakeCRDBBinaryNonReleaseFile(localAbsolutePath string, versionStr string) N } return NonReleaseFile{ - S3FileName: fileName, - S3FilePath: fileName, - S3RedirectPathPrefix: remoteName, - LocalAbsolutePath: localAbsolutePath, + FileName: fileName, + FilePath: fileName, + RedirectPathPrefix: remoteName, + LocalAbsolutePath: localAbsolutePath, } } @@ -361,75 +116,16 @@ func MakeCRDBLibraryNonReleaseFiles( files = append( files, NonReleaseFile{ - S3FileName: fmt.Sprintf("%s%s", remoteFileName, ext), - S3FilePath: fmt.Sprintf("lib/%s%s", remoteFileName, ext), - S3RedirectPathPrefix: fmt.Sprintf("lib/%s%s", remoteFileNameBase, ext), - LocalAbsolutePath: filepath.Join(localAbsoluteBasePath, "lib", localFileName+ext), + FileName: fmt.Sprintf("%s%s", remoteFileName, ext), + FilePath: fmt.Sprintf("lib/%s%s", remoteFileName, ext), + RedirectPathPrefix: fmt.Sprintf("lib/%s%s", remoteFileNameBase, ext), + LocalAbsolutePath: filepath.Join(localAbsoluteBasePath, "lib", localFileName+ext), }, ) } return files } -// PutNonReleaseOptions are options to pass into PutNonRelease. -type PutNonReleaseOptions struct { - // Branch is the branch from which the release is being uploaded from. - Branch string - // BucketName is the bucket to upload the files to. - BucketName string - - // Files are all the files to be uploaded into S3. - Files []NonReleaseFile -} - -// PutNonRelease uploads non-release related files to S3. -// Files are uploaded to /cockroach/ for each non release file. -// A latest key is then put at cockroach/. that redirects -// to the above file. -func PutNonRelease(svc S3Putter, o PutNonReleaseOptions) { - const repoName = "cockroach" - for _, f := range o.Files { - disposition := mime.FormatMediaType("attachment", map[string]string{ - "filename": f.S3FileName, - }) - - fileToUpload, err := os.Open(f.LocalAbsolutePath) - if err != nil { - log.Fatalf("failed to open %s: %s", f.LocalAbsolutePath, err) - } - defer func() { - _ = fileToUpload.Close() - }() - - // NB: The leading slash is required to make redirects work - // correctly since we reuse this key as the redirect location. - versionKey := fmt.Sprintf("/%s/%s", repoName, f.S3FilePath) - log.Printf("Uploading to s3://%s%s", o.BucketName, versionKey) - if _, err := svc.PutObject(&s3.PutObjectInput{ - Bucket: &o.BucketName, - ContentDisposition: &disposition, - Key: &versionKey, - Body: fileToUpload, - }); err != nil { - log.Fatalf("s3 upload %s: %s", versionKey, err) - } - - latestSuffix := o.Branch - if latestSuffix == "master" { - latestSuffix = "LATEST" - } - latestKey := fmt.Sprintf("%s/%s.%s", repoName, f.S3RedirectPathPrefix, latestSuffix) - if _, err := svc.PutObject(&s3.PutObjectInput{ - Bucket: &o.BucketName, - CacheControl: &NoCache, - Key: &latestKey, - WebsiteRedirectLocation: &versionKey, - }); err != nil { - log.Fatalf("s3 redirect to %s: %s", versionKey, err) - } - } -} - // ArchiveFile is a file to store in the a archive for a release. type ArchiveFile struct { // LocalAbsolutePath is the location of the file to upload include in the archive on the local OS. @@ -467,201 +163,3 @@ func MakeCRDBLibraryArchiveFiles(pkgDir string, platform Platform) []ArchiveFile } return files } - -// PutReleaseOptions are options to for the PutRelease function. -type PutReleaseOptions struct { - // BucketName is the bucket to upload the files to. 
- BucketName string - // NoCache is true if we should set the NoCache option to S3. - NoCache bool - // Platform is the platform of the release. - Platform Platform - // VersionStr is the version (SHA/branch name) of the release. - VersionStr string - - // Files are all the files to be included in the archive. - Files []ArchiveFile - ArchivePrefix string -} - -// PutRelease uploads a compressed archive containing the release -// files and a checksum file of the archive to S3. -func PutRelease(svc S3Putter, o PutReleaseOptions) { - targetArchiveBase, targetArchive := S3KeyRelease(o.Platform, o.VersionStr, o.ArchivePrefix) - var body bytes.Buffer - - if strings.HasSuffix(targetArchive, ".zip") { - zw := zip.NewWriter(&body) - - for _, f := range o.Files { - file, err := os.Open(f.LocalAbsolutePath) - if err != nil { - log.Fatalf("failed to open file: %s", f.LocalAbsolutePath) - } - defer func() { _ = file.Close() }() - - stat, err := file.Stat() - if err != nil { - log.Fatalf("failed to stat: %s", f.LocalAbsolutePath) - } - - zipHeader, err := zip.FileInfoHeader(stat) - if err != nil { - log.Fatal(err) - } - zipHeader.Name = filepath.Join(targetArchiveBase, f.ArchiveFilePath) - zipHeader.Method = zip.Deflate - - zfw, err := zw.CreateHeader(zipHeader) - if err != nil { - log.Fatal(err) - } - if _, err := io.Copy(zfw, file); err != nil { - log.Fatal(err) - } - } - if err := zw.Close(); err != nil { - log.Fatal(err) - } - } else { - gzw := gzip.NewWriter(&body) - tw := tar.NewWriter(gzw) - for _, f := range o.Files { - - file, err := os.Open(f.LocalAbsolutePath) - if err != nil { - log.Fatalf("failed to open file: %s", f.LocalAbsolutePath) - } - defer func() { _ = file.Close() }() - - stat, err := file.Stat() - if err != nil { - log.Fatalf("failed to stat: %s", f.LocalAbsolutePath) - } - - // Set the tar header from the file info. Overwrite name. - tarHeader, err := tar.FileInfoHeader(stat, "") - if err != nil { - log.Fatal(err) - } - tarHeader.Name = filepath.Join(targetArchiveBase, f.ArchiveFilePath) - if err := tw.WriteHeader(tarHeader); err != nil { - log.Fatal(err) - } - - if _, err := io.Copy(tw, file); err != nil { - log.Fatal(err) - } - } - if err := tw.Close(); err != nil { - log.Fatal(err) - } - if err := gzw.Close(); err != nil { - log.Fatal(err) - } - } - - log.Printf("Uploading to s3://%s/%s", o.BucketName, targetArchive) - putObjectInput := s3.PutObjectInput{ - Bucket: &o.BucketName, - Key: &targetArchive, - Body: bytes.NewReader(body.Bytes()), - } - if o.NoCache { - putObjectInput.CacheControl = &NoCache - } - if _, err := svc.PutObject(&putObjectInput); err != nil { - log.Fatalf("s3 upload %s: %s", targetArchive, err) - } - // Generate a SHA256 checksum file with a single entry. - checksumContents := fmt.Sprintf("%x %s\n", sha256.Sum256(body.Bytes()), - filepath.Base(targetArchive)) - targetChecksum := targetArchive + ChecksumSuffix - log.Printf("Uploading to s3://%s/%s", o.BucketName, targetChecksum) - putObjectInputChecksum := s3.PutObjectInput{ - Bucket: &o.BucketName, - Key: &targetChecksum, - Body: strings.NewReader(checksumContents), - } - if o.NoCache { - putObjectInputChecksum.CacheControl = &NoCache - } - if _, err := svc.PutObject(&putObjectInputChecksum); err != nil { - log.Fatalf("s3 upload %s: %s", targetChecksum, err) - } -} - -func getPathToBazelBin(execFn ExecFn, pkgDir string, configArgs []string) (string, error) { - args := []string{"info", "bazel-bin"} - args = append(args, configArgs...) - cmd := exec.Command("bazel", args...) 
- cmd.Dir = pkgDir - cmd.Stderr = os.Stderr - stdoutBytes, err := execFn.Run(cmd) - if err != nil { - return "", errors.Wrapf(err, "failed to run %s: %s", cmd.Args, string(stdoutBytes)) - } - return strings.TrimSpace(string(stdoutBytes)), nil -} - -func stageBinary(target string, platform Platform, bazelBin string, dir string) error { - if err := os.MkdirAll(dir, 0755); err != nil { - return err - } - rel := bazelutil.OutputOfBinaryRule(target, platform == PlatformWindows) - src := filepath.Join(bazelBin, rel) - dstBase, _ := TrimDotExe(filepath.Base(rel)) - suffix := SuffixFromPlatform(platform) - dstBase = dstBase + suffix - dst := filepath.Join(dir, dstBase) - srcF, err := os.Open(src) - if err != nil { - return err - } - defer closeFileOrPanic(srcF) - dstF, err := os.OpenFile(dst, os.O_WRONLY|os.O_CREATE, 0755) - if err != nil { - return err - } - defer closeFileOrPanic(dstF) - _, err = io.Copy(dstF, srcF) - return err -} - -func stageLibraries(platform Platform, bazelBin string, dir string) error { - if err := os.MkdirAll(dir, 0755); err != nil { - return err - } - ext := SharedLibraryExtensionFromPlatform(platform) - for _, lib := range CRDBSharedLibraries { - libDir := "lib" - if platform == PlatformWindows { - // NB: On Windows these libs end up in the `bin` subdir. - libDir = "bin" - } - src := filepath.Join(bazelBin, "c-deps", "libgeos", libDir, lib+ext) - srcF, err := os.Open(src) - if err != nil { - return err - } - defer closeFileOrPanic(srcF) - dst := filepath.Join(dir, filepath.Base(src)) - dstF, err := os.OpenFile(dst, os.O_WRONLY|os.O_CREATE, 0644) - if err != nil { - return err - } - defer closeFileOrPanic(dstF) - _, err = io.Copy(dstF, srcF) - if err != nil { - return err - } - } - return nil -} - -func closeFileOrPanic(f io.Closer) { - err := f.Close() - if err != nil { - panic(errors.Wrapf(err, "could not close file")) - } -} diff --git a/pkg/release/s3.go b/pkg/release/s3.go new file mode 100644 index 000000000000..6dce5d816d42 --- /dev/null +++ b/pkg/release/s3.go @@ -0,0 +1,80 @@ +// Copyright 2020 The Cockroach Authors. +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package release + +import ( + "fmt" + "strings" + + "github.com/aws/aws-sdk-go/aws" + "github.com/aws/aws-sdk-go/aws/session" + "github.com/aws/aws-sdk-go/service/s3" +) + +// S3Provider is an implementation of the ObjectPutGetter interface for S3 +type S3Provider struct { + service *s3.S3 + bucket *string +} + +// NewS3 creates a new instance of S3Provider +func NewS3(region string, bucket string) (*S3Provider, error) { + sess, err := session.NewSession(&aws.Config{ + Region: aws.String(region), + }) + if err != nil { + return &S3Provider{}, err + } + return &S3Provider{ + service: s3.New(sess), + bucket: &bucket, + }, nil +} + +// GetObject implements object retrieval for S3 +func (p *S3Provider) GetObject(input *GetObjectInput) (*GetObjectOutput, error) { + obj, err := p.service.GetObject(&s3.GetObjectInput{ + Bucket: p.bucket, + Key: input.Key, + }) + if err != nil { + return &GetObjectOutput{}, err + } + return &GetObjectOutput{ + Body: obj.Body, + }, nil +} + +// PutObject implements object upload for S3 +func (p *S3Provider) PutObject(input *PutObjectInput) error { + putObjectInput := s3.PutObjectInput{ + Bucket: p.bucket, + Key: input.Key, + Body: input.Body, + CacheControl: input.CacheControl, + ContentDisposition: input.ContentDisposition, + WebsiteRedirectLocation: input.WebsiteRedirectLocation, + } + if _, err := p.service.PutObject(&putObjectInput); err != nil { + return fmt.Errorf("s3 upload %s: %w", *input.Key, err) + } + return nil +} + +// Bucket returns bucket name +func (p *S3Provider) Bucket() string { + return *p.bucket +} + +// URL returns key's representation that can be used by AWS CLI +func (p S3Provider) URL(key string) string { + return "s3://" + *p.bucket + "/" + strings.TrimPrefix(key, "/") +} diff --git a/pkg/release/upload.go b/pkg/release/upload.go new file mode 100644 index 000000000000..508fab5091fb --- /dev/null +++ b/pkg/release/upload.go @@ -0,0 +1,292 @@ +// Copyright 2020 The Cockroach Authors. +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package release + +import ( + "archive/tar" + "archive/zip" + "bytes" + "compress/gzip" + "crypto/sha256" + "fmt" + "io" + "log" + "mime" + "os" + "path/filepath" + "strings" +) + +// PutReleaseOptions are options for the PutRelease function. +type PutReleaseOptions struct { + // NoCache is true if we should set the NoCache option. + NoCache bool + // Platform is the platform of the release. + Platform Platform + // VersionStr is the version (SHA/branch name) of the release. + VersionStr string + + // Files are all the files to be included in the archive. + Files []ArchiveFile + ArchivePrefix string +} + +// PutNonReleaseOptions are options to pass into PutNonRelease. +type PutNonReleaseOptions struct { + // Branch is the branch from which the release is being uploaded. + Branch string + // Files are all the files to be uploaded. + Files []NonReleaseFile +} + +// PutRelease uploads a compressed archive containing the release +// files and a checksum file of the archive.
+func PutRelease(svc ObjectPutGetter, o PutReleaseOptions) { + keys := makeArchiveKeys(o.Platform, o.VersionStr, o.ArchivePrefix) + var body bytes.Buffer + + if strings.HasSuffix(keys.archive, ".zip") { + if err := createZip(o.Files, &body, keys.base); err != nil { + log.Fatalf("cannot create zip %s: %s", keys.archive, err) + } + } else { + if err := createTarball(o.Files, &body, keys.base); err != nil { + log.Fatalf("cannot create tarball %s: %s", keys.archive, err) + } + } + + log.Printf("Uploading to %s", svc.URL(keys.archive)) + putObjectInput := PutObjectInput{ + Key: &keys.archive, + Body: bytes.NewReader(body.Bytes()), + } + if o.NoCache { + putObjectInput.CacheControl = &NoCache + } + if err := svc.PutObject(&putObjectInput); err != nil { + log.Fatalf("failed uploading %s: %s", keys.archive, err) + } + // Generate a SHA256 checksum file with a single entry. + checksumContents := fmt.Sprintf("%x %s\n", sha256.Sum256(body.Bytes()), + filepath.Base(keys.archive)) + targetChecksum := keys.archive + ChecksumSuffix + log.Printf("Uploading to %s", svc.URL(targetChecksum)) + putObjectInputChecksum := PutObjectInput{ + Key: &targetChecksum, + Body: strings.NewReader(checksumContents), + } + if o.NoCache { + putObjectInputChecksum.CacheControl = &NoCache + } + if err := svc.PutObject(&putObjectInputChecksum); err != nil { + log.Fatalf("failed uploading %s: %s", targetChecksum, err) + } +} + +func createZip(files []ArchiveFile, body *bytes.Buffer, prefix string) error { + zw := zip.NewWriter(body) + for _, f := range files { + file, err := os.Open(f.LocalAbsolutePath) + if err != nil { + return fmt.Errorf("failed to open file: %s", f.LocalAbsolutePath) + } + defer func() { _ = file.Close() }() + + stat, err := file.Stat() + if err != nil { + return fmt.Errorf("failed to stat: %s", f.LocalAbsolutePath) + } + + zipHeader, err := zip.FileInfoHeader(stat) + if err != nil { + return err + } + zipHeader.Name = filepath.Join(prefix, f.ArchiveFilePath) + zipHeader.Method = zip.Deflate + + zfw, err := zw.CreateHeader(zipHeader) + if err != nil { + return err + } + if _, err := io.Copy(zfw, file); err != nil { + return err + } + } + if err := zw.Close(); err != nil { + return err + } + return nil +} + +func createTarball(files []ArchiveFile, body *bytes.Buffer, prefix string) error { + gzw := gzip.NewWriter(body) + tw := tar.NewWriter(gzw) + for _, f := range files { + file, err := os.Open(f.LocalAbsolutePath) + if err != nil { + return fmt.Errorf("failed to open file: %s", f.LocalAbsolutePath) + } + defer func() { _ = file.Close() }() + + stat, err := file.Stat() + if err != nil { + return fmt.Errorf("failed to stat: %s", f.LocalAbsolutePath) + } + + // Set the tar header from the file info. Overwrite name. + tarHeader, err := tar.FileInfoHeader(stat, "") + if err != nil { + return err + } + tarHeader.Name = filepath.Join(prefix, f.ArchiveFilePath) + if err := tw.WriteHeader(tarHeader); err != nil { + return err + } + + if _, err := io.Copy(tw, file); err != nil { + return err + } + } + if err := tw.Close(); err != nil { + return err + } + if err := gzw.Close(); err != nil { + return err + } + return nil +} + +// PutNonRelease uploads non-release related files. +// Files are uploaded to cockroach/<FilePath> for each non-release file. +// A `latest` key is then put at cockroach/<RedirectPathPrefix>.<branch or LATEST> that redirects +// to the above file.
+func PutNonRelease(svc ObjectPutGetter, o PutNonReleaseOptions) { + const nonReleasePrefix = "cockroach" + for _, f := range o.Files { + disposition := mime.FormatMediaType("attachment", map[string]string{ + "filename": f.FileName, + }) + + fileToUpload, err := os.Open(f.LocalAbsolutePath) + if err != nil { + log.Fatalf("failed to open %s: %s", f.LocalAbsolutePath, err) + } + defer func() { + _ = fileToUpload.Close() + }() + + versionKey := fmt.Sprintf("%s/%s", nonReleasePrefix, f.FilePath) + log.Printf("Uploading to %s", svc.URL(versionKey)) + if err := svc.PutObject(&PutObjectInput{ + ContentDisposition: &disposition, + Key: &versionKey, + Body: fileToUpload, + }); err != nil { + log.Fatalf("failed uploading %s: %s", versionKey, err) + } + + latestSuffix := o.Branch + if latestSuffix == "master" { + latestSuffix = "LATEST" + } + latestKey := fmt.Sprintf("%s/%s.%s", nonReleasePrefix, f.RedirectPathPrefix, latestSuffix) + // NB: The leading slash is required to make redirects work + // correctly since we reuse this key as the redirect location. + target := "/" + versionKey + if err := svc.PutObject(&PutObjectInput{ + CacheControl: &NoCache, + Key: &latestKey, + WebsiteRedirectLocation: &target, + }); err != nil { + log.Fatalf("failed adding a redirect to %s: %s", target, err) + } + } +} + +type archiveKeys struct { + base string + archive string +} + +// makeArchiveKeys extracts the target archive base and archive +// name for the given parameters. +func makeArchiveKeys(platform Platform, versionStr string, archivePrefix string) archiveKeys { + suffix := SuffixFromPlatform(platform) + targetSuffix, hasExe := TrimDotExe(suffix) + if platform == PlatformLinux { + targetSuffix = strings.Replace(targetSuffix, "gnu-", "", -1) + targetSuffix = osVersionRe.ReplaceAllLiteralString(targetSuffix, "") + } + archiveBase := fmt.Sprintf("%s-%s", archivePrefix, versionStr) + targetArchiveBase := archiveBase + targetSuffix + keys := archiveKeys{ + base: targetArchiveBase, + } + if hasExe { + keys.archive = targetArchiveBase + ".zip" + } else { + keys.archive = targetArchiveBase + ".tgz" + } + return keys +} + +const latestStr = "latest" + +// LatestOpts are parameters passed to MarkLatestReleaseWithSuffix +type LatestOpts struct { + Platform Platform + VersionStr string +} + +// MarkLatestReleaseWithSuffix adds redirects to release files using "latest" instead of the version +func MarkLatestReleaseWithSuffix(svc ObjectPutGetter, o LatestOpts, suffix string) { + keys := makeArchiveKeys(o.Platform, o.VersionStr, "cockroach") + versionedKey := keys.archive + suffix + oLatest := o + oLatest.VersionStr = latestStr + latestKeys := makeArchiveKeys(oLatest.Platform, oLatest.VersionStr, "cockroach") + latestKey := latestKeys.archive + suffix + log.Printf("Adding redirect to %s", svc.URL(latestKey)) + if err := svc.PutObject(&PutObjectInput{ + CacheControl: &NoCache, + Key: &latestKey, + WebsiteRedirectLocation: &versionedKey, + }); err != nil { + log.Fatalf("failed adding a redirect to %s: %s", versionedKey, err) + } +} + +// GetObjectInput specifies input parameters for GetObject +type GetObjectInput struct { + Key *string +} + +// GetObjectOutput specifies output parameters for GetObject +type GetObjectOutput struct { + Body io.ReadCloser +} + +// PutObjectInput specifies input parameters for PutObject +type PutObjectInput struct { + Key *string + Body io.ReadSeeker + CacheControl *string + ContentDisposition *string + WebsiteRedirectLocation *string +} + +// ObjectPutGetter specifies a minimal interface for cloud
storage providers +type ObjectPutGetter interface { + GetObject(*GetObjectInput) (*GetObjectOutput, error) + PutObject(*PutObjectInput) error + Bucket() string + URL(string) string +}
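
A minimal usage sketch, not part of this change: it illustrates how a publishing command is expected to wire the new providers together, since both *S3Provider and *GCSProvider satisfy release.ObjectPutGetter and the same upload helpers can be fanned out over S3 and GCS (the updated tests exercise exactly this via run([]release.ObjectPutGetter{&s3, &gcs}, ...)). The region, bucket names, version string, and local binary path below are placeholders, and real callers additionally need AWS credentials and GOOGLE_APPLICATION_CREDENTIALS set in the environment, as the CI scripts arrange.

package main

import (
	"log"

	"github.com/cockroachdb/cockroach/pkg/release"
)

func main() {
	// Placeholder region and bucket names; the CI scripts pass the real ones.
	s3, err := release.NewS3("us-east-1", "cockroach")
	if err != nil {
		log.Fatalf("creating S3 provider: %s", err)
	}
	gcs, err := release.NewGCS("cockroach-edge-artifacts-prod")
	if err != nil {
		log.Fatalf("creating GCS provider: %s", err)
	}

	// Hypothetical binary staged by MakeRelease; the version string is a fake SHA.
	file := release.MakeCRDBBinaryNonReleaseFile("./cockroach.linux-gnu-amd64", "0000000000")

	// The same upload helper runs unchanged against either backend.
	for _, svc := range []release.ObjectPutGetter{s3, gcs} {
		release.PutNonRelease(svc, release.PutNonReleaseOptions{
			Branch: "master",
			Files:  []release.NonReleaseFile{file},
		})
	}
}

Keeping the fan-out in the callers, rather than teaching pkg/release about multiple clouds, is what lets ObjectPutGetter stay a four-method interface while each backend keeps its own bucket configuration.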