diff --git a/CHANGELOG.md b/CHANGELOG.md
index 6d7c072..e12e8dd 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,7 @@
+# v2.4.13
+
++ Add `save-cache` property to S3 cache. @ingcsmoreno
+
 # v2.4.12
 
 + Sharing caches between pipelines. See #55 by @irphilli
diff --git a/README.md b/README.md
index 5ff2c0c..62e02eb 100644
--- a/README.md
+++ b/README.md
@@ -1,4 +1,4 @@
-# Cache Buildkite Plugin [![Version badge](https://img.shields.io/badge/cache-v2.4.12-blue?style=flat-square)](https://buildkite.com/plugins) [![CI](https://github.com/gencer/cache-buildkite-plugin/actions/workflows/ci.yml/badge.svg)](https://github.com/gencer/cache-buildkite-plugin/actions/workflows/ci.yml)
+# Cache Buildkite Plugin [![Version badge](https://img.shields.io/badge/cache-v2.4.13-blue?style=flat-square)](https://buildkite.com/plugins) [![CI](https://github.com/gencer/cache-buildkite-plugin/actions/workflows/ci.yml/badge.svg)](https://github.com/gencer/cache-buildkite-plugin/actions/workflows/ci.yml)
 
 ### Tarball, Rsync & S3 Cache Kit for Buildkite. Supports Linux, macOS and Windows*
 
@@ -72,7 +72,7 @@ S3 backend uses **AWS CLI** v**1** or v**2** to copy and download from/to S3 buc
 ```yml
 steps:
   - plugins:
-    - gencer/cache#v2.4.12:
+    - gencer/cache#v2.4.13:
         id: ruby # or ruby-3.0
         backend: s3
         key: "v1-cache-{{ id }}-{{ runner.os }}-{{ checksum 'Gemfile.lock' }}"
@@ -86,6 +86,7 @@ steps:
           bucket: "s3-bucket"
           class: STANDARD # Optional. Defaults to empty which is usually STANDARD or based on policy.
           args: '--option 1' # Optional. Defaults to empty. Any optional argument that can be passed to aws s3 cp command.
+          save-cache: true # Optional. Saves the cache to a temp folder and keeps it between builds/jobs on the same machine.
         paths:
           - 'bundle/vendor'
 ```
@@ -101,7 +102,7 @@ Use `endpoint` and `region` fields to pass host and region parameters to be able
 ```yml
 steps:
   - plugins:
-    - gencer/cache#v2.4.12:
+    - gencer/cache#v2.4.13:
         id: ruby # or ruby-3.0
         backend: s3
         key: "v1-cache-{{ id }}-{{ runner.os }}-{{ checksum 'Gemfile.lock' }}"
@@ -137,7 +138,7 @@ Enabling this interoperability in Google Cloud Storage will generate the respect
 ```yml
 steps:
   - plugins:
-    - gencer/cache#v2.4.12:
+    - gencer/cache#v2.4.13:
         id: ruby # or ruby-3.0
         backend: s3
         key: "v1-cache-{{ id }}-{{ runner.os }}-{{ checksum 'Gemfile.lock' }}"
@@ -192,7 +193,7 @@ You can also use rsync to store your files using the `rsync` backend. Files will
 ```yml
 steps:
   - plugins:
-    - gencer/cache#v2.4.12:
+    - gencer/cache#v2.4.13:
         id: ruby # or ruby-3.0
         backend: rsync
         key: "v1-cache-{{ id }}-{{ runner.os }}-{{ checksum 'Gemfile.lock' }}"
@@ -217,7 +218,7 @@ You can also use tarballs to store your files using the `tarball` backend. Files
 ```yml
 steps:
   - plugins:
-    - gencer/cache#v2.4.12:
+    - gencer/cache#v2.4.13:
         id: ruby # or ruby-3.0
         backend: tarball
         key: "v1-cache-{{ id }}-{{ runner.os }}-{{ checksum 'Gemfile.lock' }}"
@@ -262,7 +263,7 @@ Along with lock files, you can calculate directory that contains multiple files
 ```yml
 steps:
   - plugins:
-    - gencer/cache#v2.4.12:
+    - gencer/cache#v2.4.13:
         id: node # or node-16
         backend: tarball # Optional. Default `backend` is already set to `tarball`
         key: "v1-cache-{{ id }}-{{ runner.os }}-{{ checksum './app/javascript' }}" # Calculate whole 'app/javascript' recursively
@@ -288,7 +289,7 @@ You can skip caching on Pull Requests (Merge Requests) by simply adding `pr: fal
 ```yml
 steps:
   - plugins:
-    - gencer/cache#v2.4.12:
+    - gencer/cache#v2.4.13:
         id: ruby # or ruby-3.0
         backend: s3
         key: "v1-cache-{{ id }}-{{ runner.os }}-{{ checksum 'Gemfile.lock' }}"
@@ -344,8 +345,8 @@ ruby-cache: &ruby-cache
     - 'bundler/vendor'
 
 all-plugins: &all-plugins
-  - gencer/cache#v2.4.12: *node-cache
-  - gencer/cache#v2.4.12: *ruby-cache
+  - gencer/cache#v2.4.13: *node-cache
+  - gencer/cache#v2.4.13: *ruby-cache
   - docker#v3.7.0: ~ # Use your config here
 
 steps:
@@ -370,7 +371,7 @@ steps:
      key: jest
      command: yarn test --runInBand
      plugins:
-      - gencer/cache#v2.4.12: # Define cache *before* docker plugins.
+      - gencer/cache#v2.4.13: # Define cache *before* docker plugins.
           id: ruby # or ruby-3.0
           backend: s3
           key: "v1-cache-{{ id }}-{{ runner.os }}-{{ checksum 'Gemfile.lock' }}"
@@ -395,7 +396,7 @@ steps:
      key: jest
      command: yarn test --runInBand
      plugins:
-      - gencer/cache#v2.4.12:
+      - gencer/cache#v2.4.13:
           id: ruby # or ruby-3.0
           backend: s3
           key: "v1-cache-{{ id }}-{{ runner.os }}-{{ checksum 'Gemfile.lock' }}"
@@ -426,7 +427,7 @@ steps:
      key: jest
      command: yarn test --runInBand
      plugins:
-      - gencer/cache#v2.4.12:
+      - gencer/cache#v2.4.13:
           id: ruby # or ruby-3.0
           backend: s3
           key: "v1-cache-{{ id }}-{{ runner.os }}-{{ checksum 'Gemfile.lock' }}"
@@ -452,7 +453,7 @@ steps:
      key: jest
      command: yarn test --runInBand
      plugins:
-      - gencer/cache#v2.4.12:
+      - gencer/cache#v2.4.13:
           id: ruby # or ruby-3.0
           backend: s3
           key: "v1-cache-{{ id }}-{{ runner.os }}-{{ checksum 'Gemfile.lock' }}"
@@ -477,7 +478,7 @@ steps:
      key: jest
      command: yarn test --runInBand
      plugins:
-      - gencer/cache#v2.4.12:
+      - gencer/cache#v2.4.13:
           id: ruby # or ruby-3.0
           backend: s3
           key: "v1-cache-{{ id }}-{{ runner.os }}-{{ checksum 'Gemfile.lock' }}"
diff --git a/VERSION b/VERSION
index cf95c01..b40e924 100644
--- a/VERSION
+++ b/VERSION
@@ -1 +1 @@
-2.4.12
+2.4.13
diff --git a/lib/backends/s3.bash b/lib/backends/s3.bash
index d510f1e..f5648f4 100644
--- a/lib/backends/s3.bash
+++ b/lib/backends/s3.bash
@@ -5,6 +5,8 @@ BK_DEFAULT_AWS_ARGS=""
 BK_CUSTOM_AWS_ARGS=""
 BK_CACHE_COMPRESS=${BUILDKITE_PLUGIN_CACHE_COMPRESS:-false}
 BK_CACHE_COMPRESS_PROGRAM=${BUILDKITE_PLUGIN_CACHE_COMPRESS_PROGRAM:-gzip}
+BK_CACHE_SAVE_CACHE=${BUILDKITE_PLUGIN_CACHE_S3_SAVE_CACHE:-false}
+BK_CACHE_LOCAL_PATH="/tmp"
 BK_TAR_ARGS=()
 BK_TAR_ADDITIONAL_ARGS="--ignore-failed-read"
 BK_TAR_EXTENSION="tar"
@@ -69,6 +71,13 @@ function restore() {
   BUCKET="${BUILDKITE_PLUGIN_CACHE_S3_BUCKET}/${TKEY}"
   BK_AWS_FOUND=false
 
+  # If save-cache is enabled and the tar file already exists in the tmp dir, use the local copy
+  if [ "${BK_CACHE_SAVE_CACHE}" == "true" ] && [ -f "${BK_CACHE_LOCAL_PATH}/${TAR_FILE}" ]; then
+    echo -e "${BK_LOG_PREFIX}:file_cabinet: Using previously downloaded file ${BK_CACHE_LOCAL_PATH}/${TAR_FILE} since save-cache is enabled."
+    tar ${BK_TAR_EXTRACT_ARGS} "${BK_CACHE_LOCAL_PATH}/${TAR_FILE}" -C .
+    return
+  fi
+
   aws s3api head-object --bucket "${BUILDKITE_PLUGIN_CACHE_S3_BUCKET}" --key "${TKEY}/${TAR_FILE}" ${BK_DEFAULT_AWS_ARGS} || no_head=true
 
   if ${no_head:-false}; then
@@ -103,6 +112,7 @@ function restore() {
 
   if [[ ! "${BK_AWS_FOUND}" =~ (false) ]]; then
     aws s3 cp ${BK_CUSTOM_AWS_ARGS} "s3://${BUCKET}/${TAR_FILE}" .
+ [ "${BK_CACHE_SAVE_CACHE}" == "true" ] && cp "${TAR_FILE}" "${BK_CACHE_LOCAL_PATH}/${TAR_FILE}" tar ${BK_TAR_EXTRACT_ARGS} "${TAR_FILE}" -C . else cache_restore_skip "s3://${BUCKET}/${TAR_FILE}" diff --git a/plugin.yml b/plugin.yml index 8c1c2ad..b7d3086 100644 --- a/plugin.yml +++ b/plugin.yml @@ -26,26 +26,31 @@ configuration: pr: type: boolean s3: - profile: - type: string - bucket: - type: string - class: - type: string - endpoint: - type: string - region: - type: string - args: - type: string + properties: + profile: + type: string + bucket: + type: string + class: + type: string + endpoint: + type: string + region: + type: string + args: + type: string + save-cache: + type: boolean tarball: - path: - type: string - max: - type: number + properties: + path: + type: string + max: + type: number rsync: - path: - type: string + properties: + path: + type: string paths: type: [string, array] env: diff --git a/tests/command.bats b/tests/command.bats index 06bf3bd..93876e0 100644 --- a/tests/command.bats +++ b/tests/command.bats @@ -28,6 +28,7 @@ setup() { run "$PWD/hooks/pre-command" assert_success assert_output --partial "Copied from S3" + refute_output --partial "Using previously downloaded file" assert_output --partial "Extracted tar archive" unset BUILDKITE_PLUGIN_CACHE_KEY @@ -41,6 +42,39 @@ setup() { unstub tar } +@test "Pre-command restores S3 backed cache using local file" { + RANDOM_NUM=$(echo $RANDOM) + + stub tar \ + "-xf /tmp/v1-local-cache-key-${RANDOM_NUM}.tar -C . : echo Extracted tar archive" + + export BUILDKITE_ORGANIZATION_SLUG="my-org" + export BUILDKITE_PIPELINE_SLUG="my-pipeline" + export BUILDKITE_PLUGIN_CACHE_S3_BUCKET="my-bucket" + export BUILDKITE_PLUGIN_CACHE_S3_PROFILE="my-profile" + export BUILDKITE_PLUGIN_CACHE_S3_SAVE_CACHE="true" + export BUILDKITE_PLUGIN_CACHE_BACKEND="s3" + export BUILDKITE_PLUGIN_CACHE_KEY="v1-local-cache-key-${RANDOM_NUM}" + + touch "/tmp/${BUILDKITE_PLUGIN_CACHE_KEY}.tar" + + run "$PWD/hooks/pre-command" + assert_success + refute_output --partial "Copied from S3" + assert_output --partial "Using previously downloaded file" + assert_output --partial "Extracted tar archive" + + unset BUILDKITE_PLUGIN_CACHE_KEY + unset BUILDKITE_PLUGIN_CACHE_BACKEND + unset BUILDKITE_PLUGIN_CACHE_S3_SAVE_CACHE + unset BUILDKITE_PLUGIN_CACHE_S3_PROFILE + unset BUILDKITE_PLUGIN_CACHE_S3_BUCKET + unset BUILDKITE_PIPELINE_SLUG + unset BUILDKITE_ORGANIZATION_SLUG + + unstub tar +} + @test "Post-command syncs artifacts with a single path" { stub mktemp \