# CI workflow: cross-compiles the `story` binary for five OS/arch targets,
# archives each build, and uploads the archives to S3 (a public prefix for
# every platform, plus an internal prefix + manifest for linux-amd64).
# A follow-up job prunes old archives, keeping the most recent
# NUM_INTERNAL_BINARIES_TO_KEEP / NUM_PUBLIC_BINARIES_TO_KEEP objects.
name: Build and Upload iliad Binary

on:
  workflow_dispatch:
  push:
    branches:
      - main
  pull_request:
    branches:
      - main

permissions:
  id-token: write        # required for OIDC role assumption (configure-aws-credentials v2+)
  contents: write
  pull-requests: write
  actions: write

env:
  NUM_INTERNAL_BINARIES_TO_KEEP: 50
  NUM_PUBLIC_BINARIES_TO_KEEP: 400
  S3_BUCKET: story-geth-binaries
  BIN_NAME: story

jobs:
  # Add timestamp
  Timestamp:
    uses: storyprotocol/gha-workflows/.github/workflows/reusable-timestamp.yml@main

  # Build and upload the iliad binary
  build_and_push:
    needs: Timestamp
    runs-on: ubuntu-latest
    strategy:
      matrix:
        platform: [linux-amd64, linux-arm64, darwin-amd64, darwin-arm64, windows-amd64]

    steps:
      - name: Checkout code
        uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b # v4.1.5

      - name: Configure AWS credentials
        # v1 is deprecated (Node 12 runtime) and predates OIDC support;
        # the `id-token: write` permission above is only usable with v2+.
        uses: aws-actions/configure-aws-credentials@v4
        with:
          role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_SERVICE_STAGING }}:role/iac-max-role
          aws-region: us-west-1
          role-session-name: github-actions

      - name: Extract the version
        # Parses Version{Major,Minor,Patch,Meta} out of the Go buildinfo source
        # and exports VERSION (e.g. "1.2.3" or "1.2.3-beta") for later steps.
        run: |
          PARAMS_FILE="./lib/buildinfo/buildinfo.go"
          VERSION_MAJOR=$(awk -F= '/VersionMajor/ {gsub(/[^0-9]/, "", $2); printf "%s", $2}' $PARAMS_FILE)
          VERSION_MINOR=$(awk -F= '/VersionMinor/ {gsub(/[^0-9]/, "", $2); printf "%s", $2}' $PARAMS_FILE)
          VERSION_PATCH=$(awk -F= '/VersionPatch/ {gsub(/[^0-9]/, "", $2); printf "%s", $2}' $PARAMS_FILE)
          VERSION_META=$(awk -F\" '/VersionMeta/ {print $2; exit}' $PARAMS_FILE)

          VERSION="$VERSION_MAJOR.$VERSION_MINOR.$VERSION_PATCH"
          if [ "$VERSION_META" != "stable" ]; then
            VERSION+="-${VERSION_META}"
          fi

          echo "Version extracted: $VERSION"
          echo "VERSION=$VERSION" >> $GITHUB_ENV

      - name: Build the story binary
        run: |
          IFS="-" read -r GOOS GOARCH <<< "${{ matrix.platform }}"

          if [ "$GOOS" = "windows" ]; then
            export BIN_NAME="${BIN_NAME}.exe"
          fi

          echo "Building for $GOOS/$GOARCH..."
          cd client
          # Do NOT redirect output to /dev/null: a failed cross-compile must
          # surface its compiler errors in the job log.
          env GOOS=$GOOS GOARCH=$GOARCH go build -o $BIN_NAME

          # Apply chmod only for non-windows platforms
          if [ "$GOOS" != "windows" ]; then
            chmod +x $BIN_NAME
          fi

      - name: Upload the story binary to S3
        run: |
          export TZ=America/Los_Angeles
          IFS="-" read -r GOOS GOARCH <<< "${{ matrix.platform }}"
          TIMESTAMP=$(date +%Y%m%d%H%M%S)
          HUMAN_READABLE_VERSION=$(date)
          COMMIT_HASH=$(git rev-parse --short HEAD)
          FOLDER_NAME="story-${{ matrix.platform }}-${VERSION}-${COMMIT_HASH}"
          ARCHIVE_NAME="${FOLDER_NAME}.tar.gz"

          # BIN_NAME exported in the build step does not persist across steps,
          # so the .exe suffix must be re-applied here for windows targets.
          BIN_NAME_WITH_PATH=./client/$BIN_NAME
          if [ "$GOOS" = "windows" ]; then
            export BIN_NAME_WITH_PATH="${BIN_NAME_WITH_PATH}.exe"
          fi

          mkdir $FOLDER_NAME
          mv $BIN_NAME_WITH_PATH $FOLDER_NAME/

          echo "Archiving the iliad binary..."
          tar -czvf $ARCHIVE_NAME $FOLDER_NAME

          if [ $? -ne 0 ]; then
            echo "Failed to create the archive: $ARCHIVE_NAME"
            exit 1
          fi

          aws s3 cp $ARCHIVE_NAME s3://$S3_BUCKET/$BIN_NAME-public/$ARCHIVE_NAME --quiet

          if [ "${{ matrix.platform }}" = "linux-amd64" ]; then
            echo "Uploading binary for internal use..."
            aws s3 cp $ARCHIVE_NAME s3://$S3_BUCKET/$BIN_NAME/$BIN_NAME-$TIMESTAMP --quiet

            # Update manifest file
            aws s3 cp s3://$S3_BUCKET/$BIN_NAME/manifest.txt manifest.txt --quiet || touch manifest.txt
            echo "$TIMESTAMP" >> manifest.txt
            aws s3 cp manifest.txt s3://$S3_BUCKET/$BIN_NAME/manifest.txt --quiet
          fi

  cleanup:
    runs-on: ubuntu-latest
    needs: build_and_push
    steps:
      - name: Configure AWS credentials
        # Keep in lockstep with the build job: v2+ is required for OIDC.
        uses: aws-actions/configure-aws-credentials@v4
        with:
          role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_SERVICE_STAGING }}:role/iac-max-role
          aws-region: us-west-1
          role-session-name: github-actions

      - name: Cleanup internal binaries
        run: |
          # Deletes all but the newest $2 objects under prefix $1 in $S3_BUCKET,
          # ordered by LastModified.
          cleanup_s3() {
            PREFIX=$1
            KEEP=$2

            echo "Cleaning up in bucket $S3_BUCKET with prefix: $PREFIX, keeping latest $KEEP binaries"

            aws s3api list-objects-v2 --bucket $S3_BUCKET --prefix $PREFIX --query "sort_by(Contents,&LastModified)[*].Key" > all_binaries.json

            # Extract the list of keys, remove the latest $KEEP binaries.
            # `(. // [])` guards the empty-prefix case, where --query emits
            # `null` and a bare `.[0:-N][]` would make jq error out.
            BINARIES_TO_DELETE=$(jq -r "(. // []) | .[0:-${KEEP}] | .[]" all_binaries.json)

            if [ -n "$BINARIES_TO_DELETE" ]; then
              # Delete old binaries
              for key in $BINARIES_TO_DELETE; do
                aws s3 rm s3://$S3_BUCKET/$key --quiet
              done
              echo "Deleted old binaries: $BINARIES_TO_DELETE"
            else
              echo "No old binaries to delete."
            fi
          }

          # Cleanup internal binaries
          cleanup_s3 "${BIN_NAME}/" "${NUM_INTERNAL_BINARIES_TO_KEEP}"

          # Cleanup public binaries
          cleanup_s3 "${BIN_NAME}-public/" "${NUM_PUBLIC_BINARIES_TO_KEEP}"