name: Build staging site
on:
push:
branches:
- 'main'
# This workflow builds the site with Hugo, compresses it, uploads it to an
# object storage bucket, and updates the sandbox Algolia app to reflect the
# content in the staging version of the site. Another non-github.com delivery
# system is responsible for deploying the compressed site to a staging
# webserver.
#
# This workflow is triggered whenever the `main` branch is updated. We inspect
# this staging site to make sure it renders as expected before cutting a
# release tag and updating production.
#
# The working directory for this GitHub Action is under:
# /home/runner/work/docs/docs/
#
# For this workflow, two repos are checked out:
# - The github.com/linode/docs repo is checked out into the `docs-repo` folder
# - The github.com/linode/linode-docs-theme repo is checked out into the
# `linode-docs-theme-repo` folder.
#
# This workflow runs `hugo` to build the site inside the `docs-repo` folder.
# The site is rendered into the `docs-repo/public/` directory. The workflow
# then packages this folder into a tarball and uploads it to an object storage
# bucket (see steps 14-16 below).
#
# We check out the linode-docs-theme repo because it contains a
# `linode_algolia_admin` tool that we use to update the Algolia search indices.
#
# Inside the docs repo, the go.mod file contains a reference to which git
# commit of the linode-docs-theme is associated with the docs repo.
# We inspect this file to get that commit hash before checking out the
# linode-docs-theme repo.
#
# The following is a diagram of the above-mentioned repos and files. It does not
# show all the files within those repositories:
#
# .
#
# ├── docs-repo
# │   ├── go.mod
# │   └── public
# └── linode-docs-theme-repo
#    └── scripts
#       └── linode_algolia_admin
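#
# For illustration, the theme reference in docs-repo/go.mod looks roughly like
# the following (the timestamp portion is a placeholder; the short hash matches
# the example used in step 5 below):
#
#   require github.com/linode/linode-docs-theme v0.0.0-<timestamp>-cc2dbdeb7143 // indirect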
jobs:
build-staging-site:
if: github.repository_owner == 'linode'
runs-on: ubuntu-latest
environment:
name: build_staging_site
steps:
- name: 1. Set up Hugo
uses: peaceiris/actions-hugo@v2
with:
hugo-version: ${{ vars.HUGO_VERSION }}
- name: 2. Checkout docs repo
uses: actions/checkout@v3
with:
path: 'docs-repo'
# Debugging step so we can check if the right version
# of the docs repo is checked out
- name: (Debugging info) Print current docs repo branch/ref/commit
working-directory: ./docs-repo
run: |
git status
git log -1
- name: 3. Set up SSH agent (linode-docs-theme repo deploy key)
uses: webfactory/ssh-agent@v0.8.0
with:
ssh-private-key: ${{ secrets.LINODE_DOCS_THEME_DEPLOY_KEY_FOR_DOCS_DEPLOY_GHA }}
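# The deploy key loaded above is what authorizes the SSH (git@github.com) clone
# of the linode-docs-theme repo in the next step.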
- name: 4. Clone docs theme repo
run: |
git clone git@github.com:$GITHUB_REPOSITORY_OWNER/linode-docs-theme linode-docs-theme-repo
- name: 5. Get linode-docs-theme commit hash from docs go.mod
id: get-theme-version
working-directory: ./docs-repo
run: |
# `hugo mod graph` prints output that looks like:
# project github.com/linode/linode-docs-theme@v0.0.0-<timestamp>-cc2dbdeb7143+vendor
# In this example, cc2dbdeb7143 is the relevant commit hash for the
# linode-docs-theme repo. The following commands grab the commit hash
# from the `hugo mod graph` string
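# For illustration, here is what each stage of the pipeline below produces
# (the timestamp portion is a placeholder):
#   grep linode-docs-theme -> project github.com/linode/linode-docs-theme@v0.0.0-<timestamp>-cc2dbdeb7143+vendor
#   cut -d '+' -f 1        -> project github.com/linode/linode-docs-theme@v0.0.0-<timestamp>-cc2dbdeb7143
#   grep -o '[^-]*$'       -> cc2dbdeb7143
# The extracted hash is exported via $GITHUB_OUTPUT and read back in step 6 as
# steps.get-theme-version.outputs.VERSION.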
LINODE_DOCS_THEME_VERSION=$(hugo mod graph | grep linode-docs-theme | cut -d '+' -f 1 | grep -o '[^-]*$')
echo "VERSION=$LINODE_DOCS_THEME_VERSION" >> $GITHUB_OUTPUT
- name: 6. Check out linode-docs-theme commit hash from docs go.mod
working-directory: ./linode-docs-theme-repo
run: |
git checkout ${{ steps.get-theme-version.outputs.VERSION }}
# Debugging step so we can check if the right version
# of the Algolia admin tool is checked out
- name: (Debugging info) Print Linode Algolia admin tool version
working-directory: ./linode-docs-theme-repo
run: |
cd scripts/linode_algolia_admin/
go run main.go version
# Debugging step that lists the Hugo-generated images
# directory *before* the image cache restore step
- name: (Debugging info) List contents of images dir
continue-on-error: true
run: ls -al ${{ vars.HUGO_IMAGE_CACHE_PATH }}
- name: 7. Restore Hugo generated images cache
uses: ylemkimon/cache-restore@v2
with:
path: ${{ vars.HUGO_IMAGE_CACHE_PATH }}
key: ${{ vars.HUGO_IMAGE_CACHE_NAME }}
restore-keys: ${{ vars.HUGO_IMAGE_CACHE_NAME }}
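# Assuming this action follows the usual actions/cache semantics: `key` is the
# exact cache key to look up, and `restore-keys` is a fallback prefix that can
# match an older cache entry if the exact key is not found.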
# Debugging step that lists the Hugo-generated images
# directory *after* the image cache restore step,
# to make sure that the restore happened successfully
- name: (Debugging info) List contents of images dir
continue-on-error: true
run: ls -al ${{ vars.HUGO_IMAGE_CACHE_PATH }}
- name: 8. Install Node.js
uses: actions/setup-node@v3
with:
node-version: ${{ vars.NODE_VERSION }}
- name: 9. Install dependencies (Node)
working-directory: ./docs-repo
run: npm ci
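# The HUGOxPARAMSx... variables below use Hugo's delimiter-style environment
# configuration: the character right after "HUGO" ("x" here) acts as the key
# separator, which allows setting config keys that themselves contain
# underscores. Assuming that convention, for example:
#   HUGOxPARAMSxSEARCH_CONFIG2xAPP_ID -> params.search_config2.app_id
#   HUGO_IGNOREERRORS                 -> ignoreErrors (here, suppressing the
#                                        algolia-cachewarmer error)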
- name: 10. Build Hugo
env:
HUGOxPARAMSxSEARCH_CONFIG2xAPP_ID: ${{ vars.ALGOLIA_APP_ID }}
HUGOxPARAMSxSEARCH_CONFIG2xAPI_KEY: ${{ vars.ALGOLIA_SEARCH_KEY }}
HUGOxPARAMSxSEARCH_CONFIG2xINDEX_PREFIX: ${{ vars.ALGOLIA_INDEX_PREFIX }}
HUGO_ENVIRONMENT: ${{ vars.HUGO_ENVIRONMENT }}
HUGO_IGNOREERRORS: algolia-cachewarmer
working-directory: ./docs-repo
run: |
hugo config
hugo -b "${{ secrets.DOCS_WEBSITE_URL }}" --gc --minify -d public
# Debugging step that lists the Hugo-rendered files
# to make sure the site was built
- name: (Debugging info) List rendered files
working-directory: ./docs-repo
run: |
sudo apt install -y tree
tree -L 1 public
# When Hugo builds, it renders search index data to a collection of
# data files under public/. This step uploads that data to the
# Algolia search backend
- name: 11. Update Algolia (sandbox)
working-directory: ./linode-docs-theme-repo
env:
ALGOLIA_APP_ID: ${{ vars.ALGOLIA_APP_ID }}
ALGOLIA_ADMIN_API_KEY: ${{ secrets.ALGOLIA_WRITE_KEY }}
ALGOLIA_INDEX_PREFIX: ${{ vars.ALGOLIA_INDEX_PREFIX }}
run: |
cd scripts/linode_algolia_admin/
# The testing-docs-prefix-update sequence does the following:
# - Downloads data from the starter_ prefixed linode-wp,
# linode-community, and linode-documentation-sections indices to the
# GitHub Action workspace
# - Pushes the index JSON files that Hugo rendered under the public/ folder
# in the docs repo to Algolia. These are:
# - public/index.json
# - public/api/index.json
# - public/data/sections/index.json
# - Merges the data from the separate indices and pushes it into the
# PREFIX_linode-merged index in Algolia
# - Updates the settings and synonyms for the PREFIX_linode-merged index
# in Algolia
# - Deletes the PREFIX_linode-documentation and
# PREFIX_linode-documentation-api indices in Algolia to save space
go run main.go sequence -source-dir ../../../docs-repo/public testing-docs-prefix-update $ALGOLIA_INDEX_PREFIX
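# For example, if ALGOLIA_INDEX_PREFIX were set to "staging" (hypothetical
# value), the command above would expand to:
#   go run main.go sequence -source-dir ../../../docs-repo/public testing-docs-prefix-update staging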
# Why build Hugo twice? In order to render the Explore Docs nav menu,
# Hugo downloads data from the Algolia search backend. However, the
# first time we build Hugo in this workflow, that data isn't up-to-date
# with new content we may be publishing. So, we build Hugo a second time
# after we have performed the previous Update Algolia step.
# In summary:
# - Build Hugo the first time in order to render the JSON search index
# data files
# - Update Algolia with those data files
# - Build Hugo again so that the navigation UI can be rendered with that
# updated info from Algolia.
# It's a little redundant, but it solves the chicken-or-egg problem.
- name: 12. Build Hugo (second time)
env:
HUGOxPARAMSxSEARCH_CONFIG2xAPP_ID: ${{ vars.ALGOLIA_APP_ID }}
HUGOxPARAMSxSEARCH_CONFIG2xAPI_KEY: ${{ vars.ALGOLIA_SEARCH_KEY }}
HUGOxPARAMSxSEARCH_CONFIG2xINDEX_PREFIX: ${{ vars.ALGOLIA_INDEX_PREFIX }}
HUGO_ENVIRONMENT: ${{ vars.HUGO_ENVIRONMENT }}
working-directory: ./docs-repo
run: |
hugo config
hugo -b "${{ secrets.DOCS_WEBSITE_URL }}" --gc --minify -d public
# The gitcommithash.txt file is used when the site is deployed to the
# webserver to verify that the deployment was successful.
- name: 13. Add gitcommithash.txt to rendered public/ folder
working-directory: ./docs-repo
run: |
echo $GITHUB_SHA > public/gitcommithash.txt
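# For illustration (the SHA shown is hypothetical), gitcommithash.txt now holds
# a single line such as:
#   3f2a1b9c0d8e7f6a5b4c3d2e1f0a9b8c7d6e5f4a
# The downstream deploy system can compare that value against the expected
# commit to confirm the right build landed on the webserver.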
# Make a tarball of the site, because it uploads much faster than the
# uncompressed rendered site. The commit for this workflow run
# is encoded in the name of the tarball.
- name: 14. Create tarball of docs website
run: |
tar --owner=${{ secrets.DOCS_WEBSITE_USER }} --group=${{ secrets.DOCS_WEBSITE_USER }} -czf docs-$GITHUB_SHA.tar.gz -C docs-repo/public/ .
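# For illustration, this produces docs-<full 40-character commit SHA>.tar.gz in
# the workflow workspace; its contents can be spot-checked with:
#   tar -tzf docs-$GITHUB_SHA.tar.gz | head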
- name: 15. Set up S3cmd CLI tool
uses: s3-actions/s3cmd@v1.5.0
with:
provider: linode
region: ${{ secrets.OBJ_REGION }}
access_key: ${{ secrets.OBJ_ACCESS_KEY }}
secret_key: ${{ secrets.OBJ_SECRET_KEY }}
# Upload the tarball to an object storage bucket. Another non-github.com
# delivery system is used to deploy this tarball to the staging webserver
- name: 16. Upload website tarball to object storage
run: |
echo "$(date +%s)" > build-date.txt # Print current Unix timestamp to a text file
s3cmd put docs-$GITHUB_SHA.tar.gz s3://${{ secrets.OBJ_BUCKET_NAME }}/staging/docs-$GITHUB_SHA/
s3cmd put build-date.txt s3://${{ secrets.OBJ_BUCKET_NAME }}/staging/docs-$GITHUB_SHA/
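# After this step, the bucket holds the following objects for this commit
# (bucket name and SHA shown as placeholders):
#   s3://<bucket>/staging/docs-<sha>/docs-<sha>.tar.gz
#   s3://<bucket>/staging/docs-<sha>/build-date.txt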