Nightly #83

Workflow file for this run

# Copyright lowRISC contributors (OpenTitan project).
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
name: Nightly
on:
  schedule:
    - cron: "00 04 * * *"
  workflow_dispatch:
    inputs:
      branch:
        description: 'Branch to run the nightly on (for the SiVal job only)'
        required: true
        type: string
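# Example (assumes the GitHub CLI is available): a manual nightly run on a given branch
# can be triggered by passing the `branch` input defined above, e.g.
#   gh workflow run Nightly --ref <branch> -f branch=<branch>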
permissions:
  id-token: write
  contents: read
env:
  VIVADO_VERSION: "2021.1"
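# Assumption: VIVADO_VERSION is consumed by the prepare-env action and/or the
# `module load xilinx/vivado` calls below; the exact consumer is not visible in this file.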
jobs:
  fpga_cw310_sival_nightly:
    name: FPGA CW310 SiVal tests
    runs-on: [ubuntu-22.04-fpga, cw310]
    env:
      GS_PATH: gs://opentitan-test-results
      BAZEL_TEST_RESULTS: test_results.xml
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0 # Required for the bitstream cache to work.
          # Scheduled runs only trigger on the default branch, but we want to run on a
          # different branch, so fall back to 'earlgrey_1.0.0' when no input is given.
          ref: ${{ inputs.branch || 'earlgrey_1.0.0' }}
      - name: Install dependencies
        uses: ./.github/actions/prepare-env
        with:
          service_account_json: '${{ secrets.BAZEL_CACHE_CREDS }}'
      # gcloud is used by the final step to upload the merged test results to GS_PATH.
      - uses: google-github-actions/setup-gcloud@v2
      - name: Update hyperdebug
        # We run the update command twice to work around a udev issue in the container:
        # rusb cannot dynamically refresh its device list in CI (udev is not fully
        # functional). If the device is in normal mode, the first thing opentitantool
        # does is switch it to DFU mode and wait for it to reconnect; that reconnection
        # is never detected. Running the tool a second time queries the device list
        # again, so opentitantool can finish the update. The device then reboots into
        # normal mode and works for the hyperdebug job.
        run: |
          ./bazelisk.sh run //sw/host/opentitantool -- --interface=hyperdebug_dfu transport update-firmware \
            || ./bazelisk.sh run //sw/host/opentitantool -- --interface=hyperdebug_dfu transport update-firmware
      - name: Run tests after ROM boot stage
        if: success() || failure()
        run: |
          module load xilinx/vivado
          bazel_tests="$(mktemp)"
          # Select //sw/device tests tagged cw310_sival. The regex matches the tag as a
          # whole word in Bazel's rendered tag list, so it does not also match
          # cw310_sival_rom_ext. Example and penetration tests are excluded, and only
          # the spi_host tests are kept.
          ./bazelisk.sh query 'attr("tags", "[\[ ]cw310_sival[,\]]", tests(//sw/device/...))' \
            | grep -v examples \
            | grep -v penetrationtests \
            | grep spi_host \
            > "$bazel_tests"
          ./bazelisk.sh test --build_tests_only --target_pattern_file="$bazel_tests"
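      # The next step repeats the same selection for tests tagged cw310_sival_rom_ext,
      # i.e. tests that run after the ROM_EXT boot stage rather than after ROM.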
      - name: Run tests after ROM_EXT boot stage
        if: success() || failure()
        run: |
          module load xilinx/vivado
          bazel_tests="$(mktemp)"
          ./bazelisk.sh query 'attr("tags", "[\[ ]cw310_sival_rom_ext[,\]]", tests(//sw/device/...))' \
            | grep -v examples \
            | grep -v penetrationtests \
            | grep spi_host \
            > "$bazel_tests"
          ./bazelisk.sh test --build_tests_only --target_pattern_file="$bazel_tests"
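      # Publish results even when one of the test steps above failed; the
      # success() || failure() condition only skips the step if the run was cancelled.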
      - name: Publish bazel test results
        if: success() || failure()
        run: |
          # Bazel produces one XML file per test, so we merge them into a single report.
          find -L bazel-out -name "test.xml" | xargs merge-junit -o "$BAZEL_TEST_RESULTS"
          BUCKET_PATH=$GS_PATH/job/${{ github.job }}/branch/${{ inputs.branch || 'earlgrey_1.0.0' }}/${{ runner.name }}_$(date +%Y-%m-%d-%H%M%S)_${BAZEL_TEST_RESULTS}
          gcloud storage cp "$BAZEL_TEST_RESULTS" "$BUCKET_PATH"
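# Note (illustrative): the uploaded JUnit reports follow the bucket layout built above
# and can be listed with, e.g.
#   gcloud storage ls "gs://opentitan-test-results/job/fpga_cw310_sival_nightly/branch/earlgrey_1.0.0/"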