# Workflow source captured from GitHub run view for PR #620:
# "Add missing waivers for hci and tti"

# SPDX-License-Identifier: Apache-2.0
#
# I3C Core - Parallel Test and Documentation Workflow
#
# This workflow runs cocotb verification tests in parallel for both AHB and AXI
# bus configurations, then builds and deploys documentation.
#
# Architecture:
# 1. generate-matrix: Uses nox's JSON output to discover test sessions
# 2. tests-ahb: Runs all AHB-tagged tests in parallel (one job per test)
# 3. tests-axi: Runs all AXI-tagged tests in parallel (one job per test)
# 4. test-results: Aggregates results from all test jobs
# 5. docs-build: Builds and deploys documentation (only after tests pass)
#
# Optional: Long-running tests (i3c_ahb_verify, i3c_axi_verify) can be split
# into individual parametrized runs by enabling SPLIT_TESTS in generate-matrix.
#
# The parallel execution reduces total CI time from ~80 minutes to ~25 minutes.
name: Run Tests

# Run on every push and on pull requests.
# NOTE: generic YAML 1.1 parsers read the bare key "on" as boolean true;
# GitHub's workflow loader handles it, so it stays unquoted by convention.
on:
  push:
  pull_request:

# docs-build pushes generated pages back to the repository (gh-pages),
# which requires write access to contents.
permissions:
  contents: write

env:
  DEBIAN_FRONTEND: "noninteractive"
  VERILATOR_VERSION: "v5.024"  # Verilator tag built and cached by the test jobs
  WAVES: "0"                   # Waveform dumping disabled in CI for speed
jobs:
  # ===========================================================================
  # Job: Generate Test Matrix
  # ===========================================================================
  # Dynamically discovers test sessions using nox's native JSON output.
  # Tests are categorized by their tags (ahb, axi, or both).
  # This allows adding new tests without modifying the workflow.
  # ===========================================================================
  generate-matrix:
    name: Generate test matrix
    runs-on: ubuntu-latest
    outputs:
      ahb-matrix: ${{ steps.gen.outputs.ahb_matrix }}
      axi-matrix: ${{ steps.gen.outputs.axi_matrix }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.12"
      - name: Install nox and dependencies
        run: |
          pip install nox==2023.4.22 pyyaml
          pip install -e tools/nox_utils
      # Inline Python queries nox for its session list and writes two JSON
      # matrices (AHB / AXI) to GITHUB_OUTPUT for the downstream test jobs.
      - name: Query nox for test sessions
        id: gen
        run: |
          cd verification/cocotb
          python3 << 'EOF'
          import json
          import subprocess
          import os
          import sys

          # Long-running tests that should be split into individual parametrized runs
          # These tests have multiple test_name parameters and take the most time
          SPLIT_TESTS = {"i3c_ahb_verify", "i3c_axi_verify"}

          # Use nox's native JSON output to list all sessions
          # Note: nox -l doesn't execute sessions, just lists them, so no --no-venv needed
          result = subprocess.run(
              ["nox", "-l", "--json", "-f", "noxfile.py"],
              capture_output=True, text=True,
              env={**os.environ, "PYTHONDONTWRITEBYTECODE": "1"}
          )

          # Debug: show any errors from nox
          if result.returncode != 0:
              print(f"nox failed with return code {result.returncode}", file=sys.stderr)
              print(f"stderr: {result.stderr}", file=sys.stderr)
              sys.exit(1)
          if not result.stdout.strip():
              print("nox returned empty output", file=sys.stderr)
              print(f"stderr: {result.stderr}", file=sys.stderr)
              sys.exit(1)

          try:
              sessions = json.loads(result.stdout)
          except json.JSONDecodeError as e:
              print(f"Failed to parse JSON: {e}", file=sys.stderr)
              print(f"stdout was: {result.stdout[:500]}", file=sys.stderr)
              print(f"stderr was: {result.stderr[:500]}", file=sys.stderr)
              sys.exit(1)

          ahb_tests = set()
          axi_tests = set()
          for session in sessions:
              tags = session.get("tags", [])
              # Only process test sessions (tagged with 'tests')
              if "tests" not in tags:
                  continue
              name = session["name"]
              has_ahb = "ahb" in tags
              has_axi = "axi" in tags
              # For long-running tests, use full session string to run each variant separately
              # For others, use just the name to run all variants together
              if name in SPLIT_TESTS:
                  # Use full parametrized session string (e.g., "i3c_ahb_verify(simulator='vcs', ...)")
                  session_id = session["session"]
              else:
                  # Use just the name (e.g., "ccc_verify")
                  session_id = name
              # Categorize tests by bus configuration
              if has_ahb:
                  ahb_tests.add(session_id)
              if has_axi:
                  axi_tests.add(session_id)

          # Sort for consistent ordering
          ahb_tests = sorted(ahb_tests)
          axi_tests = sorted(axi_tests)
          print(f"Discovered {len(ahb_tests)} AHB tests and {len(axi_tests)} AXI tests")

          # Output matrices for subsequent jobs
          with open(os.environ['GITHUB_OUTPUT'], 'a') as f:
              f.write(f"ahb_matrix={json.dumps(ahb_tests)}\n")
              f.write(f"axi_matrix={json.dumps(axi_tests)}\n")
          EOF
# ===========================================================================
# Job: Run AHB Tests (Parallel)
# ===========================================================================
# Runs each AHB test as a separate parallel job using GitHub Actions matrix.
# Each job:
# 1. Builds/caches Verilator
# 2. Sets up Python environment via install.sh and activate.sh
# 3. Configures RTL for AHB bus
# 4. Runs the specific test via nox
# ===========================================================================
tests-ahb:
name: "AHB: ${{ matrix.test }}"
needs: [generate-matrix]
runs-on: ubuntu-latest
strategy:
fail-fast: false # Continue running other tests even if one fails
matrix:
test: ${{ fromJson(needs.generate-matrix.outputs.ahb-matrix) }}
steps:
# --- System Dependencies ---
- name: Install system dependencies
run: |
sudo apt -qqy update
sudo apt -qqy --no-install-recommends install \
help2man libfl-dev make g++ git bison flex gperf \
libreadline-dev libbz2-dev autoconf
# --- Repository Setup ---
- name: Checkout repository
uses: actions/checkout@v4
- name: Initialize submodules
run: git submodule update --init --recursive
# --- Verilator (cached) ---
- name: Cache Verilator installation
id: cache-verilator
uses: actions/cache@v4
with:
path: ~/verilator-install
key: verilator-install-${{ env.VERILATOR_VERSION }}-${{ runner.os }}
- name: Build Verilator (if not cached)
if: steps.cache-verilator.outputs.cache-hit != 'true'
run: |
git clone https://github.com/verilator/verilator -b ${{ env.VERILATOR_VERSION }}
cd verilator
autoconf
./configure --prefix=$HOME/verilator-install
make -j$(nproc)
make install
- name: Add Verilator to PATH
run: |
echo "$HOME/verilator-install/bin" >> $GITHUB_PATH
echo "VERILATOR_ROOT=$HOME/verilator-install/share/verilator" >> $GITHUB_ENV
# --- Python Environment ---
- name: Setup Python environment (pyenv + dependencies)
run: ./install.sh
# --- Run Test ---
# Combined step ensures environment variables from activate.sh persist
# Note: Using env var to preserve quotes in parametrized session names
- name: Configure RTL and run test
id: run-test
env:
TEST_SESSION: ${{ matrix.test }}
run: |
source activate.sh
make config CFG_NAME=ahb
cd verification/cocotb && python -m nox -R -s "$TEST_SESSION" --no-venv --forcecolor
# --- Debug: Show logs on failure ---
- name: Display test logs on failure
if: failure() && steps.run-test.outcome == 'failure'
run: |
echo "=== Test failed - displaying log files ==="
find verification/cocotb -name "*.log" -type f -exec sh -c \
'echo ""; echo "========================================"; echo "=== {} ==="; echo "========================================"; cat "{}"' \;
# --- Upload Results ---
# Sanitize artifact name: replace special chars for parametrized test names
- name: Sanitize artifact name
if: always()
id: sanitize
run: |
NAME='${{ matrix.test }}'
# Replace problematic characters with underscores/remove them
SANITIZED=$(echo "$NAME" | sed "s/[()='\" ,]/_/g" | sed 's/__*/_/g' | sed 's/_$//')
echo "name=$SANITIZED" >> $GITHUB_OUTPUT
- name: Upload test artifacts
if: always()
uses: actions/upload-artifact@v4
with:
name: test-results-ahb-${{ steps.sanitize.outputs.name }}
path: |
verification/cocotb/**/results*.xml
verification/cocotb/**/*.log
# ===========================================================================
# Job: Run AXI Tests (Parallel)
# ===========================================================================
# Same structure as tests-ahb, but configured for AXI bus.
# ===========================================================================
tests-axi:
name: "AXI: ${{ matrix.test }}"
needs: [generate-matrix]
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
test: ${{ fromJson(needs.generate-matrix.outputs.axi-matrix) }}
steps:
# --- System Dependencies ---
- name: Install system dependencies
run: |
sudo apt -qqy update
sudo apt -qqy --no-install-recommends install \
help2man libfl-dev make g++ git bison flex gperf \
libreadline-dev libbz2-dev autoconf
# --- Repository Setup ---
- name: Checkout repository
uses: actions/checkout@v4
- name: Initialize submodules
run: git submodule update --init --recursive
# --- Verilator (cached) ---
- name: Cache Verilator installation
id: cache-verilator
uses: actions/cache@v4
with:
path: ~/verilator-install
key: verilator-install-${{ env.VERILATOR_VERSION }}-${{ runner.os }}
- name: Build Verilator (if not cached)
if: steps.cache-verilator.outputs.cache-hit != 'true'
run: |
git clone https://github.com/verilator/verilator -b ${{ env.VERILATOR_VERSION }}
cd verilator
autoconf
./configure --prefix=$HOME/verilator-install
make -j$(nproc)
make install
- name: Add Verilator to PATH
run: |
echo "$HOME/verilator-install/bin" >> $GITHUB_PATH
echo "VERILATOR_ROOT=$HOME/verilator-install/share/verilator" >> $GITHUB_ENV
# --- Python Environment ---
- name: Setup Python environment (pyenv + dependencies)
run: ./install.sh
# --- Run Test ---
# Note: Using env var to preserve quotes in parametrized session names
- name: Configure RTL and run test
id: run-test
env:
TEST_SESSION: ${{ matrix.test }}
run: |
source activate.sh
make config CFG_NAME=axi
cd verification/cocotb && python -m nox -R -s "$TEST_SESSION" --no-venv --forcecolor
# --- Debug: Show logs on failure ---
- name: Display test logs on failure
if: failure() && steps.run-test.outcome == 'failure'
run: |
echo "=== Test failed - displaying log files ==="
find verification/cocotb -name "*.log" -type f -exec sh -c \
'echo ""; echo "========================================"; echo "=== {} ==="; echo "========================================"; cat "{}"' \;
# --- Upload Results ---
# Sanitize artifact name: replace special chars for parametrized test names
- name: Sanitize artifact name
if: always()
id: sanitize
run: |
NAME='${{ matrix.test }}'
# Replace problematic characters with underscores/remove them
SANITIZED=$(echo "$NAME" | sed "s/[()='\" ,]/_/g" | sed 's/__*/_/g' | sed 's/_$//')
echo "name=$SANITIZED" >> $GITHUB_OUTPUT
- name: Upload test artifacts
if: always()
uses: actions/upload-artifact@v4
with:
name: test-results-axi-${{ steps.sanitize.outputs.name }}
path: |
verification/cocotb/**/results*.xml
verification/cocotb/**/*.log
# ===========================================================================
# Job: Aggregate Test Results
# ===========================================================================
# Collects all test artifacts into a single combined artifact.
# Provides a single point to check overall test status.
# ===========================================================================
test-results:
name: Aggregate test results
needs: [tests-ahb, tests-axi]
if: always()
runs-on: ubuntu-latest
steps:
- name: Download all test artifacts
uses: actions/download-artifact@v4
with:
pattern: test-results-*
path: test-results
merge-multiple: true
- name: Debug downloaded artifacts
run: |
echo "=== Files downloaded from test jobs ==="
find test-results -type f 2>/dev/null | head -30 || echo "No files found"
echo ""
echo "=== XML files specifically ==="
find test-results -name "*.xml" -type f 2>/dev/null | head -20 || echo "No XML files"
- name: Upload combined test results
uses: actions/upload-artifact@v4
with:
name: tests-results
path: test-results
- name: Verify all tests passed
run: |
if [ "${{ needs.tests-ahb.result }}" != "success" ] || \
[ "${{ needs.tests-axi.result }}" != "success" ]; then
echo "❌ Some tests failed!"
echo " AHB tests: ${{ needs.tests-ahb.result }}"
echo " AXI tests: ${{ needs.tests-axi.result }}"
exit 1
fi
echo "✅ All tests passed!"
# ===========================================================================
# Job: Build and Deploy Documentation
# ===========================================================================
# Builds Sphinx documentation including verification results.
# Only runs after all tests pass successfully.
# Deploys to GitHub Pages on main branch.
# ===========================================================================
docs-build:
name: Build documentation
runs-on: ubuntu-latest
needs: [test-results]
if: always() && needs.test-results.result == 'success'
steps:
- name: Install system dependencies
run: |
sudo apt -qqy update
sudo apt -qqy --no-install-recommends install \
python3 python3-pip python3-venv git
- name: Checkout repository
uses: actions/checkout@v4
- name: Initialize submodules
run: git submodule update --init --recursive
- name: Download test results
uses: actions/download-artifact@v4
with:
name: tests-results
path: tests-results
- name: Debug downloaded artifacts
run: |
echo "=== Full structure of tests-results ==="
find tests-results -type f 2>/dev/null | head -30 || echo "No files found or directory doesn't exist"
echo ""
echo "=== Directory listing ==="
ls -laR tests-results 2>/dev/null | head -50 || echo "tests-results directory not found"
- name: Setup Python virtual environment
run: |
python3 -m venv .venv
source .venv/bin/activate
export I3C_ROOT_DIR="$(pwd)"
pip3 install -r requirements.txt
pip3 install -r doc/requirements.txt
- name: Generate verification documentation
run: |
source .venv/bin/activate
# Copy XML files to the expected location, handling nested structure
# The artifact contains paths like verification/cocotb/block/*/results*.xml
if [ -d "tests-results/verification/cocotb" ]; then
echo "Found nested structure, copying from tests-results/verification/cocotb/"
cp -r tests-results/verification/cocotb/* verification/cocotb/
elif [ -d "tests-results" ] && [ "$(ls -A tests-results 2>/dev/null)" ]; then
echo "Found flat structure, copying from tests-results/"
cp -r tests-results/* verification/cocotb/
else
echo "WARNING: No test results found to copy!"
fi
echo "=== XML files in verification/cocotb ==="
find verification/cocotb -name "*.xml" -type f | head -10
# Only run verification docs if XML files exist
XML_COUNT=$(find verification/cocotb -name "*.xml" -type f | wc -l)
if [ "$XML_COUNT" -gt 0 ]; then
echo "Found $XML_COUNT XML files, generating verification docs..."
REPO_URL="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/tree/$GITHUB_REF_NAME/" \
make verification-docs-with-sim
else
echo "WARNING: No XML files found, skipping verification-docs-with-sim"
echo "Running verification-docs without sim results instead..."
REPO_URL="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/tree/$GITHUB_REF_NAME/" \
make verification-docs || true
fi
- name: Build Sphinx documentation
run: |
pushd doc
pip3 install -r requirements.txt
TZ=UTC make html
popd
# Include coverage results if available
git clone https://github.com/antmicro/i3c-core-coverage-results || true
cp i3c-core-coverage-results/*html doc/build/html 2>/dev/null || true
- name: Upload documentation artifact
if: always()
uses: actions/upload-artifact@v4
with:
name: docs
path: ./doc/build
- name: Deploy to GitHub Pages
if: github.ref == 'refs/heads/main'
uses: peaceiris/actions-gh-pages@v4
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
publish_dir: ./doc/build/html