Build and test #626
name: Build and test
on:
  push:
    branches:
      - main
      - master
    tags:
      - '**'
  pull_request:
    branches:
      - main
      - master
  schedule:
    - cron: '0 1 * * *'
permissions: read-all
concurrency:
  group: ${{ github.workflow }}@${{ github.ref }}
  cancel-in-progress: true
defaults:
  run:
    shell: bash
env:
  PACK_DIR: /root/.pack
  GENERATION_DIR: 'generated-modules'
  BUILD_DIR: '.build'
  PACKAGE_NAME: verilog-model
  ERROR_DISTANCES_OUT: error_distances.html
  GH_PAGES_PATH: verilog-gh-pages
jobs:
  get-upstream-matrix:
    name: Acquire matrix of upstream modes
    runs-on: ubuntu-latest
    container: ghcr.io/stefan-hoeck/idris2-pack:latest
    outputs:
      upstream-matrix: "${{ steps.get-upstream-matrix.outputs.upstream-matrix }}"
    steps:
      - name: Install Git
        run: apt-get update && apt-get install -y git
      - name: Get upstream matrix
        id: get-upstream-matrix
        run: |
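          # Compare the commit-hash suffix of the installed Idris2 version with the
          # current tip of upstream Idris2 main; the bleeding-edge leg is added to
          # the matrix only when the two differ. The step output is a JSON list,
          # e.g. upstream-matrix=["latest-pack-collection"].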
          CURR="$(idris2 --version | sed 's/.*-//')"
          MAIN="$(git ls-remote https://github.com/idris-lang/Idris2 main | head -c 9)"
          echo "Current: $CURR, bleeding edge: $MAIN"
          if [ "$CURR" == "$MAIN" ]; then
            echo 'upstream-matrix=["latest-pack-collection"]'
          else
            echo 'upstream-matrix=["latest-pack-collection", "bleeding-edge-compiler"]'
          fi >> "$GITHUB_OUTPUT"
  build-and-test:
    name: Build and test `${{ github.repository }}`
    needs: get-upstream-matrix
    runs-on: ubuntu-latest
    container: ghcr.io/stefan-hoeck/idris2-pack:latest
    outputs:
      artifact-id: generated-modules
    strategy:
      fail-fast: false
      matrix:
        upstream-mode: ${{ fromJSON(needs.get-upstream-matrix.outputs.upstream-matrix) }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
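      # The build cache key hashes the sources, the .ipkg, and pack.toml, and appends
      # the upstream mode, so a change to any of them starts a clean build.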
      - name: Calculate cache key
        id: cache-key
        run: |
          echo "key=build-${{ hashFiles('src/**', 'verilog-model.ipkg', 'pack.toml') }}-${{ matrix.upstream-mode }}" >> $GITHUB_OUTPUT
      - name: Restore from cache
        id: cache-build
        uses: actions/cache/restore@v4
        with:
          path: ${{ env.BUILD_DIR }}
          key: ${{ steps.cache-key.outputs.key }}
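      # On a cache hit, bump the mtimes of the restored build products so they are
      # newer than the freshly checked-out sources; presumably this keeps the
      # incremental build from redoing work.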
      - name: Touch files
        if: steps.cache-build.outputs.cache-hit == 'true'
        run: |
          find "${{ env.BUILD_DIR }}" -type f -exec touch {} +
          sync
      - name: Update `pack-db`
        run: pack update-db
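      # For the bleeding-edge leg, pin the compiler to the tip of Idris2 main by
      # appending an [idris2] override to pack.toml before fetching.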
      - name: Switch to the latest compiler, if needed
        if: matrix.upstream-mode == 'bleeding-edge-compiler'
        run: |
          { echo; echo "[idris2]"; echo 'commit = "latest:main"'; } >> pack.toml
          pack fetch
      - name: Switch to the latest collection
        run: pack switch latest
      - name: Build `${{ env.PACKAGE_NAME }}`
        run: pack build ${{ env.PACKAGE_NAME }}
      - name: Save build
        uses: actions/cache/save@v4
        with:
          path: ${{ env.BUILD_DIR }}
          key: ${{ steps.cache-key.outputs.key }}
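      # Presumably a workaround for test fixtures: do one generator run and
      # pre-create the files that the printer tests expect to exist.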
      - name: Fix file system ¯\_(ツ)_/¯
        run: |
          set -x +e
          pack run ${{ env.PACKAGE_NAME }} --coverage mcov -n 1 --seed 0,1
          touch tests/printer/model-coverage/dummy
          touch tests/printer/create-dir/dummy
      - name: Test ${{ env.PACKAGE_NAME }}
        run: pack test ${{ env.PACKAGE_NAME }}
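      # Module generation runs only on the latest-pack-collection leg; the generated
      # .sv files are uploaded below and consumed by the run-tools jobs. Flag meanings
      # are assumptions: -n 128 modules, seed-derived names and contents, and model
      # coverage written to mcov.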
      - name: Gen SystemVerilog modules
        if: matrix.upstream-mode == 'latest-pack-collection'
        run: pack run ${{ env.PACKAGE_NAME }} --to "${{ env.GENERATION_DIR }}" -n 128 --seed-name --seed-content --coverage mcov # Generate modules
      - name: Show mcov
        if: matrix.upstream-mode == 'latest-pack-collection'
        run: cat mcov
      - uses: actions/upload-artifact@v4
        if: matrix.upstream-mode == 'latest-pack-collection'
        with:
          name: generated-modules
          path: "${{ env.GENERATION_DIR }}"
  run-tools:
    name: ${{ matrix.tool.name }}
    needs: build-and-test
    timeout-minutes: 90
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        tool:
          # MATRIX FORMAT:
          # - name: the display name
          #   repo: link to the git repo
          #   deps: packages to install with apt
          #   path: the folder with the files to build
          #   pre_build: commands to run before the build
          #   build: build commands (run only on a cache miss)
          #   install: install commands
          #   version: command to print the version
          #   python_runner_deps: python libs required by the runner
          #   run: command to run the tool. Note that the path to the .sv file will be concatenated to the end of the command.
          #   top_flag: flag to pass to the tool to specify the top module (if necessary)
          #   error_regex: regex to match an error (has to be in double quotes!!)
          #   sim_cmd: command to run the simulator (optional)
          #   sim_error_regex: regex to match a simulator error (optional)
          #   rust_ver: rust version to install (optional)
          #   third_party_release_repo: repo to download the release from, in {owner}/{repo} format
          #   third_party_release_name_wildcard: wildcard to match the release name
          #   dep_path: path of the cached dependency
          #   dep_name: name of the cached dependency
          #
          # P.S. The ignored-errors file should be named <path>-ignored-errors.txt
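          # Note: the run/sim commands below use {file} for the generated .sv path and
          # {top_module} for the top module name; these placeholders are presumably
          # substituted by the runner script.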
          - name: iverilog
            repo: https://github.com/steveicarus/iverilog.git
            deps: autoconf autotools-dev bison flex libfl-dev gperf make gcc g++ cmake python3
            path: iverilog
            build: |
              sh autoconf.sh
              ./configure
              sudo make -j$(nproc)
            install: |
              sudo make install
            version: |
              iverilog -V
            run: iverilog -g2012 -o a.out {file}
            error_regex: |
              (syntax error\W[A-Za-z0-9_\/,.:-]+ .*$|(error|sorry|assert|vvp): [\S ]+$)
            sim_cmd: vvp a.out
            sim_error_regex: |
              (syntax error\W[A-Za-z0-9_\/,.:-]+ .*$|(error|sorry|assert|vvp): [\S ]+$)
            extra_ignored_regexes: >
              "Unable to elaborate r-value: .*"
              "Code generation had .* error\(s\)\."
          - name: slang
            repo: https://github.com/MikePopoloski/slang.git
            deps: cmake
            path: slang
            build: |
              sudo cmake -B build
              sudo cmake --build build -j$(nproc)
            install: |
              sudo cmake --install build --strip
            version: |
              slang --version
            run: slang -Weverything {file}
            error_regex: |
              error: [\S ]+$
          - name: verilator
            repo: https://github.com/verilator/verilator.git
            deps: autoconf autotools-dev bison flex help2man libfl-dev libelf-dev cmake
            path: verilator
            build: |
              autoconf
              ./configure
              make -j$(nproc)
            install: |
              sudo make install
            version: |
              verilator --version
            run: rm -rf obj_dir && rm -f top.sv && cp {file} top.sv && verilator --cc --exe .github/workflows/conf/verilator/testbench.cpp --timing -Wno-fatal --no-std +1800-2023ext+sv top.sv
            error_regex: |
              %Error(|\-[A-Z]+):( | Internal Error: )[A-Za-z0-9_\/-]+\.sv:\d+:\d+:[\S ]*$
            sim_cmd: make -C obj_dir -f Vtop.mk Vtop && ./obj_dir/Vtop
            sim_error_regex: |
              %Error:[\S ]+$
          - name: surelog
            repo: https://github.com/chipsalliance/Surelog.git
            deps: cmake gcc build-essential git pkg-config tclsh swig uuid-dev libgoogle-perftools-dev python3 python3-orderedmultidict python3-psutil python3-dev default-jre lcov zlib1g-dev
            path: surelog
            build: |
              git submodule update --init --recursive
              make -j$(nproc)
            install: |
              sudo apt install -y libgoogle-perftools-dev
              sudo pip3 install orderedmultidict
              sudo make install
            version: |
              surelog --version
            run: surelog -parse -nopython -sv {file}
          - name: zachjs-sv2v
            repo: https://github.com/zachjs/sv2v.git
            deps: haskell-stack
            path: zachjs-sv2v
            build: |
              make -j$(nproc)
            install: |
              echo $PWD
              sudo stack install --allow-different-user --local-bin-path /usr/local/bin
            version: |
              sv2v --version
            run: sv2v -v {file}
            error_regex: |
              sv2v:(?!CallStack$).+
          - name: tree-sitter-systemverilog
            repo: https://github.com/gmlarumbe/tree-sitter-systemverilog.git
            deps: gcc python3
            path: tssv
            pre_build: |
              gunzip tree-sitter-linux-x64.gz
              sudo mv tree-sitter-linux-x64 /usr/local/bin/tree-sitter-linux-x64
              sudo chmod +x /usr/local/bin/tree-sitter-linux-x64
            build: |
              cd ..
              sudo apt -qq -y install curl
              touch "$HOME/.bash_profile"
              # From https://nodejs.org/en/download
              curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.40.2/install.sh | bash
              . "$HOME/.nvm/nvm.sh"
              nvm install 22
              cd tssv
              sudo tree-sitter-linux-x64 generate --abi 14
              sudo tree-sitter-linux-x64 build -o sv.so
            python_runner_deps: tree-sitter==0.21.3
            version: |
              sudo tree-sitter-linux-x64 -V
            run: python3 .github/workflows/conf/tree-sitter-sv/run_ts.py tssv/sv.so {file}
            third_party_release_repo: tree-sitter/tree-sitter
            third_party_release_name_wildcard: tree-sitter-linux-x64.gz
          - name: synlig
            repo: https://github.com/chipsalliance/synlig.git
            deps: gcc-11 g++-11 build-essential cmake tclsh ant default-jre swig google-perftools libgoogle-perftools-dev python3 python3-dev python3-pip uuid uuid-dev tcl-dev flex libfl-dev git pkg-config libreadline-dev bison libffi-dev wget python3-orderedmultidict
            path: synlig
            build: |
              git submodule sync
              git submodule update --init --recursive third_party/{surelog,yosys}
              make -j$(nproc)
            install: |
              sudo make install -j$(nproc)
            version: |
              synlig --version
              synlig -h
            run: synlig -f systemverilog -o output.blif -S {file}
            error_regex: |
              ERROR: .*$
          - name: sv_parser
            repo: https://github.com/dalance/sv-parser.git
            deps: cargo
            rust_ver: "1.81"
            path: sv_parser
            build: |
              cargo build --release --example parse_sv
            install: |
              sudo install -m 755 target/release/examples/parse_sv /usr/local/bin/
            version: |
              parse_sv -h
            run: parse_sv {file}
          # - name: yosys
          #   repo: https://github.com/YosysHQ/yosys.git
          #   deps: build-essential clang lld bison flex libreadline-dev gawk tcl-dev libffi-dev git graphviz xdot pkg-config python3 libboost-system-dev libboost-python-dev libboost-filesystem-dev zlib1g-dev
          #   path: yosys
          #   build: |
          #     git submodule update --init --recursive
          #     make -j$(nproc)
          #   install: |
          #     sudo make install
          #     cd ..
          #     bash .github/workflows/runner/rename_tests.sh
          #   version: yosys --version
          #   run: yosys -p 'read -sv2012 {file}; hierarchy -check -top {top_module}; proc; opt; fsm; memory; sim -assert; clean'
          #   error_regex: >
          #     ERROR:[A-z0-9, \"'\[\]=()]+
          - name: yosys-slang
            repo: https://github.com/povik/yosys-slang.git
            deps: cmake clang tcl-dev bison default-jre flex libfl-dev libreadline-dev pkg-config tclsh uuid-dev build-essential lld libreadline-dev gawk libffi-dev git graphviz xdot pkg-config python3 libboost-system-dev libboost-python-dev libboost-filesystem-dev zlib1g-dev
            path: yosys-slang
            build: |
              git clone https://github.com/YosysHQ/yosys.git
              cd yosys
              git submodule update --init --recursive
              make -j$(nproc)
              sudo make install
              cd ..
              git submodule update --init --recursive
              make -j$(nproc)
            install: |
              cd yosys
              sudo make install
              cd ..
              sudo make install
              cd ..
              bash .github/workflows/runner/rename_tests.sh
            version: yosys --version
            run: yosys -m slang -p 'read_slang {file}; hierarchy -check -top {top_module}; proc; opt; fsm; memory; sim -assert; clean'
            error_regex: >
              (ERROR|error): .*
            extra_ignored_regexes: >
              "Compilation failed"
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Download Generated Files
        uses: actions/download-artifact@v4
        with:
          name: generated-modules
          path: "${{ env.GENERATION_DIR }}"
      - name: Update apt
        run: sudo apt update -qq
      - name: Clone tool
        run: |
          git clone ${{ matrix.tool.repo }} ${{ matrix.tool.path }}
          cd ${{ matrix.tool.path }}
          echo "commit_hash=$(git rev-parse HEAD)" >> $GITHUB_ENV
      - name: Restore dependency
        if: ${{ matrix.tool.dep_path }}
        uses: actions/cache/restore@v4
        with:
          path: ${{ matrix.tool.dep_path }}
          key: ${{ matrix.tool.dep_name }}-cache-
      - name: Restore from cache
        id: cache-build
        uses: actions/cache/restore@v4
        with:
          path: ${{ matrix.tool.path }}
          key: ${{ matrix.tool.name }}-cache-${{ env.commit_hash }}
      - name: Add bazel repo (if needed)
        if: ${{ contains(matrix.tool.deps, 'bazel') }}
        run: |
          sudo apt -qq -y install apt-transport-https curl gnupg
          curl -fsSL https://bazel.build/bazel-release.pub.gpg | gpg --dearmor > bazel.gpg
          chmod a+r bazel.gpg
          sudo mv bazel.gpg /etc/apt/trusted.gpg.d/
          echo "deb [arch=amd64] https://storage.googleapis.com/bazel-apt stable jdk1.8" | sudo tee /etc/apt/sources.list.d/bazel.list
          sudo apt update -qq
      - name: Install tool-specific dependencies
        if: ${{ matrix.tool.deps }}
        run: |
          sudo apt -qq -y install ${{ matrix.tool.deps }}
      - name: Setup Rust (if needed)
        if: ${{ matrix.tool.rust_ver }}
        run: |
          sudo apt -y install curl
          curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs > setup.sh
          sh setup.sh -y
          source $HOME/.cargo/env
          rustup install ${{ matrix.tool.rust_ver }}
          rustup default ${{ matrix.tool.rust_ver }}
      - name: Update haskell (if needed)
        if: ${{ contains(matrix.tool.deps, 'haskell') && steps.cache-build.outputs.cache-hit != 'true' }}
        run: |
          stack upgrade
      - uses: robinraju/release-downloader@v1
        if: ${{ matrix.tool.third_party_release_repo }}
        with:
          repository: ${{ matrix.tool.third_party_release_repo }}
          latest: true
          extract: true
          fileName: ${{ matrix.tool.third_party_release_name_wildcard }}
      - name: Pre-Build tool
        run: |
          ${{ matrix.tool.pre_build }}
      - name: Build tool
        if: steps.cache-build.outputs.cache-hit != 'true'
        run: |
          cd ${{ matrix.tool.path }}
          ${{ matrix.tool.build }}
      - name: Save build
        if: steps.cache-build.outputs.cache-hit != 'true'
        uses: actions/cache/save@v4
        with:
          path: ${{ matrix.tool.path }}
          key: ${{ steps.cache-build.outputs.cache-primary-key }}
      - name: Install tool
        run: |
          cd ${{ matrix.tool.path }}
          ${{ matrix.tool.install }}
      - name: Print version
        run: ${{ matrix.tool.version }}
      - name: Print git commit hash
        run: |
          cd ${{ matrix.tool.path }}
          git rev-parse HEAD
      - name: Install runner deps
        run: sudo apt install -y bc python3 python3-pip python3-venv
      - name: Setup python venv
        run: |
          python3 -m venv .venv
          source .venv/bin/activate
          python3 -m pip install --upgrade pip
          pip install plotly pandas scikit-learn numpy textdistance pyyaml ${{ matrix.tool.python_runner_deps }}
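      # The Python runner (.github/workflows/runner/main.py) drives the tool over the
      # generated modules; judging by the flags below, it matches tool/simulator output
      # against the error regexes, filters errors already recorded under
      # found_errors/<tool path> in the gh-pages checkout, and writes an HTML
      # error-distance report.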
      - name: Run
        run: |
          set +e
          source .venv/bin/activate
          git clone https://github.com/DepTyCheck/verilog-model.git --branch gh-pages --single-branch ${{ env.GH_PAGES_PATH }}
          cd "${{ env.GH_PAGES_PATH }}"
          echo "Latest commit hash of ${{ env.GH_PAGES_PATH }}:"
          git rev-parse HEAD
          cd -
          python3 .github/workflows/runner/main.py \
            --gen-path "${{ env.GENERATION_DIR }}" \
            --job-link "https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}/job/${{ github.job }}" \
            --tool-name "${{ matrix.tool.name }}" \
            --tool-cmd "${{ matrix.tool.run }}" \
            --tool-error-regex "${{ matrix.tool.error_regex }}" \
            --sim-cmd "${{ matrix.tool.sim_cmd }}" \
            --sim-error-regex "${{ matrix.tool.sim_error_regex }}" \
            --ignored-errors-dir "${{ env.GH_PAGES_PATH }}/found_errors/${{ matrix.tool.path }}" \
            --error-distances-output "$ERROR_DISTANCES_OUT" \
            --extra-ignored-regexes ${{ join(matrix.tool.extra_ignored_regexes, ' ') }}
          EXIT_CODE=$?
          if [ $EXIT_CODE -ne 0 ]; then
            exit 1
          fi
      - name: Upload error distances artifact
        if: always() && hashFiles(env.ERROR_DISTANCES_OUT) != ''
        uses: actions/upload-artifact@v4
        with:
          name: error-distances-${{ matrix.tool.name }}
          path: ${{ env.ERROR_DISTANCES_OUT }}
  publish-container:
    name: Build and publish Docker image
    needs: run-tools
    if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/master' }}
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write
      attestations: write
      id-token: write
    env:
      REGISTRY: ghcr.io
      IMAGE_NAME: ${{ github.repository }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
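      # Reuse the .build directory cached by the latest-pack-collection leg of
      # build-and-test; the key below must match the one computed in that job.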
      - name: Restore build cache
        id: cache-build
        uses: actions/cache/restore@v4
        with:
          path: ${{ env.BUILD_DIR }}
          key: build-${{ hashFiles('src/**', 'verilog-model.ipkg', 'pack.toml') }}-latest-pack-collection
      - name: Touch ${{ env.BUILD_DIR }} dir (-_-)
        run: |
          set +e
          ls -la
          mkdir -p ${{ env.BUILD_DIR }}
      - name: Log in to the Container registry
        uses: docker/login-action@v3
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Extract metadata for Docker
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
          tags: |
            type=raw,value=latest,enable={{is_default_branch}}
      - name: Build and push Docker image
        id: push
        uses: docker/build-push-action@v6
        with:
          context: .
          file: .github/workflows/push/Dockerfile
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
      - name: Generate artifact attestation
        uses: actions/attest-build-provenance@v2
        with:
          subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
          subject-digest: ${{ steps.push.outputs.digest }}
          push-to-registry: true
  create-issue-on-failure:
    name: Create an issue on failure
    if: ${{ always() && (contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled')) && github.ref == 'refs/heads/master' }}
    runs-on: ubuntu-latest
    needs: [get-upstream-matrix, build-and-test, run-tools]
    permissions:
      contents: read
      issues: write
    steps:
      - name: Checkout
        uses: actions/checkout@v4
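      # MAINTAINERS is assumed to list one GitHub username per line; each entry is
      # turned into an @-mention for the issue body.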
      - name: Read and Format Usernames
        run: |
          TAGGED_USERS=$(awk '{printf "@%s ", $0}' MAINTAINERS)
          echo "TAGGED_USERS=${TAGGED_USERS}" >> $GITHUB_ENV
      - name: Create Issue
        uses: actions/github-script@v7
        with:
          script: |
            const issueTitle = `Workflow Failed: ${context.workflow} #${context.runNumber}`;
            const issueBody = `
            Workflow Run: [${context.runNumber}](${context.serverUrl}/${context.repo.owner}/${context.repo.repo}/actions/runs/${context.runId})
            Tagging: ${{ env.TAGGED_USERS }}
            Please check the logs for more details
            `;
            const { data: issue } = await github.rest.issues.create({
              owner: context.repo.owner,
              repo: context.repo.repo,
              title: issueTitle,
              body: issueBody,
            });