Internals: Improve coverage flow (#6526)

See added "Code coverage" section in docs/internals.rst
This commit is contained in:
Geza Lore 2025-10-03 18:18:24 +02:00 committed by GitHub
parent 62dbbbba85
commit ce0a05691b
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
26 changed files with 437 additions and 648 deletions

View File

@ -35,7 +35,8 @@ jobs:
os: ${{ matrix.os }}
os-name: linux
cc: ${{ matrix.cc }}
asan: ${{ matrix.asan }}
dev-asan: ${{ matrix.asan }}
dev-gcov: 0
strategy:
fail-fast: false
matrix:
@ -49,7 +50,8 @@ jobs:
os: ${{ matrix.os }}
os-name: linux
cc: ${{ matrix.cc }}
asan: ${{ matrix.asan }}
dev-asan: ${{ matrix.asan }}
dev-gcov: 0
strategy:
fail-fast: false
matrix:
@ -63,7 +65,8 @@ jobs:
os: ${{ matrix.os }}
os-name: linux
cc: ${{ matrix.cc }}
asan: ${{ matrix.asan }}
dev-asan: ${{ matrix.asan }}
dev-gcov: 0
strategy:
fail-fast: false
matrix:
@ -77,7 +80,8 @@ jobs:
os: ${{ matrix.os }}
os-name: linux
cc: ${{ matrix.cc }}
asan: ${{ matrix.asan }}
dev-asan: ${{ matrix.asan }}
dev-gcov: 0
strategy:
fail-fast: false
matrix:
@ -91,7 +95,8 @@ jobs:
os: ${{ matrix.os }}
os-name: osx
cc: ${{ matrix.cc }}
asan: ${{ matrix.asan }}
dev-asan: ${{ matrix.asan }}
dev-gcov: 0
strategy:
fail-fast: false
matrix:
@ -105,7 +110,8 @@ jobs:
os: ${{ matrix.os }}
os-name: osx
cc: ${{ matrix.cc }}
asan: ${{ matrix.asan }}
dev-asan: ${{ matrix.asan }}
dev-gcov: 0
strategy:
fail-fast: false
matrix:
@ -157,6 +163,7 @@ jobs:
cc: ${{ matrix.cc }}
reloc: ${{ matrix.reloc }}
suite: ${{ matrix.suite }}
dev-gcov: 0
strategy:
fail-fast: false
matrix:
@ -179,6 +186,7 @@ jobs:
cc: ${{ matrix.cc }}
reloc: ${{ matrix.reloc }}
suite: ${{ matrix.suite }}
dev-gcov: 0
strategy:
fail-fast: false
matrix:
@ -201,6 +209,7 @@ jobs:
cc: ${{ matrix.cc }}
reloc: ${{ matrix.reloc }}
suite: ${{ matrix.suite }}
dev-gcov: 0
strategy:
fail-fast: false
matrix:
@ -223,6 +232,7 @@ jobs:
cc: ${{ matrix.cc }}
reloc: ${{ matrix.reloc }}
suite: ${{ matrix.suite }}
dev-gcov: 0
strategy:
fail-fast: false
matrix:

View File

@ -2,7 +2,7 @@
# DESCRIPTION: Github actions config
# SPDX-License-Identifier: LGPL-3.0-only OR Artistic-2.0
name: coverage
name: Code coverage
on:
workflow_dispatch:
@ -12,85 +12,98 @@ on:
permissions:
contents: read
env:
CI_OS_NAME: linux
COVERAGE: 1
VERILATOR_ARCHIVE: verilator-coverage-${{ github.sha }}.tar.gz
defaults:
run:
shell: bash
working-directory: repo
jobs:
Build:
build:
name: Build
# Only run scheduled jobs if explicitly enabled for that repo (e.g.: not on forks)
if: ${{ github.event_name != 'schedule' || vars.ENABLE_SCHEDULED_JOBS == 'true' }}
runs-on: ubuntu-24.04
env:
CI_BUILD_STAGE_NAME: build
CI_RUNS_ON: ubuntu-24.04
steps:
uses: ./.github/workflows/reusable-build.yml
with:
os: ubuntu-24.04
os-name: linux
cc: gcc
dev-asan: 0
dev-gcov: 1
- name: Checkout
uses: actions/checkout@v5
with:
path: repo
- name: Install packages for build
run: ./ci/ci-install.bash
- name: Build
run: ./ci/ci-script.bash
- name: Tar up repository
working-directory: ${{ github.workspace }}
run: tar --posix -c -z -f ${{ env.VERILATOR_ARCHIVE }} repo
- name: Upload tar archive
uses: actions/upload-artifact@v4
with:
path: ${{ github.workspace }}/${{ env.VERILATOR_ARCHIVE }}
name: ${{ env.VERILATOR_ARCHIVE }}
Test:
needs: Build
test:
name: Test | ${{ matrix.test }}${{ matrix.num }}
needs: build
uses: ./.github/workflows/reusable-test.yml
with:
os: ubuntu-24.04
cc: gcc
reloc: 0
suite: ${{ matrix.test }}${{ matrix.num }}
dev-gcov: 1
strategy:
fail-fast: false
matrix:
test: [vlt-, vltmt-]
test: [coverage-vlt-, coverage-vltmt-]
num: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
include:
- {test: dist, num: ''}
runs-on: ubuntu-24.04
name: test-${{ matrix.test }}${{ matrix.num }}
env:
CI_BUILD_STAGE_NAME: test
CI_RUNS_ON: ubuntu-24.04
steps:
- {test: coverage-dist, num: ''}
- name: Download tar archive
publish:
name: Publish results to codecov.io
needs: test
if: ${{ contains(needs.*.result, 'success') && !cancelled() }}
runs-on: ubuntu-24.04
steps:
- name: Checkout
uses: actions/checkout@v5
- name: Download code coverage data
uses: actions/download-artifact@v5
with:
name: ${{ env.VERILATOR_ARCHIVE }}
path: ${{ github.workspace }}
pattern: code-coverage-*
path: obj_coverage
merge-multiple: true
- name: Unpack tar archive
working-directory: ${{ github.workspace }}
run: tar -x -z -f ${{ env.VERILATOR_ARCHIVE }}
- name: List files
id: list-files
run: |
ls -lsha obj_coverage
find obj_coverage -type f | paste -sd, | sed "s/^/files=/" >> "$GITHUB_OUTPUT"
- name: Install test dependencies
run: ./ci/ci-install.bash
- name: Upload to codecov.io
uses: codecov/codecov-action@v5
with:
disable_file_fixes: true
disable_search: true
fail_ci_if_error: true
files: ${{ steps.list-files.outputs.files }}
plugins: noop
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true
- name: Test
# Create GitHub issue for failed scheduled jobs
# This should always be the last job (we want an issue if anything breaks)
create-issue:
name: Create issue on failure
needs: publish
if: ${{ github.event_name == 'schedule' && github.repository == 'verilator/verilator' && github.run_attempt == 1 && failure() && !cancelled() }}
runs-on: ubuntu-24.04
steps:
# Creating issues requires elevated privilege
- name: Generate access token
id: generate-token
uses: actions/create-github-app-token@v2.1.4
with:
app-id: ${{ vars.VERILATOR_CI_ID }}
private-key: ${{ secrets.VERILATOR_CI_KEY }}
owner: verilator
repositories: verilator
permission-issues: write
- name: Create issue
env:
TESTS: coverage-${{ matrix.test }}${{ matrix.num }}
run: ./ci/ci-script.bash
- name: Upload coverage data to Codecov
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
GH_TOKEN: ${{ steps.generate-token.outputs.token }}
run: |-
find . -name '*.gcno' -exec rm {} \;
./ci/codecov -v upload-process -Z --sha ${{ github.sha }} -f nodist/obj_dir/coverage/app_total.info
echo "This issue was created automatically by the GitHub Actions CI due to the failure of a scheduled Code coverage run." >> body.txt
echo "" >> body.txt
echo "Workflow status: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}" >> body.txt
gh issue --repo ${{ github.repository }} create \
--title "Code coverage run #${{ github.run_number }} Failed" \
--body-file body.txt \
--label new \
--assignee gezalore,wsnyder

View File

@ -16,7 +16,10 @@ on:
os-name: # 'linux' or 'osx'
required: true
type: string
asan:
dev-asan:
required: true
type: number
dev-gcov:
required: true
type: number
@ -39,8 +42,9 @@ jobs:
runs-on: ${{ inputs.os }}
name: Build
env:
CI_ASAN: ${{ inputs.asan }}
CI_BUILD_STAGE_NAME: build
CI_DEV_ASAN: ${{ inputs.dev-asan }}
CI_DEV_GCOV: ${{ inputs.dev-gcov }}
CI_RUNS_ON: ${{ inputs.os }}
CC: ${{ inputs.cc }}
CXX: ${{ inputs.cc == 'clang' && 'clang++' || 'g++' }}
@ -48,7 +52,6 @@ jobs:
CCACHE_MAXSIZE: 1000M # Per build matrix entry (* 5 = 5000M in total)
VERILATOR_ARCHIVE: verilator-${{ github.sha }}-${{ inputs.os }}-${{ inputs.cc }}.tar.gz
steps:
- name: Checkout
uses: actions/checkout@v5
with:

View File

@ -19,6 +19,9 @@ on:
suite: # e.g. dist-vlt-0
required: true
type: string
dev-gcov:
required: true
type: number
env:
CI_OS_NAME: linux
@ -73,6 +76,25 @@ jobs:
run: ./ci/ci-install.bash
- name: Test
continue-on-error: true
env:
TESTS: ${{ inputs.suite }}
run: ./ci/ci-script.bash
- name: Combine code coverage data
if: ${{ inputs.dev-gcov }}
run: |
make coverage-combine
mv obj_coverage/verilator.info obj_coverage/verilator-${{ inputs.suite }}.info
ls -lsha obj_coverage
- name: Upload code coverage data
if: ${{ inputs.dev-gcov }}
uses: actions/upload-artifact@v4
with:
path: ${{ github.workspace }}/repo/obj_coverage/verilator-${{ inputs.suite }}.info
name: code-coverage-${{ inputs.suite }}
- name: Fail job if a test failed
if: ${{ failure() && !cancelled() }}
run: exit 1

1
.gitignore vendored
View File

@ -44,6 +44,7 @@ verilator_coverage_bin*
/.vscode/
/.idea/
/cmake-build-*/
/obj_coverage/
/test_regress/snapshot/
xmverilog.*
xrun.history

View File

@ -91,6 +91,7 @@ datarootdir = @datarootdir@
# Compile options
CFG_WITH_CCWARN = @CFG_WITH_CCWARN@
CFG_WITH_DEFENV = @CFG_WITH_DEFENV@
CFG_WITH_DEV_GCOV = @CFG_WITH_DEV_GCOV@
CFG_WITH_LONGTESTS = @CFG_WITH_LONGTESTS@
CFG_WITH_SOLVER = @CFG_WITH_SOLVER@
PACKAGE_VERSION = @PACKAGE_VERSION@
@ -515,7 +516,6 @@ PY_PROGRAMS = \
test_regress/*.py \
test_regress/t/*.pf \
nodist/clang_check_attributes \
nodist/code_coverage \
nodist/dot_importer \
nodist/fuzzer/actual_fail \
nodist/fuzzer/generate_dictionary \
@ -525,7 +525,6 @@ PY_PROGRAMS = \
# Python files, subject to format but not lint
PY_FILES = \
$(PY_PROGRAMS) \
nodist/code_coverage.dat \
test_regress/t/*.py \
# Python files, test_regress tests
@ -597,6 +596,108 @@ else
autoconf
endif
######################################################################
# Coverage collection and reporting
COVERAGE_DIR := obj_coverage
ifeq ($(CFG_WITH_DEV_GCOV),yes)
FASTCOV := nodist/fastcov.py
FASTCOV_OPT := -j $(shell nproc)
FASTCOV_OPT += --process-gcno
FASTCOV_OPT += --branch-coverage
FASTCOV_OPT += --dump-statistic
# Files matching the following glob patterns will be excluded from coverage
FASTCOV_OPT += --exclude-glob
FASTCOV_OPT += '/usr/*'
FASTCOV_OPT += '*src/obj_dbg/*'
FASTCOV_OPT += '*src/obj_opt/*.yy.cpp'
FASTCOV_OPT += '*src/obj_opt/V3Ast*'
FASTCOV_OPT += '*src/obj_opt/V3Dfg*'
FASTCOV_OPT += '*src/obj_opt/V3ParseBison.c'
FASTCOV_OPT += '*include/gtkwave/*'
FASTCOV_OPT += '*test_regress/*'
FASTCOV_OPT += '*examples/*'
# Lines *containing* these substrings will be excluded from *all* coverage
FASTCOV_OPT += --custom-exclusion-marker
FASTCOV_OPT += LCOV_EXCL_LINE
FASTCOV_OPT += VL_UNREACHABLE
FASTCOV_OPT += VL_DEFINE_DEBUG_FUNCTIONS
FASTCOV_OPT += VL_RTTI_IMPL
FASTCOV_OPT += V3ERROR_NA
FASTCOV_OPT += ASTGEN_MEMBERS
FASTCOV_OPT += v3fatalSrc
FASTCOV_OPT += VL_FATAL
FASTCOV_OPT += ERROR_RSVD_WORD
# Lines *starting* with these substrings will be excluded from *branch* coverage
FASTCOV_OPT += --exclude-br-lines-starting-with
FASTCOV_OPT += UINFO
FASTCOV_OPT += UASSERT
FASTCOV_OPT += NUM_ASSERT
FASTCOV_OPT += NUM_ASSERT
FASTCOV_OPT += BROKEN_RTN
FASTCOV_OPT += BROKEN_BASE_RTN
FASTCOV_OPT += SELF_CHECK
FASTCOV_OPT += 'if (VL_UNCOVERABLE'
FASTCOV_OPT += '} else if (VL_UNCOVERABLE'
GENHTML := genhtml
GENHTML_OPT := -j $(shell nproc)
GENHTML_OPT += --branch-coverage
GENHTML_OPT += --demangle-cpp
GENHTML_OPT += --rc branch_coverage=1
GENHTML_OPT += --rc genhtml_hi_limit=100
GENHTML_OPT += --ignore-errors negative
GENHTML_OPT += --header-title "Verilator code coverage report"
# There are loads (~20k combined), but using this seems fine on modern hardware
GCNO_FILES = $(shell find . -name '*.gcno')
GCDA_FILES = $(shell find . -name '*.gcda')
# Combine all .gcda coverage data files into lcov .info file
$(COVERAGE_DIR)/verilator.info: $(GCNO_FILES) $(GCDA_FILES)
@echo "####################################################################"
@echo "# fastcov: combining all .gcda files into lcov .info"
@echo "####################################################################"
mkdir -p $(COVERAGE_DIR)
/usr/bin/time -f "That took %E" \
$(FASTCOV) $(FASTCOV_OPT) --lcov --output $@
# Build coverage report
$(COVERAGE_DIR)/report/index.html: $(COVERAGE_DIR)/verilator.info
@echo "####################################################################"
@echo "# genhtml: Generating coverage report"
@echo "####################################################################"
/usr/bin/time -f "That took %E" \
$(GENHTML) $(GENHTML_OPT) --output-directory $(COVERAGE_DIR)/report $^
# Convenience targets
.PHONY: coverage-combine
coverage-combine: $(COVERAGE_DIR)/verilator.info
# Via recursive make, so the message is always printed
.PHONY: coverage-report
coverage-report:
@$(MAKE) --no-print-directory $(COVERAGE_DIR)/report/index.html
@echo "####################################################################"
@echo "# Coverage report is at: $(COVERAGE_DIR)/report/index.html"
@echo "# Use 'make coverage-view' to open it in your default browser"
@echo "####################################################################"
# Open coverage report in default web browser
.PHONY: coverage-view
coverage-view: $(COVERAGE_DIR)/report/index.html
open $<
# Deletes all coverage data files (.gcda)
.PHONY: coverage-zero
coverage-zero: # 'rm $(GCDA_FILES)' might fail with too many args
$(FASTCOV) --zerocounters
endif
######################################################################
# Clean
@ -622,7 +723,7 @@ clean mostlyclean distclean maintainer-clean::
rm -rf src/*.tidy include/*.tidy examples/*/*.tidy
rm -rf .ruff_cache
rm -rf nodist/fuzzer/dictionary
rm -rf nodist/obj_dir
rm -rf $(COVERAGE_DIR)
rm -rf verilator.txt
distclean maintainer-clean::

View File

@ -39,28 +39,27 @@ if [ "$CI_BUILD_STAGE_NAME" = "build" ]; then
##############################################################################
# Build verilator
if [ "$COVERAGE" != 1 ]; then
autoconf
CONFIGURE_ARGS="--enable-longtests --enable-ccwarn"
if [ "$CI_ASAN" = 1 ]; then
CONFIGURE_ARGS="$CONFIGURE_ARGS --enable-dev-asan"
CXX="$CXX -DVL_LEAK_CHECKS"
fi
./configure $CONFIGURE_ARGS --prefix="$INSTALL_DIR"
ccache -z
"$MAKE" -j "$NPROC" -k
# 22.04: ccache -s -v
ccache -s
if [ "$CI_OS_NAME" = "osx" ]; then
file bin/verilator_bin
file bin/verilator_bin_dbg
md5 bin/verilator_bin
md5 bin/verilator_bin_dbg
stat bin/verilator_bin
stat bin/verilator_bin_dbg
fi
else
nodist/code_coverage --stages 0-2
autoconf
CONFIGURE_ARGS="--enable-longtests --enable-ccwarn"
if [ "$CI_DEV_ASAN" = 1 ]; then
CONFIGURE_ARGS="$CONFIGURE_ARGS --enable-dev-asan"
CXX="$CXX -DVL_LEAK_CHECKS"
fi
if [ "$CI_DEV_GCOV" = 1 ]; then
CONFIGURE_ARGS="$CONFIGURE_ARGS --enable-dev-gcov"
fi
./configure $CONFIGURE_ARGS --prefix="$INSTALL_DIR"
ccache -z
"$MAKE" -j "$NPROC" -k
# 22.04: ccache -s -v
ccache -s
if [ "$CI_OS_NAME" = "osx" ]; then
file bin/verilator_bin
file bin/verilator_bin_dbg
md5 bin/verilator_bin
md5 bin/verilator_bin_dbg
stat bin/verilator_bin
stat bin/verilator_bin_dbg
fi
elif [ "$CI_BUILD_STAGE_NAME" = "test" ]; then
##############################################################################
@ -134,71 +133,68 @@ elif [ "$CI_BUILD_STAGE_NAME" = "test" ]; then
vltmt-2)
"$MAKE" -C "$TEST_REGRESS" SCENARIOS="--vltmt --driver-clean" DRIVER_HASHSET=--hashset=2/3
;;
coverage-all)
nodist/code_coverage --stages 1-
;;
coverage-dist)
nodist/code_coverage --stages 1- --scenarios=--dist
"$MAKE" -C "$TEST_REGRESS" SCENARIOS="--dist"
;;
coverage-vlt-0)
nodist/code_coverage --stages 1- --scenarios=--vlt --hashset=0/10
"$MAKE" -C "$TEST_REGRESS" SCENARIOS="--vlt" DRIVER_HASHSET=--hashset=0/10
;;
coverage-vlt-1)
nodist/code_coverage --stages 1- --scenarios=--vlt --hashset=1/10
"$MAKE" -C "$TEST_REGRESS" SCENARIOS="--vlt" DRIVER_HASHSET=--hashset=1/10
;;
coverage-vlt-2)
nodist/code_coverage --stages 1- --scenarios=--vlt --hashset=2/10
"$MAKE" -C "$TEST_REGRESS" SCENARIOS="--vlt" DRIVER_HASHSET=--hashset=2/10
;;
coverage-vlt-3)
nodist/code_coverage --stages 1- --scenarios=--vlt --hashset=3/10
"$MAKE" -C "$TEST_REGRESS" SCENARIOS="--vlt" DRIVER_HASHSET=--hashset=3/10
;;
coverage-vlt-4)
nodist/code_coverage --stages 1- --scenarios=--vlt --hashset=4/10
"$MAKE" -C "$TEST_REGRESS" SCENARIOS="--vlt" DRIVER_HASHSET=--hashset=4/10
;;
coverage-vlt-5)
nodist/code_coverage --stages 1- --scenarios=--vlt --hashset=5/10
"$MAKE" -C "$TEST_REGRESS" SCENARIOS="--vlt" DRIVER_HASHSET=--hashset=5/10
;;
coverage-vlt-6)
nodist/code_coverage --stages 1- --scenarios=--vlt --hashset=6/10
"$MAKE" -C "$TEST_REGRESS" SCENARIOS="--vlt" DRIVER_HASHSET=--hashset=6/10
;;
coverage-vlt-7)
nodist/code_coverage --stages 1- --scenarios=--vlt --hashset=7/10
"$MAKE" -C "$TEST_REGRESS" SCENARIOS="--vlt" DRIVER_HASHSET=--hashset=7/10
;;
coverage-vlt-8)
nodist/code_coverage --stages 1- --scenarios=--vlt --hashset=8/10
"$MAKE" -C "$TEST_REGRESS" SCENARIOS="--vlt" DRIVER_HASHSET=--hashset=8/10
;;
coverage-vlt-9)
nodist/code_coverage --stages 1- --scenarios=--vlt --hashset=9/10
"$MAKE" -C "$TEST_REGRESS" SCENARIOS="--vlt" DRIVER_HASHSET=--hashset=9/10
;;
coverage-vltmt-0)
nodist/code_coverage --stages 1- --scenarios=--vltmt --hashset=0/10
"$MAKE" -C "$TEST_REGRESS" SCENARIOS="--vltmt" DRIVER_HASHSET=--hashset=0/10
;;
coverage-vltmt-1)
nodist/code_coverage --stages 1- --scenarios=--vltmt --hashset=1/10
"$MAKE" -C "$TEST_REGRESS" SCENARIOS="--vltmt" DRIVER_HASHSET=--hashset=1/10
;;
coverage-vltmt-2)
nodist/code_coverage --stages 1- --scenarios=--vltmt --hashset=2/10
"$MAKE" -C "$TEST_REGRESS" SCENARIOS="--vltmt" DRIVER_HASHSET=--hashset=2/10
;;
coverage-vltmt-3)
nodist/code_coverage --stages 1- --scenarios=--vltmt --hashset=3/10
"$MAKE" -C "$TEST_REGRESS" SCENARIOS="--vltmt" DRIVER_HASHSET=--hashset=3/10
;;
coverage-vltmt-4)
nodist/code_coverage --stages 1- --scenarios=--vltmt --hashset=4/10
"$MAKE" -C "$TEST_REGRESS" SCENARIOS="--vltmt" DRIVER_HASHSET=--hashset=4/10
;;
coverage-vltmt-5)
nodist/code_coverage --stages 1- --scenarios=--vltmt --hashset=5/10
"$MAKE" -C "$TEST_REGRESS" SCENARIOS="--vltmt" DRIVER_HASHSET=--hashset=5/10
;;
coverage-vltmt-6)
nodist/code_coverage --stages 1- --scenarios=--vltmt --hashset=6/10
"$MAKE" -C "$TEST_REGRESS" SCENARIOS="--vltmt" DRIVER_HASHSET=--hashset=6/10
;;
coverage-vltmt-7)
nodist/code_coverage --stages 1- --scenarios=--vltmt --hashset=7/10
"$MAKE" -C "$TEST_REGRESS" SCENARIOS="--vltmt" DRIVER_HASHSET=--hashset=7/10
;;
coverage-vltmt-8)
nodist/code_coverage --stages 1- --scenarios=--vltmt --hashset=8/10
"$MAKE" -C "$TEST_REGRESS" SCENARIOS="--vltmt" DRIVER_HASHSET=--hashset=8/10
;;
coverage-vltmt-9)
nodist/code_coverage --stages 1- --scenarios=--vltmt --hashset=9/10
"$MAKE" -C "$TEST_REGRESS" SCENARIOS="--vltmt" DRIVER_HASHSET=--hashset=9/10
;;
*)
fatal "Unknown test: $TESTS"

Binary file not shown.

View File

@ -92,19 +92,20 @@ else
AC_MSG_RESULT($CFG_WITH_TCMALLOC)
fi
# Flag to enable coverage build
AC_MSG_CHECKING(whether to build for coverage collection)
AC_ARG_ENABLE([coverage],
[AS_HELP_STRING([--enable-coverage],
[Build Verilator for code coverage collection.
# Flag to enable code coverage build with gcov
AC_MSG_CHECKING(whether to build for gcov code coverage collection)
AC_ARG_ENABLE([dev-gcov],
[AS_HELP_STRING([--enable-dev-gcov],
[Build Verilator for code coverage collection with gcov.
For developers only.])],
[case "${enableval}" in
yes) CFG_ENABLE_COVERAGE=yes ;;
no) CFG_ENABLE_COVERAGE=no ;;
*) AC_MSG_ERROR([bad value '${enableval}' for --enable-coverage]) ;;
yes) CFG_WITH_DEV_GCOV=yes ;;
no) CFG_WITH_DEV_GCOV=no ;;
*) AC_MSG_ERROR([bad value '${enableval}' for --enable-dev-gcov]) ;;
esac],
CFG_ENABLE_COVERAGE=no)
AC_MSG_RESULT($CFG_ENABLE_COVERAGE)
CFG_WITH_DEV_GCOV=no)
AC_SUBST(CFG_WITH_DEV_GCOV)
AC_MSG_RESULT($CFG_WITH_DEV_GCOV)
# Special Substitutions - CFG_WITH_DEFENV
AC_MSG_CHECKING(whether to use hardcoded paths)
@ -399,7 +400,7 @@ AC_DEFUN([_MY_LDLIBS_CHECK_OPT],
])
# Add the coverage flags early as they influence later checks.
if test "$CFG_ENABLE_COVERAGE" = "yes"; then
if test "$CFG_WITH_DEV_GCOV" = "yes"; then
_MY_CXX_CHECK_OPT(CXX,--coverage)
# Otherwise inline may not show as uncovered
# If we use this then e.g. verilated.h functions properly show up
@ -411,11 +412,19 @@ if test "$CFG_ENABLE_COVERAGE" = "yes"; then
# _MY_CXX_CHECK_OPT(CXX,-fkeep-inline-functions)
# Otherwise static may not show as uncovered
_MY_CXX_CHECK_OPT(CXX,-fkeep-static-functions)
# Exceptions can pollute the branch coverage data
_MY_CXX_CHECK_OPT(CXX,-fno-exceptions)
# Define-out some impossible stuff
# Similarly for inline functions. - This is too slow. See Makefile_obj instead.
#_MY_CXX_CHECK_OPT(CXX,-fkeep-inline-functions)
# Make sure profiling is thread-safe
_MY_CXX_CHECK_OPT(CXX,-fprofile-update=atomic)
# Ensure data files can be written from parallel runs
_MY_CXX_CHECK_OPT(CXX,-fprofile-reproducible=parallel-runs)
# Save source files as absolute paths in gcno files
_MY_CXX_CHECK_OPT(CXX,-fprofile-abs-path)
# Define so compiled code can know
_MY_CXX_CHECK_OPT(CXX,-DVL_GCOV)
AC_DEFINE([HAVE_DEV_GCOV],[1],[Defined if compiled with code coverage collection for gcov])]
fi
AC_SUBST(HAVE_DEV_GCOV)
# Compiler flags to enable profiling
_MY_CXX_CHECK_OPT(CFG_CXXFLAGS_PROFILE,-pg)
@ -476,16 +485,22 @@ _MY_CXX_CHECK_OPT(CFG_CXXFLAGS_PARSER,-Wno-unused)
AC_SUBST(CFG_CXXFLAGS_PARSER)
# Flags for compiling the debug version of Verilator (in addition to above CFG_CXXFLAGS_SRC)
if test "$CFG_ENABLE_COVERAGE" = "no"; then # Do not optimize for the coverage build
_MY_CXX_CHECK_OPT(CFG_CXXFLAGS_DEBUG,-Og)
if test "$CFG_WITH_DEV_GCOV" = "no"; then # Do not optimize for the coverage build
_MY_CXX_CHECK_OPT(CFG_CXXFLAGS_DBG,-Og)
fi
_MY_CXX_CHECK_OPT(CFG_CXXFLAGS_DEBUG,-ggdb)
_MY_CXX_CHECK_OPT(CFG_CXXFLAGS_DEBUG,-gz)
AC_SUBST(CFG_CXXFLAGS_DEBUG)
_MY_CXX_CHECK_OPT(CFG_CXXFLAGS_DBG,-ggdb)
_MY_CXX_CHECK_OPT(CFG_CXXFLAGS_DBG,-gz)
AC_SUBST(CFG_CXXFLAGS_DBG)
# Flags for linking the debug version of Verilator (in addition to above CFG_LDFLAGS_SRC)
_MY_LDLIBS_CHECK_OPT(CFG_LDFLAGS_DEBUG,-gz)
AC_SUBST(CFG_LDFLAGS_DEBUG)
_MY_LDLIBS_CHECK_OPT(CFG_LDFLAGS_DBG,-gz)
AC_SUBST(CFG_LDFLAGS_DBG)
# Flags for compiling the optimized version of Verilator (in addition to above CFG_CXXFLAGS_SRC)
if test "$CFG_WITH_DEV_GCOV" = "no"; then # Do not optimize for the coverage build
_MY_CXX_CHECK_OPT(CFG_CXXFLAGS_OPT,-O3)
fi
AC_SUBST(CFG_CXXFLAGS_OPT)
# Flags for Verilated makefile
# For example, -Wno-div-by-zero isn't in 4.1.2

View File

@ -780,7 +780,8 @@ Summary:
be useful in makefiles. See also :vlopt:`-V`, and the various
:file:`*.mk` files.
Feature may be one of the following: COROUTINES, DEV_ASAN, SYSTEMC.
Feature may be one of the following: COROUTINES, DEV_ASAN, DEV_GCOV,
SYSTEMC.
.. option:: --getenv <variable>

View File

@ -1656,6 +1656,45 @@ significant variance. Experience shows that a ~20% time difference can be
reliably measured on GitHub hosted runners, and smaller differences are
noticeable over a few days of reruns as trends emerge from the noise.
Code coverage
-------------
Code coverage for developing Verilator itself can be collected using ``gcc``
and ``gcov`` with the following flow. Note that configuring with
``--enable-dev-gcov`` disables optimization for both the debug and optimized
builds of Verilator, so running the resulting executables can be slow:
.. code:: shell
./configure --enable-longtests --enable-dev-gcov
make -j$(nproc) # Build verilator
make test # Run the tests - this will generate .gcda files
make coverage-view # Create and open HTML coverage report
The ``coverage-view`` make target opens the generated coverage report. This
depends on ``coverage-report``, which generates the HTML coverage report. That
in turn depends on ``coverage-combine``, which combines all ``.gcda`` files into
an lcov data file. Each of these make targets can be used separately if
desired.
You can also use the ``coverage-zero`` target to remove all ``.gcda`` files,
which in effect clears all collected coverage data. This can be useful when
checking individual test cases while modifying them.
To collect coverage only for select tests, instead of ``make test``, run the
individual ``test_regress/t/*.py`` cases, then use ``make coverage-view`` as
before.
Note that every time a binary built with coverage collection is invoked, it will
add the coverage data collected during that invocation to the existing
``.gcda`` files. This means that ``verilator`` can also be run on any other
external files or projects to collect code coverage on those invocations.
Be aware that if a test is changed so that it no longer covers some item, the
report will still contain the old coverage. Use ``make coverage-zero`` and
rerun all tests if this is a concern.
Fuzzing
-------

View File

@ -21,6 +21,7 @@ PYTHON3 = @PYTHON3@
# Configuration time options
CFG_WITH_CCWARN = @CFG_WITH_CCWARN@
CFG_WITH_DEV_GCOV = @CFG_WITH_DEV_GCOV@
CFG_WITH_LONGTESTS = @CFG_WITH_LONGTESTS@
# Compiler version found during configure. This make variable is not used
@ -144,6 +145,13 @@ OPT_FAST = -Os
# to change this as the library is small, but can have significant speed impact.
OPT_GLOBAL = -Os
# Disable optimization when collecing code coverage for Verilator itself
ifeq ($(CFG_WITH_DEV_GCOV),yes)
OPT_SLOW = -O0
OPT_FAST = -O0
OPT_GLOBAL = -O0
endif
#######################################################################
##### Profile builds

View File

@ -1,405 +0,0 @@
#!/usr/bin/env python3
# pylint: disable=C0103,C0114,C0115,C0116,C0209,R0912,R0914,R0915,W0125,W0621,exec-used
######################################################################
import argparse
import glob
import multiprocessing
import os
import re
import subprocess
import sys
# Directory containing this script (used to locate the sibling fastcov.py)
RealPath = os.path.dirname(os.path.realpath(__file__))

# Configuration lists, populated by nodist/code_coverage.dat through the
# callback functions defined near the bottom of this file
Exclude_Branch_Regexps = []  # Line regexps excluded from branch coverage only
Exclude_Line_Regexps = []  # Line regexps excluded from all coverage
Remove_Gcda_Regexps = []  # .gcda paths deleted outright before processing
Remove_Sources = []  # Source path globs removed from the final report
Source_Globs = []  # Source path globs cloned and annotated for the report

# Default VERILATOR_ROOT to the current directory when not already set;
# later stages use it to relativize absolute paths in coverage records
if 'VERILATOR_ROOT' not in os.environ:
    os.environ['VERILATOR_ROOT'] = os.getcwd()
######################################################################
def test():
    """Drive the numbered, individually-selectable coverage collection stages.

    Loads the configuration callbacks from nodist/code_coverage.dat, then runs
    whichever stages are enabled in Args.stage_enabled (Args is presumably the
    argparse namespace built elsewhere in this file -- not visible here):
    configure/build/test with coverage instrumentation, .gcno/.gcda collation,
    lcov .info generation via fastcov, source cloning and filtering, HTML
    report creation, and a (stubbed) codecov upload.
    """
    # Must be run from the top of a Verilator checkout
    if not os.path.exists("nodist/code_coverage.dat"):
        sys.exit("%Error: Run code_coverage from the top of the verilator kit")
    # The .dat file calls the module-level callbacks (source_globs() etc.)
    # to populate the exclusion/removal configuration lists above
    exec(open("./nodist/code_coverage.dat", "r", encoding="utf8").read())  # pylint: disable=consider-using-with
    if Args.stage_enabled[0]:
        ci_fold_start("distclean")
        print("Stage 0: distclean")
        run("make distclean || true")
        ci_fold_end()
    if Args.stage_enabled[1]:
        ci_fold_start("configure")
        print("Stage 1: configure (coverage on)")
        run("autoconf")
        run("./configure --enable-longtests --enable-coverage CXX=g++")
        ci_fold_end()
    if Args.stage_enabled[2]:
        ci_fold_start("build")
        print("Stage 2: build")
        nproc = multiprocessing.cpu_count()
        run("make -k -j " + str(nproc) + " VERILATOR_NO_OPT_BUILD=1")
        # The optimized versions will not collect good coverage, overwrite them
        run("cp bin/verilator_bin_dbg bin/verilator_bin")
        run("cp bin/verilator_coverage_bin_dbg bin/verilator_coverage_bin")
        ci_fold_end()
    if Args.stage_enabled[3]:
        ci_fold_start("test")
        print("Stage 3: make tests (with coverage on)")
        if not Args.tests:
            # Full run: examples only for 'dist'-like scenarios (or all)
            if not Args.scenarios or re.match('dist', Args.scenarios):
                run("make examples VERILATOR_NO_OPT_BUILD=1")
            # '|| true' keeps going past test failures unless --stop was given
            run("make test_regress VERILATOR_NO_OPT_BUILD=1" +
                (" SCENARIOS='" + Args.scenarios + "'" if Args.scenarios else "") +
                (" DRIVER_HASHSET='--hashset=" + Args.hashset + "'" if Args.hashset else "") +
                ('' if Args.stop else ' || true'))
        else:
            # Run only the explicitly named tests, resolving bare names
            # against test_regress/t/
            for test in Args.tests:
                if not os.path.exists(test) and os.path.exists("test_regress/t/" + test):
                    test = "test_regress/t/" + test
                run(test)
        ci_fold_end()
    cc_dir = "nodist/obj_dir/coverage"
    if Args.stage_enabled[4]:
        ci_fold_start("gcno")
        print("Stage 4: Create gcno files under " + cc_dir)
        os.makedirs(cc_dir, exist_ok=True)
        os.makedirs(cc_dir + "/info", exist_ok=True)
        # Collect all .gcda data files in the tree
        with subprocess.Popen("find . -print | grep .gcda", shell=True,
                              stdout=subprocess.PIPE) as sp:
            datout = sp.stdout.read()
        dats = {}
        for dat in datout.splitlines():
            dat = dat.decode('utf-8')
            dats[dat] = 1
        # sorted() returns a list, so deleting from dats while looping is safe
        for dat in sorted(dats.keys()):
            gcno = re.sub(r'\.gcda$', '.gcno', dat)
            for regexp in Remove_Gcda_Regexps:
                if re.search(regexp, dat):
                    # Remove .gcda/.gcno for files we don't care about before we slowly
                    # read them
                    unlink_ok(dat)
                    unlink_ok(gcno)
                    del dats[dat]
                    break
        # Index every .gcno in the tree by basename for the matching step below
        with subprocess.Popen("find . -print | grep .gcno", shell=True,
                              stdout=subprocess.PIPE) as sp:
            datout = sp.stdout.read()
        gcnos = {}
        for gcno in datout.splitlines():
            gcno = gcno.decode('utf-8')
            gbase = re.sub(r'.*/', '', gcno, count=1)
            gcnos[gbase] = os.path.abspath(gcno)
        # We need a matching .gcno for every .gcda, try to find a matching file elsewhere
        for dat in sorted(dats):
            gcno = re.sub(r'\.gcda$', '.gcno', dat)
            gbase = re.sub(r'.*/', '', gcno, count=1)
            if not os.path.exists(gcno):
                if gbase in gcnos:
                    os.symlink(gcnos[gbase], gcno)
                else:
                    print("MISSING .gcno for a .gcda: " + gcno, file=sys.stderr)
        ci_fold_end()
    if Args.stage_enabled[5]:
        ci_fold_start("fastcov")
        # Must run in root directory to find all files
        os.makedirs(cc_dir, exist_ok=True)
        run(RealPath + "/fastcov.py -b -c src/obj_dbg -X --lcov" +
            # " --exclude /usr --exclude test_regress" + " -o " + cc_dir +
            " -o " + cc_dir + "/app_total.info")
        # For debug to convert single .gcna/.gcno in a directory to cov.info:
        # lcov -c -d . -o cov.info
        ci_fold_end()
    if Args.stage_enabled[6]:
        ci_fold_start("clone")
        # No control file to override single lines, so replicate the sources
        # Also lets us see the insertion markers in the HTML source res
        print("Stage 6: Clone sources under " + cc_dir)
        clone_sources(cc_dir)
        ci_fold_end()
    if Args.stage_enabled[11]:
        ci_fold_start("dirs")
        print("Stage 11: Cleanup paths")
        # In-place rewrite: input and output are the same .info file
        cleanup_abs_paths_info(cc_dir, cc_dir + "/app_total.info", cc_dir + "/app_total.info")
        ci_fold_end()
    if Args.stage_enabled[12]:
        ci_fold_start("filter")
        print("Stage 12: Filter processed source files")
        inc = ''
        for globf in Source_Globs:
            for infile in glob.glob(globf):
                inc += " '" + infile + "'"
        exc = ''
        for globf in Remove_Sources:
            # Fastcov does exact match not globbing at present
            # Lcov requires whole path match so needs the glob
            globf = re.sub(r'^\*', '', globf)
            globf = re.sub(r'\*$', '', globf)
            exc += " '" + globf + "'"
        if inc != '':
            inc = "--include " + inc
        if exc != '':
            exc = "--exclude " + exc
        run("cd " + cc_dir + " ; " + RealPath + "/fastcov.py -C app_total.info " + inc + " " +
            exc + " -x --lcov -o app_total_f.info")
        ci_fold_end()
    if Args.stage_enabled[17]:
        ci_fold_start("report")
        print("Stage 17: Create HTML")
        run("cd " + cc_dir + " ; genhtml app_total_f.info --demangle-cpp" +
            " --rc lcov_branch_coverage=1 --rc genhtml_hi_limit=100 --output-directory html")
        ci_fold_end()
    if Args.stage_enabled[18]:
        ci_fold_start("upload")
        print("Stage 18: Upload")
        # curl -Os https://cli.codecov.io/latest/linux/codecov ; sudo chmod +x codecov
        # --disable-search does not seem to work
        # -d with false directory does not seem to work
        # So, remove gcno files before calling codecov
        upload_dir = "nodist/obj_dir/upload"
        os.makedirs(upload_dir, exist_ok=True)
        # NOTE(review): the trailing ')' below looks like a typo; the command
        # is only printed for the operator, never executed -- confirm intent
        cmd = "ci/codecov -v upload-process -Z" + " -f " + cc_dir + "/app_total.info )"
        print("print: Not running:")
        print(" export CODECOV_TOKEN=<hidden>")
        print(" find . -name '*.gcno' -exec rm {} \\;")
        print(" " + cmd)
        ci_fold_end()
    if Args.stage_enabled[19]:
        print("*-* All Finished *-*")
        print("")
        print("* See report in " + cc_dir + "/html/index.html")
        print("* Remember to make distclean && ./configure before working on non-coverage")
def clone_sources(cc_dir):
    """Copy every Source_Globs file under *cc_dir*, annotating lines with
    LCOV exclusion markers.

    Lines already carrying LCOV_EXCL_LINE/START/STOP get the matching
    branch-exclusion marker appended; lines matching Exclude_Line_Regexps are
    excluded from all coverage; lines matching Exclude_Branch_Regexps from
    branch coverage only.  Prints a summary count of each kind of exclusion.
    """
    excluded_lines = 0
    excluded_br_lines = 0
    for globf in Source_Globs:
        for infile in glob.glob(globf):
            # Globs must be relative so the clone tree mirrors the source tree
            if re.match(r'^/', infile):
                sys.exit("%Error: source globs should be relative not absolute filenames, " +
                         infile)
            outfile = cc_dir + "/" + infile
            # Strip the final path component to get the output directory
            outpath = re.sub(r'/[^/]*$', '', outfile, count=1)
            os.makedirs(outpath, exist_ok=True)
            with open(infile, "r", encoding="utf8") as fh:
                with open(outfile, "w", encoding="utf8") as ofh:
                    lineno = 0
                    for line in fh:
                        lineno += 1
                        line = line.rstrip()
                        done = False
                        # Existing LCOV line markers also get branch markers
                        if re.search(r'LCOV_EXCL_LINE', line):
                            line += " LCOV_EXCL_BR_LINE"
                            done = True
                        elif re.search(r'LCOV_EXCL_START', line):
                            line += " LCOV_EXCL_BR_START"
                            done = True
                        elif re.search(r'LCOV_EXCL_STOP', line):
                            line += " LCOV_EXCL_BR_STOP"
                            done = True
                        # First matching rule wins; line exclusion beats branch
                        for regexp in Exclude_Line_Regexps:
                            if done:
                                break
                            if re.search(regexp, line):
                                # print("%s:%d: %s" % (infile, lineno, line)
                                line += " //code_coverage: // LCOV_EXCL_LINE LCOV_EXCL_BR_LINE"
                                excluded_lines += 1
                                excluded_br_lines += 1
                                done = True
                        for regexp in Exclude_Branch_Regexps:
                            if done:
                                break
                            if re.search(regexp, line):
                                # print("%s:%d: %s" % (infile, lineno, line)
                                line += " //code_coverage: // LCOV_EXCL_BR_LINE"
                                excluded_br_lines += 1
                                done = True
                        ofh.write(line + "\n")
    print("Number of source lines automatically LCOV_EXCL_LINE'ed: %d" % excluded_lines)
    print("Number of source lines automatically LCOV_EXCL_BR_LINE'ed: %d" % excluded_br_lines)
def cleanup_abs_paths_info(cc_dir, infile, outfile):
    """Rewrite 'SF:' source-file entries of an lcov .info file to be relative.

    Entries under /usr are left untouched. The $VERILATOR_ROOT and cc_dir
    prefixes are stripped, then well-known subtrees are normalized.

    Fix: the environment/cc_dir paths are now matched literally via
    re.escape(); previously they were interpolated into the regex unescaped,
    so a path containing regex metacharacters ('+', '.', '(', ...) was
    silently never stripped.
    """
    lines = []
    with open(infile, "r", encoding="utf8") as fh:
        for line in fh:
            if line.startswith('SF:') and not line.startswith('SF:/usr/'):
                line = re.sub(re.escape(os.environ['VERILATOR_ROOT'] + '/'), '', line, count=1)
                line = re.sub(re.escape(cc_dir + '/'), '', line, count=1)
                line = re.sub(r'^SF:.*?/include/', 'SF:include/', line, count=1)
                line = re.sub(r'^SF:.*?/src/', 'SF:src/', line, count=1)
                line = re.sub(r'^SF:.*?/test_regress/', 'SF:test_regress/', line, count=1)
                # The obj_dbg copy of the grammar maps back to the original source
                line = re.sub(r'obj_dbg/verilog\.y$', 'verilog.y', line)
                # print("Remaining SF: " + line)
            lines.append(line)
    with open(outfile, "w", encoding="utf8") as ofh:
        ofh.writelines(lines)
def cleanup_abs_paths_json(cc_dir, infile, outfile):
    """Strip absolute path prefixes from a fastcov JSON report.

    Handcrafted textual cleanup; deserializing/serializing the JSON would be
    much slower. Paths appear as '"<abs>/...' string prefixes, so plain
    substring replacement suffices.

    Fix: the prefixes are now replaced literally via str.replace(); the old
    re.sub() treated path characters such as '+' or '(' as regex
    metacharacters and silently failed to strip such paths.
    """
    root_prefix = '"' + os.environ['VERILATOR_ROOT'] + '/'
    cc_prefix = '"' + cc_dir + '/'
    lines = []
    with open(infile, "r", encoding="utf8") as fh:
        for line in fh:
            line = line.replace(root_prefix, '"')
            line = line.replace(cc_prefix, '"')
            # The obj_dbg copy of the grammar maps back to the original source
            line = re.sub(r'obj_dbg/verilog\.y$', 'verilog.y', line)
            lines.append(line)
    with open(outfile, "w", encoding="utf8") as ofh:
        ofh.writelines(lines)
######################################################################
# .dat file callbacks
def exclude_branch_regexp(*regexps):
    """Control-file callback: lines matching any regexp are excluded from
    branch coverage only."""
    for rx in regexps:
        Exclude_Branch_Regexps.append(rx)
def exclude_line_regexp(*regexps):
    """Control-file callback: lines matching any regexp are excluded from
    line (and hence function/branch) coverage."""
    for rx in regexps:
        Exclude_Line_Regexps.append(rx)
def remove_gcda_regexp(*regexps):
    """Control-file callback: .gcda files whose paths match any regexp are
    deleted before coverage aggregation."""
    for rx in regexps:
        Remove_Gcda_Regexps.append(rx)
def remove_source(*sources):
    """Control-file callback: register source path patterns to drop from the
    coverage report."""
    for src in sources:
        Remove_Sources.append(src)
def source_globs(*dirs):
    """Control-file callback: register glob patterns of source files to be
    cloned and annotated for coverage."""
    for pattern in dirs:
        Source_Globs.append(pattern)
#######################################################################
def run(command):
    """Echo then execute a shell command.

    NOTE(review): only a negative status (command killed by a signal) raises;
    an ordinary non-zero exit code is tolerated — presumably deliberate so
    coverage collection can continue past failing tests (cf. --no-stop).
    """
    print("\t" + command)
    if subprocess.call(command, shell=True) < 0:
        raise RuntimeError("%Error: Command failed " + command + ", stopped")
def unlink_ok(filename):
    """Delete *filename* if possible; ignore any OS-level failure
    (missing file, permissions, ...)."""
    try:
        os.remove(filename)
    except OSError:
        pass
def ci_fold_start(action):
    """Open a collapsible GitHub Actions log group named *action*."""
    print("::group::{}".format(action), flush=True)
def ci_fold_end():
    """Close the current GitHub Actions log group (blank line separates folds)."""
    sys.stdout.write("::endgroup::\n\n")
    sys.stdout.flush()
#######################################################################
#######################################################################
# ---------------------------------------------------------------------------
# Command-line interface. Option strings are user-visible; keep text as-is.
parser = argparse.ArgumentParser(
    allow_abbrev=False,
    formatter_class=argparse.RawDescriptionHelpFormatter,
    description="""code_coverage builds Verilator with C++ coverage support and runs
tests with coverage enabled. This will rebuild the current object
files. Run as:
cd $VERILATOR_ROOT
nodist/code_coverage""",
    epilog="""Copyright 2019-2025 by Wilson Snyder. This program is free software; you
can redistribute it and/or modify it under the terms of either the GNU
Lesser General Public License Version 3 or the Perl Artistic License
Version 2.0.
SPDX-License-Identifier: LGPL-3.0-only OR Artistic-2.0""")
parser.add_argument('--debug', action='store_true', help='enable debug')
# --hashset and --scenarios are forwarded verbatim to the driver.py harness
parser.add_argument('--hashset',
                    action='store',
                    help='pass test hashset onto driver.py test harness')
parser.add_argument('--scenarios',
                    action='store',
                    help='pass test scenarios onto driver.py test harness')
# Stage selection, decoded into Args.stage_enabled below ('N', 'N-M', '-M', 'N-')
parser.add_argument('--stages',
                    '--stage',
                    action='store',
                    help='runs a specific stage or range of stages (see the script)')
parser.add_argument(
    '--tests',
    '--test',
    action='append',
    default=[],
    help='Instead of normal regressions, run the specified test(s), may be used multiple times')
parser.add_argument('--no-stop',
                    dest='stop',
                    action='store_false',
                    help='do not stop collecting data if tests fail')
parser.set_defaults(stop=True)  # default: stop when tests fail

Args = parser.parse_args()
# Decode --stages into the Args.stage_enabled dict, then run the flow.
# Accepted forms: 'N' (one stage), 'N-M' (range), '-M' (up to M), 'N-' (from N).
first = 0
last = 99
if Args.stages:
    only_m = re.match(r'^(\d+)$', Args.stages)
    range_m = re.match(r'^(\d+)-(\d+)$', Args.stages)
    upto_m = re.match(r'^-(\d+)$', Args.stages)
    since_m = re.match(r'^(\d+)-$', Args.stages)
    if only_m:
        first = last = int(only_m.group(1))
    elif range_m:
        first = int(range_m.group(1))
        last = int(range_m.group(2))
    elif upto_m:
        last = int(upto_m.group(1))
    elif since_m:
        first = int(since_m.group(1))
    else:
        sys.exit("%Error: --stages not understood: " + Args.stages)
Args.stage_enabled = {n: False for n in range(0, 100)}
for n in range(first, last + 1):
    Args.stage_enabled[n] = True

test()
######################################################################
# Local Variables:
# compile-command: "cd .. ; nodist/code_coverage "
# End:

View File

@ -1,61 +0,0 @@
# -*- Python -*-
# DESCRIPTION: Verilator: Internal C++ code lcov control file
#
# Copyright 2019-2025 by Wilson Snyder. This program is free software; you
# can redistribute it and/or modify it under the terms of either the GNU
# Lesser General Public License Version 3 or the Perl Artistic License
# Version 2.0.
# SPDX-License-Identifier: LGPL-3.0-only OR Artistic-2.0
source_globs("src/*.cpp")
source_globs("src/*.h")
source_globs("src/*.l")
source_globs("src/*.y")
source_globs("src/obj_dbg/*.h")
source_globs("src/obj_dbg/*.cpp")
source_globs("include/*.c")
source_globs("include/*.cpp")
source_globs("include/*.h")
source_globs("include/*/*.h")
source_globs("include/*/*.cpp")
source_globs("include/*/*.c")
# Note *'s are removed when using fastcov
remove_source("/usr/*")
remove_source("*/include/sysc/*")
remove_source("*/V3Lexer_pregen.yy.cpp")
remove_source("*/V3PreLex_pregen.yy.cpp")
remove_source("*/verilog.c")
remove_source("*include/gtkwave/*")
# Something wrong in generation, unfortunately as would like this
#genhtml: ERROR: cannot read /svaha/wsnyder/SandBox/homecvs/v4/verilator/src/obj_dbg/verilog.y
#remove_source("*/src/obj_dbg/verilog.y")
remove_source("*test_regress/*")
remove_source("*examples/*")
# Remove collected coverage on each little test main file
# Would just be removed with remove_source in later step
remove_gcda_regexp(r'test_regress/.*/(Vt_|Vtop_).*\.gcda')
# Exclude line entirely, also excludes from function and branch coverage
exclude_line_regexp(r'\bv3fatalSrc\b')
exclude_line_regexp(r'\bfatalSrc\b')
exclude_line_regexp(r'\bVL_DELETED\b')
exclude_line_regexp(r'\bVL_UNCOVERABLE\b')
exclude_line_regexp(r'\bVL_UNREACHABLE\b')
exclude_line_regexp(r'\bVL_FATAL')
exclude_line_regexp(r'\bUASSERT')
exclude_line_regexp(r'\bNUM_ASSERT')
exclude_line_regexp(r'\bERROR_RSVD_WORD')
exclude_line_regexp(r'\bV3ERROR_NA')
exclude_line_regexp(r'\bUINFO\b')
exclude_line_regexp(r'\bVL_DEFINE_DEBUG_FUNCTIONS\b')
# Exclude for branch coverage only
exclude_branch_regexp(r'\bdebug\(\)')
exclude_branch_regexp(r'\bassert\(')
exclude_branch_regexp(r'\bBROKEN_BASE_RTN\(')
exclude_branch_regexp(r'\bBROKEN_RTN\(')
exclude_branch_regexp(r'\bSELF_CHECK')
True

View File

@ -32,7 +32,7 @@ import subprocess
import multiprocessing
from pathlib import Path
FASTCOV_VERSION = (1,15)
FASTCOV_VERSION = (1,17)
MINIMUM_PYTHON = (3,5)
MINIMUM_GCOV = (9,0,0)
@ -56,6 +56,7 @@ EXIT_CODES = {
"excl_not_found": 6,
"bad_chunk_file": 7,
"missing_json_key": 8,
"no_coverage_files": 9,
}
# Disable all logging in case developers are using this as a module
@ -508,13 +509,6 @@ def containsMarker(markers, strBody):
def exclProcessSource(fastcov_sources, source, exclude_branches_sw, include_branches_sw, exclude_line_marker, fallback_encodings, gcov_prefix, gcov_prefix_strip):
source_to_open = processPrefix(source, gcov_prefix, gcov_prefix_strip)
# Before doing any work, check if this file even needs to be processed
if not exclude_branches_sw and not include_branches_sw:
# Ignore unencodable characters
with open(source_to_open, errors="ignore") as f:
if not containsMarker(exclude_line_marker + ["LCOV_EXCL"], f.read()):
return False
# If we've made it this far we have to check every line
start_line = 0
@ -532,8 +526,10 @@ def exclProcessSource(fastcov_sources, source, exclude_branches_sw, include_bran
if del_exclude_br or del_include_br:
del fastcov_data["branches"][i]
lineIsClosingBrace = line.strip() == "}"
# Skip to next line as soon as possible
if not containsMarker(exclude_line_marker + ["LCOV_EXCL"], line):
if not containsMarker(exclude_line_marker + ["LCOV_EXCL"], line) and not lineIsClosingBrace:
continue
# Build line to function dict so can quickly delete by line number
@ -544,7 +540,7 @@ def exclProcessSource(fastcov_sources, source, exclude_branches_sw, include_bran
line_to_func[l] = set()
line_to_func[l].add(f)
if any(marker in line for marker in exclude_line_marker):
if lineIsClosingBrace or any(marker in line for marker in exclude_line_marker):
for key in ["lines", "branches"]:
if i in fastcov_data[key]:
del fastcov_data[key][i]
@ -792,6 +788,7 @@ def parseInfo(path):
}
with open(path) as f:
current_test_name = ""
for line in f:
if line.startswith("TN:"):
current_test_name = line[3:].strip()
@ -806,9 +803,10 @@ def parseInfo(path):
})
current_data = fastcov_json["sources"][current_sf][current_test_name]
elif line.startswith("FN:"):
line_num, function_name = line[3:].strip().split(",")
line_nums, function_name = line[3:].strip().rsplit(",", maxsplit=1)
line_num_start = line_nums.split(",")[0]
current_data["functions"][function_name] = {}
current_data["functions"][function_name]["start_line"] = tryParseNumber(line_num)
current_data["functions"][function_name]["start_line"] = tryParseNumber(line_num_start)
elif line.startswith("FNDA:"):
count, function_name = line[5:].strip().split(",")
current_data["functions"][function_name]["execution_count"] = tryParseNumber(count)
@ -884,6 +882,11 @@ def getGcovCoverage(args):
logging.info("Removed {} .gcda files".format(len(coverage_files)))
sys.exit()
if not coverage_files:
logging.error("No coverage files found in directory '%s'", args.directory)
setExitCode("no_coverage_files")
sys.exit(EXIT_CODE)
# Fire up one gcov per cpu and start processing gcdas
gcov_filter_options = getGcovFilterOptions(args)
fastcov_json = processGcdas(args, coverage_files, gcov_filter_options)

View File

@ -56,6 +56,7 @@ pkgdatadir = @pkgdatadir@
# Compile options
CFG_WITH_CCWARN = @CFG_WITH_CCWARN@
CFG_WITH_DEFENV = @CFG_WITH_DEFENV@
CFG_WITH_DEV_GCOV = @CFG_WITH_DEV_GCOV@
CFG_WITH_SOLVER = @CFG_WITH_SOLVER@
CPPFLAGS += @CPPFLAGS@
CFLAGS += @CFLAGS@
@ -84,11 +85,11 @@ ifeq ($(VL_NOOPT),1)
CPPFLAGS += -O0
else ifeq ($(VL_DEBUG),)
# Optimize
CPPFLAGS += -O3
CPPFLAGS += @CFG_CXXFLAGS_OPT@
else
# Debug
CPPFLAGS += @CFG_CXXFLAGS_DEBUG@ -DVL_DEBUG -D_GLIBCXX_DEBUG
LDFLAGS += @CFG_LDFLAGS_DEBUG@
CPPFLAGS += @CFG_CXXFLAGS_DBG@ -DVL_DEBUG -D_GLIBCXX_DEBUG
LDFLAGS += @CFG_LDFLAGS_DBG@
endif
#################
@ -135,6 +136,17 @@ ifeq ($(CFG_WITH_DEFENV),yes)
endif
endif
# Object file specific options
CXXFLAGSOBJ =
ifeq ($(CFG_WITH_DEV_GCOV),yes)
# Adding '-fkeep-inline-functions' to all files slows down coverage collection
# during 'make test' by ~3x. This option is only useful to make sure that
# 'inline' functions defined in headers are actually emitted so they show up as
# uncovered if they are never called, so instead of adding it everywhere, use
# only on a few files that together pull in most of the headers.
Verilator.o V3Dfg.o: CXXFLAGSOBJ += -fkeep-inline-functions
endif
HEADERS = $(wildcard V*.h v*.h)
ASTGEN = $(srcdir)/astgen
@ -381,27 +393,27 @@ $(TGT): $(PREDEP_H) $(OBJS)
.SECONDARY:
%.gch: %
$(OBJCACHE) ${CXX} ${CXXFLAGS} ${CPPFLAGSWALL} ${CFG_CXXFLAGS_PCH} -c $< -o $@
$(OBJCACHE) ${CXX} ${CXXFLAGS} ${CXXFLAGSOBJ} ${CPPFLAGSWALL} ${CFG_CXXFLAGS_PCH} -c $< -o $@
%.o: %.cpp
$(OBJCACHE) ${CXX} ${CXXFLAGS} ${CPPFLAGSWALL} -c $< -o $@
$(OBJCACHE) ${CXX} ${CXXFLAGS} ${CXXFLAGSOBJ} ${CPPFLAGSWALL} -c $< -o $@
%.o: %.c
$(OBJCACHE) ${CC} ${CFLAGS} ${CPPFLAGSWALL} -c $< -o $@
V3ParseLex.o: V3ParseLex.cpp V3Lexer.yy.cpp V3ParseBison.c
$(OBJCACHE) ${CXX} ${CXXFLAGS} ${CPPFLAGSPARSER} -c $< -o $@
$(OBJCACHE) ${CXX} ${CXXFLAGS} ${CXXFLAGSOBJ} ${CPPFLAGSPARSER} -c $< -o $@
V3ParseGrammar.o: V3ParseGrammar.cpp V3ParseBison.c
$(OBJCACHE) ${CXX} ${CXXFLAGS} ${CPPFLAGSPARSER} -c $< -o $@
$(OBJCACHE) ${CXX} ${CXXFLAGS} ${CXXFLAGSOBJ} ${CPPFLAGSPARSER} -c $< -o $@
V3ParseImp.o: V3ParseImp.cpp V3ParseBison.c
$(OBJCACHE) ${CXX} ${CXXFLAGS} ${CPPFLAGSPARSER} -c $< -o $@
$(OBJCACHE) ${CXX} ${CXXFLAGS} ${CXXFLAGSOBJ} ${CPPFLAGSPARSER} -c $< -o $@
V3PreProc.o: V3PreProc.cpp V3PreLex.yy.cpp
$(OBJCACHE) ${CXX} ${CXXFLAGS} ${CPPFLAGSPARSER} -c $< -o $@
$(OBJCACHE) ${CXX} ${CXXFLAGS} ${CXXFLAGSOBJ} ${CPPFLAGSPARSER} -c $< -o $@
define CXX_ASTMT_template
$(1): $(basename $(1)).cpp V3PchAstMT.h.gch
$(OBJCACHE) ${CXX} ${CXXFLAGS} ${CPPFLAGSWALL} ${CFG_CXXFLAGS_PCH_I} V3PchAstMT.h${CFG_GCH_IF_CLANG} -c $(srcdir)/$(basename $(1)).cpp -o $(1)
$(OBJCACHE) ${CXX} ${CXXFLAGS} ${CXXFLAGSOBJ} ${CPPFLAGSWALL} ${CFG_CXXFLAGS_PCH_I} V3PchAstMT.h${CFG_GCH_IF_CLANG} -c $(srcdir)/$(basename $(1)).cpp -o $(1)
endef
@ -409,7 +421,7 @@ $(foreach obj,$(RAW_OBJS_PCH_ASTMT),$(eval $(call CXX_ASTMT_template,$(obj))))
define CXX_ASTNOMT_template
$(1): $(basename $(1)).cpp V3PchAstNoMT.h.gch
$(OBJCACHE) ${CXX} ${CXXFLAGS} ${CPPFLAGSWALL} ${CFG_CXXFLAGS_PCH_I} V3PchAstNoMT.h${CFG_GCH_IF_CLANG} -c $(srcdir)/$(basename $(1)).cpp -o $(1)
$(OBJCACHE) ${CXX} ${CXXFLAGS} ${CXXFLAGSOBJ} ${CPPFLAGSWALL} ${CFG_CXXFLAGS_PCH_I} V3PchAstNoMT.h${CFG_GCH_IF_CLANG} -c $(srcdir)/$(basename $(1)).cpp -o $(1)
endef

View File

@ -371,10 +371,10 @@ void V3Error::abortIfWarnings() {
// Abort/exit
void V3Error::vlAbort() {
VL_GCOV_DUMP();
#ifndef V3ERROR_NO_GLOBAL_
v3Global.shutdown();
#endif
VL_GCOV_DUMP();
std::abort();
}
std::ostringstream& V3Error::v3errorPrep(V3ErrorCode code) VL_ACQUIRE(s().m_mutex) {

View File

@ -843,6 +843,8 @@ string V3Options::getSupported(const string& var) {
return "1";
} else if (var == "DEV_ASAN" && devAsan()) {
return "1";
} else if (var == "DEV_GCOV" && devGcov()) {
return "1";
// cppcheck-suppress knownConditionTrueFalse
} else if (var == "SYSTEMC" && systemCFound()) {
return "1";
@ -880,6 +882,14 @@ bool V3Options::devAsan() {
#endif
}
bool V3Options::devGcov() {
#ifdef HAVE_DEV_GCOV
return true;
#else
return false;
#endif
}
//######################################################################
// V3 Options notification methods

View File

@ -785,6 +785,7 @@ public:
static bool systemCFound(); // SystemC installed, or environment points to it
static bool coroutineSupport(); // Compiler supports coroutines
static bool devAsan(); // Compiler built with AddressSanitizer
static bool devGcov(); // Compiler built with code coverage for gcov
// METHODS (file utilities using these options)
string fileExists(const string& filename);

View File

@ -47,6 +47,9 @@ PACKAGE_VERSION_STRING_CHAR
// Define if compiled with AddressSanitizer
#undef HAVE_DEV_ASAN
// Define if compiled with code coverage collection
#undef HAVE_DEV_GCOV
//**********************************************************************
//**** This file sometimes gets truncated, so check in consumers
#define HAVE_CONFIG_PACKAGE

View File

@ -128,6 +128,7 @@ class Capabilities:
_cached_cxx_version = None
_cached_have_coroutines = None
_cached_have_dev_asan = None
_cached_have_dev_gcov = None
_cached_have_gdb = None
_cached_have_sc = None
_cached_have_solver = None
@ -168,6 +169,13 @@ class Capabilities:
Capabilities._verilator_get_supported('DEV_ASAN'))
return Capabilities._cached_have_dev_asan
@staticproperty
def have_dev_gcov() -> bool: # pylint: disable=no-method-argument
if Capabilities._cached_have_dev_gcov is None:
Capabilities._cached_have_dev_gcov = bool(
Capabilities._verilator_get_supported('DEV_GCOV'))
return Capabilities._cached_have_dev_gcov
@staticproperty
def have_gdb() -> bool: # pylint: disable=no-method-argument
if Capabilities._cached_have_gdb is None:
@ -214,6 +222,7 @@ class Capabilities:
def warmup_cache() -> None:
_ignore = Capabilities.have_coroutines
_ignore = Capabilities.have_dev_asan
_ignore = Capabilities.have_dev_gcov
_ignore = Capabilities.have_gdb
_ignore = Capabilities.have_sc
_ignore = Capabilities.have_solver
@ -1669,6 +1678,10 @@ class VlTest:
def have_dev_asan(self) -> bool:
return Capabilities.have_dev_asan
@property
def have_dev_gcov(self) -> bool:
return Capabilities.have_dev_gcov
@property
def have_gdb(self) -> bool:
return Capabilities.have_gdb
@ -1744,19 +1757,6 @@ class VlTest:
if Args.benchmark and re.match(r'^cd ', command):
command = "time " + command
if verilator_run:
# Gcov fails when parallel jobs write same data file,
# so we make sure .gcda output dir is unique across all running jobs.
# We can't just put each one in an unique obj_dir as it uses too much disk.
# Must use absolute path as some execute()s have different PWD
self.setenv('GCOV_PREFIX_STRIP', '99')
self.setenv('GCOV_PREFIX',
os.path.abspath(__file__ + "/../obj_dist/gcov_" + str(self.running_id)))
os.makedirs(os.environ['GCOV_PREFIX'], exist_ok=True)
else:
VtOs.delenv('GCOV_PREFIX_STRIP')
VtOs.delenv('GCOV_PREFIX')
print("\t" + command + ((" > " + logfile) if logfile else ""))
if entering:

View File

@ -10,6 +10,10 @@
import vltest_bootstrap
test.scenarios('vlt')
if test.have_dev_gcov:
test.skip("Code coverage build upsets ccache")
test.top_filename = "t_a1_first_cc.v"
if not test.cfg_with_ccache:

View File

@ -11,6 +11,9 @@ import vltest_bootstrap
test.scenarios('vlt')
if test.have_dev_gcov:
test.skip("Too slow with code coverage")
test.top_filename = f"{test.obj_dir}/in.v"
with open(test.top_filename, "w", encoding="utf8") as f:

View File

@ -27,6 +27,9 @@ def check_all_file():
def check_gcc_flags(filename):
# Coverage collection alters optimization flags
if test.have_dev_gcov:
return
with open(filename, 'r', encoding="utf8") as fh:
for line in fh:
line = line.rstrip()

View File

@ -36,6 +36,10 @@ def check_files():
continue
if re.search(r'^(.*\.(o|a)|Vprefix)$', filename):
continue
if re.search(r'\.gcda$', filename):
continue
if re.search(r'\.gcno$', filename):
continue
with open(path, 'r', encoding="utf8") as fh:
for line in fh:
line = re.sub(r'--prefix V?t_flag_prefix', '', line)

View File

@ -11,6 +11,9 @@ import vltest_bootstrap
test.scenarios('vlt_all')
if test.have_dev_gcov:
test.skip("Too slow with code coverage")
test.timeout(10)
test.compile(verilator_make_gmake=False)