mirror of https://github.com/YosysHQ/yosys.git
Compare commits: cdaf5a38c5 ... f2279d2c2a (49 commits)

| Author | SHA1 | Date |
|---|---|---|
| | f2279d2c2a | |
| | a2aeef6c96 | |
| | 3d5b1e0a93 | |
| | 5b2252ffd8 | |
| | 0f770285f3 | |
| | 45bb5c690d | |
| | f8341affe3 | |
| | e89c5914fe | |
| | 12cb8e9511 | |
| | 52c108cd6a | |
| | 3d80e1663e | |
| | 51c8193643 | |
| | 3a54ed6916 | |
| | f170c0f346 | |
| | ad3ae52e9a | |
| | db76eebc0f | |
| | 35e4d967c6 | |
| | 17c1388303 | |
| | 684bbf6a25 | |
| | bf7c79cc85 | |
| | cc5642c904 | |
| | a4bd40e199 | |
| | 1f6ac5f392 | |
| | c597bf70b0 | |
| | 0e2d24edd3 | |
| | 2d778a94fa | |
| | b9156c4f7c | |
| | 0751b74e7a | |
| | 0f2e470b0f | |
| | 336877a353 | |
| | 39fab4a07f | |
| | 1a80c26bae | |
| | 2d7a191b01 | |
| | e9733d681d | |
| | 5fa7feccd3 | |
| | d0a41d4f58 | |
| | 797780eda5 | |
| | dc051e98be | |
| | a243e4e60f | |
| | 88d101b462 | |
| | b2fe335b2d | |
| | d6b9158fa3 | |
| | 25aafab86b | |
| | 1eb5181700 | |
| | d1a628ab26 | |
| | 79cd4e08c4 | |
| | 5cfe6a9c1e | |
| | 913ac04764 | |
| | 5748b37b1c | |
@@ -1,14 +1,65 @@
|
|||
name: Build environment setup
|
||||
description: Configure build env for Yosys builds
|
||||
|
||||
inputs:
|
||||
runs-on:
|
||||
required: true
|
||||
type: string
|
||||
get-build-deps:
|
||||
description: 'Install Yosys build dependencies'
|
||||
default: false
|
||||
required: false
|
||||
type: boolean
|
||||
get-docs-deps:
|
||||
description: 'Install Yosys docs dependencies'
|
||||
default: false
|
||||
required: false
|
||||
type: boolean
|
||||
get-test-deps:
|
||||
description: 'Install Yosys test dependencies'
|
||||
default: false
|
||||
required: false
|
||||
type: boolean
|
||||
get-iverilog:
|
||||
description: 'Install iverilog'
|
||||
default: false
|
||||
required: false
|
||||
type: boolean
|
||||
|
||||
runs:
|
||||
using: composite
|
||||
steps:
|
||||
- name: Install Linux Dependencies
|
||||
# if updating common/build/docs dependencies, make sure to update README.md
|
||||
# and docs/source/getting_started/installation.rst to match.
|
||||
- name: Linux common dependencies
|
||||
if: runner.os == 'Linux'
|
||||
shell: bash
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install gperf build-essential bison flex libfl-dev libreadline-dev gawk tcl-dev libffi-dev git graphviz xdot pkg-config python3 libboost-system-dev libboost-python-dev libboost-filesystem-dev zlib1g-dev libbz2-dev libgtest-dev
|
||||
uses: awalsh128/cache-apt-pkgs-action@v1.6.0
|
||||
with:
|
||||
packages: gawk git make python3
|
||||
version: ${{ inputs.runs-on }}-commonys
|
||||
|
||||
- name: Linux build dependencies
|
||||
if: runner.os == 'Linux' && inputs.get-build-deps == 'true'
|
||||
uses: awalsh128/cache-apt-pkgs-action@v1.6.0
|
||||
with:
|
||||
packages: bison clang flex libffi-dev libfl-dev libreadline-dev pkg-config tcl-dev zlib1g-dev
|
||||
version: ${{ inputs.runs-on }}-buildys
|
||||
|
||||
- name: Linux docs dependencies
|
||||
if: runner.os == 'Linux' && inputs.get-docs-deps == 'true'
|
||||
uses: awalsh128/cache-apt-pkgs-action@v1.6.0
|
||||
with:
|
||||
packages: graphviz xdot
|
||||
version: ${{ inputs.runs-on }}-docsys
|
||||
|
||||
# if updating test dependencies, make sure to update
|
||||
# docs/source/yosys_internals/extending_yosys/test_suites.rst to match.
|
||||
- name: Linux test dependencies
|
||||
if: runner.os == 'Linux' && inputs.get-test-deps == 'true'
|
||||
uses: awalsh128/cache-apt-pkgs-action@v1.6.0
|
||||
with:
|
||||
packages: libgtest-dev
|
||||
version: ${{ inputs.runs-on }}-testys
|
||||
|
||||
- name: Install macOS Dependencies
|
||||
if: runner.os == 'macOS'
|
||||
|
|
@ -32,3 +83,9 @@ runs:
|
|||
echo "$(brew --prefix bison)/bin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix flex)/bin" >> $GITHUB_PATH
|
||||
echo "procs=$(sysctl -n hw.ncpu)" >> $GITHUB_ENV
|
||||
|
||||
- name: Setup iverilog
|
||||
if: inputs.get-iverilog == 'true'
|
||||
uses: ./.github/actions/setup-iverilog
|
||||
with:
|
||||
runs-on: ${{ inputs.runs-on }}
|
||||
|
|
|
|||
|
|
@@ -0,0 +1,70 @@
|
|||
name: iverilog setup
|
||||
description: Cached build and install of iverilog
|
||||
|
||||
inputs:
|
||||
runs-on:
|
||||
required: true
|
||||
type: string
|
||||
|
||||
runs:
|
||||
using: composite
|
||||
steps:
|
||||
- name: iverilog Linux deps
|
||||
if: steps.restore-iverilog.outputs.cache-hit != 'true' && runner.os == 'Linux'
|
||||
uses: awalsh128/cache-apt-pkgs-action@v1.6.0
|
||||
with:
|
||||
packages: autoconf gperf make gcc g++ bison flex libbz2-dev
|
||||
version: ${{ inputs.runs-on }}-iverilog
|
||||
|
||||
- name: iverilog macOS deps
|
||||
if: steps.restore-iverilog.outputs.cache-hit != 'true' && runner.os == 'macOS'
|
||||
shell: bash
|
||||
run: |
|
||||
brew install autoconf
|
||||
|
||||
- name: Get iverilog
|
||||
id: get-iverilog
|
||||
shell: bash
|
||||
run: |
|
||||
git clone https://github.com/steveicarus/iverilog.git
|
||||
cd iverilog
|
||||
echo "IVERILOG_GIT=$(git rev-parse HEAD)" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Get vcd2fst
|
||||
shell: bash
|
||||
run: |
|
||||
git clone https://github.com/mmicko/libwave.git
|
||||
mkdir -p ${{ github.workspace }}/.local/
|
||||
cd libwave
|
||||
cmake . -DCMAKE_INSTALL_PREFIX=${{ github.workspace }}/.local
|
||||
make -j$procs
|
||||
make install
|
||||
|
||||
- uses: actions/cache/restore@v4
|
||||
id: restore-iverilog
|
||||
with:
|
||||
path: .local/
|
||||
key: ${{ inputs.runs-on }}-${{ steps.get-iverilog.outputs.IVERILOG_GIT }}
|
||||
|
||||
- name: Build iverilog
|
||||
if: steps.restore-iverilog.outputs.cache-hit != 'true'
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir -p ${{ github.workspace }}/.local/
|
||||
cd iverilog
|
||||
autoconf
|
||||
CC=gcc CXX=g++ ./configure --prefix=${{ github.workspace }}/.local
|
||||
make -j$procs
|
||||
make install
|
||||
|
||||
- name: Check iverilog
|
||||
shell: bash
|
||||
run: |
|
||||
iverilog -V
|
||||
|
||||
- uses: actions/cache/save@v4
|
||||
id: save-iverilog
|
||||
if: steps.restore-iverilog.outputs.cache-hit != 'true'
|
||||
with:
|
||||
path: .local/
|
||||
key: ${{ steps.restore-iverilog.outputs.cache-primary-key }}
|
||||
|
|
@@ -10,15 +10,18 @@ jobs:
|
|||
name: Analyze
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Install deps
|
||||
run: sudo apt-get install bison flex libfl-dev libreadline-dev tcl-dev libffi-dev
|
||||
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: true
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup environment
|
||||
uses: ./.github/actions/setup-build-env
|
||||
with:
|
||||
runs-on: ubuntu-latest
|
||||
get-build-deps: true
|
||||
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v3
|
||||
with:
|
||||
|
|
|
|||
|
|
@@ -73,8 +73,8 @@ jobs:
|
|||
persist-credentials: false
|
||||
- name: Build
|
||||
run: |
|
||||
WASI_SDK=wasi-sdk-19.0
|
||||
WASI_SDK_URL=https://github.com/WebAssembly/wasi-sdk/releases/download/wasi-sdk-19/wasi-sdk-19.0-linux.tar.gz
|
||||
WASI_SDK=wasi-sdk-27.0-x86_64-linux
|
||||
WASI_SDK_URL=https://github.com/WebAssembly/wasi-sdk/releases/download/wasi-sdk-27/wasi-sdk-27.0-x86_64-linux.tar.gz
|
||||
if ! [ -d ${WASI_SDK} ]; then curl -L ${WASI_SDK_URL} | tar xzf -; fi
|
||||
|
||||
FLEX_VER=2.6.4
|
||||
|
|
|
|||
|
|
@ -60,6 +60,9 @@ jobs:
|
|||
|
||||
- name: Setup environment
|
||||
uses: ./.github/actions/setup-build-env
|
||||
with:
|
||||
runs-on: ${{ matrix.os }}
|
||||
get-build-deps: true
|
||||
|
||||
- name: Build
|
||||
shell: bash
|
||||
|
|
@ -105,48 +108,10 @@ jobs:
|
|||
|
||||
- name: Setup environment
|
||||
uses: ./.github/actions/setup-build-env
|
||||
|
||||
- name: Get iverilog
|
||||
id: get-iverilog
|
||||
shell: bash
|
||||
run: |
|
||||
git clone https://github.com/steveicarus/iverilog.git
|
||||
cd iverilog
|
||||
echo "IVERILOG_GIT=$(git rev-parse HEAD)" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Get vcd2fst
|
||||
shell: bash
|
||||
run: |
|
||||
git clone https://github.com/mmicko/libwave.git
|
||||
mkdir -p ${{ github.workspace }}/.local/
|
||||
cd libwave
|
||||
cmake . -DCMAKE_INSTALL_PREFIX=${{ github.workspace }}/.local
|
||||
make -j$procs
|
||||
make install
|
||||
|
||||
- name: Cache iverilog
|
||||
id: cache-iverilog
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: .local/
|
||||
key: ${{ matrix.os }}-${{ steps.get-iverilog.outputs.IVERILOG_GIT }}
|
||||
|
||||
- name: iverilog macOS deps
|
||||
if: steps.cache-iverilog.outputs.cache-hit != 'true' && runner.os == 'macOS'
|
||||
shell: bash
|
||||
run: |
|
||||
brew install autoconf
|
||||
|
||||
- name: Build iverilog
|
||||
if: steps.cache-iverilog.outputs.cache-hit != 'true'
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir -p ${{ github.workspace }}/.local/
|
||||
cd iverilog
|
||||
autoconf
|
||||
CC=gcc CXX=g++ ./configure --prefix=${{ github.workspace }}/.local
|
||||
make -j$procs
|
||||
make install
|
||||
runs-on: ${{ matrix.os }}
|
||||
get-test-deps: true
|
||||
get-iverilog: true
|
||||
|
||||
- name: Download build artifact
|
||||
uses: actions/download-artifact@v4
|
||||
|
|
@ -191,6 +156,8 @@ jobs:
|
|||
|
||||
- name: Setup environment
|
||||
uses: ./.github/actions/setup-build-env
|
||||
with:
|
||||
runs-on: ${{ matrix.os }}
|
||||
|
||||
- name: Download build artifact
|
||||
uses: actions/download-artifact@v4
|
||||
|
|
@ -229,6 +196,10 @@ jobs:
|
|||
|
||||
- name: Setup environment
|
||||
uses: ./.github/actions/setup-build-env
|
||||
with:
|
||||
runs-on: ${{ matrix.os }}
|
||||
get-build-deps: true
|
||||
get-docs-deps: true
|
||||
|
||||
- name: Download build artifact
|
||||
uses: actions/download-artifact@v4
|
||||
|
|
|
|||
|
|
@ -45,7 +45,7 @@ jobs:
|
|||
- 'gcc-14'
|
||||
include:
|
||||
# macOS x86
|
||||
- os: macos-13
|
||||
- os: macos-15-intel
|
||||
compiler: 'clang-19'
|
||||
# macOS arm
|
||||
- os: macos-latest
|
||||
|
|
@ -60,6 +60,9 @@ jobs:
|
|||
|
||||
- name: Setup environment
|
||||
uses: ./.github/actions/setup-build-env
|
||||
with:
|
||||
runs-on: ${{ matrix.os }}
|
||||
get-build-deps: true
|
||||
|
||||
- name: Setup Cpp
|
||||
uses: aminya/setup-cpp@v1
|
||||
|
|
|
|||
|
|
@ -44,53 +44,11 @@ jobs:
|
|||
|
||||
- name: Setup environment
|
||||
uses: ./.github/actions/setup-build-env
|
||||
|
||||
- name: Get iverilog
|
||||
id: get-iverilog
|
||||
shell: bash
|
||||
run: |
|
||||
git clone https://github.com/steveicarus/iverilog.git
|
||||
cd iverilog
|
||||
echo "IVERILOG_GIT=$(git rev-parse HEAD)" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Get vcd2fst
|
||||
shell: bash
|
||||
run: |
|
||||
git clone https://github.com/mmicko/libwave.git
|
||||
mkdir -p ${{ github.workspace }}/.local/
|
||||
cd libwave
|
||||
cmake . -DCMAKE_INSTALL_PREFIX=${{ github.workspace }}/.local
|
||||
make -j$procs
|
||||
make install
|
||||
|
||||
- name: Cache iverilog
|
||||
id: cache-iverilog
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: .local/
|
||||
key: ${{ matrix.os }}-${{ steps.get-iverilog.outputs.IVERILOG_GIT }}
|
||||
|
||||
- name: iverilog macOS deps
|
||||
if: steps.cache-iverilog.outputs.cache-hit != 'true' && runner.os == 'macOS'
|
||||
shell: bash
|
||||
run: |
|
||||
brew install autoconf
|
||||
|
||||
- name: Build iverilog
|
||||
if: steps.cache-iverilog.outputs.cache-hit != 'true'
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir -p ${{ github.workspace }}/.local/
|
||||
cd iverilog
|
||||
autoconf
|
||||
CC=gcc CXX=g++ ./configure --prefix=${{ github.workspace }}/.local
|
||||
make -j$procs
|
||||
make install
|
||||
|
||||
- name: Check iverilog
|
||||
shell: bash
|
||||
run: |
|
||||
iverilog -V
|
||||
runs-on: ${{ matrix.os }}
|
||||
get-build-deps: true
|
||||
get-test-deps: true
|
||||
get-iverilog: true
|
||||
|
||||
- name: Build
|
||||
shell: bash
|
||||
|
|
|
|||
|
|
@@ -25,15 +25,15 @@ jobs:
|
|||
archs: "aarch64",
|
||||
},
|
||||
{
|
||||
name: "macOS 13",
|
||||
name: "macOS 15 x64",
|
||||
family: "macos",
|
||||
runner: "macos-13",
|
||||
runner: "macos-15-intel",
|
||||
archs: "x86_64",
|
||||
},
|
||||
{
|
||||
name: "macOS 14",
|
||||
name: "macOS 15 arm64",
|
||||
family: "macos",
|
||||
runner: "macos-14",
|
||||
runner: "macos-15",
|
||||
archs: "arm64",
|
||||
},
|
||||
## Windows is disabled because of an issue with compiling FFI as
|
||||
|
|
@ -59,7 +59,7 @@ jobs:
|
|||
shell: bash
|
||||
run: |
|
||||
mkdir -p ffi
|
||||
curl -L https://github.com/libffi/libffi/releases/download/v3.4.6/libffi-3.4.6.tar.gz | tar --strip-components=1 -xzC ffi
|
||||
curl -L https://github.com/libffi/libffi/releases/download/v3.4.8/libffi-3.4.8.tar.gz | tar --strip-components=1 -xzC ffi
|
||||
- if: ${{ matrix.os.family == 'linux' }}
|
||||
name: "[Linux] Bison 3.8.2"
|
||||
shell: bash
|
||||
|
|
@ -114,7 +114,7 @@ jobs:
|
|||
path: ./wheelhouse/*.whl
|
||||
upload_wheels:
|
||||
name: Upload Wheels
|
||||
if: github.repository == 'YosysHQ/Yosys'
|
||||
if: (github.repository == 'YosysHQ/Yosys') && (github.event_name == 'workflow_dispatch')
|
||||
runs-on: ubuntu-latest
|
||||
# Specifying a GitHub environment is optional, but strongly encouraged
|
||||
environment: pypi
|
||||
|
|
|
|||
|
|
@@ -24,10 +24,10 @@ from pathlib import Path
|
|||
|
||||
__yosys_root__ = Path(__file__).absolute().parents[3]
|
||||
|
||||
for source in ["boost", "ffi", "bison"]:
|
||||
for source in ["ffi", "bison"]:
|
||||
if not (__yosys_root__ / source).is_dir():
|
||||
print(
|
||||
"You need to download boost, ffi and bison in a similar manner to wheels.yml first."
|
||||
"You need to download ffi and bison in a similar manner to wheels.yml first."
|
||||
)
|
||||
exit(-1)
|
||||
|
||||
|
|
|
|||
|
|
@ -55,7 +55,6 @@
|
|||
# pyosys
|
||||
/kernel/*.pyh
|
||||
/kernel/python_wrappers.cc
|
||||
/boost
|
||||
/ffi
|
||||
/bison
|
||||
/venv
|
||||
|
|
|
|||
2
Brewfile
|
|
@@ -6,9 +6,9 @@ brew "git"
|
|||
brew "graphviz"
|
||||
brew "pkg-config"
|
||||
brew "python3"
|
||||
brew "uv"
|
||||
brew "xdot"
|
||||
brew "bash"
|
||||
brew "boost-python3"
|
||||
brew "llvm@20"
|
||||
brew "lld"
|
||||
brew "googletest"
|
||||
|
|
|
|||
20
Makefile
|
|
@@ -28,6 +28,7 @@ ENABLE_HELP_SOURCE := 0
|
|||
|
||||
# python wrappers
|
||||
ENABLE_PYOSYS := 0
|
||||
PYOSYS_USE_UV := 1
|
||||
|
||||
# other configuration flags
|
||||
ENABLE_GCOV := 0
|
||||
|
|
@ -160,7 +161,7 @@ ifeq ($(OS), Haiku)
|
|||
CXXFLAGS += -D_DEFAULT_SOURCE
|
||||
endif
|
||||
|
||||
YOSYS_VER := 0.58+94
|
||||
YOSYS_VER := 0.58+138
|
||||
YOSYS_MAJOR := $(shell echo $(YOSYS_VER) | cut -d'.' -f1)
|
||||
YOSYS_MINOR := $(shell echo $(YOSYS_VER) | cut -d'.' -f2 | cut -d'+' -f1)
|
||||
YOSYS_COMMIT := $(shell echo $(YOSYS_VER) | cut -d'+' -f2)
|
||||
|
|
@ -282,12 +283,11 @@ ifeq ($(WASI_SDK),)
|
|||
CXX = clang++
|
||||
AR = llvm-ar
|
||||
RANLIB = llvm-ranlib
|
||||
WASIFLAGS := -target wasm32-wasi --sysroot $(WASI_SYSROOT) $(WASIFLAGS)
|
||||
WASIFLAGS := -target wasm32-wasi $(WASIFLAGS)
|
||||
else
|
||||
CXX = $(WASI_SDK)/bin/clang++
|
||||
AR = $(WASI_SDK)/bin/ar
|
||||
RANLIB = $(WASI_SDK)/bin/ranlib
|
||||
WASIFLAGS := --sysroot $(WASI_SDK)/share/wasi-sysroot $(WASIFLAGS)
|
||||
endif
|
||||
CXXFLAGS := $(WASIFLAGS) -std=$(CXXSTD) $(OPT_LEVEL) -D_WASI_EMULATED_PROCESS_CLOCKS $(filter-out -fPIC,$(CXXFLAGS))
|
||||
LINKFLAGS := $(WASIFLAGS) -Wl,-z,stack-size=1048576 $(filter-out -rdynamic,$(LINKFLAGS))
|
||||
|
|
@ -352,16 +352,22 @@ PYTHON_OBJECTS = pyosys/wrappers.o kernel/drivers.o kernel/yosys.o passes/cmds/p
|
|||
|
||||
ifeq ($(ENABLE_PYOSYS),1)
|
||||
# python-config --ldflags includes -l and -L, but LINKFLAGS is only -L
|
||||
|
||||
UV_ENV :=
|
||||
ifeq ($(PYOSYS_USE_UV),1)
|
||||
UV_ENV := uv run --no-project --with 'pybind11>3,<4' --with 'cxxheaderparser'
|
||||
endif
|
||||
|
||||
LINKFLAGS += $(filter-out -l%,$(shell $(PYTHON_CONFIG) --ldflags))
|
||||
LIBS += $(shell $(PYTHON_CONFIG) --libs)
|
||||
EXE_LIBS += $(filter-out $(LIBS),$(shell $(PYTHON_CONFIG_FOR_EXE) --libs))
|
||||
PYBIND11_INCLUDE ?= $(shell $(PYTHON_EXECUTABLE) -m pybind11 --includes)
|
||||
PYBIND11_INCLUDE ?= $(shell $(UV_ENV) $(PYTHON_EXECUTABLE) -m pybind11 --includes)
|
||||
CXXFLAGS += -I$(PYBIND11_INCLUDE) -DYOSYS_ENABLE_PYTHON
|
||||
CXXFLAGS += $(shell $(PYTHON_CONFIG) --includes) -DYOSYS_ENABLE_PYTHON
|
||||
|
||||
OBJS += $(PY_WRAPPER_FILE).o
|
||||
PY_GEN_SCRIPT = pyosys/generator.py
|
||||
PY_WRAP_INCLUDES := $(shell $(PYTHON_EXECUTABLE) $(PY_GEN_SCRIPT) --print-includes)
|
||||
PY_GEN_SCRIPT = $(YOSYS_SRC)/pyosys/generator.py
|
||||
PY_WRAP_INCLUDES := $(shell $(UV_ENV) $(PYTHON_EXECUTABLE) $(PY_GEN_SCRIPT) --print-includes)
|
||||
endif # ENABLE_PYOSYS
|
||||
|
||||
ifeq ($(ENABLE_READLINE),1)
|
||||
|
|
@ -777,7 +783,7 @@ endif
|
|||
ifeq ($(ENABLE_PYOSYS),1)
|
||||
$(PY_WRAPPER_FILE).cc: $(PY_GEN_SCRIPT) pyosys/wrappers_tpl.cc $(PY_WRAP_INCLUDES) pyosys/hashlib.h
|
||||
$(Q) mkdir -p $(dir $@)
|
||||
$(P) $(PYTHON_EXECUTABLE) $(PY_GEN_SCRIPT) $(PY_WRAPPER_FILE).cc
|
||||
$(P) $(UV_ENV) $(PYTHON_EXECUTABLE) $(PY_GEN_SCRIPT) $(PY_WRAPPER_FILE).cc
|
||||
endif
|
||||
|
||||
%.o: %.cpp
|
||||
|
|
|
|||
|
|
@@ -83,10 +83,10 @@ Xdot (graphviz) is used by the ``show`` command in yosys to display schematics.
|
|||
For example on Ubuntu Linux 22.04 LTS the following commands will install all
|
||||
prerequisites for building yosys:
|
||||
|
||||
$ sudo apt-get install build-essential clang lld bison flex libfl-dev \
|
||||
libreadline-dev gawk tcl-dev libffi-dev git \
|
||||
graphviz xdot pkg-config python3 libboost-system-dev \
|
||||
libboost-python-dev libboost-filesystem-dev zlib1g-dev
|
||||
$ sudo apt-get install gawk git make python3 lld bison clang flex \
|
||||
libffi-dev libfl-dev libreadline-dev pkg-config tcl-dev zlib1g-dev \
|
||||
graphviz xdot
|
||||
$ curl -LsSf https://astral.sh/uv/install.sh | sh
|
||||
|
||||
The environment variable `CXX` can be used to control the C++ compiler used, or
|
||||
run one of the following to override it:
|
||||
|
|
|
|||
|
|
@ -637,20 +637,6 @@ std::string escape_cxx_string(const std::string &input)
|
|||
return output;
|
||||
}
|
||||
|
||||
std::string basename(const std::string &filepath)
|
||||
{
|
||||
#ifdef _WIN32
|
||||
const std::string dir_seps = "\\/";
|
||||
#else
|
||||
const std::string dir_seps = "/";
|
||||
#endif
|
||||
size_t sep_pos = filepath.find_last_of(dir_seps);
|
||||
if (sep_pos != std::string::npos)
|
||||
return filepath.substr(sep_pos + 1);
|
||||
else
|
||||
return filepath;
|
||||
}
|
||||
|
||||
template<class T>
|
||||
std::string get_hdl_name(T *object)
|
||||
{
|
||||
|
|
@ -2858,7 +2844,7 @@ struct CxxrtlWorker {
|
|||
}
|
||||
|
||||
if (split_intf)
|
||||
f << "#include \"" << basename(intf_filename) << "\"\n";
|
||||
f << "#include \"" << name_from_file_path(intf_filename) << "\"\n";
|
||||
else
|
||||
f << "#include <cxxrtl/cxxrtl.h>\n";
|
||||
f << "\n";
|
||||
|
|
|
|||
|
|
@ -1,5 +0,0 @@
|
|||
ECP5
|
||||
------------------
|
||||
|
||||
.. autocmdgroup:: techlibs/ecp5
|
||||
:members:
|
||||
|
|
@ -1,5 +0,0 @@
|
|||
Lattice Nexus
|
||||
------------------
|
||||
|
||||
.. autocmdgroup:: techlibs/nexus
|
||||
:members:
|
||||
|
|
@ -50,7 +50,7 @@ rst_prolog = """
|
|||
:language: yoscrypt
|
||||
"""
|
||||
|
||||
extensions = ['sphinx.ext.autosectionlabel', 'sphinxcontrib.bibtex']
|
||||
extensions = ['sphinx.ext.autosectionlabel', 'sphinxcontrib.bibtex', 'sphinx_inline_tabs']
|
||||
|
||||
if os.getenv("READTHEDOCS"):
|
||||
# Use rtds_action if we are building on read the docs and have a github token env var
|
||||
|
|
|
|||
|
|
@@ -93,60 +93,66 @@ tools: readline, libffi, Tcl and zlib; are optional but enabled by default (see
|
|||
:makevar:`ENABLE_*` settings in Makefile). Graphviz and Xdot are used by the
|
||||
`show` command to display schematics.
|
||||
|
||||
Installing all prerequisites for Ubuntu 22.04:
|
||||
Installing all prerequisites:
|
||||
|
||||
.. code:: console
|
||||
|
||||
sudo apt-get install gperf build-essential clang lld bison flex libfl-dev \
|
||||
libreadline-dev gawk tcl-dev libffi-dev git \
|
||||
graphviz xdot pkg-config python3 libboost-system-dev \
|
||||
libboost-python-dev libboost-filesystem-dev zlib1g-dev
|
||||
|
||||
Installing all prerequisites for macOS 13 (with Homebrew):
|
||||
|
||||
.. code:: console
|
||||
|
||||
brew tap Homebrew/bundle && brew bundle
|
||||
|
||||
or MacPorts:
|
||||
|
||||
.. code:: console
|
||||
|
||||
sudo port install bison flex readline gawk libffi graphviz \
|
||||
pkgconfig python311 boost zlib tcl
|
||||
|
||||
On FreeBSD use the following command to install all prerequisites:
|
||||
|
||||
.. code:: console
|
||||
|
||||
pkg install bison flex readline gawk libffi graphviz \
|
||||
pkgconf python311 tcl-wrapper boost-libs
|
||||
|
||||
.. note:: On FreeBSD system use gmake instead of make. To run tests use:
|
||||
``MAKE=gmake CXX=cxx CC=cc gmake test``
|
||||
|
||||
For Cygwin use the following command to install all prerequisites, or select these additional packages:
|
||||
|
||||
.. code:: console
|
||||
|
||||
setup-x86_64.exe -q --packages=bison,flex,gcc-core,gcc-g++,git,libffi-devel,libreadline-devel,make,pkg-config,python3,tcl-devel,boost-build,zlib-devel
|
||||
|
||||
.. warning::
|
||||
|
||||
As of this writing, Cygwin only supports up to Python 3.9.16 while the
|
||||
minimum required version of Python is 3.11. This means that Cygwin is not
|
||||
compatible with many of the Python-based frontends. While this does not
|
||||
currently prevent Yosys itself from working, no guarantees are made for
|
||||
continued support. You may also need to specify `CXXSTD=gnu++17` to resolve
|
||||
missing `strdup` function when using gcc. It is instead recommended to use
|
||||
Windows Subsystem for Linux (WSL) and follow the instructions for Ubuntu.
|
||||
|
||||
..
|
||||
For MSYS2 (MINGW64):
|
||||
.. tab:: Ubuntu 22.04
|
||||
|
||||
.. code:: console
|
||||
|
||||
pacman -S bison flex mingw-w64-x86_64-gcc git libffi-devel libreadline-devel make pkg-config python3 tcl-devel mingw-w64-x86_64-boost zlib-devel
|
||||
sudo apt-get install gawk git make python3 lld bison clang flex \
|
||||
libffi-dev libfl-dev libreadline-dev pkg-config tcl-dev zlib1g-dev \
|
||||
graphviz xdot
|
||||
curl -LsSf https://astral.sh/uv/install.sh | sh
|
||||
|
||||
.. tab:: macOS 13 (with Homebrew)
|
||||
|
||||
.. code:: console
|
||||
|
||||
brew tap Homebrew/bundle && brew bundle
|
||||
|
||||
.. tab:: MacPorts
|
||||
|
||||
.. code:: console
|
||||
|
||||
sudo port install bison flex readline gawk libffi graphviz \
|
||||
pkgconfig python311 zlib tcl
|
||||
|
||||
.. tab:: FreeBSD
|
||||
|
||||
.. code:: console
|
||||
|
||||
pkg install bison flex readline gawk libffi graphviz \
|
||||
pkgconf python311 tcl-wrapper
|
||||
|
||||
.. note:: On FreeBSD systems, use ``gmake`` instead of ``make``. To run tests use:
   ``MAKE=gmake CXX=cxx CC=cc gmake test``
|
||||
|
||||
.. tab:: Cygwin
|
||||
|
||||
Use the following command to install all prerequisites, or select these
|
||||
additional packages:
|
||||
|
||||
.. code:: console
|
||||
|
||||
setup-x86_64.exe -q --packages=bison,flex,gcc-core,gcc-g++,git,libffi-devel,libreadline-devel,make,pkg-config,python3,tcl-devel,zlib-devel
|
||||
|
||||
.. warning::
|
||||
|
||||
As of this writing, Cygwin only supports up to Python 3.9.16 while the
|
||||
minimum required version of Python is 3.11. This means that Cygwin is not
|
||||
compatible with many of the Python-based frontends. While this does not
|
||||
currently prevent Yosys itself from working, no guarantees are made for
|
||||
continued support. You may also need to specify ``CXXSTD=gnu++17`` to
|
||||
resolve missing ``strdup`` function when using gcc. It is instead
|
||||
recommended to use Windows Subsystem for Linux (WSL) and follow the
|
||||
instructions for Ubuntu.
|
||||
|
||||
..
|
||||
tab:: MSYS2 (MINGW64)
|
||||
|
||||
.. code:: console
|
||||
|
||||
pacman -S bison flex mingw-w64-x86_64-gcc git libffi-devel libreadline-devel make pkg-config python3 tcl-devel zlib-devel
|
||||
|
||||
Not that I can get this to work; it's failing during ld with what looks like
|
||||
math library issues: ``multiple definition of `tanh'`` and
|
||||
|
|
@ -215,7 +221,7 @@ Running the build system
|
|||
From the root ``yosys`` directory, call the following commands:
|
||||
|
||||
.. code:: console
|
||||
|
||||
|
||||
make
|
||||
sudo make install
|
||||
|
||||
|
|
@ -228,7 +234,7 @@ To use a separate (out-of-tree) build directory, provide a path to the Makefile.
|
|||
|
||||
Out-of-tree builds require a clean source tree.
|
||||
|
||||
.. seealso::
|
||||
.. seealso::
|
||||
|
||||
Refer to :doc:`/yosys_internals/extending_yosys/test_suites` for details on
|
||||
testing Yosys once compiled.
|
||||
|
|
|
|||
|
|
@ -1,3 +1,4 @@
|
|||
furo-ys @ git+https://github.com/YosysHQ/furo-ys
|
||||
sphinxcontrib-bibtex
|
||||
rtds-action
|
||||
sphinx-inline-tabs
|
||||
|
|
|
|||
|
|
@ -28,6 +28,14 @@ methods:
|
|||
|
||||
``yosys -y ./my_pyosys_script.py``
|
||||
|
||||
Note that this requires some build-time dependencies to be available to Python,
namely ``pybind11`` and ``cxxheaderparser``. By default, ``uv`` (which must be
installed) is used to create an ephemeral environment with the correct versions
of these tools.

You can force the use of your current Python environment instead by passing the
Makefile flag ``PYOSYS_USE_UV=0``.
|
||||
|
||||
2. Installing the Pyosys wheels
|
||||
|
||||
On macOS and GNU/Linux you can install pre-built wheels of Yosys using
|
||||
|
|
|
|||
|
|
@@ -1,7 +1,72 @@
|
|||
Testing Yosys
|
||||
=============
|
||||
|
||||
.. TODO:: more about the included test suite and how to add tests
|
||||
.. todo:: adding tests (makefile-tests vs seed-tests)
|
||||
|
||||
Running the included test suite
|
||||
-------------------------------
|
||||
|
||||
The Yosys source comes with a test suite to avoid regressions and keep
|
||||
everything working as expected. Tests can be run by calling ``make test`` from
|
||||
the root Yosys directory.
|
||||
|
||||
Functional tests
|
||||
~~~~~~~~~~~~~~~~
|
||||
|
||||
Testing functional backends (see
|
||||
:doc:`/yosys_internals/extending_yosys/functional_ir`) has a few requirements in
|
||||
addition to those listed in :ref:`getting_started/installation:Build
|
||||
prerequisites`:
|
||||
|
||||
.. tab:: Ubuntu
|
||||
|
||||
.. code:: console
|
||||
|
||||
sudo apt-get install racket
|
||||
raco pkg install rosette
|
||||
pip install pytest-xdist pytest-xdist-gnumake
|
||||
|
||||
.. tab:: macOS
|
||||
|
||||
.. code:: console
|
||||
|
||||
brew install racket
|
||||
raco pkg install rosette
|
||||
pip install pytest-xdist pytest-xdist-gnumake
|
||||
|
||||
If you don't have one of the :ref:`getting_started/installation:CAD suite(s)`
|
||||
installed, you should also install Z3 `following their
|
||||
instructions <https://github.com/Z3Prover/z3>`_.
|
||||
|
||||
Then, set the :makevar:`ENABLE_FUNCTIONAL_TESTS` make variable when calling
|
||||
``make test`` and the functional tests will be run as well.
|
||||
|
||||
Unit tests
|
||||
~~~~~~~~~~
|
||||
|
||||
Running the unit tests requires the following additional packages:
|
||||
|
||||
.. tab:: Ubuntu
|
||||
|
||||
.. code:: console
|
||||
|
||||
sudo apt-get install libgtest-dev
|
||||
|
||||
.. tab:: macOS
|
||||
|
||||
No additional requirements.
|
||||
|
||||
Unit tests can be run with ``make unit-test``.
|
||||
|
||||
Docs tests
|
||||
~~~~~~~~~~
|
||||
|
||||
There are some additional tests for checking examples included in the
|
||||
documentation, which can be run by calling ``make test`` from the
|
||||
:file:`yosys/docs` sub-directory (or ``make -C docs test`` from the root). This
|
||||
also includes checking some macro commands to ensure that descriptions of them
|
||||
are kept up to date, and is mostly intended for CI.
|
||||
|
||||
|
||||
Automatic testing
|
||||
-----------------
|
||||
|
|
@ -14,8 +79,6 @@ compiler versions. For up to date information, including OS versions, refer to
|
|||
.. _Yosys Git repo: https://github.com/YosysHQ/yosys
|
||||
.. _the git actions page: https://github.com/YosysHQ/yosys/actions
|
||||
|
||||
.. todo:: are unit tests currently working
|
||||
|
||||
..
|
||||
How to add a unit test
|
||||
----------------------
|
||||
|
|
|
|||
|
|
@ -41,7 +41,7 @@
|
|||
packages.default = yosys;
|
||||
defaultPackage = yosys;
|
||||
devShell = pkgs.mkShell {
|
||||
buildInputs = with pkgs; [ clang llvmPackages.bintools gcc bison flex libffi tcl readline python3 zlib git gtest abc-verifier verilog boost python3Packages.boost ];
|
||||
buildInputs = with pkgs; [ clang llvmPackages.bintools gcc bison flex libffi tcl readline python3 zlib git gtest abc-verifier verilog ];
|
||||
};
|
||||
}
|
||||
);
|
||||
|
|
|
|||
|
|
@ -30,6 +30,7 @@
|
|||
#include "libs/sha1/sha1.h"
|
||||
#include "frontends/verilog/verilog_frontend.h"
|
||||
#include "ast.h"
|
||||
#include "kernel/io.h"
|
||||
|
||||
#include <sstream>
|
||||
#include <stdarg.h>
|
||||
|
|
@ -4474,12 +4475,7 @@ std::unique_ptr<AstNode> AstNode::readmem(bool is_readmemh, std::string mem_file
|
|||
std::ifstream f;
|
||||
f.open(mem_filename.c_str());
|
||||
if (f.fail()) {
|
||||
#ifdef _WIN32
|
||||
char slash = '\\';
|
||||
#else
|
||||
char slash = '/';
|
||||
#endif
|
||||
std::string path = location.begin.filename->substr(0, location.begin.filename->find_last_of(slash)+1);
|
||||
std::string path = parent_from_file_path(*location.begin.filename);
|
||||
f.open(path + mem_filename.c_str());
|
||||
yosys_input_files.insert(path + mem_filename);
|
||||
} else {
|
||||
|
|
|
|||
|
|
@ -1576,6 +1576,7 @@ void VerificImporter::import_netlist(RTLIL::Design *design, Netlist *nl, std::ma
|
|||
SetIter si ;
|
||||
Port *port ;
|
||||
FOREACH_PORT_OF_PORTBUS(portbus, si, port) {
|
||||
wire->port_id = nl->IndexOf(port) + 1;
|
||||
import_attributes(wire->attributes, port->GetNet(), nl, portbus->Size());
|
||||
break;
|
||||
}
|
||||
|
|
|
|||
|
|
@ -895,11 +895,7 @@ frontend_verilog_preproc(std::istream &f,
|
|||
// if the include file was not found, it is not given with an absolute path, and the
|
||||
// currently read file is given with a path, then try again relative to its directory
|
||||
ff.clear();
|
||||
#ifdef _WIN32
|
||||
fixed_fn = filename.substr(0, filename.find_last_of("/\\")+1) + fn;
|
||||
#else
|
||||
fixed_fn = filename.substr(0, filename.rfind('/')+1) + fn;
|
||||
#endif
|
||||
fixed_fn = parent_from_file_path(filename) + fn;
|
||||
ff.open(fixed_fn);
|
||||
}
|
||||
if (ff.fail() && fn.size() > 0 && fn_relative) {
|
||||
|
|
|
|||
19
kernel/io.cc
|
|
@@ -2,6 +2,7 @@
|
|||
#include "kernel/log.h"
|
||||
#include <iostream>
|
||||
#include <string>
|
||||
#include <filesystem>
|
||||
|
||||
#if !defined(WIN32)
|
||||
#include <dirent.h>
|
||||
|
|
@ -391,6 +392,24 @@ void append_globbed(std::vector<std::string>& paths, std::string pattern)
|
|||
copy(globbed.begin(), globbed.end(), back_inserter(paths));
|
||||
}
|
||||
|
||||
std::string name_from_file_path(std::string path) {
|
||||
return std::filesystem::path(path).filename().string();
|
||||
}
|
||||
|
||||
// Returns the parent directory with a trailing path separator, or "" if the path has no parent component
|
||||
std::string parent_from_file_path(std::string path) {
|
||||
auto parent = std::filesystem::path(path).parent_path();
|
||||
if (parent.empty()) {
|
||||
return "";
|
||||
}
|
||||
// Add trailing separator to match original behavior
|
||||
std::string result = parent.string();
|
||||
if (!result.empty() && result.back() != std::filesystem::path::preferred_separator) {
|
||||
result += std::filesystem::path::preferred_separator;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
void format_emit_unescaped(std::string &result, std::string_view fmt)
|
||||
{
|
||||
result.reserve(result.size() + fmt.size());
|
||||
|
|
|
|||
|
|
@ -470,6 +470,8 @@ void remove_directory(std::string dirname);
|
|||
bool create_directory(const std::string& dirname);
|
||||
std::string escape_filename_spaces(const std::string& filename);
|
||||
void append_globbed(std::vector<std::string>& paths, std::string pattern);
|
||||
std::string name_from_file_path(std::string path);
|
||||
std::string parent_from_file_path(std::string path);
|
||||
|
||||
YOSYS_NAMESPACE_END
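The two new helpers are thin wrappers over ``std::filesystem``. A minimal standalone sketch of the same behavior (no Yosys headers; the ``main`` and the example paths are illustrative only, assuming the semantics shown in the hunk above):

```cpp
// Standalone sketch of the new kernel/io.cc helpers using plain
// std::filesystem (C++17). No Yosys headers; names mirror the patch,
// the example paths are illustrative only.
#include <filesystem>
#include <iostream>
#include <string>

static std::string name_from_file_path(const std::string &path)
{
	// "rtl/mem/init.hex" -> "init.hex"
	return std::filesystem::path(path).filename().string();
}

static std::string parent_from_file_path(const std::string &path)
{
	// "rtl/mem/init.hex" -> "rtl/mem/" (trailing separator kept),
	// "init.hex"         -> ""         (no parent component)
	auto parent = std::filesystem::path(path).parent_path();
	if (parent.empty())
		return "";
	std::string result = parent.string();
	if (result.back() != std::filesystem::path::preferred_separator)
		result += std::filesystem::path::preferred_separator;
	return result;
}

int main()
{
	std::cout << name_from_file_path("rtl/mem/init.hex") << "\n";   // init.hex
	std::cout << parent_from_file_path("rtl/mem/init.hex") << "\n"; // rtl/mem/
	std::cout << parent_from_file_path("init.hex") << "\n";         // (empty line)
}
```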
|
||||
|
||||
|
|
|
|||
|
|
@@ -3088,6 +3088,14 @@ RTLIL::Cell *RTLIL::Module::addCell(RTLIL::IdString name, const RTLIL::Cell *oth
|
|||
return cell;
|
||||
}
|
||||
|
||||
RTLIL::Memory *RTLIL::Module::addMemory(RTLIL::IdString name)
|
||||
{
|
||||
RTLIL::Memory *mem = new RTLIL::Memory;
|
||||
mem->name = std::move(name);
|
||||
memories[mem->name] = mem;
|
||||
return mem;
|
||||
}
|
||||
|
||||
RTLIL::Memory *RTLIL::Module::addMemory(RTLIL::IdString name, const RTLIL::Memory *other)
|
||||
{
|
||||
RTLIL::Memory *mem = new RTLIL::Memory;
|
||||
|
|
|
|||
|
|
@ -569,6 +569,7 @@ template <> struct IDMacroHelper<-1> {
|
|||
}
|
||||
};
|
||||
|
||||
#undef ID
|
||||
#define ID(_id) \
|
||||
YOSYS_NAMESPACE_PREFIX IDMacroHelper< \
|
||||
YOSYS_NAMESPACE_PREFIX lookup_well_known_id(#_id) \
|
||||
|
|
@ -1827,6 +1828,7 @@ public:
|
|||
RTLIL::Cell *addCell(RTLIL::IdString name, RTLIL::IdString type);
|
||||
RTLIL::Cell *addCell(RTLIL::IdString name, const RTLIL::Cell *other);
|
||||
|
||||
RTLIL::Memory *addMemory(RTLIL::IdString name);
|
||||
RTLIL::Memory *addMemory(RTLIL::IdString name, const RTLIL::Memory *other);
|
||||
|
||||
RTLIL::Process *addProcess(RTLIL::IdString name);
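A hypothetical pass fragment showing how the new single-argument overload might be used; the surrounding function and the memory name are invented for illustration, while ``width``, ``size`` and ``start_offset`` are the usual ``RTLIL::Memory`` members:

```cpp
// Hypothetical pass fragment (not from this diff) using the new
// Module::addMemory(IdString) overload.
#include "kernel/rtlil.h"
USING_YOSYS_NAMESPACE

static void add_scratch_memory(RTLIL::Module *module)
{
	// New overload: the module allocates, owns and registers the memory.
	RTLIL::Memory *mem = module->addMemory(RTLIL::escape_id("scratch_mem"));
	mem->width = 8;        // word width in bits
	mem->size = 256;       // number of words
	mem->start_offset = 0; // address of the first word

	// Equivalent manual form that the overload replaces:
	//   RTLIL::Memory *mem = new RTLIL::Memory;
	//   mem->name = RTLIL::escape_id("scratch_mem");
	//   module->memories[mem->name] = mem;
}
```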
|
||||
|
|
|
|||
|
|
@ -39,7 +39,8 @@ const arrivalint INF_PAST = std::numeric_limits<arrivalint>::min();
|
|||
struct EstimateSta {
|
||||
SigMap sigmap;
|
||||
Module *m;
|
||||
SigBit clk;
|
||||
std::optional<SigBit> clk;
|
||||
bool top_port_endpoints = false;
|
||||
|
||||
dict<std::pair<RTLIL::IdString, dict<RTLIL::IdString, RTLIL::Const>>, Aig> aigs;
|
||||
dict<Cell *, Aig *> cell_aigs;
|
||||
|
|
@ -73,15 +74,18 @@ struct EstimateSta {
|
|||
}
|
||||
|
||||
// TODO: ignores clock polarity
|
||||
EstimateSta(Module *m, SigBit clk)
|
||||
: sigmap(m), m(m), clk(clk)
|
||||
EstimateSta(Module *m, std::optional<SigBit> clk, bool top_port_endpoints)
|
||||
: sigmap(m), m(m), clk(clk), top_port_endpoints(top_port_endpoints)
|
||||
{
|
||||
sigmap.apply(clk);
|
||||
if (clk.has_value())
|
||||
sigmap.apply(*clk);
|
||||
}
|
||||
|
||||
void run()
|
||||
{
|
||||
log("Domain %s\n", log_signal(clk));
|
||||
log("\nModule %s\n", log_id(m));
|
||||
if (clk.has_value())
|
||||
log("Domain %s\n", log_signal(*clk));
|
||||
|
||||
// first, we collect launch and sample points and convert the combinational logic to AIG
|
||||
std::vector<Cell *> combinational;
|
||||
|
|
@ -151,6 +155,22 @@ struct EstimateSta {
|
|||
}
|
||||
}
|
||||
|
||||
// add top module port launching/sampling, if requested
|
||||
if (top_port_endpoints) {
|
||||
SigSpec all_inputs, all_outputs;
|
||||
for (auto port_id : m->ports) {
|
||||
Wire *port = m->wire(port_id);
|
||||
if (port->port_input && !port->port_output) {
|
||||
all_inputs.append(port);
|
||||
} else if (port->port_output && !port->port_input) {
|
||||
all_outputs.append(port);
|
||||
} else if (port->port_output && port->port_input) {
|
||||
log_warning("Ignoring bi-directional port %s\n", log_id(port));
|
||||
}
|
||||
}
|
||||
add_seq(nullptr, all_inputs, all_outputs);
|
||||
}
|
||||
|
||||
// now we toposort the combinational logic
|
||||
|
||||
// each toposort node is either a SigBit or a pair of Cell * / AigNode *
|
||||
|
|
@ -360,7 +380,7 @@ struct TimeestPass : Pass {
|
|||
log("\n");
|
||||
log(" timeest [-clk <clk_signal>] [options] [selection]\n");
|
||||
log("\n");
|
||||
log("Estimate the critical path in clock domain <clk_signal> by counting AIG nodes.\n");
|
||||
log("Estimate the critical path by counting AIG nodes.\n");
|
||||
log("\n");
|
||||
log(" -all_paths\n");
|
||||
log(" Print or select nodes from all critical paths instead of focusing on\n");
|
||||
|
|
@ -374,7 +394,8 @@ struct TimeestPass : Pass {
|
|||
{
|
||||
log_header(d, "Executing TIMEEST pass. (estimate timing)\n");
|
||||
|
||||
std::string clk;
|
||||
std::string clk_name;
|
||||
bool clk_domain_specified = false;
|
||||
bool all_paths = false;
|
||||
bool select = false;
|
||||
size_t argidx;
|
||||
|
|
@ -388,26 +409,30 @@ struct TimeestPass : Pass {
|
|||
continue;
|
||||
}
|
||||
if (args[argidx] == "-clk" && argidx + 1 < args.size()) {
|
||||
clk = args[++argidx];
|
||||
clk_domain_specified = true;
|
||||
clk_name = args[++argidx];
|
||||
continue;
|
||||
}
|
||||
break;
|
||||
}
|
||||
extra_args(args, argidx, d);
|
||||
|
||||
if (clk.empty())
|
||||
log_cmd_error("No -clk argument provided\n");
|
||||
|
||||
if (select && d->selected_modules().size() > 1)
|
||||
log_cmd_error("The -select option operates on a single selected module\n");
|
||||
|
||||
for (auto m : d->selected_modules()) {
|
||||
if (!m->wire(RTLIL::escape_id(clk))) {
|
||||
log_warning("No domain '%s' in module %s\n", clk, log_id(m));
|
||||
continue;
|
||||
std::optional<SigBit> clk;
|
||||
|
||||
if (clk_domain_specified) {
|
||||
if (!m->wire(RTLIL::escape_id(clk_name))) {
|
||||
log_warning("No domain '%s' in module %s\n", clk_name.c_str(), log_id(m));
|
||||
continue;
|
||||
}
|
||||
|
||||
clk = SigBit(m->wire(RTLIL::escape_id(clk_name)), 0);
|
||||
}
|
||||
|
||||
EstimateSta sta(m, SigBit(m->wire(RTLIL::escape_id(clk)), 0));
|
||||
EstimateSta sta(m, clk, /*top_port_endpoints=*/ !clk_domain_specified);
|
||||
sta.all_paths = all_paths;
|
||||
sta.select = select;
|
||||
sta.run();
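A compile-only sketch of the pattern these changes follow: an optional clock that is only normalized when present, with top-level ports used as endpoints otherwise. The helper function is hypothetical; only the types and the ``sigmap.apply(*clk)`` call are taken from the diff:

```cpp
// Compile-only sketch (hypothetical helper) of the pattern introduced here:
// the clock is optional, and its absence selects port-based endpoints.
#include <optional>
#include "kernel/yosys.h"
#include "kernel/sigtools.h"
USING_YOSYS_NAMESPACE

static void setup_domain(RTLIL::Module *m, std::optional<RTLIL::SigBit> clk)
{
	SigMap sigmap(m);
	if (clk.has_value())
		sigmap.apply(*clk); // normalize the clock bit only when one was given

	// No -clk argument: time paths between the module's top-level ports.
	bool top_port_endpoints = !clk.has_value();
	(void)top_port_endpoints;
}
```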
|
||||
|
|
|
|||
|
|
@ -1071,6 +1071,17 @@ struct HierarchyPass : public Pass {
|
|||
mod->attributes[ID::initial_top] = RTLIL::Const(1);
|
||||
else
|
||||
mod->attributes.erase(ID::initial_top);
|
||||
|
||||
std::vector<IdString> abstract_ids;
|
||||
for (auto cell : top_mod->cells()) {
|
||||
IdString abstract_id = "$abstract" + cell->type.str();
|
||||
if (design->module(cell->type) == nullptr && design->module(abstract_id))
|
||||
abstract_ids.push_back(abstract_id);
|
||||
}
|
||||
for (auto abstract_id : abstract_ids)
|
||||
design->module(abstract_id)->derive(design, {});
|
||||
for (auto abstract_id : abstract_ids)
|
||||
design->remove(design->module(abstract_id));
|
||||
}
|
||||
|
||||
bool did_something = true;
|
||||
|
|
|
|||
|
|
@@ -117,15 +117,6 @@
|
|||
std::string bit_str;
|
||||
};
|
||||
|
||||
bool map_mux4;
|
||||
bool map_mux8;
|
||||
bool map_mux16;
|
||||
|
||||
bool markgroups;
|
||||
|
||||
pool<std::string> enabled_gates;
|
||||
bool cmos_cost;
|
||||
|
||||
struct AbcConfig
|
||||
{
|
||||
std::string global_tempdir_name;
|
||||
|
|
@ -146,6 +137,12 @@ struct AbcConfig
|
|||
bool show_tempdir = false;
|
||||
bool sop_mode = false;
|
||||
bool abc_dress = false;
|
||||
bool map_mux4 = false;
|
||||
bool map_mux8 = false;
|
||||
bool map_mux16 = false;
|
||||
bool markgroups = false;
|
||||
pool<std::string> enabled_gates;
|
||||
bool cmos_cost = false;
|
||||
};
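The refactor follows a common pattern: former file-scope flags become members of a config struct with in-class defaults, and helpers read them through a ``const`` reference. A generic sketch (all names hypothetical, costs invented):

```cpp
// Generic sketch of the refactor applied here (all names hypothetical):
// file-scope flags become members of a config struct with in-class defaults,
// and helpers read them through a const reference instead of globals.
#include <set>
#include <string>

struct PassConfig {
	bool map_mux4 = false;
	bool markgroups = false;
	bool cmos_cost = false;
	std::set<std::string> enabled_gates;
};

static int gate_cost(const PassConfig &config, const std::string &gate)
{
	// Options come from the struct, so repeated invocations of the pass can
	// no longer leak state into each other through mutable globals.
	if (!config.enabled_gates.count(gate))
		return -1;                   // gate not enabled
	return config.cmos_cost ? 4 : 1; // illustrative costs only
}
```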
|
||||
|
||||
struct AbcSigVal {
|
||||
|
|
@ -1382,7 +1379,7 @@ void emit_global_input_files(const AbcConfig &config)
|
|||
fprintf(f, "%d %d.00 1.00\n", i+1, config.lut_costs.at(i));
|
||||
fclose(f);
|
||||
} else {
|
||||
auto &cell_cost = cmos_cost ? CellCosts::cmos_gate_cost() : CellCosts::default_gate_cost();
|
||||
auto &cell_cost = config.cmos_cost ? CellCosts::cmos_gate_cost() : CellCosts::default_gate_cost();
|
||||
|
||||
std::string buffer = stringf("%s/stdcells.genlib", config.global_tempdir_name.c_str());
|
||||
FILE *f = fopen(buffer.c_str(), "wt");
|
||||
|
|
@ -1392,39 +1389,39 @@ void emit_global_input_files(const AbcConfig &config)
|
|||
fprintf(f, "GATE ONE 1 Y=CONST1;\n");
|
||||
fprintf(f, "GATE BUF %d Y=A; PIN * NONINV 1 999 1 0 1 0\n", cell_cost.at(ID($_BUF_)));
|
||||
fprintf(f, "GATE NOT %d Y=!A; PIN * INV 1 999 1 0 1 0\n", cell_cost.at(ID($_NOT_)));
|
||||
if (enabled_gates.count("AND"))
|
||||
if (config.enabled_gates.count("AND"))
|
||||
fprintf(f, "GATE AND %d Y=A*B; PIN * NONINV 1 999 1 0 1 0\n", cell_cost.at(ID($_AND_)));
|
||||
if (enabled_gates.count("NAND"))
|
||||
if (config.enabled_gates.count("NAND"))
|
||||
fprintf(f, "GATE NAND %d Y=!(A*B); PIN * INV 1 999 1 0 1 0\n", cell_cost.at(ID($_NAND_)));
|
||||
if (enabled_gates.count("OR"))
|
||||
if (config.enabled_gates.count("OR"))
|
||||
fprintf(f, "GATE OR %d Y=A+B; PIN * NONINV 1 999 1 0 1 0\n", cell_cost.at(ID($_OR_)));
|
||||
if (enabled_gates.count("NOR"))
|
||||
if (config.enabled_gates.count("NOR"))
|
||||
fprintf(f, "GATE NOR %d Y=!(A+B); PIN * INV 1 999 1 0 1 0\n", cell_cost.at(ID($_NOR_)));
|
||||
if (enabled_gates.count("XOR"))
|
||||
if (config.enabled_gates.count("XOR"))
|
||||
fprintf(f, "GATE XOR %d Y=(A*!B)+(!A*B); PIN * UNKNOWN 1 999 1 0 1 0\n", cell_cost.at(ID($_XOR_)));
|
||||
if (enabled_gates.count("XNOR"))
|
||||
if (config.enabled_gates.count("XNOR"))
|
||||
fprintf(f, "GATE XNOR %d Y=(A*B)+(!A*!B); PIN * UNKNOWN 1 999 1 0 1 0\n", cell_cost.at(ID($_XNOR_)));
|
||||
if (enabled_gates.count("ANDNOT"))
|
||||
if (config.enabled_gates.count("ANDNOT"))
|
||||
fprintf(f, "GATE ANDNOT %d Y=A*!B; PIN * UNKNOWN 1 999 1 0 1 0\n", cell_cost.at(ID($_ANDNOT_)));
|
||||
if (enabled_gates.count("ORNOT"))
|
||||
if (config.enabled_gates.count("ORNOT"))
|
||||
fprintf(f, "GATE ORNOT %d Y=A+!B; PIN * UNKNOWN 1 999 1 0 1 0\n", cell_cost.at(ID($_ORNOT_)));
|
||||
if (enabled_gates.count("AOI3"))
|
||||
if (config.enabled_gates.count("AOI3"))
|
||||
fprintf(f, "GATE AOI3 %d Y=!((A*B)+C); PIN * INV 1 999 1 0 1 0\n", cell_cost.at(ID($_AOI3_)));
|
||||
if (enabled_gates.count("OAI3"))
|
||||
if (config.enabled_gates.count("OAI3"))
|
||||
fprintf(f, "GATE OAI3 %d Y=!((A+B)*C); PIN * INV 1 999 1 0 1 0\n", cell_cost.at(ID($_OAI3_)));
|
||||
if (enabled_gates.count("AOI4"))
|
||||
if (config.enabled_gates.count("AOI4"))
|
||||
fprintf(f, "GATE AOI4 %d Y=!((A*B)+(C*D)); PIN * INV 1 999 1 0 1 0\n", cell_cost.at(ID($_AOI4_)));
|
||||
if (enabled_gates.count("OAI4"))
|
||||
if (config.enabled_gates.count("OAI4"))
|
||||
fprintf(f, "GATE OAI4 %d Y=!((A+B)*(C+D)); PIN * INV 1 999 1 0 1 0\n", cell_cost.at(ID($_OAI4_)));
|
||||
if (enabled_gates.count("MUX"))
|
||||
if (config.enabled_gates.count("MUX"))
|
||||
fprintf(f, "GATE MUX %d Y=(A*B)+(S*B)+(!S*A); PIN * UNKNOWN 1 999 1 0 1 0\n", cell_cost.at(ID($_MUX_)));
|
||||
if (enabled_gates.count("NMUX"))
|
||||
if (config.enabled_gates.count("NMUX"))
|
||||
fprintf(f, "GATE NMUX %d Y=!((A*B)+(S*B)+(!S*A)); PIN * UNKNOWN 1 999 1 0 1 0\n", cell_cost.at(ID($_NMUX_)));
|
||||
if (map_mux4)
|
||||
if (config.map_mux4)
|
||||
fprintf(f, "GATE MUX4 %d Y=(!S*!T*A)+(S*!T*B)+(!S*T*C)+(S*T*D); PIN * UNKNOWN 1 999 1 0 1 0\n", 2*cell_cost.at(ID($_MUX_)));
|
||||
if (map_mux8)
|
||||
if (config.map_mux8)
|
||||
fprintf(f, "GATE MUX8 %d Y=(!S*!T*!U*A)+(S*!T*!U*B)+(!S*T*!U*C)+(S*T*!U*D)+(!S*!T*U*E)+(S*!T*U*F)+(!S*T*U*G)+(S*T*U*H); PIN * UNKNOWN 1 999 1 0 1 0\n", 4*cell_cost.at(ID($_MUX_)));
|
||||
if (map_mux16)
|
||||
if (config.map_mux16)
|
||||
fprintf(f, "GATE MUX16 %d Y=(!S*!T*!U*!V*A)+(S*!T*!U*!V*B)+(!S*T*!U*!V*C)+(S*T*!U*!V*D)+(!S*!T*U*!V*E)+(S*!T*U*!V*F)+(!S*T*U*!V*G)+(S*T*U*!V*H)+(!S*!T*!U*V*I)+(S*!T*!U*V*J)+(!S*T*!U*V*K)+(S*T*!U*V*L)+(!S*!T*U*V*M)+(S*!T*U*V*N)+(!S*T*U*V*O)+(S*T*U*V*P); PIN * UNKNOWN 1 999 1 0 1 0\n", 8*cell_cost.at(ID($_MUX_)));
|
||||
fclose(f);
|
||||
}
|
||||
|
|
@ -1456,6 +1453,7 @@ void AbcModuleState::extract(AbcSigMap &assign_map, RTLIL::Design *design, RTLIL
|
|||
RTLIL::Module *mapped_mod = mapped_design->module(ID(netlist));
|
||||
if (mapped_mod == nullptr)
|
||||
log_error("ABC output file does not contain a module `netlist'.\n");
|
||||
bool markgroups = run_abc.config.markgroups;
|
||||
for (auto w : mapped_mod->wires()) {
|
||||
RTLIL::Wire *orig_wire = nullptr;
|
||||
RTLIL::Wire *wire = module->addWire(remap_name(w->name, &orig_wire));
|
||||
|
|
@ -1998,9 +1996,9 @@ struct AbcPass : public Pass {
|
|||
lut_arg = design->scratchpad_get_string("abc.lut", lut_arg);
|
||||
luts_arg = design->scratchpad_get_string("abc.luts", luts_arg);
|
||||
config.sop_mode = design->scratchpad_get_bool("abc.sop", false);
|
||||
map_mux4 = design->scratchpad_get_bool("abc.mux4", map_mux4);
|
||||
map_mux8 = design->scratchpad_get_bool("abc.mux8", map_mux8);
|
||||
map_mux16 = design->scratchpad_get_bool("abc.mux16", map_mux16);
|
||||
config.map_mux4 = design->scratchpad_get_bool("abc.mux4", false);
|
||||
config.map_mux8 = design->scratchpad_get_bool("abc.mux8", false);
|
||||
config.map_mux16 = design->scratchpad_get_bool("abc.mux16", false);
|
||||
config.abc_dress = design->scratchpad_get_bool("abc.dress", false);
|
||||
g_arg = design->scratchpad_get_string("abc.g", g_arg);
|
||||
|
||||
|
|
@ -2014,7 +2012,7 @@ struct AbcPass : public Pass {
|
|||
config.keepff = design->scratchpad_get_bool("abc.keepff", false);
|
||||
config.cleanup = !design->scratchpad_get_bool("abc.nocleanup", false);
|
||||
config.show_tempdir = design->scratchpad_get_bool("abc.showtmp", false);
|
||||
markgroups = design->scratchpad_get_bool("abc.markgroups", markgroups);
|
||||
config.markgroups = design->scratchpad_get_bool("abc.markgroups", false);
|
||||
|
||||
if (config.cleanup)
|
||||
config.global_tempdir_name = get_base_tmpdir() + "/";
|
||||
|
|
@ -2094,15 +2092,15 @@ struct AbcPass : public Pass {
|
|||
continue;
|
||||
}
|
||||
if (arg == "-mux4") {
|
||||
map_mux4 = true;
|
||||
config.map_mux4 = true;
|
||||
continue;
|
||||
}
|
||||
if (arg == "-mux8") {
|
||||
map_mux8 = true;
|
||||
config.map_mux8 = true;
|
||||
continue;
|
||||
}
|
||||
if (arg == "-mux16") {
|
||||
map_mux16 = true;
|
||||
config.map_mux16 = true;
|
||||
continue;
|
||||
}
|
||||
if (arg == "-dress") {
|
||||
|
|
@ -2143,7 +2141,7 @@ struct AbcPass : public Pass {
|
|||
continue;
|
||||
}
|
||||
if (arg == "-markgroups") {
|
||||
markgroups = true;
|
||||
config.markgroups = true;
|
||||
continue;
|
||||
}
|
||||
break;
|
||||
|
|
@ -2236,14 +2234,14 @@ struct AbcPass : public Pass {
|
|||
}
|
||||
if (g == "cmos2") {
|
||||
if (!remove_gates)
|
||||
cmos_cost = true;
|
||||
config.cmos_cost = true;
|
||||
gate_list.push_back("NAND");
|
||||
gate_list.push_back("NOR");
|
||||
goto ok_alias;
|
||||
}
|
||||
if (g == "cmos3") {
|
||||
if (!remove_gates)
|
||||
cmos_cost = true;
|
||||
config.cmos_cost = true;
|
||||
gate_list.push_back("NAND");
|
||||
gate_list.push_back("NOR");
|
||||
gate_list.push_back("AOI3");
|
||||
|
|
@ -2252,7 +2250,7 @@ struct AbcPass : public Pass {
|
|||
}
|
||||
if (g == "cmos4") {
|
||||
if (!remove_gates)
|
||||
cmos_cost = true;
|
||||
config.cmos_cost = true;
|
||||
gate_list.push_back("NAND");
|
||||
gate_list.push_back("NOR");
|
||||
gate_list.push_back("AOI3");
|
||||
|
|
@ -2263,7 +2261,7 @@ struct AbcPass : public Pass {
|
|||
}
|
||||
if (g == "cmos") {
|
||||
if (!remove_gates)
|
||||
cmos_cost = true;
|
||||
config.cmos_cost = true;
|
||||
gate_list.push_back("NAND");
|
||||
gate_list.push_back("NOR");
|
||||
gate_list.push_back("AOI3");
|
||||
|
|
@ -2322,9 +2320,9 @@ struct AbcPass : public Pass {
|
|||
ok_alias:
|
||||
for (auto gate : gate_list) {
|
||||
if (remove_gates)
|
||||
enabled_gates.erase(gate);
|
||||
config.enabled_gates.erase(gate);
|
||||
else
|
||||
enabled_gates.insert(gate);
|
||||
config.enabled_gates.insert(gate);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -2334,21 +2332,21 @@ struct AbcPass : public Pass {
|
|||
if (!config.constr_file.empty() && (config.liberty_files.empty() && config.genlib_files.empty()))
|
||||
log_cmd_error("Got -constr but no -liberty/-genlib!\n");
|
||||
|
||||
if (enabled_gates.empty()) {
|
||||
enabled_gates.insert("AND");
|
||||
enabled_gates.insert("NAND");
|
||||
enabled_gates.insert("OR");
|
||||
enabled_gates.insert("NOR");
|
||||
enabled_gates.insert("XOR");
|
||||
enabled_gates.insert("XNOR");
|
||||
enabled_gates.insert("ANDNOT");
|
||||
enabled_gates.insert("ORNOT");
|
||||
// enabled_gates.insert("AOI3");
|
||||
// enabled_gates.insert("OAI3");
|
||||
// enabled_gates.insert("AOI4");
|
||||
// enabled_gates.insert("OAI4");
|
||||
enabled_gates.insert("MUX");
|
||||
// enabled_gates.insert("NMUX");
|
||||
if (config.enabled_gates.empty()) {
|
||||
config.enabled_gates.insert("AND");
|
||||
config.enabled_gates.insert("NAND");
|
||||
config.enabled_gates.insert("OR");
|
||||
config.enabled_gates.insert("NOR");
|
||||
config.enabled_gates.insert("XOR");
|
||||
config.enabled_gates.insert("XNOR");
|
||||
config.enabled_gates.insert("ANDNOT");
|
||||
config.enabled_gates.insert("ORNOT");
|
||||
// config.enabled_gates.insert("AOI3");
|
||||
// config.enabled_gates.insert("OAI3");
|
||||
// config.enabled_gates.insert("AOI4");
|
||||
// config.enabled_gates.insert("OAI4");
|
||||
config.enabled_gates.insert("MUX");
|
||||
// config.enabled_gates.insert("NMUX");
|
||||
}
|
||||
|
||||
emit_global_input_files(config);
|
||||
|
|
|
|||
|
|
@ -271,6 +271,13 @@ static void find_cell(std::vector<const LibertyAst *> cells, IdString cell_type,
|
|||
continue;
|
||||
if (!parse_next_state(cell, ff->find("next_state"), cell_next_pin, cell_next_pol, cell_enable_pin, cell_enable_pol) || (has_enable && (cell_enable_pin.empty() || cell_enable_pol != enapol)))
|
||||
continue;
|
||||
|
||||
if (has_reset && !cell_next_pol) {
|
||||
// next_state is negated
|
||||
// we later propagate this inversion to the output,
|
||||
// which requires the negation of the reset value
|
||||
rstval = !rstval;
|
||||
}
|
||||
if (has_reset && rstval == false) {
|
||||
if (!parse_pin(cell, ff->find("clear"), cell_rst_pin, cell_rst_pol) || cell_rst_pol != rstpol)
|
||||
continue;
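A small worked example of the polarity rule added here (values invented; the reasoning follows the hunk's own comment):

```cpp
// Worked example of the reset-polarity rule (values invented; the reasoning
// follows the comment in the hunk above).
#include <cassert>

int main()
{
	bool cell_next_pol = false; // library FF: next_state is the inverse of D
	bool has_reset = true;
	bool rstval = false;        // the design's FF resets to 0

	// The mapped cell stores the inverted value and the inversion is undone
	// at its output, so for the visible state to read 0 after reset the cell
	// itself must reset to 1 -- i.e. the reset value is negated.
	if (has_reset && !cell_next_pol)
		rstval = !rstval;

	assert(rstval == true); // now look for a "preset" pin instead of "clear"
	return 0;
}
```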
|
||||
|
|
|
|||
|
|
@ -56,6 +56,7 @@ from cxxheaderparser.types import (
|
|||
Variable,
|
||||
Array,
|
||||
FundamentalSpecifier,
|
||||
FunctionType,
|
||||
)
|
||||
|
||||
__file_dir__ = Path(__file__).absolute().parent
|
||||
|
|
@ -177,11 +178,11 @@ pyosys_headers = [
|
|||
denylist=frozenset({"bits", "bitvectorize"}),
|
||||
),
|
||||
PyosysClass("AttrObject", denylist=frozenset({"get_blackbox_attribute"})),
|
||||
PyosysClass("NamedObject", denylist=frozenset({"get_blackbox_attribute"})),
|
||||
PyosysClass("NamedObject"),
|
||||
PyosysClass("Selection"),
|
||||
# PyosysClass("Monitor"), # Virtual methods, manually bridged
|
||||
PyosysClass("CaseRule", denylist=frozenset({"get_blackbox_attribute"})),
|
||||
PyosysClass("SwitchRule", denylist=frozenset({"get_blackbox_attribute"})),
|
||||
PyosysClass("CaseRule"),
|
||||
PyosysClass("SwitchRule"),
|
||||
PyosysClass("SyncRule"),
|
||||
PyosysClass(
|
||||
"Process",
|
||||
|
|
@ -219,7 +220,7 @@ pyosys_headers = [
|
|||
),
|
||||
PyosysClass(
|
||||
"Design",
|
||||
string_expr="s.hashidx_",
|
||||
string_expr="std::to_string(s.hashidx_)",
|
||||
hash_expr="s",
|
||||
denylist=frozenset({"selected_whole_modules"}), # deprecated
|
||||
),
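For context, this ``string_expr`` ends up inside the generated pybind11 ``__str__``/``__repr__`` lambdas; a hand-written approximation of the resulting binding (not actual generator output, and the binder function name is invented) would look like:

```cpp
// Hand-written approximation (not actual generator output) of the binding
// that string_expr="std::to_string(s.hashidx_)" produces for Design.
#include <pybind11/pybind11.h>
#include <sstream>
#include <string>
#include "kernel/rtlil.h"
namespace py = pybind11;
USING_YOSYS_NAMESPACE

void bind_design(py::module_ &m)
{
	py::class_<RTLIL::Design>(m, "Design")
		// hashidx_ is an integer, so it has to pass through std::to_string
		// for __str__ to hand a Python str back (the reason for this change).
		.def("__str__", [](const RTLIL::Design &s) { return std::to_string(s.hashidx_); })
		.def("__repr__", [](const RTLIL::Design &s) {
			std::stringstream ss;
			ss << "<Design " << std::to_string(s.hashidx_) << ">";
			return ss.str();
		});
}
```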
|
||||
|
|
@ -241,13 +242,17 @@ class PyosysType:
|
|||
|
||||
@classmethod
|
||||
def from_type(Self, type_obj, drop_const=False) -> "PyosysType":
|
||||
const = type_obj.const and not drop_const
|
||||
const = hasattr(type_obj, "const") and type_obj.const and not drop_const
|
||||
if isinstance(type_obj, Pointer):
|
||||
ptr_to = Self.from_type(type_obj.ptr_to)
|
||||
return Self("ptr", (ptr_to,), const)
|
||||
elif isinstance(type_obj, Reference):
|
||||
ref_to = Self.from_type(type_obj.ref_to)
|
||||
return Self("ref", (ref_to,), const)
|
||||
elif isinstance(type_obj, FunctionType):
|
||||
ret_type = Self.from_type(type_obj.return_type)
|
||||
param_types = (Self.from_type(p.type) for p in type_obj.parameters)
|
||||
return Self("fn", (ret_type, *param_types), False)
|
||||
assert isinstance(
|
||||
type_obj, Type
|
||||
), f"unexpected c++ type object of type {type(type_obj)}"
|
||||
|
|
@ -270,6 +275,16 @@ class PyosysType:
|
|||
if title == "Dict":
|
||||
key, value = self.specialization
|
||||
return f"{key.generate_identifier()}To{value.generate_identifier()}{title}"
|
||||
elif title == "Fn":
|
||||
identifier = self.specialization[0].generate_identifier()
|
||||
if identifier == "Void":
|
||||
identifier = ""
|
||||
else:
|
||||
identifier += "From"
|
||||
identifier += "And".join(
|
||||
p.generate_identifier() for p in self.specialization[1:]
|
||||
)
|
||||
return identifier
|
||||
|
||||
return (
|
||||
"".join(spec.generate_identifier() for spec in self.specialization) + title
|
||||
|
|
@ -283,6 +298,9 @@ class PyosysType:
|
|||
return const_prefix + f"{self.specialization[0].generate_cpp_name()} *"
|
||||
elif self.base == "ref":
|
||||
return const_prefix + f"{self.specialization[0].generate_cpp_name()} &"
|
||||
elif self.base == "fn":
|
||||
param_cpp_names = (s.generate_cpp_name() for s in self.specialization[1:])
|
||||
return f"{self.specialization[0].generate_cpp_name()}({','.join(param_cpp_names)})"
|
||||
else:
|
||||
return (
|
||||
const_prefix
|
||||
|
|
@ -301,7 +319,7 @@ class PyosysWrapperGenerator(object):
|
|||
self.f = wrapper_stream
|
||||
self.f_inc = header_stream
|
||||
self.found_containers: Dict[PyosysType, Any] = {}
|
||||
self.class_registry: Dict[str, ClassScope] = {}
|
||||
self.class_registry: Dict[str, Tuple[ClassScope, PyosysClass]] = {}
|
||||
|
||||
# entry point
|
||||
def generate(self):
|
||||
|
|
@ -380,7 +398,7 @@ class PyosysWrapperGenerator(object):
|
|||
if isinstance(type_info, Reference):
|
||||
return PyosysWrapperGenerator.find_containers(containers, type_info.ref_to)
|
||||
if not isinstance(type_info, Type):
|
||||
return ()
|
||||
return {}
|
||||
segments = type_info.typename.segments
|
||||
containers_found = {}
|
||||
for segment in segments:
|
||||
|
|
@ -411,19 +429,23 @@ class PyosysWrapperGenerator(object):
|
|||
def get_parameter_types(function: Function) -> str:
|
||||
return ", ".join(p.type.format() for p in function.parameters)
|
||||
|
||||
def register_containers(self, target: Union[Function, Field, Variable]):
|
||||
def register_containers(self, target: Union[Function, Field, Variable]) -> bool:
|
||||
supported = ("dict", "idict", "pool", "set", "vector")
|
||||
found = False
|
||||
if isinstance(target, Function):
|
||||
self.found_containers.update(
|
||||
self.find_containers(supported, target.return_type)
|
||||
)
|
||||
return_type_containers = self.find_containers(supported, target.return_type)
|
||||
found = found or len(return_type_containers)
|
||||
self.found_containers.update(return_type_containers)
|
||||
|
||||
for parameter in target.parameters:
|
||||
self.found_containers.update(
|
||||
self.find_containers(supported, parameter.type)
|
||||
)
|
||||
parameter_containers = self.find_containers(supported, parameter.type)
|
||||
found = found or len(parameter_containers)
|
||||
self.found_containers.update(parameter_containers)
|
||||
else:
|
||||
self.found_containers.update(self.find_containers(supported, target.type))
|
||||
variable_containers = self.find_containers(supported, target.type)
|
||||
found = found or len(variable_containers)
|
||||
self.found_containers.update(variable_containers)
|
||||
return found
|
||||
|
||||
# processors
|
||||
def get_overload_cast(
|
||||
|
|
@ -470,9 +492,9 @@ class PyosysWrapperGenerator(object):
|
|||
|
||||
def_args = [f'"{python_function_basename}"']
|
||||
def_args.append(self.get_overload_cast(function, class_basename))
|
||||
for parameter in function.parameters:
|
||||
# ASSUMPTION: there are no unnamed parameters in the yosys codebase
|
||||
parameter_arg = f'py::arg("{parameter.name}")'
|
||||
for i, parameter in enumerate(function.parameters):
|
||||
name = parameter.name or f"arg{i}"
|
||||
parameter_arg = f'py::arg("{name}")'
|
||||
if parameter.default is not None:
|
||||
parameter_arg += f" = {parameter.default.format()}"
|
||||
def_args.append(parameter_arg)
|
||||
|
|
@ -525,8 +547,12 @@ class PyosysWrapperGenerator(object):
|
|||
if function.static:
|
||||
definition_fn = "def_static"
|
||||
|
||||
definition_args = self.get_definition_args(
|
||||
function, metadata.name, python_name_override
|
||||
)
|
||||
|
||||
print(
|
||||
f"\t\t\t.{definition_fn}({', '.join(self.get_definition_args(function, metadata.name, python_name_override))})",
|
||||
f"\t\t\t.{definition_fn}({', '.join(definition_args)})",
|
||||
file=self.f,
|
||||
)
|
||||
|
||||
|
|
@ -565,7 +591,7 @@ class PyosysWrapperGenerator(object):
|
|||
# care
|
||||
return
|
||||
|
||||
self.register_containers(field)
|
||||
has_containers = self.register_containers(field)
|
||||
|
||||
definition_fn = f"def_{'readonly' if field.type.const else 'readwrite'}"
|
||||
if field.static:
|
||||
|
|
@@ -573,8 +599,13 @@ class PyosysWrapperGenerator(object):

        field_python_basename = keyword_aliases.get(field.name, field.name)

        def_args = [
            f'"{field_python_basename}"',
            f"&{metadata.name}::{field.name}",
        ]
        def_args.append("py::return_value_policy::copy")
        print(
            f'\t\t\t.{definition_fn}("{field_python_basename}", &{metadata.name}::{field.name})',
            f"\t\t\t.{definition_fn}({', '.join(def_args)})",
            file=self.f,
        )

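Field bindings are now assembled as an argument list, so extra arguments such as py::return_value_policy::copy can be appended before the single print. A small sketch of the emitted line, with RTLIL::Wire::name used only as a plausible placeholder member:

definition_fn = "def_readwrite"
def_args = ['"name"', "&RTLIL::Wire::name", "py::return_value_policy::copy"]
print(f"\t\t\t.{definition_fn}({', '.join(def_args)})")
# prints (leading tabs omitted): .def_readwrite("name", &RTLIL::Wire::name, py::return_value_policy::copy)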
@@ -603,16 +634,20 @@ class PyosysWrapperGenerator(object):
        )

    def process_class_members(
        self, metadata: PyosysClass, cls: ClassScope, basename: str
        self,
        metadata: PyosysClass,
        base_metadata: PyosysClass,
        cls: ClassScope,
        basename: str,
    ):
        for method in cls.methods:
            if method.name.segments[-1].name in metadata.denylist:
            if method.name.segments[-1].name in base_metadata.denylist:
                continue
            self.process_method(metadata, method)

        visited_anonymous_unions = set()
        for field_ in cls.fields:
            if field_.name in metadata.denylist:
            if field_.name in base_metadata.denylist:
                continue
            self.process_field(metadata, field_)

@@ -627,6 +662,16 @@ class PyosysWrapperGenerator(object):
            for subfield in subclass.fields:
                self.process_field(metadata, subfield)

        for base in cls.class_decl.bases:
            if base.access != "public":
                continue
            name = base.typename.segments[-1].format()
            if processed := self.class_registry.get(name):
                base_scope, base_metadata = processed
                self.process_class_members(
                    metadata, base_metadata, base_scope, basename
                )

    def process_class(
        self,
        metadata: PyosysClass,
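Base-class members are now collected inside process_class_members itself, looking each public base up in class_registry and filtering each scope against that scope's own metadata. A self-contained sketch of the walk (the class and member names are real Yosys identifiers used here only as stand-in data):

# registry maps a class name to (members, denylist); bases lists public bases
registry = {
    "AttrObject": (["set_bool_attribute", "internal_helper"], {"internal_helper"}),
    "Cell":       (["setPort", "getPort"], set()),
}
bases = {"Cell": ["AttrObject"], "AttrObject": []}

def collect(name):
    members, denylist = registry[name]
    out = [m for m in members if m not in denylist]   # each scope uses its own denylist
    for base in bases[name]:
        out += collect(base)                          # recurse into registered bases
    return out

print(collect("Cell"))  # ['setPort', 'getPort', 'set_bool_attribute']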
@@ -638,7 +683,7 @@ class PyosysWrapperGenerator(object):
            segment.format() for segment in pqname.segments
        ]
        basename = full_path.pop()
        self.class_registry[basename] = cls
        self.class_registry[basename] = (cls, metadata)

        declaration_namespace = "::".join(full_path)
        tpl_args = [basename]
@@ -649,19 +694,17 @@ class PyosysWrapperGenerator(object):
            file=self.f,
        )

        self.process_class_members(metadata, cls, basename)
        for base in cls.class_decl.bases:
            if base.access != "public":
                continue
            name = base.typename.segments[-1].format()
            if base_scope := self.class_registry.get(name):
                self.process_class_members(metadata, base_scope, basename)
        self.process_class_members(metadata, metadata, cls, basename)

        if expr := metadata.string_expr:
            print(
                f'\t\t.def("__str__", [](const {basename} &s) {{ return {expr}; }})',
                file=self.f,
            )
            print(
                f'\t\t.def("__repr__", [](const {basename} &s) {{ std::stringstream ss; ss << "<{basename} " << {expr} << ">"; return ss.str(); }})',
                file=self.f,
            )

        if expr := metadata.hash_expr:
            print(
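For classes whose metadata provides a string_expr, the generator now also emits a __repr__ of the form <ClassName expr>. A hedged sketch of how that might look from Python; the class chosen, its constructor, and the exact strings are assumptions, not taken from the diff:

from pyosys import libyosys as ys

name = ys.IdString("\\top")     # assumed constructible from a string
print(str(name))                # assumed: "\top"
print(repr(name))               # assumed: "<IdString \top>"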
@@ -21,6 +21,12 @@
// <!-- generated includes -->
#include <pybind11/pybind11.h>
#include <pybind11/native_enum.h>
#include <pybind11/functional.h>

// duplicates for LSPs
#include "kernel/register.h"
#include "kernel/yosys_common.h"

#include "pyosys/hashlib.h"

namespace py = pybind11;
@@ -28,7 +34,7 @@ namespace py = pybind11;
USING_YOSYS_NAMESPACE

using std::set;
using std::regex;
using std::function;
using std::ostream;
using namespace RTLIL;

@@ -2,7 +2,7 @@
requires = [
    "setuptools>=42",
    "pybind11>=3,<4",
    "cxxheaderparser",
    "cxxheaderparser"
]
build-backend = "setuptools.build_meta"

setup.py
@@ -51,6 +51,7 @@ class libyosys_so_ext(Extension):
        "ENABLE_TCL=0",
        "ENABLE_READLINE=0",
        "ENABLE_EDITLINE=0",
        "PYOSYS_USE_UV=0", # + install requires takes its role when building wheels
        # Always compile and include ABC in wheel
        "ABCEXTERNAL=",
        # Show compile commands
@@ -2,6 +2,9 @@
OBJS += techlibs/gatemate/synth_gatemate.o
OBJS += techlibs/gatemate/gatemate_foldinv.o

GENFILES += techlibs/gatemate/lut_tree_cells.genlib
GENFILES += techlibs/gatemate/lut_tree_map.v

$(eval $(call add_share_file,share/gatemate,techlibs/gatemate/reg_map.v))
$(eval $(call add_share_file,share/gatemate,techlibs/gatemate/mux_map.v))
$(eval $(call add_share_file,share/gatemate,techlibs/gatemate/lut_map.v))

@@ -28,3 +31,4 @@ techlibs/gatemate/lut_tree_map.v: techlibs/gatemate/lut_tree_lib.mk

$(eval $(call add_gen_share_file,share/gatemate,techlibs/gatemate/lut_tree_cells.genlib))
$(eval $(call add_gen_share_file,share/gatemate,techlibs/gatemate/lut_tree_map.v))

@@ -0,0 +1,28 @@

from pyosys import libyosys as ys
from pathlib import Path

__file_dir__ = Path(__file__).absolute().parent

d = ys.Design()
ys.run_pass(f"read_verilog {__file_dir__ / 'spm.cut.v.gz'}", d)
ys.run_pass("hierarchy -top spm", d)

external_idstring_holder_0 = None
external_idstring_holder_1 = None

def get_top_module_idstring():
    global external_idstring_holder_0, external_idstring_holder_1
    d = ys.Design()
    ys.run_pass(f"read_verilog {__file_dir__ / 'spm.cut.v.gz'}", d)
    ys.run_pass("hierarchy -top spm", d)
    external_idstring_holder_0 = d.top_module().name
    for cell in d.top_module().cells_:
        print(f"TARGETED: {cell}", flush=True)
        external_idstring_holder_1 = cell
        break
    # d deallocates

get_top_module_idstring()
print(external_idstring_holder_0, flush=True)
print(external_idstring_holder_1, flush=True)
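This test keeps an IdString and a cell key alive past the lifetime of the Design created inside the helper. A hedged follow-up check one might add at the end (IdString.str() is used elsewhere in these tests; the exact top-module spelling is an assumption):

assert external_idstring_holder_0.str() == "\\spm"   # assumed spelling of the top module name
assert external_idstring_holder_1 is not None        # the held cell key is still a live object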
@@ -0,0 +1,15 @@

from pyosys import libyosys as ys
from pathlib import Path

__file_dir__ = Path(__file__).absolute().parent


d = ys.Design()
ys.run_pass(f"read_verilog {__file_dir__ / 'spm.cut.v.gz'}", d)
ys.run_pass("hierarchy -top spm", d)

for idstr, cell in d.top_module().cells_.items():
    cell.set_bool_attribute("\\set")
    print(cell.attributes)
    break
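Here cells_ is iterated as a dict and set_bool_attribute mutates the underlying RTLIL cell. A hedged read-back sketch, assuming get_bool_attribute is wrapped the same way and accepts the same "\\set" spelling used above:

for idstr, cell in d.top_module().cells_.items():
    # assumed wrapper for AttrObject::get_bool_attribute
    print(idstr, cell.get_bool_attribute("\\set"))
    break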
@@ -14,7 +14,7 @@ class Monitor(ys.Monitor):
        self.mods.append(mod.name.str())

m = Monitor()
d.monitors.add(m)
d.monitors = [m]

ys.run_pass(f"read_verilog {__file_dir__ / 'spm.cut.v.gz'}", d)
ys.run_pass("hierarchy -top spm", d)
@@ -0,0 +1,26 @@
read_verilog <<EOT
module simple(I1, I2, O);
input wire I1;
input wire I2;
output wire O;

assign O = I1 | I2;
endmodule
EOT
abc -g all

design -reset
read_verilog <<EOT
module simple(I1, I2, O);
input wire I1;
input wire I2;
output wire O;

assign O = I1 | I2;
endmodule
EOT
techmap
abc -g AND

select -assert-count 0 t:$_OR_
select -assert-count 1 t:$_AND_
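The same check can be driven from pyosys with the run_pass API used by the Python tests above. This is a hedged sketch: the temporary-file plumbing is an assumption, and only the two select asserts come from the test itself:

import os
import tempfile
from pyosys import libyosys as ys

verilog = "module simple(input I1, I2, output O); assign O = I1 | I2; endmodule\n"
with tempfile.NamedTemporaryFile("w", suffix=".v", delete=False) as f:
    f.write(verilog)
    path = f.name

d = ys.Design()
ys.run_pass(f"read_verilog {path}", d)
ys.run_pass("techmap", d)
ys.run_pass("abc -g AND", d)
ys.run_pass("select -assert-count 0 t:$_OR_", d)   # no OR gates should remain
ys.run_pass("select -assert-count 1 t:$_AND_", d)  # mapped to a single AND (plus inverters)
os.unlink(path)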
@@ -0,0 +1,24 @@
library (test_not_next) {
  cell (dff_not_next) {
    area: 1.0;
    pin (QN) {
      direction : output;
      function : "STATE";
    }
    pin (CLK) {
      direction : input;
      clock : true;
    }
    pin (D) {
      direction : input;
    }
    pin (RN) {
      direction : input;
    }
    ff (STATE, STATEN) {
      clocked_on: "CLK";
      next_state: "!D";
      preset : "!RN";
    }
  }
}
@@ -108,6 +108,37 @@ copy top top_unmapped
simplemap top
dfflibmap -liberty dfflibmap_dffn_dffe.lib -liberty dfflibmap_dffsr_not_next.lib top

async2sync
flatten
opt_clean -purge
equiv_make top top_unmapped equiv
equiv_induct equiv
equiv_status -assert equiv

##################################################################

design -reset
read_verilog <<EOT

module top(input C, D, R, output Q);
// DFF with preset
always @(posedge C or negedge R) begin
    if (!R) Q <= 1'b1;
    else Q <= D;
end
endmodule

EOT

proc
opt
read_liberty dfflibmap_dffn_dffe.lib
read_liberty dfflibmap_dff_not_next.lib

copy top top_unmapped
simplemap top
dfflibmap -liberty dfflibmap_dffn_dffe.lib -liberty dfflibmap_dff_not_next.lib top

async2sync
flatten
opt_clean -purge
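The copy/map/equivalence pattern above is how these dfflibmap tests prove the mapped netlist matches the original. A hedged sketch of the same flow wrapped as a pyosys helper (the helper name and argument handling are assumptions; the pass sequence mirrors the test):

from pyosys import libyosys as ys

def assert_dfflibmap_equiv(d, liberty_args):
    # map a copy of `top` and prove it equivalent to the unmapped original
    ys.run_pass("copy top top_unmapped", d)
    ys.run_pass("simplemap top", d)
    ys.run_pass(f"dfflibmap {liberty_args} top", d)
    ys.run_pass("async2sync", d)
    ys.run_pass("flatten", d)
    ys.run_pass("opt_clean -purge", d)
    ys.run_pass("equiv_make top top_unmapped equiv", d)
    ys.run_pass("equiv_induct equiv", d)
    ys.run_pass("equiv_status -assert equiv", d)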
@@ -0,0 +1,18 @@
read_verilog -defer <<EOT

module bb (...);
localparam A = "abc";
input a;
output b;
endmodule

module top (...);
input a;
output b;
bb #(.A("def")) my_bb (a, b);
endmodule

EOT

logger -expect error "does not have a parameter named 'A'" 1
hierarchy -check -top top
@@ -0,0 +1,12 @@
read_verilog <<EOF
module top(input [3:0] a, input [3:0] b, output [7:0] y);
assign y = a * b;
endmodule

module top2(input [7:0] a, input [7:0] b, output [15:0] y);
assign y = a * b;
endmodule
EOF

synth
timeest
@@ -0,0 +1,13 @@
verific -sv <<EOT
module simple (
    input [3:0] I2,
    input [3:0] I1,
    output [3:0] result
);
assign result = I2 & I1;
endmodule
EOT
verific -import simple

write_verilog verilog_port_bus_order.out
!grep -qF 'simple(I2, I1, result)' verilog_port_bus_order.out
@@ -4,3 +4,5 @@
/roundtrip_proc_1.v
/roundtrip_proc_2.v
/assign_to_reg.v
/subdir
/temp_foo.v
@@ -0,0 +1,30 @@
#!/usr/bin/env bash

set -eu

# only works with read_verilog
yosys='../../yosys -f verilog'
test='-p hierarchy'
subdir=subdir
source=local_include.v
include=temp_foo.v

# no include file should fail
rm -f $include
echo "logger -expect error $include 1; read_verilog $source" | $yosys

# both files local
echo 'module foo (input a, output b); assign b = a; endmodule' > $include
$yosys $test $source

# include local to cwd
mkdir -p $subdir
cp $source $subdir
$yosys $test $subdir/$source

# include local to source
mv $include $subdir
$yosys $test $subdir/$source

# include local to source, and source is given as an absolute path
$yosys $test $(pwd)/$subdir/$source
@@ -0,0 +1,4 @@
`include "temp_foo.v"
module top (input x, output y);
    foo bar (.a(x), .b(y));
endmodule