2020-02-29 01:15:08 +01:00
|
|
|
// -*- mode: C++; c-file-style: "cc-mode" -*-
|
|
|
|
|
//*************************************************************************
|
|
|
|
|
// DESCRIPTION: Verilator: Break variables into separate words to avoid UNOPTFLAT
|
|
|
|
|
//
|
|
|
|
|
// Code available from: https://verilator.org
|
|
|
|
|
//
|
|
|
|
|
//*************************************************************************
|
|
|
|
|
//
|
2025-01-01 14:30:25 +01:00
|
|
|
// Copyright 2003-2025 by Wilson Snyder. This program is free software; you
|
2020-03-21 16:24:24 +01:00
|
|
|
// can redistribute it and/or modify it under the terms of either the GNU
|
2020-02-29 01:15:08 +01:00
|
|
|
// Lesser General Public License Version 3 or the Perl Artistic License
|
|
|
|
|
// Version 2.0.
|
2020-03-21 16:24:24 +01:00
|
|
|
// SPDX-License-Identifier: LGPL-3.0-only OR Artistic-2.0
|
2020-02-29 01:15:08 +01:00
|
|
|
//
|
|
|
|
|
//*************************************************************************
|
|
|
|
|
// V3SplitVar divides a variable into multiple variables to avoid UNOPTFLAT warning
|
2022-12-03 00:46:38 +01:00
|
|
|
// and get better performance.
|
2020-02-29 01:15:08 +01:00
|
|
|
// Variables to be split must be marked by /*verilator split_var*/ metacomment.
|
2022-12-03 00:46:38 +01:00
|
|
|
// There are several kinds of data types that may cause the warning.
|
2020-02-29 01:15:08 +01:00
|
|
|
// 1) Unpacked arrays
|
|
|
|
|
// 2) Packed arrays
|
|
|
|
|
// 3) Unpacked structs
|
|
|
|
|
// 4) Packed structs
|
|
|
|
|
// 5) Bitfields within a signal. (Especially Verilog code predating structs/2D arrays.)
|
|
|
|
|
// 2-5 above are treated as bitfields in verilator.
|
|
|
|
|
//
|
|
|
|
|
// What this pass does looks as below.
|
|
|
|
|
//
|
|
|
|
|
// // Original
|
2022-12-23 17:32:38 +01:00
|
|
|
// logic [1:0] unpacked_array_var[0:1] /*verilator split_var*/;
|
2020-02-29 01:15:08 +01:00
|
|
|
// always_comb begin
|
|
|
|
|
// unpacked_array_var[1][0] = unpacked_array_var[0][0]; // UNOPTFLAT warning
|
|
|
|
|
// unpacked_array_var[1][1] = ~unpacked_array_var[0][1]; // UNOPTFLAT warning
|
|
|
|
|
// end
|
|
|
|
|
// logic [3:0] packed_var /*verilator split_var*/;
|
|
|
|
|
// always_comb begin
|
|
|
|
|
// if (some_cond) begin
|
|
|
|
|
// packed_var = 4'b0;
|
|
|
|
|
// end else begin
|
|
|
|
|
// packed_var[3] = some_input0;
|
|
|
|
|
// packed_var[2:0] = some_input1;
|
|
|
|
|
// end
|
|
|
|
|
// end
|
|
|
|
|
//
|
|
|
|
|
// is initially converted to
|
|
|
|
|
//
|
|
|
|
|
// // Intermediate
|
2022-12-23 17:32:38 +01:00
|
|
|
// logic [1:0] unpacked_array_var0 /*verilator split_var*/;
|
|
|
|
|
// logic [1:0] unpacked_array_var1 /*verilator split_var*/;
|
2020-02-29 01:15:08 +01:00
|
|
|
// always_comb begin
|
|
|
|
|
// unpacked_array_var1[0] = unpacked_array_var0[0];
|
|
|
|
|
// unpacked_array_var1[1] = ~unpacked_array_var0[1];
|
|
|
|
|
// end
|
|
|
|
|
// logic [3:0] packed_var /*verilator split_var*/;
|
|
|
|
|
// always_comb begin
|
|
|
|
|
// if (some_cond) begin
|
|
|
|
|
// packed_var = 4'b0;
|
|
|
|
|
// end else begin
|
|
|
|
|
// packed_var[3] = some_input0;
|
|
|
|
|
// packed_var[2:0] = some_input1;
|
|
|
|
|
// end
|
|
|
|
|
// end
|
|
|
|
|
//
|
|
|
|
|
// then converted to
|
|
|
|
|
//
|
|
|
|
|
// // Final
|
|
|
|
|
// logic unpacked_array_var0__BRA__0__KET__;
|
|
|
|
|
// logic unpacked_array_var0__BRA__1__KET__;
|
|
|
|
|
// logic unpacked_array_var1__BRA__0__KET__;
|
|
|
|
|
// logic unpacked_array_var1__BRA__1__KET__;
|
|
|
|
|
// always_comb begin
|
|
|
|
|
// unpacked_array_var1__BRA__0__KET__ = unpacked_array_var0__BRA__0__KET__;
|
|
|
|
|
// unpacked_array_var1__BRA__1__KET__ = ~unpacked_array_var0__BRA__1__KET__;
|
|
|
|
|
// end
|
|
|
|
|
// logic packed_var__BRA__3__KET__;
|
|
|
|
|
// logic [2:0] packed_var__BRA__2_0__KET__;
|
|
|
|
|
// always_comb begin
|
|
|
|
|
// if (some_cond) begin
|
|
|
|
|
// {packed_var__BRA__3__KET__, packed_var__BRA__2_0__KET__} = 4'b0;
|
|
|
|
|
// end else begin
|
|
|
|
|
// packed_var__BRA__3__KET__ = some_input0;
|
|
|
|
|
// packed_var__BRA__2_0__KET__ = some_input1;
|
|
|
|
|
// end
|
|
|
|
|
// end
|
|
|
|
|
//
|
|
|
|
|
//
|
|
|
|
|
// Two visitor classes are defined here, SplitUnpackedVarVisitor and SplitPackedVarVisitor.
|
|
|
|
|
//
|
|
|
|
|
// - SplitUnpackedVarVisitor class splits unpacked arrays. ( 1) in the explanation above.)
|
|
|
|
|
// "unpacked_array_var" in the example above is a target of the class.
|
|
|
|
|
// The class changes AST from "Original" to "Intermediate".
|
|
|
|
|
// The visitor does not change packed variables.
|
|
|
|
|
// In addition to splitting unpacked arrays, the visitor collects the following information
|
|
|
|
|
// for each module.
|
|
|
|
|
// - AstVar
|
|
|
|
|
// - AstVarRef
|
|
|
|
|
// - AstSel
|
|
|
|
|
// They are stored in a RefsInModule instance and will be used in SplitPackedVarVisitor.
|
|
|
|
|
//
|
|
|
|
|
// - SplitPackedVarVisitor class splits packed variables ( 2), 3), 4), and 5) in the explanation
|
|
|
|
|
// above.)
|
|
|
|
|
// "unpacked_array0", "unpacked_array_var1", and "packed_var" in "Intermediate" are split by the
|
|
|
|
|
// class.
|
|
|
|
|
// Packed variables here include the result of SplitUnpackedVarVisitor.
|
|
|
|
|
// The result of this class looks like "Final" above.
|
|
|
|
|
// The class visits just necessary AstNode based on RefsInModule collected in the preceding
|
|
|
|
|
// SplitUnpackedVarVisitor.
|
|
|
|
|
// The visitor does not have to visit the entire AST because the necessary information is
|
|
|
|
|
// already in RefsInModule.
|
|
|
|
|
//
|
|
|
|
|
//*************************************************************************
|
|
|
|
|
|
2023-10-18 12:37:46 +02:00
|
|
|
#include "V3PchAstNoMT.h"  // VL_MT_DISABLED_CODE_UNIT

#include "V3SplitVar.h"

#include "V3AstUserAllocator.h"
#include "V3Stats.h"
#include "V3UniqueNames.h"

#include <map>
#include <set>
#include <unordered_set>
#include <vector>
|
|
|
|
|
|
2022-09-18 21:53:42 +02:00
|
|
|
VL_DEFINE_DEBUG_FUNCTIONS;
|
|
|
|
|
|
2024-01-20 21:06:46 +01:00
|
|
|
struct SplitVarImpl VL_NOT_FINAL {
|
2021-08-11 15:30:00 +02:00
|
|
|
// NODE STATE
|
|
|
|
|
// AstNodeModule::user1() -> Block number counter for generating unique names
|
2022-01-02 19:56:40 +01:00
|
|
|
const VNUser1InUse m_user1InUse; // Only used in SplitUnpackedVarVisitor
|
2021-08-11 15:30:00 +02:00
|
|
|
|
2020-02-29 01:15:08 +01:00
|
|
|
// These check functions return valid pointer to the reason text if a variable cannot be split.
|
|
|
|
|
|
|
|
|
|
// Check if a var type can be split
|
2022-01-02 19:56:40 +01:00
|
|
|
static const char* cannotSplitVarTypeReason(VVarType type) {
|
2020-02-29 01:15:08 +01:00
|
|
|
// Only SplitUnpackedVarVisitor can split WREAL. SplitPackedVarVisitor cannot.
|
|
|
|
|
const bool ok
|
|
|
|
|
= type == type.VAR || type == type.WIRE || type == type.PORT || type == type.WREAL;
|
2020-08-15 16:12:55 +02:00
|
|
|
if (ok) return nullptr;
|
2020-02-29 01:15:08 +01:00
|
|
|
return "it is not one of variable, net, port, nor wreal";
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
static const char* cannotSplitVarDirectionReason(VDirection dir) {
|
|
|
|
|
if (dir == VDirection::REF) return "it is a ref argument";
|
|
|
|
|
if (dir == VDirection::INOUT) return "it is an inout port";
|
2020-08-15 16:12:55 +02:00
|
|
|
return nullptr;
|
2020-02-29 01:15:08 +01:00
|
|
|
}
|
|
|
|
|
|
2021-08-02 16:33:31 +02:00
|
|
|
static const char* cannotSplitConnectedPortReason(const AstPin* pinp) {
|
|
|
|
|
const AstVar* const varp = pinp->modVarp();
|
2020-02-29 01:15:08 +01:00
|
|
|
if (!varp) return "it is not connected";
|
2021-08-02 16:33:31 +02:00
|
|
|
if (const char* const reason = cannotSplitVarDirectionReason(varp->direction())) {
|
|
|
|
|
return reason;
|
|
|
|
|
}
|
2020-08-15 16:12:55 +02:00
|
|
|
return nullptr;
|
2020-02-29 01:15:08 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
static const char* cannotSplitTaskReason(const AstNodeFTask* taskp) {
|
|
|
|
|
if (taskp->prototype()) return "the task is prototype declaration";
|
|
|
|
|
if (taskp->dpiImport()) return "the task is imported from DPI-C";
|
|
|
|
|
if (taskp->dpiOpenChild()) return "the task takes DPI-C open array";
|
2020-08-15 16:12:55 +02:00
|
|
|
return nullptr;
|
2020-02-29 01:15:08 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
static const char* cannotSplitVarCommonReason(const AstVar* varp) {
|
2021-08-02 16:33:31 +02:00
|
|
|
if (const AstNodeFTask* const taskp = VN_CAST(varp->backp(), NodeFTask)) {
|
|
|
|
|
if (const char* const reason = cannotSplitTaskReason(taskp)) return reason;
|
|
|
|
|
}
|
|
|
|
|
if (const char* const reason = cannotSplitVarTypeReason(varp->varType())) {
|
|
|
|
|
return reason;
|
|
|
|
|
}
|
|
|
|
|
if (const char* const reason = cannotSplitVarDirectionReason(varp->direction())) {
|
|
|
|
|
return reason;
|
2020-02-29 01:15:08 +01:00
|
|
|
}
|
|
|
|
|
if (varp->isSigPublic()) return "it is public";
|
|
|
|
|
if (varp->isUsedLoopIdx()) return "it is used as a loop variable";
|
2025-03-09 15:31:01 +01:00
|
|
|
if (varp->isForceable()) return "it is forceable";
|
2020-08-15 16:12:55 +02:00
|
|
|
return nullptr;
|
2020-02-29 01:15:08 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
static const char* cannotSplitPackedVarReason(const AstVar* varp);
|
|
|
|
|
|
2024-11-30 00:01:50 +01:00
|
|
|
template <typename T_ALWAYSLIKE>
|
2021-08-11 15:30:00 +02:00
|
|
|
void insertBeginCore(T_ALWAYSLIKE* ap, AstNodeStmt* stmtp, AstNodeModule* modp) {
|
2022-09-15 20:43:56 +02:00
|
|
|
if (ap->isJustOneBodyStmt() && ap->stmtsp() == stmtp) {
|
2020-02-29 01:15:08 +01:00
|
|
|
stmtp->unlinkFrBack();
|
|
|
|
|
// Insert begin-end because temp value may be inserted to this block later.
|
2021-08-11 15:30:00 +02:00
|
|
|
const std::string name = "__VsplitVarBlk" + cvtToStr(modp->user1Inc(1));
|
2025-09-23 20:49:01 +02:00
|
|
|
ap->addStmtsp(new AstBegin{ap->fileline(), name, stmtp, false});
|
2020-02-29 01:15:08 +01:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2021-08-11 15:30:00 +02:00
|
|
|
void insertBeginCore(AstInitial* initp, AstNodeStmt* stmtp, AstNodeModule* modp) {
|
2022-09-15 20:43:56 +02:00
|
|
|
if (initp->isJustOneBodyStmt() && initp->stmtsp() == stmtp) {
|
2020-02-29 01:15:08 +01:00
|
|
|
stmtp->unlinkFrBack();
|
|
|
|
|
// Insert begin-end because temp value may be inserted to this block later.
|
2021-08-02 16:33:31 +02:00
|
|
|
FileLine* const fl = initp->fileline();
|
2021-08-11 15:30:00 +02:00
|
|
|
const std::string name = "__VsplitVarBlk" + cvtToStr(modp->user1Inc(1));
|
2025-09-23 20:49:01 +02:00
|
|
|
initp->replaceWith(new AstInitial{fl, new AstBegin{fl, name, stmtp, false}});
|
2020-02-29 01:15:08 +01:00
|
|
|
VL_DO_DANGLING(initp->deleteTree(), initp);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2021-08-11 15:30:00 +02:00
|
|
|
void insertBeginIfNecessary(AstNodeStmt* stmtp, AstNodeModule* modp) {
|
2021-08-02 16:33:31 +02:00
|
|
|
AstNode* const backp = stmtp->backp();
|
|
|
|
|
if (AstAlways* const ap = VN_CAST(backp, Always)) {
|
2020-02-29 01:15:08 +01:00
|
|
|
insertBeginCore(ap, stmtp, modp);
|
2021-08-02 16:33:31 +02:00
|
|
|
} else if (AstInitial* const ap = VN_CAST(backp, Initial)) {
|
2020-02-29 01:15:08 +01:00
|
|
|
insertBeginCore(ap, stmtp, modp);
|
2022-01-02 18:35:44 +01:00
|
|
|
} else if (auto* const ap = VN_CAST(backp, Initial)) {
|
|
|
|
|
insertBeginCore(ap, stmtp, modp);
|
2020-02-29 01:15:08 +01:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
}; // SplitVarImpl
|
|
|
|
|
|
2021-08-02 16:33:31 +02:00
|
|
|
//######################################################################
|
2022-12-26 10:30:41 +01:00
|
|
|
// Utilities required in various places
|
2021-08-02 16:33:31 +02:00
|
|
|
|
|
|
|
|
// Emit a SPLITVAR warning at 'wherep' explaining why 'varp' (which carries a
// split_var metacomment) will not be split; 'reasonp' is the reason text from
// one of the cannotSplit*Reason() checkers.
static void warnNoSplit(const AstVar* varp, const AstNode* wherep, const char* reasonp) {
    wherep->v3warn(SPLITVAR, varp->prettyNameQ()
                                 << " has split_var metacomment but will not be split because "
                                 << reasonp << ".\n");
}
|
2020-02-29 01:15:08 +01:00
|
|
|
|
|
|
|
|
//######################################################################
|
|
|
|
|
// Split Unpacked Variables
|
|
|
|
|
// Replacement policy:
|
|
|
|
|
// AstArraySel -> Just replace with the AstVarRef for the split variable
|
|
|
|
|
// AstVarRef -> Create a temporary variable and refer the variable
|
|
|
|
|
// AstSliceSel -> Create a temporary variable and refer the variable
|
|
|
|
|
|
2020-06-10 04:39:10 +02:00
|
|
|
// Compare AstNode* to get deterministic ordering when showing messages.
|
2024-01-20 21:06:46 +01:00
|
|
|
struct AstNodeComparator final {
|
2020-06-10 04:39:10 +02:00
|
|
|
bool operator()(const AstNode* ap, const AstNode* bp) const {
|
|
|
|
|
const int lineComp = ap->fileline()->operatorCompare(*bp->fileline());
|
|
|
|
|
if (lineComp != 0) return lineComp < 0;
|
|
|
|
|
return ap < bp;
|
|
|
|
|
}
|
|
|
|
|
};
|
|
|
|
|
|
2020-11-19 03:32:16 +01:00
|
|
|
// One recorded use of an unpacked-array split candidate: either the whole
// variable, a single-element ArraySel, or a SliceSel range.
class UnpackRef final {
    // m_nodep is called in this context (AstNodeStmt, AstCell, AstNodeFTask, or AstAlways)
    AstNode* const m_contextp;
    AstNode* const m_nodep;  // ArraySel, SliceSel, ArrayVarRef (entire value)
    const int m_index;  // for ArraySel; -1 when not a single-element access
    const int m_msb;  // for SliceSel
    const int m_lsb;  // for SliceSel
    const VAccess m_access;  // Read/write direction of the reference
    const bool m_ftask;  // true if the reference is in function/task. false if in module.

public:
    // Reference to the entire variable (no select).
    // m_msb{0}/m_lsb{1} gives msb < lsb, which never satisfies isSingleRef().
    UnpackRef(AstNode* stmtp, AstVarRef* nodep, bool ftask)
        : m_contextp{stmtp}
        , m_nodep{nodep}
        , m_index{-1}
        , m_msb{0}
        , m_lsb{1}
        , m_access{nodep->access()}
        , m_ftask{ftask} {}
    // Single-element access via ArraySel at constant index 'idx'.
    UnpackRef(AstNode* stmtp, AstArraySel* nodep, int idx, const VAccess& access, bool ftask)
        : m_contextp{stmtp}
        , m_nodep{nodep}
        , m_index{idx}
        , m_msb{0}
        , m_lsb{1}
        , m_access{access}
        , m_ftask{ftask} {}
    // Range access via SliceSel; a one-element slice degenerates to an ArraySel-like ref.
    UnpackRef(AstNode* stmtp, AstSliceSel* nodep, int msb, int lsb, const VAccess& access,
              bool ftask)
        : m_contextp{stmtp}
        , m_nodep{nodep}
        , m_index{msb == lsb ? msb : -1}  // Equivalent to ArraySel
        , m_msb{msb}
        , m_lsb{lsb}
        , m_access{access}
        , m_ftask{ftask} {}
    AstNode* nodep() const { return m_nodep; }
    // True when exactly one element is accessed (ArraySel, or a width-1 slice).
    bool isSingleRef() const {
        return VN_IS(m_nodep, ArraySel) || (m_msb == m_lsb && m_lsb == m_index);
    }
    int index() const {
        UASSERT_OBJ(isSingleRef(), m_nodep, "not array sel");
        return m_index;
    }
    AstNode* context() const { return m_contextp; }
    VAccess access() const { return m_access; }
    bool ftask() const { return m_ftask; }
    // Ordering for std::set membership; deterministic via AstNodeComparator.
    bool operator<(const UnpackRef& other) const {
        return AstNodeComparator()(m_nodep, other.m_nodep);
    }
};
|
|
|
|
|
|
2020-11-19 03:32:16 +01:00
|
|
|
// Map from each split-candidate variable to the set of places it is referenced.
// A variable is dropped from the map (and possibly from splitting entirely)
// when an unsupported use is found.
class UnpackRefMap final {
public:
    using MapType = std::map<AstVar*, std::set<UnpackRef>, AstNodeComparator>;
    using MapIt = MapType::iterator;

private:
    MapType m_map;
    // Record 'ref' against refp's variable. Returns false if the variable is
    // not (or no longer) a registered split candidate.
    bool addCore(AstVarRef* refp, const UnpackRef& ref) {
        AstVar* const varp = refp->varp();
        UASSERT_OBJ(varp->attrSplitVar(), varp, " no split_var metacomment");
        const MapIt it = m_map.find(varp);
        if (it == m_map.end()) return false;  // Not registered
        const bool ok = it->second.insert(ref).second;
        return ok;
    }

public:
    // Register a variable to split
    void registerVar(AstVar* varp) {
        const bool inserted = m_map.emplace(varp, MapType::value_type::second_type()).second;
        UASSERT_OBJ(inserted, varp, "already registered");
    }
    // Register the location where a variable is used.
    // Single-element access (ArraySel at constant index 'idx')
    bool tryAdd(AstNode* context, AstVarRef* refp, AstArraySel* selp, int idx, bool ftask) {
        return addCore(refp, UnpackRef{context, selp, idx, refp->access(), ftask});
    }
    // Slice access over [msb:lsb]
    bool tryAdd(AstNode* context, AstVarRef* refp, AstSliceSel* selp, int msb, int lsb,
                bool ftask) {
        return addCore(refp, UnpackRef{context, selp, msb, lsb, refp->access(), ftask});
    }
    // Whole-variable access
    bool tryAdd(AstNode* context, AstVarRef* refp, bool ftask) {
        return addCore(refp, UnpackRef{context, refp, ftask});
    }

    // Remove a variable from the list to split
    void remove(AstVar* varp) {
        UASSERT_OBJ(varp->attrSplitVar(), varp, " no split_var metacomment");
        m_map.erase(varp);
        // Keep the split_var attribute only if the later packed-split pass can
        // still handle this variable.
        varp->attrSplitVar(!SplitVarImpl::cannotSplitPackedVarReason(varp));
    }
    bool empty() const { return m_map.empty(); }
    void swap(UnpackRefMap& other) { other.m_map.swap(m_map); }

    MapIt begin() { return m_map.begin(); }
    MapIt end() { return m_map.end(); }
};
|
|
|
|
|
|
|
|
|
|
// Found nodes for SplitPackedVarVisitor
|
2024-01-20 21:06:46 +01:00
|
|
|
// Found nodes for SplitPackedVarVisitor
struct RefsInModule final {
    std::set<AstVar*, AstNodeComparator> m_vars;  // Variables in this module
    std::set<AstVarRef*, AstNodeComparator> m_refs;  // Variable references in this module
    std::set<AstSel*, AstNodeComparator> m_sels;  // Bit/part selects in this module

public:
    void add(AstVar* nodep) { m_vars.insert(nodep); }
    void add(AstVarRef* nodep) { m_refs.insert(nodep); }
    void add(AstSel* nodep) { m_sels.insert(nodep); }
    // Drop 'nodep' and every node in its subtree from the collected sets
    // (called before the subtree is deleted, so no stale pointers remain).
    void remove(AstNode* nodep) {
        struct Visitor final : public VNVisitor {
            RefsInModule& m_parent;
            void visit(AstNode* nodep) override { iterateChildren(nodep); }
            void visit(AstVar* nodep) override { m_parent.m_vars.erase(nodep); }
            void visit(AstVarRef* nodep) override { m_parent.m_refs.erase(nodep); }
            void visit(AstSel* nodep) override {
                m_parent.m_sels.erase(nodep);
                iterateChildren(nodep);
            }
            explicit Visitor(RefsInModule& p)
                : m_parent(p) {}  // Need () or GCC 4.8 false warning
        } v(*this);
        v.iterate(nodep);
    }
    // Feed all collected nodes to 'visitor' (used by SplitPackedVarVisitor so it
    // need not walk the whole AST again).
    void visit(VNVisitor* visitor) {
        for (AstVar* const varp : m_vars) visitor->iterate(varp);
        for (AstSel* const selp : m_sels) {
            // If m_refs includes VarRef from ArraySel, remove it
            // because the VarRef would not be visited in SplitPackedVarVisitor::visit(AstSel*).
            if (AstVarRef* const refp = VN_CAST(selp->fromp(), VarRef)) {
                m_refs.erase(refp);
            } else if (AstVarRef* const refp = VN_CAST(selp->lsbp(), VarRef)) {
                m_refs.erase(refp);
            }
            // NOTE(review): op1p() == 0x1 presumably marks a freed node — debug
            // sanity check against stale pointers; confirm against VNVisitor internals.
            UASSERT_OBJ(reinterpret_cast<uintptr_t>(selp->op1p()) != 1, selp, "stale");
            visitor->iterate(selp);
        }
        for (AstVarRef* const vrefp : m_refs) {
            UASSERT_OBJ(reinterpret_cast<uintptr_t>(vrefp->op1p()) != 1, vrefp, "stale");
            visitor->iterate(vrefp);
        }
    }
};
|
|
|
|
|
|
2025-03-09 15:31:01 +01:00
|
|
|
// Everything SplitUnpackedVarVisitor hands over to SplitPackedVarVisitor.
struct SplitVarRefs final {
    std::map<AstNodeModule*, RefsInModule, AstNodeComparator> m_refs;  // Per-module nodes
    // Variables referenced through hierarchical (cross-module) references;
    // presumably excluded from splitting — confirm in SplitPackedVarVisitor.
    std::unordered_set<AstVar*> m_hasXref;
};
|
2020-02-29 01:15:08 +01:00
|
|
|
|
2022-01-02 19:56:40 +01:00
|
|
|
class SplitUnpackedVarVisitor final : public VNVisitor, public SplitVarImpl {
|
2021-03-13 00:10:45 +01:00
|
|
|
using VarSet = std::set<AstVar*, AstNodeComparator>;
|
2020-02-29 01:15:08 +01:00
|
|
|
VarSet m_foundTargetVar;
|
|
|
|
|
UnpackRefMap m_refs;
|
2020-08-15 19:11:27 +02:00
|
|
|
AstNodeModule* m_modp = nullptr;
|
2022-10-12 11:19:21 +02:00
|
|
|
// AstNodeStmt, AstCell, or AstAlways(Public) for sensitivity
|
2020-08-15 19:11:27 +02:00
|
|
|
AstNode* m_contextp = nullptr;
|
2025-02-28 03:18:27 +01:00
|
|
|
const AstNodeFTask* m_inFTaskp = nullptr;
|
2020-08-15 19:11:27 +02:00
|
|
|
size_t m_numSplit = 0;
|
2020-02-29 01:15:08 +01:00
|
|
|
// List for SplitPackedVarVisitor
|
2025-03-09 15:31:01 +01:00
|
|
|
SplitVarRefs m_forPackedSplit;
|
2021-08-11 15:30:00 +02:00
|
|
|
V3UniqueNames m_tempNames; // For generating unique temporary variable names
|
2020-02-29 01:15:08 +01:00
|
|
|
|
|
|
|
|
// Return 'nodep' as an AstVarRef if it references a split_var candidate, else nullptr.
static AstVarRef* isTargetVref(AstNode* nodep) {
    AstVarRef* const refp = VN_CAST(nodep, VarRef);
    if (!refp) return nullptr;
    return refp->varp()->attrSplitVar() ? refp : nullptr;
}
|
2021-08-02 16:33:31 +02:00
|
|
|
static int outerMostSizeOfUnpackedArray(const AstVar* nodep) {
|
|
|
|
|
const AstUnpackArrayDType* const dtypep
|
2021-10-22 16:15:42 +02:00
|
|
|
= VN_AS(nodep->dtypep()->skipRefp(), UnpackArrayDType);
|
2020-02-29 01:15:08 +01:00
|
|
|
UASSERT_OBJ(dtypep, nodep, "Must be unapcked array");
|
2020-12-07 03:13:56 +01:00
|
|
|
return dtypep->elementsConst();
|
2020-02-29 01:15:08 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Visit children of 'nodep' with m_contextp set to 'nodep'; the previous
// context is restored automatically on scope exit.
void setContextAndIterateChildren(AstNode* nodep) {
    VL_RESTORER(m_contextp);
    m_contextp = nodep;
    iterateChildren(nodep);
}
|
|
|
|
|
// Visit 'nodep' with m_contextp set to 'contextp'; the previous context is
// restored automatically on scope exit.
void setContextAndIterate(AstNode* contextp, AstNode* nodep) {
    VL_RESTORER(m_contextp);
    m_contextp = contextp;
    iterate(nodep);
}
|
2025-08-21 10:43:37 +02:00
|
|
|
// cppcheck-suppress duplInheritedMember
// Defer deletion of 'nodep', first dropping it (and its subtree) from the
// per-module reference list so SplitPackedVarVisitor never sees a freed node.
void pushDeletep(AstNode* nodep) {  // overriding VNVisitor::pushDeletep()
    UASSERT_OBJ(m_modp, nodep, "Must not nullptr");
    m_forPackedSplit.m_refs[m_modp].remove(nodep);
    VNVisitor::pushDeletep(nodep);
}
|
2022-01-02 19:56:40 +01:00
|
|
|
// Create a new AstVar and record it for the later packed-variable pass.
// The caller is responsible for linking the variable into the AST.
AstVar* newVar(FileLine* fl, VVarType type, const std::string& name, AstNodeDType* dtp) {
    AstVar* const varp = new AstVar{fl, type, name, dtp};
    UASSERT_OBJ(m_modp, varp, "Must not nullptr");
    m_forPackedSplit.m_refs[m_modp].add(varp);
    return varp;
}
|
2020-09-07 23:09:25 +02:00
|
|
|
// Create a new AstVarRef and record it for the later packed-variable pass.
AstVarRef* newVarRef(FileLine* fl, AstVar* varp, const VAccess& access) {
    AstVarRef* const refp = new AstVarRef{fl, varp, access};
    UASSERT_OBJ(m_modp, refp, "Must not nullptr");
    m_forPackedSplit.m_refs[m_modp].add(refp);
    return refp;
}
|
2025-09-23 16:16:49 +02:00
|
|
|
// Record that nodep's variable is the target of a hierarchical reference;
// collected into m_hasXref for SplitPackedVarVisitor to consult.
void handleVarXRef(AstVarXRef* const nodep) {
    UINFO(4, nodep->varp() << " Has hierarchical reference");
    m_forPackedSplit.m_hasXref.emplace(nodep->varp());
}
|
2020-02-29 01:15:08 +01:00
|
|
|
|
2022-09-16 12:22:11 +02:00
|
|
|
void visit(AstNode* nodep) override { iterateChildren(nodep); }
|
|
|
|
|
// Per-module entry point: collect references within the module, then split.
void visit(AstNodeModule* nodep) override {
    UINFO(4, "Start checking " << nodep->prettyNameQ());
    if (!VN_IS(nodep, Module)) {
        // AstNodeModule that is not a plain AstModule is not split; still record
        // any hierarchical references found inside it.
        UINFO(4, "Skip " << nodep->prettyNameQ());
        nodep->foreach([this](AstVarXRef* const nodep) { handleVarXRef(nodep); });
        return;
    }
    UASSERT_OBJ(!m_modp, m_modp, "Nested module declaration");
    UASSERT_OBJ(m_refs.empty(), nodep, "The last module didn't finish split()");
    VL_RESTORER(m_modp);
    m_modp = nodep;
    m_tempNames.reset();  // Temporary names are unique per module
    iterateChildren(nodep);
    split();
}
|
2022-09-16 12:22:11 +02:00
|
|
|
void visit(AstNodeStmt* nodep) override { setContextAndIterateChildren(nodep); }
|
|
|
|
|
void visit(AstCell* nodep) override { setContextAndIterateChildren(nodep); }
|
|
|
|
|
void visit(AstAlways* nodep) override {
    if (nodep->sentreep()) {  // When visiting sensitivity list, always is the context
        setContextAndIterate(nodep, nodep->sentreep());
    }
    // Body statements establish their own context via visit(AstNodeStmt*).
    for (AstNode* bodysp = nodep->stmtsp(); bodysp; bodysp = bodysp->nextp()) {
        iterate(bodysp);
    }
}
|
2022-09-16 12:22:11 +02:00
|
|
|
// For a function/task call, match each actual argument against the callee's
// formal IO variables; arguments bound to unsupported directions (or with no
// matching formal) disqualify any split candidates referenced in them.
void visit(AstNodeFTaskRef* nodep) override {
    const AstNodeFTask* const ftaskp = nodep->taskp();
    UASSERT_OBJ(ftaskp, nodep, "Unlinked");
    // Iterate arguments of a function/task.
    for (AstNode *argp = nodep->pinsp(), *paramp = ftaskp->stmtsp(); argp;
         argp = argp->nextp(), paramp = paramp ? paramp->nextp() : nullptr) {
        const char* reason = nullptr;
        const AstVar* vparamp = nullptr;
        // Advance to the next IO formal, skipping non-IO statements in the task body.
        while (paramp) {
            vparamp = VN_CAST(paramp, Var);
            if (vparamp && vparamp->isIO()) {
                reason = cannotSplitVarDirectionReason(vparamp->direction());
                break;
            }
            paramp = paramp->nextp();
            vparamp = nullptr;
        }
        // Ran out of formals before actuals: arity mismatch.
        if (!reason && !vparamp) {
            reason = "the number of argument to the task/function mismatches";
        }
        m_foundTargetVar.clear();
        iterate(argp);  // Populates m_foundTargetVar via visit(AstVarRef*)
        if (reason) {
            // Disqualify every split candidate referenced in this argument.
            for (AstVar* const varp : m_foundTargetVar) {
                warnNoSplit(varp, argp, reason);
                m_refs.remove(varp);
            }
        }
        m_foundTargetVar.clear();
    }
}
|
2022-09-16 12:22:11 +02:00
|
|
|
// For an instance pin, split candidates referenced in the connection expression
// are disqualified when the connected module-side port cannot be split.
void visit(AstPin* nodep) override {
    UINFO(5, nodep->modVarp()->prettyNameQ() << " pin ");
    AstNode* const exprp = nodep->exprp();
    if (!exprp) return;  // Not connected pin
    m_foundTargetVar.clear();
    iterate(exprp);  // Populates m_foundTargetVar via visit(AstVarRef*)
    if (const char* const reason = cannotSplitConnectedPortReason(nodep)) {
        for (AstVar* const varp : m_foundTargetVar) {
            warnNoSplit(varp, nodep, reason);
            m_refs.remove(varp);
        }
        m_foundTargetVar.clear();
    }
}
|
2022-09-16 12:22:11 +02:00
|
|
|
// Enter a function/task; its body is only examined if the task itself is
// splittable (not a prototype, not DPI-related).
void visit(AstNodeFTask* nodep) override {
    UASSERT_OBJ(!m_inFTaskp, nodep, "Nested func/task");
    if (!cannotSplitTaskReason(nodep)) {
        VL_RESTORER(m_inFTaskp);
        m_inFTaskp = nodep;
        iterateChildren(nodep);
    }
}
|
2022-09-16 12:22:11 +02:00
|
|
|
// Record every variable for the packed pass; register split_var candidates.
void visit(AstVar* nodep) override {
    m_forPackedSplit.m_refs[m_modp].add(nodep);
    if (!nodep->attrSplitVar()) return;  // Nothing to do
    if (!cannotSplitReason(nodep)) {
        m_refs.registerVar(nodep);
        UINFO(4, nodep->name() << " is added to candidate list.");
    }
}
|
2022-09-16 12:22:11 +02:00
|
|
|
// Record every reference for the packed pass; whole-variable references to
// split candidates are remembered as "entire value" uses.
void visit(AstVarRef* nodep) override {
    m_forPackedSplit.m_refs[m_modp].add(nodep);
    if (!nodep->varp()->attrSplitVar()) return;  // Nothing to do
    if (m_refs.tryAdd(m_contextp, nodep, m_inFTaskp)) {
        m_foundTargetVar.insert(nodep->varp());
    }
}
|
2025-09-23 16:16:49 +02:00
|
|
|
void visit(AstVarXRef* nodep) override { handleVarXRef(nodep); }
|
2022-09-16 12:22:11 +02:00
|
|
|
// Bit/part selects directly on a variable are recorded for the packed pass.
void visit(AstSel* nodep) override {
    if (VN_IS(nodep->fromp(), VarRef)) m_forPackedSplit.m_refs[m_modp].add(nodep);
    iterateChildren(nodep);
}
|
2022-09-16 12:22:11 +02:00
|
|
|
// An ArraySel on a split candidate is recorded if its index is a constant in
// range; otherwise the variable is disqualified from splitting.
void visit(AstArraySel* nodep) override {
    if (AstVarRef* const refp = isTargetVref(nodep->fromp())) {
        const AstConst* const indexp = VN_CAST(nodep->bitp(), Const);
        if (indexp) {  // OK
            UINFO(4, "add " << nodep << " for " << refp->varp()->prettyName());
            if (indexp->toSInt() < outerMostSizeOfUnpackedArray(refp->varp())) {
                m_refs.tryAdd(m_contextp, refp, nodep, indexp->toSInt(), m_inFTaskp);
            } else {
                warnNoSplit(refp->varp(), nodep->bitp(), "index is out of range");
                m_refs.remove(refp->varp());
            }
        } else {
            // Non-constant index: cannot map to a fixed split element.
            warnNoSplit(refp->varp(), nodep->bitp(), "index cannot be determined statically");
            m_refs.remove(refp->varp());
            iterate(nodep->bitp());
        }
    } else {
        iterateChildren(nodep);
    }
}
|
2022-09-16 12:22:11 +02:00
|
|
|
// A SliceSel on a split candidate is recorded with its declaration-space range.
void visit(AstSliceSel* nodep) override {
    if (AstVarRef* const refp = isTargetVref(nodep->fromp())) {
        const AstUnpackArrayDType* const dtypep
            = VN_AS(refp->varp()->dtypep()->skipRefp(), UnpackArrayDType);
        // declRange() of AstSliceSel is shifted by dtypep->declRange().lo() in V3WidthSel.cpp
        // restore the original decl range here.
        const VNumRange selRange{nodep->declRange().hi() + dtypep->declRange().lo(),
                                 nodep->declRange().lo() + dtypep->declRange().lo(),
                                 nodep->declRange().ascending()};
        UASSERT_OBJ(dtypep->lo() <= selRange.lo() && selRange.hi() <= dtypep->hi(), nodep,
                    "Range check for AstSliceSel must have been finished in V3Width.cpp");
        UINFO(4, "add " << nodep << " for " << refp->varp()->prettyName());
        m_refs.tryAdd(m_contextp, refp, nodep, nodep->declRange().hi(),
                      nodep->declRange().lo(), m_inFTaskp);
    } else {
        iterateChildren(nodep);
    }
}
|
|
|
|
|
// Create a temporary unpacked-array variable standing in for a slice/whole-array
// reference, and wire each of its elements to the corresponding split variable.
//   context     - statement (assignment or task/func ref) containing the reference
//   nodep       - the node being replaced (used for fileline)
//   dtypep      - unpacked array type of the temporary
//   name_prefix - appended to the generated temp name
//   vars        - the per-element split variables
//   start_idx   - offset of element 0 of the temp within 'vars'
//   lvalue      - true if the original reference is written
// Returns a reference to the new temporary; caller registers the variable.
AstVarRef* createTempVar(AstNode* context, AstNode* nodep, AstUnpackArrayDType* dtypep,
                         const std::string& name_prefix, std::vector<AstVar*>& vars,
                         int start_idx, bool lvalue, bool /*ftask*/) {
    FileLine* const fl = nodep->fileline();
    const std::string name = m_tempNames.get(nodep) + "__" + name_prefix;
    AstNodeAssign* const assignp = VN_CAST(context, NodeAssign);
    if (assignp) {
        // "always_comb a = b;" to "always_comb begin a = b; end" so that local
        // variable can be added.
        insertBeginIfNecessary(assignp, m_modp);
    }
    AstVar* const varp = newVar(fl, VVarType::VAR, name, dtypep);
    // Variable will be registered in the caller side.
    UINFO(4, varp->prettyNameQ()
                 << " is created lsb:" << dtypep->lo() << " msb:" << dtypep->hi());
    // Use AstAssign if true, otherwise AstAssignW
    const bool use_simple_assign
        = (context && VN_IS(context, NodeFTaskRef)) || (assignp && VN_IS(assignp, Assign));

    // Connect each element of the temporary to its split variable
    for (int i = 0; i < dtypep->elementsConst(); ++i) {
        AstNodeExpr* lhsp
            = newVarRef(fl, vars.at(start_idx + i), lvalue ? VAccess::WRITE : VAccess::READ);
        AstNodeExpr* rhsp = new AstArraySel{
            fl, newVarRef(fl, varp, !lvalue ? VAccess::WRITE : VAccess::READ), i};
        AstNode* const refp = lhsp;  // remember the split-var ref for context iteration
        UINFO(9, "Creating assign idx:" << i << " + " << start_idx);
        if (!lvalue) std::swap(lhsp, rhsp);  // reading: split var feeds the temp
        if (use_simple_assign) {
            AstAssign* const ap = new AstAssign{fl, lhsp, rhsp};
            if (lvalue) {
                // If varp is LHS, this assignment must appear after the original
                // assignment(context).
                context->addNextHere(ap);
            } else {
                // If varp is RHS, this assignment comes just before the original assignment
                context->addHereThisAsNext(ap);
            }
            UASSERT_OBJ(!m_contextp, m_contextp, "must be null");
            setContextAndIterate(ap, refp);
        } else {
            AstAssignW* const ap = new AstAssignW{fl, lhsp, rhsp};
            // Continuous assignment must be in module context.
            varp->addNextHere(new AstAlways{ap});
            UASSERT_OBJ(!m_contextp, m_contextp, "must be null");
            setContextAndIterate(ap, refp);
        }
    }
    return newVarRef(fl, varp, lvalue ? VAccess::WRITE : VAccess::READ);
}
|
|
|
|
|
// Connect an I/O port variable (which must remain for the interface) to its
// per-element split variables. With 'insertp' non-null, procedural AstAssigns
// are placed around that statement (function/task-local ports); otherwise
// continuous AstAssignW wires are created at module level.
void connectPort(AstVar* varp, std::vector<AstVar*>& vars, AstNode* insertp) {
    UASSERT_OBJ(varp->isIO(), varp, "must be port");
    const bool lvalue = varp->direction().isWritable();
    FileLine* const fl = varp->fileline();
    for (size_t i = 0; i < vars.size(); ++i) {
        // nodes[0] = element of the original port, nodes[1] = split variable;
        // direction decides which side is written.
        AstNodeExpr* const nodes[] = {
            new AstArraySel{fl, newVarRef(fl, varp, lvalue ? VAccess::WRITE : VAccess::READ),
                            static_cast<int>(i)},
            newVarRef(fl, vars.at(i), !lvalue ? VAccess::WRITE : VAccess::READ)};
        AstNodeExpr* const lhsp = nodes[lvalue ? 0 : 1];
        AstNodeExpr* const rhsp = nodes[lvalue ? 1 : 0];
        if (insertp) {
            AstAssign* const ap = new AstAssign{fl, lhsp, rhsp};
            if (lvalue) {  // Just after writing to the temporary variable
                insertp->addNextHere(ap);
            } else {  // Just before reading the temporary variable
                insertp->addHereThisAsNext(ap);
            }
            setContextAndIterate(ap, nodes[1]);
        } else {
            AstAssignW* const ap = new AstAssignW{fl, lhsp, rhsp};
            vars.at(i)->addNextHere(new AstAlways{ap});
            setContextAndIterate(ap, nodes[1]);
        }
    }
}
|
2022-11-27 11:52:40 +01:00
|
|
|
// cppcheck-has-bug-suppress constParameter
// Perform one round of splitting: for every registered variable, create the
// per-element variables, rewrite every recorded reference to use them, and
// connect/remove the original. Returns the number of variables split.
size_t collapse(UnpackRefMap& refs) {
    size_t numSplit = 0;
    for (const auto& pair : refs) {
        UINFO(4, "In module " << m_modp->name() << " var " << pair.first->prettyNameQ()
                              << " which has " << pair.second.size()
                              << " refs will be split.");
        AstVar* const varp = pair.first;
        AstNode* insertp = varp;
        const AstUnpackArrayDType* const dtypep
            = VN_AS(varp->dtypep()->skipRefp(), UnpackArrayDType);
        AstNodeDType* const subTypep = dtypep->subDTypep();
        const bool needNext = VN_IS(subTypep, UnpackArrayDType);  // Still unpacked array.
        std::vector<AstVar*> vars;
        // Add the split variables
        for (int32_t i = 0; i < dtypep->elementsConst(); ++i) {
            // Unpacked array is traced as var(idx), not var[idx].
            const std::string name
                = varp->name() + AstNode::encodeName('(' + cvtToStr(i + dtypep->lo()) + ')');
            AstVar* const newp = newVar(varp->fileline(), VVarType::VAR, name, subTypep);
            newp->propagateAttrFrom(varp);
            // If varp is an IO, varp will remain and will be traced.
            newp->trace(!varp->isIO() && varp->isTrace());
            newp->funcLocal(varp->isFuncLocal() || varp->isFuncReturn());
            insertp->addNextHere(newp);
            insertp = newp;
            // Mark for further splitting: another unpacked round, or packed split
            newp->attrSplitVar(needNext || !cannotSplitPackedVarReason(newp));
            vars.push_back(newp);
            setContextAndIterate(nullptr, newp);
        }
        // Rewrite each recorded reference to use the split variables
        for (const UnpackRef& ref : pair.second) {
            AstNode* newp = nullptr;
            if (ref.isSingleRef()) {
                // Single constant-indexed element: direct substitution
                newp = newVarRef(ref.nodep()->fileline(), vars.at(ref.index()), ref.access());
            } else {
                // Whole-array or slice reference: needs a temporary array
                AstVarRef* refp = VN_CAST(ref.nodep(), VarRef);
                AstUnpackArrayDType* adtypep;
                int lsb = 0;
                if (refp) {
                    adtypep = VN_AS(refp->dtypep()->skipRefp(), UnpackArrayDType);
                } else {
                    AstSliceSel* const selp = VN_AS(ref.nodep(), SliceSel);
                    refp = VN_AS(selp->fromp(), VarRef);
                    UASSERT_OBJ(refp, selp, "Unexpected op is registered");
                    adtypep = VN_AS(selp->dtypep()->skipRefp(), UnpackArrayDType);
                    lsb = adtypep->lo();
                }
                AstVarRef* const newrefp
                    = createTempVar(ref.context(), refp, adtypep, varp->name(), vars, lsb,
                                    refp->access(), ref.ftask());
                newp = newrefp;
                refp->varp()->addNextHere(newrefp->varp());
                UINFO(4, "Create " << newrefp->varp()->prettyNameQ() << " for " << refp);
            }
            ref.nodep()->replaceWith(newp);
            pushDeletep(ref.nodep());
            setContextAndIterate(ref.context(), newp->backp());
            // AstAssign is used. So assignment is necessary for each reference.
            if (varp->isIO() && (varp->isFuncLocal() || varp->isFuncReturn()))
                connectPort(varp, vars, ref.context());
        }
        if (varp->isIO()) {
            // AssignW will be created, so just once
            if (!varp->isFuncLocal() && !varp->isFuncReturn()) {
                connectPort(varp, vars, nullptr);
            }
            // The retained port may itself be a packed-split candidate
            varp->attrSplitVar(!cannotSplitPackedVarReason(varp));
            m_forPackedSplit.m_refs[m_modp].add(varp);
        } else {
            // Non-port original is fully replaced; schedule for deletion
            pushDeletep(varp->unlinkFrBack());
        }
        ++numSplit;
    }
    return numSplit;
}
|
|
|
|
|
// Repeatedly collapse until no candidates remain: splitting a multidimensional
// array produces new split candidates (one fewer dimension) that are collected
// during collapse() and processed in the next trial. Only the first trial's
// count goes to the statistics, since later trials are derived variables.
void split() {
    for (int trial = 0; !m_refs.empty(); ++trial) {
        UnpackRefMap next;
        m_refs.swap(next);
        const size_t n = collapse(next);
        UINFO(4, n << " Variables are split " << trial << " th trial in "
                   << m_modp->prettyNameQ());
        if (trial == 0) m_numSplit += n;
    }
    doDeletes();
}
|
|
|
|
|
|
|
|
|
|
public:
|
|
|
|
|
// Entry point: traverses the whole netlist, splitting marked unpacked arrays
// module by module as visit(AstNodeModule*) is reached.
explicit SplitUnpackedVarVisitor(AstNetlist* nodep)
    : m_tempNames{"__VsplitVar"} {
    iterate(nodep);
}
|
2020-11-11 03:40:14 +01:00
|
|
|
// All pending work must have been flushed via split() before destruction;
// record the split count in the global statistics.
~SplitUnpackedVarVisitor() override {
    UASSERT(m_refs.empty(), "Don't forget to call split()");
    V3Stats::addStat("SplitVar, unpacked arrays split due to attribute", m_numSplit);
}
|
2025-03-09 15:31:01 +01:00
|
|
|
// Hand over the packed-split candidates collected during traversal.
// Note: this is a const member returning a const reference, so the previous
// std::move() here was a no-op that only obscured intent (moving from a const
// object copies anyway; see clang-tidy performance-move-const-arg).
const SplitVarRefs& getPackedVarRefs() const { return m_forPackedSplit; }
|
2020-02-29 01:15:08 +01:00
|
|
|
|
|
|
|
|
// Check if the passed variable can be split.
|
|
|
|
|
// Even if this function returns true, the variable may not be split
|
|
|
|
|
// because the access to the variable cannot be determined statically.
|
|
|
|
|
static const char* cannotSplitReason(const AstVar* nodep) {
|
|
|
|
|
const std::pair<uint32_t, uint32_t> dim = nodep->dtypep()->dimensions(false);
|
|
|
|
|
UINFO(7, nodep->prettyNameQ()
|
|
|
|
|
<< " pub:" << nodep->isSigPublic() << " pri:" << nodep->isPrimaryIO()
|
2025-05-23 02:29:32 +02:00
|
|
|
<< " io:" << nodep->isInout() << " typ:" << nodep->varType());
|
2020-08-15 16:12:55 +02:00
|
|
|
const char* reason = nullptr;
|
2020-02-29 01:15:08 +01:00
|
|
|
// Public variable cannot be split.
|
|
|
|
|
// at least one unpacked dimension must exist
|
|
|
|
|
if (dim.second < 1 || !VN_IS(nodep->dtypep()->skipRefp(), UnpackArrayDType))
|
|
|
|
|
reason = "it is not an unpacked array";
|
|
|
|
|
if (!reason) reason = cannotSplitVarCommonReason(nodep);
|
2020-04-15 13:58:34 +02:00
|
|
|
if (reason) {
|
2025-05-23 02:29:32 +02:00
|
|
|
UINFO(5, "Check " << nodep->prettyNameQ() << " cannot split because" << reason);
|
2020-04-15 13:58:34 +02:00
|
|
|
}
|
2020-02-29 01:15:08 +01:00
|
|
|
return reason;
|
|
|
|
|
}
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
//######################################################################
|
|
|
|
|
// Split packed variables
|
|
|
|
|
|
|
|
|
|
// Split variable
|
2020-11-19 03:32:16 +01:00
|
|
|
// Describes one variable created by splitting a packed variable: a contiguous
// bit range [m_lsb, m_lsb+m_bitwidth-1] of the original, realized as a new
// AstVar whose own LSB is 0.
class SplitNewVar final {
    const int m_lsb;  // LSB in the original bitvector
    const int m_bitwidth;  // Number of bits taken from the original
    AstVar* m_varp;  // The LSB of this variable is always 0, not m_lsb
public:
    SplitNewVar(int lsb, int bitwidth, AstVar* varp = nullptr)
        : m_lsb{lsb}
        , m_bitwidth{bitwidth}
        , m_varp{varp} {}
    int lsb() const { return m_lsb; }
    int msb() const { return m_lsb + m_bitwidth - 1; }
    int bitwidth() const { return m_bitwidth; }
    // Attach the created variable; may be set only once
    void varp(AstVar* vp) {
        UASSERT_OBJ(!m_varp, m_varp, "must be nullptr");
        m_varp = vp;
    }
    AstVar* varp() const { return m_varp; }

    // Comparator for upper_bound-style search: finds the first entry whose
    // range ends above the given bit position.
    struct Match final {
        bool operator()(int bit, const SplitNewVar& a) const {
            return bit < a.m_lsb + a.m_bitwidth;
        }
    };
};
|
|
|
|
|
|
|
|
|
|
// One Entry instance for an AstVarRef instance
|
2020-11-19 03:32:16 +01:00
|
|
|
// One Entry instance for an AstVarRef instance: records which node references
// the packed variable and which bit range [lsb, msb] of it is touched.
class PackedVarRefEntry final {
    AstNode* const m_nodep;  // Either AstSel or AstVarRef is expected.
    const int m_lsb;  // Lowest referenced bit of the original variable
    const int m_bitwidth;  // Number of referenced bits

public:
    // Partial reference through a bit/part select
    PackedVarRefEntry(AstSel* selp, int lsb, int bitwidth)
        : m_nodep{selp}
        , m_lsb{lsb}
        , m_bitwidth{bitwidth} {}
    // Whole-variable reference
    PackedVarRefEntry(AstVarRef* refp, int lsb, int bitwidth)
        : m_nodep{refp}
        , m_lsb{lsb}
        , m_bitwidth{bitwidth} {}
    AstNode* nodep() const { return m_nodep; }
    int lsb() const { return m_lsb; }
    int msb() const { return m_lsb + m_bitwidth - 1; }
    int bitwidth() const { return m_bitwidth; }
    // Substitute the referencing node in the tree and delete the original
    void replaceNodeWith(AstNode* nodep) {
        m_nodep->replaceWith(nodep);
        VL_DO_DANGLING(m_nodep->deleteTree(), m_nodep);
    }
    // If this is AstVarRef and referred in the sensitivity list of always@,
    // return the sensitivity item
    AstSenItem* backSenItemp() const {
        if (const AstVarRef* const refp = VN_CAST(m_nodep, VarRef)) {
            return VN_CAST(refp->backp(), SenItem);
        }
        return nullptr;
    }
};
|
|
|
|
|
|
|
|
|
|
// How a variable is used
|
2020-11-19 03:32:16 +01:00
|
|
|
class PackedVarRef final {
|
2024-01-20 21:06:46 +01:00
|
|
|
struct SortByFirst final {
|
2020-02-29 01:15:08 +01:00
|
|
|
bool operator()(const std::pair<int, bool>& a, const std::pair<int, bool>& b) const {
|
|
|
|
|
if (a.first == b.first) return a.second < b.second;
|
|
|
|
|
return a.first < b.first;
|
|
|
|
|
}
|
|
|
|
|
};
|
|
|
|
|
std::vector<PackedVarRefEntry> m_lhs, m_rhs;
|
2021-11-26 23:55:36 +01:00
|
|
|
AstBasicDType* const m_basicp; // Cache the ptr since varp->dtypep()->basicp() is expensive
|
2020-08-15 19:11:27 +02:00
|
|
|
bool m_dedupDone = false;
|
2020-02-29 01:15:08 +01:00
|
|
|
static void dedupRefs(std::vector<PackedVarRefEntry>& refs) {
|
2020-06-10 04:39:10 +02:00
|
|
|
// Use raw pointer to dedup
|
2021-03-12 23:26:53 +01:00
|
|
|
std::map<AstNode*, size_t, AstNodeComparator> nodes;
|
2024-03-27 22:57:49 +01:00
|
|
|
for (size_t i = 0; i < refs.size(); ++i) nodes.emplace(refs[i].nodep(), i);
|
2020-02-29 01:15:08 +01:00
|
|
|
std::vector<PackedVarRefEntry> vect;
|
|
|
|
|
vect.reserve(nodes.size());
|
2021-08-02 16:33:31 +02:00
|
|
|
for (const auto& pair : nodes) vect.push_back(refs[pair.second]);
|
2020-02-29 01:15:08 +01:00
|
|
|
refs.swap(vect);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
public:
|
2021-03-13 00:10:45 +01:00
|
|
|
using iterator = std::vector<PackedVarRefEntry>::iterator;
|
|
|
|
|
using const_iterator = std::vector<PackedVarRefEntry>::const_iterator;
|
2020-02-29 01:15:08 +01:00
|
|
|
std::vector<PackedVarRefEntry>& lhs() {
|
|
|
|
|
UASSERT(m_dedupDone, "cannot read before dedup()");
|
|
|
|
|
return m_lhs;
|
|
|
|
|
}
|
|
|
|
|
std::vector<PackedVarRefEntry>& rhs() {
|
|
|
|
|
UASSERT(m_dedupDone, "cannot read before dedup()");
|
|
|
|
|
return m_rhs;
|
|
|
|
|
}
|
2025-08-21 10:43:37 +02:00
|
|
|
explicit PackedVarRef(const AstVar* varp)
|
2020-08-16 15:55:36 +02:00
|
|
|
: m_basicp{varp->dtypep()->basicp()} {}
|
2020-09-07 23:09:25 +02:00
|
|
|
void append(const PackedVarRefEntry& e, const VAccess& access) {
|
2020-02-29 01:15:08 +01:00
|
|
|
UASSERT(!m_dedupDone, "cannot add after dedup()");
|
2020-11-07 16:37:55 +01:00
|
|
|
if (access.isWriteOrRW()) m_lhs.push_back(e);
|
|
|
|
|
if (access.isReadOrRW()) m_rhs.push_back(e);
|
2020-02-29 01:15:08 +01:00
|
|
|
}
|
|
|
|
|
void dedup() {
|
|
|
|
|
UASSERT(!m_dedupDone, "dedup() called twice");
|
|
|
|
|
dedupRefs(m_lhs);
|
|
|
|
|
dedupRefs(m_rhs);
|
|
|
|
|
m_dedupDone = true;
|
|
|
|
|
}
|
|
|
|
|
const AstBasicDType* basicp() const { return m_basicp; }
|
|
|
|
|
// Make a plan for variables after split
|
|
|
|
|
// when skipUnused==true, split variable for unread bits will not be created.
|
2025-02-17 19:34:57 +01:00
|
|
|
std::vector<SplitNewVar> splitPlan(const AstVar* varp, bool skipUnused) const {
|
2020-02-29 01:15:08 +01:00
|
|
|
UASSERT(m_dedupDone, "dedup() must be called before");
|
2025-02-17 19:34:57 +01:00
|
|
|
AstNodeDType* const dtypep = varp->dtypeSkipRefp();
|
2020-02-29 01:15:08 +01:00
|
|
|
std::vector<SplitNewVar> plan;
|
2020-08-15 15:48:08 +02:00
|
|
|
std::vector<std::pair<int, bool>> points; // <bit location, is end>
|
2020-02-29 01:15:08 +01:00
|
|
|
points.reserve(m_lhs.size() * 2 + 2); // 2 points will be added per one PackedVarRefEntry
|
2021-08-02 16:33:31 +02:00
|
|
|
for (const PackedVarRefEntry& ref : m_lhs) {
|
2023-10-28 12:24:04 +02:00
|
|
|
points.emplace_back(ref.lsb(), false); // Start of a region
|
|
|
|
|
points.emplace_back(ref.msb() + 1, true); // End of a region
|
2020-02-29 01:15:08 +01:00
|
|
|
}
|
2025-05-06 11:00:17 +02:00
|
|
|
const int bit_lo = basicp()->lo();
|
|
|
|
|
const int bit_hi = bit_lo + dtypep->width() - 1;
|
2020-02-29 01:15:08 +01:00
|
|
|
if (skipUnused && !m_rhs.empty()) { // Range to be read must be kept, so add points here
|
2025-02-17 19:34:57 +01:00
|
|
|
int lsb = bit_hi + 1;
|
|
|
|
|
int msb = bit_lo - 1;
|
2021-08-02 16:33:31 +02:00
|
|
|
for (const PackedVarRefEntry& ref : m_rhs) {
|
|
|
|
|
lsb = std::min(lsb, ref.lsb());
|
|
|
|
|
msb = std::max(msb, ref.msb());
|
2020-02-29 01:15:08 +01:00
|
|
|
}
|
|
|
|
|
UASSERT_OBJ(lsb <= msb, m_basicp, "lsb:" << lsb << " msb:" << msb << " are wrong");
|
2023-10-28 12:24:04 +02:00
|
|
|
points.emplace_back(lsb, false);
|
|
|
|
|
points.emplace_back(msb + 1, true);
|
2020-02-29 01:15:08 +01:00
|
|
|
}
|
|
|
|
|
if (!skipUnused) { // All bits are necessary
|
2025-02-17 19:34:57 +01:00
|
|
|
points.emplace_back(bit_lo, false);
|
|
|
|
|
points.emplace_back(bit_hi + 1, true);
|
2020-02-29 01:15:08 +01:00
|
|
|
}
|
|
|
|
|
std::sort(points.begin(), points.end(), SortByFirst());
|
|
|
|
|
|
|
|
|
|
// Scan the sorted points and sub bitfields
|
|
|
|
|
int refcount = 0;
|
|
|
|
|
for (size_t i = 0; i + 1 < points.size(); ++i) {
|
|
|
|
|
const int bitwidth = points[i + 1].first - points[i].first;
|
|
|
|
|
if (points[i].second) {
|
|
|
|
|
--refcount; // End of a region
|
|
|
|
|
} else {
|
|
|
|
|
++refcount; // Start of a region
|
|
|
|
|
}
|
|
|
|
|
UASSERT(refcount >= 0, "refcounut must not be negative");
|
|
|
|
|
if (bitwidth == 0 || refcount == 0) continue; // Vacant region
|
2022-11-20 21:06:49 +01:00
|
|
|
plan.emplace_back(SplitNewVar{points[i].first, bitwidth});
|
2020-02-29 01:15:08 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
return plan;
|
|
|
|
|
}
|
|
|
|
|
};
|
|
|
|
|
|
2022-01-02 19:56:40 +01:00
|
|
|
class SplitPackedVarVisitor final : public VNVisitor, public SplitVarImpl {
    // NODE STATE
    // AstVar::user2() -> bool. Automatically considered candidate
    // AstVar::user3() -> VarInfo. Used only in findCandidates
    const VNUser2InUse m_user2InUse;

    // STATE
    AstNetlist* const m_netp;  // Root netlist; used to register new data types
    const AstNodeModule* m_modp = nullptr;  // Current module (just for log)
    int m_numSplitAttr = 0;  // Number of variables split due to attribute
    int m_numSplitAuto = 0;  // Number of variables split automatically
    // key:variable to be split. value:location where the variable is referenced.
    std::map<AstVar*, PackedVarRef, AstNodeComparator> m_refs;

    // METHODS
|
2022-09-16 12:22:11 +02:00
|
|
|
// Descend into a task/function only when it is splittable at all
void visit(AstNodeFTask* nodep) override {
    if (cannotSplitTaskReason(nodep)) return;  // Unsupported task: skip its body
    iterateChildren(nodep);
}
|
2022-09-16 12:22:11 +02:00
|
|
|
// Check whether a candidate variable (split_var attribute or automatic user2
// candidate) can actually be split, registering it in m_refs if so
void visit(AstVar* nodep) override {
    if (!nodep->attrSplitVar() && !nodep->user2()) return;  // Nothing to do
    if (const char* const reason = cannotSplitReason(nodep, true)) {
        // Only warn for explicitly attributed variables; automatic candidates fail silently
        if (nodep->attrSplitVar()) {
            warnNoSplit(nodep, nodep, reason);
            nodep->attrSplitVar(false);
        }
    } else {  // Finally find a good candidate
        const bool inserted = m_refs.emplace(nodep, PackedVarRef{nodep}).second;
        if (inserted) UINFO(4, nodep->prettyNameQ() << " is added to candidate list.");
    }
}
|
2022-09-16 12:22:11 +02:00
|
|
|
// A whole-variable reference: record that the entire bit range of the
// candidate variable is accessed
void visit(AstVarRef* nodep) override {
    AstVar* const varp = nodep->varp();
    visit(varp);  // May register varp as a candidate in m_refs
    const auto refit = m_refs.find(varp);
    if (refit == m_refs.end()) return;  // variable without split_var metacomment
    UASSERT_OBJ(varp->attrSplitVar() || varp->user2(), varp, "must be a split candidate");
    UASSERT_OBJ(!nodep->classOrPackagep(), nodep,
                "variable in package must have been dropped beforehand.");
    const AstBasicDType* const basicp = refit->second.basicp();
    // The full width is used: record [lo() +: width()]
    refit->second.append(PackedVarRefEntry{nodep, basicp->lo(), varp->width()},
                         nodep->access());
    UINFO(5, varp->prettyName()
                 << " Entire bit of [" << basicp->lo() << "+:" << varp->width() << "]");
}
|
2022-09-16 12:22:11 +02:00
|
|
|
// A bit/part select: record the selected bit range if it is a compile-time
// constant; otherwise the variable cannot be split and is dropped
void visit(AstSel* nodep) override {
    const AstVarRef* const vrefp = VN_CAST(nodep->fromp(), VarRef);
    if (!vrefp) {
        iterateChildren(nodep);
        return;
    }

    AstVar* const varp = vrefp->varp();
    const auto refit = m_refs.find(varp);
    if (refit == m_refs.end()) {
        iterateChildren(nodep);
        return;  // Variable without split_var metacomment
    }
    UASSERT_OBJ(varp->attrSplitVar() || varp->user2(), varp, "must be a split candidate");

    if (AstConst* const lsbConstp = VN_CAST(nodep->lsbp(), Const)) {  // OK
        // Constant select: record range normalized by the declared lo() offset
        refit->second.append(
            PackedVarRefEntry(nodep, lsbConstp->toSInt() + refit->second.basicp()->lo(),
                              nodep->widthConst()),
            vrefp->access());
        UINFO(5, varp->prettyName()
                     << " [" << lsbConstp->toSInt() << ":+" << nodep->widthConst()
                     << "] lsb:" << refit->second.basicp()->lo());
    } else {
        // Non-constant select: bit range unknown statically, so give up on this var
        if (varp->attrSplitVar()) {
            warnNoSplit(vrefp->varp(), nodep, "its bit range cannot be determined statically");
            varp->attrSplitVar(false);
        }
        m_refs.erase(varp);
        iterateChildren(nodep);
    }
}
|
2022-09-16 12:22:11 +02:00
|
|
|
// Default handler: simply recurse into children
void visit(AstNode* nodep) override {
    iterateChildren(nodep);
}
|
2020-04-04 14:31:14 +02:00
|
|
|
|
2020-02-29 01:15:08 +01:00
|
|
|
// Extract necessary bit range from a newly created variable to meet ref.
// Returns either a bare AstVarRef (if the whole split variable is covered by
// the reference) or an AstSel slicing out the overlapping bits.
static AstNodeExpr* extractBits(const PackedVarRefEntry& ref, const SplitNewVar& var,
                                const VAccess access) {
    FileLine* const fl = ref.nodep()->fileline();
    AstVarRef* const refp = new AstVarRef{fl, var.varp(), access};
    if (ref.lsb() <= var.lsb() && var.msb() <= ref.msb()) {  // Use the entire bits
        return refp;
    } else {  // Use slice
        // Overlap of the reference's range with the split variable's range
        const int lsb = std::max(ref.lsb(), var.lsb());
        const int msb = std::min(ref.msb(), var.msb());
        const int bitwidth = msb + 1 - lsb;
        UINFO(4, var.varp()->prettyNameQ() << "[" << msb << ":" << lsb << "] used for "
                                           << ref.nodep()->prettyNameQ() << '\n');
        // LSB of varp is always 0. "lsb - var.lsb()" means this. see also SplitNewVar
        return new AstSel{fl, refp, lsb - var.lsb(), bitwidth};
    }
}
|
|
|
|
|
// Connect each split variable with the original port via assignments.
// If insertp is non-null (task/function argument case), procedural AstAssigns
// are inserted relative to the enclosing statement; otherwise continuous
// AstAssignW (wrapped in AstAlways) are created next to the split variables.
static void connectPortAndVar(const std::vector<SplitNewVar>& vars, AstVar* portp,
                              AstNode* insertp) {
    // Walk up to the nearest enclosing statement as the insertion point
    for (; insertp; insertp = insertp->backp()) {
        if (VN_IS(insertp, NodeStmt)) break;
    }
    const bool in = portp->isReadOnly();
    FileLine* const fl = portp->fileline();
    const int portLsb = portp->basicp()->lo();
    for (const SplitNewVar& var : vars) {
        // Slice of the port corresponding to this split variable
        AstNodeExpr* rhsp
            = new AstSel{fl, new AstVarRef{fl, portp, !in ? VAccess::WRITE : VAccess::READ},
                         var.lsb() - portLsb, var.bitwidth()};
        AstNodeExpr* lhsp = new AstVarRef{fl, var.varp(), in ? VAccess::WRITE : VAccess::READ};
        if (!in) std::swap(lhsp, rhsp);  // Output: data flows split var -> port
        if (insertp) {
            AstAssign* const ap = new AstAssign{fl, lhsp, rhsp};
            if (in) {
                insertp->addHereThisAsNext(ap);  // Input: refresh before the statement
            } else {
                insertp->addNextHere(ap);  // Output: copy back after the statement
            }
        } else {
            AstAssignW* const ap = new AstAssignW{fl, lhsp, rhsp};
            var.varp()->addNextHere(new AstAlways{ap});
        }
    }
}
|
|
|
|
|
// Create the actual AstVar nodes planned in vars and insert them after varp.
// Each new variable's name encodes the original bit range it covers.
void createVars(AstVar* varp, const AstBasicDType* basicp, std::vector<SplitNewVar>& vars) {
    for (SplitNewVar& newvar : vars) {
        int left = newvar.msb();
        int right = newvar.lsb();
        if (basicp->ascending()) std::swap(left, right);
        // e.g. v[3:0] becomes "v__BRA__3__COLON__0__KET__" after encoding
        const std::string name
            = (left == right)
                  ? varp->name() + "__BRA__" + AstNode::encodeNumber(left) + "__KET__"
                  : varp->name() + "__BRA__" + AstNode::encodeNumber(left)
                        + AstNode::encodeName(":") + AstNode::encodeNumber(right) + "__KET__";

        AstBasicDType* dtypep;
        switch (basicp->keyword()) {
        case VBasicDTypeKwd::BIT:
            dtypep = new AstBasicDType{varp->subDTypep()->fileline(), VFlagBitPacked{},
                                       newvar.bitwidth()};
            break;
        case VBasicDTypeKwd::LOGIC:
            dtypep = new AstBasicDType{varp->subDTypep()->fileline(), VFlagLogicPacked{},
                                       newvar.bitwidth()};
            break;
        default: UASSERT_OBJ(false, basicp, "Only bit and logic are allowed");
        }
        // Preserve the original range endianness on the new type
        dtypep->rangep(new AstRange{
            varp->fileline(), VNumRange{newvar.msb(), newvar.lsb(), basicp->ascending()}});
        newvar.varp(new AstVar{varp->fileline(), VVarType::VAR, name, dtypep});
        newvar.varp()->propagateAttrFrom(varp);
        newvar.varp()->funcLocal(varp->isFuncLocal() || varp->isFuncReturn());
        // Enable this line to trace split variable directly:
        // newvar.varp()->trace(varp->isTrace());
        m_netp->typeTablep()->addTypesp(dtypep);
        varp->addNextHere(newvar.varp());
        UINFO(4, newvar.varp()->prettyNameQ() << " is added for " << varp->prettyNameQ());
    }
}
|
2022-07-30 16:01:25 +02:00
|
|
|
static void updateReferences(AstVar* varp, PackedVarRef& pref,
|
2020-02-29 01:15:08 +01:00
|
|
|
const std::vector<SplitNewVar>& vars) {
|
2021-08-02 16:33:31 +02:00
|
|
|
for (const bool lvalue : {false, true}) { // Refer the new split variables
|
2022-07-30 16:01:25 +02:00
|
|
|
std::vector<PackedVarRefEntry>& refs = lvalue ? pref.lhs() : pref.rhs();
|
2021-08-02 16:33:31 +02:00
|
|
|
for (PackedVarRefEntry& ref : refs) {
|
|
|
|
|
auto varit
|
|
|
|
|
= std::upper_bound(vars.begin(), vars.end(), ref.lsb(), SplitNewVar::Match());
|
|
|
|
|
UASSERT_OBJ(varit != vars.end(), ref.nodep(), "Not found");
|
|
|
|
|
UASSERT(!(varit->msb() < ref.lsb() || ref.msb() < varit->lsb()),
|
2020-02-29 01:15:08 +01:00
|
|
|
"wrong search result");
|
|
|
|
|
AstNode* prevp;
|
|
|
|
|
bool inSentitivityList = false;
|
2021-08-02 16:33:31 +02:00
|
|
|
if (AstSenItem* const senitemp = ref.backSenItemp()) {
|
|
|
|
|
AstNode* const oldsenrefp = senitemp->sensp();
|
2020-02-29 01:15:08 +01:00
|
|
|
oldsenrefp->replaceWith(
|
2021-08-02 16:33:31 +02:00
|
|
|
new AstVarRef{senitemp->fileline(), varit->varp(), VAccess::READ});
|
2020-02-29 01:15:08 +01:00
|
|
|
VL_DO_DANGLING(oldsenrefp->deleteTree(), oldsenrefp);
|
|
|
|
|
prevp = senitemp;
|
|
|
|
|
inSentitivityList = true;
|
|
|
|
|
} else {
|
2021-08-02 16:33:31 +02:00
|
|
|
prevp = extractBits(ref, *varit, lvalue ? VAccess::WRITE : VAccess::READ);
|
2020-02-29 01:15:08 +01:00
|
|
|
}
|
2021-08-02 16:33:31 +02:00
|
|
|
for (int residue = ref.msb() - varit->msb(); residue > 0;
|
2020-02-29 01:15:08 +01:00
|
|
|
residue -= varit->bitwidth()) {
|
|
|
|
|
++varit;
|
2021-08-02 16:33:31 +02:00
|
|
|
UASSERT_OBJ(varit != vars.end(), ref.nodep(), "not enough split variables");
|
|
|
|
|
if (AstSenItem* const senitemp = VN_CAST(prevp, SenItem)) {
|
|
|
|
|
prevp = new AstSenItem{
|
2020-02-29 01:15:08 +01:00
|
|
|
senitemp->fileline(), senitemp->edgeType(),
|
2021-08-02 16:33:31 +02:00
|
|
|
new AstVarRef{senitemp->fileline(), varit->varp(), VAccess::READ}};
|
2020-02-29 01:15:08 +01:00
|
|
|
senitemp->addNextHere(prevp);
|
|
|
|
|
} else {
|
2022-11-13 21:33:11 +01:00
|
|
|
AstNodeExpr* const bitsp
|
2021-08-02 16:33:31 +02:00
|
|
|
= extractBits(ref, *varit, lvalue ? VAccess::WRITE : VAccess::READ);
|
2022-11-13 21:33:11 +01:00
|
|
|
prevp = new AstConcat{ref.nodep()->fileline(), bitsp,
|
|
|
|
|
VN_AS(prevp, NodeExpr)};
|
2020-02-29 01:15:08 +01:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
// If varp is an argument of task/func, need to update temporary var
|
2022-12-03 00:46:38 +01:00
|
|
|
// every time the var is updated. See also another call of connectPortAndVar() in
|
2020-02-29 01:15:08 +01:00
|
|
|
// split()
|
|
|
|
|
if (varp->isIO() && (varp->isFuncLocal() || varp->isFuncReturn()))
|
2021-08-02 16:33:31 +02:00
|
|
|
connectPortAndVar(vars, varp, ref.nodep());
|
|
|
|
|
if (!inSentitivityList) ref.replaceNodeWith(prevp);
|
|
|
|
|
UASSERT_OBJ(varit->msb() >= ref.msb(), varit->varp(), "Out of range");
|
2020-02-29 01:15:08 +01:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
// Do the actual splitting operation
void split() {
    for (auto& pair : m_refs) {
        AstVar* const varp = pair.first;
        PackedVarRef& ref = pair.second;
        ref.dedup();
        UINFO(4, "In module " << m_modp->name() << " var " << varp->prettyNameQ()
                              << " which has " << ref.lhs().size() << " lhs refs and "
                              << ref.rhs().size() << " rhs refs will be split.");
        std::vector<SplitNewVar> vars
            = ref.splitPlan(varp, !varp->isTrace());  // If traced, all bit must be kept
        if (vars.empty()) continue;
        if (vars.size() == 1 && vars.front().bitwidth() == varp->width())
            continue;  // No split

        // Statistics: attribute-driven vs automatic split
        if (varp->attrSplitVar()) {
            ++m_numSplitAttr;
        } else {
            ++m_numSplitAuto;
        }

        createVars(varp, ref.basicp(), vars);  // Add the split variables

        updateReferences(varp, ref, vars);

        if (varp->isIO()) {  // port cannot be deleted
            // If varp is a port of a module, single AssignW is sufficient
            if (!(varp->isFuncLocal() || varp->isFuncReturn()))
                connectPortAndVar(vars, varp, nullptr);
        } else if (varp->isTrace()) {
            // Let's reuse the original variable for tracing
            AstNodeExpr* rhsp = new AstVarRef{vars.front().varp()->fileline(),
                                              vars.front().varp(), VAccess::READ};
            FileLine* const fl = varp->fileline();
            // Concatenate all split variables to reconstruct the original value
            for (size_t i = 1; i < vars.size(); ++i) {
                rhsp = new AstConcat{fl, new AstVarRef{fl, vars[i].varp(), VAccess::READ},
                                     rhsp};
            }
            if (varp->isFuncLocal() || varp->isFuncReturn()) {
                AstAssign* const ap
                    = new AstAssign{fl, new AstVarRef{fl, varp, VAccess::WRITE}, rhsp};
                varp->addNextHere(ap);
            } else {
                AstAssignW* const ap
                    = new AstAssignW{fl, new AstVarRef{fl, varp, VAccess::WRITE}, rhsp};
                varp->addNextHere(new AstAlways{ap});
            }
        } else {  // the original variable is not used anymore.
            VL_DO_DANGLING(varp->unlinkFrBack()->deleteTree(), varp);
        }
    }
    m_refs.clear();  // Done
}
|
|
|
|
|
|
2025-03-09 15:31:01 +01:00
|
|
|
// Find Vars only referenced through non-overlapping constant selects,
// and set their user2 to mark them as split candidates
static void findCandidates(const RefsInModule& refSets,
                           const std::unordered_set<AstVar*>& hasXrefs) {
    // Inclusive index range
    using Range = std::pair<int32_t, int32_t>;

    // Store one VarInfo per AstVar via user3
    struct VarInfo final {
        bool ineligible = false;  // Ineligible for automatic consideration
        std::vector<Range> ranges;  // [lsb, msb] inclusive of Sels
    };
    const VNUser3InUse user3InUse;
    AstUser3Allocator<AstVar, VarInfo> varInfos;

    // Gather all Sels selecting from each variable, also mark if ineligible
    for (const AstVarRef* const vrefp : refSets.m_refs) {
        AstVar* const varp = vrefp->varp();
        VarInfo& info = varInfos(varp);
        if (info.ineligible) continue;

        // Function return values seem not safe for splitting, even though
        // the code above seems like it's trying to handle them.
        if (varp->isFuncReturn()) {
            info.ineligible = true;
            continue;
        }
        // Don't consider ports, we don't know what is connected to them at this point
        if (varp->isIO()) {
            info.ineligible = true;
            continue;
        }
        // Can't split variables referenced from outside the module
        if (hasXrefs.count(varp)) {
            info.ineligible = true;
            continue;
        }

        // Ineligible if it is not being Sel from
        AstSel* const selp = VN_CAST(vrefp->firstAbovep(), Sel);
        if (!selp || vrefp != selp->fromp()) {
            info.ineligible = true;
            continue;
        }
        // Ineligible if the selection range is not constant
        AstConst* const lsbConstp = VN_CAST(selp->lsbp(), Const);
        if (!lsbConstp) {
            info.ineligible = true;
            continue;
        }

        // All good, record the selection range
        const int32_t lsb = lsbConstp->toSInt();
        const int32_t msb = lsb + selp->widthConst() - 1;
        info.ranges.emplace_back(lsb, msb);
    }

    // Check the usage of each variable
    for (AstVar* const varp : refSets.m_vars) {
        VarInfo* const infop = varInfos.tryGet(varp);
        if (!infop) continue;
        // Don't consider if ineligible
        if (infop->ineligible) continue;
        // Sort ranges by LSB then MSB
        std::sort(infop->ranges.begin(), infop->ranges.end(),
                  [](const Range& a, const Range& b) {
                      if (a.first != b.first) return a.first < b.first;
                      return a.second < b.second;
                  });
        // Check for overlapping but non-identical ranges
        bool overlap = false;
        for (size_t i = 0; i + 1 < infop->ranges.size(); ++i) {
            const Range& a = infop->ranges[i];
            const Range& b = infop->ranges[i + 1];
            // OK if the two ranges are the same
            if (a == b) continue;
            // OK if they don't overlap
            if (a.second < b.first) continue;
            // Overlap found
            overlap = true;
            break;
        }
        // If no overlapping ranges, consider it for automatic splitting
        varp->user2(!overlap);
    }
}
|
|
|
|
|
|
2020-02-29 01:15:08 +01:00
|
|
|
public:
|
|
|
|
|
// When reusing the information from SplitUnpackedVarVisitor
SplitPackedVarVisitor(AstNetlist* nodep, SplitVarRefs fromUnpackedSplit)
    : m_netp{nodep} {
    // If you want to ignore refs and walk the entire AST,
    // just call iterateChildren(m_modp) and split() for each module
    if (v3Global.opt.fVarSplit()) {
        // Automatic splitting enabled: mark additional candidates via user2
        for (const auto& i : fromUnpackedSplit.m_refs) {
            findCandidates(i.second, fromUnpackedSplit.m_hasXref);
        }
    }
    for (auto& i : fromUnpackedSplit.m_refs) {
        m_modp = i.first;
        i.second.visit(this);  // Visit the recorded references in this module
        split();
        m_modp = nullptr;
    }
}
|
2020-11-11 03:40:14 +01:00
|
|
|
// Destructor: report statistics; m_refs must already be consumed by split()
~SplitPackedVarVisitor() override {
    UASSERT(m_refs.empty(), "Forgot to call split()");
    V3Stats::addStat("SplitVar, packed variables split due to attribute", m_numSplitAttr);
    V3Stats::addStat("SplitVar, packed variables split automatically", m_numSplitAuto);
}
|
|
|
|
|
|
|
|
|
|
// Check if the passed variable can be split.
|
|
|
|
|
// Even if this function returns true, the variable may not be split
|
|
|
|
|
// when the access to the variable cannot be determined statically.
|
|
|
|
|
static const char* cannotSplitReason(const AstVar* nodep, bool checkUnpacked) {
|
2020-08-15 16:12:55 +02:00
|
|
|
const char* reason = nullptr;
|
2021-11-26 23:55:36 +01:00
|
|
|
if (const AstBasicDType* const basicp = nodep->dtypep()->basicp()) {
|
2020-02-29 01:15:08 +01:00
|
|
|
const std::pair<uint32_t, uint32_t> dim = nodep->dtypep()->dimensions(false);
|
|
|
|
|
// Unpacked array will be split in SplitUnpackedVarVisitor() beforehand
|
|
|
|
|
if (!((!checkUnpacked || dim.second == 0) && nodep->dtypep()->widthMin() > 1))
|
|
|
|
|
reason = "its bitwidth is 1";
|
|
|
|
|
if (!reason && !basicp->isBitLogic()) // Floating point and string are not supported
|
|
|
|
|
reason = "it is not an aggregate type of bit nor logic";
|
|
|
|
|
if (!reason) reason = cannotSplitVarCommonReason(nodep);
|
|
|
|
|
} else {
|
2020-05-23 16:34:58 +02:00
|
|
|
reason = "its type is unknown"; // LCOV_EXCL_LINE
|
2020-02-29 01:15:08 +01:00
|
|
|
}
|
2021-07-25 19:38:27 +02:00
|
|
|
if (reason) {
|
2025-05-23 02:29:32 +02:00
|
|
|
UINFO(5, "Check " << nodep->prettyNameQ() << " cannot split because" << reason);
|
2021-07-25 19:38:27 +02:00
|
|
|
}
|
2020-02-29 01:15:08 +01:00
|
|
|
return reason;
|
|
|
|
|
}
|
2020-11-03 02:07:43 +01:00
|
|
|
};
|
2020-02-29 01:15:08 +01:00
|
|
|
|
|
|
|
|
const char* SplitVarImpl::cannotSplitPackedVarReason(const AstVar* varp) {
|
|
|
|
|
return SplitPackedVarVisitor::cannotSplitReason(varp, true);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
//######################################################################
|
|
|
|
|
// Split class functions
|
|
|
|
|
|
|
|
|
|
// Entry point for the pass: first split unpacked arrays, then packed variables
void V3SplitVar::splitVariable(AstNetlist* nodep) {
    UINFO(2, __FUNCTION__ << ":");
    SplitVarRefs refs;
    {
        // Split unpacked arrays; packed-variable references are collected as a side product
        const SplitUnpackedVarVisitor visitor{nodep};
        refs = visitor.getPackedVarRefs();
    }
    V3Global::dumpCheckGlobalTree("split_var", 0, dumpTreeEitherLevel() >= 9);
    // Split packed variables using the references collected above
    { SplitPackedVarVisitor{nodep, std::move(refs)}; }
    V3Global::dumpCheckGlobalTree("split_var", 0, dumpTreeEitherLevel() >= 9);
}
|
|
|
|
|
|
|
|
|
|
bool V3SplitVar::canSplitVar(const AstVar* varp) {
|
|
|
|
|
// If either SplitUnpackedVarVisitor or SplitPackedVarVisitor can handle,
|
|
|
|
|
// then accept varp.
|
|
|
|
|
return !SplitUnpackedVarVisitor::cannotSplitReason(varp)
|
|
|
|
|
|| !SplitPackedVarVisitor::cannotSplitReason(varp, false);
|
|
|
|
|
}
|