Improve V3LifePost to not require AstAssignPre/AssignPost (#6306)

Rewrote V3LifePost to not depend on having AstAssignPre and
AstAssignPost types, but work with generic AstNodeAssign. There is an
extra flag in AstVarScope to denote it's part of an NBA and should be
considered.

Step towards #6280.
This commit is contained in:
Geza Lore 2025-08-19 00:11:14 +01:00 committed by GitHub
parent c90f9e53b7
commit c9a6d06544
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
4 changed files with 217 additions and 253 deletions

View File

@ -2170,6 +2170,7 @@ class AstVarScope final : public AstNode {
// @astgen ptr := m_scopep : Optional[AstScope] // Scope variable is underneath
// @astgen ptr := m_varp : Optional[AstVar] // [AfterLink] Pointer to variable itself
bool m_trace : 1; // Tracing is turned on for this scope
bool m_optimizeLifePost : 1; // One half of an NBA pair using ShadowVariable scheme. Optimize.
public:
AstVarScope(FileLine* fl, AstScope* scopep, AstVar* varp)
: ASTGEN_SUPER_VarScope(fl)
@ -2178,6 +2179,7 @@ public:
UASSERT_OBJ(scopep, fl, "Scope must be non-null");
UASSERT_OBJ(varp, fl, "Var must be non-null");
m_trace = true;
m_optimizeLifePost = false;
dtypeFrom(varp);
}
ASTGEN_MEMBERS_AstVarScope;
@ -2198,6 +2200,8 @@ public:
void scopep(AstScope* nodep) { m_scopep = nodep; }
bool isTrace() const { return m_trace; }
void trace(bool flag) { m_trace = flag; }
bool optimizeLifePost() const { return m_optimizeLifePost; }
void optimizeLifePost(bool flag) { m_optimizeLifePost = flag; }
};
// === AstNodeBlock ===

View File

@ -585,6 +585,9 @@ class DelayedVisitor final : public VNVisitor {
const std::string name = "__Vdly__" + vscp->varp()->shortName();
AstVarScope* const shadowVscp = createTemp(flp, scopep, name, vscp->dtypep());
vscpInfo.shadowVariableKit().vscp = shadowVscp;
// Mark both for V3LifePost
vscp->optimizeLifePost(true);
shadowVscp->optimizeLifePost(true);
// Create the AstActive for the Pre/Post logic
AstActive* const activep = new AstActive{flp, "nba-shadow-variable", vscpInfo.senTreep()};
activep->senTreeStorep(vscpInfo.senTreep());

View File

@ -1,6 +1,6 @@
// -*- mode: C++; c-file-style: "cc-mode" -*-
//*************************************************************************
// DESCRIPTION: Verilator: AssignPost Variable assignment elimination
// DESCRIPTION: Verilator: NBA shadow variable assignment elimination
//
// Code available from: https://verilator.org
//
@ -13,14 +13,48 @@
// SPDX-License-Identifier: LGPL-3.0-only OR Artistic-2.0
//
//*************************************************************************
// LIFE TRANSFORMATIONS:
// Build control-flow graph with assignments and var usages
// All modules:
// Delete these
// ASSIGN(Vdly, a)
// ... {no reads or writes of a after the first write to Vdly}
// ... {no reads of a after the first write to Vdly}
// ASSIGNPOST(Vdly, tmp)
//
// Given a pair of variables 'd' and 'q', where 'd' is the shadow variable
// created by V3Delayed using the ShadowVar scheme. Attempt to turn this code:
//
// ... reads of 'q' ok here
// X1: d = q; // First and complete write of 'd' (the pre-scheduled NBA initial assignment)
// ... reads of 'q' ok here
// d = foo; // Second assignment to 'd'
// ... no reads of 'q' here
// X2: q = d; // Only and complete write of 'q', only read of 'd' (the post-scheduled NBA commit)
// ... reads of 'q' ok here
//
// into:
//
// X1: q = q;
// ...
// q = foo;
// ...
// X2: q = q;
//
// by replacing 'd' with 'q'. This then allows deletion of 'd' and the two assignments.
//
// More formally, with the non-sequential mtasks graph, we must prove all of these:
// 1) No reads of 'd' anywhere except for the ASSIGNPOST itself
// 2) No write of 'q' anywhere except for the ASSIGNPOST itself
// 3) The first write of 'd' is complete (writes all bits)
// 4) Every read of 'q' either falls before the second write of 'd', or after the only read of 'd'
//
// Notes:
//
// While these rules could be applied to any variables, not just the NBA
// shadow variables, **proving** that no reads of 'q' happen after the second
// assignment of 'd' is difficult due to the presence of loops (the whole
// eval_nba is inside a loop), virtual methods and other dynamic executions.
// For the NBA shadow variables, we can compute this safely as their use
// is understood as we schedule their first and last assignments specially.
//
// Constraint 2 could be relaxed to "no write of 'q' before the only read of
// 'd'", however we only have one write of 'q', created in V3Delayed, so
// trying harder would just be a code coverage hole today.
//
// Constraint 3 should always hold with V3Delayed; we will assert it.
//
//*************************************************************************
@ -32,236 +66,149 @@
#include "V3GraphPathChecker.h"
#include "V3Stats.h"
#include <memory> // for std::unique_ptr -> auto_ptr or unique_ptr
#include <algorithm>
#include <memory>
#include <unordered_map>
VL_DEFINE_DEBUG_FUNCTIONS;
//######################################################################
// LifePost class functions
// Substitute references to marked AstVarScopes with their replacements.
// Walks the tree under the given AstTopScope (descending into non-entry-point
// CFuncs via their call sites) and, for every AstVarRef whose target
// AstVarScope has user4p() set, rewrites the reference to point at the
// replacement AstVarScope stored in user4p().
class LifePostElimVisitor final : public VNVisitor {
    bool m_tracingCall = false;  // Iterating into a CCall to a CFunc
    // NODE STATE
    // INPUT:
    //  AstVarScope::user4p() -> AstVarScope*, If set, replace this
    //                           varscope with specified new one
    // STATE
    // VISITORS
    void visit(AstVarRef* nodep) override {
        const AstVarScope* const vscp = nodep->varScopep();
        UASSERT_OBJ(vscp, nodep, "Scope not assigned");
        // If a replacement was recorded on user4p, swap in a fresh reference
        // to the replacement var, preserving the original access direction
        if (AstVarScope* const newvscp = reinterpret_cast<AstVarScope*>(vscp->user4p())) {
            UINFO(9, " Replace " << nodep << " to " << newvscp);
            AstVarRef* const newrefp = new AstVarRef{nodep->fileline(), newvscp, nodep->access()};
            nodep->replaceWith(newrefp);
            VL_DO_DANGLING(nodep->deleteTree(), nodep);
        }
    }
    void visit(AstNodeModule* nodep) override {
        // Only track the top scopes, not lower level functions
        if (nodep->isTop()) iterateChildren(nodep);
    }
    void visit(AstNodeCCall* nodep) override {
        iterateChildren(nodep);
        if (!nodep->funcp()->entryPoint()) {
            // Enter the function and trace it
            m_tracingCall = true;
            iterate(nodep->funcp());
        }
    }
    void visit(AstExecGraph* nodep) override {
        // Can just iterate across the MTask bodies in any order. Order
        // isn't important for LifePostElimVisitor's simple substitution.
        iterateChildren(nodep);
    }
    void visit(AstCFunc* nodep) override {
        // Visit only entry points, or functions reached via a traced call
        if (!m_tracingCall && !nodep->entryPoint()) return;
        m_tracingCall = false;
        iterateChildren(nodep);
    }
    void visit(AstVar*) override {}  // Don't want varrefs under it
    void visit(AstNode* nodep) override { iterateChildren(nodep); }

public:
    // CONSTRUCTORS
    explicit LifePostElimVisitor(AstTopScope* nodep) { iterate(nodep); }
    ~LifePostElimVisitor() override = default;
};
//######################################################################
// Location within the execution graph, identified by an mtask
// and a sequence number within the mtask:
// Location within the execution graph: the mtask the access occurs under
// (nullptr for serial code) plus a sequence number within that mtask.
struct LifeLocation final {
    const ExecMTask* mtaskp = nullptr;  // MTask of the access, if any
    uint32_t sequence = 0;  // Ordering counter within the mtask

public:
    LifeLocation() = default;
    LifeLocation(const ExecMTask* taskp, uint32_t seq)
        : mtaskp{taskp}
        , sequence{seq} {}
    // Total order: primarily by mtask id (serial code counts as id 0),
    // then by sequence number within the mtask
    bool operator<(const LifeLocation& that) const {
        const unsigned thisId = mtaskp ? mtaskp->id() : 0;
        const unsigned thatId = that.mtaskp ? that.mtaskp->id() : 0;
        if (thisId != thatId) return thisId < thatId;
        return sequence < that.sequence;
    }
};
struct LifePostLocation final {
LifeLocation loc;
AstAssignPost* nodep = nullptr;
LifePostLocation() = default;
LifePostLocation(LifeLocation loc_, AstAssignPost* nodep_)
: loc{loc_}
, nodep{nodep_} {}
};
//######################################################################
// LifePost delay elimination
class LifePostDlyVisitor final : public VNVisitor {
class LifePostDlyVisitor final : public VNVisitorConst {
// TYPES
// Location of AstNode within the program
// Location of an AstNode within the program. Records the node, the
// AstExecGraph/ExecMTask it sits under (if any), and a sequence counter so
// that two locations can be compared for "is before" execution ordering.
template <typename T_Node>
class Location final {
    // Allow Location<A>::operator< to access the members of Location<B>
    template <typename U_Node>
    friend class Location;

    T_Node* m_nodep;  // The AstNode being recorded
    const AstExecGraph* m_egraphp;  // AstExecGraph location is under, if any
    const ExecMTask* m_mtaskp;  // The ExecMTask location is under, if any
    uint32_t m_seqNum;  // Location counter

public:
    Location(T_Node* nodep, const AstExecGraph* egp, const ExecMTask* mtaskp, uint32_t seqNum)
        : m_nodep{nodep}
        , m_egraphp{egp}
        , m_mtaskp{mtaskp}
        , m_seqNum{seqNum} {}
    Location() = delete;

    // The recorded AstNode
    T_Node* const& nodep() const { return m_nodep; }

    // "is before" - Note: Equality (concurrency) is possible iff they are independent mtasks!
    template <typename U_Node>
    bool operator<(const Location<U_Node>& that) const {
        // If they are in different mtasks under the same graph, check for a path in the graph
        if (m_egraphp && m_egraphp == that.m_egraphp && m_mtaskp != that.m_mtaskp) {
            // The GraphPathChecker for this graph is stashed on its user1 pointer
            GraphPathChecker* const checkerp = m_egraphp->user1u().to<GraphPathChecker*>();
            return checkerp->pathExistsFrom(m_mtaskp, that.m_mtaskp);
        }
        // Otherwise the sequence numbers work (one/both outside graph, or both in same mtask)
        return m_seqNum < that.m_seqNum;
    }
};
// NODE STATE
// AstVarScope::user1() -> bool: referenced outside _eval__nba
// AstVarScope::user4() -> AstVarScope*: Passed to LifePostElim to substitute this var
// AstVarScope::user4() -> AstVarScope*: Replacement variable
// AstExecGraph::user1p() -> GraphPathChecker*: path checker for this AstExecGraph
const VNUser1InUse m_inuser1;
const VNUser4InUse m_inuser4;
// STATE
uint32_t m_sequence = 0; // Sequence number of assigns/varrefs,
// // local to the current MTask.
const ExecMTask* m_execMTaskp = nullptr; // Current ExecMTask being processed,
// // or nullptr for serial code.
const AstExecGraph* m_execGraphp = nullptr; // Current AstExecGraph being processed (or null)
const ExecMTask* m_execMTaskp = nullptr; // Current ExecMTask being processed (or null)
VDouble0 m_statAssnDel; // Statistic tracking
bool m_tracingCall = false; // Currently tracing a CCall to a CFunc
// Map each varscope to one or more locations where it's accessed.
// These maps will not include any ASSIGNPOST accesses:
using LocMap = std::unordered_map<const AstVarScope*, std::set<LifeLocation>>;
// Maps from Varscope to all their reads and writes
using LocMap = std::unordered_map<const AstVarScope*, std::vector<Location<AstVarRef>>>;
LocMap m_reads; // VarScope read locations
LocMap m_writes; // VarScope write locations
// Map each dly var to its AstAssignPost* node and the location thereof
std::unordered_map<const AstVarScope*, LifePostLocation> m_assignposts;
V3Graph* m_mtasksGraphp = nullptr; // Mtask tracking graph
std::unique_ptr<GraphPathChecker> m_checker;
std::vector<Location<AstNodeAssign>> m_assigns; // Assignments considered for removal
std::vector<std::unique_ptr<GraphPathChecker>> m_checkers; // Storage for exec graph checkers
const AstCFunc* const m_evalNbap; // The _eval__nba function
bool m_inEvalNba = false; // Traversing under _eval__nba
// METHODS
bool before(const LifeLocation& a, const LifeLocation& b) {
if (a.mtaskp == b.mtaskp) return a.sequence < b.sequence;
return m_checker->pathExistsFrom(a.mtaskp, b.mtaskp);
}
bool outsideCriticalArea(LifeLocation loc, const std::set<LifeLocation>& dlyVarAssigns,
LifeLocation assignPostLoc) {
// If loc is before all of dlyVarAssigns, return true.
// ("Before" means certain to be ordered before them at execution time.)
// If assignPostLoc is before loc, return true.
//
// Otherwise, loc could fall in the "critical" area where the
// substitution affects the result of the operation at loc, so
// return false.
if (!loc.mtaskp && assignPostLoc.mtaskp) {
// This is threaded mode; 'loc' is something that happens at
// initial/settle time, or perhaps in _eval() but outside of
// the mtask graph.
// In either case, it's not in the critical area.
return true;
}
if (before(assignPostLoc, loc)) return true;
for (const auto& i : dlyVarAssigns) {
if (!before(loc, i)) return false;
}
return true;
}
void squashAssignposts() {
for (auto& pair : m_assignposts) {
// If referenced external to _eval__nba, don't optimize
if (pair.first->user1()) continue;
for (const Location<AstNodeAssign>& assign : m_assigns) {
AstVarScope* const dVscp = VN_AS(assign.nodep()->rhsp(), VarRef)->varScopep();
AstVarScope* const qVscp = VN_AS(assign.nodep()->lhsp(), VarRef)->varScopep();
const LifePostLocation* const app = &pair.second;
const AstVarRef* const lhsp = VN_AS(app->nodep->lhsp(), VarRef); // original var
const AstVarRef* const rhsp = VN_AS(app->nodep->rhsp(), VarRef); // dly var
AstVarScope* const dlyVarp = rhsp->varScopep();
AstVarScope* const origVarp = lhsp->varScopep();
// We are considering deleting 'y', don't do it if referenced external to _eval__nba
if (dVscp->user1()) continue;
// Scrunch these:
// X1: __Vdly__q = __PVT__clk_clocks;
// ... {no reads or writes of __PVT__q after the first write to __Vdly__q}
// ... {no reads of __Vdly__q after the first write to __Vdly__q}
// X2: __PVT__q = __Vdly__q;
//
// Into just this:
// X1: __PVT__q = __PVT__clk_clocks;
// X2: (nothing)
const std::vector<Location<AstVarRef>>& dWrites = m_writes[dVscp];
UASSERT_OBJ(!dWrites.empty(), dVscp, "NBA shadow variable read but never written");
// More formally, with the non-sequential mtasks graph, we must
// prove all of these before doing the scrunch:
// 1) No reads of the dly var anywhere except for the ASSIGNPOST
// 2) Every read of the original var either falls before each of
// the dly var's assignments, or after the ASSIGNPOST.
// 3) Every write of the original var either falls before each of
// the dly var's assignments, or after the ASSIGNPOST.
// *** See file header for requirements ***
const std::set<LifeLocation>& dlyVarAssigns = m_writes[dlyVarp];
// Proof (1)
const std::set<LifeLocation>& dlyVarReads = m_reads[dlyVarp];
if (!dlyVarReads.empty()) {
continue; // do not scrunch, go to next LifePostLocation
// Proof (1) - Only read is on the RHS of this assignment
if (m_reads[dVscp].size() > 1) continue;
// Proof (2) - Only write is on the LHS of this assignment
if (m_writes[qVscp].size() > 1) continue;
// Proof (3) - Should always hold
UASSERT_OBJ(VN_IS(dWrites.at(0).nodep()->backp(), NodeAssign), dVscp,
"Partial first write to NBA shadow variable");
// Proof (4)
if (dWrites.size() > 1) {
// V3Order always serializes writes so they cannot be concurrent
UASSERT_OBJ(dWrites[0] < dWrites[1], dVscp, "Concurrent writes");
const bool qRdOK = [&]() {
for (const Location<AstVarRef>& qRead : m_reads[qVscp]) {
if (assign < qRead) continue;
// Check from 2nd write of 'd'
for (size_t i = 1; i < dWrites.size(); ++i) {
if (qRead < dWrites[i]) continue;
return false;
}
}
return true;
}();
if (!qRdOK) continue;
}
// Proof (2)
bool canScrunch = true;
const std::set<LifeLocation>& origVarReads = m_reads[origVarp];
for (std::set<LifeLocation>::iterator rdLocIt = origVarReads.begin();
rdLocIt != origVarReads.end(); ++rdLocIt) {
if (!outsideCriticalArea(*rdLocIt, dlyVarAssigns, app->loc)) {
canScrunch = false;
break;
}
}
if (!canScrunch) continue;
// Proof (3)
const std::set<LifeLocation>& origVarWrites = m_writes[origVarp];
for (std::set<LifeLocation>::iterator wrLocIt = origVarWrites.begin();
wrLocIt != origVarWrites.end(); ++wrLocIt) {
if (!outsideCriticalArea(*wrLocIt, dlyVarAssigns, app->loc)) {
canScrunch = false;
break;
}
}
if (!canScrunch) continue;
// Delete and mark so LifePostElimVisitor will get it
UINFO(4, " DELETE " << app->nodep);
dlyVarp->user4p(origVarp);
VL_DO_DANGLING(app->nodep->unlinkFrBack()->deleteTree(), app->nodep);
// Mark variable for replacement
dVscp->user4p(qVscp);
// Delete assignment
UINFO(4, " DELETE " << assign.nodep());
VL_DO_DANGLING(assign.nodep()->unlinkFrBack()->deleteTree(), assign.nodep());
++m_statAssnDel;
}
}
// Trace code in the given function, flagging when we are under _eval__nba
void trace(AstCFunc* nodep) {
    VL_RESTORER(m_inEvalNba);
    // Once inside _eval__nba, stay flagged for the whole subtree
    m_inEvalNba = m_inEvalNba || nodep == m_evalNbap;
    iterateChildrenConst(nodep);
}
// VISITORS
void visit(AstTopScope* nodep) override {
void visit(AstNetlist* nodep) override {
// First, build maps of every location (mtask and sequence
// within the mtask) where each varscope is read, and written.
iterateChildren(nodep);
iterateChildrenConst(nodep);
if (v3Global.opt.mtasks()) {
UASSERT_OBJ(m_mtasksGraphp, nodep, "Should have initted m_mtasksGraphp by now");
m_checker.reset(new GraphPathChecker{m_mtasksGraphp});
} else {
UASSERT_OBJ(!m_mtasksGraphp, nodep,
"Did not expect any m_mtasksGraphp in serial mode");
// We need to be able to pick up the first write of each variable.
// V3Order serializes all writes, and we trace AstExecGraph in
// dependency order, so the first one we encounter during tracing should
// always be the one. It's somewhat expensive to assert so only with debugCheck().
if (v3Global.opt.debugCheck()) {
for (auto& pair : m_writes) {
const std::vector<Location<AstVarRef>>& writes = pair.second;
const Location<AstVarRef>& first = writes[0];
for (size_t i = 1; i < writes.size(); ++i) {
UASSERT_OBJ(first < writes[i], pair.first, "First write is not the first");
}
}
}
// Find all assignposts. Determine which ones can be
@ -270,10 +217,21 @@ class LifePostDlyVisitor final : public VNVisitor {
// variables.
squashAssignposts();
// Replace any node4p varscopes with the new scope
LifePostElimVisitor{nodep};
// Apply replacements
nodep->foreach([](AstVarRef* nodep) {
const AstVarScope* const vscp = nodep->varScopep();
AstVarScope* const replacementp = VN_AS(vscp->user4p(), VarScope);
if (!replacementp) return;
UINFO(9, " Replace " << nodep << " target " << vscp << " with " << replacementp);
nodep->varScopep(replacementp);
nodep->varp(replacementp->varp());
});
}
void visit(AstVarRef* nodep) override {
// We only try to optimize NBA shadow variables
if (!nodep->varScopep()->optimizeLifePost()) return;
// Mark variables referenced outside _eval__nba
if (!m_inEvalNba) {
nodep->varScopep()->user1(true);
@ -284,72 +242,69 @@ class LifePostDlyVisitor final : public VNVisitor {
const AstVarScope* const vscp = nodep->varScopep();
UASSERT_OBJ(vscp, nodep, "Scope not assigned");
const LifeLocation loc(m_execMTaskp, ++m_sequence);
if (nodep->access().isWriteOrRW()) m_writes[vscp].insert(loc);
if (nodep->access().isReadOrRW()) m_reads[vscp].insert(loc);
}
void visit(AstAssignPre* nodep) override {
// Do not record varrefs within assign pre.
//
// The pre-assignment into the dly var should not count as its
// first write; we only want to consider reads and writes that
// would still happen if the dly var were eliminated.
if (!m_inEvalNba) iterateChildren(nodep);
}
void visit(AstAssignPost* nodep) override {
// Don't record ASSIGNPOST in the read/write maps, record them in a
// separate map
if (const AstVarRef* const rhsp = VN_CAST(nodep->rhsp(), VarRef)) {
// rhsp is the dly var
const AstVarScope* const dlyVarp = rhsp->varScopep();
UASSERT_OBJ(m_assignposts.find(dlyVarp) == m_assignposts.end(), nodep,
"LifePostLocation attempted duplicate dlyvar map addition");
const LifeLocation loc(m_execMTaskp, ++m_sequence);
m_assignposts[dlyVarp] = LifePostLocation(loc, nodep);
++m_sequence;
if (nodep->access().isWriteOrRW()) {
m_writes[vscp].emplace_back(nodep, m_execGraphp, m_execMTaskp, m_sequence);
}
if (nodep->access().isReadOrRW()) {
m_reads[vscp].emplace_back(nodep, m_execGraphp, m_execMTaskp, m_sequence);
}
}
void visit(AstNodeModule* nodep) override {
// Only track the top scopes, not lower level functions
if (nodep->isTop()) iterateChildren(nodep);
void visit(AstNodeAssign* nodep) override {
    // Visit the RHS first: its reads are ordered before the write of the LHS
    iterateConst(nodep->rhsp());
    // A straight VarRef-to-VarRef assignment between two NBA shadow-scheme
    // variables is a candidate for elimination; record it with its location
    const AstVarRef* const lRefp = VN_CAST(nodep->lhsp(), VarRef);
    const AstVarRef* const rRefp = lRefp ? VN_CAST(nodep->rhsp(), VarRef) : nullptr;
    if (rRefp  //
        && lRefp->varScopep()->optimizeLifePost()  //
        && rRefp->varScopep()->optimizeLifePost()) {
        m_assigns.emplace_back(nodep, m_execGraphp, m_execMTaskp, ++m_sequence);
    }
    // Visit the LHS after the RHS, so the write is sequenced after the reads
    iterateConst(nodep->lhsp());
}
void visit(AstNodeCCall* nodep) override {
iterateChildren(nodep);
if (!nodep->funcp()->entryPoint()) {
// Enter the function and trace it
m_tracingCall = true;
iterate(nodep->funcp());
}
iterateChildrenConst(nodep);
// Entry points are roots of the trace, no need to do it here
if (nodep->funcp()->entryPoint()) return;
// Trace cellee
trace(nodep->funcp());
}
void visit(AstExecGraph* nodep) override {
// Treat the ExecGraph like a call to each mtask body
VL_RESTORER(m_execMTaskp);
if (m_inEvalNba) {
UASSERT_OBJ(!m_mtasksGraphp, nodep, "Cannot handle more than one AstExecGraph");
m_mtasksGraphp = nodep->depGraphp();
}
UASSERT_OBJ(!m_execGraphp, nodep, "Nested AstExecGraph");
VL_RESTORER(m_execGraphp);
m_execGraphp = nodep;
// Set up the path checker for this graph
UASSERT_OBJ(!nodep->user1p(), nodep, "AstExecGraph visited twice");
m_checkers.emplace_back(new GraphPathChecker{nodep->depGraphp()});
nodep->user1p(m_checkers.back().get());
// Trace each mtask body. Note: the vertices are in topological order,
// and we do not reset m_sequence, so a lower sequence number does
// guarantee a node is not earlier than a higher sequence number, but
// might still be concurrent.
for (V3GraphVertex& mtaskVtx : nodep->depGraphp()->vertices()) {
const ExecMTask* const mtaskp = mtaskVtx.as<ExecMTask>();
VL_RESTORER(m_execMTaskp);
m_execMTaskp = mtaskp;
m_sequence = 0;
iterate(mtaskp->bodyp());
iterateConst(mtaskp->bodyp());
}
}
void visit(AstCFunc* nodep) override {
if (!m_tracingCall && !nodep->entryPoint()) return;
VL_RESTORER(m_inEvalNba);
if (nodep == m_evalNbap) m_inEvalNba = true;
m_tracingCall = false;
iterateChildren(nodep);
// Start a trace from each entry point
if (nodep->entryPoint()) trace(nodep);
}
//-----
void visit(AstVar*) override {} // Don't want varrefs under it
void visit(AstNode* nodep) override { iterateChildren(nodep); }
void visit(AstNode* nodep) override { iterateChildrenConst(nodep); }
public:
// CONSTRUCTORS
explicit LifePostDlyVisitor(AstNetlist* netlistp)
: m_evalNbap{netlistp->evalNbap()} {
iterate(netlistp);
iterateConst(netlistp);
}
~LifePostDlyVisitor() override {
V3Stats::addStat("Optimizations, Lifetime postassign deletions", m_statAssnDel);
@ -361,7 +316,6 @@ public:
// Entry point: eliminate redundant NBA shadow variable assignments across
// the whole netlist, then dump/check the tree if requested.
void V3LifePost::lifepostAll(AstNetlist* nodep) {
    UINFO(2, __FUNCTION__ << ":");
    // Find and delete the removable shadow variable assignments; scoped so
    // the visitor (and its stats reporting in the destructor) runs before
    // the tree check below
    { LifePostDlyVisitor{nodep}; }  // Destruct before checking
    V3Global::dumpCheckGlobalTree("life_post", 0, dumpTreeEitherLevel() >= 3);
}

View File

@ -169,6 +169,9 @@ private:
}
void visit(AstAssignPost* nodep) override {
if (writesToVirtIface(nodep)) {
// Not sure if needed, but be paranoid to match previous behavior, as we
// did not optimize this case before
nodep->foreach([](AstVarRef* refp) { refp->varScopep()->optimizeLifePost(false); });
// Convert to always, as we have to assign the trigger var
FileLine* const flp = nodep->fileline();
AstAlwaysPost* const postp = new AstAlwaysPost{flp};