Internals: preparation for fixing #3470 (#3475)

* Internals: Let LeafInfo be a class. No functional change is intended.

* Internals: Rename LeafInfo::width() -> LeafInfo::varWidth(). No functional change is intended.
Author: Yutetsu TAKATSUKASA, 2022-06-27 22:41:33 +09:00 (committed by GitHub)
parent fc4d6a62af
commit ced39d0982
1 changed file with 61 additions and 45 deletions

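Before the diff, here is a minimal sketch of what LeafInfo looks like after this change: a class with private members, write-once setLeaf() setters, plain read/write accessors, and width() renamed to varWidth(). This is only an illustration of the interface implied by the diff below, not code from the commit; assert() and the empty Ast* placeholder types stand in for Verilator's UASSERT and real AST node classes, and varWidth() is stubbed because its real body needs the AST.

#include <cassert>

struct AstVarRef {};  // placeholder for Verilator's AST classes
struct AstConst {};   // (the real ones live in V3Ast.h)

class LeafInfo final {  // Leaf node (either AstConst or AstVarRef)
    bool m_polarity = true;
    int m_lsb = 0;
    int m_wordIdx = -1;  // -1 means AstWordSel is not used.
    AstVarRef* m_refp = nullptr;
    const AstConst* m_constp = nullptr;

public:
    // A leaf is bound exactly once, to either a variable reference or a constant.
    void setLeaf(AstVarRef* refp) {
        assert(!m_refp && !m_constp && "Must be called just once");
        m_refp = refp;
    }
    void setLeaf(const AstConst* constp) {
        assert(!m_refp && !m_constp && "Must be called just once");
        m_constp = constp;
    }
    // Read accessors
    AstVarRef* refp() const { return m_refp; }
    const AstConst* constp() const { return m_constp; }
    int wordIdx() const { return m_wordIdx; }
    bool polarity() const { return m_polarity; }
    int lsb() const { return m_lsb; }
    // Write accessors
    void wordIdx(int i) { m_wordIdx = i; }
    void lsb(int l) { m_lsb = l; }
    void polarity(bool p) { m_polarity = p; }
    // Renamed from width(); the real body asks m_refp->varp()->widthMin(),
    // so it is stubbed out in this sketch.
    int varWidth() const {
        assert(m_refp && "m_refp should be set");
        return 1;  // placeholder value for this sketch only
    }
};

int main() {  // tiny usage example, hypothetical
    AstVarRef someRef;
    LeafInfo leaf;
    leaf.setLeaf(&someRef);
    leaf.lsb(3);
    return (leaf.polarity() && leaf.lsb() == 3) ? 0 : 1;
}

In the diff, the call sites in getVarInfo() and the visit() methods switch from touching m_refp/m_constp/m_lsb directly to going through these accessors.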

@@ -79,14 +79,32 @@ class ConstBitOpTreeVisitor final : public VNVisitor {
// bool indicating if the term is clean (0/1 value, or if the top bits might be dirty)
using ResultTerm = std::tuple<AstNode*, unsigned, bool>;
struct LeafInfo final { // Leaf node (either AstConst or AstVarRef)
class LeafInfo final { // Leaf node (either AstConst or AstVarRef)
bool m_polarity = true;
int m_lsb = 0;
int m_wordIdx = -1; // -1 means AstWordSel is not used.
AstVarRef* m_refp = nullptr;
const AstConst* m_constp = nullptr;
int width() const {
public:
void setLeaf(AstVarRef* refp) {
UASSERT(!m_refp && !m_constp, "Must be called just once");
m_refp = refp;
}
void setLeaf(const AstConst* constp) {
UASSERT(!m_refp && !m_constp, "Must be called just once");
m_constp = constp;
}
AstVarRef* refp() const { return m_refp; }
const AstConst* constp() const { return m_constp; }
int wordIdx() const { return m_wordIdx; }
bool polarity() const { return m_polarity; }
int lsb() const { return m_lsb; }
void wordIdx(int i) { m_wordIdx = i; }
void lsb(int l) { m_lsb = l; }
void polarity(bool p) { m_polarity = p; }
int varWidth() const {
UASSERT(m_refp, "m_refp should be set");
const int width = m_refp->varp()->widthMin();
if (!m_refp->isWide()) {
@@ -339,25 +357,25 @@ class ConstBitOpTreeVisitor final : public VNVisitor {
UINFO(9, "Increment to " << m_ops << " " << nodep << " called from line " << line << "\n");
}
VarInfo& getVarInfo(const LeafInfo& ref) {
UASSERT_OBJ(ref.m_refp, m_rootp, "null varref in And/Or/Xor optimization");
AstNode* nodep = ref.m_refp->varScopep();
if (!nodep) nodep = ref.m_refp->varp(); // Not scoped
UASSERT_OBJ(ref.refp(), m_rootp, "null varref in And/Or/Xor optimization");
AstNode* nodep = ref.refp()->varScopep();
if (!nodep) nodep = ref.refp()->varp(); // Not scoped
int baseIdx = nodep->user4();
if (baseIdx == 0) { // Not set yet
baseIdx = m_varInfos.size();
const int numWords
= ref.m_refp->dtypep()->isWide() ? ref.m_refp->dtypep()->widthWords() : 1;
= ref.refp()->dtypep()->isWide() ? ref.refp()->dtypep()->widthWords() : 1;
m_varInfos.resize(m_varInfos.size() + numWords);
nodep->user4(baseIdx);
}
const size_t idx = baseIdx + std::max(0, ref.m_wordIdx);
const size_t idx = baseIdx + std::max(0, ref.wordIdx());
VarInfo* varInfop = m_varInfos[idx].get();
if (!varInfop) {
varInfop = new VarInfo{this, ref.m_refp, ref.width()};
varInfop = new VarInfo{this, ref.refp(), ref.varWidth()};
m_varInfos[idx].reset(varInfop);
} else {
if (!varInfop->sameVarAs(ref.m_refp))
CONST_BITOP_SET_FAILED("different var (scope?)", ref.m_refp);
if (!varInfop->sameVarAs(ref.refp()))
CONST_BITOP_SET_FAILED("different var (scope?)", ref.refp());
}
return *varInfop;
}
@@ -373,9 +391,9 @@ class ConstBitOpTreeVisitor final : public VNVisitor {
bool ok = !m_failed;
if (expectConst) {
ok &= !info.m_refp && info.m_constp;
ok &= !info.refp() && info.constp();
} else {
ok &= info.m_refp && !info.m_constp;
ok &= info.refp() && !info.constp();
}
return ok ? info : LeafInfo{};
}
@@ -411,22 +429,20 @@ class ConstBitOpTreeVisitor final : public VNVisitor {
CONST_BITOP_RETURN_IF(!m_leafp, nodep);
AstConst* const constp = VN_CAST(nodep->bitp(), Const);
CONST_BITOP_RETURN_IF(!constp, nodep->rhsp());
UASSERT_OBJ(m_leafp->m_wordIdx == -1, nodep, "Unexpected nested WordSel");
m_leafp->m_wordIdx = constp->toSInt();
UASSERT_OBJ(m_leafp->wordIdx() == -1, nodep, "Unexpected nested WordSel");
m_leafp->wordIdx(constp->toSInt());
iterate(nodep->fromp());
}
virtual void visit(AstVarRef* nodep) override {
CONST_BITOP_RETURN_IF(!m_leafp, nodep);
UASSERT_OBJ(!m_leafp->m_refp, nodep, m_leafp->m_refp << " is already set");
m_leafp->m_refp = nodep;
m_leafp->m_polarity = m_polarity;
m_leafp->m_lsb = m_lsb;
m_leafp->setLeaf(nodep);
m_leafp->polarity(m_polarity);
m_leafp->lsb(m_lsb);
}
virtual void visit(AstConst* nodep) override {
CONST_BITOP_RETURN_IF(!m_leafp, nodep);
UASSERT_OBJ(!m_leafp->m_constp, nodep, m_leafp->m_constp << " is already set");
m_leafp->m_constp = nodep;
m_leafp->m_lsb = m_lsb;
m_leafp->setLeaf(nodep);
m_leafp->lsb(m_lsb);
}
virtual void visit(AstRedXor* nodep) override {
@@ -438,36 +454,36 @@ class ConstBitOpTreeVisitor final : public VNVisitor {
CONST_BITOP_RETURN_IF(!andp, lhsp);
const LeafInfo& mask = findLeaf(andp->lhsp(), true);
CONST_BITOP_RETURN_IF(!mask.m_constp || mask.m_lsb != 0, andp->lhsp());
CONST_BITOP_RETURN_IF(!mask.constp() || mask.lsb() != 0, andp->lhsp());
const LeafInfo& ref = findLeaf(andp->rhsp(), false);
CONST_BITOP_RETURN_IF(!ref.m_refp, andp->rhsp());
CONST_BITOP_RETURN_IF(!ref.refp(), andp->rhsp());
restorer.disableRestore(); // Now all subtree succeeded
const V3Number& maskNum = mask.m_constp->num();
const V3Number& maskNum = mask.constp()->num();
incrOps(nodep, __LINE__);
incrOps(andp, __LINE__);
// Mark all bits checked in this reduction
const int maxBitIdx = std::min(ref.m_lsb + maskNum.width(), ref.width());
for (int bitIdx = ref.m_lsb; bitIdx < maxBitIdx; ++bitIdx) {
const int maskIdx = bitIdx - ref.m_lsb;
const int maxBitIdx = std::min(ref.lsb() + maskNum.width(), ref.varWidth());
for (int bitIdx = ref.lsb(); bitIdx < maxBitIdx; ++bitIdx) {
const int maskIdx = bitIdx - ref.lsb();
if (maskNum.bitIs0(maskIdx)) continue;
// Set true, m_polarity takes care of the entire parity
m_bitPolarities.emplace_back(ref, true, bitIdx);
}
} else { // '^leaf'
const LeafInfo& ref = findLeaf(lhsp, false);
CONST_BITOP_RETURN_IF(!ref.m_refp, lhsp);
CONST_BITOP_RETURN_IF(!ref.refp(), lhsp);
restorer.disableRestore(); // Now all checks passed
incrOps(nodep, __LINE__);
// Mark all bits checked by this comparison
for (int bitIdx = ref.m_lsb; bitIdx < ref.width(); ++bitIdx) {
for (int bitIdx = ref.lsb(); bitIdx < ref.varWidth(); ++bitIdx) {
m_bitPolarities.emplace_back(ref, true, bitIdx);
}
}
@@ -492,7 +508,7 @@ class ConstBitOpTreeVisitor final : public VNVisitor {
AstNode* opp = right ? nodep->rhsp() : nodep->lhsp();
const bool origFailed = m_failed;
iterate(opp);
if (leafInfo.m_constp || m_failed) {
if (leafInfo.constp() || m_failed) {
// Revert changes in leaf
restorer.restoreNow();
// Reach past a cast then add to frozen nodes to be added to final reduction
@@ -502,14 +518,14 @@ class ConstBitOpTreeVisitor final : public VNVisitor {
continue;
}
restorer.disableRestore(); // Now all checks passed
if (leafInfo.m_refp) {
if (leafInfo.refp()) {
// The conditional on the lsb being in range is necessary for some degenerate
// case, e.g.: (IData)((QData)wide[0] >> 32), or <1-bit-var> >> 1, which is
// just zero
if (leafInfo.m_lsb < leafInfo.width()) {
m_bitPolarities.emplace_back(leafInfo, isXorTree() || leafInfo.m_polarity,
leafInfo.m_lsb);
} else if (isAndTree() && leafInfo.m_polarity) {
if (leafInfo.lsb() < leafInfo.varWidth()) {
m_bitPolarities.emplace_back(leafInfo, isXorTree() || leafInfo.polarity(),
leafInfo.lsb());
} else if (isAndTree() && leafInfo.polarity()) {
// If there is a constant 0 term in an And tree, we must include it. Fudge
// this by adding a bit with both polarities, which will simplify to zero
m_bitPolarities.emplace_back(leafInfo, true, 0);
@@ -530,38 +546,38 @@ class ConstBitOpTreeVisitor final : public VNVisitor {
if (const AstAnd* const andp = VN_CAST(nodep->rhsp(), And)) { // comp == (mask & v)
const LeafInfo& mask = findLeaf(andp->lhsp(), true);
CONST_BITOP_RETURN_IF(!mask.m_constp || mask.m_lsb != 0, andp->lhsp());
CONST_BITOP_RETURN_IF(!mask.constp() || mask.lsb() != 0, andp->lhsp());
const LeafInfo& ref = findLeaf(andp->rhsp(), false);
CONST_BITOP_RETURN_IF(!ref.m_refp, andp->rhsp());
CONST_BITOP_RETURN_IF(!ref.refp(), andp->rhsp());
restorer.disableRestore(); // Now all checks passed
const V3Number& maskNum = mask.m_constp->num();
const V3Number& maskNum = mask.constp()->num();
incrOps(nodep, __LINE__);
incrOps(andp, __LINE__);
// Mark all bits checked by this comparison
const int maxBitIdx = std::min(ref.m_lsb + maskNum.width(), ref.width());
for (int bitIdx = ref.m_lsb; bitIdx < maxBitIdx; ++bitIdx) {
const int maskIdx = bitIdx - ref.m_lsb;
const int maxBitIdx = std::min(ref.lsb() + maskNum.width(), ref.varWidth());
for (int bitIdx = ref.lsb(); bitIdx < maxBitIdx; ++bitIdx) {
const int maskIdx = bitIdx - ref.lsb();
if (maskNum.bitIs0(maskIdx)) continue;
const bool polarity = compNum.bitIs1(maskIdx) != maskFlip;
m_bitPolarities.emplace_back(ref, polarity, bitIdx);
}
} else { // comp == v
const LeafInfo& ref = findLeaf(nodep->rhsp(), false);
CONST_BITOP_RETURN_IF(!ref.m_refp, nodep->rhsp());
CONST_BITOP_RETURN_IF(!ref.refp(), nodep->rhsp());
restorer.disableRestore(); // Now all checks passed
incrOps(nodep, __LINE__);
// Mark all bits checked by this comparison
const int maxBitIdx = std::min(ref.m_lsb + compNum.width(), ref.width());
for (int bitIdx = ref.m_lsb; bitIdx < maxBitIdx; ++bitIdx) {
const int maskIdx = bitIdx - ref.m_lsb;
const int maxBitIdx = std::min(ref.lsb() + compNum.width(), ref.varWidth());
for (int bitIdx = ref.lsb(); bitIdx < maxBitIdx; ++bitIdx) {
const int maskIdx = bitIdx - ref.lsb();
const bool polarity = compNum.bitIs1(maskIdx) != maskFlip;
m_bitPolarities.emplace_back(ref, polarity, bitIdx);
}