Example #1
0
    void walkDupFuncs() {
	for (V3Hashed::iterator it = m_hashed.begin(); it != m_hashed.end(); ++it) {
	    V3Hash hashval = it->first;
	    AstNode* node1p = it->second;
	    if (!node1p->castCFunc()) continue;
	    if (hashval.isIllegal()) node1p->v3fatalSrc("Illegal (unhashed) nodes\n");
	    for (V3Hashed::iterator eqit = it; eqit != m_hashed.end(); ++eqit) {
		AstNode* node2p = eqit->second;
		if (!(eqit->first == hashval)) break;
		if (node1p==node2p) continue;  // Identical iterator
		if (node1p->user3p() || node2p->user3p()) continue;   // Already merged
		if (node1p->sameTree(node2p)) { // walk of tree has same comparison
		    // Replace AstCCall's that point here
		    replaceFuncWFunc(node2p->castCFunc(), node1p->castCFunc());
		    // Replacement may promote a slow routine to fast path
		    if (!node2p->castCFunc()->slow()) node1p->castCFunc()->slow(false);
		}
	    }
	}
    }
Example #2
0
    void walkEmptyFuncs() {
	for (V3Hashed::iterator it = m_hashed.begin(); it != m_hashed.end(); ++it) {
	    AstNode* node1p = it->second;
	    AstCFunc* oldfuncp = node1p->castCFunc();
	    if (oldfuncp
		&& oldfuncp->emptyBody()
		&& !oldfuncp->dontCombine()) {
		UINFO(5,"     EmptyFunc "<<hex<<V3Hash(oldfuncp->user4p())<<" "<<oldfuncp<<endl);
		// Mark user3p on entire old tree, so we don't process it more
		CombMarkVisitor visitor(oldfuncp);
		m_call.replaceFunc(oldfuncp, NULL);
		oldfuncp->unlinkFrBack();
		pushDeletep(oldfuncp); VL_DANGLING(oldfuncp);
	    }
	}
    }
Example #3
0
    // Hash assignp's RHS (with two extra context nodes folded in) and insert
    // it into m_hashed; return a previously-seen assignment whose RHS is a
    // duplicate, or NULL if this RHS is the first of its kind.
    AstNodeAssign* hashAndFindDupe(AstNodeAssign* assignp, AstNode* extra1p, AstNode* extra2p) {
	AstNode* rhsp = assignp->rhsp();
	// Stash the owning assignment and the context nodes on the RHS so the
	// duplicate-comparison callback can retrieve them
	rhsp->user2p(assignp);
	rhsp->user3p(extra1p);
	rhsp->user5p(extra2p);

	hash(extra1p);
	hash(extra2p);

	V3Hashed::iterator insit = m_hashed.hashAndInsert(rhsp);
	V3Hashed::iterator dupit = m_hashed.findDuplicate(rhsp, this);
	// Even though rhsp was just inserted, V3Hashed::findDuplicate doesn't
	// return anything in the hash that has the same pointer (V3Hashed.cpp::findDuplicate)
	// So dupit is either a different, duplicate rhsp, or the end of the hash.
	if (dupit == m_hashed.end()) return NULL;
	// Keep only the first-seen copy in the table
	m_hashed.erase(insit);
	return m_hashed.iteratorNodep(dupit)->user2p()->castNode()->castNodeAssign();
    }
    void detectDuplicates() {
	UINFO(9,"Finding duplicates\n");
	// Note uses user4
	V3Hashed  hashed;	// Duplicate code detection
	// Hash all of the original signals we toggle cover
	for (ToggleList::iterator it = m_toggleps.begin(); it != m_toggleps.end(); ++it) {
	    AstCoverToggle* nodep = *it;
	    hashed.hashAndInsert(nodep->origp());
	}
	// Find if there are any duplicates
	for (ToggleList::iterator it = m_toggleps.begin(); it != m_toggleps.end(); ++it) {
	    AstCoverToggle* nodep = *it;
	    if (nodep->backp()) {   // nodep->backp() is null if we already detected it's a duplicate and unlinked it
		// Want to choose a base node, and keep finding duplicates that are identical
		// This prevents making chains where a->b, then c->d, then b->c, as we'll find a->b, a->c, a->d directly.
		while (1) {
		    V3Hashed::iterator dupit = hashed.findDuplicate(nodep->origp());
		    if (dupit == hashed.end()) break;
		    //
		    AstNode* duporigp = hashed.iteratorNodep(dupit);
		    // Note hashed will point to the original variable (what's duplicated), not the covertoggle,
		    // but we need to get back to the covertoggle which is immediately above, so:
		    AstCoverToggle* removep = duporigp->backp()->castCoverToggle();
		    if (!removep) nodep->v3fatalSrc("CoverageJoin duplicate of wrong type");
		    UINFO(8,"  Orig "<<nodep<<" -->> "<<nodep->incp()->declp()<<endl);
		    UINFO(8,"   dup "<<removep<<" -->> "<<removep->incp()->declp()<<endl);
		    // The CoverDecl the duplicate pointed to now needs to point to the original's data
		    // IE the duplicate will get the coverage number from the non-duplicate
		    AstCoverDecl* datadeclp = nodep->incp()->declp()->dataDeclThisp();
		    removep->incp()->declp()->dataDeclp (datadeclp);
		    UINFO(8,"   new "<<removep->incp()->declp()<<endl);
		    // Mark the found node as a duplicate of the first node
		    // (Not vice-versa as we have the iterator for the found node)
		    removep->unlinkFrBack();  pushDeletep(removep); removep=NULL;
		    // Remove node from comparison so don't hit it again
		    hashed.erase(dupit);
		    ++m_statToggleJoins;
		}
	    }
	}
    }