Example #1
    virtual void visit(AstNodeModule* nodep, AstNUser*) {
	UINFO(4," MOD   "<<nodep<<endl);
	m_modp = nodep;
	m_modNFuncs = 0;
	m_hashed.clear();
	// Compute hash of all statement trees in the function
	m_state = STATE_HASH;
	nodep->iterateChildren(*this);
	m_state = STATE_IDLE;
	if (debug()>=9) {
	    m_hashed.dumpFilePrefixed("combine");
	}
	// Walk the hashes removing empty functions
	if (emptyFunctionDeletion()) {
	    walkEmptyFuncs();
	}
	// Walk the hashes looking for duplicate functions
	if (duplicateFunctionCombine()) {
	    walkDupFuncs();
	}
	// Walk the statements looking for large replicated code sections
	if (statementCombine()) {
	    m_state = STATE_DUP;
	    nodep->iterateChildren(*this);
	    m_state = STATE_IDLE;
	}
	m_modp = NULL;
    }
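This visitor makes a complete hashing pass (STATE_HASH) over the module before any of the duplicate walks run, because those walks read the hashes back out of the V3Hashed table. Below is a minimal sketch of the same hash-then-query ordering outside a visitor, using only calls that appear in these examples; "stmtsp" and the function name are hypothetical.

    void hashThenQuerySketch(AstNode* stmtsp) {
	V3Hashed hashed;
	for (AstNode* stmtp = stmtsp; stmtp; stmtp = stmtp->nextp()) {
	    hashed.hashAndInsert(stmtp);	// Hash the whole statement subtree and remember it
	}
	for (V3Hashed::iterator it = hashed.begin(); it != hashed.end(); ++it) {
	    UINFO(9,"  hashed "<<it->first<<" "<<it->second<<endl);	// Hash value, then node
	}
    }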
Example #2
    void walkEmptyFuncs() {
	for (V3Hashed::iterator it = m_hashed.begin(); it != m_hashed.end(); ++it) {
	    AstNode* node1p = it->second;
	    AstCFunc* oldfuncp = node1p->castCFunc();
	    if (oldfuncp
		&& oldfuncp->emptyBody()
		&& !oldfuncp->dontCombine()) {
		UINFO(5,"     EmptyFunc "<<hex<<V3Hash(oldfuncp->user4p())<<" "<<oldfuncp<<endl);
		// Mark user3p on entire old tree, so we don't process it more
		CombMarkVisitor visitor(oldfuncp);
		m_call.replaceFunc(oldfuncp, NULL);
		oldfuncp->unlinkFrBack();
		pushDeletep(oldfuncp); VL_DANGLING(oldfuncp);
	    }
	}
    }
Example #3
    void walkDupFuncs() {
	for (V3Hashed::iterator it = m_hashed.begin(); it != m_hashed.end(); ++it) {
	    V3Hash hashval = it->first;
	    AstNode* node1p = it->second;
	    if (!node1p->castCFunc()) continue;
	    if (hashval.isIllegal()) node1p->v3fatalSrc("Illegal (unhashed) nodes\n");
	    for (V3Hashed::iterator eqit = it; eqit != m_hashed.end(); ++eqit) {
		AstNode* node2p = eqit->second;
		if (!(eqit->first == hashval)) break;
		if (node1p==node2p) continue;  // Identical iterator
		if (node1p->user3p() || node2p->user3p()) continue;   // Already merged
		if (node1p->sameTree(node2p)) { // walk of tree has same comparison
		    // Replace AstCCall's that point here
		    replaceFuncWFunc(node2p->castCFunc(), node1p->castCFunc());
		    // Replacement may promote a slow routine to fast path
		    if (!node2p->castCFunc()->slow()) node1p->castCFunc()->slow(false);
		}
	    }
	}
    }
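The inner loop above depends on equal hash values sitting next to each other in the container, so it can stop scanning as soon as the hash changes. The same candidate set can also be reached directly through mmap().equal_range(), as Example #6 does. A hypothetical helper, sketched only from calls shown in these examples, that returns the first true duplicate of an already-hashed node:

    AstNode* findFirstDupSketch(AstNode* nodep) {
	V3Hash hashval(nodep->user4p());
	pair <V3Hashed::iterator,V3Hashed::iterator> eqrange = m_hashed.mmap().equal_range(hashval);
	for (V3Hashed::iterator eqit = eqrange.first; eqit != eqrange.second; ++eqit) {
	    AstNode* candp = eqit->second;
	    if (candp == nodep) continue;		// Same entry, not a duplicate
	    if (candp->user3p() || nodep->user3p()) continue;	// Already merged elsewhere
	    if (nodep->sameTree(candp)) return candp;	// Equal hash; confirm exact tree match
	}
	return NULL;
    }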
Example #4
    AstNodeAssign* hashAndFindDupe(AstNodeAssign* assignp, AstNode* extra1p, AstNode* extra2p) {
	AstNode *rhsp = assignp->rhsp();
	rhsp->user2p(assignp);
	rhsp->user3p(extra1p);
	rhsp->user5p(extra2p);

	hash(extra1p);
	hash(extra2p);

	V3Hashed::iterator inserted = m_hashed.hashAndInsert(rhsp);
	V3Hashed::iterator dupit = m_hashed.findDuplicate(rhsp, this);
	// Even though rhsp was just inserted, V3Hashed::findDuplicate doesn't
	// return anything in the hash that has the same pointer (V3Hashed.cpp::findDuplicate)
	// So dupit is either a different, duplicate rhsp, or the end of the hash.
	if (dupit != m_hashed.end()) {
	    m_hashed.erase(inserted);
	    return m_hashed.iteratorNodep(dupit)->user2p()->castNode()->castNodeAssign();
	}
	return NULL;
    }
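A hedged sketch of how a caller might use hashAndFindDupe(); the caller name is hypothetical, and the NULL extras simply mean no extra context is folded into the hash (compare the NULL check in hash(), Example #10).

    void dedupAssignSketch(AstNodeAssign* assignp) {
	if (AstNodeAssign* dupp = hashAndFindDupe(assignp, NULL, NULL)) {
	    UINFO(9,"    Duplicate assign "<<assignp<<" matches earlier "<<dupp<<endl);
	    // A real caller would now reuse dupp's result rather than keep assignp
	}
    }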
Example #5
    int walkDupCodeNext(AstNode* node1p, AstNode* node2p, int level) {
	// Find number of common statements between the two node1p_nextp's...
	if (node1p->user1p() || node2p->user1p()) return 0;   // Already iterated
	if (node1p->user3p() || node2p->user3p()) return 0;   // Already merged
	if (!m_hashed.sameNodes(node1p,node2p)) return 0; // walk of tree has same comparison
	V3Hash hashval(node1p->user4p());
	//UINFO(9,"        wdup1 "<<level<<" "<<V3Hash(node1p->user4p())<<" "<<node1p<<endl);
	//UINFO(9,"        wdup2 "<<level<<" "<<V3Hash(node2p->user4p())<<" "<<node2p<<endl);
	m_walkLast1p = node1p;
	m_walkLast2p = node2p;
	node1p->user1(true);
	node2p->user1(true);
	if (node1p->nextp() && node2p->nextp()) {
	    return hashval.depth()+walkDupCodeNext(node1p->nextp(), node2p->nextp(), level+1);
	}
	return hashval.depth();
    }
Example #6
    void walkDupCodeStart(AstNode* node1p) {
	V3Hash hashval (node1p->user4p());
	//UINFO(4,"    STMT "<<hashval<<" "<<node1p<<endl);
	//
	int	 bestDepth = 0;		// Best substitution found in the search
	AstNode* bestNode2p = NULL;
	AstNode* bestLast1p = NULL;
	AstNode* bestLast2p = NULL;
	//
	pair <V3Hashed::iterator,V3Hashed::iterator> eqrange = m_hashed.mmap().equal_range(hashval);
	for (V3Hashed::iterator eqit = eqrange.first; eqit != eqrange.second; ++eqit) {
	    AstNode* node2p = eqit->second;
	    if (node1p==node2p) continue;
	    //
	    // We need to mark iteration to prevent matching code inside code (abab matching in ababab)
	    AstNode::user1ClearTree();	// user1p() used on entire tree
	    m_walkLast1p = NULL;
	    m_walkLast2p = NULL;
	    int depth = walkDupCodeNext(node1p, node2p, 1);
	    if (depth>COMBINE_MIN_STATEMENTS
		&& depth>bestDepth) {
		bestDepth = depth;
		bestNode2p = node2p;
		bestLast1p = m_walkLast1p;
		bestLast2p = m_walkLast2p;
	    }
	}
	if (bestDepth) {
	    // Found a replacement
	    UINFO(5,"     Duplicate of depth "<<bestDepth<<endl);
	    UINFO(5,"       DupFunc "<<" "<<node1p<<endl);
	    UINFO(5,"           and "<<" "<<bestNode2p<<endl);
	    UINFO(5,"       Through "<<" "<<bestLast1p<<endl);
	    UINFO(5,"           and "<<" "<<bestLast2p<<endl);
	    //
	    walkReplace(node1p, bestNode2p, bestLast1p, bestLast2p);
	}
    }
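A hedged sketch (hypothetical, not the original visitor code) of how the STATE_DUP pass from Example #1 could drive walkDupCodeStart() over a hashed statement list:

    void walkStatementsSketch(AstNode* stmtsp) {
	for (AstNode* stmtp = stmtsp; stmtp; stmtp = stmtp->nextp()) {
	    if (!stmtp->user3p()) walkDupCodeStart(stmtp);	// Skip trees already merged away
	}
    }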
Example #7
    void detectDuplicates() {
	UINFO(9,"Finding duplicates\n");
	// Note uses user4
	V3Hashed  hashed;	// Duplicate code detection
	// Hash all of the original signals we toggle cover
	for (ToggleList::iterator it = m_toggleps.begin(); it != m_toggleps.end(); ++it) {
	    AstCoverToggle* nodep = *it;
	    hashed.hashAndInsert(nodep->origp());
	}
	// Find if there are any duplicates
	for (ToggleList::iterator it = m_toggleps.begin(); it != m_toggleps.end(); ++it) {
	    AstCoverToggle* nodep = *it;
	    if (nodep->backp()) {   // nodep->backp() is null if we already detected it's a duplicate and unlinked it
		// Want to choose a base node, and keep finding duplicates that are identical
		// This prevents making chains where a->b, then c->d, then b->c, as we'll find a->b, a->c, a->d directly.
		while (1) {
		    V3Hashed::iterator dupit = hashed.findDuplicate(nodep->origp());
		    if (dupit == hashed.end()) break;
		    //
		    AstNode* duporigp = hashed.iteratorNodep(dupit);
		    // Note hashed will point to the original variable (what's duplicated), not the covertoggle,
		    // but we need to get back to the covertoggle which is immediately above, so:
		    AstCoverToggle* removep = duporigp->backp()->castCoverToggle();
		    if (!removep) nodep->v3fatalSrc("CoverageJoin duplicate of wrong type");
		    UINFO(8,"  Orig "<<nodep<<" -->> "<<nodep->incp()->declp()<<endl);
		    UINFO(8,"   dup "<<removep<<" -->> "<<removep->incp()->declp()<<endl);
		    // The CoverDecl the duplicate pointed to now needs to point to the original's data
		    // IE the duplicate will get the coverage number from the non-duplicate
		    AstCoverDecl* datadeclp = nodep->incp()->declp()->dataDeclThisp();
		    removep->incp()->declp()->dataDeclp (datadeclp);
		    UINFO(8,"   new "<<removep->incp()->declp()<<endl);
		    // Mark the found node as a duplicate of the first node
		    // (Not vice-versa as we have the iterator for the found node)
		    removep->unlinkFrBack();  pushDeletep(removep); removep=NULL;
		    // Remove node from comparison so don't hit it again
		    hashed.erase(dupit);
		    ++m_statToggleJoins;
		}
	    }
	}
    }
Example #8
    // METHODS
    void hashStatement(AstNode* nodep) {
	// Compute hash on entire tree of this statement
	m_hashed.hashAndInsert(nodep);
	//UINFO(9,"  stmthash "<<hex<<nodep->user4()<<"  "<<nodep<<endl);
    }
Example #9
    bool sameHash(AstNode* node1p, AstNode* node2p) {
	return (node1p && node2p
		&& !node1p->sameHash().isIllegal()
		&& !node2p->sameHash().isIllegal()
		&& m_hashed.sameNodes(node1p,node2p));
    }
Example #10
    void hash(AstNode* nodep) {
	// !NULL && the object is hashable
	if (nodep && !nodep->sameHash().isIllegal()) {
	    m_hashed.hash(nodep);
	}
    }
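A minimal combined sketch of the three helpers in Examples #8-#10; "stmt1p" and "stmt2p" are hypothetical statements handled by the same visitor that owns m_hashed.

    bool statementsMatchSketch(AstNode* stmt1p, AstNode* stmt2p) {
	hashStatement(stmt1p);			// Example #8: hash and insert into m_hashed
	hashStatement(stmt2p);
	return sameHash(stmt1p, stmt2p);	// Example #9: both hashable and sameNodes() agrees
    }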