void TR::ValidateNodeRefCountWithinBlock::validate(TR::TreeTop *firstTreeTop, TR::TreeTop *exitTreeTop) { _nodeChecklist.empty(); for (TR::TreeTop *tt = firstTreeTop; tt != exitTreeTop->getNextTreeTop(); tt = tt->getNextTreeTop()) { TR::Node *node = tt->getNode(); node->setLocalIndex(node->getReferenceCount()); validateRefCountPass1(node); } /** * We start again from the start of the block, and check the localIndex to * make sure it is 0. * * NOTE: Walking the tree backwards causes huge stack usage in validateRefCountPass2. */ _nodeChecklist.empty(); for (TR::TreeTop *tt = firstTreeTop; tt != exitTreeTop->getNextTreeTop(); tt = tt->getNextTreeTop()) { validateRefCountPass2(tt->getNode()); } }
void TR_ExpressionsSimplification::transformNode(TR::Node *srcNode, TR::Block *dstBlock) { TR::TreeTop *lastTree = dstBlock->getLastRealTreeTop(); TR::TreeTop *prevTree = lastTree->getPrevTreeTop(); TR::TreeTop *srcNodeTT = TR::TreeTop::create(comp(), srcNode); if (trace()) comp()->getDebug()->print(comp()->getOutFile(),srcNode,0,true); if (lastTree->getNode()->getOpCode().isBranch() || (lastTree->getNode()->getOpCode().isJumpWithMultipleTargets() && lastTree->getNode()->getOpCode().hasBranchChildren())) { srcNodeTT->join(lastTree); prevTree->join(srcNodeTT); } /* else if (dstBlock->getEntry()->getNode()->getOpCodeValue() == TR::BBStart) { srcNodeTT->join(dstBlock->getExit()); dstBlock->getEntry()->join(srcNodeTT); } */ else { srcNodeTT->join(dstBlock->getExit()); lastTree->join(srcNodeTT); } return; }
void TR::ILValidator::checkSoundness(TR::TreeTop *start, TR::TreeTop *stop)
   {
   soundnessRule(start, start != NULL, "Start tree must exist");
   soundnessRule(stop, !stop || stop->getNode() != NULL, "Stop tree must have a node");

   TR::NodeChecklist treetopNodes(comp()), ancestorNodes(comp()), visitedNodes(comp());

   // Can't use iterators here, because those presuppose the IL is sound. Walk trees the old-fashioned way.
   //
   for (TR::TreeTop *currentTree = start; currentTree != stop; currentTree = currentTree->getNextTreeTop())
      {
      soundnessRule(currentTree, currentTree->getNode() != NULL, "Tree must have a node");
      soundnessRule(currentTree, !treetopNodes.contains(currentTree->getNode()), "Treetop node n%dn encountered twice", currentTree->getNode()->getGlobalIndex());

      treetopNodes.add(currentTree->getNode());

      TR::TreeTop *next = currentTree->getNextTreeTop();
      if (next)
         {
         soundnessRule(currentTree, next->getNode() != NULL, "Tree after n%dn must have a node", currentTree->getNode()->getGlobalIndex());
         soundnessRule(currentTree, next->getPrevTreeTop() == currentTree, "Doubly-linked treetop list must be consistent: n%dn->n%dn<-n%dn", currentTree->getNode()->getGlobalIndex(), next->getNode()->getGlobalIndex(), next->getPrevTreeTop()->getNode()->getGlobalIndex());
         }
      else
         {
         soundnessRule(currentTree, stop == NULL, "Reached the end of the trees after n%dn without encountering the stop tree n%dn", currentTree->getNode()->getGlobalIndex(), stop ? stop->getNode()->getGlobalIndex() : 0);
         }

      // Check the nodes of every tree, not just the last one.
      checkNodeSoundness(currentTree, currentTree->getNode(), ancestorNodes, visitedNodes);
      }
   }
// Add an async check into a block - MUST be at block entry // void TR_AsyncCheckInsertion::insertAsyncCheck(TR::Block *block, TR::Compilation *comp, const char *counterPrefix) { TR::TreeTop *lastTree = block->getLastRealTreeTop(); TR::TreeTop *asyncTree = TR::TreeTop::create(comp, TR::Node::createWithSymRef(lastTree->getNode(), TR::asynccheck, 0, comp->getSymRefTab()->findOrCreateAsyncCheckSymbolRef(comp->getMethodSymbol()))); if (lastTree->getNode()->getOpCode().isReturn()) { TR::TreeTop *prevTree = lastTree->getPrevTreeTop(); prevTree->join(asyncTree); asyncTree->join(lastTree); } else { TR::TreeTop *nextTree = block->getEntry()->getNextTreeTop(); block->getEntry()->join(asyncTree); asyncTree->join(nextTree); } const char * const name = TR::DebugCounter::debugCounterName(comp, "asynccheck.insert/%s/(%s)/%s/block_%d", counterPrefix, comp->signature(), comp->getHotnessName(), block->getNumber()); TR::DebugCounter::prependDebugCounter(comp, name, asyncTree->getNextTreeTop()); }
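// Illustrative sketch of the treetop-splicing idiom insertAsyncCheck() relies on:
// TR::TreeTop::join() rewires the doubly-linked list of trees, so placing a new tree right
// after a block's BBStart takes two joins. The helper name and the `newNode` parameter are
// hypothetical; `comp` and `block` are assumed to be a valid compilation and block, as above.
static void insertTreeAtBlockEntry(TR::Compilation *comp, TR::Block *block, TR::Node *newNode)
   {
   TR::TreeTop *newTree = TR::TreeTop::create(comp, newNode);
   TR::TreeTop *entry = block->getEntry();         // the BBStart treetop
   TR::TreeTop *oldNext = entry->getNextTreeTop(); // whatever used to follow BBStart
   entry->join(newTree);                           // BBStart -> newTree
   newTree->join(oldNext);                         // newTree -> old successor
   }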
int32_t OMR::Simplifier::perform() { vcount_t visitCount = comp()->incOrResetVisitCount(); TR::TreeTop * tt; for (tt = comp()->getStartTree(); tt; tt = tt->getNextTreeTop()) tt->getNode()->initializeFutureUseCounts(visitCount); comp()->incVisitCount(); for (tt = comp()->getStartTree(); tt; tt = tt->getNextTreeTop()) cleanupFlags(tt->getNode()); visitCount = comp()->incVisitCount(); tt = comp()->getStartTree(); while (tt) tt = simplifyExtendedBlock(tt); comp()->getFlowGraph()->removeUnreachableBlocks(); if (manager()->numPassesCompleted() == 0) manager()->incNumPassesCompleted(); return 1; }
/**
 * A runtime guard block may have monitor stores and privarg stores along with the guard
 * itself. This method will rearrange these stores and split the block, managing any
 * uncommoning necessary for eventual block order.
 *
 * The provided block will become the privarg block, containing any privarg stores and additional
 * temps for uncommoning. It must be evaluated first. The returned block will contain monitor
 * stores and the guard. If no split is required, the provided block will be returned.
 *
 * @param comp Compilation object
 * @param block Block to manipulate
 * @param cfg Current CFG
 * @return The block containing the guard.
 */
static TR::Block* splitRuntimeGuardBlock(TR::Compilation *comp, TR::Block* block, TR::CFG *cfg)
   {
   TR::NodeChecklist checklist(comp);
   TR::TreeTop *start = block->getFirstRealTreeTop();
   TR::TreeTop *guard = block->getLastRealTreeTop();
   TR::TreeTop *firstPrivArg = NULL;
   TR::TreeTop *firstMonitor = NULL;

   // Manage the unexpected case that monitors and priv args are reversed
   bool privThenMonitor = false;

   TR_ASSERT(isMergeableGuard(guard->getNode()), "last node must be guard %p", guard->getNode());

   // Search for privarg and monitor stores
   // Only commoned nodes under the guard are required to be anchored, due to the guard being
   // evaluated before the monitor stores later on
   bool anchoredTemps = false;
   for (TR::TreeTop *tt = start; tt && tt->getNode()->getOpCodeValue() != TR::BBEnd; tt = tt->getNextTreeTop())
      {
      TR::Node * node = tt->getNode();

      if (node->getOpCode().hasSymbolReference() && node->getSymbol()->holdsMonitoredObject())
         firstMonitor = firstMonitor == NULL ? tt : firstMonitor;
      else if (node->chkIsPrivatizedInlinerArg())
         {
         if (firstPrivArg == NULL)
            {
            firstPrivArg = tt;
            privThenMonitor = (firstMonitor == NULL);
            }
         }
      else if (isMergeableGuard(node))
         anchoredTemps |= anchorCommonNodes(comp, node, start, checklist);
      else
         TR_ASSERT(0, "Node other than monitor or privarg store %p before runtime guard", node);
      }

   // If there are monitors then privargs, they must be swapped around, such that all privargs are
   // evaluated first
   if (firstPrivArg && firstMonitor && !privThenMonitor)
      {
      TR::TreeTop *monitorEnd = firstPrivArg->getPrevTreeTop();
      firstMonitor->getPrevTreeTop()->join(firstPrivArg);
      guard->getPrevTreeTop()->join(firstMonitor);
      monitorEnd->join(guard);
      }

   // If there were temps created or privargs in the block, perform a split
   TR::TreeTop *split = NULL;
   if (firstPrivArg)
      split = firstMonitor ? firstMonitor : guard;
   else if (anchoredTemps)
      split = start;

   if (split)
      return block->split(split, cfg, true /* fixupCommoning */, false /* copyExceptionSuccessors */);
   return block;
   }
inline TR::TreeTop * OMR::TreeTop::getNextRealTreeTop() { TR::TreeTop *treeTop; for (treeTop = self()->getNextTreeTop(); treeTop && treeTop->getNode() && treeTop->getNode()->getOpCode().isExceptionRangeFence(); treeTop = treeTop->getNextTreeTop()) {} return treeTop; }
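// Small usage sketch for the accessor above, assuming a well-formed block: combined with
// TR::Block::getFirstRealTreeTop(), getNextRealTreeTop() walks only the "real" trees between
// BBStart and BBEnd, skipping exception-range fences. The helper and its `visit` callback
// are hypothetical.
static void forEachRealTreeInBlock(TR::Block *block, void (*visit)(TR::Node *node))
   {
   TR::TreeTop *exit = block->getExit();
   for (TR::TreeTop *tt = block->getFirstRealTreeTop(); tt && tt != exit; tt = tt->getNextRealTreeTop())
      visit(tt->getNode());
   }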
static TR::TreeTop *findNextLegalTreeTop(TR::Compilation *comp, TR::Block *block) { vcount_t startVisitCount = comp->getStartTree()->getNode()->getVisitCount(); TR::TreeTop * tt = NULL; for (tt = comp->getStartTree(); tt; tt = tt->getNextTreeTop()) { if (tt->getNode()->getVisitCount() < startVisitCount) break; if (tt->getNode()->getOpCodeValue() == TR::BBStart) tt = tt->getNode()->getBlock()->getExit(); } return tt; }
void TR_ReachingDefinitions::initializeGenAndKillSetInfo() { // For each block in the CFG build the gen and kill set for this analysis. // Go in treetop order, which guarantees that we see the correct (i.e. first) // evaluation point for each node. // TR::Block *block; int32_t blockNum = 0; bool seenException = false; TR_BitVector defsKilled(getNumberOfBits(), trMemory()->currentStackRegion()); comp()->incVisitCount(); for (TR::TreeTop *treeTop = comp()->getStartTree(); treeTop; treeTop = treeTop->getNextTreeTop()) { TR::Node *node = treeTop->getNode(); if (node->getOpCodeValue() == TR::BBStart) { block = node->getBlock(); blockNum = block->getNumber(); seenException = false; if (traceRD()) traceMsg(comp(), "\nNow generating gen and kill information for block_%d\n", blockNum); continue; } #if DEBUG if (node->getOpCodeValue() == TR::BBEnd && traceRD()) { traceMsg(comp(), " Block %d:\n", blockNum); traceMsg(comp(), " Gen set "); if (_regularGenSetInfo[blockNum]) _regularGenSetInfo[blockNum]->print(comp()); else traceMsg(comp(), "{}"); traceMsg(comp(), "\n Kill set "); if (_regularKillSetInfo[blockNum]) _regularKillSetInfo[blockNum]->print(comp()); else traceMsg(comp(), "{}"); traceMsg(comp(), "\n Exception Gen set "); if (_exceptionGenSetInfo[blockNum]) _exceptionGenSetInfo[blockNum]->print(comp()); else traceMsg(comp(), "{}"); traceMsg(comp(), "\n Exception Kill set "); if (_exceptionKillSetInfo[blockNum]) _exceptionKillSetInfo[blockNum]->print(comp()); else traceMsg(comp(), "{}"); continue; } #endif initializeGenAndKillSetInfoForNode(node, defsKilled, seenException, blockNum, NULL); if (!seenException && treeHasChecks(treeTop)) seenException = true; } }
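// The per-block sets built above feed the standard reaching-definitions transfer function,
// out = gen U (in - kill). A minimal sketch of that step with TR_BitVector; this is only an
// illustration of how the sets are consumed (the helper name is hypothetical, and the real
// propagation is performed by the data-flow engine, not by this method).
static void applyReachingDefsStep(TR_BitVector &out, TR_BitVector &in, TR_BitVector &gen, TR_BitVector &kill)
   {
   out.empty();  // start from the empty set
   out |= in;    // definitions reaching the block entry
   out -= kill;  // minus definitions killed within the block
   out |= gen;   // plus definitions generated within the block
   }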
int32_t TR_AsyncCheckInsertion::insertReturnAsyncChecks(TR::Optimization *opt, const char *counterPrefix) { TR::Compilation * const comp = opt->comp(); if (opt->trace()) traceMsg(comp, "Inserting return asyncchecks (%s)\n", counterPrefix); int numAsyncChecksInserted = 0; for (TR::TreeTop *treeTop = comp->getStartTree(); treeTop; /* nothing */ ) { TR::Block *block = treeTop->getNode()->getBlock(); if (block->getLastRealTreeTop()->getNode()->getOpCode().isReturn() && performTransformation(comp, "%sInserting return asynccheck (%s) in block_%d\n", opt->optDetailString(), counterPrefix, block->getNumber())) { insertAsyncCheck(block, comp, counterPrefix); numAsyncChecksInserted++; } treeTop = block->getExit()->getNextRealTreeTop(); } return numAsyncChecksInserted; }
void TR::ValidateLivenessBoundaries::validate(TR::ResolvedMethodSymbol *methodSymbol) { /** * These must be initialized at the start of every validate call, * since the same Rule object can be used multiple times to validate * the IL at different stages of the compilation. */ TR::NodeSideTable<TR::NodeState> nodeStates(comp()->trMemory()); /** * Similar to NodeChecklist, but more compact. Rather than track * node global indexes, which can be sparse, this tracks local * indexes, which are relatively dense. Furthermore, the _basis field * allows us not to waste space on nodes we saw in prior blocks. * As the name suggests, used to keep track of live Nodes. */ TR::LiveNodeWindow liveNodes(nodeStates, comp()->trMemory()); TR::TreeTop *start = methodSymbol->getFirstTreeTop(); TR::TreeTop *stop = methodSymbol->getLastTreeTop(); for (TR::PostorderNodeOccurrenceIterator iter(start, comp(), "VALIDATE_LIVENESS_BOUNDARIES"); iter != stop; ++iter) { TR::Node *node = iter.currentNode(); updateNodeState(node, nodeStates, liveNodes); if (node->getOpCodeValue() == TR::BBEnd) { /* Determine whether this is the end of an extended block */ bool isEndOfExtendedBlock = false; TR::TreeTop *nextTree = iter.currentTree()->getNextTreeTop(); if (nextTree) { TR::checkILCondition(node, nextTree->getNode()->getOpCodeValue() == TR::BBStart, comp(), "Expected BBStart after BBEnd"); isEndOfExtendedBlock = ! nextTree->getNode()->getBlock()->isExtensionOfPreviousBlock(); } else { isEndOfExtendedBlock = true; } if (isEndOfExtendedBlock) { /* Ensure there are no nodes live across the end of a block */ validateEndOfExtendedBlockBoundary(node, liveNodes); } } } }
inline TR::Block * OMR::TreeTop::getEnclosingBlock( bool forward) { TR::TreeTop * tt = self(); if (forward) while (tt->getNode()->getOpCodeValue() != TR::BBEnd) { tt = tt->getNextTreeTop(); //TR_ASSERT(tt && tt->getNode(), "either tt or node on a tt null here, we will segfault"); } else while (tt->getNode()->getOpCodeValue() != TR::BBStart) { tt = tt->getPrevTreeTop(); //TR_ASSERT(tt && tt->getNode(), "either tt or node on a tt null here, we will segfault"); } return tt->getNode()->getBlock(); }
void OMR::TreeTop::removeDeadTrees(TR::Compilation * comp, TR::TreeTop* first, TR::TreeTop* last) { for (TR::TreeTop* cur = first; cur != last; cur = cur->getNextTreeTop()) { int numChildren = cur->getNode()->getNumChildren(); for (int child = numChildren-1; child>0; --child) { TR::Node * node = cur->getNode()->getChild(child); cur->insertAfter(TR::TreeTop::create(comp, TR::Node::create(TR::treetop, 1, node))); node->decReferenceCount(); } if (numChildren != 0) { TR::Node * node = cur->getNode()->getChild(0); cur->setNode(TR::Node::create(TR::treetop, 1, node)); node->decReferenceCount(); } } }
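// Sketch of the anchoring idiom removeDeadTrees() uses: wrapping a child under a new
// TR::treetop node keeps the child evaluated at the same point while its original parent
// tree is discarded. TR::Node::create() adds a reference to `child`, and the matching
// decReferenceCount() drops the reference the dying parent used to hold. The helper name is
// hypothetical, and it assumes the caller is about to remove the old parent tree.
static void anchorChildAfter(TR::Compilation *comp, TR::TreeTop *tt, TR::Node *child)
   {
   TR::TreeTop *anchor = TR::TreeTop::create(comp, TR::Node::create(TR::treetop, 1, child));
   tt->insertAfter(anchor);
   child->decReferenceCount();
   }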
void OMR::CodeGenPhase::performCleanUpFlagsPhase(TR::CodeGenerator * cg, TR::CodeGenPhase * phase) { TR::TreeTop * tt; vcount_t visitCount = cg->comp()->incVisitCount(); for (tt = cg->comp()->getStartTree(); tt; tt = tt->getNextTreeTop()) { cg->cleanupFlags(tt->getNode()); } }
int32_t TR::RegDepCopyRemoval::perform() { if (!cg()->supportsPassThroughCopyToNewVirtualRegister()) return 0; discardAllNodeChoices(); TR::TreeTop *tt; for (tt = comp()->getStartTree(); tt != NULL; tt = tt->getNextTreeTop()) { TR::Node *node = tt->getNode(); switch (node->getOpCodeValue()) { case TR::BBStart: if (!node->getBlock()->isExtensionOfPreviousBlock()) { if (trace()) traceMsg(comp(), "clearing remembered node choices at start of extended block at block_%d\n", node->getBlock()->getNumber()); discardAllNodeChoices(); } if (node->getNumChildren() > 0) processRegDeps(node->getFirstChild(), tt); break; case TR::BBEnd: if (node->getNumChildren() > 0) processRegDeps(node->getFirstChild(), tt); break; default: if (node->getOpCode().isSwitch()) { TR::Node *defaultDest = node->getSecondChild(); if (defaultDest->getNumChildren() > 0) processRegDeps(defaultDest->getFirstChild(), tt); } else if (node->getOpCode().isBranch()) { int nChildren = node->getNumChildren(); // only the last child may be GlRegDeps for (int i = 0; i < nChildren - 1; i++) TR_ASSERT(node->getChild(i)->getOpCodeValue() != TR::GlRegDeps, "GlRegDeps for branch is not the last child\n"); if (nChildren > 0) { TR::Node *lastChild = node->getChild(nChildren - 1); if (lastChild->getOpCodeValue() == TR::GlRegDeps) processRegDeps(lastChild, tt); } } break; } } return 1; // a bit arbitrary... }
void TR_ExpressionsSimplification::removeCandidate(TR::Node *node, TR::TreeTop* tt)
   {
   if (node->getVisitCount() == _visitCount)
      return;

   node->setVisitCount(_visitCount);

   if (trace())
      traceMsg(comp(), "Looking at Node [%p]\n", node);

   ListIterator<TR::TreeTop> candidateTTs(_candidateTTs);
   for (TR::TreeTop *candidateTT = candidateTTs.getFirst(); candidateTT; candidateTT = candidateTTs.getNext())
      {
      if (tt != candidateTT &&
          node->getOpCode().hasSymbolReference() &&
          candidateTT->getNode()->mayKill(true).contains(node->getSymbolReference(), comp()))
         {
         if (trace())
            traceMsg(comp(), "Removing candidate %p which has aliases in the loop\n", candidateTT->getNode());
         _candidateTTs->remove(candidateTT);
         continue;
         }
      }

   bool hasSupportedChildren = true;

   // Process the children as well
   //
   for (int32_t i = 0; i < node->getNumChildren(); i++)
      {
      removeCandidate(node->getChild(i), tt);
      // A candidate's child expressions must be invariant and supported. Here we determine if they are supported.
      if (!_supportedExpressions->get(node->getChild(i)->getGlobalIndex()))
         {
         hasSupportedChildren = false;
         }
      }

   if (hasSupportedChildren && isSupportedNodeForExpressionSimplification(node))
      {
      _supportedExpressions->set(node->getGlobalIndex());
      }
   else
      {
      if (trace())
         traceMsg(comp(), " Node %p is unsupported expression because %s\n", node,
                  !hasSupportedChildren ? "it has unsupported children" : "it is itself unsupported");
      }
   }
void TR_ExpressionsSimplification::invalidateCandidates() { _visitCount = comp()->incVisitCount(); if (trace()) { traceMsg(comp(), "Checking which candidates may be invalidated\n"); ListIterator<TR::TreeTop> treeTops(_candidateTTs); for (TR::TreeTop *treeTop = treeTops.getFirst(); treeTop; treeTop = treeTops.getNext()) { traceMsg(comp(), " Candidate treetop: %p node: %p\n", treeTop, treeTop->getNode()); } } TR_ScratchList<TR::Block> blocksInLoop(trMemory()); _currentRegion->getBlocks(&blocksInLoop); ListIterator<TR::Block> blocks(&blocksInLoop); for (TR::Block *currentBlock = blocks.getFirst(); currentBlock; currentBlock = blocks.getNext()) { TR::TreeTop *tt = currentBlock->getEntry(); TR::TreeTop *exitTreeTop = currentBlock->getExit(); while (tt != exitTreeTop) { TR::Node *currentNode = tt->getNode(); if (trace()) traceMsg(comp(), "Looking at treeTop [%p]\n", currentNode); removeCandidate(currentNode, tt); tt = tt->getNextTreeTop(); } } removeUnsupportedCandidates(); }
/** * Search for direct loads in the taken side of a guard * * @param firstBlock The guard's branch destination * @param coldPathLoads BitVector of symbol reference numbers for any direct loads seen until the merge back to mainline */ static void collectColdPathLoads(TR::Block* firstBlock, TR_BitVector &coldPathLoads) { TR_Stack<TR::Block*> blocksToCheck(TR::comp()->trMemory(), 8, false, stackAlloc); blocksToCheck.push(firstBlock); TR::NodeChecklist checklist(TR::comp()); coldPathLoads.empty(); while (!blocksToCheck.isEmpty()) { TR::Block *block = blocksToCheck.pop(); for (TR::TreeTop *tt = block->getFirstRealTreeTop(); tt->getNode()->getOpCodeValue() != TR::BBEnd; tt = tt->getNextTreeTop()) collectDirectLoads(tt->getNode(), coldPathLoads, checklist); // Search for any successors that have not merged with the mainline for (auto itr = block->getSuccessors().begin(), end = block->getSuccessors().end(); itr != end; ++itr) { TR::Block *dest = (*itr)->getTo()->asBlock(); if (dest != TR::comp()->getFlowGraph()->getEnd() && dest->getPredecessors().size() == 1) blocksToCheck.push(dest); } } }
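// collectDirectLoads() is called above but not shown in this listing; a plausible shape for
// such a collector, sketched here as an assumption rather than the actual implementation, is
// a checklist-guarded walk that records the symbol reference number of every direct load it
// encounters.
static void collectDirectLoadsSketch(TR::Node *node, TR_BitVector &coldPathLoads, TR::NodeChecklist &checklist)
   {
   if (checklist.contains(node))
      return;
   checklist.add(node);

   if (node->getOpCode().isLoadVarDirect())
      coldPathLoads.set(node->getSymbolReference()->getReferenceNumber());

   for (int32_t i = 0; i < node->getNumChildren(); i++)
      collectDirectLoadsSketch(node->getChild(i), coldPathLoads, checklist);
   }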
void TR_ExpressionsSimplification::removeUnsupportedCandidates() { ListIterator<TR::TreeTop> candidateTTs(_candidateTTs); for (TR::TreeTop *candidateTT = candidateTTs.getFirst(); candidateTT; candidateTT = candidateTTs.getNext()) { TR::Node *candidate = candidateTT->getNode(); if (!_supportedExpressions->get(candidate->getGlobalIndex())) { if (trace()) traceMsg(comp(), "Removing candidate %p which is unsupported or has unsupported subexpressions\n", candidate); _candidateTTs->remove(candidateTT); } } }
static bool blockHasCalls(TR::Block *block, TR::Compilation *comp) { intptrj_t visitCount = comp->incVisitCount(); TR::TreeTop *currentTree = block->getEntry(); TR::TreeTop *exitTree = block->getExit(); bool hasCalls = false; while (!hasCalls && currentTree != exitTree) { hasCalls = examineNode(currentTree->getNode(), visitCount); currentTree = currentTree->getNextTreeTop(); } return hasCalls; }
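// examineNode() is referenced above without its body; a plausible shape, given how
// blockHasCalls() uses it (an assumption, not the actual helper), is a visit-count-guarded
// walk that reports whether any node in the tree is a call.
static bool examineNodeSketch(TR::Node *node, vcount_t visitCount)
   {
   if (node->getVisitCount() == visitCount)
      return false;
   node->setVisitCount(visitCount);

   if (node->getOpCode().isCall())
      return true;

   for (int32_t i = 0; i < node->getNumChildren(); i++)
      {
      if (examineNodeSketch(node->getChild(i), visitCount))
         return true;
      }
   return false;
   }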
int32_t OMR::Simplifier::performOnBlock(TR::Block * block) { if (block->getEntry()) { TR::TreeTop *extendedExitTree = block->getEntry()->getExtendedBlockExitTreeTop(); vcount_t visitCount = comp()->incOrResetVisitCount(); for (TR::TreeTop * tt = block->getEntry(); tt; tt = tt->getNextTreeTop()) { tt->getNode()->initializeFutureUseCounts(visitCount); if (tt == extendedExitTree) break; } comp()->incVisitCount(); simplifyExtendedBlock(block->getEntry()); } return 0; }
int32_t TR_LocalLiveRangeReduction::perform()
   {
   if (TR::Compiler->target.cpu.isZ())
      return false;

   TR::TreeTop * exitTT, * nextTT;
   TR::Block *b;
   TR::TreeTop * tt;

   // Calculate the number of TreeTops in each bb (or extended bb)
   for (tt = comp()->getStartTree(); tt; tt = nextTT)
      {
      TR::StackMemoryRegion stackMemoryRegion(*trMemory());

      TR::Node *node = tt->getNode();
      b = node->getBlock();
      exitTT = b->getExit();
      _numTreeTops = b->getNumberOfRealTreeTops() + 2; // include both BBStart/BBEnd

      // Support for extended blocks
      while ((nextTT = exitTT->getNextTreeTop()) &&
             (b = nextTT->getNode()->getBlock(), b->isExtensionOfPreviousBlock()))
         {
         _numTreeTops += b->getNumberOfRealTreeTops() + 2;
         exitTT = b->getExit();
         }

      _treesRefInfoArray = (TR_TreeRefInfo**)trMemory()->allocateStackMemory(_numTreeTops*sizeof(TR_TreeRefInfo*));
      memset(_treesRefInfoArray, 0, _numTreeTops*sizeof(TR_TreeRefInfo*));
      _movedTreesList.deleteAll();
      _depPairList.deleteAll();
      transformExtendedBlock(tt, exitTT->getNextTreeTop());
      }

   if (trace())
      traceMsg(comp(), "\nEnding LocalLiveRangeReducer\n");

   return 2;
   }
//---------------------------- collecting ref info at the beginning ----------------------------------------- void TR_LocalLiveRangeReduction::collectInfo(TR::TreeTop *entryTree,TR::TreeTop *exitTree) { TR::TreeTop *currentTree = entryTree; TR_TreeRefInfo *treeRefInfo; int32_t i = 0; int32_t maxRefCount = 0; vcount_t visitCount = comp()->getVisitCount(); while (!(currentTree == exitTree)) { treeRefInfo = new (trStackMemory()) TR_TreeRefInfo(currentTree, trMemory()); collectRefInfo(treeRefInfo, currentTree->getNode(),visitCount,&maxRefCount); _treesRefInfoArray[i++] = treeRefInfo; initPotentialDeps(treeRefInfo); treeRefInfo->resetSyms(); populatePotentialDeps(treeRefInfo,treeRefInfo->getTreeTop()->getNode()); currentTree = currentTree->getNextTreeTop(); } comp()->setVisitCount(visitCount+maxRefCount); }
static bool safeToMoveGuard(TR::Block *destination, TR::TreeTop *guardCandidate, TR::TreeTop *branchDest, TR_BitVector &privArgSymRefs) { static char *disablePrivArgMovement = feGetEnv("TR_DisableRuntimeGuardPrivArgMovement"); TR::TreeTop *start = destination ? destination->getExit() : TR::comp()->getStartTree(); if (guardCandidate->getNode()->isHCRGuard()) { for (TR::TreeTop *tt = start; tt && tt != guardCandidate; tt = tt->getNextTreeTop()) { if (tt->getNode()->canGCandReturn()) return false; } } else if (guardCandidate->getNode()->isOSRGuard()) { for (TR::TreeTop *tt = start; tt && tt != guardCandidate; tt = tt->getNextTreeTop()) { if (TR::comp()->isPotentialOSRPoint(tt->getNode(), NULL, true)) return false; } } else { privArgSymRefs.empty(); for (TR::TreeTop *tt = start; tt && tt != guardCandidate; tt = tt->getNextTreeTop()) { // It's safe to move the guard if there are only priv arg stores and live monitor stores // ahead of the guard if (tt->getNode()->getOpCodeValue() != TR::BBStart && tt->getNode()->getOpCodeValue() != TR::BBEnd && !tt->getNode()->chkIsPrivatizedInlinerArg() && !(tt->getNode()->getOpCode().hasSymbolReference() && tt->getNode()->getSymbol()->holdsMonitoredObject()) && !tt->getNode()->isNopableInlineGuard()) return false; if (tt->getNode()->chkIsPrivatizedInlinerArg() && (disablePrivArgMovement || // If the priv arg is not for this guard (guardCandidate->getNode()->getInlinedSiteIndex() > -1 && // if priv arg store does not have the same inlined site index as the guard's caller, that means it is not a priv arg for this guard, // then we cannot move the guard and its priv args up across other calls' priv args tt->getNode()->getInlinedSiteIndex() != TR::comp()->getInlinedCallSite(guardCandidate->getNode()->getInlinedSiteIndex())._byteCodeInfo.getCallerIndex()))) return false; if (tt->getNode()->chkIsPrivatizedInlinerArg()) privArgSymRefs.set(tt->getNode()->getSymbolReference()->getReferenceNumber()); if (tt->getNode()->isNopableInlineGuard() && tt->getNode()->getBranchDestination() != branchDest) return false; } } return true; }
bool TR_LocalLiveRangeReduction::moveTreeBefore(TR_TreeRefInfo *treeToMove, TR_TreeRefInfo *anchor, int32_t passNumber)
   {
   TR::TreeTop *treeToMoveTT = treeToMove->getTreeTop();
   TR::TreeTop *anchorTT = anchor->getTreeTop();

   if (treeToMoveTT->getNextRealTreeTop() == anchorTT)
      {
      addDepPair(treeToMove, anchor);
      return false;
      }

   if (!performTransformation(comp(), "%sPass %d: moving tree [%p] before Tree %p\n", OPT_DETAILS, passNumber, treeToMoveTT->getNode(), anchorTT->getNode()))
      return false;

   // printf("Moving [%p] before Tree %p\n", treeToMoveTT->getNode(), anchorTT->getNode());

   // Changing location in block
   TR::TreeTop *origPrevTree = treeToMoveTT->getPrevTreeTop();
   TR::TreeTop *origNextTree = treeToMoveTT->getNextTreeTop();
   origPrevTree->setNextTreeTop(origNextTree);
   origNextTree->setPrevTreeTop(origPrevTree);

   TR::TreeTop *prevTree = anchorTT->getPrevTreeTop();
   anchorTT->setPrevTreeTop(treeToMoveTT);
   treeToMoveTT->setNextTreeTop(anchorTT);
   treeToMoveTT->setPrevTreeTop(prevTree);
   prevTree->setNextTreeTop(treeToMoveTT);

   // UPDATE REFINFO
   // Find locations of treeTops in TreeTopsRefInfo array:
   //   startIndex points to the currentTree that has moved
   //   endIndex points to the treeTop after which we moved the tree (nextTree)
   int32_t startIndex = getIndexInArray(treeToMove);
   int32_t endIndex = getIndexInArray(anchor) - 1;
   int32_t i = 0;

   for (i = startIndex + 1; i <= endIndex; i++)
      {
      TR_TreeRefInfo *currentTreeRefInfo = _treesRefInfoArray[i];
      List<TR::Node> *firstList = currentTreeRefInfo->getFirstRefNodesList();
      List<TR::Node> *midList = currentTreeRefInfo->getMidRefNodesList();
      List<TR::Node> *lastList = currentTreeRefInfo->getLastRefNodesList();
      List<TR::Node> *M_firstList = treeToMove->getFirstRefNodesList();
      List<TR::Node> *M_midList = treeToMove->getMidRefNodesList();
      List<TR::Node> *M_lastList = treeToMove->getLastRefNodesList();

      if (trace())
         {
         traceMsg(comp(), "Before move:\n");
         printRefInfo(treeToMove);
         printRefInfo(currentTreeRefInfo);
         }

      updateRefInfo(treeToMove->getTreeTop()->getNode(), currentTreeRefInfo, treeToMove, false);
      treeToMove->resetSyms();
      currentTreeRefInfo->resetSyms();
      populatePotentialDeps(currentTreeRefInfo, currentTreeRefInfo->getTreeTop()->getNode());
      populatePotentialDeps(treeToMove, treeToMove->getTreeTop()->getNode());

      if (trace())
         {
         traceMsg(comp(), "After move:\n");
         printRefInfo(treeToMove);
         printRefInfo(currentTreeRefInfo);
         traceMsg(comp(), "------------------------\n");
         }
      }

   TR_TreeRefInfo *temp = _treesRefInfoArray[startIndex];
   for (i = startIndex; i < endIndex; i++)
      {
      _treesRefInfoArray[i] = _treesRefInfoArray[i+1];
      }
   _treesRefInfoArray[endIndex] = temp;

#if defined(DEBUG) || defined(PROD_WITH_ASSUMES)
   if (!(comp()->getOption(TR_EnableParanoidOptCheck) || debug("paranoidOptCheck")))
      return true;

   // Verifier
      {
      TR::StackMemoryRegion stackMemoryRegion(*trMemory());

      vcount_t visitCount = comp()->getVisitCount();
      int32_t maxRefCount = 0;
      TR::TreeTop *tt;
      TR_TreeRefInfo **treesRefInfoArrayTemp = (TR_TreeRefInfo**)trMemory()->allocateStackMemory(_numTreeTops*sizeof(TR_TreeRefInfo*));
      memset(treesRefInfoArrayTemp, 0, _numTreeTops*sizeof(TR_TreeRefInfo*));
      TR_TreeRefInfo *treeRefInfoTemp;

      // Collect info
      for (int32_t i = 0; i < _numTreeTops-1; i++)
         {
         tt = _treesRefInfoArray[i]->getTreeTop();
         treeRefInfoTemp = new (trStackMemory()) TR_TreeRefInfo(tt, trMemory());
         collectRefInfo(treeRefInfoTemp, tt->getNode(), visitCount, &maxRefCount);
         treesRefInfoArrayTemp[i] = treeRefInfoTemp;
         }

      comp()->setVisitCount(visitCount+maxRefCount);

      for (int32_t i = 0; i < _numTreeTops-1; i++)
         {
         if (!verifyRefInfo(treesRefInfoArrayTemp[i]->getFirstRefNodesList(), _treesRefInfoArray[i]->getFirstRefNodesList()))
            {
            printOnVerifyError(_treesRefInfoArray[i], treesRefInfoArrayTemp[i]);
            TR_ASSERT(0, "fail to verify firstRefNodesList for %p\n", _treesRefInfoArray[i]->getTreeTop()->getNode());
            }

         if (!verifyRefInfo(treesRefInfoArrayTemp[i]->getMidRefNodesList(), _treesRefInfoArray[i]->getMidRefNodesList()))
            {
            printOnVerifyError(_treesRefInfoArray[i], treesRefInfoArrayTemp[i]);
            TR_ASSERT(0, "fail to verify midRefNodesList for %p\n", _treesRefInfoArray[i]->getTreeTop()->getNode());
            }

         if (!verifyRefInfo(treesRefInfoArrayTemp[i]->getLastRefNodesList(), _treesRefInfoArray[i]->getLastRefNodesList()))
            {
            printOnVerifyError(_treesRefInfoArray[i], treesRefInfoArrayTemp[i]);
            TR_ASSERT(0, "fail to verify lastRefNodesList for %p\n", _treesRefInfoArray[i]->getTreeTop()->getNode());
            }
         }
      } // scope of the stack memory region
#endif

   return true;
   }
TR_BitVector * addVeryRefinedCallAliasSets(TR::ResolvedMethodSymbol * methodSymbol, TR_BitVector * aliases, List<void> * methodsPeeked) { TR::Compilation *comp = TR::comp(); void * methodId = methodSymbol->getResolvedMethod()->getPersistentIdentifier(); if (methodsPeeked->find(methodId)) { // This can't be allocated into the alias region as it must be accessed across optimizations TR_BitVector *heapAliases = new (comp->trHeapMemory()) TR_BitVector(comp->getSymRefCount(), comp->trMemory(), heapAlloc, growable); *heapAliases |= *aliases; return heapAliases; } // stop if the peek is getting very deep // if (methodsPeeked->getSize() >= PEEK_THRESHOLD) return 0; methodsPeeked->add(methodId); dumpOptDetails(comp, "O^O REFINING ALIASES: Peeking into the IL to refine aliases \n"); if (!methodSymbol->getResolvedMethod()->genMethodILForPeeking(methodSymbol, comp, true)) return 0; TR::SymbolReferenceTable * symRefTab = comp->getSymRefTab(); for (TR::TreeTop * tt = methodSymbol->getFirstTreeTop(); tt; tt = tt->getNextTreeTop()) { TR::Node *node = tt->getNode(); if (node->getOpCode().isResolveCheck()) return 0; if ((node->getOpCodeValue() == TR::treetop) || (node->getOpCodeValue() == TR::compressedRefs) || node->getOpCode().isCheck()) node = node->getFirstChild(); if (node->getOpCode().isStore()) { TR::SymbolReference * symRefInCallee = node->getSymbolReference(), * symRefInCaller; TR::Symbol * symInCallee = symRefInCallee->getSymbol(); TR::DataType type = symInCallee->getDataType(); if (symInCallee->isShadow()) { if (symInCallee->isArrayShadowSymbol()) symRefInCaller = symRefTab->getSymRef(symRefTab->getArrayShadowIndex(type)); else if (symInCallee->isArrayletShadowSymbol()) symRefInCaller = symRefTab->getSymRef(symRefTab->getArrayletShadowIndex(type)); else symRefInCaller = symRefTab->findShadowSymbol(symRefInCallee->getOwningMethod(comp), symRefInCallee->getCPIndex(), type); if (symRefInCaller) { if (symRefInCaller->reallySharesSymbol(comp)) symRefInCaller->setSharedShadowAliases(aliases, symRefTab); aliases->set(symRefInCaller->getReferenceNumber()); } } else if (symInCallee->isStatic()) { symRefInCaller = symRefTab->findStaticSymbol(symRefInCallee->getOwningMethod(comp), symRefInCallee->getCPIndex(), type); if (symRefInCaller) { if (symRefInCaller->reallySharesSymbol(comp)) symRefInCaller->setSharedStaticAliases(aliases, symRefTab); else aliases->set(symRefInCaller->getReferenceNumber()); } } } else if (node->getOpCode().isCall()) { if (node->getOpCode().isCallIndirect()) return 0; TR::ResolvedMethodSymbol * calleeSymbol = node->getSymbol()->getResolvedMethodSymbol(); if (!calleeSymbol) return 0; TR_ResolvedMethod * calleeMethod = calleeSymbol->getResolvedMethod(); if (!calleeMethod->isCompilable(comp->trMemory()) || calleeMethod->isJNINative()) return 0; if (!addVeryRefinedCallAliasSets(calleeSymbol, aliases, methodsPeeked)) return 0; } else if (node->getOpCodeValue() == TR::monent) return 0; } // This can't be allocated into the alias region as it must be accessed across optimizations TR_BitVector *heapAliases = new (comp->trHeapMemory()) TR_BitVector(comp->getSymRefCount(), comp->trMemory(), heapAlloc, growable); *heapAliases |= *aliases; return heapAliases; }
static int cacheStringAppend(TR::ValuePropagation *vp, TR::Node *node)
   {
   return 0; // early return: the transformation below is currently disabled

   if (!vp->lastTimeThrough())
      return 0;

   TR::TreeTop *tt = vp->_curTree;
   TR::TreeTop *newTree = tt;
   TR::TreeTop *startTree = 0;
   TR::TreeTop *exitTree = vp->_curBlock->getExit();
   TR::Node *newBuffer;

   if (node->getNumChildren() >= 1)
      newBuffer = node->getFirstChild();
   else
      return 0;

   enum {MAX_STRINGS = 2};
   int initWithString = 0;
   bool initWithInteger = false;
   TR::TreeTop *appendTree[MAX_STRINGS+1];
   TR::Node *appendedString[MAX_STRINGS+1];
   char pattern[MAX_STRINGS+1];
   int stringCount = 0;
   bool useStringBuffer = false;
   TR::SymbolReference *valueOfSymRef[MAX_STRINGS+1];
   bool success = false;
   char *sigBuffer = "java/lang/StringBuffer.<init>(";
   char *sigBuilder = "java/lang/StringBuilder.<init>(";
   char *sigInit = "java/lang/String.<init>(";

   ////////////////////////////////////////////////////////////////////////////////////
   if (checkMethodSignature(vp, node->getSymbolReference(), sigInit))
      {
      TR::Symbol *symbol = node->getSymbolReference()->getSymbol();
      TR_ResolvedMethod *m = symbol->castToResolvedMethodSymbol()->getResolvedMethod();
      if (strncmp(m->signatureChars(), "(Ljava/lang/String;Ljava/lang/String;)V", m->signatureLength()) == 0)
         {
         vp->_cachedStringPeepHolesVcalls.add(new (vp->comp()->trStackMemory()) TR::ValuePropagation::VPTreeTopPair(tt, tt->getPrevRealTreeTop()));
         }
      }

   ////////////////////////////////////////////////////////////////////////////////////
   if (checkMethodSignature(vp, node->getSymbolReference(), sigBuffer))
      {
      useStringBuffer = true;
      success = true;
      }
   else if (checkMethodSignature(vp, node->getSymbolReference(), sigBuilder))
      {
      success = true;
      useStringBuffer = false;
      }
   else
      {
      return 0;
      }

   if (success)
      {
      TR::Symbol *symbol = node->getSymbolReference()->getSymbol();
      TR_ResolvedMethod *m = symbol->castToResolvedMethodSymbol()->getResolvedMethod();
      if (strncmp(m->signatureChars(), "()V", m->signatureLength()) == 0)
         {
         // Diagnostics
         }
      else
         {
         return 0;
         }
      }
   else // <init> not found (could be unresolved)
      {
      return 0;
      }

   // Now search for StringBuffer.append calls that are chained to one another
   TR::TreeTop *lastAppendTree = 0; // updated when we find an append
   TR::Node *child = newBuffer;
   while (1)
      {
      startTree = tt->getNextRealTreeTop();
      appendedString[stringCount] = 0;
      int visitCount = 0;

      if (useStringBuffer)
         tt = searchForStringAppend(vp, "java/lang/StringBuffer.append(", startTree, exitTree, TR::acall, child, visitCount, appendedString + stringCount);
      else
         tt = searchForStringAppend(vp, "java/lang/StringBuilder.append(", startTree, exitTree, TR::acall, child, visitCount, appendedString + stringCount);

      if (appendedString[stringCount]) // we found it
         {
         appendTree[stringCount] = tt;

         // we could exit here if too many appends are chained
         if (stringCount >= MAX_STRINGS)
            return 0;

         // see which type of append we have
         TR::Symbol *symbol = tt->getNode()->getFirstChild()->getSymbolReference()->getSymbol();
         TR_ASSERT(symbol->isResolvedMethod(), "assertion failure");
         TR::ResolvedMethodSymbol *method = symbol->castToResolvedMethodSymbol();
         TR_ASSERT(method, "assertion failure");
         TR_ResolvedMethod *m = method->getResolvedMethod();

         if (strncmp(m->signatureChars(), "(Ljava/lang/String;)", 20) == 0)
            {
            pattern[stringCount] = 'S';
            valueOfSymRef[stringCount] = 0; // don't need conversion to string
            }
         else // appending something that needs conversion using valueOf
            {
            TR::SymbolReference *symRefForValueOf = 0;
            // In the following we can compare only (C) because we know that
            // StringBuffer.append returns a StringBuffer.
            char *sigBuffer = m->signatureChars();
            TR_ASSERT(m->signatureLength() >= 3, "The minimum signature length should be 3 for ()V");
            }
         stringCount++;
         }
      else // the chain of appends is broken
         {
         appendTree[stringCount] = 0;
         pattern[stringCount] = 0; // string terminator
         break;
         }

      lastAppendTree = tt;
      child = tt->getNode()->getFirstChild(); // the first node is a NULLCHK and its child is the call
      } // end while

   if (stringCount < 2)
      return 0; // cannot apply StringPeepholes
   if (stringCount > MAX_STRINGS)
      return 0;
   if (stringCount == 3)
      return 0; // same as above

   TR_ASSERT(lastAppendTree, "If stringCount >= 2 then we must have found an append");

   // Now look for the toString call
   TR::TreeTop *toStringTree = 0;
   //visitCount = vp->comp()->incVisitCount();
   int visitCount = 0;
   tt = searchForToStringCall(vp, lastAppendTree->getNextRealTreeTop(), exitTree, lastAppendTree->getNode()->getFirstChild(), visitCount, &toStringTree, useStringBuffer);
   if (!toStringTree)
      return 0;

   vp->_cachedStringBufferVcalls.add(new (vp->comp()->trStackMemory()) TR::ValuePropagation::VPStringCached(appendTree[0], appendTree[1], appendedString[0], appendedString[1], newTree, toStringTree));
   return 1;
   }
int32_t TR_CatchBlockRemover::perform() { TR::CFG *cfg = comp()->getFlowGraph(); if (cfg == NULL) { if (trace()) traceMsg(comp(), "Can't do Catch Block Removal, no CFG\n"); return 0; } if (trace()) traceMsg(comp(), "Starting Catch Block Removal\n"); bool thereMayBeRemovableCatchBlocks = false; { TR::StackMemoryRegion stackMemoryRegion(*trMemory()); TR::Block *block; ListIterator<TR::CFGEdge> edgeIterator; // Go through all blocks that have exception successors and see if any of them // are not reached. Mark each of these edges with a visit count so they can // be identified later. // vcount_t visitCount = comp()->incOrResetVisitCount(); TR::CFGNode *cfgNode; for (cfgNode = cfg->getFirstNode(); cfgNode; cfgNode = cfgNode->getNext()) { if (cfgNode->getExceptionSuccessors().empty()) continue; block = toBlock(cfgNode); uint32_t reachedExceptions = 0; TR::TreeTop *treeTop; for (treeTop = block->getEntry(); treeTop != block->getExit(); treeTop = treeTop->getNextTreeTop()) { reachedExceptions |= treeTop->getNode()->exceptionsRaised(); if (treeTop->getNode()->getOpCodeValue() == TR::monexitfence) // for live monitor metadata reachedExceptions |= TR::Block::CanCatchMonitorExit; } if (reachedExceptions & TR::Block::CanCatchUserThrows) continue; for (auto edge = block->getExceptionSuccessors().begin(); edge != block->getExceptionSuccessors().end();) { TR::CFGEdge * current = *(edge++); TR::Block *catchBlock = toBlock(current->getTo()); if (catchBlock->isOSRCodeBlock() || catchBlock->isOSRCatchBlock()) continue; if (!reachedExceptions && performTransformation(comp(), "%sRemove redundant exception edge from block_%d at [%p] to catch block_%d at [%p]\n", optDetailString(), block->getNumber(), block, catchBlock->getNumber(), catchBlock)) { cfg->removeEdge(block, catchBlock); thereMayBeRemovableCatchBlocks = true; } else { if (!catchBlock->canCatchExceptions(reachedExceptions)) { current->setVisitCount(visitCount); thereMayBeRemovableCatchBlocks = true; } } } } bool edgesRemoved = false; // Now look to see if there are any catch blocks for which all exception // predecessors have the visit count set. If so, the block is unreachable and // can be removed. // If only some of the exception predecessors are marked, these edges are // left in place to identify the try/catch structure properly. // while (thereMayBeRemovableCatchBlocks) { thereMayBeRemovableCatchBlocks = false; for (cfgNode = cfg->getFirstNode(); cfgNode; cfgNode = cfgNode->getNext()) { if (cfgNode->getExceptionPredecessors().empty()) continue; auto edgeIt = cfgNode->getExceptionPredecessors().begin(); for (; edgeIt != cfgNode->getExceptionPredecessors().end(); ++edgeIt) { if ((*edgeIt)->getVisitCount() != visitCount) break; } if (edgeIt == cfgNode->getExceptionPredecessors().end() && performTransformation(comp(), "%sRemove redundant catch block_%d at [%p]\n", optDetailString(), cfgNode->getNumber(), cfgNode)) { while (!cfgNode->getExceptionPredecessors().empty()) { cfg->removeEdge(cfgNode->getExceptionPredecessors().front()); } edgesRemoved = true; thereMayBeRemovableCatchBlocks = true; } } } // Any transformations invalidate use/def and value number information // if (edgesRemoved) { optimizer()->setUseDefInfo(NULL); optimizer()->setValueNumberInfo(NULL); requestOpt(OMR::treeSimplification, true); } } // scope of the stack memory region if (trace()) traceMsg(comp(), "\nEnding Catch Block Removal\n"); return 1; // actual cost }
TR_ExpressionsSimplification::LoopInfo* TR_ExpressionsSimplification::findLoopInfo(TR_RegionStructure* region)
   {
   ListIterator<TR::CFGEdge> exitEdges(&region->getExitEdges());

   if (region->getExitEdges().getSize() != 1)
      {
      if (trace())
         traceMsg(comp(), "Region with more than 1 exit edges can't be handled\n");
      return 0;
      }

   TR_StructureSubGraphNode* exitNode = toStructureSubGraphNode(exitEdges.getFirst()->getFrom());

   if (!exitNode->getStructure()->asBlock())
      {
      if (trace())
         traceMsg(comp(), "The exit block can't be found\n");
      return 0;
      }

   TR::Block *exitBlock = exitNode->getStructure()->asBlock()->getBlock();
   TR::Node *lastTreeInExitBlock = exitBlock->getLastRealTreeTop()->getNode();

   if (trace())
      {
      traceMsg(comp(), "The exit block is %d\n", exitBlock->getNumber());
      traceMsg(comp(), "The branch node is %p\n", lastTreeInExitBlock);
      }

   if (!lastTreeInExitBlock->getOpCode().isBranch())
      {
      if (trace())
         traceMsg(comp(), "The branch node couldn't be found\n");
      return 0;
      }

   if (lastTreeInExitBlock->getNumChildren() < 2)
      {
      if (trace())
         traceMsg(comp(), "The branch node has less than 2 children\n");
      return 0;
      }

   TR::Node *firstChildOfLastTree = lastTreeInExitBlock->getFirstChild();
   TR::Node *secondChildOfLastTree = lastTreeInExitBlock->getSecondChild();

   if (!firstChildOfLastTree->getOpCode().hasSymbolReference())
      {
      if (trace())
         traceMsg(comp(), "The branch node's first child node %p - its opcode does not have a symbol reference\n", firstChildOfLastTree);
      return 0;
      }

   TR::SymbolReference *firstChildSymRef = firstChildOfLastTree->getSymbolReference();

   if (trace())
      traceMsg(comp(), "Symbol Reference: %p Symbol: %p\n", firstChildSymRef, firstChildSymRef->getSymbol());

   // Locate the induction variable that matches with the exit node symbol
   //
   TR_InductionVariable *indVar = region->findMatchingIV(firstChildSymRef);

   if (!indVar)
      return 0;

   if (!indVar->getIncr()->asIntConst())
      {
      if (trace())
         traceMsg(comp(), "Increment is not a constant\n");
      return 0;
      }

   int32_t increment = indVar->getIncr()->getLowInt();

   _visitCount = comp()->incVisitCount();

   bool indVarWrittenAndUsedUnexpectedly = false;
   if (firstChildOfLastTree->getReferenceCount() > 1)
      {
      TR::TreeTop *cursorTreeTopInExitBlock = exitBlock->getEntry();
      TR::TreeTop *exitTreeTopInExitBlock = exitBlock->getExit();

      bool loadSeen = false;
      while (cursorTreeTopInExitBlock != exitTreeTopInExitBlock)
         {
         TR::Node *cursorNode = cursorTreeTopInExitBlock->getNode();

         if (checkForLoad(cursorNode, firstChildOfLastTree))
            loadSeen = true;

         if (!cursorNode->getOpCode().isStore() &&
             (cursorNode->getNumChildren() > 0))
            cursorNode = cursorNode->getFirstChild();

         if (cursorNode->getOpCode().isStore() &&
             (cursorNode->getSymbolReference() == firstChildSymRef))
            {
            indVarWrittenAndUsedUnexpectedly = true;
            if ((cursorNode->getFirstChild() == firstChildOfLastTree) ||
                !loadSeen)
               indVarWrittenAndUsedUnexpectedly = false;
            else
               break;
            }

         cursorTreeTopInExitBlock = cursorTreeTopInExitBlock->getNextTreeTop();
         }
      }

   if (indVarWrittenAndUsedUnexpectedly)
      {
      return 0;
      }

   int32_t lowerBound;
   int32_t upperBound = 0;
   TR::Node *bound = 0;
   bool equals = false;

   switch (lastTreeInExitBlock->getOpCodeValue())
      {
      case TR::ificmplt:
      case TR::ificmpgt:
         equals = true;
         // deliberate fall-through
      case TR::ificmple:
      case TR::ificmpge:
         if (!(indVar->getEntry() && indVar->getEntry()->asIntConst()))
            {
            if (trace())
               traceMsg(comp(), "Entry value is not a constant\n");
            return 0;
            }
         lowerBound = indVar->getEntry()->getLowInt();

         if (secondChildOfLastTree->getOpCode().isLoadConst())
            {
            upperBound = secondChildOfLastTree->getInt();
            }
         else if (secondChildOfLastTree->getOpCode().isLoadVar())
            {
            bound = secondChildOfLastTree;
            }
         else
            {
            if (trace())
               traceMsg(comp(), "Second child is not a const or a load\n");
            return 0;
            }
         return new (trStackMemory()) LoopInfo(bound, lowerBound, upperBound, increment, equals);

      default:
         if (trace())
            traceMsg(comp(), "The condition has not been implemented\n");
         return 0;
      }

   return 0;
   }
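// Sketch of how the values gathered into a LoopInfo above (entry value, constant bound,
// increment, and the strictness of the comparison) determine an iteration count for a
// counted-up loop. This is an illustrative helper only; the names are hypothetical and it is
// not the transformation TR_ExpressionsSimplification itself performs.
static int64_t estimatedIterationCount(int32_t entryValue, int32_t bound, int32_t increment, bool strictComparison)
   {
   if (increment <= 0)
      return -1; // this sketch only handles loops counting upward

   // The last value the exit test still accepts: i < bound admits one less than i <= bound.
   int64_t lastAccepted = (int64_t)bound - (strictComparison ? 1 : 0);
   if (lastAccepted < entryValue)
      return 0; // the loop test fails on entry

   return (lastAccepted - entryValue) / increment + 1;
   }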
bool TR::ILValidator::treesAreValid(TR::TreeTop *start, TR::TreeTop *stop)
   {
   checkSoundness(start, stop);

   for (PostorderNodeOccurrenceIterator iter(start, _comp, "VALIDATOR"); iter != stop; ++iter)
      {
      updateNodeState(iter);

      // General node validation
      //
      validateNode(iter);

      //
      // Additional specific kinds of validation
      //

      TR::Node *node = iter.currentNode();
      if (node->getOpCodeValue() == TR::BBEnd)
         {
         // Determine whether this is the end of an extended block
         //
         bool isEndOfExtendedBlock = false;
         TR::TreeTop *nextTree = iter.currentTree()->getNextTreeTop();
         if (nextTree)
            {
            validityRule(iter, nextTree->getNode()->getOpCodeValue() == TR::BBStart, "Expected BBStart after BBEnd");
            isEndOfExtendedBlock = ! nextTree->getNode()->getBlock()->isExtensionOfPreviousBlock();
            }
         else
            {
            isEndOfExtendedBlock = true;
            }

         if (isEndOfExtendedBlock)
            validateEndOfExtendedBlock(iter);
         }

      auto opcode = node->getOpCode();

      if (opcode.expectedChildCount() != ILChildProp::UnspecifiedChildCount)
         {
         // Validate child expectations
         //
         const auto expChildCount = opcode.expectedChildCount();
         const auto actChildCount = node->getNumChildren();

         // validate child count
         if (!opcode.canHaveGlRegDeps())
            {
            // in the common case, no GlRegDeps child is expected nor present
            validityRule(iter, actChildCount == expChildCount,
                         "Child count %d does not match expected value of %d", actChildCount, expChildCount);
            }
         else if (actChildCount == (expChildCount + 1))
            {
            // adjust expected child number to account for a possible extra GlRegDeps
            // child and make sure the last child is actually a GlRegDeps
            validityRule(iter, node->getChild(actChildCount - 1)->getOpCodeValue() == TR::GlRegDeps,
                         "Child count %d does not match expected value of %d (%d without GlRegDeps) and last child is not a GlRegDeps",
                         actChildCount, expChildCount + 1, expChildCount);
            }
         else
            {
            // if expected and actual child counts don't match, then the child
            // count is just wrong, even with an expected GlRegDeps
            validityRule(iter, actChildCount == expChildCount,
                         "Child count %d matches neither expected values of %d (without GlRegDeps) nor %d (with GlRegDeps)",
                         actChildCount, expChildCount, expChildCount + 1);
            }

         // validate child types
         for (auto i = 0; i < actChildCount; ++i)
            {
            auto childOpcode = node->getChild(i)->getOpCode();
            if (childOpcode.getOpCodeValue() != TR::GlRegDeps)
               {
               const auto expChildType = opcode.expectedChildType(i);
               const auto actChildType = childOpcode.getDataType().getDataType();
               const auto expChildTypeName = expChildType == ILChildProp::UnspecifiedChildType ?
                                             "UnspecifiedChildType" : TR::DataType::getName(expChildType);
               const auto actChildTypeName = TR::DataType::getName(actChildType);
               validityRule(iter, expChildType == ILChildProp::UnspecifiedChildType || actChildType == expChildType,
                            "Child %d has unexpected type %s (expected %s)", i, actChildTypeName, expChildTypeName);
               }
            else
               {
               // make sure the node is allowed to have a GlRegDeps child
               // and make sure that it is the last child
               validityRule(iter, opcode.canHaveGlRegDeps() && (i == actChildCount - 1), "Unexpected GlRegDeps child %d", i);
               }
            }
         }
      }

   return _isValidSoFar;
   }