// Per-block step of the register anticipatability analysis (used by shrink
// wrapping).  Snapshots this block's out-set, folds exception-path info into
// it, and then adds the block's register-usage (RUSE) bits into the working
// in-set vectors.  Note: _regularInfo/_exceptionInfo are the analysis's
// shared working vectors, updated in place.
void TR_RegisterAnticipatability::analyzeTreeTopsInBlockStructure(TR_BlockStructure *blockStructure)
   {
   int32_t blockNum = blockStructure->getBlock()->getNumber();

   // save the outSetInfo for later use
   //
   copyFromInto(_regularInfo, _outSetInfo[blockNum]);

   // now compose the exception info into the outSet
   // for this block
   //
   compose(_regularInfo, _exceptionInfo);
   compose(_outSetInfo[blockNum], _exceptionInfo);

   // compute the _inSet = _outSet + RUSE
   //
   *_regularInfo |= *_registerUsageInfo[blockNum];
   // Exception paths get the same RUSE bits as the normal path.
   *_exceptionInfo |= *_registerUsageInfo[blockNum]; //FIXME: is this too conservative?

   if (comp()->getOption(TR_TraceShrinkWrapping))
      {
      // NOTE(review): only the normal-path vector is traced here; the
      // exception vector is not dumped — presumably intentional, but verify.
      traceMsg(comp(), "Normal info of block_%d : ", blockNum);
      _regularInfo->print(comp());
      traceMsg(comp(), "\n");
      }
   }
// Seeds the reaching-definitions analysis at the method entry (block 0):
// every expanded parameter/field definition that exists on entry is made to
// reach the start of the method, and the result is recorded as block 0's
// analysis info.
void TR_ReachingDefinitions::analyzeBlockZeroStructure(TR_BlockStructure *blockStructure)
   {
   // Definitions that are live on entry (initial parameter and field defs)
   // all reach the method entry point.
   //
   int32_t entryDefCount = _useDefInfo->getNumExpandedDefsOnEntry();
   if (entryDefCount)
      _regularInfo->setAll(entryDefCount);

   // Lazily create the container for block 0, then publish the entry set.
   //
   if (_blockAnalysisInfo[0] == NULL)
      allocateBlockInfoContainer(&_blockAnalysisInfo[0], _regularInfo);
   copyFromInto(_regularInfo, _blockAnalysisInfo[0]);
   }
void TR_RegisterAnticipatability::initializeRegisterUsageInfo() { // initialize outSets bitvector as well // TR_BitVector **originalRegUsageInfo = _registerUsageInfo; _registerUsageInfo = (TR_BitVector **) trMemory()->allocateStackMemory(_numberOfNodes * sizeof(TR_BitVector *)); _outSetInfo = (TR_BitVector **) trMemory()->allocateStackMemory(_numberOfNodes * sizeof(TR_BitVector *)); for (int32_t i = 0; i < _numberOfNodes; i++) { _registerUsageInfo[i] = new (trStackMemory()) TR_BitVector(_numberOfBits, trMemory(), stackAlloc); copyFromInto(originalRegUsageInfo[i], _registerUsageInfo[i]); _outSetInfo[i] = new (trStackMemory()) TR_BitVector(_numberOfBits, trMemory(), stackAlloc); _outSetInfo[i]->empty(); } }
// Computes the "latestness" sets of partial-redundancy elimination / lazy
// code motion.  For each block b with at least one (normal or exception)
// successor, this computes:
//
//    in(b) = Delayed_in(b) & ( DownwardExposedLocalAntic(b)
//                              | ~( intersect over succ s of Delayed_in(s) ) )
//
// which is exactly the statement sequence below: negate the intersection of
// the successors' delayedness in-sets, OR in the block's downward-exposed
// local anticipatability, then AND with the block's own delayedness in-set.
TR_Latestness::TR_Latestness(TR::Compilation *comp, TR::Optimizer *optimizer, TR_Structure *rootStructure, bool trace)
   : TR_BackwardIntersectionBitVectorAnalysis(comp, comp->getFlowGraph(), optimizer, trace)
   {
   // Run the prerequisite delayedness analysis; its in-sets feed the
   // computation below, and its supported-nodes array is shared.
   _delayedness = new (comp->allocator()) TR_Delayedness(comp, optimizer, rootStructure, trace);
   _supportedNodesAsArray = _delayedness->_supportedNodesAsArray;

   if (trace)
      traceMsg(comp, "Starting Latestness\n");

   TR::CFG *cfg = comp->getFlowGraph();
   _numberOfNodes = cfg->getNextNodeNumber();
   TR_ASSERT(_numberOfNodes > 0, "Latestness, node numbers not assigned");

   _numberOfBits = getNumberOfBits();

   // One in-set container per CFG node.
   _inSetInfo = (ContainerType **)trMemory()->allocateStackMemory(_numberOfNodes*sizeof(ContainerType *));
   for (int32_t i=0;i<_numberOfNodes;i++)
      allocateContainer(_inSetInfo+i);

   // Allocate temp bit vectors from block info, since it is local to this analysis
   ContainerType *intersection, *negation;
   allocateBlockInfoContainer(&intersection);
   allocateBlockInfoContainer(&negation);

   TR::CFGNode *nextNode;
   for (nextNode = cfg->getFirstNode(); nextNode; nextNode = nextNode->getNext())
      {
      TR_BlockStructure *blockStructure = (toBlock(nextNode))->getStructureOf();

      // Skip nodes with no structure and blocks with no successors of any
      // kind (their latestness in-set stays as allocated).
      if ((blockStructure == NULL) ||
          (blockStructure->getBlock()->getSuccessors().empty() &&
           blockStructure->getBlock()->getExceptionSuccessors().empty()))
         continue;

      /////analyzeTreeTopsInBlockStructure(blockStructure);
      /////analysisInfo->_containsExceptionTreeTop = _containsExceptionTreeTop;

      // intersection := intersect over all successors of Delayed_in(succ);
      // initializeInfo() sets up the identity element for compose().
      initializeInfo(intersection);
      for (auto succ = nextNode->getSuccessors().begin(); succ != nextNode->getSuccessors().end(); ++succ)
         {
         TR::CFGNode *succBlock = (*succ)->getTo();
         compose(intersection, _delayedness->_inSetInfo[succBlock->getNumber()]);
         }

      /////if (getAnalysisInfo(blockStructure)->_containsExceptionTreeTop)
         {
         // Exception successors participate in the intersection too
         // (the guarding condition above was disabled; the bare scope
         // block is what remains of it).
         for (auto succ = nextNode->getExceptionSuccessors().begin(); succ != nextNode->getExceptionSuccessors().end(); ++succ)
            {
            TR::CFGNode *succBlock = (*succ)->getTo();
            compose(intersection, _delayedness->_inSetInfo[succBlock->getNumber()]);
            }
         }

      // negation := ~intersection (complement w.r.t. the full bit universe).
      negation->setAll(_numberOfBits);
      *negation -= *intersection;

      copyFromInto(negation, _inSetInfo[blockStructure->getNumber()]);
      // OR in the block's downward-exposed local anticipatability ...
      *(_inSetInfo[blockStructure->getNumber()]) |= *(_delayedness->_earliestness->_globalAnticipatability->_localAnticipatability.getDownwardExposedAnalysisInfo(blockStructure->getBlock()->getNumber()));
      // ... and AND with the block's own delayedness in-set.
      *(_inSetInfo[blockStructure->getNumber()]) &= *(_delayedness->_inSetInfo[blockStructure->getNumber()]);

      if (trace)
         {
         traceMsg(comp, "\nIn Set of Block : %d\n", blockStructure->getNumber());
         _inSetInfo[blockStructure->getNumber()]->print(comp);
         }
      }

   if (trace)
      traceMsg(comp, "\nEnding Latestness\n");

   // Null out info that will not be used by callers
   _delayedness->_inSetInfo = NULL;
   _blockAnalysisInfo = NULL;
   }