Example #1
void LinearScan::preAllocSpillLocAux(Trace* trace, uint32 numSpillLocs) {
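  // Walk every Spill in the trace; memory spill slots whose index lands in
  // the pre-allocated region are bumped past the return-address slot.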
  for (IRInstruction* inst : trace->getInstructionList()) {
    if (inst->getOpcode() == Spill) {
      SSATmp* dst = inst->getDst();
      for (int index = 0; index < dst->numNeededRegs(); ++index) {
        assert(!dst->hasReg(index));
        if (dst->getSpillInfo(index).type() == SpillInfo::Memory) {
          uint32 spillLoc = dst->getSpillInfo(index).mem();
          // Native stack layout:
          // |               |
          // +---------------+
          // |               |  <-- spill[5..]
          // | pre allocated |  <-- spill[4]
          // |  (16 slots)   |  <-- spill[3]
          // +---------------+
          // |  return addr  |
          // +---------------+
          // |    extra      |  <-- spill[2]
          // |    spill      |  <-- spill[1]
          // |  locations    |  <-- spill[0]
          // +---------------+  <-- %rsp
          // If a spill location falls into the pre-allocated region, we
          // need to increase its index by 1 to avoid overwriting the
          // return address.
          if (spillLoc + NumPreAllocatedSpillLocs >= numSpillLocs) {
            dst->setSpillInfo(index, SpillInfo(spillLoc + 1));
          }
        }
      }
    }
  }
}
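
The index adjustment above can be checked in isolation. Below is a minimal standalone sketch (not HHVM code; kPreAllocatedSpillLocs and adjustSpillLoc are assumed names standing in for NumPreAllocatedSpillLocs and the inline logic): slots at or above numSpillLocs - kPreAllocatedSpillLocs sit past the return address, so their index is shifted up by one before it is turned into an offset from %rsp.

#include <cstdint>

// Assumed to mirror NumPreAllocatedSpillLocs in the code above.
constexpr uint32_t kPreAllocatedSpillLocs = 16;

uint32_t adjustSpillLoc(uint32_t spillLoc, uint32_t numSpillLocs) {
  // Slots 0 .. (numSpillLocs - kPreAllocatedSpillLocs - 1) are the "extra"
  // spill locations between %rsp and the return address; they keep their
  // index. Anything at or above that boundary lies in the pre-allocated
  // region and must skip the return-address slot.
  if (spillLoc + kPreAllocatedSpillLocs >= numSpillLocs) {
    return spillLoc + 1;
  }
  return spillLoc;
}

For example, with numSpillLocs = 20 the extra region holds slots 0..3, which keep their indices, while slots 4..19 become 5..20.
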
Example #2
uint32 LinearScan::assignSpillLocAux(Trace* trace,
                                     uint32 nextSpillLoc,
                                     uint32 nextMmxReg) {
  IRInstruction::List& instructionList = trace->getInstructionList();
  for (IRInstruction::Iterator it = instructionList.begin();
       it != instructionList.end();
       ++it) {
    IRInstruction* inst = *it;
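    // When we reach the next native call, advance the pending-natives queue
    // so getNextNativeId() refers to the native call after this one.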
    if (getNextNative() == inst) {
      ASSERT(!m_natives.empty());
      m_natives.pop_front();
    }
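    // Spill: choose where the spilled value will live, either an MMX
    // register or the next free memory slot.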
    if (inst->getOpcode() == Spill) {
      SSATmp* dst = inst->getDst();
      SSATmp* src = inst->getSrc(0);
      for (int locIndex = 0;
           locIndex < src->numNeededRegs();
           ++locIndex) {
        if (dst->getLastUseId() <= getNextNativeId()) {
          TRACE(3, "[counter] 1 spill a tmp that does not span native\n");
        } else {
          TRACE(3, "[counter] 1 spill a tmp that spans native\n");
        }

        const bool allowMmxSpill = RuntimeOption::EvalHHIREnableMmx &&
          // The live range of the spill slot doesn't span native calls,
          // and we still have free MMX registers.
          dst->getLastUseId() <= getNextNativeId() &&
          nextMmxReg < (uint32)NumMmxRegs;

        dst->setSpillInfo(locIndex,
          allowMmxSpill
            ? SpillInfo(RegNumber(nextMmxReg++))
            : SpillInfo(nextSpillLoc++)
        );
        if (allowMmxSpill) {
          TRACE(3, "[counter] 1 spill to mmx\n");
        } else {
          TRACE(3, "[counter] 1 spill to memory\n");
        }
      }
    }
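    // Reload: nothing to assign here; just count whether the value comes
    // back from an MMX register or from memory.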
    if (inst->getOpcode() == Reload) {
      SSATmp* src = inst->getSrc(0);
      for (int locIndex = 0;
           locIndex < src->numNeededRegs();
           ++locIndex) {
        if (src->getSpillInfo(locIndex).type() == SpillInfo::MMX) {
          TRACE(3, "[counter] reload from mmx\n");
        } else {
          TRACE(3, "[counter] reload from memory\n");
        }
      }
    }
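    // If this instruction branches to a label that immediately follows it,
    // recurse into the label's trace, continuing from the current memory
    // slot count.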
    if (inst->isControlFlowInstruction()) {
      LabelInstruction* label = inst->getLabel();
      if (label != NULL && label->getId() == inst->getId() + 1) {
        nextSpillLoc = assignSpillLocAux(label->getTrace(),
                                         nextSpillLoc,
                                         nextMmxReg);
      }
    }
  }
  return nextSpillLoc;
}
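
The allowMmxSpill expression above bundles the whole spill-target decision. As a minimal standalone sketch (assumed names; canSpillToMmx is not part of HHVM), the same decision reads:

#include <cstdint>

bool canSpillToMmx(bool mmxSpillEnabled,   // RuntimeOption::EvalHHIREnableMmx
                   uint32_t lastUseId,     // last use of the spilled tmp
                   uint32_t nextNativeId,  // id of the next native call
                   uint32_t nextMmxReg,    // next free MMX register index
                   uint32_t numMmxRegs) {  // total MMX registers available
  return mmxSpillEnabled &&
         lastUseId <= nextNativeId &&   // live range doesn't span a native call
         nextMmxReg < numMmxRegs;       // an MMX register is still free
}

When all three conditions hold, the value is parked in an MMX register and nextMmxReg advances; otherwise it goes to memory and nextSpillLoc advances. The native-call check exists presumably because native helper calls are not guaranteed to preserve MMX state.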