// Pre-scan pass for a single definition: releases its register back to the
// free pool and records registers that this round must not hand out.
// Returns false only if evicting a fixed (PRESET) register fails.
bool
GreedyAllocator::prescanDefinition(LDefinition *def)
{
    // A passthrough definition is a fake redefinition of one of its inputs:
    // there is no storage to kill, and it places no constraint of its own,
    // so it never enters the disallow set.
    if (def->policy() == LDefinition::PASSTHROUGH)
        return true;

    VirtualRegister *vreg = getVirtualRegister(def);

    // Return the vreg's register (if it holds one) to the free pool, then
    // keep that same register from being re-allocated during this round.
    killReg(vreg);
    if (vreg->hasRegister())
        disallowed.add(vreg->reg());

    if (def->policy() != LDefinition::PRESET)
        return true;

    const LAllocation *a = def->output();
    if (!a->isRegister())
        return true;

    // Fixed output register: reserve it and evict any current occupant.
    // The unchecked set-add is required because a preset register does not
    // reflect allocation state, so it may already be in the set.
    AnyRegister fixed = GetPresetRegister(def);
    disallowed.addUnchecked(fixed);
    if (!maybeEvict(fixed))
        return false;
    return true;
}
// Builds the move group feeding phis in this block's phi-carrying successor.
// Each phi input is routed into the phi's canonical stack slot, from a
// register when one is available (or can be cheaply acquired), otherwise
// directly from the input's own stack slot. Returns false on OOM.
bool
GreedyAllocator::buildPhiMoves(LBlock *block)
{
    IonSpew(IonSpew_RegAlloc, " Merging phi state.");

    phiMoves = Mover();

    MBasicBlock *mblock = block->mir();
    if (!mblock->successorWithPhis())
        return true;

    // This block's index among the successor's predecessors selects which
    // operand of each phi we are responsible for.
    uint32 predIndex = mblock->positionInPhiSuccessor();
    LBlock *target = mblock->successorWithPhis()->lir();

    for (size_t phiIndex = 0; phiIndex < target->numPhis(); phiIndex++) {
        LPhi *phi = target->getPhi(phiIndex);
        JS_ASSERT(phi->numDefs() == 1);

        // Both the phi and its input need canonical stack locations, since
        // the merge happens through memory.
        VirtualRegister *phiVr = getVirtualRegister(phi->getDef(0));
        allocateStack(phiVr);

        LAllocation *operand = phi->getOperand(predIndex);
        VirtualRegister *inVr = getVirtualRegister(operand->toUse());
        allocateStack(inVr);

        // Prefer sourcing the move from a register; grab a free one of the
        // right kind if the input isn't already in a register.
        if (!inVr->hasRegister() && !allocatableRegs().empty(inVr->isDouble())) {
            if (!allocateReg(inVr))
                return false;
        }

        // Queue the move into the phi's stack slot, register-sourced when
        // possible, otherwise stack-to-stack.
        bool ok = inVr->hasRegister()
                  ? phiMoves.move(inVr->reg(), phiVr->backingStack())
                  : phiMoves.move(inVr->backingStack(), phiVr->backingStack());
        if (!ok)
            return false;
    }

    return true;
}
// Emits the spill/restore moves required to reconcile a definition's final
// output allocation with the register and/or stack slot the allocator
// assigned to its virtual register. Returns false on OOM from spill().
bool
GreedyAllocator::spillDefinition(LDefinition *def)
{
    // Passthrough definitions alias an input and own no storage; nothing to
    // reconcile.
    if (def->policy() == LDefinition::PASSTHROUGH)
        return true;

    VirtualRegister *vr = getVirtualRegister(def);
    const LAllocation *output = def->output();

    if (output->isRegister()) {
        if (vr->hasRegister()) {
            // If the returned register is different from the output
            // register, a move is required.
            AnyRegister out = GetAllocatedRegister(output);
            if (out != vr->reg()) {
                if (!spill(*output, vr->reg()))
                    return false;
            }
        }

        // Spill to the stack if needed — i.e. the vreg has a stack slot and
        // some use actually read from it.
        if (vr->hasStackSlot() && vr->backingStackUsed()) {
            if (!spill(*output, vr->backingStack()))
                return false;
        }
    } else if (vr->hasRegister()) {
        // This definition has a canonical spill location, so make sure to
        // load it to the resulting register, if any.
        JS_ASSERT(!vr->hasStackSlot());
        JS_ASSERT(vr->hasBackingStack());
        if (!spill(*output, vr->reg()))
            return false;
    }

    return true;
}
// Pre-scan pass over an instruction's operands: fixed-register uses are
// hard-reserved for this round (disallowed); registers already held by
// non-fixed uses are merely discouraged so definitions prefer other ones.
bool
GreedyAllocator::prescanUses(LInstruction *ins)
{
    for (size_t opIndex = 0; opIndex < ins->numOperands(); opIndex++) {
        LAllocation *operand = ins->getOperand(opIndex);

        // Only uses and constants appear as operands; constants need no
        // register bookkeeping.
        if (!operand->isUse()) {
            JS_ASSERT(operand->isConstant());
            continue;
        }

        LUse *use = operand->toUse();
        VirtualRegister *vr = getVirtualRegister(use);

        if (use->policy() == LUse::FIXED) {
            // A def or temp may use the same register, so the unchecked
            // set-add is required here.
            disallowed.addUnchecked(GetFixedRegister(vr->def, use));
        } else if (vr->hasRegister()) {
            discouraged.addUnchecked(vr->reg());
        }
    }

    return true;
}
// Main driver: allocates registers for every block. Blocks are walked in
// reverse so each use is seen before its definition. At the top of each
// block, all live registers are spilled to their stack slots via the entry
// move group, so every block begins and ends with a fully free register
// state. Returns false on OOM.
bool
GreedyAllocator::allocateRegisters()
{
    // Allocate registers bottom-up, such that we see all uses before their
    // definitions. Note: the loop index is unsigned, so the decrement past
    // zero wraps and the `i < numBlocks()` condition terminates the loop.
    for (size_t i = graph.numBlocks() - 1; i < graph.numBlocks(); i--) {
        LBlock *block = graph.getBlock(i);

        IonSpew(IonSpew_RegAlloc, "Allocating block %d", (uint32)i);

        // All registers should be free.
        JS_ASSERT(state.free == RegisterSet::All());

        // Allocate stack for any phis.
        for (size_t j = 0; j < block->numPhis(); j++) {
            LPhi *phi = block->getPhi(j);
            VirtualRegister *vreg = getVirtualRegister(phi->getDef(0));
            allocateStack(vreg);
        }

        // Allocate registers.
        if (!allocateRegistersInBlock(block))
            return false;

        LMoveGroup *entrySpills = block->getEntryMoveGroup();

        // We've reached the top of the block. Spill all registers by inserting
        // moves from their stack locations.
        for (AnyRegisterIterator iter(RegisterSet::All()); iter.more(); iter++) {
            VirtualRegister *vreg = state[*iter];
            if (!vreg) {
                // Nothing lives in this register; it must already be free.
                JS_ASSERT(state.free.has(*iter));
                continue;
            }

            JS_ASSERT(vreg->reg() == *iter);
            JS_ASSERT(!state.free.has(vreg->reg()));
            allocateStack(vreg);

            // Reload the register's value from its stack slot at block entry.
            LAllocation *from = LAllocation::New(vreg->backingStack());
            LAllocation *to = LAllocation::New(vreg->reg());
            if (!entrySpills->add(from, to))
                return false;

            killReg(vreg);
            vreg->unsetRegister();
        }

        // Before killing phis, ensure that each phi input has its own stack
        // allocation. This ensures we won't allocate the same slot for any phi
        // as its input, which technically may be legal (since the phi becomes
        // the last use of the slot), but we avoid for sanity.
        for (size_t i = 0; i < block->numPhis(); i++) {
            LPhi *phi = block->getPhi(i);
            for (size_t j = 0; j < phi->numOperands(); j++) {
                VirtualRegister *in = getVirtualRegister(phi->getOperand(j)->toUse());
                allocateStack(in);
            }
        }

        // Kill phis. Phi definitions never hold a register here; only their
        // stack slots need releasing.
        for (size_t i = 0; i < block->numPhis(); i++) {
            LPhi *phi = block->getPhi(i);
            VirtualRegister *vr = getVirtualRegister(phi->getDef(0));
            JS_ASSERT(!vr->hasRegister());
            killStack(vr);
        }
    }

    return true;
}
bool GreedyAllocator::allocateDefinition(LInstruction *ins, LDefinition *def) { VirtualRegister *vr = getVirtualRegister(def); LAllocation output; switch (def->policy()) { case LDefinition::PASSTHROUGH: // This is purely passthru, so ignore it. return true; case LDefinition::DEFAULT: case LDefinition::MUST_REUSE_INPUT: { AnyRegister reg; // Either take the register requested, or allocate a new one. if (def->policy() == LDefinition::MUST_REUSE_INPUT && ins->getOperand(def->getReusedInput())->toUse()->isFixedRegister()) { LAllocation *a = ins->getOperand(def->getReusedInput()); VirtualRegister *vuse = getVirtualRegister(a->toUse()); reg = GetFixedRegister(vuse->def, a->toUse()); } else if (vr->hasRegister()) { reg = vr->reg(); } else { if (!allocate(vr->type(), DISALLOW, ®)) return false; } if (def->policy() == LDefinition::MUST_REUSE_INPUT) { LUse *use = ins->getOperand(def->getReusedInput())->toUse(); VirtualRegister *vuse = getVirtualRegister(use); // If the use already has the given register, we need to evict. if (vuse->hasRegister() && vuse->reg() == reg) { if (!evict(reg)) return false; } // Make sure our input is using a fixed register. if (reg.isFloat()) *use = LUse(reg.fpu(), use->virtualRegister()); else *use = LUse(reg.gpr(), use->virtualRegister()); } output = LAllocation(reg); break; } case LDefinition::PRESET: { // Eviction and disallowing occurred during the definition // pre-scan pass. output = *def->output(); break; } } if (output.isRegister()) { JS_ASSERT_IF(output.isFloatReg(), disallowed.has(output.toFloatReg()->reg())); JS_ASSERT_IF(output.isGeneralReg(), disallowed.has(output.toGeneralReg()->reg())); } // Finally, set the output. def->setOutput(output); return true; }