void MoveEmitterARM64::emit(const MoveResolver& moves) { if (moves.numCycles()) { masm.reserveStack(sizeof(void*)); pushedAtCycle_ = masm.framePushed(); } for (size_t i = 0; i < moves.numMoves(); i++) { emitMove(moves.getMove(i)); } }
void MoveEmitterMIPS::emit(const MoveResolver &moves) { if (moves.hasCycles()) { // Reserve stack for cycle resolution masm.reserveStack(sizeof(double)); pushedAtCycle_ = masm.framePushed(); } for (size_t i = 0; i < moves.numMoves(); i++) emit(moves.getMove(i)); }
// Emit machine code for every move in the resolved schedule. Cycles
// (a <- b, b <- a, ...) are either optimized into register swaps or broken
// by spilling one value through the stack.
void MoveEmitterX86::emit(const MoveResolver &moves) {
  for (size_t i = 0; i < moves.numMoves(); i++) {
    const MoveOp &move = moves.getMove(i);
    const MoveOperand &from = move.from();
    const MoveOperand &to = move.to();

    // End of a stack-broken cycle: restore the spilled value into its
    // final destination and leave cycle mode.
    if (move.isCycleEnd()) {
      JS_ASSERT(inCycle_);
      completeCycle(to, move.type());
      inCycle_ = false;
      continue;
    }

    if (move.isCycleBegin()) {
      JS_ASSERT(!inCycle_);

      // Characterize the cycle.
      bool allGeneralRegs = true, allFloatRegs = true;
      size_t swapCount = characterizeCycle(moves, i, &allGeneralRegs, &allFloatRegs);

      // Attempt to optimize it to avoid using the stack.
      if (maybeEmitOptimizedCycle(moves, i, allGeneralRegs, allFloatRegs, swapCount)) {
        // The optimized form consumed the whole cycle; skip its moves.
        i += swapCount;
        continue;
      }

      // Otherwise use the stack.
      breakCycle(to, move.endCycleType());
      inCycle_ = true;
      // NOTE: deliberately falls through — the cycle-begin move itself is
      // still emitted by the switch below.
    }

    // A normal move which is not part of a cycle.
    switch (move.type()) {
      case MoveOp::FLOAT32:
        emitFloat32Move(from, to);
        break;
      case MoveOp::DOUBLE:
        emitDoubleMove(from, to);
        break;
      case MoveOp::INT32:
        emitInt32Move(from, to);
        break;
      case MoveOp::GENERAL:
        emitGeneralMove(from, to);
        break;
      default:
        MOZ_ASSUME_UNREACHABLE("Unexpected move type");
    }
  }
}
// Emit machine code for every move in the resolved schedule. This older
// variant uses a single inCycle() flag on the move: the first in-cycle
// move seen begins a cycle, and the next in-cycle move (with inCycle_ set)
// ends it.
void MoveEmitterX86::emit(const MoveResolver &moves) {
  for (size_t i = 0; i < moves.numMoves(); i++) {
    const Move &move = moves.getMove(i);
    const MoveOperand &from = move.from();
    const MoveOperand &to = move.to();

    if (move.inCycle()) {
      // If this is the end of a cycle for which we're using the stack,
      // handle the end.
      if (inCycle_) {
        completeCycle(to, move.kind());
        inCycle_ = false;
        continue;
      }

      // Characterize the cycle.
      bool allGeneralRegs = true, allFloatRegs = true;
      size_t swapCount = characterizeCycle(moves, i, &allGeneralRegs, &allFloatRegs);

      // Attempt to optimize it to avoid using the stack.
      if (maybeEmitOptimizedCycle(moves, i, allGeneralRegs, allFloatRegs, swapCount)) {
        // The optimized form consumed the whole cycle; skip its moves.
        i += swapCount;
        continue;
      }

      // Otherwise use the stack.
      breakCycle(to, move.kind());
      inCycle_ = true;
      // NOTE: deliberately falls through — the cycle-begin move itself is
      // still emitted below.
    }

    // A normal move which is not part of a cycle.
    if (move.kind() == Move::DOUBLE)
      emitDoubleMove(from, to);
    else
      emitGeneralMove(from, to);
  }
}
// Emit machine code for every move in the resolved schedule, including the
// SIMD (INT32X4/FLOAT32X4) move kinds. Cycles are either optimized into
// register swaps or broken by spilling one value through the stack.
void MoveEmitterX86::emit(const MoveResolver& moves) {
#if defined(JS_CODEGEN_X86) && defined(DEBUG)
  // Clobber any scratch register we have, to make regalloc bugs more visible.
  if (hasScratchRegister())
    masm.mov(ImmWord(0xdeadbeef), scratchRegister());
#endif
  for (size_t i = 0; i < moves.numMoves(); i++) {
    const MoveOp& move = moves.getMove(i);
    const MoveOperand& from = move.from();
    const MoveOperand& to = move.to();

    // End of a stack-broken cycle: restore the spilled value into its
    // final destination and leave cycle mode.
    if (move.isCycleEnd()) {
      MOZ_ASSERT(inCycle_);
      completeCycle(to, move.type());
      inCycle_ = false;
      continue;
    }

    if (move.isCycleBegin()) {
      MOZ_ASSERT(!inCycle_);

      // Characterize the cycle.
      bool allGeneralRegs = true, allFloatRegs = true;
      size_t swapCount = characterizeCycle(moves, i, &allGeneralRegs, &allFloatRegs);

      // Attempt to optimize it to avoid using the stack.
      if (maybeEmitOptimizedCycle(moves, i, allGeneralRegs, allFloatRegs, swapCount)) {
        // The optimized form consumed the whole cycle; skip its moves.
        i += swapCount;
        continue;
      }

      // Otherwise use the stack.
      breakCycle(to, move.endCycleType());
      inCycle_ = true;
      // NOTE: deliberately falls through — the cycle-begin move itself is
      // still emitted by the switch below.
    }

    // A normal move which is not part of a cycle.
    switch (move.type()) {
      case MoveOp::FLOAT32:
        emitFloat32Move(from, to);
        break;
      case MoveOp::DOUBLE:
        emitDoubleMove(from, to);
        break;
      case MoveOp::INT32:
        emitInt32Move(from, to);
        break;
      case MoveOp::GENERAL:
        emitGeneralMove(from, to);
        break;
      case MoveOp::INT32X4:
        emitInt32X4Move(from, to);
        break;
      case MoveOp::FLOAT32X4:
        emitFloat32X4Move(from, to);
        break;
      default:
        MOZ_CRASH("Unexpected move type");
    }
  }
}