void MoveEmitterMIPS::emit(const MoveOp &move) { const MoveOperand &from = move.from(); const MoveOperand &to = move.to(); if (move.isCycleEnd()) { MOZ_ASSERT(inCycle_); completeCycle(from, to, move.type()); inCycle_ = false; return; } if (move.isCycleBegin()) { MOZ_ASSERT(!inCycle_); breakCycle(from, to, move.endCycleType()); inCycle_ = true; } switch (move.type()) { case MoveOp::FLOAT32: emitFloat32Move(from, to); break; case MoveOp::DOUBLE: emitDoubleMove(from, to); break; case MoveOp::INT32: MOZ_ASSERT(sizeof(uintptr_t) == sizeof(int32_t)); case MoveOp::GENERAL: emitMove(from, to); break; default: MOZ_ASSUME_UNREACHABLE("Unexpected move type"); } }
void MoveEmitterARM64::emitMove(const MoveOp& move) { const MoveOperand& from = move.from(); const MoveOperand& to = move.to(); if (move.isCycleBegin()) { MOZ_ASSERT(!inCycle_ && !move.isCycleEnd()); breakCycle(from, to, move.endCycleType()); inCycle_ = true; } else if (move.isCycleEnd()) { MOZ_ASSERT(inCycle_); completeCycle(from, to, move.type()); inCycle_ = false; return; } switch (move.type()) { case MoveOp::FLOAT32: emitFloat32Move(from, to); break; case MoveOp::DOUBLE: emitDoubleMove(from, to); break; case MoveOp::INT32: emitInt32Move(from, to); break; case MoveOp::GENERAL: emitGeneralMove(from, to); break; default: MOZ_CRASH("Unexpected move type"); } }
bool MoveResolver::addOrderedMove(const MoveOp& move) {
  // Append `move` to the ordered move list, opportunistically rewriting it
  // so that a value an earlier move already loaded from the same memory
  // source can be reused instead of reloaded.
  //
  // Sometimes the register allocator generates move groups where multiple
  // moves have the same source. Try to optimize these cases when the source
  // is in memory and the target of one of the moves is in a register.
  MOZ_ASSERT(!move.from().aliases(move.to()));

  // Only memory-sourced, non-cycle moves are rewrite candidates; cycle
  // markers carry ordering constraints that must not be disturbed.
  if (!move.from().isMemory() || move.isCycleBegin() || move.isCycleEnd())
    return orderedMoves_.append(move);

  // Look for an earlier move with the same source, where no intervening move
  // touches either the source or destination of the new move.
  for (int i = orderedMoves_.length() - 1; i >= 0; i--) {
    const MoveOp& existing = orderedMoves_[i];
    if (existing.from() == move.from() &&
        !existing.to().aliases(move.to()) &&
        existing.type() == move.type() &&
        !existing.isCycleBegin() &&
        !existing.isCycleEnd()) {
      // The rewritten move is inserted immediately after `existing`, while
      // the register `existing` wrote still holds the shared source value.
      MoveOp* after = orderedMoves_.begin() + i + 1;
      if (existing.to().isGeneralReg() || existing.to().isFloatReg()) {
        // `existing` already put the value in a register: copy
        // register-to-register instead of reloading from memory.
        MoveOp nmove(existing.to(), move.to(), move.type());
        return orderedMoves_.insert(after, nmove);
      } else if (move.to().isGeneralReg() || move.to().isFloatReg()) {
        // The new move targets a register: do it at position i instead,
        // then copy from that register into `existing`'s destination.
        MoveOp nmove(move.to(), existing.to(), move.type());
        orderedMoves_[i] = move;
        return orderedMoves_.insert(after, nmove);
      }
    }
    // An intervening move touches something `move` uses, so reusing an
    // earlier load is no longer safe; stop scanning.
    if (existing.aliases(move))
      break;
  }

  return orderedMoves_.append(move);
}
void MoveEmitterMIPS::emit(const MoveOp& move) { const MoveOperand& from = move.from(); const MoveOperand& to = move.to(); if (move.isCycleEnd() && move.isCycleBegin()) { // A fun consequence of aliased registers is you can have multiple // cycles at once, and one can end exactly where another begins. breakCycle(from, to, move.endCycleType(), move.cycleBeginSlot()); completeCycle(from, to, move.type(), move.cycleEndSlot()); return; } if (move.isCycleEnd()) { MOZ_ASSERT(inCycle_); completeCycle(from, to, move.type(), move.cycleEndSlot()); MOZ_ASSERT(inCycle_ > 0); inCycle_--; return; } if (move.isCycleBegin()) { breakCycle(from, to, move.endCycleType(), move.cycleBeginSlot()); inCycle_++; } switch (move.type()) { case MoveOp::FLOAT32: emitFloat32Move(from, to); break; case MoveOp::DOUBLE: emitDoubleMove(from, to); break; case MoveOp::INT32: MOZ_ASSERT(sizeof(uintptr_t) == sizeof(int32_t)); case MoveOp::GENERAL: emitMove(from, to); break; default: MOZ_CRASH("Unexpected move type"); } }