Example #1
void
MoveEmitterMIPS::emitMove(const MoveOperand &from, const MoveOperand &to)
{
    if (from.isGeneralReg()) {
        // Second scratch register should not be moved by MoveEmitter.
        MOZ_ASSERT(from.reg() != spilledReg_);

        if (to.isGeneralReg())
            masm.movePtr(from.reg(), to.reg());
        else if (to.isMemory())
            masm.storePtr(from.reg(), getAdjustedAddress(to));
        else
            MOZ_ASSUME_UNREACHABLE("Invalid emitMove arguments.");
    } else if (from.isMemory()) {
        if (to.isGeneralReg()) {
            masm.loadPtr(getAdjustedAddress(from), to.reg());
        } else if (to.isMemory()) {
            masm.loadPtr(getAdjustedAddress(from), tempReg());
            masm.storePtr(tempReg(), getAdjustedAddress(to));
        } else {
            MOZ_ASSUME_UNREACHABLE("Invalid emitMove arguments.");
        }
    } else if (from.isEffectiveAddress()) {
        if (to.isGeneralReg()) {
            masm.computeEffectiveAddress(getAdjustedAddress(from), to.reg());
        } else if (to.isMemory()) {
            masm.computeEffectiveAddress(getAdjustedAddress(from), tempReg());
            masm.storePtr(tempReg(), getAdjustedAddress(to));
        } else {
            MOZ_ASSUME_UNREACHABLE("Invalid emitMove arguments.");
        }
    } else {
        MOZ_ASSUME_UNREACHABLE("Invalid emitMove arguments.");
    }
}
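
All of the emitters in this listing share the shape of this MIPS version: dispatch on the operand kinds, with the memory-to-memory case bounced through a temporary register because no single instruction can do it. The following self-contained sketch uses a hypothetical simplified operand type and a print-only stand-in for the MacroAssembler, and omits the effective-address cases; it is an illustration of the dispatch pattern under those assumptions, not SpiderMonkey code.

// Sketch only: hypothetical types, not part of the IonMonkey MoveEmitter.
#include <cstdio>

struct SketchOperand {
    enum Kind { Reg, Mem } kind;
    int index;  // register number or stack-slot offset
};

// Print-only "assembler" so the emitted sequence is visible when run.
struct SketchAsm {
    void movRegReg(int dst, int src) { std::printf("mov   r%d, r%d\n", dst, src); }
    void store(int src, int slot)    { std::printf("store r%d -> [sp+%d]\n", src, slot); }
    void load(int slot, int dst)     { std::printf("load  [sp+%d] -> r%d\n", slot, dst); }
};

// The same four-way dispatch as above: reg->reg, reg->mem, mem->reg, and
// mem->mem, the last of which has to go through a temporary register.
void emitMoveSketch(SketchAsm& masm, const SketchOperand& from,
                    const SketchOperand& to, int tempReg)
{
    if (from.kind == SketchOperand::Reg) {
        if (to.kind == SketchOperand::Reg)
            masm.movRegReg(to.index, from.index);
        else
            masm.store(from.index, to.index);
    } else if (to.kind == SketchOperand::Reg) {
        masm.load(from.index, to.index);
    } else {
        // Memory-to-memory: stage the value in the temporary register.
        masm.load(from.index, tempReg);
        masm.store(tempReg, to.index);
    }
}

int main()
{
    SketchAsm masm;
    emitMoveSketch(masm, {SketchOperand::Mem, 8}, {SketchOperand::Mem, 16}, /*tempReg=*/9);
    return 0;
}
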
Example #2
void
MoveEmitterARM::emitMove(const MoveOperand &from, const MoveOperand &to)
{
    if (to.isGeneralReg() && to.reg() == spilledReg_) {
        // If the destination is the spilled register, make sure we
        // don't re-clobber its value.
        spilledReg_ = InvalidReg;
    }

    if (from.isGeneralReg()) {
        if (from.reg() == spilledReg_) {
            // If the source is a register that has been spilled, make sure
            // to load the source back into that register.
            masm.ma_ldr(spillSlot(), spilledReg_);
            spilledReg_ = InvalidReg;
        }
        switch (toOperand(to, false).getTag()) {
          case Operand::OP2:
            // secretly must be a register
            masm.ma_mov(from.reg(), to.reg());
            break;
          case Operand::MEM:
            masm.ma_str(from.reg(), toOperand(to, false));
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("strange move!");
        }
    } else if (to.isGeneralReg()) {
        JS_ASSERT(from.isMemory() || from.isEffectiveAddress());
        if (from.isMemory())
            masm.ma_ldr(toOperand(from, false), to.reg());
        else
            masm.ma_add(from.base(), Imm32(from.disp()), to.reg());
    } else {
        // Memory to memory gpr move.
        Register reg = tempReg();

        JS_ASSERT(from.isMemory() || from.isEffectiveAddress());
        if (from.isMemory())
            masm.ma_ldr(toOperand(from, false), reg);
        else
            masm.ma_add(from.base(), Imm32(from.disp()), reg);
        JS_ASSERT(to.base() != reg);
        masm.ma_str(reg, toOperand(to, false));
    }
}
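
Both this ARM emitter and the second MIPS variant further down track spilledReg_: a register whose live value the emitter has parked in a spill slot so the register itself can be reused as a temporary. A move that reads that register must reload it from the spill slot first; a move that writes it can simply drop the tracking, since the saved value is about to be overwritten anyway. The sketch below models just that bookkeeping with hypothetical names and a plain array standing in for the register file; it is not SpiderMonkey code.

// Sketch only: hypothetical bookkeeping, not the real MoveEmitterARM.
#include <cassert>

constexpr int kInvalidReg = -1;

struct SpillTracker {
    int spilledReg = kInvalidReg;  // register currently parked in the spill slot
    int spillSlot = 0;             // stand-in for the on-stack spill slot
    int regs[8] = {};              // stand-in register file

    // Before using a register as a move *source*, make sure it holds its real
    // value: if it was spilled so the emitter could reuse it, reload it first
    // (the analogue of masm.ma_ldr(spillSlot(), spilledReg_) above).
    void reloadIfSpilledSource(int src) {
        if (src == spilledReg) {
            regs[src] = spillSlot;
            spilledReg = kInvalidReg;
        }
    }

    // If a move *destination* is the spilled register, the saved value is about
    // to be overwritten anyway, so stop tracking it instead of restoring it.
    void forgetIfSpilledDest(int dst) {
        if (dst == spilledReg)
            spilledReg = kInvalidReg;
    }
};

int main()
{
    SpillTracker t;
    t.regs[3] = 42;
    t.spillSlot = t.regs[3];      // emitter spills r3 so it can use it as a temp
    t.spilledReg = 3;
    t.regs[3] = 7;                // temporary use clobbers r3

    t.reloadIfSpilledSource(3);   // a later move reads r3: reload it first
    assert(t.regs[3] == 42 && t.spilledReg == kInvalidReg);

    t.spilledReg = 3;
    t.forgetIfSpilledDest(3);     // a later move overwrites r3: just drop the spill
    assert(t.spilledReg == kInvalidReg);
    return 0;
}
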
Example #3
// Warning, do not use the resulting operand with pop instructions, since they
// compute the effective destination address after altering the stack pointer.
// Use toPopOperand if an Operand is needed for a pop.
Operand
MoveEmitterX86::toOperand(const MoveOperand &operand) const
{
    if (operand.isMemory() || operand.isEffectiveAddress() || operand.isFloatAddress())
        return Operand(toAddress(operand));
    if (operand.isGeneralReg())
        return Operand(operand.reg());

    JS_ASSERT(operand.isFloatReg());
    return Operand(operand.floatReg());
}
Example #4
void
MoveEmitterMIPS::emitMove(const MoveOperand &from, const MoveOperand &to)
{
    if (to.isGeneralReg() && to.reg() == spilledReg_) {
        // If the destination is the spilled register, make sure we
        // don't re-clobber its value.
        spilledReg_ = InvalidReg;
    }

    if (from.isGeneralReg()) {
        if (from.reg() == spilledReg_) {
            // If the source is a register that has been spilled, make sure
            // to load the source back into that register.
            masm.mov(spillSlot(), spilledReg_);
            spilledReg_ = InvalidReg;
        }
        masm.mov(from.reg(), toOperand(to));
    } else if (to.isGeneralReg()) {
        JS_ASSERT(from.isMemory() || from.isEffectiveAddress());
        if (from.isMemory())
            masm.mov(toOperand(from), to.reg());
        else
            masm.lea(toOperand(from), to.reg());
    } else {
        // Memory to memory gpr move.
        Register reg = tempReg();
        // Reload its previous value from the stack.
        if (reg == from.base())
            masm.mov(spillSlot(), from.base());

        JS_ASSERT(from.isMemory() || from.isEffectiveAddress());
        if (from.isMemory())
            masm.mov(toOperand(from), reg);
        else
            masm.lea(toOperand(from), reg);
        JS_ASSERT(to.base() != reg);
        masm.mov(reg, toOperand(to));
    }
}
Example #5
void
MoveEmitterX86::emitGeneralMove(const MoveOperand &from, const MoveOperand &to)
{
    if (from.isGeneralReg()) {
        masm.mov(from.reg(), toOperand(to));
    } else if (to.isGeneralReg()) {
        JS_ASSERT(from.isMemory() || from.isEffectiveAddress());
        if (from.isMemory())
            masm.loadPtr(toAddress(from), to.reg());
        else
            masm.lea(toOperand(from), to.reg());
    } else if (from.isMemory()) {
        // Memory to memory gpr move.
#ifdef JS_CPU_X64
        // x64 has a ScratchReg. Use it.
        masm.loadPtr(toAddress(from), ScratchReg);
        masm.mov(ScratchReg, toOperand(to));
#else
        // No ScratchReg; bounce it off the stack.
        masm.Push(toOperand(from));
        masm.Pop(toPopOperand(to));
#endif
    } else {
        // Effective address to memory move.
        JS_ASSERT(from.isEffectiveAddress());
#ifdef JS_CPU_X64
        // x64 has a ScratchReg. Use it.
        masm.lea(toOperand(from), ScratchReg);
        masm.mov(ScratchReg, toOperand(to));
#else
        // This is tricky without a ScratchReg. We can't do an lea. Bounce the
        // base register off the stack, then add the offset in place. Note that
        // this clobbers FLAGS!
        masm.Push(from.base());
        masm.Pop(toPopOperand(to));
        masm.addPtr(Imm32(from.disp()), toOperand(to));
#endif
    }
}
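
On 32-bit x86 this emitter has no dedicated scratch register, so the memory-to-memory and effective-address-to-memory cases above are routed through the stack: push [src]; pop [dst], and push base; pop [dst]; addPtr $disp, [dst]. The toy model below (a word-addressed memory array with a descending stack pointer, assumed semantics rather than real assembler output) just checks that those two sequences produce the expected results.

// Sketch only: a toy machine model, not generated x86 code.
#include <cassert>
#include <cstdint>
#include <vector>

struct ToyMachine {
    std::vector<uint32_t> mem = std::vector<uint32_t>(64, 0);
    size_t sp = 32;  // arbitrary stack top inside mem

    void push(uint32_t value) { mem[--sp] = value; }
    uint32_t pop() { return mem[sp++]; }
};

int main()
{
    ToyMachine m;

    // Memory -> memory move: "push [src]; pop [dst]" copies a word using only
    // the stack as temporary storage, no general-purpose register needed.
    m.mem[4] = 0xdeadbeef;   // source slot
    m.push(m.mem[4]);        // push [src]
    m.mem[10] = m.pop();     // pop [dst]
    assert(m.mem[10] == 0xdeadbeef);

    // Effective address -> memory move without lea: bounce the base value off
    // the stack into the destination slot, then add the displacement in place
    // (the in-place add is what clobbers FLAGS on real x86).
    uint32_t base = 0x1000;  // value held in the base register
    uint32_t disp = 0x20;
    m.push(base);            // push base
    m.mem[12] = m.pop();     // pop [dst]
    m.mem[12] += disp;       // addPtr $disp, [dst]
    assert(m.mem[12] == 0x1020);
    return 0;
}
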
// Warning, do not use the resulting operand with pop instructions, since they
// compute the effective destination address after altering the stack pointer.
// Use toPopOperand if an Operand is needed for a pop.
Operand
MoveEmitterX86::toOperand(const MoveOperand &operand) const
{
    if (operand.isMemory() || operand.isEffectiveAddress() || operand.isFloatAddress()) {
        if (operand.base() != StackPointer)
            return Operand(operand.base(), operand.disp());

        JS_ASSERT(operand.disp() >= 0);

        // Otherwise, the stack offset may need to be adjusted.
        return Operand(StackPointer, operand.disp() + (masm.framePushed() - pushedAtStart_));
    }
    if (operand.isGeneralReg())
        return Operand(operand.reg());

    JS_ASSERT(operand.isFloatReg());
    return Operand(operand.floatReg());
}
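
The StackPointer branch in toOperand is the subtle part: displacements for SP-relative operands were recorded against the stack pointer as it was when the move group began (pushedAtStart_), so anything the emitter itself has pushed since then has to be added back. A small numeric check of that adjustment, with made-up frame values:

// Sketch only: assumed numbers illustrating the SP-relative adjustment.
#include <cassert>
#include <cstdint>

int main()
{
    uint32_t spAtStart      = 0x7fff0000;  // SP when the move group began
    uint32_t pushedAtStart  = 16;          // framePushed() recorded at that time
    uint32_t framePushedNow = 24;          // the emitter has pushed 8 more bytes
    uint32_t spNow          = spAtStart - (framePushedNow - pushedAtStart);

    // An operand recorded as [SP + 12] at the start of the group...
    uint32_t disp = 12;
    uint32_t target = spAtStart + disp;

    // ...must be addressed as [SP + disp + (framePushed() - pushedAtStart_)]
    // now that the stack pointer has moved down.
    uint32_t adjusted = disp + (framePushedNow - pushedAtStart);
    assert(spNow + adjusted == target);
    return 0;
}
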
Example #7
void MoveEmitterARM64::emitGeneralMove(const MoveOperand& from,
                                       const MoveOperand& to) {
  if (from.isGeneralReg()) {
    MOZ_ASSERT(to.isGeneralReg() || to.isMemory());
    if (to.isGeneralReg()) {
      masm.Mov(toARMReg64(to), toARMReg64(from));
    } else {
      masm.Str(toARMReg64(from), toMemOperand(to));
    }
    return;
  }

  // {Memory OR EffectiveAddress} -> Register move.
  if (to.isGeneralReg()) {
    MOZ_ASSERT(from.isMemoryOrEffectiveAddress());
    if (from.isMemory()) {
      masm.Ldr(toARMReg64(to), toMemOperand(from));
    } else {
      masm.Add(toARMReg64(to), toARMReg64(from), Operand(from.disp()));
    }
    return;
  }

  vixl::UseScratchRegisterScope temps(&masm.asVIXL());
  const ARMRegister scratch64 = temps.AcquireX();

  // Memory -> Memory move.
  if (from.isMemory()) {
    MOZ_ASSERT(to.isMemory());
    masm.Ldr(scratch64, toMemOperand(from));
    masm.Str(scratch64, toMemOperand(to));
    return;
  }

  // EffectiveAddress -> Memory move.
  MOZ_ASSERT(from.isEffectiveAddress());
  MOZ_ASSERT(to.isMemory());
  masm.Add(scratch64, toARMReg64(from), Operand(from.disp()));
  masm.Str(scratch64, toMemOperand(to));
}
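
The ARM64 version does not keep its own temporary register; it borrows one from VIXL through a UseScratchRegisterScope, which hands the register back automatically when the scope ends. The RAII sketch below imitates that pattern with hypothetical classes (it is not VIXL): a scope object takes registers out of a shared pool and its destructor returns them.

// Sketch only: hypothetical RAII scratch pool, not vixl::UseScratchRegisterScope.
#include <cassert>
#include <cstdint>

struct ScratchPool {
    uint32_t freeMask = 0b11;  // say two scratch registers, r16 and r17
};

class ScratchScope {
    ScratchPool& pool_;
    uint32_t taken_ = 0;
  public:
    explicit ScratchScope(ScratchPool& pool) : pool_(pool) {}
    ~ScratchScope() { pool_.freeMask |= taken_; }  // release on scope exit

    // Borrow the lowest-numbered free scratch register, or -1 if none is left.
    int acquire() {
        for (int i = 0; i < 2; i++) {
            uint32_t bit = 1u << i;
            if (pool_.freeMask & bit) {
                pool_.freeMask &= ~bit;
                taken_ |= bit;
                return 16 + i;
            }
        }
        return -1;
    }
};

int main()
{
    ScratchPool pool;
    {
        ScratchScope temps(pool);
        int scratch = temps.acquire();   // analogous to temps.AcquireX()
        assert(scratch == 16);
        assert(pool.freeMask == 0b10);   // r16 is reserved inside the scope
    }
    assert(pool.freeMask == 0b11);       // and returned automatically afterwards
    return 0;
}
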
void
MoveEmitterX86::emitGeneralMove(const MoveOperand& from, const MoveOperand& to)
{
    if (from.isGeneralReg()) {
        masm.mov(from.reg(), toOperand(to));
    } else if (to.isGeneralReg()) {
        MOZ_ASSERT(from.isMemoryOrEffectiveAddress());
        if (from.isMemory())
            masm.loadPtr(toAddress(from), to.reg());
        else
            masm.lea(toOperand(from), to.reg());
    } else if (from.isMemory()) {
        // Memory to memory gpr move.
        if (hasScratchRegister()) {
            Register reg = scratchRegister();
            masm.loadPtr(toAddress(from), reg);
            masm.mov(reg, toOperand(to));
        } else {
            // No scratch register available; bounce it off the stack.
            masm.Push(toOperand(from));
            masm.Pop(toPopOperand(to));
        }
    } else {
        // Effective address to memory move.
        MOZ_ASSERT(from.isEffectiveAddress());
        if (hasScratchRegister()) {
            Register reg = scratchRegister();
            masm.lea(toOperand(from), reg);
            masm.mov(reg, toOperand(to));
        } else {
            // This is tricky without a scratch reg. We can't do an lea. Bounce the
            // base register off the stack, then add the offset in place. Note that
            // this clobbers FLAGS!
            masm.Push(from.base());
            masm.Pop(toPopOperand(to));
            masm.addPtr(Imm32(from.disp()), toOperand(to));
        }
    }
}