int32_t emitBindCall(CodeBlock& mainCode, CodeBlock& stubsCode,
                     SrcKey srcKey, const Func* funcd, int numArgs) {
  // If this is a call to a builtin and we don't need any argument
  // munging, we can skip the prologue system and do it inline.
  if (isNativeImplCall(funcd, numArgs)) {
    StoreImmPatcher patchIP(mainCode, (uint64_t)mainCode.frontier(), reg::rax,
                            cellsToBytes(numArgs) + AROFF(m_savedRip),
                            rVmSp);
    assert(funcd->numLocals() == funcd->numParams());
    assert(funcd->numIterators() == 0);
    Asm a { mainCode };
    emitLea(a, rVmSp[cellsToBytes(numArgs)], rVmFp);
    emitCheckSurpriseFlagsEnter(mainCode, stubsCode, true, mcg->fixupMap(),
                                Fixup(0, numArgs));
    // rVmSp is already correctly adjusted, because there are no locals
    // other than the arguments passed.
    auto retval = emitNativeImpl(mainCode, funcd);
    patchIP.patch(uint64_t(mainCode.frontier()));
    return retval;
  }

  Asm a { mainCode };
  if (debug) {
    auto off = cellsToBytes(numArgs) + AROFF(m_savedRip);
    emitImmStoreq(a, kUninitializedRIP, rVmSp[off]);
  }
  // Stash callee's rVmFp into rStashedAR for the callee's prologue
  emitLea(a, rVmSp[cellsToBytes(numArgs)], rStashedAR);
  emitBindCallHelper(mainCode, stubsCode, srcKey, funcd, numArgs);
  return 0;
}
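The StoreImmPatcher used above illustrates a patch-after-emit idea: emit a store of a placeholder return address now, then overwrite the immediate once the code following the call has been laid down and its address is known. Below is a minimal sketch of that pattern; ImmPatcher and its fields are hypothetical stand-ins, not HHVM's actual API.

// Minimal sketch of the patch-after-emit idea behind StoreImmPatcher.
// All names here are illustrative, not HHVM's real API.
#include <cstdint>
#include <cstring>
#include <vector>

struct ImmPatcher {
  std::vector<uint8_t>& code;
  size_t offset;  // where the 8-byte placeholder immediate lives

  explicit ImmPatcher(std::vector<uint8_t>& c) : code(c), offset(c.size()) {
    uint64_t placeholder = 0;  // emit a dummy immediate now...
    auto p = reinterpret_cast<uint8_t*>(&placeholder);
    code.insert(code.end(), p, p + sizeof(placeholder));
  }

  void patch(uint64_t value) {  // ...and fill in the real value later
    std::memcpy(code.data() + offset, &value, sizeof(value));
  }
};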
Example #2
void cgDefInlineFP(IRLS& env, const IRInstruction* inst) {
  auto const extra = inst->extra<DefInlineFP>();
  auto const callerSP = srcLoc(env, inst, 0).reg();
  auto const callerFP = srcLoc(env, inst, 1).reg();
  auto& v = vmain(env);

  auto const ar = callerSP[cellsToBytes(extra->spOffset.offset)];

  // Do roughly the same work as an HHIR Call.
  v << store{callerFP, ar + AROFF(m_sfp)};
  emitImmStoreq(v, uintptr_t(tc::ustubs().retInlHelper),
                ar + AROFF(m_savedRip));
  v << storeli{extra->retBCOff, ar + AROFF(m_soff)};
  if (extra->target->attrs() & AttrMayUseVV) {
    v << storeqi{0, ar + AROFF(m_invName)};
  }

  v << lea{ar, dstLoc(env, inst, 0).reg()};
}
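Both cgDefInlineFP and cgCall below address activation-record fields through AROFF offsets; in HHVM, AROFF(field) expands to offsetof(ActRec, field). A cut-down sketch of the pattern, with an illustrative (not HHVM's real) ActRec layout:

// Reduced illustration of the AROFF pattern: field offsets of the
// activation record are computed with offsetof so the JIT can address
// them directly. This ActRec is a stand-in, not HHVM's real layout.
#include <cstddef>
#include <cstdint>

struct ActRec {
  ActRec* m_sfp;        // caller's frame pointer
  uint64_t m_savedRip;  // return address into the caller
  uint32_t m_soff;      // return bytecode offset
  uint32_t m_numArgsAndFlags;
};

#define AROFF(field) offsetof(ActRec, field)

static_assert(AROFF(m_sfp) == 0, "frame pointer is the first field here");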
Example #3
void cgStMIPropState(IRLS& env, const IRInstruction* inst) {
  auto const cls = srcLoc(env, inst, 0).reg();
  auto const slot = srcLoc(env, inst, 1).reg();
  auto const isStatic = inst->src(2)->boolVal();
  auto const off = rds::kVmMInstrStateOff + offsetof(MInstrState, propState);
  auto& v = vmain(env);

  using M = MInstrPropState;

  static_assert(M::slotSize() == 4, "");
  static_assert(M::clsSize() == 4 || M::clsSize() == 8, "");

  if (inst->src(0)->isA(TNullptr)) {
    // If the Class* field is null, none of the other fields matter.
    emitStLowPtr(v, v.cns(0), rvmtl()[off + M::clsOff()], M::clsSize());
    return;
  }

  if (inst->src(0)->hasConstVal(TCls) &&
      inst->src(1)->hasConstVal(TInt)) {
    // If everything is a constant, and this is a LowPtr build, we can store the
    // values with a single 64-bit immediate.
    if (M::clsOff() + M::clsSize() == M::slotOff() && M::clsSize() == 4) {
      auto const clsVal = inst->src(0)->clsVal();
      auto const slotVal = inst->src(1)->intVal();
      auto raw = reinterpret_cast<uint64_t>(clsVal);
      raw |= (static_cast<uint64_t>(slotVal) << 32);
      if (isStatic) raw |= 0x1;
      emitImmStoreq(v, raw, rvmtl()[off + M::clsOff()]);
      return;
    }
  }

  auto markedCls = cls;
  if (isStatic) {
    markedCls = v.makeReg();
    v << orqi{0x1, cls, markedCls, v.makeReg()};
  }
  emitStLowPtr(v, markedCls, rvmtl()[off + M::clsOff()], M::clsSize());
  v << storel{slot, rvmtl()[off + M::slotOff()]};
}
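The constant branch above folds the 32-bit LowPtr'd Class*, the 32-bit slot, and the static bit into one 64-bit value, so a single emitImmStoreq initializes both fields. A standalone sketch of that packing, with made-up values:

// Standalone sketch of the 64-bit packing used in the constant case above:
// low 32 bits hold the LowPtr'd Class* (bit 0 doubles as the "static"
// marker), high 32 bits hold the slot. Values are illustrative.
#include <cassert>
#include <cstdint>

uint64_t pack(uint32_t clsLowPtr, uint32_t slot, bool isStatic) {
  uint64_t raw = clsLowPtr;
  raw |= static_cast<uint64_t>(slot) << 32;
  if (isStatic) raw |= 0x1;
  return raw;
}

int main() {
  auto const raw = pack(0xdeadbee0, 7, true);
  assert((raw & 0x1) == 0x1);                     // static bit is set
  assert(static_cast<uint32_t>(raw >> 32) == 7);  // slot in the high word
}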
Example #4
void emitEagerSyncPoint(Vout& v, PC pc, Vreg rds, Vreg vmfp, Vreg vmsp) {
  v << store{vmfp, rds[rds::kVmfpOff]};
  v << store{vmsp, rds[rds::kVmspOff]};
  emitImmStoreq(v, intptr_t(pc), rds[rds::kVmpcOff]);
}
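emitEagerSyncPoint publishes the live frame pointer, stack pointer, and PC into the request-data block (RDS) so native code that runs next observes a consistent VM state. A conceptual plain-C++ equivalent of the three stores; the VMRegs struct is an illustrative stand-in for the real RDS layout:

// Conceptual equivalent of the three stores above: write the live frame
// pointer, stack pointer, and PC into the per-request data block. The
// struct and field names are illustrative, not HHVM's real RDS layout.
#include <cstdint>

struct VMRegs {
  void* vmfp;
  void* vmsp;
  const uint8_t* vmpc;
};

void eagerSync(VMRegs& rds, void* fp, void* sp, const uint8_t* pc) {
  rds.vmfp = fp;
  rds.vmsp = sp;
  rds.vmpc = pc;
}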
Example #5
void cgCall(IRLS& env, const IRInstruction* inst) {
  auto const sp = srcLoc(env, inst, 0).reg();
  auto const fp = srcLoc(env, inst, 1).reg();
  auto const extra = inst->extra<Call>();
  auto const callee = extra->callee;
  auto const argc = extra->numParams;

  auto& v = vmain(env);
  auto& vc = vcold(env);
  auto const catchBlock = label(env, inst->taken());

  auto const calleeSP = sp[cellsToBytes(extra->spOffset.offset)];
  auto const calleeAR = calleeSP + cellsToBytes(argc);

  v << store{fp, calleeAR + AROFF(m_sfp)};
  v << storeli{safe_cast<int32_t>(extra->after), calleeAR + AROFF(m_soff)};

  if (extra->fcallAwait) {
    // This clobbers any flags that might have already been set on the callee
    // AR (e.g., by SpillFrame), but this is okay because there should never be
    // any conflicts; see the documentation in act-rec.h.
    auto const imm = static_cast<int32_t>(
      ActRec::encodeNumArgsAndFlags(argc, ActRec::Flags::IsFCallAwait)
    );
    v << storeli{imm, calleeAR + AROFF(m_numArgsAndFlags)};
  }

  auto const isNativeImplCall = callee &&
                                callee->builtinFuncPtr() &&
                                !callee->nativeFuncPtr() &&
                                argc == callee->numParams();
  if (isNativeImplCall) {
    // The assumption here is that for builtins, the generated func contains
    // only a single opcode (NativeImpl), and there are no non-argument locals.
    if (do_assert) {
      assertx(argc == callee->numLocals());
      assertx(callee->numIterators() == 0);

      auto addr = callee->getEntry();
      while (peek_op(addr) == Op::AssertRATL) {
        addr += instrLen(addr);
      }
      assertx(peek_op(addr) == Op::NativeImpl);
      assertx(addr + instrLen(addr) ==
              callee->unit()->entry() + callee->past());
    }

    v << store{v.cns(mcg->ustubs().retHelper), calleeAR + AROFF(m_savedRip)};
    if (callee->attrs() & AttrMayUseVV) {
      v << storeqi{0, calleeAR + AROFF(m_invName)};
    }
    v << lea{calleeAR, rvmfp()};

    emitCheckSurpriseFlagsEnter(v, vc, fp, Fixup(0, argc), catchBlock);

    auto const builtinFuncPtr = callee->builtinFuncPtr();
    TRACE(2, "Calling builtin preClass %p func %p\n",
          callee->preClass(), builtinFuncPtr);

    // We sometimes call this while curFunc() isn't really the builtin, so make
    // sure to record the sync point as if we are inside the builtin.
    if (FixupMap::eagerRecord(callee)) {
      auto const syncSP = v.makeReg();
      v << lea{calleeSP, syncSP};
      emitEagerSyncPoint(v, callee->getEntry(), rvmtl(), rvmfp(), syncSP);
    }

    // Call the native implementation.  This will free the locals for us in the
    // normal case.  In the case where an exception is thrown, the VM unwinder
    // will handle it for us.
    auto const done = v.makeBlock();
    v << vinvoke{CallSpec::direct(builtinFuncPtr), v.makeVcallArgs({{rvmfp()}}),
                 v.makeTuple({}), {done, catchBlock}, Fixup(0, argc)};
    env.catch_calls[inst->taken()] = CatchCall::CPP;

    v = done;
    // The native implementation already put the return value on the stack for
    // us, and handled cleaning up the arguments.  We have to update the frame
    // pointer and the stack pointer, and load the return value into the return
    // register so the trace we are returning to has it where it expects.
    // TODO(#1273094): We should probably modify the actual builtins to return
    // values via registers using the C ABI and do a reg-to-reg move.
    loadTV(v, inst->dst(), dstLoc(env, inst, 0), rvmfp()[AROFF(m_r)], true);
    v << load{rvmfp()[AROFF(m_sfp)], rvmfp()};
    emitRB(v, Trace::RBTypeFuncExit, callee->fullName()->data());
    return;
  }

  v << lea{calleeAR, rvmfp()};

  if (RuntimeOption::EvalHHIRGenerateAsserts) {
    v << syncvmsp{v.cns(0x42)};

    constexpr uint64_t kUninitializedRIP = 0xba5eba11acc01ade;
    emitImmStoreq(v, kUninitializedRIP, rvmfp()[AROFF(m_savedRip)]);
  }

  // Emit a smashable call that initially calls a recyclable service request
  // stub.  The stub and the eventual targets take rvmfp() as an argument,
  // pointing to the callee ActRec.
  auto const target = callee
    ? mcg->ustubs().immutableBindCallStub
    : mcg->ustubs().bindCallStub;

  auto const done = v.makeBlock();
  v << callphp{target, php_call_regs(), {{done, catchBlock}}};
  env.catch_calls[inst->taken()] = CatchCall::PHP;
  v = done;

  auto const dst = dstLoc(env, inst, 0);
  v << defvmret{dst.reg(0), dst.reg(1)};
}
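The callphp at the end emits a smashable call: the first execution lands in a bind-call service-request stub, which translates the callee's prologue and then rewrites the call site to target it directly, so later calls bypass the stub. A simplified, software-only analog of that bind-then-smash pattern (real smashable calls patch machine code in place; these names are illustrative):

// Software-only analog of the bind-then-smash pattern behind callphp: the
// call site starts out pointing at a resolver stub; the first call rebinds
// the pointer to the real target, so later calls skip the stub entirely.
#include <cstdio>

using Target = void (*)();

Target g_callSite;  // stands in for the smashable call instruction

void realPrologue() { std::puts("prologue"); }

void bindCallStub() {           // stands in for the service-request stub
  g_callSite = realPrologue;    // "smash" the call site...
  realPrologue();               // ...and complete the first call
}

int main() {
  g_callSite = bindCallStub;
  g_callSite();  // first call: goes through the stub and rebinds
  g_callSite();  // later calls: hit realPrologue directly
}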