void cgFwdCtxStaticCall(IRLS& env, const IRInstruction* inst) {
  auto const dstCtx = dstLoc(env, inst, 0).reg();
  auto const srcCtx = srcLoc(env, inst, 0).reg();
  auto const ty = inst->src(0)->type();

  auto& v = vmain(env);

  auto ctx_from_this = [] (Vout& v, Vreg rthis, Vreg dst) {
    // Load (this->m_cls | 0x1) into `dst'.
    auto const cls = emitLdObjClass(v, rthis, v.makeReg());
    v << orqi{ActRec::kHasClassBit, cls, dst, v.makeReg()};
    return dst;
  };

  if (ty <= TCctx) {
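    // A Cctx already has the class-pointer tag bit set, so just forward it.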
    v << copy{srcCtx, dstCtx};
  } else if (ty <= TObj) {
    ctx_from_this(v, srcCtx, dstCtx);
  } else {
    // If we don't know whether we have a $this, we need to check dynamically.
    auto const sf = v.makeReg();
    v << testqi{ActRec::kHasClassBit, srcCtx, sf};
    unlikelyCond(v, vcold(env), CC_NZ, sf, dstCtx,
                 [&] (Vout& v) { return srcCtx; },
                 [&] (Vout& v) { return ctx_from_this(v, srcCtx, v.makeReg()); });
  }
}
Example #2
void cgOrdStrIdx(IRLS& env, const IRInstruction* inst) {
  auto const sd = srcLoc(env, inst, 0).reg();
  auto const idx = srcLoc(env, inst, 1).reg();
  auto& v = vmain(env);

  auto const sf = v.makeReg();
  auto const length = v.makeReg();

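  // Bounds-check: load the string's length and unsigned-compare idx to it.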
  v << loadzlq{sd[StringData::sizeOff()], length};
  v << cmpq{idx, length, sf};

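  // Out of bounds: take the cold path, which raises the appropriate notice
  // via stringGetI and yields 0.  In bounds: load the byte directly.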
  unlikelyCond(v, vcold(env), CC_BE, sf, dstLoc(env, inst, 0).reg(),
    [&] (Vout& v) {
      auto const args = argGroup(env, inst).ssa(0).ssa(1);
      cgCallHelper(v, env, CallSpec::direct(MInstrHelpers::stringGetI),
                   kVoidDest, SyncOptions::Sync, args);
      return v.cns(0);
    },
    [&] (Vout& v) {
      auto const dst = v.makeReg();
      auto const data = v.makeReg();
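      // Get a pointer to the character data: it lives inline after the
      // header under NO_M_DATA, and behind the m_data pointer otherwise.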
#ifdef NO_M_DATA
      v << lea{sd[sizeof(StringData)], data};
#else
      v << load{sd[StringData::dataOff()], data};
#endif
      v << loadzbq{data[idx], dst};
      return dst;
    }
  );
}
Example #3
void cgCheckSurpriseAndStack(IRLS& env, const IRInstruction* inst) {
  auto const fp = srcLoc(env, inst, 0).reg();
  auto const extra = inst->extra<CheckSurpriseAndStack>();
  auto const func = extra->func;

  auto const off = func->getEntryForNumArgs(extra->argc) - func->base();
  auto const fixup = Fixup(off, func->numSlotsInFrame());
  auto& v = vmain(env);

  auto const sf = v.makeReg();
  auto const needed_top = v.makeReg();
  v << lea{fp[-cellsToBytes(func->maxStackCells())], needed_top};
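  // The rds word at kSurpriseFlagsOff holds the stack limit with the surprise
  // flags packed into its high bits, so this one unsigned compare catches both
  // a stack overflow and any pending surprise flags.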
  v << cmpqm{needed_top, rvmtl()[rds::kSurpriseFlagsOff], sf};

  unlikelyIfThen(v, vcold(env), CC_AE, sf, [&] (Vout& v) {
    auto const stub = tc::ustubs().functionSurprisedOrStackOverflow;
    auto const done = v.makeBlock();
    v << vinvoke{CallSpec::stub(stub), v.makeVcallArgs({}), v.makeTuple({}),
                 {done, label(env, inst->taken())}, fixup};
    v = done;
  });
}
Example #4
void cgCall(IRLS& env, const IRInstruction* inst) {
  auto const sp = srcLoc(env, inst, 0).reg();
  auto const fp = srcLoc(env, inst, 1).reg();
  auto const extra = inst->extra<Call>();
  auto const callee = extra->callee;
  auto const argc = extra->numParams;

  auto& v = vmain(env);
  auto& vc = vcold(env);
  auto const catchBlock = label(env, inst->taken());

  auto const calleeSP = sp[cellsToBytes(extra->spOffset.offset)];
  auto const calleeAR = calleeSP + cellsToBytes(argc);

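  // The callee's ActRec sits just above its arguments.  Link it to the
  // caller's frame and record the bytecode offset to return to.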
  v << store{fp, calleeAR + AROFF(m_sfp)};
  v << storeli{safe_cast<int32_t>(extra->after), calleeAR + AROFF(m_soff)};

  if (extra->fcallAwait) {
    // This clobbers any flags that might have already been set on the callee
    // AR (e.g., by SpillFrame), but this is okay because there should never be
    // any conflicts; see the documentation in act-rec.h.
    auto const imm = static_cast<int32_t>(
      ActRec::encodeNumArgsAndFlags(argc, ActRec::Flags::IsFCallAwait)
    );
    v << storeli{imm, calleeAR + AROFF(m_numArgsAndFlags)};
  }

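  // If the callee is a builtin whose body is a bare NativeImpl, we can call
  // the native implementation directly rather than entering translated code.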
  auto const isNativeImplCall = callee &&
                                callee->builtinFuncPtr() &&
                                !callee->nativeFuncPtr() &&
                                argc == callee->numParams();
  if (isNativeImplCall) {
    // The assumption here is that for builtins, the generated func contains
    // only a single opcode (NativeImpl), and there are no non-argument locals.
    if (do_assert) {
      assertx(argc == callee->numLocals());
      assertx(callee->numIterators() == 0);

      auto addr = callee->getEntry();
      while (peek_op(addr) == Op::AssertRATL) {
        addr += instrLen(addr);
      }
      assertx(peek_op(addr) == Op::NativeImpl);
      assertx(addr + instrLen(addr) ==
              callee->unit()->entry() + callee->past());
    }

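    // Set the frame's saved RIP to the retHelper stub, and zero m_invName for
    // callees that may use the VarEnv/invName slot (AttrMayUseVV).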
    v << store{v.cns(mcg->ustubs().retHelper), calleeAR + AROFF(m_savedRip)};
    if (callee->attrs() & AttrMayUseVV) {
      v << storeqi{0, calleeAR + AROFF(m_invName)};
    }
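    // Set rvmfp() to the callee's ActRec; we're now logically in the callee.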
    v << lea{calleeAR, rvmfp()};

    emitCheckSurpriseFlagsEnter(v, vc, fp, Fixup(0, argc), catchBlock);

    auto const builtinFuncPtr = callee->builtinFuncPtr();
    TRACE(2, "Calling builtin preClass %p func %p\n",
          callee->preClass(), builtinFuncPtr);

    // We sometimes call this while curFunc() isn't really the builtin, so make
    // sure to record the sync point as if we are inside the builtin.
    if (FixupMap::eagerRecord(callee)) {
      auto const syncSP = v.makeReg();
      v << lea{calleeSP, syncSP};
      emitEagerSyncPoint(v, callee->getEntry(), rvmtl(), rvmfp(), syncSP);
    }

    // Call the native implementation.  This will free the locals for us in the
    // normal case.  In the case where an exception is thrown, the VM unwinder
    // will handle it for us.
    auto const done = v.makeBlock();
    v << vinvoke{CallSpec::direct(builtinFuncPtr), v.makeVcallArgs({{rvmfp()}}),
                 v.makeTuple({}), {done, catchBlock}, Fixup(0, argc)};
    env.catch_calls[inst->taken()] = CatchCall::CPP;

    v = done;
    // The native implementation already put the return value on the stack for
    // us, and handled cleaning up the arguments.  We have to update the frame
    // pointer and the stack pointer, and load the return value into the return
    // register so the trace we are returning to has it where it expects.
    // TODO(#1273094): We should probably modify the actual builtins to return
    // values via registers using the C ABI and do a reg-to-reg move.
    loadTV(v, inst->dst(), dstLoc(env, inst, 0), rvmfp()[AROFF(m_r)], true);
    v << load{rvmfp()[AROFF(m_sfp)], rvmfp()};
    emitRB(v, Trace::RBTypeFuncExit, callee->fullName()->data());
    return;
  }

  v << lea{calleeAR, rvmfp()};

  if (RuntimeOption::EvalHHIRGenerateAsserts) {
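    // Poison vmsp and the saved RIP with recognizable garbage so that any
    // unexpected use of them faults loudly.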
    v << syncvmsp{v.cns(0x42)};

    constexpr uint64_t kUninitializedRIP = 0xba5eba11acc01ade;
    emitImmStoreq(v, kUninitializedRIP, rvmfp()[AROFF(m_savedRip)]);
  }

  // Emit a smashable call that initially calls a recyclable service request
  // stub.  The stub and the eventual targets take rvmfp() as an argument,
  // pointing to the callee ActRec.
  auto const target = callee
    ? mcg->ustubs().immutableBindCallStub
    : mcg->ustubs().bindCallStub;

  auto const done = v.makeBlock();
  v << callphp{target, php_call_regs(), {{done, catchBlock}}};
  env.catch_calls[inst->taken()] = CatchCall::PHP;
  v = done;

  auto const dst = dstLoc(env, inst, 0);
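  // The return value comes back in the VM return registers; defvmret makes
  // them available to the rest of the trace.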
  v << defvmret{dst.reg(0), dst.reg(1)};
}