Example 1
// Emit a call to a C++ helper from the vasm stream (ARM backend).
//
// Assigns physical argument registers, shuffles the arguments into place,
// emits the call, records a sync point if required, wires up the catch
// edge (or marks the call nothrow), and finally copies the native return
// value into the destination Vreg(s).
//
//  v       - vasm output stream to append instructions to
//  call    - the C++ target being invoked
//  dstInfo - where (and in what form) the result should land
//  sync    - whether a VM sync point must be recorded for this call
//  args    - the argument descriptors to marshal into ABI registers
void CodeGenerator::cgCallHelper(Vout& v,
                                 CppCall call,
                                 const CallDest& dstInfo,
                                 SyncOptions sync,
                                 ArgGroup& args) {
  auto dstReg0 = dstInfo.reg0;
  DEBUG_ONLY auto dstReg1 = dstInfo.reg1;

  // Bind each GP argument to its ABI argument register (rarg(i)) and
  // remember the full set so the call knows which regs are live-in.
  RegSet argRegs;
  for (size_t i = 0; i < args.numGpArgs(); i++) {
    auto const r = rarg(i);
    args.gpArg(i).setDstReg(r);
    argRegs.add(r);
  }
  // This backend can only pass arguments in registers so far.
  always_assert_flog(
    args.numStackArgs() == 0,
    "Stack arguments not yet supported on ARM: `{}'",
    *m_curInst
  );
  shuffleArgs(v, args, call);

  auto syncPoint = emitCall(v, call, argRegs);
  // When heap profiling is on, the VM regs must always be in sync during
  // allocations, so record the sync point regardless of `sync`.
  if (RuntimeOption::HHProfServerEnabled || sync != SyncOptions::kNoSyncPoint) {
    recordHostCallSyncPoint(v, syncPoint);
  }

  auto* taken = m_curInst->taken();
  if (taken && taken->isCatch()) {
    // The instruction has a catch block: split the stream and emit an
    // hcunwind so the unwinder can dispatch to the catch trace.
    assert_not_implemented(args.numStackArgs() == 0);
    auto next = v.makeBlock();
    v << hcunwind{syncPoint, {next, m_state.labels[taken]}};
    v = next;
  } else if (!m_curInst->is(Call, CallArray, ContEnter)) {
    // No catch block, so this call had better not throw; register a null
    // catch trace for the unwinder (Call/CallArray/ContEnter are exempt).
    v << hcnocatch{syncPoint};
  }

  // Move the native return value into the requested destination.
  switch (dstInfo.type) {
    case DestType::TV: CG_PUNT(cgCall-ReturnTV);
    case DestType::SIMD: CG_PUNT(cgCall-ReturnSIMD);
    case DestType::SSA:
    case DestType::Byte:
      // Integer/pointer results come back in x0 (AAPCS64).
      assertx(dstReg1 == InvalidReg);
      v << copy{PhysReg(vixl::x0), dstReg0};
      break;
    case DestType::None:
      assertx(dstReg0 == InvalidReg && dstReg1 == InvalidReg);
      break;
    case DestType::Dbl:
      // Floating-point results come back in d0 (AAPCS64).
      assertx(dstReg1 == InvalidReg);
      v << copy{PhysReg(vixl::d0), dstReg0};
      break;
  }
}
Example 2
// Move call arguments from their current (source) registers into the ABI
// (destination) registers without clobbering values that are still needed.
//
// Two passes:
//   1. Register-to-register moves: collect src->dst pairs for args whose
//      source is already a physical register, resolve them into a safe
//      ordering (including swaps) with doVregMoves, and emit the moves.
//   2. Everything left over: materialize immediates, zero-extends, type
//      shifts, and address displacements directly into the destination.
static void shuffleArgs(Vout& v, ArgGroup& args, CppCall& call) {
  MovePlan moves;
  PhysReg::Map<ArgDesc*> argDescs;

  // Pass 1 setup: record a planned move for every Reg/Addr/TypeReg arg
  // whose source is a physical register different from its destination.
  for (size_t i = 0; i < args.numGpArgs(); i++) {
    auto& arg = args.gpArg(i);
    auto kind = arg.kind();
    if (!(kind == ArgDesc::Kind::Reg  ||
          kind == ArgDesc::Kind::Addr ||
          kind == ArgDesc::Kind::TypeReg)) {
      continue;
    }
    auto srcReg = arg.srcReg();
    auto dstReg = arg.dstReg();
    if (srcReg != dstReg && srcReg.isPhys()) {
      moves[dstReg] = srcReg;
      argDescs[dstReg] = &arg;
    }
  }

  // Resolve the move graph into an ordered list of moves/swaps that never
  // overwrites a still-needed source.
  auto const howTo = doVregMoves(v.unit(), moves);
  for (auto& how : howTo) {
    auto src = how.m_src;
    auto dst = how.m_dst;
    if (how.m_kind == VMoveInfo::Kind::Move) {
      if (dst.isVirt()) {
        v << copy{src, dst};
      } else {
        auto* argDesc = argDescs[how.m_dst];
        if (argDesc) {
          auto kind = argDesc->kind();
          if (kind == ArgDesc::Kind::Addr) {
            // Address args fold their displacement into the move via lea.
            v << lea{src[argDesc->disp().l()], dst};
          } else {
            if (argDesc->isZeroExtend()) {
              v << movzbl{src, dst};
            } else {
              v << copy{src, dst};
            }
          }
          // TypeReg args still need the shift in pass 2, so leave them
          // unmarked; everything else is fully handled here.
          if (kind != ArgDesc::Kind::TypeReg) {
            argDesc->markDone();
          }
        } else {
          v << copy{src, dst};
        }
      }
    } else {
      // Cycle in the move graph: exchange the two registers.
      v << copy2{src, dst, dst, src};
    }
  }

  // Pass 2: handle every arg not completed above.
  for (size_t i = 0; i < args.numGpArgs(); ++i) {
    auto& arg = args.gpArg(i);
    if (arg.done()) continue;
    auto kind = arg.kind();
    auto src = arg.srcReg();
    auto dst = arg.dstReg();
    if (kind == ArgDesc::Kind::Imm) {
      v << ldimm{arg.imm().q(), dst};
    } else if (kind == ArgDesc::Kind::Reg) {
      if (arg.isZeroExtend()) {
        // If src is still virtual it wasn't moved in pass 1; otherwise the
        // value already sits in dst and we zero-extend in place.
        if (src.isVirt()) {
          v << movzbl{src, dst};
        } else {
          v << movzbl{dst, dst};
        }
      } else {
        if (src.isVirt()) {
          v << copy{src, dst};
        }
      }
    } else if (kind == ArgDesc::Kind::TypeReg) {
      // Pack the type into its in-register position; when no shift is
      // needed, a plain copy suffices (and only if src wasn't moved yet).
      if (kTypeShiftBits > 0) {
        if (src.isVirt()) {
          v << shlqi{kTypeShiftBits, src, dst, v.makeReg()};
        } else {
          v << shlqi{kTypeShiftBits, dst, dst, v.makeReg()};
        }
      } else {
        if (src.isVirt()) {
          v << copy{src, dst};
        }
      }
    } else if (kind == ArgDesc::Kind::Addr) {
      // Compute base + displacement into dst.
      if (src.isVirt()) {
        v << addqi{arg.disp(), src, dst, v.makeReg()};
      } else {
        v << addqi{arg.disp(), dst, dst, v.makeReg()};
      }
    } else {
      not_implemented();
    }
  }
}
Example 3
// Emit a call to a C++ helper from the vasm stream (IRLS lowering).
//
// Gathers the argument Vregs by category (indirect-return, GP, SIMD,
// stack), computes the Fixup needed to sync VM state, validates/wires the
// catch edge if the instruction has one, and emits either a vinvoke (has
// catch edge) or a vcall (no catch edge; marked nothrow).
//
//  v       - vasm output stream
//  env     - lowering state (for catch-block labels)
//  call    - the C++ target being invoked
//  dstInfo - destination register(s) and result kind
//  sync    - whether a VM sync point is required
//  args    - argument descriptors to marshal
void cgCallHelper(Vout& v, IRLS& env, CallSpec call, const CallDest& dstInfo,
                  SyncOptions sync, const ArgGroup& args) {
    auto const inst = args.inst();
    jit::vector<Vreg> vIndRetArgs, vargs, vSimdArgs, vStkArgs;

    // Lower each argument into a Vreg, bucketed by how it is passed.
    for (size_t i = 0; i < args.numIndRetArgs(); ++i) {
        prepareArg(args.indRetArg(i), v, vIndRetArgs);
    }
    for (size_t i = 0; i < args.numGpArgs(); ++i) {
        prepareArg(args.gpArg(i), v, vargs);
    }
    for (size_t i = 0; i < args.numSimdArgs(); ++i) {
        prepareArg(args.simdArg(i), v, vSimdArgs);
    }
    for (size_t i = 0; i < args.numStackArgs(); ++i) {
        prepareArg(args.stkArg(i), v, vStkArgs);
    }

    auto const syncFixup = [&] {
        if (RuntimeOption::HHProfEnabled || sync != SyncOptions::None) {
            // If we are profiling the heap, we always need to sync because regs need
            // to be correct during allocations no matter what.
            return makeFixup(inst->marker(), sync);
        }
        // No sync needed: an empty Fixup.
        return Fixup{};
    }();

    // targets[0] = fallthrough block, targets[1] = catch block (if any).
    Vlabel targets[2];
    bool nothrow = false;
    auto const taken = inst->taken();
    auto const do_catch = taken && taken->isCatch();

    if (do_catch) {
        // A catch block implies the call may throw, which requires a sync
        // point (InterpOne is the one sanctioned exception).
        always_assert_flog(
            inst->is(InterpOne) || sync != SyncOptions::None,
            "cgCallHelper called with None but inst has a catch block: {}\n",
            *inst
        );
        // The catch trace must have been created for this exact marker,
        // otherwise the unwinder would restore the wrong state.
        always_assert_flog(
            taken->catchMarker() == inst->marker(),
            "Catch trace doesn't match fixup:\n"
            "Instruction: {}\n"
            "Catch trace: {}\n"
            "Fixup      : {}\n",
            inst->toString(),
            taken->catchMarker().show(),
            inst->marker().show()
        );

        targets[0] = v.makeBlock();
        targets[1] = env.labels[taken];
    } else {
        // The current instruction doesn't have a catch block so it'd better not
        // throw.  Register a null catch trace to indicate this to the unwinder.
        nothrow = true;
    }

    // Collect the destination Vregs; reg1 is only meaningful if reg0 is set.
    VregList dstRegs;
    if (dstInfo.reg0.isValid()) {
        dstRegs.push_back(dstInfo.reg0);
        if (dstInfo.reg1.isValid()) {
            dstRegs.push_back(dstInfo.reg1);
        }
    }

    auto const argsId = v.makeVcallArgs({
        std::move(vargs),
        std::move(vSimdArgs),
        std::move(vStkArgs),
        std::move(vIndRetArgs)
    });
    auto const dstId = v.makeTuple(std::move(dstRegs));

    if (do_catch) {
        // vinvoke carries both successor edges; continue emitting into the
        // fallthrough block afterwards.
        v << vinvoke{call, argsId, dstId, {targets[0], targets[1]},
                     syncFixup, dstInfo.type};
        v = targets[0];
    } else {
        v << vcall{call, argsId, dstId, syncFixup, dstInfo.type, nothrow};
    }
}