Example no. 1
0
/*
 * Emit a native call to `target', passing `args' for the register
 * allocator's benefit.
 *
 * Direct calls emit a plain call; virtual calls load the method address
 * out of the object's vtable (object in rdi, rax as scratch); ArrayVirt
 * calls index a function table by the array's HeaderKind byte; Destructor
 * calls index the destructor table by the value's type.
 */
void emitCall(Vout& v, CppCall target, RegSet args) {
  switch (target.kind()) {
    case CppCall::Kind::Direct:
      v << call{static_cast<TCA>(target.address()), args};
      return;
    case CppCall::Kind::Virtual:
      // Virtual call.  Load method's address from proper offset off of object in
      // rdi, using rax as scratch.
      v << load{*reg::rdi, reg::rax};
      v << callm{reg::rax[target.vtableOffset()], args};
      return;
    case CppCall::Kind::ArrayVirt: {
      auto const addr = reinterpret_cast<intptr_t>(target.arrayTable());
      // Index the table by the array's HeaderKind byte.
      v << loadzbl{reg::rdi[HeaderKindOffset], reg::eax};
      if (deltaFits(addr, sz::dword)) {
        // Table address fits in a 32-bit displacement: use the compact
        // baseless addressing form.
        v << callm{baseless(reg::rax * 8 + addr), args};
      } else {
        auto const base = v.makeReg();
        v << ldimmq{addr, base};
        v << callm{base[reg::rax * 8], args};
      }
      static_assert(sizeof(HeaderKind) == 1, "");
      return;
    }
    case CppCall::Kind::Destructor: {
      // Braces scope zextType/dtor_ptr to this case; without them, any
      // case label added below would jump across their initialization,
      // which is ill-formed.
      //
      // This movzbq is only needed because callers aren't required to
      // zero-extend the type.
      auto zextType = v.makeReg();
      v << movzbq{target.reg(), zextType};
      auto dtor_ptr = lookupDestructor(v, zextType);
      v << callm{dtor_ptr, args};
      return;
    }
  }
  not_reached();
}
Example no. 2
0
/*
 * Return a Vptr addressing the destructor-table entry for `type'.
 *
 * `type' need not be zero-extended; only the bits surviving the shift
 * contribute to the index.
 */
Vptr lookupDestructor(Vout& v, Vreg type) {
  auto const tableAddr = reinterpret_cast<intptr_t>(g_destructors);
  always_assert_flog(deltaFits(tableAddr, sz::dword),
    "Destructor function table is expected to be in the data "
    "segment, with addresses less than 2^31"
  );
  // Shift the type down to a table index, then scale by the 8-byte
  // entry size and add the table's displacement.
  auto const destrIdx = v.makeReg();
  v << shrli{kShiftDataTypeToDestrIndex, type, destrIdx, v.makeReg()};
  auto const disp = safe_cast<int>(tableAddr);
  return baseless(destrIdx * 8 + disp);
}
Example no. 3
0
/*
 * Return a Vptr addressing the destructor-table entry for `type'.
 *
 * The table layout is tied to the KindOf* encoding; the static_assert
 * below pins that dependency down at compile time.
 */
Vptr lookupDestructor(Vout& v, Vreg type) {
  static_assert((KindOfString   >> kShiftDataTypeToDestrIndex == 1) &&
                (KindOfArray    >> kShiftDataTypeToDestrIndex == 2) &&
                (KindOfObject   >> kShiftDataTypeToDestrIndex == 3) &&
                (KindOfResource >> kShiftDataTypeToDestrIndex == 4) &&
                (KindOfRef      >> kShiftDataTypeToDestrIndex == 5),
                "lookup of destructors depends on KindOf* values");

  auto const tableAddr = reinterpret_cast<intptr_t>(g_destructors);
  always_assert_flog(deltaFits(tableAddr, sz::dword),
    "Destructor function table is expected to be in the data "
    "segment, with addresses less than 2^31"
  );

  // Shift the type down to a table index; entries are 8 bytes wide.
  auto const destrIdx = v.makeReg();
  v << shrli{kShiftDataTypeToDestrIndex, type, destrIdx, v.makeReg()};
  return baseless(destrIdx * 8 + safe_cast<int>(tableAddr));
}
Example no. 4
0
/*
 * Return a Vptr addressing the destructor-table entry for `type'.
 *
 * Unlike earlier variants, this one tolerates a table that lives above
 * the 2^31 boundary by materializing the base address in a register.
 */
Vptr lookupDestructor(Vout& v, Vreg type) {
  auto const table = reinterpret_cast<intptr_t>(g_destructors);

  auto const typel  = v.makeReg();
  auto const index  = v.makeReg();
  auto const indexl = v.makeReg();

  // This movzbl is only needed because callers aren't required to
  // zero-extend the type.
  v << movzbl{type, typel};
  v << shrli{kShiftDataTypeToDestrIndex, typel, indexl, v.makeReg()};
  v << movzlq{indexl, index};

  // The baseless form is more compact, but isn't supported for 64-bit
  // displacements; fall back to an explicit base register in that case.
  if (table > std::numeric_limits<int>::max()) {
    return v.cns(table)[index * 8];
  }
  return baseless(index * 8 + safe_cast<int>(table));
}
Example no. 5
0
/*
 * Emit a native call to `target', passing `args' for the register
 * allocator's benefit.
 *
 * Dispatches on the CallSpec kind: direct and smashable calls go
 * straight to the address; ArrayVirt calls index a function table by
 * the array's HeaderKind; Destructor calls index the destructor table
 * by type; Stub calls go through callstub.
 */
void emitCall(Vout& v, CallSpec target, RegSet args) {
  switch (target.kind()) {
    case CallSpec::Kind::Direct:
      v << call{static_cast<TCA>(target.address()), args};
      return;

    case CallSpec::Kind::Smashable:
      v << calls{static_cast<TCA>(target.address()), args};
      return;

    case CallSpec::Kind::ArrayVirt: {
      static_assert(sizeof(HeaderKind) == 1, "");
      auto const tableAddr = reinterpret_cast<intptr_t>(target.arrayTable());

      // Index the table by the array's HeaderKind byte.
      auto const kindReg = v.makeReg();
      v << loadzbl{rarg(0)[HeaderKindOffset], kindReg};

      if (!deltaFits(tableAddr, sz::dword)) {
        // Table is out of 32-bit displacement range; load the base.
        auto const base = v.makeReg();
        v << ldimmq{tableAddr, base};
        v << callm{base[kindReg * 8], args};
      } else {
        v << callm{baseless(kindReg * 8 + tableAddr), args};
      }
      return;
    }

    case CallSpec::Kind::Destructor: {
      // this movzbq is only needed because callers aren't required to
      // zero-extend the type.
      auto const zextType = v.makeReg();
      v << movzbq{target.reg(), zextType};
      auto const dtorPtr = lookupDestructor(v, zextType);
      v << callm{dtorPtr, args};
      return;
    }

    case CallSpec::Kind::Stub:
      v << callstub{target.stubAddr(), args};
      return;
  }
  not_reached();
}
Example no. 6
0
/*
 * Emit a native call to `target', passing `args' for the register
 * allocator's benefit.
 *
 * In this variant, Destructor calls defer zero-extension of the type
 * to lookupDestructor() rather than doing it here.
 */
void emitCall(Vout& v, CallSpec target, RegSet args) {
  using K = CallSpec::Kind;

  switch (target.kind()) {
    case K::Direct:
      v << call{static_cast<TCA>(target.address()), args};
      return;

    case K::Smashable:
      v << calls{static_cast<TCA>(target.address()), args};
      return;

    case K::ArrayVirt: {
      static_assert(sizeof(HeaderKind) == 1, "");
      auto const tableAddr = reinterpret_cast<intptr_t>(target.arrayTable());

      // Index the table by the array's HeaderKind byte.
      auto const kindReg = v.makeReg();
      v << loadzbl{rarg(0)[HeaderKindOffset], kindReg};

      if (deltaFits(tableAddr, sz::dword)) {
        // In 32-bit displacement range: use the baseless form.
        v << callm{baseless(kindReg * 8 + tableAddr), args};
      } else {
        auto const base = v.makeReg();
        v << ldimmq{tableAddr, base};
        v << callm{base[kindReg * 8], args};
      }
      return;
    }

    case K::Destructor: {
      auto const dtorPtr = lookupDestructor(v, target.reg());
      v << callm{dtorPtr, args};
      return;
    }

    case K::Stub:
      v << callstub{target.stubAddr(), args};
      return;
  }
  not_reached();
}
Example no. 7
0
/*
 * Bump the thread-local stats counter `stat' by `n'.
 *
 * Emits nothing when stats collection is disabled, unless `force' is
 * set.  The counter is addressed FS-relative via its offset from the
 * thread-local base.
 */
void emitIncStat(Vout& v, Stats::StatCounter stat, int n, bool force) {
  if (force || Stats::enabled()) {
    // Offset of this counter's slot from the thread-local base.
    intptr_t off = uintptr_t(&Stats::tl_counters[stat]) - tlsBase();
    v << addqim{n, Vptr{baseless(off), Vptr::FS}, v.makeReg()};
  }
}