Example #1
void emitCall(Vout& v, CppCall target, RegSet args) {
  switch (target.kind()) {
    case CppCall::Kind::Direct:
      v << call{static_cast<TCA>(target.address()), args};
      return;
    case CppCall::Kind::Virtual:
      // Virtual call.  Load method's address from proper offset off of object in
      // rdi, using rax as scratch.
      v << load{*reg::rdi, reg::rax};
      v << callm{reg::rax[target.vtableOffset()], args};
      return;
    case CppCall::Kind::ArrayVirt: {
      auto const addr = reinterpret_cast<intptr_t>(target.arrayTable());
      static_assert(sizeof(HeaderKind) == 1, "");
      v << loadzbl{reg::rdi[HeaderKindOffset], reg::eax};
      if (deltaFits(addr, sz::dword)) {
        v << callm{baseless(reg::rax * 8 + addr), args};
      } else {
        auto const base = v.makeReg();
        v << ldimmq{addr, base};
        v << callm{base[reg::rax * 8], args};
      }
      return;
    }
    case CppCall::Kind::Destructor: {
      // This movzbq is only needed because callers aren't required to
      // zero-extend the type.
      auto const zextType = v.makeReg();
      v << movzbq{target.reg(), zextType};
      auto const dtor_ptr = lookupDestructor(v, zextType);
      v << callm{dtor_ptr, args};
      return;
    }
  }
  not_reached();
}
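A hypothetical call site for the Direct case; CppCall::direct appears below in Example #23, but some_cpp_helper and argSet here are illustrative placeholders, not names from the source:

// Sketch only: emit a direct call to a C++ helper with a given register set.
emitCall(v, CppCall::direct(some_cpp_helper), argSet);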
Example #2
PhysRegSaverParity::PhysRegSaverParity(int parity, Vout& v,
                                       RegSet regs)
    : m_as(nullptr)
    , m_v(&v)
    , m_regs(regs)
{
  auto xmm = regs & x64::kXMMRegs;
  auto gpr = regs - xmm;
  m_adjust = (parity & 0x1) == (gpr.size() & 0x1) ? 8 : 0;
  if (!xmm.empty()) {
    v << subqi{16 * xmm.size(), reg::rsp, reg::rsp, v.makeReg()};
    int offset = 0;
    xmm.forEach([&](PhysReg pr) {
      v << storedqu{pr, reg::rsp[offset]};
      offset += 16;
    });
  }
  gpr.forEach([&] (PhysReg pr) {
    v << push{pr};
  });
  if (m_adjust) {
    // Keep rsp 16-byte aligned (an even number of 8-byte slots) for SIMD.
    v << subqi{m_adjust, reg::rsp, reg::rsp, v.makeReg()};
  }
}
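The constructor only emits the save sequence; a matching restore has to undo the three adjustments in reverse order. The following is a minimal sketch of what the destructor might look like, assuming vasm provides the inverse instructions (addqi, pop, loaddqu) and that RegSet::forEachR iterates in reverse; it is not the actual implementation:

PhysRegSaverParity::~PhysRegSaverParity() {
  auto& v = *m_v;
  if (m_adjust) {
    // Undo the alignment padding first.
    v << addqi{m_adjust, reg::rsp, reg::rsp, v.makeReg()};
  }
  auto xmm = m_regs & x64::kXMMRegs;
  auto gpr = m_regs - xmm;
  // Pop GPRs in the reverse of the push order.
  gpr.forEachR([&] (PhysReg pr) {
    v << pop{pr};
  });
  if (!xmm.empty()) {
    int offset = 0;
    xmm.forEach([&] (PhysReg pr) {
      v << loaddqu{reg::rsp[offset], pr};
      offset += 16;
    });
    v << addqi{16 * xmm.size(), reg::rsp, reg::rsp, v.makeReg()};
  }
}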
void storeTV(Vout& v, Vptr dst, Vloc srcLoc, const SSATmp* src) {
  auto const type = src->type();

  if (srcLoc.isFullSIMD()) {
    // The whole TV is stored in a single SIMD reg.
    assertx(RuntimeOption::EvalHHIRAllocSIMDRegs);
    v << storeups{srcLoc.reg(), dst};
    return;
  }

  if (type.needsReg()) {
    assertx(srcLoc.hasReg(1));
    v << storeb{srcLoc.reg(1), dst + TVOFF(m_type)};
  } else {
    v << storeb{v.cns(type.toDataType()), dst + TVOFF(m_type)};
  }

  // We ignore the values of statically nullish types.
  if (src->isA(TNull) || src->isA(TNullptr)) return;

  // Store the value.
  if (src->hasConstVal()) {
    // Skip potential zero-extend if we know the value.
    v << store{v.cns(src->rawVal()), dst + TVOFF(m_data)};
  } else {
    assertx(srcLoc.hasReg(0));
    auto const extended = zeroExtendIfBool(v, type, srcLoc.reg(0));
    v << store{extended, dst + TVOFF(m_data)};
  }
}
Example #4
void emitRB(Vout& v, Trace::RingBufferType t, const char* msg) {
  if (!Trace::moduleEnabled(Trace::ringbuffer, 1)) {
    return;
  }
  v << vcall{CppCall::direct(Trace::ringbufferMsg),
             v.makeVcallArgs({{v.cns(msg), v.cns(strlen(msg)), v.cns(t)}}),
             v.makeTuple({})};
}
Example #5
template <class Cls, class Len>
Vreg check_subcls(Vout& v, Vreg sf, Vreg d, Vreg lhs, Cls rhs, Len rhsVecLen) {
  return cond(v, CC_NB, sf, d,
       [&] (Vout& v) {
         return check_clsvec(v, v.makeReg(), lhs, rhs, rhsVecLen);
       },
       [&] (Vout& v) { return v.cns(false); }
      );
}
Vreg checkRDSHandleInitialized(Vout& v, rds::Handle ch) {
  assertx(rds::isNormalHandle(ch));
  auto const gen = v.makeReg();
  auto const sf = v.makeReg();
  v << loadb{rvmtl()[rds::genNumberHandleFrom(ch)], gen};
  v << cmpbm{gen, rvmtl()[rds::currentGenNumberHandle()], sf};
  return sf;
}
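cmpbm sets the flags from comparing the stored generation number with the current one, so CC_NE on the returned flags means the handle is uninitialized. A hedged usage sketch, combining it with ifThen from Example #8 (the initialization body is a placeholder):

auto const sf = checkRDSHandleInitialized(v, ch);
ifThen(v, CC_NE, sf, [&] (Vout& v) {
  // Generation mismatch: the handle has not been initialized this request.
  // ... emit code to initialize the handle's contents here ...
});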
Example #7
void emitAssertRefCount(Vout& v, Vreg base) {
  auto const sf = v.makeReg();
  v << cmplim{StaticValue, base[FAST_REFCOUNT_OFFSET], sf};
  // Only counts above StaticValue are checked against the realistic maximum.
  ifThen(v, CC_NLE, sf, [&](Vout& v) {
    auto const sf = v.makeReg();
    v << cmplim{RefCountMaxRealistic, base[FAST_REFCOUNT_OFFSET], sf};
    // An impossibly large count indicates corruption; trap via ud2.
    ifThen(v, CC_NBE, sf, [&](Vout& v) { v << ud2{}; });
  });
}
Example #8
template <class Then>
void ifThen(Vout& v, ConditionCode cc, Vreg sf, Then thenBlock) {
  auto then = v.makeBlock();
  auto done = v.makeBlock();
  // jcc targets are {next, taken}: branch to then when cc holds.
  v << jcc{cc, sf, {done, then}};
  v = then;
  thenBlock(v);
  if (!v.closed()) v << jmp{done};
  v = done;
}
Example #9
template <class Then>
void ifZero(Vout& v, unsigned bit, Vreg r, Then thenBlock) {
  auto then = v.makeBlock();
  auto done = v.makeBlock();
  // tbcc tests a single bit; vixl::eq takes the branch when the bit is clear.
  v << tbcc{vixl::eq, bit, r, {done, then}};
  v = then;
  thenBlock(v);
  if (!v.closed()) v << jmp{done};
  v = done;
}
Vptr lookupDestructor(Vout& v, Vreg type) {
  auto const table = reinterpret_cast<intptr_t>(g_destructors);
  always_assert_flog(deltaFits(table, sz::dword),
    "Destructor function table is expected to be in the data "
    "segment, with addresses less than 2^31"
  );
  auto index = v.makeReg();
  v << shrli{kShiftDataTypeToDestrIndex, type, index, v.makeReg()};
  return baseless(index * 8 + safe_cast<int>(table));
}
void cmpLowPtrImpl(Vout& v, Vreg sf, const void* ptr, Vptr mem, size_t size) {
  if (size == 8) {
    v << cmpqm{v.cns(ptr), mem, sf};
  } else if (size == 4) {
    auto const ptrImm = safe_cast<uint32_t>(reinterpret_cast<intptr_t>(ptr));
    v << cmplm{v.cns(ptrImm), mem, sf};
  } else {
    not_implemented();
  }
}
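Example #26 below calls an emitCmpLowPtr<Class> wrapper whose definition isn't shown here. Given the two cmpLowPtrImpl overloads (this one for raw pointers, and the Vreg one at the end of this section), a plausible sketch is the following; the exact signature is an assumption:

// Hypothetical wrapper: dispatch on the compiled-in width of LowPtr<T>.
// P is either const T* or Vreg, selecting the matching overload.
template <class T, class P>
void emitCmpLowPtr(Vout& v, Vreg sf, P ptr, Vptr mem) {
  cmpLowPtrImpl(v, sf, ptr, mem, sizeof(LowPtr<T>));
}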
void emitCmpClass(Vout& v, Vreg sf, const Class* cls, Vptr mem) {
  auto size = sizeof(LowPtr<Class>);
  if (size == 8) {
    v << cmpqm{v.cns(cls), mem, sf};
  } else if (size == 4) {
    auto const clsImm = safe_cast<uint32_t>(reinterpret_cast<intptr_t>(cls));
    v << cmplm{v.cns(clsImm), mem, sf};
  } else {
    not_implemented();
  }
}
void emitIncRefWork(Vout& v, Vreg data, Vreg type) {
  auto const sf = v.makeReg();
  emitCmpTVType(v, sf, KindOfRefCountThreshold, type);
  // Is the type refcounted at all?
  ifThen(v, CC_G, sf, [&] (Vout& v) {
    auto const sf2 = v.makeReg();
    // A non-negative count means the value is non-static and can be incref'd.
    v << cmplim{0, data[FAST_REFCOUNT_OFFSET], sf2};
    ifThen(v, CC_GE, sf2, [&] (Vout& v) { emitIncRef(v, data); });
  });
}
Example #14
template <class Then, class Else>
void ifThenElse(Vout& v, ConditionCode cc, Vreg sf, Then thenBlock,
                Else elseBlock) {
  auto thenLabel = v.makeBlock();
  auto elseLabel = v.makeBlock();
  auto done = v.makeBlock();
  v << jcc{cc, sf, {elseLabel, thenLabel}};
  v = thenLabel;
  thenBlock(v);
  if (!v.closed()) v << jmp{done};
  v = elseLabel;
  elseBlock(v);
  if (!v.closed()) v << jmp{done};
  v = done;
}
void emitDecRefWorkObj(Vout& v, Vreg obj) {
  auto const shouldRelease = v.makeReg();
  // A count of exactly 1 means this decref would free the object.
  v << cmplim{1, obj[FAST_REFCOUNT_OFFSET], shouldRelease};
  ifThenElse(
    v, CC_E, shouldRelease,
    [&] (Vout& v) {
      // Putting fn directly inside the vcall{} triggers an internal compiler
      // error (gcc 4.4.7).
      auto const fn = CallSpec::method(&ObjectData::release);
      v << vcall{fn, v.makeVcallArgs({{obj}}), v.makeTuple({})};
    },
    [&] (Vout& v) {
      emitDecRef(v, obj);
    }
  );
}
Example #16
Vptr lookupDestructor(Vout& v, Vreg type) {
  auto const table = reinterpret_cast<intptr_t>(g_destructors);
  always_assert_flog(deltaFits(table, sz::dword),
    "Destructor function table is expected to be in the data "
    "segment, with addresses less than 2^31"
  );
  static_assert((KindOfString   >> kShiftDataTypeToDestrIndex == 1) &&
                (KindOfArray    >> kShiftDataTypeToDestrIndex == 2) &&
                (KindOfObject   >> kShiftDataTypeToDestrIndex == 3) &&
                (KindOfResource >> kShiftDataTypeToDestrIndex == 4) &&
                (KindOfRef      >> kShiftDataTypeToDestrIndex == 5),
                "lookup of destructors depends on KindOf* values");
  auto index = v.makeReg();
  v << shrli{kShiftDataTypeToDestrIndex, type, index, v.makeReg()};
  return baseless(index * 8 + safe_cast<int>(table));
}
Example #17
Vreg zeroExtendIfBool(Vout& v, const SSATmp* src, Vreg reg) {
  if (!src->isA(TBool)) return reg;
  // Zero-extend the bool from a byte to a quad.
  auto extended = v.makeReg();
  v << movzbq{reg, extended};
  return extended;
}
Example #18
Vpoint emitCall(Vout& v, CppCall call, RegSet args) {
  PhysReg arg0(argReg(0));
  PhysReg rHostCall(rHostCallReg);
  switch (call.kind()) {
  case CppCall::Kind::Direct:
    v << ldimm{reinterpret_cast<intptr_t>(call.address()), rHostCall};
    break;
  case CppCall::Kind::Virtual:
    v << loadq{arg0[0], rHostCall};
    v << loadq{rHostCall[call.vtableOffset()], rHostCall};
    break;
  case CppCall::Kind::IndirectReg:
  case CppCall::Kind::IndirectVreg:
    // Indirect calls are not implemented yet; this will be something like
    // a.Br(x2a(call.getReg())).
    not_implemented();
    always_assert(0);
    break;
  case CppCall::Kind::ArrayVirt:
  case CppCall::Kind::Destructor:
    not_implemented();
    always_assert(0);
    break;
  }
  uint8_t argc = args.size();
  args.add(rHostCall);
  auto fixupAddr = v.makePoint();
  v << hostcall{args, argc, fixupAddr};
  return fixupAddr;
}
Vreg emitDecRef(Vout& v, Vreg base) {
  auto const sf = v.makeReg();
  v << declm{base[FAST_REFCOUNT_OFFSET], sf};
  assertSFNonNegative(v, sf);

  return sf;
}
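Because emitDecRef returns the flags from the decrement, a caller can branch on CC_E to detect a count that hit zero. A hedged sketch following the ifThen pattern from Example #8 (data and the release body are placeholders):

auto const sf = emitDecRef(v, data);
ifThen(v, CC_E, sf, [&] (Vout& v) {
  // The count reached zero; emit the release path here.
});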
Example #20
template <class T, class F>
Vreg condZero(Vout& v, Vreg r, Vreg dst, T t, F f) {
  using namespace x64;
  auto fblock = v.makeBlock();
  auto tblock = v.makeBlock();
  auto done = v.makeBlock();
  // cbcc with vixl::eq branches to tblock when r is zero (cbz semantics).
  v << cbcc{vixl::eq, r, {fblock, tblock}};
  v = tblock;
  auto treg = t(v);
  v << phijmp{done, v.makeTuple(VregList{treg})};
  v = fblock;
  auto freg = f(v);
  v << phijmp{done, v.makeTuple(VregList{freg})};
  v = done;
  v << phidef{v.makeTuple(VregList{dst})};
  return dst;
}
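A hypothetical use of condZero, materializing 1 or 0 depending on whether r is zero (the constants are illustrative):

auto const result = condZero(v, r, v.makeReg(),
    [&] (Vout& v) { return v.cns(1); },  // taken when r == 0
    [&] (Vout& v) { return v.cns(0); }); // taken when r != 0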
void emitImmStoreq(Vout& v, Immed64 imm, Vptr ref) {
  if (imm.fits(sz::dword)) {
    v << storeqi{imm.l(), ref};
  } else {
    v << store{v.cns(imm.q()), ref};
  }
}
Vreg zeroExtendIfBool(Vout& v, Type ty, Vreg reg) {
  if (!(ty <= TBool)) return reg;

  // Zero-extend the bool from a byte to a quad.
  auto extended = v.makeReg();
  v << movzbq{reg, extended};
  return extended;
}
Example #23
void emitCheckSurpriseFlagsEnter(Vout& v, Vout& vcold, Vreg fp, Vreg rds,
                                 Fixup fixup, Vlabel catchBlock) {
  auto cold = vcold.makeBlock();
  auto done = v.makeBlock();

  auto const sf = v.makeReg();
  v << cmpqm{fp, rds[rds::kSurpriseFlagsOff], sf};
  v << jcc{CC_NBE, sf, {done, cold}};

  v = done;
  vcold = cold;

  auto const call = CppCall::direct(
    reinterpret_cast<void(*)()>(mcg->tx().uniqueStubs.functionEnterHelper));
  auto const args = v.makeVcallArgs({});
  vcold << vinvoke{call, args, v.makeTuple({}), {done, catchBlock}, fixup};
}
void emitIncRef(Vout& v, Vreg base) {
  if (RuntimeOption::EvalHHIRGenerateAsserts) {
    emitAssertRefCount(v, base);
  }
  auto const sf = v.makeReg();
  v << inclm{base[FAST_REFCOUNT_OFFSET], sf};
  assertSFNonNegative(v, sf);
}
Example #25
void CodeGenerator::emitDecRefMem(Vout& v, Type type, Vreg base, int offset) {
  if (type.needsReg()) {
    emitDecRefDynamicType(v, base, offset);
  } else if (type.maybeCounted()) {
    auto data = v.makeReg();
    v << load{base[offset + TVOFF(m_data)], data};
    emitDecRefStaticType(v, type, data);
  }
}
Example #26
template <class Cls, class Len>
Vreg check_clsvec(Vout& v, Vreg d, Vreg lhs, Cls rhs, Len rhsVecLen) {
  // If lhs is a subclass of rhs, rhs must appear in lhs's class vector at
  // index rhsVecLen - 1, i.e. at classVecOff() + (rhsVecLen - 1) * ptr size.
  auto const vecOffset = rhsVecLen * static_cast<int>(sizeof(LowPtr<Class>)) +
    (Class::classVecOff() - sizeof(LowPtr<Class>));
  auto const sf = v.makeReg();
  emitCmpLowPtr<Class>(v, sf, rhs, lhs[vecOffset]);
  v << setcc{CC_E, sf, d};
  return d;
}
Example #27
void emitImmStoreq(Vout& v, Immed64 imm, Vptr ref) {
  if (imm.fits(sz::dword)) {
    v << storeqi{imm.l(), ref};
  } else {
    // An alternative is two 32-bit immediate stores, but that's little-endian
    // specific and generates larger code on x64 (24 bytes vs. 18 bytes).
    v << store{v.cns(imm.q()), ref};
  }
}
Example #28
void CodeGenerator::emitDecRefDynamicType(Vout& v, Vreg base, int offset) {
  auto counted_type = v.makeBlock();
  auto counted_obj = v.makeBlock();
  auto destruct = v.makeBlock();
  auto done = v.makeBlock();
  auto type = v.makeReg();
  auto data = v.makeReg();
  auto count = v.makeReg();
  auto count1 = v.makeReg();

  // Check the type
  {
    v << loadzbl{base[offset + TVOFF(m_type)], type};
    auto const sf = v.makeReg();
    v << cmpli{KindOfRefCountThreshold, type, sf};
    v << jcc{CC_LE, sf, {counted_type, done}};
    v = counted_type;
  }

  // Type is refcounted. Load the refcount.
  v << load{base[offset + TVOFF(m_data)], data};
  v << loadl{data[FAST_REFCOUNT_OFFSET], count};

  // Is it static? Note that only the lower 32 bits of count are valid right
  // now, but tbcc is only looking at a single one of them, so this is OK.
  v << tbcc{vixl::ne, UncountedBitPos, count, {counted_obj, done}};
  v = counted_obj;

  {
    // Not static. Decrement and write back.
    auto const sf = v.makeReg();
    v << subli{1, count, count1, sf};
    v << storel{count1, data[FAST_REFCOUNT_OFFSET]};

    // Did it go to zero?
    v << jcc{CC_NZ, sf, {destruct, done}};
    v = destruct;
  }

  // Went to zero. Have to destruct.
  cgCallHelper(v,
               CppCall::direct(tv_release_generic),
               kVoidDest,
               SyncOptions::kSyncPoint,
               argGroup().addr(base, offset));
  v << jmp{done};
  v = done;
}
void pack2(Vout& v, Vreg s0, Vreg s1, Vreg d0) {
  auto prep = [&] (Vreg r) {
    if (VregDbl::allowable(r)) return r;
    auto t = v.makeReg();
    v << copy{r, t};
    return t;
  };
  // s0 and s1 must be valid VregDbl registers; prep() takes care of it.
  v << unpcklpd{prep(s1), prep(s0), d0}; // s0,s1 -> d0[0],d0[1]
}
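A hypothetical use of pack2, packing two 64-bit values into one 128-bit SIMD register (the value names are illustrative):

auto const wide = v.makeReg();
pack2(v, lo, hi, wide); // wide[63:0] = lo, wide[127:64] = hi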
void cmpLowPtrImpl(Vout& v, Vreg sf, Vreg reg, Vptr mem, size_t size) {
  if (size == 8) {
    v << cmpqm{reg, mem, sf};
  } else if (size == 4) {
    auto low = v.makeReg();
    v << movtql{reg, low};
    v << cmplm{low, mem, sf};
  } else {
    not_implemented();
  }
}