// Emit a decref sequence for a value whose DataType is statically known.
// Loads the 32-bit refcount from the object, optionally skips the decref for
// static/uncounted values (when the type requires that check), decrements the
// count, and calls the type's destructor helper when it reaches zero.
//
// `data` holds a pointer to the refcounted object; `type` must be a known
// DataType (asserted below).
void CodeGenerator::emitDecRefStaticType(Vout& v, Type type, Vreg data) {
  assert(type.isKnownDataType());
  auto done = v.makeBlock();
  auto count = v.makeReg();
  // Only the low 32 bits hold the refcount, hence loadl/storel throughout.
  v << loadl{data[FAST_REFCOUNT_OFFSET], count};
  if (type.needsStaticBitCheck()) {
    auto next = v.makeBlock();
    // If the uncounted/static bit is set, skip the decref entirely.
    // tbcc successors are {fall-through, taken}: bit set (ne) -> done.
    v << tbcc{vixl::ne, UncountedBitPos, count, {next, done}};
    v = next;
  }
  auto count1 = v.makeReg();
  auto destruct = v.makeBlock();
  auto const sf = v.makeReg();
  // Decrement and write the new count back.
  v << subli{1, count, count1, sf};
  v << storel{count1, data[FAST_REFCOUNT_OFFSET]};
  // Zero after decrement => last reference dropped; release the object.
  // jcc successors are {not-taken, taken}: CC_Z -> destruct.
  v << jcc{CC_Z, sf, {done, destruct}};
  v = destruct;
  cgCallHelper(v,
               MCGenerator::getDtorCall(type.toDataType()),
               kVoidDest,
               SyncOptions::kSyncPoint,
               argGroup().reg(data));
  v << jmp{done};
  v = done;
}
// Emit an if/else diamond: when condition `cc` (evaluated against status
// flags `sf`) holds, run `thenBlock`; otherwise run `elseBlock`. Both arms
// rejoin at a common block, which `v` points at on return. An arm that
// closes its own block (e.g. by jumping away) is not given a jmp to the
// join block.
void ifThenElse(Vout& v, ConditionCode cc, Vreg sf, Then thenBlock,
                Else elseBlock) {
  auto const onTrue = v.makeBlock();
  auto const onFalse = v.makeBlock();
  auto const join = v.makeBlock();

  // jcc's successor pair is {fall-through, taken}: `cc` holding sends us
  // to onTrue, otherwise we continue at onFalse.
  v << jcc{cc, sf, {onFalse, onTrue}};

  v = onTrue;
  thenBlock(v);
  if (!v.closed()) v << jmp{join};

  v = onFalse;
  elseBlock(v);
  if (!v.closed()) v << jmp{join};

  v = join;
}
// Emit a decref sequence for a TypedValue whose type is only known at
// runtime. Checks the runtime type tag first; for refcounted values it
// loads the refcount, skips statics, decrements, and calls the generic
// release helper when the count reaches zero.
//
// `base[offset]` addresses the TypedValue being decref'd.
void CodeGenerator::emitDecRefDynamicType(Vout& v, Vreg base, int offset) {
  auto counted_type = v.makeBlock();
  auto counted_obj = v.makeBlock();
  auto destruct = v.makeBlock();
  auto done = v.makeBlock();
  auto type = v.makeReg();
  auto data = v.makeReg();
  auto count = v.makeReg();
  auto count1 = v.makeReg();

  // Check the type
  {
    v << loadzbl{base[offset + TVOFF(m_type)], type};
    auto const sf = v.makeReg();
    // Types at or below the threshold are never refcounted; skip to done.
    v << cmpli{KindOfRefCountThreshold, type, sf};
    v << jcc{CC_LE, sf, {counted_type, done}};
    v = counted_type;
  }

  // Type is refcounted. Load the refcount.
  v << load{base[offset + TVOFF(m_data)], data};
  v << loadl{data[FAST_REFCOUNT_OFFSET], count};

  // Is it static? Note that only the lower 32 bits of count are valid right
  // now, but tbcc is only looking at a single one of them, so this is OK.
  v << tbcc{vixl::ne, UncountedBitPos, count, {counted_obj, done}};
  v = counted_obj;

  {
    // Not static. Decrement and write back.
    auto const sf = v.makeReg();
    v << subli{1, count, count1, sf};
    v << storel{count1, data[FAST_REFCOUNT_OFFSET]};

    // Did it go to zero?
    v << jcc{CC_NZ, sf, {destruct, done}};
    v = destruct;
  }

  // Went to zero. Have to destruct. tv_release_generic dispatches on the
  // runtime type of the TypedValue at base[offset].
  cgCallHelper(v,
               CppCall::direct(tv_release_generic),
               kVoidDest,
               SyncOptions::kSyncPoint,
               argGroup().addr(base, offset));
  v << jmp{done};
  v = done;
}
// Select between two lazily-emitted values based on whether `r` is zero:
// `t(v)` produces the result when r == 0, `f(v)` when r != 0. Both arms
// feed their result into `dst` via a phi at the join block. Returns `dst`.
Vreg condZero(Vout& v, Vreg r, Vreg dst, T t, F f) {
  using namespace x64;
  auto const nonzeroBlock = v.makeBlock();
  auto const zeroBlock = v.makeBlock();
  auto const join = v.makeBlock();

  // cbcc successors are {fall-through, taken}; eq takes the branch when
  // r is zero, so the taken edge runs the `t` arm.
  v << cbcc{vixl::eq, r, {nonzeroBlock, zeroBlock}};

  v = zeroBlock;
  auto const zeroResult = t(v);
  v << phijmp{join, v.makeTuple(VregList{zeroResult})};

  v = nonzeroBlock;
  auto const nonzeroResult = f(v);
  v << phijmp{join, v.makeTuple(VregList{nonzeroResult})};

  v = join;
  v << phidef{v.makeTuple(VregList{dst})};
  return dst;
}
// Emit the function-entry surprise-flag check. The fast path compares `fp`
// against the surprise-flag word in RDS and continues straight through; the
// slow path (emitted into the cold area) calls functionEnterHelper via
// vinvoke, unwinding to `catchBlock` if the helper throws.
//
// On return, `v` points at the fast-path continuation block and `vcold` has
// been repositioned at the cold block.
void emitCheckSurpriseFlagsEnter(Vout& v, Vout& vcold, Vreg fp, Vreg rds,
                                 Fixup fixup, Vlabel catchBlock) {
  auto cold = vcold.makeBlock();
  auto done = v.makeBlock();
  auto const sf = v.makeReg();
  v << cmpqm{fp, rds[rds::kSurpriseFlagsOff], sf};
  // jcc successors are {not-taken, taken}: the taken (CC_NBE) edge goes to
  // the cold path. NOTE(review): exact comparison semantics (which side of
  // the compare indicates "surprise") depend on cmpqm's operand order —
  // confirm against the vasm definition before relying on this.
  v << jcc{CC_NBE, sf, {done, cold}};
  v = done;
  vcold = cold;
  auto const call = CppCall::direct(
    reinterpret_cast<void(*)()>(mcg->tx().uniqueStubs.functionEnterHelper));
  auto const args = v.makeVcallArgs({});
  // vinvoke's successors: normal return resumes at `done`; an unwind out of
  // the helper lands in `catchBlock`. `fixup` records the sync point.
  vcold << vinvoke{call, args, v.makeTuple({}), {done, catchBlock}, fixup};
}
// Run `thenBlock` only when bit `bit` of register `r` is clear (zero).
// Control rejoins at a common block, which `v` points at on return; an arm
// that closes its own block is not given an explicit jmp.
void ifZero(Vout& v, unsigned bit, Vreg r, Then thenBlock) {
  auto const taken = v.makeBlock();
  auto const join = v.makeBlock();

  // tbcc tests a single bit of r; successors are {fall-through, taken},
  // so eq (bit clear) branches into the then-arm.
  v << tbcc{vixl::eq, bit, r, {join, taken}};

  v = taken;
  thenBlock(v);
  if (!v.closed()) v << jmp{join};

  v = join;
}
// Run `thenBlock` only when condition `cc` (evaluated against status flags
// `sf`) holds. Control rejoins at a common block, which `v` points at on
// return; an arm that closes its own block is not given an explicit jmp.
void ifThen(Vout& v, ConditionCode cc, Vreg sf, Then thenBlock) {
  auto const taken = v.makeBlock();
  auto const join = v.makeBlock();

  // jcc successors are {fall-through, taken}: `cc` holding enters the
  // then-arm, otherwise we skip straight to the join block.
  v << jcc{cc, sf, {join, taken}};

  v = taken;
  thenBlock(v);
  if (!v.closed()) v << jmp{join};

  v = join;
}
// Emit a call to a C++ helper from JIT'd code (ARM backend).
//
// Assigns each GP argument its ABI register, shuffles arguments into place,
// emits the call, records a sync point when required, wires up the catch
// edge if the current instruction has a catch block, and finally copies the
// native return register(s) into the requested destination.
//
// Stack arguments are not yet supported on ARM (asserted below).
void CodeGenerator::cgCallHelper(Vout& v, CppCall call,
                                 const CallDest& dstInfo, SyncOptions sync,
                                 ArgGroup& args) {
  auto dstReg0 = dstInfo.reg0;
  DEBUG_ONLY auto dstReg1 = dstInfo.reg1;

  // Bind each GP argument to its ABI argument register.
  RegSet argRegs;
  for (size_t i = 0; i < args.numGpArgs(); i++) {
    auto const r = rarg(i);
    args.gpArg(i).setDstReg(r);
    argRegs.add(r);
  }
  always_assert_flog(
    args.numStackArgs() == 0,
    "Stack arguments not yet supported on ARM: `{}'",
    *m_curInst
  );
  shuffleArgs(v, args, call);

  auto syncPoint = emitCall(v, call, argRegs);
  // Heap profiling forces a sync point even when the caller asked for none,
  // so the VM regs are accurate during allocation.
  if (RuntimeOption::HHProfServerEnabled || sync != SyncOptions::kNoSyncPoint) {
    recordHostCallSyncPoint(v, syncPoint);
  }

  auto* taken = m_curInst->taken();
  if (taken && taken->isCatch()) {
    assert_not_implemented(args.numStackArgs() == 0);
    // Split the instruction stream: the unwinder edge goes to the catch
    // block, normal flow continues at `next`.
    auto next = v.makeBlock();
    v << hcunwind{syncPoint, {next, m_state.labels[taken]}};
    v = next;
  } else if (!m_curInst->is(Call, CallArray, ContEnter)) {
    // No catch block: tell the unwinder this call site must not throw.
    v << hcnocatch{syncPoint};
  }

  // Move the native return value into the requested destination register.
  switch (dstInfo.type) {
    case DestType::TV: CG_PUNT(cgCall-ReturnTV);
    case DestType::SIMD: CG_PUNT(cgCall-ReturnSIMD);
    case DestType::SSA:
    case DestType::Byte:
      assertx(dstReg1 == InvalidReg);
      // Integer/pointer results come back in x0 per the ARM64 ABI.
      v << copy{PhysReg(vixl::x0), dstReg0};
      break;
    case DestType::None:
      assertx(dstReg0 == InvalidReg && dstReg1 == InvalidReg);
      break;
    case DestType::Dbl:
      assertx(dstReg1 == InvalidReg);
      // Floating-point results come back in d0.
      v << copy{PhysReg(vixl::d0), dstReg0};
      break;
  }
}
// Emit a call to a C++ helper from JIT'd code (IRLS lowering path).
//
// Marshals the argument groups (indirect-return, GP, SIMD, stack) into Vreg
// lists, computes the sync fixup when one is required, validates and wires
// the catch edge when the instruction has a catch block, and emits either a
// vinvoke (with unwind edge) or a plain vcall.
void cgCallHelper(Vout& v, IRLS& env, CallSpec call, const CallDest& dstInfo,
                  SyncOptions sync, const ArgGroup& args) {
  auto const inst = args.inst();
  jit::vector<Vreg> vIndRetArgs, vargs, vSimdArgs, vStkArgs;

  // Lower each argument group into plain Vreg lists for the vasm call.
  for (size_t i = 0; i < args.numIndRetArgs(); ++i) {
    prepareArg(args.indRetArg(i), v, vIndRetArgs);
  }
  for (size_t i = 0; i < args.numGpArgs(); ++i) {
    prepareArg(args.gpArg(i), v, vargs);
  }
  for (size_t i = 0; i < args.numSimdArgs(); ++i) {
    prepareArg(args.simdArg(i), v, vSimdArgs);
  }
  for (size_t i = 0; i < args.numStackArgs(); ++i) {
    prepareArg(args.stkArg(i), v, vStkArgs);
  }

  auto const syncFixup = [&] {
    if (RuntimeOption::HHProfEnabled || sync != SyncOptions::None) {
      // If we are profiling the heap, we always need to sync because regs need
      // to be correct during allocations no matter what.
      return makeFixup(inst->marker(), sync);
    }
    return Fixup{};
  }();

  Vlabel targets[2];
  bool nothrow = false;
  auto const taken = inst->taken();
  auto const do_catch = taken && taken->isCatch();
  if (do_catch) {
    // A catch block only makes sense if the call can actually sync (or the
    // instruction is InterpOne, which is special-cased).
    always_assert_flog(
      inst->is(InterpOne) || sync != SyncOptions::None,
      "cgCallHelper called with None but inst has a catch block: {}\n",
      *inst
    );
    always_assert_flog(
      taken->catchMarker() == inst->marker(),
      "Catch trace doesn't match fixup:\n"
      "Instruction: {}\n"
      "Catch trace: {}\n"
      "Fixup : {}\n",
      inst->toString(),
      taken->catchMarker().show(),
      inst->marker().show()
    );

    // targets[0] is the normal-return continuation; targets[1] is the
    // unwinder's destination.
    targets[0] = v.makeBlock();
    targets[1] = env.labels[taken];
  } else {
    // The current instruction doesn't have a catch block so it'd better not
    // throw. Register a null catch trace to indicate this to the unwinder.
    nothrow = true;
  }

  // Collect the destination registers (zero, one, or two).
  VregList dstRegs;
  if (dstInfo.reg0.isValid()) {
    dstRegs.push_back(dstInfo.reg0);
    if (dstInfo.reg1.isValid()) {
      dstRegs.push_back(dstInfo.reg1);
    }
  }

  auto const argsId = v.makeVcallArgs({
    std::move(vargs),
    std::move(vSimdArgs),
    std::move(vStkArgs),
    std::move(vIndRetArgs)
  });
  auto const dstId = v.makeTuple(std::move(dstRegs));

  if (do_catch) {
    // vinvoke carries the unwind edge; normal flow resumes at targets[0].
    v << vinvoke{call, argsId, dstId, {targets[0], targets[1]},
                 syncFixup, dstInfo.type};
    v = targets[0];
  } else {
    v << vcall{call, argsId, dstId, syncFixup, dstInfo.type, nothrow};
  }
}