// Codegen for OrdStrIdx: produce the byte at index `idx` of string `sd` as an
// integer. The fast path reads the character directly from the string buffer;
// the slow path (taken when the bounds check fails) is emitted in the cold
// area and calls the C++ helper before yielding 0.
void cgOrdStrIdx(IRLS& env, const IRInstruction* inst) {
  auto const sd = srcLoc(env, inst, 0).reg();
  auto const idx = srcLoc(env, inst, 1).reg();
  auto& v = vmain(env);

  auto const sf = v.makeReg();
  auto const length = v.makeReg();

  // Bounds check: zero-extend the 32-bit length field and compare `idx`
  // against it (unsigned compare, see CC_B below).
  v << loadzlq{sd[StringData::sizeOff()], length};
  v << cmpq{idx, length, sf};

  unlikelyCond(v, vcold(env), CC_B, sf, dstLoc(env, inst, 0).reg(),
    [&] (Vout& v) {
      // Slow path: delegate to MInstrHelpers::stringGetI (its result is
      // discarded via kVoidDest) and produce 0 as this path's value.
      auto const args = argGroup(env, inst).ssa(0).ssa(1);
      cgCallHelper(v, env, CallSpec::direct(MInstrHelpers::stringGetI),
                   kVoidDest, SyncOptions::Sync, args);
      return v.cns(0);
    },
    [&] (Vout& v) {
      // Fast path: compute the address of the character data, then
      // zero-extend the byte at `idx` into the destination.
      auto const dst = v.makeReg();
      auto const data = v.makeReg();
#ifdef NO_M_DATA
      // With NO_M_DATA, the character buffer is laid out immediately after
      // the StringData header.
      v << lea{sd[sizeof(StringData)], data};
#else
      v << load{sd[StringData::dataOff()], data};
#endif
      v << loadzbq{data[idx], dst};
      return dst;
    }
  );
}
// Codegen for LdSSwitchDestFast: build an SSwitchMap in the data section
// mapping case strings to (later-bound) translation addresses, then call the
// fast string-switch helper to pick the jump target.
void cgLdSSwitchDestFast(IRLS& env, const IRInstruction* inst) {
  auto const extra = inst->extra<LdSSwitchDestFast>();
  auto& v = vmain(env);

  auto const table = v.allocData<SSwitchMap>();
  // TODO(t10347945): This causes our data section to own a pointer to heap
  // memory, and we're putting bindaddrs in said heap memory.
  new (table) SSwitchMap(extra->numCases);

  for (int64_t i = 0; i < extra->numCases; ++i) {
    // Insert the case string with a null target, then look up the slot so we
    // can bind the real address into it below.
    table->add(extra->cases[i].str, nullptr);
    auto const addr = table->find(extra->cases[i].str);
    // The addresses we're passing to bindaddr{} here live in SSwitchMap's heap
    // buffer (see comment above). They don't need to be relocated like normal
    // VdataPtrs, so bind them here.
    VdataPtr<TCA> dataPtr{nullptr};
    dataPtr.bind(addr);
    v << bindaddr{dataPtr, extra->cases[i].dest, extra->bcSPOff};
  }

  // Bind the default case target.
  auto const def = v.allocData<TCA>();
  v << bindaddr{def, extra->defaultSk, extra->bcSPOff};

  auto const args = argGroup(env, inst)
    .ssa(0)
    .dataPtr(table)
    .dataPtr(def);

  cgCallHelper(v, env, CallSpec::direct(sswitchHelperFast),
               callDest(env, inst), SyncOptions::None, args);
}
// Allocate the RDS cache for this cls::method/context triple and call
// StaticMethodCache::lookup to fill it and obtain the Func*.
void cgLookupClsMethodCache(IRLS& env, const IRInstruction* inst) {
  auto const data = inst->extra<ClsMethodData>();
  auto& v = vmain(env);
  auto const dstReg = dstLoc(env, inst, 0).reg();
  auto const fpReg = srcLoc(env, inst, 0).reg();

  auto const handle = StaticMethodCache::alloc(
    data->clsName,
    data->methodName,
    ctxName(inst->marker())
  );

  if (false) { // typecheck
    UNUSED TypedValue* fake_fp = nullptr;
    const UNUSED Func* f = StaticMethodCache::lookup(
      handle, data->namedEntity, data->clsName, data->methodName, fake_fp
    );
  }

  // May raise an error if the class is undefined.
  cgCallHelper(
    v, env,
    CallSpec::direct(StaticMethodCache::lookup),
    callDest(dstReg),
    SyncOptions::Sync,
    argGroup(env, inst)
      .imm(handle)
      .immPtr(data->namedEntity)
      .immPtr(data->clsName)
      .immPtr(data->methodName)
      .reg(fpReg)
  );
}
// Allocate the forwarding static-method cache for a known class and call the
// C++ lookup routine to populate it.
void cgLookupClsMethodFCache(IRLS& env, const IRInstruction* inst) {
  auto const data = inst->extra<ClsMethodData>();
  auto const cls = inst->src(0)->clsVal();
  auto const dstReg = dstLoc(env, inst, 0).reg(0);
  auto const fpReg = srcLoc(env, inst, 1).reg();
  auto& v = vmain(env);

  auto const handle = StaticMethodFCache::alloc(
    cls->name(), data->methodName, ctxName(inst->marker())
  );
  assertx(rds::isNormalHandle(handle));

  // Spell the function-pointer type out explicitly so the CallSpec is built
  // from an unambiguous overload.
  const Func* (*lookupFn)(rds::Handle, const Class*,
                          const StringData*, TypedValue*) =
    StaticMethodFCache::lookup;

  cgCallHelper(v, env, CallSpec::direct(lookupFn), callDest(dstReg),
               SyncOptions::Sync,
               argGroup(env, inst)
                 .imm(handle)
                 .immPtr(cls)
                 .immPtr(data->methodName)
                 .reg(fpReg));
}
// Construct an instance of `cls` by calling its registered instance
// constructor; the new object lands in the destination register.
void cgConstructInstance(IRLS& env, const IRInstruction* inst) {
  auto const cls = inst->extra<ConstructInstance>()->cls;
  auto const dstReg = dstLoc(env, inst, 0).reg();

  cgCallHelper(
    vmain(env), env,
    CallSpec::direct(cls->instanceCtor().get()),
    callDest(dstReg),
    SyncOptions::Sync,
    argGroup(env, inst).immPtr(cls)
  );
}
// Emit a ring-buffer trace entry (type + SrcKey) via Trace::ringbufferEntryRip.
void cgRBTraceEntry(IRLS& env, const IRInstruction* inst) {
  auto const data = inst->extra<RBTraceEntry>();

  cgCallHelper(vmain(env), env,
               CallSpec::direct(Trace::ringbufferEntryRip),
               kVoidDest, SyncOptions::None,
               argGroup(env, inst)
                 .imm(data->type)
                 .imm(data->sk.toAtomicInt()));
}
// Report the TypedValue in src(0) to the TypeProfile stored at this
// instruction's RDS handle.
void cgProfileType(IRLS& env, const IRInstruction* inst) {
  auto const data = inst->extra<RDSHandleData>();

  cgCallHelper(vmain(env), env,
               CallSpec::method(&TypeProfile::report),
               kVoidDest, SyncOptions::None,
               argGroup(env, inst)
                 .addr(rvmtl(), safe_cast<int32_t>(data->handle))
                 .typedValue(0));
}
// Look up a global's address via ldGblAddrHelper, then branch on whether the
// helper returned a null pointer.
void cgLdGblAddr(IRLS& env, const IRInstruction* inst) {
  auto& v = vmain(env);
  auto const addrReg = dstLoc(env, inst, 0).reg();

  cgCallHelper(v, env, CallSpec::direct(ldGblAddrHelper),
               callDest(addrReg), SyncOptions::None,
               argGroup(env, inst).ssa(0));

  // Null result selects the second (taken) target.
  auto const flags = v.makeReg();
  v << testq{addrReg, addrReg, flags};
  v << jcc{CC_Z, flags, {label(env, inst->next()), label(env, inst->taken())}};
}
// Emit a call to the debug trace callback with the frame, the synced stack
// address, and the current bytecode offset.
void cgDbgTraceCall(IRLS& env, const IRInstruction* inst) {
  auto const bcSPOff = inst->extra<DbgTraceCall>()->offset;

  cgCallHelper(vmain(env), env, CallSpec::direct(traceCallback),
               callDest(env, inst), SyncOptions::None,
               argGroup(env, inst)
                 .ssa(0)
                 .addr(srcLoc(env, inst, 1).reg(),
                       cellsToBytes(bcSPOff.offset))
                 .imm(inst->marker().bcOff()));
}
// Emit a ring-buffer trace message. The message string must be static so its
// data pointer can be embedded as an immediate.
void cgRBTraceMsg(IRLS& env, const IRInstruction* inst) {
  auto const data = inst->extra<RBTraceMsg>();
  assertx(data->msg->isStatic());

  cgCallHelper(vmain(env), env,
               CallSpec::direct(Trace::ringbufferMsg),
               kVoidDest, SyncOptions::None,
               argGroup(env, inst)
                 .immPtr(data->msg->data())
                 .imm(data->msg->size())
                 .imm(data->type));
}
// Report a method call to the MethProfile stored at this instruction's RDS
// handle, passing the relevant stack address and the callee in src(1).
void cgProfileMethod(IRLS& env, const IRInstruction* inst) {
  auto const data = inst->extra<ProfileMethodData>();
  auto const spReg = srcLoc(env, inst, 0).reg();

  cgCallHelper(vmain(env), env,
               CallSpec::method(&MethProfile::reportMeth),
               kVoidDest, SyncOptions::None,
               argGroup(env, inst)
                 .addr(rvmtl(), safe_cast<int32_t>(data->handle))
                 .addr(spReg, cellsToBytes(data->bcSPOff.offset))
                 .ssa(1));
}
// Report a class-constant access to the ClsCnsProfile at this instruction's
// RDS handle; the helper's result goes to the destination register.
void cgProfileSubClsCns(IRLS& env, const IRInstruction* inst) {
  auto const data = inst->extra<ProfileSubClsCns>();
  auto const dstReg = dstLoc(env, inst, 0).reg();

  cgCallHelper(vmain(env), env,
               CallSpec::method(&ClsCnsProfile::reportClsCns),
               callDest(dstReg), SyncOptions::None,
               argGroup(env, inst)
                 .addr(rvmtl(), safe_cast<int32_t>(data->handle))
                 .ssa(0)
                 .imm(data->cnsName));
}
// Dispatch one bytecode to its interpOne##Op() entry point.
void cgInterpOne(IRLS& env, const IRInstruction* inst) {
  auto const extra = inst->extra<InterpOne>();
  auto const spReg = srcLoc(env, inst, 0).reg();
  auto const entry = interpOneEntryPoints[size_t(extra->opcode)];

  // Call the interpOne##Op() routine, which syncs VM regs manually.
  cgCallHelper(vmain(env), env, CallSpec::direct(entry), kVoidDest,
               SyncOptions::None,
               argGroup(env, inst)
                 .ssa(1)
                 .addr(spReg, cellsToBytes(extra->spOffset.offset))
                 .imm(extra->bcOff));
}
// Unset a property: pick the helper from the table keyed on whether the base
// is known to be an object, then call it with (ctx class, base, key).
void CodeGenerator::cgUnsetProp(IRInstruction* inst) {
  auto const baseIsObj = inst->src(0)->isA(TObj);
  BUILD_OPTAB(UNSETPROP_HELPER_TABLE, baseIsObj);

  auto args = argGroup(inst)
    .immPtr(getClass(inst->marker()))
    .ssa(0)
    .typedValue(1);

  cgCallHelper(vmain(), CallSpec::direct(opFunc), kVoidDest,
               SyncOptions::Sync, args);
}
// Increment/decrement a property: pick the helper from the table keyed on
// whether the base is an object, then call it with the IncDec op as the last
// argument; the resulting value goes to the TV destination.
void CodeGenerator::cgIncDecProp(IRInstruction* inst) {
  auto const extra = inst->extra<IncDecProp>();
  auto const baseIsObj = inst->src(0)->isA(TObj);
  BUILD_OPTAB(INCDECPROP_HELPER_TABLE, baseIsObj);

  auto args = argGroup(inst)
    .immPtr(getClass(inst->marker()))
    .ssa(0)
    .typedValue(1)
    .imm(static_cast<int32_t>(extra->op));

  cgCallHelper(vmain(), CallSpec::direct(opFunc), callDestTV(inst),
               SyncOptions::Sync, args);
}
// Allocate raw (uninitialized-property) storage for an instance of `cls`,
// choosing the allocation helper based on the object's size.
void cgNewInstanceRaw(IRLS& env, const IRInstruction* inst) {
  auto const cls = inst->extra<NewInstanceRaw>()->cls;
  auto const dstReg = dstLoc(env, inst, 0).reg();

  auto const bytes = ObjectData::sizeForNProps(cls->numDeclProperties());
  auto const target = bytes <= kMaxSmallSize
    ? CallSpec::direct(ObjectData::newInstanceRaw)
    : CallSpec::direct(ObjectData::newInstanceRawBig);

  cgCallHelper(vmain(env), env, target, callDest(dstReg),
               SyncOptions::Sync,
               argGroup(env, inst)
                 .imm(reinterpret_cast<uintptr_t>(cls))
                 .imm(bytes));
}
// Create an AwaitAllWaitHandle from `count` locals starting at `first` in the
// frame pointed to by src(0).
void cgCreateAAWH(IRLS& env, const IRInstruction* inst) {
  auto const data = inst->extra<CreateAAWHData>();
  auto const fpReg = srcLoc(env, inst, 0).reg();

  auto const args = argGroup(env, inst)
    .imm(data->count)
    .ssa(1)
    .addr(fpReg, localOffset(data->first));

  cgCallHelper(vmain(env), env,
               CallSpec::direct(c_AwaitAllWaitHandle::fromFrameNoCheck),
               callDest(env, inst), SyncOptions::Sync, args);
}
// Initialize a class constant: call the lookup helper to fill the constant's
// RDS slot, then mark the handle initialized. The looked-up value is also the
// instruction's TV result.
void cgInitClsCns(IRLS& env, const IRInstruction* inst) {
  auto const data = inst->extra<InitClsCns>();
  auto const link = rds::bindClassConstant(data->clsName, data->cnsName);
  auto& v = vmain(env);

  cgCallHelper(v, env, CallSpec::direct(lookupClsCnsHelper),
               callDestTV(env, inst), SyncOptions::Sync,
               argGroup(env, inst)
                 .addr(rvmtl(), safe_cast<int32_t>(link.handle()))
                 .immPtr(NamedEntity::get(data->clsName))
                 .immPtr(data->clsName)
                 .immPtr(data->cnsName));

  // Only flag the handle after the helper call has been emitted.
  markRDSHandleInitialized(v, link.handle());
}
// Codegen for InstanceOf: test whether the class in src(0) is a subclass of
// src(1), using Class::classof as the general fallback and an inlined
// class-vector check when src(1) is a known, non-interface class.
void cgInstanceOf(IRLS& env, const IRInstruction* inst) {
  auto const dst = dstLoc(env, inst, 0).reg();
  auto const rhs = srcLoc(env, inst, 1).reg();
  auto& v = vmain(env);

  // Emit a call to Class::classof(src(0), src(1)), writing its byte-sized
  // result into `dest`.
  auto const call_classof = [&] (Vreg dest) {
    cgCallHelper(v, env, CallSpec::method(&Class::classof),
                 {DestType::Byte, dest}, SyncOptions::None,
                 argGroup(env, inst).ssa(0).ssa(1));
    return dest;
  };

  if (!inst->src(1)->isA(TCls)) {
    // rhs is not statically known to be a Class; guard on it being non-null
    // before calling classof(), and produce false for null.
    auto const sf = v.makeReg();
    v << testq{rhs, rhs, sf};
    cond(v, CC_NZ, sf, dst,
      [&] (Vout& v) { return call_classof(v.makeReg()); },
      [&] (Vout& v) { return v.cns(false); } // rhs is nullptr
    );
    return;
  }

  auto const spec = inst->src(1)->type().clsSpec();
  if (!spec.cls() || (spec.cls()->attrs() & AttrInterface)) {
    // Unknown class, or an interface: use the general helper.
    call_classof(dst);
    return;
  }

  // This essentially inlines Class::classofNonIFace
  auto const lhs = srcLoc(env, inst, 0).reg();
  auto const rhsTmp = v.makeReg();
  auto const rhsLen = v.makeReg();
  auto const sfVecLen = v.makeReg();

  // Load rhs's class-vector length (zero-extended to 64 bits) and compare it
  // against lhs's. veclen_t is 2 or 4 bytes wide depending on the build, so
  // pick the matching load/compare widths.
  if (sizeof(Class::veclen_t) == 2) {
    v << loadw{rhs[Class::classVecLenOff()], rhsTmp};
    v << movzwq{rhsTmp, rhsLen};
    v << cmpwm{rhsTmp, lhs[Class::classVecLenOff()], sfVecLen};
  } else if (sizeof(Class::veclen_t) == 4) {
    v << loadl{rhs[Class::classVecLenOff()], rhsTmp};
    v << movzlq{rhsTmp, rhsLen};
    v << cmplm{rhsTmp, lhs[Class::classVecLenOff()], sfVecLen};
  } else {
    not_implemented();
  }

  // Finish the subclass test using the compared vector lengths.
  check_subcls(v, sfVecLen, dst, lhs, rhs, rhsLen);
}
// Codegen for InitObjProps: set a freshly-allocated object's attribute bits
// and initialize its declared properties, either via the fast inline path or
// by copying from the Class's 86pinit-produced PropInitVec.
void cgInitObjProps(IRLS& env, const IRInstruction* inst) {
  auto const cls = inst->extra<InitObjProps>()->cls;
  auto const obj = srcLoc(env, inst, 0).reg();
  auto& v = vmain(env);

  // Set the attributes, if any.
  auto const odAttrs = cls->getODAttrs();
  if (odAttrs) {
    static_assert(sizeof(ObjectData::Attribute) == 2,
                  "Codegen expects 2-byte ObjectData attributes");
    assertx(!(odAttrs & 0xffff0000));
    // OR the attribute bits into the object's 2-byte attribute field.
    v << orwim{odAttrs, obj[ObjectData::attributeOff()], v.makeReg()};
  }

  // Initialize the properties.
  auto const nprops = cls->numDeclProperties();
  if (nprops > 0) {
    if (cls->pinitVec().size() == 0) {
      // If the Class has no 86pinit property-initializer functions, we can
      // just copy the initial values from a data member on the Class.
      implInitObjPropsFast(v, env, inst, obj, cls, nprops);
    } else {
      // Load the Class's propInitVec from the target cache. We know it's
      // already been initialized as a pre-condition on this op.
      auto const propHandle = cls->propHandle();
      assertx(rds::isNormalHandle(propHandle));
      auto const propInitVec = v.makeReg();
      auto const propData = v.makeReg();
      v << load{Vreg(rvmtl())[propHandle], propInitVec};
      v << load{propInitVec[Class::PropInitVec::dataOff()], propData};

      // Property storage begins after the ObjectData header plus any builtin
      // tail bytes.
      auto const propsOff = sizeof(ObjectData) + cls->builtinODTailSize();
      auto args = argGroup(env, inst)
        .addr(obj, safe_cast<int32_t>(propsOff))
        .reg(propData);

      if (!cls->hasDeepInitProps()) {
        // No deep-init props: a plain memcpy of nprops cells suffices.
        cgCallHelper(v, env, CallSpec::direct(memcpy), kVoidDest,
                     SyncOptions::None, args.imm(cellsToBytes(nprops)));
      } else {
        // Some props require deep initialization; use the dedicated helper,
        // which takes a property count rather than a byte count.
        cgCallHelper(v, env, CallSpec::direct(deepInitHelper), kVoidDest,
                     SyncOptions::None, args.imm(nprops));
      }
    }
  }
}
// Set a property: select the helper keyed on the (string) key type and on
// whether the base is an object, then call it with (ctx class, base, key,
// value).
void CodeGenerator::cgSetProp(IRInstruction* inst) {
  auto const propKey = inst->src(1);
  auto const keyType = getKeyTypeNoInt(propKey);
  BUILD_OPTAB(SETPROP_HELPER_TABLE, keyType, inst->src(0)->isA(TObj));

  auto args = argGroup(inst)
    .immPtr(getClass(inst->marker()))
    .ssa(0)
    .memberKeyS(1)
    .typedValue(2);

  cgCallHelper(vmain(), CallSpec::direct(opFunc), kVoidDest,
               SyncOptions::Sync, args);
}
// Shared lowering for IssetProp/EmptyProp: select the helper keyed on the key
// type, the isset-vs-empty flavor, and whether the base is an object.
void CodeGenerator::cgIssetEmptyPropImpl(IRInstruction* inst) {
  auto const wantEmpty = inst->op() == EmptyProp;
  auto const keyType = getKeyTypeNoInt(inst->src(1));
  BUILD_OPTAB(ISSET_EMPTY_PROP_HELPER_TABLE, keyType, wantEmpty,
              inst->src(0)->isA(TObj));

  auto args = argGroup(inst)
    .immPtr(getClass(inst->marker()))
    .ssa(0)
    .memberKeyS(1);

  cgCallHelper(vmain(), CallSpec::direct(opFunc), callDest(inst),
               SyncOptions::Sync, args);
}
// Codegen for LookupClsMethod: call lookupClsMethodHelper<forward> with the
// class, method name, callee activation-record address, and context.
//
// The original emitted two textually identical cgCallHelper calls in an
// if/else differing only in the helper's template-bool; select the CallSpec
// up front and emit the call once instead.
void cgLookupClsMethod(IRLS& env, const IRInstruction* inst) {
  auto const extra = inst->extra<LookupClsMethod>();
  auto const sp = srcLoc(env, inst, 2).reg();

  auto const args = argGroup(env, inst)
    .ssa(0)
    .ssa(1)
    .addr(sp, cellsToBytes(extra->calleeAROffset.offset))
    .ssa(3);

  // Pick the forwarding or non-forwarding instantiation of the helper.
  auto const target = extra->forward
    ? CallSpec::direct(lookupClsMethodHelper<true>)
    : CallSpec::direct(lookupClsMethodHelper<false>);

  cgCallHelper(vmain(env), env, target, callDest(env, inst),
               SyncOptions::Sync, args);
}
// Look up a constant with a fallback name, choosing the helper based on
// whether the fallback's RDS handle is normal or persistent.
void cgLookupCnsU(IRLS& env, const IRInstruction* inst) {
  auto const cnsName = inst->src(0)->strVal();
  auto const fallbackName = inst->src(1)->strVal();
  auto const fallbackCh = makeCnsHandle(fallbackName, false);

  auto const target = rds::isNormalHandle(fallbackCh)
    ? CallSpec::direct(lookupCnsUHelperNormal)
    : CallSpec::direct(lookupCnsUHelperPersistent);

  cgCallHelper(vmain(env), env, target, callDestTV(env, inst),
               SyncOptions::Sync,
               argGroup(env, inst)
                 .imm(safe_cast<int32_t>(fallbackCh))
                 .immPtr(cnsName)
                 .immPtr(fallbackName));
}
// Codegen for LdSSwitchDestSlow: build parallel data-section tables of case
// strings and jump targets (with one extra slot for the default target), then
// call the slow string-switch helper.
void cgLdSSwitchDestSlow(IRLS& env, const IRInstruction* inst) {
  auto const extra = inst->extra<LdSSwitchDestSlow>();
  auto& v = vmain(env);

  auto stringTable = v.allocData<const StringData*>(extra->numCases);
  auto targetTable = v.allocData<TCA>(extra->numCases + 1);

  for (int64_t caseIdx = 0; caseIdx < extra->numCases; ++caseIdx) {
    stringTable[caseIdx] = extra->cases[caseIdx].str;
    v << bindaddr{&targetTable[caseIdx], extra->cases[caseIdx].dest,
                  extra->bcSPOff};
  }
  // The final slot holds the default target.
  v << bindaddr{&targetTable[extra->numCases], extra->defaultSk,
                extra->bcSPOff};

  cgCallHelper(v, env, CallSpec::direct(sswitchHelperSlow),
               callDest(env, inst), SyncOptions::Sync,
               argGroup(env, inst)
                 .typedValue(0)
                 .dataPtr(stringTable)
                 .imm(extra->numCases)
                 .dataPtr(targetTable));
}
// Codegen for CheckCold: atomically decrement this translation's profiling
// counter and branch to `taken` when the counter's decrement yields a
// non-positive result (CC_LE) — optionally gated on being able to acquire
// the optimization lease.
void cgCheckCold(IRLS& env, const IRInstruction* inst) {
  auto const transID = inst->extra<CheckCold>()->transId;
  auto const counterAddr = profData()->transCounterAddr(transID);
  auto& v = vmain(env);

  auto const sf = v.makeReg();
  // Locked in-memory decrement; the flags register reflects the new value.
  v << decqmlock{v.cns(counterAddr)[0], sf};

  if (RuntimeOption::EvalJitFilterLease) {
    // With lease filtering on, route the CC_LE case through an extra block
    // that asks couldAcquireOptimizeLease() before taking the branch.
    auto filter = v.makeBlock();
    v << jcc{CC_LE, sf, {label(env, inst->next()), filter}};
    // Subsequent code is emitted into the filter block.
    v = filter;
    auto const res = v.makeReg();
    cgCallHelper(v, env, CallSpec::direct(couldAcquireOptimizeLease),
                 callDest(res), SyncOptions::None,
                 argGroup(env, inst).immPtr(inst->func()));
    auto const sf2 = v.makeReg();
    // Non-zero helper result selects the `taken` target.
    v << testb{res, res, sf2};
    v << jcc{CC_NZ, sf2,
             {label(env, inst->next()), label(env, inst->taken())}};
  } else {
    v << jcc{CC_LE, sf,
             {label(env, inst->next()), label(env, inst->taken())}};
  }
}
// Raise the dynamic-call-to-VarEnv-function notice for the callee in src(0).
void cgRaiseVarEnvDynCall(IRLS& env, const IRInstruction* inst) {
  auto const args = argGroup(env, inst).ssa(0);
  cgCallHelper(vmain(env), env, CallSpec::direct(raiseVarEnvDynCall),
               kVoidDest, SyncOptions::Sync, args);
}
// Raise a Hack-array compatibility notice with the message in src(0).
void cgRaiseHackArrCompatNotice(IRLS& env, const IRInstruction* inst) {
  auto const args = argGroup(env, inst).ssa(0);
  cgCallHelper(vmain(env), env, CallSpec::direct(raiseHackArrCompatNotice),
               kVoidDest, SyncOptions::Sync, args);
}
// Codegen for CallBuiltin: marshal arguments for an HNI builtin's native
// implementation, call it, and normalize the return value. Builtins that
// return by reference write into MInstrState::tvBuiltinReturn, which we then
// load and (for the type register) null-fold.
void cgCallBuiltin(IRLS& env, const IRInstruction* inst) {
  auto const extra = inst->extra<CallBuiltin>();
  auto const callee = extra->callee;
  auto const returnType = inst->typeParam();
  auto const funcReturnType = callee->returnType();
  auto const returnByValue = callee->isReturnByValue();

  auto const dstData = dstLoc(env, inst, 0).reg(0);
  auto const dstType = dstLoc(env, inst, 0).reg(1);

  auto& v = vmain(env);

  // Whether `t' is passed in/out of C++ as String&/Array&/Object&.
  auto const isReqPtrRef = [] (MaybeDataType t) {
    return isStringType(t) || isArrayLikeType(t) ||
           t == KindOfObject || t == KindOfResource;
  };

  if (FixupMap::eagerRecord(callee)) {
    // Eagerly sync the VM registers (pc and a materialized sp) before the
    // call for callees that require an eager fixup record.
    auto const sp = srcLoc(env, inst, 1).reg();
    auto const spOffset = cellsToBytes(extra->spOffset.offset);
    auto const& marker = inst->marker();
    auto const pc = marker.fixupSk().unit()->entry() + marker.fixupBcOff();

    auto const synced_sp = v.makeReg();
    v << lea{sp[spOffset], synced_sp};
    emitEagerSyncPoint(v, pc, rvmtl(), srcLoc(env, inst, 0).reg(), synced_sp);
  }

  // Offset of tvBuiltinReturn relative to rvmtl(); adjusted below to point at
  // m_data for req::ptr-style returns.
  int returnOffset = rds::kVmMInstrStateOff +
                     offsetof(MInstrState, tvBuiltinReturn);
  auto args = argGroup(env, inst);

  if (!returnByValue) {
    if (isBuiltinByRef(funcReturnType)) {
      if (isReqPtrRef(funcReturnType)) {
        returnOffset += TVOFF(m_data);
      }
      // Pass the address of tvBuiltinReturn to the native function as the
      // location where it can construct the return Array, String, Object, or
      // Variant.
      args.addr(rvmtl(), returnOffset);
      args.indirect();
    }
  }

  // The srcs past the first two (sp and fp) are the arguments to the callee.
  auto srcNum = uint32_t{2};

  // Add the this_ or self_ argument for HNI builtins.
  if (callee->isMethod()) {
    if (callee->isStatic()) {
      args.ssa(srcNum);
      ++srcNum;
    } else {
      // Note that we don't support objects with vtables here (if they may need
      // a $this pointer adjustment). This should be filtered out during irgen
      // or before.
      args.ssa(srcNum);
      ++srcNum;
    }
  }

  // Add the func_num_args() value if needed.
  if (callee->attrs() & AttrNumArgs) {
    // If `numNonDefault' is negative, this is passed as an src.
    if (extra->numNonDefault >= 0) {
      args.imm((int64_t)extra->numNonDefault);
    } else {
      args.ssa(srcNum);
      ++srcNum;
    }
  }

  // Add the positional arguments.
  for (uint32_t i = 0; i < callee->numParams(); ++i, ++srcNum) {
    auto const& pi = callee->params()[i];

    // Non-pointer and NativeArg args are passed by value. String, Array,
    // Object, and Variant are passed by const&, i.e. a pointer to stack memory
    // holding the value, so we expect PtrToT types for these. Pointers to
    // req::ptr types (String, Array, Object) need adjusting to point to
    // &ptr->m_data.
    if (TVOFF(m_data) && !pi.nativeArg && isReqPtrRef(pi.builtinType)) {
      assertx(inst->src(srcNum)->type() <= TPtrToGen);
      args.addr(srcLoc(env, inst, srcNum).reg(), TVOFF(m_data));
    } else if (pi.nativeArg && !pi.builtinType && !callee->byRef(i)) {
      // This condition indicates a MixedTV (i.e., TypedValue-by-value) arg.
      args.typedValue(srcNum);
    } else {
      args.ssa(srcNum, pi.builtinType == KindOfDouble);
    }
  }

  // Determine how the call's return value is delivered into registers.
  auto dest = [&] () -> CallDest {
    if (isBuiltinByRef(funcReturnType)) {
      if (!returnByValue) return kVoidDest; // indirect return
      return funcReturnType
        ? callDest(dstData)           // String, Array, or Object
        : callDest(dstData, dstType); // Variant
    }
    return funcReturnType == KindOfDouble
      ? callDestDbl(env, inst)
      : callDest(env, inst);
  }();

  cgCallHelper(v, env, CallSpec::direct(callee->nativeFuncPtr()), dest,
               SyncOptions::Sync, args);

  // For primitive return types (int, bool, double) and returnByValue, the
  // return value is already in dstData/dstType.
  if (returnType.isSimpleType() || returnByValue) return;

  // For return by reference (String, Object, Array, Variant), the builtin
  // writes the return value into MInstrState::tvBuiltinReturn, from where it
  // has to be tested and copied.

  if (returnType.isReferenceType()) {
    // The return type is String, Array, or Object; fold nullptr to KindOfNull.
    assertx(isBuiltinByRef(funcReturnType) && isReqPtrRef(funcReturnType));

    v << load{rvmtl()[returnOffset], dstData};

    if (dstType.isValid()) {
      // dstType = (dstData == nullptr) ? KindOfNull : returnType's DataType.
      auto const sf = v.makeReg();
      auto const rtype = v.cns(returnType.toDataType());
      auto const nulltype = v.cns(KindOfNull);
      v << testq{dstData, dstData, sf};
      v << cmovb{CC_Z, sf, rtype, nulltype, dstType};
    }
    return;
  }

  if (returnType <= TCell || returnType <= TBoxedCell) {
    // The return type is Variant; fold KindOfUninit to KindOfNull.
    assertx(isBuiltinByRef(funcReturnType) && !isReqPtrRef(funcReturnType));
    static_assert(KindOfUninit == 0, "KindOfUninit must be 0 for test");

    v << load{rvmtl()[returnOffset + TVOFF(m_data)], dstData};

    if (dstType.isValid()) {
      auto const rtype = v.makeReg();
      v << loadb{rvmtl()[returnOffset + TVOFF(m_type)], rtype};

      // dstType = (loaded type == KindOfUninit) ? KindOfNull : loaded type.
      auto const sf = v.makeReg();
      auto const nulltype = v.cns(KindOfNull);
      v << testb{rtype, rtype, sf};
      v << cmovb{CC_Z, sf, rtype, nulltype, dstType};
    }
    return;
  }

  not_reached();
}
// Throw the late-init property error with the three srcs as helper arguments.
void cgThrowLateInitPropError(IRLS& env, const IRInstruction* inst) {
  auto const args = argGroup(env, inst).ssa(0).ssa(1).ssa(2);
  cgCallHelper(vmain(env), env, CallSpec::direct(throw_late_init_prop),
               kVoidDest, SyncOptions::Sync, args);
}