// Emit a bound call site with `numArgs` arguments already pushed on the VM
// stack. Calls to builtins that need no argument munging are emitted inline
// (skipping the prologue machinery); everything else goes through
// emitBindCallHelper. Returns emitNativeImpl's result on the inline path,
// 0 otherwise.
int32_t emitBindCall(CodeBlock& mainCode, CodeBlock& stubsCode, SrcKey srcKey,
                     const Func* funcd, int numArgs) {
  // If this is a call to a builtin and we don't need any argument
  // munging, we can skip the prologue system and do it inline.
  if (isNativeImplCall(funcd, numArgs)) {
    // Store a placeholder return RIP into the callee ActRec's m_savedRip
    // slot; it is patched below once the post-call frontier is known.
    StoreImmPatcher patchIP(mainCode, (uint64_t)mainCode.frontier(), reg::rax,
                            cellsToBytes(numArgs) + AROFF(m_savedRip), rVmSp);
    assert(funcd->numLocals() == funcd->numParams());
    assert(funcd->numIterators() == 0);
    Asm a { mainCode };
    // Point rVmFp at the callee ActRec, which sits just above the args.
    emitLea(a, rVmSp[cellsToBytes(numArgs)], rVmFp);
    emitCheckSurpriseFlagsEnter(mainCode, stubsCode, true, mcg->fixupMap(),
                                Fixup(0, numArgs));
    // rVmSp is already correctly adjusted, because there's no locals
    // other than the arguments passed.
    auto retval = emitNativeImpl(mainCode, funcd);
    // Now the real return address is known: patch it over the placeholder.
    patchIP.patch(uint64_t(mainCode.frontier()));
    return retval;
  }

  Asm a { mainCode };
  if (debug) {
    // Debug builds poison the saved-RIP slot so a use-before-set is obvious.
    auto off = cellsToBytes(numArgs) + AROFF(m_savedRip);
    emitImmStoreq(a, kUninitializedRIP, rVmSp[off]);
  }

  // Stash callee's rVmFp into rStashedAR for the callee's prologue
  emitLea(a, rVmSp[cellsToBytes(numArgs)], rStashedAR);
  emitBindCallHelper(mainCode, stubsCode, srcKey, funcd, numArgs);
  return 0;
}
// Recursively walk the given UStruct layout, applying Fixup() to every
// FSlateColor member found in Src and writing the result into Dest.
// Non-struct properties are left untouched.
void FixupStruct(UStruct* Struct, void* Dest, const void* Src) {
	// Leaf case: an FSlateColor is fixed up and written directly.
	if (Struct == Struct_SlateColor)
	{
		*reinterpret_cast<FSlateColor*>(Dest) =
			Fixup(*reinterpret_cast<const FSlateColor*>(Src));
		return;
	}

	// Otherwise recurse into every struct-typed property, including those
	// inherited from parent structs.
	for (TFieldIterator<UProperty> It(Struct, EFieldIteratorFlags::IncludeSuper); It; ++It)
	{
		if (auto* StructProp = Cast<UStructProperty>(*It))
		{
			FixupStruct(
				StructProp->Struct,
				StructProp->ContainerPtrToValuePtr<void>(Dest),
				StructProp->ContainerPtrToValuePtr<void>(Src));
		}
	}
}
void AllocateVariables(){ char msg[80]; tObj *VRef; VRef = FirstVar(); while(VRef != NULL){ if(VRef->Val == 0){ strcpy(msg, "Variable '"); strcat(msg, VRef->Name); strcat(msg, "' is declared but never used."); Warning(msg); } else{ Fixup(VRef->Val); // //printf("\n%s", VRef->Name); PC++; } VRef = NextVar(); } }
void AddStructure(const Header* header, const Strings& strings, Structure*& listHead) { Structure* const p = (Structure*)calloc(1, sizeof(Structure)); // freed in Cleanup p->header = *header; if(listHead) { // insert at end of list to preserve order of caches/slots Structure* last = listHead; while(last->next) last = last->next; last->next = p; } else listHead = p; FieldInitializer fieldInitializer(header, strings); VisitFields(*p, fieldInitializer); Fixup(*p); }
// Combined surprise-flag and stack-overflow check on function entry: a single
// unsigned compare of the lowest stack address this frame may touch against
// the RDS word at kSurpriseFlagsOff covers both conditions, as reflected by
// the shared stub name functionSurprisedOrStackOverflow.
void cgCheckSurpriseAndStack(IRLS& env, const IRInstruction* inst) {
  auto const fp = srcLoc(env, inst, 0).reg();
  auto const extra = inst->extra<CheckSurpriseAndStack>();
  auto const func = extra->func;

  // Sync point for the stub call: bytecode offset of the entry point for this
  // argument count, relative to the function's base.
  auto const off = func->getEntryForNumArgs(extra->argc) - func->base();
  auto const fixup = Fixup(off, func->numSlotsInFrame());

  auto& v = vmain(env);
  auto const sf = v.makeReg();
  auto const needed_top = v.makeReg();
  // Lowest address the frame can use: fp minus the function's maximum stack
  // footprint in cells.
  v << lea{fp[-cellsToBytes(func->maxStackCells())], needed_top};
  v << cmpqm{needed_top, rvmtl()[rds::kSurpriseFlagsOff], sf};

  // Cold path: invoke the stub; on failure it unwinds to the taken block.
  unlikelyIfThen(v, vcold(env), CC_AE, sf, [&] (Vout& v) {
    auto const stub = tc::ustubs().functionSurprisedOrStackOverflow;
    auto const done = v.makeBlock();
    v << vinvoke{CallSpec::stub(stub), v.makeVcallArgs({}), v.makeTuple({}),
                 {done, label(env, inst->taken())}, fixup};
    v = done;
  });
}
// Merge `obj` into the serialized `payload`, then verify the merged blob
// round-trips correctly: deserialized as a T it must equal `obj`, and
// deserialized as a Payload it must equal the combination of the original
// payload with `obj`.
void Merging(Payload payload, const T& obj, uint16_t version = bond::v1, bool mergeByDeserialize = true) {
    Reader merged = Merge<Reader, Writer>(payload, obj, version);

    // Deserialize merged into T and compare against obj
    {
        T to;

#ifdef _MSC_VER
#pragma warning(push)
#pragma warning(disable: 4127) // C4127: conditional expression is constant
#endif
        // If T has no optional fields, every field is overwritten on
        // deserialization, so starting from a random instance proves it.
        if (boost::mpl::count_if<typename T::Schema::fields, is_optional_field<_> >::value == 0)
#ifdef _MSC_VER
#pragma warning(pop)
#endif
        {
            to = InitRandom<T>();
            Fixup(to);
        }

        Deserialize(merged, to);

        UT_AssertIsTrue(Equal(obj, to));
    }

    // Deserialize merged into Payload and compare against combination of the
    // original payload and the obj.
    {
        Payload to;

#ifdef _MSC_VER
#pragma warning(push)
#pragma warning(disable: 4127) // C4127: conditional expression is constant
#endif
        // Same trick as above: only safe when Payload has no optional fields.
        if (boost::mpl::count_if<typename Payload::Schema::fields, is_optional_field<_> >::value == 0)
#ifdef _MSC_VER
#pragma warning(pop)
#endif
        {
            to = InitRandom<Payload>();
            Fixup(to);
        }

        Deserialize(merged, to);

        if (mergeByDeserialize)
        {
            // Will fail an assert without
            // #define BOND_UNIT_TEST_ONLY_PERMIT_OBJECT_REUSE
            Deserialize(Serialize<Reader, Writer>(obj, version), payload);
            UT_AssertIsTrue(Equal(payload, to));
        }
        else
        {
            UT_AssertIsTrue(MergedEqual(payload, to, obj));
        }
    }
}
// Emit a PHP-level call. The callee ActRec has already been materialized on
// the stack at spOffset + argc; this fills in the saved frame pointer and
// return-offset slots, then either inlines the builtin's native
// implementation (when it takes its args unmodified) or emits a smashable
// call through the bind-call service-request stubs.
void cgCall(IRLS& env, const IRInstruction* inst) {
  auto const sp = srcLoc(env, inst, 0).reg();
  auto const fp = srcLoc(env, inst, 1).reg();
  auto const extra = inst->extra<Call>();
  auto const callee = extra->callee;
  auto const argc = extra->numParams;

  auto& v = vmain(env);
  auto& vc = vcold(env);
  auto const catchBlock = label(env, inst->taken());

  // calleeSP points at the callee's evaluation stack; the ActRec sits argc
  // cells above it.
  auto const calleeSP = sp[cellsToBytes(extra->spOffset.offset)];
  auto const calleeAR = calleeSP + cellsToBytes(argc);

  v << store{fp, calleeAR + AROFF(m_sfp)};
  v << storeli{safe_cast<int32_t>(extra->after), calleeAR + AROFF(m_soff)};

  if (extra->fcallAwait) {
    // This clobbers any flags that might have already been set on the callee
    // AR (e.g., by SpillFrame), but this is okay because there should never be
    // any conflicts; see the documentation in act-rec.h.
    auto const imm = static_cast<int32_t>(
      ActRec::encodeNumArgsAndFlags(argc, ActRec::Flags::IsFCallAwait)
    );
    v << storeli{imm, calleeAR + AROFF(m_numArgsAndFlags)};
  }

  auto const isNativeImplCall = callee &&
                                callee->builtinFuncPtr() &&
                                !callee->nativeFuncPtr() &&
                                argc == callee->numParams();
  if (isNativeImplCall) {
    // The assumption here is that for builtins, the generated func contains
    // only a single opcode (NativeImpl), and there are no non-argument locals.
    if (do_assert) {
      assertx(argc == callee->numLocals());
      assertx(callee->numIterators() == 0);
      // Verify the single-NativeImpl-opcode assumption (AssertRATL opcodes
      // may precede it).
      auto addr = callee->getEntry();
      while (peek_op(addr) == Op::AssertRATL) {
        addr += instrLen(addr);
      }
      assertx(peek_op(addr) == Op::NativeImpl);
      assertx(addr + instrLen(addr) == callee->unit()->entry() + callee->past());
    }

    v << store{v.cns(mcg->ustubs().retHelper), calleeAR + AROFF(m_savedRip)};
    if (callee->attrs() & AttrMayUseVV) {
      v << storeqi{0, calleeAR + AROFF(m_invName)};
    }
    // Enter the callee frame before the surprise check.
    v << lea{calleeAR, rvmfp()};

    emitCheckSurpriseFlagsEnter(v, vc, fp, Fixup(0, argc), catchBlock);

    auto const builtinFuncPtr = callee->builtinFuncPtr();
    TRACE(2, "Calling builtin preClass %p func %p\n",
          callee->preClass(), builtinFuncPtr);

    // We sometimes call this while curFunc() isn't really the builtin, so make
    // sure to record the sync point as if we are inside the builtin.
    if (FixupMap::eagerRecord(callee)) {
      auto const syncSP = v.makeReg();
      v << lea{calleeSP, syncSP};
      emitEagerSyncPoint(v, callee->getEntry(), rvmtl(), rvmfp(), syncSP);
    }

    // Call the native implementation. This will free the locals for us in the
    // normal case. In the case where an exception is thrown, the VM unwinder
    // will handle it for us.
    auto const done = v.makeBlock();
    v << vinvoke{CallSpec::direct(builtinFuncPtr), v.makeVcallArgs({{rvmfp()}}),
                 v.makeTuple({}), {done, catchBlock}, Fixup(0, argc)};
    env.catch_calls[inst->taken()] = CatchCall::CPP;
    v = done;

    // The native implementation already put the return value on the stack for
    // us, and handled cleaning up the arguments. We have to update the frame
    // pointer and the stack pointer, and load the return value into the return
    // register so the trace we are returning to has it where it expects.
    // TODO(#1273094): We should probably modify the actual builtins to return
    // values via registers using the C ABI and do a reg-to-reg move.
    loadTV(v, inst->dst(), dstLoc(env, inst, 0), rvmfp()[AROFF(m_r)], true);
    // Pop the frame: restore the caller's fp from the callee AR.
    v << load{rvmfp()[AROFF(m_sfp)], rvmfp()};
    emitRB(v, Trace::RBTypeFuncExit, callee->fullName()->data());
    return;
  }

  v << lea{calleeAR, rvmfp()};

  if (RuntimeOption::EvalHHIRGenerateAsserts) {
    // Poison vmsp and the saved RIP so stale values are easy to spot.
    v << syncvmsp{v.cns(0x42)};
    constexpr uint64_t kUninitializedRIP = 0xba5eba11acc01ade;
    emitImmStoreq(v, kUninitializedRIP, rvmfp()[AROFF(m_savedRip)]);
  }

  // Emit a smashable call that initially calls a recyclable service request
  // stub. The stub and the eventual targets take rvmfp() as an argument,
  // pointing to the callee ActRec.
  auto const target = callee
    ? mcg->ustubs().immutableBindCallStub
    : mcg->ustubs().bindCallStub;

  auto const done = v.makeBlock();
  v << callphp{target, php_call_regs(), {{done, catchBlock}}};
  env.catch_calls[inst->taken()] = CatchCall::PHP;
  v = done;

  auto const dst = dstLoc(env, inst, 0);
  v << defvmret{dst.reg(0), dst.reg(1)};
}
// Queue a fixup for the machine code at `frontier`, mapping it back to
// bytecode offset `pcOff` with VM stack-pointer offset `spOff`. The entry is
// held in m_pendingFixups until processed.
void FixupMap::recordSyncPoint(CodeAddress frontier, Offset pcOff, Offset spOff) {
  auto const fixup = Fixup(pcOff, spOff);
  m_pendingFixups.emplace_back(frontier, fixup);
}
// Look up the named slate color in the editor style and return it with
// Fixup() applied.
FSlateColor UJavascriptEditorStyle::GetSlateColor(const FName& StyleName) {
	const FSlateColor Raw = FEditorStyle::Get().GetSlateColor(StyleName);
	return Fixup(Raw);
}
// Look up the named brush in the editor style. Returns the brush with
// Fixup() applied, or a default-constructed FSlateBrush when the style has
// no brush registered under that name.
FSlateBrush UJavascriptEditorStyle::GetBrush(const FName& StyleName) {
	if (auto* Brush = FEditorStyle::Get().GetBrush(StyleName))
	{
		return Fixup(*Brush);
	}
	return FSlateBrush();
}