// Tear down a mutable-iteration context: destroy the iterator, release the
// cached key/value cells, and drop the reference on the iterated RefData.
MIterCtx::~MIterCtx() {
  // Explicitly destroy and free the separately-allocated iterator storage.
  m_mArray->~MutableArrayIter();
  smart_free(m_mArray);
  // Release our references to the cached key and value cells.
  tvRefcountedDecRef(&m_key);
  tvRefcountedDecRef(&m_val);
  // Drop the reference we held on the RefData being iterated, if any.
  if (m_ref) decRefRef(const_cast<RefData*>(m_ref));
}
// Tear down a mutable-iteration context: destroy the iterator, release the
// cached key/value cells, and drop the reference on the iterated RefData.
MIterCtx::~MIterCtx() {
  // Explicitly destroy and free the separately-allocated iterator storage.
  m_mArray->~MutableArrayIter();
  smart_free(m_mArray);
  // Release our references to the cached key and value cells.
  tvRefcountedDecRef(&m_key);
  tvRefcountedDecRef(&m_val);
  // Drop our reference on the RefData being iterated; free it if we held
  // the last reference.
  if (m_ref && m_ref->decRefCount() == 0) {
    const_cast<RefData*>(m_ref)->release();
  }
}
c_Continuation::~c_Continuation() {
  // Release the references held by the current key/value pair.
  tvRefcountedDecRef(m_key);
  tvRefcountedDecRef(m_value);
  // Free locals, but don't trigger the EventHook for FunctionExit
  // since the continuation function has already been exited. We
  // don't want redundant calls.
  ActRec* ar = actRec();
  frame_free_locals_inl_no_hook<false>(ar, ar->m_func->numLocals());
}
Generator::~Generator() {
  // A Done generator has already released its key/value and locals;
  // nothing left to clean up.
  if (LIKELY(getState() == State::Done)) {
    return;
  }

  assert(getState() != State::Running);
  // Release the references held by the current key/value pair.
  tvRefcountedDecRef(m_key);
  tvRefcountedDecRef(m_value);

  // Free locals, but don't trigger the EventHook for FunctionReturn since
  // the generator has already been exited. We don't want redundant calls.
  ActRec* ar = actRec();
  frame_free_locals_inl_no_hook<false>(ar, ar->func()->numLocals());
}
c_Generator::~c_Generator() {
  // A Done generator has already released its key/value and locals;
  // nothing left to clean up.
  if (LIKELY(getState() == Done)) {
    return;
  }

  // Release the references held by the current key/value pair.
  tvRefcountedDecRef(m_key);
  tvRefcountedDecRef(m_value);

  // Free locals, but don't trigger the EventHook for FunctionReturn
  // since the generator has already been exited. We
  // don't want redundant calls.
  ActRec* ar = actRec();
  frame_free_locals_inl_no_hook<false>(ar, ar->m_func->numLocals());
}
// Destructor for objects carrying native data. Destroys the ObjectData,
// releases declared properties, runs the native destroy/sweep hooks, and
// frees the whole allocation (which begins before the ObjectData).
void nativeDataInstanceDtor(ObjectData* obj, const Class* cls) {
  // Builtin-layout classes cannot carry native data.
  assert(!cls->preClass()->builtinObjSize());
  assert(!cls->preClass()->builtinODOffset());
  obj->~ObjectData();

  // Declared properties live immediately after the ObjectData; release each.
  auto const nProps = size_t{cls->numDeclProperties()};
  auto prop = reinterpret_cast<TypedValue*>(obj + 1);
  auto const stop = prop + nProps;
  for (; prop != stop; ++prop) {
    tvRefcountedDecRef(prop);
  }

  auto ndi = cls->getNativeDataInfo();
  assert(ndi);
  if (ndi->destroy) {
    ndi->destroy(obj);
  }
  if (ndi->sweep) {
    // Unregister from the sweep list; the node precedes the native data.
    removeSweepNode(getSweepNode(obj));
  }

  // Allocation layout is [SweepNode][native data][ObjectData][props], so the
  // real allocation start is nativeDataSize bytes before the ObjectData.
  size_t nativeDataSize = alignTypedValue(ndi->sz + sizeof(SweepNode));
  size_t size = ObjectData::sizeForNProps(nProps) + nativeDataSize;
  void *ptr = obj;
  ptr = static_cast<char*>(ptr) - nativeDataSize;
  if (LIKELY(size <= kMaxSmartSize)) {
    return MM().smartFreeSizeLogged(ptr, size);
  }
  MM().smartFreeSizeBigLogged(ptr, size);
}
// Insert or overwrite the entry for string key `key`. In non-raw mode this
// copies-on-write and bumps m_version so in-flight iterators notice.
ALWAYS_INLINE
void BaseMap::setImpl(StringData* key, const TypedValue* val) {
  if (!raw) {
    mutate();
  }
  assert(val->m_type != KindOfRef);
  assert(canMutateBuffer());
retry:
  strhash_t h = key->hash();
  auto* p = findForInsert(key, h);
  assert(p);
  if (validPos(*p)) {
    // Key already present: overwrite in place. Save the old value and only
    // decref it after the copy, in case the decref runs reentrant code.
    auto& e = data()[*p];
    TypedValue old = e.data;
    cellDup(*val, e.data);
    tvRefcountedDecRef(old);
    return;
  }
  if (UNLIKELY(isFull())) {
    makeRoom();
    // The table may have moved; the insert position must be recomputed.
    goto retry;
  }
  if (!raw) {
    ++m_version;
  }
  auto& e = allocElm(p);
  cellDup(*val, e.data);
  e.setStrKey(key, h);
  updateIntLikeStrKeys(key);
}
// Insert or overwrite the entry for integer key `h`. In non-raw mode this
// copies-on-write and bumps m_version so in-flight iterators notice.
ALWAYS_INLINE
void BaseMap::setImpl(int64_t h, const TypedValue* val) {
  if (!raw) {
    mutate();
  }
  assert(val->m_type != KindOfRef);
  assert(canMutateBuffer());
retry:
  auto p = findForInsert(h);
  assert(p);
  if (validPos(*p)) {
    // Key already present: overwrite in place. Save the old value and only
    // decref it after the copy, in case the decref runs reentrant code.
    auto& e = data()[*p];
    TypedValue old = e.data;
    cellDup(*val, e.data);
    tvRefcountedDecRef(old);
    return;
  }
  if (UNLIKELY(isFull())) {
    makeRoom();
    // The table may have moved; the insert position must be recomputed.
    goto retry;
  }
  if (!raw) {
    ++m_version;
  }
  auto& e = allocElm(p);
  cellDup(*val, e.data);
  e.setIntKey(h);
  updateNextKI(h);
}
// Called when a function received more arguments than it declares. Either
// stashes the extras (for functions that may use the variable environment)
// or discards them and shrinks the frame's argument count.
void trimExtraArgs(ActRec* ar) {
  assert(!ar->hasInvName());
  sync_regstate_to_caller(ar);
  const Func* f = ar->m_func;
  int numParams = f->numParams();
  int numArgs = ar->numArgs();
  assert(numArgs > numParams);
  int numExtra = numArgs - numParams;

  TRACE(1, "trimExtraArgs: %d args, function %s takes only %d, ar %p\n",
        numArgs, f->name()->data(), numParams, ar);

  if (f->attrs() & AttrMayUseVV) {
    // Preserve the extra args so func_get_args() and friends can see them.
    // Arguments live below the ActRec on the stack.
    assert(!ar->hasExtraArgs());
    ar->setExtraArgs(ExtraArgs::allocateCopy(
      (TypedValue*)(uintptr_t(ar) - numArgs * sizeof(TypedValue)),
      numArgs - numParams));
  } else {
    // Function is not marked as "MayUseVV", so discard the extra arguments
    TypedValue* tv = (TypedValue*)(uintptr_t(ar) - numArgs*sizeof(TypedValue));
    for (int i = 0; i < numExtra; ++i) {
      tvRefcountedDecRef(tv);
      ++tv;
    }
    ar->setNumArgs(numParams);
  }

  // Only go back to dirty in a non-exception case. (Same reason as
  // above.)
  tl_regState = VMRegState::DIRTY;
}
// Attach `prev` to the end of `top`'s exception 'previous' chain: walk down
// until a slot that does not already hold an Exception is found, then store
// `prev` there.
void chainFaultObjects(ObjectData* top, ObjectData* prev) {
  static const StaticString nProp("previous");
  bool visible, accessible, unset;
  while (true) {
    TypedValue* top_tv = top->getProp(
      SystemLib::s_ExceptionClass, nProp.get(),
      visible, accessible, unset
    );
    assert(visible && accessible && !unset);
    if (top_tv->m_type != KindOfObject ||
        !top_tv->m_data.pobj->instanceof(SystemLib::s_ExceptionClass)) {
      // Since we are overwriting, decref.
      tvRefcountedDecRef(top_tv);
      // Objects held in m_faults are not refcounted, therefore
      // we need to increase the ref count here.
      top_tv->m_type = KindOfObject;
      top_tv->m_data.pobj = prev;
      prev->incRefCount();
      break;
    }
    // Slot already holds an Exception; descend into it.
    top = top_tv->m_data.pobj;
  }
}
// Release the reference held by every element, then return the buffer.
BaseVector::~BaseVector() {
  for (TypedValue* tv = m_data, * const stop = m_data + m_size;
       tv != stop;
       ++tv) {
    tvRefcountedDecRef(tv);
  }
  freeData();
}
// Rewrite the frame of a __call/__callStatic dispatch so the callee sees its
// canonical two parameters: the invoked method name and an array of the
// original arguments. Returns 1 if the frame was reshuffled, 0 otherwise.
int shuffleArgsForMagicCall(ActRec* ar) {
  // Only magic-call frames carry an "invoke name".
  if (!ar->hasInvName()) {
    return 0;
  }
  const Func* f UNUSED = ar->m_func;
  f->validate();
  assert(f->name()->isame(s_call.get()) ||
         f->name()->isame(s_callStatic.get()));
  assert(f->numParams() == 2);
  assert(ar->hasInvName());
  StringData* invName = ar->getInvName();
  assert(invName);
  ar->setVarEnv(nullptr);
  int nargs = ar->numArgs();

  // We need to make an array containing all the arguments passed by the
  // caller and put it where the second argument is
  PackedArrayInit aInit(nargs);
  for (int i = 0; i < nargs; ++i) {
    // Arguments live below the ActRec; slot i+1 below ar is argument i.
    auto const tv = reinterpret_cast<TypedValue*>(
      uintptr_t(ar) - (i+1) * sizeof(TypedValue)
    );
    aInit.append(tvAsCVarRef(tv));
    // The array now owns a reference; drop the stack slot's reference.
    tvRefcountedDecRef(tv);
  }

  // Put invName in the slot for first argument
  setArgInActRec(ar, 0, uint64_t(invName), KindOfString);
  // Put argArray in the slot for second argument
  auto const argArray = aInit.toArray().detach();
  setArgInActRec(ar, 1, uint64_t(argArray), KindOfArray);
  // Fix up ActRec's numArgs
  ar->initNumArgs(2);
  return 1;
}
// isset($base[$offset]) for an ArrayAccess object. Usually answered by
// offsetExists alone; for ArrayObject the stored value must also be non-null.
bool objOffsetIsset(TypedValue& tvRef, ObjectData* base, const Variant& offset,
                    bool validate /* = true */) {
  auto exists = objOffsetExists(base, offset);

  // Unless we called ArrayObject::offsetExists, there's nothing more to do
  if (exists != OffsetExistsResult::IssetIfNonNull) {
    return (int)exists;
  }

  // For ArrayObject::offsetExists, we need to check the value at `offset`.
  // If it's null, then we return false.
  TypedValue tvResult;
  tvWriteUninit(&tvResult);

  // We can't call the offsetGet method on `base` because users aren't
  // expecting offsetGet to be called for `isset(...)` expressions, so call
  // the method on the base ArrayObject class.
  auto const method =
    SystemLib::s_ArrayObjectClass->lookupMethod(s_offsetGet.get());
  assert(method != nullptr);
  g_context->invokeFuncFew(&tvResult, method, base, nullptr, 1,
                           offset.asCell());
  auto const result = !IS_NULL_TYPE(tvResult.m_type);
  // Release the fetched value; only its null-ness mattered.
  tvRefcountedDecRef(&tvResult);
  return result;
}
// Populate `bvec` with callback(i, value) for each element of this vector.
// Throws if `callback` is not callable or if the callback mutates this
// vector mid-iteration.
void BaseVector::mapwithkey(BaseVector* bvec, CVarRef callback) {
  CallCtx ctx;
  vm_decode_function(callback, nullptr, false, ctx);
  if (!ctx.func) {
    Object e(SystemLib::AllocInvalidArgumentExceptionObject(
      "Parameter must be a valid callback"));
    throw e;
  }
  uint sz = m_size;
  bvec->reserve(sz);
  for (uint i = 0; i < sz; ++i) {
    // Write the callback result directly into the destination slot.
    TypedValue* tv = &bvec->m_data[i];
    // Snapshot the version so mutation by the callback can be detected.
    int32_t version = m_version;
    TypedValue args[2] = { make_tv<KindOfInt64>(i), m_data[i] };
    g_vmContext->invokeFuncFew(tv, ctx, 2, args);
    if (UNLIKELY(version != m_version)) {
      // The callback mutated this vector; discard the result and bail.
      tvRefcountedDecRef(tv);
      throw_collection_modified();
    }
    // Count the element only once its slot holds a valid value.
    ++bvec->m_size;
  }
}
// Run the function-exit hooks for a frame whose locals are already freed.
// On an exception path this function owns (and must release) `retval`.
void EventHook::onFunctionReturn(ActRec* ar, TypedValue retval) {
  // The locals are already gone. Null out everything.
  ar->setThisOrClassAllowNull(nullptr);
  ar->setLocalsDecRefd();
  ar->setVarEnv(nullptr);

  try {
    ssize_t flags = CheckSurprise();
    onFunctionExit(ar, &retval, nullptr, flags);

    // Async profiler
    if ((flags & AsyncEventHookFlag) &&
        ar->func()->isAsyncFunction() && ar->resumed()) {
      auto session = AsioSession::Get();
      // Return @ resumed execution => AsyncFunctionWaitHandle succeeded.
      if (session->hasOnResumableSuccessCallback()) {
        auto afwh = frame_afwh(ar);
        session->onResumableSuccess(afwh, cellAsCVarRef(retval));
      }
    }
  } catch (...) {
    /*
     * We're responsible for freeing the return value if we exit with an
     * exception. See irgen-ret.
     */
    tvRefcountedDecRef(retval);
    throw;
  }
}
// Attach `prev` to the end of `top`'s 'previous' chain: walk down until a
// slot that does not already hold a Throwable is found, then store `prev`
// there.
void chainFaultObjects(ObjectData* top, ObjectData* prev) {
  while (true) {
    // The 'previous' property is declared on Exception or Error depending
    // on which hierarchy `top` belongs to.
    auto const lookup = top->getProp(
      top->instanceof(SystemLib::s_ExceptionClass)
        ? SystemLib::s_ExceptionClass
        : SystemLib::s_ErrorClass,
      s_previous.get()
    );
    auto const top_tv = lookup.prop;
    assert(top_tv != nullptr);

    assert(top_tv->m_type != KindOfUninit && lookup.accessible);
    if (top_tv->m_type != KindOfObject ||
        !top_tv->m_data.pobj->instanceof(SystemLib::s_ThrowableClass)) {
      // Since we are overwriting, decref.
      tvRefcountedDecRef(top_tv);
      // Objects held in m_faults are not refcounted, therefore we need to
      // increase the ref count here.
      top_tv->m_type = KindOfObject;
      top_tv->m_data.pobj = prev;
      prev->incRefCount();
      break;
    }
    // Slot already holds a Throwable; descend into it.
    top = top_tv->m_data.pobj;
  }
}
// Destructor for objects carrying native data. Destroys the ObjectData,
// releases declared properties, runs the native destroy hook, unregisters
// from sweeping, and frees the allocation (which begins at the native node).
void nativeDataInstanceDtor(ObjectData* obj, const Class* cls) {
  // Builtin-layout classes cannot carry native data.
  assert(!cls->preClass()->builtinObjSize());
  assert(!cls->preClass()->builtinODOffset());
  obj->~ObjectData();

  // Declared properties live immediately after the ObjectData; release each.
  auto const nProps = size_t{cls->numDeclProperties()};
  auto prop = reinterpret_cast<TypedValue*>(obj + 1);
  auto const stop = prop + nProps;
  for (; prop != stop; ++prop) {
    tvRefcountedDecRef(prop);
  }

  auto ndi = cls->getNativeDataInfo();
  if (ndi->destroy) {
    ndi->destroy(obj);
  }
  // The native node precedes the ObjectData in the allocation.
  auto node = getNativeNode(obj, ndi);
  if (ndi->sweep) {
    MM().removeNativeObject(node);
  }

  size_t size = ObjectData::sizeForNProps(nProps) + ndsize(obj, ndi);
  if (LIKELY(size <= kMaxSmallSize)) {
    return MM().freeSmallSize(node, size);
  }
  MM().freeBigSize(node, size);
}
// Release any locals still held by this continuation (hhvm only), then
// run the sweep cleanup.
c_GenericContinuation::~c_GenericContinuation() {
  if (hhvm && m_locals != NULL) {
    for (TypedValue* local = m_locals;
         local != m_locals + m_nLocals;
         ++local) {
      tvRefcountedDecRef(local);
    }
  }
  c_GenericContinuation::sweep();
}
// unset($base[$offset]) for an ArrayAccess object: dispatch to the user's
// offsetUnset method and discard its (unused) return value.
void objOffsetUnset(ObjectData* base, const Variant& offset) {
  objArrayAccess(base);
  assert(!base->isCollection());
  auto const unsetMethod = base->methodNamed(s_offsetUnset.get());
  assert(unsetMethod != nullptr);
  TypedValue ret;
  tvWriteUninit(&ret);
  g_context->invokeFuncFew(&ret, unsetMethod, base, nullptr, 1,
                           offset.asCell());
  tvRefcountedDecRef(&ret);
}
// Release any values materialized into the local cache, free the cache, and
// drop our reference on the shared APC handle.
ALWAYS_INLINE
APCLocalArray::~APCLocalArray() {
  if (m_localCache) {
    auto const n = m_arr->capacity();
    for (size_t i = 0; i < n; ++i) {
      tvRefcountedDecRef(&m_localCache[i]);
    }
    smart_free(m_localCache);
  }
  m_arr->getHandle()->unreference();
}
/*
 * Cold path helper for AddNewElemC delegates to the ArrayData::append
 * virtual method.
 */
static NEVER_INLINE
ArrayData* genericAddNewElemC(ArrayData* a, TypedValue value) {
  // append() copies on write when the array is shared (refcount != 1), in
  // which case it returns a different ArrayData.
  ArrayData* r = a->append(tvAsCVarRef(&value), a->getCount() != 1);
  if (UNLIKELY(r != a)) {
    // A new array was produced: take a reference on it and drop ours on
    // the original.
    r->incRefCount();
    decRefArr(a);
  }
  // The array holds its own reference to the value now; drop ours.
  tvRefcountedDecRef(value);
  return r;
}
// Release any values materialized into the local cache, free the cache, and
// drop our reference on the shared array.
ALWAYS_INLINE
SharedMap::~SharedMap() {
  if (m_localCache) {
    auto const n = m_arr->arrCap();
    for (size_t i = 0; i < n; ++i) {
      tvRefcountedDecRef(&m_localCache[i]);
    }
    smart_free(m_localCache);
  }
  m_arr->decRef();
}
// Append `value` to a collection for the ColAddNewElemC opcode; raises a
// fatal error when the operand is not a collection.
ObjectData* colAddNewElemCHelper(ObjectData* coll, TypedValue value) {
  if (!coll->isCollection()) {
    raise_error("ColAddNewElemC: $2 must be a collection");
    return coll;
  }
  collectionInitAppend(coll, &value);
  // decref the value as the collection helper incref'ed it
  tvRefcountedDecRef(&value);
  return coll;
}
// Release any values materialized into the local cache, then free it.
HOT_FUNC
SharedMap::~SharedMap() {
  if (!m_localCache) return;
  for (size_t i = 0, n = size(); i < n; ++i) {
    tvRefcountedDecRef(&m_localCache[i]);
  }
  smart_free(m_localCache);
}
/**
 * Delegate the responsibility for freeing the buffer to the
 * frozen copy, if it exists.
 */
BaseVector::~BaseVector() {
  // If a frozen copy exists it owns the buffer; likewise nothing to do
  // when there is no buffer at all.
  if (!m_frozenCopy.isNull() || !m_data) return;

  for (TypedValue* tv = m_data, * const stop = m_data + m_size;
       tv != stop;
       ++tv) {
    tvRefcountedDecRef(tv);
  }
  smart_free(m_data);
  m_data = nullptr;
}
// Return *tv (as a cell, with a fresh reference) when tv is non-null,
// otherwise return def. Exactly one of the two ends up owned by the caller:
// def is decrefed when tv wins, and left untouched when it is returned.
ALWAYS_INLINE
TypedValue& getDefaultIfNullCell(TypedValue* tv, TypedValue& def) {
  if (LIKELY(tv != nullptr)) {
    tvRefcountedDecRef(&def);
    auto const cell = tvToCell(tv);
    tvRefcountedIncRef(cell);
    return *cell;
  }
  // refcount is already correct since def was never decrefed
  return def;
}
// Look up `name` of the given `kind` in the autoload map, include the mapped
// file, and verify existence via `checkExists`. On a miss, invokes the map's
// failure handler, whose boolean result controls whether to retry, stop, or
// continue with other autoloaders.
AutoloadHandler::Result AutoloadHandler::loadFromMap(const String& name,
                                                     const String& kind,
                                                     bool toLower,
                                                     const T &checkExists) {
  assert(!m_map.isNull());
  while (true) {
    // Fetch the per-kind sub-map (class/function/constant/...).
    const Variant& type_map = m_map.get()->get(kind);
    auto const typeMapCell = type_map.asCell();
    if (typeMapCell->m_type != KindOfArray) return Failure;
    String canonicalName = toLower ? f_strtolower(name) : name;
    const Variant& file = typeMapCell->m_data.parr->get(canonicalName);
    bool ok = false;
    if (file.isString()) {
      String fName = file.toCStrRef().get();
      // Resolve relative paths against the configured map root.
      if (fName.get()->data()[0] != '/') {
        if (!m_map_root.empty()) {
          fName = m_map_root + fName;
        }
      }
      try {
        JIT::VMRegAnchor _;
        bool initial;
        auto const ec = g_context.getNoCheck();
        Unit* u = ec->evalInclude(fName.get(), nullptr, &initial);
        if (u) {
          if (initial) {
            // First inclusion of this unit: run its pseudo-main and
            // discard the return value.
            TypedValue retval;
            ec->invokeFunc(&retval, u->getMain(), init_null_variant,
                           nullptr, nullptr, nullptr, nullptr,
                           ExecutionContext::InvokePseudoMain);
            tvRefcountedDecRef(&retval);
          }
          ok = true;
        }
      } catch (...) {} // best-effort: a failed include falls through to
                       // the failure handler below
    }
    if (ok && checkExists(name)) {
      return Success;
    }
    const Variant& func = m_map.get()->get(s_failure);
    if (func.isNull()) return Failure;
    // can throw, otherwise
    // - true means the map was updated. try again
    // - false means we should stop applying autoloaders (only affects classes)
    // - anything else means keep going
    Variant action = vm_call_user_func(func, make_packed_array(kind, name));
    auto const actionCell = action.asCell();
    if (actionCell->m_type == KindOfBoolean) {
      if (actionCell->m_data.num) continue;
      return StopAutoloading;
    }
    return ContinueAutoloading;
  }
}
// unset($base[$offset]) for an ArrayAccess object: dispatch to the user's
// offsetUnset method and discard its (unused) return value.
void objOffsetUnset(ObjectData* base, CVarRef offset) {
  objArrayAccess(base);
  static StringData* s_offsetUnsetName =
    StringData::GetStaticString("offsetUnset");
  assert(!base->isCollection());
  auto const unsetMethod = base->methodNamed(s_offsetUnsetName);
  assert(unsetMethod != nullptr);
  TypedValue ret;
  tvWriteUninit(&ret);
  base->invokeUserMethod(&ret, unsetMethod, CREATE_VECTOR1(offset));
  tvRefcountedDecRef(&ret);
}
// Release every occupied slot's name and (owned) value, then free the table.
NameValueTable::~NameValueTable() {
  // A leaked table's contents are intentionally not freed.
  if (leaked()) return;
  // Walk the slots backwards; &m_table[-1] serves as the loop sentinel.
  for (Elm* elm = &m_table[m_tabMask]; elm != &m_table[-1]; --elm) {
    if (elm->m_name) {
      decRefStr(const_cast<StringData*>(elm->m_name));
      // Slots tagged kNamedLocalDataType don't own their value; skip those.
      if (elm->m_tv.m_type != kNamedLocalDataType) {
        tvRefcountedDecRef(elm->m_tv);
      }
    }
  }
  req::free(m_table);
}
// Release any values materialized into the local cache, free the cache,
// drop our reference on the shared handle, and unregister from the
// memory manager's APC-array list.
ALWAYS_INLINE
APCLocalArray::~APCLocalArray() {
  if (m_localCache) {
    auto const n = m_arr->capacity();
    for (size_t i = 0; i < n; ++i) {
      tvRefcountedDecRef(&m_localCache[i]);
    }
    req::free(m_localCache);
  }
  m_arr->unreference();
  MM().removeApcArray(this);
}