ArrayData* PackedArray::SetInt(ArrayData* adIn, int64_t k, const Variant& v,
                               bool copy) {
  assert(checkInvariants(adIn));

  // Right now SetInt is used for the AddInt entry point also. This
  // first branch is the only thing we'd be able to omit if we were
  // doing AddInt.
  //
  // Note: the size_t cast makes negative keys compare as huge unsigned
  // values, so they fall through to the promote-to-mixed path below.
  if (size_t(k) < adIn->m_size) {
    // Overwriting an existing element: stay packed; copy-on-write first
    // if the caller asked for it.
    auto const ad = copy ? Copy(adIn) : adIn;
    auto& dst = *tvToCell(&packedData(ad)[k]);
    cellSet(*v.asCell(), dst);
    // TODO(#3888164): we should restructure things so we don't have to
    // check KindOfUninit here.
    if (UNLIKELY(dst.m_type == KindOfUninit)) {
      dst.m_type = KindOfNull;
    }
    return ad;
  }

  // Setting the int at the size of the array can keep it in packed
  // mode---it's the same as an append.
  if (size_t(k) == adIn->m_size) return Append(adIn, v, copy);

  // On the promote-to-mixed path, we can use addVal since we know the
  // key can't exist.
  auto const mixed = copy ? ToMixedCopy(adIn) : ToMixed(adIn);
  return mixed->addVal(k, v);
}
ArrayData* PackedArray::NonSmartCopy(const ArrayData* adIn) {
  assert(checkInvariants(adIn));

  // There's no reason to use the full capacity, since non-smart
  // arrays are not mutable.
  auto const cap = adIn->m_size;
  auto const size = adIn->m_size;

  // Non-smart arrays live outside the request-local heap, so this uses
  // plain malloc rather than the memory manager.
  auto const ad = static_cast<ArrayData*>(
    std::malloc(sizeof(ArrayData) + cap * sizeof(TypedValue))
  );
  // One store initializes size (high 32 bits) and cap; the kind bits are
  // zero, which is kPackedKind.  The second store sets m_pos and leaves
  // the refcount half of the word zero.
  ad->m_kindAndSize = uint64_t{size} << 32 | cap; // zero kind
  ad->m_posAndCount = static_cast<uint32_t>(adIn->m_pos);

  auto const srcData = packedData(adIn);
  auto const stop = srcData + size;
  auto targetData = reinterpret_cast<TypedValue*>(ad + 1);
  for (auto ptr = srcData; ptr != stop; ++ptr, ++targetData) {
    // Duplicate each element, flattening contained references.
    tvDupFlattenVars(ptr, targetData, adIn);
  }

  assert(ad->m_kind == ArrayData::kPackedKind);
  assert(ad->m_packedCap == cap);
  assert(ad->m_size == size);
  assert(ad->m_pos == adIn->m_pos);
  assert(ad->m_count == 0);
  assert(checkInvariants(ad));
  return ad;
}
ArrayData* PackedArray::Dequeue(ArrayData* adIn, Variant& value) {
  assert(checkInvariants(adIn));
  // Copy-on-write if anyone else holds a reference to this array.
  auto const ad = adIn->hasMultipleRefs() ? Copy(adIn) : adIn;

  // To conform to PHP behavior, we invalidate all strong iterators when an
  // element is removed from the beginning of the array.
  if (UNLIKELY(strong_iterators_exist())) {
    free_strong_iterators(ad);
  }

  if (UNLIKELY(ad->m_size == 0)) {
    // Empty array: produce null and reset the internal position.
    value = uninit_null();
    ad->m_pos = ArrayData::invalid_index;
    return ad;
  }

  // This is O(N), but so is Dequeue on a mixed array, because it
  // needs to renumber keys.  So it makes sense to stay packed.
  auto n = ad->m_size - 1;
  auto const data = packedData(ad);
  // Move the first element out, then shift the remaining n elements down
  // one slot.  The moved-from slot is overwritten by the memmove.
  value = std::move(tvAsVariant(data)); // no incref+decref
  std::memmove(data, data + 1, n * sizeof *data);
  ad->m_size = n;
  ad->m_pos = n > 0 ? 0 : ArrayData::invalid_index;
  return ad;
}
/*
 * Convert to mixed, reserving space for at least `neededSize' elems.
 * The `neededSize' should include old->size(), but may be equal to
 * it.
 *
 * Unlike the other ToMixed functions, the returned array already has
 * a reference count of 1.
 */
MixedArray* PackedArray::ToMixedCopyReserve(const ArrayData* old,
                                            size_t neededSize) {
  assert(neededSize >= old->m_size);
  auto const ad = ToMixedHeader(old, neededSize);
  ad->m_count = 1;
  auto const oldSize = old->m_size;
  auto const mask = ad->m_tableMask;
  auto dstData = ad->data();
  auto dstHash = ad->hashTab();
  auto const srcData = packedData(old);

  // Elements keep their packed integer keys 0..oldSize-1, so the hash
  // table is just the identity mapping over that range.  Values are
  // duplicated (old remains valid) with refs flattened where possible.
  auto i = uint32_t{0};
  for (; i < oldSize; ++i) {
    dstData->setIntKey(i);
    tvDupFlattenVars(&srcData[i], &dstData->data, old);
    *dstHash = i;
    ++dstData;
    ++dstHash;
  }
  // Mark the rest of the hash table (up to and including mask) empty.
  for (; i <= mask; ++i) {
    *dstHash++ = MixedArray::Empty;
  }

  assert(ad->checkInvariants());
  return ad;
}
NEVER_INLINE
ArrayData* PackedArray::Copy(const ArrayData* adIn) {
  assert(checkInvariants(adIn));

  auto const capacity = adIn->m_packedCap;
  auto const count = adIn->m_size;

  // Allocate a new packed array of the same capacity from the request
  // heap.
  auto const copied = static_cast<ArrayData*>(
    MM().objMallocLogged(sizeof(ArrayData) + capacity * sizeof(TypedValue))
  );

  // Initialize the header with two stores: size/cap packed in one word
  // (kind bits zero, i.e. kPackedKind), and m_pos with a zero refcount.
  copied->m_kindAndSize = uint64_t{count} << 32 | capacity; // zero kind
  copied->m_posAndCount = static_cast<uint32_t>(adIn->m_pos);

  // Duplicate every element, flattening contained references.
  auto const src = packedData(adIn);
  auto const dst = reinterpret_cast<TypedValue*>(copied + 1);
  for (uint32_t i = 0; i < count; ++i) {
    tvDupFlattenVars(&src[i], &dst[i], adIn);
  }

  assert(copied->m_kind == ArrayData::kPackedKind);
  assert(copied->m_packedCap == capacity);
  assert(copied->m_size == count);
  assert(copied->m_pos == adIn->m_pos);
  assert(copied->m_count == 0);
  assert(checkInvariants(copied));
  return copied;
}
/*
 * Converts a packed array to mixed, leaving the packed array in an
 * empty state.  You need ToMixedCopy in cases where the old array
 * needs to remain un-modified (usually if `copy' is true).
 *
 * The returned array is mixed, and is guaranteed not to be isFull().
 * (Note: only unset can call ToMixed when we aren't about to insert.)
 */
MixedArray* PackedArray::ToMixed(ArrayData* old) {
  auto const oldSize = old->m_size;
  // The +1 guarantees room for the insert most callers do next.
  auto const ad = ToMixedHeader(old, oldSize + 1);
  auto const mask = ad->m_tableMask;
  auto dstData = ad->data();
  auto dstHash = ad->hashTab();
  auto const srcData = packedData(old);

  // Keys are the packed indices 0..oldSize-1, so the hash table is the
  // identity over that range.  tvCopy steals the values without touching
  // refcounts; setting old->m_size = 0 below makes that safe, since the
  // emptied source will no longer decref them.
  auto i = uint32_t{0};
  for (; i < oldSize; ++i) {
    dstData->setIntKey(i);
    tvCopy(srcData[i], dstData->data);
    *dstHash = i;
    ++dstData;
    ++dstHash;
  }
  // Mark the remainder of the hash table empty.
  for (; i <= mask; ++i) {
    *dstHash++ = MixedArray::Empty;
  }

  old->m_size = 0;

  assert(ad->checkInvariants());
  assert(!ad->isFull());
  return ad;
}
Object c_AwaitAllWaitHandle::FromPackedArray(const ArrayData* dependencies) {
  // Walk the packed payload cell-by-cell; refs are unwrapped before the
  // dependency is inspected.
  auto const begin = packedData(dependencies);
  auto const end = begin + dependencies->getSize();
  return createAAWH<const TypedValue*>(
    begin, end,
    [](const TypedValue* cur, UNUSED const TypedValue* limit) {
      return cur + 1;
    },
    [](const TypedValue* cur) { return tvToCell(cur); });
}
void PackedArray::OnSetEvalScalar(ArrayData* ad) {
  assert(checkInvariants(ad));
  auto ptr = packedData(ad);
  // Walk only the initialized elements (m_size), not the full capacity:
  // slots in [m_size, m_packedCap) are uninitialized, and reading them
  // (let alone calling setEvalScalar on them) is undefined behavior.
  // The original bound of m_packedCap iterated over that garbage.
  auto const stop = ptr + ad->m_size;
  for (; ptr != stop; ++ptr) {
    tvAsVariant(ptr).setEvalScalar();
  }
}
NEVER_INLINE
ArrayData* PackedArray::Grow(ArrayData* old) {
  assert(checkInvariants(old));
  assert(old->m_size == old->m_packedCap);
  DEBUG_ONLY auto const oldPos = old->m_pos;

  auto const oldCap = old->m_packedCap;
  auto const cap = oldCap * 2;
  // Returning null tells the caller the packed limit was hit and it
  // should promote to a mixed array instead.
  if (UNLIKELY(cap >= kMaxPackedCap)) return nullptr;

  auto const ad = static_cast<ArrayData*>(
    MM().objMallocLogged(sizeof(ArrayData) + cap * sizeof(TypedValue))
  );

  auto const oldSize = old->m_size;
  auto const oldPosUnsigned = uint64_t{static_cast<uint32_t>(old->m_pos)};
  // Size/cap packed into one store; kind bits zero means kPackedKind.
  // m_posAndCount copies m_pos and leaves the refcount zero.
  ad->m_kindAndSize = uint64_t{oldSize} << 32 | cap;
  ad->m_posAndCount = oldPosUnsigned;
  if (UNLIKELY(strong_iterators_exist())) {
    move_strong_iterators(ad, old);
  }

  // Steal the old array payload.  At the time of this writing, it was
  // better not to reuse the memcpy return value here because gcc had
  // `ad' in a callee saved register anyway.  The reg-to-reg move was
  // smaller than subtracting sizeof(ArrayData) from rax to return.
  old->m_size = 0;
  std::memcpy(packedData(ad), packedData(old), oldSize * sizeof(TypedValue));

  // TODO(#2926276): it would be good to refactor callers to expect
  // our refcount to start at 1.

  assert(ad->m_kind == ArrayData::kPackedKind);
  assert(ad->m_pos == oldPos);
  assert(ad->m_count == 0);
  assert(ad->m_packedCap == cap);
  assert(ad->m_size == oldSize);
  assert(checkInvariants(ad));
  return ad;
}
SortFlavor PackedArray::preSort(ArrayData* ad) {
  assert(ad->isPacked());
  // Scan every element to classify the array's contents, so the caller
  // can select a specialized comparator.
  auto const elems = packedData(ad);
  TVAccessor acc;
  uint32_t const count = ad->m_size;
  bool onlyInts = true;
  bool onlyStrs = true;
  for (uint32_t idx = 0; idx < count; ++idx) {
    onlyInts = onlyInts && acc.isInt(elems[idx]);
    onlyStrs = onlyStrs && acc.isStr(elems[idx]);
  }
  if (onlyStrs) return StringSort;
  return onlyInts ? IntegerSort : GenericSort;
}
void PackedArray::Sort(ArrayData* ad, int sort_flags, bool ascending) {
  assert(ad->isPacked());
  if (ad->m_size <= 1) {
    return;
  }
  assert(!ad->hasMultipleRefs());
  auto a = ad;
  // Sorting reorders elements, so any strong iterators would be invalid.
  if (UNLIKELY(strong_iterators_exist())) {
    free_strong_iterators(a);
  }
  SortFlavor flav = preSort(ad);
  a->m_pos = 0;
  // CALL_SORT expects `data_begin', `data_end', `flav', `sort_flags' and
  // `ascending' to be in scope under exactly these names; don't rename.
  auto data_begin = packedData(ad);
  auto data_end = data_begin + a->m_size;
  CALL_SORT(TVAccessor);
}
ArrayData* PackedArray::LvalNew(ArrayData* adIn, Variant*& ret, bool copy) {
  assert(checkInvariants(adIn));
  auto const ad = copy ? CopyAndResizeIfNeeded(adIn) : ResizeIfNeeded(adIn);
  if (UNLIKELY(!ad)) {
    // Hit the maximum packed capacity: promote to mixed.  Note that `ad'
    // is null in this branch, so the conversion must start from `adIn'.
    // (The previous code passed `ad' to ToMixed, dereferencing null.)
    // This now matches the promote paths in Append/AppendRef/LvalInt.
    auto const mixed = copy ? ToMixedCopy(adIn) : ToMixed(adIn);
    return MixedArray::LvalNew(mixed, ret, copy);
  }
  // An append to a previously-empty array makes the new element current.
  if (ad->m_pos == ArrayData::invalid_index) {
    ad->m_pos = ad->m_size;
  }
  // Append a null element and hand back an lval pointing at it.
  auto& tv = packedData(ad)[ad->m_size++];
  tv.m_type = KindOfNull;
  ret = &tvAsVariant(&tv);
  return ad;
}
ArrayData* PackedArray::Append(ArrayData* adIn, const Variant& v, bool copy) {
  assert(checkInvariants(adIn));
  auto const ad = copy ? CopyAndResizeIfNeeded(adIn) : ResizeIfNeeded(adIn);
  if (UNLIKELY(!ad)) {
    // Hit the maximum packed capacity: promote to mixed and append there.
    auto const mixed = copy ? ToMixedCopy(adIn) : ToMixed(adIn);
    return MixedArray::Append(mixed, v, copy);
  }
  // An append to a previously-empty array makes the new element current.
  if (ad->m_pos == ArrayData::invalid_index) {
    ad->m_pos = ad->m_size;
  }
  auto& dst = packedData(ad)[ad->m_size++];
  cellDup(*v.asCell(), dst);
  // TODO(#3888164): restructure this so we don't need KindOfUninit checks.
  if (dst.m_type == KindOfUninit) dst.m_type = KindOfNull;
  return ad;
}
NEVER_INLINE
void PackedArray::Release(ArrayData* ad) {
  assert(checkInvariants(ad));
  assert(ad->isRefCounted());

  // Decref every live element before freeing the array itself.
  auto const elems = packedData(ad);
  for (uint32_t i = 0, n = ad->m_size; i < n; ++i) {
    tvRefcountedDecRef(elems[i]);
  }
  if (UNLIKELY(strong_iterators_exist())) {
    free_strong_iterators(ad);
  }
  // The allocation covers the header plus the full packed capacity.
  auto const capacity = ad->m_packedCap;
  MM().objFreeLogged(ad, sizeof(ArrayData) + sizeof(TypedValue) * capacity);
}
ArrayData* PackedArray::SetRefInt(ArrayData* adIn, int64_t k, Variant& v,
                                  bool copy) {
  assert(checkInvariants(adIn));
  // Binding at index m_size is just an append-by-ref.  Negative k wraps
  // to a huge size_t and falls through to the promote-to-mixed path.
  if (size_t(k) == adIn->m_size) return AppendRef(adIn, v, copy);

  if (size_t(k) < adIn->m_size) {
    auto const ad = copy ? Copy(adIn) : adIn;
    // Bind: the slot becomes a reference sharing v's RefData.
    tvBind(v.asRef(), &packedData(ad)[k]);
    return ad;
  }

  // todo t2606310: key can't exist.  use add/findForNewInsert
  auto const mixed = copy ? ToMixedCopy(adIn) : ToMixed(adIn);
  mixed->updateRef(k, v);
  return mixed;
}
ArrayData* PackedArray::AppendWithRef(ArrayData* adIn, const Variant& v,
                                      bool copy) {
  assert(checkInvariants(adIn));
  auto const ad = copy ? CopyAndResizeIfNeeded(adIn) : ResizeIfNeeded(adIn);
  if (UNLIKELY(!ad)) {
    // Hit the maximum packed capacity: promote to mixed.
    auto const mixed = copy ? ToMixedCopy(adIn) : ToMixed(adIn);
    // XXX: constness
    return MixedArray::AppendRef(mixed, const_cast<Variant&>(v), copy);
  }
  // An append to a previously-empty array makes the new element current.
  if (ad->m_pos == ArrayData::invalid_index) {
    ad->m_pos = ad->m_size;
  }
  // Initialize the slot to null first so setWithRef operates on a valid
  // destination value.
  auto& dst = packedData(ad)[ad->m_size++];
  dst.m_type = KindOfNull;
  tvAsVariant(&dst).setWithRef(v);
  return ad;
}
ArrayData* PackedArray::AppendRef(ArrayData* adIn, Variant& v, bool copy) {
  assert(checkInvariants(adIn));
  auto const ad = copy ? CopyAndResizeIfNeeded(adIn) : ResizeIfNeeded(adIn);
  if (UNLIKELY(!ad)) {
    // Hit the maximum packed capacity: promote to mixed.
    auto const mixed = copy ? ToMixedCopy(adIn) : ToMixed(adIn);
    return MixedArray::AppendRef(mixed, v, copy);
  }
  // An append to a previously-empty array makes the new element current.
  if (ad->m_pos == ArrayData::invalid_index) {
    ad->m_pos = ad->m_size;
  }
  // Box the new element: share v's RefData and bump its refcount.
  auto& dst = packedData(ad)[ad->m_size++];
  dst.m_data.pref = v.asRef()->m_data.pref;
  dst.m_type = KindOfRef;
  dst.m_data.pref->incRefCount();
  return ad;
}
bool PackedArray::Usort(ArrayData* ad, const Variant& cmp_function) {
  assert(ad->isPacked());
  if (ad->m_size <= 1) {
    return true;
  }
  assert(!ad->hasMultipleRefs());
  // Sorting reorders elements, so any strong iterators would be invalid.
  if (UNLIKELY(strong_iterators_exist())) {
    free_strong_iterators(ad);
  }
  ElmUCompare<TVAccessor> comp;
  CallCtx ctx;
  CallerFrame cf;
  // Resolve the user-supplied comparator into a callable context.
  vm_decode_function(cmp_function, cf(), false, ctx);
  if (!ctx.func) {
    // Comparator couldn't be resolved: report failure, array untouched.
    return false;
  }
  comp.ctx = &ctx;
  auto const data = packedData(ad);
  Sort::sort(data, data + ad->m_size, comp);
  return true;
}
ArrayData* PackedArray::LvalInt(ArrayData* adIn, int64_t k, Variant*& ret,
                                bool copy) {
  assert(checkInvariants(adIn));
  // In-range key.  (Negative k wraps to a huge size_t and falls through
  // to the promote-to-mixed path.)
  if (LIKELY(size_t(k) < adIn->m_size)) {
    auto const ad = copy ? Copy(adIn) : adIn;
    ret = &tvAsVariant(&packedData(ad)[k]);
    return ad;
  }

  // We can stay packed if the index is m_size, and the operation does
  // the same thing as LvalNew.
  if (size_t(k) == adIn->m_size) return LvalNew(adIn, ret, copy);

  // Promote-to-mixed path, we know the key is new and should be using
  // findForNewInsert but aren't yet TODO(#2606310).
  auto const mixed = copy ? ToMixedCopy(adIn) : ToMixed(adIn);
  return mixed->addLvalImpl(k, ret);
}
/*
 * The pass-by-value and move semantics of this helper are slightly different
 * than other array helpers, but tuned for the opcode.  See doc comment in
 * hphp_array.h.
 */
ArrayData* MixedArray::AddNewElemC(ArrayData* ad, TypedValue value) {
  assert(value.m_type != KindOfRef);
  // Fast path: packed, valid internal position, single reference, and
  // room left in the packed capacity.  Anything else falls through to
  // the generic helper.
  if (LIKELY(ad->isPacked())) {
    assert(PackedArray::checkInvariants(ad));
    if (LIKELY(ad->m_pos >= 0) && LIKELY(!ad->hasMultipleRefs())) {
      int64_t const k = ad->m_size;
      if (LIKELY(k < ad->m_packedCap)) {
        auto& tv = packedData(ad)[k];
        // TODO(#3888164): this KindOfUninit check is almost certainly
        // unnecessary, but it was here so it hasn't been removed yet.
        tv.m_type = value.m_type == KindOfUninit ? KindOfNull : value.m_type;
        tv.m_data = value.m_data;
        ad->m_size = k + 1;
        return ad;
      }
    }
  }
  return genericAddNewElemC(ad, value);
}
ArrayData* PackedArray::Prepend(ArrayData* adIn, const Variant& v, bool copy) {
  assert(checkInvariants(adIn));
  // NOTE(review): elsewhere in this file (Append, LvalNew) the resize
  // helpers can return null at max packed capacity and the caller
  // promotes to mixed; there is no null check here -- confirm Prepend
  // cannot hit that path, or add the promote branch.
  auto const ad = adIn->hasMultipleRefs() ? CopyAndResizeIfNeeded(adIn)
                                          : ResizeIfNeeded(adIn);

  // To conform to PHP behavior, we invalidate all strong iterators when an
  // element is added to the beginning of the array.
  if (UNLIKELY(strong_iterators_exist())) {
    free_strong_iterators(ad);
  }

  // Shift everything up one slot and construct the new element at
  // index 0 in the now-uninitialized first slot.
  auto const size = ad->m_size;
  auto const data = packedData(ad);
  std::memmove(data + 1, data, sizeof *data * size);
  // TODO(#3888164): constructValHelper is making KindOfUninit checks.
  tvAsUninitializedVariant(&data[0]).constructValHelper(v);
  ad->m_size = size + 1;
  ad->m_pos = 0;
  return ad;
}
ArrayData* PackedArray::Pop(ArrayData* adIn, Variant& value) {
  assert(checkInvariants(adIn));
  // Copy-on-write if anyone else holds a reference.
  auto const ad = adIn->hasMultipleRefs() ? Copy(adIn) : adIn;

  if (UNLIKELY(ad->m_size == 0)) {
    // Popping an empty array yields null and resets the position.
    value = uninit_null();
    ad->m_pos = ArrayData::invalid_index;
    return ad;
  }

  auto const oldSize = ad->m_size;
  auto& tv = packedData(ad)[oldSize - 1];
  // Copy the value out before the slot is logically removed.
  value = tvAsCVarRef(&tv);
  if (UNLIKELY(strong_iterators_exist())) {
    // Fix up any strong iterators pointing at the popped slot.
    adjustMArrayIter(ad, oldSize - 1);
  }
  // Save type/datum and defer the decref until the array is back in a
  // consistent state, in case the decref re-enters (e.g. a destructor).
  auto const oldType = tv.m_type;
  auto const oldDatum = tv.m_data.num;
  ad->m_size = oldSize - 1;
  ad->m_pos = oldSize - 1 > 0 ? 0 : ArrayData::invalid_index;
  tvRefcountedDecRefHelper(oldType, oldDatum);
  return ad;
}
const TypedValue* PackedArray::NvGetInt(const ArrayData* ad, int64_t ki) {
  // The size_t cast makes negative keys compare as huge unsigned values,
  // so they miss the bounds check and return null.
  if (LIKELY(size_t(ki) < ad->m_size)) {
    return &packedData(ad)[ki];
  }
  return nullptr;
}
const Variant& PackedArray::GetValueRef(const ArrayData* ad, ssize_t pos) {
  assert(checkInvariants(ad));
  assert(pos != ArrayData::invalid_index);
  // Reinterpret the element in place as a Variant; no copy is made.
  auto const elem = &packedData(ad)[pos];
  return tvAsCVarRef(elem);
}