// Grow this flat, request-local, singly-referenced string so it can hold at
// least `cap` characters. Returns `this` unchanged when current capacity
// already suffices; otherwise returns a freshly allocated flat copy holding
// exactly one reference (caller is responsible for the old string).
StringData* StringData::reserve(size_t cap) {
  assert(!isImmutable() && !hasMultipleRefs());
  assert(isFlat());

  if (cap <= capacity()) return this;

  // Overallocate by 25% to amortize repeated reserve() calls, clamped to
  // MaxSize + 1 (the +1 presumably covers the trailing NUL — TODO confirm).
  cap = std::min(cap + cap / 4, size_t(MaxSize) + 1);

  auto const allocRet = allocFlatForLen(cap);
  auto const sd = allocRet.first;
  auto const cc = allocRet.second;  // capacity code stored into the header
  // The character buffer lives immediately after the StringData header.
  auto const data = reinterpret_cast<char*>(sd + 1);
  sd->m_data = data;
  sd->m_hdr.init(cc, HeaderKind::String, 1);

  // request-allocated StringData are always aligned at 16 bytes, thus it is
  // safe to copy in 16-byte groups. This copies m_lenAndHash (8 bytes), the
  // characters (m_len bytes), add the trailing zero (1 byte).
  memcpy16_inline(&sd->m_lenAndHash, &m_lenAndHash,
                  (m_len + 8 + 1 + 15) & ~0xF);
  // Layout invariants the copy above relies on: the data pointer sits right
  // after m_lenAndHash, and m_lenAndHash itself is 16-byte aligned.
  assertx(reinterpret_cast<uintptr_t>(&m_lenAndHash) + 8 ==
          reinterpret_cast<uintptr_t>(m_data));
  assertx(reinterpret_cast<uintptr_t>(&m_lenAndHash) % 16 == 0);

  assert(sd->hasExactlyOneRef());
  assert(sd->isFlat());
  assert(sd->checkSane());
  return sd;
}
/*
 * Append `range` to this mutable string, returning the StringData that now
 * holds the combined contents (either `this` grown in place, or a new
 * allocation when we had to escalate or reallocate).
 */
StringData* StringData::append(StringSlice range) {
  assert(!hasMultipleRefs());

  auto const srcPtr = range.ptr;
  auto const srcLen = range.len;
  if (srcLen == 0) return this;

  auto const combined = size_t(m_len) + size_t(srcLen);
  if (UNLIKELY(combined > MaxSize)) {
    throw_string_too_large(combined);
  }

  /*
   * We may have an aliasing append. We don't allow appending with an
   * interior pointer, although we may be asked to append less than
   * the whole string in an aliasing situation.
   */
  ALIASING_APPEND_ASSERT(srcPtr, srcLen);

  auto const requestLen = static_cast<uint32_t>(combined);
  StringData* target;
  if (UNLIKELY(isShared())) {
    target = escalate(requestLen);
  } else {
    target = reserve(requestLen);
  }

  memcpy(target->mutableData() + m_len, srcPtr, srcLen);
  target->setSize(combined);
  assert(target->checkSane());
  return target;
}
// Grow this flat, request-local, singly-referenced string so it can hold at
// least `cap` characters. Returns `this` unchanged if current capacity
// already suffices; otherwise returns a new flat copy with exactly one
// reference (caller is responsible for releasing the old string).
StringData* StringData::reserve(size_t cap) {
  assert(!isImmutable() && !hasMultipleRefs());
  assert(isFlat());

  if (cap <= capacity()) return this;

  // Overallocate by 25% to amortize repeated reserve() calls, clamped to
  // MaxSize.
  cap = std::min(cap + cap / 4, size_t(MaxSize));
  auto const sd = allocFlatForLenSmall(cap);

  // Request-allocated StringData are always aligned at 16 bytes, thus it is
  // safe to copy in 16-byte groups.
#ifdef NO_M_DATA
  // layout: [m_lenAndHash][header][...data]
  sd->m_lenAndHash = m_lenAndHash;
  // This copies the characters (m_len bytes), and the trailing zero (1 byte)
  memcpy16_inline(sd+1, this+1, (m_len + 1 + 15) & ~0xF);
  assertx(reinterpret_cast<uintptr_t>(this+1) % 16 == 0);
#else
  // layout: [m_data][header][m_lenAndHash][...data]
  // This copies m_lenAndHash (8 bytes), the characters (m_len bytes),
  // and the trailing zero (1 byte).
  memcpy16_inline(&sd->m_lenAndHash, &m_lenAndHash,
                  (m_len + 8 + 1 + 15) & ~0xF);
  // Layout invariants the copy above relies on: data pointer sits right
  // after m_lenAndHash, and m_lenAndHash is 16-byte aligned.
  assertx(reinterpret_cast<uintptr_t>(&m_lenAndHash) + 8 ==
          reinterpret_cast<uintptr_t>(m_data));
  assertx(reinterpret_cast<uintptr_t>(&m_lenAndHash) % 16 == 0);
#endif

  assert(sd->hasExactlyOneRef());
  assert(sd->isFlat());
  assert(sd->checkSane());
  return sd;
}
// Exercises the casting helpers (unsafe_cast_or_null / dyn_cast / cast) on
// Variants holding resources, objects, and refs, including rvalue forms.
TEST(Variant, MoveCasts) {
  {
    // Resource casts: matching type yields non-null, unrelated type null.
    auto res = unsafe_cast_or_null<DummyResource>(
      Variant(req::make<DummyResource>()));
    EXPECT_NE(res, nullptr);
    auto res2 = dyn_cast<DummyResource>(
      Variant(req::make<DummyResource>()));
    EXPECT_NE(res2, nullptr);
    auto res3 = dyn_cast<File>(
      Variant(req::make<DummyResource>()));
    EXPECT_EQ(res3, nullptr);
  }
  {
    // Object casts: same checks against collection classes.
    auto res = unsafe_cast_or_null<c_Vector>(
      Variant(req::make<c_Vector>()));
    EXPECT_NE(res, nullptr);
    auto res2 = dyn_cast<c_Vector>(
      Variant(req::make<c_Vector>()));
    EXPECT_NE(res2, nullptr);
    auto res3 = dyn_cast<c_Map>(
      Variant(req::make<c_Vector>()));
    EXPECT_EQ(res3, nullptr);
  }
  {
    // Casting an lvalue Variant that wraps a RefData.
    auto dummy = req::make<DummyResource>();
    dummy->incRefCount(); // the RefData constructor steals its input.
    auto ref = req::ptr<RefData>::attach(
      RefData::Make(*Variant(dummy).asTypedValue()));
    Variant dummyRef(ref);
    EXPECT_FALSE(ref->hasExactlyOneRef());
    auto res = cast<DummyResource>(dummyRef);
    EXPECT_EQ(res, dummy);
  }
  {
    // Casting an rvalue Variant wrapping a RefData.
    auto dummy = req::make<DummyResource>();
    dummy->incRefCount(); // the RefData constructor steals its input.
    Variant dummyRef(
      req::ptr<RefData>::attach(RefData::Make(*Variant(dummy).asTypedValue())));
    //EXPECT_TRUE(dummyRef.getRefData()->hasExactlyOneRef());
    auto res = cast<DummyResource>(std::move(dummyRef));
    EXPECT_EQ(res, dummy);
  }
  {
    // Moving out of a Variant constructed from a raw RefData* leaves the
    // Variant null while `dummy` and the RefData still share the resource.
    auto dummy = req::make<DummyResource>();
    dummy->incRefCount(); // the RefData constructor steals its input.
    auto ref = req::ptr<RefData>::attach(
      RefData::Make(*Variant(dummy).asTypedValue()));
    Variant dummyRef(ref.get());
    EXPECT_FALSE(ref->hasExactlyOneRef());
    auto res = cast<DummyResource>(std::move(dummyRef));
    EXPECT_EQ(res, dummy);
    EXPECT_TRUE(dummyRef.isNull());
    EXPECT_TRUE(dummy->hasMultipleRefs());
  }
}
// Copy the first `len` characters of this flat string into a new,
// right-sized allocation. Caller must hold the only reference and is
// responsible for releasing `this` afterwards.
StringData* StringData::shrinkImpl(size_t len) {
  assert(!isImmutable() && !hasMultipleRefs());
  assert(isFlat());
  assert(len <= capacity());

  auto const sd = allocFlatForLenSmall(len);
  // Store the new length; the remaining (hash) bits of the combined word
  // are implicitly zeroed — presumably so the hash is recomputed lazily.
  sd->m_lenAndHash = len;
  // Character data lives immediately after the StringData header in both
  // the source and the destination.
  auto const src = static_cast<void*>(this + 1);
  auto const dst = static_cast<void*>(sd + 1);
  // memcpy8 returns the end of the copied destination range; write the
  // trailing NUL through it.
  *memcpy8(dst, src, len) = 0;

  assert(sd->checkSane());
  return sd;
}
// Build a copy of this string truncated to its first `len` characters.
// Caller must hold the only reference and dispose of `this` afterwards.
StringData* StringData::shrinkImpl(size_t len) {
  assert(!isImmutable() && !hasMultipleRefs());
  assert(isFlat());
  assert(len <= capacity());

  auto const shrunk = Make(len);
  shrunk->m_len = len;
  auto const from = slice();
  auto const to = shrunk->mutableData();
  // memcpy8 hands back the end of the destination; drop the NUL there.
  *memcpy8(to, from.ptr, len) = 0;

  assert(shrunk->checkSane());
  return shrunk;
}
// Return an array that is safe to sort in place: a private copy when the
// sort uses a user-defined comparator or the array is shared, otherwise
// the input itself.
ArrayData* MixedArray::EscalateForSort(ArrayData* ad, SortFunction sf) {
  auto const arr = asMixed(ad);
  // We can uncomment later if we want this feature.
  // if (arr->m_size <= 1 && !isSortFamily(sf)) {
  //   return arr;
  // }
  if (UNLIKELY(hasUserDefinedCmp(sf) || arr->hasMultipleRefs())) {
    auto const copy = arr->copyMixed();
    assert(copy->hasExactlyOneRef());
    return copy;
  }
  return arr;
}
// Implements the + (union) operation on a packed array by escalating to a
// mixed-layout copy sized for both operands, then delegating to
// MixedArray::PlusEq.
ArrayData* PackedArray::PlusEq(ArrayData* adIn, const ArrayData* elems) {
  assert(checkInvariants(adIn));

  auto const wanted = adIn->size() + elems->size();
  auto const escalated = ToMixedCopyReserve(adIn, wanted);
  try {
    auto const result = MixedArray::PlusEq(escalated, elems);
    assert(result == escalated);
    assert(!escalated->hasMultipleRefs());
    return result;
  } catch (...) {
    // Don't leak the escalated copy if the mixed += throws.
    MixedArray::Release(escalated);
    throw;
  }
}
/*
 * Append three slices onto this mutable string in one pass, returning the
 * StringData that holds the result (possibly a new allocation).
 *
 * Fix: the combined length is now accumulated in size_t. The previous code
 * summed the three uint32_t slice lengths in 32-bit arithmetic, which could
 * wrap around and slip past the MaxSize checks before the copy. The single
 * size_t check below subsumes both of the old overflow tests.
 */
StringData* StringData::append(StringSlice r1, StringSlice r2, StringSlice r3) {
  assert(!hasMultipleRefs());

  // Sum in size_t so three large 32-bit lengths cannot wrap around.
  auto const len = size_t(r1.len) + size_t(r2.len) + size_t(r3.len);

  if (len == 0) return this;

  auto const newLen = size_t(m_len) + len;
  if (UNLIKELY(newLen > MaxSize)) {
    throw_string_too_large(newLen);
  }

  /*
   * We may have an aliasing append. We don't allow appending with an
   * interior pointer, although we may be asked to append less than
   * the whole string in an aliasing situation.
   */
  ALIASING_APPEND_ASSERT(r1.ptr, r1.len);
  ALIASING_APPEND_ASSERT(r2.ptr, r2.len);
  ALIASING_APPEND_ASSERT(r3.ptr, r3.len);

  auto const target = UNLIKELY(isShared()) ? escalate(newLen)
                                           : reserve(newLen);
  auto const mslice = target->bufferSlice();

  /*
   * memcpy is safe even if it's a self append---the regions will be
   * disjoint, since rN.ptr can't point past the start of our source
   * pointer, and rN.len is smaller than the old length.
   */
  void* p = mslice.ptr;
  p = memcpy((char*)p + m_len,  r1.ptr, r1.len);
  p = memcpy((char*)p + r1.len, r2.ptr, r2.len);
      memcpy((char*)p + r2.len, r3.ptr, r3.len);

  target->setSize(newLen);
  assert(target->checkSane());
  return target;
}
// Verifies req::ptr refcount semantics for move and copy construction.
TEST(ReqPtr, Refcounts) {
  // Moving into a base-typed req::ptr transfers ownership without bumping
  // the count.
  {
    auto dummy = req::make<DummyResource>();
    EXPECT_TRUE(dummy->hasExactlyOneRef());
    req::ptr<ResourceData> base(std::move(dummy));
    EXPECT_TRUE(base.get()->hasExactlyOneRef());
  }
  // Copying shares ownership: count goes to two, then back to one when the
  // copy is destroyed at the end of the inner scope.
  {
    auto dummy = req::make<DummyResource>();
    EXPECT_TRUE(dummy->hasExactlyOneRef());
    {
      req::ptr<ResourceData> base(dummy);
      EXPECT_TRUE(dummy->hasMultipleRefs()); // count==2
      EXPECT_TRUE(base.get()->hasMultipleRefs());
    }
    EXPECT_TRUE(dummy->hasExactlyOneRef());
  }
}
// Verifies Resource wrapper refcount semantics for move and copy
// construction.
TEST(Resource, Refcounts) {
  // Move construction transfers ownership without a refcount bump.
  {
    auto dummy = req::make<DummyResource>();
    EXPECT_TRUE(dummy->hasExactlyOneRef());
    Resource wrapped(std::move(dummy));
    EXPECT_TRUE(wrapped->hasExactlyOneRef());
  }
  // Copy construction shares ownership; destroying the copy at the end of
  // the inner scope drops the count back to one.
  {
    auto dummy = req::make<DummyResource>();
    EXPECT_TRUE(dummy->hasExactlyOneRef());
    {
      Resource wrapped(dummy);
      EXPECT_TRUE(dummy->hasMultipleRefs()); // count==2
      EXPECT_TRUE(wrapped->hasMultipleRefs());
    }
    EXPECT_TRUE(dummy->hasExactlyOneRef());
  }
}
// Copy the first `len` characters of this flat string into a new,
// right-sized allocation. Caller must hold the only reference and is
// responsible for releasing `this` afterwards.
StringData* StringData::shrinkImpl(size_t len) {
  assert(!isImmutable() && !hasMultipleRefs());
  assert(isFlat());
  assert(len <= m_len);
  assert(len <= capacity());

  auto const sd = Make(len);
  auto const src = slice();
  auto const dst = sd->mutableData();
  assert(len <= src.len);
  sd->setSize(len);

  auto const mcret = memcpy(dst, src.ptr, len);
  // The data buffer sits immediately after the StringData header, so
  // stepping one StringData back from memcpy's return (== dst) recovers
  // sd, as the assert below confirms.
  auto const ret = static_cast<StringData*>(mcret) - 1;
  // Recalculating ret from mcret avoids a spill.
  assert(ret == sd);

  assert(ret->checkSane());
  return ret;
}
// Grow this flat, singly-referenced string so it can hold at least `cap`
// characters. Returns `this` if current capacity suffices; otherwise a new
// copy (caller is responsible for the old string).
StringData* StringData::reserve(size_t cap) {
  assert(!isImmutable() && !hasMultipleRefs());
  assert(isFlat());

  if (cap <= capacity()) return this;

  // Overallocate by 25% to amortize repeated reserve() calls, clamped to
  // MaxSize + 1 (the +1 presumably covers the trailing NUL — TODO confirm).
  cap = std::min(cap + cap/4, size_t(MaxSize) + 1);
  auto const sd = Make(cap);
  auto const src = slice();
  auto const dst = sd->mutableData();
  sd->setSize(src.len);

  auto const mcret = memcpy(dst, src.ptr, src.len);
  // The data buffer sits immediately after the StringData header, so
  // stepping one StringData back from memcpy's return (== dst) recovers
  // sd, as the assert below confirms.
  auto const ret = static_cast<StringData*>(mcret) - 1;
  // Recalculating ret from mcret avoids a spill.
  assert(ret == sd);

  assert(ret->checkSane());
  return ret;
}
/*
 * Append two pieces onto this mutable string in a single pass, returning
 * the StringData that holds the combined contents (either `this` grown in
 * place, or a new allocation when we had to escalate or reallocate).
 */
StringData* StringData::append(folly::StringPiece r1, folly::StringPiece r2) {
  assert(!hasMultipleRefs());

  auto const addedLen = r1.size() + r2.size();
  if (addedLen == 0) return this;

  if (UNLIKELY(size_t(m_len) + addedLen > MaxSize)) {
    throw_string_too_large(size_t(m_len) + addedLen);
  }
  auto const newLen = m_len + addedLen;

  /*
   * We may have an aliasing append. We don't allow appending with an
   * interior pointer, although we may be asked to append less than
   * the whole string in an aliasing situation.
   */
  ALIASING_APPEND_ASSERT(r1.data(), r1.size());
  ALIASING_APPEND_ASSERT(r2.data(), r2.size());

  StringData* target;
  if (UNLIKELY(isProxy())) {
    target = escalate(newLen);
  } else {
    target = reserve(newLen);
  }

  /*
   * memcpy is safe even if it's a self append---the regions will be
   * disjoint, since rN.data() can't point past the start of our source
   * pointer, and rN.size() is smaller than the old length.
   */
  auto const cursor = target->mutableData() + m_len;
  memcpy(cursor, r1.data(), r1.size());
  memcpy(cursor + r1.size(), r2.data(), r2.size());

  target->setSize(newLen);
  assert(target->checkSane());
  return target;
}