// Release the character buffer backing this string, honoring its storage
// mode, and invalidate the cached hash.
void StringData::releaseData() {
  // Linear and literal strings do not own their buffer; nothing to free.
  if ((m_len & (IsLinear | IsLiteral)) == 0) {
    if (isShared()) {
      // Shared strings just drop a reference on the shared store entry.
      m_shared->decRef();
    } else if (m_data) {
      // Otherwise the buffer was malloc'd by this object.
      free((void*)m_data);
      m_data = NULL;
    }
  }
  // The contents are gone, so any precomputed hash is stale.
  m_hash = 0;
}
// Walk up the group hierarchy looking for the nearest ancestor (or the
// group itself) that is a KeeShare-shared group. The database root group
// is never considered; returns nullptr when no shared group is found.
const Group* KeeShare::resolveSharedGroup(const Group* group)
{
    for (const Group* current = group; current != nullptr; current = current->parentGroup()) {
        if (current == current->database()->rootGroup()) {
            break;
        }
        if (isShared(current)) {
            return current;
        }
    }
    return nullptr;
}
// Link this shared string's payload node into the request-local string
// list kept by the memory manager (so sweepAll() can find it later).
ALWAYS_INLINE void StringData::enlist() {
  assert(isShared());
  auto& head = MM().getStringList();
  // insert after head
  auto const next = head.next;
  auto& payload = *sharedPayload();
  // The head's successor must not point into freed memory.
  assert(uintptr_t(next) != kMallocFreeWord);
  payload.node.next = next;
  payload.node.prev = &head;
  // Splice the node in between head and its old successor.
  next->prev = head.next = &payload.node;
}
// Compute and cache this string's hash. Must not be called on shared
// strings, since the hash field's storage is reused there.
void StringData::preCompute() const {
  ASSERT(!isShared()); // because we are gonna reuse the space!
  // We don't want to collect taint for a hash
  StringSlice s = slice();
  m_hash = hash_string(s.ptr, s.len);
  ASSERT(m_hash >= 0);
  int64 lval; double dval;
  // If the string is not numeric, set the hash's sign bit so that
  // isNumericWithVal() can short-circuit on its m_hash < 0 check next time.
  if (isNumericWithVal(lval, dval, 1) == KindOfNull) {
    m_hash |= STRHASH_MSB;
  }
}
/** Crop the real and imaginary components to the requested precision.
 *  A negative precision is a no-op. For shared functions only the share
 *  master performs the crop; the result is then re-shared to the group.
 *  Returns the number of chunks removed. */
int QMFunction::crop(double prec) {
    if (prec < 0.0) {
        return 0;
    }
    int removed = 0;
    const bool doCrop = !isShared() || mpi::share_master();
    if (doCrop) {
        if (hasReal()) {
            removed = real().crop(prec, 1.0, false);
        }
        if (hasImag()) {
            removed += imag().crop(prec, 1.0, false);
        }
    }
    // NOTE(review): 7744 looks like an arbitrary MPI message tag -- confirm
    // it matches the tag convention used by other share operations.
    mpi::share_function(*this, 0, 7744, mpi::comm_share);
    return removed;
}
// State transition from Mode::Shared to Mode::Flat. StringData* StringData::escalate(size_t cap) { assert(isShared() && !isStatic() && cap >= m_len); auto const sd = allocFlatForLenSmall(cap); sd->m_lenAndHash = m_lenAndHash; auto const data = reinterpret_cast<char*>(sd + 1); *memcpy8(data, m_data, m_len) = 0; assert(sd->hasExactlyOneRef()); assert(sd->isFlat()); assert(sd->checkSane()); return sd; }
// Determine whether this string is numeric, writing the parsed value into
// lval or dval. Returns KindOfNull for non-numeric strings. A negative
// cached hash (sign bit set) records "known non-numeric", letting repeat
// calls bail out immediately.
DataType StringData::isNumericWithVal(int64 &lval, double &dval,
                                      int allow_errors) const {
  if (m_hash < 0) return KindOfNull;
  DataType ret = KindOfNull;
  int len = size();
  if (len) {
    ret = is_numeric_string(data(), size(), &lval, &dval, allow_errors);
    if (ret == KindOfNull && !isShared()) {
      // Cache the negative result in the hash's sign bit. Shared strings
      // are excluded because they reuse this storage.
      m_hash |= (1ull << 63);
    }
  }
  return ret;
}
// Determine whether this string is numeric, writing the parsed value into
// lval or dval (and overflow info into *overflow if provided). Returns
// KindOfNull for non-numeric strings. A negative cached hash (STRHASH_MSB
// set) records "known non-numeric" so repeat calls bail out early.
DataType StringData::isNumericWithVal(int64_t &lval, double &dval,
                                      int allow_errors, int* overflow) const {
  if (m_hash < 0) return KindOfNull;
  DataType ret = KindOfNull;
  StringSlice s = slice();
  if (s.len) {
    ret = is_numeric_string(s.ptr, s.len, &lval, &dval, allow_errors,
                            overflow);
    // Only cache a definitive negative result (allow_errors set), and only
    // for non-shared strings, which own their hash storage.
    if (ret == KindOfNull && !isShared() && allow_errors) {
      m_hash |= STRHASH_MSB;
    }
  }
  return ret;
}
// Share this buffer's contents with |result|. If this buffer has no data
// (already neutered), |result| is neutered as well and false is returned.
bool ArrayBuffer::shareContentsWith(ArrayBufferContents& result)
{
    ASSERT(isShared());
    // Keep |this| alive for the duration of the share.
    RefPtr<ArrayBuffer> protect(this);
    if (m_contents.data()) {
        m_contents.shareWith(result);
        return true;
    }
    result.neuter();
    return false;
}
// Determine whether this string is numeric, writing the parsed value into
// lval or dval. Returns KindOfNull for non-numeric strings. A negative
// cached hash (STRHASH_MSB set) records "known non-numeric" so repeat
// calls bail out early.
DataType StringData::isNumericWithVal(int64 &lval, double &dval,
                                      int allow_errors) const {
  if (m_hash < 0) return KindOfNull;
  DataType ret = KindOfNull;
  StringSlice s = slice();
  if (s.len) {
    // Not involved in further string construction/mutation; no taint pickup
    ret = is_numeric_string(s.ptr, s.len, &lval, &dval, allow_errors);
    // Only cache a definitive negative result (allow_errors set), and only
    // for non-shared strings, which own their hash storage.
    if (ret == KindOfNull && !isShared() && allow_errors) {
      m_hash |= STRHASH_MSB;
    }
  }
  return ret;
}
// Overlay a KeeShare indicator icon on the lower-right quadrant of the
// given pixmap. The icon reflects whether sharing is enabled for the
// group; non-shared groups get the pixmap back unchanged.
QPixmap KeeShare::indicatorBadge(const Group* group, QPixmap pixmap)
{
    if (!isShared(group)) {
        return pixmap;
    }
    QPixmap overlay;
    if (isEnabled(group)) {
        overlay = databaseIcons()->iconPixmap(DatabaseIcons::SharedIconIndex);
    } else {
        overlay = databaseIcons()->iconPixmap(DatabaseIcons::UnsharedIconIndex);
    }
    QImage image = pixmap.toImage();
    const QRectF area(image.width() * 0.4,
                      image.height() * 0.4,
                      image.width() * 0.6,
                      image.height() * 0.6);
    QPainter painter(&image);
    painter.setCompositionMode(QPainter::CompositionMode_SourceOver);
    painter.drawPixmap(area, overlay, overlay.rect());
    pixmap.convertFromImage(image);
    return pixmap;
}
// Walk the request-local list of APC-backed strings, dropping each one's
// reference on its APC handle, then reset the list to empty. Returns the
// number of strings visited.
unsigned StringData::sweepAll() {
  auto& head = MM().getStringList();
  auto count = 0;
  for (StringDataNode *next, *n = head.next; n != &head; n = next) {
    count++;
    // Capture the successor before operating on the node.
    next = n->next;
    assert(next && uintptr_t(next) != kSmallFreeWord);
    assert(next && uintptr_t(next) != kMallocFreeWord);
    auto const s = node2str(n);
    assert(s->isShared());
    s->sharedPayload()->shared->getHandle()->unreference();
  }
  // Leave an empty circular list behind.
  head.next = head.prev = &head;
  return count;
}
// State transition from Mode::Shared to Mode::Flat. StringData* StringData::escalate(size_t cap) { assert(isShared() && !isStatic() && cap >= m_len); auto const sd = Make(cap); auto const src = slice(); auto const dst = sd->mutableData(); sd->setSize(src.len); auto const mcret = memcpy(dst, src.ptr, src.len); auto const ret = static_cast<StringData*>(mcret) - 1; // Recalculating ret from mcret avoids a spill. assert(ret == sd); assert(ret->checkSane()); return ret; }
// Debug dump: print refcount, mode flags, length, and the string bytes
// with non-printable characters rendered as \xNN escapes.
void StringData::dump() const {
  StringSlice s = slice();
  printf("StringData(%d) (%s%s%d): [", getCount(),
         isShared() ? "shared " : "",
         isStatic() ? "static " : "",
         s.len);
  for (uint32_t i = 0; i < s.len; i++) {
    char ch = s.ptr[i];
    // Cast through unsigned char: passing a negative char to isprint() is
    // undefined behavior, and a negative char sign-extends in %02x
    // (printing \xffffff80 instead of \x80).
    if (isprint(static_cast<unsigned char>(ch))) {
      printf("%c", ch);
    } else {
      printf("\\x%02x", static_cast<unsigned char>(ch));
    }
  }
  printf("]\n");
}
// Append the concatenation of three slices to this string. Shared strings
// are first escalated to flat ones; otherwise capacity is reserved in
// place. Returns the StringData holding the result, which may differ from
// `this` -- callers must use the return value.
StringData* StringData::append(StringSlice r1, StringSlice r2, StringSlice r3) {
  assert(!hasMultipleRefs());
  auto const len = r1.len + r2.len + r3.len;
  if (len == 0) return this;
  if (UNLIKELY(uint32_t(len) > MaxSize)) {
    throw_string_too_large(len);
  }
  // Check the combined length too; the sum can overflow uint32_t, so the
  // arithmetic is done in size_t.
  if (UNLIKELY(size_t(m_len) + size_t(len) > MaxSize)) {
    throw_string_too_large(size_t(len) + size_t(m_len));
  }
  auto const newLen = m_len + len;
  /*
   * We may have an aliasing append. We don't allow appending with an
   * interior pointer, although we may be asked to append less than
   * the whole string in an aliasing situation.
   */
  ALIASING_APPEND_ASSERT(r1.ptr, r1.len);
  ALIASING_APPEND_ASSERT(r2.ptr, r2.len);
  ALIASING_APPEND_ASSERT(r3.ptr, r3.len);
  auto const target = UNLIKELY(isShared()) ? escalate(newLen)
                                           : reserve(newLen);
  auto const mslice = target->bufferSlice();
  /*
   * memcpy is safe even if it's a self append---the regions will be
   * disjoint, since rN.ptr can't point past the start of our source
   * pointer, and rN.len is smaller than the old length.
   */
  // memcpy returns its destination, so p walks forward: each copy lands
  // at the previous destination plus the previous slice's offset.
  void* p = mslice.ptr;
  p = memcpy((char*)p + m_len,  r1.ptr, r1.len);
  p = memcpy((char*)p + r1.len, r2.ptr, r2.len);
      memcpy((char*)p + r2.len, r3.ptr, r3.len);
  target->setSize(newLen);
  assert(target->checkSane());
  return target;
}
// Walk the request-local list of APC-backed strings, dropping each one's
// reference on its APC handle, then reset the list to empty. Returns the
// number of strings visited.
unsigned StringData::sweepAll() {
  auto& head = MM().getStringList();
  auto count = 0;
  for (StringDataNode *next, *n = head.next; n != &head; n = next) {
    count++;
    // Capture the successor before operating on the node.
    next = n->next;
    assert(next && uintptr_t(next) != kSmartFreeWord);
    assert(next && uintptr_t(next) != kMallocFreeWord);
    // Recover the owning StringData: the SharedPayload containing this
    // node is laid out immediately after the StringData header.
    auto const s = reinterpret_cast<StringData*>(
      uintptr_t(n) - offsetof(SharedPayload, node) - sizeof(StringData)
    );
    assert(s->isShared());
    s->sharedPayload()->shared->getHandle()->unreference();
  }
  // Leave an empty circular list behind.
  head.next = head.prev = &head;
  return count;
}
// State transition from Mode::Shared to Mode::Flat. StringData* StringData::escalate(size_t cap) { assert(isShared() && !isStatic() && cap >= m_len); auto const allocRet = allocFlatForLen(cap); auto const sd = allocRet.first; auto const cc = allocRet.second; auto const data = reinterpret_cast<char*>(sd + 1); sd->m_data = data; sd->m_hdr.init(cc, HeaderKind::String, 1); sd->m_lenAndHash = m_lenAndHash; *memcpy8(data, m_data, m_len) = 0; assert(sd->hasExactlyOneRef()); assert(sd->isFlat()); assert(sd->checkSane()); return sd; }
/*
 * Change to smart-malloced string. Then returns a mutable slice of
 * the usable string buffer (minus space for the null terminator).
 */
MutableSlice StringData::escalate(uint32_t cap) {
  assert(isShared() && !isStatic() && cap >= m_len);
  // +1 for the NUL terminator.
  char *buf = (char*)smart_malloc(cap + 1);
  StringSlice s = slice();
  memcpy(buf, s.ptr, s.len);
  buf[s.len] = 0;
  // Drop our reference on the shared data and remove this string from
  // the request-local shared-string list.
  m_big.shared->decRef();
  delist();
  m_data = buf;
  setModeAndCap(Mode::Smart, cap + 1);
  // clear precomputed hashcode
  m_hash = 0;
  assert(checkSane());
  return MutableSlice(buf, cap);
}
// Append len bytes from s to this string in place, handling three storage
// cases: non-malloced (literal/shared/linear) buffers are concatenated
// into a fresh allocation; a self-append goes through a temporary; and a
// malloced buffer is grown with realloc. Resets the cached hash.
void StringData::append(const char *s, int len) {
  if (len == 0) return;
  // Reject negative lengths and lengths that collide with the mode bits
  // packed into the top of m_len.
  if (len < 0 || (len & IsMask)) {
    throw InvalidArgumentException("len: %d", len);
  }
  ASSERT(!isStatic()); // never mess around with static strings!
  if (!isMalloced()) {
    // Buffer isn't ours to grow: build a new concatenated buffer, then
    // release our claim on the old storage.
    int newlen;
    m_data = string_concat(data(), size(), s, len, newlen);
    if (isShared()) {
      m_shared->decRef();
    }
    m_len = newlen;
    m_hash = 0;
  } else if (m_data == s) {
    // Self-append: concatenate into a temporary before freeing the old
    // buffer, since s aliases it.
    int newlen;
    char *newdata = string_concat(data(), size(), s, len, newlen);
    releaseData();
    m_data = newdata;
    m_len = newlen;
  } else {
    int dataLen = size();
    ASSERT((m_data > s && m_data - s > len) ||
           (m_data < s && s - m_data > dataLen)); // no overlapping
    m_len = len + dataLen;
    m_data = (const char*)realloc((void*)m_data, m_len + 1);
    memcpy((void*)(m_data + dataLen), s, len);
    ((char*)m_data)[m_len] = '\0';
    m_hash = 0;
  }
  // If the new length spilled into the mode bits, the string exceeded the
  // representable maximum: release everything and raise a fatal error.
  if (m_len & IsMask) {
    int len = m_len;  // shadowed on purpose: report the overflowed length
    m_len &= ~IsMask;
    releaseData();
    m_data = NULL;
    throw FatalErrorException(0, "String length exceeded 2^29 - 1: %d", len);
  }
  TAINT_OBSERVER_REGISTER_MUTATED(this);
}
// Debug dump: print refcount, mode flags, length, and the string bytes
// with non-printable characters rendered as \xNN escapes.
void StringData::dump() {
  const char *p = data();
  int len = size();
  printf("StringData(%d) (%s%s%s%d): [", _count,
         isLiteral() ? "literal " : "",
         isShared() ? "shared " : "",
         isLinear() ? "linear " : "",
         len);
  for (int i = 0; i < len; i++) {
    char ch = p[i];
    // Cast through unsigned char: passing a negative char to isprint() is
    // undefined behavior, and a negative char sign-extends in %02x.
    if (isprint(static_cast<unsigned char>(ch))) {
      std::cout << ch;
    } else {
      // "\\x%02x" (with the 'x') matches the escape format used by the
      // other dump() variants; the original "\\%02x" omitted it.
      printf("\\x%02x", static_cast<unsigned char>(ch));
    }
  }
  printf("]\n");
}
// Append len bytes from s to this string in place. Handles self-append
// (s pointing at our own buffer start) and escalates shared strings to a
// private buffer first. The string must be exclusively owned.
void StringData::append(const char* s, int len) {
  assert(!isStatic() && getCount() <= 1);
  if (len == 0) return;
  if (UNLIKELY(uint32_t(len) > MaxSize)) {
    throw InvalidArgumentException("len > 2^31-2", len);
  }
  // Combined-length check done in size_t to avoid uint32_t overflow.
  if (UNLIKELY(size_t(m_len) + size_t(len) > MaxSize)) {
    throw FatalErrorException(0, "String length exceeded 2^31-2: %zu",
                              size_t(len) + size_t(m_len));
  }
  const uint32_t newLen = m_len + len;
  /*
   * In case we're being to asked to append our own string, we need to
   * load the old pointer value (it might change when we reserve
   * below).
   *
   * We don't allow appending with an interior pointers here, although
   * we may be asked to append less than the whole string.
   */
  auto const oldDataPtr = rawdata();
  assert(uintptr_t(s) <= uintptr_t(rawdata()) ||
         uintptr_t(s) >= uintptr_t(rawdata() + capacity()));
  assert(s != rawdata() || len <= m_len);
  auto const mslice = UNLIKELY(isShared()) ? escalate(newLen)
                                           : reserve(newLen);
  // If s aliased our old buffer, retarget it at the (possibly moved)
  // new buffer.
  if (UNLIKELY(s == oldDataPtr)) s = mslice.ptr;
  /*
   * memcpy is safe even if it's a self append---the regions will be
   * disjoint, since s can't point past our oldDataPtr, and len is
   * smaller than the old length.
   */
  memcpy(mslice.ptr + m_len, s, len);
  setSize(newLen);
  assert(checkSane());
}
// Build a request-local proxy StringData for an APC string. The proxy
// points directly at the APC-resident character buffer (no copy), records
// the APCString in its SharedPayload, joins the sweep list, and takes a
// reference on the APC handle.
NEVER_INLINE
StringData* StringData::MakeAPCSlowPath(const APCString* shared) {
  // Allocate room for the StringData header plus the trailing
  // SharedPayload.
  auto const sd = static_cast<StringData*>(
    MM().smartMallocSize(sizeof(StringData) + sizeof(SharedPayload))
  );
  auto const data = shared->getStringData();
  sd->m_data = const_cast<char*>(data->m_data);
  sd->m_capAndCount = data->m_capCode; // count=0, kind=data->kind
  sd->m_lenAndHash = data->m_lenAndHash;
  sd->sharedPayload()->shared = shared;
  sd->enlist();
  shared->getHandle()->reference();
  assert(sd->m_len == data->size());
  assert(sd->m_count == 0);
  assert(sd->m_capCode == data->m_capCode);
  assert(sd->m_hash == data->m_hash);
  assert(sd->m_kind == HeaderKind::String);
  assert(sd->isShared());
  assert(sd->checkSane());
  return sd;
}
// Build a request-local proxy StringData for an APC string. The proxy
// points directly at the APC-resident character buffer (no copy), records
// the APCString in its SharedPayload, joins the sweep list, and takes a
// reference on the APC handle.
NEVER_INLINE
StringData* StringData::MakeAPCSlowPath(const APCString* shared) {
  // Allocate room for the StringData header plus the trailing
  // SharedPayload.
  auto const sd = static_cast<StringData*>(
    MM().mallocSmallSize(sizeof(StringData) + sizeof(SharedPayload))
  );
  auto const data = shared->getStringData();
  sd->m_data = const_cast<char*>(data->m_data);
  // Copy the source header's kind/aux bits, with refcount 1.
  sd->m_hdr.init(data->m_hdr, 1);
  sd->m_lenAndHash = data->m_lenAndHash;
  sd->sharedPayload()->shared = shared;
  sd->enlist();
  shared->getHandle()->reference();
  assert(sd->m_len == data->size());
  assert(sd->m_hdr.aux == data->m_hdr.aux);
  assert(sd->m_hdr.kind == HeaderKind::String);
  assert(sd->hasExactlyOneRef());
  assert(sd->m_hash == data->m_hash);
  assert(sd->isShared());
  assert(sd->checkSane());
  return sd;
}
// Debug dump: print refcount, mode flags, length, the string bytes with
// non-printable characters rendered as \xNN escapes, and (in TAINTED
// builds) the taint data.
void StringData::dump() const {
  StringSlice s = slice();
  printf("StringData(%d) (%s%s%s%d): [", _count,
         isLiteral() ? "literal " : "",
         isShared() ? "shared " : "",
         isStatic() ? "static " : "",
         s.len);
  for (uint32_t i = 0; i < s.len; i++) {
    char ch = s.ptr[i];
    // Cast through unsigned char: passing a negative char to isprint() is
    // undefined behavior, and a negative char sign-extends in %02x
    // (printing \xffffff80 instead of \x80).
    if (isprint(static_cast<unsigned char>(ch))) {
      std::cout << ch;
    } else {
      printf("\\x%02x", static_cast<unsigned char>(ch));
    }
  }
#ifdef TAINTED
  printf("\n");
  this->getTaintDataRefConst().dump();
#endif
  printf("]\n");
}
void StringData::dump() const { const char *p = data(); int len = size(); printf("StringData(%d) (%s%s%s%s%d): [", _count, isLiteral() ? "literal " : "", isShared() ? "shared " : "", isLinear() ? "linear " : "", isStatic() ? "static " : "", len); for (int i = 0; i < len; i++) { char ch = p[i]; if (isprint(ch)) { std::cout << ch; } else { printf("\\x%02x", ch); } } #ifdef TAINTED printf("\n"); this->getTaintDataRef().dump(); #endif printf("]\n"); }
// Return the APC handle backing this string, or nullptr when the string
// is not APC-shared.
APCHandle* StringData::getAPCHandle() const {
  return isShared() ? sharedPayload()->shared->getHandle() : nullptr;
}
// test iterating objects in slabs
// Debug-only heap walk: iterates every header in the slabs, tallies
// sizes and kinds, and cross-checks the free lists, the APC array list,
// and the APC string list against what the walk found.
void MemoryManager::checkHeap() {
  size_t bytes=0;
  std::vector<Header*> hdrs;
  std::unordered_set<FreeNode*> free_blocks;
  std::unordered_set<APCLocalArray*> apc_arrays;
  std::unordered_set<StringData*> apc_strings;
  size_t counts[NumHeaderKinds];
  for (unsigned i=0; i < NumHeaderKinds; i++) counts[i] = 0;
  for (auto h = begin(), lim = end(); h != lim; ++h) {
    hdrs.push_back(&*h);
    TRACE(2, "checkHeap: hdr %p\n", hdrs[hdrs.size()-1]);
    bytes += h->size();
    counts[(int)h->kind_]++;
    switch (h->kind_) {
      case HeaderKind::Debug: {
        // the next block's parsed size should agree with DebugHeader
        auto h2 = h; ++h2;
        if (h2 != lim) {
          assert(h2->kind_ != HeaderKind::Debug);
          assert(h->debug_.returnedCap ==
                 MemoryManager::smartSizeClass(h2->size()));
        }
        break;
      }
      case HeaderKind::Free:
        // Collect free blocks for the free-list cross-check below.
        free_blocks.insert(&h->free_);
        break;
      case HeaderKind::Apc:
        // Collect APC-local arrays for the m_apc_arrays cross-check.
        apc_arrays.insert(&h->apc_);
        break;
      case HeaderKind::String:
        // Only shared (APC-backed) strings appear on the string list.
        if (h->str_.isShared()) apc_strings.insert(&h->str_);
        break;
      // All remaining kinds only contribute to the counts above.
      case HeaderKind::Packed:
      case HeaderKind::Struct:
      case HeaderKind::Mixed:
      case HeaderKind::Empty:
      case HeaderKind::Globals:
      case HeaderKind::Proxy:
      case HeaderKind::Object:
      case HeaderKind::ResumableObj:
      case HeaderKind::AwaitAllWH:
      case HeaderKind::Vector:
      case HeaderKind::Map:
      case HeaderKind::Set:
      case HeaderKind::Pair:
      case HeaderKind::ImmVector:
      case HeaderKind::ImmMap:
      case HeaderKind::ImmSet:
      case HeaderKind::Resource:
      case HeaderKind::Ref:
      case HeaderKind::ResumableFrame:
      case HeaderKind::NativeData:
      case HeaderKind::SmallMalloc:
      case HeaderKind::BigMalloc:
      case HeaderKind::BigObj:
      case HeaderKind::Hole:
        break;
    }
  }
  // check the free lists
  // Every node on a free list must have been seen as a Free header, and
  // every Free header must appear on exactly one free list.
  for (size_t i = 0; i < kNumSmartSizes; i++) {
    for (auto n = m_freelists[i].head; n; n = n->next) {
      assert(free_blocks.find(n) != free_blocks.end());
      free_blocks.erase(n);
    }
  }
  assert(free_blocks.empty());
  // check the apc array list
  for (auto a : m_apc_arrays) {
    assert(apc_arrays.find(a) != apc_arrays.end());
    apc_arrays.erase(a);
  }
  assert(apc_arrays.empty());
  // check the apc string list
  for (StringDataNode *next, *n = m_strings.next; n != &m_strings; n = next) {
    next = n->next;
    auto const s = StringData::node2str(n);
    assert(s->isShared());
    assert(apc_strings.find(s) != apc_strings.end());
    apc_strings.erase(s);
  }
  assert(apc_strings.empty());
  TRACE(1, "checkHeap: %lu objects %lu bytes\n", hdrs.size(), bytes);
  TRACE(1, "checkHeap-types: ");
  for (unsigned i = 0; i < NumHeaderKinds; ++i) {
    TRACE(1, "%s %lu%s", header_names[i], counts[i],
          (i + 1 < NumHeaderKinds ? " " : "\n"));
  }
}
// test iterating objects in slabs
// Debug-only heap walk: visits every header via forEachHeader, tallies
// sizes and kinds, and cross-checks the free lists, the APC array list,
// and the APC string list against what the walk found.
void MemoryManager::checkHeap() {
  size_t bytes=0;
  std::vector<Header*> hdrs;
  std::unordered_set<FreeNode*> free_blocks;
  std::unordered_set<APCLocalArray*> apc_arrays;
  std::unordered_set<StringData*> apc_strings;
  size_t counts[NumHeaderKinds];
  for (unsigned i=0; i < NumHeaderKinds; i++) counts[i] = 0;
  forEachHeader([&](Header* h) {
    hdrs.push_back(&*h);
    TRACE(2, "checkHeap: hdr %p\n", hdrs[hdrs.size()-1]);
    bytes += h->size();
    counts[(int)h->kind()]++;
    switch (h->kind()) {
      case HeaderKind::Free:
        // Collect free blocks for the free-list cross-check below.
        free_blocks.insert(&h->free_);
        break;
      case HeaderKind::Apc:
        // Collect APC-local arrays for the m_apc_arrays cross-check.
        apc_arrays.insert(&h->apc_);
        break;
      case HeaderKind::String:
        // Only shared (APC-backed) strings appear on the string list.
        if (h->str_.isShared()) apc_strings.insert(&h->str_);
        break;
      // All remaining kinds only contribute to the counts above.
      case HeaderKind::Packed:
      case HeaderKind::Struct:
      case HeaderKind::Mixed:
      case HeaderKind::Empty:
      case HeaderKind::Globals:
      case HeaderKind::Proxy:
      case HeaderKind::Object:
      case HeaderKind::ResumableObj:
      case HeaderKind::AwaitAllWH:
      case HeaderKind::Vector:
      case HeaderKind::Map:
      case HeaderKind::Set:
      case HeaderKind::Pair:
      case HeaderKind::ImmVector:
      case HeaderKind::ImmMap:
      case HeaderKind::ImmSet:
      case HeaderKind::Resource:
      case HeaderKind::Ref:
      case HeaderKind::ResumableFrame:
      case HeaderKind::NativeData:
      case HeaderKind::SmallMalloc:
      case HeaderKind::BigMalloc:
        break;
      case HeaderKind::BigObj:
      case HeaderKind::Hole:
        assert(false && "forEachHeader skips these kinds");
        break;
    }
  });
  // check the free lists
  // Every node on a free list must have been seen as a Free header, and
  // every Free header must appear on exactly one free list.
  for (auto i = 0; i < kNumSmallSizes; i++) {
    for (auto n = m_freelists[i].head; n; n = n->next) {
      assert(free_blocks.find(n) != free_blocks.end());
      free_blocks.erase(n);
    }
  }
  assert(free_blocks.empty());
  // check the apc array list
  for (auto a : m_apc_arrays) {
    assert(apc_arrays.find(a) != apc_arrays.end());
    apc_arrays.erase(a);
  }
  assert(apc_arrays.empty());
  // check the apc string list
  for (StringDataNode *next, *n = m_strings.next; n != &m_strings; n = next) {
    next = n->next;
    auto const s = StringData::node2str(n);
    assert(s->isShared());
    assert(apc_strings.find(s) != apc_strings.end());
    apc_strings.erase(s);
  }
  assert(apc_strings.empty());
  TRACE(1, "checkHeap: %lu objects %lu bytes\n", hdrs.size(), bytes);
  TRACE(1, "checkHeap-types: ");
  for (unsigned i = 0; i < NumHeaderKinds; ++i) {
    TRACE(1, "%s %lu%s", header_names[i], counts[i],
          (i + 1 < NumHeaderKinds ? " " : "\n"));
  }
}
/** Install a new tree as the imaginary part of this function.
 *  Raises a fatal error when called on a shared function. */
void QMFunction::setImag(mrcpp::FunctionTree<3> *tree) {
    if (isShared()) {
        MSG_FATAL("Cannot set in shared function");
    }
    this->func_ptr->im = tree;
}
// Return the hash stored with the shared (APC) string data.
// Only valid on shared strings.
int64 StringData::getSharedStringHash() const {
  ASSERT(isShared());
  return m_shared->stringHash();
}