Position DocumentStorage::findField(StringData requested) const {
    int reqSize = requested.size();  // get size calculation out of the way if needed

    if (_numFields >= HASH_TAB_MIN) {  // hash lookup
        const unsigned bucket = bucketForKey(requested);

        Position pos = _hashTab[bucket];
        while (pos.found()) {
            const ValueElement& elem = getField(pos);
            if (elem.nameLen == reqSize && memcmp(requested.rawData(), elem._name, reqSize) == 0) {
                return pos;
            }

            // possible collision
            pos = elem.nextCollision;
        }
    } else {  // linear scan
        for (DocumentStorageIterator it = iteratorAll(); !it.atEnd(); it.advance()) {
            if (it->nameLen == reqSize && memcmp(requested.rawData(), it->_name, reqSize) == 0) {
                return it.position();
            }
        }
    }

    // if we got here, there's no such field
    return Position();
}
DocumentStorage::~DocumentStorage() {
    boost::scoped_array<char> deleteBufferAtScopeEnd(_buffer);

    for (DocumentStorageIterator it = iteratorAll(); !it.atEnd(); it.advance()) {
        it->val.~Value();  // explicit destructor call
    }
}
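// Illustration only (not MongoDB code): the explicit it->val.~Value() calls above are the usual
// counterpart of placement-new. The Values live inside a raw char buffer, so the buffer deleter
// frees bytes but cannot run destructors. A minimal standalone sketch of that pattern; Elem and
// placementPatternDemo are hypothetical names used just for this example:
#include <memory>
#include <new>
#include <string>

struct Elem {
    std::string name;  // owns heap memory, so skipping its destructor would leak
};

void placementPatternDemo() {
    std::unique_ptr<char[]> buffer(new char[2 * sizeof(Elem)]);

    // Construct the elements in place inside the raw buffer.
    Elem* a = new (buffer.get()) Elem{"first"};
    Elem* b = new (buffer.get() + sizeof(Elem)) Elem{"second"};

    // Run the destructors by hand before the buffer itself is released.
    a->~Elem();
    b->~Elem();
}  // the unique_ptr frees only the raw bytes here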
void Document::hash_combine(size_t& seed) const {
    for (DocumentStorageIterator it = storage().iterator(); !it.atEnd(); it.advance()) {
        StringData name = it->nameSD();
        boost::hash_range(seed, name.rawData(), name.rawData() + name.size());
        it->val.hash_combine(seed);
    }
}
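// Illustration only: the same boost hashing primitives used above, applied to a standalone
// (name, value) pair. hash_range folds each byte of the name into the seed, and hash_combine
// then mixes in the value, mirroring how each field is hashed in document order.
// hashFieldDemo is a hypothetical helper, not part of the source.
#include <boost/functional/hash.hpp>
#include <cstddef>
#include <string>

std::size_t hashFieldDemo(const std::string& name, long long value) {
    std::size_t seed = 0;
    boost::hash_range(seed, name.data(), name.data() + name.size());
    boost::hash_combine(seed, value);
    return seed;
}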
DocumentStorage::~DocumentStorage() {
    std::unique_ptr<char[]> deleteBufferAtScopeEnd(_buffer);

    for (DocumentStorageIterator it = iteratorAll(); !it.atEnd(); it.advance()) {
        it->val.~Value();  // explicit destructor call
    }
}
intrusive_ptr<DocumentStorage> DocumentStorage::clone() const {
    intrusive_ptr<DocumentStorage> out(new DocumentStorage());

    // Make a copy of the buffer.
    // It is very important that the positions of each field are the same after cloning.
    const size_t bufferBytes = allocatedBytes();
    out->_buffer = new char[bufferBytes];
    out->_bufferEnd = out->_buffer + (_bufferEnd - _buffer);
    if (bufferBytes > 0) {
        memcpy(out->_buffer, _buffer, bufferBytes);
    }

    // Copy remaining fields
    out->_usedBytes = _usedBytes;
    out->_numFields = _numFields;
    out->_hashTabMask = _hashTabMask;
    out->_metaFields = _metaFields;
    out->_textScore = _textScore;
    out->_randVal = _randVal;
    out->_sortKey = _sortKey.getOwned();

    // Tell values that they have been memcpyed (updates ref counts)
    for (DocumentStorageIterator it = out->iteratorAll(); !it.atEnd(); it.advance()) {
        it->val.memcpyed();
    }

    return out;
}
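// Illustration only: clone() above copies the whole buffer with memcpy (so every field keeps
// its exact Position) and then tells each Value it has been bit-copied so shared resources get
// an extra reference. A standalone sketch of that "bitwise copy, then fix up ownership" idea,
// assuming a simple intrusively counted payload; Counted, Handle and cloneDemo are hypothetical:
#include <cstddef>
#include <cstring>

struct Counted {
    int refs = 1;
};

struct Handle {                       // trivially copyable, so memcpy is legal
    Counted* target = nullptr;
    void memcpyed() const {           // called on the copy: account for the extra owner
        if (target)
            ++target->refs;
    }
};

void cloneDemo(const Handle* src, Handle* dst, std::size_t n) {
    std::memcpy(dst, src, n * sizeof(Handle));  // layout and positions preserved exactly
    for (std::size_t i = 0; i < n; ++i)
        dst[i].memcpyed();                      // now both arrays legitimately own each target
}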
Position DocumentStorage::findField(StringData requested) const {
    if (_numFields >= HASH_TAB_MIN) {  // hash lookup
        const unsigned bucket = bucketForKey(requested);

        Position pos = _hashTab[bucket];
        while (pos.found()) {
            const ValueElement& elem = getField(pos);
            if (requested == elem.nameSD())
                return pos;

            // possible collision
            pos = elem.nextCollision;
        }
    } else if (_numFields) {  // linear scan
        for (DocumentStorageIterator it = iteratorAll(); !it.atEnd(); it.advance()) {
            if (size_t(it->nameLen) == requested.size() && requested == it->nameSD()) {
                return it.position();
            }
        }
    }

    // if we got here, there's no such field
    return Position();
}
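// Illustration only, assuming bucketForKey() hashes the requested name and masks it into a
// power-of-two bucket array: the lookup above is a chained hash table in which each element
// records the position of the next entry that landed in the same bucket. A minimal standalone
// analogue; Entry and findInChainedTable are hypothetical names for this sketch:
#include <cstddef>
#include <functional>
#include <string>
#include <vector>

struct Entry {
    std::string name;
    int nextCollision;  // index of the next entry in this bucket, or -1
};

int findInChainedTable(const std::vector<int>& buckets,   // bucket -> first entry index, or -1
                       const std::vector<Entry>& entries,
                       const std::string& requested) {
    const std::size_t mask = buckets.size() - 1;           // bucket count is a power of two
    int pos = buckets[std::hash<std::string>{}(requested) & mask];
    while (pos != -1) {
        if (entries[pos].name == requested)
            return pos;
        pos = entries[pos].nextCollision;                  // walk the collision chain
    }
    return -1;  // no such field
}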
void Document::serializeForSorter(BufBuilder& buf) const {
    const int numElems = size();
    buf.appendNum(numElems);

    for (DocumentStorageIterator it = storage().iterator(); !it.atEnd(); it.advance()) {
        buf.appendStr(it->nameSD(), /*NUL byte*/ true);
        it->val.serializeForSorter(buf);
    }
}
int Document::compare(const Document& rL, const Document& rR) {
    DocumentStorageIterator lIt = rL.storage().iterator();
    DocumentStorageIterator rIt = rR.storage().iterator();

    while (true) {
        if (lIt.atEnd()) {
            if (rIt.atEnd())
                return 0;  // documents are the same length
            return -1;     // left document is shorter
        }

        if (rIt.atEnd())
            return 1;  // right document is shorter

        const ValueElement& rField = rIt.get();
        const ValueElement& lField = lIt.get();

        const int nameCmp = lField.nameSD().compare(rField.nameSD());
        if (nameCmp)
            return nameCmp;  // field names are unequal

        const int valueCmp = Value::compare(lField.val, rField.val);
        if (valueCmp)
            return valueCmp;  // fields are unequal

        rIt.advance();
        lIt.advance();
    }
}
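// Illustration only: the comparison above is a field-by-field lexicographic walk, where a
// shorter document orders first only when it is a strict prefix of the other. A standalone
// analogue over (name, value) pairs; Field and compareFieldLists are hypothetical:
#include <cstddef>
#include <string>
#include <utility>
#include <vector>

using Field = std::pair<std::string, int>;

int compareFieldLists(const std::vector<Field>& l, const std::vector<Field>& r) {
    std::size_t i = 0;
    while (true) {
        if (i == l.size())
            return i == r.size() ? 0 : -1;  // equal, or left is a strict prefix
        if (i == r.size())
            return 1;                       // right is a strict prefix

        if (int nameCmp = l[i].first.compare(r[i].first))
            return nameCmp;                 // first differing field name decides
        if (l[i].second != r[i].second)
            return l[i].second < r[i].second ? -1 : 1;  // then the value decides
        ++i;
    }
}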
void Document::toBson(BSONObjBuilder* builder, size_t recursionLevel) const {
    uassert(ErrorCodes::Overflow,
            str::stream() << "cannot convert document to BSON because it exceeds the limit of "
                          << BSONDepth::getMaxAllowableDepth() << " levels of nesting",
            recursionLevel <= BSONDepth::getMaxAllowableDepth());

    for (DocumentStorageIterator it = storage().iterator(); !it.atEnd(); it.advance()) {
        it->val.addToBsonObj(builder, it->nameSD(), recursionLevel);
    }
}
size_t Document::getApproximateSize() const {
    if (!_storage)
        return 0;  // we've allocated no memory

    size_t size = sizeof(DocumentStorage);
    size += storage().allocatedBytes();

    for (DocumentStorageIterator it = storage().iterator(); !it.atEnd(); it.advance()) {
        size += it->val.getApproximateSize();
        size -= sizeof(Value);  // already accounted for above
    }

    return size;
}
string Document::toString() const {
    if (empty())
        return "{}";

    StringBuilder out;
    const char* prefix = "{";

    for (DocumentStorageIterator it = storage().iterator(); !it.atEnd(); it.advance()) {
        out << prefix << it->nameSD() << ": " << it->val.toString();
        prefix = ", ";
    }
    out << '}';

    return out.str();
}
void Document::serializeForSorter(BufBuilder& buf) const {
    const int numElems = size();
    buf.appendNum(numElems);

    for (DocumentStorageIterator it = storage().iterator(); !it.atEnd(); it.advance()) {
        buf.appendStr(it->nameSD(), /*NUL byte*/ true);
        it->val.serializeForSorter(buf);
    }

    if (hasTextScore()) {
        buf.appendNum(char(DocumentStorage::MetaType::TEXT_SCORE + 1));
        buf.appendNum(getTextScore());
    }
    if (hasRandMetaField()) {
        buf.appendNum(char(DocumentStorage::MetaType::RAND_VAL + 1));
        buf.appendNum(getRandMetaField());
    }
    buf.appendNum(char(0));
}
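// Illustration only (simplified, not the real BufBuilder/Value encoding): the layout produced
// above is "field count, then name + NUL + serialized value per field, then optional meta
// records tagged with (MetaType + 1) so the tag is never 0, then a terminating 0 byte". A
// standalone sketch of that framing using std::string as the buffer and int32 stand-in values;
// appendRaw and serializeDemo are hypothetical helpers:
#include <cstddef>
#include <cstdint>
#include <string>
#include <utility>
#include <vector>

void appendRaw(std::string& buf, const void* p, std::size_t n) {
    buf.append(static_cast<const char*>(p), n);
}

std::string serializeDemo(const std::vector<std::pair<std::string, int32_t>>& fields,
                          bool hasScore,
                          double score) {
    std::string buf;
    const int32_t numElems = static_cast<int32_t>(fields.size());
    appendRaw(buf, &numElems, sizeof(numElems));        // field count

    for (const auto& f : fields) {
        buf.append(f.first);                            // field name ...
        buf.push_back('\0');                            // ... including the NUL byte
        appendRaw(buf, &f.second, sizeof(f.second));    // field value
    }

    if (hasScore) {
        buf.push_back(char(1));                         // tagged optional meta record
        appendRaw(buf, &score, sizeof(score));
    }
    buf.push_back(char(0));                             // terminator: no more meta records
    return buf;
}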
int Document::compare(const Document& rL,
                      const Document& rR,
                      const StringData::ComparatorInterface* stringComparator) {
    DocumentStorageIterator lIt = rL.storage().iterator();
    DocumentStorageIterator rIt = rR.storage().iterator();

    while (true) {
        if (lIt.atEnd()) {
            if (rIt.atEnd())
                return 0;  // documents are the same length
            return -1;     // left document is shorter
        }

        if (rIt.atEnd())
            return 1;  // right document is shorter

        const ValueElement& rField = rIt.get();
        const ValueElement& lField = lIt.get();

        // For compatibility with BSONObj::woCompare() consider the canonical type of values
        // before considering their names.
        if (lField.val.getType() != rField.val.getType()) {
            const int rCType = canonicalizeBSONType(rField.val.getType());
            const int lCType = canonicalizeBSONType(lField.val.getType());
            if (lCType != rCType)
                return lCType < rCType ? -1 : 1;
        }

        const int nameCmp = lField.nameSD().compare(rField.nameSD());
        if (nameCmp)
            return nameCmp;  // field names are unequal

        const int valueCmp = Value::compare(lField.val, rField.val, stringComparator);
        if (valueCmp)
            return valueCmp;  // fields are unequal

        rIt.advance();
        lIt.advance();
    }
}
void Document::toBson(BSONObjBuilder* pBuilder) const {
    for (DocumentStorageIterator it = storage().iterator(); !it.atEnd(); it.advance()) {
        *pBuilder << it->nameSD() << it->val;
    }
}