// Parses a collection-options document (e.g. the options to "create") into this
// CollectionOptions instance, starting from a clean default state via reset().
// Returns BadValue for structurally invalid options; unknown fields are silently
// ignored, and some malformed values are deliberately accepted for
// backwards compatibility (see SERVER-13737 note below).
Status CollectionOptions::parse(const BSONObj& options) {
    reset();

    // During parsing, ignore some validation errors in order to accept options objects that
    // were valid in previous versions of the server. SERVER-13737.
    BSONObjIterator i( options );
    while ( i.more() ) {
        BSONElement e = i.next();
        StringData fieldName = e.fieldName();

        if ( fieldName == "capped" ) {
            capped = e.trueValue();
        }
        else if ( fieldName == "size" ) {
            if ( !e.isNumber() ) {
                // Ignoring for backwards compatibility.
                continue;
            }
            cappedSize = e.numberLong();
            if ( cappedSize < 0 )
                return Status( ErrorCodes::BadValue, "size has to be >= 0" );
            // Round the requested size up to the next multiple of 256 bytes.
            cappedSize += 0xff;
            cappedSize &= 0xffffffffffffff00LL;
        }
        else if ( fieldName == "max" ) {
            // "max" only applies to capped collections; note this checks the
            // "capped" field of the *input* object, not the already-parsed
            // member, so it is insensitive to field order.
            if ( !options["capped"].trueValue() || !e.isNumber() ) {
                // Ignoring for backwards compatibility.
                continue;
            }
            cappedMaxDocs = e.numberLong();
            if ( !validMaxCappedDocs( &cappedMaxDocs ) )
                return Status( ErrorCodes::BadValue,
                               "max in a capped collection has to be < 2^31 or not set" );
        }
        else if ( fieldName == "$nExtents" ) {
            if ( e.type() == Array ) {
                // An array gives an explicit size for each initial extent.
                BSONObjIterator j( e.Obj() );
                while ( j.more() ) {
                    BSONElement inner = j.next();
                    initialExtentSizes.push_back( inner.numberInt() );
                }
            }
            else {
                // A scalar gives just the number of initial extents.
                initialNumExtents = e.numberLong();
            }
        }
        else if ( fieldName == "autoIndexId" ) {
            if ( e.trueValue() )
                autoIndexId = YES;
            else
                autoIndexId = NO;
        }
        else if ( fieldName == "flags" ) {
            flags = e.numberInt();
            // Remember that flags were explicitly supplied, since 0 is a valid value.
            flagsSet = true;
        }
        else if ( fieldName == "temp" ) {
            temp = e.trueValue();
        }
        else if (fieldName == "storageEngine") {
            // Storage engine-specific collection options.
            // "storageEngine" field must be of type "document".
            // Every field inside "storageEngine" has to be a document.
            // Format:
            // {
            //     ...
            //     storageEngine: {
            //         storageEngine1: {
            //             ...
            //         },
            //         storageEngine2: {
            //             ...
            //         }
            //     },
            //     ...
            // }
            if (e.type() != mongo::Object) {
                return Status(ErrorCodes::BadValue, "'storageEngine' has to be a document.");
            }

            BSONObjIterator j(e.Obj());
            if (!j.more()) {
                return Status(ErrorCodes::BadValue,
                              "Empty 'storageEngine' options are invalid. "
                              "Please remove, or include valid options.");
            }

            // Loop through each provided storageEngine.
            while (j.more()) {
                BSONElement storageEngineElement = j.next();
                StringData storageEngineName = storageEngineElement.fieldNameStringData();
                if (storageEngineElement.type() != mongo::Object) {
                    return Status(ErrorCodes::BadValue,
                                  str::stream() << "'storageEngine." << storageEngineName
                                                << "' has to be an embedded document.");
                }
            }

            // Take an owned copy so the stored options outlive the caller's buffer.
            storageEngine = e.Obj().getOwned();
        }
    }

    return Status::OK();
}
// static
// Returns true if 'trueSoln' (a query solution tree produced by the planner)
// matches the expected-solution description in 'testSoln'.  'testSoln' is a BSON
// object whose single top-level field names the expected stage (e.g. "cscan",
// "ixscan", "fetch", ...) and whose sub-fields constrain that stage's
// parameters.  Omitted sub-fields are not checked; "node"/child sub-objects are
// matched recursively.  Dispatch is on trueSoln->getType(), so a stage-name
// mismatch simply fails the lookup and returns false.
bool QueryPlannerTestLib::solutionMatches(const BSONObj& testSoln,
                                          const QuerySolutionNode* trueSoln) {
    //
    // leaf nodes
    //
    if (STAGE_COLLSCAN == trueSoln->getType()) {
        const CollectionScanNode* csn = static_cast<const CollectionScanNode*>(trueSoln);
        BSONElement el = testSoln["cscan"];
        if (el.eoo() || !el.isABSONObj()) {
            return false;
        }
        BSONObj csObj = el.Obj();

        // "dir" is mandatory for a collscan spec.
        BSONElement dir = csObj["dir"];
        if (dir.eoo() || !dir.isNumber()) {
            return false;
        }
        if (dir.numberInt() != csn->direction) {
            return false;
        }

        // "filter" is optional: absent => match; null => node must have no filter.
        BSONElement filter = csObj["filter"];
        if (filter.eoo()) {
            return true;
        } else if (filter.isNull()) {
            return NULL == csn->filter;
        } else if (!filter.isABSONObj()) {
            return false;
        }

        // Optional collation influences how the filter is compared.
        BSONObj collation;
        if (BSONElement collationElt = csObj["collation"]) {
            if (!collationElt.isABSONObj()) {
                return false;
            }
            collation = collationElt.Obj();
        }

        return filterMatches(filter.Obj(), collation, trueSoln);
    } else if (STAGE_IXSCAN == trueSoln->getType()) {
        const IndexScanNode* ixn = static_cast<const IndexScanNode*>(trueSoln);
        BSONElement el = testSoln["ixscan"];
        if (el.eoo() || !el.isABSONObj()) {
            return false;
        }
        BSONObj ixscanObj = el.Obj();

        // The index may be identified by key pattern, by name, or both; at
        // least one of the two must be present (checked below).
        BSONElement pattern = ixscanObj["pattern"];
        if (!pattern.eoo()) {
            if (!pattern.isABSONObj()) {
                return false;
            }
            if (pattern.Obj() != ixn->index.keyPattern) {
                return false;
            }
        }

        BSONElement name = ixscanObj["name"];
        if (!name.eoo()) {
            if (name.type() != BSONType::String) {
                return false;
            }
            if (name.valueStringData() != ixn->index.name) {
                return false;
            }
        }

        if (name.eoo() && pattern.eoo()) {
            return false;
        }

        BSONElement bounds = ixscanObj["bounds"];
        if (!bounds.eoo()) {
            if (!bounds.isABSONObj()) {
                return false;
            } else if (!boundsMatch(bounds.Obj(), ixn->bounds)) {
                return false;
            }
        }

        // "dir" is only checked when present AND an int (other types are ignored).
        BSONElement dir = ixscanObj["dir"];
        if (!dir.eoo() && NumberInt == dir.type()) {
            if (dir.numberInt() != ixn->direction) {
                return false;
            }
        }

        BSONElement filter = ixscanObj["filter"];
        if (filter.eoo()) {
            return true;
        } else if (filter.isNull()) {
            return NULL == ixn->filter;
        } else if (!filter.isABSONObj()) {
            return false;
        }

        BSONObj collation;
        if (BSONElement collationElt = ixscanObj["collation"]) {
            if (!collationElt.isABSONObj()) {
                return false;
            }
            collation = collationElt.Obj();
        }

        return filterMatches(filter.Obj(), collation, trueSoln);
    } else if (STAGE_GEO_NEAR_2D == trueSoln->getType()) {
        const GeoNear2DNode* node = static_cast<const GeoNear2DNode*>(trueSoln);
        BSONElement el = testSoln["geoNear2d"];
        if (el.eoo() || !el.isABSONObj()) {
            return false;
        }
        // For 2d geoNear the spec object IS the expected key pattern.
        BSONObj geoObj = el.Obj();
        return geoObj == node->index.keyPattern;
    } else if (STAGE_GEO_NEAR_2DSPHERE == trueSoln->getType()) {
        const GeoNear2DSphereNode* node = static_cast<const GeoNear2DSphereNode*>(trueSoln);
        BSONElement el = testSoln["geoNear2dsphere"];
        if (el.eoo() || !el.isABSONObj()) {
            return false;
        }
        BSONObj geoObj = el.Obj();

        // Unlike ixscan, "pattern" is mandatory here.
        BSONElement pattern = geoObj["pattern"];
        if (pattern.eoo() || !pattern.isABSONObj()) {
            return false;
        }
        if (pattern.Obj() != node->index.keyPattern) {
            return false;
        }

        // Optional bounds are matched against the node's base (non-near) bounds.
        BSONElement bounds = geoObj["bounds"];
        if (!bounds.eoo()) {
            if (!bounds.isABSONObj()) {
                return false;
            } else if (!boundsMatch(bounds.Obj(), node->baseBounds)) {
                return false;
            }
        }

        return true;
    } else if (STAGE_TEXT == trueSoln->getType()) {
        // {text: {search: "somestr", language: "something", filter: {blah: 1}}}
        const TextNode* node = static_cast<const TextNode*>(trueSoln);
        BSONElement el = testSoln["text"];
        if (el.eoo() || !el.isABSONObj()) {
            return false;
        }
        BSONObj textObj = el.Obj();

        // Each text sub-field is optional and only checked when present.
        BSONElement searchElt = textObj["search"];
        if (!searchElt.eoo()) {
            if (searchElt.String() != node->ftsQuery->getQuery()) {
                return false;
            }
        }

        BSONElement languageElt = textObj["language"];
        if (!languageElt.eoo()) {
            if (languageElt.String() != node->ftsQuery->getLanguage()) {
                return false;
            }
        }

        BSONElement caseSensitiveElt = textObj["caseSensitive"];
        if (!caseSensitiveElt.eoo()) {
            if (caseSensitiveElt.trueValue() != node->ftsQuery->getCaseSensitive()) {
                return false;
            }
        }

        BSONElement diacriticSensitiveElt = textObj["diacriticSensitive"];
        if (!diacriticSensitiveElt.eoo()) {
            if (diacriticSensitiveElt.trueValue() != node->ftsQuery->getDiacriticSensitive()) {
                return false;
            }
        }

        BSONElement indexPrefix = textObj["prefix"];
        if (!indexPrefix.eoo()) {
            if (!indexPrefix.isABSONObj()) {
                return false;
            }

            if (0 != indexPrefix.Obj().woCompare(node->indexPrefix)) {
                return false;
            }
        }

        BSONObj collation;
        if (BSONElement collationElt = textObj["collation"]) {
            if (!collationElt.isABSONObj()) {
                return false;
            }
            collation = collationElt.Obj();
        }

        BSONElement filter = textObj["filter"];
        if (!filter.eoo()) {
            if (filter.isNull()) {
                if (NULL != node->filter) {
                    return false;
                }
            } else if (!filter.isABSONObj()) {
                return false;
            } else if (!filterMatches(filter.Obj(), collation, trueSoln)) {
                return false;
            }
        }

        return true;
    }

    //
    // internal nodes
    //
    if (STAGE_FETCH == trueSoln->getType()) {
        const FetchNode* fn = static_cast<const FetchNode*>(trueSoln);

        BSONElement el = testSoln["fetch"];
        if (el.eoo() || !el.isABSONObj()) {
            return false;
        }
        BSONObj fetchObj = el.Obj();

        BSONObj collation;
        if (BSONElement collationElt = fetchObj["collation"]) {
            if (!collationElt.isABSONObj()) {
                return false;
            }
            collation = collationElt.Obj();
        }

        BSONElement filter = fetchObj["filter"];
        if (!filter.eoo()) {
            if (filter.isNull()) {
                if (NULL != fn->filter) {
                    return false;
                }
            } else if (!filter.isABSONObj()) {
                return false;
            } else if (!filterMatches(filter.Obj(), collation, trueSoln)) {
                return false;
            }
        }

        // Fetch has exactly one child; recurse into it.
        BSONElement child = fetchObj["node"];
        if (child.eoo() || !child.isABSONObj()) {
            return false;
        }
        return solutionMatches(child.Obj(), fn->children[0]);
    } else if (STAGE_OR == trueSoln->getType()) {
        const OrNode* orn = static_cast<const OrNode*>(trueSoln);
        BSONElement el = testSoln["or"];
        if (el.eoo() || !el.isABSONObj()) {
            return false;
        }
        BSONObj orObj = el.Obj();
        // Children are matched order-insensitively by the helper.
        return childrenMatch(orObj, orn);
    } else if (STAGE_AND_HASH == trueSoln->getType()) {
        const AndHashNode* ahn = static_cast<const AndHashNode*>(trueSoln);

        BSONElement el = testSoln["andHash"];
        if (el.eoo() || !el.isABSONObj()) {
            return false;
        }
        BSONObj andHashObj = el.Obj();

        BSONObj collation;
        if (BSONElement collationElt = andHashObj["collation"]) {
            if (!collationElt.isABSONObj()) {
                return false;
            }
            collation = collationElt.Obj();
        }

        BSONElement filter = andHashObj["filter"];
        if (!filter.eoo()) {
            if (filter.isNull()) {
                if (NULL != ahn->filter) {
                    return false;
                }
            } else if (!filter.isABSONObj()) {
                return false;
            } else if (!filterMatches(filter.Obj(), collation, trueSoln)) {
                return false;
            }
        }

        return childrenMatch(andHashObj, ahn);
    } else if (STAGE_AND_SORTED == trueSoln->getType()) {
        const AndSortedNode* asn = static_cast<const AndSortedNode*>(trueSoln);

        BSONElement el = testSoln["andSorted"];
        if (el.eoo() || !el.isABSONObj()) {
            return false;
        }
        BSONObj andSortedObj = el.Obj();

        BSONObj collation;
        if (BSONElement collationElt = andSortedObj["collation"]) {
            if (!collationElt.isABSONObj()) {
                return false;
            }
            collation = collationElt.Obj();
        }

        BSONElement filter = andSortedObj["filter"];
        if (!filter.eoo()) {
            if (filter.isNull()) {
                if (NULL != asn->filter) {
                    return false;
                }
            } else if (!filter.isABSONObj()) {
                return false;
            } else if (!filterMatches(filter.Obj(), collation, trueSoln)) {
                return false;
            }
        }

        return childrenMatch(andSortedObj, asn);
    } else if (STAGE_PROJECTION == trueSoln->getType()) {
        const ProjectionNode* pn = static_cast<const ProjectionNode*>(trueSoln);

        BSONElement el = testSoln["proj"];
        if (el.eoo() || !el.isABSONObj()) {
            return false;
        }
        BSONObj projObj = el.Obj();

        // Optional "type" maps a string tag to the projection-execution enum.
        BSONElement projType = projObj["type"];
        if (!projType.eoo()) {
            string projTypeStr = projType.str();
            if (!((pn->projType == ProjectionNode::DEFAULT && projTypeStr == "default") ||
                  (pn->projType == ProjectionNode::SIMPLE_DOC && projTypeStr == "simple") ||
                  (pn->projType == ProjectionNode::COVERED_ONE_INDEX &&
                   projTypeStr == "coveredIndex"))) {
                return false;
            }
        }

        // "spec" and "node" are both mandatory.
        BSONElement spec = projObj["spec"];
        if (spec.eoo() || !spec.isABSONObj()) {
            return false;
        }
        BSONElement child = projObj["node"];
        if (child.eoo() || !child.isABSONObj()) {
            return false;
        }

        return (spec.Obj() == pn->projection) && solutionMatches(child.Obj(), pn->children[0]);
    } else if (STAGE_SORT == trueSoln->getType()) {
        const SortNode* sn = static_cast<const SortNode*>(trueSoln);

        BSONElement el = testSoln["sort"];
        if (el.eoo() || !el.isABSONObj()) {
            return false;
        }
        BSONObj sortObj = el.Obj();

        BSONElement patternEl = sortObj["pattern"];
        if (patternEl.eoo() || !patternEl.isABSONObj()) {
            return false;
        }
        BSONElement limitEl = sortObj["limit"];
        if (!limitEl.isNumber()) {
            return false;
        }
        BSONElement child = sortObj["node"];
        if (child.eoo() || !child.isABSONObj()) {
            return false;
        }

        // NOTE(review): a negative "limit" converts to a huge size_t here —
        // presumably test specs always use limit >= 0.
        size_t expectedLimit = limitEl.numberInt();
        return (patternEl.Obj() == sn->pattern) && (expectedLimit == sn->limit) &&
            solutionMatches(child.Obj(), sn->children[0]);
    } else if (STAGE_SORT_KEY_GENERATOR == trueSoln->getType()) {
        const SortKeyGeneratorNode* keyGenNode = static_cast<const SortKeyGeneratorNode*>(trueSoln);

        // A sort-key-generator stage only wraps a child; nothing else to check.
        BSONElement el = testSoln["sortKeyGen"];
        if (el.eoo() || !el.isABSONObj()) {
            return false;
        }
        BSONObj keyGenObj = el.Obj();

        BSONElement child = keyGenObj["node"];
        if (child.eoo() || !child.isABSONObj()) {
            return false;
        }

        return solutionMatches(child.Obj(), keyGenNode->children[0]);
    } else if (STAGE_SORT_MERGE == trueSoln->getType()) {
        const MergeSortNode* msn = static_cast<const MergeSortNode*>(trueSoln);
        BSONElement el = testSoln["mergeSort"];
        if (el.eoo() || !el.isABSONObj()) {
            return false;
        }
        BSONObj mergeSortObj = el.Obj();
        return childrenMatch(mergeSortObj, msn);
    } else if (STAGE_SKIP == trueSoln->getType()) {
        const SkipNode* sn = static_cast<const SkipNode*>(trueSoln);

        BSONElement el = testSoln["skip"];
        if (el.eoo() || !el.isABSONObj()) {
            return false;
        }
        BSONObj sortObj = el.Obj();

        BSONElement skipEl = sortObj["n"];
        if (!skipEl.isNumber()) {
            return false;
        }
        BSONElement child = sortObj["node"];
        if (child.eoo() || !child.isABSONObj()) {
            return false;
        }

        return (skipEl.numberInt() == sn->skip) && solutionMatches(child.Obj(), sn->children[0]);
    } else if (STAGE_LIMIT == trueSoln->getType()) {
        const LimitNode* ln = static_cast<const LimitNode*>(trueSoln);

        BSONElement el = testSoln["limit"];
        if (el.eoo() || !el.isABSONObj()) {
            return false;
        }
        BSONObj sortObj = el.Obj();

        BSONElement limitEl = sortObj["n"];
        if (!limitEl.isNumber()) {
            return false;
        }
        BSONElement child = sortObj["node"];
        if (child.eoo() || !child.isABSONObj()) {
            return false;
        }

        return (limitEl.numberInt() == ln->limit) && solutionMatches(child.Obj(), ln->children[0]);
    } else if (STAGE_KEEP_MUTATIONS == trueSoln->getType()) {
        const KeepMutationsNode* kn = static_cast<const KeepMutationsNode*>(trueSoln);

        BSONElement el = testSoln["keep"];
        if (el.eoo() || !el.isABSONObj()) {
            return false;
        }
        BSONObj keepObj = el.Obj();

        // Doesn't have any parameters really.
        BSONElement child = keepObj["node"];
        if (child.eoo() || !child.isABSONObj()) {
            return false;
        }

        return solutionMatches(child.Obj(), kn->children[0]);
    } else if (STAGE_SHARDING_FILTER == trueSoln->getType()) {
        const ShardingFilterNode* fn = static_cast<const ShardingFilterNode*>(trueSoln);

        BSONElement el = testSoln["sharding_filter"];
        if (el.eoo() || !el.isABSONObj()) {
            return false;
        }
        BSONObj keepObj = el.Obj();

        BSONElement child = keepObj["node"];
        if (child.eoo() || !child.isABSONObj()) {
            return false;
        }

        return solutionMatches(child.Obj(), fn->children[0]);
    } else if (STAGE_ENSURE_SORTED == trueSoln->getType()) {
        const EnsureSortedNode* esn = static_cast<const EnsureSortedNode*>(trueSoln);

        BSONElement el = testSoln["ensureSorted"];
        if (el.eoo() || !el.isABSONObj()) {
            return false;
        }
        BSONObj esObj = el.Obj();

        BSONElement patternEl = esObj["pattern"];
        if (patternEl.eoo() || !patternEl.isABSONObj()) {
            return false;
        }
        BSONElement child = esObj["node"];
        if (child.eoo() || !child.isABSONObj()) {
            return false;
        }

        return (patternEl.Obj() == esn->pattern) && solutionMatches(child.Obj(), esn->children[0]);
    }

    // Unrecognized or unsupported stage type.
    return false;
}
// Command handler that applies runtime "setParameter" options found in cmdObj.
// 's' counts how many parameters were set; each branch reports the PREVIOUS
// value in "was" only when it is the first parameter applied (s == 0).
// Returns false with errmsg set when nothing was set or a precondition fails.
bool run(const string& dbname, BSONObj& cmdObj, int, string& errmsg, BSONObjBuilder& result,
         bool fromRepl ) {
    int s = 0;
    // Delegate mongod-only parameters first; 'found' records whether any matched.
    bool found = setParmsMongodSpecific(dbname, cmdObj, errmsg, result, fromRepl);
    if( cmdObj.hasElement("journalCommitInterval") ) {
        if( !cmdLine.dur ) {
            errmsg = "journaling is off";
            return false;
        }
        int x = (int) cmdObj["journalCommitInterval"].Number();
        // NOTE(review): verify() on user-supplied input aborts the command with a
        // fatal-style assertion rather than returning an error message; the
        // accepted range is the exclusive interval (1, 500) — confirm intended.
        verify( x > 1 && x < 500 );
        cmdLine.journalCommitInterval = x;
        log() << "setParameter journalCommitInterval=" << x << endl;
        s++;
    }
    if( cmdObj.hasElement("notablescan") ) {
        // Not meaningful on mongos.
        verify( !cmdLine.isMongos() );
        if( s == 0 )
            result.append("was", cmdLine.noTableScan);
        cmdLine.noTableScan = cmdObj["notablescan"].Bool();
        s++;
    }
    if( cmdObj.hasElement("quiet") ) {
        if( s == 0 )
            result.append("was", cmdLine.quiet );
        cmdLine.quiet = cmdObj["quiet"].Bool();
        s++;
    }
    if( cmdObj.hasElement("syncdelay") ) {
        verify( !cmdLine.isMongos() );
        if( s == 0 )
            result.append("was", cmdLine.syncdelay );
        cmdLine.syncdelay = cmdObj["syncdelay"].Number();
        s++;
    }
    if( cmdObj.hasElement( "logLevel" ) ) {
        if( s == 0 )
            result.append("was", logLevel );
        // NOTE(review): no range validation here, unlike the other numeric options.
        logLevel = cmdObj["logLevel"].numberInt();
        s++;
    }
    if( cmdObj.hasElement( "replApplyBatchSize" ) ) {
        if( s == 0 )
            result.append("was", replApplyBatchSize );
        BSONElement e = cmdObj["replApplyBatchSize"];
        // This parameter has a registered validator; use it before applying.
        ParameterValidator * v = ParameterValidator::get( e.fieldName() );
        verify( v );
        if ( ! v->isValid( e , errmsg ) )
            return false;
        replApplyBatchSize = e.numberInt();
        s++;
    }
    if( cmdObj.hasElement( "traceExceptions" ) ) {
        if( s == 0 )
            result.append( "was", DBException::traceExceptions );
        DBException::traceExceptions = cmdObj["traceExceptions"].Bool();
        s++;
    }
    if( cmdObj.hasElement( "replMonitorMaxFailedChecks" ) ) {
        if( s == 0 )
            result.append( "was", ReplicaSetMonitor::getMaxFailedChecks() );
        ReplicaSetMonitor::setMaxFailedChecks(
                cmdObj["replMonitorMaxFailedChecks"].numberInt() );
        s++;
    }
    // Nothing matched here nor in the mongod-specific handler: report usage hint.
    if( s == 0 && !found ) {
        errmsg = "no option found to set, use help:true to see options ";
        return false;
    }
    return true;
}
void IndexScanNode::computeProperties() { _sorts.clear(); BSONObj sortPattern; { BSONObjBuilder sortBob; BSONObj normalizedIndexKeyPattern(LiteParsedQuery::normalizeSortOrder(indexKeyPattern)); BSONObjIterator it(normalizedIndexKeyPattern); while (it.more()) { BSONElement elt = it.next(); // Zero is returned if elt is not a number. This happens when elt is hashed or // 2dsphere, our two projection indices. We want to drop those from the sort // pattern. int val = elt.numberInt() * direction; if (0 != val) { sortBob.append(elt.fieldName(), val); } } sortPattern = sortBob.obj(); } _sorts.insert(sortPattern); const int nFields = sortPattern.nFields(); if (nFields > 1) { // We're sorted not only by sortPattern but also by all prefixes of it. for (int i = 0; i < nFields; ++i) { // Make obj out of fields [0,i] BSONObjIterator it(sortPattern); BSONObjBuilder prefixBob; for (int j = 0; j <= i; ++j) { prefixBob.append(it.next()); } _sorts.insert(prefixBob.obj()); } } // If we are using the index {a:1, b:1} to answer the predicate {a: 10}, it's sorted // both by the index key pattern and by the pattern {b: 1}. // See if there are any fields with equalities for bounds. We can drop these // from any sort orders created. set<string> equalityFields; if (!bounds.isSimpleRange) { // Figure out how many fields are point intervals. for (size_t i = 0; i < bounds.fields.size(); ++i) { const OrderedIntervalList& oil = bounds.fields[i]; if (oil.intervals.size() != 1) { continue; } const Interval& ival = oil.intervals[0]; if (!ival.isPoint()) { continue; } equalityFields.insert(oil.name); } } if (equalityFields.empty()) { return; } // TODO: Each field in equalityFields could be dropped from the sort order since it is // a point interval. The full set of sort orders is as follows: // For each sort in _sorts: // For each drop in powerset(equalityFields): // Remove fields in 'drop' from 'sort' and add resulting sort to output. 
// Since this involves a powerset, we only remove point intervals that the prior sort // planning code removed, namely the contiguous prefix of the key pattern. BSONObjIterator it(sortPattern); BSONObjBuilder prefixBob; while (it.more()) { BSONElement elt = it.next(); // XXX string slowness. fix when bounds are stringdata not string. if (equalityFields.end() == equalityFields.find(string(elt.fieldName()))) { prefixBob.append(elt); // This field isn't a point interval, can't drop. break; } } while (it.more()) { prefixBob.append(it.next()); } // If we have an index {a:1} and an equality on 'a' don't append an empty sort order. BSONObj filterPointsObj = prefixBob.obj(); if (!filterPointsObj.isEmpty()) { _sorts.insert(filterPointsObj); } }
// Normalizes a user-supplied text-index spec into canonical form: text fields
// in the key pattern are replaced by the {_fts:"text", _ftsx:1} pair and moved
// into a "weights" object, default_language / language_override /
// textIndexVersion are validated and defaulted.  Returns the rewritten spec or
// CannotCreateIndex on any validation failure.  V1 specs are delegated to
// _fixSpecV1 untouched.
StatusWith<BSONObj> FTSSpec::fixSpec(const BSONObj& spec) {
    if (spec["textIndexVersion"].numberInt() == TEXT_INDEX_VERSION_1) {
        return _fixSpecV1(spec);
    }

    // m maps field name -> weight, accumulated from the key pattern and the
    // explicit "weights" option.
    map<string, int> m;

    BSONObj keyPattern;
    {
        BSONObjBuilder b;

        // Populate m and keyPattern.
        {
            // addedFtsStuff tracks whether the {_fts, _ftsx} pair has been
            // emitted (either copied from an already-fixed spec or inserted at
            // the position of the first text field).
            bool addedFtsStuff = false;
            BSONObjIterator i(spec["key"].Obj());
            while (i.more()) {
                BSONElement e = i.next();
                if (e.fieldNameStringData() == "_fts") {
                    // Already-fixed spec: _fts must carry the "text" marker value.
                    if (INDEX_NAME != e.valuestrsafe()) {
                        return {ErrorCodes::CannotCreateIndex, "expecting _fts:\"text\""};
                    }
                    addedFtsStuff = true;
                    b.append(e);
                } else if (e.fieldNameStringData() == "_ftsx") {
                    if (e.numberInt() != 1) {
                        return {ErrorCodes::CannotCreateIndex, "expecting _ftsx:1"};
                    }
                    b.append(e);
                } else if (e.type() == String && INDEX_NAME == e.valuestr()) {
                    // A user text field ({field: "text"}): substitute the fts
                    // marker pair once, then record the field with weight 1.
                    if (!addedFtsStuff) {
                        _addFTSStuff(&b);
                        addedFtsStuff = true;
                    }
                    m[e.fieldName()] = 1;
                } else {
                    // Non-text component of a compound index; must be a plain
                    // ascending/descending key.
                    if (e.numberInt() != 1 && e.numberInt() != -1) {
                        return {ErrorCodes::CannotCreateIndex,
                                "expected value 1 or -1 for non-text key in compound index"};
                    }
                    b.append(e);
                }
            }
            verify(addedFtsStuff);
        }
        keyPattern = b.obj();

        // Verify that index key is in the correct format: extraBefore fields, then text
        // fields, then extraAfter fields.
        {
            BSONObjIterator i(spec["key"].Obj());
            verify(i.more());
            BSONElement e = i.next();

            // extraBefore fields
            while (String != e.type()) {
                Status notReservedStatus = verifyFieldNameNotReserved(e.fieldNameStringData());
                if (!notReservedStatus.isOK()) {
                    return notReservedStatus;
                }

                if (!i.more()) {
                    return {ErrorCodes::CannotCreateIndex,
                            "expected additional fields in text index key pattern"};
                }

                e = i.next();
            }

            // text fields
            bool alreadyFixed = (e.fieldNameStringData() == "_fts");
            if (alreadyFixed) {
                // Expect exactly the {_fts, _ftsx} pair.
                if (!i.more()) {
                    return {ErrorCodes::CannotCreateIndex, "expected _ftsx after _fts"};
                }
                e = i.next();
                if (e.fieldNameStringData() != "_ftsx") {
                    return {ErrorCodes::CannotCreateIndex, "expected _ftsx after _fts"};
                }
                e = i.next();
            } else {
                // Consume the contiguous run of {field: "text"} components.
                // i.next() past the end yields an eoo element, terminating the loop.
                do {
                    Status notReservedStatus = verifyFieldNameNotReserved(e.fieldNameStringData());
                    if (!notReservedStatus.isOK()) {
                        return notReservedStatus;
                    }
                    e = i.next();
                } while (!e.eoo() && e.type() == String);
            }

            // extraAfterFields
            while (!e.eoo()) {
                // A string value here means text fields were not adjacent.
                if (e.type() == BSONType::String) {
                    return {ErrorCodes::CannotCreateIndex,
                            "'text' fields in index must all be adjacent"};
                }
                Status notReservedStatus = verifyFieldNameNotReserved(e.fieldNameStringData());
                if (!notReservedStatus.isOK()) {
                    return notReservedStatus;
                }
                e = i.next();
            }
        }
    }

    // Merge/validate the explicit "weights" option.
    if (spec["weights"].type() == Object) {
        BSONObjIterator i(spec["weights"].Obj());
        while (i.more()) {
            BSONElement e = i.next();
            if (!e.isNumber()) {
                return {ErrorCodes::CannotCreateIndex, "weight for text index needs numeric type"};
            }
            m[e.fieldName()] = e.numberInt();
        }
    } else if (spec["weights"].str() == WILDCARD) {
        m[WILDCARD] = 1;
    } else if (!spec["weights"].eoo()) {
        return {ErrorCodes::CannotCreateIndex, "text index option 'weights' must be an object"};
    }

    if (m.empty()) {
        return {ErrorCodes::CannotCreateIndex,
                "text index option 'weights' must specify fields or the wildcard"};
    }

    // Build the canonical weights object, validating each weight and field path.
    BSONObj weights;
    {
        BSONObjBuilder b;
        for (map<string, int>::iterator i = m.begin(); i != m.end(); ++i) {
            if (i->second <= 0 || i->second >= MAX_WORD_WEIGHT) {
                return {ErrorCodes::CannotCreateIndex,
                        str::stream() << "text index weight must be in the exclusive interval (0,"
                                      << MAX_WORD_WEIGHT << ") but found: " << i->second};
            }

            // Verify weight refers to a valid field.
            if (i->first != "$**") {
                FieldRef keyField(i->first);
                if (keyField.numParts() == 0) {
                    return {ErrorCodes::CannotCreateIndex, "weight cannot be on an empty field"};
                }
                for (size_t partNum = 0; partNum < keyField.numParts(); partNum++) {
                    StringData part = keyField.getPart(partNum);
                    if (part.empty()) {
                        return {ErrorCodes::CannotCreateIndex,
                                "weight cannot have empty path component"};
                    }
                    if (part.startsWith("$")) {
                        return {ErrorCodes::CannotCreateIndex,
                                "weight cannot have path component with $ prefix"};
                    }
                }
            }

            b.append(i->first, i->second);
        }
        weights = b.obj();
    }

    // default_language: default to the module setting when absent.
    BSONElement default_language_elt = spec["default_language"];
    string default_language(default_language_elt.str());
    if (default_language_elt.eoo()) {
        default_language = moduleDefaultLanguage;
    } else if (default_language_elt.type() != BSONType::String) {
        return {ErrorCodes::CannotCreateIndex, "default_language needs a string type"};
    }

    if (!FTSLanguage::make(default_language, TEXT_INDEX_VERSION_3).getStatus().isOK()) {
        return {ErrorCodes::CannotCreateIndex, "default_language is not valid"};
    }

    // language_override: the document field naming a per-document language.
    BSONElement language_override_elt = spec["language_override"];
    string language_override(language_override_elt.str());
    if (language_override_elt.eoo()) {
        language_override = "language";
    } else if (language_override_elt.type() != BSONType::String) {
        return {ErrorCodes::CannotCreateIndex, "language_override must be a string"};
    } else if (!validateOverride(language_override)) {
        return {ErrorCodes::CannotCreateIndex, "language_override is not valid"};
    }

    int version = -1;
    int textIndexVersion = TEXT_INDEX_VERSION_3;  // default text index version

    // Rebuild the spec, substituting the normalized values in place of the
    // originals.  Each substituted value is cleared so the fallthrough appends
    // below only emit options that were absent from the input.
    BSONObjBuilder b;
    BSONObjIterator i(spec);
    while (i.more()) {
        BSONElement e = i.next();
        StringData fieldName = e.fieldNameStringData();
        if (fieldName == "key") {
            b.append("key", keyPattern);
        } else if (fieldName == "weights") {
            b.append("weights", weights);
            weights = BSONObj();
        } else if (fieldName == "default_language") {
            b.append("default_language", default_language);
            default_language = "";
        } else if (fieldName == "language_override") {
            b.append("language_override", language_override);
            language_override = "";
        } else if (fieldName == "v") {
            version = e.numberInt();
        } else if (fieldName == "textIndexVersion") {
            if (!e.isNumber()) {
                return {ErrorCodes::CannotCreateIndex,
                        "text index option 'textIndexVersion' must be a number"};
            }

            textIndexVersion = e.numberInt();
            if (textIndexVersion != TEXT_INDEX_VERSION_2 &&
                textIndexVersion != TEXT_INDEX_VERSION_3) {
                return {ErrorCodes::CannotCreateIndex,
                        str::stream() << "bad textIndexVersion: " << textIndexVersion};
            }
        } else {
            b.append(e);
        }
    }

    // Append any normalized options that the original spec did not contain.
    if (!weights.isEmpty()) {
        b.append("weights", weights);
    }
    if (!default_language.empty()) {
        b.append("default_language", default_language);
    }
    if (!language_override.empty()) {
        b.append("language_override", language_override);
    }
    if (version >= 0) {
        b.append("v", version);
    }
    b.append("textIndexVersion", textIndexVersion);

    return b.obj();
}
// Unit test: verifies AndHashStage behavior when a document is invalidated
// mid-plan during a yield.  Seeds 50 docs with foo==bar==i, intersects
// foo<=20 with bar>=10, invalidates the foo==15 doc while yielded, and
// expects (a) the invalidated doc to appear in the working set's flagged
// list and (b) the final result count to be 10 (the 11 matches minus the
// invalidated one).
void run() {
    Client::WriteContext ctx(ns());
    Database* db = ctx.ctx().db();
    Collection* coll = db->getCollection(ns());
    if (!coll) {
        coll = db->createCollection(ns());
    }
    // 50 documents with foo == bar == i, plus indexes on both fields.
    for (int i = 0; i < 50; ++i) {
        insert(BSON("foo" << i << "bar" << i));
    }

    addIndex(BSON("foo" << 1));
    addIndex(BSON("bar" << 1));

    WorkingSet ws;
    scoped_ptr<AndHashStage> ah(new AndHashStage(&ws, NULL));

    // Foo <= 20
    IndexScanParams params;
    params.descriptor = getIndex(BSON("foo" << 1), coll);
    params.bounds.isSimpleRange = true;
    params.bounds.startKey = BSON("" << 20);
    params.bounds.endKey = BSONObj();
    params.bounds.endKeyInclusive = true;
    params.direction = -1;
    ah->addChild(new IndexScan(params, &ws, NULL));

    // Bar >= 10
    params.descriptor = getIndex(BSON("bar" << 1), coll);
    params.bounds.startKey = BSON("" << 10);
    params.bounds.endKey = BSONObj();
    params.bounds.endKeyInclusive = true;
    params.direction = 1;
    ah->addChild(new IndexScan(params, &ws, NULL));

    // ah reads the first child into its hash table.
    // ah should read foo=20, foo=19, ..., foo=0 in that order.
    // Read half of them...
    for (int i = 0; i < 10; ++i) {
        WorkingSetID out;
        PlanStage::StageState status = ah->work(&out);
        ASSERT_EQUALS(PlanStage::NEED_TIME, status);
    }

    // ...yield
    ah->prepareToYield();
    // ...invalidate one of the read objects
    set<DiskLoc> data;
    getLocs(&data, coll);
    for (set<DiskLoc>::const_iterator it = data.begin(); it != data.end(); ++it) {
        if (it->obj()["foo"].numberInt() == 15) {
            ah->invalidate(*it);
            remove(it->obj());
            break;
        }
    }
    ah->recoverFromYield();

    // And expect to find foo==15 it flagged for review.
    const unordered_set<WorkingSetID>& flagged = ws.getFlagged();
    ASSERT_EQUALS(size_t(1), flagged.size());

    // Expect to find the right value of foo in the flagged item.
    WorkingSetMember* member = ws.get(*flagged.begin());
    ASSERT_TRUE(NULL != member);
    ASSERT_EQUALS(WorkingSetMember::OWNED_OBJ, member->state);
    BSONElement elt;
    ASSERT_TRUE(member->getFieldDotted("foo", &elt));
    ASSERT_EQUALS(15, elt.numberInt());

    // Now, finish up the AND.  Since foo == bar, we would have 11 results, but we subtract
    // one because of a mid-plan invalidation, so 10.
    int count = 0;
    while (!ah->isEOF()) {
        WorkingSetID id;
        PlanStage::StageState status = ah->work(&id);
        if (PlanStage::ADVANCED != status) {
            continue;
        }

        ++count;
        member = ws.get(id);

        // Every result must be in [foo<=20] ∩ [bar>=10], and never the removed doc.
        ASSERT_TRUE(member->getFieldDotted("foo", &elt));
        ASSERT_LESS_THAN_OR_EQUALS(elt.numberInt(), 20);
        ASSERT_NOT_EQUALS(15, elt.numberInt());
        ASSERT_TRUE(member->getFieldDotted("bar", &elt));
        ASSERT_GREATER_THAN_OR_EQUALS(elt.numberInt(), 10);
    }

    ASSERT_EQUALS(10, count);
}
// Handles queries against the pseudo-namespaces "<db>.$cmd.sys.{inprog,killop,unlock}"
// on mongos by fanning out to shards (inprog), routing a kill to the right
// shard (killop), or rejecting (unlock).  Returns true if the request was one
// of these and a reply was sent; false to let normal query handling proceed.
bool handleSpecialNamespaces( Request& r , QueryMessage& q ) {
    const char * ns = strstr( r.getns() , ".$cmd.sys." );
    if ( ! ns )
        return false;
    // Skip past ".$cmd.sys." (10 characters) to the sub-command name.
    ns += 10;

    BSONObjBuilder b;
    vector<Shard> shards;

    ClientBasic* client = ClientBasic::getCurrent();
    AuthorizationSession* authSession = client->getAuthorizationSession();
    if ( strcmp( ns , "inprog" ) == 0 ) {
        const bool isAuthorized = authSession->isAuthorizedForActionsOnResource(
                ResourcePattern::forClusterResource(), ActionType::inprog);
        audit::logInProgAuthzCheck(
                client, q.query, isAuthorized ? ErrorCodes::OK : ErrorCodes::Unauthorized);
        uassert(ErrorCodes::Unauthorized, "not authorized to run inprog", isAuthorized);

        Shard::getAllShards( shards );

        BSONArrayBuilder arr( b.subarrayStart( "inprog" ) );

        // Query every shard and merge their "inprog" arrays, rewriting each
        // entry so opids and client addresses are shard-qualified.
        for ( unsigned i=0; i<shards.size(); i++ ) {
            Shard shard = shards[i];
            ScopedDbConnection conn(shard.getConnString());
            BSONObj temp = conn->findOne( r.getns() , q.query );
            if ( temp["inprog"].isABSONObj() ) {
                BSONObjIterator i( temp["inprog"].Obj() );
                while ( i.more() ) {
                    BSONObjBuilder x;

                    BSONObjIterator j( i.next().Obj() );
                    while( j.more() ) {
                        BSONElement e = j.next();
                        if ( str::equals( e.fieldName() , "opid" ) ) {
                            // Prefix the opid with the shard name so killop can
                            // route it back ("shard:opid").
                            stringstream ss;
                            ss << shard.getName() << ':' << e.numberInt();
                            x.append( "opid" , ss.str() );
                        }
                        else if ( str::equals( e.fieldName() , "client" ) ) {
                            // Rename to client_s: address is shard-relative.
                            x.appendAs( e , "client_s" );
                        }
                        else {
                            x.append( e );
                        }
                    }
                    arr.append( x.obj() );
                }
            }
            conn.done();
        }

        arr.done();
    }
    else if ( strcmp( ns , "killop" ) == 0 ) {
        const bool isAuthorized = authSession->isAuthorizedForActionsOnResource(
                ResourcePattern::forClusterResource(), ActionType::killop);
        audit::logKillOpAuthzCheck(
                client, q.query, isAuthorized ? ErrorCodes::OK : ErrorCodes::Unauthorized);
        uassert(ErrorCodes::Unauthorized, "not authorized to run killop", isAuthorized);

        // "op" must be the "shard:opid" string produced by inprog above.
        BSONElement e = q.query["op"];
        if ( e.type() != String ) {
            b.append( "err" , "bad op" );
            b.append( e );
        }
        else {
            b.append( e );
            string s = e.String();
            string::size_type i = s.find( ':' );
            if ( i == string::npos ) {
                b.append( "err" , "bad opid" );
            }
            else {
                string shard = s.substr( 0 , i );
                // NOTE(review): atoi does no error checking, and 'Shard s'
                // below shadows the outer 'string s' — legal but confusing.
                int opid = atoi( s.substr( i + 1 ).c_str() );
                b.append( "shard" , shard );
                b.append( "shardid" , opid );

                log() << "want to kill op: " << e << endl;
                Shard s(shard);

                ScopedDbConnection conn(s.getConnString());
                conn->findOne( r.getns() , BSON( "op" << opid ) );
                conn.done();
            }
        }
    }
    else if ( strcmp( ns , "unlock" ) == 0 ) {
        b.append( "err" , "can't do unlock through mongos" );
    }
    else {
        warning() << "unknown sys command [" << ns << "]" << endl;
        return false;
    }

    BSONObj x = b.done();
    replyToQuery(0, r.p(), r.m(), x);
    return true;
}
/**
 * Executes the geoNear command: finds documents near a point using a 2d or
 * 2dsphere index, returning them sorted by distance together with per-query
 * statistics.
 *
 * The command is rewritten into a regular $near/$nearSphere query with $meta
 * projections for the computed point and distance, then run through the
 * query system.
 *
 * On failure, sets 'errmsg' and returns false; on success fills 'result'
 * with a "results" array and a "stats" subobject and returns true.
 */
bool run(const string& dbname, BSONObj& cmdObj, int, string& errmsg,
         BSONObjBuilder& result, bool fromRepl) {
    string ns = dbname + "." + cmdObj.firstElement().valuestr();

    // The legacy 'start' option is rejected outright.
    if (!cmdObj["start"].eoo()) {
        errmsg = "using deprecated 'start' argument to geoNear";
        return false;
    }

    Database* db = cc().database();
    if ( !db ) {
        errmsg = "can't find ns";
        return false;
    }

    Collection* collection = db->getCollection( ns );
    if ( !collection ) {
        errmsg = "can't find ns";
        return false;
    }

    IndexCatalog* indexCatalog = collection->getIndexCatalog();

    // cout << "raw cmd " << cmdObj.toString() << endl;

    // We seek to populate this: which field is geo-indexed, and whether the
    // index is 2d (vs. 2dsphere).
    string nearFieldName;
    bool using2DIndex = false;
    if (!getFieldName(collection, indexCatalog, &nearFieldName, &errmsg, &using2DIndex)) {
        return false;
    }

    uassert(17304, "'near' field must be point",
            !cmdObj["near"].eoo() && cmdObj["near"].isABSONObj()
            && GeoParser::isPoint(cmdObj["near"].Obj()));

    bool isSpherical = cmdObj["spherical"].trueValue();
    // A 2dsphere index only supports spherical queries.
    if (!using2DIndex) {
        uassert(17301, "2dsphere index must have spherical: true", isSpherical);
    }

    // Build the $near expression for the query.
    BSONObjBuilder nearBob;
    if (isSpherical) {
        nearBob.append("$nearSphere", cmdObj["near"].Obj());
    }
    else {
        nearBob.append("$near", cmdObj["near"].Obj());
    }

    if (!cmdObj["maxDistance"].eoo()) {
        uassert(17299, "maxDistance must be a number",cmdObj["maxDistance"].isNumber());
        nearBob.append("$maxDistance", cmdObj["maxDistance"].number());
    }

    if (!cmdObj["minDistance"].eoo()) {
        // minDistance is only implemented for 2dsphere.
        uassert(17298, "minDistance doesn't work on 2d index", !using2DIndex);
        uassert(17300, "minDistance must be a number",cmdObj["minDistance"].isNumber());
        nearBob.append("$minDistance", cmdObj["minDistance"].number());
    }

    if (!cmdObj["uniqueDocs"].eoo()) {
        nearBob.append("$uniqueDocs", cmdObj["uniqueDocs"].trueValue());
    }

    // And, build the full query expression: the $near predicate on the
    // indexed field, merged with any user-supplied filter.
    BSONObjBuilder queryBob;
    queryBob.append(nearFieldName, nearBob.obj());
    if (!cmdObj["query"].eoo() && cmdObj["query"].isABSONObj()) {
        queryBob.appendElements(cmdObj["query"].Obj());
    }
    BSONObj rewritten = queryBob.obj();

    // cout << "rewritten query: " << rewritten.toString() << endl;

    // 'num' takes precedence over 'limit'; default is 100 results.
    int numWanted = 100;
    const char* limitName = !cmdObj["num"].eoo() ? "num" : "limit";
    BSONElement eNumWanted = cmdObj[limitName];
    if (!eNumWanted.eoo()) {
        uassert(17303, "limit must be number", eNumWanted.isNumber());
        numWanted = eNumWanted.numberInt();
        uassert(17302, "limit must be >=0", numWanted >= 0);
    }

    bool includeLocs = false;
    if (!cmdObj["includeLocs"].eoo()) {
        includeLocs = cmdObj["includeLocs"].trueValue();
    }

    double distanceMultiplier = 1.0;
    BSONElement eDistanceMultiplier = cmdObj["distanceMultiplier"];
    if (!eDistanceMultiplier.eoo()) {
        uassert(17296, "distanceMultiplier must be a number", eDistanceMultiplier.isNumber());
        distanceMultiplier = eDistanceMultiplier.number();
        uassert(17297, "distanceMultiplier must be non-negative", distanceMultiplier >= 0);
    }

    // Project the geoNear point and distance as $meta fields so we can read
    // them off each returned document.
    BSONObj projObj = BSON("$pt" << BSON("$meta" << LiteParsedQuery::metaGeoNearPoint)
                           << "$dis" << BSON("$meta" << LiteParsedQuery::metaGeoNearDistance));

    CanonicalQuery* cq;
    if (!CanonicalQuery::canonicalize(ns, rewritten, BSONObj(), projObj, 0, numWanted,
                                      BSONObj(), &cq).isOK()) {
        errmsg = "Can't parse filter / create query";
        return false;
    }

    Runner* rawRunner;
    if (!getRunner(cq, &rawRunner, 0).isOK()) {
        errmsg = "can't get query runner";
        return false;
    }

    auto_ptr<Runner> runner(rawRunner);

    double totalDistance = 0;
    BSONObjBuilder resultBuilder(result.subarrayStart("results"));
    double farthestDist = 0;

    BSONObj currObj;
    int results = 0;
    while ((results < numWanted)
           && Runner::RUNNER_ADVANCED == runner->getNext(&currObj, NULL)) {
        // cout << "result is " << currObj.toString() << endl;

        double dist = currObj["$dis"].number() * distanceMultiplier;

        // cout << std::setprecision(10) << "HK GEON mul'd dist is " << dist
        //      << " raw dist is " << currObj["$dis"].number() << endl;

        totalDistance += dist;
        if (dist > farthestDist) { farthestDist = dist; }

        // Each result is a subobject keyed by its ordinal position.
        BSONObjBuilder oneResultBuilder(
            resultBuilder.subobjStart(BSONObjBuilder::numStr(results)));
        oneResultBuilder.append("dis", dist);
        if (includeLocs) {
            oneResultBuilder.appendAs(currObj["$pt"], "loc");
        }

        // strip out '$dis' and '$pt' and the rest gets added as 'obj'.
        BSONObjIterator resIt(currObj);
        BSONObjBuilder resBob;
        while (resIt.more()) {
            BSONElement elt = resIt.next();
            if (!mongoutils::str::equals("$pt", elt.fieldName())
                && !mongoutils::str::equals("$dis", elt.fieldName())) {
                resBob.append(elt);
            }
        }
        oneResultBuilder.append("obj", resBob.obj());
        oneResultBuilder.done();
        ++results;
    }

    resultBuilder.done();

    // Fill out the stats subobj.
    BSONObjBuilder stats(result.subobjStart("stats"));

    // Fill in nscanned from the explain.
    TypeExplain* bareExplain;
    Status res = runner->getExplainPlan(&bareExplain);
    if (res.isOK()) {
        auto_ptr<TypeExplain> explain(bareExplain);
        stats.append("nscanned", explain->getNScanned());
        stats.append("objectsLoaded", explain->getNScannedObjects());
    }

    // NOTE(review): if zero results were returned this divides by zero,
    // yielding an IEEE NaN for avgDistance — matches long-standing behavior.
    stats.append("avgDistance", totalDistance / results);
    stats.append("maxDistance", farthestDist);
    stats.append("time", cc().curop()->elapsedMillis());
    stats.done();

    return true;
}
/**
 * Parses the "settings" subdocument of a replica set configuration into
 * this ReplicaSetConfig's member fields.
 *
 * Recognized fields: heartbeatIntervalMillis, electionTimeoutMillis,
 * heartbeatTimeoutSecs, chainingAllowed, getLastErrorDefaults, and
 * getLastErrorModes.  Missing fields fall back to their documented defaults.
 *
 * Returns Status::OK() on success; TypeMismatch/BadValue/DuplicateKey on
 * malformed input.
 */
Status ReplicaSetConfig::_parseSettingsSubdocument(const BSONObj& settings) {
    //
    // Parse heartbeatIntervalMillis
    //
    long long heartbeatIntervalMillis;
    Status hbIntervalStatus =
        bsonExtractIntegerFieldWithDefault(settings,
                                           kHeartbeatIntervalFieldName,
                                           durationCount<Milliseconds>(kDefaultHeartbeatInterval),
                                           &heartbeatIntervalMillis);
    if (!hbIntervalStatus.isOK()) {
        return hbIntervalStatus;
    }
    _heartbeatInterval = Milliseconds(heartbeatIntervalMillis);

    //
    // Parse electionTimeoutMillis
    //
    BSONElement electionTimeoutMillisElement = settings[kElectionTimeoutFieldName];
    if (electionTimeoutMillisElement.eoo()) {
        _electionTimeoutPeriod = Milliseconds(kDefaultElectionTimeoutPeriod);
    } else if (electionTimeoutMillisElement.isNumber()) {
        // NOTE(review): numberInt() truncates a numeric value to 32 bits;
        // presumably timeouts are expected to fit — confirm against callers.
        _electionTimeoutPeriod = Milliseconds(electionTimeoutMillisElement.numberInt());
    } else {
        return Status(ErrorCodes::TypeMismatch,
                      str::stream() << "Expected type of " << kSettingsFieldName << "."
                                    << kElectionTimeoutFieldName
                                    << " to be a number, but found a value of type "
                                    << typeName(electionTimeoutMillisElement.type()));
    }

    //
    // Parse heartbeatTimeoutSecs
    //
    BSONElement hbTimeoutSecsElement = settings[kHeartbeatTimeoutFieldName];
    if (hbTimeoutSecsElement.eoo()) {
        _heartbeatTimeoutPeriod = Seconds(kDefaultHeartbeatTimeoutPeriod);
    } else if (hbTimeoutSecsElement.isNumber()) {
        _heartbeatTimeoutPeriod = Seconds(hbTimeoutSecsElement.numberInt());
    } else {
        return Status(ErrorCodes::TypeMismatch,
                      str::stream() << "Expected type of " << kSettingsFieldName << "."
                                    << kHeartbeatTimeoutFieldName
                                    << " to be a number, but found a value of type "
                                    << typeName(hbTimeoutSecsElement.type()));
    }

    //
    // Parse chainingAllowed
    //
    Status status = bsonExtractBooleanFieldWithDefault(
        settings, kChainingAllowedFieldName, true, &_chainingAllowed);
    if (!status.isOK())
        return status;

    //
    // Parse getLastErrorDefaults
    //
    BSONElement gleDefaultsElement;
    status = bsonExtractTypedField(
        settings, kGetLastErrorDefaultsFieldName, Object, &gleDefaultsElement);
    if (status.isOK()) {
        status = _defaultWriteConcern.parse(gleDefaultsElement.Obj());
        if (!status.isOK())
            return status;
    } else if (status == ErrorCodes::NoSuchKey) {
        // Default write concern is w: 1.
        _defaultWriteConcern.reset();
        _defaultWriteConcern.wNumNodes = 1;
    } else {
        return status;
    }

    //
    // Parse getLastErrorModes
    //
    BSONElement gleModesElement;
    status = bsonExtractTypedField(settings, kGetLastErrorModesFieldName, Object, &gleModesElement);
    BSONObj gleModes;
    if (status.isOK()) {
        gleModes = gleModesElement.Obj();
    } else if (status != ErrorCodes::NoSuchKey) {
        // NoSuchKey simply means no custom modes; anything else is an error.
        return status;
    }
    for (BSONObj::iterator gleModeIter(gleModes); gleModeIter.more();) {
        const BSONElement modeElement = gleModeIter.next();
        // Reject a mode name that appears more than once.
        if (_customWriteConcernModes.find(modeElement.fieldNameStringData()) !=
            _customWriteConcernModes.end()) {
            return Status(ErrorCodes::DuplicateKey,
                          str::stream() << kSettingsFieldName << '.'
                                        << kGetLastErrorModesFieldName
                                        << " contains multiple fields named "
                                        << modeElement.fieldName());
        }
        if (modeElement.type() != Object) {
            return Status(ErrorCodes::TypeMismatch,
                          str::stream() << "Expected " << kSettingsFieldName << '.'
                                        << kGetLastErrorModesFieldName << '.'
                                        << modeElement.fieldName() << " to be an Object, not "
                                        << typeName(modeElement.type()));
        }
        // Each mode maps tag names to a minimum count; build the tag pattern.
        ReplicaSetTagPattern pattern = _tagConfig.makePattern();
        for (BSONObj::iterator constraintIter(modeElement.Obj()); constraintIter.more();) {
            const BSONElement constraintElement = constraintIter.next();
            if (!constraintElement.isNumber()) {
                return Status(ErrorCodes::TypeMismatch,
                              str::stream() << "Expected " << kSettingsFieldName << '.'
                                            << kGetLastErrorModesFieldName << '.'
                                            << modeElement.fieldName() << '.'
                                            << constraintElement.fieldName()
                                            << " to be a number, not "
                                            << typeName(constraintElement.type()));
            }
            const int minCount = constraintElement.numberInt();
            if (minCount <= 0) {
                return Status(ErrorCodes::BadValue,
                              str::stream() << "Value of " << kSettingsFieldName << '.'
                                            << kGetLastErrorModesFieldName << '.'
                                            << modeElement.fieldName() << '.'
                                            << constraintElement.fieldName()
                                            << " must be positive, but found " << minCount);
            }
            status = _tagConfig.addTagCountConstraintToPattern(
                &pattern, constraintElement.fieldNameStringData(), minCount);
            if (!status.isOK()) {
                return status;
            }
        }
        _customWriteConcernModes[modeElement.fieldNameStringData()] = pattern;
    }
    return Status::OK();
}
/**
 * Populates this ReplSetHeartbeatResponse from the BSON reply 'doc' of a
 * replSetHeartbeat command.
 *
 * Tolerates several quirks of responses produced by older server versions
 * (e.g. a "mismatch" flag, or a set name accompanied by ok:0).  Each optional
 * field has a matching "_<field>Set" flag recording whether it was present.
 *
 * Returns Status::OK() on success, or a descriptive TypeMismatch / BadValue /
 * NoSuchKey / propagated error status on malformed input.
 */
Status ReplSetHeartbeatResponse::initialize(const BSONObj& doc) {
    // Old versions set this even though they returned not "ok"
    _mismatch = doc[kMismatchFieldName].trueValue();
    if (_mismatch)
        return Status(ErrorCodes::InconsistentReplicaSetNames, "replica set name doesn't match.");

    // Old versions sometimes set the replica set name ("set") but ok:0
    const BSONElement replSetNameElement = doc[kReplSetFieldName];
    if (replSetNameElement.eoo()) {
        _setName.clear();
    } else if (replSetNameElement.type() != String) {
        return Status(ErrorCodes::TypeMismatch,
                      str::stream() << "Expected \"" << kReplSetFieldName
                                    << "\" field in response to replSetHeartbeat to have "
                                       "type String, but found "
                                    << typeName(replSetNameElement.type()));
    } else {
        _setName = replSetNameElement.String();
    }

    // No set name plus ok:0 means the remote reported a command-level error;
    // surface its error code/message as our status.
    if (_setName.empty() && !doc[kOkFieldName].trueValue()) {
        std::string errMsg = doc[kErrMsgFieldName].str();

        BSONElement errCodeElem = doc[kErrorCodeFieldName];
        if (errCodeElem.ok()) {
            if (!errCodeElem.isNumber())
                return Status(ErrorCodes::BadValue, "Error code is not a number!");

            int errorCode = errCodeElem.numberInt();
            return Status(ErrorCodes::Error(errorCode), errMsg);
        }
        return Status(ErrorCodes::UnknownError, errMsg);
    }

    const BSONElement hasDataElement = doc[kHasDataFieldName];
    _hasDataSet = !hasDataElement.eoo();
    _hasData = hasDataElement.trueValue();

    // Election time may arrive as either a Timestamp (opTime encoding) or a
    // Date, depending on the sender's version.
    const BSONElement electionTimeElement = doc[kElectionTimeFieldName];
    if (electionTimeElement.eoo()) {
        _electionTimeSet = false;
    } else if (electionTimeElement.type() == Timestamp) {
        _electionTimeSet = true;
        _electionTime = electionTimeElement._opTime();
    } else if (electionTimeElement.type() == Date) {
        _electionTimeSet = true;
        _electionTime = OpTime(electionTimeElement.date());
    } else {
        return Status(ErrorCodes::TypeMismatch,
                      str::stream() << "Expected \"" << kElectionTimeFieldName
                                    << "\" field in response to replSetHeartbeat "
                                       "command to have type Date or Timestamp, but found type "
                                    << typeName(electionTimeElement.type()));
    }

    // "time" is a duration in seconds.
    const BSONElement timeElement = doc[kTimeFieldName];
    if (timeElement.eoo()) {
        _timeSet = false;
    } else if (timeElement.isNumber()) {
        _timeSet = true;
        _time = Seconds(timeElement.numberLong());
    } else {
        return Status(ErrorCodes::TypeMismatch,
                      str::stream() << "Expected \"" << kTimeFieldName
                                    << "\" field in response to replSetHeartbeat "
                                       "command to have a numeric type, but found type "
                                    << typeName(timeElement.type()));
    }

    // Like electionTime, opTime may be encoded as Timestamp or Date.
    const BSONElement opTimeElement = doc[kOpTimeFieldName];
    if (opTimeElement.eoo()) {
        _opTimeSet = false;
    } else if (opTimeElement.type() == Timestamp) {
        _opTimeSet = true;
        _opTime = opTimeElement._opTime();
    } else if (opTimeElement.type() == Date) {
        _opTimeSet = true;
        _opTime = OpTime(opTimeElement.date());
    } else {
        return Status(ErrorCodes::TypeMismatch,
                      str::stream() << "Expected \"" << kOpTimeFieldName
                                    << "\" field in response to replSetHeartbeat "
                                       "command to have type Date or Timestamp, but found type "
                                    << typeName(opTimeElement.type()));
    }

    const BSONElement electableElement = doc[kIsElectableFieldName];
    if (electableElement.eoo()) {
        _electableSet = false;
    } else {
        _electableSet = true;
        _electable = electableElement.trueValue();
    }

    _isReplSet = doc[kIsReplSetFieldName].trueValue();

    // Member state must be an integer within [0, MemberState::RS_MAX].
    const BSONElement memberStateElement = doc[kMemberStateFieldName];
    if (memberStateElement.eoo()) {
        _stateSet = false;
    } else if (memberStateElement.type() != NumberInt && memberStateElement.type() != NumberLong) {
        return Status(ErrorCodes::TypeMismatch,
                      str::stream()
                          << "Expected \"" << kMemberStateFieldName
                          << "\" field in response to replSetHeartbeat "
                             "command to have type NumberInt or NumberLong, but found type "
                          << typeName(memberStateElement.type()));
    } else {
        long long stateInt = memberStateElement.numberLong();
        if (stateInt < 0 || stateInt > MemberState::RS_MAX) {
            return Status(ErrorCodes::BadValue,
                          str::stream()
                              << "Value for \"" << kMemberStateFieldName
                              << "\" in response to replSetHeartbeat is "
                                 "out of range; legal values are non-negative and no more than "
                              << MemberState::RS_MAX);
        }
        _stateSet = true;
        _state = MemberState(static_cast<int>(stateInt));
    }

    _stateDisagreement = doc[kHasStateDisagreementFieldName].trueValue();

    // Not required for the case of uninitialized members -- they have no config
    const BSONElement versionElement = doc[kConfigVersionFieldName];

    // If we have an optime then we must have a version
    if (_opTimeSet && versionElement.eoo()) {
        return Status(ErrorCodes::NoSuchKey,
                      str::stream() << "Response to replSetHeartbeat missing required \""
                                    << kConfigVersionFieldName
                                    << "\" field even though initialized");
    }

    // If there is a "v" (config version) then it must be an int.
    if (!versionElement.eoo() && versionElement.type() != NumberInt) {
        return Status(ErrorCodes::TypeMismatch,
                      str::stream() << "Expected \"" << kConfigVersionFieldName
                                    << "\" field in response to replSetHeartbeat to have "
                                       "type NumberInt, but found "
                                    << typeName(versionElement.type()));
    }
    _version = versionElement.numberInt();

    const BSONElement hbMsgElement = doc[kHbMessageFieldName];
    if (hbMsgElement.eoo()) {
        _hbmsg.clear();
    } else if (hbMsgElement.type() != String) {
        return Status(ErrorCodes::TypeMismatch,
                      str::stream() << "Expected \"" << kHbMessageFieldName
                                    << "\" field in response to replSetHeartbeat to have "
                                       "type String, but found "
                                    << typeName(hbMsgElement.type()));
    } else {
        _hbmsg = hbMsgElement.String();
    }

    const BSONElement syncingToElement = doc[kSyncSourceFieldName];
    if (syncingToElement.eoo()) {
        _syncingTo.clear();
    } else if (syncingToElement.type() != String) {
        return Status(ErrorCodes::TypeMismatch,
                      str::stream() << "Expected \"" << kSyncSourceFieldName
                                    << "\" field in response to replSetHeartbeat to "
                                       "have type String, but found "
                                    << typeName(syncingToElement.type()));
    } else {
        _syncingTo = syncingToElement.String();
    }

    // An embedded config, if present, is parsed last; its parse status becomes
    // our return value.
    const BSONElement rsConfigElement = doc[kConfigFieldName];
    if (rsConfigElement.eoo()) {
        _configSet = false;
        _config = ReplicaSetConfig();
        return Status::OK();
    } else if (rsConfigElement.type() != Object) {
        return Status(ErrorCodes::TypeMismatch,
                      str::stream() << "Expected \"" << kConfigFieldName
                                    << "\" in response to replSetHeartbeat to have type "
                                       "Object, but found "
                                    << typeName(rsConfigElement.type()));
    }
    _configSet = true;
    return _config.initialize(rsConfigElement.Obj());
}
// static Status CanonicalQuery::isValid(MatchExpression* root, const QueryRequest& parsed) { // Analysis below should be done after squashing the tree to make it clearer. // There can only be one TEXT. If there is a TEXT, it cannot appear inside a NOR. // // Note that the query grammar (as enforced by the MatchExpression parser) forbids TEXT // inside of value-expression clauses like NOT, so we don't check those here. size_t numText = countNodes(root, MatchExpression::TEXT); if (numText > 1) { return Status(ErrorCodes::BadValue, "Too many text expressions"); } else if (1 == numText) { if (hasNodeInSubtree(root, MatchExpression::TEXT, MatchExpression::NOR)) { return Status(ErrorCodes::BadValue, "text expression not allowed in nor"); } } // There can only be one NEAR. If there is a NEAR, it must be either the root or the root // must be an AND and its child must be a NEAR. size_t numGeoNear = countNodes(root, MatchExpression::GEO_NEAR); if (numGeoNear > 1) { return Status(ErrorCodes::BadValue, "Too many geoNear expressions"); } else if (1 == numGeoNear) { bool topLevel = false; if (MatchExpression::GEO_NEAR == root->matchType()) { topLevel = true; } else if (MatchExpression::AND == root->matchType()) { for (size_t i = 0; i < root->numChildren(); ++i) { if (MatchExpression::GEO_NEAR == root->getChild(i)->matchType()) { topLevel = true; break; } } } if (!topLevel) { return Status(ErrorCodes::BadValue, "geoNear must be top-level expr"); } } // NEAR cannot have a $natural sort or $natural hint. 
const BSONObj& sortObj = parsed.getSort(); BSONElement sortNaturalElt = sortObj["$natural"]; const BSONObj& hintObj = parsed.getHint(); BSONElement hintNaturalElt = hintObj["$natural"]; if (numGeoNear > 0) { if (sortNaturalElt) { return Status(ErrorCodes::BadValue, "geoNear expression not allowed with $natural sort order"); } if (hintNaturalElt) { return Status(ErrorCodes::BadValue, "geoNear expression not allowed with $natural hint"); } } // TEXT and NEAR cannot both be in the query. if (numText > 0 && numGeoNear > 0) { return Status(ErrorCodes::BadValue, "text and geoNear not allowed in same query"); } // TEXT and {$natural: ...} sort order cannot both be in the query. if (numText > 0 && sortNaturalElt) { return Status(ErrorCodes::BadValue, "text expression not allowed with $natural sort order"); } // TEXT and hint cannot both be in the query. if (numText > 0 && !hintObj.isEmpty()) { return Status(ErrorCodes::BadValue, "text and hint not allowed in same query"); } // TEXT and tailable are incompatible. if (numText > 0 && parsed.isTailable()) { return Status(ErrorCodes::BadValue, "text and tailable cursor not allowed in same query"); } // $natural sort order must agree with hint. if (sortNaturalElt) { if (!hintObj.isEmpty() && !hintNaturalElt) { return Status(ErrorCodes::BadValue, "index hint not allowed with $natural sort order"); } if (hintNaturalElt) { if (hintNaturalElt.numberInt() != sortNaturalElt.numberInt()) { return Status(ErrorCodes::BadValue, "$natural hint must be in the same direction as $natural sort order"); } } } return Status::OK(); }
/**
 * Normalizes a text-index spec into its canonical on-disk form.
 *
 * - Rewrites "fts"/"text"-valued key fields into the internal _fts/_ftsx
 *   key pattern (via _addFTSStuff), recording them as weight-1 fields.
 * - Merges explicit "weights" (object or the wildcard string) into a single
 *   sorted weights document.
 * - Fills in defaults: default_language "english", language_override
 *   "language".
 * - Preserves all other spec fields in their original order, and always
 *   appends a "v" version field (taken from the spec, else 0).
 */
BSONObj FTSSpec::fixSpec( const BSONObj& spec ) {
    // Accumulates field-name -> weight for every text-indexed field.
    map<string,int> m;

    BSONObj keyPattern;
    {
        BSONObjBuilder b;
        bool addedFtsStuff = false;

        BSONObjIterator i( spec["key"].Obj() );
        while ( i.more() ) {
            BSONElement e = i.next();
            if ( str::equals( e.fieldName(), "_fts" ) ||
                 str::equals( e.fieldName(), "_ftsx" ) ) {
                // Internal fields are regenerated below; drop any incoming ones.
                continue;
            }
            else if ( e.type() == String &&
                      ( str::equals( "fts", e.valuestr() ) ||
                        str::equals( "text", e.valuestr() ) ) ) {
                // A text field: substitute the internal key fields once, and
                // give this field a default weight of 1.
                if ( !addedFtsStuff ) {
                    _addFTSStuff( &b );
                    addedFtsStuff = true;
                }
                m[e.fieldName()] = 1;
            }
            else {
                // Non-text key component passes through unchanged.
                b.append( e );
            }
        }

        // Even a spec with no text fields gets the internal key fields.
        if ( !addedFtsStuff )
            _addFTSStuff( &b );

        keyPattern = b.obj();
    }

    if ( spec["weights"].isABSONObj() ) {
        // Explicit weights override/extend the defaults collected above.
        BSONObjIterator i( spec["weights"].Obj() );
        while ( i.more() ) {
            BSONElement e = i.next();
            m[e.fieldName()] = e.numberInt();
        }
    }
    else if ( spec["weights"].str() == WILDCARD ) {
        m[WILDCARD] = 1;
    }

    // Rebuild weights in sorted (map) order for a canonical representation.
    BSONObj weights;
    {
        BSONObjBuilder b;
        for ( map<string,int>::iterator i = m.begin(); i != m.end(); ++i )
            b.append( i->first, i->second );
        weights = b.obj();
    }

    string default_language(spec.getStringField("default_language"));
    if ( default_language.empty() )
        default_language = "english";

    string language_override(spec.getStringField("language_override"));
    if ( language_override.empty() )
        language_override = "language";

    int version = 0;

    // Copy the spec, substituting the normalized values in place.  Each
    // substituted local is cleared so the trailing appends below only fire
    // for fields that were absent from the original spec.
    BSONObjBuilder b;
    BSONObjIterator i( spec );
    while ( i.more() ) {
        BSONElement e = i.next();
        if ( str::equals( e.fieldName(), "key" ) ) {
            b.append( "key", keyPattern );
        }
        else if ( str::equals( e.fieldName(), "weights" ) ) {
            b.append( "weights", weights );
            weights = BSONObj();
        }
        else if ( str::equals( e.fieldName(), "default_language" ) ) {
            b.append( "default_language", default_language);
            default_language = "";
        }
        else if ( str::equals( e.fieldName(), "language_override" ) ) {
            b.append( "language_override", language_override);
            language_override = "";
        }
        else if ( str::equals( e.fieldName(), "v" ) ) {
            version = e.numberInt();
        }
        else {
            b.append( e );
        }
    }

    if ( !weights.isEmpty() )
        b.append( "weights", weights );
    if ( !default_language.empty() )
        b.append( "default_language", default_language);
    if ( !language_override.empty() )
        b.append( "language_override", language_override);

    b.append( "v", version );

    return b.obj();
}
///PD_TRACE_DECLARE_FUNCTION ( SDB__MTHSUBSTRPARSER_PARSE, "_mthSubStrParser::parse" ) INT32 _mthSubStrParser::parse( const bson::BSONElement &e, _mthSAction &action ) const { INT32 rc = SDB_OK ; PD_TRACE_ENTRY( SDB__MTHSUBSTRPARSER_PARSE ) ; INT32 begin = 0 ; INT32 limit = -1 ; #if defined (_DEBUG) if ( 0 != _name.compare( e.fieldName() ) ) { PD_LOG( PDERROR, "field name[%s] is not valid", e.fieldName() ) ; rc = SDB_INVALIDARG ; goto error ; } #endif if ( e.isNumber() && 0 <= e.numberInt()) { limit = e.numberInt() ; } else if ( e.isNumber() ) { begin = e.numberInt() ; } else if ( Array == e.type() ) { BSONObjIterator i( e.embeddedObject() ) ; BSONElement ele ; if ( !i.more() ) { goto invalid_arg ; } ele = i.next() ; if ( !ele.isNumber() ) { goto invalid_arg ; } begin = ele.numberInt() ; if ( !i.more() ) { goto invalid_arg ; } ele = i.next() ; if ( !ele.isNumber() ) { goto invalid_arg ; } limit = ele.numberInt() ; } else { PD_LOG( PDERROR, "invalid element" ) ; rc = SDB_INVALIDARG ; goto error ; } action.setAttribute( MTH_S_ATTR_PROJECTION ) ; action.setFunc( &mthSubStrBuild, &mthSubStrGet ) ; action.setName( _name.c_str() ) ; action.setArg( BSON( "arg1" << begin << "arg2" << limit ) ) ; done: PD_TRACE_EXITRC( SDB__MTHSUBSTRPARSER_PARSE, rc ) ; return rc ; error: goto done ; invalid_arg: PD_LOG( PDERROR, "invalid substr argument:%s", e.toString( TRUE, TRUE ).c_str() ) ; rc = SDB_INVALIDARG ; goto error ; }
INT32 _omAgentNodeMgr::_addANode( const CHAR *arg1, const CHAR *arg2, BOOLEAN needLock, BOOLEAN isModify, string *omsvc ) { INT32 rc = SDB_OK ; const CHAR *pSvcName = NULL ; const CHAR *pDBPath = NULL ; string otherCfg ; CHAR dbPath[ OSS_MAX_PATHSIZE + 1 ] = { 0 } ; CHAR cfgPath[ OSS_MAX_PATHSIZE + 1 ] = { 0 } ; CHAR cfgFile[ OSS_MAX_PATHSIZE + 1 ] = { 0 } ; BOOLEAN createDBPath = FALSE ; BOOLEAN createCfgPath = FALSE ; BOOLEAN createCfgFile = FALSE ; BOOLEAN hasLock = FALSE ; try { stringstream ss ; BSONObj objArg1( arg1 ) ; BSONObjIterator it ( objArg1 ) ; while ( it.more() ) { BSONElement e = it.next() ; if ( 0 == ossStrcmp( e.fieldName(), PMD_OPTION_SVCNAME ) ) { if ( e.type() != String ) { PD_LOG( PDERROR, "Param[%s] type[%d] is not string", PMD_OPTION_SVCNAME, e.type() ) ; rc = SDB_INVALIDARG ; goto error ; } pSvcName = e.valuestrsafe() ; } else if ( 0 == ossStrcmp( e.fieldName(), PMD_OPTION_DBPATH ) ) { if ( e.type() != String ) { PD_LOG( PDERROR, "Param[%s] type[%d] is not string", PMD_OPTION_DBPATH, e.type() ) ; rc = SDB_INVALIDARG ; goto error ; } pDBPath = e.valuestrsafe() ; } else { ss << e.fieldName() << "=" ; switch( e.type() ) { case NumberDouble : ss << e.numberDouble () ; break ; case NumberInt : ss << e.numberLong () ; break ; case NumberLong : ss << e.numberInt () ; break ; case String : ss << e.valuestrsafe () ; break ; case Bool : ss << ( e.boolean() ? 
"TRUE" : "FALSE" ) ; break ; default : PD_LOG ( PDERROR, "Unexpected type[%d] for %s", e.type(), e.toString().c_str() ) ; rc = SDB_INVALIDARG ; goto error ; } ss << endl ; } } otherCfg = ss.str() ; } catch( std::exception &e ) { PD_LOG( PDERROR, "Occur exception: %s", e.what() ) ; rc = SDB_INVALIDARG ; goto error ; } if ( !pSvcName || !pDBPath ) { PD_LOG( PDERROR, "Param [%s] or [%s] is not config", PMD_OPTION_SVCNAME, PMD_OPTION_DBPATH ) ; rc = SDB_INVALIDARG ; goto error ; } if ( !ossGetRealPath( pDBPath, dbPath, OSS_MAX_PATHSIZE ) ) { PD_LOG( PDERROR, "Invalid db path: %s", pDBPath ) ; rc = SDB_INVALIDARG ; goto error ; } if ( needLock ) { lockBucket( pSvcName ) ; hasLock = TRUE ; } if ( isModify && !getNodeProcessInfo( pSvcName ) ) { rc = SDBCM_NODE_NOTEXISTED ; goto error ; } rc = ossAccess( dbPath, W_OK ) ; if ( SDB_PERM == rc ) { PD_LOG ( PDERROR, "Permission error for path: %s", dbPath ) ; goto error ; } else if ( SDB_FNE == rc ) { rc = ossMkdir ( dbPath, OSS_CREATE|OSS_READWRITE ) ; if ( rc ) { PD_LOG ( PDERROR, "Failed to create config file in path: %s, " "rc: %d", dbPath, rc ) ; goto error ; } createDBPath = TRUE ; } else if ( rc ) { PD_LOG ( PDERROR, "System error for access path: %s, rc: %d", dbPath, rc ) ; goto error ; } rc = utilBuildFullPath( sdbGetOMAgentOptions()->getLocalCfgPath(), pSvcName, OSS_MAX_PATHSIZE, cfgPath ) ; if ( rc ) { PD_LOG( PDERROR, "Build config path for service[%s] failed, rc: %d", pSvcName, rc ) ; goto error ; } rc = ossAccess( cfgPath, W_OK ) ; if ( SDB_PERM == rc ) { PD_LOG ( PDERROR, "Permission error for path[%s]", cfgPath ) ; goto error ; } else if ( SDB_FNE == rc ) { rc = ossMkdir ( cfgPath, OSS_CREATE|OSS_READWRITE ) ; if ( rc ) { PD_LOG ( PDERROR, "Failed to create directory: %s, rc: %d", cfgPath, rc ) ; goto error ; } createCfgPath = TRUE ; } else if ( rc ) { PD_LOG ( PDERROR, "System error for access path: %s, rc: %d", cfgPath, rc ) ; goto error ; } else if ( !isModify ) { PD_LOG ( PDERROR, "service[%s] node 
existed", pSvcName ) ; rc = SDBCM_NODE_EXISTED ; goto error ; } rc = utilBuildFullPath( cfgPath, PMD_DFT_CONF, OSS_MAX_PATHSIZE, cfgFile ) ; if ( rc ) { PD_LOG ( PDERROR, "Build config file for service[%s] failed, rc: %d", pSvcName, rc ) ; goto error ; } { pmdOptionsCB nodeOptions ; stringstream ss ; ss << PMD_OPTION_SVCNAME << "=" << pSvcName << endl ; ss << PMD_OPTION_DBPATH << "=" << dbPath << endl ; ss << otherCfg ; rc = utilWriteConfigFile( cfgFile, ss.str().c_str(), isModify ? FALSE : TRUE ) ; if ( rc ) { PD_LOG( PDERROR, "Write config file[%s] failed, rc: %d", cfgFile, rc ) ; goto error ; } createCfgFile = TRUE ; rc = nodeOptions.initFromFile( cfgFile, FALSE ) ; if ( rc ) { PD_LOG( PDERROR, "Extract node[%s] config failed, rc: %d", pSvcName, rc ) ; goto error ; } if ( omsvc ) { *omsvc = nodeOptions.getOMService() ; } } if ( isModify || !arg2 ) { goto done ; } try { CHAR cataCfgFile[ OSS_MAX_PATHSIZE + 1 ] = { 0 } ; BSONObj objArg2( arg2 ) ; stringstream ss ; if ( objArg2.isEmpty() ) { goto done ; } rc = utilBuildFullPath( cfgPath, PMD_DFT_CAT, OSS_MAX_PATHSIZE, cataCfgFile ) ; if ( rc ) { PD_LOG( PDERROR, "Build cat config file failed in service[%s], " "rc: %d", pSvcName, rc ) ; goto error ; } ss << objArg2 << endl ; rc = utilWriteConfigFile( cataCfgFile, ss.str().c_str(), TRUE ) ; if ( rc ) { PD_LOG( PDERROR, "Write cat file[%s] failed in service[%s], rc: %d", cataCfgFile, pSvcName, rc ) ; goto error ; } } catch( std::exception &e ) { PD_LOG( PDERROR, "Occur exeption for extract the second args for " "service[%s]: %s", pSvcName, e.what() ) ; rc = SDB_INVALIDARG ; goto error ; } done: if ( SDB_OK == rc ) { if ( !isModify ) { addNodeProcessInfo( pSvcName ) ; PD_LOG( PDEVENT, "Add node[%s] succeed", pSvcName ) ; } else { PD_LOG( PDEVENT, "Modify node[%s] succeed", pSvcName ) ; } } if ( hasLock ) { releaseBucket( pSvcName ) ; } return rc ; error: if ( createCfgFile ) { ossDelete( cfgFile ) ; } if ( createCfgPath ) { ossDelete( cfgPath ) ; } if ( createDBPath ) 
{ ossDelete( dbPath ) ; } goto done ; }
/**
 * One-time initialization for the S2 near stage: locates the near field in
 * the index key pattern, normalizes the distance bounds to meters, seeds the
 * search annulus radii, and resolves the index descriptor.
 *
 * Sets _failed (instead of throwing) when the database, collection, or index
 * cannot be found.
 */
void S2NearStage::init() {
    _initted = true;

    // The field we're near-ing from is the n-th field.  Figure out what that 'n' is.  We
    // put the cover for the search annulus in this spot in the bounds.
    _nearFieldIndex = 0;
    BSONObjIterator specIt(_params.indexKeyPattern);
    while (specIt.more()) {
        // NOTE(review): fieldName() yields const char*; this comparison relies
        // on nearQuery.field providing string comparison (not pointer
        // equality) — presumably it is a std::string; confirm.
        if (specIt.next().fieldName() == _params.nearQuery.field) {
            break;
        }
        ++_nearFieldIndex;
    }
    verify(_nearFieldIndex < _params.indexKeyPattern.nFields());

    // FLAT implies the distances are in radians.  Convert to meters.
    if (FLAT == _params.nearQuery.centroid.crs) {
        _params.nearQuery.minDistance *= kRadiusOfEarthInMeters;
        _params.nearQuery.maxDistance *= kRadiusOfEarthInMeters;
    }

    // Make sure distances are sane.  Possibly redundant given the checking during parsing.
    _minDistance = max(0.0, _params.nearQuery.minDistance);
    _maxDistance = min(M_PI * kRadiusOfEarthInMeters, _params.nearQuery.maxDistance);
    _minDistance = min(_minDistance, _maxDistance);

    // We grow _outerRadius in nextAnnulus() below.
    _innerRadius = _outerRadius = _minDistance;
    _outerRadiusInclusive = false;

    // Grab the IndexDescriptor.  Any lookup failure marks the stage failed
    // rather than aborting.
    Database* db = cc().database();
    if (!db) {
        _failed = true;
        return;
    }

    Collection* collection = db->getCollection(_params.ns);
    if (!collection) {
        _failed = true;
        return;
    }

    _descriptor = collection->getIndexCatalog()->findIndexByKeyPattern(_params.indexKeyPattern);
    if (NULL == _descriptor) {
        _failed = true;
        return;
    }

    // The user can override this so we honor it.  We could ignore it though -- it's just used
    // to set _radiusIncrement, not to do any covering.
    int finestIndexedLevel;
    BSONElement fl = _descriptor->infoObj()["finestIndexedLevel"];
    if (fl.isNumber()) {
        finestIndexedLevel = fl.numberInt();
    }
    else {
        // Default: the S2 level whose average edge is closest to 500m.
        finestIndexedLevel = S2::kAvgEdge.GetClosestLevel(500.0 / kRadiusOfEarthInMeters);
    }

    // Start with a conservative _radiusIncrement.  When we're done searching a shell we
    // increment the two radii by this.
    _radiusIncrement = 5 * S2::kAvgEdge.GetValue(finestIndexedLevel) * kRadiusOfEarthInMeters;
}
/**
 * Constructs the RocksDB storage engine: sizes and creates the block cache,
 * configures rate limiting, opens the database at 'path', then rebuilds
 * in-memory state from on-disk metadata (the ident->prefix map, the next
 * free prefix, and the set of dropped-but-uncompacted prefixes).
 */
RocksEngine::RocksEngine(const std::string& path, bool durable)
    : _path(path), _durable(durable), _maxPrefix(0) {
    {  // create block cache
        uint64_t cacheSizeGB = rocksGlobalOptions.cacheSizeGB;
        if (cacheSizeGB == 0) {
            // Default: half of physical RAM, but never less than 1GB.
            ProcessInfo pi;
            unsigned long long memSizeMB = pi.getMemSizeMB();
            if (memSizeMB > 0) {
                double cacheMB = memSizeMB / 2;
                cacheSizeGB = static_cast<uint64_t>(cacheMB / 1024);
            }
            if (cacheSizeGB < 1) {
                cacheSizeGB = 1;
            }
        }
        // Second argument (6) is the number of shard bits for the LRU cache.
        _block_cache = rocksdb::NewLRUCache(cacheSizeGB * 1024 * 1024 * 1024LL, 6);
    }
    _maxWriteMBPerSec = rocksGlobalOptions.maxWriteMBPerSec;
    _rateLimiter.reset(
        rocksdb::NewGenericRateLimiter(static_cast<int64_t>(_maxWriteMBPerSec) * 1024 * 1024));
    // open DB
    rocksdb::DB* db;
    auto s = rocksdb::DB::Open(_options(), path, &db);
    invariantRocksOK(s);
    _db.reset(db);

    _counterManager.reset(
        new RocksCounterManager(_db.get(), rocksGlobalOptions.crashSafeCounters));
    _compactionScheduler.reset(new RocksCompactionScheduler(_db.get()));

    // open iterator
    boost::scoped_ptr<rocksdb::Iterator> iter(_db->NewIterator(rocksdb::ReadOptions()));

    // find maxPrefix
    iter->SeekToLast();
    if (iter->Valid()) {
        // otherwise the DB is empty, so we just keep it at 0
        bool ok = extractPrefix(iter->key(), &_maxPrefix);
        // this is DB corruption here
        invariant(ok);
    }

    // load ident to prefix map. also update _maxPrefix if there's any prefix bigger than
    // current _maxPrefix
    {
        boost::lock_guard<boost::mutex> lk(_identPrefixMapMutex);
        for (iter->Seek(kMetadataPrefix);
             iter->Valid() && iter->key().starts_with(kMetadataPrefix);
             iter->Next()) {
            invariantRocksOK(iter->status());
            rocksdb::Slice ident(iter->key());
            ident.remove_prefix(kMetadataPrefix.size());
            // this could throw DBException, which then means DB corruption. We just let it fly
            // to the caller
            BSONObj identConfig(iter->value().data());
            BSONElement element = identConfig.getField("prefix");

            if (element.eoo() || !element.isNumber()) {
                log() << "Mongo metadata in RocksDB database is corrupted.";
                invariant(false);
            }
            uint32_t identPrefix = static_cast<uint32_t>(element.numberInt());

            _identPrefixMap[StringData(ident.data(), ident.size())] = identPrefix;

            _maxPrefix = std::max(_maxPrefix, identPrefix);
        }
    }

    // just to be extra sure. we need this if last collection is oplog -- in that case we
    // reserve prefix+1 for oplog key tracker
    ++_maxPrefix;

    // load dropped prefixes
    {
        rocksdb::WriteBatch wb;
        // we will use this iter to check if prefixes are still alive
        boost::scoped_ptr<rocksdb::Iterator> prefixIter(
            _db->NewIterator(rocksdb::ReadOptions()));
        for (iter->Seek(kDroppedPrefix);
             iter->Valid() && iter->key().starts_with(kDroppedPrefix);
             iter->Next()) {
            invariantRocksOK(iter->status());
            rocksdb::Slice prefix(iter->key());
            prefix.remove_prefix(kDroppedPrefix.size());
            prefixIter->Seek(prefix);
            invariantRocksOK(iter->status());
            if (prefixIter->Valid() && prefixIter->key().starts_with(prefix)) {
                // prefix is still alive, let's instruct the compaction filter to clear it up
                uint32_t int_prefix;
                bool ok = extractPrefix(prefix, &int_prefix);
                invariant(ok);
                {
                    boost::lock_guard<boost::mutex> lk(_droppedPrefixesMutex);
                    _droppedPrefixes.insert(int_prefix);
                }
            } else {
                // prefix is no longer alive. let's remove the prefix from our dropped prefixes
                // list
                wb.Delete(iter->key());
            }
        }
        if (wb.Count() > 0) {
            auto s = _db->Write(rocksdb::WriteOptions(), &wb);
            invariantRocksOK(s);
        }
    }
}
/**
 * Implements the geoNear command: rewrites the command arguments into a
 * $near/$nearSphere query, executes it, and streams up to 'num'/'limit'
 * results (default 100) into 'result' along with summary stats.
 *
 * Returns false (with 'errmsg' set) on argument/namespace errors; throws via
 * uassert on malformed 'near'/'maxDistance'/'minDistance'/limit values.
 */
bool run(OperationContext* txn, const string& dbname, BSONObj& cmdObj, int,
         string& errmsg, BSONObjBuilder& result, bool fromRepl) {
    const string ns = dbname + "." + cmdObj.firstElement().valuestr();

    if (!cmdObj["start"].eoo()) {
        errmsg = "using deprecated 'start' argument to geoNear";
        return false;
    }

    Client::ReadContext ctx(txn, ns);

    Database* db = ctx.ctx().db();
    if ( !db ) {
        errmsg = "can't find ns";
        return false;
    }

    Collection* collection = db->getCollection( txn, ns );
    if ( !collection ) {
        errmsg = "can't find ns";
        return false;
    }

    IndexCatalog* indexCatalog = collection->getIndexCatalog();

    // cout << "raw cmd " << cmdObj.toString() << endl;

    // We seek to populate this.
    string nearFieldName;
    bool using2DIndex = false;
    if (!getFieldName(txn, collection, indexCatalog, &nearFieldName, &errmsg, &using2DIndex)) {
        return false;
    }

    PointWithCRS point;
    uassert(17304, "'near' field must be point",
            GeoParser::parseQueryPoint(cmdObj["near"], &point).isOK());

    bool isSpherical = cmdObj["spherical"].trueValue();
    if (!using2DIndex) {
        // 2dsphere indexes only support spherical distance computation.
        uassert(17301, "2dsphere index must have spherical: true", isSpherical);
    }

    // Build the $near expression for the query.
    BSONObjBuilder nearBob;
    if (isSpherical) {
        nearBob.append("$nearSphere", cmdObj["near"].Obj());
    }
    else {
        nearBob.append("$near", cmdObj["near"].Obj());
    }

    if (!cmdObj["maxDistance"].eoo()) {
        uassert(17299, "maxDistance must be a number",cmdObj["maxDistance"].isNumber());
        nearBob.append("$maxDistance", cmdObj["maxDistance"].number());
    }

    if (!cmdObj["minDistance"].eoo()) {
        uassert(17298, "minDistance doesn't work on 2d index", !using2DIndex);
        uassert(17300, "minDistance must be a number",cmdObj["minDistance"].isNumber());
        nearBob.append("$minDistance", cmdObj["minDistance"].number());
    }

    if (!cmdObj["uniqueDocs"].eoo()) {
        warning() << ns << ": ignoring deprecated uniqueDocs option in geoNear command";
    }

    // And, build the full query expression: { <nearField>: {$near...}, <user query fields> }.
    BSONObjBuilder queryBob;
    queryBob.append(nearFieldName, nearBob.obj());
    if (!cmdObj["query"].eoo() && cmdObj["query"].isABSONObj()) {
        queryBob.appendElements(cmdObj["query"].Obj());
    }
    BSONObj rewritten = queryBob.obj();

    // cout << "rewritten query: " << rewritten.toString() << endl;

    // "num" takes precedence over "limit" when both are present.
    int numWanted = 100;
    const char* limitName = !cmdObj["num"].eoo() ? "num" : "limit";
    BSONElement eNumWanted = cmdObj[limitName];
    if (!eNumWanted.eoo()) {
        uassert(17303, "limit must be number", eNumWanted.isNumber());
        numWanted = eNumWanted.numberInt();
        uassert(17302, "limit must be >=0", numWanted >= 0);
    }

    bool includeLocs = false;
    if (!cmdObj["includeLocs"].eoo()) {
        includeLocs = cmdObj["includeLocs"].trueValue();
    }

    double distanceMultiplier = 1.0;
    BSONElement eDistanceMultiplier = cmdObj["distanceMultiplier"];
    if (!eDistanceMultiplier.eoo()) {
        uassert(17296, "distanceMultiplier must be a number", eDistanceMultiplier.isNumber());
        distanceMultiplier = eDistanceMultiplier.number();
        uassert(17297, "distanceMultiplier must be non-negative", distanceMultiplier >= 0);
    }

    // Projection pulling the computed near point/distance out of each result via $meta.
    BSONObj projObj = BSON("$pt" << BSON("$meta" << LiteParsedQuery::metaGeoNearPoint) <<
                           "$dis" << BSON("$meta" << LiteParsedQuery::metaGeoNearDistance));

    CanonicalQuery* cq;
    const NamespaceString nss(dbname);
    const WhereCallbackReal whereCallback(txn, nss.db());

    if (!CanonicalQuery::canonicalize(ns, rewritten, BSONObj(), projObj, 0, numWanted,
                                      BSONObj(), &cq, whereCallback).isOK()) {
        errmsg = "Can't parse filter / create query";
        return false;
    }

    PlanExecutor* rawExec;
    if (!getExecutor(txn, collection, cq, &rawExec, 0).isOK()) {
        errmsg = "can't get query runner";
        return false;
    }

    auto_ptr<PlanExecutor> exec(rawExec);
    const ScopedExecutorRegistration safety(exec.get());

    double totalDistance = 0;
    BSONObjBuilder resultBuilder(result.subarrayStart("results"));
    double farthestDist = 0;

    BSONObj currObj;
    int results = 0;
    while ((results < numWanted) && PlanExecutor::ADVANCED == exec->getNext(&currObj, NULL)) {
        // Come up with the correct distance.
        double dist = currObj["$dis"].number() * distanceMultiplier;
        totalDistance += dist;
        if (dist > farthestDist) {
            farthestDist = dist;
        }

        // Strip out '$dis' and '$pt' from the result obj.  The rest gets added as 'obj'
        // in the command result.
        BSONObjIterator resIt(currObj);
        BSONObjBuilder resBob;
        while (resIt.more()) {
            BSONElement elt = resIt.next();
            if (!mongoutils::str::equals("$pt", elt.fieldName())
                && !mongoutils::str::equals("$dis", elt.fieldName())) {
                resBob.append(elt);
            }
        }
        BSONObj resObj = resBob.obj();

        // Don't make a too-big result object.
        if (resultBuilder.len() + resObj.objsize()> BSONObjMaxUserSize) {
            warning() << "Too many geoNear results for query " << rewritten.toString()
                      << ", truncating output.";
            break;
        }

        // Add the next result to the result builder.
        BSONObjBuilder oneResultBuilder(
            resultBuilder.subobjStart(BSONObjBuilder::numStr(results)));
        oneResultBuilder.append("dis", dist);
        if (includeLocs) {
            oneResultBuilder.appendAs(currObj["$pt"], "loc");
        }
        oneResultBuilder.append("obj", resObj);
        oneResultBuilder.done();
        ++results;
    }

    resultBuilder.done();

    // Fill out the stats subobj.
    BSONObjBuilder stats(result.subobjStart("stats"));

    // Fill in nscanned from the explain.
    PlanSummaryStats summary;
    Explain::getSummaryStats(exec.get(), &summary);
    stats.appendNumber("nscanned", summary.totalKeysExamined);
    stats.appendNumber("objectsLoaded", summary.totalDocsExamined);

    // NOTE(review): when no documents match, 'results' is 0 and this divides by
    // zero, emitting NaN for avgDistance -- confirm whether callers tolerate it.
    stats.append("avgDistance", totalDistance / results);
    stats.append("maxDistance", farthestDist);
    stats.append("time", txn->getCurOp()->elapsedMillis());
    stats.done();

    return true;
}
/**
 * Parses the legacy "full" query wrapper object ('top'), extracting the sort
 * spec ($orderby/orderby) and the dollar-prefixed query modifiers ($explain,
 * $snapshot, $min, $max, $hint, $returnKey, $maxScan, $showDiskLoc, $maxTimeMS)
 * into the corresponding members.  Unrecognized $-fields are silently ignored.
 *
 * Returns BadValue for malformed sort specs or modifier values, and rejects
 * sort/hint combined with $snapshot.
 */
Status LiteParsedQuery::initFullQuery(const BSONObj& top) {
    BSONObjIterator i(top);

    while (i.more()) {
        BSONElement e = i.next();
        const char* name = e.fieldName();

        if (0 == strcmp("$orderby", name) || 0 == strcmp("orderby", name)) {
            if (Object == e.type()) {
                _sort = e.embeddedObject();
            }
            else if (Array == e.type()) {
                _sort = e.embeddedObject();

                // TODO: Is this ever used?  I don't think so.
                // Quote:
                // This is for languages whose "objects" are not well ordered (JSON is well
                // ordered).
                // [ { a : ... } , { b : ... } ] -> { a : ..., b : ... }
                // note: this is slow, but that is ok as order will have very few pieces
                BSONObjBuilder b;
                char p[2] = "0";  // single-digit array index, incremented in place below

                while (1) {
                    BSONObj j = _sort.getObjectField(p);
                    if (j.isEmpty()) {
                        break;
                    }
                    BSONElement e = j.firstElement();
                    if (e.eoo()) {
                        return Status(ErrorCodes::BadValue, "bad order array");
                    }
                    if (!e.isNumber()) {
                        return Status(ErrorCodes::BadValue, "bad order array [2]");
                    }
                    b.append(e);
                    (*p)++;  // advance to next array index: "0" -> "1" -> ...
                    if (!(*p <= '9')) {
                        // only indexes "0".."9" are supported by this compat path
                        return Status(ErrorCodes::BadValue, "too many ordering elements");
                    }
                }

                _sort = b.obj();
            }
            else {
                return Status(ErrorCodes::BadValue, "sort must be object or array");
            }
        }
        else if ('$' == *name) {
            name++;
            if (str::equals("explain", name)) {
                // Won't throw.
                _explain = e.trueValue();
            }
            else if (str::equals("snapshot", name)) {
                // Won't throw.
                _snapshot = e.trueValue();
            }
            else if (str::equals("min", name)) {
                if (!e.isABSONObj()) {
                    return Status(ErrorCodes::BadValue, "$min must be a BSONObj");
                }
                _min = e.embeddedObject();
            }
            else if (str::equals("max", name)) {
                if (!e.isABSONObj()) {
                    return Status(ErrorCodes::BadValue, "$max must be a BSONObj");
                }
                _max = e.embeddedObject();
            }
            else if (str::equals("hint", name)) {
                if (e.isABSONObj()) {
                    _hint = e.embeddedObject();
                }
                else {
                    // Hint can be specified as an object or as a string.  Wrap takes care of
                    // it.
                    _hint = e.wrap();
                }
            }
            else if (str::equals("returnKey", name)) {
                // Won't throw.
                if (e.trueValue()) {
                    // Rewrite the projection to return only the index key via $meta.
                    _returnKey = true;
                    BSONObjBuilder projBob;
                    projBob.appendElements(_proj);
                    // XXX: what's the syntax here?
                    BSONObj indexKey = BSON("$$" << BSON("$meta" << "indexKey"));
                    projBob.append(indexKey.firstElement());
                    _proj = projBob.obj();
                }
            }
            else if (str::equals("maxScan", name)) {
                // Won't throw.
                _maxScan = e.numberInt();
            }
            else if (str::equals("showDiskLoc", name)) {
                // Won't throw.
                if (e.trueValue()) {
                    // Add the record location to the projection via $meta.
                    BSONObjBuilder projBob;
                    projBob.appendElements(_proj);
                    BSONObj metaDiskLoc = BSON("$diskLoc" << BSON("$meta" << "diskloc"));
                    projBob.append(metaDiskLoc.firstElement());
                    _proj = projBob.obj();
                }
            }
            else if (str::equals("maxTimeMS", name)) {
                StatusWith<int> maxTimeMS = parseMaxTimeMS(e);
                if (!maxTimeMS.isOK()) {
                    return maxTimeMS.getStatus();
                }
                _maxTimeMS = maxTimeMS.getValue();
            }
        }
    }

    if (_snapshot) {
        // $snapshot is incompatible with explicit sort or hint.
        if (!_sort.isEmpty()) {
            return Status(ErrorCodes::BadValue, "E12001 can't use sort with $snapshot");
        }
        if (!_hint.isEmpty()) {
            return Status(ErrorCodes::BadValue, "E12002 can't use hint with $snapshot");
        }
    }

    return Status::OK();
}
/**
 * Unit test: verifies AndSortedStage behavior when matched documents are
 * invalidated mid-execution.  Inserts 50 docs matching both children, then
 * (1) invalidates the first upcoming result and checks it lands in the
 * working set's flagged list, and (2) invalidates a not-yet-returned result
 * and checks it is simply dropped, leaving 48 returned results and exactly
 * one flagged entry.
 */
void run() {
    Client::WriteContext ctx(ns());
    Database* db = ctx.ctx().db();
    Collection* coll = db->getCollection(ns());
    if (!coll) {
        coll = db->createCollection(ns());
    }

    // Insert a bunch of data
    for (int i = 0; i < 50; ++i) {
        insert(BSON("foo" << 1 << "bar" << 1));
    }
    addIndex(BSON("foo" << 1));
    addIndex(BSON("bar" << 1));

    WorkingSet ws;
    scoped_ptr<AndSortedStage> ah(new AndSortedStage(&ws, NULL));

    // Scan over foo == 1
    IndexScanParams params;
    params.descriptor = getIndex(BSON("foo" << 1), coll);
    params.bounds.isSimpleRange = true;
    params.bounds.startKey = BSON("" << 1);
    params.bounds.endKey = BSON("" << 1);
    params.bounds.endKeyInclusive = true;
    params.direction = 1;
    ah->addChild(new IndexScan(params, &ws, NULL));

    // Scan over bar == 1
    params.descriptor = getIndex(BSON("bar" << 1), coll);
    ah->addChild(new IndexScan(params, &ws, NULL));

    // Get the set of disklocs in our collection to use later.
    set<DiskLoc> data;
    getLocs(&data, coll);

    // We're making an assumption here that happens to be true because we clear out the
    // collection before running this: increasing inserts have increasing DiskLocs.
    // This isn't true in general if the collection is not dropped beforehand.
    WorkingSetID id;

    // Sorted AND looks at the first child, which is an index scan over foo==1.
    ah->work(&id);

    // The first thing that the index scan returns (due to increasing DiskLoc trick) is the
    // very first insert, which should be the very first thing in data.  Let's invalidate it
    // and make sure it shows up in the flagged results.
    ah->prepareToYield();
    ah->invalidate(*data.begin());
    remove(data.begin()->obj());
    ah->recoverFromYield();

    // Make sure the nuked obj is actually in the flagged data.
    ASSERT_EQUALS(ws.getFlagged().size(), size_t(1));
    WorkingSetMember* member = ws.get(*ws.getFlagged().begin());
    ASSERT_EQUALS(WorkingSetMember::OWNED_OBJ, member->state);
    BSONElement elt;
    ASSERT_TRUE(member->getFieldDotted("foo", &elt));
    ASSERT_EQUALS(1, elt.numberInt());
    ASSERT_TRUE(member->getFieldDotted("bar", &elt));
    ASSERT_EQUALS(1, elt.numberInt());

    set<DiskLoc>::iterator it = data.begin();

    // Proceed along, AND-ing results.
    int count = 0;
    while (!ah->isEOF() && count < 10) {
        WorkingSetID id;  // shadows the outer 'id' deliberately; each work() gets a fresh slot
        PlanStage::StageState status = ah->work(&id);
        if (PlanStage::ADVANCED != status) {
            continue;
        }

        ++count;
        ++it;

        // Each advanced result must match both predicates and come back in DiskLoc order.
        member = ws.get(id);

        ASSERT_TRUE(member->getFieldDotted("foo", &elt));
        ASSERT_EQUALS(1, elt.numberInt());
        ASSERT_TRUE(member->getFieldDotted("bar", &elt));
        ASSERT_EQUALS(1, elt.numberInt());
        ASSERT_EQUALS(member->loc, *it);
    }

    // Move 'it' to a result that's yet to show up.
    for (int i = 0; i < count + 10; ++i) {
        ++it;
    }
    // Remove a result that's coming up.  It's not the 'target' result of the AND so it's
    // not flagged.
    ah->prepareToYield();
    ah->invalidate(*it);
    remove(it->obj());
    ah->recoverFromYield();

    // Get all results aside from the two we killed.
    while (!ah->isEOF()) {
        WorkingSetID id;
        PlanStage::StageState status = ah->work(&id);
        if (PlanStage::ADVANCED != status) {
            continue;
        }

        ++count;
        member = ws.get(id);

        ASSERT_TRUE(member->getFieldDotted("foo", &elt));
        ASSERT_EQUALS(1, elt.numberInt());
        ASSERT_TRUE(member->getFieldDotted("bar", &elt));
        ASSERT_EQUALS(1, elt.numberInt());
    }

    // 50 inserted minus the two invalidated docs.
    ASSERT_EQUALS(count, 48);
    // Only the first invalidation was flagged; the second was silently skipped.
    ASSERT_EQUALS(size_t(1), ws.getFlagged().size());
}
/**
 * Parses one operator sub-field of a match expression document, e.g. the
 * "$gt: 5" element inside {a: {$gt: 5}}.
 *
 * 'context'  - the full operator object (needed by $regex/$options and geo).
 * 'name'     - the path the resulting expression applies to ("a" above).
 * 'e'        - the operator element being parsed.
 * 'position' - index of 'e' within 'context' ($regex must be first).
 * 'stop'     - out-param; set to true when the whole 'context' object has been
 *              consumed (currently only by $regex) so the caller stops iterating.
 *
 * Returns the parsed expression or a BadValue status for unknown/malformed
 * operators.
 */
StatusWithMatchExpression MatchExpressionParser::_parseSubField( const BSONObj& context,
                                                                 const char* name,
                                                                 const BSONElement& e,
                                                                 int position,
                                                                 bool* stop ) {
    *stop = false;

    // TODO: these should move to getGtLtOp, or its replacement

    // $eq and $not are not recognized by getGtLtOp, so handle them up front.
    if ( mongoutils::str::equals( "$eq", e.fieldName() ) )
        return _parseComparison( name, new EqualityMatchExpression(), e );

    if ( mongoutils::str::equals( "$not", e.fieldName() ) ) {
        return _parseNot( name, e );
    }

    int x = e.getGtLtOp(-1);
    switch ( x ) {
    case -1:
        return StatusWithMatchExpression( ErrorCodes::BadValue,
                                          mongoutils::str::stream()
                                          << "unknown operator: "
                                          << e.fieldName() );
    case BSONObj::LT:
        return _parseComparison( name, new LTMatchExpression(), e );
    case BSONObj::LTE:
        return _parseComparison( name, new LTEMatchExpression(), e );
    case BSONObj::GT:
        return _parseComparison( name, new GTMatchExpression(), e );
    case BSONObj::GTE:
        return _parseComparison( name, new GTEMatchExpression(), e );
    case BSONObj::NE:
        return _parseComparison( name, new NEMatchExpression(), e );
    case BSONObj::Equality:
        return _parseComparison( name, new EqualityMatchExpression(), e );

    case BSONObj::opIN: {
        if ( e.type() != Array )
            return StatusWithMatchExpression( ErrorCodes::BadValue, "$in needs an array" );
        std::auto_ptr<InMatchExpression> temp( new InMatchExpression() );
        temp->init( name );
        Status s = _parseArrayFilterEntries( temp->getArrayFilterEntries(), e.Obj() );
        if ( !s.isOK() )
            return StatusWithMatchExpression( s );
        return StatusWithMatchExpression( temp.release() );
    }

    case BSONObj::NIN: {
        if ( e.type() != Array )
            return StatusWithMatchExpression( ErrorCodes::BadValue, "$nin needs an array" );
        std::auto_ptr<NinMatchExpression> temp( new NinMatchExpression() );
        temp->init( name );
        Status s = _parseArrayFilterEntries( temp->getArrayFilterEntries(), e.Obj() );
        if ( !s.isOK() )
            return StatusWithMatchExpression( s );
        return StatusWithMatchExpression( temp.release() );
    }

    case BSONObj::opSIZE: {
        int size = 0;
        if ( e.type() == String ) {
            // matching old odd semantics: a string $size argument matches size 0
            size = 0;
        }
        else if ( e.type() == NumberInt || e.type() == NumberLong ) {
            size = e.numberInt();
        }
        else if ( e.type() == NumberDouble ) {
            if ( e.numberInt() == e.numberDouble() ) {
                size = e.numberInt();
            }
            else {
                // old semantcs require exact numeric match
                // so [1,2] != 1 or 2
                size = -1;  // -1 never matches any real array size
            }
        }
        else {
            return StatusWithMatchExpression( ErrorCodes::BadValue, "$size needs a number" );
        }

        std::auto_ptr<SizeMatchExpression> temp( new SizeMatchExpression() );
        Status s = temp->init( name, size );
        if ( !s.isOK() )
            return StatusWithMatchExpression( s );
        return StatusWithMatchExpression( temp.release() );
    }

    case BSONObj::opEXISTS: {
        if ( e.eoo() )
            return StatusWithMatchExpression( ErrorCodes::BadValue, "$exists can't be eoo" );
        std::auto_ptr<ExistsMatchExpression> temp( new ExistsMatchExpression() );
        Status s = temp->init( name, e.trueValue() );
        if ( !s.isOK() )
            return StatusWithMatchExpression( s );
        return StatusWithMatchExpression( temp.release() );
    }

    case BSONObj::opTYPE: {
        if ( !e.isNumber() )
            return StatusWithMatchExpression( ErrorCodes::BadValue, "$type has to be a number" );
        int type = e.numberInt();
        // A non-integral double is normalized to -1, which matches no BSON type.
        if ( e.type() != NumberInt && type != e.number() )
            type = -1;
        std::auto_ptr<TypeMatchExpression> temp( new TypeMatchExpression() );
        Status s = temp->init( name, type );
        if ( !s.isOK() )
            return StatusWithMatchExpression( s );
        return StatusWithMatchExpression( temp.release() );
    }

    case BSONObj::opMOD:
        return _parseMOD( name, e );

    case BSONObj::opOPTIONS:
        // $options is only legal when paired with a preceding $regex; the $regex
        // case below consumes both via _parseRegexDocument.
        return StatusWithMatchExpression( ErrorCodes::BadValue,
                                          "$options has to be after a $regex" );

    case BSONObj::opREGEX: {
        if ( position != 0 )
            return StatusWithMatchExpression( ErrorCodes::BadValue, "$regex has to be first" );
        *stop = true;  // the whole context object is consumed here
        return _parseRegexDocument( name, context );
    }

    case BSONObj::opELEM_MATCH:
        return _parseElemMatch( name, e );

    case BSONObj::opALL:
        return _parseAll( name, e );

    case BSONObj::opWITHIN:
        return expressionParserGeoCallback( name, context );

    default:
        return StatusWithMatchExpression( ErrorCodes::BadValue, "not done" );
    }
}
void _sptInvoker::_reportError( JSContext *cx, INT32 rc, const bson::BSONObj &detail ) { sdbSetErrno( rc ) ; if ( SDB_OK != rc ) { stringstream ss ; BSONObjIterator itr( detail) ; INT32 fieldNum = detail.nFields() ; INT32 count = 0 ; while ( itr.more() ) { if ( count > 0 ) { ss << ", " ; } BSONElement e = itr.next() ; if ( fieldNum > 1 || 0 != ossStrcmp( SPT_ERR, e.fieldName() ) ) { ss << e.fieldName() << ": " ; } if ( String == e.type() ) { ss << e.valuestr() ; } else if ( NumberInt == e.type() ) { ss << e.numberInt() ; } else if ( NumberLong == e.type() ) { ss << e.numberLong() ; } else if ( NumberDouble == e.type() ) { ss << e.numberDouble() ; } else if ( Bool == e.type() ) { ss << ( e.boolean() ? "true" : "false" ) ; } else { ss << e.toString( false, false ) ; } ++count ; } sdbSetErrMsg( ss.str().c_str() ) ; if ( sdbIsErrMsgEmpty() ) { sdbSetErrMsg( getErrDesp( rc ) ) ; } JS_SetPendingException( cx , INT_TO_JSVAL( rc ) ) ; } else { sdbSetErrMsg( NULL ) ; } return ; }
static int configValueWithDefault(const IndexDescriptor *desc, const string& name, int def) { BSONElement e = desc->getInfoElement(name); if (e.isNumber()) { return e.numberInt(); } return def; }
bool handleSpecialNamespaces( Request& r , QueryMessage& q ) { const char * ns = r.getns(); ns = strstr( r.getns() , ".$cmd.sys." ); if ( ! ns ) return false; ns += 10; BSONObjBuilder b; vector<Shard> shards; if ( strcmp( ns , "inprog" ) == 0 ) { Shard::getAllShards( shards ); BSONArrayBuilder arr( b.subarrayStart( "inprog" ) ); for ( unsigned i=0; i<shards.size(); i++ ) { Shard shard = shards[i]; ScopedDbConnection conn( shard ); BSONObj temp = conn->findOne( r.getns() , BSONObj() ); if ( temp["inprog"].isABSONObj() ) { BSONObjIterator i( temp["inprog"].Obj() ); while ( i.more() ) { BSONObjBuilder x; BSONObjIterator j( i.next().Obj() ); while( j.more() ) { BSONElement e = j.next(); if ( str::equals( e.fieldName() , "opid" ) ) { stringstream ss; ss << shard.getName() << ':' << e.numberInt(); x.append( "opid" , ss.str() ); } else if ( str::equals( e.fieldName() , "client" ) ) { x.appendAs( e , "client_s" ); } else { x.append( e ); } } arr.append( x.obj() ); } } conn.done(); } arr.done(); } else if ( strcmp( ns , "killop" ) == 0 ) { BSONElement e = q.query["op"]; if ( strstr( r.getns() , "admin." ) != 0 ) { b.append( "err" , "unauthorized" ); } else if ( e.type() != String ) { b.append( "err" , "bad op" ); b.append( e ); } else { b.append( e ); string s = e.String(); string::size_type i = s.find( ':' ); if ( i == string::npos ) { b.append( "err" , "bad opid" ); } else { string shard = s.substr( 0 , i ); int opid = atoi( s.substr( i + 1 ).c_str() ); b.append( "shard" , shard ); b.append( "shardid" , opid ); log() << "want to kill op: " << e << endl; Shard s(shard); ScopedDbConnection conn( s ); conn->findOne( r.getns() , BSON( "op" << opid ) ); conn.done(); } } } else if ( strcmp( ns , "unlock" ) == 0 ) { b.append( "err" , "can't do unlock through mongos" ); } else { log( LL_WARNING ) << "unknown sys command [" << ns << "]" << endl; return false; } BSONObj x = b.done(); replyToQuery(0, r.p(), r.m(), x); return true; }
/**
 * Populates this MemberConfig from one member entry of a replica set config
 * document.  Validates field presence and types field by field (_id, host,
 * votes, priority, arbiterOnly, slaveDelay, hidden, buildIndexes, tags) and
 * returns NoSuchKey/TypeMismatch/BadValue on the first violation; unknown
 * fields are rejected up front by bsonCheckOnlyHasFields.
 */
Status MemberConfig::initialize(const BSONObj& mcfg, ReplicaSetTagConfig* tagConfig) {
    Status status = bsonCheckOnlyHasFields(
        "replica set member configuration", mcfg, kLegalMemberConfigFieldNames);
    if (!status.isOK())
        return status;

    //
    // Parse _id field.
    //
    BSONElement idElement = mcfg[kIdFieldName];
    if (idElement.eoo()) {
        return Status(ErrorCodes::NoSuchKey, str::stream() << kIdFieldName <<
                      " field is missing");
    }
    if (!idElement.isNumber()) {
        return Status(ErrorCodes::TypeMismatch, str::stream() << kIdFieldName <<
                      " field has non-numeric type " <<
                      typeName(idElement.type()));
    }
    _id = idElement.numberInt();

    //
    // Parse h field.
    //
    std::string hostAndPortString;
    status = bsonExtractStringField(mcfg, kHostFieldName, &hostAndPortString);
    if (!status.isOK())
        return status;
    boost::trim(hostAndPortString);
    status = _host.initialize(hostAndPortString);
    if (!status.isOK())
        return status;
    if (!_host.hasPort()) {
        // make port explicit even if default.
        _host = HostAndPort(_host.host(), _host.port());
    }

    //
    // Parse votes field.
    //
    BSONElement votesElement = mcfg[kVotesFieldName];
    int votes;
    if (votesElement.eoo()) {
        votes = kVotesFieldDefault;
    }
    else if (votesElement.isNumber()) {
        votes = votesElement.numberInt();
    }
    else {
        return Status(ErrorCodes::TypeMismatch, str::stream() << kVotesFieldName <<
                      " field value has non-numeric type " <<
                      typeName(votesElement.type()));
    }
    // Only 0 or 1 votes per member are allowed.
    if (votes != 0 && votes != 1) {
        return Status(ErrorCodes::BadValue, str::stream() << kVotesFieldName <<
                      " field value is " << votesElement.numberInt() <<
                      " but must be 0 or 1");
    }
    _isVoter = bool(votes);

    //
    // Parse priority field.
    //
    BSONElement priorityElement = mcfg[kPriorityFieldName];
    if (priorityElement.eoo()) {
        _priority = kPriorityFieldDefault;
    }
    else if (priorityElement.isNumber()) {
        _priority = priorityElement.numberDouble();
    }
    else {
        return Status(ErrorCodes::TypeMismatch, str::stream() << kPriorityFieldName <<
                      " field has non-numeric type " <<
                      typeName(priorityElement.type()));
    }

    //
    // Parse arbiterOnly field.
    //
    status = bsonExtractBooleanFieldWithDefault(mcfg,
                                                kArbiterOnlyFieldName,
                                                kArbiterOnlyFieldDefault,
                                                &_arbiterOnly);
    if (!status.isOK())
        return status;

    //
    // Parse slaveDelay field.
    //
    BSONElement slaveDelayElement = mcfg[kSlaveDelayFieldName];
    if (slaveDelayElement.eoo()) {
        _slaveDelay = kSlaveDelayFieldDefault;
    }
    else if (slaveDelayElement.isNumber()) {
        _slaveDelay = Seconds(slaveDelayElement.numberInt());
    }
    else {
        return Status(ErrorCodes::TypeMismatch, str::stream() << kSlaveDelayFieldName <<
                      " field value has non-numeric type " <<
                      typeName(slaveDelayElement.type()));
    }

    //
    // Parse hidden field.
    //
    status = bsonExtractBooleanFieldWithDefault(mcfg,
                                                kHiddenFieldName,
                                                kHiddenFieldDefault,
                                                &_hidden);
    if (!status.isOK())
        return status;

    //
    // Parse buildIndexes field.
    //
    status = bsonExtractBooleanFieldWithDefault(mcfg,
                                                kBuildIndexesFieldName,
                                                kBuildIndexesFieldDefault,
                                                &_buildIndexes);
    if (!status.isOK())
        return status;

    //
    // Parse "tags" field.
    //
    _tags.clear();
    BSONElement tagsElement;
    status = bsonExtractTypedField(mcfg, kTagsFieldName, Object, &tagsElement);
    if (status.isOK()) {
        // Each tag must be a string; register it with the shared tag config.
        for (BSONObj::iterator tagIter(tagsElement.Obj()); tagIter.more();) {
            const BSONElement& tag = tagIter.next();
            if (tag.type() != String) {
                return Status(ErrorCodes::TypeMismatch, str::stream() << "tags." <<
                              tag.fieldName() <<
                              " field has non-string value of type " <<
                              typeName(tag.type()));
            }
            _tags.push_back(tagConfig->makeTag(tag.fieldNameStringData(),
                                               tag.valueStringData()));
        }
    }
    else if (ErrorCodes::NoSuchKey != status) {
        // A missing "tags" field is fine; any other extraction error is not.
        return status;
    }

    return Status::OK();
}
/**
 * Parses a replSetHeartbeat command response document into this object,
 * supporting both the 3.0 (V0) and 3.2 (V1) wire formats.  'term' is used to
 * complete the OpTime when the response carries only a V0 Timestamp/Date.
 * Returns the first type/value violation encountered; fields absent from the
 * document leave their corresponding *Set flag false.
 */
Status ReplSetHeartbeatResponse::initialize(const BSONObj& doc, long long term) {
    // Old versions set this even though they returned not "ok"
    _mismatch = doc[kMismatchFieldName].trueValue();
    if (_mismatch)
        return Status(ErrorCodes::InconsistentReplicaSetNames, "replica set name doesn't match.");

    // Old versions sometimes set the replica set name ("set") but ok:0
    const BSONElement replSetNameElement = doc[kReplSetFieldName];
    if (replSetNameElement.eoo()) {
        _setName.clear();
    } else if (replSetNameElement.type() != String) {
        return Status(ErrorCodes::TypeMismatch,
                      str::stream() << "Expected \"" << kReplSetFieldName
                                    << "\" field in response to replSetHeartbeat to have "
                                       "type String, but found "
                                    << typeName(replSetNameElement.type()));
    } else {
        _setName = replSetNameElement.String();
    }

    // No set name and not ok: surface the remote error code/message directly.
    if (_setName.empty() && !doc[kOkFieldName].trueValue()) {
        std::string errMsg = doc[kErrMsgFieldName].str();
        BSONElement errCodeElem = doc[kErrorCodeFieldName];
        if (errCodeElem.ok()) {
            if (!errCodeElem.isNumber())
                return Status(ErrorCodes::BadValue, "Error code is not a number!");
            int errorCode = errCodeElem.numberInt();
            return Status(ErrorCodes::Error(errorCode), errMsg);
        }
        return Status(ErrorCodes::UnknownError, errMsg);
    }

    const BSONElement hasDataElement = doc[kHasDataFieldName];
    _hasDataSet = !hasDataElement.eoo();
    _hasData = hasDataElement.trueValue();

    const BSONElement electionTimeElement = doc[kElectionTimeFieldName];
    if (electionTimeElement.eoo()) {
        _electionTimeSet = false;
    } else if (electionTimeElement.type() == bsonTimestamp) {
        _electionTimeSet = true;
        _electionTime = electionTimeElement.timestamp();
    } else if (electionTimeElement.type() == Date) {
        _electionTimeSet = true;
        _electionTime = Timestamp(electionTimeElement.date());
    } else {
        return Status(ErrorCodes::TypeMismatch,
                      str::stream() << "Expected \"" << kElectionTimeFieldName
                                    << "\" field in response to replSetHeartbeat "
                                       "command to have type Date or Timestamp, but found type "
                                    << typeName(electionTimeElement.type()));
    }

    const BSONElement timeElement = doc[kTimeFieldName];
    if (timeElement.eoo()) {
        _timeSet = false;
    } else if (timeElement.isNumber()) {
        _timeSet = true;
        _time = Seconds(timeElement.numberLong());
    } else {
        return Status(ErrorCodes::TypeMismatch,
                      str::stream() << "Expected \"" << kTimeFieldName
                                    << "\" field in response to replSetHeartbeat "
                                       "command to have a numeric type, but found type "
                                    << typeName(timeElement.type()));
    }

    _isReplSet = doc[kIsReplSetFieldName].trueValue();

    // Absent term is fine (V0 responses); any other extraction failure is fatal.
    Status termStatus = bsonExtractIntegerField(doc, kTermFieldName, &_term);
    if (!termStatus.isOK() && termStatus != ErrorCodes::NoSuchKey) {
        return termStatus;
    }

    // In order to support both the 3.0(V0) and 3.2(V1) heartbeats we must parse the OpTime
    // field based on its type. If it is a Date, we parse it as the timestamp and use
    // initialize's term argument to complete the OpTime type. If it is an Object, then it's
    // V1 and we construct an OpTime out of its nested fields.
    const BSONElement opTimeElement = doc[kOpTimeFieldName];
    if (opTimeElement.eoo()) {
        _opTimeSet = false;
    } else if (opTimeElement.type() == bsonTimestamp) {
        _opTimeSet = true;
        _opTime = OpTime(opTimeElement.timestamp(), term);
    } else if (opTimeElement.type() == Date) {
        _opTimeSet = true;
        _opTime = OpTime(Timestamp(opTimeElement.date()), term);
    } else if (opTimeElement.type() == Object) {
        Status status = bsonExtractOpTimeField(doc, kOpTimeFieldName, &_opTime);
        // FIX: this status was previously ignored, so a malformed V1 optime
        // object was silently accepted with _opTimeSet left true.
        if (!status.isOK()) {
            return status;
        }
        _opTimeSet = true;
        // since a v1 OpTime was in the response, the member must be part of a replset
        _isReplSet = true;
    } else {
        return Status(ErrorCodes::TypeMismatch,
                      str::stream() << "Expected \"" << kOpTimeFieldName
                                    << "\" field in response to replSetHeartbeat "
                                       "command to have type Date or Timestamp, but found type "
                                    << typeName(opTimeElement.type()));
    }

    const BSONElement electableElement = doc[kIsElectableFieldName];
    if (electableElement.eoo()) {
        _electableSet = false;
    } else {
        _electableSet = true;
        _electable = electableElement.trueValue();
    }

    const BSONElement memberStateElement = doc[kMemberStateFieldName];
    if (memberStateElement.eoo()) {
        _stateSet = false;
    } else if (memberStateElement.type() != NumberInt &&
               memberStateElement.type() != NumberLong) {
        return Status(ErrorCodes::TypeMismatch,
                      str::stream()
                          << "Expected \"" << kMemberStateFieldName
                          << "\" field in response to replSetHeartbeat "
                             "command to have type NumberInt or NumberLong, but found type "
                          << typeName(memberStateElement.type()));
    } else {
        long long stateInt = memberStateElement.numberLong();
        if (stateInt < 0 || stateInt > MemberState::RS_MAX) {
            return Status(ErrorCodes::BadValue,
                          str::stream()
                              << "Value for \"" << kMemberStateFieldName
                              << "\" in response to replSetHeartbeat is "
                                 "out of range; legal values are non-negative and no more than "
                              << MemberState::RS_MAX);
        }
        _stateSet = true;
        _state = MemberState(static_cast<int>(stateInt));
    }

    _stateDisagreement = doc[kHasStateDisagreementFieldName].trueValue();

    // Not required for the case of uninitialized members -- they have no config
    const BSONElement configVersionElement = doc[kConfigVersionFieldName];

    // If we have an optime then we must have a configVersion
    if (_opTimeSet && configVersionElement.eoo()) {
        return Status(ErrorCodes::NoSuchKey,
                      str::stream() << "Response to replSetHeartbeat missing required \""
                                    << kConfigVersionFieldName
                                    << "\" field even though initialized");
    }

    // If there is a "v" (config version) then it must be an int.
    if (!configVersionElement.eoo() && configVersionElement.type() != NumberInt) {
        return Status(ErrorCodes::TypeMismatch,
                      str::stream() << "Expected \"" << kConfigVersionFieldName
                                    << "\" field in response to replSetHeartbeat to have "
                                       "type NumberInt, but found "
                                    << typeName(configVersionElement.type()));
    }
    // numberInt() on a missing element yields 0, the "no config" sentinel.
    _configVersion = configVersionElement.numberInt();

    const BSONElement hbMsgElement = doc[kHbMessageFieldName];
    if (hbMsgElement.eoo()) {
        _hbmsg.clear();
    } else if (hbMsgElement.type() != String) {
        return Status(ErrorCodes::TypeMismatch,
                      str::stream() << "Expected \"" << kHbMessageFieldName
                                    << "\" field in response to replSetHeartbeat to have "
                                       "type String, but found "
                                    << typeName(hbMsgElement.type()));
    } else {
        _hbmsg = hbMsgElement.String();
    }

    const BSONElement syncingToElement = doc[kSyncSourceFieldName];
    if (syncingToElement.eoo()) {
        _syncingTo = HostAndPort();
    } else if (syncingToElement.type() != String) {
        return Status(ErrorCodes::TypeMismatch,
                      str::stream() << "Expected \"" << kSyncSourceFieldName
                                    << "\" field in response to replSetHeartbeat to "
                                       "have type String, but found "
                                    << typeName(syncingToElement.type()));
    } else {
        _syncingTo = HostAndPort(syncingToElement.String());
    }

    const BSONElement rsConfigElement = doc[kConfigFieldName];
    if (rsConfigElement.eoo()) {
        _configSet = false;
        _config = ReplicaSetConfig();
        return Status::OK();
    } else if (rsConfigElement.type() != Object) {
        return Status(ErrorCodes::TypeMismatch,
                      str::stream() << "Expected \"" << kConfigFieldName
                                    << "\" in response to replSetHeartbeat to have type "
                                       "Object, but found "
                                    << typeName(rsConfigElement.type()));
    }
    _configSet = true;
    return _config.initialize(rsConfigElement.Obj());
}
/**
 * Normalizes a user-supplied text-index spec into its canonical stored form:
 * rewrites text fields in "key" to the {_fts: "text", _ftsx: 1} pair, folds
 * per-field weights into a validated "weights" object, fills in
 * default_language / language_override defaults, and stamps textIndexVersion.
 * V1 specs are delegated to _fixSpecV1.  Throws via uassert/verify on any
 * malformed input.
 */
BSONObj FTSSpec::fixSpec( const BSONObj& spec ) {
    if ( spec["textIndexVersion"].numberInt() == TEXT_INDEX_VERSION_1 ) {
        return _fixSpecV1( spec );
    }

    map<string,int> m;  // field name -> weight

    BSONObj keyPattern;
    {
        BSONObjBuilder b;

        // Populate m and keyPattern.
        {
            // addedFtsStuff: the {_fts,_ftsx} pair is emitted once, at the
            // position of the first text field encountered.
            bool addedFtsStuff = false;
            BSONObjIterator i( spec["key"].Obj() );
            while ( i.more() ) {
                BSONElement e = i.next();
                if ( str::equals( e.fieldName(), "_fts" ) ) {
                    uassert( 17271,
                             "expecting _fts:\"text\"",
                             INDEX_NAME == e.valuestrsafe() );
                    addedFtsStuff = true;
                    b.append( e );
                }
                else if ( str::equals( e.fieldName(), "_ftsx" ) ) {
                    uassert( 17272, "expecting _ftsx:1", e.numberInt() == 1 );
                    b.append( e );
                }
                else if ( e.type() == String && INDEX_NAME == e.valuestr() ) {
                    // User-form text field, e.g. {title: "text"}: replace with
                    // the canonical pair and record a default weight of 1.
                    if ( !addedFtsStuff ) {
                        _addFTSStuff( &b );
                        addedFtsStuff = true;
                    }
                    m[e.fieldName()] = 1;
                }
                else {
                    // Plain (non-text) compound key component.
                    uassert( 17273,
                             "expected value 1 or -1 for non-text key in compound index",
                             e.numberInt() == 1 || e.numberInt() == -1 );
                    b.append( e );
                }
            }
            verify( addedFtsStuff );
        }
        keyPattern = b.obj();

        // Verify that index key is in the correct format: extraBefore fields, then text
        // fields, then extraAfter fields.
        {
            BSONObjIterator i( spec["key"].Obj() );
            BSONElement e;

            // extraBefore fields
            do {
                verify( i.more() );
                e = i.next();
            } while ( INDEX_NAME != e.valuestrsafe() );

            // text fields
            bool alreadyFixed = str::equals( e.fieldName(), "_fts" );
            if ( alreadyFixed ) {
                // Canonical form: _fts must be immediately followed by _ftsx.
                uassert( 17288, "expected _ftsx after _fts", i.more() );
                e = i.next();
                uassert( 17274,
                         "expected _ftsx after _fts",
                         str::equals( e.fieldName(), "_ftsx" ) );
                e = i.next();
            }
            else {
                // User form: consume the run of "text" fields, rejecting any
                // use of the reserved names.
                do {
                    uassert( 17289,
                             "text index with reserved fields _fts/ftsx not allowed",
                             !str::equals( e.fieldName(), "_fts" ) &&
                             !str::equals( e.fieldName(), "_ftsx" ) );
                    e = i.next();
                } while ( !e.eoo() && INDEX_NAME == e.valuestrsafe() );
            }

            // extraAfterFields
            while ( !e.eoo() ) {
                uassert( 17290,
                         "compound text index key suffix fields must have value 1",
                         e.numberInt() == 1 && !str::equals( "_ftsx", e.fieldName() ) );
                e = i.next();
            }
        }

    }

    if ( spec["weights"].type() == Object ) {
        BSONObjIterator i( spec["weights"].Obj() );
        while ( i.more() ) {
            BSONElement e = i.next();
            uassert( 17283, "weight for text index needs numeric type", e.isNumber() );
            m[e.fieldName()] = e.numberInt();

            // Verify weight refers to a valid field.
            if ( str::equals( e.fieldName(), "$**" ) ) {
                continue;  // wildcard needs no path validation
            }
            FieldRef keyField( e.fieldName() );
            uassert( 17294,
                     "weight cannot be on an empty field",
                     keyField.numParts() != 0 );
            for ( size_t i = 0; i < keyField.numParts(); i++ ) {
                StringData part = keyField.getPart(i);
                uassert( 17291, "weight cannot have empty path component", !part.empty() );
                uassert( 17292,
                         "weight cannot have path component with $ prefix",
                         !part.startsWith( "$" ) );
            }
        }
    }
    else if ( spec["weights"].str() == WILDCARD ) {
        m[WILDCARD] = 1;
    }
    else if ( !spec["weights"].eoo() ) {
        uasserted( 17284, "text index option 'weights' must be an object" );
    }

    BSONObj weights;
    {
        BSONObjBuilder b;
        for ( map<string,int>::iterator i = m.begin(); i != m.end(); ++i ) {
            uassert( 16674,
                     "score for word too high",
                     i->second > 0 && i->second < MAX_WORD_WEIGHT );
            b.append( i->first, i->second );
        }
        weights = b.obj();
    }

    BSONElement default_language_elt = spec["default_language"];
    string default_language( default_language_elt.str() );
    if ( default_language_elt.eoo() ) {
        default_language = moduleDefaultLanguage;
    }
    else {
        uassert( 17263,
                 "default_language needs a string type",
                 default_language_elt.type() == String );
    }
    uassert( 17264,
             "default_language is not valid",
             FTSLanguage::make( default_language,
                                TEXT_INDEX_VERSION_2 ).getStatus().isOK() );

    BSONElement language_override_elt = spec["language_override"];
    string language_override( language_override_elt.str() );
    if ( language_override_elt.eoo() ) {
        language_override = "language";
    }
    else {
        uassert( 17136,
                 "language_override is not valid",
                 language_override_elt.type() == String &&
                 validateOverride( language_override ) );
    }

    int version = -1;
    int textIndexVersion = TEXT_INDEX_VERSION_2;

    // Rebuild the spec, substituting the canonicalized values computed above;
    // each substituted value is blanked so the trailing appends below only add
    // the fields that were not present in the original spec.
    BSONObjBuilder b;
    BSONObjIterator i( spec );
    while ( i.more() ) {
        BSONElement e = i.next();
        if ( str::equals( e.fieldName(), "key" ) ) {
            b.append( "key", keyPattern );
        }
        else if ( str::equals( e.fieldName(), "weights" ) ) {
            b.append( "weights", weights );
            weights = BSONObj();
        }
        else if ( str::equals( e.fieldName(), "default_language" ) ) {
            b.append( "default_language", default_language);
            default_language = "";
        }
        else if ( str::equals( e.fieldName(), "language_override" ) ) {
            b.append( "language_override", language_override);
            language_override = "";
        }
        else if ( str::equals( e.fieldName(), "v" ) ) {
            version = e.numberInt();
        }
        else if ( str::equals( e.fieldName(), "textIndexVersion" ) ) {
            uassert( 17293,
                     "text index option 'textIndexVersion' must be a number",
                     e.isNumber() );
            textIndexVersion = e.numberInt();
            uassert( 16730,
                     str::stream() << "bad textIndexVersion: " << textIndexVersion,
                     textIndexVersion == TEXT_INDEX_VERSION_2 );
        }
        else {
            b.append( e );
        }
    }

    if ( !weights.isEmpty() )
        b.append( "weights", weights );
    if ( !default_language.empty() )
        b.append( "default_language", default_language);
    if ( !language_override.empty() )
        b.append( "language_override", language_override);
    if ( version >= 0 )
        b.append( "v", version );
    b.append( "textIndexVersion", textIndexVersion );

    return b.obj();
}
// static
// Compares a BSON description of an expected query plan ('testSoln') against the actual
// QuerySolutionNode tree rooted at 'trueSoln'. Returns true on a match. Each stage type is
// keyed by a field name in 'testSoln' (e.g. "cscan", "ixscan", "fetch"); internal stages
// recurse through their "node" child field. Optional fields that are absent from
// 'testSoln' act as wildcards, while an explicit null "filter" asserts the node has no
// filter. Unknown/unhandled stage types fail the match (returns false at the end).
bool QueryPlannerTestLib::solutionMatches(const BSONObj& testSoln, const QuerySolutionNode* trueSoln) {
    //
    // leaf nodes
    //
    if (STAGE_COLLSCAN == trueSoln->getType()) {
        const CollectionScanNode* csn = static_cast<const CollectionScanNode*>(trueSoln);
        BSONElement el = testSoln["cscan"];
        if (el.eoo() || !el.isABSONObj()) { return false; }
        BSONObj csObj = el.Obj();

        // "dir" is mandatory for a collscan description and must match the scan direction.
        BSONElement dir = csObj["dir"];
        if (dir.eoo() || !dir.isNumber()) { return false; }
        if (dir.numberInt() != csn->direction) { return false; }

        // "filter" is optional: absent = don't care, null = must have no filter.
        BSONElement filter = csObj["filter"];
        if (filter.eoo()) {
            return true;
        }
        else if (filter.isNull()) {
            return NULL == csn->filter;
        }
        else if (!filter.isABSONObj()) {
            return false;
        }
        return filterMatches(filter.Obj(), trueSoln);
    }
    else if (STAGE_IXSCAN == trueSoln->getType()) {
        const IndexScanNode* ixn = static_cast<const IndexScanNode*>(trueSoln);
        BSONElement el = testSoln["ixscan"];
        if (el.eoo() || !el.isABSONObj()) { return false; }
        BSONObj ixscanObj = el.Obj();

        // "pattern" (the index key pattern) is mandatory.
        BSONElement pattern = ixscanObj["pattern"];
        if (pattern.eoo() || !pattern.isABSONObj()) { return false; }
        if (pattern.Obj() != ixn->indexKeyPattern) { return false; }

        // "bounds" is optional; when present it must be an object matching the scan bounds.
        BSONElement bounds = ixscanObj["bounds"];
        if (!bounds.eoo()) {
            if (!bounds.isABSONObj()) { return false; }
            else if (!boundsMatch(bounds.Obj(), ixn->bounds)) { return false; }
        }

        // "dir" is optional and only checked when it is a NumberInt.
        BSONElement dir = ixscanObj["dir"];
        if (!dir.eoo() && NumberInt == dir.type()) {
            if (dir.numberInt() != ixn->direction) { return false; }
        }

        // Same optional/null/object filter convention as collscan above.
        BSONElement filter = ixscanObj["filter"];
        if (filter.eoo()) {
            return true;
        }
        else if (filter.isNull()) {
            return NULL == ixn->filter;
        }
        else if (!filter.isABSONObj()) {
            return false;
        }
        return filterMatches(filter.Obj(), trueSoln);
    }
    else if (STAGE_GEO_2D == trueSoln->getType()) {
        // Geo stages are matched only on the index key pattern.
        const Geo2DNode* node = static_cast<const Geo2DNode*>(trueSoln);
        BSONElement el = testSoln["geo2d"];
        if (el.eoo() || !el.isABSONObj()) { return false; }
        BSONObj geoObj = el.Obj();
        return geoObj == node->indexKeyPattern;
    }
    else if (STAGE_GEO_NEAR_2D == trueSoln->getType()) {
        const GeoNear2DNode* node = static_cast<const GeoNear2DNode*>(trueSoln);
        BSONElement el = testSoln["geoNear2d"];
        if (el.eoo() || !el.isABSONObj()) { return false; }
        BSONObj geoObj = el.Obj();
        return geoObj == node->indexKeyPattern;
    }
    else if (STAGE_GEO_NEAR_2DSPHERE == trueSoln->getType()) {
        const GeoNear2DSphereNode* node = static_cast<const GeoNear2DSphereNode*>(trueSoln);
        BSONElement el = testSoln["geoNear2dsphere"];
        if (el.eoo() || !el.isABSONObj()) { return false; }
        BSONObj geoObj = el.Obj();
        return geoObj == node->indexKeyPattern;
    }
    else if (STAGE_TEXT == trueSoln->getType()) {
        // {text: {search: "somestr", language: "something", filter: {blah: 1}}}
        const TextNode* node = static_cast<const TextNode*>(trueSoln);
        BSONElement el = testSoln["text"];
        if (el.eoo() || !el.isABSONObj()) { return false; }
        BSONObj textObj = el.Obj();

        // All text-stage fields are optional; only present ones are compared.
        BSONElement searchElt = textObj["search"];
        if (!searchElt.eoo()) {
            if (searchElt.String() != node->query) { return false; }
        }
        BSONElement languageElt = textObj["language"];
        if (!languageElt.eoo()) {
            if (languageElt.String() != node->language) { return false; }
        }
        BSONElement indexPrefix = textObj["prefix"];
        if (!indexPrefix.eoo()) {
            if (!indexPrefix.isABSONObj()) { return false; }
            if (0 != indexPrefix.Obj().woCompare(node->indexPrefix)) { return false; }
        }
        BSONElement filter = textObj["filter"];
        if (!filter.eoo()) {
            if (filter.isNull()) {
                if (NULL != node->filter) { return false; }
            }
            else if (!filter.isABSONObj()) {
                return false;
            }
            else if (!filterMatches(filter.Obj(), trueSoln)) {
                return false;
            }
        }
        return true;
    }

    //
    // internal nodes
    //
    if (STAGE_FETCH == trueSoln->getType()) {
        const FetchNode* fn = static_cast<const FetchNode*>(trueSoln);
        BSONElement el = testSoln["fetch"];
        if (el.eoo() || !el.isABSONObj()) { return false; }
        BSONObj fetchObj = el.Obj();

        // Optional filter (absent = wildcard, null = must be absent on the node).
        BSONElement filter = fetchObj["filter"];
        if (!filter.eoo()) {
            if (filter.isNull()) {
                if (NULL != fn->filter) { return false; }
            }
            else if (!filter.isABSONObj()) {
                return false;
            }
            else if (!filterMatches(filter.Obj(), trueSoln)) {
                return false;
            }
        }

        // A fetch must describe its single child; recurse into it.
        BSONElement child = fetchObj["node"];
        if (child.eoo() || !child.isABSONObj()) { return false; }
        return solutionMatches(child.Obj(), fn->children[0]);
    }
    else if (STAGE_OR == trueSoln->getType()) {
        const OrNode * orn = static_cast<const OrNode*>(trueSoln);
        BSONElement el = testSoln["or"];
        if (el.eoo() || !el.isABSONObj()) { return false; }
        BSONObj orObj = el.Obj();
        // Children are compared by the childrenMatch helper.
        return childrenMatch(orObj, orn);
    }
    else if (STAGE_AND_HASH == trueSoln->getType()) {
        const AndHashNode* ahn = static_cast<const AndHashNode*>(trueSoln);
        BSONElement el = testSoln["andHash"];
        if (el.eoo() || !el.isABSONObj()) { return false; }
        BSONObj andHashObj = el.Obj();
        // XXX: andHashObj can have a filter, which is not checked here.
        return childrenMatch(andHashObj, ahn);
    }
    else if (STAGE_AND_SORTED == trueSoln->getType()) {
        const AndSortedNode* asn = static_cast<const AndSortedNode*>(trueSoln);
        BSONElement el = testSoln["andSorted"];
        if (el.eoo() || !el.isABSONObj()) { return false; }
        BSONObj andSortedObj = el.Obj();
        // XXX: andSortedObj can have a filter too, also unchecked.
        return childrenMatch(andSortedObj, asn);
    }
    else if (STAGE_PROJECTION == trueSoln->getType()) {
        const ProjectionNode* pn = static_cast<const ProjectionNode*>(trueSoln);
        BSONElement el = testSoln["proj"];
        if (el.eoo() || !el.isABSONObj()) { return false; }
        BSONObj projObj = el.Obj();

        // Both the projection "spec" and the single child "node" are mandatory.
        BSONElement spec = projObj["spec"];
        if (spec.eoo() || !spec.isABSONObj()) { return false; }
        BSONElement child = projObj["node"];
        if (child.eoo() || !child.isABSONObj()) { return false; }

        return (spec.Obj() == pn->projection)
            && solutionMatches(child.Obj(), pn->children[0]);
    }
    else if (STAGE_SORT == trueSoln->getType()) {
        const SortNode* sn = static_cast<const SortNode*>(trueSoln);
        BSONElement el = testSoln["sort"];
        if (el.eoo() || !el.isABSONObj()) { return false; }
        BSONObj sortObj = el.Obj();

        // "pattern", "limit", and the child "node" are all mandatory here.
        BSONElement patternEl = sortObj["pattern"];
        if (patternEl.eoo() || !patternEl.isABSONObj()) { return false; }
        BSONElement limitEl = sortObj["limit"];
        if (!limitEl.isNumber()) { return false; }
        BSONElement child = sortObj["node"];
        if (child.eoo() || !child.isABSONObj()) { return false; }

        return (patternEl.Obj() == sn->pattern)
            && (limitEl.numberInt() == sn->limit)
            && solutionMatches(child.Obj(), sn->children[0]);
    }
    else if (STAGE_SORT_MERGE == trueSoln->getType()) {
        const MergeSortNode* msn = static_cast<const MergeSortNode*>(trueSoln);
        BSONElement el = testSoln["mergeSort"];
        if (el.eoo() || !el.isABSONObj()) { return false; }
        BSONObj mergeSortObj = el.Obj();
        return childrenMatch(mergeSortObj, msn);
    }
    else if (STAGE_SKIP == trueSoln->getType()) {
        const SkipNode* sn = static_cast<const SkipNode*>(trueSoln);
        BSONElement el = testSoln["skip"];
        if (el.eoo() || !el.isABSONObj()) { return false; }
        BSONObj sortObj = el.Obj();

        // "n" (the skip amount) and the child "node" are mandatory.
        BSONElement skipEl = sortObj["n"];
        if (!skipEl.isNumber()) { return false; }
        BSONElement child = sortObj["node"];
        if (child.eoo() || !child.isABSONObj()) { return false; }

        return (skipEl.numberInt() == sn->skip)
            && solutionMatches(child.Obj(), sn->children[0]);
    }
    else if (STAGE_LIMIT == trueSoln->getType()) {
        const LimitNode* ln = static_cast<const LimitNode*>(trueSoln);
        BSONElement el = testSoln["limit"];
        if (el.eoo() || !el.isABSONObj()) { return false; }
        BSONObj sortObj = el.Obj();

        // "n" (the limit) and the child "node" are mandatory.
        BSONElement limitEl = sortObj["n"];
        if (!limitEl.isNumber()) { return false; }
        BSONElement child = sortObj["node"];
        if (child.eoo() || !child.isABSONObj()) { return false; }

        return (limitEl.numberInt() == ln->limit)
            && solutionMatches(child.Obj(), ln->children[0]);
    }
    else if (STAGE_KEEP_MUTATIONS == trueSoln->getType()) {
        const KeepMutationsNode* kn = static_cast<const KeepMutationsNode*>(trueSoln);
        BSONElement el = testSoln["keep"];
        if (el.eoo() || !el.isABSONObj()) { return false; }
        BSONObj keepObj = el.Obj();

        // Doesn't have any parameters really; just match the child.
        BSONElement child = keepObj["node"];
        if (child.eoo() || !child.isABSONObj()) { return false; }
        return solutionMatches(child.Obj(), kn->children[0]);
    }

    // Unrecognized or unhandled stage type: no match.
    return false;
}
// Converts a single BSON element to its Lua representation and pushes it onto the
// Lua stack. Scalar BSON types map to native Lua values; compound/driver-specific
// types (Date, Timestamp, Symbol, RegEx, ObjectId, null) are wrapped in a typed
// table produced by push_bsontype_table, with the payload stored at index 1 (and,
// for RegEx, the flags at index 2). EOO pushes nothing; any other type raises a
// Lua error.
void lua_push_value(lua_State *L, const BSONElement &elem) {
    const int bson_type = elem.type();

    if (bson_type == mongo::Undefined) {
        lua_pushnil(L);
    }
    else if (bson_type == mongo::NumberInt) {
        lua_pushinteger(L, elem.numberInt());
    }
    else if (bson_type == mongo::NumberLong || bson_type == mongo::NumberDouble) {
        // Both widths are surfaced as a Lua number.
        lua_pushnumber(L, elem.number());
    }
    else if (bson_type == mongo::Bool) {
        lua_pushboolean(L, elem.boolean());
    }
    else if (bson_type == mongo::String) {
        lua_pushstring(L, elem.valuestr());
    }
    else if (bson_type == mongo::Array) {
        bson_to_array(L, elem.embeddedObject());
    }
    else if (bson_type == mongo::Object) {
        bson_to_table(L, elem.embeddedObject());
    }
    else if (bson_type == mongo::Date) {
        push_bsontype_table(L, mongo::Date);
        lua_pushnumber(L, elem.date());
        lua_rawseti(L, -2, 1);
    }
    else if (bson_type == mongo::Timestamp) {
        // NOTE(review): a Timestamp is wrapped in a Date-tagged table (its time
        // component only) — presumably deliberate, but worth confirming upstream.
        push_bsontype_table(L, mongo::Date);
        lua_pushnumber(L, elem.timestampTime());
        lua_rawseti(L, -2, 1);
    }
    else if (bson_type == mongo::Symbol) {
        push_bsontype_table(L, mongo::Symbol);
        lua_pushstring(L, elem.valuestr());
        lua_rawseti(L, -2, 1);
    }
    else if (bson_type == mongo::RegEx) {
        push_bsontype_table(L, mongo::RegEx);
        lua_pushstring(L, elem.regex());
        lua_rawseti(L, -2, 1);
        lua_pushstring(L, elem.regexFlags());
        lua_rawseti(L, -2, 2);
    }
    else if (bson_type == mongo::jstOID) {
        push_bsontype_table(L, mongo::jstOID);
        lua_pushstring(L, elem.__oid().str().c_str());
        lua_rawseti(L, -2, 1);
    }
    else if (bson_type == mongo::jstNULL) {
        push_bsontype_table(L, mongo::jstNULL);
    }
    else if (bson_type == mongo::EOO) {
        // End-of-object marker: nothing to push.
    }
    else {
        luaL_error(L, LUAMONGO_UNSUPPORTED_BSON_TYPE, bson_name(bson_type));
    }
}
// Test body: builds a MergeSortStage over 20 single-document index scans (one index per
// letter 'a'..'t', each document carrying a distinct "foo" value), verifies the first 10
// results come back in RecordId ('locs') order with the expected field values, then
// invalidates the next RecordId mid-scan and confirms the stage keeps producing the
// remaining results in order.
void run() {
    Client::WriteContext ctx(&_txn, ns());
    Database* db = ctx.ctx().db();
    Collection* coll = db->getCollection(&_txn, ns());
    // Create the collection on first use, inside its own unit of work.
    if (!coll) {
        WriteUnitOfWork wuow(&_txn);
        coll = db->createCollection(&_txn, ns());
        wuow.commit();
    }

    WorkingSet ws;
    // Sort by foo:1
    MergeSortStageParams msparams;
    msparams.pattern = BSON("foo" << 1);
    auto_ptr<MergeSortStage> ms(new MergeSortStage(msparams, &ws, coll));

    // Shared index-scan parameters: full-range forward scan; only 'descriptor'
    // changes per child below.
    IndexScanParams params;
    params.bounds.isSimpleRange = true;
    params.bounds.startKey = objWithMinKey(1);
    params.bounds.endKey = objWithMaxKey(1);
    params.bounds.endKeyInclusive = true;
    params.direction = 1;

    // Index 'a'+i has foo equal to 'i'.
    int numIndices = 20;
    for (int i = 0; i < numIndices; ++i) {
        // 'a', 'b', ...
        string index(1, 'a' + i);
        insert(BSON(index << 1 << "foo" << i));

        BSONObj indexSpec = BSON(index << 1 << "foo" << 1);
        addIndex(indexSpec);
        params.descriptor = getIndex(indexSpec, coll);
        ms->addChild(new IndexScan(&_txn, params, &ws, NULL));
    }

    set<RecordId> locs;
    getLocs(&locs, coll);
    set<RecordId>::iterator it = locs.begin();

    // Get 10 results. Should be getting results in order of 'locs'.
    int count = 0;
    while (!ms->isEOF() && count < 10) {
        WorkingSetID id = WorkingSet::INVALID_ID;
        PlanStage::StageState status = ms->work(&id);
        // Skip NEED_TIME and other non-result states.
        if (PlanStage::ADVANCED != status) { continue; }

        WorkingSetMember* member = ws.get(id);
        ASSERT_EQUALS(member->loc, *it);
        BSONElement elt;
        // Result #count comes from index 'a'+count and must carry foo == count.
        string index(1, 'a' + count);
        ASSERT(member->getFieldDotted(index, &elt));
        ASSERT_EQUALS(1, elt.numberInt());
        ASSERT(member->getFieldDotted("foo", &elt));
        ASSERT_EQUALS(count, elt.numberInt());
        ++count;
        ++it;
    }

    // Invalidate locs[11]. Should force a fetch. We don't get it back.
    ms->saveState();
    ms->invalidate(&_txn, *it, INVALIDATION_DELETION);
    ms->restoreState(&_txn);

    // Make sure locs[11] was fetched for us.
    {
        // TODO: If we have "return upon invalidation" ever triggerable, do the following test.
        /*
        WorkingSetID id = WorkingSet::INVALID_ID;
        PlanStage::StageState status;
        do {
            status = ms->work(&id);
        } while (PlanStage::ADVANCED != status);

        WorkingSetMember* member = ws.get(id);
        ASSERT(!member->hasLoc());
        ASSERT(member->hasObj());
        string index(1, 'a' + count);
        BSONElement elt;
        ASSERT_TRUE(member->getFieldDotted(index, &elt));
        ASSERT_EQUALS(1, elt.numberInt());
        ASSERT(member->getFieldDotted("foo", &elt));
        ASSERT_EQUALS(count, elt.numberInt());
        */
        // Skip past the invalidated document in our expected sequence.
        ++it;
        ++count;
    }

    // And get the rest.
    while (!ms->isEOF()) {
        WorkingSetID id = WorkingSet::INVALID_ID;
        PlanStage::StageState status = ms->work(&id);
        if (PlanStage::ADVANCED != status) { continue; }

        WorkingSetMember* member = ws.get(id);
        ASSERT_EQUALS(member->loc, *it);
        BSONElement elt;
        string index(1, 'a' + count);
        ASSERT_TRUE(member->getFieldDotted(index, &elt));
        ASSERT_EQUALS(1, elt.numberInt());
        ASSERT(member->getFieldDotted("foo", &elt));
        ASSERT_EQUALS(count, elt.numberInt());
        ++count;
        ++it;
    }
}
// Command body: computes the MD5 of a GridFS file by streaming its chunks, in
// (files_id, n) order, through an incremental md5 state. With "partialOk" the caller
// may supply a previously returned "md5state" and "startAt" so the digest can be
// resumed (used when chunks are spread across shards); the updated state is returned
// in the result. Drops and reacquires the collection lock around each chunk so the
// expensive md5 work happens unlocked.
bool run(OperationContext* txn, const string& dbname, BSONObj& jsobj, int, string& errmsg, BSONObjBuilder& result) {
    const std::string ns = parseNs(dbname, jsobj);

    md5digest d;
    md5_state_t st;
    md5_init(&st);

    // 'n' is the next chunk number we expect to see.
    int n = 0;

    bool partialOk = jsobj["partialOk"].trueValue();
    if (partialOk) {
        // WARNING: This code depends on the binary layout of md5_state. It will not be
        // compatible with different md5 libraries or work correctly in an environment with
        // mongod's of different endians. It is ok for mongos to be a different endian since
        // it just passes the buffer through to another mongod.
        BSONElement stateElem = jsobj["md5state"];
        if (!stateElem.eoo()) {
            int len;
            const char* data = stateElem.binDataClean(len);
            massert(16247, "md5 state not correct size", len == sizeof(st));
            memcpy(&st, data, sizeof(st));
        }
        n = jsobj["startAt"].numberInt();
    }

    // Chunks are selected by file id and consumed in ascending chunk-number order.
    BSONObj query = BSON("files_id" << jsobj["filemd5"] << "n" << GTE << n);
    BSONObj sort = BSON("files_id" << 1 << "n" << 1);

    MONGO_WRITE_CONFLICT_RETRY_LOOP_BEGIN {
        CanonicalQuery* cq;
        if (!CanonicalQuery::canonicalize(ns, query, sort, BSONObj(), &cq).isOK()) {
            uasserted(17240, "Can't canonicalize query " + query.toString());
            return 0;
        }

        // Check shard version at startup.
        // This will throw before we've done any work if shard version is outdated
        // We drop and re-acquire these locks every document because md5'ing is expensive
        unique_ptr<AutoGetCollectionForRead> ctx(new AutoGetCollectionForRead(txn, ns));
        Collection* coll = ctx->getCollection();

        // NO_TABLE_SCAN forces use of the (files_id, n) index so chunks arrive in order.
        PlanExecutor* rawExec;
        if (!getExecutor(txn, coll, cq, PlanExecutor::YIELD_MANUAL, &rawExec,
                         QueryPlannerParams::NO_TABLE_SCAN).isOK()) {
            uasserted(17241, "Can't get executor for query " + query.toString());
            return 0;
        }
        unique_ptr<PlanExecutor> exec(rawExec);
        // Process notifications when the lock is released/reacquired in the loop below
        exec->registerExec();

        BSONObj obj;
        PlanExecutor::ExecState state;
        while (PlanExecutor::ADVANCED == (state = exec->getNext(&obj, NULL))) {
            BSONElement ne = obj["n"];
            verify(ne.isNumber());
            int myn = ne.numberInt();
            if (n != myn) {
                if (partialOk) {
                    break; // skipped chunk is probably on another shard
                }
                log() << "should have chunk: " << n << " have:" << myn << endl;
                dumpChunks(txn, ns, query, sort);
                uassert(10040, "chunks out of order", n == myn);
            }

            // make a copy of obj since we access data in it while yielding locks
            BSONObj owned = obj.getOwned();
            exec->saveState();
            // UNLOCKED
            ctx.reset();

            int len;
            const char* data = owned["data"].binDataClean(len);
            // This is potentially an expensive operation, so do it out of the lock
            md5_append(&st, (const md5_byte_t*)(data), len);
            n++;

            try {
                // RELOCKED
                ctx.reset(new AutoGetCollectionForRead(txn, ns));
            }
            catch (const SendStaleConfigException& ex) {
                LOG(1) << "chunk metadata changed during filemd5, will retarget and continue";
                break;
            }

            // Have the lock again. See if we were killed.
            if (!exec->restoreState(txn)) {
                if (!partialOk) {
                    uasserted(13281, "File deleted during filemd5 command");
                }
            }
        }

        if (partialOk)
            result.appendBinData("md5state", sizeof(st), BinDataGeneral, &st);

        // This must be *after* the capture of md5state since it mutates st
        md5_finish(&st, d);

        result.append("numChunks", n);
        result.append("md5", digestToString(d));
    }
    MONGO_WRITE_CONFLICT_RETRY_LOOP_END(txn, "filemd5", dbname);
    return true;
}