boost::optional<Document> DocumentSourceRedact::redactObject() {
    const Value expressionResult = _expression->evaluate(_variables.get());

    if (expressionResult == keepVal) {
        return _variables->getDocument(_currentId);
    } else if (expressionResult == pruneVal) {
        return boost::optional<Document>();
    } else if (expressionResult == descendVal) {
        const Document in = _variables->getDocument(_currentId);
        MutableDocument out;
        out.copyMetaDataFrom(in);
        FieldIterator fields(in);
        while (fields.more()) {
            const Document::FieldPair field(fields.next());

            // This changes CURRENT so don't read from _variables after this
            const Value val = redactValue(field.second);
            if (!val.missing()) {
                out.addField(field.first, val);
            }
        }
        return out.freeze();
    } else {
        uasserted(17053,
                  str::stream() << "$redact's expression should not return anything "
                                << "aside from the variables $$KEEP, $$DESCEND, and "
                                << "$$PRUNE, but returned " << expressionResult.toString());
    }
}
boost::optional<Document> DocumentSourceRedact::redactObject(const Variables& in) {
    const Value expressionResult = _expression->evaluate(in);

    if (expressionResult == keepVal) {
        return in.current.getDocument();
    } else if (expressionResult == pruneVal) {
        return boost::optional<Document>();
    } else if (expressionResult == descendVal) {
        MutableDocument out;
        FieldIterator fields(in.current.getDocument());
        while (fields.more()) {
            const Document::FieldPair field(fields.next());
            const Value val = redactValue(in, field.second);
            if (!val.missing()) {
                out.addField(field.first, val);
            }
        }
        return out.freeze();
    } else {
        uasserted(17053,
                  str::stream() << "$redact's expression should not return anything "
                                << "aside from the variables $$KEEP, $$DESCEND, and "
                                << "$$PRUNE, but returned " << expressionResult.toString());
    }
}
Document DocumentSourceSort::sortKeyPattern(SortKeySerialization serializationMode) const {
    MutableDocument keyObj;
    const size_t n = _sortPattern.size();
    for (size_t i = 0; i < n; ++i) {
        if (_sortPattern[i].fieldPath) {
            // Append a named integer based on whether the sort is ascending/descending.
            keyObj.setField(_sortPattern[i].fieldPath->fullPath(),
                            Value(_sortPattern[i].isAscending ? 1 : -1));
        } else {
            // Sorting by an expression, use a made up field name.
            auto computedFieldName = string(str::stream() << "$computed" << i);
            switch (serializationMode) {
                case SortKeySerialization::kForExplain:
                case SortKeySerialization::kForPipelineSerialization: {
                    const bool isExplain =
                        (serializationMode == SortKeySerialization::kForExplain);
                    keyObj[computedFieldName] = _sortPattern[i].expression->serialize(isExplain);
                    break;
                }
                case SortKeySerialization::kForSortKeyMerging: {
                    // We need to be able to tell which direction the sort is. Expression sorts are
                    // always descending.
                    keyObj[computedFieldName] = Value(-1);
                    break;
                }
            }
        }
    }
    return keyObj.freeze();
}
DocumentSource::GetNextResult DocumentSourceFacet::getNext() {
    pExpCtx->checkForInterrupt();

    if (_done) {
        return GetNextResult::makeEOF();
    }

    vector<vector<Value>> results(_facets.size());
    bool allPipelinesEOF = false;
    while (!allPipelinesEOF) {
        allPipelinesEOF = true;  // Set this to false if any pipeline isn't EOF.
        for (size_t facetId = 0; facetId < _facets.size(); ++facetId) {
            const auto& pipeline = _facets[facetId].pipeline;
            auto next = pipeline->getSources().back()->getNext();
            for (; next.isAdvanced(); next = pipeline->getSources().back()->getNext()) {
                results[facetId].emplace_back(next.releaseDocument());
            }
            allPipelinesEOF = allPipelinesEOF && next.isEOF();
        }
    }

    MutableDocument resultDoc;
    for (size_t facetId = 0; facetId < _facets.size(); ++facetId) {
        resultDoc[_facets[facetId].name] = Value(std::move(results[facetId]));
    }

    _done = true;  // We will only ever produce one result.
    return resultDoc.freeze();
}
bool applyProjectionToOneField(StringData field) const {
    MutableDocument doc;
    const FieldPath f{field};
    doc.setNestedField(f, Value(1.0));
    const Document transformedDoc = applyTransformation(doc.freeze());
    return !transformedDoc.getNestedField(f).missing();
}
Value DocumentSourceGroup::serialize(bool explain) const {
    MutableDocument insides;

    // add the _id
    if (_idFieldNames.empty()) {
        invariant(_idExpressions.size() == 1);
        insides["_id"] = _idExpressions[0]->serialize(explain);
    } else {
        // decomposed document case
        invariant(_idExpressions.size() == _idFieldNames.size());
        MutableDocument md;
        for (size_t i = 0; i < _idExpressions.size(); i++) {
            md[_idFieldNames[i]] = _idExpressions[i]->serialize(explain);
        }
        insides["_id"] = md.freezeToValue();
    }

    // add the remaining fields
    const size_t n = vFieldName.size();
    for (size_t i = 0; i < n; ++i) {
        intrusive_ptr<Accumulator> accum = vpAccumulatorFactory[i]();
        insides[vFieldName[i]] =
            Value(DOC(accum->getOpName() << vpExpression[i]->serialize(explain)));
    }

    if (_doingMerge) {
        // This makes the output unparsable (with error) on pre 2.6 shards, but it will never
        // be sent to old shards when this flag is true since they can't do a merge anyway.
        insides["$doingMerge"] = Value(true);
    }

    return Value(DOC(getSourceName() << insides.freeze()));
}
void ProjectionNode::serialize(boost::optional<ExplainOptions::Verbosity> explain,
                               MutableDocument* output) const {
    // Determine the boolean value for projected fields in the explain output.
    const bool projVal = !applyLeafProjectionToValue(Value(true)).missing();

    // Always put "_id" first if it was projected (implicitly or explicitly).
    if (_projectedFields.find("_id") != _projectedFields.end()) {
        output->addField("_id", Value(projVal));
    }

    for (auto&& projectedField : _projectedFields) {
        if (projectedField != "_id") {
            output->addField(projectedField, Value(projVal));
        }
    }

    for (auto&& field : _orderToProcessAdditionsAndChildren) {
        auto childIt = _children.find(field);
        if (childIt != _children.end()) {
            MutableDocument subDoc;
            childIt->second->serialize(explain, &subDoc);
            output->addField(field, subDoc.freezeToValue());
        } else {
            invariant(_policies.computedFieldsPolicy == ComputedFieldsPolicy::kAllowComputedFields);
            auto expressionIt = _expressions.find(field);
            invariant(expressionIt != _expressions.end());
            output->addField(field, expressionIt->second->serialize(static_cast<bool>(explain)));
        }
    }
}
Value DocumentSourceFacet::serialize(bool explain) const {
    MutableDocument serialized;
    for (auto&& facet : _facets) {
        serialized[facet.name] =
            Value(explain ? facet.pipeline->writeExplainOps() : facet.pipeline->serialize());
    }
    return Value(Document{{"$facet", serialized.freezeToValue()}});
}
MutableValue MutableDocument::getNestedFieldHelper(const FieldPath& dottedField, size_t level) {
    if (level == dottedField.getPathLength() - 1) {
        return getField(dottedField.getFieldName(level));
    } else {
        MutableDocument nested(getField(dottedField.getFieldName(level)));
        return nested.getNestedFieldHelper(dottedField, level + 1);
    }
}
MutableValue MutableDocument::getNestedFieldHelper(const vector<Position>& positions,
                                                   size_t level) {
    if (level == positions.size() - 1) {
        return getField(positions[level]);
    } else {
        MutableDocument nested(getField(positions[level]));
        return nested.getNestedFieldHelper(positions, level + 1);
    }
}
Document ExclusionNode::serialize() const {
    MutableDocument output;
    for (auto&& excludedField : _excludedFields) {
        output.addField(excludedField, Value(false));
    }
    for (auto&& childPair : _children) {
        output.addField(childPair.first, Value(childPair.second->serialize()));
    }
    return output.freeze();
}
Value DocumentSourceCursor::serialize(boost::optional<ExplainOptions::Verbosity> verbosity) const {
    // We never parse a DocumentSourceCursor, so we only serialize for explain.
    if (!verbosity)
        return Value();

    invariant(_exec);
    uassert(50660,
            "Mismatch between verbosity passed to serialize() and expression context verbosity",
            verbosity == pExpCtx->explain);

    MutableDocument out;
    out["query"] = Value(_query);

    if (!_sort.isEmpty())
        out["sort"] = Value(_sort);

    if (_limit)
        out["limit"] = Value(_limit->getLimit());

    if (!_projection.isEmpty())
        out["fields"] = Value(_projection);

    BSONObjBuilder explainStatsBuilder;
    {
        auto opCtx = pExpCtx->opCtx;
        auto lockMode = getLockModeForQuery(opCtx, _exec->nss());
        AutoGetDb dbLock(opCtx, _exec->nss().db(), lockMode);
        Lock::CollectionLock collLock(opCtx, _exec->nss(), lockMode);
        auto collection = dbLock.getDb()
            ? dbLock.getDb()->getCollection(opCtx, _exec->nss())
            : nullptr;

        Explain::explainStages(_exec.get(),
                               collection,
                               verbosity.get(),
                               _execStatus,
                               _winningPlanTrialStats.get(),
                               BSONObj(),
                               &explainStatsBuilder);
    }

    BSONObj explainStats = explainStatsBuilder.obj();
    invariant(explainStats["queryPlanner"]);
    out["queryPlanner"] = Value(explainStats["queryPlanner"]);

    if (verbosity.get() >= ExplainOptions::Verbosity::kExecStats) {
        invariant(explainStats["executionStats"]);
        out["executionStats"] = Value(explainStats["executionStats"]);
    }

    return Value(DOC(getSourceName() << out.freezeToValue()));
}
Value DocumentSourceCursor::serialize(bool explain) const { // we never parse a documentSourceCursor, so we only serialize for explain if (!explain) return Value(); Status explainStatus(ErrorCodes::InternalError, ""); scoped_ptr<TypeExplain> plan; { Lock::DBRead lk(_ns); Client::Context ctx(_ns, storageGlobalParams.dbpath, /*doVersion=*/false); ClientCursorPin pin(_cursorId); ClientCursor* cursor = pin.c(); uassert(17135, "Cursor deleted. Was the collection or database dropped?", cursor); Runner* runner = cursor->getRunner(); runner->restoreState(); TypeExplain* explainRaw; explainStatus = runner->getExplainPlan(&explainRaw); if (explainStatus.isOK()) plan.reset(explainRaw); runner->saveState(); } MutableDocument out; out["query"] = Value(_query); if (!_sort.isEmpty()) out["sort"] = Value(_sort); if (_limit) out["limit"] = Value(_limit->getLimit()); if (!_projection.isEmpty()) out["fields"] = Value(_projection); if (explainStatus.isOK()) { out["plan"] = Value(extractInfo(plan)); } else { out["planError"] = Value(explainStatus.toString()); } return out.freezeToValue(); }
Value AccumulatorAvg::getValue() const {
    if (!pCtx->getInShard()) {
        double avg = 0;
        if (count)
            avg = doubleTotal / static_cast<double>(count);
        return Value::createDouble(avg);
    }

    MutableDocument out;
    out.addField(subTotalName, Value::createDouble(doubleTotal));
    out.addField(countName, Value::createLong(count));

    return Value::createDocument(out.freeze());
}
Value AccumulatorAvg::getValue(bool toBeMerged) const {
    if (!toBeMerged) {
        double avg = 0;
        if (count)
            avg = doubleTotal / static_cast<double>(count);
        return Value(avg);
    } else {
        MutableDocument out;
        out.addField(subTotalName, Value(doubleTotal));
        out.addField(countName, Value(count));
        return Value(out.freeze());
    }
}
Document DocumentSourceSort::serializeSortKey() const {
    MutableDocument keyObj;
    // add the key fields
    const size_t n = vSortKey.size();
    for (size_t i = 0; i < n; ++i) {
        // get the field name out of each ExpressionFieldPath
        const FieldPath& withVariable = vSortKey[i]->getFieldPath();
        verify(withVariable.getPathLength() > 1);
        verify(withVariable.getFieldName(0) == "ROOT");
        const string fieldPath = withVariable.tail().getPath(false);

        // append a named integer based on the sort order
        keyObj.setField(fieldPath, Value(vAscending[i] ? 1 : -1));
    }
    return keyObj.freeze();
}
void DocumentSourceGroup::sourceToBson(BSONObjBuilder* pBuilder, bool explain) const {
    MutableDocument insides;

    /* add the _id */
    insides["_id"] = pIdExpression->serialize();

    /* add the remaining fields */
    const size_t n = vFieldName.size();
    for (size_t i = 0; i < n; ++i) {
        intrusive_ptr<Accumulator> accum = vpAccumulatorFactory[i]();
        insides[vFieldName[i]] =
            Value(DOC(accum->getOpName() << vpExpression[i]->serialize()));
    }

    *pBuilder << groupName << insides.freeze();
}
Value DocumentSourceCursor::serialize(bool explain) const {
    // we never parse a documentSourceCursor, so we only serialize for explain
    if (!explain)
        return Value();

    // Get planner-level explain info from the underlying PlanExecutor.
    BSONObjBuilder explainBuilder;
    Status explainStatus(ErrorCodes::InternalError, "");
    {
        const NamespaceString nss(_ns);
        AutoGetCollectionForRead autoColl(pExpCtx->opCtx, nss);

        massert(17392, "No _exec. Were we disposed before explained?", _exec);

        _exec->restoreState(pExpCtx->opCtx);
        explainStatus = Explain::explainStages(pExpCtx->opCtx,
                                               _exec.get(),
                                               ExplainCommon::QUERY_PLANNER,
                                               &explainBuilder);
        _exec->saveState();
    }

    MutableDocument out;
    out["query"] = Value(_query);

    if (!_sort.isEmpty())
        out["sort"] = Value(_sort);

    if (_limit)
        out["limit"] = Value(_limit->getLimit());

    if (!_projection.isEmpty())
        out["fields"] = Value(_projection);

    // Add explain results from the query system into the agg explain output.
    if (explainStatus.isOK()) {
        BSONObj explainObj = explainBuilder.obj();
        invariant(explainObj.hasField("queryPlanner"));
        out["queryPlanner"] = Value(explainObj["queryPlanner"]);
    } else {
        out["planError"] = Value(explainStatus.toString());
    }

    return Value(DOC(getSourceName() << out.freezeToValue()));
}
// Taken as a whole, these three functions should produce the same output document given the
// same deps set as mongo::Projection::transform would on the output of depsToProjection. The
// only exceptions are that we correctly handle the case where no fields are needed and we don't
// need to work around the above-mentioned bug with subfields of _id (SERVER-7502). This is
// tested in a DEV block in DocumentSourceCursor::findNext().
//
// Output from this function is input for the next two.
//
// ParsedDeps is a simple recursive look-up table. For each field in a ParsedDeps:
//   If the value has type==Bool, the whole field is needed
//   If the value has type==Object, the fields in the subobject are needed
//   All other fields should be missing which means not needed
DocumentSource::ParsedDeps DocumentSource::parseDeps(const set<string>& deps) {
    MutableDocument md;

    string last;
    for (set<string>::const_iterator it(deps.begin()), end(deps.end()); it != end; ++it) {
        if (!last.empty() && str::startsWith(*it, last)) {
            // We are including a parent of *it, so we don't need to include this field
            // explicitly. In fact, if we included this field, the parent wouldn't be fully
            // included. This logic relies on set iterators going in lexicographic order so
            // that a string is always directly before all fields it prefixes.
            continue;
        }
        last = *it + '.';
        md.setNestedField(*it, Value(true));
    }

    return md.freeze();
}
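// Illustrative sketch, not from the source: for a hypothetical deps set
// {"a", "a.x", "b.c", "b.d"}, parseDeps builds the lookup table
// {a: true, b: {c: true, d: true}} -- "a.x" is skipped because its parent "a"
// is already fully included. A minimal standalone analogue of the
// prefix-skipping loop above, assuming only std::set's sorted iteration order:
#include <iostream>
#include <set>
#include <string>

int main() {
    const std::set<std::string> deps{"a", "a.x", "b.c", "b.d"};
    std::string last;
    for (const auto& field : deps) {
        // Sorted order guarantees a parent sorts directly before its subfields,
        // so comparing against the last included field + '.' detects children.
        if (!last.empty() && field.compare(0, last.size(), last) == 0)
            continue;  // parent already fully included
        last = field + '.';
        std::cout << "include: " << field << '\n';  // prints: a, b.c, b.d
    }
}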
Value DocumentSourceCursor::serialize(bool explain) const { // we never parse a documentSourceCursor, so we only serialize for explain if (!explain) return Value(); Status explainStatus(ErrorCodes::InternalError, ""); scoped_ptr<TypeExplain> plan; { Lock::DBRead lk(pExpCtx->opCtx->lockState(), _ns); Client::Context ctx(pExpCtx->opCtx, _ns, /*doVersion=*/ false); massert(17392, "No _runner. Were we disposed before explained?", _runner); _runner->restoreState(pExpCtx->opCtx); TypeExplain* explainRaw; explainStatus = _runner->getInfo(&explainRaw, NULL); if (explainStatus.isOK()) plan.reset(explainRaw); _runner->saveState(); } MutableDocument out; out["query"] = Value(_query); if (!_sort.isEmpty()) out["sort"] = Value(_sort); if (_limit) out["limit"] = Value(_limit->getLimit()); if (!_projection.isEmpty()) out["fields"] = Value(_projection); if (explainStatus.isOK()) { out["plan"] = Value(extractInfo(plan)); } else { out["planError"] = Value(explainStatus.toString()); } return Value(DOC(getSourceName() << out.freezeToValue())); }
void run() {
    MutableDocument md;
    md.addField("foo", Value(1));
    ASSERT_EQUALS(1U, md.peek().size());
    ASSERT_EQUALS(1, md.peek()["foo"].getInt());
    md.addField("bar", Value(99));
    ASSERT_EQUALS(2U, md.peek().size());
    ASSERT_EQUALS(99, md.peek()["bar"].getInt());
    // No assertion is triggered by a duplicate field name.
    md.addField("a", Value(5));

    Document final = md.freeze();
    ASSERT_EQUALS(3U, final.size());
    assertRoundTrips(final);
}
boost::optional<Document> DocumentSourceProject::getNext() {
    pExpCtx->checkForInterrupt();

    boost::optional<Document> input = pSource->getNext();
    if (!input)
        return boost::none;

    /* create the result document */
    const size_t sizeHint = pEO->getSizeHint();
    MutableDocument out(sizeHint);
    out.copyMetaDataFrom(*input);

    /*
      Use the ExpressionObject to create the base result.

      If we're excluding fields at the top level, leave out the _id if
      it is found, because we took care of it above.
    */
    _variables->setRoot(*input);
    pEO->addToDocument(out, *input, _variables.get());
    _variables->clearRoot();

#if defined(_DEBUG)
    if (!_simpleProjection.getSpec().isEmpty()) {
        // Make sure we return the same results as Projection class
        BSONObj inputBson = input->toBson();
        BSONObj outputBson = out.peek().toBson();
        BSONObj projected = _simpleProjection.transform(inputBson);
        if (projected != outputBson) {
            log() << "$project applied incorrectly: " << getRaw() << endl;
            log() << "input: " << inputBson << endl;
            log() << "out: " << outputBson << endl;
            log() << "projected: " << projected << endl;
            verify(false);  // exits in _DEBUG builds
        }
    }
#endif

    return out.freeze();
}
Document Document::fromBsonWithMetaData(const BSONObj& bson) {
    MutableDocument md;

    BSONObjIterator it(bson);
    while (it.more()) {
        BSONElement elem(it.next());
        if (elem.fieldName()[0] == '$') {
            if (elem.fieldNameStringData() == metaFieldTextScore) {
                md.setTextScore(elem.Double());
                continue;
            }
        }

        // Note: this will not parse out metadata in embedded documents.
        md.addField(elem.fieldNameStringData(), Value(elem));
    }

    return md.freeze();
}
boost::optional<Document> DocumentSourceGeoNear::getNext() {
    pExpCtx->checkForInterrupt();

    if (!resultsIterator)
        runCommand();

    if (!resultsIterator->more())
        return boost::none;

    // each result from the geoNear command is wrapped in a wrapper object with "obj",
    // "dis" and maybe "loc" fields. We want to take the object from "obj" and inject the
    // other fields into it.
    Document result(resultsIterator->next().embeddedObject());
    MutableDocument output(result["obj"].getDocument());
    output.setNestedField(*distanceField, result["dis"]);
    if (includeLocs)
        output.setNestedField(*includeLocs, result["loc"]);

    return output.freeze();
}
Document DocumentSourceProject::getCurrent() {
    Document pInDocument(pSource->getCurrent());

    /* create the result document */
    const size_t sizeHint = pEO->getSizeHint();
    MutableDocument out(sizeHint);

    /*
      Use the ExpressionObject to create the base result.

      If we're excluding fields at the top level, leave out the _id if
      it is found, because we took care of it above.
    */
    pEO->addToDocument(out, pInDocument, /*root=*/pInDocument);

#if defined(_DEBUG)
    if (!_simpleProjection.getSpec().isEmpty()) {
        // Make sure we return the same results as Projection class
        BSONObjBuilder inputBuilder;
        pSource->getCurrent()->toBson(&inputBuilder);
        BSONObj input = inputBuilder.done();

        BSONObjBuilder outputBuilder;
        out.peek().toBson(&outputBuilder);
        BSONObj output = outputBuilder.done();

        BSONObj projected = _simpleProjection.transform(input);

        if (projected != output) {
            log() << "$project applied incorrectly: " << getRaw() << endl;
            log() << "input: " << input << endl;
            log() << "out: " << output << endl;
            log() << "projected: " << projected << endl;
            verify(false);  // exits in _DEBUG builds
        }
    }
#endif

    return out.freeze();
}
Document DocumentSourceSort::serializeSortKey(bool explain) const {
    MutableDocument keyObj;
    // add the key fields
    const size_t n = vSortKey.size();
    for (size_t i = 0; i < n; ++i) {
        if (ExpressionFieldPath* efp = dynamic_cast<ExpressionFieldPath*>(vSortKey[i].get())) {
            // ExpressionFieldPath gets special syntax that includes direction
            const FieldPath& withVariable = efp->getFieldPath();
            verify(withVariable.getPathLength() > 1);
            verify(withVariable.getFieldName(0) == "ROOT");
            const string fieldPath = withVariable.tail().fullPath();

            // append a named integer based on the sort order
            keyObj.setField(fieldPath, Value(vAscending[i] ? 1 : -1));
        } else {
            // other expressions use a made-up field name
            keyObj[string(str::stream() << "$computed" << i)] = vSortKey[i]->serialize(explain);
        }
    }
    return keyObj.freeze();
}
Value DocumentSourceBucketAuto::serialize(bool explain) const {
    MutableDocument insides;

    insides["groupBy"] = _groupByExpression->serialize(explain);
    insides["buckets"] = Value(_nBuckets);

    if (_granularityRounder) {
        insides["granularity"] = Value(_granularityRounder->getName());
    }

    const size_t nOutputFields = _fieldNames.size();
    MutableDocument outputSpec(nOutputFields);
    for (size_t i = 0; i < nOutputFields; i++) {
        intrusive_ptr<Accumulator> accum = _accumulatorFactories[i]();
        outputSpec[_fieldNames[i]] =
            Value{Document{{accum->getOpName(), _expressions[i]->serialize(explain)}}};
    }
    insides["output"] = outputSpec.freezeToValue();

    return Value{Document{{getSourceName(), insides.freezeToValue()}}};
}
DocumentSource::GetNextResult DocumentSourceIndexStats::getNext() {
    pExpCtx->checkForInterrupt();

    if (_indexStatsMap.empty()) {
        _indexStatsMap = _mongod->getIndexStats(pExpCtx->opCtx, pExpCtx->ns);
        _indexStatsIter = _indexStatsMap.begin();
    }

    if (_indexStatsIter != _indexStatsMap.end()) {
        const auto& stats = _indexStatsIter->second;
        MutableDocument doc;
        doc["name"] = Value(_indexStatsIter->first);
        doc["key"] = Value(stats.indexKey);
        doc["host"] = Value(_processName);
        doc["accesses"]["ops"] = Value(stats.accesses.loadRelaxed());
        doc["accesses"]["since"] = Value(stats.trackerStartTime);
        ++_indexStatsIter;
        return doc.freeze();
    }

    return GetNextResult::makeEOF();
}
Value DocumentSourceCursor::serialize(boost::optional<ExplainOptions::Verbosity> explain) const {
    // We never parse a DocumentSourceCursor, so we only serialize for explain.
    if (!explain)
        return Value();

    // Get planner-level explain info from the underlying PlanExecutor.
    invariant(_exec);
    BSONObjBuilder explainBuilder;
    {
        AutoGetCollectionForRead autoColl(pExpCtx->opCtx, _exec->nss());

        _exec->restoreState();
        Explain::explainStages(_exec.get(), autoColl.getCollection(), *explain, &explainBuilder);
        _exec->saveState();
    }

    MutableDocument out;
    out["query"] = Value(_query);

    if (!_sort.isEmpty())
        out["sort"] = Value(_sort);

    if (_limit)
        out["limit"] = Value(_limit->getLimit());

    if (!_projection.isEmpty())
        out["fields"] = Value(_projection);

    // Add explain results from the query system into the agg explain output.
    BSONObj explainObj = explainBuilder.obj();
    invariant(explainObj.hasField("queryPlanner"));
    out["queryPlanner"] = Value(explainObj["queryPlanner"]);
    if (*explain >= ExplainOptions::Verbosity::kExecStats) {
        invariant(explainObj.hasField("executionStats"));
        out["executionStats"] = Value(explainObj["executionStats"]);
    }

    return Value(DOC(getSourceName() << out.freezeToValue()));
}
// ParsedDeps::_fields is a simple recursive look-up table. For each field:
//   If the value has type==Bool, the whole field is needed
//   If the value has type==Object, the fields in the subobject are needed
//   All other fields should be missing which means not needed
boost::optional<ParsedDeps> DepsTracker::toParsedDeps() const {
    MutableDocument md;

    if (needWholeDocument || _needTextScore) {
        // can't use ParsedDeps in this case
        return boost::none;
    }

    string last;
    for (set<string>::const_iterator it(fields.begin()), end(fields.end()); it != end; ++it) {
        if (!last.empty() && str::startsWith(*it, last)) {
            // We are including a parent of *it, so we don't need to include this field
            // explicitly. In fact, if we included this field, the parent wouldn't be fully
            // included. This logic relies on set iterators going in lexicographic order so
            // that a string is always directly before all fields it prefixes.
            continue;
        }
        last = *it + '.';
        md.setNestedField(*it, Value(true));
    }

    return ParsedDeps(md.freeze());
}
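// Illustrative sketch, not from the source: why the loop above must skip a
// field once its parent is included. Writing a nested path clobbers the
// parent's Bool marker with a sub-object, so the parent would no longer count
// as fully needed. The Node type below is a hypothetical toy stand-in for
// MutableDocument::setNestedField, using nested maps.
#include <iostream>
#include <map>
#include <string>

struct Node {
    bool whole = false;               // type==Bool: the whole field is needed
    std::map<std::string, Node> sub;  // type==Object: only these subfields are needed
};

void setNested(Node& root, const std::string& path) {
    const auto dot = path.find('.');
    if (dot == std::string::npos) {
        root.sub[path].whole = true;
        return;
    }
    Node& child = root.sub[path.substr(0, dot)];
    child.whole = false;  // overwrites a previous "whole field" marker
    setNested(child, path.substr(dot + 1));
}

int main() {
    Node deps;
    setNested(deps, "a");    // a: true -- all of "a" is needed
    setNested(deps, "a.b");  // a: {b: true} -- "a" is no longer fully included
    std::cout << std::boolalpha << deps.sub["a"].whole << '\n';  // prints: false
}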