void ProjectionNode::serialize(boost::optional<ExplainOptions::Verbosity> explain,
                               MutableDocument* output) const {
    // Every projected field serializes to the same boolean: probe the leaf
    // projection with a dummy value to learn whether this is an inclusion
    // (value survives) or an exclusion (value goes missing).
    const bool includeVal = !applyLeafProjectionToValue(Value(true)).missing();

    // "_id" always comes first in the output if it was projected, whether
    // implicitly or explicitly.
    if (_projectedFields.find("_id") != _projectedFields.end()) {
        output->addField("_id", Value(includeVal));
    }

    for (auto&& fieldName : _projectedFields) {
        if (fieldName == "_id") {
            continue;  // Already emitted above.
        }
        output->addField(fieldName, Value(includeVal));
    }

    // Emit sub-projections and computed fields in their original declared order.
    for (auto&& fieldName : _orderToProcessAdditionsAndChildren) {
        auto childIt = _children.find(fieldName);
        if (childIt == _children.end()) {
            // Not a child node, so it must be a computed field; this is only
            // legal when the projection policy allows computed fields.
            invariant(_policies.computedFieldsPolicy ==
                      ComputedFieldsPolicy::kAllowComputedFields);
            auto exprIt = _expressions.find(fieldName);
            invariant(exprIt != _expressions.end());
            output->addField(fieldName, exprIt->second->serialize(static_cast<bool>(explain)));
        } else {
            MutableDocument nested;
            childIt->second->serialize(explain, &nested);
            output->addField(fieldName, nested.freezeToValue());
        }
    }
}
Value DocumentSourceGroup::serialize(bool explain) const {
    MutableDocument insides;

    // Serialize the group key under "_id".
    if (!_idFieldNames.empty()) {
        // Decomposed (document-valued) _id: rebuild the sub-document from the
        // parallel name/expression vectors.
        invariant(_idExpressions.size() == _idFieldNames.size());
        MutableDocument idDoc;
        for (size_t i = 0; i < _idExpressions.size(); ++i) {
            idDoc[_idFieldNames[i]] = _idExpressions[i]->serialize(explain);
        }
        insides["_id"] = idDoc.freezeToValue();
    } else {
        // Single-expression _id.
        invariant(_idExpressions.size() == 1);
        insides["_id"] = _idExpressions[0]->serialize(explain);
    }

    // Serialize each accumulated field as {<accumulatorOp>: <expression>}.
    for (size_t i = 0; i < vFieldName.size(); ++i) {
        intrusive_ptr<Accumulator> accum = vpAccumulatorFactory[i]();
        insides[vFieldName[i]] =
            Value(DOC(accum->getOpName() << vpExpression[i]->serialize(explain)));
    }

    if (_doingMerge) {
        // This makes the output unparsable (with error) on pre 2.6 shards, but it
        // will never be sent to old shards when this flag is true since they can't
        // do a merge anyway.
        insides["$doingMerge"] = Value(true);
    }

    return Value(DOC(getSourceName() << insides.freeze()));
}
Value DocumentSourceFacet::serialize(bool explain) const {
    MutableDocument facetsDoc;
    for (auto&& facet : _facets) {
        // Under explain, emit the explain form of each sub-pipeline; otherwise
        // emit its normal serialization.
        if (explain) {
            facetsDoc[facet.name] = Value(facet.pipeline->writeExplainOps());
        } else {
            facetsDoc[facet.name] = Value(facet.pipeline->serialize());
        }
    }
    return Value(Document{{"$facet", facetsDoc.freezeToValue()}});
}
Value DocumentSourceCursor::serialize(boost::optional<ExplainOptions::Verbosity> verbosity) const {
    // We never parse a DocumentSourceCursor, so we only serialize for explain.
    if (!verbosity)
        return Value();

    invariant(_exec);

    uassert(50660,
            "Mismatch between verbosity passed to serialize() and expression context verbosity",
            verbosity == pExpCtx->explain);

    // Describe the underlying query shape first.
    MutableDocument out;
    out["query"] = Value(_query);

    if (!_sort.isEmpty())
        out["sort"] = Value(_sort);

    if (_limit)
        out["limit"] = Value(_limit->getLimit());

    if (!_projection.isEmpty())
        out["fields"] = Value(_projection);

    BSONObjBuilder explainStatsBuilder;

    {
        // Acquire the database lock before the collection lock (in that order),
        // both at the mode appropriate for the query, so the collection can be
        // safely inspected while generating the explain output. The collection
        // may be null if the database or collection no longer exists.
        auto opCtx = pExpCtx->opCtx;
        auto lockMode = getLockModeForQuery(opCtx, _exec->nss());
        AutoGetDb dbLock(opCtx, _exec->nss().db(), lockMode);
        Lock::CollectionLock collLock(opCtx, _exec->nss(), lockMode);
        auto collection = dbLock.getDb() ? dbLock.getDb()->getCollection(opCtx, _exec->nss())
                                         : nullptr;

        Explain::explainStages(_exec.get(),
                               collection,
                               verbosity.get(),
                               _execStatus,
                               _winningPlanTrialStats.get(),
                               BSONObj(),
                               &explainStatsBuilder);
    }

    // Fold the query system's explain output into the aggregation explain.
    BSONObj explainStats = explainStatsBuilder.obj();
    invariant(explainStats["queryPlanner"]);
    out["queryPlanner"] = Value(explainStats["queryPlanner"]);

    // Execution-level statistics are only present at kExecStats and above.
    if (verbosity.get() >= ExplainOptions::Verbosity::kExecStats) {
        invariant(explainStats["executionStats"]);
        out["executionStats"] = Value(explainStats["executionStats"]);
    }

    return Value(DOC(getSourceName() << out.freezeToValue()));
}
Value DocumentSourceCursor::serialize(bool explain) const {
    // we never parse a documentSourceCursor, so we only serialize for explain
    if (!explain)
        return Value();

    Status explainStatus(ErrorCodes::InternalError, "");
    scoped_ptr<TypeExplain> plan;
    {
        // Take a DB read lock and establish a context, then pin the client
        // cursor so the underlying Runner stays valid while we extract the
        // explain plan.
        Lock::DBRead lk(_ns);
        Client::Context ctx(_ns, storageGlobalParams.dbpath, /*doVersion=*/false);
        ClientCursorPin pin(_cursorId);
        ClientCursor* cursor = pin.c();

        uassert(17135, "Cursor deleted. Was the collection or database dropped?", cursor);

        Runner* runner = cursor->getRunner();
        // The runner must be restored before it can be queried, and saved
        // again afterwards so the cursor remains usable.
        runner->restoreState();

        TypeExplain* explainRaw;
        explainStatus = runner->getExplainPlan(&explainRaw);
        if (explainStatus.isOK())
            plan.reset(explainRaw);  // take ownership of the raw plan

        runner->saveState();
    }

    // Describe the query shape alongside the plan (or plan error).
    MutableDocument out;
    out["query"] = Value(_query);

    if (!_sort.isEmpty())
        out["sort"] = Value(_sort);

    if (_limit)
        out["limit"] = Value(_limit->getLimit());

    if (!_projection.isEmpty())
        out["fields"] = Value(_projection);

    if (explainStatus.isOK()) {
        out["plan"] = Value(extractInfo(plan));
    } else {
        // Surface the failure rather than omitting the plan silently.
        out["planError"] = Value(explainStatus.toString());
    }

    return out.freezeToValue();
}
Value DocumentSourceCursor::serialize(bool explain) const {
    // This stage is never re-parsed, so serialization exists only for explain.
    if (!explain)
        return Value();

    // Collect planner-level explain info from the underlying PlanExecutor
    // while holding the collection read lock, with the executor temporarily
    // restored around the call.
    BSONObjBuilder plannerBob;
    Status explainStatus(ErrorCodes::InternalError, "");
    {
        const NamespaceString nss(_ns);
        AutoGetCollectionForRead autoColl(pExpCtx->opCtx, nss);

        massert(17392, "No _exec. Were we disposed before explained?", _exec);

        _exec->restoreState(pExpCtx->opCtx);
        explainStatus = Explain::explainStages(
            pExpCtx->opCtx, _exec.get(), ExplainCommon::QUERY_PLANNER, &plannerBob);
        _exec->saveState();
    }

    // Describe the query shape.
    MutableDocument out;
    out["query"] = Value(_query);
    if (!_sort.isEmpty())
        out["sort"] = Value(_sort);
    if (_limit)
        out["limit"] = Value(_limit->getLimit());
    if (!_projection.isEmpty())
        out["fields"] = Value(_projection);

    // Fold the query system's explain results into the agg explain output.
    if (!explainStatus.isOK()) {
        out["planError"] = Value(explainStatus.toString());
    } else {
        BSONObj plannerObj = plannerBob.obj();
        invariant(plannerObj.hasField("queryPlanner"));
        out["queryPlanner"] = Value(plannerObj["queryPlanner"]);
    }

    return Value(DOC(getSourceName() << out.freezeToValue()));
}
Value DocumentSourceCursor::serialize(bool explain) const {
    // This stage is never re-parsed, so serialization exists only for explain.
    if (!explain)
        return Value();

    Status explainStatus(ErrorCodes::InternalError, "");
    scoped_ptr<TypeExplain> plan;
    {
        Lock::DBRead lk(pExpCtx->opCtx->lockState(), _ns);
        Client::Context ctx(pExpCtx->opCtx, _ns, /*doVersion=*/false);

        massert(17392, "No _runner. Were we disposed before explained?", _runner);

        // The runner must be restored before it can be queried, and saved
        // again afterwards.
        _runner->restoreState(pExpCtx->opCtx);

        TypeExplain* rawPlan;
        explainStatus = _runner->getInfo(&rawPlan, NULL);
        if (explainStatus.isOK()) {
            plan.reset(rawPlan);  // take ownership of the raw plan
        }

        _runner->saveState();
    }

    // Describe the query shape alongside the plan (or plan error).
    MutableDocument out;
    out["query"] = Value(_query);
    if (!_sort.isEmpty())
        out["sort"] = Value(_sort);
    if (_limit)
        out["limit"] = Value(_limit->getLimit());
    if (!_projection.isEmpty())
        out["fields"] = Value(_projection);

    if (!explainStatus.isOK()) {
        out["planError"] = Value(explainStatus.toString());
    } else {
        out["plan"] = Value(extractInfo(plan));
    }

    return Value(DOC(getSourceName() << out.freezeToValue()));
}
Value DocumentSourceBucketAuto::serialize(bool explain) const {
    MutableDocument spec;
    spec["groupBy"] = _groupByExpression->serialize(explain);
    spec["buckets"] = Value(_nBuckets);

    // "granularity" only appears when a rounder was configured.
    if (_granularityRounder) {
        spec["granularity"] = Value(_granularityRounder->getName());
    }

    // Each output field serializes as {<fieldName>: {<accumulatorOp>: <expression>}}.
    const size_t numFields = _fieldNames.size();
    MutableDocument outputSpec(numFields);
    for (size_t i = 0; i < numFields; ++i) {
        intrusive_ptr<Accumulator> accum = _accumulatorFactories[i]();
        outputSpec[_fieldNames[i]] =
            Value{Document{{accum->getOpName(), _expressions[i]->serialize(explain)}}};
    }
    spec["output"] = outputSpec.freezeToValue();

    return Value{Document{{getSourceName(), spec.freezeToValue()}}};
}
Value DocumentSourceCursor::serialize(boost::optional<ExplainOptions::Verbosity> explain) const {
    // This stage is never re-parsed, so serialization exists only for explain.
    if (!explain)
        return Value();

    invariant(_exec);

    // Gather the query system's explain output while holding the collection
    // read lock, with the executor temporarily restored around the call.
    BSONObjBuilder statsBob;
    {
        AutoGetCollectionForRead autoColl(pExpCtx->opCtx, _exec->nss());
        _exec->restoreState();
        Explain::explainStages(_exec.get(), autoColl.getCollection(), *explain, &statsBob);
        _exec->saveState();
    }

    // Describe the query shape.
    MutableDocument out;
    out["query"] = Value(_query);
    if (!_sort.isEmpty())
        out["sort"] = Value(_sort);
    if (_limit)
        out["limit"] = Value(_limit->getLimit());
    if (!_projection.isEmpty())
        out["fields"] = Value(_projection);

    // Fold the query system's explain results into the agg explain output.
    BSONObj stats = statsBob.obj();
    invariant(stats.hasField("queryPlanner"));
    out["queryPlanner"] = Value(stats["queryPlanner"]);

    // Execution-level statistics only appear at kExecStats verbosity and above.
    if (*explain >= ExplainOptions::Verbosity::kExecStats) {
        invariant(stats.hasField("executionStats"));
        out["executionStats"] = Value(stats["executionStats"]);
    }

    return Value(DOC(getSourceName() << out.freezeToValue()));
}
Value ProjectionNode::applyExpressionsToValue(const Document& root, Value inputValue) const {
    switch (inputValue.getType()) {
        case BSONType::Object: {
            // Apply the computed expressions on top of the existing document.
            MutableDocument result(inputValue.getDocument());
            applyExpressions(root, &result);
            return result.freezeToValue();
        }
        case BSONType::Array: {
            // Recurse into each element of the array.
            std::vector<Value> elements = inputValue.getArray();
            for (auto& element : elements) {
                element = applyExpressionsToValue(root, element);
            }
            return Value(std::move(elements));
        }
        default: {
            if (!subtreeContainsComputedFields()) {
                // We didn't have any expressions, so just skip this value.
                return transformSkippedValueForOutput(inputValue);
            }
            // Our semantics in this case are to replace whatever existing value we
            // find with a new document of all the computed values. This case
            // represents applying a projection like {"a.b": {$literal: 1}} to the
            // document {a: 1}. This should yield {a: {b: 1}}.
            MutableDocument result;
            applyExpressions(root, &result);
            return result.freezeToValue();
        }
    }
}