Code Example #1
File: document.cpp, Project: i80and/mongo
Document Document::fromBsonWithMetaData(const BSONObj& bson) {
    MutableDocument md;

    BSONObjIterator it(bson);
    while (it.more()) {
        BSONElement elem(it.next());
        auto fieldName = elem.fieldNameStringData();
        if (fieldName[0] == '$') {
            if (fieldName == metaFieldTextScore) {
                md.setTextScore(elem.Double());
                continue;
            } else if (fieldName == metaFieldRandVal) {
                md.setRandMetaField(elem.Double());
                continue;
            } else if (fieldName == metaFieldSortKey) {
                md.setSortKeyMetaField(elem.Obj());
                continue;
            }
        }

        // Note: this will not parse out metadata in embedded documents.
        md.addField(fieldName, Value(elem));
    }

    return md.freeze();
}
Code Example #2
boost::optional<Document> DocumentSourceRedact::redactObject(const Variables& in) {
    const Value expressionResult = _expression->evaluate(in);

    if (expressionResult == keepVal) {
        return in.current.getDocument();
    }
    else if (expressionResult == pruneVal) {
        return boost::optional<Document>();
    }
    else if (expressionResult == descendVal) {
        MutableDocument out;
        FieldIterator fields(in.current.getDocument());
        while (fields.more()) {
            const Document::FieldPair field(fields.next());
            const Value val = redactValue(in, field.second);
            if (!val.missing()) {
                out.addField(field.first, val);
            }
        }
        return out.freeze();
    }
    else {
        uasserted(17053, str::stream() << "$redact's expression should not return anything "
                  << "aside from the variables $$KEEP, $$DESCEND, and "
                  << "$$PRUNE, but returned "
                  << expressionResult.toString());
    }
}
Code Example #3
 bool applyProjectionToOneField(StringData field) const {
     MutableDocument doc;
     const FieldPath f{field};
     doc.setNestedField(f, Value(1.0));
     const Document transformedDoc = applyTransformation(doc.freeze());
     return !transformedDoc.getNestedField(f).missing();
 }
Code Example #4
boost::optional<Document> DocumentSourceRedact::redactObject() {
    const Value expressionResult = _expression->evaluate(_variables.get());

    if (expressionResult == keepVal) {
        return _variables->getDocument(_currentId);
    } else if (expressionResult == pruneVal) {
        return boost::optional<Document>();
    } else if (expressionResult == descendVal) {
        const Document in = _variables->getDocument(_currentId);
        MutableDocument out;
        out.copyMetaDataFrom(in);
        FieldIterator fields(in);
        while (fields.more()) {
            const Document::FieldPair field(fields.next());

            // This changes CURRENT so don't read from _variables after this
            const Value val = redactValue(field.second);
            if (!val.missing()) {
                out.addField(field.first, val);
            }
        }
        return out.freeze();
    } else {
        uasserted(17053,
                  str::stream() << "$redact's expression should not return anything "
                                << "aside from the variables $$KEEP, $$DESCEND, and "
                                << "$$PRUNE, but returned "
                                << expressionResult.toString());
    }
}
Code Example #5
Document DocumentSourceSort::sortKeyPattern(SortKeySerialization serializationMode) const {
    MutableDocument keyObj;
    const size_t n = _sortPattern.size();
    for (size_t i = 0; i < n; ++i) {
        if (_sortPattern[i].fieldPath) {
            // Append a named integer based on whether the sort is ascending/descending.
            keyObj.setField(_sortPattern[i].fieldPath->fullPath(),
                            Value(_sortPattern[i].isAscending ? 1 : -1));
        } else {
            // Sorting by an expression, use a made up field name.
            auto computedFieldName = string(str::stream() << "$computed" << i);
            switch (serializationMode) {
                case SortKeySerialization::kForExplain:
                case SortKeySerialization::kForPipelineSerialization: {
                    const bool isExplain = (serializationMode == SortKeySerialization::kForExplain);
                    keyObj[computedFieldName] = _sortPattern[i].expression->serialize(isExplain);
                    break;
                }
                case SortKeySerialization::kForSortKeyMerging: {
                    // We need to be able to tell which direction the sort is. Expression sorts are
                    // always descending.
                    keyObj[computedFieldName] = Value(-1);
                    break;
                }
            }
        }
    }
    return keyObj.freeze();
}
Code Example #6
DocumentSource::GetNextResult DocumentSourceFacet::getNext() {
    pExpCtx->checkForInterrupt();

    if (_done) {
        return GetNextResult::makeEOF();
    }

    vector<vector<Value>> results(_facets.size());
    bool allPipelinesEOF = false;
    while (!allPipelinesEOF) {
        allPipelinesEOF = true;  // Set this to false if any pipeline isn't EOF.
        for (size_t facetId = 0; facetId < _facets.size(); ++facetId) {
            const auto& pipeline = _facets[facetId].pipeline;
            auto next = pipeline->getSources().back()->getNext();
            for (; next.isAdvanced(); next = pipeline->getSources().back()->getNext()) {
                results[facetId].emplace_back(next.releaseDocument());
            }
            allPipelinesEOF = allPipelinesEOF && next.isEOF();
        }
    }

    MutableDocument resultDoc;
    for (size_t facetId = 0; facetId < _facets.size(); ++facetId) {
        resultDoc[_facets[facetId].name] = Value(std::move(results[facetId]));
    }

    _done = true;  // We will only ever produce one result.
    return resultDoc.freeze();
}
Code Example #7
    Value DocumentSourceGroup::serialize(bool explain) const {
        MutableDocument insides;

        // add the _id
        if (_idFieldNames.empty()) {
            invariant(_idExpressions.size() == 1);
            insides["_id"] = _idExpressions[0]->serialize(explain);
        }
        else {
            // decomposed document case
            invariant(_idExpressions.size() == _idFieldNames.size());
            MutableDocument md;
            for (size_t i = 0; i < _idExpressions.size(); i++) {
                md[_idFieldNames[i]] = _idExpressions[i]->serialize(explain);
            }
            insides["_id"] = md.freezeToValue();
        }

        // add the remaining fields
        const size_t n = vFieldName.size();
        for(size_t i = 0; i < n; ++i) {
            intrusive_ptr<Accumulator> accum = vpAccumulatorFactory[i]();
            insides[vFieldName[i]] =
                Value(DOC(accum->getOpName() << vpExpression[i]->serialize(explain)));
        }

        if (_doingMerge) {
            // This makes the output unparsable (with error) on pre 2.6 shards, but it will never
            // be sent to old shards when this flag is true since they can't do a merge anyway.

            insides["$doingMerge"] = Value(true);
        }

        return Value(DOC(getSourceName() << insides.freeze()));
    }
Code Example #8
Value DocumentSourceGeoNear::serialize(bool explain) const {
    MutableDocument result;

    if (coordsIsArray) {
        result.setField("near", Value(BSONArray(coords)));
    } else {
        result.setField("near", Value(coords));
    }

    // not in buildGeoNearCmd
    result.setField("distanceField", Value(distanceField->fullPath()));

    result.setField("limit", Value(limit));

    if (maxDistance > 0)
        result.setField("maxDistance", Value(maxDistance));

    if (minDistance > 0)
        result.setField("minDistance", Value(minDistance));

    result.setField("query", Value(query));
    result.setField("spherical", Value(spherical));
    result.setField("distanceMultiplier", Value(distanceMultiplier));

    if (includeLocs)
        result.setField("includeLocs", Value(includeLocs->fullPath()));

    return Value(DOC(getSourceName() << result.freeze()));
}
Code Example #9
Document ExclusionNode::serialize() const {
    MutableDocument output;
    for (auto&& excludedField : _excludedFields) {
        output.addField(excludedField, Value(false));
    }

    for (auto&& childPair : _children) {
        output.addField(childPair.first, Value(childPair.second->serialize()));
    }
    return output.freeze();
}
Code Example #10
    void run() {
        MutableDocument md;
        md.addField("foo", Value(1));
        ASSERT_EQUALS(1U, md.peek().size());
        ASSERT_EQUALS(1, md.peek()["foo"].getInt());
        md.addField("bar", Value(99));
        ASSERT_EQUALS(2U, md.peek().size());
        ASSERT_EQUALS(99, md.peek()["bar"].getInt());
        // No assertion is triggered by a duplicate field name.
        md.addField("a", Value(5));

        Document final = md.freeze();
        ASSERT_EQUALS(3U, final.size());
        assertRoundTrips(final);
    }
Code Example #11
File: accumulator_avg.cpp, Project: 7segments/mongo
    Value AccumulatorAvg::getValue() const {
        if (!pCtx->getInShard()) {
            double avg = 0;
            if (count)
                avg = doubleTotal / static_cast<double>(count);

            return Value::createDouble(avg);
        }

        MutableDocument out;
        out.addField(subTotalName, Value::createDouble(doubleTotal));
        out.addField(countName, Value::createLong(count));

        return Value::createDocument(out.freeze());
    }
Code Example #12
    Document DocumentSourceSort::serializeSortKey() const {
        MutableDocument keyObj;
        // add the key fields
        const size_t n = vSortKey.size();
        for(size_t i = 0; i < n; ++i) {
            // get the field name out of each ExpressionFieldPath
            const FieldPath& withVariable = vSortKey[i]->getFieldPath();
            verify(withVariable.getPathLength() > 1);
            verify(withVariable.getFieldName(0) == "ROOT");
            const string fieldPath = withVariable.tail().getPath(false);

            // append a named integer based on the sort order
            keyObj.setField(fieldPath, Value(vAscending[i] ? 1 : -1));
        }
        return keyObj.freeze();
    }
Code Example #13
File: accumulator_avg.cpp, Project: Axv2/mongo
    Value AccumulatorAvg::getValue(bool toBeMerged) const {
        if (!toBeMerged) {
            double avg = 0;
            if (count)
                avg = doubleTotal / static_cast<double>(count);

            return Value(avg);
        }
        else {
            MutableDocument out;
            out.addField(subTotalName, Value(doubleTotal));
            out.addField(countName, Value(count));

            return Value(out.freeze());
        }
    }
Code Example #14
    void DocumentSourceGroup::sourceToBson(BSONObjBuilder* pBuilder, bool explain) const {
        MutableDocument insides;

        /* add the _id */
        insides["_id"] = pIdExpression->serialize();

        /* add the remaining fields */
        const size_t n = vFieldName.size();
        for(size_t i = 0; i < n; ++i) {
            intrusive_ptr<Accumulator> accum = vpAccumulatorFactory[i]();
            insides[vFieldName[i]] = Value(
                    DOC(accum->getOpName() << vpExpression[i]->serialize()));
        }

        *pBuilder << groupName << insides.freeze();
    }
Code Example #15
File: document_source.cpp, Project: DjComandos/mongo
    // Taken as a whole, these three functions should produce the same output document given the
    // same deps set as mongo::Projection::transform would on the output of depsToProjection. The
    // only exceptions are that we correctly handle the case where no fields are needed and we don't
    // need to work around the above mentioned bug with subfields of _id (SERVER-7502). This is
    // tested in a DEV block in DocumentSourceCursor::findNext().
    //
    // Output from this function is input for the next two
    //
    // ParsedDeps is a simple recursive look-up table. For each field in a ParsedDeps:
    //      If the value has type==Bool, the whole field is needed
    //      If the value has type==Object, the fields in the subobject are needed
    //      All other fields should be missing which means not needed
    DocumentSource::ParsedDeps DocumentSource::parseDeps(const set<string>& deps) {
        MutableDocument md;

        string last;
        for (set<string>::const_iterator it(deps.begin()), end(deps.end()); it!=end; ++it) {
            if (!last.empty() && str::startsWith(*it, last)) {
                // we are including a parent of *it so we don't need to include this field
                // explicitly. In fact, if we included this field, the parent wouldn't be fully
                // included.  This logic relies on set iterators going in lexicographic order so
                // that a string is always directly before all fields it prefixes.
                continue;
            }
            last = *it + '.';
            md.setNestedField(*it, Value(true));
        }

        return md.freeze();
    }
Code Example #16
    boost::optional<Document> DocumentSourceProject::getNext() {
        pExpCtx->checkForInterrupt();

        boost::optional<Document> input = pSource->getNext();
        if (!input)
            return boost::none;

        /* create the result document */
        const size_t sizeHint = pEO->getSizeHint();
        MutableDocument out (sizeHint);
        out.copyMetaDataFrom(*input);

        /*
          Use the ExpressionObject to create the base result.

          If we're excluding fields at the top level, leave out the _id if
          it is found, because we took care of it above.
        */
        _variables->setRoot(*input);
        pEO->addToDocument(out, *input, _variables.get());
        _variables->clearRoot();

#if defined(_DEBUG)
        if (!_simpleProjection.getSpec().isEmpty()) {
            // Make sure we return the same results as Projection class

            BSONObj inputBson = input->toBson();
            BSONObj outputBson = out.peek().toBson();

            BSONObj projected = _simpleProjection.transform(inputBson);

            if (projected != outputBson) {
                log() << "$project applied incorrectly: " << getRaw() << endl;
                log() << "input:  " << inputBson << endl;
                log() << "out: " << outputBson << endl;
                log() << "projected: " << projected << endl;
                verify(false); // exits in _DEBUG builds
            }
        }
#endif

        return out.freeze();
    }
Code Example #17
    boost::optional<Document> DocumentSourceGeoNear::getNext() {
        pExpCtx->checkForInterrupt();

        if (!resultsIterator)
            runCommand();

        if (!resultsIterator->more())
            return boost::none;

        // each result from the geoNear command is wrapped in a wrapper object with "obj",
        // "dis" and maybe "loc" fields. We want to take the object from "obj" and inject the
        // other fields into it.
        Document result (resultsIterator->next().embeddedObject());
        MutableDocument output (result["obj"].getDocument());
        output.setNestedField(*distanceField, result["dis"]);
        if (includeLocs)
            output.setNestedField(*includeLocs, result["loc"]);

        return output.freeze();
    }
Code Example #18
    Document DocumentSourceProject::getCurrent() {
        Document pInDocument(pSource->getCurrent());

        /* create the result document */
        const size_t sizeHint = pEO->getSizeHint();
        MutableDocument out (sizeHint);

        /*
          Use the ExpressionObject to create the base result.

          If we're excluding fields at the top level, leave out the _id if
          it is found, because we took care of it above.
        */
        pEO->addToDocument(out, pInDocument, /*root=*/pInDocument);

#if defined(_DEBUG)
        if (!_simpleProjection.getSpec().isEmpty()) {
            // Make sure we return the same results as Projection class

            BSONObjBuilder inputBuilder;
            pSource->getCurrent()->toBson(&inputBuilder);
            BSONObj input = inputBuilder.done();

            BSONObjBuilder outputBuilder;
            out.peek().toBson(&outputBuilder);
            BSONObj output = outputBuilder.done();

            BSONObj projected = _simpleProjection.transform(input);

            if (projected != output) {
                log() << "$project applied incorrectly: " << getRaw() << endl;
                log() << "input:  " << input << endl;
                log() << "out: " << output << endl;
                log() << "projected: " << projected << endl;
                verify(false); // exits in _DEBUG builds
            }
        }
#endif

        return out.freeze();
    }
Code Example #19
Document DocumentSourceSort::serializeSortKey(bool explain) const {
    MutableDocument keyObj;
    // add the key fields
    const size_t n = vSortKey.size();
    for (size_t i = 0; i < n; ++i) {
        if (ExpressionFieldPath* efp = dynamic_cast<ExpressionFieldPath*>(vSortKey[i].get())) {
            // ExpressionFieldPath gets special syntax that includes direction
            const FieldPath& withVariable = efp->getFieldPath();
            verify(withVariable.getPathLength() > 1);
            verify(withVariable.getFieldName(0) == "ROOT");
            const string fieldPath = withVariable.tail().fullPath();

            // append a named integer based on the sort order
            keyObj.setField(fieldPath, Value(vAscending[i] ? 1 : -1));
        } else {
            // other expressions use a made-up field name
            keyObj[string(str::stream() << "$computed" << i)] = vSortKey[i]->serialize(explain);
        }
    }
    return keyObj.freeze();
}
Code Example #20
DocumentSource::GetNextResult DocumentSourceIndexStats::getNext() {
    pExpCtx->checkForInterrupt();

    if (_indexStatsMap.empty()) {
        _indexStatsMap = _mongod->getIndexStats(pExpCtx->opCtx, pExpCtx->ns);
        _indexStatsIter = _indexStatsMap.begin();
    }

    if (_indexStatsIter != _indexStatsMap.end()) {
        const auto& stats = _indexStatsIter->second;
        MutableDocument doc;
        doc["name"] = Value(_indexStatsIter->first);
        doc["key"] = Value(stats.indexKey);
        doc["host"] = Value(_processName);
        doc["accesses"]["ops"] = Value(stats.accesses.loadRelaxed());
        doc["accesses"]["since"] = Value(stats.trackerStartTime);
        ++_indexStatsIter;
        return doc.freeze();
    }

    return GetNextResult::makeEOF();
}
Code Example #21
    Document DocumentSourceGroup::makeDocument(const Value& id,
                                               const Accumulators& accums,
                                               bool mergeableOutput) {
        const size_t n = vFieldName.size();
        MutableDocument out (1 + n);

        /* add the _id field */
        out.addField("_id", id);

        /* add the rest of the fields */
        for(size_t i = 0; i < n; ++i) {
            Value val = accums[i]->getValue(mergeableOutput);
            if (val.missing()) {
                // we return null in this case so return objects are predictable
                out.addField(vFieldName[i], Value(BSONNULL));
            }
            else {
                out.addField(vFieldName[i], val);
            }
        }

        return out.freeze();
    }
Code Example #22
    Document DocumentSourceGroup::makeDocument(
        const GroupsType::iterator &rIter) {
        vector<intrusive_ptr<Accumulator> > *pGroup = &rIter->second;
        const size_t n = vFieldName.size();
        MutableDocument out (1 + n);

        /* add the _id field */
        out.addField("_id", rIter->first);

        /* add the rest of the fields */
        for(size_t i = 0; i < n; ++i) {
            Value pValue((*pGroup)[i]->getValue());
            if (pValue.missing()) {
                // we return undefined in this case so return objects are predictable
                out.addField(vFieldName[i], Value(BSONUndefined));
            }
            else {
                out.addField(vFieldName[i], pValue);
            }
        }

        return out.freeze();
    }
Code Example #23
File: dependencies.cpp, Project: Machyne/mongo
// ParsedDeps::_fields is a simple recursive look-up table. For each field:
//      If the value has type==Bool, the whole field is needed
//      If the value has type==Object, the fields in the subobject are needed
//      All other fields should be missing which means not needed
boost::optional<ParsedDeps> DepsTracker::toParsedDeps() const {
    MutableDocument md;

    if (needWholeDocument || _needTextScore) {
        // can't use ParsedDeps in this case
        return boost::none;
    }

    string last;
    for (set<string>::const_iterator it(fields.begin()), end(fields.end()); it != end; ++it) {
        if (!last.empty() && str::startsWith(*it, last)) {
            // we are including a parent of *it so we don't need to include this field
            // explicitly. In fact, if we included this field, the parent wouldn't be fully
            // included.  This logic relies on set iterators going in lexicographic order so
            // that a string is always directly before all fields it prefixes.
            continue;
        }
        last = *it + '.';
        md.setNestedField(*it, Value(true));
    }

    return ParsedDeps(md.freeze());
}
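The prefix-skipping loop above (and its older copy in Code Example #15) leans on std::set's lexicographic ordering: once "a.b" has been kept, any later dependency such as "a.b.c" starts with "a.b." and is skipped, so only the shallowest required paths reach setNestedField(). Below is a standalone sketch of just that collapsing step using only the standard library; collapseToShallowestPaths is an illustrative name, not a function from the MongoDB tree.

#include <iostream>
#include <set>
#include <string>
#include <vector>

// Collapse a lexicographically ordered set of dotted paths to its shallowest
// members: a path is dropped when an already-kept path plus '.' is a prefix of
// it, mirroring the loop in toParsedDeps()/parseDeps() above.
std::vector<std::string> collapseToShallowestPaths(const std::set<std::string>& deps) {
    std::vector<std::string> kept;
    std::string last;  // last kept path, with a trailing '.'
    for (const std::string& dep : deps) {
        if (!last.empty() && dep.compare(0, last.size(), last) == 0) {
            continue;  // a parent of 'dep' is already kept
        }
        last = dep + '.';
        kept.push_back(dep);
    }
    return kept;
}

int main() {
    // {"a.b", "a.b.c", "d"} collapses to {"a.b", "d"}; the setNestedField()
    // calls above would then build the table { a: { b: true }, d: true }.
    for (const auto& field : collapseToShallowestPaths({"a.b", "a.b.c", "d"})) {
        std::cout << field << '\n';
    }
    return 0;
}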
Code Example #24
    boost::optional<Document> DocumentSourceProject::getNext() {
        pExpCtx->checkForInterrupt();

        boost::optional<Document> input = pSource->getNext();
        if (!input)
            return boost::none;

        /* create the result document */
        const size_t sizeHint = pEO->getSizeHint();
        MutableDocument out (sizeHint);
        out.copyMetaDataFrom(*input);

        /*
          Use the ExpressionObject to create the base result.

          If we're excluding fields at the top level, leave out the _id if
          it is found, because we took care of it above.
        */
        _variables->setRoot(*input);
        pEO->addToDocument(out, *input, _variables.get());
        _variables->clearRoot();

        return out.freeze();
    }
Code Example #25
boost::optional<Document> DocumentSourceFacet::getNext() {
    pExpCtx->checkForInterrupt();

    if (_done) {
        return boost::none;
    }
    _done = true;  // We will only ever produce one result.

    // Build the results by executing each pipeline serially, one at a time.
    MutableDocument results;
    for (auto&& facet : _facetPipelines) {
        auto facetName = facet.first;
        auto facetPipeline = facet.second;

        std::vector<Value> facetResults;
        while (auto next = facetPipeline->getSources().back()->getNext()) {
            facetResults.emplace_back(std::move(*next));
        }
        results[facetName] = Value(std::move(facetResults));
    }

    _teeBuffer->dispose();  // Clear the buffer since we'll no longer need it.
    return results.freeze();
}
Code Example #26
Document DocumentSourceChangeStreamTransform::applyTransformation(const Document& input) {
    // If we're executing a change stream pipeline that was forwarded from mongos, then we expect it
    // to "need merge"---we expect to be executing the shards part of a split pipeline. It is never
    // correct for mongos to pass through the change stream without splitting it into a merging
    // part executed on mongos and a shards part.
    //
    // This is necessary so that mongos can correctly handle "invalidate" and "retryNeeded" change
    // notifications. See SERVER-31978 for an example of why the pipeline must be split.
    //
    // We have to check this invariant at run-time of the change stream rather than parse time,
    // since a mongos may forward a change stream in an invalid position (e.g. in a nested $lookup
    // or $facet pipeline). In this case, mongod is responsible for parsing the pipeline and
    // throwing an error without ever executing the change stream.
    if (pExpCtx->fromMongos) {
        invariant(pExpCtx->needsMerge);
    }

    MutableDocument doc;

    // Extract the fields we need.
    checkValueType(input[repl::OplogEntry::kOpTypeFieldName],
                   repl::OplogEntry::kOpTypeFieldName,
                   BSONType::String);
    string op = input[repl::OplogEntry::kOpTypeFieldName].getString();
    Value ts = input[repl::OplogEntry::kTimestampFieldName];
    Value ns = input[repl::OplogEntry::kNssFieldName];
    checkValueType(ns, repl::OplogEntry::kNssFieldName, BSONType::String);
    Value uuid = input[repl::OplogEntry::kUuidFieldName];
    std::vector<FieldPath> documentKeyFields;

    // Deal with CRUD operations and commands.
    auto opType = repl::OpType_parse(IDLParserErrorContext("ChangeStreamEntry.op"), op);

    NamespaceString nss(ns.getString());
    // Ignore commands in the oplog when looking up the document key fields since a command implies
    // that the change stream is about to be invalidated (e.g. collection drop).
    if (!uuid.missing() && opType != repl::OpTypeEnum::kCommand) {
        checkValueType(uuid, repl::OplogEntry::kUuidFieldName, BSONType::BinData);
        // We need to retrieve the document key fields if our cache does not have an entry for this
        // UUID or if the cache entry is not definitively final, indicating that the collection was
        // unsharded when the entry was last populated.
        auto it = _documentKeyCache.find(uuid.getUuid());
        if (it == _documentKeyCache.end() || !it->second.isFinal) {
            auto docKeyFields =
                pExpCtx->mongoProcessInterface->collectDocumentKeyFieldsForHostedCollection(
                    pExpCtx->opCtx, nss, uuid.getUuid());
            if (it == _documentKeyCache.end() || docKeyFields.second) {
                _documentKeyCache[uuid.getUuid()] = DocumentKeyCacheEntry(docKeyFields);
            }
        }

        documentKeyFields = _documentKeyCache.find(uuid.getUuid())->second.documentKeyFields;
    }
    Value id = input.getNestedField("o._id");
    // Non-replace updates have the _id in field "o2".
    StringData operationType;
    Value fullDocument;
    Value updateDescription;
    Value documentKey;

    switch (opType) {
        case repl::OpTypeEnum::kInsert: {
            operationType = DocumentSourceChangeStream::kInsertOpType;
            fullDocument = input[repl::OplogEntry::kObjectFieldName];
            documentKey = Value(document_path_support::extractPathsFromDoc(
                fullDocument.getDocument(), documentKeyFields));
            break;
        }
        case repl::OpTypeEnum::kDelete: {
            operationType = DocumentSourceChangeStream::kDeleteOpType;
            documentKey = input[repl::OplogEntry::kObjectFieldName];
            break;
        }
        case repl::OpTypeEnum::kUpdate: {
            if (id.missing()) {
                operationType = DocumentSourceChangeStream::kUpdateOpType;
                checkValueType(input[repl::OplogEntry::kObjectFieldName],
                               repl::OplogEntry::kObjectFieldName,
                               BSONType::Object);
                Document opObject = input[repl::OplogEntry::kObjectFieldName].getDocument();
                Value updatedFields = opObject["$set"];
                Value removedFields = opObject["$unset"];

                // Extract the field names of $unset document.
                vector<Value> removedFieldsVector;
                if (removedFields.getType() == BSONType::Object) {
                    auto iter = removedFields.getDocument().fieldIterator();
                    while (iter.more()) {
                        removedFieldsVector.push_back(Value(iter.next().first));
                    }
                }
                updateDescription = Value(Document{
                    {"updatedFields", updatedFields.missing() ? Value(Document()) : updatedFields},
                    {"removedFields", removedFieldsVector}});
            } else {
                operationType = DocumentSourceChangeStream::kReplaceOpType;
                fullDocument = input[repl::OplogEntry::kObjectFieldName];
            }
            documentKey = input[repl::OplogEntry::kObject2FieldName];
            break;
        }
        case repl::OpTypeEnum::kCommand: {
            if (!input.getNestedField("o.drop").missing()) {
                operationType = DocumentSourceChangeStream::kDropCollectionOpType;

                // The "o.drop" field will contain the actual collection name.
                nss = NamespaceString(nss.db(), input.getNestedField("o.drop").getString());
            } else if (!input.getNestedField("o.renameCollection").missing()) {
                operationType = DocumentSourceChangeStream::kRenameCollectionOpType;

                // The "o.renameCollection" field contains the namespace of the original collection.
                nss = NamespaceString(input.getNestedField("o.renameCollection").getString());

                // The "o.to" field contains the target namespace for the rename.
                const auto renameTargetNss =
                    NamespaceString(input.getNestedField("o.to").getString());
                doc.addField(DocumentSourceChangeStream::kRenameTargetNssField,
                             Value(Document{{"db", renameTargetNss.db()},
                                            {"coll", renameTargetNss.coll()}}));
            } else if (!input.getNestedField("o.dropDatabase").missing()) {
                operationType = DocumentSourceChangeStream::kDropDatabaseOpType;

                // Extract the database name from the namespace field and leave the collection name
                // empty.
                nss = NamespaceString(nss.db());
            } else {
                // All other commands will invalidate the stream.
                operationType = DocumentSourceChangeStream::kInvalidateOpType;
            }

            // Make sure the result doesn't have a document key.
            documentKey = Value();
            break;
        }
        case repl::OpTypeEnum::kNoop: {
            operationType = DocumentSourceChangeStream::kNewShardDetectedOpType;
            // Generate a fake document Id for NewShardDetected operation so that we can resume
            // after this operation.
            documentKey = Value(Document{{DocumentSourceChangeStream::kIdField,
                                          input[repl::OplogEntry::kObject2FieldName]}});
            break;
        }
        default: { MONGO_UNREACHABLE; }
    }

    // UUID should always be present except for invalidate and dropDatabase entries.
    if (operationType != DocumentSourceChangeStream::kInvalidateOpType &&
        operationType != DocumentSourceChangeStream::kDropDatabaseOpType) {
        invariant(!uuid.missing(), "Saw a CRUD op without a UUID");
    }

    // Note that 'documentKey' and/or 'uuid' might be missing, in which case they will not appear
    // in the output.
    auto resumeTokenData = getResumeToken(ts, uuid, documentKey);
    auto resumeToken = ResumeToken(resumeTokenData).toDocument();

    // Add some additional fields only relevant to transactions.
    if (_txnContext) {
        doc.addField(DocumentSourceChangeStream::kTxnNumberField,
                     Value(static_cast<long long>(_txnContext->txnNumber)));
        doc.addField(DocumentSourceChangeStream::kLsidField, Value(_txnContext->lsid));
    }

    doc.addField(DocumentSourceChangeStream::kIdField, Value(resumeToken));
    doc.addField(DocumentSourceChangeStream::kOperationTypeField, Value(operationType));
    doc.addField(DocumentSourceChangeStream::kClusterTimeField, Value(resumeTokenData.clusterTime));

    // We set the resume token as the document's sort key in both the sharded and non-sharded cases,
    // since we will subsequently rely upon it to generate a correct postBatchResumeToken.
    // TODO SERVER-38539: when returning results for merging, we first check whether 'mergeByPBRT'
    // has been set. If not, then the request was sent from an older mongoS which cannot merge by
    // raw resume tokens, and we must use the old sort key format. This check, and the 'mergeByPBRT'
    // flag, are no longer necessary in 4.4; all change streams will be merged by resume token.
    if (pExpCtx->needsMerge && !pExpCtx->mergeByPBRT) {
        doc.setSortKeyMetaField(BSON("" << ts << "" << uuid << "" << documentKey));
    } else {
        doc.setSortKeyMetaField(resumeToken.toBson());
    }

    // "invalidate" and "newShardDetected" entries have fewer fields.
    if (operationType == DocumentSourceChangeStream::kInvalidateOpType ||
        operationType == DocumentSourceChangeStream::kNewShardDetectedOpType) {
        return doc.freeze();
    }

    doc.addField(DocumentSourceChangeStream::kFullDocumentField, fullDocument);
    doc.addField(DocumentSourceChangeStream::kNamespaceField,
                 operationType == DocumentSourceChangeStream::kDropDatabaseOpType
                     ? Value(Document{{"db", nss.db()}})
                     : Value(Document{{"db", nss.db()}, {"coll", nss.coll()}}));
    doc.addField(DocumentSourceChangeStream::kDocumentKeyField, documentKey);

    // Note that 'updateDescription' might be the 'missing' value, in which case it will not be
    // serialized.
    doc.addField("updateDescription", updateDescription);
    return doc.freeze();
}
Code Example #27
DocumentSource::GetNextResult DocumentSourceCurrentOp::getNext() {
    pExpCtx->checkForInterrupt();

    if (_ops.empty()) {
        _ops = pExpCtx->mongoProcessInterface->getCurrentOps(
            pExpCtx->opCtx, _includeIdleConnections, _includeOpsFromAllUsers, _truncateOps);

        _opsIter = _ops.begin();

        if (pExpCtx->fromMongos) {
            _shardName = pExpCtx->mongoProcessInterface->getShardName(pExpCtx->opCtx);

            uassert(40465,
                    "Aggregation request specified 'fromMongos' but unable to retrieve shard name "
                    "for $currentOp pipeline stage.",
                    !_shardName.empty());
        }
    }

    if (_opsIter != _ops.end()) {
        if (!pExpCtx->fromMongos) {
            return Document(*_opsIter++);
        }

        // This $currentOp is running in a sharded context.
        invariant(!_shardName.empty());

        const BSONObj& op = *_opsIter++;
        MutableDocument doc;

        // Add the shard name to the output document.
        doc.addField(kShardFieldName, Value(_shardName));

        // For operations on a shard, we change the opid from the raw numeric form to
        // 'shardname:opid'. We also change the fieldname 'client' to 'client_s' to indicate
        // that the IP is that of the mongos which initiated this request.
        for (auto&& elt : op) {
            StringData fieldName = elt.fieldNameStringData();

            if (fieldName == kOpIdFieldName) {
                uassert(ErrorCodes::TypeMismatch,
                        str::stream() << "expected numeric opid for $currentOp response from '"
                                      << _shardName
                                      << "' but got: "
                                      << typeName(elt.type()),
                        elt.isNumber());

                std::string shardOpID = (str::stream() << _shardName << ":" << elt.numberInt());
                doc.addField(kOpIdFieldName, Value(shardOpID));
            } else if (fieldName == kClientFieldName) {
                doc.addField(kMongosClientFieldName, Value(elt.str()));
            } else {
                doc.addField(fieldName, Value(elt));
            }
        }

        return doc.freeze();
    }

    return GetNextResult::makeEOF();
}
Code Example #28
Document DocumentSourceChangeStream::Transformation::applyTransformation(const Document& input) {
    MutableDocument doc;

    // Extract the fields we need.
    checkValueType(input[repl::OplogEntry::kOpTypeFieldName],
                   repl::OplogEntry::kOpTypeFieldName,
                   BSONType::String);
    string op = input[repl::OplogEntry::kOpTypeFieldName].getString();
    Value ts = input[repl::OplogEntry::kTimestampFieldName];
    Value ns = input[repl::OplogEntry::kNamespaceFieldName];
    checkValueType(ns, repl::OplogEntry::kNamespaceFieldName, BSONType::String);
    NamespaceString nss(ns.getString());
    Value id = input.getNestedField("o._id");
    // Non-replace updates have the _id in field "o2".
    Value documentId = id.missing() ? input.getNestedField("o2._id") : id;
    StringData operationType;
    Value fullDocument;
    Value updateDescription;

    // Deal with CRUD operations and commands.
    auto opType = repl::OpType_parse(IDLParserErrorContext("ChangeStreamEntry.op"), op);
    switch (opType) {
        case repl::OpTypeEnum::kInsert: {
            operationType = kInsertOpType;
            fullDocument = input[repl::OplogEntry::kObjectFieldName];
            break;
        }
        case repl::OpTypeEnum::kDelete: {
            operationType = kDeleteOpType;
            break;
        }
        case repl::OpTypeEnum::kUpdate: {
            if (id.missing()) {
                operationType = kUpdateOpType;
                checkValueType(input[repl::OplogEntry::kObjectFieldName],
                               repl::OplogEntry::kObjectFieldName,
                               BSONType::Object);
                Document opObject = input[repl::OplogEntry::kObjectFieldName].getDocument();
                Value updatedFields = opObject["$set"];
                Value removedFields = opObject["$unset"];

                // Extract the field names of $unset document.
                vector<Value> removedFieldsVector;
                if (removedFields.getType() == BSONType::Object) {
                    auto iter = removedFields.getDocument().fieldIterator();
                    while (iter.more()) {
                        removedFieldsVector.push_back(Value(iter.next().first));
                    }
                }
                updateDescription = Value(Document{
                    {"updatedFields", updatedFields.missing() ? Value(Document()) : updatedFields},
                    {"removedFields", removedFieldsVector}});
            } else {
                operationType = kReplaceOpType;
                fullDocument = input[repl::OplogEntry::kObjectFieldName];
            }
            break;
        }
        case repl::OpTypeEnum::kCommand: {
            operationType = kInvalidateOpType;
            // Make sure the result doesn't have a document id.
            documentId = Value();
            break;
        }
        default: { MONGO_UNREACHABLE; }
    }

    // Construct the result document.
    Value documentKey;
    if (!documentId.missing()) {
        documentKey = Value(Document{{kIdField, documentId}});
    }
    // Note that 'documentKey' might be missing, in which case it will not appear in the output.
    Document resumeToken{{kClusterTimeField, Document{{kTimestampField, ts}}},
                         {kNamespaceField, ns},
                         {kDocumentKeyField, documentKey}};
    doc.addField(kIdField, Value(resumeToken));
    doc.addField(kOperationTypeField, Value(operationType));
    doc.addField(kFullDocumentField, fullDocument);

    // "invalidate" entry has fewer fields.
    if (opType == repl::OpTypeEnum::kCommand) {
        return doc.freeze();
    }

    doc.addField(kNamespaceField, Value(Document{{"db", nss.db()}, {"coll", nss.coll()}}));
    doc.addField(kDocumentKeyField, documentKey);

    // Note that 'updateDescription' might be the 'missing' value, in which case it will not be
    // serialized.
    doc.addField("updateDescription", updateDescription);
    return doc.freeze();
}
Code Example #29
Document ProjectionNode::serialize(boost::optional<ExplainOptions::Verbosity> explain) const {
    MutableDocument outputDoc;
    serialize(explain, &outputDoc);
    return outputDoc.freeze();
}
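Taken together, these snippets share one pattern: accumulate fields on a MutableDocument, then freeze() it into an immutable Document (often wrapped in a Value). A minimal sketch of that pattern follows, assuming only the mongo Document/Value API shown above; the include path is assumed, makeSummaryDoc and its field names are made up for illustration, and the code only builds inside the MongoDB source tree.

#include "mongo/db/pipeline/document.h"  // assumed path for MutableDocument, Document, Value

namespace mongo {

// Build-then-freeze: all mutation happens on the MutableDocument, and freeze()
// hands back an immutable Document; none of the examples above touch the
// builder again after freezing it.
Document makeSummaryDoc(const std::string& name, long long count) {
    MutableDocument md;
    md.addField("name", Value(name));  // append a field, as in Code Example #10
    md["count"] = Value(count);        // operator[] assignment, as in Code Example #20
    if (count > 0) {
        md.setField("nonEmpty", Value(true));  // setField, as in Code Example #8
    }
    return md.freeze();
}

}  // namespace mongo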