/**
 * Factory function for producing DbCheckRun's from command objects.
 */
std::unique_ptr<DbCheckRun> getRun(OperationContext* opCtx,
                                   const std::string& dbName,
                                   const BSONObj& obj) {
    BSONObjBuilder builder;

    // Get rid of generic command fields.
    for (const auto& elem : obj) {
        if (!isGenericArgument(elem.fieldNameStringData())) {
            builder.append(elem);
        }
    }

    BSONObj toParse = builder.obj();

    // If the dbCheck argument is a string, this is the per-collection form.
    if (toParse["dbCheck"].type() == BSONType::String) {
        return singleCollectionRun(
            opCtx, dbName, DbCheckSingleInvocation::parse(IDLParserErrorContext(""), toParse));
    } else {
        // Otherwise, it's the database-wide form.
        return fullDatabaseRun(
            opCtx, dbName, DbCheckAllInvocation::parse(IDLParserErrorContext(""), toParse));
    }
}
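// Illustration only (not part of the original source): a minimal sketch of the two command
// shapes getRun() distinguishes, assuming the BSON() stream-builder macro is in scope. A
// string-typed 'dbCheck' value selects the per-collection path via singleCollectionRun();
// any other value (conventionally the number 1) falls through to fullDatabaseRun().
//
//     auto perCollection = getRun(opCtx, "test", BSON("dbCheck" << "myCollection"));
//     auto wholeDatabase = getRun(opCtx, "test", BSON("dbCheck" << 1));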
bool CurrentOpCommandBase::run(OperationContext* opCtx,
                               const std::string& dbName,
                               const BSONObj& cmdObj,
                               BSONObjBuilder& result) {
    // Convert the currentOp command spec into an equivalent aggregation command. This will be
    // of the form {aggregate: 1, pipeline: [{$currentOp: {idleConnections: $all, allUsers:
    // !$ownOps, truncateOps: true}}, {$match: {<filter>}}, {$group: {_id: null, inprog: {$push:
    // "$$ROOT"}}}], cursor: {}}
    std::vector<BSONObj> pipeline;

    // {$currentOp: {idleConnections: $all, allUsers: !$ownOps, truncateOps: true}}
    BSONObjBuilder currentOpBuilder;
    BSONObjBuilder currentOpSpecBuilder(currentOpBuilder.subobjStart("$currentOp"));

    // If test commands are enabled, then we allow the currentOp commands to specify whether or not
    // to truncate long operations via the '$truncateOps' parameter. Otherwise, we always truncate
    // operations to match the behaviour of the legacy currentOp command.
    const bool truncateOps =
        !getTestCommandsEnabled() || !cmdObj[kTruncateOps] || cmdObj[kTruncateOps].trueValue();

    currentOpSpecBuilder.append("idleConnections", cmdObj[kAll].trueValue());
    currentOpSpecBuilder.append("allUsers", !cmdObj[kOwnOps].trueValue());
    currentOpSpecBuilder.append("truncateOps", truncateOps);
    currentOpSpecBuilder.doneFast();
    pipeline.push_back(currentOpBuilder.obj());

    // {$match: {<user-defined filter>}}
    BSONObjBuilder matchBuilder;
    BSONObjBuilder matchSpecBuilder(matchBuilder.subobjStart("$match"));

    size_t idx = 0;
    for (const auto& elt : cmdObj) {
        const auto fieldName = elt.fieldNameStringData();

        if (0 == idx++ || kCurOpCmdParams.count(fieldName) || isGenericArgument(fieldName)) {
            continue;
        }

        matchSpecBuilder.append(elt);
    }

    matchSpecBuilder.doneFast();
    pipeline.push_back(matchBuilder.obj());

    // Perform any required modifications to the pipeline before adding the final $group stage.
    modifyPipeline(&pipeline);

    // {$group: {_id: null, inprog: {$push: "$$ROOT"}}}
    BSONObjBuilder groupBuilder;
    BSONObjBuilder groupSpecBuilder(groupBuilder.subobjStart("$group"));

    groupSpecBuilder.append("_id", 0);

    BSONObjBuilder inprogSpecBuilder(groupSpecBuilder.subobjStart("inprog"));
    inprogSpecBuilder.append("$push", "$$ROOT");
    inprogSpecBuilder.doneFast();

    groupSpecBuilder.doneFast();
    pipeline.push_back(groupBuilder.obj());

    // Pipeline is complete; create an AggregationRequest for $currentOp.
    const AggregationRequest request(NamespaceString::makeCollectionlessAggregateNSS("admin"),
                                     std::move(pipeline));

    // Run the pipeline and obtain a CursorResponse.
    auto aggResults = uassertStatusOK(runAggregation(opCtx, request));

    if (aggResults.getBatch().empty()) {
        result.append("inprog", BSONArray());
    } else {
        result.append(aggResults.getBatch().front()["inprog"]);
    }

    // Make any final custom additions to the response object.
    appendToResponse(&result);

    return true;
}
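// Illustration only (not part of the original source): a rough sketch of the rewrite this
// method performs, assuming the usual currentOp parameter names ('$all', '$ownOps') behind
// the kAll and kOwnOps constants, and an arbitrary user-supplied filter field. A command
// such as
//
//     {currentOp: 1, $all: true, "command.getMore": {$exists: true}}
//
// would be translated into an aggregation of roughly the form
//
//     {aggregate: 1,
//      pipeline: [{$currentOp: {idleConnections: true, allUsers: true, truncateOps: true}},
//                 {$match: {"command.getMore": {$exists: true}}},
//                 {$group: {_id: null, inprog: {$push: "$$ROOT"}}}],
//      cursor: {}}
//
// before being run through runAggregation() and unwrapped into the 'inprog' reply field.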
StatusWith<FindAndModifyRequest> FindAndModifyRequest::parseFromBSON(NamespaceString fullNs,
                                                                     const BSONObj& cmdObj) {
    BSONObj query;
    BSONObj fields;
    BSONObj updateObj;
    BSONObj sort;
    boost::optional<write_ops::UpdateModification> update;

    BSONObj collation;
    bool shouldReturnNew = false;
    bool isUpsert = false;
    bool isRemove = false;
    bool arrayFiltersSet = false;
    std::vector<BSONObj> arrayFilters;

    for (auto&& field : cmdObj.getFieldNames<std::set<std::string>>()) {
        if (field == kQueryField) {
            query = cmdObj.getObjectField(kQueryField);
        } else if (field == kSortField) {
            sort = cmdObj.getObjectField(kSortField);
        } else if (field == kRemoveField) {
            isRemove = cmdObj[kRemoveField].trueValue();
        } else if (field == kUpdateField) {
            update = write_ops::UpdateModification::parseFromBSON(cmdObj[kUpdateField]);
        } else if (field == kNewField) {
            shouldReturnNew = cmdObj[kNewField].trueValue();
        } else if (field == kFieldProjectionField) {
            fields = cmdObj.getObjectField(kFieldProjectionField);
        } else if (field == kUpsertField) {
            isUpsert = cmdObj[kUpsertField].trueValue();
        } else if (field == kCollationField) {
            BSONElement collationElt;
            Status collationEltStatus =
                bsonExtractTypedField(cmdObj, kCollationField, BSONType::Object, &collationElt);
            if (!collationEltStatus.isOK() && (collationEltStatus != ErrorCodes::NoSuchKey)) {
                return collationEltStatus;
            }
            if (collationEltStatus.isOK()) {
                collation = collationElt.Obj();
            }
        } else if (field == kArrayFiltersField) {
            BSONElement arrayFiltersElt;
            Status arrayFiltersEltStatus = bsonExtractTypedField(
                cmdObj, kArrayFiltersField, BSONType::Array, &arrayFiltersElt);
            if (!arrayFiltersEltStatus.isOK() && (arrayFiltersEltStatus != ErrorCodes::NoSuchKey)) {
                return arrayFiltersEltStatus;
            }
            if (arrayFiltersEltStatus.isOK()) {
                arrayFiltersSet = true;
                for (auto arrayFilter : arrayFiltersElt.Obj()) {
                    if (arrayFilter.type() != BSONType::Object) {
                        return {ErrorCodes::TypeMismatch,
                                str::stream() << "Each array filter must be an object, found "
                                              << arrayFilter.type()};
                    }
                    arrayFilters.push_back(arrayFilter.Obj());
                }
            }
        } else if (!isGenericArgument(field) &&
                   !std::count(_knownFields.begin(), _knownFields.end(), field)) {
            return {ErrorCodes::Error(51177),
                    str::stream() << "BSON field '" << field << "' is an unknown field."};
        }
    }

    if (!isRemove && !update) {
        return {ErrorCodes::FailedToParse, "Either an update or remove=true must be specified"};
    }

    if (isRemove) {
        if (update) {
            return {ErrorCodes::FailedToParse, "Cannot specify both an update and remove=true"};
        }

        if (isUpsert) {
            return {ErrorCodes::FailedToParse, "Cannot specify both upsert=true and remove=true"};
        }

        if (shouldReturnNew) {
            return {ErrorCodes::FailedToParse,
                    "Cannot specify both new=true and remove=true;"
                    " 'remove' always returns the deleted document"};
        }

        if (arrayFiltersSet) {
            return {ErrorCodes::FailedToParse, "Cannot specify arrayFilters and remove=true"};
        }
    }

    if (update && update->type() == write_ops::UpdateModification::Type::kPipeline &&
        arrayFiltersSet) {
        return {ErrorCodes::FailedToParse, "Cannot specify arrayFilters and a pipeline update"};
    }

    FindAndModifyRequest request(std::move(fullNs), query, std::move(update));
    request.setFieldProjection(fields);
    request.setSort(sort);
    request.setCollation(collation);

    if (arrayFiltersSet) {
        request.setArrayFilters(std::move(arrayFilters));
    }

    if (!isRemove) {
        request.setShouldReturnNew(shouldReturnNew);
        request.setUpsert(isUpsert);
    }

    return request;
}
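// Illustration only (not part of the original source): a minimal sketch of the validation
// rules enforced above, assuming the conventional findAndModify field names ('query',
// 'update', 'remove', 'upsert') behind the k*Field constants, the BSON() stream-builder
// macro, and an already-resolved NamespaceString 'nss'.
//
//     // Accepted: an update-form request.
//     auto ok = FindAndModifyRequest::parseFromBSON(
//         nss, BSON("query" << BSON("a" << 1) << "update" << BSON("$set" << BSON("b" << 2))));
//
//     // Rejected (FailedToParse): neither an update nor remove=true was specified.
//     auto missing = FindAndModifyRequest::parseFromBSON(nss, BSON("query" << BSON("a" << 1)));
//
//     // Rejected (FailedToParse): remove=true cannot be combined with upsert, new, an update,
//     // or arrayFilters.
//     auto conflict = FindAndModifyRequest::parseFromBSON(
//         nss, BSON("remove" << true << "upsert" << true));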
StatusWith<AggregationRequest> AggregationRequest::parseFromBSON(
    NamespaceString nss,
    const BSONObj& cmdObj,
    boost::optional<ExplainOptions::Verbosity> explainVerbosity) {
    // Parse required parameters.
    auto pipelineElem = cmdObj[kPipelineName];
    auto pipeline = AggregationRequest::parsePipelineFromBSON(pipelineElem);
    if (!pipeline.isOK()) {
        return pipeline.getStatus();
    }

    AggregationRequest request(std::move(nss), std::move(pipeline.getValue()));

    const std::initializer_list<StringData> optionsParsedElseWhere = {kPipelineName, kCommandName};

    bool hasCursorElem = false;
    bool hasExplainElem = false;
    bool hasFromMongosElem = false;
    bool hasNeedsMergeElem = false;

    // Parse optional parameters.
    for (auto&& elem : cmdObj) {
        auto fieldName = elem.fieldNameStringData();

        if (QueryRequest::kUnwrappedReadPrefField == fieldName) {
            // We expect this field to be validated elsewhere.
            request.setUnwrappedReadPref(elem.embeddedObject());
        } else if (std::find(optionsParsedElseWhere.begin(),
                             optionsParsedElseWhere.end(),
                             fieldName) != optionsParsedElseWhere.end()) {
            // Ignore options that are parsed elsewhere.
        } else if (kCursorName == fieldName) {
            long long batchSize;
            auto status =
                CursorRequest::parseCommandCursorOptions(cmdObj, kDefaultBatchSize, &batchSize);
            if (!status.isOK()) {
                return status;
            }

            hasCursorElem = true;
            request.setBatchSize(batchSize);
        } else if (kCollationName == fieldName) {
            if (elem.type() != BSONType::Object) {
                return {ErrorCodes::TypeMismatch,
                        str::stream() << kCollationName << " must be an object, not a "
                                      << typeName(elem.type())};
            }
            request.setCollation(elem.embeddedObject().getOwned());
        } else if (QueryRequest::cmdOptionMaxTimeMS == fieldName) {
            auto maxTimeMs = QueryRequest::parseMaxTimeMS(elem);
            if (!maxTimeMs.isOK()) {
                return maxTimeMs.getStatus();
            }

            request.setMaxTimeMS(maxTimeMs.getValue());
        } else if (repl::ReadConcernArgs::kReadConcernFieldName == fieldName) {
            if (elem.type() != BSONType::Object) {
                return {ErrorCodes::TypeMismatch,
                        str::stream() << repl::ReadConcernArgs::kReadConcernFieldName
                                      << " must be an object, not a " << typeName(elem.type())};
            }
            request.setReadConcern(elem.embeddedObject().getOwned());
        } else if (kHintName == fieldName) {
            if (BSONType::Object == elem.type()) {
                request.setHint(elem.embeddedObject());
            } else if (BSONType::String == elem.type()) {
                request.setHint(BSON("$hint" << elem.valueStringData()));
            } else {
                return Status(ErrorCodes::FailedToParse,
                              str::stream()
                                  << kHintName
                                  << " must be specified as a string representing an index"
                                  << " name, or an object representing an index's key pattern");
            }
        } else if (kCommentName == fieldName) {
            if (elem.type() != BSONType::String) {
                return {ErrorCodes::TypeMismatch,
                        str::stream() << kCommentName << " must be a string, not a "
                                      << typeName(elem.type())};
            }
            request.setComment(elem.str());
        } else if (kExplainName == fieldName) {
            if (elem.type() != BSONType::Bool) {
                return {ErrorCodes::TypeMismatch,
                        str::stream() << kExplainName << " must be a boolean, not a "
                                      << typeName(elem.type())};
            }

            hasExplainElem = true;
            if (elem.Bool()) {
                request.setExplain(ExplainOptions::Verbosity::kQueryPlanner);
            }
        } else if (kFromMongosName == fieldName) {
            if (elem.type() != BSONType::Bool) {
                return {ErrorCodes::TypeMismatch,
                        str::stream() << kFromMongosName << " must be a boolean, not a "
                                      << typeName(elem.type())};
            }

            hasFromMongosElem = true;
            request.setFromMongos(elem.Bool());
        } else if (kNeedsMergeName == fieldName) {
            if (elem.type() != BSONType::Bool) {
                return {ErrorCodes::TypeMismatch,
                        str::stream() << kNeedsMergeName << " must be a boolean, not a "
                                      << typeName(elem.type())};
            }

            hasNeedsMergeElem = true;
            request.setNeedsMerge(elem.Bool());
        } else if (kAllowDiskUseName == fieldName) {
            if (storageGlobalParams.readOnly) {
                return {ErrorCodes::IllegalOperation,
                        str::stream() << "The '" << kAllowDiskUseName
                                      << "' option is not permitted in read-only mode."};
            } else if (elem.type() != BSONType::Bool) {
                return {ErrorCodes::TypeMismatch,
                        str::stream() << kAllowDiskUseName << " must be a boolean, not a "
                                      << typeName(elem.type())};
            }
            request.setAllowDiskUse(elem.Bool());
        } else if (bypassDocumentValidationCommandOption() == fieldName) {
            request.setBypassDocumentValidation(elem.trueValue());
        } else if (!isGenericArgument(fieldName)) {
            return {ErrorCodes::FailedToParse,
                    str::stream() << "unrecognized field '" << elem.fieldName() << "'"};
        }
    }

    if (explainVerbosity) {
        if (hasExplainElem) {
            return {ErrorCodes::FailedToParse,
                    str::stream() << "The '" << kExplainName
                                  << "' option is illegal when an explain verbosity is also "
                                     "provided"};
        }

        request.setExplain(explainVerbosity);
    }

    // 'hasExplainElem' implies an aggregate command-level explain option, which does not require
    // a cursor argument.
    if (!hasCursorElem && !hasExplainElem) {
        return {ErrorCodes::FailedToParse,
                str::stream() << "The '" << kCursorName
                              << "' option is required, except for aggregate with the explain "
                                 "argument"};
    }

    if (request.getExplain() && cmdObj[WriteConcernOptions::kWriteConcernField]) {
        return {ErrorCodes::FailedToParse,
                str::stream() << "Aggregation explain does not support the '"
                              << WriteConcernOptions::kWriteConcernField << "' option"};
    }

    if (hasNeedsMergeElem && !hasFromMongosElem) {
        return {ErrorCodes::FailedToParse,
                str::stream() << "Cannot specify '" << kNeedsMergeName << "' without '"
                              << kFromMongosName << "'"};
    }

    return request;
}
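// Illustration only (not part of the original source): a minimal sketch of the cursor/explain
// interplay enforced above, assuming the standard aggregate field names ('pipeline', 'cursor',
// 'explain') behind the k*Name constants, the BSON() stream-builder macro, and an
// already-resolved NamespaceString 'nss'.
//
//     // Accepted: a plain aggregate must carry a 'cursor' argument.
//     auto ok = AggregationRequest::parseFromBSON(
//         nss,
//         BSON("aggregate" << nss.coll() << "pipeline" << BSONArray() << "cursor" << BSONObj()),
//         boost::none);
//
//     // Accepted: 'explain: true' replaces the 'cursor' requirement.
//     auto explained = AggregationRequest::parseFromBSON(
//         nss,
//         BSON("aggregate" << nss.coll() << "pipeline" << BSONArray() << "explain" << true),
//         boost::none);
//
//     // Rejected (FailedToParse): neither 'cursor' nor 'explain' was supplied.
//     auto missing = AggregationRequest::parseFromBSON(
//         nss, BSON("aggregate" << nss.coll() << "pipeline" << BSONArray()), boost::none);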