void run() { setup(); insert(fromjson("{_id: 1, x: 5}")); insert(fromjson("{_id: 2, x: 6}")); insert(fromjson("{_id: 3, x: 10}")); std::unique_ptr<IndexScan> ixscan( createIndexScan(BSON("x" << 5), BSON("x" << 10), false, false)); // Expect to get key {'': 6}. WorkingSetMember* member = getNext(ixscan.get()); ASSERT_EQ(WorkingSetMember::RID_AND_IDX, member->getState()); ASSERT_BSONOBJ_EQ(member->keyData[0].keyData, BSON("" << 6)); // Save state and insert an indexed doc. ixscan->saveState(); insert(fromjson("{_id: 4, x: 7}")); ixscan->restoreState(); member = getNext(ixscan.get()); ASSERT_EQ(WorkingSetMember::RID_AND_IDX, member->getState()); ASSERT_BSONOBJ_EQ(member->keyData[0].keyData, BSON("" << 7)); WorkingSetID id; ASSERT_EQ(PlanStage::IS_EOF, ixscan->work(&id)); ASSERT(ixscan->isEOF()); }
TEST(FTSSpec, Extra3) {
    BSONObj user = BSON("key" << BSON("x" << 1 << "data" << "text"));
    BSONObj fixed = assertGet(FTSSpec::fixSpec(user));

    ASSERT_BSONOBJ_EQ(BSON("x" << 1 << "_fts" << "text" << "_ftsx" << 1), fixed["key"].Obj());
    ASSERT_BSONOBJ_EQ(BSON("data" << 1), fixed["weights"].Obj());

    BSONObj fixed2 = assertGet(FTSSpec::fixSpec(fixed));
    ASSERT_BSONOBJ_EQ(fixed, fixed2);

    FTSSpec spec(fixed);
    ASSERT_EQUALS(1U, spec.numExtraBefore());
    ASSERT_EQUALS(StringData("x"), spec.extraBefore(0));
    ASSERT_EQUALS(0U, spec.numExtraAfter());

    BSONObj prefix;
    ASSERT(spec.getIndexPrefix(BSON("x" << 2), &prefix).isOK());
    ASSERT_BSONOBJ_EQ(BSON("x" << 2), prefix);

    ASSERT(spec.getIndexPrefix(BSON("x" << 3 << "y" << 4), &prefix).isOK());
    ASSERT_BSONOBJ_EQ(BSON("x" << 3), prefix);

    ASSERT(!spec.getIndexPrefix(BSON("x" << BSON("$gt" << 5)), &prefix).isOK());
    ASSERT(!spec.getIndexPrefix(BSON("y" << 4), &prefix).isOK());
    ASSERT(!spec.getIndexPrefix(BSONObj(), &prefix).isOK());
}
TEST_F(DocumentSourceMatchTest, MultipleMatchStagesShouldCombineIntoOne) {
    auto match1 = DocumentSourceMatch::create(BSON("a" << 1), getExpCtx());
    auto match2 = DocumentSourceMatch::create(BSON("b" << 1), getExpCtx());
    auto match3 = DocumentSourceMatch::create(BSON("c" << 1), getExpCtx());

    Pipeline::SourceContainer container;

    // Check initial state.
    ASSERT_BSONOBJ_EQ(match1->getQuery(), BSON("a" << 1));
    ASSERT_BSONOBJ_EQ(match2->getQuery(), BSON("b" << 1));
    ASSERT_BSONOBJ_EQ(match3->getQuery(), BSON("c" << 1));

    container.push_back(match1);
    container.push_back(match2);
    match1->optimizeAt(container.begin(), &container);

    ASSERT_EQUALS(container.size(), 1U);
    ASSERT_BSONOBJ_EQ(match1->getQuery(), fromjson("{'$and': [{a:1}, {b:1}]}"));

    container.push_back(match3);
    match1->optimizeAt(container.begin(), &container);
    ASSERT_EQUALS(container.size(), 1U);
    ASSERT_BSONOBJ_EQ(match1->getQuery(),
                      fromjson("{'$and': [{'$and': [{a:1}, {b:1}]},"
                               "{c:1}]}"));
}
void run() { setup(); insert(fromjson("{_id: 1, x: 10}")); insert(fromjson("{_id: 2, x: 8}")); insert(fromjson("{_id: 3, x: 3}")); std::unique_ptr<IndexScan> ixscan( createIndexScan(BSON("x" << 10), BSON("x" << 5), true, true, -1 /* reverse scan */)); // Expect to get key {'': 10} and then {'': 8}. WorkingSetMember* member = getNext(ixscan.get()); ASSERT_EQ(WorkingSetMember::RID_AND_IDX, member->getState()); ASSERT_BSONOBJ_EQ(member->keyData[0].keyData, BSON("" << 10)); member = getNext(ixscan.get()); ASSERT_EQ(WorkingSetMember::RID_AND_IDX, member->getState()); ASSERT_BSONOBJ_EQ(member->keyData[0].keyData, BSON("" << 8)); // Save state and insert an indexed doc. ixscan->saveState(); insert(fromjson("{_id: 4, x: 6}")); insert(fromjson("{_id: 5, x: 9}")); ixscan->restoreState(); // Ensure that we don't erroneously return {'': 9} or {'':3}. member = getNext(ixscan.get()); ASSERT_EQ(WorkingSetMember::RID_AND_IDX, member->getState()); ASSERT_BSONOBJ_EQ(member->keyData[0].keyData, BSON("" << 6)); WorkingSetID id; ASSERT_EQ(PlanStage::IS_EOF, ixscan->work(&id)); ASSERT(ixscan->isEOF()); }
void run() {
    auto indexCatalog = collection()->getIndexCatalog();

    // fixIndexKey() should leave an ordinary key spec alone and normalize any _id spec to
    // {_id: 1}.
    ASSERT_BSONOBJ_EQ(BSON("x" << 1), indexCatalog->fixIndexKey(BSON("x" << 1)));
    ASSERT_BSONOBJ_EQ(BSON("_id" << 1), indexCatalog->fixIndexKey(BSON("_id" << 1)));
    ASSERT_BSONOBJ_EQ(BSON("_id" << 1), indexCatalog->fixIndexKey(BSON("_id" << true)));
}
// File sanity check.
TEST(FTDCFileTest, TestFileBasicMetadata) {
    unittest::TempDir tempdir("metrics_testpath");
    boost::filesystem::path p(tempdir.path());
    p /= kTestFile;
    deleteFileIfNeeded(p);

    BSONObj doc1 = BSON("name" << "joe" << "key1" << 34 << "key2" << 45);
    BSONObj doc2 = BSON("name" << "joe" << "key3" << 34 << "key5" << 45);

    FTDCConfig config;
    FTDCFileWriter writer(&config);

    ASSERT_OK(writer.open(p));
    ASSERT_OK(writer.writeMetadata(doc1, Date_t()));
    ASSERT_OK(writer.writeMetadata(doc2, Date_t()));

    writer.close();

    FTDCFileReader reader;
    ASSERT_OK(reader.open(p));

    ASSERT_OK(reader.hasNext());
    BSONObj doc1a = std::get<1>(reader.next());
    ASSERT_BSONOBJ_EQ(doc1, doc1a);

    ASSERT_OK(reader.hasNext());
    BSONObj doc2a = std::get<1>(reader.next());
    ASSERT_BSONOBJ_EQ(doc2, doc2a);

    auto sw = reader.hasNext();
    ASSERT_OK(sw);
    ASSERT_EQUALS(sw.getValue(), false);
}
TEST(UpdateZoneKeyRangeRequest, BasicValidMongosRemoveCommand) {
    auto requestStatus = UpdateZoneKeyRangeRequest::parseFromMongosCommand(fromjson(R"BSON({
        updateZoneKeyRange: "foo.bar",
        min: { x: 1 },
        max: { x: 100 },
        zone: null
    })BSON"));
    ASSERT_OK(requestStatus.getStatus());

    auto request = requestStatus.getValue();
    ASSERT_EQ("foo.bar", request.getNS().ns());
    ASSERT_BSONOBJ_EQ(BSON("x" << 1), request.getRange().getMin());
    ASSERT_BSONOBJ_EQ(BSON("x" << 100), request.getRange().getMax());
    ASSERT_TRUE(request.isRemove());
}
TEST(CollectionOptions, Validator) {
    CollectionOptions options;

    // A validator must be an object.
    ASSERT_NOT_OK(options.parse(fromjson("{validator: 'notAnObject'}")));

    ASSERT_OK(options.parse(fromjson("{validator: {a: 1}}")));
    ASSERT_BSONOBJ_EQ(options.validator, fromjson("{a: 1}"));

    options.validator = fromjson("{b: 1}");
    ASSERT_BSONOBJ_EQ(options.toBSON()["validator"].Obj(), fromjson("{b: 1}"));

    // By default the validator is empty and is omitted from the serialized options.
    CollectionOptions defaultOptions;
    ASSERT_BSONOBJ_EQ(defaultOptions.validator, BSONObj());
    ASSERT(!defaultOptions.toBSON()["validator"]);
}
void SyncTailTest::_testSyncApplyInsertDocument(ErrorCodes::Error expectedError,
                                                const BSONObj* explicitOp) {
    const BSONObj op = explicitOp ? *explicitOp : BSON("op" << "i" << "ns" << "test.t");

    // Stub apply function that records that it was called and checks the lock modes and
    // operation context flags that syncApply() is expected to set up.
    bool applyOpCalled = false;
    SyncTail::ApplyOperationInLockFn applyOp = [&](OperationContext* opCtx,
                                                   Database* db,
                                                   const BSONObj& theOperation,
                                                   bool inSteadyStateReplication,
                                                   stdx::function<void()>) {
        applyOpCalled = true;
        ASSERT_TRUE(opCtx);
        ASSERT_TRUE(opCtx->lockState()->isDbLockedForMode("test", MODE_IX));
        ASSERT_FALSE(opCtx->lockState()->isDbLockedForMode("test", MODE_X));
        ASSERT_TRUE(opCtx->lockState()->isCollectionLockedForMode("test.t", MODE_IX));
        ASSERT_FALSE(opCtx->writesAreReplicated());
        ASSERT_TRUE(documentValidationDisabled(opCtx));
        ASSERT_TRUE(db);
        ASSERT_BSONOBJ_EQ(op, theOperation);
        ASSERT_TRUE(inSteadyStateReplication);
        return Status::OK();
    };

    ASSERT_TRUE(_opCtx->writesAreReplicated());
    ASSERT_FALSE(documentValidationDisabled(_opCtx.get()));
    ASSERT_EQ(SyncTail::syncApply(_opCtx.get(), op, true, applyOp, failedApplyCommand, _incOps),
              expectedError);
    ASSERT_EQ(applyOpCalled, expectedError == ErrorCodes::OK);
}
TEST(CollectionOptions, CollationFieldParsesCorrectly) {
    CollectionOptions options;
    ASSERT_OK(options.parse(fromjson("{collation: {locale: 'en'}}")));
    ASSERT_BSONOBJ_EQ(options.collation, fromjson("{locale: 'en'}"));
    ASSERT_TRUE(options.isValid());
    ASSERT_OK(options.validate());
}
TEST(SortKeyGeneratorTest, CompoundPatternLeadingFieldIsArray) {
    auto sortKeyGen = stdx::make_unique<SortKeyGenerator>(BSON("c" << 1 << "b" << 1), nullptr);
    auto sortKey = sortKeyGen->getSortKey(
        fromjson("{_id: 0, z: 'thing1', a: 99, c: [2, 4, 1], b: 16}"), nullptr);
    ASSERT_OK(sortKey.getStatus());
    ASSERT_BSONOBJ_EQ(sortKey.getValue(), BSON("" << 1 << "" << 16));
}
TEST(SortKeyGeneratorTest, CompoundSortPatternWithDottedPath) {
    auto sortKeyGen = stdx::make_unique<SortKeyGenerator>(BSON("c.a" << 1 << "b" << 1), nullptr);
    auto sortKey = sortKeyGen->getSortKey(
        fromjson("{_id: 0, z: 'thing1', a: 99, c: {a: 4}, b: 16}"), nullptr);
    ASSERT_OK(sortKey.getStatus());
    ASSERT_BSONOBJ_EQ(sortKey.getValue(), BSON("" << 4 << "" << 16));
}
TEST(SortKeyGeneratorTest, SortKeyGenerationForArraysRespectsCompoundOrdering) {
    auto sortKeyGen = stdx::make_unique<SortKeyGenerator>(BSON("a.b" << 1 << "a.c" << -1), nullptr);
    auto sortKey = sortKeyGen->getSortKey(
        fromjson("{_id: 0, a: [{b: 1, c: 0}, {b: 0, c: 3}, {b: 0, c: 1}]}"), nullptr);
    ASSERT_OK(sortKey.getStatus());
    ASSERT_BSONOBJ_EQ(sortKey.getValue(), BSON("" << 0 << "" << 3));
}
TEST(SortKeyGeneratorTest, CollatorHasNoEffectWhenExtractingNonStringSortKey) {
    CollatorInterfaceMock collator(CollatorInterfaceMock::MockType::kReverseString);
    auto sortKeyGen = stdx::make_unique<SortKeyGenerator>(BSON("a" << 1), &collator);
    auto sortKey = sortKeyGen->getSortKey(fromjson("{_id: 0, z: 10, a: 6, b: 16}"), nullptr);
    ASSERT_OK(sortKey.getStatus());
    ASSERT_BSONOBJ_EQ(sortKey.getValue(), BSON("" << 6));
}
TEST(QueryRequestTest, ConvertToAggregationWithPipeline) {
    QueryRequest qr(testns);
    qr.setFilter(BSON("x" << 1));
    qr.setSort(BSON("y" << -1));
    qr.setLimit(3);
    qr.setSkip(7);
    qr.setProj(BSON("z" << 0));

    auto agg = qr.asAggregationCommand();
    ASSERT_OK(agg);

    auto ar = AggregationRequest::parseFromBSON(testns, agg.getValue());
    ASSERT_OK(ar.getStatus());
    ASSERT(!ar.getValue().getExplain());
    ASSERT_EQ(ar.getValue().getBatchSize(), AggregationRequest::kDefaultBatchSize);
    ASSERT_EQ(ar.getValue().getNamespaceString(), testns);
    ASSERT_BSONOBJ_EQ(ar.getValue().getCollation(), BSONObj());

    std::vector<BSONObj> expectedPipeline{BSON("$match" << BSON("x" << 1)),
                                          BSON("$sort" << BSON("y" << -1)),
                                          BSON("$skip" << 7),
                                          BSON("$limit" << 3),
                                          BSON("$project" << BSON("z" << 0))};
    ASSERT(std::equal(expectedPipeline.begin(),
                      expectedPipeline.end(),
                      ar.getValue().getPipeline().begin(),
                      SimpleBSONObjComparator::kInstance.makeEqualTo()));
}
TEST(QueryRequestTest, AsFindCommandWithUuidNoAvailableNamespace) {
    BSONObj cmdObj =
        fromjson("{find: { \"$binary\" : \"ASNFZ4mrze/ty6mHZUMhAQ==\", \"$type\" : \"04\" }}");
    QueryRequest qr(NamespaceStringOrUUID(
        "test", UUID::parse("01234567-89ab-cdef-edcb-a98765432101").getValue()));
    ASSERT_BSONOBJ_EQ(cmdObj, qr.asFindCommandWithUuid());
}
TEST_F(OplogBufferCollectionTest, extractEmbeddedOplogDocumentChangesIdToTimestamp) {
    auto nss = makeNamespace(_agent);
    OplogBufferCollection oplogBuffer(_storageInterface, nss);

    const BSONObj expectedOp = makeOplogEntry(1);
    BSONObj originalOp = BSON("_id" << Timestamp(1, 1) << "entry" << expectedOp);
    ASSERT_BSONOBJ_EQ(expectedOp, OplogBufferCollection::extractEmbeddedOplogDocument(originalOp));
}
TEST(SortKeyGeneratorTest, ExtractStringKeyNonCompoundNonNested) {
    auto sortKeyGen = stdx::make_unique<SortKeyGenerator>(BSON("a" << 1), nullptr);
    auto sortKey = sortKeyGen->getSortKey(
        fromjson("{_id: 0, z: 'thing1', a: 'thing2', b: 16}"), nullptr);
    ASSERT_OK(sortKey.getStatus());
    ASSERT_BSONOBJ_EQ(sortKey.getValue(), BSON("" << "thing2"));
}
TEST(Commands, appendCommandStatusOK) {
    BSONObjBuilder actualResult;
    CommandHelpers::appendCommandStatus(actualResult, Status::OK());

    BSONObjBuilder expectedResult;
    expectedResult.append("ok", 1.0);

    ASSERT_BSONOBJ_EQ(actualResult.obj(), expectedResult.obj());
}
TEST(SortKeyGeneratorTest, ExtractStringSortKeyWithCollatorUsesComparisonKey) {
    CollatorInterfaceMock collator(CollatorInterfaceMock::MockType::kReverseString);
    auto sortKeyGen = stdx::make_unique<SortKeyGenerator>(BSON("a" << 1), &collator);
    auto sortKey = sortKeyGen->getSortKey(
        fromjson("{_id: 0, z: 'thing1', a: 'thing2', b: 16}"), nullptr);
    ASSERT_OK(sortKey.getStatus());
    ASSERT_BSONOBJ_EQ(sortKey.getValue(), BSON("" << "2gniht"));
}
TEST_F(OplogBufferCollectionTest, addIdToDocumentChangesTimestampToId) {
    auto nss = makeNamespace(_agent);
    OplogBufferCollection oplogBuffer(_storageInterface, nss);

    const BSONObj originalOp = makeOplogEntry(1);
    BSONObj expectedOp = BSON("_id" << Timestamp(1, 1) << "entry" << originalOp);
    auto testOpPair = OplogBufferCollection::addIdToDocument(originalOp);
    ASSERT_BSONOBJ_EQ(expectedOp, testOpPair.first);
    ASSERT_EQUALS(Timestamp(1, 1), testOpPair.second);
}
TEST(QueryRequestTest, ConvertToAggregationWithHintSucceeds) {
    QueryRequest qr(testns);
    qr.setHint(fromjson("{a_1: -1}"));
    const auto aggCmd = qr.asAggregationCommand();
    ASSERT_OK(aggCmd);

    auto ar = AggregationRequest::parseFromBSON(testns, aggCmd.getValue());
    ASSERT_OK(ar.getStatus());
    ASSERT_BSONOBJ_EQ(qr.getHint(), ar.getValue().getHint());
}
TEST(SortKeyGeneratorTest, EnsureSortKeyGenerationForArraysRespectsCollation) {
    CollatorInterfaceMock collator(CollatorInterfaceMock::MockType::kReverseString);
    auto sortKeyGen = stdx::make_unique<SortKeyGenerator>(BSON("a" << 1), &collator);
    auto sortKey = sortKeyGen->getSortKey(
        fromjson("{_id: 0, a: ['aaz', 'zza', 'yya', 'zzb']}"), nullptr);
    ASSERT_OK(sortKey.getStatus());
    ASSERT_BSONOBJ_EQ(sortKey.getValue(), BSON("" << "ayy"));
}
TEST(QueryRequestTest, ParseFromLegacyStringMetaOpComment) {
    BSONObj queryObj = fromjson(
        "{$query: {a: 1},"
        "$comment: 'ParseFromLegacyStringMetaOpComment'}");
    const NamespaceString nss("test.testns");
    unique_ptr<QueryRequest> qr(
        assertGet(QueryRequest::fromLegacyQuery(nss, queryObj, BSONObj(), 0, 0, 0)));

    ASSERT_EQ(qr->getComment(), "ParseFromLegacyStringMetaOpComment");
    ASSERT_BSONOBJ_EQ(qr->getFilter(), fromjson("{a: 1}"));
}
TEST(QueryRequestTest, ParseFromCommandLimitIsZero) {
    BSONObj cmdObj = fromjson(
        "{find: 'testns',"
        "limit: 0,"
        "filter: {a: 3}}");
    const NamespaceString nss("test.testns");
    bool isExplain = false;
    unique_ptr<QueryRequest> qr(
        assertGet(QueryRequest::makeFromFindCommand(nss, cmdObj, isExplain)));

    ASSERT_BSONOBJ_EQ(BSON("a" << 3), qr->getFilter());
    ASSERT_FALSE(qr->getLimit());
}
TEST(QueryRequestTest, AllowTailableWithNaturalSort) {
    BSONObj cmdObj = fromjson(
        "{find: 'testns',"
        "tailable: true,"
        "sort: {$natural: 1}}");
    const NamespaceString nss("test.testns");
    bool isExplain = false;
    auto result = QueryRequest::makeFromFindCommand(nss, cmdObj, isExplain);

    ASSERT_OK(result.getStatus());
    ASSERT_TRUE(result.getValue()->isTailable());
    ASSERT_BSONOBJ_EQ(result.getValue()->getSort(), BSON("$natural" << 1));
}
TEST(QueryRequestTest, ParseFromLegacyObjMetaOpComment) {
    BSONObj queryObj = fromjson(
        "{$query: {a: 1},"
        "$comment: {b: 2, c: {d: 'ParseFromLegacyObjMetaOpComment'}}}");
    const NamespaceString nss("test.testns");
    unique_ptr<QueryRequest> qr(
        assertGet(QueryRequest::fromLegacyQuery(nss, queryObj, BSONObj(), 0, 0, 0)));

    // Ensure that the legacy $comment meta-operator is parsed to a string comment.
    ASSERT_EQ(qr->getComment(), "{ b: 2, c: { d: \"ParseFromLegacyObjMetaOpComment\" } }");
    ASSERT_BSONOBJ_EQ(qr->getFilter(), fromjson("{a: 1}"));
}
TEST(QueryRequestTest, ParseFromLegacyQuery) {
    const auto kSkip = 1;
    const auto kNToReturn = 2;

    BSONObj queryObj = fromjson(R"({
        query: {query: 1},
        orderby: {sort: 1},
        $hint: {hint: 1},
        $explain: false,
        $min: {x: 'min'},
        $max: {x: 'max'}
    })");
    const NamespaceString nss("test.testns");
    unique_ptr<QueryRequest> qr(assertGet(QueryRequest::fromLegacyQuery(
        nss, queryObj, BSON("proj" << 1), kSkip, kNToReturn, QueryOption_Exhaust)));

    ASSERT_EQ(qr->nss(), nss);
    ASSERT_BSONOBJ_EQ(qr->getFilter(), fromjson("{query: 1}"));
    ASSERT_BSONOBJ_EQ(qr->getProj(), fromjson("{proj: 1}"));
    ASSERT_BSONOBJ_EQ(qr->getSort(), fromjson("{sort: 1}"));
    ASSERT_BSONOBJ_EQ(qr->getHint(), fromjson("{hint: 1}"));
    ASSERT_BSONOBJ_EQ(qr->getMin(), fromjson("{x: 'min'}"));
    ASSERT_BSONOBJ_EQ(qr->getMax(), fromjson("{x: 'max'}"));
    ASSERT_EQ(qr->getSkip(), boost::optional<long long>(kSkip));
    ASSERT_EQ(qr->getNToReturn(), boost::optional<long long>(kNToReturn));
    ASSERT_EQ(qr->wantMore(), true);
    ASSERT_EQ(qr->isExplain(), false);
    ASSERT_EQ(qr->isSlaveOk(), false);
    ASSERT_EQ(qr->isOplogReplay(), false);
    ASSERT_EQ(qr->isNoCursorTimeout(), false);
    ASSERT_EQ(qr->isTailable(), false);
    ASSERT_EQ(qr->isExhaust(), true);
    ASSERT_EQ(qr->isAllowPartialResults(), false);
    ASSERT_EQ(qr->getOptions(), QueryOption_Exhaust);
}
TEST(QueryRequestTest, ConvertToAggregationSucceeds) {
    QueryRequest qr(testns);
    auto agg = qr.asAggregationCommand();
    ASSERT_OK(agg);

    auto ar = AggregationRequest::parseFromBSON(testns, agg.getValue());
    ASSERT_OK(ar.getStatus());
    ASSERT(!ar.getValue().getExplain());
    ASSERT(ar.getValue().getPipeline().empty());
    ASSERT_EQ(ar.getValue().getBatchSize(), AggregationRequest::kDefaultBatchSize);
    ASSERT_EQ(ar.getValue().getNamespaceString(), testns);
    ASSERT_BSONOBJ_EQ(ar.getValue().getCollation(), BSONObj());
}
TEST(QueryRequestTest, ConvertToAggregationOmitsExplain) {
    QueryRequest qr(testns);
    qr.setExplain(true);
    auto agg = qr.asAggregationCommand();
    ASSERT_OK(agg);

    // The explain flag should not be carried over into the aggregation command.
    auto ar = AggregationRequest::parseFromBSON(testns, agg.getValue());
    ASSERT_OK(ar.getStatus());
    ASSERT_FALSE(ar.getValue().getExplain());
    ASSERT(ar.getValue().getPipeline().empty());
    ASSERT_EQ(ar.getValue().getNamespaceString(), testns);
    ASSERT_BSONOBJ_EQ(ar.getValue().getCollation(), BSONObj());
}