static void setUpDirs(DirectoryPtr dir, DirectoryPtr aux) {
    IndexWriterPtr writer;

    writer = newWriter(dir, true);
    writer->setMaxBufferedDocs(1000);
    // add 1000 documents in 1 segment
    addDocs(writer, 1000);
    BOOST_CHECK_EQUAL(1000, writer->maxDoc());
    BOOST_CHECK_EQUAL(1, writer->getSegmentCount());
    writer->close();

    writer = newWriter(aux, true);
    writer->setUseCompoundFile(false); // use one without a compound file
    writer->setMaxBufferedDocs(100);
    writer->setMergeFactor(10);
    // add 30 documents in 3 segments
    for (int32_t i = 0; i < 3; ++i) {
        addDocs(writer, 10);
        writer->close();
        writer = newWriter(aux, false);
        writer->setUseCompoundFile(false); // use one without a compound file
        writer->setMaxBufferedDocs(100);
        writer->setMergeFactor(10);
    }
    BOOST_CHECK_EQUAL(30, writer->maxDoc());
    BOOST_CHECK_EQUAL(3, writer->getSegmentCount());
    writer->close();
}
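// runTest below constructs OptimizeThread instances, but the class itself is not
// shown in this section. The sketch that follows is an assumption, reconstructed
// from the call newLucene<OptimizeThread>(NUM_ITER2, iterFinal, iFinal, writer,
// writerFinal) and from runTest's expectedDocCount formula, which implies that each
// thread i leaves a net of 8 * NUM_ITER2 * (1 + i) documents. The split into 17 adds
// and 9 deletes per inner pass is illustrative; only the net of 8 * (1 + i) per pass
// is implied by the formula. Class name, members, and body are a sketch, not the
// file's actual definition.
class OptimizeThread : public LuceneThread {
public:
    OptimizeThread(int32_t numIter, int32_t iterFinal, int32_t iFinal, const IndexWriterPtr& writer, const IndexWriterPtr& writerFinal)
        : numIter(numIter), iterFinal(iterFinal), iFinal(iFinal), writer(writer), writerFinal(writerFinal) {
    }

    virtual ~OptimizeThread() {
    }

    LUCENE_CLASS(OptimizeThread);

protected:
    int32_t numIter;
    int32_t iterFinal;
    int32_t iFinal;
    IndexWriterPtr writer;
    IndexWriterPtr writerFinal;

public:
    virtual void run() {
        try {
            for (int32_t j = 0; j < numIter; ++j) {
                // kick off a non-blocking optimize, then interleave adds and deletes
                writerFinal->optimize(false);
                for (int32_t k = 0; k < 17 * (1 + iFinal); ++k) {
                    DocumentPtr d = newLucene<Document>();
                    String id = StringUtils::toString(iterFinal) + L"_" + StringUtils::toString(iFinal) + L"_" + StringUtils::toString(j) + L"_" + StringUtils::toString(k);
                    d->add(newLucene<Field>(L"id", id, Field::STORE_YES, Field::INDEX_NOT_ANALYZED));
                    d->add(newLucene<Field>(L"contents", intToEnglish(iFinal + k), Field::STORE_NO, Field::INDEX_ANALYZED));
                    writerFinal->addDocument(d);
                }
                // delete some of the documents just added, leaving a net of 8 * (1 + iFinal)
                for (int32_t k = 0; k < 9 * (1 + iFinal); ++k) {
                    String id = StringUtils::toString(iterFinal) + L"_" + StringUtils::toString(iFinal) + L"_" + StringUtils::toString(j) + L"_" + StringUtils::toString(k);
                    writerFinal->deleteDocuments(newLucene<Term>(L"id", id));
                }
                // finish with a blocking optimize
                writerFinal->optimize();
            }
        } catch (LuceneException&) {
            BOOST_FAIL("unexpected exception in OptimizeThread");
        }
    }
};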
void runTest(DirectoryPtr directory, MergeSchedulerPtr merger) {
    IndexWriterPtr writer = newLucene<IndexWriter>(directory, analyzer, true, IndexWriter::MaxFieldLengthUNLIMITED);
    writer->setMaxBufferedDocs(2);
    if (merger)
        writer->setMergeScheduler(merger);

    for (int32_t iter = 0; iter < NUM_ITER; ++iter) {
        int32_t iterFinal = iter;
        writer->setMergeFactor(1000);

        // add 200 initial documents with merging effectively disabled
        for (int32_t i = 0; i < 200; ++i) {
            DocumentPtr d = newLucene<Document>();
            d->add(newLucene<Field>(L"id", StringUtils::toString(i), Field::STORE_YES, Field::INDEX_NOT_ANALYZED));
            d->add(newLucene<Field>(L"contents", intToEnglish(i), Field::STORE_NO, Field::INDEX_ANALYZED));
            writer->addDocument(d);
        }

        writer->setMergeFactor(4);

        // run threads that add/delete documents and optimize concurrently
        Collection<LuceneThreadPtr> threads = Collection<LuceneThreadPtr>::newInstance(NUM_THREADS);
        for (int32_t i = 0; i < NUM_THREADS; ++i) {
            int32_t iFinal = i;
            IndexWriterPtr writerFinal = writer;
            threads[i] = newLucene<OptimizeThread>(NUM_ITER2, iterFinal, iFinal, writer, writerFinal);
        }

        for (int32_t i = 0; i < NUM_THREADS; ++i)
            threads[i]->start();
        for (int32_t i = 0; i < NUM_THREADS; ++i)
            threads[i]->join();

        // the formula assumes each thread leaves a net of 8 * NUM_ITER2 * (1 + threadIndex) documents
        int32_t expectedDocCount = (int32_t)((1 + iter) * (200 + 8 * NUM_ITER2 * (int32_t)(((double)NUM_THREADS / 2.0) * (double)(1 + NUM_THREADS))));
        BOOST_CHECK_EQUAL(expectedDocCount, writer->maxDoc());

        writer->close();
        writer = newLucene<IndexWriter>(directory, analyzer, false, IndexWriter::MaxFieldLengthUNLIMITED);
        writer->setMaxBufferedDocs(2);

        // the index must end up optimized and contain the expected number of documents
        IndexReaderPtr reader = IndexReader::open(directory, true);
        BOOST_CHECK(reader->isOptimized());
        BOOST_CHECK_EQUAL(expectedDocCount, reader->numDocs());
        reader->close();
    }
    writer->close();
}
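// Hypothetical driver (assumption): the full test file presumably exercises runTest
// with both a serial and a concurrent merge scheduler. The test case name, the
// MockRAMDirectory choice, and the scheduler pairing below are illustrative only,
// not the file's actual test case.
BOOST_AUTO_TEST_CASE(testThreadedOptimize) {
    DirectoryPtr directory = newLucene<MockRAMDirectory>();
    runTest(directory, newLucene<SerialMergeScheduler>());
    runTest(directory, newLucene<ConcurrentMergeScheduler>());
    directory->close();
}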