virtual void doWork()
 {
     IndexReaderPtr r = IndexReader::open(directory, true);
     if (r->numDocs() != 100)
         BOOST_FAIL("num docs failure");
     r->close();
 }
TEST_F(IndexWriterReaderTest, testAddIndexesAndDoDeletesThreads) {
    const int32_t numIter = 5;
    const int32_t numDirs = 3;

    DirectoryPtr mainDir = newLucene<MockRAMDirectory>();
    IndexWriterPtr mainWriter = newLucene<IndexWriter>(mainDir, newLucene<WhitespaceAnalyzer>(), IndexWriter::MaxFieldLengthLIMITED);

    // Run the add-directories threads to completion before asserting anything.
    AddDirectoriesThreadsPtr addThreads = newLucene<AddDirectoriesThreads>(numIter, mainWriter);
    addThreads->launchThreads(numDirs);
    addThreads->joinThreads();

    // The writer must have seen exactly as many docs as the threads counted.
    EXPECT_EQ(addThreads->count->intValue(), addThreads->mainWriter->numDocs());

    addThreads->close(true);

    // No thread should have recorded a failure.
    EXPECT_TRUE(addThreads->failures.empty());

    checkIndex(mainDir);

    // Independently confirm the doc count through a fresh read-only reader.
    IndexReaderPtr reader = IndexReader::open(mainDir, true);
    EXPECT_EQ(addThreads->count->intValue(), reader->numDocs());
    reader->close();

    addThreads->closeDir();
    mainDir->close();
}
static void verifyNumDocs(DirectoryPtr dir, int32_t numDocs)
{
    IndexReaderPtr reader = IndexReader::open(dir, true);
    BOOST_CHECK_EQUAL(reader->maxDoc(), numDocs);
    BOOST_CHECK_EQUAL(reader->numDocs(), numDocs);
    reader->close();
}
// Run one indexer and 2 searchers against single index as stress test.
static void runTest(DirectoryPtr directory)
{
    Collection<TimedThreadPtr> threads(Collection<TimedThreadPtr>::newInstance(4));
    AnalyzerPtr analyzer = newLucene<SimpleAnalyzer>();

    IndexWriterPtr writer = newLucene<MockIndexWriter>(directory, analyzer, true, IndexWriter::MaxFieldLengthUNLIMITED);

    // Small buffer and merge factor so the base index ends up multi-segment.
    writer->setMaxBufferedDocs(7);
    writer->setMergeFactor(3);

    // Establish a base index of 100 docs
    for (int32_t docNum = 0; docNum < 100; ++docNum)
    {
        DocumentPtr doc = newLucene<Document>();
        doc->add(newLucene<Field>(L"id", StringUtils::toString(docNum), Field::STORE_YES, Field::INDEX_NOT_ANALYZED));
        doc->add(newLucene<Field>(L"contents", intToEnglish(docNum), Field::STORE_NO, Field::INDEX_ANALYZED));
        // Periodic commits (at docNum == 1, 8, 15, ...) interleave commit
        // points with additions.
        if ((docNum - 1) % 7 == 0)
            writer->commit();
        writer->addDocument(doc);
    }
    writer->commit();

    // Sanity-check the base index before launching the stress threads.
    IndexReaderPtr baseReader = IndexReader::open(directory, true);
    BOOST_CHECK_EQUAL(100, baseReader->numDocs());
    baseReader->close();

    // Two indexers mutate the index while two searchers query it concurrently;
    // each thread is started as soon as it is constructed.
    IndexerThreadPtr indexer1 = newLucene<IndexerThread>(writer);
    threads[0] = indexer1;
    indexer1->start();

    IndexerThreadPtr indexer2 = newLucene<IndexerThread>(writer);
    threads[1] = indexer2;
    indexer2->start();

    SearcherThreadPtr searcher1 = newLucene<SearcherThread>(directory);
    threads[2] = searcher1;
    searcher1->start();

    SearcherThreadPtr searcher2 = newLucene<SearcherThread>(directory);
    threads[3] = searcher2;
    searcher2->start();

    // Wait for all four threads before closing the writer.
    indexer1->join();
    indexer2->join();
    searcher1->join();
    searcher2->join();

    writer->close();

    BOOST_CHECK(!indexer1->failed); // hit unexpected exception in indexer1
    BOOST_CHECK(!indexer2->failed); // hit unexpected exception in indexer2
    BOOST_CHECK(!searcher1->failed); // hit unexpected exception in search1
    BOOST_CHECK(!searcher2->failed); // hit unexpected exception in search2
}
    // Stress test: on each outer iteration, bulk-add 200 docs with merging
    // effectively disabled (merge factor 1000), then lower the merge factor
    // and run NUM_THREADS OptimizeThreads concurrently. Afterwards verify the
    // writer's maxDoc, reopen the index, and check it is optimized with the
    // expected document count. 'merger' (may be null) is installed as the
    // writer's merge scheduler when supplied.
    void runTest(DirectoryPtr directory, MergeSchedulerPtr merger)
    {
        IndexWriterPtr writer = newLucene<IndexWriter>(directory, analyzer, true, IndexWriter::MaxFieldLengthUNLIMITED);
        writer->setMaxBufferedDocs(2);
        if (merger)
            writer->setMergeScheduler(merger);

        for (int32_t iter = 0; iter < NUM_ITER; ++iter)
        {
            int32_t iterFinal = iter;

            // Very high merge factor: segments accumulate during the bulk add.
            writer->setMergeFactor(1000);

            for (int32_t i = 0; i < 200; ++i)
            {
                DocumentPtr d = newLucene<Document>();
                d->add(newLucene<Field>(L"id", StringUtils::toString(i), Field::STORE_YES, Field::INDEX_NOT_ANALYZED));
                d->add(newLucene<Field>(L"contents", intToEnglish(i), Field::STORE_NO, Field::INDEX_ANALYZED));
                writer->addDocument(d);
            }

            // Lower merge factor so the OptimizeThreads trigger real merges.
            writer->setMergeFactor(4);

            Collection<LuceneThreadPtr> threads = Collection<LuceneThreadPtr>::newInstance(NUM_THREADS);

            for (int32_t i = 0; i < NUM_THREADS; ++i)
            {
                int32_t iFinal = i;
                IndexWriterPtr writerFinal = writer;
                threads[i] = newLucene<OptimizeThread>(NUM_ITER2, iterFinal, iFinal, writer, writerFinal);
            }

            // Start all threads, then join them all.
            for (int32_t i = 0; i < NUM_THREADS; ++i)
                threads[i]->start();
            for (int32_t i = 0; i < NUM_THREADS; ++i)
                threads[i]->join();

            // NOTE(review): this formula mirrors how many docs each
            // OptimizeThread adds per iteration — derived from the Java Lucene
            // TestConcurrentMergeScheduler original; confirm it matches the
            // OptimizeThread implementation in this file/project.
            int32_t expectedDocCount = (int32_t)((1 + iter) * (200 + 8 * NUM_ITER2 * (int32_t)(((double)NUM_THREADS / 2.0) * (double)(1 + NUM_THREADS))));

            BOOST_CHECK_EQUAL(expectedDocCount, writer->maxDoc());

            // Close and reopen (create=false) so the next iteration appends to
            // the existing index rather than recreating it.
            writer->close();
            writer = newLucene<IndexWriter>(directory, analyzer, false, IndexWriter::MaxFieldLengthUNLIMITED);
            writer->setMaxBufferedDocs(2);

            // Verify through a fresh reader that the index is optimized and
            // holds the expected number of docs.
            IndexReaderPtr reader = IndexReader::open(directory, true);
            BOOST_CHECK(reader->isOptimized());
            BOOST_CHECK_EQUAL(expectedDocCount, reader->numDocs());
            reader->close();
        }
        writer->close();
    }
    void testTermVectors()
    {
        int32_t numDocs = reader->numDocs();
        int64_t start = 0;
        for (int32_t docId = 0; docId < numDocs; ++docId)
        {
            start = MiscUtils::currentTimeMillis();
            Collection<TermFreqVectorPtr> vectors = reader->getTermFreqVectors(docId);
            timeElapsed += MiscUtils::currentTimeMillis() - start;

            // verify vectors result
            verifyVectors(vectors, docId);

            start = MiscUtils::currentTimeMillis();
            TermFreqVectorPtr vector = reader->getTermFreqVector(docId, L"field");
            timeElapsed += MiscUtils::currentTimeMillis() - start;

            vectors = newCollection<TermFreqVectorPtr>(vector);

            verifyVectors(vectors, docId);
        }
    }
    // Verify undeleteAll(): deleting a doc then undeleting restores the full
    // count, and a committed deletion survives a close/reopen cycle.
    // Relies on member state not shown here: 'sis' (presumably a SegmentInfos
    // — TODO confirm), 'dir', and openReader() which builds the reader under
    // test (plain or MultiReader).
    void doTestUndeleteAll()
    {
        sis->read(dir);
        IndexReaderPtr reader = openReader();
        BOOST_CHECK(reader);
        BOOST_CHECK_EQUAL(2, reader->numDocs());
        // Delete then undelete: doc count must drop to 1 and return to 2.
        reader->deleteDocument(0);
        BOOST_CHECK_EQUAL(1, reader->numDocs());
        reader->undeleteAll();
        BOOST_CHECK_EQUAL(2, reader->numDocs());

        // Ensure undeleteAll survives commit/close/reopen
        reader->commit(MapStringString());
        reader->close();
        
        if (boost::dynamic_pointer_cast<MultiReader>(reader))
        {
            // MultiReader does not "own" the directory so it does not write the changes to sis on commit
            sis->commit(dir);
        }
        
        // Reopen: both docs must still be live after the undeleteAll commit.
        sis->read(dir);
        reader = openReader();
        BOOST_CHECK_EQUAL(2, reader->numDocs());

        // Now commit a real deletion (no undelete) and close.
        reader->deleteDocument(0);
        BOOST_CHECK_EQUAL(1, reader->numDocs());
        reader->commit(MapStringString());
        reader->close();
        
        if (boost::dynamic_pointer_cast<MultiReader>(reader))
        {
            // MultiReader does not "own" the directory so it does not write the changes to sis on commit
            sis->commit(dir);
        }
        
        // Reopen: the committed deletion must persist (1 doc remaining).
        sis->read(dir);
        reader = openReader();
        BOOST_CHECK_EQUAL(1, reader->numDocs());
    }