int main(int argc, char **argv) { cout << "Test begins." << endl; cout << "-------------------------------------------------------" << endl; string index_dir = getenv("index_dir"); string init_data_file = index_dir + "/init_data"; string update_data_file = index_dir + "/update_data"; cout << "Read init data from " << init_data_file << endl; cout << "Save index to " << index_dir << endl; cout << "Read update data from " << update_data_file << endl; buildIndex(init_data_file, index_dir); IndexMetaData *indexMetaData = new IndexMetaData( new CacheManager(), mergeEveryNSeconds, mergeEveryMWrites, updateHistogramEveryPMerges, updateHistogramEveryQWrites, index_dir); Indexer *index = Indexer::load(indexMetaData); index->createAndStartMergeThreadLoop(); cout << "Index loaded." << endl; updateIndexAndLookupRecord(update_data_file, index); delete index; delete indexMetaData; return 0; }
/**
 * @brief Adds Jacobian @f$ \frac{\partial F}{\partial \dot{y}} @f$ to bead mobile phase rows of system Jacobian
 * @details Actually adds @f$ \alpha \frac{\partial F}{\partial \dot{y}} @f$, which is useful
 *          for constructing the linear system in BDF time discretization.
 * @param [in,out] jac On entry, RowIterator of the particle block pointing to the beginning of a bead shell;
 *                     on exit, the iterator points to the end of the mobile phase
 * @param [in] idxr Indexer
 * @param [in] alpha Value of \f$ \alpha \f$ (arises from BDF time discretization)
 * @param [in] timeFactor Factor which is premultiplied to the time derivatives originating from time transformation
 */
void GeneralRateModel::addMobilePhaseTimeDerivativeToJacobianParticleBlock(linalg::FactorizableBandMatrix::RowIterator& jac, const Indexer& idxr, double alpha, double timeFactor)
{
	// Compute total factor (alpha is a local copy, so mutating it is safe)
	alpha *= timeFactor;

	// Mobile phase: one row per component; ++jac advances to the next row
	for (int comp = 0; comp < static_cast<int>(_disc.nComp); ++comp, ++jac)
	{
		// Add derivative with respect to dc_p / dt to Jacobian (diagonal entry)
		jac[0] += alpha;

		// Phase-ratio factor 1/beta_p for this component; couples the solid
		// phase time derivative into the mobile phase equation
		const double invBetaP = (1.0 - static_cast<double>(_parPorosity)) / (static_cast<double>(_poreAccessFactor[comp]) * static_cast<double>(_parPorosity));

		// Add derivative with respect to dq / dt to Jacobian
		for (int i = 0; i < static_cast<int>(_disc.nBound[comp]); ++i)
		{
			// Index explanation:
			//   -comp                 -> go back to beginning of liquid phase
			//   + strideParLiquid()      skip to solid phase
			//   + offsetBoundComp()      jump to component (skips all bound states of previous components)
			//   + i                      go to current bound state
			jac[idxr.strideParLiquid() - comp + idxr.offsetBoundComp(comp) + i] += alpha * invBetaP;
		}
	}
}
/**
 * Copies the currently selected nodes of @p doc — and the connections whose
 * endpoints are selected — to the system clipboard.
 * A connection is copied when at least one endpoint is selected; when both
 * endpoints are selected it is indexed "completely" (second argument true).
 * Returns the settings message produced by the base-class PCommand::Do().
 */
BMessage* Copy::Do(PDocument *doc, BMessage *settings)
{
	BMessage *clip = NULL;
	BMessage *node = NULL;
	BMessage *from = NULL;
	BMessage *to = NULL;
	bool fselect = false;
	bool tselect = false;
	BMessage *copyMessage = new BMessage();
	BList *selected = doc->GetSelected();
	BList *allConnections = doc->GetAllConnections();
	int32 i = 0;
	Indexer *indexer = new Indexer(doc);
	if (doc->Lock())
	{
		// Index all selected nodes; connections are handled separately below.
		for (i = 0; i < selected->CountItems(); i++)
		{
			if ((node = (BMessage *)selected->ItemAt(i)) != NULL)
			{
				if (node->what != P_C_CONNECTION_TYPE)
					copyMessage->AddMessage("node", indexer->IndexNode(node));
			}
		}
		// Copy connections whose endpoints are (partially) selected.
		for (i = 0; i < allConnections->CountItems(); i++)
		{
			node = (BMessage *)allConnections->ItemAt(i);
			if ((node->FindPointer("From", (void **)&from) == B_OK) &&
			    (node->FindPointer("To", (void **)&to) == B_OK))
			{
				if ((from->FindBool("selected", &fselect) == B_OK) &&
				    (to->FindBool("selected", &tselect) == B_OK))
				{
					if (fselect && tselect)
						copyMessage->AddMessage("node", indexer->IndexConnection(node, true));
					else if (fselect || tselect)
						copyMessage->AddMessage("node", indexer->IndexConnection(node));
				}
			}
		}
		doc->Unlock();
	}
	if (be_clipboard->Lock())
	{
		be_clipboard->Clear();
		if ((clip = be_clipboard->Data()) != NULL)
		{
			// BUGFIX: the old code called AddData(..., copyMessage,
			// sizeof(copyMessage)), which stored sizeof(pointer) bytes of the
			// pointer value itself. Flatten the message so real data lands on
			// the clipboard.
			ssize_t flatSize = copyMessage->FlattenedSize();
			char *flatData = new char[flatSize];
			if (copyMessage->Flatten(flatData, flatSize) == B_OK)
				clip->AddData("application/x-vnd.projectconceptor-document", B_MIME_TYPE, flatData, flatSize);
			delete[] flatData;
			clip->AddMessage("test", copyMessage);
			PRINT_OBJECT(*clip);
			be_clipboard->Commit();
		}
		be_clipboard->Unlock();
	}
	// BUGFIX: indexer and copyMessage were leaked before. AddMessage() copies
	// the message, so freeing copyMessage here is safe.
	delete indexer;
	delete copyMessage;
	settings = PCommand::Do(doc, settings);
	return settings;
}
// Serializes this document into @p archive: looper state, document settings,
// all nodes, all connections, the selection, and the command manager's
// undo/macro state. Node/connection messages are run through an Indexer,
// presumably to turn live pointers into stable ids — confirm in Indexer.
status_t PDocument::Archive(BMessage* archive, bool deep) const
{
	TRACE();
	// Archive the BLooper base state first.
	BLooper::Archive(archive, deep);
	Indexer *indexer = new Indexer((PDocument *)this);
	int32 i = 0;
	BMessage *commandManage = new BMessage();
	BMessage *tmpNode = NULL;
	BMessage *allNodesMessage = new BMessage();
	BMessage *allConnectionsMessage = new BMessage();
	BMessage *selectedMessage = new BMessage();
	archive->AddMessage("PDocument::documentSetting",documentSetting);
	//save all Nodes
	for (i=0; i<allNodes->CountItems();i++)
	{
		tmpNode=(BMessage *)allNodes->ItemAt(i);
		allNodesMessage->AddMessage("node",indexer->IndexNode(tmpNode));
	}
	archive->AddMessage("PDocument::allNodes",allNodesMessage);
	//save all Connections
	for (i=0; i<allConnections->CountItems();i++)
	{
		tmpNode=(BMessage *)allConnections->ItemAt(i);
		allConnectionsMessage->AddMessage("node",indexer->IndexConnection(tmpNode));
	}
	archive->AddMessage("PDocument::allConnections",allConnectionsMessage);
	//save the selected List
	// NOTE(review): raw pointers are archived here — they are only
	// meaningful within this process/session; confirm intended use.
	for (i=0; i<selected->CountItems();i++)
	{
		selectedMessage->AddPointer("node",selected->ItemAt(i));
	}
	archive->AddMessage("PDocument::selected",selectedMessage);
	//save all Command related Stuff like Undo/Makor
	// commandManager->Archive(commandManage);
	// Undo entries are indexed; macro entries are stored as-is.
	for (i=0;i<(commandManager->GetUndoList())->CountItems();i++)
	{
		commandManage->AddMessage("undo",indexer->IndexMacroCommand((BMessage *)(commandManager->GetUndoList())->ItemAt(i)));
	}
	for (i=0;i<(commandManager->GetMacroList())->CountItems();i++)
	{
		commandManage->AddMessage("macro",(BMessage *)(commandManager->GetMacroList())->ItemAt(i));
	}
	commandManage->AddInt32("undoStatus",commandManager->GetUndoIndex());
	archive->AddMessage("PDocument::commandManager", commandManage);
	// AddMessage() copies, so the local helper messages can be freed here.
	delete indexer;
	delete commandManage;
	//delete tmpNode;
	delete allNodesMessage;
	delete allConnectionsMessage;
	delete selectedMessage;
	return B_OK;
}
/**
 * Pastes the nodes/connections previously placed on the clipboard by
 * Copy::Do(): de-indexes them via the document's Indexer and dispatches an
 * "Insert" command (with a nested "Select" sub-command) to @p doc.
 * Returns @p settings unchanged.
 */
BMessage* Paste::Do(PDocument *doc, BMessage *settings)
{
	BMessage *clip = NULL;
	BMessage *node = new BMessage();
	BMessage *deIndexedNode = NULL;
	BMessage *copyMessage = new BMessage;
	int32 i = 0;
	Indexer *indexer = new Indexer(doc);

	// Fetch the copied message from the clipboard.
	if (be_clipboard->Lock())
	{
		if ((clip = be_clipboard->Data()) != NULL)
		{
			clip->FindMessage("test", copyMessage);
			copyMessage->PrintToStream();
		}
		be_clipboard->Unlock();
	}
	if (copyMessage)
	{
		BMessage *inserter = new BMessage(P_C_EXECUTE_COMMAND);
		BMessage *select = new BMessage(P_C_EXECUTE_COMMAND);
		inserter->AddString("Command::Name", "Insert");
		select->AddString("Command::Name", "Select");
		while (copyMessage->FindMessage("node", i, node) == B_OK)
		{
			if (node->what == P_C_CONNECTION_TYPE)
				deIndexedNode = indexer->DeIndexConnection(node);
			else
			{
				deIndexedNode = indexer->RegisterDeIndexNode(node);
				//only select nodes.. because es the copy and paste funktion with selected nodes dosent work proper
				select->AddPointer("node", deIndexedNode);
			}
			inserter->AddPointer("node", deIndexedNode);
			i++;
			// Allocate a fresh message per iteration, as before — the indexer
			// may still reference the previous one for de-index bookkeeping.
			node = new BMessage();
		}
		// BUGFIX: the trailing BMessage allocated by the loop was leaked.
		delete node;
		node = NULL;
		i = 0;
		while (inserter->FindPointer("node", i, (void **)&node) == B_OK)
		{
			if (node->what != P_C_CONNECTION_TYPE)
				indexer->DeIndexNode(node);
			i++;
		}
		inserter->AddMessage("PCommand::subPCommand", select);
		PRINT_OBJECT(*inserter);
		// BUGFIX: use a stack BMessenger instead of leaking a heap-allocated
		// one; SendMessage() copies the message, so inserter/select can be
		// freed afterwards (select was already copied by AddMessage above).
		BMessenger(NULL, doc).SendMessage(inserter);
		delete inserter;
		delete select;
	}
	// BUGFIX: copyMessage was leaked. NOTE(review): indexer is intentionally
	// not freed here — its de-index registry may be referenced by the nodes
	// just dispatched; confirm ownership before freeing.
	delete copyMessage;
	return settings;
}
void test3() { addRecords(); /// Test the Trie SynonymContainer *syn = SynonymContainer::getInstance("", SYNONYM_DONOT_KEEP_ORIGIN); syn->init(); Analyzer *analyzer = new Analyzer(NULL, NULL, NULL, syn, ""); unsigned mergeEveryNSeconds = 3; unsigned mergeEveryMWrites = 5; unsigned updateHistogramEveryPMerges = 1; unsigned updateHistogramEveryQWrites = 5; string INDEX_DIR = "."; IndexMetaData *indexMetaData = new IndexMetaData( GlobalCache::create(1000,1000), mergeEveryNSeconds, mergeEveryMWrites, updateHistogramEveryPMerges, updateHistogramEveryQWrites, INDEX_DIR); Indexer *indexer = Indexer::load(indexMetaData); //index->print_Index(); Record *record = new Record(indexer->getSchema()); record->setPrimaryKey(1999); record->setSearchableAttributeValue(0, "steve jobs"); record->setSearchableAttributeValue(1, "stanford speech"); record->setRecordBoost(90); indexer->addRecord(record, analyzer); indexer->merge_ForTesting(); /* // create an index searcher IndexSearcher *indexSearcher = IndexSearcher::create(indexer); Analyzer *analyzer = indexer->getAnalyzer(); indexer->print_Index(); ASSERT ( ping(analyzer, indexSearcher, "tom" , 1 , 1001) == true); ASSERT ( ping(analyzer, indexSearcher, "jimi" , 1 , 1008) == true); ASSERT ( ping(analyzer, indexSearcher, "smith" , 1 , 1001) == true); ASSERT ( ping(analyzer, indexSearcher, "jobs" , 1 , 1999) == true); (void)analyzer; delete indexSearcher;*/ delete indexer; delete analyzer; }
/**
 * Replays a recorded macro: executes each "Macro::Commmand" entry of
 * @p makro in order, de-indexing it first, until the list is exhausted or a
 * command fails. Pauses 100ms between commands.
 */
void PCommandManager::PlayMacro(BMessage *makro)
{
	int32 i = 0;
	BMessage *message = new BMessage();
	Indexer *playDeIndexer = new Indexer(doc);
	status_t err = B_OK;
	// NOTE: "Macro::Commmand" (three m's) is the key actually stored by the
	// recorder — do not "fix" the spelling here.
	while ((makro->FindMessage("Macro::Commmand", i, message) == B_OK) && (err == B_OK))
	{
		err = Execute(playDeIndexer->DeIndexCommand(message));
		snooze(100000);
		i++;
	}
	// BUGFIX: message and playDeIndexer were leaked before.
	delete message;
	delete playDeIndexer;
}
int main() { Parser *parser = new Parser("base/"); parser->Process(); vector<Document *>* collection = parser->GetCollection(); Indexer *indexer = new Indexer(collection); indexer->Initialize(); indexer->Process(); indexer->Print(); indexer->WriteIndexFile(); delete indexer; delete parser; }
// Factory: reconstructs the @p ordinal-th stored index found under @p path.
// The stored description row is laid out as:
//   [0] index data file, [1] indexer type, [2] indexer params,
//   [3] feature-algorithm type, [4] feature-algorithm params.
Indexer* Indexer::createIndex(QString path, int ordinal)
{
    // Look up the stored description of the requested index.
    QList<QStringList> descriptions = listIndices(path);
    QStringList entry = descriptions[ordinal];

    // Recreate the feature-extraction algorithm first...
    ImageFeatures* features = ImageFeatures::factory(entry[3]);
    features->setParams(entry[4]);

    // ...then the indexer that uses it, and load its persisted data.
    Indexer* result = factory(entry[1], features, path);
    result->setParams(entry[2]);
    result->loadIndex(entry[0]);
    return result;
}
// Configures the I2C peripheral for ~400 kHz "fast mode" operation.
// todo:3 parameters for fast and a kiloHertz
void I2C::init400k(void){
	init();
	clearHW();
#if I2C_TRACE
	stageTrace.rewind();
#endif
	//cr1 as apb.init sets it up is fine
	int apbRate = getClockRate();
	// CR2 takes the APB clock in MHz (presumably the FREQ field — confirm
	// against the reference manual); keeps all interrupt enable stuff disabled
	dcb->cr2 = apbRate / 1000000;
	//compute the two fast options, choose the highest
	int r1 = rate(apbRate, (25 * 400000)); //@36 = 4, 4*25 = 100 clocks = 360kHz
	int r0 = rate(apbRate, (3 * 400000)); //@36 = 30, 30*3 = 90 clocks = 400kHz
	// Pick whichever duty-cycle option yields the higher actual bus rate.
	if(25 * r1 > 3 * r0) {
		dcb->ccr = (3 << 14) /* fast 9:16*/ | r1;
	} else {
		dcb->ccr = (2 << 14) /* 400kHz 1:2*/ | r0;
	}
	//maximum clock risetime: 3/1000000 = 3e-9 in integers without overflowing
	dcb->riseTime = 1 + quanta(apbRate * 3, 10000000); //300ns: ~11 clocks at 36MHz apb clock.
	//alh: suspect the +1 given in the documents is due to them truncating rather than rounding up.
	//enable interrupts at NVIC:
	eventIrq.prepare(); //NB: simple enable() often generates an interrupt even when all interrupts are masked, dammit! (Busy was true)
	errorIrq.enable();
} /* init */
// Handles a drag-and-drop of an index item in the access tree view:
// re-parents index @p source under @p dest.
void AccessTreeViewListener::OnDropIndex(DesktopDragObject* drag_object, index_gid_t source, index_gid_t dest)
{
	AccessModel* model = static_cast<AccessModel*>(m_treeview.GetTreeModel());
	Indexer* indexer = g_m2_engine->GetIndexer();

	// Dropping on empty space targets the model's category; dropping between
	// items (insert before/after) targets the parent of the item at the
	// insert position instead of the item itself.
	if (dest == 0)
		dest = model->GetCategoryID();
	else if (drag_object->GetInsertType() == DesktopDragObject::INSERT_BEFORE || drag_object->GetInsertType() == DesktopDragObject::INSERT_AFTER)
		dest = indexer->GetIndexById(dest)->GetParentId();

	if (!CanDropIndex(source, dest))
		return;

	// corner case, changing the order of two folders:
	// if dest is currently a descendant of source, first re-parent the
	// ancestor of dest that sits directly under source to source's own
	// parent — otherwise SetParentId below would create a cycle.
	OpINT32Vector children;
	indexer->GetChildren(source, children, FALSE);

	if (children.Find(dest) != -1)
	{
		Index* index = indexer->GetIndexById(dest);
		while (index->GetParentId() != source)
		{
			index = indexer->GetIndexById(index->GetParentId());
		}
		index->SetParentId(indexer->GetIndexById(source)->GetParentId());
	}

	indexer->GetIndexById(source)->SetParentId(dest);
}
// Streams the whole document state to @p pushTo as a sequence of flattened
// BMessages: settings, all nodes, all connections, the selection, recorded
// macros, the undo list and finally the undo index. NOTE(review): the read
// side must consume the stream in exactly this order — confirm against the
// corresponding load/pull implementation.
void PDocument::PushToStream(BPositionIO *pushTo)
{
	Indexer *indexer = new Indexer((PDocument *)this);
	BMessage *tmpNode = NULL;
	BMessage *commandManage = new BMessage();
	BMessage *selectedMessage = new BMessage();
	int i = 0;
	//**security check if the passed BPositionIO ok is
	documentSetting->Flatten(pushTo);
	// All nodes, indexed first (presumably turns live pointers into stable
	// ids — confirm in Indexer).
	for (i=0; i<allNodes->CountItems();i++)
	{
		tmpNode=(BMessage *)allNodes->ItemAt(i);
		BMessage *indexed = indexer->IndexNode(tmpNode);
		indexed->Flatten(pushTo);
	}
	// All connections, likewise indexed.
	for (i=0; i<allConnections->CountItems();i++)
	{
		tmpNode=(BMessage *)allConnections->ItemAt(i);
		BMessage *indexed = indexer->IndexConnection(tmpNode);
		indexed->Flatten(pushTo);
	}
	// The selection is stored as raw pointers into the node list.
	for (i=0; i<selected->CountItems();i++)
	{
		selectedMessage->AddPointer("node",selected->ItemAt(i));
	}
	selectedMessage->Flatten(pushTo);
	// Macros are streamed as-is; undo entries go through the indexer.
	for (i=0;i<(commandManager->GetMacroList())->CountItems();i++)
	{
		BMessage *macro =(BMessage *)(commandManager->GetMacroList()->ItemAt(i));
		macro->Flatten(pushTo);
	}
	for (i=0;i<(commandManager->GetUndoList())->CountItems();i++)
	{
		BMessage *indexed = indexer->IndexMacroCommand((BMessage *)(commandManager->GetUndoList()->ItemAt(i)));
		indexed->Flatten(pushTo);
	}
	//**add the UndoIndex
	commandManage->AddInt32("undoStatus",commandManager->GetUndoIndex());
	//add the commandManage
	commandManage->Flatten(pushTo);
	delete indexer;
	delete commandManage;
	delete selectedMessage;
}
// Conditions this factor on the observation (arg == obsIdx): keeps only the
// parameter entries consistent with the observed state, then removes the
// argument and its range from the factor. (Name "absorve" is a historical
// misspelling of "absorb" kept for API compatibility.)
template <typename T> void GenericFactor<T>::absorveEvidence (const T& arg, unsigned obsIdx)
{
	size_t idx = indexOf (arg);
	assert (idx != args_.size()); // arg must belong to this factor
	assert (obsIdx < ranges_[idx]); // observed state must be within range
	Params newps;
	// One slice of the parameter table survives the conditioning.
	newps.reserve (params_.size() / ranges_[idx]);
	Indexer indexer (ranges_);
	// Move the indexer to the first entry whose idx-th dimension == obsIdx...
	for (unsigned i = 0; i < obsIdx; ++i) {
		indexer.incrementDimension (idx);
	}
	// ...then copy every parameter while keeping that dimension fixed.
	while (indexer.valid()) {
		newps.push_back (params_[indexer]);
		indexer.incrementExceptDimension (idx);
	}
	params_ = newps;
	args_.erase (args_.begin() + idx);
	ranges_.erase (ranges_.begin() + idx);
}
// Two different users index posts sharing the topic "secret": the topic's
// per-user post map must list both users (ordered "kim" before "val").
TEST(IndexerTest, testGetUserPostIdsMultipleUsers) {
    Indexer idx;
    const string firstUser = "val";
    const string secondUser = "kim";
    const uint64_t firstPost = 666;
    const uint64_t secondPost = 777;
    {
        // First user indexes a post containing "secret".
        EXPECT_TRUE(idx.index(TTopics({"find", "secret", "on", "valbok.com"}), firstUser, firstPost));
        auto item = idx.getTopicItem("secret");
        auto byUser = item.userPostIds;
        EXPECT_EQ(1, byUser.size());
        auto userIt = byUser.begin();
        EXPECT_EQ(firstUser, userIt->first);
        EXPECT_EQ(1, userIt->second.size());
        auto postIt = userIt->second.begin();
        EXPECT_EQ(firstPost, *postIt);
    }
    {
        // Second user reuses the topic: both users are now listed.
        EXPECT_TRUE(idx.index(TTopics({"open", "secret", "page"}), secondUser, secondPost));
        auto item = idx.getTopicItem("secret");
        auto byUser = item.userPostIds;
        EXPECT_EQ(2, byUser.size());
        auto userIt = byUser.begin();
        EXPECT_EQ(secondUser, userIt->first);
        EXPECT_EQ(1, userIt->second.size());
        auto postIt = userIt->second.begin();
        EXPECT_EQ(secondPost, *postIt);
        ++userIt;
        EXPECT_EQ(firstUser, userIt->first);
        EXPECT_EQ(1, userIt->second.size());
        postIt = userIt->second.begin();
        EXPECT_EQ(firstPost, *postIt);
    }
}
// Benchmark/comparison: splits the F4 matrix stored in @p file_name into the
// four submatrices (A|B over C|D, partitioned at the pivot count Npiv) twice
// — once with LELA's Splicer and once with the custom Indexer — reporting
// timings for each through the commentator.
void testLELASplicerVsIndexer(const char *file_name)
{
	typedef uint16 modulus_type;
	typedef Modular<modulus_type> Ring;
	modulus_type modulus = MatrixUtil::loadF4Modulus(file_name);
	Ring R (modulus);
	Context<Ring> ctx (R);
	Indexer<uint32> indexer;
	std::ostream &report = commentator.report (Commentator::LEVEL_NORMAL, INTERNAL_DESCRIPTION);
	commentator.start("Loading matrix");
	SparseMatrix<Ring::Element> A = MatrixUtil::loadF4Matrix(R, file_name);
	MatrixUtil::invertMatrixRows(A); //accomodate format to LELA's splicer
	commentator.stop(MSG_DONE);
	// Reference implementation: LELA's own Splicer.
	commentator.start("Constructing submatrices using Splicer", "USING LELA SPLICER");
	LELA_GF_UTIL::spliceMatrix(R, A);
	commentator.stop(MSG_DONE, "USING LELA SPLICER");
	report << endl;
	// Custom implementation: first build pivot indexes, then carve out the
	// four blocks sized by indexer.Npiv (number of pivot rows/columns).
	commentator.start("Constructing submatrices using Indexer", "USING INDEXER");
	commentator.start("Constructing indexes");
	indexer.processMatrix(A);
	commentator.stop(MSG_DONE);
	SparseMatrix<Ring::Element> sub_A (indexer.Npiv, indexer.Npiv),
		sub_B (indexer.Npiv, A.coldim () - indexer.Npiv),
		sub_C (A.rowdim () - indexer.Npiv, indexer.Npiv),
		sub_D (A.rowdim () - indexer.Npiv, A.coldim () - indexer.Npiv);
	commentator.start("Constructing sub matrices");
	indexer.constructSubMatrices(A, sub_A, sub_B, sub_C, sub_D, true);
	commentator.stop(MSG_DONE);
	commentator.stop(MSG_DONE, "USING INDEXER");
}
// Builds a location index over the "quadtree/1K" data set found under
// @p directoryName, commits it and saves it to disk.
void Serialize(string directoryName)
{
    // Create a schema
    Schema *schema = Schema::create(LocationIndex);
    schema->setPrimaryKey("list_id"); // integer, by default not searchable
    schema->setSearchableAttribute("title", 2); // searchable text
    schema->setSearchableAttribute("address", 7); // searchable text

    // Create an analyzer
    Analyzer *analyzer = new Analyzer(NULL, NULL, NULL, NULL, "");

    // Merge/histogram tuning knobs for the index writer.
    unsigned mergeEveryNSeconds = 3;
    unsigned mergeEveryMWrites = 5;
    unsigned updateHistogramEveryPMerges = 1;
    unsigned updateHistogramEveryQWrites = 5;
    CacheManager *cache = new CacheManager(134217728);
    IndexMetaData *indexMetaData = new IndexMetaData(cache,
            mergeEveryNSeconds, mergeEveryMWrites,
            updateHistogramEveryPMerges, updateHistogramEveryQWrites,
            directoryName);
    Indexer *indexer = Indexer::create(indexMetaData, analyzer, schema);

    readRecordsFromFile(indexer, schema, analyzer, directoryName + "/quadtree/1K");

    // CLEANUP: removed an unused quadtree read-view/root-node pair that was
    // fetched here and never used.

    // serialize the index
    indexer->commit();
    indexer->save(directoryName);

    delete indexer;
    delete indexMetaData;
    delete analyzer;
    delete schema;
}
// One user indexes two posts sharing the topic "job": the topic item must
// count both posts and list them under the single user, newest first.
TEST(IndexerTest, testGetUserPostIds) {
    Indexer idx;
    const string owner = "******";
    const uint64_t firstPost = 666;
    const uint64_t secondPost = 777;
    {
        // First post: "job" appears once for this user.
        EXPECT_TRUE(idx.index(TTopics({"please", "give", "me", "job"}), owner, firstPost));
        EXPECT_FALSE(idx.hasTopicItem("unknown"));
        EXPECT_TRUE(idx.hasTopicItem("job"));
        auto item = idx.getTopicItem("job");
        EXPECT_EQ(1, item.postsCount);
        auto byUser = item.userPostIds;
        EXPECT_EQ(1, byUser.size());
        auto userIt = byUser.begin();
        EXPECT_EQ(owner, userIt->first);
        EXPECT_EQ(1, userIt->second.size());
        auto postIt = userIt->second.begin();
        EXPECT_EQ(firstPost, *postIt);
    }
    {
        // Second post reuses "job": same single user, two post ids.
        EXPECT_TRUE(idx.index(TTopics({"waiting", "for", "job"}), owner, secondPost));
        EXPECT_FALSE(idx.hasTopicItem("unknown"));
        EXPECT_TRUE(idx.hasTopicItem("job"));
        auto item = idx.getTopicItem("job");
        EXPECT_EQ(2, item.postsCount);
        auto byUser = item.userPostIds;
        EXPECT_EQ(1, byUser.size());
        auto userIt = byUser.begin();
        EXPECT_EQ(owner, userIt->first);
        EXPECT_EQ(2, userIt->second.size());
        auto postIt = userIt->second.begin();
        EXPECT_EQ(secondPost, *postIt);
        EXPECT_EQ(firstPost, *(++postIt));
    }
}
/* Article indexer methods */
// Thread entry point: @p ptr is the owning Indexer*. Drains the to-index
// queue, feeding each token to the concrete index backend, flushing
// periodically, and honoring deferred pthread cancellation between articles.
void *Indexer::indexArticles(void *ptr) {
	pthread_setcanceltype(PTHREAD_CANCEL_DEFERRED, NULL);
	Indexer *self = (Indexer *)ptr;
	unsigned int indexedArticleCount = 0;
	indexerToken token;

	self->indexingPrelude(self->getIndexPath());

	while (self->popFromToIndexQueue(token)) {
		self->index(token.url,
			    token.accentedTitle,
			    token.title,
			    token.keywords,
			    token.content,
			    token.snippet,
			    token.size,
			    token.wordCount
			    );
		indexedArticleCount += 1;

		/* Make a hard-disk flush every 5.000 articles
		   (old comment said 10.000 but the code flushes at 5000) */
		if (indexedArticleCount % 5000 == 0) {
			self->flush();
		}

		/* Test if the thread should be cancelled */
		pthread_testcancel();
	}
	self->indexingPostlude();

	// NOTE(review): purpose of this short sleep before clearing the running
	// flag is unclear — confirm whether it is still needed.
	usleep(100);
	self->articleIndexerRunning(false);
	pthread_exit(NULL);
	return NULL;
}
/** * @brief Assembles a particle Jacobian block @f$ J_i @f$ (@f$ i > 0 @f$) of the time-discretized equations * @details The system \f[ \left( \frac{\partial F}{\partial y} + \alpha \frac{\partial F}{\partial \dot{y}} \right) x = b \f] * has to be solved. The system Jacobian of the original equations, * \f[ \frac{\partial F}{\partial y}, \f] * is already computed (by AD or manually in residualImpl() with @c wantJac = true). This function is responsible * for adding * \f[ \alpha \frac{\partial F}{\partial \dot{y}} \f] * to the system Jacobian, which yields the Jacobian of the time-discretized equations * \f[ F\left(t, y_0, \sum_{k=0}^N \alpha_k y_k \right) = 0 \f] * when a BDF method is used. The time integrator needs to solve this equation for @f$ y_0 @f$, which requires * the solution of the linear system mentioned above (@f$ \alpha_0 = \alpha @f$ given in @p alpha). * * @param [in] pblk Index of the particle block * @param [in] alpha Value of \f$ \alpha \f$ (arises from BDF time discretization) * @param [in] idxr Indexer * @param [in] timeFactor Factor which is premultiplied to the time derivatives originating from time transformation */ void GeneralRateModel::assembleDiscretizedJacobianParticleBlock(unsigned int pblk, double alpha, const Indexer& idxr, double timeFactor) { linalg::FactorizableBandMatrix& fbm = _jacPdisc[pblk]; const linalg::BandMatrix& bm = _jacP[pblk]; // Copy normal matrix over to factorizable matrix fbm.copyOver(bm); // Add time derivatives to particle shells linalg::FactorizableBandMatrix::RowIterator jac = fbm.row(0); for (unsigned int j = 0; j < _disc.nPar; ++j) { // Mobile phase (advances jac accordingly) addMobilePhaseTimeDerivativeToJacobianParticleBlock(jac, idxr, alpha, timeFactor); // Stationary phase _binding->jacobianAddDiscretized(alpha * timeFactor, jac); // Advance pointers over all bound states jac += idxr.strideParBound(); } }
// Topics attached to a post accumulate across index() calls and are returned
// newest-first by getPostTopics().
TEST(IndexerTest, testGetPostTopic) {
    Indexer idx;
    const string owner = "******";
    const uint64_t post = 666;
    {
        // Initial indexing: three topics, returned in reverse insert order.
        EXPECT_TRUE(idx.index(TTopics({"secret", "still", "there"}), owner, post));
        EXPECT_FALSE(idx.hasPostTopics(0));
        EXPECT_TRUE(idx.hasPostTopics(post));
        auto postTopics = idx.getPostTopics(post);
        EXPECT_EQ(3, postTopics.size());
        auto topicIt = postTopics.begin();
        EXPECT_EQ("there", *topicIt);
        EXPECT_EQ("still", *++topicIt);
        EXPECT_EQ("secret", *++topicIt);
    }
    {
        // Re-indexing the same post adds the new topic at the front.
        EXPECT_TRUE(idx.index({"check_html"}, owner, post));
        auto postTopics = idx.getPostTopics(post);
        EXPECT_EQ(4, postTopics.size());
        EXPECT_EQ("check_html", *postTopics.begin());
    }
}
// Entry point: builds an index over the file named by the first CLI argument.
int main(int argc, char * argv[]){
	// BUGFIX: the original dereferenced argv[1] unconditionally (crashing
	// when no argument was given) and never checked the argument count —
	// the parameter was even misspelled "agrc" and left unused.
	if (argc < 2) {
		return 1;
	}
	Indexer indexer;
	indexer.readFiles(argv[1]);
	return 0;
}
// extractTopics() pulls '#'-prefixed words out of free text, stripping
// trailing punctuation; a bare '#', punctuation-only tag, or a tag broken by
// an inner '!' yields nothing.
TEST(IndexerTest, testExtractTopics) {
    Indexer idx;
    // Runs extractTopics() on `text` and checks both the return flag and the
    // extracted topics against `expected`.
    auto expectExtracted = [&idx](const string& text, const vector<string>& expected) {
        vector<string> topics;
        const bool found = idx.extractTopics(text, topics);
        EXPECT_EQ(!expected.empty(), found);
        EXPECT_EQ(expected.size(), topics.size());
        for (size_t n = 0; n < expected.size() && n < topics.size(); ++n)
            EXPECT_EQ(expected[n], topics[n]);
    };

    // Inputs with no extractable topics.
    expectExtracted("", {});
    expectExtracted("#", {});
    expectExtracted("#!", {});
    expectExtracted("no #bc!de", {});
    expectExtracted("eating steak for dinner", {});

    // One or more well-formed topics.
    expectExtracted("eating #steak for dinner", {"steak"});
    expectExtracted("#eating #steak for #dinner", {"eating", "steak", "dinner"});

    // Trailing punctuation (even repeated/mixed) is stripped.
    expectExtracted("#eating, #steak. for #dinner. #exactly! #yes?",
                    {"eating", "steak", "dinner", "exactly", "yes"});
    expectExtracted("#eating!!!, #steak???. for #dinner...... #exactly!,,.: #yes?:",
                    {"eating", "steak", "dinner", "exactly", "yes"});
}
// Read data from file, build the index, and save the index to disk void buildIndex(string data_file, string index_dir) { /// Set up the Schema Schema *schema = Schema::create(srch2is::DefaultIndex); schema->setPrimaryKey("primaryKey"); schema->setSearchableAttribute("description", 2); schema->setScoringExpression("idf_score*doc_boost"); /// Create an Analyzer Analyzer *analyzer = new Analyzer(NULL, NULL, NULL, NULL, "", srch2is::STANDARD_ANALYZER); /// Create an index writer unsigned mergeEveryNSeconds = 2; unsigned mergeEveryMWrites = 5; unsigned updateHistogramEveryPMerges = 1; unsigned updateHistogramEveryQWrites = 5; IndexMetaData *indexMetaData = new IndexMetaData( new CacheManager(), mergeEveryNSeconds, mergeEveryMWrites, updateHistogramEveryPMerges, updateHistogramEveryQWrites, index_dir); Indexer *indexer = Indexer::create(indexMetaData, analyzer, schema); Record *record = new Record(schema); unsigned docsCounter = 0; string line; ifstream data(data_file.c_str()); /// Read records from file /// the file should have two fields, seperated by '^' /// the first field is the primary key, the second field is a searchable attribute while(getline(data,line)) { unsigned cellCounter = 0; stringstream lineStream(line); string cell; while(getline(lineStream,cell,'^') && cellCounter < 3 ) { if (cellCounter == 0) { record->setPrimaryKey(cell.c_str()); } else if (cellCounter == 1) { record->setSearchableAttributeValue(0, cell); } else { float recordBoost = atof(cell.c_str()); record->setRecordBoost(recordBoost); } cellCounter++; } indexer->addRecord(record, analyzer); docsCounter++; record->clear(); } cout << "#Docs Read:" << docsCounter << endl; indexer->commit(); indexer->save(); cout << "Index saved." << endl; data.close(); delete indexer; delete indexMetaData; delete analyzer; delete schema; }
// Test using the circle range void testCircleRange(string directoryName) { // Create a schema Schema *schema = Schema::create(LocationIndex); schema->setPrimaryKey("list_id"); // integer, by default not searchable schema->setSearchableAttribute("title", 2); // searchable text schema->setSearchableAttribute("address", 7); // searchable text // Create an analyzer Analyzer *analyzer = new Analyzer(NULL, NULL, NULL, NULL, ""); unsigned mergeEveryNSeconds = 3; unsigned mergeEveryMWrites = 5; unsigned updateHistogramEveryPMerges = 1; unsigned updateHistogramEveryQWrites = 5; CacheManager *cache = new CacheManager(134217728); IndexMetaData *indexMetaData = new IndexMetaData( cache, mergeEveryNSeconds, mergeEveryMWrites, updateHistogramEveryPMerges, updateHistogramEveryQWrites, directoryName); Indexer *indexer = Indexer::create(indexMetaData, analyzer, schema); // Create five records of 8 attributes and add them to the index addGeoRecord(indexer, schema, analyzer, 0, "Tom Smith and Jack Lennon", "Yesterday Once More", 100.0, 100.0); addGeoRecord(indexer, schema, analyzer, 1, "George Harris", "Here comes the sun", 110.0, 110.0); addGeoRecord(indexer, schema, analyzer, 2, "George Harris", "Here comes the sun", 10.0, 10.0); addGeoRecord(indexer, schema, analyzer, 3, "George Harris", "Here comes the sun", -100.0, -100.0); addGeoRecord(indexer, schema, analyzer, 4, "George Harris", "Here comes the sun", -110.0, -110.0); addGeoRecord(indexer, schema, analyzer, 5, "George Harris", "Here comes the sun", -100.0, 100.0); addGeoRecord(indexer, schema, analyzer, 6, "George Harris", "Here comes the sun", 100.0, -100.0); addGeoRecord(indexer, schema, analyzer, 7, "George Harris", "Here comes the sun", 101.0, -101.0); // commit the index bool retval = indexer->commit(); ASSERT( retval == 1 ); (void)retval; // Storing results of the query and expected results vector<vector<unsigned>*> expectedResults; vector<vector<GeoElement*>*> results; QueryEvaluatorRuntimeParametersContainer 
runTimeParameters; QueryEvaluator * queryEvaluator = new QueryEvaluator(indexer,&runTimeParameters ); //Rectangle queryRange(pair(pair(-20,-20),pair(20,20))); Point point; point.x = 100; point.y = 100; Circle circle(point,30); boost::shared_ptr<QuadTreeRootNodeAndFreeLists> quadtree_ReadView; quadtree_ReadView = dynamic_cast<IndexReaderWriter *>(indexer)->getQuadTree_ReadView(); QuadTreeNode *qt = quadtree_ReadView->root; qt->rangeQuery(results,circle); vector<unsigned> res; res.push_back(getExternalId(queryEvaluator,0)); res.push_back(getExternalId(queryEvaluator,1)); res.push_back(getExternalId(queryEvaluator,2)); expectedResults.push_back(&res); verifyResults(results,expectedResults); delete indexer; delete indexMetaData; delete analyzer; delete schema; }
// purgeByUser() on the owning user removes the post and all of its topics;
// purging a user who indexed nothing is a reported no-op.
TEST(IndexerTest, testPurgeByUserSingleItem) {
    Indexer idx;
    const string owner = "******";
    const uint64_t post = 666;

    // Index one post with three topics and verify everything is visible.
    EXPECT_TRUE(idx.index(TTopics({"check", "source", "html"}), owner, post));
    EXPECT_TRUE(idx.hasPostTopics(post));
    EXPECT_TRUE(idx.hasTopicItem("check"));
    EXPECT_TRUE(idx.hasTopicItem("source"));
    EXPECT_TRUE(idx.hasTopicItem("html"));

    // Purging an unknown user changes nothing; purging the owner wipes it all.
    EXPECT_FALSE(idx.purgeByUser("kim"));
    EXPECT_TRUE(idx.purgeByUser(owner));
    EXPECT_FALSE(idx.hasPostTopics(post));
    EXPECT_FALSE(idx.hasTopicItem("check"));
    EXPECT_FALSE(idx.hasTopicItem("source"));
    EXPECT_FALSE(idx.hasTopicItem("html"));
}
// Records the new protocol stage and, if the trace buffer still has room,
// appends the stage code to the trace.
void I2C::setStage(I2C::Stage stageCode){
	stage = stageCode;
	if(!stageTrace.hasNext()) {
		return;
	}
	stageTrace.next() = stageCode;
}
// Purging one user only removes that user's posts and decrements shared
// topic counters; topics used exclusively by that user disappear entirely.
TEST(IndexerTest, testPurgeByUser) {
    Indexer idx;
    const string firstUser = "val";
    const uint64_t firstPost = 666;
    const string secondUser = "kim";
    const uint64_t secondPost = 777;

    // First user: two posts over three distinct topics.
    EXPECT_TRUE(idx.index(TTopics({"href", "cv", "date"}), firstUser, firstPost));
    EXPECT_TRUE(idx.index(TTopics({"cv"}), firstUser, firstPost + 1));
    EXPECT_TRUE(idx.hasPostTopics(firstPost));
    EXPECT_TRUE(idx.hasPostTopics(firstPost + 1));
    EXPECT_FALSE(idx.hasPostTopics(secondPost));
    EXPECT_TRUE(idx.hasTopicItem("href"));
    EXPECT_TRUE(idx.hasTopicItem("cv"));
    EXPECT_TRUE(idx.hasTopicItem("date"));
    EXPECT_EQ(2, idx.getTopicItem("cv").postsCount);

    // Second user adds two more posts on overlapping topics.
    EXPECT_TRUE(idx.index(TTopics({"href", "cv"}), secondUser, secondPost));
    EXPECT_TRUE(idx.index(TTopics({"cv"}), secondUser, secondPost + 1));
    EXPECT_TRUE(idx.hasPostTopics(secondPost));
    EXPECT_TRUE(idx.hasPostTopics(secondPost + 1));
    EXPECT_EQ(4, idx.getTopicItem("cv").postsCount);
    EXPECT_EQ(2, idx.getTopicItem("href").postsCount);

    // Purging the first user keeps the second user's posts; "date" vanishes
    // because only the first user ever used it.
    EXPECT_TRUE(idx.purgeByUser(firstUser));
    EXPECT_FALSE(idx.hasPostTopics(firstPost));
    EXPECT_FALSE(idx.hasPostTopics(firstPost + 1));
    EXPECT_TRUE(idx.hasPostTopics(secondPost));
    EXPECT_TRUE(idx.hasPostTopics(secondPost + 1));
    EXPECT_EQ(2, idx.getTopicItem("cv").postsCount);
    EXPECT_EQ(1, idx.getTopicItem("href").postsCount);
    EXPECT_FALSE(idx.hasTopicItem("date"));

    // Purging the second user empties the index.
    EXPECT_TRUE(idx.purgeByUser(secondUser));
    EXPECT_FALSE(idx.hasPostTopics(secondPost));
    EXPECT_FALSE(idx.hasPostTopics(secondPost + 1));
    EXPECT_FALSE(idx.hasTopicItem("cv"));
    EXPECT_FALSE(idx.hasTopicItem("href"));
}
// Tracks the three size counters (topics, posts, users) through a sequence
// of index and purge operations.
TEST(IndexerTest, testSizes) {
    Indexer idx;
    // Checks all three counters in one shot.
    auto expectSizes = [&idx](size_t topics, size_t posts, size_t users) {
        EXPECT_EQ(topics, idx.getTopicItemsSize());
        EXPECT_EQ(posts, idx.getPostTopicsSize());
        EXPECT_EQ(users, idx.getUserTopicsSize());
    };

    EXPECT_TRUE(idx.index({"Y",}, "val", 1));
    expectSizes(1, 1, 1);
    EXPECT_TRUE(idx.index({"m"}, "val", 1));
    expectSizes(2, 1, 1); // same post, new topic
    EXPECT_TRUE(idx.index({"d"}, "val", 2));
    expectSizes(3, 2, 1); // same user, new post
    EXPECT_TRUE(idx.index({"find"}, "kim", 3));
    expectSizes(4, 3, 2); // new user
    EXPECT_TRUE(idx.index({"Y"}, "kim", 3));
    expectSizes(4, 3, 2); // nothing new anywhere

    EXPECT_TRUE(idx.purgeByUser("val"));
    expectSizes(2, 1, 1);
    EXPECT_TRUE(idx.purgeByUser("kim"));
    expectSizes(0, 0, 0);

    EXPECT_TRUE(idx.index({"find", "admin", "page"}, "kim", 4));
    expectSizes(3, 1, 1); // index is usable again after full purge
}
void test1() { Schema *schema = Schema::create(srch2::instantsearch::DefaultIndex); schema->setPrimaryKey("article_id"); // integer, not searchable schema->setSearchableAttribute("article_id"); // convert id to searchable text schema->setSearchableAttribute("article_authors", 2); // searchable text schema->setSearchableAttribute("article_title", 7); // searchable text // create an analyzer SynonymContainer *syn = SynonymContainer::getInstance("", SYNONYM_DONOT_KEEP_ORIGIN); syn->init(); Analyzer *analyzer = new Analyzer(NULL, NULL, NULL, syn, ""); unsigned mergeEveryNSeconds = 3; unsigned mergeEveryMWrites = 5; unsigned updateHistogramEveryPMerges = 1; unsigned updateHistogramEveryQWrites = 5; string INDEX_DIR = "test"; IndexMetaData *indexMetaData = new IndexMetaData( new CacheManager(), mergeEveryNSeconds, mergeEveryMWrites, updateHistogramEveryPMerges, updateHistogramEveryQWrites, INDEX_DIR); Indexer *index = Indexer::create(indexMetaData, analyzer, schema); Record *record = new Record(schema); char* authorsCharStar = new char[30]; char* titleCharStar = new char[30]; //generate random characers srand ( time(NULL) ); // create a record of 3 attributes for (unsigned i = 0; i < 1000; i++) { record->setPrimaryKey(i + 1000); sprintf(authorsCharStar,"John %cLen%cnon",(rand() % 50)+65,(rand() % 10)+65); string authors = string(authorsCharStar); record->setSearchableAttributeValue("article_authors", authors); sprintf(titleCharStar,"Yesterday %cOnc%ce %cMore", (rand()%59)+65, (rand()%59)+65, (rand()%10)+65); string title = string(titleCharStar); record->setSearchableAttributeValue("article_title", title); record->setRecordBoost(rand() % 100); index->addRecord(record, analyzer); // for creating another record record->clear(); } // build the index index->commit(); //indexer->printNumberOfBytes(); delete[] authorsCharStar; delete[] titleCharStar; delete record; delete index; delete analyzer; delete schema; }
void addRecords() { ///Create Schema Schema *schema = Schema::create(srch2::instantsearch::DefaultIndex); schema->setPrimaryKey("article_id"); // integer, not searchable schema->setSearchableAttribute("article_id"); // convert id to searchable text schema->setSearchableAttribute("article_authors", 2); // searchable text schema->setSearchableAttribute("article_title", 7); // searchable text SynonymContainer *syn = SynonymContainer::getInstance("", SYNONYM_DONOT_KEEP_ORIGIN); syn->init(); Record *record = new Record(schema); Analyzer *analyzer = new Analyzer(NULL, NULL, NULL, syn, ""); unsigned mergeEveryNSeconds = 3; unsigned mergeEveryMWrites = 5; unsigned updateHistogramEveryPMerges = 1; unsigned updateHistogramEveryQWrites = 5; string INDEX_DIR = "."; IndexMetaData *indexMetaData = new IndexMetaData( NULL, mergeEveryNSeconds, mergeEveryMWrites, updateHistogramEveryPMerges, updateHistogramEveryQWrites, INDEX_DIR); Indexer *index = Indexer::create(indexMetaData, analyzer, schema); record->setPrimaryKey(1001); record->setSearchableAttributeValue("article_authors", "Tom Smith and Jack Lennon"); record->setSearchableAttributeValue("article_title", "come Yesterday Once More"); record->setRecordBoost(10); index->addRecord(record, analyzer); record->clear(); record->setPrimaryKey(1008); record->setSearchableAttributeValue(0, "Jimi Hendrix"); record->setSearchableAttributeValue(1, "Little wing"); record->setRecordBoost(90); index->addRecord(record, analyzer); index->commit(); //index->commit(); //index->print_Index(); std::cout << "print 1 $$$$$$$$$$$$$$" << std::endl; record->clear(); record->setPrimaryKey(1007); record->setSearchableAttributeValue(0, "Jimaai Hendaarix"); record->setSearchableAttributeValue(1, "Littaale waaing"); record->setRecordBoost(90); index->addRecord(record, analyzer); //index->print_Index(); std::cout << "print 2 $$$$$$$$$$$$$$" << std::endl; delete schema; delete record; delete analyzer; delete index; syn->free(); }