void PreferencesSearchPageWidget::createSearchEngine()
{
    const QString identifier(Utils::createIdentifier(QString(), m_searchEngines.keys()));

    if (identifier.isEmpty()) {
        return;
    }

    SearchEnginesManager::SearchEngineDefinition searchEngine;
    searchEngine.identifier = identifier;
    searchEngine.title = tr("New Search Engine");
    searchEngine.icon = ThemesManager::createIcon(QLatin1String("edit-find"));

    SearchEnginePropertiesDialog dialog(searchEngine, getKeywords(m_ui->searchViewWidget->getSourceModel()), this);

    if (dialog.exec() == QDialog::Rejected) {
        return;
    }

    searchEngine = dialog.getSearchEngine();

    m_searchEngines[identifier] = {true, searchEngine};

    m_ui->searchViewWidget->insertRow(createRow(searchEngine));

    emit settingsModified();
}
/*static*/ wxString SqlTokenizer::getKeywordsString(KeywordCase kwc)
{
    wxArrayString keywordsArray(getKeywords(kwc));
    wxString keywords;
    for (size_t i = 0; i < keywordsArray.size(); ++i) {
        if (i)
            keywords += " ";
        keywords += keywordsArray[i];
    }
    return keywords;
}
/*static*/ QString SqlTokenizer::getKeywordsString(KeywordCase kwc)
{
    QStringList keywordsArray(getKeywords(kwc));
    QString keywords;
    for (int i = 0; i < keywordsArray.size(); ++i) {
        if (i)
            keywords += QString::fromLatin1(" ");
        keywords += keywordsArray[i];
    }
    return keywords;
}
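// A minimal alternative sketch, not part of the original SqlTokenizer: assuming
// getKeywords(kwc) returns a QStringList (as the function above suggests), the
// same space-separated string can be built directly with QStringList::join. The
// member name getKeywordsStringJoined is hypothetical and would need a
// declaration in the class.
/*static*/ QString SqlTokenizer::getKeywordsStringJoined(KeywordCase kwc)
{
    return getKeywords(kwc).join(QLatin1Char(' '));
}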
void PreferencesSearchPageWidget::addSearchEngine(const QString &path, const QString &identifier, bool isReadding)
{
    QFile file(path);

    if (!file.open(QIODevice::ReadOnly)) {
        QMessageBox::warning(this, tr("Error"), tr("Failed to open Open Search file."));

        return;
    }

    SearchEnginesManager::SearchEngineDefinition searchEngine(SearchEnginesManager::loadSearchEngine(&file, identifier, false));

    file.close();

    if (!searchEngine.isValid() || m_searchEngines.contains(identifier)) {
        QMessageBox::warning(this, tr("Error"), tr("Failed to open Open Search file."));

        return;
    }

    const QStringList keywords(getKeywords(m_ui->searchViewWidget->getSourceModel()));

    if (keywords.contains(searchEngine.keyword)) {
        QMessageBox messageBox;
        messageBox.setWindowTitle(tr("Question"));
        messageBox.setText(tr("Keyword is already in use. Do you want to continue anyway?"));
        messageBox.setIcon(QMessageBox::Question);
        messageBox.setStandardButtons(QMessageBox::Yes | QMessageBox::Cancel);
        messageBox.setDefaultButton(QMessageBox::Cancel);

        if (messageBox.exec() == QMessageBox::Cancel) {
            return;
        }

        searchEngine.keyword.clear();
    }

    m_searchEngines[identifier] = {false, searchEngine};

    m_ui->searchViewWidget->insertRow(createRow(searchEngine));

    if (isReadding) {
        updateReaddSearchEngineMenu();
    }

    emit settingsModified();
}
bool ParameterRuleset::matchArguments(list<TokenList*>* arguments)
{
    list<string> parameters = getKeywords();
    list<TokenList*>::iterator ait = arguments->begin();
    list<string>::iterator pit = parameters.begin();

    for (; pit != parameters.end(); pit++) {
        if (ait != arguments->end())
            ait++;
        else if (getDefault(*pit) == NULL)
            return false;
    }
    return (ait == arguments->end() || unlimitedArguments);
}
int ReimportTest::doTest()
{
    QList<QUrl> files;
    files << setupFilePathForTest("images-for-tests/vector/026.jpg");

    VERIFY(m_TestsApp.addFilesForTest(files), "Failed to add files");

    auto artwork = m_TestsApp.getArtwork(0);

    const Common::ID_t id = artwork->getItemID();
    const QString originalDescription = artwork->getDescription();
    const QString originalTitle = artwork->getTitle();
    const QStringList originalKeywords = artwork->getKeywords();

    QStringList keywords;
    keywords << "picture" << "seagull" << "bird";
    QString title = "Brand new title";
    QString description = "Brand new description";

    artwork->setDescription(description);
    artwork->setTitle(title);
    artwork->getBasicModel().setKeywords(keywords);

    SignalWaiter waiter;
    m_TestsApp.connectWaiterForImport(waiter);

    m_TestsApp.selectAllArtworks();
    m_TestsApp.dispatch(QMLExtensions::UICommandID::SetupReimportMetadata);

    VERIFY(m_TestsApp.continueReading(waiter), "Failed to reimport metadata");

    const QStringList &actualKeywords = artwork->getKeywords();
    const QString &actualTitle = artwork->getTitle();
    const QString &actualDescription = artwork->getDescription();

    VERIFY(id == artwork->getItemID(), "ID should match");
    VERIFY(actualKeywords == originalKeywords, "Original keywords are not the same");
    VERIFY(actualTitle == originalTitle, "Original title is not the same");
    VERIFY(actualDescription == originalDescription, "Original description is not the same");

    return 0;
}
bool QuickBuffer::copyToCurrentEditable()
{
    LOG_DEBUG << "#";
    bool result = false;
    auto currentEditable = m_CurrentEditableModel.getCurrentEditable();

    if (currentEditable) {
        auto command = currentEditable->applyEdits(getTitle(), getDescription(), getKeywords());
        m_CommandManager.processCommand(command);
        result = true;
    } else {
        LOG_WARNING << "Nothing registered as current item";
        sendMessage(Connectivity::EventType::CurrentItemMiss);
    }

    return result;
}
bool ParameterRuleset::putArguments(ValueProcessor* valueProcessor, list<TokenList*>* arguments)
{
    list<string> parameters = getKeywords();
    list<TokenList*>::iterator ait = arguments->begin();
    list<string>::iterator pit = parameters.begin();
    TokenList* argsCombined = new TokenList();
    TokenList* restVar = NULL;
    TokenList* variable;

    if (unlimitedArguments && rest != "")
        restVar = new TokenList();

    // combine with parameter names and add to local scope
    for (; pit != parameters.end(); pit++) {
        if (ait != arguments->end()) {
            valueProcessor->putVariable(*pit, *ait);
            argsCombined->push((*ait)->clone());
            ait++;
        } else {
            variable = getDefault(*pit);
            if (variable == NULL) {
                delete argsCombined;
                return false;
            }
            valueProcessor->putVariable(*pit, variable->clone());
            argsCombined->push(variable->clone());
        }
        argsCombined->push(new Token(" ", Token::WHITESPACE));
    }

    if (argsCombined->size() > 0)
        delete argsCombined->pop();

    if (restVar != NULL) {
        while (ait != arguments->end()) {
            restVar->push(*ait);
            restVar->push(new Token(" ", Token::WHITESPACE));
            ait++;
        }
        if (restVar->size() > 0)
            delete restVar->pop();
        valueProcessor->putVariable(rest, restVar);
    }

    valueProcessor->putVariable("@arguments", argsCombined);
    return true;
}
UBool PluralRules::operator==(const PluralRules& other) const
{
    int32_t limit;
    const UnicodeString *ptrKeyword;
    UErrorCode status = U_ZERO_ERROR;

    if (this == &other) {
        return TRUE;
    }
    LocalPointer<StringEnumeration> myKeywordList(getKeywords(status));
    LocalPointer<StringEnumeration> otherKeywordList(other.getKeywords(status));
    if (U_FAILURE(status)) {
        return FALSE;
    }

    if (myKeywordList->count(status) != otherKeywordList->count(status)) {
        return FALSE;
    }
    myKeywordList->reset(status);
    while ((ptrKeyword = myKeywordList->snext(status)) != NULL) {
        if (!other.isKeyword(*ptrKeyword)) {
            return FALSE;
        }
    }
    otherKeywordList->reset(status);
    while ((ptrKeyword = otherKeywordList->snext(status)) != NULL) {
        if (!this->isKeyword(*ptrKeyword)) {
            return FALSE;
        }
    }
    if (U_FAILURE(status)) {
        return FALSE;
    }

    if ((limit = this->getRepeatLimit()) != other.getRepeatLimit()) {
        return FALSE;
    }
    UnicodeString myKeyword, otherKeyword;
    for (int32_t i = 0; i < limit; ++i) {
        myKeyword = this->select(i);
        otherKeyword = other.select(i);
        if (myKeyword != otherKeyword) {
            return FALSE;
        }
    }
    return TRUE;
}
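// A minimal usage sketch for the equality operator above (not part of the ICU
// source): it assumes ICU4C is available and uses only public API. The function
// name comparePluralRulesSketch is hypothetical.
#include <unicode/plurrule.h>
#include <unicode/localpointer.h>
using icu::LocalPointer;
using icu::PluralRules;

bool comparePluralRulesSketch()
{
    UErrorCode status = U_ZERO_ERROR;
    // Two rule sets built from the same description should compare equal.
    LocalPointer<PluralRules> a(PluralRules::createRules(UNICODE_STRING_SIMPLE("one: n is 1"), status));
    LocalPointer<PluralRules> b(PluralRules::createRules(UNICODE_STRING_SIMPLE("one: n is 1"), status));
    if (U_FAILURE(status) || a.isNull() || b.isNull()) {
        return false;
    }
    return (*a == *b);
}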
int ReadLegacySavedTest::doTest()
{
    QList<QUrl> files;
    files << setupFilePathForTest("images-for-tests/pixmap/img_0007.jpg");

    VERIFY(m_TestsApp.addFilesForTest(files), "Failed to add files");

    auto artwork = m_TestsApp.getArtwork(0);

    const QStringList &keywords = artwork->getKeywords();
    QStringList expectedKeywords = QString("rock,nature,landscape,white,background,beautiful,sun,light,mountain,outdoor,top,rocky,snow,fog,horizon").split(',');

    auto image = std::dynamic_pointer_cast<Artworks::ImageArtwork>(artwork);

    VERIFY(expectedKeywords == keywords, "Keywords are not the same!");
    VERIFY(image->getImageSize().width() == 2752, "Image width was read incorrectly");
    VERIFY(image->getImageSize().height() == 2794, "Image height was read incorrectly");

    return 0;
}
void PreferencesSearchPageWidget::editSearchEngine()
{
    const QModelIndex index(m_ui->searchViewWidget->getIndex(m_ui->searchViewWidget->getCurrentRow(), 0));
    const QString identifier(index.data(IdentifierRole).toString());

    if (identifier.isEmpty() || !m_searchEngines.contains(identifier)) {
        return;
    }

    const QStringList keywords(getKeywords(m_ui->searchViewWidget->getSourceModel(), m_ui->searchViewWidget->getCurrentRow()));
    SearchEnginePropertiesDialog dialog(m_searchEngines[identifier].second, keywords, this);

    if (dialog.exec() == QDialog::Rejected) {
        return;
    }

    SearchEnginesManager::SearchEngineDefinition searchEngine(dialog.getSearchEngine());

    if (keywords.contains(searchEngine.keyword)) {
        searchEngine.keyword.clear();
    }

    m_searchEngines[identifier] = {true, searchEngine};

    m_ui->searchViewWidget->setData(index, searchEngine.title, Qt::DisplayRole);
    m_ui->searchViewWidget->setData(index, searchEngine.title, Qt::ToolTipRole);
    m_ui->searchViewWidget->setData(m_ui->searchViewWidget->getIndex(index.row(), 1), searchEngine.keyword, Qt::DisplayRole);
    m_ui->searchViewWidget->setData(m_ui->searchViewWidget->getIndex(index.row(), 1), searchEngine.keyword, Qt::ToolTipRole);

    if (searchEngine.icon.isNull()) {
        m_ui->searchViewWidget->setData(index, QColor(Qt::transparent), Qt::DecorationRole);
    } else {
        m_ui->searchViewWidget->setData(index, searchEngine.icon, Qt::DecorationRole);
    }

    emit settingsModified();
}
int MetadataCacheSaveTest::doTest()
{
    QList<QUrl> files;
    files << setupFilePathForTest("images-for-tests/pixmap/img_0007.jpg")
          << setupFilePathForTest("images-for-tests/pixmap/seagull-for-clear.jpg")
          << setupFilePathForTest("images-for-tests/pixmap/seagull.jpg")
          << setupFilePathForTest("images-for-tests/vector/026.jpg")
          << setupFilePathForTest("images-for-tests/vector/027.jpg")
          << setupFilePathForTest("images-for-tests/mixed/0267.jpg");

    MetadataIO::MetadataIOService &metadataIOService = m_TestsApp.getMetadataIOService();
    MetadataIO::MetadataIOWorker *worker = metadataIOService.getWorker();
    MetadataIO::MetadataCache &metadataCache = worker->getMetadataCache();

    VERIFY(metadataCache.retrieveRecordsCount() == 0, "Metadata cache is not empty on startup");
    VERIFY(m_TestsApp.addFilesForTest(files), "Failed to add files");

    const int desiredCount = files.count();

    sleepWaitUntil(5, [&metadataCache, &desiredCount]() {
        return metadataCache.retrieveRecordsCount() == desiredCount;
    });

    VERIFY(metadataCache.retrieveRecordsCount() == desiredCount, "Metadata cache was not filled in time");

    QVector<MetadataIO::CachedArtwork> cachedArtworks;
    metadataCache.dumpToArray(cachedArtworks);

    VERIFY(m_TestsApp.getArtworksCount() == cachedArtworks.count(), "Metadata cache size does not match");

    for (MetadataIO::CachedArtwork &ca: cachedArtworks) {
        auto artwork = findArtworkByFilepath(m_TestsApp.getArtworksListModel(), ca.m_Filepath);
        VERIFY(artwork != nullptr, "Metadata cache contains orphaned artworks");
        VERIFY(artwork->getTitle() == ca.m_Title, "Title does not match");
        VERIFY(artwork->getDescription() == ca.m_Description, "Description does not match");
        VERIFY(artwork->getKeywords() == ca.m_Keywords, "Keywords do not match");
    }

    return 0;
}
int ClearMetadataTest::doTest()
{
    QList<QUrl> files;
    files << setupFilePathForTest("images-for-tests/pixmap/seagull-for-clear.jpg");

    VERIFY(m_TestsApp.addFilesForTest(files), "Failed to add files");

    auto artwork = m_TestsApp.getArtwork(0);

    const QStringList &keywords = artwork->getKeywords();
    QStringList expectedKeywords = QString("picture,seagull,bird").split(',');

    VERIFY(expectedKeywords == keywords, "Keywords are not the same!");
    VERIFY(artwork->getDescription() == "Seagull description", "Description is not the same!");
    VERIFY(artwork->getTitle() == "Seagull title", "Title is not the same!");

    artwork->setIsSelected(true);
    m_TestsApp.getFilteredArtworksModel().removeMetadataInSelected();

    SignalWaiter waiter;
    m_TestsApp.connectWaiterForExport(waiter);

    m_TestsApp.selectAllArtworks();
    m_TestsApp.dispatch(QMLExtensions::UICommandID::ExportMetadata);

    VERIFY(waiter.wait(20), "Timeout exceeded for writing metadata.");
    VERIFY(m_TestsApp.checkExportSucceeded(), "Failed to export");

    m_TestsApp.deleteArtworks(Helpers::IndicesRanges(files.size()));

    VERIFY(m_TestsApp.addFilesForTest(files), "Failed to add files");

    artwork = m_TestsApp.getArtwork(0);

    VERIFY(artwork->getBasicMetadataModel().isDescriptionEmpty(), "Description was not empty");
    VERIFY(artwork->getBasicMetadataModel().isTitleEmpty(), "Title was not empty");
    VERIFY(artwork->getBasicMetadataModel().areKeywordsEmpty(), "Keywords were not empty");

    return 0;
}
bool QuickBuffer::copyFromCurrentEditable()
{
    LOG_DEBUG << "#";
    bool result = false;
    auto currentEditable = m_CurrentEditableModel.getCurrentEditable();

    if (currentEditable) {
        QString title = currentEditable->getTitle();
        if (!title.isEmpty()) {
            this->setTitle(title);
        }

        QString description = currentEditable->getDescription();
        if (!description.isEmpty()) {
            this->setDescription(description);
        }

        QStringList keywords = currentEditable->getKeywords();
        if (!keywords.empty()) {
            this->setKeywords(keywords);
        }

        result = true;
    } else {
        LOG_WARNING << "Nothing registered as current item";
        sendMessage(Connectivity::EventType::CurrentItemMiss);
    }

    return result;
}
void PresetTests::appendFromPresetTrivial()
{
    const int itemsToGenerate = 5;
    DECLARE_MODELS_AND_GENERATE(itemsToGenerate);

    presetKeywordsModel.addItem();
    presetKeywordsModel.setName(0, QString("keyword_0"));
    presetKeywordsModel.appendKeyword(0, "keyword_1");
    presetKeywordsModel.appendKeyword(0, "keyword_2");
    presetKeywordsModel.appendKeyword(0, "keyword_3");
    presetKeywordsModel.appendKeyword(0, "keyword_4");
    presetKeywordsModel.appendKeyword(0, "keyword_5");

    for (int i = 0; i < itemsToGenerate; i++) {
        auto metadata = artworksListModel.getMockArtwork(i);
        metadata->set("title", "description", QStringList() << "keyword_0" << "keyword_1" << "keyword_2");
    }

    artworksListModel.addPreset(0, 0, presetKeywordsModel)->execute();

    auto metadata = artworksListModel.getMockArtwork(0);

    QStringList finalString;
    finalString << "keyword_0" << "keyword_1" << "keyword_2" << "keyword_3" << "keyword_4" << "keyword_5";

    QCOMPARE(metadata->getKeywords(), finalString);
    QVERIFY(metadata->isModified());
}
vector<string> Room::getObjKeywords(string objName)
{
    //Gets a List of an Object's Keywords.
    if (findKeyword(objName)) {
        return getKeywords();
    }

    for (auto & door : doorList) {
        if (door->findKeyword(objName) || door->getLeadsTo()->findKeyword(objName)) {
            return door->getKeywords();
        }
    }

    for (auto & npc : npcList) {
        if (npc->searchKeyword(objName)) {
            return npc->getKeyword();
        }
    }

    for (auto & item : itemList) {
        if (item->searchKeyword(objName)) {
            return item->getKeyword();
        }
    }

    vector<string> result;
    result.push_back("Object not found!");
    return result;
}
int main(int argc, char **argv)
{
    std::string *flagValues = nullptr; //Holds values passed from the cmd line
    std::ifstream inputFile;
    std::ofstream outputFile;

    try {
        //STEP 1: PROCESS COMMAND LINE ARGUMENTS
        //Parses the cmd args
        flagValues = parseCmdArgs(argc, argv);
        //TODO: Fills in unspecified flags with default values

        //STEP 2: CLEAN THE SOURCE FILE
        //Opens the files
        inputFile.open(flagValues[NUM_FLAGS]);
        outputFile.open(flagValues[CLEAN_FILE_FLAG]);
        if (!inputFile.is_open())
            throw std::runtime_error("Failed to open the source file");
        if (!outputFile.is_open())
            throw std::runtime_error("Failed to open the clean output file");

        //Initializes the cleaner object and cleans the source file
        CodeCleaner cleaner("~~", "/~", "~/");
        cleaner.clean(inputFile, outputFile);

        //Closes the files
        inputFile.close();
        outputFile.close();

        //STEP 3: TOKENIZE THE CLEANED INPUT
        //Gets the list of keywords and their tokens
        std::vector<std::pair<std::string, std::string>> keywords = getKeywords(KW_FILE);

        //Opens the files
        inputFile.open(flagValues[CLEAN_FILE_FLAG]);
        outputFile.open(flagValues[TOKEN_FILE_FLAG]);
        if (!inputFile.is_open())
            throw std::runtime_error("Failed to open the cleaned file");
        if (!outputFile.is_open())
            throw std::runtime_error("Failed to open the token output file");

        //Initializes the tokenizer object and tokenizes the clean file
        ProseTokenizer tokenizer(keywords);
        tokenizer.tokenize(inputFile, outputFile);

        //STEP 4: CREATE PARSE TREES AND CONVERT THEM TO ASSEMBLY
    }
    catch (const std::regex_error &e) {
        std::cout << "regex_error code: " << e.code() << std::endl;
    }
    catch (const std::exception &e) {
        std::cout << e.what() << std::endl;
    }

    //Frees dynamically allocated memory
    delete[] flagValues;

    //Closes all files
    inputFile.close();
    outputFile.close();

    return 0;
}
/**
 * Find keywords in the given file and append
 * the string describing these keywords to the
 * long string (for the suffix tree).
 */
int buildIndex(struct EXTRACT_Process * eproc,
               FILE * logFile,
               const char * filename,
               struct DOODLE_SuffixTree * tree,
               int do_filenames)
{
    struct KeywordList * head;
    struct KeywordList * pos;

    head = getKeywords(eproc, filename);
    pos = head;
    while (pos != NULL) {
        char * cpos;
        size_t slen;

        cpos = pos->keyword;
        if (logFile != NULL)
            fprintf(logFile, "%s\n", cpos);
        slen = strlen(cpos);
        if (slen > MAX_LENGTH) {
            char section[MAX_LENGTH+1];
            char * xpos;
            int j;

            section[MAX_LENGTH] = '\0';
            for (j = 0; j < slen; j += MAX_LENGTH/2) {
                strncpy(section, &cpos[j], MAX_LENGTH);
                xpos = &section[0];
                while (xpos[0] != '\0') {
                    if (0 != DOODLE_tree_expand(tree, xpos, filename)) {
                        freeKeywords(head);
                        return 0;
                    }
                    xpos++;
                }
            }
        } else {
            while (cpos[0] != '\0') {
                if (0 != DOODLE_tree_expand(tree, cpos, filename)) {
                    freeKeywords(head);
                    return 0;
                }
                cpos++;
            }
        }
        pos = pos->next;
    }
    freeKeywords(head);
    if (do_filenames) {
        const char * cpos = filename;

        while (cpos[0] != '\0') {
            if (0 != DOODLE_tree_expand(tree, cpos, filename))
                return 0;
            cpos++;
        }
    }
    return 1;
}
bool Chain::substituent_m (
        const int& l1,  //start pos of c
        const int& u1,  //end pos of c
        const int& l2,  //start pos of this
        const int& u2,  //end pos of this
        const Chain* c,
        vector<cMatchType>& result  //result
        ) const
{
    if (!result.empty ()) result.clear ();

    assert (l1 <= u1 + 1);
    assert (l2 <= u2 + 1);

    // take care! l1 = u1+1 and l2 = u2+1 has been ruled out previously
    if (l1 == u1 + 1) {
        if (l2 == u2 + 1) return true;
        else return false;
    }

    if (l2 == u2 + 1) {
        if (l1 == u1 + 1) return true;
        else {
            cMatchType res;
            for (int i = l1; i <= u1; i++) {
                string partRef = c->listOfParts[i]->getPartRef ();
                int keyValue = getKeywords (partRef);
                if (keyValue != -1) {
                    switch (keyValue) {
                        case 0:
                        case 1: { res.push_back (make_pair (l2, u2)); break; }
                        default: return false;
                    }
                } else {
                    // "errno" is a reserved macro name, so build the message in a plain local
                    string errMsg ("Invalid Substituent Type: ");
                    errMsg += partRef + "!";
                    throw CoreException (errMsg);
                }
            }
            result.push_back (res);
            return true;
        }
    }

    //
    // only handle one part and the rest recursively
    //
    int startpos, endpos;
    assert (l1 < c->listOfParts.size ());

    string partRef = c->listOfParts[l1]->getPartRef ();
    string partType = c->listOfParts[l1]->getPartType ();

    int keyValue = getKeywords (partRef);
    if (keyValue != -1) {
        switch (keyValue) {
            case 0: {
                startpos = l2;
                endpos = u2 + 1;
                if (!partType.empty ()) {
                    // stop at the first part whose type does not match
                    for (int i = l2; i <= u2; i++) {
                        if (!type_match (listOfParts[i]->partType, partType)) { endpos = i; break; }
                    }
                }
                break;
            }
            case 1: {
                startpos = l2;
                endpos = u2 + 1;
                // stop at the first binded part
                for (int i = l2; i <= u2; i++) {
                    if (listOfParts[i]->isBinded) { endpos = i; break; }
                }
                if (!partType.empty ()) {
                    for (int i = l2; i <= endpos - 1; i++) {
                        if (!type_match (listOfParts[i]->partType, partType)) { endpos = i; break; }
                    }
                }
                break;
            }
            case 2: {
                startpos = l2 + 1;
                endpos = u2 + 1;
                if (!partType.empty ()) {
                    for (int i = l2; i <= u2; i++) {
                        if (!type_match (listOfParts[i]->partType, partType)) { endpos = i; break; }
                    }
                }
                break;
            }
            case 3: {
                startpos = l2 + 1;
                endpos = u2 + 1;
                for (int i = l2; i <= u2; i++) {
                    if (listOfParts[i]->isBinded) { endpos = i; break; }
                }
                if (!partType.empty ()) {
                    for (int i = l2; i <= endpos - 1; i++) {
                        if (!type_match (listOfParts[i]->partType, partType)) { endpos = i; break; }
                    }
                }
                break;
            }
            case 4: {
                startpos = endpos = l2 + 1;
                if (!type_match (listOfParts[l2]->partType, partType)) endpos = l2;
                break;
            }
            case 5: {
                startpos = endpos = l2 + 1;
                if (listOfParts[l2]->isBinded) endpos = l2;
                if (!type_match (listOfParts[l2]->partType, partType)) endpos = l2;
                break;
            }
            default: break;
        }
    } else {
        string errMsg ("Invalid Substituent Type: ");
        errMsg += partRef + "!";
        throw CoreException (errMsg);
    }

    bool rVal = false;
    for (int i = startpos; i <= endpos; i++) {
        vector< list< pair<int,int> > > recursive;
        bool mok = substituent_m (l1 + 1, u1, i, u2, c, recursive);
        if (mok) {
            if (recursive.size () == 0) {
                // only push back in matchings of current part
                cMatchType only_one;
                only_one.push_back (make_pair (l2, i - 1));
                result.push_back (only_one);
            } else {
                for (int j = 0; j < recursive.size (); j++) {
                    recursive[j].push_front (make_pair (l2, i - 1));
                    result.push_back (recursive[j]);
                }
            }
            rVal = true;
        } else continue;
    }
    return rVal;
}
Boolean UNIX_BGPService::getKeywords(CIMProperty &p) const
{
    p = CIMProperty(PROPERTY_KEYWORDS, getKeywords());
    return true;
}
void SSE::remove(docid_t docName, double& duration)
{
    // docName = "/Users/naveed/BStore/datasets/testdir/" + docName + ".";
    double diskTime = 0;
    clock_t startTime = clock();

    uint64_t docHash = getDocNameHash(boost::lexical_cast<string>(docName));
    uint64_t docID0 = docHash;
    CLEAR_BIT(docID0, 0);
    uint64_t docID1 = docHash;
    SET_BIT(docID1, 0);

    duration += (double)(clock() - startTime)/(double)CLOCKS_PER_SEC;

    OnlineSession session;
    session.resetDiskAccessTime();

    clock_t ignoredStartTime = clock();
    if (fstore.isFilePresent(boost::lexical_cast<string>(docID0))) {
        fstore.remove(boost::lexical_cast<string>(docID0));
        // duration -= (double)(clock()-ignoredStartTime)/(double)CLOCKS_PER_SEC;
        /* Entries from Index will be deleted using lazy delete */
    }
    else if (fstore.isFilePresent(boost::lexical_cast<string>(docID1))) {
    // else if (fstore.isFilePresent(boost::lexical_cast<string>(docID))){
        byte* doc;
        double SKETime = 0;
        // size_t size = fstore.get("/Users/naveed/BStore/datasets/test4MB/allen-p/straw/8.", doc, SKETime);
        size_t size = fstore.get(boost::lexical_cast<string>(docID1), doc, SKETime);
        cout << "SKE took " << SKETime << " seconds." << endl;

        unordered_set<string, stringhash> keywords;
        getKeywords(doc, size, keywords);

        duration -= (double)(clock() - ignoredStartTime)/(double)CLOCKS_PER_SEC;

        for (unordered_set<string, stringhash>::iterator it = keywords.begin(); it != keywords.end(); ++it) {
            string keyword = boost::lexical_cast<string>(1) + *it;

            OnlineSession session;
            byte* docIDs = NULL;
            size_t size = session.updateRead(keyword, docIDs, -sizeof(docid_t), diskTime);
            // cout << "Updating keyword " << keyword << endl;
            // cout << "Number of files containing keyword \"" << keyword << "\" are " << size << endl << " " << size - sizeof(docid_t) << endl;

            // uint32_t docIDtoRemove = findDocID(docIDs, size, docID1);
            uint32_t docIDtoRemove = findDocID(docIDs, size, docName);
            if (docIDtoRemove == -1) {
                std::cerr << "File present in filestore but is not present in BStore." << endl;
                std::cerr << "This is probably due to the files stored from previous runs. Try again after deleting files from previous runs." << endl;
                exit(1);
            }

            deleteDocID(docIDs, size, docIDtoRemove);
            // cout << "DocID to remove " << docIDtoRemove << endl;

            session.updateWrite(keyword, docIDs, size, diskTime);

            delete[] docIDs;
            docIDs = NULL;
        }

        delete[] doc;

        duration += (double)(clock() - startTime)/(double)CLOCKS_PER_SEC - diskTime;
        cout << "Disk operations took " << session.getDiskAccessTime() << " seconds." << endl;

        fstore.remove(boost::lexical_cast<string>(docID1));
    }
    else {
        cout << "File not found!" << endl;
    }

    // TODO: delete docNameHash(document) from DocumentStore
}
bool CloudsClip::hasKeyword(string keyword)
{
    return ofContains(getKeywords(), keyword);
}
void SSE::add(docid_t docName, string docFileName, double& duration)
{
    double diskTime = 0;
    clock_t startTime = clock();

    uint64_t docID = getDocNameHash(boost::lexical_cast<string>(docName));
    SET_BIT(docID, 0);

    byte* doc;
    clock_t ignoredStartTime = clock();
    size_t size = fstore.getNew(docFileName, doc);

    unordered_set<string, stringhash> keywords;
    getKeywords(doc, size, keywords);
    duration -= (double)(clock() - ignoredStartTime)/(double)CLOCKS_PER_SEC;

    remove(docName, duration);

    OnlineSession session;
    session.resetDiskAccessTime();

    // cout << "Adding keywords " << endl;
    size_t partialIndexDownloadSize = 0;
    for (unordered_set<string, stringhash>::iterator it = keywords.begin(); it != keywords.end(); ++it) {
        // string keyword = boost::lexical_cast<string>(1) + *it;
        string keyword = boost::lexical_cast<string>(0) + *it; //Changed to 0 just for computing data when data added is too much

        OnlineSession session;
        byte* docIDs = new byte[4];
        session.updateWithInsecureAppend(keyword, docIDs, diskTime);

        /*
        byte* docIDs;
        size_t size = session.updateRead(keyword, docIDs, sizeof(docid_t), diskTime);

        if(size <= SIZE_MIN * MAX_BLOCK_DATA_SIZE)
            partialIndexDownloadSize += SIZE_MIN*BLOCK_SIZE;
        else
            partialIndexDownloadSize += (int)ceil((double)size/(double)MAX_BLOCK_DATA_SIZE)*BLOCK_SIZE*BLOW_UP;

        // cout << "Data downloaded is " << partialIndexDownloadSize << endl;
        // cout << "Size is " << size << endl;

        // addDocID(docIDs, size, docID);
        // printhex(docIDs, size, "BEFORE");
        byte updatedDocIDs[size+sizeof(docid_t)];
        memcpy(updatedDocIDs, docIDs, size);
        memcpy(&updatedDocIDs[size], static_cast<byte*>(static_cast<void*>(&docName)), sizeof(docid_t));
        // printhex(updatedDocIDs, size+sizeof(docid_t), "AFTER");

        session.updateWrite(keyword, updatedDocIDs, size + sizeof(docid_t), diskTime);

        delete[] docIDs;
        */
    }

    // cout << "Disk operations took " << session.getDiskAccessTime() << " seconds." << endl;
    cout << "Total data downloaded is " << partialIndexDownloadSize << endl;

    duration += (double)(clock() - startTime)/(double)CLOCKS_PER_SEC - diskTime;
    cout << "Disk Access took " << session.getDiskAccessTime() << " seconds." << endl;

    double SKETime = 0;
    // fstore.put(boost::lexical_cast<string>(docID), doc, size, SKETime);
    cout << "SKE write took " << SKETime << " seconds." << endl;

    delete[] doc;
}