void indri::collection::Repository::_buildChain( indri::api::Parameters& parameters, indri::api::Parameters* options ) {
  // Extract the URL from metadata before case normalizing.
  // This could be parameterized.
  if( parameters.get( "injectURL", true ) )
    _transformations.push_back( new indri::parse::URLTextAnnotator() );

  bool dontNormalize = parameters.exists( "normalize" ) && ( false == (bool) parameters["normalize"] );

  if( dontNormalize == false ) {
    _transformations.push_back( new indri::parse::NormalizationTransformation() );
    _transformations.push_back( new indri::parse::UTF8CaseNormalizationTransformation() );
  }

  for( size_t i = 0; i < _fields.size(); i++ ) {
    if( _fields[i].parserName == "NumericFieldAnnotator" ) {
      _transformations.push_back( new indri::parse::NumericFieldAnnotator( _fields[i].name ) );
    } else if( _fields[i].parserName == "DateFieldAnnotator" ) {
      _transformations.push_back( new indri::parse::DateFieldAnnotator( _fields[i].name ) );
    }
  }

  if( _parameters.exists( "stopper.word" ) ) {
    indri::api::Parameters stop = _parameters["stopper.word"];
    _transformations.push_back( new indri::parse::StopperTransformation( stop ) );
  }

  // The transient chain stopwords need to precede the stemmer.
  if( options ) {
    if( options->exists( "stopper.word" ) ) {
      indri::api::Parameters stop = (*options)["stopper.word"];
      _transformations.push_back( new indri::parse::StopperTransformation( stop ) );
    }
  }

  if( _parameters.exists( "stemmer.name" ) ) {
    std::string stemmerName = std::string( _parameters["stemmer.name"] );
    indri::api::Parameters stemmerParams = _parameters["stemmer"];
    _transformations.push_back( indri::parse::StemmerFactory::get( stemmerName, stemmerParams ) );
  }
}
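// ---------------------------------------------------------------------------
// A minimal usage sketch (not part of the original source) showing how a caller
// might supply the parameters _buildChain reads: "injectURL", "normalize",
// "stopper.word", and "stemmer.name". The path and the XML values are
// illustrative assumptions; Parameters::load parses the same XML syntax used in
// indri parameter files.
// ---------------------------------------------------------------------------
#include "indri/Parameters.hpp"
#include "indri/Repository.hpp"

static void exampleCreateRepository() {
  indri::api::Parameters p;
  p.load(
    "<parameters>"
    "  <normalize>true</normalize>"
    "  <injectURL>false</injectURL>"
    "  <stemmer><name>krovetz</name></stemmer>"
    "  <stopper><word>a</word><word>an</word><word>the</word></stopper>"
    "</parameters>" );

  indri::collection::Repository r;
  r.create( "/tmp/example-index", &p );  // the processing chain is built from p
  r.close();
}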
void indri::collection::Repository::_openIndexes( indri::api::Parameters& params, const std::string& parentPath ) {
  try {
    indri::api::Parameters container = params["indexes"];

    _active = new index_vector;
    _states.push_back( _active );
    _indexCount = params.get( "indexCount", 0 );

    if( container.exists( "index" ) ) {
      indri::api::Parameters indexes = container["index"];

      for( size_t i = 0; i < indexes.size(); i++ ) {
        indri::api::Parameters indexSpec = indexes[i];
        indri::index::DiskIndex* diskIndex = new indri::index::DiskIndex();
        std::string indexName = (std::string) indexSpec;

        diskIndex->open( parentPath, indexName );
        _active->push_back( diskIndex );
      }
    }
  } catch( lemur::api::Exception& e ) {
    LEMUR_RETHROW( e, "_openIndexes: Couldn't open DiskIndexes because:" );
  }
}
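// ---------------------------------------------------------------------------
// Sketch (hypothetical, not from the original file) of the read path that ends
// in _openIndexes: openRead loads <repository>/manifest into Parameters, and the
// "indexes" block there enumerates the on-disk index names to open.
// ---------------------------------------------------------------------------
#include <iostream>
#include <string>
#include "indri/Repository.hpp"

static void exampleOpenForReading( const std::string& path ) {
  indri::collection::Repository r;
  r.openRead( path );  // _openIndexes runs as part of openRead

  indri::collection::Repository::index_state state = r.indexes();
  std::cout << "open disk indexes: " << state->size() << std::endl;

  r.close();
}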
std::string indri::collection::Repository::_stemmerName( indri::api::Parameters& parameters ) {
  return parameters.get( "stemmer.name", "" );
}
void processFields( indri::api::Parameters& param ) {
  // Uses file-scope state from this tool: g_timer, repo (the manifest Parameters),
  // addFields, removeNames, adding, fce, _index, _docIter, parsed, and the helpers
  // _mergeFields() / _mergeData().
  g_timer.start();

  std::string index = param.get( "index" );
  std::cout << "Opening: " << index << std::endl;

  // Make sure this path doesn't exist.
  std::string idx2 = index + ".new"; // temp target index.

  // Presumes a single input offset annotation file for the entire collection.
  std::string offsetAnnotationsPath = param.get( "annotations" );

  // Fields to add; these need to be combined with the existing fields and must
  // supply the numeric/parental/ordinal/etc. attributes.
  if( param.exists( "addField" ) )
    addFields = param["addField"];

  // Fields to remove; these only need to be a list of names.
  if( param.exists( "removeField" ) ) {
    indri::api::Parameters slice = param["removeField"];

    for( size_t i = 0; i < slice.size(); i++ ) {
      if( slice[i].exists( "name" ) ) {
        removeNames.push_back( slice[i]["name"] );
      }
    }
  }

  // Need to know the file class environment to get the conflations right.
  std::string className = param.get( "fileclass", "" );

  indri::collection::Repository sourceRepo;
  indri::collection::Repository targetRepo;
  indri::parse::OffsetAnnotationAnnotator oa_annotator;
  indri::parse::FileClassEnvironmentFactory _fileClassFactory;

  // Open the source repo.
  sourceRepo.openRead( index );

  // Copy its parameters and create the target repo, adding or removing fields.
  repo.loadFile( indri::file::Path::combine( index, "manifest" ) );
  int mem = param.get( "memory", INT64(100*1024*1024) );
  repo.set( "memory", mem );

  adding = addFields.exists( "field" );
  _mergeFields();

  // Create the offset annotator.
  fce = _fileClassFactory.get( className );
  indri::parse::Conflater* conflater = 0;
  if( fce ) {
    conflater = fce->conflater;
  }

  if( adding ) {
    oa_annotator.setConflater( conflater );
    oa_annotator.open( offsetAnnotationsPath );
  }

  targetRepo.create( idx2, &repo );

  // For each document in the source repo: fetch the ParsedDocument, construct
  // the full representation, apply the annotator, and insert it into the
  // target repo.
  _index = sourceRepo.indexes()->front(); // presume 1 index
  _docIter = _index->termListFileIterator();
  _docIter->startIteration();

  // Ought to deal with deleted documents here: if there are deleted documents,
  // do a regular add to the collection; if not, only rewrite the indexes and
  // then rename the collection.
  indri::index::DeletedDocumentList& deleted = sourceRepo.deletedList();
  UINT64 delCount = deleted.deletedCount();
  if( delCount > 0 ) {
    // Either warn, compact and then process, or do it the old way... FIXME!
    std::cerr << "Deleted documents detected... compact with dumpindex first." << std::endl;
    return;
  }

  for( UINT64 docid = 1; docid <= _index->documentCount(); docid++ ) {
    if( (docid % 500) == 0 ) {
      g_timer.printElapsedSeconds( std::cout );
      std::cout << ": " << docid << "\r";
      std::cout.flush();
    }

    parsed = sourceRepo.collection()->retrieve( docid );
    // Combine field and term data with the parsed document.
    _mergeData();
    // Apply the annotator.
    if( adding )
      parsed = oa_annotator.transform( parsed );
    targetRepo.addDocument( parsed, false );

    // TagList allocates memory for the tags, so free them here.
    for( size_t i = 0; i < parsed->tags.size(); i++ )
      delete( parsed->tags[i] );
    delete( parsed );

    _docIter->nextEntry();
  }
  std::cout << std::endl;
  g_timer.printElapsedSeconds( std::cout );
  std::cout << ": " << _index->documentCount() << std::endl;
  g_timer.printElapsedSeconds( std::cout );
  std::cout << ": closing" << std::endl;

  targetRepo.close();
  sourceRepo.close();

  std::string oldcollectionPath = indri::file::Path::combine( index, "collection" );
  std::string newcollectionPath = indri::file::Path::combine( idx2, "collection" );

  // Move (rather than rebuild) the collection into the new repository.
  indri::file::Path::remove( newcollectionPath );
  indri::file::Path::rename( oldcollectionPath, newcollectionPath );

  // Rename the target repo to the source repo.
  indri::file::Path::remove( index );
  indri::file::Path::rename( idx2, index );

  g_timer.printElapsedSeconds( std::cout );
  std::cout << ": done" << std::endl;
}
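// ---------------------------------------------------------------------------
// Hypothetical driver for processFields, sketched after the pattern other indri
// command-line tools use. The binary name and usage text are assumptions; the
// parameter names ("index", "annotations", "addField", "removeField",
// "fileclass", "memory") are the ones processFields reads above. Assumes the
// headers the tool already includes.
// ---------------------------------------------------------------------------
int main( int argc, char** argv ) {
  try {
    indri::api::Parameters& param = indri::api::Parameters::instance();
    param.loadCommandLine( argc, argv );

    if( !param.exists( "index" ) ) {
      std::cerr << "usage: fieldtool <parameterFile>" << std::endl;
      return -1;
    }

    processFields( param );
  } catch( lemur::api::Exception& e ) {
    LEMUR_ABORT(e);
  }
  return 0;
}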