void indri::infnet::InferenceNetwork::_evaluateIndex( indri::index::Index& index ) {
  // don't need to do anything unless there are some
  // evaluators in the network that need full evaluation
  if( _complexEvaluators.size() ) {
    lemur::api::DOCID_T maximumDocument = index.documentMaximum();
    if( maximumDocument == index.documentBase() ) {
      // empty memory index, nothing to score
      return;
    }

    lemur::api::DOCID_T lastCandidate = MAX_INT32; // 64
    int scoredDocuments = 0;
    lemur::api::DOCID_T candidate = 0;
    indri::index::DeletedDocumentList::read_transaction* deleted;
    deleted = _repository.deletedList().getReadTransaction();

    while( 1 ) {
      // ask the root node for a candidate document;
      // this asks the whole inference network for the
      // first document that might possibly produce a
      // usable (above the max score threshold) score
      candidate = _nextCandidateDocument( deleted );

      if( candidate < index.documentBase() ) {
        std::cerr << candidate << " < index.documentBase()" << std::endl;
        break;
      }
      assert( candidate >= index.documentBase() );

      // if candidate is MAX_INT32, we're done
      if( candidate == MAX_INT32 || candidate > maximumDocument ) {
        break;
      }

      // move all the doc info lists to this new document
      // in preparation for scoring
      if( candidate != lastCandidate ) {
        _moveToDocument( candidate );
      }

      // ask all the evaluators to evaluate this document
      _evaluateDocument( index, candidate );
      scoredDocuments++;

      // if that was the last document, we can quit now
      if( candidate + 1 > maximumDocument )
        break;

      // move all candidate iterators to candidate+1
      _moveToDocument( candidate + 1 );
      lastCandidate = candidate + 1;
      assert( candidate >= index.documentBase() );
    }

    delete deleted;
  }
}
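/*
 * The loop above is a document-at-a-time driver: _nextCandidateDocument()
 * skips ahead to the next document that could possibly beat the current
 * score threshold, and _moveToDocument() aligns every iterator on that
 * document before scoring. The standalone toy program below is a minimal
 * sketch of that skip-and-score pattern. CandidateSource, kNoCandidate,
 * the explicit deleted-document check, and all values are made up for
 * illustration; they are not Indri's API (the real loop filters deleted
 * documents inside _nextCandidateDocument()).
 */
#include <cassert>
#include <climits>
#include <cstdio>
#include <set>
#include <vector>

typedef int DocID;
static const DocID kNoCandidate = INT_MAX; // plays the role of MAX_INT32

// Toy stand-in for the inference network root: returns the first posting
// at or after `target`, or kNoCandidate when the postings are exhausted.
struct CandidateSource {
  std::vector<DocID> postings; // sorted candidate document IDs
  size_t pos;
  CandidateSource() : pos( 0 ) {}
  DocID next( DocID target ) {
    while( pos < postings.size() && postings[pos] < target )
      pos++;
    return pos < postings.size() ? postings[pos] : kNoCandidate;
  }
};

int main() {
  CandidateSource source;
  source.postings = { 3, 7, 7, 12, 20 };
  std::set<DocID> deleted; // stand-in for the deleted-document list
  deleted.insert( 7 );
  const DocID documentBase = 1;     // first ID owned by this index
  const DocID maximumDocument = 15; // last scorable ID in this index
  int scoredDocuments = 0;

  DocID target = documentBase;
  while( 1 ) {
    DocID candidate = source.next( target );
    if( candidate == kNoCandidate || candidate > maximumDocument )
      break; // postings exhausted, or candidate past this index's range
    assert( candidate >= documentBase );
    if( !deleted.count( candidate ) ) {
      std::printf( "scoring document %d\n", candidate );
      scoredDocuments++;
    }
    target = candidate + 1; // advance every iterator past this document
  }
  std::printf( "scored %d documents\n", scoredDocuments );
  return 0;
}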
void indri::infnet::ContextSimpleCountAccumulator::indexChanged( indri::index::Index& index ) {
  _computeCounts( index );
  _maximumDocument = index.documentCount() + index.documentBase();
}
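/*
 * _maximumDocument caches an upper bound on the document IDs owned by the
 * current index. Worked example with made-up numbers: if documentBase()
 * returns 101 and documentCount() returns 50, then
 *   _maximumDocument = 50 + 101 = 151
 * i.e. one past the last assigned ID (150), assuming document IDs run
 * densely upward from documentBase().
 */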