InputPath::InputPath(const Phrase &phrase, const NonTerminalSet &sourceNonTerms,
                     const WordsRange &range, const InputPath *prevNode,
                     const ScorePair *inputScore)
  : m_prevPath(prevNode)
  , m_phrase(phrase)
  , m_range(range)
  , m_inputScore(inputScore)
  , m_nextNode(1)
  , m_sourceNonTerms(sourceNonTerms)
  , m_sourceNonTermArray(FactorCollection::Instance().GetNumNonTerminals(), false)
{
  // Mark each source non-terminal in a boolean array indexed by factor id,
  // so later membership tests are a constant-time array read.
  for (NonTerminalSet::const_iterator iter = sourceNonTerms.begin();
       iter != sourceNonTerms.end(); ++iter) {
    size_t idx = (*iter)[0]->GetId();
    m_sourceNonTermArray[idx] = true;
  }

  //cerr << "phrase=" << phrase << " m_inputScore=" << *m_inputScore << endl;
}
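// A minimal, self-contained sketch (not part of the Moses source) of the indexing
// pattern used by the constructor above: a set of non-terminal factor ids is folded
// into a boolean array keyed by id, so membership tests become a single array read
// rather than a set lookup. All names below are illustrative stand-ins, not Moses API.
#include <cstddef>
#include <set>
#include <vector>

std::vector<bool> BuildNonTermFlags(const std::set<std::size_t> &nonTermIds,
                                    std::size_t numNonTerminals)
{
  std::vector<bool> flags(numNonTerminals, false);
  for (std::set<std::size_t>::const_iterator it = nonTermIds.begin();
       it != nonTermIds.end(); ++it) {
    flags[*it] = true;  // this factor id is a non-terminal on the path
  }
  return flags;
}
// Usage: with ids {3, 7} and 10 non-terminals, flags[7] is true and flags[5] is false.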
// Given a partial rule application ending at startPos-1 and given the sets of
// source and target non-terminals covering the span [startPos, endPos],
// determines the full or partial rule applications that can be produced through
// extending the current rule application by a single non-terminal.
void ChartRuleLookupManagerMemory::ExtendPartialRuleApplication(
  const PhraseDictionaryNodeSCFG & node,
  const WordConsumed *prevWordConsumed,
  size_t startPos,
  size_t endPos,
  size_t stackInd,
  const NonTerminalSet & sourceNonTerms,
  const NonTerminalSet & targetNonTerms,
  ProcessedRuleColl & processedRuleColl)
{
  const PhraseDictionaryNodeSCFG::NonTerminalMap & nonTermMap = node.GetNonTerminalMap();

  const size_t numChildren = nonTermMap.size();
  if (numChildren == 0) {
    return;
  }
  const size_t numSourceNonTerms = sourceNonTerms.size();
  const size_t numTargetNonTerms = targetNonTerms.size();
  const size_t numCombinations = numSourceNonTerms * numTargetNonTerms;

  // We can search by either:
  //   1. Enumerating all possible source-target NT pairs that are valid for
  //      the span and then searching for matching children in the node,
  //   or
  //   2. Iterating over all the NT children in the node, searching
  //      for each source and target NT in the span's sets.
  // We'll do whichever minimises the number of lookups:
  if (numCombinations <= numChildren * 2) {
    NonTerminalSet::const_iterator p = sourceNonTerms.begin();
    NonTerminalSet::const_iterator sEnd = sourceNonTerms.end();
    for (; p != sEnd; ++p) {
      const Word & sourceNonTerm = *p;
      NonTerminalSet::const_iterator q = targetNonTerms.begin();
      NonTerminalSet::const_iterator tEnd = targetNonTerms.end();
      for (; q != tEnd; ++q) {
        const Word & targetNonTerm = *q;
        const PhraseDictionaryNodeSCFG * child = node.GetChild(sourceNonTerm, targetNonTerm);
        if (child == NULL) {
          continue;
        }
        WordConsumed * wc = new WordConsumed(startPos, endPos, targetNonTerm, prevWordConsumed);
        ProcessedRule * rule = new ProcessedRule(*child, wc);
        processedRuleColl.Add(stackInd, rule);
      }
    }
  } else {
    PhraseDictionaryNodeSCFG::NonTerminalMap::const_iterator p;
    PhraseDictionaryNodeSCFG::NonTerminalMap::const_iterator end = nonTermMap.end();
    for (p = nonTermMap.begin(); p != end; ++p) {
      const PhraseDictionaryNodeSCFG::NonTerminalMapKey & key = p->first;
      const Word & sourceNonTerm = key.first;
      if (sourceNonTerms.find(sourceNonTerm) == sourceNonTerms.end()) {
        continue;
      }
      const Word & targetNonTerm = key.second;
      if (targetNonTerms.find(targetNonTerm) == targetNonTerms.end()) {
        continue;
      }
      const PhraseDictionaryNodeSCFG & child = p->second;
      WordConsumed * wc = new WordConsumed(startPos, endPos, targetNonTerm, prevWordConsumed);
      ProcessedRule * rule = new ProcessedRule(child, wc);
      processedRuleColl.Add(stackInd, rule);
    }
  }
}
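// A minimal sketch (not part of the Moses source) of the cost comparison that picks
// the search strategy above: enumerating span non-terminal pairs costs roughly one
// child lookup per source-target combination, while scanning the node's children
// costs up to two set lookups per child. The helper below only encodes that
// comparison; its name is hypothetical and the threshold mirrors
// `numCombinations <= numChildren * 2` in the function above.
#include <cstddef>

// Returns true when enumerating source-target pairs needs no more lookups than
// scanning the node's non-terminal children.
bool EnumeratePairsIsCheaper(std::size_t numSourceNonTerms,
                             std::size_t numTargetNonTerms,
                             std::size_t numChildren)
{
  const std::size_t numCombinations = numSourceNonTerms * numTargetNonTerms;
  return numCombinations <= numChildren * 2;
}
// e.g. 2 source NTs x 3 target NTs = 6 combinations versus 4 children (8 lookups),
// so pair enumeration wins and the first branch of the function above is taken.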