ChartRuleLookupManagerOnDisk::~ChartRuleLookupManagerOnDisk()
{
  // Release every TargetPhraseCollection cached during rule lookup, then
  // drop the (now dangling) map entries.
  typedef std::map<UINT64, const TargetPhraseCollection*> CacheColl;
  for (CacheColl::const_iterator p = m_cache.begin(); p != m_cache.end(); ++p) {
    delete p->second;
  }
  m_cache.clear();

  // Free the per-sentence dotted-rule scaffolding.
  RemoveAllInColl(m_expandableDottedRuleListVec);
  RemoveAllInColl(m_sourcePhraseNode);
}
MockHypothesisGuard::~MockHypothesisGuard()
{
  // Free the translation options owned by this guard.
  RemoveAllInColl(m_toptions);

  // Walk the back-pointer chain, deleting each hypothesis as we go.
  for (; m_hypothesis; ) {
    const Hypothesis* prev = m_hypothesis->GetPrevHypo();
    delete m_hypothesis;
    m_hypothesis = const_cast<Hypothesis*>(prev);
  }
}
void PhraseDictionaryNewFormat::CleanUp()
{
	//RemoveAllInColl(m_chartTargetPhraseColl);
	std::vector<ChartRuleCollection*>::iterator iter;
	for (iter = m_chartTargetPhraseColl.begin(); iter != m_chartTargetPhraseColl.end(); ++iter)
	{
		ChartRuleCollection *item = *iter;
		ChartRuleCollection::Delete(item);
	}
	m_chartTargetPhraseColl.clear();
	
	RemoveAllInColl(m_runningNodesVec);
}
Example #4
0
ChartParser::~ChartParser()
{
  RemoveAllInColl(m_ruleLookupManagers);
  StaticData::Instance().CleanUpAfterSentenceProcessing(m_source);

  // m_inputPathMatrix is a jagged matrix of owned InputPath pointers:
  // free every path in every row.
  InputPathMatrix::const_iterator row;
  for (row = m_inputPathMatrix.begin(); row != m_inputPathMatrix.end(); ++row) {
    std::vector<InputPath*>::const_iterator col;
    for (col = row->begin(); col != row->end(); ++col) {
      delete *col;
    }
  }
}
Example #5
0
// Tears down the static registry of feature functions. RemoveAllInColl
// presumably deletes each element and clears the container (cf. the manual
// delete-loop used as its stand-in elsewhere in this file) -- confirm
// against its definition.
void FeatureFunction::Destroy()
{
  RemoveAllInColl(s_staticColl);
}
Example #6
0
// Frees the XML-markup translation options attached to this sentence
// (RemoveAllInColl deletes each element and empties the collection).
Sentence::~Sentence()
{
  RemoveAllInColl(m_xmlOptions);
}
// The list owns its TranslationOption pointers; delete them all on teardown.
TranslationOptionList::~TranslationOptionList()
{
	RemoveAllInColl(m_coll);
}
// Frees the dotted-rule collections built up while parsing the sentence.
ChartRuleLookupManagerMemoryPerSentence::~ChartRuleLookupManagerMemoryPerSentence()
{
  RemoveAllInColl(m_dottedRuleColls);
}
Example #9
0
// The graph owns its decode steps; delete them when the graph goes away.
DecodeGraph::~DecodeGraph()
{
	RemoveAllInColl(m_steps);
}
// Deletes every language model held in the list and empties it.
void LMList::CleanUp()
{
  RemoveAllInColl(m_coll);
}
ChartCellCollectionBase::~ChartCellCollectionBase()
{
  m_source.clear();
  // Each row of the chart owns its ChartCellBase pointers; free row by row.
  typedef std::vector<std::vector<ChartCellBase*> >::iterator RowIter;
  for (RowIter row = m_cells.begin(); row != m_cells.end(); ++row) {
    RemoveAllInColl(*row);
  }
}
// A trellis node owns its children; delete the whole subtree recursively
// (each child's destructor frees its own children in turn).
ChartTrellisNode::~ChartTrellisNode()
{
  RemoveAllInColl(m_children);
}
// Tunes the per-model weights by minimizing the cross-entropy of the given
// phrase pairs. Returns m_numModels weights for each of the
// m_numScoreComponents features, laid out feature-major in `ret`.
//
// Fixes vs. the original: the unused local `vector<float> fs` is removed,
// and the heap-allocated `allStats` map / `CrossEntropy` objective are now
// automatic storage, eliminating the manual new/delete pairs (and any leak
// if an intervening call throws).
vector<float> PhraseDictionaryMultiModel::MinimizePerplexity(vector<pair<string, string> > &phrase_pair_vector)
{
  // Collapse duplicates: each distinct pair is optimized once, weighted by
  // its frequency in phrase_pair_vector.
  map<pair<string, string>, size_t> phrase_pair_map;

  for ( vector<pair<string, string> >::const_iterator iter = phrase_pair_vector.begin(); iter != phrase_pair_vector.end(); ++iter ) {
    phrase_pair_map[*iter] += 1;
  }

  vector<multiModelStatisticsOptimization*> optimizerStats;

  for ( map<pair<string, string>, size_t>::iterator iter = phrase_pair_map.begin(); iter != phrase_pair_map.end(); ++iter ) {

    pair<string, string> phrase_pair = iter->first;
    string source_string = phrase_pair.first;
    string target_string = phrase_pair.second;

    // Automatic storage (was a raw new/delete pair): destroyed on every path
    // out of this iteration, including the `continue` below.
    map<string,multiModelStatistics*> allStats;

    Phrase sourcePhrase(0);
    sourcePhrase.CreateFromString(Input, m_input, source_string, NULL);

    CollectSufficientStatistics(sourcePhrase, &allStats); //optimization potential: only call this once per source phrase

    //phrase pair not found; leave cache empty
    if (allStats.find(target_string) == allStats.end()) {
      RemoveAllInMap(allStats);
      continue;
    }

    multiModelStatisticsOptimization* targetStatistics = new multiModelStatisticsOptimization();
    targetStatistics->targetPhrase = new TargetPhrase(*allStats[target_string]->targetPhrase);
    targetStatistics->p = allStats[target_string]->p;
    targetStatistics->f = iter->second;
    optimizerStats.push_back(targetStatistics);

    RemoveAllInMap(allStats);
  }

  Sentence sentence;
  CleanUpAfterSentenceProcessing(sentence); // free memory used by compact phrase tables

  size_t numWeights = m_numScoreComponents;

  vector<float> ret (m_numModels*numWeights);
  for (size_t iFeature=0; iFeature < numWeights; iFeature++) {

    // Stack-allocated objective (was heap): cannot leak if Optimize throws.
    CrossEntropy ObjectiveFunction(optimizerStats, this, iFeature);

    vector<float> weight_vector = Optimize(&ObjectiveFunction, m_numModels);

    if (m_mode == "interpolate") {
      weight_vector = normalizeWeights(weight_vector);
    }

    cerr << "Weight vector for feature " << iFeature << ": ";
    for (size_t i=0; i < m_numModels; i++) {
      ret[(iFeature*m_numModels)+i] = weight_vector[i];
      cerr << weight_vector[i] << " ";
    }
    cerr << endl;
  }

  RemoveAllInColl(optimizerStats);
  return ret;

}
Example #14
0
// Tunes the per-model weights for the four count-based features by
// minimizing the cross-entropy of the given phrase pairs. Returns
// m_numModels weights per feature, laid out feature-major in `ret`.
//
// Fixes vs. the original: the AlignmentException handler previously
// swallowed the exception and leaked the freshly allocated
// `targetStatistics` plus its copied TargetPhrase; both are now freed.
// The heap-allocated `allStats` map / `CrossEntropyCounts` objective are
// now automatic storage, removing the manual new/delete pairs.
vector<float> PhraseDictionaryMultiModelCounts::MinimizePerplexity(vector<pair<string, string> > &phrase_pair_vector)
{

  const StaticData &staticData = StaticData::Instance();
  const string& factorDelimiter = staticData.GetFactorDelimiter();

  // Collapse duplicates: each distinct pair is optimized once, weighted by
  // its frequency in phrase_pair_vector.
  map<pair<string, string>, size_t> phrase_pair_map;

  for ( vector<pair<string, string> >::const_iterator iter = phrase_pair_vector.begin(); iter != phrase_pair_vector.end(); ++iter ) {
    phrase_pair_map[*iter] += 1;
  }

  vector<multiModelCountsStatisticsOptimization*> optimizerStats;

  for ( map<pair<string, string>, size_t>::iterator iter = phrase_pair_map.begin(); iter != phrase_pair_map.end(); ++iter ) {

    pair<string, string> phrase_pair = iter->first;
    string source_string = phrase_pair.first;
    string target_string = phrase_pair.second;

    vector<float> fs(m_numModels);
    // Automatic storage (was a raw new/delete pair): destroyed on every path
    // out of this iteration, including the `continue` below.
    map<string,multiModelCountsStatistics*> allStats;

    Phrase sourcePhrase(0);
    sourcePhrase.CreateFromString(Input, m_input, source_string, factorDelimiter, NULL);

    CollectSufficientStatistics(sourcePhrase, fs, &allStats); //optimization potential: only call this once per source phrase

    //phrase pair not found; leave cache empty
    if (allStats.find(target_string) == allStats.end()) {
      RemoveAllInMap(allStats);
      continue;
    }

    multiModelCountsStatisticsOptimization * targetStatistics = new multiModelCountsStatisticsOptimization();
    targetStatistics->targetPhrase = new TargetPhrase(*allStats[target_string]->targetPhrase);
    targetStatistics->fs = fs;
    targetStatistics->fst = allStats[target_string]->fst;
    targetStatistics->ft = allStats[target_string]->ft;
    targetStatistics->f = iter->second;

    try {
      pair<vector< set<size_t> >, vector< set<size_t> > > alignment = GetAlignmentsForLexWeights(sourcePhrase, static_cast<const Phrase&>(*targetStatistics->targetPhrase), targetStatistics->targetPhrase->GetAlignTerm());
      targetStatistics->lexCachee2f = CacheLexicalStatistics(static_cast<const Phrase&>(*targetStatistics->targetPhrase), sourcePhrase, alignment.second, m_lexTable_e2f, false );
      targetStatistics->lexCachef2e = CacheLexicalStatistics(sourcePhrase, static_cast<const Phrase&>(*targetStatistics->targetPhrase), alignment.first, m_lexTable_f2e, true );

      optimizerStats.push_back(targetStatistics);
    } catch (AlignmentException& e) {
      // BUG FIX: the original swallowed the exception without freeing
      // targetStatistics, leaking it and its copied TargetPhrase for every
      // pair with unusable alignments. (Assumes the statistics struct does
      // not own/delete targetPhrase itself -- confirm against its definition.)
      delete targetStatistics->targetPhrase;
      delete targetStatistics;
    }

    RemoveAllInMap(allStats);
  }

  Sentence sentence;
  CleanUpAfterSentenceProcessing(sentence); // free memory used by compact phrase tables

  vector<float> ret (m_numModels*4);
  for (size_t iFeature=0; iFeature < 4; iFeature++) {

    // Stack-allocated objective (was heap): cannot leak if Optimize throws.
    CrossEntropyCounts ObjectiveFunction(optimizerStats, this, iFeature);

    vector<float> weight_vector = Optimize(&ObjectiveFunction, m_numModels);

    if (m_mode == "interpolate") {
      weight_vector = normalizeWeights(weight_vector);
    } else if (m_mode == "instance_weighting") {
      // Express all instance weights relative to the first model's weight.
      float first_value = weight_vector[0];
      for (size_t i=0; i < m_numModels; i++) {
        weight_vector[i] = weight_vector[i]/first_value;
      }
    }
    cerr << "Weight vector for feature " << iFeature << ": ";
    for (size_t i=0; i < m_numModels; i++) {
      ret[(iFeature*m_numModels)+i] = weight_vector[i];
      cerr << weight_vector[i] << " ";
    }
    cerr << endl;
  }

  RemoveAllInColl(optimizerStats);
  return ret;

}
// The list owns its chart translation options; delete them on teardown.
ChartTranslationOptionList::~ChartTranslationOptionList()
{
  RemoveAllInColl(m_collection);
}
Example #16
0
// Frees the per-model lexical probability tables for both translation
// directions.
PhraseDictionaryMultiModelCounts::~PhraseDictionaryMultiModelCounts()
{
  RemoveAllInColl(m_lexTable_e2f);  // source-to-target lexical tables
  RemoveAllInColl(m_lexTable_f2e);  // target-to-source lexical tables
}
// Frees the processed-rule collections built during parsing.
// NOTE(review): another definition of this same destructor appears later in
// this file freeing m_dottedRuleColls instead -- these look like two
// different revisions of the class; confirm which member is current.
ChartRuleLookupManagerMemory::~ChartRuleLookupManagerMemory()
{
  RemoveAllInColl(m_processedRuleColls);
}
Example #18
0
// Per-sentence cleanup hook: frees all target-phrase collections created
// while translating the sentence. `source` is unused here.
void SkeletonPT::CleanUpAfterSentenceProcessing(const InputType& source)
{
  RemoveAllInColl(m_allTPColl);
}
// The cube owns the items recorded in its covered set; free them all.
RuleCube::~RuleCube()
{
  RemoveAllInColl(m_covered);
}
Example #20
0
// Frees the per-word alignment elements owned by this phrase.
AlignmentPhrase::~AlignmentPhrase()
{
	RemoveAllInColl(m_collection);
}
Example #21
0
// Frees the objects created while handling unknown (OOV) source words:
// the synthesized source phrases and their cached target collections.
ChartParserUnknown::~ChartParserUnknown()
{
  RemoveAllInColl(m_unksrcs);
  RemoveAllInColl(m_cacheTargetPhraseCollection);
}
Example #22
0
// Frees both the processed-rule collections on the stack and the saved
// nodes retained alongside them.
ProcessedRuleStack::~ProcessedRuleStack()
{
	RemoveAllInColl(m_coll);
	RemoveAllInColl(m_savedNode);
}
// Frees the dotted-rule collections built during parsing.
// NOTE(review): another definition of this same destructor appears earlier
// in this file freeing m_processedRuleColls instead -- these look like two
// different revisions of the class; confirm which member is current.
ChartRuleLookupManagerMemory::~ChartRuleLookupManagerMemory()
{
  RemoveAllInColl(m_dottedRuleColls);
}
// The collection owns its chart rules; delete them all on teardown.
ChartRuleCollection::~ChartRuleCollection()
{
	RemoveAllInColl(m_collection);
}