Exemple #1
0
void TestToken::TestPrecNextGraph()
{
	// Build a tiny diamond-shaped token graph and check that next/prec
	// navigation is symmetric across the two A->...->D paths.
	SpeechSet* speechSet = new SpeechSet();
	Speech* speech = new Speech(speechSet);
	Segment* segment = Segment::CreateWithDuration(0, 10000, speech);
	/* initialize tokens with a graph like
	*     B
	*    / \
	* --A   D--
	*    \ /
	*     C
	*/
	Token* tokenA = Token::CreateWithDuration(0, 0, segment);
	tokenA->SetSourceText("A");
	Token* tokenB = Token::CreateWithDuration(0, 0, segment);
	tokenB->SetSourceText("B");
	Token* tokenC = Token::CreateWithDuration(0, 0, segment);
	tokenC->SetSourceText("C");
	Token* tokenD = Token::CreateWithDuration(0, 0, segment);
	tokenD->SetSourceText("D");
	tokenA->AddNextToken(tokenB);
	tokenA->AddNextToken(tokenC);
	tokenB->AddPrecToken(tokenA);
	tokenB->AddNextToken(tokenD);
	tokenC->AddPrecToken(tokenA);
	tokenC->AddNextToken(tokenD);
	tokenD->AddPrecToken(tokenB);
	tokenD->AddPrecToken(tokenC);

	// a few basics assert
	// Forward: both successors of A (B and C) lead to the same node, D.
	assert(tokenA->GetNextToken(0)->GetNextToken(0) == tokenA->GetNextToken(1)->GetNextToken(0));
	// Backward: both predecessors of D (B and C) come from the same node, A.
	// BUG FIX: the right-hand side previously read tokenA->GetPrecToken(1),
	// but A has no predecessors in this graph; both sides must start at D.
	assert(tokenD->GetPrecToken(0)->GetPrecToken(0) == tokenD->GetPrecToken(1)->GetPrecToken(0));

	//TODO need to be more complex
	delete tokenA;
	delete tokenB;
	delete tokenC;
	delete tokenD;
	delete segment;
	delete speech;
	// NOTE(review): speechSet is never deleted here -- confirm whether Speech
	// takes ownership of it; if not, this test leaks the SpeechSet.
}
Exemple #2
0
void Speech::RemoveSegment(Segment* currentSegment)
{
	// Detach 'currentSegment' from the token graph: unlink it from its
	// neighbours, splice every former predecessor to every former successor,
	// remove the segment from m_segments, then destroy it.
	list<Token*> listPreviousTokenofFirstToken;
	list<Token*> listNextTokenofLastToken;

	// Remove links from the previous tokens of the first tokens of the segment
	for(size_t f=0; f<currentSegment->GetNumberOfFirstToken(); ++f)
	{
		Token* firstToken = currentSegment->GetFirstToken(f);

		if(firstToken)
		{
			// NOTE(review): this iterates by index while unlinking; if
			// UnlinkNextToken() also shrinks firstToken's predecessor list,
			// entries would be skipped -- confirm against Token's API.
			for(size_t p=0; p<firstToken->GetNbOfPrecTokens(); ++p)
			{
				Token* previousTokenofFirstToken = firstToken->GetPrecToken(p);
				listPreviousTokenofFirstToken.push_back(previousTokenofFirstToken);
				previousTokenofFirstToken->UnlinkNextToken(firstToken);
			}
		}
	}

	// Remove links from the next tokens of the last tokens of the segment
	for(size_t l=0; l<currentSegment->GetNumberOfLastToken(); ++l)
	{
		Token* lastToken = currentSegment->GetLastToken(l);

		if(lastToken)
		{
			for(size_t n=0; n<lastToken->GetNbOfNextTokens(); ++n)
			{
				Token* nextTokenofLastToken = lastToken->GetNextToken(n);
				listNextTokenofLastToken.push_back(nextTokenofLastToken);
				nextTokenofLastToken->UnlinkPrevToken(lastToken);
			}
		}
	}

	// Re-attach the tokens: link every former predecessor to every former successor.
	list<Token*>::iterator prev  = listPreviousTokenofFirstToken.begin();
	list<Token*>::iterator eprev = listPreviousTokenofFirstToken.end();

	while(prev != eprev)
	{
		// BUG FIX: 'next' must restart from the beginning for each
		// predecessor. It was previously initialized only once before the
		// outer loop, so after the first predecessor was processed the inner
		// loop never ran again and the remaining predecessors were left
		// unlinked from the successors.
		list<Token*>::iterator next  = listNextTokenofLastToken.begin();
		list<Token*>::iterator enext = listNextTokenofLastToken.end();

		while(next != enext)
		{
			(*prev)->AddNextToken(*next);
			(*next)->AddPrecToken(*prev);

			++next;
		}

		++prev;
	}

	listPreviousTokenofFirstToken.clear();
	listNextTokenofLastToken.clear();

	// Remove Segment from vector
	vector<Segment*>::iterator SegIter = m_segments.begin();

	while (SegIter != m_segments.end() && (*SegIter) != currentSegment)
		++SegIter;

	if (SegIter == m_segments.end())
	{
		// The segment must belong to this Speech; anything else is a fatal
		// internal inconsistency.
		LOG_FATAL(m_pLogger, "Speech::RemoveSegment(), the segment is not at the right spot!!");
		exit(E_INVALID);
	}

	m_segments.erase(SegIter);

	// destroy! the segment now
	delete currentSegment;
}
Exemple #3
0
/** returns the list of previous indexes */
// Fills 'listPrev' with the indexes of the tokens preceding position 'index'
// in dimension 'dim'. Results are memoized in m_TabCacheDimPreviousIndex, so
// a repeated query for the same (dim, index) returns the cached list.
// NOTE(review): the cached lists are heap-allocated here; confirm they are
// released in Graph's destructor.
void Graph::PreviousIndexes(list<size_t>& listPrev, const size_t& dim, const size_t& index)
{
	listPrev.clear();
    
	// Asking for the previous tokens of the last
	// Index 0 is a sentinel: it is its own (only) predecessor, and this case
	// is answered directly without touching the cache.
	if(index == 0)
	{
		listPrev.push_front(0);
		return;
	}

	// Cache hit: reuse the previously computed list.
    list<size_t>* listprevious = m_TabCacheDimPreviousIndex[dim][index];
    
    if(listprevious)
    {
        listPrev = *listprevious;
        return;
    }
        
	// Cache miss: build the list in place inside the cache entry.
    m_TabCacheDimPreviousIndex[dim][index] = new list<size_t>;
	
	list<Token*>::iterator i, ei;
	bool is0added = false;  // ensures the sentinel 0 is pushed at most once
	
	// Asking for the first tokens to work on
	// Special case: 'index' is the final position of this dimension, so its
	// predecessors are derived from the dimension's last tokens.
	if(index == GetDimensionDeep(dim)-1)
	{
		i = m_TabLastTokens[dim].begin();
		ei = m_TabLastTokens[dim].end();
		
		while(i != ei)
		{
			if( (*i == NULL) && (!is0added) )
			{
				is0added = true;
				//listPrev.push_front(0);
                m_TabCacheDimPreviousIndex[dim][index]->push_front(0);
			}
			else
			{
				// Map each last token back to its index in this dimension.
				//listPrev.push_front(m_TabMapTokenIndex[dim][*i]);
                m_TabCacheDimPreviousIndex[dim][index]->push_front(m_TabMapTokenIndex[dim][*i]);
			}

			++i;
		}
	}
	else
	{
		// General case: scan the dimension's first tokens; if the queried
		// token is one of them, its predecessor is the sentinel 0, otherwise
		// the predecessors come from the token's own prec-token list.
		i = m_TabFirstTokens[dim].begin();
		ei = m_TabFirstTokens[dim].end();
		
		while(i != ei)
		{
			if( (*i == m_TabVecHypRef[dim][index]) && (!is0added) )
			{
				is0added = true;
				//listPrev.push_front(0);
                m_TabCacheDimPreviousIndex[dim][index]->push_front(0);
			}
			else
			{
				// NOTE(review): this else-branch runs once per non-matching
				// first token, so when a dimension has several first tokens
				// the same predecessor indexes are pushed repeatedly --
				// confirm whether duplicates are intended here.
				Token* tokenIndex = m_TabVecHypRef[dim][index];
				size_t nbprevtokens = tokenIndex->GetNbOfPrecTokens();
				
				if(nbprevtokens == 0)
				{
					// No predecessors recorded: fall back to the sentinel.
					//listPrev.push_front(0);
                    m_TabCacheDimPreviousIndex[dim][index]->push_front(0);
				}
				else
				{
					for(size_t j=0; j<nbprevtokens; ++j)
					{
						//listPrev.push_front(m_TabMapTokenIndex[dim][tokenIndex->GetPrecToken(j)]);
                        m_TabCacheDimPreviousIndex[dim][index]->push_front(m_TabMapTokenIndex[dim][tokenIndex->GetPrecToken(j)]);
					}
				}
			}
			
			++i;
		}
	}
    
	// Return a copy of the freshly built (and now cached) list.
    listPrev = *(m_TabCacheDimPreviousIndex[dim][index]);
}