/******************************************************************************
 * AUTHOR		: Tarun Madan
 * DATE			: Aug-07-2007
 * NAME			: convertToTraceGroup
 * DESCRIPTION	: 
 * ARGUMENTS		: 
 * RETURNS		:
 * NOTES			: 
 * CHANGE HISTROY
 * Author			Date				Description
 ******************************************************************************/
int PointFloatShapeFeatureExtractor::convertFeatVecToTraceGroup(
                                 const vector<LTKShapeFeaturePtr>& shapeFeature, 
                                 LTKTraceGroup& outTraceGroup)
{
    LOG( LTKLogger::LTK_LOGLEVEL_DEBUG) << "Entering " <<
        "PointFloatShapeFeatureExtractor::convertFeatVecToTraceGroup()" << endl;
    
	vector<LTKChannel> channels;				//	channels of a trace 

	LTKChannel xChannel("X", DT_INT, true);	//	x-coordinate channel of the trace 
	LTKChannel yChannel("Y", DT_INT, true);	//	y-coordinate channel of the trace

	//initializing the channels of the trace
	channels.push_back(xChannel);	
	channels.push_back(yChannel);

	//	composing the trace format object
	LTKTraceFormat traceFormat(channels);

	vector<float> point;				//	a point of a trace

	LTKTrace trace(traceFormat); 
	int featureVectorSize = shapeFeature.size();

	for(int count=0; count < featureVectorSize; count++)
	{
		float Xpoint, Ypoint;
		bool penUp;

		PointFloatShapeFeature* ptr = (PointFloatShapeFeature*)(shapeFeature[count].operator ->());
		Xpoint = ptr->getX();
		Ypoint = ptr->getY();
		penUp = ptr->getPenUp();

		

		point.push_back(Xpoint);
		point.push_back(Ypoint);

		trace.addPoint(point);
		point.clear();


		if(penUp == true)	// end of a trace, clearing the trace now
		{
			outTraceGroup.addTrace(trace); 
			trace.emptyTrace();
			LTKTrace tempTrace(traceFormat);
			trace = tempTrace;
		}
	}

    LOG( LTKLogger::LTK_LOGLEVEL_DEBUG) << "Exiting " <<
        "PointFloatShapeFeatureExtractor::convertFeatVecToTraceGroup()" << endl;
    
	return SUCCESS;
}
Ejemplo n.º 2
0
int HolisticFeatureExtractor::resampleTraceGroup(LTKTraceGroup& inTraceGroup, int numPoints, LTKTraceGroup& outTraceGroup, LTKPreprocessorInterface *ltkShapeRecPtr)
{
    LOG( LTKLogger::LTK_LOGLEVEL_DEBUG) << 
        "Entered HolisticFeatureExtractor::resampleTraceGroup"  << endl;

    // Resample each trace of the input group to numPoints points and append
    // the result to the output group.
    const int traceCount = inTraceGroup.getNumTraces();

    for(int idx = 0; idx < traceCount; ++idx)
    {
        const LTKTrace& originalTrace = inTraceGroup.getTraceAt(idx);

        LTKTrace resampledTrace;
        resampleTrace(originalTrace, numPoints, resampledTrace);

        // NOTE(review): this pushes directly into the public member rather
        // than going through outTraceGroup.addTrace() — preserved as-is,
        // but worth confirming addTrace() was not intended here.
        outTraceGroup.m_traceVector.push_back(resampledTrace);
    }

    LOG( LTKLogger::LTK_LOGLEVEL_DEBUG) << 
        "Exiting HolisticFeatureExtractor::resampleTraceGroup"  << endl;

    return SUCCESS;
}
/**********************************************************************************
 * AUTHOR		: Saravanan R.
 * DATE			: 23-Jan-2007
 * NAME			: preprocess
 * DESCRIPTION	: calls the required pre-processing functions from the LTKPreprocessor library
 * ARGUMENTS		: inTraceGroup - reference to the input trace group
 *				  outPreprocessedTraceGroup - pre-processed inTraceGroup
 * RETURNS		: SUCCESS on successful pre-processing operation
 * NOTES			:
 * CHANGE HISTROY
 * Author			Date				Description
 *************************************************************************************/
int featurefilewriter::preprocess (const LTKTraceGroup& inTraceGroup,
        LTKTraceGroup& outPreprocessedTraceGroup)
{
    LOG(LTKLogger::LTK_LOGLEVEL_DEBUG) << "Entering " <<
        "featurefilewriter::preprocess()" << endl;

    int indx = 0;
	int errorCode = -1;

    string module = "";
    string funName = "" ;

    LTKTraceGroup local_inTraceGroup;

    local_inTraceGroup = inTraceGroup;

    if(m_preprocSequence.size() != 0)
    {
        while(indx < m_preprocSequence.size())
        {
            module = m_preprocSequence.at(indx).first;
            funName =  m_preprocSequence.at(indx).second;

            FN_PTR_PREPROCESSOR pPreprocFunc = NULL;
            pPreprocFunc = m_ptrPreproc->getPreprocptr(funName);

            if(pPreprocFunc!= NULL)
            {
                outPreprocessedTraceGroup.emptyAllTraces();


                if((errorCode = (m_ptrPreproc->*(pPreprocFunc))
                            (local_inTraceGroup,outPreprocessedTraceGroup)) != SUCCESS)
                {
                    LOG(LTKLogger::LTK_LOGLEVEL_ERR) <<"Error: "<<  errorCode << " " <<
                        " featurefilewriter::preprocess()" << endl;
                    LTKReturnError(errorCode);
                }

                local_inTraceGroup = outPreprocessedTraceGroup;
            }
            indx++;
        }
    }
    LOG(LTKLogger::LTK_LOGLEVEL_DEBUG)<<"Exiting featurefilewriter::preprocess()"<<endl;
    return SUCCESS;
}
int PointFloatShapeFeatureExtractor::extractFeatures(const LTKTraceGroup& inTraceGroup,
                                   vector<LTKShapeFeaturePtr>& outFeatureVec)
{
    LOG( LTKLogger::LTK_LOGLEVEL_DEBUG) << "Entering " <<
        "PointFloatShapeFeatureExtractor::extractFeatures()" << endl;
    
    PointFloatShapeFeature *featurePtr = NULL;
	float x,y,deltax;
	int numPoints=0;						// number of pts
	int count=0;
	int currentStrokeSize;
	float sintheta, costheta,sqsum;
	int i;

    int numberOfTraces = inTraceGroup.getNumTraces();

    if (numberOfTraces == 0 )
    {
        LOG( LTKLogger::LTK_LOGLEVEL_ERR) << "Error: " << 
            EEMPTY_TRACE_GROUP << " : " << getErrorMessage(EEMPTY_TRACE_GROUP)<<
            " PointFloatShapeFeatureExtractor::extractFeatures" <<endl;
        
        LTKReturnError(EEMPTY_TRACE_GROUP);
    }
    
	LTKTraceVector allTraces = inTraceGroup.getAllTraces();
	LTKTraceVector::iterator traceIter = allTraces.begin();
	LTKTraceVector::iterator traceEnd = allTraces.end();


	//***CONCATENTATING THE STROKES***
	for (; traceIter != traceEnd ; ++traceIter)
	{
		floatVector tempxVec, tempyVec;
		
		(*traceIter).getChannelValues("X", tempxVec);

		(*traceIter).getChannelValues("Y", tempyVec);

		// Number of points in the stroke
		numPoints = numPoints + tempxVec.size(); 
	}	

	//***THE CONCATENATED FULL STROKE***
	floatVector xVec(numPoints);
	floatVector yVec(numPoints);	


	traceIter = allTraces.begin();
	traceEnd  = allTraces.end();

	boolVector penUp;
	// Add the penUp here	
	for (; traceIter != traceEnd ; ++traceIter)
	{
		floatVector tempxVec, tempyVec;
		
		(*traceIter).getChannelValues("X", tempxVec);

		(*traceIter).getChannelValues("Y", tempyVec);

		currentStrokeSize = tempxVec.size();

        if (currentStrokeSize == 0)
        {
            LOG( LTKLogger::LTK_LOGLEVEL_ERR) << "Error: " << 
            EEMPTY_TRACE << " : " << getErrorMessage(EEMPTY_TRACE) <<
            " PointFloatShapeFeatureExtractor::extractFeatures" <<endl;
            
            LTKReturnError(EEMPTY_TRACE);
        }
        
		for( int point=0; point < currentStrokeSize ; point++ )
		{
			xVec[count] = tempxVec[point];
			yVec[count] = tempyVec[point];
			count++;
            
			if(point == currentStrokeSize - 1 )
            {         
				penUp.push_back(true);
            }
			else
            {         
				penUp.push_back(false);
            }
		}	

	}
	//***CONCATENTATING THE STROKES***

	vector<float> theta(numPoints);
	vector<float> delta_x(numPoints-1);
	vector<float> delta_y(numPoints-1);

	for(i=0; i<numPoints-1; ++i)
	{  
		delta_x[i]=xVec[i+1]-xVec[i];
		delta_y[i]=yVec[i+1]-yVec[i];

	}

	//Add the controlInfo here
	sqsum = sqrt( pow(xVec[0],2)+ pow(yVec[0],2))+ EPS;
    
	sintheta = (1+yVec[0]/sqsum)*PREPROC_DEF_NORMALIZEDSIZE/2;
    
	costheta = (1+xVec[0]/sqsum)*PREPROC_DEF_NORMALIZEDSIZE/2;

    featurePtr = new PointFloatShapeFeature(xVec[0],
                                             yVec[0],
                                             sintheta,
                                             costheta,
                                             penUp[0]);

	outFeatureVec.push_back(LTKShapeFeaturePtr(featurePtr));
	featurePtr = NULL;

    
	for( i=1; i<numPoints; ++i)
	{  

		//Add the controlInfo here

		sqsum = sqrt(pow(delta_x[i-1],2) + pow(delta_y[i-1],2))+EPS;
		sintheta = (1+delta_y[i-1]/sqsum)*PREPROC_DEF_NORMALIZEDSIZE/2;
		costheta = (1+delta_x[i-1]/sqsum)*PREPROC_DEF_NORMALIZEDSIZE/2;

        featurePtr = new PointFloatShapeFeature(xVec[i],
                                               yVec[i],
                                               sintheta,
                                               costheta,
                                               penUp[i]);
		//***POPULATING THE FEATURE VECTOR***
		outFeatureVec.push_back(LTKShapeFeaturePtr(featurePtr));
		featurePtr = NULL;
    
	}

    LOG( LTKLogger::LTK_LOGLEVEL_DEBUG) << "Exiting " <<
        "PointFloatShapeFeatureExtractor::extractFeatures()" << endl;
    
	return SUCCESS;
}
Ejemplo n.º 5
0
//This currently assume the anotation
//is sequential
// Reads a UNIPEN ink file with character-level annotation and splits the
// whole-file trace group into one trace group per annotated character.
// Annotation stroke IDs may be comma-separated and/or hyphenated ranges
// (e.g. "0,2-4"). This currently assumes the annotation is sequential.
//
// Arguments: fileName      - ink file to read
//            traceGroupVec - output, one trace group per annotated character
//            dc, sc        - capture device / screen context read from file
// Returns  : SUCCESS, or FAILURE on read error or ID/trace-count mismatch.
int readWordFile(string fileName, vector<LTKTraceGroup>& traceGroupVec, LTKCaptureDevice& dc, LTKScreenContext& sc)
{

	//anotation Info
	map<string, string>anotInfo;   //annotation info
	LTKTraceGroup traceGroup;      //trace group
	vector<string> strIdscs;       //comma separated strings
	vector<string> strIdshs;       //hyphen separated strings
	vector<int> intIds;            //intIds
	int firstId, secondId;         //first and last IDs when there are hyphenated IDs
	int loopIndex;                 //index of Ids
	
	vector<string>::iterator strIter;

	//Reading the anotation file
	if(LTKInkFileReader::readUnipenInkFileWithAnnotation(fileName,"CHARACTER","ALL",traceGroup,anotInfo, dc, sc) != SUCCESS)
	{
		cout<<"Error reading ink file:"<<fileName<<endl;
		LOG(LTKLogger::LTK_LOGLEVEL_ERR) << "Error reading ink file:"<<fileName<<endl;
		return FAILURE;
	}

	for(map<string,string>::iterator annotationIter=anotInfo.begin();annotationIter!=anotInfo.end();++annotationIter)
	{
		LTKTraceGroup charTraceGroup;  //TraceGroup corresponding to this char

		string strokeIndices=annotationIter->first;
		string comment=annotationIter->second;

		//Finding out the stroke IDs
		LTKStringUtil::tokenizeString(strokeIndices,  " ,\t",  strIdscs);
		for(strIter = strIdscs.begin(); strIter != strIdscs.end(); ++strIter)
		{
			// FIX: compare against string::npos, not -1; find() returns
			// an unsigned string::size_type.
			if((*strIter).find('-') != string::npos)
			{
				//tokenize string again if there are hyphens
				LTKStringUtil::tokenizeString(*strIter, "-", strIdshs);
				firstId  = atoi((strIdshs.front()).c_str()); //first ID
				secondId = atoi((strIdshs.back()).c_str());  //second ID

				// expand the inclusive range firstId..secondId
				for(loopIndex = firstId; loopIndex <=secondId; ++loopIndex )
				{
					intIds.push_back(loopIndex);
				}
			}
			else
			{
				intIds.push_back(atoi((*strIter).c_str() ));
			}
			strIdshs.clear();
		}

		strIdscs.clear();

		//sort the IDs in the sequence
		sort(intIds.begin(), intIds.end(),less<int>());

		//constructing a temporary trace group
		for(loopIndex = 0; loopIndex < intIds.size(); ++loopIndex)
		{
			// reject IDs outside the range of traces actually read
			if(intIds.at(loopIndex) < 0 || intIds.at(loopIndex) >= traceGroup.getNumTraces())
			{
				cout<<"Annotation and trace group read from ink file:"<<fileName<<" do not match"<<endl;
				LOG(LTKLogger::LTK_LOGLEVEL_ERR) << "Annotation and trace group read from ink file:"<<fileName<<" do not match"<<endl;
				return FAILURE;

			}

			LTKTrace tempTrace;
			traceGroup.getTraceAt(intIds.at(loopIndex), tempTrace);
			
			charTraceGroup.addTrace(tempTrace);
		}

		
		//push back this trace vector.
		traceGroupVec.push_back(charTraceGroup);


		intIds.clear();
	}

	return SUCCESS;
}
Ejemplo n.º 6
0
/**********************************************************************************
* AUTHOR		: Deepu V.
* DATE			: 09-MAR-2005
* NAME			: computeChannelStatistics
* DESCRIPTION	: This is a generic function that computes the statistics of channels of
*                 an LTKTraceGroup object passed to it.   
* ARGUMENTS		: traceGroup        - The TraceGroup whose statistics need to be computed                  channelNames      - Names of channels in the traceGroup for which 
*			      channelNames      - channels for which statistics have to be comptued
*		          properties        - The names of the statistics to be computed
*			      channelStatistics - output vector containing results
*			      channelStatistics[i][j] the statistics properties[j] for channelname
*			      channelNames[i]
*
* RETURNS		:  SUCCESS/FAILURE
* NOTES			:
* CHANGE HISTROY
* Author			Date				Description of change
*************************************************************************************/
int LTKInkUtils::computeChannelStatistics(const LTKTraceGroup& traceGroup,
               const vector<string>& channelNames,  const vector<ELTKTraceGroupStatistics>& properties,
			   vector<vector<float> >& channelStatistics)
{
	LOG( LTKLogger::LTK_LOGLEVEL_DEBUG) << 
		  " Entering: LTKInkUtils::computeChannelStatistics()" << endl;
	
	vector<float> tempVec; //temporary vector

	int numChannels = channelNames.size(); //num of channels for which statistics need to be computed

	int numFeatures = properties.size();   //number of properties to be calculated

	int numTraces = traceGroup.getNumTraces(); //number of traces in each tracegroup

	int numPoints;              //number of points in a stroke

	// NOTE: totalNumPoints is accumulated from the FIRST channel only
	// (see channelIndex==0 below) and later used to average EVERY channel —
	// this assumes all requested channels have the same length per trace.
	int totalNumPoints=0;  //each channel is of equal length

	float currVal;              //value of current point in the channel

	int traceIndex, channelIndex, pointIndex, featureIndex;

	// Clear the output vector
	channelStatistics.clear();

	// Build one prototype statistics row, seeded per property so that the
	// accumulation loop below can update it with simple compare/add:
	// max starts at -FLT_MAX, min at FLT_MAX, avg (a running sum) at 0.
	tempVec.clear();
	for (featureIndex= 0 ; featureIndex <numFeatures; ++featureIndex)
	{
		switch(properties[featureIndex])
		{
		//initializing max
		case TG_MAX:tempVec.push_back(-FLT_MAX);
			break;
		//initializing min
		case TG_MIN:tempVec.push_back(FLT_MAX);
			break;
		//initializing avg
		case TG_AVG:tempVec.push_back(0);
			break;

		// any other property is unsupported — fail fast
		default: LOG(LTKLogger::LTK_LOGLEVEL_ERR)
				       <<"Error: LTKInkUtils::computeChannelStatistics()"<<endl;

			LTKReturnError(EUNSUPPORTED_STATISTICS);
		}
	}
	
	//Initialization Every channel has the same value
	for(channelIndex =0; channelIndex<numChannels; ++channelIndex)
	{
		channelStatistics.push_back(tempVec);

		//initialize total number of points for each channel to zero
	}


	//Iterating through all the strokes
	for (traceIndex = 0; traceIndex <numTraces; ++traceIndex)
	{
		LTKTrace trace;
		traceGroup.getTraceAt(traceIndex, trace);

		//Iterating through all the channels in a stroke
		for (channelIndex =0; channelIndex<numChannels; ++channelIndex)
		{
			//get the current channel values
			floatVector currChannel;
			trace.getChannelValues(channelNames[channelIndex], currChannel);

			//get the current output vector to be updated
			floatVector& currStats = channelStatistics.at(channelIndex);

			//number of points in this channel
			numPoints = currChannel.size();

			// count points once per trace (first channel stands in for all)
			if(channelIndex==0)
			{
				totalNumPoints += numPoints;
			}

			//iterate through all points in the channel
			for(pointIndex = 0; pointIndex <numPoints; ++pointIndex)
			{
				currVal = currChannel[pointIndex];

				//updating all features as we iterate through each point;
				for (featureIndex =0; featureIndex<numFeatures; featureIndex++)
				{
					switch(properties[featureIndex])
					{

					//updating the maximum
					case TG_MAX:
						if(currVal > currStats[featureIndex])
							currStats[featureIndex] = currVal;
						break;

					//updating the minimum
					case TG_MIN:
						if(currVal < currStats[featureIndex])
							currStats[featureIndex] = currVal;
						break;

					//accumulating the sum
					case TG_AVG:
						currStats[featureIndex] += currVal;
						break;

					default: LOG(LTKLogger::LTK_LOGLEVEL_ERR)
							        <<"Error: LTKInkUtils::computeChannelStatistics()"<<endl;

						LTKReturnError(EUNSUPPORTED_STATISTICS);

					}

				}

			}
			
		}

	}

	// Finalization Step: convert the TG_AVG running sums into means.
	// TG_MAX / TG_MIN are already final.
	for (channelIndex= 0 ; channelIndex<numChannels; ++channelIndex)
	{

		floatVector& currStats = channelStatistics.at(channelIndex);

		//total number of points in this channel
		numPoints = totalNumPoints; 

		for(featureIndex = 0; featureIndex<numFeatures; ++featureIndex)
		{
			switch(properties[featureIndex])
			{
			//finding the average
			case TG_AVG:
					currStats[featureIndex] /= numPoints;
				break;
			}
		}
	}

	LOG( LTKLogger::LTK_LOGLEVEL_DEBUG) << 
		  " Exiting: LTKInkUtils::computeChannelStatistics()" << endl;

	return SUCCESS;
}
Ejemplo n.º 7
0
// Extracts holistic (per-trace) features from a trace group. The group is
// preprocessed three different ways; from each trace the following features
// are computed: EER and orientation (preprocessing 1), normalized trace
// curve length and PII (preprocessing 2), and normalized swept angle
// (preprocessing 3). One feature row is appended per trace.
// NOTE(review): the loop below indexes all three preprocessed groups with
// the ORIGINAL group's trace count — assumes the preprocess variants
// preserve the number of traces; confirm against their implementations.
int HolisticFeatureExtractor::extractFeatures(const LTKTraceGroup& traceGroup, const LTKCaptureDevice& captureDevice, const LTKScreenContext& screenContext, LTKPreprocessorInterface *ltkShapeRecPtr, float2DVector& featureVector)
{
	LOG( LTKLogger::LTK_LOGLEVEL_DEBUG) << 
        "Entered HolisticFeatureExtractor::extractFeatures"  << endl;

	LTKTrace preprocessedTrace;				// a trace of the trace group

	LTKTrace preprocessedTrace2;			// a trace of the trace group

	LTKTrace preprocessedTrace3;			// a trace of the trace group

	LTKTraceGroup preprocessedTraceGroup;

	LTKTraceGroup preprocessedTraceGroup2;

	LTKTraceGroup preprocessedTraceGroup3;

	int	traceIndex;				//	variable to loop over all traces of the trace group


	// preprocessing the traceGroup in 3 ways to extract 3 kinds of features
		
	preprocess(traceGroup, preprocessedTraceGroup, captureDevice, screenContext, ltkShapeRecPtr);

	preprocess2(traceGroup, preprocessedTraceGroup2, captureDevice, screenContext, ltkShapeRecPtr);

	preprocess3(traceGroup, preprocessedTraceGroup3, captureDevice, screenContext, ltkShapeRecPtr);

	//	extracting the feature vector

	for(traceIndex = 0; traceIndex < traceGroup.getNumTraces(); ++traceIndex)
	{
		preprocessedTrace = preprocessedTraceGroup.getTraceAt(traceIndex);

		preprocessedTrace2 = preprocessedTraceGroup2.getTraceAt(traceIndex);

		preprocessedTrace3 = preprocessedTraceGroup3.getTraceAt(traceIndex);

		// calling the compute features methods 

		floatVector features;

		// calculating features with preprocessedTrace (first preprocessing)
		
		features.push_back(computeEER(preprocessedTrace));

		features.push_back(computeOrientation(preprocessedTrace));

		// calculating features with preprocessedTrace2

		float TCL = computeTCL(preprocessedTrace2);

		TCL /= calculateBBoxPerimeter(screenContext);	// normalizing using the perimeter
		
		features.push_back(TCL);

		features.push_back(computePII(preprocessedTrace2, screenContext));

		// calculating features with preprocessedTrace3

		float swAng = computeSweptAngle(preprocessedTrace3);

		// normalizing the swept angle with swAngNormFactor x 360 degrees
		swAng /= (2*PI*m_swAngNormFactor);

		features.push_back(swAng);

		featureVector.push_back(features);

	}//traceIndex

	LOG( LTKLogger::LTK_LOGLEVEL_DEBUG) << 
        "Exiting HolisticFeatureExtractor::extractFeatures"  << endl;

	return SUCCESS;

}
Ejemplo n.º 8
0
/**********************************************************************************
* AUTHOR        : Bharath A
* DATE          : 12-Jun-2008
* NAME          : extractFeatures
* DESCRIPTION   : Extracts NPen features from a trace group
* ARGUMENTS     : The trace group from which features have to be extracted
* RETURNS       : vector of NPenShapeFeature objects
* NOTES         :
* CHANGE HISTROY
* Author            Date                Description of change
*************************************************************************************/
int NPenShapeFeatureExtractor::extractFeatures(const LTKTraceGroup& inTraceGroup, 
                                      vector<LTKShapeFeaturePtr>& outFeatureVec)
{

	LOG(LTKLogger::LTK_LOGLEVEL_DEBUG) << "Entering " <<
        "NPenShapeFeatureExtractor::extractFeatures()" << endl;

    NPenShapeFeature* featurePtr = NULL;  

	// One row of 11 floats per point; converted to NPenShapeFeature objects
	// at the end (see the feature index legend below).
	vector<vector<float> > floatFeatureValues;
    


	int errorCode;

	if(inTraceGroup.getNumTraces() == 0)
	{
		LOG(LTKLogger::LTK_LOGLEVEL_ERR)
		        <<"Error: FeatureExtractor::findAllFeatures"<<endl;

		LTKReturnError(EEMPTY_TRACE_GROUP);
	}


	// All points of all traces concatenated, padded at both ends with
	// halfWindowSize copies of the first/last point so that every real
	// point has a full vicinity window.
	vector<vector<float> > concatenatedCoord;
	int currPenUpPointIndex = -1;
	vector<int> penUpPointsIndices;   // indices (pre-padding) of each trace's last point
	

	int halfWindowSize = m_windowSize/2;

	// a window of fewer than 2 points cannot define a vicinity
	if(halfWindowSize==0)
	{
		LOG(LTKLogger::LTK_LOGLEVEL_ERR)
		        <<"Error: FeatureExtractor::findAllFeatures"<<endl;

		LTKReturnError(EINVALID_NUM_OF_POINTS);
	}


	for(int t=0;t<inTraceGroup.getNumTraces();++t)
	{

		LTKTrace eachTrace;
		inTraceGroup.getTraceAt(t,eachTrace);

		if(eachTrace.isEmpty())
		{
			LOG(LTKLogger::LTK_LOGLEVEL_ERR)
						    <<"Error: FeatureExtractor::findAllFeatures"<<endl;

			LTKReturnError(EEMPTY_TRACE);

		}

		vector<float> xVec;
		vector<float> yVec;

		eachTrace.getChannelValues(X_CHANNEL_NAME,xVec);
		eachTrace.getChannelValues(Y_CHANNEL_NAME,yVec);

		// pad the front with copies of the very first point
		if(t==0)
		{
			vector<float> firstPoint;
			firstPoint.push_back(xVec[0]);
			firstPoint.push_back(yVec[0]);

			concatenatedCoord.insert(concatenatedCoord.begin(),halfWindowSize,firstPoint);
		}

		for(int p=0;p<xVec.size();++p)
		{
			vector<float> point;
			point.push_back(xVec[p]);
			point.push_back(yVec[p]);

			concatenatedCoord.push_back(point);

		}

		// remember where this trace ends (pen-up point)
		currPenUpPointIndex += xVec.size();

		penUpPointsIndices.push_back(currPenUpPointIndex);

		// pad the back with copies of the very last point
		if(t==(inTraceGroup.getNumTraces()-1))
		{
			vector<float> lastPoint;
			lastPoint.push_back(xVec[xVec.size()-1]);
			lastPoint.push_back(yVec[yVec.size()-1]);

			concatenatedCoord.insert(concatenatedCoord.end(),halfWindowSize,lastPoint);
		}

	}

	

	/*	0 - normalized x
		1 - normalized y
		2 - cos alpha
		3 - sin alpha
		4 - cos beta
		5 - sin beta
		6 - aspect
		7 - curliness
		8 - linearity
		9 - slope  
		10 - pen-up / pen-down stroke (0 for pen-down and 1 for pen-up)*/

	float deltaX=0;
	float deltaY=0;
	float hypotenuse=0;


	float cosalpha=0;
	float sinalpha=0;
	float cosbeta=0;
	float sinbeta=0;
	float ispenup=0;
	float aspect=0;
	float curliness=0;
	float linearity=0;
	float slope=0;


	float xMin,yMin,xMax,yMax; //for vicnity bounding box;
	float bbWidth,bbHeight;
	float maxOfWidthHeight;



	currPenUpPointIndex = 0;


	// Iterate over the REAL points only (skip the padding at both ends);
	// f indexes into the padded concatenatedCoord.
	for(int f=halfWindowSize;f<(concatenatedCoord.size()-halfWindowSize);++f)
	{

		vector<float> eachPointFeature;

		eachPointFeature.push_back(concatenatedCoord[f][0]);  //x
		eachPointFeature.push_back(concatenatedCoord[f][1]);  //y

		// alpha: direction of the chord between the previous and next point
		deltaX = concatenatedCoord[f-1][0] - concatenatedCoord[f+1][0];
		deltaY = concatenatedCoord[f-1][1] - concatenatedCoord[f+1][1];

		hypotenuse = sqrt((deltaX*deltaX)+(deltaY*deltaY));

		// degenerate chord: fall back to angle 0
		if(hypotenuse < EPS)
		{
			cosalpha = 1;
			sinalpha = 0;
		}
		else
		{
			cosalpha = deltaX / hypotenuse;
			sinalpha = deltaY / hypotenuse;
		}

		eachPointFeature.push_back(cosalpha);
		eachPointFeature.push_back(sinalpha);

		eachPointFeature.push_back(cosbeta); //creating empty spaces for cosine and sine betas for future assignment
		eachPointFeature.push_back(sinbeta);

		// vicinity = the window of points around f; also accumulate the
		// trajectory length (sum of segment lengths) inside the window
		vector<vector<float> > vicinity;

		float vicinityTrajLen = 0.0f;
		
		for(int v=f-halfWindowSize;v<=f+halfWindowSize;++v)
		{
			vicinity.push_back(concatenatedCoord[v]);

			if(v<(f+halfWindowSize))
			{
				vicinityTrajLen += (sqrt(((concatenatedCoord[v+1][1]-concatenatedCoord[v][1])*(concatenatedCoord[v+1][1]-concatenatedCoord[v][1]))+((concatenatedCoord[v+1][0]-concatenatedCoord[v][0])*(concatenatedCoord[v+1][0]-concatenatedCoord[v][0]))));
			}
		}

		findVicinityBoundingBox(vicinity,xMin,yMin,xMax,yMax);

		bbWidth = xMax - xMin;

		bbHeight = yMax - yMin;

		// aspect in [-1, 1]: height-vs-width balance of the vicinity box
		if(fabs(bbHeight+bbWidth)<EPS)
		{
			aspect = 0.0;
		}
		else
		{
			aspect = (bbHeight-bbWidth)/(bbHeight+bbWidth);
		}

		
		eachPointFeature.push_back(aspect);

		
		maxOfWidthHeight = ( bbWidth > bbHeight) ? bbWidth : bbHeight;

		// curliness: trajectory length relative to the box's larger side
		if(fabs(maxOfWidthHeight) < EPS)
		{
			curliness = 0.0f;
		}
		else
		{
			curliness = (vicinityTrajLen / maxOfWidthHeight) - 2;
		}

		eachPointFeature.push_back(curliness);

		computeLinearityAndSlope(vicinity,linearity,slope);

		eachPointFeature.push_back(linearity);
		eachPointFeature.push_back(slope);

		// pen-up flag: true only at each trace's recorded last point
		// (f-halfWindowSize undoes the front padding offset)
		if(penUpPointsIndices[currPenUpPointIndex] == (f-halfWindowSize))
		{
			ispenup = 1;
			++currPenUpPointIndex;
		}
		else
		{
			ispenup = 0;
		}
		eachPointFeature.push_back(ispenup); //currently assuming pen-up strokes are not resampled

		floatFeatureValues.push_back(eachPointFeature);

	}


		// beta at point i depends on alpha at i-1 and i+1, so temporarily
		// duplicate the first and last feature rows to give the boundary
		// rows neighbors on both sides.
		vector<float> firstFeaturePoint = floatFeatureValues[0];

		floatFeatureValues.insert(floatFeatureValues.begin(),1,firstFeaturePoint);

		vector<float>  lastFeaturePoint = floatFeatureValues[floatFeatureValues.size()-1];

		floatFeatureValues.insert(floatFeatureValues.end(),1,lastFeaturePoint);


		// beta = angle difference between neighboring alphas:
		// cos(b) = cos(a1)cos(a2)+sin(a1)sin(a2), sin(b) = cos(a1)sin(a2)-sin(a1)cos(a2)
		for(int ff=1;ff<(floatFeatureValues.size()-1);++ff)
		{

			floatFeatureValues[ff][4] = (floatFeatureValues[ff-1][2]*floatFeatureValues[ff+1][2]) + (floatFeatureValues[ff-1][3]*floatFeatureValues[ff+1][3]);
			floatFeatureValues[ff][5] = (floatFeatureValues[ff-1][2]*floatFeatureValues[ff+1][3]) - (floatFeatureValues[ff-1][3]*floatFeatureValues[ff+1][2]);
			
		}

		//removing the extraneous feature points at the beginning and end
		floatFeatureValues.erase(floatFeatureValues.begin(),floatFeatureValues.begin()+1); 
		floatFeatureValues.pop_back();


		// convert each 11-float row into an NPenShapeFeature object
		for(int a=0;a<floatFeatureValues.size();++a)
		{
				NPenShapeFeature* ptrFeature = new NPenShapeFeature();
				ptrFeature->setX(floatFeatureValues[a][0]);
				ptrFeature->setY(floatFeatureValues[a][1]);
				ptrFeature->setCosAlpha(floatFeatureValues[a][2]);
				ptrFeature->setSinAlpha(floatFeatureValues[a][3]);
				ptrFeature->setCosBeta(floatFeatureValues[a][4]);
				ptrFeature->setSinBeta(floatFeatureValues[a][5]);
				ptrFeature->setAspect(floatFeatureValues[a][6]);
				ptrFeature->setCurliness(floatFeatureValues[a][7]);
				ptrFeature->setLinearity(floatFeatureValues[a][8]);
				ptrFeature->setSlope(floatFeatureValues[a][9]);
				
				// feature 10 is 1.0 for pen-up, 0.0 for pen-down
				if(fabs(floatFeatureValues[a][10]-1.0f) < EPS)
				{
					ptrFeature->setPenUp(true);
				}
				else
				{
					ptrFeature->setPenUp(false);
				}

				outFeatureVec.push_back(LTKShapeFeaturePtr(ptrFeature));

				ptrFeature = NULL;
			
		}
		
	



    LOG(LTKLogger::LTK_LOGLEVEL_DEBUG) << "Exiting " <<
        "NPenShapeFeatureExtractor::extractFeatures()" << endl;
    
	return SUCCESS;
}
Ejemplo n.º 9
0
// Trains the holistic recognizer: reads "<ink-file-path> <shape-id>" lines
// from trainingList, extracts holistic features from each ink file, and
// writes them (with a header) into the reference model file; finally stamps
// version/name/comment/dataset header info via the checksum generator.
//
// Arguments: trainingList               - path to the training-list file
//            strModelDataHeaderInfoFile - header-info file for checksumming
//            comment, dataset           - metadata recorded in the header
//            inFileType                 - (unused here) input file type
// Returns  : SUCCESS, or the checksum generator's error code.
// Throws   : LTKException on failure to open the list or model file.
int HolisticRecognizer::train(const string& trainingList, string& strModelDataHeaderInfoFile, string &comment, string &dataset, const string &inFileType)
{
	LOG( LTKLogger::LTK_LOGLEVEL_DEBUG) << "Entered HolisticRecognizer::train"  << endl;

	string path;							//	file path of a ink file

	string shapeId;							//	shape id of the ink file

	int shapeCount = 0;						//	count of the no. of shape samples read so far

	int prevShape = 0;						//	shape id of the previous shape

	int numStrokes = 0;						//	number of strokes in a particular character

	int numFeatures = 0;					//	number of features being used

	int	traceIndex;							//	variable to loop over all traces of the trace group

	bool firstFileFlag = true;				//	flag is used for writing the number of features in the dat file

	LTKCaptureDevice captureDevice;

	LTKScreenContext screenContext;

	int iMajor, iMinor, iBugFix;			// Version identifiers

	char versionInfo[VERSION_STR_LEN];					// Holds the version information.

    string version;	///, pca="PCA";


	//	opening the file containing the training list of each of the shapes	
	
	ifstream in(trainingList.c_str());

	//	throwing error if unable to open the training list file

	if(!in)
	{
		throw LTKException(ETRAINLIST_FILE_OPEN);
	}

	//	opening the output reference model file

	ofstream out(m_referenceModelFile.c_str(),ios::out);

	// throwing error if unable to open the reference model file

	if(!out)
	{
		throw LTKException(EMODEL_DATA_FILE_OPEN);
	}

	//	writing the header information into the reference model file

	out << m_numShapes << endl;

	LOG( LTKLogger::LTK_LOGLEVEL_DEBUG) << "m_numShapes = " << m_numShapes << endl;

	//	reading the ink file paths from the training list file.
	//	FIX: test the getline calls themselves instead of `while(in)` —
	//	the old form checked the stream BEFORE reading, so a trailing
	//	failed read processed one garbage (empty-path) record.
	while(getline(in, path, ' ') && getline(in, shapeId))
	{
		// a change in shape id means a new shape class has started
		if(atoi(shapeId.c_str()) != prevShape)
		{
			++shapeCount;

			prevShape = atoi(shapeId.c_str()); 
		}

		// checking for end of training data

		if(shapeCount == m_numShapes)
		{
			break;
		}

		LTKTraceGroup traceGroup;

		float2DVector featureVector;

		cout << path ;

		// reading the ink files
		// NOTE(review): return value is not checked — a bad ink file will
		// flow into feature extraction; confirm this is intended.

		LTKInkFileReader::readUnipenInkFile(path, traceGroup, captureDevice, screenContext);

		numStrokes = traceGroup.getNumTraces();
		
		// extracting features from the traceGroup

		m_featureExtractorObj.extractFeatures(traceGroup, captureDevice, screenContext, ltkShapeRecPtr, featureVector);

		// writing the number of features in the dat file only the first time

		if (firstFileFlag==true)
		{
			numFeatures = featureVector.at(0).size();			

			out << numFeatures << endl;
		}

		firstFileFlag = false;

		// writing the shapeId and the corresponding features in the dat file

		out << shapeId << " ";

		out << numStrokes << " ";
		
		for(traceIndex = 0; traceIndex < numStrokes; ++traceIndex)
		{
			for(int loopIndex = 0; loopIndex < numFeatures; ++loopIndex)
			{
				out << featureVector.at(traceIndex).at(loopIndex) << " ";
			}
		}

		out << endl;
		
	}//while

	//	closing the training list file and returning

	in.close();

	out.close();

	getCurrentVersion(&iMajor,&iMinor,&iBugFix);

	// FIX: bounded snprintf instead of sprintf to rule out buffer overflow
	snprintf(versionInfo, VERSION_STR_LEN, "%d.%d.%d", iMajor, iMinor, iBugFix);

	version = versionInfo;

	headerInfo[RECVERSION] = version;
	string algoName = "holistic";
	headerInfo[RECNAME] = algoName;
	headerInfo[COMMENT]=comment;
	headerInfo[DATASET]=dataset;

	// FIX: explicit comparison instead of assignment-as-condition
	if((errorCode = cheSumGen.addHeaderInfo(strModelDataHeaderInfoFile, m_referenceModelFile, headerInfo)) != 0)
	{
		return errorCode;
	}

	LOG( LTKLogger::LTK_LOGLEVEL_DEBUG) << "Holistic Engine Trained"  << endl;

	LOG( LTKLogger::LTK_LOGLEVEL_DEBUG) << "Exiting HolisticRecognizer::train"  << endl;

	return SUCCESS;
}
Ejemplo n.º 10
0
// Reads a raw ink file into a trace group. Each data line is either a
// point "x y [t]" or a negative tag: -1 ends the current trace, -2 ends
// the file (SUCCESS), -6 carries the X/Y DPI, any other negative tag is
// skipped. Returns FAILURE if the file ends without a -2 terminator;
// EINKFILE_EMPTY / EINK_FILE_OPEN on a missing or unopenable file.
int LTKInkFileReader::readRawInkFile(const string& inkFile, LTKTraceGroup& traceGroup, LTKCaptureDevice& captureDevice, LTKScreenContext& screenContext)
{

    LOG( LTKLogger::LTK_LOGLEVEL_DEBUG) <<
                                        " Entering: LTKInkFileReader::readRawInkFile()" << endl;

    string dataLine;

    vector<string> dataVector;

    vector<float> point;				//	a point of a trace

    int pointIndex;

    if(inkFile.empty())
    {
        LOG(LTKLogger::LTK_LOGLEVEL_ERR)
                <<"Error : "<< EINKFILE_EMPTY <<":"<< getErrorMessage(EINKFILE_EMPTY)
                <<"LTKInkFileReader::readRawInkFile()" <<endl;

        LTKReturnError(EINKFILE_EMPTY);
    }

    //	opening the ink file

    ifstream infile(inkFile.c_str());

    //	checking if the file open was successful

    if(!infile)
    {
        LOG(LTKLogger::LTK_LOGLEVEL_ERR)
                <<"Error: LTKInkFileReader::readRawInkFile()"<<endl;

        LTKReturnError(EINK_FILE_OPEN);
    }

    vector<LTKChannel> channels; 				//	channels of a trace

    LTKChannel xChannel("X", DT_FLOAT, true);	//	x-coordinate channel of the trace

    LTKChannel yChannel("Y", DT_FLOAT, true);	//	y-coordinate channel of the trace

    LTKChannel tChannel("T", DT_FLOAT, true);	//	time channel of the trace

    //	initializing the channels of the trace

    channels.push_back(xChannel);

    channels.push_back(yChannel);

    channels.push_back(tChannel);

    //	composing the trace format object

    LTKTraceFormat traceFormat(channels);

    //	reading the ink file

    while(infile)
    {
        LTKTrace trace(traceFormat);

        while(infile)
        {
            getline(infile, dataLine);

            LTKStringUtil::tokenizeString(dataLine, " \t", dataVector);

            // FIX: a blank line tokenizes to an empty vector and the
            // dataVector[0] accesses below would be out-of-bounds UB
            if(dataVector.empty())
            {
                continue;
            }

            if(fabs( atof(dataVector[0].c_str()) + 1 ) < EPS)
            {
                // tag -1: current trace is complete
                traceGroup.addTrace(trace);

                break;
            }
            else if(fabs( atof(dataVector[0].c_str()) + 2 ) < EPS)
            {
                // tag -2: end of ink data
                LOG( LTKLogger::LTK_LOGLEVEL_DEBUG) <<
                                        " Exiting: LTKInkFileReader::readRawInkFile()" << endl;

                return SUCCESS;
            }
            else if(fabs( atof(dataVector[0].c_str()) + 6 ) < EPS)
            {
                // tag -6: device resolution line
                captureDevice.setXDPI(atof(dataVector[1].c_str()));

                captureDevice.setYDPI(atof(dataVector[2].c_str()));
            }
            else if(atof(dataVector[0].c_str()) < 0)
            {
                //	unknown tag. skipping line

                continue;
            }
            else
            {
                // ordinary point line: x y [t]; pad a missing time with 0

                for(pointIndex = 0; pointIndex < dataVector.size(); ++pointIndex)
                {
                    point.push_back(atof(dataVector[pointIndex].c_str()));
                }

                if(dataVector.size() == 2)
                {
                    point.push_back(0.0);
                }

                trace.addPoint(point);

                point.clear();
            }
        }
    }
    LOG( LTKLogger::LTK_LOGLEVEL_DEBUG) <<
                                        " Exiting: LTKInkFileReader::readRawInkFile()" << endl;

    // file ended without the -2 terminator
    return FAILURE;
}