Example #1
//--------------------------------------------------------------
void testApp::setup(){

    ofSetFrameRate(30);

    //Initialize the training and info variables
    infoText = "";
    trainingClassLabel = 1;
    record = false;

    //Open the connection with Synapse
    synapseStreamer.openSynapseConnection();

    //Set which joints we want to track: we track only the hand joints
    synapseStreamer.trackAllJoints(false);
    synapseStreamer.trackLeftHand(true);
    synapseStreamer.trackRightHand(true);
    synapseStreamer.computeHandDistFeature(true);

    //The input to the training data will be the [x y z] of the two hands
    //so we set the number of dimensions to 6
    trainingData.setNumDimensions( 6 );
    trainingClassLabel = 1; //the trainingClassLabel is not used for recognition

    //Initialize the DTW classifier
    DTW dtw;

    //Turn on null rejection, this lets the classifier output the predicted class label of 0 when the likelihood of a gesture is low
    dtw.enableNullRejection( true );

    //Set the null rejection coefficient, this controls the thresholds for the automatic null rejection
    //You can increase this value if you find that your real-time gestures are not being recognized
    //If you are getting too many false positives then you should decrease this value
    dtw.setNullRejectionCoeff( PRECISION_RECO );

    //Turn on the automatic data trimming, this will remove any sections of non-movement from the start and end of the training samples
    dtw.enableTrimTrainingData(true, 0.1, 90);

    //Offset the timeseries data by the first sample, this makes your gestures (more) invariant to the location the gesture is performed
    dtw.setOffsetTimeseriesUsingFirstSample(true);

    //Add the classifier to the pipeline (after we do this, we don't need the DTW classifier anymore)
    pipeline.setClassifier( dtw );

    //Load the data from TrainingData.txt, and train the pipeline
    if( trainingData.loadDatasetFromFile("TrainingData.txt") )
    {
        infoText = "Training data saved to file";
        if( pipeline.train( trainingData ) )
        {
            infoText = "Pipeline Trained";
        }
        else infoText = "WARNING: Failed to train pipeline";
    }
    else infoText = "WARNING: Failed to load training data from file";

}
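
For reference, a minimal sketch of the matching update() step is shown below. It assumes the pipeline trained in setup() above and a 6-dimensional sample built from the two hand positions; getLeftHand() and getRightHand() are placeholder names, not necessarily the real SynapseStreamer getters, so swap in whatever your streamer exposes. The prediction calls (predict(), getPredictedClassLabel(), getMaximumLikelihood()) are the standard GRT pipeline API.

//--------------------------------------------------------------
void testApp::update(){

    //Build the 6-dimensional input sample: [leftX leftY leftZ rightX rightY rightZ]
    //getLeftHand()/getRightHand() are hypothetical getters, adapt them to your streamer
    vector< double > leftHand = synapseStreamer.getLeftHand();
    vector< double > rightHand = synapseStreamer.getRightHand();

    VectorDouble sample(6);
    for(UINT i=0; i<3; i++){
        sample[i] = leftHand[i];
        sample[i+3] = rightHand[i];
    }

    //Run the sample through the trained pipeline
    if( pipeline.getTrained() ){
        pipeline.predict( sample );
        UINT predictedClassLabel = pipeline.getPredictedClassLabel(); //0 means the gesture was null rejected
        double maximumLikelihood = pipeline.getMaximumLikelihood();
    }
}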
bool GRT_Recognizer::initPipeline(string trainingdatafile, int dimension)
{
    //Initialize the training and info variables
    //infoText = "";
    //trainingClassLabel = 1;
    //noOfHands = 2;
    //noOfTrackedHands = 0;

    //The input to the training data will be the R[x y z] L[x y z] of the right and left hands,
    //so we set the number of dimensions to 6
    LabelledTimeSeriesClassificationData trainingData;
    //trainingData.setNumDimensions(6);
    if( !trainingData.loadDatasetFromFile(trainingdatafile) ) return false;
    
    //Initialize the DTW classifier
    DTW dtw;
    
    //Turn on null rejection, this lets the classifier output the predicted class label of 0 when the likelihood of a gesture is low
    dtw.enableNullRejection( true );

    //Set the null rejection coefficient to 2, this controls the thresholds for the automatic null rejection
    //You can increase this value if you find that your real-time gestures are not being recognized
    //If you are getting too many false positives then you should decrease this value
    dtw.setNullRejectionCoeff( 2 );

    
    //Turn on the automatic data trimming, this will remove any sections of non-movement from the start and end of the training samples
    dtw.enableTrimTrainingData(true, 0.1, 90);
    
    //Offset the timeseries data by the first sample, this makes your gestures (more) invariant to the location the gesture is performed
    dtw.setOffsetTimeseriesUsingFirstSample(true);
  

    //Add the classifier to the pipeline (after we do this, we don't need the DTW classifier anymore)
    pipeline.setClassifier( dtw );
    //pipeline.addPreProcessingModule( MovingAverageFilter(5,dimension) );
    //pipeline.addFeatureExtractionModule( FFT(16,1,dimension) );
    /*ClassLabelFilter myFilter = ClassLabelFilter();
    myFilter.setBufferSize(6);
    myFilter.setBufferSize(2);*/

    //Filter the predicted class label so it only changes when the prediction actually changes
    pipeline.addPostProcessingModule( ClassLabelChangeFilter() );

    //Train the pipeline and report whether training succeeded
    return pipeline.train( trainingData );
}
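
initPipeline() only loads the dataset and trains; a recognition call built on top of it might look like the sketch below. The method name recognize() is an assumption (it is not declared above), but the pipeline calls are the standard GRT prediction API, and the returned label is the one that survives the ClassLabelChangeFilter post-processing (0 meaning no gesture, i.e. null rejection).

//Hypothetical recognition call, one sample per frame; DTW buffers the incoming timeseries internally
UINT GRT_Recognizer::recognize(const vector< double > &sample)
{
    if( !pipeline.getTrained() ) return 0;

    if( !pipeline.predict( sample ) ) return 0;

    //The post-processed label only changes when the raw prediction changes
    return pipeline.getPredictedClassLabel();
}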
//--------------------------------------------------------------
void testApp::setup() {

    ofSetFrameRate(30);

    //Initialize the training and info variables
    infoText = "";
    trainingClassLabel = 1;
    record = false;
    noOfHands = 1;
    noOfTrackedHands = 0;
    //The input to the training data will be the [x y z] of each tracked hand, so we set the number of dimensions to noOfHands*3
    trainingData.setNumDimensions( noOfHands*3 );
    //trainingData.setNumDimensions( 3 );

    //Initialize the DTW classifier
    DTW dtw;

    //Turn on null rejection, this lets the classifier output the predicted class label of 0 when the likelihood of a gesture is low
    dtw.enableNullRejection( true );

    //Set the null rejection coefficient to 3, this controls the thresholds for the automatic null rejection
    //You can increase this value if you find that your real-time gestures are not being recognized
    //If you are getting too many false positives then you should decrease this value
    dtw.setNullRejectionCoeff( 3 );


    //Turn on the automatic data trimming, this will remove any sections of non-movement from the start and end of the training samples
    dtw.enableTrimTrainingData(true, 0.1, 90);

    //Offset the timeseries data by the first sample, this makes your gestures (more) invariant to the location the gesture is performed
    dtw.setOffsetTimeseriesUsingFirstSample(true);


    //Add the classifier to the pipeline (after we do this, we don't need the DTW classifier anymore)
    pipeline.setClassifier( dtw );

    //pipeline.inputVectorDimensions(3*noOfHands);

    //Setup NiTE

    niteRc = nite::NiTE::initialize();
    if (niteRc != nite::STATUS_OK)
    {
        printf("NiTE initialization failed\n");
        return;
    }

    niteRc = handTracker.create();
    if (niteRc != nite::STATUS_OK)
    {
        printf("Couldn't create user tracker\n");
        return;
    }

    handTracker.startGestureDetection(nite::GESTURE_CLICK);

    printf("\nPoint with your hand to start tracking it...\n");

    //Put the cursor in the middle of the screen (SPI_GETWORKAREA could be used instead to exclude the taskbar)
    int screenX = GetSystemMetrics(SM_CXSCREEN);
    int screenY = GetSystemMetrics(SM_CYSCREEN);
    SetCursorPos(screenX / 2, screenY / 2);
    xcursorpos = 500;
    ycursorpos = 500;

}
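
As a rough sketch, the matching update() step could read a NiTE frame, start tracking a hand when a click gesture completes, and feed each tracked hand position into the pipeline. The NiTE calls follow the usual HandTracker sample pattern; the surrounding bookkeeping (member names, how recording of training data is handled) is assumed rather than taken from the original application.

//--------------------------------------------------------------
void testApp::update(){

    nite::HandTrackerFrameRef handTrackerFrame;
    if( handTracker.readFrame(&handTrackerFrame) != nite::STATUS_OK ) return;

    //Start tracking a hand whenever a click gesture completes
    const nite::Array<nite::GestureData>& gestures = handTrackerFrame.getGestures();
    for(int i=0; i<gestures.getSize(); i++){
        if( gestures[i].isComplete() ){
            nite::HandId newId;
            handTracker.startHandTracking( gestures[i].getCurrentPosition(), &newId );
        }
    }

    //Build one [x y z] sample per tracked hand and run it through the pipeline once it has been trained
    const nite::Array<nite::HandData>& hands = handTrackerFrame.getHands();
    for(int i=0; i<hands.getSize(); i++){
        if( hands[i].isTracking() ){
            VectorDouble sample(3);
            sample[0] = hands[i].getPosition().x;
            sample[1] = hands[i].getPosition().y;
            sample[2] = hands[i].getPosition().z;

            if( pipeline.getTrained() ){
                pipeline.predict( sample );
                UINT predictedClassLabel = pipeline.getPredictedClassLabel(); //0 means null rejected
            }
        }
    }
}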