Code example #1
void initCalibration()
{
// Read through the input points file while the head calibration isn't done
    while ( true )
    {   // Here we load the variables needed to keep track of the experiment status
        readline(inputStream, trialNumber,  headCalibration,  trialMode, pointMatrix );
        // First phase of calibration (equivalent to the spacebar being pressed the first time)
        if ( (headCalibration== 1) && (headCalibrationDone==0 ))
        {   headEyeCoords.init(pointMatrix.col(3),pointMatrix.col(4), pointMatrix.col(0),pointMatrix.col(1),pointMatrix.col(2),interoculardistance );
            headCalibrationDone=headCalibration;
            //cerr << headCalibrationDone << endl;
        }
        // Second phase of calibration (equivalent to the spacebar being pressed the second time)
        if ( (headCalibration== 2) && (headCalibrationDone==1 ))
        {   headEyeCoords.init( headEyeCoords.getP1(),headEyeCoords.getP2(), pointMatrix.col(0),pointMatrix.col(1),pointMatrix.col(2),interoculardistance );
            eyeCalibration=headEyeCoords.getRightEye();
            headCalibrationDone=headCalibration;
        }
        // Third and final phase of calibration (equivalent to the spacebar being pressed the third time)
        if ((headCalibration==3))
        {   headEyeCoords.init( headEyeCoords.getP1(),headEyeCoords.getP2(), pointMatrix.col(0),pointMatrix.col(1),pointMatrix.col(2),interoculardistance );
            eyeCalibration=headEyeCoords.getRightEye();
            headCalibrationDone=3;
            break; // exit the while loop
        }
        // simulate the update of the head and eye positions
        if ( headCalibration==headCalibrationDone)
            headEyeCoords.update(pointMatrix.col(0),pointMatrix.col(1),pointMatrix.col(2));
    }
}
Code example #2
/**
 * @brief paintGL
 */
void paintGL()
{
	if (stereo)
    {   glDrawBuffer(GL_BACK);
        // Draw left eye view
        glDrawBuffer(GL_BACK_LEFT);
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
        glClearColor(0.0,0.0,0.0,1.0);
        cam.setEye(headEyeCoords.getRightEye());
        drawInfo();
        drawTrial();
        // Draw right eye view
        glDrawBuffer(GL_BACK_RIGHT);
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
        glClearColor(0.0,0.0,0.0,1.0);
        cam.setEye(headEyeCoords.getLeftEye());
        drawInfo();
        drawTrial();
        glutSwapBuffers();
    }
    else
    {
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
        glClearColor(0.0,0.0,0.0,1.0);
        glMatrixMode(GL_MODELVIEW);
        glLoadIdentity();
        cam.setEye(headEyeCoords.getRightEye());
        drawInfo();
        drawTrial();
        glutSwapBuffers();
    }
}
Code example #3
/**
 * @brief idle
 */
void idle()
{
    // Update markers
    optotrak.updateMarkers();
    markers = optotrak.getAllMarkers();
    allVisiblePatch = isVisible(markers[1].p) && isVisible(markers[2].p) && isVisible(markers[3].p); 
	allVisibleHead = isVisible(markers[17].p) && isVisible(markers[18].p) && allVisiblePatch;
    headEyeCoords.update(markers.at(1).p,markers.at(2).p,markers.at(3).p);

    if ( trialTimer.getElapsedTimeInMilliSec() >= parameters.get("WaitTime") && trialMode==BLACK_MODE )
    {
        frame=0.0;
        trialMode++;
        trialTimer.start();
    }

	if ( trialTimer.getElapsedTimeInMilliSec() >= parameters.get("ProbeTime") && trialMode==PROBE_MODE )
    {
        frame=0.0;
        trialMode++;
        trialTimer.start();
    }

    if ( trialTimer.getElapsedTimeInMilliSec() >= parameters.get("StimulusTime") && trialMode==STIMULUS_MODE )
    {
		Beep(660,660);
        frame=0.0;
        trialMode=RESPONSE_MODE;
        trialTimer.start();
    }

	markersFile << fixed << trialNumber << " " << headCalibrationDone << " " << trialMode << " " ;
    markersFile << setprecision(3) << 
		( isVisible(markers[1].p) ? markers[1].p.transpose() : junk ) << " " <<
		( isVisible(markers[2].p) ? markers[2].p.transpose() : junk ) << " " <<
		( isVisible(markers[3].p) ? markers[3].p.transpose() : junk ) << " " <<
		( isVisible(markers[17].p) ? markers[17].p.transpose() : junk ) << " " <<
		( isVisible(markers[18].p) ? markers[18].p.transpose() : junk ) << " " <<
		( isVisible(markers[18].p) ? markers[18].p.transpose() : junk ) << " " << // markers[18] appears twice in this output line
		( isVisible(headEyeCoords.getLeftEye()) ? headEyeCoords.getLeftEye().transpose() : junk ) << " " <<
		( isVisible(headEyeCoords.getRightEye()) ? headEyeCoords.getRightEye().transpose() : junk ) << " " ;
    markersFile << setprecision(1) <<
				trial.getCurrent().at("ZWidth") << " " <<
				trial.getCurrent().at("Slant") << " " <<
				trial.getCurrent().at("Tilt") << " " <<
				trial.getCurrent().at("StimulusAnchored") << " " <<
                endl;
}
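The write-out above streams each marker only if it is visible, and otherwise a junk placeholder, so the file keeps a fixed column layout. A minimal self-contained sketch of that pattern, assuming a hypothetical isVisibleSketch() test and an arbitrary placeholder value (neither is the project's actual implementation):

#include <iostream>
#include <limits>
#include <sstream>
#include <Eigen/Dense>
using Eigen::Vector3d;

// Hypothetical visibility test: here any all-finite vector counts as visible.
// (The project's isVisible() is not reproduced; this is an assumption for illustration.)
static bool isVisibleSketch(const Vector3d &v)
{
    return v.allFinite();
}

// Stream the marker when visible, the placeholder otherwise, so columns stay aligned
static void writeMarker(std::ostream &os, const Vector3d &m, const Vector3d &junk)
{
    const Vector3d &out = isVisibleSketch(m) ? m : junk;
    os << out.transpose() << " ";
}

int main()
{
    const Vector3d junk(9999.0, 9999.0, 9999.0);   // arbitrary placeholder value
    Vector3d visibleMarker(12.3, -4.5, 678.9);
    Vector3d occludedMarker(std::numeric_limits<double>::quiet_NaN(), 0.0, 0.0);

    std::ostringstream markersFile;                // stands in for the output file stream
    writeMarker(markersFile, visibleMarker, junk);
    writeMarker(markersFile, occludedMarker, junk);
    std::cout << markersFile.str() << std::endl;
    return 0;
}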
Code example #4
void initVariables()
{
    totalTimer.start();
    interoculardistance = str2num<double>(parameters.find("IOD"));
    trial.init(parameters);
    factors = trial.getNext(); // Initialize the factors in order to start from trial 1

    useCircularMask = util::str2num<int>( parameters.find("CircularMask")) == 1 ;
    circularMaskRadius = util::str2num<int>(parameters.find("CircularMaskRadius"));
    if (useCircularMask)
        glEnable(GL_STENCIL_TEST);
    else
        glDisable(GL_STENCIL_TEST);

    fixationDurationInSeconds = util::str2num<double>(parameters.find("AdaptationDurationInSeconds"));
    // Set up the stimulus and its drawer
    redDotsPlane.setNpoints(util::str2num<int>(parameters.find("NumStimulusPoints")));
    redDotsPlane.setDimensions(
        util::str2num<int>(parameters.find("StimulusEdgeLength")),
        util::str2num<int>(parameters.find("StimulusEdgeLength")),0.1);
    redDotsPlane.compute();
    stimDrawer.initList(&redDotsPlane,glRed);

    resetPointStrip();

    stimulusDurationInMilliSeconds = util::str2num<double>(parameters.find("StimulusDuration"));
    initialAdaptationTimeInSeconds = util::str2num<double>(parameters.find("InitialAdaptationTime"));
    initialAdaptationFlowIncrement = util::str2num<double>(parameters.find("InitialAdaptationFlowIncrement"));
    stimMotion=SINUSOIDAL_MOTION;
    trialMode = INITIALADAPTATION;

    headEyeCoords.init(Vector3d(interoculardistance/2,0,0),Vector3d(interoculardistance/2,0,0), Vector3d(0,0,0),Vector3d(0,10,0),Vector3d(0,0,10),interoculardistance );
    eyeCalibration=headEyeCoords.getRightEye();
}
Code example #5
void initVariables()
{
    interoculardistance = str2num<double>(parameters.find("IOD"));
    stimulusEmiCycles= atoi(parameters.find("StimulusEmiCycles").c_str());
    trial.init(parameters);
    factors = trial.getNext(); // Initialize the factors in order to start from trial 1

    if ( atoi(parameters.find("DrawOccluder").c_str()) ==1 )
    {
        redDotsPlane.setNpoints(500);  // XXX check the point distribution density
        redDotsPlane.setDimensions(200,200,0.1);
    }
    else
    {
        redDotsPlane.setNpoints(75);  // XXX check the point distribution density
        redDotsPlane.setDimensions(50,50,0.1);
    }
    //redDotsPlane.setSlantTilt( factors["Slant"], (int) factors["Tilt"] );
    redDotsPlane.compute();
    stimDrawer.setStimulus(&redDotsPlane);
    stimDrawer.initList(&redDotsPlane);

    /** Bound check things **/
    signsX.push_back(false);
    signsX.push_back(false);

    signsY.push_back(false);
    signsY.push_back(false);

    headEyeCoords.init(Vector3d(-32.5,0,0),Vector3d(32.5,0,0), Vector3d(0,0,0),Vector3d(0,10,0),Vector3d(0,0,10),interoculardistance );
    eyeCalibration=headEyeCoords.getRightEye();
    model.load("../data/objmodels/occluder.obj");
}
Code example #6
void idle()
{   
    double deltaT=1E-2;
    optotrak.updateMarkers();
    markers = optotrak.getAllMarkers();

	allVisiblePlatform = isVisible(markers.at(15).p) && isVisible(markers.at(16).p);
    allVisibleThumb = isVisible(markers.at(11).p) && isVisible(markers.at(12).p) && isVisible(markers.at(13).p);
    allVisibleIndex = isVisible(markers.at(7).p) && isVisible(markers.at(8).p) && isVisible(markers.at(9).p);
    allVisibleFingers = allVisibleThumb && allVisibleIndex;

    allVisiblePatch = isVisible(markers.at(1).p) && isVisible(markers.at(2).p) && isVisible(markers.at(3).p);
    allVisibleHead = allVisiblePatch && isVisible(markers.at(17).p) && isVisible(markers.at(18).p);

    headEyeCoords.update(markers.at(1).p,markers.at(2).p,markers.at(3).p);
	// update thumb coordinates
    thumbCoords.update(markers.at(11).p,markers.at(12).p,markers.at(13).p);
	// update index coordinates
    indexCoords.update(markers.at(7).p, markers.at(8).p, markers.at(9).p);
	
    eyeLeft = headEyeCoords.getLeftEye();
    eyeRight = headEyeCoords.getRightEye();

	realThumb = thumbCoords.getP1();
	realIndex = indexCoords.getP1();

	reflectedIndex = getReflected(realIndex,mirrorPlane);
	reflectedThumb = getReflected(realThumb,mirrorPlane);
}
Code example #7
File: demoBuddha.cpp, project: guendas/cncsvision
void update(int value)
{   markers = optotrak.getAllMarkers();
    headEyeCoords.update(markers[1].p,markers[2].p,markers[3].p);

    eyeLeft = headEyeCoords.getLeftEye();
    eyeRight = headEyeCoords.getRightEye();

    glutPostRedisplay();
    glutTimerFunc(TIMER_MS, update, 0);
}
Code example #8
/**
 * @brief drawFixation
 */
void drawFixation()
{
    double circleRadius = parameters.get("MaxCircleRadius");	// millimeters
    double zBoundary    = parameters.get("MaxZOscillation"); // millimeters
    // Projection of view normal on the focal plane
    Vector3d directionOfSight = (headEyeCoords.getRigidStart().getFullTransformation().linear()*Vector3d(0,0,-1)).normalized();
    Eigen::ParametrizedLine<double,3> lineOfSightRight = Eigen::ParametrizedLine<double,3>::Through( headEyeCoords.getRightEye() , headEyeCoords.getRightEye()+directionOfSight );
    Eigen::Hyperplane<double,3> focalPlane = Eigen::Hyperplane<double,3>::Through( Vector3d(1,0,focalDistance), Vector3d(0,1,focalDistance),Vector3d(0,0,focalDistance) );
    double lineOfSightRightDistanceToFocalPlane = lineOfSightRight.intersection(focalPlane);
    Vector3d opticalAxisToFocalPlaneIntersection = lineOfSightRightDistanceToFocalPlane *(directionOfSight)+ (headEyeCoords.getRightEye());

    switch ( headCalibrationDone )
    {
    case 1:
    {
        // STIM_FIXED stimulus at (0,0,focalDistance)
        glPushAttrib(GL_POINT_BIT);
        glColor3fv(glRed);
        glPointSize(5);
        glBegin(GL_POINTS);
        glVertex3d(0,0,focalDistance);
		glVertex3d(headEyeCoords.getRightEye().x(),headEyeCoords.getRightEye().y(),focalDistance);
        glEnd();
        glPopAttrib();
        break;
    }
    case 2:
    {
        // STIM_FIXED stimulus + projected points
        glPushAttrib( GL_ALL_ATTRIB_BITS );
        glPointSize(5);
		glLineWidth(2);
		
        glBegin(GL_POINTS);
        glColor3fv(glRed);
        glVertex3d(0,0,focalDistance);
		glColor3fv(glBlue);
		glVertex3dv(opticalAxisToFocalPlaneIntersection.data());
		glColor3fv(glWhite);
		glVertex3d(headEyeCoords.getRightEye().x(),headEyeCoords.getRightEye().y(),focalDistance);
        glEnd();

		double r2EyeRight = pow(headEyeCoords.getRightEye().x(),2)+pow(headEyeCoords.getRightEye().y(),2);
        // Draw the calibration circle
        if ( pow(opticalAxisToFocalPlaneIntersection.x(),2)+pow(opticalAxisToFocalPlaneIntersection.y(),2) <= circleRadius*circleRadius && abs(headEyeCoords.getRightEye().z()) < zBoundary && r2EyeRight<circleRadius*circleRadius )
        {
			readyToStart=true;
            drawCircle(circleRadius,0,0,focalDistance,glGreen);
        }
        else
        {
            drawCircle(circleRadius,0,0,focalDistance,glRed);
        }
        glPopAttrib();
		break;
	}
    }
}
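drawFixation() above finds where the right eye's line of sight pierces the focal plane using Eigen's geometry module: ParametrizedLine::intersection() returns the signed distance along the line, which is then turned back into a 3D point. A minimal sketch of that step with illustrative eye position, viewing direction, and focal distance (not values from the experiments):

#include <iostream>
#include <Eigen/Dense>
using Eigen::Vector3d;

int main()
{
    // Illustrative values, not taken from the experiments
    const double focalDistance = -418.5;            // assumed focal distance (mm)
    Vector3d eyeRight(30.0, 5.0, 0.0);              // assumed right-eye position
    Vector3d directionOfSight(0.05, 0.0, -1.0);
    directionOfSight.normalize();

    // Focal plane z = focalDistance, defined through three of its points (as in drawFixation)
    Eigen::Hyperplane<double,3> focalPlane = Eigen::Hyperplane<double,3>::Through(
        Vector3d(1,0,focalDistance), Vector3d(0,1,focalDistance), Vector3d(0,0,focalDistance));

    // Line of sight through the eye along the viewing direction
    Eigen::ParametrizedLine<double,3> lineOfSight =
        Eigen::ParametrizedLine<double,3>::Through(eyeRight, eyeRight + directionOfSight);

    // intersection() returns the signed distance t along the (unit-length) line direction,
    // so the 3D intersection point is origin + t * direction
    double t = lineOfSight.intersection(focalPlane);
    Vector3d hit = eyeRight + t * directionOfSight;

    std::cout << "Line of sight meets the focal plane at " << hit.transpose() << std::endl;
    return 0;
}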
Code example #9
void idle()
{
Timer frameTimer; frameTimer.start();
    // Timing things
    timeFrame+=1;

    double oscillationPeriod = factors.at("StimulusDuration")*TIMER_MS;

    switch (stimMotion)
    {
    case SAWTOOTH_MOTION:
        periodicValue = oscillationAmplitude*mathcommon::sawtooth(timeFrame,oscillationPeriod);
        break;
    case TRIANGLE_MOTION:
        periodicValue = oscillationAmplitude*mathcommon::trianglewave(timeFrame,oscillationPeriod);
        break;
    case SINUSOIDAL_MOTION:
        periodicValue = oscillationAmplitude*sin(3.14*timeFrame/(oscillationPeriod));
        break;
    default:
        SAWTOOTH_MOTION; // this bare expression has no effect; periodicValue keeps its previous value
    }

    timingFile << totalTimer.getElapsedTimeInMilliSec() << " " << periodicValue << endl;

    // Simulate head translation
    // Coordinates picker
    markers[1] = Vector3d(0,0,0);
    markers[2] = Vector3d(0,10,0);
    markers[3] = Vector3d(0,0,10);

    headEyeCoords.update(markers[1],markers[2],markers[3]);

    eyeLeft = headEyeCoords.getLeftEye();
    eyeRight = headEyeCoords.getRightEye();

    Vector3d fixationPoint = (headEyeCoords.getRigidStart().getFullTransformation() * ( Vector3d(0,0,focalDistance) ) );
    // Projection of view normal on the focal plane
    Eigen::ParametrizedLine<double,3> pline = Eigen::ParametrizedLine<double,3>::Through(eyeRight,fixationPoint);
    projPoint = pline.intersection(focalPlane)*((fixationPoint - eyeRight).normalized()) + eyeRight;

    stimTransformation.matrix().setIdentity();
    stimTransformation.translation() <<0,0,focalDistance;

	Timer sleepTimer;
	sleepTimer.sleep((TIMER_MS - frameTimer.getElapsedTimeInMilliSec())/2);
}
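The periodicValue driving the stimulus above depends on the selected waveform; the sinusoidal case is computed inline, while sawtooth and trianglewave come from the project's mathcommon library. The helpers below are hypothetical stand-ins written only to illustrate the idea, not the mathcommon implementations:

#include <cmath>
#include <cstdio>

// Hypothetical stand-ins for mathcommon::sawtooth and mathcommon::trianglewave:
// each maps a frame counter and a period to a value in roughly [-1, 1].
static double sawtoothWave(double t, double period)
{
    double phase = std::fmod(t, period) / period;   // 0..1
    return 2.0 * phase - 1.0;                       // ramp from -1 to +1
}

static double triangleWave(double t, double period)
{
    double phase = std::fmod(t, period) / period;   // 0..1
    return 1.0 - 4.0 * std::fabs(phase - 0.5);      // -1 -> +1 -> -1
}

int main()
{
    const double oscillationAmplitude = 10.0;       // illustrative amplitude
    const double oscillationPeriod    = 100.0;      // illustrative period, in frames
    for (int timeFrame = 0; timeFrame <= 100; timeFrame += 25)
    {
        double saw  = oscillationAmplitude * sawtoothWave(timeFrame, oscillationPeriod);
        double tri  = oscillationAmplitude * triangleWave(timeFrame, oscillationPeriod);
        double sine = oscillationAmplitude * std::sin(3.14 * timeFrame / oscillationPeriod); // as in idle()
        std::printf("%3d  %8.3f  %8.3f  %8.3f\n", timeFrame, saw, tri, sine);
    }
    return 0;
}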
Code example #10
// This function is the one that runs all the math in the background, so this is where you should add:
// 1) Continuous writing to file of the coordinates you want to save
// 2) Extraction of the coordinates from the previously defined rigid bodies; see, for example,
//    how it is done for eyeLeft and eyeRight, or for thumb and index
void idle()
{
timeFile << globalTimer.getElapsedTimeInMilliSec() << endl;
    optotrak->updateMarkers();
    markers = optotrak->getAllMarkers();
    // Coordinates picker
	allVisiblePatch = isVisible(markers[5].p) && isVisible(markers[6].p) && isVisible(markers[7].p);
    headEyeCoords.update(markers[5],markers[6],markers[7]);
    eyeLeft = headEyeCoords.getLeftEye();
    eyeRight = headEyeCoords.getRightEye();
	
	eyeRight = Vector3d(interoculardistance/2,0,0);
	eyeLeft = -eyeRight;

	thumb = thumbCoords.getP1();
	index = indexCoords.getP1();
}
Code example #11
void update(int value)
{   optotrak.updatePoints();
    markers = optotrak.getAllPoints();

    headEyeCoords.update(markers[1],markers[2],markers[3]);
    modelCoordinates.update(markers[10],markers[19],markers[20]);
    if ( isVisible(markers[20]+markers[19]+markers[10]) )
    {
        rigidCurrent.setRigidBody(markers[20], markers[19],modelCoordinates.getFinger() );
        rigidAux.computeTransformation(rigidCurrent,true);	// true the scaling matrix
    }
    eyeLeft = headEyeCoords.getLeftEye();
    eyeRight = headEyeCoords.getRightEye();

    glutPostRedisplay();
    glutTimerFunc(TIMER_MS, update, 0);
}
Code example #12
File: stimulusTest.cpp, project: guendas/cncsvision
// This function is the one that runs all the math in the background, so this is where you should add:
// 1) Continuous writing to file of the coordinates you want to save
// 2) Extraction of the coordinates from the previously defined rigid bodies; see, for example,
//    how it is done for eyeLeft and eyeRight, or for thumb and index
void idle()
{
timeFile << globalTimer.getElapsedTimeInMilliSec() << endl;
    eyeLeft = headEyeCoords.getLeftEye();
    eyeRight = headEyeCoords.getRightEye();
	
	eyeRight = Vector3d(interoculardistance/2,0,0);
	eyeLeft = -eyeRight;

	thumb = thumbCoords.getP1();
	index = indexCoords.getP1();

	dz = zedge + (jitter/2);
	dx = xedge+jitterX;

	r = sqrt(dx*dz/4 + dz*dz/4);

}
Code example #13
// This function is the one that runs all the math in the background, so this is where you should add:
// 1) Continuous writing to file of the coordinates you want to save
// 2) Extraction of the coordinates from the previously defined rigid bodies; see, for example,
//    how it is done for eyeLeft and eyeRight, or for thumb and index
void idle()
{
timeFile << globalTimer.getElapsedTimeInMilliSec() << endl;
    //optotrak->updateMarkers();
    //markers = optotrak->getAllMarkers();
    // Coordinates picker
	//allVisiblePatch = isVisible(markers[5].p) && isVisible(markers[6].p) && isVisible(markers[7].p);
    //headEyeCoords.update(markers[5],markers[6],markers[7]);
    eyeLeft = headEyeCoords.getLeftEye();
    eyeRight = headEyeCoords.getRightEye();
	
	eyeRight = Vector3d(interoculardistance/2,0,0);
	eyeLeft = -eyeRight;

	thumb = thumbCoords.getP1();
	index = indexCoords.getP1();

	dz = zedge + (jitter/2);
	dx = xedge+jitterX;

	r = sqrt(dx*dz/4 + dz*dz/4);
}
Code example #14
/**
 * @brief drawInfo
 */
void drawInfo()
{
	if (infoDraw==false)
		return;
    GLText text(SCREEN_WIDTH,SCREEN_HEIGHT,glWhite);
	text.enterTextInputMode();

    switch ( headCalibrationDone )
    {
    case 0:
    {
        if ( allVisibleHead )
            text.draw("PRESS SPACEBAR TO CALIBRATE");
        else
            text.draw("BE VISIBLE...");
        break;
    }
    case 1:
    {
        break;
    }
    }
	text.draw("HeadCalibration="+util::stringify<int>(headCalibrationDone));
	text.draw("TrialMode=" + util::stringify<int>(trialMode));
	text.draw("ReadyToStart="+util::stringify<int>(readyToStart) );
	text.draw("RightEye="+util::stringify< Eigen::Matrix<int,1,3> >(headEyeCoords.getRightEye().transpose().cast<int>()) );
	/*
	if ( !trial.getCurrent().empty() )
	{
		for ( map<std::string,std::string>::iterator iter = trial.getCurrentAsMapToString().begin(); iter!=trial.getCurrentAsMapToString().end();++iter )
		{
			text.draw(iter->first + "= " + iter->second);
		}
	}
	*/
	text.leaveTextInputMode();
}
Code example #15
// This function is the one that runs all the math in the background, so this is where you should add:
// 1) Continuous writing to file of the coordinates you want to save
// 2) Extraction of the coordinates from the previously defined rigid bodies; see, for example,
//    how it is done for eyeLeft and eyeRight, or for thumb and index
void idle()
{

    //optotrak->updateMarkers();
    //markers = optotrak->getAllMarkers();
    // Coordinates picker
	//allVisiblePatch = isVisible(markers[5].p) && isVisible(markers[6].p) && isVisible(markers[7].p);
    //headEyeCoords.update(markers[5],markers[6],markers[7]);
    eyeLeft = headEyeCoords.getLeftEye();
    eyeRight = headEyeCoords.getRightEye();
	
	eyeRight = Vector3d(interoculardistance/2,0,0);
	eyeLeft = -eyeRight;

	thumb = thumbCoords.getP1();
	index = indexCoords.getP1();

	dz = zedge + jitter;
	dx = xedge+jitterX;

	r = sqrt(dx*dz/4 + dz*dz/4);

	frame++;
}
Code example #16
void initVariables()
{
    totalTimer.start();
    interoculardistance = str2num<double>(parameters.find("IOD"));
    trial.init(parameters);
    factors = trial.getNext(); // Initialize the factors in order to start from trial 1

    // Set up the stimulus and its drawer
    redDotsPlane.setNpoints(75);
    redDotsPlane.setDimensions(50,50,0.1);
    redDotsPlane.compute();
    stimDrawer.initList(&redDotsPlane,glRed);

    // Set up the fixation strip and its drawer
    stripPlane.setNpoints(N_STRIP_POINTS);
    stripPlane.setDimensions(STRIP_WIDTH,STRIP_HEIGHT,0.01);
    stripPlane.compute();
    stripDrawer.initList(&stripPlane,glRed);

    stimMotion=SINUSOIDAL_MOTION;

    headEyeCoords.init(Vector3d(-32.5,0,0),Vector3d(32.5,0,0), Vector3d(0,0,0),Vector3d(0,10,0),Vector3d(0,0,10),interoculardistance );
    eyeCalibration=headEyeCoords.getRightEye();
}
Code example #17
// This function is the one that runs all the math in the background, so this is where you should add:
// 1) Continuous writing to file of the coordinates you want to save
// 2) Extraction of the coordinates from the previously defined rigid bodies; see, for example,
//    how it is done for eyeLeft and eyeRight, or for thumb and index
void idle()
{
	optotrak->updateMarkers();
	//cerr << deltaT << endl;
	markers = optotrak->getAllMarkers();
	// Coordinates picker
	allVisiblePlatform = isVisible(markers[1].p);
	allVisibleThumb = isVisible(markers[15].p) && isVisible(markers[17].p) && isVisible(markers[18].p);
	allVisibleIndex = isVisible(markers[13].p) && isVisible(markers[14].p) && isVisible(markers[16].p);
	allVisibleFingers = allVisibleThumb && allVisibleIndex;

	allVisiblePatch = isVisible(markers[5].p) && isVisible(markers[6].p) && isVisible(markers[7].p);
	allVisibleHead = allVisiblePatch && isVisible(markers[1].p);

	if ( allVisiblePatch )
		headEyeCoords.update(markers[5].p,markers[6].p,markers[7].p);


	if ( allVisibleThumb )
		thumbCoords.update(markers[15].p,markers[17].p,markers[18].p);
	if ( allVisibleIndex )
		indexCoords.update(markers[13].p, markers[14].p, markers[16].p );
	if ( headCalibrationDone==3 && fingerCalibrationDone==3 )
		{
		if ( !allVisibleIndex )
			occludedFrames++;
		if ( !allVisibleThumb )
			occludedFrames++;
		}
	
	if(headCalibration)
	{
	eyeLeft = headEyeCoords.getLeftEye();
	eyeRight = headEyeCoords.getRightEye();
	} else	{
	eyeRight = Vector3d(interoculardistance/2,0,0);
	eyeLeft = -eyeRight;
	}

	thumb = thumbCoords.getP1();
	index = indexCoords.getP1();

	singleMarker = markers.at(4).p.transpose();

	if(!letStimTimer)
		stimTimer.start();

	if( endTrial && fingersAtStart)
	{
		advanceTrial();
	} 

	#ifdef WRITE
	// Write to file
	if ( headCalibrationDone==3 && fingerCalibrationDone==3 )
		{
		markersFile << fixed << trialNumber << "\t" << 
			eyeLeft.transpose() << "\t" << eyeRight.transpose()  << "\t" <<
			//markers.at(4).p.transpose() << "\t" << 
			index.transpose() << "\t" << 
			thumb.transpose() << "\t" << 
			fingersAtStart << "\t" <<
			isStimulusDrawn
			;

		markersFile << endl;
		}
#endif

}
Code example #18
void update(int value)
{
    frameTimer.start();
// Read the experiment from file; if the file has ended, exit immediately
    if ( inputStream.eof() )
    {   exit(0);
    }

    if ( isReading )
    {   // This reads a line (frame) in inputStream
        readline(inputStream, trialNumber,  headCalibration,  trialMode, pointMatrix );
        headEyeCoords.update(pointMatrix.col(0),pointMatrix.col(1),pointMatrix.col(2));
        Affine3d active = headEyeCoords.getRigidStart().getFullTransformation();
        eulerAngles.init( headEyeCoords.getRigidStart().getFullTransformation().rotation() );

        eyeLeft = headEyeCoords.getLeftEye();
        eyeRight= headEyeCoords.getRightEye();
        //cerr << eyeRight.transpose() << endl;
        cyclopeanEye = (eyeLeft+eyeRight)/2.0;

        if ( trialMode == STIMULUSMODE )
            stimulusFrames++;
        if ( trialMode == FIXATIONMODE )
            stimulusFrames=0;

        // Projection of view normal on the focal plane
        Vector3d directionOfSight = (active.rotation()*Vector3d(0,0,-1)).normalized();
        Eigen::ParametrizedLine<double,3> lineOfSightRight = Eigen::ParametrizedLine<double,3>::Through( eyeRight , eyeRight+directionOfSight );

        double lineOfSightRightDistanceToFocalPlane = lineOfSightRight.intersection(focalPlane);

        //double lenghtOnZ = (active*(center-eyeCalibration )+eyeRight).z();
        projPointEyeRight = lineOfSightRightDistanceToFocalPlane *(directionOfSight)+ (eyeRight);
        // Second projection: the fixation point computed with non-constant z, parallel to projPointEyeRight
        lineOfSightRightDistanceToFocalPlane= (( active.rotation()*(center)) - eyeRight).norm();
        Vector3d secondProjection = lineOfSightRightDistanceToFocalPlane *(directionOfSight)+ (eyeRight);
        projPointEyeRight=secondProjection ;

        // Compute the translation to move the eye in order to avoid shear components
        Vector3d posAlongLineOfSight = (headEyeCoords.getRigidStart().getFullTransformation().rotation())*(eyeRight -eyeCalibration);
        // GENERATION OF PASSIVE MODE.
        // HERE WE MOVE THE SCREEN TO FACE THE OBSERVER's EYE
        if ( passiveMode )
        {   initProjectionScreen(0, headEyeCoords.getRigidStart().getFullTransformation()*Translation3d(center));
        }
        else
            initProjectionScreen(focalDistance, Affine3d::Identity());

        if ( trialMode == STIMULUSMODE )
        {
            // IMPORTANT Reset the previous status of transformations
            objectActiveTransformation[0].setIdentity();
            objectActiveTransformation[1].setIdentity();
            // PLANE 0 transformation (the plane below)
            alpha = atan( eyeRight.x()/abs(projPointEyeRight.z()) );
            if ( overallTilt )
            {
                instantPlaneSlant = alphaMultiplier*alpha+toRadians(-factors.at("DeltaSlant")-factors.at("StillPlaneSlant"));
                AngleAxis<double> aa0( instantPlaneSlant,Vector3d::UnitY());
                objectActiveTransformation[0]*=aa0;
                double planesYOffset = factors.at("PlanesCentersYDistance")*(whichPlaneDrawUp ? 1 : -1);
                objectActiveTransformation[0].translation() = Vector3d(0,planesYOffset,focalDistance);

                // PLANE 1 transformation (the plane above)
                AngleAxis<double> aa1(-toRadians(factors.at("StillPlaneSlant")),Vector3d::UnitY());
                objectActiveTransformation[1]*=aa1;
                objectActiveTransformation[1].translation() = Vector3d(0,-planesYOffset,focalDistance);
            }
            else
            {
                instantPlaneSlant = alphaMultiplier*alpha+toRadians(factors.at("DeltaSlant")+factors.at("StillPlaneSlant"));
                AngleAxis<double> aa0( instantPlaneSlant,Vector3d::UnitY());
                objectActiveTransformation[0]*=aa0;
                double planesYOffset = factors.at("PlanesCentersYDistance")*(whichPlaneDrawUp ? 1 : -1);
                objectActiveTransformation[0].translation() = Vector3d(0,planesYOffset,focalDistance);

                // PLANE 1 transformation (the plane above)
                AngleAxis<double> aa1(toRadians(factors.at("StillPlaneSlant")),Vector3d::UnitY());
                objectActiveTransformation[1]*=aa1;
                objectActiveTransformation[1].translation() = Vector3d(0,-planesYOffset,focalDistance);
            }

            objectPassiveTransformation[0] = ( cam.getModelViewMatrix()*objectActiveTransformation[0] );
            objectPassiveTransformation[1] = ( cam.getModelViewMatrix()*objectActiveTransformation[1] );

            //cout << toDegrees(instantPlaneSlant) << endl;

            // **************** COMPUTE THE OPTIC FLOWS **************************
            // 1) Project the points to screen by computing their coordinates on focalPlane in passive (quite complicate, see the specific method)
            // *********** FOR THE MOVING PLANE *************
            vector<Vector3d> projPointsMovingPlane = stimDrawer[0].projectStimulusPoints(objectActiveTransformation[0],headEyeCoords.getRigidStart().getFullTransformation(),cam,focalDistance, screen, eyeCalibration,passiveMode,false);

            // 2) Get the angles formed by stimulus and observer
            // updating with the latest values
            Vector3d oldAlphaMoving = flowsAnglesAlphaMoving,oldBetaMoving=flowsAnglesBetaMoving;
            // alpha is the "pitch" angle, beta is the "yaw" angle
            // Here we must use points 4, 5, 8 of the stimulus
            flowsAnglesAlphaMoving(0)  =  ( atan2(projPointsMovingPlane[4].x(), abs(focalDistance) ) );
            flowsAnglesAlphaMoving(1)  =  ( atan2(projPointsMovingPlane[5].x(), abs(focalDistance) ) );
            flowsAnglesAlphaMoving(2)  =  ( atan2(projPointsMovingPlane[8].x(), abs(focalDistance) ) );

            flowsAnglesBetaMoving(0)      =  ( atan2(projPointsMovingPlane[4].y(), abs(focalDistance) ) );
            flowsAnglesBetaMoving(1)      =  ( atan2(projPointsMovingPlane[5].y(), abs(focalDistance) ) );
            flowsAnglesBetaMoving(2)      =  ( atan2(projPointsMovingPlane[8].y(), abs(focalDistance) ) );

            // 3) Fill the matrix of derivatives
            MatrixXd angVelocitiesMoving(6,1);
            angVelocitiesMoving(0) = flowsAnglesAlphaMoving(0)-oldAlphaMoving(0);
            angVelocitiesMoving(1) = flowsAnglesBetaMoving(0)-oldBetaMoving(0);
            angVelocitiesMoving(2) = flowsAnglesAlphaMoving(1)-oldAlphaMoving(1);
            angVelocitiesMoving(3) = flowsAnglesBetaMoving(1)-oldBetaMoving(1);
            angVelocitiesMoving(4) = flowsAnglesAlphaMoving(2)-oldAlphaMoving(2);
            angVelocitiesMoving(5) = flowsAnglesBetaMoving(2)-oldBetaMoving(2);
            angVelocitiesMoving /= ((double)TIMER_MS/(double)1000);

            // 4) Fill the coefficient matrix, to solve the linear system
            MatrixXd coeffMatrixMoving(6,6);
            coeffMatrixMoving <<
                              1, flowsAnglesAlphaMoving(0),   flowsAnglesBetaMoving(0), 0, 0, 0,
                                 0, 0,    0,    1,flowsAnglesAlphaMoving(0),flowsAnglesBetaMoving(0),
                                 1, flowsAnglesAlphaMoving(1),   flowsAnglesBetaMoving(1), 0, 0, 0,
                                 0, 0,    0,    1,flowsAnglesAlphaMoving(1),flowsAnglesBetaMoving(1),
                                 1, flowsAnglesAlphaMoving(2),   flowsAnglesBetaMoving(2), 0, 0, 0,
                                 0, 0,    0,    1,flowsAnglesAlphaMoving(2),flowsAnglesBetaMoving(2)
                                 ;
            // 5) Solve the linear system by the robust colPivHouseholderQr decomposition (see Eigen for details http://eigen.tuxfamily.org/dox/TutorialLinearAlgebra.html )
            MatrixXd velocitiesMoving = coeffMatrixMoving.colPivHouseholderQr().solve(angVelocitiesMoving);
            // 6) Write the output to file flowsFileMoving
            flowsFileMoving << fixed << trialNumber << "\t" <<  //1
                            stimulusFrames << " " <<
                            factors.at("DeltaSlant")<< " " <<
                            factors.at("StillPlaneSlant") << " " <<
                            overallTilt << " " <<
                            projPointsMovingPlane[4].transpose() << " " <<
                            projPointsMovingPlane[5].transpose() << " " <<
                            projPointsMovingPlane[8].transpose() << " " <<
                            angVelocitiesMoving.transpose() << " " <<
                            velocitiesMoving.transpose() << endl;

            // ********************* FLOWS FOR THE STILL PLANE **************
            // Here we must repeat the same things for the still plane
            vector<Vector3d> projPointsStillPlane = stimDrawer[1].projectStimulusPoints(objectActiveTransformation[1],headEyeCoords.getRigidStart().getFullTransformation(),cam,focalDistance, screen, eyeCalibration,passiveMode,false);

            // 2) Get the angles formed by stimulus and observer
            // updating with the latest values
            Vector3d oldAlphaStill = flowsAnglesAlphaStill,oldBetaStill=flowsAnglesBetaStill;
            // alpha is the "pitch" angle, beta is the "yaw" angle
            // Here we must use points 4, 5, 8 of the stimulus
            flowsAnglesAlphaStill(0)  =  ( atan2(projPointsStillPlane[4].x(), abs(focalDistance) ) );
            flowsAnglesAlphaStill(1)  =  ( atan2(projPointsStillPlane[5].x(), abs(focalDistance) ) );
            flowsAnglesAlphaStill(2)  =  ( atan2(projPointsStillPlane[8].x(), abs(focalDistance) ) );

            flowsAnglesBetaStill(0)      =  ( atan2(projPointsStillPlane[4].y(), abs(focalDistance) ) );
            flowsAnglesBetaStill(1)      =  ( atan2(projPointsStillPlane[5].y(), abs(focalDistance) ) );
            flowsAnglesBetaStill(2)      =  ( atan2(projPointsStillPlane[8].y(), abs(focalDistance) ) );

            // 3) Fill the matrix of derivatives
            MatrixXd angVelocitiesStill(6,1);
            angVelocitiesStill(0) = flowsAnglesAlphaStill(0)-oldAlphaStill(0);
            angVelocitiesStill(1) = flowsAnglesBetaStill(0)-oldBetaStill(0);
            angVelocitiesStill(2) = flowsAnglesAlphaStill(1)-oldAlphaStill(1);
            angVelocitiesStill(3) = flowsAnglesBetaStill(1)-oldBetaStill(1);
            angVelocitiesStill(4) = flowsAnglesAlphaStill(2)-oldAlphaStill(2);
            angVelocitiesStill(5) = flowsAnglesBetaStill(2)-oldBetaStill(2);
            angVelocitiesStill /= ((double)TIMER_MS/(double)1000);

            // 4) Fill the coefficient matrix, to solve the linear system
            MatrixXd coeffMatrixStill(6,6);
            coeffMatrixStill <<
                             1, flowsAnglesAlphaStill(0),   flowsAnglesBetaStill(0), 0, 0, 0,
                                0, 0,    0,    1,flowsAnglesAlphaStill(0),flowsAnglesBetaStill(0),
                                1, flowsAnglesAlphaStill(1),   flowsAnglesBetaStill(1), 0, 0, 0,
                                0, 0,    0,    1,flowsAnglesAlphaStill(1),flowsAnglesBetaStill(1),
                                1, flowsAnglesAlphaStill(2),   flowsAnglesBetaStill(2), 0, 0, 0,
                                0, 0,    0,    1,flowsAnglesAlphaStill(2),flowsAnglesBetaStill(2)
                                ;
            // 5) Solve the linear system by the robust colPivHouseholderQr decomposition (see Eigen for details http://eigen.tuxfamily.org/dox/TutorialLinearAlgebra.html )
            MatrixXd velocitiesStill = coeffMatrixStill.colPivHouseholderQr().solve(angVelocitiesStill);
            // 6) Write the output to file flowsFileStill
            flowsFileStill << fixed << trialNumber << "\t" <<  // 1
                           stimulusFrames << " " <<	// 2
                           factors.at("DeltaSlant")<< " " << // 3
                           factors.at("StillPlaneSlant") << " " << // 4
                           overallTilt << " " <<
                           projPointsStillPlane[4].transpose() << " " << // 5,6,7
                           projPointsStillPlane[5].transpose() << " " << // 8,9,10
                           projPointsStillPlane[8].transpose() << " " << // 11,12,13
                           angVelocitiesStill.transpose() << " " << // 14, 15, 16, 17, 18, 19
                           velocitiesStill.transpose() << endl;	// 20, 21, 22, 23, 24, 25
            // **************** END OF OPTIC FLOWS COMPUTATION
        }
        /*
        ofstream outputfile;
        outputfile.open("data.dat");
        outputfile << "Subject Name: " << parameters.find("SubjectName") << endl;
        outputfile << "Passive matrix:" << endl << objectPassiveTransformation.matrix() << endl;
        outputfile << "Yaw: " << toDegrees(eulerAngles.getYaw()) << endl <<"Pitch: " << toDegrees(eulerAngles.getPitch()) << endl;
        outputfile << "EyeLeft: " <<  headEyeCoords.getLeftEye().transpose() << endl;
        outputfile << "EyeRight: " << headEyeCoords.getRightEye().transpose() << endl << endl;
        outputfile << "Factors:" << endl;
        for (map<string,double>::iterator iter=factors.begin(); iter!=factors.end(); ++iter)
        {   outputfile << "\t\t" << iter->first << "= " << iter->second << endl;
        }
        */

    }

    if ( trialMode == PROBEMODE )
        isReading=false;

    glutPostRedisplay();
    glutTimerFunc(TIMER_MS, update, 0);
}
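Steps 3 to 5 of the optic-flow computation above boil down to solving a 6x6 linear system with Eigen's column-pivoting Householder QR. A minimal sketch of that solve, with made-up angles and angular velocities in place of the measured ones:

#include <iostream>
#include <Eigen/Dense>
using Eigen::MatrixXd;

int main()
{
    // Illustrative stand-ins for the three (alpha, beta) angle pairs of stimulus points 4, 5, 8
    double a0 = 0.010, b0 = 0.020, a1 = -0.030, b1 = 0.010, a2 = 0.020, b2 = -0.010;

    // Coefficient matrix with the same structure used for the moving and still planes
    MatrixXd coeffMatrix(6,6);
    coeffMatrix << 1, a0, b0, 0, 0,  0,
                   0, 0,  0,  1, a0, b0,
                   1, a1, b1, 0, 0,  0,
                   0, 0,  0,  1, a1, b1,
                   1, a2, b2, 0, 0,  0,
                   0, 0,  0,  1, a2, b2;

    // Illustrative angular velocities (the right-hand side of the system)
    MatrixXd angVelocities(6,1);
    angVelocities << 0.001, -0.002, 0.003, 0.000, -0.001, 0.002;

    // Solve coeffMatrix * velocities = angVelocities via column-pivoting Householder QR
    MatrixXd velocities = coeffMatrix.colPivHouseholderQr().solve(angVelocities);
    std::cout << velocities.transpose() << std::endl;
    return 0;
}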
Code example #19
void update(int value)
{   // Read the experiment from file; if the file has ended, exit immediately
    if ( inputStream.eof() )
    {   cleanup();
        exit(0);
    }

    if ( isReading )
    {   // This reads a line (frame) in inputStream
        readline(inputStream, trialNumber,  headCalibration,  trialMode, pointMatrix );

        headEyeCoords.update(pointMatrix.col(0),pointMatrix.col(1),pointMatrix.col(2));
        Affine3d active = headEyeCoords.getRigidStart().getFullTransformation();
        eulerAngles.init( headEyeCoords.getRigidStart().getFullTransformation().rotation() );

        eyeLeft = headEyeCoords.getLeftEye();
        eyeRight= headEyeCoords.getRightEye();

        cyclopeanEye = (eyeLeft+eyeRight)/2.0;

		if ( trialMode == STIMULUSMODE )
			stimulusFrames++;
		if ( trialMode == FIXATIONMODE )
			stimulusFrames=0;

        // Projection of view normal on the focal plane
	Vector3d directionOfSight = (active.rotation()*Vector3d(0,0,-1)).normalized();
	Eigen::ParametrizedLine<double,3> lineOfSightRight = Eigen::ParametrizedLine<double,3>::Through( eyeRight , eyeRight+directionOfSight );
	Eigen::ParametrizedLine<double,3> lineOfSightLeft  = Eigen::ParametrizedLine<double,3>::Through( eyeLeft, eyeLeft+directionOfSight );
	
	double lineOfSightRightDistanceToFocalPlane = lineOfSightRight.intersection(focalPlane);
	double lineOfSightLeftDistanceToFocalPlane = lineOfSightLeft.intersection(focalPlane);
	
	//double lenghtOnZ = (active*(center-eyeCalibration )+eyeRight).z();
	projPointEyeRight = lineOfSightRightDistanceToFocalPlane *(directionOfSight)+ (eyeRight);
	projPointEyeLeft= lineOfSightLeftDistanceToFocalPlane * (directionOfSight) + (eyeLeft);
	// Second projection: the fixation point computed with non-constant z, parallel to projPointEyeRight
	lineOfSightRightDistanceToFocalPlane= (( active.rotation()*(center)) - eyeRight).norm();
	Vector3d secondProjection = lineOfSightRightDistanceToFocalPlane *(directionOfSight)+ (eyeRight);
	
	if ( !zOnFocalPlane )
	projPointEyeRight=secondProjection ;

	// Compute the translation to move the eye in order to avoid shear components
	Vector3d posAlongLineOfSight = (headEyeCoords.getRigidStart().getFullTransformation().rotation())*(eyeRight -eyeCalibration);
	// GENERATION OF PASSIVE MODE.
        // HERE WE MOVE THE SCREEN TO FACE THE OBSERVER's EYE
        if ( passiveMode )
        {
            initProjectionScreen(0, headEyeCoords.getRigidStart().getFullTransformation()*Translation3d(center));
        }
        else
            initProjectionScreen(focalDistance, Affine3d::Identity());
        
	objectPassiveTransformation = ( cam.getModelViewMatrix()*objectActiveTransformation );
    
	ofstream outputfile;
	outputfile.open("data.dat");
	outputfile << "Subject Name: " << parameters.find("SubjectName") << endl;
	outputfile << "Passive matrix:" << endl << objectPassiveTransformation.matrix() << endl;
	outputfile << "Yaw: " << toDegrees(eulerAngles.getYaw()) << endl <<"Pitch: " << toDegrees(eulerAngles.getPitch()) << endl;
	outputfile << "EyeLeft: " <<  headEyeCoords.getLeftEye().transpose() << endl;
	outputfile << "EyeRight: " << headEyeCoords.getRightEye().transpose() << endl << endl;
	outputfile << "Slant: " << instantPlaneSlant << endl;
	outputfile << "Factors:" << endl;
	for (map<string,double>::iterator iter=factors.begin(); iter!=factors.end(); ++iter)
	{
		outputfile << "\t\t" << iter->first << "= " << iter->second << endl;
	}
	
	}

    if ( trialMode == PROBEMODE )
        isReading=false;

    glutPostRedisplay();
    glutTimerFunc(TIMER_MS, update, 0);
}
Code example #20
// This function is the one that runs all the math in the background, so this is where you should add:
// 1) Continuous writing to file of the coordinates you want to save
// 2) Extraction of the coordinates from the previously defined rigid bodies; see, for example,
//    how it is done for eyeLeft and eyeRight, or for thumb and index
void idle()
{
	optotrak->updateMarkers();
	//cerr << deltaT << endl;
	markers = optotrak->getAllMarkers();
	// Coordinates picker
	allVisiblePlatform = isVisible(markers[1].p);
	allVisibleIndex = isVisible(markers[13].p) && isVisible(markers[14].p) && isVisible(markers[16].p);
	allVisibleFingers = allVisibleIndex;

	allVisiblePatch = isVisible(markers[5].p) && isVisible(markers[6].p) && isVisible(markers[7].p);
	allVisibleHead = allVisiblePatch && isVisible(markers[1].p);

	if ( allVisiblePatch )
		headEyeCoords.update(markers[5].p,markers[6].p,markers[7].p);

	if ( allVisibleIndex )
	{
		indexCoords.update(markers[13].p, markers[14].p, markers[16].p );
		viewingFrames=1;
	}

	if (fingerCalibrationDone==3 )
	{
		if ( !allVisibleIndex )
		{
			viewingFrames=0;
			occludedFrames++;
		}

		if(index.z() < (-180) && markers[14].v.norm() < 250)
			frontRodShown=true;


		if(index.z() < (-180) && markers[14].v.norm() < 200)
			timeStim++;
	}

	if(!frontRodShown)
		frame++;
	
	if(headCalibration)
	{
	eyeLeft = headEyeCoords.getLeftEye();
	eyeRight = headEyeCoords.getRightEye();
	} else	{
	eyeRight = Vector3d(interoculardistance/2,0,0);
	eyeLeft = -eyeRight;
	}

	index = indexCoords.getP1();

	singleMarker = markers.at(4).p.transpose();

	lastPos[frame%NFrames] = index;

/*	if(index.z() > (startingPoint - 40.0))
		fingersAtStart=true;
	else
		fingersAtStart=false;

	if(fingersAtStart)
		timer.start();
	
	if(endTrial)
		drawBlack();

	if( fingersAtStart && endTrial )
	{
		advanceTrial();
		timer.start();
	}
*/
//	#ifdef WRITE
	// Write to file
	if (fingerCalibrationDone==3 )
		{
		markersFile << fixed << 
			trialNumber << "\t" << 
			index.transpose() << "\t" << 
			frame << "\t" << 
			occludedFrames << "\t" << 
			viewingFrames << "\t" << 
			markers[14].v.norm() << "\t" << 
			timeStim << "\t" <<
			timer.getElapsedTimeInMilliSec()
			;

		markersFile << endl;

		velocityFile <<  fixed << 
			markers[14].p.transpose() << "\t" <<
			markers[14].v.transpose() << "\t" << 
			markers[14].v.norm() //  sqrt( vx^2+vy^2+vz^2 )
			<< endl; 
						//markers[14].v.z()
		}
//#endif

}
Code example #21
// This function is the one that runs all the math in the background, so this is where you should add:
// 1) Continuous writing to file of the coordinates you want to save
// 2) Extraction of the coordinates from the previously defined rigid bodies; see, for example,
//    how it is done for eyeLeft and eyeRight, or for thumb and index
void idle()
{
	optotrak->updateMarkers();
	//cerr << deltaT << endl;
	markers = optotrak->getAllMarkers();
	// Coordinates picker
	allVisiblePlatform = isVisible(markers[1].p);
	allVisibleIndex = isVisible(markers[13].p) && isVisible(markers[14].p) && isVisible(markers[16].p);
	allVisibleThumb = isVisible(markers[15].p) && isVisible(markers[17].p) && isVisible(markers[18].p);
	allVisibleFingers = allVisibleIndex && allVisibleThumb;

	allVisiblePatch = isVisible(markers[5].p) && isVisible(markers[6].p) && isVisible(markers[7].p);
	allVisibleHead = allVisiblePatch && isVisible(markers[1].p);

	if ( allVisiblePatch )
		headEyeCoords.update(markers[5].p,markers[6].p,markers[7].p);

	if ( allVisibleFingers )
	{
		indexCoords.update(markers[13].p, markers[14].p, markers[16].p );
		thumbCoords.update(markers[15].p, markers[17].p, markers[18].p );
		fingersOccluded = 0;
	}

	if (fingerCalibrationDone==3 )
	{
		if ( !allVisibleFingers )
		{
			fingersOccluded=1;
		}
		
		frameN++;

		if(condition == 0)
			condition = unifRand(-1, 1);

		if(index.z() < (-180) && index.y() > -150 && markers[14].v.norm() < 90)
			pointing=true;
		
		if(index.z() > (-80))
			handAtStart = true;
		else
			handAtStart = false;
		
		if(handAtStart)
			checkHandAtStart = true;
		
		if(!checkHandAtStart)
			timer.start();

		if(pointing && !handRecorded)
		{
			recFHP = index.z();
			tFHP = timer.getElapsedTimeInMilliSec();
			handRecorded = true;
		}
		
		if(handAtStart && !pointing)
		{
			timer.start();
			started=false;
		} else
			started=true;

		if(condition < 0) // vision then pointing
		{
			if(timer.getElapsedTimeInMilliSec() > stepVisual) // move to drawing the cross
			{
				moveToPartII = true;
				step = stepVisual;
			}
			if(moveToPartII && timer.getElapsedTimeInMilliSec() > step + 500) // move to drawing the first stimulus
			{
				moveToPartIII = true;
				step = step + 500;
			}
			if(moveToPartIII && timer.getElapsedTimeInMilliSec() > step + 800) // move to drawing black
			{
				moveToPartIV = true;
				step = step + 800;
			}
		} else // pointing then vision
		{
			if(tFHP > 0) // move to drawing the cross
			{
				moveToPartII = true;
				timeToFHP = tFHP;
				step = tFHP;
			} 
			if(moveToPartII && timer.getElapsedTimeInMilliSec() > step + 500) // move to drawing the first stimulus
			{
				moveToPartIII = true;
				step = step + 500;
			}
			if(moveToPartIII && timer.getElapsedTimeInMilliSec() > step + 800 && !training) // move to drawing black
			{
				moveToPartIV = true;
				step = step + 800;
			}
		}
			
	}

	if(headCalibration)
	{
	eyeLeft = headEyeCoords.getLeftEye();
	eyeRight = headEyeCoords.getRightEye();
	} else	{
	eyeRight = Vector3d(interoculardistance/2,0,0);
	eyeLeft = -eyeRight;
	}

	index = indexCoords.getP1();
	thumb = thumbCoords.getP1();

	singleMarker = markers.at(4).p.transpose();

/*	if(pause && !paused)
	{
		paused = true;
	}
	else if(paused && !pause)
	{

		initTrial();
		paused = false;
	}
*/
	// Write to trialFile
	if (fingerCalibrationDone==3 )
		{

		trialFile << fixed <<
			parameters.find("SubjectName") << "\t" <<		//subjName
			trialNumber << "\t" <<							//trialN
			timer.getElapsedTimeInMilliSec() << "\t" <<		//time
			frameN << "\t" <<								//frameN
			index.transpose() << "\t" <<					//indexXraw, indexYraw, indexZraw
			thumb.transpose() << "\t" <<					//thumbXraw, thumbYraw, thumbZraw
			eyeRight.transpose() << "\t" <<					//eyeRXraw, eyeRYraw, eyeRZraw
			eyeLeft.transpose() << "\t" <<					//eyeLXraw, eyeLYraw, eyeLZraw
			fingersOccluded << "\t" <<						//fingersOccluded
			markers[14].v.norm() << "\t" <<					//indexVelraw
			recFHP << "\t" <<								
			tFHP << "\t" << 
			part << "\t" << 
			step << "\t" << 
			moveToPartII << "\t" << 
			moveToPartIII << "\t" << 
			moveToPartIV << "\t" << 
			firstD << "\t" << 
			handRecorded << "\t" << 
			pointing << "\t" << 
			stepVisual << "\t" << 
			attempt
			;

		trialFile << endl;
		
		}

}
Code example #22
// This function is the one that runs all the math in the background, so this is where you should add:
// 1) Continuous writing to file of the coordinates you want to save
// 2) Extraction of the coordinates from the previously defined rigid bodies; see, for example,
//    how it is done for eyeLeft and eyeRight, or for thumb and index
void idle()
{
	optotrak->updateMarkers();
	//cerr << trial.getCurrent()["AbsDepth"] << endl;
	markers = optotrak->getAllMarkers();
	// Coordinates picker
	allVisiblePlatform = isVisible(markers[1].p) && isVisible(markers[2].p);
	allVisibleIndex = isVisible(markers[13].p) && isVisible(markers[14].p) && isVisible(markers[16].p);
	allVisibleThumb = isVisible(markers[15].p) && isVisible(markers[17].p) && isVisible(markers[18].p);
	allVisibleFingers = allVisibleIndex && allVisibleThumb;

	allVisibleObject = isVisible(markers[8].p) && isVisible(markers[11].p) && isVisible(markers[12].p);

	allVisiblePatch = isVisible(markers[10].p) && isVisible(markers[11].p) && isVisible(markers[12].p);
	allVisibleHead = allVisiblePatch && isVisible(markers[9].p);
	
	mirrorAlignment = asin(
			abs((markers[6].p.z()-markers[7].p.z()))/
			sqrt(
			pow(markers[6].p.x()-markers[7].p.x(), 2) +
			pow(markers[6].p.z()-markers[7].p.z(), 2)
			)
			)*180/M_PI;

	if ( allVisiblePatch )
		headEyeCoords.update(markers[10].p,markers[11].p,markers[12].p);

	if(headCalibration)
	{
	eyeLeft = headEyeCoords.getLeftEye();
	eyeRight = headEyeCoords.getRightEye();
	} else	{
	eyeRight = Vector3d(interoculardistance/2,0,0);
	eyeLeft = -eyeRight;
	}

	if ( allVisibleFingers )
	{
		indexCoords.update(markers[13].p, markers[14].p, markers[16].p );
		thumbCoords.update(markers[15].p, markers[17].p, markers[18].p );
		fingersOccluded = 0;
	}

	// Record the calibration platform's position and home position
	if ( isVisible(markers[1].p) && allVisibleObject && fingerCalibrationDone==0 )
	{
		fingerCalibrationDone=1;
		calibration_fingers(fingerCalibrationDone);
	}

	// Record the calibration platform's position and home position
	if ( isVisible(markers[2].p) && allVisibleObject && fingerCalibrationDone==1 )
	{
		fingerCalibrationDone=2;
		calibration_fingers(fingerCalibrationDone);
		beepOk(2);
	}

	if ( allVisibleObject && fingerCalibrationDone==2 )
	{
		upperPin.update(markers[8].p, markers[11].p, markers[12].p );
		lowerPin.update(markers[8].p, markers[11].p, markers[12].p );
	}

	if (fingerCalibrationDone==4 )
	{
		if ( !allVisibleFingers )
		{
			fingersOccluded=1;
		}
		
		frameN++;
		
		if(indexDisappeared && thumbDisappeared)
			fingersDisappeared = true;

		if(index.z() > (-80))
			handAtStart = true;
		else
			handAtStart = false;
		
		index = indexCoords.getP1();
		thumb = thumbCoords.getP1();
		
	}

}
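mirrorAlignment above is the angle, in degrees, between the segment joining markers 6 and 7 and the x axis within the x-z plane, obtained as asin of the z offset over the segment length. The same formula as a standalone helper, with illustrative marker positions (the function name is ours, not the project's):

#include <cmath>
#include <cstdio>
#include <Eigen/Dense>
using Eigen::Vector3d;

// Angle, in degrees, between the segment p6->p7 and the x axis, measured in the x-z plane.
// Helper name and marker values are illustrative, not from the project.
static double mirrorAlignmentDegrees(const Vector3d &p6, const Vector3d &p7)
{
    double dx = p6.x() - p7.x();
    double dz = p6.z() - p7.z();
    return std::asin(std::fabs(dz) / std::sqrt(dx*dx + dz*dz)) * 180.0 / M_PI;
}

int main()
{
    Vector3d marker6(100.0, 0.0, -400.0);
    Vector3d marker7(-100.0, 0.0, -402.5);
    std::printf("mirror alignment = %.2f deg\n", mirrorAlignmentDegrees(marker6, marker7));
    return 0;
}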
Code example #23
void idle()
{

    if (totalTimer.getElapsedTimeInSec() > initialAdaptationTimeInSeconds
            && trialMode == INITIALADAPTATION && experimentStarted )
    {
        trialMode = STIMULUSMODE;
        totalTimer.start();
    }

    if (totalTimer.getElapsedTimeInSec() > fixationDurationInSeconds && trialMode == FIXATIONMODE && experimentStarted )
    {
        timeFrame=0.0;
        trialMode=STIMULUSMODE;
        totalTimer.start();
    }

    if (totalTimer.getElapsedTimeInMilliSec() > stimulusDurationInMilliSeconds && trialMode == STIMULUSMODE && experimentStarted )
    {
        timeFrame=0.0;
        trialMode=PROBEMODE;
        totalTimer.start();
    }

    if (!experimentStarted)
        return;
    Timer frameTimer;
    frameTimer.start();
    // Timing things

    double oscillationPeriod = stimulusDurationInMilliSeconds;
    switch (stimMotion)
    {
    case SAWTOOTH_MOTION:
        periodicValue = oscillationAmplitude*mathcommon::sawtooth(timeFrame,oscillationPeriod);
        break;
    case TRIANGLE_MOTION:
        periodicValue = oscillationAmplitude*mathcommon::trianglewave(timeFrame,oscillationPeriod);
        break;
    case SINUSOIDAL_MOTION:
        periodicValue = oscillationAmplitude*sin(3.14*timeFrame/(oscillationPeriod));
        break;
    default:
        SAWTOOTH_MOTION; // this bare expression has no effect; periodicValue keeps its previous value
    }

    timingFile << totalTimer.getElapsedTimeInMilliSec() << " " << periodicValue << endl;

    // Simulate head translation
    // Coordinates picker
    markers[1] = Vector3d(0,0,0);
    markers[2] = Vector3d(0,10,0);
    markers[3] = Vector3d(0,0,10);

    headEyeCoords.update(markers[1],markers[2],markers[3]);

    eyeLeft = headEyeCoords.getLeftEye();
    eyeRight = headEyeCoords.getRightEye();

    stimTransformation.matrix().setIdentity();
    stimTransformation.translation() <<0,0,focalDistance;

    Timer sleepTimer;
    sleepTimer.sleep(abs(TIMER_MS - frameTimer.getElapsedTimeInMilliSec()));
    timeFrame+=1;

}
Code example #24
void update(int value)
{
    // Timing things
    if ( trialMode != PROBEMODE )
    {
        oldvariable = variable;
        variable = -factors["Onset"]*mathcommon::trianglewave( timeFrame , factors["StimulusDuration"]/(TIMER_MS*factors["FollowingSpeed"]) );

        timeFrame+=1;
        bool isInside = ((projPoint - Vector3d(0,0,focalDistance) ).norm()) <= (circleRadius);
        // allows the trial to advance if we are in stimulus mode and the stimulus has completed a half-cycle (one pass from right to left, or from top to bottom)
        bool nextMode = ( sumOutside == 0 ) && (trialMode==STIMULUSMODE);

        if ( ( isInside && ( sumOutside > stimulusEmiCycles ) ) || nextMode )
        {
            sumOutside=-1;
            advanceTrial();
            //cerr << "stim time= " << stimulusTimer.getElapsedTimeInMilliSec() << endl;
        }
    }

    // Simulate head translation
    // Coordinates picker
    markers[1] = Vector3d(0,0,0);
    markers[2] = Vector3d(0,10,0);
    markers[3] = Vector3d(0,0,10);

    Vector3d translation(0,0,0);
    switch ( (int) factors["Anchored"] )
    {
    case 0:
        translation = Vector3d((circleRadius+1)*variable,0,0);
        break;
    case 1:
        translation = Vector3d((circleRadius+1)*variable,0,0);
        break;
    case 2:
        translation = Vector3d(0,(circleRadius+1)*variable,0);
        break;
    }

    markers[1]+=translation;
    markers[2]+=translation;
    markers[3]+=translation;

    headEyeCoords.update(markers[1],markers[2],markers[3]);

    eyeLeft = headEyeCoords.getLeftEye();
    eyeRight = headEyeCoords.getRightEye();



    fixationPoint = (headEyeCoords.getRigidStart().getFullTransformation() * ( Vector3d(0,0,focalDistance) ) );
    // Projection of view normal on the focal plane
    pline = Eigen::ParametrizedLine<double,3>::Through(eyeRight,fixationPoint);
    projPoint = pline.intersection(focalPlane)*((fixationPoint - eyeRight).normalized()) + eyeRight;

    checkBounds();

    glutPostRedisplay();
    glutTimerFunc(TIMER_MS, update, 0);
}
Code example #25
void handleKeypress(unsigned char key, int x, int y)
{   switch (key)
    {   //Quit program
    case 'o':
        orthographicMode=!orthographicMode;
        cam.setOrthoGraphicProjection(orthographicMode);
        break;
    case 'z':
        zOnFocalPlane=!zOnFocalPlane;
        break;
    case 'p':
        passiveMode=!passiveMode;
        break;
    case 'q':
    case 27:
    {   cleanup();
        exit(0);
    }
    break;
    case ' ':
    {
        // Here we record the head shape: coordinates of the eyes and markers, centered at (0,0,0)
        if ( headCalibrationDone==0 && allVisibleHead )
        {
            headEyeCoords.init(markers[17],markers[18], markers[1],markers[2],markers[3],interoculardistance );
            headCalibrationDone=1;
            break;
        }
        // Second calibration: you must look at a fixed fixation point
        if ( headCalibrationDone==1 )
        {
            headEyeCoords.init( headEyeCoords.getP1(),headEyeCoords.getP2(), markers[1], markers[2],markers[3],interoculardistance );
            eyeCalibration=headEyeCoords.getRightEye();
            headCalibrationDone=2;
            break;
        }
        if ( headCalibrationDone==2 )
        {   headEyeCoords.init( headEyeCoords.getP1(),headEyeCoords.getP2(), markers[1], markers[2],markers[3],interoculardistance );
            eyeCalibration=headEyeCoords.getRightEye();
            break;
        }
    }
    break;
    // Enter key: press to make the final calibration
    case 13:
    {
        if ( headCalibrationDone == 2)
        {
            headEyeCoords.init( headEyeCoords.getP1(),headEyeCoords.getP2(), markers[1], markers[2],markers[3],interoculardistance );
            eyeCalibration=headEyeCoords.getRightEye();
            headCalibrationDone=3;
        }
    }
    break;

    case '2':
    {
        probeAngle=270;
        keyPressed();
    }
    break;
    case '8':
    {
        probeAngle=90;
        keyPressed();
    }
    break;
    case '4':
    {
        probeAngle=180;
        keyPressed();
    }
    break;
    case '6':
    {
        probeAngle=0;
        keyPressed();
    }
    break;
    }
}
Code example #26
void update(int value)
{
    // Count the stimulus presentation cycles
    if ( (sumOutside > str2num<int>(parameters.find("StimulusCycles")) ) &&  (trialMode == STIMULUSMODE) )
    {
        sumOutside=0;
        trialMode++;
        trialMode=trialMode%4;
    }

    if (conditionInside && (sumOutside*2 > str2num<int>(parameters.find("FixationCycles"))) && (trialMode ==FIXATIONMODE )  )
    {
        sumOutside=0;
        trialMode++;
        trialMode=trialMode%4;
        stimulusDuration.start();
    }
    if ( trialMode == STIMULUSMODE )
        stimulusFrames++;
    if ( trialMode == FIXATIONMODE )
        stimulusFrames=0;

    Screen screenPassive;

    screenPassive.setWidthHeight(SCREEN_WIDE_SIZE, SCREEN_WIDE_SIZE*SCREEN_HEIGHT/SCREEN_WIDTH);
    screenPassive.setOffset(alignmentX,alignmentY);
    screenPassive.setFocalDistance(0);
    screenPassive.transform(headEyeCoords.getRigidStart().getFullTransformation()*Translation3d(center));

    camPassive.init(screenPassive);
    camPassive.setDrySimulation(true);
    camPassive.setEye(eyeRight);
    objectPassiveTransformation = ( camPassive.getModelViewMatrix()*objectActiveTransformation );
    // Coordinates picker
    markers = optotrak.getAllPoints();
    if ( isVisible(markers[1]) && isVisible(markers[2]) && isVisible(markers[3]) )
        headEyeCoords.update(markers[1],markers[2],markers[3]);
    Affine3d active = headEyeCoords.getRigidStart().getFullTransformation();

    eulerAngles.init( headEyeCoords.getRigidStart().getFullTransformation().rotation() );

    eyeLeft = headEyeCoords.getLeftEye();
    eyeRight = headEyeCoords.getRightEye();

    cyclopeanEye = (eyeLeft+eyeRight)/2.0;

    // Projection of view normal on the focal plane
    Vector3d directionOfSight = (active.rotation()*Vector3d(0,0,-1)).normalized();
    Eigen::ParametrizedLine<double,3> lineOfSightRight = Eigen::ParametrizedLine<double,3>::Through( eyeRight , eyeRight+directionOfSight );
    Eigen::ParametrizedLine<double,3> lineOfSightLeft  = Eigen::ParametrizedLine<double,3>::Through( eyeLeft, eyeLeft+directionOfSight );

    double lineOfSightRightDistanceToFocalPlane = lineOfSightRight.intersection(focalPlane);
    double lineOfSightLeftDistanceToFocalPlane = lineOfSightLeft.intersection(focalPlane);
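    // intersection() returns the line parameter at which each sight line crosses the focal
    // plane; since directionOfSight is normalized, this is the signed distance from the eye.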

    //double lengthOnZ = (active*(center-eyeCalibration )+eyeRight).z();
    projPointEyeRight = lineOfSightRightDistanceToFocalPlane *(directionOfSight)+ (eyeRight);
    projPointEyeLeft= lineOfSightLeftDistanceToFocalPlane * (directionOfSight) + (eyeLeft);
    // Second projection: the fixation point computed with non-constant z, kept perfectly parallel to projPointEyeRight
    lineOfSightRightDistanceToFocalPlane= (( active.rotation()*(center)) - eyeRight).norm();
    Vector3d secondProjection = lineOfSightRightDistanceToFocalPlane *(directionOfSight)+ (eyeRight);

    if ( !zOnFocalPlane )
        projPointEyeRight=secondProjection ;

    // Compute the translation to move the eye in order to avoid shear components
    Vector3d posAlongLineOfSight = (headEyeCoords.getRigidStart().getFullTransformation().rotation())*(eyeRight -eyeCalibration);
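    // posAlongLineOfSight is the eye displacement since calibration, rotated by the current
    // head rotation; the switch below maps its z component to a screen shift depending on the
    // "Translation" factor (0, -1, -2: none; 1: horizontal; 2: vertical), scaled by TranslationConstant.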

    switch ( (int)factors["Translation"] )
    {
    case -1:
    case -2:
        translationFactor.setZero();
        if ( trialMode == STIMULUSMODE )
            projPointEyeRight=center;
        break;
    case 0:
        translationFactor.setZero();
        break;
    case 1:
        translationFactor = factors["TranslationConstant"]*Vector3d(posAlongLineOfSight.z(),0,0);
        break;
    case 2:
        translationFactor = factors["TranslationConstant"]*Vector3d(0,posAlongLineOfSight.z(),0);
        break;
    }
    if ( passiveMode )
        initProjectionScreen(0,headEyeCoords.getRigidStart().getFullTransformation()*Translation3d(Vector3d(0,0,focalDistance)));
    else
        initProjectionScreen(focalDistance,Affine3d::Identity());
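    // In passive mode the projection screen is attached to the head (head transformation
    // composed with a translation of focalDistance along z); otherwise it stays fixed at
    // focalDistance with no extra transformation.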

    checkBounds();
    /**** Save to file part ****/
    // The markers file saves the markers used and the time-dependent experimental variables
    // (make sure that in the passive experiment the list of variables has the same order)
    markersFile << trialNumber << " " << headCalibrationDone << " " << trialMode << " " ;
    markersFile <<markers[1].transpose() << " " << markers[2].transpose() << " " << markers[3].transpose() << " " << markers[17].transpose() << " " << markers[18].transpose() << " " ;

    markersFile <<	factors["Tilt"] << " " <<
                factors["Slant"] << " " <<
                factors["Translation"] << " " <<
                factors["Onset"] << " " <<
                factors["TranslationConstant"] <<
                endl;

    ofstream outputfile;
    outputfile.open("data.dat");
    outputfile << "Subject Name: " << parameters.find("SubjectName") << endl;
    outputfile << "Passive matrix:" << endl << objectPassiveTransformation.matrix() << endl;
    outputfile << "Yaw: " << toDegrees(eulerAngles.getYaw()) << endl <<"Pitch: " << toDegrees(eulerAngles.getPitch()) << endl;
    outputfile << "EyeLeft: " <<  headEyeCoords.getLeftEye().transpose() << endl;
    outputfile << "EyeRight: " << headEyeCoords.getRightEye().transpose() << endl << endl;
    outputfile << "Slant: " << instantPlaneSlant << endl;
    outputfile << "(Width,Height) [px]: " << getPlaneDimensions().transpose() << " " << endl;
    outputfile << "Factors:" << endl;
    for (map<string,double>::iterator iter=factors.begin(); iter!=factors.end(); ++iter)
    {
        outputfile << "\t\t" << iter->first << "= " << iter->second << endl;
    }
    outputfile << "Trial remaining: " << trial.getRemainingTrials()+1 << endl;
    outputfile << "Last response: " << probeAngle << endl;
    // Here we save plane projected width and height


    // now rewind the file
    outputfile.clear();
    outputfile.seekp(0,ios::beg);

    // Write down frame by frame the trajectories and angles of eyes and head
    if ( trialMode == STIMULUSMODE && headCalibrationDone > 2 )
    {
        trajFile << setw(6) << left <<
                 trialNumber << " " <<
                 stimulusFrames << " " <<
                 eyeRight.transpose() << endl;

        anglesFile << setw(6) << left <<
                   trialNumber << " " <<
                   stimulusFrames << " " <<
                   toDegrees(eulerAngles.getPitch()) << " " <<
                   toDegrees(eulerAngles.getRoll()) << " " <<
                   toDegrees(eulerAngles.getYaw()) << " " <<
                   instantPlaneSlant << endl;

        matrixFile << setw(6) << left <<
                   trialNumber << " " <<
                   stimulusFrames << " " ;
        for (int i=0; i<3; i++)
            matrixFile << objectPassiveTransformation.matrix().row(i) << " " ;
        matrixFile << endl;

        // Write the 13 special extremal points to stimFile
        stimFile << setw(6) << left <<
                 trialNumber << " " <<
                 stimulusFrames << " " ;
        double winx=0,winy=0,winz=0;

        for (PointsRandIterator iRand = redDotsPlane.specialPointsRand.begin(); iRand!=redDotsPlane.specialPointsRand.end(); ++iRand)
        {   Point3D *p=(*iRand);
            Vector3d v = objectActiveTransformation*Vector3d( p->x, p->y, p->z);

            gluProject(v.x(),v.y(),v.z(), cam.getModelViewMatrix().data(), cam.getProjectiveMatrix().data(), cam.getViewport().data(), &winx,&winy,&winz);
            stimFile << winx << " " << winy << " " << winz << " ";
        }
        stimFile << endl;
    }

    glutPostRedisplay();
    glutTimerFunc(TIMER_MS, update, 0);
}
コード例 #27
0
// This is the function that does all the math in the background, so here you should add:
// 1) Continuous writing to file of the coordinates you want to save
// 2) Extraction of the coordinates from the previously defined rigid bodies; see for example
// how it is done for eyeLeft and eyeRight, or for thumb and index
void idle()
{
	optotrak->updateMarkers();
	//cerr << deltaT << endl;
	markers = optotrak->getAllMarkers();
	// Coordinates picker
	allVisiblePlatform = isVisible(markers[1].p);
	allVisibleIndex = isVisible(markers[13].p) && isVisible(markers[14].p) && isVisible(markers[16].p);
	allVisibleFingers = allVisibleIndex;

	allVisiblePatch = isVisible(markers[5].p) && isVisible(markers[6].p) && isVisible(markers[7].p);
	allVisibleHead = allVisiblePatch && isVisible(markers[1].p);

	if ( allVisiblePatch )
		headEyeCoords.update(markers[5].p,markers[6].p,markers[7].p);

	if ( allVisibleIndex )
	{
		indexCoords.update(markers[13].p, markers[14].p, markers[16].p );
		viewingFrames=1;
		// occludedFrames keeps its previous value while the index is visible
	}

	if (fingerCalibrationDone==3 )
	{
		if ( !allVisibleIndex )
		{
			viewingFrames=0;
			occludedFrames++;
		}
		
		if(condition == 0)
			condition = unifRand(-1, 1);
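		// The condition is drawn once, while it is still 0: a negative value selects
		// vision-then-pointing, otherwise pointing-then-vision (see the two branches below).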

		// "pointing" latches to true once the index passes the depth, height and velocity criteria
		if(index.z() < (-180) && index.y() > -150 && markers[14].v.norm() < 90)
			pointing=true;
		
		if(index.z() > (-80))
			handAtStart = true;
		else
			handAtStart = false;

		// Record the pointing depth and time only once; handRecorded latches these values
		if(pointing && !handRecorded)
		{
			recFHP = index.z();
			tFHP = timer.getElapsedTimeInMilliSec();
			handRecorded = true;
		}

		if(condition < 0) // vision then pointing
		{
			if(timer.getElapsedTimeInMilliSec() > stepVisual) // move to drawing the cross
			{
				moveToPartII = true;
				step = stepVisual;
			}
			if(moveToPartII && timer.getElapsedTimeInMilliSec() > step + 500) // move to drawing the first stimulus
			{
				moveToPartIII = true;
				step = step + 500;
			}
			if(moveToPartIII && timer.getElapsedTimeInMilliSec() > step + 800) // move to drawing black
			{
				moveToPartIV = true;
				step = step + 800;
			}
			if(moveToPartIV) // move to drawing the letter
			{
				moveToPartV = true;
			}
			if(moveToPartV && tFHP > 0) // move to drawing the cross
			{
				moveToPartVI = true;
				timeToFHP = tFHP - step;
				step = tFHP;
			}
			if(moveToPartVI && timer.getElapsedTimeInMilliSec() > step + 500) // move to drawing the second stimulus
			{
				moveToPartVII = true;
				step = step + 500;
			}
			if(moveToPartVII && timer.getElapsedTimeInMilliSec() > step + 800) // move to drawing black
			{
				moveToPartVIII = true;
			}
		} else // pointing then vision
		{
			if(tFHP > 0) // move to drawing the cross
			{
				moveToPartII = true;
				timeToFHP = tFHP;
				step = tFHP;
			}
			if(moveToPartII && timer.getElapsedTimeInMilliSec() > step + 500) // move to drawing the first stimulus
			{
				moveToPartIII = true;
				step = step + 500;
			}
			if(moveToPartIII && timer.getElapsedTimeInMilliSec() > step + 800) // move to drawing black
			{
				moveToPartIV = true;
				step = step + 800;
			}
			if(moveToPartIV) // move to drawing the letter
			{
				moveToPartV = true;
			}
			if(moveToPartV && timer.getElapsedTimeInMilliSec() > step + stepVisual)	//move to drawing the cross
			{
				if(handAtStart && !moveToPartVI)
				{
					stepT = timer.getElapsedTimeInMilliSec();
					moveToPartVI = true;
				}
			}
			if(moveToPartVI) // move to drawing the second stimulus
			{
				step = stepT;
				if(timer.getElapsedTimeInMilliSec() > step + 500)
				{
					moveToPartVII = true;
					step = step + 500;
				}
			}
			if(moveToPartVII && timer.getElapsedTimeInMilliSec() > step + 800) // move to drawing black
			{
				moveToPartVIII = true;
			}
		}
			
	}

	if(headCalibration)
	{
		eyeLeft = headEyeCoords.getLeftEye();
		eyeRight = headEyeCoords.getRightEye();
	} else {
		eyeRight = Vector3d(interoculardistance/2,0,0);
		eyeLeft = -eyeRight;
	}

	index = indexCoords.getP1();

	singleMarker = markers.at(4).p.transpose();

	if(pause && !paused)
	{
		timer.stop();
		paused = true;
	}
	else if(paused && !pause)
	{
		initTrial();
		paused = false;
	}

	// Write to markersfile
	if (fingerCalibrationDone==3 )
		{
		markersFile << fixed << 
			trialNumber << "\t" << 
			index.transpose() << "\t" << 
			occludedFrames << "\t" << 
			viewingFrames << "\t" << 
			markers[14].v.norm() << "\t" << 
			recFHP << "\t" << 
			timer.getElapsedTimeInMilliSec() << "\t" << 
			tFHP << "\t" << 
			part << "\t" << 
			step << "\t" << 
			moveToPartII << "\t" << 
			moveToPartIII << "\t" << 
			moveToPartIV << "\t" << 
			moveToPartV << "\t" << 
			moveToPartVI << "\t" << 
			moveToPartVII << "\t" << 
			moveToPartVIII << "\t" << 
			firstD << "\t" << 
			secondD << "\t" << 
			handRecorded << "\t" << 
			pointing << "\t" << 
			stepVisual
			;

		markersFile << endl;

		}

}
コード例 #28
0
void idle()
{
	// get new marker positions from optotrak
	optotrak->updateMarkers();
	markers = optotrak->getAllMarkers();

	// check visibility
	if (triangulate){
		allVisiblePlatform3 = isVisible(markers[8].p) && isVisible(markers[11].p) && isVisible(markers[12].p);
		allVisiblePlatform1 = isVisible(markers[1].p) && allVisiblePlatform3;
		allVisiblePlatform2 = isVisible(markers[2].p) && allVisiblePlatform3;
		allVisibleIndex = isVisible(markers[13].p) && isVisible(markers[14].p) && isVisible(markers[16].p);
		allVisibleThumb = isVisible(markers[15].p) && isVisible(markers[17].p) && isVisible(markers[18].p);
		allVisibleFingers = allVisibleIndex && allVisibleThumb;// && allVisibleWrist;
		allVisiblePatch = isVisible(markers[5].p) && isVisible(markers[6].p) && isVisible(markers[7].p);
		allVisibleHead = allVisiblePatch && isVisible(markers[1].p);
	} else {
		allVisibleIndex = isVisible(markers[9].p);
		allVisibleThumb = isVisible(markers[10].p);
		allVisibleFingers = allVisibleThumb && allVisibleIndex;
		allVisibleWrist = isVisible(markers[22].p);
	}


	// check equipment alignments
	mirrorAlignment = asin(
			abs((markers[6].p.z()-markers[7].p.z()))/
			sqrt(
			pow(markers[6].p.x()-markers[7].p.x(), 2) +
			pow(markers[6].p.z()-markers[7].p.z(), 2)
			)
			)*180/M_PI;
	screenAlignmentY = asin(
			abs((markers[19].p.y()-markers[21].p.y()))/
			sqrt(
			pow(markers[19].p.x()-markers[21].p.x(), 2) +
			pow(markers[19].p.y()-markers[21].p.y(), 2)
			)
			)*180/M_PI;
	screenAlignmentZ = asin(
			abs(markers[19].p.z()-markers[20].p.z())/
			sqrt(
			pow(markers[19].p.x()-markers[20].p.x(), 2) +
			pow(markers[19].p.z()-markers[20].p.z(), 2)
			)
			)*180/M_PI*
			abs(markers[19].p.x()-markers[20].p.x())/
			(markers[19].p.x()-markers[20].p.x());
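	// Each alignment angle is asin( |component difference| / marker separation in that plane ),
	// converted to degrees; the trailing factor gives screenAlignmentZ the sign of the
	// x-separation between markers 19 and 20.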

	// update head coordinates
	if ( allVisiblePatch )
		headEyeCoords.update(markers[5].p,markers[6].p,markers[7].p);

	// update finger coordinates (but we don't really use these directly!)
	if ( allVisibleFingers ) {
		if(triangulate){
			indexCoords.update(markers[13].p, markers[14].p, markers[16].p );
			thumbCoords.update(markers[15].p, markers[17].p, markers[18].p );
		}
		//wristCoords = markers[wristMarkerNum].p;
		fingersOccluded = 0;
	}
	if (triangulate){
		if ( allVisiblePlatform1 && fingerCalibration==0 )
		{
			// get upper pin
			calibration_fingers(1);
			fingerCalibration=1;
		}
		if ( fingerCalibration==1 && allVisiblePlatform2 )
		{
			// get lower pin
			calibration_fingers(2);
			fingerCalibration=2;
			beepOk(0);
		}
		if ( allVisiblePlatform3 && fingerCalibration<3 )
		{
			upperPin.update(markers[8].p, markers[11].p, markers[12].p );
			lowerPin.update(markers[8].p, markers[11].p, markers[12].p );
		}

		// update the finger position in the objects we actually use
		if (allVisibleIndex)
			index = indexCoords.getP1();
		if (allVisibleThumb)
			thumb = thumbCoords.getP1();
	}
	if (!triangulate){
		if (fingerCalibration==0)
			fingerCalibration=3;
		index = markers[9].p;
		thumb = markers[10].p;
		wrist = markers[22].p;
	}

	//////////////////////////////////////
	// While the experiment is running! //
	//////////////////////////////////////
	if (fingerCalibration==7 && !finished)
	{
		// Check for finger occlusion
		if ( !allVisibleFingers )
		{
			fingersOccluded = 1;
			//if (!started)
			//	beepOk(4);
			if (started && !reachedObject) // only increment if we're in flight
			{
				num_lost_frames += 1;
			}
		}
		
		// Advance frame number
		frameN++;

		// find distance from grip center to object center
		grip_Origin_X = (index.x()+thumb.x())/2;
		grip_Origin_Y = (index.y()+thumb.y())/2;
		grip_Origin_Z = (index.z()+thumb.z())/2;
		x_dist = abs(grip_Origin_X - targetOriginX);
		y_dist = abs(grip_Origin_Y - targetOriginY);
		z_dist = abs(grip_Origin_Z - targetOriginZ);
		distanceGripCenterToObject = sqrt((x_dist*x_dist)+(y_dist*y_dist)+(z_dist*z_dist));

		x_dist_home = abs(grip_Origin_X - (startPosLeft + startPosRight)/2);
		y_dist_home = abs(grip_Origin_Y - (startPosTop + startPosBottom)/2);
		z_dist_home = abs(grip_Origin_Z - (startPosFront + startPosRear)/2);
		distanceGripCenterToHome = sqrt((x_dist_home*x_dist_home)+(y_dist_home*y_dist_home)+(z_dist_home*z_dist_home));

		// compute grip aperture
		grip_aperture = sqrt(
			(index.x() - thumb.x())*(index.x() - thumb.x()) + 
			(index.y() - thumb.y())*(index.y() - thumb.y()) + 
			(index.z() - thumb.z())*(index.z() - thumb.z())
			);
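		// grip_aperture is the Euclidean index-thumb distance; with Eigen's Vector3d this
		// is equivalent to (index - thumb).norm().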

		/* Check that both fingers are in the start position
		if( ( (index.y() < startPosTop) && // index below ceiling
			  (index.y() > startPosBottom) && // index above floor
			  (index.x() > startPosLeft) && // index right of left wall
			  (index.x() < startPosRight) && // index left of right wall
			  (index.z() > startPosFront) &&  // index behind front wall
		      (index.z() < startPosRear) && // index in front of rear wall
			  (thumb.y() < startPosTop) && // thumb below ceiling
			  (thumb.y() > startPosBottom) && // thumb above floor
			  (thumb.x() > startPosLeft) && // thumb right of left wall
			  (thumb.x() < startPosRight) && // thumb left of right wall
			  (thumb.z() > startPosFront) &&  // thumb behind front wall
			  (thumb.z() < startPosRear) ) // thumb in front of rear wall
			  || (estimate_given==0) )*/
		if (handAtStart || (estimate_given==0))
		{	
			// if we already gave the estimate and are returning to the start position
			if (estimate_given==1){
				estimate_given=2;
				beepOk(10);
			} 
			// keep resetting timer
			//handAtStart = true;
			maxTime = timer.getElapsedTimeInMilliSec();
			timer.start();
		} else if (estimate_given==2) { 
			// otherwise we are in flight, so set flags and let the timer run
			if (start_frame==0){
				start_frame=frameN;
				start_dist = distanceGripCenterToHome;
			}
			//handAtStart = false;
			started = true;
		}

		// if we are still approaching object
		if (!reachedObject && started) {
			// when conditions for "end criterion" are satisfied (usually re: GA and distanceToObject)
			if ( (distanceGripCenterToObject<=10) && (grip_aperture<(cylHeight+10)) ){
				// set flag and record the frame (for computing % missing frames)
				reachedObject = true;
				//spin = true;
				TGA_frame = frameN;
			}
		}
	}

	// recompute the eye coordinates for drawing so we can change IOD online
	if(headCalibration){
		eyeLeft = headEyeCoords.getLeftEye();
		eyeRight = headEyeCoords.getRightEye();
	}else{
		eyeRight = Vector3d(interoculardistance/2,0,0);
		eyeLeft = -eyeRight;
	}

	// Write to trialFile once calibration is over
	if (fingerCalibration==7 )
	{
		trialFile << fixed <<
		//parameters.find("SubjectName") << "\t" <<	//subjName
		parameters[0].find("SubjectName") << "\t" <<	//subjName
		trialNumber << "\t" <<							//trialN
		timer.getElapsedTimeInMilliSec() << "\t" <<		//time
		frameN << "\t" <<								//frameN
		index.transpose() << "\t" <<					//indexXraw, indexYraw, indexZraw
		thumb.transpose() << "\t" <<					//thumbXraw, thumbYraw, thumbZraw
		//wrist.transpose() << "\t" <<
		distanceGripCenterToObject << "\t" <<			//distanceToObject
		fingersOccluded << "\t" <<						//fingersOccluded
		reachedObject << endl;							//reachedObject
	}

	// conditions for trial advance
	if(handAtStart && started)
		advanceTrial();
}
コード例 #29
0
// This is the function that does all the math in the background, so here you should add:
// 1) Continuous writing to file of the coordinates you want to save
// 2) Extraction of the coordinates from the previously defined rigid bodies; see for example
// how it is done for eyeLeft and eyeRight, or for thumb and index
void idle()
{
	optotrak->updateMarkers();
	
	/*cerr << trial.getCurrent()["AbsDepth"] << "\t" << 
		trial.getCurrent()["ObjHeight"] << "\t" <<
		trial.getCurrent()["HapticFB"] << endl;
	*/
	//cerr << parameters.find("fObjHeight",1) << endl;

	markers = optotrak->getAllMarkers();
	// Coordinates picker
	allVisiblePlatform = isVisible(markers[1].p);
	allVisibleIndex = isVisible(markers[13].p) && isVisible(markers[14].p) && isVisible(markers[16].p);
	allVisibleThumb = isVisible(markers[15].p) && isVisible(markers[17].p) && isVisible(markers[18].p);
	allVisibleFingers = allVisibleIndex && allVisibleThumb;

	allVisiblePatch = isVisible(markers[5].p) && isVisible(markers[6].p) && isVisible(markers[7].p);
	allVisibleHead = allVisiblePatch && isVisible(markers[1].p);

	if ( allVisiblePatch )
		headEyeCoords.update(markers[5].p,markers[6].p,markers[7].p);

	if ( allVisibleFingers )
	{
		indexCoords.update(markers[13].p, markers[14].p, markers[16].p );
		thumbCoords.update(markers[15].p, markers[17].p, markers[18].p );
		fingersOccluded = 0;
	}

	if (fingerCalibrationDone==3 )
	{
		if ( !allVisibleFingers )
		{
			fingersOccluded=1;
		}
		
		frameN++;

		if(((-500) < index.y()) && (index.y() < (-315)) && ((-500) < thumb.y()) && (thumb.y() < (-315)))
		{
			handAtStart = true;
			timer.start();
		} else 
		{
			handAtStart = false;
		}
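		// While both fingers stay in the start band along y (between -500 and -315) the
		// trial timer keeps being restarted; handAtStart is cleared as soon as they leave it.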
		
		// "started" latches to true once both fingers have left the start region upwards
		if(!handAtStart && ((-315) <= index.y()) && (index.y() < 200) && ((-315) <= thumb.y()) && (thumb.y() < 100))
		{
			started=true;
		}
	}

	if(headCalibration)
	{
		eyeLeft = headEyeCoords.getLeftEye();
		eyeRight = headEyeCoords.getRightEye();
	} else {
		eyeRight = Vector3d(interoculardistance/2,0,0);
		eyeLeft = -eyeRight;
	}

	index = indexCoords.getP1();
	thumb = thumbCoords.getP1();

	// Write to trialFile
	if (fingerCalibrationDone==3 )
		{

		trialFile << fixed <<
			parameters.find("SubjectName") << "\t" <<		//subjName
			trialNumber << "\t" <<							//trialN
			timer.getElapsedTimeInMilliSec() << "\t" <<		//time
			frameN << "\t" <<								//frameN
			index.transpose() << "\t" <<					//indexXraw, indexYraw, indexZraw
			thumb.transpose() << "\t" <<					//thumbXraw, thumbYraw, thumbZraw
			//eyeRight.transpose() << "\t" <<					//eyeRXraw, eyeRYraw, eyeRZraw
			//eyeLeft.transpose() << "\t" <<					//eyeLXraw, eyeLYraw, eyeLZraw
			fingersOccluded << "\t" <<						//fingersOccluded
			attempt
			;

		trialFile << endl;
		
		}

}
コード例 #30
0
/**
 * @brief keyPressEvent
 * @param key
 * @param x
 * @param y
 */
void keyPressEvent(unsigned char key, int x, int y)
{
    switch (key)
    {       //Quit program
    case 'q':
    case 27:
    {
        exit(0);
        break;
    }
	case 'i':
	{
		infoDraw=!infoDraw;
		break;
	}
    case ' ':
    {
        // Here we record the head shape - the coordinates of the eyes and markers, but centered at (0,0,0)
        if ( headCalibrationDone==0 && allVisibleHead && trialMode==PRECALIBRATION_MODE)
        {
            headEyeCoords.init(markers.at(17).p,markers.at(18).p, markers.at(1).p,markers.at(2).p,markers.at(3).p,interocularDistance );
            headCalibrationDone=1;
            beepOk();
            break;
        }
        // Second calibration: the user must be inside the red circle
        if ( headCalibrationDone==1 && allVisiblePatch && trialMode==PRECALIBRATION_MODE)
        {
			headEyeCoords.init(headEyeCoords.getLeftEye(),headEyeCoords.getRightEye(), markers.at(1).p,markers.at(2).p,markers.at(3).p,interocularDistance );
            headCalibrationDone=2;
            beepOk();
            break;
		}
        break;
    }
    case 13:
    {
        // Third calibration: the user must look toward the green/red circle and be at the correct z position; then the experiment starts
        if ( readyToStart && allVisiblePatch )
        {
			headEyeCoords.init(headEyeCoords.getLeftEye(),headEyeCoords.getRightEye(), markers.at(1).p,markers.at(2).p,markers.at(3).p,interocularDistance );
			eyeCalibration = headEyeCoords.getRightEye();
			headCalibrationDone=3;
            trialMode=BLACK_MODE;
            beepOk();
            trialTimer.start();
            break;
        }
        break; // prevent falling through to case '4' when the calibration condition is not met
    }
    case '4':
    {
        if (trialMode==RESPONSE_MODE)
        {
            advanceTrial(4);
        }
        break;
    }
    case '6':
    {
        if (trialMode==RESPONSE_MODE)
        {
            advanceTrial(6);
        }
        break;
    }
    }
}