Example #1
/**
 * @brief drawFixation: draws the fixation stimulus and, during head calibration, the calibration circle
 */
void drawFixation()
{
    double circleRadius = parameters.get("MaxCircleRadius");	// millimeters
    double zBoundary    = parameters.get("MaxZOscillation"); // millimeters
    // Projection of view normal on the focal plane
    Vector3d directionOfSight = (headEyeCoords.getRigidStart().getFullTransformation().linear()*Vector3d(0,0,-1)).normalized();
    Eigen::ParametrizedLine<double,3> lineOfSightRight = Eigen::ParametrizedLine<double,3>::Through( headEyeCoords.getRightEye() , headEyeCoords.getRightEye()+directionOfSight );
    Eigen::Hyperplane<double,3> focalPlane = Eigen::Hyperplane<double,3>::Through( Vector3d(1,0,focalDistance), Vector3d(0,1,focalDistance),Vector3d(0,0,focalDistance) );
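    // focalPlane is the plane z = focalDistance; intersection() returns the line
    // parameter t, i.e. the signed distance from the eye along directionOfSight
    // (a unit vector) to that plane.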
    double lineOfSightRightDistanceToFocalPlane = lineOfSightRight.intersection(focalPlane);
    Vector3d opticalAxisToFocalPlaneIntersection = lineOfSightRightDistanceToFocalPlane *(directionOfSight)+ (headEyeCoords.getRightEye());

    switch ( headCalibrationDone )
    {
    case 1:
    {
        // STIM_FIXED stimulus at (0,0,focalDistance)
        glPushAttrib(GL_POINT_BIT);
        glColor3fv(glRed);
        glPointSize(5);
        glBegin(GL_POINTS);
        glVertex3d(0,0,focalDistance);
		glVertex3d(headEyeCoords.getRightEye().x(),headEyeCoords.getRightEye().y(),focalDistance);
        glEnd();
        glPopAttrib();
        break;
    }
    case 2:
    {
        // STIM_FIXED stimulus + projected points
        glPushAttrib( GL_ALL_ATTRIB_BITS );
        glPointSize(5);
		glLineWidth(2);
		
        glBegin(GL_POINTS);
        glColor3fv(glRed);
        glVertex3d(0,0,focalDistance);
		glColor3fv(glBlue);
		glVertex3dv(opticalAxisToFocalPlaneIntersection.data());
		glColor3fv(glWhite);
		glVertex3d(headEyeCoords.getRightEye().x(),headEyeCoords.getRightEye().y(),focalDistance);
        glEnd();

		double r2EyeRight = pow(headEyeCoords.getRightEye().x(),2)+pow(headEyeCoords.getRightEye().y(),2);
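        // Ready to start only while the gaze/focal-plane intersection and the
        // right eye stay within circleRadius of the screen center (in x,y) and
        // the eye's z offset stays below zBoundary.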
        // Draw the calibration circle
        if ( pow(opticalAxisToFocalPlaneIntersection.x(),2)+pow(opticalAxisToFocalPlaneIntersection.y(),2) <= circleRadius*circleRadius
             && abs(headEyeCoords.getRightEye().z()) < zBoundary
             && r2EyeRight < circleRadius*circleRadius )
        {
			readyToStart=true;
            drawCircle(circleRadius,0,0,focalDistance,glGreen);
        }
        else
        {
            drawCircle(circleRadius,0,0,focalDistance,glRed);
        }
        glPopAttrib();
		break;
	}
    }
}
Example #2
Vector3d getEyeProjectionPoint()
{
    Eigen::Hyperplane<double,3> focalPlane = Eigen::Hyperplane<double,3>::Through   ( Vector3d(1,0,focalDistance), Vector3d(0,1,focalDistance),Vector3d(0,0,focalDistance) );

    // Projection of view normal on the focal plane
    Vector3d directionOfSight = (headEyeCoords.getRigidStart().getFullTransformation().rotation()*Vector3d(0,0,-1)).normalized();
    Eigen::ParametrizedLine<double,3> lineOfSightRight = Eigen::ParametrizedLine<double,3>::Through( eyeRight , eyeRight+directionOfSight );

    double dist = lineOfSightRight.intersection(focalPlane);

    return (dist*(directionOfSight)+ (eyeRight));
}
void idle()
{
    Timer frameTimer;
    frameTimer.start();
    // Timing things
    timeFrame+=1;

    double oscillationPeriod = factors.at("StimulusDuration")*TIMER_MS;

    switch (stimMotion)
    {
    case SAWTOOTH_MOTION:
        periodicValue = oscillationAmplitude*mathcommon::sawtooth(timeFrame,oscillationPeriod);
        break;
    case TRIANGLE_MOTION:
        periodicValue = oscillationAmplitude*mathcommon::trianglewave(timeFrame,oscillationPeriod);
        break;
    case SINUSOIDAL_MOTION:
        periodicValue = oscillationAmplitude*sin(3.14*timeFrame/(oscillationPeriod));
        break;
    default:
        // Fall back to sawtooth motion for an unrecognized stimMotion value
        periodicValue = oscillationAmplitude*mathcommon::sawtooth(timeFrame,oscillationPeriod);
        break;
    }

    timingFile << totalTimer.getElapsedTimeInMilliSec() << " " << periodicValue << endl;

    // Simulate head translation
    // Coordinates picker
    markers[1] = Vector3d(0,0,0);
    markers[2] = Vector3d(0,10,0);
    markers[3] = Vector3d(0,0,10);

    headEyeCoords.update(markers[1],markers[2],markers[3]);

    eyeLeft = headEyeCoords.getLeftEye();
    eyeRight = headEyeCoords.getRightEye();

    Vector3d fixationPoint = (headEyeCoords.getRigidStart().getFullTransformation() * ( Vector3d(0,0,focalDistance) ) );
    // Projection of view normal on the focal plane
    Eigen::ParametrizedLine<double,3> pline = Eigen::ParametrizedLine<double,3>::Through(eyeRight,fixationPoint);
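    // pline.intersection(focalPlane) is the line parameter t: the projected point
    // is eyeRight + t * (unit vector from eyeRight towards fixationPoint).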
    projPoint = pline.intersection(focalPlane)*((fixationPoint - eyeRight).normalized()) + eyeRight;

    stimTransformation.matrix().setIdentity();
    stimTransformation.translation() <<0,0,focalDistance;

	Timer sleepTimer;
	sleepTimer.sleep((TIMER_MS - frameTimer.getElapsedTimeInMilliSec())/2);
}
Example #4
void update(int value)
{
    frameTimer.start();
// Read the experiment from file; if the file has ended, exit immediately
    if ( inputStream.eof() )
    {   exit(0);
    }

    if ( isReading )
    {   // This reads a line (frame) in inputStream
        readline(inputStream, trialNumber,  headCalibration,  trialMode, pointMatrix );
        headEyeCoords.update(pointMatrix.col(0),pointMatrix.col(1),pointMatrix.col(2));
        Affine3d active = headEyeCoords.getRigidStart().getFullTransformation();
        eulerAngles.init( headEyeCoords.getRigidStart().getFullTransformation().rotation() );

        eyeLeft = headEyeCoords.getLeftEye();
        eyeRight= headEyeCoords.getRightEye();
        //cerr << eyeRight.transpose() << endl;
        cyclopeanEye = (eyeLeft+eyeRight)/2.0;

        if ( trialMode == STIMULUSMODE )
            stimulusFrames++;
        if ( trialMode == FIXATIONMODE )
            stimulusFrames=0;

        // Projection of view normal on the focal plane
        Vector3d directionOfSight = (active.rotation()*Vector3d(0,0,-1)).normalized();
        Eigen::ParametrizedLine<double,3> lineOfSightRight = Eigen::ParametrizedLine<double,3>::Through( eyeRight , eyeRight+directionOfSight );

        double lineOfSightRightDistanceToFocalPlane = lineOfSightRight.intersection(focalPlane);

        //double lenghtOnZ = (active*(center-eyeCalibration )+eyeRight).z();
        projPointEyeRight = lineOfSightRightDistanceToFocalPlane *(directionOfSight)+ (eyeRight);
        // Second projection: the fixation point computed with non-constant z, kept perfectly parallel to projPointEyeRight
        lineOfSightRightDistanceToFocalPlane= (( active.rotation()*(center)) - eyeRight).norm();
        Vector3d secondProjection = lineOfSightRightDistanceToFocalPlane *(directionOfSight)+ (eyeRight);
        projPointEyeRight=secondProjection ;

        // Compute the translation to move the eye in order to avoid shear components
        Vector3d posAlongLineOfSight = (headEyeCoords.getRigidStart().getFullTransformation().rotation())*(eyeRight -eyeCalibration);
        // GENERATION OF PASSIVE MODE.
        // HERE WE MOVE THE SCREEN TO FACE THE OBSERVER's EYE
        if ( passiveMode )
        {   initProjectionScreen(0, headEyeCoords.getRigidStart().getFullTransformation()*Translation3d(center));
        }
        else
            initProjectionScreen(focalDistance, Affine3d::Identity());

        if ( trialMode == STIMULUSMODE )
        {
            // IMPORTANT Reset the previous status of transformations
            objectActiveTransformation[0].setIdentity();
            objectActiveTransformation[1].setIdentity();
            // PLANE 0 transformation (the lower plane)
            alpha = atan( eyeRight.x()/abs(projPointEyeRight.z()) );
            if ( overallTilt )
            {
                instantPlaneSlant = alphaMultiplier*alpha+toRadians(-factors.at("DeltaSlant")-factors.at("StillPlaneSlant"));
                AngleAxis<double> aa0( instantPlaneSlant,Vector3d::UnitY());
                objectActiveTransformation[0]*=aa0;
                double planesYOffset = factors.at("PlanesCentersYDistance")*(whichPlaneDrawUp ? 1 : -1);
                objectActiveTransformation[0].translation() = Vector3d(0,planesYOffset,focalDistance);

                // PLANE 1 transformation (the upper plane)
                AngleAxis<double> aa1(-toRadians(factors.at("StillPlaneSlant")),Vector3d::UnitY());
                objectActiveTransformation[1]*=aa1;
                objectActiveTransformation[1].translation() = Vector3d(0,-planesYOffset,focalDistance);
            }
            else
            {
                instantPlaneSlant = alphaMultiplier*alpha+toRadians(factors.at("DeltaSlant")+factors.at("StillPlaneSlant"));
                AngleAxis<double> aa0( instantPlaneSlant,Vector3d::UnitY());
                objectActiveTransformation[0]*=aa0;
                double planesYOffset = factors.at("PlanesCentersYDistance")*(whichPlaneDrawUp ? 1 : -1);
                objectActiveTransformation[0].translation() = Vector3d(0,planesYOffset,focalDistance);

                // PLANE 1 transformation (the upper plane)
                AngleAxis<double> aa1(toRadians(factors.at("StillPlaneSlant")),Vector3d::UnitY());
                objectActiveTransformation[1]*=aa1;
                objectActiveTransformation[1].translation() = Vector3d(0,-planesYOffset,focalDistance);
            }

            objectPassiveTransformation[0] = ( cam.getModelViewMatrix()*objectActiveTransformation[0] );
            objectPassiveTransformation[1] = ( cam.getModelViewMatrix()*objectActiveTransformation[1] );

            //cout << toDegrees(instantPlaneSlant) << endl;

            // **************** COMPUTE THE OPTIC FLOWS **************************
            // 1) Project the points to screen by computing their coordinates on focalPlane in passive mode (quite complicated, see the specific method)
            // *********** FOR THE MOVING PLANE *************
            vector<Vector3d> projPointsMovingPlane = stimDrawer[0].projectStimulusPoints(objectActiveTransformation[0],headEyeCoords.getRigidStart().getFullTransformation(),cam,focalDistance, screen, eyeCalibration,passiveMode,false);

            // 2) Get the angles formed by stimulus and observer
            // updating with the latest values
            Vector3d oldAlphaMoving = flowsAnglesAlphaMoving,oldBetaMoving=flowsAnglesBetaMoving;
            // alpha is the "pitch" angle, beta is the "yaw" angle
            // Here we must use points 4, 5, 8 of the stimulus
            flowsAnglesAlphaMoving(0)  =  ( atan2(projPointsMovingPlane[4].x(), abs(focalDistance) ) );
            flowsAnglesAlphaMoving(1)  =  ( atan2(projPointsMovingPlane[5].x(), abs(focalDistance) ) );
            flowsAnglesAlphaMoving(2)  =  ( atan2(projPointsMovingPlane[8].x(), abs(focalDistance) ) );

            flowsAnglesBetaMoving(0)      =  ( atan2(projPointsMovingPlane[4].y(), abs(focalDistance) ) );
            flowsAnglesBetaMoving(1)      =  ( atan2(projPointsMovingPlane[5].y(), abs(focalDistance) ) );
            flowsAnglesBetaMoving(2)      =  ( atan2(projPointsMovingPlane[8].y(), abs(focalDistance) ) );

            // 3) Fill the matrix of derivatives
            MatrixXd angVelocitiesMoving(6,1);
            angVelocitiesMoving(0) = flowsAnglesAlphaMoving(0)-oldAlphaMoving(0);
            angVelocitiesMoving(1) = flowsAnglesBetaMoving(0)-oldBetaMoving(0);
            angVelocitiesMoving(2) = flowsAnglesAlphaMoving(1)-oldAlphaMoving(1);
            angVelocitiesMoving(3) = flowsAnglesBetaMoving(1)-oldBetaMoving(1);
            angVelocitiesMoving(4) = flowsAnglesAlphaMoving(2)-oldAlphaMoving(2);
            angVelocitiesMoving(5) = flowsAnglesBetaMoving(2)-oldBetaMoving(2);
            angVelocitiesMoving /= ((double)TIMER_MS/(double)1000);
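            // angVelocitiesMoving now holds angular velocities in rad/s
            // (frame-to-frame differences divided by the frame period in seconds).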

            // 4) Fill the coefficient matrix, to solve the linear system
            MatrixXd coeffMatrixMoving(6,6);
            coeffMatrixMoving <<
                              1, flowsAnglesAlphaMoving(0),   flowsAnglesBetaMoving(0), 0, 0, 0,
                                 0, 0,    0,    1,flowsAnglesAlphaMoving(0),flowsAnglesBetaMoving(0),
                                 1, flowsAnglesAlphaMoving(1),   flowsAnglesBetaMoving(1), 0, 0, 0,
                                 0, 0,    0,    1,flowsAnglesAlphaMoving(1),flowsAnglesBetaMoving(1),
                                 1, flowsAnglesAlphaMoving(2),   flowsAnglesBetaMoving(2), 0, 0, 0,
                                 0, 0,    0,    1,flowsAnglesAlphaMoving(2),flowsAnglesBetaMoving(2)
                                 ;
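            // Each projected point contributes two rows: its horizontal angular velocity
            // is modelled as v1 + v2*alpha + v3*beta and its vertical one as
            // v4 + v5*alpha + v6*beta, so the six first-order flow coefficients can be solved for.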
            // 5) Solve the linear system by column-pivoting Householder QR decomposition (see Eigen for details http://eigen.tuxfamily.org/dox/TutorialLinearAlgebra.html )
            MatrixXd velocitiesMoving = coeffMatrixMoving.colPivHouseholderQr().solve(angVelocitiesMoving);
            // 6) Write the output to file flowsFileMoving
            flowsFileMoving << fixed << trialNumber << "\t" <<  //1
                            stimulusFrames << " " <<
                            factors.at("DeltaSlant")<< " " <<
                            factors.at("StillPlaneSlant") << " " <<
                            overallTilt << " " <<
                            projPointsMovingPlane[4].transpose() << " " <<
                            projPointsMovingPlane[5].transpose() << " " <<
                            projPointsMovingPlane[8].transpose() << " " <<
                            angVelocitiesMoving.transpose() << " " <<
                            velocitiesMoving.transpose() << endl;

            // ********************* FLOWS FOR THE STILL PLANE **************
            // Here we must repeat the same things for the still plane
            vector<Vector3d> projPointsStillPlane = stimDrawer[1].projectStimulusPoints(objectActiveTransformation[1],headEyeCoords.getRigidStart().getFullTransformation(),cam,focalDistance, screen, eyeCalibration,passiveMode,false);

            // 2) Get the angles formed by stimulus and observer
            // updating with the latest values
            Vector3d oldAlphaStill = flowsAnglesAlphaStill,oldBetaStill=flowsAnglesBetaStill;
            // alpha is the "pitch" angle, beta is the "yaw" angle
            // Here we must use points 4, 5, 8 of the stimulus
            flowsAnglesAlphaStill(0)  =  ( atan2(projPointsStillPlane[4].x(), abs(focalDistance) ) );
            flowsAnglesAlphaStill(1)  =  ( atan2(projPointsStillPlane[5].x(), abs(focalDistance) ) );
            flowsAnglesAlphaStill(2)  =  ( atan2(projPointsStillPlane[8].x(), abs(focalDistance) ) );

            flowsAnglesBetaStill(0)      =  ( atan2(projPointsStillPlane[4].y(), abs(focalDistance) ) );
            flowsAnglesBetaStill(1)      =  ( atan2(projPointsStillPlane[5].y(), abs(focalDistance) ) );
            flowsAnglesBetaStill(2)      =  ( atan2(projPointsStillPlane[8].y(), abs(focalDistance) ) );

            // 3) Fill the matrix of derivatives
            MatrixXd angVelocitiesStill(6,1);
            angVelocitiesStill(0) = flowsAnglesAlphaStill(0)-oldAlphaStill(0);
            angVelocitiesStill(1) = flowsAnglesBetaStill(0)-oldBetaStill(0);
            angVelocitiesStill(2) = flowsAnglesAlphaStill(1)-oldAlphaStill(1);
            angVelocitiesStill(3) = flowsAnglesBetaStill(1)-oldBetaStill(1);
            angVelocitiesStill(4) = flowsAnglesAlphaStill(2)-oldAlphaStill(2);
            angVelocitiesStill(5) = flowsAnglesBetaStill(2)-oldBetaStill(2);
            angVelocitiesStill /= ((double)TIMER_MS/(double)1000);

            // 4) Fill the coefficient matrix, to solve the linear system
            MatrixXd coeffMatrixStill(6,6);
            coeffMatrixStill <<
                             1, flowsAnglesAlphaStill(0),   flowsAnglesBetaStill(0), 0, 0, 0,
                                0, 0,    0,    1,flowsAnglesAlphaStill(0),flowsAnglesBetaStill(0),
                                1, flowsAnglesAlphaStill(1),   flowsAnglesBetaStill(1), 0, 0, 0,
                                0, 0,    0,    1,flowsAnglesAlphaStill(1),flowsAnglesBetaStill(1),
                                1, flowsAnglesAlphaStill(2),   flowsAnglesBetaStill(2), 0, 0, 0,
                                0, 0,    0,    1,flowsAnglesAlphaStill(2),flowsAnglesBetaStill(2)
                                ;
            // 5) Solve the linear system by column-pivoting Householder QR decomposition (see Eigen for details http://eigen.tuxfamily.org/dox/TutorialLinearAlgebra.html )
            MatrixXd velocitiesStill = coeffMatrixStill.colPivHouseholderQr().solve(angVelocitiesStill);
            // 6) Write the output to file flowsFileStill
            flowsFileStill << fixed << trialNumber << "\t" <<  // 1
                           stimulusFrames << " " <<	// 2
                           factors.at("DeltaSlant")<< " " << // 3
                           factors.at("StillPlaneSlant") << " " << // 4
                           overallTilt << " " <<
                           projPointsStillPlane[4].transpose() << " " << // 5,6,7
                           projPointsStillPlane[5].transpose() << " " << // 8,9,10
                           projPointsStillPlane[8].transpose() << " " << // 11,12,13
                           angVelocitiesStill.transpose() << " " << // 14, 15, 16, 17, 18, 19
                           velocitiesStill.transpose() << endl;	// 20, 21, 22, 23, 24, 25
            // **************** END OF OPTIC FLOWS COMPUTATION
        }
        /*
        ofstream outputfile;
        outputfile.open("data.dat");
        outputfile << "Subject Name: " << parameters.find("SubjectName") << endl;
        outputfile << "Passive matrix:" << endl << objectPassiveTransformation.matrix() << endl;
        outputfile << "Yaw: " << toDegrees(eulerAngles.getYaw()) << endl <<"Pitch: " << toDegrees(eulerAngles.getPitch()) << endl;
        outputfile << "EyeLeft: " <<  headEyeCoords.getLeftEye().transpose() << endl;
        outputfile << "EyeRight: " << headEyeCoords.getRightEye().transpose() << endl << endl;
        outputfile << "Factors:" << endl;
        for (map<string,double>::iterator iter=factors.begin(); iter!=factors.end(); ++iter)
        {   outputfile << "\t\t" << iter->first << "= " << iter->second << endl;
        }
        */

    }

    if ( trialMode == PROBEMODE )
        isReading=false;

    glutPostRedisplay();
    glutTimerFunc(TIMER_MS, update, 0);
}
Example #5
void update(int value)
{
    // Count the stimulus presentation cycles
    if ( (sumOutside > str2num<int>(parameters.find("StimulusCycles")) ) &&  (trialMode == STIMULUSMODE) )
    {
        sumOutside=0;
        trialMode++;
        trialMode=trialMode%4;
    }

    if (conditionInside && (sumOutside*2 > str2num<int>(parameters.find("FixationCycles"))) && (trialMode ==FIXATIONMODE )  )
    {
        sumOutside=0;
        trialMode++;
        trialMode=trialMode%4;
        stimulusDuration.start();
    }
    if ( trialMode == STIMULUSMODE )
        stimulusFrames++;
    if ( trialMode == FIXATIONMODE )
        stimulusFrames=0;

    Screen screenPassive;

    screenPassive.setWidthHeight(SCREEN_WIDE_SIZE, SCREEN_WIDE_SIZE*SCREEN_HEIGHT/SCREEN_WIDTH);
    screenPassive.setOffset(alignmentX,alignmentY);
    screenPassive.setFocalDistance(0);
    screenPassive.transform(headEyeCoords.getRigidStart().getFullTransformation()*Translation3d(center));

    camPassive.init(screenPassive);
    camPassive.setDrySimulation(true);
    camPassive.setEye(eyeRight);
    objectPassiveTransformation = ( camPassive.getModelViewMatrix()*objectActiveTransformation );
    // Coordinates picker
    markers = optotrak.getAllPoints();
    if ( isVisible(markers[1]) && isVisible(markers[2]) && isVisible(markers[3]) )
        headEyeCoords.update(markers[1],markers[2],markers[3]);
    Affine3d active = headEyeCoords.getRigidStart().getFullTransformation();

    eulerAngles.init( headEyeCoords.getRigidStart().getFullTransformation().rotation() );

    eyeLeft = headEyeCoords.getLeftEye();
    eyeRight = headEyeCoords.getRightEye();

    cyclopeanEye = (eyeLeft+eyeRight)/2.0;

    // Projection of view normal on the focal plane
    Vector3d directionOfSight = (active.rotation()*Vector3d(0,0,-1)).normalized();
    Eigen::ParametrizedLine<double,3> lineOfSightRight = Eigen::ParametrizedLine<double,3>::Through( eyeRight , eyeRight+directionOfSight );
    Eigen::ParametrizedLine<double,3> lineOfSightLeft  = Eigen::ParametrizedLine<double,3>::Through( eyeLeft, eyeLeft+directionOfSight );

    double lineOfSightRightDistanceToFocalPlane = lineOfSightRight.intersection(focalPlane);
    double lineOfSightLeftDistanceToFocalPlane = lineOfSightLeft.intersection(focalPlane);

    //double lenghtOnZ = (active*(center-eyeCalibration )+eyeRight).z();
    projPointEyeRight = lineOfSightRightDistanceToFocalPlane *(directionOfSight)+ (eyeRight);
    projPointEyeLeft= lineOfSightLeftDistanceToFocalPlane * (directionOfSight) + (eyeLeft);
    // Second projection: the fixation point computed with non-constant z, kept perfectly parallel to projPointEyeRight
    lineOfSightRightDistanceToFocalPlane= (( active.rotation()*(center)) - eyeRight).norm();
    Vector3d secondProjection = lineOfSightRightDistanceToFocalPlane *(directionOfSight)+ (eyeRight);

    if ( !zOnFocalPlane )
        projPointEyeRight=secondProjection ;

    // Compute the translation to move the eye in order to avoid shear components
    Vector3d posAlongLineOfSight = (headEyeCoords.getRigidStart().getFullTransformation().rotation())*(eyeRight -eyeCalibration);
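    // Translation factor (switch below): -1/-2 and 0 apply no extra translation
    // (with -1/-2 also pinning the right-eye projection to the stimulus center during
    // the stimulus); 1 maps the eye's displacement along the line of sight into x, 2 into y.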

    switch ( (int)factors["Translation"] )
    {
    case -1:
    case -2:
        translationFactor.setZero();
        if ( trialMode == STIMULUSMODE )
            projPointEyeRight=center;
        break;
    case 0:
        translationFactor.setZero();
        break;
    case 1:
        translationFactor = factors["TranslationConstant"]*Vector3d(posAlongLineOfSight.z(),0,0);
        break;
    case 2:
        translationFactor = factors["TranslationConstant"]*Vector3d(0,posAlongLineOfSight.z(),0);
        break;
    }
    if ( passiveMode )
        initProjectionScreen(0,headEyeCoords.getRigidStart().getFullTransformation()*Translation3d(Vector3d(0,0,focalDistance)));
    else
        initProjectionScreen(focalDistance,Affine3d::Identity());

    checkBounds();
    /**** Save to file part ****/
    // Markers file save the used markers and time-depending experimental variable to a file
    // (Make sure that in passive experiment the list of variables has the same order)
    markersFile << trialNumber << " " << headCalibrationDone << " " << trialMode << " " ;
    markersFile <<markers[1].transpose() << " " << markers[2].transpose() << " " << markers[3].transpose() << " " << markers[17].transpose() << " " << markers[18].transpose() << " " ;

    markersFile <<	factors["Tilt"] << " " <<
                factors["Slant"] << " " <<
                factors["Translation"] << " " <<
                factors["Onset"] << " " <<
                factors["TranslationConstant"] <<
                endl;

    ofstream outputfile;
    outputfile.open("data.dat");
    outputfile << "Subject Name: " << parameters.find("SubjectName") << endl;
    outputfile << "Passive matrix:" << endl << objectPassiveTransformation.matrix() << endl;
    outputfile << "Yaw: " << toDegrees(eulerAngles.getYaw()) << endl <<"Pitch: " << toDegrees(eulerAngles.getPitch()) << endl;
    outputfile << "EyeLeft: " <<  headEyeCoords.getLeftEye().transpose() << endl;
    outputfile << "EyeRight: " << headEyeCoords.getRightEye().transpose() << endl << endl;
    outputfile << "Slant: " << instantPlaneSlant << endl;
    outputfile << "(Width,Height) [px]: " << getPlaneDimensions().transpose() << " " << endl;
    outputfile << "Factors:" << endl;
    for (map<string,double>::iterator iter=factors.begin(); iter!=factors.end(); ++iter)
    {
        outputfile << "\t\t" << iter->first << "= " << iter->second << endl;
    }
    outputfile << "Trial remaining: " << trial.getRemainingTrials()+1 << endl;
    outputfile << "Last response: " << probeAngle << endl;
    // Here we save plane projected width and height


    // now rewind the file
    outputfile.clear();
    outputfile.seekp(0,ios::beg);

    // Write down frame by frame the trajectories and angles of eyes and head
    if ( trialMode == STIMULUSMODE && headCalibrationDone > 2 )
    {
        trajFile << setw(6) << left <<
                 trialNumber << " " <<
                 stimulusFrames << " " <<
                 eyeRight.transpose() << endl;

        anglesFile << setw(6) << left <<
                   trialNumber << " " <<
                   stimulusFrames << " " <<
                   toDegrees(eulerAngles.getPitch()) << " " <<
                   toDegrees(eulerAngles.getRoll()) << " " <<
                   toDegrees(eulerAngles.getYaw()) << " " <<
                   instantPlaneSlant << endl;

        matrixFile << setw(6) << left <<
                   trialNumber << " " <<
                   stimulusFrames << " " ;
        for (int i=0; i<3; i++)
            matrixFile << objectPassiveTransformation.matrix().row(i) << " " ;
        matrixFile << endl;

        // Write the 13 special extremal points on stimFile
        stimFile << setw(6) << left <<
                 trialNumber << " " <<
                 stimulusFrames << " " ;
        double winx=0,winy=0,winz=0;

        for (PointsRandIterator iRand = redDotsPlane.specialPointsRand.begin(); iRand!=redDotsPlane.specialPointsRand.end(); ++iRand)
        {   Point3D *p=(*iRand);
            Vector3d v = objectActiveTransformation*Vector3d( p->x, p->y, p->z);

            gluProject(v.x(),v.y(),v.z(), (&cam)->getModelViewMatrix().data(), (&cam)->getProjectiveMatrix().data(), (&cam)->getViewport().data(), &winx,&winy,&winz);
            stimFile << winx << " " << winy << " " << winz << " ";
        }
        stimFile << endl;
    }

    glutPostRedisplay();
    glutTimerFunc(TIMER_MS, update, 0);
}
Example #6
void update(int value)
{
    // Timing things
    if ( trialMode != PROBEMODE )
    {
        oldvariable = variable;
        variable = -factors["Onset"]*mathcommon::trianglewave( timeFrame , factors["StimulusDuration"]/(TIMER_MS*factors["FollowingSpeed"]) );

        timeFrame+=1;
        bool isInside = ((projPoint - Vector3d(0,0,focalDistance) ).norm()) <= (circleRadius);
        // Allows advancing if we are in STIMULUSMODE and the stimulus has completed a half-cycle (one pass from right to left or from top to bottom)
        bool nextMode = ( sumOutside == 0 ) && (trialMode==STIMULUSMODE);

        if ( ( isInside && ( sumOutside > stimulusEmiCycles ) ) || nextMode )
        {
            sumOutside=-1;
            advanceTrial();
            //cerr << "stim time= " << stimulusTimer.getElapsedTimeInMilliSec() << endl;
        }
    }

    // Simulate head translation
    // Coordinates picker
    markers[1] = Vector3d(0,0,0);
    markers[2] = Vector3d(0,10,0);
    markers[3] = Vector3d(0,0,10);

    Vector3d translation(0,0,0);
    switch ( (int) factors["Anchored"] )
    {
    case 0:
        translation = Vector3d((circleRadius+1)*variable,0,0);
        break;
    case 1:
        translation = Vector3d((circleRadius+1)*variable,0,0);
        break;
    case 2:
        translation = Vector3d(0,(circleRadius+1)*variable,0);
        break;
    }

    markers[1]+=translation;
    markers[2]+=translation;
    markers[3]+=translation;

    headEyeCoords.update(markers[1],markers[2],markers[3]);

    eyeLeft = headEyeCoords.getLeftEye();
    eyeRight = headEyeCoords.getRightEye();



    fixationPoint = (headEyeCoords.getRigidStart().getFullTransformation() * ( Vector3d(0,0,focalDistance) ) );
    // Projection of view normal on the focal plane
    pline = Eigen::ParametrizedLine<double,3>::Through(eyeRight,fixationPoint);
    projPoint = pline.intersection(focalPlane)*((fixationPoint - eyeRight).normalized()) + eyeRight;

    checkBounds();

    glutPostRedisplay();
    glutTimerFunc(TIMER_MS, update, 0);
}
void update(int value)
{   // Read the experiment from file; if the file has ended, exit immediately
    if ( inputStream.eof() )
    {   cleanup();
        exit(0);
    }

    if ( isReading )
    {   // This reads a line (frame) in inputStream
        readline(inputStream, trialNumber,  headCalibration,  trialMode, pointMatrix );

        headEyeCoords.update(pointMatrix.col(0),pointMatrix.col(1),pointMatrix.col(2));
        Affine3d active = headEyeCoords.getRigidStart().getFullTransformation();
        eulerAngles.init( headEyeCoords.getRigidStart().getFullTransformation().rotation() );

        eyeLeft = headEyeCoords.getLeftEye();
        eyeRight= headEyeCoords.getRightEye();

        cyclopeanEye = (eyeLeft+eyeRight)/2.0;

		if ( trialMode == STIMULUSMODE )
			stimulusFrames++;
		if ( trialMode == FIXATIONMODE )
			stimulusFrames=0;

        // Projection of view normal on the focal plane
	Vector3d directionOfSight = (active.rotation()*Vector3d(0,0,-1)).normalized();
	Eigen::ParametrizedLine<double,3> lineOfSightRight = Eigen::ParametrizedLine<double,3>::Through( eyeRight , eyeRight+directionOfSight );
	Eigen::ParametrizedLine<double,3> lineOfSightLeft  = Eigen::ParametrizedLine<double,3>::Through( eyeLeft, eyeLeft+directionOfSight );
	
	double lineOfSightRightDistanceToFocalPlane = lineOfSightRight.intersection(focalPlane);
	double lineOfSightLeftDistanceToFocalPlane = lineOfSightLeft.intersection(focalPlane);
	
	//double lenghtOnZ = (active*(center-eyeCalibration )+eyeRight).z();
	projPointEyeRight = lineOfSightRightDistanceToFocalPlane *(directionOfSight)+ (eyeRight);
	projPointEyeLeft= lineOfSightLeftDistanceToFocalPlane * (directionOfSight) + (eyeLeft);
	// Second projection: the fixation point computed with non-constant z, kept perfectly parallel to projPointEyeRight
	lineOfSightRightDistanceToFocalPlane= (( active.rotation()*(center)) - eyeRight).norm();
	Vector3d secondProjection = lineOfSightRightDistanceToFocalPlane *(directionOfSight)+ (eyeRight);
	
	if ( !zOnFocalPlane )
		projPointEyeRight = secondProjection;

	// Compute the translation to move the eye in order to avoid shear components
	Vector3d posAlongLineOfSight = (headEyeCoords.getRigidStart().getFullTransformation().rotation())*(eyeRight -eyeCalibration);
	// GENERATION OF PASSIVE MODE.
        // HERE WE MOVE THE SCREEN TO FACE THE OBSERVER's EYE
        if ( passiveMode )
        {
            initProjectionScreen(0, headEyeCoords.getRigidStart().getFullTransformation()*Translation3d(center));
        }
        else
            initProjectionScreen(focalDistance, Affine3d::Identity());
        
	objectPassiveTransformation = ( cam.getModelViewMatrix()*objectActiveTransformation );
    
	ofstream outputfile;
	outputfile.open("data.dat");
	outputfile << "Subject Name: " << parameters.find("SubjectName") << endl;
	outputfile << "Passive matrix:" << endl << objectPassiveTransformation.matrix() << endl;
	outputfile << "Yaw: " << toDegrees(eulerAngles.getYaw()) << endl <<"Pitch: " << toDegrees(eulerAngles.getPitch()) << endl;
	outputfile << "EyeLeft: " <<  headEyeCoords.getLeftEye().transpose() << endl;
	outputfile << "EyeRight: " << headEyeCoords.getRightEye().transpose() << endl << endl;
	outputfile << "Slant: " << instantPlaneSlant << endl;
	outputfile << "Factors:" << endl;
	for (map<string,double>::iterator iter=factors.begin(); iter!=factors.end(); ++iter)
	{
		outputfile << "\t\t" << iter->first << "= " << iter->second << endl;
	}
	
	}

    if ( trialMode == PROBEMODE )
        isReading=false;

    glutPostRedisplay();
    glutTimerFunc(TIMER_MS, update, 0);
}
Example #8
void drawGeneratedPlanes()
{
	Hyperplane<double,3> screenPlane,mirrorPlane;

	//mirror[3] = Vector3d(mirror[1].x(),mirror[2].y(),mirror[1].z());
	screen[3] = Vector3d(screen[1].x(), screen[2].y(),screen[1].z());
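	// The fourth screen corner is reconstructed from marker 1's x,z and marker 2's y.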


	screenPlane = Hyperplane<double,3>::Through( screen[0], screen[1],screen[2] );
	mirrorPlane = Hyperplane<double,3>::Through(mirror[1], mirror[0], mirror[2] );

	screenNormal = screenPlane.normal();
	mirrorNormal = mirrorPlane.normal();

	Eigen::ParametrizedLine<double,3> line = Eigen::ParametrizedLine<double,3>::Through( screenCenter , screenCenter + screenNormal*10);
	// Compute the point where the normal leaving the screen projects onto the mirror
	double p = line.intersection(mirrorPlane);
	screenProjectionOnMirror = p*((screenNormal).normalized()) + screenCenter;

	planesIntersectionBase = -getPlaneIntersection(mirrorPlane,screenPlane,0);
	planesIntersectionDirection = mirrorPlane.normal().cross(screenPlane.normal());

	planesIntersecEnd = planesIntersectionBase - 200*planesIntersectionDirection;

	// Draw the planes intersection
	glPushMatrix();
	glColor3fv(glBlue);
	glBegin(GL_LINES);
	glVertex3dv(planesIntersectionBase.data());
	glVertex3dv(planesIntersecEnd.data());
	glEnd();
	glPopMatrix();

	// Draw the mirror plane (only if calibrationPhase==0)
	if ( calibrationPhase==0 )
	{
		glPushMatrix();
		Grid gridMirror;
		gridMirror.setRowsAndCols(20,20);
		gridMirror.init(mirror[1],mirror[0],mirror[2],mirror[3]);	// invert the order to get 45 degrees yaw and 90 degrees pitch when correct
		glColor3fv(glRed);
		gridMirror.draw();
		glPopMatrix();

		// Draw the mirror normal
		mirrorCenter = (mirror[0]+mirror[1]+mirror[2]+mirror[3])/4.0;
		Vector3d mirrorCenterDirection = mirrorCenter + 20*mirrorNormal;
		glPushMatrix();
		glColor3fv(glRed);
		glBegin(GL_LINES);
		glVertex3dv(mirrorCenter.data());
		glVertex3dv(mirrorCenterDirection.data());
		glEnd();
		glPopMatrix();
	}

	// Draw the screen plane
	if ( calibrationPhase!=0)
	{
		glPushMatrix();
		Grid gridScreen;
		gridScreen.setRowsAndCols(20,20);
		gridScreen.init(screen[0],screen[1],screen[2],screen[3]);
		glColor3fv(glGreen);
		gridScreen.draw();
		glPopMatrix();

		// Draw the screen normal
		screenCenter = (screen[0]+screen[1]+screen[2]+screen[3])/4.0;
		Vector3d screenCenterDirection = screenCenter + 20*screenNormal;
		glPushMatrix();
		glBegin(GL_LINES);
		glVertex3dv(screenCenter.data());
		glVertex3dv(screenCenterDirection.data());
		glEnd();
		glPopMatrix();
	}


	// Draw the vector from screen center to mirror center
	glPushMatrix();
	glColor3fv(glRed);
	glBegin(GL_LINES);
	glVertex3dv(screenCenter.data());
	glVertex3dv(mirrorCenter.data());
	glEnd();
	glPopMatrix();

	// Draw the vector from the mirror center to its projection on the z=0 plane
	Vector3d orthoProjMirrorCenter(mirrorCenter);
	glPushMatrix();
	glColor3fv(glRed);
	glBegin(GL_LINES);
	glVertex3dv(mirrorCenter.data());
	glVertex3d(mirrorCenter.x(),mirrorCenter.y(),0);
	glEnd();
	glPopMatrix();

	// draw mirror saved center
	if ( calibrationPhase == 1)
	{
		glPushMatrix();
		glTranslatef(mirrorSavedCenter.x(),mirrorSavedCenter.y(),mirrorSavedCenter.z());
		glutSolidSphere(1,10,20);
		glPopMatrix();
	}

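	// Yaw/pitch are recovered as the angles between each plane's unit normal and the
	// z and y axes respectively (acos of the normal's z and y components).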
	mirrorYaw = mathcommon::toDegrees(acos(mirrorNormal.z()));
	mirrorPitch = mathcommon::toDegrees(acos(mirrorNormal.y()));

	screenYaw = mathcommon::toDegrees(acos(screenNormal.z()));
	screenPitch = mathcommon::toDegrees(acos(screenNormal.y()));
	glColor3fv(glWhite);

	double tolerance = 0.5; //degrees
	bool mirrorYawOK = abs(mirrorYaw - 45) <= tolerance;
	bool screenYawOK = abs(screenYaw - 90) <= tolerance;
	bool mirrorPitchOK = abs(mirrorPitch-90) <= tolerance;
	bool screenPitchOK = abs(screenPitch-90) <= tolerance;


	GLText text;
	text.init(SCREEN_WIDTH,SCREEN_HEIGHT,glWhite,GLUT_BITMAP_HELVETICA_18);
	text.enterTextInputMode();
	switch( calibrationPhase )
	{
	case 0:
		text.draw("==== INSTRUCTIONS ====");
		text.draw("Put the markers on the mirror vertices, try to keep them aligned. Press SPACEBAR to save their coordinates");
		text.draw("Press 2/8 to zoom out/in. Press '+/-' to go forward or backward");
		break;
	case 1:
		{
			text.draw("Now remove the mirror, mirror coordinates are saved to calibrationCoordinates.txt");
			text.draw("Now you must put the markers on the screen");
			text.draw("");
			text.draw("");
			text.draw("Mirror current center= " + util::stringify< Eigen::Matrix<double,1,3> >(mirrorSavedCenter.transpose() ) );
			text.draw("Distance from mirror center to screen center " + util::stringify<double>( (mirrorSavedCenter - screenCenter).norm()	 ));
			text.draw("Projection error of screen normal on mirror center  " + util::stringify<Eigen::Matrix<double,1,3> >( screenProjectionOnMirror.transpose() ) );
		}
		break;
	case 2:
		break;
	}

	text.draw("==== MIRROR INFO ====");
	mirrorPitchOK ? glColor3fv(glGreen) : glColor3fv(glRed);
	text.draw("Mirror Pitch= " + util::stringify<double>(mirrorPitch) );
	mirrorYawOK ? glColor3fv(glGreen) : glColor3fv(glRed);
	text.draw("Mirror Yaw=" + util::stringify<double>(mirrorYaw));
	glColor3fv(glWhite);
	text.draw("==== SCREEN INFO ====");
	screenPitchOK ? glColor3fv(glGreen) : glColor3fv(glRed);
	text.draw("Screen Pitch=" + util::stringify<double>(screenPitch));
	screenYawOK ? glColor3fv(glGreen) : glColor3fv(glRed);
	text.draw("Screen Yaw=" + util::stringify<double>(screenYaw));
	// Here print the relative orientation of screen and mirror
	if ( calibrationPhase==0 )
	{
		text.draw("Total distance (norm)= " + util::stringify<double>(abs(mirrorCenter.z())+(mirrorCenter - screenCenter).norm()) );
		text.draw("Total distance (only x+z)= " + util::stringify<double>( abs(mirrorCenter.x()-screenCenter.x()) + abs(mirrorCenter.z()) ));
	}
	else
	{
		text.draw("Total distance (norm)= " + util::stringify<double>(abs(mirrorSavedCenter.z())+(mirrorSavedCenter - screenCenter).norm()) );
		text.draw("Total distance (ignore y offset)= " + util::stringify<double>( abs(mirrorSavedCenter.x()-screenCenter.x()) + abs(mirrorSavedCenter.z()) ));
	}
	text.draw("Intersection of planes" + util::stringify<Eigen::Matrix<int,1,3> >(planesIntersectionBase.cast<int>()));
	text.leaveTextInputMode();
}