Example #1
void SceneGraphTest::testScalingNodeParents()
{
	NodePointer t1(new ScalingNode(1, 5, 1));
	NodePointer t2(new ScalingNode(4, 1, 1, t1));
	NodePointer n1(new Node(t2));
	Affine3d expected;
	expected.setIdentity();
	AlignedScaling3d expectedScaling(4, 5, 1);
	expected *= expectedScaling;
	Matrix4d actual = n1->getTransform().matrix();
	QCOMPARE(actual, expected.matrix());
}
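The test above relies on Eigen composing two axis-aligned scalings into a single affine transform. A minimal standalone sketch of just that Eigen part (no SceneGraph/Node types, only Eigen/Geometry; the values mirror the test):

#include <Eigen/Geometry>
#include <iostream>

int main()
{
    using namespace Eigen;
    Affine3d expected;
    expected.setIdentity();
    expected *= AlignedScaling3d(1, 5, 1);   // parent scaling node
    expected *= AlignedScaling3d(4, 1, 1);   // child scaling node
    // The two axis-aligned scalings combine component-wise into (4, 5, 1).
    std::cout << expected.matrix() << std::endl;
    return 0;
}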
Example #2
void Mesh::push_matrix() const
{
  using namespace igl;
  using namespace Eigen;
  glPushMatrix();

  Affine3d t;
  t.setIdentity();
  t.rotate(rotation);
  glMultMatrixd(t.matrix().data());
  glScaled(scale,scale,scale);
  glTranslated(shift(0),shift(1),shift(2));
}
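Because Affine3d::matrix() is stored as 16 column-major doubles, the same layout glMultMatrixd expects, the whole object transform above could also be assembled on the Eigen side and pushed in one call. A hypothetical sketch under that assumption (rotation, scale and shift are passed in here instead of being Mesh members; OpenGL headers as in the example above are assumed):

// Hypothetical helper, not part of the Mesh class above.
void mult_object_matrix(const Eigen::Quaterniond &rotation,
                        double scale,
                        const Eigen::Vector3d &shift)
{
  Eigen::Affine3d t = Eigen::Affine3d::Identity();
  // Fixed-function GL right-multiplies every call onto the current matrix,
  // and Eigen's rotate/scale/translate also right-multiply, so this order
  // reproduces glMultMatrixd + glScaled + glTranslated from the example.
  t.rotate(rotation);
  t.scale(scale);
  t.translate(shift);
  glMultMatrixd(t.matrix().data());  // column-major doubles, directly compatible
}

Example #3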
geometry_msgs::Pose MoveitPlanningInterface::transformEigenAffine3dToPose(Affine3d e) {
	Vector3d Oe;
	Matrix3d Re;
	geometry_msgs::Pose pose;
	Oe = e.translation();
	Re = e.linear();
	
	Quaterniond q(Re); // convert rotation matrix Re to a quaternion, q
	pose.position.x = Oe(0);
	pose.position.y = Oe(1);
	pose.position.z = Oe(2);
	
	pose.orientation.x = q.x();
	pose.orientation.y = q.y();
	pose.orientation.z = q.z();
	pose.orientation.w = q.w();
	
	return pose;
}
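The function above converts from Eigen to ROS; the opposite direction is often needed as well. A possible counterpart, sketched here and not part of the original interface, assuming the same geometry_msgs/Eigen headers as the example above:

Eigen::Affine3d transformPoseToEigenAffine3d(const geometry_msgs::Pose &pose) {
	// Hypothetical inverse of the conversion above: rebuild the Affine3d
	// from the quaternion and translation stored in the Pose message.
	Eigen::Quaterniond q(pose.orientation.w, pose.orientation.x,
	                     pose.orientation.y, pose.orientation.z);
	Eigen::Affine3d e = Eigen::Affine3d::Identity();
	e.linear() = q.normalized().toRotationMatrix();
	e.translation() << pose.position.x, pose.position.y, pose.position.z;
	return e;
}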
Example #4
bool pre_draw(igl::opengl::glfw::Viewer & viewer)
{
  using namespace Eigen;
  using namespace std;
  if(viewer.core.is_animating)
  {
    // Interpolate pose and identity
    RotationList anim_pose(pose.size());
    for(int e = 0;e<pose.size();e++)
    {
      anim_pose[e] = pose[e].slerp(anim_t,Quaterniond::Identity());
    }
    // Propagate relative rotations via FK to retrieve absolute transformations
    RotationList vQ;
    vector<Vector3d> vT;
    igl::forward_kinematics(C,BE,P,anim_pose,vQ,vT);
    const int dim = C.cols();
    MatrixXd T(BE.rows()*(dim+1),dim);
    for(int e = 0;e<BE.rows();e++)
    {
      Affine3d a = Affine3d::Identity();
      a.translate(vT[e]);
      a.rotate(vQ[e]);
      T.block(e*(dim+1),0,dim+1,dim) =
        a.matrix().transpose().block(0,0,dim+1,dim);
    }
    // Compute deformation via LBS as matrix multiplication
    U = M*T;

    // Also deform skeleton edges
    MatrixXd CT;
    MatrixXi BET;
    igl::deform_skeleton(C,BE,T,CT,BET);

    viewer.data().set_vertices(U);
    viewer.data().set_edges(CT,BET,sea_green);
    viewer.data().compute_normals();
    anim_t += anim_t_dir;
    anim_t_dir *= (anim_t>=1.0 || anim_t<=0.0?-1.0:1.0);
  }
  return false;
}
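One detail worth spelling out in the loop above is why T stores a.matrix().transpose().block(0,0,dim+1,dim): with that layout, a homogeneous row vector [v^T 1] times the per-bone block reproduces a*v, which is what the linear blend skinning product U = M*T relies on. A tiny Eigen-only check of that identity (independent of libigl, with arbitrary rotation and translation values):

#include <Eigen/Geometry>
#include <iostream>

int main()
{
  using namespace Eigen;
  Affine3d a = Affine3d::Identity();
  a.translate(Vector3d(1, 2, 3));
  a.rotate(AngleAxisd(0.3, Vector3d::UnitY()));
  Vector3d v(0.5, -1.0, 2.0);
  RowVector4d vh;
  vh << v.transpose(), 1.0;                        // homogeneous row vector
  // Same 4x3 block that each bone contributes to T in the example above.
  MatrixXd Te = a.matrix().transpose().block(0, 0, 4, 3);
  std::cout << (vh * Te - (a * v).transpose()).norm() << std::endl; // ~0
  return 0;
}

Example #5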
/**
* \ingroup Geometry
* \brief Initialize this object with 3 screen coordinates and 3 mirror coordinates.
* Virtual screen coordinates are computed from the inputs given.
* \param _realScreen coordinates of 3 points on the screen
* \param _realMirror coordinates of 3 points that define the mirror plane
**/
void ScreenCoordinatesExtractor::init( const vector<Vector3d> &_realScreen, const vector<Vector3d> &_realMirror )
{  
	if (_realScreen.size() != 3 || _realMirror.size() !=3 )
		throw std::runtime_error("I need exactly 3 points to define a plane!");

   virtualScreenCoordinates.resize(3);
   screenCoordinates.resize(3);
   for (int i=0; i<3; i++)
      screenCoordinates[i]=_realScreen[i];
   Hyperplane<double,3> screenPlane,mirrorPlane;

   screenPlane = Hyperplane<double,3>::Through( _realScreen[0],_realScreen[1],_realScreen[2] );
   mirrorPlane = Hyperplane<double,3>::Through( _realMirror[0],_realMirror[1],_realMirror[2] );

   if ( screenPlane.isApprox(mirrorPlane) )
     throw std::runtime_error("Planes are parallel!");
   
   // Save the plane information for debugging purposes
   mirrorOffset=mirrorPlane.offset();
   screenOffset=screenPlane.offset();
   mirrorNormal=mirrorPlane.normal();
   screenNormal=screenPlane.normal();

   // Correct the two planes for the marker offset (3.3 mm) by translating each back along its normal by markerOffset
   if (hasMarkerOffset)
   {  Affine3d transScreen = Affine3d::Identity();
      Affine3d transMirror = Affine3d::Identity();
      transScreen.translate ( screenNormal*markerOffset );
      transMirror.translate ( mirrorNormal*markerOffset );
      screenPlane.transform(transScreen);
      mirrorPlane.transform(transMirror);
      //cerr << "SCREEN\n" << transScreen.matrix() << endl << "MIRROR\n" << transMirror.matrix() << endl;
   }

   for (int i=0; i<3; i++)
   {  Vector3d tmp = mirrorPlane.projection(Vector3d(screenCoordinates[i]));
      virtualScreenCoordinates[i] = Vector3d(tmp)*2 - screenCoordinates[i];
   }
}
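The final loop computes the virtual screen by mirror reflection: reflecting a point across a plane is 2*projection(p) - p. The same idea in isolation, using only Eigen's Hyperplane (the plane here, z = 1, is just an illustrative choice):

#include <Eigen/Geometry>
#include <iostream>

int main()
{
   using namespace Eigen;
   // Mirror plane z = 1, defined through three points as in the example above.
   Hyperplane<double,3> mirror = Hyperplane<double,3>::Through(
         Vector3d(0,0,1), Vector3d(1,0,1), Vector3d(0,1,1));
   Vector3d p(0.3, -0.2, 3.0);
   Vector3d reflected = 2.0*mirror.projection(p) - p;   // expected z: -1
   std::cout << reflected.transpose() << std::endl;
   return 0;
}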
Example #6
void drawRedDotsPlane()
{   // Draw the stimulus ( red-dots plane )
    glDisable(GL_COLOR_MATERIAL);
    glDisable(GL_BLEND);
    glDisable(GL_LIGHTING);

    // IMPORTANT Reset the previous status of transformation
    objectActiveTransformation.setIdentity();
    objectActiveTransformation.translation() = projPointEyeRight + translationFactor;
    if ((int)factors["Translation"]==-1 || (int)factors["Translation"]==-2 )
        objectActiveTransformation.linear().setIdentity();
    else
        objectActiveTransformation.linear()	= (AngleAxis<double>(eulerAngles.getYaw(), Vector3d::UnitY())*AngleAxis<double>(eulerAngles.getPitch(), Vector3d::UnitX())).toRotationMatrix();

    glPushMatrix();     // PUSH MATRIX
    glLoadIdentity();
    glMultMatrixd(objectActiveTransformation.data());

    Vector3d posAlongLineOfSight = (headEyeCoords.getRigidStart().getFullTransformation().rotation())*(eyeRight-eyeCalibration);

    double argslant = acos( cos(toRadians(factors["Slant"]))*(focalDistance-posAlongLineOfSight.z() )/((focalDistance )));
    instantPlaneSlant = toDegrees(argslant);


    switch ( (int) factors["Tilt"] )
    {
    case 0:
        glRotated( instantPlaneSlant ,0,1,0);
        //objectActiveTransformation*=AngleAxisd( toRadians(-instantPlaneSlant), Vector3d::UnitY() );
        glScaled(1/sin(toRadians( -90-factors["Slant"])),1,1);	//backprojection phase
        break;
    case 90:
        glRotated( -instantPlaneSlant ,1,0,0);
        //objectActiveTransformation*=AngleAxisd( toRadians(-instantPlaneSlant), Vector3d::UnitX() );
        glScaled(1,1/sin(toRadians( -90-factors["Slant"] )),1); //backprojection phase
        break;
    case 180:
        glRotated( -instantPlaneSlant ,0,1,0);
        //objectActiveTransformation*=AngleAxisd( toRadians(-instantPlaneSlant), Vector3d::UnitY() );
        glScaled(1/sin(toRadians( -90-factors["Slant"] )),1,1); //backprojection phase
        break;
    case 270:
        glRotated( instantPlaneSlant ,1,0,0);
        //objectActiveTransformation*=AngleAxisd( toRadians(-instantPlaneSlant), Vector3d::UnitX() );
        glScaled(1,1/sin(toRadians( -90-factors["Slant"] )),1); //backprojection phase
        break;
    }

    glGetDoublev(GL_MODELVIEW_MATRIX,objectActiveTransformation.data());
    BoundChecker stimBoundariesActive(&cam, &redDotsPlane);
    BoundChecker stimBoundariesPassive(&camPassive, &redDotsPlane);

    stimOutside = ( stimBoundariesActive.checkOutside(objectActiveTransformation) || stimBoundariesPassive.checkOutside(objectActiveTransformation));
    stimDrawer.draw();

    glPopMatrix();	// POP MATRIX

}
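The call glGetDoublev(GL_MODELVIEW_MATRIX, objectActiveTransformation.data()) above works because an Affine3d stores a full 4x4 array of column-major doubles, exactly what the fixed-function pipeline reads and writes. A small round-trip sketch of that assumption (a current GL context and the usual GL headers are assumed; this is not part of the experiment code):

#include <Eigen/Geometry>

void roundTripModelView(Eigen::Affine3d &objectActiveTransformation)
{
    glMatrixMode(GL_MODELVIEW);
    glPushMatrix();
    glLoadIdentity();
    glMultMatrixd(objectActiveTransformation.data());                     // Eigen -> GL
    glGetDoublev(GL_MODELVIEW_MATRIX, objectActiveTransformation.data()); // GL -> Eigen
    glPopMatrix();
}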
Example #7
void drawRedDotsPlane()
{
    // This is used to draw the stimulus only once the eye has moved from right to left, so that it appears at the center
    glPushAttrib(GL_ALL_ATTRIB_BITS);
    glPointSize(1);
    // Draw the stimulus ( red-dots plane )
    double angle = factors.at("Slant") + deltaT/1000.0*factors.at("OmegaY");
    //angle = mathcommon::toDegrees(headEyeCoords.getYaw());
    glPushMatrix();
    glLoadIdentity();
    glTranslated(0,0,focalDistance);
    switch ( (int)factors.at("Tilt") )
    {
    case 0:
    {
        glRotated( angle ,0,1,0);
    }
    break;
    case 90:
    {
        glRotated( angle,1,0,0);
    }
    break;
    case 180:
    {
        glRotated( angle,0,-1,0);
    }
    break;
    case 270:
    {
        glRotated( angle,-1,0,0);
    }
    break;
    }
    glGetDoublev(GL_MODELVIEW_MATRIX,objectActiveTransformation.data());
    if ( (eyeRight.x()) < centerTolerance )
	stimDrawer.draw();
    glPopMatrix();
    glPopAttrib();
}
Example #8
void update(int value)
{
    frameTimer.start();
// Read the experiment from file; if the file has ended, exit immediately
    if ( inputStream.eof() )
    {   exit(0);
    }

    if ( isReading )
    {   // This reads a line (frame) in inputStream
        readline(inputStream, trialNumber,  headCalibration,  trialMode, pointMatrix );
        headEyeCoords.update(pointMatrix.col(0),pointMatrix.col(1),pointMatrix.col(2));
        Affine3d active = headEyeCoords.getRigidStart().getFullTransformation();
        eulerAngles.init( headEyeCoords.getRigidStart().getFullTransformation().rotation() );

        eyeLeft = headEyeCoords.getLeftEye();
        eyeRight= headEyeCoords.getRightEye();
        //cerr << eyeRight.transpose() << endl;
        cyclopeanEye = (eyeLeft+eyeRight)/2.0;

        if ( trialMode == STIMULUSMODE )
            stimulusFrames++;
        if ( trialMode == FIXATIONMODE )
            stimulusFrames=0;

        // Projection of view normal on the focal plane
        Vector3d directionOfSight = (active.rotation()*Vector3d(0,0,-1)).normalized();
        Eigen::ParametrizedLine<double,3> lineOfSightRight = Eigen::ParametrizedLine<double,3>::Through( eyeRight , eyeRight+directionOfSight );

        double lineOfSightRightDistanceToFocalPlane = lineOfSightRight.intersection(focalPlane);

        //double lenghtOnZ = (active*(center-eyeCalibration )+eyeRight).z();
        projPointEyeRight = lineOfSightRightDistanceToFocalPlane *(directionOfSight)+ (eyeRight);
        // second projection the fixation point computed with z non constant but perfectly parallel to projPointEyeRight
        lineOfSightRightDistanceToFocalPlane= (( active.rotation()*(center)) - eyeRight).norm();
        Vector3d secondProjection = lineOfSightRightDistanceToFocalPlane *(directionOfSight)+ (eyeRight);
        projPointEyeRight=secondProjection ;

        // Compute the translation to move the eye in order to avoid shear components
        Vector3d posAlongLineOfSight = (headEyeCoords.getRigidStart().getFullTransformation().rotation())*(eyeRight -eyeCalibration);
        // GENERATION OF PASSIVE MODE.
        // HERE WE MOVE THE SCREEN TO FACE THE OBSERVER's EYE
        if ( passiveMode )
        {   initProjectionScreen(0, headEyeCoords.getRigidStart().getFullTransformation()*Translation3d(center));
        }
        else
            initProjectionScreen(focalDistance, Affine3d::Identity());

        if ( trialMode == STIMULUSMODE )
        {
            // IMPORTANT Reset the previous status of transformations
            objectActiveTransformation[0].setIdentity();
            objectActiveTransformation[1].setIdentity();
            // PLANE 0 Transformation (the plane below)
            alpha = atan( eyeRight.x()/abs(projPointEyeRight.z()) );
            if ( overallTilt )
            {
                instantPlaneSlant = alphaMultiplier*alpha+toRadians(-factors.at("DeltaSlant")-factors.at("StillPlaneSlant"));
                AngleAxis<double> aa0( instantPlaneSlant,Vector3d::UnitY());
                objectActiveTransformation[0]*=aa0;
                double planesYOffset = factors.at("PlanesCentersYDistance")*(whichPlaneDrawUp ? 1 : -1);
                objectActiveTransformation[0].translation() = Vector3d(0,planesYOffset,focalDistance);

                // PLANE 1 Transformation (the plane above)
                AngleAxis<double> aa1(-toRadians(factors.at("StillPlaneSlant")),Vector3d::UnitY());
                objectActiveTransformation[1]*=aa1;
                objectActiveTransformation[1].translation() = Vector3d(0,-planesYOffset,focalDistance);
            }
            else
            {
                instantPlaneSlant = alphaMultiplier*alpha+toRadians(factors.at("DeltaSlant")+factors.at("StillPlaneSlant"));
                AngleAxis<double> aa0( instantPlaneSlant,Vector3d::UnitY());
                objectActiveTransformation[0]*=aa0;
                double planesYOffset = factors.at("PlanesCentersYDistance")*(whichPlaneDrawUp ? 1 : -1);
                objectActiveTransformation[0].translation() = Vector3d(0,planesYOffset,focalDistance);

                // PLANE 1 Transformation (the plane above)
                AngleAxis<double> aa1(toRadians(factors.at("StillPlaneSlant")),Vector3d::UnitY());
                objectActiveTransformation[1]*=aa1;
                objectActiveTransformation[1].translation() = Vector3d(0,-planesYOffset,focalDistance);
            }

            objectPassiveTransformation[0] = ( cam.getModelViewMatrix()*objectActiveTransformation[0] );
            objectPassiveTransformation[1] = ( cam.getModelViewMatrix()*objectActiveTransformation[1] );

            //cout << toDegrees(instantPlaneSlant) << endl;

            // **************** COMPUTE THE OPTIC FLOWS **************************
            // 1) Project the points to the screen by computing their coordinates on the focalPlane in passive mode (quite involved, see the specific method)
            // *********** FOR THE MOVING PLANE *************
            vector<Vector3d> projPointsMovingPlane = stimDrawer[0].projectStimulusPoints(objectActiveTransformation[0],headEyeCoords.getRigidStart().getFullTransformation(),cam,focalDistance, screen, eyeCalibration,passiveMode,false);

            // 2) Get the angles formed by stimulus and observer
            // updating with the latest values
            Vector3d oldAlphaMoving = flowsAnglesAlphaMoving,oldBetaMoving=flowsAnglesBetaMoving;
            // alpha is the "pitch" angle, beta is the "yaw" angle
            // Here we must use points 4, 5 and 8 of the stimulus
            flowsAnglesAlphaMoving(0)  =  ( atan2(projPointsMovingPlane[4].x(), abs(focalDistance) ) );
            flowsAnglesAlphaMoving(1)  =  ( atan2(projPointsMovingPlane[5].x(), abs(focalDistance) ) );
            flowsAnglesAlphaMoving(2)  =  ( atan2(projPointsMovingPlane[8].x(), abs(focalDistance) ) );

            flowsAnglesBetaMoving(0)      =  ( atan2(projPointsMovingPlane[4].y(), abs(focalDistance) ) );
            flowsAnglesBetaMoving(1)      =  ( atan2(projPointsMovingPlane[5].y(), abs(focalDistance) ) );
            flowsAnglesBetaMoving(2)      =  ( atan2(projPointsMovingPlane[8].y(), abs(focalDistance) ) );

            // 3) Fill the matrix of derivatives
            MatrixXd angVelocitiesMoving(6,1);
            angVelocitiesMoving(0) = flowsAnglesAlphaMoving(0)-oldAlphaMoving(0);
            angVelocitiesMoving(1) = flowsAnglesBetaMoving(0)-oldBetaMoving(0);
            angVelocitiesMoving(2) = flowsAnglesAlphaMoving(1)-oldAlphaMoving(1);
            angVelocitiesMoving(3) = flowsAnglesBetaMoving(1)-oldBetaMoving(1);
            angVelocitiesMoving(4) = flowsAnglesAlphaMoving(2)-oldAlphaMoving(2);
            angVelocitiesMoving(5) = flowsAnglesBetaMoving(2)-oldBetaMoving(2);
            angVelocitiesMoving /= ((double)TIMER_MS/(double)1000);

            // 4) Fill the coefficient matrix, to solve the linear system
            MatrixXd coeffMatrixMoving(6,6);
            coeffMatrixMoving <<
                1, flowsAnglesAlphaMoving(0), flowsAnglesBetaMoving(0), 0, 0, 0,
                0, 0, 0, 1, flowsAnglesAlphaMoving(0), flowsAnglesBetaMoving(0),
                1, flowsAnglesAlphaMoving(1), flowsAnglesBetaMoving(1), 0, 0, 0,
                0, 0, 0, 1, flowsAnglesAlphaMoving(1), flowsAnglesBetaMoving(1),
                1, flowsAnglesAlphaMoving(2), flowsAnglesBetaMoving(2), 0, 0, 0,
                0, 0, 0, 1, flowsAnglesAlphaMoving(2), flowsAnglesBetaMoving(2);
            // 5) Solve the linear system via the robust ColPivHouseholderQR decomposition (see the Eigen docs for details: http://eigen.tuxfamily.org/dox/TutorialLinearAlgebra.html )
            MatrixXd velocitiesMoving = coeffMatrixMoving.colPivHouseholderQr().solve(angVelocitiesMoving);
            // 6) Write the output to file flowsFileMoving
            flowsFileMoving << fixed << trialNumber << "\t" <<  //1
                            stimulusFrames << " " <<
                            factors.at("DeltaSlant")<< " " <<
                            factors.at("StillPlaneSlant") << " " <<
                            overallTilt << " " <<
                            projPointsMovingPlane[4].transpose() << " " <<
                            projPointsMovingPlane[5].transpose() << " " <<
                            projPointsMovingPlane[8].transpose() << " " <<
                            angVelocitiesMoving.transpose() << " " <<
                            velocitiesMoving.transpose() << endl;

            // ********************* FLOWS FOR THE STILL PLANE **************
            // Here we must repeat the same things for the still plane
            vector<Vector3d> projPointsStillPlane = stimDrawer[1].projectStimulusPoints(objectActiveTransformation[1],headEyeCoords.getRigidStart().getFullTransformation(),cam,focalDistance, screen, eyeCalibration,passiveMode,false);

            // 2) Get the angles formed by stimulus and observer
            // updating with the latest values
            Vector3d oldAlphaStill = flowsAnglesAlphaStill,oldBetaStill=flowsAnglesBetaStill;
            // alpha is the "pitch" angle, beta is the "yaw" angle
            // Here we must use points 4, 5 and 8 of the stimulus
            flowsAnglesAlphaStill(0)  =  ( atan2(projPointsStillPlane[4].x(), abs(focalDistance) ) );
            flowsAnglesAlphaStill(1)  =  ( atan2(projPointsStillPlane[5].x(), abs(focalDistance) ) );
            flowsAnglesAlphaStill(2)  =  ( atan2(projPointsStillPlane[8].x(), abs(focalDistance) ) );

            flowsAnglesBetaStill(0)      =  ( atan2(projPointsStillPlane[4].y(), abs(focalDistance) ) );
            flowsAnglesBetaStill(1)      =  ( atan2(projPointsStillPlane[5].y(), abs(focalDistance) ) );
            flowsAnglesBetaStill(2)      =  ( atan2(projPointsStillPlane[8].y(), abs(focalDistance) ) );

            // 3) Fill the matrix of derivatives
            MatrixXd angVelocitiesStill(6,1);
            angVelocitiesStill(0) = flowsAnglesAlphaStill(0)-oldAlphaStill(0);
            angVelocitiesStill(1) = flowsAnglesBetaStill(0)-oldBetaStill(0);
            angVelocitiesStill(2) = flowsAnglesAlphaStill(1)-oldAlphaStill(1);
            angVelocitiesStill(3) = flowsAnglesBetaStill(1)-oldBetaStill(1);
            angVelocitiesStill(4) = flowsAnglesAlphaStill(2)-oldAlphaStill(2);
            angVelocitiesStill(5) = flowsAnglesBetaStill(2)-oldBetaStill(2);
            angVelocitiesStill /= ((double)TIMER_MS/(double)1000);

            // 4) Fill the coefficient matrix, to solve the linear system
            MatrixXd coeffMatrixStill(6,6);
            coeffMatrixStill <<
                1, flowsAnglesAlphaStill(0), flowsAnglesBetaStill(0), 0, 0, 0,
                0, 0, 0, 1, flowsAnglesAlphaStill(0), flowsAnglesBetaStill(0),
                1, flowsAnglesAlphaStill(1), flowsAnglesBetaStill(1), 0, 0, 0,
                0, 0, 0, 1, flowsAnglesAlphaStill(1), flowsAnglesBetaStill(1),
                1, flowsAnglesAlphaStill(2), flowsAnglesBetaStill(2), 0, 0, 0,
                0, 0, 0, 1, flowsAnglesAlphaStill(2), flowsAnglesBetaStill(2);
            // 5) Solve the linear system via the robust ColPivHouseholderQR decomposition (see the Eigen docs for details: http://eigen.tuxfamily.org/dox/TutorialLinearAlgebra.html )
            MatrixXd velocitiesStill = coeffMatrixStill.colPivHouseholderQr().solve(angVelocitiesStill);
            // 6) Write the output to file flowsFileStill
            flowsFileStill << fixed << trialNumber << "\t" <<  // 1
                           stimulusFrames << " " <<	// 2
                           factors.at("DeltaSlant")<< " " << // 3
                           factors.at("StillPlaneSlant") << " " << // 4
                           overallTilt << " " <<
                           projPointsStillPlane[4].transpose() << " " << // 5,6,7
                           projPointsStillPlane[5].transpose() << " " << // 8,9,10
                           projPointsStillPlane[8].transpose() << " " << // 11,12,13
                           angVelocitiesStill.transpose() << " " << // 14, 15, 16, 17, 18, 19
                           velocitiesStill.transpose() << endl;	// 20, 21, 22, 23, 24, 25
            // **************** END OF OPTIC FLOWS COMPUTATION
        }
        /*
        ofstream outputfile;
        outputfile.open("data.dat");
        outputfile << "Subject Name: " << parameters.find("SubjectName") << endl;
        outputfile << "Passive matrix:" << endl << objectPassiveTransformation.matrix() << endl;
        outputfile << "Yaw: " << toDegrees(eulerAngles.getYaw()) << endl <<"Pitch: " << toDegrees(eulerAngles.getPitch()) << endl;
        outputfile << "EyeLeft: " <<  headEyeCoords.getLeftEye().transpose() << endl;
        outputfile << "EyeRight: " << headEyeCoords.getRightEye().transpose() << endl << endl;
        outputfile << "Factors:" << endl;
        for (map<string,double>::iterator iter=factors.begin(); iter!=factors.end(); ++iter)
        {   outputfile << "\t\t" << iter->first << "= " << iter->second << endl;
        }
        */

    }

    if ( trialMode == PROBEMODE )
        isReading=false;

    glutPostRedisplay();
    glutTimerFunc(TIMER_MS, update, 0);
}
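Steps 4–5 above solve a small dense 6x6 linear system with Eigen's ColPivHouseholderQR. The same solve pattern in isolation, on a random system (the matrix here is arbitrary, not the optic-flow coefficient matrix):

#include <Eigen/Dense>
#include <iostream>

int main()
{
    Eigen::MatrixXd A = Eigen::MatrixXd::Random(6, 6);
    Eigen::VectorXd b = Eigen::VectorXd::Random(6);
    // Column-pivoting Householder QR: a reasonable accuracy/speed compromise
    // for small dense systems like the 6x6 one built in the example above.
    Eigen::VectorXd x = A.colPivHouseholderQr().solve(b);
    std::cout << "residual: " << (A*x - b).norm() << std::endl;
    return 0;
}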
Example #9
void idle()
{
    if (trialNumber >= maxTotalTrials )
        exit(0);
	double elapsedFrameTime = totalTimer.getTimeIntervalInMilliSec();
    optotrak.updateMarkers(elapsedFrameTime);
    markers = optotrak.getAllMarkers();
    headEyeCoords.update(markers[1],markers[2],markers[3],TIMER_MS);

    allVisiblePatch =  markers[1].isVisible() && markers[2].isVisible()
                       && markers[3].isVisible();
    allVisibleHead = markers[17].isVisible() && markers[18].isVisible() && allVisibleHead;

    eyeLeft = headEyeCoords.getLeftEye().p;
    eyeRight = headEyeCoords.getRightEye().p;
    cyclopeanEye = (eyeLeft+eyeRight)/2.0;

    projPointEyeRight = getEyeProjectionPoint();

    checkBounds(nOscillationsFixation,
                eyeRight.x(),
                trialMode,
                headCalibrationDone,
                minOscTime,
                maxOscTime,
                maxXOscillation,
                translationTimer,
                beepOk,
                tweeter,
                woofer,tweeter);

    if ( trialMode == STIMULUSMODE )
        deltaT+=TIMER_MS;
    else
        deltaT=0;

    if (headCalibrationDone == 3 && trialMode != PROBEMODE )
    {
        // This accounts for the fact that the stimulus appears only when the eye is almost at the center
		int actualTrialMode = trialMode;
		if ( trialMode == STIMULUSMODE && ( eyeRight.x()) > centerTolerance )
			actualTrialMode=FIXATIONMODE;

	markersFile << fixed <<   trialNumber << " " << actualTrialMode << " " ;
        markersFile << fixed << setprecision(3) << eyeRight.transpose() << " " << eyeLeft.transpose() << " " << toDegrees(headEyeCoords.getPitch()) << " " << toDegrees(headEyeCoords.getYaw()) << " " << toDegrees(headEyeCoords.getRoll()) << " " ;
        markersFile <<	fixed << setprecision(0)<<
                    factors["OmegaY"] << " " <<
                    factors["Binocular"] << " " <<
                    factors["Tilt"] << " " <<
                    factors["Slant"] << " " <<
                    totalTimer.getElapsedTimeInMilliSec() << endl;

        //objectPassiveTransformation.setIdentity();
        if ( actualTrialMode == STIMULUSMODE )
        {
            objectPassiveTransformation = getPassiveMatrix();
            matrixFile << setw(6) << left <<
                       trialNumber << " "  ;
            for ( int i=0; i<3; i++)
                matrixFile << objectPassiveTransformation.matrix().row(i) << " " ;
            matrixFile << endl;
        }

        if ( actualTrialMode == STIMULUSMODE )
        {
            vector<Vector3d> projPoints = stimDrawer.projectStimulusPoints(objectActiveTransformation,headEyeCoords.getRigidStart().getFullTransformation(),cam,focalDistance,screen,Vector3d(0,0,0),false,false);

            MatrixXd a1toa6 = stimDrawer.computeOpticFlow(projPoints, focalDistance, elapsedFrameTime/1000);
            flowsFile << trialNumber << " " << a1toa6.transpose() << endl;
        }
    }

    writeContinuosDataFile();

}
Example #10
void display()
{
  using namespace igl;
  using namespace std;
  using namespace Eigen;
  const float back[4] = {30.0/255.0,30.0/255.0,50.0/255.0,0};
  glClearColor(back[0],back[1],back[2],0);
  glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

  if(is_animating)
  {
    double t = (get_seconds() - animation_start_time)/ANIMATION_DURATION;
    if(t > 1)
    {
      t = 1;
      is_animating = false;
    }
    Quaterniond q = animation_from_quat.slerp(t,animation_to_quat).normalized();
    auto & camera = s.camera;
    camera.orbit(q.conjugate());
  }

  glEnable(GL_DEPTH_TEST);
  glEnable(GL_NORMALIZE);
  lights();
  push_scene();

  // Draw a nice floor
  glEnable(GL_DEPTH_TEST);
  glPushMatrix();
  const double floor_offset =
    -2./bbd*(V.col(1).maxCoeff()-Vmid(1));
  glTranslated(0,floor_offset,0);
  const float GREY[4] = {0.5,0.5,0.6,1.0};
  const float DARK_GREY[4] = {0.2,0.2,0.3,1.0};
  glPolygonMode(GL_FRONT_AND_BACK,GL_FILL);
  draw_floor(GREY,DARK_GREY);
  glPopMatrix();

  push_object();

  // Set material properties
  glDisable(GL_COLOR_MATERIAL);
  glMaterialfv(GL_FRONT_AND_BACK, GL_AMBIENT,  SILVER_AMBIENT);
  glMaterialfv(GL_FRONT_AND_BACK, GL_DIFFUSE,  SILVER_DIFFUSE  );
  glMaterialfv(GL_FRONT_AND_BACK, GL_SPECULAR, SILVER_SPECULAR);
  glMaterialf (GL_FRONT_AND_BACK, GL_SHININESS, 128);

  typedef std::vector<
    Eigen::Quaterniond,Eigen::aligned_allocator<Eigen::Quaterniond> >
    RotationList;
  RotationList dQ(BE.rows(),Quaterniond::Identity()),vQ;
  vector<Vector3d> vT;
  Matrix3d A = Matrix3d::Identity();
  for(int e = 0;e<BE.rows();e++)
  {
    dQ[e] = AngleAxisd((sin(get_seconds()+e))*0.06*PI,A.col(e%3));
  }
  forward_kinematics(C,BE,P,dQ,vQ,vT);
  const int dim = C.cols();
  MatrixXd T(BE.rows()*(dim+1),dim);
  for(int e = 0;e<BE.rows();e++)
  {
    Affine3d a = Affine3d::Identity();
    a.translate(vT[e]);
    a.rotate(vQ[e]);
    T.block(e*(dim+1),0,dim+1,dim) =
      a.matrix().transpose().block(0,0,dim+1,dim);
  }

  if(wireframe)
  {
    glPolygonMode(GL_FRONT_AND_BACK,GL_LINE);
  }
  glLineWidth(1.0);
  MatrixXd U = M*T;
  per_face_normals(U,F,N);
  draw_mesh(U,F,N);
  glPolygonMode(GL_FRONT_AND_BACK,GL_FILL);

  if(skeleton_on_top)
  {
    glDisable(GL_DEPTH_TEST);
  }

  switch(skel_style)
  {
    default:
    case SKEL_STYLE_TYPE_3D:
      draw_skeleton_3d(C,BE,T,MAYA_VIOLET,bbd*0.5);
      break;
    case SKEL_STYLE_TYPE_VECTOR_GRAPHICS:
      draw_skeleton_vector_graphics(C,BE,T);
      break;
  }

  pop_object();

  pop_scene();

  report_gl_error();

  TwDraw();
  glutSwapBuffers();
  glutPostRedisplay();
}
Example #11
void update(int value)
{
    // Count the stimulus presentation cycles
    if ( (sumOutside > str2num<int>(parameters.find("StimulusCycles")) ) &&  (trialMode == STIMULUSMODE) )
    {
        sumOutside=0;
        trialMode++;
        trialMode=trialMode%4;
    }

    if (conditionInside && (sumOutside*2 > str2num<int>(parameters.find("FixationCycles"))) && (trialMode ==FIXATIONMODE )  )
    {
        sumOutside=0;
        trialMode++;
        trialMode=trialMode%4;
        stimulusDuration.start();
    }
    if ( trialMode == STIMULUSMODE )
        stimulusFrames++;
    if ( trialMode == FIXATIONMODE )
        stimulusFrames=0;

    Screen screenPassive;

    screenPassive.setWidthHeight(SCREEN_WIDE_SIZE, SCREEN_WIDE_SIZE*SCREEN_HEIGHT/SCREEN_WIDTH);
    screenPassive.setOffset(alignmentX,alignmentY);
    screenPassive.setFocalDistance(0);
    screenPassive.transform(headEyeCoords.getRigidStart().getFullTransformation()*Translation3d(center));

    camPassive.init(screenPassive);
    camPassive.setDrySimulation(true);
    camPassive.setEye(eyeRight);
    objectPassiveTransformation = ( camPassive.getModelViewMatrix()*objectActiveTransformation );
    // Coordinates picker
    markers = optotrak.getAllPoints();
    if ( isVisible(markers[1]) && isVisible(markers[2]) && isVisible(markers[3]) )
        headEyeCoords.update(markers[1],markers[2],markers[3]);
    Affine3d active = headEyeCoords.getRigidStart().getFullTransformation();

    eulerAngles.init( headEyeCoords.getRigidStart().getFullTransformation().rotation() );

    eyeLeft = headEyeCoords.getLeftEye();
    eyeRight = headEyeCoords.getRightEye();

    cyclopeanEye = (eyeLeft+eyeRight)/2.0;

    // Projection of view normal on the focal plane
    Vector3d directionOfSight = (active.rotation()*Vector3d(0,0,-1)).normalized();
    Eigen::ParametrizedLine<double,3> lineOfSightRight = Eigen::ParametrizedLine<double,3>::Through( eyeRight , eyeRight+directionOfSight );
    Eigen::ParametrizedLine<double,3> lineOfSightLeft  = Eigen::ParametrizedLine<double,3>::Through( eyeLeft, eyeLeft+directionOfSight );

    double lineOfSightRightDistanceToFocalPlane = lineOfSightRight.intersection(focalPlane);
    double lineOfSightLeftDistanceToFocalPlane = lineOfSightLeft.intersection(focalPlane);

    //double lenghtOnZ = (active*(center-eyeCalibration )+eyeRight).z();
    projPointEyeRight = lineOfSightRightDistanceToFocalPlane *(directionOfSight)+ (eyeRight);
    projPointEyeLeft= lineOfSightLeftDistanceToFocalPlane * (directionOfSight) + (eyeLeft);
    // second projection the fixation point computed with z non constant but perfectly parallel to projPointEyeRight
    lineOfSightRightDistanceToFocalPlane= (( active.rotation()*(center)) - eyeRight).norm();
    Vector3d secondProjection = lineOfSightRightDistanceToFocalPlane *(directionOfSight)+ (eyeRight);

    if ( !zOnFocalPlane )
        projPointEyeRight=secondProjection ;

    // Compute the translation to move the eye in order to avoid shear components
    Vector3d posAlongLineOfSight = (headEyeCoords.getRigidStart().getFullTransformation().rotation())*(eyeRight -eyeCalibration);

    switch ( (int)factors["Translation"] )
    {
    case -1:
    case -2:
        translationFactor.setZero();
        if ( trialMode == STIMULUSMODE )
            projPointEyeRight=center;
        break;
    case 0:
        translationFactor.setZero();
        break;
    case 1:
        translationFactor = factors["TranslationConstant"]*Vector3d(posAlongLineOfSight.z(),0,0);
        break;
    case 2:
        translationFactor = factors["TranslationConstant"]*Vector3d(0,posAlongLineOfSight.z(),0);
        break;
    }
    if ( passiveMode )
        initProjectionScreen(0,headEyeCoords.getRigidStart().getFullTransformation()*Translation3d(Vector3d(0,0,focalDistance)));
    else
        initProjectionScreen(focalDistance,Affine3d::Identity());

    checkBounds();
    /**** Save to file part ****/
    // Markers file save the used markers and time-depending experimental variable to a file
    // (Make sure that in passive experiment the list of variables has the same order)
    markersFile << trialNumber << " " << headCalibrationDone << " " << trialMode << " " ;
    markersFile <<markers[1].transpose() << " " << markers[2].transpose() << " " << markers[3].transpose() << " " << markers[17].transpose() << " " << markers[18].transpose() << " " ;

    markersFile <<	factors["Tilt"] << " " <<
                factors["Slant"] << " " <<
                factors["Translation"] << " " <<
                factors["Onset"] << " " <<
                factors["TranslationConstant"] <<
                endl;

    ofstream outputfile;
    outputfile.open("data.dat");
    outputfile << "Subject Name: " << parameters.find("SubjectName") << endl;
    outputfile << "Passive matrix:" << endl << objectPassiveTransformation.matrix() << endl;
    outputfile << "Yaw: " << toDegrees(eulerAngles.getYaw()) << endl <<"Pitch: " << toDegrees(eulerAngles.getPitch()) << endl;
    outputfile << "EyeLeft: " <<  headEyeCoords.getLeftEye().transpose() << endl;
    outputfile << "EyeRight: " << headEyeCoords.getRightEye().transpose() << endl << endl;
    outputfile << "Slant: " << instantPlaneSlant << endl;
    outputfile << "(Width,Height) [px]: " << getPlaneDimensions().transpose() << " " << endl;
    outputfile << "Factors:" << endl;
    for (map<string,double>::iterator iter=factors.begin(); iter!=factors.end(); ++iter)
    {
        outputfile << "\t\t" << iter->first << "= " << iter->second << endl;
    }
    outputfile << "Trial remaining: " << trial.getRemainingTrials()+1 << endl;
    outputfile << "Last response: " << probeAngle << endl;
    // Here we save plane projected width and height


    // now rewind the file
    outputfile.clear();
    outputfile.seekp(0,ios::beg);

    // Write down frame by frame the trajectories and angles of eyes and head
    if ( trialMode == STIMULUSMODE && headCalibrationDone > 2 )
    {
        trajFile << setw(6) << left <<
                 trialNumber << " " <<
                 stimulusFrames << " " <<
                 eyeRight.transpose() << endl;

        anglesFile << setw(6) << left <<
                   trialNumber << " " <<
                   stimulusFrames << " " <<
                   toDegrees(eulerAngles.getPitch()) << " " <<
                   toDegrees(eulerAngles.getRoll()) << " " <<
                   toDegrees(eulerAngles.getYaw()) << " " <<
                   instantPlaneSlant << endl;

        matrixFile << setw(6) << left <<
                   trialNumber << " " <<
                   stimulusFrames << " " ;
        for (int i=0; i<3; i++)
            matrixFile << objectPassiveTransformation.matrix().row(i) << " " ;
        matrixFile << endl;

        // Write the 13 special extremal points on stimFile
        stimFile << setw(6) << left <<
                 trialNumber << " " <<
                 stimulusFrames << " " ;
        double winx=0,winy=0,winz=0;

        for (PointsRandIterator iRand = redDotsPlane.specialPointsRand.begin(); iRand!=redDotsPlane.specialPointsRand.end(); ++iRand)
        {   Point3D *p=(*iRand);
            Vector3d v = objectActiveTransformation*Vector3d( p->x, p->y, p->z);

            gluProject(v.x(),v.y(),v.z(), (&cam)->getModelViewMatrix().data(), (&cam)->getProjectiveMatrix().data(), (&cam)->getViewport().data(), &winx,&winy,&winz);
            stimFile << winx << " " << winy << " " << winz << " ";
        }
        stimFile << endl;
    }

    glutPostRedisplay();
    glutTimerFunc(TIMER_MS, update, 0);
}
Example #12
    void trace_rays0(MData *m, SurfaceList *surflist, int nsurfs, Ray *R, int nray, int rsize, char *xray)
    {

	int *traversed = new int[nray];
	Ray *ray;

	//fprintf(stderr, "Ray0 %ld %p %d %d\n", R, R, nray, rsize);
	//fflush(stderr);

	//return;

	for ( int h = 0; h < nsurfs; h++ ) {

	    Surface *surf = surflist[h].surf;
	    int     nsurf = surflist[h].nsurf;
	    int      once = surflist[h].type;
	    int		j;

	    if ( once ) { for ( int j = 0; j < nray; j++ ) { traversed[j] = 0; } }

	    for ( int i = 0; i < nsurf; i++ ) {
		if ( isinf(surf[i].p[Px_thickness]) ) { continue; }
		if (      !surf[i].enable           ) { continue; } 

		Affine3d txforward = Affine3d::Identity();
		Affine3d txreverse;
		Affine3d rtforward;
		Affine3d rtreverse;

		txforward   = 						// Ray position transform
			    Affine3d::Identity()
			    * Translation3d(0.0, 0.0,  m->z)
				* AngleAxisd(d2r( surf[i].p[Px_rz]), Vector3d(0.0, 0.0, -1.0))
				* AngleAxisd(d2r(-surf[i].p[Px_ry]), Vector3d(0.0, 1.0,  0.0))
				* AngleAxisd(d2r(-surf[i].p[Px_rx]), Vector3d(1.0, 0.0,  0.0))
			    * Translation3d(0.0, 0.0, -m->z)
			    * Translation3d(-surf[i].p[Px_px], -surf[i].p[Px_py], -surf[i].p[Px_pz])
			;

		rtforward   =						// Ray direction transform
			    Affine3d::Identity()
				* AngleAxisd(d2r( surf[i].p[Px_rz]), Vector3d(0.0, 0.0, -1.0))
				* AngleAxisd(d2r(-surf[i].p[Px_ry]), Vector3d(0.0, 1.0,  0.0))
				* AngleAxisd(d2r(-surf[i].p[Px_rx]), Vector3d(1.0, 0.0,  0.0))
			;

		txreverse 	= txforward.inverse();
		rtreverse 	= rtforward.inverse();

		//printf("Surface %s %d %d: %f %f %f	%d %ld\n", surf[i].type, h, i, -surf[i].p[Px_px], -surf[i].p[Px_py], -surf[i].p[Px_pz], once, surf[i].traverse);

		for ( j = 0, ray = R; j < nray; j++, ray = (Ray *) (((char *) ray) + rsize) ) {
		    Vector3d saveP = ray->p;
		    Vector3d saveK = ray->k;

			//printf("Ray  ");
			//prays(ray, 1);

		    if ( ray->vignetted ) { continue; }

		    ray->p = txforward * ray->p;		// Put the ray into the surface cs.
		    ray->k = rtforward * ray->k;

			//printf("Conv ");
			//prays(ray, 1);

		    ray->vignetted = surf[i].traverse(m, &surf[i], ray);

			//printf("Trav ");
			//prays(ray, 1);

		    if ( ray->vignetted == 2 ) {		// Coordbreak returns 2
			ray->vignetted = 0;

			if ( xray ) {
			    memcpy(xray, ray, rsize);
			    xray += rsize;
			}
			continue; 
		    }

		    if ( ray->vignetted || (!ray->vignetted && aper_clip(&surf[i], ray)) ) {
		        ray->vignetted = i ? i : -1;
		    }

		    if ( once ) {
			//printf("Here ");
			//prays(ray, 1);

			  if ( !ray->vignetted ) {		// If the ray was not vignetted it has traversed this surface.
			//printf("Trav ");
			//prays(ray, 1);
			      traversed[j] = 1;			// Don't try this ray again
			      ray->vignetted = 1;
			  } else {
			//printf("Ving ");
			//prays(&ray[j], 1);
			      ray->p = saveP;			// Reset
			      ray->k = saveK;

			      ray->vignetted = 0;		// Try again on next surface

			      continue;
			  }
		    }

		    ray->p = txreverse * ray->p;		// Put the ray back into global cs..
		    ray->k = rtreverse * ray->k;

		    if ( xray ) {
			memcpy(xray, ray, rsize);
			xray += rsize;
		    }

			//printf("Next ");
			//prays(ray, 1);
		}

		if ( !once ) {
		    m->indicies  = surf[i].indicies[0] > 0.0 ? surf[i].indicies: m->indicies;
		    m->z += surf[i].p[Px_thickness];
		}
	    }
			//printf("Done ");
			//prays(ray, 1);

	    if ( once ) {
		for ( j = 0, ray = R; j < nray; j++, ray = (Ray *) (((char *) ray) + rsize) ) {	// Rays that have not traversed are vignetted.
		    ray->vignetted = !traversed[j];
		}
		m->indicies  = surf[0].indicies[0] > 0.0 ? surf[0].indicies: m->indicies;
		m->z += surf[0].p[Px_thickness];
	    }
	}
	delete [] traversed;
    }
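The forward/reverse pairs above are built by chaining Translation3d and AngleAxisd factors into an Affine3d and inverting the result. Stripped of the ray-tracing types, that pattern reduces to the following sketch (the angles and offsets here are arbitrary):

#include <Eigen/Geometry>
#include <iostream>

int main()
{
	using namespace Eigen;
	Affine3d txforward = Affine3d::Identity()
		* Translation3d(0.0, 0.0, 2.0)
		* AngleAxisd(0.5, Vector3d(0.0, 0.0, -1.0))
		* Translation3d(-1.0, 0.5, 0.0);
	Affine3d txreverse = txforward.inverse();
	Vector3d p(0.1, 0.2, 0.3);
	// Applying the forward then the reverse transform must return the original point.
	std::cout << (txreverse * (txforward * p) - p).norm() << std::endl; // ~0
	return 0;
}

Example #13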
void update(int value)
{   // Read the experiment from file; if the file has ended, exit immediately
    if ( inputStream.eof() )
    {   cleanup();
        exit(0);
    }

    if ( isReading )
    {   // This reads a line (frame) in inputStream
        readline(inputStream, trialNumber,  headCalibration,  trialMode, pointMatrix );

        headEyeCoords.update(pointMatrix.col(0),pointMatrix.col(1),pointMatrix.col(2));
        Affine3d active = headEyeCoords.getRigidStart().getFullTransformation();
        eulerAngles.init( headEyeCoords.getRigidStart().getFullTransformation().rotation() );

        eyeLeft = headEyeCoords.getLeftEye();
        eyeRight= headEyeCoords.getRightEye();

        cyclopeanEye = (eyeLeft+eyeRight)/2.0;

		if ( trialMode == STIMULUSMODE )
			stimulusFrames++;
		if ( trialMode == FIXATIONMODE )
			stimulusFrames=0;

        // Projection of view normal on the focal plane
	Vector3d directionOfSight = (active.rotation()*Vector3d(0,0,-1)).normalized();
	Eigen::ParametrizedLine<double,3> lineOfSightRight = Eigen::ParametrizedLine<double,3>::Through( eyeRight , eyeRight+directionOfSight );
	Eigen::ParametrizedLine<double,3> lineOfSightLeft  = Eigen::ParametrizedLine<double,3>::Through( eyeLeft, eyeLeft+directionOfSight );
	
	double lineOfSightRightDistanceToFocalPlane = lineOfSightRight.intersection(focalPlane);
	double lineOfSightLeftDistanceToFocalPlane = lineOfSightLeft.intersection(focalPlane);
	
	//double lenghtOnZ = (active*(center-eyeCalibration )+eyeRight).z();
	projPointEyeRight = lineOfSightRightDistanceToFocalPlane *(directionOfSight)+ (eyeRight);
	projPointEyeLeft= lineOfSightLeftDistanceToFocalPlane * (directionOfSight) + (eyeLeft);
	// second projection the fixation point computed with z non constant but perfectly parallel to projPointEyeRight
	lineOfSightRightDistanceToFocalPlane= (( active.rotation()*(center)) - eyeRight).norm();
	Vector3d secondProjection = lineOfSightRightDistanceToFocalPlane *(directionOfSight)+ (eyeRight);
	
	if ( !zOnFocalPlane )
	projPointEyeRight=secondProjection ;

	// Compute the translation to move the eye in order to avoid shear components
	Vector3d posAlongLineOfSight = (headEyeCoords.getRigidStart().getFullTransformation().rotation())*(eyeRight -eyeCalibration);
	// GENERATION OF PASSIVE MODE.
        // HERE WE MOVE THE SCREEN TO FACE THE OBSERVER's EYE
        if ( passiveMode )
        {
            initProjectionScreen(0, headEyeCoords.getRigidStart().getFullTransformation()*Translation3d(center));
        }
        else
            initProjectionScreen(focalDistance, Affine3d::Identity());
        
	objectPassiveTransformation = ( cam.getModelViewMatrix()*objectActiveTransformation );
    
	ofstream outputfile;
	outputfile.open("data.dat");
	outputfile << "Subject Name: " << parameters.find("SubjectName") << endl;
	outputfile << "Passive matrix:" << endl << objectPassiveTransformation.matrix() << endl;
	outputfile << "Yaw: " << toDegrees(eulerAngles.getYaw()) << endl <<"Pitch: " << toDegrees(eulerAngles.getPitch()) << endl;
	outputfile << "EyeLeft: " <<  headEyeCoords.getLeftEye().transpose() << endl;
	outputfile << "EyeRight: " << headEyeCoords.getRightEye().transpose() << endl << endl;
	outputfile << "Slant: " << instantPlaneSlant << endl;
	outputfile << "Factors:" << endl;
	for (map<string,double>::iterator iter=factors.begin(); iter!=factors.end(); ++iter)
	{
		outputfile << "\t\t" << iter->first << "= " << iter->second << endl;
	}
	
	}

    if ( trialMode == PROBEMODE )
        isReading=false;

    glutPostRedisplay();
    glutTimerFunc(TIMER_MS, update, 0);
}
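Example #14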
void drawFixation()
{
	switch ( headCalibrationDone )
	{   
	case 1:
		// Fixed stimulus
		glColor3fv(glWhite);
		glDisable(GL_BLEND);
		glPointSize(5);
		glBegin(GL_POINTS);
		glVertex3d(0,0,focalDistance);
		glEnd();
		glPointSize(1);
		break;
	case 2:
		// Fixed stimulus + projected points
		glColor3fv(glWhite);
		glDisable(GL_BLEND);
		glPointSize(5);
		glBegin(GL_POINTS);
		glVertex3d(0,0,focalDistance);
		glColor3fv(glRed);
		glVertex3dv(projPointEyeRight.data());
		glColor3fv(glBlue);
		glVertex3d(eyeRight.x(),eyeRight.y(),focalDistance);
		glEnd();
		glPointSize(1);
	
		// Draw the calibration circle
		glColor3fv(glWhite);
		break;
	
	case 3:
	{
	// DRAW THE FIXATION POINT 
	double eyeToCenterAngleX= toDegrees(atan(eyeRight.x()/(-focalDistance-eyeRight.z()) ));
	double eyeToCenterAngleY= toDegrees(atan(eyeRight.y()/(-focalDistance-eyeRight.z()) ));
	double projPointAngleX = toDegrees( atan( (projPointEyeRight.x()-eyeRight.x())/abs(projPointEyeRight.z())));
	double maxAllowedTranslationYaw = str2num<double>(parameters.find("MaxAllowedTranslationYaw"));


	Vector3d stimulusCenter(0,0,0);
	Matrix3d objrotation = Matrix3d::Identity();
	// IMPORTANT Reset the previous status of transformation
	objectActiveTransformation.setIdentity();

	switch ( (int) factors["Rotation"] )
	{
	case 2:
	{
		objrotation = (AngleAxis<double>(eulerAngles.getYaw()*factors["FollowingSpeed"], Vector3d::UnitY())
		*AngleAxis<double>(eulerAngles.getPitch(), Vector3d::UnitX())).toRotationMatrix();
		instantPlaneSlant = toDegrees(eulerAngles.getYaw())*factors["RotationSpeed"]+factors["Slant"];
		stimulusCenter = objrotation*Vector3d(0,0,focalDistance)+headEyeCoords.getRigidStart().getFullTransformation().translation();
		objectActiveTransformation.linear()=objrotation;
	}
	break;
	case 1:
	{
		objrotation = (AngleAxis<double>(eulerAngles.getYaw(), Vector3d::UnitY())
		*AngleAxis<double>(eulerAngles.getPitch()*factors["FollowingSpeed"], Vector3d::UnitX())).toRotationMatrix();
		instantPlaneSlant = toDegrees(eulerAngles.getPitch())*factors["RotationSpeed"]+factors["Slant"];
		stimulusCenter = objrotation*Vector3d(0,0,focalDistance)+headEyeCoords.getRigidStart().getFullTransformation().translation();
		objectActiveTransformation.linear()=objrotation;
	}
	break;
	case 0:
	{
		objrotation = (AngleAxis<double>(eulerAngles.getYaw(), Vector3d::UnitY())
		*AngleAxis<double>(eulerAngles.getPitch(), Vector3d::UnitX())).toRotationMatrix();
		//instantPlaneSlant = eyeRight.x()*factors["RotationSpeed"]/10+factors["Slant"];
		instantPlaneSlant = toDegrees( atan(eyeRight.x()/abs(focalDistance+eyeRight.z()) ) )*factors["RotationSpeed"]+factors["Slant"];
		stimulusCenter  = headEyeCoords.getRigidStart().getFullTransformation().linear()*Vector3d(eyeRight.x()*factors["FollowingSpeed"],eyeRight.y(),eyeRight.z()+focalDistance);
		objectActiveTransformation.linear() = objrotation;
	}
	break;
	}
	objectActiveTransformation.translation() = stimulusCenter;
	Vector3d fixationPointTmp = objectActiveTransformation.translation();

	glPushMatrix();
	glTranslated(fixationPointTmp.x(),fixationPointTmp.y(),fixationPointTmp.z());
	glutSolidSphere(1,10,10);
	glPopMatrix();
	break;	
	}
}
}
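Example #15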
void drawRedDotsPlane()
{   // Draw the stimulus ( red-dots plane )
	glDisable(GL_COLOR_MATERIAL);
	glDisable(GL_BLEND);
	glDisable(GL_LIGHTING);

	Matrix3d objrotation ;
	Vector3d stimulusCenter;
    
	double instantPlaneSlant=0; 
	
	// IMPORTANT Reset the previous status of transformation
	objectActiveTransformation.setIdentity();

	switch ( (int) factors["Rotation"] )
	{
	case 2:
	{
		objrotation = (AngleAxis<double>(eulerAngles.getYaw()*factors["FollowingSpeed"], Vector3d::UnitY())
		*AngleAxis<double>(eulerAngles.getPitch(), Vector3d::UnitX())).toRotationMatrix();
		instantPlaneSlant = toDegrees(eulerAngles.getYaw())*factors["RotationSpeed"]+factors["Slant"];
		stimulusCenter = objrotation*Vector3d(0,0,focalDistance)+headEyeCoords.getRigidStart().getFullTransformation().translation();
		objectActiveTransformation.linear()=objrotation;
	}
	break;
	case 1:
	{
		objrotation = (AngleAxis<double>(eulerAngles.getYaw(), Vector3d::UnitY())
		*AngleAxis<double>(eulerAngles.getPitch()*factors["FollowingSpeed"], Vector3d::UnitX())).toRotationMatrix();
		instantPlaneSlant = toDegrees(eulerAngles.getPitch())*factors["RotationSpeed"]+factors["Slant"];
		stimulusCenter = objrotation*Vector3d(0,0,focalDistance)+headEyeCoords.getRigidStart().getFullTransformation().translation();
		objectActiveTransformation.linear()=objrotation;
	}
	break;
	case 0:
	{
		objrotation = (AngleAxis<double>(eulerAngles.getYaw(), Vector3d::UnitY())
		*AngleAxis<double>(eulerAngles.getPitch(), Vector3d::UnitX())).toRotationMatrix();
		//instantPlaneSlant = eyeRight.x()*factors["RotationSpeed"]/10+factors["Slant"];
		instantPlaneSlant = toDegrees( atan(eyeRight.x()/abs(focalDistance+eyeRight.z()) ) )+factors["Slant"];
		stimulusCenter  = headEyeCoords.getRigidStart().getFullTransformation().linear()*Vector3d(eyeRight.x()*factors["FollowingSpeed"],eyeRight.y(),eyeRight.z()+focalDistance);
		objectActiveTransformation.linear() = objrotation;
	}
	break;

	}
	objectActiveTransformation.translation() = stimulusCenter;
	
	//cerr << instantPlaneSlant << endl;

	glPushMatrix();     // PUSH MATRIX
	glLoadIdentity();
	glMultMatrixd(objectActiveTransformation.data());
	
	switch ( (int) factors["Tilt"] )
    {
    case 0:
        glRotated( instantPlaneSlant ,0,1,0);
        //objectActiveTransformation*=AngleAxisd( toRadians(-instantPlaneSlant), Vector3d::UnitY() );
        glScaled(1/sin(toRadians( -90-factors["Slant"])),1,1);	//backprojection phase
        break;
    case 90:
        glRotated( -instantPlaneSlant ,1,0,0);
        //objectActiveTransformation*=AngleAxisd( toRadians(-instantPlaneSlant), Vector3d::UnitX() );
        glScaled(1,1/sin(toRadians( -90-factors["Slant"] )),1); //backprojection phase
        break;
    case 180:
        glRotated( -instantPlaneSlant ,0,1,0);
        //objectActiveTransformation*=AngleAxisd( toRadians(-instantPlaneSlant), Vector3d::UnitY() );
        glScaled(1/sin(toRadians( -90-factors["Slant"] )),1,1); //backprojection phase
        break;
    case 270:
        glRotated( instantPlaneSlant ,1,0,0);
        //objectActiveTransformation*=AngleAxisd( toRadians(-instantPlaneSlant), Vector3d::UnitX() );
        glScaled(1,1/sin(toRadians( -90-factors["Slant"] )),1); //backprojection phase
        break;
    }
	stimDrawer.draw();

	glPopMatrix();	// POP MATRIX

}
Example #16
 const base::Affine3d getTransform() const
 {
     Affine3d trans (this->orientation);
     trans.translation() = this->translation;
     return trans;
 }
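For context, the accessor above relies on Affine3d's constructor from a rotation type: passing a quaternion fills the linear part, after which the translation block is overwritten. A standalone sketch of the same construction, with the orientation and translation passed in explicitly (base::Affine3d is assumed here to be the usual typedef for Eigen::Affine3d):

#include <Eigen/Geometry>

// Hypothetical free-function version of getTransform() above; `orientation`
// and `translation` stand in for the object's members.
Eigen::Affine3d makeTransform(const Eigen::Quaterniond &orientation,
                              const Eigen::Vector3d &translation)
{
    Eigen::Affine3d trans(orientation);   // quaternion fills the linear part
    trans.translation() = translation;    // then overwrite the translation
    return trans;
}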