Example #1
void initScreen()
{
    screen.setWidthHeight(SCREEN_WIDE_SIZE, SCREEN_WIDE_SIZE*SCREEN_HEIGHT/SCREEN_WIDTH);
    screen.setFocalDistance(focalDistance);
    cam.init(screen);
    cam.setNearFarPlanes(0.2,1000);
}
Example #2
void initProjectionScreen(double _focalDist, const Affine3d &_transformation)
{   screen.setWidthHeight(SCREEN_WIDE_SIZE, SCREEN_WIDE_SIZE*SCREEN_HEIGHT/SCREEN_WIDTH);
    screen.setOffset(alignmentX,alignmentY);
    screen.setFocalDistance(_focalDist);
    screen.transform(_transformation);
    cam.init(screen);
}
void initScreen()
{
    screen.setWidthHeight(SCREEN_WIDE_SIZE, SCREEN_WIDE_SIZE*SCREEN_HEIGHT/SCREEN_WIDTH);
    screen.setOffset(alignmentX,alignmentY);
    screen.setFocalDistance(focalDistance);

    cam.init(screen);
}
Example #4
/**
 * @brief initializeExperiment
 */
void initializeExperiment()
{
    // MUST BE CALLED WITHIN A VALID OPENGL CONTEXT
    //Screen screen(SCREEN_WIDE_SIZE, SCREEN_WIDE_SIZE*SCREEN_HEIGHT/SCREEN_WIDTH, alignmentX, alignmentY, focalDistance );
    screen.init(SCREEN_WIDE_SIZE, SCREEN_WIDE_SIZE*SCREEN_HEIGHT/SCREEN_WIDTH, alignmentX, alignmentY, focalDistance );
    screen.setOffset(alignmentX,alignmentY);
    screen.setFocalDistance(focalDistance);
    cam.init(screen);

    // Initialize all the streams for file output etc.
    parameters.loadParameterFile("C:/cncsvisiondata/parametersFiles/Fulvio/expMicroHeadMovements/parametersExpMicroHeadMovementsActive.txt");
    // Initialize focal distance and interocular distance from the parameter file
    focalDistance = parameters.get("FocalDistance");
    interocularDistance = parameters.get("IOD");
    infoDraw = (int)parameters.get("DrawInfo");
    useCircularOcclusor = (int)parameters.get("UseCircularOcclusor");
    // Initialize trials balance factors
    trial.init(parameters);
    trial.next();
    // Base directory, full path
    string baseDir = parameters.find("BaseDir");
    // Subject name
    string subjectName = parameters.find("SubjectName");
    // Principal streams file

    string markersFileName = baseDir + "markersFile_MicroHeadMovementsActive_" + subjectName + ".txt";
    string outputFileName  = baseDir + "responseFile_MicroHeadMovementsActive_" + subjectName + ".txt";
    string timingFileName  = baseDir + "timingFile_MicroHeadMovementsActive_" + subjectName + ".txt";
    // Check for output file existence
    if ( !util::fileExists(outputFileName) )
        responseFile.open(outputFileName.c_str());
    cerr << "File " << outputFileName << " opened successfully" << endl;

    // Check for output markers file existence
    if ( !util::fileExists(markersFileName) )
        markersFile.open(markersFileName.c_str());
    cerr << "File " << markersFileName << " opened successfully" << endl;

    // Check for output timing file existence
    if ( !util::fileExists(timingFileName) )
        timingFile.open(timingFileName.c_str());
    cerr << "File " << timingFileName << " opened successfully" << endl;

    // Write the response file header
    responseFile << "# SubjectName\tTrialNumber\tFocalDistance\tKeyPressed\tResponseTime\tfZWidth\tfSlant\tfTilt\tfStimAnchored" << endl;

    // Finished loading parameter files

    // Update the stimulus
    updateStimulus();

    trialTimer.start();
    globalTimer.start();
}
Example #5
// move monitor to specific distance
void initProjectionScreen(double _focalDist, const Affine3d &_transformation, bool synchronous)
{
    focalDistance = _focalDist;
    screen.setWidthHeight(SCREEN_WIDE_SIZE, SCREEN_WIDE_SIZE*SCREEN_HEIGHT/SCREEN_WIDTH);
    screen.setOffset(alignmentX,alignmentY);
    screen.setFocalDistance(_focalDist);
    screen.transform(_transformation);
    cam.init(screen);
    if ( synchronous )
        moveScreenAbsolute(_focalDist,homeFocalDistance,4500);
    else
        moveScreenAbsoluteAsynchronous(_focalDist,homeFocalDistance,4500);
}
Affine3d getPassiveMatrix()
{
    Screen screenPassive;
    screenPassive.setWidthHeight(SCREEN_WIDE_SIZE, SCREEN_WIDE_SIZE*SCREEN_HEIGHT/SCREEN_WIDTH);
    screenPassive.setOffset(alignmentX,alignmentY);
    screenPassive.setFocalDistance(0);
    screenPassive.transform(headEyeCoords.getRigidStart().getFullTransformation()*Translation3d(Vector3d(0,0,focalDistance)));
    VRCamera camPassive;
    camPassive.init(screenPassive);
    camPassive.setDrySimulation(true);
    camPassive.setEye(eyeRight);
    return ( camPassive.getModelViewMatrix()*objectActiveTransformation );
}
Example #7
void initRendering()
{
    glEnable(GL_DEPTH_TEST);
    glBlendFunc (GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
    glEnable (GL_BLEND);
    glEnable(GL_LIGHTING);
    glEnable(GL_LIGHT0);
    glEnable(GL_NORMALIZE);
    glEnable(GL_COLOR_MATERIAL);
    glClearColor(0.0,0.0,0.0,1.0);
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();
    cam.init(Screen(20,20,0,0,focalDistance));
    cam.setNearFarPlanes(1.0,200.0);
    cam.setOrthoGraphicProjection(true);
}
Example #8
int main(int argc, char*argv[])
{

    optotrak.setTranslation(calibration);
    if ( optotrak.init(LastAlignedFile) != 0)
    {   cleanup();
        exit(0);
    }

    screen.setWidthHeight(SCREEN_WIDE_SIZE, SCREEN_WIDE_SIZE*SCREEN_HEIGHT/SCREEN_WIDTH);
    screen.setOffset(alignmentX,alignmentY);
    screen.setFocalDistance(focalDistance);
    cam.init(screen);

    recordHeadEyeRelativePositions();

    glutInit(&argc, argv);
    if (stereo)
        glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGB | GLUT_DEPTH | GLUT_STEREO);
    else
        glutInitDisplayMode( GLUT_DOUBLE | GLUT_RGBA | GLUT_DEPTH);

    if ( gameMode )
    {
        glutGameModeString(ROVERETO_GAME_MODE_STRING);
        glutEnterGameMode();
    }
    else
    {
        glutInitWindowSize(SCREEN_WIDTH, SCREEN_HEIGHT);
        glutCreateWindow("CNCSVISION Example 2 HappyBuddha");
        glutFullScreen();
    }
    initRendering();

    model.load("../../data/objmodels/happyBuddha.obj");
    glutDisplayFunc(drawGLScene);
    glutKeyboardFunc(handleKeypress);
    glutReshapeFunc(handleResize);
    glutTimerFunc(TIMER_MS, update, 0);
    glutSetCursor(GLUT_CURSOR_NONE);
    /* Application main loop */
    glutMainLoop();

    cleanup();
    return 0;
}
Example #9
void initProjectionScreen(double _focalDist)
{   screen.setWidthHeight(SCREEN_WIDE_SIZE, SCREEN_WIDE_SIZE*SCREEN_HEIGHT/SCREEN_WIDTH);
    screen.setOffset(alignmentX,alignmentY);
    screen.setFocalDistance(_focalDist);
    cam.init(screen);
}
void initProjectionScreen(double _focalDist)
{
    cam.init(Screen(SCREEN_WIDE_SIZE, SCREEN_WIDE_SIZE*SCREEN_HEIGHT/SCREEN_WIDTH,alignmentX,alignmentY,_focalDist));
    RoveretoMotorFunctions::moveScreenAbsolute(_focalDist,homeFocalDistance,3500);
}
// This function defines the distance and orientation of the virtual projection screen.
// It is important to leave it as is, unless you want to:
// 1) simulate a projection screen that moves, or in any case one whose orientation differs
//    from the standard one, i.e. the plane at z=focalDistance
// 2) shift the projection plane by changing alignmentX and alignmentY; for that, I recommend
//    contacting me or using the file headCalibration.cpp, which binds screen movement to the
//    keys 2,4,6,8 so that the virtual world can be aligned with the Optotrak world
void initProjectionScreen(double _focalDist, const Affine3d &_transformation, bool synchronous)
{
    cam.init(Screen(SCREEN_WIDE_SIZE, SCREEN_WIDE_SIZE*SCREEN_HEIGHT/SCREEN_WIDTH,alignmentX,alignmentY,_focalDist));
}
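The comment above distinguishes the standard fixed screen (the plane at z=focalDistance) from a moving, head-coupled screen. As a usage sketch only, reusing the passiveMode, headEyeCoords and focalDistance globals from Example #12 below and the two-argument overload from Example #2 (not additional library code), the two call patterns look like this:

if ( passiveMode )
    // Moving screen: follow the head's current rigid-body transformation, offset to the focal plane
    initProjectionScreen(0, headEyeCoords.getRigidStart().getFullTransformation()*Translation3d(Vector3d(0,0,focalDistance)));
else
    // Standard case: static projection plane at z=focalDistance, no extra transformation
    initProjectionScreen(focalDistance, Affine3d::Identity());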
Example #12
void update(int value)
{
    // Count the stimulus presentation cycles
    if ( (sumOutside > str2num<int>(parameters.find("StimulusCycles")) ) &&  (trialMode == STIMULUSMODE) )
    {
        sumOutside=0;
        trialMode++;
        trialMode=trialMode%4;
    }

    if (conditionInside && (sumOutside*2 > str2num<int>(parameters.find("FixationCycles"))) && (trialMode ==FIXATIONMODE )  )
    {
        sumOutside=0;
        trialMode++;
        trialMode=trialMode%4;
        stimulusDuration.start();
    }
    if ( trialMode == STIMULUSMODE )
        stimulusFrames++;
    if ( trialMode == FIXATIONMODE )
        stimulusFrames=0;

    Screen screenPassive;

    screenPassive.setWidthHeight(SCREEN_WIDE_SIZE, SCREEN_WIDE_SIZE*SCREEN_HEIGHT/SCREEN_WIDTH);
    screenPassive.setOffset(alignmentX,alignmentY);
    screenPassive.setFocalDistance(0);
    screenPassive.transform(headEyeCoords.getRigidStart().getFullTransformation()*Translation3d(center));

    camPassive.init(screenPassive);
    camPassive.setDrySimulation(true);
    camPassive.setEye(eyeRight);
    objectPassiveTransformation = ( camPassive.getModelViewMatrix()*objectActiveTransformation );
    // Coordinates picker
    markers = optotrak.getAllPoints();
    if ( isVisible(markers[1]) && isVisible(markers[2]) && isVisible(markers[3]) )
        headEyeCoords.update(markers[1],markers[2],markers[3]);
    Affine3d active = headEyeCoords.getRigidStart().getFullTransformation();

    eulerAngles.init( headEyeCoords.getRigidStart().getFullTransformation().rotation() );

    eyeLeft = headEyeCoords.getLeftEye();
    eyeRight = headEyeCoords.getRightEye();

    cyclopeanEye = (eyeLeft+eyeRight)/2.0;

    // Projection of view normal on the focal plane
    Vector3d directionOfSight = (active.rotation()*Vector3d(0,0,-1)).normalized();
    Eigen::ParametrizedLine<double,3> lineOfSightRight = Eigen::ParametrizedLine<double,3>::Through( eyeRight , eyeRight+directionOfSight );
    Eigen::ParametrizedLine<double,3> lineOfSightLeft  = Eigen::ParametrizedLine<double,3>::Through( eyeLeft, eyeLeft+directionOfSight );

    double lineOfSightRightDistanceToFocalPlane = lineOfSightRight.intersection(focalPlane);
    double lineOfSightLeftDistanceToFocalPlane = lineOfSightLeft.intersection(focalPlane);

    //double lenghtOnZ = (active*(center-eyeCalibration )+eyeRight).z();
    projPointEyeRight = lineOfSightRightDistanceToFocalPlane *(directionOfSight)+ (eyeRight);
    projPointEyeLeft= lineOfSightLeftDistanceToFocalPlane * (directionOfSight) + (eyeLeft);
    // Second projection: the fixation point computed with non-constant z but kept perfectly parallel to projPointEyeRight
    lineOfSightRightDistanceToFocalPlane= (( active.rotation()*(center)) - eyeRight).norm();
    Vector3d secondProjection = lineOfSightRightDistanceToFocalPlane *(directionOfSight)+ (eyeRight);

    if ( !zOnFocalPlane )
        projPointEyeRight=secondProjection ;

    // Compute the translation to move the eye in order to avoid shear components
    Vector3d posAlongLineOfSight = (headEyeCoords.getRigidStart().getFullTransformation().rotation())*(eyeRight -eyeCalibration);

    switch ( (int)factors["Translation"] )
    {
    case -1:
    case -2:
        translationFactor.setZero();
        if ( trialMode == STIMULUSMODE )
            projPointEyeRight=center;
        break;
    case 0:
        translationFactor.setZero();
        break;
    case 1:
        translationFactor = factors["TranslationConstant"]*Vector3d(posAlongLineOfSight.z(),0,0);
        break;
    case 2:
        translationFactor = factors["TranslationConstant"]*Vector3d(0,posAlongLineOfSight.z(),0);
        break;
    }
    if ( passiveMode )
        initProjectionScreen(0,headEyeCoords.getRigidStart().getFullTransformation()*Translation3d(Vector3d(0,0,focalDistance)));
    else
        initProjectionScreen(focalDistance,Affine3d::Identity());

    checkBounds();
    /**** Save to file part ****/
    // The markers file stores the markers used and the time-dependent experimental variables
    // (make sure that in the passive experiment the list of variables appears in the same order)
    markersFile << trialNumber << " " << headCalibrationDone << " " << trialMode << " " ;
    markersFile <<markers[1].transpose() << " " << markers[2].transpose() << " " << markers[3].transpose() << " " << markers[17].transpose() << " " << markers[18].transpose() << " " ;

    markersFile <<	factors["Tilt"] << " " <<
                factors["Slant"] << " " <<
                factors["Translation"] << " " <<
                factors["Onset"] << " " <<
                factors["TranslationConstant"] <<
                endl;

    ofstream outputfile;
    outputfile.open("data.dat");
    outputfile << "Subject Name: " << parameters.find("SubjectName") << endl;
    outputfile << "Passive matrix:" << endl << objectPassiveTransformation.matrix() << endl;
    outputfile << "Yaw: " << toDegrees(eulerAngles.getYaw()) << endl <<"Pitch: " << toDegrees(eulerAngles.getPitch()) << endl;
    outputfile << "EyeLeft: " <<  headEyeCoords.getLeftEye().transpose() << endl;
    outputfile << "EyeRight: " << headEyeCoords.getRightEye().transpose() << endl << endl;
    outputfile << "Slant: " << instantPlaneSlant << endl;
    outputfile << "(Width,Height) [px]: " << getPlaneDimensions().transpose() << " " << endl;
    outputfile << "Factors:" << endl;
    for (map<string,double>::iterator iter=factors.begin(); iter!=factors.end(); ++iter)
    {
        outputfile << "\t\t" << iter->first << "= " << iter->second << endl;
    }
    outputfile << "Trial remaining: " << trial.getRemainingTrials()+1 << endl;
    outputfile << "Last response: " << probeAngle << endl;
    // Here we save plane projected width and height


    // now rewind the file
    outputfile.clear();
    outputfile.seekp(0,ios::beg);

    // Write down frame by frame the trajectories and angles of eyes and head
    if ( trialMode == STIMULUSMODE && headCalibrationDone > 2 )
    {
        trajFile << setw(6) << left <<
                 trialNumber << " " <<
                 stimulusFrames << " " <<
                 eyeRight.transpose() << endl;

        anglesFile << setw(6) << left <<
                   trialNumber << " " <<
                   stimulusFrames << " " <<
                   toDegrees(eulerAngles.getPitch()) << " " <<
                   toDegrees(eulerAngles.getRoll()) << " " <<
                   toDegrees(eulerAngles.getYaw()) << " " <<
                   instantPlaneSlant << endl;

        matrixFile << setw(6) << left <<
                   trialNumber << " " <<
                   stimulusFrames << " " ;
        for (int i=0; i<3; i++)
            matrixFile << objectPassiveTransformation.matrix().row(i) << " " ;
        matrixFile << endl;

        // Write the 13 special extremal points on stimFile
        stimFile << setw(6) << left <<
                 trialNumber << " " <<
                 stimulusFrames << " " ;
        double winx=0,winy=0,winz=0;

        for (PointsRandIterator iRand = redDotsPlane.specialPointsRand.begin(); iRand!=redDotsPlane.specialPointsRand.end(); ++iRand)
        {   Point3D *p=(*iRand);
            Vector3d v = objectActiveTransformation*Vector3d( p->x, p->y, p->z);

            gluProject(v.x(),v.y(),v.z(), cam.getModelViewMatrix().data(), cam.getProjectiveMatrix().data(), cam.getViewport().data(), &winx,&winy,&winz);
            stimFile << winx << " " << winy << " " << winz << " ";
        }
        stimFile << endl;
    }

    glutPostRedisplay();
    glutTimerFunc(TIMER_MS, update, 0);
}