Example #1
/* Callback functions */
static void
colorCallback(void *data, SoSensor *sensor)
{
    // Retrieve the field the sensor is attached to and the output buffer.
    SoSFVec3f *background = (SoSFVec3f *)((SoFieldSensor *)sensor)->getAttachedField();
    unsigned char *color = (unsigned char *)data;

    // Convert the normalized [0,1] RGB components to 8-bit [0,255] values.
    color[0] = (unsigned char)(255.0 * background->getValue()[0]);
    color[1] = (unsigned char)(255.0 * background->getValue()[1]);
    color[2] = (unsigned char)(255.0 * background->getValue()[2]);
}
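
The callback reads the field through the sensor that fired, so it only does something once attached. Below is a minimal usage sketch, not from the original source: the attachColorSensor helper and the rgb buffer are hypothetical names, and it assumes Open Inventor's standard SoFieldSensor API.

static unsigned char rgb[3];

static void
attachColorSensor(SoSFVec3f *backgroundColor)
{
    // Fire colorCallback whenever the attached field changes;
    // the rgb buffer is passed through as the user-data pointer.
    SoFieldSensor *sensor = new SoFieldSensor(colorCallback, rgb);
    sensor->attach(backgroundColor);
}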
Example #2
File: vpSimulator.cpp Project: tswang/visp
void
vpSimulator::getExternalCameraPosition(vpHomogeneousMatrix &cMf)
{
/*  SoCamera *camera ;
  camera  = this->externalView->getCamera() ;*/
  // get the position
  SoSFVec3f position = externalCamera->position;

  // get the rotation
  SoSFRotation orientation = externalCamera->orientation;
  SbVec3f axis;
  float angle;
  orientation.getValue(axis, angle);
  SbRotation rotation(axis, angle);

  // get the translation
  SbVec3f t;
  t = position.getValue();

  SbMatrix matrix;
  matrix.setRotate(rotation);

  vpHomogeneousMatrix fMc;
  // Open Inventor cameras look down the -Z axis; rotate 180 degrees about X
  // so the frame matches the ViSP camera convention.
  SbMatrix rotX;
  rotX.setRotate(SbRotation(SbVec3f(1.0f, 0.0f, 0.0f), (float)M_PI));
  matrix.multLeft(rotX);
  // SbMatrix stores transforms in the row-vector convention, so copy the
  // rotation transposed into the homogeneous matrix.
  for (unsigned int i = 0; i < 4; i++)
    for (unsigned int j = 0; j < 4; j++)
      fMc[j][i] = matrix[(int)i][(int)j];
  fMc[0][3] = t[0];
  fMc[1][3] = t[1];
  fMc[2][3] = t[2];

  cMf = fMc.inverse();
}
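
For context, a hedged sketch of a caller: the printExternalPose helper is hypothetical, the simulator and scene initialization are elided, and the include path assumes an older ViSP layout where the header lives at visp/vpSimulator.h.

#include <visp/vpSimulator.h>
#include <iostream>

// Hypothetical helper: fetch the external camera pose and print it.
void
printExternalPose(vpSimulator &simu)
{
    vpHomogeneousMatrix cMf;
    simu.getExternalCameraPosition(cMf);
    std::cout << "cMf:" << std::endl << cMf << std::endl;
}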
Example #3
File: InvTracker.cpp Project: nixz/covise
////////////////////////////////////////////////////////////////////////
//
//	Called whenever the tracking sensor fires.
//
//  Use: static private
//
//  Uwe Woessner
//
void
InvFullViewer::trackingSensorCB(void *p, SoSensor *)
//
////////////////////////////////////////////////////////////////////////
{
    InvFullViewer *v = (InvFullViewer *)p;
    MouseRecordType record;
    PolhemusRecord p_record;
    float P, Y, R;
    float dP, dY, dR;
    float dpx, dpy, dpz;
    // static float angle = 0;
    static float tdpx = 0;
    static float tdpy = 0;
    static float tdpz = 0;
    static int out_of_range_flag = 0;
    // static float tdP=0;
    // static float tdY=0;
    // static float tdR=0;

    // SoCamera *camera = InvViewer::viewer->getCamera();

    // grab one valid record
    if (v->trackingDevice < 3) // Logitech on port 1 or 2
    {
        get_record(v->trackingFd, &record);
        // out of range
        if ((record.buttons & 0x20) && (!out_of_range_flag))
        {
            v->buttonList[TRACK_PUSH]->select(0);
            out_of_range_flag = 1;
        }
        else if (out_of_range_flag)
        {
            v->buttonList[TRACK_PUSH]->select(1);
            out_of_range_flag = 0;
        }

// move the camera position by the translation amount
// in the forward direction.
// we will ignore 'y' motion since this is the walk viewer.
// first, orient the tracker data to our camera orientation.

// first update the position according to current position and tracker position
#define TFACT 0.05

        // get current transform
        SoSFVec3f translation;
        // scale position and create relative position
        dpx = (record.x - tdpx) * TFACT;
        dpy = (record.y - tdpy) * TFACT;
        dpz = (record.z - tdpz) * TFACT;
        // printf("%f %f, %f\n",dpx,dpy,dpz);
        // set error boundary
        if ((fabs(dpx) > 10.0) || (fabs(dpy) > 10.0) || (fabs(dpz) > 10.0))
        {
            cerr << "You moved to fast!\n";
            // save values
            tdpx = record.x;
            tdpy = record.y;
            tdpz = record.z;
            return; // maybe the tracker track is not correct
        }
        // save values
        tdpx = record.x;
        tdpy = record.y;
        tdpz = record.z;

        // printf("r: %f  %f %f %f\n",dpx,dpy,dpz,record.xr,record.yr,record.zr,record.ar);
        translation.setValue(dpx, dpy, dpz);
        v->camera->position.setValue(v->camera->position.getValue() + translation.getValue());

        // now the rotations
        //

        // little correction
        P = (record.pitch + 20.0) * M_PI / 180.0;
        Y = (record.yaw) * M_PI / 180.0;
        R = (record.roll) * M_PI / 180.0;

#define RFACT 2.0
        // scale rotation (better not)
        dP = P * (RFACT);
        dY = Y * (RFACT);
        dR = R * (RFACT / 2.0);

        // convert the scaled Euler angles (roll, pitch, yaw) to a quaternion
        float ra = cos(dR / 2) * cos(dP / 2) * cos(dY / 2) + sin(dR / 2) * sin(dP / 2) * sin(dY / 2);
        float rx = cos(dR / 2) * sin(dP / 2) * cos(dY / 2) + sin(dR / 2) * cos(dP / 2) * sin(dY / 2);
        float ry = cos(dR / 2) * cos(dP / 2) * sin(dY / 2) + sin(dR / 2) * sin(dP / 2) * cos(dY / 2);
        float rz = sin(dR / 2) * cos(dP / 2) * cos(dY / 2) + cos(dR / 2) * sin(dP / 2) * sin(dY / 2);

        v->camera->orientation.setValue(rx, ry, rz, ra);
    }
    else // Fastrak is in the house (port 3 or 4)
    {
        fastrackGetSingleRecord(v->trackingFd, &p_record);
        SoSFVec3f translation;
        // scale position and create relative position
        dpx = (p_record.x - tdpx) * TFACT;
        dpy = (p_record.y - tdpy) * TFACT;
        dpz = (p_record.z - tdpz) * TFACT;
        // save values
        tdpx = p_record.x;
        tdpy = p_record.y;
        tdpz = p_record.z;

        // printf("r: %f  %f %f %f\n",p_record.q1,p_record.q2,p_record.q3,p_record.w);
        translation.setValue(dpx, dpy, dpz);
        v->camera->position.setValue(v->camera->position.getValue() + translation.getValue());

        // v->camera->orientation.setValue(p_record.q1,p_record.q2,p_record.q3,(float)(2*facos(p_record.w)));
    }
}
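
The callback only polls the tracker when something schedules it. One way to drive it periodically is an SoTimerSensor; this wiring sketch is hypothetical (the original project sets the sensor up elsewhere), and since trackingSensorCB is private it would have to live inside InvFullViewer.

// Poll the tracking device roughly 30 times per second.
SoTimerSensor *trackingSensor = new SoTimerSensor(InvFullViewer::trackingSensorCB, this);
trackingSensor->setInterval(SbTime(1.0 / 30.0));
trackingSensor->schedule();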