void ViewProviderMirror::unsetEdit(int ModNum)
{
    if (ModNum == ViewProvider::Default) {
        SoCenterballManip* manip = static_cast<SoCenterballManip *>(pcEditNode->getChild(0));

        SbVec3f move = manip->translation.getValue();
        SbVec3f center = manip->center.getValue();
        SbRotation rot = manip->rotation.getValue();

        // get the whole translation
        move += center;
        rot.multVec(center,center);
        move -= center;

        // the new axis of the plane
        SbVec3f norm(0,0,1);
        rot.multVec(norm,norm);

        // apply the new values
        Part::Mirroring* mf = static_cast<Part::Mirroring*>(getObject());
        mf->Base.setValue(move[0],move[1],move[2]);
        mf->Normal.setValue(norm[0],norm[1],norm[2]);

        pcRoot->removeChild(pcEditNode);
        pcEditNode->removeAllChildren();
    }
    else {
        ViewProviderPart::unsetEdit(ModNum);
    }
}
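To make the arithmetic above easier to follow, here is a minimal standalone sketch (Coin3D types, made-up values; not part of FreeCAD) of how the centerball's translation, center and rotation collapse into the mirror plane's base point and normal: the base is translation + center - R*center, and the normal is the rotated +Z axis.

#include <Inventor/SbLinear.h>
#include <cstdio>

int main()
{
    // hypothetical manipulator state: 90 degree turn about X, centered at (0, 2, 0)
    SbVec3f translation(1.0f, 0.0f, 0.0f);
    SbVec3f center(0.0f, 2.0f, 0.0f);
    SbRotation rot(SbVec3f(1.0f, 0.0f, 0.0f), 1.5707963f);

    SbVec3f base = translation + center;   // start from translation + center
    SbVec3f rotatedCenter;
    rot.multVec(center, rotatedCenter);    // rotate the center offset ...
    base -= rotatedCenter;                 // ... and subtract it again: base = (1, 2, -2)

    SbVec3f normal(0.0f, 0.0f, 1.0f);
    rot.multVec(normal, normal);           // mirror plane normal = rotated +Z = (0, -1, 0)

    printf("base = (%g, %g, %g), normal = (%g, %g, %g)\n",
           base[0], base[1], base[2], normal[0], normal[1], normal[2]);
    return 0;
}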
void 
SoXipOverlayTransformBoxManip::rotate( SoHandleEventAction* action )
{
	SbVec3f projPt;
	getPoint( action, projPt );
	
	const SbVec3f* point = mControlPointsCoords->point.getValues(0);

	SbVec3f center = (point[0] + point[4]) / 2.;

	SbVec3f rotateFrom	= point[mControlPointId] - center;
	SbVec3f rotateTo	= projPt - center;

	SbVec3f normal = mViewVolume.getPlane(0).getNormal();
	
	SbRotation rotation;
	rotation.setValue( normal, angleBetweenVectors( rotateFrom, rotateTo, normal ) );
	
	SbMatrix centerMatrix;
	centerMatrix.setTranslate( center );

	SbMatrix rotationMatrix;
	rotationMatrix.setRotate( rotation );

	mTransformationMatrix = centerMatrix.inverse() * rotationMatrix * centerMatrix;
	transform( mActionNode );
}
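The centerMatrix.inverse() * rotationMatrix * centerMatrix product above is the usual "rotate about an arbitrary point" sandwich. A minimal sketch (Coin3D types, made-up values) of what it does under Open Inventor's row-vector convention, where the leftmost factor is applied to a point first:

#include <Inventor/SbLinear.h>
#include <cstdio>

int main()
{
    SbVec3f center(1.0f, 1.0f, 0.0f);
    SbRotation rotation(SbVec3f(0.0f, 0.0f, 1.0f), 1.5707963f); // 90 degrees about Z

    SbMatrix centerMatrix;
    centerMatrix.setTranslate(center);
    SbMatrix rotationMatrix;
    rotationMatrix.setRotate(rotation);

    // translate by -center, rotate, translate back by +center
    SbMatrix m = centerMatrix.inverse() * rotationMatrix * centerMatrix;

    SbVec3f p(2.0f, 1.0f, 0.0f); // one unit to the right of the center
    SbVec3f q;
    m.multVecMatrix(p, q);       // expected: one unit above the center, i.e. (1, 2, 0)
    printf("q = (%g, %g, %g)\n", q[0], q[1], q[2]);
    return 0;
}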
/*! \param[in,out] vect vector to rotate
 *  \param axis
 *  \param angle
 */
void kCamera::rotateVector(SbVec3f& vect, const SbVec3f axis, const double angle)
{
	SbRotation vecRotation;
	vecRotation.setValue(axis,angle);
	vecRotation.multVec(vect,vect);
	vect.normalize();
}
/*! \param rotAxis
 *  \param rotAngle
 */
void kCamera::rotatePosition(SbVec3f rotAxis, double rotAngle, SbVec3f axisPoint)
{
	//Error20051017: The rotation used to be about an axis through the origin only, without shifting that axis (into the look-at point).
	//Therefore first translate the current position by the rotation point and translate back again afterwards.

	SbVec3f tempPos = currentPosition - axisPoint; //Error20051017

	// rotate the position
	SbRotation pointRotation;
	pointRotation.setValue(rotAxis,rotAngle);
	//pointRotation.multVec(currentPosition, currentPosition);
	pointRotation.multVec(tempPos, tempPos); //Error20051017
	currentPosition = tempPos + axisPoint; //Error20051017

	currentLookDir = currentLookAt-currentPosition;
	currentLookDir.normalize();

	currentUpVec = calcUpVector(currentLookDir,NormPlump); //! compute the new up vector - the rotation is already applied inside calcUpVector
	currentUpVec.normalize();

	currentOrientation = calcOrientation(currentUpVec,currentLookDir); //! compute the new orientation

//	writeOrientation(currentOrientation); //! write the orientation to the ObjMgr
//	writePosition(currentPosition); //! write the position to the ObjMgr
}
Example #5
void Cone::transform(SoTransform *transformation){

	scale_vector = transformation->scaleFactor.getValue();
    radius = scale_vector[0];

	SbRotation rotation = transformation->rotation.getValue();
	rotation.getValue(rotation_axis, rotation_angle);
	translation_vector = transformation->translation.getValue();
	SbMatrix T,S, R, F;
	T.setTranslate(translation_vector);
	print_vector(translation_vector); //ntc
	print_vector(scale_vector); //ntc
	//std::cout << rotation[0] << rotation[1]<<rotation[2]<<rotation[3]; //ntc
	print_vector(rotation_axis); //ntc
	std::cout<<rotation_angle<<std::endl<<std::endl;

	R.setRotate(rotation);
	S.setScale(scale_vector);
//	F = T*R*S;
    F = S * R *T;
	SbVec3f pos(0,0,0);
	F.multVecMatrix(pos, pos);
	M = F; // save transformation matrix
	if(M.det4() != 0 ){
	    iM = F.inverse(); // store inverse if inverse exists
    }
	position = pos;
	print_vector(position);
}
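For reference, with Open Inventor's row-vector convention (multVecMatrix computes p' = p * F), the order F = S * R * T applies the scale first, then the rotation, then the translation:

    p_world = ((p_local * S) * R) * T

which is presumably why the commented-out T*R*S order was abandoned; iM then maps world points back into the cone's local frame whenever the matrix is invertible (the det4() check).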
Example #6
void
vpSimulator::moveInternalCamera(vpHomogeneousMatrix &cMf)
{

  SbMatrix matrix;
  SbRotation rotCam;
  SbMatrix rotX;
  rotX.setRotate (SbRotation (SbVec3f(1.0f, 0.0f, 0.0f), (float)M_PI));
  for(unsigned int i=0;i<4;i++)
    for(unsigned int j=0;j<4;j++)
      matrix[(int)j][(int)i]=(float)cMf[i][j];

  matrix= matrix.inverse();
  matrix.multLeft (rotX);
  rotCam.setValue(matrix);


  internalCamera->ref() ;
  internalCamera->orientation.setValue(rotCam);
  internalCamera->position.setValue(matrix[3][0],matrix[3][1],matrix[3][2]);
  internalCamera->unref() ;

  rotX.setRotate (SbRotation (SbVec3f(-1.0f, 0.0f, 0.0f), (float)M_PI));
  matrix.multLeft (rotX);
  rotCam.setValue(matrix);
  internalCameraPosition->ref() ;
  internalCameraPosition->rotation.setValue(rotCam);
  internalCameraPosition->translation.setValue(matrix[3][0],matrix[3][1],matrix[3][2]);
  internalCameraPosition->unref() ;
}
Example #7
void NaviCubeImplementation::rotateView(int axis,float rotAngle) {
	SbRotation viewRot = m_View3DInventorViewer->getCameraOrientation();

	SbVec3f up;
	viewRot.multVec(SbVec3f(0,1,0),up);

	SbVec3f out;
	viewRot.multVec(SbVec3f(0,0,1),out);

	SbVec3f& u = up;
	SbVec3f& o = out;
	SbVec3f right  (u[1]*o[2]-u[2]*o[1], u[2]*o[0]-u[0]*o[2], u[0]*o[1]-u[1]*o[0]);

	SbVec3f direction;
	switch (axis) {
	default :
		return;
	case DIR_UP :
		direction = up;
		break;
	case DIR_OUT :
		direction = out;
		break;
	case DIR_RIGHT :
		direction = right;
		break;
	}

	SbRotation rot(direction, -rotAngle*M_PI/180.0);
	SbRotation newViewRot = viewRot * rot;
	m_View3DInventorViewer->setCameraOrientation(newViewRot);

}
Example #8
/*!
  Sets this quaternion by converting from Inventor format.
*/
void
Quaternion::set(const SbRotation &SbRot)
{
  w = (double)SbRot.getValue()[3];
  x = (double)SbRot.getValue()[0];
  y = (double)SbRot.getValue()[1];
  z = (double)SbRot.getValue()[2];
  normalise();
}
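Note that SbRotation::getValue() without arguments returns the raw quaternion as (x, y, z, w), so index 3 above really is the scalar part. A tiny check (Coin3D types, made-up rotation):

#include <Inventor/SbLinear.h>
#include <cstdio>

int main()
{
    // 90 degrees about Z: quaternion should be roughly (0, 0, 0.7071, 0.7071)
    SbRotation rot(SbVec3f(0.0f, 0.0f, 1.0f), 1.5707963f);
    const float* q = rot.getValue();
    printf("x=%g y=%g z=%g w=%g\n", q[0], q[1], q[2], q[3]);
    return 0;
}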
/// return the camera definition of the active view
static PyObject *
povViewCamera(PyObject *self, PyObject *args)
{
    // no arguments
    if (!PyArg_ParseTuple(args, ""))
        return NULL;
    PY_TRY {
        std::string out;
        const char* ppReturn=0;

        Gui::Application::Instance->sendMsgToActiveView("GetCamera",&ppReturn);

        SoNode* rootNode;
        SoInput in;
        in.setBuffer((void*)ppReturn,std::strlen(ppReturn));
        SoDB::read(&in,rootNode);

        if (!rootNode || !rootNode->getTypeId().isDerivedFrom(SoCamera::getClassTypeId()))
            throw Base::Exception("CmdRaytracingWriteCamera::activated(): Could not read "
                                  "camera information from ASCII stream....\n");

        // root-node returned from SoDB::readAll() has initial zero
        // ref-count, so reference it before we start using it to
        // avoid premature destruction.
        SoCamera * Cam = static_cast<SoCamera*>(rootNode);
        Cam->ref();

        SbRotation camrot = Cam->orientation.getValue();

        SbVec3f upvec(0, 1, 0); // init to default up vector
        camrot.multVec(upvec, upvec);

        SbVec3f lookat(0, 0, -1); // init to default view direction vector
        camrot.multVec(lookat, lookat);

        SbVec3f pos = Cam->position.getValue();
        float Dist = Cam->focalDistance.getValue();

        // making gp out of the Coin stuff
        gp_Vec gpPos(pos.getValue()[0],pos.getValue()[1],pos.getValue()[2]);
        gp_Vec gpDir(lookat.getValue()[0],lookat.getValue()[1],lookat.getValue()[2]);
        lookat *= Dist;
        lookat += pos;
        gp_Vec gpLookAt(lookat.getValue()[0],lookat.getValue()[1],lookat.getValue()[2]);
        gp_Vec gpUp(upvec.getValue()[0],upvec.getValue()[1],upvec.getValue()[2]);

        // getting image format
        ParameterGrp::handle hGrp = App::GetApplication().GetParameterGroupByPath("User parameter:BaseApp/Preferences/Mod/Raytracing");
        int width = hGrp->GetInt("OutputWidth", 800);
        int height = hGrp->GetInt("OutputHeight", 600);

        // call the write method of PovTools....
        out = PovTools::getCamera(CamDef(gpPos,gpDir,gpLookAt,gpUp),width,height);

        return Py::new_reference_to(Py::String(out));
    } PY_CATCH;
}
Example #10
SbRotation
SoBillboard::calculateRotation(SoState *state)
{
    SbRotation rot;
#ifdef INVENTORRENDERER
    const SbViewVolume &viewVolume = SoViewVolumeElement::get(state);

    if (SbVec3f(0.0f, 0.0f, 0.0f) == axis.getValue())
    {
        rot = viewVolume.getAlignRotation();
    }
#else
    const SbMatrix &mm = SoModelMatrixElement::get(state);
    SbMatrix imm = mm.inverse();

    SbVec3f toviewer;
    SbVec3f cameray(0.0f, 1.0f, 0.0f);
    const SbViewVolume &vv = SoViewVolumeElement::get(state);

    toviewer = -vv.getProjectionDirection();
    imm.multDirMatrix(toviewer, toviewer);

    (void)toviewer.normalize();

    SbVec3f rotaxis = this->axis.getValue();

    if (rotaxis == SbVec3f(0.0f, 0.0f, 0.0f))
    {
        // 1. Compute the billboard-to-viewer vector.
        // 2. Rotate the Z-axis of the billboard to be collinear with the
        //    billboard-to-viewer vector and pointing towards the viewer's position.
        // 3. Rotate the Y-axis of the billboard to be parallel and oriented in the
        //    same direction as the Y-axis of the viewer.
        rot.setValue(SbVec3f(0.f, 0.0f, 1.0f), toviewer);
        SbVec3f viewup = vv.getViewUp();
        imm.multDirMatrix(viewup, viewup);

        SbVec3f yaxis(0.0f, 1.0f, 0.0f);
        rot.multVec(yaxis, yaxis);
        SbRotation rot2(yaxis, viewup);

        SbVec3f axis;
        float angle;
        rot.getValue(axis, angle);
        rot2.getValue(axis, angle);
        rot = rot * rot2;
        //SoModelMatrixElement::rotateBy(state, (SoNode*) this, rot);
    }
#endif
    else
    {
        fprintf(stderr, "SoBillboard: axis != (0.0, 0.0, 0.0) not implemented\n");
    }

    return rot;
}
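The non-Inventor branch above is the classic two-step alignment: first rotate the billboard's +Z onto the viewer direction, then add a second rotation that brings the already-rotated +Y onto the view's up direction. A minimal sketch of that composition with made-up directions (Coin3D types; with rot * rot2 the left factor acts on a vector first):

#include <Inventor/SbLinear.h>
#include <cstdio>

int main()
{
    SbVec3f toviewer(1.0f, 0.0f, 0.0f); // pretend the viewer sits along +X
    SbVec3f viewup(0.0f, 0.0f, 1.0f);   // and the view's up direction is +Z

    SbRotation rot(SbVec3f(0.0f, 0.0f, 1.0f), toviewer); // step 1: +Z -> toviewer

    SbVec3f yaxis(0.0f, 1.0f, 0.0f);
    rot.multVec(yaxis, yaxis);          // where +Y ended up after step 1
    SbRotation rot2(yaxis, viewup);     // step 2: fix up the roll
    SbRotation full = rot * rot2;

    SbVec3f z(0.0f, 0.0f, 1.0f), y(0.0f, 1.0f, 0.0f);
    full.multVec(z, z);                 // expected ~ (1, 0, 0)
    full.multVec(y, y);                 // expected ~ (0, 0, 1)
    printf("z -> (%g, %g, %g), y -> (%g, %g, %g)\n",
           z[0], z[1], z[2], y[0], y[1], y[2]);
    return 0;
}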
Example #11
bool Save_camera_settings_to_file::
init( std::string& parameters, GsTL_project* proj,
      Error_messages_handler* errors ) {

  std::vector< std::string > params =
    String_Op::decompose_string( parameters, Actions::separator,
                                 Actions::unique );

  if( params.size() != 1 ) {
    errors->report( "save_camera_settings filename" );  
    return false;
  }

  SmartPtr<Named_interface> view_ni =
    Root::instance()->interface( projectViews_manager + "/main_view"  );
  Oinv_view* view = dynamic_cast<Oinv_view*>( view_ni.raw_ptr() );
  if( !view ) {
    return false;
  }
  
  SoCamera* camera = view->get_render_area()->getCamera();
  if( !camera ) return false;


  std::ofstream out( params[0].c_str() );
  if( !out ) {
    std::ostringstream message;
    message << "Can't write to file " << params[0]; 
    errors->report( message.str() );  
    return false;
  }

  SbVec3f pos = camera->position.getValue();
  SbRotation rot = camera->orientation.getValue();
  float rot1, rot2, rot3, rot4;
  rot.getValue( rot1, rot2, rot3, rot4 );


  out << pos[0] << " " << pos[1] << " " << pos[2] << "\n"
      << rot1 << " " << rot2 << " " << rot3 << " " << rot4 << "\n"
      << camera->aspectRatio.getValue() << "\n"
      << camera->nearDistance.getValue() << "\n"
      << camera->farDistance.getValue() << "\n"
      << camera->focalDistance.getValue() << std::endl;
  out.close();

  return true;
}
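Note that the four-float SbRotation::getValue(rot1, rot2, rot3, rot4) used here yields the raw quaternion components (x, y, z, w), not an axis/angle pair. A hypothetical file written for a camera at (0, 0, 10) with identity orientation would therefore look roughly like

    0 0 10
    0 0 0 1
    1.33333
    0.1
    100
    10

i.e. position, orientation quaternion, aspect ratio, near distance, far distance and focal distance, one group per line.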
Example #12
    static
    void copyCameraSettings(SoCamera* cam1, SbRotation& rot_cam1, SbVec3f& pos_cam1,
                            SoCamera* cam2, SbRotation& rot_cam2, SbVec3f& pos_cam2)
    {
        // recompute the diff we have applied to the camera's orientation
        SbRotation rot = cam1->orientation.getValue();
        SbRotation dif = rot * rot_cam1.inverse();
        rot_cam1 = rot;

        // copy the values
        cam2->enableNotify(false);
        cam2->nearDistance = cam1->nearDistance;
        cam2->farDistance = cam1->farDistance;
        cam2->focalDistance = cam1->focalDistance;
        reorientCamera(cam2,dif);
        rot_cam2 = cam2->orientation.getValue();

        // reverse engineer the translation part in wc
        SbVec3f pos = cam1->position.getValue();
        SbVec3f difpos = pos - pos_cam1;
        pos_cam1 = pos;
        // the translation in pixel coords
        cam1->orientation.getValue().inverse().multVec(difpos,difpos);
        // the translation again in wc for the second camera
        cam2->orientation.getValue().multVec(difpos,difpos);
        cam2->position.setValue(cam2->position.getValue()+difpos);

        if (cam1->getTypeId() == cam2->getTypeId()) {
            if (cam1->getTypeId() == SoOrthographicCamera::getClassTypeId())
                static_cast<SoOrthographicCamera*>(cam2)->height =
                static_cast<SoOrthographicCamera*>(cam1)->height;
        }

        cam2->enableNotify(true);
    }
Example #13
void
vpSimulator::addObject(SoSeparator * object,
		       const vpHomogeneousMatrix &fMo,
		       SoSeparator * root)
{

  bool identity = true ;
  for (unsigned int i=0 ; i <4 ;i++){
    for (unsigned int j=0 ; j < 4 ; j++){
      if (i==j) {
        if (fabs(fMo[i][j] - 1) > 1e-6) identity = false;
      }
      else {
        if (fabs(fMo[i][j]) > 1e-6) identity = false;
      }
    }
  }

  if (identity==true)
  {
    root->addChild (object);
  }
  else
  {
    SbMatrix matrix;
    SbRotation rotation;
    for(unsigned int i=0;i<4;i++)
      for(unsigned int j=0;j<4;j++)
      	matrix[(int)j][(int)i]=(float)fMo[i][j];

    //  matrix= matrix.inverse();
    rotation.setValue(matrix);

    SoTransform *displacement = new SoTransform;
    SoSeparator *newNode =  new SoSeparator;

    displacement->rotation.setValue(rotation);
    displacement->translation.setValue(matrix[3][0],
				       matrix[3][1],
				       matrix[3][2]);

    root->addChild (newNode);
    newNode->addChild (displacement);
    newNode->addChild (object);

  }
}
Example #14
void
SoBillboard::getMatrix(SoGetMatrixAction *action)
//
////////////////////////////////////////////////////////////////////////
{
    //if (! rotation.isIgnored() && ! rotation.isDefault()) {
    SbRotation rot = calculateRotation(action->getState());
    SbMatrix &ctm = action->getMatrix();
    SbMatrix &inv = action->getInverse();
    SbMatrix m;

    rot.getValue(m);
    ctm.multLeft(m);
    rot.invert();
    rot.getValue(m);
    inv.multRight(m);
    //}
}
void SoXipDicomExaminer::tiltCamera( const SbRotation& rot )
{
	SbMatrix m;
	m = getCamera()->orientation.getValue();

	SbVec3f camy;
	rot.multVec( SbVec3f( m[1][0], m[1][1], m[1][2] ), camy );
	m[1][0] = camy[0];
	m[1][1] = camy[1];
	m[1][2] = camy[2];
	
	SbVec3f camx;
	rot.multVec( SbVec3f( m[0][0], m[0][1], m[0][2] ), camx );
	m[0][0] = camx[0];
	m[0][1] = camx[1];
	m[0][2] = camx[2];

	getCamera()->orientation.setValue( SbRotation(m) );
}
void ViewProviderRobotObject::DraggerMotionCallback(SoDragger *dragger)
{
    float q0, q1, q2, q3;

    Robot::RobotObject* robObj = static_cast<Robot::RobotObject*>(pcObject);
    Base::Placement Tcp = robObj->Tcp.getValue();
    const SbMatrix & M = dragger->getMotionMatrix ();
    SbVec3f    translation;
    SbRotation rotation;
    SbVec3f    scaleFactor;
    SbRotation scaleOrientation;
    SbVec3f    center(Tcp.getPosition().x,Tcp.getPosition().y,Tcp.getPosition().z);
    M.getTransform(translation, rotation, scaleFactor, scaleOrientation);
    rotation.getValue(q0, q1, q2, q3);
    //Base::Console().Message("M %f %f %f\n", M[3][0], M[3][1], M[3][2]);
    Base::Rotation rot(q0, q1, q2, q3);
    Base::Vector3d pos(translation[0],translation[1],translation[2]);
    robObj->Tcp.setValue(Base::Placement(pos,rot));
}
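SbMatrix::getTransform() is the counterpart of SbMatrix::setTransform() shown further below: it decomposes a motion matrix back into translation, rotation, scale and scale orientation. A minimal round-trip sketch with made-up values (Coin3D types):

#include <Inventor/SbLinear.h>
#include <cstdio>

int main()
{
    SbMatrix m;
    m.setTransform(SbVec3f(10.0f, 0.0f, 5.0f),                     // translation
                   SbRotation(SbVec3f(0.0f, 1.0f, 0.0f), 0.7854f), // ~45 degrees about Y
                   SbVec3f(1.0f, 1.0f, 1.0f));                     // unit scale

    SbVec3f t, s;
    SbRotation r, so;
    m.getTransform(t, r, s, so);

    SbVec3f axis;
    float angle;
    r.getValue(axis, angle);
    printf("t=(%g, %g, %g) axis=(%g, %g, %g) angle=%g\n",
           t[0], t[1], t[2], axis[0], axis[1], axis[2], angle);
    return 0;
}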
/*! \param orientation
 *  \param[out] upVec
 *  \param[out] lookDir
 *  \param[out] upVecAngle
 */
void kCamera::splitOrientation(const SbRotation orientation, SbVec3f& upVec, SbVec3f& lookDir, double& upVecAngle)
{
	// extract lookDir from the current orientation
	lookDir.setValue(0.0, 0.0, -1.0); //! init to default lookat direction (DIRECTION!)
    orientation.multVec(lookDir, lookDir);
	lookDir.normalize();

	// extract the up vector from the current orientation
	upVec.setValue(0.0, 1.0, 0.0); // init to default up vector direction
	orientation.multVec(upVec, upVec);
	upVec.normalize();

	// determine the ideal up vector (an up vector without any extra twist about the viewing axis)
	SbVec3f perfectUpVec;
	if (fabs(lookDir.dot(NormPlump))>(1.0-epsilon)) //! if lookDir and NormPlump are (nearly) parallel, just reuse upVec (avoids numerical errors)
		perfectUpVec = upVec;
	else
		perfectUpVec = calcPerfectUpVector(lookDir,NormPlump);

	perfectUpVec.normalize();

	// a dot b = |a| * |b| * cos(angle between a and b)
	double tempDot = upVec.dot(perfectUpVec); //! there were cases where .dot was marginally greater than 1.0 -> acos = 1.#IND (NaN)
	if (tempDot>1.0) tempDot = 1.0;
	if (tempDot<-1.0) tempDot = -1.0;
	upVecAngle = acos(tempDot); //! 1.0 = product of the two lengths ... but both are normalized


	// Determine the direction of the angle and add 180 degrees if necessary.
	// In R3 there is at first no such thing as a positive or negative rotation angle;
	// only once the plane defined by the two vectors is oriented does a sign make sense.
	// We therefore call the angle positive when the Hesse normal vector of that plane and the cross product point in the same direction.
	if (getUpVecAngleDir(lookDir,upVec)>epsilon && (upVecAngle<(kBasics::PI-epsilon)))
		upVecAngle = kBasics::PI + (kBasics::PI - upVecAngle);

	// avoid angles > 2*PI
	if (upVecAngle>(2*(kBasics::PI-epsilon)))
		upVecAngle = upVecAngle - 2*kBasics::PI;

	// clamp very small angles to 0.0
	if (fabs(upVecAngle)<epsilon)
		upVecAngle = 0.0; //! otherwise values like 1.#IND (NaN) show up
}
Example #18
void
SbMatrix::setTransform(const SbVec3f &translation,
		 const SbRotation &rotation,
		 const SbVec3f &scaleFactor,
		 const SbRotation &scaleOrientation,
		 const SbVec3f &center)
{
#define TRANSLATE(vec)		m.setTranslate(vec), multLeft(m)
#define ROTATE(rot)		rot.getValue(m), multLeft(m)
    SbMatrix m;

    makeIdentity();
    
    if (translation != SbVec3f(0,0,0))
	TRANSLATE(translation);

    if (center != SbVec3f(0,0,0))
	TRANSLATE(center);

    if (rotation != SbRotation(0,0,0,1))
	ROTATE(rotation);

    if (scaleFactor != SbVec3f(1,1,1)) {
	SbRotation so = scaleOrientation;
	if (so != SbRotation(0,0,0,1))
	    ROTATE(so);
	
	m.setScale(scaleFactor);
	multLeft(m);

	if (so != SbRotation(0,0,0,1)) {
	    so.invert();
	    ROTATE(so);
	}
    }

    if (center != SbVec3f(0,0,0))
	TRANSLATE(-center);

#undef TRANSLATE
#undef ROTATE
}
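Since every TRANSLATE/ROTATE above calls multLeft, each new factor lands on the left of the product, so the matrix built here works out to

    M = T(-center) * SO^-1 * S(scaleFactor) * SO * R(rotation) * T(center) * T(translation)

and because Open Inventor multiplies row vectors from the left (p' = p * M), the leftmost factor acts on a point first: move into the center-relative frame, scale along the scale-orientation axes, rotate, move back out of the center frame, and finally translate.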
void ManualAlignment::setViewingDirections(const Base::Vector3d& view1, const Base::Vector3d& up1,
                                           const Base::Vector3d& view2, const Base::Vector3d& up2)
{
    if (myViewer.isNull())
        return;

    {
        SbRotation rot;
        rot.setValue(SbVec3f(0.0f, 0.0f, 1.0f), SbVec3f(-view1.x,-view1.y,-view1.z));

        SbRotation rot2;
        SbVec3f up(0.0f, 1.0f, 0.0f);
        rot.multVec(up, up);
        rot2.setValue(up, SbVec3f(up1.x, up1.y, up1.z));
        myViewer->getViewer(0)->getCamera()->orientation.setValue(rot * rot2);
        myViewer->getViewer(0)->viewAll();
    }

    {
        SbRotation rot;
        rot.setValue(SbVec3f(0.0f, 0.0f, 1.0f), SbVec3f(-view2.x,-view2.y,-view2.z));

        SbRotation rot2;
        SbVec3f up(0.0f, 1.0f, 0.0f);
        rot.multVec(up, up);
        rot2.setValue(up, SbVec3f(up2.x, up2.y, up2.z));
        myViewer->getViewer(1)->getCamera()->orientation.setValue(rot * rot2);
        myViewer->getViewer(1)->viewAll();
    }
}
/*!
  Should be called after motion matrix has been updated by a child
  dragger.
*/
void
SoCenterballDragger::transferCenterDraggerMotion(SoDragger * childdragger)
{
  if (coin_assert_cast<SoNode *>(childdragger) == XCenterChanger.getValue() ||
      coin_assert_cast<SoNode *>(childdragger) == YCenterChanger.getValue() ||
      coin_assert_cast<SoNode *>(childdragger) == ZCenterChanger.getValue()) {
    // translate part of matrix should not change. Move motion
    // into center instead.

    SbVec3f transl;
    SbMatrix matrix = this->getMotionMatrix();
    transl[0] = matrix[3][0];
    transl[1] = matrix[3][1];
    transl[2] = matrix[3][2];

    SbVec3f difftransl = transl - this->savedtransl;
    { // consider rotation before translating
      SbRotation rot = this->rotation.getValue();
      SbMatrix tmp;
      tmp.setRotate(rot.inverse());
      tmp.multVecMatrix(difftransl, difftransl);
    }

    this->centerFieldSensor->detach();
    this->center = difftransl + this->savedcenter;
    this->centerFieldSensor->attach(&this->center);

    matrix[3][0] = this->savedtransl[0];
    matrix[3][1] = this->savedtransl[1];
    matrix[3][2] = this->savedtransl[2];

    SbBool oldval = this->enableValueChangedCallbacks(FALSE);
    this->setMotionMatrix(matrix);
    this->enableValueChangedCallbacks(oldval);

    SoMatrixTransform *mt = SO_GET_ANY_PART(this, "translateToCenter", SoMatrixTransform);
    matrix.setTranslate(this->center.getValue());
    mt->matrix = matrix;
  }
}
void
SoModelMatrixElement::rotateEltBy(const SbRotation &rotation)
//
////////////////////////////////////////////////////////////////////////
{
    SbMatrix	m;

    rotation.getValue(m);
    modelMatrix.multLeft(m);

    flags.isModelIdentity  = FALSE;	// Assume the worst
    flags.haveModelCull = FALSE;
}
void SIM::Coin3D::Quarter::SoQTQuarterAdaptor::convertOrtho2Perspective(const SoOrthographicCamera* in,
        SoPerspectiveCamera* out)
{
    out->aspectRatio.setValue(in->aspectRatio.getValue());
    out->focalDistance.setValue(in->focalDistance.getValue());
    out->orientation.setValue(in->orientation.getValue());
    out->position.setValue(in->position.getValue());
    out->viewportMapping.setValue(in->viewportMapping.getValue());

    SbRotation camrot = in->orientation.getValue();

    float focaldist = in->height.getValue() / (2.0*tan(M_PI / 8.0));

    SbVec3f offset(0,0,focaldist-in->focalDistance.getValue());

    camrot.multVec(offset,offset);
    out->position.setValue(offset+in->position.getValue());

    out->focalDistance.setValue(focaldist);

    // 45° is the default value of this field in SoPerspectiveCamera.
    out->heightAngle = (float)(M_PI / 4.0);
};
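The focal distance above is just the perspective frustum geometry: a perspective camera with height angle theta sees a slab of height 2 * d * tan(theta/2) at distance d, so matching the orthographic view height at the focal plane gives

    d = height / (2 * tan(theta/2)) = height / (2 * tan(M_PI/8))   for theta = 45 degrees (M_PI/4)

and the camera is then shifted along its local +Z axis (the offset rotated into world space by camrot) by the difference between the new and the old focal distance, so the focal plane itself stays where it was.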
Example #23
// Read rotation value from input stream, return TRUE if
// successful. Used from SoSFRotation and SoMFRotation.
SbBool
sosfrotation_read_value(SoInput * in, SbRotation & r)
{
  float f[4];
  for (int i = 0; i < 4; i++) {
    if (!in->read(f[i])) return FALSE;
  }
  SbVec3f axis(f[0], f[1], f[2]);
  const float angle = f[3];

  // vrml97 identity rotations are often specified with a null vector.
  // test for this and just set to z-axis.
  if (axis == SbVec3f(0.0f, 0.0f, 0.0f) && angle == 0.0f) {
    axis = SbVec3f(0.0f, 0.0f, 1.0f);
  }
  r.setValue(axis, angle);
  return TRUE;
}
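For context: the four floats parsed here are the rotation axis followed by the angle in radians, which is how an SFRotation field appears in Inventor/VRML ASCII files. A quarter turn about Z in a hypothetical .iv snippet would read something like

    Transform { rotation 0 0 1 1.5708 }

and the null-axis special case above maps the degenerate "0 0 0 0" spelling, common in VRML97 files, onto a proper identity rotation about +Z.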
Example #24
// Write SbRotation to output stream. Used from SoSFRotation and
// SoMFRotation.
void
sosfrotation_write_value(SoOutput * out, const SbRotation & r)
{
  SbVec3f axis;
  float angle;
  r.getValue(axis, angle);

  // Handle invalid rotation specifications.
  if (axis.length() == 0.0f) {
    axis.setValue(0.0f, 0.0f, 1.0f);
    angle = 0.0f;
  }

  out->write(axis[0]);
  if(!out->isBinary()) out->write(' ');
  out->write(axis[1]);
  if(!out->isBinary()) out->write(' ');
  out->write(axis[2]);
  if(!out->isBinary()) out->write("  ");
  out->write(angle);
}
Example #25
  SbMatrix IVElement::orientationMatrix() {
	  SbMatrix mat;
	  SbRotation rr = rotVec->getValue();
	  rr.getValue(mat);
	  return mat.transpose();
  }
Example #26
// internal callback
void InvPlaneMover::dragFinishCB(void *me, SoDragger *drag)
{

    InvPlaneMover *mee = static_cast<InvPlaneMover *>(me);

    if (mee->show_)
    {

        SbVec3f t = ((SoJackDragger *)drag)->translation.getValue();

        int i;
        for (i = 0; i < 3; ++i)
            t[i] *= mee->scale_->scaleFactor.getValue()[i];

        SbRotation r = ((SoJackDragger *)drag)->rotation.getValue();

        SbVec3f n;
        SbVec3f ax;
        float angle;
        r.getValue(ax, angle);

        SbVec3f axN;
        mee->fullRot_->rotation.getValue().multVec(ax, axN);

        r.setValue(axN, angle);

        r.multVec(mee->nnn_, n);

        // we have to rotate the translation around the x-axis
        // (because we have a y-axis dragger)
        SbVec3f tt;
        n.normalize();

        // snap normal to the closest coordinate axis
        // here done by snaping it to the axis with the biggest projection onto it.
        if (mee->motionMode_ == InvPlaneMover::SNAP)
        {
            int axis;
            float mmax;
            int dir = 1;
            SbVec3f nn;

            if (n[0] * n[0] < n[1] * n[1])
            {
                axis = 1;
                mmax = n[1];
                if (n[1] < 0)
                    dir = -1;
                else
                    dir = +1;
                //dir = (int) copysign(1,n[1]);
            }
            else
            {
                axis = 0;
                mmax = n[0];
                if (n[0] < 0)
                    dir = -1;
                else
                    dir = +1;
                //dir = (int) copysign(1,n[0]);
            }
            if (mmax * mmax < n[2] * n[2])
            {
                axis = 2;
                if (n[2] < 0)
                    dir = -1;
                else
                    dir = +1;
                //dir = (int) copysign(1,n[2]);
            }

            switch (axis)
            {
            case 0:
                nn.setValue(1, 0, 0);
                break;
            case 1:
                nn.setValue(0, 1, 0);
                break;
            case 2:
                nn.setValue(0, 0, 1);
                break;
            }
            n = dir * nn;
        }

        tt = t[1] * n;

        float d;
        d = n.dot(tt + mee->distOffset_);

        float data[4];
        data[0] = n[0];
        data[1] = n[1];
        data[2] = n[2];
        data[3] = d;

        // send feedback message to contoller
        ((InvPlaneMover *)me)->sendFeedback(data);
    }
}
void
SoXipNeheStarGenerator::GLRender(SoGLRenderAction *action)
{
  float yawAngle = 0;
  static float spinAngle = 0;
  float distance = 0;
  float pitchAngle = M_PI /2; // tilt the view
  SbRotation pitch(SbVec3f(1, 0, 0), pitchAngle); // rotation around X
  SbMatrix pitchM;
  pitch.getValue(pitchM);

  SbRotation yaw = SbRotation::identity(); 
  SbMatrix yawM = SbMatrix::identity();
  SbRotation spin = SbRotation::identity(); 
  SbMatrix spinM = SbMatrix::identity();

  for (int i = 0; i < MAX_STARS; i++)
  {
     SoXipNeheStar* star = static_cast<SoXipNeheStar*>(this->getChild(i));

     // spin angle for this star
     spin.setValue(SbVec3f(0,0,1), spinAngle); // rotation around Z
     spin.getValue(spinM);


     // yaw angle for this star
     //_starInfos[i].angle += ((float(i)/MAX_STARS) * (180 / M_PI));
     yawAngle =  _starInfos[i].angle;
     yaw.setValue(SbVec3f(0,1,0), yawAngle); // rotation around Y 
     yaw.getValue(yawM);
     
     
     // let's compose all matrices once to position each star
     SbMatrix transM = SbMatrix::identity();
     transM.setTranslate(SbVec3f(_starInfos[i].distance,0,0));
     SbMatrix transform = spinM * pitchM.inverse() * yawM.inverse() * transM * yawM * pitchM ;

     // position the star
     star->trans.setValue(transform);
     unsigned int color = convertRGBtoHex(_starInfos[i].r, _starInfos[i].g, _starInfos[i].b);
     star->color.set1Value(0, color);
     star->color.set1Value(1, color);
     star->color.set1Value(2, color);
     star->color.set1Value(3, color);
         
     spinAngle += ( 0.01f * (M_PI / 180));
      
     
     // change setting of all stars except the very first one (index 0)
     if (i)
     {
         _starInfos[i].angle += ((float(i)/MAX_STARS) * ( M_PI / 180));
         _starInfos[i].distance -= 0.01f;
         if (_starInfos[i].distance < 0.0f)
         {
           _starInfos[i].distance += 5.0f;
           _starInfos[i].r= rand() % 255 + 1 ;
           _starInfos[i].g= rand() % 255 + 1;
           _starInfos[i].b= rand() % 255 + 1;
         }
     }
  }

  SoXipKit::GLRender(action);
  
}
Example #28
void ScreenSpaceBox::setCameraOrientation(const SbRotation &cameraRot)
{
	rotation->rotation = cameraRot; // technically this is backwards, but we're rendering two-sided so oh well!
	SbVec3f camLook(0, 0, -1);
	cameraRot.multVec(camLook, normal); // store the (backward) normal for later use
}
Example #29
void CmdRaytracingWriteCamera::activated(int iMsg)
{
    const char* ppReturn=0;
    getGuiApplication()->sendMsgToActiveView("GetCamera",&ppReturn);
    if (ppReturn) {
        std::string str(ppReturn);
        if (str.find("PerspectiveCamera") == std::string::npos) {
            int ret = QMessageBox::warning(Gui::getMainWindow(), 
                qApp->translate("CmdRaytracingWriteView","No perspective camera"),
                qApp->translate("CmdRaytracingWriteView","The current view camera is not perspective"
                                " and thus the result of the povray image later might look different to"
                                " what you expect.\nDo you want to continue?"),
                QMessageBox::Yes|QMessageBox::No);
            if (ret != QMessageBox::Yes)
                return;
        }
    }

    SoInput in;
    in.setBuffer((void*)ppReturn,std::strlen(ppReturn));

    SoNode* rootNode;
    SoDB::read(&in,rootNode);

    if (!rootNode || !rootNode->getTypeId().isDerivedFrom(SoCamera::getClassTypeId()))
        throw Base::Exception("CmdRaytracingWriteCamera::activated(): Could not read "
                              "camera information from ASCII stream....\n");

    // root-node returned from SoDB::readAll() has initial zero
    // ref-count, so reference it before we start using it to
    // avoid premature destruction.
    SoCamera * Cam = static_cast<SoCamera*>(rootNode);
    Cam->ref();

    SbRotation camrot = Cam->orientation.getValue();

    SbVec3f upvec(0, 1, 0); // init to default up vector
    camrot.multVec(upvec, upvec);

    SbVec3f lookat(0, 0, -1); // init to default view direction vector
    camrot.multVec(lookat, lookat);

    SbVec3f pos = Cam->position.getValue();
    float Dist = Cam->focalDistance.getValue();

    QStringList filter;
    filter << QObject::tr("Povray(*.pov)");
    filter << QObject::tr("All Files (*.*)");
    QString fn = Gui::FileDialog::getSaveFileName(Gui::getMainWindow(), QObject::tr("Export page"), QString(), filter.join(QLatin1String(";;")));
    if (fn.isEmpty()) 
        return;
    std::string cFullName = (const char*)fn.toUtf8();

    // building up the python string
    std::stringstream out;
    out << "Raytracing.writeCameraFile(\"" << strToPython(cFullName) << "\"," 
        << "(" << pos.getValue()[0]    <<"," << pos.getValue()[1]    <<"," << pos.getValue()[2]    <<")," 
        << "(" << lookat.getValue()[0] <<"," << lookat.getValue()[1] <<"," << lookat.getValue()[2] <<")," ;
    lookat *= Dist;
    lookat += pos;
    out << "(" << lookat.getValue()[0] <<"," << lookat.getValue()[1] <<"," << lookat.getValue()[2] <<")," 
        << "(" << upvec.getValue()[0]  <<"," << upvec.getValue()[1]  <<"," << upvec.getValue()[2]  <<") )" ;

    doCommand(Doc,"import Raytracing");
    doCommand(Gui,out.str().c_str());


    // Bring ref-count of root-node back to zero to cause the
    // destruction of the camera.
    Cam->unref();
}
SbRotation
SbSphereSectionProjector::getRotation(const SbVec3f &p1, const SbVec3f &p2)
//
////////////////////////////////////////////////////////////////////////
{
    SbBool tol1 = isWithinTolerance(p1);
    SbBool tol2 = isWithinTolerance(p2);

    if (tol1 && tol2) {
	// both points in tolerance, rotate about
	// sphere center

	return SbRotation(
	    p1 - sphere.getCenter(),
	    p2 - sphere.getCenter());
    }
    else if (!tol1 && !tol2) {
	// both points out of tolerance, rotate about
	// plane point

	// Would like to just use this:
	SbRotation badRot = SbRotation(p1 - planePoint, p2 - planePoint);
	// but fp instablity gives back a goofy axis, so we don't get
	// pure roll.

	// So we need to snap the axis to be parallel to plane dir
	SbVec3f badAxis; float goodAngle;
	badRot.getValue(badAxis, goodAngle);

	SbVec3f goodAxis;
	if (badAxis.dot(planeDir) > 0.0)
	    goodAxis = planeDir;
	else 	    
	    goodAxis = -planeDir;

	SbRotation rollRot(goodAxis, goodAngle);

	//Now find rotation in the direction perpendicular to this:
	SbVec3f diff1 = p1 - planePoint;
	SbVec3f diff2 = p2 - planePoint;
	float d = diff2.length() - diff1.length();

	// Check for degenerate cases
	float theta = d / sphere.getRadius();
	if ( fabs(theta) < 0.000001 || fabs(theta) > 1.0 )
	    return rollRot;

	diff1.normalize();
	SbVec3f pullAxis = planeDir.cross( diff1 );
	pullAxis.normalize();
	SbRotation pullRot(pullAxis, getRadialFactor() * theta );

	SbRotation totalRot = rollRot * pullRot;
	return totalRot;

    }
    else {
	// one point in, one point out, so rotate about
	// the center of the sphere from the point on the
	// sphere to the intersection of the plane and the
	// sphere closest to the point off the sphere

	SbLine planeLine;
	SbVec3f intersection;

	if (tol1) {
	    planeLine.setValue(planePoint, p2);
	}
	else {
	    planeLine.setValue(planePoint, p1);
	}

	if (! sphere.intersect(planeLine, intersection))
#ifdef DEBUG
	    SoDebugError::post("SbSphereSectionProjector::getRotation",
			       "Couldn't intersect plane line with sphere");
#else
	/* Do nothing */;
#endif

	if (tol1) {
	    // went off sphere
	    return SbRotation(
		p1 - sphere.getCenter(),
		intersection - sphere.getCenter());
	}
	else {
	    // came on to sphere
	    // "Hey cutie. You've got quite a radius..."
	    return SbRotation(
		intersection - sphere.getCenter(),
		p2 - sphere.getCenter());
	}
    }

}