/// Leaves edit mode: reads the final transformation off the centerball
/// manipulator, applies it to the Mirroring feature's base point and
/// normal, and tears down the temporary edit scene graph.
void ViewProviderMirror::unsetEdit(int ModNum)
{
    if (ModNum == ViewProvider::Default) {
        // The manipulator was installed as the first child of the edit node.
        SoCenterballManip* manip = static_cast<SoCenterballManip *>(pcEditNode->getChild(0));
        SbVec3f move = manip->translation.getValue();
        SbVec3f center = manip->center.getValue();
        SbRotation rot = manip->rotation.getValue();

        // get the whole translation
        // (the centerball rotates about 'center', so fold the center offset
        // into the translation: move + center - rot*center)
        move += center;
        rot.multVec(center,center);
        move -= center;

        // the new axis of the plane
        SbVec3f norm(0,0,1);
        rot.multVec(norm,norm);

        // apply the new values
        Part::Mirroring* mf = static_cast<Part::Mirroring*>(getObject());
        mf->Base.setValue(move[0],move[1],move[2]);
        mf->Normal.setValue(norm[0],norm[1],norm[2]);

        // Remove the temporary edit geometry from the scene again and
        // release its children.
        pcRoot->removeChild(pcEditNode);
        pcEditNode->removeAllChildren();
    }
    else {
        ViewProviderPart::unsetEdit(ModNum);
    }
}
void NaviCubeImplementation::rotateView(int axis,float rotAngle) { SbRotation viewRot = m_View3DInventorViewer->getCameraOrientation(); SbVec3f up; viewRot.multVec(SbVec3f(0,1,0),up); SbVec3f out; viewRot.multVec(SbVec3f(0,0,1),out); SbVec3f& u = up; SbVec3f& o = out; SbVec3f right (u[1]*o[2]-u[2]*o[1], u[2]*o[0]-u[0]*o[2], u[0]*o[1]-u[1]*o[0]); SbVec3f direction; switch (axis) { default : return; case DIR_UP : direction = up; break; case DIR_OUT : direction = out; break; case DIR_RIGHT : direction = right; break; } SbRotation rot(direction, -rotAngle*M_PI/180.0); SbRotation newViewRot = viewRot * rot; m_View3DInventorViewer->setCameraOrientation(newViewRot); }
/// Sets the viewing direction and up vector for both viewer panes and
/// re-fits each view. view1/up1 apply to pane 0, view2/up2 to pane 2's
/// counterpart pane 1. Does nothing if the viewer is gone.
void ManualAlignment::setViewingDirections(const Base::Vector3d& view1, const Base::Vector3d& up1,
                                           const Base::Vector3d& view2, const Base::Vector3d& up2)
{
    if (myViewer.isNull())
        return;

    // Both panes get the identical treatment; iterate instead of
    // duplicating the code block per pane.
    const Base::Vector3d* views[2] = { &view1, &view2 };
    const Base::Vector3d* ups[2]   = { &up1,   &up2   };

    for (int i = 0; i < 2; i++) {
        const Base::Vector3d& view = *views[i];
        const Base::Vector3d& up = *ups[i];

        // First rotation: align the camera's default view direction (-z)
        // with the requested viewing direction.
        SbRotation rot;
        rot.setValue(SbVec3f(0.0f, 0.0f, 1.0f), SbVec3f(-view.x, -view.y, -view.z));

        // Second rotation: roll about the view axis so that the rotated
        // default up vector matches the requested up direction.
        SbRotation rot2;
        SbVec3f upvec(0.0f, 1.0f, 0.0f);
        rot.multVec(upvec, upvec);
        rot2.setValue(upvec, SbVec3f(up.x, up.y, up.z));

        myViewer->getViewer(i)->getCamera()->orientation.setValue(rot * rot2);
        myViewer->getViewer(i)->viewAll();
    }
}
/// return the camera definition of the active view static PyObject * povViewCamera(PyObject *self, PyObject *args) { // no arguments if (!PyArg_ParseTuple(args, "")) return NULL; PY_TRY { std::string out; const char* ppReturn=0; Gui::Application::Instance->sendMsgToActiveView("GetCamera",&ppReturn); SoNode* rootNode; SoInput in; in.setBuffer((void*)ppReturn,std::strlen(ppReturn)); SoDB::read(&in,rootNode); if (!rootNode || !rootNode->getTypeId().isDerivedFrom(SoCamera::getClassTypeId())) throw Base::Exception("CmdRaytracingWriteCamera::activated(): Could not read " "camera information from ASCII stream....\n"); // root-node returned from SoDB::readAll() has initial zero // ref-count, so reference it before we start using it to // avoid premature destruction. SoCamera * Cam = static_cast<SoCamera*>(rootNode); Cam->ref(); SbRotation camrot = Cam->orientation.getValue(); SbVec3f upvec(0, 1, 0); // init to default up vector camrot.multVec(upvec, upvec); SbVec3f lookat(0, 0, -1); // init to default view direction vector camrot.multVec(lookat, lookat); SbVec3f pos = Cam->position.getValue(); float Dist = Cam->focalDistance.getValue(); // making gp out of the Coin stuff gp_Vec gpPos(pos.getValue()[0],pos.getValue()[1],pos.getValue()[2]); gp_Vec gpDir(lookat.getValue()[0],lookat.getValue()[1],lookat.getValue()[2]); lookat *= Dist; lookat += pos; gp_Vec gpLookAt(lookat.getValue()[0],lookat.getValue()[1],lookat.getValue()[2]); gp_Vec gpUp(upvec.getValue()[0],upvec.getValue()[1],upvec.getValue()[2]); // getting image format ParameterGrp::handle hGrp = App::GetApplication().GetParameterGroupByPath("User parameter:BaseApp/Preferences/Mod/Raytracing"); int width = hGrp->GetInt("OutputWidth", 800); int height = hGrp->GetInt("OutputHeight", 600); // call the write method of PovTools.... out = PovTools::getCamera(CamDef(gpPos,gpDir,gpLookAt,gpUp),width,height); return Py::new_reference_to(Py::String(out)); } PY_CATCH; }
/*! Rotates the camera position around an arbitrary axis through axisPoint
 * and re-derives the viewing direction, up vector and orientation.
 * \param rotAxis   rotation axis (direction)
 * \param rotAngle  rotation angle (radians)
 * \param axisPoint point the rotation axis passes through
 */
void kCamera::rotatePosition(SbVec3f rotAxis, double rotAngle, SbVec3f axisPoint)
{
    // Error20051017: the rotation used to be about an axis through the
    // origin instead of through the rotation point. Therefore first
    // translate the current position by the rotation point and translate
    // back afterwards.
    SbVec3f tempPos = currentPosition - axisPoint; //Error20051017

    // Rotate the (translated) position.
    SbRotation pointRotation;
    pointRotation.setValue(rotAxis,rotAngle);
    //pointRotation.multVec(currentPosition, currentPosition);
    pointRotation.multVec(tempPos, tempPos); //Error20051017
    currentPosition = tempPos + axisPoint; //Error20051017

    // Re-derive the viewing direction from the (unchanged) look-at point.
    currentLookDir = currentLookAt-currentPosition;
    currentLookDir.normalize();

    currentUpVec = calcUpVector(currentLookDir,NormPlump); //! compute the new up vector - the rotation is already applied inside calcUpVector
    currentUpVec.normalize();

    currentOrientation = calcOrientation(currentUpVec,currentLookDir); //! computes the new orientation
    // writeOrientation(currentOrientation); //! writes the orientation into the ObjMgr
    // writePosition(currentPosition); //! writes the position into the ObjMgr
}
/*! Rotates a vector about the given axis and renormalizes the result.
 * \param[in,out] vect vector to rotate (normalized on return)
 * \param axis rotation axis
 * \param angle rotation angle (radians)
 */
void kCamera::rotateVector(SbVec3f& vect, const SbVec3f axis, const double angle)
{
    const SbRotation rotation(axis, static_cast<float>(angle));
    rotation.multVec(vect, vect);
    vect.normalize();
}
/// Computes the rotation that turns the billboard towards the viewer.
/// Only a zero rotation axis (full camera alignment) is implemented;
/// any other axis is reported as unsupported on stderr.
SbRotation SoBillboard::calculateRotation(SoState *state)
{
    SbRotation rot;
#ifdef INVENTORRENDERER
    const SbViewVolume &viewVolume = SoViewVolumeElement::get(state);
    if (SbVec3f(0.0f, 0.0f, 0.0f) == axis.getValue()) {
        rot = viewVolume.getAlignRotation();
    }
#else
    const SbMatrix &mm = SoModelMatrixElement::get(state);
    SbMatrix imm = mm.inverse();

    // Billboard-to-viewer direction, transformed into object space.
    SbVec3f toviewer;
    const SbViewVolume &vv = SoViewVolumeElement::get(state);
    toviewer = -vv.getProjectionDirection();
    imm.multDirMatrix(toviewer, toviewer);
    (void)toviewer.normalize();

    SbVec3f rotaxis = this->axis.getValue();

    if (rotaxis == SbVec3f(0.0f, 0.0f, 0.0f)) {
        // 1. Compute the billboard-to-viewer vector.
        // 2. Rotate the Z-axis of the billboard to be collinear with the
        // billboard-to-viewer vector and pointing towards the viewer's position.
        // 3. Rotate the Y-axis of the billboard to be parallel and oriented in the
        // same direction as the Y-axis of the viewer.
        rot.setValue(SbVec3f(0.f, 0.0f, 1.0f), toviewer);
        SbVec3f viewup = vv.getViewUp();
        imm.multDirMatrix(viewup, viewup);

        SbVec3f yaxis(0.0f, 1.0f, 0.0f);
        rot.multVec(yaxis, yaxis);
        SbRotation rot2(yaxis, viewup);
        // (dead rot.getValue()/rot2.getValue() calls and the unused
        // 'cameray' local were removed here; their results were never used.)
        rot = rot * rot2;
        //SoModelMatrixElement::rotateBy(state, (SoNode*) this, rot);
    }
#endif
    // NOTE: this else belongs to the if inside whichever preprocessor
    // branch was compiled above.
    else {
        fprintf(stderr, "SoBillboard: axis != (0.0, 0.0, 0.0) not implemented\n");
    }
    return rot;
}
void SoXipDicomExaminer::tiltCamera( const SbRotation& rot ) { SbMatrix m; m = getCamera()->orientation.getValue(); SbVec3f camy; rot.multVec( SbVec3f( m[1][0], m[1][1], m[1][2] ), camy ); m[1][0] = camy[0]; m[1][1] = camy[1]; m[1][2] = camy[2]; SbVec3f camx; rot.multVec( SbVec3f( m[0][0], m[0][1], m[0][2] ), camx ); m[0][0] = camx[0]; m[0][1] = camx[1]; m[0][2] = camx[2]; getCamera()->orientation.setValue( SbRotation(m) ); }
/*! Decomposes a camera orientation into a viewing direction, an up vector
 * and the roll angle of that up vector about the viewing axis.
 * \param orientation camera orientation to split
 * \param[out] upVec resulting (normalized) up vector
 * \param[out] lookDir resulting (normalized) viewing direction
 * \param[out] upVecAngle roll angle (radians) between upVec and the
 *             "perfect" up vector (up vector without any roll)
 */
void kCamera::splitOrientation(const SbRotation orientation, SbVec3f& upVec, SbVec3f& lookDir, double& upVecAngle)
{
    // Extract the viewing direction from the current orientation.
    lookDir.setValue(0.0, 0.0, -1.0); //! init to default lookat direction (DIRECTION!)
    orientation.multVec(lookDir, lookDir);
    lookDir.normalize();

    // Extract the up vector from the current orientation.
    upVec.setValue(0.0, 1.0, 0.0); // init to default up vector direction
    orientation.multVec(upVec, upVec);
    upVec.normalize();

    // Determine the "perfect" up vector (up vector without an additional
    // roll about the viewing axis).
    SbVec3f perfectUpVec;
    if (fabs(lookDir.dot(NormPlump))>(1.0-epsilon)) //! if lookDir and the plumb line are (nearly) parallel, use upVec directly to avoid numerical errors
        perfectUpVec = upVec;
    else
        perfectUpVec = calcPerfectUpVector(lookDir,NormPlump);

    perfectUpVec.normalize();

    // a dot b = |a|*|b|*cos(a,b)
    double tempDot = upVec.dot(perfectUpVec);
    // Clamp: the dot product was occasionally marginally greater than
    // 1.0 -> acos would yield 1.#IND (NaN).
    if (tempDot>1.0) tempDot = 1.0;
    if (tempDot<-1.0) tempDot = -1.0;
    upVecAngle = acos(tempDot); //! 1.0 = product of both lengths ... but the vectors are normalized

    // Determine the sign of the angle and extend it past PI if necessary.
    // In R3 there are initially no positive or negative rotation angles;
    // only once the plane spanned by the two vectors is oriented can we
    // define the angle as positive when the plane's Hesse normal vector
    // and the cross product point in the same direction.
    if (getUpVecAngleDir(lookDir,upVec)>epsilon && (upVecAngle<(kBasics::PI-epsilon)))
        upVecAngle = kBasics::PI + (kBasics::PI - upVecAngle);

    // Avoid angles > 2*PI.
    if (upVecAngle>(2*(kBasics::PI-epsilon)))
        upVecAngle = upVecAngle - 2*kBasics::PI;

    // Clamp very small angles to 0.0, otherwise values like 1.#IND (NaN) can occur.
    if (fabs(upVecAngle)<epsilon)
        upVecAngle = 0.0;
}
void SIM::Coin3D::Quarter::SoQTQuarterAdaptor::convertOrtho2Perspective(const SoOrthographicCamera* in, SoPerspectiveCamera* out) { out->aspectRatio.setValue(in->aspectRatio.getValue()); out->focalDistance.setValue(in->focalDistance.getValue()); out->orientation.setValue(in->orientation.getValue()); out->position.setValue(in->position.getValue()); out->viewportMapping.setValue(in->viewportMapping.getValue()); SbRotation camrot = in->orientation.getValue(); float focaldist = in->height.getValue() / (2.0*tan(M_PI / 8.0)); SbVec3f offset(0,0,focaldist-in->focalDistance.getValue()); camrot.multVec(offset,offset); out->position.setValue(offset+in->position.getValue()); out->focalDistance.setValue(focaldist); // 45° is the default value of this field in SoPerspectiveCamera. out->heightAngle = (float)(M_PI / 4.0); };
// internal callback void InvPlaneMover::dragFinishCB(void *me, SoDragger *drag) { InvPlaneMover *mee = static_cast<InvPlaneMover *>(me); if (mee->show_) { SbVec3f t = ((SoJackDragger *)drag)->translation.getValue(); int i; for (i = 0; i < 3; ++i) t[i] *= mee->scale_->scaleFactor.getValue()[i]; SbRotation r = ((SoJackDragger *)drag)->rotation.getValue(); SbVec3f n; SbVec3f ax; float angle; r.getValue(ax, angle); SbVec3f axN; mee->fullRot_->rotation.getValue().multVec(ax, axN); r.setValue(axN, angle); r.multVec(mee->nnn_, n); // we have to rotate the translation around the x-axis // (because we have a y-axis dragger) SbVec3f tt; n.normalize(); // snap normal to the closest coordinate axis // here done by snaping it to the axis with the biggest projection onto it. if (mee->motionMode_ == InvPlaneMover::SNAP) { int axis; float mmax; int dir = 1; SbVec3f nn; if (n[0] * n[0] < n[1] * n[1]) { axis = 1; mmax = n[1]; if (n[1] < 0) dir = -1; else dir = +1; //dir = (int) copysign(1,n[1]); } else { axis = 0; mmax = n[0]; if (n[0] < 0) dir = -1; else dir = +1; //dir = (int) copysign(1,n[0]); } if (mmax * mmax < n[2] * n[2]) { axis = 2; if (n[2] < 0) dir = -1; else dir = +1; //dir = (int) copysign(1,n[2]); } switch (axis) { case 0: nn.setValue(1, 0, 0); break; case 1: nn.setValue(0, 1, 0); break; case 2: nn.setValue(0, 0, 1); break; } n = dir * nn; } tt = t[1] * n; float d; d = n.dot(tt + mee->distOffset_); float data[4]; data[0] = n[0]; data[1] = n[1]; data[2] = n[2]; data[3] = d; // send feedback message to contoller ((InvPlaneMover *)me)->sendFeedback(data); } }
void CmdRaytracingWriteCamera::activated(int iMsg) { const char* ppReturn=0; getGuiApplication()->sendMsgToActiveView("GetCamera",&ppReturn); if (ppReturn) { std::string str(ppReturn); if (str.find("PerspectiveCamera") == std::string::npos) { int ret = QMessageBox::warning(Gui::getMainWindow(), qApp->translate("CmdRaytracingWriteView","No perspective camera"), qApp->translate("CmdRaytracingWriteView","The current view camera is not perspective" " and thus the result of the povray image later might look different to" " what you expect.\nDo you want to continue?"), QMessageBox::Yes|QMessageBox::No); if (ret != QMessageBox::Yes) return; } } SoInput in; in.setBuffer((void*)ppReturn,std::strlen(ppReturn)); SoNode* rootNode; SoDB::read(&in,rootNode); if (!rootNode || !rootNode->getTypeId().isDerivedFrom(SoCamera::getClassTypeId())) throw Base::Exception("CmdRaytracingWriteCamera::activated(): Could not read " "camera information from ASCII stream....\n"); // root-node returned from SoDB::readAll() has initial zero // ref-count, so reference it before we start using it to // avoid premature destruction. 
SoCamera * Cam = static_cast<SoCamera*>(rootNode); Cam->ref(); SbRotation camrot = Cam->orientation.getValue(); SbVec3f upvec(0, 1, 0); // init to default up vector camrot.multVec(upvec, upvec); SbVec3f lookat(0, 0, -1); // init to default view direction vector camrot.multVec(lookat, lookat); SbVec3f pos = Cam->position.getValue(); float Dist = Cam->focalDistance.getValue(); QStringList filter; filter << QObject::tr("Povray(*.pov)"); filter << QObject::tr("All Files (*.*)"); QString fn = Gui::FileDialog::getSaveFileName(Gui::getMainWindow(), QObject::tr("Export page"), QString(), filter.join(QLatin1String(";;"))); if (fn.isEmpty()) return; std::string cFullName = (const char*)fn.toUtf8(); // building up the python string std::stringstream out; out << "Raytracing.writeCameraFile(\"" << strToPython(cFullName) << "\"," << "(" << pos.getValue()[0] <<"," << pos.getValue()[1] <<"," << pos.getValue()[2] <<")," << "(" << lookat.getValue()[0] <<"," << lookat.getValue()[1] <<"," << lookat.getValue()[2] <<")," ; lookat *= Dist; lookat += pos; out << "(" << lookat.getValue()[0] <<"," << lookat.getValue()[1] <<"," << lookat.getValue()[2] <<")," << "(" << upvec.getValue()[0] <<"," << upvec.getValue()[1] <<"," << upvec.getValue()[2] <<") )" ; doCommand(Doc,"import Raytracing"); doCommand(Gui,out.str().c_str()); // Bring ref-count of root-node back to zero to cause the // destruction of the camera. Cam->unref(); }
void ScreenSpaceBox::setCameraOrientation(const SbRotation &cameraRot) { rotation->rotation = cameraRot; // technically this is backwards, but we're rendering two-sided so oh well! SbVec3f camLook(0, 0, -1); cameraRot.multVec(camLook, normal); // store the (backward) normal for later use }