int main()
{
  // Build a simple 1-D field of npts values with no ghost cells.
  const size_t npts = 10;
  const SS::GhostData ghost(0);
  const SS::BoundaryCellInfo bc = SS::BoundaryCellInfo::build<FieldT>();
  const SS::MemoryWindow mw( SS::IntVec( npts, 1, 1 ) );
  FieldT f( mw, bc, ghost, NULL );

  // Fill the field with increasing values starting at 0.1.
  double x = 0.1;
  for( FieldT::iterator ifld=f.begin(); ifld!=f.end(); ++ifld, x+=1.0 ){
    *ifld = x;
  }

  TestHelper status(true);

  // With no ghost cells, the interior iterator must visit the same cells as the full iterator.
  FieldT::interior_iterator i2 = f.interior_begin();
  for( FieldT::iterator i=f.begin(); i!=f.end(); ++i, ++i2 ){
    status( *i == *i2, "value" );
    status( &*i == &*i2, "address" );
  }

  {
    // Apply point boundary conditions that overwrite two individual cells.
    typedef SS::ConstValEval BCVal;
    typedef SS::BoundaryCondition<FieldT,BCVal> BC;
    BC bc1( SS::IntVec(2,1,1), BCVal(1.234) );
    BC bc2( SS::IntVec(4,1,1), BCVal(3.456) );
    bc1(f);
    bc2(f);
    status( f[2] == 1.234, "point BC 1" );
    status( f[4] == 3.456, "point BC 2" );
  }

  {
    // Gather field values at the given indices into a point field, then scatter new values back.
    std::vector<size_t> ix;
    ix.push_back(4);
    ix.push_back(2);
    SpatialOps::Point::FieldToPoint<FieldT> ftp(ix);
    SpatialOps::Point::PointToField<FieldT> ptf(ix);

    const SS::MemoryWindow mw2( SpatialOps::structured::IntVec(2,1,1) );
    FieldT f2( mw2, bc, ghost, NULL );
    ftp.apply_to_field( f, f2 );
    status( f2[0] == 3.456, "Field2Point Interp (1)" );
    status( f2[1] == 1.234, "Field2Point Interp (2)" );

    f2[0] = -1.234;
    f2[1] = -3.456;
    ptf.apply_to_field( f2, f );
    status( f[2] == -3.456, "Point2Field Interp (1)" );
    status( f[4] == -1.234, "Point2Field Interp (2)" );
  }

  if( status.ok() ) return 0;
  return -1;
}
void CGauge::Draw(CDC *pDC)
{
  // Draw the gauge shape as a polyline.
  CPoints pts;
  GetPoints(pts);
  pDC->Polyline(pts.GetData(),(int)pts.GetCount());

  // Measure the description text, then center it on the gauge bounds.
  CRect rc;
  pDC->DrawText(m_strDesc,rc,DT_CALCRECT);
  CPointF ptf(m_rcBounds.CenterPoint());
  CPoint pt((int)ptf.x,(int)ptf.y);
  rc.OffsetRect(pt-rc.CenterPoint());
  pDC->DrawText(m_strDesc,rc,DT_TOP);
}
void QFormulatorEditWidget::dropEvent( QDropEvent *event )
{
  if( event->mimeData() && (event->dropAction() == Qt::CopyAction || event->dropAction() == Qt::MoveAction) )
  {
    PointFde ptf( event->pos().x(), event->pos().y() );
    RectFde rcf = clientRect();
    CFmlDrawEngineQt::DPtoLPEx( ptf, scale(), scale() );
    CFmlDrawEngineQt::DPtoLPEx( rcf, scale(), scale() );
    if( rootNode()->DrInSelection() == 0 || rootNode()->PtInSelection( rcf, ptf ) == 0 )
    {
      setBlinkingCursorEnabled( true );
      rootNode()->GetDrag().SetEmpty();
      rootNode()->GetSelectInfo().SetEmpty();
      rootNode()->MoveCaret( rcf, ptf, 0 );
      updateFormulatorWidget();
    }
    else
    {
      if( event->dropAction() == Qt::MoveAction && isRemovedSelection() == 0 )
      {
        CFrameNode *pFrameNode = rootNode()->RemoveOrCopySelection( 1 );
        if( pFrameNode )
        {
          delete pFrameNode;
          setRemovedSelection();
        }
      }
      else if( event->dropAction() == Qt::CopyAction )
      {
        rootNode()->GetSelectInfo().SetEmpty();
      }
      int iRet = document()->paste( event->mimeData(), false );
      setBlinkingCursorEnabled( true );
      rootNode()->GetDrag().SetEmpty();
      updateViewDocument();
      if( !hasFocus() )
        setFocus();
      if( iRet )
        event->acceptProposedAction();
    }
  }
  else
  {
    setBlinkingCursorEnabled( true );
    rootNode()->GetDrag().SetEmpty();
    updateFormulatorWidget();
  }
}
void QFormulatorEditWidget::customEvent_MouseMoveOnParentWindow( QMouseEvent *e )
{
  if( e->buttons() & Qt::LeftButton )
  {
    QPoint pos = e->pos();
    mapFromParent( pos );
    PointFde ptf( pos.x(), pos.y() );
    RectFde rcf = clientRect();
    CFmlDrawEngineQt::DPtoLPEx( ptf, scale(), scale() );
    CFmlDrawEngineQt::DPtoLPEx( rcf, scale(), scale() );
    if( rootNode()->MoveCaret( rcf, ptf, -1 ) == 0 )
      updateFormulatorWidget();
  }
  //mouseMoveEvent( e );
}
void QFormulatorEditWidget::mouseMoveEvent( QMouseEvent *e )
{
  if( e->buttons() == Qt::NoButton )
  {
    PointFde ptf( e->x(), e->y() );
    RectFde rcf = clientRect();
    CFmlDrawEngineQt::DPtoLPEx( ptf, scale(), scale() );
    CFmlDrawEngineQt::DPtoLPEx( rcf, scale(), scale() );
    if( ::TestSelection( rcf, rootNode()->GetSelectInfo(), ptf ) == 0 )
      setCursor(Qt::ArrowCursor);
    else
      setCursor(Qt::IBeamCursor);
    return;
  }
  else if( e->buttons() & Qt::LeftButton )
  {
    PointFde ptf( e->x(), e->y() );
    RectFde rcf = clientRect();
    CFmlDrawEngineQt::DPtoLPEx( ptf, scale(), scale() );
    CFmlDrawEngineQt::DPtoLPEx( rcf, scale(), scale() );
    if( rootNode()->MoveCaret( rcf, ptf, -1 ) == 0 )
      updateFormulatorWidget();
  }
}
int main(int argc, char *argv[])
{
  QApplication a(argc, argv);
  MainWindow w;
  // argv[1]: 0 - single file mode, 1 - all files in the current dir and all subdirs
  // argv[2]: path to folder
  if( argc > 2 ){
    ( *argv[1] == '0' ) ? w.setAutoMode(false) : w.setAutoMode(true);
    QString ptf(argv[2]);
    w.setPathToFolder(ptf);
    w.autoStart();
  }
  else
    w.show();
  return a.exec();
}
void QFormulatorEditWidget::mouseDoubleClickEvent( QMouseEvent *e )
{
  if( !rootNode() ) return;
  if( e->button() == Qt::LeftButton )
  {
    PointFde ptf( e->x(), e->y() );
    RectFde rcf = clientRect();
    CFmlDrawEngineQt::DPtoLPEx( ptf, scale(), scale() );
    CFmlDrawEngineQt::DPtoLPEx( rcf, scale(), scale() );
    if( rootNode()->DblClk( rcf, ptf ) == 0 )
      updateFormulatorWidget();
    //cbFireEvent( FORMULIB_CALLBACK_LMOUSEDBLCLICK, nFlags, MAKEWPARAM( point.x(), point.y() ) );
  }
  else if( e->button() == Qt::RightButton )
  {
    //cbFireEvent( FORMULIB_CALLBACK_RMOUSEDBLCLICK, nFlags, MAKEWPARAM( point.x(), point.y() ) );
  }
}
static void calcDescriptors(const vector<Mat>& gpyr, const vector<KeyPoint>& keypoints,
                            Mat& descriptors, int nOctaveLayers, int firstOctave )
{
  int d = SIFT_DESCR_WIDTH, n = SIFT_DESCR_HIST_BINS;

  for( size_t i = 0; i < keypoints.size(); i++ )
  {
    KeyPoint kpt = keypoints[i];
    int octave, layer;
    float scale;
    // Recover octave, layer, and scale from the packed keypoint octave field.
    unpackOctave(kpt, octave, layer, scale);
    CV_Assert(octave >= firstOctave && layer <= nOctaveLayers+2);
    float size = kpt.size*scale;
    Point2f ptf(kpt.pt.x*scale, kpt.pt.y*scale);
    // Select the Gaussian pyramid image corresponding to this octave and layer.
    const Mat& img = gpyr[(octave - firstOctave)*(nOctaveLayers + 3) + layer];

    float angle = 360.f - kpt.angle;
    if(std::abs(angle - 360.f) < FLT_EPSILON)
      angle = 0.f;
    calcSIFTDescriptor(img, ptf, angle, size*0.5f, d, n, descriptors.ptr<float>((int)i));
  }
}
void QFormulatorEditWidget::dragMoveEvent( QDragMoveEvent *event )
{
  if( event->mimeData() )
  {
    if( document()->isAcceptedMimeFormat( event->mimeData() ) /*&& event->answerRect().intersects( geometry() )*/ )
    {
      event->acceptProposedAction();
      PointFde ptf( event->pos().x(), event->pos().y() );
      RectFde rcf = clientRect();
      CFmlDrawEngineQt::DPtoLPEx( ptf, scale(), scale() );
      CFmlDrawEngineQt::DPtoLPEx( rcf, scale(), scale() );
      if( rootNode()->DragCaret( rcf, ptf ) == 0 )
      {
        RectFde dragRect = rootNode()->GetDragRect();
        updateScrollPosition( dragRect.topLeft(), dragRect.size(), 50, 50 );
        update();
      }
    }
  }
}
void QFormulatorEditWidget::mousePressEvent( QMouseEvent *e )
{
  if( e->button() == Qt::LeftButton )
  {
    PointFde ptf( e->x(), e->y() );
    RectFde rcf = clientRect();
    CFmlDrawEngineQt::DPtoLPEx( ptf, scale(), scale() );
    CFmlDrawEngineQt::DPtoLPEx( rcf, scale(), scale() );
    if( rootNode()->PtInSelection( rcf, ptf ) == 0 )
    {
      editCopy(); // to enable interoperating with other applications
      CFrameNode *pFrameNode = rootNode()->RemoveOrCopySelection( 0 );
      if( pFrameNode )
      {
        CRootNode *pRootNode = new CRootNode( this );
        // zero margins when inserting from a server
        pRootNode->setIndents( 0, 0, 0, 0 );
        pRootNode->Insert( pFrameNode );

        QPixmap pixmap;
        if( ::isOption_Perfomance_CopyImage() )
          pixmap = drawSelection2QPixmap( true );
        QImage pixImg = pixmap.toImage();
        QMimeData *mimeData = CFormulatorDoc::copyToMimeData( pRootNode, pixmap.isNull() ? 0 : &pixImg );
        if( mimeData )
        {
          QDrag *drag = new QDrag( this );
          drag->setMimeData( mimeData );
          if( !pixmap.isNull() )
          {
            const int dragPixmapMaxSize = 256;
            if( qMax(pixmap.width(), pixmap.height()) > dragPixmapMaxSize )
              drag->setPixmap( pixmap.width() > pixmap.height() ?
                               pixmap.scaledToWidth( dragPixmapMaxSize ) : pixmap.scaledToHeight( dragPixmapMaxSize ) );
            else
              drag->setPixmap( pixmap );
          }
          drag->setHotSpot( QPoint(0, 0) );

          clearRemovedSelection();
          Qt::DropAction dropAction = drag->exec(Qt::CopyAction | Qt::MoveAction, Qt::MoveAction);
          if( dropAction == Qt::MoveAction && isRemovedSelection() == 0 )
          {
            CFrameNode *pRemovedFrame = rootNode()->RemoveOrCopySelection( 1 );
            if( pRemovedFrame )
            {
              delete pRemovedFrame;
              setRemovedSelection();
            }
            if( !hasFocus() ) setFocus();
            updateViewDocument();
          }
        }
        delete pRootNode;
        delete pFrameNode;
      }
    }
    else if( rootNode()->MoveCaret( rcf, ptf, e->modifiers() & Qt::ShiftModifier ) == 0 )
    {
      updateFormulatorWidget();
    }
    //cbFireEvent( FORMULIB_CALLBACK_LMOUSECLICK, nFlags, MAKEWPARAM( point.x(), point.y() ) );
  }
  else if( e->button() == Qt::RightButton )
  {
    //cbFireEvent( FORMULIB_CALLBACK_RMOUSECLICK, nFlags, MAKEWPARAM( point.x(), point.y() ) );
  }
}
ScreenCalibrator::ScreenCalibrator(int& argc,char**& argv,char**& appDefaults)
  :Vrui::Application(argc,argv,appDefaults),
   trackingPointsMover(0)
{
  /* Create and register the point query tool class: */
  PointQueryToolFactory* pointQueryToolFactory=new PointQueryToolFactory("PointQueryTool","Point Query",0,*Vrui::getToolManager());
  pointQueryToolFactory->setNumButtons(1);
  pointQueryToolFactory->setButtonFunction(0,"Query Point");
  Vrui::getToolManager()->addClass(pointQueryToolFactory,Vrui::ToolManager::defaultToolFactoryDestructor);

  /* Parse the command line: */
  const char* optitrackFileName=0;
  bool optitrackFlipZ=false;
  const char* totalstationFileName=0;
  int screenPixelSize[2]={-1,-1};
  int screenSquareSize=200;
  double unitScale=1.0;
  for(int i=1;i<argc;++i)
  {
    if(argv[i][0]=='-')
    {
      if(strcasecmp(argv[i]+1,"screenSize")==0)
      {
        for(int j=0;j<2;++j)
        {
          ++i;
          screenPixelSize[j]=atoi(argv[i]);
        }
      }
      else if(strcasecmp(argv[i]+1,"squareSize")==0)
      {
        ++i;
        screenSquareSize=atoi(argv[i]);
      }
      else if(strcasecmp(argv[i]+1,"metersToInches")==0)
        unitScale=1000.0/25.4;
      else if(strcasecmp(argv[i]+1,"unitScale")==0)
      {
        ++i;
        unitScale=atof(argv[i]);
      }
      else if(strcasecmp(argv[i]+1,"flipZ")==0)
        optitrackFlipZ=true;
      else
      {
      }
    }
    else if(totalstationFileName==0)
      totalstationFileName=argv[i];
    else if(optitrackFileName==0)
      optitrackFileName=argv[i];
    else
    {
    }
  }

  /* Read the Optitrack sample file: */
  if(optitrackFileName!=0)
  {
    readOptitrackSampleFile(optitrackFileName,optitrackFlipZ);
    std::cout<<"Read "<<trackingPoints.size()<<" ball points from Optitrack sample file"<<std::endl;
  }

  /* Read relevant point classes from the Totalstation survey file: */
  if(totalstationFileName!=0)
  {
    screenPoints=readTotalstationSurveyFile(totalstationFileName,"SCREEN");
    floorPoints=readTotalstationSurveyFile(totalstationFileName,"FLOOR");
    ballPoints=readTotalstationSurveyFile(totalstationFileName,"BALLS");
    std::cout<<"Read "<<ballPoints.size()<<" ball points from TotalStation survey file"<<std::endl;
  }

  /*********************************************************************
  Establish a normalized coordinate system with the floor at the z=0
  plane, the screen in a plane approximately orthogonal to the y axis,
  and the screen center above the origin.
  *********************************************************************/

  /* Fit a plane to the floor points: */
  Geometry::PCACalculator<3> floorPca;
  for(PointList::const_iterator fpIt=floorPoints.begin();fpIt!=floorPoints.end();++fpIt)
    floorPca.accumulatePoint(*fpIt);
  Point floorCentroid=floorPca.calcCentroid();
  floorPca.calcCovariance();
  double floorEv[3];
  floorPca.calcEigenvalues(floorEv);
  Geometry::PCACalculator<3>::Vector floorNormal=floorPca.calcEigenvector(floorEv[2]);

  /* Fit a plane to the screen points: */
  Geometry::PCACalculator<3> screenPca;
  for(PointList::const_iterator spIt=screenPoints.begin();spIt!=screenPoints.end();++spIt)
    screenPca.accumulatePoint(*spIt);
  Point screenCentroid=screenPca.calcCentroid();
  screenPca.calcCovariance();
  double screenEv[3];
  screenPca.calcEigenvalues(screenEv);
  Geometry::PCACalculator<3>::Vector screenNormal=screenPca.calcEigenvector(screenEv[2]);

  /* Flip the floor normal such that it points towards the screen points: */
  if((screenCentroid-floorCentroid)*floorNormal<Scalar(0))
    floorNormal=-floorNormal;

  /* Flip the screen normal such that it points away from the ball points: */
  Point::AffineCombiner ballC;
  for(PointList::const_iterator bpIt=ballPoints.begin();bpIt!=ballPoints.end();++bpIt)
    ballC.addPoint(*bpIt);
  if((ballC.getPoint()-screenCentroid)*screenNormal>Scalar(0))
    screenNormal=-screenNormal;

  /* Project the screen centroid onto the floor plane to get the coordinate system origin: */
  Point origin=screenCentroid-floorNormal*(((screenCentroid-floorCentroid)*floorNormal)/Geometry::sqr(floorNormal));

  /* Orthonormalize the screen normal against the floor normal: */
  Vector y=screenNormal-floorNormal*((screenNormal*floorNormal)/Geometry::sqr(floorNormal));
  Vector x=Geometry::cross(y,floorNormal);

  #if 0
  /* Calculate a rotation to align the floor normal with +z and the (horizontal) screen normal with +y: */
  ONTransform::Rotation rot=ONTransform::Rotation::fromBaseVectors(x,y);
  #endif

  /*********************************************************************
  Calculate a transformation to move the Totalstation survey points into
  the normalized coordinate system:
  *********************************************************************/

  ONTransform transform(origin-Point::origin,ONTransform::Rotation::fromBaseVectors(x,y));
  transform.doInvert();

  /* Transform all survey points: */
  for(PointList::iterator spIt=screenPoints.begin();spIt!=screenPoints.end();++spIt)
    *spIt=transform.transform(*spIt);
  for(PointList::iterator fpIt=floorPoints.begin();fpIt!=floorPoints.end();++fpIt)
    *fpIt=transform.transform(*fpIt);
  for(PointList::iterator bpIt=ballPoints.begin();bpIt!=ballPoints.end();++bpIt)
    *bpIt=transform.transform(*bpIt);

  if(screenPixelSize[0]>0&&screenPixelSize[1]>0&&screenSquareSize>0)
  {
    /*********************************************************************
    Calculate the optimal projective transformation and screen
    transformation (orthonormal transformation plus non-uniform scaling in
    x and y) from theoretical screen points to surveyed screen points:
    *********************************************************************/

    /* Create a list of theoretical screen points: */
    PointList screen;
    int screenPixelOffset[2];
    for(int i=0;i<2;++i)
      screenPixelOffset[i]=((screenPixelSize[i]-1)%screenSquareSize)/2;
    for(int y=screenPixelOffset[1];y<screenPixelSize[1];y+=screenSquareSize)
      for(int x=screenPixelOffset[0];x<screenPixelSize[0];x+=screenSquareSize)
        screen.push_back(Point((Scalar(x)+Scalar(0.5))/Scalar(screenPixelSize[0]),Scalar(1)-(Scalar(y)+Scalar(0.5))/Scalar(screenPixelSize[1]),0));
    if(screen.size()!=screenPoints.size())
      Misc::throwStdErr("Wrong number of screen points, got %d instead of %d",int(screenPoints.size()),int(screen.size()));

    /* Find the best-fitting projective transformation for the measured screen points: */
    PTransformFitter ptf(screen.size(),&screen[0],&screenPoints[0]);
    PTransformFitter::Scalar screenResult2=LevenbergMarquardtMinimizer<PTransformFitter>::minimize(ptf);
    std::cout<<"Projective transformation fitting final distance: "<<screenResult2<<std::endl;
    pScreenTransform=ptf.getTransform();

    /* Print the screen transformation matrix: */
    std::cout<<"Projective transformation matrix:"<<std::endl;
    std::cout<<std::setprecision(6)<<pScreenTransform<<std::endl;

    /* Find the best-fitting screen transformation for the measured screen points: */
    ScreenTransformFitter stf(screen.size(),&screen[0],&screenPoints[0]);
    ScreenTransformFitter::Scalar screenResult1=LevenbergMarquardtMinimizer<ScreenTransformFitter>::minimize(stf);
    std::cout<<"Screen transformation fitting final distance: "<<screenResult1<<std::endl;
    screenTransform=stf.getTransform();
    screenSize[0]=stf.getSize(0);
    screenSize[1]=stf.getSize(1);
    std::cout<<"Optimal screen size: "<<screenSize[0]<<", "<<screenSize[1]<<std::endl;
    std::cout<<"Optimal screen origin: "<<screenTransform.getOrigin()<<std::endl;
    std::cout<<"Optimal horizontal screen axis: "<<screenTransform.getDirection(0)<<std::endl;
    std::cout<<"Optimal vertical screen axis: "<<screenTransform.getDirection(1)<<std::endl;

    /*********************************************************************
    Calculate a homography matrix from the optimal screen transformation
    to the optimal projective transformation to correct screen
    misalignments:
    *********************************************************************/

    Point sCorners[4];
    Point pCorners[4];
    for(int i=0;i<4;++i)
    {
      sCorners[i][0]=i&0x1?screenSize[0]*unitScale:0.0;
      sCorners[i][1]=i&0x2?screenSize[1]*unitScale:0.0;
      sCorners[i][2]=0.0;
      pCorners[i][0]=i&0x1?1.0:0.0;
      pCorners[i][1]=i&0x2?1.0:0.0;
      pCorners[i][2]=0.0;
      pCorners[i]=screenTransform.inverseTransform(pScreenTransform.transform(pCorners[i]));
      pCorners[i][0]*=unitScale;
      pCorners[i][1]*=unitScale;
    }
    Geometry::ProjectiveTransformation<double,2> sHom=calcHomography(sCorners);
    Geometry::ProjectiveTransformation<double,2> pHom=calcHomography(pCorners);
    Geometry::ProjectiveTransformation<double,2> hom=pHom;
    hom.leftMultiply(Geometry::invert(sHom));
    for(int i=0;i<3;++i)
      for(int j=0;j<3;++j)
        hom.getMatrix()(i,j)/=hom.getMatrix()(2,2);

    #if 0
    std::cout<<"Homography matrix for projective transform: "<<pHom<<std::endl;
    std::cout<<"Homography matrix for screen transform: "<<sHom<<std::endl;
    std::cout<<"Screen correction homography matrix: "<<hom<<std::endl;
    #endif

    #if 0
    /* Do some experiments: */
    Geometry::ProjectiveTransformation<double,3> hom3=Geometry::ProjectiveTransformation<double,3>::identity;
    for(int i=0;i<3;++i)
      for(int j=0;j<3;++j)
        hom3.getMatrix()(i<2?i:3,j<2?j:3)=hom.getMatrix()(i,j);
    hom3.getMatrix()(2,0)=hom3.getMatrix()(3,0);
    hom3.getMatrix()(2,1)=hom3.getMatrix()(3,1);
    std::cout<<hom3<<std::endl;
    std::cout<<Geometry::invert(hom3)<<std::endl;
    std::cout<<hom3.transform(Geometry::HVector<double,3>(-1.0,-1.0,-1.0,1.0)).toPoint()<<std::endl;
    std::cout<<hom3.transform(Geometry::HVector<double,3>( 1.0,-1.0,-1.0,1.0)).toPoint()<<std::endl;
    std::cout<<hom3.transform(Geometry::HVector<double,3>(-1.0, 1.0,-1.0,1.0)).toPoint()<<std::endl;
    std::cout<<hom3.transform(Geometry::HVector<double,3>( 1.0, 1.0,-1.0,1.0)).toPoint()<<std::endl;
    std::cout<<hom3.transform(Geometry::HVector<double,3>(-1.0,-1.0, 1.0,1.0)).toPoint()<<std::endl;
    std::cout<<hom3.transform(Geometry::HVector<double,3>( 1.0,-1.0, 1.0,1.0)).toPoint()<<std::endl;
    std::cout<<hom3.transform(Geometry::HVector<double,3>(-1.0, 1.0, 1.0,1.0)).toPoint()<<std::endl;
    std::cout<<hom3.transform(Geometry::HVector<double,3>( 1.0, 1.0, 1.0,1.0)).toPoint()<<std::endl;
    #endif

    /* Print a configuration file section for the screen: */
    std::cout<<std::endl<<"Configuration settings for screen:"<<std::endl;
    std::cout<<"origin "<<screenTransform.getTranslation()*unitScale<<std::endl;
    std::cout<<"horizontalAxis "<<screenTransform.getDirection(0)<<std::endl;
    std::cout<<"width "<<screenSize[0]*unitScale<<std::endl;
    std::cout<<"verticalAxis "<<screenTransform.getDirection(1)<<std::endl;
    std::cout<<"height "<<screenSize[1]*unitScale<<std::endl;
    std::cout<<"offAxis true"<<std::endl;
    std::cout<<"homography ( ";
    for(int j=0;j<3;++j)
    {
      if(j>0)
        std::cout<<", \\"<<std::endl<<" ";
      std::cout<<"( ";
      for(int i=0;i<3;++i)
      {
        if(i>0)
          std::cout<<", ";
        std::cout<<pHom.getMatrix()(i,j);
      }
      std::cout<<" )";
    }
    std::cout<<" )"<<std::endl;
    std::cout<<std::endl;
  }

  if(optitrackFileName!=0&&totalstationFileName!=0)
  {
    /*********************************************************************
    Calculate the optimal orthonormal transformation from tracking system
    coordinates to the normalized coordinate system by aligning ball
    positions observed by the tracking system with ball positions measured
    using the total station:
    *********************************************************************/

    /* Find an orthonormal transformation to align the tracking points with the ball points: */
    size_t numPoints=trackingPoints.size();
    if(numPoints>ballPoints.size())
      numPoints=ballPoints.size();

    /* Calculate the centroid of the tracking points: */
    Point::AffineCombiner tpCc;
    for(size_t i=0;i<numPoints;++i)
      tpCc.addPoint(trackingPoints[i]);
    Vector tpTranslation=tpCc.getPoint()-Point::origin;
    for(size_t i=0;i<numPoints;++i)
      trackingPoints[i]-=tpTranslation;
    ONTransformFitter ontf(numPoints,&trackingPoints[0],&ballPoints[0]);
    //ontf.setTransform(ONTransformFitter::Transform::rotate(ONTransformFitter::Transform::Rotation::rotateX(Math::rad(Scalar(90)))));
    ONTransformFitter::Scalar result=LevenbergMarquardtMinimizer<ONTransformFitter>::minimize(ontf);
    ONTransform tsCal=ontf.getTransform();
    tsCal*=ONTransform::translate(-tpTranslation);
    std::cout<<"Final distance: "<<result<<std::endl;
    std::cout<<"Tracking system calibration transformation: "<<tsCal<<std::endl;
    std::cout<<"Configuration settings for tracking calibrator: "<<std::endl;
    std::cout<<"transformation translate "<<tsCal.getTranslation()*unitScale<<" \\"<<std::endl;
    std::cout<<" * scale "<<unitScale<<" \\"<<std::endl;
    std::cout<<" * rotate "<<tsCal.getRotation().getAxis()<<", "<<Math::deg(tsCal.getRotation().getAngle())<<std::endl;

    /* Transform the tracking points with the result transformation: */
    for(PointList::iterator tpIt=trackingPoints.begin();tpIt!=trackingPoints.end();++tpIt)
      *tpIt=tsCal.transform(*tpIt+tpTranslation);
  }

  /* Initialize the navigation transformation: */
  Geometry::Box<Scalar,3> bbox=Geometry::Box<Scalar,3>::empty;
  for(PointList::const_iterator tpIt=trackingPoints.begin();tpIt!=trackingPoints.end();++tpIt)
    bbox.addPoint(*tpIt);
  for(PointList::const_iterator spIt=screenPoints.begin();spIt!=screenPoints.end();++spIt)
    bbox.addPoint(*spIt);
  for(PointList::const_iterator fpIt=floorPoints.begin();fpIt!=floorPoints.end();++fpIt)
    bbox.addPoint(*fpIt);
  for(PointList::const_iterator bpIt=ballPoints.begin();bpIt!=ballPoints.end();++bpIt)
    bbox.addPoint(*bpIt);
  Vrui::setNavigationTransformation(Geometry::mid(bbox.min,bbox.max),Geometry::dist(bbox.min,bbox.max));

  /* Create a virtual input device to move the tracking points interactively: */
  trackingPointsMover=Vrui::addVirtualInputDevice("TrackingPointsMover",0,0);
  // Vrui::getInputGraphManager()->setNavigational(trackingPointsMover,true);
  Vrui::NavTrackerState scaledDeviceT=Vrui::getInverseNavigationTransformation();
  scaledDeviceT*=trackingPointsMover->getTransformation();
  trackingPointsTransform=Vrui::TrackerState(scaledDeviceT.getTranslation(),scaledDeviceT.getRotation());
  trackingPointsTransform.doInvert();
}