StereoCamera::StereoCamera(Type type, cv::Ptr<Calibration> calibration)
    : calibration(calibration), imageSize(640, 480)
{
    // Create the left/right camera pair, either real devices or dummies.
    if (type == REAL) {
        leftCamera  = Ptr<Camera>(new RealCamera(0));
        rightCamera = Ptr<Camera>(new RealCamera(1));
    } else if (type == DUMMY) {
        leftCamera  = Ptr<Camera>(new DummyCamera(0));
        rightCamera = Ptr<Camera>(new DummyCamera(1));
    }

    if (calibration.get() != NULL) {
        // Disparity range rounded up to a multiple of 16, as SGBM requires.
        int numberOfDisparities = ((imageSize.width / 8) + 15) & -16;

        // Default semi-global block matching parameters.
        P1 = 8 * 3 * 3 * 3;
        P2 = 32 * 3 * 3 * 3;
        preFilterCap = 63;
        uniquenessRatio = 2;
        speckleWindowSize = 500;
        speckleRange = 32;
        SADWindowSize = 5;

        sgbm = createStereoSGBM(0, numberOfDisparities, SADWindowSize,
                                P1, P2, 1, preFilterCap, uniquenessRatio,
                                speckleWindowSize, speckleRange,
                                StereoSGBM::MODE_SGBM);

        // Trackbars for tuning the SGBM parameters at runtime.
        namedWindow("Magic SGBM", 1);
        createTrackbar("preFilterCap",      "Magic SGBM", &preFilterCap,      100,  onChanged);
        createTrackbar("SADWindowSize",     "Magic SGBM", &SADWindowSize,     21,   onChanged);
        createTrackbar("uniquenessRatio",   "Magic SGBM", &uniquenessRatio,   50,   onChanged);
        createTrackbar("speckleWindowSize", "Magic SGBM", &speckleWindowSize, 2000, onChanged);
        createTrackbar("speckleRange",      "Magic SGBM", &speckleRange,      2000, onChanged);
        createTrackbar("P1",                "Magic SGBM", &P1,                2000, onChanged);
        createTrackbar("P2",                "Magic SGBM", &P2,                2000, onChanged);

        // Intersection of the rectified left/right ROIs that contains valid disparities.
        dispRoi = getValidDisparityROI(calibration->validRoiLeft,
                                       calibration->validRoiRight,
                                       0, numberOfDisparities, 3);
    }
}
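/*
 * Hypothetical sketch, not part of the original source: the constructor above
 * registers "onChanged" for every trackbar, but the callback itself is not shown.
 * One plausible implementation just rebuilds the matcher from the member values
 * that the trackbars write into directly. The global pointer g_stereoCamera and
 * the assumption that these members are accessible from here are mine, not the
 * author's.
 */
static StereoCamera* g_stereoCamera = NULL; // assumed to point at the active instance

void onChanged(int /*pos*/, void* /*userdata*/)
{
    if (g_stereoCamera == NULL)
        return;
    StereoCamera& sc = *g_stereoCamera;

    // Re-create the SGBM matcher with the values the trackbars just modified.
    int numberOfDisparities = ((sc.imageSize.width / 8) + 15) & -16;
    int blockSize = sc.SADWindowSize | 1; // block size must be odd and >= 1
    sc.sgbm = createStereoSGBM(0, numberOfDisparities, blockSize,
                               sc.P1, sc.P2, 1, sc.preFilterCap,
                               sc.uniquenessRatio, sc.speckleWindowSize,
                               sc.speckleRange, StereoSGBM::MODE_SGBM);
}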
virtual void run()
{
    helper::PerformanceMeasurer PM;
    PM.tic();
    while (!is->done()) {
        _mutex.lock();
        is->get(frame);
        if (frame.empty()) {
            loglni("[TrackThread] no valid frame, exit!!!");
            OpenCVneedQuit = true;
            _mutex.unlock();
            break;
        }
        cvtColor(frame, gray, CV_BGR2GRAY);

        double rms, ncc;
        if (needToInit) {
            // Not tracking yet: detect the AprilTag and (re)initialize the tracker.
            loglni("[TrackThread] INITing...");
            Mat tmpH;
            if (findAprilTag(frame, 0, tmpH, true)) {
#if !USE_INTERNAL_DETECTOR
                Mat initH = tmpH * iHI;
                needToInit = !tracker.init(gray, initH, rms, ncc);
#else
                needToInit = !tracker.init(gray, rms, ncc);
#endif
                if (!needToInit) loglni("[TrackThread] ...INITed");
                if (!needToInit) tracker.draw3D(frame);
            }
        } else {
            // Already initialized: track the current frame and update the camera pose.
            needToInit = !tracker(gray, rms, ncc, opencvDraw ? &frame : 0);
            if (!needToInit) tracker.GetCameraPose(camR, camT, false, true);
        }

        ++BkgModifyCnt; // signal the OSG side that a new background frame is available
        loglni("[TrackThread] fps=" << 1.0 / PM.toctic());
        _mutex.unlock();

        if (OpenCVneedQuit) {
            break;
        }
        ++framecnt;
    }

    OpenThreads::Thread::YieldCurrentThread();
    loglni("[TrackThread] OpenCV quit...");
    if (!videoFromWebcam) {
        loglni("[TrackThread] OpenCV notify OSG to quit...");
        viewer.setDone(true);
    }
}
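/*
 * Design note, illustrative sketch only (not part of the original source):
 * run() pairs every _mutex.lock() with a manual _mutex.unlock() on each exit
 * path. The self-contained example below shows the same locking pattern with
 * OpenThreads::ScopedLock, which releases the mutex automatically when the
 * scope ends, even if the guarded code throws. CounterThread and its members
 * are made-up stand-ins for the tracking thread and its shared buffers.
 */
#include <OpenThreads/Thread>
#include <OpenThreads/Mutex>
#include <OpenThreads/ScopedLock>
#include <iostream>

class CounterThread : public OpenThreads::Thread {
public:
    CounterThread() : done(false), counter(0) {}

    virtual void run() {
        while (true) {
            {   // critical section, unlocked automatically at end of scope
                OpenThreads::ScopedLock<OpenThreads::Mutex> lock(mutex);
                if (done) break;
                ++counter; // stands in for "grab frame, track, bump BkgModifyCnt"
            }
            OpenThreads::Thread::YieldCurrentThread();
        }
    }

    void stop() {
        OpenThreads::ScopedLock<OpenThreads::Mutex> lock(mutex);
        done = true;
    }

    int value() {
        OpenThreads::ScopedLock<OpenThreads::Mutex> lock(mutex);
        return counter;
    }

private:
    OpenThreads::Mutex mutex;
    bool done;
    int counter;
};

int main() {
    CounterThread t;
    t.start();
    OpenThreads::Thread::microSleep(100000); // let the worker run ~0.1 s
    t.stop();
    t.join();
    std::cout << "counter = " << t.value() << std::endl;
    return 0;
}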
int main( int argc, char **argv )
{
    if (argc < 4) {
        usage(argc, argv);
        return 1;
    }

    is = helper::createImageSource(argv[1]);
    if (is.empty() || is->done()) {
        loglne("[main] createImageSource failed or no valid imagesource!");
        return -1;
    }
    is->pause(false);
    is->reportInfo();
    is->get(frame);
    imgW = frame.cols;
    imgH = frame.rows;
    videoFromWebcam = false;
    if (is->classname() == "ImageSource_Camera") {
        videoFromWebcam = true;
    }

    loglni("[main] loading K matrix from: " << argv[2]);
    double K[9];
    std::ifstream kfile(argv[2]);
    for (int i = 0; i < 9; ++i) kfile >> K[i];
    tracker.loadK(K);
    loglni("[main] K matrix loaded:");
    loglni(helper::PrintMat<>(3, 3, K));

    loglni("[main] load template image from: " << argv[3]);
    tracker.loadTemplate(argv[3]);

    //////////////// TagDetector /////////////////////////////////////////
    int tagid = 0; // default tag16h5
    if (argc > 5) tagid = atoi(argv[5]);
    tagFamily = TagFamilyFactory::create(tagid);
    if (tagFamily.empty()) {
        loglne("[main] create TagFamily fail!");
        return -1;
    }
    detector = new TagDetector(tagFamily);
    if (detector.empty()) {
        loglne("[main] create TagDetector fail!");
        return -1;
    }
    Mat temp = imread(argv[3]);
    if (findAprilTag(temp, 0, HI, true)) {
        namedWindow("template");
        imshow("template", temp);
        iHI = HI.inv();
    } else {
        loglne("[main error] detector did not find any apriltag on template image!");
        return -1;
    }

    //////////////// OSG ////////////////////////////////////////////////
    osg::ref_ptr<osg::Group> root = new osg::Group;
    string scenefilename = (argc > 4 ? argv[4] : ("cow.osg"));
    osg::ref_ptr<osg::Node> cow = osgDB::readNodeFile(scenefilename);
    arscene = new helper::ARSceneRoot;
    helper::FixMat<3,double>::Type matK = helper::FixMat<3,double>::ConvertType(K);
    CV2CG::cv2cg(matK, 0.01, 500, imgW, imgH, *arscene);
    manipMat = new osg::MatrixTransform(osg::Matrix::identity());
    manipMat->addChild(cow);
    manipMat->getOrCreateStateSet()->setMode(GL_NORMALIZE, osg::StateAttribute::ON);
    arscene->addChild(manipMat);

    osg::ref_ptr<osg::Image> backgroundImage = new osg::Image;
    helper::cvmat2osgimage(frame, backgroundImage);
    arvideo = new helper::ARVideoBackground(backgroundImage);
    root->setUpdateCallback(new ARUpdateCallback);
    root->addChild(arvideo);
    root->addChild(arscene);

    viewer.setSceneData(root);
    viewer.addEventHandler(new osgViewer::StatsHandler);
    viewer.addEventHandler(new osgViewer::WindowSizeHandler);
    viewer.addEventHandler(new QuitHandler);

    // start tracking thread
    OpenThreads::Thread::Init();
    TrackThread* thr = new TrackThread;
    thr->start();

    viewer.run();

    delete thr;
    loglni("[main] DONE...exit!");
    return 0;
}
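/*
 * Hypothetical sketch, not part of the shown source: main() installs a
 * QuitHandler whose definition is omitted above. A minimal version is an
 * osgGA::GUIEventHandler that flags both the OSG viewer and the OpenCV
 * tracking loop to stop when ESC is pressed. The globals viewer and
 * OpenCVneedQuit are the ones used in the code above; everything else here
 * is an assumption.
 */
#include <osgGA/GUIEventHandler>

struct QuitHandler : public osgGA::GUIEventHandler {
    virtual bool handle(const osgGA::GUIEventAdapter& ea,
                        osgGA::GUIActionAdapter& /*aa*/) {
        if (ea.getEventType() == osgGA::GUIEventAdapter::KEYDOWN &&
            ea.getKey() == osgGA::GUIEventAdapter::KEY_Escape) {
            viewer.setDone(true);  // stop the OSG render loop
            OpenCVneedQuit = true; // let TrackThread::run() exit as well
            return true;           // event handled
        }
        return false;
    }
};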