Exemplo n.º 1
0
// Interactive calibration of the crop ("cut") rectangle.
// Shows the input (still image or camera frame), draws the current bounds,
// and on SPACE saves the rectangle to <PATHSAVE>Vision/cut.csv via crud.
// ESC aborts without saving.
void Calibration::calibrationCut(){
	namedWindow("input");

	// Register the mouse handler once, right after the window exists;
	// re-registering the same callback on every frame is redundant work.
	setMouseCallback("input", callbackMouseClickCut, 0);

	while(true){
		// Frame source: still image or camera, depending on configuration.
		if(!device){
			inputImage = imread(imagePath);
		}else{
			cam >> inputImage;
		}

		draw();
		// Visualize the crop rectangle currently being calibrated.
		rectangle(inputImage, staticBoundBottom, staticBoundTop, CV_RGB(255, 255, 255), 1, 8, 0);

		imshow("input", inputImage);

		char key = waitKey(10);
		if(key == 27){            // ESC: abort without saving
			break;
		}else if(key == 32){      // SPACE: persist the crop rectangle
			stringstream ss;
			ss << PATHSAVE << "Vision/";
			ss << "cut.csv";

			staticBounds = Rect(staticBoundTop, staticBoundBottom);
			crud.saveCut(ss.str(), staticBounds);
			break;
		}
	}
}
Exemplo n.º 2
0
  // Lets the user drag a bounding box on `img` in `windowName`.
  // Optionally draws a crosshair through the box centre. Selection ends on
  // SPACE (32), ESC (27) or ENTER (13); returns the selected box.
  Rect2d ROISelector::select(const cv::String& windowName, Mat img, bool showCrossair, bool fromCenter){

    key=0;

    // set the drawing mode
    selectorParams.drawFromCenter = fromCenter;

    // show the image and give feedback to user
    imshow(windowName,img);

    // copy the data, rectangle should be drawn in the fresh image
    selectorParams.image=img.clone();

    // select the object
    setMouseCallback( windowName, mouseHandler, (void *)&selectorParams );

    // extract lower 8 bits for scancode comparison
    unsigned int key_ = key & 0xFF;
    // end selection process on SPACE (32) ESC (27) or ENTER (13)
    while(!(key_==32 || key_==27 || key_==13)){
      // draw the selected object
      rectangle(
        selectorParams.image,
        selectorParams.box,
        Scalar(255,0,0),2,1
      );

      // draw cross air in the middle of bounding box
      if(showCrossair){
        // horizontal line
        line(
          selectorParams.image,
          Point((int)selectorParams.box.x,(int)(selectorParams.box.y+selectorParams.box.height/2)),
          Point((int)(selectorParams.box.x+selectorParams.box.width),(int)(selectorParams.box.y+selectorParams.box.height/2)),
          Scalar(255,0,0),2,1
        );

        // vertical line
        line(
          selectorParams.image,
          Point((int)(selectorParams.box.x+selectorParams.box.width/2),(int)selectorParams.box.y),
          Point((int)(selectorParams.box.x+selectorParams.box.width/2),(int)(selectorParams.box.y+selectorParams.box.height)),
          Scalar(255,0,0),2,1
        );
      }

      // show the image bouding box
      imshow(windowName,selectorParams.image);

      // reset the image
      selectorParams.image=img.clone();

      //get keyboard event
      key=waitKey(1);
      // BUG FIX: key_ was computed only once before the loop, so the exit
      // condition could never become true; refresh it after each key press.
      key_ = key & 0xFF;
    }


    return selectorParams.box;
  }
Exemplo n.º 3
0
// Lets the user click 4 points on `src` in the "SelectPoints" window.
// Each click (captured by onMouseSelect into `point`/`cnt`/`flagc`) must be
// confirmed with 'c'; any other key discards it and restores the image.
// Returns a malloc()'d array of 4 points (caller must free()), or NULL if
// the image is empty or the user aborts with ESC.
Point2f* selectPoint(Mat src)
{
    if( src.empty() )
    {
        printf("Image empty\n");
        return NULL;
    }

    // Allocate only after the validity check so the error path cannot leak.
    Point2f* points = (Point2f*)malloc(sizeof(Point2f)*4);
    char c;

    src.copyTo(imageC);

    namedWindow( "SelectPoints", 1 );
    cnt = 0;
    setMouseCallback( "SelectPoints", onMouseSelect, 0 );

    // onMouseSelect is expected to advance `cnt`, store the click in `point`
    // and raise `flagc` — TODO confirm against the callback implementation.
    while(cnt<5)
    {	
  
    	if(flagc==1)
    	{
    	
    		imshow("SelectPoints",imageC);
    		c = waitKey(0);
    		if(c=='c')
    		{
    			// confirmed: keep the most recent click
    			points[cnt-1] = point; 
    			printf("\ncnt:%d\n",cnt);
    			if(cnt == 4)
    			break;
    		}
    		else
    		{
    			// rejected: undo the click and restore the clean image
    			cnt--;
    			printf("\ncnt:%d",cnt);
    			src.copyTo(imageC);
    			
    		}
    		flagc = 0;
            cout<<"point selection done"<<endl;
    	}
    	
    	imshow("SelectPoints",imageC);
    	
    	if(waitKey(15)==27)
    	{
    	    // BUG FIX: release the buffer before aborting — it used to leak.
    	    free(points);
    	    return NULL;
    	}
    	
     }	
    printf("\nReturning\n");
    return points;

}
// Manually verify the detected result: show the image in a resizable window
// and let the user probe it with the mouse until any key is pressed.
void findCircleParameter::checkVarify()
{
	// Resizable window, scaled to the configured display size.
	namedWindow(check_win_name, CV_WINDOW_NORMAL);
	resizeWindow(check_win_name, width_disp_img, height_disp_img);

	// Display the image, hook up the mouse probe, and block until a key press.
	imshow(check_win_name, image);
	setMouseCallback(check_win_name, onMouse);
	waitKey();

	cv::destroyWindow(check_win_name);
}
Exemplo n.º 5
0
// Main ball-detection loop over the given video input.
// Combines MOG2 background subtraction (for contours) with HSV thresholding
// (for object tracking), draws the white-ball trajectory, and blocks until
// the user clicks a target point when none is set.
// videoInput: path/URI accepted by VideoCapture::open.
void ballDetect :: initDetect(char *videoInput){

    VideoCapture capture;
    Mat src, src_HSV, processed;
    int x=0; int y=0; 

    Mat currentFrame, back, fore;   
    BackgroundSubtractorMOG2 bg;

    std::vector<std::vector<cv::Point> > contours;

    capture.open(videoInput);
    capture.set(CV_CAP_PROP_FRAME_WIDTH, FRAME_WIDTH);
    capture.set(CV_CAP_PROP_FRAME_HEIGHT, FRAME_HEIGHT);

    // int xyz=1;

    while(1){

        // cout<<xyz++<<endl;
        capture.read(src);
        cvtColor(src, src_HSV, COLOR_BGR2HSV);

        // Foreground mask via MOG2, cleaned with one erode/dilate pass
        // before external-contour extraction.
        bg.operator ()(src, fore);
        bg.getBackgroundImage(back);
        erode(fore, fore, Mat());
        dilate(fore, fore, Mat());
        findContours(fore, contours, CV_RETR_EXTERNAL, CV_CHAIN_APPROX_NONE);
        // drawContours(src,  contours,  -1,  Scalar(0,  0,  255),  2);
        contourCount=contours.size();

        // Once a collision point is set (via onMouseClick), draw it, the
        // initial white-ball position, and the line between them.
        if(white_collide.x != -1 && white_collide.y!=-1){
            circle(src, white_collide, 2, Scalar(0, 0, 0), 2);
            circle(src, white_initial, 2, Scalar(0, 0, 0), 2);
            line(src, white_initial, white_collide, Scalar(255, 255, 255), 1, CV_AA);
        }

        // HSV threshold + morphology, then track the filtered object.
        inRange(src_HSV, *minval, *maxval, processed);
        morphOps(processed);
        trackFilteredObject(x, y, processed, src);

        // Draw the recorded white-ball path as a polyline.
        for(int i=0;i<(int)white_position.size()-1;++i){
            line(src, white_position[i], white_position[i+1], Scalar(255, 255, 255), 1, CV_AA); 
        }
        // Block here until the user clicks a target point; the callback
        // writes white_collide, which ends this inner loop.
        while(white_collide.x == -1 && white_collide.y==-1){
            setMouseCallback("source", onMouseClick, &src);
            putText(src, "Specify Point", Point(750, 40), 1, 1, Scalar(255, 0, 0), 2);
            imshow("source", src);
            waitKey(5);
        }

        imshow("source", src);
        waitKey(5);
    }
}
// Interactive GrabCut segmentation.
// Shows the image in m_WinName, lets the user set a rectangle / seed pixels
// with the mouse (onMouse), re-runs segmentation on 's', resets on 'r'.
// ESC finishes: the mask is reduced to its foreground bit and exported to
// m_Result as CV_32SC1 before delegating to Segmentor::Run().
void GrabCutSegmentor::Run()
{
	cout<<"====="<<m_Name<<" Runing..."<<endl;

	namedWindow(m_WinName);
	setMouseCallback(m_WinName, onMouse, this);
	showImage();

	// NOTE(review): "CG_FGD"/"CG_PR_FGD" in the help text below look like
	// typos for GC_FGD/GC_PR_FGD; left untouched (runtime strings).
	cout << "\nThe Grabcut Segmentation\n"
        "\nSelect a rectangular area around the object you want to segment\n" <<
        "\tESC - Finished\n"
        "\tr - restore the original image\n"
        "\ts - do segmentation\n"
        "\tleft mouse button - set rectangle\n"
        "\tCTRL+left mouse button - set GC_BGD pixels\n"
        "\tSHIFT+left mouse button - set CG_FGD pixels\n"
        "\tCTRL+right mouse button - set GC_PR_BGD pixels\n"
        "\tSHIFT+right mouse button - set CG_PR_FGD pixels\n" << endl;

	for(;;)
    {
        int c = waitKey(0);
        switch( (char) c )
        {
        case '\x1b':   // ESC: leave the loop and finalize the mask
            cout << "Exiting ..." << endl;
            goto exit;
        case 'r':      // restore the original image and start over
            cout << endl;
            reset();
            showImage();
            break;
        case 's':      // run one more GrabCut iteration
			int iter = iterCount;
            //cout << "<" << iterCount << "... ";
            int newIterCount = nextIter();
            // nextIter() only advances once a rectangle has been set.
            if( newIterCount > iter )
            {
                showImage();
                cout << "Finished!" << endl;
            }
            else
                cout << "rect must be determined!" << endl;
            break;
        }
    }

exit:
	// Keep only bit 0 of each mask label — in GrabCut's encoding this
	// selects the (probable) foreground labels — then export as int mask.
	m_Mask = m_Mask & 1;
	m_Mask.convertTo(m_Result, CV_32SC1);

	destroyWindow( m_WinName );	

	Segmentor::Run();
}
Exemplo n.º 7
0
// Constructs the geo-map editor window and optionally loads map data.
// _root_folder: when non-NULL, data is imported from and read out of it.
GeoMapEditor::GeoMapEditor( const char* _root_folder ) /// = NULL )
  : iObjType(0), title(GEOMAP_EDITOR_TITLE), rubbering_mode(0)//, non_stop_mode(false)
{
  // WINDOW_NORMAL on purpose: with WINDOW_AUTOSIZE the mouse x/y coordinates
  // would have to be rescaled.
  namedWindow( title, WINDOW_NORMAL );
  setMouseCallback( title, mouseCallBack4GeoMapEditor, this );

  if ( _root_folder != NULL )
  {
    // Import takes precedence over previously stored data; during read(),
    // a map sheet that was already imported (matched by name) is not
    // re-added.
    gm.import( _root_folder );
    gm.read( _root_folder );
  }
};
Exemplo n.º 8
0
// Interactive color calibration.
// Shows the live camera image plus the thresholded preview, lets the user
// click a pixel (corPixel fills `pontos` and sets `clique`), and on SPACE
// returns the sampled HSV color. ESC aborts, returning the default Cor.
Cor Visao::gravaCor(){        
    Cor cor = Cor();

    // Create the window up front so the mouse callback can be registered
    // once, instead of being re-registered on every frame.
    namedWindow("Imagem Normal");
    setMouseCallback("Imagem Normal", corPixel, 0);

    while (true){ 
        clock_t start=tempo();  

        cam >> imagem;

        cvtColor(imagem, imagemHSV, COLOR_BGR2HSV); 

        imshow("Imagem Normal", imagem); 
        moveWindow("Imagem Normal", 0, 0);  

        // Threshold around the sampled point with the configured tolerances.
        inRange(imagemHSV, Scalar(pontos[0]-variacaoH, pontos[1]*(1-variacao), pontos[2]*(1-variacao)),
         Scalar(pontos[0]+variacaoH, pontos[1]*(1+variacao), pontos[2]*(1+variacao)), imagemTratada); 
        
        //medianBlur(imagemTratada, imagemTratada, 3);     

        imshow("Calibragem", imagemTratada); 
        moveWindow("Calibragem", 640, 0);

        char key =  waitKey(1);    
        if (key == 27){
            // Repeated destroyAllWindows+waitKey flushes HighGUI events so
            // the windows actually close.
            for (int i=0; i<10; i++){
                destroyAllWindows();
                waitKey(1);  
            }
            break; 
        }
        if (clique == true){
            // A pixel has been clicked: stage its HSV components.
            cor.corH = pontos[0];
            cor.corS = pontos[1];
            cor.corV = pontos[2];

            if (key == 32){   // SPACE: accept the staged color
                for (int i=0; i<10; i++){
                    destroyAllWindows();
                    waitKey(1);  
                }
                cout << "Cor Calibrada" << endl;
                clique = false;
                pontos[0] = 0; pontos[1] = 0; pontos[2] = 0;  
                break; 
            }            
        }        
    }
    return cor;
}
Exemplo n.º 9
0
// Live face recognition: opens the default camera, detects faces with the
// Haar cascade, classifies each with the trained model, and labels it as
// family/stranger from the prediction confidence. ESC stops and closes.
void TrackFace::on_recognition_clicked()
{
    TrackFace::capture.open(0);
    string windowName="Track Face";
    cv::namedWindow(windowName.c_str(), cv::WINDOW_AUTOSIZE);
    moveWindow(windowName.c_str(), window_x, window_y);
    setMouseCallback(windowName.c_str(), trackFaceCallBack, NULL);

    while (true)
    {
        cv::Mat frame, buffer;
        if (!capture.isOpened()) break;

        capture >> buffer;
        // Work at half resolution to keep detection fast.
        cv::resize(buffer, frame,Size(buffer.cols/2,buffer.rows/2),0,0,INTER_LINEAR);

        vector<Rect_<int> > faces=haar_faces(frame);

        for (size_t i=0;i<faces.size();i++)
        {
            cv::Mat face_resized=resizeRecognitionFace(frame, faces[i]);

            int prediction=model->predict(face_resized);

            double confidence=0.0;
            model->predict(face_resized, prediction, confidence);

            cout << confidence << endl;

            // Threshold 2400 is an empirical distance cut-off: smaller
            // distances mean closer matches to the training set.
            string box_text="";
            if (confidence<=2400)
                box_text="Prediction is family";
            else box_text="Prediction is stranger";

            //string box_text=format("Prediction is %s", names[prediction].c_str());

            drawFace(frame, faces[i], box_text);
        }

        putText(frame, "Recognizing Face", Point(frame.cols/2-100, 30), FONT_HERSHEY_PLAIN, 1.5, CV_RGB(0,0,255),2.0);

        cv::imshow(windowName.c_str(), frame);

        // BUG FIX: this was a `while` loop that kept releasing/destroying an
        // already-closed window; an `if` with an explicit break exits cleanly.
        if (cv::waitKey(5)==27)
        {
            capture.release();
            cv::destroyWindow(windowName.c_str());
            break;
        }
    }
}
Exemplo n.º 10
0
// Constructs a display window with the given title.
// Initializes all drag/click state, registers the static mouse callback with
// `this` as user data, and spawns a boost::thread running this object's
// operator().
DisplayWindow::DisplayWindow(String name){
    dirname = "";
    windowName=name;
    // (-1,-1) marks "no active drag/cursor position yet".
    dragStartL = Point(-1,-1);
    dragStartR = Point(-1,-1);
    currentPos = Point(-1,-1);
    leftDrag=false;
    rightDrag=false;
    dragging = false;
    clickTime = std::chrono::system_clock::now();
    mode=0;
    namedWindow(name);
    setMouseCallback(name, staticMouseCallback, this);
    running = true;
    // NOTE(review): raw owning pointer to the thread — confirm the destructor
    // joins and deletes it.
    t = new boost::thread(boost::ref(*this));
}
Exemplo n.º 11
0
// Streaming thread: grabs frames from the camera or a file and displays
// them, holding the in_frame mutex while the shared `frame` is updated.
void *streaming( void *)
{
    // Create the window and register the mouse handler once, up front;
    // they were previously (and needlessly) re-done on every iteration,
    // with namedWindow even called after imshow.
    namedWindow("frame", CV_WINDOW_NORMAL);
    setMouseCallback("frame", CallBackFunc, NULL);

    while(1)
    {
        pthread_mutex_lock(&in_frame);
        cap >> frame;
        imshow("frame",frame);
        waitKey(30);
        pthread_mutex_unlock(&in_frame);
        Caviso;  printf("Fps do streaming: "); end_fps();
    }
    // Unreachable: the loop above never exits; kept for parity with the
    // other threads in this file.
    Cerro; printf("Streaming Down !\n");
    return NULL;
}
Exemplo n.º 12
0
// Interactive HSV color-range calibration.
// Six trackbars drive the min/max HSV thresholds (helper indices 0-2 minima,
// 3-5 maxima; H in [0,180], S/V in [0,255]). SPACE commits and saves the
// thresholds; ESC aborts.
void Calibration::calibrationVisionColor(){
	namedWindow("input");
	namedWindow("values");

	// Register the mouse handler once, right after the window exists;
	// re-registering it on every frame is redundant work.
	setMouseCallback("input", callbackMouseClickColor, 0);

	createTrackbar("HMin", "values", &staticVisionColorHelper[0], 180, callbackHueMin, &staticVisionColorHelper[0]);
	createTrackbar("HMax", "values", &staticVisionColorHelper[3], 180, callbackHueMax, &staticVisionColorHelper[3]);

	createTrackbar("SMin", "values", &staticVisionColorHelper[1], 255, callbackSaturationMin, &staticVisionColorHelper[1]);
	createTrackbar("SMax", "values", &staticVisionColorHelper[4], 255, callbackSaturationMax, &staticVisionColorHelper[4]);

	createTrackbar("VMin", "values", &staticVisionColorHelper[2], 255, callbackValueMin, &staticVisionColorHelper[2]);
	createTrackbar("VMax", "values", &staticVisionColorHelper[5], 255, callbackValueMax, &staticVisionColorHelper[5]);

	while(true){
		// Frame source: still image or camera, depending on configuration.
		if(!device){
			inputImage = imread(imagePath);
		}else{
			cam >> inputImage;
		}

		applyFilters();

		draw();
		imshow("input", inputImage);
		medianBlur(outputImage, outputImage, 3);
		imshow("output", outputImage);


		char key = waitKey(10);
		if(key == 27){            // ESC: abort without saving
			break;
		}else if(key == 32){      // SPACE: commit thresholds and persist
			for(int i = 0 ; i < 3 ; i++){
				staticVisionColor.min.rgb[i] = staticVisionColorHelper[i];
				staticVisionColor.max.rgb[i] = staticVisionColorHelper[i+3];
			}

			handleHSV(staticVisionColor.min);
			handleHSV(staticVisionColor.max);

			saveHSV();
			saveRGB();
			break;
		}
	}
}
Exemplo n.º 13
0
// Mouse handler for the "source" window.
// Left click inside the hard-coded playfield bounds records the collision
// point; right click detaches this callback from the window.
void onMouseClick(int event, int x, int y, int flags, void* userdata)
{
    // The frame is passed through userdata; flags are deliberately unused.
    Mat img = *((Mat *)userdata);
    flags = 0;

    if (event == EVENT_LBUTTONDOWN)
    {
        // Accept the click only 7 px inside the [68,1091]x[73,603] region.
        const bool withinX = (x >= 68 + 7) && (x <= 1091 - 7);
        const bool withinY = (y >= 73 + 7) && (y <= 603 - 7);
        if (withinX && withinY)
        {
            white_collide.x = x;
            white_collide.y = y;
        }
    }
    else if (event == EVENT_RBUTTONDOWN)
    {
        // Right button: unregister this handler.
        setMouseCallback("source", NULL, NULL);
    }
}
Exemplo n.º 14
0
// Sets up the tracker/filter display windows, the mouse callback, and the
// HSV-filter parameter trackbars bound to the member threshold variables.
OpenniFilter::OpenniFilter() : viewer("PCL OpenNI Viewer") 
{
    // initialize display
    namedWindow(windowTracker, CV_WINDOW_AUTOSIZE);
    namedWindow(windowFilter, CV_WINDOW_AUTOSIZE);
    setMouseCallback(windowTracker, mouseCallback, this);

    // initialize filter windows
    namedWindow(windowParam, CV_WINDOW_AUTOSIZE);
    // Use the C++ createTrackbar API rather than the deprecated legacy
    // cvCreateTrackbar; behavior is identical for value-pointer trackbars.
    // The hue should be in accordance with what the user has chosen.
    createTrackbar("Hue Deviation", windowParam, &iHueDev, 255);
    createTrackbar("LowS",          windowParam, &iLowS,  255); // Saturation (0 - 255)
    createTrackbar("HighS",         windowParam, &iHighS, 255);
    createTrackbar("LowV",          windowParam, &iLowV,  255); // Value (0 - 255)
    createTrackbar("HighV",         windowParam, &iHighV, 255);

}
Exemplo n.º 15
0
void VideoHandle::selectImageColor()
{
    flag_select = false;
    namedWindow("select_color");
    setMouseCallback("select_color", onMouse, this);

    while(!flag_select) {
        getImage();
        Mat temp = frame.clone();
        undistort(frame, temp, distortmtx, distortdist);
        frame = temp;
        showImage("select_color");
    }

    cout << selectx << " " << selecty << endl;
    cout << select_color_hsv << endl;
    destroyWindow("select_color");
}
Exemplo n.º 16
0
// Constructs a display window wired to a processing pipeline.
// name:        window title (also the HighGUI window id)
// prcElm:      processing elements available to the pipeline
// pipelineVec: per-pipeline index lists into prcElm
// Registers the static mouse callback with `this` as user data and spawns a
// boost::thread running this object's operator().
DisplayWindow::DisplayWindow(std::string name, std::vector<ProcessingElement*> prcElm, std::vector<std::vector<int > > pipelineVec){
    dirname = "";
    windowName=name;
    // (-1,-1) marks "no active drag/cursor position yet".
    dragStartL = Point(-1,-1);
    dragStartR = Point(-1,-1);
    currentPos = Point(-1,-1);
    leftDrag=false;
    rightDrag=false;
    dragging = false;
    processingElements = prcElm;
    pipelineVector = pipelineVec;
    clickTime = std::chrono::system_clock::now();
    mode=0;
    namedWindow(name);
    setMouseCallback(name, staticMouseCallback, this);
    running = true;
    // NOTE(review): raw owning pointer to the thread — confirm the destructor
    // joins and deletes it.
    t = new boost::thread(boost::ref(*this));
}
Exemplo n.º 17
0
////////////////////////////////////////////////////////////////////
// Panel::ShowImageWithCalibration
// Description: Views image with camera calibration applied if a 
// calibration has been loaded or computed. Saves the image in the 
// release folder as "Calibrated_Image.jpg"
//
// sImgPath:      path of the image to load
// windowTitle:   HighGUI window to create/use
// calibratedImg: unused in this function body
// showImg:       when false the window is created but nothing is shown
// Returns false if the image cannot be loaded, true otherwise.
////////////////////////////////////////////////////////////////////
bool Panel::ShowImageWithCalibration(string sImgPath, string windowTitle, Mat calibratedImg, bool showImg)
{
	// read specified image
	m_pPanel->m_Image = imread(sImgPath, IMREAD_COLOR);

	if (m_pPanel->m_Image.empty()) // Check for invalid input
	{
		ShowMessage("Could not open or find the image");
		return false;
	}

	// resize the image to have 1000 width, keeping the aspect ratio
	//float r = 750.0 / m_pPanel->m_Image.cols;
	//Size dim = Size(750.0, int(m_pPanel->m_Image.rows * r));
	//resize(m_pPanel->m_Image, m_pPanel->m_Image, dim);

	// Find the ROI
	//const Rect roi(0, 0, 650, m_pPanel->m_Image.rows);
	//m_pPanel->m_Image(roi);

	// Calibrate: remap through the precomputed undistortion maps when they
	// exist; otherwise fall back to the raw image (shared Mat header).
	Mat rview;
	if (!m_mainMap1.empty()){
		remap(m_pPanel->m_Image, rview, m_mainMap1, m_mainMap2, INTER_LINEAR);
	}
	else{
		rview = m_pPanel->m_Image;
	}

	// Show the image
	namedWindow(windowTitle, CV_WINDOW_KEEPRATIO);
	if (showImg)
		imshow(windowTitle, rview);

	// Save image
	imwrite("Calibrated_Image.jpg", rview);

	// Set mouse callback to show the color of the point clicked
	// NOTE(review): this passes the address of the m_pPanel pointer itself
	// (a pointer-to-pointer) as user data — confirm onMouseColor
	// dereferences it twice.
	setMouseCallback(windowTitle, onMouseColor, static_cast<void*>(&m_pPanel));

	return true;
}
Exemplo n.º 18
0
// Interactive GrabCut wrapper.
// Shows `img` in a double-sized canvas, lets the user paint foreground
// ('f' mode) / background ('b' mode) seeds through the `wevents` mouse
// callback, and on 'd' writes the binary result `_bin` into `msk`.
// 'r' resets the whole session.
void GrabCut::run(Mat img, Mat &msk)
{
    cout << "run grabcut" << endl;
    _src	= img;
    _cutResultMask = Mat(img.size(), CV_8UC1, Scalar(0));
    _maskStore = Mat(img.size(), CV_8UC1, Scalar(0));
    // Start with every pixel labelled "probable background".
    _mask	= Mat::ones(_src.size(),CV_8UC1)*GC_PR_BGD;
    _bin	= Mat::zeros(_src.size(),CV_8UC1);
    cout << "GC_BGD " << GC_BGD <<endl;				// 0
    cout << "GC_FGD " << GC_FGD <<endl;				// 1
    cout << "GC_PR_BGD " << GC_PR_BGD <<endl;		// 2
    cout << "GC_PR_FGD " << GC_PR_FGD <<endl;		// 3
    _name = "graphcut";
    namedWindow(_name);
    setMouseCallback(_name, wevents,this);
    // The display canvas is twice the source size; the source image sits in
    // the top-left quadrant (roi).
    Rect roi(0,0,_src.cols,_src.rows);
    _dsp = Mat::zeros(_src.rows*2,_src.cols*2,CV_8UC3);
    _src.copyTo(_dsp(roi));
    //_dsp(roi) = _src.clone();
    cout << "loop" << endl;
    while(1)
    {
        imshow(_name,_dsp);
        char c = waitKey(1);				// 
        
        if(c=='d')							// done
        {			
            msk = _bin*1.0;					// output (expression copy of _bin)
            break;
        }
        else if(c=='f') _mode = GC_FGD;		// forground mode
        else if(c=='b') _mode = GC_BGD;		// background mode
        else if(c=='r')						// reset
        {
            _src.copyTo(_dsp(roi));			// restore the displayed image
            // Mat = Scalar assignment: fills every pixel with GC_PR_BGD.
            _mask	= GC_PR_BGD;
            _gcut	= GC_PR_BGD;
            show();
        }
    }
    destroyWindow(_name);
}
Exemplo n.º 19
0
void MainWindow::encrypt()
{
    isEncryption = true;

    QString text = QInputDialog::getText(this, "Password",
                     "Please enter your password for encryption",
                      QLineEdit::Password, QString());

    string pwd = string((const char *)text.toLocal8Bit());
    MD5 md5(pwd);
    key = md5.getDigest();

    _dst = _src.clone();
    zone = Mat::zeros(_src.size(), CV_8UC1);

    setMouseCallback("Image", &mouseHandler, this);
    imshow("Image", _dst);
    waitKey(0);
    destroyAllWindows();
}
Exemplo n.º 20
0
//==============================================================================
int main(int argc,char** argv)
{
  //parse cmd line options
  if(parse_help(argc,argv)){
    cout << "usage: ./annotate [-v video] [-m muct_dir] [-d output_dir]" 
     << endl; return 0;
  }
  string odir = parse_odir(argc,argv);
  string ifile; int type = parse_ifile(argc,argv,ifile);
  string fname = odir + "annotations.yaml"; //file to save annotation data to

  //get data
  namedWindow(annotation.wname);  
  if(type == 2){ //MUCT data
    string lmfile = ifile + "muct-landmarks/muct76-opencv.csv";
    ifstream file(lmfile.c_str()); 
    if(!file.is_open()){
      cerr << "Failed opening " << lmfile << " for reading!" << endl; return 0;
    }
    string str; getline(file,str);
    while(!file.eof()){
      getline(file,str); if(str.length() == 0)break;
      muct_data d(str,ifile); if(d.name.length() == 0)continue;
      annotation.data.imnames.push_back(d.name);
      annotation.data.points.push_back(d.points);
    }
    file.close();
    annotation.data.rm_incomplete_samples();
  }else{
    //open video stream
    VideoCapture cam; 
    if(type == 1)cam.open(ifile); else cam.open(1);
    if(!cam.isOpened()){
      cout << "Failed opening video file." << endl
       << "usage: ./annotate [-v video] [-m muct_dir] [-d output_dir]" 
       << endl; return 0;
    }
    //get images to annotate
    annotation.set_capture_instructions();
    while(cam.get(CV_CAP_PROP_POS_AVI_RATIO) < 0.999999){
      Mat im,img; cam >> im; annotation.image = im.clone(); 
      annotation.draw_instructions();
      imshow(annotation.wname,annotation.image); int c = waitKey(10);
      if(c == 'q')break;
      else if(c == 's'){
    int idx = annotation.data.imnames.size(); char str[1024]; 
    if     (idx < 10)sprintf(str,"%s00%d.png",odir.c_str(),idx);
    else if(idx < 100)sprintf(str,"%s0%d.png",odir.c_str(),idx);
    else               sprintf(str,"%s%d.png",odir.c_str(),idx);
    imwrite(str,im); annotation.data.imnames.push_back(str);
    im = Scalar::all(255); imshow(annotation.wname,im); waitKey(10);
      }
    }
    if(annotation.data.imnames.size() == 0)return 0;
    annotation.data.points.resize(annotation.data.imnames.size());

    //annotate first image
    setMouseCallback(annotation.wname,pp_MouseCallback,0);
    annotation.set_pick_points_instructions();
    annotation.set_current_image(0);
    annotation.draw_instructions();
    annotation.idx = 0;
    while(1){ annotation.draw_points();
      imshow(annotation.wname,annotation.image); if(waitKey(0) == 'q')break;
    }
    if(annotation.data.points[0].size() == 0)return 0;
    annotation.replicate_annotations(0);
  }
  save_ft(fname.c_str(),annotation.data);
  
  //annotate connectivity
  setMouseCallback(annotation.wname,pc_MouseCallback,0);
  annotation.set_connectivity_instructions();
  annotation.set_current_image(0);
  annotation.draw_instructions();
  annotation.idx = 0;
  while(1){ annotation.draw_connections(); 
    imshow(annotation.wname,annotation.image); if(waitKey(0) == 'q')break;
  }
  save_ft(fname.c_str(),annotation.data); 

  //annotate symmetry
  setMouseCallback(annotation.wname,ps_MouseCallback,0);
  annotation.initialise_symmetry(0);
  annotation.set_symmetry_instructions();
  annotation.set_current_image(0);
  annotation.draw_instructions();
  annotation.idx = 0; annotation.pidx = -1;
  while(1){ annotation.draw_symmetry(); 
    imshow(annotation.wname,annotation.image); if(waitKey(0) == 'q')break;
  }
  save_ft(fname.c_str(),annotation.data); 

  //annotate the rest
  if(type != 2){
    setMouseCallback(annotation.wname,mv_MouseCallback,0);
    annotation.set_move_points_instructions();
    annotation.idx = 1; annotation.pidx = -1;
    while(1){
      annotation.set_current_image(annotation.idx);
      annotation.draw_instructions();
      annotation.set_clean_image();
      annotation.draw_connections();
      imshow(annotation.wname,annotation.image); 
      int c = waitKey(0);
      if     (c == 'q')break;
      else if(c == 'p'){annotation.idx++; annotation.pidx = -1;}
      else if(c == 'o'){annotation.idx--; annotation.pidx = -1;}
      if(annotation.idx < 0)annotation.idx = 0;
      if(annotation.idx >= int(annotation.data.imnames.size()))
    annotation.idx = annotation.data.imnames.size()-1;
    }
  }
  save_ft(fname.c_str(),annotation.data); destroyWindow("Annotate"); return 0;
}
Exemplo n.º 21
0
// Interactive zoom viewer: draws a d x d ROI around a movable point and
// pastes a z-times nearest-neighbour magnification of it into one of the
// four image corners. Keys: 'r' cycles the corner, 's' saves "out.png",
// 'q' quits. On exit `dst` receives the last rendered view.
void guiZoom(InputArray src, OutputArray dst)
{
	const int width = src.size().width;
	const int height = src.size().height;

	string wname = "Zoom";
	namedWindow(wname);
	//Point mpt = Point(width/2, height/2);
	Point mpt = Point(410, 230);
	int d = 40; 
	int z = 7;

	createTrackbar("x", wname,&mpt.x, width-1);
	createTrackbar("y", wname,&mpt.y, height-1);
	createTrackbar("d", wname,&d, min(width,height)-1);
	createTrackbar("zoom", wname,&z, 20);
	int key = 0;

	setMouseCallback(wname,onMouse,&mpt);
	
	Mat dest;
	Mat input = src.getMat();

	int mode = 0;
	while(key!='q')
	{
		input.copyTo(dest);
		z = max(z,1);

		// ROBUSTNESS FIX: clamp the ROI inside the image — dest(rct) below
		// throws if the rectangle sticks out past the border (e.g. when the
		// point is dragged near the edge or d is enlarged).
		d = max(d, 1);
		mpt.x = max(0, min(mpt.x, width - d));
		mpt.y = max(0, min(mpt.y, height - d));

		Rect rct = Rect(mpt.x, mpt.y, d,d);
		Scalar color = Scalar(0,0,255);
		int thick = 2;

		rectangle(dest, rct, color, thick);

		// Magnify the ROI with nearest-neighbour so pixels stay crisp.
		Mat crop;
		dest(rct).copyTo(crop);
		Mat res;
		resize(crop,res, Size(z*d,z*d), 0,0, INTER_NEAREST);
		
		// Paste the zoomed patch into the corner selected by `mode`
		// (0=TL, 1=TR, 2=BR, 3=BL; mode 4 shows no patch).
		if(res.cols <= dest.cols && res.rows <= dest.rows)
		{
			if(mode==0) res.copyTo(dest(Rect(0, 0 ,res.size().width, res.size().height)));
			else if(mode==1) res.copyTo(dest(Rect(dest.cols-1-res.size().width, 0, res.size().width, res.size().height)));
			else if(mode==2) res.copyTo(dest(Rect(dest.cols-1-res.size().width, dest.rows-1-res.size().height, res.size().width, res.size().height)));
			else if(mode==3) res.copyTo(dest(Rect(0, dest.rows-1-res.size().height, res.size().width, res.size().height)));
		}

		imshow(wname,dest);
		key = waitKey(1);

		// Keep the trackbars in sync with mouse-driven moves.
		setTrackbarPos("x",wname,mpt.x);
		setTrackbarPos("y",wname,mpt.y);
		if(key=='r')
		{
			mode++;
			if(mode>4) mode=0;
		}
		if(key=='s')
		{
			imwrite("out.png", dest);
		}
	}

	dest.copyTo(dst);
	destroyWindow(wname);
}
Exemplo n.º 22
0
// Analysis thread: template-matches a 50x50 patch (chosen by a left mouse
// click, or the frame centre by default) against each incoming frame, then
// displays the best-match location with crosshair lines and debug text.
void *image_show( void *)        /*analiza imagem*/
{
    Mat frameCopy;
    Mat frameAnalize;
    Mat result;
    mouseInfo.event=-1;
    sleep(1);
    timer timer_image_show;
    while(1)
    {

        timer_image_show.a();
        // Take a snapshot of the shared frame under the mutex.
        pthread_mutex_lock(&in_frame);
        frameCopy=frame;
        pthread_mutex_unlock(&in_frame);

        
        // Left click (away from the border) re-selects the 50x50 template
        // centred on the click position.
        if(mouseInfo.x > 26 && mouseInfo.y >26 && mouseInfo.event==EVENT_LBUTTONDOWN)
        {
            Cerro;
            printf("Change! \n");
            Rect myDim(mouseInfo.x-25,mouseInfo.y-25, 50, 50);
            frameAnalize = frameCopy(myDim).clone();     
            frameAnalize.copyTo(frameAnalize);
        }
        // No mouse event yet: use a default template at the frame centre.
        else if(mouseInfo.event == -1)
        {
            Rect myDim(frameCopy.cols/2,frameCopy.rows/2, 50, 50);
            frameAnalize = frameCopy(myDim);     
            frameAnalize.copyTo(frameAnalize);
            mouseInfo.event=-2;   // sentinel: default template already taken
        }
        
        /// Create the result matrix
        // NOTE(review): Mat::create takes (rows, cols) — the arguments here
        // look swapped; presumably harmless because matchTemplate reallocates
        // `result` to the correct size. Confirm before relying on it.
        int result_cols =  frameCopy.cols - frameAnalize.cols + 1;
        int result_rows = frameCopy.rows - frameAnalize.rows + 1;
        result.create( result_cols, result_rows, CV_32FC1 );

        /// Do the Matching and Normalize
        int match_method=1; //1-5
        matchTemplate( frameCopy, frameAnalize, result, match_method );
        normalize( result, result, 0, 1, NORM_MINMAX, -1, Mat() );

        /// Localizing the best match with minMaxLoc
        double minVal; double maxVal; Point minLoc; Point maxLoc;
        Point matchLoc;
        minMaxLoc( result, &minVal, &maxVal, &minLoc, &maxLoc, Mat() );

        /// For SQDIFF and SQDIFF_NORMED, the best matches are lower values. For all the other methods, the higher the better
        if( match_method  == CV_TM_SQDIFF || match_method == CV_TM_SQDIFF_NORMED )
            { matchLoc = minLoc; }
        else
            { matchLoc = maxLoc; }
        
        /// make retangles
        rectangle( frameCopy, matchLoc, Point( matchLoc.x + frameAnalize.cols , matchLoc.y + frameAnalize.rows ), Scalar::all(0), 2, 8, 0 );
        rectangle( result, matchLoc, Point( matchLoc.x + frameAnalize.cols , matchLoc.y + frameAnalize.rows ), Scalar::all(0), 2, 8, 0 );

        /// make a dif with the original and the matched
        Rect myDim2(matchLoc.x,matchLoc.y,50 , 50);
        Mat frameAnalizado = frameCopy(myDim2).clone(); 
        //Mat subt = frameAnalize - frameAnalizado;

        /// Make a simple text to debug
        char str[256];
        sprintf(str, "x:%d/y:%d", matchLoc.x+25, matchLoc.y+25);
        putText(frameCopy, str, cvPoint(30,30), FONT_HERSHEY_COMPLEX_SMALL, 0.8, cvScalar(200,200,250), 1, CV_AA);

        sprintf(str, "maxVal:%.8f/minVal:%.8f", maxVal, minVal);
        putText(frameCopy, str, cvPoint(30,60), FONT_HERSHEY_COMPLEX_SMALL, 0.6, cvScalar(200,200,250), 1, CV_AA);

        //draw lines crossing at the match centre
        //pthread_mutex_lock(&in_window);
        line(frameCopy, Point (0,matchLoc.y+25), Point (frameCopy.cols,matchLoc.y+25), cvScalar(200,200,250), 1, 8, 0);
        line(frameCopy, Point (matchLoc.x+25,0), Point (matchLoc.x+25,frameCopy.rows), cvScalar(200,200,250), 1, 8, 0);
        //line(frameCopy, Point (0,0), Point (frameCopy.cols,frameCopy.rows), cvScalar(200,200,250), 1, 8, 0);

        /// Show de imgs
        imshow("image_show",frameCopy);
        namedWindow("image_show", CV_WINDOW_NORMAL); 
        setMouseCallback("image_show", CallBackFunc, NULL);
        
        imshow("analize",frameAnalize);
        namedWindow("analize", CV_WINDOW_NORMAL);
        
        //imshow("result",result);
        //namedWindow("result", CV_WINDOW_NORMAL); 
        
        imshow("analizado",frameAnalizado);
        namedWindow("analizado", CV_WINDOW_NORMAL); waitKey(30);
        
        //imshow("sub",subt);
        //namedWindow("sub", CV_WINDOW_NORMAL); 
        Caviso;  printf("Fps do streaming: %.2f\n",1/timer_image_show.b()); //end_fps();
        Caviso;  printf("tempo de image_show: %f s \n",timer_image_show.b());
        waitKey(30);
        //pthread_mutex_unlock(&in_window);
        
    }
    // Unreachable: the loop above never exits.
    Cerro; printf("Image_show Down !\n");
    return NULL;
}
Exemplo n.º 23
0
void TrackFace::on_drawKeypoints_clicked()
{
    int nFeatures=128;
    TrackFace::capture.open(0);

    string windowName="Draw Keypoints";
    cv::namedWindow(windowName.c_str(), cv::WINDOW_AUTOSIZE);
    cv::moveWindow(windowName.c_str(), window_x, window_y);

    featureExtractor_state=SIFT_MODE;

    while (true)
    {
        cv::Mat frame, buffer;
        if (!capture.isOpened()) break;

        capture >> buffer;
        cv::resize(buffer, frame,Size(buffer.cols/2,buffer.rows/2),0,0,INTER_LINEAR);
        setMouseCallback(windowName.c_str(), drawKeypointsCallBack, NULL);

        switch(featureExtractor_state)
        {
        case SIFT_MODE:
        {
            SiftFeatureDetector detector( nFeatures );
            std::vector<KeyPoint> keypoints;

            detector.detect(frame, keypoints);
            cv::Mat img_keypoints;
            drawKeypoints(frame, keypoints, img_keypoints, Scalar::all(-1), DrawMatchesFlags::DEFAULT );
            putText(img_keypoints, "SIFT MODE, right click to SURF MODE", Point(10, 20), FONT_HERSHEY_PLAIN, 1.0, CV_RGB(255,0,0),2.0);

            imshow(windowName.c_str(), img_keypoints);

            break;
        }
        case SURF_MODE:
        {
            SurfFeatureDetector detector( nFeatures );
            std::vector<KeyPoint> keypoints;

            detector.detect(frame, keypoints);
            cv::Mat img_keypoints;
            drawKeypoints(frame, keypoints, img_keypoints, Scalar::all(-1), DrawMatchesFlags::DEFAULT );

            putText(img_keypoints, "SURF MODE, left click to SIFT MODE", Point(10, 20), FONT_HERSHEY_PLAIN, 1.0, CV_RGB(255,0,0),2.0);

            imshow(windowName.c_str(), img_keypoints);

            break;
        }
        default: break;
        }

        while (cv::waitKey(100)==27)
        {
            capture.release();
            cv::destroyWindow(windowName.c_str());
        }
    }
}
Exemplo n.º 24
0
void TrackFace::on_grabPhoto_clicked()
{
    // Captures up to 20 face snapshots for a new subject and registers them
    // in the training database. Grabbing is toggled by mouse clicks handled
    // in grabFaceCallBack (which drives grab_state); ESC aborts the session.

    // new window to collect information
    /*

    transmit data between form is tricky.
    grabForm.show();
    string message=grabForm.getMsg();

    cout << message << endl;
    */

    string name=ui->grabName->text().toStdString();
    cout << name << endl;

    // Create/locate the per-subject image directory.
    string namepath=iofunctions.addName(name, fn_namedb, fn_path);
    int frames=1;                         // 1-based index of the next snapshot

    // New label = (largest existing label) + 1, or 0 for an empty database.
    int label=0;
    if (!labels.empty()) label=labels[labels.size()-1]+1;

    // Face tracking
    TrackFace::capture.open(0);

    string windowName="Grab Face";
    cv::namedWindow(windowName.c_str(), cv::WINDOW_AUTOSIZE);
    moveWindow(windowName.c_str(), window_x, window_y);

    // Register the grab-toggle callback once instead of on every frame.
    setMouseCallback(windowName.c_str(), grabFaceCallBack, NULL);

    grab_state=GRABBING_OFF;

    while(true)
    {
        cv::Mat frame, buffer;
        if (!capture.isOpened()) break;   // camera released on close/abort

        capture >> buffer;
        cv::resize(buffer, frame,Size(buffer.cols/2,buffer.rows/2),0,0,INTER_LINEAR);

        switch(grab_state)
        {
        case GRABBING_OFF:
        {
            // Idle: show the live preview with a progress banner.
            string text=format("Grabbing your face No. %d", frames);
            putText(frame, text, Point(frame.cols/2-250, 100), FONT_HERSHEY_PLAIN, 1.2, CV_RGB(255,0,0),2.0);
            cv::imshow(windowName.c_str(), frame);
            break;
        }
        case GRABBING_ON:
        {
            vector<cv::Rect_<int> > faces=haar_faces(frame);

            if (faces.size()>0)
            {
                // Use the largest detection, normalize its size, save it, and
                // append it to the training index.
                size_t n=findMaxFace(faces);

                Mat resizedFace=resizeFace(frame(faces[n]), im_width, im_height);

                // Files are suffixed _A.._T for frames 1..20.
                string imgPath=namepath+name+"_"+(char)(frames+'A'-1)+".jpg";
                cv::imwrite(imgPath,resizedFace);
                iofunctions.addToTrain(fn_images,"resources/"+name+"/"+name+"_"+(char)(frames+'A'-1)+".jpg", label);

                frames++;

                if (frames>20)
                {
                    grab_state=GRABBING_CLOSE;   // session complete
                }
                else grab_state=GRABBING_OFF;    // wait for the next click

                drawFace(frame, faces[n], name);
            }

            cv::imshow(windowName.c_str(), frame);
            break;
        }
        case GRABBING_CLOSE :
        {
            // Done: release the camera; the isOpened() check exits the loop.
            capture.release();
            cv::destroyWindow(windowName.c_str());
            break;
        }
        default: break;
        }

        // ESC aborts the session early.
        if (cv::waitKey(5)==27)
        {
            capture.release();
            cv::destroyWindow(windowName.c_str());
        }
    }
}
Exemplo n.º 25
0
void Tracker::track(int nsamples, double dynamicp)
{
	// Particle-filter object tracker: plays frames from *capture, lets the
	// user pause ('t') and drag a box around the target, then tracks the
	// target and writes every displayed frame to *writer.
	//
	// nsamples — number of particles; dynamicp — dynamics parameter, both
	// forwarded to the Particles filter.
	Mat frame;
	Mat hsvFrame;
	bool finishInnerLoop = false;
	Particles pf(nsamples, dynamicp);
	bool wasInit = false;	// true once the user has selected a target region
  
	namedWindow("fr", CV_WINDOW_KEEPRATIO);
	// Trackbar for live tuning of the filter's measurement parameter.
	createTrackbar("kapa", "fr", &(pf.measure_param), 1000, NULL);
	// Mouse handler fills px1/py1/px2/py2 and the filling/filled flags.
	setMouseCallback("fr", wrappedOnMouse, (void*)this);

	do{
		(*capture) >> frame;

		if(!frame.empty()){

			if(wasInit){
				// One filter cycle: resample -> predict -> measure -> update.
				// NOTE(review): OpenCV captures typically deliver BGR frames,
				// so CV_RGB2HSV swaps R and B here; the tracker stays
				// self-consistent because init_samples below uses the same
				// conversion — confirm this is intentional.
				cvtColor(frame, hsvFrame , CV_RGB2HSV);
				pf.resample();
				pf.predict();
				pf.measure(hsvFrame);
				pf.new_state(hsvFrame);

				// Draw every particle (red outline) and the mean state
				// (filled blue circle).
				for(int i=0 ; i<pf.pnum ; i++) {
					circle(frame, Point(pf.particles[i].pos_x, pf.particles[i].pos_y), 5, 
					       Scalar(0,0,255));
					circle(frame, Point((int)pf.mean_pos_x, (int)pf.mean_pos_y), 5, 
						   Scalar(255,0,0), -1);
					// rectangle(frame, Point(pf.particles[i].pos_x + (pf.width>>1), pf.particles[i].pos_y + (pf.height>>1)),
					//           Point(pf.particles[i].pos_x - (pf.width>>1), pf.particles[i].pos_y - (pf.height>>1)),
					//           Scalar(0,255,0));
				}
			}

			imshow("fr", frame);

			finishInnerLoop = false;
			switch(waitKey(2) & 255){
				case 't': // pause playback and let the user mark the object with the mouse

					filling = false;
					filled = false;
						
					while(!finishInnerLoop){
						Mat frameCopy = frame.clone();

						// draw the selection rectangle while the mouse is dragged
						if(filling)
							rectangle(frameCopy, Point(px1, py1), Point(px2, py2), Scalar(255), 2);

						if(filled){
							filling = false;
							filled = false;
						}

						imshow("fr", frameCopy);

						// 't' or SPACE resumes playback; a non-degenerate
						// selection (re)initializes the particle filter.
						switch(waitKey(2) & 255){
							case 't':
							case ' ':
								finishInnerLoop = true;
								Rect rct(Point(px1,py1), Point(px2,py2));
								if(rct.width <= 0 || rct.height <= 0)
									break;
								cvtColor(frame, hsvFrame , CV_RGB2HSV);
								pf.init_samples(hsvFrame, rct);
								wasInit = true;
								break;
						}
					}
					break;
			}

			writer->write(frame);
		}
	} while( !frame.empty() );
} 
Exemplo n.º 26
0
int main()
{
	
	int N = 2;
	Mat imageStack[N];
	Mat grayStack[N];
	Mat focal_Measure[N];
	

	Mat lap_x;
	Mat lap_y;


	
	// kernel for boosting intensity of modified laplacian
	float kernel[3][3] = {{1,1,1}, {1,1,1}, {1,1,1}};
	Mat boostingFilter = Mat(3, 3, CV_32FC1, kernel);
	//Load the image stack and convert it to grayscale.
	//TODO !
	//Loaded images in unint8. Consider changing them to float during grayscale
	//Conversion for arithmetic precision. 
	for(int i = 0; i < N; i++){
		char buffer[50];
		sprintf(buffer,"align/img%d.jpg",i+1);
		imageStack[i] = imread(buffer);
		
		if(imageStack[i].cols > 1000 || imageStack[i].rows > 1000){
		
			float scale = 1.0/(max(imageStack[i].rows,imageStack[i].cols)/1000+1);
			Mat resizedImg;
			
			resize(imageStack[i],resizedImg,Size(0,0),scale,scale,CV_INTER_AREA);
			imageStack[i] = resizedImg;
		}
		
		
		if(!imageStack[i].data){
			cerr << "Could not open or find the image" << endl;
			exit(0);
		}
		
		if(i>0){
			Mat imgPrev;
			Mat imgNext;
			
			alignToPrevImage(imageStack[i-1],imageStack[i],imgPrev,imgNext);
			imageStack[i-1] = imgPrev;
			imageStack[i] = imgNext;
			
			/*
			namedWindow("img1",CV_WINDOW_NORMAL);
			imshow("img1",imgPrev);
			namedWindow("img2",CV_WINDOW_NORMAL);
			imshow("img2",imgNext);
			cvWaitKey(0);
			*/
		}
		
	}
	
	for(int i = 0; i < N; i++){
		
		char buffer[50];
		sprintf(buffer,"align/img%d.jpg",i+1);
		int rows;
		int cols;

		rows=focal_Measure[0].rows;
		cols=focal_Measure[0].cols;
		
		//Create a new Gray scale image

		Mat grayImg;// = toGray(imageStack[i] );
		Mat grayImgFloat;
		cvtColor(imageStack[i],grayImg,CV_BGR2GRAY,1);
		grayImg.convertTo(grayImgFloat,CV_32FC1,1/255.0);
		grayStack[i] = grayImgFloat;
		
		namedWindow(buffer,WINDOW_AUTOSIZE);
		imshow(buffer,grayImgFloat);
		waitKey(0);

		

		//sid
		Mat gray_image;
 		//cvtColor( imageStack[i], gray_image, CV_BGR2GRAY );
 		//grayStack[i]=gray_image;

		lap_x=lap_dir(grayStack[i],0);
		lap_y=lap_dir(grayStack[i],1);
		


		lap_x=abs(lap_x);
		lap_y=abs(lap_y);
		
		

		Mat modLaplacian;
		addWeighted(lap_x,1,lap_y,1,0.0,modLaplacian);
		

		// commented out-- Sid
		// Size ksize(9,9);
		// float sigma = 10.0;
		// Mat modLapSmooth;
		// GaussianBlur(modLaplacian,modLapSmooth,ksize,sigma);

		

		//siddhartha - 2-May

		//locally boosting all pixel intensities based on a 3X3 neighborhood
		Mat boosted;
		filter2D(modLaplacian, boosted, -1, boostingFilter);

		
		//averaging values of the focal measure: average filter preferred ouver gaussian filter as gaussian does not resolve the issue of noisy patches
		Size ksize(19,19);
		Mat modLapSmooth;
		boxFilter(boosted,modLapSmooth,-1,ksize);
		

		focal_Measure[i]=modLapSmooth;
		
		continue;
		
				
		namedWindow(buffer,WINDOW_AUTOSIZE);
		imshow(buffer,modLapSmooth);
		waitKey(0);

	
	}
	
	for(int i = 0; false && i < N ; i++){
		int u = 100;
		int v = 200;
		cout<< focal_Measure[i].at<float>(u,v)<<endl;
	}
	

	
	int rows;
	int cols;

	rows=focal_Measure[0].rows;
	cols=focal_Measure[0].cols;
	
	cout<<focal_Measure[0].rows<<endl;
	cout<<focal_Measure[0].cols<<endl;

	Mat focusMap= Mat::zeros( rows, cols, CV_8UC1);
	int  maxK;
	double maxVal;
	double tempVal;
	

	for(int y = 0; y < rows; y++)
	{
	  	for(int x = 0; x < cols; x++)
	  	{
			
	  		//Iterate over all the images on the stack and get the one in focus.
	  		maxK=0;
	  		maxVal=focal_Measure[0].at<float>(y,x);
	 		for(int k =1; k< N; k++)
	 		{
	 			tempVal=focal_Measure[k].at<float>(y,x);
	 			// cout<<tempVal<<endl;
	 			if(tempVal>maxVal)
	 			{
	 				maxK=k;
	 				maxVal=tempVal;
	 			}
	  		}
	  		focusMap.at<uchar>(y,x)=maxK;		//TODO take out this scale factor. For visialization and debug	
	  	}
			
 	}

 	//commenting out- Sid ; taken care of smoothing by forming modLapsmooth
	//Focus does not change rapidly among objects. 
	//Smooth it!
	// Size ksize(9,9);
	// float sigma = 12.0;
	// Mat focusMapSmooth;
	// GaussianBlur(focusMap,focusMapSmooth,ksize,sigma);
	
	namedWindow("focusMap",WINDOW_AUTOSIZE);
	imshow("focusMap",focusMap);
	waitKey(0);

	namedWindow("result",WINDOW_AUTOSIZE);
	setMouseCallback("result", CallBackFunc, NULL);
	imshow("result",imageStack[0]);

	int previos=0;
	int current=0;
	int step;
	int i;
	//press ctrl+ mouse click to exit
	while(1)
	{
		while (x_coord == -1 && y_coord == -1) cvWaitKey(100); 

		if(x_coord==0)
			return 0;
	
		//cout<<"coords found:  "<<endl<<x_coord<<endl<<y_coord<<endl;
		current=focusMap.at<uchar>(y_coord,x_coord);
		
		step=current-previos;

		if(step<1)
		{
			for(i=previos; i>=current;i--)
			{
				imshow("result",imageStack[i]);	
				cvWaitKey(2);
			}
			
		}
			
		else
		{
			for(i=previos; i<=current; i++)
			{
				imshow("result",imageStack[i]);	
				cvWaitKey(2);
			}
			
			
		}
			

		//cout<<endl<<step<<endl;

		previos=current;

		
		x_coord=-1;
		y_coord =-1;	
		while (x_coord == -1 && y_coord == -1) cvWaitKey(100); 
	}

	return 0;
}
Exemplo n.º 27
0
bool ExtCamControl::start(JSON* pJson)
{
	// Brings up the external-camera pipeline — camera stream feeding a
	// marker detector and a background/foreground detector — plus a
	// fullscreen UI, then runs the UI loop until handleKey() clears m_bRun.
	// Returns true on a clean shutdown.
	g_pExtCamControl = this;

	//Init Camera
	m_pCamFront = new _CamStream();
	CHECK_FATAL(m_pCamFront->init(pJson, "FRONTL"));

	//Init Marker Detector
	m_pMD = new _MarkerDetector();
	CHECK_FATAL(m_pMD->init(pJson, "RED_CIRCLE"));
	m_pMD->m_pCamStream = m_pCamFront;
	m_pCamFront->m_bHSV = true;	// marker detector consumes HSV frames

	//Init BgFg Detector
	m_pBgFgD = new _BgFgDetector();
	CHECK_FATAL(m_pBgFgD->init(pJson, ""));
	m_pBgFgD->m_pCamStream = m_pCamFront;

	//Init Autopilot
/*	m_pAP = new _AutoPilot();
	CHECK_FATAL(m_pAP->setup(&m_Json, ""));
	m_pAP->init();
	m_pAP->setCamStream(m_pCamFront, CAM_FRONT);
	m_pAP->m_pOD = m_pOD;
	m_pAP->m_pFD = m_pFD;
//	m_pMD = m_pAP->m_pCamStream[CAM_FRONT].m_pCam->m_pMarkerDetect;
*/

	//Connect to Mavlink
/*	m_pMavlink = new _MavlinkInterface();
	CHECK_FATAL(m_pMavlink->setup(&m_Json, "FC"));
	CHECK_INFO(m_pMavlink->open());
*/

	//Main window frame buffers
	m_pShow = new CamFrame();
	m_pMat = new CamFrame();
	m_pMat2 = new CamFrame();

	//Init UI Monitor
	m_pUIMonitor = new UIMonitor();
	m_pUIMonitor->init("OpenKAI demo", pJson);
	m_pUIMonitor->addFullFrame(m_pShow);

	//Start worker threads
	m_pCamFront->start();
//	m_pMavlink->start();
//	m_pDF->start();
	m_pMD->start();
//	m_pAP->start();
	m_pBgFgD->start();

	//UI thread: fullscreen window with mouse interaction
	m_bRun = true;
	namedWindow(APP_NAME, CV_WINDOW_NORMAL);
	setWindowProperty(APP_NAME, CV_WND_PROP_FULLSCREEN, CV_WINDOW_FULLSCREEN);
	setMouseCallback(APP_NAME, onMouseExtCamControl, NULL);

	while (m_bRun)
	{
//		Mavlink_Messages mMsg;
//		mMsg = m_pMavlink->current_messages;
//		m_pCamFront->m_pCamL->m_bGimbal = true;
//		m_pCamFront->m_pCamL->setAttitude(mMsg.attitude.roll, 0, mMsg.time_stamps.attitude);

		// Prefer the marker-detector view; fall back to the BgFg view.
		if(!showScreenMarkerDetector())
		{
			showScreenBgFgDetector();
		}

		//Handle key input
		m_key = waitKey(30);
		handleKey(m_key);
	}

	//Shut down worker threads
//	m_pAP->stop();
	m_pMD->stop();
//	m_pMavlink->stop();
	m_pBgFgD->stop();

	m_pMD->complete();
	m_pBgFgD->complete();
//	m_pDF->complete();
//	m_pAP->complete();
//	m_pCamFront->complete();
//	m_pMavlink->complete();
//	m_pMavlink->close();

//	delete m_pAP;
//	delete m_pMavlink;
//	delete m_pDF;
	// NOTE(review): m_pMD, m_pBgFgD, m_pShow, m_pMat, m_pMat2 and
	// m_pUIMonitor are never freed here — confirm whether ownership is
	// intended to lie elsewhere.
	delete m_pCamFront;

	// BUG FIX: this function returns bool; the previous `return 0;`
	// reported failure (false) even after a clean shutdown.
	return true;
}
Exemplo n.º 28
0
void *image_show( void *)        /*analiza imagem*/
{
    Mat frameCopy;
    Mat frameAnalize;
    Mat result;
    Point alvo;             // target coord
    Point alvof;            // target coord  with filter
    timer timer_image_show;
    filterOrder1 filter;
    filterOrder1 filterx;
    filterOrder1 filtery;
    mouseInfo.event=-1;
    sleep(1);
    while(1)
    {

        timer_image_show.a();
        pthread_mutex_lock(&in_frame);
        frameCopy=frame;
        pthread_mutex_unlock(&in_frame);

        
        if(mouseInfo.x[0] > 26 && mouseInfo.y[0] >26 && mouseInfo.event==EVENT_LBUTTONDOWN)
        {
            Cerro;
            printf("Change! \n");
            Rect myDim(mouseInfo.x[0]-25,mouseInfo.y[0]-25, 50, 50);
            frameAnalize = frameCopy(myDim).clone();     
            frameAnalize.copyTo(frameAnalize);
        }
        else if(mouseInfo.event == -1)
        {
            Rect myDim(frameCopy.cols/2,frameCopy.rows/2, 50, 50);
            frameAnalize = frameCopy(myDim);     
            frameAnalize.copyTo(frameAnalize);
            mouseInfo.event=-2;
        }
        
        /// Create the result matrix
        int result_cols =  frameCopy.cols - frameAnalize.cols + 1;
        int result_rows = frameCopy.rows - frameAnalize.rows + 1;
        result.create( result_cols, result_rows, CV_32FC1 );

        /// Do the Matching and Normalize
        int match_method=1; //1-5
        matchTemplate( frameCopy, frameAnalize, result, match_method );
        normalize( result, result, 0, 1, NORM_MINMAX, -1, Mat() );

        /// Localizing the best match with minMaxLoc
        double minVal; double maxVal; Point minLoc; Point maxLoc;
        Point matchLoc;
        minMaxLoc( result, &minVal, &maxVal, &minLoc, &maxLoc, Mat() );

        /// For SQDIFF and SQDIFF_NORMED, the best matches are lower values. For all the other methods, the higher the better
        if( match_method  == CV_TM_SQDIFF || match_method == CV_TM_SQDIFF_NORMED )
            { matchLoc = minLoc; }
        else
            { matchLoc = maxLoc; }
        
        /// make a dif with the original and the matched
        Rect myDim2(matchLoc.x,matchLoc.y,50 , 50);
        Mat frameAnalizado = frameCopy(myDim2).clone(); 
        // Mat subt = frameAnalize - frameAnalizado;

        /// cut the image to make something more.... cool
        Rect roi1( Point( frameCopy.cols-50, 0 ), frameAnalize.size() );
        frameAnalize.copyTo( frameCopy( roi1 ) );
        Rect roi2( Point( frameCopy.cols-50, 50 ), frameAnalize.size() );
        frameAnalizado.copyTo( frameCopy( roi2 ) );

        // Translate matchCoord to Point
        alvo.x=matchLoc.x+25;
        alvo.y=matchLoc.y+25;
        alvof.x=filterx.filter(alvo.x,timer_image_show.end()*3);
        alvof.y=filtery.filter(alvo.y,timer_image_show.end()*3);

        /// Make the image colorful again
        cvtColor(frameCopy, frameCopy, CV_GRAY2RGB);

        /// make retangles or circles
        #if 0
            rectangle( frameCopy, matchLoc, Point( matchLoc.x + frameAnalize.cols , matchLoc.y + frameAnalize.rows ), Scalar::all(0), 2, 8, 0 );
            rectangle( result, matchLoc, Point( matchLoc.x + frameAnalize.cols , matchLoc.y + frameAnalize.rows ), Scalar::all(0), 2, 8, 0 );
        #else
            circle(frameCopy, alvof, 3, cvScalar(0,0,255), 1, 8, 0);
        #endif
        
        /// Make a simple text to debug
        char str[256];
        sprintf(str, "x:%d/y:%d", alvof.x, alvof.y);
        putText(frameCopy, str, cvPoint(alvof.x+30,alvof.y-30), FONT_HERSHEY_COMPLEX_SMALL, 0.5, cvScalar(0,0,255), 1, CV_AA);

        sprintf(str, "x:%d/y:%d", alvo.x, alvo.y);
        putText(frameCopy, str, cvPoint(alvo.x+30,alvo.y+30), FONT_HERSHEY_COMPLEX_SMALL, 0.5, cvScalar(205,201,201), 1, CV_AA);

        sprintf(str, "maxVal:%.8f/minVal:%.8f", maxVal, minVal);
        putText(frameCopy, str, cvPoint(30,30), FONT_HERSHEY_COMPLEX_SMALL, 0.6, cvScalar(0,100,0), 1, CV_AA);

        //draw lines     
        line(frameCopy, Point (0,alvo.y), Point (frameCopy.cols,alvo.y), cvScalar(205,201,201), 1, 8, 0);
        line(frameCopy, Point (alvo.x,0), Point (alvo.x,frameCopy.rows), cvScalar(205,201,201), 1, 8, 0);

        imshow("image_show",frameCopy);
        namedWindow("image_show", CV_WINDOW_NORMAL); 
        setMouseCallback("image_show", CallBackFunc, NULL);
        
        //imshow("analize",frameAnalize);
        //namedWindow("analize", CV_WINDOW_NORMAL);
        
        //imshow("result",result);
        //namedWindow("result", CV_WINDOW_NORMAL); 
        
        //imshow("analizado",frameAnalizado);
        //namedWindow("analizado", CV_WINDOW_NORMAL); waitKey(30);
        
        //imshow("sub",subt);
        //namedWindow("sub", CV_WINDOW_NORMAL); 
        Caviso;  printf("Fps do streaming: %.2f\n",1/filter.filter(timer_image_show.b(),5*timer_image_show.b())); //end_fps();
        Caviso;  printf("tempo de image_show: %f s \n",timer_image_show.b());
        waitKey(30);
        //pthread_mutex_unlock(&in_window);
        
    }
    Cerro; printf("Image_show Down !\n");
    return NULL;
}
Exemplo n.º 29
0
int main()
{
    namedWindow("draw");//原图绘制窗口
    Mat src, copy, dst;//dst为库函数输出
    src = imread("test8.jpg");//读入目标图像
    
    copy = src.clone();//拷贝,以后仅对copy更改,不再更改src
    mask = new Mat(src.rows, src.cols, CV_8UC1, Scalar(0));//建立一个原图大小的单通道图像矩阵
    
    imshow("draw", src);
    setMouseCallback("draw", on_mouse, &src);//鼠标事件监测(绘制mask)
    waitKey(0);
    
    namedWindow("mask");
    imshow("mask", *mask);
    waitKey(0);
    myRect edge = getRect(*mask); //将mask包围的最小矩形
    //cout << edge;
    
    
    namedWindow("naive");//库函数跑出来的结果
    //inpaint(copy, *mask, dst, 30, INPAINT_TELEA);//调用库函数,后面两个参数是取样半径和处理方法(方法有两个,分别参照两篇paper)
    process(src,*mask);
    dst=src.clone();
    imshow("naive", dst);
    waitKey(0);
    
    namedWindow("output");
    myRect full(0,dst.rows,0,dst.cols);//myRect(int top, int bottom, int left, int right);此处表示全图
    
    shrink(*mask, full, patches, dst, good);//根据mask在full表示的范围内,把dst中mask覆盖的区域切成小块扔到patches里面,其他区域扔到good里面
    
    
    getSimilar(patches,good,indexMap);
    //对于patches中的每个元素,在good中找到与之最相似的,并将结果存入indexMap中
    
    /*
     set<patch>::iterator itg = good.begin(),itd = patches.begin();
     int cnt = 0;
     for (itd = patches.begin(); itd != patches.end(); itd++)//对于每一个需要打补丁的地方
     {
     double min = 1000000; //INF
     for (itg = good.begin(); itg != good.end(); itg++)//在good里找到和它最相似的的,放到一个map indexMap里
     {
     double crt = sim(*itd, *itg);
     if (crt < min)
     {
     min = crt;
     indexMap[*itd] = *itg;//itd指向dst上需要打补丁的地方 itg指向选出来的补丁
     }
     }
     cout << cnt++ << endl;
     }
     */
    
    
    //cout << patches.size() << endl;
    for (map<patch,patch>::iterator i = indexMap.begin(); i != indexMap.end(); i++)
    {
        //dst为库函数输出的结果
        Mat roi(dst, Rect(i->first.anchor.x, i->first.anchor.y, PATCH_SIZE, PATCH_SIZE));//按照补丁的anchor(左上角点)把这一块设置从roi
        
        //namedWindow("test");
        //namedWindow("test2");
        //imshow("test", i->second.data);
        //cout << i->second.data << endl;
        //waitKey(0);
        
        exertPatch(i->first.anchor, i->second.data ,dst, 0);
        //void exertPatch(Point anchor, Mat &ROI, Mat &dst, int pos)
        //对于patches中需要更改的anchor处的patch,用ROI来对其进行覆盖
        
        //cout << i->second.data << endl;
        exertPatchR(i->first.anchor, i->second.data, dst, 1);
        
        i->second.data.copyTo(roi);//打补丁到roi
        
        //imshow("test2", i->second.data);
        //waitKey(0);
    }
    
    Mat dstROI(dst, Rect(edge.left, edge.top, edge.right - edge.left, edge.bottom - edge.top));
    
    Mat ddst = dstROI.clone();
    //blur(dstROI, ddst, Size(5, 5));
    //ddst.copyTo(dstROI);
    imshow("output", dst);
    waitKey(0);
    delete mask;
    
    return 0;
}
Exemplo n.º 30
0
int main(int argc, char *argv[])
{
    // get input arguments

    string configFile = "";
    string video_file_left = "", video_file_right = "", video_directory = "";
    int starting_frame_number = 0;
    bool enable_gamma = false;
    float random_results = -1.0;

    int last_frame_number = -1;

    int last_playback_frame_number = -2;

    ConciseArgs parser(argc, argv);
    parser.add(configFile, "c", "config", "Configuration file containing camera GUIDs, etc.", true);
    parser.add(show_display, "d", "show-dispaly", "Enable for visual debugging display. Will reduce framerate significantly.");
    parser.add(show_display_wait, "w", "show-display-wait", "Optional argument to decrease framerate for lower network traffic when forwarding the display.");
    parser.add(show_unrectified, "u", "show-unrectified", "When displaying images, do not apply rectification.");
    parser.add(disable_stereo, "s", "disable-stereo", "Disable online stereo processing.");
    parser.add(force_brightness, "b", "force-brightness", "Force a brightness setting.");
    parser.add(force_exposure, "e", "force-exposure", "Force an exposure setting.");
    parser.add(quiet_mode, "q", "quiet", "Reduce text output.");
    parser.add(video_file_left, "l", "video-file-left", "Do not use cameras, instead use this video file (also requires a right video file).");
    parser.add(video_file_right, "t", "video-file-right", "Right video file, only for use with the -l option.");
    parser.add(video_directory, "i", "video-directory", "Directory to search for videos in (for playback).");
    parser.add(starting_frame_number, "f", "starting-frame", "Frame to start at when playing back videos.");
    parser.add(display_hud, "v", "hud", "Overlay HUD on display images.");
    parser.add(record_hud, "x", "record-hud", "Record the HUD display.");
    parser.add(file_frame_skip, "p", "skip", "Number of frames skipped in recording (for playback).");
    parser.add(enable_gamma, "g", "enable-gamma", "Turn gamma on for both cameras.");
    parser.add(random_results, "R", "random-results", "Number of random points to produce per frame.  Can be a float in which case we'll take a random sample to decide if to produce the last one.  Disables real stereo processing.  Only for debugging / analysis!");
    parser.add(publish_all_images, "P", "publish-all-images", "Publish all images to LCM");
    parser.parse();

    // parse the config file
    if (ParseConfigFile(configFile, &stereoConfig) != true)
    {
        fprintf(stderr, "Failed to parse configuration file, quitting.\n");
        return -1;
    }

    if (video_file_left.length() > 0
        && video_file_right.length() <= 0) {

        fprintf(stderr, "Error: for playback you must specify both "
            "a right and left video file. (Only got a left one.)\n");

        return -1;
    }

     if (video_file_left.length() <= 0
        && video_file_right.length() > 0) {

        fprintf(stderr, "Error: for playback you must specify both "
            "a right and left video file. (Only got a right one.)\n");

        return -1;
    }

    recording_manager.Init(stereoConfig);

    // attempt to load video files / directories
    if (video_file_left.length() > 0) {
        if (recording_manager.LoadVideoFiles(video_file_left, video_file_right) != true) {
            // don't have videos, bail out.
            return -1;
        }
    }

    if (video_directory.length() > 0) {
        if (recording_manager.SetPlaybackVideoDirectory(video_directory) != true) {
            // bail
            return -1;
        }
    }

    recording_manager.SetQuietMode(quiet_mode);
    recording_manager.SetPlaybackFrameNumber(starting_frame_number);



    uint64 guid = stereoConfig.guidLeft;
    uint64 guid2 = stereoConfig.guidRight;

    // start up LCM
    lcm_t * lcm;
    lcm = lcm_create (stereoConfig.lcmUrl.c_str());


    unsigned long elapsed;

    Hud hud;


    // --- setup control-c handling ---
    struct sigaction sigIntHandler;

    sigIntHandler.sa_handler = control_c_handler;
    sigemptyset(&sigIntHandler.sa_mask);
    sigIntHandler.sa_flags = 0;

    sigaction(SIGINT, &sigIntHandler, NULL);
    // --- end ctrl-c handling code ---

    dc1394error_t   err;
    dc1394error_t   err2;


    // tell opencv to use only one core so that we can manage our
    // own threading without a fight
    setNumThreads(1);

    if (recording_manager.UsingLiveCameras()) {
        d = dc1394_new ();
        if (!d)
            cerr << "Could not create dc1394 context" << endl;

        d2 = dc1394_new ();
        if (!d2)
            cerr << "Could not create dc1394 context for camera 2" << endl;

        camera = dc1394_camera_new (d, guid);
        if (!camera)
        {
            cerr << "Could not create dc1394 camera... quitting." << endl;
            exit(1);
        }

        camera2 = dc1394_camera_new (d2, guid2);
        if (!camera2)
            cerr << "Could not create dc1394 camera for camera 2" << endl;
        // reset the bus
        dc1394_reset_bus(camera);
        dc1394_reset_bus(camera2);

        // setup
        err = setup_gray_capture(camera, DC1394_VIDEO_MODE_FORMAT7_1);
        DC1394_ERR_CLN_RTN(err, cleanup_and_exit(camera), "Could not setup camera");

        err2 = setup_gray_capture(camera2, DC1394_VIDEO_MODE_FORMAT7_1);
        DC1394_ERR_CLN_RTN(err2, cleanup_and_exit(camera2), "Could not setup camera number 2");

        // enable camera
        err = dc1394_video_set_transmission(camera, DC1394_ON);
        DC1394_ERR_CLN_RTN(err, cleanup_and_exit(camera), "Could not start camera iso transmission");
        err2 = dc1394_video_set_transmission(camera2, DC1394_ON);
        DC1394_ERR_CLN_RTN(err2, cleanup_and_exit(camera2), "Could not start camera iso transmission for camera number 2");

        InitBrightnessSettings(camera, camera2, enable_gamma);
    }

    if (show_display) {

        namedWindow("Input", CV_WINDOW_AUTOSIZE | CV_WINDOW_KEEPRATIO);
        namedWindow("Input2", CV_WINDOW_AUTOSIZE | CV_WINDOW_KEEPRATIO);
        namedWindow("Stereo", CV_WINDOW_AUTOSIZE | CV_WINDOW_KEEPRATIO);

        namedWindow("Left Block", CV_WINDOW_AUTOSIZE | CV_WINDOW_KEEPRATIO);
        namedWindow("Right Block", CV_WINDOW_AUTOSIZE | CV_WINDOW_KEEPRATIO);

        namedWindow("Debug 1", CV_WINDOW_AUTOSIZE | CV_WINDOW_KEEPRATIO);
        namedWindow("Debug 2", CV_WINDOW_AUTOSIZE | CV_WINDOW_KEEPRATIO);



        setMouseCallback("Input", onMouse); // for drawing disparity lines
        setMouseCallback("Stereo", onMouseStereo, &hud); // for drawing disparity lines

        moveWindow("Input", stereoConfig.displayOffsetX + 100, stereoConfig.displayOffsetY + 100);
        moveWindow("Stereo", stereoConfig.displayOffsetX + 100, stereoConfig.displayOffsetY + 370);
        moveWindow("Input2", stereoConfig.displayOffsetX + 478, stereoConfig.displayOffsetY + 100);
        moveWindow("Left Block", stereoConfig.displayOffsetX + 900, stereoConfig.displayOffsetY + 100);
        moveWindow("Right Block", stereoConfig.displayOffsetX + 1400, stereoConfig.displayOffsetY + 100);

        moveWindow("Debug 1", stereoConfig.displayOffsetX + 900, stereoConfig.displayOffsetY + 670);
        moveWindow("Debug 2", stereoConfig.displayOffsetX + 1400, stereoConfig.displayOffsetY + 670);

    } // show display

    if (show_display || publish_all_images) {
        // if a channel exists, subscribe to it
        if (stereoConfig.stereo_replay_channel.length() > 0) {
            stereo_replay_sub = lcmt_stereo_subscribe(lcm, stereoConfig.stereo_replay_channel.c_str(), &stereo_replay_handler, &hud);
        }

        if (stereoConfig.pose_channel.length() > 0) {
            mav_pose_t_sub = mav_pose_t_subscribe(lcm, stereoConfig.pose_channel.c_str(), &mav_pose_t_handler, &hud);
        }

        if (stereoConfig.gps_channel.length() > 0) {
            mav_gps_data_t_sub = mav_gps_data_t_subscribe(lcm, stereoConfig.gps_channel.c_str(), &mav_gps_data_t_handler, &hud);
        }

        if (stereoConfig.baro_airspeed_channel.length() > 0) {
            baro_airspeed_sub = lcmt_baro_airspeed_subscribe(lcm, stereoConfig.baro_airspeed_channel.c_str(), &baro_airspeed_handler, &hud);
        }

        if (stereoConfig.servo_out_channel.length() > 0) {
            servo_out_sub = lcmt_deltawing_u_subscribe(lcm, stereoConfig.servo_out_channel.c_str(), &servo_out_handler, &hud);
        }

        if (stereoConfig.battery_status_channel.length() > 0) {
            battery_status_sub = lcmt_battery_status_subscribe(lcm, stereoConfig.battery_status_channel.c_str(), &battery_status_handler, &hud);
        }

        if (stereoConfig.cpu_info_channel1.length() > 0) {
            cpu_info_sub1 = lcmt_cpu_info_subscribe(lcm, stereoConfig.cpu_info_channel1.c_str(), &cpu_info_handler, &recording_manager);
            cpu_info_sub2 = lcmt_cpu_info_subscribe(lcm, stereoConfig.cpu_info_channel2.c_str(), &cpu_info_handler, &recording_manager);
            cpu_info_sub3 = lcmt_cpu_info_subscribe(lcm, stereoConfig.cpu_info_channel3.c_str(), &cpu_info_handler, &recording_manager);
        }

        if (stereoConfig.log_size_channel1.length() > 0) {
            log_size_sub1 = lcmt_log_size_subscribe(lcm, stereoConfig.log_size_channel1.c_str(), &log_size_handler, &hud);
            log_size_sub2 = lcmt_log_size_subscribe(lcm, stereoConfig.log_size_channel2.c_str(), &log_size_handler, &hud);
            log_size_sub3 = lcmt_log_size_subscribe(lcm, stereoConfig.log_size_channel3.c_str(), &log_size_handler, &hud);
        }

    } // end show_display || publish_all_images

    // load calibration
    OpenCvStereoCalibration stereoCalibration;

    if (LoadCalibration(stereoConfig.calibrationDir, &stereoCalibration) != true)
    {
        cerr << "Error: failed to read calibration files. Quitting." << endl;
        return -1;
    }

    int inf_disparity_tester, disparity_tester;
    disparity_tester = GetDisparityForDistance(10, stereoCalibration, &inf_disparity_tester);

    std::cout << "computed disparity is = " << disparity_tester << ", inf disparity = " << inf_disparity_tester << std::endl;

    // subscribe to the stereo control channel
    stereo_control_sub = lcmt_stereo_control_subscribe(lcm, stereoConfig.stereoControlChannel.c_str(), &lcm_stereo_control_handler, NULL);


    Mat imgDisp;
    Mat imgDisp2;

    // initilize default parameters
    //PushbroomStereoState state; // HACK

    // Copy stereo-matcher tuning parameters from the loaded configuration
    // into the state struct handed to the pushbroom stereo processor.
    state.disparity = stereoConfig.disparity;
    state.zero_dist_disparity = stereoConfig.infiniteDisparity;
    state.sobelLimit = stereoConfig.interestOperatorLimit;
    state.horizontalInvarianceMultiplier = stereoConfig.horizontalInvarianceMultiplier;
    state.blockSize = stereoConfig.blockSize;
    state.random_results = random_results;
    state.check_horizontal_invariance = true;

    // Sanity-check the SAD block size; processing still runs outside
    // [1, 10], but the warning flags a likely misconfiguration.
    if (state.blockSize > 10 || state.blockSize < 1)
    {
        fprintf(stderr, "Warning: block size is very large "
            "or small (%d).  Expect trouble.\n", state.blockSize);
    }

    state.sadThreshold = stereoConfig.sadThreshold;

    // Rectification maps (left/right) and reprojection matrix Q from the
    // stereo calibration.
    state.mapxL = stereoCalibration.mx1fp;
    state.mapxR = stereoCalibration.mx2fp;
    state.Q = stereoCalibration.qMat;
    state.show_display = show_display;

    state.lastValidPixelRow = stereoConfig.lastValidPixelRow;

    Mat matL, matR;
    bool quit = false;

    if (recording_manager.UsingLiveCameras()) {
        // Grab one frame from each camera first so the recording manager
        // can learn the frame format before recording starts.
        matL = GetFrameFormat7(camera);
        matR = GetFrameFormat7(camera2);

        if (recording_manager.InitRecording(matL, matR) != true) {
            // failed to init recording, things are going bad.  bail.
            return -1;
        }

        // before we start, turn the cameras on and set the brightness and exposure
        MatchBrightnessSettings(camera, camera2, true, force_brightness, force_exposure);

        // grab a few frames and send them over LCM for the user
        // to verify that everything is working
        if (!show_display && !publish_all_images) {
            printf("Sending init images over LCM... ");
            fflush(stdout);

            for (int i = 0; i < 5; i++) {

                matL = GetFrameFormat7(camera);
                SendImageOverLcm(lcm, "stereo_image_left", matL, 50);

                matR = GetFrameFormat7(camera2);
                SendImageOverLcm(lcm, "stereo_image_right", matR, 50);

                // don't send these too fast, otherwise we'll flood the ethernet link
                // and not actually be helpful

                // wait one second
                printf(".");
                fflush(stdout);

                sleep(1);
            }
            printf(" done.\n");
        }

    } // recording_manager.UsingLiveCameras()

    // spool up worker threads
    PushbroomStereo pushbroom_stereo;

    // start the framerate clock
    struct timeval start, now;
    gettimeofday( &start, NULL );

    // Main capture/process/publish loop; runs until the user presses 'q'
    // (only reachable when show_display is on).
    while (quit == false) {

        // get the frames from the camera
        if (recording_manager.UsingLiveCameras()) {
            // we would like to match brightness every frame
            // but that would really hurt our framerate
            // match brightness every 10 frames instead
            if (numFrames % MATCH_BRIGHTNESS_EVERY_N_FRAMES == 0)
            {
                MatchBrightnessSettings(camera, camera2);
            }

            // capture images from the cameras
            matL = GetFrameFormat7(camera);
            matR = GetFrameFormat7(camera2);

            // record video
            recording_manager.AddFrames(matL, matR);


        } else {
            // using a video file -- get the next frame
            recording_manager.GetFrames(matL, matR);
        }

        // NOTE(review): cv::vector appears to be the old OpenCV 2.x alias
        // for std::vector -- confirm against the OpenCV version in use.
        cv::vector<Point3f> pointVector3d;
        cv::vector<uchar> pointColors;
        cv::vector<Point3i> pointVector2d; // for display
        cv::vector<Point3i> pointVector2d_inf; // for display

        // do the main stereo processing
        if (disable_stereo != true) {

            // time the stereo call in microseconds for the fps readout below
            gettimeofday( &now, NULL );
            double before = now.tv_usec + now.tv_sec * 1000 * 1000;

            pushbroom_stereo.ProcessImages(matL, matR, &pointVector3d, &pointColors, &pointVector2d, state);

            gettimeofday( &now, NULL );
            double after = now.tv_usec + now.tv_sec * 1000 * 1000;

            timer_sum += after-before;
            timer_count ++;

        }

        // build an LCM message for the stereo data
        lcmt_stereo msg;


        if (recording_manager.UsingLiveCameras() || stereo_lcm_msg == NULL) {
            msg.timestamp = getTimestampNow();
        } else {
            // if we are replaying videos, preserve the timestamp of the original video
            msg.timestamp = stereo_lcm_msg->timestamp;

        }


        msg.number_of_points = (int)pointVector3d.size();

        // NOTE(review): variable-length arrays are a compiler extension in
        // C++ (and zero-length when no points were found) -- consider
        // std::vector here; verify the toolchain accepts VLAs.
        float x[msg.number_of_points];
        float y[msg.number_of_points];
        float z[msg.number_of_points];
        uchar grey[msg.number_of_points];

        // convert points into the message's units and flatten into arrays
        for (unsigned int i=0;i<pointVector3d.size();i++) {

            x[i] = pointVector3d[i].x / stereoConfig.calibrationUnitConversion;
            y[i] = pointVector3d[i].y / stereoConfig.calibrationUnitConversion;
            z[i] = pointVector3d[i].z / stereoConfig.calibrationUnitConversion;
            grey[i] = pointColors[i];
        }

        // msg only borrows these stack arrays; it is published before they
        // go out of scope at the end of this loop iteration
        msg.x = x;
        msg.y = y;
        msg.z = z;
        msg.grey = grey;
        msg.frame_number = recording_manager.GetFrameNumber();

        if (recording_manager.UsingLiveCameras()) {
            msg.frame_number = msg.frame_number - 1;  // minus one since recording manager has
                                                      // already recorded this frame (above in
                                                      // AddFrames) but we haven't made a message
                                                      // for it yet
        }


        msg.video_number = recording_manager.GetRecVideoNumber();

        // publish the LCM message (skip duplicates when the playback frame
        // hasn't advanced)
        if (last_frame_number != msg.frame_number) {
            lcmt_stereo_publish(lcm, "stereo", &msg);
            last_frame_number = msg.frame_number;
        }

        if (publish_all_images) {
            if (recording_manager.GetFrameNumber() != last_playback_frame_number) {
                SendImageOverLcm(lcm, "stereo_image_left", matL, 80);
                SendImageOverLcm(lcm, "stereo_image_right", matR, 80);

                last_playback_frame_number = recording_manager.GetFrameNumber();
            }

            //process LCM until there are no more messages
            // this allows us to drop frames if we are behind
            while (NonBlockingLcm(lcm)) {}
        }

        Mat matDisp, remapL, remapR;

        if (show_display) {
            // we remap again here because we're just in display
            Mat remapLtemp(matL.rows, matL.cols, matL.depth());
            Mat remapRtemp(matR.rows, matR.cols, matR.depth());

            remapL = remapLtemp;
            remapR = remapRtemp;

            remap(matL, remapL, stereoCalibration.mx1fp, Mat(), INTER_NEAREST);
            remap(matR, remapR, stereoCalibration.mx2fp, Mat(), INTER_NEAREST);

            // matDisp is the annotated copy we draw hit boxes / HUD onto
            remapL.copyTo(matDisp);

            //process LCM until there are no more messages
            // this allows us to drop frames if we are behind
            while (NonBlockingLcm(lcm)) {}
        } // end show_display


        if (show_display) {

            // draw each stereo hit as a filled black box with a white outline
            for (unsigned int i=0;i<pointVector2d.size();i++) {
                int x2 = pointVector2d[i].x;
                int y2 = pointVector2d[i].y;
                //int sad = pointVector2d[i].z;
                rectangle(matDisp, Point(x2,y2), Point(x2+state.blockSize, y2+state.blockSize), 0,  CV_FILLED);
                rectangle(matDisp, Point(x2+1,y2+1), Point(x2+state.blockSize-1, y2-1+state.blockSize), 255);

            }

            // draw pixel blocks
            // (lineLeftImgPosition/lineLeftImgPositionY are presumably set by
            // a mouse callback registered elsewhere -- TODO confirm)
            if (lineLeftImgPosition >= 0 && lineLeftImgPositionY > 1) {
                DisplayPixelBlocks(remapL, remapR, lineLeftImgPosition - state.blockSize/2, lineLeftImgPositionY - state.blockSize/2, state, &pushbroom_stereo);
            }

            // draw a line for the user to show disparity
            DrawLines(remapL, remapR, matDisp, lineLeftImgPosition, lineLeftImgPositionY, state.disparity, state.zero_dist_disparity);


            if (visualize_stereo_hits == true && stereo_lcm_msg != NULL) {

                // transform the points from 3D space back onto the image's 2D space
                vector<Point3f> lcm_points;
                Get3DPointsFromStereoMsg(stereo_lcm_msg, &lcm_points);

                // draw the points on the unrectified image (to see these
                // you must pass the -u flag)
                Draw3DPointsOnImage(matL, &lcm_points, stereoCalibration.M1, stereoCalibration.D1, stereoCalibration.R1, 128);

            }

            // show rectified or raw camera images depending on the -u flag
            if (show_unrectified == false) {

                imshow("Input", remapL);
                imshow("Input2", remapR);
            } else {
                imshow("Input", matL);
                imshow("Input2", matR);
            }


            if (display_hud) {
                Mat with_hud;

                recording_manager.SetHudNumbers(&hud);

                hud.DrawHud(matDisp, with_hud);

                if (record_hud) {
                    // put this frame into the HUD recording
                    recording_manager.RecFrameHud(with_hud);

                }

                imshow("Stereo", with_hud);
            } else {
                imshow("Stereo", matDisp);
            }


            // NOTE(review): waitKey returns an int; truncating to char means
            // "no key" appears as 255 or -1 depending on char signedness --
            // the check below covers both.
            char key = waitKey(show_display_wait);

            if (key != 255 && key != -1)
            {
                cout << endl << key << endl;
            }

            // interactive keyboard controls for live tuning of the stereo
            // parameters, playback position, camera settings, and HUD
            switch (key)
            {
                case 'T':
                    state.disparity --;
                    break;
                case 'R':
                    state.disparity ++;
                    break;

                case 'w':
                    state.sobelLimit += 10;
                    break;

                case 's':
                    state.sobelLimit -= 10;
                    break;

                case 'd':
                    state.horizontalInvarianceMultiplier -= 0.1;
                    break;

                case 'D':
                    state.horizontalInvarianceMultiplier += 0.1;
                    break;

                case 'g':
                    state.blockSize ++;
                    break;

                case 'b':
                    state.blockSize --;
                    if (state.blockSize < 1) {
                        state.blockSize = 1;
                    }
                    break;

                case 'Y':
                    state.sadThreshold += 50;
                    break;

                case 'y':
                    state.sadThreshold ++;
                    break;

                case 'h':
                    state.sadThreshold --;
                    break;

                case 'H':
                    state.sadThreshold -= 50;
                    break;

                case 'm':
                    if (recording_manager.UsingLiveCameras()) {
                        MatchBrightnessSettings(camera, camera2, true, force_brightness, force_exposure);
                    }
                    break;

                case '1':
                    force_brightness --;
                    if (recording_manager.UsingLiveCameras()) {
                        MatchBrightnessSettings(camera, camera2, true, force_brightness, force_exposure);
                    }
                    break;

                case '2':
                    force_brightness ++;
                    if (recording_manager.UsingLiveCameras()) {
                        MatchBrightnessSettings(camera, camera2, true, force_brightness, force_exposure);
                    }
                    break;

                case '3':
                    force_exposure --;
                    if (recording_manager.UsingLiveCameras()) {
                        MatchBrightnessSettings(camera, camera2, true, force_brightness, force_exposure);
                    }
                    break;

                case '4':
                    force_exposure ++;
                    if (recording_manager.UsingLiveCameras()) {
                        MatchBrightnessSettings(camera, camera2, true, force_brightness, force_exposure);
                    }
                    break;

                case '5':
                    // to show SAD boxes
                    state.sobelLimit = 0;
                    state.sadThreshold = 255;
                    break;

                case 'I':
                    state.check_horizontal_invariance = !state.check_horizontal_invariance;
                    break;

                case '.':
                    recording_manager.SetPlaybackFrameNumber(recording_manager.GetFrameNumber() + 1);
                    break;

                case ',':
                    recording_manager.SetPlaybackFrameNumber(recording_manager.GetFrameNumber() - 1);
                    break;

                case '>':
                    recording_manager.SetPlaybackFrameNumber(recording_manager.GetFrameNumber() + 50);
                    break;

                case '<':
                    recording_manager.SetPlaybackFrameNumber(recording_manager.GetFrameNumber() - 50);
                    break;

                //case 'k':
                //    state.zero_dist_disparity ++;
                 //   break;

                case 'l':
                    state.zero_dist_disparity --;
                    break;

                case 'o':
                    inf_sad_add --;
                    break;

                case 'p':
                    inf_sad_add ++;
                    break;

                case '[':
                    y_offset --;
                    if (y_offset < 0) {
                        y_offset = 0;
                    }
                    break;

                case ']':
                    y_offset ++;
                    break;

                case 'v':
                    display_hud = !display_hud;
                    break;

                case 'c':
                    hud.SetClutterLevel(hud.GetClutterLevel() + 1);
                    break;

                case 'C':
                    hud.SetClutterLevel(hud.GetClutterLevel() - 1);
                    break;

                case '}':
                    hud.SetPitchRangeOfLens(hud.GetPitchRangeOfLens() + 1);
                    break;
                case '{':
                    hud.SetPitchRangeOfLens(hud.GetPitchRangeOfLens() - 1);
                    break;

                case 'S':
                    // take a screen cap of the left and right images
                    // useful for putting into a stereo tuner
                    printf("\nWriting left.ppm...");
                    imwrite("left.ppm", remapL);

                    printf("\nWriting right.ppm...");
                    imwrite("right.ppm", remapR);

                    printf("\ndone.");
                    break;

                case 'V':
                    // record the HUD
                    record_hud = true;
                    recording_manager.RestartRecHud();
                    break;

                    /*
                case 'j':
                    state.debugJ --;
                    break;

                case 'J':
                    state.debugJ ++;
                    break;

                case 'i':
                    state.debugI --;
                    break;

                case 'I':
                    state.debugI ++;
                    break;

                case 'k':
                    state.debugDisparity --;
                    break;

                case 'K':
                    state.debugDisparity ++;
                    break;

                    */

                case 'q':
                    quit = true;
                    break;
            }

            // after any keypress, dump the current tuning values so the
            // user can record good settings
            if (key != 255 && key != -1)
            {
                cout << "sadThreshold = " << state.sadThreshold << endl;
                cout << "sobelLimit = " << state.sobelLimit << endl;
                cout << "horizontalInvarianceMultiplier = " << state.horizontalInvarianceMultiplier << endl;
                cout << "brightness: " << force_brightness << endl;
                cout << "exposure: " << force_exposure << endl;
                cout << "disparity = " << state.disparity << endl;
                cout << "inf_disparity = " << state.zero_dist_disparity << endl;
                cout << "inf_sad_add = " << inf_sad_add << endl;
                cout << "blockSize = " << state.blockSize << endl;
                cout << "frame_number = " << recording_manager.GetFrameNumber() << endl;
                cout << "y offset = " << y_offset << endl;
                cout << "PitchRangeOfLens = " << hud.GetPitchRangeOfLens() << endl;
            }
        } // end show_display

        numFrames ++;

        // check for new LCM messages
        NonBlockingLcm(lcm);

        if (quiet_mode == false || numFrames % 100 == 0) {
            // compute framerate
            // NOTE(review): elapsed is in milliseconds; if the first frame
            // completes in under 1 ms this divides by zero -- verify.
            gettimeofday( &now, NULL );

            elapsed = (now.tv_usec / 1000 + now.tv_sec * 1000) -
            (start.tv_usec / 1000 + start.tv_sec * 1000);

            printf("\r%d frames (%lu ms) - %4.1f fps | %4.1f ms/frame, stereo: %f", numFrames, elapsed, (float)numFrames/elapsed * 1000, elapsed/(float)numFrames, timer_sum/(double)timer_count);
            fflush(stdout);
        }


    } // end main while loop

    printf("\n\n");

    // tear down the display windows created by imshow above
    destroyWindow("Input");
    destroyWindow("Input2");
    destroyWindow("Stereo");

    // close camera
    if (recording_manager.UsingLiveCameras()) {
        StopCapture(d, camera);
        StopCapture(d2, camera2);
    }

    return 0;
}