void PointCloudBuilder::extractFrom(Mat& dispMap, Mat& leftColorImage, bool handleMissingValues)
{
    // Reproject the disparity map to a CV_32FC3 image of (X, Y, Z) points using Q.
    reprojectImageTo3D(dispMap, pointCloud, Q, handleMissingValues);

    pointCloudMsg->points.clear();
    pointCloudMsg->height = pointCloud.rows;
    pointCloudMsg->width = pointCloud.cols;

    for(unsigned int i = 0; i < pointCloudMsg->height; i++)
    {
        const Vec3f *point = pointCloud.ptr<Vec3f>(i);
        const Vec3b *pixel = leftColorImage.ptr<Vec3b>(i);

        for(unsigned int j = 0; j < pointCloudMsg->width; j++)
        {
            PointXYZRGB p;

            // Scale by the calibration-specific factor 2.46 and flip Y to match
            // the target coordinate convention.
            p.data[0] = point[j][0] / 2.46f;
            p.data[1] = -point[j][1] / 2.46f;
            p.data[2] = point[j][2] / 2.46f;
            p.data[3] = 1.0f;

            // Zero out points that reprojectImageTo3D marked as invalid.
            if(std::isinf(p.x) || std::isnan(p.x) || std::isinf(p.y) || std::isnan(p.y) || std::isinf(p.z) || std::isnan(p.z))
                p.data[0] = p.data[1] = p.data[2] = 0;

            // OpenCV stores pixels as BGR; pack them into PCL's float RGB field.
            uint8_t b = pixel[j][0];
            uint8_t g = pixel[j][1];
            uint8_t r = pixel[j][2];
            uint32_t rgb = (r << 16) | (g << 8) | b;
            p.rgb = *reinterpret_cast<float*>(&rgb);

            pointCloudMsg->points.push_back(p);
        }
    }
}
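The disparity map handed to extractFrom() must hold true (not fixed-point) disparity values, and Q must come from the same rectification. A minimal sketch of producing such an input with the plain OpenCV API (the block-matcher parameters are placeholders, not values from the original project):

#include <opencv2/calib3d.hpp>
#include <opencv2/imgproc.hpp>
using namespace cv;

// Compute a CV_32F disparity map suitable for reprojectImageTo3D().
Mat computeFloatDisparity(const Mat& rectifiedGrayL, const Mat& rectifiedGrayR)
{
    // StereoBM outputs fixed-point disparity (16 * true disparity) as CV_16S.
    Ptr<StereoBM> bm = StereoBM::create(/*numDisparities=*/64, /*blockSize=*/15);
    Mat disp16, disp32;
    bm->compute(rectifiedGrayL, rectifiedGrayR, disp16);

    // Scale back to true disparity values before reprojecting.
    disp16.convertTo(disp32, CV_32F, 1.0 / 16.0);
    return disp32;
}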
Example #2
void FrameProcessor::processStereoFrame(const Mat & frameL, const Mat & frameR, Mat & pointCloud)
{
    Mat disparityMap, disparityMapNormalized;
    Mat frameTransposedL, frameTransposedR, frameRemappedL, frameRemappedR, frameGrayscaleL, frameGrayscaleR;

    // Rotate both frames by 90 degrees about their centres; the output keeps the
    // original frame size, so non-square frames are cropped by this warp.
    Point2f centerL(frameL.cols / 2.0f, frameL.rows / 2.0f);
    Mat rotMatL = getRotationMatrix2D(centerL, 90, 1);

    Point2f centerR(frameR.cols / 2.0f, frameR.rows / 2.0f);
    Mat rotMatR = getRotationMatrix2D(centerR, 90, 1);

    warpAffine(frameL, frameTransposedL, rotMatL, frameL.size());
    warpAffine(frameR, frameTransposedR, rotMatR, frameR.size());
    // Alternative: transpose(frameL, frameTransposedL); transpose(frameR, frameTransposedR);

    // Undistort and rectify using the precomputed lookup maps.
    remap(frameTransposedL, frameRemappedL, rmap[0][0], rmap[0][1], INTER_LINEAR);
    remap(frameTransposedR, frameRemappedR, rmap[1][0], rmap[1][1], INTER_LINEAR);

    // Convert to grayscale for block matching.
    cvtColor(frameRemappedL, frameGrayscaleL, COLOR_RGB2GRAY);
    cvtColor(frameRemappedR, frameGrayscaleR, COLOR_RGB2GRAY);

    // Block matching produces a float disparity map; normalize a copy for display.
    BlockMatcher(frameGrayscaleL, frameGrayscaleR, disparityMap, CV_32F);
    normalize(disparityMap, disparityMapNormalized, 0, 255, NORM_MINMAX, CV_8U);

    imshow("Disparity", disparityMapNormalized);

    reprojectImageTo3D(disparityMap, pointCloud, Q, false);
}
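processStereoFrame() relies on the member lookup maps rmap[2][2] and the reprojection matrix Q, which are not shown in this example. A sketch of how such members are typically initialised from saved calibration matrices (the names M1, D1, M2, D2, R, T are assumptions, not taken from this class):

#include <opencv2/calib3d.hpp>
#include <opencv2/imgproc.hpp>
using namespace cv;

// Build the rectification maps and Q once, e.g. when the processor is constructed.
void initRectificationMaps(const Mat& M1, const Mat& D1, const Mat& M2, const Mat& D2,
                           const Mat& R, const Mat& T, Size imageSize,
                           Mat rmap[2][2], Mat& Q)
{
    Mat R1, R2, P1, P2;
    stereoRectify(M1, D1, M2, D2, imageSize, R, T, R1, R2, P1, P2, Q,
                  CALIB_ZERO_DISPARITY, -1, imageSize);
    initUndistortRectifyMap(M1, D1, R1, P1, imageSize, CV_16SC2, rmap[0][0], rmap[0][1]);
    initUndistortRectifyMap(M2, D2, R2, P2, imageSize, CV_16SC2, rmap[1][0], rmap[1][1]);
}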
Example #3
void Depth_and_Disparity::convert_disperity_Mat_to_depth(Mat in_disp, Mat & out_depth)
{
	// Reproject the disparity map to a CV_32FC3 (X, Y, Z) image using the stored Q matrix;
	// handleMissingValues=true maps invalid disparities to a very large Z value.
	reprojectImageTo3D(in_disp, out_depth, Q, true);
}
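A hypothetical caller of convert_disperity_Mat_to_depth(): the output is a CV_32FC3 image whose third channel is the depth Z, and with handleMissingValues=true OpenCV pushes invalid disparities out to a very large Z (10000):

#include <opencv2/core.hpp>
using namespace cv;

// Read the depth at one pixel of the XYZ image produced above.
float depthAt(const Mat& xyz, int row, int col)
{
    Vec3f p = xyz.at<Vec3f>(row, col);
    return p[2];   // Z component; ~10000 marks a missing/invalid disparity
}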
Example #4
int Depth_and_Disparity::do_stereo_match(Mat imgR, Mat imgL , Mat& disp8 )
{
	ready_disparity_result = false;	// invalidate any unused previous result
	/*
		When the calibration matrices were computed and saved, the
			left camera was calibrated as img1,
			right camera as img2.
	*/
	img1 = imgL.clone();
	img2 = imgR.clone(); 

	// Scale if the image size differs from the calibrated images' resolution.
 /*   if (scale != 1.f)
    {
        Mat temp1, temp2;
        int method = scale < 1 ? INTER_AREA : INTER_CUBIC;
        resize(img1, temp1, Size(), scale, scale, method);
        img1 = temp1;
        resize(img2, temp2, Size(), scale, scale, method);
        img2 = temp2;
    }*/

    Size img_size = img1.size();

    if( intrinsic_filename )
    {
		/* passed to init function
        stereoRectify( M1, D1, M2, D2, img_size, R, T, R1, R2, P1, P2, Q, CALIB_ZERO_DISPARITY, -1, img_size, &roi1, &roi2 );

        initUndistortRectifyMap(M1, D1, R1, P1, img_size, CV_16SC2, map11, map12);
        initUndistortRectifyMap(M2, D2, R2, P2, img_size, CV_16SC2, map21, map22);*/

        remap(img1, img1r, map11, map12, INTER_LINEAR);
        remap(img2, img2r, map21, map22, INTER_LINEAR);

        img1 = img1r;
        img2 = img2r;

		rectification_output_vars.imageSize                = img_size;
		rectification_output_vars.rectR                    = img2;
		rectification_output_vars.rectL                    = img1;
		rectification_output_vars.validROI1                = roi1;
		rectification_output_vars.validROI2                = roi2;
		rectification_output_vars.originalyGivenframeCycle = relevantframeCycleIndex;

    }


    Mat		disp;

    int64	t = getTickCount(); 

    if( alg == STEREO_BM ){
		// conversion not needed: the inputs are already grayscale UINT8
		//img1.convertTo(img1, CV_8UC1+CV_BGR2GRAY);
		//img2.convertTo(img2, CV_8UC1+CV_BGR2GRAY);
		
        bm->compute(img1, img2, disp);
		}
    else if( alg == STEREO_SGBM || alg == STEREO_HH )
        sgbm->compute(img1, img2, disp);

    t = getTickCount() - t;
	printf("STEREO_BM/STEREO_SGBM time elapsed: %fms\n\n", t*1000/getTickFrequency());

    //	disp = disp.colRange(numberOfDisparities, img1p.cols);
    if( alg != STEREO_VAR )
        disp.convertTo(disp8, CV_8U, 255/(numberOfDisparities*16.));
    else
        disp.convertTo(disp8, CV_8U);

    if( !no_display )
    {
        namedWindow("disparity", 0);   imshow("disparity", disp8);
		Mat tmpD ;
			applyColorMap(disp8,tmpD, cv::COLORMAP_JET);
			namedWindow("disparity Heat", 1);   imshow("disparity Heat", tmpD); 
       /// fflush(stdout); 
        printf("\n");
    }
	
    if(disparity_filename)
        imwrite(disparity_filename, disp8);

	{
		// Disabled debug block: sanity-check the Q matrix by reprojecting the
		// disparity map and printing the 3D coordinates of the middle pixel.
		// See http://stackoverflow.com/questions/27374970/q-matrix-for-the-reprojectimageto3d-function-in-opencv
		if (1 == 2)
		{
			Mat xyz_again;
			reprojectImageTo3D(disp, xyz_again, Q, true);
			Vec3f point_middle = xyz_again.at<Vec3f>(xyz_again.rows/2, xyz_again.cols/2);
			printf("\n\n middle point relative coords: %f %f %f \n\n",
				point_middle.val[0], point_middle.val[1], point_middle.val[2]);
		}
	}

	ready_disparity_result  = true;

    return 0;
}
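The disabled debug block above links to a discussion of the Q matrix produced by stereoRectify. For that Q, a single disparity d maps to depth as Z = Q(2,3) / (d*Q(3,2) + Q(3,3)), which is the per-pixel relation reprojectImageTo3D applies. A small sketch of that relation (assuming Q is CV_64F, as stereoRectify produces):

#include <opencv2/core.hpp>
using namespace cv;

// Convert one true-disparity value to depth using the reprojection matrix Q.
double disparityToDepth(double d, const Mat& Q)
{
    double W = d * Q.at<double>(3, 2) + Q.at<double>(3, 3);
    return Q.at<double>(2, 3) / W;   // depth in the units of the calibration baseline
}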