Example 1
void ELASStereo::Run() {
  // get image width and height
  int32_t width = m_I1->width();
  int32_t height = m_I1->height();

  // image dimensions for ELAS
  const int32_t dims[3] = {width, height, width};  // {width, height, bytes per line}; bytes per line = width here

  // process
  Elas::parameters param;
  param.postprocess_only_left = false;
  Elas elas(param);
  elas.process(m_I1->data, m_I2->data, (float*)m_hDisparity1.data,
               (float*)m_hDisparity2.data, dims);

  // -----
  // upload disparity to GPU
  m_dDisparity.MemcpyFromHost((float*)m_hDisparity1.data);

  // convert disparity to depth
  roo::Disp2Depth(m_dDisparity, m_dDepth, m_Kl(0, 0), m_baseline);

  // download depth from GPU
  m_dDepth.MemcpyToHost(m_hDepth.data);
  //  ShowHeatDepthMat("depth image", m_hDepth);
  //  cv::waitKey(1);
}
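Example 1 hands the ELAS disparity to the GPU and converts it to depth with roo::Disp2Depth, passing the left focal length m_Kl(0, 0) and the stereo baseline. As a hedged illustration of what that conversion amounts to, the CPU sketch below applies the standard pinhole-stereo relation depth = fx * baseline / disparity; the function name is made up, and the handling of libelas's negative "invalid" disparity markers is an assumption.

#include <opencv2/opencv.hpp>

// CPU sketch (assumed equivalent) of the disparity-to-depth conversion:
// depth = fx * baseline / disparity for valid (positive) disparities.
cv::Mat disparityToDepth(const cv::Mat& disparity, float fx, float baseline) {
  cv::Mat depth(disparity.size(), CV_32F);
  for (int y = 0; y < disparity.rows; ++y) {
    for (int x = 0; x < disparity.cols; ++x) {
      float d = disparity.at<float>(y, x);
      // libelas marks invalid pixels with negative disparities; map them to zero depth
      depth.at<float>(y, x) = (d > 0.0f) ? fx * baseline / d : 0.0f;
    }
  }
  return depth;
}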
Example 2
int evalderFunctional2D(pInstance instance, double *F){
  pEdge  pa;
  int    k, icase, ret, ielem, i, coef, ref, ref1;
  double cb[3], dist = 0.0, n[2], len, q[2]; /* dist stays 0 if the point cannot be located */
  double *a, *b;
  
  coef = 1;
  
  fprintf(stdout,"  --  Compute the descent direction \n");
  
  memset(instance->sol_omega0.u, 0, 2*instance->sol_omega0.np*sizeof(double));
  memset(F, 0, 2*instance->sol_omega0.np*sizeof(double));
  
  
  /* Assemble the elastic right-hand side */
  for (k = 1; k <= instance->mesh_omega0.na; k++) {
    pa = &instance->mesh_omega0.edge[k];
    ref = getRefm(&instance->mesh_omega0,pa->ref);
    ref1 = getRefb(&instance->mesh_omega0,pa->ref);
    if ( (!ref) && (!ref1)) {
      a = &instance->mesh_omega0.point[pa->v[0]].c[0];
      b = &instance->mesh_omega0.point[pa->v[1]].c[0];
      
      len = length(a, b, n);
      
      q[0] = (a[0]+b[0])/2;
      q[1] = (a[1]+b[1])/2;
      
      if (q[0] < 0 || q[0] > 1 || q[1] < 0 || q[1] > 1) {
        for (i = 0; i < 2; i++) {
          F[2*(pa->v[i]-1)+0] -= coef*n[0]/2;
          F[2*(pa->v[i]-1)+1] -= coef*n[1]/2;
        }
      }
      else {
        icase  = buckin(&instance->mesh_distance, &instance->bucket, q);
        ielem  = locelt(&instance->mesh_distance, icase, q, cb);
        
        if (ielem) {
          ret = intpp1(&instance->sol_distance,instance->mesh_distance.tria[ielem].v,&dist,ielem,cb);
          if (!ret)  exit(1);
        }
        
        for (i = 0; i < 2; i++) {
          F[2*(pa->v[i]-1)+0] -= coef*len*dist*n[0]/2;
          F[2*(pa->v[i]-1)+1] -= coef*len*dist*n[1]/2;
        }
      }
    }
  }
  
  /* compute the displacement u */
  if ( !elas(instance, F) )  return 0;
  
  return 1;
}
Example 3
int main()
{
	Mat leftim=imread("view1.png");
	Mat rightim=imread("view5.png");
	Mat dest;
	StereoEfficientLargeScale elas(0,128);

	// we can set various parameters (see the tuning sketch after this example)
		//elas.elas.param.ipol_gap_width=;
		//elas.elas.param.speckle_size=getParameter("speckle_size");
		//elas.elas.param.speckle_sim_threshold=getParameter("speckle_sim");

	
	elas(leftim,rightim,dest,100);

	Mat show;
	dest.convertTo(show,CV_8U,1.0/8);
	imshow("disp",show);
	waitKey();
	return 0;
}
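The commented-out lines above hint at tuning the underlying Elas::parameters through the wrapper object. The following sketch repeats Example 3 with a few parameters set before matching; it assumes the wrapper exposes its Elas instance as a public member named elas (as the comments imply) and a header named "StereoEfficientLargeScale.h", and the concrete values are illustrative rather than recommended.

#include <opencv2/opencv.hpp>
#include "StereoEfficientLargeScale.h"  // assumed header name for the wrapper

int main()
{
	cv::Mat leftim = cv::imread("view1.png");
	cv::Mat rightim = cv::imread("view5.png");
	if (leftim.empty() || rightim.empty()) return -1;  // guard against missing input files

	StereoEfficientLargeScale elas(0, 128);        // disparity search range, as in Example 3

	// tune a few Elas::parameters before matching (values are illustrative)
	elas.elas.param.ipol_gap_width = 30;           // maximum gap width bridged by interpolation
	elas.elas.param.speckle_size = 200;            // maximum size of speckle regions to remove
	elas.elas.param.speckle_sim_threshold = 1.0f;  // similarity threshold for speckle segmentation

	cv::Mat dest;
	elas(leftim, rightim, dest, 100);

	cv::Mat show;
	dest.convertTo(show, CV_8U, 1.0 / 8);
	cv::imshow("disp", show);
	cv::waitKey();
	return 0;
}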
Example 4
int main( int /*argc*/, char** /*argv*/ )
{

    //!< load camera calibration info
    cv::Mat mx1, my1, mx2, my2, Q;
    loadCalibrationInfo(mx1, my1, mx2, my2, Q);


    //!< connect to cameras
    unsigned int numCameras;
    FlyCapture2::Camera** ppCameras;
    prepareCameras( numCameras, ppCameras );


    //!< Allocate images

    //!< one image per camera: a vector of pointers to images (cv::Mat)
    std::vector< std::shared_ptr< cv::Mat > > pimageCamera;
    for ( unsigned int i = 0; i < numCameras; i++ )
        pimageCamera.push_back( std::shared_ptr< cv::Mat > ( new cv::Mat(480, 640, CV_8UC1) ) ); //!< create a cv::Mat, wrap it in a shared_ptr, then push_back

    //!< disparity maps
    cv::Mat imageDisparity(480, 640, CV_32F), disp8U;


    //!< OpenCV control panel: block stereo matching parameters
    cv::namedWindow("control panel", cv::WINDOW_NORMAL );
    int ndisparities = 5;
    cv::createTrackbar( "(n+1)x16=ndisparities", "control panel", &ndisparities, 20, NULL ); //!< ndisparities % 16 == 0, positive, ndisparities < width
    int blockSize = 5;
    cv::createTrackbar( "(s+2)x2+1=blockSize", "control panel", &blockSize, 30, NULL ); //!<  5 <= blocksize <= 255, odd


#define ADDBAR( name1 , name2 , init, maxval ) \
    int name1 = init; \
    cv::createTrackbar( #name1, "control panel", &name1, maxval, NULL ); \
    sbm->name2( name1 );
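    //!< for reference, with the macro above ADDBAR( P1, setP1, 500, 1000 ) expands to:
    //!<     int P1 = 500;
    //!<     cv::createTrackbar( "P1", "control panel", &P1, 1000, NULL );
    //!<     sbm->setP1( P1 );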

    

#ifdef USE_OPENCV_SBM
    //!< sample code from https://github.com/Itseez/opencv/blob/52a785e95a30d9336bfbac97a0a0d0089ffaa7de/samples/cpp/stereo_match.cpp
    //!< stereo matching object
    cv::Ptr< cv::StereoBM > sbm = cv::createStereoBM( ndisparities, blockSize );
    //sbm->setPreFilterType( cv::StereoBM::PREFILTER_NORMALIZED_RESPONSE );
    //sbm->setPreFilterType( cv::StereoBM::PREFILTER_XSOBEL );

    ADDBAR( textureThreshold, setTextureThreshold, 5, 30 );
    ADDBAR( smallerBlockSize, setSmallerBlockSize, 5, 30 );
    
#endif
    
#ifdef USE_OPENCV_SGBM
    //!< sample code from https://github.com/Itseez/opencv/blob/52a785e95a30d9336bfbac97a0a0d0089ffaa7de/samples/cpp/stereo_match.cpp
    cv::Ptr< cv::StereoSGBM > sbm = cv::createStereoSGBM( 0, ndisparities, blockSize );

    //!< parameters from http://www.jayrambhia.com/blog/disparity-maps/
    ADDBAR( P1, setP1, 500, 1000 );
    ADDBAR( P2, setP2, 2000, 3000 );
#endif

#if defined(USE_OPENCV_SBM) || defined(USE_OPENCV_SGBM)
    ADDBAR( preFilterCap, setPreFilterCap, 4, 20 );
    //sbm->setMinDisparity( 0 );
    //sbm->setNumDisparities( ndisparities );
    ADDBAR( uniquenessRatio, setUniquenessRatio, 10, 20 );
    ADDBAR( speckleWindowSize, setSpeckleWindowSize, 150, 200 );
    ADDBAR( speckleRange, setSpeckleRange, 2, 10 );
    ADDBAR( disp12MaxDiff, setDisp12MaxDiff, 5, 20 );
#endif
    
    
    
#ifdef USE_ELAS
    Elas::parameters param;
    param.postprocess_only_left = true;
    param.disp_min = 0;
    param.disp_max = (ndisparities + 1) * 16;
    const int32_t dims[3] = {640, 480, 640};
    Elas elas( param );
    //float* D1_data = (float*)malloc(640*480*sizeof(float));
    //float* D2_data = (float*)malloc(width*height*sizeof(float));
#endif


    pcl::PointCloud< pcl::PointXYZ >::Ptr pointCloudFromDepth_ptr ( new pcl::PointCloud< pcl::PointXYZ > );
    {
	pcl::PointXYZ initPoint;
	initPoint.x = 0;
	initPoint.y = 0;
	initPoint.z = 0;
	pointCloudFromDepth_ptr->points.push_back( initPoint );
	pointCloudFromDepth_ptr->width = (int) pointCloudFromDepth_ptr->points.size ();
	pointCloudFromDepth_ptr->height = 1;
    }
    
    boost::shared_ptr< pcl::visualization::PCLVisualizer > viewer (new pcl::visualization::PCLVisualizer ("3D Viewer"));
    viewer->setBackgroundColor (0, 0, 0);
    viewer->addPointCloud<pcl::PointXYZ> (pointCloudFromDepth_ptr, "my points");
    viewer->setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 1, "my points");
    viewer->addCoordinateSystem (1.0, "global");
    viewer->initCameraParameters ();


    while ( 1 )
    {
	//!< Display the timestamps for all cameras to show that the image
	//!< capture is synchronized for each image
	
	#if defined(USE_OPENCV_SBM)
	sbm->setTextureThreshold( textureThreshold );
	sbm->setSmallerBlockSize( smallerBlockSize );
	#endif
	
	#if defined(USE_OPENCV_SGBM)
	sbm->setP1( P1 );
	sbm->setP2( P2 );
	#endif
	
	#if defined(USE_OPENCV_SBM) || defined(USE_OPENCV_SGBM)
	sbm->setBlockSize( (blockSize + 2) * 2 + 1 );
	sbm->setNumDisparities( (ndisparities + 1) * 16 );
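	//!< note: the two mappings above keep blockSize odd (in 5..65) and
	//!< numDisparities a positive multiple of 16 (in 16..336) for the trackbar ranges used here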
	
	sbm->setUniquenessRatio( uniquenessRatio );
	sbm->setSpeckleWindowSize( speckleWindowSize );
	sbm->setSpeckleRange( speckleRange );
	sbm->setDisp12MaxDiff( disp12MaxDiff );
	#endif
	
#ifdef USE_ELAS
	param.disp_max = (ndisparities + 1) * 16;
	elas.setParameters( param );
#endif
	
        for ( unsigned int i = 0; i < numCameras; i++ )
        {
	    FlyCapture2::Image rawImage; //!< buffer: Must be here in the loop, otherwise cameras do not synchronize.
            FlyCapture2::Error error;
            error = ppCameras[i]->RetrieveBuffer( &rawImage );
            PRINT_ERROR( error );
	    
// 	    FlyCapture2::TimeStamp timestamp = rawImage.GetTimeStamp();
//             fprintf(stderr, 
//                 "Cam %d  TimeStamp [%d %d]\n", 
//                 i, 
//                 timestamp.cycleSeconds, 
//                 timestamp.cycleCount);

            memcpy( pimageCamera[i]->data, rawImage.GetData(), rawImage.GetDataSize() );
        }

        //!< rectify (and undistort) the images; cv::remap cannot operate in-place, so remap into temporaries first
        cv::Mat rectified1, rectified0;
        cv::remap( *pimageCamera[1], rectified1, mx1, my1, cv::INTER_LINEAR );
        cv::remap( *pimageCamera[0], rectified0, mx2, my2, cv::INTER_LINEAR );
        rectified1.copyTo( *pimageCamera[1] );
        rectified0.copyTo( *pimageCamera[0] );


#if defined(USE_OPENCV_SBM) || defined(USE_OPENCV_SGBM)
        sbm->compute( *pimageCamera[1], *pimageCamera[0], imageDisparity );
#endif

#ifdef USE_ELAS
//        int static skip = 0;
//        if (skip % 15 ==0)
        {
 //         elas.process(imageCamera1.data, imageCamera0.data, (float*)imageDisparity.data,(float*)imageDisparity.data, dims);
            elas.process(pimageCamera[1]->data, pimageCamera[0]->data, (float*)imageDisparity.data,(float*)imageDisparity.data, dims);

        }
//        skip++;
#endif

        //!< normalize disparity map to [0,255] for imshow
        double minVal, maxVal;
        minMaxLoc( imageDisparity, &minVal, &maxVal );
        imageDisparity.convertTo( disp8U, CV_8UC1, 255/(maxVal - minVal), -minVal * 255/(maxVal - minVal) );


	#define DRAWTXT(img, str, x, y, s) \
	cv::putText( img, str, cv::Point(x,y), cv::FONT_HERSHEY_SIMPLEX, s, cv::Scalar::all(255) )
	
	DRAWTXT( disp8U, "disparity", 10, 20, 0.5 );
	cv::imshow( "disparity", disp8U );
	
	
	
	
	//!< disparity map --> depth map --> point cloud
	cv::Mat depth;
	cv::reprojectImageTo3D( imageDisparity, depth, Q );
	cv::Mat pointCloudFromDepth = depth.reshape( 3, depth.size().area() );
	pointCloudFromDepth_ptr->clear();
	for (int i = 0; i < pointCloudFromDepth.rows; i++)
	{
	    float *pt = pointCloudFromDepth.ptr < float > ( i );
	    pcl::PointXYZ basic_point;
	    basic_point.x = pt[0];
	    basic_point.y = pt[1];
	    basic_point.z = pt[2];
#if defined(USE_OPENCV_SBM) || defined(USE_OPENCV_SGBM)
	    basic_point.z /= 8.0; //!< simple hack, reason unknown; the resulting depth still looks suspect
#endif

	    if ( pt[2] < 129 ) //!< simple hack
		pointCloudFromDepth_ptr->points.push_back ( basic_point );
	}
	pointCloudFromDepth_ptr->width = (int) pointCloudFromDepth_ptr->points.size ();
	pointCloudFromDepth_ptr->height = 1;
	viewer->updatePointCloud< pcl::PointXYZ > ( pointCloudFromDepth_ptr, "my points" );
	viewer->spinOnce ();

	
	
	
	for ( unsigned int i = 0; i < numCameras; i++ )
	{
	    // drawCameraInfo( ppCameras[i], *pimageCamera[i] );
	    cv::imshow( std::string("cam") + std::to_string(i), *pimageCamera[i] );
	}
	
	cv::imshow( "sub",  cv::abs( *pimageCamera[0] - *pimageCamera[1] ) );
	
	
	
	char keyValue = cv::waitKey( 10 );
	if (keyValue == 'q' || keyValue == 27 /* ESC */ )
	{
	    break;
	}
	
    }

    exit(0); //!< hack; code below may freeze.

    for ( unsigned int i = 0; i < numCameras; i++ )
    {
        ppCameras[i]->StopCapture();
        ppCameras[i]->Disconnect();
        delete ppCameras[i];
    }

    delete [] ppCameras;




    return 0;
}