// Run subpixel stereo on 'stereoData' and return the 16-bit disparity image.
//
// Parameters:
//   triclops   - configured TriclopsContext (resolution/disparity are set here)
//   stereoData - input image set to rectify and match
//   depthImage - out: interpolated 16-bit disparity image for the reference camera
// Returns 0 on success (_HANDLE_TRICLOPS_ERROR handles failures).
int doStereo( TriclopsContext const & triclops,
              TriclopsInput const &   stereoData,
              TriclopsImage16 &       depthImage )
{
    TriclopsError te;

    // Set subpixel interpolation on so the disparity is produced as a
    // TriclopsImage16 structure when we access and save the disparity image.
    te = triclopsSetSubpixelInterpolation( triclops, 1 );
    _HANDLE_TRICLOPS_ERROR( "triclopsSetSubpixelInterpolation()", te );

    // Restrict the disparity search range to [0, 70].
    // Fixed: error label previously said "triclopsSetSubpixelInterpolation()".
    te = triclopsSetDisparity( triclops, 0, 70 );
    _HANDLE_TRICLOPS_ERROR( "triclopsSetDisparity()", te );

    //ROS_INFO("stereoData x,y: %d,%d",stereoData.nrows, stereoData.ncols);
    te = triclopsSetResolution( triclops, stereoData.nrows, stereoData.ncols );
    _HANDLE_TRICLOPS_ERROR( "triclopsSetResolution()", te );

    // Rectify the images.  The legacy C API is not const-correct, hence the
    // const_cast; triclopsRectify does not modify the input.
    te = triclopsRectify( triclops, const_cast<TriclopsInput *>( &stereoData ) );
    _HANDLE_TRICLOPS_ERROR( "triclopsRectify()", te );

    // Do stereo processing
    te = triclopsStereo( triclops );
    _HANDLE_TRICLOPS_ERROR( "triclopsStereo()", te );

    // Retrieve the interpolated depth image from the context.
    // Fixed: error label previously said "triclopsGetImage()".
    te = triclopsGetImage16( triclops, TriImg16_DISPARITY, TriCam_REFERENCE, &depthImage );
    _HANDLE_TRICLOPS_ERROR( "triclopsGetImage16()", te );

    return 0;
}
int main( int /* argc */, char** /* argv */ ) { TriclopsContext triclops; TriclopsImage disparityImage; TriclopsImage refImage; TriclopsInput triclopsInput; TriclopsROI* pRois; int nMaxRois; TriclopsError te; FlyCaptureContext flycapture; FlyCaptureImage flycaptureImage; FlyCaptureInfoEx pInfo; FlyCapturePixelFormat pixelFormat; FlyCaptureError fe; int iMaxCols = 0; int iMaxRows = 0; // Create the camera context fe = flycaptureCreateContext( &flycapture ); _HANDLE_FLYCAPTURE_ERROR( "flycaptureCreateContext()", fe ); // Initialize the camera fe = flycaptureInitialize( flycapture, 0 ); _HANDLE_FLYCAPTURE_ERROR( "flycaptureInitialize()", fe ); // Get the camera configuration char* szCalFile; fe = flycaptureGetCalibrationFileFromCamera( flycapture, &szCalFile ); _HANDLE_FLYCAPTURE_ERROR( "flycaptureGetCalibrationFileFromCamera()", fe ); // Create a Triclops context from the cameras calibration file te = triclopsGetDefaultContextFromFile( &triclops, szCalFile ); _HANDLE_TRICLOPS_ERROR( "triclopsGetDefaultContextFromFile()", te ); // Get camera information fe = flycaptureGetCameraInfo( flycapture, &pInfo ); _HANDLE_FLYCAPTURE_ERROR( "flycatpureGetCameraInfo()", fe ); if (pInfo.CameraType == FLYCAPTURE_COLOR) { pixelFormat = FLYCAPTURE_RAW16; } else { pixelFormat = FLYCAPTURE_MONO16; } switch (pInfo.CameraModel) { case FLYCAPTURE_BUMBLEBEE2: { unsigned long ulValue; flycaptureGetCameraRegister( flycapture, 0x1F28, &ulValue ); if ( ( ulValue & 0x2 ) == 0 ) { // Hi-res BB2 iMaxCols = 1024; iMaxRows = 768; } else { // Low-res BB2 iMaxCols = 640; iMaxRows = 480; } } break; case FLYCAPTURE_BUMBLEBEEXB3: iMaxCols = 1280; iMaxRows = 960; break; default: te = TriclopsErrorInvalidCamera; _HANDLE_TRICLOPS_ERROR( "triclopsCheckCameraModel()", te ); break; } // Start grabbing fe = flycaptureStartCustomImage( flycapture, 3, 0, 0, iMaxCols, iMaxRows, 100, pixelFormat); _HANDLE_FLYCAPTURE_ERROR( "flycaptureStart()", fe ); // Grab an image from the camera fe = flycaptureGrabImage2( 
flycapture, &flycaptureImage ); _HANDLE_FLYCAPTURE_ERROR( "flycaptureGrabImage()", fe ); // Extract information from the FlycaptureImage int imageCols = flycaptureImage.iCols; int imageRows = flycaptureImage.iRows; int imageRowInc = flycaptureImage.iRowInc; int iSideBySideImages = flycaptureImage.iNumImages; unsigned long timeStampSeconds = flycaptureImage.timeStamp.ulSeconds; unsigned long timeStampMicroSeconds = flycaptureImage.timeStamp.ulMicroSeconds; // Create buffers for holding the mono images unsigned char* rowIntColor = new unsigned char[ imageCols * imageRows * iSideBySideImages * 4]; unsigned char* rowIntMono = new unsigned char[ imageCols * imageRows * iSideBySideImages ]; // Create a temporary FlyCaptureImage for preparing the stereo image FlyCaptureImage tempColorImage; FlyCaptureImage tempMonoImage; tempColorImage.pData = rowIntColor; tempMonoImage.pData = rowIntMono; // Convert the pixel interleaved raw data to row interleaved format fe = flycapturePrepareStereoImage( flycapture, flycaptureImage, &tempMonoImage, &tempColorImage ); _HANDLE_FLYCAPTURE_ERROR( "flycapturePrepareStereoImage()", fe ); // Pointers to positions in the mono buffer that correspond to the beginning // of the red, green and blue sections unsigned char* redMono = NULL; unsigned char* greenMono = NULL; unsigned char* blueMono = NULL; redMono = rowIntMono; if (flycaptureImage.iNumImages == 2) { greenMono = redMono + imageCols; blueMono = redMono + imageCols; } if (flycaptureImage.iNumImages == 3) { greenMono = redMono + imageCols; blueMono = redMono + ( 2 * imageCols ); } // Use the row interleaved images to build up an RGB TriclopsInput. // An RGB triclops input will contain the 3 raw images (1 from each camera). 
te = triclopsBuildRGBTriclopsInput( imageCols, imageRows, imageRowInc, timeStampSeconds, timeStampMicroSeconds, redMono, greenMono, blueMono, &triclopsInput); _HANDLE_TRICLOPS_ERROR( "triclopsBuildRGBTriclopsInput()", te ); // Set up some stereo parameters: // Set to 640x480 output images te = triclopsSetResolution( triclops, 480, 640 ); _HANDLE_TRICLOPS_ERROR( "triclopsSetResolution()", te ); // Set disparity range to be quite wide te = triclopsSetDisparity( triclops, 0, 200 ); _HANDLE_TRICLOPS_ERROR( "triclopsSetDisparity()", te ); // Set subpixel interpolation off - so we know we don't need to use // TriclopsImage16 structures when we access and save the disparity image te = triclopsSetSubpixelInterpolation( triclops, 0 ); _HANDLE_TRICLOPS_ERROR( "triclopsSetSubpixelInterpolation()", te ); // Get the pointer to the regions of interest array te = triclopsGetROIs( triclops, &pRois, &nMaxRois ); _HANDLE_TRICLOPS_ERROR( "triclopsGetROIs()", te ); if( nMaxRois >= 4 ) { // Set up four regions of interest: // Entire upper left quadrant of image pRois[0].row = 0; pRois[0].col = 0; pRois[0].nrows = 240; pRois[0].ncols = 320; // Part of the lower right pRois[1].row = 240; pRois[1].col = 320; pRois[1].nrows = 180; pRois[1].ncols = 240; // Centered in upper right quadrant pRois[2].row = 60; pRois[2].col = 400; pRois[2].nrows = 120; pRois[2].ncols = 160; // Small section of lower left pRois[3].row = 300; pRois[3].col = 30; pRois[3].nrows = 80; pRois[3].ncols = 80; // Tell the TriclopsContext how many ROIs we want to process te = triclopsSetNumberOfROIs( triclops, 4 ); _HANDLE_TRICLOPS_ERROR( "triclopsSetNumberOfROIs()", te ); } else { printf( "Only %d ROIs available in the TriclopsContext " "- this should never happen!\n" "Aborting!\n", nMaxRois ); // Destroy the Triclops context triclopsDestroyContext( triclops ) ; // Close the camera and destroy the context flycaptureStop( flycapture ); flycaptureDestroyContext( flycapture ); return 1; } // Rectify the images te = 
triclopsRectify( triclops, &triclopsInput ); _HANDLE_TRICLOPS_ERROR( "triclopsRectify()", te ); // Do stereo processing te = triclopsStereo( triclops ); _HANDLE_TRICLOPS_ERROR( "triclopsStereo()", te ); // Retrieve the disparity image from the Triclops context te = triclopsGetImage( triclops, TriImg_DISPARITY, TriCam_REFERENCE, &disparityImage ); _HANDLE_TRICLOPS_ERROR( "triclopsGetImage()", te ); // Retrieve the rectified image from the Triclops context te = triclopsGetImage( triclops, TriImg_RECTIFIED, TriCam_REFERENCE, &refImage ); _HANDLE_TRICLOPS_ERROR( "triclopsGetImage()", te ); // Save the disparity image te = triclopsSaveImage( &disparityImage, "disparity.pgm" ); _HANDLE_TRICLOPS_ERROR( "triclopsSaveImage()", te ); // Save the rectified image te = triclopsSaveImage( &refImage, "rectified.pgm" ); _HANDLE_TRICLOPS_ERROR( "triclopsSaveImage()", te ); // Delete the image buffer, it is not needed once the TriclopsInput // has been built delete [] rowIntMono; redMono = NULL; greenMono = NULL; blueMono = NULL; // Destroy the Triclops context triclopsDestroyContext( triclops ) ; // Close the camera and destroy the Flycapture context flycaptureStop( flycapture ); flycaptureDestroyContext( flycapture ); return 0; }
int main( int /* argc */, char* /* * argv */ ) { TriclopsContext triclops; TriclopsError error; TriclopsInput triclopsInput; char* szInputFile = "input.ppm"; char* szCalFile = "input.cal"; char* szDisparityBase = "disparity"; // Get the camera calibration data error = triclopsGetDefaultContextFromFile( &triclops, szCalFile ); _HANDLE_TRICLOPS_ERROR( "triclopsGetDefaultContextFromFile(): " "Can't open calibration file", error ); // Load images from file if ( !ppmReadToTriclopsInput( szInputFile, &triclopsInput ) ) { printf( "ppmReadToTriclopsInput() failed. Can't read '%s'\n", szInputFile ); return 1; } doRectifyStereoSave( triclops, &triclopsInput, szDisparityBase, "default" ); // try doing stereo with a big stereo mask triclopsSetStereoMask( triclops, 21 ); doRectifyStereoSave( triclops, &triclopsInput, szDisparityBase, "bigmask" ); // try doing stereo with a small stereo mask triclopsSetStereoMask( triclops, 5 ); doRectifyStereoSave( triclops, &triclopsInput, szDisparityBase, "smallmask" ); // set mask to a neutral value triclopsSetStereoMask( triclops, 11 ); // try doing stereo without any validation triclopsSetSurfaceValidation( triclops, 0 ); triclopsSetUniquenessValidation( triclops, 0 ); triclopsSetTextureValidation( triclops, 0 ); doRectifyStereoSave( triclops, &triclopsInput, szDisparityBase, "novalidation" ); // try doing stereo with only texture and surface triclopsSetSurfaceValidation( triclops, 1 ); triclopsSetTextureValidation( triclops, 1 ); doRectifyStereoSave( triclops, &triclopsInput, szDisparityBase, "surf-tex-val" ); // try doing stereo in 2 camera mode with back and forth validation triclopsSetCameraConfiguration( triclops, TriCfg_2CAM_HORIZONTAL ); triclopsSetBackForthValidation( triclops, 1 ); doRectifyStereoSave( triclops, &triclopsInput, szDisparityBase, "backforth" ); // try doing stereo in subpixel mode to compare the results of subpixel with // whole pixel - try running "convertimage16" example to convert this image to // one that is 
more easily viewed triclopsSetSubpixelInterpolation( triclops, 1 ); doRectifyStereoSave( triclops, &triclopsInput, szDisparityBase, "subpixel" ); // clean up memory allocated in context freeInput( &triclopsInput ); error = triclopsDestroyContext( triclops ); _HANDLE_TRICLOPS_ERROR( "triclopsDestroyContext()", error ); return 0; }
// Construct the stereo camera wrapper: enumerate 1394 cameras, pick the
// first stereo camera, configure capture and transmission, allocate the
// image buffers, and fetch/configure the TriclopsContext.
// On enumeration failure nThisCam is set to -1 and construction stops early
// (the original fell through and dereferenced an invalid list/camera).
// Later fatal errors go through cleanup_and_exit(), as before.
stereoCam::stereoCam()
{
    isInRoutine = false;

    // Find cameras on the 1394 buses
    d = dc1394_new ();

    // Enumerate cameras connected to the PC
    err = dc1394_camera_enumerate (d, &list);
    if ( err != DC1394_SUCCESS )
    {
        fprintf( stderr, "Unable to look for cameras\n\n"
                 "Please check \n"
                 " - if the kernel modules `ieee1394',`raw1394' and `ohci1394' "
                 "are loaded \n"
                 " - if you have read/write access to /dev/raw1394\n\n");
        nThisCam = -1;
        // Fixed: previously fell through and dereferenced 'list' below
        return;
    }

    if (list->num == 0)
    {
        fprintf( stderr, "No cameras found!\n");
        dc1394_camera_free_list (list);
        nThisCam = -1;
        // Fixed: previously continued with no camera available
        return;
    }

    printf( "There were %d camera(s) found attached to your PC\n", list->num );

    // Identify cameras. Use the first stereo camera that is found
    for ( nThisCam = 0; nThisCam < list->num; nThisCam++ )
    {
        camera = dc1394_camera_new(d, list->ids[nThisCam].guid);
        if (!camera)
        {
            // Fixed: cast guid so the argument matches the %llx specifier
            printf("Failed to initialize camera with guid %llx",
                   (unsigned long long) list->ids[nThisCam].guid);
            continue;
        }

        printf( "Camera %d model = '%s'\n", nThisCam, camera->model );

        if ( isStereoCamera(camera))
        {
            printf( "Using this camera\n" );
            break;
        }

        dc1394_camera_free(camera);
    }

    if ( nThisCam == list->num )
    {
        printf( "No stereo cameras were detected\n" );
        dc1394_camera_free_list (list);
        nThisCam = -1;
        // Fixed: previously went on to query an invalid 'camera' handle
        return;
    }

    // Free memory used by the camera list
    dc1394_camera_free_list (list);

    // query information about this stereo camera
    err = queryStereoCamera( camera, &stereoCamera );
    if ( err != DC1394_SUCCESS )
    {
        fprintf( stderr, "Cannot query all information from camera\n" );
        cleanup_and_exit( camera );
    }

    if ( stereoCamera.nBytesPerPixel != 2 )
    {
        // can't handle XB3 3 bytes per pixel
        fprintf( stderr,
                 "Example not updated to work with XB3 in 3 camera mode yet!\n" );
        cleanup_and_exit( stereoCamera.camera );
    }

    // set the capture mode
    printf( "Setting stereo video capture mode\n" );
    err = setStereoVideoCapture( &stereoCamera );
    if ( err != DC1394_SUCCESS )
    {
        fprintf( stderr, "Could not set up video capture mode\n" );
        cleanup_and_exit( stereoCamera.camera );
    }

    // have the camera start sending us data
    printf( "Start transmission\n" );
    err = startTransmission( &stereoCamera );
    if ( err != DC1394_SUCCESS )
    {
        fprintf( stderr, "Unable to start camera iso transmission\n" );
        cleanup_and_exit( stereoCamera.camera );
    }

    // give the auto-gain algorithms a chance to catch up
    // printf( "Giving auto-gain algorithm a chance to stabilize\n" );
    // sleep( 5 );

    // Allocate all the buffers.
    // Unfortunately color processing is a bit inefficient because of the
    // number of data copies.  Color data needs to be
    //   - de-interleaved into separate bayer tile images
    //   - color processed into RGB images
    //   - de-interleaved to extract the green channel
    //     for stereo (or other mono conversion)

    // size of buffer for all images at mono8
    nBufferSize = stereoCamera.nRows * stereoCamera.nCols * stereoCamera.nBytesPerPixel;

    // allocate buffers for the de-interleaved, RGB and green-channel images
    pucDeInterlacedBuffer = new unsigned char[ nBufferSize ];
    pucRGBBuffer          = new unsigned char[ 3 * nBufferSize ];
    pucGreenBuffer        = new unsigned char[ nBufferSize ];

    // do stereo processing
    printf( "Getting TriclopsContext from camera (slowly)... \n" );
    e = getTriclopsContextFromCamera( &stereoCamera, &triclops );
    TRIERR(e);
    printf( "...got it\n" );

    // make sure we are in subpixel mode
    e = triclopsSetSubpixelInterpolation( triclops, 1 );
    TRIERR(e);

    e = triclopsSetDisparityMapping(triclops,0,60);
    TRIERR(e);
    e = triclopsSetDisparityMappingOn(triclops,1);
    TRIERR(e);
    e = triclopsSetUniquenessValidationMapping(triclops,0);
    TRIERR(e);
    e = triclopsSetTextureValidationMapping(triclops,0);
    TRIERR(e);
}