/* version that takes the DPPU pan and tilt, and the robot pose */ void stereoCam::doStereoFrame( #ifdef OPENCV IplImage *frame, IplImage *disparity, IplImage *depth, FILE *out, double pan, double tilt, double rx, double ry, double rth #else FILE *out #endif ) { if ( inRoutine() ) return; // protected routine for (int i=0; i<10; i++) { pucRightRGB = NULL; pucLeftRGB = NULL; pucCenterRGB = NULL; extractImagesColor( &stereoCamera, DC1394_BAYER_METHOD_NEAREST, pucDeInterlacedBuffer, pucRGBBuffer, pucGreenBuffer, &pucRightRGB, &pucLeftRGB, &pucCenterRGB, &input ); } #if 0 // write out the raw images from the camera if ( !writePpm( "yright.ppm", pucRightRGB, stereoCamera.nCols, stereoCamera.nRows ) ) printf( "wrote right.ppm\n" ); if ( !writePpm( "yleft.ppm", pucLeftRGB, stereoCamera.nCols, stereoCamera.nRows ) ) printf( "wrote left.ppm\n" ); #endif e = triclopsRectify( triclops, &input ); TRIERR(e); // e = triclopsPreprocess(triclops, &input); // TRIERR(e); /************************************/ e = triclopsStereo( triclops ); TRIERR(e) /************************************/ #if 0 // get and save the rectified and disparity images e=triclopsGetImage( triclops, TriImg_RECTIFIED, TriCam_REFERENCE, &image ); TRIERR(e); e=triclopsSaveImage( &image, "yrectified.pgm" ); TRIERR(e); printf( "wrote 'rectified.pgm'\n" ); #endif e=triclopsGetImage16( triclops, TriImg16_DISPARITY, TriCam_REFERENCE, &image16 ); TRIERR(e); #if 0 // save disparity image -- warning its 16bit not 8bit! 
if (gWriteImages) { e=triclopsSaveImage16( &image16, "ydisparity.pgm" ); TRIERR(e); printf( "wrote 'disparity.pgm'\n" ); } #endif outRoutine(); // allow other access to camera now int nPoints=0; int iPixelInc = image16.rowinc/2; int rPixelInc = 3*stereoCamera.nCols; for ( int iRow = 0; iRow < image16.nrows; iRow++ ) { unsigned short* pusRow = image16.data + iRow * iPixelInc; unsigned char * rgbRow = pucRightRGB + 3 * iRow * rPixelInc; for ( int iCol= 0; iCol < image16.ncols; iCol++ ) { unsigned short usDisp = pusRow[ iCol ]; float x,y,z; double posBB[3], posW[3]; // x,y,z in bumblebee and in world frames #ifdef OPENCV if (frame!=0) cvSet2D(frame,iRow,iCol, // populate RGB image cvScalar(rgbRow[ 3*iCol*3+2], rgbRow[ 3*iCol*3+1], rgbRow[ 3*iCol*3 ],0)); #endif if ( usDisp < 0xFF00 ) { // valid disparity only // convert the 16 bit disparity value to floating point x,y,z triclopsRCD16ToXYZ( triclops, iRow, iCol, usDisp, &x, &y, &z // x,y,z, in bumblebee frarame ); nPoints++; posBB[0]=x; posBB[1]=y; posBB[2]=z; if (z>0.1 && z< 5.0) {// near and far distance (meters) filter transformBumbleBee2World(posBB, posW, pan, tilt, rx, ry, rth); if (out!=0) fprintf(out, "%f, %f, %f, %d, %d, %d, %d, %d, %d\n", posW[0], posW[1], posW[2], // write out world frame locations iRow, iCol, usDisp, rgbRow[ 3*iCol*3], rgbRow[ 3*iCol*3+1], rgbRow[ 3*iCol*3+2]); #ifdef OPENCV if (depth!=0) cvSet2D(depth,iRow,iCol, // populate depth image with local frame info //cvScalar(double(x), double(y), double(z), 0.0)); cvScalar(posW[0], posW[1], posW[2], 0)); #endif }// write out result #ifdef OPENCV if (disparity!=0) cvSet2D(disparity,iRow,iCol, // populate disparity image cvScalar(usDisp/255,0,0,0)); #endif }// if valid disparity }// for iCol }// for iRow return; }
/*
 * Extract a 3-D point cloud from the current Triclops disparity image.
 *
 * Every pixel with a valid 16-bit disparity is converted to metric x,y,z,
 * remapped into the output axis convention, range-filtered to < 5 m and
 * appended to `returnedPoints` together with its rectified colour (or the
 * replicated grey value for mono cameras).
 *
 * Returns 0; Triclops failures are routed through _HANDLE_TRICLOPS_ERROR.
 */
int do3dPoints( FC2::Image const & grabbedImage,
                TriclopsContext const & triclops,
                TriclopsImage16 const & disparityImage16,
                TriclopsInput const & colorData,
                PointCloud & returnedPoints )
{
    TriclopsImage      monoImage  = {0};
    TriclopsColorImage colorImage = {0};
    TriclopsError      status;

    // RAW16 pixel data means a colour camera: rectify the colour input so
    // its planes line up with the disparity image.  Otherwise fetch the
    // rectified mono image instead.
    const bool haveColor =
        ( grabbedImage.GetPixelFormat() == FC2::PIXEL_FORMAT_RAW16 );
    if ( haveColor )
    {
        status = triclopsRectifyColorImage(
                     triclops, TriCam_REFERENCE,
                     const_cast<TriclopsInput *>( &colorData ),
                     &colorImage );
        _HANDLE_TRICLOPS_ERROR( "triclopsRectifyColorImage()", status );
    }
    else
    {
        status = triclopsGetImage( triclops, TriImg_RECTIFIED,
                                   TriCam_REFERENCE, &monoImage );
        _HANDLE_TRICLOPS_ERROR( "triclopsGetImage()", status );
    }

    // Row stride of the disparity image, in 16-bit pixels.
    const int rowStride = disparityImage16.rowinc / 2;

    int accepted = 0;   // points actually appended (kept for debugging)
    int flatIdx  = 0;   // running row-major index into the rectified
                        // colour/mono planes.  NOTE(review): this assumes
                        // their row increment equals ncols -- confirm
                        // against the Triclops documentation.

    for ( int r = 0; r < disparityImage16.nrows; ++r )
    {
        const unsigned short *dispRow =
            disparityImage16.data + r * rowStride;

        for ( int c = 0; c < disparityImage16.ncols; ++c, ++flatIdx )
        {
            const unsigned short disp = dispRow[c];

            // Values of 0xFF00 and above mark invalid disparities.
            if ( disp >= 0xFF00 )
                continue;

            // 16-bit disparity -> metric camera-frame coordinates.
            float x, y, z;
            triclopsRCD16ToXYZ( triclops, r, c, disp, &x, &y, &z );

            // Keep only points with z under 5 m (exact negation of the
            // range test, so NaN handling is unchanged).
            if ( !( z < 5.0 ) )
                continue;

            // Remap the camera axes into the point-cloud convention.
            PointT pt;
            pt.x = z;
            pt.y = -x;
            pt.z = -y;

            if ( haveColor )
            {
                pt.r = ( int )colorImage.red[flatIdx];
                pt.g = ( int )colorImage.green[flatIdx];
                pt.b = ( int )colorImage.blue[flatIdx];
            }
            else
            {
                // Mono camera: replicate the grey value into R, G and B.
                pt.r = ( int )monoImage.data[flatIdx];
                pt.g = ( int )monoImage.data[flatIdx];
                pt.b = ( int )monoImage.data[flatIdx];
            }

            returnedPoints.push_back( pt );
            ++accepted;
        }
    }

    //ROS_INFO( "Points in file: %d\n", accepted );
    return 0;
}