int
main(int argc, char * argv[]) {

    /* Convert an XV thumbnail image (read via readXvHeader) to PPM on
       Standard Output, mapping pixels through the fixed XV palette. */

    struct cmdlineInfo cmdline;
    FILE *       inFileP;
    unsigned int width;
    unsigned int height;
    pixval       maxval;
    xvPalette    palette;

    ppm_init(&argc, argv);

    parseCommandLine(argc, argv, &cmdline);

    inFileP = pm_openr(cmdline.inputFileName);

    /* Read the dimensions and maxval from the XV header first; the
       palette is fixed and can be built at any point before writing. */
    readXvHeader(inFileP, &width, &height, &maxval);

    makeXvPalette(&palette);

    writePpm(inFileP, &palette, width, height, maxval, stdout);

    pm_close(inFileP);

    return 0;
}
void publish_saliency_points_message()
{
    /* Log the current saliency_points message to disk: write the
       reference image as a numbered PPM, then append the image path,
       the robot pose, and every saliency point to data.txt.
       (The IPC publish below is currently disabled.) */
    FILE* fd;
    char image[256];

    /* snprintf instead of sprintf: bounds the write into image[256]. */
    snprintf(image, sizeof(image),
             "/media/OS/Users/Lauro/Downloads/Log/global_map/datasets3/2012/%03dl.ppm",
             frame_counter);
    writePpm(image, saliency_points.reference_image,
             saliency_points.image_width, saliency_points.image_height);

    fd = fopen("/media/OS/Users/Lauro/Downloads/Log/global_map/datasets3/2012/data.txt", "a+");
    if (fd == NULL)
    {
        /* Bug fix: the original dereferenced a NULL FILE* when the log
           directory was absent. Skip logging instead of crashing. */
        fprintf(stderr, "publish_saliency_points_message: could not open data.txt\n");
        return;
    }

    /* One line per frame: "<image path> <6-DOF pose> {point records}". */
    fprintf(fd, "%s ", image);
    fprintf(fd, "%f %f %f %f %f %f",
            saliency_points.pose.position.x,
            saliency_points.pose.position.y,
            saliency_points.pose.position.z,
            saliency_points.pose.orientation.yaw,
            saliency_points.pose.orientation.pitch,
            saliency_points.pose.orientation.roll);

    /* Each point: pixel coordinates plus its 3D pose. */
    for(int i = 0; i < saliency_points.saliency_list_size; i++)
    {
        fprintf(fd, " %d %d %f %f %f",
                saliency_points.saliency_list[i].coordinates.x,
                saliency_points.saliency_list[i].coordinates.y,
                saliency_points.saliency_list[i].pose.x,
                saliency_points.saliency_list[i].pose.y,
                saliency_points.saliency_list[i].pose.z);
    }

    fprintf(fd, "\n");
    fclose(fd);

    // IPC_RETURN_TYPE err;
    //
    // publishing_saliencies = 1;
    // err = IPC_publishData(CARMEN_SALIENCY_SEARCH_SALIENCY_POINTS_MESSAGE_NAME, &saliency_points);
    // carmen_test_ipc_exit(err, "Could not publish", CARMEN_SALIENCY_SEARCH_SALIENCY_POINTS_MESSAGE_FMT);
}
/* version that takes the DPPU pan and tilt, and the robot pose */
/*
 * Grab one stereo frame, run Triclops rectification + stereo, and emit
 * 3D world-frame points.  With OPENCV defined it also fills the given
 * RGB frame, disparity, and depth images; without it, only the text
 * output stream "out" is produced.
 *
 * NOTE(review): pan/tilt/rx/ry/rth are only parameters in the OPENCV
 * build, yet transformBumbleBee2World() uses them unconditionally below
 * -- presumably the non-OPENCV build path is never compiled; confirm.
 */
void stereoCam::doStereoFrame(
#ifdef OPENCV
                              IplImage *frame, IplImage *disparity,
                              IplImage *depth, FILE *out,
                              double pan, double tilt,
                              double rx, double ry, double rth
#else
                              FILE *out
#endif
                             )
{
    if ( inRoutine() ) return;  // protected routine

    /* Grab repeatedly (10x) before processing -- presumably to flush
       stale DMA buffers so the processed frame is current; confirm. */
    for (int i=0; i<10; i++) {
        pucRightRGB = NULL;
        pucLeftRGB = NULL;
        pucCenterRGB = NULL;
        extractImagesColor( &stereoCamera, DC1394_BAYER_METHOD_NEAREST,
                            pucDeInterlacedBuffer, pucRGBBuffer,
                            pucGreenBuffer,
                            &pucRightRGB, &pucLeftRGB, &pucCenterRGB,
                            &input );
    }

#if 0
    // write out the raw images from the camera
    if ( !writePpm( "yright.ppm", pucRightRGB,
                    stereoCamera.nCols, stereoCamera.nRows ) )
        printf( "wrote right.ppm\n" );
    if ( !writePpm( "yleft.ppm", pucLeftRGB,
                    stereoCamera.nCols, stereoCamera.nRows ) )
        printf( "wrote left.ppm\n" );
#endif

    /* Rectify the raw capture, then run the stereo kernel. */
    e = triclopsRectify( triclops, &input );
    TRIERR(e);
    // e = triclopsPreprocess(triclops, &input);
    // TRIERR(e);
    /************************************/
    e = triclopsStereo( triclops );
    TRIERR(e)  // NOTE(review): no ';' here -- relies on TRIERR supplying one
    /************************************/

#if 0
    // get and save the rectified and disparity images
    e=triclopsGetImage( triclops, TriImg_RECTIFIED, TriCam_REFERENCE, &image );
    TRIERR(e);
    e=triclopsSaveImage( &image, "yrectified.pgm" );
    TRIERR(e);
    printf( "wrote 'rectified.pgm'\n" );
#endif

    /* 16-bit subpixel disparity for the reference camera. */
    e=triclopsGetImage16( triclops, TriImg16_DISPARITY, TriCam_REFERENCE,
                          &image16 );
    TRIERR(e);

#if 0
    // save disparity image -- warning its 16bit not 8bit!
    if (gWriteImages) {
        e=triclopsSaveImage16( &image16, "ydisparity.pgm" );
        TRIERR(e);
        printf( "wrote 'disparity.pgm'\n" );
    }
#endif

    outRoutine();  // allow other access to camera now

    int nPoints=0;
    int iPixelInc = image16.rowinc/2;          // disparity row stride, in shorts
    int rPixelInc = 3*stereoCamera.nCols;      // RGB row stride, in bytes

    for ( int iRow = 0; iRow < image16.nrows; iRow++ )
    {
        unsigned short* pusRow = image16.data + iRow * iPixelInc;
        /* NOTE(review): "3 * iRow * rPixelInc" advances 9*nCols bytes per
           row, and "3*iCol*3" below is 9 bytes per column, even though
           rPixelInc already accounts for 3 bytes/pixel.  This only works
           if pucRightRGB has a tripled layout from extractImagesColor --
           verify against that function; otherwise this reads the wrong
           pixels. */
        unsigned char * rgbRow = pucRightRGB + 3 * iRow * rPixelInc;

        for ( int iCol= 0; iCol < image16.ncols; iCol++ )
        {
            unsigned short usDisp = pusRow[ iCol ];
            float x,y,z;
            double posBB[3], posW[3];  // x,y,z in bumblebee and in world frames

#ifdef OPENCV
            if (frame!=0)
                cvSet2D(frame,iRow,iCol,  // populate RGB image
                        cvScalar(rgbRow[ 3*iCol*3+2],
                                 rgbRow[ 3*iCol*3+1],
                                 rgbRow[ 3*iCol*3 ],0));
#endif
            if ( usDisp < 0xFF00 )  // valid disparity only
            {
                // convert the 16 bit disparity value to floating point x,y,z
                triclopsRCD16ToXYZ( triclops, iRow, iCol, usDisp,
                                    &x, &y, &z  // x,y,z, in bumblebee frarame
                                  );
                nPoints++;
                posBB[0]=x;
                posBB[1]=y;
                posBB[2]=z;
                if (z>0.1 && z< 5.0)  // near and far distance (meters) filter
                {
                    transformBumbleBee2World(posBB, posW,
                                             pan, tilt, rx, ry, rth);
                    if (out!=0)
                        fprintf(out, "%f, %f, %f, %d, %d, %d, %d, %d, %d\n",
                                posW[0], posW[1], posW[2],  // write out world frame locations
                                iRow, iCol, usDisp,
                                rgbRow[ 3*iCol*3],
                                rgbRow[ 3*iCol*3+1],
                                rgbRow[ 3*iCol*3+2]);
#ifdef OPENCV
                    if (depth!=0)
                        cvSet2D(depth,iRow,iCol,  // populate depth image with local frame info
                                //cvScalar(double(x), double(y), double(z), 0.0));
                                cvScalar(posW[0], posW[1], posW[2], 0));
#endif
                }// write out result
#ifdef OPENCV
                if (disparity!=0)
                    cvSet2D(disparity,iRow,iCol,  // populate disparity image
                            cvScalar(usDisp/255,0,0,0));
#endif
            }// if valid disparity
        }// for iCol
    }// for iRow

    return;
}
int main( int argc, char *argv[] ) { dc1394camera_t* camera; dc1394error_t err; dc1394_t * d; dc1394camera_list_t * list; unsigned int nThisCam; // Find cameras on the 1394 buses d = dc1394_new (); // Enumerate cameras connected to the PC err = dc1394_camera_enumerate (d, &list); if ( err != DC1394_SUCCESS ) { fprintf( stderr, "Unable to look for cameras\n\n" "Please check \n" " - if the kernel modules `ieee1394',`raw1394' and `ohci1394' " "are loaded \n" " - if you have read/write access to /dev/raw1394\n\n"); return 1; } if (list->num == 0) { fprintf( stderr, "No cameras found!\n"); return 1; } printf( "There were %d camera(s) found attached to your PC\n", list->num ); // Identify cameras. Use the first stereo camera that is found for ( nThisCam = 0; nThisCam < list->num; nThisCam++ ) { camera = dc1394_camera_new(d, list->ids[nThisCam].guid); if(!camera) { printf("Failed to initialize camera with guid %llx", list->ids[nThisCam].guid); continue; } printf( "Camera %d model = '%s'\n", nThisCam, camera->model ); if ( isStereoCamera(camera)) { printf( "Using this camera\n" ); break; } dc1394_camera_free(camera); } if ( nThisCam == list->num ) { printf( "No stereo cameras were detected\n" ); return 0; } // Free memory used by the camera list dc1394_camera_free_list (list); PGRStereoCamera_t stereoCamera; // query information about this stereo camera err = queryStereoCamera( camera, &stereoCamera ); if ( err != DC1394_SUCCESS ) { fprintf( stderr, "Cannot query all information from camera\n" ); cleanup_and_exit( camera ); } // set the capture mode printf( "Setting stereo video capture mode\n" ); err = setStereoVideoCapture( &stereoCamera ); if ( err != DC1394_SUCCESS ) { fprintf( stderr, "Could not set up video capture mode\n" ); cleanup_and_exit( stereoCamera.camera ); } // have the camera start sending us data printf( "Start transmission\n" ); err = startTransmission( &stereoCamera ); if ( err != DC1394_SUCCESS ) { fprintf( stderr, "Unable to start camera iso 
transmission\n" ); cleanup_and_exit( stereoCamera.camera ); } // give the auto-gain algorithms a chance to catch up printf( "Wait for the auto-gain algorithm to stabilize\n" ); sleep( 5 ); // Allocate all the buffers. // Unfortunately color processing is a bit inefficient because of the number of // data copies. Color data needs to be // - de-interleaved into separate bayer tile images // - color processed into RGB images // - de-interleaved to extract the green channel for stereo (or other mono conversion) // size of capture buffer unsigned int nBufferSize = stereoCamera.nRows * stereoCamera.nCols * stereoCamera.nBytesPerPixel; // allocate a buffer to hold the de-interleaved images unsigned char* pucDeInterlacedBuffer = new unsigned char[ nBufferSize ]; if ( stereoCamera.bColor ) { unsigned char* pucRGBBuffer = new unsigned char[ 3 * nBufferSize ]; unsigned char* pucGreenBuffer = new unsigned char[ nBufferSize ]; unsigned char* pucRightRGB = NULL; unsigned char* pucLeftRGB = NULL; unsigned char* pucCenterRGB = NULL; TriclopsInput input; // get the images from the capture buffer and do all required processing // note: produces a TriclopsInput that can be used for stereo processing extractImagesColor( &stereoCamera, DC1394_BAYER_METHOD_NEAREST, pucDeInterlacedBuffer, pucRGBBuffer, pucGreenBuffer, &pucRightRGB, &pucLeftRGB, &pucCenterRGB, &input ); // write the color images to file if ( !writePpm( "right.ppm", pucRightRGB, stereoCamera.nCols, stereoCamera.nRows ) ) printf( "wrote right.ppm\n" ); if ( !writePpm( "left.ppm", pucLeftRGB, stereoCamera.nCols, stereoCamera.nRows ) ) printf( "wrote left.ppm\n" ); if ( pucCenterRGB != pucLeftRGB ) if ( !writePpm( "center.ppm", pucCenterRGB, stereoCamera.nCols, stereoCamera.nRows ) ) printf( "wrote center.ppm\n" ); delete[] pucRGBBuffer; delete[] pucGreenBuffer; } else { unsigned char* pucRightMono = NULL; unsigned char* pucLeftMono = NULL; unsigned char* pucCenterMono = NULL; TriclopsInput input; // get the images from the 
capture buffer and do all required processing // note: produces a TriclopsInput that can be used for stereo processing extractImagesMono( &stereoCamera, pucDeInterlacedBuffer, &pucRightMono, &pucLeftMono, &pucCenterMono, &input ); // write the greyscale images to file if ( !writePgm( "right.pgm", pucRightMono, stereoCamera.nCols, stereoCamera.nRows ) ) printf( "wrote right.pgm\n" ); if ( !writePgm( "left.pgm", pucLeftMono, stereoCamera.nCols, stereoCamera.nRows ) ) printf( "wrote left.pgm\n" ); if ( pucCenterMono != pucLeftMono ) if ( !writePgm( "center.pgm", pucCenterMono, stereoCamera.nCols, stereoCamera.nRows ) ) printf( "wrote center.pgm\n" ); } printf( "Stop transmission\n" ); // Stop data transmission if ( dc1394_video_set_transmission( stereoCamera.camera, DC1394_OFF ) != DC1394_SUCCESS ) { fprintf( stderr, "Couldn't stop the camera?\n" ); } delete[] pucDeInterlacedBuffer; // close camera cleanup_and_exit( camera ); return 0; }