Example #1
0
    /// Uploads pixel data into the texture's cached internal RGBA image.
    /// @param x,y    destination offset (texels) inside the texture
    /// @param data   source image; may cover the whole texture or a sub-region
    /// @param level  mip level (unused here; only the level-0 cache is kept)
    void GHL_CALL TextureStage3d::SetData(UInt32 x,UInt32 y,const Image* data,UInt32 level) {
        if (data && (x==0) && (y==0) &&
            (data->GetWidth()==GetWidth()) &&
            (data->GetHeight()==GetHeight())) {
            
            // Full-texture update: replace the cached image wholesale with a
            // converted (RGBA, channel-swapped) copy of the source.
            Image* img = data->Clone();
            img->Convert(IMAGE_FORMAT_RGBA);
            img->SwapRB();
            
            if (m_internal_data) {
                m_internal_data->Release();
            }
            
            m_internal_data = img;
            
        } else {
            if (!m_internal_data) {
                m_internal_data = GHL_CreateImage(GetWidth(),GetHeight(),IMAGE_FORMAT_RGBA);
            }
            
            // Partial update: blit the converted sub-image into the cache.
            // Guard against a null source: only the fast path above checks
            // `data`, so a null pointer would previously have been
            // dereferenced here via data->Clone().
            if (data) {
                Image* img = data->Clone();
                img->Convert(IMAGE_FORMAT_RGBA);
                img->SwapRB();
                m_internal_data->Draw(x,y,img);
                img->Release();
            }
        }
        NotifySetData();
    }
/// Connects to the camera identified by `guid`, grabs one frame, converts
/// it to 8-bit mono and saves it to `filename`.
/// @param guid      camera to open
/// @param filename  destination path; its extension selects the file format
/// @param lr        non-zero = right camera, zero = left (log message only)
/// @param numImage  sequence number used only in the log message
/// @return 0 on success, -1 when connect/start fails
int RunSingleCamera( PGRGuid guid, char* filename, int lr, int numImage)
{
	printf("Begina Capturing the image:....\n");
	const int k_numImages = 1;

	Error error;
	Camera cam;

	// Connect to a camera
	error = cam.Connect(&guid);
	if (error != PGRERROR_OK)
	{
		printf("Failed to connect to camera\n");
		return -1;
	}

	// Get the camera information
	CameraInfo camInfo;
	error = cam.GetCameraInfo(&camInfo);
	if (error != PGRERROR_OK)
	{
		printf("Failed to get camera info\n");
		return -1;
	}

	// Start capturing images
	error = cam.StartCapture();
	if (error != PGRERROR_OK)
	{
		printf("Failed to start capture\n");
		return -1;
	}

	Image rawImage;
	for ( int imageCnt=0; imageCnt < k_numImages; imageCnt++ )
	{
		// Retrieve an image; skip this iteration on a transient error
		// (previously every error here was silently ignored).
		error = cam.RetrieveBuffer( &rawImage );
		if (error != PGRERROR_OK)
		{
			printf("Failed to retrieve image buffer\n");
			continue;
		}
		if (lr)
			printf( "Grabbed right image %d\n", numImage);
		else
			printf( "Grabbed left image %d\n", numImage);

		// Create a converted image
		Image convertedImage;

		// Convert the raw image to 8-bit mono
		error = rawImage.Convert( PIXEL_FORMAT_MONO8, &convertedImage );
		if (error != PGRERROR_OK)
		{
			printf("Failed to convert image\n");
			continue;
		}

		// Save the image. If a file format is not passed in, then the file
		// extension is parsed to attempt to determine the file format.
		error = convertedImage.Save( filename );
		if (error != PGRERROR_OK)
		{
			printf("Failed to save image to %s\n", filename);
		}
	}

	// Stop capturing images
	error = cam.StopCapture();

	// Disconnect the camera
	error = cam.Disconnect();
	return 0;
}
Example #3
0
/// Entry point: grabs frames from a globally-initialized FlyCapture2 camera
/// (`camera` / `error` are globals set up by initialize_camera()), converts
/// each frame to BGR, wraps it in a cv::Mat and publishes it on
/// "out_image_topic" as a sensor_msgs/Image.
int main(int argc, char** argv)
{
    initialize_camera();

    ros::init(argc, argv, "image_publisher");
    ros::NodeHandle nh;

    ros::Publisher pub = nh.advertise<sensor_msgs::Image>("out_image_topic", 1);

    sensor_msgs::Image my_image_msg;
    while (ros::ok())
    {
        Image rawImage;
        error = camera.RetrieveBuffer( &rawImage );
        if ( error != PGRERROR_OK )
        {
            std::cout << "capture error" << std::endl;
            // Skip this iteration instead of converting and publishing an
            // invalid buffer (the original fell through here).
            continue;
        }

        // convert to rgb
        Image rgbImage;
        rawImage.Convert( FlyCapture2::PIXEL_FORMAT_BGR, &rgbImage );

        // convert to OpenCV Mat; rowBytes accounts for any per-row padding
        unsigned int rowBytes = (double)rgbImage.GetReceivedDataSize() / (double)rgbImage.GetRows();
        cv::Mat image = cv::Mat(rgbImage.GetRows(), rgbImage.GetCols(), CV_8UC3, rgbImage.GetData(), rowBytes);

        cv_bridge::CvImage out_msg;
        out_msg.encoding = sensor_msgs::image_encodings::BGR8;
        out_msg.image    = image;
        out_msg.header.seq = 1;
        // frame_id is a std::string; the original assigned the integer 2,
        // which converts to the unprintable char '\2'. Publish "2" instead.
        out_msg.header.frame_id = "2";
        out_msg.header.stamp = ros::Time::now();

        out_msg.toImageMsg(my_image_msg);
        pub.publish(my_image_msg);

        // Service ROS callbacks (e.g. shutdown requests) each iteration.
        ros::spinOnce();
    }

    return 0;
}
Example #4
0
/**
 * Copies the pixel region [offset, offset + extent) of this image into the
 * destination buffer described by `imageDesc`, converting pixel format and
 * data type on the fly when they differ from this image's own.
 *
 * Silently does nothing when `imageDesc.data` is null or the requested
 * region is not fully inside the image.
 *
 * \param offset       first texel of the region to read
 * \param extent       size of the region to read
 * \param imageDesc    destination descriptor (format, data type, data, size)
 * \param threadCount  worker-thread count forwarded to the conversion step
 */
void Image::ReadPixels(const Offset3D& offset, const Extent3D& extent, const DstImageDescriptor& imageDesc, std::size_t threadCount) const
{
    if (imageDesc.data && IsRegionInside(offset, extent))
    {
        /* Validate required size */
        ValidateImageDataSize(extent, imageDesc);

        /* Get source image parameters */
        const auto  bpp             = GetBytesPerPixel();
        const auto  srcRowStride    = bpp * GetExtent().width;
        const auto  srcDepthStride  = srcRowStride * GetExtent().height;
        auto        src             = data_.get() + GetDataPtrOffset(offset);

        if (GetFormat() == imageDesc.format && GetDataType() == imageDesc.dataType)
        {
            /* Get destination image parameters */
            const auto  dstRowStride    = bpp * extent.width;
            const auto  dstDepthStride  = dstRowStride * extent.height;
            auto        dst             = reinterpret_cast<char*>(imageDesc.data);

            /* Blit region into destination image; same format on both sides,
               so this is a straight row-by-row copy with differing strides */
            BitBlit(
                extent, bpp,
                dst, dstRowStride, dstDepthStride,
                src, srcRowStride, srcDepthStride
            );
        }
        else
        {
            /* Copy region into temporary sub-image (still in the source
               format), so the conversion below operates on a tight buffer */
            Image subImage { extent, GetFormat(), GetDataType() };

            BitBlit(
                extent, bpp,
                reinterpret_cast<char*>(subImage.GetData()), subImage.GetRowStride(), subImage.GetDepthStride(),
                src, srcRowStride, srcDepthStride
            );

            /* Convert sub-image */
            subImage.Convert(imageDesc.format, imageDesc.dataType, threadCount);

            /* Copy sub-image into output data */
            ::memcpy(imageDesc.data, subImage.GetData(), imageDesc.dataSize);
        }
    }
}
Example #5
0
/// Grabs one frame from `camera`, converts it to BGR, undistorts it with the
/// member calibration (cameraMat/distCoeffs) and stores the flipped result
/// in the member image `img`.
/// @return 0 on success, -1 on capture failure.
int BlobTracker::get_frame (Camera &camera) {

    Image rawImage;
    err = camera.RetrieveBuffer( &rawImage );
    if ( err != PGRERROR_OK ) {
      std::cout << "capture err" << std::endl;
      // Previously this returned `false` (== 0), which is indistinguishable
      // from the success return value; use -1 so callers can tell them apart.
      return -1;
    }

    // convert to rgb
    Image rgbImage;
    rawImage.Convert( FlyCapture2::PIXEL_FORMAT_BGR, &rgbImage );

    // convert to OpenCV Mat; rowBytes accounts for any per-row padding
    unsigned int rowBytes = (double)rgbImage.GetReceivedDataSize()/(double)rgbImage.GetRows();

    // Note: this Mat wraps rgbImage's buffer without copying; it is only
    // used below while rgbImage is still alive, and rview.copyTo(img)
    // makes the persistent copy.
    cv::Mat frame;
    frame = cv::Mat(rgbImage.GetRows(), rgbImage.GetCols(), CV_8UC3, rgbImage.GetData(),rowBytes);

    // Undistort image
    cv::Mat rview, map1, map2;
    Size imageSize = frame.size();

    initUndistortRectifyMap(cameraMat, distCoeffs, Mat(),
        getOptimalNewCameraMatrix(cameraMat, distCoeffs, imageSize, 1, imageSize, 0),
        imageSize, CV_16SC2, map1, map2);
    remap(frame, rview, map1, map2, INTER_LINEAR);

    // Turn right-side up
    flip(rview,rview,-1);

    rview.copyTo(img);

    return 0;

}
Example #6
0
/// Grabs one frame from the global `pt_grey` camera, converts it to BGR and
/// writes an upright copy into `*frame`.
/// @param frame  output image (owns its own pixel data on return)
/// @return 0 on success, -1 on capture failure.
int get_frame (cv::Mat* frame) {

    Image rawImage;
    err = pt_grey.RetrieveBuffer( &rawImage );
    if ( err != PGRERROR_OK ) {
        std::cout << "capture err" << std::endl;
        // Previously returned `false` (== 0), indistinguishable from the
        // success return value; use -1 so callers can detect the failure.
        return -1;
    }

    // convert to rgb
    Image rgbImage;
    rawImage.Convert( FlyCapture2::PIXEL_FORMAT_BGR, &rgbImage );

    // convert to OpenCV Mat; rowBytes accounts for any per-row padding
    unsigned int rowBytes = (double)rgbImage.GetReceivedDataSize()/(double)rgbImage.GetRows();

    // clone() is required: the constructed Mat merely wraps rgbImage's
    // buffer, which is freed when rgbImage goes out of scope at return —
    // without the copy, *frame would dangle.
    *frame = cv::Mat(rgbImage.GetRows(), rgbImage.GetCols(), CV_8UC3, rgbImage.GetData(),rowBytes).clone();

    // Turn right-side up
    flip(*frame,*frame,-1);

    return 0;

}
Example #7
0
/// Captures k_numImages frames from the camera identified by `guid`, forcing
/// a fixed (extended) shutter time of k_shutterVal milliseconds, and saves
/// each frame as a timestamped .bmp file named "GONetwork-...".
///
/// @param guid          identifier of the camera to open
/// @param k_numImages   number of frames to grab
/// @param k_shutterVal  absolute shutter time in milliseconds
/// @return 0 on success, -1 on any FlyCapture2 API failure
int RunSingleCamera( PGRGuid guid , int k_numImages , float k_shutterVal)
{

	Error error;
	Camera cam;

	// Connect to a camera
	error = cam.Connect(&guid);
	if (error != PGRERROR_OK)
	{
		PrintError( error );
		return -1;
	}

	// Get the camera information
	CameraInfo camInfo;
	error = cam.GetCameraInfo(&camInfo);
	if (error != PGRERROR_OK)
	{
		PrintError( error );
		return -1;
	}

	// Check if the camera supports the FRAME_RATE property
	PropertyInfo propInfo;
	propInfo.type = FRAME_RATE;
	error = cam.GetPropertyInfo( &propInfo );
	if (error != PGRERROR_OK)
	{
		PrintError( error );
		return -1;
	}

	ExtendedShutterType shutterType = NO_EXTENDED_SHUTTER;

	if ( propInfo.present == true )
	{
		// Turn off frame rate: with auto frame rate disabled the shutter
		// time is no longer bounded by the frame period.

		Property prop;
		prop.type = FRAME_RATE;
		error = cam.GetProperty( &prop );
		if (error != PGRERROR_OK)
		{
			PrintError( error );
			return -1;
		}

		prop.autoManualMode = false;
		prop.onOff = false;

		error = cam.SetProperty( &prop );
		if (error != PGRERROR_OK)
		{
			PrintError( error );
			return -1;
		}

		shutterType = GENERAL_EXTENDED_SHUTTER;
	}
	else
	{
		// Frame rate property does not appear to be supported.
		// Disable the extended shutter register instead.
		// This is only applicable for Dragonfly.

		const unsigned int k_extendedShutter = 0x1028;
		unsigned int extendedShutterRegVal = 0;

		error = cam.ReadRegister( k_extendedShutter, &extendedShutterRegVal );
		if (error != PGRERROR_OK)
		{
			PrintError( error );
			return -1;
		}

		// NOTE(review): bit 31 of this register appears to gate
		// extended-shutter support — confirm against the camera's
		// register reference.
		std::bitset<32> extendedShutterBS((int) extendedShutterRegVal );
		if ( extendedShutterBS[31] == true )
		{
			// Set the camera into extended shutter mode
			error = cam.WriteRegister( k_extendedShutter, 0x80020000 );
			if (error != PGRERROR_OK)
			{
				PrintError( error );
				return -1;
			}
		}
		else
		{
			cout << "Frame rate and extended shutter are not supported... exiting" << endl;
			return -1;
		}

		shutterType = DRAGONFLY_EXTENDED_SHUTTER;
	}

	// Set the shutter property of the camera
	Property prop;
	prop.type = SHUTTER;
	error = cam.GetProperty( &prop );
	if (error != PGRERROR_OK)
	{
		PrintError( error );
		return -1;
	}

	prop.autoManualMode = false;
	prop.absControl = true;

	// Absolute-mode shutter value, interpreted in milliseconds
	prop.absValue = k_shutterVal;

	error = cam.SetProperty( &prop );
	if (error != PGRERROR_OK)
	{
		PrintError( error );
		return -1;
	}

	cout << "Shutter time set to " << fixed << setprecision(2) << k_shutterVal << "ms" << endl;



	// Enable timestamping
	EmbeddedImageInfo embeddedInfo;

	error = cam.GetEmbeddedImageInfo( &embeddedInfo );
	if ( error != PGRERROR_OK )
	{
		PrintError( error );
		return -1;
	}

	if ( embeddedInfo.timestamp.available != 0 )
	{
		embeddedInfo.timestamp.onOff = true;
	}

	error = cam.SetEmbeddedImageInfo( &embeddedInfo );
	if ( error != PGRERROR_OK )
	{
		PrintError( error );
		return -1;
	}

	// Start capturing images
	error = cam.StartCapture();
	if (error != PGRERROR_OK)
	{
		PrintError( error );
		return -1;
	}
	

	for ( int imageCnt=0; imageCnt < k_numImages; imageCnt++ ) 
	{
		Image rawImage;
		// Retrieve an image; a transient failure skips this frame only
		error = cam.RetrieveBuffer( &rawImage );
		
		if (error != PGRERROR_OK)
		{
			PrintError( error );
			continue;
		}
		
		cout << "Grabbed image " << imageCnt << endl;

		// Create a converted image
		Image convertedImage;

		// Convert the raw image
		error = rawImage.Convert( PIXEL_FORMAT_RGB16, &convertedImage );
		if (error != PGRERROR_OK)
		{
			PrintError( error );
			return -1;
		}
		
		// Create a unique filename from the current UTC time
		time_t rawtime;
		struct tm * ptm;
		time ( &rawtime );
		ptm = gmtime ( &rawtime );
		ostringstream filename;
		filename << "GONetwork-" << (ptm->tm_mday) << "D" << (ptm->tm_mon+1) << "M @"<< (ptm->tm_hour)%24 << "-" << (ptm->tm_min) << "-" << (ptm->tm_sec) << ".bmp";

		// Save the image. If a file format is not passed in, then the file
		// extension is parsed to attempt to determine the file format.
		error = convertedImage.Save( filename.str().c_str() );
		if (error != PGRERROR_OK)
		{
			PrintError( error );
			return -1;
		}
	}

	// Stop capturing images
	error = cam.StopCapture();
	if (error != PGRERROR_OK)
	{
		PrintError( error );
		return -1;
	}

	// Disconnect the camera
	error = cam.Disconnect();
	if (error != PGRERROR_OK)
	{
		PrintError( error );
		return -1;
	}

	return 0;
}
Example #8
0
/// Demo entry point: connects to the first detected camera, configures
/// custom-image (Format7) MODE_0 at full sensor size in 8-bit mono, grabs
/// k_numImages frames, converts each to BGRU and saves it as
/// "<serial>-<index>.bmp" in the current directory.
/// @return 0 on success, -1 on any failure.
int main(int /*argc*/, char** /*argv*/)
{
    PrintBuildInfo();

    const Mode k_fmt7Mode = MODE_0;
    const PixelFormat k_fmt7PixFmt = PIXEL_FORMAT_MONO8;
    const int k_numImages = 10;

    Error error;

    // Since this application saves images in the current folder
    // we must ensure that we have permission to write to this folder.
    // If we do not have permission, fail right away.
	FILE* tempFile = fopen("test.txt", "w+");
	if (tempFile == NULL)
	{
		printf("Failed to create file in current folder.  Please check permissions.\n");
		return -1;
	}
	fclose(tempFile);
	remove("test.txt");

    BusManager busMgr;
    unsigned int numCameras;
    error = busMgr.GetNumOfCameras(&numCameras);
    if (error != PGRERROR_OK)
    {
        PrintError( error );
        return -1;
    }

    printf( "Number of cameras detected: %u\n", numCameras );

    if ( numCameras < 1 )
    {
        printf( "Insufficient number of cameras... exiting\n" );
        return -1;
    }

    // Always use the first enumerated camera
    PGRGuid guid;
    error = busMgr.GetCameraFromIndex(0, &guid);
    if (error != PGRERROR_OK)
    {
        PrintError( error );
        return -1;
    }

    Camera cam;

    // Connect to a camera
    error = cam.Connect(&guid);
    if (error != PGRERROR_OK)
    {
        PrintError( error );
        return -1;
    }

    // Get the camera information
    CameraInfo camInfo;
    error = cam.GetCameraInfo(&camInfo);
    if (error != PGRERROR_OK)
    {
        PrintError( error );
        return -1;
    }

    PrintCameraInfo(&camInfo);        

    // Query for available Format 7 modes
    Format7Info fmt7Info;
    bool supported;
    fmt7Info.mode = k_fmt7Mode;
    error = cam.GetFormat7Info( &fmt7Info, &supported );
    if (error != PGRERROR_OK)
    {
        PrintError( error );
        return -1;
    }

    PrintFormat7Capabilities( fmt7Info );

    // pixelFormatBitField is a bitmask of the pixel formats this mode supports
    if ( (k_fmt7PixFmt & fmt7Info.pixelFormatBitField) == 0 )
    {
        // Pixel format not supported!
		printf("Pixel format is not supported\n");
        return -1;
    }
    
    // Request the full sensor area in the chosen pixel format
    Format7ImageSettings fmt7ImageSettings;
    fmt7ImageSettings.mode = k_fmt7Mode;
    fmt7ImageSettings.offsetX = 0;
    fmt7ImageSettings.offsetY = 0;
    fmt7ImageSettings.width = fmt7Info.maxWidth;
    fmt7ImageSettings.height = fmt7Info.maxHeight;
    fmt7ImageSettings.pixelFormat = k_fmt7PixFmt;

    bool valid;
    Format7PacketInfo fmt7PacketInfo;

    // Validate the settings to make sure that they are valid
    error = cam.ValidateFormat7Settings(
        &fmt7ImageSettings,
        &valid,
        &fmt7PacketInfo );
    if (error != PGRERROR_OK)
    {
        PrintError( error );
        return -1;
    }

    if ( !valid )
    {
        // Settings are not valid
		printf("Format7 settings are not valid\n");
        return -1;
    }

    // Set the settings to the camera
    error = cam.SetFormat7Configuration(
        &fmt7ImageSettings,
        fmt7PacketInfo.recommendedBytesPerPacket );
    if (error != PGRERROR_OK)
    {
        PrintError( error );
        return -1;
    }

    // Start capturing images
    error = cam.StartCapture();
    if (error != PGRERROR_OK)
    {
        PrintError( error );
        return -1;
    }

    // Retrieve frame rate property
    Property frmRate;
    frmRate.type = FRAME_RATE;
    error = cam.GetProperty( &frmRate );
    if (error != PGRERROR_OK)
    {
        PrintError( error );
        return -1;
    }

    printf( "Frame rate is %3.2f fps\n", frmRate.absValue );

    printf( "Grabbing %d images\n", k_numImages );

    Image rawImage;    
    for ( int imageCount=0; imageCount < k_numImages; imageCount++ )
    {
        // Retrieve an image; a transient failure skips this frame only
        error = cam.RetrieveBuffer( &rawImage );
        if (error != PGRERROR_OK)
        {
            PrintError( error );
            continue;
        }

        printf( "." );

        // Get the raw image dimensions
        PixelFormat pixFormat;
        unsigned int rows, cols, stride;
        rawImage.GetDimensions( &rows, &cols, &stride, &pixFormat );

        // Create a converted image
        Image convertedImage;

        // Convert the raw image
        error = rawImage.Convert( PIXEL_FORMAT_BGRU, &convertedImage );
        if (error != PGRERROR_OK)
        {
            PrintError( error );
            return -1;
        }  

        // Create a unique filename
        // NOTE(review): sprintf into a 512-byte buffer is safe here
        // (serial number + small index), but snprintf would be more defensive.
        char filename[512];
        sprintf( filename, "%u-%d.bmp", camInfo.serialNumber, imageCount );

        // Save the image. If a file format is not passed in, then the file
        // extension is parsed to attempt to determine the file format.
        error = convertedImage.Save( filename );
        if (error != PGRERROR_OK)
        {
            PrintError( error );
            return -1;
        }  
    }

    printf( "\nFinished grabbing images\n" );

    // Stop capturing images
    error = cam.StopCapture();
    if (error != PGRERROR_OK)
    {
        PrintError( error );
        return -1;
    }      

    // Disconnect the camera
    error = cam.Disconnect();
    if (error != PGRERROR_OK)
    {
        PrintError( error );
        return -1;
    }

    printf( "Done! Press Enter to exit...\n" );
    getchar();

	return 0;
}
/// Grabs k_numImages frames from the camera identified by `guid`, converts
/// each to 8-bit mono and saves it as "FlyCapture2Test-<serial>-<n>.pgm".
/// @return 0 on success, -1 on any FlyCapture2 API failure.
int RunSingleCamera( PGRGuid guid )
{
    const int k_numImages = 10;

    Camera cam;

    // Reports a failed FlyCapture2 call; yields true when the call failed.
    auto failed = []( Error e ) -> bool
    {
        if (e != PGRERROR_OK)
        {
            PrintError( e );
            return true;
        }
        return false;
    };

    // Connect to the camera identified by guid.
    if ( failed( cam.Connect(&guid) ) )
    {
        return -1;
    }

    // Fetch and display the camera information.
    CameraInfo camInfo;
    if ( failed( cam.GetCameraInfo(&camInfo) ) )
    {
        return -1;
    }
    PrintCameraInfo(&camInfo);

    // Begin streaming.
    if ( failed( cam.StartCapture() ) )
    {
        return -1;
    }

    Image rawImage;
    for ( int frame = 0; frame < k_numImages; ++frame )
    {
        // Grab the next frame; a transient retrieval error skips it.
        if ( failed( cam.RetrieveBuffer( &rawImage ) ) )
        {
            continue;
        }

        cout << "Grabbed image " << frame << endl;

        // Convert the raw frame to 8-bit mono.
        Image convertedImage;
        if ( failed( rawImage.Convert( PIXEL_FORMAT_MONO8, &convertedImage ) ) )
        {
            return -1;
        }

        // Build a unique per-frame filename; the .pgm extension selects
        // the output file format.
        ostringstream filename;
        filename << "FlyCapture2Test-" << camInfo.serialNumber << "-" << frame << ".pgm";

        if ( failed( convertedImage.Save( filename.str().c_str() ) ) )
        {
            return -1;
        }
    }

    // Stop streaming and disconnect.
    if ( failed( cam.StopCapture() ) )
    {
        return -1;
    }
    if ( failed( cam.Disconnect() ) )
    {
        return -1;
    }

    return 0;
}
Example #10
0
/// Continuously grabs frames from `camera` and displays them in an OpenCV
/// window until the user presses 'q'. Pressing 'p' runs get_pose() on the
/// currently displayed frame. When the global `use_calib` flag is set,
/// frames are undistorted with cameraMat/distCoeffs before display.
/// (`key`, `err`, `use_calib`, `cameraMat`, `distCoeffs` are globals.)
void imageCallback(FlyCapture2::Camera *camera) {

  cv::Mat image;

  cv::namedWindow("camera", CV_WINDOW_NORMAL | CV_WINDOW_KEEPRATIO);
  cvMoveWindow("camera", 500, 10);
  cvResizeWindow("camera",1500, 1000);

  while(key != 'q')
  {

    Image rawImage;
    err = camera->RetrieveBuffer( &rawImage );
    if ( err != PGRERROR_OK ) {
      // Don't convert/display an invalid buffer — the original ignored
      // retrieval errors entirely (every sibling example checks this).
      std::cout << "capture err" << std::endl;
      continue;
    }

    // convert to rgb
    Image rgbImage;
    rawImage.Convert( FlyCapture2::PIXEL_FORMAT_BGR, &rgbImage );
    
    // convert to OpenCV Mat; rowBytes accounts for any per-row padding
    unsigned int rowBytes = (double)
    rgbImage.GetReceivedDataSize()/(double)rgbImage.GetRows();  

    image = cv::Mat(rgbImage.GetRows(), rgbImage.GetCols(), CV_8UC3, rgbImage.GetData(),rowBytes);


    if (use_calib) {

      // Undistort image
      cv::Mat rview, map1, map2;
      cv::Size imageSize = image.size();
      double alpha = 1; // 0 = zoomed-in, all valid pixels; 1 = invalid pixels are blacked-out

      /* Using projection matrix from calibration results in different image */
      initUndistortRectifyMap(cameraMat, distCoeffs, cv::Mat(),
          getOptimalNewCameraMatrix(cameraMat, distCoeffs, imageSize, alpha, imageSize, 0),
          imageSize, CV_16SC2, map1, map2);
      remap(image, rview, map1, map2, cv::INTER_LINEAR);

      cv::imshow("camera",rview);
      key=cv::waitKey(50);

      if (key == 'p') {

        std::cout << "key-press = " << key << std::endl;
        
        get_pose(&rview);

        key=0;

      }  

    }

    else {
      
      cv::imshow("camera",image);
      key=cv::waitKey(50);

      if (key == 'p'){

        std::cout << "key-press = " << key << std::endl;  

        get_pose(&image);

        key=0;

      }

    }

  }

}
/// @copydoc ResourceHandler::CacheResource()
bool Texture2dResourceHandler::CacheResource(
    ObjectPreprocessor* pObjectPreprocessor,
    Resource* pResource,
    const String& rSourceFilePath )
{
    HELIUM_ASSERT( pObjectPreprocessor );
    HELIUM_ASSERT( pResource );

    Texture2d* pTexture = Reflect::AssertCast< Texture2d >( pResource );

    // Load the source texture data.
    FileStream* pSourceFileStream = FileStream::OpenFileStream( rSourceFilePath, FileStream::MODE_READ );
    if( !pSourceFileStream )
    {
        HELIUM_TRACE(
            TraceLevels::Error,
            ( TXT( "Texture2dResourceHandler::CacheResource(): Failed to open source texture file \"%s\" for " )
            TXT( "reading.\n" ) ),
            *rSourceFilePath );

        return false;
    }

    Image sourceImage;
    bool bLoadSuccess;

    {
        BufferedStream sourceStream( pSourceFileStream );

        // Determine the proper image loader to used based on the image extension.
        FilePath sourceFilePath = *rSourceFilePath;
        String extension( sourceFilePath.Extension().c_str() );
        if( extension == TXT( "png" ) )
        {
            bLoadSuccess = PngImageLoader::Load( sourceImage, &sourceStream );
        }
        else
        {
            bLoadSuccess = TgaImageLoader::Load( sourceImage, &sourceStream );
        }
    }

    delete pSourceFileStream;

    if( !bLoadSuccess )
    {
        HELIUM_TRACE(
            TraceLevels::Error,
            TXT( "Texture2dResourceHandler::CacheResource(): Failed to load source texture image \"%s\".\n" ),
            *rSourceFilePath );

        // Bail out on load failure: without a loaded source image there is
        // nothing to convert or compress.  (Previously execution fell
        // through here and processed an unloaded image; the open-failure
        // path above already returns false.)
        return false;
    }

    // Convert the source image to a 32-bit BGRA image for the NVIDIA texture tools library to process.
    Image::Format bgraFormat;
    bgraFormat.SetBytesPerPixel( 4 );
    bgraFormat.SetChannelBitCount( Image::CHANNEL_RED, 8 );
    bgraFormat.SetChannelBitCount( Image::CHANNEL_GREEN, 8 );
    bgraFormat.SetChannelBitCount( Image::CHANNEL_BLUE, 8 );
    bgraFormat.SetChannelBitCount( Image::CHANNEL_ALPHA, 8 );
#if HELIUM_ENDIAN_LITTLE
    bgraFormat.SetChannelBitOffset( Image::CHANNEL_RED, 16 );
    bgraFormat.SetChannelBitOffset( Image::CHANNEL_GREEN, 8 );
    bgraFormat.SetChannelBitOffset( Image::CHANNEL_BLUE, 0 );
    bgraFormat.SetChannelBitOffset( Image::CHANNEL_ALPHA, 24 );
#else
    bgraFormat.SetChannelBitOffset( Image::CHANNEL_RED, 8 );
    bgraFormat.SetChannelBitOffset( Image::CHANNEL_GREEN, 16 );
    bgraFormat.SetChannelBitOffset( Image::CHANNEL_BLUE, 24 );
    bgraFormat.SetChannelBitOffset( Image::CHANNEL_ALPHA, 0 );
#endif

    Image bgraImage;

    HELIUM_VERIFY( sourceImage.Convert( bgraImage, bgraFormat ) );
    sourceImage.Unload();

    // If the texture is flagged to ignore alpha data, set the alpha channel to fully opaque for each pixel in the
    // image.  Otherwise, check if the image is fully opaque (in which case alpha data can be ignored during
    // compression, and we can potentially use cheaper compressed formats).
    uint32_t imageWidth = bgraImage.GetWidth();
    uint32_t imageHeight = bgraImage.GetHeight();
    uint32_t pixelCount = imageWidth * imageHeight;

    void* pImagePixelData = bgraImage.GetPixelData();
    HELIUM_ASSERT( pImagePixelData );

    // Alpha is the byte at offset 3 of each 4-byte BGRA pixel.
    uint8_t* pPixelAlpha = static_cast< uint8_t* >( pImagePixelData ) + 3;

    bool bIgnoreAlpha = pTexture->GetIgnoreAlpha();
    if( bIgnoreAlpha )
    {
        for( uint32_t pixelIndex = 0; pixelIndex < pixelCount; ++pixelIndex, pPixelAlpha += 4 )
        {
            *pPixelAlpha = 0xff;
        }
    }
    else
    {
        uint32_t pixelIndex;
        for( pixelIndex = 0; pixelIndex < pixelCount; ++pixelIndex, pPixelAlpha += 4 )
        {
            if( *pPixelAlpha != 0xff )
            {
                break;
            }
        }

        if( pixelIndex >= pixelCount )
        {
            bIgnoreAlpha = true;
        }
    }

    // Set up the input options for the texture compressor.
    Texture::ECompression compression = pTexture->GetCompression();
    HELIUM_ASSERT( static_cast< size_t >( compression ) < static_cast< size_t >( Texture::ECompression::MAX ) );

    bool bIsNormalMap = Texture::IsNormalMapCompression( compression );

    bool bSrgb = pTexture->GetSrgb();
    bool bCreateMipmaps = pTexture->GetCreateMipmaps();

    nvtt::InputOptions inputOptions;
    inputOptions.setTextureLayout( nvtt::TextureType_2D, imageWidth, imageHeight );
    inputOptions.setMipmapData( pImagePixelData, imageWidth, imageHeight );
    inputOptions.setMipmapGeneration( bCreateMipmaps );
    inputOptions.setMipmapFilter( nvtt::MipmapFilter_Box );
    inputOptions.setWrapMode( nvtt::WrapMode_Repeat );

    // sRGB textures are filtered in linear space via a 2.2 gamma ramp.
    float gamma = ( bSrgb ? 2.2f : 1.0f );
    inputOptions.setGamma( gamma, gamma );

    inputOptions.setNormalMap( bIsNormalMap );
    inputOptions.setNormalizeMipmaps( bIsNormalMap );

    // Set up the output options for the texture compressor.
    MemoryTextureOutputHandler outputHandler( imageWidth, imageHeight, false, bCreateMipmaps );

    nvtt::OutputOptions outputOptions;
    outputOptions.setOutputHandler( &outputHandler );
    outputOptions.setOutputHeader( false );

    // Set up the compression options for the texture compressor.
    nvtt::CompressionOptions compressionOptions;

    nvtt::Format outputFormat = nvtt::Format_BC1;
    ERendererPixelFormat pixelFormat = RENDERER_PIXEL_FORMAT_BC1;

    // Choose the nvtt output format and the matching renderer pixel format
    // for the requested compression scheme (alpha usage and sRGB aware).
    switch( compression )
    {
    case Texture::ECompression::NONE:
        {
            outputFormat = nvtt::Format_RGBA;
#if HELIUM_ENDIAN_LITTLE
            compressionOptions.setPixelFormat( 32, 0xff000000, 0x00ff0000, 0x0000ff00, 0x000000ff );
#else
            compressionOptions.setPixelFormat( 32, 0x000000ff, 0x0000ff00, 0x00ff0000, 0xff000000 );
#endif
            pixelFormat = ( bSrgb ? RENDERER_PIXEL_FORMAT_R8G8B8A8_SRGB : RENDERER_PIXEL_FORMAT_R8G8B8A8 );

            break;
        }

    case Texture::ECompression::COLOR:
        {
            outputFormat = ( bIgnoreAlpha ? nvtt::Format_BC1 : nvtt::Format_BC1a );
            pixelFormat = ( bSrgb ? RENDERER_PIXEL_FORMAT_BC1_SRGB : RENDERER_PIXEL_FORMAT_BC1 );

            break;
        }

    case Texture::ECompression::COLOR_SHARP_ALPHA:
        {
            if( bIgnoreAlpha )
            {
                outputFormat = nvtt::Format_BC1;
                pixelFormat = ( bSrgb ? RENDERER_PIXEL_FORMAT_BC1_SRGB : RENDERER_PIXEL_FORMAT_BC1 );
            }
            else
            {
                outputFormat = nvtt::Format_BC2;
                pixelFormat = ( bSrgb ? RENDERER_PIXEL_FORMAT_BC2_SRGB : RENDERER_PIXEL_FORMAT_BC2 );
            }

            break;
        }

    case Texture::ECompression::COLOR_SMOOTH_ALPHA:
        {
            if( bIgnoreAlpha )
            {
                outputFormat = nvtt::Format_BC1;
                pixelFormat = ( bSrgb ? RENDERER_PIXEL_FORMAT_BC1_SRGB : RENDERER_PIXEL_FORMAT_BC1 );
            }
            else
            {
                outputFormat = nvtt::Format_BC3;
                pixelFormat = ( bSrgb ? RENDERER_PIXEL_FORMAT_BC3_SRGB : RENDERER_PIXEL_FORMAT_BC3 );
            }

            break;
        }

    case Texture::ECompression::NORMAL_MAP:
        {
            outputFormat = nvtt::Format_BC3n;
            pixelFormat = RENDERER_PIXEL_FORMAT_BC3;

            break;
        }

    case Texture::ECompression::NORMAL_MAP_COMPACT:
        {
            outputFormat = nvtt::Format_BC1;
            pixelFormat = RENDERER_PIXEL_FORMAT_BC1;

            break;
        }
    }

    compressionOptions.setFormat( outputFormat );
    compressionOptions.setQuality( nvtt::Quality_Normal );

    // Compress the texture.
    nvtt::Compressor compressor;
    bool bCompressSuccess = compressor.process( inputOptions, compressionOptions, outputOptions );
    HELIUM_ASSERT( bCompressSuccess );
    if( !bCompressSuccess )
    {
        HELIUM_TRACE(
            TraceLevels::Error,
            ( TXT( "Texture2dResourceHandler::CacheResource(): Texture compression failed for texture image " )
            TXT( "\"%s\".\n" ) ),
            *rSourceFilePath );

        return false;
    }

    // Cache the data for each supported platform.
    const MemoryTextureOutputHandler::MipLevelArray& rMipLevels = outputHandler.GetFace( 0 );
    uint32_t mipLevelCount = static_cast< uint32_t >( rMipLevels.GetSize() );
    HELIUM_ASSERT( mipLevelCount != 0 );

    int32_t pixelFormatIndex = static_cast< int32_t >( pixelFormat );

    //PMDTODO: Implement this
    //BinarySerializer serializer;
    //for( size_t platformIndex = 0; platformIndex < static_cast< size_t >( Cache::PLATFORM_MAX ); ++platformIndex )
    //{
    //    PlatformPreprocessor* pPreprocessor = pObjectPreprocessor->GetPlatformPreprocessor(
    //        static_cast< Cache::EPlatform >( platformIndex ) );
    //    if( !pPreprocessor )
    //    {
    //        continue;
    //    }

    //    Resource::PreprocessedData& rPreprocessedData = pTexture->GetPreprocessedData(
    //        static_cast< Cache::EPlatform >( platformIndex ) );

    //    // Serialize the persistent data about the texture first.
    //    serializer.SetByteSwapping( pPreprocessor->SwapBytes() );
    //    serializer.BeginSerialize();
    //    serializer << imageWidth;
    //    serializer << imageHeight;
    //    serializer << mipLevelCount;
    //    serializer << pixelFormatIndex;
    //    serializer.EndSerialize();

    //    rPreprocessedData.persistentDataBuffer = serializer.GetPropertyStreamBuffer();

    //    // Serialize each mip level.
    //    DynamicArray< DynamicArray< uint8_t > >& rSubDataBuffers = rPreprocessedData.subDataBuffers;
    //    rSubDataBuffers.Reserve( mipLevelCount );
    //    rSubDataBuffers.Resize( mipLevelCount );
    //    rSubDataBuffers.Trim();

    //    for( uint32_t mipLevelIndex = 0; mipLevelIndex < mipLevelCount; ++mipLevelIndex )
    //    {
    //        rSubDataBuffers[ mipLevelIndex ] = rMipLevels[ mipLevelIndex ];
    //    }

    //    // Platform data is now loaded.
    //    rPreprocessedData.bLoaded = true;
    //}

    return true;
}
Example #12
0
int main(int argc, char **argv){
    // ROS driver node for a Point Grey camera: connects to a camera by
    // serial number, configures it as narrow- or wide-angle, then publishes
    // BGR frames plus CameraInfo until ROS shuts down.
    ros::init(argc, argv, "pointgrey_camera_driver");
    ros::NodeHandle nh("~");

    // Get the serial number of the camera to connect to
    int serial;
    nh.param<int>("serial", serial, 0);

    // Try connecting to that camera
    Camera* cam = ConnectCamera(serial);
    if (cam == NULL) {
        std::stringstream serial_string;
        serial_string << serial;
        std::string msg = "PointGreyCamera::connect Could not find camera with serial number: " + serial_string.str() + ". Is that camera plugged in?";
        throw std::runtime_error(msg);
    }

    cout << "connection success!" << endl;

    // What type of camera is it? Wide angle or narrow angle?
    string camera_type;
    nh.param<string>("camera_type", camera_type, "");

    // Connection successful! lets setup the ros topic.

    // Get the location of our camera config yaml
    string camera_info_url, frame_id;
    nh.param<std::string>("camera_info_url", camera_info_url, "");
    // Get the desired frame_id, set to 'camera' if not found
    nh.param<std::string>("frame_id", frame_id, "camera");

    // Start the camera info manager and attempt to load any configurations
    std::stringstream cinfo_name;
    cinfo_name << serial;
    camera_info_manager::CameraInfoManager* cinfo = new camera_info_manager::CameraInfoManager(nh, cinfo_name.str(), camera_info_url);

    // Publish topics using ImageTransport through camera_info_manager (gives cool things like compression)
    image_transport::ImageTransport* it = new image_transport::ImageTransport(nh);
    image_transport::CameraPublisher it_pub = it->advertiseCamera("/" + ros::this_node::getNamespace() + "/image_raw", 1000);

    // get ready for capture loop
    Image image;
    Image bgrImage;
    IplImage* cvImg;
    string imageEncoding = sensor_msgs::image_encodings::BGR8;
    sensor_msgs::ImagePtr ros_image; ///< Image message.
    sensor_msgs::CameraInfoPtr ci; ///< Camera Info message.

    uint64_t num_frames = 0;

    // Now configure the camera and put it in run mode

    if (camera_type.compare("narrow") == 0)
        RunCamera(cam);
    else if (camera_type.compare("wide") == 0)
        RunWideAngleCamera(cam);
    else throw std::runtime_error("Camera type not specified");

    // capture loop
    while (ros::ok()) {

        // try to get an image
        Error error = cam->RetrieveBuffer(&image);

        //check if we had a timeout or some other error
        if (error == PGRERROR_TIMEOUT)
            continue;
        else if (error != PGRERROR_OK)
            throw std::runtime_error(error.GetDescription());

        // HACK: throw away half the frames for the WFOV cameras
        num_frames++;
        if (camera_type.compare("wide") == 0 && num_frames % 2 != 1)
            continue;

        //grabbed image, reset ros structures and fill out fields
        ros_image.reset(new sensor_msgs::Image());
        ci.reset(new sensor_msgs::CameraInfo(cinfo->getCameraInfo()));

        ros_image->header.stamp = ros::Time::now();
        ros_image->header.frame_id = frame_id;
        ci->header.stamp = ros_image->header.stamp;
        ci->header.frame_id = frame_id;

        //convert to bgr
        image.Convert(PIXEL_FORMAT_BGR, &bgrImage);
        cvImg = cvCreateImageHeader(cvSize(bgrImage.GetCols(), bgrImage.GetRows()), IPL_DEPTH_8U, 3);
        convertToCV(&bgrImage, cvImg);

        if (camera_type.compare("wide") == 0)
            cvFlip(cvImg, cvImg, -1);

        // package in ros header
         //fillImage(*ros_image, imageEncoding, bgrImage.GetRows(), bgrImage.GetCols(), bgrImage.GetStride(), bgrImage.GetData());
        fillImage(*ros_image, imageEncoding, cvImg->height, cvImg->width, cvImg->widthStep, cvImg->imageData);

        // fillImage copies the pixel data into ros_image, so the OpenCV
        // header is no longer needed. Releasing it here fixes a per-frame
        // leak (the header was previously re-created every iteration and
        // never freed).
        cvReleaseImageHeader(&cvImg);

        // Publish the message using standard image transport
        it_pub.publish(ros_image, ci);
    }

    // node asked to terminate
    CloseCamera(cam);
    delete cam;
    // Free the heap-allocated transport/info-manager objects as well.
    delete it;
    delete cinfo;
    return 0;
}
Example #13
0
int main(int argc, char** argv)
{
  // Multi-camera capture tool: records frames from NUM_CAMS cameras via
  // per-camera consumer threads, logs GPS packets, optionally records LIDAR
  // traffic through a forked tcpdump child, and reports status/preview
  // images over ZMQ diagnostics. Returns 1 when terminated by user input,
  // 0 otherwise.
  using namespace boost::program_options;
  options_description desc("Allowed options");
  desc.add_options()
    ("help", "produce help message")
    ("serial,s", value<string>(), "the serial location for the gps unit")
    ("maxframes,m", value<uint64_t>(), "maximum number of frames to capture")
    ("lidar_capture,l", "run LIDAR data collection")
    ("output,o", value<string>(), "the filename for data logging");

  signal (SIGINT, ctrlC);

  string diagnostics_address = "tcp://localhost:"+boost::to_string(ZMQ_DIAGNOSTICS_SEND_PORT);
  send_diagnostics.connect(diagnostics_address.c_str());
  sleep(1);
  sendDiagnosticsMessage("WARN:Connecting to Cameras...");

  variables_map vm;
  store(parse_command_line(argc, argv, desc), vm);
  notify(vm);
  if (vm.count("help")) {
    cout << desc << endl;
    return 1;
  }
  string fname = "";
  string ext = ".avi";
  string fnamegps = "";
  if (vm.count("output")) {
    fname = vm["output"].as<string>();
    fnamegps = vm["output"].as<string>() + "_gps.out";
  }
  else {
    cout << desc << endl;
    return 1;
  };

  // -1 wraps to UINT64_MAX, i.e. "capture forever" unless overridden.
  uint64_t maxframes = -1;
  if (vm.count("maxframes")) {
    maxframes = vm["maxframes"].as<uint64_t>();
  }

  bool useLIDAR = false;
  int lidar_pid = 0;

  if (vm.count("lidar_capture")) {
    // Fork a tcpdump child that captures Velodyne LIDAR traffic to a pcap.
    string outputPcap = fname + ".pcap";
    useLIDAR = true;
    int pid = fork();
    if (pid == 0) {

        char* const parmList[] = {(char *)"/usr/sbin/tcpdump",
            (char*)"-i" ,
            (char*)"eth0",
            (char*)"-w",
            (char*)strdup(outputPcap.c_str()),
            (char*)"udp port 2368 or udp port 8308",
            NULL};

        int rc = execv("/usr/sbin/tcpdump", parmList);
        if (rc == -1) {
            cout << "error loading lidar script" << endl;
            return 0;
        }
    } else {
        lidar_pid = pid;
    }
  }

#if defined(DEBUG_NO_SENSORS) || defined(NOSYNC) || defined(NOGPS)
  bool useGPS = false;
#else
  bool useGPS = true;
#endif
  GPSLogger gpsLogger;
  ofstream gps_output;
  if (useGPS) {
    gps_output.open(fnamegps.c_str());
    gpsLogger.Connect(vm["serial"].as<string>());
  }
  int imageWidth = 1280;
  int imageHeight = 960;

  cout << "Capturing for maximum of " << maxframes << " frames" << endl;

  for (int cam_num = 0; cam_num < NUM_CAMS; cam_num++)
  {
      for (int thread_num = 0; thread_num < NUMTHREAD_PER_BUFFER; thread_num++)
      {
          cam_buff[cam_num][thread_num].getBuffer()->setCapacity(1000);
      }
  }

#ifndef DEBUG_NO_SENSORS
  // Connect each camera and spin up its consumer threads, one output file
  // per (camera, thread) pair.
  for (int cam_num = 0; cam_num < NUM_CAMS; cam_num++)  {
      cam[cam_num] = ConnectCamera(cam_num);
      assert(cam[cam_num] != NULL);
      RunCamera(cam[cam_num]);

      for (int thread_num = 0; thread_num < NUMTHREAD_PER_BUFFER; thread_num++)
      {
          stringstream sstm;
          sstm << "split_" << thread_num << "_" << fname << cam_num+1 << ext;
          string thread_fname = sstm.str();
          cam_consumer[cam_num][thread_num] = new Consumer<Image>(
                  cam_buff[cam_num][thread_num].getBuffer(),
                  thread_fname, cam_buff[cam_num][thread_num].getMutex(), 50.0f,
                  imageWidth, imageHeight);
      }

    #ifdef DISPLAY
      stringstream wstm;
      wstm << "cam" << cam_num;
      string window_name = wstm.str();
      cvNamedWindow(window_name.c_str(), CV_WINDOW_AUTOSIZE); // TODO
    #endif
#endif
  }

  IplImage* img = cvCreateImage(cvSize(imageWidth, imageHeight), IPL_DEPTH_8U, 3);
  Image* imgToSend = new Image();
  string lastValidGPSPacket;

  //// setup ZMQ communication
  // NOTE(review): bind_address is built from ZMQ_CAMERALOGGER_BIND_PORT but
  // the bind below uses hard-coded port 5001 — confirm which is intended.
  string bind_address = "tcp://*:"+boost::to_string(ZMQ_CAMERALOGGER_BIND_PORT);
  cout << "binding to " << bind_address << endl;
  my_commands.bind("tcp://*:5001");
  my_commands.setsockopt(ZMQ_SUBSCRIBE, NULL, 0);
  cout << "binding successful" << endl;

#ifdef DEBUG_NO_SENSORS
  // Release the blank frame before replacing it with the debug image,
  // otherwise the original allocation leaks.
  cvReleaseImage(&img);
  img = cvLoadImage("/home/smart/sail-car-log/cameralogger/xkcd.png");
  lastValidGPSPacket="#MARK1PVAA,USB1,0,92.0,EXACT,1753,238273.050,00000000,0000,470;1753,238273.050000025,37.856359728,-122.494864165,110.390118713,16.041177523,-24.789919285,-1.752024973,-3.954175540,4.393638450,32.292000751,INS_SOLUTION_GOOD*016857cc";
#endif

  // start GPS Trigger
  sendDiagnosticsMessage("WARN:Connecting to GPS...");
  if (useGPS) gpsLogger.Run();

  uint64_t numframes = 0;
  uint64_t lastframes = 0;
  ///////// main loop
  // The `or` guarantees a minimum of 250 frames are captured even if a
  // shutdown is requested immediately.
  while (!is_done_working or numframes < 250) {
    numframes++;
    if (numframes > maxframes) {
      is_done_working = true;
    }
    try {
      // Poll (non-blocking) for external commands over ZMQ.
      zmq::message_t command_msg;
      if (my_commands.recv(&command_msg, ZMQ_NOBLOCK) == true) {
        string command((const char*)command_msg.data(), command_msg.size());
        if (command.compare("TERMINATE") == 0) {
          // properly close the application
          is_done_working = true;
          quit_via_user_input = true;
        }
        else if (command.compare("LASTCAMERADATA") == 0) {
          // transmit the camera data over
        }
        else if (command.compare("LASTGPSDATA") == 0) {
          // transmit the last known GPS log over
        }
      }
    } catch (const zmq::error_t&e) {} // ignore transient ZMQ errors

#ifndef DEBUG_NO_SENSORS
    for (int cam_num = 0; cam_num < NUM_CAMS; cam_num++) {
        Image image;
        cam[cam_num]->RetrieveBuffer(&image);
        image.Convert(PIXEL_FORMAT_BGR, imgToSend);
        ImageCallback(&image, NULL, &cam_buff[cam_num][numframes % NUMTHREAD_PER_BUFFER]);

        Image cimage;
        if (numframes % CAMERA_DISPLAY_SKIP == 0) {
            image.Convert(PIXEL_FORMAT_BGR, &cimage);
            convertToCV(&cimage, img);
#ifdef DISPLAY
            stringstream wstm;
            wstm << "cam" << cam_num;
            string window_name = wstm.str();
            show(img, window_name.c_str());
#endif //DISPLAY
        }
    }
#endif //DEBUG_NO_SENSORS

#ifdef DISPLAY
    char r = cvWaitKey(1);
    if (r == 'q') {
      is_done_working = true;
      quit_via_user_input = true;
    }
#endif //DISPLAY

    if (useGPS) {
      GPSLogger::GPSPacketType packet = gpsLogger.getPacket();
      int GPSPacketSuccess = boost::get<0>(packet);
      if (GPSPacketSuccess > 0) {
        lastValidGPSPacket = boost::get<1>(packet);
        gps_output << lastValidGPSPacket << endl;
      } else {
        sendDiagnosticsMessage("WARN:Bad GPS Packet:" + boost::get<1>(packet));
      }

    }

    // Once per second: report capture rate, buffer depth, a preview image,
    // and the last GPS fix over the diagnostics socket.
    currentTime = Time(boost::posix_time::microsec_clock::local_time());
    if ((currentTime - lastTime).total_milliseconds() > 1000) {
      string captureRateMsg = "INFOCAPTURERATE:" + boost::to_string(numframes-lastframes);
      cout << captureRateMsg << endl;
      sendDiagnosticsMessage(captureRateMsg);

      int queue_size = 0;
      for (int cam_num = 0; cam_num < NUM_CAMS; cam_num++) {
          for (int thread_num = 0; thread_num < NUMTHREAD_PER_BUFFER;
                  thread_num++) {
              queue_size += cam_buff[cam_num][thread_num].getBuffer()->getSize();
          }
      }
      string bufferSizeMsg = "INFOBUFFERSIZE:" + boost::to_string(queue_size);
      cout << bufferSizeMsg << endl;
      sendDiagnosticsMessage(bufferSizeMsg);

      // encode last image for transmission
      Mat lastCameraImage;
      vector<uchar> lastCameraImageBuf;
      string lastCameraImageMsg;

      IplImage* cvImgToSend = cvCreateImage(cvSize(imageWidth, imageHeight), IPL_DEPTH_8U, 3);
      convertToCV(imgToSend, cvImgToSend);
      resize(Mat(cvImgToSend), lastCameraImage, Size(320,240));
      // resize copied the pixels into lastCameraImage; release the temporary
      // frame. Previously this leaked a full image every stats interval.
      cvReleaseImage(&cvImgToSend);
      imencode(".png", lastCameraImage, lastCameraImageBuf);
      lastCameraImageMsg = string(lastCameraImageBuf.begin(), lastCameraImageBuf.end());
      lastCameraImageMsg = "CAM:" + lastCameraImageMsg;
      sendDiagnosticsMessage(lastCameraImageMsg);

      GPSRecord record(lastValidGPSPacket);
      if (record.isValid()) {
        std::ostringstream lat;
        lat << std::fixed << std::setprecision(8);
        lat << record.latitude;
        std::ostringstream lon;
        lon << std::fixed << std::setprecision(8);
        lon << record.longitude;
        string gpsMsg = "GPS:" + lat.str() + "," + lon.str();
        sendDiagnosticsMessage(gpsMsg);
      }


      lastTime = currentTime;
      lastframes = numframes;
    }
#ifdef DEBUG_NO_SENSORS
    usleep(200);
#endif
  }
  cout << "numframes = " << numframes << endl;

  // Free the display/transmit buffers allocated before the loop.
  cvReleaseImage(&img);
  delete imgToSend;

  /////// cleanup
  sendDiagnosticsMessage("WARN:Shutting Down GPS...");
  if (useGPS) gpsLogger.Close();
  sendDiagnosticsMessage("WARN:Shutting Down Cameras...");
#ifndef DEBUG_NO_SENSORS
  for (int cam_num = 0; cam_num < NUM_CAMS; cam_num++) {

      for (int thread_num = 0; thread_num < NUMTHREAD_PER_BUFFER; thread_num++) {
          cam_consumer[cam_num][thread_num]->stop();
          delete cam_consumer[cam_num][thread_num];
      }
      CloseCamera(cam[cam_num]);
      delete cam[cam_num];
  }
  if (useLIDAR)
      kill(lidar_pid, SIGTERM);
#endif
#ifdef DISPLAY
  cvDestroyAllWindows();
#endif
  if (quit_via_user_input) return 1;
  return 0;
}
Example #14
0
int main(int argc, char** argv)
{
  // Stereo Point Grey publisher: looks up a left and right camera by their
  // hard-coded serial numbers, grabs frames from each in turn, and publishes
  // BGR images plus CameraInfo on per-camera ROS topics.
  //
  // NOTE: `error` is a file-scope FlyCapture2::Error used throughout.
  // Exit code is 0 on clean shutdown, 1 on any setup/capture failure
  // (previously `return false;` silently returned success code 0 on error).
  ros::init(argc, argv, "image_publisher");
  ros::NodeHandle nh;
  image_transport::ImageTransport it(nh);
  image_transport::Publisher pub_L = it.advertise("camera_left/image", 1);
  image_transport::Publisher pub_R = it.advertise("camera_right/image", 1);

  int serial_L=14481155;
  int serial_R=14435551;

  // Get the location of our camera config yaml
  std::string camera_info_url ;
  // std::string camera_info_url = "/home/sasha/hydro_ws/src/alvar_ptgrey/info/ptgrey_calib.yaml";
  std::string frame_id_;
  sensor_msgs::CameraInfoPtr ci_;
  boost::shared_ptr<camera_info_manager::CameraInfoManager> cinfo_L;
  boost::shared_ptr<camera_info_manager::CameraInfoManager> cinfo_R;

  nh.param<std::string>("camera_info_url", camera_info_url, "");
  // Get the desired frame_id, set to 'camera' if not found
  // nh.param<std::string>("frame_id", frame_id_, "camera");

  std::cout << camera_info_url << std::endl;

  // Start the camera info manager and attempt to load any configurations
  std::stringstream cinfo_name_L;
  std::stringstream cinfo_name_R;
  cinfo_name_L << serial_L;
  cinfo_name_R << serial_R;

  ros::Publisher camera_info_pub_L = nh.advertise<sensor_msgs::CameraInfo>("/camera_left/camera_info",1);
  ros::Publisher camera_info_pub_R = nh.advertise<sensor_msgs::CameraInfo>("/camera_right/camera_info",1);

  cinfo_L.reset(new camera_info_manager::CameraInfoManager(nh, cinfo_name_L.str(), camera_info_url));
  cinfo_R.reset(new camera_info_manager::CameraInfoManager(nh, cinfo_name_R.str(), camera_info_url));

  BusManager busMgr;

  // Resolve each camera's GUID from its serial number.
  PGRGuid guid_L;
  error = busMgr.GetCameraFromSerialNumber(14481155, &guid_L);
  if (error != PGRERROR_OK) {
    std::cout << "Index error" << std::endl;
    return 1;
  }

  PGRGuid guid_R;
  error = busMgr.GetCameraFromSerialNumber(14435551, &guid_R);
  if (error != PGRERROR_OK) {
    std::cout << "Index error" << std::endl;
    return 1;
  }

  Camera pt_grey_L, pt_grey_R;

  // connect to the cameras and start capture
  init_cam_capture(pt_grey_L, guid_L);
  init_cam_capture(pt_grey_R, guid_R);

  // capture loop
  while(ros::ok())
  {
    // Get the left image
    Image rawImage;
    error = pt_grey_L.RetrieveBuffer( &rawImage );
    if ( error != PGRERROR_OK )
    {
      std::cout << "capture error" << std::endl;
      return 1;
    }

    // convert to bgr (default format for OpenCV)
    Image rgbImage;
    rawImage.Convert( FlyCapture2::PIXEL_FORMAT_BGR, &rgbImage );

    // convert to OpenCV Mat (wraps the FlyCapture buffer, no copy)
    unsigned int rowBytes = (double)rgbImage.GetReceivedDataSize()/(double)rgbImage.GetRows();

    cv::Mat image = cv::Mat(rgbImage.GetRows(), rgbImage.GetCols(), CV_8UC3, rgbImage.GetData(),rowBytes);

    sensor_msgs::ImagePtr msg = cv_bridge::CvImage(std_msgs::Header(), "bgr8", image).toImageMsg();
    msg->header.frame_id = "/camera_left";
    msg->header.stamp = ros::Time::now();
    pub_L.publish(msg);

    // Publish matching CameraInfo with the same stamp/frame.
    ci_.reset(new sensor_msgs::CameraInfo(cinfo_L->getCameraInfo()));
    ci_->header.stamp = msg->header.stamp;
    ci_->header.frame_id = msg->header.frame_id ;
    camera_info_pub_L.publish(ci_);

    ros::spinOnce();

    // Get the right image
    error = pt_grey_R.RetrieveBuffer( &rawImage );
    if ( error != PGRERROR_OK )
    {
      std::cout << "capture error" << std::endl;
      return 1;
    }

    // convert to bgr (default format for OpenCV)
    rawImage.Convert( FlyCapture2::PIXEL_FORMAT_BGR, &rgbImage );

    // convert to OpenCV Mat
    rowBytes = (double)rgbImage.GetReceivedDataSize()/(double)rgbImage.GetRows();

    image = cv::Mat(rgbImage.GetRows(), rgbImage.GetCols(), CV_8UC3, rgbImage.GetData(),rowBytes);

    msg = cv_bridge::CvImage(std_msgs::Header(), "bgr8", image).toImageMsg();
    msg->header.frame_id = "/camera_right";
    msg->header.stamp = ros::Time::now();
    pub_R.publish(msg);

    ci_.reset(new sensor_msgs::CameraInfo(cinfo_R->getCameraInfo()));
    ci_->header.stamp = msg->header.stamp;
    ci_->header.frame_id = msg->header.frame_id ;
    camera_info_pub_R.publish(ci_);

    ros::spinOnce();

  }

  // Shut down both cameras; report failure via non-zero exit code.
  error = pt_grey_L.StopCapture();
  if (error != PGRERROR_OK) {
    return 1;
  }
  error = pt_grey_R.StopCapture();
  if (error != PGRERROR_OK) {
    return 1;
  }

  pt_grey_L.Disconnect();
  pt_grey_R.Disconnect();

  return 0;
}
Example #15
0
int main()
{
  // Stereo preview tool: connects to a left and right Point Grey camera by
  // serial number and shows live frames in two OpenCV windows until the
  // user presses 'q'.
  //
  // NOTE: `error` is a file-scope FlyCapture2::Error used throughout.
  // Exit code is 0 on clean shutdown, 1 on any setup/capture failure
  // (previously `return false;` silently returned success code 0 on error).
  BusManager busMgr;
  unsigned int numCameras = 2;

  error = busMgr.GetNumOfCameras(&numCameras);
  if (error != PGRERROR_OK) {
    std::cout << "Bus Manager error" << std::endl;
    return 1;
  }

  // Resolve each camera's GUID from its serial number.
  PGRGuid guid_L;
  error = busMgr.GetCameraFromSerialNumber(14481155, &guid_L);
  if (error != PGRERROR_OK) {
    std::cout << "Index error" << std::endl;
    return 1;
  }

  PGRGuid guid_R;
  error = busMgr.GetCameraFromSerialNumber(14435551, &guid_R);
  if (error != PGRERROR_OK) {
    std::cout << "Index error" << std::endl;
    return 1;
  }

  Camera pt_grey_L, pt_grey_R;

  // NOTE(review): both windows are moved to the same (3000, 10) position,
  // so they overlap — confirm whether the right window should be offset.
  cv::namedWindow("image_L", CV_WINDOW_NORMAL | CV_WINDOW_KEEPRATIO);
  cvMoveWindow("image_L", 3000, 10);
  cvResizeWindow("image_L",700, 500);

  cv::namedWindow("image_R", CV_WINDOW_NORMAL | CV_WINDOW_KEEPRATIO);
  cvMoveWindow("image_R", 3000, 10);
  cvResizeWindow("image_R",700, 500);

  // connect to the cameras and start capture
  init_cam_capture(pt_grey_L, guid_L);
  init_cam_capture(pt_grey_R, guid_R);

  // capture loop: show both feeds until 'q' is pressed
  char key = 0;
  while(key != 'q')
  {
    // Get the left image
    Image rawImage;
    error = pt_grey_L.RetrieveBuffer( &rawImage );
    if ( error != PGRERROR_OK )
    {
      std::cout << "capture error" << std::endl;
      return 1;
    }

    // convert to bgr (default format for OpenCV)
    Image rgbImage;
    rawImage.Convert( FlyCapture2::PIXEL_FORMAT_BGR, &rgbImage );

    // convert to OpenCV Mat (wraps the FlyCapture buffer, no copy)
    unsigned int rowBytes = (double)rgbImage.GetReceivedDataSize()/(double)rgbImage.GetRows();

    cv::Mat image = cv::Mat(rgbImage.GetRows(), rgbImage.GetCols(), CV_8UC3, rgbImage.GetData(),rowBytes);

    cv::imshow("image_L", image);
    key = cv::waitKey(1);

    // Get the right image
    error = pt_grey_R.RetrieveBuffer( &rawImage );
    if ( error != PGRERROR_OK )
    {
      std::cout << "capture error" << std::endl;
      return 1;
    }

    // convert to bgr (default format for OpenCV)
    rawImage.Convert( FlyCapture2::PIXEL_FORMAT_BGR, &rgbImage );

    // convert to OpenCV Mat
    rowBytes = (double)rgbImage.GetReceivedDataSize()/(double)rgbImage.GetRows();

    image = cv::Mat(rgbImage.GetRows(), rgbImage.GetCols(), CV_8UC3, rgbImage.GetData(),rowBytes);

    cv::imshow("image_R", image);

    key = cv::waitKey(1);

    // Disabled frame-saving snippet (press 'c' to save) kept for reference:
    // key = cv::waitKey(50);
//     if (key == 'c') {
//       char img_name[50];
//     sprintf (img_name, "./frame_%03d.png", frame_no);
// //    sprintf (img_name, "./images/frame_%03d.png", frame_no);
// //      sprintf (img_name, "../cam_calib/images/frame_%03d.png", frame_no);
// //      sprintf (img_name, "../homography2d/images/frame_%03d.png", frame_no);
//       std::cout << "Writing to " << img_name << std::endl;
//       cv::imwrite(img_name, image);

//       frame_no++;
//     }

  }

  // Shut down both cameras; report failure via non-zero exit code.
  error = pt_grey_L.StopCapture();
  if (error != PGRERROR_OK) {
    return 1;
  }
  error = pt_grey_R.StopCapture();
  if (error != PGRERROR_OK) {
    return 1;
  }

  pt_grey_L.Disconnect();
  pt_grey_R.Disconnect();

  return 0;
}
Example #16
0
int main()
{
  // Single-camera capture tool: displays live frames in an OpenCV window
  // and writes every frame to ./frame_NNN.png until the user presses 'q'.
  //
  // NOTE: `error` is a file-scope FlyCapture2::Error used throughout.
  // Exit code is 0 on clean shutdown, 1 on a capture failure (previously
  // `return false;` silently returned success code 0 on error).
  Camera pt_grey;

  cv::namedWindow("image", CV_WINDOW_NORMAL | CV_WINDOW_KEEPRATIO);
  cvMoveWindow("image", 3000, 10);
  cvResizeWindow("image",700, 500);

  // connect to camera and start capture
  init_cam_capture(pt_grey);

  // set camera parameters
 // set_cam_param(pt_grey);

  int frame_no = 0;

  // capture loop
  char key = 0;
  while(key != 'q')
  {
    // Get the image
    Image rawImage;
    error = pt_grey.RetrieveBuffer( &rawImage );
    if ( error != PGRERROR_OK )
    {
      std::cout << "capture error" << std::endl;
      return 1;
    }

    // convert to bgr (default format for OpenCV)
    Image rgbImage;
    rawImage.Convert( FlyCapture2::PIXEL_FORMAT_BGR, &rgbImage );

    // convert to OpenCV Mat (wraps the FlyCapture buffer, no copy).
    // Note: this loop-local Mat previously shadowed an unused outer
    // `cv::Mat image`, which has been removed.
    unsigned int rowBytes = (double)rgbImage.GetReceivedDataSize()/(double)rgbImage.GetRows();

    cv::Mat image = cv::Mat(rgbImage.GetRows(), rgbImage.GetCols(), CV_8UC3, rgbImage.GetData(),rowBytes);

    cv::imshow("image", image);
    key = cv::waitKey(50);

    // Save every frame with a sequential filename.
    char img_name[50];
    sprintf (img_name, "./frame_%03d.png", frame_no);
//    sprintf (img_name, "./images/frame_%03d.png", frame_no);
//      sprintf (img_name, "../cam_calib/images/frame_%03d.png", frame_no);
//      sprintf (img_name, "../homography2d/images/frame_%03d.png", frame_no);
    //  std::cout << "Writing to " << img_name << std::endl;
    cv::imwrite(img_name, image);

    frame_no++;

  }

  error = pt_grey.StopCapture();

  if ( error != PGRERROR_OK )
  {
      // This may fail when the camera was removed, so don't show
      // an error message
  }

  pt_grey.Disconnect();

  return 0;
}
int RunSingleCamera( PGRGuid guid )
{
    // Grab a fixed number of frames from the GigE camera identified by
    // `guid`, converting each to RGBU. Returns 0 on success, -1 on any
    // setup or conversion failure; a failed frame retrieval is reported
    // but does not abort the run.
    const int k_numImages = 1000;

    GigECamera camera;
    Error err;

    printf( "Connecting to camera...\n" );

    // Attach to the camera described by the GUID.
    err = camera.Connect( &guid );
    if ( err != PGRERROR_OK )
    {
        PrintError( err );
        return -1;
    }

    // Fetch and report basic camera information.
    CameraInfo info;
    err = camera.GetCameraInfo( &info );
    if ( err != PGRERROR_OK )
    {
        PrintError( err );
        return -1;
    }
    PrintCameraInfo( &info );

    // Enumerate every GigE stream channel and print its configuration.
    unsigned int channelCount = 0;
    err = camera.GetNumStreamChannels( &channelCount );
    if ( err != PGRERROR_OK )
    {
        PrintError( err );
        return -1;
    }

    for ( unsigned int channel = 0; channel < channelCount; ++channel )
    {
        GigEStreamChannel channelInfo;
        err = camera.GetGigEStreamChannelInfo( channel, &channelInfo );
        if ( err != PGRERROR_OK )
        {
            PrintError( err );
            return -1;
        }

        printf( "\nPrinting stream channel information for channel %u:\n", channel );
        PrintStreamChannelInfo( &channelInfo );
    }

    printf( "Querying GigE image setting information...\n" );

    GigEImageSettingsInfo settingsInfo;
    err = camera.GetGigEImageSettingsInfo( &settingsInfo );
    if ( err != PGRERROR_OK )
    {
        PrintError( err );
        return -1;
    }

    // Use the full sensor area in 8-bit mono.
    GigEImageSettings settings;
    settings.offsetX = 0;
    settings.offsetY = 0;
    settings.height = settingsInfo.maxHeight;
    settings.width = settingsInfo.maxWidth;
    settings.pixelFormat = PIXEL_FORMAT_MONO8;

    printf( "Setting GigE image settings...\n" );

    err = camera.SetGigEImageSettings( &settings );
    if ( err != PGRERROR_OK )
    {
        PrintError( err );
        return -1;
    }

    printf( "Starting image capture...\n" );

    err = camera.StartCapture();
    if ( err != PGRERROR_OK )
    {
        PrintError( err );
        return -1;
    }

    Image raw;
    Image converted;
    int grabbed = 0;
    while ( grabbed < k_numImages )
    {
        // Retrieve the next frame; a failed grab is logged and skipped
        // but still counts toward the total.
        err = camera.RetrieveBuffer( &raw );
        if ( err != PGRERROR_OK )
        {
            PrintError( err );
        }
        else
        {
            printf( "Grabbed image %d\n", grabbed );

            // Convert the raw frame to RGBU.
            err = raw.Convert( PIXEL_FORMAT_RGBU, &converted );
            if ( err != PGRERROR_OK )
            {
                PrintError( err );
                return -1;
            }

            /*
            // Create a unique filename
            char filename[512];
            sprintf( filename, "GigEGrabEx-%u-%d.png", info.serialNumber, grabbed );

            // Save the image. If a file format is not passed in, then the file
            // extension is parsed to attempt to determine the file format.
            err = converted.Save( filename );
            if (err != PGRERROR_OK)
            {
                PrintError( err );
                return -1;
            }
            */
        }
        ++grabbed;
    }

    printf( "Stopping capture...\n" );

    // Stop streaming frames.
    err = camera.StopCapture();
    if ( err != PGRERROR_OK )
    {
        PrintError( err );
        return -1;
    }

    // Release the camera.
    err = camera.Disconnect();
    if ( err != PGRERROR_OK )
    {
        PrintError( err );
        return -1;
    }

    return 0;
}
int main( int argc, char *argv[] )
{
  // Live-view tool with optional lens-calibration loading: connects to a
  // Point Grey camera and displays BGR frames in an OpenCV window forever.
  //
  // NOTE: `error` is a file-scope FlyCapture2::Error used throughout.
  // Exit code is 0 on clean shutdown, 1 on failure (previously the capture
  // path used `return false;`, which silently returned success code 0).
  const bool LOAD_CALIB = false;

  Camera pt_grey;
  cv::Mat image;
  cv::Mat cameraMat, distCoeffs;

  // connect to camera and start capture
  init_cam_capture(pt_grey);

  if (LOAD_CALIB){
    // Read calibration matrices from the XML produced by OpenCV's
    // calibration tool.
    std::string calibFile = "ptgrey_calib.xml";

    cv::FileStorage fs( calibFile, cv::FileStorage::READ);

    if (!fs.isOpened()){
      std::cerr << "Failed to open " << calibFile << std::endl;
      return 1;
    }

    fs["Camera_Matrix"] >> cameraMat;
    fs["Distortion_Coefficients"] >> distCoeffs;

  }

  bool win_start = true;

  // Capture loop; runs until the process is killed (or a capture error).
  while(1)
  {
    // Get the image
    Image rawImage;
    error = pt_grey.RetrieveBuffer( &rawImage );
    if ( error != PGRERROR_OK ) {
      std::cout << "capture error" << std::endl;
      return 1;
    }

    // convert to rgb
    Image rgbImage;
    rawImage.Convert( FlyCapture2::PIXEL_FORMAT_BGR, &rgbImage );

    // convert to OpenCV Mat (wraps the FlyCapture buffer, no copy)
    unsigned int rowBytes = (double)rgbImage.GetReceivedDataSize()/(double)rgbImage.GetRows();

    image = cv::Mat(rgbImage.GetRows(), rgbImage.GetCols(), CV_8UC3, rgbImage.GetData(),rowBytes);

    // win_start tells the helper to create the window on the first frame.
    setImageInWindow(image, "image", win_start);
    win_start = false;

  }

  error = pt_grey.StopCapture();

  if ( error != PGRERROR_OK )
  {
      // This may fail when the camera was removed, so don't show
      // an error message
  }

  pt_grey.Disconnect();

  return 0;

}