Code Example #1
File: mltcam.cpp  Project: TomoControl/TomoControl
void MLTCam::WaitForExecution()
{
    qDebug() <<  "MLTCam::Wait for execution...";
    PvBuffer *lBuffer = NULL;
    PvResult lOperationResult;

    // Retrieve next buffer
    PvResult lResult = lStream->RetrieveBuffer( &lBuffer, &lOperationResult, 1000 );
    if ( lResult.IsOK() )
    {
        if ( lOperationResult.IsOK() )
        {
            qDebug() <<  "MLTCam::Get image complete";
            PvImage *lImage2 = lBuffer->GetImage();
            ushort * tdata = (ushort*)lImage2->GetDataPointer();

            memcpy(data, tdata, IMAGE_WIDTH*IMAGE_HEIGHT*2);

            timer->stop();
            delete timer;

            emit GetDataComplete(data);
            lDevice->StreamDisable();
        }

        // Re-queue the buffer in the stream object
        lStream->QueueBuffer( lBuffer );
    }

}
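
WaitForExecution() reads like a Qt slot that polls the stream once per call, so something must invoke it repeatedly until a frame arrives. Below is a minimal, hypothetical driver sketch; it reuses the members visible above (timer, lDevice), but the method name StartSingleCapture and the 100 ms poll period are assumptions, not project code.

void MLTCam::StartSingleCapture()
{
    // Arm streaming on the device and start acquisition (sketch, assumed wiring)
    lDevice->StreamEnable();
    lDevice->GetParameters()->ExecuteCommand( "AcquisitionStart" );

    // Poll for the frame until WaitForExecution() stops the timer
    timer = new QTimer( this );
    connect( timer, &QTimer::timeout, this, &MLTCam::WaitForExecution );
    timer->start( 100 );
}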
Code Example #2
File: mltcam.cpp  Project: TomoControl/TomoControl
void MLTCam::CreateStreamBuffers( PvDevice *aDevice, PvStream *aStream, BufferList *aBufferList )
{
    // Reading payload size from device
    uint32_t lSize = aDevice->GetPayloadSize();

    // Use BUFFER_COUNT or the maximum number of buffers, whichever is smaller
    uint32_t lBufferCount = ( aStream->GetQueuedBufferMaximum() < BUFFER_COUNT ) ?
                            aStream->GetQueuedBufferMaximum() :
                            BUFFER_COUNT;

    // Allocate buffers
    for ( uint32_t i = 0; i < lBufferCount; i++ )
    {
        // Create new buffer object
        PvBuffer *lBuffer = new PvBuffer;

        // Have the new buffer object allocate payload memory
        lBuffer->Alloc( static_cast<uint32_t>( lSize ) );

        // Add to external list - used to eventually release the buffers
        aBufferList->push_back( lBuffer );
    }

    // Queue all buffers in the stream
    BufferList::iterator lIt = aBufferList->begin();
    while ( lIt != aBufferList->end() )
    {
        aStream->QueueBuffer( *lIt );
        lIt++;
    }
}
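
CreateStreamBuffers() allocates and queues the buffers; the matching cleanup is not shown. The sketch below follows the usual eBUS teardown pattern, but FreeStreamBuffers is an assumed name and the snippet is not taken from this project.

void MLTCam::FreeStreamBuffers( PvStream *aStream, BufferList *aBufferList )
{
    // Return any buffers still queued on the stream to the application
    // (production code typically also drains them with RetrieveBuffer first)
    aStream->AbortQueuedBuffers();

    // Free payload memory and destroy every buffer in the external list
    for ( BufferList::iterator lIt = aBufferList->begin(); lIt != aBufferList->end(); lIt++ )
    {
        ( *lIt )->Free();
        delete *lIt;
    }
    aBufferList->clear();
}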
Code Example #3
File: pleora.cpp  Project: Anantak/Pangolin
bool PleoraVideo::GrabNext( unsigned char* image, bool /*wait*/ )
{
    PvBuffer *lBuffer = NULL;
    PvResult lOperationResult;

    // Retrieve next buffer
    PvResult lResult = lStream->RetrieveBuffer( &lBuffer, &lOperationResult, 1000 );
    if ( !lResult.IsOK() ) {
        pango_print_warn("Pleora error: %s\n", lResult.GetCodeString().GetAscii() );
        return false;
    }

    bool good = false;

    if ( lOperationResult.IsOK() )
    {
        PvPayloadType lType = lBuffer->GetPayloadType();
        if ( lType == PvPayloadTypeImage )
        {
            PvImage *lImage = lBuffer->GetImage();
            std::memcpy(image, lImage->GetDataPointer(), size_bytes);
            good = true;
        }
    } else {
        pango_print_warn("Pleora error: %s\n", lOperationResult.GetCodeString().GetAscii() );
    }

    lStream->QueueBuffer( lBuffer );
    return good;
}
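
GrabNext() copies exactly size_bytes into the caller-supplied pointer, so the caller must provide a buffer at least that large. A minimal usage sketch, assuming Pangolin's SizeBytes() accessor reports the same value and the stream has already been started:

std::vector<unsigned char> img( video.SizeBytes() );
while ( video.GrabNext( img.data(), true ) )
{
    // process img here (display, encode, ...)
}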
Code Example #4
File: ImperxStream.cpp  Project: kjgregory/SAS
int ImperxStream::Snap(cv::Mat &frame, int timeout)
{
//  std::cout << "ImperxStream::Snap starting" << std::endl;
    // The pipeline is already "armed", we just have to tell the device
    // to start sending us images
    lDeviceParams->ExecuteCommand( "AcquisitionStart" );
    int lWidth, lHeight, result = 0;
    // Retrieve next buffer             
    PvBuffer *lBuffer = NULL;
    PvResult lOperationResult;
    PvResult lResult = lPipeline.RetrieveNextBuffer( &lBuffer, timeout, &lOperationResult );
        
    if ( lResult.IsOK() )
    {
        if ( lOperationResult.IsOK() )
        {
            // Process Buffer
            
            if ( lBuffer->GetPayloadType() == PvPayloadTypeImage )
            {
//              std::cout << "ImperxStream::Snap Copying frame" << std::endl;
                // Get image specific buffer interface
                PvImage *lImage = lBuffer->GetImage();
              
                // Read width, height
                lWidth = (int) lImage->GetWidth();
                lHeight = (int) lImage->GetHeight();
                unsigned char *img = lImage->GetDataPointer();
                cv::Mat lframe(lHeight,lWidth,CV_8UC1,img, cv::Mat::AUTO_STEP);
                lframe.copyTo(frame);
                result = 0;
            }
            else
            {
                std::cout << "ImperxStream::Snap No image in buffer" << std::endl;
                result = 1;
            }
        }
        else
        {
            std::cout << "ImperxStream::Snap Operation result: " << lOperationResult << std::endl;
            result = 1;
        }
        // We have an image - do some processing (...) and VERY IMPORTANT,
        // release the buffer back to the pipeline
    }
    else
    {
        std::cout << "ImperxStream::Snap Timeout: " << lResult << std::endl;
        result = 1;
    }
    
    lPipeline.ReleaseBuffer( lBuffer );
//    std::cout << "ImperxStream::Snap Exiting" << std::endl;
    return result;
}
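
For reference, a hypothetical caller of Snap(); it treats 0 as success, as the code above does, and saves the frame with OpenCV. The variable names are illustrative only.

cv::Mat frame;
if ( stream.Snap( frame, 1000 ) == 0 )
{
    cv::imwrite( "snap.png", frame );   // persist the captured frame
}
else
{
    std::cerr << "Snap failed or timed out" << std::endl;
}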
Code Example #5
File: pleora.cpp  Project: DapengChalmers/ZSLAM_TX2
void PleoraVideo::InitBuffers(size_t buffer_count)
{
    // Reading payload size from device
    const uint32_t lSize = lDevice->GetPayloadSize();

    // Use buffer_count or the maximum number of buffers, whichever is smaller
    const uint32_t lBufferCount = ( lStream->GetQueuedBufferMaximum() < buffer_count ) ?
        lStream->GetQueuedBufferMaximum() :
        buffer_count;

    // Allocate buffers
    for( uint32_t i = 0; i < lBufferCount; i++ ) {
        PvBuffer *lBuffer = new PvBuffer;
        lBuffer->Alloc( static_cast<uint32_t>( lSize ) );
        lBufferList.push_back( lBuffer );
    }
}
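
InitBuffers() only allocates; queuing the buffers onto the stream and releasing them are presumably handled elsewhere in the class. A hedged sketch of those two counterparts follows, assuming lBufferList holds PvBuffer* as above; the names QueueBuffers and DeinitBuffers are assumptions, not project code.

void PleoraVideo::QueueBuffers()
{
    // Queue every allocated buffer on the stream before acquisition starts
    for ( PvBuffer* lBuffer : lBufferList ) {
        lStream->QueueBuffer( lBuffer );
    }
}

void PleoraVideo::DeinitBuffers()
{
    // Release payload memory and destroy the buffers on shutdown
    for ( PvBuffer* lBuffer : lBufferList ) {
        lBuffer->Free();
        delete lBuffer;
    }
    lBufferList.clear();
}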
Code Example #6
File: pleora.cpp  Project: Anantak/Pangolin
bool PleoraVideo::GrabNewest( unsigned char* image, bool wait )
{
    PvBuffer *lBuffer0 = NULL;
    PvBuffer *lBuffer = NULL;
    PvResult lOperationResult;

    const uint32_t timeout = wait ? 0xFFFFFFFF : 0;

    PvResult lResult = lStream->RetrieveBuffer( &lBuffer, &lOperationResult, timeout );
    if ( !lResult.IsOK() ) {
        pango_print_warn("Pleora error: %s\n", lResult.GetCodeString().GetAscii() );
        return false;
    }else if( !lOperationResult.IsOK() ) {
        pango_print_warn("Pleora error: %s\n", lOperationResult.GetCodeString().GetAscii() );
        lStream->QueueBuffer( lBuffer );
        return false;
    }

    // We have at least one frame. Capture more until we fail, 0 timeout
    while(true) {
        PvResult lResult = lStream->RetrieveBuffer( &lBuffer0, &lOperationResult, 0 );
        if ( !lResult.IsOK() ) {
            break;
        }else if( !lOperationResult.IsOK() ) {
            lStream->QueueBuffer( lBuffer0 );
            break;
        }else{
            lStream->QueueBuffer( lBuffer );
            lBuffer = lBuffer0;
        }
    }

    bool good = false;

    PvPayloadType lType = lBuffer->GetPayloadType();
    if ( lType == PvPayloadTypeImage )
    {
        PvImage *lImage = lBuffer->GetImage();
        std::memcpy(image, lImage->GetDataPointer(), size_bytes);
        good = true;
    }

    lStream->QueueBuffer( lBuffer );
    return good;
}
Code Example #7
File: ImperxStream.cpp  Project: cello623/SAS
void ImperxStream::Stream(unsigned char *frame, Semaphore &frame_semaphore, Flag &stream_flag)
{
    // The pipeline is already "armed", we just have to tell the device
    // to start sending us images
    printf( "Sending StartAcquisition command to device\n" );
    lDeviceParams->ExecuteCommand( "AcquisitionStart" );

    char lDoodle[] = "|\\-|-/";
    int lDoodleIndex = 0;
    PvInt64 lImageCountVal = 0;
    double lFrameRateVal = 0.0;
    double lBandwidthVal = 0.0;

    // Acquire images until the user instructs us to stop
    printf( "\n<press a key to stop streaming>\n" );
    while ( stream_flag.check() )
    {
        std::cout << "here\n";
        // Retrieve next buffer
        PvBuffer *lBuffer = NULL;
        PvResult lOperationResult;
        PvResult lResult = lPipeline.RetrieveNextBuffer( &lBuffer, 1000, &lOperationResult );

        if ( lResult.IsOK() )
        {
            if ( lOperationResult.IsOK() )
            {
                // Process Buffer
                lStreamParams->GetIntegerValue( "ImagesCount", lImageCountVal );
                lStreamParams->GetFloatValue( "AcquisitionRateAverage", lFrameRateVal );
                lStreamParams->GetFloatValue( "BandwidthAverage", lBandwidthVal );

                // If the buffer contains an image, display width and height
                PvUInt32 lWidth = 0, lHeight = 0;
                if ( lBuffer->GetPayloadType() == PvPayloadTypeImage )
                {
                    // Get image specific buffer interface
                    PvImage *lImage = lBuffer->GetImage();

                    // Read width, height
                    lWidth = lImage->GetWidth();
                    lHeight = lImage->GetHeight();
                    stream_flag.raise();
                }

                std::cout << lWidth << " " << lHeight << "\n";
            }

            // We have an image - do some processing (...) and VERY IMPORTANT,
            // release the buffer back to the pipeline
            //semaphore thing
            //get all in there.
            //a semaphore thing

            lPipeline.ReleaseBuffer( lBuffer );
        }
        else
        {
            // Timeout
            printf( "%c Timeout\r", lDoodle[ lDoodleIndex ] );
        }

        ++lDoodleIndex %= 6;
    }
}
Code Example #8
File: ImperxStream.cpp  Project: cello623/SAS
void ImperxStream::Snap(cv::Mat &frame)
{
    // The pipeline is already "armed", we just have to tell the device
    // to start sending us images
    printf( "Sending StartAcquisition command to device\n" );
    lDeviceParams->ExecuteCommand( "AcquisitionStart" );

    char lDoodle[] = "|\\-|-/";
    int lDoodleIndex = 0;

    PvInt64 lImageCountVal = 0;
    double lFrameRateVal = 0.0;
    double lBandwidthVal = 0.0;

    std::cout << "here\n";
    // Retrieve next buffer		
    PvBuffer *lBuffer = NULL;
    PvResult lOperationResult;
    PvResult lResult = lPipeline.RetrieveNextBuffer( &lBuffer, 1000, &lOperationResult );
        
    if ( lResult.IsOK() )
    {
        if ( lOperationResult.IsOK() )
        {
            // Process Buffer
            lStreamParams->GetIntegerValue( "ImagesCount", lImageCountVal );
            lStreamParams->GetFloatValue( "AcquisitionRateAverage", lFrameRateVal );
            lStreamParams->GetFloatValue( "BandwidthAverage", lBandwidthVal );

            // If the buffer contains an image, display width and height
            int lWidth = 0, lHeight = 0;
            if ( lBuffer->GetPayloadType() == PvPayloadTypeImage )
            {
                // Get image specific buffer interface
                PvImage *lImage = lBuffer->GetImage();

                // Read width, height
                lWidth = (int) lImage->GetWidth();
                lHeight = (int) lImage->GetHeight();
                unsigned char *img = lImage->GetDataPointer();
//              cv::Mat lframe(lHeight,lWidth,CV_8UC1,img, cv::Mat::AUTO_STEP);
//              lframe.copyTo(frame);
                for (int m = 0; m < lHeight; m++)
                {
                    for (int n = 0; n < lWidth; n++)
                    {
                        frame.at<unsigned char>(m,n) = img[m*lWidth + n];
//                      std::cout << (short int) img[n*lHeight +m] << " ";
                    }
                }
            }
            else
            {
                std::cout << "No image\n";
            }

            std::cout << lWidth << " " << lHeight << "\n";
        }
        else
        {
            std::cout << "Damaged Result\n";
        }
        // We have an image - do some processing (...) and VERY IMPORTANT,
        // release the buffer back to the pipeline
        //semaphore thing
        //get all in there.
        //a semaphore thing

        lPipeline.ReleaseBuffer( lBuffer );
    }
    else
    {
        std::cout << "Timeout\n";
    }

    ++lDoodleIndex %= 6;
}
Code Example #9
int main( int aCount, const char ** aArgs )
{
    char lDeviceAddress[1024];
    char lMulticastAddress[1024];
    char lLocalAddress[1024];

    memset( lLocalAddress, 0, 1024 );
    sprintf( lMulticastAddress, "239.192.1.1" );
    memset( lDeviceAddress, 0, 1024 );

    bool lPassive = true;
    PvUInt32 lChannel = 0;
    PvUInt16 lHostPort = 1042;
    PvResult lResult;
    for ( int i=1; i<aCount; i++ )
    {
        std::string lString = aArgs[i];
        if ( lString.find( "--hostport" ) != std::string::npos )
        {
            sscanf( aArgs[i], "--hostport=%hu", &lHostPort );
        }
        else if ( lString.find( "--localaddress" ) != std::string::npos )
        {
            sscanf( aArgs[i], "--localaddress=%s", lLocalAddress );
        }
        else if ( lString.find( "--multicastaddress" ) != std::string::npos )
        {
            sscanf( aArgs[i], "--multicastaddress=%s", lMulticastAddress );
        }
        else if ( lString.find( "--deviceaddress" ) != std::string::npos )
        {
            sscanf( aArgs[i], "--deviceaddress=%s", lDeviceAddress );
        }
        else if ( lString.find( "--unicast" ) != std::string::npos )
        {
            memset( lMulticastAddress, 0, 1024 );
        }
        else if ( lString.find( "--connectdevice" ) != std::string::npos )
        {
            lPassive = false;
        }
        else if ( lString.find( "--channel" ) != std::string::npos )
        {
            sscanf( aArgs[i], "--channel=%u", &lChannel );
        }
        else if ( lString.find( "--help" ) != std::string::npos )
        {
            PrintHelp();
            return 0;
        }
        else
        {
            printf( "Did not recognize argument %s\n", aArgs[i] );
            PrintHelp();
            return 1;
        }
    }

    if ( strlen( lDeviceAddress ) == 0 )
    {
        // No device address specified. Prompt with the device finder.
        PvDeviceFinderWnd lWnd;
        if ( !lWnd.ShowModal().IsOK() )
        {
            printf( "No GEV device selected.\n" );
            return 1;
        }
        PvDeviceInfo* lInfo = lWnd.GetSelected();
        sprintf( lDeviceAddress, "%s", lInfo->GetIPAddress().GetAscii() );
    }

    PvStream lStream;

    if ( strlen( lMulticastAddress ) == 0 )
    {
        lResult = lStream.Open( lDeviceAddress, lHostPort, lChannel, lLocalAddress );
        printf( "Receiving from device %s on interface %s:%d\n", 
            lDeviceAddress, lStream.GetLocalIPAddress().GetAscii(), lStream.GetLocalPort() );
    }
    else
    {
        lResult = lStream.Open( lDeviceAddress, lMulticastAddress, lHostPort, lChannel, lLocalAddress );
        printf( "Receiving from multicast address %s:%d (device %s) on interface %s:%d\n",
            lMulticastAddress, lHostPort, lDeviceAddress, lStream.GetLocalIPAddress().GetAscii(), lStream.GetLocalPort() );
    }

    if ( !lResult.IsOK() )
    {
        printf( "Failed opening the incoming stream: %s\n", lResult.GetDescription().GetAscii() );
        return 1;
    }

    PvPipeline lPipeline( &lStream );

    PvDevice lDevice;
    PvGenParameterArray *lDeviceParams = NULL;
    if ( !lPassive )
    {
        lResult = lDevice.Connect( lDeviceAddress );
        if ( !lResult.IsOK() )
        {
            printf( "Failed connecting to the device to set its destination and initiate an AcquisitionStart: %s\n", 
                lResult.GetDescription().GetAscii() );
            printf( "If the eBUS Transmitter to receive from doesn't have full device capabilities, add the --passive command line option and initiate streaming manually.\n" );
            return 1;
        }
        lDevice.SetStreamDestination( lStream.GetLocalIPAddress(), lStream.GetLocalPort(), lChannel );
            
        // Get device parameters need to control streaming
        lDeviceParams = lDevice.GetGenParameters();

        // Read the payload size from the device so the pipeline's buffers are sized correctly;
        // otherwise the pipeline may miss the first several images.
        PvInt64 lReceivePayloadSize = 0;
        lDeviceParams->GetIntegerValue( "PayloadSize", lReceivePayloadSize );

        // Set the Buffer size and the Buffer count
        lPipeline.SetBufferSize( static_cast<PvUInt32>( lReceivePayloadSize ) );
    }

    lPipeline.SetBufferCount( 16 ); // Increase for high frame rate without missing block IDs
    lPipeline.Start();

    if ( !lPassive )
    {
        // TLParamsLocked is optional but when present, it MUST be set to 1
        // before sending the AcquisitionStart command
        lDeviceParams->SetIntegerValue( "TLParamsLocked", 1 );

        lDeviceParams->ExecuteCommand( "GevTimestampControlReset" );

        // The pipeline is already "armed", we just have to tell the device
        // to start sending us images
        lDeviceParams->ExecuteCommand( "AcquisitionStart" );
    }
    
    // Get stream parameters/stats
    PvGenParameterArray *lStreamParams = lStream.GetParameters();

    printf( "Press any key to stop receiving.  \n" );

    char lDoodle[] = "|\\-|-/";
    int lDoodleIndex = 0;
    PvInt64 lImageCountVal = 0;
    double lFrameRateVal = 0.0;
    double lBandwidthVal = 0.0;

    while ( !PvKbHit() )
    {
        PvBuffer *lBuffer = NULL;
        PvResult  lOperationResult;
        PvResult lResult = lPipeline.RetrieveNextBuffer( &lBuffer, 1000, &lOperationResult );
        
        if ( lResult.IsOK() )
        {
            if ( lOperationResult.IsOK() )
            {
                //
                // We now have a valid buffer. This is where you would typically process the buffer.
                // -----------------------------------------------------------------------------------------
                // ...

                lStreamParams->GetIntegerValue( "ImagesCount", lImageCountVal );
                lStreamParams->GetFloatValue( "AcquisitionRateAverage", lFrameRateVal );
                lStreamParams->GetFloatValue( "BandwidthAverage", lBandwidthVal );

                printf( "%c BlockID: %016llX %.01f FPS %.01f Mb/s\r", 
                    lDoodle[ lDoodleIndex ],
                    lBuffer->GetBlockID(),
                    lFrameRateVal,
                    lBandwidthVal / 1000000.0 ); 
            }
            // We have an image - do some processing (...) and VERY IMPORTANT,
            // release the buffer back to the pipeline
            lPipeline.ReleaseBuffer( lBuffer );
        }
        else
        {
            printf( "%c Timeout\r", lDoodle[ lDoodleIndex ] );
        }

        ++lDoodleIndex %= 6;
    }
}
Code Example #10
bool StartSlave()
{
	// Let the user select the device to receive from
	PvString lDeviceIP;
	if ( !SelectDevice( lDeviceIP ) )
	{
		return false;
	}
	
	// Create the PvStream object
	PvStream lStream;

	// Create the PvPipeline object
	PvPipeline lPipeline( &lStream );

    // Create a PvPipeline event sink (used to trap buffer too small events)
    PipelineEventSink lPipelineEventSink;
    lPipeline.RegisterEventSink( &lPipelineEventSink );

	// Open stream
	printf( "Opening stream\n" );
	lStream.Open( lDeviceIP, "239.192.1.1", 1042 );

	// IMPORTANT: the pipeline needs to be "armed", or started before 
	// we instruct the device to send us images
	printf( "Starting pipeline\n" );
    lPipeline.SetBufferCount( 16 );
	lPipeline.Start();

	// Get stream parameters/stats
	PvGenParameterArray *lStreamParams = lStream.GetParameters();
	PvGenInteger *lCount = dynamic_cast<PvGenInteger *>( lStreamParams->Get( "ImagesCount" ) );
	PvGenFloat *lFrameRate = dynamic_cast<PvGenFloat *>( lStreamParams->Get( "AcquisitionRateAverage" ) );
	PvGenFloat *lBandwidth = dynamic_cast<PvGenFloat *>( lStreamParams->Get( "BandwidthAverage" ) );
	PvGenBoolean *lIgnoreMissingPackets = dynamic_cast<PvGenBoolean *>( lStreamParams->Get( "IgnoreMissingPackets" ) );

	// Disabling resend packets
	lIgnoreMissingPackets->SetValue( true );

	char lDoodle[] = "|\\-|-/";
	int lDoodleIndex = 0;
	PvInt64 lImageCountVal = 0;
	double lFrameRateVal = 0.0;
	double lBandwidthVal = 0.0;

	// Acquire images until the user instructs us to stop
	printf( "\n<press a key to stop receiving>\n" );
	while ( !PvKbHit() )
	{
		// Retrieve next buffer		
		PvBuffer *lBuffer = NULL;
        PvResult  lOperationResult;
		PvResult lResult = lPipeline.RetrieveNextBuffer( &lBuffer, 1000, &lOperationResult );
		
        if ( lResult.IsOK() )
		{
            if (lOperationResult.IsOK())
            {
                //
                // We now have a valid buffer. This is where you would typically process the buffer.
                // -----------------------------------------------------------------------------------------
                // ...

			    lCount->GetValue( lImageCountVal );
			    lFrameRate->GetValue( lFrameRateVal );
			    lBandwidth->GetValue( lBandwidthVal );
			
				// If the buffer contains an image, display width and height
				PvUInt32 lWidth = 0, lHeight = 0;
				if ( lBuffer->GetPayloadType() == PvPayloadTypeImage )
				{
					// Get image specific buffer interface
					PvImage *lImage = lBuffer->GetImage();

					// Read width, height
					lWidth = lBuffer->GetImage()->GetWidth();
					lHeight = lBuffer->GetImage()->GetHeight();
				}
				
				printf( "%c BlockID: %016llX W: %i H: %i %.01f FPS %.01f Mb/s\r", 
                    lDoodle[ lDoodleIndex ],
                    lBuffer->GetBlockID(),
					lWidth,
					lHeight,
                    lFrameRateVal,
                    lBandwidthVal / 1000000.0 ); 
            }

            // We have an image - do some processing (...) and VERY IMPORTANT,
			// release the buffer back to the pipeline
			lPipeline.ReleaseBuffer( lBuffer );
		}
		else
		{
			// Timeout
			printf( "%c Timeout\r", lDoodle[ lDoodleIndex ] );
		}

		++lDoodleIndex %= 6;
	}

	PvGetChar(); // Flush key buffer for next stop
	printf( "\n\n" );

	// We stop the pipeline - letting the object lapse out of 
	// scope would have had the destructor do the same, but we do it anyway
	printf( "Stop pipeline\n" );
	lPipeline.Stop();

	// Now close the stream. Also optional, but nice to have
	printf( "Closing stream\n" );
	lStream.Close();

    // Unregister pipeline event sink. Optional but nice to have.
    lPipeline.UnregisterEventSink( &lPipelineEventSink );

	return true;
}
Code Example #11
int main( int aCount, const char ** aArgs )
{
    // Creates default configuration, parse command line parameters
    Config lConfig;
    lConfig.ParseCommandLine( aCount, aArgs );

    // Create video source
    VideoSource lSource( lConfig.GetDeviceAddress() );
    lSource.Connect();
    lSource.StartAcquisition();

    // Get video source properties
    PvUInt32 lWidth = lSource.GetWidth();
    PvUInt32 lHeight = lSource.GetHeight();
    PvPixelType lPixelFormat = lSource.GetPixelFormat();
    PvUInt32 lSize = lWidth * lHeight;

    // Allocate transmit buffers
    PvBufferList lBuffers;
    PvBufferList lFreeBuffers;
    for ( PvUInt32 i = 0; i < lConfig.GetBufferCount(); i++ )
    {
        // Alloc new buffer
        PvBuffer *lBuffer = new PvBuffer();
        lBuffer->SetID( i );

        // Add to both buffer list and free buffer list
        lBuffers.push_back( lBuffer );
        lFreeBuffers.push_back( lBuffer );
    }

    // Create transmitter, set packet size
    PvTransmitterRaw lTransmitter;
    lTransmitter.SetPacketSize( lConfig.GetPacketSize() );

    // Create virtual device (used for discovery)
    PvVirtualDevice lDevice;
    lDevice.StartListening( lConfig.GetSourceAddress() );

    cout << "Listening for device discovery requests on " << lConfig.GetSourceAddress() << endl;

    // Open transmitter - sets destination and source
    PvResult lResult = lTransmitter.Open( 
        lConfig.GetDestinationAddress(), lConfig.GetDestinationPort(), 
        lConfig.GetSourceAddress(), lConfig.GetSourcePort() );
    if ( !lResult.IsOK() )
    {
        cout << "Failed to open a connection to the transmitter." << endl;
        return 1;
    }

    cout << "Transmission stream opened:" << endl;
    cout << "Source: " << lTransmitter.GetSourceIPAddress().GetAscii() << " port " << lTransmitter.GetSourcePort() << endl;
    cout << "Destination: " << lConfig.GetDestinationAddress() << " port " << lConfig.GetDestinationPort() << endl; 

    if ( !lConfig.GetSilent() )
    {
        cout << "Press any key to begin transmitting.\r";
        PvWaitForKeyPress();
    }

    cout << "Press any key to stop transmitting." << endl;

    // Set maximum throughput (just to even out traffic, as we control throughput at the source)
    if ( lConfig.GetFPS() != 0 )
    {
        // Multiply image size (in bits) by FPS
        float lMax = static_cast<float>( lSize ) * 8;
        lMax *= lConfig.GetFPS();

        // Since we control throughput at the source, make sure maximum throughput is slightly
        // higher than what we need. We want to even out packet traffic, not slow down source frame rate
        lMax *= 1.1f;

        // Set max throughput
        lTransmitter.SetMaxPayloadThroughput( lMax );
    }

    char lDoodle[] = "|\\-|-/";
    int lDoodleIndex = 0;

    // Reset transmitter stats
    lTransmitter.ResetStats();

    // Used to transmit at a steady frame rate
    PvFPSStabilizer lStabilizer;

    // Seed used to generate the test pattern
    unsigned char lSeed = 0;

    // Acquisition/transmission loop
    while( !PvKbHit() )
    {
        // Step 1: If timing is right to meet desired FPS, generate pattern, transmit
        if ( ( lConfig.GetFPS() == 0 ) || lStabilizer.IsTimeToDisplay( (PvUInt32)lConfig.GetFPS() ) )
        {
            // Are there buffers available for transmission?
            if ( lFreeBuffers.size() > 0 )
            {
                // Retrieve buffer from list
                PvBuffer *lBuffer = lFreeBuffers.front();
                lFreeBuffers.pop_front();

                // Get, transform and copy image into buffer
                if ( lSource.FillBuffer( lBuffer, &lTransmitter ) )
                {
                    // Queue the buffer for transmission
                    lTransmitter.QueueBuffer( lBuffer );
                }
                else
                {
                    // No source image, put buffer back in free list
                    lFreeBuffers.push_front( lBuffer );
                }
            }
        }

        // Step 2: Retrieve free buffer(s), display stats and requeue
        PvBuffer *lBuffer = NULL;
        while ( lTransmitter.RetrieveFreeBuffer( &lBuffer, 0 ).IsOK() )
        {
            // Queue buffers back in available buffer list
            lFreeBuffers.push_back( lBuffer );

            // Buffer transmission complete, display stats
            cout << fixed << setprecision( 1 );
            cout << lDoodle[ lDoodleIndex ] << " ";
            cout << "Transmitted " << lTransmitter.GetBlocksTransmitted() << " blocks ";
            cout << "at " << lTransmitter.GetAverageTransmissionRate() << " ";
            cout << "(" << lTransmitter.GetInstantaneousTransmissionRate() << ") FPS ";
            cout << lTransmitter.GetAveragePayloadThroughput() / 1000000.0f << " ";
            cout << "(" << lTransmitter.GetInstantaneousPayloadThroughput() / 1000000.0f << ") Mb/s  \r";
            ++lDoodleIndex %= 6;
        }
    }

    // Close transmitter (will also abort buffers)
    lTransmitter.Close();

    // Free buffers
    PvBufferList::iterator lIt = lBuffers.begin();
    while ( lIt != lBuffers.end() )
    {
        delete ( *lIt );
        lIt++;
    }

    // Stop video source
    lSource.StopAcquisition();
    lSource.Disconnect();

    // Stop virtual device
    lDevice.StopListening();
}
Code Example #12
File: LirTest.cpp  Project: USF-COT/LirLogger
bool AcquireImages(CamInfo* cams, int numCams)
{
    int i;
    PvResult result;

    // Initialize Camera System
    PvSystem system;
    system.SetDetectionTimeout(2000);
    result = system.Find();    
    if(!result.IsOK()){
        printf("PvSystem::Find Error: %s", result.GetCodeString().GetAscii());
        return false;
    }

    PvDevice lDevice[numCams];
    PvGenParameterArray *lDeviceParams[numCams];
    PvStream lStream[numCams];
    PvPipeline *lPipeline[numCams];
    for(i=0; i < numCams; i++){
        PvDeviceInfo* lDeviceInfo = NULL;
        PvDeviceInfo* tempInfo;

        // Get the number of GEV Interfaces that were found using GetInterfaceCount.
        PvUInt32 lInterfaceCount = system.GetInterfaceCount();

        // For each interface, check MAC Address against passed address
        for( PvUInt32 x = 0; x < lInterfaceCount; x++ )
        {
            // get pointer to each of interface
            PvInterface * lInterface = system.GetInterface( x );

            // Get the number of GEV devices that were found using GetDeviceCount.
            PvUInt32 lDeviceCount = lInterface->GetDeviceCount();

            for( PvUInt32 y = 0; y < lDeviceCount ; y++ )
            {
                tempInfo = lInterface->GetDeviceInfo( y );
                if(strlen(cams[i].MACAddress) == strlen(tempInfo->GetMACAddress().GetAscii()) && strncmp(cams[i].MACAddress,tempInfo->GetMACAddress().GetAscii(),strlen(cams[i].MACAddress)) == 0){
                    lDeviceInfo = tempInfo;
                    break;
                }
            }
        }

        // If no device is selected, abort
        if( lDeviceInfo == NULL )
        {
            printf( "No device selected.\n" );
            return false;
        }

        // Connect to the GEV Device
        printf( "Connecting to %s\n", lDeviceInfo->GetMACAddress().GetAscii() );
        if ( !lDevice[i].Connect( lDeviceInfo ).IsOK() )
        {
            printf( "Unable to connect to %s\n", lDeviceInfo->GetMACAddress().GetAscii() );
            return false;
        }
        printf( "Successfully connected to %s\n", lDeviceInfo->GetMACAddress().GetAscii() );
        printf( "\n" );

        // Get device parameters need to control streaming
        lDeviceParams[i] = lDevice[i].GetGenParameters();

        // Negotiate streaming packet size
        lDevice[i].NegotiatePacketSize();

        // Open stream - have the PvDevice do it for us
        printf( "Opening stream to device\n" );
        lStream[i].Open( lDeviceInfo->GetIPAddress() );

        // Create the PvPipeline object
        lPipeline[i] = new PvPipeline( &lStream[i] );

        // Reading payload size from device
        PvInt64 lSize = 0;
        lDeviceParams[i]->GetIntegerValue( "PayloadSize", lSize );

        // Set the Buffer size and the Buffer count
        lPipeline[i]->SetBufferSize( static_cast<PvUInt32>( lSize ) );
        lPipeline[i]->SetBufferCount( 16 ); // Increase for high frame rate without missing block IDs

        // Have to set the Device IP destination to the Stream
        lDevice[i].SetStreamDestination( lStream[i].GetLocalIPAddress(), lStream[i].GetLocalPort() );
    }

    PvGenParameterArray *lStreamParams[numCams];
    for(i=0; i < numCams; i++){
        // IMPORTANT: the pipeline needs to be "armed", or started before
        // we instruct the device to send us images
        printf( "Starting pipeline %d\n",i);
        lPipeline[i]->Start();

        // Get stream parameters/stats
        lStreamParams[i] = lStream[i].GetParameters();

        // TLParamsLocked is optional but when present, it MUST be set to 1
        // before sending the AcquisitionStart command
        lDeviceParams[i]->SetIntegerValue( "TLParamsLocked", 1 );

        printf( "Resetting timestamp counter...\n" );
        lDeviceParams[i]->ExecuteCommand( "GevTimestampControlReset" );

        // The pipeline is already "armed", we just have to tell the device
        // to start sending us images
        printf( "Sending StartAcquisition command to device\n" );
        lDeviceParams[i]->ExecuteCommand( "AcquisitionStart" );
    }

    char lDoodle[] = "|\\-|-/";
    int lDoodleIndex = 0;
    PvInt64 lImageCountVal = 0;
    double lFrameRateVal = 0.0;
    double lBandwidthVal = 0.0;
    PvInt64 lPipelineBlocksDropped = 0;

    // Acquire images until the user instructs us to stop
    printf( "\n<press the enter key to stop streaming>\n" );
    //PvBufferWriter writer;
    char filePath[MAXFILEPATH];
    PvUInt32 lWidth = 4096, lHeight = 9250;
    while ( running )
    {
        for(i=0; i < numCams; i++){
            // Retrieve next buffer		
            PvBuffer *lBuffer = NULL;
            PvResult  lOperationResult;
            PvResult lResult = lPipeline[i]->RetrieveNextBuffer( &lBuffer, 1000, &lOperationResult );

            if ( lResult.IsOK() )
            {
                if ( lOperationResult.IsOK() )
                {
                    lStreamParams[i]->GetIntegerValue( "ImagesCount", lImageCountVal );
                    lStreamParams[i]->GetFloatValue( "AcquisitionRateAverage", lFrameRateVal );
                    lStreamParams[i]->GetFloatValue( "BandwidthAverage", lBandwidthVal );
                    lStreamParams[i]->GetIntegerValue("PipelineBlocksDropped", lPipelineBlocksDropped);

                    filePath[0] = '\0';
                    sprintf(filePath,"%s/%s%04X.tif",cams[i].filename,cams[i].prefix,lBuffer->GetBlockID());
                    TIFF *out = TIFFOpen(filePath,"w");
                    TIFFSetField(out, TIFFTAG_IMAGEWIDTH, lWidth);
                    TIFFSetField(out, TIFFTAG_IMAGELENGTH, lHeight);
                    TIFFSetField(out, TIFFTAG_SAMPLESPERPIXEL, 1);
                    TIFFSetField(out, TIFFTAG_BITSPERSAMPLE, 8);
                    TIFFSetField(out, TIFFTAG_ORIENTATION, ORIENTATION_TOPLEFT);
                    TIFFSetField(out, TIFFTAG_PLANARCONFIG, PLANARCONFIG_CONTIG);
                    TIFFSetField(out, TIFFTAG_PHOTOMETRIC, PHOTOMETRIC_MINISBLACK);
                    TIFFSetField(out, TIFFTAG_COMPRESSION, COMPRESSION_NONE);

                    TIFFWriteEncodedStrip(out,0,lBuffer->GetDataPointer(),lWidth*lHeight);
                    TIFFClose(out);

                    printf( "%d:%s%c Timestamp: %016llX BlockID: %04X %.01f FPS %lld DROP %.01f Mb/s\r\n",i,
                            cams[i].prefix,
                            lDoodle[ lDoodleIndex ],
                            lBuffer->GetTimestamp(),
                            lBuffer->GetBlockID(),
                            lFrameRateVal,
                            lPipelineBlocksDropped,
                            lBandwidthVal / 1000000.0 );
                } else {
                    printf("%d: ERROR Code: %s, Description: %s\r\n",i,lOperationResult.GetCodeString().GetAscii(),lOperationResult.GetDescription().GetAscii());
                }
                // We have an image - do some processing (...)
                // VERY IMPORTANT:
                // release the buffer back to the pipeline
                lPipeline[i]->ReleaseBuffer( lBuffer );
                usleep(200);
            }
            else
            {
                // Timeout
                printf( "%d:%s%c Timeout\r\n",i, cams[i].prefix,lDoodle[ lDoodleIndex ]);
            }
        }
        ++lDoodleIndex %= 6;
    }

    //_getch(); // Flush key buffer for next stop
    printf( "\n\n" );

    for(i=0; i < numCams; i++){
        // Tell the device to stop sending images
        printf( "Sending AcquisitionStop command to the device\n" );
        lDeviceParams[i]->ExecuteCommand( "AcquisitionStop" );

        // If present reset TLParamsLocked to 0. Must be done AFTER the 
        // streaming has been stopped
        lDeviceParams[i]->SetIntegerValue( "TLParamsLocked", 0 );

        // We stop the pipeline - letting the object lapse out of 
        // scope would have had the destructor do the same, but we do it anyway
        printf( "Stop pipeline\n" );
        lPipeline[i]->Stop();
        delete lPipeline[i];

        // Now close the stream. Also optional, but nice to have
        printf( "Closing stream\n" );
        lStream[i].Close();

        // Finally disconnect the device. Optional, still nice to have
        printf( "Disconnecting device\n" );
        lDevice[i].Disconnect();
    }

    return true;
}
Code Example #13
File: ThermoCamSingle.cpp  Project: khobbs91/16.831
bool AcquireImages()
{
	PvResult lResult;	
	PvDeviceInfo *lDeviceInfo = NULL;
	PvSystem lSystem;
	PvStream lStream;
	lSystem.SetDetectionTimeout( 20000 );
	lResult = lSystem.Find();
	if( !lResult.IsOK() )
	{
		cout << "PvSystem::Find Error: " << lResult.GetCodeString().GetAscii();
		return false;
	}
	PvUInt32 lInterfaceCount = lSystem.GetInterfaceCount();
	for( PvUInt32 x = 0; x < lInterfaceCount; x++ )
	{
		PvInterface * lInterface = lSystem.GetInterface( x );
		cout << "Ethernet Interface " << endl;
		cout << "IP Address: " << lInterface->GetIPAddress().GetAscii() << endl;
		cout << "Subnet Mask: " << lInterface->GetSubnetMask().GetAscii() << endl << endl;
		PvUInt32 lDeviceCount = lInterface->GetDeviceCount();
		for( PvUInt32 y = 0; y < lDeviceCount ; y++ )
		{
			lDeviceInfo = lInterface->GetDeviceInfo( y );
			cout << "ThermoCam " << endl;
			cout << "IP Address: " << lDeviceInfo->GetIPAddress().GetAscii() << endl;
		}
	}
	if( lDeviceInfo != NULL )
	{
		cout << "Connecting to " << lDeviceInfo->GetIPAddress().GetAscii() << endl;
		PvDevice lDevice;
		lResult = lDevice.Connect( lDeviceInfo );
		if ( !lResult.IsOK() )
		{
			cout << "Unable to connect to " << lDeviceInfo->GetIPAddress().GetAscii() << endl;
		}
		else
		{
			cout << "Successfully connected to " << lDeviceInfo->GetIPAddress().GetAscii() << endl;
    			lResult = lDevice.NegotiatePacketSize( );
    			if ( !lResult.IsOK() )
    			{
				cout << endl;
        			cout << " Failed to negotiate a packet size; keeping GevSCPSPacketSize at its original value";
        			PvSleepMs( 2500 );
    			}
			cout << endl;
    			cout << "3. Open stream......";
			lResult = lStream.Open( lDeviceInfo->GetIPAddress() );
			if ( !lResult.IsOK() )
			{	
				cout << endl;
				cout << "  Failed to open stream";
				return 0;
			}
			lDevice.SetStreamDestination( lStream.GetLocalIPAddress(), lStream.GetLocalPort() );
			PvInt64 lPayloadSize;
			lDevice.GetGenParameters()->GetIntegerValue( "PayloadSize", lPayloadSize );
			PvBuffer * lBuffer = new PvBuffer();
			lBuffer->Alloc( static_cast<PvUInt32>( lPayloadSize ) );
			PvBuffer *lPtr = NULL; 
			PvImage *lImage = NULL;
			cout << endl;
			cout << "5. Grab one image" << endl;
			lStream.QueueBuffer( lBuffer );
			lDevice.GetGenParameters()->SetIntegerValue( "TLParamsLocked", 1 );
			lDevice.GetGenParameters()->ExecuteCommand( "AcquisitionStart" );
			PvResult lStreamResult;
			lResult = lStream.RetrieveBuffer( &lPtr, &lStreamResult, 10000 );
			lDevice.GetGenParameters()->ExecuteCommand( "AcquisitionStop" );
			lDevice.GetGenParameters()->SetIntegerValue( "TLParamsLocked", 0 );
			PvInt64 lWidth = 0, lHeight = 0;
			PvGenParameterArray *lDeviceParams = lDevice.GetGenParameters();	
			lDeviceParams->GetIntegerValue( "Width", lWidth);
			lDeviceParams->GetIntegerValue( "Height", lHeight);			
			cvNamedWindow("OpenCV: ThermoCam",CV_WINDOW_NORMAL);
			cv::Mat raw_lImage(cv::Size(lWidth,lHeight),CV_8U);
			if ( lResult.IsOK() )
			{
				if ( lStreamResult.IsOK() )
				{
					cout << endl;
					cout << "6. Using RGB Filter";		
					lImage=lPtr->GetImage();
					lPtr->GetImage()->Alloc(lImage->GetWidth(),lImage->GetHeight(),PvPixelMono8);
					cout << "  a. Save the original image into ImageOriginal.bmp";			
					PvBufferWriter lBufferWriter;			
					lBufferWriter.Store(lPtr,"ThermoCam.bmp",PvBufferFormatBMP);
				}
				lImage->Attach(raw_lImage.data,lImage->GetWidth(),lImage->GetHeight(),PvPixelMono8);
				//cv::imshow("OpenCV: ThermoCam",raw_lImage);
				cv::FileStorage fs("ThermoCam.xml",cv::FileStorage::WRITE);		
				fs << "raw_lImage" << raw_lImage;
				fs.release();	
				//if(cv::waitKey(1000) >= 0) break;
				lPtr->Free();
			}
		  	lBuffer->Free();
			lDevice.ResetStreamDestination();
			lStream.Close();
			lDevice.Disconnect();
			return true;
		}
	}
	else
	{
		cout << "No device found" << endl;
	}
	return 0;
}
Code Example #14
File: pleora.cpp  Project: Anantak/Pangolin
PleoraVideo::PleoraVideo(const char* model_name, const char* serial_num, size_t index, size_t bpp,  size_t binX, size_t binY, size_t buffer_count,
                         size_t desired_size_x, size_t desired_size_y, size_t desired_pos_x, size_t desired_pos_y)
    : lPvSystem(0), lDevice(0), lStream(0)
{
    lPvSystem = new PvSystem();
    if ( !lPvSystem ) {
        throw pangolin::VideoException("Pleora: Unable to create PvSystem");
    }

    const PvDeviceInfo *lDeviceInfo = SelectDevice(*lPvSystem, model_name, serial_num, index);
    if ( !lDeviceInfo ) {
        throw pangolin::VideoException("Pleora: Unable to select device");
    }

    PvResult lResult;
    lDevice = PvDevice::CreateAndConnect( lDeviceInfo, &lResult );
    if ( !lDevice ) {
        throw pangolin::VideoException("Pleora: Unable to connect to device", lResult.GetDescription().GetAscii() );
    }

    lStream = PvStream::CreateAndOpen( lDeviceInfo->GetConnectionID(), &lResult );
    if ( !lStream ) {
        lDevice->Disconnect();
        PvDevice::Free(lDevice);
        throw pangolin::VideoException("Pleora: Unable to open stream", lResult.GetDescription().GetAscii() );
    }

    lDeviceParams = lDevice->GetParameters();
    lStart = dynamic_cast<PvGenCommand*>( lDeviceParams->Get( "AcquisitionStart" ) );
    lStop = dynamic_cast<PvGenCommand*>( lDeviceParams->Get( "AcquisitionStop" ) );

    if( bpp == 8) {
        lDeviceParams->SetEnumValue("PixelFormat", PvString("Mono8") );
    }else if(bpp == 10) {
        lDeviceParams->SetEnumValue("PixelFormat", PvString("Mono10p") );
    }else if(bpp == 12) {
        lDeviceParams->SetEnumValue("PixelFormat", PvString("Mono12p") );
    }

    // Setting Height and Width will fail if the values are not multiples of 8.
    lResult = lDeviceParams->SetIntegerValue("Height", desired_size_y );
    if(lResult.IsFailure()){
        pango_print_error("Height %zu fail\n", desired_size_y);
        int64_t max, min;
        lDeviceParams->GetIntegerRange("Height", max, min );
        lDeviceParams->SetIntegerValue("Height", max );
    }
    lResult = lDeviceParams->SetIntegerValue("Width", desired_size_x );
    if(lResult.IsFailure()){
        pango_print_error("Width %zu fail\n", desired_size_x);
        int64_t max, min;
        lDeviceParams->GetIntegerRange("Width", max, min );
        lDeviceParams->SetIntegerValue("Width", max );
    }

    lDeviceParams = lDevice->GetParameters();
    const int w = DeviceParam<int64_t>("Width");
    const int h = DeviceParam<int64_t>("Height");

    // Setting the offsets will fail if the values are not multiples of 8.
    lResult = lDeviceParams->SetIntegerValue("OffsetX", desired_pos_x );
    if(lResult.IsFailure()){
        pango_print_error("OffsetX %zu fail\n", desired_pos_x);
    }
    lResult = lDeviceParams->SetIntegerValue("OffsetY", desired_pos_y );
    if(lResult.IsFailure()){
        pango_print_error("OffsetY %zu fail\n", desired_pos_y);
    }

    lResult = lDeviceParams->SetIntegerValue("BinningHorizontal", binX );
    if(lResult.IsFailure()){
        pango_print_error("BinningHorizontal %zu fail\n", binX);
    }
    lResult = lDeviceParams->SetIntegerValue("BinningVertical", binY );
    if(lResult.IsFailure()){
        pango_print_error("BinningVertical %zu fail\n", binY);
    }

    lStreamParams = lStream->GetParameters();

    // Reading payload size from device
    const uint32_t lSize = lDevice->GetPayloadSize();

    // Use buffer_count or the maximum number of buffers, whichever is smaller
    const uint32_t lBufferCount = ( lStream->GetQueuedBufferMaximum() < buffer_count ) ?
        lStream->GetQueuedBufferMaximum() :
        buffer_count;

    // Allocate buffers
    for ( uint32_t i = 0; i < lBufferCount; i++ )
    {
        PvBuffer *lBuffer = new PvBuffer;
        lBuffer->Alloc( static_cast<uint32_t>( lSize ) );
        lBufferList.push_back( lBuffer );
    }

    // Setup pangolin for stream
    PvGenEnum* lpixfmt = dynamic_cast<PvGenEnum*>( lDeviceParams->Get("PixelFormat") );
    const VideoPixelFormat fmt = PleoraFormat(lpixfmt);
    streams.push_back(StreamInfo(fmt, w, h, (w*fmt.bpp)/8));
    size_bytes = lSize;

    Start();
}
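
A construction sketch for this class, with illustrative arguments only (the empty model/serial strings and every numeric value are assumptions, not project defaults): camera index 0 at 8 bpp, no binning, 4 stream buffers, a 640x480 ROI at offset (0,0).

// Illustrative only; argument handling is assumed from the constructor signature above.
PleoraVideo video( "", "", 0, 8, 1, 1, 4, 640, 480, 0, 0 );

std::vector<unsigned char> frame( video.SizeBytes() );
video.GrabNext( frame.data(), true );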