void PleoraVideo::InitStream()
{
    // Setup stream
    PvResult lResult;
    lStream = PvStream::CreateAndOpen( lDeviceInfo->GetConnectionID(), &lResult );
    if ( !lStream ) {
        DeinitDevice();
        throw pangolin::VideoException("Pleora: Unable to open stream", lResult.GetDescription().GetAscii() );
    }

    lStreamParams = lStream->GetParameters();
}
void PleoraVideo::InitDevice(
    const char* model_name, const char* serial_num, size_t index
) {
    lPvSystem = new PvSystem();
    if ( !lPvSystem ) {
        throw pangolin::VideoException("Pleora: Unable to create PvSystem");
    }

    lDeviceInfo = SelectDevice(*lPvSystem, model_name, serial_num, index);
    if ( !lDeviceInfo ) {
        delete lPvSystem;
        throw pangolin::VideoException("Pleora: Unable to select device");
    }

    PvResult lResult;
    lDevice = PvDevice::CreateAndConnect( lDeviceInfo, &lResult );
    if ( !lDevice ) {
        delete lPvSystem;
        throw pangolin::VideoException("Pleora: Unable to connect to device", lResult.GetDescription().GetAscii() );
    }

    lDeviceParams = lDevice->GetParameters();
}
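InitDevice relies on a SelectDevice helper that is not shown here. The sketch below is a hypothetical version, assuming an eBUS SDK 4-style API and that PvDeviceInfo exposes GetModelName()/GetSerialNumber() accessors; the enumeration pattern mirrors the multi-camera example further down.

// Hypothetical sketch: enumerate all detected devices and return the first one
// matching the optional model name / serial number, or the index-th match.
const PvDeviceInfo* SelectDevice( PvSystem& aSystem, const char* model_name,
                                  const char* serial_num, size_t index )
{
    aSystem.Find();

    size_t matches = 0;
    for ( uint32_t i = 0; i < aSystem.GetInterfaceCount(); ++i ) {
        const PvInterface* lInterface = aSystem.GetInterface( i );
        for ( uint32_t d = 0; d < lInterface->GetDeviceCount(); ++d ) {
            const PvDeviceInfo* lInfo = lInterface->GetDeviceInfo( d );
            if ( model_name && strcmp(lInfo->GetModelName().GetAscii(), model_name) != 0 )
                continue;
            if ( serial_num && strcmp(lInfo->GetSerialNumber().GetAscii(), serial_num) != 0 )
                continue;
            if ( matches++ == index ) return lInfo; // index-th matching device wins
        }
    }
    return nullptr;
}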
int main( int aCount, const char ** aArgs )
{
    char lDeviceAddress[1024];
    char lMulticastAddress[1024];
    char lLocalAddress[1024];

    memset( lLocalAddress, 0, 1024 );
    sprintf( lMulticastAddress, "239.192.1.1" );
    memset( lDeviceAddress, 0, 1024 );

    bool lPassive = true;
    PvUInt32 lChannel = 0;
    PvUInt16 lHostPort = 1042;
    PvResult lResult;

    // Parse command line arguments
    for ( int i = 1; i < aCount; i++ )
    {
        std::string lString = aArgs[i];
        if ( lString.find( "--hostport" ) != std::string::npos )
        {
            sscanf( aArgs[i], "--hostport=%hu", &lHostPort );
        }
        else if ( lString.find( "--localaddress" ) != std::string::npos )
        {
            sscanf( aArgs[i], "--localaddress=%s", lLocalAddress );
        }
        else if ( lString.find( "--multicastaddress" ) != std::string::npos )
        {
            sscanf( aArgs[i], "--multicastaddress=%s", lMulticastAddress );
        }
        else if ( lString.find( "--deviceaddress" ) != std::string::npos )
        {
            sscanf( aArgs[i], "--deviceaddress=%s", lDeviceAddress );
        }
        else if ( lString.find( "--unicast" ) != std::string::npos )
        {
            memset( lMulticastAddress, 0, 1024 );
        }
        else if ( lString.find( "--connectdevice" ) != std::string::npos )
        {
            lPassive = false;
        }
        else if ( lString.find( "--channel" ) != std::string::npos )
        {
            sscanf( aArgs[i], "--channel=%u", &lChannel );
        }
        else if ( lString.find( "--help" ) != std::string::npos )
        {
            PrintHelp();
            return 0;
        }
        else
        {
            printf( "Did not recognize argument %s\n", aArgs[i] );
            PrintHelp();
            return 1;
        }
    }

    if ( strlen( lDeviceAddress ) == 0 )
    {
        // No device address specified. Prompt with the device finder.
        PvDeviceFinderWnd lWnd;
        if ( !lWnd.ShowModal().IsOK() )
        {
            printf( "No GEV device selected.\n" );
            return 1;
        }
        PvDeviceInfo* lInfo = lWnd.GetSelected();
        sprintf( lDeviceAddress, "%s", lInfo->GetIPAddress().GetAscii() );
    }

    // Open the incoming stream, either unicast or multicast
    PvStream lStream;
    if ( strlen( lMulticastAddress ) == 0 )
    {
        lResult = lStream.Open( lDeviceAddress, lHostPort, lChannel, lLocalAddress );
        printf( "Receiving from device %s on interface %s:%d\n",
            lDeviceAddress, lStream.GetLocalIPAddress().GetAscii(), lStream.GetLocalPort() );
    }
    else
    {
        lResult = lStream.Open( lDeviceAddress, lMulticastAddress, lHostPort, lChannel, lLocalAddress );
        printf( "Receiving from multicast address %s:%d (device %s) on interface %s:%d\n",
            lMulticastAddress, lHostPort, lDeviceAddress,
            lStream.GetLocalIPAddress().GetAscii(), lStream.GetLocalPort() );
    }

    if ( !lResult.IsOK() )
    {
        printf( "Failed opening the incoming stream: %s\n", lResult.GetDescription().GetAscii() );
        return 1;
    }

    PvPipeline lPipeline( &lStream );

    PvDevice lDevice;
    PvGenParameterArray *lDeviceParams = NULL;
    if ( !lPassive )
    {
        // Connect to the device so we can set its stream destination and start acquisition
        lResult = lDevice.Connect( lDeviceAddress );
        if ( !lResult.IsOK() )
        {
            printf( "Failed connecting to the device to set its destination and initiate an AcquisitionStart: %s\n",
                lResult.GetDescription().GetAscii() );
            printf( "If the eBUS Transmitter to receive from doesn't have full device capabilities, "
                    "omit the --connectdevice command line option and initiate streaming manually.\n" );
            return 1;
        }
        lDevice.SetStreamDestination( lStream.GetLocalIPAddress(), lStream.GetLocalPort(), lChannel );

        // Get device parameters needed to control streaming
        lDeviceParams = lDevice.GetGenParameters();

        // Reading payload size from device. Otherwise, the pipeline may miss the first several images.
        PvInt64 lReceivePayloadSize = 0;
        lDeviceParams->GetIntegerValue( "PayloadSize", lReceivePayloadSize );

        // Set the buffer size
        lPipeline.SetBufferSize( static_cast<PvUInt32>( lReceivePayloadSize ) );
    }

    lPipeline.SetBufferCount( 16 ); // Increase for high frame rate without missing block IDs
    lPipeline.Start();

    if ( !lPassive )
    {
        // TLParamsLocked is optional but when present, it MUST be set to 1
        // before sending the AcquisitionStart command
        lDeviceParams->SetIntegerValue( "TLParamsLocked", 1 );
        lDeviceParams->ExecuteCommand( "GevTimestampControlReset" );

        // The pipeline is already "armed", we just have to tell the device
        // to start sending us images
        lDeviceParams->ExecuteCommand( "AcquisitionStart" );
    }

    // Get stream parameters/stats
    PvGenParameterArray *lStreamParams = lStream.GetParameters();

    printf( "Press any key to stop receiving.\n" );

    char lDoodle[] = "|\\-|-/";
    int lDoodleIndex = 0;
    PvInt64 lImageCountVal = 0;
    double lFrameRateVal = 0.0;
    double lBandwidthVal = 0.0;

    while ( !PvKbHit() )
    {
        PvBuffer *lBuffer = NULL;
        PvResult lOperationResult;
        PvResult lResult = lPipeline.RetrieveNextBuffer( &lBuffer, 1000, &lOperationResult );

        if ( lResult.IsOK() )
        {
            if ( lOperationResult.IsOK() )
            {
                // We now have a valid buffer. This is where you would typically process the buffer.
                // ...
                lStreamParams->GetIntegerValue( "ImagesCount", lImageCountVal );
                lStreamParams->GetFloatValue( "AcquisitionRateAverage", lFrameRateVal );
                lStreamParams->GetFloatValue( "BandwidthAverage", lBandwidthVal );

                printf( "%c BlockID: %016llX %.01f FPS %.01f Mb/s\r",
                    lDoodle[ lDoodleIndex ], lBuffer->GetBlockID(),
                    lFrameRateVal, lBandwidthVal / 1000000.0 );
            }

            // We have an image - do some processing (...) and VERY IMPORTANT,
            // release the buffer back to the pipeline
            lPipeline.ReleaseBuffer( lBuffer );
        }
        else
        {
            printf( "%c Timeout\r", lDoodle[ lDoodleIndex ] );
        }

        ++lDoodleIndex %= 6;
    }
}
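main() calls a PrintHelp() routine that is not shown above. A minimal hypothetical sketch, assuming it simply lists the options handled in the argument loop:

// Hypothetical helper: document the command line options parsed by main() above.
void PrintHelp()
{
    printf( "Optional command line arguments:\n" );
    printf( "  --hostport=<port>           Local port to receive on (default 1042)\n" );
    printf( "  --localaddress=<addr>       Local interface address to receive on\n" );
    printf( "  --multicastaddress=<addr>   Multicast group to join (default 239.192.1.1)\n" );
    printf( "  --deviceaddress=<addr>      IP address of the transmitting GEV device\n" );
    printf( "  --unicast                   Receive unicast instead of multicast\n" );
    printf( "  --connectdevice             Connect to the device and start acquisition\n" );
    printf( "  --channel=<n>               Stream channel to receive (default 0)\n" );
    printf( "  --help                      Show this message\n" );
}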
bool AcquireImages(CamInfo* cams, int numCams)
{
    int i;
    PvResult result;

    // Initialize camera system
    PvSystem system;
    system.SetDetectionTimeout(2000);
    result = system.Find();
    if( !result.IsOK() )
    {
        printf("PvSystem::Find Error: %s", result.GetCodeString().GetAscii());
        return false;
    }

    // Note: variable-length arrays of class types rely on a compiler extension (e.g. GCC).
    PvDevice lDevice[numCams];
    PvGenParameterArray *lDeviceParams[numCams];
    PvStream lStream[numCams];
    PvPipeline *lPipeline[numCams];

    for(i=0; i < numCams; i++)
    {
        PvDeviceInfo* lDeviceInfo = NULL;
        PvDeviceInfo* tempInfo;

        // Get the number of GEV interfaces that were found using GetInterfaceCount.
        PvUInt32 lInterfaceCount = system.GetInterfaceCount();

        // For each interface, check the MAC address against the passed address
        for( PvUInt32 x = 0; x < lInterfaceCount; x++ )
        {
            // Get a pointer to each interface
            PvInterface * lInterface = system.GetInterface( x );

            // Get the number of GEV devices that were found using GetDeviceCount.
            PvUInt32 lDeviceCount = lInterface->GetDeviceCount();

            for( PvUInt32 y = 0; y < lDeviceCount ; y++ )
            {
                tempInfo = lInterface->GetDeviceInfo( y );
                if( strlen(cams[i].MACAddress) == strlen(tempInfo->GetMACAddress().GetAscii()) &&
                    strncmp(cams[i].MACAddress, tempInfo->GetMACAddress().GetAscii(), strlen(cams[i].MACAddress)) == 0 )
                {
                    lDeviceInfo = tempInfo;
                    break;
                }
            }
        }

        // If no device is selected, abort
        if( lDeviceInfo == NULL )
        {
            printf( "No device selected.\n" );
            return false;
        }

        // Connect to the GEV device
        printf( "Connecting to %s\n", lDeviceInfo->GetMACAddress().GetAscii() );
        if ( !lDevice[i].Connect( lDeviceInfo ).IsOK() )
        {
            printf( "Unable to connect to %s\n", lDeviceInfo->GetMACAddress().GetAscii() );
            return false;
        }
        printf( "Successfully connected to %s\n", lDeviceInfo->GetMACAddress().GetAscii() );
        printf( "\n" );

        // Get device parameters needed to control streaming
        lDeviceParams[i] = lDevice[i].GetGenParameters();

        // Negotiate streaming packet size
        lDevice[i].NegotiatePacketSize();

        // Open stream - have the PvDevice do it for us
        printf( "Opening stream to device\n" );
        lStream[i].Open( lDeviceInfo->GetIPAddress() );

        // Create the PvPipeline object
        lPipeline[i] = new PvPipeline( &lStream[i] );

        // Read payload size from device
        PvInt64 lSize = 0;
        lDeviceParams[i]->GetIntegerValue( "PayloadSize", lSize );

        // Set the buffer size and the buffer count
        lPipeline[i]->SetBufferSize( static_cast<PvUInt32>( lSize ) );
        lPipeline[i]->SetBufferCount( 16 ); // Increase for high frame rate without missing block IDs

        // Have to set the device IP destination to the stream
        lDevice[i].SetStreamDestination( lStream[i].GetLocalIPAddress(), lStream[i].GetLocalPort() );
    }

    PvGenParameterArray *lStreamParams[numCams];
    for(i=0; i < numCams; i++)
    {
        // IMPORTANT: the pipeline needs to be "armed", or started, before
        // we instruct the device to send us images
        printf( "Starting pipeline %d\n", i );
        lPipeline[i]->Start();

        // Get stream parameters/stats
        lStreamParams[i] = lStream[i].GetParameters();

        // TLParamsLocked is optional but when present, it MUST be set to 1
        // before sending the AcquisitionStart command
        lDeviceParams[i]->SetIntegerValue( "TLParamsLocked", 1 );

        printf( "Resetting timestamp counter...\n" );
        lDeviceParams[i]->ExecuteCommand( "GevTimestampControlReset" );

        // The pipeline is already "armed", we just have to tell the device
        // to start sending us images
        printf( "Sending StartAcquisition command to device\n" );
        lDeviceParams[i]->ExecuteCommand( "AcquisitionStart" );
    }

    char lDoodle[] = "|\\-|-/";
    int lDoodleIndex = 0;
    PvInt64 lImageCountVal = 0;
    double lFrameRateVal = 0.0;
    double lBandwidthVal = 0.0;
    PvInt64 lPipelineBlocksDropped = 0;

    // Acquire images until the user instructs us to stop
    printf( "\n<press the enter key to stop streaming>\n" );

    //PvBufferWriter writer;
    char filePath[MAXFILEPATH];
    PvUInt32 lWidth = 4096, lHeight = 9250;

    while ( running )
    {
        for(i=0; i < numCams; i++)
        {
            // Retrieve next buffer
            PvBuffer *lBuffer = NULL;
            PvResult lOperationResult;
            PvResult lResult = lPipeline[i]->RetrieveNextBuffer( &lBuffer, 1000, &lOperationResult );

            if ( lResult.IsOK() )
            {
                if ( lOperationResult.IsOK() )
                {
                    lStreamParams[i]->GetIntegerValue( "ImagesCount", lImageCountVal );
                    lStreamParams[i]->GetFloatValue( "AcquisitionRateAverage", lFrameRateVal );
                    lStreamParams[i]->GetFloatValue( "BandwidthAverage", lBandwidthVal );
                    lStreamParams[i]->GetIntegerValue( "PipelineBlocksDropped", lPipelineBlocksDropped );

                    // Write the raw 8-bit mono buffer out as an uncompressed TIFF
                    filePath[0] = '\0';
                    sprintf(filePath, "%s/%s%04X.tif", cams[i].filename, cams[i].prefix, lBuffer->GetBlockID());
                    TIFF *out = TIFFOpen(filePath, "w");
                    TIFFSetField(out, TIFFTAG_IMAGEWIDTH, lWidth);
                    TIFFSetField(out, TIFFTAG_IMAGELENGTH, lHeight);
                    TIFFSetField(out, TIFFTAG_SAMPLESPERPIXEL, 1);
                    TIFFSetField(out, TIFFTAG_BITSPERSAMPLE, 8);
                    TIFFSetField(out, TIFFTAG_ORIENTATION, ORIENTATION_TOPLEFT);
                    TIFFSetField(out, TIFFTAG_PLANARCONFIG, PLANARCONFIG_CONTIG);
                    TIFFSetField(out, TIFFTAG_PHOTOMETRIC, PHOTOMETRIC_MINISBLACK);
                    TIFFSetField(out, TIFFTAG_COMPRESSION, COMPRESSION_NONE);
                    TIFFWriteEncodedStrip(out, 0, lBuffer->GetDataPointer(), lWidth*lHeight);
                    TIFFClose(out);

                    printf( "%d:%s%c Timestamp: %016llX BlockID: %04X %.01f FPS %lld DROP %.01f Mb/s\r\n",
                        i, cams[i].prefix, lDoodle[ lDoodleIndex ],
                        lBuffer->GetTimestamp(), lBuffer->GetBlockID(),
                        lFrameRateVal, lPipelineBlocksDropped, lBandwidthVal / 1000000.0 );
                }
                else
                {
                    printf( "%d: ERROR Code: %s, Description: %s\r\n", i,
                        lOperationResult.GetCodeString().GetAscii(),
                        lOperationResult.GetDescription().GetAscii() );
                }

                // We have an image - do some processing (...)
                // VERY IMPORTANT: release the buffer back to the pipeline
                lPipeline[i]->ReleaseBuffer( lBuffer );
                usleep(200);
            }
            else
            {
                // Timeout
                printf( "%d:%s%c Timeout\r\n", i, cams[i].prefix, lDoodle[ lDoodleIndex ] );
            }
        }
        ++lDoodleIndex %= 6;
    }

    //_getch(); // Flush key buffer for next stop
    printf( "\n\n" );

    for(i=0; i < numCams; i++)
    {
        // Tell the device to stop sending images
        printf( "Sending AcquisitionStop command to the device\n" );
        lDeviceParams[i]->ExecuteCommand( "AcquisitionStop" );

        // If present, reset TLParamsLocked to 0. Must be done AFTER the
        // streaming has been stopped
        lDeviceParams[i]->SetIntegerValue( "TLParamsLocked", 0 );

        // We stop the pipeline - letting the object go out of
        // scope would have had the destructor do the same, but we do it anyway
        printf( "Stop pipeline\n" );
        lPipeline[i]->Stop();
        delete lPipeline[i];

        // Now close the stream. Also optional, but nice to have
        printf( "Closing stream\n" );
        lStream[i].Close();

        // Finally disconnect the device. Optional, still nice to have
        printf( "Disconnecting device\n" );
        lDevice[i].Disconnect();
    }

    return true;
}
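AcquireImages assumes a CamInfo record supplied by the caller, plus a global running flag (cleared elsewhere, e.g. by a signal handler) that ends the acquisition loop. Only the fields used above are known; this is a hypothetical definition with assumed sizes, not the project's actual header.

// Hypothetical definition, inferred from the fields AcquireImages() actually uses.
#define MAXFILEPATH 1024

struct CamInfo
{
    char MACAddress[18]; // e.g. "00:11:1c:aa:bb:cc", matched against PvDeviceInfo::GetMACAddress()
    char filename[512];  // output directory for the TIFF files
    char prefix[64];     // filename prefix, also echoed in the status line
};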
void PleoraVideo::SetDeviceParams(Params& p)
{
    lStart = dynamic_cast<PvGenCommand*>( lDeviceParams->Get( "AcquisitionStart" ) );
    lStop  = dynamic_cast<PvGenCommand*>( lDeviceParams->Get( "AcquisitionStop" ) );

    for(Params::ParamMap::iterator it = p.params.begin(); it != p.params.end(); it++) {
        if(it->first == "get_temperature"){
            getTemp = p.Get<bool>("get_temperature", false);
        } else {
            if (it->second == "Execute") {
                // This is a command, deal with it accordingly.
                PvGenCommand* cmd = dynamic_cast<PvGenCommand*>(lDeviceParams->Get(it->first.c_str()));
                if(cmd) {
                    PvResult r = cmd->Execute();
                    if(!r.IsOK()){
                        pango_print_error("Error executing command %s Reason:%s\n", it->first.c_str(), r.GetDescription().GetAscii());
                    } else {
                        pango_print_info("Executed Command %s\n", it->first.c_str());
                    }
                    // Poll until the command reports completion, or give up after 20 attempts.
                    bool done = false;
                    int attempts = 20;
                    do {
                        cmd->IsDone(done);
                        std::this_thread::sleep_for(std::chrono::milliseconds(1000));
                        attempts--;
                    } while(!done && (attempts > 0));
                    if(attempts == 0) {
                        pango_print_error("Timeout while waiting for command %s done\n", it->first.c_str());
                    }
                } else {
                    pango_print_error("Command %s not recognized\n", it->first.c_str());
                }
            } else {
                try {
                    PvGenParameter* par = lDeviceParams->Get(PvString(it->first.c_str()));
                    if(par) {
                        PvResult r = par->FromString(PvString(it->second.c_str()));
                        if(!r.IsOK()){
                            pango_print_error("Error setting parameter %s to:%s Reason:%s\n", it->first.c_str(), it->second.c_str(), r.GetDescription().GetAscii());
                        } else {
                            pango_print_info("Setting parameter %s to:%s\n", it->first.c_str(), it->second.c_str());
                        }
                    } else {
                        pango_print_error("Parameter %s not recognized\n", it->first.c_str());
                    }
                } catch(const std::runtime_error& e) {
                    pango_print_error("Set parameter %s: %s\n", it->first.c_str(), e.what());
                }
            }
        }
    }

    // Get handles to the properties we'll be using.
    lAnalogGain = lDeviceParams->GetInteger("AnalogGain");
    lGamma = lDeviceParams->GetFloat("Gamma");
    lAnalogBlackLevel = lDeviceParams->GetInteger("AnalogBlackLevel");
    lExposure = lDeviceParams->GetFloat("ExposureTime");
    lAquisitionMode = lDeviceParams->GetEnum("AcquisitionMode");
    lTriggerSource = lDeviceParams->GetEnum("TriggerSource");
    lTriggerMode = lDeviceParams->GetEnum("TriggerMode");

    if(getTemp) {
        lTemperatureCelcius = lDeviceParams->GetFloat("DeviceTemperatureCelsius");
        pango_print_warn("Warning: get_temperature might add a blocking call taking several ms to each frame read.");
    }
}
void PleoraVideo::SetParameter(const std::string& name, const std::string& value)
{
    PvGenParameter* par = lDeviceParams->Get(PvString(name.c_str()));
    if(par) {
        PvResult r = par->FromString(PvString(value.c_str()));
        if(!r.IsOK()){
            pango_print_error("Error setting parameter %s to:%s Reason:%s\n", name.c_str(), value.c_str(), r.GetDescription().GetAscii());
        } else {
            pango_print_info("Setting parameter %s to:%s\n", name.c_str(), value.c_str());
        }
    } else {
        pango_print_error("Parameter %s not recognized\n", name.c_str());
    }
}
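As a usage illustration only: the parameter names below are common GenICam feature names, and whether a given camera exposes them (and in which units) is device dependent.

// Example: tweak exposure and gain at runtime on an open PleoraVideo instance.
void ApplyDaylightPreset(pangolin::PleoraVideo& video)
{
    video.SetParameter("ExposureTime", "5000"); // microseconds on most cameras
    video.SetParameter("AnalogGain", "1");
}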
PleoraVideo::PleoraVideo(const char* model_name, const char* serial_num, size_t index, size_t bpp,
        size_t binX, size_t binY, size_t buffer_count,
        size_t desired_size_x, size_t desired_size_y, size_t desired_pos_x, size_t desired_pos_y)
    : lPvSystem(0), lDevice(0), lStream(0)
{
    lPvSystem = new PvSystem();
    if ( !lPvSystem ) {
        throw pangolin::VideoException("Pleora: Unable to create PvSystem");
    }

    const PvDeviceInfo *lDeviceInfo = SelectDevice(*lPvSystem, model_name, serial_num, index);
    if ( !lDeviceInfo ) {
        throw pangolin::VideoException("Pleora: Unable to select device");
    }

    PvResult lResult;
    lDevice = PvDevice::CreateAndConnect( lDeviceInfo, &lResult );
    if ( !lDevice ) {
        throw pangolin::VideoException("Pleora: Unable to connect to device", lResult.GetDescription().GetAscii() );
    }

    lStream = PvStream::CreateAndOpen( lDeviceInfo->GetConnectionID(), &lResult );
    if ( !lStream ) {
        lDevice->Disconnect();
        PvDevice::Free(lDevice);
        throw pangolin::VideoException("Pleora: Unable to open stream", lResult.GetDescription().GetAscii() );
    }

    lDeviceParams = lDevice->GetParameters();
    lStart = dynamic_cast<PvGenCommand*>( lDeviceParams->Get( "AcquisitionStart" ) );
    lStop  = dynamic_cast<PvGenCommand*>( lDeviceParams->Get( "AcquisitionStop" ) );

    if( bpp == 8) {
        lDeviceParams->SetEnumValue("PixelFormat", PvString("Mono8") );
    } else if(bpp == 10) {
        lDeviceParams->SetEnumValue("PixelFormat", PvString("Mono10p") );
    } else if(bpp == 12) {
        lDeviceParams->SetEnumValue("PixelFormat", PvString("Mono12p") );
    }

    // Height and width will fail if not multiples of 8.
    lResult = lDeviceParams->SetIntegerValue("Height", desired_size_y );
    if(lResult.IsFailure()){
        pango_print_error("Height %zu fail\n", desired_size_y);
        int64_t max, min;
        lDeviceParams->GetIntegerRange("Height", max, min );
        lDeviceParams->SetIntegerValue("Height", max );
    }
    lResult = lDeviceParams->SetIntegerValue("Width", desired_size_x );
    if(lResult.IsFailure()){
        pango_print_error("Width %zu fail\n", desired_size_x);
        int64_t max, min;
        lDeviceParams->GetIntegerRange("Width", max, min );
        lDeviceParams->SetIntegerValue("Width", max );
    }

    lDeviceParams = lDevice->GetParameters();
    const int w = DeviceParam<int64_t>("Width");
    const int h = DeviceParam<int64_t>("Height");

    // Offsets will fail if not multiples of 8.
    lResult = lDeviceParams->SetIntegerValue("OffsetX", desired_pos_x );
    if(lResult.IsFailure()){
        pango_print_error("OffsetX %zu fail\n", desired_pos_x);
    }
    lResult = lDeviceParams->SetIntegerValue("OffsetY", desired_pos_y );
    if(lResult.IsFailure()){
        pango_print_error("OffsetY %zu fail\n", desired_pos_y);
    }

    lResult = lDeviceParams->SetIntegerValue("BinningHorizontal", binX );
    if(lResult.IsFailure()){
        pango_print_error("BinningHorizontal %zu fail\n", binX);
    }
    lResult = lDeviceParams->SetIntegerValue("BinningVertical", binY );
    if(lResult.IsFailure()){
        pango_print_error("BinningVertical %zu fail\n", binY);
    }

    lStreamParams = lStream->GetParameters();

    // Reading payload size from device
    const uint32_t lSize = lDevice->GetPayloadSize();

    // Use buffer_count or the maximum number of buffers, whichever is smaller
    const uint32_t lBufferCount = ( lStream->GetQueuedBufferMaximum() < buffer_count ) ?
        lStream->GetQueuedBufferMaximum() :
        buffer_count;

    // Allocate buffers and queue
    for ( uint32_t i = 0; i < lBufferCount; i++ ) {
        PvBuffer *lBuffer = new PvBuffer;
        lBuffer->Alloc( static_cast<uint32_t>( lSize ) );
        lBufferList.push_back( lBuffer );
    }

    // Setup pangolin for stream
    PvGenEnum* lpixfmt = dynamic_cast<PvGenEnum*>( lDeviceParams->Get("PixelFormat") );
    const VideoPixelFormat fmt = PleoraFormat(lpixfmt);
    streams.push_back(StreamInfo(fmt, w, h, (w*fmt.bpp)/8));
    size_bytes = lSize;

    Start();
}
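The constructor ends by calling Start(), which is not shown here. A minimal sketch of what it plausibly does, assuming the member names used above (lBufferList, lStream, lDevice, lStart); this is not the verbatim Pangolin implementation.

// Sketch: queue the preallocated buffers, enable streaming on the device,
// then issue the AcquisitionStart GenICam command.
void PleoraVideo::Start()
{
    if( lStream->GetQueuedBufferCount() == 0 ) {
        // Hand every preallocated buffer to the stream so the acquisition
        // engine has somewhere to write incoming images.
        for( PvBuffer* lBuffer : lBufferList ) {
            lStream->QueueBuffer( lBuffer );
        }
        lDevice->StreamEnable();
        lStart->Execute();
    }
}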