bool PleoraVideo::GrabNewest( unsigned char* image, bool wait )
{
    PvBuffer* lBuffer0 = NULL;
    PvBuffer* lBuffer = NULL;
    PvResult lOperationResult;
    const uint32_t timeout = wait ? 0xFFFFFFFF : 0;

    PvResult lResult = lStream->RetrieveBuffer( &lBuffer, &lOperationResult, timeout );
    if ( !lResult.IsOK() ) {
        pango_print_warn("Pleora error: %s\n", lResult.GetCodeString().GetAscii() );
        return false;
    } else if ( !lOperationResult.IsOK() ) {
        pango_print_warn("Pleora error: %s\n", lOperationResult.GetCodeString().GetAscii() );
        lStream->QueueBuffer( lBuffer );
        return false;
    }

    // We have at least one frame. Capture more until we fail, 0 timeout
    while(true) {
        PvResult lResult = lStream->RetrieveBuffer( &lBuffer0, &lOperationResult, 0 );
        if ( !lResult.IsOK() ) {
            break;
        } else if ( !lOperationResult.IsOK() ) {
            lStream->QueueBuffer( lBuffer0 );
            break;
        } else {
            lStream->QueueBuffer( lBuffer );
            lBuffer = lBuffer0;
        }
    }

    bool good = false;

    PvPayloadType lType = lBuffer->GetPayloadType();
    if ( lType == PvPayloadTypeImage ) {
        PvImage* lImage = lBuffer->GetImage();
        std::memcpy(image, lImage->GetDataPointer(), size_bytes);
        good = true;
    }

    lStream->QueueBuffer( lBuffer );
    return good;
}
bool PleoraVideo::GrabNext( unsigned char* image, bool /*wait*/ )
{
    PvBuffer* lBuffer = NULL;
    PvResult lOperationResult;

    // Retrieve next buffer
    PvResult lResult = lStream->RetrieveBuffer( &lBuffer, &lOperationResult, 1000 );
    if ( !lResult.IsOK() ) {
        pango_print_warn("Pleora error: %s\n", lResult.GetCodeString().GetAscii() );
        return false;
    }

    bool good = false;

    if ( lOperationResult.IsOK() ) {
        PvPayloadType lType = lBuffer->GetPayloadType();
        if ( lType == PvPayloadTypeImage ) {
            PvImage* lImage = lBuffer->GetImage();
            std::memcpy(image, lImage->GetDataPointer(), size_bytes);
            good = true;
        }
    } else {
        pango_print_warn("Pleora error: %s\n", lOperationResult.GetCodeString().GetAscii() );
    }

    lStream->QueueBuffer( lBuffer );
    return good;
}
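// A minimal usage sketch (assumptions: an already-constructed PleoraVideo
// instance and a caller-allocated image buffer of SizeBytes(); neither is
// shown above). GrabNext() returns consecutive frames, while GrabNewest()
// drains the queue and keeps only the latest buffer for low-latency preview.
void PleoraGrabSketch(PleoraVideo& video, unsigned char* image)
{
    video.Start();

    // Blocking read of consecutive frames:
    while(video.GrabNext(image, /*wait=*/true)) {
        // ... process image ...
    }

    // Alternative, low-latency polling of the most recent frame:
    // video.GrabNewest(image, /*wait=*/false);

    video.Stop();
}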
DebayerVideo::DebayerVideo(VideoInterface* src, color_filter_t tile, bayer_method_t method)
    : size_bytes(0), buffer(0), tile(tile), method(method)
{
    if(!src) {
        throw VideoException("DebayerVideo: input VideoInterface must not be null");
    }
    videoin.push_back(src);

#ifndef HAVE_DC1394
    pango_print_warn("debayer: dc1394 unavailable for debayering. Using simple downsampling method instead.\n");
    this->method = BAYER_METHOD_DOWNSAMPLE;
#endif

    const pangolin::VideoPixelFormat rgb_format = pangolin::VideoFormatFromString("RGB24");
    for(size_t s=0; s < src->Streams().size(); ++s) {
        size_t w = src->Streams()[s].Width();
        size_t h = src->Streams()[s].Height();
        if(this->method == BAYER_METHOD_DOWNSAMPLE) {
            w = w/2;
            h = h/2;
        }

        streams.push_back(pangolin::StreamInfo( rgb_format, w, h, w*rgb_format.bpp / 8, (unsigned char*)0 + size_bytes ));
        size_bytes += w*h*rgb_format.bpp / 8;
    }

    buffer = new unsigned char[src->SizeBytes()];
}
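// A usage sketch for DebayerVideo, wrapping a raw Bayer-patterned source.
// The DC1394_COLOR_FILTER_RGGB spelling is an assumption about the
// color_filter_t enum; only BAYER_METHOD_DOWNSAMPLE appears above.
VideoInterface* WrapWithDebayerSketch(VideoInterface* raw_src)
{
    // Without HAVE_DC1394 the constructor falls back to
    // BAYER_METHOD_DOWNSAMPLE, halving each stream's width and height.
    return new DebayerVideo(raw_src, DC1394_COLOR_FILTER_RGGB, BAYER_METHOD_DOWNSAMPLE);
}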
PacketStreamReader::FrameInfo PacketStreamReader::NextFrame(PacketStreamSourceId src, SyncTime *sync)
{
    // We cannot use a scoped lock here, because we may not want to release
    // the lock, depending on what we find.
    Lock();
    try
    {
        while (1)
        {
            auto fi = _nextFrame();
            if (!fi) {
                // Nothing left in the stream.
                Unlock();
                return fi;
            } else {
                // Advance so we keep accurate sequence numbers for frames.
                ++_next_packet_framenum[fi.src];
                if (_stream.seekable())
                {
                    if (!_index.has(fi.src, fi.sequence_num)) {
                        // If it's not in the index for some reason, add it.
                        _index.add(fi.src, fi.sequence_num, fi.frame_streampos);
                    } else if (_index.position(fi.src, fi.sequence_num) != fi.frame_streampos) {
                        PANGO_ENSURE(_index.position(fi.src, fi.sequence_num) == fi.packet_streampos);
                        static bool warned_already = false;
                        if(!warned_already) {
                            pango_print_warn("CAUTION: Old .pango files do not update frame_properties on seek.\n");
                            warned_already = true;
                        }
                    }
                }
                // Now we are positioned on packet data of fi.size characters.
                _stream.data_len(fi.size);
            }

            // If we are doing timesync, wait, even if it's not our packet.
            if (sync) {
                WaitForTimeSync(*sync, fi.time);
            }

            // If it's ours, return it and don't release the lock.
            if (fi.src == src) {
                return _stream.readFrameHeader(*this);
            }

            // Otherwise skip it and get the next one.
            _stream.skip(fi.size);
        }
    }
    catch (std::exception &e)
    {
        // Since we are not using a scoped lock, we must catch and release.
        Unlock();
        throw;
    }
    catch (...)
    {
        // Always release, even if we cannot identify the exception.
        Unlock();
        throw std::runtime_error("Caught an unknown exception");
    }
}
void PacketStreamReader::ParseHeader()
{
    _stream.readTag(TAG_PANGO_HDR);

    json::value json_header;
    json::parse(json_header, _stream);

    // Right now, the start time is the only field we use from the header.
    _starttime = json_header["time_us"].get<int64_t>();
    if (!_starttime) {
        pango_print_warn("Unable to read stream start time. Time sync to treat stream as realtime will not work!\n");
    }

    _stream.get(); // consume newline
}
void PleoraVideo::Start()
{
    if(lStream->GetQueuedBufferCount() == 0) {
        // Queue all buffers in the stream
        for( BufferList::iterator lIt = lBufferList.begin(); lIt != lBufferList.end(); lIt++ ) {
            lStream->QueueBuffer( *lIt );
        }
        lDevice->StreamEnable();
        lStart->Execute();
    } else {
        pango_print_warn("PleoraVideo: Already started.\n");
    }
}
SplitVideo::SplitVideo(std::unique_ptr<VideoInterface>& src_, const std::vector<StreamInfo>& streams)
    : src(std::move(src_)), streams(streams)
{
    videoin.push_back(src.get());

    // Warn if stream over-runs input stream
    for(unsigned int i=0; i < streams.size(); ++i) {
        if(src->SizeBytes() < (size_t)streams[i].Offset() + streams[i].SizeBytes()) {
            pango_print_warn("VideoSplitter: stream extends past end of input.\n");
            break;
        }
    }
}
bool MirrorVideo::DropNFrames(uint32_t n)
{
    BufferAwareVideoInterface* vpi = dynamic_cast<BufferAwareVideoInterface*>(videoin.get());
    if(!vpi) {
        pango_print_warn("Mirror: child interface is not buffer aware.\n");
        return false;
    } else {
        return vpi->DropNFrames(n);
    }
}
size_t PacketStreamReader::Skip(size_t len)
{
    if (!_stream.data_len()) {
        throw runtime_error("Packetstream not positioned on data block. nextFrame() should be called before skip().");
    } else if (_stream.data_len() < len) {
        pango_print_warn("skip() requested skip of %zu bytes when only %zu bytes remain in data block. Trimming to remaining data size.\n", len, _stream.data_len());
        len = _stream.data_len();
    }

    auto r = _stream.skip(len);

    // If we are done skipping, release the lock acquired in NextFrame().
    if (!_stream.data_len()) {
        Unlock();
    }

    return r;
}
void DestroyWindow(const std::string& name)
{
    contexts_mutex.lock();

    ContextMap::iterator ic = contexts.find(name);
    PangolinGl* context_to_destroy = (ic == contexts.end()) ? 0 : ic->second.get();
    if (context_to_destroy == context) {
        context = nullptr;
    }

    size_t erased = contexts.erase(name);
    if(erased == 0) {
        pango_print_warn("Context '%s' doesn't exist for deletion.\n", name.c_str());
    }

    contexts_mutex.unlock();
}
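// A sketch of the intended pairing, using the CreateWindowAndBind() call that
// appears in VideoViewer() below:
void WindowLifetimeSketch()
{
    CreateWindowAndBind("scratch", 640, 480); // registers "scratch" in contexts
    // ... render frames ...
    DestroyWindow("scratch"); // removes it; warns if the name is unknown
}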
size_t PacketStreamReader::ReadRaw(char* target, size_t len)
{
    if (!_stream.data_len()) {
        throw runtime_error("Packetstream not positioned on data block. nextFrame() should be called before readraw().");
    } else if (_stream.data_len() < len) {
        pango_print_warn("readraw() requested read of %zu bytes when only %zu bytes remain in data block. Trimming to available data size.\n", len, _stream.data_len());
        len = _stream.data_len();
    }

    auto r = _stream.read(target, len);

    // If we are done reading, release the lock acquired in NextFrame().
    if (!_stream.data_len()) {
        Unlock();
    }

    return r;
}
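// A sketch of the NextFrame()/ReadRaw() protocol (assuming a reader already
// opened on a .pango stream). NextFrame() returns holding the internal lock
// while positioned on a data block; ReadRaw() or Skip() release it once the
// block is fully consumed, so every frame's payload must be read or skipped.
void DrainSourceSketch(PacketStreamReader& reader, PacketStreamSourceId src)
{
    std::vector<char> payload;
    while(true) {
        auto fi = reader.NextFrame(src, nullptr); // no time sync
        if(!fi) {
            break; // end of stream
        }
        payload.resize(fi.size);
        reader.ReadRaw(payload.data(), fi.size); // consumes the block, releasing the lock
        // ... decode payload ...
    }
}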
void PleoraVideo::Stop()
{
    if(lStream->GetQueuedBufferCount() > 0) {
        lStop->Execute();
        lDevice->StreamDisable();

        // Abort all buffers in the stream and dequeue them
        lStream->AbortQueuedBuffers();
        while ( lStream->GetQueuedBufferCount() > 0 ) {
            PvBuffer* lBuffer = NULL;
            PvResult lOperationResult;
            lStream->RetrieveBuffer( &lBuffer, &lOperationResult );
        }
    } else {
        pango_print_warn("PleoraVideo: Already stopped.\n");
    }
}
TypedImage LoadImage(
    const std::string& filename, const PixelFormat& raw_fmt,
    size_t raw_width, size_t raw_height, size_t raw_pitch
) {
    TypedImage img(raw_width, raw_height, raw_fmt, raw_pitch);

    // Read from the file, one row at a time.
    std::ifstream bFile( filename.c_str(), std::ios::in | std::ios::binary );
    for(size_t r=0; r<img.h; ++r) {
        bFile.read( (char*)img.ptr + r*img.pitch, img.pitch );
        if(bFile.fail()) {
            pango_print_warn("Unable to read raw image file to completion.\n");
            break;
        }
    }
    return img;
}
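// Example call, assuming a PixelFormatFromString() helper analogous to the
// VideoFormatFromString() used in DebayerVideo above:
TypedImage LoadRawGray8Sketch(const std::string& path, size_t w, size_t h)
{
    const PixelFormat fmt = PixelFormatFromString("GRAY8");
    return LoadImage(path, fmt, w, h, w * fmt.bpp / 8); // tightly-packed rows
}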
PacketStreamReader::FrameInfo PacketStreamReader::Seek(PacketStreamSourceId src, size_t framenum, SyncTime *sync)
{
    lock_guard<decltype(_mutex)> lg(_mutex);

    if (!_stream.seekable()) {
        throw std::runtime_error("Stream is not seekable (probably a pipe).");
    }
    if (src > _sources.size()) {
        throw std::runtime_error("Invalid Frame Source ID.");
    }

    if (_stream.data_len()) {
        // We were in the middle of reading data and are holding an extra lock.
        // Release it, while still holding the scoped lock.
        Skip(_stream.data_len());
    }

    while (!_index.has(src, framenum))
    {
        pango_print_warn("seek index miss... reading ahead.\n");
        if (_stream.data_len()) {
            _stream.skip(_stream.data_len());
        }
        auto fi = NextFrame(src, nullptr);
        if (!fi) {
            // We hit the end of the stream without finding the frame.
            throw std::out_of_range("frame number not in sequence");
        }
    }

    auto target_header_start = _index.position(src, framenum);
    _stream.seekg(target_header_start);

    // This increments when we parse the header below.
    // TODO: THIS WILL BREAK _next_packet_framenum FOR ALL OTHER SOURCES.
    // More refactoring is needed to fix it.
    _next_packet_framenum[src] = framenum;

    // We need to peek the header now, because we need r.time to sync up playback.
    auto r = _stream.peekFrameHeader(*this);

    // If we have a sync timer, reset it so playback is synchronized
    // to the frame we just sought to.
    if (nullptr != sync && _starttime) {
        sync->ResyncToOffset(r.time - _starttime);
    }

    return r;
}
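// A random-access sketch built on Seek(), assuming a seekable (file-backed)
// reader. Seek() repositions and peeks the target header; a following
// NextFrame() for the same source then returns that frame's data.
void ReadFrameAtSketch(PacketStreamReader& reader, PacketStreamSourceId src,
                       size_t framenum, std::vector<char>& payload)
{
    auto fi = reader.Seek(src, framenum, nullptr);
    if(fi) {
        fi = reader.NextFrame(src, nullptr);
        payload.resize(fi.size);
        reader.ReadRaw(payload.data(), fi.size);
    }
}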
PacketStreamReader::FrameInfo PacketStreamReader::_nextFrame()
{
    while (1)
    {
        auto t = _stream.peekTag();

        switch (t)
        {
        case TAG_PANGO_SYNC:
            SkipSync();
            break;
        case TAG_ADD_SOURCE:
            ParseNewSource();
            break;
        case TAG_SRC_JSON:
            // Frames are sometimes preceded by metadata, but metadata must
            // ALWAYS be followed by a frame from the same source.
        case TAG_SRC_PACKET:
            return _stream.peekFrameHeader(*this);
        case TAG_PANGO_STATS:
            ParseIndex();
            break;
        case TAG_PANGO_FOOTER: // end of frames
        case TAG_END:
            return FrameInfo(); // none
        case TAG_PANGO_HDR:    // shouldn't encounter this
            ParseHeader();
            break;
        case TAG_PANGO_MAGIC:  // or this
            SkipSync();
            break;
        default:               // or anything else
            pango_print_warn("Unexpected packet type: \"%s\". Resyncing.\n", tagName(t).c_str());
            ReSync();
            break;
        }
    }
}
void VideoViewer(const std::string& input_uri, const std::string& output_uri)
{
    pangolin::Var<int> record_timelapse_frame_skip("viewer.record_timelapse_frame_skip", 1 );
    pangolin::Var<int> end_frame("viewer.end_frame", std::numeric_limits<int>::max() );
    pangolin::Var<bool> video_wait("video.wait", true);
    pangolin::Var<bool> video_newest("video.newest", false);

    // Open Video by URI
    pangolin::VideoRecordRepeat video(input_uri, output_uri);
    const size_t num_streams = video.Streams().size();

    if(num_streams == 0) {
        pango_print_error("No video streams from device.\n");
        return;
    }

    // Output details of video stream
    for(size_t s = 0; s < num_streams; ++s) {
        const pangolin::StreamInfo& si = video.Streams()[s];
        std::cout << "Stream " << s << ": " << si.Width() << " x " << si.Height()
                  << " " << si.PixFormat().format
                  << " (pitch: " << si.Pitch() << " bytes)" << std::endl;
    }

    // Check if video supports VideoPlaybackInterface
    pangolin::VideoPlaybackInterface* video_playback = pangolin::FindFirstMatchingVideoInterface<pangolin::VideoPlaybackInterface>(video);
    const int total_frames = video_playback ? video_playback->GetTotalFrames() : std::numeric_limits<int>::max();
    const int slider_size = (total_frames < std::numeric_limits<int>::max() ? 20 : 0);

    if( video_playback ) {
        if(total_frames < std::numeric_limits<int>::max() ) {
            std::cout << "Video length: " << total_frames << " frames" << std::endl;
        }
        end_frame = 0;
    }

    std::vector<unsigned char> buffer;
    buffer.resize(video.SizeBytes()+1);

    // Create OpenGL window - guess sensible dimensions
    pangolin::CreateWindowAndBind( "VideoViewer",
        (int)(video.Width() * num_streams),
        (int)(video.Height() + slider_size)
    );

    // Assume packed OpenGL data unless otherwise specified
    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
    glPixelStorei(GL_PACK_ALIGNMENT, 1);
    glEnable(GL_BLEND);
    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);

    // Setup resizable views for video streams
    std::vector<pangolin::GlPixFormat> glfmt;
    std::vector<std::pair<float,float> > gloffsetscale;
    std::vector<size_t> strides;
    std::vector<pangolin::ImageViewHandler> handlers;
    handlers.reserve(num_streams);

    size_t scratch_buffer_bytes = 0;

    pangolin::View& container = pangolin::Display("streams");
    container.SetLayout(pangolin::LayoutEqual)
             .SetBounds(pangolin::Attach::Pix(slider_size), 1.0, 0.0, 1.0);
    for(unsigned int d=0; d < num_streams; ++d) {
        const pangolin::StreamInfo& si = video.Streams()[d];
        pangolin::View& view = pangolin::CreateDisplay().SetAspect(si.Aspect());
        container.AddDisplay(view);
        glfmt.push_back(pangolin::GlPixFormat(si.PixFormat()));
        gloffsetscale.push_back(std::pair<float,float>(0.0f, 1.0f) );
        if( si.PixFormat().bpp % 8 ) {
            pango_print_warn("Stream %i: Unable to display formats that are not a multiple of 8 bits.\n", d);
        }
        if( (8*si.Pitch()) % si.PixFormat().bpp ) {
            pango_print_warn("Stream %i: Unable to display formats whose pitch is not a whole number of pixels.\n", d);
        }
        if(glfmt.back().gltype == GL_DOUBLE) {
            scratch_buffer_bytes = std::max(scratch_buffer_bytes, sizeof(float)*si.Width()*si.Height());
        }
        strides.push_back( (8*si.Pitch()) / si.PixFormat().bpp );
        handlers.push_back( pangolin::ImageViewHandler(si.Width(), si.Height()) );
        view.SetHandler(&handlers.back());
    }

    // Current frame in memory buffer and displaying.
    pangolin::Var<int> frame("ui.frame", -1, 0, total_frames-1 );
    pangolin::Slider frame_slider("frame", frame.Ref() );
    if(video_playback && total_frames < std::numeric_limits<int>::max()) {
        frame_slider.SetBounds(0.0, pangolin::Attach::Pix(slider_size), 0.0, 1.0);
        pangolin::DisplayBase().AddDisplay(frame_slider);
    }

    std::vector<unsigned char> scratch_buffer;
    scratch_buffer.resize(scratch_buffer_bytes);

    std::vector<pangolin::Image<unsigned char> > images;

#ifdef CALLEE_HAS_CPP11
    const int FRAME_SKIP = 30;
    const char show_hide_keys[]  = {'1','2','3','4','5','6','7','8','9'};
    const char screenshot_keys[] = {'!','"','#','$','%','^','&','*','('};

    // Show/hide streams
    for(size_t v=0; v < container.NumChildren() && v < 9; v++) {
        pangolin::RegisterKeyPressCallback(show_hide_keys[v], [v,&container](){
            container[v].ToggleShow();
        } );
        pangolin::RegisterKeyPressCallback(screenshot_keys[v], [v,&images,&video](){
            if(v < images.size() && images[v].ptr) {
                try{
                    pangolin::SaveImage(
                        images[v], video.Streams()[v].PixFormat(),
                        pangolin::MakeUniqueFilename("capture.png")
                    );
                }catch(std::exception& e){
                    pango_print_error("Unable to save frame: %s\n", e.what());
                }
            }
        } );
    }

    pangolin::RegisterKeyPressCallback('r', [&](){
        if(!video.IsRecording()) {
            video.SetTimelapse( static_cast<size_t>(record_timelapse_frame_skip) );
            video.Record();
            pango_print_info("Started Recording.\n");
        }else{
            video.Stop();
            pango_print_info("Finished recording.\n");
        }
        fflush(stdout);
    });
    pangolin::RegisterKeyPressCallback('p', [&](){
        video.Play();
        end_frame = std::numeric_limits<int>::max();
        pango_print_info("Playing from file log.\n");
        fflush(stdout);
    });
    pangolin::RegisterKeyPressCallback('s', [&](){
        video.Source();
        end_frame = std::numeric_limits<int>::max();
        pango_print_info("Playing from source input.\n");
        fflush(stdout);
    });
    pangolin::RegisterKeyPressCallback(' ', [&](){
        end_frame = (frame < end_frame) ? frame : std::numeric_limits<int>::max();
    });
    pangolin::RegisterKeyPressCallback('w', [&](){
        video_wait = !video_wait;
        if(video_wait) {
            pango_print_info("Gui waits for video frame.\n");
        }else{
            pango_print_info("Gui doesn't wait for video frame.\n");
        }
    });
    pangolin::RegisterKeyPressCallback('d', [&](){
        video_newest = !video_newest;
        if(video_newest) {
            pango_print_info("Discarding old frames.\n");
        }else{
            pango_print_info("Not discarding old frames.\n");
        }
    });
    pangolin::RegisterKeyPressCallback('<', [&](){
        if(video_playback) {
            frame = video_playback->Seek(frame - FRAME_SKIP) - 1;
            end_frame = frame + 1;
        }else{
            pango_print_warn("Unable to skip backward.\n");
        }
    });
    pangolin::RegisterKeyPressCallback('>', [&](){
        if(video_playback) {
            frame = video_playback->Seek(frame + FRAME_SKIP) - 1;
            end_frame = frame + 1;
        }else{
            end_frame = frame + FRAME_SKIP;
        }
    });
    pangolin::RegisterKeyPressCallback(',', [&](){
        if(video_playback) {
            frame = video_playback->Seek(frame - 1) - 1;
            end_frame = frame + 1;
        }else{
            pango_print_warn("Unable to skip backward.\n");
        }
    });
    pangolin::RegisterKeyPressCallback('.', [&](){
        // Pause at next frame
        end_frame = frame + 1;
    });
    pangolin::RegisterKeyPressCallback('0', [&](){
        video.RecordOneFrame();
    });
    pangolin::RegisterKeyPressCallback('a', [&](){
        // Adapt scale
        for(unsigned int i=0; i<images.size(); ++i) {
            if(container[i].HasFocus()) {
                pangolin::Image<unsigned char>& img = images[i];
                pangolin::ImageViewHandler& ivh = handlers[i];
                const bool have_selection = std::isfinite(ivh.GetSelection().Area()) && std::abs(ivh.GetSelection().Area()) >= 4;
                pangolin::XYRangef froi = have_selection ?
                    ivh.GetSelection() : ivh.GetViewToRender();
                gloffsetscale[i] = pangolin::GetOffsetScale(img, froi.Cast<int>(), glfmt[i]);
            }
        }
    });
    pangolin::RegisterKeyPressCallback('g', [&](){
        std::pair<float,float> os_default(0.0f, 1.0f);

        // Get the scale and offset from the container that has focus.
        for(unsigned int i=0; i<images.size(); ++i) {
            if(container[i].HasFocus()) {
                pangolin::Image<unsigned char>& img = images[i];
                pangolin::ImageViewHandler& ivh = handlers[i];
                const bool have_selection = std::isfinite(ivh.GetSelection().Area()) && std::abs(ivh.GetSelection().Area()) >= 4;
                pangolin::XYRangef froi = have_selection ? ivh.GetSelection() : ivh.GetViewToRender();
                os_default = pangolin::GetOffsetScale(img, froi.Cast<int>(), glfmt[i]);
                break;
            }
        }

        // Adapt scale for all images equally
        // TODO: we're assuming all the container's images have the same type.
        for(unsigned int i=0; i<images.size(); ++i) {
            gloffsetscale[i] = os_default;
        }
    });
#endif // CALLEE_HAS_CPP11

#ifdef DEBUGVIDEOVIEWER
    unsigned int delayms = 0;
    pangolin::RegisterKeyPressCallback('z', [&](){
        // Increase artificial delay
        delayms += 1;
        std::cout << " Fake delay " << delayms << "ms" << std::endl;
    });
    pangolin::RegisterKeyPressCallback('x', [&](){
        // Decrease artificial delay
        delayms = (delayms > 1) ? delayms-1 : 0;
    });
    pangolin::basetime start, now;
#endif // DEBUGVIDEOVIEWER

    // Stream and display video
    while(!pangolin::ShouldQuit())
    {
        glClear(GL_DEPTH_BUFFER_BIT | GL_COLOR_BUFFER_BIT);
        glColor3f(1.0f, 1.0f, 1.0f);

        if(frame.GuiChanged()) {
            if(video_playback) {
                frame = video_playback->Seek(frame) - 1;
            }
            end_frame = frame + 1;
        }

#ifdef DEBUGVIDEOVIEWER
        boostd::this_thread::sleep_for(boostd::chrono::milliseconds(delayms));
        std::cout << "-------------------------------------------------------" << std::endl;
        now = pangolin::TimeNow();
        std::cout << " FPS: " << 1.0/pangolin::TimeDiff_s(start, now)
                  << " artificial delay: " << delayms << "ms" << std::endl;
        std::cout << "-------------------------------------------------------" << std::endl;
        start = now;
#endif

        if ( frame < end_frame ) {
            if( video.Grab(&buffer[0], images, video_wait, video_newest) ) {
                frame = frame + 1;
            }
        }

#ifdef DEBUGVIDEOVIEWER
        const pangolin::basetime end = pangolin::TimeNow();
        std::cout << "Total grab time: " << 1000*pangolin::TimeDiff_s(start, end) << "ms" << std::endl;
#endif

        glLineWidth(1.5f);
        glDisable(GL_DEPTH_TEST);

        for(unsigned int i=0; i<images.size(); ++i)
        {
            if(container[i].IsShown()) {
                container[i].Activate();
                pangolin::Image<unsigned char>& image = images[i];

                // Get texture of correct dimension / format
                const pangolin::GlPixFormat& fmt = glfmt[i];
                pangolin::GlTexture& tex = pangolin::TextureCache::I().GlTex(
                    (GLsizei)image.w, (GLsizei)image.h,
                    fmt.scalable_internal_format, fmt.glformat, GL_FLOAT
                );

                // Upload image data to texture
                tex.Bind();
                if(fmt.gltype == GL_DOUBLE) {
                    // Convert to float first, using scratch_buffer for storage
                    pangolin::Image<float> fimage(image.w, image.h, image.w*sizeof(float), (float*)scratch_buffer.data());
                    ConvertPixels<float,double>( fimage, image.Reinterpret<double>() );
                    glPixelStorei(GL_UNPACK_ROW_LENGTH, 0);
                    tex.Upload(fimage.ptr, 0, 0, (GLsizei)fimage.w, (GLsizei)fimage.h, fmt.glformat, GL_FLOAT);
                }else{
                    glPixelStorei(GL_UNPACK_ROW_LENGTH, (GLint)strides[i]);
                    tex.Upload(image.ptr, 0, 0, (GLsizei)image.w, (GLsizei)image.h, fmt.glformat, fmt.gltype);
                }

                // Render
                handlers[i].UpdateView();
                handlers[i].glSetViewOrtho();
                const std::pair<float,float> os = gloffsetscale[i];
                pangolin::GlSlUtilities::OffsetAndScale(os.first, os.second);
                handlers[i].glRenderTexture(tex);
                pangolin::GlSlUtilities::UseNone();
                handlers[i].glRenderOverlay();
            }
        }
        glPixelStorei(GL_UNPACK_ROW_LENGTH, 0);

        // Leave in pixel orthographic for slider to render.
        pangolin::DisplayBase().ActivatePixelOrthographic();
        if(video.IsRecording()) {
            pangolin::glRecordGraphic(pangolin::DisplayBase().v.w-14.0f, pangolin::DisplayBase().v.h-14.0f, 7.0f);
        }
        pangolin::FinishFrame();
    }
}
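// A sketch of a typical entry point for the viewer, assuming Pangolin-style
// video URIs on the command line (the default URIs here are illustrative):
int main(int argc, char* argv[])
{
    const std::string input_uri  = (argc > 1) ? argv[1] : "test:[size=640x480]//";
    const std::string output_uri = (argc > 2) ? argv[2] : "pango://video.pango";
    VideoViewer(input_uri, output_uri);
    return 0;
}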
GLXContext CreateGlContext(::Display* display, ::GLXFBConfig chosenFbc, GLXContext share_context = 0)
{
    int glx_major, glx_minor;
    if ( !glXQueryVersion( display, &glx_major, &glx_minor ) ||
         ( ( glx_major == 1 ) && ( glx_minor < 3 ) ) || ( glx_major < 1 ) )
    {
        throw std::runtime_error("Pangolin X11: Invalid GLX version. Require GLX >= 1.3");
    }

    GLXContext new_ctx;

    // Get the default screen's GLX extension list
    const char* glxExts = glXQueryExtensionsString( display, DefaultScreen( display ) );

    glXCreateContextAttribsARBProc glXCreateContextAttribsARB =
        (glXCreateContextAttribsARBProc) glXGetProcAddressARB( (const GLubyte*) "glXCreateContextAttribsARB" );

    // Install an X error handler so the application won't exit if GL 3.0
    // context allocation fails. Handler is global and shared across all threads.
    ctxErrorOccurred = false;
    int (*oldHandler)(::Display*, ::XErrorEvent*) = XSetErrorHandler(&ctxErrorHandler);

    if ( isExtensionSupported( glxExts, "GLX_ARB_create_context" ) && glXCreateContextAttribsARB )
    {
        int context_attribs[] = {
            GLX_CONTEXT_MAJOR_VERSION_ARB, 3,
            GLX_CONTEXT_MINOR_VERSION_ARB, 0,
            //GLX_CONTEXT_FLAGS_ARB, GLX_CONTEXT_FORWARD_COMPATIBLE_BIT_ARB,
            None
        };

        new_ctx = glXCreateContextAttribsARB( display, chosenFbc, share_context, True, context_attribs );

        // Sync to ensure any errors generated are processed.
        XSync( display, False );
        if ( ctxErrorOccurred || !new_ctx ) {
            ctxErrorOccurred = false;
            // Fall back to old-style 2.x context. Implementations will return the newest
            // context version compatible with OpenGL versions less than version 3.0.
            context_attribs[1] = 1; // GLX_CONTEXT_MAJOR_VERSION_ARB = 1
            context_attribs[3] = 0; // GLX_CONTEXT_MINOR_VERSION_ARB = 0
            new_ctx = glXCreateContextAttribsARB( display, chosenFbc, share_context, True, context_attribs );
        }
    } else {
        // Fallback to GLX 1.3 Context
        new_ctx = glXCreateNewContext( display, chosenFbc, GLX_RGBA_TYPE, share_context, True );
    }

    // Sync to ensure any errors generated are processed.
    XSync( display, False );

    // Restore the original error handler
    XSetErrorHandler( oldHandler );

    if ( ctxErrorOccurred || !new_ctx ) {
        throw std::runtime_error("Pangolin X11: Failed to create an OpenGL context");
    }

    // Verify that the context is a direct context
    if ( !glXIsDirect( display, new_ctx ) ) {
        pango_print_warn("Pangolin X11: Indirect GLX rendering context obtained\n");
    }

    return new_ctx;
}
int CreateX11Window(
    const std::string& title, int width, int height,
    bool glx_doublebuffer, int glx_sample_buffers, int glx_samples
) {
    display = XOpenDisplay(NULL);
    if (!display) {
        throw std::runtime_error("Pangolin X11: Failed to open X display");
    }

    // Desired attributes (not static: they depend on the arguments)
    int visual_attribs[] = {
        GLX_X_RENDERABLE   , True,
        GLX_DRAWABLE_TYPE  , GLX_WINDOW_BIT,
        GLX_RENDER_TYPE    , GLX_RGBA_BIT,
        GLX_X_VISUAL_TYPE  , GLX_TRUE_COLOR,
        GLX_RED_SIZE       , 8,
        GLX_GREEN_SIZE     , 8,
        GLX_BLUE_SIZE      , 8,
        GLX_ALPHA_SIZE     , 8,
        GLX_DEPTH_SIZE     , 24,
        GLX_STENCIL_SIZE   , 8,
        GLX_DOUBLEBUFFER   , glx_doublebuffer ? True : False,
        GLX_SAMPLE_BUFFERS , glx_sample_buffers,
        GLX_SAMPLES        , glx_sample_buffers > 0 ? glx_samples : 0,
        None
    };

    int glx_major, glx_minor;
    if ( !glXQueryVersion( display, &glx_major, &glx_minor ) ||
         ( ( glx_major == 1 ) && ( glx_minor < 3 ) ) || ( glx_major < 1 ) )
    {
        // FBConfigs were added in GLX version 1.3.
        throw std::runtime_error("Pangolin X11: Invalid GLX version. Require GLX >= 1.3");
    }

    int fbcount;
    GLXFBConfig* fbc = glXChooseFBConfig(display, DefaultScreen(display), visual_attribs, &fbcount);
    if (!fbc) {
        throw std::runtime_error("Pangolin X11: Unable to retrieve framebuffer options");
    }

    int best_fbc = -1;
    int worst_fbc = -1;
    int best_num_samp = -1;
    int worst_num_samp = 999;

    // Enumerate framebuffer options, storing the best and worst that match our attribs
    for (int i=0; i<fbcount; ++i)
    {
        XVisualInfo* vi = glXGetVisualFromFBConfig( display, fbc[i] );
        if ( vi ) {
            int samp_buf, samples;
            glXGetFBConfigAttrib( display, fbc[i], GLX_SAMPLE_BUFFERS, &samp_buf );
            glXGetFBConfigAttrib( display, fbc[i], GLX_SAMPLES       , &samples  );

            if ( (best_fbc < 0) || (samp_buf > 0 && samples > best_num_samp) )
                best_fbc = i, best_num_samp = samples;
            if ( (worst_fbc < 0) || (samp_buf > 0 && samples < worst_num_samp) )
                worst_fbc = i, worst_num_samp = samples;

            // Only free vi when it was actually returned.
            XFree( vi );
        }
    }

    // Select the minimum suitable option. The 'best' is often too slow.
    GLXFBConfig bestFbc = fbc[ worst_fbc ];
    XFree( fbc );

    // Get a visual
    XVisualInfo* vi = glXGetVisualFromFBConfig( display, bestFbc );

    // Create colourmap
    XSetWindowAttributes swa;
    swa.colormap = cmap = XCreateColormap( display, RootWindow( display, vi->screen ), vi->visual, AllocNone );
    swa.background_pixmap = None;
    swa.border_pixel = 0;
    swa.event_mask = StructureNotifyMask;

    // Create window
    win = XCreateWindow(
        display, RootWindow( display, vi->screen ),
        0, 0, width, height, 0, vi->depth, InputOutput, vi->visual,
        CWBorderPixel|CWColormap|CWEventMask, &swa
    );

    XFree( vi );

    if ( !win ) {
        throw std::runtime_error("Pangolin X11: Failed to create window." );
    }

    XStoreName( display, win, title.c_str() );
    XMapWindow( display, win );

    // Request to be notified of these events
    XSelectInput(display, win, EVENT_MASKS );

    // Get the default screen's GLX extension list
    const char* glxExts = glXQueryExtensionsString( display, DefaultScreen( display ) );

    glXCreateContextAttribsARBProc glXCreateContextAttribsARB =
        (glXCreateContextAttribsARBProc) glXGetProcAddressARB( (const GLubyte*) "glXCreateContextAttribsARB" );

    // Install an X error handler so the application won't exit if GL 3.0
    // context allocation fails. Handler is global and shared across all threads.
    ctxErrorOccurred = false;
    int (*oldHandler)(Display*, XErrorEvent*) = XSetErrorHandler(&ctxErrorHandler);

    if ( isExtensionSupported( glxExts, "GLX_ARB_create_context" ) && glXCreateContextAttribsARB )
    {
        int context_attribs[] = {
            GLX_CONTEXT_MAJOR_VERSION_ARB, 3,
            GLX_CONTEXT_MINOR_VERSION_ARB, 0,
            //GLX_CONTEXT_FLAGS_ARB, GLX_CONTEXT_FORWARD_COMPATIBLE_BIT_ARB,
            None
        };

        ctx = glXCreateContextAttribsARB( display, bestFbc, 0, True, context_attribs );

        // Sync to ensure any errors generated are processed.
        XSync( display, False );
        if ( ctxErrorOccurred || !ctx ) {
            ctxErrorOccurred = false;
            // Fall back to old-style 2.x context. Implementations will return the newest
            // context version compatible with OpenGL versions less than version 3.0.
            context_attribs[1] = 1; // GLX_CONTEXT_MAJOR_VERSION_ARB = 1
            context_attribs[3] = 0; // GLX_CONTEXT_MINOR_VERSION_ARB = 0
            ctx = glXCreateContextAttribsARB( display, bestFbc, 0, True, context_attribs );
        }
    } else {
        // Fallback to GLX 1.3 Context
        ctx = glXCreateNewContext( display, bestFbc, GLX_RGBA_TYPE, 0, True );
    }

    // Sync to ensure any errors generated are processed.
    XSync( display, False );

    // Restore the original error handler
    XSetErrorHandler( oldHandler );

    if ( ctxErrorOccurred || !ctx ) {
        throw std::runtime_error("Pangolin X11: Failed to create an OpenGL context");
    }

    // Verify that the context is a direct context
    if ( !glXIsDirect( display, ctx ) ) {
        pango_print_warn("Pangolin X11: Indirect GLX rendering context obtained\n");
    }

    glXMakeCurrent( display, win, ctx );

    return 0;
}
::GLXFBConfig ChooseFrameBuffer(
    ::Display* display, bool glx_doublebuffer,
    int glx_sample_buffers, int glx_samples
) {
    // Desired attributes
    int visual_attribs[] = {
        GLX_X_RENDERABLE  , True,
        GLX_DRAWABLE_TYPE , GLX_WINDOW_BIT,
        GLX_RENDER_TYPE   , GLX_RGBA_BIT,
        GLX_X_VISUAL_TYPE , GLX_TRUE_COLOR,
        GLX_RED_SIZE      , 8,
        GLX_GREEN_SIZE    , 8,
        GLX_BLUE_SIZE     , 8,
        GLX_ALPHA_SIZE    , 8,
        GLX_DEPTH_SIZE    , 24,
        GLX_STENCIL_SIZE  , 8,
        GLX_DOUBLEBUFFER  , glx_doublebuffer ? True : False,
        None
    };

    int fbcount;
    GLXFBConfig* fbc = glXChooseFBConfig(display, DefaultScreen(display), visual_attribs, &fbcount);
    if (!fbc) {
        throw std::runtime_error("Pangolin X11: Unable to retrieve framebuffer options");
    }

    int best_fbc = -1;
    int worst_fbc = -1;
    int best_num_samp = -1;
    int worst_num_samp = 999;

    // Enumerate framebuffer options, storing the best and worst that match our attribs
    for (int i=0; i<fbcount; ++i)
    {
        XVisualInfo* vi = glXGetVisualFromFBConfig( display, fbc[i] );
        if ( vi ) {
            int samp_buf, samples;
            glXGetFBConfigAttrib( display, fbc[i], GLX_SAMPLE_BUFFERS, &samp_buf );
            glXGetFBConfigAttrib( display, fbc[i], GLX_SAMPLES       , &samples  );

            // Track the best available.
            if ( samples > best_num_samp ) {
                best_fbc = i;
                best_num_samp = samples;
            }

            // Track the lowest setting which still matches the user's minimum requirement.
            if ( samp_buf >= glx_sample_buffers && samples >= glx_samples && samples < worst_num_samp ) {
                worst_fbc = i;
                worst_num_samp = samples;
            }

            // Only free vi when it was actually returned.
            XFree( vi );
        }
    }

    // Select the minimum suitable option. The 'best' is often too slow.
    int chosen_fbc_id = worst_fbc;

    // If the minimum requested isn't available, return the best that is.
    if(chosen_fbc_id < 0) {
        pango_print_warn("Framebuffer with requested attributes not available. Using available framebuffer. You may see visual artifacts.\n");
        chosen_fbc_id = best_fbc;
    }

    ::GLXFBConfig chosenFbc = fbc[ chosen_fbc_id ];
    XFree( fbc );
    return chosenFbc;
}
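// A sketch showing how ChooseFrameBuffer() and CreateGlContext() compose
// (error handling elided; the caller owns the returned context):
GLXContext MakeGlContextSketch(::Display* display)
{
    ::GLXFBConfig fbc = ChooseFrameBuffer(display, /*glx_doublebuffer=*/true,
                                          /*glx_sample_buffers=*/1, /*glx_samples=*/4);
    return CreateGlContext(display, fbc); // pass an existing context to share lists
}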
void PleoraVideo::SetDeviceParams(Params& p)
{
    lStart = dynamic_cast<PvGenCommand*>( lDeviceParams->Get( "AcquisitionStart" ) );
    lStop  = dynamic_cast<PvGenCommand*>( lDeviceParams->Get( "AcquisitionStop" ) );

    for(Params::ParamMap::iterator it = p.params.begin(); it != p.params.end(); it++) {
        if(it->first == "get_temperature") {
            getTemp = p.Get<bool>("get_temperature", false);
        } else if(it->second == "Execute") {
            // This is a command; deal with it accordingly.
            PvGenCommand* cmd = dynamic_cast<PvGenCommand*>(lDeviceParams->Get(it->first.c_str()));
            if(cmd) {
                PvResult r = cmd->Execute();
                if(!r.IsOK()) {
                    pango_print_error("Error executing command %s Reason:%s\n", it->first.c_str(), r.GetDescription().GetAscii());
                } else {
                    pango_print_info("Executed Command %s\n", it->first.c_str());
                }
                bool done = false;
                int attempts = 20;
                do {
                    cmd->IsDone(done);
                    std::this_thread::sleep_for(std::chrono::milliseconds(1000));
                    attempts--;
                } while(!done && (attempts > 0));
                if(attempts == 0) {
                    pango_print_error("Timeout while waiting for command %s done\n", it->first.c_str());
                }
            } else {
                pango_print_error("Command %s not recognized\n", it->first.c_str());
            }
        } else {
            try {
                PvGenParameter* par = lDeviceParams->Get(PvString(it->first.c_str()));
                if(par) {
                    PvResult r = par->FromString(PvString(it->second.c_str()));
                    if(!r.IsOK()) {
                        pango_print_error("Error setting parameter %s to:%s Reason:%s\n", it->first.c_str(), it->second.c_str(), r.GetDescription().GetAscii());
                    } else {
                        pango_print_info("Setting parameter %s to:%s\n", it->first.c_str(), it->second.c_str());
                    }
                } else {
                    pango_print_error("Parameter %s not recognized\n", it->first.c_str());
                }
            } catch(std::runtime_error& e) {
                pango_print_error("Set parameter %s: %s\n", it->first.c_str(), e.what());
            }
        }
    }

    // Get handles to the properties we'll be using.
    lAnalogGain       = lDeviceParams->GetInteger("AnalogGain");
    lGamma            = lDeviceParams->GetFloat("Gamma");
    lAnalogBlackLevel = lDeviceParams->GetInteger("AnalogBlackLevel");
    lExposure         = lDeviceParams->GetFloat("ExposureTime");
    lAquisitionMode   = lDeviceParams->GetEnum("AcquisitionMode");
    lTriggerSource    = lDeviceParams->GetEnum("TriggerSource");
    lTriggerMode      = lDeviceParams->GetEnum("TriggerMode");

    if(getTemp) {
        lTemperatureCelcius = lDeviceParams->GetFloat("DeviceTemperatureCelsius");
        pango_print_warn("Warning: get_temperature might add a blocking call taking several ms to each frame read.\n");
    }
}
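// A sketch of the Params conventions SetDeviceParams() consumes: plain
// name=value pairs go through GenICam FromString(), a value of "Execute"
// runs a command node, and "get_temperature" is handled locally. The
// Params::Set() helper is an assumption about the surrounding API.
void ConfigurePleoraSketch(PleoraVideo& video)
{
    Params p;
    p.Set("ExposureTime", "10000");      // GenICam float parameter (microseconds)
    p.Set("AcquisitionMode", "Continuous");
    p.Set("UserSetLoad", "Execute");     // trigger a GenICam command node
    p.Set("get_temperature", "1");       // enable per-frame temperature reads
    video.SetDeviceParams(p);
}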