Example No. 1
PyMODINIT_FUNC
InitPyPangolinModule()
{
    auto pypango = pybind11::module("pypangolin");
    pypangolin::PopulateModule(pypango);

    if(pypango.ptr()) {
        if (PyType_Ready(&PyVar::Py_type) >= 0) {
            Py_INCREF(&PyVar::Py_type);
            PyModule_AddObject(pypango.ptr(), "Var", (PyObject *)&PyVar::Py_type);
        }else{
            pango_print_error("Unable to create pangolin Python objects.\n");
        }
    }else{
        pango_print_error("Unable to initialise pangolin Python module.\n");
    }

#if PY_MAJOR_VERSION >= 3
    // Python 3 module-init functions return the new module; Python 2's return void.
    return pypango.ptr();
#endif
}
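For reference, Python 3 loads an extension module through an entry point named PyInit_<module> that must return the module object. A minimal sketch of such a direct export built on the function above (the PyInit_pypangolin name is illustrative; it is not necessarily how Pangolin wires this up):

PyMODINIT_FUNC PyInit_pypangolin()
{
    // Delegate to the shared init routine above; under Python 3 it
    // returns the module object, which PyInit_* hands to the interpreter.
    return InitPyPangolinModule();
}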
Example No. 2
bool OpenNi2Video::GrabNext( unsigned char* image, bool /*wait*/ )
{
    unsigned char* out_img = image;

    openni::Status rc = openni::STATUS_OK;

    for(unsigned int i=0; i<Streams().size(); ++i) {
        if(sensor_type[i].sensor_type == OpenNiUnassigned) {
            rc = openni::STATUS_OK;
            continue;
        }

        if(!video_stream[i].isValid()) {
            rc = openni::STATUS_NO_DEVICE;
            continue;
        }

        if(use_ir_and_rgb) video_stream[i].start();

        rc = video_stream[i].readFrame(&video_frame[i]);
        if(rc != openni::STATUS_OK) {
            pango_print_error("Error reading frame:\n%s", openni::OpenNI::getExtendedError() );
        }

        const bool toGreyscale = false;
        if(toGreyscale) {
            const int w = streams[i].Width();
            const int h = streams[i].Height();

            // Average the RGB channels to produce 8-bit greyscale.
            const openni::RGB888Pixel* pColour = (const openni::RGB888Pixel*)video_frame[i].getData();
            for(int p = 0; p < w*h; ++p) {
                const openni::RGB888Pixel rgb = pColour[p];
                int grey = ((int)(rgb.r&0xFF) + (int)(rgb.g&0xFF) + (int)(rgb.b&0xFF)) / 3;
                grey = std::min(255, std::max(0, grey));
                out_img[p] = grey;
            }
        }else{
            memcpy(out_img, video_frame[i].getData(), streams[i].SizeBytes());
        }

        // update frame properties
        (*streams_properties)[i]["devtime_us"] = video_frame[i].getTimestamp();

        if(use_ir_and_rgb) video_stream[i].stop();

        out_img += streams[i].SizeBytes();
    }

    current_frame_index = video_frame[0].getFrameIndex();

    return rc == openni::STATUS_OK;
}
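A minimal consumption sketch for GrabNext, assuming the usual Pangolin VideoInterface conventions (SizeBytes() for the packed buffer size, StreamInfo::StreamImage() to slice one stream out of it):

std::vector<unsigned char> buf(video.SizeBytes());
if(video.GrabNext(buf.data(), true)) {
    for(size_t s = 0; s < video.Streams().size(); ++s) {
        // Streams are written back-to-back, in stream order, into one buffer.
        pangolin::Image<unsigned char> img = video.Streams()[s].StreamImage(buf.data());
    }
}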
Example No. 3
bayer_method_t DebayerVideo::BayerMethodFromString(std::string str)
{
  if(str == "nearest") return BAYER_METHOD_NEAREST;
  else if(str == "simple") return BAYER_METHOD_SIMPLE;
  else if(str == "bilinear") return BAYER_METHOD_BILINEAR;
  else if(str == "hqlinear") return BAYER_METHOD_HQLINEAR;
  else if(str == "downsample") return BAYER_METHOD_DOWNSAMPLE;
  else if(str == "edgesense") return BAYER_METHOD_EDGESENSE;
  else if(str == "vng") return BAYER_METHOD_VNG;
  else if(str == "ahd") return BAYER_METHOD_AHD;
  else {
     pango_print_error("Debayer error: '%s' is not a valid debayer method; falling back to 'downsample'.\n", str.c_str());
     return BAYER_METHOD_DOWNSAMPLE;
  }
}
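Usage is a plain lookup; an unrecognised name logs an error and falls back to downsampling (this assumes the method is callable without an instance, i.e. declared static):

const bayer_method_t method = DebayerVideo::BayerMethodFromString("hqlinear");
// "cubic" is not a known method, so this logs an error and
// returns BAYER_METHOD_DOWNSAMPLE:
const bayer_method_t fallback = DebayerVideo::BayerMethodFromString("cubic");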
Example No. 4
bool UvcVideo::GrabNext( unsigned char* image, bool wait )
{
    uvc_frame_t* frame = NULL;
    // libuvc timeout semantics: 0 blocks indefinitely, -1 returns immediately.
    uvc_error_t err = uvc_stream_get_frame(strm_, &frame, wait ? 0 : -1);

    if(err != UVC_SUCCESS) {
        pango_print_error("UvcVideo Error: %s\n", uvc_strerror(err) );
        return false;
    }else{
        if(frame) {
            memcpy(image, frame->data, frame->data_bytes );
            return true;
        }else{
            if(wait) {
                pango_print_debug("UvcVideo: No frame data");
            }
            return false;
        }
    }
}
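A polling sketch built on the wait flag above: with wait=true the grab blocks until libuvc delivers a frame, with wait=false it returns false immediately when nothing is queued. ProcessFrame and running are hypothetical:

std::vector<unsigned char> img(video.SizeBytes());
while(running) {
    if(video.GrabNext(img.data(), /*wait=*/false)) {
        ProcessFrame(img.data());  // hypothetical consumer
    }
}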
Example No. 5
void OpenNi2Video::SetupStreamModes()
{
    streams_properties = &frame_properties["streams"];
    *streams_properties = json::value(json::array_type,false);
    streams_properties->get<json::array>().resize(numStreams);

    use_depth = false;
    use_ir = false;
    use_rgb = false;
    depth_to_color = false;
    use_ir_and_rgb = false;

    fromFile = false;

    sizeBytes = 0;
    for(size_t i=0; i<numStreams; ++i) {
        const OpenNiStreamMode& mode = sensor_type[i];
        openni::SensorType nisensortype;
        openni::PixelFormat nipixelfmt;

        switch( mode.sensor_type ) {
        case OpenNiDepth_1mm_Registered:
            depth_to_color = true;
            // fall through: registered depth shares the 1mm depth mode below
        case OpenNiDepth_1mm:
            nisensortype = openni::SENSOR_DEPTH;
            nipixelfmt = openni::PIXEL_FORMAT_DEPTH_1_MM;
            use_depth = true;
            break;
        case OpenNiDepth_100um:
            nisensortype = openni::SENSOR_DEPTH;
            nipixelfmt = openni::PIXEL_FORMAT_DEPTH_100_UM;
            use_depth = true;
            break;
        case OpenNiIrProj:
        case OpenNiIr:
            nisensortype = openni::SENSOR_IR;
            nipixelfmt = openni::PIXEL_FORMAT_GRAY16;
            use_ir = true;
            break;
        case OpenNiIr24bit:
            nisensortype = openni::SENSOR_IR;
            nipixelfmt = openni::PIXEL_FORMAT_RGB888;
            use_ir = true;
            break;
        case OpenNiIr8bitProj:
        case OpenNiIr8bit:
            nisensortype = openni::SENSOR_IR;
            nipixelfmt = openni::PIXEL_FORMAT_GRAY8;
            use_ir = true;
            break;
        case OpenNiRgb:
            nisensortype = openni::SENSOR_COLOR;
            nipixelfmt = openni::PIXEL_FORMAT_RGB888;
            use_rgb = true;
            break;
        case OpenNiGrey:
            nisensortype = openni::SENSOR_COLOR;
            nipixelfmt = openni::PIXEL_FORMAT_GRAY8;
            use_rgb = true;
            break;
        case OpenNiUnassigned:
        default:
            continue;
        }

        openni::VideoMode onivmode;
        try {
            onivmode = FindOpenNI2Mode(devices[mode.device], nisensortype, mode.dim.x, mode.dim.y, mode.fps, nipixelfmt);
        }catch(const VideoException& e) {
            pango_print_error("Unable to find compatible OpenNI Video Mode. Please choose from:\n");
            PrintOpenNI2Modes(nisensortype);
            fflush(stdout);
            throw;
        }

        openni::Status rc = video_stream[i].setVideoMode(onivmode);
        if(rc != openni::STATUS_OK)
            throw VideoException("Couldn't set OpenNI VideoMode", openni::OpenNI::getExtendedError());

        const VideoPixelFormat fmt = VideoFormatFromOpenNI2(nipixelfmt);
        const StreamInfo stream(
            fmt, onivmode.getResolutionX(), onivmode.getResolutionY(),
            (onivmode.getResolutionX() * fmt.bpp) / 8,
            // The pointer field carries this stream's byte offset into the
            // shared frame buffer; it is rebased when frames are grabbed.
            (unsigned char*)0 + sizeBytes
        );

        sizeBytes += stream.SizeBytes();
        streams.push_back(stream);
    }

    SetRegisterDepthToImage(depth_to_color);

    use_ir_and_rgb = use_rgb && use_ir;
}
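Note the (unsigned char*)0 + sizeBytes argument in the StreamInfo constructor: the pointer field carries each stream's byte offset rather than a real address. A sketch of how such an offset would later be rebased onto an actual frame buffer (the Offset() accessor is assumed):

// Recover the integer offset from the pointer field, then rebase it.
const size_t offset = (size_t)streams[i].Offset();  // Offset() assumed accessor
unsigned char* stream_data = buffer + offset;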
Example No. 6
void PleoraVideo::SetDeviceParams(Params& p) {

    lStart = dynamic_cast<PvGenCommand*>( lDeviceParams->Get( "AcquisitionStart" ) );
    lStop = dynamic_cast<PvGenCommand*>( lDeviceParams->Get( "AcquisitionStop" ) );

    for(Params::ParamMap::iterator it = p.params.begin(); it != p.params.end(); it++) {
        if(it->first == "get_temperature"){
            getTemp = p.Get<bool>("get_temperature",false);
        } else {
            if (it->second == "Execute") {
              // This is a command, deal with it accordingly.
              PvGenCommand* cmd = dynamic_cast<PvGenCommand*>(lDeviceParams->Get(it->first.c_str()));
              if(cmd) {
                  PvResult r = cmd->Execute();
                  if(!r.IsOK()){
                      pango_print_error("Error executing command %s. Reason: %s\n", it->first.c_str(), r.GetDescription().GetAscii());
                  } else {
                      pango_print_info("Executed command %s\n", it->first.c_str());
                  }
                  bool done = false;
                  int attempts = 20;
                  do {
                      cmd->IsDone(done);
                      std::this_thread::sleep_for(std::chrono::milliseconds(1000));
                      attempts--;
                  } while(!done && (attempts > 0));
                  if(!done) {
                      pango_print_error("Timeout while waiting for command %s to complete\n", it->first.c_str());
                  }
              } else {
                  pango_print_error("Command %s not recognized\n", it->first.c_str());
              }
            } else {
                try {
                    PvGenParameter* par = lDeviceParams->Get(PvString(it->first.c_str()));
                    if(par) {
                        PvResult r = par->FromString(PvString(it->second.c_str()));
                        if(!r.IsOK()){
                            pango_print_error("Error setting parameter %s to %s. Reason: %s\n", it->first.c_str(), it->second.c_str(), r.GetDescription().GetAscii());
                        } else {
                            pango_print_info("Set parameter %s to %s\n", it->first.c_str(), it->second.c_str());
                        }
                    } else {
                        pango_print_error("Parameter %s not recognized\n", it->first.c_str());
                    }
                } catch(const std::runtime_error& e) {
                    pango_print_error("Set parameter %s: %s\n", it->first.c_str(), e.what());
                }
            }
        }
    }

    // Get Handles to properties we'll be using.
    lAnalogGain = lDeviceParams->GetInteger("AnalogGain");
    lGamma = lDeviceParams->GetFloat("Gamma");
    lAnalogBlackLevel = lDeviceParams->GetInteger("AnalogBlackLevel");
    lExposure = lDeviceParams->GetFloat("ExposureTime");
    lAquisitionMode = lDeviceParams->GetEnum("AcquisitionMode");
    lTriggerSource = lDeviceParams->GetEnum("TriggerSource");
    lTriggerMode = lDeviceParams->GetEnum("TriggerMode");

    if(getTemp) {
        lTemperatureCelcius = lDeviceParams->GetFloat("DeviceTemperatureCelsius");
        pango_print_warn("Warning: get_temperature might add a blocking call taking several ms to each frame read.");
    }

}
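A hedged usage sketch: keys whose value is "Execute" are treated as GenICam commands, anything else as a parameter assignment, and get_temperature is handled specially. The Params::Set calls assume the usual Pangolin Params API:

pangolin::Params p;
p.Set("ExposureTime", "10000");        // plain parameter, written via FromString()
p.Set("AcquisitionStart", "Execute");  // command: executed, then polled via IsDone()
p.Set("get_temperature", "1");         // enables the per-frame temperature read
video.SetDeviceParams(p);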
Example No. 7
PleoraVideo::PleoraVideo(const char* model_name, const char* serial_num, size_t index, size_t bpp,  size_t binX, size_t binY, size_t buffer_count,
                         size_t desired_size_x, size_t desired_size_y, size_t desired_pos_x, size_t desired_pos_y)
    : lPvSystem(0), lDevice(0), lStream(0)
{
    lPvSystem = new PvSystem();
    if ( !lPvSystem ) {
        throw pangolin::VideoException("Pleora: Unable to create PvSystem");
    }

    const PvDeviceInfo *lDeviceInfo = SelectDevice(*lPvSystem, model_name, serial_num, index);
    if ( !lDeviceInfo ) {
        throw pangolin::VideoException("Pleora: Unable to select device");
    }

    PvResult lResult;
    lDevice = PvDevice::CreateAndConnect( lDeviceInfo, &lResult );
    if ( !lDevice ) {
        throw pangolin::VideoException("Pleora: Unable to connect to device", lResult.GetDescription().GetAscii() );
    }

    lStream = PvStream::CreateAndOpen( lDeviceInfo->GetConnectionID(), &lResult );
    if ( !lStream ) {
        lDevice->Disconnect();
        PvDevice::Free(lDevice);
        throw pangolin::VideoException("Pleora: Unable to open stream", lResult.GetDescription().GetAscii() );
    }

    lDeviceParams = lDevice->GetParameters();
    lStart = dynamic_cast<PvGenCommand*>( lDeviceParams->Get( "AcquisitionStart" ) );
    lStop = dynamic_cast<PvGenCommand*>( lDeviceParams->Get( "AcquisitionStop" ) );

    if( bpp == 8) {
        lDeviceParams->SetEnumValue("PixelFormat", PvString("Mono8") );
    }else if(bpp == 10) {
        lDeviceParams->SetEnumValue("PixelFormat", PvString("Mono10p") );
    }else if(bpp == 12) {
        lDeviceParams->SetEnumValue("PixelFormat", PvString("Mono12p") );
    }

    // Height and width will fail if not multiples of 8.
    lDeviceParams->SetIntegerValue("Height", desired_size_y );
    if(lResult.IsFailure()){
        pango_print_error("Height %zu fail\n", desired_size_y);
        int64_t max, min;
        lDeviceParams->GetIntegerRange("Height", max, min );
        lDeviceParams->SetIntegerValue("Height", max );
    }
    lDeviceParams->SetIntegerValue("Width", desired_size_x );
    if(lResult.IsFailure()){
        pango_print_error("Width %zu fail\n", desired_size_x);
        int64_t max, min;
        lDeviceParams->GetIntegerRange("Width", max, min );
        lDeviceParams->SetIntegerValue("Width", max );
    }

    lDeviceParams = lDevice->GetParameters();
    const int w = DeviceParam<int64_t>("Width");
    const int h = DeviceParam<int64_t>("Height");

    // Offset will fail if not multiple of 8.
    lDeviceParams->SetIntegerValue("OffsetX", desired_pos_x );
    if(lResult.IsFailure()){
        pango_print_error("OffsetX %zu fail\n", desired_pos_x);
    }
    lDeviceParams->SetIntegerValue("OffsetX", desired_pos_y );
    if(lResult.IsFailure()){
        pango_print_error("OffsetY %zu fail\n", desired_pos_y);
    }

    lResult = lDeviceParams->SetIntegerValue("BinningHorizontal", binX );
    if(lResult.IsFailure()){
        pango_print_error("BinningHorizontal %zu fail\n", binX);
    }
    lResult = lDeviceParams->SetIntegerValue("BinningVertical", binY );
    if(lResult.IsFailure()){
        pango_print_error("BinningVertical %zu fail\n", binY);
    }

    lStreamParams = lStream->GetParameters();

    // Reading payload size from device
    const uint32_t lSize = lDevice->GetPayloadSize();

    // Use buffer_count or the maximum number of buffers, whichever is smaller
    const uint32_t lBufferCount = ( lStream->GetQueuedBufferMaximum() < buffer_count ) ?
        lStream->GetQueuedBufferMaximum() :
        buffer_count;

    // Allocate buffers and queue
    for ( uint32_t i = 0; i < lBufferCount; i++ )
    {
        PvBuffer *lBuffer = new PvBuffer;
        lBuffer->Alloc( static_cast<uint32_t>( lSize ) );
        lBufferList.push_back( lBuffer );
    }

    // Setup pangolin for stream
    PvGenEnum* lpixfmt = dynamic_cast<PvGenEnum*>( lDeviceParams->Get("PixelFormat") );
    const VideoPixelFormat fmt = PleoraFormat(lpixfmt);
    streams.push_back(StreamInfo(fmt, w, h, (w*fmt.bpp)/8));
    size_bytes = lSize;

    Start();
}
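The set-then-check pattern above repeats for every integer parameter; a small helper in the same style would keep the PvResult handling in one place (a sketch, not part of the Pleora or Pangolin API):

bool SetIntChecked(PvGenParameterArray* params, const char* name, int64_t value)
{
    const PvResult r = params->SetIntegerValue(name, value);
    if(r.IsFailure()) {
        pango_print_error("%s %lld fail: %s\n", name, (long long)value,
                          r.GetDescription().GetAscii());
    }
    return r.IsOK();
}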
Example No. 8
void VideoViewer(const std::string& input_uri, const std::string& output_uri)
{
    pangolin::Var<int>  record_timelapse_frame_skip("viewer.record_timelapse_frame_skip", 1 );
    pangolin::Var<int>  end_frame("viewer.end_frame", std::numeric_limits<int>::max() );
    pangolin::Var<bool> video_wait("video.wait", true);
    pangolin::Var<bool> video_newest("video.newest", false);

    // Open Video by URI
    pangolin::VideoRecordRepeat video(input_uri, output_uri);
    const size_t num_streams = video.Streams().size();

    if(num_streams == 0) {
        pango_print_error("No video streams from device.\n");
        return;
    }

    // Output details of video stream
    for(size_t s = 0; s < num_streams; ++s) {
        const pangolin::StreamInfo& si = video.Streams()[s];
        std::cout << "Stream " << s << ": " << si.Width() << " x " << si.Height()
                  << " " << si.PixFormat().format << " (pitch: " << si.Pitch() << " bytes)" << std::endl;
    }

    // Check if video supports VideoPlaybackInterface
    pangolin::VideoPlaybackInterface* video_playback = pangolin::FindFirstMatchingVideoInterface<pangolin::VideoPlaybackInterface>(video);
    const int total_frames = video_playback ? video_playback->GetTotalFrames() : std::numeric_limits<int>::max();
    const int slider_size = (total_frames < std::numeric_limits<int>::max() ? 20 : 0);

    if( video_playback ) {
        if(total_frames < std::numeric_limits<int>::max() ) {
            std::cout << "Video length: " << total_frames << " frames" << std::endl;
        }
        end_frame = 0;
    }

    std::vector<unsigned char> buffer;
    buffer.resize(video.SizeBytes()+1);

    // Create OpenGL window - guess sensible dimensions
    pangolin::CreateWindowAndBind( "VideoViewer",
        (int)(video.Width() * num_streams),
        (int)(video.Height() + slider_size)
    );

    // Assume packed OpenGL data unless otherwise specified
    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
    glPixelStorei(GL_PACK_ALIGNMENT, 1);
    glEnable (GL_BLEND);
    glBlendFunc (GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);

    // Setup resizable views for video streams
    std::vector<pangolin::GlPixFormat> glfmt;
    std::vector<std::pair<float,float> > gloffsetscale;
    std::vector<size_t> strides;
    std::vector<pangolin::ImageViewHandler> handlers;
    handlers.reserve(num_streams);

    size_t scratch_buffer_bytes = 0;

    pangolin::View& container = pangolin::Display("streams");
    container.SetLayout(pangolin::LayoutEqual)
             .SetBounds(pangolin::Attach::Pix(slider_size), 1.0, 0.0, 1.0);
    for(unsigned int d=0; d < num_streams; ++d) {
        const pangolin::StreamInfo& si = video.Streams()[d];
        pangolin::View& view = pangolin::CreateDisplay().SetAspect(si.Aspect());
        container.AddDisplay(view);
        glfmt.push_back(pangolin::GlPixFormat(si.PixFormat()));
        gloffsetscale.push_back(std::pair<float,float>(0.0f, 1.0f) );
        if( si.PixFormat().bpp % 8 ) {
            pango_print_warn("Stream %i: Unable to display formats that are not a multiple of 8 bits.", d);
        }
        if( (8*si.Pitch()) % si.PixFormat().bpp ) {
            pango_print_warn("Stream %i: Unable to display formats whose pitch is not a whole number of pixels.", d);
        }
        if(glfmt.back().gltype == GL_DOUBLE) {
            scratch_buffer_bytes = std::max(scratch_buffer_bytes, sizeof(float)*si.Width() * si.Height());
        }
        strides.push_back( (8*si.Pitch()) / si.PixFormat().bpp );
        handlers.push_back( pangolin::ImageViewHandler(si.Width(), si.Height()) );
        view.SetHandler(&handlers.back());
    }

    // current frame in memory buffer and displaying.
    pangolin::Var<int> frame("ui.frame", -1, 0, total_frames-1 );
    pangolin::Slider frame_slider("frame", frame.Ref() );
    if(video_playback && total_frames < std::numeric_limits<int>::max())
    {
        frame_slider.SetBounds(0.0, pangolin::Attach::Pix(slider_size), 0.0, 1.0);
        pangolin::DisplayBase().AddDisplay(frame_slider);
    }

    std::vector<unsigned char> scratch_buffer;
    scratch_buffer.resize(scratch_buffer_bytes);

    std::vector<pangolin::Image<unsigned char> > images;

#ifdef CALLEE_HAS_CPP11
    const int FRAME_SKIP = 30;
    const char show_hide_keys[]  = {'1','2','3','4','5','6','7','8','9'};
    const char screenshot_keys[] = {'!','"','#','$','%','^','&','*','('};

    // Show/hide streams
    for(size_t v=0; v < container.NumChildren() && v < 9; v++) {
        pangolin::RegisterKeyPressCallback(show_hide_keys[v], [v,&container](){
            container[v].ToggleShow();
        } );
        pangolin::RegisterKeyPressCallback(screenshot_keys[v], [v,&images,&video](){
            if(v < images.size() && images[v].ptr) {
                try{
                    pangolin::SaveImage(
                        images[v], video.Streams()[v].PixFormat(),
                        pangolin::MakeUniqueFilename("capture.png")
                    );
                }catch(const std::exception& e){
                    pango_print_error("Unable to save frame: %s\n", e.what());
                }
            }
        } );
    }

    pangolin::RegisterKeyPressCallback('r', [&](){
        if(!video.IsRecording()) {
            video.SetTimelapse( static_cast<size_t>(record_timelapse_frame_skip) );
            video.Record();
            pango_print_info("Started Recording.\n");
        }else{
            video.Stop();
            pango_print_info("Finished recording.\n");
        }
        fflush(stdout);
    });
    pangolin::RegisterKeyPressCallback('p', [&](){
        video.Play();
        end_frame = std::numeric_limits<int>::max();
        pango_print_info("Playing from file log.\n");
        fflush(stdout);
    });
    pangolin::RegisterKeyPressCallback('s', [&](){
        video.Source();
        end_frame = std::numeric_limits<int>::max();
        pango_print_info("Playing from source input.\n");
        fflush(stdout);
    });
    pangolin::RegisterKeyPressCallback(' ', [&](){
        end_frame = (frame < end_frame) ? frame : std::numeric_limits<int>::max();
    });
    pangolin::RegisterKeyPressCallback('w', [&](){
        video_wait = !video_wait;
        if(video_wait) {
            pango_print_info("Gui wait's for video frame.\n");
        }else{
            pango_print_info("Gui doesn't wait for video frame.\n");
        }
    });
    pangolin::RegisterKeyPressCallback('d', [&](){
        video_newest = !video_newest;
        if(video_newest) {
            pango_print_info("Discarding old frames.\n");
        }else{
            pango_print_info("Not discarding old frames.\n");
        }
    });
    pangolin::RegisterKeyPressCallback('<', [&](){
        if(video_playback) {
            frame = video_playback->Seek(frame - FRAME_SKIP) -1;
            end_frame = frame + 1;
        }else{
            pango_print_warn("Unable to skip backward.");
        }
    });
    pangolin::RegisterKeyPressCallback('>', [&](){
        if(video_playback) {
            frame = video_playback->Seek(frame + FRAME_SKIP) -1;
            end_frame = frame + 1;
        }else{
            end_frame = frame + FRAME_SKIP;
        }
    });
    pangolin::RegisterKeyPressCallback(',', [&](){
        if(video_playback) {
            frame = video_playback->Seek(frame - 1) -1;
            end_frame = frame+1;
        }else{
            pango_print_warn("Unable to skip backward.");
        }
    });
    pangolin::RegisterKeyPressCallback('.', [&](){
        // Pause at next frame
        end_frame = frame+1;
    });
    pangolin::RegisterKeyPressCallback('0', [&](){
        video.RecordOneFrame();
    });
    pangolin::RegisterKeyPressCallback('a', [&](){
        // Adapt scale
        for(unsigned int i=0; i<images.size(); ++i) {
            if(container[i].HasFocus()) {
                pangolin::Image<unsigned char>& img = images[i];
                pangolin::ImageViewHandler& ivh = handlers[i];

                const bool have_selection = std::isfinite(ivh.GetSelection().Area()) && std::abs(ivh.GetSelection().Area()) >= 4;
                pangolin::XYRangef froi = have_selection ? ivh.GetSelection() : ivh.GetViewToRender();
                gloffsetscale[i] = pangolin::GetOffsetScale(img, froi.Cast<int>(), glfmt[i]);
            }
        }
    });
    pangolin::RegisterKeyPressCallback('g', [&](){
        std::pair<float,float> os_default(0.0f, 1.0f);

        // Get the scale and offset from the container that has focus.
        for(unsigned int i=0; i<images.size(); ++i) {
            if(container[i].HasFocus()) {
                pangolin::Image<unsigned char>& img = images[i];
                pangolin::ImageViewHandler& ivh = handlers[i];

                const bool have_selection = std::isfinite(ivh.GetSelection().Area()) && std::abs(ivh.GetSelection().Area()) >= 4;
                pangolin::XYRangef froi = have_selection ? ivh.GetSelection() : ivh.GetViewToRender();
                os_default = pangolin::GetOffsetScale(img, froi.Cast<int>(), glfmt[i]);
                break;
            }
        }

        // Adapt scale for all images equally
        // TODO: we're assuming all the container's images have the same type.
        for(unsigned int i=0; i<images.size(); ++i) {
            gloffsetscale[i] = os_default;
        }

    });
#endif // CALLEE_HAS_CPP11

#ifdef DEBUGVIDEOVIEWER
    unsigned int delayms = 0;
    pangolin::RegisterKeyPressCallback('z', [&](){
      // Adapt delay
      delayms += 1;
      std::cout << "                  Fake delay " << delayms << "ms" << std::endl;
    });

    pangolin::RegisterKeyPressCallback('x', [&](){
      // Adapt delay
      delayms = (delayms > 1) ? delayms-1 : 0;
    });

    pangolin::basetime start,now;
#endif // DEBUGVIDEOVIEWER

    // Stream and display video
    while(!pangolin::ShouldQuit())
    {
        glClear(GL_DEPTH_BUFFER_BIT | GL_COLOR_BUFFER_BIT);
        glColor3f(1.0f, 1.0f, 1.0f);

        if(frame.GuiChanged()) {
            if(video_playback) {
                frame = video_playback->Seek(frame) -1;
            }
            end_frame = frame + 1;
        }

#ifdef DEBUGVIDEOVIEWER
        std::this_thread::sleep_for(std::chrono::milliseconds(delayms));
        std::cout << "-------------------------------------------------------" << std::endl;
        now = pangolin::TimeNow();
        std::cout << "      FPS: " << 1.0/pangolin::TimeDiff_s(start, now) << " artificial delay: " << delayms <<"ms"<< std::endl;
        std::cout << "-------------------------------------------------------" << std::endl;
        start = now;
#endif
        if ( frame < end_frame ) {
            if( video.Grab(&buffer[0], images, video_wait, video_newest) ) {
                frame = frame +1;
            }
        }
#ifdef DEBUGVIDEOVIEWER
        const pangolin::basetime end = pangolin::TimeNow();
        std::cout << "Total grab time: " << 1000*pangolin::TimeDiff_s(start, end) << "ms" << std::endl;
#endif

        glLineWidth(1.5f);
        glDisable(GL_DEPTH_TEST);

        for(unsigned int i=0; i<images.size(); ++i)
        {
            if(container[i].IsShown()) {
                container[i].Activate();
                pangolin::Image<unsigned char>& image = images[i];

                // Get texture of correct dimension / format
                const pangolin::GlPixFormat& fmt = glfmt[i];
                pangolin::GlTexture& tex = pangolin::TextureCache::I().GlTex((GLsizei)image.w, (GLsizei)image.h, fmt.scalable_internal_format, fmt.glformat, GL_FLOAT);

                // Upload image data to texture
                tex.Bind();
                if(fmt.gltype == GL_DOUBLE) {
                    // Convert to float first, using scratch_buffer for storage
                    pangolin::Image<float> fimage(image.w, image.h, image.w*sizeof(float), (float*)scratch_buffer.data());
                    ConvertPixels<float,double>( fimage, image.Reinterpret<double>() );
                    glPixelStorei(GL_UNPACK_ROW_LENGTH, 0);
                    tex.Upload(fimage.ptr,0,0, (GLsizei)fimage.w, (GLsizei)fimage.h, fmt.glformat, GL_FLOAT);
                }else{
                    glPixelStorei(GL_UNPACK_ROW_LENGTH, (GLint)strides[i]);
                    tex.Upload(image.ptr,0,0, (GLsizei)image.w, (GLsizei)image.h, fmt.glformat, fmt.gltype);
                }

                // Render
                handlers[i].UpdateView();
                handlers[i].glSetViewOrtho();
                const std::pair<float,float> os = gloffsetscale[i];
                pangolin::GlSlUtilities::OffsetAndScale(os.first, os.second);
                handlers[i].glRenderTexture(tex);
                pangolin::GlSlUtilities::UseNone();
                handlers[i].glRenderOverlay();
            }
        }
        glPixelStorei(GL_UNPACK_ROW_LENGTH, 0);

        // leave in pixel orthographic for slider to render.
        pangolin::DisplayBase().ActivatePixelOrthographic();
        if(video.IsRecording()) {
            pangolin::glRecordGraphic(pangolin::DisplayBase().v.w-14.0f, pangolin::DisplayBase().v.h-14.0f, 7.0f);
        }
        pangolin::FinishFrame();
    }
}
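A minimal entry point sketch for the viewer; the default URIs are illustrative (Pangolin's test and pango drivers are assumed):

int main(int argc, char* argv[])
{
    const std::string input_uri  = (argc > 1) ? argv[1] : "test:[size=640x480,fmt=RGB24]//";
    const std::string output_uri = (argc > 2) ? argv[2] : "pango:[unique_filename]//video.pango";
    VideoViewer(input_uri, output_uri);
    return 0;
}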