// Advance decoding until the currently held frame's presentation time
// (_frameTime) lies beyond the requested playback position frameMs, so the
// caller can display the frame that is "current" at that moment.
//
// frameMs  - playback position (ms) we must cover.
// systemMs - wall-clock timestamp; NOTE: currently unused in this method.
//
// Returns ReadResult::Success when a suitable frame is held, otherwise the
// error result from decoding.
ReaderImplementation::ReadResult QtGifReaderImplementation::readFramesTill(int64 frameMs, uint64 systemMs) {
	// Fast path: the frame we already hold is still ahead of the playback position.
	if (!_frame.isNull() && _frameTime > frameMs) {
		return ReadResult::Success;
	}
	// Decode one frame; if decoding failed, or this frame already covers
	// frameMs, we are done.
	auto readResult = readNextFrame();
	if (readResult != ReadResult::Success || _frameTime > frameMs) {
		return readResult;
	}
	// Decode a second frame. If even that one is not ahead of frameMs, the
	// decoder has fallen behind the playback clock: nudge _frameTime just past
	// frameMs so playback can catch up instead of decoding in a tight loop.
	readResult = readNextFrame();
	if (_frameTime <= frameMs) {
		_frameTime = frameMs + 5; // keep up
	}
	return readResult;
}
/**
 * Constructs an RLF animation from a file on disk.
 *
 * On failure (file missing or wrong magic number) a warning is emitted and
 * the object is left empty but in a consistent state.
 *
 * @param fileName Path of the .rlf file to open.
 * @param stream   If true, frames are decoded lazily on demand; if false,
 *                 every frame is decoded up-front into _frames.
 */
RlfAnimation::RlfAnimation(const Common::String &fileName, bool stream)
		: _stream(stream),
		  _readStream(0),          // FIX: was left uninitialized by this constructor
		  _lastFrameRead(0),
		  _frameCount(0),
		  _width(0),
		  _height(0),
		  _frameTime(0),
		  _frames(0),
		  _currentFrame(-1),
		  _nextFrame(0),           // FIX: was left uninitialized by this constructor
		  _frameBufferByteSize(0) {
	if (!_file.open(fileName)) {
		warning("RLF animation file %s could not be opened", fileName.c_str());
		return;
	}

	if (!readHeader()) {
		warning("%s is not a RLF animation file. Wrong magic number", fileName.c_str());
		return;
	}

	// 16-bit RGB565 frame buffer sized from the header dimensions.
	_currentFrameBuffer.create(_width, _height, Graphics::createPixelFormat<565>());
	_frameBufferByteSize = _width * _height * sizeof(uint16);

	if (!stream) {
		// Preload mode: decode every frame now so playback is just a lookup.
		_frames = new Frame[_frameCount];

		// Read in each frame
		for (uint i = 0; i < _frameCount; ++i) {
			_frames[i] = readNextFrame();
		}
	}
}
/**
 * Constructs an RLF animation from an already-open seekable stream.
 *
 * On failure (wrong magic number) a warning is emitted and the object is
 * left empty but in a consistent state.
 *
 * @param rstream Source stream containing RLF data; ownership is not taken here.
 * @param stream  If true, frames are decoded lazily on demand; if false,
 *                every frame is decoded up-front into _frames.
 */
RlfAnimation::RlfAnimation(Common::SeekableReadStream *rstream, bool stream)
		: _stream(stream),
		  _readStream(rstream),
		  _lastFrameRead(0),
		  _frameCount(0),
		  _width(0),
		  _height(0),
		  _frameTime(0),
		  _frames(0),
		  _currentFrame(-1),       // FIX: was left uninitialized; getNextFrame() reads and increments it
		  _nextFrame(0),
		  _frameBufferByteSize(0) {
	if (!readHeader()) {
		warning("Stream is not a RLF animation. Wrong magic number");
		return;
	}

	// 16-bit RGB565 frame buffer sized from the header dimensions.
	_currentFrameBuffer.create(_width, _height, Graphics::createPixelFormat<565>());
	_frameBufferByteSize = _width * _height * sizeof(uint16);

	if (!stream) {
		// Preload mode: decode every frame now so playback is just a lookup.
		_frames = new Frame[_frameCount];

		// Read in each frame
		for (uint i = 0; i < _frameCount; ++i) {
			_frames[i] = readNextFrame();
		}
	}
}
/*! \brief Return the trajectory frame found by the last probe, probing first
 * if the caller has not done so yet.
 *
 * Throws APIError when no next frame exists, or when the frame lacks a step
 * number or a time. */
TrajectoryFrame TrajectoryFrameReader::frame()
{
    // Make sure a probe has happened before we hand out data.
    if (!haveProbedForNextFrame_)
    {
        readNextFrame();
    }
    if (!nextFrameExists_)
    {
        GMX_THROW(APIError("There is no next frame, so there should have been no attempt to use the data, e.g. by reacting to a call to readNextFrame()."));
    }
    // Reset the probe state so a future call reads a fresh frame.
    haveProbedForNextFrame_ = false;
    nextFrameExists_        = false;
    // The probe left fresh data in trxframeGuard_; hand it out after sanity checks.
    TrajectoryFrame result;
    result.frame_ = trxframeGuard_.get();
    if (!result.frame_->bStep)
    {
        GMX_THROW(APIError("Cannot handle trajectory frame that lacks a step number"));
    }
    if (!result.frame_->bTime)
    {
        GMX_THROW(APIError("Cannot handle trajectory frame that lacks a time"));
    }
    return result;
}
const char* AvReader::readFrameAt( const size_t frame ) { // /std::cout << "seek at " << frame << std::endl; _inputFile.seekAtFrame( frame ); _inputVideo->flushDecoder(); return readNextFrame(); }
/*! \brief Build and return an EnergyFrame from the last probed energy data,
 * probing first if the caller has not done so yet.
 *
 * Throws APIError when no next frame exists, and InternalError when a
 * requested energy field is missing from the frame. */
EnergyFrame EnergyFrameReader::frame()
{
    // Make sure a probe has happened before we hand out data.
    if (!haveProbedForNextFrame_)
    {
        readNextFrame();
    }
    if (!nextFrameExists_)
    {
        GMX_THROW(APIError("There is no next frame, so there should have been no attempt to use the data, e.g. by reacting to a call to readNextFrame()."));
    }
    // Copy the probed data out of enxframeGuard_ into the value object we return.
    t_enxframe *frameData = enxframeGuard_.get();
    EnergyFrame result;
    result.time_ = frameData->t;
    result.step_ = frameData->step;
    for (auto &entry : indicesOfEnergyFields_)
    {
        if (entry.second >= frameData->nre)
        {
            GMX_THROW(InternalError(formatString("Index %d for energy %s not present in energy frame with %d energies", entry.second, entry.first.c_str(), frameData->nre)));
        }
        result.values_[entry.first] = frameData->ener[entry.second].e;
    }
    // Reset the probe state so a future call reads a fresh frame.
    haveProbedForNextFrame_ = false;
    nextFrameExists_        = false;
    return result;
}
// Advance playback by one frame and return the composited frame buffer.
// In preload mode the next frame is applied by index; in streaming mode it
// is decoded on the spot.
const Graphics::Surface *RlfAnimation::getNextFrame() {
	// Caller must not step past the final frame.
	assert(_currentFrame + 1 < (int)_frameCount);

	if (!_stream) {
		// Preload mode: apply the already-decoded next frame by index.
		applyFrameToCurrent(_currentFrame + 1);
	} else {
		// Streaming mode: decode the next frame straight from the source.
		applyFrameToCurrent(readNextFrame());
	}

	++_currentFrame;
	return &_currentFrameBuffer;
}
/** * GUI action -> goto next frame * handle error and eof of frame */ void PFSViewMainWin::gotoNextFrame() { try { if( !readNextFrame() ) { statusBar()->showMessage( "No more frames", 1000 ); } } catch( pfs::Exception ex ) { // Display message and keep the old frame QMessageBox::critical( this, "pfsview error", ex.getMessage() ); qApp->quit(); } }
void VideoProcessor::run(){ cv::Mat frame; cv::Mat output; if (!isOpened()) // if no capture device has been set return; stop= false; while (!isStopped()) { if (!readNextFrame(frame)) // read next frame if any break; if (windowNameInput.length()!=0) // display input frame cv::imshow(windowNameInput,frame); if (callIt) { // calling the process function or method if (process){ // process the frame process(frame, output); } else if (frameProcessor){ frameProcessor->process(frame,output); } fnumber++; // increment frame number } else { output= frame; } if (outputFile.length()!=0) writeNextFrame(output); if (windowNameOutput.length()!=0) // display output frame cv::imshow(windowNameOutput,output); if (delay>=0 && cv::waitKey(delay)>=0) // introduce a delay stopIt(); if (frameToStop>=0 && getFrameNumber()==frameToStop)// check if we should stop stopIt(); } }
/**
 * Constructs the complete pfsview main window: central view widget,
 * frame-navigation actions and toolbar, channel/mapping selectors,
 * histogram toolbar, status-bar labels, and the Frame/View/Tone-mapping/Help
 * menus — then loads the first frame (throwing PFSViewException on failure).
 *
 * window_min/window_max: optional initial luminance range window; applied
 * only when window_min < window_max.
 */
PFSViewMainWin::PFSViewMainWin( float window_min, float window_max ):
  QMainWindow( 0 )
{
  currentFrame = frameList.end();

  // Central widget: scrollable HDR view.
  QScrollArea *pfsViewArea = new PFSViewWidgetArea( this );
  pfsView = (PFSViewWidget*)pfsViewArea->widget();
  setCentralWidget( pfsViewArea );
  setWindowIcon( QIcon( ":icons/appicon.png" ) );

  // Frame navigation actions (PageDown/PageUp).
  QAction *nextFrameAct = new QAction( tr( "&Next frame" ), this );
  nextFrameAct->setStatusTip( tr( "Load next frame" ) );
  nextFrameAct->setShortcut( Qt::Key_PageDown );
  connect( nextFrameAct, SIGNAL(triggered()), this, SLOT(gotoNextFrame()) );

  QAction *previousFrameAct = new QAction( tr( "&Previous frame" ), this );
  previousFrameAct->setStatusTip( tr( "Load previous frame" ) );
  previousFrameAct->setShortcut( Qt::Key_PageUp );
  connect( previousFrameAct, SIGNAL(triggered()), this, SLOT(gotoPreviousFrame()) );

  // Navigation toolbar with previous/next arrow buttons.
  QToolBar *toolBar = addToolBar( tr( "Navigation" ) );
  // toolBar->setHorizontalStretchable( true );

  QToolButton *previousFrameBt = new QToolButton( toolBar );
  previousFrameBt->setArrowType( Qt::LeftArrow );
  previousFrameBt->setMinimumWidth( 15 );
  connect( previousFrameBt, SIGNAL(clicked()), this, SLOT(gotoPreviousFrame()) );
  previousFrameBt->setToolTip( "Goto previous frame" );
  toolBar->addWidget( previousFrameBt );

  QToolButton *nextFrameBt = new QToolButton( toolBar );
  nextFrameBt->setArrowType( Qt::RightArrow );
  nextFrameBt->setMinimumWidth( 15 );
  connect( nextFrameBt, SIGNAL(clicked()), this, SLOT(gotoNextFrame()) );
  nextFrameBt->setToolTip( "Goto next frame" );
  toolBar->addWidget( nextFrameBt );

  // Channel selector combo box.
  QLabel *channelSelLabel = new QLabel( "&Channel", toolBar );
  channelSelection = new QComboBox( toolBar );
  channelSelLabel->setBuddy( channelSelection );
  connect( channelSelection, SIGNAL( activated( int ) ), this, SLOT( setChannelSelection(int) ) );
  toolBar->addWidget( channelSelLabel );
  toolBar->addWidget( channelSelection );
  toolBar->addSeparator();

  // Luminance mapping-method combo box (index 3 = "Gamma 2.2" default).
  QLabel *mappingMethodLabel = new QLabel( "&Mapping", toolBar );
  mappingMethodLabel->setAlignment( Qt::AlignRight |
    Qt::AlignVCenter ); // | // Qt::TextExpandTabs | Qt::TextShowMnemonic );
  mappingMethodCB = new QComboBox( toolBar );
  mappingMethodLabel->setBuddy( mappingMethodCB );
  mappingMethodCB->addItem( "Linear" );
  mappingMethodCB->addItem( "Gamma 1.4" );
  mappingMethodCB->addItem( "Gamma 1.8" );
  mappingMethodCB->addItem( "Gamma 2.2" );
  mappingMethodCB->addItem( "Gamma 2.6" );
  mappingMethodCB->addItem( "Logarithmic" );
  mappingMethodCB->setCurrentIndex( 3 );
  connect( mappingMethodCB, SIGNAL( activated( int ) ), this, SLOT( setLumMappingMethod(int) ) );
  toolBar->addWidget( mappingMethodLabel );
  toolBar->addWidget( mappingMethodCB );
  // addToolBar( Qt::BottomToolBarArea, toolBar );

  // Histogram / luminance range toolbar.
  QToolBar *toolBarLR = addToolBar( tr( "Histogram" ) );
  lumRange = new LuminanceRangeWidget( toolBarLR );
  connect( lumRange, SIGNAL( updateRangeWindow() ), this, SLOT( updateRangeWindow() ) );
  toolBarLR->addWidget( lumRange );
  // addToolBar( toolBar );

  // Status-bar labels: pointer position/value, zoom and exposure readouts.
  pointerPosAndVal = new QLabel( statusBar() );
  statusBar()->addWidget( pointerPosAndVal );
  // QFont fixedFont = QFont::defaultFont();
  // fixedFont.setFixedPitch( true );
  // pointerPosAndVal->setFont( fixedFont );
  zoomValue = new QLabel( statusBar() );
  statusBar()->addWidget( zoomValue );
  exposureValue = new QLabel( statusBar() );
  statusBar()->addWidget( exposureValue );

  connect( pfsView, SIGNAL(updatePointerValue()), this, SLOT(updatePointerValue()) );

  // "Frame" menu: navigation, save/copy, quit.
  QMenu *frameMenu = menuBar()->addMenu( tr( "&Frame" ) );
  frameMenu->addAction( nextFrameAct );
  frameMenu->addAction( previousFrameAct );
  frameMenu->addSeparator();
  frameMenu->addAction( "&Save image...", this, SLOT(saveImage()), QKeySequence::Save );
  frameMenu->addAction( "&Copy image to clipboard", this, SLOT(copyImage()), QKeySequence::Copy );
  frameMenu->addSeparator();
  frameMenu->addAction( "&Quit", qApp, SLOT(quit()), Qt::Key_Q ); //QKeySequence::Quit
  QShortcut *shortcut = new QShortcut( QKeySequence::Close, this );
  connect( shortcut, SIGNAL(activated()), qApp, SLOT(quit()) );

  // "View" menu: zoom, NaN/Inf handling, color clipping, negatives, coordinates.
  QAction *act;
  QMenu *viewMenu = menuBar()->addMenu( tr( "&View" ) );

  act = viewMenu->addAction( "&Zoom in", pfsView, SLOT(zoomIn()), Qt::Key_Period ); // QKeySequence::ZoomIn -- not doing it -- silly binding under Linux
  connect( act, SIGNAL(triggered()), this, SLOT(updateZoomValue()) );
  act = viewMenu->addAction( "Zoom &out", pfsView, SLOT(zoomOut()), Qt::Key_Comma );
  connect( act, SIGNAL(triggered()), this, SLOT(updateZoomValue()) );
  act = viewMenu->addAction( "Zoom &1:1", pfsView, SLOT(zoomOriginal()), Qt::Key_N );
  connect( act, SIGNAL(triggered()), this, SLOT(updateZoomValue()) );
  viewMenu->addAction( "&Fit window to content", this, SLOT(updateViewSize()), Qt::Key_C );
  viewMenu->addSeparator();

  // NaN/Inf treatment submenu (exclusive group; "Mark with red" default).
  QMenu *infnanMenu = viewMenu->addMenu( "NaN and &Inf values" );
  QActionGroup *infnanActGrp = new QActionGroup( this );
  infnanActGrp->setExclusive( true );
  QAction *infnanHideAct = new QAction( tr( "&Hide" ), this );
  infnanHideAct->setCheckable(true);
  infnanHideAct->setData(0);
  infnanActGrp->addAction( infnanHideAct );
  infnanMenu->addAction( infnanHideAct );
  QAction *infnanMarkAct = new QAction( tr( "Mark with &red color" ), this );
  infnanMarkAct->setCheckable(true);
  infnanMarkAct->setData(1);
  infnanActGrp->addAction( infnanMarkAct );
  infnanMenu->addAction( infnanMarkAct );
  infnanMarkAct->setChecked( true );
  connect( infnanActGrp, SIGNAL(triggered(QAction*)), pfsView, SLOT(setInfNaNTreatment(QAction*)) );

  // Color clipping submenu (exclusive group; "Simple clipping" default).
  QMenu *colorClipMenu = viewMenu->addMenu( "&Color clipping" );
  QActionGroup *colorClipActGrp = new QActionGroup( this );
  colorClipActGrp->setExclusive( true );
  QAction *colorClipSimpleAct = new QAction( tr( "&Simple clipping" ), this );
  colorClipSimpleAct->setCheckable(true);
  colorClipSimpleAct->setData(CLIP_SIMPLE);
  colorClipSimpleAct->setShortcut( Qt::CTRL + Qt::Key_H );
  colorClipActGrp->addAction( colorClipSimpleAct );
  colorClipMenu->addAction( colorClipSimpleAct );
  QAction *colorClipCodedAct = new QAction( tr( "&Color-coded clipping" ), this );
  colorClipCodedAct->setCheckable(true);
  colorClipCodedAct->setShortcut( Qt::CTRL + Qt::Key_J );
  colorClipCodedAct->setData(CLIP_COLORCODED);
  colorClipActGrp->addAction( colorClipCodedAct );
  colorClipMenu->addAction( colorClipCodedAct );
  QAction *colorClipBriHueAct = new QAction( tr( "&Keep brightness and hue" ), this );
  colorClipBriHueAct->setCheckable(true);
  colorClipBriHueAct->setShortcut( Qt::CTRL + Qt::Key_K );
  colorClipBriHueAct->setData(CLIP_KEEP_BRI_HUE);
  colorClipActGrp->addAction( colorClipBriHueAct );
  colorClipMenu->addAction( colorClipBriHueAct );
  colorClipSimpleAct->setChecked( true );
  connect( colorClipActGrp, SIGNAL(triggered(QAction*)), pfsView, SLOT(setRGBClippingMethod(QAction*)) );

  // Negative-value treatment submenu (exclusive group; "Black" default).
  QMenu *negativeMenu = viewMenu->addMenu( "&Negative values" );
  QActionGroup *negativeActGrp = new QActionGroup( this );
  negativeActGrp->setExclusive( true );
  act = new QAction( tr( "&Black" ), this );
  act->setCheckable(true);
  act->setData(NEGATIVE_BLACK);
  act->setShortcut( Qt::ALT + Qt::Key_B );
  negativeActGrp->addAction( act );
  negativeMenu->addAction( act );
  act->setChecked( true );
  act = new QAction( tr( "Mark with &red color" ), this );
  act->setCheckable(true);
  act->setData(NEGATIVE_MARK_AS_RED);
  act->setShortcut( Qt::ALT + Qt::Key_R );
  negativeActGrp->addAction( act );
  negativeMenu->addAction( act );
  act = new QAction( tr( "Use &green color scale" ), this );
  act->setCheckable(true);
  act->setData(NEGATIVE_GREEN_SCALE);
  act->setShortcut( Qt::ALT + Qt::Key_G );
  negativeActGrp->addAction( act );
  negativeMenu->addAction( act );
  act = new QAction( tr( "Use &absolute values" ), this );
  act->setCheckable(true);
  act->setData(NEGATIVE_ABSOLUTE);
  act->setShortcut( Qt::ALT + Qt::Key_A );
  negativeActGrp->addAction( act );
  negativeMenu->addAction( act );
  connect( negativeActGrp, SIGNAL(triggered(QAction*)), pfsView, SLOT(setNegativeTreatment(QAction*)) );
  viewMenu->addSeparator();

  // Color coordinates submenu (exclusive group; "RGB" default).
  QMenu *colorCoordMenu = viewMenu->addMenu( "Color coo&rdinates" );
  QActionGroup *colorCoordActGrp = new QActionGroup( this );
  colorCoordActGrp->setExclusive( true );
  act = new QAction( tr( "&RGB" ), this );
  act->setCheckable(true);
  act->setData(CC_RGB);
  act->setShortcut( Qt::SHIFT + Qt::ALT + Qt::Key_R );
  colorCoordActGrp->addAction( act );
  colorCoordMenu->addAction( act );
  act->setChecked( true );
  act = new QAction( tr( "&XYZ" ), this );
  act->setCheckable(true);
  act->setData(CC_XYZ);
  act->setShortcut( Qt::SHIFT + Qt::ALT + Qt::Key_X );
  colorCoordActGrp->addAction( act );
  colorCoordMenu->addAction( act );
  act = new QAction( tr( "Y&u'v'" ), this );
  act->setCheckable(true);
  act->setData(CC_Yupvp);
  act->setShortcut( Qt::SHIFT + Qt::ALT + Qt::Key_U );
  colorCoordActGrp->addAction( act );
  colorCoordMenu->addAction( act );
  act = new QAction( tr( "Yx&y" ), this );
  act->setCheckable(true);
  act->setData(CC_Yxy);
  act->setShortcut( Qt::SHIFT + Qt::ALT + Qt::Key_Y );
  colorCoordActGrp->addAction( act );
  colorCoordMenu->addAction( act );
  connect( colorCoordActGrp, SIGNAL(triggered(QAction*)), this, SLOT(setColorCoord(QAction*)) );

  // "Tone mapping" menu: exposure/range controls plus mapping-function submenu.
  QMenu *mappingMenu = menuBar()->addMenu( tr( "&Tone mapping" ) );
  mappingMenu->addAction( "Increase exposure", lumRange, SLOT(increaseExposure()), Qt::Key_Minus );
  mappingMenu->addAction( "Decrease exposure", lumRange, SLOT(decreaseExposure()), Qt::Key_Equal );
  mappingMenu->addAction( "Extend dynamic range", lumRange, SLOT(extendRange()), Qt::Key_BracketRight );
  mappingMenu->addAction( "Shrink dynamic range", lumRange, SLOT(shrinkRange()), Qt::Key_BracketLeft );
  mappingMenu->addAction( "Fit to dynamic range", lumRange, SLOT(fitToDynamicRange()), Qt::Key_Backslash );
  mappingMenu->addAction( "Low dynamic range", lumRange, SLOT(lowDynamicRange()), Qt::ALT + Qt::Key_L );

  // Mapping-function submenu; entries mirror mappingMethodCB ("Gamma 2.2" default).
  QMenu *mapfuncMenu = mappingMenu->addMenu( "&Mapping function" );
  QActionGroup *mapfuncActGrp = new QActionGroup( this );
  mapfuncActGrp->setExclusive( true );
  mappingAct[0] = act = new QAction( tr( "&Linear" ), this );
  act->setCheckable(true);
  act->setData(0);
  act->setShortcut( Qt::Key_L );
  mapfuncActGrp->addAction( act );
  mapfuncMenu->addAction( act );
  mappingAct[1] = act = new QAction( tr( "Gamma 1.&4" ), this );
  act->setCheckable(true);
  act->setData(1);
  act->setShortcut( Qt::Key_1 );
  mapfuncActGrp->addAction( act );
  mapfuncMenu->addAction( act );
  mappingAct[2] = act = new QAction( tr( "Gamma 1.&8" ), this );
  act->setCheckable(true);
  act->setData(2);
  act->setShortcut( Qt::Key_2 );
  mapfuncActGrp->addAction( act );
  mapfuncMenu->addAction( act );
  mappingAct[3] = act = new QAction( tr( "Gamma 2.&2" ), this );
  act->setCheckable(true);
  act->setData(3);
  act->setChecked( true );
  act->setShortcut( Qt::Key_3 );
  mapfuncActGrp->addAction( act );
  mapfuncMenu->addAction( act );
  mappingAct[4] = act = new QAction( tr( "Gamma 2.&6" ), this );
  act->setCheckable(true);
  act->setData(4);
  act->setShortcut( Qt::Key_4 );
  mapfuncActGrp->addAction( act );
  mapfuncMenu->addAction( act );
  mappingAct[5] = act = new QAction( tr( "L&ogarithmic" ), this );
  act->setCheckable(true);
  act->setData(5);
  act->setShortcut( Qt::Key_O );
  mapfuncActGrp->addAction( act );
  mapfuncMenu->addAction( act );
  connect( mapfuncActGrp, SIGNAL(triggered(QAction*)), this, SLOT(setLumMappingMethod(QAction*)) );

  // "Help" menu.
  QMenu *helpMenu = menuBar()->addMenu( tr( "&Help" ) );
  helpMenu->addAction( "&About", this, SLOT(showAboutdialog()) );

  colorCoord = CC_RGB;

  //Window should not be larger than desktop
  // TODO: how to find desktop size - gnome taksbars
  // setMaximumSize( QApplication::desktop()->width(), QApplication::desktop()->height() );

  // Load the first frame; abort window construction on failure.
  try {
    if( !readNextFrame() )
      throw PFSViewException();
    if( window_min < window_max )
      lumRange->setRangeWindowMinMax( window_min, window_max );
  }
  catch( pfs::Exception &ex ) {
    // FIX: catch by reference (was by value) to avoid slicing derived
    // exception types.
    QMessageBox::critical( this, "pfsview error", ex.getMessage() );
    throw PFSViewException();
  }
}
VideoStream::VideoStream(const std::string& filename, unsigned int numFrameBuffered, GLenum minFilter, GLenum magFilter, GLenum sWrapping, GLenum tWrapping, int maxLevel) : __ReadOnly_ComponentLayout(declareLayout(numFrameBuffered)), InputDevice(declareLayout(numFrameBuffered), "Reader"), idVideoStream(0), readFrameCount(0), timeStampFrameRate(1.0f), timeStampOffset(0), timeStampOfLastFrameRead(0), endReached(false), pFormatCtx(NULL), pCodecCtx(NULL), pCodec(NULL), pFrame(NULL), pFrameRGB(NULL), buffer(NULL), pSWSCtx(NULL), idCurrentBufferForWritting(0) { #ifdef __USE_PBO__ #ifdef __VIDEO_STREAM_VERBOSE__ std::cout << "VideoStream::VideoStream - Using PBO for uploading data to the GPU." << std::endl; #endif pbo = NULL; #else #ifdef __VIDEO_STREAM_VERBOSE__ std::cout << "VideoStream::VideoStream - Using standard method HdlTexture::write for uploading data to the GPU." << std::endl; #endif #endif int retCode = 0; // Open stream : //DEPRECATED IN libavformat : retCode = av_open_input_file(&pFormatCtx, filename.c_str(), NULL, 0, NULL)!=0); retCode = avformat_open_input(&pFormatCtx, filename.c_str(), NULL, NULL); if(retCode!=0) throw Exception("VideoStream::VideoStream - Failed to open stream (at av_open_input_file).", __FILE__, __LINE__); // Find stream information : //DEPRECATED : retCode = av_find_stream_info(pFormatCtx); retCode = avformat_find_stream_info(pFormatCtx, NULL); if(retCode<0) throw Exception("VideoStream::VideoStream - Failed to open stream (at av_find_stream_info).", __FILE__, __LINE__); // Walk through pFormatCtx->nb_streams to find a/the first video stream : for(idVideoStream=0; idVideoStream<pFormatCtx->nb_streams; idVideoStream++) //DEPRECATED : if(pFormatCtx->streams[idVideoStream]->codec->codec_type==CODEC_TYPE_VIDEO) if(pFormatCtx->streams[idVideoStream]->codec->codec_type==AVMEDIA_TYPE_VIDEO) break; if(idVideoStream>=pFormatCtx->nb_streams) throw Exception("VideoStream::VideoStream - Failed to find video stream (at 
streams[idVideoStream]->codec->codec_type==CODEC_TYPE_VIDEO).", __FILE__, __LINE__); // Get a pointer to the codec context for the video stream : pCodecCtx = pFormatCtx->streams[idVideoStream]->codec; // Find the decoder for the video stream : pCodec = avcodec_find_decoder(pCodecCtx->codec_id); if(pCodec==NULL) throw Exception("VideoStream::VideoStream - No suitable codec found (at avcodec_find_decoder).", __FILE__, __LINE__); // Open codec : //DEPRECATED : retCode = avcodec_open(pCodecCtx, pCodec); retCode = avcodec_open2(pCodecCtx, pCodec, NULL); if(retCode<0) throw Exception("VideoStream::VideoStream - Could not open codec (at avcodec_open).", __FILE__, __LINE__); // Get the framerate : /*float timeUnit_sec = static_cast<float>(pCodecCtx->time_base.num)/static_cast<float>(pCodecCtx->time_base.den); frameRate = 1.0f/(pCodecCtx->timeUnit_sec;*/ timeStampFrameRate = static_cast<float>(pFormatCtx->streams[idVideoStream]->time_base.den)/static_cast<float>(pFormatCtx->streams[idVideoStream]->time_base.num); // Get the duration : duration_sec = pFormatCtx->duration / AV_TIME_BASE; #ifdef __VIDEO_STREAM_VERBOSE__ std::cout << "VideoStream::VideoStream" << std::endl; std::cout << " - Frame rate : " << timeStampFrameRate << " frames per second (for time stamps)" << std::endl; std::cout << " - Duration : " << duration_sec << " second(s)" << std::endl; #endif // Allocate video frame : pFrame = avcodec_alloc_frame(); // Allocate an AVFrame structure : pFrameRGB = avcodec_alloc_frame(); if(pFrameRGB==NULL) throw Exception("VideoStream::VideoStream - Failed to open stream (at avcodec_alloc_frame).", __FILE__, __LINE__); // Determine required buffer size and allocate buffer : bufferSizeBytes = avpicture_get_size(PIX_FMT_RGB24, pCodecCtx->width, pCodecCtx->height); buffer = (uint8_t *)av_malloc(bufferSizeBytes*sizeof(uint8_t)); #ifdef __VIDEO_STREAM_VERBOSE__ std::cout << "VideoStream::VideoStream - Frame size : " << pCodecCtx->width << "x" << pCodecCtx->height << std::endl; 
#endif if(buffer==NULL) throw Exception("VideoStream::VideoStream - Unable to allocate video frame buffer.", __FILE__, __LINE__); // Assign appropriate parts of buffer to image planes in pFrameRGB (Note that pFrameRGB is an AVFrame, but AVFrame is a superset of AVPicture) : avpicture_fill( (AVPicture *)pFrameRGB, buffer, PIX_FMT_RGB24, pCodecCtx->width, pCodecCtx->height); // Initialize libswscale : pSWSCtx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height, PIX_FMT_RGB24, SWS_POINT, NULL, NULL, NULL); // Create format : HdlTextureFormat frameFormat(pCodecCtx->width, pCodecCtx->height, GL_RGB, GL_UNSIGNED_BYTE, minFilter, magFilter, sWrapping, tWrapping, 0, maxLevel); // Create the texture : for(unsigned int i=0; i<numFrameBuffered; i++) { //old : addOutputPort("output" + to_string(i)); textureBuffers.push_back( new HdlTexture(frameFormat) ); // YOU MUST WRITE ONCE IN THE TEXTURE BEFORE USING PBO::copyToTexture ON IT. // We are also doing this to prevent reading from an empty (not-yet-allocated) texture. textureBuffers.back()->fill(0); // Set links : setTextureLink(textureBuffers.back(), i); } #ifdef __USE_PBO__ // Create PBO for uplodaing data to GPU : pbo = new HdlPBO(frameFormat, GL_PIXEL_UNPACK_BUFFER_ARB,GL_STREAM_DRAW_ARB); #endif // Finish by forcing read of first frame : readNextFrame(); }