void GStreamerWrapper::stop() { if ( m_GstPipeline != NULL ) { // "Hack" for stopping ... gst_element_set_state( m_GstPipeline, GST_STATE_PAUSED ); if ( m_PlayDirection == FORWARD ) seekFrame( 0 ); else if ( m_PlayDirection == BACKWARD ) seekFrame( m_iDurationInNs ); m_CurrentPlayState = STOPPED; } }
//============================================================================== bool cVideo::getFrame(int a_index, cImage &a_image) { // find frame at requested index bool newFrame = seekFrame(a_index); // check if image needs to be initialized if (!a_image.isInitialized() || a_image.getWidth() != m_width || a_image.getHeight() != m_height || a_image.getFormat() != GL_RGB || a_image.getType() != GL_UNSIGNED_BYTE) { a_image.allocate(m_width, m_height, GL_RGB, GL_UNSIGNED_BYTE); if (a_image.setProperties(m_width, m_height, GL_RGB, GL_UNSIGNED_BYTE) == false) { return false; } } // copy the frame buffer to the image if (!memcpy (a_image.getData(), m_data, 3*m_width*m_height)) { return false; } return newFrame; }
/*********************************
 * RSAT_decoder_init:
 * Decoder initialization routine.
 */
bin_state *RSAT_decoder_init(char *inN, char *outN, readPulseFunc *reader)
{
    bin_state  *s = new_bin_state();
    RSAT_frame  aux_frame;
    RSAT_frame *frame;

    asfPrintStatus(" Initializing RSAT decoder...\n");

    /* Hand the caller our pulse-reading routine and open the input file. */
    *reader = RSAT_readNextPulse;
    RSAT_init(s);
    openBinary(s, inN);

    /* Skip forward until the first auxiliary record that is imaging.
       NOTE(review): the is_aux flag is read through the pointer returned by
       RSAT_readNextFrame, exactly as the original did -- presumably it points
       at aux_frame, but that is not guaranteed from here. */
    do {
        frame = RSAT_readNextFrame(s, &aux_frame);
    } while (frame->is_aux != 1 || 0 == RSAT_auxIsImaging(&aux_frame.aux));

    /* Update satellite parameters based on the auxiliary data record. */
    RSAT_auxUpdate(&aux_frame.aux, s);

    /* Write pulse replica. */
    RSAT_writeReplica(s, outN, 1.0);

    seekFrame(s, 0);
    return s;
}
/** \brief Seek to millisecond **/ bool DecodeThread::seekMs(int tsms) { //printf("**** SEEK TO ms %d. LLT: %d. LT: %d. LLF: %d. LF: %d. LastFrameOk: %d\n",tsms,LastLastFrameTime,LastFrameTime,LastLastFrameNumber,LastFrameNumber,(int)LastFrameOk); cout << "tsms" << tsms << endl; // Convert time into frame number qint64 DesiredFrameNumber = av_rescale(tsms,pFormatCtx->streams[videoStream]->time_base.den,pFormatCtx->streams[videoStream]->time_base.num); DesiredFrameNumber/=1000; return seekFrame(DesiredFrameNumber); }
void TimeLine::goToNextKeyframe() { ///runs only in the main thread assert( QThread::currentThread() == qApp->thread() ); _keyframes.sort(); std::list<SequenceTime>::iterator upperBound = std::upper_bound(_keyframes.begin(), _keyframes.end(), _currentFrame); if ( upperBound != _keyframes.end() ) { seekFrame(*upperBound,NULL,Natron::PLAYBACK_SEEK); } }
void VideoDecoder::seekFile(int ms) { qDebug() << "seekFile();"; // avformat_seek_file(formatContext, audioStream,INT64_MIN, ms, INT64_MAX, 0 ); if(!init) return; int64_t desiredFrameNumber = av_rescale(ms, videoFormatContext->streams[videoStream]->time_base.den, videoFormatContext->streams[videoStream]->time_base.num); desiredFrameNumber/=1000; seekFrame(desiredFrameNumber); baseTime = 0; }
/*
 * Seek to a normalized position in the media.
 *
 * fPos -- position as a fraction of the total duration, clamped to [0, 1].
 */
void GStreamerWrapper::setPosition( float fPos )
{
    // Clamp the normalized position to [0, 1].
    if( fPos < 0.0 )
        fPos = 0.0;
    else if( fPos > 1.0 )
        fPos = 1.0;

    // BUG FIX: the millisecond position was previously computed as
    // fPos * m_dCurrentTimeInMs, i.e. a fraction of the *current* time
    // rather than of the total duration -- the frame and nanosecond lines
    // below correctly scale totals. Derive the millisecond value from the
    // nanosecond duration instead (1 ms = 1,000,000 ns).
    m_dCurrentTimeInMs    = fPos * m_iDurationInNs / 1000000.0;
    m_iCurrentFrameNumber = fPos * m_iNumberOfFrames;
    m_iCurrentTimeInNs    = fPos * m_iDurationInNs;

    seekFrame( m_iCurrentTimeInNs );
}
/** \brief Seek to millisecond **/ bool QVideoDecoder::seekMs(int tsms) { if(!ok) return false; //printf("**** SEEK TO ms %d. LLT: %d. LT: %d. LLF: %d. LF: %d. LastFrameOk: %d\n",tsms,LastLastFrameTime,LastFrameTime,LastLastFrameNumber,LastFrameNumber,(int)LastFrameOk); // Convert time into frame number DesiredFrameNumber = ffmpeg::av_rescale(tsms,pFormatCtx->streams[videoStream]->time_base.den,pFormatCtx->streams[videoStream]->time_base.num); DesiredFrameNumber/=1000; return seekFrame(DesiredFrameNumber); }
void seek(int nDiff) { if (!isPlayerOn()) { displayMessage("Seeking is only supported in playback mode!"); return; } seekFrame(nDiff); // now step the last one (that way, if seek is not supported, as in sensor, at least one frame // will be read). g_bPause = false; g_bStep = true; }
//============================================================================== bool cVideo::getFramePointer(int a_index, cImage &a_image) { // find frame at requested index bool newFrame = seekFrame(a_index); // set image data to point to the actual frame buffer a_image.setData(m_data, 3*m_width*m_height, false); // set image properties if (a_image.setProperties(m_width, m_height, GL_RGB, GL_UNSIGNED_BYTE) == false) { return false; } return newFrame; }
//============================================================================== bool cVideo::seek(double a_time) { return seekFrame((int)(a_time*m_fps)); }
void GStreamerWrapper::setTimePositionInNs( gint64 iTargetTimeInNs )
{
    // Record the requested nanosecond position, then jump the pipeline to it.
    m_iCurrentTimeInNs = iTargetTimeInNs;
    seekFrame( iTargetTimeInNs );
}
void GStreamerWrapper::setTimePositionInMs( double dTargetTimeInMs )
{
    // Record the millisecond position, then seek in nanoseconds
    // (1 ms = 1,000,000 ns).
    m_dCurrentTimeInMs = dTargetTimeInMs;
    seekFrame( dTargetTimeInMs * 1000000 );
}