/// Reads one frame from the V4L device into @p frame.
///
/// Dequeues a driver buffer, validates its size against the expected
/// plane layout, wraps the planes in a temporary Frame and converts it
/// into the caller-supplied frame.
///
/// @param frame destination frame; receives the converted image.
/// @throws IOException if the source is not open or the dequeued buffer
///         has an unexpected size.
void V4LFrameSource::readFrame(Frame *frame)
{
    if (!isOpen())
        throw IOException("open() must be called before readFrame()");

    int bufIdx = 0;
    // Driver-provided timestamp; currently unused — the frame is stamped
    // with wall-clock time below. NOTE(review): consider using usecs instead.
    int64_t usecs;
    const int bufSize = getFrame(&bufIdx, &usecs);

    const int srcLineSize = PixelFormat::s_calcByteWidth(m_srcFormat, 0, m_width);
    const int srcBytesPerPlane = getHeight() * srcLineSize;
    const size_t nbsrc = srcBytesPerPlane * m_srcFormat.getNumPlanes();

    if (static_cast<int>(nbsrc) != bufSize) {
        std::cerr << "expected size: " << nbsrc << " but got: " << bufSize << std::endl;
        // BUGFIX: return the dequeued buffer to the driver before throwing,
        // otherwise the buffer slot is leaked on every size mismatch.
        releaseFrame(bufIdx);
        throw IOException("got buffer of unexpected size!");
    }

    // Build a temporary frame whose planes point directly into the
    // driver buffer (zero-copy until the conversion below).
    Frame f;
    f.getFormat().setWidth(m_width);
    f.getFormat().setHeight(m_height);
    f.getFormat().setPixelFormat(m_srcFormat);
    f.setTimestamp(Time::currentTimeMicros());
    f.setFrameNumber(-1); // no frame numbering for live capture

    for (int i = 0; i < m_srcFormat.getNumPlanes(); i++) {
        uint8_t *bufp =
            static_cast<uint8_t *>(m_buffer[bufIdx].ptr) + srcBytesPerPlane * i;
        f.setPlane(i, Frame::Plane(bufp, srcLineSize));
    }

    // BUGFIX: guarantee the buffer is requeued even if the conversion
    // throws; the source planes alias the driver buffer, so it must stay
    // valid for the duration of convert() and be released afterwards.
    try {
        m_converter.convert(frame, f);
    } catch (...) {
        releaseFrame(bufIdx);
        throw;
    }
    releaseFrame(bufIdx);
}
/** * End top of the stack. */ void Profiler::endFrame(const TypedValue *retval, const char *symbol, bool endMain) { if (m_stack) { // special case for main() frame that's only ended by endAllFrames() if (!endMain && m_stack->m_parent == nullptr) { return; } endFrameEx(retval, symbol); m_func_hash_counters[m_stack->m_hash_code]--; releaseFrame(); } }
/// Command handler: runs the frame-finish task on the pipe thread and
/// enforces that both the local unlock and the global release of the
/// frame have actually happened before committing object changes.
bool Pipe::_cmdFrameFinish( co::ICommand& cmd )
{
    LB_TS_THREAD( _pipeThread );
    co::ObjectICommand objCommand( cmd );
    const uint128_t id = objCommand.get< uint128_t >();
    const uint32_t number = objCommand.get< uint32_t >();

    LBLOG( LOG_TASKS ) << "---- TASK finish frame --- " << objCommand
                       << " frame " << number << " id " << id << std::endl;

    LBASSERTINFO( _impl->currentFrame >= number,
                  "current " <<_impl->currentFrame << " finish " <<number);

    frameFinish( id, number );

    LBASSERTINFO( _impl->finishedFrame >= number,
                  "Pipe::frameFinish() did not release frame " << number );

    // The task method should have unlocked the frame locally; if the
    // application forgot, do it here so the pipeline does not stall.
    const bool locallyUnlocked = !( _impl->unlockedFrame < number );
    if( !locallyUnlocked )
    {
        LBWARN << "Finished frame was not locally unlocked, enforcing unlock"
               << std::endl << " unlocked " << _impl->unlockedFrame.get()
               << " done " << number << std::endl;
        releaseFrameLocal( number );
    }

    // Same safety net for the global release.
    if( _impl->finishedFrame < number )
    {
        LBWARN << "Finished frame was not released, enforcing unlock"
               << std::endl;
        releaseFrame( number );
    }

    _releaseViews();

    // Push any local object changes back to the sender.
    const uint128_t version = commit();
    if( version != co::VERSION_NONE )
        send( objCommand.getNode(), fabric::CMD_OBJECT_SYNC );

    return true;
}
void Node::_frameFinish( const uint128_t& frameID, const uint32_t frameNumber ) { frameFinish( frameID, frameNumber ); LBLOG( LOG_TASKS ) << "---- Finished Frame --- " << frameNumber << std::endl; if( _unlockedFrame < frameNumber ) { LBWARN << "Finished frame was not locally unlocked, enforcing unlock" << std::endl; releaseFrameLocal( frameNumber ); } if( _finishedFrame < frameNumber ) { LBWARN << "Finished frame was not released, enforcing unlock" << std::endl; releaseFrame( frameNumber ); } }
void Pipe::frameFinish( const uint128_t&, const uint32_t frameNumber ) { const Node* node = getNode(); switch( node->getIAttribute( Node::IATTR_THREAD_MODEL )) { case ASYNC: // released in frameStart break; case DRAW_SYNC: // released in frameDrawFinish break; case LOCAL_SYNC: // release releaseFrameLocal( frameNumber ); break; default: LBUNIMPLEMENTED; } // Global release releaseFrame( frameNumber ); }
/// Default frame-finish task for a window: release the frame globally,
/// flush pending rendering commands, and refresh the FPS statistics.
void Window::frameFinish( const uint128_t&, const uint32_t frameNumber )
{
    releaseFrame( frameNumber );
    flush();
    _updateFPS();
}
/// Default frame-finish task for a node: only the global release is needed.
void Node::frameFinish( const uint128_t&, const uint32_t frameNumber )
{
    releaseFrame( frameNumber );
}
// image calculation
// load frame from video
//
// Advances playback time, handles end-of-range / repeat logic, then grabs,
// decodes and processes the frame corresponding to the current time.
// texId is unused here; ts is an optional externally-supplied timestamp
// (e.g. from audio actuators) in seconds, honored only for file sources.
void VideoFFmpeg::calcImage (unsigned int texId, double ts)
{
	if (m_status == SourcePlaying)
	{
		// get actual time
		double startTime = PIL_check_seconds_timer();
		double actTime;
		// timestamp passed from audio actuators can sometimes be slightly negative
		if (m_isFile && ts >= -0.5)
		{
			// allow setting timestamp only when not streaming
			actTime = ts;
			if (actTime * actFrameRate() < m_lastFrame)
			{
				// user is asking to rewind, force a cache clear to make sure we will do a seek
				// note that this does not decrement m_repeat if ts didn't reach m_range[1]
				stopCache();
			}
		}
		else
		{
			// first frame of a stream: anchor the clock to now
			if (m_lastFrame == -1 && !m_isFile)
				m_startTime = startTime;
			actTime = startTime - m_startTime;
		}
		// if video has ended
		// NOTE(review): this uses m_frameRate while the rewind test above uses
		// actFrameRate() — presumably equivalent for file sources, but confirm.
		if (m_isFile && actTime * m_frameRate >= m_range[1])
		{
			// in any case, this resets the cache
			stopCache();
			// if repeats are set, decrease them
			if (m_repeat > 0)
				--m_repeat;
			// if video has to be replayed
			if (m_repeat != 0)
			{
				// reset its position: shift both the playback time and the
				// clock anchor back by one full range length
				actTime -= (m_range[1] - m_range[0]) / m_frameRate;
				m_startTime += (m_range[1] - m_range[0]) / m_frameRate;
			}
			// if video has to be stopped, stop it
			else
			{
				m_status = SourceStopped;
				return;
			}
		}
		// actual frame (still images just advance by one)
		long actFrame = (m_isImage) ? m_lastFrame+1 : long(actTime * actFrameRate());
		// if actual frame differs from last frame
		if (actFrame != m_lastFrame)
		{
			AVFrame* frame;
			// get image
			if ((frame = grabFrame(actFrame)) != NULL)
			{
				if (!m_isFile && !m_cacheStarted)
				{
					// streaming without cache: detect synchronization problem
					double execTime = PIL_check_seconds_timer() - startTime;
					if (execTime > 0.005)
					{
						// exec time is too long, it means that the function was blocking
						// resynchronize the stream from this time
						m_startTime += execTime;
					}
				}
				// save actual frame
				m_lastFrame = actFrame;
				// init image, if needed
				init(short(m_codecCtx->width), short(m_codecCtx->height));
				// process image
				process((BYTE*)(frame->data[0]));
				// finished with the frame, release it so that cache can reuse it
				releaseFrame(frame);
				// in case it is an image, automatically stop reading it
				if (m_isImage)
				{
					m_status = SourceStopped;
					// close the file as we don't need it anymore
					release();
				}
			}
			else if (m_isStreaming)
			{
				// we didn't get a frame and we are streaming, this may be due to
				// a delay in the network or because we are getting the frame too fast.
				// In the later case, shift time by a small amount to compensate for a drift
				m_startTime += 0.001;
			}
		}
	}
}