// Returns the current playback time in seconds.  While playing, the value
// tracks the audio clock; while paused, the last value sampled before the
// pause is returned unchanged.
double FFmpegClocks::getCurrentTime()
{
    if (m_paused)
        return m_last_current_time;   // clock frozen during pause

    m_last_current_time = getAudioTime();
    return m_last_current_time;
}
// Computes how long the caller should wait before presenting the video frame
// whose presentation timestamp is `pts`, syncing video to the audio clock.
// Returns the actual delay in seconds (negative when the frame is already
// late).  All clock state is read/written under m_mutex.
double FFmpegClocks::videoRefreshSchedule(const double pts)
{
    ScopedLock lock(m_mutex);

    // DEBUG
    // std::cerr << "ftime / dpts / delay / audio_time / adelay: ";

    // Nominal frame duration: PTS difference to the previous frame.
    double delay = pts - m_last_frame_pts;

    // std::cerr << m_frame_time << " / ";
    // std::cerr << delay << " / ";

    // If incorrect delay, use previous one
    // (non-monotonic PTS or an implausibly large step, e.g. after a seek).
    if (delay <= 0.0 || delay >= 1.0)
    {
        delay = m_last_frame_delay;

        if (!m_audio_disabled)
            m_frame_time = pts - delay;   // re-anchor the video clock to this PTS
    }

    // Save for next time
    m_last_frame_delay = delay;
    m_last_frame_pts = pts;

    // Update the delay to synch to the audio stream
    // Ideally the frame time should be incremented after the actual delay is computed.
    // But because of the sound latency, it seems better to keep some latency in the video too.
    m_frame_time += delay;

    const double audio_time = getAudioTime();

    // Bound the wait so one bad timestamp cannot stall or rush playback:
    // at most 2.5 frame durations of waiting, at most half a frame early.
    const double actual_delay = clamp(m_frame_time - audio_time,
                                      -0.5 * delay, 2.5 * delay);
    // m_frame_time += delay;

    // DEBUG
    // std::cerr << delay << " / ";
    // std::cerr << audio_time << " / ";
    // std::cerr << actual_delay << std::endl;

    m_last_actual_delay = actual_delay;
    return actual_delay;
}
// Records a seek by accumulating the offset between the current audio clock
// and the requested seek position into m_seek_time.
void FFmpegClocks::setSeekTime(double seek_time)
{
    const double offset = getAudioTime() - seek_time;
    m_seek_time += offset;
}
// Video playback loop (SDL thread entry; `userdata` is unused): decodes
// frames, parks frames that arrive early in a small cache, and always draws
// the pending frame with the earliest timestamp, using the audio clock as
// the master clock.  Runs until the stream ends or `status` leaves
// AVI_PLAYING.  Always returns 0.
int AVIWrapper::playVideo(void* userdata)
{
    int i;

    // Small pool of decoded-but-not-yet-due frames; each slot owns its own
    // copy of the image plus that frame's presentation time.
    struct {
        bool valid;
        CImage* image;
        double time;
    } cache[NUM_CACHES];

    for (i = 0; i < NUM_CACHES; i++) {
        cache[i].valid = false;
    }
    int remaining_cache = NUM_CACHES;   // number of free cache slots

    while (status == AVI_PLAYING && !v_stream->Eof()) {
        CImage* image = v_stream->GetFrame(true);
        if (image == NULL) break;

        double current_time = v_stream->GetTime();
        double minimum_time = current_time;   // earliest PTS among current frame and cache

        // look for the nearest in the cache
        // (the valid cached frame with the smallest timestamp; `&&` binds
        //  tighter than `||`, so the time comparison only runs once
        //  nearest_cache has been set)
        int nearest_cache = -1;
        for (i = 0; i < NUM_CACHES; i++) {
            if (cache[i].valid) {
                if (nearest_cache == -1 ||
                    nearest_cache >= 0 && cache[i].time < cache[nearest_cache].time) {
                    nearest_cache = i;
                    if (minimum_time > cache[nearest_cache].time)
                        minimum_time = cache[nearest_cache].time;
                }
            }
        }

        // async < 0: the earliest pending frame is still early; > 0: it is late.
        double async = getAudioTime() - minimum_time;
        //printf("audio %f (%f - %f) minimum %d %f cur %f\n", async, getAudioTime(), minimum_time, nearest_cache, minimum_time, current_time );

        if (async < -0.01) {
            if (remaining_cache == 0) {
                // Frame is early but the cache is full: sleep until it is due.
                //printf("sync0 %f %f %f %f\n", async, (a_stream)?a_stream->GetTime():0.0, v_stream->GetTime(), minimum_time );
                SDL_Delay((int) (-async * 1000));
            }
        }

        if (async < -0.01 && remaining_cache > 0 || nearest_cache >= 0) {
            // add cache
            // (copy the current frame into the first free slot; silently a
            //  no-op when no slot is free)
            for (i = 0; i < NUM_CACHES; i++) {
                if (cache[i].valid == false) {
                    cache[i].valid = true;
                    cache[i].image = new CImage(image);
                    cache[i].time = current_time;
                    remaining_cache--;
                    break;
                }
            }
            if (async < -0.01) {
                // Nothing is due yet; the frame (if cached) will be drawn later.
                image->Release();
                continue;
            }
        }

        if (nearest_cache >= 0 && minimum_time == cache[nearest_cache].time) {
            // A cached frame is the earliest pending one: draw it and free its slot.
            //printf("draw cache %d %f\n", nearest_cache, cache[nearest_cache].time );
            //if ( async <= 0.033 ) // drop frame if necessary
            drawFrame(cache[nearest_cache].image);
            cache[nearest_cache].image->Release();
            cache[nearest_cache].valid = false;
            remaining_cache++;
        }
        else {
            // The freshly decoded frame is the earliest: draw it directly.
            // NOTE(review): when nearest_cache >= 0, this same frame was also
            // copied into the cache above and may be drawn again on a later
            // iteration — confirm this duplication is intended.
            //printf("draw real %f\n", current_time );
            //if ( async <= 0.033 ) // drop frame if necessary
            drawFrame(image);
        }
        image->Release();
    }

    status = AVI_STOP;

    // Release any frames still parked in the cache.
    for (i = 0; i < NUM_CACHES; i++)
        if (cache[i].valid)
            cache[i].image->Release();

    return 0;
}