Example #1
void VideoThr::run()
{
	bool skip = false, paused = false, oneFrame = false, useLastDelay = false, lastOSDListEmpty = true, maybeFlush = false;
	double tmp_time = 0.0, sync_last_pts = 0.0, frame_timer = -1.0, sync_timer = 0.0;
	QMutex emptyBufferMutex;
	VideoFrame videoFrame;
	unsigned fast = 0;
	int tmp_br = 0, frames = 0;
	canWrite = true;

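	// Main playback loop: runs until the thread is asked to stop (br)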
	while (!br)
	{
		if (deleteFrame)
		{
			videoFrame.clear();
			frame_timer = -1.0;
			deleteFrame = false;
		}

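		// Take a screenshot of the last decoded frame if one was requested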
		if (doScreenshot && !videoFrame.isEmpty())
		{
			QMetaObject::invokeMethod(this, "screenshot", Q_ARG(VideoFrame, videoFrame));
			doScreenshot = false;
		}

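		// Check for buffered video packets; when paused, buffering or starved of data, wait on the empty-buffer condition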
		const bool mustFetchNewPacket = !filters.readyRead();
		playC.vPackets.lock();
		const bool hasVPackets = playC.vPackets.canFetch();
		if (maybeFlush)
			maybeFlush = playC.endOfStream && !hasVPackets;
		if ((playC.paused && !oneFrame) || (!(maybeFlush || hasVPackets) && mustFetchNewPacket) || playC.waitForData)
		{
			if (playC.paused && !paused)
			{
				QMetaObject::invokeMethod(this, "pause");
				paused = true;
				frame_timer = -1.0;
			}
			playC.vPackets.unlock();

			tmp_br = tmp_time = frames = 0;
			skip = false;
			fast = 0;

			if (!playC.paused)
				waiting = playC.fillBufferB = true;

			emptyBufferMutex.lock();
			playC.emptyBufferCond.wait(&emptyBufferMutex, MUTEXWAIT_TIMEOUT);
			emptyBufferMutex.unlock();

			if (frame_timer != -1.0)
				frame_timer = gettime();

			continue;
		}
		paused = waiting = false;
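		// Fetch a new video packet only when the filter chain cannot produce a frame from what it already has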
		Packet packet;
		if (hasVPackets && mustFetchNewPacket)
			packet = playC.vPackets.fetch();
		else
			packet.ts.setInvalid();
		playC.vPackets.unlock();
		if (playC.nextFrameB)
		{
			skip = playC.nextFrameB = false;
			oneFrame = playC.paused = true;
			fast = 0;
		}
		playC.fillBufferB = true;

		/* Subtitles packet */
		Packet sPacket;
		playC.sPackets.lock();
		if (playC.sPackets.canFetch())
			sPacket = playC.sPackets.fetch();
		playC.sPackets.unlock();

		mutex.lock();
		if (br)
		{
			mutex.unlock();
			break;
		}

		/* Subtitles */
		const double subsPts = playC.frame_last_pts + playC.frame_last_delay - playC.subtitlesSync;
		QList<const QMPlay2_OSD *> osdList, osdListToDelete;
		playC.subsMutex.lock();
		if (sDec) //Image subs (pgssub, dvdsub, ...)
		{
			if (!sDec->decodeSubtitle(sPacket, subsPts, subtitles, W, H))
			{
				osdListToDelete += subtitles;
				subtitles = NULL;
			}
		}
		else
		{
			if (!sPacket.isEmpty())
			{
				const QByteArray sPacketData = QByteArray::fromRawData((const char *)sPacket.data(), sPacket.size());
				if (playC.ass->isASS())
					playC.ass->addASSEvent(sPacketData);
				else
					playC.ass->addASSEvent(Functions::convertToASS(sPacketData), sPacket.ts, sPacket.duration);
			}
			if (!playC.ass->getASS(subtitles, subsPts))
			{
				osdListToDelete += subtitles;
				subtitles = NULL;
			}
		}
		if (subtitles)
		{
			const bool hasDuration = subtitles->duration() >= 0.0;
			if (deleteSubs || (subtitles->isStarted() && subsPts < subtitles->pts()) || (hasDuration && subsPts > subtitles->pts() + subtitles->duration()))
			{
				osdListToDelete += subtitles;
				subtitles = NULL;
			}
			else if (subsPts >= subtitles->pts())
			{
				subtitles->start();
				osdList += subtitles;
			}
		}
		playC.subsMutex.unlock();
		playC.osdMutex.lock();
		if (playC.osd)
		{
			if (deleteOSD || playC.osd->left_duration() < 0)
			{
				osdListToDelete += playC.osd;
				playC.osd = NULL;
			}
			else
				osdList += playC.osd;
		}
		playC.osdMutex.unlock();
		if ((!lastOSDListEmpty || !osdList.isEmpty()) && writer && writer->readyWrite())
		{
			((VideoWriter *)writer)->writeOSD(osdList);
			lastOSDListEmpty = osdList.isEmpty();
		}
		while (!osdListToDelete.isEmpty())
			delete osdListToDelete.takeFirst();
		deleteSubs = deleteOSD = false;
		/**/

		filtersMutex.lock();
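		// If a video flush was requested (e.g. after seeking), drop any frames buffered in the filter chain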
		if (playC.flushVideo)
		{
			filters.clearBuffers();
			frame_timer = -1.0;
		}

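		// Decode the packet (an empty packet at end of stream drains the decoder) and push the result into the filter chain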
		if (!packet.isEmpty() || maybeFlush)
		{
			VideoFrame decoded;
			const int bytes_consumed = dec->decodeVideo(packet, decoded, playC.flushVideo, skip ? ~0 : (fast >> 1));
			if (playC.flushVideo)
			{
				useLastDelay = true; //if seeking
				playC.flushVideo = false;
			}
			if (!decoded.isEmpty())
				filters.addFrame(decoded, packet.ts);
			else if (skip)
				filters.removeLastFromInputBuffer();
			tmp_br += bytes_consumed;
		}

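		// Try to obtain a filtered frame that is ready for display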
		const bool ptsIsValid = filters.getFrame(videoFrame, packet.ts);
		filtersMutex.unlock();

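		// Only frames with a valid timestamp are scheduled; a valid timestamp also re-arms the end-of-stream flush check (maybeFlush)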
		if ((maybeFlush = packet.ts.isValid()))
		{
			if (packet.sampleAspectRatio && lastSampleAspectRatio != -1.0 && fabs(lastSampleAspectRatio - packet.sampleAspectRatio) >= 0.000001) //aspect ratio change
			{
				lastSampleAspectRatio = -1.0; //Needs to be updated later
				emit playC.aRatioUpdate(packet.sampleAspectRatio * (double)W / (double)H); //Sets lastSampleAspectRatio because it calls processParams()
			}
			if (ptsIsValid || packet.ts > playC.pos)
				playC.chPos(packet.ts);

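			// Time to wait before showing this frame; reuse the previous delay right after a seek or when timestamps jump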
			double delay = packet.ts - playC.frame_last_pts;
			if (useLastDelay || delay <= 0.0 || (playC.frame_last_pts <= 0.0 && delay > playC.frame_last_delay))
			{
				delay = playC.frame_last_delay;
				useLastDelay = false;
			}

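			// Accumulate decoded bytes and frame count; report bitrate and FPS roughly once per second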
			tmp_time += delay * 1000.0;
			frames += 1000;
			if (tmp_time >= 1000.0)
			{
				emit playC.updateBitrate(-1, round((tmp_br << 3) / tmp_time), frames / tmp_time);
				frames = tmp_br = tmp_time = 0;
			}

			delay /= playC.speed;

			playC.frame_last_delay = delay;
			playC.frame_last_pts = packet.ts;

			if (playC.skipAudioFrame < 0.0)
				playC.skipAudioFrame = 0.0;

			const double true_delay = delay;

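			// Synchronise video to the audio clock: estimate the drift and adjust the delay accordingly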
			if (syncVtoA && playC.audio_current_pts > 0.0 && !oneFrame)
			{
				double sync_pts = playC.audio_current_pts;
				if (sync_last_pts == sync_pts)
					sync_pts += gettime() - sync_timer;
				else
				{
					sync_last_pts = sync_pts;
					sync_timer = gettime();
				}

				const double diff = packet.ts - (delay + sync_pts - playC.videoSync);
				const double sync_threshold = qMax(delay, playC.audio_last_delay);
				const double max_threshold = sync_threshold < 0.1 ? 0.125 : sync_threshold * 1.5;
				const double fDiff = qAbs(diff);

				if (fast && !skip && diff > -sync_threshold / 2.0)
					fast = 0;
				skip = false;

//				qDebug() << "diff" << diff << "sync_threshold" << sync_threshold << "max_threshold" << max_threshold;
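				// Small drift: nudge the delay; large drift: show the frame immediately (possibly skipping decoding) when video is late, or double the delay / skip audio when it is ahead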
				if (fDiff > sync_threshold && fDiff < max_threshold)
				{
					if (diff < 0.0) //the video is lagging behind
					{
						delay -= sync_threshold / 10.0;
// 						qDebug() << "speed up" << diff << delay << sync_threshold;
					}
					else if (diff > 0.0) //the video is running ahead
					{
						delay += sync_threshold / 10.0;
// 						qDebug() << "slow down" << diff << delay << sync_threshold;
					}
				}
				else if (fDiff >= max_threshold)
				{
					if (diff < 0.0) //the video is lagging behind
					{
						delay = 0.0;
						if (fast >= 7)
							skip = true;
					}
					else if (diff > 0.0) //the video is running ahead
					{
						if (diff <= 0.5)
							delay *= 2.0;
						else if (!playC.skipAudioFrame)
							playC.skipAudioFrame = diff;
					}
//					qDebug() << "Skipping" << diff << skip << fast << delay;
				}
			}
			else if (playC.audio_current_pts <= 0.0 || oneFrame)
			{
				skip = false;
				fast = 0;
			}

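			// Present the frame: sleep off the remaining delay, then hand it to the video output writer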
			if (!videoFrame.isEmpty())
			{
				if (frame_timer != -1.0)
				{
					const double delay_diff = gettime() - frame_timer;
					delay -= delay_diff;
					if (syncVtoA && true_delay > 0.0 && delay_diff > true_delay)
						++fast;
					else if (fast && delay > 0.0)
					{
						if (delay > true_delay / 2.0)
							delay /= 2.0;
						if (fast & 1)
							--fast;
					}
					while (delay > 0.0 && !playC.paused && !br && !br2)
					{
						const double sleepTime = qMin(delay, 0.1);
						Functions::s_wait(sleepTime);
						delay -= sleepTime;
					}
				}
				if (!skip && canWrite)
				{
					oneFrame = canWrite = false;
					QMetaObject::invokeMethod(this, "write", Q_ARG(VideoFrame, videoFrame));
				}
				frame_timer = gettime();
			}
			else if (frame_timer != -1.0)
				frame_timer = gettime();
		}

		mutex.unlock();
	}