Example #1
bool TheoraDecoder::TheoraVideoTrack::decodePacket(ogg_packet &oggPacket) {
	if (th_decode_packetin(_theoraDecode, &oggPacket, 0) == 0) {
		_curFrame++;

		// Convert the YUV data to RGBA data
		th_ycbcr_buffer yuv;
		th_decode_ycbcr_out(_theoraDecode, yuv);
		translateYUVtoRGBA(yuv);

		double time = th_granule_time(_theoraDecode, oggPacket.granulepos);

		// Calculate when the next frame should be shown.
		// This is all in floating point because that's what the Ogg code gives us.
		// Not every packet carries a granulepos, so th_granule_time() can return
		// -1; in that case we advance the time by one frame period ourselves.
		if (time == -1.0)
			_nextFrameStartTime += _frameRate.getInverse().toDouble();
		else
			_nextFrameStartTime = time;

		return true;
	}

	return false;
}
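
The timing fallback in decodePacket() can be read in isolation: when th_granule_time() returns -1 the packet carried no timestamp, so the decoder advances by one frame period instead of resynchronizing. Below is a minimal, self-contained sketch of that logic; the FrameTimer name and the plain double frame rate are illustrative assumptions, not part of the decoder above.

#include <cstdio>

// Hypothetical helper mirroring the fallback in decodePacket():
// a granule time of -1.0 means the packet carried no timestamp.
struct FrameTimer {
	double frameRate;          // nominal frames per second
	double nextFrameStartTime; // in seconds

	void onFrameDecoded(double granuleTime) {
		if (granuleTime == -1.0)
			nextFrameStartTime += 1.0 / frameRate; // advance by one frame period
		else
			nextFrameStartTime = granuleTime;      // trust the container's timestamp
	}
};

int main() {
	FrameTimer timer = { 25.0, 0.0 };
	timer.onFrameDecoded(-1.0); // no granulepos: 0.0 + 1/25 = 0.040
	timer.onFrameDecoded(0.12); // granulepos present: resynchronize to 0.120
	std::printf("next frame at %.3f s\n", timer.nextFrameStartTime);
	return 0;
}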
Example #2
const Graphics::Surface *TheoraDecoder::decodeNextFrame() {
	// First, let's get our frame
	while (_theoraPacket) {
		// Theora is one in, one out: each packet decodes to at most one frame
		if (ogg_stream_packetout(&_theoraOut, &_oggPacket) > 0) {

			// Apply any pending post-processing level change before decoding
			if (_ppInc) {
				_ppLevel += _ppInc;
				th_decode_ctl(_theoraDecode, TH_DECCTL_SET_PPLEVEL, &_ppLevel, sizeof(_ppLevel));
				_ppInc = 0;
			}

			if (th_decode_packetin(_theoraDecode, &_oggPacket, NULL) == 0) {
				_curFrame++;

				// Convert the YUV data to RGBA data
				th_ycbcr_buffer yuv;
				th_decode_ycbcr_out(_theoraDecode, yuv);
				translateYUVtoRGBA(yuv);

				// Record the wall-clock start time on the first decoded frame
				if (_curFrame == 0)
					_startTime = g_system->getMillis();

				double time = th_granule_time(_theoraDecode, _oggPacket.granulepos);

				// Calculate when the next frame should be shown.
				// This is all in floating point because that's what the Ogg code gives us.
				// Not every packet carries a granulepos, so th_granule_time() can return
				// -1; in that case we advance the time by one frame period ourselves.
				if (time == -1.0)
					_nextFrameStartTime += _frameRate.getInverse().toDouble();
				else
					_nextFrameStartTime = time;

				// We have a frame; stop pulling packets
				break;
			}
		} else {
			// If we can't get any more frames, we're done.
			if (_theoraOut.e_o_s || _fileStream->eos()) {
				_endOfVideo = true;
				break;
			}

			// Queue more data
			bufferData();
			while (ogg_sync_pageout(&_oggSync, &_oggPage) > 0)
				queuePage(&_oggPage);
		}

		// Update audio if we can
		queueAudio();
	}

	// Force at least some audio to be buffered
	// TODO: 5 is very arbitrary. We probably should do something like QuickTime does.
	while (!_endOfAudio && _audStream->numQueuedStreams() < 5) {
		bufferData();
		while (ogg_sync_pageout(&_oggSync, &_oggPage) > 0)
			queuePage(&_oggPage);

		bool queuedAudio = queueAudio();
		if ((_vorbisOut.e_o_s || _fileStream->eos()) && !queuedAudio) {
			_endOfAudio = true;
			break;
		}
	}

	return &_displaySurface;
}
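
Example #2 leans on bufferData() and queuePage(), which are not shown in the excerpt. For context, here is a rough sketch of what such helpers typically look like on top of libogg; the use of plain stdio in place of the engine's _fileStream, the 4096-byte read size, and the single-stream routing are assumptions, not the decoder's actual implementation.

#include <cstdio>
#include <ogg/ogg.h>

// Hypothetical stand-in for bufferData(): read raw bytes from the file
// into libogg's sync layer so ogg_sync_pageout() can assemble pages.
static long bufferData(FILE *file, ogg_sync_state *sync) {
	char *buffer = ogg_sync_buffer(sync, 4096);      // reserve space in the sync buffer
	long bytes = (long)fread(buffer, 1, 4096, file); // pull raw file data
	ogg_sync_wrote(sync, bytes);                     // tell libogg how much arrived
	return bytes;                                    // 0 means end of file
}

// Hypothetical stand-in for queuePage(): hand a complete page to the
// bitstream it belongs to. A real demuxer dispatches on
// ogg_page_serialno(); this sketch assumes a single Theora stream.
static void queuePage(ogg_page *page, ogg_stream_state *theoraOut) {
	ogg_stream_pagein(theoraOut, page);
}

In the excerpt the same pattern feeds more than one elementary stream, since both _theoraOut and _vorbisOut are drained from the shared _oggSync state.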