Example #1
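// Skips over the next frame: audio packets are only seeked past, while the video packet is still run through videoPacket().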
void SeekableBinkDecoder::skipNextFrame() {
	if (endOfVideo())
		return;

	VideoFrame &frame = _frames[_curFrame + 1];

	if (!_bink->seek(frame.offset))
		error("Bad bink seek");

	uint32 frameSize = frame.size;

	for (uint32 i = 0; i < _audioTracks.size(); i++) {
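		// Each audio packet is preceded by its byte length as a 32-bit little-endian value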
		uint32 audioPacketLength = _bink->readUint32LE();

		frameSize -= 4;

		if (frameSize < audioPacketLength)
			error("Audio packet too big for the frame");

		if (audioPacketLength >= 4) {
			// Skip audio data
			_bink->seek(audioPacketLength, SEEK_CUR);

			frameSize -= audioPacketLength;
		}
	}

	uint32 videoPacketStart = _bink->pos();
	uint32 videoPacketEnd   = _bink->pos() + frameSize;

	frame.bits =
		new Common::BitStream32LELSB(new Common::SeekableSubReadStream(_bink,
		    videoPacketStart, videoPacketEnd), true);

	videoPacket(frame);

	delete frame.bits;
	frame.bits = 0;

	_curFrame++;
	if (_curFrame == 0)
		_startTime = g_system->getMillis();
}
Example #2
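// Decodes the next frame: the selected audio track's packet is handed to audioPacket(), the other tracks are skipped, and the video packet is decoded into _surface.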
const Graphics::Surface *BinkDecoder::decodeNextFrame() {
	if (endOfVideo())
		return 0;

	VideoFrame &frame = _frames[_curFrame + 1];

	if (!_bink->seek(frame.offset))
		error("Bad bink seek");

	uint32 frameSize = frame.size;

	for (uint32 i = 0; i < _audioTracks.size(); i++) {
		AudioTrack &audio = _audioTracks[i];

		uint32 audioPacketLength = _bink->readUint32LE();

		frameSize -= 4;

		if (frameSize < audioPacketLength)
			error("Audio packet too big for the frame");

		if (audioPacketLength >= 4) {
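			// Remember where this audio packet's payload begins and ends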
			uint32 audioPacketStart = _bink->pos();
			uint32 audioPacketEnd   = _bink->pos() + audioPacketLength;

			if (i == _audioTrack) {
				// Only play one audio track

				//                  Number of samples in bytes
				audio.sampleCount = _bink->readUint32LE() / (2 * audio.channels);

				audio.bits =
					new Common::BitStream32LELSB(new Common::SeekableSubReadStream(_bink,
					    audioPacketStart + 4, audioPacketEnd), true);

				audioPacket(audio);

				delete audio.bits;
				audio.bits = 0;
			}

			_bink->seek(audioPacketEnd);

			frameSize -= audioPacketLength;
		}
	}

	uint32 videoPacketStart = _bink->pos();
	uint32 videoPacketEnd   = _bink->pos() + frameSize;

	frame.bits =
		new Common::BitStream32LELSB(new Common::SeekableSubReadStream(_bink,
		    videoPacketStart, videoPacketEnd), true);

	videoPacket(frame);

	delete frame.bits;
	frame.bits = 0;

	_curFrame++;
	if (_curFrame == 0)
		_startTime = g_system->getMillis();

	return &_surface;
}
Example #3
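// Decodes the next frame once its presentation time has been reached and flags the new image for copying.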
void Bink::processData() {
    if (getTimeToNextFrame() > 0)
        return;

    if (_curFrame >= _frames.size()) {
        finish();
        return;
    }

    VideoFrame &frame = _frames[_curFrame];

    if (!_bink->seek(frame.offset))
        throw Common::Exception(Common::kSeekError);

    uint32 frameSize = frame.size;

    for (uint32 i = 0; i < _audioTracks.size(); i++) {
        AudioTrack &audio = _audioTracks[i];

        uint32 audioPacketLength = _bink->readUint32LE();

        frameSize -= 4;

        if (frameSize < audioPacketLength)
            throw Common::Exception("Audio packet too big for the frame");

        if (audioPacketLength >= 4) {
            uint32 audioPacketStart = _bink->pos();
            uint32 audioPacketEnd   = _bink->pos() + audioPacketLength;

            if (i == _audioTrack) {
                // Only play one audio track

                //                  Number of samples in bytes
                audio.sampleCount = _bink->readUint32LE() / (2 * audio.channels);

                audio.bits =
                    new Common::BitStream32LELSB(new Common::SeekableSubReadStream(_bink,
                                                 audioPacketStart + 4, audioPacketEnd), true);

                audioPacket(audio);

                delete audio.bits;
                audio.bits = 0;
            }

            _bink->seek(audioPacketEnd);

            frameSize -= audioPacketLength;
        }
    }

    uint32 videoPacketStart = _bink->pos();
    uint32 videoPacketEnd   = _bink->pos() + frameSize;

    frame.bits =
        new Common::BitStream32LELSB(new Common::SeekableSubReadStream(_bink,
                                     videoPacketStart, videoPacketEnd), true);

    videoPacket(frame);

    delete frame.bits;
    frame.bits = 0;

    // Mark that the freshly decoded image still needs to be copied out
    _needCopy = true;

    _curFrame++;
}
Example #4
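// Variant of decodeNextFrame() that wraps the audio and video packets in bit streams over _bink directly and starts audio playback once the first frame has been decoded.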
const Graphics::Surface *BinkDecoder::decodeNextFrame() {
	if (endOfVideo())
		return 0;

	VideoFrame &frame = _frames[_curFrame + 1];

	if (!_bink->seek(frame.offset))
		error("Bad bink seek");

	uint32 frameSize = frame.size;

	for (uint32 i = 0; i < _audioTracks.size(); i++) {
		AudioTrack &audio = _audioTracks[i];

		uint32 audioPacketLength = _bink->readUint32LE();

		frameSize -= 4;

		if (frameSize < audioPacketLength)
			error("Audio packet too big for the frame");

		if (audioPacketLength >= 4) {
			if (i == _audioTrack) {
				// Only play one audio track

				//                  Number of samples in bytes
				audio.sampleCount = _bink->readUint32LE() / (2 * audio.channels);

				audio.bits = new Common::BitStream32LE(*_bink, (audioPacketLength - 4) * 8);

				audioPacket(audio);

				delete audio.bits;
				audio.bits = 0;

			} else
				// Skip the rest
				_bink->skip(audioPacketLength);

			frameSize -= audioPacketLength;
		}
	}

	frame.bits = new Common::BitStream32LE(*_bink, frameSize * 8);

	videoPacket(frame);

	delete frame.bits;
	frame.bits = 0;

	_curFrame++;
	if (_curFrame == 0)
		_startTime = g_system->getMillis();

	if (!_audioStarted && _audioStream) {
		_audioStarted = true;
		g_system->getMixer()->playStream(Audio::Mixer::kPlainSoundType, &_audioHandle, _audioStream);
	}

	return &_surface;
}
Example #5
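// Converts an incoming video packet to BGRA via libswscale, reusing a cached scaling context.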
AkPacket ConvertVideo::convert(const AkPacket &packet)
{
    AkVideoPacket videoPacket(packet);

    // Look up the FFmpeg pixel format matching the input format.
    QString format = AkVideoCaps::pixelFormatToString(videoPacket.caps().format());
    AVPixelFormat iFormat = av_get_pix_fmt(format.toStdString().c_str());

    // Initialize rescaling context.
    this->m_scaleContext = sws_getCachedContext(this->m_scaleContext,
                                                videoPacket.caps().width(),
                                                videoPacket.caps().height(),
                                                iFormat,
                                                videoPacket.caps().width(),
                                                videoPacket.caps().height(),
                                                AV_PIX_FMT_BGRA,
                                                SWS_FAST_BILINEAR,
                                                NULL,
                                                NULL,
                                                NULL);

    if (!this->m_scaleContext)
        return AkPacket();

    // Wrap the input buffer in an AVFrame.
    AVFrame iFrame;
    memset(&iFrame, 0, sizeof(AVFrame));

    if (av_image_fill_arrays((uint8_t **) iFrame.data,
                             iFrame.linesize,
                             (const uint8_t *) videoPacket.buffer().constData(),
                             iFormat,
                             videoPacket.caps().width(),
                             videoPacket.caps().height(),
                             1) < 0)
        return AkPacket();

    // Allocate the output buffer and wrap it in an AVFrame.
    int frameSize = av_image_get_buffer_size(AV_PIX_FMT_BGRA,
                                             videoPacket.caps().width(),
                                             videoPacket.caps().height(),
                                             1);

    QByteArray oBuffer(frameSize, Qt::Uninitialized);
    AVFrame oFrame;
    memset(&oFrame, 0, sizeof(AVFrame));

    if (av_image_fill_arrays((uint8_t **) oFrame.data,
                             oFrame.linesize,
                             (const uint8_t *) oBuffer.constData(),
                             AV_PIX_FMT_BGRA,
                             videoPacket.caps().width(),
                             videoPacket.caps().height(),
                             1) < 0)
        return AkPacket();

    // Convert picture format
    sws_scale(this->m_scaleContext,
              iFrame.data,
              iFrame.linesize,
              0,
              videoPacket.caps().height(),
              oFrame.data,
              oFrame.linesize);

    // Create packet
    AkVideoPacket oPacket(packet);
    oPacket.caps().format() = AkVideoCaps::Format_bgra;
    oPacket.buffer() = oBuffer;

    return oPacket.toPacket();
}