/** Decode one VP8/VP9 frame from dataStream and blit it into surface.
 *
 *  Reads the whole stream as one coded frame, decodes it through libvpx,
 *  converts the decoder's YUV output into our local BGRA surface, and then
 *  copies the displayed subarea (d_w x d_h) into the caller's surface.
 *  Silently returns on any failure (not initialized, decode error, no image,
 *  unknown color range or pixel format).
 */
void VPXDecoder::decodeFrame(Graphics::Surface &surface, Common::SeekableReadStream &dataStream) {
	if (!_initialized)
		return;

	// Read all the data from the stream
	Common::ScopedArray<byte> data(new byte[dataStream.size()]);
	dataStream.read(data.get(), dataStream.size());

	// Perform the actual decode
	vpx_codec_err_t result = vpx_codec_decode(&_context, data.get(), dataStream.size(), 0, 0);
	if (result != VPX_CODEC_OK)
		return;

	// Try to get the image
	vpx_codec_iter_t iter = 0;
	vpx_image_t *image = vpx_codec_get_frame(&_context, &iter);
	if (!image)
		return;

	// Figure out the color range (studio/TV range vs. full range)
	Graphics::YUVToRGBManager::LuminanceScale scale;
	switch (image->range) {
	case VPX_CR_STUDIO_RANGE:
		scale = Graphics::YUVToRGBManager::kScaleITU;
		break;
	case VPX_CR_FULL_RANGE:
		scale = Graphics::YUVToRGBManager::kScaleFull;
		break;
	default:
		return;
	}

	// If we don't have it already, create our local surface sized to the
	// full coded frame (image->w x image->h)
	if (!_surface)
		_surface.reset(new Graphics::Surface(image->w, image->h));

	// Do the conversion based on the color space; only I420 is supported
	switch (image->fmt) {
	case VPX_IMG_FMT_I420:
		YUVToRGBMan.convert420(scale, _surface->getData(), _surface->getPitch(),
		                       image->planes[0], image->planes[1], image->planes[2],
		                       image->w, image->h, image->stride[0], image->stride[1]);
		break;
	default:
		return;
	}

	// Copy the displayed subarea into the caller's surface, row by row.
	// BUGFIX: the source offset was (y * d_h) * d_w * 4, which skips a whole
	// frame per row; each source row starts at y * _surface->getPitch().
	// The 4 is bytes per pixel of the converted output (BGRA) — assumed from
	// the convert420 destination; TODO confirm against Graphics::Surface.
	for (int y = 0; y < surface.getHeight(); y++)
		memcpy(surface.getData() + y * surface.getPitch(),
		       _surface->getData() + y * _surface->getPitch(),
		       image->d_w * 4);
}
/** Decode one H.263/MPEG-4 ASP frame from dataStream via libxvidcore and
 *  convert it into the caller's BGRA surface.
 *
 *  The whole stream is read as one coded frame, handed to xvid_decore(), and
 *  the decoder's internal YUV planes are converted to BGRA ourselves (see the
 *  NOTE below for why we do not let Xvid emit BGRA directly).
 */
void H263Codec::decodeFrame(Graphics::Surface &surface, Common::SeekableReadStream &dataStream) {
	// NOTE: When asking libxvidcore to decode the video into BGRA, it fills the alpha
	//       values with 0x00, rendering the output invisible (!).
	//       Since we, surprise, actually want to see the video, we would have to pass
	//       over the whole video data and fix-up the alpha values ourselves. Or
	//       alternatively do the YUV->BGRA conversion ourselves. We chose the latter.

	// Read the frame's bitstream. ScopedArray owns the buffer (RAII, matching
	// VPXDecoder::decodeFrame) instead of a raw new[]/delete[] pair, so it is
	// freed on every exit path.
	int dataSize = dataStream.size();
	Common::ScopedArray<byte> data(new byte[dataSize]);
	dataStream.read(data.get(), dataSize);

	// Set up the decode request: deblock/dering post-processing on, and
	// XVID_CSP_INTERNAL so Xvid hands back pointers to its internal YUV planes
	xvid_dec_frame_t xvid_dec_frame;
	memset(&xvid_dec_frame, 0, sizeof(xvid_dec_frame_t));

	xvid_dec_frame.version    = XVID_VERSION;
	xvid_dec_frame.general    = XVID_DEBLOCKY | XVID_DEBLOCKUV | XVID_DERINGY | XVID_DERINGUV;
	xvid_dec_frame.bitstream  = data.get();
	xvid_dec_frame.length     = dataSize;
	xvid_dec_frame.output.csp = XVID_CSP_INTERNAL;

	xvid_dec_stats_t xvid_dec_stats;
	memset(&xvid_dec_stats, 0, sizeof(xvid_dec_stats_t));
	xvid_dec_stats.version = XVID_VERSION;

	// xvid_decore() returns the number of bytes it consumed; warn if more
	// than a single byte of the frame was left over
	int c = xvid_decore(_decHandle, XVID_DEC_DECODE, &xvid_dec_frame, &xvid_dec_stats);
	if ((dataSize - c) > 1)
		warning("H263Codec::decodeFrame(): %d bytes left in frame", dataSize - c);

	// Convert the decoder's YUV 4:2:0 planes straight into the BGRA surface
	// (only if Xvid actually produced all three planes)
	if (xvid_dec_frame.output.plane[0] && xvid_dec_frame.output.plane[1] && xvid_dec_frame.output.plane[2])
		YUVToRGBMan.convert420(Graphics::YUVToRGBManager::kScaleFull,
		                       surface.getData(), surface.getWidth() * 4,
		                       (const byte *) xvid_dec_frame.output.plane[0],
		                       (const byte *) xvid_dec_frame.output.plane[1],
		                       (const byte *) xvid_dec_frame.output.plane[2],
		                       _width, _height,
		                       xvid_dec_frame.output.stride[0],
		                       xvid_dec_frame.output.stride[1]);
}