// Decodes and returns the next video frame of the stream, converted to an
// RGB24 wxImage scaled to m_width x m_height.  Returns an invalid (default)
// wxImage on error or when the stream is exhausted.
//
// Fixes over the previous version:
//  - the return value of avcodec_decode_video2() is now checked, so a decode
//    error no longer falls through to reading frameFinished;
//  - at EOF the decoder is drained with an empty packet, so frames still
//    buffered inside codecs with delay (e.g. B-frame reordering) are returned
//    instead of being silently dropped.
wxImage wxFfmpegMediaDecoder::GetNextFrame() {
    if (!m_frame && !BeginDecode())
        return wxImage();

    // Converts the decoded m_frame into an RGB24 wxImage.  Returns an invalid
    // image if the scaler context cannot be created.
    auto convertFrame = [this]() -> wxImage {
        SwsContext* swsCtx = sws_getContext(m_codecCtx->width, m_codecCtx->height,
            m_codecCtx->pix_fmt, m_width, m_height, AV_PIX_FMT_RGB24,
            SWS_BICUBIC, NULL, NULL, NULL);
        if (swsCtx == NULL)
            return wxImage();
        wxImage img(m_width, m_height);
        // wxImage stores packed RGB; feed its buffer as a single plane.
        uint8_t* rgbDst[3] = { img.GetData(), NULL, NULL };
        int rgbStride[3] = { 3 * m_width, 0, 0 };
        sws_scale(swsCtx, m_frame->data, m_frame->linesize, 0,
                  m_codecCtx->height, rgbDst, rgbStride);
        sws_freeContext(swsCtx);
        return img;
    };

    int frameFinished = 0;
    AVPacket packet;
    while (av_read_frame(m_formatCtx, &packet) >= 0) {
        // Only feed packets belonging to the selected video stream.
        if (packet.stream_index == m_videoStream) {
            int ret = avcodec_decode_video2(m_codecCtx, m_frame, &frameFinished, &packet);
            if (ret >= 0 && frameFinished) {
                wxImage img = convertFrame();
                av_packet_unref(&packet);
                return img;
            }
        }
        // Free the packet that was allocated by av_read_frame.
        av_packet_unref(&packet);
    }

    // EOF: drain frames still buffered inside the decoder by feeding it an
    // empty packet until it stops producing output.
    av_init_packet(&packet);
    packet.data = NULL;
    packet.size = 0;
    do {
        frameFinished = 0;
        if (avcodec_decode_video2(m_codecCtx, m_frame, &frameFinished, &packet) < 0)
            break;
        if (frameFinished)
            return convertFrame();
    } while (frameFinished);

    return wxImage();
}
// Starts decoding of the BODY chunk and returns the first decoded row,
// or NULL if decoding cannot be started.
inline unsigned const * IlbmBodyChunk::DecodeFirstRow() const {
    return BeginDecode() ? DecodeNextRow() : NULL;
}
bool wxFfmpegMediaDecoder::SetPosition(double pos, bool keyFrame) { if (m_formatCtx == NULL) return false; if (!m_frame && !BeginDecode()) return false; int64_t timestamp = (int64_t) (pos * AV_TIME_BASE); if (m_formatCtx->start_time != (int64_t)AV_NOPTS_VALUE) timestamp += m_formatCtx->start_time; avcodec_flush_buffers(m_codecCtx); bool res = av_seek_frame(m_formatCtx, -1, timestamp, keyFrame ? AVSEEK_FLAG_BACKWARD : AVSEEK_FLAG_ANY|AVSEEK_FLAG_BACKWARD) >= 0; avcodec_flush_buffers(m_codecCtx); return res; }