std::string MovieDecoder::createScaleString(int size, bool maintainAspectRatio) { std::stringstream scale; if (!maintainAspectRatio) { scale << "w=" << size << ":h=" << size; } else { auto width = GetVideoWidth(); auto height = GetVideoHeight(); AVRational par = av_guess_sample_aspect_ratio(m_pFormatContext, m_pVideoStream, m_pFrame); // if the pixel aspect ratio is defined and is not 1, we have an anamorphic stream bool anamorphic = par.num != 0 && par.num != par.den; if (anamorphic) { width = width * par.num / par.den; } if (height > width) { scale << "w=-1:h=" << (size == 0 ? height : size); } else { scale << "h=-1:w=" << (size == 0 ? width : size); } } return scale.str(); }
// Returns the coded video width, or -1 when no video codec context is available.
int MovieDecoder::getWidth()
{
    return m_pVideoCodecContext ? GetVideoWidth() : -1;
}
void MovieDecoder::calculateDimensions(int squareSize, bool maintainAspectRatio, int& destWidth, int& destHeight) { if (squareSize == 0) { // use original video size squareSize = max(GetVideoWidth(), GetVideoHeigth()); } if (!maintainAspectRatio) { destWidth = squareSize; destHeight = squareSize; } else { int srcWidth = GetVideoWidth(); int srcHeight = GetVideoHeigth(); int ascpectNominator = m_pVideoCodecContext->sample_aspect_ratio.num; int ascpectDenominator = m_pVideoCodecContext->sample_aspect_ratio.den; if (ascpectNominator != 0 && ascpectDenominator != 0) { srcWidth = srcWidth * ascpectNominator / ascpectDenominator; } if (srcWidth > srcHeight) { destWidth = squareSize; destHeight = static_cast<int>(static_cast<float>(squareSize) / srcWidth * srcHeight); } else { destWidth = static_cast<int>(static_cast<float>(squareSize) / srcHeight * srcWidth); destHeight = squareSize; } } }
/* Initializes a level for play: resets the viewport to the top-left corner,
 * binds it to the current video buffer, clears per-level state, and raises the
 * global run flags. Returns the same level pointer for call chaining. */
gameLevel * setLevel(gameLevel *gl)
{
    /* Viewport starts at the origin and maps straight onto the video buffer. */
    gl->xview = 0;
    gl->yview = 0;
    gl->view.buf  = GetVideo();
    gl->view.resx = GetVideoWidth();
    gl->view.resy = GetVideoHeight();

    /* Reset per-level bookkeeping before the loop starts. */
    gl->idxLoadActor = 0;
    gl->stateGame = 0;

    /* NOTE(review): copies the freshly bound view into gl->frame — presumably a
     * working/backdrop copy of the level image; confirm against cloneImage's doc. */
    cloneImage(&gl->view, &gl->frame);

    runStateGame = 0;
    flgRun = 1;
    return gl;
}
void MovieDecoder::getScaledVideoFrame(int scaledSize, bool maintainAspectRatio, VideoFrame& videoFrame) { if (m_pFrame->interlaced_frame) { avpicture_deinterlace((AVPicture*) m_pFrame, (AVPicture*) m_pFrame, m_pVideoCodecContext->pix_fmt, GetVideoWidth(), GetVideoHeigth()); } int scaledWidth, scaledHeight; convertAndScaleFrame(PIX_FMT_RGBA, scaledSize, maintainAspectRatio, scaledWidth, scaledHeight); videoFrame.width = scaledWidth; videoFrame.height = scaledHeight; videoFrame.lineSize = m_pFrame->linesize[0]; if(videoFrame.frameData != nullptr) delete videoFrame.frameData; videoFrame.frameData = new uint8_t[videoFrame.lineSize * videoFrame.height]; memcpy(videoFrame.frameData, m_pFrame->data[0], videoFrame.lineSize * videoFrame.height); }
void MovieDecoder::convertAndScaleFrame(PixelFormat format, int scaledSize, bool maintainAspectRatio, int& scaledWidth, int& scaledHeight) { calculateDimensions(scaledSize, maintainAspectRatio, scaledWidth, scaledHeight); #ifdef LATEST_GREATEST_FFMPEG // Enable this when it hits the released ffmpeg version SwsContext* scaleContext = sws_alloc_context(); if (scaleContext == nullptr) { throw std::logic_error("Failed to allocate scale context"); } av_opt_set_int(scaleContext, "srcw", GetVideoWidth(),0); av_opt_set_int(scaleContext, "srch", GetVideoHeigth(),0); av_opt_set_int(scaleContext, "src_format", m_pVideoCodecContext->pix_fmt,0); av_opt_set_int(scaleContext, "dstw", scaledWidth,0); av_opt_set_int(scaleContext, "dsth", scaledHeight,0); av_opt_set_int(scaleContext, "dst_format", format,0); av_opt_set_int(scaleContext, "sws_flags", SWS_BICUBIC,0); const int* coeff = sws_getCoefficients(SWS_CS_DEFAULT); if (sws_setColorspaceDetails(scaleContext, coeff, m_pVideoCodecContext->pix_fmt, coeff, format, 0, 1<<16, 1<<16) < 0) { sws_freeContext(scaleContext); throw std::logic_error("Failed to set colorspace details"); } if (sws_init_context(scaleContext, nullptr, nullptr) < 0) { sws_freeContext(scaleContext); throw std::logic_error("Failed to initialise scale context"); } #else SwsContext* scaleContext = sws_getContext(GetVideoWidth(), GetVideoHeigth(), m_pVideoCodecContext->pix_fmt, scaledWidth, scaledHeight, format, SWS_BICUBIC, nullptr, nullptr, nullptr); #endif if (nullptr == scaleContext) { throw logic_error("Failed to create resize context"); } AVFrame* convertedFrame = nullptr; uint8_t* convertedFrameBuffer = nullptr; createAVFrame(&convertedFrame, &convertedFrameBuffer, scaledWidth, scaledHeight, format); sws_scale(scaleContext, m_pFrame->data, m_pFrame->linesize, 0, GetVideoHeigth(), convertedFrame->data, convertedFrame->linesize); sws_freeContext(scaleContext); av_free(m_pFrame); av_free(m_pFrameBuffer); m_pFrame = convertedFrame; m_pFrameBuffer = 
convertedFrameBuffer; }
// Builds the libavfilter graph used to produce thumbnails:
//   buffer(source) -> [yadif] -> scale -> format(bgra) -> [transpose] -> buffersink
// The optional yadif stage is inserted only for interlaced frames, and the
// optional transpose stage only when the stream carries rotation metadata.
// checkRc() converts any non-zero FFmpeg return code into an error.
void MovieDecoder::initializeFilterGraph(const AVRational& timeBase, int size, bool maintainAspectRatio)
{
    // Sink accepts only BGRA; the list is AV_PIX_FMT_NONE-terminated as the API requires.
    static const AVPixelFormat pixelFormats[] = { AV_PIX_FMT_BGRA, AV_PIX_FMT_NONE };

    // Custom deleter: the params struct is av_malloc'd, so free it with av_freep.
    auto del = [] (AVBufferSinkParams* p) { av_freep(p); };
    std::unique_ptr<AVBufferSinkParams, decltype(del)> buffersinkParams(av_buffersink_params_alloc(), del);

    m_pFilterGraph = avfilter_graph_alloc();
    assert(m_pFilterGraph);

    // Describe the source frames for the "buffer" filter: size, pixel format,
    // time base and pixel aspect ratio. FFMAX(..., 1) guards against a zero
    // SAR denominator producing an invalid "n/0" argument string.
    std::stringstream ss;
    ss << "video_size=" << GetVideoWidth() << "x" << GetVideoHeight()
       << ":pix_fmt=" << m_pVideoCodecContext->pix_fmt
       << ":time_base=" << timeBase.num << "/" << timeBase.den
       << ":pixel_aspect=" << m_pVideoCodecContext->sample_aspect_ratio.num
       << "/" << FFMAX(m_pVideoCodecContext->sample_aspect_ratio.den, 1);

    checkRc(avfilter_graph_create_filter(&m_pFilterSource, avfilter_get_by_name("buffer"), "thumb_buffer",
                                         ss.str().c_str(), nullptr, m_pFilterGraph),
            "Failed to create filter source");
    buffersinkParams->pixel_fmts = pixelFormats;
    checkRc(avfilter_graph_create_filter(&m_pFilterSink, avfilter_get_by_name("buffersink"), "thumb_buffersink",
                                         nullptr, buffersinkParams.get(), m_pFilterGraph),
            "Failed to create filter sink");
    // NOTE(review): ownership of the params struct is deliberately released here
    // rather than freed — presumably it must outlive the create call; confirm
    // against the av_buffersink_params_alloc contract for this FFmpeg version.
    buffersinkParams.release();

    // Optional deinterlacer, only when the decoded frame is flagged interlaced.
    AVFilterContext* yadifFilter = nullptr;
    if (m_pFrame->interlaced_frame != 0)
    {
        checkRc(avfilter_graph_create_filter(&yadifFilter, avfilter_get_by_name("yadif"), "thumb_deint",
                                             "deint=1", nullptr, m_pFilterGraph),
                "Failed to create deinterlace filter");
    }

    AVFilterContext* scaleFilter = nullptr;
    checkRc(avfilter_graph_create_filter(&scaleFilter, avfilter_get_by_name("scale"), "thumb_scale",
                                         createScaleString(size, maintainAspectRatio).c_str(), nullptr, m_pFilterGraph),
            "Failed to create scale filter");

    AVFilterContext* formatFilter = nullptr;
    checkRc(avfilter_graph_create_filter(&formatFilter, avfilter_get_by_name("format"), "thumb_format",
                                         "pix_fmts=bgra", nullptr, m_pFilterGraph),
            "Failed to create format filter");

    // Optional rotation; getStreamRotation() returns -1 when the stream has no
    // rotation metadata, otherwise a transpose direction value.
    AVFilterContext* rotateFilter = nullptr;
    auto rotation = getStreamRotation();
    if (rotation != -1)
    {
        checkRc(avfilter_graph_create_filter(&rotateFilter, avfilter_get_by_name("transpose"), "thumb_rotate",
                                             to_string(rotation).c_str(), nullptr, m_pFilterGraph),
                "Failed to create rotate filter");
    }

    // Link the chain sink-to-source, skipping the optional stages when absent:
    // source -> [yadif] -> scale -> format -> [rotate] -> sink.
    checkRc(avfilter_link(rotateFilter ? rotateFilter : formatFilter, 0, m_pFilterSink, 0),
            "Failed to link final filter");

    if (rotateFilter)
    {
        checkRc(avfilter_link(formatFilter, 0, rotateFilter, 0), "Failed to link format filter");
    }

    checkRc(avfilter_link(scaleFilter, 0, formatFilter, 0), "Failed to link scale filter");

    if (yadifFilter)
    {
        checkRc(avfilter_link(yadifFilter, 0, scaleFilter, 0), "Failed to link yadif filter");
    }

    checkRc(avfilter_link(m_pFilterSource, 0, yadifFilter ? yadifFilter : scaleFilter, 0),
            "Failed to link source filter");

    checkRc(avfilter_graph_config(m_pFilterGraph, nullptr), "Failed to configure filter graph");
}