Example #1
void MovieDecoder::convertAndScaleFrame(AVPixelFormat format, int scaledSize, bool maintainAspectRatio, int& scaledWidth, int& scaledHeight)
{
    calculateDimensions(scaledSize, maintainAspectRatio, scaledWidth, scaledHeight);
    SwsContext* scaleContext = sws_getContext(m_pVideoCodecContext->width, m_pVideoCodecContext->height,
                               m_pVideoCodecContext->pix_fmt, scaledWidth, scaledHeight,
                               format, SWS_BICUBIC, NULL, NULL, NULL);

    if (NULL == scaleContext) {
        qDebug() << "Failed to create resize context";
        return;
    }

    AVFrame* convertedFrame = NULL;
    uint8_t* convertedFrameBuffer = NULL;

    createAVFrame(&convertedFrame, &convertedFrameBuffer, scaledWidth, scaledHeight, format);

    sws_scale(scaleContext, m_pFrame->data, m_pFrame->linesize, 0, m_pVideoCodecContext->height,
              convertedFrame->data, convertedFrame->linesize);
    sws_freeContext(scaleContext);

    av_free(m_pFrame);
    av_free(m_pFrameBuffer);

    m_pFrame        = convertedFrame;
    m_pFrameBuffer  = convertedFrameBuffer;
}
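The createAVFrame() helper used above belongs to MovieDecoder itself and is not shown in these examples. Below is a minimal sketch of what such a helper could look like using the libavutil image utilities; the signature, alignment and error handling are assumptions, not the original implementation.

#include <stdexcept>

extern "C" {
#include <libavutil/frame.h>
#include <libavutil/imgutils.h>
#include <libavutil/mem.h>
}

// Hypothetical helper: allocate a frame plus a packed pixel buffer and
// wire the buffer into the frame's data/linesize arrays.
static void createAVFrame(AVFrame** frame, uint8_t** buffer,
                          int width, int height, AVPixelFormat format)
{
    *frame = av_frame_alloc();
    if (*frame == nullptr) {
        throw std::logic_error("Failed to allocate frame");
    }

    int bufferSize = av_image_get_buffer_size(format, width, height, 1);
    *buffer = reinterpret_cast<uint8_t*>(av_malloc(bufferSize));
    if (*buffer == nullptr) {
        av_frame_free(frame);
        throw std::logic_error("Failed to allocate frame buffer");
    }

    av_image_fill_arrays((*frame)->data, (*frame)->linesize, *buffer,
                         format, width, height, 1);
    (*frame)->width  = width;
    (*frame)->height = height;
    (*frame)->format = format;
}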
Example #2
void MovieDecoder::convertAndScaleFrame(PixelFormat format, int& scaledWidth, int& scaledHeight)
{
    calculateDimensions(scaledWidth, scaledHeight);

    SwsContext* scaleContext = sws_getContext(pVideoCodecContext_->width, pVideoCodecContext_->height,
                                              pVideoCodecContext_->pix_fmt, scaledWidth, scaledHeight,
                                              format, SWS_BICUBIC, 0, 0, 0);
    if (0 == scaleContext)
    {
        throw std::logic_error("Failed to create resize context");
    }

    AVFrame* convertedFrame = NULL;
    uint8_t* convertedFrameBuffer = NULL;

    createAVFrame(&convertedFrame, &convertedFrameBuffer, scaledWidth, scaledHeight, format);

    sws_scale(scaleContext, pFrame_->data, pFrame_->linesize, 0, pVideoCodecContext_->height,
              convertedFrame->data, convertedFrame->linesize);
    sws_freeContext(scaleContext);

    av_free(pFrame_);
    av_free(pFrameBuffer_);
    pFrame_ = convertedFrame;
    pFrameBuffer_  = convertedFrameBuffer;
}
Example #3
void MovieDecoder::convertAndScaleFrame(PixelFormat format, int scaledSize, bool maintainAspectRatio, int& scaledWidth, int& scaledHeight)
{
    calculateDimensions(scaledSize, maintainAspectRatio, scaledWidth, scaledHeight);

#ifdef LATEST_GREATEST_FFMPEG
    // Enable this when it hits the released ffmpeg version
    SwsContext* scaleContext = sws_alloc_context();
    if (scaleContext == nullptr)
    {
        throw std::logic_error("Failed to allocate scale context");
    }

    av_set_int(scaleContext, "srcw", m_pVideoCodecContext->width);
    av_set_int(scaleContext, "srch", m_pVideoCodecContext->height);
    av_set_int(scaleContext, "src_format", m_pVideoCodecContext->pix_fmt);
    av_set_int(scaleContext, "dstw", scaledWidth);
    av_set_int(scaleContext, "dsth", scaledHeight);
    av_set_int(scaleContext, "dst_format", format);
    av_set_int(scaleContext, "sws_flags", SWS_BICUBIC);

    const int* coeff = sws_getCoefficients(SWS_CS_DEFAULT);
    if (sws_setColorspaceDetails(scaleContext, coeff, m_pVideoCodecContext->pix_fmt, coeff, format, 0, 1<<16, 1<<16) < 0)
    {
        sws_freeContext(scaleContext);
        throw std::logic_error("Failed to set colorspace details");
    }

    if (sws_init_context(scaleContext, nullptr, nullptr) < 0)
    {
        sws_freeContext(scaleContext);
        throw std::logic_error("Failed to initialise scale context");
    }
#else
    SwsContext* scaleContext = sws_getContext(m_pVideoCodecContext->width, m_pVideoCodecContext->height,
                                              m_pVideoCodecContext->pix_fmt, scaledWidth, scaledHeight,
                                              format, SWS_BICUBIC, nullptr, nullptr, nullptr);

    if (nullptr == scaleContext)
    {
        throw std::logic_error("Failed to create resize context");
    }
#endif

    AVFrame* convertedFrame = nullptr;
    uint8_t* convertedFrameBuffer = nullptr;

    createAVFrame(&convertedFrame, &convertedFrameBuffer, scaledWidth, scaledHeight, format);
    
    sws_scale(scaleContext, m_pFrame->data, m_pFrame->linesize, 0, m_pVideoCodecContext->height,
              convertedFrame->data, convertedFrame->linesize);
    sws_freeContext(scaleContext);

    av_free(m_pFrame);
    av_free(m_pFrameBuffer);
    
    m_pFrame        = convertedFrame;
    m_pFrameBuffer  = convertedFrameBuffer;
}
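The block guarded by LATEST_GREATEST_FFMPEG configures the scaler through the old av_set_int() AVOption setter. For comparison, here is a minimal sketch of the same setup through the current av_opt_set_int() API; the helper name and its parameters are illustrative, while the option names mirror the ones used in the example above.

#include <stdexcept>

extern "C" {
#include <libavcodec/avcodec.h>
#include <libavutil/opt.h>
#include <libswscale/swscale.h>
}

// Illustrative helper: allocate, configure and initialise a scaler via AVOptions.
static SwsContext* createScaleContext(const AVCodecContext* src, AVPixelFormat dstFormat,
                                      int dstWidth, int dstHeight)
{
    SwsContext* ctx = sws_alloc_context();
    if (ctx == nullptr)
    {
        throw std::logic_error("Failed to allocate scale context");
    }

    av_opt_set_int(ctx, "srcw",       src->width,   0);
    av_opt_set_int(ctx, "srch",       src->height,  0);
    av_opt_set_int(ctx, "src_format", src->pix_fmt, 0);
    av_opt_set_int(ctx, "dstw",       dstWidth,     0);
    av_opt_set_int(ctx, "dsth",       dstHeight,    0);
    av_opt_set_int(ctx, "dst_format", dstFormat,    0);
    av_opt_set_int(ctx, "sws_flags",  SWS_BICUBIC,  0);

    if (sws_init_context(ctx, nullptr, nullptr) < 0)
    {
        sws_freeContext(ctx);
        throw std::logic_error("Failed to initialise scale context");
    }
    return ctx;
}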
Example #4
IplImage *VideoStreamDecoder::decodeVideoFrame(const unsigned char *buf, int size) {
	if (size <= 1)
		return NULL;

	IplImage *image = NULL;
	int decodedFrameSize = 0;

	// use the new API instead
	AVPacket avpkt;
	av_init_packet(&avpkt);
	avpkt.data = (uint8_t *)buf;
	avpkt.size = size;
	avpkt.flags = AV_PKT_FLAG_KEY;
	// The third argument of avcodec_decode_video2() is a "got picture" flag:
	// it becomes non-zero only when a complete frame has been decoded.
	int res = avcodec_decode_video2(codecContext, decodeFrame, &decodedFrameSize, &avpkt);
	//int res = avcodec_decode_video(codecContext, decodeFrame, &decodedFrameSize, buf, size);
	if (res < 0) {
		fprintf(stderr, "Can not decode buffer\n");
		return NULL;
	}
	
	if (openCVFrame == NULL) {
		openCVFrame = createAVFrame(openCVPixelFormat, codecContext->width, codecContext->height);
	}

	if(decodedFrameSize > 0) {
		struct SwsContext *imgConvertCtx;
		int width = codecContext->width;
		int height = codecContext->height;
		imgConvertCtx = sws_getContext(width, height, (RAW_STREAM_FORMAT), width, height, openCVPixelFormat, SWS_BICUBIC, NULL, NULL, NULL);
		sws_scale(imgConvertCtx, decodeFrame->data,  decodeFrame->linesize, 0, height, openCVFrame->data, openCVFrame->linesize);
		sws_freeContext(imgConvertCtx);	// was previously leaked on every call
		//img_convert((AVPicture *)openCVFrame, openCVPixelFormat, (AVPicture *)decodeFrame, PIX_FMT_YUV420P, codecContext->width, codecContext->height);
	} else {
		// No complete frame decoded yet (e.g. decoder delay): nothing to convert
		return NULL;
	}

	image = cvCreateImage(cvSize(codecContext->width, codecContext->height), IPL_DEPTH_8U, 3);
	// Copy row by row, since the AVFrame line size and the IplImage row stride may differ
	for (int y = 0; y < codecContext->height; y++) {
		memcpy(image->imageData + y * image->widthStep, openCVFrame->data[0] + y * openCVFrame->linesize[0], codecContext->width * 3);
	}

	return image;
}
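avcodec_decode_video2() and av_init_packet() are deprecated in current FFmpeg releases. Below is a minimal sketch of the equivalent decode step using the avcodec_send_packet()/avcodec_receive_frame() API; decodePacket() is a hypothetical stand-alone helper, not part of VideoStreamDecoder.

#include <stdio.h>

extern "C" {
#include <libavcodec/avcodec.h>
}

// Sketch: decode one packet with the send/receive API. Returns 1 when a frame
// was decoded into `frame`, 0 when the decoder needs more data, -1 on error.
static int decodePacket(AVCodecContext *codecContext, AVFrame *frame,
                        const unsigned char *buf, int size)
{
	AVPacket *pkt = av_packet_alloc();
	if (pkt == NULL)
		return -1;

	pkt->data = (uint8_t *)buf;
	pkt->size = size;

	int ret = avcodec_send_packet(codecContext, pkt);
	av_packet_free(&pkt);
	if (ret < 0) {
		fprintf(stderr, "Can not decode buffer\n");
		return -1;
	}

	ret = avcodec_receive_frame(codecContext, frame);
	if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
		return 0;	// no complete frame available yet
	return ret < 0 ? -1 : 1;
}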