Example #1
SwsContext *GetSwsContext(int SrcW, int SrcH, AVPixelFormat SrcFormat, int SrcColorSpace, int SrcColorRange, int DstW, int DstH, AVPixelFormat DstFormat, int DstColorSpace, int DstColorRange, int64_t Flags) {
	Flags |= SWS_FULL_CHR_H_INT | SWS_FULL_CHR_H_INP | SWS_ACCURATE_RND;
	SwsContext *Context = sws_alloc_context();
	if (!Context) return nullptr;

	// 0 = limited range, 1 = full range
	int SrcRange = SrcColorRange == AVCOL_RANGE_JPEG;
	int DstRange = DstColorRange == AVCOL_RANGE_JPEG;

	av_opt_set_int(Context, "sws_flags",  Flags, 0);
	av_opt_set_int(Context, "srcw",       SrcW, 0);
	av_opt_set_int(Context, "srch",       SrcH, 0);
	av_opt_set_int(Context, "dstw",       DstW, 0);
	av_opt_set_int(Context, "dsth",       DstH, 0);
	av_opt_set_int(Context, "src_range",  SrcRange, 0);
	av_opt_set_int(Context, "dst_range",  DstRange, 0);
	av_opt_set_int(Context, "src_format", SrcFormat, 0);
	av_opt_set_int(Context, "dst_format", DstFormat, 0);

	sws_setColorspaceDetails(Context,
		sws_getCoefficients(SrcColorSpace), SrcRange,
		sws_getCoefficients(DstColorSpace), DstRange,
		0, 1<<16, 1<<16);

	if(sws_init_context(Context, nullptr, nullptr) < 0){
		sws_freeContext(Context);
		return nullptr;
	}

	return Context;
}
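
A minimal usage sketch (the dimensions, formats, and flags below are illustrative assumptions, not taken from the source): converting a limited-range BT.709 YUV 4:2:0 frame to full-range BGRA.

	// Hypothetical caller; every concrete value here is an assumption for illustration.
	SwsContext *Ctx = GetSwsContext(
		1920, 1080, AV_PIX_FMT_YUV420P, SWS_CS_ITU709, AVCOL_RANGE_MPEG,
		1920, 1080, AV_PIX_FMT_BGRA,    SWS_CS_ITU709, AVCOL_RANGE_JPEG,
		SWS_BICUBIC);
	if (Ctx) {
		// sws_scale(Ctx, Frame->data, Frame->linesize, 0, 1080, DstData, DstLinesize);
		sws_freeContext(Ctx);
	}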
Example #2
static bool mp_media_init_scaling(mp_media_t *m)
{
	int space = get_sws_colorspace(m->v.decoder->colorspace);
	int range = get_sws_range(m->v.decoder->color_range);
	const int *coeff = sws_getCoefficients(space);

	m->swscale = sws_getCachedContext(NULL,
			m->v.decoder->width, m->v.decoder->height,
			m->v.decoder->pix_fmt,
			m->v.decoder->width, m->v.decoder->height,
			m->scale_format,
			SWS_FAST_BILINEAR, NULL, NULL, NULL);
	if (!m->swscale) {
		blog(LOG_WARNING, "MP: Failed to initialize scaler");
		return false;
	}

	sws_setColorspaceDetails(m->swscale, coeff, range, coeff, range, 0,
			FIXED_1_0, FIXED_1_0);

	int ret = av_image_alloc(m->scale_pic, m->scale_linesizes,
			m->v.decoder->width, m->v.decoder->height,
			m->scale_format, 1);
	if (ret < 0) {
		blog(LOG_WARNING, "MP: Failed to create scale pic data");
		return false;
	}

	return true;
}
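
FIXED_1_0 here is the neutral 16.16 fixed-point value for contrast and saturation; the source does not show its definition, but something along these lines is assumed:

	#define FIXED_1_0 (1 << 16) /* 1.0 in 16.16 fixed point (assumed definition) */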
Example #3
bool ImageConverterFFPrivate::setupColorspaceDetails(bool force)
{
    if (!sws_ctx) {
        update_eq = true;
        return false;
    }
    if (force)
        update_eq = true;
    if (!update_eq) {
        return true;
    }
    const int srcRange = range_in == ColorRange_Limited ? 0 : 1;
    int dstRange = range_out == ColorRange_Limited ? 0 : 1;
    // TODO: color space
    bool supported = sws_setColorspaceDetails(sws_ctx, sws_getCoefficients(SWS_CS_DEFAULT)
                             , srcRange, sws_getCoefficients(SWS_CS_DEFAULT)
                             , dstRange
                             , ((brightness << 16) + 50)/100
                             , (((contrast + 100) << 16) + 50)/100
                             , (((saturation + 100) << 16) + 50)/100
                             ) >= 0;
    //sws_init_context(d.sws_ctx, NULL, NULL);
    update_eq = false;
    return supported;
}
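
The last three arguments to sws_setColorspaceDetails() are 16.16 fixed-point values, and the expressions above map a [-100, 100] equalizer range onto them (the "+ 50" rounds the division). A quick check of that mapping, as a sketch:

	// brightness: -100..100 -> -1.0..1.0   e.g. brightness = 0  ->  0       (neutral)
	// contrast:   -100..100 ->  0.0..2.0   e.g. contrast   = 0  ->  1 << 16 (fixed-point 1.0)
	// saturation: -100..100 ->  0.0..2.0   e.g. saturation = 50 ->  98304   (fixed-point 1.5)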
Example #4
bool ImageConverterFF::setupColorspaceDetails()
{
    DPTR_D(ImageConverterFF);
    if (!d.sws_ctx) {
        d.update_eq = true;
        return false;
    }
    //if (!d.update_eq)
    //    return true;
    // FIXME: how to fill the ranges?
    const int srcRange = 1;
    const int dstRange = 0;
    // TODO: SWS_CS_DEFAULT?
    sws_setColorspaceDetails(d.sws_ctx, sws_getCoefficients(SWS_CS_DEFAULT)
                             , srcRange, sws_getCoefficients(SWS_CS_DEFAULT)
                             , dstRange
                             , ((d.brightness << 16) + 50)/100
                             , (((d.contrast + 100) << 16) + 50)/100
                             , (((d.saturation + 100) << 16) + 50)/100
                             );
    // TODO: b, c, s map function?
    //sws_init_context(d.sws_ctx, NULL, NULL);
    d.update_eq = false;
    return true;
}
Example #5
static void set_luma_transfer( struct SwsContext *context, int colorspace, int use_full_range )
{
	int *coefficients;
	const int *new_coefficients;
	int full_range;
	int brightness, contrast, saturation;

	if ( sws_getColorspaceDetails( context, &coefficients, &full_range, &coefficients, &full_range,
			&brightness, &contrast, &saturation ) != -1 )
	{
		// Don't change these from defaults unless explicitly told to.
		if ( use_full_range >= 0 )
			full_range = use_full_range;
		switch ( colorspace )
		{
		case 170:
		case 470:
		case 601:
		case 624:
			new_coefficients = sws_getCoefficients( SWS_CS_ITU601 );
			break;
		case 240:
			new_coefficients = sws_getCoefficients( SWS_CS_SMPTE240M );
			break;
		case 709:
			new_coefficients = sws_getCoefficients( SWS_CS_ITU709 );
			break;
		default:
			new_coefficients = coefficients;
			break;
		}
		sws_setColorspaceDetails( context, new_coefficients, full_range, new_coefficients, full_range,
			brightness, contrast, saturation );
	}
}
Example #6
bool ImageConverterFFPrivate::setupColorspaceDetails(bool force)
{
    if (!sws_ctx) {
        update_eq = true;
        return false;
    }
    if (force)
        update_eq = true;
    if (!update_eq) {
        return true;
    }
    // FIXME: how to fill the ranges?
    const int srcRange = 1;
    const int dstRange = 0;
    // TODO: SWS_CS_DEFAULT?
    bool supported = sws_setColorspaceDetails(sws_ctx, sws_getCoefficients(SWS_CS_DEFAULT)
                     , srcRange, sws_getCoefficients(SWS_CS_DEFAULT)
                     , dstRange
                     , ((brightness << 16) + 50)/100
                     , (((contrast + 100) << 16) + 50)/100
                     , (((saturation + 100) << 16) + 50)/100
                                             ) >= 0;
    //sws_init_context(d.sws_ctx, NULL, NULL);
    update_eq = false;
    return supported;
}
Example #7
void MovieDecoder::convertAndScaleFrame(PixelFormat format, int scaledSize, bool maintainAspectRatio, int& scaledWidth, int& scaledHeight)
{
    calculateDimensions(scaledSize, maintainAspectRatio, scaledWidth, scaledHeight);

#ifdef LATEST_GREATEST_FFMPEG
	// Enable this when it hits the released ffmpeg version
    SwsContext* scaleContext = sws_alloc_context();
    if (scaleContext == nullptr)
    {
        throw std::logic_error("Failed to allocate scale context");
    }

    av_set_int(scaleContext, "srcw", m_pVideoCodecContext->width);
    av_set_int(scaleContext, "srch", m_pVideoCodecContext->height);
    av_set_int(scaleContext, "src_format", m_pVideoCodecContext->pix_fmt);
    av_set_int(scaleContext, "dstw", scaledWidth);
    av_set_int(scaleContext, "dsth", scaledHeight);
    av_set_int(scaleContext, "dst_format", format);
    av_set_int(scaleContext, "sws_flags", SWS_BICUBIC);

    const int* coeff = sws_getCoefficients(SWS_CS_DEFAULT);
    // The 3rd and 5th arguments are the source/destination ranges (0 = limited, 1 = full), not pixel formats
    if (sws_setColorspaceDetails(scaleContext, coeff, 0, coeff, 0, 0, 1<<16, 1<<16) < 0)
    {
		sws_freeContext(scaleContext);
		throw std::logic_error("Failed to set colorspace details");
	}

	if (sws_init_context(scaleContext, nullptr, nullptr) < 0)
	{
		sws_freeContext(scaleContext);
		throw std::logic_error("Failed to initialise scale context");
	}
#else
    SwsContext* scaleContext = sws_getContext(m_pVideoCodecContext->width, m_pVideoCodecContext->height,
                                              m_pVideoCodecContext->pix_fmt, scaledWidth, scaledHeight,
                                              format, SWS_BICUBIC, nullptr, nullptr, nullptr);

    if (scaleContext == nullptr)
    {
        throw std::logic_error("Failed to create resize context");
    }
#endif

    AVFrame* convertedFrame = nullptr;
    uint8_t* convertedFrameBuffer = nullptr;

    createAVFrame(&convertedFrame, &convertedFrameBuffer, scaledWidth, scaledHeight, format);
    
    sws_scale(scaleContext, m_pFrame->data, m_pFrame->linesize, 0, m_pVideoCodecContext->height,
              convertedFrame->data, convertedFrame->linesize);
    sws_freeContext(scaleContext);

    av_free(m_pFrame);
    av_free(m_pFrameBuffer);
    
    m_pFrame        = convertedFrame;
    m_pFrameBuffer  = convertedFrameBuffer;
}
Example #8
int video_scaler_create(video_scaler_t **scaler_out,
		const struct video_scale_info *dst,
		const struct video_scale_info *src,
		enum video_scale_type type)
{
	enum AVPixelFormat format_src = get_ffmpeg_video_format(src->format);
	enum AVPixelFormat format_dst = get_ffmpeg_video_format(dst->format);
	int                scale_type = get_ffmpeg_scale_type(type);
	const int          *coeff_src = get_ffmpeg_coeffs(src->colorspace);
	const int          *coeff_dst = get_ffmpeg_coeffs(dst->colorspace);
	int                range_src  = get_ffmpeg_range_type(src->range);
	int                range_dst  = get_ffmpeg_range_type(dst->range);
	struct video_scaler *scaler;
	int ret;

	if (!scaler_out)
		return VIDEO_SCALER_FAILED;

	if (format_src == AV_PIX_FMT_NONE ||
	    format_dst == AV_PIX_FMT_NONE)
		return VIDEO_SCALER_BAD_CONVERSION;

	scaler = bzalloc(sizeof(struct video_scaler));
	scaler->src_height = src->height;

	scaler->swscale = sws_getCachedContext(NULL,
			src->width, src->height, format_src,
			dst->width, dst->height, format_dst,
			scale_type, NULL, NULL, NULL);
	if (!scaler->swscale) {
		blog(LOG_ERROR, "video_scaler_create: Could not create "
		                "swscale");
		goto fail;
	}

	ret = sws_setColorspaceDetails(scaler->swscale,
			coeff_src, range_src,
			coeff_dst, range_dst,
			0, FIXED_1_0, FIXED_1_0);
	if (ret < 0) {
		blog(LOG_DEBUG, "video_scaler_create: "
		                "sws_setColorspaceDetails failed, ignoring");
	}

	*scaler_out = scaler;
	return VIDEO_SCALER_SUCCESS;

fail:
	video_scaler_destroy(scaler);
	return VIDEO_SCALER_FAILED;
}
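
A usage sketch for the scaler above, assuming OBS-style video_scale_info fields (the field names and values here are assumptions, not shown in the source):

	struct video_scale_info src = {
		.format     = VIDEO_FORMAT_NV12,
		.width      = 1280,
		.height     = 720,
		.range      = VIDEO_RANGE_PARTIAL,
		.colorspace = VIDEO_CS_601,
	};
	struct video_scale_info dst = src;
	dst.format = VIDEO_FORMAT_RGBA;

	video_scaler_t *scaler = NULL;
	if (video_scaler_create(&scaler, &dst, &src, VIDEO_SCALE_BILINEAR) == VIDEO_SCALER_SUCCESS) {
		/* ... feed frames to the scaler ... */
		video_scaler_destroy(scaler);
	}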
Example #9
int mlt_set_luma_transfer( struct SwsContext *context, int src_colorspace,
	int dst_colorspace, int src_full_range, int dst_full_range )
{
	const int *src_coefficients = sws_getCoefficients( SWS_CS_DEFAULT );
	const int *dst_coefficients = sws_getCoefficients( SWS_CS_DEFAULT );
	int brightness = 0;
	int contrast = 1 << 16;
	int saturation = 1  << 16;
	int src_range = src_full_range ? 1 : 0;
	int dst_range = dst_full_range ? 1 : 0;

	switch ( src_colorspace )
	{
	case 170:
	case 470:
	case 601:
	case 624:
		src_coefficients = sws_getCoefficients( SWS_CS_ITU601 );
		break;
	case 240:
		src_coefficients = sws_getCoefficients( SWS_CS_SMPTE240M );
		break;
	case 709:
		src_coefficients = sws_getCoefficients( SWS_CS_ITU709 );
		break;
	default:
		break;
	}
	switch ( dst_colorspace )
	{
	case 170:
	case 470:
	case 601:
	case 624:
		dst_coefficients = sws_getCoefficients( SWS_CS_ITU601 );
		break;
	case 240:
		dst_coefficients = sws_getCoefficients( SWS_CS_SMPTE240M );
		break;
	case 709:
		dst_coefficients = sws_getCoefficients( SWS_CS_ITU709 );
		break;
	default:
		break;
	}
	return sws_setColorspaceDetails( context, src_coefficients, src_range, dst_coefficients, dst_range,
		brightness, contrast, saturation );
}
Example #10
struct SwsContext *update_scaler_configuration
(
    struct SwsContext *sws_ctx,
    int                flags,
    int                width,
    int                height,
    enum AVPixelFormat input_pixel_format,
    enum AVPixelFormat output_pixel_format,
    enum AVColorSpace  colorspace,
    int                yuv_range
)
{
    if( sws_ctx )
        sws_freeContext( sws_ctx );
    sws_ctx = sws_alloc_context();
    if( !sws_ctx )
        return NULL;
    av_opt_set_int( sws_ctx, "sws_flags",  flags,               0 );
    av_opt_set_int( sws_ctx, "srcw",       width,               0 );
    av_opt_set_int( sws_ctx, "srch",       height,              0 );
    av_opt_set_int( sws_ctx, "dstw",       width,               0 );
    av_opt_set_int( sws_ctx, "dsth",       height,              0 );
    av_opt_set_int( sws_ctx, "src_format", input_pixel_format,  0 );
    av_opt_set_int( sws_ctx, "dst_format", output_pixel_format, 0 );
    const int *yuv2rgb_coeffs = sws_getCoefficients( colorspace );
    sws_setColorspaceDetails( sws_ctx,
                              yuv2rgb_coeffs, yuv_range,
                              yuv2rgb_coeffs, yuv_range,
                              0, 1 << 16, 1 << 16 );
    if( sws_init_context( sws_ctx, NULL, NULL ) < 0 )
    {
        sws_freeContext( sws_ctx );
        return NULL;
    }
    return sws_ctx;
}
Example #11
SwsContext *FFGetSwsContext(int SrcW, int SrcH, PixelFormat SrcFormat, int DstW, int DstH, PixelFormat DstFormat, int64_t Flags, int ColorSpace, int ColorRange) {
    Flags |= SWS_FULL_CHR_H_INT | SWS_FULL_CHR_H_INP;
#if LIBSWSCALE_VERSION_INT < AV_VERSION_INT(0, 12, 0)
    return sws_getContext(SrcW, SrcH, SrcFormat, DstW, DstH, DstFormat, Flags, 0, 0, 0);
#else
    SwsContext *Context = sws_alloc_context();
    if (!Context) return 0;

    // The intention here is to never change the color range.
    int Range; // 0 = limited range, 1 = full range
    if (ColorRange == AVCOL_RANGE_JPEG)
        Range = 1;
    else // explicit limited range, or unspecified
        Range = 0;

    av_opt_set_int(Context, "sws_flags", Flags, 0);
    av_opt_set_int(Context, "srcw",       SrcW, 0);
    av_opt_set_int(Context, "srch",       SrcH, 0);
    av_opt_set_int(Context, "dstw",       DstW, 0);
    av_opt_set_int(Context, "dsth",       DstH, 0);
    av_opt_set_int(Context, "src_range",  Range, 0);
    av_opt_set_int(Context, "dst_range",  Range, 0);
    av_opt_set_int(Context, "src_format", SrcFormat, 0);
    av_opt_set_int(Context, "dst_format", DstFormat, 0);

    sws_setColorspaceDetails(Context, sws_getCoefficients(ColorSpace), Range, sws_getCoefficients(ColorSpace), Range, 0, 1<<16, 1<<16);

    if(sws_init_context(Context, 0, 0) < 0){
        sws_freeContext(Context);
        return 0;
    }

    return Context;
#endif

}
Example #12
SwsContext *GetSwsContext(int SrcW, int SrcH, PixelFormat SrcFormat, int SrcColorSpace, int SrcColorRange, int DstW, int DstH, PixelFormat DstFormat, int DstColorSpace, int DstColorRange, int64_t Flags) {
    Flags |= SWS_FULL_CHR_H_INT | SWS_FULL_CHR_H_INP;
#if LIBSWSCALE_VERSION_INT < AV_VERSION_INT(0, 12, 0)
    return sws_getContext(SrcW, SrcH, SrcFormat, DstW, DstH, DstFormat, Flags, 0, 0, 0);
#else
    SwsContext *Context = sws_alloc_context();
    if (!Context) return 0;

    // 0 = limited range, 1 = full range
    int SrcRange = SrcColorRange == AVCOL_RANGE_JPEG;
    int DstRange = DstColorRange == AVCOL_RANGE_JPEG;

    av_opt_set_int(Context, "sws_flags",  Flags, 0);
    av_opt_set_int(Context, "srcw",       SrcW, 0);
    av_opt_set_int(Context, "srch",       SrcH, 0);
    av_opt_set_int(Context, "dstw",       DstW, 0);
    av_opt_set_int(Context, "dsth",       DstH, 0);
    av_opt_set_int(Context, "src_range",  SrcRange, 0);
    av_opt_set_int(Context, "dst_range",  DstRange, 0);
    av_opt_set_int(Context, "src_format", SrcFormat, 0);
    av_opt_set_int(Context, "dst_format", DstFormat, 0);

    sws_setColorspaceDetails(Context,
                             sws_getCoefficients(SrcColorSpace), SrcRange,
                             sws_getCoefficients(DstColorSpace), DstRange,
                             0, 1<<16, 1<<16);

    if(sws_init_context(Context, 0, 0) < 0) {
        sws_freeContext(Context);
        return 0;
    }

    return Context;
#endif

}
Example #13
void MediaEngine::updateSwsFormat(int videoPixelMode) {
#ifdef USE_FFMPEG
	auto codecIter = m_pCodecCtxs.find(m_videoStream);
	AVCodecContext *m_pCodecCtx = codecIter == m_pCodecCtxs.end() ? 0 : codecIter->second;

	AVPixelFormat swsDesired = getSwsFormat(videoPixelMode);
	if (swsDesired != m_sws_fmt && m_pCodecCtx != 0) {
		m_sws_fmt = swsDesired;
		m_sws_ctx = sws_getCachedContext
			(
				m_sws_ctx,
				m_pCodecCtx->width,
				m_pCodecCtx->height,
				m_pCodecCtx->pix_fmt,
				m_desWidth,
				m_desHeight,
				(AVPixelFormat)m_sws_fmt,
				SWS_BILINEAR,
				NULL,
				NULL,
				NULL
			);

		int *inv_coefficients;
		int *coefficients;
		int srcRange, dstRange;
		int brightness, contrast, saturation;

		if (sws_getColorspaceDetails(m_sws_ctx, &inv_coefficients, &srcRange, &coefficients, &dstRange, &brightness, &contrast, &saturation) != -1) {
			srcRange = 0;
			dstRange = 0;
			sws_setColorspaceDetails(m_sws_ctx, inv_coefficients, srcRange, coefficients, dstRange, brightness, contrast, saturation);
		}
	}
#endif
}
Example #14
void BenchmarkScale(unsigned int in_w, unsigned int in_h, unsigned int out_w, unsigned int out_h) {

	std::mt19937 rng(12345);
#if SSR_USE_X86_ASM
	bool use_ssse3 = (CPUFeatures::HasMMX() && CPUFeatures::HasSSE() && CPUFeatures::HasSSE2() && CPUFeatures::HasSSE3() && CPUFeatures::HasSSSE3());
#endif

	// the queue needs to use enough memory to make sure that the CPU cache is flushed
	unsigned int pixels = std::max(in_w * in_h, out_w * out_h);
	unsigned int queue_size = 1 + 20000000 / pixels;
	unsigned int run_size = queue_size * 20;

	// create queue
	std::vector<std::unique_ptr<ImageGeneric> > queue_in(queue_size);
	std::vector<std::unique_ptr<ImageGeneric> > queue_out(queue_size);
	for(unsigned int i = 0; i < queue_size; ++i) {
		queue_in[i] = NewImageBGRA(in_w, in_h, rng);
		queue_out[i] = NewImageBGRA(out_w, out_h, rng);
	}

	// run test
	unsigned int time_swscale = 0, time_fallback = 0, time_ssse3 = 0;
	{
		SwsContext *sws = sws_getCachedContext(NULL,
											   in_w, in_h, AV_PIX_FMT_BGRA,
											   out_w, out_h, AV_PIX_FMT_BGRA,
											   SWS_BILINEAR, NULL, NULL, NULL);
		if(sws == NULL) {
			Logger::LogError("[BenchmarkScale] " + Logger::tr("Error: Can't get swscale context!", "Don't translate 'swscale'"));
			throw LibavException();
		}
		sws_setColorspaceDetails(sws,
								 sws_getCoefficients(SWS_CS_ITU709), 0,
								 sws_getCoefficients(SWS_CS_DEFAULT), 0,
								 0, 1 << 16, 1 << 16);
		int64_t t1 = hrt_time_micro();
		for(unsigned int i = 0; i < run_size / 2; ++i) {
			unsigned int ii = i % queue_size;
			sws_scale(sws, queue_in[ii]->m_data.data(), queue_in[ii]->m_stride.data(), 0, in_h, queue_out[ii]->m_data.data(), queue_out[ii]->m_stride.data());
		}
		int64_t t2 = hrt_time_micro();
		time_swscale = (t2 - t1) / (run_size / 2);
	}
	{
		int64_t t1 = hrt_time_micro();
		for(unsigned int i = 0; i < run_size; ++i) {
			unsigned int ii = i % queue_size;
			Scale_BGRA_Fallback(in_w, in_h, queue_in[ii]->m_data[0], queue_in[ii]->m_stride[0],
								out_w, out_h, queue_out[ii]->m_data[0], queue_out[ii]->m_stride[0]);
		}
		int64_t t2 = hrt_time_micro();
		time_fallback = (t2 - t1) / run_size;
	}
#if SSR_USE_X86_ASM
	if(use_ssse3) {
		int64_t t1 = hrt_time_micro();
		for(unsigned int i = 0; i < run_size; ++i) {
			unsigned int ii = i % queue_size;
			Scale_BGRA_SSSE3(in_w, in_h, queue_in[ii]->m_data[0], queue_in[ii]->m_stride[0],
							 out_w, out_h, queue_out[ii]->m_data[0], queue_out[ii]->m_stride[0]);
		}
		int64_t t2 = hrt_time_micro();
		time_ssse3 = (t2 - t1) / run_size;
	}
#endif

	// print result
	QString in_size = QString("%1x%2").arg(in_w).arg(in_h);
	QString out_size = QString("%1x%2").arg(out_w).arg(out_h);
	Logger::LogInfo("[BenchmarkScale] " + Logger::tr("BGRA %1 to BGRA %2  |  SWScale %3 us  |  Fallback %4 us (%5%)  |  SSSE3 %6 us (%7%)")
					.arg(in_size, 9).arg(out_size, 9)
					.arg(time_swscale, 6)
					.arg(time_fallback, 6).arg(100 * time_fallback / time_swscale, 3)
					.arg(time_ssse3, 6).arg(100 * time_ssse3 / time_fallback, 3));

}
Example #15
void BenchmarkConvert(unsigned int w, unsigned int h, PixelFormat in_format, PixelFormat out_format, const QString& in_format_name, const QString& out_format_name, NewImageFunc in_image, NewImageFunc out_image, ConvertFunc fallback
#if SSR_USE_X86_ASM
, ConvertFunc ssse3
#endif
) {

	std::mt19937 rng(12345);
#if SSR_USE_X86_ASM
	bool use_ssse3 = (CPUFeatures::HasMMX() && CPUFeatures::HasSSE() && CPUFeatures::HasSSE2() && CPUFeatures::HasSSE3() && CPUFeatures::HasSSSE3());
#endif

	// the queue needs to use enough memory to make sure that the CPU cache is flushed
	unsigned int pixels = w * h;
	unsigned int queue_size = 1 + 20000000 / pixels;
	unsigned int run_size = queue_size * 20;

	// create queue
	std::vector<std::unique_ptr<ImageGeneric> > queue_in(queue_size);
	std::vector<std::unique_ptr<ImageGeneric> > queue_out(queue_size);
	for(unsigned int i = 0; i < queue_size; ++i) {
		queue_in[i] = in_image(w, h, rng);
		queue_out[i] = out_image(w, h, rng);
	}

	// run test
	unsigned int time_swscale = 0, time_fallback = 0, time_ssse3 = 0;
	{
		SwsContext *sws = sws_getCachedContext(NULL,
											   w, h, in_format,
											   w, h, out_format,
											   SWS_BILINEAR, NULL, NULL, NULL);
		if(sws == NULL) {
			Logger::LogError("[BenchmarkScale] " + Logger::tr("Error: Can't get swscale context!", "Don't translate 'swscale'"));
			throw LibavException();
		}
		sws_setColorspaceDetails(sws,
								 sws_getCoefficients(SWS_CS_ITU709), 0,
								 sws_getCoefficients(SWS_CS_DEFAULT), 0,
								 0, 1 << 16, 1 << 16);
		int64_t t1 = hrt_time_micro();
		for(unsigned int i = 0; i < run_size / 2; ++i) {
			unsigned int ii = i % queue_size;
			sws_scale(sws, queue_in[ii]->m_data.data(), queue_in[ii]->m_stride.data(), 0, h, queue_out[ii]->m_data.data(), queue_out[ii]->m_stride.data());
		}
		int64_t t2 = hrt_time_micro();
		time_swscale = (t2 - t1) / (run_size / 2);
	}
	{
		int64_t t1 = hrt_time_micro();
		for(unsigned int i = 0; i < run_size; ++i) {
			unsigned int ii = i % queue_size;
			fallback(w, h, queue_in[ii]->m_data[0], queue_in[ii]->m_stride[0], queue_out[ii]->m_data.data(), queue_out[ii]->m_stride.data());
		}
		int64_t t2 = hrt_time_micro();
		time_fallback = (t2 - t1) / run_size;
	}
#if SSR_USE_X86_ASM
	if(use_ssse3) {
		int64_t t1 = hrt_time_micro();
		for(unsigned int i = 0; i < run_size; ++i) {
			unsigned int ii = i % queue_size;
			ssse3(w, h, queue_in[ii]->m_data[0], queue_in[ii]->m_stride[0], queue_out[ii]->m_data.data(), queue_out[ii]->m_stride.data());
		}
		int64_t t2 = hrt_time_micro();
		time_ssse3 = (t2 - t1) / run_size;
	}
#endif

	// print result
	QString size = QString("%1x%2").arg(w).arg(h);
	Logger::LogInfo("[BenchmarkConvert] " + Logger::tr("%1 %2 to %3 %4  |  SWScale %5 us  |  Fallback %6 us (%7%)  |  SSSE3 %8 us (%9%)")
					.arg(in_format_name, 6).arg(size, 9).arg(out_format_name, 6).arg(size, 9)
					.arg(time_swscale, 6)
					.arg(time_fallback, 6).arg(100 * time_fallback / time_swscale, 3)
					.arg(time_ssse3, 6).arg(100 * time_ssse3 / time_fallback, 3));

}
Example #16
bool CFFmpegImage::CreateThumbnailFromSurface(unsigned char* bufferin, unsigned int width,
                                             unsigned int height, unsigned int format,
                                             unsigned int pitch,
                                             const std::string& destFile,
                                             unsigned char* &bufferout,
                                             unsigned int &bufferoutSize)
{
  // It seems XB_FMT_A8R8G8B8 means RGBA and not ARGB
  if (format != XB_FMT_A8R8G8B8)
  {
    CLog::Log(LOGERROR, "Supplied format: %d is not supported.", format);
    return false;
  }

  bool jpg_output = false;
  if (m_strMimeType == "image/jpeg" || m_strMimeType == "image/jpg")
    jpg_output = true;
  else if (m_strMimeType == "image/png")
    jpg_output = false;
  else
  {
    CLog::Log(LOGERROR, "Output Format is not supported: %s is not supported.", destFile.c_str());
    return false;
  }

  ThumbDataManagement tdm;

  tdm.codec = avcodec_find_encoder(jpg_output ? AV_CODEC_ID_MJPEG : AV_CODEC_ID_PNG);
  if (!tdm.codec)
  {
    CLog::Log(LOGERROR, "Your are missing a working encoder for format: %d", jpg_output ? AV_CODEC_ID_MJPEG : AV_CODEC_ID_PNG);
    return false;
  }

  tdm.avOutctx = avcodec_alloc_context3(tdm.codec);
  if (!tdm.avOutctx)
  {
    CLog::Log(LOGERROR, "Could not allocate context for thumbnail: %s", destFile.c_str());
    return false;
  }

  tdm.avOutctx->height = height;
  tdm.avOutctx->width = width;
  tdm.avOutctx->time_base.num = 1;
  tdm.avOutctx->time_base.den = 1;
  tdm.avOutctx->pix_fmt = jpg_output ? AV_PIX_FMT_YUVJ420P : AV_PIX_FMT_RGBA;
  tdm.avOutctx->flags = CODEC_FLAG_QSCALE;
  tdm.avOutctx->mb_lmin = tdm.avOutctx->qmin * FF_QP2LAMBDA;
  tdm.avOutctx->mb_lmax = tdm.avOutctx->qmax * FF_QP2LAMBDA;
  tdm.avOutctx->global_quality = tdm.avOutctx->qmin * FF_QP2LAMBDA;

  unsigned int internalBufOutSize = 0;

  int size = avpicture_get_size(tdm.avOutctx->pix_fmt, tdm.avOutctx->width, tdm.avOutctx->height);
  if (size < 0)
  {
    CLog::Log(LOGERROR, "Could not compute picture size for thumbnail: %s", destFile.c_str());
    CleanupLocalOutputBuffer();
    return false;
  }
  internalBufOutSize = (unsigned int) size;

  m_outputBuffer = (uint8_t*) av_malloc(internalBufOutSize);

  if (!m_outputBuffer)
  {
    CLog::Log(LOGERROR, "Could not generate allocate memory for thumbnail: %s", destFile.c_str());
    CleanupLocalOutputBuffer();
    return false;
  }


  tdm.intermediateBuffer = (uint8_t*) av_malloc(internalBufOutSize);
  if (!tdm.intermediateBuffer)
  {
    CLog::Log(LOGERROR, "Could not allocate memory for thumbnail: %s", destFile.c_str());
    CleanupLocalOutputBuffer();
    return false;
  }

  if (avcodec_open2(tdm.avOutctx, tdm.codec, NULL) < 0)
  {
    CLog::Log(LOGERROR, "Could not open avcodec context thumbnail: %s", destFile.c_str());
    CleanupLocalOutputBuffer();
    return false;
  }

  tdm.frame_input = av_frame_alloc();
  if (!tdm.frame_input)
  {
    CLog::Log(LOGERROR, "Could not allocate frame for thumbnail: %s", destFile.c_str());
    CleanupLocalOutputBuffer();
    return false;
  }

  // convert the RGB32 frame to AV_PIX_FMT_YUV420P - we use this later on as AV_PIX_FMT_YUVJ420P
  tdm.frame_temporary = av_frame_alloc();
  if (!tdm.frame_temporary)
  {
    CLog::Log(LOGERROR, "Could not allocate frame for thumbnail: %s", destFile.c_str());
    CleanupLocalOutputBuffer();
    return false;
  }

  if (avpicture_fill((AVPicture*)tdm.frame_temporary, tdm.intermediateBuffer, jpg_output ? AV_PIX_FMT_YUV420P : AV_PIX_FMT_RGBA, width, height) < 0)
  {
    CLog::Log(LOGERROR, "Could not fill picture for thumbnail: %s", destFile.c_str());
    CleanupLocalOutputBuffer();
    return false;
  }

  uint8_t* src[] = { bufferin, NULL, NULL, NULL };
  int srcStride[] = { (int) pitch, 0, 0, 0};

  //input size == output size which means only pix_fmt conversion
  tdm.sws = sws_getContext(width, height, AV_PIX_FMT_RGB32, width, height, jpg_output ? AV_PIX_FMT_YUV420P : AV_PIX_FMT_RGBA, 0, 0, 0, 0);
  if (!tdm.sws)
  {
    CLog::Log(LOGERROR, "Could not setup scaling context for thumbnail: %s", destFile.c_str());
    CleanupLocalOutputBuffer();
    return false;
  }

  // Setup jpeg range for sws
  if (jpg_output)
  {
    int* inv_table = nullptr;
    int* table = nullptr;
    int srcRange, dstRange, brightness, contrast, saturation;

    if (sws_getColorspaceDetails(tdm.sws, &inv_table, &srcRange, &table, &dstRange, &brightness, &contrast, &saturation) < 0)
    {
      CLog::Log(LOGERROR, "SWS_SCALE failed to get ColorSpaceDetails for thumbnail: %s", destFile.c_str());
      CleanupLocalOutputBuffer();
      return false;
    }
    dstRange = 1; // jpeg full range yuv420p output
    srcRange = 0; // RGB32 input
    if (sws_setColorspaceDetails(tdm.sws, inv_table, srcRange, table, dstRange, brightness, contrast, saturation) < 0)
    {
      CLog::Log(LOGERROR, "SWS_SCALE failed to set ColorSpace Details for thumbnail: %s", destFile.c_str());
      CleanupLocalOutputBuffer();
      return false;
    }
  }

  if (sws_scale(tdm.sws, src, srcStride, 0, height, tdm.frame_temporary->data, tdm.frame_temporary->linesize) < 0)
  {
    CLog::Log(LOGERROR, "SWS_SCALE failed for thumbnail: %s", destFile.c_str());
    CleanupLocalOutputBuffer();
    return false;
  }
  tdm.frame_input->pts = 1;
  tdm.frame_input->quality = tdm.avOutctx->global_quality;
  tdm.frame_input->data[0] = (uint8_t*) tdm.frame_temporary->data[0];
  tdm.frame_input->data[1] = (uint8_t*) tdm.frame_temporary->data[1];
  tdm.frame_input->data[2] = (uint8_t*) tdm.frame_temporary->data[2];
  tdm.frame_input->height = height;
  tdm.frame_input->width = width;
  tdm.frame_input->linesize[0] = tdm.frame_temporary->linesize[0];
  tdm.frame_input->linesize[1] = tdm.frame_temporary->linesize[1];
  tdm.frame_input->linesize[2] = tdm.frame_temporary->linesize[2];
  // this is deprecated but mjpeg is not yet transitioned
  tdm.frame_input->format = jpg_output ? AV_PIX_FMT_YUVJ420P : AV_PIX_FMT_RGBA;

  int got_package = 0;
  AVPacket avpkt;
  av_init_packet(&avpkt);
  avpkt.data = m_outputBuffer;
  avpkt.size = internalBufOutSize;

  if ((avcodec_encode_video2(tdm.avOutctx, &avpkt, tdm.frame_input, &got_package) < 0) || (got_package == 0))
  {
    CLog::Log(LOGERROR, "Could not encode thumbnail: %s", destFile.c_str());
    CleanupLocalOutputBuffer();
    return false;
  }

  bufferoutSize = avpkt.size;
  bufferout = m_outputBuffer;

  return true;
}
Example #17
bool CFFmpegImage::Decode(unsigned char * const pixels, unsigned int width, unsigned int height,
                          unsigned int pitch, unsigned int format)
{
  if (m_width == 0 || m_height == 0 || format != XB_FMT_A8R8G8B8)
    return false;

  if (!m_pFrame || !m_pFrame->data[0])
  {
    CLog::LogFunction(LOGERROR, __FUNCTION__, "AVFrame member not allocated");
    return false;
  }

  AVPicture* pictureRGB = static_cast<AVPicture*>(av_mallocz(sizeof(AVPicture)));
  if (!pictureRGB)
  {
    CLog::LogFunction(LOGERROR, __FUNCTION__, "AVPicture could not be allocated");
    return false;
  }

  int size = avpicture_fill(pictureRGB, NULL, AV_PIX_FMT_RGB32, width, height);
  if (size < 0)
  {
    CLog::LogFunction(LOGERROR, __FUNCTION__, "Could not allocate AVPicture member with %i x %i pixes", width, height);
    av_free(pictureRGB);
    return false;
  }

  bool needsCopy = false;
  int pixelsSize = pitch * height;
  if (size == pixelsSize && (int) pitch == pictureRGB->linesize[0])
  {
    // We can use the pixels buffer directly
    pictureRGB->data[0] = pixels;
  }
  else
  {
    // We need an extra buffer and copy it manually afterwards
    if (avpicture_alloc(pictureRGB, AV_PIX_FMT_RGB32, width, height) < 0)
    {
      CLog::LogFunction(LOGERROR, __FUNCTION__, "Could not allocate temp buffer of size %i bytes", size);
      av_free(pictureRGB);
      return false;
    }
    needsCopy = true;
  }

  // Jpeg formats in particular are full range, so we need to take care of that here.
  // Input formats like RGBA are handled correctly automatically.
  AVColorRange range = av_frame_get_color_range(m_pFrame);
  AVPixelFormat pixFormat = ConvertFormats(m_pFrame);

  // assumes quadratic maximums, e.g. 2048x2048
  float ratio = m_width / (float) m_height;
  unsigned int nHeight = m_originalHeight;
  unsigned int nWidth = m_originalWidth;
  if (nHeight > height)
  {
    nHeight = height;
    nWidth = (unsigned int) (nHeight * ratio + 0.5f);
  }
  if (nWidth > width)
  {
    nWidth = width;
    nHeight = (unsigned int) (nWidth / ratio + 0.5f);
  }

  struct SwsContext* context = sws_getContext(m_originalWidth, m_originalHeight, pixFormat,
    nWidth, nHeight, AV_PIX_FMT_RGB32, SWS_BICUBIC, NULL, NULL, NULL);

  if (range == AVCOL_RANGE_JPEG)
  {
    int* inv_table = nullptr;
    int* table = nullptr;
    int srcRange, dstRange, brightness, contrast, saturation;
    sws_getColorspaceDetails(context, &inv_table, &srcRange, &table, &dstRange, &brightness, &contrast, &saturation);
    srcRange = 1;
    sws_setColorspaceDetails(context, inv_table, srcRange, table, dstRange, brightness, contrast, saturation);
  }

  sws_scale(context, m_pFrame->data, m_pFrame->linesize, 0, m_originalHeight,
    pictureRGB->data, pictureRGB->linesize);
  sws_freeContext(context);

  if (needsCopy)
  {
    int minPitch = std::min((int)pitch, pictureRGB->linesize[0]);
    if (minPitch < 0)
    {
      CLog::LogFunction(LOGERROR, __FUNCTION__, "negative pitch or height");
      av_free(pictureRGB);
      return false;
    }
    const unsigned char *src = pictureRGB->data[0];
    unsigned char* dst = pixels;

    for (unsigned int y = 0; y < nHeight; y++)
    {
      memcpy(dst, src, minPitch);
      src += pictureRGB->linesize[0];
      dst += pitch;
    }

    avpicture_free(pictureRGB);
  }
  else
  {
    // we only borrowed the caller's pixel buffer, so detach it before freeing
    pictureRGB->data[0] = nullptr;
    avpicture_free(pictureRGB);
  }
  av_free(pictureRGB);

  // update width and height; the original dimensions are kept
  m_height = nHeight;
  m_width = nWidth;

  return true;
}
Example #18
bool CFFmpegImage::DecodeFrame(AVFrame* frame, unsigned int width, unsigned int height, unsigned int pitch, unsigned char * const pixels)
{
  if (pixels == nullptr)
  {
    CLog::Log(LOGERROR, "%s - No valid buffer pointer (nullptr) passed", __FUNCTION__);
    return false;
  }

  AVFrame* pictureRGB = av_frame_alloc();
  if (!pictureRGB)
  {
    CLog::LogF(LOGERROR, "AVFrame could not be allocated");
    return false;
  }

  // we align on 16 as the input provided by the Texture also aligns the buffer size to 16
  int size = av_image_fill_arrays(pictureRGB->data, pictureRGB->linesize, NULL, AV_PIX_FMT_RGB32, width, height, 16);
  if (size < 0)
  {
    CLog::LogF(LOGERROR, "Could not allocate AVFrame member with %i x %i pixes", width, height);
    av_frame_free(&pictureRGB);
    return false;
  }

  bool needsCopy = false;
  int pixelsSize = pitch * height;
  bool aligned = (((uintptr_t)(const void *)(pixels)) % (32) == 0);
  if (!aligned)
    CLog::Log(LOGDEBUG, "Alignment of external buffer is not suitable for ffmpeg intrinsics - please fix your malloc");

  if (aligned && size == pixelsSize && (int)pitch == pictureRGB->linesize[0])
  {
    // We can use the pixels buffer directly
    pictureRGB->data[0] = pixels;
  }
  else
  {
    // We need an extra buffer and copy it manually afterwards
    pictureRGB->format = AV_PIX_FMT_RGB32;
    pictureRGB->width = width;
    pictureRGB->height = height;
    // we copy the data manually later so give a chance to intrinsics (e.g. mmx, neon)
    if (av_frame_get_buffer(pictureRGB, 32) < 0)
    {
      CLog::LogF(LOGERROR, "Could not allocate temp buffer of size %i bytes", size);
      av_frame_free(&pictureRGB);
      return false;
    }
    needsCopy = true;
  }

  // Jpeg formats in particular are full range, so we need to take care of that here.
  // Input formats like RGBA are handled correctly automatically.
  AVColorRange range = frame->color_range;
  AVPixelFormat pixFormat = ConvertFormats(frame);

  // assumes quadratic maximums, e.g. 2048x2048
  float ratio = m_width / (float)m_height;
  unsigned int nHeight = m_originalHeight;
  unsigned int nWidth = m_originalWidth;
  if (nHeight > height)
  {
    nHeight = height;
    nWidth = (unsigned int)(nHeight * ratio + 0.5f);
  }
  if (nWidth > width)
  {
    nWidth = width;
    nHeight = (unsigned int)(nWidth / ratio + 0.5f);
  }

  struct SwsContext* context = sws_getContext(m_originalWidth, m_originalHeight, pixFormat,
    nWidth, nHeight, AV_PIX_FMT_RGB32, SWS_BICUBIC, NULL, NULL, NULL);

  if (range == AVCOL_RANGE_JPEG)
  {
    int* inv_table = nullptr;
    int* table = nullptr;
    int srcRange, dstRange, brightness, contrast, saturation;
    sws_getColorspaceDetails(context, &inv_table, &srcRange, &table, &dstRange, &brightness, &contrast, &saturation);
    srcRange = 1;
    sws_setColorspaceDetails(context, inv_table, srcRange, table, dstRange, brightness, contrast, saturation);
  }

  sws_scale(context, frame->data, frame->linesize, 0, m_originalHeight,
    pictureRGB->data, pictureRGB->linesize);
  sws_freeContext(context);

  if (needsCopy)
  {
    int minPitch = std::min((int)pitch, pictureRGB->linesize[0]);
    if (minPitch < 0)
    {
      CLog::LogF(LOGERROR, "negative pitch or height");
      av_frame_free(&pictureRGB);
      return false;
    }
    const unsigned char *src = pictureRGB->data[0];
    unsigned char* dst = pixels;

    for (unsigned int y = 0; y < nHeight; y++)
    {
      memcpy(dst, src, minPitch);
      src += pictureRGB->linesize[0];
      dst += pitch;
    }
    av_frame_free(&pictureRGB);
  }
  else
  {
    // we only lent the data, so don't let it get freed
    pictureRGB->data[0] = nullptr;
    av_frame_free(&pictureRGB);
  }

  // update width and height; the original dimensions are kept
  m_height = nHeight;
  m_width = nWidth;

  return true;
}
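
The JPEG-range idiom in the last two examples reduces to a small helper; this is a sketch of that pattern (the function name is hypothetical), preserving every detail except the source range flag:

	static void force_full_range_input(struct SwsContext *ctx)
	{
		int *inv_table, *table;
		int srcRange, dstRange, brightness, contrast, saturation;
		if (sws_getColorspaceDetails(ctx, &inv_table, &srcRange, &table, &dstRange,
		                             &brightness, &contrast, &saturation) != -1)
		{
			srcRange = 1; // 1 = full range (e.g. JPEG), 0 = limited range
			sws_setColorspaceDetails(ctx, inv_table, srcRange, table, dstRange,
			                         brightness, contrast, saturation);
		}
	}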
Example #19
// UCI-format image decoder; currently only 24-bit BGR and 32-bit BGRA output are supported.
// Returns 0 on success and a negative value on error. Not safe for concurrent multithreaded use.
__declspec(dllexport) int __stdcall UCIDecode(
	const void* src,	// input UCI data pointer (must not be null; the other pointer arguments may be null if that output is not needed)
	int 		srclen, // input UCI data length
	void**		dst,	// output pointer for the RAW data (BGR or BGRA format)
	int*		stride, // output row stride of the RAW data in bytes (if dst is not null, stride must not be null)
	int*		width,	// output image width
	int*		height, // output image height
	int*		bit)	// output image bpp (bits per pixel)
{
	extern AVCodec ff_h264_decoder;
	int ret = 0;
	U8* frame0 = 0;
	U8* frame1 = 0;
	U8* frame_data[4];
	int frame_size[4];
	int w, h, b, m;
	int ww, hh;
	int size, hasimg, bit10;
	U8* psrc, *pdst;
	const U8* srcend = (const U8*)src + srclen;

	if(dst	 ) *dst    = 0;
	if(stride) *stride = 0;
	if(width ) *width  = 0;
	if(height) *height = 0;
	if(bit	 ) *bit    = 0;
	if(!src || srclen < 0 || dst && !stride) return -1;
	if(srclen < 12) return -2;
	if((*(U32*)src & 0xffffff) != *(U32*)"UCI") return -3;
	switch(*((U8*)src + 3))
	{
	case '3':	b = 24; m = 0; break; // limited range, YUV420
	case '4':	b = 32; m = 0; break; // limited range, YUV420+A
	case 'T':	b = 24; m = 1; break; // limited range, Y+U+V(420)
	case 'Q':	b = 32; m = 1; break; // limited range, Y+U+V(420)+A
	case 0x20:	b = 24; m = 2; break; // full range, YUV420
	case 0x21:	b = 32; m = 2; break; // full range, YUV420+A
	case 0x40:	b = 24; m = 3; break; // full range, YUV444
	case 0x41:	b = 32; m = 3; break; // full range, YUV444+A
	default: return -4;
	}
	w = *(int*)((U8*)src + 4);
	h = *(int*)((U8*)src + 8);

	if(width ) *width  = w;
	if(height) *height = h;
	if(bit	 ) *bit    = b;
	if(stride) *stride = (m == 1 ? w : (w+7)&0xfffffff8) * (b/8);
	if(!dst) return 0;
	if(srclen < 12 + 4) return -5;
	ww = w + (w & ((m & 1) ^ 1));
	hh = h + (h & ((m & 1) ^ 1));

	frame_data[0] = (U8*)src + 12 + 4;
	frame_size[0] = *(int*)((U8*)src + 12);
	if(m != 1)
	{
		if(b == 24)
		{
			if(frame_size[0] < 0 || frame_data[0] + frame_size[0] > srcend)		return -10;
		}
		else
		{
			if(frame_size[0] < 0 || frame_data[0] + frame_size[0] + 4 > srcend) return -11;
			frame_data[3] = frame_data[0] + frame_size[0] + 4;
			frame_size[3] = *(int*)(frame_data[0] + frame_size[0]);
			if(frame_size[3] < 0 || frame_data[3] + frame_size[3] > srcend)		return -12;
		}
	}
	else
	{
		if(frame_size[0] < 0 || frame_data[0] + frame_size[0] + 4 > srcend)		return -13;
		frame_data[1] = frame_data[0] + frame_size[0] + 4;
		frame_size[1] = *(int*)(frame_data[0] + frame_size[0]);
		if(frame_size[1] < 0 || frame_data[1] + frame_size[1] + 4 > srcend)		return -14;
		frame_data[2] = frame_data[1] + frame_size[1] + 4;
		frame_size[2] = *(int*)(frame_data[1] + frame_size[1]);
		if(b == 24)
		{
			if(frame_size[2] < 0 || frame_data[2] + frame_size[2] > srcend)		return -15;
		}
		else
		{
			if(frame_size[2] < 0 || frame_data[2] + frame_size[2] + 4 > srcend) return -16;
			frame_data[3] = frame_data[2] + frame_size[2] + 4;
			frame_size[3] = *(int*)(frame_data[2] + frame_size[2]);
			if(frame_size[3] < 0 || frame_data[3] + frame_size[3] > srcend)		return -17;
		}
	}

	EnterCriticalSection(&g_cs);
	if(!g_frame && !(g_frame = avcodec_alloc_frame()))							{ ret = -20; goto end_; }
	if(!g_context && !(g_context = avcodec_alloc_context3(&ff_h264_decoder)))	{ ret = -21; goto end_; }
	g_context->flags &= ~CODEC_FLAG_EMU_EDGE;
	if(av_log_get_level() >= AV_LOG_DEBUG) g_context->debug = -1;
	if(avcodec_open2(g_context, &ff_h264_decoder, 0) < 0)						{ ret = -22; goto end_; }
	g_context->flags &= ~CODEC_FLAG_EMU_EDGE;
	g_packet.data = frame_data[0];
	g_packet.size = frame_size[0];
	size = avcodec_decode_video2(g_context, g_frame, &hasimg, &g_packet);
	if(size <= 0)																{ ret = -100 + size; goto end_; }
	if(!hasimg) 																{ ret = -23; goto end_; }
	if(g_context->width < ww || g_context->height < hh)							{ ret = -24; goto end_; }
	if(!g_frame->data[0] || m != 1 && (!g_frame->data[1] || !g_frame->data[2]))	{ ret = -25; goto end_; }
	if(g_frame->linesize[1] != g_frame->linesize[2])							{ ret = -26; goto end_; }
	bit10 = ((H264Context*)g_context->priv_data)->sps.bit_depth_luma;
	if(bit10 == 8) bit10 = 0; else if(bit10 != 10)								{ ret = -27; goto end_; }

	if(m != 1)
	{
		enum AVPixelFormat pfsrc, pfdst = (b == 24 ? AV_PIX_FMT_BGR24 : AV_PIX_FMT_BGRA);
		if(m != 3)	pfsrc = (bit10 ? AV_PIX_FMT_YUV420P10LE : AV_PIX_FMT_YUV420P);
		else		pfsrc = (bit10 ? AV_PIX_FMT_YUV444P10LE : AV_PIX_FMT_YUV444P);
		if(!(*dst = malloc(*stride * hh)))						{ ret = -28; goto end_; }
		g_swsctx = sws_getCachedContext(g_swsctx, ww, hh, pfsrc, ww, hh, pfdst, SWS_LANCZOS, 0, 0, 0);
		if(!g_swsctx)											{ ret = -29; goto end_; }
		sws_setColorspaceDetails(g_swsctx, g_cs_table, m != 0, g_cs_table, m != 0, 0, 1 << 16, 1 << 16);
		if(sws_scale(g_swsctx, g_frame->data, g_frame->linesize, 0, hh, dst, stride) != hh) { ret = -30; goto end_; }
	}
	else
	{
		if(!(frame0 = (U8*)malloc(g_frame->linesize[0] * h)))	{ ret = -31; goto end_; }
		memcpy(frame0, g_frame->data[0], g_frame->linesize[0] * h);
		frame_data[0] = frame0;
		frame_size[0] = g_frame->linesize[0];
		g_packet.data = frame_data[1];
		g_packet.size = frame_size[1];
		size = avcodec_decode_video2(g_context, g_frame, &hasimg, &g_packet);
		if(size <= 0)											{ ret = -200 + size; goto end_; }
		if(!hasimg) 											{ ret = -32; goto end_; }
		if(g_context->width < ww || g_context->height < hh)		{ ret = -33; goto end_; }
		if(!g_frame->data[0])									{ ret = -34; goto end_; }
		if(!(frame1 = (U8*)malloc(g_frame->linesize[0] * h)))	{ ret = -35; goto end_; }
		memcpy(frame1, g_frame->data[0], g_frame->linesize[0] * h);
		frame_data[1] = frame1;
		frame_size[1] = g_frame->linesize[0];
		g_packet.data = frame_data[2];
		g_packet.size = frame_size[2];
		size = avcodec_decode_video2(g_context, g_frame, &hasimg, &g_packet);
		if(size <= 0)											{ ret = -300 + size; goto end_; }
		if(!hasimg) 											{ ret = -36; goto end_; }
		if(g_context->width < ww || g_context->height < hh)		{ ret = -37; goto end_; }
		if(!g_frame->data[0])									{ ret = -38; goto end_; }
		frame_data[2] = g_frame->data[0];
		frame_size[2] = g_frame->linesize[0];
		if(!(*dst = malloc(*stride * h)))						{ ret = -39; goto end_; }
		if(b == 24) YUV444_BGR (*dst, w * 3, frame_data, frame_size, w, h);
		else		YUV444_BGRA(*dst, w * 4, frame_data, frame_size, w, h);
	}

	if(b == 32)
	{
		avcodec_close(g_context);
		if(!(g_context = avcodec_alloc_context3(&ff_h264_decoder)))	{ ret = -40; goto end_; }
		if(av_log_get_level() >= AV_LOG_DEBUG) g_context->debug = -1;
		if(avcodec_open2(g_context, &ff_h264_decoder, 0) < 0)	{ ret = -41; goto end_; }
		g_packet.data = frame_data[3];
		g_packet.size = frame_size[3];
		size = avcodec_decode_video2(g_context, g_frame, &hasimg, &g_packet);
		if(size <= 0)											{ ret = -400 + size; goto end_; }
		if(!hasimg) 											{ ret = -42; goto end_; }
		if(g_context->width < ww || g_context->height < hh)		{ ret = -43; goto end_; }
		if(!g_frame->data[0])									{ ret = -44; goto end_; }
		psrc = g_frame->data[0];
		pdst = (U8*)*dst + 3;
		if(!bit10)
			for(; h; --h)
			{
				for(m = 0; m < w; ++m)
					pdst[m * 4] = psrc[m];
				psrc += g_frame->linesize[0];
				pdst += *stride;
			}
		else
			for(; h; --h)
			{
				for(m = 0; m < w; ++m)
					pdst[m * 4] = ((U16*)psrc)[m] >> 2;
				psrc += g_frame->linesize[0];
				pdst += *stride;
			}
	}

end_:
	if(ret < 0 && dst && *dst) { free(*dst); *dst = 0; }
	if(frame0) free(frame0);
	if(frame1) free(frame1);
	LeaveCriticalSection(&g_cs);
	return ret;
}
Example #20
static int startffmpeg(struct anim *anim)
{
	int i, videoStream;

	AVCodec *pCodec;
	AVFormatContext *pFormatCtx = NULL;
	AVCodecContext *pCodecCtx;
	int frs_num;
	double frs_den;
	int streamcount;

#ifdef FFMPEG_SWSCALE_COLOR_SPACE_SUPPORT
	/* The following for color space determination */
	int srcRange, dstRange, brightness, contrast, saturation;
	int *table;
	const int *inv_table;
#endif

	if (anim == 0) return(-1);

	streamcount = anim->streamindex;

	if (avformat_open_input(&pFormatCtx, anim->name, NULL, NULL) != 0) {
		return -1;
	}

	if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
		av_close_input_file(pFormatCtx);
		return -1;
	}

	av_dump_format(pFormatCtx, 0, anim->name, 0);


	/* Find the video stream */
	videoStream = -1;

	for (i = 0; i < pFormatCtx->nb_streams; i++)
		if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
			if (streamcount > 0) {
				streamcount--;
				continue;
			}
			videoStream = i;
			break;
		}

	if (videoStream == -1) {
		av_close_input_file(pFormatCtx);
		return -1;
	}

	pCodecCtx = pFormatCtx->streams[videoStream]->codec;

	/* Find the decoder for the video stream */
	pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
	if (pCodec == NULL) {
		av_close_input_file(pFormatCtx);
		return -1;
	}

	pCodecCtx->workaround_bugs = 1;

	if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
		av_close_input_file(pFormatCtx);
		return -1;
	}

	anim->duration = ceil(pFormatCtx->duration *
	                      av_q2d(pFormatCtx->streams[videoStream]->r_frame_rate) /
	                      AV_TIME_BASE);

	frs_num = pFormatCtx->streams[videoStream]->r_frame_rate.num;
	frs_den = pFormatCtx->streams[videoStream]->r_frame_rate.den;

	frs_den *= AV_TIME_BASE;

	while (frs_num % 10 == 0 && frs_den >= 2.0 && frs_num > 10) {
		frs_num /= 10;
		frs_den /= 10;
	}

	anim->frs_sec = frs_num;
	anim->frs_sec_base = frs_den;

	anim->params = 0;

	anim->x = pCodecCtx->width;
	anim->y = av_get_cropped_height_from_codec(pCodecCtx);

	anim->pFormatCtx = pFormatCtx;
	anim->pCodecCtx = pCodecCtx;
	anim->pCodec = pCodec;
	anim->videoStream = videoStream;

	anim->interlacing = 0;
	anim->orientation = 0;
	anim->framesize = anim->x * anim->y * 4;

	anim->curposition = -1;
	anim->last_frame = 0;
	anim->last_pts = -1;
	anim->next_pts = -1;
	anim->next_packet.stream_index = -1;

	anim->pFrame = avcodec_alloc_frame();
	anim->pFrameComplete = FALSE;
	anim->pFrameDeinterlaced = avcodec_alloc_frame();
	anim->pFrameRGB = avcodec_alloc_frame();

	if (avpicture_get_size(PIX_FMT_RGBA, anim->x, anim->y) !=
	    anim->x * anim->y * 4)
	{
		fprintf(stderr,
		        "ffmpeg has changed alloc scheme ... ARGHHH!\n");
		avcodec_close(anim->pCodecCtx);
		av_close_input_file(anim->pFormatCtx);
		av_free(anim->pFrameRGB);
		av_free(anim->pFrameDeinterlaced);
		av_free(anim->pFrame);
		anim->pCodecCtx = NULL;
		return -1;
	}

	if (anim->ib_flags & IB_animdeinterlace) {
		avpicture_fill((AVPicture *) anim->pFrameDeinterlaced,
		               MEM_callocN(avpicture_get_size(
		                               anim->pCodecCtx->pix_fmt,
		                               anim->pCodecCtx->width,
		                               anim->pCodecCtx->height),
		                           "ffmpeg deinterlace"),
		               anim->pCodecCtx->pix_fmt, 
		               anim->pCodecCtx->width,
		               anim->pCodecCtx->height);
	}

	if (pCodecCtx->has_b_frames) {
		anim->preseek = 25; /* FIXME: detect gopsize ... */
	}
	else {
		anim->preseek = 0;
	}
	
	anim->img_convert_ctx = sws_getContext(
	        anim->x,
	        anim->y,
	        anim->pCodecCtx->pix_fmt,
	        anim->x,
	        anim->y,
	        PIX_FMT_RGBA,
	        SWS_FAST_BILINEAR | SWS_PRINT_INFO | SWS_FULL_CHR_H_INT,
	        NULL, NULL, NULL);
		
	if (!anim->img_convert_ctx) {
		fprintf(stderr,
		        "Can't transform color space??? Bailing out...\n");
		avcodec_close(anim->pCodecCtx);
		av_close_input_file(anim->pFormatCtx);
		av_free(anim->pFrameRGB);
		av_free(anim->pFrameDeinterlaced);
		av_free(anim->pFrame);
		anim->pCodecCtx = NULL;
		return -1;
	}

#ifdef FFMPEG_SWSCALE_COLOR_SPACE_SUPPORT
	/* Try to detect if input has 0-255 YCbCr range (JFIF JPEG, Motion JPEG) */
	if (!sws_getColorspaceDetails(anim->img_convert_ctx, (int **)&inv_table, &srcRange,
	                              &table, &dstRange, &brightness, &contrast, &saturation))
	{
		srcRange = srcRange || anim->pCodecCtx->color_range == AVCOL_RANGE_JPEG;
		inv_table = sws_getCoefficients(anim->pCodecCtx->colorspace);

		if (sws_setColorspaceDetails(anim->img_convert_ctx, (int *)inv_table, srcRange,
		                             table, dstRange, brightness, contrast, saturation))
		{
			fprintf(stderr, "Warning: Could not set libswscale colorspace details.\n");
		}
	}
	else {
		fprintf(stderr, "Warning: Could not set libswscale colorspace details.\n");
	}
#endif
		
	return (0);
}
Example #21
static ImBuf *avi_fetchibuf(struct anim *anim, int position)
{
	ImBuf *ibuf = NULL;
	int *tmp;
	int y;
	
	if (anim == NULL) {
		return NULL;
	}

#if defined(_WIN32) && !defined(FREE_WINDOWS)
	if (anim->avistreams) {
		LPBITMAPINFOHEADER lpbi;

		if (anim->pgf) {
			lpbi = AVIStreamGetFrame(anim->pgf, position + AVIStreamStart(anim->pavi[anim->firstvideo]));
			if (lpbi) {
				ibuf = IMB_ibImageFromMemory((unsigned char *) lpbi, 100, IB_rect, anim->colorspace, "<avi_fetchibuf>");
//Oh brother...
			}
		}
	}
	else {
#else
	if (1) {
#endif
		ibuf = IMB_allocImBuf(anim->x, anim->y, 24, IB_rect);

		tmp = AVI_read_frame(anim->avi, AVI_FORMAT_RGB32, position,
		                     AVI_get_stream(anim->avi, AVIST_VIDEO, 0));
		
		if (tmp == NULL) {
			printf("Error reading frame from AVI: '%s'\n", anim->name);
			IMB_freeImBuf(ibuf);
			return NULL;
		}

		for (y = 0; y < anim->y; y++) {
			memcpy(&(ibuf->rect)[((anim->y - y) - 1) * anim->x],  &tmp[y * anim->x],
			       anim->x * 4);
		}
		
		MEM_freeN(tmp);
	}
	
	ibuf->rect_colorspace = colormanage_colorspace_get_named(anim->colorspace);

	return ibuf;
}
#endif  /* WITH_AVI */
