static int sharpness_vaapi_build_filter_params(AVFilterContext *avctx)
{
    VAAPIVPPContext *vpp_ctx   = avctx->priv;
    SharpnessVAAPIContext *ctx = avctx->priv;

    VAProcFilterCap caps;

    VAStatus vas;
    uint32_t num_caps = 1;

    VAProcFilterParameterBuffer sharpness;

    vas = vaQueryVideoProcFilterCaps(vpp_ctx->hwctx->display, vpp_ctx->va_context,
                                     VAProcFilterSharpening,
                                     &caps, &num_caps);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to query sharpness caps "
               "context: %d (%s).\n", vas, vaErrorStr(vas));
        return AVERROR(EIO);
    }

    sharpness.type  = VAProcFilterSharpening;
    sharpness.value = map(ctx->sharpness,
                          SHARPNESS_MIN, SHARPNESS_MAX,
                          caps.range.min_value,
                          caps.range.max_value);
    return ff_vaapi_vpp_make_param_buffers(avctx,
                                           VAProcFilterParameterBufferType,
                                           &sharpness, sizeof(sharpness), 1);
}
static int denoise_vaapi_build_filter_params(AVFilterContext *avctx)
{
    VAAPIVPPContext *vpp_ctx = avctx->priv;
    DenoiseVAAPIContext *ctx = avctx->priv;

    VAProcFilterCap caps;

    VAStatus vas;
    uint32_t num_caps = 1;

    VAProcFilterParameterBuffer denoise;

    vas = vaQueryVideoProcFilterCaps(vpp_ctx->hwctx->display, vpp_ctx->va_context,
                                     VAProcFilterNoiseReduction,
                                     &caps, &num_caps);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to query denoise caps "
               "context: %d (%s).\n", vas, vaErrorStr(vas));
        return AVERROR(EIO);
    }

    denoise.type  = VAProcFilterNoiseReduction;
    denoise.value = map(ctx->denoise, DENOISE_MIN, DENOISE_MAX,
                        caps.range.min_value,
                        caps.range.max_value);
    return ff_vaapi_vpp_make_param_buffers(avctx, VAProcFilterParameterBufferType,
                                           &denoise, sizeof(denoise), 1);
}
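
Both functions above (and the procamp example further down) scale the user-facing option into the range reported by the driver with a small map() helper that is not included in these snippets. A minimal sketch of what such a linear-rescale helper presumably looks like:

/* Sketch of the (not shown) map() helper assumed by the examples above:
 * linearly rescale x from [in_min, in_max] to [out_min, out_max]. */
static float map(float x, float in_min, float in_max,
                 float out_min, float out_max)
{
    double slope = (double)(out_max - out_min) / (in_max - in_min);

    return (float)(out_min + slope * (x - in_min));
}
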
Example #3
VaApiFilterInfo::VaApiFilterInfo(VAContextID context, VAProcFilterType type) {
	m_type = type;
	uint size = 0;
	auto dpy = VaApi::glx();
	switch (type) {
	case VAProcFilterNoiseReduction:
	case VAProcFilterSharpening: {
		VAProcFilterCap cap; size = 1;
		if (!isSuccess(vaQueryVideoProcFilterCaps(dpy, context, type, &cap, &size)) || size != 1)
			return;
		m_caps.resize(1); m_caps[0].algorithm = type; m_caps[0].range = cap.range;
		break;
	} case VAProcFilterDeinterlacing: {
		size = VAProcDeinterlacingCount;
		VAProcFilterCapDeinterlacing caps[VAProcDeinterlacingCount];
		if (!isSuccess(vaQueryVideoProcFilterCaps(dpy, context, VAProcFilterDeinterlacing, caps, &size)))
			return;
		m_caps.resize(size);
		for (uint i=0; i<size; ++i)
			m_caps[i].algorithm = caps[i].type;
		break;
	} case VAProcFilterColorBalance: {
		size = VAProcColorBalanceCount;
		VAProcFilterCapColorBalance caps[VAProcColorBalanceCount];
		if (!isSuccess(vaQueryVideoProcFilterCaps(dpy, context, VAProcFilterColorBalance, caps, &size)))
			return;
		m_caps.resize(size);
		for (uint i=0; i<size; ++i) {
			m_caps[i].algorithm = caps[i].type;
			m_caps[i].range = caps[i].range;
		}
		break;
	} default:
		return;
	}
	m_algorithms.resize(m_caps.size());
	for (int i=0; i<m_caps.size(); ++i)
		m_algorithms[i] = m_caps[i].algorithm;
}
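
A point worth noting about the call convention shared by all of these examples: vaQueryVideoProcFilterCaps() takes a caller-allocated caps array plus an in/out element count, and on success the driver overwrites the count with the number of entries it actually filled in. A self-contained sketch of that idiom (display and context stand in for an already-initialized VADisplay and VPP VAContextID):

/* Requires <va/va.h> and <va/va_vpp.h>. */
VAProcFilterCapDeinterlacing caps[VAProcDeinterlacingCount];
unsigned int num_caps = VAProcDeinterlacingCount;      /* in: array capacity */
VAStatus vas = vaQueryVideoProcFilterCaps(display, context,
                                          VAProcFilterDeinterlacing,
                                          caps, &num_caps);
if (vas == VA_STATUS_SUCCESS) {
    /* out: num_caps is now the number of entries the driver filled in,
     * so only caps[0] .. caps[num_caps - 1] are valid. */
}
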
static int procamp_vaapi_build_filter_params(AVFilterContext *avctx)
{
    VAAPIVPPContext *vpp_ctx = avctx->priv;
    ProcampVAAPIContext *ctx = avctx->priv;
    VAStatus vas;
    VAProcFilterParameterBufferColorBalance procamp_params[4];
    VAProcFilterCapColorBalance procamp_caps[VAProcColorBalanceCount];
    int num_caps;
    int i = 0;

    memset(&procamp_params, 0, sizeof(procamp_params));
    memset(&procamp_caps, 0, sizeof(procamp_caps));

    num_caps = VAProcColorBalanceCount;
    vas = vaQueryVideoProcFilterCaps(vpp_ctx->hwctx->display, vpp_ctx->va_context,
                                     VAProcFilterColorBalance, &procamp_caps, &num_caps);

    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to query procamp "
               "filter caps: %d (%s).\n", vas, vaErrorStr(vas));
        return AVERROR(EIO);
    }

    /* brightness */
    procamp_params[i].type   = VAProcFilterColorBalance;
    procamp_params[i].attrib = VAProcColorBalanceBrightness;
    procamp_params[i].value  = map(ctx->bright, BRIGHTNESS_MIN, BRIGHTNESS_MAX,
                                   procamp_caps[VAProcColorBalanceBrightness-1].range.min_value,
                                   procamp_caps[VAProcColorBalanceBrightness-1].range.max_value);
    i++;

    /* contrast */
    procamp_params[i].type   = VAProcFilterColorBalance;
    procamp_params[i].attrib = VAProcColorBalanceContrast;
    procamp_params[i].value  = map(ctx->contrast, CONTRAST_MIN, CONTRAST_MAX,
                                   procamp_caps[VAProcColorBalanceContrast-1].range.min_value,
                                   procamp_caps[VAProcColorBalanceContrast-1].range.max_value);
    i++;

    /* hue */
    procamp_params[i].type   = VAProcFilterColorBalance;
    procamp_params[i].attrib = VAProcColorBalanceHue;
    procamp_params[i].value  = map(ctx->hue, HUE_MIN, HUE_MAX,
                                   procamp_caps[VAProcColorBalanceHue-1].range.min_value,
                                   procamp_caps[VAProcColorBalanceHue-1].range.max_value);
    i++;

    /* saturation */
    procamp_params[i].type   = VAProcFilterColorBalance;
    procamp_params[i].attrib = VAProcColorBalanceSaturation;
    procamp_params[i].value  = map(ctx->saturation, SATURATION_MIN, SATURATION_MAX,
                                   procamp_caps[VAProcColorBalanceSaturation-1].range.min_value,
                                   procamp_caps[VAProcColorBalanceSaturation-1].range.max_value);
    i++;

    return ff_vaapi_vpp_make_param_buffers(avctx,
                                           VAProcFilterParameterBufferType,
                                           &procamp_params,
                                           sizeof(procamp_params[0]),
                                           i);
}
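
The procamp example indexes procamp_caps[] as VAProcColorBalanceBrightness - 1 and so on, which relies on the driver reporting the color-balance caps in enum order. A more defensive variant (a sketch, not part of the original code) searches the returned array for the matching attribute instead:

/* Hypothetical helper: find the cap entry whose type matches the requested
 * color-balance attribute rather than assuming enum-ordered indexing. */
static const VAProcFilterCapColorBalance *
find_colorbalance_cap(const VAProcFilterCapColorBalance *caps, int num_caps,
                      VAProcColorBalanceType attrib)
{
    int i;
    for (i = 0; i < num_caps; i++) {
        if (caps[i].type == attrib)
            return &caps[i];
    }
    return NULL; /* attribute not supported by this driver */
}
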
Example #5
void VAApiWriter::init_vpp()
{
#ifdef HAVE_VPP
	use_vpp = true;
	if
	(
		vaCreateConfig( VADisp, ( VAProfile )-1, VAEntrypointVideoProc, NULL, 0, &config_vpp ) == VA_STATUS_SUCCESS &&
		vaCreateContext( VADisp, config_vpp, 0, 0, 0, NULL, 0, &context_vpp ) == VA_STATUS_SUCCESS &&
		vaCreateSurfaces( &id_vpp, 1 )
	)
	{
		unsigned num_filters = VAProcFilterCount;
		VAProcFilterType filters[ VAProcFilterCount ];
		if ( vaQueryVideoProcFilters( VADisp, context_vpp, filters, &num_filters ) != VA_STATUS_SUCCESS )
			num_filters = 0;
		if ( num_filters )
		{
			if ( vpp_deint_type != VAProcDeinterlacingNone )
				for ( unsigned i = 0 ; i < num_filters ; ++i )
					if ( filters[ i ] == VAProcFilterDeinterlacing )
					{
						VAProcFilterCapDeinterlacing deinterlacing_caps[ VAProcDeinterlacingCount ];
						unsigned num_deinterlacing_caps = VAProcDeinterlacingCount;
						if ( vaQueryVideoProcFilterCaps( VADisp, context_vpp, VAProcFilterDeinterlacing, &deinterlacing_caps, &num_deinterlacing_caps ) != VA_STATUS_SUCCESS )
							num_deinterlacing_caps = 0;
						bool vpp_deint_types[ 2 ] = { false };
						for ( unsigned j = 0 ; j < num_deinterlacing_caps ; ++j )
						{
							switch ( deinterlacing_caps[ j ].type )
							{
								case VAProcDeinterlacingMotionAdaptive:
									vpp_deint_types[ 0 ] = true;
									break;
								case VAProcDeinterlacingMotionCompensated:
									vpp_deint_types[ 1 ] = true;
									break;
								default:
									break;
							}
						}
						if ( vpp_deint_type == VAProcDeinterlacingMotionCompensated && !vpp_deint_types[ 1 ] )
						{
							QMPlay2Core.log( tr( "Nie obsługiwany algorytm usuwania przeplotu" ) + " - Motion compensated", ErrorLog | LogOnce );
							vpp_deint_type = VAProcDeinterlacingMotionAdaptive;
						}
						if ( vpp_deint_type == VAProcDeinterlacingMotionAdaptive && !vpp_deint_types[ 0 ] )
						{
							QMPlay2Core.log( tr( "Nie obsługiwany algorytm usuwania przeplotu" ) + " - Motion adaptive", ErrorLog | LogOnce );
							vpp_deint_type = VAProcDeinterlacingNone;
						}
						if ( vpp_deint_type != VAProcDeinterlacingNone )
						{
							VAProcFilterParameterBufferDeinterlacing deint_params = { VAProcFilterDeinterlacing, vpp_deint_type, VPP_TFF };
							if ( vaCreateBuffer( VADisp, context_vpp, VAProcFilterParameterBufferType, sizeof deint_params, 1, &deint_params, &vpp_deint ) != VA_STATUS_SUCCESS )
								vpp_deint = VA_INVALID_ID;
						}
						break;
					}
			return;
		}
	}
	if ( vpp_deint_type != VAProcDeinterlacingNone )
		QMPlay2Core.log( tr( "Nie można otworzyć filtrów usuwających przeplot" ), ErrorLog | LogOnce );
	clr_vpp();
#endif
}
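
The vpp_deint buffer created in this example is only half the story: it still has to be attached to the pipeline parameter buffer that drives each vaBeginPicture/vaRenderPicture call on the VPP context. A rough sketch of that step (pipe_param is a hypothetical name for the VAProcPipelineParameterBuffer filled elsewhere in the writer):

/* Sketch: attach the deinterlacing filter buffer, if it was created, to the
 * pipeline parameters used when rendering a frame through the VPP context. */
VABufferID filter_bufs[1];
unsigned int num_filter_bufs = 0;
if (vpp_deint != VA_INVALID_ID)
    filter_bufs[num_filter_bufs++] = vpp_deint;
pipe_param->filters     = num_filter_bufs ? filter_bufs : NULL;
pipe_param->num_filters = num_filter_bufs;
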
Example #6
void VAApiWriter::init_vpp()
{
#ifdef HAVE_VPP
	use_vpp = true;
	if
	(
		vaCreateConfig( VADisp, ( VAProfile )-1, VAEntrypointVideoProc, NULL, 0, &config_vpp ) == VA_STATUS_SUCCESS &&
		vaCreateContext( VADisp, config_vpp, 0, 0, 0, NULL, 0, &context_vpp ) == VA_STATUS_SUCCESS &&
		vaCreateSurfaces( &id_vpp, 1 )
	)
	{
		unsigned num_filters = VAProcFilterCount;
		VAProcFilterType filters[ VAProcFilterCount ];
		if ( vaQueryVideoProcFilters( VADisp, context_vpp, filters, &num_filters ) != VA_STATUS_SUCCESS )
			num_filters = 0;
		if ( num_filters )
		{
			/* Create a dummy filter (some drivers/API versions crash without any filter) */
			VAProcFilterParameterBufferBase none_params = { VAProcFilterNone };
			if ( vaCreateBuffer( VADisp, context_vpp, VAProcFilterParameterBufferType, sizeof none_params, 1, &none_params, &vpp_buffers[ VAProcFilterNone ] ) != VA_STATUS_SUCCESS )
				vpp_buffers[ VAProcFilterNone ] = VA_INVALID_ID;
			/* Searching deinterlacing filter */
			if ( vpp_deint_type != VAProcDeinterlacingNone )
				for ( unsigned i = 0 ; i < num_filters ; ++i )
					if ( filters[ i ] == VAProcFilterDeinterlacing )
					{
						VAProcFilterCapDeinterlacing deinterlacing_caps[ VAProcDeinterlacingCount ];
						unsigned num_deinterlacing_caps = VAProcDeinterlacingCount;
						if ( vaQueryVideoProcFilterCaps( VADisp, context_vpp, VAProcFilterDeinterlacing, &deinterlacing_caps, &num_deinterlacing_caps ) != VA_STATUS_SUCCESS )
							num_deinterlacing_caps = 0;
						bool vpp_deint_types[ 2 ] = { false };
						for ( unsigned j = 0 ; j < num_deinterlacing_caps ; ++j )
						{
							switch ( deinterlacing_caps[ j ].type )
							{
								case VAProcDeinterlacingMotionAdaptive:
									vpp_deint_types[ 0 ] = true;
									break;
								case VAProcDeinterlacingMotionCompensated:
									vpp_deint_types[ 1 ] = true;
									break;
								default:
									break;
							}
						}
						if ( vpp_deint_type == VAProcDeinterlacingMotionCompensated && !vpp_deint_types[ 1 ] )
						{
							QMPlay2Core.log( tr( "Nie obsługiwany algorytm usuwania przeplotu" ) + " - Motion compensated", ErrorLog | LogOnce );
							vpp_deint_type = VAProcDeinterlacingMotionAdaptive;
						}
						if ( vpp_deint_type == VAProcDeinterlacingMotionAdaptive && !vpp_deint_types[ 0 ] )
						{
							QMPlay2Core.log( tr( "Nie obsługiwany algorytm usuwania przeplotu" ) + " - Motion adaptive", ErrorLog | LogOnce );
							vpp_deint_type = VAProcDeinterlacingNone;
						}
						if ( vpp_deint_type != VAProcDeinterlacingNone )
						{
							VAProcFilterParameterBufferDeinterlacing deint_params = { VAProcFilterDeinterlacing, vpp_deint_type, 0 };
							if ( vaCreateBuffer( VADisp, context_vpp, VAProcFilterParameterBufferType, sizeof deint_params, 1, &deint_params, &vpp_buffers[ VAProcFilterDeinterlacing ] ) != VA_STATUS_SUCCESS )
								vpp_buffers[ VAProcFilterDeinterlacing ] = VA_INVALID_ID;
						}
						break;
					}
			return;
		}
	}
	if ( vpp_deint_type != VAProcDeinterlacingNone ) // Show the error only when the filter is required
		QMPlay2Core.log( "VA-API :: " + tr( "Cannot open video filters" ), ErrorLog | LogOnce );
	clr_vpp();
#endif
}
Example #7
static int deint_vaapi_build_filter_params(AVFilterContext *avctx)
{
    DeintVAAPIContext *ctx = avctx->priv;
    VAStatus vas;
    VAProcFilterParameterBufferDeinterlacing params;
    int i;

    ctx->nb_deint_caps = VAProcDeinterlacingCount;
    vas = vaQueryVideoProcFilterCaps(ctx->hwctx->display,
                                     ctx->va_context,
                                     VAProcFilterDeinterlacing,
                                     &ctx->deint_caps,
                                     &ctx->nb_deint_caps);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to query deinterlacing "
               "caps: %d (%s).\n", vas, vaErrorStr(vas));
        return AVERROR(EIO);
    }

    if (ctx->mode == VAProcDeinterlacingNone) {
        for (i = 0; i < ctx->nb_deint_caps; i++) {
            if (ctx->deint_caps[i].type > ctx->mode)
                ctx->mode = ctx->deint_caps[i].type;
        }
        av_log(avctx, AV_LOG_VERBOSE, "Picking %d (%s) as default "
               "deinterlacing mode.\n", ctx->mode,
               deint_vaapi_mode_name(ctx->mode));
    } else {
        for (i = 0; i < ctx->nb_deint_caps; i++) {
            if (ctx->deint_caps[i].type == ctx->mode)
                break;
        }
        if (i >= ctx->nb_deint_caps) {
            av_log(avctx, AV_LOG_ERROR, "Deinterlacing mode %d (%s) is "
                   "not supported.\n", ctx->mode,
                   deint_vaapi_mode_name(ctx->mode));
            return AVERROR(EINVAL);
        }
    }

    params.type      = VAProcFilterDeinterlacing;
    params.algorithm = ctx->mode;
    params.flags     = 0;

    av_assert0(ctx->filter_buffer == VA_INVALID_ID);
    vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,
                         VAProcFilterParameterBufferType,
                         sizeof(params), 1, &params,
                         &ctx->filter_buffer);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create deinterlace "
               "parameter buffer: %d (%s).\n", vas, vaErrorStr(vas));
        return AVERROR(EIO);
    }

    vas = vaQueryVideoProcPipelineCaps(ctx->hwctx->display,
                                       ctx->va_context,
                                       &ctx->filter_buffer, 1,
                                       &ctx->pipeline_caps);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to query pipeline "
               "caps: %d (%s).\n", vas, vaErrorStr(vas));
        return AVERROR(EIO);
    }

    ctx->extra_delay_for_timestamps = ctx->field_rate == 2 &&
        ctx->pipeline_caps.num_backward_references == 0;

    ctx->queue_depth = ctx->pipeline_caps.num_backward_references +
                       ctx->pipeline_caps.num_forward_references +
                       ctx->extra_delay_for_timestamps + 1;
    if (ctx->queue_depth > MAX_REFERENCES) {
        av_log(avctx, AV_LOG_ERROR, "Pipeline requires too many "
               "references (%u forward, %u back).\n",
               ctx->pipeline_caps.num_forward_references,
               ctx->pipeline_caps.num_backward_references);
        return AVERROR(ENOSYS);
    }

    return 0;
}
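
The reference counts obtained from vaQueryVideoProcPipelineCaps() above are what later dictate how many of the queued input surfaces accompany each output frame. A rough sketch of that hand-off (params and the two surface arrays are hypothetical names; the real filter builds them from its frame queue):

/* Sketch: pass as many past/future surfaces as the pipeline caps ask for. */
params->forward_references      = forward_surfaces;   /* frames before the current one */
params->num_forward_references  = ctx->pipeline_caps.num_forward_references;
params->backward_references     = backward_surfaces;  /* frames after the current one  */
params->num_backward_references = ctx->pipeline_caps.num_backward_references;
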