void test()
{
    VAEntrypoint entrypoints[5];
    int num_entrypoints, slice_entrypoint;
    VAConfigAttrib attrib[2];
    VAConfigID config_id;
    unsigned int fourcc, luma_stride, chroma_u_stride, chroma_v_stride, luma_offset, chroma_u_offset;
    unsigned int chroma_v_offset, buffer_name;
    unsigned int *p_buffer;
    int i, ret;
    char c;
    int frame_width = 640, frame_height = 480;
    VASurfaceID surface_id;
    VAImage image_id;
    unsigned char *usrbuf;
    VAStatus va_status;

    usrbuf = (unsigned char*)malloc(frame_width * frame_height * 2);
    ASSERT ( usrbuf != NULL);

    VASurfaceAttribExternalBuffers  vaSurfaceExternBuf;
    VASurfaceAttrib attrib_list[2];

    va_status = vaQueryConfigEntrypoints(va_dpy, VAProfileH264Baseline, entrypoints, &num_entrypoints);
    ASSERT( VA_STATUS_SUCCESS == va_status );

    for (slice_entrypoint = 0; slice_entrypoint < num_entrypoints; slice_entrypoint++) {
        if (entrypoints[slice_entrypoint] == VAEntrypointEncSlice)
            break;
    }
    if (slice_entrypoint == num_entrypoints) {
        /* could not find the EncSlice entry point */
        status("VAEntrypointEncSlice is not supported, exit.\n");
        ASSERT(0);
    }

    /* find out the format for the render target, and rate control mode */
    attrib[0].type = VAConfigAttribRTFormat;
    attrib[1].type = VAConfigAttribRateControl;
    va_status = vaGetConfigAttributes(va_dpy, VAProfileH264Baseline, VAEntrypointEncSlice, &attrib[0], 2);
    ASSERT( VA_STATUS_SUCCESS == va_status );

    if ((attrib[0].value & VA_RT_FORMAT_YUV420) == 0) {
        /* could not find the desired YUV420 RT format */
        status("VA_RT_FORMAT_YUV420 is not supported, exit\n");
        ASSERT(0);
    }
    if ((attrib[1].value & VA_RC_VBR) == 0) {
        /* could not find a matching RC mode */
        status("VBR mode is not supported, exit\n");
        ASSERT(0);
    }

    attrib[0].value = VA_RT_FORMAT_YUV420; /* set to desired RT format */
    attrib[1].value = VA_RC_VBR; /* set to desired RC mode */

    va_status = vaCreateConfig(va_dpy, VAProfileH264Baseline, VAEntrypointEncSlice, &attrib[0], 2, &config_id);
    ASSERT( VA_STATUS_SUCCESS == va_status );

    attrib_list[1].type = (VASurfaceAttribType)VASurfaceAttribExternalBufferDescriptor;
    attrib_list[0].type = (VASurfaceAttribType)VASurfaceAttribMemoryType;
    va_status = vaGetSurfaceAttributes(va_dpy, config_id, attrib_list, 2);
    ASSERT( VA_STATUS_SUCCESS == va_status );

    if (attrib_list[0].flags != VA_SURFACE_ATTRIB_NOT_SUPPORTED) {
        status("supported memory type:\n");
        if (attrib_list[0].value.value.i & VA_SURFACE_ATTRIB_MEM_TYPE_VA)
            status("\tVA_SURFACE_ATTRIB_MEM_TYPE_VA\n");
        if (attrib_list[0].value.value.i & VA_SURFACE_ATTRIB_MEM_TYPE_V4L2)
            status("\tVA_SURFACE_ATTRIB_MEM_TYPE_V4L2\n");
        if (attrib_list[0].value.value.i & VA_SURFACE_ATTRIB_MEM_TYPE_USER_PTR)
            status("\tVA_SURFACE_ATTRIB_MEM_TYPE_USER_PTR\n");
        if (attrib_list[0].value.value.i & VA_SURFACE_ATTRIB_MEM_TYPE_ANDROID_GRALLOC)
           status("\tVA_SURFACE_ATTRIB_MEM_TYPE_ANDROID_GRALLOC\n");
        if (attrib_list[0].value.value.i &  VA_SURFACE_ATTRIB_MEM_TYPE_ANDROID_ION)
            status("\tVA_SURFACE_ATTRIB_MEM_TYPE_ANDROID_ION\n");
        if (attrib_list[0].value.value.i & VA_SURFACE_ATTRIB_MEM_TYPE_KERNEL_DRM)
            status("\tVA_SURFACE_ATTRIB_MEM_TYPE_KERNEL_DRM\n");
    }

    if ((attrib_list[1].flags != VA_SURFACE_ATTRIB_NOT_SUPPORTED) &&
            (attrib_list[0].value.value.i & VA_SURFACE_ATTRIB_MEM_TYPE_USER_PTR)) {
        status("vaCreateSurfaces from external usr pointer\n");

        vaSurfaceExternBuf.buffers = (unsigned long*)malloc(sizeof(unsigned long));
        vaSurfaceExternBuf.buffers[0] = (unsigned long)usrbuf;
        vaSurfaceExternBuf.num_buffers = 1;
        vaSurfaceExternBuf.width = frame_width;
        vaSurfaceExternBuf.height = frame_height;
        vaSurfaceExternBuf.pitches[0] = vaSurfaceExternBuf.pitches[1] = vaSurfaceExternBuf.pitches[2] = frame_width;
        //vaSurfaceExternBuf.flags = VA_SURFACE_ATTRIB_MEM_TYPE_ANDROID_GRALLOC;
        vaSurfaceExternBuf.pixel_format = VA_FOURCC_NV12;
        //vaSurfaceExternBuf.pitches[0] = attribute_tpi->luma_stride;

        attrib_list[1].flags = VA_SURFACE_ATTRIB_SETTABLE;
        attrib_list[1].value.type = VAGenericValueTypePointer;
        attrib_list[1].value.value.p = (void *)&vaSurfaceExternBuf;

        attrib_list[0].flags = VA_SURFACE_ATTRIB_SETTABLE;
        attrib_list[0].value.type = VAGenericValueTypeInteger;
        attrib_list[0].value.value.i = VA_SURFACE_ATTRIB_MEM_TYPE_USER_PTR;

        va_status = vaCreateSurfaces(va_dpy, VA_RT_FORMAT_YUV420, frame_width, frame_height, &surface_id, 1, attrib_list, 2);
        ASSERT( VA_STATUS_SUCCESS == va_status );

        va_status = vaDeriveImage(va_dpy, surface_id, &image_id);
        ASSERT( VA_STATUS_SUCCESS == va_status );

        va_status = vaMapBuffer(va_dpy, image_id.buf, (void**)&p_buffer);
        ASSERT( VA_STATUS_SUCCESS == va_status );

        /* fill the luma plane of the derived image with mid-gray */
        memset(p_buffer, 0x80, image_id.width * image_id.height);

        va_status = vaUnmapBuffer(va_dpy, image_id.buf);
        ASSERT( VA_STATUS_SUCCESS == va_status );

        va_status = vaDestroyImage(va_dpy, image_id.image_id);
        ASSERT( VA_STATUS_SUCCESS == va_status );

        va_status = vaDestroySurfaces(va_dpy, &surface_id, 1);
        ASSERT( VA_STATUS_SUCCESS == va_status );

        free(vaSurfaceExternBuf.buffers);
    }

    va_status = vaDestroyConfig(va_dpy, config_id);
    ASSERT( VA_STATUS_SUCCESS == va_status );

    free(usrbuf);

}
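The test above leaves several VASurfaceAttribExternalBuffers fields at their default values. Below is a minimal sketch, not taken from the original test, of a fuller NV12 descriptor for the same user-pointer buffer; frame_width, frame_height and usrbuf refer to the variables in the example, while num_planes, offsets and data_size are assumptions about the intended layout.

/* Sketch only: a more complete NV12 external-buffer descriptor (same headers as the test above). */
unsigned long buffer_handle = (unsigned long)usrbuf;
VASurfaceAttribExternalBuffers ext;
memset(&ext, 0, sizeof(ext));
ext.pixel_format = VA_FOURCC_NV12;
ext.width        = frame_width;
ext.height       = frame_height;
ext.num_planes   = 2;                            /* Y plane + interleaved UV plane */
ext.pitches[0]   = frame_width;                  /* luma pitch */
ext.pitches[1]   = frame_width;                  /* chroma pitch (same as luma for NV12) */
ext.offsets[0]   = 0;
ext.offsets[1]   = frame_width * frame_height;   /* UV plane follows the Y plane */
ext.data_size    = frame_width * frame_height * 3 / 2;
ext.num_buffers  = 1;
ext.buffers      = &buffer_handle;               /* handle array holding the user pointer */

Such a descriptor would then be attached through the same VASurfaceAttribExternalBufferDescriptor / VASurfaceAttribMemoryType attribute pair shown above.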
Example #2
void VaApi::initFilters() {
	if (!hasEntryPoint(VAEntrypointVideoProc, VAProfileNone))
		return;
	auto display = VaApi::glx();
	VAConfigID config = VA_INVALID_ID;
	VAContextID context = VA_INVALID_ID;
	do {
		if (!isSuccess(vaCreateConfig(display, VAProfileNone, VAEntrypointVideoProc, nullptr, 0, &config)))
			break;
		if (!isSuccess(vaCreateContext(display, config, 0, 0, 0, nullptr, 0, &context)))
			break;
		QVector<VAProcFilterType> types(VAProcFilterCount);
		uint size = VAProcFilterCount;
		if (!isSuccess(vaQueryVideoProcFilters(display, context, types.data(), &size)))
			break;
		types.resize(size);
		for (const auto &type : types) {
			VaApiFilterInfo info(context, type);
			if (info.isSuccess() && !info.algorithms().isEmpty())
				m_filters.insert(type, info);
		}
	} while (false);
	if (context != VA_INVALID_ID)
		vaDestroyContext(display, context);
	if (config != VA_INVALID_ID)
		vaDestroyConfig(display, config);
}
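Example #2 goes through project-specific wrappers (VaApi::glx(), isSuccess(), VaApiFilterInfo). For reference, here is a minimal plain-libva sketch of the same filter query, assuming dpy is an already initialized VADisplay (the variable name is an assumption):

/* Sketch: enumerate the video-processing filters a driver exposes (needs <va/va_vpp.h>). */
VAConfigID  config  = VA_INVALID_ID;
VAContextID context = VA_INVALID_ID;
VAProcFilterType filters[VAProcFilterCount];
unsigned int num_filters = VAProcFilterCount;

if (vaCreateConfig(dpy, VAProfileNone, VAEntrypointVideoProc, NULL, 0, &config) == VA_STATUS_SUCCESS &&
    vaCreateContext(dpy, config, 0, 0, 0, NULL, 0, &context) == VA_STATUS_SUCCESS &&
    vaQueryVideoProcFilters(dpy, context, filters, &num_filters) == VA_STATUS_SUCCESS) {
    unsigned int i;
    for (i = 0; i < num_filters; i++)
        printf("filter type %d supported\n", (int)filters[i]);
}
if (context != VA_INVALID_ID)
    vaDestroyContext(dpy, context);
if (config != VA_INVALID_ID)
    vaDestroyConfig(dpy, config);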
Example #3
bool HwAccVaApi::fillContext(AVCodecContext *avctx) {
	if (status() != VA_STATUS_SUCCESS)
		return false;
	freeContext();
	d->context.display = VaApi::glx();
	if (!d->context.display)
		return false;
	const auto codec = VaApi::codec(avctx->codec_id);
	if (!codec)
		return false;
	d->profile = codec->profile(avctx->profile);
	VAConfigAttrib attr = { VAConfigAttribRTFormat, 0 };
	if(!isSuccess(vaGetConfigAttributes(d->context.display, d->profile, VAEntrypointVLD, &attr, 1)))
		return false;
	const uint rts =  attr.value & (VA_RT_FORMAT_YUV420 | VA_RT_FORMAT_YUV422);
	if(!rts)
		return isSuccess(VA_STATUS_ERROR_UNSUPPORTED_RT_FORMAT);
	if(!isSuccess(vaCreateConfig(d->context.display, d->profile, VAEntrypointVLD, &attr, 1, &d->context.config_id)))
		return false;
	const int w = avctx->width, h = avctx->height;
	auto tryRtFormat = [rts, this, codec, w, h] (uint rt) { return (rts & rt) && d->pool.create(codec->surfaces, w, h, rt); };
	if (!tryRtFormat(VA_RT_FORMAT_YUV420) && !tryRtFormat(VA_RT_FORMAT_YUV422))
		return false;
	VaApi::get().setSurfaceFormat(d->pool.format());
	auto ids = d->pool.ids();
	if (!isSuccess(vaCreateContext(d->context.display, d->context.config_id, w, h, VA_PROGRESSIVE, ids.data(), ids.size(), &d->context.context_id)))
		return false;
	return true;
}
Example #4
// Create an empty dummy VPP. This works around a weird bug that affects the
// VA surface format, as it is reported by vaDeriveImage(). Before a VPP
// context or a decoder context is created, the surface format will be reported
// as YV12. Surfaces created after context creation will report NV12 (even
// though surface creation does not take a context as argument!). Existing
// surfaces will change their format from YV12 to NV12 as soon as the decoder
// renders to them! Because we want to know the surface format in advance (to
// simplify our renderer configuration logic), we hope that this hack gives
// us reasonable behavior.
// See: https://bugs.freedesktop.org/show_bug.cgi?id=79848
static void insane_hack(struct gl_hwdec *hw)
{
    struct priv *p = hw->priv;
    VAConfigID config;
    if (vaCreateConfig(p->display, VAProfileNone, VAEntrypointVideoProc,
                       NULL, 0, &config) == VA_STATUS_SUCCESS)
    {
        // We want to keep this until the VADisplay is destroyed. It will
        // implicitly free the context.
        VAContextID context;
        vaCreateContext(p->display, config, 0, 0, 0, NULL, 0, &context);
    }
}
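The comment above describes how the surface format reported by vaDeriveImage() can change from YV12 to NV12 once a VPP or decoder context exists. A minimal sketch of how that could be observed, assuming va_dpy is an initialized VADisplay and surface is an existing VASurfaceID (both names are assumptions, not part of the snippet above):

/* Sketch: query the fourcc that vaDeriveImage() currently reports for a surface. */
static unsigned int query_surface_fourcc(VADisplay va_dpy, VASurfaceID surface)
{
    VAImage image;
    unsigned int fourcc = 0;
    if (vaDeriveImage(va_dpy, surface, &image) == VA_STATUS_SUCCESS) {
        fourcc = image.format.fourcc;   /* e.g. VA_FOURCC_YV12 or VA_FOURCC_NV12 */
        vaDestroyImage(va_dpy, image.image_id);
    }
    return fourcc;
}

Calling such a helper before and after the dummy vaCreateConfig()/vaCreateContext() pair would show whether the driver switches the format it reports.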
static
enum AVPixelFormat
prepare_vaapi_context(struct pp_video_decoder_s *vd, int width, int height)
{
    VAStatus status;

    vd->va_context.display = display.va;
    vd->va_context.config_id = VA_INVALID_ID;
    vd->va_context.context_id = VA_INVALID_ID;

    // function is called from libavcodec internals which were already protected by mutex
    status = vaCreateConfig(display.va, VAProfileH264High, VAEntrypointVLD, NULL, 0,
                            &vd->va_context.config_id);
    if (status != VA_STATUS_SUCCESS) {
        trace_error("%s, can't create VA config\n", __func__);
        goto err;
    }

#if VA_CHECK_VERSION(0, 34, 0)
    status = vaCreateSurfaces(display.va, VA_RT_FORMAT_YUV420, width, height,
                              vd->surfaces, MAX_VIDEO_SURFACES, NULL, 0);
#else
    status = vaCreateSurfaces(display.va, width, height, VA_RT_FORMAT_YUV420,
                              MAX_VIDEO_SURFACES, vd->surfaces);
#endif
    if (status != VA_STATUS_SUCCESS) {
        trace_error("%s, can't create VA surfaces\n", __func__);
        goto err;
    }

    status = vaCreateContext(display.va, vd->va_context.config_id, width, height,
                             VA_PROGRESSIVE, vd->surfaces, MAX_VIDEO_SURFACES,
                             &vd->va_context.context_id);
    if (status != VA_STATUS_SUCCESS) {
        trace_error("%s, can't create VA context\n", __func__);
        goto err;
    }

    vd->avctx->hwaccel_context = &vd->va_context;
    return AV_PIX_FMT_VAAPI_VLD;

err:
    vd->failed_state = 1;
    vd->ppp_video_decoder_dev->NotifyError(vd->instance->id, vd->self_id,
                                           PP_VIDEODECODERERROR_UNREADABLE_INPUT);
    return AV_PIX_FMT_NONE;
}
Example #6
bool VaapiEncoderBase::initVA()
{
    FUNC_ENTER();

    if (!m_externalDisplay)
        m_xDisplay = XOpenDisplay(NULL);
    if (!m_xDisplay)
    {
        ERROR("no x display.");
        return false;
    }

    int majorVersion, minorVersion;
    VAStatus vaStatus;

    m_display = vaGetDisplay(m_xDisplay);
    if (m_display == NULL) {
        ERROR("vaGetDisplay failed.");
        goto error;
    }

    vaStatus = vaInitialize(m_display, &majorVersion, &minorVersion);
    DEBUG("vaInitialize \n");
    if (!checkVaapiStatus(vaStatus, "vaInitialize"))
        goto error;

    DEBUG("profile = %d", m_videoParamCommon.profile);
    vaStatus = vaCreateConfig(m_display, m_videoParamCommon.profile, m_entrypoint,
                              NULL, 0, &m_config);
    if (!checkVaapiStatus(vaStatus, "vaCreateConfig "))
        goto error;

    vaStatus = vaCreateContext(m_display, m_config,
                               m_videoParamCommon.resolution.width,
                               m_videoParamCommon.resolution.height,
                               VA_PROGRESSIVE, 0, 0,
                               &m_context);
    if (!checkVaapiStatus(vaStatus, "vaCreateContext "))
        goto error;
    return true;

error:
    cleanupVA();
    return false;
}
Example #7
static int
ensure_surface_attribs(void)
{
    VAStatus va_status;
    VASurfaceAttrib *surface_attribs;
    unsigned int num_image_formats, num_surface_attribs;

    if (va_num_surface_attribs >= 0)
        return va_num_surface_attribs;

    num_image_formats = vaMaxNumImageFormats(va_dpy);
    if (num_image_formats == 0)
        return 0;

    va_status = vaCreateConfig(va_dpy, VAProfileNone, VAEntrypointVideoProc,
        NULL, 0, &vpp_config_id);
    CHECK_VASTATUS(va_status, "vaCreateConfig()");

    /* Guess the number of surface attributes, thus including any
       pixel-format supported by the VA driver */
    num_surface_attribs = VASurfaceAttribCount + num_image_formats;
    surface_attribs = malloc(num_surface_attribs * sizeof(*surface_attribs));
    if (!surface_attribs)
        return 0;

    va_status = vaQuerySurfaceAttributes(va_dpy, vpp_config_id,
        surface_attribs, &num_surface_attribs);
    if (va_status == VA_STATUS_SUCCESS)
        va_surface_attribs =  surface_attribs;
    else if (va_status == VA_STATUS_ERROR_MAX_NUM_EXCEEDED) {
        va_surface_attribs = realloc(surface_attribs,
            num_surface_attribs * sizeof(*va_surface_attribs));
        if (!va_surface_attribs) {
            free(surface_attribs);
            return 0;
        }
        va_status = vaQuerySurfaceAttributes(va_dpy, vpp_config_id,
            va_surface_attribs, &num_surface_attribs);
    }
    CHECK_VASTATUS(va_status, "vaQuerySurfaceAttributes()");
    va_num_surface_attribs = num_surface_attribs;
    return num_surface_attribs;
}
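ensure_surface_attribs() above over-allocates by VASurfaceAttribCount plus the number of image formats so that every pixel-format attribute fits. A short sketch of how the cached array might then be scanned for the pixel formats the driver accepts (va_surface_attribs and ensure_surface_attribs() come from the snippet above; the helper itself is hypothetical and needs <stdio.h>):

/* Sketch: print the fourcc of every pixel format reported through surface attributes. */
static void print_supported_pixel_formats(void)
{
    int i, n = ensure_surface_attribs();
    for (i = 0; i < n; i++) {
        const VASurfaceAttrib *attr = &va_surface_attribs[i];
        if (attr->type == VASurfaceAttribPixelFormat &&
            attr->value.type == VAGenericValueTypeInteger) {
            unsigned int fourcc = (unsigned int)attr->value.value.i;
            printf("supported fourcc: %c%c%c%c\n",
                   fourcc & 0xff, (fourcc >> 8) & 0xff,
                   (fourcc >> 16) & 0xff, (fourcc >> 24) & 0xff);
        }
    }
}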
Example #8
bool VAAPIContext::InitContext(void)
{
    if (!m_ctx.display || m_vaEntrypoint > VAEntrypointVLD)
        return false;

    MythXLocker locker(m_display->m_x_disp);
    VAConfigAttrib attrib;
    attrib.type = VAConfigAttribRTFormat;
    INIT_ST;
    va_status = vaGetConfigAttributes(m_ctx.display, m_vaProfile,
                                      m_vaEntrypoint, &attrib, 1);
    CHECK_ST;

    if (!ok || !(attrib.value & VA_RT_FORMAT_YUV420))
    {
        LOG(VB_GENERAL, LOG_ERR, LOC + "Failed to confirm YUV420 chroma");
        return false;
    }

    va_status = vaCreateConfig(m_ctx.display, m_vaProfile, m_vaEntrypoint,
                               &attrib, 1, &m_ctx.config_id);
    CHECK_ST;
    if (!ok)
    {
        LOG(VB_GENERAL, LOG_ERR, LOC + "Failed to create decoder config.");
        return false;
    }

    va_status = vaCreateContext(m_ctx.display, m_ctx.config_id,
                                m_size.width(), m_size.height(), VA_PROGRESSIVE,
                                m_surfaces, m_numSurfaces,
                                &m_ctx.context_id);
    CHECK_ST;
    if (!ok)
    {
        LOG(VB_GENERAL, LOG_ERR, LOC + "Failed to create decoder context.");
        return false;
    }
    return true;
}
Example #9
VaApiPostProcessor::VaApiPostProcessor() : d(new Data) {
	VAConfigID config;
	d->dpy = VaApi::glx();
	if (!isSuccess(vaCreateConfig(d->dpy, VAProfileNone, VAEntrypointVideoProc, nullptr, 0, &config)))
		return;
	d->config = config;
	VAContextID context;
	if (!isSuccess(vaCreateContext(d->dpy, d->config, 0, 0, 0, NULL, 0, &context)))
		return;
	d->context = context;
	const auto filters = VaApi::filters();
	for (int i=0; i<filters.size(); ++i) {
		switch (filters[i].type()) {
		case VAProcFilterDeinterlacing:
			_New(d->deinterlacer, i, d->context);
			break;
		default:
			continue;
		}
	}
	if (!filters.isEmpty())
		_New(d->pipeline, d->context);
}
Example #10
File: vaapi.c  Project: coog009/myproject
int vaapi_init_decoder(VAProfile    profile,
                       VAEntrypoint entrypoint,
                       unsigned int picture_width,
                       unsigned int picture_height)
{
    VAAPIContext * const vaapi = vaapi_get_context();
    VAConfigAttrib attrib;
    VAConfigID config_id = 0;
    VAContextID context_id = 0;
    VASurfaceID surface_id = 0;
    VAStatus status;

    if (!vaapi)
        return -1;
#if 0
    if (common_init_decoder(picture_width, picture_height) < 0)
        return -1;
#endif

    if (!has_profile(vaapi, profile))
        return -1;
    if (!has_entrypoint(vaapi, profile, entrypoint))
        return -1;

    if (vaapi->profile != profile || vaapi->entrypoint != entrypoint) {
        if (vaapi->config_id)
            vaDestroyConfig(vaapi->display, vaapi->config_id);

        attrib.type = VAConfigAttribRTFormat;
        status = vaGetConfigAttributes(vaapi->display, profile, entrypoint,
                                       &attrib, 1);
        if (!vaapi_check_status(status, "vaGetConfigAttributes()"))
            return -1;
        if ((attrib.value & VA_RT_FORMAT_YUV420) == 0)
            return -1;

        status = vaCreateConfig(vaapi->display, profile, entrypoint,
                                &attrib, 1, &config_id);
        if (!vaapi_check_status(status, "vaCreateConfig()"))
            return -1;
    }
    else
        config_id = vaapi->config_id;

    if (vaapi->picture_width != picture_width || vaapi->picture_height != picture_height) {
        if (vaapi->surface_id)
            vaDestroySurfaces(vaapi->display, &vaapi->surface_id, 1);

        status = vaCreateSurfaces(vaapi->display, picture_width, picture_height,
                                  VA_RT_FORMAT_YUV420, 1, &surface_id);
        if (!vaapi_check_status(status, "vaCreateSurfaces()"))
            return -1;

        if (vaapi->context_id)
            vaDestroyContext(vaapi->display, vaapi->context_id);

        status = vaCreateContext(vaapi->display, config_id,
                                 picture_width, picture_height,
                                 VA_PROGRESSIVE,
                                 &surface_id, 1,
                                 &context_id);
        if (!vaapi_check_status(status, "vaCreateContext()"))
            return -1;
    }
    else {
        context_id = vaapi->context_id;
        surface_id = vaapi->surface_id;
    }

    vaapi->config_id      = config_id;
    vaapi->context_id     = context_id;
    vaapi->surface_id     = surface_id;
    vaapi->profile        = profile;
    vaapi->entrypoint     = entrypoint;
    vaapi->picture_width  = picture_width;
    vaapi->picture_height = picture_height;
    return 0;
}
Example #11
av_cold int ff_vaapi_encode_init(AVCodecContext *avctx,
                                 const VAAPIEncodeType *type)
{
    VAAPIEncodeContext *ctx = avctx->priv_data;
    AVVAAPIFramesContext *recon_hwctx = NULL;
    AVVAAPIHWConfig *hwconfig = NULL;
    AVHWFramesConstraints *constraints = NULL;
    enum AVPixelFormat recon_format;
    VAStatus vas;
    int err, i;

    if (!avctx->hw_frames_ctx) {
        av_log(avctx, AV_LOG_ERROR, "A hardware frames reference is "
               "required to associate the encoding device.\n");
        return AVERROR(EINVAL);
    }

    ctx->codec = type;
    ctx->codec_options = ctx->codec_options_data;

    ctx->priv_data = av_mallocz(type->priv_data_size);
    if (!ctx->priv_data) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    ctx->input_frames_ref = av_buffer_ref(avctx->hw_frames_ctx);
    if (!ctx->input_frames_ref) {
        err = AVERROR(ENOMEM);
        goto fail;
    }
    ctx->input_frames = (AVHWFramesContext*)ctx->input_frames_ref->data;

    ctx->device_ref = av_buffer_ref(ctx->input_frames->device_ref);
    if (!ctx->device_ref) {
        err = AVERROR(ENOMEM);
        goto fail;
    }
    ctx->device = (AVHWDeviceContext*)ctx->device_ref->data;
    ctx->hwctx = ctx->device->hwctx;

    err = ctx->codec->init(avctx);
    if (err < 0)
        goto fail;

    vas = vaCreateConfig(ctx->hwctx->display,
                         ctx->va_profile, ctx->va_entrypoint,
                         ctx->config_attributes, ctx->nb_config_attributes,
                         &ctx->va_config);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create encode pipeline "
               "configuration: %d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail;
    }

    hwconfig = av_hwdevice_hwconfig_alloc(ctx->device_ref);
    if (!hwconfig) {
        err = AVERROR(ENOMEM);
        goto fail;
    }
    hwconfig->config_id = ctx->va_config;

    constraints = av_hwdevice_get_hwframe_constraints(ctx->device_ref,
                                                      hwconfig);
    if (!constraints) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    // Probably we can use the input surface format as the surface format
    // of the reconstructed frames.  If not, we just pick the first (only?)
    // format in the valid list and hope that it all works.
    recon_format = AV_PIX_FMT_NONE;
    if (constraints->valid_sw_formats) {
        for (i = 0; constraints->valid_sw_formats[i] != AV_PIX_FMT_NONE; i++) {
            if (ctx->input_frames->sw_format ==
                constraints->valid_sw_formats[i]) {
                recon_format = ctx->input_frames->sw_format;
                break;
            }
        }
        if (recon_format == AV_PIX_FMT_NONE)
            recon_format = constraints->valid_sw_formats[0];
    } else {
        // No idea what to use; copy input format.
        recon_format = ctx->input_frames->sw_format;
    }
    av_log(avctx, AV_LOG_DEBUG, "Using %s as format of "
           "reconstructed frames.\n", av_get_pix_fmt_name(recon_format));

    if (ctx->aligned_width  < constraints->min_width  ||
        ctx->aligned_height < constraints->min_height ||
        ctx->aligned_width  > constraints->max_width ||
        ctx->aligned_height > constraints->max_height) {
        av_log(avctx, AV_LOG_ERROR, "Hardware does not support encoding at "
               "size %dx%d (constraints: width %d-%d height %d-%d).\n",
               ctx->aligned_width, ctx->aligned_height,
               constraints->min_width,  constraints->max_width,
               constraints->min_height, constraints->max_height);
        err = AVERROR(EINVAL);
        goto fail;
    }

    av_freep(&hwconfig);
    av_hwframe_constraints_free(&constraints);

    ctx->recon_frames_ref = av_hwframe_ctx_alloc(ctx->device_ref);
    if (!ctx->recon_frames_ref) {
        err = AVERROR(ENOMEM);
        goto fail;
    }
    ctx->recon_frames = (AVHWFramesContext*)ctx->recon_frames_ref->data;

    ctx->recon_frames->format    = AV_PIX_FMT_VAAPI;
    ctx->recon_frames->sw_format = recon_format;
    ctx->recon_frames->width     = ctx->aligned_width;
    ctx->recon_frames->height    = ctx->aligned_height;
    ctx->recon_frames->initial_pool_size = ctx->nb_recon_frames;

    err = av_hwframe_ctx_init(ctx->recon_frames_ref);
    if (err < 0) {
        av_log(avctx, AV_LOG_ERROR, "Failed to initialise reconstructed "
               "frame context: %d.\n", err);
        goto fail;
    }
    recon_hwctx = ctx->recon_frames->hwctx;

    vas = vaCreateContext(ctx->hwctx->display, ctx->va_config,
                          ctx->aligned_width, ctx->aligned_height,
                          VA_PROGRESSIVE,
                          recon_hwctx->surface_ids,
                          recon_hwctx->nb_surfaces,
                          &ctx->va_context);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create encode pipeline "
               "context: %d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail;
    }

    ctx->input_order  = 0;
    ctx->output_delay = avctx->max_b_frames;
    ctx->decode_delay = 1;
    ctx->output_order = - ctx->output_delay - 1;

    if (ctx->codec->sequence_params_size > 0) {
        ctx->codec_sequence_params =
            av_mallocz(ctx->codec->sequence_params_size);
        if (!ctx->codec_sequence_params) {
            err = AVERROR(ENOMEM);
            goto fail;
        }
    }
    if (ctx->codec->picture_params_size > 0) {
        ctx->codec_picture_params =
            av_mallocz(ctx->codec->picture_params_size);
        if (!ctx->codec_picture_params) {
            err = AVERROR(ENOMEM);
            goto fail;
        }
    }

    if (ctx->codec->init_sequence_params) {
        err = ctx->codec->init_sequence_params(avctx);
        if (err < 0) {
            av_log(avctx, AV_LOG_ERROR, "Codec sequence initialisation "
                   "failed: %d.\n", err);
            goto fail;
        }
    }

    // All I are IDR for now.
    ctx->i_per_idr = 0;
    ctx->p_per_i = ((avctx->gop_size + avctx->max_b_frames) /
                    (avctx->max_b_frames + 1));
    ctx->b_per_p = avctx->max_b_frames;

    // This should be configurable somehow.  (Needs testing on a machine
    // where it actually overlaps properly, though.)
    ctx->issue_mode = ISSUE_MODE_MAXIMISE_THROUGHPUT;

    return 0;

fail:
    av_freep(&hwconfig);
    av_hwframe_constraints_free(&constraints);
    ff_vaapi_encode_close(avctx);
    return err;
}
Example #12
static int Create( vlc_va_t *va, AVCodecContext *ctx, enum PixelFormat pix_fmt,
                   const es_format_t *fmt, picture_sys_t *p_sys )
{
    if( pix_fmt != AV_PIX_FMT_VAAPI_VLD )
        return VLC_EGENERIC;

    (void) fmt;
    (void) p_sys;
#ifdef VLC_VA_BACKEND_XLIB
    if( !vlc_xlib_init( VLC_OBJECT(va) ) )
    {
        msg_Warn( va, "Ignoring VA-X11 API" );
        return VLC_EGENERIC;
    }
#endif

    VAProfile i_profile, *p_profiles_list;
    bool b_supported_profile = false;
    int i_profiles_nb = 0;
    unsigned count = 3;

    /* */
    switch( ctx->codec_id )
    {
    case AV_CODEC_ID_MPEG1VIDEO:
    case AV_CODEC_ID_MPEG2VIDEO:
        i_profile = VAProfileMPEG2Main;
        count = 4;
        break;
    case AV_CODEC_ID_MPEG4:
        i_profile = VAProfileMPEG4AdvancedSimple;
        break;
    case AV_CODEC_ID_WMV3:
        i_profile = VAProfileVC1Main;
        break;
    case AV_CODEC_ID_VC1:
        i_profile = VAProfileVC1Advanced;
        break;
    case AV_CODEC_ID_H264:
        i_profile = VAProfileH264High;
        count = 18;
        break;
    default:
        return VLC_EGENERIC;
    }
    count += ctx->thread_count;

    vlc_va_sys_t *sys;
    void *mem;

    assert(popcount(sizeof (sys->surfaces)) == 1);
    if (unlikely(posix_memalign(&mem, sizeof (sys->surfaces), sizeof (*sys))))
       return VLC_ENOMEM;

    sys = mem;
    memset(sys, 0, sizeof (*sys));

    /* */
    sys->hw_ctx.display = NULL;
    sys->hw_ctx.config_id = VA_INVALID_ID;
    sys->hw_ctx.context_id = VA_INVALID_ID;
    sys->width = ctx->coded_width;
    sys->height = ctx->coded_height;
    sys->count = count;
    sys->available = (1 << sys->count) - 1;
    assert(count < sizeof (sys->available) * CHAR_BIT);
    assert(count * sizeof (sys->surfaces[0]) <= sizeof (sys->surfaces));

    /* Create a VA display */
#ifdef VLC_VA_BACKEND_XLIB
    sys->p_display_x11 = XOpenDisplay(NULL);
    if( !sys->p_display_x11 )
    {
        msg_Err( va, "Could not connect to X server" );
        goto error;
    }

    sys->hw_ctx.display = vaGetDisplay(sys->p_display_x11);
#endif
#ifdef VLC_VA_BACKEND_DRM
    sys->drm_fd = vlc_open("/dev/dri/card0", O_RDWR);
    if( sys->drm_fd == -1 )
    {
        msg_Err( va, "Could not access rendering device: %m" );
        goto error;
    }

    sys->hw_ctx.display = vaGetDisplayDRM(sys->drm_fd);
#endif
    if (sys->hw_ctx.display == NULL)
    {
        msg_Err( va, "Could not get a VAAPI device" );
        goto error;
    }

    int major, minor;
    if (vaInitialize(sys->hw_ctx.display, &major, &minor))
    {
        msg_Err( va, "Failed to initialize the VAAPI device" );
        goto error;
    }

    /* Check if the selected profile is supported */
    i_profiles_nb = vaMaxNumProfiles(sys->hw_ctx.display);
    p_profiles_list = calloc( i_profiles_nb, sizeof( VAProfile ) );
    if( !p_profiles_list )
        goto error;

    if (vaQueryConfigProfiles(sys->hw_ctx.display, p_profiles_list,
                              &i_profiles_nb) == VA_STATUS_SUCCESS)
    {
        for( int i = 0; i < i_profiles_nb; i++ )
        {
            if ( p_profiles_list[i] == i_profile )
            {
                b_supported_profile = true;
                break;
            }
        }
    }
    free( p_profiles_list );
    if ( !b_supported_profile )
    {
        msg_Dbg( va, "Codec and profile not supported by the hardware" );
        goto error;
    }

    /* Create a VA configuration */
    VAConfigAttrib attrib;
    memset( &attrib, 0, sizeof(attrib) );
    attrib.type = VAConfigAttribRTFormat;
    if (vaGetConfigAttributes(sys->hw_ctx.display, i_profile, VAEntrypointVLD,
                              &attrib, 1))
        goto error;

    /* Not sure what to do if not, I don't have a way to test */
    if( (attrib.value & VA_RT_FORMAT_YUV420) == 0 )
        goto error;
    if (vaCreateConfig(sys->hw_ctx.display, i_profile, VAEntrypointVLD,
                       &attrib, 1, &sys->hw_ctx.config_id))
    {
        sys->hw_ctx.config_id = VA_INVALID_ID;
        goto error;
    }

    /* Create surfaces */
    assert(ctx->coded_width > 0 && ctx->coded_height > 0);
    if (vaCreateSurfaces(sys->hw_ctx.display, VA_RT_FORMAT_YUV420,
                         ctx->coded_width, ctx->coded_height,
                         sys->surfaces, sys->count, NULL, 0))
    {
        goto error;
    }

    /* Create a context */
    if (vaCreateContext(sys->hw_ctx.display, sys->hw_ctx.config_id,
                        ctx->coded_width, ctx->coded_height, VA_PROGRESSIVE,
                        sys->surfaces, sys->count, &sys->hw_ctx.context_id))
    {
        sys->hw_ctx.context_id = VA_INVALID_ID;
        vaDestroySurfaces(sys->hw_ctx.display, sys->surfaces, sys->count);
        goto error;
    }

    if (FindFormat(sys))
        goto error;

    if (unlikely(CopyInitCache(&sys->image_cache, ctx->coded_width)))
        goto error;

    vlc_mutex_init(&sys->lock);

    msg_Dbg(va, "using %s image format 0x%08x",
            sys->do_derive ? "derive" : "get", sys->format.fourcc);

    ctx->hwaccel_context = &sys->hw_ctx;
    va->sys = sys;
    va->description = vaQueryVendorString(sys->hw_ctx.display);
    va->get = Get;
    va->release = Release;
    va->extract = Extract;
    return VLC_SUCCESS;

error:
    if (sys->hw_ctx.context_id != VA_INVALID_ID)
    {
        vaDestroyContext(sys->hw_ctx.display, sys->hw_ctx.context_id);
        vaDestroySurfaces(sys->hw_ctx.display, sys->surfaces, sys->count);
    }
    if (sys->hw_ctx.config_id != VA_INVALID_ID)
        vaDestroyConfig(sys->hw_ctx.display, sys->hw_ctx.config_id);
    if (sys->hw_ctx.display != NULL)
        vaTerminate(sys->hw_ctx.display);
#ifdef VLC_VA_BACKEND_XLIB
    if( sys->p_display_x11 != NULL )
        XCloseDisplay( sys->p_display_x11 );
#endif
#ifdef VLC_VA_BACKEND_DRM
    if( sys->drm_fd != -1 )
        close( sys->drm_fd );
#endif
    free( sys );
    return VLC_EGENERIC;
}
Example #13
static int deint_vaapi_config_output(AVFilterLink *outlink)
{
    AVFilterContext    *avctx = outlink->src;
    AVFilterLink      *inlink = avctx->inputs[0];
    DeintVAAPIContext    *ctx = avctx->priv;
    AVVAAPIHWConfig *hwconfig = NULL;
    AVHWFramesConstraints *constraints = NULL;
    AVVAAPIFramesContext *va_frames;
    VAStatus vas;
    int err;

    deint_vaapi_pipeline_uninit(avctx);

    av_assert0(ctx->input_frames);
    ctx->device_ref = av_buffer_ref(ctx->input_frames->device_ref);
    ctx->hwctx = ((AVHWDeviceContext*)ctx->device_ref->data)->hwctx;

    ctx->output_width  = ctx->input_frames->width;
    ctx->output_height = ctx->input_frames->height;

    av_assert0(ctx->va_config == VA_INVALID_ID);
    vas = vaCreateConfig(ctx->hwctx->display, VAProfileNone,
                         VAEntrypointVideoProc, 0, 0, &ctx->va_config);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create processing pipeline "
               "config: %d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail;
    }

    hwconfig = av_hwdevice_hwconfig_alloc(ctx->device_ref);
    if (!hwconfig) {
        err = AVERROR(ENOMEM);
        goto fail;
    }
    hwconfig->config_id = ctx->va_config;

    constraints = av_hwdevice_get_hwframe_constraints(ctx->device_ref,
                                                      hwconfig);
    if (!constraints) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    if (ctx->output_width  < constraints->min_width  ||
        ctx->output_height < constraints->min_height ||
        ctx->output_width  > constraints->max_width  ||
        ctx->output_height > constraints->max_height) {
        av_log(avctx, AV_LOG_ERROR, "Hardware does not support "
               "deinterlacing to size %dx%d "
               "(constraints: width %d-%d height %d-%d).\n",
               ctx->output_width, ctx->output_height,
               constraints->min_width,  constraints->max_width,
               constraints->min_height, constraints->max_height);
        err = AVERROR(EINVAL);
        goto fail;
    }

    ctx->output_frames_ref = av_hwframe_ctx_alloc(ctx->device_ref);
    if (!ctx->output_frames_ref) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create HW frame context "
               "for output.\n");
        err = AVERROR(ENOMEM);
        goto fail;
    }

    ctx->output_frames = (AVHWFramesContext*)ctx->output_frames_ref->data;

    ctx->output_frames->format    = AV_PIX_FMT_VAAPI;
    ctx->output_frames->sw_format = ctx->input_frames->sw_format;
    ctx->output_frames->width     = ctx->output_width;
    ctx->output_frames->height    = ctx->output_height;

    // The number of output frames we need is determined by what follows
    // the filter.  If it's an encoder with complex frame reference
    // structures then this could be very high.
    ctx->output_frames->initial_pool_size = 10;

    err = av_hwframe_ctx_init(ctx->output_frames_ref);
    if (err < 0) {
        av_log(avctx, AV_LOG_ERROR, "Failed to initialise VAAPI frame "
               "context for output: %d\n", err);
        goto fail;
    }

    va_frames = ctx->output_frames->hwctx;

    av_assert0(ctx->va_context == VA_INVALID_ID);
    vas = vaCreateContext(ctx->hwctx->display, ctx->va_config,
                          ctx->output_width, ctx->output_height, 0,
                          va_frames->surface_ids, va_frames->nb_surfaces,
                          &ctx->va_context);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create processing pipeline "
               "context: %d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail;
    }

    err = deint_vaapi_build_filter_params(avctx);
    if (err < 0)
        goto fail;

    outlink->w = inlink->w;
    outlink->h = inlink->h;

    outlink->time_base  = av_mul_q(inlink->time_base,
                                   (AVRational) { 1, ctx->field_rate });
    outlink->frame_rate = av_mul_q(inlink->frame_rate,
                                   (AVRational) { ctx->field_rate, 1 });

    outlink->hw_frames_ctx = av_buffer_ref(ctx->output_frames_ref);
    if (!outlink->hw_frames_ctx) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    av_freep(&hwconfig);
    av_hwframe_constraints_free(&constraints);
    return 0;

fail:
    av_buffer_unref(&ctx->output_frames_ref);
    av_freep(&hwconfig);
    av_hwframe_constraints_free(&constraints);
    return err;
}
Example #14
void VAApiWriter::init_vpp()
{
#ifdef HAVE_VPP
	use_vpp = true;
	if
	(
		vaCreateConfig( VADisp, ( VAProfile )-1, VAEntrypointVideoProc, NULL, 0, &config_vpp ) == VA_STATUS_SUCCESS &&
		vaCreateContext( VADisp, config_vpp, 0, 0, 0, NULL, 0, &context_vpp ) == VA_STATUS_SUCCESS &&
		vaCreateSurfaces( &id_vpp, 1 )
	)
	{
		unsigned num_filters = VAProcFilterCount;
		VAProcFilterType filters[ VAProcFilterCount ];
		if ( vaQueryVideoProcFilters( VADisp, context_vpp, filters, &num_filters ) != VA_STATUS_SUCCESS )
			num_filters = 0;
		if ( num_filters )
		{
			/* Create a dummy filter (some drivers/API versions crash without any filter) */
			VAProcFilterParameterBufferBase none_params = { VAProcFilterNone };
			if ( vaCreateBuffer( VADisp, context_vpp, VAProcFilterParameterBufferType, sizeof none_params, 1, &none_params, &vpp_buffers[ VAProcFilterNone ] ) != VA_STATUS_SUCCESS )
				vpp_buffers[ VAProcFilterNone ] = VA_INVALID_ID;
			/* Search for the deinterlacing filter */
			if ( vpp_deint_type != VAProcDeinterlacingNone )
				for ( unsigned i = 0 ; i < num_filters ; ++i )
					if ( filters[ i ] == VAProcFilterDeinterlacing )
					{
						VAProcFilterCapDeinterlacing deinterlacing_caps[ VAProcDeinterlacingCount ];
						unsigned num_deinterlacing_caps = VAProcDeinterlacingCount;
						if ( vaQueryVideoProcFilterCaps( VADisp, context_vpp, VAProcFilterDeinterlacing, &deinterlacing_caps, &num_deinterlacing_caps ) != VA_STATUS_SUCCESS )
							num_deinterlacing_caps = 0;
						bool vpp_deint_types[ 2 ] = { false };
						for ( unsigned j = 0 ; j < num_deinterlacing_caps ; ++j )
						{
							switch ( deinterlacing_caps[ j ].type )
							{
								case VAProcDeinterlacingMotionAdaptive:
									vpp_deint_types[ 0 ] = true;
									break;
								case VAProcDeinterlacingMotionCompensated:
									vpp_deint_types[ 1 ] = true;
									break;
								default:
									break;
							}
						}
						if ( vpp_deint_type == VAProcDeinterlacingMotionCompensated && !vpp_deint_types[ 1 ] )
						{
							QMPlay2Core.log( tr( "Unsupported deinterlacing algorithm" ) + " - Motion compensated", ErrorLog | LogOnce );
							vpp_deint_type = VAProcDeinterlacingMotionAdaptive;
						}
						if ( vpp_deint_type == VAProcDeinterlacingMotionAdaptive && !vpp_deint_types[ 0 ] )
						{
							QMPlay2Core.log( tr( "Unsupported deinterlacing algorithm" ) + " - Motion adaptive", ErrorLog | LogOnce );
							vpp_deint_type = VAProcDeinterlacingNone;
						}
						if ( vpp_deint_type != VAProcDeinterlacingNone )
						{
							VAProcFilterParameterBufferDeinterlacing deint_params = { VAProcFilterDeinterlacing, vpp_deint_type, 0 };
							if ( vaCreateBuffer( VADisp, context_vpp, VAProcFilterParameterBufferType, sizeof deint_params, 1, &deint_params, &vpp_buffers[ VAProcFilterDeinterlacing ] ) != VA_STATUS_SUCCESS )
								vpp_buffers[ VAProcFilterDeinterlacing ] = VA_INVALID_ID;
						}
						break;
					}
			return;
		}
	}
	if ( vpp_deint_type != VAProcDeinterlacingNone ) //Show error only when filter is required
		QMPlay2Core.log( "VA-API :: " + tr( "Nie można otworzyć filtrów obrazu" ), ErrorLog | LogOnce );
	clr_vpp();
#endif
}
Example #15
static int Open( vlc_va_vaapi_t *p_va, int i_codec_id )
{
    VAProfile i_profile, *p_profiles_list;
    bool b_supported_profile = false;
    int i_profiles_nb = 0;
    int i_surface_count;

    /* */
    switch( i_codec_id )
    {
    case CODEC_ID_MPEG1VIDEO:
    case CODEC_ID_MPEG2VIDEO:
        i_profile = VAProfileMPEG2Main;
        i_surface_count = 2+1;
        break;
    case CODEC_ID_MPEG4:
        i_profile = VAProfileMPEG4AdvancedSimple;
        i_surface_count = 2+1;
        break;
    case CODEC_ID_WMV3:
        i_profile = VAProfileVC1Main;
        i_surface_count = 2+1;
        break;
    case CODEC_ID_VC1:
        i_profile = VAProfileVC1Advanced;
        i_surface_count = 2+1;
        break;
    case CODEC_ID_H264:
        i_profile = VAProfileH264High;
        i_surface_count = 16+1;
        break;
    default:
        return VLC_EGENERIC;
    }

    /* */
    memset( p_va, 0, sizeof(*p_va) );
    p_va->i_config_id  = VA_INVALID_ID;
    p_va->i_context_id = VA_INVALID_ID;
    p_va->image.image_id = VA_INVALID_ID;

    /* Create a VA display */
    p_va->p_display_x11 = XOpenDisplay(NULL);
    if( !p_va->p_display_x11 )
        goto error;

    p_va->p_display = vaGetDisplay( p_va->p_display_x11 );
    if( !p_va->p_display )
        goto error;

    if( vaInitialize( p_va->p_display, &p_va->i_version_major, &p_va->i_version_minor ) )
        goto error;

    /* Check if the selected profile is supported */
    i_profiles_nb = vaMaxNumProfiles( p_va->p_display );
    p_profiles_list = calloc( i_profiles_nb, sizeof( VAProfile ) );
    if ( !p_profiles_list )
        goto error;

    VAStatus i_status = vaQueryConfigProfiles( p_va->p_display, p_profiles_list, &i_profiles_nb );
    if ( i_status == VA_STATUS_SUCCESS )
    {
        for( int i = 0; i < i_profiles_nb; i++ )
        {
            if ( p_profiles_list[i] == i_profile )
            {
                b_supported_profile = true;
                break;
            }
        }
    }
    free( p_profiles_list );
    if ( !b_supported_profile )
        goto error;

    /* Create a VA configuration */
    VAConfigAttrib attrib;
    memset( &attrib, 0, sizeof(attrib) );
    attrib.type = VAConfigAttribRTFormat;
    if( vaGetConfigAttributes( p_va->p_display,
                               i_profile, VAEntrypointVLD, &attrib, 1 ) )
        goto error;

    /* Not sure what to do if not, I don't have a way to test */
    if( (attrib.value & VA_RT_FORMAT_YUV420) == 0 )
        goto error;
    if( vaCreateConfig( p_va->p_display,
                        i_profile, VAEntrypointVLD, &attrib, 1, &p_va->i_config_id ) )
    {
        p_va->i_config_id = VA_INVALID_ID;
        goto error;
    }

    p_va->i_surface_count = i_surface_count;

    p_va->b_supports_derive = false;

    if( asprintf( &p_va->va.description, "VA API version %d.%d",
                  p_va->i_version_major, p_va->i_version_minor ) < 0 )
        p_va->va.description = NULL;

    return VLC_SUCCESS;

error:
    return VLC_EGENERIC;
}
Example #16
static void h264_init_decoder(int width, int height)
{
    VAStatus va_status;

    if (va_context_id) {
        rfbClientLog("%s: va_dpy already initialized\n", __FUNCTION__);
    }

    if (va_dpy != NULL) {
        rfbClientLog("%s: Re-initializing H.264 decoder\n", __FUNCTION__);
    }
    else {
        rfbClientLog("%s: initializing H.264 decoder\n", __FUNCTION__);

        /* Attach VA display to local X display */
        Display *win_display = (Display *)XOpenDisplay(":0.0");
        if (win_display == NULL) {
            rfbClientErr("Can't connect to local display\n");
            exit(-1);
        }

        int major_ver, minor_ver;
        va_dpy = vaGetDisplay(win_display);
        va_status = vaInitialize(va_dpy, &major_ver, &minor_ver);
        CHECK_VASTATUS(va_status, "vaInitialize");
        rfbClientLog("%s: libva version %d.%d found\n", __FUNCTION__, major_ver, minor_ver);
    }

    /* Check for VLD entrypoint */
    int num_entrypoints;
    VAEntrypoint    entrypoints[5];
    int vld_entrypoint_found = 0;

    /* Change VAProfileH264High if needed */
    VAProfile profile = VAProfileH264High;
    va_status = vaQueryConfigEntrypoints(va_dpy, profile, entrypoints, &num_entrypoints);
    CHECK_VASTATUS(va_status, "vaQueryConfigEntrypoints");
    int i;
    for (i = 0; i < num_entrypoints; ++i) {
        if (entrypoints[i] == VAEntrypointVLD) {
            vld_entrypoint_found = 1;
            break;
        }
    }

    if (vld_entrypoint_found == 0) {
        rfbClientErr("VLD entrypoint not found\n");
        exit(1);
    }

    /* Create configuration for the decode pipeline */
    VAConfigAttrib attrib;
    attrib.type = VAConfigAttribRTFormat;
    va_status = vaGetConfigAttributes(va_dpy, profile, VAEntrypointVLD, &attrib, 1);
    CHECK_VASTATUS(va_status, "vaGetConfigAttributes");
    va_status = vaCreateConfig(va_dpy, profile, VAEntrypointVLD, &attrib, 1, &va_config_id);
    CHECK_VASTATUS(va_status, "vaCreateConfig");

    /* Create VA surfaces */
    for (i = 0; i < SURFACE_NUM; ++i) {
        va_surface_id[i]       = VA_INVALID_ID;
        va_pic_param_buf_id[i] = VA_INVALID_ID;
        va_mat_param_buf_id[i] = VA_INVALID_ID;
        va_sp_param_buf_id[i]  = VA_INVALID_ID;
        va_d_param_buf_id[i]   = VA_INVALID_ID;
    }
    va_status = vaCreateSurfaces(va_dpy, width, height, VA_RT_FORMAT_YUV420, SURFACE_NUM, &va_surface_id[0]);
    CHECK_VASTATUS(va_status, "vaCreateSurfaces");
    for (i = 0; i < SURFACE_NUM; ++i) {
        DebugLog(("%s: va_surface_id[%d] = %p\n", __FUNCTION__, i, va_surface_id[i]));
    }

    /* Create VA context */
    va_status = vaCreateContext(va_dpy, va_config_id, width, height, 0/*VA_PROGRESSIVE*/,  &va_surface_id[0], SURFACE_NUM, &va_context_id);
    CHECK_VASTATUS(va_status, "vaCreateContext");
    DebugLog(("%s: VA context created (id: %d)\n", __FUNCTION__, va_context_id));


    /* Instantiate decode pipeline */
    va_status = vaBeginPicture(va_dpy, va_context_id, va_surface_id[0]);
    CHECK_VASTATUS(va_status, "vaBeginPicture");

    rfbClientLog("%s: H.264 decoder initialized\n", __FUNCTION__);
}
Example #17
/*
 * Set *va_config and the frames_ref fields from the current codec parameters
 * in avctx.
 */
static int vaapi_decode_make_config(AVCodecContext *avctx,
                                    AVBufferRef *device_ref,
                                    VAConfigID *va_config,
                                    AVBufferRef *frames_ref)
{
    AVVAAPIHWConfig       *hwconfig    = NULL;
    AVHWFramesConstraints *constraints = NULL;
    VAStatus vas;
    int err, i, j;
    const AVCodecDescriptor *codec_desc;
    VAProfile profile, va_profile, *profile_list = NULL;
    int profile_count, exact_match, alt_profile;
    const AVPixFmtDescriptor *sw_desc, *desc;

    AVHWDeviceContext    *device = (AVHWDeviceContext*)device_ref->data;
    AVVAAPIDeviceContext *hwctx = device->hwctx;

    codec_desc = avcodec_descriptor_get(avctx->codec_id);
    if (!codec_desc) {
        err = AVERROR(EINVAL);
        goto fail;
    }

    profile_count = vaMaxNumProfiles(hwctx->display);
    profile_list  = av_malloc_array(profile_count,
                                    sizeof(VAProfile));
    if (!profile_list) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    vas = vaQueryConfigProfiles(hwctx->display,
                                profile_list, &profile_count);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to query profiles: "
               "%d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(ENOSYS);
        goto fail;
    }

    profile = VAProfileNone;
    exact_match = 0;

    for (i = 0; i < FF_ARRAY_ELEMS(vaapi_profile_map); i++) {
        int profile_match = 0;
        if (avctx->codec_id != vaapi_profile_map[i].codec_id)
            continue;
        if (avctx->profile == vaapi_profile_map[i].codec_profile)
            profile_match = 1;
        profile = vaapi_profile_map[i].va_profile;
        for (j = 0; j < profile_count; j++) {
            if (profile == profile_list[j]) {
                exact_match = profile_match;
                break;
            }
        }
        if (j < profile_count) {
            if (exact_match)
                break;
            alt_profile = vaapi_profile_map[i].codec_profile;
            va_profile = vaapi_profile_map[i].va_profile;
        }
    }
    av_freep(&profile_list);

    if (profile == VAProfileNone) {
        av_log(avctx, AV_LOG_ERROR, "No support for codec %s "
               "profile %d.\n", codec_desc->name, avctx->profile);
        err = AVERROR(ENOSYS);
        goto fail;
    }
    if (!exact_match) {
        if (avctx->hwaccel_flags &
            AV_HWACCEL_FLAG_ALLOW_PROFILE_MISMATCH) {
            av_log(avctx, AV_LOG_VERBOSE, "Codec %s profile %d not "
                   "supported for hardware decode.\n",
                   codec_desc->name, avctx->profile);
            av_log(avctx, AV_LOG_WARNING, "Using possibly-"
                   "incompatible profile %d instead.\n",
                   alt_profile);
            profile = va_profile;
        } else {
            av_log(avctx, AV_LOG_VERBOSE, "Codec %s profile %d not "
                   "supported for hardware decode.\n",
                   codec_desc->name, avctx->profile);
            err = AVERROR(EINVAL);
            goto fail;
        }
    }

    vas = vaCreateConfig(hwctx->display, profile,
                         VAEntrypointVLD, NULL, 0,
                         va_config);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create decode "
               "configuration: %d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail;
    }

    hwconfig = av_hwdevice_hwconfig_alloc(device_ref);
    if (!hwconfig) {
        err = AVERROR(ENOMEM);
        goto fail;
    }
    hwconfig->config_id = *va_config;

    constraints =
        av_hwdevice_get_hwframe_constraints(device_ref, hwconfig);
    if (!constraints) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    if (avctx->coded_width  < constraints->min_width  ||
        avctx->coded_height < constraints->min_height ||
        avctx->coded_width  > constraints->max_width  ||
        avctx->coded_height > constraints->max_height) {
        av_log(avctx, AV_LOG_ERROR, "Hardware does not support image "
               "size %dx%d (constraints: width %d-%d height %d-%d).\n",
               avctx->coded_width, avctx->coded_height,
               constraints->min_width,  constraints->max_width,
               constraints->min_height, constraints->max_height);
        err = AVERROR(EINVAL);
        goto fail;
    }
    if (!constraints->valid_sw_formats ||
        constraints->valid_sw_formats[0] == AV_PIX_FMT_NONE) {
        av_log(avctx, AV_LOG_ERROR, "Hardware does not offer any "
               "usable surface formats.\n");
        err = AVERROR(EINVAL);
        goto fail;
    }

    if (frames_ref) {
        AVHWFramesContext *frames = (AVHWFramesContext *)frames_ref->data;

        frames->format = AV_PIX_FMT_VAAPI;
        frames->width = avctx->coded_width;
        frames->height = avctx->coded_height;

        // Find the first format in the list which matches the expected
        // bit depth and subsampling.  If none are found (this can happen
        // when 10-bit streams are decoded to 8-bit surfaces, for example)
        // then just take the first format on the list.
        frames->sw_format = constraints->valid_sw_formats[0];
        sw_desc = av_pix_fmt_desc_get(avctx->sw_pix_fmt);
        for (i = 0; constraints->valid_sw_formats[i] != AV_PIX_FMT_NONE; i++) {
            desc = av_pix_fmt_desc_get(constraints->valid_sw_formats[i]);
            if (desc->nb_components != sw_desc->nb_components ||
                desc->log2_chroma_w != sw_desc->log2_chroma_w ||
                desc->log2_chroma_h != sw_desc->log2_chroma_h)
                continue;
            for (j = 0; j < desc->nb_components; j++) {
                if (desc->comp[j].depth != sw_desc->comp[j].depth)
                    break;
            }
            if (j < desc->nb_components)
                continue;
            frames->sw_format = constraints->valid_sw_formats[i];
            break;
        }

        frames->initial_pool_size = 1;
        // Add per-codec number of surfaces used for storing reference frames.
        switch (avctx->codec_id) {
        case AV_CODEC_ID_H264:
        case AV_CODEC_ID_HEVC:
            frames->initial_pool_size += 16;
            break;
        case AV_CODEC_ID_VP9:
            frames->initial_pool_size += 8;
            break;
        case AV_CODEC_ID_VP8:
            frames->initial_pool_size += 3;
            break;
        default:
            frames->initial_pool_size += 2;
        }
    }

    av_hwframe_constraints_free(&constraints);
    av_freep(&hwconfig);

    return 0;

fail:
    av_hwframe_constraints_free(&constraints);
    av_freep(&hwconfig);
    if (*va_config != VA_INVALID_ID) {
        vaDestroyConfig(hwctx->display, *va_config);
        *va_config = VA_INVALID_ID;
    }
    av_freep(&profile_list);
    return err;
}
Example #18
static int scale_vaapi_config_output(AVFilterLink *outlink)
{
    AVFilterContext *avctx = outlink->src;
    ScaleVAAPIContext *ctx = avctx->priv;
    AVVAAPIHWConfig *hwconfig = NULL;
    AVHWFramesConstraints *constraints = NULL;
    AVVAAPIFramesContext *va_frames;
    VAStatus vas;
    int err, i;

    scale_vaapi_pipeline_uninit(ctx);

    ctx->device_ref = av_buffer_ref(ctx->input_frames->device_ref);
    ctx->hwctx = ((AVHWDeviceContext*)ctx->device_ref->data)->hwctx;

    av_assert0(ctx->va_config == VA_INVALID_ID);
    vas = vaCreateConfig(ctx->hwctx->display, VAProfileNone,
                         VAEntrypointVideoProc, 0, 0, &ctx->va_config);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(ctx, AV_LOG_ERROR, "Failed to create processing pipeline "
               "config: %d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail;
    }

    hwconfig = av_hwdevice_hwconfig_alloc(ctx->device_ref);
    if (!hwconfig) {
        err = AVERROR(ENOMEM);
        goto fail;
    }
    hwconfig->config_id = ctx->va_config;

    constraints = av_hwdevice_get_hwframe_constraints(ctx->device_ref,
                                                      hwconfig);
    if (!constraints) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    if (ctx->output_format == AV_PIX_FMT_NONE)
        ctx->output_format = ctx->input_frames->sw_format;
    if (constraints->valid_sw_formats) {
        for (i = 0; constraints->valid_sw_formats[i] != AV_PIX_FMT_NONE; i++) {
            if (ctx->output_format == constraints->valid_sw_formats[i])
                break;
        }
        if (constraints->valid_sw_formats[i] == AV_PIX_FMT_NONE) {
            av_log(ctx, AV_LOG_ERROR, "Hardware does not support output "
                   "format %s.\n", av_get_pix_fmt_name(ctx->output_format));
            err = AVERROR(EINVAL);
            goto fail;
        }
    }

    if (ctx->output_width  < constraints->min_width  ||
        ctx->output_height < constraints->min_height ||
        ctx->output_width  > constraints->max_width  ||
        ctx->output_height > constraints->max_height) {
        av_log(ctx, AV_LOG_ERROR, "Hardware does not support scaling to "
               "size %dx%d (constraints: width %d-%d height %d-%d).\n",
               ctx->output_width, ctx->output_height,
               constraints->min_width,  constraints->max_width,
               constraints->min_height, constraints->max_height);
        err = AVERROR(EINVAL);
        goto fail;
    }

    ctx->output_frames_ref = av_hwframe_ctx_alloc(ctx->device_ref);
    if (!ctx->output_frames_ref) {
        av_log(ctx, AV_LOG_ERROR, "Failed to create HW frame context "
               "for output.\n");
        err = AVERROR(ENOMEM);
        goto fail;
    }

    ctx->output_frames = (AVHWFramesContext*)ctx->output_frames_ref->data;

    ctx->output_frames->format    = AV_PIX_FMT_VAAPI;
    ctx->output_frames->sw_format = ctx->output_format;
    ctx->output_frames->width     = ctx->output_width;
    ctx->output_frames->height    = ctx->output_height;

    // The number of output frames we need is determined by what follows
    // the filter.  If it's an encoder with complex frame reference
    // structures then this could be very high.
    ctx->output_frames->initial_pool_size = 10;

    err = av_hwframe_ctx_init(ctx->output_frames_ref);
    if (err < 0) {
        av_log(ctx, AV_LOG_ERROR, "Failed to initialise VAAPI frame "
               "context for output: %d\n", err);
        goto fail;
    }

    va_frames = ctx->output_frames->hwctx;

    av_assert0(ctx->va_context == VA_INVALID_ID);
    vas = vaCreateContext(ctx->hwctx->display, ctx->va_config,
                          ctx->output_width, ctx->output_height,
                          VA_PROGRESSIVE,
                          va_frames->surface_ids, va_frames->nb_surfaces,
                          &ctx->va_context);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(ctx, AV_LOG_ERROR, "Failed to create processing pipeline "
               "context: %d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail;
    }

    outlink->w = ctx->output_width;
    outlink->h = ctx->output_height;

    outlink->hw_frames_ctx = av_buffer_ref(ctx->output_frames_ref);
    if (!outlink->hw_frames_ctx) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    av_freep(&hwconfig);
    av_hwframe_constraints_free(&constraints);
    return 0;

fail:
    av_buffer_unref(&ctx->output_frames_ref);
    av_freep(&hwconfig);
    av_hwframe_constraints_free(&constraints);
    return err;
}
Example #19
void VAApiWriter::init_vpp()
{
#ifdef HAVE_VPP
	use_vpp = true;
	if
	(
		vaCreateConfig( VADisp, ( VAProfile )-1, VAEntrypointVideoProc, NULL, 0, &config_vpp ) == VA_STATUS_SUCCESS &&
		vaCreateContext( VADisp, config_vpp, 0, 0, 0, NULL, 0, &context_vpp ) == VA_STATUS_SUCCESS &&
		vaCreateSurfaces( &id_vpp, 1 )
	)
	{
		unsigned num_filters = VAProcFilterCount;
		VAProcFilterType filters[ VAProcFilterCount ];
		if ( vaQueryVideoProcFilters( VADisp, context_vpp, filters, &num_filters ) != VA_STATUS_SUCCESS )
			num_filters = 0;
		if ( num_filters )
		{
			if ( vpp_deint_type != VAProcDeinterlacingNone )
				for ( unsigned i = 0 ; i < num_filters ; ++i )
					if ( filters[ i ] == VAProcFilterDeinterlacing )
					{
						VAProcFilterCapDeinterlacing deinterlacing_caps[ VAProcDeinterlacingCount ];
						unsigned num_deinterlacing_caps = VAProcDeinterlacingCount;
						if ( vaQueryVideoProcFilterCaps( VADisp, context_vpp, VAProcFilterDeinterlacing, &deinterlacing_caps, &num_deinterlacing_caps ) != VA_STATUS_SUCCESS )
							num_deinterlacing_caps = 0;
						bool vpp_deint_types[ 2 ] = { false };
						for ( unsigned j = 0 ; j < num_deinterlacing_caps ; ++j )
						{
							switch ( deinterlacing_caps[ j ].type )
							{
								case VAProcDeinterlacingMotionAdaptive:
									vpp_deint_types[ 0 ] = true;
									break;
								case VAProcDeinterlacingMotionCompensated:
									vpp_deint_types[ 1 ] = true;
									break;
								default:
									break;
							}
						}
						if ( vpp_deint_type == VAProcDeinterlacingMotionCompensated && !vpp_deint_types[ 1 ] )
						{
							QMPlay2Core.log( tr( "Nie obsługiwany algorytm usuwania przeplotu" ) + " - Motion compensated", ErrorLog | LogOnce );
							vpp_deint_type = VAProcDeinterlacingMotionAdaptive;
						}
						if ( vpp_deint_type == VAProcDeinterlacingMotionAdaptive && !vpp_deint_types[ 0 ] )
						{
							QMPlay2Core.log( tr( "Nie obsługiwany algorytm usuwania przeplotu" ) + " - Motion adaptive", ErrorLog | LogOnce );
							vpp_deint_type = VAProcDeinterlacingNone;
						}
						if ( vpp_deint_type != VAProcDeinterlacingNone )
						{
							VAProcFilterParameterBufferDeinterlacing deint_params = { VAProcFilterDeinterlacing, vpp_deint_type, VPP_TFF };
							if ( vaCreateBuffer( VADisp, context_vpp, VAProcFilterParameterBufferType, sizeof deint_params, 1, &deint_params, &vpp_deint ) != VA_STATUS_SUCCESS )
								vpp_deint = VA_INVALID_ID;
						}
						break;
					}
			return;
		}
	}
	if ( vpp_deint_type != VAProcDeinterlacingNone )
		QMPlay2Core.log( tr( "Nie można otworzyć filtrów usuwających przeplot" ), ErrorLog | LogOnce );
	clr_vpp();
#endif
}
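init_vpp() only creates the deinterlacing filter parameter buffer; applying it happens at render time, when the buffer is referenced from a VAProcPipelineParameterBuffer and submitted with the usual begin/render/end sequence. A minimal sketch under that assumption (the helper and its parameters are illustrative, not part of VAApiWriter):

#include <string.h>
#include <va/va.h>
#include <va/va_vpp.h>

/* Hypothetical helper (not part of VAApiWriter): apply the "vpp_deint"
 * filter buffer created in init_vpp() to one frame. */
static VAStatus runVppDeint( VADisplay VADisp, VAContextID context_vpp, VABufferID vpp_deint, VASurfaceID src, VASurfaceID dst )
{
	VAProcPipelineParameterBuffer pipeline_param;
	VABufferID pipeline_buf;
	VAStatus status;

	memset( &pipeline_param, 0, sizeof pipeline_param );
	pipeline_param.surface = src;          /* interlaced input surface */
	pipeline_param.filters = &vpp_deint;   /* filter buffer from init_vpp() */
	pipeline_param.num_filters = 1;

	status = vaCreateBuffer( VADisp, context_vpp, VAProcPipelineParameterBufferType, sizeof pipeline_param, 1, &pipeline_param, &pipeline_buf );
	if ( status != VA_STATUS_SUCCESS )
		return status;

	vaBeginPicture( VADisp, context_vpp, dst );   /* dst receives the progressive output */
	vaRenderPicture( VADisp, context_vpp, &pipeline_buf, 1 );
	vaEndPicture( VADisp, context_vpp );
	return vaDestroyBuffer( VADisp, pipeline_buf );
}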
Example #20
static int Open( vlc_va_vaapi_t *p_va, int i_codec_id )
{
    VAProfile i_profile;
    int i_surface_count;

    /* */
    switch( i_codec_id )
    {
    case CODEC_ID_MPEG1VIDEO:
    case CODEC_ID_MPEG2VIDEO:
        i_profile = VAProfileMPEG2Main;
        i_surface_count = 2+1;
        break;
    case CODEC_ID_MPEG4:
        i_profile = VAProfileMPEG4AdvancedSimple;
        i_surface_count = 2+1;
        break;
    case CODEC_ID_WMV3:
        i_profile = VAProfileVC1Main;
        i_surface_count = 2+1;
        break;
    case CODEC_ID_VC1:
        i_profile = VAProfileVC1Advanced;
        i_surface_count = 2+1;
        break;
    case CODEC_ID_H264:
        i_profile = VAProfileH264High;
        i_surface_count = 16+1;
        break;
    default:
        return VLC_EGENERIC;
    }

    /* */
    memset( p_va, 0, sizeof(*p_va) );
    p_va->i_config_id  = VA_INVALID_ID;
    p_va->i_context_id = VA_INVALID_ID;
    p_va->image.image_id = VA_INVALID_ID;

    /* Create a VA display */
    p_va->p_display_x11 = XOpenDisplay(NULL);
    if( !p_va->p_display_x11 )
        goto error;

    p_va->p_display = vaGetDisplay( p_va->p_display_x11 );
    if( !p_va->p_display )
        goto error;

    if( vaInitialize( p_va->p_display, &p_va->i_version_major, &p_va->i_version_minor ) )
        goto error;

    /* Create a VA configuration */
    VAConfigAttrib attrib;
    memset( &attrib, 0, sizeof(attrib) );
    attrib.type = VAConfigAttribRTFormat;
    if( vaGetConfigAttributes( p_va->p_display,
                               i_profile, VAEntrypointVLD, &attrib, 1 ) )
        goto error;

    /* Not sure what to do if YUV420 is unsupported; I don't have a way to test that path */
    if( (attrib.value & VA_RT_FORMAT_YUV420) == 0 )
        goto error;
    if( vaCreateConfig( p_va->p_display,
                        i_profile, VAEntrypointVLD, &attrib, 1, &p_va->i_config_id ) )
    {
        p_va->i_config_id = VA_INVALID_ID;
        goto error;
    }

    p_va->i_surface_count = i_surface_count;

    if( asprintf( &p_va->va.description, "VA API version %d.%d",
                  p_va->i_version_major, p_va->i_version_minor ) < 0 )
        p_va->va.description = NULL;

    return VLC_SUCCESS;

error:
    return VLC_EGENERIC;
}
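Open() only records i_surface_count and creates the configuration; the surfaces and the decode context are created in a separate step once the coded size is known. A rough sketch of that follow-up step, assuming the libva >= 0.34 vaCreateSurfaces() signature and a hypothetical helper name (not VLC's actual code):

#include <va/va.h>

/* Sketch only: allocate i_surface_count surfaces at the coded size and bind
 * them to a decode context using the config created in Open(). */
static int CreateDecodeContext( VADisplay dpy, VAConfigID config_id,
                                int i_width, int i_height,
                                VASurfaceID *p_surfaces, int i_surface_count,
                                VAContextID *p_context_id )
{
    if( vaCreateSurfaces( dpy, VA_RT_FORMAT_YUV420, i_width, i_height,
                          p_surfaces, i_surface_count, NULL, 0 ) != VA_STATUS_SUCCESS )
        return -1;

    if( vaCreateContext( dpy, config_id, i_width, i_height, VA_PROGRESSIVE,
                         p_surfaces, i_surface_count, p_context_id ) != VA_STATUS_SUCCESS )
    {
        vaDestroySurfaces( dpy, p_surfaces, i_surface_count );
        *p_context_id = VA_INVALID_ID;
        return -1;
    }
    return 0;
}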
Example #21
bool VAApiWriter::vaCreateConfigAndContext()
{
	return vaCreateConfig( VADisp, profile, VAEntrypointVLD, NULL, 0, &config ) == VA_STATUS_SUCCESS &&
	       vaCreateContext( VADisp, config, outW, outH, VA_PROGRESSIVE, surfaces, surfacesCount, &context ) == VA_STATUS_SUCCESS;
}
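The matching teardown is worth keeping next to this helper: the context must be destroyed before the configuration, and resetting both handles to VA_INVALID_ID keeps the cleanup safe to run more than once. A small sketch (the function name is an assumption, not part of VAApiWriter):

#include <va/va.h>

/* Sketch of the matching teardown for vaCreateConfigAndContext(). */
static void destroyConfigAndContext( VADisplay VADisp, VAContextID *context, VAConfigID *config )
{
	if ( *context != VA_INVALID_ID )
	{
		vaDestroyContext( VADisp, *context );
		*context = VA_INVALID_ID;
	}
	if ( *config != VA_INVALID_ID )
	{
		vaDestroyConfig( VADisp, *config );
		*config = VA_INVALID_ID;
	}
}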
Example #22
VdpStatus
softVdpDecoderCreate(VdpDevice device, VdpDecoderProfile profile, uint32_t width, uint32_t height,
                     uint32_t max_references, VdpDecoder *decoder)
{
    VdpStatus err_code;
    if (!decoder)
        return VDP_STATUS_INVALID_POINTER;
    VdpDeviceData *deviceData = handle_acquire(device, HANDLETYPE_DEVICE);
    if (NULL == deviceData)
        return VDP_STATUS_INVALID_HANDLE;
    if (!deviceData->va_available) {
        err_code = VDP_STATUS_INVALID_DECODER_PROFILE;
        goto quit;
    }
    VADisplay va_dpy = deviceData->va_dpy;

    VdpDecoderData *data = calloc(1, sizeof(VdpDecoderData));
    if (NULL == data) {
        err_code = VDP_STATUS_RESOURCES;
        goto quit;
    }

    data->type = HANDLETYPE_DECODER;
    data->device = deviceData;
    data->profile = profile;
    data->width = width;
    data->height = height;
    data->max_references = max_references;

    // Initialize the free list: initially every render-target slot is free.
    data->free_list_head = -1;
    for (int k = 0; k < MAX_RENDER_TARGETS; k ++) {
        free_list_push(data->free_list, &data->free_list_head, k);
    }

    VAProfile va_profile;
    VAStatus status;
    int final_try = 0;
    VdpDecoderProfile next_profile = profile;

    // Try to create a decoder for the requested profile. On failure, fall back to a more advanced one.
    while (! final_try) {
        profile = next_profile;
        switch (profile) {
        case VDP_DECODER_PROFILE_H264_BASELINE:
            va_profile = VAProfileH264Baseline;
            data->num_render_targets = NUM_RENDER_TARGETS_H264;
            next_profile = VDP_DECODER_PROFILE_H264_MAIN;
            break;
        case VDP_DECODER_PROFILE_H264_MAIN:
            va_profile = VAProfileH264Main;
            data->num_render_targets = NUM_RENDER_TARGETS_H264;
            next_profile = VDP_DECODER_PROFILE_H264_HIGH;
            break;
        case VDP_DECODER_PROFILE_H264_HIGH:
            va_profile = VAProfileH264High;
            data->num_render_targets = NUM_RENDER_TARGETS_H264;
            // there is no more advanced profile, so this is the final try
            final_try = 1;
            break;
        default:
            traceError("error (softVdpDecoderCreate): decoder %s not implemented\n",
                       reverse_decoder_profile(profile));
            err_code = VDP_STATUS_INVALID_DECODER_PROFILE;
            goto quit_free_data;
        }

        status = vaCreateConfig(va_dpy, va_profile, VAEntrypointVLD, NULL, 0, &data->config_id);
        if (VA_STATUS_SUCCESS == status)        // break loop if decoder created
            break;
    }

    if (VA_STATUS_SUCCESS != status) {
        err_code = VDP_STATUS_ERROR;
        goto quit_free_data;
    }

    // Create surfaces. All video surfaces are created here, rather than in
    // VdpVideoSurfaceCreate: VAAPI requires surfaces to be bound to a context
    // at context creation time, while VDPAU allows this to happen later. So the
    // trick is that VDP video surfaces get their va_surf dynamically in DecoderRender.

    // TODO: check format of surfaces created
#if VA_CHECK_VERSION(0, 34, 0)
    status = vaCreateSurfaces(va_dpy, VA_RT_FORMAT_YUV420, width, height,
        data->render_targets, data->num_render_targets, NULL, 0);
#else
    status = vaCreateSurfaces(va_dpy, width, height, VA_RT_FORMAT_YUV420,
        data->num_render_targets, data->render_targets);
#endif
    if (VA_STATUS_SUCCESS != status) {
        err_code = VDP_STATUS_ERROR;
        goto quit_free_data;
    }

    status = vaCreateContext(va_dpy, data->config_id, width, height, VA_PROGRESSIVE,
        data->render_targets, data->num_render_targets, &data->context_id);
    if (VA_STATUS_SUCCESS != status) {
        err_code = VDP_STATUS_ERROR;
        goto quit_free_data;
    }

    deviceData->refcount ++;
    *decoder = handle_insert(data);

    err_code = VDP_STATUS_OK;
    goto quit;

quit_free_data:
    free(data);
quit:
    handle_release(device);
    return err_code;
}
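free_list_push() and data->free_list_head are used above but their implementation is not shown. A plausible sketch of such an index-based free list, assuming list[i] stores the index of the next free render-target slot and -1 marks the end of the list:

/* Sketch (an assumption; the real helpers live elsewhere in the project):
 * a singly linked free list stored inside an int array. */
static void free_list_push(int *list, int *head, int value)
{
    list[value] = *head;   /* new entry points at the old head */
    *head = value;
}

static int free_list_pop(int *list, int *head)
{
    if (*head < 0)
        return -1;         /* nothing free */
    int value = *head;
    *head = list[value];   /* advance head to the next free slot */
    return value;
}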
Decode_Status VaapiDecoderBase::setupVA(uint32_t numSurface, VAProfile profile)
{
    INFO("base: setup VA");
    uint32_t i;
    VASurfaceID *surfaces;
    VideoSurfaceBuffer *buf;
    VaapiSurface *suf;
    Decode_Status status;
    VAStatus vaStatus = VA_STATUS_SUCCESS;

    if (m_enableNativeBuffersFlag == true) {
        numSurface = 20;        //NATIVE_WINDOW_COUNT;
    }

    if (m_VAStarted) {
        return DECODE_SUCCESS;
    }

    if (m_VADisplay != NULL) {
        WARNING("VA is partially started.");
        return DECODE_FAIL;
    }
#ifdef ANDROID
    m_display = new Display;
    *m_display = ANDROID_DISPLAY_HANDLE;
#else
    if (!m_display) {
        m_display = XOpenDisplay(NULL);
        m_ownNativeDisplay = true;
    }
#if __PLATFORM_BYT__
    if (setenv("LIBVA_DRIVER_NAME", "wrapper", 1) == 0) {
        INFO("setting LIBVA_DRIVER_NAME to wrapper for chromeos");
    }
#endif
#endif

    m_VADisplay = vaGetDisplay(m_display);
    if (m_VADisplay == NULL) {
        ERROR("vaGetDisplay failed.");
        return DECODE_DRIVER_FAIL;
    }

    int majorVersion, minorVersion;
    vaStatus = vaInitialize(m_VADisplay, &majorVersion, &minorVersion);
    checkVaapiStatus(vaStatus, "vaInitialize");

    VAConfigAttrib attrib;
    attrib.type = VAConfigAttribRTFormat;
    attrib.value = VA_RT_FORMAT_YUV420;

    INFO("base:the profile = %d", profile);
    vaStatus = vaCreateConfig(m_VADisplay,
                              profile,
                              VAEntrypointVLD, &attrib, 1, &m_VAConfig);
    checkVaapiStatus(vaStatus, "vaCreateConfig");

    m_configBuffer.surfaceNumber = numSurface;
    m_bufPool = new VaapiSurfaceBufferPool(m_VADisplay, &m_configBuffer);
    surfaces = new VASurfaceID[numSurface];
    for (i = 0; i < numSurface; i++) {
        surfaces[i] = VA_INVALID_SURFACE;
        buf = m_bufPool->getBufferByIndex(i);
        suf = m_bufPool->getVaapiSurface(buf);
        if (suf)
            surfaces[i] = suf->getID();
    }

    vaStatus = vaCreateContext(m_VADisplay,
                               m_VAConfig,
                               m_videoFormatInfo.width,
                               m_videoFormatInfo.height,
                               0, surfaces, numSurface, &m_VAContext);
    checkVaapiStatus(vaStatus, "vaCreateContext");

    VADisplayAttribute rotate;
    rotate.type = VADisplayAttribRotation;
    rotate.value = VA_ROTATION_NONE;
    if (m_configBuffer.rotationDegrees == 0)
        rotate.value = VA_ROTATION_NONE;
    else if (m_configBuffer.rotationDegrees == 90)
        rotate.value = VA_ROTATION_90;
    else if (m_configBuffer.rotationDegrees == 180)
        rotate.value = VA_ROTATION_180;
    else if (m_configBuffer.rotationDegrees == 270)
        rotate.value = VA_ROTATION_270;

    vaStatus = vaSetDisplayAttributes(m_VADisplay, &rotate, 1);

    m_videoFormatInfo.surfaceNumber = numSurface;
    m_videoFormatInfo.ctxSurfaces = surfaces;
    if (!(m_configBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER)) {
        m_videoFormatInfo.surfaceWidth = m_videoFormatInfo.width;
        m_videoFormatInfo.surfaceHeight = m_videoFormatInfo.height;
    }

    m_VAStarted = true;
    return DECODE_SUCCESS;
}
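checkVaapiStatus() is called throughout setupVA() but its implementation is not shown; note also that setupVA() ignores its return value rather than bailing out on failure. A minimal sketch of what such a helper typically looks like (an assumption, not the project's actual code):

#include <stdbool.h>
#include <stdio.h>
#include <va/va.h>

/* Log a failing libva call and report success/failure to the caller. */
static bool checkVaapiStatus(VAStatus status, const char *prompt)
{
    if (status != VA_STATUS_SUCCESS) {
        fprintf(stderr, "%s failed: %s (0x%x)\n",
                prompt, vaErrorStr(status), status);
        return false;
    }
    return true;
}

A caller that checks the result, e.g. "if (!checkVaapiStatus(vaStatus, "vaCreateConfig")) return DECODE_DRIVER_FAIL;", would avoid continuing setup with an invalid config or context.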