/** Finds a VA profile for the codec in the FFmpeg context that the display can decode */
static GstVaapiProfile
get_profile(AVCodecContext *avctx, GstVaapiEntrypoint entrypoint)
{
    GstVaapiContextFfmpeg * const vactx = avctx->hwaccel_context;
    GstVaapiDisplay *display;
    GstVaapiProfile profiles[4];
    guint i, n_profiles = 0;

/* Appends a candidate profile; earlier entries are preferred */
#define ADD_PROFILE(profile) do {                               \
        profiles[n_profiles++] = GST_VAAPI_PROFILE_##profile;   \
    } while (0)

    switch (avctx->codec_id) {
    case CODEC_ID_MPEG1VIDEO:
        ADD_PROFILE(MPEG1);
        break;
    case CODEC_ID_MPEG2VIDEO:
        ADD_PROFILE(MPEG2_MAIN);
        ADD_PROFILE(MPEG2_SIMPLE);
        break;
    case CODEC_ID_H263:
        ADD_PROFILE(H263_BASELINE);
        /* fall-through */
    case CODEC_ID_MPEG4:
        ADD_PROFILE(MPEG4_MAIN);
        ADD_PROFILE(MPEG4_ADVANCED_SIMPLE);
        ADD_PROFILE(MPEG4_SIMPLE);
        break;
    case CODEC_ID_H264:
        if (avctx->profile == 66) /* baseline */
            ADD_PROFILE(H264_BASELINE);
        else {
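            /* High profile decoders also handle main profile, so add it as a fallback */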
            if (avctx->profile == 77) /* main */
                ADD_PROFILE(H264_MAIN);
            ADD_PROFILE(H264_HIGH);
        }
        break;
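    /* WMV3 carries VC-1 simple/main profile data; advanced profile uses CODEC_ID_VC1 */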
    case CODEC_ID_WMV3:
        if (avctx->profile == 0) /* simple */
            ADD_PROFILE(VC1_SIMPLE);
        ADD_PROFILE(VC1_MAIN);
        break;
    case CODEC_ID_VC1:
        ADD_PROFILE(VC1_ADVANCED);
        break;
    default:
        break;
    }

#undef ADD_PROFILE

    display = GST_VAAPI_DECODER_DISPLAY(vactx->decoder);
    if (!display)
        return GST_VAAPI_PROFILE_UNKNOWN;

    /* Return the first candidate profile the display can actually decode */
    for (i = 0; i < n_profiles; i++)
        if (gst_vaapi_display_has_decoder(display, profiles[i], entrypoint))
            return profiles[i];
    return GST_VAAPI_PROFILE_UNKNOWN;
}
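
/*
 * Hypothetical usage sketch (not part of gstreamer-vaapi): one way an FFmpeg
 * get_format() callback could use get_profile() above to accept the VA-API
 * pixel format only when the display supports the stream's profile. The
 * function name and the fallback choice are assumptions for illustration;
 * it assumes avctx->hwaccel_context was already set up by the caller and
 * that the surrounding file provides the usual FFmpeg/glib headers.
 */
static enum PixelFormat
example_get_format(AVCodecContext *avctx, const enum PixelFormat *fmt)
{
    guint i;

    /* The format list offered by libavcodec is terminated by PIX_FMT_NONE */
    for (i = 0; fmt[i] != PIX_FMT_NONE; i++) {
        if (fmt[i] != PIX_FMT_VAAPI_VLD)
            continue;
        /* Accept hardware decoding only if a supported VA profile was found */
        if (get_profile(avctx, GST_VAAPI_ENTRYPOINT_VLD) !=
            GST_VAAPI_PROFILE_UNKNOWN)
            return fmt[i];
    }
    /* Otherwise let FFmpeg pick its default (software) pixel format */
    return avcodec_default_get_format(avctx, fmt);
}

/** Ensures the VA decoding context matches the current VC-1 profile and picture size */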
static GstVaapiDecoderStatus
ensure_context(GstVaapiDecoderVC1 *decoder)
{
    GstVaapiDecoderVC1Private * const priv = decoder->priv;
    GstVaapiProfile profiles[2];
    GstVaapiEntrypoint entrypoint = GST_VAAPI_ENTRYPOINT_VLD;
    guint i, n_profiles = 0;
    gboolean reset_context = FALSE;

    if (priv->profile_changed) {
        GST_DEBUG("profile changed");
        priv->profile_changed = FALSE;
        reset_context         = TRUE;

        profiles[n_profiles++] = priv->profile;
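        /* A main profile context can also decode simple profile streams */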
        if (priv->profile == GST_VAAPI_PROFILE_VC1_SIMPLE)
            profiles[n_profiles++] = GST_VAAPI_PROFILE_VC1_MAIN;

        for (i = 0; i < n_profiles; i++) {
            if (gst_vaapi_display_has_decoder(GST_VAAPI_DECODER_DISPLAY(decoder),
                                              profiles[i], entrypoint))
                break;
        }
        if (i == n_profiles)
            return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
        priv->profile = profiles[i];
    }

    if (priv->size_changed) {
        GST_DEBUG("size changed");
        priv->size_changed = FALSE;
        reset_context      = TRUE;
    }

    if (reset_context) {
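        /* (Re)create the VA decoding context with the updated parameters */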
        GstVaapiContextInfo info;

        info.profile    = priv->profile;
        info.entrypoint = entrypoint;
        info.width      = priv->width;
        info.height     = priv->height;
        info.ref_frames = 2;
        reset_context   = gst_vaapi_decoder_ensure_context(
            GST_VAAPI_DECODER(decoder),
            &info
        );
        if (!reset_context)
            return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
    }
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
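
/** Ensures the VA decoding context matches the current JPEG profile */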
static GstVaapiDecoderStatus
ensure_context(GstVaapiDecoderJpeg *decoder)
{
    GstVaapiDecoderJpegPrivate * const priv = &decoder->priv;
    GstVaapiProfile profiles[2];
    GstVaapiEntrypoint entrypoint = GST_VAAPI_ENTRYPOINT_VLD;
    guint i, n_profiles = 0;
    gboolean reset_context = FALSE;

    if (priv->profile_changed) {
        GST_DEBUG("profile changed");
        priv->profile_changed = FALSE;
        reset_context         = TRUE;

        profiles[n_profiles++] = priv->profile;
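        /* NOTE: the fallback from extended to baseline profile is disabled below */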
        //if (priv->profile == GST_VAAPI_PROFILE_JPEG_EXTENDED)
        //    profiles[n_profiles++] = GST_VAAPI_PROFILE_JPEG_BASELINE;

        for (i = 0; i < n_profiles; i++) {
            if (gst_vaapi_display_has_decoder(GST_VAAPI_DECODER_DISPLAY(decoder),
                                              profiles[i], entrypoint))
                break;
        }
        if (i == n_profiles)
            return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
        priv->profile = profiles[i];
    }

    if (reset_context) {
        GstVaapiContextInfo info;

        info.profile    = priv->profile;
        info.entrypoint = entrypoint;
        info.chroma_type = GST_VAAPI_CHROMA_TYPE_YUV420;
        info.width      = priv->width;
        info.height     = priv->height;
        info.ref_frames = 2;
        reset_context   = gst_vaapi_decoder_ensure_context(
            GST_VAAPI_DECODER(decoder),
            &info
        );
        if (!reset_context)
            return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
    }
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
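
/** Ensures a VA decoding context for VP8, resetting it on size changes */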
static GstVaapiDecoderStatus
ensure_context (GstVaapiDecoderVp8 * decoder)
{
  GstVaapiDecoderVp8Private *const priv = &decoder->priv;
  const GstVaapiProfile profile = GST_VAAPI_PROFILE_VP8;
  const GstVaapiEntrypoint entrypoint = GST_VAAPI_ENTRYPOINT_VLD;
  gboolean reset_context = FALSE;

  if (priv->profile != profile) {
    if (!gst_vaapi_display_has_decoder (GST_VAAPI_DECODER_DISPLAY (decoder),
            profile, entrypoint))
      return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;

    priv->profile = profile;
    reset_context = TRUE;
  }

  if (priv->size_changed) {
    GST_DEBUG ("size changed");
    priv->size_changed = FALSE;
    reset_context = TRUE;
  }

  if (reset_context) {
    GstVaapiContextInfo info;

    info.profile = priv->profile;
    info.entrypoint = entrypoint;
    info.chroma_type = GST_VAAPI_CHROMA_TYPE_YUV420;
    info.width = priv->width;
    info.height = priv->height;
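    /* VP8 uses up to three reference frames: last, golden and altref */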
    info.ref_frames = 3;
    reset_context =
        gst_vaapi_decoder_ensure_context (GST_VAAPI_DECODER (decoder), &info);

    if (!reset_context)
      return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
  }
  return GST_VAAPI_DECODER_STATUS_SUCCESS;
}