/** Finds a suitable profile from the FFmpeg context */
static GstVaapiProfile
get_profile(AVCodecContext *avctx, GstVaapiEntrypoint entrypoint)
{
    GstVaapiContextFfmpeg * const vactx = avctx->hwaccel_context;
    GstVaapiDisplay *display;
    GstVaapiProfile profiles[4];
    guint i, n_profiles = 0;

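    /* Helper: append a candidate profile to the local list */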
#define ADD_PROFILE(profile) do {                               \
        profiles[n_profiles++] = GST_VAAPI_PROFILE_##profile;   \
    } while (0)

    switch (avctx->codec_id) {
    case CODEC_ID_MPEG1VIDEO:
        ADD_PROFILE(MPEG1);
        break;
    case CODEC_ID_MPEG2VIDEO:
        ADD_PROFILE(MPEG2_MAIN);
        ADD_PROFILE(MPEG2_SIMPLE);
        break;
    case CODEC_ID_H263:
        ADD_PROFILE(H263_BASELINE);
        /* fall-through */
    case CODEC_ID_MPEG4:
        ADD_PROFILE(MPEG4_MAIN);
        ADD_PROFILE(MPEG4_ADVANCED_SIMPLE);
        ADD_PROFILE(MPEG4_SIMPLE);
        break;
    case CODEC_ID_H264:
        if (avctx->profile == 66) /* baseline */
            ADD_PROFILE(H264_BASELINE);
        else {
            if (avctx->profile == 77) /* main */
                ADD_PROFILE(H264_MAIN);
            ADD_PROFILE(H264_HIGH);
        }
        break;
    case CODEC_ID_WMV3:
        if (avctx->profile == 0) /* simple */
            ADD_PROFILE(VC1_SIMPLE);
        ADD_PROFILE(VC1_MAIN);
        break;
    case CODEC_ID_VC1:
        ADD_PROFILE(VC1_ADVANCED);
        break;
    default:
        break;
    }

#undef ADD_PROFILE

    display = GST_VAAPI_DECODER_DISPLAY(vactx->decoder);
    if (!display)
        return 0;

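    /* Pick the first candidate profile the VA display can actually decode */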
    for (i = 0; i < n_profiles; i++)
        if (gst_vaapi_display_has_decoder(display, profiles[i], entrypoint))
            return profiles[i];
    return 0;
}
Example #2
static GstVaapiDecoderStatus
ensure_context(GstVaapiDecoderVC1 *decoder)
{
    GstVaapiDecoderVC1Private * const priv = decoder->priv;
    GstVaapiProfile profiles[2];
    GstVaapiEntrypoint entrypoint = GST_VAAPI_ENTRYPOINT_VLD;
    guint i, n_profiles = 0;
    gboolean reset_context = FALSE;

    if (priv->profile_changed) {
        GST_DEBUG("profile changed");
        priv->profile_changed = FALSE;
        reset_context         = TRUE;

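        /* Try the exact profile first; simple profile can fall back to main */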
        profiles[n_profiles++] = priv->profile;
        if (priv->profile == GST_VAAPI_PROFILE_VC1_SIMPLE)
            profiles[n_profiles++] = GST_VAAPI_PROFILE_VC1_MAIN;

        for (i = 0; i < n_profiles; i++) {
            if (gst_vaapi_display_has_decoder(GST_VAAPI_DECODER_DISPLAY(decoder),
                                              profiles[i], entrypoint))
                break;
        }
        if (i == n_profiles)
            return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
        priv->profile = profiles[i];
    }

    if (priv->size_changed) {
        GST_DEBUG("size changed");
        priv->size_changed = FALSE;
        reset_context      = TRUE;
    }

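    /* (Re)create the VA context with the negotiated profile and current size */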
    if (reset_context) {
        GstVaapiContextInfo info;

        info.profile    = priv->profile;
        info.entrypoint = entrypoint;
        info.width      = priv->width;
        info.height     = priv->height;
        info.ref_frames = 2;
        reset_context   = gst_vaapi_decoder_ensure_context(
            GST_VAAPI_DECODER(decoder),
            &info
        );
        if (!reset_context)
            return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
    }
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
static GstVaapiDecoderStatus
ensure_context(GstVaapiDecoderJpeg *decoder)
{
    GstVaapiDecoderJpegPrivate * const priv = &decoder->priv;
    GstVaapiProfile profiles[2];
    GstVaapiEntrypoint entrypoint = GST_VAAPI_ENTRYPOINT_VLD;
    guint i, n_profiles = 0;
    gboolean reset_context = FALSE;

    if (priv->profile_changed) {
        GST_DEBUG("profile changed");
        priv->profile_changed = FALSE;
        reset_context         = TRUE;

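        /* Only the exact profile is tried; the baseline fallback below is disabled */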
        profiles[n_profiles++] = priv->profile;
        //if (priv->profile == GST_VAAPI_PROFILE_JPEG_EXTENDED)
        //    profiles[n_profiles++] = GST_VAAPI_PROFILE_JPEG_BASELINE;

        for (i = 0; i < n_profiles; i++) {
            if (gst_vaapi_display_has_decoder(GST_VAAPI_DECODER_DISPLAY(decoder),
                                              profiles[i], entrypoint))
                break;
        }
        if (i == n_profiles)
            return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
        priv->profile = profiles[i];
    }

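    /* (Re)create the VA context for the negotiated JPEG profile */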
    if (reset_context) {
        GstVaapiContextInfo info;

        info.profile    = priv->profile;
        info.entrypoint = entrypoint;
        info.chroma_type = GST_VAAPI_CHROMA_TYPE_YUV420;
        info.width      = priv->width;
        info.height     = priv->height;
        info.ref_frames = 2;
        reset_context   = gst_vaapi_decoder_ensure_context(
            GST_VAAPI_DECODER(decoder),
            &info
        );
        if (!reset_context)
            return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
    }
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
/** Ensures the VA context is correctly set up for the current FFmpeg context */
static GstVaapiContext *
get_context(AVCodecContext *avctx)
{
    GstVaapiContextFfmpeg * const vactx = avctx->hwaccel_context;
    GstVaapiDecoder * const decoder = GST_VAAPI_DECODER(vactx->decoder);
    GstVaapiDisplay *display;
    GstVaapiContext *context;
    gboolean success;

    if (!avctx->coded_width || !avctx->coded_height)
        return NULL;

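    /* Propagate framerate and pixel aspect ratio from the FFmpeg context */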
    gst_vaapi_decoder_set_framerate(
        decoder,
        avctx->time_base.den / avctx->ticks_per_frame,
        avctx->time_base.num
    );

    gst_vaapi_decoder_set_pixel_aspect_ratio(
        decoder,
        avctx->sample_aspect_ratio.num,
        avctx->sample_aspect_ratio.den
    );

    success = gst_vaapi_decoder_ensure_context(
        decoder,
        vactx->profile,
        vactx->entrypoint,
        avctx->coded_width,
        avctx->coded_height
    );
    if (!success) {
        GST_DEBUG("failed to reset VA context:");
        GST_DEBUG("  profile 0x%08x", vactx->profile);
        GST_DEBUG("  entrypoint %d", vactx->entrypoint);
        GST_DEBUG("  surface size %dx%d", avctx->width, avctx->height);
        return NULL;
    }
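    /* Expose the VA display and context handles to the FFmpeg hwaccel context */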
    display                = GST_VAAPI_DECODER_DISPLAY(decoder);
    context                = GST_VAAPI_DECODER_CONTEXT(decoder);
    vactx->base.display    = GST_VAAPI_DISPLAY_VADISPLAY(display);
    vactx->base.context_id = GST_VAAPI_OBJECT_ID(context);
    return context;
}
Example #5
static GstVaapiDecoderStatus
ensure_context (GstVaapiDecoderVp8 * decoder)
{
  GstVaapiDecoderVp8Private *const priv = &decoder->priv;
  const GstVaapiProfile profile = GST_VAAPI_PROFILE_VP8;
  const GstVaapiEntrypoint entrypoint = GST_VAAPI_ENTRYPOINT_VLD;
  gboolean reset_context = FALSE;

  if (priv->profile != profile) {
    if (!gst_vaapi_display_has_decoder (GST_VAAPI_DECODER_DISPLAY (decoder),
            profile, entrypoint))
      return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;

    priv->profile = profile;
    reset_context = TRUE;
  }

  if (priv->size_changed) {
    GST_DEBUG ("size changed");
    priv->size_changed = FALSE;
    reset_context = TRUE;
  }

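  /* (Re)create the VA context; VP8 uses up to 3 reference frames */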
  if (reset_context) {
    GstVaapiContextInfo info;

    info.profile = priv->profile;
    info.entrypoint = entrypoint;
    info.chroma_type = GST_VAAPI_CHROMA_TYPE_YUV420;
    info.width = priv->width;
    info.height = priv->height;
    info.ref_frames = 3;
    reset_context =
        gst_vaapi_decoder_ensure_context (GST_VAAPI_DECODER (decoder), &info);

    if (!reset_context)
      return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
  }
  return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
static GstVaapiDecoderStatus
decode_frame(GstVaapiDecoderFfmpeg *ffdecoder, guchar *buf, guint buf_size)
{
    GstVaapiDecoderFfmpegPrivate * const priv = ffdecoder->priv;
    GstVaapiDisplay * const display = GST_VAAPI_DECODER_DISPLAY(ffdecoder);
    GstVaapiSurface *surface;
    int bytes_read, got_picture = 0;
    AVPacket avpkt;

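    /* Hold the display lock while libavcodec runs the VA hwaccel callbacks */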
    GST_VAAPI_DISPLAY_LOCK(display);
    av_init_packet(&avpkt);
    avpkt.data = buf;
    avpkt.size = buf_size;
    bytes_read = avcodec_decode_video2(
        priv->avctx,
        priv->frame,
        &got_picture,
        &avpkt
    );
    GST_VAAPI_DISPLAY_UNLOCK(display);
    if (!got_picture)
        return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
    if (bytes_read < 0)
        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;

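    /* The VA surface ID of the decoded picture is carried in frame->data[3] */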
    surface = gst_vaapi_context_find_surface_by_id(
        GST_VAAPI_DECODER_CONTEXT(ffdecoder),
        GPOINTER_TO_UINT(priv->frame->data[3])
    );
    if (!surface)
        return GST_VAAPI_DECODER_STATUS_ERROR_INVALID_SURFACE;

    if (!gst_vaapi_decoder_push_surface(GST_VAAPI_DECODER_CAST(ffdecoder),
                                        surface, priv->frame->pts))
        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
static gboolean
gst_vaapi_decoder_ffmpeg_open(GstVaapiDecoderFfmpeg *ffdecoder, GstBuffer *buffer)
{
    GstVaapiDecoderFfmpegPrivate * const priv = ffdecoder->priv;
    GstVaapiDisplay * const display = GST_VAAPI_DECODER_DISPLAY(ffdecoder);
    GstBuffer * const codec_data = GST_VAAPI_DECODER_CODEC_DATA(ffdecoder);
    GstVaapiCodec codec = GST_VAAPI_DECODER_CODEC(ffdecoder);
    enum CodecID codec_id;
    AVCodec *ffcodec;
    gboolean try_parser, need_parser;
    int ret;

    gst_vaapi_decoder_ffmpeg_close(ffdecoder);

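    /* Forward codec_data from the demuxer to FFmpeg as extradata */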
    if (codec_data) {
        const guchar *data = GST_BUFFER_DATA(codec_data);
        const guint   size = GST_BUFFER_SIZE(codec_data);
        if (!set_codec_data(priv->avctx, data, size))
            return FALSE;
    }

    codec_id = get_codec_id_from_codec(codec);
    if (codec_id == CODEC_ID_NONE)
        return FALSE;

    ffcodec = avcodec_find_decoder(codec_id);
    if (!ffcodec)
        return FALSE;

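    /* Determine whether a bitstream parser is needed for this codec */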
    switch (codec_id) {
    case CODEC_ID_H264:
        /* For AVC1 formats, sequence headers are in extradata and
           input encoded buffers represent the whole NAL unit */
        try_parser  = priv->avctx->extradata_size == 0;
        need_parser = try_parser;
        break;
    case CODEC_ID_WMV3:
        /* There is no WMV3 parser in FFmpeg */
        try_parser  = FALSE;
        need_parser = FALSE;
        break;
    case CODEC_ID_VC1:
        /* For VC-1, sequence headers are in extradata and input encoded
           buffers represent the whole slice */
        try_parser  = priv->avctx->extradata_size == 0;
        need_parser = FALSE;
        break;
    default:
        try_parser  = TRUE;
        need_parser = TRUE;
        break;
    }

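    /* A parser init failure is only fatal if the codec requires a parser */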
    if (try_parser) {
        priv->pctx = av_parser_init(codec_id);
        if (!priv->pctx && need_parser)
            return FALSE;
    }

    /* XXX: av_find_stream_info() does this, and some codecs really
       need an extradata buffer for initialization (e.g. VC-1) */
    if (!priv->avctx->extradata && priv->pctx && priv->pctx->parser->split) {
        const guchar *buf = GST_BUFFER_DATA(buffer);
        guint buf_size = GST_BUFFER_SIZE(buffer);
        buf_size = priv->pctx->parser->split(priv->avctx, buf, buf_size);
        if (buf_size > 0 && !set_codec_data(priv->avctx, buf, buf_size))
            return FALSE;
    }

    if (priv->pctx && !need_parser) {
        av_parser_close(priv->pctx);
        priv->pctx = NULL;
    }

    /* Use size information from the demuxer, whenever available */
    priv->avctx->coded_width  = GST_VAAPI_DECODER_WIDTH(ffdecoder);
    priv->avctx->coded_height = GST_VAAPI_DECODER_HEIGHT(ffdecoder);

    GST_VAAPI_DISPLAY_LOCK(display);
    ret = avcodec_open(priv->avctx, ffcodec);
    GST_VAAPI_DISPLAY_UNLOCK(display);
    if (ret < 0)
        return FALSE;
    return TRUE;
}