/**
 * gst_vaapi_decoder_vc1_new:
 * @display: a #GstVaapiDisplay
 * @caps: a #GstCaps holding codec information
 *
 * Creates a new #GstVaapiDecoder for VC-1 decoding. The @caps can
 * hold extra information like codec-data and pictured coded size.
 *
 * Return value: the newly allocated #GstVaapiDecoder object
 */
GstVaapiDecoder *
gst_vaapi_decoder_vc1_new(GstVaapiDisplay *display, GstCaps *caps)
{
    GstVaapiDecoderVC1 *decoder;

    g_return_val_if_fail(GST_VAAPI_IS_DISPLAY(display), NULL);
    g_return_val_if_fail(GST_IS_CAPS(caps), NULL);

    decoder = g_object_new(
        GST_VAAPI_TYPE_DECODER_VC1,
        "display", display,
        "caps",    caps,
        NULL
    );

    /* Construction may fail (e.g. unsupported caps); hand the object
     * back only if it was fully constructed. */
    if (decoder->priv->is_constructed)
        return GST_VAAPI_DECODER_CAST(decoder);

    g_object_unref(decoder);
    return NULL;
}
/**
 * gst_vaapi_decoder_ffmpeg_new:
 * @display: a #GstVaapiDisplay
 * @caps: a #GstCaps holding codec information
 *
 * Creates a new #GstVaapiDecoder based on FFmpeg where the codec is
 * determined from @caps. The @caps can hold extra information like
 * codec-data and pictured coded size.
 *
 * Return value: the newly allocated #GstVaapiDecoder object
 */
GstVaapiDecoder *
gst_vaapi_decoder_ffmpeg_new(GstVaapiDisplay *display, GstCaps *caps)
{
    GstVaapiDecoderFfmpeg *ffdecoder;

    g_return_val_if_fail(GST_VAAPI_IS_DISPLAY(display), NULL);
    g_return_val_if_fail(GST_IS_CAPS(caps), NULL);

    ffdecoder = g_object_new(
        GST_VAAPI_TYPE_DECODER_FFMPEG,
        "display", display,
        "caps",    caps,
        NULL
    );

    /* Construction may fail (e.g. no FFmpeg codec for these caps);
     * hand the object back only if it was fully constructed. */
    if (ffdecoder->priv->is_constructed)
        return GST_VAAPI_DECODER_CAST(ffdecoder);

    g_object_unref(ffdecoder);
    return NULL;
}
/* Decodes one frame's worth of bitstream through FFmpeg and pushes the
 * resulting VA surface downstream.
 *
 * @ffdecoder: the FFmpeg-backed decoder
 * @buf:       pointer to the encoded frame data
 * @buf_size:  size of @buf in bytes
 *
 * Returns: GST_VAAPI_DECODER_STATUS_SUCCESS, or an error status if
 * decoding failed, no picture was produced, the decoded surface could
 * not be resolved, or the surface could not be queued for output.
 */
static GstVaapiDecoderStatus
decode_frame(GstVaapiDecoderFfmpeg *ffdecoder, guchar *buf, guint buf_size)
{
    GstVaapiDecoderFfmpegPrivate * const priv = ffdecoder->priv;
    GstVaapiDisplay * const display = GST_VAAPI_DECODER_DISPLAY(ffdecoder);
    GstVaapiSurface *surface;
    int bytes_read, got_picture = 0;
    AVPacket avpkt;

    /* The display lock serializes FFmpeg's VA-API accelerator callbacks
     * with other users of the display connection. */
    GST_VAAPI_DISPLAY_LOCK(display);
    av_init_packet(&avpkt);
    avpkt.data = buf;
    avpkt.size = buf_size;
    bytes_read = avcodec_decode_video2(
        priv->avctx,
        priv->frame,
        &got_picture,
        &avpkt
    );
    GST_VAAPI_DISPLAY_UNLOCK(display);

    /* FIX: check for a hard decode error (negative return) BEFORE
     * got_picture. On error, got_picture is also zero, so the previous
     * order misreported decode failures as "need more data". */
    if (bytes_read < 0)
        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
    if (!got_picture)
        return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;

    /* The VA accelerator stores the surface ID in frame->data[3]. */
    surface = gst_vaapi_context_find_surface_by_id(
        GST_VAAPI_DECODER_CONTEXT(ffdecoder),
        GPOINTER_TO_UINT(priv->frame->data[3])
    );
    if (!surface)
        return GST_VAAPI_DECODER_STATUS_ERROR_INVALID_SURFACE;

    if (!gst_vaapi_decoder_push_surface(GST_VAAPI_DECODER_CAST(ffdecoder),
                                        surface, priv->frame->pts))
        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
/* Decodes one VC-1 frame-layer BDU: flushes any pending picture,
 * allocates a new picture, parses the frame header, assigns POC/PTS,
 * fills the VA picture parameters and attaches the slice data.
 *
 * @rbdu: the raw (unescaped) bitstream data unit, used for parsing
 * @ebdu: the escaped bitstream data unit, used as VA slice data
 *
 * Returns: GST_VAAPI_DECODER_STATUS_SUCCESS or an error status.
 *
 * NOTE(review): several early-return error paths below exit after
 * GST_VAAPI_PICTURE_NEW() without releasing @picture — presumably the
 * codec object is reclaimed with the frame/context elsewhere, but this
 * looks like a leak; verify against the picture pool's ownership rules.
 */
static GstVaapiDecoderStatus
decode_frame(GstVaapiDecoderVC1 *decoder, GstVC1BDU *rbdu, GstVC1BDU *ebdu)
{
    GstVaapiDecoderVC1Private * const priv = decoder->priv;
    GstVC1SeqHdr * const seq_hdr = &priv->seq_hdr;
    GstVC1AdvancedSeqHdr *advanced = &seq_hdr->advanced; /* NOTE(review): unused in this function */
    GstVC1FrameHdr * const frame_hdr = &priv->frame_hdr;
    GstVC1ParserResult result;
    GstVaapiPicture *picture;
    GstVaapiSlice *slice;
    GstVaapiDecoderStatus status;
    VASliceParameterBufferVC1 *slice_param;
    GstClockTime pts;
    gint32 poc;

    /* Make sure the VA context matches the current sequence parameters. */
    status = ensure_context(decoder);
    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
        GST_DEBUG("failed to reset context");
        return status;
    }

    /* Submit the previously accumulated picture before starting a new one. */
    if (priv->current_picture ) {
        if(!decode_current_picture(decoder))
            return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
    }

    picture = GST_VAAPI_PICTURE_NEW(VC1, decoder);
    if (!picture) {
        GST_DEBUG("failed to allocate picture");
        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
    }

    /* Bitplane buffers are sized from the sequence header dimensions. */
    if (!gst_vc1_bitplanes_ensure_size(priv->bitplanes, seq_hdr)) {
        GST_DEBUG("failed to allocate bitplanes");
        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
    }

    /* Parse from the raw (unescaped) BDU, offset past the start code. */
    memset(frame_hdr, 0, sizeof(*frame_hdr));
    result = gst_vc1_parse_frame_header(
        rbdu->data + rbdu->offset,
        rbdu->size,
        frame_hdr,
        seq_hdr,
        priv->bitplanes
    );
    if (result != GST_VC1_PARSER_OK) {
        GST_DEBUG("failed to parse frame layer");
        return get_status(result);
    }

    /* Map the VC-1 picture type to the VAAPI picture type; I/P/Skipped
     * pictures are reference pictures, B/BI are not and are counted so
     * the DPB timestamps can be corrected below. */
    switch (frame_hdr->ptype) {
    case GST_VC1_PICTURE_TYPE_I:
        picture->type = GST_VAAPI_PICTURE_TYPE_I;
        GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_REFERENCE);
        break;
    case GST_VC1_PICTURE_TYPE_SKIPPED:
    case GST_VC1_PICTURE_TYPE_P:
        picture->type = GST_VAAPI_PICTURE_TYPE_P;
        GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_REFERENCE);
        break;
    case GST_VC1_PICTURE_TYPE_B:
        picture->type = GST_VAAPI_PICTURE_TYPE_B;
        priv->successive_bfrm_cnt += 1;
        break;
    case GST_VC1_PICTURE_TYPE_BI:
        picture->type = GST_VAAPI_PICTURE_TYPE_BI;
        priv->successive_bfrm_cnt += 1;
        break;
    default:
        GST_DEBUG("unsupported picture type %d", frame_hdr->ptype);
        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
    }

    /* Since the DPB only stores reference pictures, an always-increasing
     * POC works here; but the PTS of the frames must be calculated
     * explicitly. Non-reference (B/BI) frames take the previous count. */
    if(!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
        picture->poc = priv->frm_cnt - 1;
    else
        picture->poc = priv->frm_cnt;
    gst_vaapi_decoder_get_framerate(GST_VAAPI_DECODER_CAST(decoder),
        &priv->fps_n, &priv->fps_d);
    /* PTS = poc * frame duration (fps_d/fps_n seconds). */
    pts = gst_util_uint64_scale(picture->poc,
        GST_SECOND * priv->fps_d, priv->fps_n);

    /* After a run of B/BI frames, the reference held in the DPB was
     * stamped too early; push its PTS back by the number of B frames.
     * NOTE(review): this recomputes pts from picture->poc, identical to
     * the value above — possibly 'poc' was intended here; confirm. */
    if(GST_VAAPI_PICTURE_IS_REFERENCE(picture)){
        if (priv->successive_bfrm_cnt) {
            poc = priv->frm_cnt - priv->successive_bfrm_cnt - 1;
            pts = gst_util_uint64_scale(picture->poc,
                GST_SECOND * priv->fps_d, priv->fps_n);
            gst_vaapi_dpb_reset_pts (priv->dpb, poc, pts);
            priv->successive_bfrm_cnt = 0;
        }
    }
    picture->pts = pts;
    priv->frm_cnt++;

    /* Fill in VAPictureParameterBufferVC1 from the parsed headers. */
    if (!fill_picture(decoder, picture))
        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;

    /* The slice data is the escaped BDU, including its start code. */
    slice = GST_VAAPI_SLICE_NEW(
        VC1, decoder,
        ebdu->data + ebdu->sc_offset,
        ebdu->size + ebdu->offset - ebdu->sc_offset
    );
    if (!slice) {
        GST_DEBUG("failed to allocate slice");
        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
    }
    gst_vaapi_picture_add_slice(picture, slice);

    /* Fill in VASliceParameterBufferVC1: macroblock data starts after
     * the start code plus the parsed frame header, counted in bits. */
    slice_param = slice->param;
    slice_param->macroblock_offset =
        8 * (ebdu->offset - ebdu->sc_offset) + frame_hdr->header_size;
    slice_param->slice_vertical_position = 0;

    /* Hand ownership to priv->current_picture and drop our local ref. */
    gst_vaapi_picture_replace(&priv->current_picture, picture);
    gst_vaapi_picture_unref(picture);
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
/* Decodes the codec-data blob attached to the caps.
 *
 * For WMV3 (Simple/Main profile) the codec-data is a bare STRUCT_C
 * sequence header; for WVC1 (Advanced profile) it is a series of
 * start-code-prefixed bitstream data units (sequence header,
 * entry-point header, ...).
 *
 * Returns: GST_VAAPI_DECODER_STATUS_SUCCESS (also for empty codec-data)
 * or an error status.
 */
static GstVaapiDecoderStatus
decode_codec_data(GstVaapiDecoderVC1 *decoder, GstBuffer *buffer)
{
    GstVaapiDecoderVC1Private * const priv = decoder->priv;
    GstVC1SeqHdr * const seq_hdr = &priv->seq_hdr;
    GstVaapiDecoderStatus status;
    GstVC1ParserResult result;
    GstVC1BDU ebdu;
    GstCaps *caps;
    GstStructure *structure;
    guchar *buf;
    guint buf_size, ofs;
    gint width, height;
    guint32 format;

    buf      = GST_BUFFER_DATA(buffer);
    buf_size = GST_BUFFER_SIZE(buffer);
    /* No codec-data is not an error: headers may arrive in-band. */
    if (!buf || buf_size == 0)
        return GST_VAAPI_DECODER_STATUS_SUCCESS;

    /* Coded size and fourcc come from the caps, not the blob itself. */
    caps      = GST_VAAPI_DECODER_CAST(decoder)->priv->caps;
    structure = gst_caps_get_structure(caps, 0);
    if (!gst_structure_get_int(structure, "width", &width) ||
        !gst_structure_get_int(structure, "height", &height)) {
        GST_DEBUG("failed to parse size from codec-data");
        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
    }

    if (!gst_structure_get_fourcc(structure, "format", &format)) {
        GST_DEBUG("failed to parse profile from codec-data");
        return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CODEC;
    }

    /* WMV3 -- expecting sequence header */
    if (format == GST_MAKE_FOURCC('W','M','V','3')) {
        seq_hdr->struct_c.coded_width  = width;
        seq_hdr->struct_c.coded_height = height;

        /* Wrap the whole blob as a single sequence-header BDU. */
        ebdu.type      = GST_VC1_SEQUENCE;
        ebdu.size      = buf_size;
        ebdu.sc_offset = 0;
        ebdu.offset    = 0;
        ebdu.data      = buf;
        return decode_ebdu(decoder, &ebdu);
    }

    /* WVC1 -- expecting bitstream data units */
    if (format != GST_MAKE_FOURCC('W','V','C','1'))
        return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
    seq_hdr->advanced.max_coded_width  = width;
    seq_hdr->advanced.max_coded_height = height;

    /* Walk the blob, decoding one BDU per iteration. */
    ofs = 0;
    do {
        result = gst_vc1_identify_next_bdu(
            buf + ofs,
            buf_size - ofs,
            &ebdu
        );
        switch (result) {
        case GST_VC1_PARSER_NO_BDU_END:
            /* No trailing start code found: assume the EBDU is complete
             * within codec-data bounds and runs to the end of the blob. */
            ebdu.size = buf_size - ofs - (ebdu.offset - ebdu.sc_offset);
            // fall-through
        case GST_VC1_PARSER_OK:
            status = decode_ebdu(decoder, &ebdu);
            ofs += ebdu.offset + ebdu.size;
            break;
        default:
            status = get_status(result);
            break;
        }
    } while (status == GST_VAAPI_DECODER_STATUS_SUCCESS && ofs < buf_size);
    return status;
}