/* handle_frame() vfunc for the (old) GstBaseVideoDecoder API.
 *
 * Decodes one H.264 access unit through VDPAU, then performs the
 * decoded-picture-buffer (DPB) reference management for it.  Until the
 * first IDR picture has been successfully handled, all frames are skipped.
 *
 * Returns: the flow return from rendering or from adding the frame to
 * the DPB; GST_FLOW_OK when the frame is deliberately skipped.
 */
static GstFlowReturn
gst_vdp_h264_dec_handle_frame (GstBaseVideoDecoder * base_video_decoder,
    GstVideoFrame * frame, GstClockTimeDiff deadline)
{
  GstVdpH264Dec *h264_dec = GST_VDP_H264_DEC (base_video_decoder);

  GstH264Frame *h264_frame;
  GstH264Slice *slice;
  GstH264Picture *pic G_GNUC_UNUSED;
  GstH264Sequence *seq G_GNUC_UNUSED;

  GstFlowReturn ret;
  GstVdpVideoBuffer *outbuf;
  VdpPictureInfoH264 info;
  VdpBitstreamBuffer *bufs;
  guint n_bufs;

  GST_DEBUG ("handle_frame");

  h264_frame = GST_H264_FRAME_CAST (frame);

  slice = &h264_frame->slice_hdr;
  pic = slice->picture;
  seq = pic->sequence;

  if (slice->nal_unit.IdrPicFlag) {
    /* New IDR: (re)initialize the decoder state for the new sequence. */
    ret = gst_vdp_h264_dec_idr (h264_dec, h264_frame);

    if (ret == GST_FLOW_OK)
      h264_dec->got_idr = TRUE;
    else {
      /* IDR setup failed: skip this frame but keep the pipeline running.
       * Returning GST_FLOW_OK (not ret) looks deliberate — best-effort
       * until the next IDR arrives. */
      gst_base_video_decoder_skip_frame (base_video_decoder, frame);
      return GST_FLOW_OK;
    }
  }

  /* check if we've got a IDR frame yet */
  if (!h264_dec->got_idr) {
    gst_base_video_decoder_skip_frame (base_video_decoder, frame);
    return GST_FLOW_OK;
  }

  gst_vdp_h264_dec_init_frame_info (h264_dec, h264_frame);

  /* Build the VDPAU picture info and bitstream buffer array, then render.
   * bufs is heap-allocated by the helper and owned by us. */
  info = gst_vdp_h264_dec_fill_info (h264_dec, h264_frame);
  bufs = gst_vdp_h264_dec_create_bitstream_buffers (h264_dec, h264_frame,
      &n_bufs);

  ret = gst_vdp_decoder_render (GST_VDP_DECODER (h264_dec),
      (VdpPictureInfo *) & info, n_bufs, bufs, &outbuf);
  g_free (bufs);

  if (ret != GST_FLOW_OK) {
    gst_base_video_decoder_skip_frame (base_video_decoder, frame);
    return ret;
  }

  frame->src_buffer = GST_BUFFER_CAST (outbuf);

  /* DPB handling */
  if (slice->nal_unit.ref_idc != 0 && !slice->nal_unit.IdrPicFlag) {
    if (slice->dec_ref_pic_marking.adaptive_ref_pic_marking_mode_flag) {
      /* Adaptive reference picture marking: apply each memory management
       * control operation (MMCO) from the slice header in order.  The
       * case numbers correspond to the H.264 MMCO values
       * (spec 8.2.5.4.x). */
      GstH264RefPicMarking *marking;
      guint i;

      marking = slice->dec_ref_pic_marking.ref_pic_marking;
      for (i = 0; i < slice->dec_ref_pic_marking.n_ref_pic_marking; i++) {

        switch (marking[i].memory_management_control_operation) {
          case 1:
          {
            /* MMCO 1: mark a short-term picture as unused for reference.
             * pic_num is derived modulo 2^16 via the guint16 type —
             * presumably intentional frame_num wraparound handling;
             * TODO confirm against GstH264DPB's pic_num width. */
            guint16 pic_num;

            pic_num = slice->frame_num -
                (marking[i].difference_of_pic_nums_minus1 + 1);
            gst_h264_dpb_mark_short_term_unused (h264_dec->dpb, pic_num);
            break;
          }

          case 2:
          {
            /* MMCO 2: mark a long-term picture as unused for reference. */
            gst_h264_dpb_mark_long_term_unused (h264_dec->dpb,
                marking[i].long_term_pic_num);
            break;
          }

          case 3:
          {
            /* MMCO 3: convert a short-term picture to long-term with the
             * given long_term_frame_idx. */
            guint16 pic_num;

            pic_num = slice->frame_num -
                (marking[i].difference_of_pic_nums_minus1 + 1);
            gst_h264_dpb_mark_long_term (h264_dec->dpb, pic_num,
                marking[i].long_term_frame_idx);
            break;
          }

          case 4:
          {
            /* MMCO 4: set the maximum long-term frame index
             * (plus1 - 1 == -1 means "no long-term references"). */
            g_object_set (h264_dec->dpb, "max-longterm-frame-idx",
                marking[i].max_long_term_frame_idx_plus1 - 1, NULL);
            break;
          }

          case 5:
          {
            /* MMCO 5: mark all reference pictures unused and reset the
             * maximum long-term frame index. */
            gst_h264_dpb_mark_all_unused (h264_dec->dpb);
            g_object_set (h264_dec->dpb, "max-longterm-frame-idx", -1, NULL);
            break;
          }

          default:
            /* MMCO 0 (end of list) and MMCO 6 are not handled here. */
            break;
        }
      }
    } else
      /* Sliding-window marking: oldest short-term reference is evicted. */
      gst_h264_dpb_mark_sliding (h264_dec->dpb);
  }

  /* Hand the decoded frame to the DPB; it takes over output ordering. */
  return gst_h264_dpb_add (h264_dec->dpb, h264_frame);
}
/* handle_frame() vfunc for the GstVideoDecoder API.
 *
 * Decodes one H.264 access unit (carried as GstH264Meta on the input
 * buffer) through VDPAU and then performs DPB reference management.
 * Frames are skipped until the first IDR slice has been seen.
 *
 * Frame ownership: GstVideoDecoder hands the subclass a reference to
 * @frame which MUST be released on every return path via
 * finish_frame/drop_frame — the error paths below now drop the frame
 * instead of leaking it.
 *
 * Returns: flow return from DPB handling, or an error/skip result.
 */
static GstFlowReturn
gst_vdp_h264_dec_handle_frame (GstVideoDecoder * video_decoder,
    GstVideoCodecFrame * frame)
{
  GstVdpH264Dec *h264_dec = GST_VDP_H264_DEC (video_decoder);

  GstH264Meta *h264_meta;
  GstH264Frame *h264_frame;
  GList *tmp;

  GstFlowReturn ret;
  VdpPictureInfoH264 info;
  VdpBitstreamBuffer *bufs;
  GstH264SliceHdr *first_slice;
  guint i;
  GstMapInfo map;

  GST_DEBUG ("handle_frame");

  h264_meta = gst_buffer_get_h264_meta (frame->input_buffer);
  if (G_UNLIKELY (h264_meta == NULL))
    goto no_h264_meta;

  if (G_UNLIKELY (h264_meta->num_slices == 0))
    goto no_slices;

  /* Handle PPS/SPS/SEI if present */
  /* NOTE(review): storing with g_slice_dup() overwrites any previous
   * entry with the same id without freeing it.  This leaks, but freeing
   * the old SPS here would dangle the ->sequence pointer of PPS entries
   * stored earlier — a proper fix needs refcounting or a fix-up pass. */
  if (h264_meta->sps) {
    for (tmp = h264_meta->sps; tmp; tmp = tmp->next) {
      GstH264SPS *sps = (GstH264SPS *) tmp->data;
      GST_LOG_OBJECT (h264_dec, "Storing SPS %d", sps->id);
      h264_dec->sps[sps->id] = g_slice_dup (GstH264SPS, sps);
    }
  }
  if (h264_meta->pps) {
    for (tmp = h264_meta->pps; tmp; tmp = tmp->next) {
      GstH264PPS *pps = (GstH264PPS *) tmp->data;
      GST_LOG_OBJECT (h264_dec, "Storing PPS %d", pps->id);
      h264_dec->pps[pps->id] = g_slice_dup (GstH264PPS, pps);
      /* Adjust pps pointer to our stored copy of the referenced SPS.
       * Assumes the SPS arrived before any PPS referencing it —
       * otherwise this stores NULL; TODO confirm parser guarantees. */
      h264_dec->pps[pps->id]->sequence = h264_dec->sps[pps->sps_id];
    }
  }

  first_slice = &h264_meta->slices[0];

  if (!h264_dec->got_idr && first_slice->slice_type != GST_H264_NAL_SLICE_IDR)
    goto no_idr;

  /* Handle slices: point each slice header at our stored PPS copy. */
  for (i = 0; i < h264_meta->num_slices; i++) {
    GstH264SliceHdr *slice = &h264_meta->slices[i];

    GST_LOG_OBJECT (h264_dec, "Handling slice #%d", i);
    slice->pps = h264_dec->pps[slice->pps_id];
  }

  if (first_slice->slice_type == GST_H264_NAL_SLICE_IDR) {
    ret = gst_vdp_h264_dec_idr (h264_dec, frame, first_slice);
    if (ret == GST_FLOW_OK)
      h264_dec->got_idr = TRUE;
    else
      goto skip_frame;
  }

  /* Per-frame decoder state, freed together with the codec frame. */
  h264_frame = g_slice_new0 (GstH264Frame);
  gst_video_codec_frame_set_user_data (frame, h264_frame,
      (GDestroyNotify) gst_h264_frame_free);

  gst_vdp_h264_dec_init_frame_info (h264_dec, h264_frame, first_slice);
  h264_frame->frame = frame;

  gst_vdp_h264_dec_fill_info (&info, h264_dec, h264_frame, first_slice);
  info.slice_count = h264_meta->num_slices;

  if (!gst_buffer_map (frame->input_buffer, &map, GST_MAP_READ))
    goto map_fail;

  /* bufs is heap-allocated by the helper and owned by us. */
  bufs = gst_vdp_h264_dec_create_bitstream_buffers (h264_dec, h264_meta, &map);

  ret = gst_vdp_decoder_render (GST_VDP_DECODER (h264_dec),
      (VdpPictureInfo *) & info, h264_meta->num_slices, bufs, frame);
  g_free (bufs);
  gst_buffer_unmap (frame->input_buffer, &map);

  if (ret != GST_FLOW_OK)
    goto render_fail;

  /* DPB handling */
  return gst_vdp_h264_dec_handle_dpb (h264_dec, h264_frame, first_slice);

  /* EARLY exit */
no_idr:
  {
    GST_DEBUG_OBJECT (video_decoder, "Didn't see a IDR yet, skipping frame");
    return gst_video_decoder_finish_frame (video_decoder, frame);
  }

skip_frame:
  {
    GST_DEBUG_OBJECT (video_decoder, "Skipping frame");
    return gst_video_decoder_finish_frame (video_decoder, frame);
  }

  /* ERRORS */
no_h264_meta:
  {
    GST_ERROR_OBJECT (video_decoder, "Input buffer doesn't have GstH264Meta");
    /* Release the frame reference we own — returning without
     * finishing/dropping it would leak the codec frame. */
    gst_video_decoder_drop_frame (video_decoder, frame);
    return GST_FLOW_ERROR;
  }

no_slices:
  {
    GST_ERROR_OBJECT (video_decoder, "Input buffer doesn't have any slices");
    gst_video_decoder_drop_frame (video_decoder, frame);
    return GST_FLOW_ERROR;
  }

map_fail:
  {
    GST_ERROR_OBJECT (video_decoder, "Failed to map input buffer for READ");
    /* Dropping the frame also frees the attached GstH264Frame user data. */
    gst_video_decoder_drop_frame (video_decoder, frame);
    return GST_FLOW_ERROR;
  }

render_fail:
  {
    GST_ERROR_OBJECT (video_decoder, "Failed to render : %s",
        gst_flow_get_name (ret));
    gst_video_decoder_drop_frame (video_decoder, frame);
    return ret;
  }
}