/* Decode one H.264 frame: run IDR handling when needed, render the slice
 * data through VDPAU and update the decoded picture buffer (DPB). */
static GstFlowReturn
gst_vdp_h264_dec_handle_frame (GstBaseVideoDecoder * base_video_decoder,
    GstVideoFrame * frame, GstClockTimeDiff deadline)
{
  GstVdpH264Dec *h264_dec = GST_VDP_H264_DEC (base_video_decoder);

  GstH264Frame *h264_frame;
  GstH264Slice *slice;
  GstH264Picture *pic G_GNUC_UNUSED;
  GstH264Sequence *seq G_GNUC_UNUSED;

  GstFlowReturn ret;
  GstVdpVideoBuffer *outbuf;
  VdpPictureInfoH264 info;
  VdpBitstreamBuffer *bufs;
  guint n_bufs;

  GST_DEBUG ("handle_frame");

  h264_frame = GST_H264_FRAME_CAST (frame);

  slice = &h264_frame->slice_hdr;
  pic = slice->picture;
  seq = pic->sequence;

  /* An IDR slice resets the decoder state; if IDR handling fails we cannot
   * decode anything until the next IDR, so the frame is skipped. */
  if (slice->nal_unit.IdrPicFlag) {
    ret = gst_vdp_h264_dec_idr (h264_dec, h264_frame);
    if (ret == GST_FLOW_OK)
      h264_dec->got_idr = TRUE;
    else {
      gst_base_video_decoder_skip_frame (base_video_decoder, frame);
      return GST_FLOW_OK;
    }
  }

  /* check if we've got a IDR frame yet */
  if (!h264_dec->got_idr) {
    gst_base_video_decoder_skip_frame (base_video_decoder, frame);
    return GST_FLOW_OK;
  }

  gst_vdp_h264_dec_init_frame_info (h264_dec, h264_frame);

  info = gst_vdp_h264_dec_fill_info (h264_dec, h264_frame);
  bufs = gst_vdp_h264_dec_create_bitstream_buffers (h264_dec, h264_frame,
      &n_bufs);

  ret = gst_vdp_decoder_render (GST_VDP_DECODER (h264_dec),
      (VdpPictureInfo *) & info, n_bufs, bufs, &outbuf);
  g_free (bufs);

  if (ret != GST_FLOW_OK) {
    gst_base_video_decoder_skip_frame (base_video_decoder, frame);
    return ret;
  }

  frame->src_buffer = GST_BUFFER_CAST (outbuf);

  /* DPB handling: only reference slices (ref_idc != 0) of non-IDR pictures
   * update the reference marking state.  The operation codes below are the
   * H.264 memory_management_control_operation values. */
  if (slice->nal_unit.ref_idc != 0 && !slice->nal_unit.IdrPicFlag) {
    if (slice->dec_ref_pic_marking.adaptive_ref_pic_marking_mode_flag) {
      GstH264RefPicMarking *marking;
      guint i;

      marking = slice->dec_ref_pic_marking.ref_pic_marking;
      for (i = 0; i < slice->dec_ref_pic_marking.n_ref_pic_marking; i++) {

        switch (marking[i].memory_management_control_operation) {
          case 1:
          {
            /* mark a short-term picture as unused for reference */
            guint16 pic_num;

            pic_num = slice->frame_num -
                (marking[i].difference_of_pic_nums_minus1 + 1);
            gst_h264_dpb_mark_short_term_unused (h264_dec->dpb, pic_num);
            break;
          }

          case 2:
          {
            /* mark a long-term picture as unused for reference */
            gst_h264_dpb_mark_long_term_unused (h264_dec->dpb,
                marking[i].long_term_pic_num);
            break;
          }

          case 3:
          {
            /* turn a short-term picture into a long-term reference */
            guint16 pic_num;

            pic_num = slice->frame_num -
                (marking[i].difference_of_pic_nums_minus1 + 1);
            gst_h264_dpb_mark_long_term (h264_dec->dpb, pic_num,
                marking[i].long_term_frame_idx);
            break;
          }

          case 4:
          {
            /* set the maximum long-term frame index */
            g_object_set (h264_dec->dpb, "max-longterm-frame-idx",
                marking[i].max_long_term_frame_idx_plus1 - 1, NULL);
            break;
          }

          case 5:
          {
            /* mark everything unused and disable long-term references */
            gst_h264_dpb_mark_all_unused (h264_dec->dpb);
            g_object_set (h264_dec->dpb, "max-longterm-frame-idx", -1, NULL);
            break;
          }

          default:
            break;
        }
      }
    } else
      /* sliding-window reference picture marking */
      gst_h264_dpb_mark_sliding (h264_dec->dpb);
  }

  return gst_h264_dpb_add (h264_dec->dpb, h264_frame);
}
/* Decode one MPEG-1/2 picture: process the sequence/picture/GOP packets
 * attached to the frame, manage the forward/backward reference pictures and
 * render the slice data through VDPAU. */
static GstFlowReturn
gst_vdp_mpeg_dec_handle_frame (GstBaseVideoDecoder * base_video_decoder,
    GstVideoFrame * frame, GstClockTimeDiff deadline)
{
  GstVdpMpegDec *mpeg_dec = GST_VDP_MPEG_DEC (base_video_decoder);

  VdpPictureInfoMPEG1Or2 *info;
  GstVdpMpegFrame *mpeg_frame;

  GstFlowReturn ret = GST_FLOW_OK;
  VdpBitstreamBuffer vbit[1];
  GstVdpVideoBuffer *outbuf;

  /* MPEG_PACKET_SEQUENCE */
  mpeg_frame = GST_VDP_MPEG_FRAME (frame);
  if (mpeg_frame->seq) {
    ret = gst_vdp_mpeg_dec_handle_sequence (mpeg_dec, mpeg_frame->seq,
        mpeg_frame->seq_ext);
    if (ret != GST_FLOW_OK) {
      gst_base_video_decoder_skip_frame (base_video_decoder, frame);
      return ret;
    }
  }

  /* nothing can be decoded before the first sequence header was seen */
  if (mpeg_dec->state == GST_VDP_MPEG_DEC_STATE_NEED_SEQUENCE) {
    GST_DEBUG_OBJECT (mpeg_dec, "Drop frame since we haven't found a "
        "MPEG_PACKET_SEQUENCE yet");

    gst_base_video_decoder_skip_frame (base_video_decoder, frame);
    return GST_FLOW_OK;
  }

  /* MPEG_PACKET_PICTURE */
  if (mpeg_frame->pic)
    gst_vdp_mpeg_dec_handle_picture (mpeg_dec, mpeg_frame->pic);

  /* MPEG_PACKET_EXT_PICTURE_CODING */
  if (mpeg_frame->pic_ext)
    gst_vdp_mpeg_dec_handle_picture_coding (mpeg_dec, mpeg_frame->pic_ext,
        frame);

  /* MPEG_PACKET_GOP */
  if (mpeg_frame->gop)
    gst_vdp_mpeg_dec_handle_gop (mpeg_dec, mpeg_frame->gop);

  /* MPEG_PACKET_EXT_QUANT_MATRIX */
  if (mpeg_frame->qm_ext)
    gst_vdp_mpeg_dec_handle_quant_matrix (mpeg_dec, mpeg_frame->qm_ext);

  info = &mpeg_dec->vdp_info;

  info->slice_count = mpeg_frame->n_slices;

  /* check if we can decode the frame: P pictures need a backward reference,
   * B pictures additionally need a forward reference */
  if (info->picture_coding_type != I_FRAME
      && info->backward_reference == VDP_INVALID_HANDLE) {
    GST_DEBUG_OBJECT (mpeg_dec,
        "Drop frame since we haven't got an I_FRAME yet");

    gst_base_video_decoder_skip_frame (base_video_decoder, frame);
    return GST_FLOW_OK;
  }
  if (info->picture_coding_type == B_FRAME
      && info->forward_reference == VDP_INVALID_HANDLE) {
    GST_DEBUG_OBJECT (mpeg_dec,
        "Drop frame since we haven't got two non B_FRAMES yet");

    gst_base_video_decoder_skip_frame (base_video_decoder, frame);
    return GST_FLOW_OK;
  }

  /* a new I/P picture displaces the previous references: the pending
   * backward reference is finished (output) and promoted to the forward
   * reference slot */
  if (info->picture_coding_type != B_FRAME) {

    if (info->backward_reference != VDP_INVALID_HANDLE) {
      ret = gst_base_video_decoder_finish_frame (base_video_decoder,
          mpeg_dec->b_frame);
    }

    if (info->forward_reference != VDP_INVALID_HANDLE) {
      gst_video_frame_unref (mpeg_dec->f_frame);
      info->forward_reference = VDP_INVALID_HANDLE;
    }

    info->forward_reference = info->backward_reference;
    mpeg_dec->f_frame = mpeg_dec->b_frame;

    info->backward_reference = VDP_INVALID_HANDLE;
  }

  if (ret != GST_FLOW_OK) {
    gst_base_video_decoder_skip_frame (base_video_decoder, frame);
    return ret;
  }

  /* decode */
  vbit[0].struct_version = VDP_BITSTREAM_BUFFER_VERSION;
  vbit[0].bitstream = GST_BUFFER_DATA (mpeg_frame->slices);
  vbit[0].bitstream_bytes = GST_BUFFER_SIZE (mpeg_frame->slices);

  ret = gst_vdp_decoder_render (GST_VDP_DECODER (mpeg_dec),
      (VdpPictureInfo *) info, 1, vbit, &outbuf);
  if (ret != GST_FLOW_OK)
    return ret;

  frame->src_buffer = GST_BUFFER_CAST (outbuf);

  /* B pictures are output immediately; I/P pictures are kept back as the
   * new backward reference until the next non-B picture arrives */
  if (info->picture_coding_type == B_FRAME) {
    ret = gst_base_video_decoder_finish_frame (base_video_decoder, frame);
  } else {
    info->backward_reference = GST_VDP_VIDEO_BUFFER (outbuf)->surface;
    mpeg_dec->b_frame = gst_video_frame_ref (frame);
  }

  return ret;
}
/* Reset decoder state for an IDR slice and, when the active sequence (SPS)
 * changed, reconfigure the output state and the VDPAU decoder. */
static GstFlowReturn
gst_vdp_h264_dec_idr (GstVdpH264Dec * h264_dec, GstH264Frame * h264_frame)
{
  GstH264Slice *slice_hdr;
  GstH264Sequence *sps;

  /* picture order count bookkeeping restarts at an IDR */
  h264_dec->poc_msb = 0;
  h264_dec->prev_poc_lsb = 0;

  slice_hdr = &h264_frame->slice_hdr;

  /* flush the DPB, outputting pending pictures unless the stream forbids it */
  gst_h264_dpb_flush (h264_dec->dpb,
      slice_hdr->dec_ref_pic_marking.no_output_of_prior_pics_flag ? FALSE :
      TRUE);

  /* long-term reference state for the IDR picture itself */
  g_object_set (h264_dec->dpb, "max-longterm-frame-idx",
      slice_hdr->dec_ref_pic_marking.long_term_reference_flag ? 0 : -1, NULL);

  sps = slice_hdr->picture->sequence;
  if (sps != h264_dec->sequence) {
    GstVideoState state;
    VdpDecoderProfile profile;
    GstFlowReturn ret;

    state =
        gst_base_video_decoder_get_state (GST_BASE_VIDEO_DECODER (h264_dec));

    /* picture dimensions from the SPS, with frame cropping applied */
    state.width = (sps->pic_width_in_mbs_minus1 + 1) * 16 -
        2 * sps->frame_crop_right_offset;
    state.height = (2 - sps->frame_mbs_only_flag) *
        (sps->pic_height_in_map_units_minus1 + 1) * 16;
    state.height -= (sps->frame_mbs_only_flag ? 2 : 4) *
        sps->frame_crop_bottom_offset;

    /* calculate framerate if we haven't got one */
    if (state.fps_n == 0 && sps->vui_parameters_present_flag) {
      GstH264VUIParameters *vui = &sps->vui_parameters;
      guint16 par_n, par_d;

      if (gst_vdp_h264_dec_calculate_par (vui, &par_n, &par_d)) {
        state.par_n = par_n;
        state.par_d = par_d;
      }

      if (vui->timing_info_present_flag && vui->fixed_frame_rate_flag) {
        state.fps_n = vui->time_scale;
        state.fps_d = vui->num_units_in_tick;

        /* adjust for field-based VUI timing on frame-only streams */
        if (sps->frame_mbs_only_flag)
          state.fps_d *= 2;
      }
    }

    gst_base_video_decoder_set_state (GST_BASE_VIDEO_DECODER (h264_dec),
        state);

    /* map profile_idc to the matching VDPAU decoder profile */
    if (sps->profile_idc == 66)
      profile = VDP_DECODER_PROFILE_H264_BASELINE;
    else if (sps->profile_idc == 77)
      profile = VDP_DECODER_PROFILE_H264_MAIN;
    else if (sps->profile_idc == 100)
      profile = VDP_DECODER_PROFILE_H264_HIGH;
    else {
      GST_ELEMENT_ERROR (h264_dec, STREAM, WRONG_TYPE,
          ("vdpauh264dec doesn't support this streams profile"),
          ("profile_idc: %d", sps->profile_idc));
      return GST_FLOW_ERROR;
    }

    ret = gst_vdp_decoder_init_decoder (GST_VDP_DECODER (h264_dec), profile,
        sps->num_ref_frames);
    if (ret != GST_FLOW_OK)
      return ret;

    g_object_set (h264_dec->dpb, "num-ref-frames", sps->num_ref_frames, NULL);

    h264_dec->sequence = sps;
  }

  return GST_FLOW_OK;
}
/* Parse an MPEG sequence header (plus optional MPEG-2 sequence extension)
 * and reconfigure the decoder whenever the stream parameters changed. */
static GstFlowReturn
gst_vdp_mpeg_dec_handle_sequence (GstVdpMpegDec * mpeg_dec,
    GstBuffer * seq, GstBuffer * seq_ext)
{
  GstBaseVideoDecoder *base_video_decoder = GST_BASE_VIDEO_DECODER (mpeg_dec);

  MPEGSeqHdr seq_hdr;
  GstVdpMpegStreamInfo new_info;

  if (!mpeg_util_parse_sequence_hdr (&seq_hdr, seq))
    return GST_FLOW_CUSTOM_ERROR;

  /* quantizer matrices go straight into the VDPAU picture info */
  memcpy (&mpeg_dec->vdp_info.intra_quantizer_matrix,
      &seq_hdr.intra_quantizer_matrix, 64);
  memcpy (&mpeg_dec->vdp_info.non_intra_quantizer_matrix,
      &seq_hdr.non_intra_quantizer_matrix, 64);

  /* start from MPEG-1 values ... */
  new_info.width = seq_hdr.width;
  new_info.height = seq_hdr.height;
  new_info.fps_n = seq_hdr.fps_n;
  new_info.fps_d = seq_hdr.fps_d;
  new_info.par_n = seq_hdr.par_w;
  new_info.par_d = seq_hdr.par_h;
  new_info.interlaced = FALSE;
  new_info.version = 1;
  new_info.profile = VDP_DECODER_PROFILE_MPEG1;

  /* ... and refine them with the MPEG-2 sequence extension when present */
  if (seq_ext) {
    MPEGSeqExtHdr ext_hdr;

    if (!mpeg_util_parse_sequence_extension (&ext_hdr, seq_ext))
      return GST_FLOW_CUSTOM_ERROR;

    new_info.fps_n *= (ext_hdr.fps_n_ext + 1);
    new_info.fps_d *= (ext_hdr.fps_d_ext + 1);

    new_info.width += (ext_hdr.horiz_size_ext << 12);
    new_info.height += (ext_hdr.vert_size_ext << 12);

    new_info.interlaced = !ext_hdr.progressive;
    new_info.version = 2;
    new_info.profile = gst_vdp_mpeg_dec_get_profile (&ext_hdr);
  }

  /* only reconfigure when something actually changed.
   * NOTE(review): memcmp also compares struct padding bytes, which are not
   * explicitly initialized here (same as before) -- confirm the struct has
   * no padding or accept occasional spurious reconfiguration. */
  if (memcmp (&mpeg_dec->stream_info, &new_info,
          sizeof (GstVdpMpegStreamInfo)) != 0) {
    GstVideoState state;
    GstFlowReturn ret;

    state = gst_base_video_decoder_get_state (base_video_decoder);

    state.width = new_info.width;
    state.height = new_info.height;
    state.fps_n = new_info.fps_n;
    state.fps_d = new_info.fps_d;
    state.par_n = new_info.par_n;
    state.par_d = new_info.par_d;
    state.interlaced = new_info.interlaced;

    gst_base_video_decoder_set_state (base_video_decoder, state);

    ret = gst_vdp_decoder_init_decoder (GST_VDP_DECODER (mpeg_dec),
        new_info.profile, 2);
    if (ret != GST_FLOW_OK)
      return ret;

    memcpy (&mpeg_dec->stream_info, &new_info,
        sizeof (GstVdpMpegStreamInfo));
  }

  mpeg_dec->state = GST_VDP_MPEG_DEC_STATE_NEED_DATA;

  return GST_FLOW_OK;
}
/* Decode one MPEG-4 part 2 frame: parse the VOP, maintain the forward and
 * backward reference frames and render the VOP data through VDPAU. */
static GstFlowReturn
gst_vdp_mpeg4_dec_handle_frame (GstBaseVideoDecoder * base_video_decoder,
    GstVideoFrame * frame, GstClockTimeDiff deadline)
{
  GstVdpMpeg4Dec *mpeg4_dec = GST_VDP_MPEG4_DEC (base_video_decoder);
  GstMpeg4Frame *mpeg4_frame;
  GstFlowReturn ret;

  Mpeg4VideoObjectLayer *vol;
  Mpeg4VideoObjectPlane vop;

  VdpPictureInfoMPEG4Part2 info;
  VdpBitstreamBuffer bufs[1];
  GstVdpVideoBuffer *video_buf;

  mpeg4_frame = GST_MPEG4_FRAME (frame);

  /* make sure VOS/VO/VOL configuration has been seen and applied */
  ret = gst_vdp_mpeg4_dec_handle_configuration (mpeg4_dec, mpeg4_frame);
  if (ret != GST_FLOW_OK)
    return ret;

  vol = &mpeg4_dec->vol;
  if (!mpeg4_util_parse_VOP (mpeg4_frame->vop_buf, vol, &vop)) {
    gst_base_video_decoder_skip_frame (base_video_decoder, frame);
    return GST_FLOW_CUSTOM_ERROR;
  }

  /* calculate vop time */
  mpeg4_frame->vop_time =
      vop.modulo_time_base * vol->vop_time_increment_resolution +
      vop.time_increment;

  /* derive the frame period from the first B-VOP's distance to the forward
   * reference.  NOTE(review): this dereferences f_frame without a NULL
   * check -- assumes a stream cannot present a B-VOP before any reference
   * VOP was decoded; confirm upstream guarantees this. */
  if (mpeg4_dec->tframe == -1 && vop.coding_type == B_VOP)
    mpeg4_dec->tframe = mpeg4_frame->vop_time - mpeg4_dec->f_frame->vop_time;

  /* a new I/P VOP outputs the pending backward reference and promotes it
   * to the forward reference slot */
  if (vop.coding_type != B_VOP) {

    if (mpeg4_dec->b_frame) {
      ret = gst_base_video_decoder_finish_frame (base_video_decoder,
          GST_VIDEO_FRAME_CAST (mpeg4_dec->b_frame));

      if (mpeg4_dec->f_frame)
        gst_video_frame_unref (GST_VIDEO_FRAME_CAST (mpeg4_dec->f_frame));

      mpeg4_dec->f_frame = mpeg4_dec->b_frame;
      mpeg4_dec->b_frame = NULL;
    }
  }

  info = gst_vdp_mpeg4_dec_fill_info (mpeg4_dec, mpeg4_frame, &vop);

  bufs[0].struct_version = VDP_BITSTREAM_BUFFER_VERSION;
  bufs[0].bitstream = GST_BUFFER_DATA (mpeg4_frame->vop_buf);
  bufs[0].bitstream_bytes = GST_BUFFER_SIZE (mpeg4_frame->vop_buf);

  ret = gst_vdp_decoder_render (GST_VDP_DECODER (base_video_decoder),
      (VdpPictureInfo *) & info, 1, bufs, &video_buf);
  if (ret != GST_FLOW_OK) {
    gst_base_video_decoder_skip_frame (base_video_decoder, frame);
    return ret;
  }

  frame->src_buffer = GST_BUFFER_CAST (video_buf);

  /* B-VOPs are output immediately; I/P VOPs are kept back as the new
   * backward reference */
  if (vop.coding_type == B_VOP)
    ret = gst_base_video_decoder_finish_frame (base_video_decoder, frame);
  else {
    gst_video_frame_ref (GST_VIDEO_FRAME_CAST (mpeg4_frame));
    mpeg4_dec->b_frame = mpeg4_frame;
    ret = GST_FLOW_OK;
  }

  return ret;
}
static gboolean gst_vdp_mpeg4_dec_handle_configuration (GstVdpMpeg4Dec * mpeg4_dec, GstMpeg4Frame * mpeg4_frame) { Mpeg4VisualObjectSequence vos; Mpeg4VisualObject vo; Mpeg4VideoObjectLayer vol; GstVideoState state; guint8 profile_indication; VdpDecoderProfile profile; GstFlowReturn ret; if (mpeg4_dec->is_configured) return GST_FLOW_OK; if (!mpeg4_frame->vos_buf || !mpeg4_frame->vo_buf || !mpeg4_frame->vol_buf) goto skip_frame; if (!mpeg4_util_parse_VOS (mpeg4_frame->vos_buf, &vos)) goto skip_frame; if (!mpeg4_util_parse_VO (mpeg4_frame->vo_buf, &vo)) goto skip_frame; if (!mpeg4_util_parse_VOL (mpeg4_frame->vol_buf, &vo, &vol)) goto skip_frame; state = gst_base_video_decoder_get_state (GST_BASE_VIDEO_DECODER (mpeg4_dec)); state.width = vol.width; state.height = vol.height; if (vol.fixed_vop_rate) { state.fps_n = vol.vop_time_increment_resolution; state.fps_d = vol.fixed_vop_time_increment; } state.par_n = vol.par_n; state.par_d = vol.par_d; gst_base_video_decoder_set_state (GST_BASE_VIDEO_DECODER (mpeg4_dec), state); profile_indication = vos.profile_and_level_indication >> 4; switch (profile_indication) { case 0x0: profile = VDP_DECODER_PROFILE_MPEG4_PART2_SP; break; case 0xf: profile = VDP_DECODER_PROFILE_MPEG4_PART2_ASP; break; default: goto unsupported_profile; } ret = gst_vdp_decoder_init_decoder (GST_VDP_DECODER (mpeg4_dec), profile, 2); if (ret != GST_FLOW_OK) return ret; mpeg4_dec->vol = vol; mpeg4_dec->is_configured = TRUE; return GST_FLOW_OK; skip_frame: GST_WARNING ("Skipping frame since we're not configured yet"); gst_base_video_decoder_skip_frame (GST_BASE_VIDEO_DECODER (mpeg4_dec), GST_VIDEO_FRAME (mpeg4_frame)); return GST_FLOW_CUSTOM_ERROR; unsupported_profile: GST_ELEMENT_ERROR (mpeg4_dec, STREAM, WRONG_TYPE, ("vdpaumpeg4dec doesn't support this streams profile"), ("profile_and_level_indication: %d", vos.profile_and_level_indication)); return GST_FLOW_ERROR; }
/* GstVideoDecoder::handle_frame for H.264: store parameter sets carried in
 * the GstH264Meta, resolve slice PPS references, render through VDPAU and
 * hand the decoded picture over to DPB handling. */
static GstFlowReturn
gst_vdp_h264_dec_handle_frame (GstVideoDecoder * video_decoder,
    GstVideoCodecFrame * frame)
{
  GstVdpH264Dec *h264_dec = GST_VDP_H264_DEC (video_decoder);
  GstH264Meta *h264_meta;
  GstH264Frame *h264_frame;
  GList *tmp;
  GstFlowReturn ret;
  VdpPictureInfoH264 info;
  VdpBitstreamBuffer *bufs;
  GstH264SliceHdr *first_slice;
  guint i;
  GstMapInfo map;

  GST_DEBUG ("handle_frame");

  h264_meta = gst_buffer_get_h264_meta (frame->input_buffer);
  if (G_UNLIKELY (h264_meta == NULL))
    goto no_h264_meta;

  if (G_UNLIKELY (h264_meta->num_slices == 0))
    goto no_slices;

  /* Handle PPS/SPS/SEI if present */
  if (h264_meta->sps) {
    for (tmp = h264_meta->sps; tmp; tmp = tmp->next) {
      GstH264SPS *sps = (GstH264SPS *) tmp->data;

      GST_LOG_OBJECT (h264_dec, "Storing SPS %d", sps->id);
      /* NOTE(review): an already-stored SPS with the same id is overwritten
       * without freeing the previous g_slice_dup'd copy -- a leak.  Same
       * for PPS below.  Freeing here would however dangle the ->sequence
       * pointers of previously stored PPS entries, so this needs a
       * coordinated fix rather than a plain g_slice_free. */
      h264_dec->sps[sps->id] = g_slice_dup (GstH264SPS, sps);
    }
  }
  if (h264_meta->pps) {
    for (tmp = h264_meta->pps; tmp; tmp = tmp->next) {
      GstH264PPS *pps = (GstH264PPS *) tmp->data;

      GST_LOG_OBJECT (h264_dec, "Storing PPS %d", pps->id);
      h264_dec->pps[pps->id] = g_slice_dup (GstH264PPS, pps);
      /* Adjust pps pointer */
      h264_dec->pps[pps->id]->sequence = h264_dec->sps[pps->sps_id];
    }
  }

  first_slice = &h264_meta->slices[0];

  /* nothing can be decoded before the first IDR slice */
  if (!h264_dec->got_idr && first_slice->slice_type != GST_H264_NAL_SLICE_IDR)
    goto no_idr;

  /* Handle slices: point each slice at its stored PPS */
  for (i = 0; i < h264_meta->num_slices; i++) {
    GstH264SliceHdr *slice = &h264_meta->slices[i];

    GST_LOG_OBJECT (h264_dec, "Handling slice #%d", i);
    slice->pps = h264_dec->pps[slice->pps_id];
  }

  if (first_slice->slice_type == GST_H264_NAL_SLICE_IDR) {
    ret = gst_vdp_h264_dec_idr (h264_dec, frame, first_slice);
    if (ret == GST_FLOW_OK)
      h264_dec->got_idr = TRUE;
    else
      goto skip_frame;
  }

  /* per-frame decoder state, freed together with the codec frame */
  h264_frame = g_slice_new0 (GstH264Frame);
  gst_video_codec_frame_set_user_data (frame, h264_frame,
      (GDestroyNotify) gst_h264_frame_free);

  gst_vdp_h264_dec_init_frame_info (h264_dec, h264_frame, first_slice);
  h264_frame->frame = frame;

  gst_vdp_h264_dec_fill_info (&info, h264_dec, h264_frame, first_slice);
  info.slice_count = h264_meta->num_slices;

  if (!gst_buffer_map (frame->input_buffer, &map, GST_MAP_READ))
    goto map_fail;

  bufs = gst_vdp_h264_dec_create_bitstream_buffers (h264_dec, h264_meta, &map);

  ret = gst_vdp_decoder_render (GST_VDP_DECODER (h264_dec),
      (VdpPictureInfo *) & info, h264_meta->num_slices, bufs, frame);
  g_free (bufs);
  gst_buffer_unmap (frame->input_buffer, &map);

  if (ret != GST_FLOW_OK)
    goto render_fail;

  /* DPB handling */
  return gst_vdp_h264_dec_handle_dpb (h264_dec, h264_frame, first_slice);

  /* EARLY exit */
no_idr:
  {
    GST_DEBUG_OBJECT (video_decoder, "Didn't see a IDR yet, skipping frame");
    return gst_video_decoder_finish_frame (video_decoder, frame);
  }

skip_frame:
  {
    GST_DEBUG_OBJECT (video_decoder, "Skipping frame");
    return gst_video_decoder_finish_frame (video_decoder, frame);
  }

  /* ERRORS */
no_h264_meta:
  {
    GST_ERROR_OBJECT (video_decoder, "Input buffer doesn't have GstH264Meta");
    return GST_FLOW_ERROR;
  }

no_slices:
  {
    GST_ERROR_OBJECT (video_decoder, "Input buffer doesn't have any slices");
    return GST_FLOW_ERROR;
  }

map_fail:
  {
    GST_ERROR_OBJECT (video_decoder, "Failed to map input buffer for READ");
    return GST_FLOW_ERROR;
  }

render_fail:
  {
    GST_ERROR_OBJECT (video_decoder, "Failed to render : %s",
        gst_flow_get_name (ret));
    gst_video_decoder_drop_frame (video_decoder, frame);
    return ret;
  }
}
/* Handle an IDR slice: reset POC bookkeeping, flush the DPB and, when the
 * active SPS changed, negotiate a new output state and reinitialize the
 * VDPAU decoder.
 *
 * Fix: the GstVideoCodecState returned by
 * gst_video_decoder_set_output_state() is transfer-full, but the reference
 * was never released -- one state leaked per SPS change (on both the
 * success and the negotiation-failure path).  It is now unreffed right
 * after negotiation. */
static GstFlowReturn
gst_vdp_h264_dec_idr (GstVdpH264Dec * h264_dec, GstVideoCodecFrame * frame,
    GstH264SliceHdr * slice)
{
  GstH264SPS *seq;

  GST_DEBUG_OBJECT (h264_dec, "Handling IDR slice");

  /* picture order count bookkeeping restarts at an IDR */
  h264_dec->poc_msb = 0;
  h264_dec->prev_poc_lsb = 0;

  /* flush the DPB, outputting pending pictures unless the stream forbids it */
  if (slice->dec_ref_pic_marking.no_output_of_prior_pics_flag)
    gst_h264_dpb_flush (h264_dec->dpb, FALSE);
  else
    gst_h264_dpb_flush (h264_dec->dpb, TRUE);

  /* long-term reference state for the IDR picture itself */
  if (slice->dec_ref_pic_marking.long_term_reference_flag)
    g_object_set (h264_dec->dpb, "max-longterm-frame-idx", 0, NULL);
  else
    g_object_set (h264_dec->dpb, "max-longterm-frame-idx", -1, NULL);

  seq = slice->pps->sequence;
  if (seq->id != h264_dec->current_sps) {
    GstVideoCodecState *state;
    VdpDecoderProfile profile;
    GstFlowReturn ret;
    gboolean negotiated;

    GST_DEBUG_OBJECT (h264_dec, "Sequence changed !");

    state = gst_video_decoder_set_output_state (GST_VIDEO_DECODER (h264_dec),
        GST_VIDEO_FORMAT_YV12, seq->width, seq->height, h264_dec->input_state);

    /* calculate framerate if we haven't got one */
    if (state->info.fps_n == 0) {
      state->info.fps_n = seq->fps_num;
      state->info.fps_d = seq->fps_den;
    }

    if (state->info.par_n == 0 && seq->vui_parameters_present_flag) {
      state->info.par_n = seq->vui_parameters.par_n;
      state->info.par_d = seq->vui_parameters.par_d;
    }

    negotiated = gst_video_decoder_negotiate (GST_VIDEO_DECODER (h264_dec));
    /* set_output_state() handed us a reference we own; drop it now that
     * negotiation has consumed the updated info */
    gst_video_codec_state_unref (state);
    if (!negotiated)
      goto nego_fail;

    /* map profile_idc to the matching VDPAU decoder profile */
    switch (seq->profile_idc) {
      case 66:
        profile = VDP_DECODER_PROFILE_H264_BASELINE;
        break;
      case 77:
        profile = VDP_DECODER_PROFILE_H264_MAIN;
        break;
      case 100:
        profile = VDP_DECODER_PROFILE_H264_HIGH;
        break;
      default:
        goto profile_not_suported;
    }

    ret = gst_vdp_decoder_init_decoder (GST_VDP_DECODER (h264_dec), profile,
        seq->num_ref_frames, h264_dec->input_state);
    if (ret != GST_FLOW_OK)
      return ret;

    g_object_set (h264_dec->dpb, "num-ref-frames", seq->num_ref_frames, NULL);

    h264_dec->current_sps = seq->id;
  }

  return GST_FLOW_OK;

profile_not_suported:
  {
    GST_ELEMENT_ERROR (h264_dec, STREAM, WRONG_TYPE,
        ("vdpauh264dec doesn't support this streams profile"),
        ("profile_idc: %d", seq->profile_idc));
    return GST_FLOW_ERROR;
  }

nego_fail:
  {
    GST_ERROR_OBJECT (h264_dec, "Negotiation failed");
    return GST_FLOW_NOT_NEGOTIATED;
  }
}