Example #1
static GstFlowReturn
gst_vdp_h264_dec_handle_frame (GstBaseVideoDecoder * base_video_decoder,
    GstVideoFrame * frame, GstClockTimeDiff deadline)
{
  GstVdpH264Dec *h264_dec = GST_VDP_H264_DEC (base_video_decoder);

  GstH264Frame *h264_frame;
  GstH264Slice *slice;
  GstH264Picture *pic G_GNUC_UNUSED;
  GstH264Sequence *seq G_GNUC_UNUSED;

  GstFlowReturn ret;
  GstVdpVideoBuffer *outbuf;
  VdpPictureInfoH264 info;
  VdpBitstreamBuffer *bufs;
  guint n_bufs;

  GST_DEBUG ("handle_frame");

  h264_frame = GST_H264_FRAME_CAST (frame);

  slice = &h264_frame->slice_hdr;
  pic = slice->picture;
  seq = pic->sequence;


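  /* an IDR slice resets the decoder state; decoding can only start once
   * one has been seen */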
  if (slice->nal_unit.IdrPicFlag) {
    ret = gst_vdp_h264_dec_idr (h264_dec, h264_frame);

    if (ret == GST_FLOW_OK)
      h264_dec->got_idr = TRUE;
    else {
      gst_base_video_decoder_skip_frame (base_video_decoder, frame);
      return GST_FLOW_OK;
    }
  }

  /* check if we've got an IDR frame yet */
  if (!h264_dec->got_idr) {
    gst_base_video_decoder_skip_frame (base_video_decoder, frame);
    return GST_FLOW_OK;
  }


  gst_vdp_h264_dec_init_frame_info (h264_dec, h264_frame);

  info = gst_vdp_h264_dec_fill_info (h264_dec, h264_frame);
  bufs = gst_vdp_h264_dec_create_bitstream_buffers (h264_dec, h264_frame,
      &n_bufs);

  ret = gst_vdp_decoder_render (GST_VDP_DECODER (h264_dec),
      (VdpPictureInfo *) & info, n_bufs, bufs, &outbuf);
  g_free (bufs);

  if (ret != GST_FLOW_OK) {
    gst_base_video_decoder_skip_frame (base_video_decoder, frame);
    return ret;
  }

  frame->src_buffer = GST_BUFFER_CAST (outbuf);


  /* DPB handling */
  if (slice->nal_unit.ref_idc != 0 && !slice->nal_unit.IdrPicFlag) {
    if (slice->dec_ref_pic_marking.adaptive_ref_pic_marking_mode_flag) {
      GstH264RefPicMarking *marking;
      guint i;

      marking = slice->dec_ref_pic_marking.ref_pic_marking;
      for (i = 0; i < slice->dec_ref_pic_marking.n_ref_pic_marking; i++) {

        switch (marking[i].memory_management_control_operation) {
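          /* MMCO 1: mark a short-term reference picture as unused */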
          case 1:
          {
            guint16 pic_num;

            pic_num = slice->frame_num -
                (marking[i].difference_of_pic_nums_minus1 + 1);
            gst_h264_dpb_mark_short_term_unused (h264_dec->dpb, pic_num);
            break;
          }

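          /* MMCO 2: mark a long-term reference picture as unused */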
          case 2:
          {
            gst_h264_dpb_mark_long_term_unused (h264_dec->dpb,
                marking[i].long_term_pic_num);
            break;
          }

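          /* MMCO 3: convert a short-term picture to a long-term reference
           * with the given long-term frame index */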
          case 3:
          {
            guint16 pic_num;

            pic_num = slice->frame_num -
                (marking[i].difference_of_pic_nums_minus1 + 1);
            gst_h264_dpb_mark_long_term (h264_dec->dpb, pic_num,
                marking[i].long_term_frame_idx);
            break;
          }

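          /* MMCO 4: set the maximum long-term frame index; -1 means no
           * long-term frame indices are in use */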
          case 4:
          {
            g_object_set (h264_dec->dpb, "max-longterm-frame-idx",
                marking[i].max_long_term_frame_idx_plus1 - 1, NULL);
            break;
          }

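          /* MMCO 5: mark all reference pictures as unused and clear the
           * maximum long-term frame index */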
          case 5:
          {
            gst_h264_dpb_mark_all_unused (h264_dec->dpb);
            g_object_set (h264_dec->dpb, "max-longterm-frame-idx", -1, NULL);
            break;
          }

          default:
            break;
        }
      }
    } else
      gst_h264_dpb_mark_sliding (h264_dec->dpb);
  }

  return gst_h264_dpb_add (h264_dec->dpb, h264_frame);
}
Example #2
static GstFlowReturn
gst_vdp_mpeg4_dec_handle_frame (GstBaseVideoDecoder * base_video_decoder,
    GstVideoFrame * frame, GstClockTimeDiff deadline)
{
  GstVdpMpeg4Dec *mpeg4_dec = GST_VDP_MPEG4_DEC (base_video_decoder);

  GstMpeg4Frame *mpeg4_frame;
  GstFlowReturn ret;

  Mpeg4VideoObjectLayer *vol;
  Mpeg4VideoObjectPlane vop;

  VdpPictureInfoMPEG4Part2 info;
  VdpBitstreamBuffer bufs[1];
  GstVdpVideoBuffer *video_buf;

  mpeg4_frame = GST_MPEG4_FRAME (frame);

  ret = gst_vdp_mpeg4_dec_handle_configuration (mpeg4_dec, mpeg4_frame);
  if (ret != GST_FLOW_OK)
    return ret;

  vol = &mpeg4_dec->vol;
  if (!mpeg4_util_parse_VOP (mpeg4_frame->vop_buf, vol, &vop)) {
    gst_base_video_decoder_skip_frame (base_video_decoder, frame);
    return GST_FLOW_CUSTOM_ERROR;
  }

  /* calculate vop time */
  mpeg4_frame->vop_time =
      vop.modulo_time_base * vol->vop_time_increment_resolution +
      vop.time_increment;

  if (mpeg4_dec->tframe == -1 && vop.coding_type == B_VOP)
    mpeg4_dec->tframe = mpeg4_frame->vop_time - mpeg4_dec->f_frame->vop_time;

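  /* a new reference VOP arrives: output the previously decoded reference
   * frame and move it to the forward reference slot */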
  if (vop.coding_type != B_VOP) {
    if (mpeg4_dec->b_frame) {

      ret = gst_base_video_decoder_finish_frame (base_video_decoder,
          GST_VIDEO_FRAME_CAST (mpeg4_dec->b_frame));

      if (mpeg4_dec->f_frame)
        gst_video_frame_unref (GST_VIDEO_FRAME_CAST (mpeg4_dec->f_frame));

      mpeg4_dec->f_frame = mpeg4_dec->b_frame;
      mpeg4_dec->b_frame = NULL;
    }
  }

  info = gst_vdp_mpeg4_dec_fill_info (mpeg4_dec, mpeg4_frame, &vop);
  bufs[0].struct_version = VDP_BITSTREAM_BUFFER_VERSION;
  bufs[0].bitstream = GST_BUFFER_DATA (mpeg4_frame->vop_buf);
  bufs[0].bitstream_bytes = GST_BUFFER_SIZE (mpeg4_frame->vop_buf);

  ret = gst_vdp_decoder_render (GST_VDP_DECODER (base_video_decoder),
      (VdpPictureInfo *) & info, 1, bufs, &video_buf);
  if (ret != GST_FLOW_OK) {
    gst_base_video_decoder_skip_frame (base_video_decoder, frame);
    return ret;
  }

  frame->src_buffer = GST_BUFFER_CAST (video_buf);

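  /* B-VOPs are output immediately; reference VOPs are kept as the pending
   * backward reference until the next reference VOP arrives */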
  if (vop.coding_type == B_VOP)
    ret = gst_base_video_decoder_finish_frame (base_video_decoder, frame);
  else {
    gst_video_frame_ref (GST_VIDEO_FRAME_CAST (mpeg4_frame));
    mpeg4_dec->b_frame = mpeg4_frame;
    ret = GST_FLOW_OK;
  }

  return ret;
}
Example #3
static GstFlowReturn
gst_vdp_mpeg_dec_handle_frame (GstBaseVideoDecoder * base_video_decoder,
    GstVideoFrame * frame, GstClockTimeDiff deadline)
{
  GstVdpMpegDec *mpeg_dec = GST_VDP_MPEG_DEC (base_video_decoder);

  VdpPictureInfoMPEG1Or2 *info;
  GstVdpMpegFrame *mpeg_frame;

  GstFlowReturn ret = GST_FLOW_OK;
  VdpBitstreamBuffer vbit[1];
  GstVdpVideoBuffer *outbuf;

  /* MPEG_PACKET_SEQUENCE */
  mpeg_frame = GST_VDP_MPEG_FRAME (frame);
  if (mpeg_frame->seq) {
    ret = gst_vdp_mpeg_dec_handle_sequence (mpeg_dec, mpeg_frame->seq,
        mpeg_frame->seq_ext);
    if (ret != GST_FLOW_OK) {
      gst_base_video_decoder_skip_frame (base_video_decoder, frame);
      return ret;
    }
  }

  if (mpeg_dec->state == GST_VDP_MPEG_DEC_STATE_NEED_SEQUENCE) {
    GST_DEBUG_OBJECT (mpeg_dec, "Drop frame since we haven't found a "
        "MPEG_PACKET_SEQUENCE yet");

    gst_base_video_decoder_skip_frame (base_video_decoder, frame);
    return GST_FLOW_OK;
  }

  /* MPEG_PACKET_PICTURE */
  if (mpeg_frame->pic)
    gst_vdp_mpeg_dec_handle_picture (mpeg_dec, mpeg_frame->pic);

  /* MPEG_PACKET_EXT_PICTURE_CODING */
  if (mpeg_frame->pic_ext)
    gst_vdp_mpeg_dec_handle_picture_coding (mpeg_dec, mpeg_frame->pic_ext,
        frame);

  /* MPEG_PACKET_GOP */
  if (mpeg_frame->gop)
    gst_vdp_mpeg_dec_handle_gop (mpeg_dec, mpeg_frame->gop);

  /* MPEG_PACKET_EXT_QUANT_MATRIX */
  if (mpeg_frame->qm_ext)
    gst_vdp_mpeg_dec_handle_quant_matrix (mpeg_dec, mpeg_frame->qm_ext);


  info = &mpeg_dec->vdp_info;

  info->slice_count = mpeg_frame->n_slices;

  /* check if we can decode the frame */
  if (info->picture_coding_type != I_FRAME
      && info->backward_reference == VDP_INVALID_HANDLE) {
    GST_DEBUG_OBJECT (mpeg_dec,
        "Drop frame since we haven't got an I_FRAME yet");

    gst_base_video_decoder_skip_frame (base_video_decoder, frame);
    return GST_FLOW_OK;
  }
  if (info->picture_coding_type == B_FRAME
      && info->forward_reference == VDP_INVALID_HANDLE) {
    GST_DEBUG_OBJECT (mpeg_dec,
        "Drop frame since we haven't got two non B_FRAMES yet");

    gst_base_video_decoder_skip_frame (base_video_decoder, frame);
    return GST_FLOW_OK;
  }


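  /* I- and P-frames: output the pending backward reference and shift it to
   * become the forward reference for the next picture */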
  if (info->picture_coding_type != B_FRAME) {
    if (info->backward_reference != VDP_INVALID_HANDLE) {
      ret = gst_base_video_decoder_finish_frame (base_video_decoder,
          mpeg_dec->b_frame);
    }

    if (info->forward_reference != VDP_INVALID_HANDLE) {
      gst_video_frame_unref (mpeg_dec->f_frame);
      info->forward_reference = VDP_INVALID_HANDLE;
    }

    info->forward_reference = info->backward_reference;
    mpeg_dec->f_frame = mpeg_dec->b_frame;

    info->backward_reference = VDP_INVALID_HANDLE;
  }

  if (ret != GST_FLOW_OK) {
    gst_base_video_decoder_skip_frame (base_video_decoder, frame);
    return ret;
  }

  /* decode */
  vbit[0].struct_version = VDP_BITSTREAM_BUFFER_VERSION;
  vbit[0].bitstream = GST_BUFFER_DATA (mpeg_frame->slices);
  vbit[0].bitstream_bytes = GST_BUFFER_SIZE (mpeg_frame->slices);

  ret = gst_vdp_decoder_render (GST_VDP_DECODER (mpeg_dec),
      (VdpPictureInfo *) info, 1, vbit, &outbuf);
  if (ret != GST_FLOW_OK)
    return ret;

  frame->src_buffer = GST_BUFFER_CAST (outbuf);

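  /* B-frames are output right away; I- and P-frames become the new backward
   * reference */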
  if (info->picture_coding_type == B_FRAME) {
    ret = gst_base_video_decoder_finish_frame (base_video_decoder, frame);
  } else {
    info->backward_reference = GST_VDP_VIDEO_BUFFER (outbuf)->surface;
    mpeg_dec->b_frame = gst_video_frame_ref (frame);
  }

  return ret;
}
Example #4
static GstFlowReturn
gst_vdp_h264_dec_handle_frame (GstVideoDecoder * video_decoder,
    GstVideoCodecFrame * frame)
{
  GstVdpH264Dec *h264_dec = GST_VDP_H264_DEC (video_decoder);
  GstH264Meta *h264_meta;
  GstH264Frame *h264_frame;
  GList *tmp;
  GstFlowReturn ret;
  VdpPictureInfoH264 info;
  VdpBitstreamBuffer *bufs;
  GstH264SliceHdr *first_slice;
  guint i;
  GstMapInfo map;

  GST_DEBUG ("handle_frame");

  h264_meta = gst_buffer_get_h264_meta (frame->input_buffer);
  if (G_UNLIKELY (h264_meta == NULL))
    goto no_h264_meta;

  if (G_UNLIKELY (h264_meta->num_slices == 0))
    goto no_slices;

  /* Handle PPS/SPS/SEI if present */
  if (h264_meta->sps) {
    for (tmp = h264_meta->sps; tmp; tmp = tmp->next) {
      GstH264SPS *sps = (GstH264SPS *) tmp->data;
      GST_LOG_OBJECT (h264_dec, "Storing SPS %d", sps->id);
      h264_dec->sps[sps->id] = g_slice_dup (GstH264SPS, sps);
    }
  }
  if (h264_meta->pps) {
    for (tmp = h264_meta->pps; tmp; tmp = tmp->next) {
      GstH264PPS *pps = (GstH264PPS *) tmp->data;
      GST_LOG_OBJECT (h264_dec, "Storing PPS %d", pps->id);
      h264_dec->pps[pps->id] = g_slice_dup (GstH264PPS, pps);
      /* Adjust pps pointer */
      h264_dec->pps[pps->id]->sequence = h264_dec->sps[pps->sps_id];
    }
  }

  first_slice = &h264_meta->slices[0];

  if (!h264_dec->got_idr && first_slice->slice_type != GST_H264_NAL_SLICE_IDR)
    goto no_idr;

  /* Handle slices */
  for (i = 0; i < h264_meta->num_slices; i++) {
    GstH264SliceHdr *slice = &h264_meta->slices[i];

    GST_LOG_OBJECT (h264_dec, "Handling slice #%d", i);
    slice->pps = h264_dec->pps[slice->pps_id];
  }

  if (first_slice->slice_type == GST_H264_NAL_SLICE_IDR) {
    ret = gst_vdp_h264_dec_idr (h264_dec, frame, first_slice);
    if (ret == GST_FLOW_OK)
      h264_dec->got_idr = TRUE;
    else
      goto skip_frame;
  }

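  /* allocate per-frame decoding state and attach it to the codec frame so
   * it is freed together with the frame */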
  h264_frame = g_slice_new0 (GstH264Frame);
  gst_video_codec_frame_set_user_data (frame, h264_frame,
      (GDestroyNotify) gst_h264_frame_free);

  gst_vdp_h264_dec_init_frame_info (h264_dec, h264_frame, first_slice);
  h264_frame->frame = frame;
  gst_vdp_h264_dec_fill_info (&info, h264_dec, h264_frame, first_slice);
  info.slice_count = h264_meta->num_slices;

  if (!gst_buffer_map (frame->input_buffer, &map, GST_MAP_READ))
    goto map_fail;
  bufs = gst_vdp_h264_dec_create_bitstream_buffers (h264_dec, h264_meta, &map);

  ret = gst_vdp_decoder_render (GST_VDP_DECODER (h264_dec),
      (VdpPictureInfo *) & info, h264_meta->num_slices, bufs, frame);
  g_free (bufs);
  gst_buffer_unmap (frame->input_buffer, &map);

  if (ret != GST_FLOW_OK)
    goto render_fail;

  /* DPB handling */
  return gst_vdp_h264_dec_handle_dpb (h264_dec, h264_frame, first_slice);

  /* EARLY exit */
no_idr:
  {
    GST_DEBUG_OBJECT (video_decoder, "Didn't see a IDR yet, skipping frame");
    return gst_video_decoder_finish_frame (video_decoder, frame);
  }

skip_frame:
  {
    GST_DEBUG_OBJECT (video_decoder, "Skipping frame");
    return gst_video_decoder_finish_frame (video_decoder, frame);
  }

  /* ERRORS */
no_h264_meta:
  {
    GST_ERROR_OBJECT (video_decoder, "Input buffer doesn't have GstH264Meta");
    return GST_FLOW_ERROR;
  }

no_slices:
  {
    GST_ERROR_OBJECT (video_decoder, "Input buffer doesn't have any slices");
    return GST_FLOW_ERROR;
  }

map_fail:
  {
    GST_ERROR_OBJECT (video_decoder, "Failed to map input buffer for READ");
    return GST_FLOW_ERROR;
  }

render_fail:
  {
    GST_ERROR_OBJECT (video_decoder, "Failed to render : %s",
        gst_flow_get_name (ret));
    gst_video_decoder_drop_frame (video_decoder, frame);
    return ret;
  }
}