static GstVideoCodecFrame *
gst_v4l2_video_dec_get_oldest_frame (GstVideoDecoder * decoder)
{
  GstVideoCodecFrame *frame = NULL;
  GList *frames, *l;
  gint count = 0;

  frames = gst_video_decoder_get_frames (decoder);

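  /* Select the pending frame with the lowest PTS */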
  for (l = frames; l != NULL; l = l->next) {
    GstVideoCodecFrame *f = l->data;

    if (!frame || frame->pts > f->pts)
      frame = f;

    count++;
  }

  if (frame) {
    GST_LOG_OBJECT (decoder,
        "Oldest frame is %d %" GST_TIME_FORMAT " and %d frames left",
        frame->system_frame_number, GST_TIME_ARGS (frame->pts), count - 1);
    gst_video_codec_frame_ref (frame);
  }

  g_list_free_full (frames, (GDestroyNotify) gst_video_codec_frame_unref);

  return frame;
}
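A minimal caller sketch for the helper above (hypothetical; the real v4l2 output loop attaches the decoded buffer differently). Note that gst_video_decoder_finish_frame() consumes the reference the helper returned.

/* Sketch: hand a freshly decoded buffer to the oldest pending frame.
 * Ownership of @output is transferred to this function. */
static GstFlowReturn
finish_oldest_frame (GstVideoDecoder * decoder, GstBuffer * output)
{
  GstVideoCodecFrame *frame =
      gst_v4l2_video_dec_get_oldest_frame (decoder);

  if (!frame) {
    gst_buffer_unref (output);
    return GST_FLOW_OK;
  }

  frame->output_buffer = output;
  /* finish_frame () consumes the reference returned by the helper */
  return gst_video_decoder_finish_frame (decoder, frame);
}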
/**
 * gst_vaapi_encoder_get_buffer_with_timeout:
 * @encoder: a #GstVaapiEncoder
 * @out_codedbuf_proxy_ptr: the next coded buffer as a #GstVaapiCodedBufferProxy
 * @timeout: the maximum number of microseconds to wait for the coded buffer
 *
 * Upon successful return, *@out_codedbuf_proxy_ptr contains the next
 * coded buffer as a #GstVaapiCodedBufferProxy. The caller owns this
 * object, so gst_vaapi_coded_buffer_proxy_unref() shall be called
 * after usage. Otherwise, @GST_VAAPI_ENCODER_STATUS_NO_BUFFER
 * is returned if no coded buffer is available so far (timeout).
 *
 * The parent frame is available as a #GstVideoCodecFrame attached to
 * the user-data anchor of the output coded buffer. Ownership of the
 * frame is transferred to the coded buffer.
 *
 * Return value: a #GstVaapiEncoderStatus
 */
GstVaapiEncoderStatus
gst_vaapi_encoder_get_buffer_with_timeout (GstVaapiEncoder * encoder,
    GstVaapiCodedBufferProxy ** out_codedbuf_proxy_ptr, guint64 timeout)
{
  GstVaapiEncPicture *picture;
  GstVaapiCodedBufferProxy *codedbuf_proxy;

  codedbuf_proxy = g_async_queue_timeout_pop (encoder->codedbuf_queue, timeout);
  if (!codedbuf_proxy)
    return GST_VAAPI_ENCODER_STATUS_NO_BUFFER;

  /* Wait for completion of all operations and report any error that occurred */
  picture = gst_vaapi_coded_buffer_proxy_get_user_data (codedbuf_proxy);
  if (!gst_vaapi_surface_sync (picture->surface))
    goto error_invalid_buffer;

  gst_vaapi_coded_buffer_proxy_set_user_data (codedbuf_proxy,
      gst_video_codec_frame_ref (picture->frame),
      (GDestroyNotify) gst_video_codec_frame_unref);

  if (out_codedbuf_proxy_ptr)
    *out_codedbuf_proxy_ptr = gst_vaapi_coded_buffer_proxy_ref (codedbuf_proxy);
  gst_vaapi_coded_buffer_proxy_unref (codedbuf_proxy);
  return GST_VAAPI_ENCODER_STATUS_SUCCESS;

  /* ERRORS */
error_invalid_buffer:
  {
    GST_ERROR ("failed to encode the frame");
    gst_vaapi_coded_buffer_proxy_unref (codedbuf_proxy);
    return GST_VAAPI_ENCODER_STATUS_ERROR_INVALID_SURFACE;
  }
}
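A minimal usage sketch following the ownership rules documented above; the 10 ms timeout is an arbitrary value for illustration.

/* Sketch: poll the encoder for the next coded buffer. The proxy
 * returned through the out parameter is owned by the caller and must
 * be unreffed after use. */
static gboolean
poll_coded_buffer (GstVaapiEncoder * encoder)
{
  GstVaapiCodedBufferProxy *proxy = NULL;
  GstVaapiEncoderStatus status;

  status = gst_vaapi_encoder_get_buffer_with_timeout (encoder, &proxy,
      10000 /* microseconds */);
  if (status != GST_VAAPI_ENCODER_STATUS_SUCCESS)
    return FALSE;               /* includes the NO_BUFFER timeout case */

  /* ... consume GST_VAAPI_CODED_BUFFER_PROXY_BUFFER (proxy) ... */

  gst_vaapi_coded_buffer_proxy_unref (proxy);
  return TRUE;
}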
static inline void
push_frame (GstVaapiDecoder * decoder, GstVideoCodecFrame * frame)
{
  GstVaapiSurfaceProxy *const proxy = frame->user_data;

  GST_DEBUG ("push frame %d (surface 0x%08x)", frame->system_frame_number,
      (guint32) GST_VAAPI_SURFACE_PROXY_SURFACE_ID (proxy));

  g_async_queue_push (decoder->frames, gst_video_codec_frame_ref (frame));
}
static void
drop_frame (GstVaapiDecoder * decoder, GstVideoCodecFrame * frame)
{
  GST_DEBUG ("drop frame %d", frame->system_frame_number);

  /* no surface proxy */
  gst_video_codec_frame_set_user_data (frame, NULL, NULL);

  frame->pts = GST_CLOCK_TIME_NONE;
  GST_VIDEO_CODEC_FRAME_FLAG_SET (frame,
      GST_VIDEO_CODEC_FRAME_FLAG_DECODE_ONLY);

  g_async_queue_push (decoder->frames, gst_video_codec_frame_ref (frame));
}
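The consumer side of decoder->frames is not shown here; below is a sketch of what it might look like, assuming the same queue and the flags set by drop_frame().

/* Sketch: pop one frame from the decoder's output queue. Frames
 * pushed by drop_frame() carry the DECODE_ONLY flag and no surface
 * proxy, so they are released without being displayed. */
static void
pop_frame (GstVaapiDecoder * decoder)
{
  GstVideoCodecFrame *frame = g_async_queue_try_pop (decoder->frames);

  if (!frame)
    return;

  if (GST_VIDEO_CODEC_FRAME_IS_DECODE_ONLY (frame))
    GST_DEBUG ("skip decode-only frame %d", frame->system_frame_number);
  /* ... otherwise frame->user_data holds the surface proxy to display ... */

  gst_video_codec_frame_unref (frame);
}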
static FrameData *
gst_x265_enc_queue_frame (GstX265Enc * enc, GstVideoCodecFrame * frame,
    GstVideoInfo * info)
{
  GstVideoFrame vframe;
  FrameData *fdata;

  if (!gst_video_frame_map (&vframe, info, frame->input_buffer, GST_MAP_READ))
    return NULL;

  fdata = g_slice_new (FrameData);
  fdata->frame = gst_video_codec_frame_ref (frame);
  fdata->vframe = vframe;

  enc->pending_frames = g_list_prepend (enc->pending_frames, fdata);

  return fdata;
}
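A hypothetical counterpart that releases one queued FrameData once x265 no longer reads the input picture, undoing the map, ref and list insertion above in reverse order.

/* Sketch: drop a FrameData created by gst_x265_enc_queue_frame ().
 * Assumes the encoder has finished reading the mapped input frame. */
static void
gst_x265_enc_dequeue_frame (GstX265Enc * enc, FrameData * fdata)
{
  enc->pending_frames = g_list_remove (enc->pending_frames, fdata);
  gst_video_frame_unmap (&fdata->vframe);
  gst_video_codec_frame_unref (fdata->frame);
  g_slice_free (FrameData, fdata);
}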
static GstVideoCodecFrame *
gst_msdkdec_get_oldest_frame (GstVideoDecoder * decoder)
{
  GstVideoCodecFrame *frame = NULL, *old_frame = NULL;
  GList *frames, *l;
  gint count = 0;

  frames = gst_video_decoder_get_frames (decoder);

  for (l = frames; l != NULL; l = l->next) {
    GstVideoCodecFrame *f = l->data;

    if (!GST_CLOCK_TIME_IS_VALID (f->pts)) {
      GST_INFO
          ("Frame doesn't have a valid pts yet, using "
          "gst_video_decoder_get_oldest_frame() without considering the PTS "
          "for selecting the frame to be finished");
      old_frame = gst_video_decoder_get_oldest_frame (decoder);
      break;
    }

    if (!frame || frame->pts > f->pts)
      frame = f;

    count++;
  }

  if (old_frame)
    frame = old_frame;

  if (frame) {
    GST_LOG_OBJECT (decoder,
        "Oldest frame is %d %" GST_TIME_FORMAT " and %d frames left",
        frame->system_frame_number, GST_TIME_ARGS (frame->pts), count - 1);
    gst_video_codec_frame_ref (frame);
  }

  if (old_frame)
    gst_video_codec_frame_unref (old_frame);

  g_list_free_full (frames, (GDestroyNotify) gst_video_codec_frame_unref);

  return frame;
}
static GstFlowReturn
_gst_libde265_return_image (GstVideoDecoder * decoder,
    GstVideoCodecFrame * frame, const struct de265_image *img)
{
  GstLibde265Dec *dec = GST_LIBDE265_DEC (decoder);
  struct GstLibde265FrameRef *ref;
  GstFlowReturn result;
  GstVideoFrame outframe;
  GstVideoCodecFrame *out_frame;
  int frame_number;
  int plane;

  ref = (struct GstLibde265FrameRef *) de265_get_image_plane_user_data (img, 0);
  if (ref != NULL) {
    /* decoder is using direct rendering */
    out_frame = gst_video_codec_frame_ref (ref->frame);
    if (frame != NULL) {
      gst_video_codec_frame_unref (frame);
    }
    gst_buffer_replace (&out_frame->output_buffer, ref->buffer);
    gst_buffer_replace (&ref->buffer, NULL);
    return gst_video_decoder_finish_frame (decoder, out_frame);
  }

  result =
      _gst_libde265_image_available (decoder, de265_get_image_width (img, 0),
      de265_get_image_height (img, 0));
  if (result != GST_FLOW_OK) {
    GST_ERROR_OBJECT (dec, "Failed to notify about available image");
    return result;
  }

  frame_number = (uintptr_t) de265_get_image_user_data (img) - 1;
  if (frame_number != -1) {
    out_frame = gst_video_decoder_get_frame (decoder, frame_number);
  } else {
    out_frame = NULL;
  }
  if (frame != NULL) {
    gst_video_codec_frame_unref (frame);
  }

  if (out_frame == NULL) {
    GST_ERROR_OBJECT (dec, "No frame available to return");
    return GST_FLOW_ERROR;
  }

  result = gst_video_decoder_allocate_output_frame (decoder, out_frame);
  if (result != GST_FLOW_OK) {
    GST_ERROR_OBJECT (dec, "Failed to allocate output frame");
    return result;
  }

  g_assert (dec->output_state != NULL);
  if (!gst_video_frame_map (&outframe, &dec->output_state->info,
          out_frame->output_buffer, GST_MAP_WRITE)) {
    GST_ERROR_OBJECT (dec, "Failed to map output buffer");
    return GST_FLOW_ERROR;
  }

  for (plane = 0; plane < 3; plane++) {
    int width = de265_get_image_width (img, plane);
    int height = de265_get_image_height (img, plane);
    int srcstride = width;
    int dststride = GST_VIDEO_FRAME_COMP_STRIDE (&outframe, plane);
    const uint8_t *src = de265_get_image_plane (img, plane, &srcstride);
    uint8_t *dest = GST_VIDEO_FRAME_COMP_DATA (&outframe, plane);
    if (srcstride == width && dststride == width) {
      memcpy (dest, src, height * width);
    } else {
      while (height--) {
        memcpy (dest, src, width);
        src += srcstride;
        dest += dststride;
      }
    }
  }
  gst_video_frame_unmap (&outframe);
  return gst_video_decoder_finish_frame (decoder, out_frame);
}
static GstFlowReturn
handle_picture (GstMpeg2dec * mpeg2dec, const mpeg2_info_t * info,
    GstVideoCodecFrame * frame)
{
  GstFlowReturn ret;
  gint type;
  const gchar *type_str = NULL;
  gboolean key_frame = FALSE;
  const mpeg2_picture_t *picture = info->current_picture;
  GstBuffer *buffer;

  ret = gst_mpeg2dec_alloc_buffer (mpeg2dec, frame, &buffer);
  if (ret != GST_FLOW_OK)
    return ret;

  type = picture->flags & PIC_MASK_CODING_TYPE;
  switch (type) {
    case PIC_FLAG_CODING_TYPE_I:
      key_frame = TRUE;
      mpeg2_skip (mpeg2dec->decoder, 0);
      type_str = "I";
      break;
    case PIC_FLAG_CODING_TYPE_P:
      type_str = "P";
      break;
    case PIC_FLAG_CODING_TYPE_B:
      type_str = "B";
      break;
    default:
      gst_video_codec_frame_ref (frame);
      ret = gst_video_decoder_drop_frame (GST_VIDEO_DECODER (mpeg2dec), frame);
      GST_VIDEO_DECODER_ERROR (mpeg2dec, 1, STREAM, DECODE,
          ("decoding error"), ("Invalid picture type"), ret);
      return ret;
  }

  GST_DEBUG_OBJECT (mpeg2dec, "handle picture type %s", type_str);
  GST_DEBUG_OBJECT (mpeg2dec, "picture %s, frame %i",
      key_frame ? ", kf," : "    ", frame->system_frame_number);

  if (GST_VIDEO_INFO_IS_INTERLACED (&mpeg2dec->decoded_info)) {
    /* This implies SEQ_FLAG_PROGRESSIVE_SEQUENCE is not set */
    if (picture->flags & PIC_FLAG_TOP_FIELD_FIRST) {
      GST_BUFFER_FLAG_SET (buffer, GST_VIDEO_BUFFER_FLAG_TFF);
    }
    if (!(picture->flags & PIC_FLAG_PROGRESSIVE_FRAME)) {
      GST_BUFFER_FLAG_SET (buffer, GST_VIDEO_BUFFER_FLAG_INTERLACED);
    }
#if MPEG2_RELEASE >= MPEG2_VERSION(0,5,0)
    /* repeat field introduced in 0.5.0 */
    if (picture->flags & PIC_FLAG_REPEAT_FIRST_FIELD) {
      GST_BUFFER_FLAG_SET (buffer, GST_VIDEO_BUFFER_FLAG_RFF);
    }
#endif
  }

  if (mpeg2dec->discont_state == MPEG2DEC_DISC_NEW_PICTURE && key_frame) {
    mpeg2dec->discont_state = MPEG2DEC_DISC_NEW_KEYFRAME;
  }

  GST_DEBUG_OBJECT (mpeg2dec,
      "picture: %s %s %s %s %s fields:%d ts:%"
      GST_TIME_FORMAT,
      (picture->flags & PIC_FLAG_PROGRESSIVE_FRAME ? "prog" : "    "),
      (picture->flags & PIC_FLAG_TOP_FIELD_FIRST ? "tff" : "   "),
#if MPEG2_RELEASE >= MPEG2_VERSION(0,5,0)
      (picture->flags & PIC_FLAG_REPEAT_FIRST_FIELD ? "rff" : "   "),
#else
      "unknown rff",
#endif
      (picture->flags & PIC_FLAG_SKIP ? "skip" : "    "),
      (picture->flags & PIC_FLAG_COMPOSITE_DISPLAY ? "composite" : "         "),
      picture->nb_fields, GST_TIME_ARGS (frame->pts));

  return ret;
}
static GstVideoCodecFrame *
_find_nearest_frame (GstAmcVideoEnc * self, GstClockTime reference_timestamp)
{
  GList *l, *best_l = NULL;
  GList *finish_frames = NULL;
  GstVideoCodecFrame *best = NULL;
  guint64 best_timestamp = 0;
  guint64 best_diff = G_MAXUINT64;
  BufferIdentification *best_id = NULL;
  GList *frames;

  frames = gst_video_encoder_get_frames (GST_VIDEO_ENCODER (self));

  for (l = frames; l; l = l->next) {
    GstVideoCodecFrame *tmp = l->data;
    BufferIdentification *id = gst_video_codec_frame_get_user_data (tmp);
    guint64 timestamp, diff;

    /* This happens for frames that were just added but
     * which were not passed to the component yet. Ignore
     * them here!
     */
    if (!id)
      continue;

    timestamp = id->timestamp;

    if (timestamp > reference_timestamp)
      diff = timestamp - reference_timestamp;
    else
      diff = reference_timestamp - timestamp;

    if (best == NULL || diff < best_diff) {
      best = tmp;
      best_timestamp = timestamp;
      best_diff = diff;
      best_l = l;
      best_id = id;

      /* For frames without timestamp we simply take the first frame */
      if ((reference_timestamp == 0 && timestamp == 0) || diff == 0)
        break;
    }
  }

  if (best_id) {
    for (l = frames; l && l != best_l; l = l->next) {
      GstVideoCodecFrame *tmp = l->data;
      BufferIdentification *id = gst_video_codec_frame_get_user_data (tmp);
      guint64 diff_time, diff_frames;

      if (id->timestamp > best_timestamp)
        break;

      if (id->timestamp == 0 || best_timestamp == 0)
        diff_time = 0;
      else
        diff_time = best_timestamp - id->timestamp;
      diff_frames = best->system_frame_number - tmp->system_frame_number;

      if (diff_time > MAX_FRAME_DIST_TIME
          || diff_frames > MAX_FRAME_DIST_FRAMES) {
        finish_frames =
            g_list_prepend (finish_frames, gst_video_codec_frame_ref (tmp));
      }
    }
  }

  if (finish_frames) {
    g_warning ("%s: Too old frames, bug in encoder -- please file a bug",
        GST_ELEMENT_NAME (self));
    for (l = finish_frames; l; l = l->next) {
      gst_video_encoder_finish_frame (GST_VIDEO_ENCODER (self), l->data);
    }
  }

  if (best)
    gst_video_codec_frame_ref (best);

  g_list_foreach (frames, (GFunc) gst_video_codec_frame_unref, NULL);
  g_list_free (frames);

  return best;
}
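For _find_nearest_frame() to consider a frame at all, a BufferIdentification must have been attached when the frame was handed to the component. A sketch of that step follows; the allocation and free function are assumptions, since only the timestamp field is visible above.

/* Sketch: tag a frame with the timestamp the component will report
 * back, so _find_nearest_frame () can match it later. g_new0 () and
 * g_free () are assumed to be suitable for BufferIdentification. */
static void
tag_frame_with_timestamp (GstVideoCodecFrame * frame, guint64 timestamp)
{
  BufferIdentification *id = g_new0 (BufferIdentification, 1);

  id->timestamp = timestamp;
  gst_video_codec_frame_set_user_data (frame, id, (GDestroyNotify) g_free);
}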
static GstFlowReturn
handle_picture (GstMpeg2dec * mpeg2dec, const mpeg2_info_t * info,
    GstVideoCodecFrame * frame)
{
  GstVideoDecoder *decoder = (GstVideoDecoder *) mpeg2dec;
  GstFlowReturn ret;
  gint type;
  const gchar *type_str = NULL;
  gboolean key_frame = FALSE;
  const mpeg2_picture_t *picture = info->current_picture;
  GstVideoFrame vframe;
  guint8 *buf[3];

  ret = gst_video_decoder_allocate_output_frame (decoder, frame);
  if (ret != GST_FLOW_OK)
    return ret;

  type = picture->flags & PIC_MASK_CODING_TYPE;
  switch (type) {
    case PIC_FLAG_CODING_TYPE_I:
      key_frame = TRUE;
      mpeg2_skip (mpeg2dec->decoder, 0);
      type_str = "I";
      break;
    case PIC_FLAG_CODING_TYPE_P:
      type_str = "P";
      break;
    case PIC_FLAG_CODING_TYPE_B:
      type_str = "B";
      break;
    default:
      gst_video_codec_frame_ref (frame);
      ret = gst_video_decoder_drop_frame (GST_VIDEO_DECODER (mpeg2dec), frame);
      GST_VIDEO_DECODER_ERROR (mpeg2dec, 1, STREAM, DECODE,
          ("decoding error"), ("Invalid picture type"), ret);
      return ret;
  }

  GST_DEBUG_OBJECT (mpeg2dec, "handle picture type %s", type_str);
  GST_DEBUG_OBJECT (mpeg2dec, "picture %s, frame %i",
      key_frame ? ", kf," : "    ", frame->system_frame_number);

  if (GST_VIDEO_INFO_IS_INTERLACED (&mpeg2dec->decoded_info)) {
    /* This implies SEQ_FLAG_PROGRESSIVE_SEQUENCE is not set */
    if (picture->flags & PIC_FLAG_TOP_FIELD_FIRST) {
      GST_BUFFER_FLAG_SET (frame->output_buffer, GST_VIDEO_BUFFER_FLAG_TFF);
    }
    if (!(picture->flags & PIC_FLAG_PROGRESSIVE_FRAME)) {
      GST_BUFFER_FLAG_SET (frame->output_buffer,
          GST_VIDEO_BUFFER_FLAG_INTERLACED);
    }
#if MPEG2_RELEASE >= MPEG2_VERSION(0,5,0)
    /* repeat field introduced in 0.5.0 */
    if (picture->flags & PIC_FLAG_REPEAT_FIRST_FIELD) {
      GST_BUFFER_FLAG_SET (frame->output_buffer, GST_VIDEO_BUFFER_FLAG_RFF);
    }
#endif
  }

  if (mpeg2dec->discont_state == MPEG2DEC_DISC_NEW_PICTURE && key_frame) {
    mpeg2dec->discont_state = MPEG2DEC_DISC_NEW_KEYFRAME;
  }

  GST_DEBUG_OBJECT (mpeg2dec,
      "picture: %s %s %s %s %s fields:%d ts:%"
      GST_TIME_FORMAT,
      (picture->flags & PIC_FLAG_PROGRESSIVE_FRAME ? "prog" : "    "),
      (picture->flags & PIC_FLAG_TOP_FIELD_FIRST ? "tff" : "   "),
#if MPEG2_RELEASE >= MPEG2_VERSION(0,5,0)
      (picture->flags & PIC_FLAG_REPEAT_FIRST_FIELD ? "rff" : "   "),
#else
      "unknown rff",
#endif
      (picture->flags & PIC_FLAG_SKIP ? "skip" : "    "),
      (picture->flags & PIC_FLAG_COMPOSITE_DISPLAY ? "composite" : "         "),
      picture->nb_fields, GST_TIME_ARGS (frame->pts));

  if (!gst_video_frame_map (&vframe, &mpeg2dec->decoded_info,
          frame->output_buffer, GST_MAP_READ | GST_MAP_WRITE))
    goto map_fail;

  buf[0] = GST_VIDEO_FRAME_PLANE_DATA (&vframe, 0);
  buf[1] = GST_VIDEO_FRAME_PLANE_DATA (&vframe, 1);
  buf[2] = GST_VIDEO_FRAME_PLANE_DATA (&vframe, 2);

  GST_DEBUG_OBJECT (mpeg2dec, "set_buf: %p %p %p, frame %i",
      buf[0], buf[1], buf[2], frame->system_frame_number);

  /* Note: We use a non-NULL 'id' value to make the distinction
   * between the dummy buffers (which have an id of NULL) and the
   * ones we set up ourselves */
  mpeg2_stride (mpeg2dec->decoder, vframe.info.stride[0]);
  mpeg2_set_buf (mpeg2dec->decoder, buf,
      GINT_TO_POINTER (frame->system_frame_number + 1));
  gst_mpeg2dec_save_buffer (mpeg2dec, frame->system_frame_number, &vframe);

  return ret;

map_fail:
  {
    GST_ELEMENT_ERROR (mpeg2dec, RESOURCE, WRITE, ("Failed to map frame"),
        (NULL));
    return GST_FLOW_ERROR;
  }
}
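The non-NULL id stored via mpeg2_set_buf() above can later be mapped back to its frame; here is a sketch of the reverse lookup, mirroring the "- 1" decoding used in the libde265 example earlier.

/* Sketch: recover the codec frame from the fbuf id set above. The
 * +1 bias that keeps the id non-NULL is undone before the lookup. */
static GstVideoCodecFrame *
frame_from_fbuf_id (GstVideoDecoder * decoder, gpointer id)
{
  if (id == NULL)
    return NULL;                /* dummy buffer, no frame attached */

  return gst_video_decoder_get_frame (decoder, GPOINTER_TO_INT (id) - 1);
}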
static GstFlowReturn
gst_vaapiencode_push_frame (GstVaapiEncode * encode, gint64 timeout)
{
  GstVideoEncoder *const venc = GST_VIDEO_ENCODER_CAST (encode);
  GstVaapiEncodeClass *const klass = GST_VAAPIENCODE_GET_CLASS (encode);
  GstVideoCodecFrame *out_frame;
  GstVaapiCodedBufferProxy *codedbuf_proxy = NULL;
  GstVaapiEncoderStatus status;
  GstBuffer *out_buffer;
  GstFlowReturn ret;

  status = gst_vaapi_encoder_get_buffer_with_timeout (encode->encoder,
      &codedbuf_proxy, timeout);
  if (status == GST_VAAPI_ENCODER_STATUS_NO_BUFFER)
    return GST_VAAPI_ENCODE_FLOW_TIMEOUT;
  if (status != GST_VAAPI_ENCODER_STATUS_SUCCESS)
    goto error_get_buffer;

  out_frame = gst_vaapi_coded_buffer_proxy_get_user_data (codedbuf_proxy);
  if (!out_frame)
    goto error_get_buffer;
  gst_video_codec_frame_ref (out_frame);
  gst_video_codec_frame_set_user_data (out_frame, NULL, NULL);

  /* Update output state */
  GST_VIDEO_ENCODER_STREAM_LOCK (encode);
  if (!ensure_output_state (encode))
    goto error_output_state;
  GST_VIDEO_ENCODER_STREAM_UNLOCK (encode);

  /* Allocate and copy buffer into system memory */
  out_buffer = NULL;
  ret = klass->alloc_buffer (encode,
      GST_VAAPI_CODED_BUFFER_PROXY_BUFFER (codedbuf_proxy), &out_buffer);
  gst_vaapi_coded_buffer_proxy_replace (&codedbuf_proxy, NULL);
  if (ret != GST_FLOW_OK)
    goto error_allocate_buffer;

  gst_buffer_replace (&out_frame->output_buffer, out_buffer);
  gst_buffer_unref (out_buffer);

  GST_TRACE_OBJECT (encode, "output:%" GST_TIME_FORMAT ", size:%zu",
      GST_TIME_ARGS (out_frame->pts), gst_buffer_get_size (out_buffer));

  return gst_video_encoder_finish_frame (venc, out_frame);

  /* ERRORS */
error_get_buffer:
  {
    GST_ERROR ("failed to get encoded buffer (status %d)", status);
    if (codedbuf_proxy)
      gst_vaapi_coded_buffer_proxy_unref (codedbuf_proxy);
    return GST_FLOW_ERROR;
  }
error_allocate_buffer:
  {
    GST_ERROR ("failed to allocate encoded buffer in system memory");
    if (out_buffer)
      gst_buffer_unref (out_buffer);
    gst_video_codec_frame_unref (out_frame);
    return ret;
  }
error_output_state:
  {
    GST_ERROR ("failed to negotiate output state (status %d)", status);
    GST_VIDEO_ENCODER_STREAM_UNLOCK (encode);
    gst_video_codec_frame_unref (out_frame);
    return GST_FLOW_NOT_NEGOTIATED;
  }
}
static GstFlowReturn
gst_msdkdec_handle_frame (GstVideoDecoder * decoder, GstVideoCodecFrame * frame)
{
  GstMsdkDec *thiz = GST_MSDKDEC (decoder);
  GstMsdkDecClass *klass = GST_MSDKDEC_GET_CLASS (thiz);
  GstFlowReturn flow;
  GstBuffer *buffer, *input_buffer = NULL;
  GstVideoInfo alloc_info;
  MsdkDecTask *task = NULL;
  mfxBitstream bitstream;
  MsdkSurface *surface = NULL;
  mfxSession session;
  mfxStatus status;
  GstMapInfo map_info;
  guint i;
  gsize data_size;
  gboolean hard_reset = FALSE;

  /* Configure the subclass in order to fill the CodecID field of
   * mfxVideoParam and also to load the PluginID for some of the
   * codecs, which is mandatory to invoke the
   * MFXVideoDECODE_DecodeHeader API.
   *
   * For non-packetized formats (currently only vc1), there could be
   * headers received as codec_data which are not available in-stream,
   * and in that case the subclass implementation will push them to the
   * internal adapter. We invoke the subclass configure early to make
   * sure any codec_data received has been pushed to the adapter by the
   * subclass before doing the DecodeHeader() later on
   */
  if (!thiz->initialized || thiz->do_renego) {
    /* Clear the internal adapter in renegotiation for non-packetized
     * formats */
    if (!thiz->is_packetized)
      gst_adapter_clear (thiz->adapter);

    if (!klass->configure || !klass->configure (thiz)) {
      flow = GST_FLOW_OK;
      goto error;
    }
  }

  /* The current codec frame could be pushed and released before this
   * function ends -- msdkdec pushes the oldest frame according to its
   * PTS, and that could well be this very frame among the other
   * pending ones.
   *
   * Instead of copying the input data into the mfxBitstream, let's
   * keep an extra reference to the frame's input buffer */
  input_buffer = gst_buffer_ref (frame->input_buffer);
  if (!gst_buffer_map (input_buffer, &map_info, GST_MAP_READ)) {
    gst_buffer_unref (input_buffer);
    return GST_FLOW_ERROR;
  }

  memset (&bitstream, 0, sizeof (bitstream));

  if (thiz->is_packetized) {
    /* Packetized stream: We prefer to have a parser as connected upstream
     * element to the decoder */
    bitstream.Data = map_info.data;
    bitstream.DataLength = map_info.size;
    bitstream.MaxLength = map_info.size;
    bitstream.DataFlag = MFX_BITSTREAM_COMPLETE_FRAME;
  } else {
    /* Non packetized streams: eg: vc1 advanced profile with per buffer bdu */
    gst_adapter_push (thiz->adapter, gst_buffer_ref (input_buffer));
    data_size = gst_adapter_available (thiz->adapter);

    bitstream.Data = (mfxU8 *) gst_adapter_map (thiz->adapter, data_size);
    bitstream.DataLength = (mfxU32) data_size;
    bitstream.MaxLength = bitstream.DataLength;
  }
  GST_INFO_OBJECT (thiz,
      "mfxBitStream=> DataLength:%d DataOffset:%d MaxLength:%d",
      bitstream.DataLength, bitstream.DataOffset, bitstream.MaxLength);

  session = gst_msdk_context_get_session (thiz->context);

  if (!thiz->initialized || thiz->do_renego) {

    /* GStreamer caps do not carry all the parameters required for an
     * optimal decode configuration. For example, the required number of
     * surfaces to allocate can be calculated from the H264 SEI header,
     * and that information can't be retrieved from the negotiated caps.
     * So instead of introducing a codecparser dependency to parse the
     * headers inside the msdk plugin, we simply use the mfx APIs to
     * extract the header information */
    status = MFXVideoDECODE_DecodeHeader (session, &bitstream, &thiz->param);
    if (status == MFX_ERR_MORE_DATA) {
      flow = GST_FLOW_OK;
      goto done;
    }

    if (!thiz->initialized)
      hard_reset = TRUE;
    else if (thiz->allocation_caps) {
      gst_video_info_from_caps (&alloc_info, thiz->allocation_caps);

      /* Check whether we need complete reset for dynamic resolution change */
      if (thiz->param.mfx.FrameInfo.Width > GST_VIDEO_INFO_WIDTH (&alloc_info)
          || thiz->param.mfx.FrameInfo.Height >
          GST_VIDEO_INFO_HEIGHT (&alloc_info))
        hard_reset = TRUE;
    }

    /* if the subclass requested a forced reset */
    if (thiz->force_reset_on_res_change)
      hard_reset = TRUE;

    /* The config changed dynamically and we are going to do a full
     * reset; this will unref the input frame, which carries the new
     * configuration. Keep a ref to the input frame to keep it alive */
    if (thiz->initialized && thiz->do_renego)
      gst_video_codec_frame_ref (frame);

    if (!gst_msdkdec_negotiate (thiz, hard_reset)) {
      GST_ELEMENT_ERROR (thiz, CORE, NEGOTIATION,
          ("Could not negotiate the stream"), (NULL));
      flow = GST_FLOW_ERROR;
      goto error;
    }
  }

  for (;;) {
    task = &g_array_index (thiz->tasks, MsdkDecTask, thiz->next_task);
    flow = gst_msdkdec_finish_task (thiz, task);
    if (flow != GST_FLOW_OK)
      goto error;
    if (!surface) {
      flow = allocate_output_buffer (thiz, &buffer);
      if (flow != GST_FLOW_OK)
        goto error;
      surface = get_surface (thiz, buffer);
      if (!surface) {
        /* Can't get a surface for some reason, finish tasks to see if
           a surface becomes available. */
        for (i = 0; i < thiz->tasks->len - 1; i++) {
          thiz->next_task = (thiz->next_task + 1) % thiz->tasks->len;
          task = &g_array_index (thiz->tasks, MsdkDecTask, thiz->next_task);
          flow = gst_msdkdec_finish_task (thiz, task);
          if (flow != GST_FLOW_OK)
            goto error;
          surface = get_surface (thiz, buffer);
          if (surface)
            break;
        }
        if (!surface) {
          GST_ERROR_OBJECT (thiz, "Couldn't get a surface");
          flow = GST_FLOW_ERROR;
          goto error;
        }
      }
    }

    status =
        MFXVideoDECODE_DecodeFrameAsync (session, &bitstream, surface->surface,
        &task->surface, &task->sync_point);

    /* media-sdk requires a complete reset since the surface is
     * inadequate for further decoding */
    if (status == MFX_ERR_INCOMPATIBLE_VIDEO_PARAM) {
      /* Requires memory re-allocation, do a hard reset */
      if (!gst_msdkdec_negotiate (thiz, TRUE))
        goto error;
      status =
          MFXVideoDECODE_DecodeFrameAsync (session, &bitstream,
          surface->surface, &task->surface, &task->sync_point);
    }

    if (G_LIKELY (status == MFX_ERR_NONE)
        || (status == MFX_WRN_VIDEO_PARAM_CHANGED)) {
      thiz->next_task = (thiz->next_task + 1) % thiz->tasks->len;

      if (surface->surface->Data.Locked > 0 || !thiz->use_video_memory)
        surface = NULL;

      if (bitstream.DataLength == 0) {
        flow = GST_FLOW_OK;
        break;
      }
    } else if (status == MFX_ERR_MORE_DATA) {
      if (task->surface) {
        task->decode_only = TRUE;
        thiz->next_task = (thiz->next_task + 1) % thiz->tasks->len;
      }

      if (surface->surface->Data.Locked > 0)
        surface = NULL;
      flow = GST_VIDEO_DECODER_FLOW_NEED_DATA;
      break;
    } else if (status == MFX_ERR_MORE_SURFACE) {
      surface = NULL;
      continue;
    } else if (status == MFX_WRN_DEVICE_BUSY) {
      /* If the device is busy, wait 1ms and retry, as per MSDK's
       * recommendation */
      g_usleep (1000);

      /* If the current surface is still busy, do a sync operation and
       * then try to decode again
       */
      thiz->next_task = (thiz->next_task + 1) % thiz->tasks->len;
    } else if (status < MFX_ERR_NONE) {
      GST_ERROR_OBJECT (thiz, "DecodeFrameAsync failed (%s)",
          msdk_status_to_string (status));
      flow = GST_FLOW_ERROR;
      break;
    }
  }

  if (!thiz->is_packetized) {
    /* flush out the data which is already consumed by msdk */
    gst_adapter_flush (thiz->adapter, bitstream.DataOffset);
    flow = GST_FLOW_OK;
  }

done:
  if (surface)
    free_surface (thiz, surface);

  gst_buffer_unmap (input_buffer, &map_info);
  gst_buffer_unref (input_buffer);
  return flow;

error:
  if (input_buffer) {
    gst_buffer_unmap (input_buffer, &map_info);
    gst_buffer_unref (input_buffer);
  }
  gst_video_decoder_drop_frame (decoder, frame);

  return flow;
}