static GstFlowReturn
_gst_libde265_return_image (GstVideoDecoder * decoder,
    GstVideoCodecFrame * frame, const struct de265_image *img)
{
  GstLibde265Dec *dec = GST_LIBDE265_DEC (decoder);
  struct GstLibde265FrameRef *ref;
  GstFlowReturn result;
  GstVideoFrame outframe;
  GstVideoCodecFrame *out_frame;
  int frame_number;
  int plane;

  ref = (struct GstLibde265FrameRef *) de265_get_image_plane_user_data (img, 0);
  if (ref != NULL) {
    /* decoder is using direct rendering */
    out_frame = gst_video_codec_frame_ref (ref->frame);
    if (frame != NULL) {
      gst_video_codec_frame_unref (frame);
    }
    gst_buffer_replace (&out_frame->output_buffer, ref->buffer);
    gst_buffer_replace (&ref->buffer, NULL);
    return gst_video_decoder_finish_frame (decoder, out_frame);
  }

  result =
      _gst_libde265_image_available (decoder, de265_get_image_width (img, 0),
      de265_get_image_height (img, 0));
  if (result != GST_FLOW_OK) {
    GST_ERROR_OBJECT (dec, "Failed to notify about available image");
    return result;
  }

  frame_number = (uintptr_t) de265_get_image_user_data (img) - 1;
  if (frame_number != -1) {
    out_frame = gst_video_decoder_get_frame (decoder, frame_number);
  } else {
    out_frame = NULL;
  }
  if (frame != NULL) {
    gst_video_codec_frame_unref (frame);
  }

  if (out_frame == NULL) {
    GST_ERROR_OBJECT (dec, "No frame available to return");
    return GST_FLOW_ERROR;
  }

  result = gst_video_decoder_allocate_output_frame (decoder, out_frame);
  if (result != GST_FLOW_OK) {
    GST_ERROR_OBJECT (dec, "Failed to allocate output frame");
    return result;
  }

  g_assert (dec->output_state != NULL);
  if (!gst_video_frame_map (&outframe, &dec->output_state->info,
          out_frame->output_buffer, GST_MAP_WRITE)) {
    GST_ERROR_OBJECT (dec, "Failed to map output buffer");
    return GST_FLOW_ERROR;
  }

  for (plane = 0; plane < 3; plane++) {
    int width = de265_get_image_width (img, plane);
    int height = de265_get_image_height (img, plane);
    int srcstride = width;
    int dststride = GST_VIDEO_FRAME_COMP_STRIDE (&outframe, plane);
    const uint8_t *src = de265_get_image_plane (img, plane, &srcstride);
    uint8_t *dest = GST_VIDEO_FRAME_COMP_DATA (&outframe, plane);
    if (srcstride == width && dststride == width) {
      memcpy (dest, src, height * width);
    } else {
      while (height--) {
        memcpy (dest, src, width);
        src += srcstride;
        dest += dststride;
      }
    }
  }
  gst_video_frame_unmap (&outframe);
  return gst_video_decoder_finish_frame (decoder, out_frame);
}
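
The (uintptr_t) de265_get_image_user_data (img) - 1 lookup above only works
if the frame number was attached when the compressed data was pushed into
libde265. Below is a minimal sketch of that input side, assuming the element
feeds raw bytes with de265_push_data(); the name push_frame_sketch and its
error handling are illustrative, not taken from the plugin.

#include <stdint.h>
#include <gst/video/gstvideodecoder.h>
#include <libde265/de265.h>

/* Hypothetical input-side counterpart (not from this page): store
 * system_frame_number + 1 as the image user data so that frame 0 can still
 * be told apart from a NULL pointer in the output path above. */
static GstFlowReturn
push_frame_sketch (de265_decoder_context * ctx, GstVideoCodecFrame * frame)
{
  GstMapInfo map;
  de265_error err;

  if (!gst_buffer_map (frame->input_buffer, &map, GST_MAP_READ))
    return GST_FLOW_ERROR;

  err = de265_push_data (ctx, map.data, (int) map.size,
      (de265_PTS) frame->pts,
      (void *) (uintptr_t) (frame->system_frame_number + 1));

  gst_buffer_unmap (frame->input_buffer, &map);

  return (err == DE265_OK) ? GST_FLOW_OK : GST_FLOW_ERROR;
}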
Example #2
static GstFlowReturn
handle_slice (GstMpeg2dec * mpeg2dec, const mpeg2_info_t * info)
{
  GstFlowReturn ret = GST_FLOW_OK;
  GstVideoCodecFrame *frame;
  const mpeg2_picture_t *picture;
  gboolean key_frame = FALSE;
  GstVideoCodecState *state;

  GST_DEBUG_OBJECT (mpeg2dec,
      "fbuf:%p display_picture:%p current_picture:%p fbuf->id:%d",
      info->display_fbuf, info->display_picture, info->current_picture,
      GPOINTER_TO_INT (info->display_fbuf->id) - 1);

  /* Note, the fbuf-id is shifted by 1 to make the difference between
   * NULL values (used by dummy buffers) and 'real' values */
  frame = gst_video_decoder_get_frame (GST_VIDEO_DECODER (mpeg2dec),
      GPOINTER_TO_INT (info->display_fbuf->id) - 1);
  if (!frame)
    goto no_frame;
  picture = info->display_picture;
  key_frame = (picture->flags & PIC_MASK_CODING_TYPE) == PIC_FLAG_CODING_TYPE_I;

  GST_DEBUG_OBJECT (mpeg2dec, "picture flags: %d, type: %d, keyframe: %d",
      picture->flags, picture->flags & PIC_MASK_CODING_TYPE, key_frame);

  if (key_frame) {
    mpeg2_skip (mpeg2dec->decoder, 0);
  }

  if (mpeg2dec->discont_state == MPEG2DEC_DISC_NEW_KEYFRAME && key_frame)
    mpeg2dec->discont_state = MPEG2DEC_DISC_NONE;

  if (picture->flags & PIC_FLAG_SKIP) {
    GST_DEBUG_OBJECT (mpeg2dec, "dropping buffer because of skip flag");
    ret = gst_video_decoder_drop_frame (GST_VIDEO_DECODER (mpeg2dec), frame);
    mpeg2_skip (mpeg2dec->decoder, 1);
    return ret;
  }

  if (mpeg2dec->discont_state != MPEG2DEC_DISC_NONE) {
    GST_DEBUG_OBJECT (mpeg2dec, "dropping buffer, discont state %d",
        mpeg2dec->discont_state);
    ret = gst_video_decoder_drop_frame (GST_VIDEO_DECODER (mpeg2dec), frame);
    return ret;
  }

  state = gst_video_decoder_get_output_state (GST_VIDEO_DECODER (mpeg2dec));

  /* do cropping if the target region is smaller than the input one */
  if (mpeg2dec->need_cropping && !mpeg2dec->has_cropping) {
    GstVideoFrame *vframe;

    if (gst_video_decoder_get_max_decode_time (GST_VIDEO_DECODER (mpeg2dec),
            frame) < 0) {
      GST_DEBUG_OBJECT (mpeg2dec, "dropping buffer crop, too late");
      ret = gst_video_decoder_drop_frame (GST_VIDEO_DECODER (mpeg2dec), frame);
      goto beach;
    }

    GST_DEBUG_OBJECT (mpeg2dec, "cropping buffer");
    vframe = gst_mpeg2dec_get_buffer (mpeg2dec, frame->system_frame_number);
    g_assert (vframe != NULL);
    ret = gst_mpeg2dec_crop_buffer (mpeg2dec, frame, vframe);
  }

  ret = gst_video_decoder_finish_frame (GST_VIDEO_DECODER (mpeg2dec), frame);

beach:
  gst_video_codec_state_unref (state);
  return ret;

no_frame:
  {
    GST_DEBUG ("display buffer does not have a valid frame");
    return GST_FLOW_OK;
  }
}
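
The fbuf id that handle_slice() reads back (shifted by 1, as its comment
notes) has to be attached when libmpeg2 is handed its picture buffers. The
sketch below shows only that side; attach_frame_id_sketch is a hypothetical
helper, and the plane pointers are assumed to come from an output buffer
that was already allocated and mapped elsewhere in the element.

#include <inttypes.h>
#include <mpeg2.h>
#include <gst/video/gstvideodecoder.h>

/* Hypothetical helper (not from gstmpeg2dec.c): hand the output planes to
 * libmpeg2 and tag them with system_frame_number + 1, so id == NULL keeps
 * marking libmpeg2's internal dummy buffers. */
static void
attach_frame_id_sketch (mpeg2dec_t * decoder, GstVideoCodecFrame * frame,
    uint8_t * planes[3])
{
  mpeg2_set_buf (decoder, planes,
      GINT_TO_POINTER (frame->system_frame_number + 1));
}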
static int
gst_libde265_dec_get_buffer (de265_decoder_context * ctx,
    struct de265_image_spec *spec, struct de265_image *img, void *userdata)
{
  GstVideoDecoder *base = (GstVideoDecoder *) userdata;
  GstLibde265Dec *dec = GST_LIBDE265_DEC (base);
  GstVideoCodecFrame *frame = NULL;
  int i;
  int width = spec->width;
  int height = spec->height;
  GstFlowReturn ret;
  struct GstLibde265FrameRef *ref;
  GstVideoInfo *info;
  int frame_number;

  frame_number = (uintptr_t) de265_get_image_user_data (img) - 1;
  if (G_UNLIKELY (frame_number == -1)) {
    /* should not happen... */
    GST_WARNING_OBJECT (base, "Frame has no number assigned!");
    goto fallback;
  }

  frame = gst_video_decoder_get_frame (base, frame_number);
  if (G_UNLIKELY (frame == NULL)) {
    /* should not happen... */
    GST_WARNING_OBJECT (base, "Couldn't get codec frame!");
    goto fallback;
  }

  if (width % spec->alignment) {
    width += spec->alignment - (width % spec->alignment);
  }
  if (width != spec->visible_width || height != spec->visible_height) {
    /* clipping not supported for now */
    goto fallback;
  }

  ret = _gst_libde265_image_available (base, width, height);
  if (G_UNLIKELY (ret != GST_FLOW_OK)) {
    GST_ERROR_OBJECT (dec, "Failed to notify about available image");
    goto fallback;
  }

  ret =
      gst_video_decoder_allocate_output_frame (GST_VIDEO_DECODER (dec), frame);
  if (G_UNLIKELY (ret != GST_FLOW_OK)) {
    GST_ERROR_OBJECT (dec, "Failed to allocate output buffer");
    goto fallback;
  }

  ref = (struct GstLibde265FrameRef *) g_malloc0 (sizeof (*ref));
  g_assert (ref != NULL);
  ref->decoder = base;
  ref->frame = frame;

  gst_buffer_replace (&ref->buffer, frame->output_buffer);
  gst_buffer_replace (&frame->output_buffer, NULL);

  info = &dec->output_state->info;
  if (!gst_video_frame_map (&ref->vframe, info, ref->buffer, GST_MAP_READWRITE)) {
    GST_ERROR_OBJECT (dec, "Failed to map frame output buffer");
    goto error;
  }

  ref->mapped = TRUE;
  if (GST_VIDEO_FRAME_PLANE_STRIDE (&ref->vframe,
          0) < width * GST_VIDEO_FRAME_COMP_PSTRIDE (&ref->vframe, 0)) {
    GST_DEBUG_OBJECT (dec, "plane 0: pitch too small (%d/%d*%d)",
        GST_VIDEO_FRAME_PLANE_STRIDE (&ref->vframe, 0), width,
        GST_VIDEO_FRAME_COMP_PSTRIDE (&ref->vframe, 0));
    goto error;
  }

  if (GST_VIDEO_FRAME_COMP_HEIGHT (&ref->vframe, 0) < height) {
    GST_DEBUG_OBJECT (dec, "plane 0: lines too few (%d/%d)",
        GST_VIDEO_FRAME_COMP_HEIGHT (&ref->vframe, 0), height);
    goto error;
  }

  for (i = 0; i < 3; i++) {
    uint8_t *data;
    int stride = GST_VIDEO_FRAME_PLANE_STRIDE (&ref->vframe, i);
    if (stride % spec->alignment) {
      GST_DEBUG_OBJECT (dec, "plane %d: pitch not aligned (%d%%%d)",
          i, stride, spec->alignment);
      goto error;
    }

    data = GST_VIDEO_FRAME_PLANE_DATA (&ref->vframe, i);
    if ((uintptr_t) (data) % spec->alignment) {
      GST_DEBUG_OBJECT (dec, "plane %d not aligned", i);
      goto error;
    }

    de265_set_image_plane (img, i, data, stride, ref);
  }
  return 1;

error:
  gst_libde265_dec_release_frame_ref (ref);
  frame = NULL;

fallback:
  if (frame != NULL) {
    gst_video_codec_frame_unref (frame);
  }
  return de265_get_default_image_allocation_functions ()->get_buffer (ctx,
      spec, img, userdata);
}
Example #4
static GstFlowReturn
gst_mfc_dec_dequeue_output (GstMFCDec * self)
{
  GstFlowReturn ret = GST_FLOW_OK;
  gint mfc_ret;
  GstVideoCodecFrame *frame = NULL;
  GstBuffer *outbuf = NULL;
  struct mfc_buffer *mfc_outbuf = NULL;
  gint width, height;
  gint crop_left, crop_top, crop_width, crop_height;
  gint src_ystride, src_uvstride;
  GstVideoCodecState *state = NULL;
  gint64 deadline;
  struct timeval timestamp;

  if (!self->initialized) {
    GST_DEBUG_OBJECT (self, "Initializing decoder");
    if ((mfc_ret = mfc_dec_init_output (self->context, 1)) < 0)
      goto initialize_error;
    self->initialized = TRUE;
  }

  while ((mfc_ret = mfc_dec_output_available (self->context)) > 0) {
    GST_DEBUG_OBJECT (self, "Dequeueing output");

    mfc_dec_get_output_size (self->context, &width, &height);
    mfc_dec_get_output_stride (self->context, &src_ystride, &src_uvstride);
    mfc_dec_get_crop_size (self->context, &crop_left, &crop_top, &crop_width,
        &crop_height);

    GST_DEBUG_OBJECT (self, "Have output: width %d, height %d, "
        "Y stride %d, UV stride %d, "
        "crop_left %d, crop_right %d, "
        "crop_width %d, crop_height %d", width, height, src_ystride,
        src_uvstride, crop_left, crop_top, crop_width, crop_height);

    state = gst_video_decoder_get_output_state (GST_VIDEO_DECODER (self));

    if (!state || self->width != width || self->height != height ||
        self->src_stride[0] != src_ystride
        || self->src_stride[1] != src_uvstride
        || self->crop_left != crop_left || self->crop_top != crop_top
        || self->crop_width != crop_width || self->crop_height != crop_height) {
      self->width = width;
      self->height = height;
      self->crop_left = crop_left;
      self->crop_top = crop_top;
      self->crop_width = crop_width;
      self->crop_height = crop_height;
      self->src_stride[0] = src_ystride;
      self->src_stride[1] = src_uvstride;
      self->src_stride[2] = 0;

      if (!gst_video_decoder_negotiate (GST_VIDEO_DECODER (self)))
        goto negotiate_error;

      if (state)
        gst_video_codec_state_unref (state);
      state = gst_video_decoder_get_output_state (GST_VIDEO_DECODER (self));
    }

    if ((mfc_ret =
            mfc_dec_dequeue_output (self->context, &mfc_outbuf,
                &timestamp)) < 0) {
      if (mfc_ret == -2) {
        GST_DEBUG_OBJECT (self, "Timeout dequeueing output, trying again");
        mfc_ret =
            mfc_dec_dequeue_output (self->context, &mfc_outbuf, &timestamp);
      }

      if (mfc_ret < 0)
        goto dequeue_error;
    }

    g_assert (mfc_outbuf != NULL);

    GST_DEBUG_OBJECT (self, "Got output buffer with ID %ld", timestamp.tv_sec);

    frame = NULL;
    if (timestamp.tv_sec != -1)
      frame =
          gst_video_decoder_get_frame (GST_VIDEO_DECODER (self),
          timestamp.tv_sec);

    if (frame) {
      deadline =
          gst_video_decoder_get_max_decode_time (GST_VIDEO_DECODER (self),
          frame);
      if (deadline < 0) {
        GST_LOG_OBJECT (self,
            "Dropping too late frame: deadline %" G_GINT64_FORMAT, deadline);
        ret = gst_video_decoder_drop_frame (GST_VIDEO_DECODER (self), frame);
        frame = NULL;
        outbuf = NULL;
        goto done;
      }

      ret =
          gst_video_decoder_allocate_output_frame (GST_VIDEO_DECODER (self),
          frame);

      if (ret != GST_FLOW_OK)
        goto alloc_error;

      outbuf = frame->output_buffer;
    } else {
      GST_WARNING_OBJECT (self, "Didn't find a frame for ID %ld",
          timestamp.tv_sec);

      outbuf =
          gst_video_decoder_allocate_output_buffer (GST_VIDEO_DECODER (self));

      if (!outbuf) {
        ret = GST_FLOW_ERROR;
        goto alloc_error;
      }
    }

    ret = gst_mfc_dec_fill_outbuf (self, outbuf, mfc_outbuf, state);
    if (ret != GST_FLOW_OK)
      goto fill_error;

    if (frame) {
      ret = gst_video_decoder_finish_frame (GST_VIDEO_DECODER (self), frame);
      frame = NULL;
      outbuf = NULL;
    } else {
      ret = gst_pad_push (GST_VIDEO_DECODER_SRC_PAD (self), outbuf);
      outbuf = NULL;
    }

    if (ret != GST_FLOW_OK)
      GST_INFO_OBJECT (self, "Pushing frame returned: %s",
          gst_flow_get_name (ret));

  done:
    if (mfc_outbuf) {
      if ((mfc_ret = mfc_dec_enqueue_output (self->context, mfc_outbuf)) < 0)
        goto enqueue_error;
    }

    if (!frame && outbuf)
      gst_buffer_unref (outbuf);
    if (frame)
      gst_video_codec_frame_unref (frame);
    if (state)
      gst_video_codec_state_unref (state);

    frame = NULL;
    outbuf = NULL;

    if (ret != GST_FLOW_OK)
      break;
  }

  return ret;

initialize_error:
  {
    GST_ELEMENT_ERROR (self, LIBRARY, INIT, ("Failed to initialize output"),
        ("mfc_dec_init: %d", mfc_ret));
    ret = GST_FLOW_ERROR;
    goto done;
  }

negotiate_error:
  {
    GST_ELEMENT_ERROR (self, CORE, NEGOTIATION, ("Failed to negotiate"),
        (NULL));
    ret = GST_FLOW_NOT_NEGOTIATED;
    goto done;
  }

dequeue_error:
  {
    GST_ELEMENT_ERROR (self, LIBRARY, FAILED,
        ("Failed to dequeue output buffer"), ("mfc_dec_dequeue_output: %d",
            mfc_ret));
    ret = GST_FLOW_ERROR;
    goto done;
  }

alloc_error:
  {
    GST_ELEMENT_ERROR (self, CORE, FAILED, ("Failed to allocate output buffer"),
        (NULL));
    ret = GST_FLOW_ERROR;
    goto done;
  }

fill_error:
  {
    GST_ELEMENT_ERROR (self, LIBRARY, FAILED, ("Failed to fill output buffer"),
        (NULL));
    ret = GST_FLOW_ERROR;
    goto done;
  }

enqueue_error:
  {
    GST_ELEMENT_ERROR (self, LIBRARY, FAILED,
        ("Failed to enqueue output buffer"), ("mfc_dec_enqueue_output: %d",
            mfc_ret));
    ret = GST_FLOW_ERROR;
    goto done;
  }
}
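
Despite the different libraries, every example above follows the same
GstVideoDecoder pattern: stash the system_frame_number in whatever
per-picture user data the library offers, look the frame up again when
output appears, then drop it or finish it. The condensed sketch below shows
only that shared skeleton; my_decoder_output_ready is a placeholder name and
filling the output buffer is omitted.

#include <gst/video/gstvideodecoder.h>

static GstFlowReturn
my_decoder_output_ready (GstVideoDecoder * decoder, guint32 frame_id)
{
  GstVideoCodecFrame *frame;

  /* Map the library's per-picture id back to the pending codec frame. */
  frame = gst_video_decoder_get_frame (decoder, frame_id);
  if (frame == NULL)
    return GST_FLOW_OK;         /* nothing pending for this id */

  /* A negative max decode time means the frame is already too late. */
  if (gst_video_decoder_get_max_decode_time (decoder, frame) < 0)
    return gst_video_decoder_drop_frame (decoder, frame);

  /* Allocate frame->output_buffer; copying the decoded pixels into it is
   * omitted in this sketch. */
  if (gst_video_decoder_allocate_output_frame (decoder, frame) != GST_FLOW_OK) {
    gst_video_codec_frame_unref (frame);
    return GST_FLOW_ERROR;
  }

  return gst_video_decoder_finish_frame (decoder, frame);
}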