Example #1
static GstFlowReturn
theora_dec_handle_frame (GstVideoDecoder * bdec, GstVideoCodecFrame * frame)
{
  GstTheoraDec *dec;
  GstFlowReturn res;

  dec = GST_THEORA_DEC (bdec);

  res = theora_dec_decode_buffer (dec, frame->input_buffer, frame);
  switch (res) {
    case GST_FLOW_OK:
      res = gst_video_decoder_finish_frame (bdec, frame);
      break;
    case GST_CUSTOM_FLOW_DROP:
      res = gst_video_decoder_drop_frame (bdec, frame);
      break;
    default:
      gst_video_codec_frame_unref (frame);
      break;
  }

  return res;
}
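The theora example above is the minimal form of the GstVideoDecoder::handle_frame contract. Below is a condensed sketch of that pattern, with my_dec_decode() as a hypothetical stand-in for the codec-specific work; the point is that every path must consume the frame exactly once: finish_frame() on success, drop_frame() on a drop, or a plain unref otherwise.

#include <gst/gst.h>
#include <gst/video/video.h>

/* hypothetical codec-specific worker */
static GstFlowReturn my_dec_decode (GstVideoDecoder * dec,
    GstBuffer * input, GstVideoCodecFrame * frame);

static GstFlowReturn
my_dec_handle_frame (GstVideoDecoder * decoder, GstVideoCodecFrame * frame)
{
  GstFlowReturn res = my_dec_decode (decoder, frame->input_buffer, frame);

  switch (res) {
    case GST_FLOW_OK:
      /* pushes frame->output_buffer downstream and releases the frame */
      return gst_video_decoder_finish_frame (decoder, frame);
    case GST_FLOW_CUSTOM_SUCCESS:
      /* no output for this input: release the frame without pushing */
      return gst_video_decoder_drop_frame (decoder, frame);
    default:
      /* error or flushing: just give up our reference */
      gst_video_codec_frame_unref (frame);
      return res;
  }
}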
Example #2
static GstFlowReturn
_gst_libde265_return_image (GstVideoDecoder * decoder,
    GstVideoCodecFrame * frame, const struct de265_image *img)
{
  GstLibde265Dec *dec = GST_LIBDE265_DEC (decoder);
  struct GstLibde265FrameRef *ref;
  GstFlowReturn result;
  GstVideoFrame outframe;
  GstVideoCodecFrame *out_frame;
  int frame_number;
  int plane;

  ref = (struct GstLibde265FrameRef *) de265_get_image_plane_user_data (img, 0);
  if (ref != NULL) {
    /* decoder is using direct rendering */
    out_frame = gst_video_codec_frame_ref (ref->frame);
    if (frame != NULL) {
      gst_video_codec_frame_unref (frame);
    }
    gst_buffer_replace (&out_frame->output_buffer, ref->buffer);
    gst_buffer_replace (&ref->buffer, NULL);
    return gst_video_decoder_finish_frame (decoder, out_frame);
  }

  result =
      _gst_libde265_image_available (decoder, de265_get_image_width (img, 0),
      de265_get_image_height (img, 0));
  if (result != GST_FLOW_OK) {
    GST_ERROR_OBJECT (dec, "Failed to notify about available image");
    return result;
  }

  frame_number = (uintptr_t) de265_get_image_user_data (img) - 1;
  if (frame_number != -1) {
    out_frame = gst_video_decoder_get_frame (decoder, frame_number);
  } else {
    out_frame = NULL;
  }
  if (frame != NULL) {
    gst_video_codec_frame_unref (frame);
  }

  if (out_frame == NULL) {
    GST_ERROR_OBJECT (dec, "No frame available to return");
    return GST_FLOW_ERROR;
  }

  result = gst_video_decoder_allocate_output_frame (decoder, out_frame);
  if (result != GST_FLOW_OK) {
    GST_ERROR_OBJECT (dec, "Failed to allocate output frame");
    return result;
  }

  g_assert (dec->output_state != NULL);
  if (!gst_video_frame_map (&outframe, &dec->output_state->info,
          out_frame->output_buffer, GST_MAP_WRITE)) {
    GST_ERROR_OBJECT (dec, "Failed to map output buffer");
    return GST_FLOW_ERROR;
  }

  for (plane = 0; plane < 3; plane++) {
    int width = de265_get_image_width (img, plane);
    int height = de265_get_image_height (img, plane);
    int srcstride = width;
    int dststride = GST_VIDEO_FRAME_COMP_STRIDE (&outframe, plane);
    const uint8_t *src = de265_get_image_plane (img, plane, &srcstride);
    uint8_t *dest = GST_VIDEO_FRAME_COMP_DATA (&outframe, plane);
    if (srcstride == width && dststride == width) {
      memcpy (dest, src, height * width);
    } else {
      while (height--) {
        memcpy (dest, src, width);
        src += srcstride;
        dest += dststride;
      }
    }
  }
  gst_video_frame_unmap (&outframe);
  return gst_video_decoder_finish_frame (decoder, out_frame);
}
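The per-plane loop above reduces to a small, library-independent helper: when both strides equal the visible row width, the plane is contiguous and a single memcpy suffices; otherwise each row has padding and must be copied individually. A standalone sketch:

#include <stdint.h>
#include <string.h>

static void
copy_plane (uint8_t * dest, int dststride,
    const uint8_t * src, int srcstride, int width, int height)
{
  if (srcstride == width && dststride == width) {
    /* rows are contiguous in both buffers: one bulk copy */
    memcpy (dest, src, (size_t) height * width);
  } else {
    /* padded rows: copy only the visible width of each row */
    while (height--) {
      memcpy (dest, src, width);
      src += srcstride;
      dest += dststride;
    }
  }
}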
Example #3
static GstFlowReturn
gst_vaapidecode_push_decoded_frame (GstVideoDecoder * vdec,
    GstVideoCodecFrame * out_frame)
{
  GstVaapiDecode *const decode = GST_VAAPIDECODE (vdec);
  GstVaapiSurfaceProxy *proxy;
  GstVaapiSurface *surface;
  GstFlowReturn ret;
  const GstVaapiRectangle *crop_rect;
  GstVaapiVideoMeta *meta;
  GstBufferPoolAcquireParams *params = NULL;
  GstVaapiVideoBufferPoolAcquireParams vaapi_params = { {0,}, };
  guint flags, out_flags = 0;
  gboolean alloc_renegotiate, caps_renegotiate;

  if (!GST_VIDEO_CODEC_FRAME_IS_DECODE_ONLY (out_frame)) {
    proxy = gst_video_codec_frame_get_user_data (out_frame);
    surface = GST_VAAPI_SURFACE_PROXY_SURFACE (proxy);
    crop_rect = gst_vaapi_surface_proxy_get_crop_rect (proxy);

    /* In theory, we are not supposed to check for a surface resolution
     * change here, since it should be advertised beforehand by
     * libgstvaapi. But there are issues with that, especially for some
     * vp9 streams where the upstream element sets un-cropped values in
     * set_format(), which makes everything a mess. So it is better to do
     * the explicit check here, irrespective of what notification we get
     * from upstream or libgstvaapi. Also, even if we received a
     * notification from libgstvaapi, the frame we are going to push at
     * this point might not have the notified resolution if there are
     * queued frames in the decoded picture buffer. */
    alloc_renegotiate = is_surface_resolution_changed (decode, surface);
    caps_renegotiate = is_display_resolution_changed (decode, crop_rect);

    if (gst_pad_needs_reconfigure (GST_VIDEO_DECODER_SRC_PAD (vdec))
        || alloc_renegotiate || caps_renegotiate || decode->do_renego) {

      g_atomic_int_set (&decode->do_renego, FALSE);
      if (!gst_vaapidecode_negotiate (decode))
        return GST_FLOW_ERROR;
    }

    gst_vaapi_surface_proxy_set_destroy_notify (proxy,
        (GDestroyNotify) gst_vaapidecode_release, gst_object_ref (decode));

    if (is_src_allocator_dmabuf (decode)) {
      vaapi_params.proxy = gst_vaapi_surface_proxy_ref (proxy);
      params = (GstBufferPoolAcquireParams *) & vaapi_params;
    }

    ret = gst_video_decoder_allocate_output_frame_with_params (vdec, out_frame,
        params);
    if (params)
      gst_vaapi_surface_proxy_unref (vaapi_params.proxy);
    if (ret != GST_FLOW_OK)
      goto error_create_buffer;

    /* if dmabuf was not negotiated, attach the surface proxy to the
     * buffer's vaapi video meta */
    if (!params) {
      meta = gst_buffer_get_vaapi_video_meta (out_frame->output_buffer);
      if (!meta)
        goto error_get_meta;
      gst_vaapi_video_meta_set_surface_proxy (meta, proxy);
    }

    flags = gst_vaapi_surface_proxy_get_flags (proxy);
    if (flags & GST_VAAPI_SURFACE_PROXY_FLAG_CORRUPTED)
      out_flags |= GST_BUFFER_FLAG_CORRUPTED;
    if (flags & GST_VAAPI_SURFACE_PROXY_FLAG_INTERLACED) {
      out_flags |= GST_VIDEO_BUFFER_FLAG_INTERLACED;
      if (flags & GST_VAAPI_SURFACE_PROXY_FLAG_TFF)
        out_flags |= GST_VIDEO_BUFFER_FLAG_TFF;
      if (flags & GST_VAAPI_SURFACE_PROXY_FLAG_RFF)
        out_flags |= GST_VIDEO_BUFFER_FLAG_RFF;
      if (flags & GST_VAAPI_SURFACE_PROXY_FLAG_ONEFIELD)
        out_flags |= GST_VIDEO_BUFFER_FLAG_ONEFIELD;
    }
    GST_BUFFER_FLAG_SET (out_frame->output_buffer, out_flags);

    if (flags & GST_VAAPI_SURFACE_PROXY_FLAG_FFB) {
      GST_BUFFER_FLAG_SET (out_frame->output_buffer,
          GST_VIDEO_BUFFER_FLAG_FIRST_IN_BUNDLE);
    }
#if (USE_GLX || USE_EGL)
    if (decode->has_texture_upload_meta)
      gst_buffer_ensure_texture_upload_meta (out_frame->output_buffer);
#endif
  }

  if (decode->in_segment.rate < 0.0
      && !GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (out_frame)) {
    GST_TRACE_OBJECT (decode, "drop frame in reverse playback");
    gst_video_decoder_release_frame (GST_VIDEO_DECODER (decode), out_frame);
    return GST_FLOW_OK;
  }

  ret = gst_video_decoder_finish_frame (vdec, out_frame);
  if (ret != GST_FLOW_OK)
    goto error_commit_buffer;
  return GST_FLOW_OK;

  /* ERRORS */
error_create_buffer:
  {
    const GstVaapiID surface_id =
        gst_vaapi_surface_get_id (GST_VAAPI_SURFACE_PROXY_SURFACE (proxy));

    GST_ELEMENT_ERROR (vdec, STREAM, FAILED,
        ("Failed to create sink buffer"),
        ("video sink failed to create video buffer for proxy'ed "
            "surface %" GST_VAAPI_ID_FORMAT, GST_VAAPI_ID_ARGS (surface_id)));
    gst_video_decoder_drop_frame (vdec, out_frame);
    return GST_FLOW_ERROR;
  }
error_get_meta:
  {
    GST_ELEMENT_ERROR (vdec, STREAM, FAILED,
        ("Failed to get vaapi video meta attached to video buffer"),
        ("Failed to get vaapi video meta attached to video buffer"));
    gst_video_decoder_drop_frame (vdec, out_frame);
    return GST_FLOW_ERROR;
  }
error_commit_buffer:
  {
    GST_INFO_OBJECT (decode, "downstream element rejected the frame (%s [%d])",
        gst_flow_get_name (ret), ret);
    return ret;
  }
}
Example #4
static GstFlowReturn
gst_vaapidecode_push_decoded_frame (GstVideoDecoder * vdec,
    GstVideoCodecFrame * out_frame)
{
  GstVaapiDecode *const decode = GST_VAAPIDECODE (vdec);
  GstVaapiSurfaceProxy *proxy;
  GstFlowReturn ret;
  const GstVaapiRectangle *crop_rect;
  GstVaapiVideoMeta *meta;
  guint flags, out_flags = 0;

  if (!GST_VIDEO_CODEC_FRAME_IS_DECODE_ONLY (out_frame)) {
    proxy = gst_video_codec_frame_get_user_data (out_frame);

    /* reconfigure if un-cropped surface resolution changed */
    if (is_surface_resolution_changed (vdec, GST_VAAPI_SURFACE_PROXY_SURFACE (proxy)))
      gst_vaapidecode_negotiate (decode);

    gst_vaapi_surface_proxy_set_destroy_notify (proxy,
        (GDestroyNotify) gst_vaapidecode_release, gst_object_ref (decode));

    ret = gst_video_decoder_allocate_output_frame (vdec, out_frame);
    if (ret != GST_FLOW_OK)
      goto error_create_buffer;

    meta = gst_buffer_get_vaapi_video_meta (out_frame->output_buffer);
    if (!meta)
      goto error_get_meta;
    gst_vaapi_video_meta_set_surface_proxy (meta, proxy);

    flags = gst_vaapi_surface_proxy_get_flags (proxy);
    if (flags & GST_VAAPI_SURFACE_PROXY_FLAG_CORRUPTED)
      out_flags |= GST_BUFFER_FLAG_CORRUPTED;
    if (flags & GST_VAAPI_SURFACE_PROXY_FLAG_INTERLACED) {
      out_flags |= GST_VIDEO_BUFFER_FLAG_INTERLACED;
      if (flags & GST_VAAPI_SURFACE_PROXY_FLAG_TFF)
        out_flags |= GST_VIDEO_BUFFER_FLAG_TFF;
      if (flags & GST_VAAPI_SURFACE_PROXY_FLAG_RFF)
        out_flags |= GST_VIDEO_BUFFER_FLAG_RFF;
      if (flags & GST_VAAPI_SURFACE_PROXY_FLAG_ONEFIELD)
        out_flags |= GST_VIDEO_BUFFER_FLAG_ONEFIELD;
    }
    GST_BUFFER_FLAG_SET (out_frame->output_buffer, out_flags);

#if GST_CHECK_VERSION(1,5,0)
    /* First-in-bundle flag only appeared in 1.5 dev */
    if (flags & GST_VAAPI_SURFACE_PROXY_FLAG_FFB) {
      GST_BUFFER_FLAG_SET (out_frame->output_buffer,
          GST_VIDEO_BUFFER_FLAG_FIRST_IN_BUNDLE);
    }
#endif

    crop_rect = gst_vaapi_surface_proxy_get_crop_rect (proxy);
    if (crop_rect) {
      GstVideoCropMeta *const crop_meta =
          gst_buffer_add_video_crop_meta (out_frame->output_buffer);
      if (crop_meta) {
        crop_meta->x = crop_rect->x;
        crop_meta->y = crop_rect->y;
        crop_meta->width = crop_rect->width;
        crop_meta->height = crop_rect->height;
      }
    }
#if (USE_GLX || USE_EGL)
    if (decode->has_texture_upload_meta)
      gst_buffer_ensure_texture_upload_meta (out_frame->output_buffer);
#endif
  }

  ret = gst_video_decoder_finish_frame (vdec, out_frame);
  if (ret != GST_FLOW_OK)
    goto error_commit_buffer;

  gst_video_codec_frame_unref (out_frame);
  return GST_FLOW_OK;

  /* ERRORS */
error_create_buffer:
  {
    const GstVaapiID surface_id =
        gst_vaapi_surface_get_id (GST_VAAPI_SURFACE_PROXY_SURFACE (proxy));

    GST_ELEMENT_ERROR (vdec, STREAM, FAILED,
        ("Failed to create sink buffer"),
        ("video sink failed to create video buffer for proxy'ed "
            "surface %" GST_VAAPI_ID_FORMAT, GST_VAAPI_ID_ARGS (surface_id)));
    gst_video_decoder_drop_frame (vdec, out_frame);
    gst_video_codec_frame_unref (out_frame);
    return GST_FLOW_ERROR;
  }
error_get_meta:
  {
    GST_ELEMENT_ERROR (vdec, STREAM, FAILED,
        ("Failed to get vaapi video meta attached to video buffer"),
        ("Failed to get vaapi video meta attached to video buffer"));
    gst_video_decoder_drop_frame (vdec, out_frame);
    gst_video_codec_frame_unref (out_frame);
    return GST_FLOW_ERROR;
  }
error_commit_buffer:
  {
    GST_INFO_OBJECT (decode, "downstream element rejected the frame (%s [%d])",
        gst_flow_get_name (ret), ret);
    gst_video_codec_frame_unref (out_frame);
    return ret;
  }
}
Example #5
static GstFlowReturn
handle_slice (GstMpeg2dec * mpeg2dec, const mpeg2_info_t * info)
{
  GstFlowReturn ret = GST_FLOW_OK;
  GstVideoCodecFrame *frame;
  const mpeg2_picture_t *picture;
  gboolean key_frame = FALSE;
  GstVideoCodecState *state;

  GST_DEBUG_OBJECT (mpeg2dec,
      "fbuf:%p display_picture:%p current_picture:%p fbuf->id:%d",
      info->display_fbuf, info->display_picture, info->current_picture,
      GPOINTER_TO_INT (info->display_fbuf->id) - 1);

  /* Note: the fbuf id is shifted by 1 to distinguish between NULL
   * values (used by dummy buffers) and 'real' values */
  frame = gst_video_decoder_get_frame (GST_VIDEO_DECODER (mpeg2dec),
      GPOINTER_TO_INT (info->display_fbuf->id) - 1);
  if (!frame)
    goto no_frame;
  picture = info->display_picture;
  key_frame = (picture->flags & PIC_MASK_CODING_TYPE) == PIC_FLAG_CODING_TYPE_I;

  GST_DEBUG_OBJECT (mpeg2dec, "picture flags: %d, type: %d, keyframe: %d",
      picture->flags, picture->flags & PIC_MASK_CODING_TYPE, key_frame);

  if (key_frame) {
    mpeg2_skip (mpeg2dec->decoder, 0);
  }

  if (mpeg2dec->discont_state == MPEG2DEC_DISC_NEW_KEYFRAME && key_frame)
    mpeg2dec->discont_state = MPEG2DEC_DISC_NONE;

  if (picture->flags & PIC_FLAG_SKIP) {
    GST_DEBUG_OBJECT (mpeg2dec, "dropping buffer because of skip flag");
    ret = gst_video_decoder_drop_frame (GST_VIDEO_DECODER (mpeg2dec), frame);
    mpeg2_skip (mpeg2dec->decoder, 1);
    return ret;
  }

  if (mpeg2dec->discont_state != MPEG2DEC_DISC_NONE) {
    GST_DEBUG_OBJECT (mpeg2dec, "dropping buffer, discont state %d",
        mpeg2dec->discont_state);
    ret = gst_video_decoder_drop_frame (GST_VIDEO_DECODER (mpeg2dec), frame);
    return ret;
  }

  state = gst_video_decoder_get_output_state (GST_VIDEO_DECODER (mpeg2dec));

  /* do cropping if the target region is smaller than the input one */
  if (mpeg2dec->need_cropping && !mpeg2dec->has_cropping) {
    GstVideoFrame *vframe;

    if (gst_video_decoder_get_max_decode_time (GST_VIDEO_DECODER (mpeg2dec),
            frame) < 0) {
      GST_DEBUG_OBJECT (mpeg2dec, "dropping buffer crop, too late");
      ret = gst_video_decoder_drop_frame (GST_VIDEO_DECODER (mpeg2dec), frame);
      goto beach;
    }

    GST_DEBUG_OBJECT (mpeg2dec, "cropping buffer");
    vframe = gst_mpeg2dec_get_buffer (mpeg2dec, frame->system_frame_number);
    g_assert (vframe != NULL);
    ret = gst_mpeg2dec_crop_buffer (mpeg2dec, frame, vframe);
  }

  ret = gst_video_decoder_finish_frame (GST_VIDEO_DECODER (mpeg2dec), frame);

beach:
  gst_video_codec_state_unref (state);
  return ret;

no_frame:
  {
    GST_DEBUG ("display buffer does not have a valid frame");
    return GST_FLOW_OK;
  }
}
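The fbuf-id comment above describes a small pointer-tagging trick: the system frame number is stored shifted by +1 so that a NULL id (0) still identifies dummy buffers, and reading back yields -1 when no frame was attached. A sketch of both directions using the same GLib macros:

#include <glib.h>

/* store: keep frame numbers starting from 0 distinguishable from NULL */
static void
store_frame_number (void **fbuf_id, gint system_frame_number)
{
  *fbuf_id = GINT_TO_POINTER (system_frame_number + 1);
}

/* load: yields -1 when fbuf_id is NULL (a dummy buffer) */
static gint
load_frame_number (void *fbuf_id)
{
  return GPOINTER_TO_INT (fbuf_id) - 1;
}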
Example #6
static GstFlowReturn
gst_vdp_h264_dec_handle_frame (GstVideoDecoder * video_decoder,
    GstVideoCodecFrame * frame)
{
  GstVdpH264Dec *h264_dec = GST_VDP_H264_DEC (video_decoder);
  GstH264Meta *h264_meta;
  GstH264Frame *h264_frame;
  GList *tmp;
  GstFlowReturn ret;
  VdpPictureInfoH264 info;
  VdpBitstreamBuffer *bufs;
  GstH264SliceHdr *first_slice;
  guint i;
  GstMapInfo map;

  GST_DEBUG ("handle_frame");

  h264_meta = gst_buffer_get_h264_meta (frame->input_buffer);
  if (G_UNLIKELY (h264_meta == NULL))
    goto no_h264_meta;

  if (G_UNLIKELY (h264_meta->num_slices == 0))
    goto no_slices;

  /* Handle PPS/SPS/SEI if present */
  if (h264_meta->sps) {
    for (tmp = h264_meta->sps; tmp; tmp = tmp->next) {
      GstH264SPS *sps = (GstH264SPS *) tmp->data;
      GST_LOG_OBJECT (h264_dec, "Storing SPS %d", sps->id);
      h264_dec->sps[sps->id] = g_slice_dup (GstH264SPS, sps);
    }
  }
  if (h264_meta->pps) {
    for (tmp = h264_meta->pps; tmp; tmp = tmp->next) {
      GstH264PPS *pps = (GstH264PPS *) tmp->data;
      GST_LOG_OBJECT (h264_dec, "Storing PPS %d", pps->id);
      h264_dec->pps[pps->id] = g_slice_dup (GstH264PPS, pps);
      /* Adjust pps pointer */
      h264_dec->pps[pps->id]->sequence = h264_dec->sps[pps->sps_id];
    }
  }

  first_slice = &h264_meta->slices[0];

  if (!h264_dec->got_idr && first_slice->slice_type != GST_H264_NAL_SLICE_IDR)
    goto no_idr;

  /* Handle slices */
  for (i = 0; i < h264_meta->num_slices; i++) {
    GstH264SliceHdr *slice = &h264_meta->slices[i];

    GST_LOG_OBJECT (h264_dec, "Handling slice #%d", i);
    slice->pps = h264_dec->pps[slice->pps_id];
  }

  if (first_slice->slice_type == GST_H264_NAL_SLICE_IDR) {
    ret = gst_vdp_h264_dec_idr (h264_dec, frame, first_slice);
    if (ret == GST_FLOW_OK)
      h264_dec->got_idr = TRUE;
    else
      goto skip_frame;
  }

  h264_frame = g_slice_new0 (GstH264Frame);
  gst_video_codec_frame_set_user_data (frame, h264_frame,
      (GDestroyNotify) gst_h264_frame_free);

  gst_vdp_h264_dec_init_frame_info (h264_dec, h264_frame, first_slice);
  h264_frame->frame = frame;
  gst_vdp_h264_dec_fill_info (&info, h264_dec, h264_frame, first_slice);
  info.slice_count = h264_meta->num_slices;

  if (!gst_buffer_map (frame->input_buffer, &map, GST_MAP_READ))
    goto map_fail;
  bufs = gst_vdp_h264_dec_create_bitstream_buffers (h264_dec, h264_meta, &map);

  ret = gst_vdp_decoder_render (GST_VDP_DECODER (h264_dec),
      (VdpPictureInfo *) & info, h264_meta->num_slices, bufs, frame);
  g_free (bufs);
  gst_buffer_unmap (frame->input_buffer, &map);

  if (ret != GST_FLOW_OK)
    goto render_fail;

  /* DPB handling */
  return gst_vdp_h264_dec_handle_dpb (h264_dec, h264_frame, first_slice);

  /* EARLY exit */
no_idr:
  {
    GST_DEBUG_OBJECT (video_decoder, "Didn't see a IDR yet, skipping frame");
    return gst_video_decoder_finish_frame (video_decoder, frame);
  }

skip_frame:
  {
    GST_DEBUG_OBJECT (video_decoder, "Skipping frame");
    return gst_video_decoder_finish_frame (video_decoder, frame);
  }

  /* ERRORS */
no_h264_meta:
  {
    GST_ERROR_OBJECT (video_decoder, "Input buffer doesn't have GstH264Meta");
    return GST_FLOW_ERROR;
  }

no_slices:
  {
    GST_ERROR_OBJECT (video_decoder, "Input buffer doesn't have any slices");
    return GST_FLOW_ERROR;
  }

map_fail:
  {
    GST_ERROR_OBJECT (video_decoder, "Failed to map input buffer for READ");
    return GST_FLOW_ERROR;
  }

render_fail:
  {
    GST_ERROR_OBJECT (video_decoder, "Failed to render : %s",
        gst_flow_get_name (ret));
    gst_video_decoder_drop_frame (video_decoder, frame);
    return ret;
  }
}
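The SPS/PPS handling above follows a common parameter-set caching pattern: parsed sets are copied into id-indexed tables so later slices can resolve their pps_id references. A sketch under the assumption that the struct fields match the example (sps->id, pps->id, pps->sps_id, pps->sequence); the table sizes follow the H.264 limits of 32 SPS and 256 PPS. Like the example, it leaks any previously stored entry on overwrite.

#include <glib.h>
#include <gst/codecparsers/gsth264parser.h>

static GstH264SPS *sps_table[32];
static GstH264PPS *pps_table[256];

static void
cache_param_sets (GList * sps_list, GList * pps_list)
{
  GList *tmp;

  for (tmp = sps_list; tmp; tmp = tmp->next) {
    GstH264SPS *sps = (GstH264SPS *) tmp->data;
    sps_table[sps->id] = g_slice_dup (GstH264SPS, sps);
  }
  for (tmp = pps_list; tmp; tmp = tmp->next) {
    GstH264PPS *pps = (GstH264PPS *) tmp->data;
    pps_table[pps->id] = g_slice_dup (GstH264PPS, pps);
    /* re-link the copied PPS to its owning SPS, as the example does */
    pps_table[pps->id]->sequence = sps_table[pps->sps_id];
  }
}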
Example #7
static GstFlowReturn
gst_jpeg_dec_handle_frame (GstVideoDecoder * bdec, GstVideoCodecFrame * frame)
{
  GstFlowReturn ret = GST_FLOW_OK;
  GstJpegDec *dec = (GstJpegDec *) bdec;
  GstVideoFrame vframe;
  gint width, height;
  gint r_h, r_v;
  guint code, hdr_ok;
  gboolean need_unmap = TRUE;
  GstVideoCodecState *state = NULL;

  dec->current_frame = frame;
  gst_buffer_map (frame->input_buffer, &dec->current_frame_map, GST_MAP_READ);
  gst_jpeg_dec_fill_input_buffer (&dec->cinfo);

  if (setjmp (dec->jerr.setjmp_buffer)) {
    code = dec->jerr.pub.msg_code;

    if (code == JERR_INPUT_EOF) {
      GST_DEBUG ("jpeg input EOF error, we probably need more data");
      goto need_more_data;
    }
    goto decode_error;
  }

  /* read header */
  hdr_ok = jpeg_read_header (&dec->cinfo, TRUE);
  if (G_UNLIKELY (hdr_ok != JPEG_HEADER_OK)) {
    GST_WARNING_OBJECT (dec, "reading the header failed, %d", hdr_ok);
  }

  GST_LOG_OBJECT (dec, "num_components=%d", dec->cinfo.num_components);
  GST_LOG_OBJECT (dec, "jpeg_color_space=%d", dec->cinfo.jpeg_color_space);

  if (!dec->cinfo.num_components || !dec->cinfo.comp_info)
    goto components_not_supported;

  r_h = dec->cinfo.comp_info[0].h_samp_factor;
  r_v = dec->cinfo.comp_info[0].v_samp_factor;

  GST_LOG_OBJECT (dec, "r_h = %d, r_v = %d", r_h, r_v);

  if (dec->cinfo.num_components > 3)
    goto components_not_supported;

  /* verify color space expectation to avoid going *boom* or bogus output */
  if (dec->cinfo.jpeg_color_space != JCS_YCbCr &&
      dec->cinfo.jpeg_color_space != JCS_GRAYSCALE &&
      dec->cinfo.jpeg_color_space != JCS_RGB)
    goto unsupported_colorspace;

#ifndef GST_DISABLE_GST_DEBUG
  {
    gint i;

    for (i = 0; i < dec->cinfo.num_components; ++i) {
      GST_LOG_OBJECT (dec, "[%d] h_samp_factor=%d, v_samp_factor=%d, cid=%d",
          i, dec->cinfo.comp_info[i].h_samp_factor,
          dec->cinfo.comp_info[i].v_samp_factor,
          dec->cinfo.comp_info[i].component_id);
    }
  }
#endif

  /* prepare for raw output */
  dec->cinfo.do_fancy_upsampling = FALSE;
  dec->cinfo.do_block_smoothing = FALSE;
  dec->cinfo.out_color_space = dec->cinfo.jpeg_color_space;
  dec->cinfo.dct_method = dec->idct_method;
  dec->cinfo.raw_data_out = TRUE;

  GST_LOG_OBJECT (dec, "starting decompress");
  guarantee_huff_tables (&dec->cinfo);
  if (!jpeg_start_decompress (&dec->cinfo)) {
    GST_WARNING_OBJECT (dec, "failed to start decompression cycle");
  }

  /* sanity checks to get safe and reasonable output */
  switch (dec->cinfo.jpeg_color_space) {
    case JCS_GRAYSCALE:
      if (dec->cinfo.num_components != 1)
        goto invalid_yuvrgbgrayscale;
      break;
    case JCS_RGB:
      if (dec->cinfo.num_components != 3 || dec->cinfo.max_v_samp_factor > 1 ||
          dec->cinfo.max_h_samp_factor > 1)
        goto invalid_yuvrgbgrayscale;
      break;
    case JCS_YCbCr:
      if (dec->cinfo.num_components != 3 ||
          r_v > 2 || r_v < dec->cinfo.comp_info[0].v_samp_factor ||
          r_v < dec->cinfo.comp_info[1].v_samp_factor ||
          r_h < dec->cinfo.comp_info[0].h_samp_factor ||
          r_h < dec->cinfo.comp_info[1].h_samp_factor)
        goto invalid_yuvrgbgrayscale;
      break;
    default:
      g_assert_not_reached ();
      break;
  }

  width = dec->cinfo.output_width;
  height = dec->cinfo.output_height;

  if (G_UNLIKELY (width < MIN_WIDTH || width > MAX_WIDTH ||
          height < MIN_HEIGHT || height > MAX_HEIGHT))
    goto wrong_size;

  gst_jpeg_dec_negotiate (dec, width, height, dec->cinfo.jpeg_color_space);

  state = gst_video_decoder_get_output_state (bdec);
  ret = gst_video_decoder_alloc_output_frame (bdec, frame);
  if (G_UNLIKELY (ret != GST_FLOW_OK))
    goto alloc_failed;

  if (!gst_video_frame_map (&vframe, &state->info, frame->output_buffer,
          GST_MAP_READWRITE))
    goto alloc_failed;

  GST_LOG_OBJECT (dec, "width %d, height %d", width, height);

  if (dec->cinfo.jpeg_color_space == JCS_RGB) {
    gst_jpeg_dec_decode_rgb (dec, &vframe);
  } else if (dec->cinfo.jpeg_color_space == JCS_GRAYSCALE) {
    gst_jpeg_dec_decode_grayscale (dec, &vframe);
  } else {
    GST_LOG_OBJECT (dec, "decompressing (reqired scanline buffer height = %u)",
        dec->cinfo.rec_outbuf_height);

    /* For some widths jpeglib requires more horizontal padding than I420 
     * provides. In those cases we need to decode into separate buffers and then
     * copy over the data into our final picture buffer, otherwise jpeglib might
     * write over the end of a line into the beginning of the next line,
     * resulting in blocky artifacts on the left side of the picture. */
    if (G_UNLIKELY (width % (dec->cinfo.max_h_samp_factor * DCTSIZE) != 0
            || dec->cinfo.comp_info[0].h_samp_factor != 2
            || dec->cinfo.comp_info[1].h_samp_factor != 1
            || dec->cinfo.comp_info[2].h_samp_factor != 1)) {
      GST_CAT_LOG_OBJECT (GST_CAT_PERFORMANCE, dec,
          "indirect decoding using extra buffer copy");
      gst_jpeg_dec_decode_indirect (dec, &vframe, r_v, r_h,
          dec->cinfo.num_components);
    } else {
      ret = gst_jpeg_dec_decode_direct (dec, &vframe);

      if (G_UNLIKELY (ret != GST_FLOW_OK))
        goto decode_direct_failed;
    }
  }

  gst_video_frame_unmap (&vframe);

  GST_LOG_OBJECT (dec, "decompressing finished");
  jpeg_finish_decompress (&dec->cinfo);

  /* reset error count on successful decode */
  dec->error_count = 0;

  gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
  ret = gst_video_decoder_finish_frame (bdec, frame);
  need_unmap = FALSE;

done:

exit:

  if (G_UNLIKELY (ret == GST_FLOW_ERROR)) {
    jpeg_abort_decompress (&dec->cinfo);
    ret = gst_jpeg_dec_post_error_or_warning (dec);
  }

  if (need_unmap)
    gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);

  if (state)
    gst_video_codec_state_unref (state);

  return ret;

  /* special cases */
need_more_data:
  {
    GST_LOG_OBJECT (dec, "we need more data");
    ret = GST_FLOW_OK;
    goto exit;
  }
  /* ERRORS */
wrong_size:
  {
    gst_jpeg_dec_set_error (dec, GST_FUNCTION, __LINE__,
        "Picture is too small or too big (%ux%u)", width, height);
    ret = GST_FLOW_ERROR;
    goto done;
  }
decode_error:
  {
    gchar err_msg[JMSG_LENGTH_MAX];

    dec->jerr.pub.format_message ((j_common_ptr) (&dec->cinfo), err_msg);

    gst_jpeg_dec_set_error (dec, GST_FUNCTION, __LINE__,
        "Decode error #%u: %s", code, err_msg);

    gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
    gst_video_decoder_drop_frame (bdec, frame);
    need_unmap = FALSE;

    ret = GST_FLOW_ERROR;
    goto done;
  }
decode_direct_failed:
  {
    /* already posted an error message */
    jpeg_abort_decompress (&dec->cinfo);
    goto done;
  }
alloc_failed:
  {
    const gchar *reason;

    reason = gst_flow_get_name (ret);

    GST_DEBUG_OBJECT (dec, "failed to alloc buffer, reason %s", reason);
    /* Reset for next time */
    jpeg_abort_decompress (&dec->cinfo);
    if (ret != GST_FLOW_EOS && ret != GST_FLOW_FLUSHING &&
        ret != GST_FLOW_NOT_LINKED) {
      gst_jpeg_dec_set_error (dec, GST_FUNCTION, __LINE__,
          "Buffer allocation failed, reason: %s", reason);
    }
    goto exit;
  }
components_not_supported:
  {
    gst_jpeg_dec_set_error (dec, GST_FUNCTION, __LINE__,
        "number of components not supported: %d (max 3)",
        dec->cinfo.num_components);
    ret = GST_FLOW_ERROR;
    goto done;
  }
unsupported_colorspace:
  {
    gst_jpeg_dec_set_error (dec, GST_FUNCTION, __LINE__,
        "Picture has unknown or unsupported colourspace");
    ret = GST_FLOW_ERROR;
    goto done;
  }
invalid_yuvrgbgrayscale:
  {
    gst_jpeg_dec_set_error (dec, GST_FUNCTION, __LINE__,
        "Picture is corrupt or unhandled YUV/RGB/grayscale layout");
    ret = GST_FLOW_ERROR;
    goto done;
  }
}
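The need_unmap flag in the function above is a common single-cleanup idiom: map the input once, clear the flag on the paths that already unmapped (the success path and decode_error), and let the shared exit code unmap for every other goto. A condensed sketch of the idiom, with do_work() as a hypothetical worker:

#include <gst/gst.h>

/* hypothetical worker */
static gboolean do_work (const guint8 * data, gsize size);

static GstFlowReturn
process_buffer (GstBuffer * buf)
{
  GstMapInfo map;
  GstFlowReturn ret = GST_FLOW_OK;
  gboolean need_unmap = TRUE;

  if (!gst_buffer_map (buf, &map, GST_MAP_READ))
    return GST_FLOW_ERROR;

  if (!do_work (map.data, map.size)) {
    ret = GST_FLOW_ERROR;
    goto out;
  }

  gst_buffer_unmap (buf, &map);   /* success path unmaps eagerly */
  need_unmap = FALSE;

out:
  if (need_unmap)
    gst_buffer_unmap (buf, &map);
  return ret;
}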
Example #8
static GstFlowReturn
gst_mfc_dec_dequeue_output (GstMFCDec * self)
{
  GstFlowReturn ret = GST_FLOW_OK;
  gint mfc_ret;
  GstVideoCodecFrame *frame = NULL;
  GstBuffer *outbuf = NULL;
  struct mfc_buffer *mfc_outbuf = NULL;
  gint width, height;
  gint crop_left, crop_top, crop_width, crop_height;
  gint src_ystride, src_uvstride;
  GstVideoCodecState *state = NULL;
  gint64 deadline;
  struct timeval timestamp;

  if (!self->initialized) {
    GST_DEBUG_OBJECT (self, "Initializing decoder");
    if ((mfc_ret = mfc_dec_init_output (self->context, 1)) < 0)
      goto initialize_error;
    self->initialized = TRUE;
  }

  while ((mfc_ret = mfc_dec_output_available (self->context)) > 0) {
    GST_DEBUG_OBJECT (self, "Dequeueing output");

    mfc_dec_get_output_size (self->context, &width, &height);
    mfc_dec_get_output_stride (self->context, &src_ystride, &src_uvstride);
    mfc_dec_get_crop_size (self->context, &crop_left, &crop_top, &crop_width,
        &crop_height);

    GST_DEBUG_OBJECT (self, "Have output: width %d, height %d, "
        "Y stride %d, UV stride %d, "
        "crop_left %d, crop_right %d, "
        "crop_width %d, crop_height %d", width, height, src_ystride,
        src_uvstride, crop_left, crop_top, crop_width, crop_height);

    state = gst_video_decoder_get_output_state (GST_VIDEO_DECODER (self));

    if (!state || self->width != width || self->height != height ||
        self->src_stride[0] != src_ystride
        || self->src_stride[1] != src_uvstride
        || self->crop_left != crop_left || self->crop_top != crop_top
        || self->crop_width != crop_width || self->crop_height != crop_height) {
      self->width = width;
      self->height = height;
      self->crop_left = crop_left;
      self->crop_top = crop_top;
      self->crop_width = crop_width;
      self->crop_height = crop_height;
      self->src_stride[0] = src_ystride;
      self->src_stride[1] = src_uvstride;
      self->src_stride[2] = 0;

      if (!gst_video_decoder_negotiate (GST_VIDEO_DECODER (self)))
        goto negotiate_error;

      if (state)
        gst_video_codec_state_unref (state);
      state = gst_video_decoder_get_output_state (GST_VIDEO_DECODER (self));
    }

    if ((mfc_ret =
            mfc_dec_dequeue_output (self->context, &mfc_outbuf,
                &timestamp)) < 0) {
      if (mfc_ret == -2) {
        GST_DEBUG_OBJECT (self, "Timeout dequeueing output, trying again");
        mfc_ret =
            mfc_dec_dequeue_output (self->context, &mfc_outbuf, &timestamp);
      }

      if (mfc_ret < 0)
        goto dequeue_error;
    }

    g_assert (mfc_outbuf != NULL);

    GST_DEBUG_OBJECT (self, "Got output buffer with ID %ld", timestamp.tv_sec);

    frame = NULL;
    if (timestamp.tv_sec != -1)
      frame =
          gst_video_decoder_get_frame (GST_VIDEO_DECODER (self),
          timestamp.tv_sec);

    if (frame) {
      deadline =
          gst_video_decoder_get_max_decode_time (GST_VIDEO_DECODER (self),
          frame);
      if (deadline < 0) {
        GST_LOG_OBJECT (self,
            "Dropping too late frame: deadline %" G_GINT64_FORMAT, deadline);
        ret = gst_video_decoder_drop_frame (GST_VIDEO_DECODER (self), frame);
        frame = NULL;
        outbuf = NULL;
        goto done;
      }

      ret =
          gst_video_decoder_allocate_output_frame (GST_VIDEO_DECODER (self),
          frame);

      if (ret != GST_FLOW_OK)
        goto alloc_error;

      outbuf = frame->output_buffer;
    } else {
      GST_WARNING_OBJECT (self, "Didn't find a frame for ID %ld",
          timestamp.tv_sec);

      outbuf =
          gst_video_decoder_allocate_output_buffer (GST_VIDEO_DECODER (self));

      if (!outbuf) {
        ret = GST_FLOW_ERROR;
        goto alloc_error;
      }
    }

    ret = gst_mfc_dec_fill_outbuf (self, outbuf, mfc_outbuf, state);
    if (ret != GST_FLOW_OK)
      goto fill_error;

    if (frame) {
      ret = gst_video_decoder_finish_frame (GST_VIDEO_DECODER (self), frame);
      frame = NULL;
      outbuf = NULL;
    } else {
      ret = gst_pad_push (GST_VIDEO_DECODER_SRC_PAD (self), outbuf);
      outbuf = NULL;
    }

    if (ret != GST_FLOW_OK)
      GST_INFO_OBJECT (self, "Pushing frame returned: %s",
          gst_flow_get_name (ret));

  done:
    if (mfc_outbuf) {
      if ((mfc_ret = mfc_dec_enqueue_output (self->context, mfc_outbuf)) < 0)
        goto enqueue_error;
    }

    if (!frame && outbuf)
      gst_buffer_unref (outbuf);
    if (frame)
      gst_video_codec_frame_unref (frame);
    if (state)
      gst_video_codec_state_unref (state);

    frame = NULL;
    outbuf = NULL;

    if (ret != GST_FLOW_OK)
      break;
  }

  return ret;

initialize_error:
  {
    GST_ELEMENT_ERROR (self, LIBRARY, INIT, ("Failed to initialize output"),
        ("mfc_dec_init: %d", mfc_ret));
    ret = GST_FLOW_ERROR;
    goto done;
  }

negotiate_error:
  {
    GST_ELEMENT_ERROR (self, CORE, NEGOTIATION, ("Failed to negotiate"),
        (NULL));
    ret = GST_FLOW_NOT_NEGOTIATED;
    goto done;
  }

dequeue_error:
  {
    GST_ELEMENT_ERROR (self, LIBRARY, FAILED,
        ("Failed to dequeue output buffer"), ("mfc_dec_dequeue_output: %d",
            mfc_ret));
    ret = GST_FLOW_ERROR;
    goto done;
  }

alloc_error:
  {
    GST_ELEMENT_ERROR (self, CORE, FAILED, ("Failed to allocate output buffer"),
        (NULL));
    ret = GST_FLOW_ERROR;
    goto done;
  }

fill_error:
  {
    GST_ELEMENT_ERROR (self, LIBRARY, FAILED, ("Failed to fill output buffer"),
        (NULL));
    ret = GST_FLOW_ERROR;
    goto done;
  }

enqueue_error:
  {
    GST_ELEMENT_ERROR (self, LIBRARY, FAILED,
        ("Failed to enqueue output buffer"), ("mfc_dec_enqueue_output: %d",
            mfc_ret));
    ret = GST_FLOW_ERROR;
    goto done;
  }
}
Example #9
static void
gst_v4l2_video_dec_loop (GstVideoDecoder * decoder)
{
  GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);
  GstV4l2BufferPool *v4l2_pool = GST_V4L2_BUFFER_POOL (self->v4l2capture->pool);
  GstBufferPool *pool;
  GstVideoCodecFrame *frame;
  GstBuffer *buffer = NULL;
  GstFlowReturn ret;

  GST_LOG_OBJECT (decoder, "Allocate output buffer");

  do {
    /* We cannot use the base class allocate helper since it takes the
     * internal stream lock. We know that the acquire may need to poll until
     * more frames come in, and holding this lock would prevent that.
     */
    pool = gst_video_decoder_get_buffer_pool (decoder);

    /* Pool may be NULL if we started going to READY state */
    if (pool == NULL) {
      ret = GST_FLOW_FLUSHING;
      goto beach;
    }

    ret = gst_buffer_pool_acquire_buffer (pool, &buffer, NULL);
    g_object_unref (pool);

    if (ret != GST_FLOW_OK)
      goto beach;

    GST_LOG_OBJECT (decoder, "Process output buffer");
    ret = gst_v4l2_buffer_pool_process (v4l2_pool, &buffer);

  } while (ret == GST_V4L2_FLOW_CORRUPTED_BUFFER);

  if (ret != GST_FLOW_OK)
    goto beach;

  frame = gst_v4l2_video_dec_get_oldest_frame (decoder);

  if (frame) {
    frame->output_buffer = buffer;
    buffer = NULL;
    ret = gst_video_decoder_finish_frame (decoder, frame);

    if (ret != GST_FLOW_OK)
      goto beach;
  } else {
    GST_WARNING_OBJECT (decoder, "Decoder is producing too many buffers");
    gst_buffer_unref (buffer);
  }

  return;

beach:
  GST_DEBUG_OBJECT (decoder, "Leaving output thread: %s",
      gst_flow_get_name (ret));

  gst_buffer_replace (&buffer, NULL);
  self->output_flow = ret;
  g_atomic_int_set (&self->processing, FALSE);
  gst_v4l2_object_unlock (self->v4l2output);
  gst_pad_pause_task (decoder->srcpad);
}
Example #10
static GstFlowReturn
gst_openjpeg_dec_handle_frame (GstVideoDecoder * decoder,
    GstVideoCodecFrame * frame)
{
  GstOpenJPEGDec *self = GST_OPENJPEG_DEC (decoder);
  GstFlowReturn ret = GST_FLOW_OK;
  gint64 deadline;
  GstMapInfo map;
  opj_dinfo_t *dec;
  opj_event_mgr_t callbacks;
  opj_cio_t *io;
  opj_image_t *image;
  GstVideoFrame vframe;
  opj_dparameters_t params;

  GST_DEBUG_OBJECT (self, "Handling frame");

  deadline = gst_video_decoder_get_max_decode_time (decoder, frame);
  if (deadline < 0) {
    GST_LOG_OBJECT (self, "Dropping too late frame: deadline %" G_GINT64_FORMAT,
        deadline);
    ret = gst_video_decoder_drop_frame (decoder, frame);
    return ret;
  }

  dec = opj_create_decompress (self->codec_format);
  if (!dec)
    goto initialization_error;

  if (G_UNLIKELY (gst_debug_category_get_threshold (GST_CAT_DEFAULT) >=
          GST_LEVEL_TRACE)) {
    callbacks.error_handler = gst_openjpeg_dec_opj_error;
    callbacks.warning_handler = gst_openjpeg_dec_opj_warning;
    callbacks.info_handler = gst_openjpeg_dec_opj_info;
    opj_set_event_mgr ((opj_common_ptr) dec, &callbacks, self);
  } else {
    opj_set_event_mgr ((opj_common_ptr) dec, NULL, NULL);
  }

  params = self->params;
  if (self->ncomps)
    params.jpwl_exp_comps = self->ncomps;
  opj_setup_decoder (dec, &params);

  if (!gst_buffer_map (frame->input_buffer, &map, GST_MAP_READ))
    goto map_read_error;

  io = opj_cio_open ((opj_common_ptr) dec, map.data + (self->is_jp2c ? 8 : 0),
      map.size - (self->is_jp2c ? 8 : 0));
  if (!io)
    goto open_error;

  image = opj_decode (dec, io);
  if (!image)
    goto decode_error;

  gst_buffer_unmap (frame->input_buffer, &map);

  ret = gst_openjpeg_dec_negotiate (self, image);
  if (ret != GST_FLOW_OK)
    goto negotiate_error;

  ret = gst_video_decoder_allocate_output_frame (decoder, frame);
  if (ret != GST_FLOW_OK)
    goto allocate_error;

  if (!gst_video_frame_map (&vframe, &self->output_state->info,
          frame->output_buffer, GST_MAP_WRITE))
    goto map_write_error;

  self->fill_frame (&vframe, image);

  gst_video_frame_unmap (&vframe);

  opj_image_destroy (image);
  opj_cio_close (io);
  opj_destroy_decompress (dec);

  ret = gst_video_decoder_finish_frame (decoder, frame);

  return ret;

initialization_error:
  {
    gst_video_codec_frame_unref (frame);
    GST_ELEMENT_ERROR (self, LIBRARY, INIT,
        ("Failed to initialize OpenJPEG decoder"), (NULL));
    return GST_FLOW_ERROR;
  }
map_read_error:
  {
    opj_destroy_decompress (dec);
    gst_video_codec_frame_unref (frame);

    GST_ELEMENT_ERROR (self, CORE, FAILED,
        ("Failed to map input buffer"), (NULL));
    return GST_FLOW_ERROR;
  }
open_error:
  {
    opj_destroy_decompress (dec);
    gst_buffer_unmap (frame->input_buffer, &map);
    gst_video_codec_frame_unref (frame);

    GST_ELEMENT_ERROR (self, LIBRARY, INIT,
        ("Failed to open OpenJPEG stream"), (NULL));
    return GST_FLOW_ERROR;
  }
decode_error:
  {
    opj_cio_close (io);
    opj_destroy_decompress (dec);
    gst_buffer_unmap (frame->input_buffer, &map);
    gst_video_codec_frame_unref (frame);

    GST_VIDEO_DECODER_ERROR (self, 1, STREAM, DECODE,
        ("Failed to decode OpenJPEG stream"), (NULL), ret);
    return ret;
  }
negotiate_error:
  {
    opj_image_destroy (image);
    opj_cio_close (io);
    opj_destroy_decompress (dec);
    gst_video_codec_frame_unref (frame);

    GST_ELEMENT_ERROR (self, CORE, NEGOTIATION,
        ("Failed to negotiate"), (NULL));
    return ret;
  }
allocate_error:
  {
    opj_image_destroy (image);
    opj_cio_close (io);
    opj_destroy_decompress (dec);
    gst_video_codec_frame_unref (frame);

    GST_ELEMENT_ERROR (self, CORE, FAILED,
        ("Failed to allocate output buffer"), (NULL));
    return ret;
  }
map_write_error:
  {
    opj_image_destroy (image);
    opj_cio_close (io);
    opj_destroy_decompress (dec);
    gst_video_codec_frame_unref (frame);

    GST_ELEMENT_ERROR (self, CORE, FAILED,
        ("Failed to map output buffer"), (NULL));
    return GST_FLOW_ERROR;
  }
}
Example #11
static GstFlowReturn
gst_pnmdec_handle_frame (GstVideoDecoder * decoder, GstVideoCodecFrame * frame)
{
  GstPnmdec *s = (GstPnmdec *) decoder;
  GstMapInfo imap, omap;
  guint i_rowstride;
  guint o_rowstride;
  GstFlowReturn r = GST_FLOW_OK;
  gint bytes, i, total_bytes = 0;

  r = gst_video_decoder_allocate_output_frame (decoder, frame);
  if (r != GST_FLOW_OK) {
    gst_video_decoder_drop_frame (GST_VIDEO_DECODER (s), frame);
    goto out;
  }

  if (s->mngr.info.encoding == GST_PNM_ENCODING_ASCII) {
    /* In the case of ASCII, the parsed data is stored in buf, so the input
       for frame processing needs to be taken from there */
    gst_buffer_map (s->buf, &imap, GST_MAP_READ);
  } else {
    gst_buffer_map (frame->input_buffer, &imap, GST_MAP_READ);
  }
  gst_buffer_map (frame->output_buffer, &omap, GST_MAP_WRITE);

  gst_buffer_copy_into (frame->output_buffer, frame->input_buffer,
      GST_BUFFER_COPY_METADATA, 0, 0);

  if (s->mngr.info.type == GST_PNM_TYPE_BITMAP) {
    bytes = (s->mngr.info.width * s->mngr.info.height + 7) / 8;
    for (i = 0; i < bytes; i++) {
      omap.data[i * 8] = (imap.data[i] & 0x80) ? 0 : 255;
      omap.data[i * 8 + 1] = (imap.data[i] & 0x40) ? 0 : 255;
      omap.data[i * 8 + 2] = (imap.data[i] & 0x20) ? 0 : 255;
      omap.data[i * 8 + 3] = (imap.data[i] & 0x10) ? 0 : 255;
      omap.data[i * 8 + 4] = (imap.data[i] & 0x08) ? 0 : 255;
      omap.data[i * 8 + 5] = (imap.data[i] & 0x04) ? 0 : 255;
      omap.data[i * 8 + 6] = (imap.data[i] & 0x02) ? 0 : 255;
      omap.data[i * 8 + 7] = (imap.data[i] & 0x01) ? 0 : 255;
    }
    total_bytes = bytes * 8;
  } else if (s->mngr.info.width % 4 != 0) {
    /* Need to convert from PNM rowstride to GStreamer rowstride */
    if (s->mngr.info.type == GST_PNM_TYPE_PIXMAP) {
      i_rowstride = 3 * s->mngr.info.width;
      o_rowstride = GST_ROUND_UP_4 (i_rowstride);
    } else {
      i_rowstride = s->mngr.info.width;
      o_rowstride = GST_ROUND_UP_4 (i_rowstride);
    }

    for (i = 0; i < s->mngr.info.height; i++)
      memcpy (omap.data + i * o_rowstride, imap.data + i * i_rowstride,
          i_rowstride);
    total_bytes = o_rowstride * s->mngr.info.height;
  } else {
    memcpy (omap.data, imap.data, s->size);
    total_bytes = s->size;
  }

  if (s->mngr.info.type != GST_PNM_TYPE_BITMAP) {
    /* Convert the pixels from 0 - max range to 0 - 255 range */
    if (s->mngr.info.max < 255) {
      gint max = s->mngr.info.max;
      for (i = 0; i < total_bytes; i++) {
        if (omap.data[i] <= max) {
          omap.data[i] = 255 * omap.data[i] / max;
        } else {
          /* This is an error case, wherein value in the data stream is
             more than max. Clamp such values to 255 */
          omap.data[i] = 255;
        }
      }
    }
  }

  if (s->mngr.info.encoding == GST_PNM_ENCODING_ASCII) {
    gst_buffer_unmap (s->buf, &imap);
  } else {
    gst_buffer_unmap (frame->input_buffer, &imap);
  }
  gst_buffer_unmap (frame->output_buffer, &omap);

  r = gst_video_decoder_finish_frame (GST_VIDEO_DECODER (s), frame);

out:
  gst_pnmdec_flush (s);

  return r;
}
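The bitmap branch above unpacks 1 bit per pixel PBM data into 8-bit pixels, inverting because PBM stores a set bit as black. The unrolled loop is equivalent to this compact sketch:

#include <stdint.h>

static void
expand_pbm_bits (uint8_t * dst, const uint8_t * src, int nbytes)
{
  int i, b;

  for (i = 0; i < nbytes; i++)
    for (b = 0; b < 8; b++)
      /* MSB first; a set bit is black (0), a clear bit is white (255) */
      dst[i * 8 + b] = (src[i] & (0x80 >> b)) ? 0 : 255;
}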
Example #12
static void
gst_amc_video_dec_loop (GstAmcVideoDec * self)
{
  GstVideoCodecFrame *frame;
  GstFlowReturn flow_ret = GST_FLOW_OK;
  GstClockTimeDiff deadline;
  gboolean is_eos;
  GstAmcBuffer *buf;
  GstAmcBufferInfo buffer_info;
  gint idx;
  GError *err = NULL;

  GST_VIDEO_DECODER_STREAM_LOCK (self);

retry:
  /*if (self->input_state_changed) {
     idx = INFO_OUTPUT_FORMAT_CHANGED;
     } else { */
  GST_DEBUG_OBJECT (self, "Waiting for available output buffer");
  GST_VIDEO_DECODER_STREAM_UNLOCK (self);
  /* Wait at most 100ms here, some codecs don't fail dequeueing if
   * the codec is flushing, causing deadlocks during shutdown */
  idx =
      gst_amc_codec_dequeue_output_buffer (self->codec, &buffer_info, 100000,
      &err);
  GST_VIDEO_DECODER_STREAM_LOCK (self);
  /*} */

  if (idx < 0) {
    if (self->flushing) {
      g_clear_error (&err);
      goto flushing;
    }

    switch (idx) {
      case INFO_OUTPUT_BUFFERS_CHANGED:
        /* Handled internally */
        g_assert_not_reached ();
        break;
      case INFO_OUTPUT_FORMAT_CHANGED:{
        GstAmcFormat *format;
        gchar *format_string;

        GST_DEBUG_OBJECT (self, "Output format has changed");

        format = gst_amc_codec_get_output_format (self->codec, &err);
        if (!format)
          goto format_error;

        format_string = gst_amc_format_to_string (format, &err);
        if (!format_string) {
          gst_amc_format_free (format);
          goto format_error;
        }
        GST_DEBUG_OBJECT (self, "Got new output format: %s", format_string);
        g_free (format_string);

        if (!gst_amc_video_dec_set_src_caps (self, format)) {
          gst_amc_format_free (format);
          goto format_error;
        }
        gst_amc_format_free (format);

        goto retry;
      }
      case INFO_TRY_AGAIN_LATER:
        GST_DEBUG_OBJECT (self, "Dequeueing output buffer timed out");
        goto retry;
      case G_MININT:
        GST_ERROR_OBJECT (self, "Failure dequeueing output buffer");
        goto dequeue_error;
      default:
        g_assert_not_reached ();
        break;
    }

    goto retry;
  }

  GST_DEBUG_OBJECT (self,
      "Got output buffer at index %d: offset %d size %d time %" G_GINT64_FORMAT
      " flags 0x%08x", idx, buffer_info.offset, buffer_info.size,
      buffer_info.presentation_time_us, buffer_info.flags);

  frame =
      _find_nearest_frame (self,
      gst_util_uint64_scale (buffer_info.presentation_time_us, GST_USECOND, 1));

  is_eos = ! !(buffer_info.flags & BUFFER_FLAG_END_OF_STREAM);

  buf = gst_amc_codec_get_output_buffer (self->codec, idx, &err);
  if (!buf)
    goto failed_to_get_output_buffer;

  if (frame
      && (deadline =
          gst_video_decoder_get_max_decode_time (GST_VIDEO_DECODER (self),
              frame)) < 0) {
    GST_WARNING_OBJECT (self,
        "Frame is too late, dropping (deadline %" GST_TIME_FORMAT ")",
        GST_TIME_ARGS (-deadline));
    flow_ret = gst_video_decoder_drop_frame (GST_VIDEO_DECODER (self), frame);
  } else if (!frame && buffer_info.size > 0) {
    GstBuffer *outbuf;

    /* This sometimes happens at EOS or if the input is not properly framed,
     * let's handle it gracefully by allocating a new buffer for the current
     * caps and filling it
     */
    GST_ERROR_OBJECT (self, "No corresponding frame found");

    outbuf =
        gst_video_decoder_allocate_output_buffer (GST_VIDEO_DECODER (self));

    if (!gst_amc_video_dec_fill_buffer (self, buf, &buffer_info, outbuf)) {
      gst_buffer_unref (outbuf);
      if (!gst_amc_codec_release_output_buffer (self->codec, idx, &err))
        GST_ERROR_OBJECT (self, "Failed to release output buffer index %d",
            idx);
      if (err && !self->flushing)
        GST_ELEMENT_WARNING_FROM_ERROR (self, err);
      g_clear_error (&err);
      gst_amc_buffer_free (buf);
      buf = NULL;
      goto invalid_buffer;
    }

    GST_BUFFER_PTS (outbuf) =
        gst_util_uint64_scale (buffer_info.presentation_time_us, GST_USECOND,
        1);
    flow_ret = gst_pad_push (GST_VIDEO_DECODER_SRC_PAD (self), outbuf);
  } else if (buffer_info.size > 0) {
    if ((flow_ret = gst_video_decoder_allocate_output_frame (GST_VIDEO_DECODER
                (self), frame)) != GST_FLOW_OK) {
      GST_ERROR_OBJECT (self, "Failed to allocate buffer");
      if (!gst_amc_codec_release_output_buffer (self->codec, idx, &err))
        GST_ERROR_OBJECT (self, "Failed to release output buffer index %d",
            idx);
      if (err && !self->flushing)
        GST_ELEMENT_WARNING_FROM_ERROR (self, err);
      g_clear_error (&err);
      gst_amc_buffer_free (buf);
      buf = NULL;
      goto flow_error;
    }

    if (!gst_amc_video_dec_fill_buffer (self, buf, &buffer_info,
            frame->output_buffer)) {
      gst_buffer_replace (&frame->output_buffer, NULL);
      gst_video_decoder_drop_frame (GST_VIDEO_DECODER (self), frame);
      if (!gst_amc_codec_release_output_buffer (self->codec, idx, &err))
        GST_ERROR_OBJECT (self, "Failed to release output buffer index %d",
            idx);
      if (err && !self->flushing)
        GST_ELEMENT_WARNING_FROM_ERROR (self, err);
      g_clear_error (&err);
      gst_amc_buffer_free (buf);
      buf = NULL;
      goto invalid_buffer;
    }

    flow_ret = gst_video_decoder_finish_frame (GST_VIDEO_DECODER (self), frame);
  } else if (frame != NULL) {
    flow_ret = gst_video_decoder_drop_frame (GST_VIDEO_DECODER (self), frame);
  }

  gst_amc_buffer_free (buf);
  buf = NULL;

  if (!gst_amc_codec_release_output_buffer (self->codec, idx, &err)) {
    if (self->flushing) {
      g_clear_error (&err);
      goto flushing;
    }
    goto failed_release;
  }

  if (is_eos || flow_ret == GST_FLOW_EOS) {
    GST_VIDEO_DECODER_STREAM_UNLOCK (self);
    g_mutex_lock (&self->drain_lock);
    if (self->draining) {
      GST_DEBUG_OBJECT (self, "Drained");
      self->draining = FALSE;
      g_cond_broadcast (&self->drain_cond);
    } else if (flow_ret == GST_FLOW_OK) {
      GST_DEBUG_OBJECT (self, "Component signalled EOS");
      flow_ret = GST_FLOW_EOS;
    }
    g_mutex_unlock (&self->drain_lock);
    GST_VIDEO_DECODER_STREAM_LOCK (self);
  } else {
    GST_DEBUG_OBJECT (self, "Finished frame: %s", gst_flow_get_name (flow_ret));
  }

  self->downstream_flow_ret = flow_ret;

  if (flow_ret != GST_FLOW_OK)
    goto flow_error;

  GST_VIDEO_DECODER_STREAM_UNLOCK (self);

  return;

dequeue_error:
  {
    GST_ELEMENT_ERROR_FROM_ERROR (self, err);
    gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (self), gst_event_new_eos ());
    gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
    self->downstream_flow_ret = GST_FLOW_ERROR;
    GST_VIDEO_DECODER_STREAM_UNLOCK (self);
    g_mutex_lock (&self->drain_lock);
    self->draining = FALSE;
    g_cond_broadcast (&self->drain_cond);
    g_mutex_unlock (&self->drain_lock);
    return;
  }

format_error:
  {
    if (err)
      GST_ELEMENT_ERROR_FROM_ERROR (self, err);
    else
      GST_ELEMENT_ERROR (self, LIBRARY, FAILED, (NULL),
          ("Failed to handle format"));
    gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (self), gst_event_new_eos ());
    gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
    self->downstream_flow_ret = GST_FLOW_ERROR;
    GST_VIDEO_DECODER_STREAM_UNLOCK (self);
    g_mutex_lock (&self->drain_lock);
    self->draining = FALSE;
    g_cond_broadcast (&self->drain_cond);
    g_mutex_unlock (&self->drain_lock);
    return;
  }
failed_release:
  {
    GST_VIDEO_DECODER_ERROR_FROM_ERROR (self, err);
    gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (self), gst_event_new_eos ());
    gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
    self->downstream_flow_ret = GST_FLOW_ERROR;
    GST_VIDEO_DECODER_STREAM_UNLOCK (self);
    g_mutex_lock (&self->drain_lock);
    self->draining = FALSE;
    g_cond_broadcast (&self->drain_cond);
    g_mutex_unlock (&self->drain_lock);
    return;
  }
flushing:
  {
    GST_DEBUG_OBJECT (self, "Flushing -- stopping task");
    gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
    self->downstream_flow_ret = GST_FLOW_FLUSHING;
    GST_VIDEO_DECODER_STREAM_UNLOCK (self);
    return;
  }

flow_error:
  {
    if (flow_ret == GST_FLOW_EOS) {
      GST_DEBUG_OBJECT (self, "EOS");
      gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (self),
          gst_event_new_eos ());
      gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
    } else if (flow_ret < GST_FLOW_EOS) {
      GST_ELEMENT_ERROR (self, STREAM, FAILED,
          ("Internal data stream error."), ("stream stopped, reason %s",
              gst_flow_get_name (flow_ret)));
      gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (self),
          gst_event_new_eos ());
      gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
    } else if (flow_ret == GST_FLOW_FLUSHING) {
      GST_DEBUG_OBJECT (self, "Flushing -- stopping task");
      gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
    }
    GST_VIDEO_DECODER_STREAM_UNLOCK (self);
    g_mutex_lock (&self->drain_lock);
    self->draining = FALSE;
    g_cond_broadcast (&self->drain_cond);
    g_mutex_unlock (&self->drain_lock);
    return;
  }

failed_to_get_output_buffer:
  {
    GST_VIDEO_DECODER_ERROR_FROM_ERROR (self, err);
    gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (self), gst_event_new_eos ());
    gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
    self->downstream_flow_ret = GST_FLOW_ERROR;
    GST_VIDEO_DECODER_STREAM_UNLOCK (self);
    g_mutex_lock (&self->drain_lock);
    self->draining = FALSE;
    g_cond_broadcast (&self->drain_cond);
    g_mutex_unlock (&self->drain_lock);
    return;
  }

invalid_buffer:
  {
    GST_ELEMENT_ERROR (self, LIBRARY, SETTINGS, (NULL),
        ("Invalid sized input buffer"));
    gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (self), gst_event_new_eos ());
    gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
    self->downstream_flow_ret = GST_FLOW_NOT_NEGOTIATED;
    GST_VIDEO_DECODER_STREAM_UNLOCK (self);
    g_mutex_lock (&self->drain_lock);
    self->draining = FALSE;
    g_cond_broadcast (&self->drain_cond);
    g_mutex_unlock (&self->drain_lock);
    return;
  }
}
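_find_nearest_frame() above is internal to the amc element and not shown in this listing. A plausible sketch, assuming it matches the decoded buffer to a pending frame by closest PTS using the public GstVideoDecoder frame list:

#include <gst/video/video.h>

static GstVideoCodecFrame *
find_nearest_frame (GstVideoDecoder * dec, GstClockTime ts)
{
  GList *frames, *l;
  GstVideoCodecFrame *best = NULL;
  GstClockTimeDiff best_diff = G_MAXINT64;

  frames = gst_video_decoder_get_frames (dec);
  for (l = frames; l; l = l->next) {
    GstVideoCodecFrame *f = l->data;
    GstClockTimeDiff diff;

    if (!GST_CLOCK_TIME_IS_VALID (f->pts))
      continue;
    diff = ABS (GST_CLOCK_DIFF (ts, f->pts));
    if (diff < best_diff) {
      best = f;
      best_diff = diff;
    }
  }
  if (best)
    gst_video_codec_frame_ref (best);
  /* the returned list holds its own references */
  g_list_free_full (frames, (GDestroyNotify) gst_video_codec_frame_unref);
  return best;
}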
Example #13
static GstFlowReturn gst_openh264dec_handle_frame(GstVideoDecoder *decoder, GstVideoCodecFrame *frame)
{
    GstOpenh264Dec *openh264dec = GST_OPENH264DEC(decoder);
    GstMapInfo map_info;
    GstVideoCodecState *state;
    SBufferInfo dst_buf_info;
    DECODING_STATE ret;
    guint8 *yuvdata[3];
    GstFlowReturn flow_status;
    GstVideoFrame video_frame;
    guint actual_width, actual_height;
    guint i;
    guint8 *p;
    guint row_stride, component_width, component_height, src_width, row;

    if (frame) {
        if (!gst_buffer_map(frame->input_buffer, &map_info, GST_MAP_READ)) {
            GST_ERROR_OBJECT(openh264dec, "Cannot map input buffer!");
            return GST_FLOW_ERROR;
        }

        GST_LOG_OBJECT(openh264dec, "handle frame, %d", map_info.size > 4 ? map_info.data[4] & 0x1f : -1);

        memset (&dst_buf_info, 0, sizeof (SBufferInfo));
        ret = openh264dec->priv->decoder->DecodeFrame2(map_info.data, map_info.size, yuvdata, &dst_buf_info);

        if (ret == dsNoParamSets) {
            GST_DEBUG_OBJECT(openh264dec, "Requesting a key unit");
            gst_pad_push_event(GST_VIDEO_DECODER_SINK_PAD(decoder),
                gst_video_event_new_upstream_force_key_unit(GST_CLOCK_TIME_NONE, FALSE, 0));
        }

        if (ret != dsErrorFree && ret != dsNoParamSets) {
            GST_DEBUG_OBJECT(openh264dec, "Requesting a key unit");
            gst_pad_push_event(GST_VIDEO_DECODER_SINK_PAD(decoder),
                               gst_video_event_new_upstream_force_key_unit(GST_CLOCK_TIME_NONE, FALSE, 0));
            GST_LOG_OBJECT(openh264dec, "error decoding nal, return code: %d", ret);
        }

        gst_buffer_unmap(frame->input_buffer, &map_info);
        gst_video_codec_frame_unref (frame);
        frame = NULL;
    } else {
        memset (&dst_buf_info, 0, sizeof (SBufferInfo));
        ret = openh264dec->priv->decoder->DecodeFrame2(NULL, 0, yuvdata, &dst_buf_info);
        if (ret != dsErrorFree)
            return GST_FLOW_EOS;
    }

    /* FIXME: openh264 has no way for us to get a connection
     * between the input and output frames, we just have to
     * guess based on the input. Fortunately openh264 can
     * only do baseline profile. */
    frame = gst_video_decoder_get_oldest_frame (decoder);
    if (!frame) {
      /* Can only happen in finish() */
      return GST_FLOW_EOS;
    }

    /* No output available yet */
    if (dst_buf_info.iBufferStatus != 1) {
        return (frame ? GST_FLOW_OK : GST_FLOW_EOS);
    }

    actual_width  = dst_buf_info.UsrData.sSystemBuffer.iWidth;
    actual_height = dst_buf_info.UsrData.sSystemBuffer.iHeight;

    if (!gst_pad_has_current_caps (GST_VIDEO_DECODER_SRC_PAD (openh264dec)) || actual_width != openh264dec->priv->width || actual_height != openh264dec->priv->height) {
        state = gst_video_decoder_set_output_state(decoder,
            GST_VIDEO_FORMAT_I420,
            actual_width,
            actual_height,
            openh264dec->priv->input_state);
        openh264dec->priv->width = actual_width;
        openh264dec->priv->height = actual_height;

        if (!gst_video_decoder_negotiate(decoder)) {
            GST_ERROR_OBJECT(openh264dec, "Failed to negotiate with downstream elements");
            return GST_FLOW_NOT_NEGOTIATED;
        }
    } else {
        state = gst_video_decoder_get_output_state(decoder);
    }

    flow_status = gst_video_decoder_allocate_output_frame(decoder, frame);
    if (flow_status != GST_FLOW_OK) {
        gst_video_codec_state_unref (state);
        return flow_status;
    }

    if (!gst_video_frame_map(&video_frame, &state->info, frame->output_buffer, GST_MAP_WRITE)) {
        GST_ERROR_OBJECT(openh264dec, "Cannot map output buffer!");
        gst_video_codec_state_unref (state);
        return GST_FLOW_ERROR;
    }

    for (i = 0; i < 3; i++) {
        p = GST_VIDEO_FRAME_COMP_DATA(&video_frame, i);
        row_stride = GST_VIDEO_FRAME_COMP_STRIDE(&video_frame, i);
        component_width = GST_VIDEO_FRAME_COMP_WIDTH(&video_frame, i);
        component_height = GST_VIDEO_FRAME_COMP_HEIGHT(&video_frame, i);
        src_width = i < 1 ? dst_buf_info.UsrData.sSystemBuffer.iStride[0] : dst_buf_info.UsrData.sSystemBuffer.iStride[1];
        for (row = 0; row < component_height; row++) {
            memcpy(p, yuvdata[i], component_width);
            p += row_stride;
            yuvdata[i] += src_width;
        }
    }
    gst_video_codec_state_unref (state);
    gst_video_frame_unmap(&video_frame);

    return gst_video_decoder_finish_frame(decoder, frame);
}
Example #14
static GstFlowReturn
gst_vp9_dec_handle_frame (GstVideoDecoder * decoder, GstVideoCodecFrame * frame)
{
  GstVP9Dec *dec;
  GstFlowReturn ret = GST_FLOW_OK;
  vpx_codec_err_t status;
  vpx_codec_iter_t iter = NULL;
  vpx_image_t *img;
  long decoder_deadline = 0;
  GstClockTimeDiff deadline;
  GstMapInfo minfo;

  GST_DEBUG_OBJECT (decoder, "handle_frame");

  dec = GST_VP9_DEC (decoder);

  if (!dec->decoder_inited) {
    ret = open_codec (dec, frame);
    if (ret == GST_FLOW_CUSTOM_SUCCESS_1)
      return GST_FLOW_OK;
    else if (ret != GST_FLOW_OK)
      return ret;
  }

  deadline = gst_video_decoder_get_max_decode_time (decoder, frame);
  if (deadline < 0) {
    decoder_deadline = 1;
  } else if (deadline == G_MAXINT64) {
    decoder_deadline = 0;
  } else {
    decoder_deadline = MAX (1, deadline / GST_MSECOND);
  }

  if (!gst_buffer_map (frame->input_buffer, &minfo, GST_MAP_READ)) {
    GST_ERROR_OBJECT (dec, "Failed to map input buffer");
    return GST_FLOW_ERROR;
  }

  status = vpx_codec_decode (&dec->decoder,
      minfo.data, minfo.size, NULL, decoder_deadline);

  gst_buffer_unmap (frame->input_buffer, &minfo);

  if (status) {
    GST_VIDEO_DECODER_ERROR (decoder, 1, LIBRARY, ENCODE,
        ("Failed to decode frame"), ("%s", gst_vpx_error_name (status)), ret);
    return ret;
  }

  img = vpx_codec_get_frame (&dec->decoder, &iter);
  if (img) {
    GstVideoFormat fmt;

    switch (img->fmt) {
      case VPX_IMG_FMT_I420:
        fmt = GST_VIDEO_FORMAT_I420;
        break;
      case VPX_IMG_FMT_YV12:
        fmt = GST_VIDEO_FORMAT_YV12;
        break;
      case VPX_IMG_FMT_I422:
        fmt = GST_VIDEO_FORMAT_Y42B;
        break;
      case VPX_IMG_FMT_I444:
        fmt = GST_VIDEO_FORMAT_Y444;
        break;
      default:
        vpx_img_free (img);
        GST_ELEMENT_ERROR (decoder, LIBRARY, ENCODE,
            ("Failed to decode frame"), ("Unsupported color format %d",
                img->fmt));
        return GST_FLOW_ERROR;
        break;
    }

    if (!dec->output_state || dec->output_state->info.finfo->format != fmt ||
        dec->output_state->info.width != img->d_w ||
        dec->output_state->info.height != img->d_h) {
      gboolean send_tags = !dec->output_state;

      if (dec->output_state)
        gst_video_codec_state_unref (dec->output_state);

      dec->output_state =
          gst_video_decoder_set_output_state (GST_VIDEO_DECODER (dec),
          fmt, img->d_w, img->d_h, dec->input_state);
      gst_video_decoder_negotiate (GST_VIDEO_DECODER (dec));

      if (send_tags)
        gst_vp9_dec_send_tags (dec);
    }

    if (deadline < 0) {
      GST_LOG_OBJECT (dec, "Skipping late frame (%f s past deadline)",
          (double) -deadline / GST_SECOND);
      gst_video_decoder_drop_frame (decoder, frame);
    } else {
      ret = gst_video_decoder_allocate_output_frame (decoder, frame);

      if (ret == GST_FLOW_OK) {
        gst_vp9_dec_image_to_buffer (dec, img, frame->output_buffer);
        ret = gst_video_decoder_finish_frame (decoder, frame);
      } else {
        gst_video_decoder_drop_frame (decoder, frame);
      }
    }

    vpx_img_free (img);

    while ((img = vpx_codec_get_frame (&dec->decoder, &iter))) {
      GST_WARNING_OBJECT (decoder, "Multiple decoded frames... dropping");
      vpx_img_free (img);
    }
  } else {
    /* Invisible frame */
    GST_VIDEO_CODEC_FRAME_SET_DECODE_ONLY (frame);
    gst_video_decoder_finish_frame (decoder, frame);
  }

  return ret;
}