Example 1
static GstFlowReturn _gst_libde265_image_available(VIDEO_DECODER_BASE * parse,
    const struct de265_image *image)
{
    GstLibde265Dec *dec = GST_LIBDE265_DEC (parse);
    int width = de265_get_image_width(image, 0);
    int height = de265_get_image_height(image, 0);

    if (G_UNLIKELY(width != dec->width || height != dec->height)) {
#if GST_CHECK_VERSION(1,0,0)
        GstVideoCodecState *state = gst_video_decoder_set_output_state (parse, GST_VIDEO_FORMAT_I420, width, height, dec->input_state);
        g_assert (state != NULL);
        if (dec->fps_n > 0) {
            state->info.fps_n = dec->fps_n;
            state->info.fps_d = dec->fps_d;
        }
        gst_video_decoder_negotiate(parse);
#else
        GstVideoState *state = gst_base_video_decoder_get_state (parse);
        g_assert (state != NULL);
        state->format = GST_VIDEO_FORMAT_I420;
        state->width = width;
        state->height = height;
        if (dec->fps_n > 0) {
            state->fps_n = dec->fps_n;
            state->fps_d = dec->fps_d;
        }
        gst_base_video_decoder_set_src_caps (parse);
#endif
        GST_DEBUG ("Frame dimensions are %d x %d\n", width, height);
        dec->width = width;
        dec->height = height;
    }
    
    return HAVE_FRAME (parse);
}
Example 2
static GstFlowReturn
_gst_libde265_image_available (GstVideoDecoder * decoder, int width, int height)
{
  GstLibde265Dec *dec = GST_LIBDE265_DEC (decoder);

  if (G_UNLIKELY (dec->output_state == NULL
          || width != dec->output_state->info.width
          || height != dec->output_state->info.height)) {
    GstVideoCodecState *state =
        gst_video_decoder_set_output_state (decoder, GST_VIDEO_FORMAT_I420,
        width, height, dec->input_state);
    if (state == NULL) {
      GST_ERROR_OBJECT (dec, "Failed to set output state");
      return GST_FLOW_ERROR;
    }
    if (!gst_video_decoder_negotiate (decoder)) {
      GST_ERROR_OBJECT (dec, "Failed to negotiate format");
      gst_video_codec_state_unref (state);
      return GST_FLOW_ERROR;
    }
    if (dec->output_state != NULL) {
      gst_video_codec_state_unref (dec->output_state);
    }
    dec->output_state = state;
    GST_DEBUG_OBJECT (dec, "Frame dimensions are %d x %d", width, height);
  }

  return GST_FLOW_OK;
}
Example 3
static void
gst_libde265_dec_finalize (GObject * object)
{
  GstLibde265Dec *dec = GST_LIBDE265_DEC (object);

  _gst_libde265_dec_free_decoder (dec);

  G_OBJECT_CLASS (parent_class)->finalize (object);
}
Example 4
static gboolean
gst_libde265_dec_stop (VIDEO_DECODER_BASE * parse)
{
    GstLibde265Dec *dec = GST_LIBDE265_DEC (parse);
    
    _gst_libde265_dec_free_decoder(dec);
    
    return TRUE;
}
Example 5
static gboolean
gst_libde265_dec_stop (GstVideoDecoder * decoder)
{
  GstLibde265Dec *dec = GST_LIBDE265_DEC (decoder);

  _gst_libde265_dec_free_decoder (dec);

  return TRUE;
}
Example 6
static gboolean gst_libde265_dec_reset (VIDEO_DECODER_BASE * parse)
{
    GstLibde265Dec *dec = GST_LIBDE265_DEC (parse);

    de265_reset(dec->ctx);
    dec->buffer_full = 0;

    return TRUE;
}
Example 7
static GstFlowReturn gst_libde265_dec_handle_frame (VIDEO_DECODER_BASE * parse,
    VIDEO_FRAME * frame)
{
    GstFlowReturn ret = GST_FLOW_OK;
    GstLibde265Dec *dec = GST_LIBDE265_DEC (parse);
    
    const struct de265_image *img = de265_get_next_picture(dec->ctx);
    if (img == NULL) {
        // need more data
        return GST_FLOW_OK;
    }
    
    ret = ALLOC_OUTPUT_FRAME(parse, frame);
    if (ret != GST_FLOW_OK) {
        return ret;
    }
    
    uint8_t *dest;
#if GST_CHECK_VERSION(1,0,0)
    GstMapInfo info;
    if (!gst_buffer_map(frame->output_buffer, &info, GST_MAP_WRITE)) {
        return GST_FLOW_ERROR;
    }
    
    dest = info.data;
#else
    dest = GST_BUFFER_DATA(frame->src_buffer);
#endif
    
    int plane;
    for (plane=0; plane<3; plane++) {
        int stride;
        int width = de265_get_image_width(img, plane);
        int height = de265_get_image_height(img, plane);
        const uint8_t *src = de265_get_image_plane(img, plane, &stride);
        if (stride == width) {
            memcpy(dest, src, height * stride);
            dest += (height * stride);
        } else {
            while (height--) {
                memcpy(dest, src, width);
                src += stride;
                dest += width;
            }
        }
    }
#if GST_CHECK_VERSION(1,0,0)
    gst_buffer_unmap(frame->output_buffer, &info);
    frame->pts = (GstClockTime) de265_get_image_PTS(img);
#endif
    
    return FINISH_FRAME (parse, frame);
}
Example 8
static void
gst_libde265_dec_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec)
{
  GstLibde265Dec *dec = GST_LIBDE265_DEC (object);

  switch (prop_id) {
    case PROP_MAX_THREADS:
      g_value_set_int (value, dec->max_threads);
      break;
    default:
      break;
  }
}
Example 9
static gboolean gst_libde265_dec_set_format (VIDEO_DECODER_BASE * parse,
    GstVideoCodecState * state)
{
    GstLibde265Dec *dec = GST_LIBDE265_DEC (parse);

    if (dec->input_state != NULL) {
        gst_video_codec_state_unref(dec->input_state);
    }
    dec->input_state = state;
    if (state != NULL) {
        gst_video_codec_state_ref(state);
    }
    
    return TRUE;
}
Example 10
static GstFlowReturn
gst_libde265_dec_finish (GstVideoDecoder * decoder)
{
  GstLibde265Dec *dec = GST_LIBDE265_DEC (decoder);
  de265_error err;
  const struct de265_image *img;
  int more;
  GstFlowReturn result;

  err = de265_flush_data (dec->ctx);
  if (!de265_isOK (err)) {
    GST_ELEMENT_ERROR (decoder, STREAM, DECODE,
        ("Failed to flush decoder: %s (code=%d)",
            de265_get_error_text (err), err), (NULL));
    return GST_FLOW_ERROR;
  }

  do {
    err = de265_decode (dec->ctx, &more);
    switch (err) {
      case DE265_OK:
      case DE265_ERROR_IMAGE_BUFFER_FULL:
        img = de265_get_next_picture (dec->ctx);
        if (img != NULL) {
          result = _gst_libde265_return_image (decoder, NULL, img);
          if (result != GST_FLOW_OK) {
            return result;
          }
        }
        break;

      case DE265_ERROR_WAITING_FOR_INPUT_DATA:
        /* not really an error */
        more = 0;
        break;

      default:
        if (!de265_isOK (err)) {
          GST_ELEMENT_ERROR (decoder, STREAM, DECODE,
              ("Failed to decode codec data: %s (code=%d)",
                  de265_get_error_text (err), err), (NULL));
          return GST_FLOW_ERROR;
        }
    }
  } while (more);

  return GST_FLOW_OK;
}
Example 11
static void gst_libde265_dec_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec)
{
    GstLibde265Dec *dec = GST_LIBDE265_DEC (object);

    switch (prop_id) {
    case PROP_MODE:
        g_value_set_enum (value, dec->mode);
        break;
    case PROP_FRAMERATE:
        gst_value_set_fraction(value, dec->fps_n, dec->fps_d);
        break;
    default:
        break;
    }
}
Example 12
static gboolean
gst_libde265_dec_flush (GstVideoDecoder * decoder)
{
  GstLibde265Dec *dec = GST_LIBDE265_DEC (decoder);

  de265_reset (dec->ctx);
  dec->buffer_full = 0;
  if (dec->codec_data != NULL
      && dec->format == GST_TYPE_LIBDE265_FORMAT_BYTESTREAM) {
    int more;
    de265_error err =
        de265_push_data (dec->ctx, dec->codec_data, dec->codec_data_size, 0,
        NULL);
    if (!de265_isOK (err)) {
      GST_ELEMENT_ERROR (decoder, STREAM, DECODE,
          ("Failed to push codec data: %s (code=%d)",
              de265_get_error_text (err), err), (NULL));
      return FALSE;
    }
    de265_push_end_of_NAL (dec->ctx);
    do {
      err = de265_decode (dec->ctx, &more);
      switch (err) {
        case DE265_OK:
          break;

        case DE265_ERROR_IMAGE_BUFFER_FULL:
        case DE265_ERROR_WAITING_FOR_INPUT_DATA:
          /* not really an error */
          more = 0;
          break;

        default:
          if (!de265_isOK (err)) {
            GST_ELEMENT_ERROR (decoder, STREAM, DECODE,
                ("Failed to decode codec data: %s (code=%d)",
                    de265_get_error_text (err), err), (NULL));
            return FALSE;
          }
      }
    } while (more);
  }

  return TRUE;
}
Example 13
static void
gst_libde265_dec_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstLibde265Dec *dec = GST_LIBDE265_DEC (object);

  switch (prop_id) {
    case PROP_MAX_THREADS:
      dec->max_threads = g_value_get_int (value);
      if (dec->max_threads) {
        GST_DEBUG_OBJECT (dec, "Max. threads set to %d", dec->max_threads);
      } else {
        GST_DEBUG_OBJECT (dec, "Max. threads set to auto");
      }
      break;
    default:
      break;
  }
}
Example 14
static void gst_libde265_dec_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
    GstLibde265Dec *dec = GST_LIBDE265_DEC (object);

    switch (prop_id) {
    case PROP_MODE:
        dec->mode = g_value_get_enum (value);
        GST_DEBUG ("Mode set to %d\n", dec->mode);
        break;
    case PROP_FRAMERATE:
        dec->fps_n = gst_value_get_fraction_numerator(value);
        dec->fps_d = gst_value_get_fraction_denominator(value);
        GST_DEBUG ("Framerate set to %d/%d\n", dec->fps_n, dec->fps_d);
        break;
    default:
        break;
    }
}
Example 15
static gboolean
gst_libde265_dec_start (VIDEO_DECODER_BASE * parse)
{
    GstLibde265Dec *dec = GST_LIBDE265_DEC (parse);
    
    _gst_libde265_dec_free_decoder(dec);
    dec->ctx = de265_new_decoder();
    if (dec->ctx == NULL) {
        return FALSE;
    }
    
    int threads;
#if defined(_SC_NPROC_ONLN)
    threads = sysconf(_SC_NPROC_ONLN);
#elif defined(_SC_NPROCESSORS_ONLN)
    threads = sysconf(_SC_NPROCESSORS_ONLN);
#else
    #warning "Don't know how to get number of CPU cores, will use the default thread count"
    threads = DEFAULT_THREAD_COUNT;
#endif
    if (threads <= 0) {
        threads = DEFAULT_THREAD_COUNT;
    }

    // XXX: We start more threads than cores for now, as some threads
    // might get blocked while waiting for dependent data. Having more
    // threads increases decoding speed by about 10%
    threads *= 2;
    if (threads > 32) {
        // TODO: this limit should come from the libde265 headers
        threads = 32;
    }
    de265_start_worker_threads(dec->ctx, threads);
    GST_INFO ("Starting %d worker threads\n", threads);
    
    // NOTE: we explicitly disable hash checks for now
    de265_set_parameter_bool(dec->ctx, DE265_DECODER_PARAM_BOOL_SEI_CHECK_HASH, 0);
    return TRUE;
}
Example 16
static gboolean
gst_libde265_dec_start (GstVideoDecoder * decoder)
{
  GstLibde265Dec *dec = GST_LIBDE265_DEC (decoder);
  int threads = dec->max_threads;
  struct de265_image_allocation allocation;

  _gst_libde265_dec_free_decoder (dec);
  dec->ctx = de265_new_decoder ();
  if (dec->ctx == NULL) {
    return FALSE;
  }
  if (threads == 0) {
    threads = g_get_num_processors ();

    /* NOTE: We start more threads than cores for now, as some threads
     * might get blocked while waiting for dependent data. Having more
     * threads increases decoding speed by about 10% */
    threads *= 2;
  }
  if (threads > 1) {
    if (threads > 32) {
      /* TODO: this limit should come from the libde265 headers */
      threads = 32;
    }
    de265_start_worker_threads (dec->ctx, threads);
  }
  GST_INFO_OBJECT (dec, "Using libde265 %s with %d worker threads",
      de265_get_version (), threads);

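  /* Install custom allocation callbacks so libde265 decodes directly into
   * GStreamer output buffers (direct rendering) instead of into its own
   * internal images; see the get_buffer/release_buffer callbacks. */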
  allocation.get_buffer = gst_libde265_dec_get_buffer;
  allocation.release_buffer = gst_libde265_dec_release_buffer;
  de265_set_image_allocation_functions (dec->ctx, &allocation, decoder);
  /* NOTE: we explicitly disable hash checks for now */
  de265_set_parameter_bool (dec->ctx, DE265_DECODER_PARAM_BOOL_SEI_CHECK_HASH,
      0);
  return TRUE;
}
Example 17
static GstFlowReturn
gst_libde265_dec_handle_frame (GstVideoDecoder * decoder,
    GstVideoCodecFrame * frame)
{
  GstLibde265Dec *dec = GST_LIBDE265_DEC (decoder);
  uint8_t *frame_data;
  uint8_t *end_data;
  const struct de265_image *img;
  de265_error ret = DE265_OK;
  int more = 0;
  GstClockTime pts;
  gsize size;
  GstMapInfo info;

  pts = frame->pts;
  if (pts == GST_CLOCK_TIME_NONE) {
    pts = frame->dts;
  }

  if (!gst_buffer_map (frame->input_buffer, &info, GST_MAP_READ)) {
    GST_ERROR_OBJECT (dec, "Failed to map input buffer");
    return GST_FLOW_ERROR;
  }

  frame_data = info.data;
  size = info.size;
  end_data = frame_data + size;

  if (size > 0) {
    if (dec->format == GST_TYPE_LIBDE265_FORMAT_PACKETIZED) {
      /* stream contains length fields and NALs */
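      /* each NAL unit is preceded by a big-endian length field of
       * dec->length_size bytes (as signalled in the hvcC codec data) */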
      uint8_t *start_data = frame_data;
      while (start_data + dec->length_size <= end_data) {
        int nal_size = 0;
        int i;
        for (i = 0; i < dec->length_size; i++) {
          nal_size = (nal_size << 8) | start_data[i];
        }
        if (start_data + dec->length_size + nal_size > end_data) {
          GST_ELEMENT_ERROR (decoder, STREAM, DECODE,
              ("Overflow in input data, check stream format"), (NULL));
          goto error_input;
        }
        ret =
            de265_push_NAL (dec->ctx, start_data + dec->length_size, nal_size,
            (de265_PTS) pts,
            (void *) (uintptr_t) (frame->system_frame_number + 1));
        if (ret != DE265_OK) {
          GST_ELEMENT_ERROR (decoder, STREAM, DECODE,
              ("Error while pushing data: %s (code=%d)",
                  de265_get_error_text (ret), ret), (NULL));
          goto error_input;
        }
        start_data += dec->length_size + nal_size;
      }
    } else {
      ret =
          de265_push_data (dec->ctx, frame_data, size, (de265_PTS) pts,
          (void *) (uintptr_t) (frame->system_frame_number + 1));
      if (ret != DE265_OK) {
        GST_ELEMENT_ERROR (decoder, STREAM, DECODE,
            ("Error while pushing data: %s (code=%d)",
                de265_get_error_text (ret), ret), (NULL));
        goto error_input;
      }
    }
  } else {
    ret = de265_flush_data (dec->ctx);
    if (ret != DE265_OK) {
      GST_ELEMENT_ERROR (decoder, STREAM, DECODE,
          ("Error while flushing data: %s (code=%d)",
              de265_get_error_text (ret), ret), (NULL));
      goto error_input;
    }
  }
  gst_buffer_unmap (frame->input_buffer, &info);

  /* decode as much as possible */
  do {
    ret = de265_decode (dec->ctx, &more);
  } while (more && ret == DE265_OK);

  switch (ret) {
    case DE265_OK:
    case DE265_ERROR_WAITING_FOR_INPUT_DATA:
      break;

    case DE265_ERROR_IMAGE_BUFFER_FULL:
      dec->buffer_full = 1;
      if ((img = de265_peek_next_picture (dec->ctx)) == NULL) {
        return GST_FLOW_OK;
      }
      break;

    default:
      GST_ELEMENT_ERROR (decoder, STREAM, DECODE,
          ("Error while decoding: %s (code=%d)", de265_get_error_text (ret),
              ret), (NULL));
      return GST_FLOW_ERROR;
  }

  while ((ret = de265_get_warning (dec->ctx)) != DE265_OK) {
    GST_ELEMENT_WARNING (decoder, STREAM, DECODE,
        ("%s (code=%d)", de265_get_error_text (ret), ret), (NULL));
  }

  img = de265_get_next_picture (dec->ctx);
  if (img == NULL) {
    /* need more data */
    return GST_FLOW_OK;
  }

  return _gst_libde265_return_image (decoder, frame, img);

error_input:
  gst_buffer_unmap (frame->input_buffer, &info);
  return GST_FLOW_ERROR;
}
Example 18
static GstFlowReturn
_gst_libde265_return_image (GstVideoDecoder * decoder,
    GstVideoCodecFrame * frame, const struct de265_image *img)
{
  GstLibde265Dec *dec = GST_LIBDE265_DEC (decoder);
  struct GstLibde265FrameRef *ref;
  GstFlowReturn result;
  GstVideoFrame outframe;
  GstVideoCodecFrame *out_frame;
  int frame_number;
  int plane;

  ref = (struct GstLibde265FrameRef *) de265_get_image_plane_user_data (img, 0);
  if (ref != NULL) {
    /* decoder is using direct rendering */
    out_frame = gst_video_codec_frame_ref (ref->frame);
    if (frame != NULL) {
      gst_video_codec_frame_unref (frame);
    }
    gst_buffer_replace (&out_frame->output_buffer, ref->buffer);
    gst_buffer_replace (&ref->buffer, NULL);
    return gst_video_decoder_finish_frame (decoder, out_frame);
  }

  result =
      _gst_libde265_image_available (decoder, de265_get_image_width (img, 0),
      de265_get_image_height (img, 0));
  if (result != GST_FLOW_OK) {
    GST_ERROR_OBJECT (dec, "Failed to notify about available image");
    return result;
  }

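  /* system_frame_number + 1 was attached as image user data when the NAL
   * was pushed, so a value of 0 (-1 after the subtraction) means no codec
   * frame is associated with this image */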
  frame_number = (uintptr_t) de265_get_image_user_data (img) - 1;
  if (frame_number != -1) {
    out_frame = gst_video_decoder_get_frame (decoder, frame_number);
  } else {
    out_frame = NULL;
  }
  if (frame != NULL) {
    gst_video_codec_frame_unref (frame);
  }

  if (out_frame == NULL) {
    GST_ERROR_OBJECT (dec, "No frame available to return");
    return GST_FLOW_ERROR;
  }

  result = gst_video_decoder_allocate_output_frame (decoder, out_frame);
  if (result != GST_FLOW_OK) {
    GST_ERROR_OBJECT (dec, "Failed to allocate output frame");
    return result;
  }

  g_assert (dec->output_state != NULL);
  if (!gst_video_frame_map (&outframe, &dec->output_state->info,
          out_frame->output_buffer, GST_MAP_WRITE)) {
    GST_ERROR_OBJECT (dec, "Failed to map output buffer");
    return GST_FLOW_ERROR;
  }

  for (plane = 0; plane < 3; plane++) {
    int width = de265_get_image_width (img, plane);
    int height = de265_get_image_height (img, plane);
    int srcstride = width;
    int dststride = GST_VIDEO_FRAME_COMP_STRIDE (&outframe, plane);
    const uint8_t *src = de265_get_image_plane (img, plane, &srcstride);
    uint8_t *dest = GST_VIDEO_FRAME_COMP_DATA (&outframe, plane);
    if (srcstride == width && dststride == width) {
      memcpy (dest, src, height * width);
    } else {
      while (height--) {
        memcpy (dest, src, width);
        src += srcstride;
        dest += dststride;
      }
    }
  }
  gst_video_frame_unmap (&outframe);
  return gst_video_decoder_finish_frame (decoder, out_frame);
}
Example 19
static gboolean
gst_libde265_dec_set_format (GstVideoDecoder * decoder,
    GstVideoCodecState * state)
{
  GstLibde265Dec *dec = GST_LIBDE265_DEC (decoder);

  if (dec->input_state != NULL) {
    gst_video_codec_state_unref (dec->input_state);
  }
  dec->input_state = state;
  if (state != NULL) {
    gst_video_codec_state_ref (state);
  }
  if (state != NULL && state->caps != NULL) {
    GstStructure *str;
    const GValue *value;
    str = gst_caps_get_structure (state->caps, 0);
    if ((value = gst_structure_get_value (str, "codec_data"))) {
      GstMapInfo info;
      guint8 *data;
      gsize size;
      GstBuffer *buf;
      de265_error err;
      int more;

      buf = gst_value_get_buffer (value);
      if (!gst_buffer_map (buf, &info, GST_MAP_READ)) {
        GST_ELEMENT_ERROR (decoder, STREAM, DECODE,
            ("Failed to map codec data"), (NULL));
        return FALSE;
      }
      data = info.data;
      size = info.size;
      free (dec->codec_data);
      dec->codec_data = malloc (size);
      g_assert (dec->codec_data != NULL);
      dec->codec_data_size = size;
      memcpy (dec->codec_data, data, size);
      if (size > 3 && (data[0] || data[1] || data[2] > 1)) {
        /* encoded in "hvcC" format (assume version 0) */
        dec->format = GST_TYPE_LIBDE265_FORMAT_PACKETIZED;
        if (size > 22) {
          int i;
          int num_param_sets;
          int pos;
          if (data[0] != 0) {
            GST_ELEMENT_WARNING (decoder, STREAM,
                DECODE, ("Unsupported extra data version %d, decoding may fail",
                    data[0]), (NULL));
          }
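          /* hvcC layout: the low two bits of byte 21 hold
           * lengthSizeMinusOne for the NAL length prefixes, and byte 22
           * holds the number of parameter-set arrays that follow */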
          dec->length_size = (data[21] & 3) + 1;
          num_param_sets = data[22];
          pos = 23;
          for (i = 0; i < num_param_sets; i++) {
            int j;
            int nal_count;
            if (pos + 3 > size) {
              GST_ELEMENT_ERROR (decoder, STREAM, DECODE,
                  ("Buffer underrun in extra header (%d >= %" G_GSIZE_FORMAT
                      ")", pos + 3, size), (NULL));
              return FALSE;
            }
            /* ignore flags + NAL type (1 byte) */
            nal_count = data[pos + 1] << 8 | data[pos + 2];
            pos += 3;
            for (j = 0; j < nal_count; j++) {
              int nal_size;
              if (pos + 2 > size) {
                GST_ELEMENT_ERROR (decoder, STREAM, DECODE,
                    ("Buffer underrun in extra nal header (%d >= %"
                        G_GSIZE_FORMAT ")", pos + 2, size), (NULL));
                return FALSE;
              }
              nal_size = data[pos] << 8 | data[pos + 1];
              if (pos + 2 + nal_size > size) {
                GST_ELEMENT_ERROR (decoder, STREAM, DECODE,
                    ("Buffer underrun in extra nal (%d >= %" G_GSIZE_FORMAT ")",
                        pos + 2 + nal_size, size), (NULL));
                return FALSE;
              }
              err =
                  de265_push_NAL (dec->ctx, data + pos + 2, nal_size, 0, NULL);
              if (!de265_isOK (err)) {
                GST_ELEMENT_ERROR (decoder, STREAM, DECODE,
                    ("Failed to push data: %s (%d)", de265_get_error_text (err),
                        err), (NULL));
                return FALSE;
              }
              pos += 2 + nal_size;
            }
          }
        }
        GST_DEBUG ("Assuming packetized data (%d bytes length)",
            dec->length_size);
      } else {
        dec->format = GST_TYPE_LIBDE265_FORMAT_BYTESTREAM;
        GST_DEBUG_OBJECT (dec, "Assuming non-packetized data");
        err = de265_push_data (dec->ctx, data, size, 0, NULL);
        if (!de265_isOK (err)) {
          gst_buffer_unmap (buf, &info);
          GST_ELEMENT_ERROR (decoder, STREAM, DECODE,
              ("Failed to push codec data: %s (code=%d)",
                  de265_get_error_text (err), err), (NULL));
          return FALSE;
        }
      }
      gst_buffer_unmap (buf, &info);
      de265_push_end_of_NAL (dec->ctx);
      do {
        err = de265_decode (dec->ctx, &more);
        switch (err) {
          case DE265_OK:
            break;

          case DE265_ERROR_IMAGE_BUFFER_FULL:
          case DE265_ERROR_WAITING_FOR_INPUT_DATA:
            /* not really an error */
            more = 0;
            break;

          default:
            if (!de265_isOK (err)) {
              GST_ELEMENT_ERROR (decoder, STREAM, DECODE,
                  ("Failed to decode codec data: %s (code=%d)",
                      de265_get_error_text (err), err), (NULL));
              return FALSE;
            }
        }
      } while (more);
    } else if ((value = gst_structure_get_value (str, "stream-format"))) {
      const gchar *format = g_value_get_string (value);
      if (strcmp (format, "byte-stream") == 0) {
        dec->format = GST_TYPE_LIBDE265_FORMAT_BYTESTREAM;
        GST_DEBUG_OBJECT (dec, "Assuming raw byte-stream");
      }
    }
  }

  return TRUE;
}
Example 20
#if GST_CHECK_VERSION(1,0,0)
static GstFlowReturn gst_libde265_dec_parse_data (VIDEO_DECODER_BASE * parse,
    GstVideoCodecFrame * frame, GstAdapter * adapter, gboolean at_eos)
#else
static GstFlowReturn gst_libde265_dec_parse_data (VIDEO_DECODER_BASE * parse,
    gboolean at_eos)
#endif
{
    GstLibde265Dec *dec = GST_LIBDE265_DEC (parse);
    const struct de265_image *img;
    de265_error ret = DE265_OK;
    int more = 0;
#if GST_CHECK_VERSION(1,0,0)
    de265_PTS pts = (de265_PTS) frame->pts;
#else
    de265_PTS pts = 0;
#endif
    
    if (dec->buffer_full) {
        // return any pending images before decoding more data
        if ((img = de265_peek_next_picture(dec->ctx)) != NULL) {
            return _gst_libde265_image_available(parse, img);
        }
        dec->buffer_full = 0;
    }
    
#if !GST_CHECK_VERSION(1,0,0)
    GstAdapter *adapter = parse->input_adapter;
#endif
    gsize size = gst_adapter_available (adapter);
    if (size == 0) {
        return NEED_DATA_RESULT;
    }
    
    GstBuffer *buf = gst_adapter_take_buffer(adapter, size);
    uint8_t *frame_data;
    uint8_t *end_data;
#if GST_CHECK_VERSION(1,0,0)
    GstMapInfo info;
    if (!gst_buffer_map(buf, &info, GST_MAP_READWRITE)) {
        return GST_FLOW_ERROR;
    }
    
    frame_data = info.data;
#else
    frame_data = GST_BUFFER_DATA(buf);
#endif
    end_data = frame_data + size;
    
    if (size > 0) {
        if (dec->mode == GST_TYPE_LIBDE265_DEC_PACKETIZED) {
            // packetized input: each NAL unit is prefixed by a 4-byte
            // big-endian length field
            uint8_t *start_data = frame_data;
            while (start_data + 4 <= end_data) {
                int nal_size = READ_BE32(start_data);
                if (start_data + 4 + nal_size > end_data) {
                    GST_ELEMENT_ERROR (parse, STREAM, DECODE,
                        ("Overflow in input data, check data mode"),
                        (NULL));
                    goto error;
                }
                ret = de265_push_NAL(dec->ctx, start_data + 4, nal_size, pts, NULL);
                if (ret != DE265_OK) {
                    GST_ELEMENT_ERROR (parse, STREAM, DECODE,
                        ("Error while pushing data: %s (code=%d)", de265_get_error_text(ret), ret),
                        (NULL));
                    goto error;
                }
                start_data += 4 + nal_size;
            }
        } else {
            ret = de265_push_data(dec->ctx, frame_data, size, pts, NULL);
        }
    } else {
        ret = de265_flush_data(dec->ctx);
    }
    
    // decode as much as possible
    do {
        ret = de265_decode(dec->ctx, &more);
    } while (more && ret == DE265_OK);
#if GST_CHECK_VERSION(1,0,0)
    gst_buffer_unmap(buf, &info);
#endif
    gst_buffer_unref(buf);
    switch (ret) {
    case DE265_OK:
        break;

    case DE265_ERROR_IMAGE_BUFFER_FULL:
        dec->buffer_full = 1;
        if ((img = de265_peek_next_picture(dec->ctx)) == NULL) {
            return NEED_DATA_RESULT;
        }
        return _gst_libde265_image_available(parse, img);

    case DE265_ERROR_WAITING_FOR_INPUT_DATA:
        return NEED_DATA_RESULT;

    default:
        GST_ELEMENT_ERROR (parse, STREAM, DECODE,
            ("Error while decoding: %s (code=%d)", de265_get_error_text(ret), ret),
            (NULL));
        return GST_FLOW_ERROR;
    }
    
    while ((ret = de265_get_warning(dec->ctx)) != DE265_OK) {
        GST_ELEMENT_WARNING (parse, STREAM, DECODE,
            ("%s (code=%d)", de265_get_error_text(ret), ret),
            (NULL));
    }
    
    if ((img = de265_peek_next_picture(dec->ctx)) == NULL) {
        // need more data
        return NEED_DATA_RESULT;
    }
    
    return _gst_libde265_image_available(parse, img);

error:
#if GST_CHECK_VERSION(1,0,0)
    gst_buffer_unmap(buf, &info);
#endif
    gst_buffer_unref(buf);
    return GST_FLOW_ERROR;
}
Example 21
static int
gst_libde265_dec_get_buffer (de265_decoder_context * ctx,
    struct de265_image_spec *spec, struct de265_image *img, void *userdata)
{
  GstVideoDecoder *base = (GstVideoDecoder *) userdata;
  GstLibde265Dec *dec = GST_LIBDE265_DEC (base);
  GstVideoCodecFrame *frame = NULL;
  int i;
  int width = spec->width;
  int height = spec->height;
  GstFlowReturn ret;
  struct GstLibde265FrameRef *ref;
  GstVideoInfo *info;
  int frame_number;

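  /* the image user data carries system_frame_number + 1; 0 means no frame */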
  frame_number = (uintptr_t) de265_get_image_user_data (img) - 1;
  if (G_UNLIKELY (frame_number == -1)) {
    /* should not happen... */
    GST_WARNING_OBJECT (base, "Frame has no number assigned!");
    goto fallback;
  }

  frame = gst_video_decoder_get_frame (base, frame_number);
  if (G_UNLIKELY (frame == NULL)) {
    /* should not happen... */
    GST_WARNING_OBJECT (base, "Couldn't get codec frame!");
    goto fallback;
  }

  if (width % spec->alignment) {
    width += spec->alignment - (width % spec->alignment);
  }
  if (width != spec->visible_width || height != spec->visible_height) {
    /* clipping not supported for now */
    goto fallback;
  }

  ret = _gst_libde265_image_available (base, width, height);
  if (G_UNLIKELY (ret != GST_FLOW_OK)) {
    GST_ERROR_OBJECT (dec, "Failed to notify about available image");
    goto fallback;
  }

  ret =
      gst_video_decoder_allocate_output_frame (GST_VIDEO_DECODER (dec), frame);
  if (G_UNLIKELY (ret != GST_FLOW_OK)) {
    GST_ERROR_OBJECT (dec, "Failed to allocate output buffer");
    goto fallback;
  }

  ref = (struct GstLibde265FrameRef *) g_malloc0 (sizeof (*ref));
  g_assert (ref != NULL);
  ref->decoder = base;
  ref->frame = frame;

  gst_buffer_replace (&ref->buffer, frame->output_buffer);
  gst_buffer_replace (&frame->output_buffer, NULL);

  info = &dec->output_state->info;
  if (!gst_video_frame_map (&ref->vframe, info, ref->buffer, GST_MAP_READWRITE)) {
    GST_ERROR_OBJECT (dec, "Failed to map frame output buffer");
    goto error;
  }

  ref->mapped = TRUE;
  if (GST_VIDEO_FRAME_PLANE_STRIDE (&ref->vframe,
          0) < width * GST_VIDEO_FRAME_COMP_PSTRIDE (&ref->vframe, 0)) {
    GST_DEBUG_OBJECT (dec, "plane 0: pitch too small (%d/%d*%d)",
        GST_VIDEO_FRAME_PLANE_STRIDE (&ref->vframe, 0), width,
        GST_VIDEO_FRAME_COMP_PSTRIDE (&ref->vframe, 0));
    goto error;
  }

  if (GST_VIDEO_FRAME_COMP_HEIGHT (&ref->vframe, 0) < height) {
    GST_DEBUG_OBJECT (dec, "plane 0: lines too few (%d/%d)",
        GST_VIDEO_FRAME_COMP_HEIGHT (&ref->vframe, 0), height);
    goto error;
  }

  for (i = 0; i < 3; i++) {
    uint8_t *data;
    int stride = GST_VIDEO_FRAME_PLANE_STRIDE (&ref->vframe, i);
    if (stride % spec->alignment) {
      GST_DEBUG_OBJECT (dec, "plane %d: pitch not aligned (%d%%%d)",
          i, stride, spec->alignment);
      goto error;
    }

    data = GST_VIDEO_FRAME_PLANE_DATA (&ref->vframe, i);
    if ((uintptr_t) (data) % spec->alignment) {
      GST_DEBUG_OBJECT (dec, "plane %d not aligned", i);
      goto error;
    }

    de265_set_image_plane (img, i, data, stride, ref);
  }
  return 1;

error:
  gst_libde265_dec_release_frame_ref (ref);
  frame = NULL;

fallback:
  if (frame != NULL) {
    gst_video_codec_frame_unref (frame);
  }
  return de265_get_default_image_allocation_functions ()->get_buffer (ctx,
      spec, img, userdata);
}