static GstFlowReturn
gst_base_video_encoder_chain (GstPad * pad, GstBuffer * buf)
{
  GstBaseVideoEncoder *base_video_encoder;
  GstBaseVideoEncoderClass *klass;
  GstVideoFrame *frame;
  GstFlowReturn ret = GST_FLOW_OK;

  if (!gst_pad_is_negotiated (pad)) {
    /* the chain function owns the buffer; release it before bailing out */
    gst_buffer_unref (buf);
    return GST_FLOW_NOT_NEGOTIATED;
  }

  base_video_encoder = GST_BASE_VIDEO_ENCODER (gst_pad_get_parent (pad));
  klass = GST_BASE_VIDEO_ENCODER_GET_CLASS (base_video_encoder);

  if (base_video_encoder->a.at_eos) {
    /* past EOS: drop the buffer and release the parent ref taken above */
    gst_buffer_unref (buf);
    gst_object_unref (base_video_encoder);
    return GST_FLOW_UNEXPECTED;
  }

  if (base_video_encoder->sink_clipping) {
    gint64 start = GST_BUFFER_TIMESTAMP (buf);
    gint64 stop = start + GST_BUFFER_DURATION (buf);
    gint64 clip_start;
    gint64 clip_stop;

    if (!gst_segment_clip (&GST_BASE_VIDEO_CODEC (base_video_encoder)->segment,
            GST_FORMAT_TIME, start, stop, &clip_start, &clip_stop)) {
      GST_DEBUG ("clipping to segment dropped frame");
      /* the buffer is never attached to a frame, so free it here */
      gst_buffer_unref (buf);
      goto done;
    }
  }

  frame =
      gst_base_video_codec_new_frame (GST_BASE_VIDEO_CODEC
      (base_video_encoder));
  frame->sink_buffer = buf;
  frame->presentation_timestamp = GST_BUFFER_TIMESTAMP (buf);
  frame->presentation_duration = GST_BUFFER_DURATION (buf);
  frame->presentation_frame_number =
      base_video_encoder->presentation_frame_number;
  base_video_encoder->presentation_frame_number++;

  GST_BASE_VIDEO_CODEC (base_video_encoder)->frames =
      g_list_append (GST_BASE_VIDEO_CODEC (base_video_encoder)->frames, frame);

  /* hand the frame to the subclass encoder and propagate its flow return */
  ret = klass->handle_frame (base_video_encoder, frame);

done:
  gst_object_unref (base_video_encoder);

  return ret;
}
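/* GStreamer 0.8-style link function for videodrop: in 0.8 caps carry
 * "framerate" as a plain G_TYPE_DOUBLE (it only became a fraction in 0.10).
 * The rate set on one pad is recorded, and if the opposite pad is already
 * negotiated we verify it accepts the corresponding rate. */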
static GstPadLinkReturn
gst_videodrop_link (GstPad * pad, const GstCaps * caps)
{
  GstVideodrop *videodrop;
  GstStructure *structure;
  gboolean ret;
  GstPadLinkReturn link_ret;
  double fps;
  GstPad *otherpad;

  videodrop = GST_VIDEODROP (gst_pad_get_parent (pad));

  otherpad = (pad == videodrop->srcpad) ? videodrop->sinkpad :
      videodrop->srcpad;

  structure = gst_caps_get_structure (caps, 0);
  ret = gst_structure_get_double (structure, "framerate", &fps);
  if (!ret)
    return GST_PAD_LINK_REFUSED;

  if (pad == videodrop->srcpad) {
    videodrop->to_fps = fps;
  } else {
    videodrop->from_fps = fps;
  }

  if (gst_pad_is_negotiated (otherpad)) {
    /* 
     * Ensure that the other side talks the format we're trying to set
     */
    GstCaps *newcaps = gst_caps_copy (caps);

    if (pad == videodrop->srcpad) {
      gst_caps_set_simple (newcaps,
          "framerate", G_TYPE_DOUBLE, videodrop->from_fps, NULL);
    } else {
      gst_caps_set_simple (newcaps,
          "framerate", G_TYPE_DOUBLE, videodrop->to_fps, NULL);
    }
    /* gst_pad_try_set_caps () returns a GstPadLinkReturn, not a gboolean,
     * and does not take ownership of the caps */
    link_ret = gst_pad_try_set_caps (otherpad, newcaps);
    gst_caps_free (newcaps);
    if (GST_PAD_LINK_FAILED (link_ret)) {
      return GST_PAD_LINK_REFUSED;
    }
  }

  return GST_PAD_LINK_OK;
}
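/* getcaps: advertise what this pad can handle, derived from the other pad.
 * Preference order in the returned caps: the other pad's rate scaled by
 * the configured speed (only when negotiated and speed != 1.0), then the
 * other pad's caps unchanged, then the same caps with any framerate. */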
static GstCaps *
gst_videodrop_getcaps (GstPad * pad)
{
  GstVideodrop *videodrop;
  GstPad *otherpad;
  GstCaps *caps, *copy, *copy2 = NULL;
  int i;
  gdouble otherfps;
  GstStructure *structure;
  gboolean negotiated;

  videodrop = GST_VIDEODROP (gst_pad_get_parent (pad));

  otherpad = (pad == videodrop->srcpad) ? videodrop->sinkpad :
      videodrop->srcpad;
  negotiated = gst_pad_is_negotiated (otherpad);
  otherfps = (pad == videodrop->srcpad) ? videodrop->from_fps :
      videodrop->to_fps;

  caps = gst_pad_get_allowed_caps (otherpad);
  copy = gst_caps_copy (caps);
  if (negotiated) {
    copy2 = gst_caps_copy (caps);
  }
  for (i = 0; i < gst_caps_get_size (caps); i++) {
    structure = gst_caps_get_structure (caps, i);

    gst_structure_set (structure,
        "framerate", GST_TYPE_DOUBLE_RANGE, 0.0, G_MAXDOUBLE, NULL);
  }
  if ((negotiated) && (videodrop->speed != 1.0)) {
    for (i = 0; i < gst_caps_get_size (copy2); i++) {
      structure = gst_caps_get_structure (copy2, i);

      gst_structure_set (structure,
          "framerate", G_TYPE_DOUBLE, otherfps * videodrop->speed, NULL);
    }
    gst_caps_append (copy2, copy);
    copy = copy2;
  }
  gst_caps_append (copy, caps);

  return copy;
}
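/* First variant of the base video decoder chain function: all input is
 * accumulated in an adapter, the subclass scan_for_sync vfunc locates a
 * possible sync point, and parse_data is then called in a loop until it
 * asks for more data.  A sketch of a possible scan_for_sync implementation
 * appears at the end of this listing. */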
static GstFlowReturn
gst_base_video_decoder_chain (GstPad * pad, GstBuffer * buf)
{
  GstBaseVideoDecoder *base_video_decoder;
  GstBaseVideoDecoderClass *klass;
  GstBuffer *buffer;
  GstFlowReturn ret;

  GST_DEBUG ("chain %" G_GINT64_FORMAT, GST_BUFFER_TIMESTAMP (buf));

#if 0
  /* requiring the pad to be negotiated makes it impossible to use
   * oggdemux or filesrc ! decoder */
  if (!gst_pad_is_negotiated (pad)) {
    GST_DEBUG ("not negotiated");
    return GST_FLOW_NOT_NEGOTIATED;
  }
#endif

  base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad));
  klass = GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);

  GST_DEBUG_OBJECT (base_video_decoder, "chain");

  if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT))) {
    GST_DEBUG_OBJECT (base_video_decoder, "received DISCONT buffer");
    if (base_video_decoder->started) {
      gst_base_video_decoder_reset (base_video_decoder);
    }
  }

  if (!base_video_decoder->started) {
    klass->start (base_video_decoder);
    base_video_decoder->started = TRUE;
  }

  if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)) {
    GST_DEBUG ("timestamp %" G_GINT64_FORMAT " offset %" G_GINT64_FORMAT,
        GST_BUFFER_TIMESTAMP (buf), base_video_decoder->offset);
    base_video_decoder->last_sink_timestamp = GST_BUFFER_TIMESTAMP (buf);
  }
  if (GST_BUFFER_OFFSET_END_IS_VALID (buf)) {
    GST_DEBUG ("gp %" G_GINT64_FORMAT, GST_BUFFER_OFFSET_END (buf));
    base_video_decoder->last_sink_offset_end = GST_BUFFER_OFFSET_END (buf);
  }
  base_video_decoder->offset += GST_BUFFER_SIZE (buf);

#if 0
  if (base_video_decoder->timestamp_offset == GST_CLOCK_TIME_NONE &&
      GST_BUFFER_TIMESTAMP (buf) != GST_CLOCK_TIME_NONE) {
    GST_DEBUG ("got new offset %lld", GST_BUFFER_TIMESTAMP (buf));
    base_video_decoder->timestamp_offset = GST_BUFFER_TIMESTAMP (buf);
  }
#endif

  if (base_video_decoder->current_frame == NULL) {
    base_video_decoder->current_frame =
        gst_base_video_decoder_new_frame (base_video_decoder);
  }

  gst_adapter_push (base_video_decoder->input_adapter, buf);

  if (!base_video_decoder->have_sync) {
    int n, m;

    GST_DEBUG ("no sync, scanning");

    n = gst_adapter_available (base_video_decoder->input_adapter);
    m = klass->scan_for_sync (base_video_decoder, FALSE, 0, n);

    if (m < 0) {
      /* a negative scan result is a subclass bug; treat it as "no sync
       * found yet" instead of flushing a bogus (negative) amount */
      g_warning ("subclass returned negative scan %d", m);
      gst_object_unref (base_video_decoder);
      return GST_FLOW_OK;
    }

    if (m > n) {
      g_warning ("subclass scanned past end %d > %d", m, n);
      m = n;
    }

    gst_adapter_flush (base_video_decoder->input_adapter, m);

    if (m < n) {
      GST_DEBUG ("found possible sync after %d bytes (of %d)", m, n);

      /* this is only "maybe" sync */
      base_video_decoder->have_sync = TRUE;
    }

    if (!base_video_decoder->have_sync) {
      gst_object_unref (base_video_decoder);
      return GST_FLOW_OK;
    }
  }

  /* FIXME: use gst_adapter_prev_timestamp() here instead? */
  buffer = gst_adapter_get_buffer (base_video_decoder->input_adapter);

  base_video_decoder->buffer_timestamp = GST_BUFFER_TIMESTAMP (buffer);
  gst_buffer_unref (buffer);

  do {
    ret = klass->parse_data (base_video_decoder, FALSE);
  } while (ret == GST_FLOW_OK);

  if (ret == GST_BASE_VIDEO_DECODER_FLOW_NEED_DATA) {
    gst_object_unref (base_video_decoder);
    return GST_FLOW_OK;
  }

  gst_object_unref (base_video_decoder);
  return ret;
}
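/* GStreamer 0.8-style link function for the Hermes colorspace converter:
 * first try passthrough, otherwise pick the first supported format that
 * intersects the proposed caps and configure a Hermes converter. */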
static GstPadLinkReturn
gst_hermes_colorspace_link (GstPad * pad, const GstCaps * caps)
{
  GstHermesColorspace *space;
  GstPad *otherpad;
  GstStructure *structure;
  GstPadLinkReturn link_ret;
  int width, height;
  double fps;
  int i;

  space = GST_HERMES_COLORSPACE (gst_pad_get_parent (pad));
  otherpad = (pad == space->sinkpad) ? space->srcpad : space->sinkpad;

  link_ret = gst_pad_try_set_caps (otherpad, caps);
  if (link_ret == GST_PAD_LINK_OK) {
    space->passthru = TRUE;
    return link_ret;
  }

  structure = gst_caps_get_structure (caps, 0);

  for (i = 0; i < G_N_ELEMENTS (gst_hermes_colorspace_formats); i++) {
    GstCaps *icaps;
    GstCaps *fcaps;
    gboolean compatible;

    fcaps =
        gst_caps_copy (gst_static_caps_get (&gst_hermes_colorspace_formats
            [i].caps));

    icaps = gst_caps_intersect (caps, fcaps);
    compatible = !gst_caps_is_empty (icaps);
    /* the copy and the intersection are only needed for the test */
    gst_caps_free (icaps);
    gst_caps_free (fcaps);
    if (compatible)
      break;
  }
  if (i == G_N_ELEMENTS (gst_hermes_colorspace_formats)) {
    g_assert_not_reached ();
    return GST_PAD_LINK_REFUSED;
  }

  gst_structure_get_int (structure, "width", &width);
  gst_structure_get_int (structure, "height", &height);
  gst_structure_get_double (structure, "framerate", &fps);

  GST_INFO ("size: %dx%d", space->width, space->height);

  if (gst_pad_is_negotiated (otherpad)) {
    GstCaps *othercaps;

    othercaps = gst_caps_copy (gst_pad_get_negotiated_caps (otherpad));

    gst_caps_set_simple (othercaps,
        "width", G_TYPE_INT, width,
        "height", G_TYPE_INT, height, "framerate", G_TYPE_DOUBLE, fps, NULL);

    link_ret = gst_pad_try_set_caps (otherpad, othercaps);
    gst_caps_free (othercaps);
    if (link_ret != GST_PAD_LINK_OK) {
      return link_ret;
    }
  }

  if (pad == space->srcpad) {
    space->src_format_index = i;
    gst_hermes_colorspace_structure_to_hermes_format (&space->src_format,
        structure);
  } else {
    space->sink_format_index = i;
    gst_hermes_colorspace_structure_to_hermes_format (&space->sink_format,
        structure);
  }

  space->sink_stride = width * (space->sink_format.bits / 8);
  space->src_stride = width * (space->src_format.bits / 8);
  space->sink_size = space->sink_stride * height;
  space->src_size = space->src_stride * height;
  space->width = width;
  space->height = height;
  space->fps = fps;

  if (gst_pad_is_negotiated (otherpad)) {
    if (!Hermes_ConverterRequest (space->h_handle, &space->sink_format,
            &space->src_format)) {
      g_warning ("Hermes: could not get converter");
      return GST_PAD_LINK_REFUSED;
    }
    GST_DEBUG ("Hermes converter initialised");
  }

  return GST_PAD_LINK_OK;
}
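/* Second variant of the chain function: here the decoder distinguishes
 * packetized input (one encoded frame per buffer, handed straight to
 * have_frame) from byte-stream input that goes through the adapter. */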
static GstFlowReturn
gst_base_video_decoder_chain (GstPad * pad, GstBuffer * buf)
{
  GstBaseVideoDecoder *base_video_decoder;
  GstBaseVideoDecoderClass *base_video_decoder_class;
  GstFlowReturn ret;

  GST_DEBUG ("chain %" GST_TIME_FORMAT " duration %" GST_TIME_FORMAT,
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (buf)));

#if 0
  /* requiring the pad to be negotiated makes it impossible to use
   * oggdemux or filesrc ! decoder */
  if (!gst_pad_is_negotiated (pad)) {
    GST_DEBUG ("not negotiated");
    return GST_FLOW_NOT_NEGOTIATED;
  }
#endif

  base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad));
  base_video_decoder_class =
      GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);

  GST_DEBUG_OBJECT (base_video_decoder, "chain");

  if (!base_video_decoder->have_segment) {
    GstEvent *event;

    GST_WARNING
        ("Received buffer without a new-segment. Assuming timestamps start from 0.");

    gst_segment_set_newsegment_full (&base_video_decoder->segment,
        FALSE, 1.0, 1.0, GST_FORMAT_TIME, 0, GST_CLOCK_TIME_NONE, 0);
    base_video_decoder->have_segment = TRUE;

    event = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0,
        GST_CLOCK_TIME_NONE, 0);

    /* gst_pad_push_event () returns a gboolean, not a GstFlowReturn;
     * clean up the buffer and the parent ref on failure */
    if (!gst_pad_push_event (GST_BASE_VIDEO_DECODER_SRC_PAD
            (base_video_decoder), event)) {
      GST_ERROR ("pushing new segment event failed");
      gst_buffer_unref (buf);
      gst_object_unref (base_video_decoder);
      return GST_FLOW_ERROR;
    }
  }

  if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT))) {
    GST_DEBUG_OBJECT (base_video_decoder, "received DISCONT buffer");
    gst_base_video_decoder_flush (base_video_decoder);
  }

  if (base_video_decoder->current_frame == NULL) {
    base_video_decoder->current_frame =
        gst_base_video_decoder_new_frame (base_video_decoder);
  }

  base_video_decoder->input_offset += GST_BUFFER_SIZE (buf);
  if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)) {
    gst_base_video_decoder_add_timestamp (base_video_decoder, buf);
  }


  if (base_video_decoder->packetized) {
    base_video_decoder->current_frame->sink_buffer = buf;

    ret = gst_base_video_decoder_have_frame (base_video_decoder, TRUE, NULL);
  } else {

    gst_adapter_push (base_video_decoder->input_adapter, buf);

    ret = gst_base_video_decoder_drain (base_video_decoder, FALSE);
  }

  gst_object_unref (base_video_decoder);
  return ret;
}
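/* Third variant: combines the two above.  Packetized input bypasses the
 * adapter, while byte-stream input is scanned for sync and then parsed
 * in a loop, as in the first variant. */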
static GstFlowReturn
gst_base_video_decoder_chain (GstPad * pad, GstBuffer * buf)
{
  GstBaseVideoDecoder *base_video_decoder;
  GstBaseVideoDecoderClass *klass;
  GstFlowReturn ret;

  GST_DEBUG ("chain %" GST_TIME_FORMAT " duration %" GST_TIME_FORMAT,
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (buf)));

#if 0
  /* requiring the pad to be negotiated makes it impossible to use
   * oggdemux or filesrc ! decoder */
  if (!gst_pad_is_negotiated (pad)) {
    GST_DEBUG ("not negotiated");
    return GST_FLOW_NOT_NEGOTIATED;
  }
#endif

  base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad));
  klass = GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);

  GST_DEBUG_OBJECT (base_video_decoder, "chain");

  if (!base_video_decoder->have_segment) {
    GstEvent *event;

    GST_WARNING
        ("Received buffer without a new-segment. Assuming timestamps start from 0.");

    gst_segment_set_newsegment_full (&base_video_decoder->segment,
        FALSE, 1.0, 1.0, GST_FORMAT_TIME, 0, GST_CLOCK_TIME_NONE, 0);
    base_video_decoder->have_segment = TRUE;

    event = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0,
        GST_CLOCK_TIME_NONE, 0);

    /* gst_pad_push_event () returns a gboolean, not a GstFlowReturn;
     * clean up the buffer and the parent ref on failure */
    if (!gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD
            (base_video_decoder), event)) {
      GST_ERROR ("pushing new segment event failed");
      gst_buffer_unref (buf);
      gst_object_unref (base_video_decoder);
      return GST_FLOW_ERROR;
    }
  }

  if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT))) {
    GST_DEBUG_OBJECT (base_video_decoder, "received DISCONT buffer");
    gst_base_video_decoder_reset (base_video_decoder);
  }

  if (!base_video_decoder->started) {
    klass->start (base_video_decoder);
    base_video_decoder->started = TRUE;
  }

  if (base_video_decoder->current_frame == NULL) {
    base_video_decoder->current_frame =
        gst_base_video_decoder_new_frame (base_video_decoder);
  }

  if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)) {
    gst_base_video_decoder_add_timestamp (base_video_decoder, buf);
  }
  base_video_decoder->input_offset += GST_BUFFER_SIZE (buf);

#if 0
  if (base_video_decoder->timestamp_offset == GST_CLOCK_TIME_NONE &&
      GST_BUFFER_TIMESTAMP (buf) != GST_CLOCK_TIME_NONE) {
    GST_DEBUG ("got new offset %" GST_TIME_FORMAT,
        GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)));
    base_video_decoder->timestamp_offset = GST_BUFFER_TIMESTAMP (buf);
  }
#endif

  if (base_video_decoder->packetized) {
    base_video_decoder->current_frame->sink_buffer = buf;

    ret = gst_base_video_decoder_have_frame_2 (base_video_decoder);
  } else {

    gst_adapter_push (base_video_decoder->input_adapter, buf);

    if (!base_video_decoder->have_sync) {
      int n, m;

      GST_DEBUG ("no sync, scanning");

      n = gst_adapter_available (base_video_decoder->input_adapter);
      m = klass->scan_for_sync (base_video_decoder, FALSE, 0, n);
      if (m < 0) {
        /* -1 means "no sync found yet, wait for more data"; any other
         * negative value is a subclass bug */
        if (m != -1)
          g_warning ("subclass returned negative scan %d", m);
        gst_object_unref (base_video_decoder);
        return GST_FLOW_OK;
      }

      if (m > n) {
        GST_ERROR ("subclass scanned past end %d > %d", m, n);
        m = n;
      }

      gst_adapter_flush (base_video_decoder->input_adapter, m);

      if (m < n) {
        GST_DEBUG ("found possible sync after %d bytes (of %d)", m, n);

        /* this is only "maybe" sync */
        base_video_decoder->have_sync = TRUE;
      }

      if (!base_video_decoder->have_sync) {
        gst_object_unref (base_video_decoder);
        return GST_FLOW_OK;
      }
    }

    do {
      ret = klass->parse_data (base_video_decoder, FALSE);
    } while (ret == GST_FLOW_OK);

    if (ret == GST_BASE_VIDEO_DECODER_FLOW_NEED_DATA) {
      gst_object_unref (base_video_decoder);
      return GST_FLOW_OK;
    }
  }

  gst_object_unref (base_video_decoder);
  return ret;
}
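/* None of the listings above include the scan_for_sync vfunc itself, so
 * here is a minimal illustrative sketch (not taken from any of the
 * elements above) of what a subclass might implement.  It follows the
 * contract of the last chain variant: return the offset of the first
 * possible sync point, or -1 when none was found and more data is needed.
 * The three-byte 0x00 0x00 0x01 start code is an assumption made for the
 * example, as is the function name. */
static int
example_scan_for_sync (GstBaseVideoDecoder * base_video_decoder,
    gboolean at_eos, int offset, int n)
{
  const guint8 *data;
  int i;

  if (n < 3)
    return -1;                  /* not enough data for a start code yet */

  data = gst_adapter_peek (base_video_decoder->input_adapter, offset + n);

  for (i = offset; i < offset + n - 2; i++) {
    if (data[i] == 0x00 && data[i + 1] == 0x00 && data[i + 2] == 0x01)
      return i;                 /* "maybe" sync; parse_data will confirm */
  }

  return -1;                    /* no start code found in this chunk */
}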