static GstFlowReturn
recv_sample (GstAppSink * appsink, gpointer user_data)
{
  KmsRecorderEndpoint *self =
      KMS_RECORDER_ENDPOINT (GST_OBJECT_PARENT (appsink));
  GstAppSrc *appsrc = GST_APP_SRC (user_data);
  GstFlowReturn ret;
  GstSample *sample;
  GstSegment *segment;
  GstBuffer *buffer;
  BaseTimeType *base_time;
  GstClockTime offset;

  g_signal_emit_by_name (appsink, "pull-sample", &sample);
  if (sample == NULL)
    return GST_FLOW_OK;

  buffer = gst_sample_get_buffer (sample);
  if (buffer == NULL) {
    /* the element lock has not been taken yet */
    ret = GST_FLOW_OK;
    goto end_unlocked;
  }

  segment = gst_sample_get_segment (sample);

  KMS_ELEMENT_LOCK (self);
  if (self->priv->moving_to_state != KMS_URI_ENDPOINT_STATE_START) {
    GST_WARNING ("Dropping buffer received in invalid state %" GST_PTR_FORMAT,
        buffer);
    // TODO: Add a flag to discard buffers until keyframe
    ret = GST_FLOW_OK;
    goto end;
  }

  gst_buffer_ref (buffer);
  buffer = gst_buffer_make_writable (buffer);

  if (GST_BUFFER_PTS_IS_VALID (buffer))
    buffer->pts =
        gst_segment_to_running_time (segment, GST_FORMAT_TIME, buffer->pts);
  if (GST_BUFFER_DTS_IS_VALID (buffer))
    buffer->dts =
        gst_segment_to_running_time (segment, GST_FORMAT_TIME, buffer->dts);

  BASE_TIME_LOCK (self);

  base_time = g_object_get_qdata (G_OBJECT (self), base_time_key_quark ());

  if (base_time == NULL) {
    base_time = g_slice_new0 (BaseTimeType);
    base_time->pts = buffer->pts;
    base_time->dts = buffer->dts;
    GST_DEBUG_OBJECT (appsrc, "Setting pts base time to: %" G_GUINT64_FORMAT,
        base_time->pts);
    g_object_set_qdata_full (G_OBJECT (self), base_time_key_quark (), base_time,
        release_base_time_type);
  }

  if (!GST_CLOCK_TIME_IS_VALID (base_time->pts)
      && GST_BUFFER_PTS_IS_VALID (buffer)) {
    base_time->pts = buffer->pts;
    GST_DEBUG_OBJECT (appsrc, "Setting pts base time to: %" G_GUINT64_FORMAT,
        base_time->pts);
    base_time->dts = buffer->dts;
  }

  if (GST_CLOCK_TIME_IS_VALID (base_time->pts)) {
    if (GST_BUFFER_PTS_IS_VALID (buffer)) {
      offset = base_time->pts + self->priv->paused_time;
      if (buffer->pts > offset) {
        buffer->pts -= offset;
      } else {
        buffer->pts = 0;
      }
    }
  }

  if (GST_CLOCK_TIME_IS_VALID (base_time->dts)) {
    if (GST_BUFFER_DTS_IS_VALID (buffer)) {
      offset = base_time->dts + self->priv->paused_time;
      if (buffer->dts > offset) {
        buffer->dts -= offset;
      } else {
        buffer->dts = 0;
      }
    }
  }

  BASE_TIME_UNLOCK (self);

  GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_LIVE);

  if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_HEADER))
    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);

  ret = gst_app_src_push_buffer (appsrc, buffer);

  if (ret != GST_FLOW_OK) {
    /* something wrong */
    GST_ERROR_OBJECT (self, "Could not send buffer to appsrc %s. Cause: %s",
        GST_ELEMENT_NAME (appsrc), gst_flow_get_name (ret));
    ret = GST_FLOW_CUSTOM_SUCCESS;
  }

end:
  KMS_ELEMENT_UNLOCK (self);

end_unlocked:
  if (sample != NULL) {
    gst_sample_unref (sample);
  }

  return ret;
}
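
The function above follows a common pattern: map the appsink buffer's timestamps into running time through the sample's segment, then subtract a stored base time plus the accumulated paused time so the appsrc timeline starts at zero. A minimal sketch of that rebasing step, assuming GStreamer 1.x and a writable buffer (the helper name and arguments are illustrative, not part of the element):

#include <gst/gst.h>

/* Minimal sketch: rebase a buffer's PTS into running time through the
 * segment, then subtract a base offset (base PTS plus accumulated
 * paused time), clamping at zero. */
static void
rebase_buffer_pts (GstBuffer * buffer, const GstSegment * segment,
    GstClockTime base_pts, GstClockTime paused_time)
{
  GstClockTime offset;

  if (!GST_BUFFER_PTS_IS_VALID (buffer))
    return;

  GST_BUFFER_PTS (buffer) = gst_segment_to_running_time (segment,
      GST_FORMAT_TIME, GST_BUFFER_PTS (buffer));

  if (!GST_CLOCK_TIME_IS_VALID (base_pts))
    return;

  offset = base_pts + paused_time;
  if (GST_BUFFER_PTS (buffer) > offset)
    GST_BUFFER_PTS (buffer) -= offset;
  else
    GST_BUFFER_PTS (buffer) = 0;
}
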
static GstFlowReturn
theora_enc_chain (GstPad * pad, GstBuffer * buffer)
{
  GstTheoraEnc *enc;
  ogg_packet op;
  GstClockTime timestamp, duration, running_time;
  GstFlowReturn ret;
  gboolean force_keyframe;

  enc = GST_THEORA_ENC (GST_PAD_PARENT (pad));

  /* we keep track of two timelines.
   * - The timestamps from the incoming buffers, which we copy to the outgoing
   *   encoded buffers as-is. We need to do this as we simply forward the
   *   newsegment events.
   * - The running_time of the buffers, which we use to construct the granulepos
   *   in the packets.
   */
  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  duration = GST_BUFFER_DURATION (buffer);

  running_time =
      gst_segment_to_running_time (&enc->segment, GST_FORMAT_TIME, timestamp);
  if ((gint64) running_time < 0) {
    GST_DEBUG_OBJECT (enc, "Dropping buffer, timestamp: %" GST_TIME_FORMAT,
        GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)));
    gst_buffer_unref (buffer);
    return GST_FLOW_OK;
  }

  GST_OBJECT_LOCK (enc);
  if (enc->bitrate_changed) {
    long int bitrate = enc->video_bitrate;

    th_encode_ctl (enc->encoder, TH_ENCCTL_SET_BITRATE, &bitrate,
        sizeof (long int));
    enc->bitrate_changed = FALSE;
  }

  if (enc->quality_changed) {
    long int quality = enc->video_quality;

    th_encode_ctl (enc->encoder, TH_ENCCTL_SET_QUALITY, &quality,
        sizeof (long int));
    enc->quality_changed = FALSE;
  }

  /* see if we need to schedule a keyframe */
  force_keyframe = enc->force_keyframe;
  enc->force_keyframe = FALSE;
  GST_OBJECT_UNLOCK (enc);

  if (force_keyframe) {
    GstClockTime stream_time;
    GstStructure *s;

    stream_time = gst_segment_to_stream_time (&enc->segment,
        GST_FORMAT_TIME, timestamp);

    s = gst_structure_new ("GstForceKeyUnit",
        "timestamp", G_TYPE_UINT64, timestamp,
        "stream-time", G_TYPE_UINT64, stream_time,
        "running-time", G_TYPE_UINT64, running_time, NULL);

    theora_enc_force_keyframe (enc);

    gst_pad_push_event (enc->srcpad,
        gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM, s));
  }

  /* make sure we copy the discont flag to the next outgoing buffer when it's
   * set on the incoming buffer */
  if (GST_BUFFER_IS_DISCONT (buffer)) {
    enc->next_discont = TRUE;
  }

  if (enc->packetno == 0) {
    /* no packets written yet, setup headers */
    GstCaps *caps;
    GstBuffer *buf;
    GSList *buffers = NULL;
    int result;

    enc->granulepos_offset = 0;
    enc->timestamp_offset = 0;

    GST_DEBUG_OBJECT (enc, "output headers");
    /* Theora streams begin with three headers; the initial header (with
       most of the codec setup parameters) which is mandated by the Ogg
       bitstream spec.  The second header holds any comment fields.  The
       third header holds the bitstream codebook.  We merely need to
       make the headers, then pass them to libtheora one at a time;
       libtheora handles the additional Ogg bitstream constraints */

    /* create the remaining theora headers */
    th_comment_clear (&enc->comment);
    th_comment_init (&enc->comment);

    while ((result =
            th_encode_flushheader (enc->encoder, &enc->comment, &op)) > 0) {
      ret =
          theora_buffer_from_packet (enc, &op, GST_CLOCK_TIME_NONE,
          GST_CLOCK_TIME_NONE, GST_CLOCK_TIME_NONE, &buf);
      if (ret != GST_FLOW_OK) {
        goto header_buffer_alloc;
      }
      buffers = g_slist_prepend (buffers, buf);
    }
    if (result < 0) {
      g_slist_foreach (buffers, (GFunc) gst_buffer_unref, NULL);
      g_slist_free (buffers);
      goto encoder_disabled;
    }

    buffers = g_slist_reverse (buffers);

    /* mark buffers and put on caps */
    caps = gst_pad_get_caps (enc->srcpad);
    caps = theora_set_header_on_caps (caps, buffers);
    GST_DEBUG ("here are the caps: %" GST_PTR_FORMAT, caps);
    gst_pad_set_caps (enc->srcpad, caps);

    g_slist_foreach (buffers, (GFunc) gst_buffer_set_caps, caps);

    gst_caps_unref (caps);

    /* push out the header buffers */
    while (buffers) {
      buf = buffers->data;
      buffers = g_slist_delete_link (buffers, buffers);
      if ((ret = theora_push_buffer (enc, buf)) != GST_FLOW_OK) {
        g_slist_foreach (buffers, (GFunc) gst_buffer_unref, NULL);
        g_slist_free (buffers);
        goto header_push;
      }
    }

    enc->granulepos_offset =
        gst_util_uint64_scale (running_time, enc->fps_n,
        GST_SECOND * enc->fps_d);
    enc->timestamp_offset = running_time;
    enc->next_ts = 0;
  }

  {
    th_ycbcr_buffer ycbcr;
    gint res;

    theora_enc_init_buffer (ycbcr, &enc->info, GST_BUFFER_DATA (buffer));

    if (theora_enc_is_discontinuous (enc, running_time, duration)) {
      theora_enc_reset (enc);
      enc->granulepos_offset =
          gst_util_uint64_scale (running_time, enc->fps_n,
          GST_SECOND * enc->fps_d);
      enc->timestamp_offset = running_time;
      enc->next_ts = 0;
      enc->next_discont = TRUE;
    }

    if (enc->multipass_cache_fd
        && enc->multipass_mode == MULTIPASS_MODE_SECOND_PASS) {
      if (!theora_enc_read_multipass_cache (enc)) {
        ret = GST_FLOW_ERROR;
        goto multipass_read_failed;
      }
    }

    res = th_encode_ycbcr_in (enc->encoder, ycbcr);
    /* none of the failure cases can happen here */
    g_assert (res == 0);

    if (enc->multipass_cache_fd
        && enc->multipass_mode == MULTIPASS_MODE_FIRST_PASS) {
      if (!theora_enc_write_multipass_cache (enc, FALSE, FALSE)) {
        ret = GST_FLOW_ERROR;
        goto multipass_write_failed;
      }
    }

    ret = GST_FLOW_OK;
    while (th_encode_packetout (enc->encoder, 0, &op)) {
      GstClockTime next_time;

      next_time = th_granule_time (enc->encoder, op.granulepos) * GST_SECOND;

      ret =
          theora_push_packet (enc, &op, timestamp, enc->next_ts,
          next_time - enc->next_ts);

      enc->next_ts = next_time;
      if (ret != GST_FLOW_OK)
        goto data_push;
    }
    gst_buffer_unref (buffer);
  }

  return ret;

  /* ERRORS */
multipass_read_failed:
  {
    gst_buffer_unref (buffer);
    return ret;
  }
multipass_write_failed:
  {
    gst_buffer_unref (buffer);
    return ret;
  }
header_buffer_alloc:
  {
    gst_buffer_unref (buffer);
    return ret;
  }
header_push:
  {
    gst_buffer_unref (buffer);
    return ret;
  }
data_push:
  {
    gst_buffer_unref (buffer);
    return ret;
  }
encoder_disabled:
  {
    GST_ELEMENT_ERROR (enc, STREAM, ENCODE, (NULL),
        ("libtheora has been compiled with the encoder disabled"));
    gst_buffer_unref (buffer);
    return GST_FLOW_ERROR;
  }
}
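
The force-keyframe branch above shows the conventional downstream notification: alongside forcing the encoder to emit a keyframe, a custom "GstForceKeyUnit" event carries the timestamp expressed in all three timelines. A minimal sketch of that event construction, mirroring the 0.10-era pattern used above (srcpad is assumed to be the element's source pad):

#include <gst/gst.h>

/* Sketch: build and push a downstream "GstForceKeyUnit" custom event. */
static void
push_force_key_unit (GstPad * srcpad, GstClockTime timestamp,
    GstClockTime stream_time, GstClockTime running_time)
{
  GstStructure *s = gst_structure_new ("GstForceKeyUnit",
      "timestamp", G_TYPE_UINT64, timestamp,
      "stream-time", G_TYPE_UINT64, stream_time,
      "running-time", G_TYPE_UINT64, running_time, NULL);

  /* gst_event_new_custom () takes ownership of the structure */
  gst_pad_push_event (srcpad,
      gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM, s));
}
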
static gboolean
gst_identity_sink_event (GstBaseTransform * trans, GstEvent * event)
{
  GstIdentity *identity;
  gboolean ret = TRUE;

  identity = GST_IDENTITY (trans);

  if (!identity->silent) {
    const GstStructure *s;
    const gchar *tstr;
    gchar *sstr;

    GST_OBJECT_LOCK (identity);
    g_free (identity->last_message);

    tstr = gst_event_type_get_name (GST_EVENT_TYPE (event));
    if ((s = gst_event_get_structure (event)))
      sstr = gst_structure_to_string (s);
    else
      sstr = g_strdup ("");

    identity->last_message =
        g_strdup_printf ("event   ******* (%s:%s) E (type: %s (%d), %s) %p",
        GST_DEBUG_PAD_NAME (trans->sinkpad), tstr, GST_EVENT_TYPE (event),
        sstr, event);
    g_free (sstr);
    GST_OBJECT_UNLOCK (identity);

    gst_identity_notify_last_message (identity);
  }

  if (identity->single_segment && (GST_EVENT_TYPE (event) == GST_EVENT_SEGMENT)) {
    if (!trans->have_segment) {
      GstEvent *news;
      GstSegment segment;

      gst_event_copy_segment (event, &segment);
      gst_event_copy_segment (event, &trans->segment);
      trans->have_segment = TRUE;

      /* This is the first segment, send out a (0, -1) segment */
      gst_segment_init (&segment, segment.format);
      news = gst_event_new_segment (&segment);

      gst_pad_event_default (trans->sinkpad, GST_OBJECT_CAST (trans), news);
    } else {
      /* need to track segment for proper running time */
      gst_event_copy_segment (event, &trans->segment);
    }
  }

  if (GST_EVENT_TYPE (event) == GST_EVENT_GAP &&
      trans->have_segment && trans->segment.format == GST_FORMAT_TIME) {
    GstClockTime start, dur;

    gst_event_parse_gap (event, &start, &dur);
    if (GST_CLOCK_TIME_IS_VALID (start)) {
      start = gst_segment_to_running_time (&trans->segment,
          GST_FORMAT_TIME, start);

      gst_identity_do_sync (identity, start);

      /* also transform GAP timestamp similar to buffer timestamps */
      if (identity->single_segment) {
        gst_event_unref (event);
        event = gst_event_new_gap (start, dur);
      }
    }
  }

  /* Reset previous timestamp, duration and offsets on SEGMENT
   * to prevent false warnings when checking for perfect streams */
  if (GST_EVENT_TYPE (event) == GST_EVENT_SEGMENT) {
    identity->prev_timestamp = identity->prev_duration = GST_CLOCK_TIME_NONE;
    identity->prev_offset = identity->prev_offset_end = GST_BUFFER_OFFSET_NONE;
  }

  if (identity->single_segment && GST_EVENT_TYPE (event) == GST_EVENT_SEGMENT) {
    /* eat up segments */
    gst_event_unref (event);
    ret = TRUE;
  } else {
    if (GST_EVENT_TYPE (event) == GST_EVENT_FLUSH_START) {
      GST_OBJECT_LOCK (identity);
      if (identity->clock_id) {
        GST_DEBUG_OBJECT (identity, "unlock clock wait");
        gst_clock_id_unschedule (identity->clock_id);
      }
      GST_OBJECT_UNLOCK (identity);
    }

    ret = GST_BASE_TRANSFORM_CLASS (parent_class)->sink_event (trans, event);
  }

  return ret;
}
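
The single-segment branch above replaces the first incoming SEGMENT with an open-ended one so that downstream sees a single continuous timeline. A minimal sketch of that substitution, assuming GStreamer 1.x pad semantics (the helper is illustrative):

#include <gst/gst.h>

/* Sketch: on the first SEGMENT event, forward an open-ended [0, -1]
 * segment of the same format instead of the original one. */
static void
send_single_segment (GstPad * sinkpad, GstObject * parent, GstEvent * event)
{
  GstSegment in_segment, out_segment;

  /* read the format of the incoming segment, then replace it */
  gst_event_copy_segment (event, &in_segment);
  gst_segment_init (&out_segment, in_segment.format);

  gst_pad_event_default (sinkpad, parent,
      gst_event_new_segment (&out_segment));
}
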
Example #4
static GstFlowReturn
theora_handle_data_packet (GstTheoraDec * dec, ogg_packet * packet,
    GstClockTime outtime, GstClockTime outdur)
{
  /* normal data packet */
  th_ycbcr_buffer buf;
  GstBuffer *out;
  gboolean keyframe;
  GstFlowReturn result;
  ogg_int64_t gp;

  if (G_UNLIKELY (!dec->have_header))
    goto not_initialized;

  /* get timestamp and durations */
  if (outtime == -1)
    outtime = dec->last_timestamp;
  if (outdur == -1)
    outdur = gst_util_uint64_scale_int (GST_SECOND, dec->info.fps_denominator,
        dec->info.fps_numerator);

  /* calculate expected next timestamp */
  if (outtime != -1 && outdur != -1)
    dec->last_timestamp = outtime + outdur;

  /* the second most significant bit of the first data byte is cleared 
   * for keyframes. We can only check it if it's not a zero-length packet. */
  keyframe = packet->bytes && ((packet->packet[0] & 0x40) == 0);
  if (G_UNLIKELY (keyframe)) {
    GST_DEBUG_OBJECT (dec, "we have a keyframe");
    dec->need_keyframe = FALSE;
  } else if (G_UNLIKELY (dec->need_keyframe)) {
    goto dropping;
  }

  GST_DEBUG_OBJECT (dec, "parsing data packet");

  /* this does the decoding */
  if (G_UNLIKELY (th_decode_packetin (dec->decoder, packet, &gp) < 0))
    goto decode_error;

  if (outtime != -1) {
    gboolean need_skip;
    GstClockTime running_time;
    GstClockTime earliest_time;
    gdouble proportion;

    /* qos needs to be done on running time */
    running_time = gst_segment_to_running_time (&dec->segment, GST_FORMAT_TIME,
        outtime);

    GST_OBJECT_LOCK (dec);
    proportion = dec->proportion;
    earliest_time = dec->earliest_time;
    /* check for QoS, don't perform the last steps of getting and
     * pushing the buffers that are known to be late. */
    need_skip = earliest_time != -1 && running_time <= earliest_time;
    GST_OBJECT_UNLOCK (dec);

    if (need_skip) {
      GstMessage *qos_msg;
      guint64 stream_time;
      gint64 jitter;

      GST_DEBUG_OBJECT (dec, "skipping decoding: qostime %"
          GST_TIME_FORMAT " <= %" GST_TIME_FORMAT,
          GST_TIME_ARGS (running_time), GST_TIME_ARGS (earliest_time));

      dec->dropped++;

      stream_time =
          gst_segment_to_stream_time (&dec->segment, GST_FORMAT_TIME, outtime);
      jitter = GST_CLOCK_DIFF (running_time, earliest_time);

      qos_msg =
          gst_message_new_qos (GST_OBJECT_CAST (dec), FALSE, running_time,
          stream_time, outtime, outdur);
      gst_message_set_qos_values (qos_msg, jitter, proportion, 1000000);
      gst_message_set_qos_stats (qos_msg, GST_FORMAT_BUFFERS,
          dec->processed, dec->dropped);
      gst_element_post_message (GST_ELEMENT_CAST (dec), qos_msg);

      goto dropping_qos;
    }
  }

  /* this does postprocessing and set up the decoded frame
   * pointers in our yuv variable */
  if (G_UNLIKELY (th_decode_ycbcr_out (dec->decoder, buf) < 0))
    goto no_yuv;

  if (G_UNLIKELY ((buf[0].width != dec->info.frame_width)
          || (buf[0].height != dec->info.frame_height)))
    goto wrong_dimensions;

  result = theora_handle_image (dec, buf, &out);
  if (result != GST_FLOW_OK)
    return result;

  GST_BUFFER_OFFSET (out) = dec->frame_nr;
  if (dec->frame_nr != -1)
    dec->frame_nr++;
  GST_BUFFER_OFFSET_END (out) = dec->frame_nr;

  GST_BUFFER_TIMESTAMP (out) = outtime;
  GST_BUFFER_DURATION (out) = outdur;

  dec->processed++;

  if (dec->segment.rate >= 0.0)
    result = theora_dec_push_forward (dec, out);
  else
    result = theora_dec_push_reverse (dec, out);

  return result;

  /* ERRORS */
not_initialized:
  {
    GST_ELEMENT_ERROR (GST_ELEMENT (dec), STREAM, DECODE,
        (NULL), ("no header sent yet"));
    return GST_FLOW_ERROR;
  }
dropping:
  {
    GST_WARNING_OBJECT (dec, "dropping frame because we need a keyframe");
    dec->discont = TRUE;
    return GST_FLOW_OK;
  }
dropping_qos:
  {
    if (dec->frame_nr != -1)
      dec->frame_nr++;
    dec->discont = TRUE;
    GST_WARNING_OBJECT (dec, "dropping frame because of QoS");
    return GST_FLOW_OK;
  }
decode_error:
  {
    GST_ELEMENT_ERROR (GST_ELEMENT (dec), STREAM, DECODE,
        (NULL), ("theora decoder did not decode data packet"));
    return GST_FLOW_ERROR;
  }
no_yuv:
  {
    GST_ELEMENT_ERROR (GST_ELEMENT (dec), STREAM, DECODE,
        (NULL), ("couldn't read out YUV image"));
    return GST_FLOW_ERROR;
  }
wrong_dimensions:
  {
    GST_ELEMENT_ERROR (GST_ELEMENT (dec), STREAM, FORMAT,
        (NULL), ("dimensions of image do not match header"));
    return GST_FLOW_ERROR;
  }
}
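
The QoS block above is the standard decoder-side frame-dropping test: convert the buffer timestamp to running time and compare it against the earliest render deadline reported by downstream QoS events. A minimal sketch of that decision, with illustrative parameter names:

#include <gst/gst.h>

/* Sketch: a frame is skipped when its running time is not later than
 * the earliest render deadline reported by downstream. */
static gboolean
should_skip_frame (const GstSegment * segment, GstClockTime timestamp,
    GstClockTime earliest_time)
{
  GstClockTime running_time;

  if (!GST_CLOCK_TIME_IS_VALID (earliest_time))
    return FALSE;

  running_time = gst_segment_to_running_time (segment, GST_FORMAT_TIME,
      timestamp);

  return GST_CLOCK_TIME_IS_VALID (running_time) &&
      running_time <= earliest_time;
}
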
Example #5
static GstMessage *
gst_spectrum_message_new (GstSpectrum * spectrum, GstClockTime timestamp,
    GstClockTime duration)
{
  GstBaseTransform *trans = GST_BASE_TRANSFORM_CAST (spectrum);
  GstSpectrumChannel *cd;
  GstStructure *s;
  GValue *mcv = NULL, *pcv = NULL;
  GstClockTime endtime, running_time, stream_time;

  GST_DEBUG_OBJECT (spectrum, "preparing message, bands =%d ", spectrum->bands);

  running_time = gst_segment_to_running_time (&trans->segment, GST_FORMAT_TIME,
      timestamp);
  stream_time = gst_segment_to_stream_time (&trans->segment, GST_FORMAT_TIME,
      timestamp);
  /* endtime is for backwards compatibility */
  endtime = stream_time + duration;

  s = gst_structure_new ("spectrum",
      "endtime", GST_TYPE_CLOCK_TIME, endtime,
      "timestamp", G_TYPE_UINT64, timestamp,
      "stream-time", G_TYPE_UINT64, stream_time,
      "running-time", G_TYPE_UINT64, running_time,
      "duration", G_TYPE_UINT64, duration, NULL);

  if (!spectrum->multi_channel) {
    cd = &spectrum->channel_data[0];

    if (spectrum->message_magnitude) {
      /* FIXME 0.11: this should be an array, not a list */
      mcv = gst_spectrum_message_add_container (s, GST_TYPE_LIST, "magnitude");
      gst_spectrum_message_add_list (mcv, cd->spect_magnitude, spectrum->bands);
    }
    if (spectrum->message_phase) {
      /* FIXME 0.11: this should be an array, not a list */
      pcv = gst_spectrum_message_add_container (s, GST_TYPE_LIST, "phase");
      gst_spectrum_message_add_list (pcv, cd->spect_phase, spectrum->bands);
    }
  } else {
    guint c;
    guint channels = GST_AUDIO_FILTER_CHANNELS (spectrum);

    if (spectrum->message_magnitude) {
      mcv = gst_spectrum_message_add_container (s, GST_TYPE_ARRAY, "magnitude");
    }
    if (spectrum->message_phase) {
      pcv = gst_spectrum_message_add_container (s, GST_TYPE_ARRAY, "phase");
    }

    for (c = 0; c < channels; c++) {
      cd = &spectrum->channel_data[c];

      if (spectrum->message_magnitude) {
        gst_spectrum_message_add_array (mcv, cd->spect_magnitude,
            spectrum->bands);
      }
      if (spectrum->message_phase) {
        gst_spectrum_message_add_array (pcv, cd->spect_phase,
            spectrum->bands);
      }
    }
  }
  return gst_message_new_element (GST_OBJECT (spectrum), s);
}
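
On the application side, a message like the one built above is typically consumed from the bus. A minimal sketch of a handler that reads back the "running-time" and "magnitude" fields, assuming the single-channel layout where "magnitude" is a GST_TYPE_LIST:

#include <gst/gst.h>

/* Sketch: parse the "spectrum" element message posted above. */
static void
on_spectrum_message (GstMessage * msg)
{
  const GstStructure *s = gst_message_get_structure (msg);
  const GValue *magnitudes;
  GstClockTime running_time = GST_CLOCK_TIME_NONE;

  if (s == NULL || !gst_structure_has_name (s, "spectrum"))
    return;

  gst_structure_get_clock_time (s, "running-time", &running_time);
  magnitudes = gst_structure_get_value (s, "magnitude");

  if (magnitudes != NULL && GST_VALUE_HOLDS_LIST (magnitudes))
    g_print ("%u bands at %" GST_TIME_FORMAT "\n",
        gst_value_list_get_size (magnitudes), GST_TIME_ARGS (running_time));
}
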
Example #6
static GstFlowReturn
gst_visual_chain (GstPad * pad, GstBuffer * buffer)
{
  GstBuffer *outbuf = NULL;
  guint i;
  GstVisual *visual = GST_VISUAL (gst_pad_get_parent (pad));
  GstFlowReturn ret = GST_FLOW_OK;
  guint avail;

  GST_DEBUG_OBJECT (visual, "chain function called");

  /* If we don't have an output format yet, preallocate a buffer to try and
   * set one */
  if (GST_PAD_CAPS (visual->srcpad) == NULL) {
    ret = get_buffer (visual, &outbuf);
    if (ret != GST_FLOW_OK) {
      gst_buffer_unref (buffer);
      goto beach;
    }
  }

  /* resync on DISCONT */
  if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT)) {
    gst_adapter_clear (visual->adapter);
    visual->next_ts = -1;
  }

  /* Match timestamps from the incoming audio */
  if (GST_BUFFER_TIMESTAMP (buffer) != GST_CLOCK_TIME_NONE)
    visual->next_ts = GST_BUFFER_TIMESTAMP (buffer);

  GST_DEBUG_OBJECT (visual,
      "Input buffer has %d samples, time=%" G_GUINT64_FORMAT,
      GST_BUFFER_SIZE (buffer) / visual->bps, GST_BUFFER_TIMESTAMP (buffer));

  gst_adapter_push (visual->adapter, buffer);

  while (TRUE) {
    gboolean need_skip;
    const guint16 *data;

    GST_DEBUG_OBJECT (visual, "processing buffer");

    avail = gst_adapter_available (visual->adapter);
    GST_DEBUG_OBJECT (visual, "avail now %u", avail);

    /* we need at least 512 samples */
    if (avail < 512 * visual->bps)
      break;

    /* we need at least enough samples to make one frame */
    if (avail < visual->spf * visual->bps)
      break;

    if (visual->next_ts != -1) {
      gint64 qostime;

      /* QoS is done on running time */
      qostime = gst_segment_to_running_time (&visual->segment, GST_FORMAT_TIME,
          visual->next_ts);

      GST_OBJECT_LOCK (visual);
      /* check for QoS, don't compute buffers that are known to be late */
      need_skip = visual->earliest_time != -1 &&
          qostime <= visual->earliest_time;
      GST_OBJECT_UNLOCK (visual);

      if (need_skip) {
        GST_WARNING_OBJECT (visual,
            "QoS: skip ts: %" GST_TIME_FORMAT ", earliest: %" GST_TIME_FORMAT,
            GST_TIME_ARGS (qostime), GST_TIME_ARGS (visual->earliest_time));
        goto skip;
      }
    }

    /* Read 512 samples per channel */
    data =
        (const guint16 *) gst_adapter_peek (visual->adapter, 512 * visual->bps);

#if defined(VISUAL_API_VERSION) && VISUAL_API_VERSION >= 4000 && VISUAL_API_VERSION < 5000
    {
      VisBuffer *lbuf, *rbuf;
      guint16 ldata[512], rdata[512];
      VisAudioSampleRateType rate;

      lbuf = visual_buffer_new_with_buffer (ldata, sizeof (ldata), NULL);
      rbuf = visual_buffer_new_with_buffer (rdata, sizeof (rdata), NULL);

      if (visual->channels == 2) {
        for (i = 0; i < 512; i++) {
          ldata[i] = *data++;
          rdata[i] = *data++;
        }
      } else {
        for (i = 0; i < 512; i++) {
          ldata[i] = *data;
          rdata[i] = *data++;
        }
      }

      switch (visual->rate) {
        case 8000:
          rate = VISUAL_AUDIO_SAMPLE_RATE_8000;
          break;
        case 11250:
          rate = VISUAL_AUDIO_SAMPLE_RATE_11250;
          break;
        case 22500:
          rate = VISUAL_AUDIO_SAMPLE_RATE_22500;
          break;
        case 32000:
          rate = VISUAL_AUDIO_SAMPLE_RATE_32000;
          break;
        case 44100:
          rate = VISUAL_AUDIO_SAMPLE_RATE_44100;
          break;
        case 48000:
          rate = VISUAL_AUDIO_SAMPLE_RATE_48000;
          break;
        case 96000:
          rate = VISUAL_AUDIO_SAMPLE_RATE_96000;
          break;
        default:
          visual_object_unref (VISUAL_OBJECT (lbuf));
          visual_object_unref (VISUAL_OBJECT (rbuf));
          GST_ERROR_OBJECT (visual, "unsupported rate %d", visual->rate);
          ret = GST_FLOW_ERROR;
          goto beach;
          break;
      }

      visual_audio_samplepool_input_channel (visual->audio->samplepool,
          lbuf,
          rate, VISUAL_AUDIO_SAMPLE_FORMAT_S16, VISUAL_AUDIO_CHANNEL_LEFT);
      visual_audio_samplepool_input_channel (visual->audio->samplepool,
          rbuf,
          rate, VISUAL_AUDIO_SAMPLE_FORMAT_S16, VISUAL_AUDIO_CHANNEL_RIGHT);

      visual_object_unref (VISUAL_OBJECT (lbuf));
      visual_object_unref (VISUAL_OBJECT (rbuf));

    }
#else
    if (visual->channels == 2) {
      for (i = 0; i < 512; i++) {
        visual->audio->plugpcm[0][i] = *data++;
        visual->audio->plugpcm[1][i] = *data++;
      }
    } else {
      for (i = 0; i < 512; i++) {
        visual->audio->plugpcm[0][i] = *data;
        visual->audio->plugpcm[1][i] = *data++;
      }
    }
#endif

    /* alloc a buffer if we don't have one yet, this happens
     * when we pushed a buffer in this while loop before */
    if (outbuf == NULL) {
      ret = get_buffer (visual, &outbuf);
      if (ret != GST_FLOW_OK) {
        goto beach;
      }
    }
    visual_video_set_buffer (visual->video, GST_BUFFER_DATA (outbuf));
    visual_audio_analyze (visual->audio);
    visual_actor_run (visual->actor, visual->audio);
    visual_video_set_buffer (visual->video, NULL);
    GST_DEBUG_OBJECT (visual, "rendered one frame");

    GST_BUFFER_TIMESTAMP (outbuf) = visual->next_ts;
    GST_BUFFER_DURATION (outbuf) = visual->duration;

    ret = gst_pad_push (visual->srcpad, outbuf);
    outbuf = NULL;

  skip:
    /* interpolate next timestamp */
    if (visual->next_ts != -1)
      visual->next_ts += visual->duration;

    GST_DEBUG_OBJECT (visual, "finished frame, flushing %u samples from input",
        visual->spf);

    /* Flush out the number of samples per frame */
    gst_adapter_flush (visual->adapter, visual->spf * visual->bps);

    /* quit the loop if something was wrong */
    if (ret != GST_FLOW_OK)
      break;
  }

beach:

  if (outbuf != NULL)
    gst_buffer_unref (outbuf);

  gst_object_unref (visual);

  return ret;
}
Example #7
static GstFlowReturn
gst_visual_gl_chain (GstPad * pad, GstBuffer * buffer)
{
  GstGLBuffer *outbuf = NULL;
  GstVisualGL *visual = GST_VISUAL_GL (gst_pad_get_parent (pad));
  GstFlowReturn ret = GST_FLOW_OK;
  guint avail;

  GST_DEBUG_OBJECT (visual, "chain function called");

  /* If we don't have an output format yet, preallocate a buffer to try and
   * set one */
  if (GST_PAD_CAPS (visual->srcpad) == NULL) {
    ret = get_buffer (visual, &outbuf);
    if (ret != GST_FLOW_OK) {
      gst_buffer_unref (buffer);
      goto beach;
    }
  }

  /* resync on DISCONT */
  if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT)) {
    gst_adapter_clear (visual->adapter);
  }

  GST_DEBUG_OBJECT (visual,
      "Input buffer has %d samples, time=%" G_GUINT64_FORMAT,
      GST_BUFFER_SIZE (buffer) / visual->bps, GST_BUFFER_TIMESTAMP (buffer));

  gst_adapter_push (visual->adapter, buffer);

  while (TRUE) {
    gboolean need_skip;
    guint64 dist, timestamp;

    GST_DEBUG_OBJECT (visual, "processing buffer");

    avail = gst_adapter_available (visual->adapter);
    GST_DEBUG_OBJECT (visual, "avail now %u", avail);

    /* we need at least VISUAL_SAMPLES samples */
    if (avail < VISUAL_SAMPLES * visual->bps)
      break;

    /* we need at least enough samples to make one frame */
    if (avail < visual->spf * visual->bps)
      break;

    /* get timestamp of the current adapter byte */
    timestamp = gst_adapter_prev_timestamp (visual->adapter, &dist);
    if (GST_CLOCK_TIME_IS_VALID (timestamp)) {
      /* convert bytes to time */
      dist /= visual->bps;
      timestamp += gst_util_uint64_scale_int (dist, GST_SECOND, visual->rate);
    }

    if (timestamp != -1) {
      gint64 qostime;

      /* QoS is done on running time */
      qostime = gst_segment_to_running_time (&visual->segment, GST_FORMAT_TIME,
          timestamp);
      qostime += visual->duration;

      GST_OBJECT_LOCK (visual);
      /* check for QoS, don't compute buffers that are known to be late */
      need_skip = visual->earliest_time != -1 &&
          qostime <= visual->earliest_time;
      GST_OBJECT_UNLOCK (visual);

      if (need_skip) {
        GST_WARNING_OBJECT (visual,
            "QoS: skip ts: %" GST_TIME_FORMAT ", earliest: %" GST_TIME_FORMAT,
            GST_TIME_ARGS (qostime), GST_TIME_ARGS (visual->earliest_time));
        goto skip;
      }
    }

    /* alloc a buffer if we don't have one yet, this happens
     * when we pushed a buffer in this while loop before */
    if (outbuf == NULL) {
      ret = get_buffer (visual, &outbuf);
      if (ret != GST_FLOW_OK) {
        goto beach;
      }
    }

    /* render libvisual plugin to our target */
    gst_gl_display_use_fbo_v2 (visual->display,
        visual->width, visual->height, visual->fbo, visual->depthbuffer,
        visual->midtexture, (GLCB_V2) render_frame, (gpointer *) visual);

    /* gst video is top-down whereas the opengl plane is bottom-up */
    gst_gl_display_use_fbo (visual->display,
        visual->width, visual->height, visual->fbo, visual->depthbuffer,
        outbuf->texture, (GLCB) bottom_up_to_top_down,
        visual->width, visual->height, visual->midtexture,
        0, visual->width, 0, visual->height, GST_GL_DISPLAY_PROJECTION_ORTHO2D,
        (gpointer *) visual);

    GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
    GST_BUFFER_DURATION (outbuf) = visual->duration;

    ret = gst_pad_push (visual->srcpad, GST_BUFFER (outbuf));
    outbuf = NULL;

  skip:
    GST_DEBUG_OBJECT (visual, "finished frame, flushing %u samples from input",
        visual->spf);

    /* Flush out the number of samples per frame */
    gst_adapter_flush (visual->adapter, visual->spf * visual->bps);

    /* quit the loop if something was wrong */
    if (ret != GST_FLOW_OK)
      break;
  }

beach:

  if (outbuf != NULL)
    gst_gl_buffer_unref (outbuf);

  gst_object_unref (visual);

  return ret;
}
Example #8
static GstFlowReturn
gst_monoscope_chain (GstPad * pad, GstBuffer * inbuf)
{
  GstFlowReturn flow_ret = GST_FLOW_OK;
  GstMonoscope *monoscope;

  monoscope = GST_MONOSCOPE (GST_PAD_PARENT (pad));

  /* don't try to combine samples from discont buffer */
  if (GST_BUFFER_FLAG_IS_SET (inbuf, GST_BUFFER_FLAG_DISCONT)) {
    gst_adapter_clear (monoscope->adapter);
    monoscope->next_ts = GST_CLOCK_TIME_NONE;
  }

  /* Match timestamps from the incoming audio */
  if (GST_BUFFER_TIMESTAMP (inbuf) != GST_CLOCK_TIME_NONE)
    monoscope->next_ts = GST_BUFFER_TIMESTAMP (inbuf);

  GST_LOG_OBJECT (monoscope, "in buffer has %d samples, ts=%" GST_TIME_FORMAT,
      GST_BUFFER_SIZE (inbuf) / monoscope->bps,
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (inbuf)));

  gst_adapter_push (monoscope->adapter, inbuf);
  inbuf = NULL;

  /* Collect samples until we have enough for an output frame */
  while (flow_ret == GST_FLOW_OK) {
    gint16 *samples;
    GstBuffer *outbuf = NULL;
    guint32 *pixels, avail, bytesperframe;

    avail = gst_adapter_available (monoscope->adapter);
    GST_LOG_OBJECT (monoscope, "bytes avail now %u", avail);

    /* do negotiation if not done yet, so ->spf etc. is set */
    if (GST_PAD_CAPS (monoscope->srcpad) == NULL) {
      flow_ret = get_buffer (monoscope, &outbuf);
      if (flow_ret != GST_FLOW_OK)
        goto out;
      gst_buffer_unref (outbuf);
      outbuf = NULL;
    }

    bytesperframe = monoscope->spf * monoscope->bps;
    if (avail < bytesperframe)
      break;

    /* FIXME: something is wrong with QoS, we are skipping way too much
     * stuff even with very low CPU loads */
#if 0
    if (monoscope->next_ts != -1) {
      gboolean need_skip;
      gint64 qostime;

      qostime = gst_segment_to_running_time (&monoscope->segment,
          GST_FORMAT_TIME, monoscope->next_ts);

      GST_OBJECT_LOCK (monoscope);
      /* check for QoS, don't compute buffers that are known to be late */
      need_skip =
          GST_CLOCK_TIME_IS_VALID (monoscope->earliest_time) &&
          qostime <= monoscope->earliest_time;
      GST_OBJECT_UNLOCK (monoscope);

      if (need_skip) {
        GST_WARNING_OBJECT (monoscope,
            "QoS: skip ts: %" GST_TIME_FORMAT ", earliest: %" GST_TIME_FORMAT,
            GST_TIME_ARGS (qostime), GST_TIME_ARGS (monoscope->earliest_time));
        goto skip;
      }
    }
#endif

    samples = (gint16 *) gst_adapter_peek (monoscope->adapter, bytesperframe);

    if (monoscope->spf < 512) {
      gint16 in_data[512], i;

      for (i = 0; i < 512; ++i) {
        gdouble off;

        off = ((gdouble) i * (gdouble) monoscope->spf) / 512.0;
        in_data[i] = samples[MIN ((guint) off, monoscope->spf)];
      }
      pixels = monoscope_update (monoscope->visstate, in_data);
    } else {
      /* not really correct, but looks much prettier */
      pixels = monoscope_update (monoscope->visstate, samples);
    }

    flow_ret = get_buffer (monoscope, &outbuf);
    if (flow_ret != GST_FLOW_OK)
      goto out;

    memcpy (GST_BUFFER_DATA (outbuf), pixels, monoscope->outsize);

    GST_BUFFER_TIMESTAMP (outbuf) = monoscope->next_ts;
    GST_BUFFER_DURATION (outbuf) = monoscope->frame_duration;

    flow_ret = gst_pad_push (monoscope->srcpad, outbuf);

#if 0
  skip:
#endif

    if (GST_CLOCK_TIME_IS_VALID (monoscope->next_ts))
      monoscope->next_ts += monoscope->frame_duration;

    gst_adapter_flush (monoscope->adapter, bytesperframe);
  }

out:

  return flow_ret;
}
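
All of the visualizer chains above share the same adapter pattern: push the input buffer, then repeatedly consume exactly one frame's worth of bytes while enough data is queued. A minimal sketch of that loop using the GStreamer 1.x GstAdapter API (the rendering step is elided):

#include <gst/base/gstadapter.h>

/* Sketch: take one frame's worth of samples at a time from an adapter. */
static void
drain_frames (GstAdapter * adapter, gsize bytes_per_frame)
{
  while (gst_adapter_available (adapter) >= bytes_per_frame) {
    const guint8 *samples = gst_adapter_map (adapter, bytes_per_frame);

    /* ... render one output frame from `samples' here ... */
    (void) samples;

    gst_adapter_unmap (adapter);
    gst_adapter_flush (adapter, bytes_per_frame);
  }
}
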
static GstFlowReturn
gst_timestampoverlay_transform_frame_ip (GstVideoFilter * filter, GstVideoFrame * frame)
{
  GstTimeStampOverlay *overlay = GST_TIMESTAMPOVERLAY (filter);

  GST_DEBUG_OBJECT (overlay, "transform_frame_ip");

  GstClockTime buffer_time, stream_time, running_time, clock_time, latency,
      render_time, render_realtime;
  GstSegment *segment = &GST_BASE_TRANSFORM (overlay)->segment;
  unsigned char * imgdata;

  buffer_time = GST_BUFFER_TIMESTAMP (frame->buffer);

  if (!GST_CLOCK_TIME_IS_VALID (buffer_time)) {
    GST_DEBUG_OBJECT (filter, "Can't draw timestamps: buffer timestamp is "
        "invalid");
    return GST_FLOW_OK;
  }

  if (frame->info.stride[0] < (8 * frame->info.finfo->pixel_stride[0] * 64)) {
    GST_WARNING_OBJECT (filter, "Can't draw timestamps: video-frame is to narrow");
    return GST_FLOW_OK;
  }

  GST_DEBUG ("buffer with timestamp %" GST_TIME_FORMAT,
      GST_TIME_ARGS (buffer_time));

  stream_time = gst_segment_to_stream_time (segment, GST_FORMAT_TIME,
      buffer_time);
  running_time = gst_segment_to_running_time (segment, GST_FORMAT_TIME,
      buffer_time);
  clock_time = running_time + gst_element_get_base_time (GST_ELEMENT (overlay));

  latency = overlay->latency;
  if (GST_CLOCK_TIME_IS_VALID (latency))
    render_time = clock_time + latency;
  else
    render_time = clock_time;

  GST_OBJECT_LOCK (overlay->realtime_clock);
  render_realtime = gst_clock_unadjust_unlocked (
      overlay->realtime_clock, render_time);
  GST_OBJECT_UNLOCK (overlay->realtime_clock);

  imgdata = frame->data[0];

  /* Centre Vertically: */
  imgdata += (frame->info.height - 6 * 8) * frame->info.stride[0] / 2;

  /* Centre Horizontally: */
  imgdata += (frame->info.width - 64 * 8) * frame->info.finfo->pixel_stride[0]
      / 2;

  draw_timestamp (0, buffer_time, imgdata, frame->info.stride[0],
      frame->info.finfo->pixel_stride[0]);
  draw_timestamp (1, stream_time, imgdata, frame->info.stride[0],
      frame->info.finfo->pixel_stride[0]);
  draw_timestamp (2, running_time, imgdata, frame->info.stride[0],
      frame->info.finfo->pixel_stride[0]);
  draw_timestamp (3, clock_time, imgdata, frame->info.stride[0],
      frame->info.finfo->pixel_stride[0]);
  draw_timestamp (4, render_time, imgdata, frame->info.stride[0],
      frame->info.finfo->pixel_stride[0]);
  draw_timestamp (5, render_realtime, imgdata, frame->info.stride[0],
      frame->info.finfo->pixel_stride[0]);

  return GST_FLOW_OK;
}
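
The overlay above draws six related clocks derived from one buffer timestamp: buffer time, stream time, running time, clock time (running time plus the element's base time), and the render times with latency applied. A minimal sketch of the buffer-to-clock-time conversion, with illustrative names:

#include <gst/gst.h>

/* Sketch: derive the pipeline clock time at which a buffer plays. */
static GstClockTime
buffer_to_clock_time (GstElement * element, const GstSegment * segment,
    GstClockTime buffer_time)
{
  GstClockTime running_time = gst_segment_to_running_time (segment,
      GST_FORMAT_TIME, buffer_time);

  if (!GST_CLOCK_TIME_IS_VALID (running_time))
    return GST_CLOCK_TIME_NONE;

  return running_time + gst_element_get_base_time (element);
}
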
Example #10
static GstFlowReturn
gst_goom_chain (GstPad * pad, GstBuffer * buffer)
{
  GstGoom *goom;
  GstFlowReturn ret;
  GstBuffer *outbuf = NULL;

  goom = GST_GOOM (gst_pad_get_parent (pad));
    
  /* If we don't have an output format yet, preallocate a buffer to try and
   * set one */
  if (GST_PAD_CAPS (goom->srcpad) == NULL) {
    ret = get_buffer (goom, &outbuf);
    if (ret != GST_FLOW_OK) {
      gst_buffer_unref (buffer);
      goto beach;
    }
  }
  
  /* don't try to combine samples from discont buffer */
  if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT)) {
    gst_adapter_clear (goom->adapter);
    goom->next_ts = -1;
  }

  /* Match timestamps from the incoming audio */
  if (GST_BUFFER_TIMESTAMP (buffer) != GST_CLOCK_TIME_NONE)
    goom->next_ts = GST_BUFFER_TIMESTAMP (buffer);

  GST_DEBUG_OBJECT (goom,
      "Input buffer has %d samples, time=%" G_GUINT64_FORMAT,
      GST_BUFFER_SIZE (buffer) / goom->bps, GST_BUFFER_TIMESTAMP (buffer));

  /* Collect samples until we have enough for an output frame */
  gst_adapter_push (goom->adapter, buffer);

  ret = GST_FLOW_OK;

  while (TRUE) {
    const guint16 *data;
    gboolean need_skip;
    guchar *out_frame;
    gint i;
    guint avail, to_flush;

    avail = gst_adapter_available (goom->adapter);
    GST_DEBUG_OBJECT (goom, "avail now %u", avail);

    /* we need GOOM_SAMPLES to get a meaningful result from goom. */
    if (avail < (GOOM_SAMPLES * goom->bps))
      break;

    /* we also need enough samples to produce one frame at least */
    if (avail < goom->bpf)
      break;

    GST_DEBUG_OBJECT (goom, "processing buffer");

    if (goom->next_ts != -1) {
      gint64 qostime;

      qostime = gst_segment_to_running_time (&goom->segment, GST_FORMAT_TIME,
          goom->next_ts);

      GST_OBJECT_LOCK (goom);
      /* check for QoS, don't compute buffers that are known to be late */
      need_skip = goom->earliest_time != -1 && qostime <= goom->earliest_time;
      GST_OBJECT_UNLOCK (goom);

      if (need_skip) {
        GST_WARNING_OBJECT (goom,
            "QoS: skip ts: %" GST_TIME_FORMAT ", earliest: %" GST_TIME_FORMAT,
            GST_TIME_ARGS (qostime), GST_TIME_ARGS (goom->earliest_time));
        goto skip;
      }
    }

    /* get next GOOM_SAMPLES, we have at least this amount of samples */
    data =
        (const guint16 *) gst_adapter_peek (goom->adapter,
        GOOM_SAMPLES * goom->bps);

    if (goom->channels == 2) {
      for (i = 0; i < GOOM_SAMPLES; i++) {
        goom->datain[0][i] = *data++;
        goom->datain[1][i] = *data++;
      }
    } else {
      for (i = 0; i < GOOM_SAMPLES; i++) {
        goom->datain[0][i] = *data;
        goom->datain[1][i] = *data++;
      }
    }

    /* alloc a buffer if we don't have one yet, this happens
     * when we pushed a buffer in this while loop before */
    if (outbuf == NULL) {
      ret = get_buffer (goom, &outbuf);
      if (ret != GST_FLOW_OK) {
        goto beach;
      }
    }

    GST_BUFFER_TIMESTAMP (outbuf) = goom->next_ts;
    GST_BUFFER_DURATION (outbuf) = goom->duration;
    GST_BUFFER_SIZE (outbuf) = goom->outsize;

    out_frame = (guchar *) goom_update (goom->plugin, goom->datain, 0, 0);
    memcpy (GST_BUFFER_DATA (outbuf), out_frame, goom->outsize);

    GST_DEBUG ("Pushing frame with time=%" GST_TIME_FORMAT ", duration=%"
        GST_TIME_FORMAT, GST_TIME_ARGS (goom->next_ts),
        GST_TIME_ARGS (goom->duration));

    ret = gst_pad_push (goom->srcpad, outbuf);
    outbuf = NULL;

  skip:
    /* interpolate next timestamp */
    if (goom->next_ts != -1)
      goom->next_ts += goom->duration;

    /* Now flush the samples we needed for this frame, which might be more than
     * the samples we used (GOOM_SAMPLES). */
    to_flush = goom->bpf;

    GST_DEBUG_OBJECT (goom, "finished frame, flushing %u bytes from input",
        to_flush);
    gst_adapter_flush (goom->adapter, to_flush);

    if (ret != GST_FLOW_OK)
      break;
  }

  if (outbuf != NULL)
    gst_buffer_unref (outbuf);

beach:
  gst_object_unref (goom);

  return ret;
}
Example #11
static GstFlowReturn
gst_mimenc_chain (GstPad * pad, GstBuffer * in)
{
  GstMimEnc *mimenc;
  GstBuffer *out_buf = NULL, *buf = NULL;
  guchar *data;
  gint buffer_size;
  GstBuffer *header = NULL;
  GstFlowReturn res = GST_FLOW_OK;
  GstEvent *event = NULL;
  gboolean keyframe;

  g_return_val_if_fail (GST_IS_PAD (pad), GST_FLOW_ERROR);
  mimenc = GST_MIMENC (gst_pad_get_parent (pad));

  g_return_val_if_fail (GST_IS_MIMENC (mimenc), GST_FLOW_ERROR);

  GST_OBJECT_LOCK (mimenc);

  if (mimenc->segment.format == GST_FORMAT_UNDEFINED) {
    GST_WARNING_OBJECT (mimenc, "No new-segment received,"
        " initializing segment with time 0..-1");
    gst_segment_init (&mimenc->segment, GST_FORMAT_TIME);
    gst_segment_set_newsegment (&mimenc->segment,
        FALSE, 1.0, GST_FORMAT_TIME, 0, -1, 0);
  }

  if (mimenc->enc == NULL) {
    mimenc->enc = mimic_open ();
    if (mimenc->enc == NULL) {
      GST_WARNING_OBJECT (mimenc, "mimic_open error\n");
      res = GST_FLOW_ERROR;
      goto out_unlock;
    }

    if (!mimic_encoder_init (mimenc->enc, mimenc->res)) {
      GST_WARNING_OBJECT (mimenc, "mimic_encoder_init error\n");
      mimic_close (mimenc->enc);
      mimenc->enc = NULL;
      res = GST_FLOW_ERROR;
      goto out_unlock;
    }

    if (!mimic_get_property (mimenc->enc, "buffer_size", &mimenc->buffer_size)) {
      GST_WARNING_OBJECT (mimenc, "mimic_get_property('buffer_size') error\n");
      mimic_close (mimenc->enc);
      mimenc->enc = NULL;
      res = GST_FLOW_ERROR;
      goto out_unlock;
    }
  }

  buf = in;
  data = GST_BUFFER_DATA (buf);

  out_buf = gst_buffer_new_and_alloc (mimenc->buffer_size);
  GST_BUFFER_TIMESTAMP (out_buf) =
      gst_segment_to_running_time (&mimenc->segment, GST_FORMAT_TIME,
      GST_BUFFER_TIMESTAMP (buf));
  mimenc->last_buffer = GST_BUFFER_TIMESTAMP (out_buf);
  buffer_size = mimenc->buffer_size;
  keyframe = (mimenc->frames % MAX_INTERFRAMES) == 0 ? TRUE : FALSE;
  if (!mimic_encode_frame (mimenc->enc, data, GST_BUFFER_DATA (out_buf),
          &buffer_size, keyframe)) {
    GST_WARNING_OBJECT (mimenc, "mimic_encode_frame error\n");
    gst_buffer_unref (out_buf);
    gst_buffer_unref (buf);
    res = GST_FLOW_ERROR;
    goto out_unlock;
  }
  GST_BUFFER_SIZE (out_buf) = buffer_size;

  GST_DEBUG_OBJECT (mimenc, "incoming buf size %d, encoded size %d",
      GST_BUFFER_SIZE (buf), GST_BUFFER_SIZE (out_buf));
  ++mimenc->frames;

  /* now create the TCP framing header */
  header = gst_mimenc_create_tcp_header (mimenc, buffer_size,
      GST_BUFFER_TIMESTAMP (out_buf), keyframe, FALSE);

  if (!header) {
    gst_buffer_unref (out_buf);
    GST_DEBUG_OBJECT (mimenc, "header not created succesfully");
    res = GST_FLOW_ERROR;
    goto out_unlock;
  }

  if (mimenc->need_newsegment) {
    event = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0, -1, 0);
    mimenc->need_newsegment = FALSE;
  }

  GST_OBJECT_UNLOCK (mimenc);

  if (event) {
    if (!gst_pad_push_event (mimenc->srcpad, event))
      GST_WARNING_OBJECT (mimenc, "Failed to push NEWSEGMENT event");
  }

  res = gst_pad_push (mimenc->srcpad, header);
  if (res != GST_FLOW_OK) {
    gst_buffer_unref (out_buf);
    goto out;
  }

  res = gst_pad_push (mimenc->srcpad, out_buf);

out:
  if (buf)
    gst_buffer_unref (buf);
  gst_object_unref (mimenc);

  return res;

out_unlock:
  GST_OBJECT_UNLOCK (mimenc);
  goto out;

}
/**
 * gst_base_video_encoder_finish_frame:
 * @base_video_encoder: a #GstBaseVideoEncoder
 * @frame: an encoded #GstVideoFrame 
 *
 * @frame must have a valid encoded data buffer, whose metadata fields
 * are then set appropriately according to the frame data, or no buffer
 * at all if the frame should be dropped.
 * The buffer is subsequently pushed downstream or provided to
 * @shape_output. In any case, the frame is considered finished and
 * released.
 *
 * Returns: a #GstFlowReturn resulting from sending data downstream
 */
GstFlowReturn
gst_base_video_encoder_finish_frame (GstBaseVideoEncoder * base_video_encoder,
    GstVideoFrame * frame)
{
  GstFlowReturn ret = GST_FLOW_OK;
  GstBaseVideoEncoderClass *base_video_encoder_class;
  GList *l;

  base_video_encoder_class =
      GST_BASE_VIDEO_ENCODER_GET_CLASS (base_video_encoder);

  GST_LOG_OBJECT (base_video_encoder,
      "finish frame fpn %d", frame->presentation_frame_number);

  /* FIXME get rid of this ?
   * seems a roundabout way that adds little benefit to simply get
   * and subsequently set.  subclass is adult enough to set_caps itself ...
   * so simply check/ensure/assert that src pad caps are set by now */
  if (!base_video_encoder->set_output_caps) {
    if (!GST_PAD_CAPS (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder))) {
      GstCaps *caps;

      if (base_video_encoder_class->get_caps) {
        caps = base_video_encoder_class->get_caps (base_video_encoder);
      } else {
        caps = gst_caps_new_simple ("video/unknown", NULL);
      }
      GST_DEBUG_OBJECT (base_video_encoder, "src caps %" GST_PTR_FORMAT, caps);
      gst_pad_set_caps (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder),
          caps);
      gst_caps_unref (caps);
    }
    base_video_encoder->set_output_caps = TRUE;
  }

  /* Push all pending events that arrived before this frame */
  for (l = base_video_encoder->base_video_codec.frames; l; l = l->next) {
    GstVideoFrame *tmp = l->data;

    if (tmp->events) {
      GList *k;

      for (k = g_list_last (tmp->events); k; k = k->prev)
        gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder),
            k->data);
      g_list_free (tmp->events);
      tmp->events = NULL;
    }

    if (tmp == frame)
      break;
  }

  if (frame->force_keyframe) {
    GstClockTime stream_time;
    GstClockTime running_time;
    GstEvent *ev;

    running_time =
        gst_segment_to_running_time (&GST_BASE_VIDEO_CODEC
        (base_video_encoder)->segment, GST_FORMAT_TIME,
        frame->presentation_timestamp);
    stream_time =
        gst_segment_to_stream_time (&GST_BASE_VIDEO_CODEC
        (base_video_encoder)->segment, GST_FORMAT_TIME,
        frame->presentation_timestamp);

    /* re-use upstream event if any so it also conveys any additional
     * info upstream arranged in there */
    GST_OBJECT_LOCK (base_video_encoder);
    if (base_video_encoder->force_keyunit_event) {
      ev = base_video_encoder->force_keyunit_event;
      base_video_encoder->force_keyunit_event = NULL;
    } else {
      ev = gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM,
          gst_structure_new ("GstForceKeyUnit", NULL));
    }
    GST_OBJECT_UNLOCK (base_video_encoder);

    gst_structure_set (ev->structure,
        "timestamp", G_TYPE_UINT64, frame->presentation_timestamp,
        "stream-time", G_TYPE_UINT64, stream_time,
        "running-time", G_TYPE_UINT64, running_time, NULL);

    gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder), ev);
  }

  /* no buffer data means this frame is skipped/dropped */
  if (!frame->src_buffer) {
    GST_DEBUG_OBJECT (base_video_encoder, "skipping frame %" GST_TIME_FORMAT,
        GST_TIME_ARGS (frame->presentation_timestamp));
    goto done;
  }

  if (frame->is_sync_point) {
    GST_LOG_OBJECT (base_video_encoder, "key frame");
    base_video_encoder->distance_from_sync = 0;
    GST_BUFFER_FLAG_UNSET (frame->src_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  } else {
    GST_BUFFER_FLAG_SET (frame->src_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  }

  frame->distance_from_sync = base_video_encoder->distance_from_sync;
  base_video_encoder->distance_from_sync++;

  frame->decode_frame_number = frame->system_frame_number - 1;
  if (frame->decode_frame_number < 0) {
    frame->decode_timestamp = 0;
  } else {
    frame->decode_timestamp = gst_util_uint64_scale (frame->decode_frame_number,
        GST_SECOND * GST_BASE_VIDEO_CODEC (base_video_encoder)->state.fps_d,
        GST_BASE_VIDEO_CODEC (base_video_encoder)->state.fps_n);
  }

  GST_BUFFER_TIMESTAMP (frame->src_buffer) = frame->presentation_timestamp;
  GST_BUFFER_DURATION (frame->src_buffer) = frame->presentation_duration;
  GST_BUFFER_OFFSET (frame->src_buffer) = frame->decode_timestamp;

  /* update rate estimate */
  GST_BASE_VIDEO_CODEC (base_video_encoder)->bytes +=
      GST_BUFFER_SIZE (frame->src_buffer);
  if (GST_CLOCK_TIME_IS_VALID (frame->presentation_duration)) {
    GST_BASE_VIDEO_CODEC (base_video_encoder)->time +=
        frame->presentation_duration;
  } else {
    /* better none than nothing valid */
    GST_BASE_VIDEO_CODEC (base_video_encoder)->time = GST_CLOCK_TIME_NONE;
  }

  if (G_UNLIKELY (GST_BASE_VIDEO_CODEC (base_video_encoder)->discont)) {
    GST_LOG_OBJECT (base_video_encoder, "marking discont");
    GST_BUFFER_FLAG_SET (frame->src_buffer, GST_BUFFER_FLAG_DISCONT);
    GST_BASE_VIDEO_CODEC (base_video_encoder)->discont = FALSE;
  }

  gst_buffer_set_caps (GST_BUFFER (frame->src_buffer),
      GST_PAD_CAPS (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder)));

  if (base_video_encoder_class->shape_output) {
    ret = base_video_encoder_class->shape_output (base_video_encoder, frame);
  } else {
    ret =
        gst_pad_push (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder),
        frame->src_buffer);
  }
  frame->src_buffer = NULL;

done:
  /* handed out */
  GST_BASE_VIDEO_CODEC (base_video_encoder)->frames =
      g_list_remove (GST_BASE_VIDEO_CODEC (base_video_encoder)->frames, frame);

  gst_base_video_codec_free_frame (frame);

  return ret;
}
static GstFlowReturn
gst_identity_transform_ip (GstBaseTransform * trans, GstBuffer * buf)
{
  GstFlowReturn ret = GST_FLOW_OK;
  GstIdentity *identity = GST_IDENTITY (trans);
  GstClockTime rundts = GST_CLOCK_TIME_NONE;
  GstClockTime runpts = GST_CLOCK_TIME_NONE;
  GstClockTime ts, duration, runtimestamp;
  gsize size;

  size = gst_buffer_get_size (buf);

  if (identity->check_imperfect_timestamp)
    gst_identity_check_imperfect_timestamp (identity, buf);
  if (identity->check_imperfect_offset)
    gst_identity_check_imperfect_offset (identity, buf);

  /* update prev values */
  identity->prev_timestamp = GST_BUFFER_TIMESTAMP (buf);
  identity->prev_duration = GST_BUFFER_DURATION (buf);
  identity->prev_offset_end = GST_BUFFER_OFFSET_END (buf);
  identity->prev_offset = GST_BUFFER_OFFSET (buf);

  if (identity->error_after >= 0) {
    identity->error_after--;
    if (identity->error_after == 0)
      goto error_after;
  }

  if (identity->drop_probability > 0.0) {
    if ((gfloat) (1.0 * rand () / (RAND_MAX)) < identity->drop_probability)
      goto dropped;
  }

  if (GST_BUFFER_FLAG_IS_SET (buf, identity->drop_buffer_flags))
    goto dropped;

  if (identity->dump) {
    GstMapInfo info;

    if (gst_buffer_map (buf, &info, GST_MAP_READ)) {
      gst_util_dump_mem (info.data, info.size);
      gst_buffer_unmap (buf, &info);
    }
  }

  if (!identity->silent) {
    gst_identity_update_last_message_for_buffer (identity, "chain", buf, size);
  }

  if (identity->datarate > 0) {
    GstClockTime time = gst_util_uint64_scale_int (identity->offset,
        GST_SECOND, identity->datarate);

    GST_BUFFER_PTS (buf) = GST_BUFFER_DTS (buf) = time;
    GST_BUFFER_DURATION (buf) = size * GST_SECOND / identity->datarate;
  }

  if (identity->signal_handoffs)
    g_signal_emit (identity, gst_identity_signals[SIGNAL_HANDOFF], 0, buf);

  if (trans->segment.format == GST_FORMAT_TIME) {
    rundts = gst_segment_to_running_time (&trans->segment,
        GST_FORMAT_TIME, GST_BUFFER_DTS (buf));
    runpts = gst_segment_to_running_time (&trans->segment,
        GST_FORMAT_TIME, GST_BUFFER_PTS (buf));
  }

  if (GST_CLOCK_TIME_IS_VALID (rundts))
    runtimestamp = rundts;
  else if (GST_CLOCK_TIME_IS_VALID (runpts))
    runtimestamp = runpts;
  else
    runtimestamp = 0;
  ret = gst_identity_do_sync (identity, runtimestamp);

  identity->offset += size;

  if (identity->sleep_time && ret == GST_FLOW_OK)
    g_usleep (identity->sleep_time);

  if (identity->single_segment && (trans->segment.format == GST_FORMAT_TIME)
      && (ret == GST_FLOW_OK)) {
    GST_BUFFER_DTS (buf) = rundts;
    GST_BUFFER_PTS (buf) = runpts;
    GST_BUFFER_OFFSET (buf) = GST_BUFFER_OFFSET_NONE;
    GST_BUFFER_OFFSET_END (buf) = GST_BUFFER_OFFSET_NONE;
  }

  return ret;

  /* ERRORS */
error_after:
  {
    GST_ELEMENT_ERROR (identity, CORE, FAILED,
        (_("Failed after iterations as requested.")), (NULL));
    return GST_FLOW_ERROR;
  }
dropped:
  {
    if (!identity->silent) {
      gst_identity_update_last_message_for_buffer (identity, "dropping", buf,
          size);
    }

    ts = GST_BUFFER_TIMESTAMP (buf);
    if (GST_CLOCK_TIME_IS_VALID (ts)) {
      duration = GST_BUFFER_DURATION (buf);
      gst_pad_push_event (GST_BASE_TRANSFORM_SRC_PAD (identity),
          gst_event_new_gap (ts, duration));
    }

    /* return DROPPED to basetransform. */
    return GST_BASE_TRANSFORM_FLOW_DROPPED;
  }
}
Example #14
static GstFlowReturn
gst_decklink_video_sink_prepare (GstBaseSink * bsink, GstBuffer * buffer)
{
  GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (bsink);
  GstVideoFrame vframe;
  IDeckLinkMutableVideoFrame *frame;
  guint8 *outdata, *indata;
  GstFlowReturn flow_ret;
  HRESULT ret;
  GstClockTime timestamp, duration;
  GstClockTime running_time, running_time_duration;
  GstClockTime latency, render_delay;
  GstClockTimeDiff ts_offset;
  gint i;
  GstDecklinkVideoFormat caps_format;
  BMDPixelFormat format;
  gint bpp;
  GstVideoTimeCodeMeta *tc_meta;

  GST_DEBUG_OBJECT (self, "Preparing buffer %p", buffer);

  // FIXME: Handle no timestamps
  if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer)) {
    return GST_FLOW_ERROR;
  }

  caps_format = gst_decklink_type_from_video_format (self->info.finfo->format);
  format = gst_decklink_pixel_format_from_type (caps_format);
  bpp = gst_decklink_bpp_from_type (caps_format);

  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  duration = GST_BUFFER_DURATION (buffer);
  if (duration == GST_CLOCK_TIME_NONE) {
    duration =
        gst_util_uint64_scale_int (GST_SECOND, self->info.fps_d,
        self->info.fps_n);
  }
  running_time =
      gst_segment_to_running_time (&GST_BASE_SINK_CAST (self)->segment,
      GST_FORMAT_TIME, timestamp);
  running_time_duration =
      gst_segment_to_running_time (&GST_BASE_SINK_CAST (self)->segment,
      GST_FORMAT_TIME, timestamp + duration) - running_time;

  /* See gst_base_sink_adjust_time() */
  latency = gst_base_sink_get_latency (bsink);
  render_delay = gst_base_sink_get_render_delay (bsink);
  ts_offset = gst_base_sink_get_ts_offset (bsink);

  running_time += latency;

  if (ts_offset < 0) {
    ts_offset = -ts_offset;
    if ((GstClockTime) ts_offset < running_time)
      running_time -= ts_offset;
    else
      running_time = 0;
  } else {
    running_time += ts_offset;
  }

  if (running_time > render_delay)
    running_time -= render_delay;
  else
    running_time = 0;

  ret = self->output->output->CreateVideoFrame (self->info.width,
      self->info.height, self->info.stride[0], format, bmdFrameFlagDefault,
      &frame);
  if (ret != S_OK) {
    GST_ELEMENT_ERROR (self, STREAM, FAILED,
        (NULL), ("Failed to create video frame: 0x%08x", ret));
    return GST_FLOW_ERROR;
  }

  if (!gst_video_frame_map (&vframe, &self->info, buffer, GST_MAP_READ)) {
    GST_ERROR_OBJECT (self, "Failed to map video frame");
    flow_ret = GST_FLOW_ERROR;
    goto out;
  }

  frame->GetBytes ((void **) &outdata);
  indata = (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (&vframe, 0);
  for (i = 0; i < self->info.height; i++) {
    memcpy (outdata, indata, GST_VIDEO_FRAME_WIDTH (&vframe) * bpp);
    indata += GST_VIDEO_FRAME_PLANE_STRIDE (&vframe, 0);
    outdata += frame->GetRowBytes ();
  }
  gst_video_frame_unmap (&vframe);

  tc_meta = gst_buffer_get_video_time_code_meta (buffer);
  if (tc_meta) {
    BMDTimecodeFlags bflags = (BMDTimecodeFlags) 0;
    gchar *tc_str;

    if (((GstVideoTimeCodeFlags) tc_meta->tc.config.flags) &
        GST_VIDEO_TIME_CODE_FLAGS_DROP_FRAME)
      bflags = (BMDTimecodeFlags) (bflags | bmdTimecodeIsDropFrame);
    else
      bflags = (BMDTimecodeFlags) (bflags | bmdTimecodeFlagDefault);
    if (tc_meta->tc.field_count == 2)
      bflags = (BMDTimecodeFlags) (bflags | bmdTimecodeFieldMark);

    tc_str = gst_video_time_code_to_string (&tc_meta->tc);
    ret = frame->SetTimecodeFromComponents (self->timecode_format,
        (uint8_t) tc_meta->tc.hours,
        (uint8_t) tc_meta->tc.minutes,
        (uint8_t) tc_meta->tc.seconds, (uint8_t) tc_meta->tc.frames, bflags);
    if (ret != S_OK) {
      GST_ERROR_OBJECT (self,
          "Failed to set timecode %s to video frame: 0x%08x", tc_str, ret);
      flow_ret = GST_FLOW_ERROR;
      g_free (tc_str);
      goto out;
    }
    GST_DEBUG_OBJECT (self, "Set frame timecode to %s", tc_str);
    g_free (tc_str);
  }

  convert_to_internal_clock (self, &running_time, &running_time_duration);

  GST_LOG_OBJECT (self, "Scheduling video frame %p at %" GST_TIME_FORMAT
      " with duration %" GST_TIME_FORMAT, frame, GST_TIME_ARGS (running_time),
      GST_TIME_ARGS (running_time_duration));

  ret = self->output->output->ScheduleVideoFrame (frame,
      running_time, running_time_duration, GST_SECOND);
  if (ret != S_OK) {
    GST_ELEMENT_ERROR (self, STREAM, FAILED,
        (NULL), ("Failed to schedule frame: 0x%08x", ret));
    flow_ret = GST_FLOW_ERROR;
    goto out;
  }

  flow_ret = GST_FLOW_OK;

out:

  frame->Release ();

  return flow_ret;
}
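
/* Editor's sketch, not part of the example above: a minimal, hypothetical
 * helper showing the same base-sink time adjustment in isolation (see
 * gst_base_sink_adjust_time()): add the configured latency, apply the
 * ts-offset clamped at 0, then subtract the render delay, again clamped
 * at 0. The helper name is an assumption for illustration. */
static GstClockTime
adjust_running_time (GstClockTime running_time, GstClockTime latency,
    GstClockTimeDiff ts_offset, GstClockTime render_delay)
{
  running_time += latency;

  if (ts_offset < 0) {
    ts_offset = -ts_offset;
    if ((GstClockTime) ts_offset < running_time)
      running_time -= ts_offset;
    else
      running_time = 0;
  } else {
    running_time += ts_offset;
  }

  if (running_time > render_delay)
    running_time -= render_delay;
  else
    running_time = 0;

  return running_time;
}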
Example #15
static MpegPsPadData *
mpegpsmux_choose_best_stream (MpegPsMux * mux)
{
  /* Choose which stream to mux from next */

  MpegPsPadData *best = NULL;
  GstCollectData *c_best = NULL;
  GSList *walk;

  for (walk = mux->collect->data; walk != NULL; walk = g_slist_next (walk)) {
    GstCollectData *c_data = (GstCollectData *) walk->data;
    MpegPsPadData *ps_data = (MpegPsPadData *) walk->data;

    if (ps_data->eos == FALSE) {
      if (ps_data->queued_buf == NULL) {
        GstBuffer *buf;

        ps_data->queued_buf = buf =
            gst_collect_pads_peek (mux->collect, c_data);

        if (buf != NULL) {
          if (ps_data->prepare_func) {
            buf = ps_data->prepare_func (buf, ps_data, mux);
            if (buf) {          /* Take the prepared buffer instead */
              gst_buffer_unref (ps_data->queued_buf);
              ps_data->queued_buf = buf;
            } else {            /* If data preparation returned NULL, use unprepared one */
              buf = ps_data->queued_buf;
            }
          }
          if (GST_BUFFER_TIMESTAMP (buf) != GST_CLOCK_TIME_NONE) {
            /* Ignore timestamps that go backward for now. FIXME: Handle all
             * incoming PTS */
            if (ps_data->last_ts == GST_CLOCK_TIME_NONE ||
                ps_data->last_ts < GST_BUFFER_TIMESTAMP (buf)) {
              ps_data->cur_ts = ps_data->last_ts =
                  gst_segment_to_running_time (&c_data->segment,
                  GST_FORMAT_TIME, GST_BUFFER_TIMESTAMP (buf));
            } else {
              GST_DEBUG_OBJECT (mux, "Ignoring PTS that has gone backward");
            }
          } else
            ps_data->cur_ts = GST_CLOCK_TIME_NONE;

          GST_DEBUG_OBJECT (mux, "Pulled buffer with ts %" GST_TIME_FORMAT
              " (uncorrected ts %" GST_TIME_FORMAT " %" G_GUINT64_FORMAT
              ") for PID 0x%04x",
              GST_TIME_ARGS (ps_data->cur_ts),
              GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
              GST_BUFFER_TIMESTAMP (buf), ps_data->stream_id);

          /* Choose a stream we've never seen a timestamp for to ensure
           * we push enough buffers from it to reach a timestamp */
          if (ps_data->last_ts == GST_CLOCK_TIME_NONE) {
            best = ps_data;
            c_best = c_data;
          }
        } else {
          ps_data->eos = TRUE;
          continue;
        }
      }

      /* If we don't yet have a best pad, take this one, otherwise take
       * whichever has the oldest timestamp */
      if (best != NULL) {
        if (ps_data->last_ts != GST_CLOCK_TIME_NONE &&
            best->last_ts != GST_CLOCK_TIME_NONE &&
            ps_data->last_ts < best->last_ts) {
          best = ps_data;
          c_best = c_data;
        }
      } else {
        best = ps_data;
        c_best = c_data;
      }
    }
  }
  if (c_best) {
    gst_buffer_unref (gst_collect_pads_pop (mux->collect, c_best));
  }

  return best;
}
Example #16
static GstFlowReturn
gst_live_adder_chain (GstPad * pad, GstBuffer * buffer)
{
  GstLiveAdder *adder = GST_LIVE_ADDER (gst_pad_get_parent_element (pad));
  GstLiveAdderPadPrivate *padprivate = NULL;
  GstFlowReturn ret = GST_FLOW_OK;
  GList *item = NULL;
  GstClockTime skip = 0;
  gint64 drift = 0;             /* Positive if new buffer after old buffer */

  GST_OBJECT_LOCK (adder);

  ret = adder->srcresult;

  GST_DEBUG ("Incoming buffer time:%" GST_TIME_FORMAT " duration:%"
      GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (buffer)));

  if (ret != GST_FLOW_OK) {
    GST_DEBUG_OBJECT (adder, "Passing non-ok result from src: %s",
        gst_flow_get_name (ret));
    gst_buffer_unref (buffer);
    goto out;
  }

  padprivate = gst_pad_get_element_private (pad);

  if (!padprivate) {
    ret = GST_FLOW_NOT_LINKED;
    gst_buffer_unref (buffer);
    goto out;
  }

  if (padprivate->eos) {
    GST_DEBUG_OBJECT (adder, "Received buffer after EOS");
    ret = GST_FLOW_UNEXPECTED;
    gst_buffer_unref (buffer);
    goto out;
  }

  if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
    goto invalid_timestamp;

  if (padprivate->segment.format == GST_FORMAT_UNDEFINED) {
    GST_WARNING_OBJECT (adder, "No new-segment received,"
        " initializing segment with time 0..-1");
    gst_segment_init (&padprivate->segment, GST_FORMAT_TIME);
    gst_segment_set_newsegment (&padprivate->segment,
        FALSE, 1.0, GST_FORMAT_TIME, 0, -1, 0);
  }

  if (padprivate->segment.format != GST_FORMAT_TIME)
    goto invalid_segment;

  buffer = gst_buffer_make_metadata_writable (buffer);

  drift = GST_BUFFER_TIMESTAMP (buffer) - padprivate->expected_timestamp;

  /* Check for timestamp discontinuities that arrive without the DISCONT flag */
  if (GST_CLOCK_TIME_IS_VALID (padprivate->expected_timestamp) &&
      !GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT) &&
      (drift != 0)) {
    GST_LOG_OBJECT (adder,
        "Timestamp discontinuity without the DISCONT flag set"
        " (expected %" GST_TIME_FORMAT ", got %" GST_TIME_FORMAT
        " drift:%" G_GINT64_FORMAT "ms)",
        GST_TIME_ARGS (padprivate->expected_timestamp),
        GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)), drift / GST_MSECOND);

    /* We accept drifts of 10ms */
    if (ABS (drift) < (10 * GST_MSECOND)) {
      GST_DEBUG ("Correcting minor drift");
      GST_BUFFER_TIMESTAMP (buffer) = padprivate->expected_timestamp;
    }
  }


  /* If there is no duration, let's set one */
  if (!GST_BUFFER_DURATION_IS_VALID (buffer)) {
    GST_BUFFER_DURATION (buffer) =
        gst_audio_duration_from_pad_buffer (pad, buffer);
    padprivate->expected_timestamp = GST_CLOCK_TIME_NONE;
  } else {
    padprivate->expected_timestamp = GST_BUFFER_TIMESTAMP (buffer) +
        GST_BUFFER_DURATION (buffer);
  }


  /*
   * Let's clip the buffer to the segment (so we don't have to worry about
   * clipping afterwards).
   * This should also guarantee us that we'll have valid timestamps and
   * durations afterwards
   */

  buffer = gst_audio_buffer_clip (buffer, &padprivate->segment, adder->rate,
      adder->bps);

  /* buffer can be NULL if it's completely outside of the segment */
  if (!buffer) {
    GST_DEBUG ("Buffer completely outside of configured segment, dropping it");
    goto out;
  }

  /*
   * Make sure all incoming buffers share the same timestamping
   */
  GST_BUFFER_TIMESTAMP (buffer) =
      gst_segment_to_running_time (&padprivate->segment,
      padprivate->segment.format, GST_BUFFER_TIMESTAMP (buffer));


  if (GST_CLOCK_TIME_IS_VALID (adder->next_timestamp) &&
      GST_BUFFER_TIMESTAMP (buffer) < adder->next_timestamp) {
    if (GST_BUFFER_TIMESTAMP (buffer) + GST_BUFFER_DURATION (buffer) <
        adder->next_timestamp) {
      GST_DEBUG_OBJECT (adder, "Buffer is late, dropping (ts: %" GST_TIME_FORMAT
          " duration: %" GST_TIME_FORMAT ")",
          GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)),
          GST_TIME_ARGS (GST_BUFFER_DURATION (buffer)));
      gst_buffer_unref (buffer);
      goto out;
    } else {
      skip = adder->next_timestamp - GST_BUFFER_TIMESTAMP (buffer);
      GST_DEBUG_OBJECT (adder, "Buffer is partially late, skipping %"
          GST_TIME_FORMAT, GST_TIME_ARGS (skip));
    }
  }

  /* If our new buffer's head is earlier than the queue's head, let's wake up;
   * we may not have to wait as long
   */
  if (adder->clock_id &&
      g_queue_peek_head (adder->buffers) != NULL &&
      GST_BUFFER_TIMESTAMP (buffer) + skip <
      GST_BUFFER_TIMESTAMP (g_queue_peek_head (adder->buffers)))
    gst_clock_id_unschedule (adder->clock_id);

  for (item = g_queue_peek_head_link (adder->buffers);
      item; item = g_list_next (item)) {
    GstBuffer *oldbuffer = item->data;
    GstClockTime old_skip = 0;
    GstClockTime mix_duration = 0;
    GstClockTime mix_start = 0;
    GstClockTime mix_end = 0;

    /* We haven't reached our place yet */
    if (GST_BUFFER_TIMESTAMP (buffer) + skip >=
        GST_BUFFER_TIMESTAMP (oldbuffer) + GST_BUFFER_DURATION (oldbuffer))
      continue;

    /* We're past our place, let's insert ourselves here */
    if (GST_BUFFER_TIMESTAMP (buffer) + GST_BUFFER_DURATION (buffer) <=
        GST_BUFFER_TIMESTAMP (oldbuffer))
      break;

    /* if we reach this spot, we have overlap, so we must mix */

    /* First make a subbuffer with the non-overlapping part */
    if (GST_BUFFER_TIMESTAMP (buffer) + skip < GST_BUFFER_TIMESTAMP (oldbuffer)) {
      GstBuffer *subbuffer = NULL;
      GstClockTime subbuffer_duration = GST_BUFFER_TIMESTAMP (oldbuffer) -
          (GST_BUFFER_TIMESTAMP (buffer) + skip);

      subbuffer = gst_buffer_create_sub (buffer,
          gst_live_adder_length_from_duration (adder, skip),
          gst_live_adder_length_from_duration (adder, subbuffer_duration));

      GST_BUFFER_TIMESTAMP (subbuffer) = GST_BUFFER_TIMESTAMP (buffer) + skip;
      GST_BUFFER_DURATION (subbuffer) = subbuffer_duration;

      skip += subbuffer_duration;

      g_queue_insert_before (adder->buffers, item, subbuffer);
    }

    /* Now we are on the overlapping part */
    oldbuffer = gst_buffer_make_writable (oldbuffer);
    item->data = oldbuffer;

    old_skip = GST_BUFFER_TIMESTAMP (buffer) + skip -
        GST_BUFFER_TIMESTAMP (oldbuffer);

    mix_start = GST_BUFFER_TIMESTAMP (oldbuffer) + old_skip;

    if (GST_BUFFER_TIMESTAMP (buffer) + GST_BUFFER_DURATION (buffer) <
        GST_BUFFER_TIMESTAMP (oldbuffer) + GST_BUFFER_DURATION (oldbuffer))
      mix_end = GST_BUFFER_TIMESTAMP (buffer) + GST_BUFFER_DURATION (buffer);
    else
      mix_end = GST_BUFFER_TIMESTAMP (oldbuffer) +
          GST_BUFFER_DURATION (oldbuffer);

    mix_duration = mix_end - mix_start;

    adder->func (GST_BUFFER_DATA (oldbuffer) +
        gst_live_adder_length_from_duration (adder, old_skip),
        GST_BUFFER_DATA (buffer) +
        gst_live_adder_length_from_duration (adder, skip),
        gst_live_adder_length_from_duration (adder, mix_duration));

    skip += mix_duration;
  }

  g_cond_broadcast (adder->not_empty_cond);

  if (skip == GST_BUFFER_DURATION (buffer)) {
    gst_buffer_unref (buffer);
  } else {
    if (skip) {
      GstClockTime subbuffer_duration = GST_BUFFER_DURATION (buffer) - skip;
      GstClockTime subbuffer_ts = GST_BUFFER_TIMESTAMP (buffer) + skip;

      buffer = gst_buffer_create_sub (buffer,
          gst_live_adder_length_from_duration (adder, skip),
          gst_live_adder_length_from_duration (adder, subbuffer_duration));
      GST_BUFFER_TIMESTAMP (buffer) = subbuffer_ts;
      GST_BUFFER_DURATION (buffer) = subbuffer_duration;
    }

    if (item)
      g_queue_insert_before (adder->buffers, item, buffer);
    else
      g_queue_push_tail (adder->buffers, buffer);
  }

out:

  GST_OBJECT_UNLOCK (adder);
  gst_object_unref (adder);

  return ret;

invalid_timestamp:

  GST_OBJECT_UNLOCK (adder);
  gst_buffer_unref (buffer);
  GST_ELEMENT_ERROR (adder, STREAM, FAILED,
      ("Buffer without a valid timestamp received"),
      ("Invalid timestamp received on buffer"));

  return GST_FLOW_ERROR;

invalid_segment:
  {
    const gchar *format = gst_format_get_name (padprivate->segment.format);
    GST_OBJECT_UNLOCK (adder);
    gst_buffer_unref (buffer);
    GST_ELEMENT_ERROR (adder, STREAM, FAILED,
        ("This element only supports TIME segments, received another type"),
        ("Received a segment of type %s, only TIME segments are supported",
            format));

    return GST_FLOW_ERROR;
  }

}
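
/* Editor's sketch, not part of the example above: what a helper like
 * gst_live_adder_length_from_duration() plausibly computes for raw audio
 * (the name and exact rounding here are assumptions): scale the GstClockTime
 * duration to a sample count at @rate, then multiply by the bytes per
 * sample frame so adjacent subbuffers line up sample-exactly. */
static guint
length_from_duration (GstClockTime duration, gint rate, gint bps)
{
  /* samples = duration * rate / GST_SECOND, computed without overflow */
  guint64 samples = gst_util_uint64_scale (duration, rate, GST_SECOND);

  return (guint) (samples * bps);
}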
Example #17
static GstFlowReturn
gst_rdt_manager_chain_rdt (GstPad * pad, GstBuffer * buffer)
{
  GstFlowReturn res;
  GstRDTManager *rdtmanager;
  GstRDTManagerSession *session;
  GstClockTime timestamp;
  GstRDTPacket packet;
  guint32 ssrc;
  guint8 pt;
  gboolean more;

  rdtmanager = GST_RDT_MANAGER (GST_PAD_PARENT (pad));

  GST_DEBUG_OBJECT (rdtmanager, "got RDT packet");

  ssrc = 0;
  pt = 0;

  GST_DEBUG_OBJECT (rdtmanager, "SSRC %08x, PT %d", ssrc, pt);

  /* find session */
  session = gst_pad_get_element_private (pad);

  /* see if we have the pad */
  if (!session->active) {
    activate_session (rdtmanager, session, ssrc, pt);
    session->active = TRUE;
  }

  if (GST_BUFFER_IS_DISCONT (buffer)) {
    GST_DEBUG_OBJECT (rdtmanager, "received discont");
    session->discont = TRUE;
  }

  res = GST_FLOW_OK;

  /* take the timestamp of the buffer. This is the time when the packet was
   * received and is used to calculate jitter and clock skew. We will adjust
   * this timestamp with the smoothed value after processing it in the
   * jitterbuffer. */
  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  /* bring to running time */
  timestamp = gst_segment_to_running_time (&session->segment, GST_FORMAT_TIME,
      timestamp);

  more = gst_rdt_buffer_get_first_packet (buffer, &packet);
  while (more) {
    GstRDTType type;

    type = gst_rdt_packet_get_type (&packet);
    GST_DEBUG_OBJECT (rdtmanager, "Have packet of type %04x", type);

    if (GST_RDT_IS_DATA_TYPE (type)) {
      GST_DEBUG_OBJECT (rdtmanager, "We have a data packet");
      res = gst_rdt_manager_handle_data_packet (session, timestamp, &packet);
    } else {
      switch (type) {
        default:
          GST_DEBUG_OBJECT (rdtmanager, "Ignoring packet");
          break;
      }
    }
    if (res != GST_FLOW_OK)
      break;

    more = gst_rdt_packet_move_to_next (&packet);
  }

  gst_buffer_unref (buffer);

  return res;
}
Example #18
static GstFlowReturn
fs_funnel_chain (GstPad * pad, GstBuffer * buffer)
{
  GstFlowReturn res;
  FsFunnel *funnel = FS_FUNNEL (gst_pad_get_parent (pad));
  FsFunnelPadPrivate *priv = gst_pad_get_element_private (pad);
  GstEvent *event = NULL;
  GstClockTime newts;
  GstCaps *padcaps;

  GST_DEBUG_OBJECT (funnel, "received buffer %p", buffer);

  GST_OBJECT_LOCK (funnel);
  if (priv->segment.format == GST_FORMAT_UNDEFINED) {
    GST_WARNING_OBJECT (funnel, "Got buffer without segment,"
        " setting segment [0,inf[");
    gst_segment_set_newsegment_full (&priv->segment, FALSE, 1.0, 1.0,
        GST_FORMAT_TIME, 0, -1, 0);
  }

  if (GST_CLOCK_TIME_IS_VALID (GST_BUFFER_TIMESTAMP (buffer)))
    gst_segment_set_last_stop (&priv->segment, priv->segment.format,
        GST_BUFFER_TIMESTAMP (buffer));

  newts = gst_segment_to_running_time (&priv->segment,
      priv->segment.format, GST_BUFFER_TIMESTAMP (buffer));
  if (newts != GST_BUFFER_TIMESTAMP (buffer)) {
    buffer = gst_buffer_make_metadata_writable (buffer);
    GST_BUFFER_TIMESTAMP (buffer) = newts;
  }

  if (!funnel->has_segment) {
    event = gst_event_new_new_segment_full (FALSE, 1.0, 1.0, GST_FORMAT_TIME,
        0, -1, 0);
    funnel->has_segment = TRUE;
  }
  GST_OBJECT_UNLOCK (funnel);

  if (event) {
    if (!gst_pad_push_event (funnel->srcpad, event))
      GST_WARNING_OBJECT (funnel, "Could not push out newsegment event");
  }


  GST_OBJECT_LOCK (pad);
  padcaps = GST_PAD_CAPS (funnel->srcpad);
  GST_OBJECT_UNLOCK (pad);

  if (GST_BUFFER_CAPS (buffer) && GST_BUFFER_CAPS (buffer) != padcaps) {
    if (!gst_pad_set_caps (funnel->srcpad, GST_BUFFER_CAPS (buffer))) {
      res = GST_FLOW_NOT_NEGOTIATED;
      goto out;
    }
  }

  res = gst_pad_push (funnel->srcpad, buffer);

  GST_LOG_OBJECT (funnel, "handled buffer %s", gst_flow_get_name (res));

 out:
  gst_object_unref (funnel);

  return res;
}
/**
 * gst_basertppayload_push:
 * @payload: a #GstBaseRTPPayload
 * @buffer: a #GstBuffer
 *
 * Push @buffer to the peer element of the payloader. The SSRC, payload type,
 * seqnum and timestamp of the RTP buffer will be updated first.
 * 
 * This function takes ownership of @buffer.
 *
 * Returns: a #GstFlowReturn.
 */
GstFlowReturn
gst_basertppayload_push (GstBaseRTPPayload * payload, GstBuffer * buffer)
{
  GstFlowReturn res;
  GstClockTime timestamp;
  guint32 rtptime;
  GstBaseRTPPayloadPrivate *priv;

  if (payload->clock_rate == 0)
    goto no_rate;

  priv = payload->priv;

  gst_rtp_buffer_set_ssrc (buffer, payload->current_ssrc);

  gst_rtp_buffer_set_payload_type (buffer, payload->pt);

  /* update first, so that the property is set to the last
   * seqnum pushed */
  payload->seqnum = priv->next_seqnum;
  gst_rtp_buffer_set_seq (buffer, payload->seqnum);

  /* can wrap around, which is perfectly fine */
  priv->next_seqnum++;

  /* add our random offset to the timestamp */
  rtptime = payload->ts_base;

  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  if (GST_CLOCK_TIME_IS_VALID (timestamp)) {
    gint64 rtime;

    rtime = gst_segment_to_running_time (&payload->segment, GST_FORMAT_TIME,
        timestamp);

    rtime = gst_util_uint64_scale_int (rtime, payload->clock_rate, GST_SECOND);

    /* add running_time in clock-rate units to the base timestamp */
    rtptime += rtime;
  } else {
    /* no timestamp to convert, take previous timestamp */
    rtptime = payload->timestamp;
  }
  gst_rtp_buffer_set_timestamp (buffer, rtptime);

  payload->timestamp = rtptime;

  /* set caps */
  gst_buffer_set_caps (buffer, GST_PAD_CAPS (payload->srcpad));

  GST_LOG_OBJECT (payload,
      "Pushing packet size %d, seq=%d, rtptime=%u, timestamp %" GST_TIME_FORMAT,
      GST_BUFFER_SIZE (buffer), payload->seqnum, rtptime,
      GST_TIME_ARGS (timestamp));

  res = gst_pad_push (payload->srcpad, buffer);

  return res;

  /* ERRORS */
no_rate:
  {
    GST_ELEMENT_ERROR (payload, STREAM, NOT_IMPLEMENTED, (NULL),
        ("subclass did not specify clock-rate"));
    gst_buffer_unref (buffer);
    return GST_FLOW_ERROR;
  }
}
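
/* Editor's sketch, not part of the sources above: a minimal, hypothetical
 * caller of gst_basertppayload_push(). A payloader subclass allocates an
 * RTP buffer, copies timestamp and duration from its input, and pushes it;
 * SSRC, payload type, seqnum and RTP timestamp are then filled in as shown
 * above. push_one_packet() is an assumed name. */
static GstFlowReturn
push_one_packet (GstBaseRTPPayload * payload, guint payload_len,
    GstClockTime timestamp, GstClockTime duration)
{
  GstBuffer *outbuf;

  /* payload_len payload bytes, no padding, no CSRCs */
  outbuf = gst_rtp_buffer_new_allocate (payload_len, 0, 0);

  GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
  GST_BUFFER_DURATION (outbuf) = duration;

  /* ... write the actual payload data into outbuf here ... */

  return gst_basertppayload_push (payload, outbuf);
}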
GstFlowReturn
gst_base_video_encoder_finish_frame (GstBaseVideoEncoder * base_video_encoder,
    GstVideoFrame * frame)
{
  GstFlowReturn ret;
  GstBaseVideoEncoderClass *base_video_encoder_class;

  base_video_encoder_class =
      GST_BASE_VIDEO_ENCODER_GET_CLASS (base_video_encoder);

  if (frame->is_sync_point) {
    base_video_encoder->distance_from_sync = 0;
    GST_BUFFER_FLAG_UNSET (frame->src_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  } else {
    GST_BUFFER_FLAG_SET (frame->src_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  }

  frame->distance_from_sync = base_video_encoder->distance_from_sync;
  base_video_encoder->distance_from_sync++;

  frame->decode_frame_number = frame->system_frame_number - 1;
  if (frame->decode_frame_number < 0) {
    frame->decode_timestamp = 0;
  } else {
    frame->decode_timestamp = gst_util_uint64_scale (frame->decode_frame_number,
        GST_SECOND * GST_BASE_VIDEO_CODEC (base_video_encoder)->state.fps_d,
        GST_BASE_VIDEO_CODEC (base_video_encoder)->state.fps_n);
  }

  GST_BUFFER_TIMESTAMP (frame->src_buffer) = frame->presentation_timestamp;
  GST_BUFFER_DURATION (frame->src_buffer) = frame->presentation_duration;
  GST_BUFFER_OFFSET (frame->src_buffer) = frame->decode_timestamp;

  GST_BASE_VIDEO_CODEC (base_video_encoder)->frames =
      g_list_remove (GST_BASE_VIDEO_CODEC (base_video_encoder)->frames, frame);

  if (!base_video_encoder->set_output_caps) {
    if (base_video_encoder_class->get_caps) {
      GST_BASE_VIDEO_CODEC (base_video_encoder)->caps =
          base_video_encoder_class->get_caps (base_video_encoder);
    } else {
      GST_BASE_VIDEO_CODEC (base_video_encoder)->caps =
          gst_caps_new_simple ("video/unknown", NULL);
    }
    gst_pad_set_caps (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder),
        GST_BASE_VIDEO_CODEC (base_video_encoder)->caps);
    base_video_encoder->set_output_caps = TRUE;
  }

  gst_buffer_set_caps (GST_BUFFER (frame->src_buffer),
      GST_BASE_VIDEO_CODEC (base_video_encoder)->caps);

  if (frame->force_keyframe) {
    GstClockTime stream_time;
    GstClockTime running_time;
    GstStructure *s;

    running_time =
        gst_segment_to_running_time (&GST_BASE_VIDEO_CODEC
        (base_video_encoder)->segment, GST_FORMAT_TIME,
        frame->presentation_timestamp);
    stream_time =
        gst_segment_to_stream_time (&GST_BASE_VIDEO_CODEC
        (base_video_encoder)->segment, GST_FORMAT_TIME,
        frame->presentation_timestamp);

    /* FIXME this should send the event that we got on the sink pad
       instead of creating a new one */
    s = gst_structure_new ("GstForceKeyUnit",
        "timestamp", G_TYPE_UINT64, frame->presentation_timestamp,
        "stream-time", G_TYPE_UINT64, stream_time,
        "running-time", G_TYPE_UINT64, running_time, NULL);

    gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder),
        gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM, s));
  }

  if (base_video_encoder_class->shape_output) {
    ret = base_video_encoder_class->shape_output (base_video_encoder, frame);
  } else {
    ret =
        gst_pad_push (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder),
        frame->src_buffer);
  }

  gst_base_video_codec_free_frame (frame);

  return ret;
}
static GstFlowReturn
dvdspu_handle_vid_buffer (GstDVDSpu * dvdspu, GstBuffer * buf)
{
  GstClockTime new_ts;
  GstFlowReturn ret;
  gboolean using_ref = FALSE;

  DVD_SPU_LOCK (dvdspu);

  if (buf == NULL) {
    GstClockTime next_ts = dvdspu->video_seg.position;

    next_ts += gst_util_uint64_scale_int (GST_SECOND,
        dvdspu->spu_state.info.fps_d, dvdspu->spu_state.info.fps_n);

    /* A NULL buffer was passed: reuse the reference frame with an updated
     * timestamp; if there is no reference frame there's nothing to draw,
     * so just return GST_FLOW_OK */
    if (dvdspu->ref_frame == NULL) {
      dvdspu->video_seg.position = next_ts;
      goto no_ref_frame;
    }

    buf = gst_buffer_copy (dvdspu->ref_frame);

#if 0
    g_print ("Duping frame %" GST_TIME_FORMAT " with new TS %" GST_TIME_FORMAT
        "\n", GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
        GST_TIME_ARGS (next_ts));
#endif

    GST_BUFFER_TIMESTAMP (buf) = next_ts;
    using_ref = TRUE;
  }

  if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)) {
    dvdspu->video_seg.position = GST_BUFFER_TIMESTAMP (buf);
  }

  new_ts = gst_segment_to_running_time (&dvdspu->video_seg, GST_FORMAT_TIME,
      dvdspu->video_seg.position);

#if 0
  g_print ("TS %" GST_TIME_FORMAT " running: %" GST_TIME_FORMAT "\n",
      GST_TIME_ARGS (dvdspu->video_seg.position), GST_TIME_ARGS (new_ts));
#endif

  gst_dvd_spu_advance_spu (dvdspu, new_ts);

  /* If we have an active SPU command set, we store a copy of the frame in case
   * we hit a still and need to draw on it. Otherwise, a reference is
   * sufficient in case we later encounter a still */
  if ((dvdspu->spu_state.flags & SPU_STATE_FORCED_DSP) ||
      ((dvdspu->spu_state.flags & SPU_STATE_FORCED_ONLY) == 0 &&
          (dvdspu->spu_state.flags & SPU_STATE_DISPLAY))) {
    if (using_ref == FALSE) {
      GstBuffer *copy;

      /* Take a copy in case we hit a still frame and need the pristine 
       * frame around */
      copy = gst_buffer_copy (buf);
      gst_buffer_replace (&dvdspu->ref_frame, copy);
      gst_buffer_unref (copy);
    }

    /* Render the SPU overlay onto the buffer */
    buf = gst_buffer_make_writable (buf);

    gstspu_render (dvdspu, buf);
  } else {
    if (using_ref == FALSE) {
      /* Not going to draw anything on this frame, just store a reference
       * in case we hit a still frame and need it */
      gst_buffer_replace (&dvdspu->ref_frame, buf);
    }
  }

  if (dvdspu->spu_state.flags & SPU_STATE_STILL_FRAME) {
    GST_DEBUG_OBJECT (dvdspu, "Outputting buffer with TS %" GST_TIME_FORMAT
        " from chain while in still",
        GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)));
  }

  DVD_SPU_UNLOCK (dvdspu);

  /* just push out the incoming buffer without touching it */
  ret = gst_pad_push (dvdspu->srcpad, buf);

  return ret;

no_ref_frame:

  DVD_SPU_UNLOCK (dvdspu);

  return GST_FLOW_OK;
}
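
/* Editor's sketch, not part of the example above: a worked instance of the
 * frame-duration step used when duplicating the reference frame. At an
 * assumed fps_n/fps_d of 25/1, one frame advances the position by
 * GST_SECOND * fps_d / fps_n = 40ms. */
static GstClockTime
frame_duration_example (void)
{
  gint fps_n = 25, fps_d = 1;

  return gst_util_uint64_scale_int (GST_SECOND, fps_d, fps_n);  /* 40ms */
}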
/* Called with the object lock for both the element and pad held,
 * as well as the aagg lock
 */
static gboolean
gst_audio_aggregator_fill_buffer (GstAudioAggregator * aagg,
    GstAudioAggregatorPad * pad, GstBuffer * inbuf)
{
  GstClockTime start_time, end_time;
  gboolean discont = FALSE;
  guint64 start_offset, end_offset;
  gint rate, bpf;

  GstAggregator *agg = GST_AGGREGATOR (aagg);
  GstAggregatorPad *aggpad = GST_AGGREGATOR_PAD (pad);

  g_assert (pad->priv->buffer == NULL);

  rate = GST_AUDIO_INFO_RATE (&pad->info);
  bpf = GST_AUDIO_INFO_BPF (&pad->info);

  pad->priv->position = 0;
  pad->priv->size = gst_buffer_get_size (inbuf) / bpf;

  if (!GST_BUFFER_PTS_IS_VALID (inbuf)) {
    if (pad->priv->output_offset == -1)
      pad->priv->output_offset = aagg->priv->offset;
    if (pad->priv->next_offset == -1)
      pad->priv->next_offset = pad->priv->size;
    else
      pad->priv->next_offset += pad->priv->size;
    goto done;
  }

  start_time = GST_BUFFER_PTS (inbuf);
  end_time =
      start_time + gst_util_uint64_scale_ceil (pad->priv->size, GST_SECOND,
      rate);

  /* Clipping should've ensured this */
  g_assert (start_time >= aggpad->segment.start);

  start_offset =
      gst_util_uint64_scale (start_time - aggpad->segment.start, rate,
      GST_SECOND);
  end_offset = start_offset + pad->priv->size;

  if (GST_BUFFER_IS_DISCONT (inbuf)
      || GST_BUFFER_FLAG_IS_SET (inbuf, GST_BUFFER_FLAG_RESYNC)
      || pad->priv->new_segment || pad->priv->next_offset == -1) {
    discont = TRUE;
    pad->priv->new_segment = FALSE;
  } else {
    guint64 diff, max_sample_diff;

    /* Check discont, based on audiobasesink */
    if (start_offset <= pad->priv->next_offset)
      diff = pad->priv->next_offset - start_offset;
    else
      diff = start_offset - pad->priv->next_offset;

    max_sample_diff =
        gst_util_uint64_scale_int (aagg->priv->alignment_threshold, rate,
        GST_SECOND);

    /* Discont! */
    if (G_UNLIKELY (diff >= max_sample_diff)) {
      if (aagg->priv->discont_wait > 0) {
        if (pad->priv->discont_time == GST_CLOCK_TIME_NONE) {
          pad->priv->discont_time = start_time;
        } else if (start_time - pad->priv->discont_time >=
            aagg->priv->discont_wait) {
          discont = TRUE;
          pad->priv->discont_time = GST_CLOCK_TIME_NONE;
        }
      } else {
        discont = TRUE;
      }
    } else if (G_UNLIKELY (pad->priv->discont_time != GST_CLOCK_TIME_NONE)) {
      /* we have had a discont, but are now back on track! */
      pad->priv->discont_time = GST_CLOCK_TIME_NONE;
    }
  }

  if (discont) {
    /* Have discont, need resync */
    if (pad->priv->next_offset != -1)
      GST_INFO_OBJECT (pad, "Have discont. Expected %"
          G_GUINT64_FORMAT ", got %" G_GUINT64_FORMAT,
          pad->priv->next_offset, start_offset);
    pad->priv->output_offset = -1;
    pad->priv->next_offset = end_offset;
  } else {
    pad->priv->next_offset += pad->priv->size;
  }

  if (pad->priv->output_offset == -1) {
    GstClockTime start_running_time;
    GstClockTime end_running_time;
    guint64 start_output_offset;
    guint64 end_output_offset;

    start_running_time =
        gst_segment_to_running_time (&aggpad->segment,
        GST_FORMAT_TIME, start_time);
    end_running_time =
        gst_segment_to_running_time (&aggpad->segment,
        GST_FORMAT_TIME, end_time);

    /* Convert to position in the output segment */
    start_output_offset =
        gst_segment_to_position (&agg->segment, GST_FORMAT_TIME,
        start_running_time);
    if (start_output_offset != -1)
      start_output_offset =
          gst_util_uint64_scale (start_output_offset - agg->segment.start, rate,
          GST_SECOND);

    end_output_offset =
        gst_segment_to_position (&agg->segment, GST_FORMAT_TIME,
        end_running_time);
    if (end_output_offset != -1)
      end_output_offset =
          gst_util_uint64_scale (end_output_offset - agg->segment.start, rate,
          GST_SECOND);

    if (start_output_offset == -1 && end_output_offset == -1) {
      /* Outside output segment, drop */
      gst_buffer_unref (inbuf);
      pad->priv->buffer = NULL;
      pad->priv->position = 0;
      pad->priv->size = 0;
      pad->priv->output_offset = -1;
      GST_DEBUG_OBJECT (pad, "Buffer outside output segment");
      return FALSE;
    }

    /* Calculate end_output_offset if it was outside the output segment */
    if (end_output_offset == -1)
      end_output_offset = start_output_offset + pad->priv->size;

    if (end_output_offset < aagg->priv->offset) {
      /* Before output segment, drop */
      gst_buffer_unref (inbuf);
      pad->priv->buffer = NULL;
      pad->priv->position = 0;
      pad->priv->size = 0;
      pad->priv->output_offset = -1;
      GST_DEBUG_OBJECT (pad,
          "Buffer before segment or current position: %" G_GUINT64_FORMAT " < %"
          G_GINT64_FORMAT, end_output_offset, aagg->priv->offset);
      return FALSE;
    }

    if (start_output_offset == -1 || start_output_offset < aagg->priv->offset) {
      guint diff;

      if (start_output_offset == -1 && end_output_offset < pad->priv->size) {
        diff = pad->priv->size - end_output_offset + aagg->priv->offset;
      } else if (start_output_offset == -1) {
        start_output_offset = end_output_offset - pad->priv->size;

        if (start_output_offset < aagg->priv->offset)
          diff = aagg->priv->offset - start_output_offset;
        else
          diff = 0;
      } else {
        diff = aagg->priv->offset - start_output_offset;
      }

      pad->priv->position += diff;
      if (pad->priv->position >= pad->priv->size) {
        /* Empty buffer, drop */
        gst_buffer_unref (inbuf);
        pad->priv->buffer = NULL;
        pad->priv->position = 0;
        pad->priv->size = 0;
        pad->priv->output_offset = -1;
        GST_DEBUG_OBJECT (pad,
            "Buffer before segment or current position: %" G_GUINT64_FORMAT
            " < %" G_GINT64_FORMAT, end_output_offset, aagg->priv->offset);
        return FALSE;
      }
    }

    if (start_output_offset == -1 || start_output_offset < aagg->priv->offset)
      pad->priv->output_offset = aagg->priv->offset;
    else
      pad->priv->output_offset = start_output_offset;

    GST_DEBUG_OBJECT (pad,
        "Buffer resynced: Pad offset %" G_GUINT64_FORMAT
        ", current audio aggregator offset %" G_GINT64_FORMAT,
        pad->priv->output_offset, aagg->priv->offset);
  }

done:

  GST_LOG_OBJECT (pad,
      "Queued new buffer at offset %" G_GUINT64_FORMAT,
      pad->priv->output_offset);
  pad->priv->buffer = inbuf;

  return TRUE;
}
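
/* Editor's sketch, not part of the example above: a worked instance of the
 * time-to-sample-offset mapping used in gst_audio_aggregator_fill_buffer().
 * With an assumed segment.start of 0 and rate of 48000, a buffer with a
 * PTS of 20ms and 960 samples occupies sample offsets [960, 1920). */
static void
offsets_for_example_buffer (void)
{
  gint rate = 48000;
  guint64 size = 960;           /* samples in the buffer */
  GstClockTime start_time = 20 * GST_MSECOND;
  guint64 start_offset, end_offset;

  start_offset = gst_util_uint64_scale (start_time, rate, GST_SECOND);
  end_offset = start_offset + size;

  g_assert (start_offset == 960);
  g_assert (end_offset == 1920);
}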
Example #23
/**
 * gst_segment_do_seek:
 * @segment: a #GstSegment structure.
 * @rate: the rate of the segment.
 * @format: the format of the segment.
 * @flags: the segment flags for the segment
 * @start_type: the seek method
 * @start: the seek start value
 * @stop_type: the seek method
 * @stop: the seek stop value
 * @update: boolean holding whether position was updated.
 *
 * Update the segment structure with the field values of a seek event (see
 * gst_event_new_seek()).
 *
 * After calling this method, the segment field position and time will
 * contain the requested new position in the segment. The new requested
 * position in the segment depends on @rate and @start_type and @stop_type.
 *
 * For positive @rate, the new position in the segment is the new @segment
 * start field when it was updated with a @start_type different from
 * #GST_SEEK_TYPE_NONE. If no update was performed on @segment start position
 * (#GST_SEEK_TYPE_NONE), @start is ignored and @segment position is
 * unmodified.
 *
 * For negative @rate, the new position in the segment is the new @segment
 * stop field when it was updated with a @stop_type different from
 * #GST_SEEK_TYPE_NONE. If no stop was previously configured in the segment, the
 * duration of the segment will be used to update the stop position.
 * If no update was performed on @segment stop position (#GST_SEEK_TYPE_NONE),
 * @stop is ignored and @segment position is unmodified.
 *
 * The applied rate of the segment will be set to 1.0 by default.
 * If the caller can apply a rate change, it should update @segment
 * rate and applied_rate after calling this function.
 *
 * @update will be set to TRUE if a seek should be performed to the segment
 * position field. This field can be FALSE if, for example, only the @rate
 * has been changed but not the playback position.
 *
 * Returns: %TRUE if the seek could be performed.
 */
gboolean
gst_segment_do_seek (GstSegment * segment, gdouble rate,
    GstFormat format, GstSeekFlags flags,
    GstSeekType start_type, guint64 start,
    GstSeekType stop_type, guint64 stop, gboolean * update)
{
  gboolean update_stop, update_start;
  guint64 position, base;

  g_return_val_if_fail (rate != 0.0, FALSE);
  g_return_val_if_fail (segment != NULL, FALSE);
  g_return_val_if_fail (segment->format == format, FALSE);

  update_start = update_stop = TRUE;

  position = segment->position;

  /* segment->start is never invalid */
  switch (start_type) {
    case GST_SEEK_TYPE_NONE:
      /* no update to segment, take previous start */
      start = segment->start;
      update_start = FALSE;
      break;
    case GST_SEEK_TYPE_SET:
      /* start holds desired position, map -1 to the start */
      if (start == -1)
        start = 0;
      break;
    case GST_SEEK_TYPE_END:
      if (segment->duration != -1) {
        /* add start to total length */
        start = segment->duration + start;
      } else {
        /* no update if duration unknown */
        start = segment->start;
        update_start = FALSE;
      }
      break;
  }
  /* bring in sane range */
  if (segment->duration != -1)
    start = MIN (start, segment->duration);
  else
    start = MAX (start, 0);

  /* stop can be -1 if we have not configured a stop. */
  switch (stop_type) {
    case GST_SEEK_TYPE_NONE:
      stop = segment->stop;
      update_stop = FALSE;
      break;
    case GST_SEEK_TYPE_SET:
      /* stop holds required value */
      break;
    case GST_SEEK_TYPE_END:
      if (segment->duration != -1) {
        stop = segment->duration + stop;
      } else {
        stop = segment->stop;
        update_stop = FALSE;
      }
      break;
  }

  /* if we have a valid stop time, make sure it is clipped */
  if (stop != -1) {
    if (segment->duration != -1)
      stop = CLAMP (stop, 0, segment->duration);
    else
      stop = MAX (stop, 0);
  }

  /* we can't have stop before start */
  if (stop != -1) {
    if (start > stop) {
      GST_WARNING ("segment update failed: start(%" G_GUINT64_FORMAT
          ") > stop(%" G_GUINT64_FORMAT ")", start, stop);
      g_return_val_if_fail (start <= stop, FALSE);
      return FALSE;
    }
  }

  if (flags & GST_SEEK_FLAG_FLUSH) {
    /* flush resets the running_time */
    base = 0;
  } else {
    /* remember the elapsed time */
    base = gst_segment_to_running_time (segment, format, position);
    GST_DEBUG ("updated segment.base: %" G_GUINT64_FORMAT, base);
  }

  if (update_start && rate > 0.0) {
    position = start;
  }
  if (update_stop && rate < 0.0) {
    if (stop != -1)
      position = stop;
    else {
      if (segment->duration != -1)
        position = segment->duration;
      else
        position = 0;
    }
  }

  /* set update arg to reflect update of position */
  if (update)
    *update = position != segment->position;

  /* update new values */
  /* be explicit about our GstSeekFlag -> GstSegmentFlag conversion */
  segment->flags = GST_SEGMENT_FLAG_NONE;
  if ((flags & GST_SEEK_FLAG_FLUSH) != 0)
    segment->flags |= GST_SEGMENT_FLAG_RESET;
  if ((flags & GST_SEEK_FLAG_SKIP) != 0)
    segment->flags |= GST_SEGMENT_FLAG_SKIP;
  if ((flags & GST_SEEK_FLAG_SEGMENT) != 0)
    segment->flags |= GST_SEGMENT_FLAG_SEGMENT;

  segment->rate = rate;
  segment->applied_rate = 1.0;

  segment->base = base;
  if (rate > 0.0)
    segment->offset = position - start;
  else {
    if (stop != -1)
      segment->offset = stop - position;
    else if (segment->duration != -1)
      segment->offset = segment->duration - position;
    else
      segment->offset = 0;
  }

  segment->start = start;
  segment->stop = stop;
  segment->time = start;
  segment->position = position;

  GST_INFO ("segment updated: %" GST_SEGMENT_FORMAT, segment);

  return TRUE;
}
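
/* Editor's sketch, not part of the sources above: a minimal, hypothetical
 * use of gst_segment_do_seek(): a flushing seek to 5 seconds on a TIME
 * segment. Afterwards segment.start, segment.time and segment.position are
 * 5 seconds, and segment.base is 0 because the flush resets running time. */
static void
seek_to_5s (void)
{
  GstSegment segment;
  gboolean update;

  gst_segment_init (&segment, GST_FORMAT_TIME);

  if (gst_segment_do_seek (&segment, 1.0, GST_FORMAT_TIME,
          GST_SEEK_FLAG_FLUSH, GST_SEEK_TYPE_SET, 5 * GST_SECOND,
          GST_SEEK_TYPE_NONE, GST_CLOCK_TIME_NONE, &update))
    GST_INFO ("segment after seek: %" GST_SEGMENT_FORMAT, &segment);
}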