static GstFlowReturn
gst_ogg_avi_parse_push_packet (GstOggAviParse * ogg, ogg_packet * packet)
{
  GstBuffer *buffer;
  GstFlowReturn result;

  /* allocate space for header and body */
  buffer = gst_buffer_new_and_alloc (packet->bytes);
  memcpy (GST_BUFFER_DATA (buffer), packet->packet, packet->bytes);

  GST_LOG_OBJECT (ogg, "created buffer %p from page", buffer);

  GST_BUFFER_OFFSET_END (buffer) = packet->granulepos;

  if (ogg->discont) {
    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);
    ogg->discont = FALSE;
  }

  result = gst_pad_push (ogg->srcpad, buffer);

  return result;
}
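/* A minimal sketch (not taken from the element above) of the same idea with
 * the GStreamer 1.0 buffer API, where GST_BUFFER_DATA no longer exists and
 * the payload is copied in with gst_buffer_fill(): */
static GstBuffer *
buffer_from_ogg_packet_sketch (ogg_packet * packet)
{
  GstBuffer *buffer = gst_buffer_new_and_alloc (packet->bytes);

  /* copy the packet body into the freshly allocated buffer */
  gst_buffer_fill (buffer, 0, packet->packet, packet->bytes);
  /* as above, the granulepos travels in the end offset */
  GST_BUFFER_OFFSET_END (buffer) = packet->granulepos;

  return buffer;
}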
static void
gst_identity_update_last_message_for_buffer (GstIdentity * identity,
    const gchar * action, GstBuffer * buf)
{
  gchar ts_str[64], dur_str[64];

  GST_OBJECT_LOCK (identity);

  g_free (identity->last_message);
  identity->last_message = g_strdup_printf ("%s   ******* (%s:%s) "
      "(%u bytes, timestamp: %s, duration: %s, offset: %" G_GINT64_FORMAT ", "
      "offset_end: %" G_GINT64_FORMAT ", flags: %d) %p", action,
      GST_DEBUG_PAD_NAME (GST_BASE_TRANSFORM_CAST (identity)->sinkpad),
      GST_BUFFER_SIZE (buf),
      print_pretty_time (ts_str, sizeof (ts_str), GST_BUFFER_TIMESTAMP (buf)),
      print_pretty_time (dur_str, sizeof (dur_str), GST_BUFFER_DURATION (buf)),
      GST_BUFFER_OFFSET (buf), GST_BUFFER_OFFSET_END (buf),
      GST_BUFFER_FLAGS (buf), buf);

  GST_OBJECT_UNLOCK (identity);

  gst_identity_notify_last_message (identity);
}
Example #3
static void
gst_multi_file_sink_post_message (GstMultiFileSink * multifilesink,
                                  GstBuffer * buffer, const char *filename)
{
    if (multifilesink->post_messages) {
        GstClockTime duration, timestamp;
        GstClockTime running_time, stream_time;
        guint64 offset, offset_end;
        GstStructure *s;
        GstSegment *segment;
        GstFormat format;

        segment = &GST_BASE_SINK (multifilesink)->segment;
        format = segment->format;

        timestamp = GST_BUFFER_TIMESTAMP (buffer);
        duration = GST_BUFFER_DURATION (buffer);
        offset = GST_BUFFER_OFFSET (buffer);
        offset_end = GST_BUFFER_OFFSET_END (buffer);

        running_time = gst_segment_to_running_time (segment, format, timestamp);
        stream_time = gst_segment_to_stream_time (segment, format, timestamp);

        s = gst_structure_new ("GstMultiFileSink",
                               "filename", G_TYPE_STRING, filename,
                               "index", G_TYPE_INT, multifilesink->index,
                               "timestamp", G_TYPE_UINT64, timestamp,
                               "stream-time", G_TYPE_UINT64, stream_time,
                               "running-time", G_TYPE_UINT64, running_time,
                               "duration", G_TYPE_UINT64, duration,
                               "offset", G_TYPE_UINT64, offset,
                               "offset-end", G_TYPE_UINT64, offset_end, NULL);

        gst_element_post_message (GST_ELEMENT_CAST (multifilesink),
                                  gst_message_new_element (GST_OBJECT_CAST (multifilesink), s));
    }
}
static GstFlowReturn
gst_kate_parse_push_buffer (GstKateParse * parse, GstBuffer * buf,
                            gint64 granulepos)
{
    GST_LOG_OBJECT (parse, "granulepos %16" G_GINT64_MODIFIER "x", granulepos);
    if (granulepos < 0) {
        /* packets coming not from Ogg won't have a granpos in the offset end,
           so we have to synthesize one here - only problem is we don't know
           the backlink - pretend there's none for now */
        GST_INFO_OBJECT (parse, "No granulepos on buffer, synthesizing one");
        granulepos =
            kate_duration_granule (&parse->ki,
                                   GST_BUFFER_TIMESTAMP (buf) /
                                   (double) GST_SECOND) << kate_granule_shift (&parse->ki);
    }
    GST_BUFFER_OFFSET (buf) =
        kate_granule_time (&parse->ki, granulepos) * GST_SECOND;
    GST_BUFFER_OFFSET_END (buf) = granulepos;
    GST_BUFFER_TIMESTAMP (buf) = GST_BUFFER_OFFSET (buf);

    gst_buffer_set_caps (buf, GST_PAD_CAPS (parse->srcpad));

    return gst_pad_push (parse->srcpad, buf);
}
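/* Not from libkate: a minimal sketch of the Ogg-style granulepos convention
 * the synthesis above relies on (the real conversion is kate_granule_time()).
 * The high bits carry a base granule, the low granule_shift bits an offset
 * from it, and time is (base + offset) scaled by the granule rate; the
 * parameter names here are placeholders. */
static double
granulepos_to_seconds_sketch (gint64 granulepos, gint granule_shift,
    gint64 rate_num, gint64 rate_den)
{
  gint64 mask = (G_GINT64_CONSTANT (1) << granule_shift) - 1;
  gint64 base = granulepos >> granule_shift;    /* backlink base */
  gint64 offset = granulepos & mask;            /* offset from that base */

  return (double) (base + offset) * rate_den / rate_num;
}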
Example #5
static GstFlowReturn
send_buffers_before(AudioTrim *filter, gint64 before)
{
  GstFlowReturn ret = GST_FLOW_OK;
  GList *b = filter->buffers;
  while(b) {
    GstBuffer *buf = b->data;
    b->data = NULL;
    if ((gint64)GST_BUFFER_OFFSET(buf) >= before) {
      gst_buffer_unref(buf);
    } else {
      if ((gint64)GST_BUFFER_OFFSET_END(buf) > before) {
	GstBuffer *head = buffer_head(filter, buf, before);
	gst_buffer_unref(buf);
	buf = head;
      }
      ret = gst_pad_push(filter->srcpad, buf);
      if (ret != GST_FLOW_OK) break;
    }
    b = g_list_next(b);
  }
  release_buffers(filter);
  return ret;
}
Example #6
static GstFlowReturn
send_buffers_after(AudioTrim *filter, gint64 after)
{
  GstFlowReturn ret = GST_FLOW_OK;
  GList *b = filter->buffers;
  while(b) {
    GstBuffer *buf = b->data;
    b->data = NULL;
    if ((gint64)GST_BUFFER_OFFSET_END(buf) <= after) {
      gst_buffer_unref(buf);
    } else {
      if ((gint64)GST_BUFFER_OFFSET(buf) < after) {
	GstBuffer *tail = buffer_tail(filter, buf, after);
	gst_buffer_unref(buf);
	buf = tail;
      }
      ret = gst_pad_push(filter->srcpad, buf);
      if (ret != GST_FLOW_OK) break;
    }
    b = g_list_next(b);
  }
  release_buffers(filter);
  return ret;
}
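/* buffer_head() and buffer_tail() are not shown here.  A minimal sketch of
 * what buffer_head() could look like in GStreamer 0.10 terms, assuming a
 * hypothetical filter->sample_size field (bytes per sample frame); the real
 * AudioTrim helpers may differ: */
static GstBuffer *
buffer_head_sketch (AudioTrim * filter, GstBuffer * buf, gint64 before)
{
  guint samples = before - GST_BUFFER_OFFSET (buf);
  GstBuffer *head =
      gst_buffer_create_sub (buf, 0, samples * filter->sample_size);

  /* keep the sample offsets consistent with the truncated data */
  GST_BUFFER_OFFSET (head) = GST_BUFFER_OFFSET (buf);
  GST_BUFFER_OFFSET_END (head) = before;

  return head;
}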
Example #7
void ofxGstRTPServer::newOscMsg(ofxOscMessage & msg, GstClockTime timestamp){
	if(!appSrcOsc) return;

	GstClockTime now = timestamp;
	if(!oscAutoTimestamp){
		if(now==GST_CLOCK_TIME_NONE){
			now = getTimeStamp();
		}

		if(firstOscFrame){
			prevTimestampOsc = now;
			firstOscFrame = false;
			return;
		}
	}

	PooledOscPacket * pooledOscPkg = oscPacketPool.newBuffer();
	appendMessage(msg,pooledOscPkg->packet);

	GstBuffer * buffer = gst_buffer_new_wrapped_full(GST_MEMORY_FLAG_READONLY,(void*)pooledOscPkg->compressedData(),pooledOscPkg->compressedSize(),0,pooledOscPkg->compressedSize(),pooledOscPkg,(GDestroyNotify)&ofxOscPacketPool::relaseBuffer);


	if(!oscAutoTimestamp){
		GST_BUFFER_OFFSET(buffer) = numFrameOsc++;
		GST_BUFFER_OFFSET_END(buffer) = numFrameOsc;
		GST_BUFFER_DTS (buffer) = now;
		GST_BUFFER_PTS (buffer) = now;
		GST_BUFFER_DURATION(buffer) = now-prevTimestampOsc;
		prevTimestampOsc = now;
	}

	GstFlowReturn flow_return = gst_app_src_push_buffer((GstAppSrc*)appSrcOsc, buffer);
	if (flow_return != GST_FLOW_OK) {
		ofLogError() << "error pushing osc buffer: flow_return was " << flow_return;
	}
}
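/* getTimeStamp() is not shown above.  A minimal sketch of one common way to
 * derive a running-time timestamp for appsrc, assuming access to the
 * pipeline element (the "pipeline" parameter is a placeholder, not a member
 * of the class above): */
static GstClockTime
query_running_time_sketch (GstElement * pipeline)
{
  GstClock *clock = gst_element_get_clock (pipeline);
  GstClockTime now;

  if (clock == NULL)
    return GST_CLOCK_TIME_NONE;

  /* running time = absolute clock time minus the pipeline base time */
  now = gst_clock_get_time (clock) - gst_element_get_base_time (pipeline);
  gst_object_unref (clock);

  return now;
}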
void
gst_audio_fx_base_fir_filter_push_residue (GstAudioFXBaseFIRFilter * self)
{
  GstBuffer *outbuf;
  GstFlowReturn res;
  gint rate = GST_AUDIO_FILTER_RATE (self);
  gint channels = GST_AUDIO_FILTER_CHANNELS (self);
  gint bps = GST_AUDIO_FILTER_BPS (self);
  gint outsize, outsamples;
  GstMapInfo map;
  guint8 *in, *out;

  if (channels == 0 || rate == 0 || self->nsamples_in == 0) {
    self->buffer_fill = 0;
    g_free (self->buffer);
    self->buffer = NULL;
    return;
  }

  /* Calculate the number of samples and their memory size that
   * should be pushed from the residue */
  outsamples = self->nsamples_in - (self->nsamples_out - self->latency);
  if (outsamples <= 0) {
    self->buffer_fill = 0;
    g_free (self->buffer);
    self->buffer = NULL;
    return;
  }
  outsize = outsamples * channels * bps;

  if (!self->fft || self->low_latency) {
    gint64 diffsize, diffsamples;

    /* Process the difference between latency and residue length samples
     * to start at the actual data instead of starting at the zeros before
     * when we only got one buffer smaller than latency */
    diffsamples =
        ((gint64) self->latency) - ((gint64) self->buffer_fill) / channels;
    if (diffsamples > 0) {
      diffsize = diffsamples * channels * bps;
      in = g_new0 (guint8, diffsize);
      out = g_new0 (guint8, diffsize);
      self->nsamples_out += self->process (self, in, out, diffsamples);
      g_free (in);
      g_free (out);
    }

    outbuf = gst_buffer_new_and_alloc (outsize);

    /* Convolve the residue with zeros to get the actual remaining data */
    in = g_new0 (guint8, outsize);
    gst_buffer_map (outbuf, &map, GST_MAP_READWRITE);
    self->nsamples_out += self->process (self, in, map.data, outsamples);
    gst_buffer_unmap (outbuf, &map);

    g_free (in);
  } else {
    guint gensamples = 0;

    outbuf = gst_buffer_new_and_alloc (outsize);
    gst_buffer_map (outbuf, &map, GST_MAP_READWRITE);

    while (gensamples < outsamples) {
      guint step_insamples = self->block_length - self->buffer_fill;
      guint8 *zeroes = g_new0 (guint8, step_insamples * channels * bps);
      guint8 *out = g_new (guint8, self->block_length * channels * bps);
      guint step_gensamples;

      step_gensamples = self->process (self, zeroes, out, step_insamples);
      g_free (zeroes);

      memcpy (map.data + gensamples * bps, out, MIN (step_gensamples,
              outsamples - gensamples) * bps);
      gensamples += MIN (step_gensamples, outsamples - gensamples);

      g_free (out);
    }
    self->nsamples_out += gensamples;

    gst_buffer_unmap (outbuf, &map);
  }

  /* Set timestamp, offset, etc from the values we
   * saved when processing the regular buffers */
  if (GST_CLOCK_TIME_IS_VALID (self->start_ts))
    GST_BUFFER_TIMESTAMP (outbuf) = self->start_ts;
  else
    GST_BUFFER_TIMESTAMP (outbuf) = 0;
  GST_BUFFER_TIMESTAMP (outbuf) +=
      gst_util_uint64_scale_int (self->nsamples_out - outsamples -
      self->latency, GST_SECOND, rate);

  GST_BUFFER_DURATION (outbuf) =
      gst_util_uint64_scale_int (outsamples, GST_SECOND, rate);

  if (self->start_off != GST_BUFFER_OFFSET_NONE) {
    GST_BUFFER_OFFSET (outbuf) =
        self->start_off + self->nsamples_out - outsamples - self->latency;
    GST_BUFFER_OFFSET_END (outbuf) = GST_BUFFER_OFFSET (outbuf) + outsamples;
  }

  GST_DEBUG_OBJECT (self,
      "Pushing residue buffer of size %" G_GSIZE_FORMAT " with timestamp: %"
      GST_TIME_FORMAT ", duration: %" GST_TIME_FORMAT ", offset: %"
      G_GUINT64_FORMAT ", offset_end: %" G_GUINT64_FORMAT ", nsamples_out: %d",
      gst_buffer_get_size (outbuf),
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (outbuf)), GST_BUFFER_OFFSET (outbuf),
      GST_BUFFER_OFFSET_END (outbuf), outsamples);

  res = gst_pad_push (GST_BASE_TRANSFORM_CAST (self)->srcpad, outbuf);

  if (G_UNLIKELY (res != GST_FLOW_OK)) {
    GST_WARNING_OBJECT (self, "failed to push residue");
  }

  self->buffer_fill = 0;
}
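/* Worked example of the residue length above: with nsamples_in = 1000,
 * nsamples_out = 900 and latency = 64, the filter still owes
 * outsamples = 1000 - (900 - 64) = 164 samples, i.e. 164 * channels * bps
 * bytes, which is what gets convolved with zeroes and pushed here. */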
Example #9
static void
do_perfect_stream_test (guint rate, guint width, gdouble drop_probability,
    gdouble inject_probability)
{
  GstElement *pipe, *src, *conv, *filter, *injector, *audiorate, *sink;
  GstMessage *msg;
  GstCaps *caps;
  GstPad *srcpad;
  GList *l, *bufs = NULL;
  GstClockTime next_time = GST_CLOCK_TIME_NONE;
  guint64 next_offset = GST_BUFFER_OFFSET_NONE;

  caps = gst_caps_new_simple ("audio/x-raw-int", "rate", G_TYPE_INT,
      rate, "width", G_TYPE_INT, width, NULL);

  GST_INFO ("-------- drop=%.0f%% caps = %" GST_PTR_FORMAT " ---------- ",
      drop_probability * 100.0, caps);

  g_assert (drop_probability >= 0.0 && drop_probability <= 1.0);
  g_assert (inject_probability >= 0.0 && inject_probability <= 1.0);
  g_assert (width > 0 && (width % 8) == 0);

  pipe = gst_pipeline_new ("pipeline");
  fail_unless (pipe != NULL);

  src = gst_element_factory_make ("audiotestsrc", "audiotestsrc");
  fail_unless (src != NULL);

  g_object_set (src, "num-buffers", 100, NULL);

  conv = gst_element_factory_make ("audioconvert", "audioconvert");
  fail_unless (conv != NULL);

  filter = gst_element_factory_make ("capsfilter", "capsfilter");
  fail_unless (filter != NULL);
  g_object_set (filter, "caps", caps, NULL);

  injector_inject_probability = inject_probability;
  injector = GST_ELEMENT (g_object_new (test_injector_get_type (), NULL));

  srcpad = gst_element_get_pad (injector, "src");
  fail_unless (srcpad != NULL);
  gst_pad_add_buffer_probe (srcpad, G_CALLBACK (probe_cb), &drop_probability);
  gst_object_unref (srcpad);

  audiorate = gst_element_factory_make ("audiorate", "audiorate");
  fail_unless (audiorate != NULL);

  sink = gst_element_factory_make ("fakesink", "fakesink");
  fail_unless (sink != NULL);
  g_object_set (sink, "signal-handoffs", TRUE, NULL);
  g_signal_connect (sink, "handoff", G_CALLBACK (got_buf), &bufs);

  gst_bin_add_many (GST_BIN (pipe), src, conv, filter, injector, audiorate,
      sink, NULL);
  gst_element_link_many (src, conv, filter, injector, audiorate, sink, NULL);

  fail_unless_equals_int (gst_element_set_state (pipe, GST_STATE_PLAYING),
      GST_STATE_CHANGE_ASYNC);

  fail_unless_equals_int (gst_element_get_state (pipe, NULL, NULL, -1),
      GST_STATE_CHANGE_SUCCESS);

  msg = gst_bus_poll (GST_ELEMENT_BUS (pipe),
      GST_MESSAGE_EOS | GST_MESSAGE_ERROR, -1);
  fail_unless_equals_string (GST_MESSAGE_TYPE_NAME (msg), "eos");

  for (l = bufs; l != NULL; l = l->next) {
    GstBuffer *buf = GST_BUFFER (l->data);
    guint num_samples;

    fail_unless (GST_BUFFER_TIMESTAMP_IS_VALID (buf));
    fail_unless (GST_BUFFER_DURATION_IS_VALID (buf));
    fail_unless (GST_BUFFER_OFFSET_IS_VALID (buf));
    fail_unless (GST_BUFFER_OFFSET_END_IS_VALID (buf));

    GST_LOG ("buffer: ts=%" GST_TIME_FORMAT ", end_ts=%" GST_TIME_FORMAT
        " off=%" G_GINT64_FORMAT ", end_off=%" G_GINT64_FORMAT,
        GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
        GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf) + GST_BUFFER_DURATION (buf)),
        GST_BUFFER_OFFSET (buf), GST_BUFFER_OFFSET_END (buf));

    if (GST_CLOCK_TIME_IS_VALID (next_time)) {
      fail_unless_equals_uint64 (next_time, GST_BUFFER_TIMESTAMP (buf));
    }
    if (next_offset != GST_BUFFER_OFFSET_NONE) {
      fail_unless_equals_uint64 (next_offset, GST_BUFFER_OFFSET (buf));
    }

    /* check buffer size for sanity */
    fail_unless_equals_int (GST_BUFFER_SIZE (buf) % (width / 8), 0);

    /* check there is actually as much data as there should be */
    num_samples = GST_BUFFER_OFFSET_END (buf) - GST_BUFFER_OFFSET (buf);
    fail_unless_equals_int (GST_BUFFER_SIZE (buf), num_samples * (width / 8));

    next_time = GST_BUFFER_TIMESTAMP (buf) + GST_BUFFER_DURATION (buf);
    next_offset = GST_BUFFER_OFFSET_END (buf);
  }

  gst_message_unref (msg);
  gst_element_set_state (pipe, GST_STATE_NULL);
  gst_object_unref (pipe);

  g_list_foreach (bufs, (GFunc) gst_mini_object_unref, NULL);
  g_list_free (bufs);

  gst_caps_unref (caps);
}
Example #10
static GstFlowReturn
gst_aiff_parse_stream_data (GstAiffParse * aiff)
{
  GstBuffer *buf = NULL;
  GstFlowReturn res = GST_FLOW_OK;
  guint64 desired, obtained;
  GstClockTime timestamp, next_timestamp, duration;
  guint64 pos, nextpos;

iterate_adapter:
  GST_LOG_OBJECT (aiff,
      "offset: %" G_GINT64_FORMAT " , end: %" G_GINT64_FORMAT " , dataleft: %"
      G_GINT64_FORMAT, aiff->offset, aiff->end_offset, aiff->dataleft);

  /* Get the next n bytes and output them */
  if (aiff->dataleft == 0 || aiff->dataleft < aiff->bytes_per_sample)
    goto found_eos;

  /* scale the amount of data by the segment rate so we get equal
   * amounts of data regardless of the playback rate */
  desired =
      MIN (gst_guint64_to_gdouble (aiff->dataleft),
      MAX_BUFFER_SIZE * aiff->segment.abs_rate);

  if (desired >= aiff->bytes_per_sample && aiff->bytes_per_sample > 0)
    desired -= (desired % aiff->bytes_per_sample);

  GST_LOG_OBJECT (aiff, "Fetching %" G_GINT64_FORMAT " bytes of data "
      "from the sinkpad", desired);

  if (aiff->streaming) {
    guint avail = gst_adapter_available (aiff->adapter);

    if (avail < desired) {
      GST_LOG_OBJECT (aiff, "Got only %d bytes of data from the sinkpad",
          avail);
      return GST_FLOW_OK;
    }

    buf = gst_adapter_take_buffer (aiff->adapter, desired);
  } else {
    if ((res = gst_pad_pull_range (aiff->sinkpad, aiff->offset,
                desired, &buf)) != GST_FLOW_OK)
      goto pull_error;
  }

  /* If we have a pending close/start segment, send it now. */
  if (G_UNLIKELY (aiff->close_segment != NULL)) {
    gst_pad_push_event (aiff->srcpad, aiff->close_segment);
    aiff->close_segment = NULL;
  }
  if (G_UNLIKELY (aiff->start_segment != NULL)) {
    gst_pad_push_event (aiff->srcpad, aiff->start_segment);
    aiff->start_segment = NULL;
  }

  obtained = GST_BUFFER_SIZE (buf);

  /* our positions in bytes */
  pos = aiff->offset - aiff->datastart;
  nextpos = pos + obtained;

  /* update offsets, does not overflow. */
  GST_BUFFER_OFFSET (buf) = pos / aiff->bytes_per_sample;
  GST_BUFFER_OFFSET_END (buf) = nextpos / aiff->bytes_per_sample;

  if (aiff->bps > 0) {
    /* and timestamps if we have a bitrate, be careful for overflows */
    timestamp = uint64_ceiling_scale (pos, GST_SECOND, (guint64) aiff->bps);
    next_timestamp =
        uint64_ceiling_scale (nextpos, GST_SECOND, (guint64) aiff->bps);
    duration = next_timestamp - timestamp;

    /* update current running segment position */
    gst_segment_set_last_stop (&aiff->segment, GST_FORMAT_TIME, next_timestamp);
  } else {
    /* no bitrate, all we know is that the first sample has timestamp 0, all
     * other positions and durations have unknown timestamp. */
    if (pos == 0)
      timestamp = 0;
    else
      timestamp = GST_CLOCK_TIME_NONE;
    duration = GST_CLOCK_TIME_NONE;
    /* update current running segment position with byte offset */
    gst_segment_set_last_stop (&aiff->segment, GST_FORMAT_BYTES, nextpos);
  }
  if (aiff->discont) {
    GST_DEBUG_OBJECT (aiff, "marking DISCONT");
    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
    aiff->discont = FALSE;
  }

  GST_BUFFER_TIMESTAMP (buf) = timestamp;
  GST_BUFFER_DURATION (buf) = duration;
  gst_buffer_set_caps (buf, aiff->caps);

  GST_LOG_OBJECT (aiff,
      "Got buffer. timestamp:%" GST_TIME_FORMAT " , duration:%" GST_TIME_FORMAT
      ", size:%u", GST_TIME_ARGS (timestamp), GST_TIME_ARGS (duration),
      GST_BUFFER_SIZE (buf));

  if ((res = gst_pad_push (aiff->srcpad, buf)) != GST_FLOW_OK)
    goto push_error;

  if (obtained < aiff->dataleft) {
    aiff->offset += obtained;
    aiff->dataleft -= obtained;
  } else {
    aiff->offset += aiff->dataleft;
    aiff->dataleft = 0;
  }

  /* Iterate until need more data, so adapter size won't grow */
  if (aiff->streaming) {
    GST_LOG_OBJECT (aiff,
        "offset: %" G_GINT64_FORMAT " , end: %" G_GINT64_FORMAT, aiff->offset,
        aiff->end_offset);
    goto iterate_adapter;
  }
  return res;

  /* ERROR */
found_eos:
  {
    GST_DEBUG_OBJECT (aiff, "found EOS");
    return GST_FLOW_UNEXPECTED;
  }
pull_error:
  {
    /* check if we got EOS */
    if (res == GST_FLOW_UNEXPECTED)
      goto found_eos;

    GST_WARNING_OBJECT (aiff,
        "Error getting %" G_GINT64_FORMAT " bytes from the "
        "sinkpad (dataleft = %" G_GINT64_FORMAT ")", desired, aiff->dataleft);
    return res;
  }
push_error:
  {
    GST_INFO_OBJECT (aiff,
        "Error pushing on srcpad %s:%s, reason %s, is linked? = %d",
        GST_DEBUG_PAD_NAME (aiff->srcpad), gst_flow_get_name (res),
        gst_pad_is_linked (aiff->srcpad));
    return res;
  }
}
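/* Worked example of the timestamp math above: for 16-bit stereo at 44.1 kHz,
 * bps = 176400 bytes/s and bytes_per_sample = 4, so a byte position of
 * pos = 88200 gives GST_BUFFER_OFFSET = 22050 samples and a ceiling-scaled
 * timestamp of 0.5 s. */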
static GstFlowReturn
gst_split_file_src_create (GstBaseSrc * basesrc, guint64 offset, guint size,
    GstBuffer ** buffer)
{
  GstSplitFileSrc *src = GST_SPLIT_FILE_SRC (basesrc);
  GstFilePart cur_part;
  GInputStream *stream;
  GCancellable *cancel;
  GSeekable *seekable;
  GstBuffer *buf;
  GError *err = NULL;
  guint64 read_offset;
  guint8 *data;
  guint to_read;

  cur_part = src->parts[src->cur_part];
  if (offset < cur_part.start || offset > cur_part.stop) {
    if (!gst_split_file_src_find_part_for_offset (src, offset, &src->cur_part))
      return GST_FLOW_UNEXPECTED;
    cur_part = src->parts[src->cur_part];
  }

  GST_LOG_OBJECT (src, "current part: %u (%" G_GUINT64_FORMAT " - "
      "%" G_GUINT64_FORMAT ", %s)", src->cur_part, cur_part.start,
      cur_part.stop, cur_part.path);

  buf = gst_buffer_new_and_alloc (size);

  GST_BUFFER_OFFSET (buf) = offset;

  data = GST_BUFFER_DATA (buf);

  cancel = src->cancellable;

  while (size > 0) {
    guint64 bytes_to_end_of_part;
    gsize read = 0;

    /* we want the offset into the file part */
    read_offset = offset - cur_part.start;

    GST_LOG ("Reading part %03u from offset %" G_GUINT64_FORMAT " (%s)",
        src->cur_part, read_offset, cur_part.path);

    /* FIXME: only seek when needed (hopefully gio is smart) */
    seekable = G_SEEKABLE (cur_part.stream);
    if (!g_seekable_seek (seekable, read_offset, G_SEEK_SET, cancel, &err))
      goto seek_failed;

    GST_LOG_OBJECT (src, "now: %" G_GUINT64_FORMAT, g_seekable_tell (seekable));

    bytes_to_end_of_part = (cur_part.stop - cur_part.start) + 1 - read_offset;
    to_read = MIN (size, bytes_to_end_of_part);

    GST_LOG_OBJECT (src, "reading %u bytes from part %u (bytes to end of "
        "part: %u)", to_read, src->cur_part, (guint) bytes_to_end_of_part);

    stream = G_INPUT_STREAM (cur_part.stream);

    /* NB: we won't try to read beyond EOF */
    if (!g_input_stream_read_all (stream, data, to_read, &read, cancel, &err))
      goto read_failed;

    GST_LOG_OBJECT (src, "read %u bytes", (guint) read);

    data += read;
    size -= read;
    offset += read;

    /* are we done? */
    if (size == 0)
      break;

    GST_LOG_OBJECT (src, "%u bytes left to read for this chunk", size);

    /* corner case, this should never really happen (assuming basesrc clips
     * requests beyond the file size) */
    if (read < to_read) {
      if (src->cur_part == src->num_parts - 1) {
        /* last file part, stop reading and truncate buffer */
        GST_BUFFER_SIZE (buf) = offset - GST_BUFFER_OFFSET (buf);
        break;
      } else {
        goto file_part_changed;
      }
    }

    ++src->cur_part;
    cur_part = src->parts[src->cur_part];
  }

  GST_BUFFER_OFFSET_END (buf) = offset;

  *buffer = buf;
  GST_LOG_OBJECT (src, "read %u bytes into buf %p", GST_BUFFER_SIZE (buf), buf);
  return GST_FLOW_OK;

/* ERRORS */
seek_failed:
  {
    if (err->code == G_IO_ERROR_CANCELLED)
      goto cancelled;

    GST_ELEMENT_ERROR (src, RESOURCE, SEEK, (NULL),
        ("Seek to %" G_GUINT64_FORMAT " in %s failed", read_offset,
            cur_part.path));
    g_error_free (err);
    gst_buffer_unref (buf);
    return GST_FLOW_ERROR;
  }
read_failed:
  {
    if (err->code == G_IO_ERROR_CANCELLED)
      goto cancelled;

    GST_ELEMENT_ERROR (src, RESOURCE, READ, ("%s", err->message),
        ("Read from %" G_GUINT64_FORMAT " in %s failed", read_offset,
            cur_part.path));
    g_error_free (err);
    gst_buffer_unref (buf);
    return GST_FLOW_ERROR;
  }
file_part_changed:
  {
    GST_ELEMENT_ERROR (src, RESOURCE, READ,
        ("Read error while reading file part %s", cur_part.path),
        ("Short read in file part, file may have been modified since start"));
    gst_buffer_unref (buf);
    return GST_FLOW_ERROR;
  }
cancelled:
  {
    GST_DEBUG_OBJECT (src, "I/O operation cancelled from another thread");
    g_error_free (err);
    gst_buffer_unref (buf);
    return GST_FLOW_WRONG_STATE;
  }
}
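/* gst_split_file_src_find_part_for_offset() is not shown above.  A minimal
 * sketch of what it could do, assuming src->parts[] is ordered by start
 * offset and src->num_parts holds the entry count (the real helper may well
 * use a binary search instead): */
static gboolean
find_part_for_offset_sketch (GstSplitFileSrc * src, guint64 offset,
    guint * part_number)
{
  guint i;

  for (i = 0; i < src->num_parts; ++i) {
    if (offset >= src->parts[i].start && offset <= src->parts[i].stop) {
      *part_number = i;
      return TRUE;
    }
  }
  return FALSE;
}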
Example #12
static GstFlowReturn
gst_identity_transform_ip (GstBaseTransform * trans, GstBuffer * buf)
{
  GstFlowReturn ret = GST_FLOW_OK;
  GstIdentity *identity = GST_IDENTITY (trans);
  GstClockTime runtimestamp = G_GINT64_CONSTANT (0);
  gsize size;

  size = gst_buffer_get_size (buf);

  if (identity->check_imperfect_timestamp)
    gst_identity_check_imperfect_timestamp (identity, buf);
  if (identity->check_imperfect_offset)
    gst_identity_check_imperfect_offset (identity, buf);

  /* update prev values */
  identity->prev_timestamp = GST_BUFFER_TIMESTAMP (buf);
  identity->prev_duration = GST_BUFFER_DURATION (buf);
  identity->prev_offset_end = GST_BUFFER_OFFSET_END (buf);
  identity->prev_offset = GST_BUFFER_OFFSET (buf);

  if (identity->error_after >= 0) {
    identity->error_after--;
    if (identity->error_after == 0)
      goto error_after;
  }

  if (identity->drop_probability > 0.0) {
    if ((gfloat) (1.0 * rand () / (RAND_MAX)) < identity->drop_probability)
      goto dropped;
  }

  if (identity->dump) {
    GstMapInfo info;

    gst_buffer_map (buf, &info, GST_MAP_READ);
    gst_util_dump_mem (info.data, info.size);
    gst_buffer_unmap (buf, &info);
  }

  if (!identity->silent) {
    gst_identity_update_last_message_for_buffer (identity, "chain", buf, size);
  }

  if (identity->datarate > 0) {
    GstClockTime time = gst_util_uint64_scale_int (identity->offset,
        GST_SECOND, identity->datarate);

    GST_BUFFER_TIMESTAMP (buf) = time;
    GST_BUFFER_DURATION (buf) = size * GST_SECOND / identity->datarate;
  }

  if (identity->signal_handoffs)
    g_signal_emit (identity, gst_identity_signals[SIGNAL_HANDOFF], 0, buf);

  if (trans->segment.format == GST_FORMAT_TIME)
    runtimestamp = gst_segment_to_running_time (&trans->segment,
        GST_FORMAT_TIME, GST_BUFFER_TIMESTAMP (buf));

  if ((identity->sync) && (trans->segment.format == GST_FORMAT_TIME)) {
    GstClock *clock;

    GST_OBJECT_LOCK (identity);
    if ((clock = GST_ELEMENT (identity)->clock)) {
      GstClockReturn cret;
      GstClockTime timestamp;

      timestamp = runtimestamp + GST_ELEMENT (identity)->base_time;

      /* save id if we need to unlock */
      identity->clock_id = gst_clock_new_single_shot_id (clock, timestamp);
      GST_OBJECT_UNLOCK (identity);

      cret = gst_clock_id_wait (identity->clock_id, NULL);

      GST_OBJECT_LOCK (identity);
      if (identity->clock_id) {
        gst_clock_id_unref (identity->clock_id);
        identity->clock_id = NULL;
      }
      if (cret == GST_CLOCK_UNSCHEDULED)
        ret = GST_FLOW_EOS;
    }
    GST_OBJECT_UNLOCK (identity);
  }

  identity->offset += size;

  if (identity->sleep_time && ret == GST_FLOW_OK)
    g_usleep (identity->sleep_time);

  if (identity->single_segment && (trans->segment.format == GST_FORMAT_TIME)
      && (ret == GST_FLOW_OK)) {
    GST_BUFFER_TIMESTAMP (buf) = runtimestamp;
    GST_BUFFER_OFFSET (buf) = GST_CLOCK_TIME_NONE;
    GST_BUFFER_OFFSET_END (buf) = GST_CLOCK_TIME_NONE;
  }

  return ret;

  /* ERRORS */
error_after:
  {
    GST_ELEMENT_ERROR (identity, CORE, FAILED,
        (_("Failed after iterations as requested.")), (NULL));
    return GST_FLOW_ERROR;
  }
dropped:
  {
    if (!identity->silent) {
      gst_identity_update_last_message_for_buffer (identity, "dropping", buf,
          size);
    }
    /* return DROPPED to basetransform. */
    return GST_BASE_TRANSFORM_FLOW_DROPPED;
  }
}
Example #13
static gboolean
gst_ks_video_src_timestamp_buffer (GstKsVideoSrc * self, GstBuffer * buf,
    GstClockTime presentation_time)
{
  GstKsVideoSrcPrivate *priv = GST_KS_VIDEO_SRC_GET_PRIVATE (self);
  GstClockTime duration;
  GstClock *clock;
  GstClockTime timestamp;

  duration = gst_ks_video_device_get_duration (priv->device);

  GST_OBJECT_LOCK (self);
  clock = GST_ELEMENT_CLOCK (self);
  if (clock != NULL) {
    gst_object_ref (clock);
    timestamp = GST_ELEMENT (self)->base_time;

    if (GST_CLOCK_TIME_IS_VALID (presentation_time)) {
      if (presentation_time > GST_ELEMENT (self)->base_time)
        presentation_time -= GST_ELEMENT (self)->base_time;
      else
        presentation_time = 0;
    }
  } else {
    timestamp = GST_CLOCK_TIME_NONE;
  }
  GST_OBJECT_UNLOCK (self);

  if (clock != NULL) {

    /* The time according to the current clock */
    timestamp = gst_clock_get_time (clock) - timestamp;
    if (timestamp > duration)
      timestamp -= duration;
    else
      timestamp = 0;

    if (GST_CLOCK_TIME_IS_VALID (presentation_time)) {
      /*
       * We don't use this for anything yet, need to ponder how to deal
       * with pins that use an internal clock and timestamp from 0.
       */
      GstClockTimeDiff diff = GST_CLOCK_DIFF (presentation_time, timestamp);
      GST_DEBUG_OBJECT (self, "diff between gst and driver timestamp: %"
          G_GINT64_FORMAT, diff);
    }

    gst_object_unref (clock);
    clock = NULL;

    /* Unless it's the first frame, align the current timestamp on a multiple
     * of duration since the previous */
    if (GST_CLOCK_TIME_IS_VALID (priv->prev_ts)) {
      GstClockTime delta;
      guint delta_remainder, delta_offset;

      /* REVISIT: I've seen this happen with the GstSystemClock on Windows,
       *          scary... */
      if (timestamp < priv->prev_ts) {
        GST_INFO_OBJECT (self, "clock is ticking backwards");
        return FALSE;
      }

      /* Round to a duration boundary */
      delta = timestamp - priv->prev_ts;
      delta_remainder = delta % duration;

      if (delta_remainder < duration / 3)
        timestamp -= delta_remainder;
      else
        timestamp += duration - delta_remainder;

      /* How many frames are we off then? */
      delta = timestamp - priv->prev_ts;
      delta_offset = delta / duration;

      if (delta_offset == 1)    /* perfect */
        GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DISCONT);
      else if (delta_offset > 1) {
        guint lost = delta_offset - 1;
        GST_INFO_OBJECT (self, "lost %d frame%s, setting discont flag",
            lost, (lost > 1) ? "s" : "");
        GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
      } else if (delta_offset == 0) {   /* overproduction, skip this frame */
        GST_INFO_OBJECT (self, "skipping frame");
        return FALSE;
      }

      priv->offset += delta_offset;
    }

    priv->prev_ts = timestamp;
  }

  GST_BUFFER_OFFSET (buf) = priv->offset;
  GST_BUFFER_OFFSET_END (buf) = GST_BUFFER_OFFSET (buf) + 1;
  GST_BUFFER_TIMESTAMP (buf) = timestamp;
  GST_BUFFER_DURATION (buf) = duration;

  return TRUE;
}
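/* Worked example of the rounding above: at 30 fps the duration is ~33.3 ms.
 * If the delta since prev_ts is 68 ms, delta_remainder is ~1.3 ms, which is
 * below duration / 3, so the timestamp is pulled back onto the two-frame
 * boundary; delta_offset then becomes 2, one frame is reported lost and the
 * DISCONT flag is set. */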
GstFlowReturn
gst_base_video_decoder_finish_frame (GstBaseVideoDecoder * base_video_decoder,
    GstVideoFrame * frame)
{
  GstBaseVideoDecoderClass *base_video_decoder_class;
  GstBuffer *src_buffer;

  GST_DEBUG ("finish frame");

  base_video_decoder_class =
      GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);

  GST_DEBUG ("finish frame sync=%d pts=%" GST_TIME_FORMAT,
      GST_VIDEO_FRAME_FLAG_IS_SET (frame, GST_VIDEO_FRAME_FLAG_SYNC_POINT),
      GST_TIME_ARGS (frame->presentation_timestamp));

  if (GST_CLOCK_TIME_IS_VALID (frame->presentation_timestamp)) {
    if (frame->presentation_timestamp != base_video_decoder->timestamp_offset) {
      GST_DEBUG ("sync timestamp %" GST_TIME_FORMAT " diff %" GST_TIME_FORMAT,
          GST_TIME_ARGS (frame->presentation_timestamp),
          GST_TIME_ARGS (frame->presentation_timestamp -
              base_video_decoder->segment.start));
      base_video_decoder->timestamp_offset = frame->presentation_timestamp;
      base_video_decoder->field_index = 0;
    } else {
      /* This case is for one initial timestamp and no others, e.g.,
       * filesrc ! decoder ! xvimagesink */
      GST_WARNING ("sync timestamp didn't change, ignoring");
      frame->presentation_timestamp = GST_CLOCK_TIME_NONE;
    }
  } else {
    if (GST_VIDEO_FRAME_FLAG_IS_SET (frame, GST_VIDEO_FRAME_FLAG_SYNC_POINT)) {
      GST_WARNING ("sync point doesn't have timestamp");
      if (!GST_CLOCK_TIME_IS_VALID (base_video_decoder->timestamp_offset)) {
        GST_WARNING
            ("No base timestamp.  Assuming frames start at segment start");
        base_video_decoder->timestamp_offset =
            base_video_decoder->segment.start;
        base_video_decoder->field_index = 0;
      }
    }
  }
  frame->field_index = base_video_decoder->field_index;
  base_video_decoder->field_index += frame->n_fields;

  if (frame->presentation_timestamp == GST_CLOCK_TIME_NONE) {
    frame->presentation_timestamp =
        gst_base_video_decoder_get_field_timestamp (base_video_decoder,
        frame->field_index);
    frame->presentation_duration = GST_CLOCK_TIME_NONE;
    frame->decode_timestamp =
        gst_base_video_decoder_get_timestamp (base_video_decoder,
        frame->decode_frame_number);
  }
  if (frame->presentation_duration == GST_CLOCK_TIME_NONE) {
    frame->presentation_duration =
        gst_base_video_decoder_get_field_duration (base_video_decoder,
        frame->n_fields);
  }

  if (GST_CLOCK_TIME_IS_VALID (base_video_decoder->last_timestamp)) {
    if (frame->presentation_timestamp < base_video_decoder->last_timestamp) {
      GST_WARNING ("decreasing timestamp (%" GST_TIME_FORMAT " < %"
          GST_TIME_FORMAT ")", GST_TIME_ARGS (frame->presentation_timestamp),
          GST_TIME_ARGS (base_video_decoder->last_timestamp));
    }
  }
  base_video_decoder->last_timestamp = frame->presentation_timestamp;

  src_buffer = frame->src_buffer;

  GST_BUFFER_FLAG_UNSET (src_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  if (base_video_decoder->state.interlaced) {
#ifndef GST_VIDEO_BUFFER_TFF
#define GST_VIDEO_BUFFER_TFF (GST_MINI_OBJECT_FLAG_LAST << 5)
#endif
#ifndef GST_VIDEO_BUFFER_RFF
#define GST_VIDEO_BUFFER_RFF (GST_MINI_OBJECT_FLAG_LAST << 6)
#endif
#ifndef GST_VIDEO_BUFFER_ONEFIELD
#define GST_VIDEO_BUFFER_ONEFIELD (GST_MINI_OBJECT_FLAG_LAST << 7)
#endif

    if (GST_VIDEO_FRAME_FLAG_IS_SET (frame, GST_VIDEO_FRAME_FLAG_TFF)) {
      GST_BUFFER_FLAG_SET (src_buffer, GST_VIDEO_BUFFER_TFF);
    } else {
      GST_BUFFER_FLAG_UNSET (src_buffer, GST_VIDEO_BUFFER_TFF);
    }
    GST_BUFFER_FLAG_UNSET (src_buffer, GST_VIDEO_BUFFER_RFF);
    GST_BUFFER_FLAG_UNSET (src_buffer, GST_VIDEO_BUFFER_ONEFIELD);
    if (frame->n_fields == 3) {
      GST_BUFFER_FLAG_SET (src_buffer, GST_VIDEO_BUFFER_RFF);
    } else if (frame->n_fields == 1) {
      GST_BUFFER_FLAG_SET (src_buffer, GST_VIDEO_BUFFER_ONEFIELD);
    }
  }
  if (base_video_decoder->discont) {
    GST_BUFFER_FLAG_UNSET (src_buffer, GST_BUFFER_FLAG_DISCONT);
    base_video_decoder->discont = FALSE;
  }

  GST_BUFFER_TIMESTAMP (src_buffer) = frame->presentation_timestamp;
  GST_BUFFER_DURATION (src_buffer) = frame->presentation_duration;
  GST_BUFFER_OFFSET (src_buffer) = GST_BUFFER_OFFSET_NONE;
  GST_BUFFER_OFFSET_END (src_buffer) = GST_BUFFER_OFFSET_NONE;

  GST_DEBUG ("pushing frame %" GST_TIME_FORMAT,
      GST_TIME_ARGS (frame->presentation_timestamp));

  gst_base_video_decoder_set_src_caps (base_video_decoder);

  if (base_video_decoder->sink_clipping) {
    gint64 start = GST_BUFFER_TIMESTAMP (src_buffer);
    gint64 stop = GST_BUFFER_TIMESTAMP (src_buffer) +
        GST_BUFFER_DURATION (src_buffer);

    if (gst_segment_clip (&base_video_decoder->segment, GST_FORMAT_TIME,
            start, stop, &start, &stop)) {
      GST_BUFFER_TIMESTAMP (src_buffer) = start;
      GST_BUFFER_DURATION (src_buffer) = stop - start;
      GST_DEBUG ("accepting buffer inside segment: %" GST_TIME_FORMAT
          " %" GST_TIME_FORMAT
          " seg %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT
          " time %" GST_TIME_FORMAT,
          GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (src_buffer)),
          GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (src_buffer) +
              GST_BUFFER_DURATION (src_buffer)),
          GST_TIME_ARGS (base_video_decoder->segment.start),
          GST_TIME_ARGS (base_video_decoder->segment.stop),
          GST_TIME_ARGS (base_video_decoder->segment.time));
    } else {
      GST_DEBUG ("dropping buffer outside segment: %" GST_TIME_FORMAT
          " %" GST_TIME_FORMAT
          " seg %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT
          " time %" GST_TIME_FORMAT,
          GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (src_buffer)),
          GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (src_buffer) +
              GST_BUFFER_DURATION (src_buffer)),
          GST_TIME_ARGS (base_video_decoder->segment.start),
          GST_TIME_ARGS (base_video_decoder->segment.stop),
          GST_TIME_ARGS (base_video_decoder->segment.time));
      gst_video_frame_unref (frame);
      return GST_FLOW_OK;
    }
  }

  gst_buffer_ref (src_buffer);
  gst_video_frame_unref (frame);

  if (base_video_decoder_class->shape_output)
    return base_video_decoder_class->shape_output (base_video_decoder,
        src_buffer);

  return gst_pad_push (GST_BASE_VIDEO_DECODER_SRC_PAD (base_video_decoder),
      src_buffer);
}
Example #15
#ifdef __SYMBIAN32__
EXPORT_C
#endif

GstBuffer *
gst_audio_buffer_clip (GstBuffer * buffer, GstSegment * segment, gint rate,
    gint frame_size)
{
  GstBuffer *ret;
  GstClockTime timestamp = GST_CLOCK_TIME_NONE, duration = GST_CLOCK_TIME_NONE;
  guint64 offset = GST_BUFFER_OFFSET_NONE, offset_end = GST_BUFFER_OFFSET_NONE;
  guint8 *data;
  guint size;

  gboolean change_duration = TRUE, change_offset = TRUE, change_offset_end =
      TRUE;

  g_return_val_if_fail (segment->format == GST_FORMAT_TIME ||
      segment->format == GST_FORMAT_DEFAULT, buffer);
  g_return_val_if_fail (GST_IS_BUFFER (buffer), NULL);

  if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
    /* No timestamp - assume the buffer is completely in the segment */
    return buffer;

  /* Get copies of the buffer metadata to change later. 
   * Calculate the missing values for the calculations,
   * they won't be changed later though. */

  data = GST_BUFFER_DATA (buffer);
  size = GST_BUFFER_SIZE (buffer);

  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  if (GST_BUFFER_DURATION_IS_VALID (buffer)) {
    duration = GST_BUFFER_DURATION (buffer);
  } else {
    change_duration = FALSE;
    duration = gst_util_uint64_scale (size / frame_size, GST_SECOND, rate);
  }

  if (GST_BUFFER_OFFSET_IS_VALID (buffer)) {
    offset = GST_BUFFER_OFFSET (buffer);
  } else {
    change_offset = FALSE;
    offset = 0;
  }

  if (GST_BUFFER_OFFSET_END_IS_VALID (buffer)) {
    offset_end = GST_BUFFER_OFFSET_END (buffer);
  } else {
    change_offset_end = FALSE;
    offset_end = offset + size / frame_size;
  }

  if (segment->format == GST_FORMAT_TIME) {
    /* Handle clipping for GST_FORMAT_TIME */

    gint64 start, stop, cstart, cstop, diff;

    start = timestamp;
    stop = timestamp + duration;

    if (gst_segment_clip (segment, GST_FORMAT_TIME,
            start, stop, &cstart, &cstop)) {

      diff = cstart - start;
      if (diff > 0) {
        timestamp = cstart;

        if (change_duration)
          duration -= diff;

        diff = gst_util_uint64_scale (diff, rate, GST_SECOND);
        if (change_offset)
          offset += diff;
        data += diff * frame_size;
        size -= diff * frame_size;
      }

      diff = stop - cstop;
      if (diff > 0) {
        /* duration is always valid if stop is valid */
        duration -= diff;

        diff = gst_util_uint64_scale (diff, rate, GST_SECOND);
        if (change_offset_end)
          offset_end -= diff;
        size -= diff * frame_size;
      }
    } else {
      gst_buffer_unref (buffer);
      return NULL;
    }
  } else {
    /* Handle clipping for GST_FORMAT_DEFAULT */
    gint64 start, stop, cstart, cstop, diff;

    g_return_val_if_fail (GST_BUFFER_OFFSET_IS_VALID (buffer), buffer);

    start = offset;
    stop = offset_end;

    if (gst_segment_clip (segment, GST_FORMAT_DEFAULT,
            start, stop, &cstart, &cstop)) {

      diff = cstart - start;
      if (diff > 0) {
        offset = cstart;

        timestamp = gst_util_uint64_scale (cstart, GST_SECOND, rate);

        if (change_duration)
          duration -= gst_util_uint64_scale (diff, GST_SECOND, rate);

        data += diff * frame_size;
        size -= diff * frame_size;
      }

      diff = stop - cstop;
      if (diff > 0) {
        offset_end = cstop;

        if (change_duration)
          duration -= gst_util_uint64_scale (diff, GST_SECOND, rate);

        size -= diff * frame_size;
      }
    } else {
      gst_buffer_unref (buffer);
      return NULL;
    }
  }

  /* Get a metadata writable buffer and apply all changes */
  ret = gst_buffer_make_metadata_writable (buffer);

  GST_BUFFER_TIMESTAMP (ret) = timestamp;
  GST_BUFFER_SIZE (ret) = size;
  GST_BUFFER_DATA (ret) = data;

  if (change_duration)
    GST_BUFFER_DURATION (ret) = duration;
  if (change_offset)
    GST_BUFFER_OFFSET (ret) = offset;
  if (change_offset_end)
    GST_BUFFER_OFFSET_END (ret) = offset_end;

  return ret;
}
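/* Worked example of the TIME clipping above: for a buffer with timestamp 1 s
 * and duration 1 s against a segment of [1.5 s, 3 s] at rate = 44100 and
 * frame_size = 4 (16-bit stereo), cstart - start = 0.5 s, so the timestamp
 * moves to 1.5 s, the duration shrinks to 0.5 s, 22050 frames are skipped
 * and data advances (and size shrinks) by 22050 * 4 = 88200 bytes. */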
Example #16
static GstFlowReturn
audioresample_transform (GstBaseTransform * base, GstBuffer * inbuf,
    GstBuffer * outbuf)
{
  GstAudioresample *audioresample;
  ResampleState *r;
  guchar *data, *datacopy;
  gulong size;
  GstClockTime timestamp;

  audioresample = GST_AUDIORESAMPLE (base);
  r = audioresample->resample;

  data = GST_BUFFER_DATA (inbuf);
  size = GST_BUFFER_SIZE (inbuf);
  timestamp = GST_BUFFER_TIMESTAMP (inbuf);

  GST_LOG_OBJECT (audioresample, "transforming buffer of %ld bytes, ts %"
      GST_TIME_FORMAT ", duration %" GST_TIME_FORMAT ", offset %"
      G_GINT64_FORMAT ", offset_end %" G_GINT64_FORMAT,
      size, GST_TIME_ARGS (timestamp),
      GST_TIME_ARGS (GST_BUFFER_DURATION (inbuf)),
      GST_BUFFER_OFFSET (inbuf), GST_BUFFER_OFFSET_END (inbuf));

  /* check for timestamp discontinuities and flush/reset if needed */
  if (G_UNLIKELY (audioresample_check_discont (audioresample, timestamp))) {
    /* Flush internal samples */
    audioresample_pushthrough (audioresample);
    /* Inform downstream element about discontinuity */
    audioresample->need_discont = TRUE;
    /* We want to recalculate the offset */
    audioresample->ts_offset = -1;
  }

  if (audioresample->ts_offset == -1) {
    /* if we don't know the initial offset yet, calculate it based on the 
     * input timestamp. */
    if (GST_CLOCK_TIME_IS_VALID (timestamp)) {
      GstClockTime stime;

      /* offset used to calculate the timestamps. We use the sample offset for
       * this to make it more accurate. We want the first buffer to have the
       * same timestamp as the incoming timestamp. */
      audioresample->next_ts = timestamp;
      audioresample->ts_offset =
          gst_util_uint64_scale_int (timestamp, r->o_rate, GST_SECOND);
      /* offset used to set as the buffer offset, this offset is always
       * relative to the stream time, note that timestamp is not... */
      stime = (timestamp - base->segment.start) + base->segment.time;
      audioresample->offset =
          gst_util_uint64_scale_int (stime, r->o_rate, GST_SECOND);
    }
  }
  audioresample->prev_ts = timestamp;
  audioresample->prev_duration = GST_BUFFER_DURATION (inbuf);

  /* need to memdup, resample takes ownership. */
  datacopy = g_memdup (data, size);
  resample_add_input_data (r, datacopy, size, g_free, datacopy);

  return audioresample_do_output (audioresample, outbuf);
}
Example #17
static GstFlowReturn
gst_file_src_create_read (GstFileSrc * src, guint64 offset, guint length,
    GstBuffer ** buffer)
{
  int ret;
  GstBuffer *buf;

  if (G_UNLIKELY (src->read_position != offset)) {
    off_t res;

    res = lseek (src->fd, offset, SEEK_SET);
    if (G_UNLIKELY (res < 0 || res != offset))
      goto seek_failed;

    src->read_position = offset;
  }

  buf = gst_buffer_try_new_and_alloc (length);
  if (G_UNLIKELY (buf == NULL && length > 0)) {
    GST_ERROR_OBJECT (src, "Failed to allocate %u bytes", length);
    return GST_FLOW_ERROR;
  }

  /* No need to read anything if length is 0 */
  if (length > 0) {
    GST_LOG_OBJECT (src, "Reading %d bytes at offset 0x%" G_GINT64_MODIFIER "x",
        length, offset);
    ret = read (src->fd, GST_BUFFER_DATA (buf), length);
    if (G_UNLIKELY (ret < 0))
      goto could_not_read;

    /* seekable regular files should have given us what we expected */
    if (G_UNLIKELY ((guint) ret < length && src->seekable))
      goto unexpected_eos;

    /* other files should eos if they read 0 and more was requested */
    if (G_UNLIKELY (ret == 0 && length > 0))
      goto eos;

    length = ret;
    GST_BUFFER_SIZE (buf) = length;
    GST_BUFFER_OFFSET (buf) = offset;
    GST_BUFFER_OFFSET_END (buf) = offset + length;

    src->read_position += length;
  }

  *buffer = buf;

  return GST_FLOW_OK;

  /* ERROR */
seek_failed:
  {
    GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL), GST_ERROR_SYSTEM);
    return GST_FLOW_ERROR;
  }
could_not_read:
  {
    GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL), GST_ERROR_SYSTEM);
    gst_buffer_unref (buf);
    return GST_FLOW_ERROR;
  }
unexpected_eos:
  {
    GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL),
        ("unexpected end of file."));
    gst_buffer_unref (buf);
    return GST_FLOW_ERROR;
  }
eos:
  {
    GST_DEBUG ("non-regular file hits EOS");
    gst_buffer_unref (buf);
    return GST_FLOW_UNEXPECTED;
  }
}
static GstBuffer *
gst_rtp_dtmf_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
{

  GstRtpDTMFDepay *rtpdtmfdepay = NULL;
  GstBuffer *outbuf = NULL;
  gint payload_len;
  guint8 *payload = NULL;
  guint32 timestamp;
  GstRTPDTMFPayload dtmf_payload;
  gboolean marker;
  GstStructure *structure = NULL;
  GstMessage *dtmf_message = NULL;

  rtpdtmfdepay = GST_RTP_DTMF_DEPAY (depayload);

  if (!gst_rtp_buffer_validate (buf))
    goto bad_packet;

  payload_len = gst_rtp_buffer_get_payload_len (buf);
  payload = gst_rtp_buffer_get_payload (buf);

  if (payload_len != sizeof (GstRTPDTMFPayload))
    goto bad_packet;

  memcpy (&dtmf_payload, payload, sizeof (GstRTPDTMFPayload));

  if (dtmf_payload.event > MAX_EVENT)
    goto bad_packet;


  marker = gst_rtp_buffer_get_marker (buf);

  timestamp = gst_rtp_buffer_get_timestamp (buf);

  dtmf_payload.duration = g_ntohs (dtmf_payload.duration);

  /* clip to whole units of unit_time */
  if (rtpdtmfdepay->unit_time) {
    guint unit_time_clock =
        (rtpdtmfdepay->unit_time * depayload->clock_rate) / 1000;
    if (dtmf_payload.duration % unit_time_clock) {
      /* Make sure we don't overflow the duration */
      if (dtmf_payload.duration < G_MAXUINT16 - unit_time_clock)
        dtmf_payload.duration += unit_time_clock -
            (dtmf_payload.duration % unit_time_clock);
      else
        dtmf_payload.duration -= dtmf_payload.duration % unit_time_clock;
    }
  }

  /* clip to max duration */
  if (rtpdtmfdepay->max_duration) {
    guint max_duration_clock =
        (rtpdtmfdepay->max_duration * depayload->clock_rate) / 1000;

    if (max_duration_clock < G_MAXUINT16 &&
        dtmf_payload.duration > max_duration_clock)
      dtmf_payload.duration = max_duration_clock;
  }

  GST_DEBUG_OBJECT (depayload, "Received new RTP DTMF packet : "
      "marker=%d - timestamp=%u - event=%d - duration=%d",
      marker, timestamp, dtmf_payload.event, dtmf_payload.duration);

  GST_DEBUG_OBJECT (depayload,
      "Previous information : timestamp=%u - duration=%d",
      rtpdtmfdepay->previous_ts, rtpdtmfdepay->previous_duration);

  /* First packet */
  if (marker || rtpdtmfdepay->previous_ts != timestamp) {
    rtpdtmfdepay->sample = 0;
    rtpdtmfdepay->previous_ts = timestamp;
    rtpdtmfdepay->previous_duration = dtmf_payload.duration;
    rtpdtmfdepay->first_gst_ts = GST_BUFFER_TIMESTAMP (buf);

    structure = gst_structure_new ("dtmf-event",
        "number", G_TYPE_INT, dtmf_payload.event,
        "volume", G_TYPE_INT, dtmf_payload.volume,
        "type", G_TYPE_INT, 1, "method", G_TYPE_INT, 1, NULL);
    if (structure) {
      dtmf_message =
          gst_message_new_element (GST_OBJECT (depayload), structure);
      if (dtmf_message) {
        if (!gst_element_post_message (GST_ELEMENT (depayload), dtmf_message)) {
          GST_ERROR_OBJECT (depayload,
              "Unable to send dtmf-event message to bus");
        }
      } else {
        GST_ERROR_OBJECT (depayload, "Unable to create dtmf-event message");
      }
    } else {
      GST_ERROR_OBJECT (depayload, "Unable to create dtmf-event structure");
    }
  } else {
    guint16 duration = dtmf_payload.duration;
    dtmf_payload.duration -= rtpdtmfdepay->previous_duration;
    /* If late buffer, ignore */
    if (duration > rtpdtmfdepay->previous_duration)
      rtpdtmfdepay->previous_duration = duration;
  }

  GST_DEBUG_OBJECT (depayload, "new previous duration : %d - new duration : %d"
      " - diff  : %d - clock rate : %d - timestamp : %llu",
      rtpdtmfdepay->previous_duration, dtmf_payload.duration,
      (rtpdtmfdepay->previous_duration - dtmf_payload.duration),
      depayload->clock_rate, GST_BUFFER_TIMESTAMP (buf));

  /* If late or duplicate packet (like the redundant end packet). Ignore */
  if (dtmf_payload.duration > 0) {
    outbuf = gst_buffer_new ();
    gst_dtmf_src_generate_tone (rtpdtmfdepay, dtmf_payload, outbuf);


    GST_BUFFER_TIMESTAMP (outbuf) = rtpdtmfdepay->first_gst_ts +
        (rtpdtmfdepay->previous_duration - dtmf_payload.duration) *
        GST_SECOND / depayload->clock_rate;
    GST_BUFFER_OFFSET (outbuf) =
        (rtpdtmfdepay->previous_duration - dtmf_payload.duration) *
        GST_SECOND / depayload->clock_rate;
    GST_BUFFER_OFFSET_END (outbuf) = rtpdtmfdepay->previous_duration *
        GST_SECOND / depayload->clock_rate;

    GST_DEBUG_OBJECT (depayload, "timestamp : %llu - time %" GST_TIME_FORMAT,
        GST_BUFFER_TIMESTAMP (buf), GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)));

  }

  return outbuf;


bad_packet:
  GST_ELEMENT_WARNING (rtpdtmfdepay, STREAM, DECODE,
      ("Packet did not validate"), (NULL));
  return NULL;
}
static GstFlowReturn
gst_fake_src_create (GstBaseSrc * basesrc, guint64 offset, guint length,
    GstBuffer ** ret)
{
  GstFakeSrc *src;
  GstBuffer *buf;
  GstClockTime time;
  gsize size;

  src = GST_FAKE_SRC (basesrc);

  buf = gst_fake_src_create_buffer (src, &size);
  GST_BUFFER_OFFSET (buf) = offset;

  if (src->datarate > 0) {
    time = (src->bytes_sent * GST_SECOND) / src->datarate;

    GST_BUFFER_DURATION (buf) = size * GST_SECOND / src->datarate;
  } else if (gst_base_src_is_live (basesrc)) {
    GstClock *clock;

    clock = gst_element_get_clock (GST_ELEMENT (src));

    if (clock) {
      time = gst_clock_get_time (clock);
      time -= gst_element_get_base_time (GST_ELEMENT (src));
      gst_object_unref (clock);
    } else {
      /* not an error not to have a clock */
      time = GST_CLOCK_TIME_NONE;
    }
  } else {
    time = GST_CLOCK_TIME_NONE;
  }

  GST_BUFFER_DTS (buf) = time;
  GST_BUFFER_PTS (buf) = time;

  if (!src->silent) {
    gchar dts_str[64], pts_str[64], dur_str[64];
    gchar *flag_str;

    GST_OBJECT_LOCK (src);
    g_free (src->last_message);

    if (GST_BUFFER_DTS (buf) != GST_CLOCK_TIME_NONE) {
      g_snprintf (dts_str, sizeof (dts_str), "%" GST_TIME_FORMAT,
          GST_TIME_ARGS (GST_BUFFER_DTS (buf)));
    } else {
      g_strlcpy (dts_str, "none", sizeof (dts_str));
    }
    if (GST_BUFFER_PTS (buf) != GST_CLOCK_TIME_NONE) {
      g_snprintf (pts_str, sizeof (pts_str), "%" GST_TIME_FORMAT,
          GST_TIME_ARGS (GST_BUFFER_PTS (buf)));
    } else {
      g_strlcpy (pts_str, "none", sizeof (pts_str));
    }
    if (GST_BUFFER_DURATION (buf) != GST_CLOCK_TIME_NONE) {
      g_snprintf (dur_str, sizeof (dur_str), "%" GST_TIME_FORMAT,
          GST_TIME_ARGS (GST_BUFFER_DURATION (buf)));
    } else {
      g_strlcpy (dur_str, "none", sizeof (dur_str));
    }

    flag_str = gst_buffer_get_flags_string (buf);
    src->last_message =
        g_strdup_printf ("create   ******* (%s:%s) (%u bytes, dts: %s, pts:%s"
        ", duration: %s, offset: %" G_GINT64_FORMAT ", offset_end: %"
        G_GINT64_FORMAT ", flags: %08x %s) %p",
        GST_DEBUG_PAD_NAME (GST_BASE_SRC_CAST (src)->srcpad), (guint) size,
        dts_str, pts_str, dur_str, GST_BUFFER_OFFSET (buf),
        GST_BUFFER_OFFSET_END (buf), GST_MINI_OBJECT_CAST (buf)->flags,
        flag_str, buf);
    g_free (flag_str);
    GST_OBJECT_UNLOCK (src);

    g_object_notify_by_pspec ((GObject *) src, pspec_last_message);
  }

  if (src->signal_handoffs) {
    GST_LOG_OBJECT (src, "pre handoff emit");
    g_signal_emit (src, gst_fake_src_signals[SIGNAL_HANDOFF], 0, buf,
        basesrc->srcpad);
    GST_LOG_OBJECT (src, "post handoff emit");
  }

  src->bytes_sent += size;

  *ret = buf;
  return GST_FLOW_OK;
}
Example #20
/* encode the CMML head tag and push the CMML headers
 */
static void
gst_cmml_enc_parse_tag_head (GstCmmlEnc * enc, GstCmmlTagHead * head)
{
  GList *headers = NULL;
  GList *walk;
  guchar *head_string;
  GstCaps *caps;
  GstBuffer *ident_buf, *preamble_buf, *head_buf;
  GstBuffer *buffer;

  if (enc->preamble == NULL)
    goto flow_unexpected;

  GST_INFO_OBJECT (enc, "parsing head tag");

  enc->flow_return = gst_cmml_enc_new_ident_header (enc, &ident_buf);
  if (enc->flow_return != GST_FLOW_OK)
    goto alloc_error;
  headers = g_list_append (headers, ident_buf);

  enc->flow_return = gst_cmml_enc_new_buffer (enc,
      enc->preamble, strlen ((gchar *) enc->preamble), &preamble_buf);
  if (enc->flow_return != GST_FLOW_OK)
    goto alloc_error;
  headers = g_list_append (headers, preamble_buf);

  head_string = gst_cmml_parser_tag_head_to_string (enc->parser, head);
  enc->flow_return = gst_cmml_enc_new_buffer (enc,
      head_string, strlen ((gchar *) head_string), &head_buf);
  g_free (head_string);
  if (enc->flow_return != GST_FLOW_OK)
    goto alloc_error;
  headers = g_list_append (headers, head_buf);

  caps = gst_pad_get_caps (enc->srcpad);
  caps = gst_cmml_enc_set_header_on_caps (enc, caps,
      ident_buf, preamble_buf, head_buf);

  while (headers) {
    buffer = GST_BUFFER (headers->data);
    /* set granulepos 0 on headers */
    GST_BUFFER_OFFSET_END (buffer) = 0;
    gst_buffer_set_caps (buffer, caps);

    enc->flow_return = gst_cmml_enc_push (enc, buffer);
    headers = g_list_delete_link (headers, headers);

    if (GST_FLOW_IS_FATAL (enc->flow_return))
      goto push_error;
  }

  gst_caps_unref (caps);

  enc->sent_headers = TRUE;
  return;

flow_unexpected:
  GST_ELEMENT_ERROR (enc, STREAM, ENCODE,
      (NULL), ("got head tag before preamble"));
  enc->flow_return = GST_FLOW_ERROR;
  return;
push_error:
  gst_caps_unref (caps);
  /* fallthrough */
alloc_error:
  for (walk = headers; walk; walk = walk->next)
    gst_buffer_unref (GST_BUFFER (walk->data));
  g_list_free (headers);
  return;
}
Example #21
static GstFlowReturn
gst_gsmdec_chain (GstPad * pad, GstBuffer * buf)
{
    GstGSMDec *gsmdec;
    gsm_byte *data;
    GstFlowReturn ret = GST_FLOW_OK;
    GstClockTime timestamp;
    gint needed;

    gsmdec = GST_GSMDEC (gst_pad_get_parent (pad));

    timestamp = GST_BUFFER_TIMESTAMP (buf);

    if (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT)) {
        gst_adapter_clear (gsmdec->adapter);
        gsmdec->next_ts = GST_CLOCK_TIME_NONE;
        /* FIXME, do some good offset */
        gsmdec->next_of = 0;
    }
    gst_adapter_push (gsmdec->adapter, buf);

    needed = 33;
    /* do we have enough bytes to read a frame */
    while (gst_adapter_available (gsmdec->adapter) >= needed) {
        GstBuffer *outbuf;

        /* always the same amount of output samples */
        outbuf = gst_buffer_new_and_alloc (ENCODED_SAMPLES * sizeof (gsm_signal));

        /* If we are not given any timestamp, interpolate from last seen
         * timestamp (if any). */
        if (timestamp == GST_CLOCK_TIME_NONE)
            timestamp = gsmdec->next_ts;

        GST_BUFFER_TIMESTAMP (outbuf) = timestamp;

        /* interpolate in the next run */
        if (timestamp != GST_CLOCK_TIME_NONE)
            gsmdec->next_ts = timestamp + gsmdec->duration;
        timestamp = GST_CLOCK_TIME_NONE;

        GST_BUFFER_DURATION (outbuf) = gsmdec->duration;
        GST_BUFFER_OFFSET (outbuf) = gsmdec->next_of;
        if (gsmdec->next_of != -1)
            gsmdec->next_of += ENCODED_SAMPLES;
        GST_BUFFER_OFFSET_END (outbuf) = gsmdec->next_of;

        gst_buffer_set_caps (outbuf, GST_PAD_CAPS (gsmdec->srcpad));

        /* now encode frame into the output buffer */
        data = (gsm_byte *) gst_adapter_peek (gsmdec->adapter, needed);
        if (gsm_decode (gsmdec->state, data,
                        (gsm_signal *) GST_BUFFER_DATA (outbuf)) < 0) {
            /* invalid frame */
            GST_WARNING_OBJECT (gsmdec, "tried to decode an invalid frame, skipping");
        }
        gst_adapter_flush (gsmdec->adapter, needed);

        /* WAV49 requires alternating 33 and 32 bytes of input */
        if (gsmdec->use_wav49)
            needed = (needed == 33 ? 32 : 33);

        GST_DEBUG_OBJECT (gsmdec, "Pushing buffer of size %d ts %" GST_TIME_FORMAT,
                          GST_BUFFER_SIZE (outbuf),
                          GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)));

        /* push */
        ret = gst_pad_push (gsmdec->srcpad, outbuf);
    }

    gst_object_unref (gsmdec);

    return ret;
}
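/* For reference: a standard GSM 06.10 frame is 33 bytes (the initial
 * "needed" above) and decodes to 160 samples at 8 kHz, i.e. 20 ms of audio
 * per pushed buffer; WAV49 packs two frames into 65 bytes, which is why the
 * reads alternate between 33 and 32 bytes. */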
static GstFlowReturn
gst_inter_video_src_create (GstBaseSrc * src, guint64 offset, guint size,
    GstBuffer ** buf)
{
  GstInterVideoSrc *intervideosrc = GST_INTER_VIDEO_SRC (src);
  GstBuffer *buffer;

  GST_DEBUG_OBJECT (intervideosrc, "create");

  buffer = NULL;

  g_mutex_lock (intervideosrc->surface->mutex);
  if (intervideosrc->surface->video_buffer) {
    buffer = gst_buffer_ref (intervideosrc->surface->video_buffer);
    intervideosrc->surface->video_buffer_count++;
    if (intervideosrc->surface->video_buffer_count >= 30) {
      gst_buffer_unref (intervideosrc->surface->video_buffer);
      intervideosrc->surface->video_buffer = NULL;
    }
  }
  g_mutex_unlock (intervideosrc->surface->mutex);

  if (buffer == NULL) {
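    /* no buffer available from the surface: synthesize a blank (black) frame
     * by filling luma with 16 and chroma with 128 */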
    GstMapInfo map;

    buffer =
        gst_buffer_new_and_alloc (GST_VIDEO_INFO_SIZE (&intervideosrc->info));

    gst_buffer_map (buffer, &map, GST_MAP_WRITE);
    memset (map.data, 16, GST_VIDEO_INFO_COMP_STRIDE (&intervideosrc->info, 0) *
        GST_VIDEO_INFO_COMP_HEIGHT (&intervideosrc->info, 0));

    memset (map.data + GST_VIDEO_INFO_COMP_OFFSET (&intervideosrc->info, 1),
        128,
        2 * GST_VIDEO_INFO_COMP_STRIDE (&intervideosrc->info, 1) *
        GST_VIDEO_INFO_COMP_HEIGHT (&intervideosrc->info, 1));
    gst_buffer_unmap (buffer, &map);
  }

  buffer = gst_buffer_make_writable (buffer);

  GST_BUFFER_TIMESTAMP (buffer) =
      gst_util_uint64_scale_int (GST_SECOND * intervideosrc->n_frames,
      GST_VIDEO_INFO_FPS_D (&intervideosrc->info),
      GST_VIDEO_INFO_FPS_N (&intervideosrc->info));
  GST_DEBUG_OBJECT (intervideosrc, "create ts %" GST_TIME_FORMAT,
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)));
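  /* compute the duration as the delta to the next frame's timestamp so that
   * rounding errors do not accumulate */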
  GST_BUFFER_DURATION (buffer) =
      gst_util_uint64_scale_int (GST_SECOND * (intervideosrc->n_frames + 1),
      GST_VIDEO_INFO_FPS_D (&intervideosrc->info),
      GST_VIDEO_INFO_FPS_N (&intervideosrc->info)) -
      GST_BUFFER_TIMESTAMP (buffer);
  GST_BUFFER_OFFSET (buffer) = intervideosrc->n_frames;
  GST_BUFFER_OFFSET_END (buffer) = -1;
  GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DISCONT);
  if (intervideosrc->n_frames == 0) {
    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);
  }
  intervideosrc->n_frames++;

  *buf = buffer;

  return GST_FLOW_OK;
}
static int
gst_wavpack_enc_push_block (void *id, void *data, int32_t count)
{
  GstWavpackEncWriteID *wid = (GstWavpackEncWriteID *) id;
  GstWavpackEnc *enc = GST_WAVPACK_ENC (wid->wavpack_enc);
  GstFlowReturn *flow;
  GstBuffer *buffer;
  GstPad *pad;
  guchar *block = (guchar *) data;

  pad = (wid->correction) ? enc->wvcsrcpad : enc->srcpad;
  flow = (wid->correction) ?
      &enc->wvcsrcpad_last_return : &enc->srcpad_last_return;

  *flow = gst_pad_alloc_buffer_and_set_caps (pad, GST_BUFFER_OFFSET_NONE,
      count, GST_PAD_CAPS (pad), &buffer);

  if (*flow != GST_FLOW_OK) {
    GST_WARNING_OBJECT (enc, "flow on %s:%s = %s",
        GST_DEBUG_PAD_NAME (pad), gst_flow_get_name (*flow));
    return FALSE;
  }

  g_memmove (GST_BUFFER_DATA (buffer), block, count);

  if (count > sizeof (WavpackHeader) && memcmp (block, "wvpk", 4) == 0) {
    /* if it's a Wavpack block set buffer timestamp and duration, etc */
    WavpackHeader wph;

    GST_LOG_OBJECT (enc, "got %d bytes of encoded wavpack %sdata",
        count, (wid->correction) ? "correction " : "");

    gst_wavpack_read_header (&wph, block);

    /* passthrough is only set when re-pushing the first buffer; in that
     * case we don't want to delay the buffer or push newsegment events */
    if (!wid->passthrough) {
      /* Only push complete blocks */
      if (enc->pending_buffer == NULL) {
        enc->pending_buffer = buffer;
        enc->pending_offset = wph.block_index;
      } else if (enc->pending_offset == wph.block_index) {
        enc->pending_buffer = gst_buffer_join (enc->pending_buffer, buffer);
      } else {
        GST_ERROR ("Got incomplete block, dropping");
        gst_buffer_unref (enc->pending_buffer);
        enc->pending_buffer = buffer;
        enc->pending_offset = wph.block_index;
      }

      /* accumulate sub-blocks until the FINAL_BLOCK flag marks the last
       * piece of this frame, then push the joined buffer */
      if (!(wph.flags & FINAL_BLOCK))
        return TRUE;

      buffer = enc->pending_buffer;
      enc->pending_buffer = NULL;
      enc->pending_offset = 0;

      /* if it's the first wavpack block, send a NEW_SEGMENT event */
      if (wph.block_index == 0) {
        gst_pad_push_event (pad,
            gst_event_new_new_segment (FALSE,
                1.0, GST_FORMAT_TIME, 0, GST_BUFFER_OFFSET_NONE, 0));

        /* save header for later reference, so we can re-send it later on
         * EOS with fixed up values for total sample count etc. */
        if (enc->first_block == NULL && !wid->correction) {
          enc->first_block =
              g_memdup (GST_BUFFER_DATA (buffer), GST_BUFFER_SIZE (buffer));
          enc->first_block_size = GST_BUFFER_SIZE (buffer);
        }
      }
    }

    /* set buffer timestamp, duration, offset, offset_end from
     * the wavpack header */
    GST_BUFFER_TIMESTAMP (buffer) = enc->timestamp_offset +
        gst_util_uint64_scale_int (GST_SECOND, wph.block_index,
        enc->samplerate);
    GST_BUFFER_DURATION (buffer) =
        gst_util_uint64_scale_int (GST_SECOND, wph.block_samples,
        enc->samplerate);
    GST_BUFFER_OFFSET (buffer) = wph.block_index;
    GST_BUFFER_OFFSET_END (buffer) = wph.block_index + wph.block_samples;
  } else {
    /* not a Wavpack block: don't set a timestamp or duration on the buffer */
    GST_DEBUG_OBJECT (enc, "got %d bytes of unknown data", count);

    GST_BUFFER_TIMESTAMP (buffer) = GST_CLOCK_TIME_NONE;
    GST_BUFFER_DURATION (buffer) = GST_CLOCK_TIME_NONE;
  }

  /* push the buffer and forward errors */
  GST_DEBUG_OBJECT (enc, "pushing buffer with %d bytes",
      GST_BUFFER_SIZE (buffer));
  *flow = gst_pad_push (pad, buffer);

  if (*flow != GST_FLOW_OK) {
    GST_WARNING_OBJECT (enc, "flow on %s:%s = %s",
        GST_DEBUG_PAD_NAME (pad), gst_flow_get_name (*flow));
    return FALSE;
  }

  return TRUE;
}
Example #24
GstFlowReturn
gst_base_video_decoder_have_frame (GstBaseVideoDecoder * base_video_decoder)
{
  GstVideoFrame *frame = base_video_decoder->current_frame;
  GstBuffer *buffer;
  GstBaseVideoDecoderClass *base_video_decoder_class;
  GstFlowReturn ret = GST_FLOW_OK;
  int n_available;

  GST_DEBUG ("have_frame");

  base_video_decoder_class =
      GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);

  n_available = gst_adapter_available (base_video_decoder->output_adapter);
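  /* take everything collected for this frame from the output adapter, or use
   * an empty buffer if nothing was gathered */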
  if (n_available) {
    buffer = gst_adapter_take_buffer (base_video_decoder->output_adapter,
        n_available);
  } else {
    buffer = gst_buffer_new_and_alloc (0);
  }

  frame->distance_from_sync = base_video_decoder->distance_from_sync;
  base_video_decoder->distance_from_sync++;

#if 0
  if (frame->presentation_timestamp == GST_CLOCK_TIME_NONE) {
    frame->presentation_timestamp =
        gst_base_video_decoder_get_timestamp (base_video_decoder,
        frame->presentation_frame_number);
    frame->presentation_duration =
        gst_base_video_decoder_get_timestamp (base_video_decoder,
        frame->presentation_frame_number + 1) - frame->presentation_timestamp;
    frame->decode_timestamp =
        gst_base_video_decoder_get_timestamp (base_video_decoder,
        frame->decode_frame_number);
  }
#endif

#if 0
  GST_BUFFER_TIMESTAMP (buffer) = frame->presentation_timestamp;
  GST_BUFFER_DURATION (buffer) = frame->presentation_duration;
  if (frame->decode_frame_number < 0) {
    GST_BUFFER_OFFSET (buffer) = 0;
  } else {
    GST_BUFFER_OFFSET (buffer) = frame->decode_timestamp;
  }
  GST_BUFFER_OFFSET_END (buffer) = GST_CLOCK_TIME_NONE;
#endif

  GST_DEBUG ("pts %" GST_TIME_FORMAT,
      GST_TIME_ARGS (frame->presentation_timestamp));
  GST_DEBUG ("dts %" GST_TIME_FORMAT, GST_TIME_ARGS (frame->decode_timestamp));
  GST_DEBUG ("dist %d", frame->distance_from_sync);

  if (frame->is_sync_point) {
    GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  } else {
    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  }
  if (base_video_decoder->discont) {
    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);
    base_video_decoder->discont = FALSE;
  }

  frame->sink_buffer = buffer;

  base_video_decoder->frames = g_list_append (base_video_decoder->frames,
      frame);

  /* do something with frame */
  ret = base_video_decoder_class->handle_frame (base_video_decoder, frame);
  if (!GST_FLOW_IS_SUCCESS (ret)) {
    GST_DEBUG ("flow error!");
  }

  /* create new frame */
  base_video_decoder->current_frame =
      gst_base_video_decoder_new_frame (base_video_decoder);

  return ret;
}
Example #25
static GstFlowReturn
test_injector_chain (GstPad * pad, GstBuffer * buf)
{
  GstFlowReturn ret;
  GstPad *srcpad;

  srcpad = gst_element_get_pad (GST_ELEMENT (GST_PAD_PARENT (pad)), "src");

  /* since we're increasing timestamp/offsets, push this one first */
  GST_LOG (" passing buffer   [t=%" GST_TIME_FORMAT "-%" GST_TIME_FORMAT
      "], offset=%" G_GINT64_FORMAT ", offset_end=%" G_GINT64_FORMAT,
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf) + GST_BUFFER_DURATION (buf)),
      GST_BUFFER_OFFSET (buf), GST_BUFFER_OFFSET_END (buf));

  gst_buffer_ref (buf);

  ret = gst_pad_push (srcpad, buf);

  if (g_random_double () < injector_inject_probability) {
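    /* occasionally inject a copy of the buffer, shifted forward by a quarter
     * of its offset span and duration, so it overlaps the buffer we just
     * pushed */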
    GstBuffer *ibuf;

    ibuf = gst_buffer_copy (buf);

    if (GST_BUFFER_OFFSET_IS_VALID (buf) &&
        GST_BUFFER_OFFSET_END_IS_VALID (buf)) {
      guint64 delta;

      delta = GST_BUFFER_OFFSET_END (buf) - GST_BUFFER_OFFSET (buf);
      GST_BUFFER_OFFSET (ibuf) += delta / 4;
      GST_BUFFER_OFFSET_END (ibuf) += delta / 4;
    } else {
      GST_BUFFER_OFFSET (ibuf) = GST_BUFFER_OFFSET_NONE;
      GST_BUFFER_OFFSET_END (ibuf) = GST_BUFFER_OFFSET_NONE;
    }

    if (GST_BUFFER_TIMESTAMP_IS_VALID (buf) &&
        GST_BUFFER_DURATION_IS_VALID (buf)) {
      GstClockTime delta;

      delta = GST_BUFFER_DURATION (buf);
      GST_BUFFER_TIMESTAMP (ibuf) += delta / 4;
    } else {
      GST_BUFFER_TIMESTAMP (ibuf) = GST_CLOCK_TIME_NONE;
      GST_BUFFER_DURATION (ibuf) = GST_CLOCK_TIME_NONE;
    }

    if (GST_BUFFER_TIMESTAMP_IS_VALID (ibuf) ||
        GST_BUFFER_OFFSET_IS_VALID (ibuf)) {
      GST_LOG ("injecting buffer [t=%" GST_TIME_FORMAT "-%" GST_TIME_FORMAT
          "], offset=%" G_GINT64_FORMAT ", offset_end=%" G_GINT64_FORMAT,
          GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (ibuf)),
          GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (ibuf) +
              GST_BUFFER_DURATION (ibuf)), GST_BUFFER_OFFSET (ibuf),
          GST_BUFFER_OFFSET_END (ibuf));

      if (gst_pad_push (srcpad, ibuf) != GST_FLOW_OK) {
        /* ignore return value */
      }
    } else {
      GST_WARNING ("couldn't inject buffer, no incoming timestamps or offsets");
      gst_buffer_unref (ibuf);
    }
  }

  gst_buffer_unref (buf);
  /* drop the pad reference obtained from gst_element_get_pad() above */
  gst_object_unref (srcpad);

  return ret;
}
Example #26
static GstFlowReturn
gst_base_video_decoder_chain (GstPad * pad, GstBuffer * buf)
{
  GstBaseVideoDecoder *base_video_decoder;
  GstBaseVideoDecoderClass *klass;
  GstBuffer *buffer;
  GstFlowReturn ret;

  GST_DEBUG ("chain %" G_GINT64_FORMAT, GST_BUFFER_TIMESTAMP (buf));

#if 0
  /* requiring the pad to be negotiated makes it impossible to use
   * oggdemux or filesrc ! decoder */
  if (!gst_pad_is_negotiated (pad)) {
    GST_DEBUG ("not negotiated");
    return GST_FLOW_NOT_NEGOTIATED;
  }
#endif

  base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad));
  klass = GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);

  GST_DEBUG_OBJECT (base_video_decoder, "chain");

  if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT))) {
    GST_DEBUG_OBJECT (base_video_decoder, "received DISCONT buffer");
    if (base_video_decoder->started) {
      gst_base_video_decoder_reset (base_video_decoder);
    }
  }

  if (!base_video_decoder->started) {
    klass->start (base_video_decoder);
    base_video_decoder->started = TRUE;
  }

  if (GST_BUFFER_TIMESTAMP (buf) != GST_CLOCK_TIME_NONE) {
    GST_DEBUG ("timestamp %" G_GINT64_FORMAT " offset %" G_GINT64_FORMAT,
        GST_BUFFER_TIMESTAMP (buf), base_video_decoder->offset);
    base_video_decoder->last_sink_timestamp = GST_BUFFER_TIMESTAMP (buf);
  }
  if (GST_BUFFER_OFFSET_END (buf) != -1) {
    GST_DEBUG ("gp %" G_GINT64_FORMAT, GST_BUFFER_OFFSET_END (buf));
    base_video_decoder->last_sink_offset_end = GST_BUFFER_OFFSET_END (buf);
  }
  base_video_decoder->offset += GST_BUFFER_SIZE (buf);

#if 0
  if (base_video_decoder->timestamp_offset == GST_CLOCK_TIME_NONE &&
      GST_BUFFER_TIMESTAMP (buf) != GST_CLOCK_TIME_NONE) {
    GST_DEBUG ("got new offset %lld", GST_BUFFER_TIMESTAMP (buf));
    base_video_decoder->timestamp_offset = GST_BUFFER_TIMESTAMP (buf);
  }
#endif

  if (base_video_decoder->current_frame == NULL) {
    base_video_decoder->current_frame =
        gst_base_video_decoder_new_frame (base_video_decoder);
  }

  gst_adapter_push (base_video_decoder->input_adapter, buf);

  if (!base_video_decoder->have_sync) {
    int n, m;

    GST_DEBUG ("no sync, scanning");

    n = gst_adapter_available (base_video_decoder->input_adapter);
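    /* the subclass reports how many bytes can be skipped before a possible
     * sync point; those bytes are flushed from the adapter below */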
    m = klass->scan_for_sync (base_video_decoder, FALSE, 0, n);

    if (m < 0) {
      g_warning ("subclass returned negative scan %d", m);
    }

    if (m >= n) {
      g_warning ("subclass scanned past end %d >= %d", m, n);
    }

    gst_adapter_flush (base_video_decoder->input_adapter, m);

    if (m < n) {
      GST_DEBUG ("found possible sync after %d bytes (of %d)", m, n);

      /* this is only "maybe" sync */
      base_video_decoder->have_sync = TRUE;
    }

    if (!base_video_decoder->have_sync) {
      gst_object_unref (base_video_decoder);
      return GST_FLOW_OK;
    }
  }

  /* FIXME: use gst_adapter_prev_timestamp() here instead? */
  buffer = gst_adapter_get_buffer (base_video_decoder->input_adapter);

  base_video_decoder->buffer_timestamp = GST_BUFFER_TIMESTAMP (buffer);
  gst_buffer_unref (buffer);

  do {
    ret = klass->parse_data (base_video_decoder, FALSE);
  } while (ret == GST_FLOW_OK);

  if (ret == GST_BASE_VIDEO_DECODER_FLOW_NEED_DATA) {
    gst_object_unref (base_video_decoder);
    return GST_FLOW_OK;
  }

  gst_object_unref (base_video_decoder);
  return ret;
}
Example #27
/* this tests that the output is a correct discontinuous stream
 * if the input is; i.e., drops in the input timeline come out the same way */
static void
test_discont_stream_instance (int inrate, int outrate, int samples,
    int numbuffers)
{
  GstElement *audioresample;
  GstBuffer *inbuffer, *outbuffer;
  GstCaps *caps;
  GstClockTime ints;

  int i, j;
  GstMapInfo map;
  gint16 *p;

  GST_DEBUG ("inrate:%d outrate:%d samples:%d numbuffers:%d",
      inrate, outrate, samples, numbuffers);

  audioresample =
      setup_audioresample (2, 3, inrate, outrate, GST_AUDIO_NE (S16));
  caps = gst_pad_get_current_caps (mysrcpad);
  fail_unless (gst_caps_is_fixed (caps));

  fail_unless (gst_element_set_state (audioresample,
          GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
      "could not set to playing");

  for (j = 1; j <= numbuffers; ++j) {

    inbuffer = gst_buffer_new_and_alloc (samples * 4);
    GST_BUFFER_DURATION (inbuffer) = samples * GST_SECOND / inrate;
    /* "drop" half the buffers */
    ints = GST_BUFFER_DURATION (inbuffer) * 2 * (j - 1);
    GST_BUFFER_TIMESTAMP (inbuffer) = ints;
    GST_BUFFER_OFFSET (inbuffer) = (j - 1) * 2 * samples;
    GST_BUFFER_OFFSET_END (inbuffer) = j * 2 * samples + samples;

    gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
    p = (gint16 *) map.data;
    /* create a 16 bit signed ramp */
    for (i = 0; i < samples; ++i) {
      *p = -32767 + i * (65535 / samples);
      ++p;
      *p = -32767 + i * (65535 / samples);
      ++p;
    }
    gst_buffer_unmap (inbuffer, &map);

    GST_DEBUG ("Sending Buffer time:%" G_GUINT64_FORMAT " duration:%"
        G_GINT64_FORMAT " discont:%d offset:%" G_GUINT64_FORMAT " offset_end:%"
        G_GUINT64_FORMAT, GST_BUFFER_TIMESTAMP (inbuffer),
        GST_BUFFER_DURATION (inbuffer), GST_BUFFER_IS_DISCONT (inbuffer),
        GST_BUFFER_OFFSET (inbuffer), GST_BUFFER_OFFSET_END (inbuffer));
    /* pushing gives away my reference ... */
    fail_unless (gst_pad_push (mysrcpad, inbuffer) == GST_FLOW_OK);

    /* check if the timestamp of the pushed buffer matches the incoming one */
    outbuffer = g_list_nth_data (buffers, g_list_length (buffers) - 1);
    fail_if (outbuffer == NULL);
    fail_unless_equals_uint64 (ints, GST_BUFFER_TIMESTAMP (outbuffer));
    GST_DEBUG ("Got Buffer time:%" G_GUINT64_FORMAT " duration:%"
        G_GINT64_FORMAT " discont:%d offset:%" G_GUINT64_FORMAT " offset_end:%"
        G_GUINT64_FORMAT, GST_BUFFER_TIMESTAMP (outbuffer),
        GST_BUFFER_DURATION (outbuffer), GST_BUFFER_IS_DISCONT (outbuffer),
        GST_BUFFER_OFFSET (outbuffer), GST_BUFFER_OFFSET_END (outbuffer));
    if (j > 1) {
      fail_unless (GST_BUFFER_IS_DISCONT (outbuffer),
          "expected discont for buffer #%d", j);
    }
  }

  /* cleanup */
  gst_caps_unref (caps);
  cleanup_audioresample (audioresample);
}
Example #28
GstFlowReturn
gst_base_video_decoder_finish_frame (GstBaseVideoDecoder * base_video_decoder,
    GstVideoFrame * frame)
{
  GstBaseVideoDecoderClass *base_video_decoder_class;
  GstBuffer *src_buffer;

  GST_DEBUG ("finish frame");

  base_video_decoder_class =
      GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);

  GST_DEBUG ("finish frame sync=%d pts=%" G_GINT64_FORMAT, frame->is_sync_point,
      frame->presentation_timestamp);

  if (frame->is_sync_point) {
    if (GST_CLOCK_TIME_IS_VALID (frame->presentation_timestamp)) {
      if (frame->presentation_timestamp != base_video_decoder->timestamp_offset) {
        GST_DEBUG ("sync timestamp %" G_GINT64_FORMAT " diff %" G_GINT64_FORMAT,
            frame->presentation_timestamp,
            frame->presentation_timestamp -
            base_video_decoder->state.segment.start);
        base_video_decoder->timestamp_offset = frame->presentation_timestamp;
        base_video_decoder->field_index = 0;
      } else {
        /* This case is for one initial timestamp and no others, e.g.,
         * filesrc ! decoder ! xvimagesink */
        GST_WARNING ("sync timestamp didn't change, ignoring");
        frame->presentation_timestamp = GST_CLOCK_TIME_NONE;
      }
    } else {
      GST_WARNING ("sync point doesn't have timestamp");
      if (!GST_CLOCK_TIME_IS_VALID (base_video_decoder->timestamp_offset)) {
        GST_ERROR ("No base timestamp.  Assuming frames start at 0");
        base_video_decoder->timestamp_offset = 0;
        base_video_decoder->field_index = 0;
      }
    }
  }
  frame->field_index = base_video_decoder->field_index;
  base_video_decoder->field_index += frame->n_fields;

  if (frame->presentation_timestamp == GST_CLOCK_TIME_NONE) {
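    /* the subclass did not provide a timestamp: derive it and the decode
     * timestamp from the running field and frame counters */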
    frame->presentation_timestamp =
        gst_base_video_decoder_get_field_timestamp (base_video_decoder,
        frame->field_index);
    frame->presentation_duration = GST_CLOCK_TIME_NONE;
    frame->decode_timestamp =
        gst_base_video_decoder_get_timestamp (base_video_decoder,
        frame->decode_frame_number);
  }
  if (frame->presentation_duration == GST_CLOCK_TIME_NONE) {
    frame->presentation_duration =
        gst_base_video_decoder_get_field_timestamp (base_video_decoder,
        frame->field_index + frame->n_fields) - frame->presentation_timestamp;
  }

  if (GST_CLOCK_TIME_IS_VALID (base_video_decoder->last_timestamp)) {
    if (frame->presentation_timestamp < base_video_decoder->last_timestamp) {
      GST_WARNING ("decreasing timestamp (%" G_GINT64_FORMAT " < %"
          G_GINT64_FORMAT ")", frame->presentation_timestamp,
          base_video_decoder->last_timestamp);
    }
  }
  base_video_decoder->last_timestamp = frame->presentation_timestamp;

  GST_BUFFER_FLAG_UNSET (frame->src_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  if (base_video_decoder->state.interlaced) {
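    /* provide fallback definitions for the interlacing buffer flags in case
     * the installed video headers do not define them */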
#ifndef GST_VIDEO_BUFFER_TFF
#define GST_VIDEO_BUFFER_TFF (GST_MINI_OBJECT_FLAG_LAST << 5)
#endif
#ifndef GST_VIDEO_BUFFER_RFF
#define GST_VIDEO_BUFFER_RFF (GST_MINI_OBJECT_FLAG_LAST << 6)
#endif
#ifndef GST_VIDEO_BUFFER_ONEFIELD
#define GST_VIDEO_BUFFER_ONEFIELD (GST_MINI_OBJECT_FLAG_LAST << 7)
#endif
    int tff = base_video_decoder->state.top_field_first;

    if (frame->field_index & 1) {
      tff ^= 1;
    }
    if (tff) {
      GST_BUFFER_FLAG_SET (frame->src_buffer, GST_VIDEO_BUFFER_TFF);
    } else {
      GST_BUFFER_FLAG_UNSET (frame->src_buffer, GST_VIDEO_BUFFER_TFF);
    }
    GST_BUFFER_FLAG_UNSET (frame->src_buffer, GST_VIDEO_BUFFER_RFF);
    GST_BUFFER_FLAG_UNSET (frame->src_buffer, GST_VIDEO_BUFFER_ONEFIELD);
    if (frame->n_fields == 3) {
      GST_BUFFER_FLAG_SET (frame->src_buffer, GST_VIDEO_BUFFER_RFF);
    } else if (frame->n_fields == 1) {
      GST_BUFFER_FLAG_SET (frame->src_buffer, GST_VIDEO_BUFFER_ONEFIELD);
    }
  }

  GST_BUFFER_TIMESTAMP (frame->src_buffer) = frame->presentation_timestamp;
  GST_BUFFER_DURATION (frame->src_buffer) = frame->presentation_duration;
  GST_BUFFER_OFFSET (frame->src_buffer) = -1;
  GST_BUFFER_OFFSET_END (frame->src_buffer) = -1;

  GST_DEBUG ("pushing frame %" G_GINT64_FORMAT, frame->presentation_timestamp);

  base_video_decoder->frames =
      g_list_remove (base_video_decoder->frames, frame);

  gst_base_video_decoder_set_src_caps (base_video_decoder);

  src_buffer = frame->src_buffer;
  frame->src_buffer = NULL;

  gst_base_video_decoder_free_frame (frame);

  if (base_video_decoder->sink_clipping) {
    gint64 start = GST_BUFFER_TIMESTAMP (src_buffer);
    gint64 stop = GST_BUFFER_TIMESTAMP (src_buffer) +
        GST_BUFFER_DURATION (src_buffer);

    if (gst_segment_clip (&base_video_decoder->state.segment, GST_FORMAT_TIME,
            start, stop, &start, &stop)) {
      GST_BUFFER_TIMESTAMP (src_buffer) = start;
      GST_BUFFER_DURATION (src_buffer) = stop - start;
    } else {
      GST_DEBUG ("dropping buffer outside segment");
      gst_buffer_unref (src_buffer);
      return GST_FLOW_OK;
    }
  }

  return gst_pad_push (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_decoder),
      src_buffer);
}
static GstFlowReturn
gst_audio_fx_base_fir_filter_transform (GstBaseTransform * base,
    GstBuffer * inbuf, GstBuffer * outbuf)
{
  GstAudioFXBaseFIRFilter *self = GST_AUDIO_FX_BASE_FIR_FILTER (base);
  GstClockTime timestamp, expected_timestamp;
  gint channels = GST_AUDIO_FILTER_CHANNELS (self);
  gint rate = GST_AUDIO_FILTER_RATE (self);
  gint bps = GST_AUDIO_FILTER_BPS (self);
  GstMapInfo inmap, outmap;
  guint input_samples;
  guint output_samples;
  guint generated_samples;
  guint64 output_offset;
  gint64 diff = 0;
  GstClockTime stream_time;

  timestamp = GST_BUFFER_TIMESTAMP (outbuf);

  if (!GST_CLOCK_TIME_IS_VALID (timestamp)
      && !GST_CLOCK_TIME_IS_VALID (self->start_ts)) {
    GST_ERROR_OBJECT (self, "Invalid timestamp");
    return GST_FLOW_ERROR;
  }

  g_mutex_lock (&self->lock);
  stream_time =
      gst_segment_to_stream_time (&base->segment, GST_FORMAT_TIME, timestamp);

  GST_DEBUG_OBJECT (self, "sync to %" GST_TIME_FORMAT,
      GST_TIME_ARGS (timestamp));

  if (GST_CLOCK_TIME_IS_VALID (stream_time))
    gst_object_sync_values (GST_OBJECT (self), stream_time);

  g_return_val_if_fail (self->kernel != NULL, GST_FLOW_ERROR);
  g_return_val_if_fail (channels != 0, GST_FLOW_ERROR);

  if (GST_CLOCK_TIME_IS_VALID (self->start_ts))
    expected_timestamp =
        self->start_ts + gst_util_uint64_scale_int (self->nsamples_in,
        GST_SECOND, rate);
  else
    expected_timestamp = GST_CLOCK_TIME_NONE;

  /* Reset the residue if already existing on discont buffers */
  if (GST_BUFFER_IS_DISCONT (inbuf)
      || (GST_CLOCK_TIME_IS_VALID (expected_timestamp)
          && (ABS (GST_CLOCK_DIFF (timestamp,
                      expected_timestamp)) > 5 * GST_MSECOND))) {
    GST_DEBUG_OBJECT (self, "Discontinuity detected - flushing");
    if (GST_CLOCK_TIME_IS_VALID (expected_timestamp))
      gst_audio_fx_base_fir_filter_push_residue (self);
    self->buffer_fill = 0;
    g_free (self->buffer);
    self->buffer = NULL;
    self->start_ts = timestamp;
    self->start_off = GST_BUFFER_OFFSET (inbuf);
    self->nsamples_out = 0;
    self->nsamples_in = 0;
  } else if (!GST_CLOCK_TIME_IS_VALID (self->start_ts)) {
    self->start_ts = timestamp;
    self->start_off = GST_BUFFER_OFFSET (inbuf);
  }

  gst_buffer_map (inbuf, &inmap, GST_MAP_READ);
  gst_buffer_map (outbuf, &outmap, GST_MAP_WRITE);

  input_samples = (inmap.size / bps) / channels;
  output_samples = (outmap.size / bps) / channels;

  self->nsamples_in += input_samples;

  generated_samples =
      self->process (self, inmap.data, outmap.data, input_samples);

  gst_buffer_unmap (inbuf, &inmap);
  gst_buffer_unmap (outbuf, &outmap);

  g_assert (generated_samples <= output_samples);
  self->nsamples_out += generated_samples;
  if (generated_samples == 0)
    goto no_samples;

  /* Calculate the number of samples we can push out now without outputting
   * latency zeros in the beginning */
  diff = ((gint64) self->nsamples_out) - ((gint64) self->latency);
  if (diff < 0)
    goto no_samples;

  if (diff < generated_samples) {
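    /* only part of this block is past the filter latency: after this swap,
     * 'diff' holds the number of leading samples to trim and
     * 'generated_samples' the number of samples to push */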
    gint64 tmp = diff;
    diff = generated_samples - diff;
    generated_samples = tmp;
  } else {
    diff = 0;
  }

  gst_buffer_resize (outbuf, diff * bps * channels,
      generated_samples * bps * channels);

  output_offset = self->nsamples_out - self->latency - generated_samples;
  GST_BUFFER_TIMESTAMP (outbuf) =
      self->start_ts + gst_util_uint64_scale_int (output_offset, GST_SECOND,
      rate);
  GST_BUFFER_DURATION (outbuf) =
      gst_util_uint64_scale_int (output_samples, GST_SECOND, rate);
  if (self->start_off != GST_BUFFER_OFFSET_NONE) {
    GST_BUFFER_OFFSET (outbuf) = self->start_off + output_offset;
    GST_BUFFER_OFFSET_END (outbuf) =
        GST_BUFFER_OFFSET (outbuf) + generated_samples;
  } else {
    GST_BUFFER_OFFSET (outbuf) = GST_BUFFER_OFFSET_NONE;
    GST_BUFFER_OFFSET_END (outbuf) = GST_BUFFER_OFFSET_NONE;
  }
  g_mutex_unlock (&self->lock);

  GST_DEBUG_OBJECT (self,
      "Pushing buffer of size %" G_GSIZE_FORMAT " with timestamp: %"
      GST_TIME_FORMAT ", duration: %" GST_TIME_FORMAT ", offset: %"
      G_GUINT64_FORMAT ", offset_end: %" G_GUINT64_FORMAT ", nsamples_out: %d",
      gst_buffer_get_size (outbuf),
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (outbuf)), GST_BUFFER_OFFSET (outbuf),
      GST_BUFFER_OFFSET_END (outbuf), generated_samples);

  return GST_FLOW_OK;

no_samples:
  {
    g_mutex_unlock (&self->lock);
    return GST_BASE_TRANSFORM_FLOW_DROPPED;
  }
}
Example #30
static GstFlowReturn
audioresample_do_output (GstAudioresample * audioresample, GstBuffer * outbuf)
{
  int outsize;
  int outsamples;
  ResampleState *r;

  r = audioresample->resample;

  outsize = resample_get_output_size (r);
  GST_LOG_OBJECT (audioresample, "audioresample can give me %d bytes", outsize);

  /* protect against mem corruption */
  if (outsize > GST_BUFFER_SIZE (outbuf)) {
    GST_WARNING_OBJECT (audioresample,
        "overriding audioresample's outsize %d with outbuffer's size %d",
        outsize, GST_BUFFER_SIZE (outbuf));
    outsize = GST_BUFFER_SIZE (outbuf);
  }
  /* catch possibly wrong size differences */
  if (GST_BUFFER_SIZE (outbuf) - outsize > r->sample_size) {
    GST_WARNING_OBJECT (audioresample,
        "audioresample's outsize %d too far from outbuffer's size %d",
        outsize, GST_BUFFER_SIZE (outbuf));
  }

  outsize = resample_get_output_data (r, GST_BUFFER_DATA (outbuf), outsize);
  outsamples = outsize / r->sample_size;
  GST_LOG_OBJECT (audioresample, "resample gave me %d bytes or %d samples",
      outsize, outsamples);

  GST_BUFFER_OFFSET (outbuf) = audioresample->offset;
  GST_BUFFER_TIMESTAMP (outbuf) = audioresample->next_ts;

  if (audioresample->ts_offset != -1) {
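    /* a valid sample offset is being tracked: produce exact offsets and
     * compute the next timestamp from the accumulated sample count so no
     * rounding drift builds up */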
    audioresample->offset += outsamples;
    audioresample->ts_offset += outsamples;
    audioresample->next_ts =
        gst_util_uint64_scale_int (audioresample->ts_offset, GST_SECOND,
        audioresample->o_rate);
    GST_BUFFER_OFFSET_END (outbuf) = audioresample->offset;

    /* we calculate DURATION as the difference between "next" timestamp
     * and current timestamp so we ensure a contiguous stream, instead of
     * having rounding errors. */
    GST_BUFFER_DURATION (outbuf) = audioresample->next_ts -
        GST_BUFFER_TIMESTAMP (outbuf);
  } else {
    /* no valid offset known, but we can still approximate the duration */
    GST_BUFFER_DURATION (outbuf) =
        gst_util_uint64_scale_int (outsamples, GST_SECOND,
        audioresample->o_rate);
  }

  /* check for possible mem corruption */
  if (outsize > GST_BUFFER_SIZE (outbuf)) {
    /* this is an error that when it happens, would need fixing in the
     * resample library; we told it we wanted only GST_BUFFER_SIZE (outbuf),
     * and it gave us more ! */
    GST_WARNING_OBJECT (audioresample,
        "audioresample, you memory corrupting bastard. "
        "you gave me outsize %d while my buffer was size %d",
        outsize, GST_BUFFER_SIZE (outbuf));
    return GST_FLOW_ERROR;
  }
  /* catch possibly wrong size differences */
  if (GST_BUFFER_SIZE (outbuf) - outsize > r->sample_size) {
    GST_WARNING_OBJECT (audioresample,
        "audioresample's written outsize %d too far from outbuffer's size %d",
        outsize, GST_BUFFER_SIZE (outbuf));
  }
  GST_BUFFER_SIZE (outbuf) = outsize;

  if (G_UNLIKELY (audioresample->need_discont)) {
    GST_DEBUG_OBJECT (audioresample,
        "marking this buffer with the DISCONT flag");
    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
    audioresample->need_discont = FALSE;
  }

  GST_LOG_OBJECT (audioresample, "transformed to buffer of %d bytes, ts %"
      GST_TIME_FORMAT ", duration %" GST_TIME_FORMAT ", offset %"
      G_GINT64_FORMAT ", offset_end %" G_GINT64_FORMAT,
      outsize, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (outbuf)),
      GST_BUFFER_OFFSET (outbuf), GST_BUFFER_OFFSET_END (outbuf));


  return GST_FLOW_OK;
}