Example #1
0
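/* Instance init: set up the private bookkeeping fields, create the audio
 * clock used by the base class, and register a single mixer track labelled
 * "DASF Volume" flagged as master and mute. */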
static void
gst_dasf_src_init (GstDasfSrc* self, GstDasfSrcClass* klass)
{
	GstBaseAudioSrc *baseaudiosrc = GST_BASE_AUDIO_SRC (self);
	GstDasfSrcPrivate* priv = GST_DASF_SRC_GET_PRIVATE (self);
	GstMixerTrack* track;

	GST_DEBUG ("");

	priv->num_output_buffers = NUM_OUTPUT_BUFFERS_DEFAULT;
	priv->incount = 0;
	priv->outcount = 0;
	priv->volume = 100;
	priv->mute = FALSE;

	baseaudiosrc->clock = gst_audio_clock_new ("GstDasfSrcClock",
		(GstAudioClockGetTimeFunc) gst_dasf_src_get_time, self);

	self->component = NULL;
	self->tracks = NULL;

	track = g_object_new (GST_TYPE_MIXER_TRACK, NULL);
	track->label = g_strdup ("DASF Volume");
	track->num_channels = 1;
	track->min_volume = 0;
	track->max_volume = 100;
	track->flags = GST_MIXER_TRACK_MASTER | GST_MIXER_TRACK_MUTE;
	self->tracks = g_slist_append (self->tracks, track);
}
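/* Standard GObject property setter for the buffer-time, latency-time,
 * provide-clock and slave-method properties. */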
static void
gst_base_audio_src_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstBaseAudioSrc *src;

  src = GST_BASE_AUDIO_SRC (object);

  switch (prop_id) {
    case PROP_BUFFER_TIME:
      src->buffer_time = g_value_get_int64 (value);
      break;
    case PROP_LATENCY_TIME:
      src->latency_time = g_value_get_int64 (value);
      break;
    case PROP_PROVIDE_CLOCK:
      gst_base_audio_src_set_provide_clock (src, g_value_get_boolean (value));
      break;
    case PROP_SLAVE_METHOD:
      gst_base_audio_src_set_slave_method (src, g_value_get_enum (value));
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
Example #3
0
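/* Handle events: pause and clear the ring buffer on flush-start, clear it
 * and force a resync on flush-stop, refuse seek events, and drop everything
 * else. */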
static gboolean
gst_base_audio_src_event (GstBaseSrc * bsrc, GstEvent * event)
{
  GstBaseAudioSrc *src = GST_BASE_AUDIO_SRC (bsrc);
  gboolean res;

  res = TRUE;

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_FLUSH_START:
      GST_DEBUG_OBJECT (bsrc, "flush-start");
      gst_ring_buffer_pause (src->ringbuffer);
      gst_ring_buffer_clear_all (src->ringbuffer);
      break;
    case GST_EVENT_FLUSH_STOP:
      GST_DEBUG_OBJECT (bsrc, "flush-stop");
      /* always resync on the next sample after a flush */
      src->next_sample = -1;
      gst_ring_buffer_clear_all (src->ringbuffer);
      break;
    case GST_EVENT_SEEK:
      GST_DEBUG_OBJECT (bsrc, "refuse to seek");
      res = FALSE;
      break;
    default:
      GST_DEBUG_OBJECT (bsrc, "dropping event %p", event);
      break;
  }
  return res;
}
Example #4
0
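/* Instance init: clear all connection and stream state, reset the sample
 * spec, create the device probe, and default to skew clock slaving. */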
static void
gst_pulsesrc_init (GstPulseSrc * pulsesrc, GstPulseSrcClass * klass)
{
  pulsesrc->server = NULL;
  pulsesrc->device = NULL;
  pulsesrc->device_description = NULL;

  pulsesrc->context = NULL;
  pulsesrc->stream = NULL;

  pulsesrc->read_buffer = NULL;
  pulsesrc->read_buffer_length = 0;

#ifdef HAVE_PULSE_0_9_13
  pa_sample_spec_init (&pulsesrc->sample_spec);
#else
  pulsesrc->sample_spec.format = PA_SAMPLE_INVALID;
  pulsesrc->sample_spec.rate = 0;
  pulsesrc->sample_spec.channels = 0;
#endif

  pulsesrc->operation_success = FALSE;
  pulsesrc->paused = FALSE;
  pulsesrc->in_read = FALSE;

  pulsesrc->mixer = NULL;

  /* FALSE for sinks, TRUE for sources */
  pulsesrc->probe = gst_pulseprobe_new (G_OBJECT (pulsesrc),
      G_OBJECT_GET_CLASS (pulsesrc), PROP_DEVICE, pulsesrc->server,
      FALSE, TRUE);

  /* this should be the default but it isn't yet */
  gst_base_audio_src_set_slave_method (GST_BASE_AUDIO_SRC (pulsesrc),
      GST_BASE_AUDIO_SRC_SLAVE_SKEW);
}
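/* Negotiate caps: parse them into the ring buffer spec, derive segsize and
 * segtotal from the configured latency/buffer times, release and re-acquire
 * the ring buffer, and report the actual times back via property notify. */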
static gboolean
gst_base_audio_src_setcaps (GstBaseSrc * bsrc, GstCaps * caps)
{
  GstBaseAudioSrc *src = GST_BASE_AUDIO_SRC (bsrc);
  GstRingBufferSpec *spec;

  spec = &src->ringbuffer->spec;

  spec->buffer_time = src->buffer_time;
  spec->latency_time = src->latency_time;

  if (!gst_ring_buffer_parse_caps (spec, caps))
    goto parse_error;

  /* calculate suggested segsize and segtotal */
  spec->segsize =
      spec->rate * spec->bytes_per_sample * spec->latency_time / GST_MSECOND;
  spec->segtotal = spec->buffer_time / spec->latency_time;

  GST_DEBUG ("release old ringbuffer");

  gst_ring_buffer_release (src->ringbuffer);

  gst_ring_buffer_debug_spec_buff (spec);

  GST_DEBUG ("acquire new ringbuffer");

  if (!gst_ring_buffer_acquire (src->ringbuffer, spec))
    goto acquire_error;

  /* calculate actual latency and buffer times */
  spec->latency_time =
      spec->segsize * GST_MSECOND / (spec->rate * spec->bytes_per_sample);
  spec->buffer_time =
      spec->segtotal * spec->segsize * GST_MSECOND / (spec->rate *
      spec->bytes_per_sample);

  gst_ring_buffer_debug_spec_buff (spec);

  g_object_notify (G_OBJECT (src), "actual-buffer-time");
  g_object_notify (G_OBJECT (src), "actual-latency-time");

  return TRUE;

  /* ERRORS */
parse_error:
  {
    GST_DEBUG ("could not parse caps");
    return FALSE;
  }
acquire_error:
  {
    GST_DEBUG ("could not acquire ringbuffer");
    return FALSE;
  }
}
Example #6
0
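/* Answer latency queries from the ring buffer spec: one segment of minimum
 * latency, the whole buffer of segments as the maximum. Other queries are
 * forwarded to the parent class. */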
static gboolean
gst_base_audio_src_query (GstBaseSrc * bsrc, GstQuery * query)
{
  GstBaseAudioSrc *src = GST_BASE_AUDIO_SRC (bsrc);
  gboolean res = FALSE;

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_LATENCY:
    {
      GstClockTime min_latency, max_latency;
      GstRingBufferSpec *spec;

      GST_OBJECT_LOCK (src);
      if (G_UNLIKELY (src->ringbuffer == NULL
              || src->ringbuffer->spec.rate == 0)) {
        GST_OBJECT_UNLOCK (src);
        goto done;
      }

      spec = &src->ringbuffer->spec;

      /* we have at least 1 segment of latency */
      min_latency =
          gst_util_uint64_scale_int (spec->segsize, GST_SECOND,
          spec->rate * spec->bytes_per_sample);
      /* we cannot delay more than the buffersize else we lose data */
      max_latency =
          gst_util_uint64_scale_int (spec->segtotal * spec->segsize, GST_SECOND,
          spec->rate * spec->bytes_per_sample);
      GST_OBJECT_UNLOCK (src);

      GST_DEBUG_OBJECT (src,
          "report latency min %" GST_TIME_FORMAT " max %" GST_TIME_FORMAT,
          GST_TIME_ARGS (min_latency), GST_TIME_ARGS (max_latency));

      /* we are always live, the min latency is 1 segment and the max latency is
       * the complete buffer of segments. */
      gst_query_set_latency (query, TRUE, min_latency, max_latency);

      res = TRUE;
      break;
    }
    default:
      res = GST_BASE_SRC_CLASS (parent_class)->query (bsrc, query);
      break;
  }
done:
  return res;
}
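/* Standard GObject property getter; the actual-buffer-time and
 * actual-latency-time values are only meaningful once the ring buffer has
 * been acquired. */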
static void
gst_base_audio_src_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec)
{
  GstBaseAudioSrc *src;

  src = GST_BASE_AUDIO_SRC (object);

  switch (prop_id) {
    case PROP_BUFFER_TIME:
      g_value_set_int64 (value, src->buffer_time);
      break;
    case PROP_LATENCY_TIME:
      g_value_set_int64 (value, src->latency_time);
      break;
    case PROP_ACTUAL_BUFFER_TIME:
      GST_OBJECT_LOCK (src);
      if (src->ringbuffer && src->ringbuffer->acquired)
        g_value_set_int64 (value, src->ringbuffer->spec.buffer_time);
      else
        g_value_set_int64 (value, DEFAULT_ACTUAL_BUFFER_TIME);
      GST_OBJECT_UNLOCK (src);
      break;
    case PROP_ACTUAL_LATENCY_TIME:
      GST_OBJECT_LOCK (src);
      if (src->ringbuffer && src->ringbuffer->acquired)
        g_value_set_int64 (value, src->ringbuffer->spec.latency_time);
      else
        g_value_set_int64 (value, DEFAULT_ACTUAL_LATENCY_TIME);
      GST_OBJECT_UNLOCK (src);
      break;
    case PROP_PROVIDE_CLOCK:
      g_value_set_boolean (value, gst_base_audio_src_get_provide_clock (src));
      break;
    case PROP_SLAVE_METHOD:
      g_value_set_enum (value, gst_base_audio_src_get_slave_method (src));
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
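/* Simpler event handler variant: only flush-start and flush-stop are
 * handled, all other events are accepted unchanged. */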
static gboolean
gst_base_audio_src_event (GstBaseSrc * bsrc, GstEvent * event)
{
  GstBaseAudioSrc *src = GST_BASE_AUDIO_SRC (bsrc);

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_FLUSH_START:
      gst_ring_buffer_pause (src->ringbuffer);
      gst_ring_buffer_clear_all (src->ringbuffer);
      break;
    case GST_EVENT_FLUSH_STOP:
      /* always resync on the next sample after a flush */
      src->next_sample = -1;
      gst_ring_buffer_clear_all (src->ringbuffer);
      break;
    default:
      break;
  }
  return TRUE;
}
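/* Dispose: drop the clock reference and unparent the ring buffer before
 * chaining up. */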
static void
gst_base_audio_src_dispose (GObject * object)
{
  GstBaseAudioSrc *src;

  src = GST_BASE_AUDIO_SRC (object);

  GST_OBJECT_LOCK (src);
  if (src->clock)
    gst_object_unref (src->clock);
  src->clock = NULL;

  if (src->ringbuffer) {
    gst_object_unparent (GST_OBJECT_CAST (src->ringbuffer));
    src->ringbuffer = NULL;
  }
  GST_OBJECT_UNLOCK (src);

  G_OBJECT_CLASS (parent_class)->dispose (object);
}
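/* Provide the audio clock to the pipeline, but only when the ring buffer is
 * acquired and clock provision has not been disabled. */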
static GstClock *
gst_base_audio_src_provide_clock (GstElement * elem)
{
  GstBaseAudioSrc *src;
  GstClock *clock;

  src = GST_BASE_AUDIO_SRC (elem);

  /* we have no ringbuffer (must be NULL state) */
  if (src->ringbuffer == NULL)
    goto wrong_state;

  if (!gst_ring_buffer_is_acquired (src->ringbuffer))
    goto wrong_state;

  GST_OBJECT_LOCK (src);
  if (!src->priv->provide_clock)
    goto clock_disabled;

  clock = GST_CLOCK_CAST (gst_object_ref (src->clock));
  GST_OBJECT_UNLOCK (src);

  return clock;

  /* ERRORS */
wrong_state:
  {
    GST_DEBUG_OBJECT (src, "ringbuffer not acquired");
    return NULL;
  }
clock_disabled:
  {
    GST_DEBUG_OBJECT (src, "clock provide disabled");
    GST_OBJECT_UNLOCK (src);
    return NULL;
  }
}
Example #11
0
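/* Create one output buffer: grab an OMX buffer from the peer component's out
 * port, copy it into a pad-allocated buffer (or wrap it in a goo buffer),
 * and stamp it with a fixed 20 ms duration based on the output count. Bails
 * out early when the OMX component is not executing. */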
static GstFlowReturn
gst_dasf_src_create (GstAudioSrc *audiosrc,
						guint64 offset,
						guint length,
						GstBuffer **buffer)
{
	GstDasfSrc* self = GST_DASF_SRC (audiosrc);
	GstBaseAudioSrc *baseaudiosrc = GST_BASE_AUDIO_SRC (self);
	GstBuffer* gst_buffer = NULL;
	OMX_BUFFERHEADERTYPE* omx_buffer = NULL;
	GstDasfSrcPrivate* priv = GST_DASF_SRC_GET_PRIVATE (self);
	GstGooAudioFilter* me = self->peer_element;

	GST_DEBUG ("");

	if (me->component->cur_state != OMX_StateExecuting)
	{
		return GST_FLOW_UNEXPECTED;
	}

	GST_DEBUG ("goo stuff");
	{
		omx_buffer = goo_port_grab_buffer (me->outport);

		if (gst_pad_alloc_buffer (GST_BASE_SRC_PAD (self),
					  priv->outcount,
					  omx_buffer->nFilledLen,
					  GST_PAD_CAPS (GST_BASE_SRC_PAD (self)),
					  &gst_buffer) == GST_FLOW_OK)
		{
			if (GST_IS_GOO_BUFFER (gst_buffer))
			{
				memcpy (GST_BUFFER_DATA (gst_buffer),
					omx_buffer->pBuffer,
					omx_buffer->nFilledLen);

				goo_component_release_buffer (me->component,
							      omx_buffer);
			}
			else
			{
				gst_buffer_unref (gst_buffer);
				gst_buffer = (GstBuffer*) gst_goo_buffer_new ();
				gst_goo_buffer_set_data (gst_buffer,
							 me->component,
							 omx_buffer);
			}
		}
		else
		{
			goto fail;
		}
	}

	GST_DEBUG ("gst stuff");
	{
		GstClock* clock = NULL;
		GstClockTime timestamp, duration;
		clock = gst_element_get_clock (GST_ELEMENT (self));

		timestamp = gst_clock_get_time (clock);
		timestamp -= gst_element_get_base_time (GST_ELEMENT (self));
		gst_object_unref (clock);

		GST_BUFFER_TIMESTAMP (gst_buffer) = gst_util_uint64_scale_int (GST_SECOND, priv->outcount, 50);

		/* Set 20 millisecond duration */
		duration = gst_util_uint64_scale_int
			(GST_SECOND,
			 1,
			 50);

		GST_BUFFER_DURATION (gst_buffer) = duration;
		GST_BUFFER_OFFSET (gst_buffer) = priv->outcount++;
		GST_BUFFER_OFFSET_END (gst_buffer) = priv->outcount;

		gst_buffer_set_caps (gst_buffer,
		GST_PAD_CAPS (GST_BASE_SRC_PAD (self)));
	}

beach:
	*buffer = gst_buffer;
	return GST_FLOW_OK;

fail:
	if (G_LIKELY (*buffer))
	{
		gst_buffer_unref (*buffer);
	}

	return GST_FLOW_ERROR;
}
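/* Core capture function: read the requested number of samples from the ring
 * buffer, flag discontinuities when samples were dropped, and timestamp the
 * buffer according to the configured clock slaving method. */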
static GstFlowReturn
gst_base_audio_src_create (GstBaseSrc * bsrc, guint64 offset, guint length,
    GstBuffer ** outbuf)
{
  GstBaseAudioSrc *src = GST_BASE_AUDIO_SRC (bsrc);
  GstBuffer *buf;
  guchar *data;
  guint samples, total_samples;
  guint64 sample;
  gint bps;
  GstRingBuffer *ringbuffer;
  GstRingBufferSpec *spec;
  guint read;
  GstClockTime timestamp, duration;
  GstClock *clock;

  ringbuffer = src->ringbuffer;
  spec = &ringbuffer->spec;

  if (G_UNLIKELY (!gst_ring_buffer_is_acquired (ringbuffer)))
    goto wrong_state;

  bps = spec->bytes_per_sample;

  if ((length == 0 && bsrc->blocksize == 0) || length == -1)
    /* no length given, use the default segment size */
    length = spec->segsize;
  else
    /* make sure we round down to an integral number of samples */
    length -= length % bps;

  /* figure out the offset in the ringbuffer */
  if (G_UNLIKELY (offset != -1)) {
    sample = offset / bps;
    /* if a specific offset was given it must be the next sequential
     * offset we expect or we fail for now. */
    if (src->next_sample != -1 && sample != src->next_sample)
      goto wrong_offset;
  } else {
    /* calculate the sequentially next sample we need to read. This can jump and
     * create a DISCONT. */
    sample = gst_base_audio_src_get_offset (src);
  }

  GST_DEBUG_OBJECT (src, "reading from sample %" G_GUINT64_FORMAT, sample);

  /* get the number of samples to read */
  total_samples = samples = length / bps;

  /* FIXME, using a bufferpool would be nice here */
  buf = gst_buffer_new_and_alloc (length);
  data = GST_BUFFER_DATA (buf);

  do {
    read = gst_ring_buffer_read (ringbuffer, sample, data, samples);
    GST_DEBUG_OBJECT (src, "read %u of %u", read, samples);
    /* if we read all, we're done */
    if (read == samples)
      break;

    /* else something interrupted us and we wait for playing again. */
    GST_DEBUG_OBJECT (src, "wait playing");
    if (gst_base_src_wait_playing (bsrc) != GST_FLOW_OK)
      goto stopped;

    GST_DEBUG_OBJECT (src, "continue playing");

    /* read next samples */
    sample += read;
    samples -= read;
    data += read * bps;
  } while (TRUE);

  /* mark discontinuity if needed */
  if (G_UNLIKELY (sample != src->next_sample) && src->next_sample != -1) {
    GST_WARNING_OBJECT (src,
        "create DISCONT of %" G_GUINT64_FORMAT " samples at sample %"
        G_GUINT64_FORMAT, sample - src->next_sample, sample);
    GST_ELEMENT_WARNING (src, CORE, CLOCK,
        (_("Can't record audio fast enough")),
        ("dropped %" G_GUINT64_FORMAT " samples", sample - src->next_sample));
    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
  }

  src->next_sample = sample + samples;

  /* get the normal timestamp to get the duration. */
  timestamp = gst_util_uint64_scale_int (sample, GST_SECOND, spec->rate);
  duration = gst_util_uint64_scale_int (src->next_sample, GST_SECOND,
      spec->rate) - timestamp;

  GST_OBJECT_LOCK (src);
  if (!(clock = GST_ELEMENT_CLOCK (src)))
    goto no_sync;

  if (clock != src->clock) {
    /* we are slaved, check how to handle this */
    switch (src->priv->slave_method) {
      case GST_BASE_AUDIO_SRC_SLAVE_RESAMPLE:
        /* not implemented, fall through to the skew algorithm. This algorithm
         * should work on the readout pointer and produce more or fewer
         * samples based on the clock drift */
      case GST_BASE_AUDIO_SRC_SLAVE_SKEW:
      {
        GstClockTime running_time;
        GstClockTime base_time;
        GstClockTime current_time;
        guint64 running_time_sample;
        gint running_time_segment;
        gint current_segment;
        gint segment_skew;
        gint sps;

        /* samples per segment */
        sps = ringbuffer->samples_per_seg;

        /* get the current time */
        current_time = gst_clock_get_time (clock);

        /* get the basetime */
        base_time = GST_ELEMENT_CAST (src)->base_time;

        /* get the running_time */
        running_time = current_time - base_time;

        /* the running_time converted to a sample (relative to the ringbuffer) */
        running_time_sample =
            gst_util_uint64_scale_int (running_time, spec->rate, GST_SECOND);

        /* the segment number corresponding to running_time, rounded down */
        running_time_segment = running_time_sample / sps;

        /* the segment currently read from the ringbuffer */
        current_segment = sample / sps;

        /* the skew we have between running_time and the ringbuffertime */
        segment_skew = running_time_segment - current_segment;

        GST_DEBUG_OBJECT (bsrc, "\n running_time = %" GST_TIME_FORMAT
            "\n timestamp     = %" GST_TIME_FORMAT
            "\n running_time_segment = %d"
            "\n current_segment      = %d"
            "\n segment_skew         = %d",
            GST_TIME_ARGS (running_time),
            GST_TIME_ARGS (timestamp),
            running_time_segment, current_segment, segment_skew);

        /* Resync the ringbuffer if:
         * 1. We get one segment into the future.
         *    This is clearly a lie, because we can't
         *    possibly have a buffer with timestamp 1 at
         *    time 0. (unless it has time-travelled...)
         *
         * 2. We are more than the length of the ringbuffer behind.
         *    The length of the ringbuffer then gets to dictate
         *    the threshold for what is considered "too late"
         *
         * 3. If this is our first buffer.
         *    We know that we should catch up to running_time
         *    the first time we run.
         */
        if ((segment_skew < 0) ||
            (segment_skew >= ringbuffer->spec.segtotal) ||
            (current_segment == 0)) {
          gint segments_written;
          gint first_segment;
          gint last_segment;
          gint new_last_segment;
          gint segment_diff;
          gint new_first_segment;
          guint64 new_sample;

          /* we are going to say that the last segment was captured at the current time
             (running_time), minus one segment of creation-latency in the ringbuffer.
             This can be thought of as: The segment arrived in the ringbuffer at time X, and
             that means it was created at time X - (one segment). */
          new_last_segment = running_time_segment - 1;

          /* for better readability */
          first_segment = current_segment;

          /* get the amount of segments written from the device by now */
          segments_written = g_atomic_int_get (&ringbuffer->segdone);

          /* subtract the base from segments_written to get the number of the
             last written segment in the ringbuffer (one segment written = segment 0) */
          last_segment = segments_written - ringbuffer->segbase - 1;

          /* we see how many segments the ringbuffer was timeshifted */
          segment_diff = new_last_segment - last_segment;

          /* we move the first segment an equal amount */
          new_first_segment = first_segment + segment_diff;

          /* and we also move the segmentbase the same amount */
          ringbuffer->segbase -= segment_diff;

          /* we calculate the new sample value */
          new_sample = ((guint64) new_first_segment) * sps;

          /* and get the relative time to this -> our new timestamp */
          timestamp =
              gst_util_uint64_scale_int (new_sample, GST_SECOND, spec->rate);

          /* we update the next sample accordingly */
          src->next_sample = new_sample + samples;

          GST_DEBUG_OBJECT (bsrc,
              "Timeshifted the ringbuffer with %d segments: "
              "Updating the timestamp to %" GST_TIME_FORMAT ", "
              "and src->next_sample to %" G_GUINT64_FORMAT, segment_diff,
              GST_TIME_ARGS (timestamp), src->next_sample);
        }
        break;
      }
      case GST_BASE_AUDIO_SRC_SLAVE_RETIMESTAMP:
      {
        GstClockTime base_time, latency;

        /* We are slaved to another clock, take running time of the pipeline clock and
         * timestamp against it. Somebody else in the pipeline should figure out the
         * clock drift. We keep the duration we calculated above. */
        timestamp = gst_clock_get_time (clock);
        base_time = GST_ELEMENT_CAST (src)->base_time;

        if (timestamp > base_time)
          timestamp -= base_time;
        else
          timestamp = 0;

        /* subtract latency */
        latency =
            gst_util_uint64_scale_int (total_samples, GST_SECOND, spec->rate);
        if (timestamp > latency)
          timestamp -= latency;
        else
          timestamp = 0;
      }
      case GST_BASE_AUDIO_SRC_SLAVE_NONE:
        break;
    }
  } else {
    GstClockTime base_time;

    /* we are not slaved, subtract base_time */
    base_time = GST_ELEMENT_CAST (src)->base_time;

    if (timestamp > base_time)
      timestamp -= base_time;
    else
      timestamp = 0;
  }

no_sync:
  GST_OBJECT_UNLOCK (src);

  GST_BUFFER_TIMESTAMP (buf) = timestamp;
  GST_BUFFER_DURATION (buf) = duration;
  GST_BUFFER_OFFSET (buf) = sample;
  GST_BUFFER_OFFSET_END (buf) = sample + samples;

  *outbuf = buf;

  return GST_FLOW_OK;

  /* ERRORS */
wrong_state:
  {
    GST_DEBUG_OBJECT (src, "ringbuffer in wrong state");
    return GST_FLOW_WRONG_STATE;
  }
wrong_offset:
  {
    GST_ELEMENT_ERROR (src, RESOURCE, SEEK,
        (NULL), ("resource can only be operated on sequentially but offset %"
            G_GUINT64_FORMAT " was given", offset));
    return GST_FLOW_ERROR;
  }
stopped:
  {
    gst_buffer_unref (buf);
    GST_DEBUG_OBJECT (src, "ringbuffer stopped");
    return GST_FLOW_WRONG_STATE;
  }
}
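/* State change handling: create and open the ring buffer going up to READY,
 * control flushing and may_start around PAUSED/PLAYING, and release and
 * close the ring buffer on the way back down. */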
static GstStateChangeReturn
gst_base_audio_src_change_state (GstElement * element,
    GstStateChange transition)
{
  GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
  GstBaseAudioSrc *src = GST_BASE_AUDIO_SRC (element);

  switch (transition) {
    case GST_STATE_CHANGE_NULL_TO_READY:
      GST_DEBUG_OBJECT (src, "NULL->READY");
      GST_OBJECT_LOCK (src);
      if (src->ringbuffer == NULL) {
        gst_audio_clock_reset (GST_AUDIO_CLOCK (src->clock), 0);
        src->ringbuffer = gst_base_audio_src_create_ringbuffer (src);
      }
      GST_OBJECT_UNLOCK (src);
      if (!gst_ring_buffer_open_device (src->ringbuffer))
        goto open_failed;
      break;
    case GST_STATE_CHANGE_READY_TO_PAUSED:
      GST_DEBUG_OBJECT (src, "READY->PAUSED");
      src->next_sample = -1;
      gst_ring_buffer_set_flushing (src->ringbuffer, FALSE);
      gst_ring_buffer_may_start (src->ringbuffer, FALSE);
      break;
    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
      GST_DEBUG_OBJECT (src, "PAUSED->PLAYING");
      gst_ring_buffer_may_start (src->ringbuffer, TRUE);
      break;
    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
      GST_DEBUG_OBJECT (src, "PLAYING->PAUSED");
      gst_ring_buffer_may_start (src->ringbuffer, FALSE);
      gst_ring_buffer_pause (src->ringbuffer);
      break;
    case GST_STATE_CHANGE_PAUSED_TO_READY:
      GST_DEBUG_OBJECT (src, "PAUSED->READY");
      gst_ring_buffer_set_flushing (src->ringbuffer, TRUE);
      break;
    default:
      break;
  }

  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);

  switch (transition) {
    case GST_STATE_CHANGE_PAUSED_TO_READY:
      GST_DEBUG_OBJECT (src, "PAUSED->READY");
      gst_ring_buffer_release (src->ringbuffer);
      break;
    case GST_STATE_CHANGE_READY_TO_NULL:
      GST_DEBUG_OBJECT (src, "READY->NULL");
      gst_ring_buffer_close_device (src->ringbuffer);
      GST_OBJECT_LOCK (src);
      gst_object_unparent (GST_OBJECT_CAST (src->ringbuffer));
      src->ringbuffer = NULL;
      GST_OBJECT_UNLOCK (src);
      break;
    default:
      break;
  }

  return ret;

  /* ERRORS */
open_failed:
  {
    /* subclass must post a meaningful error message */
    GST_DEBUG_OBJECT (src, "open failed");
    return GST_STATE_CHANGE_FAILURE;
  }

}
Example #14
0
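/* Variant of the state change handler that additionally posts clock-provide
 * and clock-lost messages when the element still uses the audio clock it
 * created itself. */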
static GstStateChangeReturn
gst_base_audio_src_change_state (GstElement * element,
    GstStateChange transition)
{
  GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
  GstBaseAudioSrc *src = GST_BASE_AUDIO_SRC (element);

  switch (transition) {
    case GST_STATE_CHANGE_NULL_TO_READY:
      GST_DEBUG_OBJECT (src, "NULL->READY");
      GST_OBJECT_LOCK (src);
      if (src->ringbuffer == NULL) {
        gst_audio_clock_reset (GST_AUDIO_CLOCK (src->clock), 0);
        src->ringbuffer = gst_base_audio_src_create_ringbuffer (src);
      }
      GST_OBJECT_UNLOCK (src);
      if (!gst_ring_buffer_open_device (src->ringbuffer))
        goto open_failed;
      break;
    case GST_STATE_CHANGE_READY_TO_PAUSED:
      GST_DEBUG_OBJECT (src, "READY->PAUSED");
      src->next_sample = -1;
      gst_ring_buffer_set_flushing (src->ringbuffer, FALSE);
      gst_ring_buffer_may_start (src->ringbuffer, FALSE);
      /* Only post clock-provide messages if this is the clock that
       * we created. If the subclass has overridden it, the subclass
       * should post these messages whenever necessary */
      if (src->clock && GST_IS_AUDIO_CLOCK (src->clock) &&
          GST_AUDIO_CLOCK_CAST (src->clock)->func ==
          (GstAudioClockGetTimeFunc) gst_base_audio_src_get_time)
        gst_element_post_message (element,
            gst_message_new_clock_provide (GST_OBJECT_CAST (element),
                src->clock, TRUE));
      break;
    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
      GST_DEBUG_OBJECT (src, "PAUSED->PLAYING");
      gst_ring_buffer_may_start (src->ringbuffer, TRUE);
      break;
    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
      GST_DEBUG_OBJECT (src, "PLAYING->PAUSED");
      gst_ring_buffer_may_start (src->ringbuffer, FALSE);
      gst_ring_buffer_pause (src->ringbuffer);
      break;
    case GST_STATE_CHANGE_PAUSED_TO_READY:
      GST_DEBUG_OBJECT (src, "PAUSED->READY");
      /* Only post clock-lost messages if this is the clock that
       * we created. If the subclass has overridden it, the subclass
       * should post these messages whenever necessary */
      if (src->clock && GST_IS_AUDIO_CLOCK (src->clock) &&
          GST_AUDIO_CLOCK_CAST (src->clock)->func ==
          (GstAudioClockGetTimeFunc) gst_base_audio_src_get_time)
        gst_element_post_message (element,
            gst_message_new_clock_lost (GST_OBJECT_CAST (element), src->clock));
      gst_ring_buffer_set_flushing (src->ringbuffer, TRUE);
      break;
    default:
      break;
  }

  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);

  switch (transition) {
    case GST_STATE_CHANGE_PAUSED_TO_READY:
      GST_DEBUG_OBJECT (src, "PAUSED->READY");
      gst_ring_buffer_release (src->ringbuffer);
      break;
    case GST_STATE_CHANGE_READY_TO_NULL:
      GST_DEBUG_OBJECT (src, "READY->NULL");
      gst_ring_buffer_close_device (src->ringbuffer);
      GST_OBJECT_LOCK (src);
      gst_object_unparent (GST_OBJECT_CAST (src->ringbuffer));
      src->ringbuffer = NULL;
      GST_OBJECT_UNLOCK (src);
      break;
    default:
      break;
  }

  return ret;

  /* ERRORS */
open_failed:
  {
    /* subclass must post a meaningful error message */
    GST_DEBUG_OBJECT (src, "open failed");
    return GST_STATE_CHANGE_FAILURE;
  }

}