Example no. 1
static void
print_clocking_info (GstElement * element)
{
  if (!gst_element_requires_clock (element) &&
      !(gst_element_provides_clock (element) &&
          gst_element_get_clock (element))) {
    n_print ("\n");
    n_print ("Element has no clocking capabilities.");
    return;
  }

  n_print ("\n");
  n_print ("Clocking Interaction:\n");
  if (gst_element_requires_clock (element)) {
    n_print ("  element requires a clock\n");
  }

  if (gst_element_provides_clock (element)) {
    GstClock *clock;

    clock = gst_element_get_clock (element);
    if (clock) {
      n_print ("  element provides a clock: %s\n", GST_OBJECT_NAME (clock));
      /* drop the reference returned by gst_element_get_clock() */
      gst_object_unref (clock);
    } else {
      n_print ("  element is supposed to provide a clock but returned NULL\n");
    }
  }
}
Example no. 2
static void
gst_dtmf_prepare_timestamps (GstDTMFSrc * dtmfsrc)
{
  GstClockTime last_stop;
  GstClockTime timestamp;

  GST_OBJECT_LOCK (dtmfsrc);
  last_stop = dtmfsrc->last_stop;
  GST_OBJECT_UNLOCK (dtmfsrc);

  if (GST_CLOCK_TIME_IS_VALID (last_stop)) {
    timestamp = last_stop;
  } else {
    GstClock *clock;

    /* If there is no valid start time, let's use now as the start time */

    clock = gst_element_get_clock (GST_ELEMENT (dtmfsrc));
    if (clock != NULL) {
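      /* running time = current clock time minus the element's base time */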
      timestamp = gst_clock_get_time (clock)
          - gst_element_get_base_time (GST_ELEMENT (dtmfsrc));
      gst_object_unref (clock);
    } else {
      gchar *dtmf_name = gst_element_get_name (dtmfsrc);
      GST_ERROR_OBJECT (dtmfsrc, "No clock set for element %s", dtmf_name);
      dtmfsrc->timestamp = GST_CLOCK_TIME_NONE;
      g_free (dtmf_name);
      return;
    }
  }

  /* Make sure the timestamp always goes forward */
  if (timestamp > dtmfsrc->timestamp)
    dtmfsrc->timestamp = timestamp;
}
Example no. 3
MbEvent *
handle_state_change_message (GstMessage *message)
{
  MbEvent *mb_event = NULL;
  GstState old_state, new_state;
  GstElement *source = (GstElement *) message->src;
  gst_message_parse_state_changed (message, &old_state, &new_state, NULL);
  if (new_state == GST_STATE_PLAYING)
  {
    if (source == _mb_global_data.pipeline)
    {
      g_mutex_lock (&(_mb_global_data.mutex));
      if (_mb_global_data.initialized == FALSE)
      {
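        /* keep a reference to the pipeline clock as the clock provider;
         * gst_element_get_clock() returns a new reference */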
        if (_mb_global_data.clock_provider == NULL)
          _mb_global_data.clock_provider = 
            gst_element_get_clock(_mb_global_data.pipeline);

        _mb_global_data.initialized = TRUE;

        mb_event = create_app_event (MB_APP_INIT_DONE);
      }
      g_mutex_unlock (&(_mb_global_data.mutex));
    }
    else
    {
      if (strcmp (G_OBJECT_TYPE_NAME(G_OBJECT (source)), "GstBin") == 0)
        mb_event = create_state_change_event (MB_BEGIN,
            GST_ELEMENT_NAME (source));
    }
  }

  return mb_event;
}
Example no. 4
static void
gst_rtp_dtmf_prepare_timestamps (GstRTPDTMFSrc * dtmfsrc)
{
  GstClock *clock;
  GstClockTime base_time;

#ifdef MAEMO_BROKEN
  base_time = 0;
#else
  base_time = gst_element_get_base_time (GST_ELEMENT (dtmfsrc));
#endif

  clock = gst_element_get_clock (GST_ELEMENT (dtmfsrc));
  if (clock != NULL) {
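    /* timestamp = current running time (clock time minus base time) plus one
     * minimum inter-digit interval */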
    dtmfsrc->timestamp = gst_clock_get_time (clock)
        + (MIN_INTER_DIGIT_INTERVAL * GST_MSECOND) - base_time;
    dtmfsrc->start_timestamp = dtmfsrc->timestamp;
    gst_object_unref (clock);
  } else {
    gchar *dtmf_name = gst_element_get_name (dtmfsrc);
    GST_ERROR_OBJECT (dtmfsrc, "No clock set for element %s", dtmf_name);
    dtmfsrc->timestamp = GST_CLOCK_TIME_NONE;
    g_free (dtmf_name);
  }

  dtmfsrc->rtp_timestamp = dtmfsrc->ts_base +
      gst_util_uint64_scale_int (gst_segment_to_running_time (&GST_BASE_SRC
          (dtmfsrc)->segment, GST_FORMAT_TIME, dtmfsrc->timestamp),
      dtmfsrc->clock_rate, GST_SECOND);
}
Example no. 5
/* this is the first function where pipeline is built */
void 
setup_streamer(int narg, char *sarg[])
{
    /* the message bus */
    GstBus *bus;

    /* initialize the global source and output structures */
    init_global_sources();
    init_global_outputs();

    pipeline = gst_pipeline_new("pipeline");
    bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
    gst_bus_add_watch (bus, cb_papaya_pipeline_bus, loop);
    gst_object_unref (bus);

    switch (STREAM_TYPE){
        case AUDIO_PLAYLIST:
        case AUDIO_LIVE:
            papaya_audio_tree_build();
            break;
        case VIDEO_PLAYLIST:
        case VIDEO_LIVE:
        case XORG_LIVE:
        case DVB_LIVE:
        case DV1394_LIVE:
            papaya_video_tree_build();
            break;
    }
    /* after having built the chain, let's
     * play some media */ 
    gst_element_set_state (pipeline, GST_STATE_PLAYING);

    pipeclock = gst_element_get_clock(pipeline);
}
Example no. 6
static gboolean
gst_rtp_dtmf_prepare_timestamps (GstRTPDTMFSrc * dtmfsrc)
{
  GstClockTime last_stop;

  GST_OBJECT_LOCK (dtmfsrc);
  last_stop = dtmfsrc->last_stop;
  GST_OBJECT_UNLOCK (dtmfsrc);

  if (GST_CLOCK_TIME_IS_VALID (last_stop)) {
    dtmfsrc->start_timestamp = last_stop;
  } else {
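    /* no valid last stop: fall back to the element's current running time */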
    GstClock *clock = gst_element_get_clock (GST_ELEMENT (dtmfsrc));

    if (clock == NULL)
      return FALSE;

    dtmfsrc->start_timestamp = gst_clock_get_time (clock)
        - gst_element_get_base_time (GST_ELEMENT (dtmfsrc));
    gst_object_unref (clock);
  }

  /* If the last stop was in the past, then let's add the buffers together */
  if (dtmfsrc->start_timestamp < dtmfsrc->timestamp)
    dtmfsrc->start_timestamp = dtmfsrc->timestamp;

  dtmfsrc->timestamp = dtmfsrc->start_timestamp;

  dtmfsrc->rtp_timestamp = dtmfsrc->ts_base +
      gst_util_uint64_scale_int (gst_segment_to_running_time (&GST_BASE_SRC
          (dtmfsrc)->segment, GST_FORMAT_TIME, dtmfsrc->timestamp),
      dtmfsrc->clock_rate, GST_SECOND);

  return TRUE;
}
Example no. 7
static void
print_clocking_info (GstElement * element)
{
  gboolean requires_clock, provides_clock;

  requires_clock =
      GST_OBJECT_FLAG_IS_SET (element, GST_ELEMENT_FLAG_REQUIRE_CLOCK);
  provides_clock =
      GST_OBJECT_FLAG_IS_SET (element, GST_ELEMENT_FLAG_PROVIDE_CLOCK);

  if (!requires_clock && !provides_clock) {
    n_print ("\n");
    n_print ("Element has no clocking capabilities.\n");
    return;
  }

  n_print ("\n");
  n_print ("Clocking Interaction:\n");
  if (requires_clock) {
    n_print ("  element requires a clock\n");
  }

  if (provides_clock) {
    GstClock *clock;

    clock = gst_element_get_clock (element);
    if (clock) {
      n_print ("  element provides a clock: %s\n", GST_OBJECT_NAME (clock));
      gst_object_unref (clock);
    } else
      n_print ("  element is supposed to provide a clock but returned NULL\n");
  }
}
Example no. 8
static gboolean
gst_dshowvideosrc_push_buffer (guint8 * buffer, guint size, gpointer src_object,
    GstClockTime duration)
{
  GstDshowVideoSrc *src = GST_DSHOWVIDEOSRC (src_object);
  GstBuffer *buf = NULL;
  IPin *pPin = NULL;
  HRESULT hres = S_FALSE;
  AM_MEDIA_TYPE *pMediaType = NULL;

  if (!buffer || size == 0 || !src) {
    return FALSE;
  }

  /* create a new buffer and assign the clock time as its timestamp */
  buf = gst_buffer_new_and_alloc (size);

  GST_BUFFER_SIZE (buf) = size;

  GstClock *clock = gst_element_get_clock (GST_ELEMENT (src));
  if (clock) {
    GST_BUFFER_TIMESTAMP (buf) =
        GST_CLOCK_DIFF (gst_element_get_base_time (GST_ELEMENT (src)),
        gst_clock_get_time (clock));
    gst_object_unref (clock);
  } else {
    GST_BUFFER_TIMESTAMP (buf) = GST_CLOCK_TIME_NONE;
  }

  GST_BUFFER_DURATION (buf) = duration;

  if (src->is_rgb) {
    /* FOR RGB directshow decoder will return bottom-up BITMAP
     * There is probably a way to get top-bottom video frames from
     * the decoder...
     */
    gint line = 0;
    gint stride = size / src->height;

    for (; line < src->height; line++) {
      memcpy (GST_BUFFER_DATA (buf) + (line * stride),
          buffer + (size - ((line + 1) * (stride))), stride);
    }
  } else {
    memcpy (GST_BUFFER_DATA (buf), buffer, size);
  }

  GST_DEBUG ("push_buffer => pts %" GST_TIME_FORMAT "duration %"
      GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
      GST_TIME_ARGS (duration));

  /* the negotiate() method already set caps on the source pad */
  gst_buffer_set_caps (buf, GST_PAD_CAPS (GST_BASE_SRC_PAD (src)));

  g_mutex_lock (src->buffer_mutex);
  if (src->buffer != NULL)
    gst_buffer_unref (src->buffer);
  src->buffer = buf;
  g_cond_signal (src->buffer_cond);
  g_mutex_unlock (src->buffer_mutex);

  return TRUE;
}
Example no. 9
/* Method: clock
 * Returns: the clock of the element (as a Gst::Clock object), or nil
 * if this element does not provide a clock.
 */
static VALUE
rg_clock(VALUE self)
{
    GstClock *clock;

    clock = gst_element_get_clock(SELF(self));
    return clock != NULL ? RGST_CLOCK_NEW(clock)
        : Qnil;
}
Example no. 10
static guint64 get_gst_time_us(GstScreamQueue *self)
{
    GstClock *clock = NULL;
    GstClockTime time = 0;

    clock = gst_element_get_clock(GST_ELEMENT(self));
    if (G_LIKELY(clock)) {
        time = gst_clock_get_time(clock);
        gst_object_unref(clock);
    }
    return time / 1000;
}
Example no. 11
static GstStateChangeReturn
gst_decklink_video_sink_stop_scheduled_playback (GstDecklinkVideoSink * self)
{
  GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
  GstClockTime start_time;
  HRESULT res;
  GstClock *clock;

  if (!self->output->started)
    return ret;

  clock = gst_element_get_clock (GST_ELEMENT_CAST (self));
  if (clock) {
    // FIXME: start time is the same for the complete pipeline,
    // but what we need here is the start time of this element!
    start_time = gst_element_get_base_time (GST_ELEMENT (self));
    if (start_time != GST_CLOCK_TIME_NONE)
      start_time = gst_clock_get_time (clock) - start_time;

    // FIXME: This will probably not work
    if (start_time == GST_CLOCK_TIME_NONE)
      start_time = 0;

    convert_to_internal_clock (self, &start_time, NULL);

    // The start time is now the running time when we stopped
    // playback

    gst_object_unref (clock);
  } else {
    GST_WARNING_OBJECT (self,
        "No clock, stopping scheduled playback immediately");
    start_time = 0;
  }

  GST_DEBUG_OBJECT (self,
      "Stopping scheduled playback at %" GST_TIME_FORMAT,
      GST_TIME_ARGS (start_time));

  g_mutex_lock (&self->output->lock);
  self->output->started = FALSE;
  g_mutex_unlock (&self->output->lock);
  res = self->output->output->StopScheduledPlayback (start_time, 0, GST_SECOND);
  if (res != S_OK) {
    GST_ELEMENT_ERROR (self, STREAM, FAILED,
        (NULL), ("Failed to stop scheduled playback: 0x%08x", res));
    ret = GST_STATE_CHANGE_FAILURE;
  }
  self->internal_base_time = GST_CLOCK_TIME_NONE;
  self->external_base_time = GST_CLOCK_TIME_NONE;

  return ret;
}
Example no. 12
static void
gst_wrapper_camera_bin_reset_video_src_caps (GstWrapperCameraBinSrc * self,
    GstCaps * caps)
{
  GstClock *clock;
  gint64 base_time;

  GST_DEBUG_OBJECT (self, "Resetting src caps to %" GST_PTR_FORMAT, caps);
  if (self->src_vid_src) {
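    /* remember the current clock and base time so they can be restored after
     * the source element is restarted below */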
    clock = gst_element_get_clock (self->src_vid_src);
    base_time = gst_element_get_base_time (self->src_vid_src);

    gst_element_set_state (self->src_vid_src, GST_STATE_READY);
    set_capsfilter_caps (self, caps);

    self->drop_newseg = TRUE;

    GST_DEBUG_OBJECT (self, "Bringing source up");
    gst_element_sync_state_with_parent (self->src_vid_src);

    if (clock) {
      gst_element_set_clock (self->src_vid_src, clock);
      gst_element_set_base_time (self->src_vid_src, base_time);

      if (GST_IS_BIN (self->src_vid_src)) {
        GstIterator *it =
            gst_bin_iterate_elements (GST_BIN (self->src_vid_src));
        gpointer item = NULL;
        gboolean done = FALSE;
        while (!done) {
          switch (gst_iterator_next (it, &item)) {
            case GST_ITERATOR_OK:
              gst_element_set_base_time (GST_ELEMENT (item), base_time);
              gst_object_unref (item);
              break;
            case GST_ITERATOR_RESYNC:
              gst_iterator_resync (it);
              break;
            case GST_ITERATOR_ERROR:
              done = TRUE;
              break;
            case GST_ITERATOR_DONE:
              done = TRUE;
              break;
          }
        }
        gst_iterator_free (it);
      }

      gst_object_unref (clock);
    }
  }
}
Example no. 13
static GstStateChangeReturn
gst_decklink_video_src_change_state (GstElement * element,
    GstStateChange transition)
{
  GstDecklinkVideoSrc *self = GST_DECKLINK_VIDEO_SRC_CAST (element);
  GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;

  switch (transition) {
    case GST_STATE_CHANGE_NULL_TO_READY:
      if (!gst_decklink_video_src_open (self)) {
        ret = GST_STATE_CHANGE_FAILURE;
        goto out;
      }
      if (self->mode == GST_DECKLINK_MODE_AUTO &&
          self->video_format != GST_DECKLINK_VIDEO_FORMAT_AUTO) {
        GST_WARNING_OBJECT (self, "Warning: mode=auto and format!=auto may \
                            not work");
      }
      break;
    case GST_STATE_CHANGE_READY_TO_PAUSED:
      g_mutex_lock (&self->input->lock);
      self->input->clock_start_time = GST_CLOCK_TIME_NONE;
      self->input->clock_epoch += self->input->clock_last_time;
      self->input->clock_last_time = 0;
      self->input->clock_offset = 0;
      g_mutex_unlock (&self->input->lock);
      gst_element_post_message (element,
          gst_message_new_clock_provide (GST_OBJECT_CAST (element),
              self->input->clock, TRUE));
      self->flushing = FALSE;
      break;
    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:{
      GstClock *clock;

      clock = gst_element_get_clock (GST_ELEMENT_CAST (self));
      if (clock) {
        if (clock != self->input->clock) {
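          /* slave the internal hardware clock to the pipeline clock */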
          gst_clock_set_master (self->input->clock, clock);
        }

        gst_object_unref (clock);
      } else {
        GST_ELEMENT_ERROR (self, STREAM, FAILED,
            (NULL), ("Need a clock to go to PLAYING"));
        ret = GST_STATE_CHANGE_FAILURE;
      }

      break;
    }
    default:
      break;
  }
Example no. 14
static void
gst_decklink_video_src_start_streams (GstElement * element)
{
  GstDecklinkVideoSrc *self = GST_DECKLINK_VIDEO_SRC_CAST (element);
  HRESULT res;

  if (self->input->video_enabled && (!self->input->audiosrc
          || self->input->audio_enabled)
      && (GST_STATE (self) == GST_STATE_PLAYING
          || GST_STATE_PENDING (self) == GST_STATE_PLAYING)) {
    GstClock *clock;

    clock = gst_element_get_clock (element);
    if (!clock) {
      GST_ELEMENT_ERROR (self, STREAM, FAILED, (NULL),
          ("Streams supposed to start but we have no clock"));
      return;
    }

    GST_DEBUG_OBJECT (self, "Starting streams");

    res = self->input->input->StartStreams ();
    if (res != S_OK) {
      GST_ELEMENT_ERROR (self, STREAM, FAILED,
          (NULL), ("Failed to start streams: 0x%08x", res));
      gst_object_unref (clock);
      return;
    }

    self->input->started = TRUE;
    self->input->clock_restart = TRUE;

    // Need to unlock to get the clock time
    g_mutex_unlock (&self->input->lock);

    // Current times of internal and external clock when we go to
    // playing. We need this to convert the pipeline running time
    // to the running time of the hardware
    //
    // We can't use the normal base time for the external clock
    // because we might go to PLAYING later than the pipeline
    self->internal_base_time = gst_clock_get_internal_time (self->input->clock);
    self->external_base_time = gst_clock_get_internal_time (clock);

    gst_object_unref (clock);
    g_mutex_lock (&self->input->lock);
  } else {
    GST_DEBUG_OBJECT (self, "Not starting streams yet");
  }
}
Example no. 15
/**
 * @brief Handle buffer data.
 * @return None
 * @param self pointer to GstTensorSink2
 * @param buffer pointer to GstBuffer to be handled
 */
static void
gst_tensor_sink2_render_buffer (GstTensorSink2 * self, GstBuffer * buffer)
{
  GstClockTime now = GST_CLOCK_TIME_NONE;
  guint signal_rate;
  gboolean notify = FALSE;

  g_return_if_fail (GST_IS_TENSOR_SINK2 (self));

  signal_rate = gst_tensor_sink2_get_signal_rate (self);

  if (signal_rate) {
    GstClock *clock;
    GstClockTime render_time;
    GstClockTime last_render_time;

    clock = gst_element_get_clock (GST_ELEMENT (self));

    if (clock) {
      now = gst_clock_get_time (clock);
      last_render_time = gst_tensor_sink2_get_last_render_time (self);

      /** time for next signal */
      render_time = (1000 / signal_rate) * GST_MSECOND + last_render_time;

      if (!GST_CLOCK_TIME_IS_VALID (last_render_time) ||
          GST_CLOCK_DIFF (now, render_time) <= 0) {
        /** send data after the render time, or for the first received buffer */
        notify = TRUE;
      }

      gst_object_unref (clock);
    }
  } else {
    /** send data if signal rate is 0 */
    notify = TRUE;
  }

  if (notify) {
    gst_tensor_sink2_set_last_render_time (self, now);

    if (gst_tensor_sink2_get_emit_signal (self)) {
      silent_debug ("Emit signal for new data [%" GST_TIME_FORMAT "] rate [%d]",
          GST_TIME_ARGS (now), signal_rate);

      g_signal_emit (self, _tensor_sink2_signals[SIGNAL_NEW_DATA], 0, buffer);
    }
  }
}
Example no. 16
int
main (int argc, char *argv[])
{
  GstBus *bus;

  gst_init (&argc, &argv);

  loop = g_main_loop_new (NULL, TRUE);

  pipeline = gst_pipeline_new ("pipeline");

  /* setup message handling */
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_signal_watch_full (bus, G_PRIORITY_HIGH);
  g_signal_connect (bus, "message::error", (GCallback) message_received,
      pipeline);
  g_signal_connect (bus, "message::warning", (GCallback) message_received,
      pipeline);
  g_signal_connect (bus, "message::eos", (GCallback) eos_message_received,
      pipeline);

  /* we set the pipeline to PLAYING, this will distribute a default clock and
   * start running. no preroll is needed */
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* get the clock now. Since we never set the pipeline to PAUSED again, the
   * clock will not change, even when we add new clock providers later.  */
  theclock = gst_element_get_clock (pipeline);

  /* start our actions while we are in the mainloop so that we can catch errors
   * and other messages. */
  g_idle_add ((GSourceFunc) perform_step, GINT_TO_POINTER (0));
  /* go to main loop */
  g_main_loop_run (loop);

  gst_element_set_state (pipeline, GST_STATE_NULL);

  gst_object_unref (bus);
  gst_object_unref (pipeline);
  gst_object_unref (theclock);

  return 0;
}
Example no. 17
/**
 * @brief Push GstBuffer
 */
static void
gst_tensor_reposink_render_buffer (GstTensorRepoSink * self, GstBuffer * buffer)
{
  GstClockTime now = GST_CLOCK_TIME_NONE;
  guint signal_rate;
  gboolean notify = FALSE;
  g_return_if_fail (GST_IS_TENSOR_REPOSINK (self));

  signal_rate = self->signal_rate;

  if (signal_rate) {
    GstClock *clock;
    GstClockTime render_time;

    clock = gst_element_get_clock (GST_ELEMENT (self));

    if (clock) {
      now = gst_clock_get_time (clock);
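      /* earliest time for the next notification:
       * last render time + (1000 / signal_rate) milliseconds */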
      render_time = (1000 / signal_rate) * GST_MSECOND + self->last_render_time;

      if (!GST_CLOCK_TIME_IS_VALID (self->last_render_time) ||
          GST_CLOCK_DIFF (now, render_time) <= 0) {
        notify = TRUE;
      }

      gst_object_unref (clock);
    }
  } else {
    notify = TRUE;
  }

  if (notify) {
    self->last_render_time = now;

    if (!gst_tensor_repo_set_buffer (self->myid, self->o_myid, buffer,
            self->in_caps)) {
      GST_ELEMENT_ERROR (self, RESOURCE, WRITE,
          ("Cannot Set buffer into repo [key: %d]", self->myid), NULL);
    }
  }
}
Example no. 18
static GstStateChangeReturn
gst_alsasrc_change_state (GstElement * element, GstStateChange transition)
{
  GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
  GstAlsaSrc *alsa = GST_ALSA_SRC (element);
  GstClock *clk;

  switch (transition) {
      /* show the compiler that we care */
    case GST_STATE_CHANGE_NULL_TO_READY:
    case GST_STATE_CHANGE_READY_TO_PAUSED:
    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
    case GST_STATE_CHANGE_PAUSED_TO_READY:
    case GST_STATE_CHANGE_READY_TO_NULL:
    case GST_STATE_CHANGE_NULL_TO_NULL:
    case GST_STATE_CHANGE_READY_TO_READY:
    case GST_STATE_CHANGE_PAUSED_TO_PAUSED:
    case GST_STATE_CHANGE_PLAYING_TO_PLAYING:
      break;
    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
      alsa->driver_timestamps = FALSE;

      clk = gst_element_get_clock (element);
      if (clk != NULL) {
        if (GST_IS_SYSTEM_CLOCK (clk)) {
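          /* driver timestamps are only usable when the pipeline clock is the
           * monotonic system clock */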
          gint clocktype;
          g_object_get (clk, "clock-type", &clocktype, NULL);
          if (clocktype == GST_CLOCK_TYPE_MONOTONIC) {
            GST_INFO ("Using driver timestamps !");
            alsa->driver_timestamps = TRUE;
          }
        }

        gst_object_unref (clk);
      }
      break;
  }
  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);

  return ret;
}
Example no. 19
static gboolean
check_last_request_time (GstPad * pad)
{
  GstClockTime *last, now;
  GstClock *clock;
  GstElement *element = gst_pad_get_parent_element (pad);
  gboolean ret = FALSE;

  if (element == NULL) {
    GST_ERROR_OBJECT (pad, "Cannot get parent object to get clock");
    return TRUE;
  }

  clock = gst_element_get_clock (element);
  now = gst_clock_get_time (clock);
  g_object_unref (clock);
  g_object_unref (element);

  GST_OBJECT_LOCK (pad);

  last =
      g_object_get_qdata (G_OBJECT (pad), last_key_frame_request_time_quark ());

  if (last == NULL) {
    last = g_slice_new (GstClockTime);
    g_object_set_qdata_full (G_OBJECT (pad),
        last_key_frame_request_time_quark (), last,
        (GDestroyNotify) kms_utils_destroy_GstClockTime);

    *last = now;
    ret = TRUE;
  } else if (((*last) + DEFAULT_KEYFRAME_DISPERSION) < now) {
    ret = TRUE;
    *last = now;
  }

  GST_OBJECT_UNLOCK (pad);

  return ret;
}
Example no. 20
static void
gst_dtmf_prepare_timestamps (GstDTMFSrc * dtmfsrc)
{
  GstClock *clock;
  GstClockTime base_time;

  base_time = gst_element_get_base_time (GST_ELEMENT (dtmfsrc));

  clock = gst_element_get_clock (GST_ELEMENT (dtmfsrc));
  if (clock != NULL) {
#ifdef MAEMO_BROKEN
    dtmfsrc->timestamp = gst_clock_get_time (clock);
#else
    dtmfsrc->timestamp = gst_clock_get_time (clock) - base_time;
#endif
    gst_object_unref (clock);
  } else {
    gchar *dtmf_name = gst_element_get_name (dtmfsrc);
    GST_ERROR_OBJECT (dtmfsrc, "No clock set for element %s", dtmf_name);
    dtmfsrc->timestamp = GST_CLOCK_TIME_NONE;
    g_free (dtmf_name);
  }
}
Example no. 21
static GstFlowReturn
gst_rtp_dtmf_src_create (GstBaseSrc * basesrc, guint64 offset,
    guint length, GstBuffer ** buffer)
{
  GstRTPDTMFSrcEvent *event;
  GstRTPDTMFSrc *dtmfsrc;
  GstClock *clock;
  GstClockID *clockid;
  GstClockReturn clockret;
  GstMessage *message;
  GQueue messages = G_QUEUE_INIT;

  dtmfsrc = GST_RTP_DTMF_SRC (basesrc);

  do {

    if (dtmfsrc->payload == NULL) {
      GST_DEBUG_OBJECT (dtmfsrc, "popping");
      event = g_async_queue_pop (dtmfsrc->event_queue);

      GST_DEBUG_OBJECT (dtmfsrc, "popped %d", event->event_type);

      switch (event->event_type) {
        case RTP_DTMF_EVENT_TYPE_STOP:
          GST_WARNING_OBJECT (dtmfsrc,
              "Received a DTMF stop event when already stopped");
          gst_dtmf_src_post_message (dtmfsrc, "dtmf-event-dropped", event);
          break;

        case RTP_DTMF_EVENT_TYPE_START:
          dtmfsrc->first_packet = TRUE;
          dtmfsrc->last_packet = FALSE;
          /* Set the redundancy on the first packet */
          dtmfsrc->redundancy_count = dtmfsrc->packet_redundancy;
          if (!gst_rtp_dtmf_prepare_timestamps (dtmfsrc))
            goto no_clock;

          g_queue_push_tail (&messages,
              gst_dtmf_src_prepare_message (dtmfsrc, "dtmf-event-processed",
                  event));
          dtmfsrc->payload = event->payload;
          dtmfsrc->payload->duration =
              dtmfsrc->ptime * dtmfsrc->clock_rate / 1000;
          event->payload = NULL;
          break;

        case RTP_DTMF_EVENT_TYPE_PAUSE_TASK:
          /*
           * We're pushing it back because it has to stay in there until
           * the task is really paused (and the queue will then be flushed)
           */
          GST_OBJECT_LOCK (dtmfsrc);
          if (dtmfsrc->paused) {
            g_async_queue_push (dtmfsrc->event_queue, event);
            goto paused_locked;
          }
          GST_OBJECT_UNLOCK (dtmfsrc);
          break;
      }

      gst_rtp_dtmf_src_event_free (event);
    } else if (!dtmfsrc->first_packet && !dtmfsrc->last_packet &&
        (dtmfsrc->timestamp - dtmfsrc->start_timestamp) / GST_MSECOND >=
        MIN_PULSE_DURATION) {
      GST_DEBUG_OBJECT (dtmfsrc, "try popping");
      event = g_async_queue_try_pop (dtmfsrc->event_queue);


      if (event != NULL) {
        GST_DEBUG_OBJECT (dtmfsrc, "try popped %d", event->event_type);

        switch (event->event_type) {
          case RTP_DTMF_EVENT_TYPE_START:
            GST_WARNING_OBJECT (dtmfsrc,
                "Received two consecutive DTMF start events");
            gst_dtmf_src_post_message (dtmfsrc, "dtmf-event-dropped", event);
            break;

          case RTP_DTMF_EVENT_TYPE_STOP:
            dtmfsrc->first_packet = FALSE;
            dtmfsrc->last_packet = TRUE;
            /* Set the redundancy on the last packet */
            dtmfsrc->redundancy_count = dtmfsrc->packet_redundancy;
            g_queue_push_tail (&messages,
                gst_dtmf_src_prepare_message (dtmfsrc, "dtmf-event-processed",
                    event));
            break;

          case RTP_DTMF_EVENT_TYPE_PAUSE_TASK:
            /*
             * We're pushing it back because it has to stay in there until
             * the task is really paused (and the queue will then be flushed)
             */
            GST_DEBUG_OBJECT (dtmfsrc, "pushing pause_task...");
            GST_OBJECT_LOCK (dtmfsrc);
            if (dtmfsrc->paused) {
              g_async_queue_push (dtmfsrc->event_queue, event);
              goto paused_locked;
            }
            GST_OBJECT_UNLOCK (dtmfsrc);
            break;
        }
        gst_rtp_dtmf_src_event_free (event);
      }
    }
  } while (dtmfsrc->payload == NULL);


  GST_DEBUG_OBJECT (dtmfsrc, "Processed events, now lets wait on the clock");

  clock = gst_element_get_clock (GST_ELEMENT (basesrc));
  if (!clock)
    goto no_clock;
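
  /* the wait is scheduled in absolute clock time, so convert the running-time
   * timestamp back by adding the element's base time */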
  clockid = gst_clock_new_single_shot_id (clock, dtmfsrc->timestamp +
      gst_element_get_base_time (GST_ELEMENT (dtmfsrc)));
  gst_object_unref (clock);

  GST_OBJECT_LOCK (dtmfsrc);
  if (!dtmfsrc->paused) {
    dtmfsrc->clockid = clockid;
    GST_OBJECT_UNLOCK (dtmfsrc);

    clockret = gst_clock_id_wait (clockid, NULL);

    GST_OBJECT_LOCK (dtmfsrc);
    if (dtmfsrc->paused)
      clockret = GST_CLOCK_UNSCHEDULED;
  } else {
    clockret = GST_CLOCK_UNSCHEDULED;
  }
  gst_clock_id_unref (clockid);
  dtmfsrc->clockid = NULL;
  GST_OBJECT_UNLOCK (dtmfsrc);

  while ((message = g_queue_pop_head (&messages)) != NULL)
    gst_element_post_message (GST_ELEMENT (dtmfsrc), message);

  if (clockret == GST_CLOCK_UNSCHEDULED) {
    goto paused;
  }

send_last:

  if (dtmfsrc->dirty)
    if (!gst_rtp_dtmf_src_negotiate (basesrc))
      return GST_FLOW_NOT_NEGOTIATED;

  /* create buffer to hold the payload */
  *buffer = gst_rtp_dtmf_src_create_next_rtp_packet (dtmfsrc);

  if (dtmfsrc->redundancy_count)
    dtmfsrc->redundancy_count--;

  /* Only the very first one has a marker */
  dtmfsrc->first_packet = FALSE;

  /* This is the end of the event */
  if (dtmfsrc->last_packet == TRUE && dtmfsrc->redundancy_count == 0) {

    g_slice_free (GstRTPDTMFPayload, dtmfsrc->payload);
    dtmfsrc->payload = NULL;

    dtmfsrc->last_packet = FALSE;
  }

  return GST_FLOW_OK;

paused_locked:

  GST_OBJECT_UNLOCK (dtmfsrc);

paused:

  if (dtmfsrc->payload) {
    dtmfsrc->first_packet = FALSE;
    dtmfsrc->last_packet = TRUE;
    /* Set the redundancy on the last packet */
    dtmfsrc->redundancy_count = dtmfsrc->packet_redundancy;
    goto send_last;
  } else {
    return GST_FLOW_FLUSHING;
  }

no_clock:
  GST_ELEMENT_ERROR (dtmfsrc, STREAM, MUX, ("No available clock"),
      ("No available clock"));
  gst_pad_pause_task (GST_BASE_SRC_PAD (dtmfsrc));
  return GST_FLOW_ERROR;
}
Example no. 22
static GstStateChangeReturn
gst_decklink_video_sink_change_state (GstElement * element,
    GstStateChange transition)
{
  GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (element);
  GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;

  switch (transition) {
    case GST_STATE_CHANGE_READY_TO_PAUSED:
      g_mutex_lock (&self->output->lock);
      self->output->clock_start_time = GST_CLOCK_TIME_NONE;
      self->output->clock_epoch += self->output->clock_last_time;
      self->output->clock_last_time = 0;
      self->output->clock_offset = 0;
      g_mutex_unlock (&self->output->lock);
      gst_element_post_message (element,
          gst_message_new_clock_provide (GST_OBJECT_CAST (element),
              self->output->clock, TRUE));
      break;
    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:{
      GstClock *clock, *audio_clock;

      clock = gst_element_get_clock (GST_ELEMENT_CAST (self));
      if (clock) {
        audio_clock = gst_decklink_output_get_audio_clock (self->output);
        if (clock && clock != self->output->clock && clock != audio_clock) {
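          /* slave the internal output clock to the selected pipeline clock */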
          gst_clock_set_master (self->output->clock, clock);
        }
        gst_object_unref (clock);
        if (audio_clock)
          gst_object_unref (audio_clock);
      } else {
        GST_ELEMENT_ERROR (self, STREAM, FAILED,
            (NULL), ("Need a clock to go to PLAYING"));
        ret = GST_STATE_CHANGE_FAILURE;
      }

      break;
    }
    default:
      break;
  }

  if (ret == GST_STATE_CHANGE_FAILURE)
    return ret;
  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
  if (ret == GST_STATE_CHANGE_FAILURE)
    return ret;

  switch (transition) {
    case GST_STATE_CHANGE_PAUSED_TO_READY:
      gst_element_post_message (element,
          gst_message_new_clock_lost (GST_OBJECT_CAST (element),
              self->output->clock));
      gst_clock_set_master (self->output->clock, NULL);
      // Reset calibration to make the clock reusable next time we use it
      gst_clock_set_calibration (self->output->clock, 0, 0, 1, 1);
      g_mutex_lock (&self->output->lock);
      self->output->clock_start_time = GST_CLOCK_TIME_NONE;
      self->output->clock_epoch += self->output->clock_last_time;
      self->output->clock_last_time = 0;
      self->output->clock_offset = 0;
      g_mutex_unlock (&self->output->lock);
      gst_decklink_video_sink_stop (self);
      break;
    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:{
      if (gst_decklink_video_sink_stop_scheduled_playback (self) ==
          GST_STATE_CHANGE_FAILURE)
        ret = GST_STATE_CHANGE_FAILURE;
      break;
    }
    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:{
      g_mutex_lock (&self->output->lock);
      if (self->output->start_scheduled_playback)
        self->output->start_scheduled_playback (self->output->videosink);
      g_mutex_unlock (&self->output->lock);
      break;
    }
    default:
      break;
  }

  return ret;
}
Example no. 23
static GstFlowReturn
new_sample_cb (GstElement * appsink, gpointer user_data)
{
  GstElement *appsrc = GST_ELEMENT (user_data);
  GstFlowReturn ret;
  GstSample *sample;
  GstBuffer *buffer;
  GstClockTime *base_time;
  GstPad *src, *sink;

  g_signal_emit_by_name (appsink, "pull-sample", &sample);

  if (sample == NULL)
    return GST_FLOW_OK;

  buffer = gst_sample_get_buffer (sample);

  if (buffer == NULL) {
    ret = GST_FLOW_OK;
    goto end;
  }

  gst_buffer_ref (buffer);

  buffer = gst_buffer_make_writable (buffer);

  KMS_ELEMENT_LOCK (GST_OBJECT_PARENT (appsrc));

  base_time =
      g_object_get_data (G_OBJECT (GST_OBJECT_PARENT (appsrc)), BASE_TIME_DATA);

  if (base_time == NULL) {
    GstClock *clock;

    clock = gst_element_get_clock (appsrc);
    base_time = g_slice_new0 (GstClockTime);

    g_object_set_data_full (G_OBJECT (GST_OBJECT_PARENT (appsrc)),
        BASE_TIME_DATA, base_time, release_gst_clock);
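    /* base time offset = the appsrc's current running time; it is added to the
     * incoming PTS/DTS below */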
    *base_time =
        gst_clock_get_time (clock) - gst_element_get_base_time (appsrc);
    g_object_unref (clock);
    GST_DEBUG ("Setting base time to: %" G_GUINT64_FORMAT, *base_time);
  }

  src = gst_element_get_static_pad (appsrc, "src");
  sink = gst_pad_get_peer (src);

  if (sink != NULL) {
    if (GST_OBJECT_FLAG_IS_SET (sink, GST_PAD_FLAG_EOS)) {
      GST_INFO_OBJECT (sink, "Sending flush events");
      gst_pad_send_event (sink, gst_event_new_flush_start ());
      gst_pad_send_event (sink, gst_event_new_flush_stop (FALSE));
    }
    g_object_unref (sink);
  }

  g_object_unref (src);

  if (GST_BUFFER_PTS_IS_VALID (buffer))
    buffer->pts += *base_time;
  if (GST_BUFFER_DTS_IS_VALID (buffer))
    buffer->dts += *base_time;

  KMS_ELEMENT_UNLOCK (GST_OBJECT_PARENT (appsrc));

  // TODO: Do something to fix a possible previous EOS event
  g_signal_emit_by_name (appsrc, "push-buffer", buffer, &ret);

  gst_buffer_unref (buffer);

  if (ret != GST_FLOW_OK) {
    /* something wrong */
    GST_ERROR ("Could not send buffer to appsrc %s. Cause: %s",
        GST_ELEMENT_NAME (appsrc), gst_flow_get_name (ret));
  }

end:
  if (sample != NULL)
    gst_sample_unref (sample);

  return ret;
}
Example no. 24
static GstFlowReturn
gst_fake_src_create (GstBaseSrc * basesrc, guint64 offset, guint length,
    GstBuffer ** ret)
{
  GstFakeSrc *src;
  GstBuffer *buf;
  GstClockTime time;
  gsize size;

  src = GST_FAKE_SRC (basesrc);

  buf = gst_fake_src_create_buffer (src, &size);
  GST_BUFFER_OFFSET (buf) = offset;

  if (src->datarate > 0) {
    time = (src->bytes_sent * GST_SECOND) / src->datarate;

    GST_BUFFER_DURATION (buf) = size * GST_SECOND / src->datarate;
  } else if (gst_base_src_is_live (basesrc)) {
    GstClock *clock;

    clock = gst_element_get_clock (GST_ELEMENT (src));

    if (clock) {
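      /* live source: timestamp with the element's current running time */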
      time = gst_clock_get_time (clock);
      time -= gst_element_get_base_time (GST_ELEMENT (src));
      gst_object_unref (clock);
    } else {
      /* not an error not to have a clock */
      time = GST_CLOCK_TIME_NONE;
    }
  } else {
    time = GST_CLOCK_TIME_NONE;
  }

  GST_BUFFER_DTS (buf) = time;
  GST_BUFFER_PTS (buf) = time;

  if (!src->silent) {
    gchar dts_str[64], pts_str[64], dur_str[64];
    gchar *flag_str;

    GST_OBJECT_LOCK (src);
    g_free (src->last_message);

    if (GST_BUFFER_DTS (buf) != GST_CLOCK_TIME_NONE) {
      g_snprintf (dts_str, sizeof (dts_str), "%" GST_TIME_FORMAT,
          GST_TIME_ARGS (GST_BUFFER_DTS (buf)));
    } else {
      g_strlcpy (dts_str, "none", sizeof (dts_str));
    }
    if (GST_BUFFER_PTS (buf) != GST_CLOCK_TIME_NONE) {
      g_snprintf (pts_str, sizeof (pts_str), "%" GST_TIME_FORMAT,
          GST_TIME_ARGS (GST_BUFFER_PTS (buf)));
    } else {
      g_strlcpy (pts_str, "none", sizeof (pts_str));
    }
    if (GST_BUFFER_DURATION (buf) != GST_CLOCK_TIME_NONE) {
      g_snprintf (dur_str, sizeof (dur_str), "%" GST_TIME_FORMAT,
          GST_TIME_ARGS (GST_BUFFER_DURATION (buf)));
    } else {
      g_strlcpy (dur_str, "none", sizeof (dur_str));
    }

    flag_str = gst_buffer_get_flags_string (buf);
    src->last_message =
        g_strdup_printf ("create   ******* (%s:%s) (%u bytes, dts: %s, pts:%s"
        ", duration: %s, offset: %" G_GINT64_FORMAT ", offset_end: %"
        G_GINT64_FORMAT ", flags: %08x %s) %p",
        GST_DEBUG_PAD_NAME (GST_BASE_SRC_CAST (src)->srcpad), (guint) size,
        dts_str, pts_str, dur_str, GST_BUFFER_OFFSET (buf),
        GST_BUFFER_OFFSET_END (buf), GST_MINI_OBJECT_CAST (buf)->flags,
        flag_str, buf);
    g_free (flag_str);
    GST_OBJECT_UNLOCK (src);

    g_object_notify_by_pspec ((GObject *) src, pspec_last_message);
  }

  if (src->signal_handoffs) {
    GST_LOG_OBJECT (src, "pre handoff emit");
    g_signal_emit (src, gst_fake_src_signals[SIGNAL_HANDOFF], 0, buf,
        basesrc->srcpad);
    GST_LOG_OBJECT (src, "post handoff emit");
  }

  src->bytes_sent += size;

  *ret = buf;
  return GST_FLOW_OK;
}
Example no. 25
static void
gst_decklink_video_sink_start_scheduled_playback (GstElement * element)
{
  GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (element);
  GstClockTime start_time;
  HRESULT res;
  bool active;

  if (self->output->video_enabled && (!self->output->audiosink
          || self->output->audio_enabled)
      && (GST_STATE (self) == GST_STATE_PLAYING
          || GST_STATE_PENDING (self) == GST_STATE_PLAYING)) {
    GstClock *clock = NULL;

    clock = gst_element_get_clock (element);
    if (!clock) {
      GST_ELEMENT_ERROR (self, STREAM, FAILED, (NULL),
          ("Scheduled playback supposed to start but we have no clock"));
      return;
    }
    // Need to unlock to get the clock time
    g_mutex_unlock (&self->output->lock);

    // FIXME: start time is the same for the complete pipeline,
    // but what we need here is the start time of this element!
    start_time = gst_element_get_base_time (element);
    if (start_time != GST_CLOCK_TIME_NONE)
      start_time = gst_clock_get_time (clock) - start_time;

    // FIXME: This will probably not work
    if (start_time == GST_CLOCK_TIME_NONE)
      start_time = 0;

    // Current times of internal and external clock when we go to
    // playing. We need this to convert the pipeline running time
    // to the running time of the hardware
    //
    // We can't use the normal base time for the external clock
    // because we might go to PLAYING later than the pipeline
    self->internal_base_time =
        gst_clock_get_internal_time (self->output->clock);
    self->external_base_time = gst_clock_get_internal_time (clock);

    convert_to_internal_clock (self, &start_time, NULL);

    g_mutex_lock (&self->output->lock);
    // Check if someone else started in the meantime
    if (self->output->started) {
      gst_object_unref (clock);
      return;
    }

    active = false;
    self->output->output->IsScheduledPlaybackRunning (&active);
    if (active) {
      GST_DEBUG_OBJECT (self, "Stopping scheduled playback");

      self->output->started = FALSE;

      res = self->output->output->StopScheduledPlayback (0, 0, 0);
      if (res != S_OK) {
        GST_ELEMENT_ERROR (self, STREAM, FAILED,
            (NULL), ("Failed to stop scheduled playback: 0x%08x", res));
        gst_object_unref (clock);
        return;
      }
    }

    GST_DEBUG_OBJECT (self,
        "Starting scheduled playback at %" GST_TIME_FORMAT,
        GST_TIME_ARGS (start_time));

    res =
        self->output->output->StartScheduledPlayback (start_time,
        GST_SECOND, 1.0);
    if (res != S_OK) {
      GST_ELEMENT_ERROR (self, STREAM, FAILED,
          (NULL), ("Failed to start scheduled playback: 0x%08x", res));
      gst_object_unref (clock);
      return;
    }

    self->output->started = TRUE;
    self->output->clock_restart = TRUE;

    // Need to unlock to get the clock time
    g_mutex_unlock (&self->output->lock);

    // Sample the clocks again to get the most accurate values
    // after we started scheduled playback
    self->internal_base_time =
        gst_clock_get_internal_time (self->output->clock);
    self->external_base_time = gst_clock_get_internal_time (clock);
    g_mutex_lock (&self->output->lock);
    gst_object_unref (clock);
  } else {
    GST_DEBUG_OBJECT (self, "Not starting scheduled playback yet");
  }
}
Example no. 26
static void
convert_to_internal_clock (GstDecklinkVideoSink * self,
    GstClockTime * timestamp, GstClockTime * duration)
{
  GstClock *clock, *audio_clock;

  g_assert (timestamp != NULL);

  clock = gst_element_get_clock (GST_ELEMENT_CAST (self));
  audio_clock = gst_decklink_output_get_audio_clock (self->output);
  if (clock && clock != self->output->clock && clock != audio_clock) {
    GstClockTime internal, external, rate_n, rate_d;
    gst_clock_get_calibration (self->output->clock, &internal, &external,
        &rate_n, &rate_d);

    if (self->internal_base_time != GST_CLOCK_TIME_NONE) {
      GstClockTime external_timestamp = *timestamp;
      GstClockTime base_time;

      // Convert to the running time corresponding to both clock times
      if (internal < self->internal_base_time)
        internal = 0;
      else
        internal -= self->internal_base_time;

      if (external < self->external_base_time)
        external = 0;
      else
        external -= self->external_base_time;

      // Convert timestamp to the "running time" since we started scheduled
      // playback, that is the difference between the pipeline's base time
      // and our own base time.
      base_time = gst_element_get_base_time (GST_ELEMENT_CAST (self));
      if (base_time > self->external_base_time)
        base_time = 0;
      else
        base_time = self->external_base_time - base_time;

      if (external_timestamp < base_time)
        external_timestamp = 0;
      else
        external_timestamp = external_timestamp - base_time;

      // Get the difference in the external time, note
      // that the running time is external time.
      // Then scale this difference and offset it to
      // our internal time. Now we have the running time
      // according to our internal clock.
      //
      // For the duration we just scale
      *timestamp =
          gst_clock_unadjust_with_calibration (NULL, external_timestamp,
          internal, external, rate_n, rate_d);

      GST_LOG_OBJECT (self,
          "Converted %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT " (internal: %"
          GST_TIME_FORMAT " external %" GST_TIME_FORMAT " rate: %lf)",
          GST_TIME_ARGS (external_timestamp), GST_TIME_ARGS (*timestamp),
          GST_TIME_ARGS (internal), GST_TIME_ARGS (external),
          ((gdouble) rate_n) / ((gdouble) rate_d));

      if (duration) {
        GstClockTime external_duration = *duration;

        *duration = gst_util_uint64_scale (external_duration, rate_d, rate_n);

        GST_LOG_OBJECT (self,
            "Converted duration %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT
            " (internal: %" GST_TIME_FORMAT " external %" GST_TIME_FORMAT
            " rate: %lf)", GST_TIME_ARGS (external_duration),
            GST_TIME_ARGS (*duration), GST_TIME_ARGS (internal),
            GST_TIME_ARGS (external), ((gdouble) rate_n) / ((gdouble) rate_d));
      }
    } else {
      GST_LOG_OBJECT (self, "No clock conversion needed, not started yet");
    }
  } else {
    GST_LOG_OBJECT (self, "No clock conversion needed, same clocks");
  }
}
Example no. 27
static GstFlowReturn
gst_dasf_src_create (GstAudioSrc *audiosrc,
						guint64 offset,
						guint length,
						GstBuffer **buffer)
{
	GstDasfSrc* self = GST_DASF_SRC (audiosrc);
	GstBaseAudioSrc *baseaudiosrc = GST_BASE_AUDIO_SRC (self);
	GstBuffer* gst_buffer = NULL;
	OMX_BUFFERHEADERTYPE* omx_buffer = NULL;
	GstDasfSrcPrivate* priv = GST_DASF_SRC_GET_PRIVATE (self);
	GstGooAudioFilter* me = self->peer_element;

	GST_DEBUG ("");

	if (me->component->cur_state != OMX_StateExecuting)
	{
		return GST_FLOW_UNEXPECTED;
	}

	GST_DEBUG ("goo stuff");
	{
		omx_buffer = goo_port_grab_buffer (me->outport);

		if (gst_pad_alloc_buffer (GST_BASE_SRC_PAD (self),
					  priv->outcount,
					  omx_buffer->nFilledLen,
					  GST_PAD_CAPS (GST_BASE_SRC_PAD (self)),
					  &gst_buffer) == GST_FLOW_OK)
		{
			if (GST_IS_GOO_BUFFER (gst_buffer))
			{
				memcpy (GST_BUFFER_DATA (gst_buffer),
					omx_buffer->pBuffer,
					omx_buffer->nFilledLen);

				goo_component_release_buffer (me->component,
							      omx_buffer);
			}
			else
 			{
				gst_buffer_unref (gst_buffer);
				gst_buffer = (GstBuffer*) gst_goo_buffer_new ();
				gst_goo_buffer_set_data (gst_buffer,
							 me->component,
							 omx_buffer);
			}
		}
		else
		{
			goto fail;
		}
	}


	GST_DEBUG ("gst stuff");
	{
		GstClock* clock = NULL;
		GstClockTime timestamp, duration;
		clock = gst_element_get_clock (GST_ELEMENT (self));

		timestamp = gst_clock_get_time (clock);
		timestamp -= gst_element_get_base_time (GST_ELEMENT (self));
		gst_object_unref (clock);
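		/* note: the running time computed above is not used; the buffer
		 * timestamp below is derived from the output buffer counter instead */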

		GST_BUFFER_TIMESTAMP (gst_buffer) = gst_util_uint64_scale_int (GST_SECOND, priv->outcount, 50);

		/* Set 20 millisecond duration */
		duration = gst_util_uint64_scale_int
			(GST_SECOND,
			 1,
			 50);

		GST_BUFFER_DURATION (gst_buffer) = duration;
		GST_BUFFER_OFFSET (gst_buffer) = priv->outcount++;
		GST_BUFFER_OFFSET_END (gst_buffer) = priv->outcount;

		gst_buffer_set_caps (gst_buffer,
		GST_PAD_CAPS (GST_BASE_SRC_PAD (self)));
	}

beach:
	*buffer = gst_buffer;
	return GST_FLOW_OK;

fail:
	if (G_LIKELY (*buffer))
	{
		gst_buffer_unref (*buffer);
	}

	return GST_FLOW_ERROR;
}
Example no. 28
void
gst_decklink_video_src_convert_to_external_clock (GstDecklinkVideoSrc * self,
    GstClockTime * timestamp, GstClockTime * duration)
{
  GstClock *clock;

  g_assert (timestamp != NULL);

  if (*timestamp == GST_CLOCK_TIME_NONE)
    return;

  clock = gst_element_get_clock (GST_ELEMENT_CAST (self));
  if (clock && clock != self->input->clock) {
    GstClockTime internal, external, rate_n, rate_d;
    GstClockTimeDiff external_start_time_diff;

    gst_clock_get_calibration (self->input->clock, &internal, &external,
        &rate_n, &rate_d);

    if (rate_n != rate_d && self->internal_base_time != GST_CLOCK_TIME_NONE) {
      GstClockTime internal_timestamp = *timestamp;

      // Convert to the running time corresponding to both clock times
      internal -= self->internal_base_time;
      external -= self->external_base_time;

      // Get the difference in the internal time, note
      // that the capture time is internal time.
      // Then scale this difference and offset it to
      // our external time. Now we have the running time
      // according to our external clock.
      //
      // For the duration we just scale
      *timestamp =
          gst_clock_adjust_with_calibration (NULL, internal_timestamp, internal,
          external, rate_n, rate_d);

      GST_LOG_OBJECT (self,
          "Converted %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT " (external: %"
          GST_TIME_FORMAT " internal %" GST_TIME_FORMAT " rate: %lf)",
          GST_TIME_ARGS (internal_timestamp), GST_TIME_ARGS (*timestamp),
          GST_TIME_ARGS (external), GST_TIME_ARGS (internal),
          ((gdouble) rate_n) / ((gdouble) rate_d));

      if (duration) {
        GstClockTime internal_duration = *duration;

        *duration = gst_util_uint64_scale (internal_duration, rate_d, rate_n);

        GST_LOG_OBJECT (self,
            "Converted duration %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT
            " (external: %" GST_TIME_FORMAT " internal %" GST_TIME_FORMAT
            " rate: %lf)", GST_TIME_ARGS (internal_duration),
            GST_TIME_ARGS (*duration), GST_TIME_ARGS (external),
            GST_TIME_ARGS (internal), ((gdouble) rate_n) / ((gdouble) rate_d));
      }
    } else {
      GST_LOG_OBJECT (self, "No clock conversion needed, relative rate is 1.0");
    }

    // Add the diff between the external time when we
    // went to playing and the external time when the
    // pipeline went to playing. Otherwise we will
    // always start outputting from 0 instead of the
    // current running time.
    external_start_time_diff =
        gst_element_get_base_time (GST_ELEMENT_CAST (self));
    external_start_time_diff =
        self->external_base_time - external_start_time_diff;
    *timestamp += external_start_time_diff;
  } else {
    GST_LOG_OBJECT (self, "No clock conversion needed, same clocks");
  }
}
Example no. 29
static GstFlowReturn
gst_gdiscreencapsrc_create (GstPushSrc * push_src, GstBuffer ** buf)
{
  GstGDIScreenCapSrc *src = GST_GDISCREENCAPSRC (push_src);
  GstBuffer *new_buf;
  gint new_buf_size;
  GstClock *clock;
  GstClockTime buf_time, buf_dur;
  guint64 frame_number;

  if (G_UNLIKELY (!src->info.bmiHeader.biWidth ||
          !src->info.bmiHeader.biHeight)) {
    GST_ELEMENT_ERROR (src, CORE, NEGOTIATION, (NULL),
        ("format wasn't negotiated before create function"));
    return GST_FLOW_NOT_NEGOTIATED;
  }

  new_buf_size = GST_ROUND_UP_4 (src->info.bmiHeader.biWidth * 3) *
      (-src->info.bmiHeader.biHeight);

  GST_LOG_OBJECT (src,
      "creating buffer of %d bytes with %dx%d image",
      new_buf_size, (gint) src->info.bmiHeader.biWidth,
      (gint) (-src->info.bmiHeader.biHeight));

  new_buf = gst_buffer_new_and_alloc (new_buf_size);

  clock = gst_element_get_clock (GST_ELEMENT (src));
  if (clock != NULL) {
    GstClockTime time, base_time;

    /* Calculate sync time. */

    time = gst_clock_get_time (clock);
    base_time = gst_element_get_base_time (GST_ELEMENT (src));
    buf_time = time - base_time;

    if (src->rate_numerator) {
      frame_number = gst_util_uint64_scale (buf_time,
          src->rate_numerator, GST_SECOND * src->rate_denominator);
    } else {
      frame_number = -1;
    }
  } else {
    buf_time = GST_CLOCK_TIME_NONE;
    frame_number = -1;
  }

  if (frame_number != -1 && frame_number == src->frame_number) {
    GstClockID id;
    GstClockReturn ret;

    /* Need to wait for the next frame */
    frame_number += 1;

    /* Figure out what the next frame time is */
    buf_time = gst_util_uint64_scale (frame_number,
        src->rate_denominator * GST_SECOND, src->rate_numerator);

    id = gst_clock_new_single_shot_id (clock,
        buf_time + gst_element_get_base_time (GST_ELEMENT (src)));
    GST_OBJECT_LOCK (src);
    src->clock_id = id;
    GST_OBJECT_UNLOCK (src);

    GST_DEBUG_OBJECT (src, "Waiting for next frame time %" G_GUINT64_FORMAT,
        buf_time);
    ret = gst_clock_id_wait (id, NULL);
    GST_OBJECT_LOCK (src);

    gst_clock_id_unref (id);
    src->clock_id = NULL;
    if (ret == GST_CLOCK_UNSCHEDULED) {
      /* Got woken up by the unlock function */
      GST_OBJECT_UNLOCK (src);
      return GST_FLOW_FLUSHING;
    }
    GST_OBJECT_UNLOCK (src);

    /* Duration is a complete 1/fps frame duration */
    buf_dur =
        gst_util_uint64_scale_int (GST_SECOND, src->rate_denominator,
        src->rate_numerator);
  } else if (frame_number != -1) {
    GstClockTime next_buf_time;

    GST_DEBUG_OBJECT (src, "No need to wait for next frame time %"
        G_GUINT64_FORMAT " next frame = %" G_GINT64_FORMAT " prev = %"
        G_GINT64_FORMAT, buf_time, frame_number, src->frame_number);
    next_buf_time = gst_util_uint64_scale (frame_number + 1,
        src->rate_denominator * GST_SECOND, src->rate_numerator);
    /* Frame duration is from now until the next expected capture time */
    buf_dur = next_buf_time - buf_time;
  } else {
    buf_dur = GST_CLOCK_TIME_NONE;
  }
  src->frame_number = frame_number;

  GST_BUFFER_TIMESTAMP (new_buf) = buf_time;
  GST_BUFFER_DURATION (new_buf) = buf_dur;

  /* Do screen capture and put it into buffer... */
  gst_gdiscreencapsrc_screen_capture (src, new_buf);

  gst_object_unref (clock);

  *buf = new_buf;
  return GST_FLOW_OK;
}
Example no. 30
static GstFlowReturn
gst_dtmf_src_create (GstBaseSrc * basesrc, guint64 offset,
    guint length, GstBuffer ** buffer)
{
  GstBuffer *buf = NULL;
  GstDTMFSrcEvent *event;
  GstDTMFSrc *dtmfsrc;
  GstClock *clock;
  GstClockID *clockid;
  GstClockReturn clockret;

  dtmfsrc = GST_DTMF_SRC (basesrc);

  do {

    if (dtmfsrc->last_event == NULL) {
      GST_DEBUG_OBJECT (dtmfsrc, "popping");
      event = g_async_queue_pop (dtmfsrc->event_queue);

      GST_DEBUG_OBJECT (dtmfsrc, "popped %d", event->event_type);

      switch (event->event_type) {
        case DTMF_EVENT_TYPE_STOP:
          GST_WARNING_OBJECT (dtmfsrc,
              "Received a DTMF stop event when already stopped");
          gst_dtmf_src_post_message (dtmfsrc, "dtmf-event-dropped", event);
          break;
        case DTMF_EVENT_TYPE_START:
          gst_dtmf_prepare_timestamps (dtmfsrc);

          event->packet_count = 0;
          dtmfsrc->last_event = event;
          event = NULL;
          gst_dtmf_src_post_message (dtmfsrc, "dtmf-event-processed",
              dtmfsrc->last_event);
          break;
        case DTMF_EVENT_TYPE_PAUSE_TASK:
          /*
           * We're pushing it back because it has to stay in there until
           * the task is really paused (and the queue will then be flushed)
           */
          GST_DEBUG_OBJECT (dtmfsrc, "pushing pause_task...");
          GST_OBJECT_LOCK (dtmfsrc);
          if (dtmfsrc->paused) {
            g_async_queue_push (dtmfsrc->event_queue, event);
            goto paused_locked;
          }
          GST_OBJECT_UNLOCK (dtmfsrc);
          break;
      }
      if (event)
        g_slice_free (GstDTMFSrcEvent, event);
    } else if (dtmfsrc->last_event->packet_count * dtmfsrc->interval >=
        MIN_DUTY_CYCLE) {
      event = g_async_queue_try_pop (dtmfsrc->event_queue);

      if (event != NULL) {

        switch (event->event_type) {
          case DTMF_EVENT_TYPE_START:
            GST_WARNING_OBJECT (dtmfsrc,
                "Received two consecutive DTMF start events");
            gst_dtmf_src_post_message (dtmfsrc, "dtmf-event-dropped", event);
            break;
          case DTMF_EVENT_TYPE_STOP:
            g_slice_free (GstDTMFSrcEvent, dtmfsrc->last_event);
            dtmfsrc->last_event = NULL;
            gst_dtmf_src_post_message (dtmfsrc, "dtmf-event-processed", event);
            break;
          case DTMF_EVENT_TYPE_PAUSE_TASK:
            /*
             * We're pushing it back because it has to stay in there until
             * the task is really paused (and the queue will then be flushed)
             */
            GST_DEBUG_OBJECT (dtmfsrc, "pushing pause_task...");

            GST_OBJECT_LOCK (dtmfsrc);
            if (dtmfsrc->paused) {
              g_async_queue_push (dtmfsrc->event_queue, event);
              goto paused_locked;
            }
            GST_OBJECT_UNLOCK (dtmfsrc);

            break;
        }
        g_slice_free (GstDTMFSrcEvent, event);
      }
    }
  } while (dtmfsrc->last_event == NULL);

  GST_LOG_OBJECT (dtmfsrc, "end event check, now wait for the proper time");

  clock = gst_element_get_clock (GST_ELEMENT (basesrc));

  clockid = gst_clock_new_single_shot_id (clock, dtmfsrc->timestamp +
      gst_element_get_base_time (GST_ELEMENT (dtmfsrc)));
  gst_object_unref (clock);

  GST_OBJECT_LOCK (dtmfsrc);
  if (!dtmfsrc->paused) {
    dtmfsrc->clockid = clockid;
    GST_OBJECT_UNLOCK (dtmfsrc);

    clockret = gst_clock_id_wait (clockid, NULL);

    GST_OBJECT_LOCK (dtmfsrc);
    if (dtmfsrc->paused)
      clockret = GST_CLOCK_UNSCHEDULED;
  } else {
    clockret = GST_CLOCK_UNSCHEDULED;
  }
  gst_clock_id_unref (clockid);
  dtmfsrc->clockid = NULL;
  GST_OBJECT_UNLOCK (dtmfsrc);

  if (clockret == GST_CLOCK_UNSCHEDULED) {
    goto paused;
  }

  buf = gst_dtmf_src_create_next_tone_packet (dtmfsrc, dtmfsrc->last_event);

  GST_LOG_OBJECT (dtmfsrc, "Created buffer of size %" G_GSIZE_FORMAT,
      gst_buffer_get_size (buf));
  *buffer = buf;

  return GST_FLOW_OK;

paused_locked:
  GST_OBJECT_UNLOCK (dtmfsrc);

paused:

  if (dtmfsrc->last_event) {
    GST_DEBUG_OBJECT (dtmfsrc, "Stopping current event");
    /* Don't forget to release the stream lock */
    g_slice_free (GstDTMFSrcEvent, dtmfsrc->last_event);
    dtmfsrc->last_event = NULL;
  }

  return GST_FLOW_FLUSHING;

}