Example #1
void ofxGstRTPServer::sendAudioOut(PooledAudioFrame * pooledFrame){
	GstClock * clock = gst_pipeline_get_clock(GST_PIPELINE(gst.getPipeline()));
	// gst_pipeline_get_clock() already returns a reference, no extra ref needed
	GstClockTime now = gst_clock_get_time (clock) - gst_element_get_base_time(gst.getPipeline());
	gst_object_unref (clock);
	if(firstAudioFrame && !audioAutoTimestamp){
		prevTimestampAudio = now;
		firstAudioFrame = false;
		return;
	}

	int size = pooledFrame->audioFrame._payloadDataLengthInSamples*2*pooledFrame->audioFrame._audioChannel;

	GstBuffer * echoCancelledBuffer = gst_buffer_new_wrapped_full(GST_MEMORY_FLAG_READONLY,
			(void*)pooledFrame->audioFrame._payloadData, size, 0, size,
			pooledFrame, (GDestroyNotify)&ofxWebRTCAudioPool::relaseFrame);

	if(!audioAutoTimestamp){
		GstClockTime duration = (pooledFrame->audioFrame._payloadDataLengthInSamples * GST_SECOND / pooledFrame->audioFrame._frequencyInHz);
		GstClockTime now = prevTimestampAudio + duration;

		GST_BUFFER_OFFSET(echoCancelledBuffer) = numFrameAudio++;
		GST_BUFFER_OFFSET_END(echoCancelledBuffer) = numFrameAudio;
		GST_BUFFER_DTS (echoCancelledBuffer) = now;
		GST_BUFFER_PTS (echoCancelledBuffer) = now;
		GST_BUFFER_DURATION(echoCancelledBuffer) = duration;
		prevTimestampAudio = now;
	}


	GstFlowReturn flow_return = gst_app_src_push_buffer((GstAppSrc*)appSrcAudio, echoCancelledBuffer);
	if (flow_return != GST_FLOW_OK) {
		ofLogError(LOG_NAME) << "error pushing audio buffer: flow_return was " << flow_return;
	}
}
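
Nearly every example on this page computes the same quantity: an element's running time, i.e. the pipeline clock's current time minus the element's base time. A minimal sketch of that shared pattern (the helper name get_running_time is ours, not a GStreamer API):

/* Return the current running time of an element, or GST_CLOCK_TIME_NONE
 * if the element has no clock yet (e.g. the pipeline is not PLAYING).
 * gst_element_get_clock() returns a reference that must be dropped. */
static GstClockTime
get_running_time (GstElement * element)
{
  GstClock *clock = gst_element_get_clock (element);
  GstClockTime running_time;

  if (clock == NULL)
    return GST_CLOCK_TIME_NONE;

  running_time = gst_clock_get_time (clock)
      - gst_element_get_base_time (element);
  gst_object_unref (clock);

  return running_time;
}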
Example #2
static void
gst_rtp_dtmf_prepare_timestamps (GstRTPDTMFSrc * dtmfsrc)
{
  GstClock *clock;
  GstClockTime base_time;

#ifdef MAEMO_BROKEN
  base_time = 0;
#else
  base_time = gst_element_get_base_time (GST_ELEMENT (dtmfsrc));
#endif

  clock = gst_element_get_clock (GST_ELEMENT (dtmfsrc));
  if (clock != NULL) {
    dtmfsrc->timestamp = gst_clock_get_time (clock)
        + (MIN_INTER_DIGIT_INTERVAL * GST_MSECOND) - base_time;
    dtmfsrc->start_timestamp = dtmfsrc->timestamp;
    gst_object_unref (clock);
  } else {
    gchar *dtmf_name = gst_element_get_name (dtmfsrc);
    GST_ERROR_OBJECT (dtmfsrc, "No clock set for element %s", dtmf_name);
    dtmfsrc->timestamp = GST_CLOCK_TIME_NONE;
    g_free (dtmf_name);
  }

  dtmfsrc->rtp_timestamp = dtmfsrc->ts_base +
      gst_util_uint64_scale_int (gst_segment_to_running_time (&GST_BASE_SRC
          (dtmfsrc)->segment, GST_FORMAT_TIME, dtmfsrc->timestamp),
      dtmfsrc->clock_rate, GST_SECOND);
}
Example #3
static gboolean
gst_rtp_dtmf_prepare_timestamps (GstRTPDTMFSrc * dtmfsrc)
{
  GstClockTime last_stop;

  GST_OBJECT_LOCK (dtmfsrc);
  last_stop = dtmfsrc->last_stop;
  GST_OBJECT_UNLOCK (dtmfsrc);

  if (GST_CLOCK_TIME_IS_VALID (last_stop)) {
    dtmfsrc->start_timestamp = last_stop;
  } else {
    GstClock *clock = gst_element_get_clock (GST_ELEMENT (dtmfsrc));

    if (clock == NULL)
      return FALSE;

    dtmfsrc->start_timestamp = gst_clock_get_time (clock)
        - gst_element_get_base_time (GST_ELEMENT (dtmfsrc));
    gst_object_unref (clock);
  }

  /* If the last stop was in the past, then let's add the buffers together */
  if (dtmfsrc->start_timestamp < dtmfsrc->timestamp)
    dtmfsrc->start_timestamp = dtmfsrc->timestamp;

  dtmfsrc->timestamp = dtmfsrc->start_timestamp;

  dtmfsrc->rtp_timestamp = dtmfsrc->ts_base +
      gst_util_uint64_scale_int (gst_segment_to_running_time (&GST_BASE_SRC
          (dtmfsrc)->segment, GST_FORMAT_TIME, dtmfsrc->timestamp),
      dtmfsrc->clock_rate, GST_SECOND);

  return TRUE;
}
Example #4
static void
gst_dtmf_prepare_timestamps (GstDTMFSrc * dtmfsrc)
{
  GstClockTime last_stop;
  GstClockTime timestamp;

  GST_OBJECT_LOCK (dtmfsrc);
  last_stop = dtmfsrc->last_stop;
  GST_OBJECT_UNLOCK (dtmfsrc);

  if (GST_CLOCK_TIME_IS_VALID (last_stop)) {
    timestamp = last_stop;
  } else {
    GstClock *clock;

    /* If there is no valid start time, let's use now as the start time */

    clock = gst_element_get_clock (GST_ELEMENT (dtmfsrc));
    if (clock != NULL) {
      timestamp = gst_clock_get_time (clock)
          - gst_element_get_base_time (GST_ELEMENT (dtmfsrc));
      gst_object_unref (clock);
    } else {
      gchar *dtmf_name = gst_element_get_name (dtmfsrc);
      GST_ERROR_OBJECT (dtmfsrc, "No clock set for element %s", dtmf_name);
      dtmfsrc->timestamp = GST_CLOCK_TIME_NONE;
      g_free (dtmf_name);
      return;
    }
  }

  /* Make sure the timestamp always goes forward */
  if (timestamp > dtmfsrc->timestamp)
    dtmfsrc->timestamp = timestamp;
}
Example #5
static gboolean
gst_dshowvideosrc_push_buffer (guint8 * buffer, guint size, gpointer src_object,
    GstClockTime duration)
{
  GstDshowVideoSrc *src = GST_DSHOWVIDEOSRC (src_object);
  GstBuffer *buf = NULL;
  IPin *pPin = NULL;
  HRESULT hres = S_FALSE;
  AM_MEDIA_TYPE *pMediaType = NULL;

  if (!buffer || size == 0 || !src) {
    return FALSE;
  }

  /* create a new buffer assign to it the clock time as timestamp */
  buf = gst_buffer_new_and_alloc (size);

  GST_BUFFER_SIZE (buf) = size;

  GstClock *clock = gst_element_get_clock (GST_ELEMENT (src));
  GST_BUFFER_TIMESTAMP (buf) =
    GST_CLOCK_DIFF (gst_element_get_base_time (GST_ELEMENT (src)), gst_clock_get_time (clock));
  gst_object_unref (clock);

  GST_BUFFER_DURATION (buf) = duration;

  if (src->is_rgb) {
    /* FOR RGB directshow decoder will return bottom-up BITMAP
     * There is probably a way to get top-bottom video frames from
     * the decoder...
     */
    gint line = 0;
    gint stride = size / src->height;

    for (; line < src->height; line++) {
      memcpy (GST_BUFFER_DATA (buf) + (line * stride),
          buffer + (size - ((line + 1) * (stride))), stride);
    }
  } else {
    memcpy (GST_BUFFER_DATA (buf), buffer, size);
  }

  GST_DEBUG ("push_buffer => pts %" GST_TIME_FORMAT "duration %"
      GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
      GST_TIME_ARGS (duration));

  /* the negotiate() method already set caps on the source pad */
  gst_buffer_set_caps (buf, GST_PAD_CAPS (GST_BASE_SRC_PAD (src)));

  g_mutex_lock (src->buffer_mutex);
  if (src->buffer != NULL)
    gst_buffer_unref (src->buffer);
  src->buffer = buf;
  g_cond_signal (src->buffer_cond);
  g_mutex_unlock (src->buffer_mutex);

  return TRUE;
}
Example #6
GstClockTime ofxGstRTPServer::getTimeStamp(){
	if(!gst.isLoaded()) return GST_CLOCK_TIME_NONE;
	GstClock * clock = gst_pipeline_get_clock(GST_PIPELINE(gst.getPipeline()));

	// gst_pipeline_get_clock() already returns a reference, no extra ref needed
	GstClockTime now = gst_clock_get_time (clock) - gst_element_get_base_time(gst.getPipeline());
	gst_object_unref (clock);
	return now;
}
Example #7
static gboolean
pause_play_stream (GstElement * bin, gint seconds)
{
  gboolean punch_in;
  GstStateChangeReturn ret;
  GstClockTime now, base_time, running_time;

  /* get current running time, we need this value to continue playback of
   * non-live pipelines. */
  now = gst_clock_get_time (theclock);
  base_time = gst_element_get_base_time (bin);

  running_time = now - base_time;

  /* set the new bin to PAUSED, the parent bin will notice (because of the ASYNC
   * message) and will perform latency calculations again when going to PLAYING
   * later. */
  ret = gst_element_set_state (bin, GST_STATE_PAUSED);

  switch (ret) {
    case GST_STATE_CHANGE_NO_PREROLL:
      /* live source, timestamps are running_time of the pipeline clock. */
      punch_in = FALSE;
      break;
    case GST_STATE_CHANGE_SUCCESS:
      /* success, no async state changes, same as async, timestamps start
       * from 0 */
    case GST_STATE_CHANGE_ASYNC:
      /* no live source, bin will preroll. We have to punch it in because in
       * this situation timestamps start from 0.  */
      punch_in = TRUE;
      break;
    default:
    case GST_STATE_CHANGE_FAILURE:
      return FALSE;
  }

  if (seconds)
    g_usleep (seconds * G_USEC_PER_SEC);

  if (punch_in) {
    /* new bin has to be aligned with previous running_time. We do this by taking
     * the current absolute clock time and calculating the base time that would
     * give the previous running_time. We set this base_time on the bin before
     * setting it to PLAYING. */
    now = gst_clock_get_time (theclock);
    base_time = now - running_time;

    gst_element_set_base_time (bin, base_time);
  }

  /* now set the pipeline to PLAYING */
  gst_element_set_state (bin, GST_STATE_PLAYING);

  return TRUE;
}
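
The punch-in above works because running_time = clock_time - base_time: picking base_time = now - running_time makes the restarted bin's 0-based timestamps continue from the pipeline's previous position instead of jumping back to zero.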
Example #8
/* spice_gst_decoder_queue_frame() queues the SpiceFrame for decoding and
 * displaying. The steps it goes through are as follows:
 *
 * 1) A SpiceGstFrame is created to keep track of SpiceFrame and some additional
 *    metadata. The SpiceGstFrame is then pushed to the decoding_queue.
 * 2) frame->data, which contains the compressed frame data, is reffed and
 *    wrapped in a GstBuffer which is pushed to the GStreamer pipeline for
 *    decoding.
 * 3) As soon as the GStreamer pipeline no longer needs the compressed frame it
 *    will call frame->unref_data() to free it.
 * 4) Once the decompressed frame is available the GStreamer pipeline calls
 *    new_sample() in the GStreamer thread.
 * 5) new_sample() then matches the decompressed frame to a SpiceGstFrame from
 *    the decoding queue using the GStreamer timestamp information to deal with
 *    dropped frames. The SpiceGstFrame is popped from the decoding_queue.
 * 6) new_sample() then attaches the decompressed frame to the SpiceGstFrame,
 *    pushes it to the display_queue and calls schedule_frame().
 * 7) schedule_frame() then uses gstframe->frame->mm_time to arrange for
 *    display_frame() to be called, in the main thread, at the right time for
 *    the next frame.
 * 8) display_frame() pops the first SpiceGstFrame from the display_queue and
 *    calls stream_display_frame().
 * 9) display_frame() then frees the SpiceGstFrame, which frees the SpiceFrame
 *    and decompressed frame with it.
 */
static gboolean spice_gst_decoder_queue_frame(VideoDecoder *video_decoder,
                                              SpiceFrame *frame, int latency)
{
    SpiceGstDecoder *decoder = (SpiceGstDecoder*)video_decoder;

    if (frame->size == 0) {
        SPICE_DEBUG("got an empty frame buffer!");
        frame->free(frame);
        return TRUE;
    }

    if (frame->mm_time < decoder->last_mm_time) {
        SPICE_DEBUG("new-frame-time < last-frame-time (%u < %u):"
                    " resetting stream",
                    frame->mm_time, decoder->last_mm_time);
        /* Let GStreamer deal with the frame anyway */
    }
    decoder->last_mm_time = frame->mm_time;

    if (latency < 0 &&
        decoder->base.codec_type == SPICE_VIDEO_CODEC_TYPE_MJPEG) {
        /* Dropping MJPEG frames has no impact on those that follow and
         * saves CPU so do it.
         */
        SPICE_DEBUG("dropping a late MJPEG frame");
        frame->free(frame);
        return TRUE;
    }

    if (decoder->pipeline == NULL) {
        /* An error occurred, causing the GStreamer pipeline to be freed */
        spice_warning("An error occurred, stopping the video stream");
        return FALSE;
    }

    /* ref() the frame data for the buffer */
    frame->ref_data(frame->data_opaque);
    GstBuffer *buffer = gst_buffer_new_wrapped_full(GST_MEMORY_FLAG_PHYSICALLY_CONTIGUOUS,
                                                    frame->data, frame->size, 0, frame->size,
                                                    frame->data_opaque, frame->unref_data);

    GST_BUFFER_DURATION(buffer) = GST_CLOCK_TIME_NONE;
    GST_BUFFER_DTS(buffer) = GST_CLOCK_TIME_NONE;
    GST_BUFFER_PTS(buffer) = gst_clock_get_time(decoder->clock) - gst_element_get_base_time(decoder->pipeline) + ((uint64_t)MAX(0, latency)) * 1000 * 1000;

    g_mutex_lock(&decoder->queues_mutex);
    g_queue_push_tail(decoder->decoding_queue, create_gst_frame(buffer, frame));
    g_mutex_unlock(&decoder->queues_mutex);

    if (gst_app_src_push_buffer(decoder->appsrc, buffer) != GST_FLOW_OK) {
        SPICE_DEBUG("GStreamer error: unable to push frame of size %u", frame->size);
        stream_dropped_frame_on_playback(decoder->base.stream);
    }
    return TRUE;
}
Example #9
static void
gst_wrapper_camera_bin_reset_video_src_caps (GstWrapperCameraBinSrc * self,
    GstCaps * caps)
{
  GstClock *clock;
  gint64 base_time;

  GST_DEBUG_OBJECT (self, "Resetting src caps to %" GST_PTR_FORMAT, caps);
  if (self->src_vid_src) {
    clock = gst_element_get_clock (self->src_vid_src);
    base_time = gst_element_get_base_time (self->src_vid_src);

    gst_element_set_state (self->src_vid_src, GST_STATE_READY);
    set_capsfilter_caps (self, caps);

    self->drop_newseg = TRUE;

    GST_DEBUG_OBJECT (self, "Bringing source up");
    gst_element_sync_state_with_parent (self->src_vid_src);

    if (clock) {
      gst_element_set_clock (self->src_vid_src, clock);
      gst_element_set_base_time (self->src_vid_src, base_time);

      if (GST_IS_BIN (self->src_vid_src)) {
        GstIterator *it =
            gst_bin_iterate_elements (GST_BIN (self->src_vid_src));
        gpointer item = NULL;
        gboolean done = FALSE;
        while (!done) {
          switch (gst_iterator_next (it, &item)) {
            case GST_ITERATOR_OK:
              gst_element_set_base_time (GST_ELEMENT (item), base_time);
              gst_object_unref (item);
              break;
            case GST_ITERATOR_RESYNC:
              gst_iterator_resync (it);
              break;
            case GST_ITERATOR_ERROR:
              done = TRUE;
              break;
            case GST_ITERATOR_DONE:
              done = TRUE;
              break;
          }
        }
        gst_iterator_free (it);
      }

      gst_object_unref (clock);
    }
  }
}
Example #10
static GstStateChangeReturn
gst_decklink_video_sink_stop_scheduled_playback (GstDecklinkVideoSink * self)
{
  GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
  GstClockTime start_time;
  HRESULT res;
  GstClock *clock;

  if (!self->output->started)
    return ret;

  clock = gst_element_get_clock (GST_ELEMENT_CAST (self));
  if (clock) {
    // FIXME: start time is the same for the complete pipeline,
    // but what we need here is the start time of this element!
    start_time = gst_element_get_base_time (GST_ELEMENT (self));
    if (start_time != GST_CLOCK_TIME_NONE)
      start_time = gst_clock_get_time (clock) - start_time;

    // FIXME: This will probably not work
    if (start_time == GST_CLOCK_TIME_NONE)
      start_time = 0;

    convert_to_internal_clock (self, &start_time, NULL);

    // The start time is now the running time when we stopped
    // playback

    gst_object_unref (clock);
  } else {
    GST_WARNING_OBJECT (self,
        "No clock, stopping scheduled playback immediately");
    start_time = 0;
  }

  GST_DEBUG_OBJECT (self,
      "Stopping scheduled playback at %" GST_TIME_FORMAT,
      GST_TIME_ARGS (start_time));

  g_mutex_lock (&self->output->lock);
  self->output->started = FALSE;
  g_mutex_unlock (&self->output->lock);
  res = self->output->output->StopScheduledPlayback (start_time, 0, GST_SECOND);
  if (res != S_OK) {
    GST_ELEMENT_ERROR (self, STREAM, FAILED,
        (NULL), ("Failed to stop scheduled playback: 0x%08x", res));
    ret = GST_STATE_CHANGE_FAILURE;
  }
  self->internal_base_time = GST_CLOCK_TIME_NONE;
  self->external_base_time = GST_CLOCK_TIME_NONE;

  return ret;
}
Example #11
static void on_new_decoded_pad(GstElement *introdec, 
                               GstPad *srcpad,
                               gpointer data)
{  
   GstPadLinkReturn result;
   GstPad *sinkpad;
   GstCaps *new_pad_caps;

   CustomData *cdata=(CustomData*)data;
   GstElement *introbin=cdata->introbin;

   new_pad_caps=gst_pad_query_caps(srcpad,NULL);
   g_print("Caps:%s\n",gst_caps_to_string(new_pad_caps));

   /* Setup src pad offset, sync with pipeline. */
   gint64 pos2;
   pos2=gst_element_get_base_time(cdata->pipeline);
   GstClock *clock;
   clock=gst_pipeline_get_clock(GST_PIPELINE(cdata->pipeline));
   GstClockTime clock_time;
   clock_time=gst_clock_get_time(clock);
   gst_object_unref(clock);
//   g_print("Pipeline times: base_time=%lld\n clock_time=%lld\n",
//		            pos2,clock_time);
   gst_pad_set_offset(srcpad,clock_time-pos2);
   cdata->introbin_offset=clock_time-pos2;

   if(strncmp(gst_caps_to_string(new_pad_caps),"video",5)==0) {
       GstElement *vqueue;
       vqueue=gst_bin_get_by_name(GST_BIN(introbin),"introscale");
       sinkpad=gst_element_get_static_pad(vqueue,"sink");
       result=gst_pad_link(srcpad,sinkpad);
       if(result!=GST_PAD_LINK_OK) {
          g_printerr("Couldn't link introbin decodebin video pad...\n");
       }
       gst_object_unref(vqueue);
   }
   if(strncmp(gst_caps_to_string(new_pad_caps),"audio",5)==0) {
       GstElement *arate;
       arate=gst_bin_get_by_name(GST_BIN(introbin),"introaudiorate");
       sinkpad=gst_element_get_static_pad(arate,"sink");
       result=gst_pad_link(srcpad,sinkpad);
       if(result!=GST_PAD_LINK_OK) {
          GstCaps *peer_caps;
          peer_caps=gst_pad_query_caps(sinkpad,NULL);
          g_print("SinkCaps:%s\n",gst_caps_to_string(peer_caps));
          g_printerr("Couldn't link introbin decodebin audio pad...\n");
          gst_caps_unref(peer_caps);
       }
       gst_object_unref(arate);
   }
   gst_caps_unref(new_pad_caps);
}
Example #12
static gboolean 
state_change(GstElement *newbin, guint seconds)
{
    gboolean kick;
    GstStateChangeReturn chret;
    GstClockTime base;
    GstClockTime now;
    GstClockTime offset;

    now = gst_clock_get_time(pipeclock);
    base = gst_element_get_base_time(newbin);
    offset = now - base;

    /* change state and evaluate return */
    chret = gst_element_set_state(newbin, GST_STATE_PAUSED);

    switch (chret) {
        case GST_STATE_CHANGE_NO_PREROLL:
            /* live source, timestamps are running_time of the
             * pipeline clock. */
            g_printf("no preroll\n");
            kick = FALSE;
            break;
        case GST_STATE_CHANGE_SUCCESS:
            /* success, no async state changes, same as async,
             * timestamps start from 0 */
            g_printf("changed immediately\n");
            /* fall through */
        case GST_STATE_CHANGE_ASYNC:
            /* no live source, bin will preroll. We have to punch it in
             * because in this situation timestamps start from 0. */
            g_printf("will change async\n");
            kick = TRUE;
            break;
        default:
        case GST_STATE_CHANGE_FAILURE:
            return FALSE;
    }

    if(seconds)
        g_usleep(seconds * G_USEC_PER_SEC);

    if(kick){
        now = gst_clock_get_time(pipeclock);
        base = now - offset;

        gst_element_set_base_time(newbin, base);
    }

    return TRUE;
}
Example #13
void ofxGstRTPServer::emitDepthKeyFrame(){
	GstClock * clock = gst_pipeline_get_clock(GST_PIPELINE(gst.getPipeline()));
	// gst_pipeline_get_clock() already returns a reference, no extra ref needed
	GstClockTime time = gst_clock_get_time (clock);
	GstClockTime now =  time - gst_element_get_base_time(gst.getPipeline());
	gst_object_unref (clock);
	GstEvent * keyFrameEvent = gst_video_event_new_downstream_force_key_unit(now,
															 time,
															 now,
															 TRUE,
															 0);
	gst_element_send_event(appSrcDepth,keyFrameEvent);

}
Example #14
static GstFlowReturn gst_imx_v4l2src_fill(GstPushSrc *src, GstBuffer *buf)
{
	GstImxV4l2VideoSrc *v4l2src = GST_IMX_V4L2SRC(src);
	GstClockTime ts;

	GST_LOG_OBJECT(v4l2src, "fill");

	ts = gst_clock_get_time(GST_ELEMENT(v4l2src)->clock);
	if (ts != GST_CLOCK_TIME_NONE)
		ts -= gst_element_get_base_time(GST_ELEMENT(v4l2src));
	else
		ts = v4l2src->count * v4l2src->time_per_frame;
	v4l2src->count++;

	GST_BUFFER_TIMESTAMP(buf) = ts;
	GST_BUFFER_DURATION(buf) = v4l2src->time_per_frame;
	return GST_FLOW_OK;
}
Example #15
/* receive spectral data from element message */
static gboolean
message_handler (GstBus * bus, GstMessage * message, gpointer data)
{
  if (message->type == GST_MESSAGE_ELEMENT) {
    const GstStructure *s = gst_message_get_structure (message);
    const gchar *name = gst_structure_get_name (s);

    if (strcmp (name, "spectrum") == 0) {
      GstElement *spectrum = GST_ELEMENT (GST_MESSAGE_SRC (message));
      GstClockTime timestamp, duration;
      GstClockTime waittime = GST_CLOCK_TIME_NONE;

      if (gst_structure_get_clock_time (s, "running-time", &timestamp) &&
          gst_structure_get_clock_time (s, "duration", &duration)) {
        /* wait for middle of buffer */
        waittime = timestamp + duration / 2;
      } else if (gst_structure_get_clock_time (s, "endtime", &timestamp)) {
        waittime = timestamp;
      }
      if (GST_CLOCK_TIME_IS_VALID (waittime)) {
        GstClockID clock_id;
        GstClockTime basetime = gst_element_get_base_time (spectrum);
        gfloat *spect = g_new (gfloat, spect_bands);
        const GValue *list;
        const GValue *value;
        guint i;

        list = gst_structure_get_value (s, "magnitude");
        for (i = 0; i < spect_bands; ++i) {
          value = gst_value_list_get_value (list, i);
          spect[i] = height_scale * g_value_get_float (value);
        }

        clock_id =
            gst_clock_new_single_shot_id (sync_clock, waittime + basetime);
        gst_clock_id_wait_async (clock_id, delayed_spectrum_update,
            (gpointer) spect);
        gst_clock_id_unref (clock_id);
      }
    }
  }
  return TRUE;
}
Example #16
/* called when we need to give data to appsrc */
static void
need_data (GstElement * appsrc, guint unused, MyContext * ctx)
{
  GstBuffer *buffer;
  guint size;
  GstFlowReturn ret;

  size = 385 * 288 * 2;

  buffer = gst_buffer_new_allocate (NULL, size, NULL);

  /* this makes the image black/white */
  gst_buffer_memset (buffer, 0, ctx->white ? 0xff : 0x0, size);

  ctx->white = !ctx->white;

  /* get timestamp from clock */
  //ctx->timestamp = gst_clock_get_time (global_clock);

  g_print("Time is: %"G_GUINT64_FORMAT" \n", ctx->timestamp);

  //remove basetime
  //ctx->timestamp -= gst_element_get_base_time (GST_ELEMENT (appsrc));

  GST_BUFFER_PTS (buffer) = ctx->timestamp;
  g_print("PTS: %"G_GUINT64_FORMAT" BASETIME %"G_GUINT64_FORMAT" SUM %"G_GUINT64_FORMAT" \n",
      ctx->timestamp, gst_element_get_base_time(appsrc),
      ctx->timestamp + gst_element_get_base_time(appsrc));

  GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale_int (1, GST_SECOND, 2);

  ctx->timestamp += GST_BUFFER_DURATION (buffer);

  g_signal_emit_by_name (appsrc, "push-buffer", buffer, &ret);
}
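
Instead of maintaining ctx->timestamp by hand as above, a live appsrc can stamp buffers with the running time itself. A minimal sketch of that configuration, using standard appsrc/GstBaseSrc properties (the surrounding pipeline setup is omitted):

/* Let appsrc timestamp incoming buffers with the current running time.
 * "is-live" and "do-timestamp" come from GstBaseSrc; "format" makes
 * appsrc produce a TIME segment. */
g_object_set (appsrc,
    "is-live", TRUE,
    "do-timestamp", TRUE,
    "format", GST_FORMAT_TIME,
    NULL);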
Example #17
gboolean introbin_set_pad_offset(CustomData *data)
{
  gint64 pos2;
  pos2=gst_element_get_base_time(data->pipeline);
  GstClock *clock;
  clock=gst_pipeline_get_clock(GST_PIPELINE(data->pipeline));
  GstClockTime clock_time;
  clock_time=gst_clock_get_time(clock);
  gst_object_unref(clock);
  g_print("Pipeline times: base_time=%lld\n clock_time=%lld",
		            pos2,clock_time);
  GstElement *dec=gst_bin_get_by_name(GST_BIN(data->introbin),"introdec");
  GstPad *src_pad1,*src_pad2;
  src_pad1=gst_element_get_static_pad(GST_ELEMENT(dec),"src_0");
  gst_pad_set_offset(src_pad1,clock_time-pos2);
  gst_object_unref(src_pad1);
  src_pad2=gst_element_get_static_pad(GST_ELEMENT(dec),"src_1");
  gst_pad_set_offset(src_pad2,clock_time-pos2);
  gst_object_unref(src_pad2);
  gst_object_unref(dec);

  return TRUE;
}
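
Examples #11 and #17 feed the same running-time value to gst_pad_set_offset(), which shifts the running time of all data flowing through the pad: offsetting a freshly linked branch whose timestamps start at 0 by the pipeline's current running time lines it up with the rest of the pipeline, the pad-level counterpart of the base-time punch-in in Example #7.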
Example #18
static void
gst_dtmf_prepare_timestamps (GstDTMFSrc * dtmfsrc)
{
  GstClock *clock;
  GstClockTime base_time;

  base_time = gst_element_get_base_time (GST_ELEMENT (dtmfsrc));

  clock = gst_element_get_clock (GST_ELEMENT (dtmfsrc));
  if (clock != NULL) {
#ifdef MAEMO_BROKEN
    dtmfsrc->timestamp = gst_clock_get_time (clock);
#else
    dtmfsrc->timestamp = gst_clock_get_time (clock) - base_time;
#endif
    gst_object_unref (clock);
  } else {
    gchar *dtmf_name = gst_element_get_name (dtmfsrc);
    GST_ERROR_OBJECT (dtmfsrc, "No clock set for element %s", dtmf_name);
    dtmfsrc->timestamp = GST_CLOCK_TIME_NONE;
    g_free (dtmf_name);
  }
}
Example #19
static GstFlowReturn
gst_rtp_dtmf_src_create (GstBaseSrc * basesrc, guint64 offset,
    guint length, GstBuffer ** buffer)
{
  GstRTPDTMFSrcEvent *event;
  GstRTPDTMFSrc *dtmfsrc;
  GstClock *clock;
  GstClockID *clockid;
  GstClockReturn clockret;
  GstMessage *message;
  GQueue messages = G_QUEUE_INIT;

  dtmfsrc = GST_RTP_DTMF_SRC (basesrc);

  do {

    if (dtmfsrc->payload == NULL) {
      GST_DEBUG_OBJECT (dtmfsrc, "popping");
      event = g_async_queue_pop (dtmfsrc->event_queue);

      GST_DEBUG_OBJECT (dtmfsrc, "popped %d", event->event_type);

      switch (event->event_type) {
        case RTP_DTMF_EVENT_TYPE_STOP:
          GST_WARNING_OBJECT (dtmfsrc,
              "Received a DTMF stop event when already stopped");
          gst_dtmf_src_post_message (dtmfsrc, "dtmf-event-dropped", event);
          break;

        case RTP_DTMF_EVENT_TYPE_START:
          dtmfsrc->first_packet = TRUE;
          dtmfsrc->last_packet = FALSE;
          /* Set the redundancy on the first packet */
          dtmfsrc->redundancy_count = dtmfsrc->packet_redundancy;
          if (!gst_rtp_dtmf_prepare_timestamps (dtmfsrc))
            goto no_clock;

          g_queue_push_tail (&messages,
              gst_dtmf_src_prepare_message (dtmfsrc, "dtmf-event-processed",
                  event));
          dtmfsrc->payload = event->payload;
          dtmfsrc->payload->duration =
              dtmfsrc->ptime * dtmfsrc->clock_rate / 1000;
          event->payload = NULL;
          break;

        case RTP_DTMF_EVENT_TYPE_PAUSE_TASK:
          /*
           * We're pushing it back because it has to stay in there until
           * the task is really paused (and the queue will then be flushed)
           */
          GST_OBJECT_LOCK (dtmfsrc);
          if (dtmfsrc->paused) {
            g_async_queue_push (dtmfsrc->event_queue, event);
            goto paused_locked;
          }
          GST_OBJECT_UNLOCK (dtmfsrc);
          break;
      }

      gst_rtp_dtmf_src_event_free (event);
    } else if (!dtmfsrc->first_packet && !dtmfsrc->last_packet &&
        (dtmfsrc->timestamp - dtmfsrc->start_timestamp) / GST_MSECOND >=
        MIN_PULSE_DURATION) {
      GST_DEBUG_OBJECT (dtmfsrc, "try popping");
      event = g_async_queue_try_pop (dtmfsrc->event_queue);


      if (event != NULL) {
        GST_DEBUG_OBJECT (dtmfsrc, "try popped %d", event->event_type);

        switch (event->event_type) {
          case RTP_DTMF_EVENT_TYPE_START:
            GST_WARNING_OBJECT (dtmfsrc,
                "Received two consecutive DTMF start events");
            gst_dtmf_src_post_message (dtmfsrc, "dtmf-event-dropped", event);
            break;

          case RTP_DTMF_EVENT_TYPE_STOP:
            dtmfsrc->first_packet = FALSE;
            dtmfsrc->last_packet = TRUE;
            /* Set the redundancy on the last packet */
            dtmfsrc->redundancy_count = dtmfsrc->packet_redundancy;
            g_queue_push_tail (&messages,
                gst_dtmf_src_prepare_message (dtmfsrc, "dtmf-event-processed",
                    event));
            break;

          case RTP_DTMF_EVENT_TYPE_PAUSE_TASK:
            /*
             * We're pushing it back because it has to stay in there until
             * the task is really paused (and the queue will then be flushed)
             */
            GST_DEBUG_OBJECT (dtmfsrc, "pushing pause_task...");
            GST_OBJECT_LOCK (dtmfsrc);
            if (dtmfsrc->paused) {
              g_async_queue_push (dtmfsrc->event_queue, event);
              goto paused_locked;
            }
            GST_OBJECT_UNLOCK (dtmfsrc);
            break;
        }
        gst_rtp_dtmf_src_event_free (event);
      }
    }
  } while (dtmfsrc->payload == NULL);


  GST_DEBUG_OBJECT (dtmfsrc, "Processed events, now lets wait on the clock");

  clock = gst_element_get_clock (GST_ELEMENT (basesrc));
  if (!clock)
    goto no_clock;
  clockid = gst_clock_new_single_shot_id (clock, dtmfsrc->timestamp +
      gst_element_get_base_time (GST_ELEMENT (dtmfsrc)));
  gst_object_unref (clock);

  GST_OBJECT_LOCK (dtmfsrc);
  if (!dtmfsrc->paused) {
    dtmfsrc->clockid = clockid;
    GST_OBJECT_UNLOCK (dtmfsrc);

    clockret = gst_clock_id_wait (clockid, NULL);

    GST_OBJECT_LOCK (dtmfsrc);
    if (dtmfsrc->paused)
      clockret = GST_CLOCK_UNSCHEDULED;
  } else {
    clockret = GST_CLOCK_UNSCHEDULED;
  }
  gst_clock_id_unref (clockid);
  dtmfsrc->clockid = NULL;
  GST_OBJECT_UNLOCK (dtmfsrc);

  while ((message = g_queue_pop_head (&messages)) != NULL)
    gst_element_post_message (GST_ELEMENT (dtmfsrc), message);

  if (clockret == GST_CLOCK_UNSCHEDULED) {
    goto paused;
  }

send_last:

  if (dtmfsrc->dirty)
    if (!gst_rtp_dtmf_src_negotiate (basesrc))
      return GST_FLOW_NOT_NEGOTIATED;

  /* create buffer to hold the payload */
  *buffer = gst_rtp_dtmf_src_create_next_rtp_packet (dtmfsrc);

  if (dtmfsrc->redundancy_count)
    dtmfsrc->redundancy_count--;

  /* Only the very first one has a marker */
  dtmfsrc->first_packet = FALSE;

  /* This is the end of the event */
  if (dtmfsrc->last_packet == TRUE && dtmfsrc->redundancy_count == 0) {

    g_slice_free (GstRTPDTMFPayload, dtmfsrc->payload);
    dtmfsrc->payload = NULL;

    dtmfsrc->last_packet = FALSE;
  }

  return GST_FLOW_OK;

paused_locked:

  GST_OBJECT_UNLOCK (dtmfsrc);

paused:

  if (dtmfsrc->payload) {
    dtmfsrc->first_packet = FALSE;
    dtmfsrc->last_packet = TRUE;
    /* Set the redundancy on the last packet */
    dtmfsrc->redundancy_count = dtmfsrc->packet_redundancy;
    goto send_last;
  } else {
    return GST_FLOW_FLUSHING;
  }

no_clock:
  GST_ELEMENT_ERROR (dtmfsrc, STREAM, MUX, ("No available clock"),
      ("No available clock"));
  gst_pad_pause_task (GST_BASE_SRC_PAD (dtmfsrc));
  return GST_FLOW_ERROR;
}
Example #20
static void
gst_decklink_video_sink_start_scheduled_playback (GstElement * element)
{
  GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (element);
  GstClockTime start_time;
  HRESULT res;
  bool active;

  if (self->output->video_enabled && (!self->output->audiosink
          || self->output->audio_enabled)
      && (GST_STATE (self) == GST_STATE_PLAYING
          || GST_STATE_PENDING (self) == GST_STATE_PLAYING)) {
    GstClock *clock = NULL;

    clock = gst_element_get_clock (element);
    if (!clock) {
      GST_ELEMENT_ERROR (self, STREAM, FAILED, (NULL),
          ("Scheduled playback supposed to start but we have no clock"));
      return;
    }
    // Need to unlock to get the clock time
    g_mutex_unlock (&self->output->lock);

    // FIXME: start time is the same for the complete pipeline,
    // but what we need here is the start time of this element!
    start_time = gst_element_get_base_time (element);
    if (start_time != GST_CLOCK_TIME_NONE)
      start_time = gst_clock_get_time (clock) - start_time;

    // FIXME: This will probably not work
    if (start_time == GST_CLOCK_TIME_NONE)
      start_time = 0;

    // Current times of internal and external clock when we go to
    // playing. We need this to convert the pipeline running time
    // to the running time of the hardware
    //
    // We can't use the normal base time for the external clock
    // because we might go to PLAYING later than the pipeline
    self->internal_base_time =
        gst_clock_get_internal_time (self->output->clock);
    self->external_base_time = gst_clock_get_internal_time (clock);

    convert_to_internal_clock (self, &start_time, NULL);

    g_mutex_lock (&self->output->lock);
    // Check if someone else started in the meantime
    if (self->output->started) {
      gst_object_unref (clock);
      return;
    }

    active = false;
    self->output->output->IsScheduledPlaybackRunning (&active);
    if (active) {
      GST_DEBUG_OBJECT (self, "Stopping scheduled playback");

      self->output->started = FALSE;

      res = self->output->output->StopScheduledPlayback (0, 0, 0);
      if (res != S_OK) {
        GST_ELEMENT_ERROR (self, STREAM, FAILED,
            (NULL), ("Failed to stop scheduled playback: 0x%08x", res));
        gst_object_unref (clock);
        return;
      }
    }

    GST_DEBUG_OBJECT (self,
        "Starting scheduled playback at %" GST_TIME_FORMAT,
        GST_TIME_ARGS (start_time));

    res =
        self->output->output->StartScheduledPlayback (start_time,
        GST_SECOND, 1.0);
    if (res != S_OK) {
      GST_ELEMENT_ERROR (self, STREAM, FAILED,
          (NULL), ("Failed to start scheduled playback: 0x%08x", res));
      gst_object_unref (clock);
      return;
    }

    self->output->started = TRUE;
    self->output->clock_restart = TRUE;

    // Need to unlock to get the clock time
    g_mutex_unlock (&self->output->lock);

    // Sample the clocks again to get the most accurate values
    // after we started scheduled playback
    self->internal_base_time =
        gst_clock_get_internal_time (self->output->clock);
    self->external_base_time = gst_clock_get_internal_time (clock);
    g_mutex_lock (&self->output->lock);
    gst_object_unref (clock);
  } else {
    GST_DEBUG_OBJECT (self, "Not starting scheduled playback yet");
  }
}
Example #21
static void
convert_to_internal_clock (GstDecklinkVideoSink * self,
    GstClockTime * timestamp, GstClockTime * duration)
{
  GstClock *clock, *audio_clock;

  g_assert (timestamp != NULL);

  clock = gst_element_get_clock (GST_ELEMENT_CAST (self));
  audio_clock = gst_decklink_output_get_audio_clock (self->output);
  if (clock && clock != self->output->clock && clock != audio_clock) {
    GstClockTime internal, external, rate_n, rate_d;
    gst_clock_get_calibration (self->output->clock, &internal, &external,
        &rate_n, &rate_d);

    if (self->internal_base_time != GST_CLOCK_TIME_NONE) {
      GstClockTime external_timestamp = *timestamp;
      GstClockTime base_time;

      // Convert to the running time corresponding to both clock times
      if (internal < self->internal_base_time)
        internal = 0;
      else
        internal -= self->internal_base_time;

      if (external < self->external_base_time)
        external = 0;
      else
        external -= self->external_base_time;

      // Convert timestamp to the "running time" since we started scheduled
      // playback, that is the difference between the pipeline's base time
      // and our own base time.
      base_time = gst_element_get_base_time (GST_ELEMENT_CAST (self));
      if (base_time > self->external_base_time)
        base_time = 0;
      else
        base_time = self->external_base_time - base_time;

      if (external_timestamp < base_time)
        external_timestamp = 0;
      else
        external_timestamp = external_timestamp - base_time;

      // Get the difference in the external time, note
      // that the running time is external time.
      // Then scale this difference and offset it to
      // our internal time. Now we have the running time
      // according to our internal clock.
      //
      // For the duration we just scale
      *timestamp =
          gst_clock_unadjust_with_calibration (NULL, external_timestamp,
          internal, external, rate_n, rate_d);

      GST_LOG_OBJECT (self,
          "Converted %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT " (internal: %"
          GST_TIME_FORMAT " external %" GST_TIME_FORMAT " rate: %lf)",
          GST_TIME_ARGS (external_timestamp), GST_TIME_ARGS (*timestamp),
          GST_TIME_ARGS (internal), GST_TIME_ARGS (external),
          ((gdouble) rate_n) / ((gdouble) rate_d));

      if (duration) {
        GstClockTime external_duration = *duration;

        *duration = gst_util_uint64_scale (external_duration, rate_d, rate_n);

        GST_LOG_OBJECT (self,
            "Converted duration %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT
            " (internal: %" GST_TIME_FORMAT " external %" GST_TIME_FORMAT
            " rate: %lf)", GST_TIME_ARGS (external_duration),
            GST_TIME_ARGS (*duration), GST_TIME_ARGS (internal),
            GST_TIME_ARGS (external), ((gdouble) rate_n) / ((gdouble) rate_d));
      }
    } else {
      GST_LOG_OBJECT (self, "No clock conversion needed, not started yet");
    }
  } else {
    GST_LOG_OBJECT (self, "No clock conversion needed, same clocks");
  }

  if (clock)
    gst_object_unref (clock);
}
Example #22
static gboolean
gst_dshowvideosrc_push_buffer (guint8 * buffer, guint size, gpointer src_object,
                               GstClockTime duration)
{
    GstDshowVideoSrc *src = GST_DSHOWVIDEOSRC (src_object);
    GstBuffer *buf = NULL;
    GstMapInfo map;
    IPin *pPin = NULL;
    HRESULT hres = S_FALSE;
    AM_MEDIA_TYPE *pMediaType = NULL;

    if (!buffer || size == 0 || !src) {
        return FALSE;
    }

    /* create a new buffer assign to it the clock time as timestamp */
    buf = gst_buffer_new_and_alloc (size);

    gst_buffer_set_size(buf, size);

    GstClock *clock = gst_element_get_clock (GST_ELEMENT (src));
    GST_BUFFER_PTS (buf) =
        GST_CLOCK_DIFF (gst_element_get_base_time (GST_ELEMENT (src)), gst_clock_get_time (clock));
    //GST_BUFFER_DTS(buf) = GST_BUFFER_PTS (buf);
    GST_BUFFER_DTS(buf) = GST_CLOCK_TIME_NONE;
    GST_BUFFER_OFFSET(buf) = src->offset++;
    GST_BUFFER_OFFSET_END(buf) = src->offset;
    GST_BUFFER_FLAG_SET(buf, GST_BUFFER_FLAG_LIVE);

    gst_object_unref (clock);

    GST_BUFFER_DURATION (buf) = duration;

    gst_buffer_map(buf, &map, GST_MAP_WRITE);

    if (src->is_rgb) {
        /* FOR RGB directshow decoder will return bottom-up BITMAP
        * There is probably a way to get top-bottom video frames from
        * the decoder...
        */
        gint line = 0;
        gint stride = size / src->height;

        for (; line < src->height; line++) {
            memcpy (map.data + (line * stride),
                    buffer + (size - ((line + 1) * (stride))), stride);
        }
    } else {
        memcpy (map.data, buffer, size);
    }

    gst_buffer_unmap(buf, &map);

    src->time += duration;
    gst_object_sync_values (GST_OBJECT (src), src->time);

    GST_DEBUG ("push_buffer => pts %" GST_TIME_FORMAT "duration %"
               GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
               GST_TIME_ARGS (duration));

    g_mutex_lock (&src->buffer_mutex);
    if (src->buffer != NULL)
        gst_buffer_unref (src->buffer);
    src->buffer = buf;
    g_cond_signal (&src->buffer_cond);
    g_mutex_unlock (&src->buffer_mutex);

    return TRUE;
}
Example #23
static GstFlowReturn
gst_gdiscreencapsrc_create (GstPushSrc * push_src, GstBuffer ** buf)
{
  GstGDIScreenCapSrc *src = GST_GDISCREENCAPSRC (push_src);
  GstBuffer *new_buf;
  GstFlowReturn res;
  gint new_buf_size;
  GstClock *clock;
  GstClockTime time;
  GstClockTime base_time;

  if (G_UNLIKELY (!src->info.bmiHeader.biWidth ||
          !src->info.bmiHeader.biHeight)) {
    GST_ELEMENT_ERROR (src, CORE, NEGOTIATION, (NULL),
        ("format wasn't negotiated before create function"));
    return GST_FLOW_NOT_NEGOTIATED;
  } else if (G_UNLIKELY (src->rate_numerator == 0 && src->frames == 1)) {
    GST_DEBUG_OBJECT (src, "eos: 0 framerate, frame %d", (gint) src->frames);
    return GST_FLOW_UNEXPECTED;
  }

  new_buf_size = GST_ROUND_UP_4 (src->info.bmiHeader.biWidth * 3) *
      (-src->info.bmiHeader.biHeight);

  GST_LOG_OBJECT (src,
      "creating buffer of %lu bytes with %dx%d image for frame %d",
      new_buf_size, src->info.bmiHeader.biWidth,
      -src->info.bmiHeader.biHeight, (gint) src->frames);

  res =
      gst_pad_alloc_buffer_and_set_caps (GST_BASE_SRC_PAD (src),
      GST_BUFFER_OFFSET_NONE, new_buf_size,
      GST_PAD_CAPS (GST_BASE_SRC_PAD (push_src)), &new_buf);
  if (res != GST_FLOW_OK) {
    GST_DEBUG_OBJECT (src, "could not allocate buffer, reason %s",
        gst_flow_get_name (res));
    return res;
  }

  clock = gst_element_get_clock (GST_ELEMENT (src));
  if (clock) {
    /* Calculate sync time. */
    GstClockTime frame_time =
        gst_util_uint64_scale_int (src->frames * GST_SECOND,
        src->rate_denominator, src->rate_numerator);

    time = gst_clock_get_time (clock);
    base_time = gst_element_get_base_time (GST_ELEMENT (src));
    GST_BUFFER_TIMESTAMP (new_buf) = MAX (time - base_time, frame_time);
  } else {
    GST_BUFFER_TIMESTAMP (new_buf) = GST_CLOCK_TIME_NONE;
  }

  /* Do screen capture and put it into buffer... */
  gst_gdiscreencapsrc_screen_capture (src, new_buf);

  if (src->rate_numerator) {
    GST_BUFFER_DURATION (new_buf) =
        gst_util_uint64_scale_int (GST_SECOND,
        src->rate_denominator, src->rate_numerator);
    if (clock) {
      GST_BUFFER_DURATION (new_buf) =
          MAX (GST_BUFFER_DURATION (new_buf),
          gst_clock_get_time (clock) - time);
    }
  } else {
    /* NONE means forever */
    GST_BUFFER_DURATION (new_buf) = GST_CLOCK_TIME_NONE;
  }

  GST_BUFFER_OFFSET (new_buf) = src->frames;
  src->frames++;
  GST_BUFFER_OFFSET_END (new_buf) = src->frames;

  if (clock)
    gst_object_unref (clock);

  *buf = new_buf;
  return GST_FLOW_OK;
}
Example #24
static GstFlowReturn
gst_dtmf_src_create (GstBaseSrc * basesrc, guint64 offset,
    guint length, GstBuffer ** buffer)
{
  GstBuffer *buf = NULL;
  GstDTMFSrcEvent *event;
  GstDTMFSrc *dtmfsrc;
  GstClock *clock;
  GstClockID *clockid;
  GstClockReturn clockret;

  dtmfsrc = GST_DTMF_SRC (basesrc);

  do {

    if (dtmfsrc->last_event == NULL) {
      GST_DEBUG_OBJECT (dtmfsrc, "popping");
      event = g_async_queue_pop (dtmfsrc->event_queue);

      GST_DEBUG_OBJECT (dtmfsrc, "popped %d", event->event_type);

      switch (event->event_type) {
        case DTMF_EVENT_TYPE_STOP:
          GST_WARNING_OBJECT (dtmfsrc,
              "Received a DTMF stop event when already stopped");
          gst_dtmf_src_post_message (dtmfsrc, "dtmf-event-dropped", event);
          break;
        case DTMF_EVENT_TYPE_START:
          gst_dtmf_prepare_timestamps (dtmfsrc);

          event->packet_count = 0;
          dtmfsrc->last_event = event;
          event = NULL;
          gst_dtmf_src_post_message (dtmfsrc, "dtmf-event-processed",
              dtmfsrc->last_event);
          break;
        case DTMF_EVENT_TYPE_PAUSE_TASK:
          /*
           * We're pushing it back because it has to stay in there until
           * the task is really paused (and the queue will then be flushed)
           */
          GST_DEBUG_OBJECT (dtmfsrc, "pushing pause_task...");
          GST_OBJECT_LOCK (dtmfsrc);
          if (dtmfsrc->paused) {
            g_async_queue_push (dtmfsrc->event_queue, event);
            goto paused_locked;
          }
          GST_OBJECT_UNLOCK (dtmfsrc);
          break;
      }
      if (event)
        g_slice_free (GstDTMFSrcEvent, event);
    } else if (dtmfsrc->last_event->packet_count * dtmfsrc->interval >=
        MIN_DUTY_CYCLE) {
      event = g_async_queue_try_pop (dtmfsrc->event_queue);

      if (event != NULL) {

        switch (event->event_type) {
          case DTMF_EVENT_TYPE_START:
            GST_WARNING_OBJECT (dtmfsrc,
                "Received two consecutive DTMF start events");
            gst_dtmf_src_post_message (dtmfsrc, "dtmf-event-dropped", event);
            break;
          case DTMF_EVENT_TYPE_STOP:
            g_slice_free (GstDTMFSrcEvent, dtmfsrc->last_event);
            dtmfsrc->last_event = NULL;
            gst_dtmf_src_post_message (dtmfsrc, "dtmf-event-processed", event);
            break;
          case DTMF_EVENT_TYPE_PAUSE_TASK:
            /*
             * We're pushing it back because it has to stay in there until
             * the task is really paused (and the queue will then be flushed)
             */
            GST_DEBUG_OBJECT (dtmfsrc, "pushing pause_task...");

            GST_OBJECT_LOCK (dtmfsrc);
            if (dtmfsrc->paused) {
              g_async_queue_push (dtmfsrc->event_queue, event);
              goto paused_locked;
            }
            GST_OBJECT_UNLOCK (dtmfsrc);

            break;
        }
        g_slice_free (GstDTMFSrcEvent, event);
      }
    }
  } while (dtmfsrc->last_event == NULL);

  GST_LOG_OBJECT (dtmfsrc, "end event check, now wait for the proper time");

  clock = gst_element_get_clock (GST_ELEMENT (basesrc));

  clockid = gst_clock_new_single_shot_id (clock, dtmfsrc->timestamp +
      gst_element_get_base_time (GST_ELEMENT (dtmfsrc)));
  gst_object_unref (clock);

  GST_OBJECT_LOCK (dtmfsrc);
  if (!dtmfsrc->paused) {
    dtmfsrc->clockid = clockid;
    GST_OBJECT_UNLOCK (dtmfsrc);

    clockret = gst_clock_id_wait (clockid, NULL);

    GST_OBJECT_LOCK (dtmfsrc);
    if (dtmfsrc->paused)
      clockret = GST_CLOCK_UNSCHEDULED;
  } else {
    clockret = GST_CLOCK_UNSCHEDULED;
  }
  gst_clock_id_unref (clockid);
  dtmfsrc->clockid = NULL;
  GST_OBJECT_UNLOCK (dtmfsrc);

  if (clockret == GST_CLOCK_UNSCHEDULED) {
    goto paused;
  }

  buf = gst_dtmf_src_create_next_tone_packet (dtmfsrc, dtmfsrc->last_event);

  GST_LOG_OBJECT (dtmfsrc, "Created buffer of size %" G_GSIZE_FORMAT,
      gst_buffer_get_size (buf));
  *buffer = buf;

  return GST_FLOW_OK;

paused_locked:
  GST_OBJECT_UNLOCK (dtmfsrc);

paused:

  if (dtmfsrc->last_event) {
    GST_DEBUG_OBJECT (dtmfsrc, "Stopping current event");
    /* Don't forget to release the stream lock */
    g_slice_free (GstDTMFSrcEvent, dtmfsrc->last_event);
    dtmfsrc->last_event = NULL;
  }

  return GST_FLOW_FLUSHING;

}
Example #25
static GstFlowReturn
new_sample_cb (GstElement * appsink, gpointer user_data)
{
  GstElement *appsrc = GST_ELEMENT (user_data);
  GstFlowReturn ret;
  GstSample *sample;
  GstBuffer *buffer;
  GstClockTime *base_time;
  GstPad *src, *sink;

  g_signal_emit_by_name (appsink, "pull-sample", &sample);

  if (sample == NULL)
    return GST_FLOW_OK;

  buffer = gst_sample_get_buffer (sample);

  if (buffer == NULL) {
    ret = GST_FLOW_OK;
    goto end;
  }

  gst_buffer_ref (buffer);

  buffer = gst_buffer_make_writable (buffer);

  KMS_ELEMENT_LOCK (GST_OBJECT_PARENT (appsrc));

  base_time =
      g_object_get_data (G_OBJECT (GST_OBJECT_PARENT (appsrc)), BASE_TIME_DATA);

  if (base_time == NULL) {
    GstClock *clock;

    clock = gst_element_get_clock (appsrc);
    base_time = g_slice_new0 (GstClockTime);

    g_object_set_data_full (G_OBJECT (GST_OBJECT_PARENT (appsrc)),
        BASE_TIME_DATA, base_time, release_gst_clock);
    *base_time =
        gst_clock_get_time (clock) - gst_element_get_base_time (appsrc);
    g_object_unref (clock);
    GST_DEBUG ("Setting base time to: %" G_GUINT64_FORMAT, *base_time);
  }

  src = gst_element_get_static_pad (appsrc, "src");
  sink = gst_pad_get_peer (src);

  if (sink != NULL) {
    if (GST_OBJECT_FLAG_IS_SET (sink, GST_PAD_FLAG_EOS)) {
      GST_INFO_OBJECT (sink, "Sending flush events");
      gst_pad_send_event (sink, gst_event_new_flush_start ());
      gst_pad_send_event (sink, gst_event_new_flush_stop (FALSE));
    }
    g_object_unref (sink);
  }

  g_object_unref (src);

  if (GST_BUFFER_PTS_IS_VALID (buffer))
    buffer->pts += *base_time;
  if (GST_BUFFER_DTS_IS_VALID (buffer))
    buffer->dts += *base_time;

  KMS_ELEMENT_UNLOCK (GST_OBJECT_PARENT (appsrc));

  // TODO: Do something to fix a possible previous EOS event
  g_signal_emit_by_name (appsrc, "push-buffer", buffer, &ret);

  gst_buffer_unref (buffer);

  if (ret != GST_FLOW_OK) {
    /* something wrong */
    GST_ERROR ("Could not send buffer to appsrc %s. Cause: %s",
        GST_ELEMENT_NAME (appsrc), gst_flow_get_name (ret));
  }

end:
  if (sample != NULL)
    gst_sample_unref (sample);

  return ret;
}
Example #26
static GstFlowReturn
gst_fake_src_create (GstBaseSrc * basesrc, guint64 offset, guint length,
    GstBuffer ** ret)
{
  GstFakeSrc *src;
  GstBuffer *buf;
  GstClockTime time;
  gsize size;

  src = GST_FAKE_SRC (basesrc);

  buf = gst_fake_src_create_buffer (src, &size);
  GST_BUFFER_OFFSET (buf) = offset;

  if (src->datarate > 0) {
    time = (src->bytes_sent * GST_SECOND) / src->datarate;

    GST_BUFFER_DURATION (buf) = size * GST_SECOND / src->datarate;
  } else if (gst_base_src_is_live (basesrc)) {
    GstClock *clock;

    clock = gst_element_get_clock (GST_ELEMENT (src));

    if (clock) {
      time = gst_clock_get_time (clock);
      time -= gst_element_get_base_time (GST_ELEMENT (src));
      gst_object_unref (clock);
    } else {
      /* not an error not to have a clock */
      time = GST_CLOCK_TIME_NONE;
    }
  } else {
    time = GST_CLOCK_TIME_NONE;
  }

  GST_BUFFER_DTS (buf) = time;
  GST_BUFFER_PTS (buf) = time;

  if (!src->silent) {
    gchar dts_str[64], pts_str[64], dur_str[64];
    gchar *flag_str;

    GST_OBJECT_LOCK (src);
    g_free (src->last_message);

    if (GST_BUFFER_DTS (buf) != GST_CLOCK_TIME_NONE) {
      g_snprintf (dts_str, sizeof (dts_str), "%" GST_TIME_FORMAT,
          GST_TIME_ARGS (GST_BUFFER_DTS (buf)));
    } else {
      g_strlcpy (dts_str, "none", sizeof (dts_str));
    }
    if (GST_BUFFER_PTS (buf) != GST_CLOCK_TIME_NONE) {
      g_snprintf (pts_str, sizeof (pts_str), "%" GST_TIME_FORMAT,
          GST_TIME_ARGS (GST_BUFFER_PTS (buf)));
    } else {
      g_strlcpy (pts_str, "none", sizeof (pts_str));
    }
    if (GST_BUFFER_DURATION (buf) != GST_CLOCK_TIME_NONE) {
      g_snprintf (dur_str, sizeof (dur_str), "%" GST_TIME_FORMAT,
          GST_TIME_ARGS (GST_BUFFER_DURATION (buf)));
    } else {
      g_strlcpy (dur_str, "none", sizeof (dur_str));
    }

    flag_str = gst_buffer_get_flags_string (buf);
    src->last_message =
        g_strdup_printf ("create   ******* (%s:%s) (%u bytes, dts: %s, pts:%s"
        ", duration: %s, offset: %" G_GINT64_FORMAT ", offset_end: %"
        G_GINT64_FORMAT ", flags: %08x %s) %p",
        GST_DEBUG_PAD_NAME (GST_BASE_SRC_CAST (src)->srcpad), (guint) size,
        dts_str, pts_str, dur_str, GST_BUFFER_OFFSET (buf),
        GST_BUFFER_OFFSET_END (buf), GST_MINI_OBJECT_CAST (buf)->flags,
        flag_str, buf);
    g_free (flag_str);
    GST_OBJECT_UNLOCK (src);

    g_object_notify_by_pspec ((GObject *) src, pspec_last_message);
  }

  if (src->signal_handoffs) {
    GST_LOG_OBJECT (src, "pre handoff emit");
    g_signal_emit (src, gst_fake_src_signals[SIGNAL_HANDOFF], 0, buf,
        basesrc->srcpad);
    GST_LOG_OBJECT (src, "post handoff emit");
  }

  src->bytes_sent += size;

  *ret = buf;
  return GST_FLOW_OK;
}
Example #27
static GstFlowReturn
gst_dasf_src_create (GstAudioSrc *audiosrc,
						guint64 offset,
						guint length,
						GstBuffer **buffer)
{
	GstDasfSrc* self = GST_DASF_SRC (audiosrc);
	GstBaseAudioSrc *baseaudiosrc = GST_BASE_AUDIO_SRC (self);
	GstBuffer* gst_buffer = NULL;
	OMX_BUFFERHEADERTYPE* omx_buffer = NULL;
	GstDasfSrcPrivate* priv = GST_DASF_SRC_GET_PRIVATE (self);
	GstGooAudioFilter* me = self->peer_element;

	GST_DEBUG ("");

	if (me->component->cur_state != OMX_StateExecuting)
	{
		return GST_FLOW_UNEXPECTED;
	}

	GST_DEBUG ("goo stuff");
	{
		omx_buffer = goo_port_grab_buffer (me->outport);

		if (gst_pad_alloc_buffer (GST_BASE_SRC_PAD (self),
					  priv->outcount,
					  omx_buffer->nFilledLen,
					  GST_PAD_CAPS (GST_BASE_SRC_PAD (self)),
					  &gst_buffer) == GST_FLOW_OK)
		{
			if (GST_IS_GOO_BUFFER (gst_buffer))
			{
				memcpy (GST_BUFFER_DATA (gst_buffer),
					omx_buffer->pBuffer,
					omx_buffer->nFilledLen);

				goo_component_release_buffer (me->component,
							      omx_buffer);
			}
			else
 			{
				gst_buffer_unref (gst_buffer);
				gst_buffer = (GstBuffer*) gst_goo_buffer_new ();
				gst_goo_buffer_set_data (gst_buffer,
							 me->component,
							 omx_buffer);
			}
		}
		else
		{
			goto fail;
		}
	}


	GST_DEBUG ("gst stuff");
	{
		GstClock* clock = NULL;
		GstClockTime timestamp, duration;
		clock = gst_element_get_clock (GST_ELEMENT (self));

		timestamp = gst_clock_get_time (clock);
		timestamp -= gst_element_get_base_time (GST_ELEMENT (self));
		gst_object_unref (clock);

		GST_BUFFER_TIMESTAMP (gst_buffer) = gst_util_uint64_scale_int (GST_SECOND, priv->outcount, 50);

		/* Set 20 millisecond duration */
		duration = gst_util_uint64_scale_int
			(GST_SECOND,
			 1,
			 50);

		GST_BUFFER_DURATION (gst_buffer) = duration;
		GST_BUFFER_OFFSET (gst_buffer) = priv->outcount++;
		GST_BUFFER_OFFSET_END (gst_buffer) = priv->outcount;

		gst_buffer_set_caps (gst_buffer,
		GST_PAD_CAPS (GST_BASE_SRC_PAD (self)));
	}

beach:
	*buffer = gst_buffer;
	return GST_FLOW_OK;

fail:
	if (G_LIKELY (*buffer))
	{
		gst_buffer_unref (*buffer);
	}

	return GST_FLOW_ERROR;
}
Example #28
void
gst_decklink_video_src_convert_to_external_clock (GstDecklinkVideoSrc * self,
    GstClockTime * timestamp, GstClockTime * duration)
{
  GstClock *clock;

  g_assert (timestamp != NULL);

  if (*timestamp == GST_CLOCK_TIME_NONE)
    return;

  clock = gst_element_get_clock (GST_ELEMENT_CAST (self));
  if (clock && clock != self->input->clock) {
    GstClockTime internal, external, rate_n, rate_d;
    GstClockTimeDiff external_start_time_diff;

    gst_clock_get_calibration (self->input->clock, &internal, &external,
        &rate_n, &rate_d);

    if (rate_n != rate_d && self->internal_base_time != GST_CLOCK_TIME_NONE) {
      GstClockTime internal_timestamp = *timestamp;

      // Convert to the running time corresponding to both clock times
      internal -= self->internal_base_time;
      external -= self->external_base_time;

      // Get the difference in the internal time, note
      // that the capture time is internal time.
      // Then scale this difference and offset it to
      // our external time. Now we have the running time
      // according to our external clock.
      //
      // For the duration we just scale
      *timestamp =
          gst_clock_adjust_with_calibration (NULL, internal_timestamp, internal,
          external, rate_n, rate_d);

      GST_LOG_OBJECT (self,
          "Converted %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT " (external: %"
          GST_TIME_FORMAT " internal %" GST_TIME_FORMAT " rate: %lf)",
          GST_TIME_ARGS (internal_timestamp), GST_TIME_ARGS (*timestamp),
          GST_TIME_ARGS (external), GST_TIME_ARGS (internal),
          ((gdouble) rate_n) / ((gdouble) rate_d));

      if (duration) {
        GstClockTime internal_duration = *duration;

        *duration = gst_util_uint64_scale (internal_duration, rate_d, rate_n);

        GST_LOG_OBJECT (self,
            "Converted duration %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT
            " (external: %" GST_TIME_FORMAT " internal %" GST_TIME_FORMAT
            " rate: %lf)", GST_TIME_ARGS (internal_duration),
            GST_TIME_ARGS (*duration), GST_TIME_ARGS (external),
            GST_TIME_ARGS (internal), ((gdouble) rate_n) / ((gdouble) rate_d));
      }
    } else {
      GST_LOG_OBJECT (self, "No clock conversion needed, relative rate is 1.0");
    }

    // Add the diff between the external time when we
    // went to playing and the external time when the
    // pipeline went to playing. Otherwise we will
    // always start outputting from 0 instead of the
    // current running time.
    external_start_time_diff =
        gst_element_get_base_time (GST_ELEMENT_CAST (self));
    external_start_time_diff =
        self->external_base_time - external_start_time_diff;
    *timestamp += external_start_time_diff;
  } else {
    GST_LOG_OBJECT (self, "No clock conversion needed, same clocks");
  }

  if (clock)
    gst_object_unref (clock);
}
Example #29
static GstFlowReturn
gst_gdiscreencapsrc_create (GstPushSrc * push_src, GstBuffer ** buf)
{
  GstGDIScreenCapSrc *src = GST_GDISCREENCAPSRC (push_src);
  GstBuffer *new_buf;
  gint new_buf_size;
  GstClock *clock;
  GstClockTime buf_time, buf_dur;
  guint64 frame_number;

  if (G_UNLIKELY (!src->info.bmiHeader.biWidth ||
          !src->info.bmiHeader.biHeight)) {
    GST_ELEMENT_ERROR (src, CORE, NEGOTIATION, (NULL),
        ("format wasn't negotiated before create function"));
    return GST_FLOW_NOT_NEGOTIATED;
  }

  new_buf_size = GST_ROUND_UP_4 (src->info.bmiHeader.biWidth * 3) *
      (-src->info.bmiHeader.biHeight);

  GST_LOG_OBJECT (src,
      "creating buffer of %d bytes with %dx%d image",
      new_buf_size, (gint) src->info.bmiHeader.biWidth,
      (gint) (-src->info.bmiHeader.biHeight));

  new_buf = gst_buffer_new_and_alloc (new_buf_size);

  clock = gst_element_get_clock (GST_ELEMENT (src));
  if (clock != NULL) {
    GstClockTime time, base_time;

    /* Calculate sync time. */

    time = gst_clock_get_time (clock);
    base_time = gst_element_get_base_time (GST_ELEMENT (src));
    buf_time = time - base_time;

    if (src->rate_numerator) {
      frame_number = gst_util_uint64_scale (buf_time,
          src->rate_numerator, GST_SECOND * src->rate_denominator);
    } else {
      frame_number = -1;
    }
  } else {
    buf_time = GST_CLOCK_TIME_NONE;
    frame_number = -1;
  }

  if (frame_number != -1 && frame_number == src->frame_number) {
    GstClockID id;
    GstClockReturn ret;

    /* Need to wait for the next frame */
    frame_number += 1;

    /* Figure out what the next frame time is */
    buf_time = gst_util_uint64_scale (frame_number,
        src->rate_denominator * GST_SECOND, src->rate_numerator);

    id = gst_clock_new_single_shot_id (clock,
        buf_time + gst_element_get_base_time (GST_ELEMENT (src)));
    GST_OBJECT_LOCK (src);
    src->clock_id = id;
    GST_OBJECT_UNLOCK (src);

    GST_DEBUG_OBJECT (src, "Waiting for next frame time %" G_GUINT64_FORMAT,
        buf_time);
    ret = gst_clock_id_wait (id, NULL);
    GST_OBJECT_LOCK (src);

    gst_clock_id_unref (id);
    src->clock_id = NULL;
    if (ret == GST_CLOCK_UNSCHEDULED) {
      /* Got woken up by the unlock function */
      GST_OBJECT_UNLOCK (src);
      return GST_FLOW_FLUSHING;
    }
    GST_OBJECT_UNLOCK (src);

    /* Duration is a complete 1/fps frame duration */
    buf_dur =
        gst_util_uint64_scale_int (GST_SECOND, src->rate_denominator,
        src->rate_numerator);
  } else if (frame_number != -1) {
    GstClockTime next_buf_time;

    GST_DEBUG_OBJECT (src, "No need to wait for next frame time %"
        G_GUINT64_FORMAT " next frame = %" G_GINT64_FORMAT " prev = %"
        G_GINT64_FORMAT, buf_time, frame_number, src->frame_number);
    next_buf_time = gst_util_uint64_scale (frame_number + 1,
        src->rate_denominator * GST_SECOND, src->rate_numerator);
    /* Frame duration is from now until the next expected capture time */
    buf_dur = next_buf_time - buf_time;
  } else {
    buf_dur = GST_CLOCK_TIME_NONE;
  }
  src->frame_number = frame_number;

  GST_BUFFER_TIMESTAMP (new_buf) = buf_time;
  GST_BUFFER_DURATION (new_buf) = buf_dur;

  /* Do screen capture and put it into buffer... */
  gst_gdiscreencapsrc_screen_capture (src, new_buf);

  if (clock)
    gst_object_unref (clock);

  *buf = new_buf;
  return GST_FLOW_OK;
}
Example #30
static GstFlowReturn
gst_genicamsrc_create (GstPushSrc * psrc, GstBuffer ** buf)
{
  GstGenicamSrc *src = GST_GENICAM_SRC (psrc);
  gint32 dropped_frames = 0;    /* signed, so the < 0 check below is meaningful */
  GstClock *clock;
  GstClockTime clock_time;

  GST_LOG_OBJECT (src, "create");

  *buf = gst_genicamsrc_get_buffer (src);
  if (!*buf) {
    return GST_FLOW_ERROR;
  }

  clock = gst_element_get_clock (GST_ELEMENT (src));
  clock_time = gst_clock_get_time (clock);
  gst_object_unref (clock);

  /* check for dropped frames and disrupted signal */
  //dropped_frames = (circ_handle.FrameCount - src->last_frame_count) - 1;
  if (dropped_frames > 0) {
    src->total_dropped_frames += dropped_frames;
    GST_WARNING_OBJECT (src, "Dropped %d frames (%d total)", dropped_frames,
        src->total_dropped_frames);
  } else if (dropped_frames < 0) {
    GST_WARNING_OBJECT (src, "Frame count non-monotonic, signal disrupted?");
  }
  //src->last_frame_count = circ_handle.FrameCount;

  /* create GstBuffer then release circ buffer back to acquisition */
  //*buf = gst_genicamsrc_create_buffer_from_circ_handle (src, &circ_handle);
  //ret =
  //    BiCirStatusSet (src->board, &src->buffer_array, circ_handle, BIAVAILABLE);
  //if (ret != BI_OK) {
  //  GST_ELEMENT_ERROR (src, RESOURCE, FAILED,
  //      ("Failed to release buffer: %s", gst_genicamsrc_get_error_string (src,
  //              ret)), (NULL));
  //  return GST_FLOW_ERROR;
  //}

  /* TODO: understand why timestamps for circ_handle are sometimes 0 */
  //GST_BUFFER_TIMESTAMP (*buf) =
  //    GST_CLOCK_DIFF (gst_element_get_base_time (GST_ELEMENT (src)),
  //    src->acq_start_time + circ_handle.HiResTimeStamp.totalSec * GST_SECOND);
  GST_BUFFER_TIMESTAMP (*buf) =
      GST_CLOCK_DIFF (gst_element_get_base_time (GST_ELEMENT (src)),
      clock_time);
  //GST_BUFFER_OFFSET (*buf) = circ_handle.FrameCount - 1;

  if (src->stop_requested) {
    if (*buf != NULL) {
      gst_buffer_unref (*buf);
      *buf = NULL;
    }
    return GST_FLOW_FLUSHING;
  }

  return GST_FLOW_OK;

error:
  return GST_FLOW_ERROR;
}