Example #1
static gboolean
gst_jasper_dec_src_event (GstPad * pad, GstEvent * event)
{
  GstJasperDec *dec;
  gboolean res;

  dec = GST_JASPER_DEC (gst_pad_get_parent (pad));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_QOS:{
      GstClockTimeDiff diff;
      GstClockTime timestamp;
      gdouble proportion;

      gst_event_parse_qos (event, &proportion, &diff, &timestamp);

      gst_jasper_dec_update_qos (dec, proportion, timestamp + diff);
      break;
    }
    default:
      break;
  }

  res = gst_pad_push_event (dec->sinkpad, event);

  gst_object_unref (dec);
  return res;
}
Example #2
static gboolean
gst_vdp_vpp_src_event (GstPad * pad, GstEvent * event)
{
  GstVdpVideoPostProcess *vpp =
      GST_VDP_VIDEO_POST_PROCESS (gst_pad_get_parent (pad));
  gboolean res;

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_QOS:
    {
      gdouble proportion;
      GstClockTimeDiff diff;
      GstClockTime timestamp;

      gst_event_parse_qos (event, &proportion, &diff, &timestamp);

      GST_OBJECT_LOCK (vpp);
      vpp->earliest_time = timestamp + diff;
      GST_OBJECT_UNLOCK (vpp);

      res = gst_pad_event_default (pad, event);
      break;
    }
    default:
      res = gst_pad_event_default (pad, event);
  }

  gst_object_unref (vpp);

  return res;
}
Example #3
static gboolean
gst_shape_wipe_src_event (GstPad * pad, GstEvent * event)
{
  GstShapeWipe *self = GST_SHAPE_WIPE (gst_pad_get_parent (pad));
  gboolean ret;

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_QOS:{
      GstClockTimeDiff diff;
      GstClockTime timestamp;
      gdouble proportion;

      gst_event_parse_qos (event, &proportion, &diff, &timestamp);

      gst_shape_wipe_update_qos (self, proportion, diff, timestamp);
    }
      /* fall through */
    default:
      ret = gst_pad_push_event (self->video_sinkpad, event);
      break;
  }

  gst_object_unref (self);
  return ret;
}
Example #4
static gboolean
gst_shape_wipe_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
  GstShapeWipe *self = GST_SHAPE_WIPE (parent);
  gboolean ret;

  GST_LOG_OBJECT (pad, "Got %s event", GST_EVENT_TYPE_NAME (event));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_QOS:{
      GstQOSType type;
      GstClockTimeDiff diff;
      GstClockTime timestamp;
      gdouble proportion;

      gst_event_parse_qos (event, &type, &proportion, &diff, &timestamp);

      gst_shape_wipe_update_qos (self, proportion, diff, timestamp);
    }
      /* fall through */
    default:
      ret = gst_pad_push_event (self->video_sinkpad, event);
      break;
  }

  return ret;
}
Example #5
static VALUE
qos_parse(VALUE self)
{
    gdouble proportion;
    GstClockTimeDiff diff;
    GstClockTime timestamp;

    gst_event_parse_qos(RGST_EVENT(self), &proportion, &diff, &timestamp);

    return rb_ary_new3(3, DBL2NUM(proportion), LL2NUM(diff),
                       ULL2NUM(timestamp));
}
Example #6
static gboolean
on_video_sink_data_flow (GstPad * pad, GstMiniObject * mini_obj,
    gpointer user_data)
{
  GstFPSDisplaySink *self = GST_FPS_DISPLAY_SINK (user_data);

#if 0
  if (GST_IS_BUFFER (mini_obj)) {
    GstBuffer *buf = GST_BUFFER_CAST (mini_obj);

    if (GST_CLOCK_TIME_IS_VALID (self->next_ts)) {
      if (GST_BUFFER_TIMESTAMP (buf) <= self->next_ts) {
        self->frames_rendered++;
      } else {
        GST_WARNING_OBJECT (self, "dropping frame : ts %" GST_TIME_FORMAT
            " < expected_ts %" GST_TIME_FORMAT,
            GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
            GST_TIME_ARGS (self->next_ts));
        self->frames_dropped++;
      }
    } else {
      self->frames_rendered++;
    }
  } else
#endif
  if (GST_IS_EVENT (mini_obj)) {
    GstEvent *ev = GST_EVENT_CAST (mini_obj);

    if (GST_EVENT_TYPE (ev) == GST_EVENT_QOS) {
      GstClockTimeDiff diff;
      GstClockTime ts;

      gst_event_parse_qos (ev, NULL, &diff, &ts);
      if (diff <= 0) {
        g_atomic_int_inc (&self->frames_rendered);
      } else {
        g_atomic_int_inc (&self->frames_dropped);
      }

      ts = gst_util_get_timestamp ();
      if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (self->start_ts))) {
        self->interval_ts = self->last_ts = self->start_ts = ts;
      }
      if (GST_CLOCK_DIFF (self->interval_ts, ts) > self->fps_update_interval) {
        display_current_fps (self);
        self->interval_ts = ts;
      }
    }
  }
  return TRUE;
}
Example #7
static gboolean
on_video_sink_data_flow (GstPad * pad, GstMiniObject * mini_obj,
    gpointer user_data)
{
  GstFPSDisplaySink *self = GST_FPS_DISPLAY_SINK (user_data);

#if 0
  if (GST_IS_BUFFER (mini_obj)) {
    GstBuffer *buf = GST_BUFFER_CAST (mini_obj);

    if (GST_CLOCK_TIME_IS_VALID (self->next_ts)) {
      if (GST_BUFFER_TIMESTAMP (buf) <= self->next_ts) {
        self->frames_rendered++;
      } else {
        GST_WARNING_OBJECT (self, "dropping frame : ts %" GST_TIME_FORMAT
            " < expected_ts %" GST_TIME_FORMAT,
            GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
            GST_TIME_ARGS (self->next_ts));
        self->frames_dropped++;
      }
    } else {
      self->frames_rendered++;
    }
  } else
#endif
  if (GST_IS_EVENT (mini_obj)) {
    GstEvent *ev = GST_EVENT_CAST (mini_obj);

    if (GST_EVENT_TYPE (ev) == GST_EVENT_QOS) {
      GstClockTimeDiff diff;
      GstClockTime ts;

      gst_event_parse_qos (ev, NULL, &diff, &ts);
      self->next_ts = ts + diff;
      if (diff <= 0) {
        self->frames_rendered++;
      } else {
        self->frames_dropped++;
      }
    }
  }
  return TRUE;
}
Example #8
static gboolean
kms_face_detector_src_eventfunc (GstBaseTransform * trans, GstEvent * event)
{
    KmsFaceDetector *facedetector = KMS_FACE_DETECTOR (trans);

    switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_QOS:
    {
        gdouble proportion;
        GstClockTimeDiff diff;
        GstClockTime timestamp;
        GstQOSType type;
        gfloat difference;

        gst_event_parse_qos (event, &type, &proportion, &diff, &timestamp);
        gst_base_transform_update_qos (trans, proportion, diff, timestamp);
        difference = (((gfloat) (gint) diff) / (gfloat) GST_SECOND);

        g_mutex_lock (&facedetector->priv->mutex);

        if (difference > MIN_TIME) {
            if (facedetector->priv->throw_frames <= MIN_FPS) {
                facedetector->priv->qos_control = TRUE;
            } else {
                facedetector->priv->qos_control = FALSE;
                facedetector->priv->throw_frames = 0;
            }
        } else {
            facedetector->priv->qos_control = FALSE;
            facedetector->priv->throw_frames = 0;
        }

        g_mutex_unlock (&facedetector->priv->mutex);

        break;
    }
    default:
        break;
    }

    return gst_pad_push_event (trans->sinkpad, event);
}
Example #9
static gboolean
gst_visual_src_event (GstPad * pad, GstEvent * event)
{
  GstVisual *visual;
  gboolean res;

  visual = GST_VISUAL (gst_pad_get_parent (pad));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_QOS:
    {
      gdouble proportion;
      GstClockTimeDiff diff;
      GstClockTime timestamp;

      gst_event_parse_qos (event, &proportion, &diff, &timestamp);

      /* save stuff for the _chain function */
      GST_OBJECT_LOCK (visual);
      visual->proportion = proportion;
      if (diff >= 0)
        /* we're late, this is a good estimate for next displayable
         * frame (see part-qos.txt) */
        visual->earliest_time = timestamp + 2 * diff + visual->duration;
      else
        visual->earliest_time = timestamp + diff;

      GST_OBJECT_UNLOCK (visual);

      res = gst_pad_push_event (visual->sinkpad, event);
      break;
    }
    default:
      res = gst_pad_push_event (visual->sinkpad, event);
      break;
  }

  gst_object_unref (visual);
  return res;
}
Example #10
static gboolean
gst_monoscope_src_event (GstPad * pad, GstEvent * event)
{
  GstMonoscope *monoscope;
  gboolean res;

  monoscope = GST_MONOSCOPE (gst_pad_get_parent (pad));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_QOS:{
      gdouble proportion;
      GstClockTimeDiff diff;
      GstClockTime timestamp;

      gst_event_parse_qos (event, &proportion, &diff, &timestamp);

      /* save stuff for the _chain() function */
      GST_OBJECT_LOCK (monoscope);
      monoscope->proportion = proportion;
      if (diff >= 0)
        /* we're late, this is a good estimate for next displayable
         * frame (see part-qos.txt) */
        monoscope->earliest_time =
            timestamp + 2 * diff + monoscope->frame_duration;
      else
        monoscope->earliest_time = timestamp + diff;
      GST_OBJECT_UNLOCK (monoscope);

      res = gst_pad_push_event (monoscope->sinkpad, event);
      break;
    }
    default:
      res = gst_pad_push_event (monoscope->sinkpad, event);
      break;
  }
  gst_object_unref (monoscope);

  return res;
}
Example #11
static gboolean
gst_goom_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
  gboolean res;
  GstGoom *goom;

  goom = GST_GOOM (parent);

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_QOS:
    {
      gdouble proportion;
      GstClockTimeDiff diff;
      GstClockTime timestamp;

      gst_event_parse_qos (event, NULL, &proportion, &diff, &timestamp);

      /* save stuff for the _chain() function */
      GST_OBJECT_LOCK (goom);
      goom->proportion = proportion;
      if (diff >= 0)
        /* we're late, this is a good estimate for next displayable
         * frame (see part-qos.txt) */
        goom->earliest_time = timestamp + 2 * diff + goom->duration;
      else
        goom->earliest_time = timestamp + diff;
      GST_OBJECT_UNLOCK (goom);

      res = gst_pad_push_event (goom->sinkpad, event);
      break;
    }
    default:
      res = gst_pad_push_event (goom->sinkpad, event);
      break;
  }

  return res;
}
Example #12
static gboolean
theora_dec_src_event (GstPad * pad, GstEvent * event)
{
  gboolean res = TRUE;
  GstTheoraDec *dec;

  dec = GST_THEORA_DEC (gst_pad_get_parent (pad));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_SEEK:
    {
      GstFormat format, tformat;
      gdouble rate;
      GstEvent *real_seek;
      GstSeekFlags flags;
      GstSeekType cur_type, stop_type;
      gint64 cur, stop;
      gint64 tcur, tstop;

      gst_event_parse_seek (event, &rate, &format, &flags, &cur_type, &cur,
          &stop_type, &stop);
      gst_event_unref (event);

      /* we have to ask our peer to seek to time here as we know
       * nothing about how to generate a granulepos from the src
       * formats or anything.
       * 
       * First bring the requested format to time 
       */
      tformat = GST_FORMAT_TIME;
      if (!(res = theora_dec_src_convert (pad, format, cur, &tformat, &tcur)))
        goto convert_error;
      if (!(res = theora_dec_src_convert (pad, format, stop, &tformat, &tstop)))
        goto convert_error;

      /* then seek with time on the peer */
      real_seek = gst_event_new_seek (rate, GST_FORMAT_TIME,
          flags, cur_type, tcur, stop_type, tstop);

      res = gst_pad_push_event (dec->sinkpad, real_seek);
      break;
    }
    case GST_EVENT_QOS:
    {
      gdouble proportion;
      GstClockTimeDiff diff;
      GstClockTime timestamp;

      gst_event_parse_qos (event, &proportion, &diff, &timestamp);

      /* we cannot randomly skip frame decoding since we don't have
       * B frames. we can however use the timestamp and diff to not
       * push late frames. This would at least save us the time to
       * crop/memcpy the data. */
      GST_OBJECT_LOCK (dec);
      dec->proportion = proportion;
      dec->earliest_time = timestamp + diff;
      GST_OBJECT_UNLOCK (dec);

      GST_DEBUG_OBJECT (dec, "got QoS %" GST_TIME_FORMAT ", %" G_GINT64_FORMAT,
          GST_TIME_ARGS (timestamp), diff);

      res = gst_pad_push_event (dec->sinkpad, event);
      break;
    }
    default:
      res = gst_pad_push_event (dec->sinkpad, event);
      break;
  }
done:
  gst_object_unref (dec);

  return res;

  /* ERRORS */
convert_error:
  {
    GST_DEBUG_OBJECT (dec, "could not convert format");
    goto done;
  }
}
Example #13
static gboolean
gst_base_video_codec_src_event (GstPad * pad, GstEvent * event)
{
  GstBaseVideoCodec *base_video_codec;
  gboolean res = FALSE;

  base_video_codec = GST_BASE_VIDEO_CODEC (gst_pad_get_parent (pad));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_SEEK:
    {
      GstFormat format, tformat;
      gdouble rate;
      GstEvent *real_seek;
      GstSeekFlags flags;
      GstSeekType cur_type, stop_type;
      gint64 cur, stop;
      gint64 tcur, tstop;

      gst_event_parse_seek (event, &rate, &format, &flags, &cur_type,
          &cur, &stop_type, &stop);
      gst_event_unref (event);

      tformat = GST_FORMAT_TIME;
      res = gst_base_video_encoded_video_convert (&base_video_codec->state,
          format, cur, &tformat, &tcur);
      if (!res)
        goto convert_error;
      res = gst_base_video_encoded_video_convert (&base_video_codec->state,
          format, stop, &tformat, &tstop);
      if (!res)
        goto convert_error;

      real_seek = gst_event_new_seek (rate, GST_FORMAT_TIME,
          flags, cur_type, tcur, stop_type, tstop);

      res = gst_pad_push_event (base_video_codec->sinkpad, real_seek);

      break;
    }
#if 0
    case GST_EVENT_QOS:
    {
      gdouble proportion;
      GstClockTimeDiff diff;
      GstClockTime timestamp;

      gst_event_parse_qos (event, &proportion, &diff, &timestamp);

      GST_OBJECT_LOCK (base_video_codec);
      base_video_codec->proportion = proportion;
      base_video_codec->earliest_time = timestamp + diff;
      GST_OBJECT_UNLOCK (base_video_codec);

      GST_DEBUG_OBJECT (base_video_codec,
          "got QoS %" GST_TIME_FORMAT ", %" G_GINT64_FORMAT,
          GST_TIME_ARGS (timestamp), diff);

      res = gst_pad_push_event (base_video_codec->sinkpad, event);
      break;
    }
#endif
    default:
      res = gst_pad_push_event (base_video_codec->sinkpad, event);
      break;
  }
done:
  gst_object_unref (base_video_codec);
  return res;

convert_error:
  GST_DEBUG_OBJECT (base_video_codec, "could not convert format");
  goto done;
}
Example #14
static gboolean
gst_base_video_decoder_src_event (GstPad * pad, GstEvent * event)
{
  GstBaseVideoDecoder *base_video_decoder;
  gboolean res = FALSE;

  base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_SEEK:
    {
      GstFormat format, tformat;
      gdouble rate;
      GstEvent *real_seek;
      GstSeekFlags flags;
      GstSeekType cur_type, stop_type;
      gint64 cur, stop;
      gint64 tcur, tstop;

      GST_DEBUG ("seek event");

      gst_event_parse_seek (event, &rate, &format, &flags, &cur_type,
          &cur, &stop_type, &stop);
      gst_event_unref (event);

      tformat = GST_FORMAT_TIME;
      res =
          gst_base_video_decoder_src_convert (pad, format, cur, &tformat,
          &tcur);
      if (!res)
        goto convert_error;
      res =
          gst_base_video_decoder_src_convert (pad, format, stop, &tformat,
          &tstop);
      if (!res)
        goto convert_error;

      real_seek = gst_event_new_seek (rate, GST_FORMAT_TIME,
          flags, cur_type, tcur, stop_type, tstop);

      res =
          gst_pad_push_event (GST_BASE_VIDEO_DECODER_SINK_PAD
          (base_video_decoder), real_seek);

      break;
    }
    case GST_EVENT_QOS:
    {
      gdouble proportion;
      GstClockTimeDiff diff;
      GstClockTime timestamp;
      GstClockTime duration;

      gst_event_parse_qos (event, &proportion, &diff, &timestamp);

      GST_OBJECT_LOCK (base_video_decoder);
      base_video_decoder->proportion = proportion;
      if (G_LIKELY (GST_CLOCK_TIME_IS_VALID (timestamp))) {
        if (G_UNLIKELY (diff > 0)) {
          if (base_video_decoder->state.fps_n > 0)
            duration =
                gst_util_uint64_scale (GST_SECOND,
                base_video_decoder->state.fps_d,
                base_video_decoder->state.fps_n);
          else
            duration = 0;
          base_video_decoder->earliest_time = timestamp + 2 * diff + duration;
        } else {
          base_video_decoder->earliest_time = timestamp + diff;
        }
      } else {
        base_video_decoder->earliest_time = GST_CLOCK_TIME_NONE;
      }
      GST_OBJECT_UNLOCK (base_video_decoder);

      GST_DEBUG_OBJECT (base_video_decoder,
          "got QoS %" GST_TIME_FORMAT ", %" G_GINT64_FORMAT ", %g",
          GST_TIME_ARGS (timestamp), diff, proportion);

      res =
          gst_pad_push_event (GST_BASE_VIDEO_DECODER_SINK_PAD
          (base_video_decoder), event);
      break;
    }
    default:
      res =
          gst_pad_push_event (GST_BASE_VIDEO_DECODER_SINK_PAD
          (base_video_decoder), event);
      break;
  }
done:
  gst_object_unref (base_video_decoder);
  return res;

convert_error:
  GST_DEBUG_OBJECT (base_video_decoder, "could not convert format");
  goto done;
}
Example #15
static gboolean
gst_base_video_decoder_src_event (GstPad * pad, GstEvent * event)
{
  GstBaseVideoDecoder *base_video_decoder;
  gboolean res = FALSE;

  base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_SEEK:

      /* FIXME: do seek using bitrate incase upstream doesn't handle it */
      res =
          gst_pad_push_event (GST_BASE_VIDEO_DECODER_SINK_PAD
          (base_video_decoder), event);

      break;

    case GST_EVENT_QOS:
    {
      gdouble proportion;
      GstClockTimeDiff diff;
      GstClockTime timestamp;
      GstClockTime duration;

      gst_event_parse_qos (event, &proportion, &diff, &timestamp);

      GST_OBJECT_LOCK (base_video_decoder);
      base_video_decoder->proportion = proportion;
      if (G_LIKELY (GST_CLOCK_TIME_IS_VALID (timestamp))) {
        if (G_UNLIKELY (diff > 0)) {
          if (base_video_decoder->state.fps_n > 0)
            duration =
                gst_util_uint64_scale (GST_SECOND,
                base_video_decoder->state.fps_d,
                base_video_decoder->state.fps_n);
          else
            duration = 0;
          base_video_decoder->earliest_time = timestamp + 2 * diff + duration;
        } else {
          base_video_decoder->earliest_time = timestamp + diff;
        }
      } else {
        base_video_decoder->earliest_time = GST_CLOCK_TIME_NONE;
      }
      GST_OBJECT_UNLOCK (base_video_decoder);

      GST_DEBUG_OBJECT (base_video_decoder,
          "got QoS %" GST_TIME_FORMAT ", %" G_GINT64_FORMAT ", %g",
          GST_TIME_ARGS (timestamp), diff, proportion);

      res =
          gst_pad_push_event (GST_BASE_VIDEO_DECODER_SINK_PAD
          (base_video_decoder), event);
      break;
    }

    default:
      res =
          gst_pad_push_event (GST_BASE_VIDEO_DECODER_SINK_PAD
          (base_video_decoder), event);
      break;
  }

  gst_object_unref (base_video_decoder);
  return res;
}
Example #16
/* this function handles sink events */
static gboolean
gst_dwt_filter_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
	gboolean ret;
	GstDwtFilter *filter;
	GstStructure *structure;
	const gchar *format;
	int i;

	filter = GST_DWTFILTER (parent);

	switch (GST_EVENT_TYPE (event)) {
	case GST_EVENT_CAPS:
	{
		GstCaps *caps;

		gst_event_parse_caps (event, &caps);
		/* do something with the caps */

		for (i = 0; i < gst_caps_get_size (caps); i++)
		{
			structure = gst_caps_get_structure (caps, i);
			if(structure)
			{
				gst_structure_get_int(structure, "width", &filter->width);
				gst_structure_get_int(structure, "height", &filter->height);
				format = gst_structure_get_string(structure, "format");
				g_print("width = %d height = %d format = %s\n", filter->width, filter->height, format);
				//pAccum = malloc(4 * width * height * sizeof(long int));
				//memset(pAccum, 0, 4 * width * height * sizeof(long int));
				filter->pDWTBuffer = (double*) malloc(filter->width * filter->height * sizeof(double));
				filter->pTmpBuffer = (double*) malloc(filter->width * filter->height * sizeof(double));
				filter->pTmpBuffer2 = (double*) malloc(filter->width * filter->height * sizeof(double));

				memset(filter->pDWTBuffer, 0, filter->width * filter->height * sizeof(double));

				filter->work = gsl_wavelet_workspace_alloc (filter->width);
			}
			else
			{
				//				g_print("structure = 0\n");
			}
		}

		/* and forward */
		ret = gst_pad_event_default (pad, parent, event);
		break;
	}
	case GST_EVENT_QOS:
	{
		GstQOSType type;
		gdouble proportion;
		GstClockTimeDiff diff;
		GstClockTime timestamp;

		g_print("GST_EVENT_QOS\n");

		gst_event_parse_qos (event, &type, &proportion, &diff, &timestamp);

		ret = gst_pad_event_default (pad, parent, event);
		break;
	}
	default:
		ret = gst_pad_event_default (pad, parent, event);
		break;
	}
	return ret;
}
Example #17
/* srcpad functions */
static gboolean
gst_stream_synchronizer_src_event (GstPad * pad, GstObject * parent,
    GstEvent * event)
{
  GstStreamSynchronizer *self = GST_STREAM_SYNCHRONIZER (parent);
  GstPad *opad;
  gboolean ret = FALSE;

  GST_LOG_OBJECT (pad, "Handling event %s: %" GST_PTR_FORMAT,
      GST_EVENT_TYPE_NAME (event), event);

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_QOS:{
      gdouble proportion;
      GstClockTimeDiff diff;
      GstClockTime timestamp;
      gint64 running_time_diff = -1;
      GstStream *stream;

      gst_event_parse_qos (event, NULL, &proportion, &diff, &timestamp);
      gst_event_unref (event);

      GST_STREAM_SYNCHRONIZER_LOCK (self);
      stream = gst_pad_get_element_private (pad);
      if (stream)
        running_time_diff = stream->segment.base;
      GST_STREAM_SYNCHRONIZER_UNLOCK (self);

      if (running_time_diff == -1) {
        GST_WARNING_OBJECT (pad, "QOS event before group start");
        goto out;
      }
      if (timestamp < running_time_diff) {
        GST_DEBUG_OBJECT (pad, "QOS event from previous group");
        goto out;
      }

      GST_LOG_OBJECT (pad,
          "Adjusting QOS event: %" GST_TIME_FORMAT " - %" GST_TIME_FORMAT " = %"
          GST_TIME_FORMAT, GST_TIME_ARGS (timestamp),
          GST_TIME_ARGS (running_time_diff),
          GST_TIME_ARGS (timestamp - running_time_diff));

      timestamp -= running_time_diff;

      /* That case is invalid for QoS events */
      if (diff < 0 && -diff > timestamp) {
        GST_DEBUG_OBJECT (pad, "QOS event from previous group");
        ret = TRUE;
        goto out;
      }

      event =
          gst_event_new_qos (GST_QOS_TYPE_UNDERFLOW, proportion, diff,
          timestamp);
      break;
    }
    default:
      break;
  }

  opad = gst_stream_get_other_pad_from_pad (self, pad);
  if (opad) {
    ret = gst_pad_push_event (opad, event);
    gst_object_unref (opad);
  }

out:
  return ret;
}
Example #18
static gboolean
gst_merger_event_srcv (GstPad * pad, GstObject * parent, GstEvent * event)
{
  GstMerger *merger = GST_MERGER (parent);
  gboolean ret = TRUE;

  GST_DEBUG_OBJECT (pad, "got source event %s", GST_EVENT_TYPE_NAME (event));

  switch (GST_EVENT_TYPE (event)) {

    case GST_EVENT_CAPS:
    {
      GstCaps *caps;

      gst_event_parse_caps (event, &caps);

      GST_DEBUG_OBJECT (pad, "event caps are %" GST_PTR_FORMAT, caps);

      /* forward the event with the default handler */
      ret = gst_pad_event_default (pad, parent, event);
      break;
    }

    case GST_EVENT_EOS:
      process_eos (merger);
      gst_event_unref (event);
      ret = TRUE;
      break;
    case GST_EVENT_SEGMENT:
      gst_event_copy_segment (event, &merger->s_segment);
      GST_DEBUG_OBJECT (merger, "segment: %" GST_SEGMENT_FORMAT,
          &merger->s_segment);
      ret = gst_pad_event_default (pad, parent, event);
      break;
    case GST_EVENT_QOS:
    {
      GstQOSType type;
      gdouble proportion;
      GstClockTimeDiff diff;
      GstClockTime timestamp;
      gst_event_parse_qos (event, &type, &proportion, &diff, &timestamp);

      if (type == GST_QOS_TYPE_UNDERFLOW) {
        GST_INFO_OBJECT (pad,
            "got QOS event UNDERFLOW proportion %f diff %" G_GINT64_FORMAT
            " ts %" G_GUINT64_FORMAT, proportion, diff, timestamp);
        GST_OBJECT_LOCK (merger);

        gint num, den;
        {
          GstCaps *caps = gst_pad_get_current_caps (merger->srcv_pad);
          GstStructure *s = gst_caps_get_structure (caps, 0);
          gst_structure_get_fraction (s, "framerate", &num, &den);
          gst_caps_unref (caps);
        }

        int nb_bufs = 0.5 + diff / (1e9 * den / num);
        GST_WARNING_OBJECT (merger, "Discarding %d buffers", nb_bufs);
        g_queue_pop_head (&merger->bufs_l);
        g_queue_pop_head (&merger->bufs_r);
        GST_OBJECT_UNLOCK (merger);
        /* the event is fully handled here; drop it so it is not leaked */
        gst_event_unref (event);
        ret = TRUE;
      } else {
        GST_WARNING_OBJECT (pad, "QOS type %d not implemented", type);
        ret = gst_pad_event_default (pad, parent, event);
      }
    }
      break;

    default:
      GST_INFO_OBJECT (pad, "got source event %s", GST_EVENT_TYPE_NAME (event));
      ret = gst_pad_event_default (pad, parent, event);
      break;
  }
  return ret;
}