static gboolean
_set_duration (GESTimelineElement * element, GstClockTime duration)
{
  GESTrackElement *object = GES_TRACK_ELEMENT (element);
  GESTrackElementPrivate *priv = object->priv;

  if (GST_CLOCK_TIME_IS_VALID (_MAXDURATION (element)) &&
      duration > _INPOINT (object) + _MAXDURATION (element))
    duration = _MAXDURATION (element) - _INPOINT (object);

  if (priv->gnlobject != NULL) {
    if (G_UNLIKELY (duration == _DURATION (object)))
      return FALSE;

    g_object_set (priv->gnlobject, "duration", duration, NULL);
  } else
    priv->pending_duration = duration;

  _update_control_bindings (element, ges_timeline_element_get_inpoint (element),
      duration);

  return TRUE;
}
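Every example on this page leans on the same contract: GST_CLOCK_TIME_NONE is the all-ones sentinel meaning "no timestamp", and GST_CLOCK_TIME_IS_VALID() is simply a comparison against it. A minimal standalone sketch of that contract (not taken from any of the projects below):

#include <gst/gst.h>

int
main (int argc, char **argv)
{
  GstClockTime t = GST_CLOCK_TIME_NONE;

  gst_init (&argc, &argv);

  /* GST_CLOCK_TIME_NONE (all bits set) is the only invalid value */
  g_assert (!GST_CLOCK_TIME_IS_VALID (t));

  t = 42 * GST_SECOND;
  g_assert (GST_CLOCK_TIME_IS_VALID (t));
  g_print ("t = %" GST_TIME_FORMAT "\n", GST_TIME_ARGS (t));

  return 0;
}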
Example #2
void AudioTestSource_i::_new_gst_buffer(GstElement *sink, AudioTestSource_i* comp) {
    static GstBuffer *buffer;
    static std::vector<short> packet;

    /* Retrieve the buffer */
    g_signal_emit_by_name (sink, "pull-buffer", &buffer);
    if (buffer) {
        BULKIO::PrecisionUTCTime T;

        /* Prefer the buffer's own timestamp; fall back to the host clock */
        if (GST_CLOCK_TIME_IS_VALID(buffer->timestamp)) {
            T = _from_gst_timestamp(buffer->timestamp);
        } else {
            T = _now();
        }

        packet.resize(buffer->size / 2); // TODO the division should come from reading buffer->caps
        memcpy(&packet[0], buffer->data, buffer->size);

        comp->audio_out->pushPacket(packet, T, false, comp->stream_id);
        gst_buffer_unref (buffer);
    }
}
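For the callback above to fire at all, the appsink must be created with signal emission enabled. A sketch of the wiring this component presumably does during setup, assuming the 0.10-era appsink API, that _new_gst_buffer is declared static in the class, and a hypothetical component pointer:

static GstElement *
make_audio_sink (AudioTestSource_i *component)
{
    GstElement *sink = gst_element_factory_make ("appsink", "audio_sink");

    /* "new-buffer" fires per received buffer once emit-signals is enabled */
    g_object_set (sink, "emit-signals", TRUE, "sync", FALSE, NULL);
    g_signal_connect (sink, "new-buffer",
        G_CALLBACK (AudioTestSource_i::_new_gst_buffer), component);

    return sink;
}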
Example #3
#ifdef __SYMBIAN32__
EXPORT_C
#endif

gboolean
gst_interpolation_control_source_unset (GstInterpolationControlSource * self,
    GstClockTime timestamp)
{
  GList *node;
  gboolean res = FALSE;

  g_return_val_if_fail (GST_IS_INTERPOLATION_CONTROL_SOURCE (self), FALSE);
  g_return_val_if_fail (GST_CLOCK_TIME_IS_VALID (timestamp), FALSE);

  g_mutex_lock (self->lock);
  /* check if a control point for the timestamp exists */
  if ((node = g_list_find_custom (self->priv->values, &timestamp,
              gst_control_point_find))) {
    GstControlPoint *cp = node->data;

    if (cp->timestamp == 0) {
      /* Restore the default node */
      g_value_reset (&cp->value);
      g_value_copy (&self->priv->default_value, &cp->value);
    } else {
      if (node == self->priv->last_requested_value)
        self->priv->last_requested_value = NULL;
      gst_control_point_free (node->data);      /* free GstControlPoint */
      self->priv->values = g_list_delete_link (self->priv->values, node);
      self->priv->nvalues--;
    }
    self->priv->valid_cache = FALSE;
    res = TRUE;
  }
  g_mutex_unlock (self->lock);

  return res;
}
Example #4
/* this function does the actual processing */
static GstFlowReturn
gst_audio_panorama_transform (GstBaseTransform * base, GstBuffer * inbuf,
    GstBuffer * outbuf)
{
  GstAudioPanorama *filter = GST_AUDIO_PANORAMA (base);
  GstClockTime timestamp, stream_time;
  GstMapInfo inmap, outmap;

  timestamp = GST_BUFFER_TIMESTAMP (inbuf);
  stream_time =
      gst_segment_to_stream_time (&base->segment, GST_FORMAT_TIME, timestamp);

  GST_DEBUG_OBJECT (filter, "sync to %" GST_TIME_FORMAT,
      GST_TIME_ARGS (timestamp));

  if (GST_CLOCK_TIME_IS_VALID (stream_time))
    gst_object_sync_values (GST_OBJECT (filter), stream_time);

  gst_buffer_map (inbuf, &inmap, GST_MAP_READ);
  gst_buffer_map (outbuf, &outmap, GST_MAP_WRITE);

  if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (inbuf, GST_BUFFER_FLAG_GAP))) {
    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_GAP);
    memset (outmap.data, 0, outmap.size);
  } else {
    /* output always stereo, input mono or stereo,
     * and info describes input format */
    guint num_samples = outmap.size / (2 * GST_AUDIO_INFO_BPS (&filter->info));

    filter->process (filter, inmap.data, outmap.data, num_samples);
  }

  gst_buffer_unmap (inbuf, &inmap);
  gst_buffer_unmap (outbuf, &outmap);

  return GST_FLOW_OK;
}
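The first half of this function is an idiom that recurs all over this page (see also the burn, IIR-filter and shape-wipe examples below): convert the buffer timestamp to stream time, and sync the controlled properties only if the result is valid. Reduced to its core, as a sketch for any element with access to its segment:

static void
sync_controller (GstElement * element, GstSegment * segment, GstBuffer * buf)
{
  GstClockTime timestamp = GST_BUFFER_TIMESTAMP (buf);
  GstClockTime stream_time =
      gst_segment_to_stream_time (segment, GST_FORMAT_TIME, timestamp);

  /* an invalid buffer timestamp propagates to an invalid stream time */
  if (GST_CLOCK_TIME_IS_VALID (stream_time))
    gst_object_sync_values (GST_OBJECT (element), stream_time);
}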
Example #5
static GstFlowReturn
gst_mpegv_parse_parse_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
{
  GstMpegvParse *mpvparse = GST_MPEGVIDEO_PARSE (parse);
  GstBuffer *buffer = frame->buffer;

  if (G_UNLIKELY (mpvparse->pichdr.pic_type == GST_MPEG_VIDEO_PICTURE_TYPE_I))
    GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  else
    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);

  /* maybe only sequence in this buffer, though not recommended,
   * so mark it as such and force 0 duration */
  if (G_UNLIKELY (mpvparse->pic_offset < 0)) {
    GST_DEBUG_OBJECT (mpvparse, "frame holds no picture data");
    frame->flags |= GST_BASE_PARSE_FRAME_FLAG_NO_FRAME;
    GST_BUFFER_DURATION (buffer) = 0;
  }

  if (mpvparse->pic_offset > 4) {
    gst_base_parse_set_ts_at_offset (parse, mpvparse->pic_offset - 4);
  }

  if (mpvparse->frame_repeat_count
      && GST_CLOCK_TIME_IS_VALID (GST_BUFFER_DURATION (buffer))) {
    GST_BUFFER_DURATION (buffer) =
        (1 + mpvparse->frame_repeat_count) * GST_BUFFER_DURATION (buffer) / 2;
  }

  if (G_UNLIKELY (mpvparse->drop && !mpvparse->config)) {
    GST_DEBUG_OBJECT (mpvparse, "dropping frame as no config yet");
    return GST_BASE_PARSE_FLOW_DROPPED;
  }

  gst_mpegv_parse_update_src_caps (mpvparse);
  return GST_FLOW_OK;
}
Example #6
/* Actual processing. */
static GstFlowReturn
gst_burn_transform_frame (GstVideoFilter * vfilter,
    GstVideoFrame * in_frame, GstVideoFrame * out_frame)
{
  GstBurn *filter = GST_BURN (vfilter);
  gint video_size, adjustment;
  guint32 *src, *dest;
  GstClockTime timestamp;
  gint64 stream_time;

  src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
  dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);

  video_size = GST_VIDEO_FRAME_WIDTH (in_frame) *
      GST_VIDEO_FRAME_HEIGHT (in_frame);

  /* GstController: update the properties */
  timestamp = GST_BUFFER_TIMESTAMP (in_frame->buffer);
  stream_time =
      gst_segment_to_stream_time (&GST_BASE_TRANSFORM (filter)->segment,
      GST_FORMAT_TIME, timestamp);

  GST_DEBUG_OBJECT (filter, "sync to %" GST_TIME_FORMAT,
      GST_TIME_ARGS (timestamp));

  if (GST_CLOCK_TIME_IS_VALID (stream_time))
    gst_object_sync_values (GST_OBJECT (filter), stream_time);

  GST_OBJECT_LOCK (filter);
  adjustment = filter->adjustment;
  GST_OBJECT_UNLOCK (filter);

  /*** Now the image processing work.... ***/
  gaudi_orc_burn (dest, src, adjustment, video_size);

  return GST_FLOW_OK;
}
Example #7
static GstFlowReturn
gst_rtp_mpv_pay_handle_buffer (GstBaseRTPPayload * basepayload,
    GstBuffer * buffer)
{
  GstRTPMPVPay *rtpmpvpay;
  guint avail, packet_len;
  GstClockTime timestamp, duration;
  GstFlowReturn ret;

  rtpmpvpay = GST_RTP_MPV_PAY (basepayload);

  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  duration = GST_BUFFER_DURATION (buffer);

  gst_adapter_push (rtpmpvpay->adapter, buffer);
  avail = gst_adapter_available (rtpmpvpay->adapter);

  /* Initialize new RTP payload */
  if (avail == 0) {
    rtpmpvpay->first_ts = timestamp;
    rtpmpvpay->duration = duration;
  }

  /* get packet length of previous data and this new data,
   * payload length includes a 4 byte MPEG video-specific header */
  packet_len = gst_rtp_buffer_calc_packet_len (4 + avail, 0, 0);

  if (gst_basertppayload_is_filled (basepayload,
          packet_len, rtpmpvpay->duration + duration)) {
    ret = gst_rtp_mpv_pay_flush (rtpmpvpay, timestamp, duration);
  } else {
    if (GST_CLOCK_TIME_IS_VALID (duration))
      rtpmpvpay->duration += duration;
    ret = GST_FLOW_OK;
  }
  return ret;
}
Example #8
/**
 * Utility function to handle transferring Gstreamer timestamp to OMX
 * timestamp.  This function handles discontinuities and timestamp
 * renormalization.
 *
 * @omx_buffer the destination OMX buffer for the timestamp
 * @buffer     the source Gstreamer buffer for the timestamp
 * @normalize  should this buffer be the one that we renormalize on
 *   (iff normalization is required)?  (i.e. with TI OMX, you should
 *   only re-normalize on a video buffer)
 */
gboolean
gst_goo_timestamp_gst2omx (
		OMX_BUFFERHEADERTYPE* omx_buffer,
		GstBuffer* buffer,
		gboolean normalize)
{
	GstClockTime timestamp = GST_BUFFER_TIMESTAMP (buffer);

	if (GST_GOO_UTIL_IS_DISCONT (buffer))
	{
		needs_normalization = TRUE;
		GST_DEBUG ("needs_normalization");
	}

	if (needs_normalization && normalize)
	{
		GST_INFO ("Setting OMX_BUFFER_STARTTIME..");
		omx_buffer->nFlags |= OMX_BUFFERFLAG_STARTTIME;
		omx_normalize_timestamp = GST2OMX_TIMESTAMP ((gint64)timestamp);
		needs_normalization = FALSE;
		GST_DEBUG ("omx_normalize_timestamp=%lld", omx_normalize_timestamp);
	}

	/* transfer timestamp to openmax */
	if (GST_CLOCK_TIME_IS_VALID (timestamp))
	{
		omx_buffer->nTimeStamp = GST2OMX_TIMESTAMP ((gint64)timestamp) - omx_normalize_timestamp;
		GST_INFO ("OMX timestamp = %lld (%lld - %lld)", omx_buffer->nTimeStamp, GST2OMX_TIMESTAMP ((gint64)timestamp), omx_normalize_timestamp);
		return TRUE;
	}
	else
	{
		GST_WARNING ("Invalid timestamp!");
		return FALSE;
	}
}
Example #9
/**
 * gst_timed_value_control_source_unset:
 * @self: the #GstTimedValueControlSource object
 * @timestamp: the time the control-change should be removed from
 *
 * Used to remove the value of the given controller-handled property at a
 * certain time.
 *
 * Returns: FALSE if the value couldn't be unset (i.e. not found), TRUE
 * otherwise.
 */
gboolean
gst_timed_value_control_source_unset (GstTimedValueControlSource * self,
    GstClockTime timestamp)
{
  GSequenceIter *iter;
  gboolean res = FALSE;
  GstControlPoint *cp = NULL;

  g_return_val_if_fail (GST_IS_TIMED_VALUE_CONTROL_SOURCE (self), FALSE);
  g_return_val_if_fail (GST_CLOCK_TIME_IS_VALID (timestamp), FALSE);

  g_mutex_lock (&self->lock);
  /* check if a control point for the timestamp exists */
  if (G_LIKELY (self->values) && (iter =
          g_sequence_lookup (self->values, &timestamp,
              (GCompareDataFunc) gst_control_point_find, NULL))) {

    /* Iter contains the iter right after timestamp, i.e.
     * we need to get the previous one and check the timestamp
     */
    cp = g_slice_dup (GstControlPoint, g_sequence_get (iter));
    g_sequence_remove (iter);
    self->nvalues--;
    self->valid_cache = FALSE;
    res = TRUE;
  }
  g_mutex_unlock (&self->lock);

  if (cp) {
    g_signal_emit (self,
        gst_timed_value_control_source_signals[VALUE_REMOVED_SIGNAL], 0, cp);
    g_slice_free (GstControlPoint, cp);
  }

  return res;
}
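From the caller's side this looks roughly as follows; a sketch assuming the 1.x controller API, with an interpolation control source standing in for any GstTimedValueControlSource subclass:

#include <gst/gst.h>
#include <gst/controller/gstinterpolationcontrolsource.h>

static void
demo_unset (void)
{
  GstControlSource *cs = gst_interpolation_control_source_new ();
  GstTimedValueControlSource *tvcs = (GstTimedValueControlSource *) cs;

  gst_timed_value_control_source_set (tvcs, 0, 0.0);
  gst_timed_value_control_source_set (tvcs, 5 * GST_SECOND, 1.0);

  /* unset succeeds only where a control point actually exists */
  g_assert (gst_timed_value_control_source_unset (tvcs, 5 * GST_SECOND));
  g_assert (!gst_timed_value_control_source_unset (tvcs, 3 * GST_SECOND));

  gst_object_unref (cs);
}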
Example #10
static GValue *
gst_direct_control_binding_get_value (GstControlBinding * _self,
    GstClockTime timestamp)
{
  GstDirectControlBinding *self = GST_DIRECT_CONTROL_BINDING (_self);
  GValue *dst_val = NULL;
  gdouble src_val;

  g_return_val_if_fail (GST_IS_DIRECT_CONTROL_BINDING (self), NULL);
  g_return_val_if_fail (GST_CLOCK_TIME_IS_VALID (timestamp), NULL);
  g_return_val_if_fail (GST_CONTROL_BINDING_PSPEC (self), NULL);

  /* get current value via control source */
  if (gst_control_source_get_value (self->cs, timestamp, &src_val)) {
    dst_val = g_new0 (GValue, 1);
    g_value_init (dst_val, G_PARAM_SPEC_VALUE_TYPE (_self->pspec));
    self->convert_g_value (self, src_val, dst_val);
  } else {
    GST_LOG ("no control value for property %s at ts %" GST_TIME_FORMAT,
        _self->name, GST_TIME_ARGS (timestamp));
  }

  return dst_val;
}
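get_value() is normally driven indirectly: the binding is attached to an object, and gst_object_sync_values() (as in the transform functions elsewhere on this page) pulls values through it. A sketch of the attachment, assuming a hypothetical volume element:

static void
attach_binding (GstElement * volume)
{
  GstControlSource *cs = gst_interpolation_control_source_new ();
  GstControlBinding *binding =
      gst_direct_control_binding_new (GST_OBJECT (volume), "volume", cs);

  /* the binding keeps its own reference to the control source */
  gst_object_add_control_binding (GST_OBJECT (volume), binding);
  gst_object_unref (cs);
}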
Example #11
static GstFlowReturn
gst_frame_positionner_transform_ip (GstBaseTransform * trans, GstBuffer * buf)
{
  GstFramePositionnerMeta *meta;
  GstFramePositionner *framepositionner = GST_FRAME_POSITIONNER (trans);
  GstClockTime timestamp = GST_BUFFER_TIMESTAMP (buf);

  if (GST_CLOCK_TIME_IS_VALID (timestamp)) {
    gst_object_sync_values (GST_OBJECT (trans), timestamp);
  }

  meta =
      (GstFramePositionnerMeta *) gst_buffer_add_meta (buf,
      gst_frame_positionner_get_info (), NULL);

  GST_OBJECT_LOCK (framepositionner);
  meta->alpha = framepositionner->alpha;
  meta->posx = framepositionner->posx;
  meta->posy = framepositionner->posy;
  meta->zorder = framepositionner->zorder;
  GST_OBJECT_UNLOCK (framepositionner);

  return GST_FLOW_OK;
}
Example #12
/**
 * ges_layer_add_asset:
 * @layer: a #GESLayer
 * @asset: The asset to extract the new clip from
 * @start: The start value to set on the new #GESClip
 * @inpoint: The inpoint value to set on the new #GESClip
 * @duration: The duration value to set on the new #GESClip
 * @track_types: The #GESTrackType to set on the new #GESClip
 *
 * Creates a #GESClip from @asset, adds it to @layer and
 * returns a reference to it.
 *
 * Returns: (transfer none): Created #GESClip
 */
GESClip *
ges_layer_add_asset (GESLayer * layer,
    GESAsset * asset, GstClockTime start, GstClockTime inpoint,
    GstClockTime duration, GESTrackType track_types)
{
  GESClip *clip;

  g_return_val_if_fail (GES_IS_LAYER (layer), NULL);
  g_return_val_if_fail (GES_IS_ASSET (asset), NULL);
  g_return_val_if_fail (g_type_is_a (ges_asset_get_extractable_type
          (asset), GES_TYPE_CLIP), NULL);

  GST_DEBUG_OBJECT (layer, "Adding asset %s with: start: %" GST_TIME_FORMAT
      " inpoint: %" GST_TIME_FORMAT " duration: %" GST_TIME_FORMAT
      " track types: %d (%s)", ges_asset_get_id (asset), GST_TIME_ARGS (start),
      GST_TIME_ARGS (inpoint), GST_TIME_ARGS (duration), track_types,
      ges_track_type_name (track_types));

  clip = GES_CLIP (ges_asset_extract (asset, NULL));
  _set_start0 (GES_TIMELINE_ELEMENT (clip), start);
  _set_inpoint0 (GES_TIMELINE_ELEMENT (clip), inpoint);
  if (track_types != GES_TRACK_TYPE_UNKNOWN)
    ges_clip_set_supported_formats (clip, track_types);

  if (GST_CLOCK_TIME_IS_VALID (duration)) {
    _set_duration0 (GES_TIMELINE_ELEMENT (clip), duration);
  }

  if (!ges_layer_add_clip (layer, clip)) {
    gst_object_unref (clip);

    return NULL;
  }

  return clip;
}
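A hedged usage sketch for the function above: load a media file as an asset and drop its first five seconds into a fresh layer. The URI argument is a placeholder and error handling is minimal:

#include <ges/ges.h>

static GESClip *
add_five_seconds (GESTimeline * timeline, const gchar * uri)
{
  GError *error = NULL;
  GESLayer *layer = ges_timeline_append_layer (timeline);
  GESUriClipAsset *asset = ges_uri_clip_asset_request_sync (uri, &error);

  if (asset == NULL) {
    g_printerr ("could not load %s: %s\n", uri, error->message);
    g_clear_error (&error);
    return NULL;
  }

  /* passing GST_CLOCK_TIME_NONE as duration would skip _set_duration0()
   * above and keep the asset's natural length */
  return ges_layer_add_asset (layer, GES_ASSET (asset),
      0, 0, 5 * GST_SECOND, GES_TRACK_TYPE_UNKNOWN);
}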
Example #13
/* GstBaseTransform vmethod implementations */
static GstFlowReturn
gst_audio_fx_base_iir_filter_transform_ip (GstBaseTransform * base,
    GstBuffer * buf)
{
  GstAudioFXBaseIIRFilter *filter = GST_AUDIO_FX_BASE_IIR_FILTER (base);
  guint num_samples;
  GstClockTime timestamp, stream_time;
  GstMapInfo map;

  timestamp = GST_BUFFER_TIMESTAMP (buf);
  stream_time =
      gst_segment_to_stream_time (&base->segment, GST_FORMAT_TIME, timestamp);

  GST_DEBUG_OBJECT (filter, "sync to %" GST_TIME_FORMAT,
      GST_TIME_ARGS (timestamp));

  if (GST_CLOCK_TIME_IS_VALID (stream_time))
    gst_object_sync_values (GST_OBJECT (filter), stream_time);

  gst_buffer_map (buf, &map, GST_MAP_READWRITE);
  num_samples = map.size / GST_AUDIO_FILTER_BPS (filter);

  g_mutex_lock (&filter->lock);
  if (filter->a == NULL || filter->b == NULL) {
    g_warn_if_fail (filter->a != NULL && filter->b != NULL);
    gst_buffer_unmap (buf, &map);
    g_mutex_unlock (&filter->lock);
    return GST_FLOW_ERROR;
  }
  filter->process (filter, map.data, num_samples);
  g_mutex_unlock (&filter->lock);

  gst_buffer_unmap (buf, &map);

  return GST_FLOW_OK;
}
Example #14
static gboolean
gst_v4l2_video_dec_decide_allocation (GstVideoDecoder * decoder,
    GstQuery * query)
{
  GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);
  GstClockTime latency;
  gboolean ret = FALSE;

  if (gst_v4l2_object_decide_allocation (self->v4l2capture, query))
    ret = GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation (decoder,
        query);

  if (GST_CLOCK_TIME_IS_VALID (self->v4l2capture->duration)) {
    latency = self->v4l2capture->min_buffers * self->v4l2capture->duration;
    GST_DEBUG_OBJECT (self, "Setting latency: %" GST_TIME_FORMAT " (%"
        G_GUINT32_FORMAT " * %" G_GUINT64_FORMAT, GST_TIME_ARGS (latency),
        self->v4l2capture->min_buffers, self->v4l2capture->duration);
    gst_video_decoder_set_latency (decoder, latency, latency);
  } else {
    GST_WARNING_OBJECT (self, "Duration invalid, not setting latency");
  }

  return ret;
}
Example #15
static inline gboolean
ges_track_object_set_duration_internal (GESTrackObject * object,
    guint64 duration)
{
  GESTrackObjectPrivate *priv = object->priv;

  GST_DEBUG ("object:%p, duration:%" GST_TIME_FORMAT,
      object, GST_TIME_ARGS (duration));

  if (GST_CLOCK_TIME_IS_VALID (priv->maxduration) &&
      duration > object->inpoint + priv->maxduration)
    duration = priv->maxduration - object->inpoint;

  if (priv->gnlobject != NULL) {
    if (G_UNLIKELY (duration == object->duration))
      return FALSE;

    g_object_set (priv->gnlobject, "duration", duration,
        "media-duration", duration, NULL);
  } else
    priv->pending_duration = duration;

  return TRUE;
}
Example #16
static GstFlowReturn
gst_mim_dec_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
  GstMimDec *mimdec = GST_MIM_DEC (parent);
  GstBuffer *out_buf;
  const guchar *header, *frame_body;
  guint32 fourcc;
  guint16 header_size;
  gint width, height;
  GstCaps *caps;
  GstFlowReturn res = GST_FLOW_OK;
  GstClockTime in_time = GST_BUFFER_TIMESTAMP (buf);
  GstEvent *event = NULL;
  gboolean result = TRUE;
  guint32 payload_size;
  guint32 current_ts;
  GstMapInfo map;

  gst_adapter_push (mimdec->adapter, buf);


  /* do we have enough bytes to read a header */
  while (gst_adapter_available (mimdec->adapter) >= 24) {
    header = gst_adapter_map (mimdec->adapter, 24);
    header_size = header[0];
    if (header_size != 24) {
      gst_adapter_unmap (mimdec->adapter);
      gst_adapter_flush (mimdec->adapter, 24);
      GST_ELEMENT_ERROR (mimdec, STREAM, DECODE, (NULL),
          ("invalid frame: header size %d incorrect", header_size));
      return GST_FLOW_ERROR;
    }

    if (header[1] == 1) {
      /* This is a paused frame, skip it */
      gst_adapter_unmap (mimdec->adapter);
      gst_adapter_flush (mimdec->adapter, 24);
      continue;
    }

    fourcc = GUINT32_FROM_LE (*((guint32 *) (header + 12)));
    if (GST_MAKE_FOURCC ('M', 'L', '2', '0') != fourcc) {
      gst_adapter_unmap (mimdec->adapter);
      gst_adapter_flush (mimdec->adapter, 24);
      GST_ELEMENT_ERROR (mimdec, STREAM, WRONG_TYPE, (NULL),
          ("invalid frame: unknown FOURCC code %X (%" GST_FOURCC_FORMAT ")",
              fourcc, GST_FOURCC_ARGS (fourcc)));
      return GST_FLOW_ERROR;
    }

    payload_size = GUINT32_FROM_LE (*((guint32 *) (header + 8)));

    current_ts = GUINT32_FROM_LE (*((guint32 *) (header + 20)));

    gst_adapter_unmap (mimdec->adapter);

    GST_LOG_OBJECT (mimdec, "Got packet, payload size %d", payload_size);

    if (gst_adapter_available (mimdec->adapter) < payload_size + 24)
      return GST_FLOW_OK;

    /* We have a whole packet and have read the header, lets flush it out */
    gst_adapter_flush (mimdec->adapter, 24);

    frame_body = gst_adapter_map (mimdec->adapter, payload_size);

    if (mimdec->buffer_size < 0) {
      /* Check if it's a keyframe, otherwise skip it */
      if (GUINT32_FROM_LE (*((guint32 *) (frame_body + 12))) != 0) {
        gst_adapter_unmap (mimdec->adapter);
        gst_adapter_flush (mimdec->adapter, payload_size);
        return GST_FLOW_OK;
      }

      if (!mimic_decoder_init (mimdec->dec, frame_body)) {
        gst_adapter_unmap (mimdec->adapter);
        gst_adapter_flush (mimdec->adapter, payload_size);
        GST_ELEMENT_ERROR (mimdec, LIBRARY, INIT, (NULL),
            ("mimic_decoder_init error"));
        return GST_FLOW_ERROR;
      }

      if (!mimic_get_property (mimdec->dec, "buffer_size",
              &mimdec->buffer_size)) {
        gst_adapter_unmap (mimdec->adapter);
        gst_adapter_flush (mimdec->adapter, payload_size);
        GST_ELEMENT_ERROR (mimdec, LIBRARY, INIT, (NULL),
            ("mimic_get_property('buffer_size') error"));
        return GST_FLOW_ERROR;
      }

      mimic_get_property (mimdec->dec, "width", &width);
      mimic_get_property (mimdec->dec, "height", &height);
      GST_DEBUG_OBJECT (mimdec,
          "Initialised decoder with %d x %d payload size %d buffer_size %d",
          width, height, payload_size, mimdec->buffer_size);
      caps = gst_caps_new_simple ("video/x-raw",
          "format", G_TYPE_STRING, "RGB",
          "framerate", GST_TYPE_FRACTION, 0, 1,
          "width", G_TYPE_INT, width, "height", G_TYPE_INT, height, NULL);
      gst_pad_set_caps (mimdec->srcpad, caps);
      gst_caps_unref (caps);
    }


    if (mimdec->need_segment) {
      GstSegment segment;

      gst_segment_init (&segment, GST_FORMAT_TIME);

      if (GST_CLOCK_TIME_IS_VALID (in_time))
        segment.start = in_time;
      else
        segment.start = current_ts * GST_MSECOND;
      event = gst_event_new_segment (&segment);
    }
    mimdec->need_segment = FALSE;

    if (event)
      result = gst_pad_push_event (mimdec->srcpad, event);
    event = NULL;

    if (!result) {
      GST_WARNING_OBJECT (mimdec, "gst_pad_push_event failed");
      return GST_FLOW_ERROR;
    }


    out_buf = gst_buffer_new_allocate (NULL, mimdec->buffer_size, NULL);
    gst_buffer_map (out_buf, &map, GST_MAP_READWRITE);

    if (!mimic_decode_frame (mimdec->dec, frame_body, map.data)) {
      GST_WARNING_OBJECT (mimdec, "mimic_decode_frame error\n");

      gst_adapter_flush (mimdec->adapter, payload_size);

      gst_buffer_unmap (out_buf, &map);
      gst_buffer_unref (out_buf);
      GST_ELEMENT_ERROR (mimdec, STREAM, DECODE, (NULL),
          ("mimic_decode_frame error"));
      return GST_FLOW_ERROR;
    }
    gst_buffer_unmap (out_buf, &map);
    gst_adapter_flush (mimdec->adapter, payload_size);

    if (GST_CLOCK_TIME_IS_VALID (in_time))
      GST_BUFFER_TIMESTAMP (out_buf) = in_time;
    else
      GST_BUFFER_TIMESTAMP (out_buf) = current_ts * GST_MSECOND;

    res = gst_pad_push (mimdec->srcpad, out_buf);

    if (res != GST_FLOW_OK)
      break;
  }

  return res;
}
Example #17
/**
 * @brief Chain function, this function does the actual processing.
 */
static GstFlowReturn
gst_tensor_aggregator_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
  GstTensorAggregator *self;
  GstFlowReturn ret = GST_FLOW_OK;
  GstAdapter *adapter;
  gsize avail, buf_size, frame_size, out_size;
  guint frames_in, frames_out, frames_flush;
  GstClockTime duration;

  self = GST_TENSOR_AGGREGATOR (parent);
  g_assert (self->tensor_configured);

  buf_size = gst_buffer_get_size (buf);
  g_return_val_if_fail (buf_size > 0, GST_FLOW_ERROR);

  frames_in = self->frames_in;
  frames_out = self->frames_out;
  frames_flush = self->frames_flush;
  frame_size = buf_size / frames_in;

  if (frames_in == frames_out) {
    /** push the incoming buffer (do concat if needed) */
    return gst_tensor_aggregator_push (self, buf, frame_size);
  }

  adapter = self->adapter;
  g_assert (adapter != NULL);

  duration = GST_BUFFER_DURATION (buf);
  if (GST_CLOCK_TIME_IS_VALID (duration)) {
    /** supposed same duration for incoming buffer */
    duration = gst_util_uint64_scale_int (duration, frames_out, frames_in);
  }

  gst_adapter_push (adapter, buf);

  out_size = frame_size * frames_out;
  g_assert (out_size > 0);

  while ((avail = gst_adapter_available (adapter)) >= out_size &&
      ret == GST_FLOW_OK) {
    GstBuffer *outbuf;
    GstClockTime pts, dts;
    guint64 pts_dist, dts_dist;
    gsize flush;

    pts = gst_adapter_prev_pts (adapter, &pts_dist);
    dts = gst_adapter_prev_dts (adapter, &dts_dist);

    /**
     * Update timestamp.
     * If frames-in is larger than frames-out, the same timestamp (pts and dts) would be returned.
     */
    if (frames_in > 1) {
      gint fn, fd;

      fn = self->in_config.rate_n;
      fd = self->in_config.rate_d;

      if (fn > 0 && fd > 0) {
        if (GST_CLOCK_TIME_IS_VALID (pts)) {
          pts +=
              gst_util_uint64_scale_int (pts_dist * fd, GST_SECOND,
              fn * frame_size);
        }

        if (GST_CLOCK_TIME_IS_VALID (dts)) {
          dts +=
              gst_util_uint64_scale_int (dts_dist * fd, GST_SECOND,
              fn * frame_size);
        }
      }
    }

    outbuf = gst_adapter_get_buffer (adapter, out_size);
    outbuf = gst_buffer_make_writable (outbuf);

    /** set timestamp */
    GST_BUFFER_PTS (outbuf) = pts;
    GST_BUFFER_DTS (outbuf) = dts;
    GST_BUFFER_DURATION (outbuf) = duration;

    ret = gst_tensor_aggregator_push (self, outbuf, frame_size);

    /** flush data */
    if (frames_flush > 0) {
      flush = frame_size * frames_flush;

      if (flush > avail) {
        /**
         * @todo flush data
         * Invalid state, tried to flush large size.
         * We have to determine how to handle this case. (flush the out-size or all available bytes)
         * Now all available bytes in adapter will be flushed.
         */
        flush = avail;
      }
    } else {
      flush = out_size;
    }

    gst_adapter_flush (adapter, flush);
  }

  return ret;
}
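The PTS/DTS interpolation above builds on gst_adapter_prev_pts(): it returns the last PTS seen before the adapter's current read position, plus the byte distance from where that PTS applied. The pattern in isolation, as a sketch where byte_rate (bytes of media per second) is an assumed parameter:

static GstClockTime
interpolated_pts (GstAdapter * adapter, gint byte_rate)
{
  guint64 dist;
  GstClockTime pts = gst_adapter_prev_pts (adapter, &dist);

  /* advance the recorded PTS by the bytes consumed since it applied */
  if (GST_CLOCK_TIME_IS_VALID (pts) && byte_rate > 0)
    pts += gst_util_uint64_scale_int (dist, GST_SECOND, byte_rate);

  return pts;
}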
Example #18
static void
gst_hls_demux_loop (GstHLSDemux * demux)
{
  GstBuffer *buf;
  GstFlowReturn ret;

  /* Loop for the source pad task. The task is started when we have
   * received the main playlist from the source element. It tries first to
   * cache the first fragments and then it waits until it has more data in the
   * queue. This task is woken up when we push a new fragment to the queue or
   * when we reached the end of the playlist  */

  if (G_UNLIKELY (demux->need_cache)) {
    if (!gst_hls_demux_cache_fragments (demux))
      goto cache_error;

    /* we can start now the updates thread */
    gst_hls_demux_start_update (demux);
    GST_INFO_OBJECT (demux, "First fragments cached successfully");
  }

  if (g_queue_is_empty (demux->queue)) {
    if (demux->end_of_playlist)
      goto end_of_playlist;

    goto empty_queue;
  }

  buf = g_queue_pop_head (demux->queue);

  /* Figure out if we need to create/switch pads */
  if (G_UNLIKELY (!demux->srcpad
          || GST_BUFFER_CAPS (buf) != GST_PAD_CAPS (demux->srcpad)
          || demux->need_segment)) {
    switch_pads (demux, GST_BUFFER_CAPS (buf));
    demux->need_segment = TRUE;
  }
  if (demux->need_segment) {
    /* And send a newsegment */
    GST_DEBUG_OBJECT (demux, "Sending new-segment. Segment start:%"
        GST_TIME_FORMAT, GST_TIME_ARGS (demux->position));
    gst_pad_push_event (demux->srcpad,
        gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, demux->position,
            GST_CLOCK_TIME_NONE, demux->position));
    demux->need_segment = FALSE;
  }

  if (GST_CLOCK_TIME_IS_VALID (GST_BUFFER_DURATION (buf)))
    demux->position += GST_BUFFER_DURATION (buf);

  ret = gst_pad_push (demux->srcpad, buf);
  if (ret != GST_FLOW_OK)
    goto error;

  return;

end_of_playlist:
  {
    GST_DEBUG_OBJECT (demux, "Reached end of playlist, sending EOS");
    gst_pad_push_event (demux->srcpad, gst_event_new_eos ());
    gst_hls_demux_stop (demux);
    return;
  }

cache_error:
  {
    gst_task_pause (demux->task);
    if (!demux->cancelled) {
      GST_ELEMENT_ERROR (demux, RESOURCE, NOT_FOUND,
          ("Could not cache the first fragments"), (NULL));
      gst_hls_demux_stop (demux);
    }
    return;
  }

error:
  {
    /* FIXME: handle error */
    GST_DEBUG_OBJECT (demux, "error, stopping task");
    gst_hls_demux_stop (demux);
    return;
  }

empty_queue:
  {
    gst_task_pause (demux->task);
    return;
  }
}
Example #19
static gboolean
gst_hls_demux_src_query (GstPad * pad, GstQuery * query)
{
  GstHLSDemux *hlsdemux;
  gboolean ret = FALSE;

  if (query == NULL)
    return FALSE;

  hlsdemux = GST_HLS_DEMUX (gst_pad_get_element_private (pad));

  switch (query->type) {
    case GST_QUERY_DURATION:{
      GstClockTime duration = -1;
      GstFormat fmt;

      gst_query_parse_duration (query, &fmt, NULL);
      if (fmt == GST_FORMAT_TIME) {
        duration = gst_m3u8_client_get_duration (hlsdemux->client);
        if (GST_CLOCK_TIME_IS_VALID (duration) && duration > 0) {
          gst_query_set_duration (query, GST_FORMAT_TIME, duration);
          ret = TRUE;
        }
      }
      GST_INFO_OBJECT (hlsdemux, "GST_QUERY_DURATION returns %s with duration %"
          GST_TIME_FORMAT, ret ? "TRUE" : "FALSE", GST_TIME_ARGS (duration));
      break;
    }
    case GST_QUERY_URI:
      if (hlsdemux->client) {
        /* FIXME: Do we answer with the variant playlist, with the current
         * playlist or the URI of the last downloaded fragment? */
        gst_query_set_uri (query, hlsdemux->client->current->uri);
        ret = TRUE;
      }
      break;
    case GST_QUERY_SEEKING:{
      GstFormat fmt;
      gint64 stop = -1;

      gst_query_parse_seeking (query, &fmt, NULL, NULL, NULL);
      GST_INFO_OBJECT (hlsdemux, "Received GST_QUERY_SEEKING with format %d",
          fmt);
      if (fmt == GST_FORMAT_TIME) {
        GstClockTime duration;

        duration = gst_m3u8_client_get_duration (hlsdemux->client);
        if (GST_CLOCK_TIME_IS_VALID (duration) && duration > 0)
          stop = duration;

        gst_query_set_seeking (query, fmt,
            !gst_m3u8_client_is_live (hlsdemux->client), 0, stop);
        ret = TRUE;
        GST_INFO_OBJECT (hlsdemux, "GST_QUERY_SEEKING returning with stop : %"
            GST_TIME_FORMAT, GST_TIME_ARGS (stop));
      }
      break;
    }
    default:
      /* Don't forward queries upstream because of the special nature of this
       * "demuxer", which relies on the upstream element only to be fed with the
       * first playlist */
      break;
  }

  return ret;
}
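From the application side, the duration branch above is what ends up answering a query like this; a sketch in the 0.10 query API that this demuxer targets, with pipeline standing in for any element in front of it:

static void
print_duration (GstElement * pipeline)
{
  GstFormat fmt = GST_FORMAT_TIME;
  gint64 duration = -1;

  /* only report durations the demuxer marked as valid */
  if (gst_element_query_duration (pipeline, &fmt, &duration) &&
      GST_CLOCK_TIME_IS_VALID ((GstClockTime) duration))
    g_print ("duration: %" GST_TIME_FORMAT "\n", GST_TIME_ARGS (duration));
}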
Example #20
GstFlowReturn
gst_ks_video_device_read_frame (GstKsVideoDevice * self, guint8 * buf,
    gulong buf_size, gulong * bytes_read, GstClockTime * presentation_time,
    gulong * error_code, gchar ** error_str)
{
  GstKsVideoDevicePrivate *priv = GST_KS_VIDEO_DEVICE_GET_PRIVATE (self);
  guint req_idx;
  DWORD wait_ret;
  BOOL success;
  DWORD bytes_returned;

  g_assert (priv->cur_media_type != NULL);

  /* First time we're called, submit the requests. */
  if (G_UNLIKELY (!priv->requests_submitted)) {
    priv->requests_submitted = TRUE;

    for (req_idx = 0; req_idx < priv->num_requests; req_idx++) {
      ReadRequest *req = &g_array_index (priv->requests, ReadRequest, req_idx);

      if (!gst_ks_video_device_request_frame (self, req, error_code, error_str))
        goto error_request_failed;
    }
  }

  do {
    /* Wait for either a request to complete, a cancel or a timeout */
    wait_ret = WaitForMultipleObjects (priv->request_events->len,
        (HANDLE *) priv->request_events->data, FALSE, READ_TIMEOUT);
    if (wait_ret == WAIT_TIMEOUT)
      goto error_timeout;
    else if (wait_ret == WAIT_FAILED)
      goto error_wait;

    /* Stopped? */
    if (WaitForSingleObject (priv->cancel_event, 0) == WAIT_OBJECT_0)
      goto error_cancel;

    *bytes_read = 0;

    /* Find the last ReadRequest that finished and get the result, immediately
     * re-issuing each request that has completed. */
    for (req_idx = wait_ret - WAIT_OBJECT_0;
        req_idx < priv->num_requests; req_idx++) {
      ReadRequest *req = &g_array_index (priv->requests, ReadRequest, req_idx);

      /*
       * Completed? WaitForMultipleObjects() returns the lowest index if
       * multiple objects are in the signaled state, and we know that requests
       * are processed one by one so there's no point in looking further once
       * we've found the first that's non-signaled.
       */
      if (WaitForSingleObject (req->overlapped.hEvent, 0) != WAIT_OBJECT_0)
        break;

      success = GetOverlappedResult (priv->pin_handle, &req->overlapped,
          &bytes_returned, TRUE);

      ResetEvent (req->overlapped.hEvent);

      if (success) {
        KSSTREAM_HEADER *hdr = &req->params.header;
        KS_FRAME_INFO *frame_info = &req->params.frame_info;
        GstClockTime timestamp = GST_CLOCK_TIME_NONE;
        GstClockTime duration = GST_CLOCK_TIME_NONE;

        if (hdr->OptionsFlags & KSSTREAM_HEADER_OPTIONSF_TIMEVALID)
          timestamp = hdr->PresentationTime.Time * 100;

        if (hdr->OptionsFlags & KSSTREAM_HEADER_OPTIONSF_DURATIONVALID)
          duration = hdr->Duration * 100;

        /* Assume it's a good frame */
        *bytes_read = hdr->DataUsed;

        if (G_LIKELY (presentation_time != NULL))
          *presentation_time = timestamp;

        if (G_UNLIKELY (GST_DEBUG_IS_ENABLED ())) {
          gchar *options_flags_str =
              ks_options_flags_to_string (hdr->OptionsFlags);

          GST_DEBUG ("PictureNumber=%" G_GUINT64_FORMAT ", DropCount=%"
              G_GUINT64_FORMAT ", PresentationTime=%" GST_TIME_FORMAT
              ", Duration=%" GST_TIME_FORMAT ", OptionsFlags=%s: %d bytes",
              frame_info->PictureNumber, frame_info->DropCount,
              GST_TIME_ARGS (timestamp), GST_TIME_ARGS (duration),
              options_flags_str, hdr->DataUsed);

          g_free (options_flags_str);
        }

        /* Protect against old frames. This should never happen, see previous
         * comment on last_timestamp. */
        if (G_LIKELY (GST_CLOCK_TIME_IS_VALID (timestamp))) {
          if (G_UNLIKELY (GST_CLOCK_TIME_IS_VALID (priv->last_timestamp) &&
                  timestamp < priv->last_timestamp)) {
            GST_WARNING ("got an old frame (last_timestamp=%" GST_TIME_FORMAT
                ", timestamp=%" GST_TIME_FORMAT ")",
                GST_TIME_ARGS (priv->last_timestamp),
                GST_TIME_ARGS (timestamp));
            *bytes_read = 0;
          } else {
            priv->last_timestamp = timestamp;
          }
        }

        if (*bytes_read > 0) {
          /* Grab the frame data */
          g_assert (buf_size >= hdr->DataUsed);
          memcpy (buf, req->buf, hdr->DataUsed);

          if (priv->is_mjpeg) {
            /*
             * Workaround for cameras/drivers that intermittently provide us
             * with incomplete or corrupted MJPEG frames.
             *
             * Happens with for instance Microsoft LifeCam VX-7000.
             */

            gboolean valid = FALSE;
            guint padding = 0;

            /* JFIF SOI marker */
            if (*bytes_read > MJPEG_MAX_PADDING
                && buf[0] == 0xff && buf[1] == 0xd8) {
              guint8 *p = buf + *bytes_read - 2;

              /* JFIF EOI marker (but skip any padding) */
              while (padding < MJPEG_MAX_PADDING - 1 - 2 && !valid) {
                if (p[0] == 0xff && p[1] == 0xd9) {
                  valid = TRUE;
                } else {
                  padding++;
                  p--;
                }
              }
            }

            if (valid)
              *bytes_read -= padding;
            else
              *bytes_read = 0;
          }
        }
      } else if (GetLastError () != ERROR_OPERATION_ABORTED)
        goto error_get_result;

      /* Submit a new request immediately */
      if (!gst_ks_video_device_request_frame (self, req, error_code, error_str))
        goto error_request_failed;
    }
  } while (*bytes_read == 0);

  return GST_FLOW_OK;

  /* ERRORS */
error_request_failed:
  {
    return GST_FLOW_ERROR;
  }
error_timeout:
  {
    GST_DEBUG ("IOCTL_KS_READ_STREAM timed out");

    if (error_code != NULL)
      *error_code = 0;
    if (error_str != NULL)
      *error_str = NULL;

    return GST_FLOW_UNEXPECTED;
  }
error_wait:
  {
    gst_ks_video_device_parse_win32_error ("WaitForMultipleObjects",
        GetLastError (), error_code, error_str);

    return GST_FLOW_ERROR;
  }
error_cancel:
  {
    if (error_code != NULL)
      *error_code = 0;
    if (error_str != NULL)
      *error_str = NULL;

    return GST_FLOW_WRONG_STATE;
  }
error_get_result:
  {
    gst_ks_video_device_parse_win32_error ("GetOverlappedResult",
        GetLastError (), error_code, error_str);

    return GST_FLOW_ERROR;
  }
}
Example #21
static GstFlowReturn
gst_shape_wipe_video_sink_chain (GstPad * pad, GstObject * parent,
    GstBuffer * buffer)
{
  GstShapeWipe *self = GST_SHAPE_WIPE (parent);
  GstFlowReturn ret = GST_FLOW_OK;
  GstBuffer *mask = NULL, *outbuf = NULL;
  GstClockTime timestamp;
  gboolean new_outbuf = FALSE;
  GstVideoFrame inframe, outframe, maskframe;

  if (G_UNLIKELY (GST_VIDEO_INFO_FORMAT (&self->vinfo) ==
          GST_VIDEO_FORMAT_UNKNOWN))
    goto not_negotiated;

  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  timestamp =
      gst_segment_to_stream_time (&self->segment, GST_FORMAT_TIME, timestamp);

  if (GST_CLOCK_TIME_IS_VALID (timestamp))
    gst_object_sync_values (GST_OBJECT (self), timestamp);

  GST_LOG_OBJECT (self,
      "Blending buffer with timestamp %" GST_TIME_FORMAT " at position %f",
      GST_TIME_ARGS (timestamp), self->mask_position);

  g_mutex_lock (&self->mask_mutex);
  if (self->shutdown)
    goto shutdown;

  if (!self->mask)
    g_cond_wait (&self->mask_cond, &self->mask_mutex);

  if (self->mask == NULL || self->shutdown) {
    goto shutdown;
  } else {
    mask = gst_buffer_ref (self->mask);
  }
  g_mutex_unlock (&self->mask_mutex);

  if (!gst_shape_wipe_do_qos (self, GST_BUFFER_TIMESTAMP (buffer)))
    goto qos;

  /* Try to blend in place; if that's not possible
   * get a new buffer from downstream. */
  if (!gst_buffer_is_writable (buffer)) {
    outbuf = gst_buffer_new_allocate (NULL, gst_buffer_get_size (buffer), NULL);
    gst_buffer_copy_into (outbuf, buffer, GST_BUFFER_COPY_METADATA, 0, -1);
    new_outbuf = TRUE;
  } else {
    outbuf = buffer;
  }

  gst_video_frame_map (&inframe, &self->vinfo, buffer,
      new_outbuf ? GST_MAP_READ : GST_MAP_READWRITE);
  gst_video_frame_map (&outframe, &self->vinfo, outbuf,
      new_outbuf ? GST_MAP_WRITE : GST_MAP_READWRITE);

  gst_video_frame_map (&maskframe, &self->minfo, mask, GST_MAP_READ);

  switch (GST_VIDEO_INFO_FORMAT (&self->vinfo)) {
    case GST_VIDEO_FORMAT_AYUV:
    case GST_VIDEO_FORMAT_ARGB:
    case GST_VIDEO_FORMAT_ABGR:
      if (self->mask_bpp == 16)
        gst_shape_wipe_blend_argb_16 (self, &inframe, &maskframe, &outframe);
      else
        gst_shape_wipe_blend_argb_8 (self, &inframe, &maskframe, &outframe);
      break;
    case GST_VIDEO_FORMAT_BGRA:
    case GST_VIDEO_FORMAT_RGBA:
      if (self->mask_bpp == 16)
        gst_shape_wipe_blend_bgra_16 (self, &inframe, &maskframe, &outframe);
      else
        gst_shape_wipe_blend_bgra_8 (self, &inframe, &maskframe, &outframe);
      break;
    default:
      g_assert_not_reached ();
      break;
  }

  gst_video_frame_unmap (&outframe);
  gst_video_frame_unmap (&inframe);

  gst_video_frame_unmap (&maskframe);

  gst_buffer_unref (mask);
  if (new_outbuf)
    gst_buffer_unref (buffer);

  ret = gst_pad_push (self->srcpad, outbuf);
  if (G_UNLIKELY (ret != GST_FLOW_OK))
    goto push_failed;

  return ret;

  /* Errors */
not_negotiated:
  {
    GST_ERROR_OBJECT (self, "No valid caps yet");
    gst_buffer_unref (buffer);
    return GST_FLOW_NOT_NEGOTIATED;
  }
shutdown:
  {
    GST_DEBUG_OBJECT (self, "Shutting down");
    gst_buffer_unref (buffer);
    return GST_FLOW_FLUSHING;
  }
qos:
  {
    GST_DEBUG_OBJECT (self, "Dropping buffer because of QoS");
    gst_buffer_unref (buffer);
    gst_buffer_unref (mask);
    return GST_FLOW_OK;
  }
push_failed:
  {
    GST_ERROR_OBJECT (self, "Pushing buffer downstream failed: %s",
        gst_flow_get_name (ret));
    return ret;
  }
}
Example #22
static GstFlowReturn
gst_two_lame_chain (GstPad * pad, GstBuffer * buf)
{
    GstTwoLame *twolame;
    guchar *mp3_data;
    gint mp3_buffer_size, mp3_size;
    gint64 duration;
    GstFlowReturn result;
    gint num_samples;
    guint8 *data;
    guint size;

    twolame = GST_TWO_LAME (GST_PAD_PARENT (pad));

    GST_LOG_OBJECT (twolame, "entered chain");

    if (!twolame->setup)
        goto not_setup;

    data = GST_BUFFER_DATA (buf);
    size = GST_BUFFER_SIZE (buf);

    if (twolame->float_input)
        num_samples = size / 4;
    else
        num_samples = size / 2;

    /* allocate space for output */
    mp3_buffer_size = 1.25 * num_samples + 16384;
    mp3_data = g_malloc (mp3_buffer_size);

    if (twolame->num_channels == 1) {
        if (twolame->float_input)
            mp3_size = twolame_encode_buffer_float32 (twolame->glopts,
                       (float *) data,
                       (float *) data, num_samples, mp3_data, mp3_buffer_size);
        else
            mp3_size = twolame_encode_buffer (twolame->glopts,
                                              (short int *) data,
                                              (short int *) data, num_samples, mp3_data, mp3_buffer_size);
    } else {
        if (twolame->float_input)
            mp3_size = twolame_encode_buffer_float32_interleaved (twolame->glopts,
                       (float *) data,
                       num_samples / twolame->num_channels, mp3_data, mp3_buffer_size);
        else
            mp3_size = twolame_encode_buffer_interleaved (twolame->glopts,
                       (short int *) data,
                       num_samples / twolame->num_channels, mp3_data, mp3_buffer_size);
    }

    GST_LOG_OBJECT (twolame, "encoded %d bytes of audio to %d bytes of mp3",
                    size, mp3_size);

    if (twolame->float_input)
        duration = gst_util_uint64_scale_int (size, GST_SECOND,
                                              4 * twolame->samplerate * twolame->num_channels);
    else
        duration = gst_util_uint64_scale_int (size, GST_SECOND,
                                              2 * twolame->samplerate * twolame->num_channels);

    if (GST_BUFFER_DURATION (buf) != GST_CLOCK_TIME_NONE &&
            GST_BUFFER_DURATION (buf) != duration) {
        GST_DEBUG_OBJECT (twolame, "incoming buffer had incorrect duration %"
                          GST_TIME_FORMAT ", outgoing buffer will have correct duration %"
                          GST_TIME_FORMAT,
                          GST_TIME_ARGS (GST_BUFFER_DURATION (buf)), GST_TIME_ARGS (duration));
    }

    if (twolame->last_ts == GST_CLOCK_TIME_NONE) {
        twolame->last_ts = GST_BUFFER_TIMESTAMP (buf);
        twolame->last_offs = GST_BUFFER_OFFSET (buf);
        twolame->last_duration = duration;
    } else {
        twolame->last_duration += duration;
    }

    gst_buffer_unref (buf);

    if (mp3_size < 0) {
        g_warning ("error %d", mp3_size);
    }

    if (mp3_size > 0) {
        GstBuffer *outbuf;

        outbuf = gst_buffer_new ();
        GST_BUFFER_DATA (outbuf) = mp3_data;
        GST_BUFFER_MALLOCDATA (outbuf) = mp3_data;
        GST_BUFFER_SIZE (outbuf) = mp3_size;
        GST_BUFFER_TIMESTAMP (outbuf) = twolame->last_ts;
        GST_BUFFER_OFFSET (outbuf) = twolame->last_offs;
        GST_BUFFER_DURATION (outbuf) = twolame->last_duration;
        gst_buffer_set_caps (outbuf, GST_PAD_CAPS (twolame->srcpad));

        result = gst_pad_push (twolame->srcpad, outbuf);
        twolame->last_flow = result;
        if (result != GST_FLOW_OK) {
            GST_DEBUG_OBJECT (twolame, "flow return: %s", gst_flow_get_name (result));
        }

        if (GST_CLOCK_TIME_IS_VALID (twolame->last_ts))
            twolame->eos_ts = twolame->last_ts + twolame->last_duration;
        else
            twolame->eos_ts = GST_CLOCK_TIME_NONE;
        twolame->last_ts = GST_CLOCK_TIME_NONE;
    } else {
        g_free (mp3_data);
        result = GST_FLOW_OK;
    }

    return result;

    /* ERRORS */
not_setup:
    {
        gst_buffer_unref (buf);
        GST_ELEMENT_ERROR (twolame, CORE, NEGOTIATION, (NULL),
                           ("encoder not initialized (input is not audio?)"));
        return GST_FLOW_ERROR;
    }
}
Example #23
static gboolean
gst_ks_video_src_timestamp_buffer (GstKsVideoSrc * self, GstBuffer * buf,
    GstClockTime presentation_time)
{
  GstKsVideoSrcPrivate *priv = GST_KS_VIDEO_SRC_GET_PRIVATE (self);
  GstClockTime duration;
  GstClock *clock;
  GstClockTime timestamp;

  duration = gst_ks_video_device_get_duration (priv->device);

  GST_OBJECT_LOCK (self);
  clock = GST_ELEMENT_CLOCK (self);
  if (clock != NULL) {
    gst_object_ref (clock);
    timestamp = GST_ELEMENT (self)->base_time;

    if (GST_CLOCK_TIME_IS_VALID (presentation_time)) {
      if (presentation_time > GST_ELEMENT (self)->base_time)
        presentation_time -= GST_ELEMENT (self)->base_time;
      else
        presentation_time = 0;
    }
  } else {
    timestamp = GST_CLOCK_TIME_NONE;
  }
  GST_OBJECT_UNLOCK (self);

  if (clock != NULL) {

    /* The time according to the current clock */
    timestamp = gst_clock_get_time (clock) - timestamp;
    if (timestamp > duration)
      timestamp -= duration;
    else
      timestamp = 0;

    if (GST_CLOCK_TIME_IS_VALID (presentation_time)) {
      /*
       * We don't use this for anything yet, need to ponder how to deal
       * with pins that use an internal clock and timestamp from 0.
       */
      GstClockTimeDiff diff = GST_CLOCK_DIFF (presentation_time, timestamp);
      GST_DEBUG_OBJECT (self, "diff between gst and driver timestamp: %"
          G_GINT64_FORMAT, diff);
    }

    gst_object_unref (clock);
    clock = NULL;

    /* Unless it's the first frame, align the current timestamp on a multiple
     * of duration since the previous */
    if (GST_CLOCK_TIME_IS_VALID (priv->prev_ts)) {
      GstClockTime delta;
      guint delta_remainder, delta_offset;

      /* REVISIT: I've seen this happen with the GstSystemClock on Windows,
       *          scary... */
      if (timestamp < priv->prev_ts) {
        GST_INFO_OBJECT (self, "clock is ticking backwards");
        return FALSE;
      }

      /* Round to a duration boundary */
      delta = timestamp - priv->prev_ts;
      delta_remainder = delta % duration;

      if (delta_remainder < duration / 3)
        timestamp -= delta_remainder;
      else
        timestamp += duration - delta_remainder;

      /* How many frames are we off then? */
      delta = timestamp - priv->prev_ts;
      delta_offset = delta / duration;

      if (delta_offset == 1)    /* perfect */
        GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DISCONT);
      else if (delta_offset > 1) {
        guint lost = delta_offset - 1;
        GST_INFO_OBJECT (self, "lost %d frame%s, setting discont flag",
            lost, (lost > 1) ? "s" : "");
        GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
      } else if (delta_offset == 0) {   /* overproduction, skip this frame */
        GST_INFO_OBJECT (self, "skipping frame");
        return FALSE;
      }

      priv->offset += delta_offset;
    }

    priv->prev_ts = timestamp;
  }

  GST_BUFFER_OFFSET (buf) = priv->offset;
  GST_BUFFER_OFFSET_END (buf) = GST_BUFFER_OFFSET (buf) + 1;
  GST_BUFFER_TIMESTAMP (buf) = timestamp;
  GST_BUFFER_DURATION (buf) = duration;

  return TRUE;
}
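To make the rounding step concrete with assumed numbers: at 30 fps the duration is about 33.3 ms. If prev_ts is 1.000 s and the raw timestamp comes out as 1.035 s, then delta is 35 ms and delta_remainder is 1.7 ms, which is below duration / 3 (11.1 ms), so the timestamp snaps back to about 1.033 s. The re-computed delta is then exactly one duration, delta_offset is 1, and the frame is treated as perfectly spaced.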
Example #24
/* Updates the SSRC, payload type, seqnum and timestamp of the RTP buffer
 * before the buffer is pushed. */
static GstFlowReturn
gst_rtp_base_payload_prepare_push (GstRTPBasePayload * payload,
    gpointer obj, gboolean is_list)
{
  GstRTPBasePayloadPrivate *priv;
  HeaderData data;

  if (payload->clock_rate == 0)
    goto no_rate;

  priv = payload->priv;

  /* update first, so that the property is set to the last
   * seqnum pushed */
  payload->seqnum = priv->next_seqnum;

  /* fill in the fields we want to set on all headers */
  data.payload = payload;
  data.seqnum = payload->seqnum;
  data.ssrc = payload->current_ssrc;
  data.pt = payload->pt;

  /* find the first buffer with a timestamp */
  if (is_list) {
    data.dts = -1;
    data.pts = -1;
    data.offset = GST_BUFFER_OFFSET_NONE;
    gst_buffer_list_foreach (GST_BUFFER_LIST_CAST (obj), find_timestamp, &data);
  } else {
    data.dts = GST_BUFFER_DTS (GST_BUFFER_CAST (obj));
    data.pts = GST_BUFFER_PTS (GST_BUFFER_CAST (obj));
    data.offset = GST_BUFFER_OFFSET (GST_BUFFER_CAST (obj));
  }

  /* convert to RTP time */
  if (priv->perfect_rtptime && data.offset != GST_BUFFER_OFFSET_NONE &&
      priv->base_offset != GST_BUFFER_OFFSET_NONE) {
    /* if we have an offset, use that for making an RTP timestamp */
    data.rtptime = payload->ts_base + priv->base_rtime +
        data.offset - priv->base_offset;
    GST_LOG_OBJECT (payload,
        "Using offset %" G_GUINT64_FORMAT " for RTP timestamp", data.offset);
  } else if (GST_CLOCK_TIME_IS_VALID (data.pts)) {
    gint64 rtime;

    /* no offset, use the gstreamer pts */
    rtime = gst_segment_to_running_time (&payload->segment, GST_FORMAT_TIME,
        data.pts);

    if (rtime == -1) {
      GST_LOG_OBJECT (payload, "Clipped pts, using base RTP timestamp");
      rtime = 0;
    } else {
      GST_LOG_OBJECT (payload,
          "Using running_time %" GST_TIME_FORMAT " for RTP timestamp",
          GST_TIME_ARGS (rtime));
      rtime =
          gst_util_uint64_scale_int (rtime, payload->clock_rate, GST_SECOND);
      priv->base_offset = data.offset;
      priv->base_rtime = rtime;
    }
    /* add running_time in clock-rate units to the base timestamp */
    data.rtptime = payload->ts_base + rtime;
  } else {
    GST_LOG_OBJECT (payload,
        "Using previous RTP timestamp %" G_GUINT32_FORMAT, payload->timestamp);
    /* no timestamp to convert, take previous timestamp */
    data.rtptime = payload->timestamp;
  }

  /* set ssrc, payload type, seq number, caps and rtptime */
  if (is_list) {
    gst_buffer_list_foreach (GST_BUFFER_LIST_CAST (obj), set_headers, &data);
  } else {
    GstBuffer *buf = GST_BUFFER_CAST (obj);
    set_headers (&buf, 0, &data);
  }

  priv->next_seqnum = data.seqnum;
  payload->timestamp = data.rtptime;

  GST_LOG_OBJECT (payload, "Preparing to push packet with size %"
      G_GSIZE_FORMAT ", seq=%d, rtptime=%u, pts %" GST_TIME_FORMAT,
      (is_list) ? -1 : gst_buffer_get_size (GST_BUFFER (obj)),
      payload->seqnum, data.rtptime, GST_TIME_ARGS (data.pts));

  if (g_atomic_int_compare_and_exchange (&payload->
          priv->notified_first_timestamp, 1, 0)) {
    g_object_notify (G_OBJECT (payload), "timestamp");
    g_object_notify (G_OBJECT (payload), "seqnum");
  }

  return GST_FLOW_OK;

  /* ERRORS */
no_rate:
  {
    GST_ELEMENT_ERROR (payload, STREAM, NOT_IMPLEMENTED, (NULL),
        ("subclass did not specify clock-rate"));
    return GST_FLOW_ERROR;
  }
}
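The running-time-to-RTP conversion in the middle branch reduces to one scaled multiplication, truncated modulo 2^32. A standalone sketch with the same validity guard, using a 90 kHz video clock as the example rate (clock_rate = 90000 and running_time = 2 * GST_SECOND yield 180000 ticks):

static guint32
to_rtp_time (guint32 ts_base, GstClockTime running_time, gint clock_rate)
{
  guint64 rtime;

  if (!GST_CLOCK_TIME_IS_VALID (running_time))
    return ts_base;             /* no timestamp: keep the previous value */

  rtime = gst_util_uint64_scale_int (running_time, clock_rate, GST_SECOND);
  return ts_base + (guint32) rtime;   /* wraps modulo 2^32, as RTP expects */
}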
Example #25
static GstFlowReturn
mpegtsmux_collected (GstCollectPads * pads, MpegTsMux * mux)
{
  GstFlowReturn ret = GST_FLOW_OK;
  MpegTsPadData *best = NULL;

  GST_DEBUG_OBJECT (mux, "Pads collected");

  if (mux->first) {
    ret = mpegtsmux_create_streams (mux);
    if (G_UNLIKELY (ret != GST_FLOW_OK))
      return ret;

    best = mpegtsmux_choose_best_stream (mux);

    if (!mpegtsdemux_prepare_srcpad (mux)) {
      GST_DEBUG_OBJECT (mux, "Failed to send new segment");
      goto new_seg_fail;
    }

    mux->first = FALSE;
  } else {
    best = mpegtsmux_choose_best_stream (mux);
  }

  if (best != NULL) {
    TsMuxProgram *prog = best->prog;
    GstBuffer *buf = best->queued_buf;
    gint64 pts = -1;
    gboolean delta = TRUE;

    if (prog == NULL) {
      GST_ELEMENT_ERROR (mux, STREAM, MUX, ("Stream is not associated with "
              "any program"), (NULL));
      return GST_FLOW_ERROR;
    }

    if (G_UNLIKELY (prog->pcr_stream == NULL)) {
      if (best) {
        /* Take the first data stream for the PCR */
        GST_DEBUG_OBJECT (COLLECT_DATA_PAD (best),
            "Use stream (pid=%d) from pad as PCR for program (prog_id = %d)",
            MPEG_TS_PAD_DATA (best)->pid, MPEG_TS_PAD_DATA (best)->prog_id);

        /* Set the chosen PCR stream */
        tsmux_program_set_pcr_stream (prog, best->stream);
      }
    }

    g_return_val_if_fail (buf != NULL, GST_FLOW_ERROR);
    if (best->stream->is_video_stream)
      delta = GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
    GST_DEBUG_OBJECT (mux, "delta: %d", delta);

    GST_DEBUG_OBJECT (COLLECT_DATA_PAD (best),
        "Chose stream for output (PID: 0x%04x)", best->pid);

    if (GST_CLOCK_TIME_IS_VALID (best->cur_ts)) {
      pts = GSTTIME_TO_MPEGTIME (best->cur_ts);
      GST_DEBUG_OBJECT (mux, "Buffer has TS %" GST_TIME_FORMAT " pts %"
          G_GINT64_FORMAT, GST_TIME_ARGS (best->cur_ts), pts);
    }

    tsmux_stream_add_data (best->stream, GST_BUFFER_DATA (buf),
        GST_BUFFER_SIZE (buf), buf, pts, -1, !delta);
    best->queued_buf = NULL;

    mux->is_delta = delta;
    while (tsmux_stream_bytes_in_buffer (best->stream) > 0) {
      if (!tsmux_write_stream_packet (mux->tsmux, best->stream)) {
        GST_DEBUG_OBJECT (mux, "Failed to write data packet");
        goto write_fail;
      }
    }
    if (prog->pcr_stream == best->stream) {
      mux->last_ts = best->last_ts;
    }
  } else {
    /* FIXME: Drain all remaining streams */
    /* At EOS */
    gst_pad_push_event (mux->srcpad, gst_event_new_eos ());
  }

  return ret;
new_seg_fail:
  return GST_FLOW_ERROR;
write_fail:
  /* FIXME: Failed writing data for some reason. Should set appropriate error */
  return mux->last_flow_ret;
}
Example #26
void
gst_audio_fx_base_fir_filter_push_residue (GstAudioFXBaseFIRFilter * self)
{
  GstBuffer *outbuf;
  GstFlowReturn res;
  gint rate = GST_AUDIO_FILTER_RATE (self);
  gint channels = GST_AUDIO_FILTER_CHANNELS (self);
  gint bps = GST_AUDIO_FILTER_BPS (self);
  gint outsize, outsamples;
  GstMapInfo map;
  guint8 *in, *out;

  if (channels == 0 || rate == 0 || self->nsamples_in == 0) {
    self->buffer_fill = 0;
    g_free (self->buffer);
    self->buffer = NULL;
    return;
  }

  /* Calculate the number of samples and their memory size that
   * should be pushed from the residue */
  outsamples = self->nsamples_in - (self->nsamples_out - self->latency);
  if (outsamples <= 0) {
    self->buffer_fill = 0;
    g_free (self->buffer);
    self->buffer = NULL;
    return;
  }
  outsize = outsamples * channels * bps;

  if (!self->fft || self->low_latency) {
    gint64 diffsize, diffsamples;

    /* Process the difference between latency and residue length samples
     * to start at the actual data instead of starting at the zeros before
     * when we only got one buffer smaller than latency */
    diffsamples =
        ((gint64) self->latency) - ((gint64) self->buffer_fill) / channels;
    if (diffsamples > 0) {
      diffsize = diffsamples * channels * bps;
      in = g_new0 (guint8, diffsize);
      out = g_new0 (guint8, diffsize);
      self->nsamples_out += self->process (self, in, out, diffsamples);
      g_free (in);
      g_free (out);
    }

    outbuf = gst_buffer_new_and_alloc (outsize);

    /* Convolve the residue with zeros to get the actual remaining data */
    in = g_new0 (guint8, outsize);
    gst_buffer_map (outbuf, &map, GST_MAP_READWRITE);
    self->nsamples_out += self->process (self, in, map.data, outsamples);
    gst_buffer_unmap (outbuf, &map);

    g_free (in);
  } else {
    guint gensamples = 0;

    outbuf = gst_buffer_new_and_alloc (outsize);
    gst_buffer_map (outbuf, &map, GST_MAP_READWRITE);

    while (gensamples < outsamples) {
      guint step_insamples = self->block_length - self->buffer_fill;
      guint8 *zeroes = g_new0 (guint8, step_insamples * channels * bps);
      guint8 *out = g_new (guint8, self->block_length * channels * bps);
      guint step_gensamples;

      step_gensamples = self->process (self, zeroes, out, step_insamples);
      g_free (zeroes);

      memcpy (map.data + gensamples * bps, out, MIN (step_gensamples,
              outsamples - gensamples) * bps);
      gensamples += MIN (step_gensamples, outsamples - gensamples);

      g_free (out);
    }
    self->nsamples_out += gensamples;

    gst_buffer_unmap (outbuf, &map);
  }

  /* Set timestamp, offset, etc from the values we
   * saved when processing the regular buffers */
  if (GST_CLOCK_TIME_IS_VALID (self->start_ts))
    GST_BUFFER_TIMESTAMP (outbuf) = self->start_ts;
  else
    GST_BUFFER_TIMESTAMP (outbuf) = 0;
  GST_BUFFER_TIMESTAMP (outbuf) +=
      gst_util_uint64_scale_int (self->nsamples_out - outsamples -
      self->latency, GST_SECOND, rate);

  GST_BUFFER_DURATION (outbuf) =
      gst_util_uint64_scale_int (outsamples, GST_SECOND, rate);

  if (self->start_off != GST_BUFFER_OFFSET_NONE) {
    GST_BUFFER_OFFSET (outbuf) =
        self->start_off + self->nsamples_out - outsamples - self->latency;
    GST_BUFFER_OFFSET_END (outbuf) = GST_BUFFER_OFFSET (outbuf) + outsamples;
  }

  GST_DEBUG_OBJECT (self,
      "Pushing residue buffer of size %" G_GSIZE_FORMAT " with timestamp: %"
      GST_TIME_FORMAT ", duration: %" GST_TIME_FORMAT ", offset: %"
      G_GUINT64_FORMAT ", offset_end: %" G_GUINT64_FORMAT ", nsamples_out: %d",
      gst_buffer_get_size (outbuf),
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (outbuf)), GST_BUFFER_OFFSET (outbuf),
      GST_BUFFER_OFFSET_END (outbuf), outsamples);

  res = gst_pad_push (GST_BASE_TRANSFORM_CAST (self)->srcpad, outbuf);

  if (G_UNLIKELY (res != GST_FLOW_OK)) {
    GST_WARNING_OBJECT (self, "failed to push residue");
  }

  self->buffer_fill = 0;
}
Example #27
static void
do_perfect_stream_test (guint rate, guint width, gdouble drop_probability,
    gdouble inject_probability)
{
  GstElement *pipe, *src, *conv, *filter, *injector, *audiorate, *sink;
  GstMessage *msg;
  GstCaps *caps;
  GstPad *srcpad;
  GList *l, *bufs = NULL;
  GstClockTime next_time = GST_CLOCK_TIME_NONE;
  guint64 next_offset = GST_BUFFER_OFFSET_NONE;

  caps = gst_caps_new_simple ("audio/x-raw-int", "rate", G_TYPE_INT,
      rate, "width", G_TYPE_INT, width, NULL);

  GST_INFO ("-------- drop=%.0f%% caps = %" GST_PTR_FORMAT " ---------- ",
      drop_probability * 100.0, caps);

  g_assert (drop_probability >= 0.0 && drop_probability <= 1.0);
  g_assert (inject_probability >= 0.0 && inject_probability <= 1.0);
  g_assert (width > 0 && (width % 8) == 0);

  pipe = gst_pipeline_new ("pipeline");
  fail_unless (pipe != NULL);

  src = gst_element_factory_make ("audiotestsrc", "audiotestsrc");
  fail_unless (src != NULL);

  g_object_set (src, "num-buffers", 100, NULL);

  conv = gst_element_factory_make ("audioconvert", "audioconvert");
  fail_unless (conv != NULL);

  filter = gst_element_factory_make ("capsfilter", "capsfilter");

  fail_unless (filter != NULL);
  g_object_set (filter, "caps", caps, NULL);

  injector_inject_probability = inject_probability;
  injector = GST_ELEMENT (g_object_new (test_injector_get_type (), NULL));

  srcpad = gst_element_get_pad (injector, "src");
  fail_unless (srcpad != NULL);
  gst_pad_add_buffer_probe (srcpad, G_CALLBACK (probe_cb), &drop_probability);
  gst_object_unref (srcpad);

  audiorate = gst_element_factory_make ("audiorate", "audiorate");
  fail_unless (audiorate != NULL);

  sink = gst_element_factory_make ("fakesink", "fakesink");
  fail_unless (sink != NULL);

  g_object_set (sink, "signal-handoffs", TRUE, NULL);
  g_signal_connect (sink, "handoff", G_CALLBACK (got_buf), &bufs);

  gst_bin_add_many (GST_BIN (pipe), src, conv, filter, injector, audiorate,
      sink, NULL);
  gst_element_link_many (src, conv, filter, injector, audiorate, sink, NULL);

  fail_unless_equals_int (gst_element_set_state (pipe, GST_STATE_PLAYING),
      GST_STATE_CHANGE_ASYNC);

  fail_unless_equals_int (gst_element_get_state (pipe, NULL, NULL, -1),
      GST_STATE_CHANGE_SUCCESS);

  msg = gst_bus_poll (GST_ELEMENT_BUS (pipe),
      GST_MESSAGE_EOS | GST_MESSAGE_ERROR, -1);
  fail_unless_equals_string (GST_MESSAGE_TYPE_NAME (msg), "eos");

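  /* Check the perfect-stream property: every buffer must carry valid
   * timestamps and offsets, and each buffer must start exactly where the
   * previous one ended, both in time and in sample offsets. */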
  for (l = bufs; l != NULL; l = l->next) {
    GstBuffer *buf = GST_BUFFER (l->data);
    guint num_samples;

    fail_unless (GST_BUFFER_TIMESTAMP_IS_VALID (buf));
    fail_unless (GST_BUFFER_DURATION_IS_VALID (buf));
    fail_unless (GST_BUFFER_OFFSET_IS_VALID (buf));
    fail_unless (GST_BUFFER_OFFSET_END_IS_VALID (buf));

    GST_LOG ("buffer: ts=%" GST_TIME_FORMAT ", end_ts=%" GST_TIME_FORMAT
        " off=%" G_GINT64_FORMAT ", end_off=%" G_GINT64_FORMAT,
        GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
        GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf) + GST_BUFFER_DURATION (buf)),
        GST_BUFFER_OFFSET (buf), GST_BUFFER_OFFSET_END (buf));

    if (GST_CLOCK_TIME_IS_VALID (next_time)) {
      fail_unless_equals_uint64 (next_time, GST_BUFFER_TIMESTAMP (buf));
    }
    if (next_offset != GST_BUFFER_OFFSET_NONE) {
      fail_unless_equals_uint64 (next_offset, GST_BUFFER_OFFSET (buf));
    }

    /* check buffer size for sanity */
    fail_unless_equals_int (GST_BUFFER_SIZE (buf) % (width / 8), 0);

    /* check there is actually as much data as there should be */
    num_samples = GST_BUFFER_OFFSET_END (buf) - GST_BUFFER_OFFSET (buf);
    fail_unless_equals_int (GST_BUFFER_SIZE (buf), num_samples * (width / 8));

    next_time = GST_BUFFER_TIMESTAMP (buf) + GST_BUFFER_DURATION (buf);
    next_offset = GST_BUFFER_OFFSET_END (buf);
  }

  gst_message_unref (msg);
  gst_element_set_state (pipe, GST_STATE_NULL);
  gst_object_unref (pipe);

  g_list_foreach (bufs, (GFunc) gst_mini_object_unref, NULL);
  g_list_free (bufs);

  gst_caps_unref (caps);
}
Example #28
static GstFlowReturn
audioresample_transform (GstBaseTransform * base, GstBuffer * inbuf,
    GstBuffer * outbuf)
{
  GstAudioresample *audioresample;
  ResampleState *r;
  guchar *data, *datacopy;
  gulong size;
  GstClockTime timestamp;

  audioresample = GST_AUDIORESAMPLE (base);
  r = audioresample->resample;

  data = GST_BUFFER_DATA (inbuf);
  size = GST_BUFFER_SIZE (inbuf);
  timestamp = GST_BUFFER_TIMESTAMP (inbuf);

  GST_LOG_OBJECT (audioresample, "transforming buffer of %ld bytes, ts %"
      GST_TIME_FORMAT ", duration %" GST_TIME_FORMAT ", offset %"
      G_GINT64_FORMAT ", offset_end %" G_GINT64_FORMAT,
      size, GST_TIME_ARGS (timestamp),
      GST_TIME_ARGS (GST_BUFFER_DURATION (inbuf)),
      GST_BUFFER_OFFSET (inbuf), GST_BUFFER_OFFSET_END (inbuf));

  /* check for timestamp discontinuities and flush/reset if needed */
  if (G_UNLIKELY (audioresample_check_discont (audioresample, timestamp))) {
    /* Flush internal samples */
    audioresample_pushthrough (audioresample);
    /* Inform downstream element about discontinuity */
    audioresample->need_discont = TRUE;
    /* We want to recalculate the offset */
    audioresample->ts_offset = -1;
  }

  if (audioresample->ts_offset == -1) {
    /* if we don't know the initial offset yet, calculate it based on the 
     * input timestamp. */
    if (GST_CLOCK_TIME_IS_VALID (timestamp)) {
      GstClockTime stime;

      /* offset used to calculate the timestamps. We use the sample offset for
       * this to make it more accurate. We want the first buffer to have the
       * same timestamp as the incoming timestamp. */
      audioresample->next_ts = timestamp;
      audioresample->ts_offset =
          gst_util_uint64_scale_int (timestamp, r->o_rate, GST_SECOND);
      /* offset used to set as the buffer offset, this offset is always
       * relative to the stream time, note that timestamp is not... */
      stime = (timestamp - base->segment.start) + base->segment.time;
      audioresample->offset =
          gst_util_uint64_scale_int (stime, r->o_rate, GST_SECOND);
    }
  }
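  /* Remember the input timestamp and duration; the discont check above
   * uses them to compute the expected timestamp of the next buffer. */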
  audioresample->prev_ts = timestamp;
  audioresample->prev_duration = GST_BUFFER_DURATION (inbuf);

  /* need to memdup, resample takes ownership. */
  datacopy = g_memdup (data, size);
  resample_add_input_data (r, datacopy, size, g_free, datacopy);

  return audioresample_do_output (audioresample, outbuf);
}
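Example #29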
static GstFlowReturn
gst_audio_fx_base_fir_filter_transform (GstBaseTransform * base,
    GstBuffer * inbuf, GstBuffer * outbuf)
{
  GstAudioFXBaseFIRFilter *self = GST_AUDIO_FX_BASE_FIR_FILTER (base);
  GstClockTime timestamp, expected_timestamp;
  gint channels = GST_AUDIO_FILTER_CHANNELS (self);
  gint rate = GST_AUDIO_FILTER_RATE (self);
  gint bps = GST_AUDIO_FILTER_BPS (self);
  GstMapInfo inmap, outmap;
  guint input_samples;
  guint output_samples;
  guint generated_samples;
  guint64 output_offset;
  gint64 diff = 0;
  GstClockTime stream_time;

  timestamp = GST_BUFFER_TIMESTAMP (outbuf);

  if (!GST_CLOCK_TIME_IS_VALID (timestamp)
      && !GST_CLOCK_TIME_IS_VALID (self->start_ts)) {
    GST_ERROR_OBJECT (self, "Invalid timestamp");
    return GST_FLOW_ERROR;
  }

  g_mutex_lock (&self->lock);
  stream_time =
      gst_segment_to_stream_time (&base->segment, GST_FORMAT_TIME, timestamp);

  GST_DEBUG_OBJECT (self, "sync to %" GST_TIME_FORMAT,
      GST_TIME_ARGS (timestamp));

  if (GST_CLOCK_TIME_IS_VALID (stream_time))
    gst_object_sync_values (GST_OBJECT (self), stream_time);

  g_return_val_if_fail (self->kernel != NULL, GST_FLOW_ERROR);
  g_return_val_if_fail (channels != 0, GST_FLOW_ERROR);

  if (GST_CLOCK_TIME_IS_VALID (self->start_ts))
    expected_timestamp =
        self->start_ts + gst_util_uint64_scale_int (self->nsamples_in,
        GST_SECOND, rate);
  else
    expected_timestamp = GST_CLOCK_TIME_NONE;

  /* Reset the residue if already existing on discont buffers */
  if (GST_BUFFER_IS_DISCONT (inbuf)
      || (GST_CLOCK_TIME_IS_VALID (expected_timestamp)
          && (ABS (GST_CLOCK_DIFF (timestamp,
                      expected_timestamp) > 5 * GST_MSECOND)))) {
    GST_DEBUG_OBJECT (self, "Discontinuity detected - flushing");
    if (GST_CLOCK_TIME_IS_VALID (expected_timestamp))
      gst_audio_fx_base_fir_filter_push_residue (self);
    self->buffer_fill = 0;
    g_free (self->buffer);
    self->buffer = NULL;
    self->start_ts = timestamp;
    self->start_off = GST_BUFFER_OFFSET (inbuf);
    self->nsamples_out = 0;
    self->nsamples_in = 0;
  } else if (!GST_CLOCK_TIME_IS_VALID (self->start_ts)) {
    self->start_ts = timestamp;
    self->start_off = GST_BUFFER_OFFSET (inbuf);
  }

  gst_buffer_map (inbuf, &inmap, GST_MAP_READ);
  gst_buffer_map (outbuf, &outmap, GST_MAP_WRITE);

  input_samples = (inmap.size / bps) / channels;
  output_samples = (outmap.size / bps) / channels;

  self->nsamples_in += input_samples;

  generated_samples =
      self->process (self, inmap.data, outmap.data, input_samples);

  gst_buffer_unmap (inbuf, &inmap);
  gst_buffer_unmap (outbuf, &outmap);

  g_assert (generated_samples <= output_samples);
  self->nsamples_out += generated_samples;
  if (generated_samples == 0)
    goto no_samples;

  /* Calculate the number of samples we can push out now without outputting
   * latency zeros in the beginning */
  diff = ((gint64) self->nsamples_out) - ((gint64) self->latency);
  if (diff < 0)
    goto no_samples;

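  /* Strip the leading latency samples from this buffer: after the swap,
   * 'diff' is the number of samples to skip at the start and
   * 'generated_samples' the number of samples that remain to be pushed. */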
  if (diff < generated_samples) {
    gint64 tmp = diff;
    diff = generated_samples - diff;
    generated_samples = tmp;
  } else {
    diff = 0;
  }

  gst_buffer_resize (outbuf, diff * bps * channels,
      generated_samples * bps * channels);

  output_offset = self->nsamples_out - self->latency - generated_samples;
  GST_BUFFER_TIMESTAMP (outbuf) =
      self->start_ts + gst_util_uint64_scale_int (output_offset, GST_SECOND,
      rate);
  GST_BUFFER_DURATION (outbuf) =
      gst_util_uint64_scale_int (output_samples, GST_SECOND, rate);
  if (self->start_off != GST_BUFFER_OFFSET_NONE) {
    GST_BUFFER_OFFSET (outbuf) = self->start_off + output_offset;
    GST_BUFFER_OFFSET_END (outbuf) =
        GST_BUFFER_OFFSET (outbuf) + generated_samples;
  } else {
    GST_BUFFER_OFFSET (outbuf) = GST_BUFFER_OFFSET_NONE;
    GST_BUFFER_OFFSET_END (outbuf) = GST_BUFFER_OFFSET_NONE;
  }
  g_mutex_unlock (&self->lock);

  GST_DEBUG_OBJECT (self,
      "Pushing buffer of size %" G_GSIZE_FORMAT " with timestamp: %"
      GST_TIME_FORMAT ", duration: %" GST_TIME_FORMAT ", offset: %"
      G_GUINT64_FORMAT ", offset_end: %" G_GUINT64_FORMAT ", nsamples_out: %d",
      gst_buffer_get_size (outbuf),
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (outbuf)), GST_BUFFER_OFFSET (outbuf),
      GST_BUFFER_OFFSET_END (outbuf), generated_samples);

  return GST_FLOW_OK;

no_samples:
  {
    g_mutex_unlock (&self->lock);
    return GST_BASE_TRANSFORM_FLOW_DROPPED;
  }
}
Example #30
GstFlowReturn
gst_base_video_decoder_finish_frame (GstBaseVideoDecoder * base_video_decoder,
    GstVideoFrame * frame)
{
  GstBaseVideoDecoderClass *base_video_decoder_class;
  GstBuffer *src_buffer;

  GST_DEBUG ("finish frame");

  base_video_decoder_class =
      GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);

  GST_DEBUG ("finish frame sync=%d pts=%" G_GINT64_FORMAT, frame->is_sync_point,
      frame->presentation_timestamp);

  if (frame->is_sync_point) {
    if (GST_CLOCK_TIME_IS_VALID (frame->presentation_timestamp)) {
      if (frame->presentation_timestamp != base_video_decoder->timestamp_offset) {
        GST_DEBUG ("sync timestamp %" G_GINT64_FORMAT " diff %" G_GINT64_FORMAT,
            frame->presentation_timestamp,
            frame->presentation_timestamp -
            base_video_decoder->state.segment.start);
        base_video_decoder->timestamp_offset = frame->presentation_timestamp;
        base_video_decoder->field_index = 0;
      } else {
        /* This case is for one initial timestamp and no others, e.g.,
         * filesrc ! decoder ! xvimagesink */
        GST_WARNING ("sync timestamp didn't change, ignoring");
        frame->presentation_timestamp = GST_CLOCK_TIME_NONE;
      }
    } else {
      GST_WARNING ("sync point doesn't have timestamp");
      if (!GST_CLOCK_TIME_IS_VALID (base_video_decoder->timestamp_offset)) {
        GST_ERROR ("No base timestamp.  Assuming frames start at 0");
        base_video_decoder->timestamp_offset = 0;
        base_video_decoder->field_index = 0;
      }
    }
  }
  frame->field_index = base_video_decoder->field_index;
  base_video_decoder->field_index += frame->n_fields;

  if (frame->presentation_timestamp == GST_CLOCK_TIME_NONE) {
    frame->presentation_timestamp =
        gst_base_video_decoder_get_field_timestamp (base_video_decoder,
        frame->field_index);
    frame->presentation_duration = GST_CLOCK_TIME_NONE;
    frame->decode_timestamp =
        gst_base_video_decoder_get_timestamp (base_video_decoder,
        frame->decode_frame_number);
  }
  if (frame->presentation_duration == GST_CLOCK_TIME_NONE) {
    frame->presentation_duration =
        gst_base_video_decoder_get_field_timestamp (base_video_decoder,
        frame->field_index + frame->n_fields) - frame->presentation_timestamp;
  }

  if (GST_CLOCK_TIME_IS_VALID (base_video_decoder->last_timestamp)) {
    if (frame->presentation_timestamp < base_video_decoder->last_timestamp) {
      GST_WARNING ("decreasing timestamp (%" G_GINT64_FORMAT " < %"
          G_GINT64_FORMAT ")", frame->presentation_timestamp,
          base_video_decoder->last_timestamp);
    }
  }
  base_video_decoder->last_timestamp = frame->presentation_timestamp;

  GST_BUFFER_FLAG_UNSET (frame->src_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  if (base_video_decoder->state.interlaced) {
#ifndef GST_VIDEO_BUFFER_TFF
#define GST_VIDEO_BUFFER_TFF (GST_MINI_OBJECT_FLAG_LAST << 5)
#endif
#ifndef GST_VIDEO_BUFFER_RFF
#define GST_VIDEO_BUFFER_RFF (GST_MINI_OBJECT_FLAG_LAST << 6)
#endif
#ifndef GST_VIDEO_BUFFER_ONEFIELD
#define GST_VIDEO_BUFFER_ONEFIELD (GST_MINI_OBJECT_FLAG_LAST << 7)
#endif
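    /* field_index counts fields since the last sync point; an odd index
     * means this buffer starts on the opposite field, so flip TFF. */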
    int tff = base_video_decoder->state.top_field_first;

    if (frame->field_index & 1) {
      tff ^= 1;
    }
    if (tff) {
      GST_BUFFER_FLAG_SET (frame->src_buffer, GST_VIDEO_BUFFER_TFF);
    } else {
      GST_BUFFER_FLAG_UNSET (frame->src_buffer, GST_VIDEO_BUFFER_TFF);
    }
    GST_BUFFER_FLAG_UNSET (frame->src_buffer, GST_VIDEO_BUFFER_RFF);
    GST_BUFFER_FLAG_UNSET (frame->src_buffer, GST_VIDEO_BUFFER_ONEFIELD);
    if (frame->n_fields == 3) {
      GST_BUFFER_FLAG_SET (frame->src_buffer, GST_VIDEO_BUFFER_RFF);
    } else if (frame->n_fields == 1) {
      GST_BUFFER_FLAG_SET (frame->src_buffer, GST_VIDEO_BUFFER_ONEFIELD);
    }
  }

  GST_BUFFER_TIMESTAMP (frame->src_buffer) = frame->presentation_timestamp;
  GST_BUFFER_DURATION (frame->src_buffer) = frame->presentation_duration;
  GST_BUFFER_OFFSET (frame->src_buffer) = -1;
  GST_BUFFER_OFFSET_END (frame->src_buffer) = -1;

  GST_DEBUG ("pushing frame %" G_GINT64_FORMAT, frame->presentation_timestamp);

  base_video_decoder->frames =
      g_list_remove (base_video_decoder->frames, frame);

  gst_base_video_decoder_set_src_caps (base_video_decoder);

  src_buffer = frame->src_buffer;
  frame->src_buffer = NULL;

  gst_base_video_decoder_free_frame (frame);

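  /* If requested, clip the outgoing buffer to the configured segment and
   * drop it entirely when it falls outside. */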
  if (base_video_decoder->sink_clipping) {
    gint64 start = GST_BUFFER_TIMESTAMP (src_buffer);
    gint64 stop = GST_BUFFER_TIMESTAMP (src_buffer) +
        GST_BUFFER_DURATION (src_buffer);

    if (gst_segment_clip (&base_video_decoder->state.segment, GST_FORMAT_TIME,
            start, stop, &start, &stop)) {
      GST_BUFFER_TIMESTAMP (src_buffer) = start;
      GST_BUFFER_DURATION (src_buffer) = stop - start;
    } else {
      GST_DEBUG ("dropping buffer outside segment");
      gst_buffer_unref (src_buffer);
      return GST_FLOW_OK;
    }
  }

  return gst_pad_push (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_decoder),
      src_buffer);
}