static void
gst_vdp_sink_get_times (GstBaseSink * bsink, GstBuffer * buf,
    GstClockTime * start, GstClockTime * end)
{
  VdpSink *vdp_sink;

  vdp_sink = GST_VDP_SINK (bsink);

  if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)) {
    *start = GST_BUFFER_TIMESTAMP (buf);
    if (GST_BUFFER_DURATION_IS_VALID (buf)) {
      *end = *start + GST_BUFFER_DURATION (buf);
    } else {
      if (vdp_sink->fps_n > 0) {
        *end = *start +
            gst_util_uint64_scale_int (GST_SECOND, vdp_sink->fps_d,
            vdp_sink->fps_n);
      }
    }
  }
}
Example #2
static void
gst_fbdevsink_get_times (GstBaseSink * basesink, GstBuffer * buffer,
    GstClockTime * start, GstClockTime * end)
{
  GstFBDEVSink *fbdevsink;

  fbdevsink = GST_FBDEVSINK (basesink);

  if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer)) {
    *start = GST_BUFFER_TIMESTAMP (buffer);
    if (GST_BUFFER_DURATION_IS_VALID (buffer)) {
      *end = *start + GST_BUFFER_DURATION (buffer);
    } else {
      if (fbdevsink->fps_n > 0) {
        *end = *start +
            gst_util_uint64_scale_int (GST_SECOND, fbdevsink->fps_d,
            fbdevsink->fps_n);
      }
    }
  }
}
Example #3
static void
gst_egl_sink_get_times (GstBaseSink * bsink, GstBuffer * buf,
    GstClockTime * start, GstClockTime * end)
{
  GstEGLSink *eglsink;

  eglsink = GST_EGL_SINK (bsink);

  if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)) {
    *start = GST_BUFFER_TIMESTAMP (buf);
    if (GST_BUFFER_DURATION_IS_VALID (buf))
      *end = *start + GST_BUFFER_DURATION (buf);
    else {
      if (eglsink->fps_n > 0) {
        *end = *start +
            gst_util_uint64_scale_int (GST_SECOND, eglsink->fps_d,
            eglsink->fps_n);
      }
    }
  }
}
static void
gst_glimage_sink_get_times (GstBaseSink * bsink, GstBuffer * buf,
    GstClockTime * start, GstClockTime * end)
{
  GstGLImageSink *glimagesink;

  glimagesink = GST_GLIMAGE_SINK (bsink);

  if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)) {
    *start = GST_BUFFER_TIMESTAMP (buf);
    if (GST_BUFFER_DURATION_IS_VALID (buf))
      *end = *start + GST_BUFFER_DURATION (buf);
    else {
      if (GST_VIDEO_INFO_FPS_N (&glimagesink->info) > 0) {
        *end = *start +
            gst_util_uint64_scale_int (GST_SECOND,
            GST_VIDEO_INFO_FPS_D (&glimagesink->info),
            GST_VIDEO_INFO_FPS_N (&glimagesink->info));
      }
    }
  }
}
Example #5
static void
gst_kms_sink_get_times (GstBaseSink * bsink, GstBuffer * buf,
    GstClockTime * start, GstClockTime * end)
{
  GstKMSSink *self;

  self = GST_KMS_SINK (bsink);

  if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)) {
    *start = GST_BUFFER_TIMESTAMP (buf);
    if (GST_BUFFER_DURATION_IS_VALID (buf))
      *end = *start + GST_BUFFER_DURATION (buf);
    else {
      if (GST_VIDEO_INFO_FPS_N (&self->vinfo) > 0) {
        *end = *start +
            gst_util_uint64_scale_int (GST_SECOND,
            GST_VIDEO_INFO_FPS_D (&self->vinfo),
            GST_VIDEO_INFO_FPS_N (&self->vinfo));
      }
    }
  }
}
static void
gst_pvrvideosink_get_times (GstBaseSink * bsink, GstBuffer * buf,
    GstClockTime * start, GstClockTime * end)
{
  GstPVRVideoSink *pvrvideosink;

  pvrvideosink = GST_PVRVIDEOSINK (bsink);

  if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)) {
    *start = GST_BUFFER_TIMESTAMP (buf);
    if (GST_BUFFER_DURATION_IS_VALID (buf)) {
      *end = *start + GST_BUFFER_DURATION (buf);
    } else {
      gint fps_n, fps_d;
      fps_n = GST_VIDEO_INFO_FPS_N (&pvrvideosink->info);
      fps_d = GST_VIDEO_INFO_FPS_D (&pvrvideosink->info);
      if (fps_n > 0) {
        *end = *start + gst_util_uint64_scale_int (GST_SECOND, fps_d, fps_n);
      }
    }
  }
}
Example #7
gboolean
gst_kate_util_decoder_base_update_segment (GstKateDecoderBase * decoder,
    GstElement * element, GstBuffer * buf)
{
  gint64 clip_start = 0, clip_stop = 0;
  gboolean in_seg;

  if (decoder->kate_flushing) {
    GST_LOG_OBJECT (element, "Kate pad flushing, buffer ignored");
    return FALSE;
  }

  if (G_LIKELY (GST_BUFFER_TIMESTAMP_IS_VALID (buf))) {
    GstClockTime stop;

    if (G_LIKELY (GST_BUFFER_DURATION_IS_VALID (buf)))
      stop = GST_BUFFER_TIMESTAMP (buf) + GST_BUFFER_DURATION (buf);
    else
      stop = GST_CLOCK_TIME_NONE;

    in_seg = gst_segment_clip (&decoder->kate_segment, GST_FORMAT_TIME,
        GST_BUFFER_TIMESTAMP (buf), stop, &clip_start, &clip_stop);
  } else {
    in_seg = TRUE;
  }

  if (in_seg) {
    if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)) {
      gst_segment_set_last_stop (&decoder->kate_segment, GST_FORMAT_TIME,
          clip_start);
    }
  } else {
    GST_INFO_OBJECT (element, "Kate buffer not in segment, ignored");
  }

  return in_seg;
}
Example #8
static GstFlowReturn got_data(GstPad *pad, GstBuffer *buf) {
    GstTfImpl *This = gst_pad_get_element_private(pad);
    IMediaSample *sample = GST_APP_BUFFER(buf)->priv;
    REFERENCE_TIME tStart, tStop;
    HRESULT hr;

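    /* GStreamer timestamps are in nanoseconds; DirectShow REFERENCE_TIME is
     * in 100 ns units, hence the divisions by 100 below */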
    if (GST_BUFFER_TIMESTAMP_IS_VALID(buf) &&
        GST_BUFFER_DURATION_IS_VALID(buf)) {
        tStart = buf->timestamp / 100;
        tStop = tStart + buf->duration / 100;
        IMediaSample_SetTime(sample, &tStart, &tStop);
    }
    else
        IMediaSample_SetTime(sample, NULL, NULL);
    if (GST_BUFFER_OFFSET_IS_VALID(buf) &&
        GST_BUFFER_OFFSET_END_IS_VALID(buf)) {
        tStart = buf->offset / 100;
        tStop = buf->offset_end / 100;
        IMediaSample_SetMediaTime(sample, &tStart, &tStop);
    }
    else
        IMediaSample_SetMediaTime(sample, NULL, NULL);

    IMediaSample_SetDiscontinuity(sample, GST_BUFFER_FLAG_IS_SET(buf, GST_BUFFER_FLAG_DISCONT));
    IMediaSample_SetPreroll(sample, GST_BUFFER_FLAG_IS_SET(buf, GST_BUFFER_FLAG_PREROLL));
    IMediaSample_SetSyncPoint(sample, !GST_BUFFER_FLAG_IS_SET(buf, GST_BUFFER_FLAG_DELTA_UNIT));
    IMediaSample_SetActualDataLength(sample, GST_BUFFER_SIZE(buf));

    hr = BaseOutputPinImpl_Deliver((BaseOutputPin*)This->tf.ppPins[1], sample);
    gst_buffer_unref(buf);
    if (FAILED(hr))
        return GST_FLOW_WRONG_STATE;
    if (hr != S_OK)
        return GST_FLOW_RESEND;
    return GST_FLOW_OK;
}
static void
gst_inter_audio_src_get_times (GstBaseSrc * src, GstBuffer * buffer,
    GstClockTime * start, GstClockTime * end)
{
  GstInterAudioSrc *interaudiosrc = GST_INTER_AUDIO_SRC (src);

  GST_DEBUG_OBJECT (src, "get_times");

  /* for live sources, sync on the timestamp of the buffer */
  if (gst_base_src_is_live (src)) {
    if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer)) {
      *start = GST_BUFFER_TIMESTAMP (buffer);
      if (GST_BUFFER_DURATION_IS_VALID (buffer)) {
        *end = *start + GST_BUFFER_DURATION (buffer);
      } else {
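        /* no duration set: derive it from the buffer size
         * (bytes / bpf = frames, frames / rate = seconds) */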
        if (interaudiosrc->info.rate > 0) {
          *end = *start +
              gst_util_uint64_scale_int (gst_buffer_get_size (buffer),
              GST_SECOND, interaudiosrc->info.rate * interaudiosrc->info.bpf);
        }
      }
    }
  }
}
Example #10
static void
do_test (SubParseInputChunk * input, guint num, const gchar * format)
{
  guint n;
  GstCaps *outcaps;

  setup_subparse ();

  for (n = 0; n < num; ++n) {
    GstBuffer *buf;

    buf = buffer_from_static_string (input[n].in);
    fail_unless_equals_int (gst_pad_push (mysrcpad, buf), GST_FLOW_OK);
  }

  gst_pad_push_event (mysrcpad, gst_event_new_eos ());

  fail_unless_equals_int (g_list_length (buffers), num);

  outcaps = gst_pad_get_current_caps (mysinkpad);

  for (n = 0; n < num; ++n) {
    const GstStructure *buffer_caps_struct;
    GstBuffer *buf;
    GstMapInfo map;

    buf = g_list_nth_data (buffers, n);
    fail_unless (buf != NULL);

    /* check timestamp */
    fail_unless (GST_BUFFER_TIMESTAMP_IS_VALID (buf), NULL);
    fail_unless_equals_uint64 (GST_BUFFER_TIMESTAMP (buf), input[n].from_ts);

    /* might not be able to put a duration on the last buffer */
    if (input[n].to_ts != GST_CLOCK_TIME_NONE) {
      /* check duration */
      fail_unless (GST_BUFFER_DURATION_IS_VALID (buf), NULL);
      fail_unless_equals_uint64 (GST_BUFFER_DURATION (buf),
          input[n].to_ts - input[n].from_ts);
    }

    gst_buffer_map (buf, &map, GST_MAP_READ);
    /* can be NULL */
    if (map.data != NULL) {
      /* shouldn't have trailing newline characters */
      fail_if (map.size > 0 && map.data[map.size - 1] == '\n');
      /* shouldn't include NUL-terminator in data size */
      fail_if (map.size > 0 && map.data[map.size - 1] == '\0');
      /* but should still have a NUL-terminator behind the declared data */
      fail_unless_equals_int (map.data[map.size], '\0');
      /* make sure out string matches expected string */
      fail_unless_equals_string ((gchar *) map.data, input[n].out);
    }
    gst_buffer_unmap (buf, &map);
    /* check caps */
    fail_unless (outcaps != NULL);
    buffer_caps_struct = gst_caps_get_structure (outcaps, 0);
    fail_unless (gst_structure_has_name (buffer_caps_struct, "text/x-raw"));
    fail_unless_equals_string (gst_structure_get_string (buffer_caps_struct,
            "format"), format);
  }
  gst_caps_unref (outcaps);

  teardown_subparse ();
}
Example #11
static GstFlowReturn
gst_speex_enc_chain (GstPad * pad, GstBuffer * buf)
{
  GstSpeexEnc *enc;
  GstFlowReturn ret = GST_FLOW_OK;

  enc = GST_SPEEX_ENC (GST_PAD_PARENT (pad));

  if (!enc->setup)
    goto not_setup;

  if (!enc->header_sent) {
    /* Speex streams begin with two headers: the initial header (with
       most of the codec setup parameters), which is mandated by the Ogg
       bitstream spec, and a second header that holds any comment fields.
       We merely need to create the headers and push them out one at a
       time; the additional Ogg bitstream constraints are handled
       downstream */
    GstBuffer *buf1, *buf2;
    GstCaps *caps;
    guchar *data;
    gint data_len;

    /* create header buffer */
    data = (guint8 *) speex_header_to_packet (&enc->header, &data_len);
    buf1 = gst_speex_enc_buffer_from_data (enc, data, data_len, 0);
    free (data);

    /* create comment buffer */
    buf2 = gst_speex_enc_create_metadata_buffer (enc);

    /* mark and put on caps */
    caps = gst_pad_get_caps (enc->srcpad);
    caps = gst_speex_enc_set_header_on_caps (caps, buf1, buf2);

    gst_caps_set_simple (caps,
        "rate", G_TYPE_INT, enc->rate,
        "channels", G_TYPE_INT, enc->channels, NULL);

    /* negotiate with these caps */
    GST_DEBUG_OBJECT (enc, "here are the caps: %" GST_PTR_FORMAT, caps);
    gst_pad_set_caps (enc->srcpad, caps);

    gst_buffer_set_caps (buf1, caps);
    gst_buffer_set_caps (buf2, caps);
    gst_caps_unref (caps);

    /* push out buffers */
    ret = gst_speex_enc_push_buffer (enc, buf1);

    if (ret != GST_FLOW_OK) {
      gst_buffer_unref (buf2);
      goto done;
    }

    ret = gst_speex_enc_push_buffer (enc, buf2);

    if (ret != GST_FLOW_OK)
      goto done;

    speex_bits_reset (&enc->bits);

    enc->header_sent = TRUE;
  }

  /* Save the timestamp of the first buffer. This will be later
   * used as offset for all following buffers */
  if (enc->start_ts == GST_CLOCK_TIME_NONE) {
    if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)) {
      enc->start_ts = GST_BUFFER_TIMESTAMP (buf);
      enc->granulepos_offset = gst_util_uint64_scale
          (GST_BUFFER_TIMESTAMP (buf), enc->rate, GST_SECOND);
    } else {
      enc->start_ts = 0;
      enc->granulepos_offset = 0;
    }
  }

  /* Check if we have a continuous stream; if not, drop some samples or the
   * whole buffer, or insert some silence samples */
  if (enc->next_ts != GST_CLOCK_TIME_NONE &&
      GST_BUFFER_TIMESTAMP (buf) < enc->next_ts) {
    guint64 diff = enc->next_ts - GST_BUFFER_TIMESTAMP (buf);
    guint64 diff_bytes;

    GST_WARNING_OBJECT (enc, "Buffer is older than previous "
        "timestamp + duration (%" GST_TIME_FORMAT "< %" GST_TIME_FORMAT
        "), cannot handle. Clipping buffer.",
        GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
        GST_TIME_ARGS (enc->next_ts));

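    /* convert the overlap to bytes: frames * channels * 2 bytes per
     * 16-bit sample */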
    diff_bytes = GST_CLOCK_TIME_TO_FRAMES (diff, enc->rate) * enc->channels * 2;
    if (diff_bytes >= GST_BUFFER_SIZE (buf)) {
      gst_buffer_unref (buf);
      return GST_FLOW_OK;
    }
    buf = gst_buffer_make_metadata_writable (buf);
    GST_BUFFER_DATA (buf) += diff_bytes;
    GST_BUFFER_SIZE (buf) -= diff_bytes;

    GST_BUFFER_TIMESTAMP (buf) += diff;
    if (GST_BUFFER_DURATION_IS_VALID (buf))
      GST_BUFFER_DURATION (buf) -= diff;
  }

  if (enc->next_ts != GST_CLOCK_TIME_NONE
      && GST_BUFFER_TIMESTAMP_IS_VALID (buf)) {
    guint64 max_diff =
        gst_util_uint64_scale (enc->frame_size, GST_SECOND, enc->rate);

    if (GST_BUFFER_TIMESTAMP (buf) != enc->next_ts &&
        GST_BUFFER_TIMESTAMP (buf) - enc->next_ts > max_diff) {
      GST_WARNING_OBJECT (enc,
          "Discontinuity detected: %" G_GUINT64_FORMAT " > %" G_GUINT64_FORMAT,
          GST_BUFFER_TIMESTAMP (buf) - enc->next_ts, max_diff);

      gst_speex_enc_encode (enc, TRUE);

      enc->frameno_out = 0;
      enc->start_ts = GST_BUFFER_TIMESTAMP (buf);
      enc->granulepos_offset = gst_util_uint64_scale
          (GST_BUFFER_TIMESTAMP (buf), enc->rate, GST_SECOND);
    }
  }

  if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)
      && GST_BUFFER_DURATION_IS_VALID (buf))
    enc->next_ts = GST_BUFFER_TIMESTAMP (buf) + GST_BUFFER_DURATION (buf);
  else
    enc->next_ts = GST_CLOCK_TIME_NONE;

  GST_DEBUG_OBJECT (enc, "received buffer of %u bytes", GST_BUFFER_SIZE (buf));

  /* push buffer to adapter */
  gst_adapter_push (enc->adapter, buf);
  buf = NULL;

  ret = gst_speex_enc_encode (enc, FALSE);

done:

  if (buf)
    gst_buffer_unref (buf);

  return ret;

  /* ERRORS */
not_setup:
  {
    GST_ELEMENT_ERROR (enc, CORE, NEGOTIATION, (NULL),
        ("encoder not initialized (input is not audio?)"));
    ret = GST_FLOW_NOT_NEGOTIATED;
    goto done;
  }

}
Example #12
/**
 * gst_audio_buffer_clip:
 * @buffer: (transfer full): The buffer to clip.
 * @segment: Segment in %GST_FORMAT_TIME or %GST_FORMAT_DEFAULT to which
 *           the buffer should be clipped.
 * @rate: sample rate.
 * @bpf: size of one audio frame in bytes (the size of one sample * number
 *   of channels).
 *
 * Clip the buffer to the given #GstSegment.
 *
 * After calling this function the caller does not own a reference to
 * @buffer anymore.
 *
 * If the buffer has no timestamp, it is assumed to be inside the segment and
 * is not clipped.
 *
 * Returns: (transfer full): %NULL if the buffer is completely outside the
 * configured segment, otherwise the clipped buffer.
 */
GstBuffer *
gst_audio_buffer_clip (GstBuffer * buffer, GstSegment * segment, gint rate,
    gint bpf)
{
  GstBuffer *ret;
  GstClockTime timestamp = GST_CLOCK_TIME_NONE, duration = GST_CLOCK_TIME_NONE;
  guint64 offset = GST_BUFFER_OFFSET_NONE, offset_end = GST_BUFFER_OFFSET_NONE;
  gsize trim, size, osize;
  gboolean change_duration = TRUE, change_offset = TRUE, change_offset_end =
      TRUE;

  g_return_val_if_fail (segment->format == GST_FORMAT_TIME ||
      segment->format == GST_FORMAT_DEFAULT, buffer);
  g_return_val_if_fail (GST_IS_BUFFER (buffer), NULL);

  if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
    /* No timestamp - assume the buffer is completely in the segment */
    return buffer;

  /* Get copies of the buffer metadata to change later.
   * Calculate the missing values for the calculations,
   * they won't be changed later though. */

  trim = 0;
  osize = size = gst_buffer_get_size (buffer);

  /* no data, nothing to clip */
  if (!size)
    return buffer;

  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  GST_DEBUG ("timestamp %" GST_TIME_FORMAT, GST_TIME_ARGS (timestamp));
  if (GST_BUFFER_DURATION_IS_VALID (buffer)) {
    duration = GST_BUFFER_DURATION (buffer);
  } else {
    change_duration = FALSE;
    duration = gst_util_uint64_scale (size / bpf, GST_SECOND, rate);
  }

  if (GST_BUFFER_OFFSET_IS_VALID (buffer)) {
    offset = GST_BUFFER_OFFSET (buffer);
  } else {
    change_offset = FALSE;
    offset = 0;
  }

  if (GST_BUFFER_OFFSET_END_IS_VALID (buffer)) {
    offset_end = GST_BUFFER_OFFSET_END (buffer);
  } else {
    change_offset_end = FALSE;
    offset_end = offset + size / bpf;
  }

  if (segment->format == GST_FORMAT_TIME) {
    /* Handle clipping for GST_FORMAT_TIME */

    guint64 start, stop, cstart, cstop, diff;

    start = timestamp;
    stop = timestamp + duration;

    if (gst_segment_clip (segment, GST_FORMAT_TIME,
            start, stop, &cstart, &cstop)) {

      diff = cstart - start;
      if (diff > 0) {
        timestamp = cstart;

        if (change_duration)
          duration -= diff;

        diff = gst_util_uint64_scale (diff, rate, GST_SECOND);
        if (change_offset)
          offset += diff;
        trim += diff * bpf;
        size -= diff * bpf;
      }

      diff = stop - cstop;
      if (diff > 0) {
        /* duration is always valid if stop is valid */
        duration -= diff;

        diff = gst_util_uint64_scale (diff, rate, GST_SECOND);
        if (change_offset_end)
          offset_end -= diff;
        size -= diff * bpf;
      }
    } else {
      gst_buffer_unref (buffer);
      return NULL;
    }
  } else {
    /* Handle clipping for GST_FORMAT_DEFAULT */
    guint64 start, stop, cstart, cstop, diff;

    g_return_val_if_fail (GST_BUFFER_OFFSET_IS_VALID (buffer), buffer);

    start = offset;
    stop = offset_end;

    if (gst_segment_clip (segment, GST_FORMAT_DEFAULT,
            start, stop, &cstart, &cstop)) {

      diff = cstart - start;
      if (diff > 0) {
        offset = cstart;

        timestamp = gst_util_uint64_scale (cstart, GST_SECOND, rate);

        if (change_duration)
          duration -= gst_util_uint64_scale (diff, GST_SECOND, rate);

        trim += diff * bpf;
        size -= diff * bpf;
      }

      diff = stop - cstop;
      if (diff > 0) {
        offset_end = cstop;

        if (change_duration)
          duration -= gst_util_uint64_scale (diff, GST_SECOND, rate);

        size -= diff * bpf;
      }
    } else {
      gst_buffer_unref (buffer);
      return NULL;
    }
  }

  if (trim == 0 && size == osize) {
    ret = buffer;

    if (GST_BUFFER_TIMESTAMP (ret) != timestamp) {
      ret = gst_buffer_make_writable (ret);
      GST_BUFFER_TIMESTAMP (ret) = timestamp;
    }
    if (GST_BUFFER_DURATION (ret) != duration) {
      ret = gst_buffer_make_writable (ret);
      GST_BUFFER_DURATION (ret) = duration;
    }
  } else {
    /* Get a writable buffer and apply all changes */
    GST_DEBUG ("trim %" G_GSIZE_FORMAT " size %" G_GSIZE_FORMAT, trim, size);
    ret = gst_buffer_copy_region (buffer, GST_BUFFER_COPY_ALL, trim, size);
    gst_buffer_unref (buffer);

    GST_DEBUG ("timestamp %" GST_TIME_FORMAT, GST_TIME_ARGS (timestamp));
    GST_BUFFER_TIMESTAMP (ret) = timestamp;

    if (change_duration)
      GST_BUFFER_DURATION (ret) = duration;
    if (change_offset)
      GST_BUFFER_OFFSET (ret) = offset;
    if (change_offset_end)
      GST_BUFFER_OFFSET_END (ret) = offset_end;
  }
  return ret;
}
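For reference, here is a minimal usage sketch of the gst_audio_buffer_clip() API documented above. The element and field names (my_filter_chain, MyFilter, filter->segment, filter->rate, filter->bpf, filter->srcpad) are hypothetical and only illustrate the calling convention; they are not taken from any of the examples in this section.
static GstFlowReturn
my_filter_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
  MyFilter *filter = MY_FILTER (parent);

  /* gst_audio_buffer_clip () takes ownership of buf and returns NULL if the
   * buffer lies completely outside the configured segment */
  buf = gst_audio_buffer_clip (buf, &filter->segment, filter->rate,
      filter->bpf);
  if (buf == NULL)
    return GST_FLOW_OK;

  return gst_pad_push (filter->srcpad, buf);
}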
static GstFlowReturn
gst_selector_pad_chain (GstPad * pad, GstBuffer * buf)
{
  GstInputSelector *sel;
  GstFlowReturn res;
  GstPad *active_sinkpad;
  GstPad *prev_active_sinkpad;
  GstSelectorPad *selpad;
  GstClockTime start_time;
  GstSegment *seg;
  GstEvent *close_event = NULL, *start_event = NULL;
  GstCaps *caps;

  sel = GST_INPUT_SELECTOR (gst_pad_get_parent (pad));
  selpad = GST_SELECTOR_PAD_CAST (pad);
  seg = &selpad->segment;

  GST_INPUT_SELECTOR_LOCK (sel);
  /* wait or check for flushing */
  if (gst_input_selector_wait (sel, pad))
    goto flushing;

  GST_LOG_OBJECT (pad, "getting active pad");

  prev_active_sinkpad = sel->active_sinkpad;
  active_sinkpad = gst_input_selector_activate_sinkpad (sel, pad);

  /* update the segment on the srcpad */
  start_time = GST_BUFFER_TIMESTAMP (buf);
  if (GST_CLOCK_TIME_IS_VALID (start_time)) {
    GST_LOG_OBJECT (pad, "received start time %" GST_TIME_FORMAT,
        GST_TIME_ARGS (start_time));
    if (GST_BUFFER_DURATION_IS_VALID (buf))
      GST_LOG_OBJECT (pad, "received end time %" GST_TIME_FORMAT,
          GST_TIME_ARGS (start_time + GST_BUFFER_DURATION (buf)));

    GST_OBJECT_LOCK (pad);
    gst_segment_set_last_stop (seg, seg->format, start_time);
    GST_OBJECT_UNLOCK (pad);
  }

  /* Ignore buffers from pads except the selected one */
  if (pad != active_sinkpad)
    goto ignore;

  if (G_UNLIKELY (sel->pending_close)) {
    GstSegment *cseg = &sel->segment;

    GST_DEBUG_OBJECT (sel,
        "pushing close NEWSEGMENT update %d, rate %lf, applied rate %lf, "
        "format %d, "
        "%" G_GINT64_FORMAT " -- %" G_GINT64_FORMAT ", time %"
        G_GINT64_FORMAT, TRUE, cseg->rate, cseg->applied_rate, cseg->format,
        cseg->start, cseg->stop, cseg->time);

    /* create update segment */
    close_event = gst_event_new_new_segment_full (TRUE, cseg->rate,
        cseg->applied_rate, cseg->format, cseg->start, cseg->stop, cseg->time);

    sel->pending_close = FALSE;
  }
  /* if we have a pending segment, push it out now */
  if (G_UNLIKELY (selpad->segment_pending)) {
    GST_DEBUG_OBJECT (pad,
        "pushing pending NEWSEGMENT update %d, rate %lf, applied rate %lf, "
        "format %d, "
        "%" G_GINT64_FORMAT " -- %" G_GINT64_FORMAT ", time %"
        G_GINT64_FORMAT, FALSE, seg->rate, seg->applied_rate, seg->format,
        seg->start, seg->stop, seg->time);

    start_event = gst_event_new_new_segment_full (FALSE, seg->rate,
        seg->applied_rate, seg->format, seg->start, seg->stop, seg->time);

    selpad->segment_pending = FALSE;
  }
  GST_INPUT_SELECTOR_UNLOCK (sel);

  if (prev_active_sinkpad != active_sinkpad && pad == active_sinkpad)
    g_object_notify (G_OBJECT (sel), "active-pad");

  if (close_event)
    gst_pad_push_event (sel->srcpad, close_event);

  if (start_event)
    gst_pad_push_event (sel->srcpad, start_event);

  if (selpad->discont) {
    buf = gst_buffer_make_metadata_writable (buf);

    GST_DEBUG_OBJECT (pad, "Marking discont buffer %p", buf);
    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
    selpad->discont = FALSE;
  }

  /* forward */
  GST_LOG_OBJECT (pad, "Forwarding buffer %p", buf);

  if ((caps = GST_BUFFER_CAPS (buf))) {
    if (GST_PAD_CAPS (sel->srcpad) != caps)
      gst_pad_set_caps (sel->srcpad, caps);
  }

  res = gst_pad_push (sel->srcpad, buf);

done:
  gst_object_unref (sel);
  return res;

  /* dropped buffers */
ignore:
  {
    GST_DEBUG_OBJECT (pad, "Pad not active, discard buffer %p", buf);
    /* when we drop a buffer, we're creating a discont on this pad */
    selpad->discont = TRUE;
    GST_INPUT_SELECTOR_UNLOCK (sel);
    gst_buffer_unref (buf);

    /* figure out what to return upstream */
    GST_OBJECT_LOCK (selpad);
    if (selpad->always_ok)
      res = GST_FLOW_OK;
    else
      res = GST_FLOW_NOT_LINKED;
    GST_OBJECT_UNLOCK (selpad);

    goto done;
  }
flushing:
  {
    GST_DEBUG_OBJECT (pad, "We are flushing, discard buffer %p", buf);
    GST_INPUT_SELECTOR_UNLOCK (sel);
    gst_buffer_unref (buf);
    res = GST_FLOW_WRONG_STATE;
    goto done;
  }
}
Example #14
EXPORT_C
#endif

GstBuffer *
gst_audio_buffer_clip (GstBuffer * buffer, GstSegment * segment, gint rate,
    gint frame_size)
{
  GstBuffer *ret;
  GstClockTime timestamp = GST_CLOCK_TIME_NONE, duration = GST_CLOCK_TIME_NONE;
  guint64 offset = GST_BUFFER_OFFSET_NONE, offset_end = GST_BUFFER_OFFSET_NONE;
  guint8 *data;
  guint size;

  gboolean change_duration = TRUE, change_offset = TRUE, change_offset_end =
      TRUE;

  g_return_val_if_fail (segment->format == GST_FORMAT_TIME ||
      segment->format == GST_FORMAT_DEFAULT, buffer);
  g_return_val_if_fail (GST_IS_BUFFER (buffer), NULL);

  if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
    /* No timestamp - assume the buffer is completely in the segment */
    return buffer;

  /* Get copies of the buffer metadata to change later. 
   * Calculate the missing values for the calculations,
   * they won't be changed later though. */

  data = GST_BUFFER_DATA (buffer);
  size = GST_BUFFER_SIZE (buffer);

  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  if (GST_BUFFER_DURATION_IS_VALID (buffer)) {
    duration = GST_BUFFER_DURATION (buffer);
  } else {
    change_duration = FALSE;
    duration = gst_util_uint64_scale (size / frame_size, GST_SECOND, rate);
  }

  if (GST_BUFFER_OFFSET_IS_VALID (buffer)) {
    offset = GST_BUFFER_OFFSET (buffer);
  } else {
    change_offset = FALSE;
    offset = 0;
  }

  if (GST_BUFFER_OFFSET_END_IS_VALID (buffer)) {
    offset_end = GST_BUFFER_OFFSET_END (buffer);
  } else {
    change_offset_end = FALSE;
    offset_end = offset + size / frame_size;
  }

  if (segment->format == GST_FORMAT_TIME) {
    /* Handle clipping for GST_FORMAT_TIME */

    gint64 start, stop, cstart, cstop, diff;

    start = timestamp;
    stop = timestamp + duration;

    if (gst_segment_clip (segment, GST_FORMAT_TIME,
            start, stop, &cstart, &cstop)) {

      diff = cstart - start;
      if (diff > 0) {
        timestamp = cstart;

        if (change_duration)
          duration -= diff;

        diff = gst_util_uint64_scale (diff, rate, GST_SECOND);
        if (change_offset)
          offset += diff;
        data += diff * frame_size;
        size -= diff * frame_size;
      }

      diff = stop - cstop;
      if (diff > 0) {
        /* duration is always valid if stop is valid */
        duration -= diff;

        diff = gst_util_uint64_scale (diff, rate, GST_SECOND);
        if (change_offset_end)
          offset_end -= diff;
        size -= diff * frame_size;
      }
    } else {
      gst_buffer_unref (buffer);
      return NULL;
    }
  } else {
    /* Handle clipping for GST_FORMAT_DEFAULT */
    gint64 start, stop, cstart, cstop, diff;

    g_return_val_if_fail (GST_BUFFER_OFFSET_IS_VALID (buffer), buffer);

    start = offset;
    stop = offset_end;

    if (gst_segment_clip (segment, GST_FORMAT_DEFAULT,
            start, stop, &cstart, &cstop)) {

      diff = cstart - start;
      if (diff > 0) {
        offset = cstart;

        timestamp = gst_util_uint64_scale (cstart, GST_SECOND, rate);

        if (change_duration)
          duration -= gst_util_uint64_scale (diff, GST_SECOND, rate);

        data += diff * frame_size;
        size -= diff * frame_size;
      }

      diff = stop - cstop;
      if (diff > 0) {
        offset_end = cstop;

        if (change_duration)
          duration -= gst_util_uint64_scale (diff, GST_SECOND, rate);

        size -= diff * frame_size;
      }
    } else {
      gst_buffer_unref (buffer);
      return NULL;
    }
  }

  /* Get a metadata writable buffer and apply all changes */
  ret = gst_buffer_make_metadata_writable (buffer);

  GST_BUFFER_TIMESTAMP (ret) = timestamp;
  GST_BUFFER_SIZE (ret) = size;
  GST_BUFFER_DATA (ret) = data;

  if (change_duration)
    GST_BUFFER_DURATION (ret) = duration;
  if (change_offset)
    GST_BUFFER_OFFSET (ret) = offset;
  if (change_offset_end)
    GST_BUFFER_OFFSET_END (ret) = offset_end;

  return ret;
}
Example #15
static GstFlowReturn
gst_rsvg_dec_chain (GstPad * pad, GstBuffer * buffer)
{
  GstRsvgDec *rsvg = GST_RSVG_DEC (GST_PAD_PARENT (pad));
  gboolean completed = FALSE;
  const guint8 *data;
  guint size;
  GstFlowReturn ret = GST_FLOW_OK;

  /* first_timestamp is used slightly differently depending on whether
     a framerate is given or not.
     If there is a framerate, it will be used as a base.
     If there is not, it will be used to keep track of the timestamp
     of the first buffer, to be used as the timestamp of the output
     buffer. When a buffer is output, first_timestamp will resync to
     the next buffer's timestamp. */
  if (rsvg->first_timestamp == GST_CLOCK_TIME_NONE) {
    if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
      rsvg->first_timestamp = GST_BUFFER_TIMESTAMP (buffer);
    else if (rsvg->fps_n != 0)
      rsvg->first_timestamp = 0;
  }

  gst_adapter_push (rsvg->adapter, buffer);

  size = gst_adapter_available (rsvg->adapter);

  /* "<svg></svg>" */
  while (size >= 5 + 6 && ret == GST_FLOW_OK) {
    guint i;

    data = gst_adapter_peek (rsvg->adapter, size);
    for (i = size - 6; i >= 5; i--) {
      if (memcmp (data + i, "</svg>", 6) == 0) {
        completed = TRUE;
        size = i + 6;
        break;
      }
    }

    if (completed) {
      GstBuffer *outbuf = NULL;

      GST_LOG_OBJECT (rsvg, "have complete svg of %u bytes", size);

      data = gst_adapter_peek (rsvg->adapter, size);

      ret = gst_rsvg_decode_image (rsvg, data, size, &outbuf);
      if (ret != GST_FLOW_OK)
        break;


      if (rsvg->first_timestamp != GST_CLOCK_TIME_NONE) {
        GST_BUFFER_TIMESTAMP (outbuf) = rsvg->first_timestamp;
        GST_BUFFER_DURATION (outbuf) = GST_CLOCK_TIME_NONE;
        if (GST_BUFFER_DURATION_IS_VALID (buffer)) {
          GstClockTime end =
              GST_BUFFER_TIMESTAMP_IS_VALID (buffer) ?
              GST_BUFFER_TIMESTAMP (buffer) : rsvg->first_timestamp;
          end += GST_BUFFER_DURATION (buffer);
          GST_BUFFER_DURATION (outbuf) = end - GST_BUFFER_TIMESTAMP (outbuf);
        }
        if (rsvg->fps_n == 0) {
          rsvg->first_timestamp = GST_CLOCK_TIME_NONE;
        } else {
          GST_BUFFER_DURATION (outbuf) =
              gst_util_uint64_scale (rsvg->frame_count,
              rsvg->fps_d * GST_SECOND, rsvg->fps_n);
        }
      } else if (rsvg->fps_n != 0) {
        GST_BUFFER_TIMESTAMP (outbuf) =
            rsvg->first_timestamp + gst_util_uint64_scale (rsvg->frame_count,
            rsvg->fps_d * GST_SECOND, rsvg->fps_n);
        GST_BUFFER_DURATION (outbuf) =
            gst_util_uint64_scale (rsvg->frame_count,
            rsvg->fps_d * GST_SECOND, rsvg->fps_n);
      } else {
        GST_BUFFER_TIMESTAMP (outbuf) = rsvg->first_timestamp;
        GST_BUFFER_DURATION (outbuf) = GST_CLOCK_TIME_NONE;
      }
      rsvg->frame_count++;

      if (rsvg->need_newsegment) {
        gst_pad_push_event (rsvg->srcpad,
            gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0, -1, 0));
        rsvg->need_newsegment = FALSE;
      }

      if (rsvg->pending_events) {
        GList *l;

        for (l = rsvg->pending_events; l; l = l->next)
          gst_pad_push_event (rsvg->srcpad, l->data);
        g_list_free (rsvg->pending_events);
        rsvg->pending_events = NULL;
      }

      GST_LOG_OBJECT (rsvg, "image rendered okay");

      ret = gst_pad_push (rsvg->srcpad, outbuf);
      if (ret != GST_FLOW_OK)
        break;

      gst_adapter_flush (rsvg->adapter, size);
      size = gst_adapter_available (rsvg->adapter);
      continue;
    } else {
      break;
    }
  }

  return ret;
}
static GstFlowReturn
gst_rtp_mux_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
  GstRTPMux *rtp_mux;
  GstFlowReturn ret;
  GstRTPMuxPadPrivate *padpriv;
  gboolean drop;
  gboolean changed = FALSE;
  GstRTPBuffer rtpbuffer = GST_RTP_BUFFER_INIT;

  rtp_mux = GST_RTP_MUX (parent);

  if (gst_pad_check_reconfigure (rtp_mux->srcpad)) {
    GstCaps *current_caps = gst_pad_get_current_caps (pad);

    if (!gst_rtp_mux_setcaps (pad, rtp_mux, current_caps)) {
      ret = GST_FLOW_NOT_NEGOTIATED;
      gst_buffer_unref (buffer);
      goto out;
    }
    gst_caps_unref (current_caps);
  }

  GST_OBJECT_LOCK (rtp_mux);
  padpriv = gst_pad_get_element_private (pad);

  if (!padpriv) {
    GST_OBJECT_UNLOCK (rtp_mux);
    gst_buffer_unref (buffer);
    return GST_FLOW_NOT_LINKED;
  }

  buffer = gst_buffer_make_writable (buffer);

  if (!gst_rtp_buffer_map (buffer, GST_MAP_READWRITE, &rtpbuffer)) {
    GST_OBJECT_UNLOCK (rtp_mux);
    gst_buffer_unref (buffer);
    GST_ERROR_OBJECT (rtp_mux, "Invalid RTP buffer");
    return GST_FLOW_ERROR;
  }

  drop = !process_buffer_locked (rtp_mux, padpriv, &rtpbuffer);

  gst_rtp_buffer_unmap (&rtpbuffer);

  if (!drop) {
    if (pad != rtp_mux->last_pad) {
      changed = TRUE;
      g_clear_object (&rtp_mux->last_pad);
      rtp_mux->last_pad = g_object_ref (pad);
    }

    if (GST_BUFFER_DURATION_IS_VALID (buffer) &&
        GST_BUFFER_PTS_IS_VALID (buffer))
      rtp_mux->last_stop = GST_BUFFER_PTS (buffer) +
          GST_BUFFER_DURATION (buffer);
    else
      rtp_mux->last_stop = GST_CLOCK_TIME_NONE;
  }

  GST_OBJECT_UNLOCK (rtp_mux);

  if (changed)
    gst_pad_sticky_events_foreach (pad, resend_events, rtp_mux);

  if (drop) {
    gst_buffer_unref (buffer);
    ret = GST_FLOW_OK;
  } else {
    ret = gst_pad_push (rtp_mux->srcpad, buffer);
  }

out:
  return ret;
}
Example #17
static GstFlowReturn
gst_ffmpegmux_collected (GstCollectPads * pads, gpointer user_data)
{
  GstFFMpegMux *ffmpegmux = (GstFFMpegMux *) user_data;
  GSList *collected;
  GstFFMpegMuxPad *best_pad;
  GstClockTime best_time;
  const GstTagList *tags;

  /* open "file" (gstreamer protocol to next element) */
  if (!ffmpegmux->opened) {
    int open_flags = URL_WRONLY;

    /* we do need all streams to have started capsnego,
     * or things will go horribly wrong */
    for (collected = ffmpegmux->collect->data; collected;
        collected = g_slist_next (collected)) {
      GstFFMpegMuxPad *collect_pad = (GstFFMpegMuxPad *) collected->data;
      AVStream *st = ffmpegmux->context->streams[collect_pad->padnum];

      /* check whether the pad has successfully completed capsnego */
      if (st->codec->codec_id == CODEC_ID_NONE) {
        GST_ELEMENT_ERROR (ffmpegmux, CORE, NEGOTIATION, (NULL),
            ("no caps set on stream %d (%s)", collect_pad->padnum,
                (st->codec->codec_type == CODEC_TYPE_VIDEO) ?
                "video" : "audio"));
        return GST_FLOW_ERROR;
      }
      /* set framerate for audio */
      if (st->codec->codec_type == CODEC_TYPE_AUDIO) {
        switch (st->codec->codec_id) {
          case CODEC_ID_PCM_S16LE:
          case CODEC_ID_PCM_S16BE:
          case CODEC_ID_PCM_U16LE:
          case CODEC_ID_PCM_U16BE:
          case CODEC_ID_PCM_S8:
          case CODEC_ID_PCM_U8:
            st->codec->frame_size = 1;
            break;
          default:
          {
            GstBuffer *buffer;

            /* FIXME : This doesn't work for RAW AUDIO...
             * in fact I'm wondering if it even works for any kind of audio... */
            buffer = gst_collect_pads_peek (ffmpegmux->collect,
                (GstCollectData *) collect_pad);
            if (buffer) {
              st->codec->frame_size =
                  st->codec->sample_rate *
                  GST_BUFFER_DURATION (buffer) / GST_SECOND;
              gst_buffer_unref (buffer);
            }
          }
        }
      }
    }

    /* tags */
    tags = gst_tag_setter_get_tag_list (GST_TAG_SETTER (ffmpegmux));
    if (tags) {
      gint i;
      gchar *s;

      /* get the interesting ones */
      if (gst_tag_list_get_string (tags, GST_TAG_TITLE, &s)) {
        strncpy (ffmpegmux->context->title, s,
            sizeof (ffmpegmux->context->title));
      }
      if (gst_tag_list_get_string (tags, GST_TAG_ARTIST, &s)) {
        strncpy (ffmpegmux->context->author, s,
            sizeof (ffmpegmux->context->author));
      }
      if (gst_tag_list_get_string (tags, GST_TAG_COPYRIGHT, &s)) {
        strncpy (ffmpegmux->context->copyright, s,
            sizeof (ffmpegmux->context->copyright));
      }
      if (gst_tag_list_get_string (tags, GST_TAG_COMMENT, &s)) {
        strncpy (ffmpegmux->context->comment, s,
            sizeof (ffmpegmux->context->comment));
      }
      if (gst_tag_list_get_string (tags, GST_TAG_ALBUM, &s)) {
        strncpy (ffmpegmux->context->album, s,
            sizeof (ffmpegmux->context->album));
      }
      if (gst_tag_list_get_string (tags, GST_TAG_GENRE, &s)) {
        strncpy (ffmpegmux->context->genre, s,
            sizeof (ffmpegmux->context->genre));
      }
      if (gst_tag_list_get_int (tags, GST_TAG_TRACK_NUMBER, &i)) {
        ffmpegmux->context->track = i;
      }
    }

    /* set the streamheader flag for gstffmpegprotocol if codec supports it */
    if (!strcmp (ffmpegmux->context->oformat->name, "flv")) {
      open_flags |= GST_FFMPEG_URL_STREAMHEADER;
    }

    if (url_fopen (&ffmpegmux->context->pb,
            ffmpegmux->context->filename, open_flags) < 0) {
      GST_ELEMENT_ERROR (ffmpegmux, LIBRARY, TOO_LAZY, (NULL),
          ("Failed to open stream context in ffmux"));
      return GST_FLOW_ERROR;
    }

    if (av_set_parameters (ffmpegmux->context, NULL) < 0) {
      GST_ELEMENT_ERROR (ffmpegmux, LIBRARY, INIT, (NULL),
          ("Failed to initialize muxer"));
      return GST_FLOW_ERROR;
    }

    /* now open the mux format */
    if (av_write_header (ffmpegmux->context) < 0) {
      GST_ELEMENT_ERROR (ffmpegmux, LIBRARY, SETTINGS, (NULL),
          ("Failed to write file header - check codec settings"));
      return GST_FLOW_ERROR;
    }

    /* we're now opened */
    ffmpegmux->opened = TRUE;

    /* flush the header so it will be used as streamheader */
    put_flush_packet (ffmpegmux->context->pb);
  }

  /* take the one with earliest timestamp,
   * and push it forward */
  best_pad = NULL;
  best_time = GST_CLOCK_TIME_NONE;
  for (collected = ffmpegmux->collect->data; collected;
      collected = g_slist_next (collected)) {
    GstFFMpegMuxPad *collect_pad = (GstFFMpegMuxPad *) collected->data;
    GstBuffer *buffer = gst_collect_pads_peek (ffmpegmux->collect,
        (GstCollectData *) collect_pad);

    /* if there's no buffer, just continue */
    if (buffer == NULL) {
      continue;
    }

    /* if we have no buffer yet, just use the first one */
    if (best_pad == NULL) {
      best_pad = collect_pad;
      best_time = GST_BUFFER_TIMESTAMP (buffer);
      goto next_pad;
    }

    /* if we do have one, only use this one if it's older */
    if (GST_BUFFER_TIMESTAMP (buffer) < best_time) {
      best_time = GST_BUFFER_TIMESTAMP (buffer);
      best_pad = collect_pad;
    }

  next_pad:
    gst_buffer_unref (buffer);

    /* Mux buffers with invalid timestamp first */
    if (!GST_CLOCK_TIME_IS_VALID (best_time))
      break;
  }

  /* now handle the buffer, or signal EOS if we have
   * no buffers left */
  if (best_pad != NULL) {
    GstBuffer *buf;
    AVPacket pkt;
    gboolean need_free = FALSE;

    /* push out current buffer */
    buf = gst_collect_pads_pop (ffmpegmux->collect,
        (GstCollectData *) best_pad);

    ffmpegmux->context->streams[best_pad->padnum]->codec->frame_number++;

    /* set time */
    pkt.pts = gst_ffmpeg_time_gst_to_ff (GST_BUFFER_TIMESTAMP (buf),
        ffmpegmux->context->streams[best_pad->padnum]->time_base);
    pkt.dts = pkt.pts;

    if (strcmp (ffmpegmux->context->oformat->name, "gif") == 0) {
      AVStream *st = ffmpegmux->context->streams[best_pad->padnum];
      AVPicture src, dst;

      need_free = TRUE;
      pkt.size = st->codec->width * st->codec->height * 3;
      pkt.data = g_malloc (pkt.size);

      dst.data[0] = pkt.data;
      dst.data[1] = NULL;
      dst.data[2] = NULL;
      dst.linesize[0] = st->codec->width * 3;

      gst_ffmpeg_avpicture_fill (&src, GST_BUFFER_DATA (buf),
          PIX_FMT_RGB24, st->codec->width, st->codec->height);

      av_picture_copy (&dst, &src, PIX_FMT_RGB24,
          st->codec->width, st->codec->height);
    } else {
      pkt.data = GST_BUFFER_DATA (buf);
      pkt.size = GST_BUFFER_SIZE (buf);
    }

    pkt.stream_index = best_pad->padnum;
    pkt.flags = 0;

    if (!GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT))
      pkt.flags |= PKT_FLAG_KEY;

    if (GST_BUFFER_DURATION_IS_VALID (buf))
      pkt.duration =
          gst_ffmpeg_time_gst_to_ff (GST_BUFFER_DURATION (buf),
          ffmpegmux->context->streams[best_pad->padnum]->time_base);
    else
      pkt.duration = 0;
    av_write_frame (ffmpegmux->context, &pkt);
    gst_buffer_unref (buf);
    if (need_free)
      g_free (pkt.data);
  } else {
    /* close down */
    av_write_trailer (ffmpegmux->context);
    ffmpegmux->opened = FALSE;
    put_flush_packet (ffmpegmux->context->pb);
    url_fclose (ffmpegmux->context->pb);
    gst_pad_push_event (ffmpegmux->srcpad, gst_event_new_eos ());
    return GST_FLOW_UNEXPECTED;
  }

  return GST_FLOW_OK;
}
Example #18
bool nvxio::GStreamerBaseRenderImpl::flush()
{
    if (!pipeline)
        return false;

    glfwMakeContextCurrent(window_);

    if (glfwWindowShouldClose(window_))
        return false;

    gl_->PixelStorei(GL_PACK_ALIGNMENT, 1);
    gl_->PixelStorei(GL_PACK_ROW_LENGTH, wndWidth_);

    {
        GstClockTime duration = GST_SECOND / (double)GSTREAMER_DEFAULT_FPS;
        GstClockTime timestamp = num_frames * duration;

#if GST_VERSION_MAJOR == 0
        GstBuffer * buffer = gst_buffer_try_new_and_alloc(wndHeight_ * wndWidth_ * 4);
        if (!buffer)
        {
            NVXIO_PRINT("Cannot create GStreamer buffer");
            FinalizeGStreamerPipeline();
            return false;
        }

        gl_->ReadPixels(0, 0, wndWidth_, wndHeight_, GL_RGBA, GL_UNSIGNED_BYTE, GST_BUFFER_DATA (buffer));

        GST_BUFFER_TIMESTAMP(buffer) = timestamp;
        if (!GST_BUFFER_TIMESTAMP_IS_VALID(buffer))
            NVXIO_PRINT("Failed to setup timestamp");
#else
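        // GStreamer 1.x: buffers carry separate PTS and DTS fields instead of
        // the single TIMESTAMP field used in the 0.10 branch above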
        GstBuffer * buffer = gst_buffer_new_allocate(NULL, wndHeight_ * wndWidth_ * 4, NULL);

        GstMapInfo info;
        gst_buffer_map(buffer, &info, GST_MAP_READ);
        gl_->ReadPixels(0, 0, wndWidth_, wndHeight_, GL_RGBA, GL_UNSIGNED_BYTE, info.data);
        gst_buffer_unmap(buffer, &info);

        GST_BUFFER_PTS(buffer) = timestamp;
        if (!GST_BUFFER_PTS_IS_VALID(buffer))
            NVXIO_PRINT("Failed to setup PTS");

        GST_BUFFER_DTS(buffer) = timestamp;
        if (!GST_BUFFER_DTS_IS_VALID(buffer))
            NVXIO_PRINT("Failed to setup DTS");
#endif
        GST_BUFFER_DURATION(buffer) = duration;
        if (!GST_BUFFER_DURATION_IS_VALID(buffer))
            NVXIO_PRINT("Failed to setup duration");

        GST_BUFFER_OFFSET(buffer) = num_frames++;
        if (!GST_BUFFER_OFFSET_IS_VALID(buffer))
            NVXIO_PRINT("Failed to setup offset");

        if (gst_app_src_push_buffer(appsrc, buffer) != GST_FLOW_OK)
        {
            NVXIO_PRINT("Error pushing buffer to GStreamer pipeline");
            FinalizeGStreamerPipeline();
            return false;
        }
    }

    // reset state
    gl_->PixelStorei(GL_PACK_ALIGNMENT, 4);
    gl_->PixelStorei(GL_PACK_ROW_LENGTH, 0);

    glfwSwapBuffers(window_);

    clearGlBuffer();

    return true;
}
Example #19
static GstFlowReturn
gst_live_adder_chain (GstPad * pad, GstBuffer * buffer)
{
    GstLiveAdder *adder = GST_LIVE_ADDER (gst_pad_get_parent_element (pad));
    GstLiveAdderPadPrivate *padprivate = NULL;
    GstFlowReturn ret = GST_FLOW_OK;
    GList *item = NULL;
    GstClockTime skip = 0;
    gint64 drift = 0;             /* Positive if new buffer after old buffer */

    GST_OBJECT_LOCK (adder);

    ret = adder->srcresult;

    GST_DEBUG ("Incoming buffer time:%" GST_TIME_FORMAT " duration:%"
               GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)),
               GST_TIME_ARGS (GST_BUFFER_DURATION (buffer)));

    if (ret != GST_FLOW_OK) {
        GST_DEBUG_OBJECT (adder, "Passing non-ok result from src: %s",
                          gst_flow_get_name (ret));
        gst_buffer_unref (buffer);
        goto out;
    }

    padprivate = gst_pad_get_element_private (pad);

    if (!padprivate) {
        ret = GST_FLOW_NOT_LINKED;
        gst_buffer_unref (buffer);
        goto out;
    }

    if (padprivate->eos) {
        GST_DEBUG_OBJECT (adder, "Received buffer after EOS");
        ret = GST_FLOW_UNEXPECTED;
        gst_buffer_unref (buffer);
        goto out;
    }

    if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
        goto invalid_timestamp;

    if (padprivate->segment.format == GST_FORMAT_UNDEFINED) {
        GST_WARNING_OBJECT (adder, "No new-segment received,"
                            " initializing segment with time 0..-1");
        gst_segment_init (&padprivate->segment, GST_FORMAT_TIME);
        gst_segment_set_newsegment (&padprivate->segment,
                                    FALSE, 1.0, GST_FORMAT_TIME, 0, -1, 0);
    }

    if (padprivate->segment.format != GST_FORMAT_TIME)
        goto invalid_segment;

    buffer = gst_buffer_make_metadata_writable (buffer);

    drift = GST_BUFFER_TIMESTAMP (buffer) - padprivate->expected_timestamp;

    /* Check for timestamp discontinuities that aren't flagged as DISCONT */
    if (GST_CLOCK_TIME_IS_VALID (padprivate->expected_timestamp) &&
            !GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT) &&
            (drift != 0)) {
        GST_LOG_OBJECT (adder,
                        "Timestamp discontinuity without the DISCONT flag set"
                        " (expected %" GST_TIME_FORMAT ", got %" GST_TIME_FORMAT
                        " drift:%" G_GINT64_FORMAT "ms)",
                        GST_TIME_ARGS (padprivate->expected_timestamp),
                        GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)), drift / GST_MSECOND);

        /* We accept drifts of 10ms */
        if (ABS (drift) < (10 * GST_MSECOND)) {
            GST_DEBUG ("Correcting minor drift");
            GST_BUFFER_TIMESTAMP (buffer) = padprivate->expected_timestamp;
        }
    }


    /* If there is no duration, lets set one */
    if (!GST_BUFFER_DURATION_IS_VALID (buffer)) {
        GST_BUFFER_DURATION (buffer) =
            gst_audio_duration_from_pad_buffer (pad, buffer);
        padprivate->expected_timestamp = GST_CLOCK_TIME_NONE;
    } else {
        padprivate->expected_timestamp = GST_BUFFER_TIMESTAMP (buffer) +
                                         GST_BUFFER_DURATION (buffer);
    }


    /*
     * Let's clip the buffer to the segment (so we don't have to worry about
     * clipping afterwards).
     * This should also guarantee that we'll have valid timestamps and
     * durations afterwards
     */

    buffer = gst_audio_buffer_clip (buffer, &padprivate->segment, adder->rate,
                                    adder->bps);

    /* buffer can be NULL if it's completely outside of the segment */
    if (!buffer) {
        GST_DEBUG ("Buffer completely outside of configured segment, dropping it");
        goto out;
    }

    /*
     * Convert to running time so that all incoming buffers share the same
     * time base
     */
    GST_BUFFER_TIMESTAMP (buffer) =
        gst_segment_to_running_time (&padprivate->segment,
                                     padprivate->segment.format, GST_BUFFER_TIMESTAMP (buffer));


    if (GST_CLOCK_TIME_IS_VALID (adder->next_timestamp) &&
            GST_BUFFER_TIMESTAMP (buffer) < adder->next_timestamp) {
        if (GST_BUFFER_TIMESTAMP (buffer) + GST_BUFFER_DURATION (buffer) <
                adder->next_timestamp) {
            GST_DEBUG_OBJECT (adder, "Buffer is late, dropping (ts: %" GST_TIME_FORMAT
                              " duration: %" GST_TIME_FORMAT ")",
                              GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)),
                              GST_TIME_ARGS (GST_BUFFER_DURATION (buffer)));
            gst_buffer_unref (buffer);
            goto out;
        } else {
            skip = adder->next_timestamp - GST_BUFFER_TIMESTAMP (buffer);
            GST_DEBUG_OBJECT (adder, "Buffer is partially late, skipping %"
                              GST_TIME_FORMAT, GST_TIME_ARGS (skip));
        }
    }

    /* If our new buffer starts before the head of the queue, let's wake up;
     * we may not have to wait as long
     */
    if (adder->clock_id &&
            g_queue_peek_head (adder->buffers) != NULL &&
            GST_BUFFER_TIMESTAMP (buffer) + skip <
            GST_BUFFER_TIMESTAMP (g_queue_peek_head (adder->buffers)))
        gst_clock_id_unschedule (adder->clock_id);

    for (item = g_queue_peek_head_link (adder->buffers);
            item; item = g_list_next (item)) {
        GstBuffer *oldbuffer = item->data;
        GstClockTime old_skip = 0;
        GstClockTime mix_duration = 0;
        GstClockTime mix_start = 0;
        GstClockTime mix_end = 0;

        /* We haven't reached our place yet */
        if (GST_BUFFER_TIMESTAMP (buffer) + skip >=
                GST_BUFFER_TIMESTAMP (oldbuffer) + GST_BUFFER_DURATION (oldbuffer))
            continue;

        /* We're past our place, let's insert ourselves here */
        if (GST_BUFFER_TIMESTAMP (buffer) + GST_BUFFER_DURATION (buffer) <=
                GST_BUFFER_TIMESTAMP (oldbuffer))
            break;

        /* if we reach this spot, we have overlap, so we must mix */

        /* First make a subbuffer with the non-overlapping part */
        if (GST_BUFFER_TIMESTAMP (buffer) + skip < GST_BUFFER_TIMESTAMP (oldbuffer)) {
            GstBuffer *subbuffer = NULL;
            GstClockTime subbuffer_duration = GST_BUFFER_TIMESTAMP (oldbuffer) -
                                              (GST_BUFFER_TIMESTAMP (buffer) + skip);

            subbuffer = gst_buffer_create_sub (buffer,
                                               gst_live_adder_length_from_duration (adder, skip),
                                               gst_live_adder_length_from_duration (adder, subbuffer_duration));

            GST_BUFFER_TIMESTAMP (subbuffer) = GST_BUFFER_TIMESTAMP (buffer) + skip;
            GST_BUFFER_DURATION (subbuffer) = subbuffer_duration;

            skip += subbuffer_duration;

            g_queue_insert_before (adder->buffers, item, subbuffer);
        }

        /* Now we are on the overlapping part */
        oldbuffer = gst_buffer_make_writable (oldbuffer);
        item->data = oldbuffer;

        old_skip = GST_BUFFER_TIMESTAMP (buffer) + skip -
                   GST_BUFFER_TIMESTAMP (oldbuffer);

        mix_start = GST_BUFFER_TIMESTAMP (oldbuffer) + old_skip;

        if (GST_BUFFER_TIMESTAMP (buffer) + GST_BUFFER_DURATION (buffer) <
                GST_BUFFER_TIMESTAMP (oldbuffer) + GST_BUFFER_DURATION (oldbuffer))
            mix_end = GST_BUFFER_TIMESTAMP (buffer) + GST_BUFFER_DURATION (buffer);
        else
            mix_end = GST_BUFFER_TIMESTAMP (oldbuffer) +
                      GST_BUFFER_DURATION (oldbuffer);

        mix_duration = mix_end - mix_start;

        adder->func (GST_BUFFER_DATA (oldbuffer) +
                     gst_live_adder_length_from_duration (adder, old_skip),
                     GST_BUFFER_DATA (buffer) +
                     gst_live_adder_length_from_duration (adder, skip),
                     gst_live_adder_length_from_duration (adder, mix_duration));

        skip += mix_duration;
    }

    g_cond_broadcast (adder->not_empty_cond);

    if (skip == GST_BUFFER_DURATION (buffer)) {
        gst_buffer_unref (buffer);
    } else {
        if (skip) {
            GstClockTime subbuffer_duration = GST_BUFFER_DURATION (buffer) - skip;
            GstClockTime subbuffer_ts = GST_BUFFER_TIMESTAMP (buffer) + skip;
            GstBuffer *new_buffer = gst_buffer_create_sub (buffer,
                                    gst_live_adder_length_from_duration (adder, skip),
                                    gst_live_adder_length_from_duration (adder, subbuffer_duration));
            gst_buffer_unref (buffer);
            buffer = new_buffer;
            GST_BUFFER_TIMESTAMP (buffer) = subbuffer_ts;
            GST_BUFFER_DURATION (buffer) = subbuffer_duration;
        }

        if (item)
            g_queue_insert_before (adder->buffers, item, buffer);
        else
            g_queue_push_tail (adder->buffers, buffer);
    }

out:

    GST_OBJECT_UNLOCK (adder);
    gst_object_unref (adder);

    return ret;

invalid_timestamp:

    GST_OBJECT_UNLOCK (adder);
    gst_buffer_unref (buffer);
    GST_ELEMENT_ERROR (adder, STREAM, FAILED,
                       ("Buffer without a valid timestamp received"),
                       ("Invalid timestamp received on buffer"));

    return GST_FLOW_ERROR;

invalid_segment:
    {
        const gchar *format = gst_format_get_name (padprivate->segment.format);
        GST_OBJECT_UNLOCK (adder);
        gst_buffer_unref (buffer);
        GST_ELEMENT_ERROR (adder, STREAM, FAILED,
                           ("This element only supports TIME segments, received other type"),
                           ("Received a segment of type %s, only support time segment", format));

        return GST_FLOW_ERROR;
    }

}
Example #20
static gboolean
gst_video_rate_sink_event (GstBaseTransform * trans, GstEvent * event)
{
  GstVideoRate *videorate;

  videorate = GST_VIDEO_RATE (trans);

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_SEGMENT:
    {
      const GstSegment *segment;

      gst_event_parse_segment (event, &segment);

      if (segment->format != GST_FORMAT_TIME)
        goto format_error;

      GST_DEBUG_OBJECT (videorate, "handle NEWSEGMENT");

      /* close up the previous segment, if appropriate */
      if (videorate->prevbuf) {
        gint count = 0;
        GstFlowReturn res;

        res = GST_FLOW_OK;
        /* fill up to the end of current segment,
         * or only send out the stored buffer if there is no specific stop.
         * regardless, prevent going loopy in strange cases */
        while (res == GST_FLOW_OK && count <= MAGIC_LIMIT &&
            ((GST_CLOCK_TIME_IS_VALID (videorate->segment.stop) &&
                    videorate->next_ts - videorate->segment.base
                    < videorate->segment.stop)
                || count < 1)) {
          res = gst_video_rate_flush_prev (videorate, count > 0);
          count++;
        }
        if (count > 1) {
          videorate->dup += count - 1;
          if (!videorate->silent)
            gst_video_rate_notify_duplicate (videorate);
        } else if (count == 0) {
          videorate->drop++;
          if (!videorate->silent)
            gst_video_rate_notify_drop (videorate);
        }
        /* clean up for the new one; _chain will resume from the new start */
        gst_video_rate_swap_prev (videorate, NULL, 0);
      }

      videorate->base_ts = 0;
      videorate->out_frame_count = 0;
      videorate->next_ts = GST_CLOCK_TIME_NONE;

      /* We just want to update the accumulated stream_time  */
      gst_segment_copy_into (segment, &videorate->segment);

      GST_DEBUG_OBJECT (videorate, "updated segment: %" GST_SEGMENT_FORMAT,
          &videorate->segment);
      break;
    }
    case GST_EVENT_EOS:{
      gint count = 0;
      GstFlowReturn res = GST_FLOW_OK;

      GST_DEBUG_OBJECT (videorate, "Got EOS");

      /* If the segment has a stop position, fill the segment */
      if (GST_CLOCK_TIME_IS_VALID (videorate->segment.stop)) {
        /* fill up to the end of current segment,
         * or only send out the stored buffer if there is no specific stop.
         * regardless, prevent going loopy in strange cases */
        while (res == GST_FLOW_OK && count <= MAGIC_LIMIT &&
            ((videorate->next_ts - videorate->segment.base <
                    videorate->segment.stop)
                || count < 1)) {
          res = gst_video_rate_flush_prev (videorate, count > 0);
          count++;
        }
      } else if (videorate->prevbuf) {
        /* Output at least one frame but if the buffer duration is valid, output
         * enough frames to use the complete buffer duration */
        if (GST_BUFFER_DURATION_IS_VALID (videorate->prevbuf)) {
          GstClockTime end_ts =
              videorate->next_ts + GST_BUFFER_DURATION (videorate->prevbuf);

          while (res == GST_FLOW_OK && count <= MAGIC_LIMIT &&
              ((videorate->next_ts - videorate->segment.base < end_ts)
                  || count < 1)) {
            res = gst_video_rate_flush_prev (videorate, count > 0);
            count++;
          }
        } else {
          res = gst_video_rate_flush_prev (videorate, FALSE);
          count = 1;
        }
      }

      if (count > 1) {
        videorate->dup += count - 1;
        if (!videorate->silent)
          gst_video_rate_notify_duplicate (videorate);
      } else if (count == 0) {
        videorate->drop++;
        if (!videorate->silent)
          gst_video_rate_notify_drop (videorate);
      }

      break;
    }
    case GST_EVENT_FLUSH_STOP:
      /* also resets the segment */
      GST_DEBUG_OBJECT (videorate, "Got FLUSH_STOP");
      gst_video_rate_reset (videorate);
      break;
    case GST_EVENT_GAP:
      /* no gaps after videorate, ignore the event */
      gst_event_unref (event);
      return TRUE;
    default:
      break;
  }

  return GST_BASE_TRANSFORM_CLASS (parent_class)->sink_event (trans, event);

  /* ERRORS */
format_error:
  {
    GST_WARNING_OBJECT (videorate,
        "Got segment but doesn't have GST_FORMAT_TIME value");
    return FALSE;
  }
}
static GstFlowReturn
gst_multi_file_sink_write_buffer (GstMultiFileSink * multifilesink,
    GstBuffer * buffer)
{
  GstMapInfo map;
  gboolean ret;
  gboolean first_file = TRUE;

  gst_buffer_map (buffer, &map, GST_MAP_READ);

  switch (multifilesink->next_file) {
    case GST_MULTI_FILE_SINK_NEXT_BUFFER:
      if (multifilesink->files != NULL)
        first_file = FALSE;
      if (!gst_multi_file_sink_open_next_file (multifilesink))
        goto stdio_write_error;
      if (first_file == FALSE)
        gst_multi_file_sink_write_stream_headers (multifilesink);
      GST_DEBUG_OBJECT (multifilesink,
          "Writing buffer data (%" G_GSIZE_FORMAT " bytes) to new file",
          map.size);
      ret = fwrite (map.data, map.size, 1, multifilesink->file);
      if (ret != 1)
        goto stdio_write_error;

      gst_multi_file_sink_close_file (multifilesink, buffer);
      break;
    case GST_MULTI_FILE_SINK_NEXT_DISCONT:
      if (GST_BUFFER_IS_DISCONT (buffer)) {
        if (multifilesink->file)
          gst_multi_file_sink_close_file (multifilesink, buffer);
      }

      if (multifilesink->file == NULL) {
        if (!gst_multi_file_sink_open_next_file (multifilesink))
          goto stdio_write_error;
      }

      ret = fwrite (map.data, map.size, 1, multifilesink->file);
      if (ret != 1)
        goto stdio_write_error;

      break;
    case GST_MULTI_FILE_SINK_NEXT_KEY_FRAME:
      if (multifilesink->next_segment == GST_CLOCK_TIME_NONE) {
        if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer)) {
          multifilesink->next_segment = GST_BUFFER_TIMESTAMP (buffer) +
              10 * GST_SECOND;
        }
      }

      if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer) &&
          GST_BUFFER_TIMESTAMP (buffer) >= multifilesink->next_segment &&
          !GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT)) {
        if (multifilesink->file) {
          first_file = FALSE;
          gst_multi_file_sink_close_file (multifilesink, buffer);
        }
        multifilesink->next_segment += 10 * GST_SECOND;
      }

      if (multifilesink->file == NULL) {
        if (!gst_multi_file_sink_open_next_file (multifilesink))
          goto stdio_write_error;

        if (!first_file)
          gst_multi_file_sink_write_stream_headers (multifilesink);
      }

      ret = fwrite (map.data, map.size, 1, multifilesink->file);
      if (ret != 1)
        goto stdio_write_error;

      break;
    case GST_MULTI_FILE_SINK_NEXT_KEY_UNIT_EVENT:
      if (multifilesink->file == NULL) {
        if (!gst_multi_file_sink_open_next_file (multifilesink))
          goto stdio_write_error;

        /* we don't need to write stream headers here, they will be inserted in
         * the stream by upstream elements if key unit events have
         * all_headers=true set
         */
      }

      ret = fwrite (map.data, map.size, 1, multifilesink->file);

      if (ret != 1)
        goto stdio_write_error;

      break;
    case GST_MULTI_FILE_SINK_NEXT_MAX_SIZE:{
      guint64 new_size;

      new_size = multifilesink->cur_file_size + map.size;
      if (new_size > multifilesink->max_file_size) {

        GST_INFO_OBJECT (multifilesink, "current size: %" G_GUINT64_FORMAT
            ", new_size: %" G_GUINT64_FORMAT ", max. size %" G_GUINT64_FORMAT,
            multifilesink->cur_file_size, new_size,
            multifilesink->max_file_size);

        if (multifilesink->file != NULL) {
          first_file = FALSE;
          gst_multi_file_sink_close_file (multifilesink, buffer);
        }
      }

      if (multifilesink->file == NULL) {
        if (!gst_multi_file_sink_open_next_file (multifilesink))
          goto stdio_write_error;

        if (!first_file)
          gst_multi_file_sink_write_stream_headers (multifilesink);
      }

      ret = fwrite (map.data, map.size, 1, multifilesink->file);

      if (ret != 1)
        goto stdio_write_error;

      multifilesink->cur_file_size += map.size;
      break;
    }
    case GST_MULTI_FILE_SINK_NEXT_MAX_DURATION:{
      GstClockTime new_duration = 0;

      if (GST_BUFFER_PTS_IS_VALID (buffer)
          && GST_CLOCK_TIME_IS_VALID (multifilesink->file_pts)) {
        /* The new duration will extend to this new buffer pts ... */
        new_duration = GST_BUFFER_PTS (buffer) - multifilesink->file_pts;
        /* ... and duration (if it has one) */
        if (GST_BUFFER_DURATION_IS_VALID (buffer))
          new_duration += GST_BUFFER_DURATION (buffer);
      }

      if (new_duration > multifilesink->max_file_duration) {

        GST_INFO_OBJECT (multifilesink,
            "new_duration: %" G_GUINT64_FORMAT ", max. duration %"
            G_GUINT64_FORMAT, new_duration, multifilesink->max_file_duration);

        if (multifilesink->file != NULL) {
          first_file = FALSE;
          gst_multi_file_sink_close_file (multifilesink, buffer);
        }
      }

      if (multifilesink->file == NULL) {
        if (!gst_multi_file_sink_open_next_file (multifilesink))
          goto stdio_write_error;

        multifilesink->file_pts = GST_BUFFER_PTS (buffer);
        if (!first_file)
          gst_multi_file_sink_write_stream_headers (multifilesink);
      }

      ret = fwrite (map.data, map.size, 1, multifilesink->file);

      if (ret != 1)
        goto stdio_write_error;

      break;
    }
    default:
      g_assert_not_reached ();
  }

  gst_buffer_unmap (buffer, &map);
  return GST_FLOW_OK;

  /* ERRORS */
stdio_write_error:
  switch (errno) {
    case ENOSPC:
      GST_ELEMENT_ERROR (multifilesink, RESOURCE, NO_SPACE_LEFT,
          ("Error while writing to file."), ("%s", g_strerror (errno)));
      break;
    default:
      GST_ELEMENT_ERROR (multifilesink, RESOURCE, WRITE,
          ("Error while writing to file."), ("%s", g_strerror (errno)));
  }
  gst_buffer_unmap (buffer, &map);
  return GST_FLOW_ERROR;
}
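As a usage sketch for the next-file modes handled above: the small program below builds a pipeline that rotates to a new output file on every key frame. It is an assumption-laden example, not taken from the original file; it presumes an H.264 encoder such as x264enc is available and relies only on multifilesink's documented "location" and "next-file" properties.

#include <gst/gst.h>

int
main (int argc, char **argv)
{
  GstElement *pipeline;
  GstBus *bus;
  GstMessage *msg;
  GError *error = NULL;

  gst_init (&argc, &argv);

  /* hypothetical pipeline: one file per key frame, named segment-00000.h264, ... */
  pipeline = gst_parse_launch (
      "videotestsrc num-buffers=300 ! x264enc key-int-max=30 ! "
      "multifilesink location=segment-%05d.h264 next-file=key-frame", &error);
  if (pipeline == NULL) {
    g_printerr ("Failed to build pipeline: %s\n", error->message);
    g_clear_error (&error);
    return 1;
  }

  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* wait for EOS or an error, then shut down */
  bus = gst_element_get_bus (pipeline);
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
      GST_MESSAGE_EOS | GST_MESSAGE_ERROR);
  if (msg != NULL)
    gst_message_unref (msg);
  gst_object_unref (bus);

  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}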
static GstFlowReturn gst_mpeg4p2unpack_chain(GstPad *pad, GstObject *parent, GstBuffer *buffer)
{
	guint8 *data;
	gsize data_len;
	GstMapInfo buffermap;
	GstFlowReturn ret = GST_FLOW_OK;
	GstMpeg4P2Unpack *self = GST_MPEG4P2UNPACK(GST_PAD_PARENT(pad));

	if (self->buffer_duration == GST_CLOCK_TIME_NONE)
	{
		if (!GST_BUFFER_DURATION_IS_VALID(buffer))
		{
			GST_WARNING_OBJECT(self, "Cannot retrieve buffer duration, dropping");
			gst_buffer_unref(buffer);
			return GST_FLOW_OK;
		}
		self->buffer_duration = GST_BUFFER_DURATION(buffer);
	}

	gst_buffer_map(buffer, &buffermap, GST_MAP_READ);
	data = buffermap.data;
	data_len = buffermap.size;

	int pos_p = -1, nb_vop = 0, pos_vop2 = -1;
	mpeg4p2_scan_buffer(data, data_len, &pos_p, &nb_vop, &pos_vop2);
	// GST_LOG_OBJECT(self, "pos_p=%d, num_vop=%d, pos_vop2=%d", pos_p, nb_vop, pos_vop2);

	/* if we don't have userdata we can unmap buffer */
	if (pos_p < 0)
	{
		gst_buffer_unmap(buffer, &buffermap);
		data = NULL;
	}

	if (pos_vop2 >= 0)
	{
		if (self->b_frame)
		{
			GST_WARNING_OBJECT(self, "Missing one N-VOP packet, discarding one B-frame");
			gst_buffer_unref(self->b_frame);
			self->b_frame = NULL;
		}
		// GST_LOG_OBJECT(self, "Storing B-Frame of packed PB-Frame");
		self->b_frame = gst_buffer_copy_region(buffer, GST_BUFFER_COPY_ALL, pos_vop2, data_len - pos_vop2);
		GST_BUFFER_DTS(self->b_frame) = GST_BUFFER_DTS(buffer) + self->buffer_duration;
	}

	if (nb_vop > 2)
	{
		GST_WARNING_OBJECT(self, "Found %d VOP headers in one packet, only unpacking one.", nb_vop);
	}

	if (nb_vop == 1 && self->b_frame)
	{
		// GST_LOG_OBJECT(self, "Push previous B-Frame");
		ret = gst_mpeg4p2unpack_handle_frame(self, self->b_frame);
		if (data_len <= MPEG4P2_MAX_NVOP_SIZE)
		{
			// GST_LOG_OBJECT(self, "Skipping N-VOP");
			self->b_frame = NULL;
			gst_buffer_unref(buffer);
		}
		else
		{
			// GST_LOG_OBJECT(self, "Store B-Frame");
			GST_BUFFER_DTS(buffer) = GST_BUFFER_DTS(self->b_frame) + self->buffer_duration;
			self->b_frame = buffer;
		}
	}
	else if (nb_vop >= 2)
	{
		// GST_LOG_OBJECT(self, "Push P-frame of packed PB-Frame");
		GstBuffer *p_frame = gst_buffer_copy_region(buffer, GST_BUFFER_COPY_ALL, 0, pos_vop2);
		ret = gst_mpeg4p2unpack_handle_frame(self, p_frame);
		gst_buffer_unref(buffer);
	}
	else if (pos_p >= 0)
	{
		// GST_LOG_OBJECT(self, "Updating DivX userdata (replacing trailing 'p')");
		gst_buffer_unmap(buffer, &buffermap);
		gst_buffer_map(buffer, &buffermap, GST_MAP_WRITE);
		data = buffermap.data;
		data[pos_p] = 'n';
		gst_buffer_unmap(buffer, &buffermap);
		data = NULL;
		ret = gst_mpeg4p2unpack_handle_frame(self, buffer);
	}
	else
	{
		ret = gst_mpeg4p2unpack_handle_frame(self, buffer);
	}
	return ret;
}
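The chain function above depends on mpeg4p2_scan_buffer() to locate VOP headers, which is not shown here. The sketch below only illustrates the counting part, using the standard MPEG-4 Part 2 VOP start code (00 00 01 B6); names and behaviour are assumptions, not the element's actual scanner.

#include <glib.h>

/* Illustrative only: count VOP start codes and remember where the second
 * one (the packed B-frame of a packed PB-frame) begins. */
static void
example_scan_vops (const guint8 * data, gsize len, gint * nb_vop,
    gint * pos_vop2)
{
  gsize i;

  *nb_vop = 0;
  *pos_vop2 = -1;

  for (i = 0; i + 3 < len; i++) {
    if (data[i] == 0x00 && data[i + 1] == 0x00 && data[i + 2] == 0x01 &&
        data[i + 3] == 0xB6) {
      (*nb_vop)++;
      if (*nb_vop == 2)
        *pos_vop2 = (gint) i;
    }
  }
}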
static gboolean
probe_cb (InsanityGstTest * ptest, GstPad * pad, GstMiniObject * object,
    gpointer userdata)
{
  InsanityTest *test = INSANITY_TEST (ptest);

  global_last_probe = g_get_monotonic_time ();

  if (GST_IS_BUFFER (object)) {
    GstClockTime buf_start, buf_end;
    GstBuffer *next_sub, *buf = GST_BUFFER (object);

    buf_start =
        gst_segment_to_stream_time (&glob_suboverlay_src_probe->last_segment,
        glob_suboverlay_src_probe->last_segment.format, GST_BUFFER_PTS (buf));
    buf_end = buf_start + GST_BUFFER_DURATION (buf);

    if (glob_in_progress == TEST_SUBTTILE_DESCRIPTOR_GENERATION) {
      if (glob_pipeline_restarted == TRUE) {
        gboolean has_subs;

        if (glob_duration > 0 && buf_end > glob_duration) {
          /* Done according to the duration previously found by the
           * discoverer */
          next_test (test);
        }

        has_subs = frame_contains_subtitles (buf);
        if (GST_CLOCK_TIME_IS_VALID (glob_last_subtitled_frame)) {
          if (has_subs == FALSE) {
            GstBuffer *nbuf = gst_buffer_new ();

            GST_BUFFER_PTS (nbuf) = glob_last_subtitled_frame;
            GST_BUFFER_DURATION (nbuf) = buf_end - glob_last_subtitled_frame;
            media_descriptor_writer_add_frame (glob_writer, pad, nbuf);

            glob_last_subtitled_frame = GST_CLOCK_TIME_NONE;
            gst_buffer_unref (nbuf);
          }
        } else if (has_subs) {
          glob_last_subtitled_frame = buf_start;
        }
      }

      goto done;
    }

    /* We played enough... next test */
    if (GST_CLOCK_TIME_IS_VALID (glob_first_subtitle_ts) &&
        buf_start >=
        glob_first_subtitle_ts + glob_playback_duration * GST_SECOND) {
      next_test (test);
    }

    switch (glob_in_progress) {
      case TEST_NONE:
      {

        if (glob_suboverlay_src_probe->waiting_first_segment == TRUE) {
          insanity_test_validate_checklist_item (test, "first-segment", FALSE,
              "Got a buffer before the first segment");
        }
        next_test (test);
      }
      default:
        break;
    }

    if (glob_subtitled_frames != NULL) {
      GstClockTime sub_start, sub_end;

      next_sub = GST_BUFFER (glob_subtitled_frames->data);

      sub_start = GST_BUFFER_PTS (next_sub);
      sub_end = GST_BUFFER_DURATION_IS_VALID (next_sub) ?
          GST_BUFFER_DURATION (next_sub) + sub_start : -1;

      if (buf_start >= sub_start && buf_end < sub_end) {
        if (frame_contains_subtitles (buf) == TRUE) {
          glob_sub_render_found = TRUE;
          insanity_test_validate_checklist_item (test, "subtitle-rendered",
              TRUE, NULL);
        } else {
          gchar *msg = g_strdup_printf ("Subtitle start %" GST_TIME_FORMAT
              " end %" GST_TIME_FORMAT " received buffer with no sub start %"
              GST_TIME_FORMAT " end %" GST_TIME_FORMAT,
              GST_TIME_ARGS (sub_start),
              GST_TIME_ARGS (sub_end), GST_TIME_ARGS (buf_start),
              GST_TIME_ARGS (buf_end));

          insanity_test_validate_checklist_item (test, "subtitle-rendered",
              FALSE, msg);
          glob_wrong_rendered_buf = TRUE;

          g_free (msg);
        }
      } else if (buf_end > sub_end) {
        /* We got a buffer that is after the subtitle we were waiting for;
         * remove that subtitle as we are no longer waiting for it */
        gst_buffer_unref (next_sub);

        glob_subtitled_frames = g_list_remove (glob_subtitled_frames, next_sub);
      }
    }

  } else if (GST_IS_EVENT (object)) {
    GstEvent *event = GST_EVENT (object);

    switch (GST_EVENT_TYPE (event)) {
      case GST_EVENT_SEGMENT:
      {
        gst_event_copy_segment (event,
            &glob_suboverlay_src_probe->last_segment);

        if (glob_suboverlay_src_probe->waiting_first_segment == TRUE) {
          insanity_test_validate_checklist_item (test, "first-segment", TRUE,
              NULL);
          glob_suboverlay_src_probe->waiting_first_segment = FALSE;
        }

        if (glob_suboverlay_src_probe->waiting_segment == FALSE)
          /* Cache the segment as it will be our reference but don't look
           * further */
          goto done;

        if (glob_suboverlay_src_probe->waiting_first_segment == TRUE) {
          /* Make sure that a new segment has been received for each stream */
          glob_suboverlay_src_probe->waiting_first_segment = FALSE;
          glob_suboverlay_src_probe->waiting_segment = FALSE;
        }

        glob_suboverlay_src_probe->waiting_segment = FALSE;
        break;
      }
      default:
        break;
    }
  }

done:
  return TRUE;
}
Example #24
0
static GstFlowReturn
test_injector_chain (GstPad * pad, GstBuffer * buf)
{
  GstFlowReturn ret;
  GstPad *srcpad;

  srcpad = gst_element_get_pad (GST_ELEMENT (GST_PAD_PARENT (pad)), "src");

  /* since we're increasing timestamp/offsets, push this one first */
  GST_LOG (" passing buffer   [t=%" GST_TIME_FORMAT "-%" GST_TIME_FORMAT
      "], offset=%" G_GINT64_FORMAT ", offset_end=%" G_GINT64_FORMAT,
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf) + GST_BUFFER_DURATION (buf)),
      GST_BUFFER_OFFSET (buf), GST_BUFFER_OFFSET_END (buf));

  gst_buffer_ref (buf);

  ret = gst_pad_push (srcpad, buf);

  if (g_random_double () < injector_inject_probability) {
    GstBuffer *ibuf;

    ibuf = gst_buffer_copy (buf);

    if (GST_BUFFER_OFFSET_IS_VALID (buf) &&
        GST_BUFFER_OFFSET_END_IS_VALID (buf)) {
      guint64 delta;

      delta = GST_BUFFER_OFFSET_END (buf) - GST_BUFFER_OFFSET (buf);
      GST_BUFFER_OFFSET (ibuf) += delta / 4;
      GST_BUFFER_OFFSET_END (ibuf) += delta / 4;
    } else {
      GST_BUFFER_OFFSET (ibuf) = GST_BUFFER_OFFSET_NONE;
      GST_BUFFER_OFFSET_END (ibuf) = GST_BUFFER_OFFSET_NONE;
    }

    if (GST_BUFFER_TIMESTAMP_IS_VALID (buf) &&
        GST_BUFFER_DURATION_IS_VALID (buf)) {
      GstClockTime delta;

      delta = GST_BUFFER_DURATION (buf);
      GST_BUFFER_TIMESTAMP (ibuf) += delta / 4;
    } else {
      GST_BUFFER_TIMESTAMP (ibuf) = GST_CLOCK_TIME_NONE;
      GST_BUFFER_DURATION (ibuf) = GST_CLOCK_TIME_NONE;
    }

    if (GST_BUFFER_TIMESTAMP_IS_VALID (ibuf) ||
        GST_BUFFER_OFFSET_IS_VALID (ibuf)) {
      GST_LOG ("injecting buffer [t=%" GST_TIME_FORMAT "-%" GST_TIME_FORMAT
          "], offset=%" G_GINT64_FORMAT ", offset_end=%" G_GINT64_FORMAT,
          GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (ibuf)),
          GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (ibuf) +
              GST_BUFFER_DURATION (ibuf)), GST_BUFFER_OFFSET (ibuf),
          GST_BUFFER_OFFSET_END (ibuf));

      if (gst_pad_push (srcpad, ibuf) != GST_FLOW_OK) {
        /* ignore return value */
      }
    } else {
      GST_WARNING ("couldn't inject buffer, no incoming timestamps or offsets");
      gst_buffer_unref (ibuf);
    }
  }

  gst_buffer_unref (buf);

  return ret;
}
/* Pipeline Callbacks */
static gboolean
probe_cb (InsanityGstTest * ptest, GstPad * pad, GstMiniObject * object,
    gpointer userdata)
{
  InsanityTest *test = INSANITY_TEST (ptest);

  global_last_probe = g_get_monotonic_time ();

  DECODER_TEST_LOCK ();
  if (GST_IS_BUFFER (object)) {
    GstBuffer *buf;
    GstClockTime ts;

    buf = GST_BUFFER (object);
    ts = GST_BUFFER_PTS (buf);

    /* First check clipping */
    if (glob_testing_parser == FALSE && GST_CLOCK_TIME_IS_VALID (ts) &&
        glob_waiting_segment == FALSE) {
      GstClockTime ts_end, cstart, cstop;

      ts_end = ts;
      if (GST_BUFFER_DURATION_IS_VALID (buf))
        ts_end += GST_BUFFER_DURATION (buf);

      /* Check if buffer is completely outside the segment */
      if (!gst_segment_clip (&glob_last_segment,
              glob_last_segment.format, ts, ts_end, &cstart, &cstop)) {
        char *msg = g_strdup_printf ("Got timestamp %" GST_TIME_FORMAT " -- %"
            GST_TIME_FORMAT ", outside configured segment (%" GST_TIME_FORMAT
            " -- %" GST_TIME_FORMAT "), method %s",
            GST_TIME_ARGS (ts), GST_TIME_ARGS (ts_end),
            GST_TIME_ARGS (glob_last_segment.start),
            GST_TIME_ARGS (glob_last_segment.stop),
            test_get_name (glob_in_progress));
        insanity_test_validate_checklist_item (INSANITY_TEST (ptest),
            "segment-clipping", FALSE, msg);
        g_free (msg);
        glob_bad_segment_clipping = TRUE;
      }
    }

    switch (glob_in_progress) {
      case TEST_NONE:
        if (glob_waiting_first_segment == TRUE)
          insanity_test_validate_checklist_item (test, "first-segment",
              FALSE, "Got a buffer before the first segment");

        /* Got the first buffer, starting testing dance */
        next_test (test);
        break;
      case TEST_POSITION:
        test_position (test, buf);
        break;
      case TEST_FAST_FORWARD:
      case TEST_BACKWARD_PLAYBACK:
      case TEST_FAST_BACKWARD:
      {
        gint64 stime_ts;

        if (GST_CLOCK_TIME_IS_VALID (ts) == FALSE ||
            glob_waiting_segment == TRUE) {
          break;
        }

        stime_ts = gst_segment_to_stream_time (&glob_last_segment,
            glob_last_segment.format, ts);

        if (GST_CLOCK_TIME_IS_VALID (glob_seek_first_buf_ts) == FALSE) {
          GstClockTime expected_ts =
              gst_segment_to_stream_time (&glob_last_segment,
              glob_last_segment.format,
              glob_seek_rate <
              0 ? glob_seek_stop_ts : glob_seek_segment_seektime);

          GstClockTimeDiff diff = ABS (GST_CLOCK_DIFF (stime_ts, expected_ts));

          if (diff > SEEK_THRESHOLD) {
            gchar *valmsg =
                g_strdup_printf ("Received buffer timestamp %" GST_TIME_FORMAT
                " Seeek wanted %" GST_TIME_FORMAT "",
                GST_TIME_ARGS (stime_ts),
                GST_TIME_ARGS (expected_ts));

            validate_current_test (test, FALSE, valmsg);
            next_test (test);

            g_free (valmsg);
          } else
            glob_seek_first_buf_ts = stime_ts;

        } else {
          GstClockTimeDiff diff =
              GST_CLOCK_DIFF (stime_ts, glob_seek_first_buf_ts);

          if (diff < 0)
            diff = -diff;

          if (diff >= glob_playback_duration * GST_SECOND) {
            validate_current_test (test, TRUE, NULL);
            next_test (test);
          }
        }
        break;
      }
      default:
        break;
    }

  } else if (GST_IS_EVENT (object)) {
    GstEvent *event = GST_EVENT (object);
    guint seqnum = gst_event_get_seqnum (event);

    if (G_LIKELY (glob_seqnum_found == FALSE) && seqnum == glob_seqnum)
      glob_seqnum_found = TRUE;

    if (glob_seqnum_found == TRUE && seqnum != glob_seqnum) {
      gchar *message = g_strdup_printf ("Current seqnum %i != "
          "received %i", glob_seqnum, seqnum);

      insanity_test_validate_checklist_item (test, "seqnum-management",
          FALSE, message);

      glob_wrong_seqnum = TRUE;
      g_free (message);
    }

    switch (GST_EVENT_TYPE (event)) {
      case GST_EVENT_SEGMENT:
      {
        gst_event_copy_segment (event, &glob_last_segment);

        if (glob_waiting_segment == FALSE)
          /* Cache the segment as it will be our reference but don't look
           * further */
          goto done;

        glob_last_segment_start_time = glob_last_segment.start;
        if (glob_waiting_first_segment == TRUE) {
          insanity_test_validate_checklist_item (test, "first-segment", TRUE,
              NULL);

          glob_waiting_first_segment = FALSE;
        } else if (glob_in_progress >= TEST_FAST_FORWARD &&
            glob_in_progress <= TEST_FAST_BACKWARD) {
          GstClockTimeDiff diff;
          gboolean valid_stop = TRUE;
          GstClockTimeDiff wdiff, rdiff;

          rdiff =
              ABS (GST_CLOCK_DIFF (glob_last_segment.stop,
                  glob_last_segment.start)) * ABS (glob_last_segment.rate *
              glob_last_segment.applied_rate);
          wdiff =
              ABS (GST_CLOCK_DIFF (glob_seek_stop_ts,
                  glob_seek_segment_seektime));

          diff =
              GST_CLOCK_DIFF (glob_last_segment.position,
              glob_seek_segment_seektime);
          if (diff < 0)
            diff = -diff;

          /* Now compare with the expected segment */
          if ((glob_last_segment.rate * glob_last_segment.applied_rate) ==
              glob_seek_rate && diff <= SEEK_THRESHOLD && valid_stop) {
            glob_seek_got_segment = TRUE;
          } else {
            GstClockTime stopdiff = ABS (GST_CLOCK_DIFF (rdiff, wdiff));

            gchar *validate_msg =
                g_strdup_printf ("Wrong segment received, Rate %f expected "
                "%f, start time diff %" GST_TIME_FORMAT " stop diff %"
                GST_TIME_FORMAT,
                (glob_last_segment.rate * glob_last_segment.applied_rate),
                glob_seek_rate,
                GST_TIME_ARGS (diff), GST_TIME_ARGS (stopdiff));

            validate_current_test (test, FALSE, validate_msg);
            next_test (test);
            g_free (validate_msg);
          }
        }

        glob_waiting_segment = FALSE;
        break;
      }
      default:
        break;
    }
  }

done:
  DECODER_TEST_UNLOCK ();
  return TRUE;
}
Example #26
0
static GstFlowReturn
handle_buffer (GstSubParse * self, GstBuffer * buf)
{
  GstFlowReturn ret = GST_FLOW_OK;
  GstCaps *caps = NULL;
  gchar *line, *subtitle;

  feed_textbuf (self, buf);

  /* make sure we know the format */
  if (G_UNLIKELY (self->parser_type == GST_SUB_PARSE_FORMAT_UNKNOWN)) {
    if (!(caps = gst_sub_parse_format_autodetect (self))) {
      return GST_FLOW_UNEXPECTED;
    }
    if (!gst_pad_set_caps (self->srcpad, caps)) {
      gst_caps_unref (caps);
      return GST_FLOW_UNEXPECTED;
    }
    gst_caps_unref (caps);
  }

  while ((line = get_next_line (self)) && !self->flushing) {
    /* Set segment on our parser state machine */
    self->state.segment = &self->segment;
    /* Now parse the line, out of segment lines will just return NULL */
    GST_LOG_OBJECT (self, "Parsing line '%s'", line);
    subtitle = self->parse_line (&self->state, line);
    g_free (line);

    if (subtitle) {
      guint subtitle_len = strlen (subtitle);

      /* +1 for terminating NUL character */
      ret = gst_pad_alloc_buffer_and_set_caps (self->srcpad,
          GST_BUFFER_OFFSET_NONE, subtitle_len + 1,
          GST_PAD_CAPS (self->srcpad), &buf);

      if (ret == GST_FLOW_OK) {
        /* copy terminating NUL character as well */
        memcpy (GST_BUFFER_DATA (buf), subtitle, subtitle_len + 1);
        GST_BUFFER_SIZE (buf) = subtitle_len;
        GST_BUFFER_TIMESTAMP (buf) = self->state.start_time;
        GST_BUFFER_DURATION (buf) = self->state.duration;

        /* in some cases (e.g. tmplayer) we can only determine the duration
         * of a text chunk from the timestamp of the next text chunk; in those
         * cases, we probably want to limit the duration to something
         * reasonable, so we don't end up showing some text for e.g. 40 seconds
         * just because nothing else is being said during that time */
        if (self->state.max_duration > 0 && GST_BUFFER_DURATION_IS_VALID (buf)) {
          if (GST_BUFFER_DURATION (buf) > self->state.max_duration)
            GST_BUFFER_DURATION (buf) = self->state.max_duration;
        }

        gst_segment_set_last_stop (&self->segment, GST_FORMAT_TIME,
            self->state.start_time);

        GST_DEBUG_OBJECT (self, "Sending text '%s', %" GST_TIME_FORMAT " + %"
            GST_TIME_FORMAT, subtitle, GST_TIME_ARGS (self->state.start_time),
            GST_TIME_ARGS (self->state.duration));

        ret = gst_pad_push (self->srcpad, buf);
      }

      /* move this forward (the tmplayer parser needs this) */
      if (self->state.duration != GST_CLOCK_TIME_NONE)
        self->state.start_time += self->state.duration;

      g_free (subtitle);
      subtitle = NULL;

      if (ret != GST_FLOW_OK) {
        GST_DEBUG_OBJECT (self, "flow: %s", gst_flow_get_name (ret));
        break;
      }
    }
  }

  return ret;
}
Example #27
0
static GstFlowReturn
gst_wavpack_enc_chain (GstPad * pad, GstBuffer * buf)
{
  GstWavpackEnc *enc = GST_WAVPACK_ENC (gst_pad_get_parent (pad));
  uint32_t sample_count = GST_BUFFER_SIZE (buf) / 4;
  GstFlowReturn ret;

  /* reset the last returns to GST_FLOW_OK. This is only set to something else
   * while WavpackPackSamples(), or more specifically gst_wavpack_enc_push_block(),
   * is running, so it is not valid anymore afterwards */
  enc->srcpad_last_return = enc->wvcsrcpad_last_return = GST_FLOW_OK;

  GST_DEBUG ("got %u raw samples", sample_count);

  /* check if we already have a valid WavpackContext, otherwise make one */
  if (!enc->wp_context) {
    /* create raw context */
    enc->wp_context =
        WavpackOpenFileOutput (gst_wavpack_enc_push_block, &enc->wv_id,
        (enc->correction_mode > 0) ? &enc->wvc_id : NULL);
    if (!enc->wp_context) {
      GST_ELEMENT_ERROR (enc, LIBRARY, INIT, (NULL),
          ("error creating Wavpack context"));
      gst_object_unref (enc);
      gst_buffer_unref (buf);
      return GST_FLOW_ERROR;
    }

    /* set the WavpackConfig according to our parameters */
    gst_wavpack_enc_set_wp_config (enc);

    /* set the configuration to the context now that we know everything
     * and initialize the encoder */
    if (!WavpackSetConfiguration (enc->wp_context,
            enc->wp_config, (uint32_t) (-1))
        || !WavpackPackInit (enc->wp_context)) {
      GST_ELEMENT_ERROR (enc, LIBRARY, SETTINGS, (NULL),
          ("error setting up wavpack encoding context"));
      WavpackCloseFile (enc->wp_context);
      gst_object_unref (enc);
      gst_buffer_unref (buf);
      return GST_FLOW_ERROR;
    }
    GST_DEBUG ("setup of encoding context successfull");
  }

  /* Save the timestamp of the first buffer. This will be later
   * used as offset for all following buffers */
  if (enc->timestamp_offset == GST_CLOCK_TIME_NONE) {
    if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)) {
      enc->timestamp_offset = GST_BUFFER_TIMESTAMP (buf);
      enc->next_ts = GST_BUFFER_TIMESTAMP (buf);
    } else {
      enc->timestamp_offset = 0;
      enc->next_ts = 0;
    }
  }

  /* Check if we have a continuous stream; if not, drop some samples, drop the
   * whole buffer, or insert silence samples */
  if (enc->next_ts != GST_CLOCK_TIME_NONE &&
      GST_BUFFER_TIMESTAMP (buf) < enc->next_ts) {
    guint64 diff = enc->next_ts - GST_BUFFER_TIMESTAMP (buf);
    guint64 diff_bytes;

    GST_WARNING_OBJECT (enc, "Buffer is older than previous "
        "timestamp + duration (%" GST_TIME_FORMAT "< %" GST_TIME_FORMAT
        "), cannot handle. Clipping buffer.",
        GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
        GST_TIME_ARGS (enc->next_ts));

    diff_bytes =
        GST_CLOCK_TIME_TO_FRAMES (diff, enc->samplerate) * enc->channels * 2;
    if (diff_bytes >= GST_BUFFER_SIZE (buf)) {
      gst_buffer_unref (buf);
      return GST_FLOW_OK;
    }
    buf = gst_buffer_make_metadata_writable (buf);
    GST_BUFFER_DATA (buf) += diff_bytes;
    GST_BUFFER_SIZE (buf) -= diff_bytes;

    GST_BUFFER_TIMESTAMP (buf) += diff;
    if (GST_BUFFER_DURATION_IS_VALID (buf))
      GST_BUFFER_DURATION (buf) -= diff;
  }

  /* Allow a diff of at most 5 ms */
  if (enc->next_ts != GST_CLOCK_TIME_NONE
      && GST_BUFFER_TIMESTAMP_IS_VALID (buf)) {
    if (GST_BUFFER_TIMESTAMP (buf) != enc->next_ts &&
        GST_BUFFER_TIMESTAMP (buf) - enc->next_ts > 5 * GST_MSECOND) {
      GST_WARNING_OBJECT (enc,
          "Discontinuity detected: %" G_GUINT64_FORMAT " > %" G_GUINT64_FORMAT,
          GST_BUFFER_TIMESTAMP (buf) - enc->next_ts, 5 * GST_MSECOND);

      WavpackFlushSamples (enc->wp_context);
      enc->timestamp_offset += (GST_BUFFER_TIMESTAMP (buf) - enc->next_ts);
    }
  }

  if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)
      && GST_BUFFER_DURATION_IS_VALID (buf))
    enc->next_ts = GST_BUFFER_TIMESTAMP (buf) + GST_BUFFER_DURATION (buf);
  else
    enc->next_ts = GST_CLOCK_TIME_NONE;

  if (enc->need_channel_remap) {
    buf = gst_buffer_make_writable (buf);
    gst_wavpack_enc_fix_channel_order (enc, (gint32 *) GST_BUFFER_DATA (buf),
        sample_count);
  }

  /* if we want to append the MD5 sum to the stream update it here
   * with the current raw samples */
  if (enc->md5) {
    g_checksum_update (enc->md5_context, GST_BUFFER_DATA (buf),
        GST_BUFFER_SIZE (buf));
  }

  /* encode and handle return values from encoding */
  if (WavpackPackSamples (enc->wp_context, (int32_t *) GST_BUFFER_DATA (buf),
          sample_count / enc->channels)) {
    GST_DEBUG ("encoding samples successful");
    ret = GST_FLOW_OK;
  } else {
    if ((enc->srcpad_last_return == GST_FLOW_RESEND) ||
        (enc->wvcsrcpad_last_return == GST_FLOW_RESEND)) {
      ret = GST_FLOW_RESEND;
    } else if ((enc->srcpad_last_return == GST_FLOW_OK) ||
        (enc->wvcsrcpad_last_return == GST_FLOW_OK)) {
      ret = GST_FLOW_OK;
    } else if ((enc->srcpad_last_return == GST_FLOW_NOT_LINKED) &&
        (enc->wvcsrcpad_last_return == GST_FLOW_NOT_LINKED)) {
      ret = GST_FLOW_NOT_LINKED;
    } else if ((enc->srcpad_last_return == GST_FLOW_WRONG_STATE) &&
        (enc->wvcsrcpad_last_return == GST_FLOW_WRONG_STATE)) {
      ret = GST_FLOW_WRONG_STATE;
    } else {
      GST_ELEMENT_ERROR (enc, LIBRARY, ENCODE, (NULL),
          ("encoding samples failed"));
      ret = GST_FLOW_ERROR;
    }
  }

  gst_buffer_unref (buf);
  gst_object_unref (enc);
  return ret;
}
static GstFlowReturn
gst_dvbsub_overlay_chain_video (GstPad * pad, GstObject * parent,
    GstBuffer * buffer)
{
  GstDVBSubOverlay *overlay = GST_DVBSUB_OVERLAY (parent);
  GstFlowReturn ret = GST_FLOW_OK;
  gint64 start, stop;
  guint64 cstart, cstop;
  gboolean in_seg;
  GstClockTime vid_running_time, vid_running_time_end;

  if (GST_VIDEO_INFO_FORMAT (&overlay->info) == GST_VIDEO_FORMAT_UNKNOWN)
    return GST_FLOW_NOT_NEGOTIATED;

  if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
    goto missing_timestamp;

  start = GST_BUFFER_TIMESTAMP (buffer);

  GST_LOG_OBJECT (overlay,
      "Video segment: %" GST_SEGMENT_FORMAT " --- Subtitle position: %"
      GST_TIME_FORMAT " --- BUFFER: ts=%" GST_TIME_FORMAT,
      &overlay->video_segment,
      GST_TIME_ARGS (overlay->subtitle_segment.position),
      GST_TIME_ARGS (start));

  /* ignore buffers that are outside of the current segment */
  if (!GST_BUFFER_DURATION_IS_VALID (buffer)) {
    stop = GST_CLOCK_TIME_NONE;
  } else {
    stop = start + GST_BUFFER_DURATION (buffer);
  }

  in_seg = gst_segment_clip (&overlay->video_segment, GST_FORMAT_TIME,
      start, stop, &cstart, &cstop);
  if (!in_seg) {
    GST_DEBUG_OBJECT (overlay, "Buffer outside configured segment -- dropping");
    gst_buffer_unref (buffer);
    return GST_FLOW_OK;
  }

  buffer = gst_buffer_make_writable (buffer);
  GST_BUFFER_TIMESTAMP (buffer) = cstart;
  if (GST_BUFFER_DURATION_IS_VALID (buffer))
    GST_BUFFER_DURATION (buffer) = cstop - cstart;

  vid_running_time =
      gst_segment_to_running_time (&overlay->video_segment, GST_FORMAT_TIME,
      cstart);
  if (GST_BUFFER_DURATION_IS_VALID (buffer))
    vid_running_time_end =
        gst_segment_to_running_time (&overlay->video_segment, GST_FORMAT_TIME,
        cstop);
  else
    vid_running_time_end = vid_running_time;

  GST_DEBUG_OBJECT (overlay, "Video running time: %" GST_TIME_FORMAT,
      GST_TIME_ARGS (vid_running_time));

  overlay->video_segment.position = GST_BUFFER_TIMESTAMP (buffer);

  g_mutex_lock (&overlay->dvbsub_mutex);
  if (!g_queue_is_empty (overlay->pending_subtitles)) {
    DVBSubtitles *tmp, *candidate = NULL;

    while (!g_queue_is_empty (overlay->pending_subtitles)) {
      tmp = g_queue_peek_head (overlay->pending_subtitles);

      if (tmp->pts > vid_running_time_end) {
        /* For a future video frame */
        break;
      } else if (tmp->num_rects == 0) {
        /* Clear screen */
        if (overlay->current_subtitle)
          dvb_subtitles_free (overlay->current_subtitle);
        overlay->current_subtitle = NULL;
        if (candidate)
          dvb_subtitles_free (candidate);
        candidate = NULL;
        g_queue_pop_head (overlay->pending_subtitles);
        dvb_subtitles_free (tmp);
        tmp = NULL;
      } else if (tmp->pts + tmp->page_time_out * GST_SECOND *
          ABS (overlay->subtitle_segment.rate) >= vid_running_time) {
        if (candidate)
          dvb_subtitles_free (candidate);
        candidate = tmp;
        g_queue_pop_head (overlay->pending_subtitles);
      } else {
        /* Too late */
        dvb_subtitles_free (tmp);
        tmp = NULL;
        g_queue_pop_head (overlay->pending_subtitles);
      }
    }

    if (candidate) {
      GST_DEBUG_OBJECT (overlay,
          "Time to show the next subtitle page (%" GST_TIME_FORMAT " >= %"
          GST_TIME_FORMAT ") - it has %u regions",
          GST_TIME_ARGS (vid_running_time), GST_TIME_ARGS (candidate->pts),
          candidate->num_rects);
      dvb_subtitles_free (overlay->current_subtitle);
      overlay->current_subtitle = candidate;
      if (overlay->current_comp)
        gst_video_overlay_composition_unref (overlay->current_comp);
      overlay->current_comp =
          gst_dvbsub_overlay_subs_to_comp (overlay, overlay->current_subtitle);
    }
  }

  /* Check that we haven't hit the fallback timeout for current subtitle page */
  if (overlay->current_subtitle
      && vid_running_time >
      (overlay->current_subtitle->pts +
          overlay->current_subtitle->page_time_out * GST_SECOND *
          ABS (overlay->subtitle_segment.rate))) {
    GST_INFO_OBJECT (overlay,
        "Subtitle page not redefined before fallback page_time_out of %u seconds (missed data?) - deleting current page",
        overlay->current_subtitle->page_time_out);
    dvb_subtitles_free (overlay->current_subtitle);
    overlay->current_subtitle = NULL;
  }

  /* Now render it */
  if (g_atomic_int_get (&overlay->enable) && overlay->current_subtitle) {
    GstVideoFrame frame;

    g_assert (overlay->current_comp);
    if (overlay->attach_compo_to_buffer) {
      GST_DEBUG_OBJECT (overlay, "Attaching overlay image to video buffer");
      gst_buffer_add_video_overlay_composition_meta (buffer,
          overlay->current_comp);
    } else {
      GST_DEBUG_OBJECT (overlay, "Blending overlay image to video buffer");
      gst_video_frame_map (&frame, &overlay->info, buffer, GST_MAP_READWRITE);
      gst_video_overlay_composition_blend (overlay->current_comp, &frame);
      gst_video_frame_unmap (&frame);
    }
  }
  g_mutex_unlock (&overlay->dvbsub_mutex);

  ret = gst_pad_push (overlay->srcpad, buffer);

  return ret;

missing_timestamp:
  {
    GST_WARNING_OBJECT (overlay, "video buffer without timestamp, discarding");
    gst_buffer_unref (buffer);
    return GST_FLOW_OK;
  }
}
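A minimal sketch of the running-time conversion used above before comparing video buffers against the queued subtitle PTS values; it assumes only a GST_FORMAT_TIME segment and the core gst_segment_to_running_time() API.

#include <gst/gst.h>

/* Illustrative helper: running time of a buffer within a time segment,
 * or GST_CLOCK_TIME_NONE if the buffer carries no timestamp. */
static GstClockTime
example_buffer_running_time (const GstSegment * segment, GstBuffer * buf)
{
  if (!GST_BUFFER_TIMESTAMP_IS_VALID (buf))
    return GST_CLOCK_TIME_NONE;

  return gst_segment_to_running_time (segment, GST_FORMAT_TIME,
      GST_BUFFER_TIMESTAMP (buf));
}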
Example #29
0
static void
do_perfect_stream_test (guint rate, guint width, gdouble drop_probability,
    gdouble inject_probability)
{
  GstElement *pipe, *src, *conv, *filter, *injector, *audiorate, *sink;
  GstMessage *msg;
  GstCaps *caps;
  GstPad *srcpad;
  GList *l, *bufs = NULL;
  GstClockTime next_time = GST_CLOCK_TIME_NONE;
  guint64 next_offset = GST_BUFFER_OFFSET_NONE;

  caps = gst_caps_new_simple ("audio/x-raw-int", "rate", G_TYPE_INT,
      rate, "width", G_TYPE_INT, width, NULL);

  GST_INFO ("-------- drop=%.0f%% caps = %" GST_PTR_FORMAT " ---------- ",
      drop_probability * 100.0, caps);

  g_assert (drop_probability >= 0.0 && drop_probability <= 1.0);
  g_assert (inject_probability >= 0.0 && inject_probability <= 1.0);
  g_assert (width > 0 && (width % 8) == 0);

  pipe = gst_pipeline_new ("pipeline");
  fail_unless (pipe != NULL);

  src = gst_element_factory_make ("audiotestsrc", "audiotestsrc");
  fail_unless (src != NULL);

  g_object_set (src, "num-buffers", 100, NULL);

  conv = gst_element_factory_make ("audioconvert", "audioconvert");
  fail_unless (conv != NULL);

  filter = gst_element_factory_make ("capsfilter", "capsfilter");
  fail_unless (filter != NULL);
  g_object_set (filter, "caps", caps, NULL);

  injector_inject_probability = inject_probability;
  injector = GST_ELEMENT (g_object_new (test_injector_get_type (), NULL));

  srcpad = gst_element_get_pad (injector, "src");
  fail_unless (srcpad != NULL);
  gst_pad_add_buffer_probe (srcpad, G_CALLBACK (probe_cb), &drop_probability);
  gst_object_unref (srcpad);

  audiorate = gst_element_factory_make ("audiorate", "audiorate");
  fail_unless (audiorate != NULL);

  sink = gst_element_factory_make ("fakesink", "fakesink");
  fail_unless (sink != NULL);
  g_object_set (sink, "signal-handoffs", TRUE, NULL);
  g_signal_connect (sink, "handoff", G_CALLBACK (got_buf), &bufs);

  gst_bin_add_many (GST_BIN (pipe), src, conv, filter, injector, audiorate,
      sink, NULL);
  gst_element_link_many (src, conv, filter, injector, audiorate, sink, NULL);

  fail_unless_equals_int (gst_element_set_state (pipe, GST_STATE_PLAYING),
      GST_STATE_CHANGE_ASYNC);

  fail_unless_equals_int (gst_element_get_state (pipe, NULL, NULL, -1),
      GST_STATE_CHANGE_SUCCESS);

  msg = gst_bus_poll (GST_ELEMENT_BUS (pipe),
      GST_MESSAGE_EOS | GST_MESSAGE_ERROR, -1);
  fail_unless_equals_string (GST_MESSAGE_TYPE_NAME (msg), "eos");

  for (l = bufs; l != NULL; l = l->next) {
    GstBuffer *buf = GST_BUFFER (l->data);
    guint num_samples;

    fail_unless (GST_BUFFER_TIMESTAMP_IS_VALID (buf));
    fail_unless (GST_BUFFER_DURATION_IS_VALID (buf));
    fail_unless (GST_BUFFER_OFFSET_IS_VALID (buf));
    fail_unless (GST_BUFFER_OFFSET_END_IS_VALID (buf));

    GST_LOG ("buffer: ts=%" GST_TIME_FORMAT ", end_ts=%" GST_TIME_FORMAT
        " off=%" G_GINT64_FORMAT ", end_off=%" G_GINT64_FORMAT,
        GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
        GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf) + GST_BUFFER_DURATION (buf)),
        GST_BUFFER_OFFSET (buf), GST_BUFFER_OFFSET_END (buf));

    if (GST_CLOCK_TIME_IS_VALID (next_time)) {
      fail_unless_equals_uint64 (next_time, GST_BUFFER_TIMESTAMP (buf));
    }
    if (next_offset != GST_BUFFER_OFFSET_NONE) {
      fail_unless_equals_uint64 (next_offset, GST_BUFFER_OFFSET (buf));
    }

    /* check buffer size for sanity */
    fail_unless_equals_int (GST_BUFFER_SIZE (buf) % (width / 8), 0);

    /* check there is actually as much data as there should be */
    num_samples = GST_BUFFER_OFFSET_END (buf) - GST_BUFFER_OFFSET (buf);
    fail_unless_equals_int (GST_BUFFER_SIZE (buf), num_samples * (width / 8));

    next_time = GST_BUFFER_TIMESTAMP (buf) + GST_BUFFER_DURATION (buf);
    next_offset = GST_BUFFER_OFFSET_END (buf);
  }

  gst_message_unref (msg);
  gst_element_set_state (pipe, GST_STATE_NULL);
  gst_object_unref (pipe);

  g_list_foreach (bufs, (GFunc) gst_mini_object_unref, NULL);
  g_list_free (bufs);

  gst_caps_unref (caps);
}
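A hypothetical invocation sketch, assuming the usual gst-check suite boilerplate around it: this is how a test case might exercise the helper above with a couple of rate/width/probability combinations.

GST_START_TEST (test_perfect_stream_with_loss_and_injection)
{
  /* values chosen for illustration only */
  do_perfect_stream_test (44100, 16, 0.10, 0.10);
  do_perfect_stream_test (48000, 32, 0.10, 0.10);
}

GST_END_TEST;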
Example #30
0
static GstFlowReturn
gst_video_rate_transform_ip (GstBaseTransform * trans, GstBuffer * buffer)
{
  GstVideoRate *videorate;
  GstFlowReturn res = GST_BASE_TRANSFORM_FLOW_DROPPED;
  GstClockTime intime, in_ts, in_dur;
  GstClockTime avg_period;
  gboolean skip = FALSE;

  videorate = GST_VIDEO_RATE (trans);

  /* make sure the denominators are not 0 */
  if (videorate->from_rate_denominator == 0 ||
      videorate->to_rate_denominator == 0)
    goto not_negotiated;

  if (videorate->to_rate_numerator == 0 && videorate->prevbuf &&
      !videorate->force_variable_rate) {
    gst_video_rate_check_variable_rate (videorate, buffer);
  }

  GST_OBJECT_LOCK (videorate);
  avg_period = videorate->average_period_set;
  GST_OBJECT_UNLOCK (videorate);

  /* MT-safe switching between modes */
  if (G_UNLIKELY (avg_period != videorate->average_period)) {
    gboolean switch_mode = (avg_period == 0 || videorate->average_period == 0);
    videorate->average_period = avg_period;
    videorate->last_ts = GST_CLOCK_TIME_NONE;

    if (switch_mode) {
      if (avg_period) {
        /* enabling average mode */
        videorate->average = 0;
        /* make sure no cached buffers from regular mode are left */
        gst_video_rate_swap_prev (videorate, NULL, 0);
      } else {
        /* enable regular mode */
        videorate->next_ts = GST_CLOCK_TIME_NONE;
        skip = TRUE;
      }

      /* max averaging mode has no latency, normal mode does */
      gst_element_post_message (GST_ELEMENT (videorate),
          gst_message_new_latency (GST_OBJECT (videorate)));
    }
  }

  if (videorate->average_period > 0)
    return gst_video_rate_trans_ip_max_avg (videorate, buffer);

  in_ts = GST_BUFFER_TIMESTAMP (buffer);
  in_dur = GST_BUFFER_DURATION (buffer);

  if (G_UNLIKELY (in_ts == GST_CLOCK_TIME_NONE)) {
    in_ts = videorate->last_ts;
    if (G_UNLIKELY (in_ts == GST_CLOCK_TIME_NONE))
      goto invalid_buffer;
  }

  /* get the time of the next expected buffer timestamp, we use this when the
   * next buffer has -1 as a timestamp */
  videorate->last_ts = in_ts;
  if (in_dur != GST_CLOCK_TIME_NONE)
    videorate->last_ts += in_dur;

  GST_DEBUG_OBJECT (videorate, "got buffer with timestamp %" GST_TIME_FORMAT,
      GST_TIME_ARGS (in_ts));

  /* the input time is the time in the segment + all previously accumulated
   * segments */
  intime = in_ts + videorate->segment.base;

  /* we need to have two buffers to compare */
  if (videorate->prevbuf == NULL || videorate->drop_only) {
    gst_video_rate_swap_prev (videorate, buffer, intime);
    videorate->in++;
    if (!GST_CLOCK_TIME_IS_VALID (videorate->next_ts)) {
      /* new buffer, we expect to output a buffer that matches the first
       * timestamp in the segment */
      if (videorate->skip_to_first || skip) {
        videorate->next_ts = intime;
        videorate->base_ts = in_ts - videorate->segment.start;
        videorate->out_frame_count = 0;
      } else {
        videorate->next_ts = videorate->segment.start + videorate->segment.base;
      }
    }

    /* In drop-only mode we can already decide here if we should output the
     * current frame or drop it because it's coming earlier than our minimum
     * allowed frame period. This also keeps latency down to 0 frames
     */
    if (videorate->drop_only) {
      if (intime >= videorate->next_ts) {
        GstFlowReturn r;

        /* on error the _flush function posted a warning already */
        if ((r = gst_video_rate_flush_prev (videorate, FALSE)) != GST_FLOW_OK) {
          res = r;
          goto done;
        }
      }
      /* No need to keep the buffer around for longer */
      gst_buffer_replace (&videorate->prevbuf, NULL);
    }
  } else {
    GstClockTime prevtime;
    gint count = 0;
    gint64 diff1, diff2;

    prevtime = videorate->prev_ts;

    GST_LOG_OBJECT (videorate,
        "BEGINNING prev buf %" GST_TIME_FORMAT " new buf %" GST_TIME_FORMAT
        " outgoing ts %" GST_TIME_FORMAT, GST_TIME_ARGS (prevtime),
        GST_TIME_ARGS (intime), GST_TIME_ARGS (videorate->next_ts));

    videorate->in++;

    /* drop new buffer if it's before previous one */
    if (intime < prevtime) {
      GST_DEBUG_OBJECT (videorate,
          "The new buffer (%" GST_TIME_FORMAT
          ") is before the previous buffer (%"
          GST_TIME_FORMAT "). Dropping new buffer.",
          GST_TIME_ARGS (intime), GST_TIME_ARGS (prevtime));
      videorate->drop++;
      if (!videorate->silent)
        gst_video_rate_notify_drop (videorate);
      goto done;
    }

    /* got 2 buffers, see which one is the best */
    do {

      diff1 = prevtime - videorate->next_ts;
      diff2 = intime - videorate->next_ts;

      /* take absolute values, beware: abs and ABS don't work for gint64 */
      if (diff1 < 0)
        diff1 = -diff1;
      if (diff2 < 0)
        diff2 = -diff2;

      GST_LOG_OBJECT (videorate,
          "diff with prev %" GST_TIME_FORMAT " diff with new %"
          GST_TIME_FORMAT " outgoing ts %" GST_TIME_FORMAT,
          GST_TIME_ARGS (diff1), GST_TIME_ARGS (diff2),
          GST_TIME_ARGS (videorate->next_ts));

      if (!GST_BUFFER_DURATION_IS_VALID (videorate->prevbuf) &&
          intime > prevtime) {
        /* Make sure that we have a duration for previous buffer */
        GST_BUFFER_DURATION (videorate->prevbuf) = intime - prevtime;
      }

      /* output first one when it's the best */
      if (diff1 <= diff2) {
        GstFlowReturn r;
        count++;

        /* on error the _flush function posted a warning already */
        if ((r = gst_video_rate_flush_prev (videorate,
                    count > 1)) != GST_FLOW_OK) {
          res = r;
          goto done;
        }
      }

      /* Do not produce any dups. We can exit loop now */
      if (videorate->drop_only)
        break;
      /* continue while the first one was the best, if they were equal avoid
       * going into an infinite loop */
    }
    while (diff1 < diff2);

    /* if we output the first buffer more than once, we have dups */
    if (count > 1) {
      videorate->dup += count - 1;
      if (!videorate->silent)
        gst_video_rate_notify_duplicate (videorate);
    }
    /* if we didn't output the first buffer, we have a drop */
    else if (count == 0) {
      videorate->drop++;

      if (!videorate->silent)
        gst_video_rate_notify_drop (videorate);

      GST_LOG_OBJECT (videorate,
          "new is best, old never used, drop, outgoing ts %"
          GST_TIME_FORMAT, GST_TIME_ARGS (videorate->next_ts));
    }
    GST_LOG_OBJECT (videorate,
        "END, putting new in old, diff1 %" GST_TIME_FORMAT
        ", diff2 %" GST_TIME_FORMAT ", next_ts %" GST_TIME_FORMAT
        ", in %" G_GUINT64_FORMAT ", out %" G_GUINT64_FORMAT ", drop %"
        G_GUINT64_FORMAT ", dup %" G_GUINT64_FORMAT, GST_TIME_ARGS (diff1),
        GST_TIME_ARGS (diff2), GST_TIME_ARGS (videorate->next_ts),
        videorate->in, videorate->out, videorate->drop, videorate->dup);

    /* swap in new one when it's the best */
    gst_video_rate_swap_prev (videorate, buffer, intime);
  }
done:
  return res;

  /* ERRORS */
not_negotiated:
  {
    GST_WARNING_OBJECT (videorate, "no framerate negotiated");
    res = GST_FLOW_NOT_NEGOTIATED;
    goto done;
  }

invalid_buffer:
  {
    GST_WARNING_OBJECT (videorate,
        "Got buffer with GST_CLOCK_TIME_NONE timestamp, discarding it");
    res = GST_BASE_TRANSFORM_FLOW_DROPPED;
    goto done;
  }
}
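Reduced to its core, the duplicate/drop decision in the loop above emits the previous input frame while it is at least as close to the pending output timestamp as the new one. The standalone sketch below (names are illustrative) mirrors that comparison.

#include <gst/gst.h>

/* Illustrative only: TRUE if the previous frame should fill the output slot
 * at next_ts, FALSE if the new frame is strictly closer. */
static gboolean
example_prev_frame_is_best (GstClockTime prev_time, GstClockTime in_time,
    GstClockTime next_ts)
{
  gint64 diff1 = (gint64) prev_time - (gint64) next_ts;
  gint64 diff2 = (gint64) in_time - (gint64) next_ts;

  if (diff1 < 0)
    diff1 = -diff1;
  if (diff2 < 0)
    diff2 = -diff2;

  /* ties favour the previous frame, matching diff1 <= diff2 above */
  return diff1 <= diff2;
}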