static void
gst_glimage_sink_get_times (GstBaseSink * bsink, GstBuffer * buf,
    GstClockTime * start, GstClockTime * end)
{
  GstGLImageSink *glimagesink;

  glimagesink = GST_GLIMAGE_SINK (bsink);

  if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)) {
    *start = GST_BUFFER_TIMESTAMP (buf);
    if (GST_BUFFER_DURATION_IS_VALID (buf))
      *end = *start + GST_BUFFER_DURATION (buf);
    else {
      if (GST_VIDEO_INFO_FPS_N (&glimagesink->info) > 0) {
        *end = *start +
            gst_util_uint64_scale_int (GST_SECOND,
            GST_VIDEO_INFO_FPS_D (&glimagesink->info),
            GST_VIDEO_INFO_FPS_N (&glimagesink->info));
      }
    }
  }
}
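This and the next two get_times implementations fall back to deriving a per-frame duration from the negotiated framerate when the buffer carries none. A minimal standalone sketch of that arithmetic, with 30000/1001 (NTSC ~29.97 fps) as an assumed example rate:

#include <gst/gst.h>

/* Duration of one frame at a framerate of fps_n/fps_d.
 * For 30000/1001 this yields roughly 33366666 ns (~33.37 ms). */
static GstClockTime
frame_duration (gint fps_n, gint fps_d)
{
  if (fps_n <= 0)
    return GST_CLOCK_TIME_NONE;
  return gst_util_uint64_scale_int (GST_SECOND, fps_d, fps_n);
}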
Example #2
static void
gst_pvrvideosink_get_times (GstBaseSink * bsink, GstBuffer * buf,
    GstClockTime * start, GstClockTime * end)
{
  GstPVRVideoSink *pvrvideosink;

  pvrvideosink = GST_PVRVIDEOSINK (bsink);

  if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)) {
    *start = GST_BUFFER_TIMESTAMP (buf);
    if (GST_BUFFER_DURATION_IS_VALID (buf)) {
      *end = *start + GST_BUFFER_DURATION (buf);
    } else {
      gint fps_n, fps_d;
      fps_n = GST_VIDEO_INFO_FPS_N (&pvrvideosink->info);
      fps_d = GST_VIDEO_INFO_FPS_D (&pvrvideosink->info);
      if (fps_n > 0) {
        *end = *start + gst_util_uint64_scale_int (GST_SECOND, fps_d, fps_n);
      }
    }
  }
}
Example #3
static void
gst_kms_sink_get_times (GstBaseSink * bsink, GstBuffer * buf,
    GstClockTime * start, GstClockTime * end)
{
  GstKMSSink *self;

  self = GST_KMS_SINK (bsink);

  if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)) {
    *start = GST_BUFFER_TIMESTAMP (buf);
    if (GST_BUFFER_DURATION_IS_VALID (buf))
      *end = *start + GST_BUFFER_DURATION (buf);
    else {
      if (GST_VIDEO_INFO_FPS_N (&self->vinfo) > 0) {
        *end = *start +
            gst_util_uint64_scale_int (GST_SECOND,
            GST_VIDEO_INFO_FPS_D (&self->vinfo),
            GST_VIDEO_INFO_FPS_N (&self->vinfo));
      }
    }
  }
}
Example #4
static GstFlowReturn got_data(GstPad *pad, GstBuffer *buf) {
    GstTfImpl *This = gst_pad_get_element_private(pad);
    IMediaSample *sample = GST_APP_BUFFER(buf)->priv;
    REFERENCE_TIME tStart, tStop;
    HRESULT hr;

    if (GST_BUFFER_TIMESTAMP_IS_VALID(buf) &&
        GST_BUFFER_DURATION_IS_VALID(buf)) {
        tStart = buf->timestamp / 100;
        tStop = tStart + buf->duration / 100;
        IMediaSample_SetTime(sample, &tStart, &tStop);
    }
    else
        IMediaSample_SetTime(sample, NULL, NULL);
    if (GST_BUFFER_OFFSET_IS_VALID(buf) &&
        GST_BUFFER_OFFSET_END_IS_VALID(buf)) {
        tStart = buf->offset / 100;
        tStop = buf->offset_end / 100;
        IMediaSample_SetMediaTime(sample, &tStart, &tStop);
    }
    else
        IMediaSample_SetMediaTime(sample, NULL, NULL);

    IMediaSample_SetDiscontinuity(sample, GST_BUFFER_FLAG_IS_SET(buf, GST_BUFFER_FLAG_DISCONT));
    IMediaSample_SetPreroll(sample, GST_BUFFER_FLAG_IS_SET(buf, GST_BUFFER_FLAG_PREROLL));
    IMediaSample_SetSyncPoint(sample, !GST_BUFFER_FLAG_IS_SET(buf, GST_BUFFER_FLAG_DELTA_UNIT));
    IMediaSample_SetActualDataLength(sample, GST_BUFFER_SIZE(buf));

    hr = BaseOutputPinImpl_Deliver((BaseOutputPin*)This->tf.ppPins[1], sample);
    gst_buffer_unref(buf);
    if (FAILED(hr))
        return GST_FLOW_WRONG_STATE;
    if (hr != S_OK)
        return GST_FLOW_RESEND;
    return GST_FLOW_OK;
}
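The divisions by 100 above convert GStreamer clock values, which are in nanoseconds, into DirectShow REFERENCE_TIME units of 100 ns. A small sketch of just that conversion, using a stand-in typedef rather than the real Windows headers:

#include <gst/gst.h>

/* Stand-in for DirectShow's REFERENCE_TIME (a 64-bit count of 100 ns
 * ticks); GstClockTime counts plain nanoseconds. */
typedef gint64 reference_time_t;

static reference_time_t
ns_to_reference_time (GstClockTime ns)
{
  return (reference_time_t) (ns / 100);
}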
Example #5
static void
gst_inter_audio_src_get_times (GstBaseSrc * src, GstBuffer * buffer,
    GstClockTime * start, GstClockTime * end)
{
  GstInterAudioSrc *interaudiosrc = GST_INTER_AUDIO_SRC (src);

  GST_DEBUG_OBJECT (src, "get_times");

  /* for live sources, sync on the timestamp of the buffer */
  if (gst_base_src_is_live (src)) {
    if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer)) {
      *start = GST_BUFFER_TIMESTAMP (buffer);
      if (GST_BUFFER_DURATION_IS_VALID (buffer)) {
        *end = *start + GST_BUFFER_DURATION (buffer);
      } else {
        if (interaudiosrc->info.rate > 0) {
          *end = *start +
              gst_util_uint64_scale_int (gst_buffer_get_size (buffer),
              GST_SECOND, interaudiosrc->info.rate * interaudiosrc->info.bpf);
        }
      }
    }
  }
}
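For raw audio the fallback above derives the end time from the buffer size: nbytes / (rate * bpf) seconds, where bpf is the bytes per frame (sample size times channel count). The same calculation in isolation, assuming raw interleaved audio:

#include <gst/gst.h>

/* Duration covered by nbytes of raw audio at the given sample rate
 * and bytes-per-frame (sample size * channels). */
static GstClockTime
audio_bytes_to_duration (gsize nbytes, gint rate, gint bpf)
{
  if (rate <= 0 || bpf <= 0)
    return GST_CLOCK_TIME_NONE;
  return gst_util_uint64_scale_int (nbytes, GST_SECOND, rate * bpf);
}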
static GstFlowReturn
gst_decklink_video_sink_prepare (GstBaseSink * bsink, GstBuffer * buffer)
{
  GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (bsink);
  GstVideoFrame vframe;
  IDeckLinkMutableVideoFrame *frame;
  guint8 *outdata, *indata;
  GstFlowReturn flow_ret;
  HRESULT ret;
  GstClockTime timestamp, duration;
  GstClockTime running_time, running_time_duration;
  GstClockTime latency, render_delay;
  GstClockTimeDiff ts_offset;
  gint i;
  GstDecklinkVideoFormat caps_format;
  BMDPixelFormat format;
  gint bpp;
  GstVideoTimeCodeMeta *tc_meta;

  GST_DEBUG_OBJECT (self, "Preparing buffer %p", buffer);

  // FIXME: Handle no timestamps
  if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer)) {
    return GST_FLOW_ERROR;
  }

  caps_format = gst_decklink_type_from_video_format (self->info.finfo->format);
  format = gst_decklink_pixel_format_from_type (caps_format);
  bpp = gst_decklink_bpp_from_type (caps_format);

  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  duration = GST_BUFFER_DURATION (buffer);
  if (duration == GST_CLOCK_TIME_NONE) {
    duration =
        gst_util_uint64_scale_int (GST_SECOND, self->info.fps_d,
        self->info.fps_n);
  }
  running_time =
      gst_segment_to_running_time (&GST_BASE_SINK_CAST (self)->segment,
      GST_FORMAT_TIME, timestamp);
  running_time_duration =
      gst_segment_to_running_time (&GST_BASE_SINK_CAST (self)->segment,
      GST_FORMAT_TIME, timestamp + duration) - running_time;

  /* See gst_base_sink_adjust_time() */
  latency = gst_base_sink_get_latency (bsink);
  render_delay = gst_base_sink_get_render_delay (bsink);
  ts_offset = gst_base_sink_get_ts_offset (bsink);

  running_time += latency;

  if (ts_offset < 0) {
    ts_offset = -ts_offset;
    if ((GstClockTime) ts_offset < running_time)
      running_time -= ts_offset;
    else
      running_time = 0;
  } else {
    running_time += ts_offset;
  }

  if (running_time > render_delay)
    running_time -= render_delay;
  else
    running_time = 0;

  ret = self->output->output->CreateVideoFrame (self->info.width,
      self->info.height, self->info.stride[0], format, bmdFrameFlagDefault,
      &frame);
  if (ret != S_OK) {
    GST_ELEMENT_ERROR (self, STREAM, FAILED,
        (NULL), ("Failed to create video frame: 0x%08x", ret));
    return GST_FLOW_ERROR;
  }

  if (!gst_video_frame_map (&vframe, &self->info, buffer, GST_MAP_READ)) {
    GST_ERROR_OBJECT (self, "Failed to map video frame");
    flow_ret = GST_FLOW_ERROR;
    goto out;
  }

  frame->GetBytes ((void **) &outdata);
  indata = (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (&vframe, 0);
  for (i = 0; i < self->info.height; i++) {
    memcpy (outdata, indata, GST_VIDEO_FRAME_WIDTH (&vframe) * bpp);
    indata += GST_VIDEO_FRAME_PLANE_STRIDE (&vframe, 0);
    outdata += frame->GetRowBytes ();
  }
  gst_video_frame_unmap (&vframe);

  tc_meta = gst_buffer_get_video_time_code_meta (buffer);
  if (tc_meta) {
    BMDTimecodeFlags bflags = (BMDTimecodeFlags) 0;
    gchar *tc_str;

    if (((GstVideoTimeCodeFlags) (tc_meta->tc.
                config.flags)) & GST_VIDEO_TIME_CODE_FLAGS_DROP_FRAME)
      bflags = (BMDTimecodeFlags) (bflags | bmdTimecodeIsDropFrame);
    else
      bflags = (BMDTimecodeFlags) (bflags | bmdTimecodeFlagDefault);
    if (tc_meta->tc.field_count == 2)
      bflags = (BMDTimecodeFlags) (bflags | bmdTimecodeFieldMark);

    tc_str = gst_video_time_code_to_string (&tc_meta->tc);
    ret = frame->SetTimecodeFromComponents (self->timecode_format,
        (uint8_t) tc_meta->tc.hours,
        (uint8_t) tc_meta->tc.minutes,
        (uint8_t) tc_meta->tc.seconds, (uint8_t) tc_meta->tc.frames, bflags);
    if (ret != S_OK) {
      GST_ERROR_OBJECT (self,
          "Failed to set timecode %s to video frame: 0x%08x", tc_str, ret);
      flow_ret = GST_FLOW_ERROR;
      g_free (tc_str);
      goto out;
    }
    GST_DEBUG_OBJECT (self, "Set frame timecode to %s", tc_str);
    g_free (tc_str);
  }

  convert_to_internal_clock (self, &running_time, &running_time_duration);

  GST_LOG_OBJECT (self, "Scheduling video frame %p at %" GST_TIME_FORMAT
      " with duration %" GST_TIME_FORMAT, frame, GST_TIME_ARGS (running_time),
      GST_TIME_ARGS (running_time_duration));

  ret = self->output->output->ScheduleVideoFrame (frame,
      running_time, running_time_duration, GST_SECOND);
  if (ret != S_OK) {
    GST_ELEMENT_ERROR (self, STREAM, FAILED,
        (NULL), ("Failed to schedule frame: 0x%08x", ret));
    flow_ret = GST_FLOW_ERROR;
    goto out;
  }

  flow_ret = GST_FLOW_OK;

out:

  frame->Release ();

  return flow_ret;
}
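The latency, ts-offset and render-delay handling above mirrors what gst_base_sink_adjust_time() does internally: add the latency, apply the (possibly negative) ts-offset clamped so the result stays non-negative, then subtract the render delay, again clamping at zero. The same adjustment factored out as a helper:

#include <gst/gst.h>

/* Running-time adjustment as performed in the prepare function above. */
static GstClockTime
adjust_running_time (GstClockTime running_time, GstClockTime latency,
    GstClockTimeDiff ts_offset, GstClockTime render_delay)
{
  running_time += latency;

  if (ts_offset < 0) {
    ts_offset = -ts_offset;
    if ((GstClockTime) ts_offset < running_time)
      running_time -= ts_offset;
    else
      running_time = 0;
  } else {
    running_time += ts_offset;
  }

  if (running_time > render_delay)
    running_time -= render_delay;
  else
    running_time = 0;

  return running_time;
}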
static gboolean
gst_output_selector_switch (GstOutputSelector * osel)
{
  gboolean res = FALSE;
  GstEvent *ev = NULL;
  GstSegment *seg = NULL;
  GstPad *active_srcpad;

  /* Switch */
  GST_OBJECT_LOCK (osel);
  GST_INFO_OBJECT (osel, "switching to pad %" GST_PTR_FORMAT,
      osel->pending_srcpad);
  if (!osel->pending_srcpad) {
    GST_OBJECT_UNLOCK (osel);
    return TRUE;
  }

  if (gst_pad_is_linked (osel->pending_srcpad)) {
    osel->active_srcpad = osel->pending_srcpad;
    res = TRUE;
  }
  gst_object_unref (osel->pending_srcpad);
  osel->pending_srcpad = NULL;
  active_srcpad = res ? gst_object_ref (osel->active_srcpad) : NULL;
  GST_OBJECT_UNLOCK (osel);

  /* Send SEGMENT event and latest buffer if switching succeeded
   * and we already have a valid segment configured */
  if (res) {
    GstBuffer *latest_buffer;

    g_object_notify (G_OBJECT (osel), "active-pad");

    GST_OBJECT_LOCK (osel);
    latest_buffer =
        osel->latest_buffer ? gst_buffer_ref (osel->latest_buffer) : NULL;
    GST_OBJECT_UNLOCK (osel);

    gst_pad_sticky_events_foreach (osel->sinkpad, forward_sticky_events,
        active_srcpad);

    /* update segment if required */
    if (osel->segment.format != GST_FORMAT_UNDEFINED) {
      /* Send SEGMENT to the pad we are going to switch to */
      seg = &osel->segment;
      /* If resending then mark segment start and position accordingly */
      if (osel->resend_latest && latest_buffer &&
          GST_BUFFER_TIMESTAMP_IS_VALID (latest_buffer)) {
        seg->position = GST_BUFFER_TIMESTAMP (latest_buffer);
      }

      ev = gst_event_new_segment (seg);

      if (!gst_pad_push_event (active_srcpad, ev)) {
        GST_WARNING_OBJECT (osel,
            "newsegment handling failed in %" GST_PTR_FORMAT, active_srcpad);
      }
    }

    /* Resend latest buffer to newly switched pad */
    if (osel->resend_latest && latest_buffer) {
      GST_INFO ("resending latest buffer");
      gst_pad_push (active_srcpad, latest_buffer);
    } else if (latest_buffer) {
      gst_buffer_unref (latest_buffer);
    }

    gst_object_unref (active_srcpad);
  } else {
    GST_WARNING_OBJECT (osel, "switch failed, pad not linked");
  }

  return res;
}
Example #8
static GstFlowReturn
test_injector_chain (GstPad * pad, GstBuffer * buf)
{
  GstFlowReturn ret;
  GstPad *srcpad;

  srcpad = gst_element_get_pad (GST_ELEMENT (GST_PAD_PARENT (pad)), "src");

  /* since we're increasing timestamp/offsets, push this one first */
  GST_LOG (" passing buffer   [t=%" GST_TIME_FORMAT "-%" GST_TIME_FORMAT
      "], offset=%" G_GINT64_FORMAT ", offset_end=%" G_GINT64_FORMAT,
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf) + GST_BUFFER_DURATION (buf)),
      GST_BUFFER_OFFSET (buf), GST_BUFFER_OFFSET_END (buf));

  gst_buffer_ref (buf);

  ret = gst_pad_push (srcpad, buf);

  if (g_random_double () < injector_inject_probability) {
    GstBuffer *ibuf;

    ibuf = gst_buffer_copy (buf);

    if (GST_BUFFER_OFFSET_IS_VALID (buf) &&
        GST_BUFFER_OFFSET_END_IS_VALID (buf)) {
      guint64 delta;

      delta = GST_BUFFER_OFFSET_END (buf) - GST_BUFFER_OFFSET (buf);
      GST_BUFFER_OFFSET (ibuf) += delta / 4;
      GST_BUFFER_OFFSET_END (ibuf) += delta / 4;
    } else {
      GST_BUFFER_OFFSET (ibuf) = GST_BUFFER_OFFSET_NONE;
      GST_BUFFER_OFFSET_END (ibuf) = GST_BUFFER_OFFSET_NONE;
    }

    if (GST_BUFFER_TIMESTAMP_IS_VALID (buf) &&
        GST_BUFFER_DURATION_IS_VALID (buf)) {
      GstClockTime delta;

      delta = GST_BUFFER_DURATION (buf);
      GST_BUFFER_TIMESTAMP (ibuf) += delta / 4;
    } else {
      GST_BUFFER_TIMESTAMP (ibuf) = GST_CLOCK_TIME_NONE;
      GST_BUFFER_DURATION (ibuf) = GST_CLOCK_TIME_NONE;
    }

    if (GST_BUFFER_TIMESTAMP_IS_VALID (ibuf) ||
        GST_BUFFER_OFFSET_IS_VALID (ibuf)) {
      GST_LOG ("injecting buffer [t=%" GST_TIME_FORMAT "-%" GST_TIME_FORMAT
          "], offset=%" G_GINT64_FORMAT ", offset_end=%" G_GINT64_FORMAT,
          GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (ibuf)),
          GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (ibuf) +
              GST_BUFFER_DURATION (ibuf)), GST_BUFFER_OFFSET (ibuf),
          GST_BUFFER_OFFSET_END (ibuf));

      if (gst_pad_push (srcpad, ibuf) != GST_FLOW_OK) {
        /* ignore return value */
      }
    } else {
      GST_WARNING ("couldn't inject buffer, no incoming timestamps or offsets");
      gst_buffer_unref (ibuf);
    }
  }

  gst_buffer_unref (buf);

  return ret;
}
Example #9
static GstFlowReturn
celt_dec_chain_parse_data (GstCeltDec * dec, GstBuffer * buf,
    GstClockTime timestamp, GstClockTime duration)
{
  GstFlowReturn res = GST_FLOW_OK;
  gint size;
  guint8 *data;
  GstBuffer *outbuf;
  gint16 *out_data;
  gint error = CELT_OK;

  if (timestamp != -1) {
    dec->segment.last_stop = timestamp;
    dec->granulepos = -1;
  }

  if (buf) {
    data = GST_BUFFER_DATA (buf);
    size = GST_BUFFER_SIZE (buf);

    GST_DEBUG_OBJECT (dec, "received buffer of size %u", size);
    if (!GST_BUFFER_TIMESTAMP_IS_VALID (buf)
        && GST_BUFFER_OFFSET_END_IS_VALID (buf)) {
      dec->granulepos = GST_BUFFER_OFFSET_END (buf);
      GST_DEBUG_OBJECT (dec,
          "Taking granulepos from upstream: %" G_GUINT64_FORMAT,
          dec->granulepos);
    }

    /* copy timestamp */
  } else {
    /* concealment data, pass NULL as the bits parameters */
    GST_DEBUG_OBJECT (dec, "creating concealment data");
    data = NULL;
    size = 0;
  }

  res = gst_pad_alloc_buffer_and_set_caps (dec->srcpad,
      GST_BUFFER_OFFSET_NONE, dec->frame_size * dec->header.nb_channels * 2,
      GST_PAD_CAPS (dec->srcpad), &outbuf);

  if (res != GST_FLOW_OK) {
    GST_DEBUG_OBJECT (dec, "buf alloc flow: %s", gst_flow_get_name (res));
    return res;
  }

  out_data = (gint16 *) GST_BUFFER_DATA (outbuf);

  GST_LOG_OBJECT (dec, "decoding frame");

  error = celt_decode (dec->state, data, size, out_data);
  if (error != CELT_OK) {
    GST_WARNING_OBJECT (dec, "Decoding error: %d", error);
    return GST_FLOW_ERROR;
  }

  if (dec->granulepos == -1) {
    if (dec->segment.format != GST_FORMAT_TIME) {
      GST_WARNING_OBJECT (dec, "segment not initialized or not TIME format");
      dec->granulepos = dec->frame_size;
    } else {
      dec->granulepos = gst_util_uint64_scale_int (dec->segment.last_stop,
          dec->header.sample_rate, GST_SECOND) + dec->frame_size;
    }
    GST_DEBUG_OBJECT (dec, "granulepos=%" G_GINT64_FORMAT, dec->granulepos);
  }

  GST_BUFFER_OFFSET (outbuf) = dec->granulepos - dec->frame_size;
  GST_BUFFER_OFFSET_END (outbuf) = dec->granulepos;
  GST_BUFFER_TIMESTAMP (outbuf) =
      gst_util_uint64_scale_int (dec->granulepos - dec->frame_size, GST_SECOND,
      dec->header.sample_rate);
  GST_BUFFER_DURATION (outbuf) = dec->frame_duration;

  dec->granulepos += dec->frame_size;
  dec->segment.last_stop += dec->frame_duration;

  GST_LOG_OBJECT (dec, "pushing buffer with ts=%" GST_TIME_FORMAT ", dur=%"
      GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
      GST_TIME_ARGS (dec->frame_duration));

  res = gst_pad_push (dec->srcpad, outbuf);

  if (res != GST_FLOW_OK)
    GST_DEBUG_OBJECT (dec, "flow: %s", gst_flow_get_name (res));

  return res;
}
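The output timestamping above follows the usual Ogg convention: the granule position counts samples, so a timestamp is obtained by scaling it with GST_SECOND / sample_rate. A small worked sketch (the 48 kHz rate is only an assumed example; at that rate a granulepos of 48000 maps to exactly one second):

#include <gst/gst.h>

/* Convert an Ogg-style granule position (sample count) into a
 * GstClockTime at the given sample rate. */
static GstClockTime
granulepos_to_time (guint64 granulepos, gint sample_rate)
{
  if (sample_rate <= 0)
    return GST_CLOCK_TIME_NONE;
  return gst_util_uint64_scale_int (granulepos, GST_SECOND, sample_rate);
}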
Example #10
static GstFlowReturn
gst_dvd_spu_subpic_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
  GstDVDSpu *dvdspu = (GstDVDSpu *) parent;
  GstFlowReturn ret = GST_FLOW_OK;
  gsize size;

  g_return_val_if_fail (dvdspu != NULL, GST_FLOW_ERROR);

  GST_INFO_OBJECT (dvdspu, "Have subpicture buffer with timestamp %"
      GST_TIME_FORMAT " and size %" G_GSIZE_FORMAT,
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)), gst_buffer_get_size (buf));

  DVD_SPU_LOCK (dvdspu);

  if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)) {
    dvdspu->subp_seg.position = GST_BUFFER_TIMESTAMP (buf);
  }

  if (GST_BUFFER_IS_DISCONT (buf) && dvdspu->partial_spu) {
    gst_buffer_unref (dvdspu->partial_spu);
    dvdspu->partial_spu = NULL;
  }

  if (dvdspu->partial_spu != NULL) {
    if (GST_BUFFER_TIMESTAMP_IS_VALID (buf))
      GST_WARNING_OBJECT (dvdspu,
          "Joining subpicture buffer with timestamp to previous");
    dvdspu->partial_spu = gst_buffer_append (dvdspu->partial_spu, buf);
  } else {
    /* If we don't yet have a buffer, wait for one with a timestamp,
     * since that will avoid collecting the 2nd half of a partial buf */
    if (GST_BUFFER_TIMESTAMP_IS_VALID (buf))
      dvdspu->partial_spu = buf;
    else
      gst_buffer_unref (buf);
  }

  if (dvdspu->partial_spu == NULL)
    goto done;

  size = gst_buffer_get_size (dvdspu->partial_spu);

  switch (dvdspu->spu_input_type) {
    case SPU_INPUT_TYPE_VOBSUB:
      if (size >= 2) {
        guint8 header[2];
        guint16 packet_size;

        gst_buffer_extract (dvdspu->partial_spu, 0, header, 2);
        packet_size = GST_READ_UINT16_BE (header);
        if (packet_size == size) {
          submit_new_spu_packet (dvdspu, dvdspu->partial_spu);
          dvdspu->partial_spu = NULL;
        } else if (packet_size == 0) {
          GST_LOG_OBJECT (dvdspu, "Discarding empty SPU buffer");
          gst_buffer_unref (dvdspu->partial_spu);
          dvdspu->partial_spu = NULL;
        } else if (packet_size < size) {
          /* Somehow we collected too much - something is wrong. Drop the
           * packet entirely and wait for a new one */
          GST_DEBUG_OBJECT (dvdspu,
              "Discarding invalid SPU buffer of size %" G_GSIZE_FORMAT, size);

          gst_buffer_unref (dvdspu->partial_spu);
          dvdspu->partial_spu = NULL;
        } else {
          GST_LOG_OBJECT (dvdspu,
              "SPU buffer claims to be of size %u. Collected %" G_GSIZE_FORMAT
              " so far.", packet_size, size);
        }
      }
      break;
    case SPU_INPUT_TYPE_PGS:{
      /* Collect until we have a command buffer that ends exactly at the size
       * we've collected */
      guint8 packet_type;
      guint16 packet_size;
      GstMapInfo map;
      guint8 *ptr, *end;
      gboolean invalid = FALSE;

      gst_buffer_map (dvdspu->partial_spu, &map, GST_MAP_READ);

      ptr = map.data;
      end = ptr + map.size;

      /* FIXME: There's no need to walk the command set each time. We can set a
       * marker and resume where we left off next time */
      /* FIXME: Move the packet parsing and sanity checking into the format-specific modules */
      while (ptr != end) {
        if (ptr + 3 > end)
          break;
        packet_type = *ptr++;
        packet_size = GST_READ_UINT16_BE (ptr);
        ptr += 2;
        if (ptr + packet_size > end)
          break;
        ptr += packet_size;
        /* 0x80 is the END command for PGS packets */
        if (packet_type == 0x80 && ptr != end) {
          /* Extra cruft on the end of the packet -> assume invalid */
          invalid = TRUE;
          break;
        }
      }
      gst_buffer_unmap (dvdspu->partial_spu, &map);

      if (invalid) {
        gst_buffer_unref (dvdspu->partial_spu);
        dvdspu->partial_spu = NULL;
      } else if (ptr == end) {
        GST_DEBUG_OBJECT (dvdspu,
            "Have complete PGS packet of size %" G_GSIZE_FORMAT ". Enqueueing.",
            map.size);
        submit_new_spu_packet (dvdspu, dvdspu->partial_spu);
        dvdspu->partial_spu = NULL;
      }
      break;
    }
    default:
      GST_ERROR_OBJECT (dvdspu, "Input type not configured before SPU passing");
      goto caps_not_set;
  }

done:
  DVD_SPU_UNLOCK (dvdspu);

  return ret;

  /* ERRORS */
caps_not_set:
  {
    GST_ELEMENT_ERROR (dvdspu, RESOURCE, NO_SPACE_LEFT,
        (_("Subpicture format was not configured before data flow")), (NULL));
    ret = GST_FLOW_ERROR;
    goto done;
  }
}
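In the VOBSUB branch above, the first two bytes of the accumulated data are a big-endian 16-bit total packet size, and buffers are appended until the collected size matches it. A hedged sketch of that completeness check on a plain byte array:

#include <gst/gst.h>

/* TRUE when 'collected' bytes of VOBSUB data form a complete packet,
 * i.e. the big-endian size field at the start equals the number of
 * bytes gathered so far (a zero size field is treated as invalid). */
static gboolean
vobsub_packet_complete (const guint8 * data, gsize collected)
{
  guint16 packet_size;

  if (collected < 2)
    return FALSE;
  packet_size = GST_READ_UINT16_BE (data);
  return packet_size != 0 && packet_size == collected;
}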
Example #11
static GstFlowReturn
gst_goo_encpcm_chain (GstPad* pad, GstBuffer* buffer)
{
	GstGooEncPcm* self = GST_GOO_ENCPCM (gst_pad_get_parent (pad));
	guint omxbufsiz = GOO_PORT_GET_DEFINITION (self->inport)->nBufferSize;
	GstFlowReturn ret;

	if (self->rate == 0 || self->channels == 0 ||
	    self->component->cur_state != OMX_StateExecuting)
	{
		goto not_negotiated;
	}

	if (goo_port_is_tunneled (self->inport))
	{
		GST_DEBUG_OBJECT (self, "DASF Source");
		OMX_BUFFERHEADERTYPE* omxbuf = NULL;
		omxbuf = goo_port_grab_buffer (self->outport);
		ret = process_output_buffer (self, omxbuf);
		return ret;
	}

	/* discontinuity clears adapter, FIXME, maybe we can set some
	 * encoder flag to mask the discont. */
	if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT))
	{
		gst_goo_adapter_clear (self->adapter);
		self->ts = 0;
	}

	/* take latest timestamp, FIXME timestamp is the one of the
	 * first buffer in the adapter. */
	if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
	{
		self->ts = GST_BUFFER_TIMESTAMP (buffer);
	}

	ret = GST_FLOW_OK;
	GST_DEBUG_OBJECT (self, "Pushing a GST buffer to adapter (%d)",
			  GST_BUFFER_SIZE (buffer));
	gst_goo_adapter_push (self->adapter, buffer);

	/* Collect samples until we have enough for an output frame */
	while (gst_goo_adapter_available (self->adapter) >= omxbufsiz)
	{
		GST_DEBUG_OBJECT (self, "Popping an OMX buffer");
		OMX_BUFFERHEADERTYPE* omxbuf;
		omxbuf = goo_port_grab_buffer (self->inport);
		gst_goo_adapter_peek (self->adapter, omxbufsiz, omxbuf);
		omxbuf->nFilledLen = omxbufsiz;
		gst_goo_adapter_flush (self->adapter, omxbufsiz);
		goo_component_release_buffer (self->component, omxbuf);
	}

	GST_DEBUG_OBJECT (self, "");
	return ret;

	/* ERRORS */
not_negotiated:
	{
		GST_ELEMENT_ERROR (self, STREAM, TYPE_NOT_FOUND,
				   (NULL), ("unknown type"));
		return GST_FLOW_NOT_NEGOTIATED;
	}
}
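The encoding loop above is the usual adapter pattern: push incoming buffers, then peel off fixed-size chunks while enough data is available. The element uses a goo-specific adapter; a comparable sketch with the stock GstAdapter API, where chunk_size and the consume callback are placeholders for the OMX buffer handling above:

#include <gst/gst.h>
#include <gst/base/gstadapter.h>

/* Accumulate input in a GstAdapter and hand out fixed-size chunks to a
 * hypothetical consume callback. The adapter takes ownership of the
 * pushed buffer. */
static void
drain_in_chunks (GstAdapter * adapter, GstBuffer * input, gsize chunk_size,
    void (*consume) (GstBuffer * chunk, gpointer user_data),
    gpointer user_data)
{
  gst_adapter_push (adapter, input);

  while (gst_adapter_available (adapter) >= chunk_size) {
    GstBuffer *chunk = gst_adapter_take_buffer (adapter, chunk_size);

    consume (chunk, user_data);
    gst_buffer_unref (chunk);
  }
}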
Example #12
static GstFlowReturn
gst_dvdlpcmdec_chain_dvd (GstPad * pad, GstBuffer * buf)
{
  GstDvdLpcmDec *dvdlpcmdec;
  guint8 *data;
  guint size;
  guint first_access;
  guint32 header;
  GstBuffer *subbuf;
  GstFlowReturn ret = GST_FLOW_OK;
  gint off, len;

  dvdlpcmdec = GST_DVDLPCMDEC (gst_pad_get_parent (pad));

  size = GST_BUFFER_SIZE (buf);
  data = GST_BUFFER_DATA (buf);

  if (size < 5) {
    /* Buffer is too small */
    GST_ELEMENT_WARNING (dvdlpcmdec, STREAM, DECODE,
        ("Invalid data found parsing LPCM packet"),
        ("LPCM packet was too small. Dropping"));
    ret = GST_FLOW_OK;
    goto done;
  }

  /* We have a 5 byte header, now.
   * The first two bytes are a (big endian) 16 bit offset into our buffer.
   * The buffer timestamp refers to this offset.
   *
   * The other three bytes are a (big endian) number in which the header is
   * encoded.
   */
  first_access = (data[0] << 8) | data[1];
  if (first_access > size) {
    GST_ELEMENT_WARNING (dvdlpcmdec, STREAM, DECODE,
        ("Invalid data found parsing LPCM packet"),
        ("LPCM packet contained invalid first access. Dropping"));
    ret = GST_FLOW_OK;
    goto done;
  }

  /* Don't keep the 'frame number' low 5 bits of the first byte */
  header = ((data[2] & 0xC0) << 16) | (data[3] << 8) | data[4];

  /* see if we have a new header */
  if (header != dvdlpcmdec->header) {
    parse_header (dvdlpcmdec, header);

    if (!gst_dvdlpcmdec_set_outcaps (dvdlpcmdec))
      goto negotiation_failed;

    dvdlpcmdec->header = header;
  }

  GST_LOG_OBJECT (dvdlpcmdec, "first_access %d, buffer length %d", first_access,
      size);

  /* After first_access, we have an additional 3 bytes of data we've parsed and
   * don't want to handle; this is included within the value of first_access.
   * So a first_access value of between 1 and 3 is just broken, we treat that
   * the same as zero. first_access == 4 means we only need to create a single
   * sub-buffer, greater than that we need to create two. */

  /* skip access unit bytes and info */
  off = 5;

  if (first_access > 4) {
    guint samples = 0;
    GstClockTime ts;

    /* length of first buffer */
    len = first_access - 4;

    GST_LOG_OBJECT (dvdlpcmdec, "Creating first sub-buffer off %d, len %d",
        off, len);

    /* see if we need a subbuffer without timestamp */
    if (off + len > size) {
      GST_WARNING_OBJECT (pad, "Bad first_access parameter in buffer");
      GST_ELEMENT_ERROR (dvdlpcmdec, STREAM, DECODE,
          (NULL),
          ("first_access parameter out of range: bad buffer from demuxer"));
      ret = GST_FLOW_ERROR;
      goto done;
    }

    subbuf = gst_buffer_create_sub (buf, off, len);

    /* If we don't have a stored timestamp from the last packet
     * (i.e. we're straight after a new-segment), but we do have one
     * on the first access buffer, then calculate the timestamp to
     * align this buffer to just before the first_access buffer */
    if (!GST_CLOCK_TIME_IS_VALID (dvdlpcmdec->timestamp) &&
        GST_BUFFER_TIMESTAMP_IS_VALID (buf)) {
      switch (dvdlpcmdec->width) {
        case 16:
          samples = len / dvdlpcmdec->channels / 2;
          break;
        case 20:
          samples = (len / dvdlpcmdec->channels) * 2 / 5;
          break;
        case 24:
          samples = len / dvdlpcmdec->channels / 3;
          break;
      }
    }
    if (samples != 0) {
      ts = gst_util_uint64_scale (samples, GST_SECOND, dvdlpcmdec->rate);
      if (ts < GST_BUFFER_TIMESTAMP (buf))
        GST_BUFFER_TIMESTAMP (subbuf) = GST_BUFFER_TIMESTAMP (buf) - ts;
      else
        GST_BUFFER_TIMESTAMP (subbuf) = 0;
    } else {
      GST_BUFFER_TIMESTAMP (subbuf) = GST_CLOCK_TIME_NONE;
    }

    ret = gst_dvdlpcmdec_chain_raw (pad, subbuf);
    if (ret != GST_FLOW_OK)
      goto done;

    /* then the buffer with new timestamp */
    off += len;
    len = size - off;

    GST_LOG_OBJECT (dvdlpcmdec, "Creating next sub-buffer off %d, len %d", off,
        len);

    if (len > 0) {
      subbuf = gst_buffer_create_sub (buf, off, len);
      GST_BUFFER_TIMESTAMP (subbuf) = GST_BUFFER_TIMESTAMP (buf);

      ret = gst_dvdlpcmdec_chain_raw (pad, subbuf);
    }
  } else {
    GST_LOG_OBJECT (dvdlpcmdec, "Creating single sub-buffer off %d, len %d",
        off, size - off);
    subbuf = gst_buffer_create_sub (buf, off, size - off);
    GST_BUFFER_TIMESTAMP (subbuf) = GST_BUFFER_TIMESTAMP (buf);
    ret = gst_dvdlpcmdec_chain_raw (pad, subbuf);
  }

done:
  gst_buffer_unref (buf);
  gst_object_unref (dvdlpcmdec);

  return ret;

  /* ERRORS */
negotiation_failed:
  {
    GST_ELEMENT_ERROR (dvdlpcmdec, STREAM, FORMAT, (NULL),
        ("Failed to configure output format"));
    ret = GST_FLOW_NOT_NEGOTIATED;
    goto done;
  }
}
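The sample count used above to back-date the pre-first_access data depends on the LPCM sample width: 2 bytes per sample at 16-bit, 2.5 at 20-bit (packed), 3 at 24-bit. The same arithmetic factored out:

#include <gst/gst.h>

/* Number of audio frames contained in 'len' bytes of DVD LPCM data,
 * matching the per-width cases above; returns 0 for unknown widths. */
static guint
lpcm_bytes_to_samples (guint len, gint channels, gint width)
{
  switch (width) {
    case 16:
      return len / channels / 2;        /* 2 bytes per sample */
    case 20:
      return (len / channels) * 2 / 5;  /* 2.5 bytes per packed sample */
    case 24:
      return len / channels / 3;        /* 3 bytes per sample */
    default:
      return 0;
  }
}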
Example #13
static GstFlowReturn
gst_multi_file_sink_render (GstBaseSink * sink, GstBuffer * buffer)
{
    GstMultiFileSink *multifilesink;
    guint size;
    guint8 *data;
    gchar *filename;
    gboolean ret;
    GError *error = NULL;

    size = GST_BUFFER_SIZE (buffer);
    data = GST_BUFFER_DATA (buffer);

    multifilesink = GST_MULTI_FILE_SINK (sink);

    switch (multifilesink->next_file) {
    case GST_MULTI_FILE_SINK_NEXT_BUFFER:
        filename = g_strdup_printf (multifilesink->filename,
                                    multifilesink->index);

        ret = g_file_set_contents (filename, (char *) data, size, &error);
        if (!ret)
            goto write_error;

        gst_multi_file_sink_post_message (multifilesink, buffer, filename);
        multifilesink->index++;

        g_free (filename);
        break;
    case GST_MULTI_FILE_SINK_NEXT_DISCONT:
        if (GST_BUFFER_IS_DISCONT (buffer)) {
            if (multifilesink->file) {
                fclose (multifilesink->file);
                multifilesink->file = NULL;

                filename = g_strdup_printf (multifilesink->filename,
                                            multifilesink->index);
                gst_multi_file_sink_post_message (multifilesink, buffer, filename);
                g_free (filename);
                multifilesink->index++;
            }
        }

        if (multifilesink->file == NULL) {
            filename = g_strdup_printf (multifilesink->filename,
                                        multifilesink->index);
            multifilesink->file = g_fopen (filename, "wb");
            g_free (filename);

            if (multifilesink->file == NULL)
                goto stdio_write_error;
        }

        ret = fwrite (GST_BUFFER_DATA (buffer), GST_BUFFER_SIZE (buffer), 1,
                      multifilesink->file);
        if (ret != 1)
            goto stdio_write_error;

        break;
    case GST_MULTI_FILE_SINK_NEXT_KEY_FRAME:
        if (multifilesink->next_segment == GST_CLOCK_TIME_NONE) {
            if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer)) {
                multifilesink->next_segment = GST_BUFFER_TIMESTAMP (buffer) +
                                              10 * GST_SECOND;
            }
        }

        if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer) &&
                GST_BUFFER_TIMESTAMP (buffer) >= multifilesink->next_segment &&
                !GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT)) {
            if (multifilesink->file) {
                fclose (multifilesink->file);
                multifilesink->file = NULL;

                filename = g_strdup_printf (multifilesink->filename,
                                            multifilesink->index);
                gst_multi_file_sink_post_message (multifilesink, buffer, filename);
                g_free (filename);
                multifilesink->index++;
            }

            multifilesink->next_segment += 10 * GST_SECOND;
        }

        if (multifilesink->file == NULL) {
            filename = g_strdup_printf (multifilesink->filename,
                                        multifilesink->index);
            multifilesink->file = g_fopen (filename, "wb");
            g_free (filename);

            if (multifilesink->file == NULL)
                goto stdio_write_error;
        }

        ret = fwrite (GST_BUFFER_DATA (buffer), GST_BUFFER_SIZE (buffer), 1,
                      multifilesink->file);
        if (ret != 1)
            goto stdio_write_error;

        break;
    default:
        g_assert_not_reached ();
    }

    return GST_FLOW_OK;

    /* ERRORS */
write_error:
    {
        switch (error->code) {
        case G_FILE_ERROR_NOSPC: {
            GST_ELEMENT_ERROR (multifilesink, RESOURCE, NO_SPACE_LEFT, (NULL),
                               (NULL));
            break;
        }
        default: {
            GST_ELEMENT_ERROR (multifilesink, RESOURCE, WRITE,
                               ("Error while writing to file \"%s\".", filename),
                               ("%s", g_strerror (errno)));
        }
        }
        g_error_free (error);
        g_free (filename);

        return GST_FLOW_ERROR;
    }
stdio_write_error:
    GST_ELEMENT_ERROR (multifilesink, RESOURCE, WRITE,
                       ("Error while writing to file."), (NULL));
    return GST_FLOW_ERROR;
}
Example #14
static GstFlowReturn
gst_speex_enc_chain (GstPad * pad, GstBuffer * buf)
{
  GstSpeexEnc *enc;
  GstFlowReturn ret = GST_FLOW_OK;

  enc = GST_SPEEX_ENC (GST_PAD_PARENT (pad));

  if (!enc->setup)
    goto not_setup;

  if (!enc->header_sent) {
    /* Speex streams begin with two headers; the initial header (with
       most of the codec setup parameters) which is mandated by the Ogg
       bitstream spec.  The second header holds any comment fields.
       We merely need to make the headers, then pass them to libspeex 
       one at a time; libspeex handles the additional Ogg bitstream 
       constraints */
    GstBuffer *buf1, *buf2;
    GstCaps *caps;
    guchar *data;
    gint data_len;

    /* create header buffer */
    data = (guint8 *) speex_header_to_packet (&enc->header, &data_len);
    buf1 = gst_speex_enc_buffer_from_data (enc, data, data_len, 0);
    free (data);

    /* create comment buffer */
    buf2 = gst_speex_enc_create_metadata_buffer (enc);

    /* mark and put on caps */
    caps = gst_pad_get_caps (enc->srcpad);
    caps = gst_speex_enc_set_header_on_caps (caps, buf1, buf2);

    gst_caps_set_simple (caps,
        "rate", G_TYPE_INT, enc->rate,
        "channels", G_TYPE_INT, enc->channels, NULL);

    /* negotiate with these caps */
    GST_DEBUG_OBJECT (enc, "here are the caps: %" GST_PTR_FORMAT, caps);
    gst_pad_set_caps (enc->srcpad, caps);

    gst_buffer_set_caps (buf1, caps);
    gst_buffer_set_caps (buf2, caps);
    gst_caps_unref (caps);

    /* push out buffers */
    ret = gst_speex_enc_push_buffer (enc, buf1);

    if (ret != GST_FLOW_OK) {
      gst_buffer_unref (buf2);
      goto done;
    }

    ret = gst_speex_enc_push_buffer (enc, buf2);

    if (ret != GST_FLOW_OK)
      goto done;

    speex_bits_reset (&enc->bits);

    enc->header_sent = TRUE;
  }

  /* Save the timestamp of the first buffer. This will be later
   * used as offset for all following buffers */
  if (enc->start_ts == GST_CLOCK_TIME_NONE) {
    if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)) {
      enc->start_ts = GST_BUFFER_TIMESTAMP (buf);
      enc->granulepos_offset = gst_util_uint64_scale
          (GST_BUFFER_TIMESTAMP (buf), enc->rate, GST_SECOND);
    } else {
      enc->start_ts = 0;
      enc->granulepos_offset = 0;
    }
  }

  /* Check if we have a continuous stream; if not, drop some samples, drop
   * the buffer, or insert some silence samples */
  if (enc->next_ts != GST_CLOCK_TIME_NONE &&
      GST_BUFFER_TIMESTAMP (buf) < enc->next_ts) {
    guint64 diff = enc->next_ts - GST_BUFFER_TIMESTAMP (buf);
    guint64 diff_bytes;

    GST_WARNING_OBJECT (enc, "Buffer is older than previous "
        "timestamp + duration (%" GST_TIME_FORMAT "< %" GST_TIME_FORMAT
        "), cannot handle. Clipping buffer.",
        GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
        GST_TIME_ARGS (enc->next_ts));

    diff_bytes = GST_CLOCK_TIME_TO_FRAMES (diff, enc->rate) * enc->channels * 2;
    if (diff_bytes >= GST_BUFFER_SIZE (buf)) {
      gst_buffer_unref (buf);
      return GST_FLOW_OK;
    }
    buf = gst_buffer_make_metadata_writable (buf);
    GST_BUFFER_DATA (buf) += diff_bytes;
    GST_BUFFER_SIZE (buf) -= diff_bytes;

    GST_BUFFER_TIMESTAMP (buf) += diff;
    if (GST_BUFFER_DURATION_IS_VALID (buf))
      GST_BUFFER_DURATION (buf) -= diff;
  }

  if (enc->next_ts != GST_CLOCK_TIME_NONE
      && GST_BUFFER_TIMESTAMP_IS_VALID (buf)) {
    guint64 max_diff =
        gst_util_uint64_scale (enc->frame_size, GST_SECOND, enc->rate);

    if (GST_BUFFER_TIMESTAMP (buf) != enc->next_ts &&
        GST_BUFFER_TIMESTAMP (buf) - enc->next_ts > max_diff) {
      GST_WARNING_OBJECT (enc,
          "Discontinuity detected: %" G_GUINT64_FORMAT " > %" G_GUINT64_FORMAT,
          GST_BUFFER_TIMESTAMP (buf) - enc->next_ts, max_diff);

      gst_speex_enc_encode (enc, TRUE);

      enc->frameno_out = 0;
      enc->start_ts = GST_BUFFER_TIMESTAMP (buf);
      enc->granulepos_offset = gst_util_uint64_scale
          (GST_BUFFER_TIMESTAMP (buf), enc->rate, GST_SECOND);
    }
  }

  if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)
      && GST_BUFFER_DURATION_IS_VALID (buf))
    enc->next_ts = GST_BUFFER_TIMESTAMP (buf) + GST_BUFFER_DURATION (buf);
  else
    enc->next_ts = GST_CLOCK_TIME_NONE;

  GST_DEBUG_OBJECT (enc, "received buffer of %u bytes", GST_BUFFER_SIZE (buf));

  /* push buffer to adapter */
  gst_adapter_push (enc->adapter, buf);
  buf = NULL;

  ret = gst_speex_enc_encode (enc, FALSE);

done:

  if (buf)
    gst_buffer_unref (buf);

  return ret;

  /* ERRORS */
not_setup:
  {
    GST_ELEMENT_ERROR (enc, CORE, NEGOTIATION, (NULL),
        ("encoder not initialized (input is not audio?)"));
    ret = GST_FLOW_NOT_NEGOTIATED;
    goto done;
  }

}
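The clipping branch above turns a timestamp gap into a byte count for 16-bit interleaved audio: frames = gap * rate / GST_SECOND, bytes = frames * channels * 2. A short sketch of that conversion, assuming S16 interleaved input as the encoder does:

#include <gst/gst.h>
#include <gst/audio/audio.h>

/* Bytes of S16 interleaved audio corresponding to a time gap of
 * 'diff' nanoseconds, as used when clipping overlapping input. */
static guint64
time_gap_to_bytes (GstClockTime diff, gint rate, gint channels)
{
  return GST_CLOCK_TIME_TO_FRAMES (diff, rate) * channels * 2;
}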
Example #15
static void
do_test (SubParseInputChunk * input, guint num, const gchar * format)
{
  guint n;
  GstCaps *outcaps;

  setup_subparse ();

  for (n = 0; n < num; ++n) {
    GstBuffer *buf;

    buf = buffer_from_static_string (input[n].in);
    fail_unless_equals_int (gst_pad_push (mysrcpad, buf), GST_FLOW_OK);
  }

  gst_pad_push_event (mysrcpad, gst_event_new_eos ());

  fail_unless_equals_int (g_list_length (buffers), num);

  outcaps = gst_pad_get_current_caps (mysinkpad);

  for (n = 0; n < num; ++n) {
    const GstStructure *buffer_caps_struct;
    GstBuffer *buf;
    GstMapInfo map;

    buf = g_list_nth_data (buffers, n);
    fail_unless (buf != NULL);

    /* check timestamp */
    fail_unless (GST_BUFFER_TIMESTAMP_IS_VALID (buf), NULL);
    fail_unless_equals_uint64 (GST_BUFFER_TIMESTAMP (buf), input[n].from_ts);

    /* might not be able to put a duration on the last buffer */
    if (input[n].to_ts != GST_CLOCK_TIME_NONE) {
      /* check duration */
      fail_unless (GST_BUFFER_DURATION_IS_VALID (buf), NULL);
      fail_unless_equals_uint64 (GST_BUFFER_DURATION (buf),
          input[n].to_ts - input[n].from_ts);
    }

    gst_buffer_map (buf, &map, GST_MAP_READ);
    /* can be NULL */
    if (map.data != NULL) {
      /* shouldn't have trailing newline characters */
      fail_if (map.size > 0 && map.data[map.size - 1] == '\n');
      /* shouldn't include NUL-terminator in data size */
      fail_if (map.size > 0 && map.data[map.size - 1] == '\0');
      /* but should still have a NUL-terminator behind the declared data */
      fail_unless_equals_int (map.data[map.size], '\0');
      /* make sure out string matches expected string */
      fail_unless_equals_string ((gchar *) map.data, input[n].out);
    }
    gst_buffer_unmap (buf, &map);
    /* check caps */
    fail_unless (outcaps != NULL);
    buffer_caps_struct = gst_caps_get_structure (outcaps, 0);
    fail_unless (gst_structure_has_name (buffer_caps_struct, "text/x-raw"));
    fail_unless_equals_string (gst_structure_get_string (buffer_caps_struct,
            "format"), format);
  }
  gst_caps_unref (outcaps);

  teardown_subparse ();
}
Example #16
/**
 * gst_audio_buffer_clip:
 * @buffer: (transfer full): The buffer to clip.
 * @segment: Segment in %GST_FORMAT_TIME or %GST_FORMAT_DEFAULT to which
 *           the buffer should be clipped.
 * @rate: sample rate.
 * @bpf: size of one audio frame in bytes. This is the size of one sample
 *       times the number of channels.
 *
 * Clip the buffer to the given %GstSegment.
 *
 * After calling this function the caller does not own a reference to
 * @buffer anymore.
 *
 * Returns: (transfer full): %NULL if the buffer is completely outside the configured segment,
 * otherwise the clipped buffer is returned.
 *
 * If the buffer has no timestamp, it is assumed to be inside the segment and
 * is not clipped.
 */
GstBuffer *
gst_audio_buffer_clip (GstBuffer * buffer, GstSegment * segment, gint rate,
    gint bpf)
{
  GstBuffer *ret;
  GstClockTime timestamp = GST_CLOCK_TIME_NONE, duration = GST_CLOCK_TIME_NONE;
  guint64 offset = GST_BUFFER_OFFSET_NONE, offset_end = GST_BUFFER_OFFSET_NONE;
  gsize trim, size, osize;
  gboolean change_duration = TRUE, change_offset = TRUE, change_offset_end =
      TRUE;

  g_return_val_if_fail (segment->format == GST_FORMAT_TIME ||
      segment->format == GST_FORMAT_DEFAULT, buffer);
  g_return_val_if_fail (GST_IS_BUFFER (buffer), NULL);

  if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
    /* No timestamp - assume the buffer is completely in the segment */
    return buffer;

  /* Get copies of the buffer metadata to change later.
   * Calculate the missing values for the calculations,
   * they won't be changed later though. */

  trim = 0;
  osize = size = gst_buffer_get_size (buffer);

  /* no data, nothing to clip */
  if (!size)
    return buffer;

  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  GST_DEBUG ("timestamp %" GST_TIME_FORMAT, GST_TIME_ARGS (timestamp));
  if (GST_BUFFER_DURATION_IS_VALID (buffer)) {
    duration = GST_BUFFER_DURATION (buffer);
  } else {
    change_duration = FALSE;
    duration = gst_util_uint64_scale (size / bpf, GST_SECOND, rate);
  }

  if (GST_BUFFER_OFFSET_IS_VALID (buffer)) {
    offset = GST_BUFFER_OFFSET (buffer);
  } else {
    change_offset = FALSE;
    offset = 0;
  }

  if (GST_BUFFER_OFFSET_END_IS_VALID (buffer)) {
    offset_end = GST_BUFFER_OFFSET_END (buffer);
  } else {
    change_offset_end = FALSE;
    offset_end = offset + size / bpf;
  }

  if (segment->format == GST_FORMAT_TIME) {
    /* Handle clipping for GST_FORMAT_TIME */

    guint64 start, stop, cstart, cstop, diff;

    start = timestamp;
    stop = timestamp + duration;

    if (gst_segment_clip (segment, GST_FORMAT_TIME,
            start, stop, &cstart, &cstop)) {

      diff = cstart - start;
      if (diff > 0) {
        timestamp = cstart;

        if (change_duration)
          duration -= diff;

        diff = gst_util_uint64_scale (diff, rate, GST_SECOND);
        if (change_offset)
          offset += diff;
        trim += diff * bpf;
        size -= diff * bpf;
      }

      diff = stop - cstop;
      if (diff > 0) {
        /* duration is always valid if stop is valid */
        duration -= diff;

        diff = gst_util_uint64_scale (diff, rate, GST_SECOND);
        if (change_offset_end)
          offset_end -= diff;
        size -= diff * bpf;
      }
    } else {
      gst_buffer_unref (buffer);
      return NULL;
    }
  } else {
    /* Handle clipping for GST_FORMAT_DEFAULT */
    guint64 start, stop, cstart, cstop, diff;

    g_return_val_if_fail (GST_BUFFER_OFFSET_IS_VALID (buffer), buffer);

    start = offset;
    stop = offset_end;

    if (gst_segment_clip (segment, GST_FORMAT_DEFAULT,
            start, stop, &cstart, &cstop)) {

      diff = cstart - start;
      if (diff > 0) {
        offset = cstart;

        timestamp = gst_util_uint64_scale (cstart, GST_SECOND, rate);

        if (change_duration)
          duration -= gst_util_uint64_scale (diff, GST_SECOND, rate);

        trim += diff * bpf;
        size -= diff * bpf;
      }

      diff = stop - cstop;
      if (diff > 0) {
        offset_end = cstop;

        if (change_duration)
          duration -= gst_util_uint64_scale (diff, GST_SECOND, rate);

        size -= diff * bpf;
      }
    } else {
      gst_buffer_unref (buffer);
      return NULL;
    }
  }

  if (trim == 0 && size == osize) {
    ret = buffer;

    if (GST_BUFFER_TIMESTAMP (ret) != timestamp) {
      ret = gst_buffer_make_writable (ret);
      GST_BUFFER_TIMESTAMP (ret) = timestamp;
    }
    if (GST_BUFFER_DURATION (ret) != duration) {
      ret = gst_buffer_make_writable (ret);
      GST_BUFFER_DURATION (ret) = duration;
    }
  } else {
    /* Get a writable buffer and apply all changes */
    GST_DEBUG ("trim %" G_GSIZE_FORMAT " size %" G_GSIZE_FORMAT, trim, size);
    ret = gst_buffer_copy_region (buffer, GST_BUFFER_COPY_ALL, trim, size);
    gst_buffer_unref (buffer);

    GST_DEBUG ("timestamp %" GST_TIME_FORMAT, GST_TIME_ARGS (timestamp));
    GST_BUFFER_TIMESTAMP (ret) = timestamp;

    if (change_duration)
      GST_BUFFER_DURATION (ret) = duration;
    if (change_offset)
      GST_BUFFER_OFFSET (ret) = offset;
    if (change_offset_end)
      GST_BUFFER_OFFSET_END (ret) = offset_end;
  }
  return ret;
}
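A typical call site for gst_audio_buffer_clip() sets up a TIME segment and lets the function drop or trim the buffer accordingly. A hedged usage sketch; the 44100 Hz rate and bpf of 4 (S16 stereo) are assumed values:

#include <gst/gst.h>
#include <gst/audio/audio.h>

/* Clip 'buffer' against a [start, stop) TIME window. Returns NULL if
 * the buffer lies completely outside it, otherwise the (possibly
 * trimmed) buffer. Ownership of 'buffer' passes to the call. */
static GstBuffer *
clip_to_window (GstBuffer * buffer, GstClockTime start, GstClockTime stop)
{
  GstSegment segment;

  gst_segment_init (&segment, GST_FORMAT_TIME);
  segment.start = start;
  segment.stop = stop;

  /* 44100 Hz, 2 channels of S16 => bpf = 2 * 2 = 4 bytes */
  return gst_audio_buffer_clip (buffer, &segment, 44100, 4);
}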
static GstFlowReturn
gst_dvbsub_overlay_chain_video (GstPad * pad, GstObject * parent,
    GstBuffer * buffer)
{
  GstDVBSubOverlay *overlay = GST_DVBSUB_OVERLAY (parent);
  GstFlowReturn ret = GST_FLOW_OK;
  gint64 start, stop;
  guint64 cstart, cstop;
  gboolean in_seg;
  GstClockTime vid_running_time, vid_running_time_end;

  if (GST_VIDEO_INFO_FORMAT (&overlay->info) == GST_VIDEO_FORMAT_UNKNOWN)
    return GST_FLOW_NOT_NEGOTIATED;

  if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
    goto missing_timestamp;

  start = GST_BUFFER_TIMESTAMP (buffer);

  GST_LOG_OBJECT (overlay,
      "Video segment: %" GST_SEGMENT_FORMAT " --- Subtitle position: %"
      GST_TIME_FORMAT " --- BUFFER: ts=%" GST_TIME_FORMAT,
      &overlay->video_segment,
      GST_TIME_ARGS (overlay->subtitle_segment.position),
      GST_TIME_ARGS (start));

  /* ignore buffers that are outside of the current segment */
  if (!GST_BUFFER_DURATION_IS_VALID (buffer)) {
    stop = GST_CLOCK_TIME_NONE;
  } else {
    stop = start + GST_BUFFER_DURATION (buffer);
  }

  in_seg = gst_segment_clip (&overlay->video_segment, GST_FORMAT_TIME,
      start, stop, &cstart, &cstop);
  if (!in_seg) {
    GST_DEBUG_OBJECT (overlay, "Buffer outside configured segment -- dropping");
    gst_buffer_unref (buffer);
    return GST_FLOW_OK;
  }

  buffer = gst_buffer_make_writable (buffer);
  GST_BUFFER_TIMESTAMP (buffer) = cstart;
  if (GST_BUFFER_DURATION_IS_VALID (buffer))
    GST_BUFFER_DURATION (buffer) = cstop - cstart;

  vid_running_time =
      gst_segment_to_running_time (&overlay->video_segment, GST_FORMAT_TIME,
      cstart);
  if (GST_BUFFER_DURATION_IS_VALID (buffer))
    vid_running_time_end =
        gst_segment_to_running_time (&overlay->video_segment, GST_FORMAT_TIME,
        cstop);
  else
    vid_running_time_end = vid_running_time;

  GST_DEBUG_OBJECT (overlay, "Video running time: %" GST_TIME_FORMAT,
      GST_TIME_ARGS (vid_running_time));

  overlay->video_segment.position = GST_BUFFER_TIMESTAMP (buffer);

  g_mutex_lock (&overlay->dvbsub_mutex);
  if (!g_queue_is_empty (overlay->pending_subtitles)) {
    DVBSubtitles *tmp, *candidate = NULL;

    while (!g_queue_is_empty (overlay->pending_subtitles)) {
      tmp = g_queue_peek_head (overlay->pending_subtitles);

      if (tmp->pts > vid_running_time_end) {
        /* For a future video frame */
        break;
      } else if (tmp->num_rects == 0) {
        /* Clear screen */
        if (overlay->current_subtitle)
          dvb_subtitles_free (overlay->current_subtitle);
        overlay->current_subtitle = NULL;
        if (candidate)
          dvb_subtitles_free (candidate);
        candidate = NULL;
        g_queue_pop_head (overlay->pending_subtitles);
        dvb_subtitles_free (tmp);
        tmp = NULL;
      } else if (tmp->pts + tmp->page_time_out * GST_SECOND *
          ABS (overlay->subtitle_segment.rate) >= vid_running_time) {
        if (candidate)
          dvb_subtitles_free (candidate);
        candidate = tmp;
        g_queue_pop_head (overlay->pending_subtitles);
      } else {
        /* Too late */
        dvb_subtitles_free (tmp);
        tmp = NULL;
        g_queue_pop_head (overlay->pending_subtitles);
      }
    }

    if (candidate) {
      GST_DEBUG_OBJECT (overlay,
          "Time to show the next subtitle page (%" GST_TIME_FORMAT " >= %"
          GST_TIME_FORMAT ") - it has %u regions",
          GST_TIME_ARGS (vid_running_time), GST_TIME_ARGS (candidate->pts),
          candidate->num_rects);
      dvb_subtitles_free (overlay->current_subtitle);
      overlay->current_subtitle = candidate;
      if (overlay->current_comp)
        gst_video_overlay_composition_unref (overlay->current_comp);
      overlay->current_comp =
          gst_dvbsub_overlay_subs_to_comp (overlay, overlay->current_subtitle);
    }
  }

  /* Check that we haven't hit the fallback timeout for current subtitle page */
  if (overlay->current_subtitle
      && vid_running_time >
      (overlay->current_subtitle->pts +
          overlay->current_subtitle->page_time_out * GST_SECOND *
          ABS (overlay->subtitle_segment.rate))) {
    GST_INFO_OBJECT (overlay,
        "Subtitle page not redefined before fallback page_time_out of %u seconds (missed data?) - deleting current page",
        overlay->current_subtitle->page_time_out);
    dvb_subtitles_free (overlay->current_subtitle);
    overlay->current_subtitle = NULL;
  }

  /* Now render it */
  if (g_atomic_int_get (&overlay->enable) && overlay->current_subtitle) {
    GstVideoFrame frame;

    g_assert (overlay->current_comp);
    if (overlay->attach_compo_to_buffer) {
      GST_DEBUG_OBJECT (overlay, "Attaching overlay image to video buffer");
      gst_buffer_add_video_overlay_composition_meta (buffer,
          overlay->current_comp);
    } else {
      GST_DEBUG_OBJECT (overlay, "Blending overlay image to video buffer");
      gst_video_frame_map (&frame, &overlay->info, buffer, GST_MAP_READWRITE);
      gst_video_overlay_composition_blend (overlay->current_comp, &frame);
      gst_video_frame_unmap (&frame);
    }
  }
  g_mutex_unlock (&overlay->dvbsub_mutex);

  ret = gst_pad_push (overlay->srcpad, buffer);

  return ret;

missing_timestamp:
  {
    GST_WARNING_OBJECT (overlay, "video buffer without timestamp, discarding");
    gst_buffer_unref (buffer);
    return GST_FLOW_OK;
  }
}
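The in_seg test above is the standard gst_segment_clip() pattern: compute a stop time when the duration is known, clip the range against the segment, and drop the buffer if it falls outside entirely. The same step as a helper, assuming the caller has already checked that the buffer has a valid timestamp:

#include <gst/gst.h>

/* Clip a buffer's [timestamp, timestamp + duration) range against a
 * TIME segment. Returns FALSE if the buffer is entirely outside the
 * segment (caller should drop it); otherwise *cstart and *cstop hold
 * the clipped range. */
static gboolean
clip_buffer_to_segment (const GstSegment * segment, GstBuffer * buffer,
    guint64 * cstart, guint64 * cstop)
{
  guint64 start = GST_BUFFER_TIMESTAMP (buffer);
  guint64 stop = GST_BUFFER_DURATION_IS_VALID (buffer) ?
      start + GST_BUFFER_DURATION (buffer) : GST_CLOCK_TIME_NONE;

  return gst_segment_clip (segment, GST_FORMAT_TIME, start, stop,
      cstart, cstop);
}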
static GstFlowReturn
gst_decklink_video_sink_prepare (GstBaseSink * bsink, GstBuffer * buffer)
{
  GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (bsink);
  GstVideoFrame vframe;
  IDeckLinkMutableVideoFrame *frame;
  guint8 *outdata, *indata;
  GstFlowReturn flow_ret;
  HRESULT ret;
  GstClockTime timestamp, duration;
  GstClockTime running_time, running_time_duration;
  gint i;
  GstClock *clock;

  GST_DEBUG_OBJECT (self, "Preparing buffer %p", buffer);

  // FIXME: Handle no timestamps
  if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer)) {
    return GST_FLOW_ERROR;
  }

  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  duration = GST_BUFFER_DURATION (buffer);
  if (duration == GST_CLOCK_TIME_NONE) {
    duration =
        gst_util_uint64_scale_int (GST_SECOND, self->info.fps_d,
        self->info.fps_n);
  }
  running_time =
      gst_segment_to_running_time (&GST_BASE_SINK_CAST (self)->segment,
      GST_FORMAT_TIME, timestamp);
  running_time_duration =
      gst_segment_to_running_time (&GST_BASE_SINK_CAST (self)->segment,
      GST_FORMAT_TIME, timestamp + duration) - running_time;

  // FIXME: https://bugzilla.gnome.org/show_bug.cgi?id=742916
  // We need to drop late buffers here immediately instead of
  // potentially overflowing the internal queue of the hardware
  clock = gst_element_get_clock (GST_ELEMENT_CAST (self));
  if (clock) {
    GstClockTime clock_running_time, base_time, clock_time, latency,
        max_lateness;

    base_time = gst_element_get_base_time (GST_ELEMENT_CAST (self));
    clock_time = gst_clock_get_time (clock);
    if (base_time != GST_CLOCK_TIME_NONE && clock_time != GST_CLOCK_TIME_NONE) {
      clock_running_time = clock_time - base_time;
      latency = gst_base_sink_get_latency (GST_BASE_SINK_CAST (self));
      max_lateness = gst_base_sink_get_max_lateness (GST_BASE_SINK_CAST (self));

      if (clock_running_time >
          running_time + running_time_duration + latency + max_lateness) {
        GST_DEBUG_OBJECT (self,
            "Late buffer: %" GST_TIME_FORMAT " > %" GST_TIME_FORMAT,
            GST_TIME_ARGS (clock_running_time),
            GST_TIME_ARGS (running_time + running_time_duration));

        if (self->last_render_time == GST_CLOCK_TIME_NONE
            || (self->last_render_time < clock_running_time
                && clock_running_time - self->last_render_time >= GST_SECOND)) {
          GST_DEBUG_OBJECT (self,
              "Rendering frame nonetheless because we had none for more than 1s");
          running_time = clock_running_time;
          running_time_duration = 0;
        } else {
          GST_WARNING_OBJECT (self, "Dropping frame");
          gst_object_unref (clock);
          return GST_FLOW_OK;
        }
      }
    }

    gst_object_unref (clock);
  }
  self->last_render_time = running_time;

  ret = self->output->output->CreateVideoFrame (self->info.width,
      self->info.height, self->info.stride[0], bmdFormat8BitYUV,
      bmdFrameFlagDefault, &frame);
  if (ret != S_OK) {
    GST_ELEMENT_ERROR (self, STREAM, FAILED,
        (NULL), ("Failed to create video frame: 0x%08x", ret));
    return GST_FLOW_ERROR;
  }

  if (!gst_video_frame_map (&vframe, &self->info, buffer, GST_MAP_READ)) {
    GST_ERROR_OBJECT (self, "Failed to map video frame");
    flow_ret = GST_FLOW_ERROR;
    goto out;
  }

  frame->GetBytes ((void **) &outdata);
  indata = (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (&vframe, 0);
  for (i = 0; i < self->info.height; i++) {
    memcpy (outdata, indata, GST_VIDEO_FRAME_WIDTH (&vframe) * 2);
    indata += GST_VIDEO_FRAME_PLANE_STRIDE (&vframe, 0);
    outdata += frame->GetRowBytes ();
  }
  gst_video_frame_unmap (&vframe);

  convert_to_internal_clock (self, &running_time, &running_time_duration);

  GST_LOG_OBJECT (self, "Scheduling video frame %p at %" GST_TIME_FORMAT
      " with duration %" GST_TIME_FORMAT, frame, GST_TIME_ARGS (running_time),
      GST_TIME_ARGS (running_time_duration));

  ret = self->output->output->ScheduleVideoFrame (frame,
      running_time, running_time_duration, GST_SECOND);
  if (ret != S_OK) {
    GST_ELEMENT_ERROR (self, STREAM, FAILED,
        (NULL), ("Failed to schedule frame: 0x%08x", ret));
    flow_ret = GST_FLOW_ERROR;
    goto out;
  }

  flow_ret = GST_FLOW_OK;

out:

  frame->Release ();

  return flow_ret;
}
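The lateness check in this older variant derives the clock's running time as clock_time - base_time and considers a frame late once that exceeds running_time + duration + latency + max-lateness. A sketch of that decision on its own, directly following the logic above:

#include <gst/gst.h>

/* Decide whether a frame scheduled at 'running_time' with 'duration'
 * is already too late to render, given the element's clock, latency
 * and max-lateness. Returns FALSE when no clock or times are known. */
static gboolean
frame_is_late (GstElement * element, GstClockTime running_time,
    GstClockTime duration, GstClockTime latency, GstClockTime max_lateness)
{
  GstClock *clock = gst_element_get_clock (element);
  GstClockTime base_time, clock_time;
  gboolean late = FALSE;

  if (clock == NULL)
    return FALSE;

  base_time = gst_element_get_base_time (element);
  clock_time = gst_clock_get_time (clock);
  if (base_time != GST_CLOCK_TIME_NONE && clock_time != GST_CLOCK_TIME_NONE) {
    GstClockTime clock_running_time = clock_time - base_time;

    late = clock_running_time >
        running_time + duration + latency + max_lateness;
  }

  gst_object_unref (clock);
  return late;
}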
Example #19
#ifdef __SYMBIAN32__
EXPORT_C
#endif

GstBuffer *
gst_audio_buffer_clip (GstBuffer * buffer, GstSegment * segment, gint rate,
    gint frame_size)
{
  GstBuffer *ret;
  GstClockTime timestamp = GST_CLOCK_TIME_NONE, duration = GST_CLOCK_TIME_NONE;
  guint64 offset = GST_BUFFER_OFFSET_NONE, offset_end = GST_BUFFER_OFFSET_NONE;
  guint8 *data;
  guint size;

  gboolean change_duration = TRUE, change_offset = TRUE, change_offset_end =
      TRUE;

  g_return_val_if_fail (segment->format == GST_FORMAT_TIME ||
      segment->format == GST_FORMAT_DEFAULT, buffer);
  g_return_val_if_fail (GST_IS_BUFFER (buffer), NULL);

  if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
    /* No timestamp - assume the buffer is completely in the segment */
    return buffer;

  /* Get copies of the buffer metadata to change later. 
   * Calculate the missing values for the calculations,
   * they won't be changed later though. */

  data = GST_BUFFER_DATA (buffer);
  size = GST_BUFFER_SIZE (buffer);

  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  if (GST_BUFFER_DURATION_IS_VALID (buffer)) {
    duration = GST_BUFFER_DURATION (buffer);
  } else {
    change_duration = FALSE;
    duration = gst_util_uint64_scale (size / frame_size, GST_SECOND, rate);
  }

  if (GST_BUFFER_OFFSET_IS_VALID (buffer)) {
    offset = GST_BUFFER_OFFSET (buffer);
  } else {
    change_offset = FALSE;
    offset = 0;
  }

  if (GST_BUFFER_OFFSET_END_IS_VALID (buffer)) {
    offset_end = GST_BUFFER_OFFSET_END (buffer);
  } else {
    change_offset_end = FALSE;
    offset_end = offset + size / frame_size;
  }

  if (segment->format == GST_FORMAT_TIME) {
    /* Handle clipping for GST_FORMAT_TIME */

    gint64 start, stop, cstart, cstop, diff;

    start = timestamp;
    stop = timestamp + duration;

    if (gst_segment_clip (segment, GST_FORMAT_TIME,
            start, stop, &cstart, &cstop)) {

      diff = cstart - start;
      if (diff > 0) {
        timestamp = cstart;

        if (change_duration)
          duration -= diff;

        diff = gst_util_uint64_scale (diff, rate, GST_SECOND);
        if (change_offset)
          offset += diff;
        data += diff * frame_size;
        size -= diff * frame_size;
      }

      diff = stop - cstop;
      if (diff > 0) {
        /* duration is always valid if stop is valid */
        duration -= diff;

        diff = gst_util_uint64_scale (diff, rate, GST_SECOND);
        if (change_offset_end)
          offset_end -= diff;
        size -= diff * frame_size;
      }
    } else {
      gst_buffer_unref (buffer);
      return NULL;
    }
  } else {
    /* Handle clipping for GST_FORMAT_DEFAULT */
    gint64 start, stop, cstart, cstop, diff;

    g_return_val_if_fail (GST_BUFFER_OFFSET_IS_VALID (buffer), buffer);

    start = offset;
    stop = offset_end;

    if (gst_segment_clip (segment, GST_FORMAT_DEFAULT,
            start, stop, &cstart, &cstop)) {

      diff = cstart - start;
      if (diff > 0) {
        offset = cstart;

        timestamp = gst_util_uint64_scale (cstart, GST_SECOND, rate);

        if (change_duration)
          duration -= gst_util_uint64_scale (diff, GST_SECOND, rate);

        data += diff * frame_size;
        size -= diff * frame_size;
      }

      diff = stop - cstop;
      if (diff > 0) {
        offset_end = cstop;

        if (change_duration)
          duration -= gst_util_uint64_scale (diff, GST_SECOND, rate);

        size -= diff * frame_size;
      }
    } else {
      gst_buffer_unref (buffer);
      return NULL;
    }
  }

  /* Get a metadata writable buffer and apply all changes */
  ret = gst_buffer_make_metadata_writable (buffer);

  GST_BUFFER_TIMESTAMP (ret) = timestamp;
  GST_BUFFER_SIZE (ret) = size;
  GST_BUFFER_DATA (ret) = data;

  if (change_duration)
    GST_BUFFER_DURATION (ret) = duration;
  if (change_offset)
    GST_BUFFER_OFFSET (ret) = offset;
  if (change_offset_end)
    GST_BUFFER_OFFSET_END (ret) = offset_end;

  return ret;
}
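
A minimal usage sketch for the helper above (not part of the original sources): a 0.10-style chain function that clips incoming audio against the element's configured segment. The element type and fields (MyFilter, filter->segment, filter->rate, filter->bpf, filter->srcpad) are placeholder assumptions.

static GstFlowReturn
my_filter_chain (GstPad * pad, GstBuffer * buf)
{
  MyFilter *filter = MY_FILTER (GST_PAD_PARENT (pad));

  /* gst_audio_buffer_clip () consumes buf and returns either a buffer
   * trimmed to the segment or NULL when nothing of it lies inside it */
  buf = gst_audio_buffer_clip (buf, &filter->segment, filter->rate,
      filter->bpf /* frame size in bytes: channels * (width / 8) */ );
  if (buf == NULL)
    return GST_FLOW_OK;

  return gst_pad_push (filter->srcpad, buf);
}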
Ejemplo n.º 20
0
static GstFlowReturn
gst_dvdlpcmdec_chain_raw (GstPad * pad, GstBuffer * buf)
{
  GstDvdLpcmDec *dvdlpcmdec;
  guint8 *data;
  guint size;
  GstFlowReturn ret;
  guint samples = 0;

  dvdlpcmdec = GST_DVDLPCMDEC (gst_pad_get_parent (pad));

  size = GST_BUFFER_SIZE (buf);
  data = GST_BUFFER_DATA (buf);

  GST_LOG_OBJECT (dvdlpcmdec,
      "got buffer %p of size %d with ts %" GST_TIME_FORMAT,
      buf, size, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)));

  if (dvdlpcmdec->rate == 0)
    goto not_negotiated;

  if (GST_BUFFER_TIMESTAMP_IS_VALID (buf))
    dvdlpcmdec->timestamp = GST_BUFFER_TIMESTAMP (buf);

  /* We don't currently do anything at all regarding emphasis, mute or
   * dynamic_range - I'm not sure what they're for */
  switch (dvdlpcmdec->width) {
    case 16:
    {
      /* We can just pass 16-bits straight through intact, once we set
       * appropriate things on the buffer */
      samples = size / dvdlpcmdec->channels / 2;
      if (samples < 1)
        goto drop;
      buf = gst_buffer_make_metadata_writable (buf);
      break;
    }
    case 20:
    {
      /* Allocate a new buffer and copy 20-bit width to 24-bit */
      gint64 count = size / 10;
      gint64 i;
      guint8 *src;
      guint8 *dest;
      GstBuffer *outbuf;
      GstCaps *bufcaps = GST_PAD_CAPS (dvdlpcmdec->srcpad);

      /* number of 20-bit samples in this buffer; assigned to the outer
       * counter so that update_timestamps () below sees it */
      samples = size * 8 / 20;
      if (samples < 1)
        goto drop;

      ret = gst_pad_alloc_buffer_and_set_caps (dvdlpcmdec->srcpad, 0,
          samples * 3, bufcaps, &outbuf);

      if (ret != GST_FLOW_OK)
        goto buffer_alloc_failed;

      gst_buffer_copy_metadata (outbuf, buf, GST_BUFFER_COPY_TIMESTAMPS);

      /* adjust samples so we can calc the new timestamp */
      samples = samples / dvdlpcmdec->channels;

      src = data;
      dest = GST_BUFFER_DATA (outbuf);

      /* Copy 20-bit LPCM format to 24-bit buffers, with 0x00 in the lowest
       * nibble. Note that the first 2 bytes are already correct */
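      /* Each 10-byte input group carries four 20-bit samples: bytes 0-7 are
       * the 16 MSBs of samples 0-3, byte 8 holds the low nibbles of samples
       * 0 (high nibble) and 1 (low nibble), and byte 9 does the same for
       * samples 2 and 3. */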
      for (i = 0; i < count; i++) {
        dest[0] = src[0];
        dest[1] = src[1];
        dest[2] = src[8] & 0xf0;
        dest[3] = src[2];
        dest[4] = src[3];
        dest[5] = (src[8] & 0x0f) << 4;
        dest[6] = src[4];
        dest[7] = src[5];
        dest[8] = src[9] & 0xf0;
        dest[9] = src[6];
        dest[10] = src[7];
        dest[11] = (src[9] & 0x0f) << 4;

        src += 10;
        dest += 12;
      }

      gst_buffer_unref (buf);
      buf = outbuf;
      break;
    }
    case 24:
    {
      /* Rearrange 24-bit LPCM format in-place. Note that the first 2
       * and last byte are already correct */
      guint count = size / 12;
      gint i;
      guint8 *src;

      samples = size / dvdlpcmdec->channels / 3;

      if (samples < 1)
        goto drop;

      /* Ensure our output buffer is writable */
      buf = gst_buffer_make_writable (buf);

      src = GST_BUFFER_DATA (buf);
      for (i = 0; i < count; i++) {
        guint8 tmp;

        tmp = src[10];
        src[10] = src[7];
        src[7] = src[5];
        src[5] = src[9];
        src[9] = src[6];
        src[6] = src[4];
        src[4] = src[3];
        src[3] = src[2];
        src[2] = src[8];
        src[8] = tmp;

        src += 12;
      }
      break;
    }
    default:
      goto invalid_width;
  }

  /* Set appropriate caps on it to pass downstream */
  gst_buffer_set_caps (buf, GST_PAD_CAPS (dvdlpcmdec->srcpad));
  update_timestamps (dvdlpcmdec, buf, samples);

  ret = gst_pad_push (dvdlpcmdec->srcpad, buf);

done:
  gst_object_unref (dvdlpcmdec);

  return ret;

  /* ERRORS */
drop:
  {
    GST_DEBUG_OBJECT (dvdlpcmdec, "Buffer of size %u is too small. Dropping",
        GST_BUFFER_SIZE (buf));
    gst_buffer_unref (buf);
    ret = GST_FLOW_OK;
    goto done;
  }
not_negotiated:
  {
    GST_ELEMENT_ERROR (dvdlpcmdec, STREAM, FORMAT, (NULL),
        ("Buffer pushed before negotiation"));
    gst_buffer_unref (buf);
    ret = GST_FLOW_NOT_NEGOTIATED;
    goto done;
  }
buffer_alloc_failed:
  {
    GST_ELEMENT_ERROR (dvdlpcmdec, RESOURCE, FAILED, (NULL),
        ("Buffer allocation failed"));
    gst_buffer_unref (buf);
    goto done;
  }
invalid_width:
  {
    GST_ELEMENT_ERROR (dvdlpcmdec, STREAM, WRONG_TYPE, (NULL),
        ("Invalid sample width configured"));
    gst_buffer_unref (buf);
    ret = GST_FLOW_NOT_NEGOTIATED;
    goto done;
  }
}
Ejemplo n.º 21
0
static GstFlowReturn
gst_base_video_decoder_chain (GstPad * pad, GstBuffer * buf)
{
  GstBaseVideoDecoder *base_video_decoder;
  GstBaseVideoDecoderClass *base_video_decoder_class;
  GstFlowReturn ret;

  GST_DEBUG ("chain %" GST_TIME_FORMAT " duration %" GST_TIME_FORMAT,
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (buf)));

#if 0
  /* requiring the pad to be negotiated makes it impossible to use
   * oggdemux or filesrc ! decoder */
  if (!gst_pad_is_negotiated (pad)) {
    GST_DEBUG ("not negotiated");
    return GST_FLOW_NOT_NEGOTIATED;
  }
#endif

  base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad));
  base_video_decoder_class =
      GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);

  GST_DEBUG_OBJECT (base_video_decoder, "chain");

  if (!base_video_decoder->have_segment) {
    GstEvent *event;
    GstFlowReturn ret;

    GST_WARNING
        ("Received buffer without a new-segment. Assuming timestamps start from 0.");

    gst_segment_set_newsegment_full (&base_video_decoder->segment,
        FALSE, 1.0, 1.0, GST_FORMAT_TIME, 0, GST_CLOCK_TIME_NONE, 0);
    base_video_decoder->have_segment = TRUE;

    event = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0,
        GST_CLOCK_TIME_NONE, 0);

    ret =
        gst_pad_push_event (GST_BASE_VIDEO_DECODER_SRC_PAD (base_video_decoder),
        event);
    if (!ret) {
      GST_ERROR ("new segment event ret=%d", ret);
      return GST_FLOW_ERROR;
    }
  }

  if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT))) {
    GST_DEBUG_OBJECT (base_video_decoder, "received DISCONT buffer");
    gst_base_video_decoder_flush (base_video_decoder);
  }

  if (base_video_decoder->current_frame == NULL) {
    base_video_decoder->current_frame =
        gst_base_video_decoder_new_frame (base_video_decoder);
  }

  base_video_decoder->input_offset += GST_BUFFER_SIZE (buf);
  if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)) {
    gst_base_video_decoder_add_timestamp (base_video_decoder, buf);
  }


  if (base_video_decoder->packetized) {
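    /* Packetized input: each buffer is assumed to contain exactly one
     * frame, so it is handed to the subclass directly instead of going
     * through the parsing adapter below. */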
    base_video_decoder->current_frame->sink_buffer = buf;

    ret = gst_base_video_decoder_have_frame (base_video_decoder, TRUE, NULL);
  } else {

    gst_adapter_push (base_video_decoder->input_adapter, buf);

    ret = gst_base_video_decoder_drain (base_video_decoder, FALSE);
  }

  gst_object_unref (base_video_decoder);
  return ret;
}
Ejemplo n.º 22
0
/***********************************************************************************
 * chain
 ***********************************************************************************/
static GstFlowReturn videodecoder_chain(GstPad *pad, GstBuffer *buf)
{
    VideoDecoder  *decoder = VIDEODECODER(GST_PAD_PARENT(pad));
    BaseDecoder   *base = BASEDECODER(decoder);
    GstFlowReturn  result = GST_FLOW_OK;
    int            num_dec = NO_DATA_USED;

    if (base->is_flushing)  // Reject buffers in flushing state.
    {
        result = GST_FLOW_WRONG_STATE;
        goto _exit;
    }

    if (!base->is_initialized && !videodecoder_configure(decoder, GST_PAD_CAPS(pad)))
    {
        result = GST_FLOW_ERROR;
        goto _exit;
    }

    if (!base->is_hls)
    {
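        // Non-HLS input: copy the compressed data into a freshly allocated
        // AVPacket so that it is decoupled from the GstBuffer.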
        if (av_new_packet(&decoder->packet, GST_BUFFER_SIZE(buf)) == 0)
        {
            memcpy(decoder->packet.data, GST_BUFFER_DATA(buf), GST_BUFFER_SIZE(buf));
            if (GST_BUFFER_TIMESTAMP_IS_VALID(buf))
                base->context->reordered_opaque = GST_BUFFER_TIMESTAMP(buf);
            else
                base->context->reordered_opaque = AV_NOPTS_VALUE;
            num_dec = avcodec_decode_video2(base->context, base->frame, &decoder->frame_finished, &decoder->packet);
            av_free_packet(&decoder->packet);
        }
        else
        {
            result = GST_FLOW_ERROR;
            goto _exit;
        }
    }
    else
    {
        av_init_packet(&decoder->packet);
        decoder->packet.data = GST_BUFFER_DATA(buf);
        decoder->packet.size = GST_BUFFER_SIZE(buf);
        if (GST_BUFFER_TIMESTAMP_IS_VALID(buf))
            base->context->reordered_opaque = GST_BUFFER_TIMESTAMP(buf);
        else
            base->context->reordered_opaque = AV_NOPTS_VALUE;

        num_dec = avcodec_decode_video2(base->context, base->frame, &decoder->frame_finished, &decoder->packet);
    }

    if (num_dec < 0)
    {
        //        basedecoder_flush(base);
#ifdef DEBUG_OUTPUT
        g_print ("videodecoder_chain error: %s\n", avelement_error_to_string(AVELEMENT(decoder), num_dec));
#endif
        goto _exit;
    }

    if (decoder->frame_finished > 0)
    {
        if (!videodecoder_configure_sourcepad(decoder))
            result = GST_FLOW_ERROR;
        else
        {
            GstBuffer *outbuf = NULL;
            result = gst_pad_alloc_buffer_and_set_caps(base->srcpad, base->context->frame_number,
                                                       decoder->frame_size, GST_PAD_CAPS(base->srcpad), &outbuf);
            if (result != GST_FLOW_OK)
            {
                if (result != GST_FLOW_WRONG_STATE)
                {
                    gst_element_message_full(GST_ELEMENT(decoder), GST_MESSAGE_ERROR,
                                             GST_STREAM_ERROR, GST_STREAM_ERROR_DECODE,
                                             ("Decoded video buffer allocation failed"), NULL,
                                             ("videodecoder.c"), ("videodecoder_chain"), 0);
                }
            }
            else
            {
                if (base->frame->reordered_opaque != AV_NOPTS_VALUE)
                {
                    GST_BUFFER_TIMESTAMP(outbuf) = base->frame->reordered_opaque;
                    GST_BUFFER_DURATION(outbuf) = GST_BUFFER_DURATION(buf); // Duration for video usually same
                }
                GST_BUFFER_SIZE(outbuf) = decoder->frame_size;

                // Copy image by parts from different arrays.
                memcpy(GST_BUFFER_DATA(outbuf),                     base->frame->data[0], decoder->u_offset);
                memcpy(GST_BUFFER_DATA(outbuf) + decoder->u_offset, base->frame->data[1], decoder->uv_blocksize);
                memcpy(GST_BUFFER_DATA(outbuf) + decoder->v_offset, base->frame->data[2], decoder->uv_blocksize);

                GST_BUFFER_OFFSET_END(outbuf) = GST_BUFFER_OFFSET_NONE;

                if (decoder->discont || GST_BUFFER_IS_DISCONT(buf))
                {
#ifdef DEBUG_OUTPUT
                    g_print("Video discont: frame size=%dx%d\n", base->context->width, base->context->height);
#endif
                    GST_BUFFER_FLAG_SET(outbuf, GST_BUFFER_FLAG_DISCONT);
                    decoder->discont = FALSE;
                }


#ifdef VERBOSE_DEBUG
                g_print("videodecoder: pushing buffer ts=%.4f sec", (double)GST_BUFFER_TIMESTAMP(outbuf)/GST_SECOND);
#endif
                result = gst_pad_push(base->srcpad, outbuf);
#ifdef VERBOSE_DEBUG
                g_print(" done, res=%s\n", gst_flow_get_name(result));
#endif
            }
        }
    }

_exit:
// INLINE - gst_buffer_unref()
    gst_buffer_unref(buf);
    return result;
}
Ejemplo n.º 23
0
static GstFlowReturn
dvdspu_handle_vid_buffer (GstDVDSpu * dvdspu, GstBuffer * buf)
{
  GstClockTime new_ts;
  GstFlowReturn ret;
  gboolean using_ref = FALSE;

  DVD_SPU_LOCK (dvdspu);

  if (buf == NULL) {
    GstClockTime next_ts = dvdspu->video_seg.position;

    next_ts += gst_util_uint64_scale_int (GST_SECOND,
        dvdspu->spu_state.info.fps_d, dvdspu->spu_state.info.fps_n);

    /* NULL buffer was passed - use the reference frame and update the timestamp,
     * or else there's nothing to draw, and just return GST_FLOW_OK */
    if (dvdspu->ref_frame == NULL) {
      dvdspu->video_seg.position = next_ts;
      goto no_ref_frame;
    }

    buf = gst_buffer_copy (dvdspu->ref_frame);

#if 0
    g_print ("Duping frame %" GST_TIME_FORMAT " with new TS %" GST_TIME_FORMAT
        "\n", GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
        GST_TIME_ARGS (next_ts));
#endif

    GST_BUFFER_TIMESTAMP (buf) = next_ts;
    using_ref = TRUE;
  }

  if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)) {
    dvdspu->video_seg.position = GST_BUFFER_TIMESTAMP (buf);
  }

  new_ts = gst_segment_to_running_time (&dvdspu->video_seg, GST_FORMAT_TIME,
      dvdspu->video_seg.position);

#if 0
  g_print ("TS %" GST_TIME_FORMAT " running: %" GST_TIME_FORMAT "\n",
      GST_TIME_ARGS (dvdspu->video_seg.position), GST_TIME_ARGS (new_ts));
#endif

  gst_dvd_spu_advance_spu (dvdspu, new_ts);

  /* If we have an active SPU command set, we store a copy of the frame in case
   * we hit a still and need to draw on it. Otherwise, a reference is
   * sufficient in case we later encounter a still */
  if ((dvdspu->spu_state.flags & SPU_STATE_FORCED_DSP) ||
      ((dvdspu->spu_state.flags & SPU_STATE_FORCED_ONLY) == 0 &&
          (dvdspu->spu_state.flags & SPU_STATE_DISPLAY))) {
    if (using_ref == FALSE) {
      GstBuffer *copy;

      /* Take a copy in case we hit a still frame and need the pristine 
       * frame around */
      copy = gst_buffer_copy (buf);
      gst_buffer_replace (&dvdspu->ref_frame, copy);
      gst_buffer_unref (copy);
    }

    /* Render the SPU overlay onto the buffer */
    buf = gst_buffer_make_writable (buf);

    gstspu_render (dvdspu, buf);
  } else {
    if (using_ref == FALSE) {
      /* Not going to draw anything on this frame, just store a reference
       * in case we hit a still frame and need it */
      gst_buffer_replace (&dvdspu->ref_frame, buf);
    }
  }

  if (dvdspu->spu_state.flags & SPU_STATE_STILL_FRAME) {
    GST_DEBUG_OBJECT (dvdspu, "Outputting buffer with TS %" GST_TIME_FORMAT
        "from chain while in still",
        GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)));
  }

  DVD_SPU_UNLOCK (dvdspu);

  /* just push out the incoming buffer without touching it */
  ret = gst_pad_push (dvdspu->srcpad, buf);

  return ret;

no_ref_frame:

  DVD_SPU_UNLOCK (dvdspu);

  return GST_FLOW_OK;
}
Ejemplo n.º 24
0
static GstFlowReturn
gst_voamrwbenc_chain (GstPad * pad, GstBuffer * buffer)
{
  GstVoAmrWbEnc *amrwbenc;
  GstFlowReturn ret = GST_FLOW_OK;
  const int buffer_size = sizeof (short) * L_FRAME16k;

  amrwbenc = GST_VOAMRWBENC (gst_pad_get_parent (pad));

  g_return_val_if_fail (amrwbenc->handle, GST_FLOW_WRONG_STATE);

  if (amrwbenc->rate == 0 || amrwbenc->channels == 0) {
    ret = GST_FLOW_NOT_NEGOTIATED;
    goto done;
  }

  /* discontinuity clears adapter, FIXME, maybe we can set some
   * encoder flag to mask the discont. */
  if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT)) {
    gst_adapter_clear (amrwbenc->adapter);
    amrwbenc->ts = 0;
    amrwbenc->discont = TRUE;
  }

  if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
    amrwbenc->ts = GST_BUFFER_TIMESTAMP (buffer);

  ret = GST_FLOW_OK;
  gst_adapter_push (amrwbenc->adapter, buffer);

  /* Collect samples until we have enough for an output frame */
  while (gst_adapter_available (amrwbenc->adapter) >= buffer_size) {
    GstBuffer *out;
    guint8 *data;
    gint outsize;

    out = gst_buffer_new_and_alloc (buffer_size);
    GST_BUFFER_DURATION (out) = GST_SECOND * L_FRAME16k /
        (amrwbenc->rate * amrwbenc->channels);
    GST_BUFFER_TIMESTAMP (out) = amrwbenc->ts;
    if (amrwbenc->ts != -1) {
      amrwbenc->ts += GST_BUFFER_DURATION (out);
    }
    if (amrwbenc->discont) {
      GST_BUFFER_FLAG_SET (out, GST_BUFFER_FLAG_DISCONT);
      amrwbenc->discont = FALSE;
    }
    gst_buffer_set_caps (out, gst_pad_get_caps (amrwbenc->srcpad));

    data = (guint8 *) gst_adapter_peek (amrwbenc->adapter, buffer_size);

    /* encode */
    outsize =
        E_IF_encode (amrwbenc->handle, amrwbenc->bandmode, (const short *) data,
        (unsigned char *) GST_BUFFER_DATA (out), 0);

    gst_adapter_flush (amrwbenc->adapter, buffer_size);
    GST_BUFFER_SIZE (out) = outsize;

    /* play */
    if ((ret = gst_pad_push (amrwbenc->srcpad, out)) != GST_FLOW_OK)
      break;
  }

done:

  gst_object_unref (amrwbenc);
  return ret;

}
Ejemplo n.º 25
0
static GstFlowReturn
gst_decklink_audio_sink_render (GstBaseSink * bsink, GstBuffer * buffer)
{
  GstDecklinkAudioSink *self = GST_DECKLINK_AUDIO_SINK_CAST (bsink);
  GstDecklinkVideoSink *video_sink;
  GstFlowReturn flow_ret;
  HRESULT ret;
  GstClockTime timestamp, duration;
  GstClockTime running_time, running_time_duration;
  GstClockTime schedule_time, schedule_time_duration;
  GstClockTime latency, render_delay;
  GstClockTimeDiff ts_offset;
  GstMapInfo map_info;
  const guint8 *data;
  gsize len, written_all;
  gboolean discont;

  GST_DEBUG_OBJECT (self, "Rendering buffer %p", buffer);

  // FIXME: Handle no timestamps
  if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer)) {
    return GST_FLOW_ERROR;
  }

  if (GST_BASE_SINK_CAST (self)->flushing) {
    return GST_FLOW_FLUSHING;
  }
  // If we're called before output is actually started, start pre-rolling
  if (!self->output->started) {
    self->output->output->BeginAudioPreroll ();
  }

  video_sink =
      GST_DECKLINK_VIDEO_SINK (gst_object_ref (self->output->videosink));

  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  duration = GST_BUFFER_DURATION (buffer);
  discont = gst_audio_stream_align_process (self->stream_align,
      GST_BUFFER_IS_DISCONT (buffer), timestamp,
      gst_buffer_get_size (buffer) / self->info.bpf, &timestamp, &duration,
      NULL);

  if (discont && self->resampler)
    gst_audio_resampler_reset (self->resampler);

  if (GST_BASE_SINK_CAST (self)->segment.rate < 0.0) {
    GstMapInfo out_map;
    gint out_frames = gst_buffer_get_size (buffer) / self->info.bpf;
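    /* Reverse playback (negative rate): reorder the audio frames
     * back-to-front in place, swapping one frame from each end of the
     * buffer per loop iteration. */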

    buffer = gst_buffer_make_writable (gst_buffer_ref (buffer));

    gst_buffer_map (buffer, &out_map, GST_MAP_READWRITE);
    if (self->info.finfo->format == GST_AUDIO_FORMAT_S16) {
      gint16 *swap_data = (gint16 *) out_map.data;
      gint16 *swap_data_end =
          swap_data + (out_frames - 1) * self->info.channels;
      gint16 swap_tmp[16];

      while (out_frames > 0) {
        memcpy (&swap_tmp, swap_data, self->info.bpf);
        memcpy (swap_data, swap_data_end, self->info.bpf);
        memcpy (swap_data_end, &swap_tmp, self->info.bpf);

        swap_data += self->info.channels;
        swap_data_end -= self->info.channels;

        out_frames -= 2;
      }
    } else {
      gint32 *swap_data = (gint32 *) out_map.data;
      gint32 *swap_data_end =
          swap_data + (out_frames - 1) * self->info.channels;
      gint32 swap_tmp[16];

      while (out_frames > 0) {
        memcpy (&swap_tmp, swap_data, self->info.bpf);
        memcpy (swap_data, swap_data_end, self->info.bpf);
        memcpy (swap_data_end, &swap_tmp, self->info.bpf);

        swap_data += self->info.channels;
        swap_data_end -= self->info.channels;

        out_frames -= 2;
      }
    }
    gst_buffer_unmap (buffer, &out_map);
  } else {
    gst_buffer_ref (buffer);
  }

  if (self->resampler) {
    gint in_frames = gst_buffer_get_size (buffer) / self->info.bpf;
    gint out_frames =
        gst_audio_resampler_get_out_frames (self->resampler, in_frames);
    GstBuffer *out_buf = gst_buffer_new_and_alloc (out_frames * self->info.bpf);
    GstMapInfo out_map;

    gst_buffer_map (buffer, &map_info, GST_MAP_READ);
    gst_buffer_map (out_buf, &out_map, GST_MAP_READWRITE);

    gst_audio_resampler_resample (self->resampler, (gpointer *) & map_info.data,
        in_frames, (gpointer *) & out_map.data, out_frames);

    gst_buffer_unmap (out_buf, &out_map);
    gst_buffer_unmap (buffer, &map_info);
    buffer = out_buf;
  }

  gst_buffer_map (buffer, &map_info, GST_MAP_READ);
  data = map_info.data;
  len = map_info.size / self->info.bpf;
  written_all = 0;

  do {
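    /* Each pass waits while the driver already has enough queued, then
     * schedules the remaining samples and advances by however many the
     * driver actually accepted. */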
    GstClockTime timestamp_now =
        timestamp + gst_util_uint64_scale (written_all, GST_SECOND,
        self->info.rate);
    guint32 buffered_samples;
    GstClockTime buffered_time;
    guint32 written = 0;
    GstClock *clock;
    GstClockTime clock_ahead;

    if (GST_BASE_SINK_CAST (self)->flushing) {
      flow_ret = GST_FLOW_FLUSHING;
      break;
    }

    running_time =
        gst_segment_to_running_time (&GST_BASE_SINK_CAST (self)->segment,
        GST_FORMAT_TIME, timestamp_now);
    running_time_duration =
        gst_segment_to_running_time (&GST_BASE_SINK_CAST (self)->segment,
        GST_FORMAT_TIME, timestamp_now + duration) - running_time;

    /* See gst_base_sink_adjust_time() */
    latency = gst_base_sink_get_latency (bsink);
    render_delay = gst_base_sink_get_render_delay (bsink);
    ts_offset = gst_base_sink_get_ts_offset (bsink);
    running_time += latency;

    if (ts_offset < 0) {
      ts_offset = -ts_offset;
      if ((GstClockTime) ts_offset < running_time)
        running_time -= ts_offset;
      else
        running_time = 0;
    } else {
      running_time += ts_offset;
    }

    if (running_time > render_delay)
      running_time -= render_delay;
    else
      running_time = 0;

    clock = gst_element_get_clock (GST_ELEMENT_CAST (self));
    clock_ahead = 0;
    if (clock) {
      GstClockTime clock_now = gst_clock_get_time (clock);
      GstClockTime base_time =
          gst_element_get_base_time (GST_ELEMENT_CAST (self));
      gst_object_unref (clock);
      clock = NULL;

      if (clock_now != GST_CLOCK_TIME_NONE && base_time != GST_CLOCK_TIME_NONE) {
        GST_DEBUG_OBJECT (self,
            "Clock time %" GST_TIME_FORMAT ", base time %" GST_TIME_FORMAT
            ", target running time %" GST_TIME_FORMAT,
            GST_TIME_ARGS (clock_now), GST_TIME_ARGS (base_time),
            GST_TIME_ARGS (running_time));
        if (clock_now > base_time)
          clock_now -= base_time;
        else
          clock_now = 0;

        if (clock_now < running_time)
          clock_ahead = running_time - clock_now;
      }
    }

    GST_DEBUG_OBJECT (self,
        "Ahead %" GST_TIME_FORMAT " of the clock running time",
        GST_TIME_ARGS (clock_ahead));

    if (self->output->
        output->GetBufferedAudioSampleFrameCount (&buffered_samples) != S_OK)
      buffered_samples = 0;

    buffered_time =
        gst_util_uint64_scale (buffered_samples, GST_SECOND, self->info.rate);
    buffered_time /= ABS (GST_BASE_SINK_CAST (self)->segment.rate);
    GST_DEBUG_OBJECT (self,
        "Buffered %" GST_TIME_FORMAT " in the driver (%u samples)",
        GST_TIME_ARGS (buffered_time), buffered_samples);
    // We start waiting once we have more than buffer-time buffered
    if (buffered_time > self->buffer_time || clock_ahead > self->buffer_time) {
      GstClockReturn clock_ret;
      GstClockTime wait_time = running_time;

      GST_DEBUG_OBJECT (self,
          "Buffered enough, wait for preroll or the clock or flushing");

      if (wait_time < self->buffer_time)
        wait_time = 0;
      else
        wait_time -= self->buffer_time;

      flow_ret =
          gst_base_sink_do_preroll (GST_BASE_SINK_CAST (self),
          GST_MINI_OBJECT_CAST (buffer));
      if (flow_ret != GST_FLOW_OK)
        break;

      clock_ret =
          gst_base_sink_wait_clock (GST_BASE_SINK_CAST (self), wait_time, NULL);
      if (GST_BASE_SINK_CAST (self)->flushing) {
        flow_ret = GST_FLOW_FLUSHING;
        break;
      }
      // Rerun the whole loop again
      if (clock_ret == GST_CLOCK_UNSCHEDULED)
        continue;
    }

    schedule_time = running_time;
    schedule_time_duration = running_time_duration;

    gst_decklink_video_sink_convert_to_internal_clock (video_sink,
        &schedule_time, &schedule_time_duration);

    GST_LOG_OBJECT (self, "Scheduling audio samples at %" GST_TIME_FORMAT
        " with duration %" GST_TIME_FORMAT, GST_TIME_ARGS (schedule_time),
        GST_TIME_ARGS (schedule_time_duration));

    ret = self->output->output->ScheduleAudioSamples ((void *) data, len,
        schedule_time, GST_SECOND, &written);
    if (ret != S_OK) {
      bool is_running = true;
      self->output->output->IsScheduledPlaybackRunning (&is_running);

      if (is_running && !GST_BASE_SINK_CAST (self)->flushing
          && self->output->started) {
        GST_ELEMENT_ERROR (self, STREAM, FAILED, (NULL),
            ("Failed to schedule frame: 0x%08lx", (unsigned long) ret));
        flow_ret = GST_FLOW_ERROR;
        break;
      } else {
        // Ignore the error and go out of the loop here, we're shutting down
        // or are not started yet and there's nothing we can do at this point
        GST_INFO_OBJECT (self,
            "Ignoring scheduling error 0x%08x because we're not started yet"
            " or not anymore", (guint) ret);
        flow_ret = GST_FLOW_OK;
        break;
      }
    }

    len -= written;
    data += written * self->info.bpf;
    if (self->resampler)
      written_all += written * ABS (GST_BASE_SINK_CAST (self)->segment.rate);
    else
      written_all += written;

    flow_ret = GST_FLOW_OK;
  } while (len > 0);

  gst_buffer_unmap (buffer, &map_info);
  gst_buffer_unref (buffer);

  GST_DEBUG_OBJECT (self, "Returning %s", gst_flow_get_name (flow_ret));

  return flow_ret;
}
Ejemplo n.º 26
0
static GstFlowReturn
gst_multi_file_sink_write_buffer (GstMultiFileSink * multifilesink,
    GstBuffer * buffer)
{
  GstMapInfo map;
  gboolean ret;
  gboolean first_file = TRUE;

  gst_buffer_map (buffer, &map, GST_MAP_READ);

  switch (multifilesink->next_file) {
    case GST_MULTI_FILE_SINK_NEXT_BUFFER:
      if (multifilesink->files != NULL)
        first_file = FALSE;
      if (!gst_multi_file_sink_open_next_file (multifilesink))
        goto stdio_write_error;
      if (first_file == FALSE)
        gst_multi_file_sink_write_stream_headers (multifilesink);
      GST_DEBUG_OBJECT (multifilesink,
          "Writing buffer data (%" G_GSIZE_FORMAT " bytes) to new file",
          map.size);
      ret = fwrite (map.data, map.size, 1, multifilesink->file);
      if (ret != 1)
        goto stdio_write_error;

      gst_multi_file_sink_close_file (multifilesink, buffer);
      break;
    case GST_MULTI_FILE_SINK_NEXT_DISCONT:
      if (GST_BUFFER_IS_DISCONT (buffer)) {
        if (multifilesink->file)
          gst_multi_file_sink_close_file (multifilesink, buffer);
      }

      if (multifilesink->file == NULL) {
        if (!gst_multi_file_sink_open_next_file (multifilesink))
          goto stdio_write_error;
      }

      ret = fwrite (map.data, map.size, 1, multifilesink->file);
      if (ret != 1)
        goto stdio_write_error;

      break;
    case GST_MULTI_FILE_SINK_NEXT_KEY_FRAME:
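      /* Start a new file at the first key frame on or after each 10 second
       * target, counted from the first timestamped buffer. */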
      if (multifilesink->next_segment == GST_CLOCK_TIME_NONE) {
        if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer)) {
          multifilesink->next_segment = GST_BUFFER_TIMESTAMP (buffer) +
              10 * GST_SECOND;
        }
      }

      if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer) &&
          GST_BUFFER_TIMESTAMP (buffer) >= multifilesink->next_segment &&
          !GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT)) {
        if (multifilesink->file) {
          first_file = FALSE;
          gst_multi_file_sink_close_file (multifilesink, buffer);
        }
        multifilesink->next_segment += 10 * GST_SECOND;
      }

      if (multifilesink->file == NULL) {
        if (!gst_multi_file_sink_open_next_file (multifilesink))
          goto stdio_write_error;

        if (!first_file)
          gst_multi_file_sink_write_stream_headers (multifilesink);
      }

      ret = fwrite (map.data, map.size, 1, multifilesink->file);
      if (ret != 1)
        goto stdio_write_error;

      break;
    case GST_MULTI_FILE_SINK_NEXT_KEY_UNIT_EVENT:
      if (multifilesink->file == NULL) {
        if (!gst_multi_file_sink_open_next_file (multifilesink))
          goto stdio_write_error;

        /* we don't need to write stream headers here, they will be inserted in
         * the stream by upstream elements if key unit events have
         * all_headers=true set
         */
      }

      ret = fwrite (map.data, map.size, 1, multifilesink->file);

      if (ret != 1)
        goto stdio_write_error;

      break;
    case GST_MULTI_FILE_SINK_NEXT_MAX_SIZE:{
      guint64 new_size;

      new_size = multifilesink->cur_file_size + map.size;
      if (new_size > multifilesink->max_file_size) {

        GST_INFO_OBJECT (multifilesink, "current size: %" G_GUINT64_FORMAT
            ", new_size: %" G_GUINT64_FORMAT ", max. size %" G_GUINT64_FORMAT,
            multifilesink->cur_file_size, new_size,
            multifilesink->max_file_size);

        if (multifilesink->file != NULL) {
          first_file = FALSE;
          gst_multi_file_sink_close_file (multifilesink, buffer);
        }
      }

      if (multifilesink->file == NULL) {
        if (!gst_multi_file_sink_open_next_file (multifilesink))
          goto stdio_write_error;

        if (!first_file)
          gst_multi_file_sink_write_stream_headers (multifilesink);
      }

      ret = fwrite (map.data, map.size, 1, multifilesink->file);

      if (ret != 1)
        goto stdio_write_error;

      multifilesink->cur_file_size += map.size;
      break;
    }
    case GST_MULTI_FILE_SINK_NEXT_MAX_DURATION:{
      GstClockTime new_duration = 0;

      if (GST_BUFFER_PTS_IS_VALID (buffer)
          && GST_CLOCK_TIME_IS_VALID (multifilesink->file_pts)) {
        /* The new duration will extend to this new buffer pts ... */
        new_duration = GST_BUFFER_PTS (buffer) - multifilesink->file_pts;
        /* ... and duration (if it has one) */
        if (GST_BUFFER_DURATION_IS_VALID (buffer))
          new_duration += GST_BUFFER_DURATION (buffer);
      }

      if (new_duration > multifilesink->max_file_duration) {

        GST_INFO_OBJECT (multifilesink,
            "new_duration: %" G_GUINT64_FORMAT ", max. duration %"
            G_GUINT64_FORMAT, new_duration, multifilesink->max_file_duration);

        if (multifilesink->file != NULL) {
          first_file = FALSE;
          gst_multi_file_sink_close_file (multifilesink, buffer);
        }
      }

      if (multifilesink->file == NULL) {
        if (!gst_multi_file_sink_open_next_file (multifilesink))
          goto stdio_write_error;

        multifilesink->file_pts = GST_BUFFER_PTS (buffer);
        if (!first_file)
          gst_multi_file_sink_write_stream_headers (multifilesink);
      }

      ret = fwrite (map.data, map.size, 1, multifilesink->file);

      if (ret != 1)
        goto stdio_write_error;

      break;
    }
    default:
      g_assert_not_reached ();
  }

  gst_buffer_unmap (buffer, &map);
  return GST_FLOW_OK;

  /* ERRORS */
stdio_write_error:
  switch (errno) {
    case ENOSPC:
      GST_ELEMENT_ERROR (multifilesink, RESOURCE, NO_SPACE_LEFT,
          ("Error while writing to file."), ("%s", g_strerror (errno)));
      break;
    default:
      GST_ELEMENT_ERROR (multifilesink, RESOURCE, WRITE,
          ("Error while writing to file."), ("%s", g_strerror (errno)));
  }
  gst_buffer_unmap (buffer, &map);
  return GST_FLOW_ERROR;
}
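
As a hedged usage sketch (not taken from the original source), this is roughly how an application might select the max-size branch above; the element and property names follow the usual multifilesink ones, but the exact property nicks are assumptions.

GstElement *sink;

sink = gst_element_factory_make ("multifilesink", NULL);

/* assumed properties: "location" (printf-style pattern), "next-file"
 * (enum, nick assumed to be "max-size") and "max-file-size" (guint64) */
g_object_set (sink, "location", "chunk-%05d.bin",
    "max-file-size", (guint64) 16 * 1024 * 1024, NULL);
gst_util_set_object_arg (G_OBJECT (sink), "next-file", "max-size");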
Ejemplo n.º 27
0
static GstFlowReturn
gst_wavpack_enc_chain (GstPad * pad, GstBuffer * buf)
{
  GstWavpackEnc *enc = GST_WAVPACK_ENC (gst_pad_get_parent (pad));
  uint32_t sample_count = GST_BUFFER_SIZE (buf) / 4;
  GstFlowReturn ret;

  /* Reset the last flow returns to GST_FLOW_OK. They are only set to
   * something else by WavpackPackSamples(), or more specifically by
   * gst_wavpack_enc_push_block(), so they are not valid anymore. */
  enc->srcpad_last_return = enc->wvcsrcpad_last_return = GST_FLOW_OK;

  GST_DEBUG ("got %u raw samples", sample_count);

  /* check if we already have a valid WavpackContext, otherwise make one */
  if (!enc->wp_context) {
    /* create raw context */
    enc->wp_context =
        WavpackOpenFileOutput (gst_wavpack_enc_push_block, &enc->wv_id,
        (enc->correction_mode > 0) ? &enc->wvc_id : NULL);
    if (!enc->wp_context) {
      GST_ELEMENT_ERROR (enc, LIBRARY, INIT, (NULL),
          ("error creating Wavpack context"));
      gst_object_unref (enc);
      gst_buffer_unref (buf);
      return GST_FLOW_ERROR;
    }

    /* set the WavpackConfig according to our parameters */
    gst_wavpack_enc_set_wp_config (enc);

    /* set the configuration to the context now that we know everything
     * and initialize the encoder */
    if (!WavpackSetConfiguration (enc->wp_context,
            enc->wp_config, (uint32_t) (-1))
        || !WavpackPackInit (enc->wp_context)) {
      GST_ELEMENT_ERROR (enc, LIBRARY, SETTINGS, (NULL),
          ("error setting up wavpack encoding context"));
      WavpackCloseFile (enc->wp_context);
      gst_object_unref (enc);
      gst_buffer_unref (buf);
      return GST_FLOW_ERROR;
    }
    GST_DEBUG ("setup of encoding context successfull");
  }

  /* Save the timestamp of the first buffer. This will be later
   * used as offset for all following buffers */
  if (enc->timestamp_offset == GST_CLOCK_TIME_NONE) {
    if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)) {
      enc->timestamp_offset = GST_BUFFER_TIMESTAMP (buf);
      enc->next_ts = GST_BUFFER_TIMESTAMP (buf);
    } else {
      enc->timestamp_offset = 0;
      enc->next_ts = 0;
    }
  }

  /* Check if we have a continuous stream; if not, drop some samples or the
   * whole buffer, or insert some silence samples */
  if (enc->next_ts != GST_CLOCK_TIME_NONE &&
      GST_BUFFER_TIMESTAMP (buf) < enc->next_ts) {
    guint64 diff = enc->next_ts - GST_BUFFER_TIMESTAMP (buf);
    guint64 diff_bytes;

    GST_WARNING_OBJECT (enc, "Buffer is older than previous "
        "timestamp + duration (%" GST_TIME_FORMAT "< %" GST_TIME_FORMAT
        "), cannot handle. Clipping buffer.",
        GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
        GST_TIME_ARGS (enc->next_ts));

    diff_bytes =
        GST_CLOCK_TIME_TO_FRAMES (diff, enc->samplerate) * enc->channels * 2;
    if (diff_bytes >= GST_BUFFER_SIZE (buf)) {
      gst_buffer_unref (buf);
      return GST_FLOW_OK;
    }
    buf = gst_buffer_make_metadata_writable (buf);
    GST_BUFFER_DATA (buf) += diff_bytes;
    GST_BUFFER_SIZE (buf) -= diff_bytes;

    GST_BUFFER_TIMESTAMP (buf) += diff;
    if (GST_BUFFER_DURATION_IS_VALID (buf))
      GST_BUFFER_DURATION (buf) -= diff;
  }

  /* Allow a diff of at most 5 ms */
  if (enc->next_ts != GST_CLOCK_TIME_NONE
      && GST_BUFFER_TIMESTAMP_IS_VALID (buf)) {
    if (GST_BUFFER_TIMESTAMP (buf) != enc->next_ts &&
        GST_BUFFER_TIMESTAMP (buf) - enc->next_ts > 5 * GST_MSECOND) {
      GST_WARNING_OBJECT (enc,
          "Discontinuity detected: %" G_GUINT64_FORMAT " > %" G_GUINT64_FORMAT,
          GST_BUFFER_TIMESTAMP (buf) - enc->next_ts, 5 * GST_MSECOND);

      WavpackFlushSamples (enc->wp_context);
      enc->timestamp_offset += (GST_BUFFER_TIMESTAMP (buf) - enc->next_ts);
    }
  }

  if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)
      && GST_BUFFER_DURATION_IS_VALID (buf))
    enc->next_ts = GST_BUFFER_TIMESTAMP (buf) + GST_BUFFER_DURATION (buf);
  else
    enc->next_ts = GST_CLOCK_TIME_NONE;

  if (enc->need_channel_remap) {
    buf = gst_buffer_make_writable (buf);
    gst_wavpack_enc_fix_channel_order (enc, (gint32 *) GST_BUFFER_DATA (buf),
        sample_count);
  }

  /* if we want to append the MD5 sum to the stream update it here
   * with the current raw samples */
  if (enc->md5) {
    g_checksum_update (enc->md5_context, GST_BUFFER_DATA (buf),
        GST_BUFFER_SIZE (buf));
  }

  /* encode and handle return values from encoding */
  if (WavpackPackSamples (enc->wp_context, (int32_t *) GST_BUFFER_DATA (buf),
          sample_count / enc->channels)) {
    GST_DEBUG ("encoding samples successful");
    ret = GST_FLOW_OK;
  } else {
    if ((enc->srcpad_last_return == GST_FLOW_RESEND) ||
        (enc->wvcsrcpad_last_return == GST_FLOW_RESEND)) {
      ret = GST_FLOW_RESEND;
    } else if ((enc->srcpad_last_return == GST_FLOW_OK) ||
        (enc->wvcsrcpad_last_return == GST_FLOW_OK)) {
      ret = GST_FLOW_OK;
    } else if ((enc->srcpad_last_return == GST_FLOW_NOT_LINKED) &&
        (enc->wvcsrcpad_last_return == GST_FLOW_NOT_LINKED)) {
      ret = GST_FLOW_NOT_LINKED;
    } else if ((enc->srcpad_last_return == GST_FLOW_WRONG_STATE) &&
        (enc->wvcsrcpad_last_return == GST_FLOW_WRONG_STATE)) {
      ret = GST_FLOW_WRONG_STATE;
    } else {
      GST_ELEMENT_ERROR (enc, LIBRARY, ENCODE, (NULL),
          ("encoding samples failed"));
      ret = GST_FLOW_ERROR;
    }
  }

  gst_buffer_unref (buf);
  gst_object_unref (enc);
  return ret;
}
Ejemplo n.º 28
0
static GstFlowReturn
gst_rsvg_dec_chain (GstPad * pad, GstBuffer * buffer)
{
  GstRsvgDec *rsvg = GST_RSVG_DEC (GST_PAD_PARENT (pad));
  gboolean completed = FALSE;
  const guint8 *data;
  guint size;
  GstFlowReturn ret = GST_FLOW_OK;

  /* first_timestamp is used slightly differently where a framerate
     is given or not.
     If there is a frame rate, it will be used as a base.
     If there is not, it will be used to keep track of the timestamp
     of the first buffer, to be used as the timestamp of the output
     buffer. When a buffer is output, first timestamp will resync to
     the next buffer's timestamp. */
  if (rsvg->first_timestamp == GST_CLOCK_TIME_NONE) {
    if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
      rsvg->first_timestamp = GST_BUFFER_TIMESTAMP (buffer);
    else if (rsvg->fps_n != 0)
      rsvg->first_timestamp = 0;
  }

  gst_adapter_push (rsvg->adapter, buffer);

  size = gst_adapter_available (rsvg->adapter);

  /* "<svg></svg>" */
  while (size >= 5 + 6 && ret == GST_FLOW_OK) {
    guint i;

    data = gst_adapter_peek (rsvg->adapter, size);
    for (i = size - 6; i >= 5; i--) {
      if (memcmp (data + i, "</svg>", 6) == 0) {
        completed = TRUE;
        size = i + 6;
        break;
      }
    }

    if (completed) {
      GstBuffer *outbuf = NULL;

      GST_LOG_OBJECT (rsvg, "have complete svg of %u bytes", size);

      data = gst_adapter_peek (rsvg->adapter, size);

      ret = gst_rsvg_decode_image (rsvg, data, size, &outbuf);
      if (ret != GST_FLOW_OK)
        break;


      if (rsvg->first_timestamp != GST_CLOCK_TIME_NONE) {
        GST_BUFFER_TIMESTAMP (outbuf) = rsvg->first_timestamp;
        GST_BUFFER_DURATION (outbuf) = GST_CLOCK_TIME_NONE;
        if (GST_BUFFER_DURATION_IS_VALID (buffer)) {
          GstClockTime end =
              GST_BUFFER_TIMESTAMP_IS_VALID (buffer) ?
              GST_BUFFER_TIMESTAMP (buffer) : rsvg->first_timestamp;
          end += GST_BUFFER_DURATION (buffer);
          GST_BUFFER_DURATION (outbuf) = end - GST_BUFFER_TIMESTAMP (outbuf);
        }
        if (rsvg->fps_n == 0) {
          rsvg->first_timestamp = GST_CLOCK_TIME_NONE;
        } else {
          GST_BUFFER_DURATION (outbuf) =
              gst_util_uint64_scale (GST_SECOND, rsvg->fps_d, rsvg->fps_n);
        }
      } else if (rsvg->fps_n != 0) {
        GST_BUFFER_TIMESTAMP (outbuf) =
            rsvg->first_timestamp + gst_util_uint64_scale (rsvg->frame_count,
            rsvg->fps_d * GST_SECOND, rsvg->fps_n);
        GST_BUFFER_DURATION (outbuf) =
            gst_util_uint64_scale (GST_SECOND, rsvg->fps_d, rsvg->fps_n);
      } else {
        GST_BUFFER_TIMESTAMP (outbuf) = rsvg->first_timestamp;
        GST_BUFFER_DURATION (outbuf) = GST_CLOCK_TIME_NONE;
      }
      rsvg->frame_count++;

      if (rsvg->need_newsegment) {
        gst_pad_push_event (rsvg->srcpad,
            gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0, -1, 0));
        rsvg->need_newsegment = FALSE;
      }

      if (rsvg->pending_events) {
        GList *l;

        for (l = rsvg->pending_events; l; l = l->next)
          gst_pad_push_event (rsvg->srcpad, l->data);
        g_list_free (rsvg->pending_events);
        rsvg->pending_events = NULL;
      }

      GST_LOG_OBJECT (rsvg, "image rendered okay");

      ret = gst_pad_push (rsvg->srcpad, outbuf);
      if (ret != GST_FLOW_OK)
        break;

      gst_adapter_flush (rsvg->adapter, size);
      size = gst_adapter_available (rsvg->adapter);
      continue;
    } else {
      break;
    }
  }

  return ret;
}
Ejemplo n.º 29
0
static void
do_perfect_stream_test (guint rate, guint width, gdouble drop_probability,
    gdouble inject_probability)
{
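  /* Builds audiotestsrc ! audioconvert ! capsfilter ! injector ! audiorate
   * ! fakesink, randomly drops and/or injects buffers in front of
   * audiorate, and then checks that the buffers collected at the sink form
   * one contiguous, perfectly timestamped stream. */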
  GstElement *pipe, *src, *conv, *filter, *injector, *audiorate, *sink;
  GstMessage *msg;
  GstCaps *caps;
  GstPad *srcpad;
  GList *l, *bufs = NULL;
  GstClockTime next_time = GST_CLOCK_TIME_NONE;
  guint64 next_offset = GST_BUFFER_OFFSET_NONE;

  caps = gst_caps_new_simple ("audio/x-raw-int", "rate", G_TYPE_INT,
      rate, "width", G_TYPE_INT, width, NULL);

  GST_INFO ("-------- drop=%.0f%% caps = %" GST_PTR_FORMAT " ---------- ",
      drop_probability * 100.0, caps);

  g_assert (drop_probability >= 0.0 && drop_probability <= 1.0);
  g_assert (inject_probability >= 0.0 && inject_probability <= 1.0);
  g_assert (width > 0 && (width % 8) == 0);

  pipe = gst_pipeline_new ("pipeline");
  fail_unless (pipe != NULL);

  src = gst_element_factory_make ("audiotestsrc", "audiotestsrc");
  fail_unless (src != NULL);

  g_object_set (src, "num-buffers", 100, NULL);

  conv = gst_element_factory_make ("audioconvert", "audioconvert");
  fail_unless (conv != NULL);

  filter = gst_element_factory_make ("capsfilter", "capsfilter");
  fail_unless (filter != NULL);
  g_object_set (filter, "caps", caps, NULL);

  injector_inject_probability = inject_probability;
  injector = GST_ELEMENT (g_object_new (test_injector_get_type (), NULL));

  srcpad = gst_element_get_pad (injector, "src");
  fail_unless (srcpad != NULL);
  gst_pad_add_buffer_probe (srcpad, G_CALLBACK (probe_cb), &drop_probability);
  gst_object_unref (srcpad);

  audiorate = gst_element_factory_make ("audiorate", "audiorate");
  fail_unless (audiorate != NULL);

  sink = gst_element_factory_make ("fakesink", "fakesink");
  fail_unless (sink != NULL);
  g_object_set (sink, "signal-handoffs", TRUE, NULL);
  g_signal_connect (sink, "handoff", G_CALLBACK (got_buf), &bufs);

  gst_bin_add_many (GST_BIN (pipe), src, conv, filter, injector, audiorate,
      sink, NULL);
  gst_element_link_many (src, conv, filter, injector, audiorate, sink, NULL);

  fail_unless_equals_int (gst_element_set_state (pipe, GST_STATE_PLAYING),
      GST_STATE_CHANGE_ASYNC);

  fail_unless_equals_int (gst_element_get_state (pipe, NULL, NULL, -1),
      GST_STATE_CHANGE_SUCCESS);

  msg = gst_bus_poll (GST_ELEMENT_BUS (pipe),
      GST_MESSAGE_EOS | GST_MESSAGE_ERROR, -1);
  fail_unless_equals_string (GST_MESSAGE_TYPE_NAME (msg), "eos");

  for (l = bufs; l != NULL; l = l->next) {
    GstBuffer *buf = GST_BUFFER (l->data);
    guint num_samples;

    fail_unless (GST_BUFFER_TIMESTAMP_IS_VALID (buf));
    fail_unless (GST_BUFFER_DURATION_IS_VALID (buf));
    fail_unless (GST_BUFFER_OFFSET_IS_VALID (buf));
    fail_unless (GST_BUFFER_OFFSET_END_IS_VALID (buf));

    GST_LOG ("buffer: ts=%" GST_TIME_FORMAT ", end_ts=%" GST_TIME_FORMAT
        " off=%" G_GINT64_FORMAT ", end_off=%" G_GINT64_FORMAT,
        GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
        GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf) + GST_BUFFER_DURATION (buf)),
        GST_BUFFER_OFFSET (buf), GST_BUFFER_OFFSET_END (buf));

    if (GST_CLOCK_TIME_IS_VALID (next_time)) {
      fail_unless_equals_uint64 (next_time, GST_BUFFER_TIMESTAMP (buf));
    }
    if (next_offset != GST_BUFFER_OFFSET_NONE) {
      fail_unless_equals_uint64 (next_offset, GST_BUFFER_OFFSET (buf));
    }

    /* check buffer size for sanity */
    fail_unless_equals_int (GST_BUFFER_SIZE (buf) % (width / 8), 0);

    /* check there is actually as much data as there should be */
    num_samples = GST_BUFFER_OFFSET_END (buf) - GST_BUFFER_OFFSET (buf);
    fail_unless_equals_int (GST_BUFFER_SIZE (buf), num_samples * (width / 8));

    next_time = GST_BUFFER_TIMESTAMP (buf) + GST_BUFFER_DURATION (buf);
    next_offset = GST_BUFFER_OFFSET_END (buf);
  }

  gst_message_unref (msg);
  gst_element_set_state (pipe, GST_STATE_NULL);
  gst_object_unref (pipe);

  g_list_foreach (bufs, (GFunc) gst_mini_object_unref, NULL);
  g_list_free (bufs);

  gst_caps_unref (caps);
}
Ejemplo n.º 30
0
static GstFlowReturn
gst_audio_segment_clip_clip_buffer (GstSegmentClip * base, GstBuffer * buffer,
    GstBuffer ** outbuf)
{
  GstAudioSegmentClip *self = GST_AUDIO_SEGMENT_CLIP (base);
  GstSegment *segment = &base->segment;
  GstClockTime timestamp = GST_BUFFER_TIMESTAMP (buffer);
  GstClockTime duration = GST_BUFFER_DURATION (buffer);
  guint64 offset = GST_BUFFER_OFFSET (buffer);
  guint64 offset_end = GST_BUFFER_OFFSET_END (buffer);
  guint size = gst_buffer_get_size (buffer);

  if (!self->rate || !self->framesize) {
    GST_ERROR_OBJECT (self, "Not negotiated yet");
    gst_buffer_unref (buffer);
    return GST_FLOW_NOT_NEGOTIATED;
  }

  if (segment->format != GST_FORMAT_DEFAULT &&
      segment->format != GST_FORMAT_TIME) {
    GST_DEBUG_OBJECT (self, "Unsupported segment format %s",
        gst_format_get_name (segment->format));
    *outbuf = buffer;
    return GST_FLOW_OK;
  }

  if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer)) {
    GST_WARNING_OBJECT (self, "Buffer without valid timestamp");
    *outbuf = buffer;
    return GST_FLOW_OK;
  }

  *outbuf =
      gst_audio_buffer_clip (buffer, segment, self->rate, self->framesize);

  if (!*outbuf) {
    GST_DEBUG_OBJECT (self, "Buffer outside the configured segment");

    /* Now return EOS if we're past the end (or, in reverse playback,
     * before the start) of the segment */
    if (segment->format == GST_FORMAT_TIME) {
      if (segment->rate >= 0) {
        if (segment->stop != -1 && timestamp >= segment->stop)
          return GST_FLOW_EOS;
      } else {
        if (!GST_CLOCK_TIME_IS_VALID (duration))
          duration =
              gst_util_uint64_scale_int (size, GST_SECOND,
              self->framesize * self->rate);

        if (segment->start != -1 && timestamp + duration <= segment->start)
          return GST_FLOW_EOS;
      }
    } else {
      if (segment->rate >= 0) {
        if (segment->stop != -1 && offset != -1 && offset >= segment->stop)
          return GST_FLOW_EOS;
      } else if (offset != -1 || offset_end != -1) {
        if (offset_end == -1)
          offset_end = offset + size / self->framesize;

        if (segment->start != -1 && offset_end <= segment->start)
          return GST_FLOW_EOS;
      }
    }
  }

  return GST_FLOW_OK;
}