Example #1
/**
 * gst_ebml_write_flush_cache:
 * @ebml: a #GstEbmlWrite.
 *
 * Flush the cache.
 */
void
gst_ebml_write_flush_cache (GstEbmlWrite * ebml)
{
  if (!ebml->cache)
    return;

  /* this is very important. It may fail, in which case the client
   * programmer didn't use the cache somewhere. That's fatal. */
  g_assert (ebml->handled == GST_BUFFER_SIZE (ebml->cache));
  g_assert (GST_BUFFER_SIZE (ebml->cache) +
      GST_BUFFER_OFFSET (ebml->cache) == ebml->pos);

  if (ebml->last_write_result == GST_FLOW_OK) {
    if (ebml->need_newsegment) {
      GstEvent *ev;

      g_assert (ebml->handled == 0);
      ev = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_BYTES, 0, -1, 0);
      if (gst_pad_push_event (ebml->srcpad, ev))
        ebml->need_newsegment = FALSE;
    }
    ebml->last_write_result = gst_pad_push (ebml->srcpad, ebml->cache);
  }

  ebml->cache = NULL;
  ebml->cache_size = 0;
  ebml->handled = 0;
}
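
Note: every example in this collection uses the GStreamer 0.10 API
(gst_event_new_new_segment and friends). As a companion, here is a minimal
sketch of the receiving side: a downstream sink-pad event handler that reads
the segment values back with gst_event_parse_new_segment(). The handler name
and the element it would belong to are illustrative, not taken from the
sources here.

static gboolean
my_sink_event (GstPad * pad, GstEvent * event)
{
  if (GST_EVENT_TYPE (event) == GST_EVENT_NEWSEGMENT) {
    gboolean update;
    gdouble rate;
    GstFormat format;
    gint64 start, stop, position;

    /* same parse call as used in several examples below */
    gst_event_parse_new_segment (event, &update, &rate, &format,
        &start, &stop, &position);
    GST_LOG ("segment: format %d, start %" G_GINT64_FORMAT ", stop %"
        G_GINT64_FORMAT, format, start, stop);
  }
  return gst_pad_event_default (pad, event);
}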
Example #2
static void
gst_wavpack_enc_rewrite_first_block (GstWavpackEnc * enc)
{
  GstEvent *event;
  gboolean ret;

  g_return_if_fail (enc);
  g_return_if_fail (enc->first_block);

  /* create the seek event only after the sanity checks, so it cannot leak
   * if one of them fails */
  event = gst_event_new_new_segment (TRUE, 1.0, GST_FORMAT_BYTES,
      0, GST_BUFFER_OFFSET_NONE, 0);

  /* update the sample count in the first block */
  WavpackUpdateNumSamples (enc->wp_context, enc->first_block);

  /* try to seek to the beginning of the output */
  ret = gst_pad_push_event (enc->srcpad, event);
  if (ret) {
    /* try to rewrite the first block */
    GST_DEBUG_OBJECT (enc, "rewriting first block ...");
    enc->wv_id.passthrough = TRUE;
    ret = gst_wavpack_enc_push_block (&enc->wv_id,
        enc->first_block, enc->first_block_size);
    enc->wv_id.passthrough = FALSE;
  } else {
    GST_WARNING_OBJECT (enc, "rewriting of first block failed: "
        "seek to the beginning of the output failed");
  }
}
Example #3
static GstEvent *
gst_tag_lib_mux_adjust_event_offsets (GstTagLibMux * mux,
    const GstEvent * newsegment_event)
{
  GstFormat format;
  gint64 start, stop, cur;

  gst_event_parse_new_segment ((GstEvent *) newsegment_event, NULL, NULL,
      &format, &start, &stop, &cur);

  g_assert (format == GST_FORMAT_BYTES);

  if (start != -1)
    start += mux->tag_size;
  if (stop != -1)
    stop += mux->tag_size;
  if (cur != -1)
    cur += mux->tag_size;

  GST_DEBUG_OBJECT (mux, "adjusting newsegment event offsets to start=%"
      G_GINT64_FORMAT ", stop=%" G_GINT64_FORMAT ", cur=%" G_GINT64_FORMAT
      " (delta = +%" G_GSIZE_FORMAT ")", start, stop, cur, mux->tag_size);

  return gst_event_new_new_segment (TRUE, 1.0, format, start, stop, cur);
}
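
A sketch of how a helper like this might be called from the muxer's sink-pad
event handler (the caller below is hypothetical, not part of the source
above): the incoming byte-format newsegment is replaced by a shifted copy
before being forwarded, since everything downstream sits tag_size bytes
later in the file.

  GstEvent *new_event;

  new_event = gst_tag_lib_mux_adjust_event_offsets (mux, event);
  gst_event_unref (event);
  gst_pad_push_event (mux->srcpad, new_event);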
Example #4
static GstFlowReturn
gst_wavenc_push_header (GstWavEnc * wavenc, guint audio_data_size)
{
  GstFlowReturn ret;
  GstBuffer *outbuf;

  /* seek to beginning of file */
  gst_pad_push_event (wavenc->srcpad,
      gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_BYTES,
          0, GST_CLOCK_TIME_NONE, 0));

  GST_DEBUG_OBJECT (wavenc, "writing header with datasize=%u", audio_data_size);

  outbuf = gst_wavenc_create_header_buf (wavenc, audio_data_size);
  GST_BUFFER_OFFSET (outbuf) = 0;

  ret = gst_pad_push (wavenc->srcpad, outbuf);

  if (ret != GST_FLOW_OK) {
    GST_WARNING_OBJECT (wavenc, "push header failed: flow = %s",
        gst_flow_get_name (ret));
  }

  return ret;
}
Example #5
static gboolean
gst_cdxa_parse_do_seek (GstCDXAParse * cdxa, GstEvent * event)
{
  GstSeekFlags flags;
  GstSeekType start_type;
  GstFormat format;
  gint64 start, off, upstream_size;

  gst_event_parse_seek (event, NULL, &format, &flags, &start_type, &start,
      NULL, NULL);

  if (format != GST_FORMAT_BYTES) {
    GST_DEBUG_OBJECT (cdxa, "Can only handle seek in BYTES format");
    return FALSE;
  }

  if (start_type != GST_SEEK_TYPE_SET) {
    GST_DEBUG_OBJECT (cdxa, "Can only handle seek from start (SEEK_TYPE_SET)");
    return FALSE;
  }

  GST_OBJECT_LOCK (cdxa);
  off = gst_cdxa_parse_convert_src_to_sink_offset (cdxa, start);
  upstream_size = cdxa->datasize;
  GST_OBJECT_UNLOCK (cdxa);

  if (off >= upstream_size) {
    GST_DEBUG_OBJECT (cdxa, "Invalid target offset %" G_GINT64_FORMAT ", file "
        "is only %" G_GINT64_FORMAT " bytes in size", off, upstream_size);
    return FALSE;
  }

  /* unlock upstream pull_range */
  gst_pad_push_event (cdxa->sinkpad, gst_event_new_flush_start ());

  /* make sure our loop function exits */
  gst_pad_push_event (cdxa->srcpad, gst_event_new_flush_start ());

  /* wait for streaming to finish */
  GST_PAD_STREAM_LOCK (cdxa->sinkpad);

  /* prepare for streaming again */
  gst_pad_push_event (cdxa->sinkpad, gst_event_new_flush_stop ());
  gst_pad_push_event (cdxa->srcpad, gst_event_new_flush_stop ());

  gst_pad_push_event (cdxa->srcpad,
      gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_BYTES,
          start, GST_CLOCK_TIME_NONE, 0));

  GST_OBJECT_LOCK (cdxa);
  cdxa->offset = off;
  GST_OBJECT_UNLOCK (cdxa);

  /* and restart */
  gst_pad_start_task (cdxa->sinkpad,
      (GstTaskFunction) gst_cdxa_parse_loop, cdxa->sinkpad);

  GST_PAD_STREAM_UNLOCK (cdxa->sinkpad);
  return TRUE;
}
Example #6
static gboolean
speed_sink_event (GstPad * pad, GstEvent * event)
{
  GstSpeed *filter;
  gboolean ret = FALSE;

  filter = GST_SPEED (gst_pad_get_parent (pad));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_NEWSEGMENT:{
      gdouble rate;
      gboolean update = FALSE;
      GstFormat format;
      gint64 start_value, stop_value, base;

      gst_event_parse_new_segment (event, &update, &rate, &format, &start_value,
          &stop_value, &base);
      gst_event_unref (event);

      if (format != GST_FORMAT_TIME) {
        GST_WARNING_OBJECT (filter, "newsegment event not in TIME format!");
        break;
      }

      g_assert (filter->speed > 0);

      if (start_value >= 0)
        start_value /= filter->speed;
      if (stop_value >= 0)
        stop_value /= filter->speed;
      base /= filter->speed;

      /* this would only really be correct if we clipped incoming data */
      filter->timestamp = start_value;

      /* set to NONE so it gets reset later based on the timestamp when we have
       * the samplerate */
      filter->offset = GST_BUFFER_OFFSET_NONE;

      ret =
          gst_pad_event_default (pad, gst_event_new_new_segment (update, rate,
              format, start_value, stop_value, base));
      break;
    }
    default:
      ret = gst_pad_event_default (pad, event);
      break;
  }

  gst_object_unref (filter);

  return ret;
}
Example #7
GstFlowReturn
gst_vdp_mpeg_dec_push_video_buffer (GstVdpMpegDec * mpeg_dec,
                                    GstVdpVideoBuffer * buffer)
{
    gint64 byterate;

    if (GST_BUFFER_TIMESTAMP (buffer) == GST_CLOCK_TIME_NONE
            && GST_CLOCK_TIME_IS_VALID (mpeg_dec->next_timestamp)) {
        GST_BUFFER_TIMESTAMP (buffer) = mpeg_dec->next_timestamp;
    } else if (GST_BUFFER_TIMESTAMP (buffer) == GST_CLOCK_TIME_NONE) {
        GST_BUFFER_TIMESTAMP (buffer) = gst_util_uint64_scale (mpeg_dec->frame_nr,
                                        GST_SECOND * mpeg_dec->fps_d, mpeg_dec->fps_n);
    }

    if (mpeg_dec->seeking) {
        GstEvent *event;

        event = gst_event_new_new_segment (FALSE,
                                           mpeg_dec->segment.rate, GST_FORMAT_TIME, GST_BUFFER_TIMESTAMP (buffer),
                                           mpeg_dec->segment.stop, GST_BUFFER_TIMESTAMP (buffer));

        gst_pad_push_event (mpeg_dec->src, event);

        mpeg_dec->seeking = FALSE;
    }

    mpeg_dec->next_timestamp = GST_BUFFER_TIMESTAMP (buffer) +
                               GST_BUFFER_DURATION (buffer);

    gst_segment_set_last_stop (&mpeg_dec->segment, GST_FORMAT_TIME,
                               GST_BUFFER_TIMESTAMP (buffer));

    mpeg_dec->accumulated_duration += GST_BUFFER_DURATION (buffer);
    mpeg_dec->accumulated_size += GST_BUFFER_SIZE (buffer);
    byterate = gst_util_uint64_scale (mpeg_dec->accumulated_size, GST_SECOND,
                                      mpeg_dec->accumulated_duration);
    GST_DEBUG ("byterate: %" G_GINT64_FORMAT, mpeg_dec->byterate);

    mpeg_dec->byterate = (mpeg_dec->byterate + byterate) / 2;

    gst_buffer_set_caps (GST_BUFFER (buffer), GST_PAD_CAPS (mpeg_dec->src));

    GST_DEBUG_OBJECT (mpeg_dec,
                      "Pushing buffer with timestamp: %" GST_TIME_FORMAT
                      " frame_nr: %" G_GINT64_FORMAT,
                      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)),
                      GST_BUFFER_OFFSET (buffer));

    return gst_pad_push (mpeg_dec->src, GST_BUFFER (buffer));
}
Example #8
/**
 * gst_wrapper_camera_bin_src_vidsrc_probe:
 *
 * Buffer probe called before sending each buffer to image queue.
 */
static gboolean
gst_wrapper_camera_bin_src_vidsrc_probe (GstPad * pad, GstBuffer * buffer,
    gpointer data)
{
  GstWrapperCameraBinSrc *self = GST_WRAPPER_CAMERA_BIN_SRC (data);
  GstBaseCameraSrc *camerasrc = GST_BASE_CAMERA_SRC_CAST (self);
  gboolean ret = FALSE;

  GST_LOG_OBJECT (self, "Video probe, mode %d, capture status %d",
      camerasrc->mode, self->video_rec_status);

  /* TODO do we want to lock for every buffer? */
  /*
   * Note that we can use gst_pad_push_event here because we are a buffer
   * probe.
   */
  /* TODO shouldn't access this directly */
  g_mutex_lock (camerasrc->capturing_mutex);
  if (self->video_rec_status == GST_VIDEO_RECORDING_STATUS_DONE) {
    /* NOP */
  } else if (self->video_rec_status == GST_VIDEO_RECORDING_STATUS_STARTING) {
    GstClockTime ts;

    GST_DEBUG_OBJECT (self, "Starting video recording");
    self->video_rec_status = GST_VIDEO_RECORDING_STATUS_RUNNING;

    ts = GST_BUFFER_TIMESTAMP (buffer);
    if (!GST_CLOCK_TIME_IS_VALID (ts))
      ts = 0;
    gst_pad_push_event (self->vidsrc, gst_event_new_new_segment (FALSE, 1.0,
            GST_FORMAT_TIME, ts, -1, 0));

    /* post preview */
    GST_DEBUG_OBJECT (self, "Posting preview for video");
    gst_base_camera_src_post_preview (camerasrc, buffer);

    ret = TRUE;
  } else if (self->video_rec_status == GST_VIDEO_RECORDING_STATUS_FINISHING) {
    /* send eos */
    GST_DEBUG_OBJECT (self, "Finishing video recording, pushing eos");
    gst_pad_push_event (pad, gst_event_new_eos ());
    self->video_rec_status = GST_VIDEO_RECORDING_STATUS_DONE;
    gst_base_camera_src_finish_capture (camerasrc);
  } else {
    ret = TRUE;
  }
  g_mutex_unlock (camerasrc->capturing_mutex);
  return ret;
}
Example #9
static gboolean
gst_ebml_writer_send_new_segment_event (GstEbmlWrite * ebml, guint64 new_pos)
{
  gboolean res;

  GST_INFO ("seeking to %" G_GUINT64_FORMAT, new_pos);

  res = gst_pad_push_event (ebml->srcpad,
      gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_BYTES, new_pos, -1, 0));

  if (!res)
    GST_WARNING ("seek to %" G_GUINT64_FORMAT "failed", new_pos);

  return res;
}
Example #10
static void
pipeline_op_done (GstPad *pad, gboolean blocked, GstPad *new_pad)
{
	GstEvent *segment;
	if (new_pad == NULL)
		return;

	/* send a very unimaginative new segment through the new pad */
	segment = gst_event_new_new_segment (TRUE,
					     1.0,
					     GST_FORMAT_DEFAULT,
					     0,
					     GST_CLOCK_TIME_NONE,
					     0);
	gst_pad_send_event (new_pad, segment);
	gst_object_unref (new_pad);
}
Example #11
void test_live_switch()
{
  GstElement *audioresample;
  GstEvent *newseg;
  GstCaps *caps;
  xmlfile = "test_live_switch";
  std_log (LOG_FILENAME_LINE, "Test Started test_live_switch");
  audioresample = setup_audioresample (4, 48000, 48000, 16, FALSE);

  /* Let the sinkpad act like something that can only handle things of
   * rate 48000, and can only allocate buffers for that rate; if someone
   * tries to get a buffer with a rate higher than 48000 it tries to
   * renegotiate */
  gst_pad_set_bufferalloc_function (mysinkpad, live_switch_alloc_only_48000);
  gst_pad_set_getcaps_function (mysinkpad, live_switch_get_sink_caps);

  gst_pad_use_fixed_caps (mysrcpad);

  caps = gst_pad_get_negotiated_caps (mysrcpad);
  fail_unless (gst_caps_is_fixed (caps));

  fail_unless (gst_element_set_state (audioresample,
          GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
      "could not set to playing");

  newseg = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0, -1, 0);
  fail_unless (gst_pad_push_event (mysrcpad, newseg) != FALSE);

  /* downstream can provide the requested rate, a buffer alloc will be passed
   * on */
  live_switch_push (48000, caps);

  /* Downstream can never accept this rate, buffer alloc isn't passed on */
  live_switch_push (40000, caps);

  /* Downstream can provide the requested rate but will re-negotiate */
  live_switch_push (50000, caps);

  cleanup_audioresample (audioresample);
  gst_caps_unref (caps);
  
  std_log(LOG_FILENAME_LINE, "Test Successful");
  create_xml(0);
}
Example #12
static void
mpegtsdemux_prepare_srcpad (MpegTsMux * mux)
{
  GstEvent *new_seg =
      gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_BYTES, 0, -1, 0);
  GstCaps *caps = gst_caps_new_simple ("video/mpegts",
      "systemstream", G_TYPE_BOOLEAN, TRUE,
      "packetsize", G_TYPE_INT,
      (mux->m2ts_mode ? M2TS_PACKET_LENGTH : NORMAL_TS_PACKET_LENGTH),
      NULL);

  /* Set caps on src pad from our template and push new segment */
  gst_pad_set_caps (mux->srcpad, caps);
  gst_caps_unref (caps);

  if (!gst_pad_push_event (mux->srcpad, new_seg)) {
    GST_WARNING_OBJECT (mux, "New segment event was not handled downstream");
  }
}
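
M2TS_PACKET_LENGTH and NORMAL_TS_PACKET_LENGTH are defined elsewhere in
mpegtsmux. Assuming the conventional packet sizes (an M2TS packet is a
standard 188-byte transport packet prefixed with a 4-byte timecode), the
definitions would look like:

#define NORMAL_TS_PACKET_LENGTH 188
#define M2TS_PACKET_LENGTH      192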
Example #13
static gboolean
gst_wavpack_parse_send_newsegment (GstWavpackParse * wvparse, gboolean update)
{
  GstSegment *s = &wvparse->segment;

  gboolean ret;

  gint64 stop_time = -1;

  gint64 start_time = 0;

  gint64 cur_pos_time;

  gint64 diff;

  /* segment is in DEFAULT format, but we want to send a TIME newsegment */
  start_time = gst_util_uint64_scale_int (s->start, GST_SECOND,
      wvparse->samplerate);

  if (s->stop != -1) {
    stop_time = gst_util_uint64_scale_int (s->stop, GST_SECOND,
        wvparse->samplerate);
  }

  GST_DEBUG_OBJECT (wvparse, "sending newsegment from %" GST_TIME_FORMAT
      " to %" GST_TIME_FORMAT, GST_TIME_ARGS (start_time),
      GST_TIME_ARGS (stop_time));

  /* after a seek, s->last_stop will point to a chunk boundary, ie. from
   * which sample we will start sending data again, while s->start will
   * point to the sample we actually want to seek to and want to start
   * playing right after the seek. Adjust clock-time for the difference
   * so we start playing from start_time */
  cur_pos_time = gst_util_uint64_scale_int (s->last_stop, GST_SECOND,
      wvparse->samplerate);
  diff = start_time - cur_pos_time;

  ret = gst_pad_push_event (wvparse->srcpad,
      gst_event_new_new_segment (update, s->rate, GST_FORMAT_TIME,
          start_time, stop_time, start_time - diff));

  return ret;
}
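
The DEFAULT-to-TIME conversion above is plain integer scaling: sample count
times GST_SECOND divided by the sample rate. A worked example, assuming a
samplerate of 44100 (the value is illustrative):

  /* sample 22050 maps to half a second */
  gint64 t = gst_util_uint64_scale_int (22050, GST_SECOND, 44100);
  /* t == GST_SECOND / 2, i.e. 500000000 ns */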
Example #14
static gboolean
mpegpsdemux_prepare_srcpad (MpegPsMux * mux)
{
  GValue val = { 0, };
  GList *headers, *l;

  /* prepare the source pad for output */

  GstEvent *new_seg =
      gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_BYTES, 0, -1, 0);
  GstCaps *caps = gst_caps_new_simple ("video/mpeg",
      "mpegversion", G_TYPE_INT, 2,
      "systemstream", G_TYPE_BOOLEAN, TRUE,
      NULL);

/*      gst_static_pad_template_get_caps (&mpegpsmux_src_factory); */

  headers = psmux_get_stream_headers (mux->psmux);
  g_value_init (&val, GST_TYPE_ARRAY);
  for (l = headers; l != NULL; l = l->next) {
    GValue buf_val = { 0, };

    g_value_init (&buf_val, GST_TYPE_BUFFER);
    gst_value_take_buffer (&buf_val, GST_BUFFER (l->data));
    l->data = NULL;
    gst_value_array_append_value (&val, &buf_val);
    g_value_unset (&buf_val);
  }
  gst_caps_set_value (caps, "streamheader", &val);
  g_value_unset (&val);
  g_list_free (headers);

  /* Set caps on src pad from our template and push new segment */
  gst_pad_set_caps (mux->srcpad, caps);
  gst_caps_unref (caps);

  if (!gst_pad_push_event (mux->srcpad, new_seg)) {
    GST_WARNING_OBJECT (mux, "New segment event was not handled");
    return FALSE;
  }

  return TRUE;
}
Example #15
static void
gst_musepackdec_send_newsegment (GstMusepackDec * dec)
{
    GstSegment *s = &dec->segment;
    gint64 stop_time = GST_CLOCK_TIME_NONE;
    gint64 start_time = 0;

    start_time = gst_util_uint64_scale_int (s->start, GST_SECOND, dec->rate);

    if (s->stop != -1)
        stop_time = gst_util_uint64_scale_int (s->stop, GST_SECOND, dec->rate);

    GST_DEBUG_OBJECT (dec, "sending newsegment from %" GST_TIME_FORMAT
                      " to %" GST_TIME_FORMAT ", rate = %.1f", GST_TIME_ARGS (start_time),
                      GST_TIME_ARGS (stop_time), s->rate);

    gst_pad_push_event (dec->srcpad,
                        gst_event_new_new_segment (FALSE, s->rate, GST_FORMAT_TIME,
                                start_time, stop_time, start_time));
}
Example #16
static gboolean mfw_gst_vpuenc_sink_event(GstPad * pad, GstEvent * event)
{
	GstVPU_Enc *vpu_enc = NULL;
	gboolean ret = FALSE;
	GstFormat format;
	gint64 start, stop, position;
	gdouble rate;

	vpu_enc = MFW_GST_VPU_ENC(GST_PAD_PARENT(pad));

	switch (GST_EVENT_TYPE(event)) {
	case GST_EVENT_NEWSEGMENT:
		gst_event_parse_new_segment(event, NULL, &rate, &format,
					    &start, &stop, &position);

		if (format == GST_FORMAT_BYTES) {
			ret = gst_pad_push_event(vpu_enc->srcpad,
						 gst_event_new_new_segment
						 (FALSE, 1.0,
						  GST_FORMAT_TIME, 0,
						  GST_CLOCK_TIME_NONE,
						  0));
		}
		/* the original event is replaced rather than forwarded,
		 * so drop the reference */
		gst_event_unref(event);
		break;
	case GST_EVENT_EOS:
		ret = gst_pad_push_event(vpu_enc->srcpad, event);

		if (TRUE != ret) {
			/* gst_pad_push_event() took ownership of the event,
			 * so it must not be unreffed here */
			GST_ERROR("Error in pushing the event, result is %d\n",
			     ret);
		}
		break;
	default:
		ret = gst_pad_event_default(pad, event);
		break;
	}
	return ret;
}
Example #17
static GstFlowReturn
gst_sub_parse_chain (GstPad * sinkpad, GstBuffer * buf)
{
  GstFlowReturn ret;
  GstSubParse *self;

  self = GST_SUBPARSE (GST_PAD_PARENT (sinkpad));

  /* Push newsegment if needed */
  if (self->need_segment) {
    GST_LOG_OBJECT (self, "pushing newsegment event with %" GST_SEGMENT_FORMAT,
        &self->segment);

    gst_pad_push_event (self->srcpad, gst_event_new_new_segment (FALSE,
            self->segment.rate, self->segment.format,
            self->segment.last_stop, self->segment.stop, self->segment.time));
    self->need_segment = FALSE;
  }

  ret = handle_buffer (self, buf);

  return ret;
}
Example #18
static gboolean
mpegpsdemux_prepare_srcpad (MpegPsMux * mux)
{
  /* prepare the source pad for output */

  GstEvent *new_seg =
      gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_BYTES, 0, -1, 0);
  GstCaps *caps = gst_caps_new_simple ("video/mpeg",
      "mpegversion", G_TYPE_INT, 2,
      "systemstream", G_TYPE_BOOLEAN, TRUE,
      NULL);

/*      gst_static_pad_template_get_caps (&mpegpsmux_src_factory); */

  /* Set caps on src pad from our template and push new segment */
  gst_pad_set_caps (mux->srcpad, caps);
  gst_caps_unref (caps);

  if (!gst_pad_push_event (mux->srcpad, new_seg)) {
    GST_WARNING_OBJECT (mux, "New segment event was not handled");
    return FALSE;
  }

  return TRUE;
}
Example #19
/* chain function
 * this function does the actual processing
 */
static GstFlowReturn
gst_ivf_parse_chain (GstPad * pad, GstBuffer * buf)
{
  GstIvfParse *ivf = GST_IVF_PARSE (GST_OBJECT_PARENT (pad));
  GstFlowReturn res;

  /* lazy creation of the adapter */
  if (G_UNLIKELY (ivf->adapter == NULL)) {
    ivf->adapter = gst_adapter_new ();
  }

  GST_LOG_OBJECT (ivf, "Pushing buffer of size %u to adapter",
      GST_BUFFER_SIZE (buf));

  gst_adapter_push (ivf->adapter, buf); /* adapter takes ownership of buf */

  res = GST_FLOW_OK;

  switch (ivf->state) {
    case GST_IVF_PARSE_START:
      if (gst_adapter_available (ivf->adapter) >= 32) {
        GstCaps *caps;

        const guint8 *data = gst_adapter_peek (ivf->adapter, 32);
        guint32 magic = GST_READ_UINT32_LE (data);
        guint16 version = GST_READ_UINT16_LE (data + 4);
        guint16 header_size = GST_READ_UINT16_LE (data + 6);
        guint32 fourcc = GST_READ_UINT32_LE (data + 8);
        guint16 width = GST_READ_UINT16_LE (data + 12);
        guint16 height = GST_READ_UINT16_LE (data + 14);
        guint32 rate_num = GST_READ_UINT32_LE (data + 16);
        guint32 rate_den = GST_READ_UINT32_LE (data + 20);
#ifndef GST_DISABLE_GST_DEBUG
        guint32 num_frames = GST_READ_UINT32_LE (data + 24);
#endif

        /* last 4 bytes unused */
        gst_adapter_flush (ivf->adapter, 32);

        if (magic != GST_MAKE_FOURCC ('D', 'K', 'I', 'F') ||
            version != 0 || header_size != 32 ||
            fourcc != GST_MAKE_FOURCC ('V', 'P', '8', '0')) {
          GST_ELEMENT_ERROR (ivf, STREAM, WRONG_TYPE, (NULL), (NULL));
          return GST_FLOW_ERROR;
        }

        /* create src pad caps */
        caps = gst_caps_new_simple ("video/x-vp8",
            "width", G_TYPE_INT, width, "height", G_TYPE_INT, height,
            "framerate", GST_TYPE_FRACTION, rate_num, rate_den, NULL);

        GST_INFO_OBJECT (ivf, "Found stream: %" GST_PTR_FORMAT, caps);

        GST_LOG_OBJECT (ivf, "Stream has %d frames", num_frames);

        gst_pad_set_caps (ivf->srcpad, caps);
        gst_caps_unref (caps);

        /* keep framerate in instance for convenience */
        ivf->rate_num = rate_num;
        ivf->rate_den = rate_den;

        gst_pad_push_event (ivf->srcpad, gst_event_new_new_segment (FALSE, 1.0,
                GST_FORMAT_TIME, 0, -1, 0));

        /* move along */
        ivf->state = GST_IVF_PARSE_DATA;
      } else {
        GST_LOG_OBJECT (ivf, "Header data not yet available.");
        break;
      }

      /* fall through */

    case GST_IVF_PARSE_DATA:
      while (gst_adapter_available (ivf->adapter) > 12) {
        const guint8 *data = gst_adapter_peek (ivf->adapter, 12);
        guint32 frame_size = GST_READ_UINT32_LE (data);
        guint64 frame_pts = GST_READ_UINT64_LE (data + 4);

        GST_LOG_OBJECT (ivf,
            "Read frame header: size %u, pts %" G_GUINT64_FORMAT, frame_size,
            frame_pts);

        if (gst_adapter_available (ivf->adapter) >= 12 + frame_size) {
          GstBuffer *frame;

          gst_adapter_flush (ivf->adapter, 12);

          frame = gst_adapter_take_buffer (ivf->adapter, frame_size);
          gst_buffer_set_caps (frame, GST_PAD_CAPS (ivf->srcpad));
          GST_BUFFER_TIMESTAMP (frame) =
              gst_util_uint64_scale_int (GST_SECOND * frame_pts, ivf->rate_den,
              ivf->rate_num);
          GST_BUFFER_DURATION (frame) =
              gst_util_uint64_scale_int (GST_SECOND, ivf->rate_den,
              ivf->rate_num);

          GST_DEBUG_OBJECT (ivf, "Pushing frame of size %u, ts %"
              GST_TIME_FORMAT ", dur %" GST_TIME_FORMAT ", off %"
              G_GUINT64_FORMAT ", off_end %" G_GUINT64_FORMAT,
              GST_BUFFER_SIZE (frame),
              GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (frame)),
              GST_TIME_ARGS (GST_BUFFER_DURATION (frame)),
              GST_BUFFER_OFFSET (frame), GST_BUFFER_OFFSET_END (frame));

          res = gst_pad_push (ivf->srcpad, frame);
          if (res != GST_FLOW_OK)
            break;
        } else {
          GST_LOG_OBJECT (ivf, "Frame data not yet available.");
          break;
        }
      }
      break;

    default:
      g_return_val_if_reached (GST_FLOW_ERROR);
  }

  return res;
}
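
The per-frame timestamp math above scales the IVF pts by the frame duration
(rate_den / rate_num seconds). A worked example, assuming an NTSC framerate
of 30000/1001 (illustrative values):

  /* pts 30: 30 * GST_SECOND * 1001 / 30000 == 1001000000 ns, ~1 second */
  guint64 ts = gst_util_uint64_scale_int (GST_SECOND * 30, 1001, 30000);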
Example #20
static int
gst_wavpack_enc_push_block (void *id, void *data, int32_t count)
{
  GstWavpackEncWriteID *wid = (GstWavpackEncWriteID *) id;
  GstWavpackEnc *enc = GST_WAVPACK_ENC (wid->wavpack_enc);
  GstFlowReturn *flow;
  GstBuffer *buffer;
  GstPad *pad;
  guchar *block = (guchar *) data;

  pad = (wid->correction) ? enc->wvcsrcpad : enc->srcpad;
  flow = (wid->correction) ?
      &enc->wvcsrcpad_last_return : &enc->srcpad_last_return;

  *flow = gst_pad_alloc_buffer_and_set_caps (pad, GST_BUFFER_OFFSET_NONE,
      count, GST_PAD_CAPS (pad), &buffer);

  if (*flow != GST_FLOW_OK) {
    GST_WARNING_OBJECT (enc, "flow on %s:%s = %s",
        GST_DEBUG_PAD_NAME (pad), gst_flow_get_name (*flow));
    return FALSE;
  }

  g_memmove (GST_BUFFER_DATA (buffer), block, count);

  if (count > sizeof (WavpackHeader) && memcmp (block, "wvpk", 4) == 0) {
    /* if it's a Wavpack block set buffer timestamp and duration, etc */
    WavpackHeader wph;

    GST_LOG_OBJECT (enc, "got %d bytes of encoded wavpack %sdata",
        count, (wid->correction) ? "correction " : "");

    gst_wavpack_read_header (&wph, block);

    /* Only set when pushing the first buffer again, in that case
     * we don't want to delay the buffer or push newsegment events
     */
    if (!wid->passthrough) {
      /* Only push complete blocks */
      if (enc->pending_buffer == NULL) {
        enc->pending_buffer = buffer;
        enc->pending_offset = wph.block_index;
      } else if (enc->pending_offset == wph.block_index) {
        enc->pending_buffer = gst_buffer_join (enc->pending_buffer, buffer);
      } else {
        GST_ERROR ("Got incomplete block, dropping");
        gst_buffer_unref (enc->pending_buffer);
        enc->pending_buffer = buffer;
        enc->pending_offset = wph.block_index;
      }

      if (!(wph.flags & FINAL_BLOCK))
        return TRUE;

      buffer = enc->pending_buffer;
      enc->pending_buffer = NULL;
      enc->pending_offset = 0;

      /* if it's the first wavpack block, send a NEW_SEGMENT event */
      if (wph.block_index == 0) {
        gst_pad_push_event (pad,
            gst_event_new_new_segment (FALSE,
                1.0, GST_FORMAT_TIME, 0, GST_BUFFER_OFFSET_NONE, 0));

        /* save header for later reference, so we can re-send it later on
         * EOS with fixed up values for total sample count etc. */
        if (enc->first_block == NULL && !wid->correction) {
          enc->first_block =
              g_memdup (GST_BUFFER_DATA (buffer), GST_BUFFER_SIZE (buffer));
          enc->first_block_size = GST_BUFFER_SIZE (buffer);
        }
      }
    }

    /* set buffer timestamp, duration, offset, offset_end from
     * the wavpack header */
    GST_BUFFER_TIMESTAMP (buffer) = enc->timestamp_offset +
        gst_util_uint64_scale_int (GST_SECOND, wph.block_index,
        enc->samplerate);
    GST_BUFFER_DURATION (buffer) =
        gst_util_uint64_scale_int (GST_SECOND, wph.block_samples,
        enc->samplerate);
    GST_BUFFER_OFFSET (buffer) = wph.block_index;
    GST_BUFFER_OFFSET_END (buffer) = wph.block_index + wph.block_samples;
  } else {
    /* if it's something else set no timestamp and duration on the buffer */
    GST_DEBUG_OBJECT (enc, "got %d bytes of unknown data", count);

    GST_BUFFER_TIMESTAMP (buffer) = GST_CLOCK_TIME_NONE;
    GST_BUFFER_DURATION (buffer) = GST_CLOCK_TIME_NONE;
  }

  /* push the buffer and forward errors */
  GST_DEBUG_OBJECT (enc, "pushing buffer with %d bytes",
      GST_BUFFER_SIZE (buffer));
  *flow = gst_pad_push (pad, buffer);

  if (*flow != GST_FLOW_OK) {
    GST_WARNING_OBJECT (enc, "flow on %s:%s = %s",
        GST_DEBUG_PAD_NAME (pad), gst_flow_get_name (*flow));
    return FALSE;
  }

  return TRUE;
}
Example #21
static void
gst_hls_demux_loop (GstHLSDemux * demux)
{
  GstBuffer *buf;
  GstFlowReturn ret;

  /* Loop for the source pad task. The task is started when we have
   * received the main playlist from the source element. It first tries to
   * cache the first fragments and then waits until it has more data in the
   * queue. The task is woken up when we push a new fragment to the queue or
   * when we reach the end of the playlist */

  if (G_UNLIKELY (demux->need_cache)) {
    if (!gst_hls_demux_cache_fragments (demux))
      goto cache_error;

    /* we can start now the updates thread */
    gst_hls_demux_start_update (demux);
    GST_INFO_OBJECT (demux, "First fragments cached successfully");
  }

  if (g_queue_is_empty (demux->queue)) {
    if (demux->end_of_playlist)
      goto end_of_playlist;

    goto empty_queue;
  }

  buf = g_queue_pop_head (demux->queue);

  /* Figure out if we need to create/switch pads */
  if (G_UNLIKELY (!demux->srcpad
          || GST_BUFFER_CAPS (buf) != GST_PAD_CAPS (demux->srcpad)
          || demux->need_segment)) {
    switch_pads (demux, GST_BUFFER_CAPS (buf));
    demux->need_segment = TRUE;
  }
  if (demux->need_segment) {
    /* And send a newsegment */
    GST_DEBUG_OBJECT (demux, "Sending new-segment. Segment start:%"
        GST_TIME_FORMAT, GST_TIME_ARGS (demux->position));
    gst_pad_push_event (demux->srcpad,
        gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, demux->position,
            GST_CLOCK_TIME_NONE, demux->position));
    demux->need_segment = FALSE;
  }

  if (GST_CLOCK_TIME_IS_VALID (GST_BUFFER_DURATION (buf)))
    demux->position += GST_BUFFER_DURATION (buf);

  ret = gst_pad_push (demux->srcpad, buf);
  if (ret != GST_FLOW_OK)
    goto error;

  return;

end_of_playlist:
  {
    GST_DEBUG_OBJECT (demux, "Reached end of playlist, sending EOS");
    gst_pad_push_event (demux->srcpad, gst_event_new_eos ());
    gst_hls_demux_stop (demux);
    return;
  }

cache_error:
  {
    gst_task_pause (demux->task);
    if (!demux->cancelled) {
      GST_ELEMENT_ERROR (demux, RESOURCE, NOT_FOUND,
          ("Could not cache the first fragments"), (NULL));
      gst_hls_demux_stop (demux);
    }
    return;
  }

error:
  {
    /* FIXME: handle error */
    GST_DEBUG_OBJECT (demux, "error, stopping task");
    gst_hls_demux_stop (demux);
    return;
  }

empty_queue:
  {
    gst_task_pause (demux->task);
    return;
  }
}
Example #22
static GstFlowReturn
gst_pngdec_caps_create_and_set (GstPngDec * pngdec)
{
  GstFlowReturn ret = GST_FLOW_OK;
  GstCaps *caps = NULL, *res = NULL;
  GstPadTemplate *templ = NULL;
  gint bpc = 0, color_type;
  png_uint_32 width, height;

  g_return_val_if_fail (GST_IS_PNGDEC (pngdec), GST_FLOW_ERROR);

  /* Get bits per channel */
  bpc = png_get_bit_depth (pngdec->png, pngdec->info);

  /* We don't handle 16 bits per color, strip down to 8 */
  if (bpc == 16) {
    GST_LOG_OBJECT (pngdec,
        "this is a 16 bits per channel PNG image, strip down to 8 bits");
    png_set_strip_16 (pngdec->png);
  }

  /* Get Color type */
  color_type = png_get_color_type (pngdec->png, pngdec->info);

#if 0
  /* We used to have this HACK to reverse the outgoing bytes, but the problem
   * that originally required the hack seems to have been in ffmpegcolorspace's
   * RGBA descriptions. It doesn't seem needed now that's fixed, but might
   * still be needed on big-endian systems, I'm not sure. J.S. 6/7/2007 */
  if (color_type == PNG_COLOR_TYPE_RGB_ALPHA)
    png_set_bgr (pngdec->png);
#endif

  /* Gray scale converted to RGB and upscaled to 8 bits */
  if ((color_type == PNG_COLOR_TYPE_GRAY_ALPHA) ||
      (color_type == PNG_COLOR_TYPE_GRAY)) {
    GST_LOG_OBJECT (pngdec, "converting grayscale png to RGB");
    png_set_gray_to_rgb (pngdec->png);
    if (bpc < 8) {              /* Convert to 8 bits */
      GST_LOG_OBJECT (pngdec, "converting grayscale image to 8 bits");
      png_set_gray_1_2_4_to_8 (pngdec->png);
    }
  }

  /* Palette converted to RGB */
  if (color_type == PNG_COLOR_TYPE_PALETTE) {
    GST_LOG_OBJECT (pngdec, "converting palette png to RGB");
    png_set_palette_to_rgb (pngdec->png);
  }

  /* Update the info structure */
  png_read_update_info (pngdec->png, pngdec->info);

  /* Get IHDR header again after transformation settings */

  png_get_IHDR (pngdec->png, pngdec->info, &width, &height,
      &bpc, &pngdec->color_type, NULL, NULL, NULL);

  pngdec->width = width;
  pngdec->height = height;

  GST_LOG_OBJECT (pngdec, "this is a %dx%d PNG image", pngdec->width,
      pngdec->height);

  switch (pngdec->color_type) {
    case PNG_COLOR_TYPE_RGB:
      GST_LOG_OBJECT (pngdec, "we have no alpha channel, depth is 24 bits");
      pngdec->bpp = 24;
      break;
    case PNG_COLOR_TYPE_RGB_ALPHA:
      GST_LOG_OBJECT (pngdec, "we have an alpha channel, depth is 32 bits");
      pngdec->bpp = 32;
      break;
    default:
      GST_ELEMENT_ERROR (pngdec, STREAM, NOT_IMPLEMENTED, (NULL),
          ("pngdec does not support this color type"));
      ret = GST_FLOW_NOT_SUPPORTED;
      goto beach;
  }

  caps = gst_caps_new_simple ("video/x-raw-rgb",
      "width", G_TYPE_INT, pngdec->width,
      "height", G_TYPE_INT, pngdec->height,
      "bpp", G_TYPE_INT, pngdec->bpp,
      "framerate", GST_TYPE_FRACTION, pngdec->fps_n, pngdec->fps_d, NULL);

  templ = gst_static_pad_template_get (&gst_pngdec_src_pad_template);

  res = gst_caps_intersect (caps, gst_pad_template_get_caps (templ));

  gst_caps_unref (caps);
  gst_object_unref (templ);

  if (!gst_pad_set_caps (pngdec->srcpad, res))
    ret = GST_FLOW_NOT_NEGOTIATED;

  GST_DEBUG_OBJECT (pngdec, "our caps %" GST_PTR_FORMAT, res);

  gst_caps_unref (res);

  /* Push a newsegment event */
  if (pngdec->need_newsegment) {
    gst_pad_push_event (pngdec->srcpad,
        gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0, -1, 0));
    pngdec->need_newsegment = FALSE;
  }

beach:
  return ret;
}
Example #23
static GstFlowReturn
gst_base_video_decoder_chain (GstPad * pad, GstBuffer * buf)
{
  GstBaseVideoDecoder *base_video_decoder;
  GstBaseVideoDecoderClass *base_video_decoder_class;
  GstFlowReturn ret;

  GST_DEBUG ("chain %" GST_TIME_FORMAT " duration %" GST_TIME_FORMAT,
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (buf)));

#if 0
  /* requiring the pad to be negotiated makes it impossible to use
   * oggdemux or filesrc ! decoder */
  if (!gst_pad_is_negotiated (pad)) {
    GST_DEBUG ("not negotiated");
    return GST_FLOW_NOT_NEGOTIATED;
  }
#endif

  base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad));
  base_video_decoder_class =
      GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);

  GST_DEBUG_OBJECT (base_video_decoder, "chain");

  if (!base_video_decoder->have_segment) {
    GstEvent *event;
    gboolean ret;

    GST_WARNING
        ("Received buffer without a new-segment. Assuming timestamps start from 0.");

    gst_segment_set_newsegment_full (&base_video_decoder->segment,
        FALSE, 1.0, 1.0, GST_FORMAT_TIME, 0, GST_CLOCK_TIME_NONE, 0);
    base_video_decoder->have_segment = TRUE;

    event = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0,
        GST_CLOCK_TIME_NONE, 0);

    ret =
        gst_pad_push_event (GST_BASE_VIDEO_DECODER_SRC_PAD (base_video_decoder),
        event);
    if (!ret) {
      GST_ERROR ("pushing new segment event failed");
      /* drop the ref taken with gst_pad_get_parent() before bailing out */
      gst_object_unref (base_video_decoder);
      return GST_FLOW_ERROR;
    }
  }

  if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT))) {
    GST_DEBUG_OBJECT (base_video_decoder, "received DISCONT buffer");
    gst_base_video_decoder_flush (base_video_decoder);
  }

  if (base_video_decoder->current_frame == NULL) {
    base_video_decoder->current_frame =
        gst_base_video_decoder_new_frame (base_video_decoder);
  }

  base_video_decoder->input_offset += GST_BUFFER_SIZE (buf);
  if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)) {
    gst_base_video_decoder_add_timestamp (base_video_decoder, buf);
  }


  if (base_video_decoder->packetized) {
    base_video_decoder->current_frame->sink_buffer = buf;

    ret = gst_base_video_decoder_have_frame (base_video_decoder, TRUE, NULL);
  } else {

    gst_adapter_push (base_video_decoder->input_adapter, buf);

    ret = gst_base_video_decoder_drain (base_video_decoder, FALSE);
  }

  gst_object_unref (base_video_decoder);
  return ret;
}
Example #24
static GstBuffer *
gst_tag_lib_mux_render_tag (GstTagLibMux * mux)
{
  GstTagLibMuxClass *klass;
  GstTagMergeMode merge_mode;
  GstTagSetter *tagsetter;
  GstBuffer *buffer;
  const GstTagList *tagsetter_tags;
  GstTagList *taglist;
  GstEvent *event;

  tagsetter = GST_TAG_SETTER (mux);

  tagsetter_tags = gst_tag_setter_get_tag_list (tagsetter);
  merge_mode = gst_tag_setter_get_tag_merge_mode (tagsetter);

  GST_LOG_OBJECT (mux, "merging tags, merge mode = %d", merge_mode);
  GST_LOG_OBJECT (mux, "event tags: %" GST_PTR_FORMAT, mux->event_tags);
  GST_LOG_OBJECT (mux, "set   tags: %" GST_PTR_FORMAT, tagsetter_tags);

  taglist = gst_tag_list_merge (tagsetter_tags, mux->event_tags, merge_mode);

  GST_LOG_OBJECT (mux, "final tags: %" GST_PTR_FORMAT, taglist);

  klass = GST_TAG_LIB_MUX_CLASS (G_OBJECT_GET_CLASS (mux));

  if (klass->render_tag == NULL)
    goto no_vfunc;

  buffer = klass->render_tag (mux, taglist);

  if (buffer == NULL)
    goto render_error;

  mux->tag_size = GST_BUFFER_SIZE (buffer);
  GST_LOG_OBJECT (mux, "tag size = %" G_GSIZE_FORMAT " bytes", mux->tag_size);

  /* Send newsegment event from byte position 0, so the tag really gets
   * written to the start of the file, independent of the upstream segment */
  gst_pad_push_event (mux->srcpad,
      gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_BYTES, 0, -1, 0));

  /* Send an event about the new tags to downstream elements */
  /* gst_event_new_tag takes ownership of the list, so no need to unref it */
  event = gst_event_new_tag (taglist);
  gst_pad_push_event (mux->srcpad, event);

  GST_BUFFER_OFFSET (buffer) = 0;

  return buffer;

no_vfunc:
  {
    GST_ERROR_OBJECT (mux, "Subclass does not implement render_tag vfunc!");
    gst_tag_list_free (taglist);
    return NULL;
  }

render_error:
  {
    GST_ERROR_OBJECT (mux, "Failed to render tag");
    gst_tag_list_free (taglist);
    return NULL;
  }
}
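
Writing the tag through a byte segment that starts at position 0 means every
byte position that follows is shifted by tag_size; this is the counterpart of
gst_tag_lib_mux_adjust_event_offsets() in Example #3, which adds tag_size to
the start/stop/position of the upstream newsegment before forwarding it.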
Example #25
static GstFlowReturn
gst_rsvg_dec_chain (GstPad * pad, GstBuffer * buffer)
{
  GstRsvgDec *rsvg = GST_RSVG_DEC (GST_PAD_PARENT (pad));
  gboolean completed = FALSE;
  const guint8 *data;
  guint size;
  GstFlowReturn ret = GST_FLOW_OK;

  /* first_timestamp is used slightly differently where a framerate
     is given or not.
     If there is a frame rate, it will be used as a base.
     If there is not, it will be used to keep track of the timestamp
     of the first buffer, to be used as the timestamp of the output
     buffer. When a buffer is output, first timestamp will resync to
     the next buffer's timestamp. */
  if (rsvg->first_timestamp == GST_CLOCK_TIME_NONE) {
    if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
      rsvg->first_timestamp = GST_BUFFER_TIMESTAMP (buffer);
    else if (rsvg->fps_n != 0)
      rsvg->first_timestamp = 0;
  }

  gst_adapter_push (rsvg->adapter, buffer);

  size = gst_adapter_available (rsvg->adapter);

  /* "<svg></svg>" */
  while (size >= 5 + 6 && ret == GST_FLOW_OK) {
    guint i;

    data = gst_adapter_peek (rsvg->adapter, size);
    for (i = size - 6; i >= 5; i--) {
      if (memcmp (data + i, "</svg>", 6) == 0) {
        completed = TRUE;
        size = i + 6;
        break;
      }
    }

    if (completed) {
      GstBuffer *outbuf = NULL;

      GST_LOG_OBJECT (rsvg, "have complete svg of %u bytes", size);

      data = gst_adapter_peek (rsvg->adapter, size);

      ret = gst_rsvg_decode_image (rsvg, data, size, &outbuf);
      if (ret != GST_FLOW_OK)
        break;


      if (rsvg->first_timestamp != GST_CLOCK_TIME_NONE) {
        GST_BUFFER_TIMESTAMP (outbuf) = rsvg->first_timestamp;
        GST_BUFFER_DURATION (outbuf) = GST_CLOCK_TIME_NONE;
        if (GST_BUFFER_DURATION_IS_VALID (buffer)) {
          GstClockTime end =
              GST_BUFFER_TIMESTAMP_IS_VALID (buffer) ?
              GST_BUFFER_TIMESTAMP (buffer) : rsvg->first_timestamp;
          end += GST_BUFFER_DURATION (buffer);
          GST_BUFFER_DURATION (outbuf) = end - GST_BUFFER_TIMESTAMP (outbuf);
        }
        if (rsvg->fps_n == 0) {
          rsvg->first_timestamp = GST_CLOCK_TIME_NONE;
        } else {
          GST_BUFFER_DURATION (outbuf) =
              gst_util_uint64_scale (rsvg->frame_count,
              rsvg->fps_d * GST_SECOND, rsvg->fps_n);
        }
      } else if (rsvg->fps_n != 0) {
        GST_BUFFER_TIMESTAMP (outbuf) =
            rsvg->first_timestamp + gst_util_uint64_scale (rsvg->frame_count,
            rsvg->fps_d * GST_SECOND, rsvg->fps_n);
        GST_BUFFER_DURATION (outbuf) =
            gst_util_uint64_scale (rsvg->frame_count,
            rsvg->fps_d * GST_SECOND, rsvg->fps_n);
      } else {
        GST_BUFFER_TIMESTAMP (outbuf) = rsvg->first_timestamp;
        GST_BUFFER_DURATION (outbuf) = GST_CLOCK_TIME_NONE;
      }
      rsvg->frame_count++;

      if (rsvg->need_newsegment) {
        gst_pad_push_event (rsvg->srcpad,
            gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0, -1, 0));
        rsvg->need_newsegment = FALSE;
      }

      if (rsvg->pending_events) {
        GList *l;

        for (l = rsvg->pending_events; l; l = l->next)
          gst_pad_push_event (rsvg->srcpad, l->data);
        g_list_free (rsvg->pending_events);
        rsvg->pending_events = NULL;
      }

      GST_LOG_OBJECT (rsvg, "image rendered okay");

      ret = gst_pad_push (rsvg->srcpad, outbuf);
      if (ret != GST_FLOW_OK)
        break;

      gst_adapter_flush (rsvg->adapter, size);
      size = gst_adapter_available (rsvg->adapter);
      continue;
    } else {
      break;
    }
  }

  return ret;
}
Example #26
static int64_t
gst_ffmpegdata_seek (URLContext * h, int64_t pos, int whence)
{
  GstProtocolInfo *info;
  guint64 newpos = 0;

  GST_DEBUG ("Seeking to %" G_GINT64_FORMAT ", whence=%d", pos, whence);

  info = (GstProtocolInfo *) h->priv_data;

  /* TODO : if we are push-based, we need to return sensible info */

  switch (h->flags) {
    case URL_RDONLY:
    {
      /* sinkpad */
      switch (whence) {
        case SEEK_SET:
          newpos = (guint64) pos;
          break;
        case SEEK_CUR:
          newpos = info->offset + pos;
          break;
        case SEEK_END:
        case AVSEEK_SIZE:
          /* ffmpeg wants to know the current end position in bytes ! */
        {
          GstFormat format = GST_FORMAT_BYTES;
          gint64 duration;

          GST_DEBUG ("Seek end");

          if (gst_pad_is_linked (info->pad))
            if (gst_pad_query_duration (GST_PAD_PEER (info->pad), &format,
                    &duration))
              newpos = ((guint64) duration) + pos;
        }
          break;
        default:
          g_assert (0);
          break;
      }
      /* FIXME : implement case for push-based behaviour */
      if (whence != AVSEEK_SIZE)
        info->offset = newpos;
    }
      break;
    case URL_WRONLY:
    {
      /* srcpad */
      switch (whence) {
        case SEEK_SET:
          info->offset = (guint64) pos;
          gst_pad_push_event (info->pad, gst_event_new_new_segment
              (TRUE, 1.0, GST_FORMAT_BYTES, info->offset,
                  GST_CLOCK_TIME_NONE, info->offset));
          break;
        case SEEK_CUR:
          info->offset += pos;
          gst_pad_push_event (info->pad, gst_event_new_new_segment
              (TRUE, 1.0, GST_FORMAT_BYTES, info->offset,
                  GST_CLOCK_TIME_NONE, info->offset));
          break;
        default:
          break;
      }
      newpos = info->offset;
    }
      break;
    default:
      g_assert (0);
      break;
  }

  GST_DEBUG ("Now at offset %lld (returning %lld)", info->offset, newpos);
  return newpos;
}
Example #27
static gboolean
gst_identity_event (GstBaseTransform * trans, GstEvent * event)
{
  GstIdentity *identity;
  gboolean ret = TRUE;

  identity = GST_IDENTITY (trans);

  if (!identity->silent) {
    const GstStructure *s;
    gchar *sstr;

    GST_OBJECT_LOCK (identity);
    g_free (identity->last_message);

    if ((s = gst_event_get_structure (event)))
      sstr = gst_structure_to_string (s);
    else
      sstr = g_strdup ("");

    identity->last_message =
        g_strdup_printf ("event   ******* (%s:%s) E (type: %d, %s) %p",
        GST_DEBUG_PAD_NAME (trans->sinkpad), GST_EVENT_TYPE (event), sstr,
        event);
    g_free (sstr);
    GST_OBJECT_UNLOCK (identity);

    g_object_notify (G_OBJECT (identity), "last_message");
  }

  if (identity->single_segment
      && (GST_EVENT_TYPE (event) == GST_EVENT_NEWSEGMENT)) {
    if (trans->have_newsegment == FALSE) {
      GstEvent *news;
      GstFormat format;

      gst_event_parse_new_segment (event, NULL, NULL, &format, NULL, NULL,
          NULL);

      /* This is the first newsegment, send out a (0, -1) newsegment */
      news = gst_event_new_new_segment (TRUE, 1.0, format, 0, -1, 0);

      if (!(gst_pad_event_default (trans->sinkpad, news)))
        return FALSE;
    }
  }

  /* Reset previous timestamp, duration and offsets on NEWSEGMENT
   * to prevent false warnings when checking for perfect streams */
  if (GST_EVENT_TYPE (event) == GST_EVENT_NEWSEGMENT) {
    identity->prev_timestamp = identity->prev_duration = GST_CLOCK_TIME_NONE;
    identity->prev_offset = identity->prev_offset_end = GST_BUFFER_OFFSET_NONE;
  }

  ret = parent_class->event (trans, event);

  if (identity->single_segment
      && (GST_EVENT_TYPE (event) == GST_EVENT_NEWSEGMENT)) {
    /* eat up segments */
    ret = FALSE;
  }

  return ret;
}
Example #28
/* This function is used to perform seeks on the element in
 * pull mode.
 *
 * It also works when event is NULL, in which case it will just
 * start from the last configured segment. This technique is
 * used when activating the element and to perform the seek in
 * READY.
 */
static gboolean
gst_aiff_parse_perform_seek (GstAiffParse * aiff, GstEvent * event)
{
  gboolean res;
  gdouble rate;
  GstFormat format, bformat;
  GstSeekFlags flags;
  GstSeekType cur_type = GST_SEEK_TYPE_NONE, stop_type;
  gint64 cur, stop, upstream_size;
  gboolean flush;
  gboolean update;
  GstSegment seeksegment = { 0, };
  gint64 last_stop;

  if (event) {
    GST_DEBUG_OBJECT (aiff, "doing seek with event");

    gst_event_parse_seek (event, &rate, &format, &flags,
        &cur_type, &cur, &stop_type, &stop);

    /* no negative rates yet */
    if (rate < 0.0)
      goto negative_rate;

    if (format != aiff->segment.format) {
      GST_INFO_OBJECT (aiff, "converting seek-event from %s to %s",
          gst_format_get_name (format),
          gst_format_get_name (aiff->segment.format));
      res = TRUE;
      if (cur_type != GST_SEEK_TYPE_NONE)
        res =
            gst_pad_query_convert (aiff->srcpad, format, cur,
            &aiff->segment.format, &cur);
      if (res && stop_type != GST_SEEK_TYPE_NONE)
        res =
            gst_pad_query_convert (aiff->srcpad, format, stop,
            &aiff->segment.format, &stop);
      if (!res)
        goto no_format;

      format = aiff->segment.format;
    }
  } else {
    GST_DEBUG_OBJECT (aiff, "doing seek without event");
    flags = 0;
    rate = 1.0;
    cur_type = GST_SEEK_TYPE_SET;
    stop_type = GST_SEEK_TYPE_SET;
  }

  /* get flush flag */
  flush = flags & GST_SEEK_FLAG_FLUSH;

  /* now we need to make sure the streaming thread is stopped. We do this by
   * sending a FLUSH_START event downstream, which will cause the
   * streaming thread to stop with a WRONG_STATE flow return.
   * For a non-flushing seek we simply pause the task, which will happen as
   * soon as it completes one iteration (and thus might block when the sink
   * is blocking in preroll). */
  if (flush) {
    GST_DEBUG_OBJECT (aiff, "sending flush start");
    gst_pad_push_event (aiff->srcpad, gst_event_new_flush_start ());
  } else {
    gst_pad_pause_task (aiff->sinkpad);
  }

  /* we should now be able to grab the streaming thread because we stopped it
   * with the above flush/pause code */
  GST_PAD_STREAM_LOCK (aiff->sinkpad);

  /* save current position */
  last_stop = aiff->segment.last_stop;

  GST_DEBUG_OBJECT (aiff, "stopped streaming at %" G_GINT64_FORMAT, last_stop);

  /* copy segment, we need this because we still need the old
   * segment when we close the current segment. */
  memcpy (&seeksegment, &aiff->segment, sizeof (GstSegment));

  /* configure the seek parameters in the seeksegment. We will then have the
   * right values in the segment to perform the seek */
  if (event) {
    GST_DEBUG_OBJECT (aiff, "configuring seek");
    gst_segment_set_seek (&seeksegment, rate, format, flags,
        cur_type, cur, stop_type, stop, &update);
  }

  /* figure out the last position we need to play. If it's configured (stop !=
   * -1), use that, else we play until the total duration of the file */
  if ((stop = seeksegment.stop) == -1)
    stop = seeksegment.duration;

  GST_DEBUG_OBJECT (aiff, "cur_type =%d", cur_type);
  if ((cur_type != GST_SEEK_TYPE_NONE)) {
    /* bring offset to bytes, if the bps is 0, we have the segment in BYTES and
     * we can just copy the last_stop. If not, we use the bps to convert TIME to
     * bytes. */
    if (aiff->bps > 0)
      aiff->offset =
          uint64_ceiling_scale (seeksegment.last_stop, (guint64) aiff->bps,
          GST_SECOND);
    else
      aiff->offset = seeksegment.last_stop;
    GST_LOG_OBJECT (aiff, "offset=%" G_GUINT64_FORMAT, aiff->offset);
    aiff->offset -= (aiff->offset % aiff->bytes_per_sample);
    GST_LOG_OBJECT (aiff, "offset=%" G_GUINT64_FORMAT, aiff->offset);
    aiff->offset += aiff->datastart;
    GST_LOG_OBJECT (aiff, "offset=%" G_GUINT64_FORMAT, aiff->offset);
  } else {
    GST_LOG_OBJECT (aiff, "continue from offset=%" G_GUINT64_FORMAT,
        aiff->offset);
  }

  if (stop_type != GST_SEEK_TYPE_NONE) {
    if (aiff->bps > 0)
      aiff->end_offset =
          uint64_ceiling_scale (stop, (guint64) aiff->bps, GST_SECOND);
    else
      aiff->end_offset = stop;
    GST_LOG_OBJECT (aiff, "end_offset=%" G_GUINT64_FORMAT, aiff->end_offset);
    aiff->end_offset -= (aiff->end_offset % aiff->bytes_per_sample);
    GST_LOG_OBJECT (aiff, "end_offset=%" G_GUINT64_FORMAT, aiff->end_offset);
    aiff->end_offset += aiff->datastart;
    GST_LOG_OBJECT (aiff, "end_offset=%" G_GUINT64_FORMAT, aiff->end_offset);
  } else {
    GST_LOG_OBJECT (aiff, "continue to end_offset=%" G_GUINT64_FORMAT,
        aiff->end_offset);
  }

  /* make sure filesize is not exceeded due to rounding errors or so,
   * same precaution as in _stream_headers */
  bformat = GST_FORMAT_BYTES;
  if (gst_pad_query_peer_duration (aiff->sinkpad, &bformat, &upstream_size))
    aiff->end_offset = MIN (aiff->end_offset, upstream_size);

  /* this is the range of bytes we will use for playback */
  aiff->offset = MIN (aiff->offset, aiff->end_offset);
  aiff->dataleft = aiff->end_offset - aiff->offset;

  GST_DEBUG_OBJECT (aiff,
      "seek: rate %lf, offset %" G_GUINT64_FORMAT ", end %" G_GUINT64_FORMAT
      ", segment %" GST_TIME_FORMAT " -- %" GST_TIME_FORMAT, rate, aiff->offset,
      aiff->end_offset, GST_TIME_ARGS (seeksegment.start),
      GST_TIME_ARGS (stop));

  /* prepare for streaming again */
  if (flush) {
    /* if we sent a FLUSH_START, we now send a FLUSH_STOP */
    GST_DEBUG_OBJECT (aiff, "sending flush stop");
    gst_pad_push_event (aiff->srcpad, gst_event_new_flush_stop ());
  } else if (aiff->segment_running) {
    /* we are running the current segment and doing a non-flushing seek,
     * close the segment first based on the previous last_stop. */
    GST_DEBUG_OBJECT (aiff, "closing running segment %" G_GINT64_FORMAT
        " to %" G_GINT64_FORMAT, aiff->segment.accum, aiff->segment.last_stop);

    /* queue the segment for sending in the stream thread */
    if (aiff->close_segment)
      gst_event_unref (aiff->close_segment);
    aiff->close_segment = gst_event_new_new_segment (TRUE,
        aiff->segment.rate, aiff->segment.format,
        aiff->segment.accum, aiff->segment.last_stop, aiff->segment.accum);

    /* keep track of our last_stop */
    seeksegment.accum = aiff->segment.last_stop;
  }

  /* now we did the seek and can activate the new segment values */
  memcpy (&aiff->segment, &seeksegment, sizeof (GstSegment));

  /* if we're doing a segment seek, post a SEGMENT_START message */
  if (aiff->segment.flags & GST_SEEK_FLAG_SEGMENT) {
    gst_element_post_message (GST_ELEMENT_CAST (aiff),
        gst_message_new_segment_start (GST_OBJECT_CAST (aiff),
            aiff->segment.format, aiff->segment.last_stop));
  }

  /* now create the newsegment */
  GST_DEBUG_OBJECT (aiff, "Creating newsegment from %" G_GINT64_FORMAT
      " to %" G_GINT64_FORMAT, aiff->segment.last_stop, stop);

  /* store the newsegment event so it can be sent from the streaming thread. */
  if (aiff->start_segment)
    gst_event_unref (aiff->start_segment);
  aiff->start_segment =
      gst_event_new_new_segment (FALSE, aiff->segment.rate,
      aiff->segment.format, aiff->segment.last_stop, stop,
      aiff->segment.last_stop);

  /* mark discont if we are going to stream from another position. */
  if (last_stop != aiff->segment.last_stop) {
    GST_DEBUG_OBJECT (aiff, "mark DISCONT, we did a seek to another position");
    aiff->discont = TRUE;
  }

  /* and start the streaming task again */
  aiff->segment_running = TRUE;
  if (!aiff->streaming) {
    gst_pad_start_task (aiff->sinkpad, (GstTaskFunction) gst_aiff_parse_loop,
        aiff->sinkpad);
  }

  GST_PAD_STREAM_UNLOCK (aiff->sinkpad);

  return TRUE;

  /* ERRORS */
negative_rate:
  {
    GST_DEBUG_OBJECT (aiff, "negative playback rates are not supported yet.");
    return FALSE;
  }
no_format:
  {
    GST_DEBUG_OBJECT (aiff, "unsupported format given, seek aborted.");
    return FALSE;
  }
}
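
The seek handler above only queues close_segment and start_segment; a sketch
of the counterpart in the streaming task (assumed from how the fields are set
up here, not shown in the source) would push them before the next data:

  if (aiff->close_segment) {
    gst_pad_push_event (aiff->srcpad, aiff->close_segment);
    aiff->close_segment = NULL;
  }
  if (aiff->start_segment) {
    gst_pad_push_event (aiff->srcpad, aiff->start_segment);
    aiff->start_segment = NULL;
  }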
Example #29
void QGstreamerPlayerSession::setVideoRenderer(QObject *videoOutput)
{
    if (m_videoOutput != videoOutput) {
        if (m_videoOutput) {
            disconnect(m_videoOutput, SIGNAL(sinkChanged()),
                       this, SLOT(updateVideoRenderer()));
            disconnect(m_videoOutput, SIGNAL(readyChanged(bool)),
                   this, SLOT(updateVideoRenderer()));
        }

        if (videoOutput) {
            connect(videoOutput, SIGNAL(sinkChanged()),
                    this, SLOT(updateVideoRenderer()));
            connect(videoOutput, SIGNAL(readyChanged(bool)),
                   this, SLOT(updateVideoRenderer()));
        }

        m_videoOutput = videoOutput;
    }

    QGstreamerVideoRendererInterface* renderer = qobject_cast<QGstreamerVideoRendererInterface*>(videoOutput);   

    m_renderer = renderer;

#ifdef DEBUG_VO_BIN_DUMP
    dumpNum++;

    _gst_debug_bin_to_dot_file(GST_BIN(m_playbin),
                                  GstDebugGraphDetails(GST_DEBUG_GRAPH_SHOW_ALL /* GST_DEBUG_GRAPH_SHOW_MEDIA_TYPE | GST_DEBUG_GRAPH_SHOW_NON_DEFAULT_PARAMS | GST_DEBUG_GRAPH_SHOW_STATES*/),
                                  QString("playbin_%1_set").arg(dumpNum).toAscii().constData());
#endif

    GstElement *videoSink = m_renderer ? m_renderer->videoSink() : m_nullVideoSink;
    if (!videoSink)
        videoSink = m_nullVideoSink;

#ifdef DEBUG_PLAYBIN
    qDebug() << "Set video output:" << videoOutput;
    qDebug() << "Current sink:" << (m_videoSink ? GST_ELEMENT_NAME(m_videoSink) : "") <<  m_videoSink
             << "pending:" << (m_pendingVideoSink ? GST_ELEMENT_NAME(m_pendingVideoSink) : "") << m_pendingVideoSink
             << "new sink:" << (videoSink ? GST_ELEMENT_NAME(videoSink) : "") << videoSink;
#endif

    if (m_pendingVideoSink == videoSink ||
        (m_pendingVideoSink == 0 && m_videoSink == videoSink)) {
#ifdef DEBUG_PLAYBIN
        qDebug() << "Video sink has not changed, skip video output reconfiguration";
#endif
        return;
    }

#ifdef DEBUG_PLAYBIN
    qDebug() << "Reconfigure video output";
#endif

    if (m_state == QMediaPlayer::StoppedState) {
#ifdef DEBUG_PLAYBIN
        qDebug() << "The pipeline has not started yet, pending state:" << m_pendingState;
#endif
        //the pipeline has not started yet
        m_pendingVideoSink = 0;        
        gst_element_set_state(m_videoSink, GST_STATE_NULL);
        gst_element_set_state(m_playbin, GST_STATE_NULL);
        gst_element_unlink(m_videoIdentity, m_videoSink);

        gst_bin_remove(GST_BIN(m_videoOutputBin), m_videoSink);

        m_videoSink = videoSink;

        gst_bin_add(GST_BIN(m_videoOutputBin), m_videoSink);
        gst_element_link(m_videoIdentity, m_videoSink);

        switch (m_pendingState) {
        case QMediaPlayer::PausedState:
            gst_element_set_state(m_playbin, GST_STATE_PAUSED);
            break;
        case QMediaPlayer::PlayingState:
            gst_element_set_state(m_playbin, GST_STATE_PLAYING);
            break;
        default:
            break;
        }
    } else {
        if (m_pendingVideoSink) {
#ifdef DEBUG_PLAYBIN
            qDebug() << "already waiting for pad to be blocked, just change the pending sink";
#endif
            m_pendingVideoSink = videoSink;
            return;
        }

        m_pendingVideoSink = videoSink;

#ifdef DEBUG_PLAYBIN
        qDebug() << "Blocking the video output pad...";
#endif

        {
#ifdef DEBUG_PLAYBIN
            qDebug() << "send the last new segment event to the video output...";
#endif
            GstEvent *event = gst_event_new_new_segment(TRUE,
                                                        m_segment.rate,
                                                        m_segment.format,
                                                        m_segment.last_stop, //start
                                                        m_segment.stop,
                                                        m_segment.last_stop);//position

            GstPad *pad = gst_element_get_static_pad(videoSink, "sink");
            //gst_pad_send_event(pad, m_lastSegmentEvent);
            gst_pad_send_event(pad, event);
            gst_object_unref(GST_OBJECT(pad));
        }

        //block pads, async to avoid locking in paused state
        GstPad *srcPad = gst_element_get_static_pad(m_videoIdentity, "src");
        gst_pad_set_blocked_async(srcPad, true, &block_pad_cb, this);
        gst_object_unref(GST_OBJECT(srcPad));
    }
}
Example #30
static GstFlowReturn
gst_multipart_demux_chain (GstPad * pad, GstBuffer * buf)
{
    GstMultipartDemux *multipart;
    GstAdapter *adapter;
    GstClockTime timestamp;
    gint size = 1;
    GstFlowReturn res;

    multipart = GST_MULTIPART_DEMUX (gst_pad_get_parent (pad));
    adapter = multipart->adapter;

    res = GST_FLOW_OK;

    timestamp = GST_BUFFER_TIMESTAMP (buf);

    if (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT)) {
        gst_adapter_clear (adapter);
    }
    gst_adapter_push (adapter, buf);

    while (gst_adapter_available (adapter) > 0) {
        GstMultipartPad *srcpad;
        GstBuffer *outbuf;
        gboolean created;
        gint datalen;

        if (G_UNLIKELY (!multipart->header_completed)) {
            if ((size = multipart_parse_header (multipart)) < 0) {
                goto nodata;
            } else {
                gst_adapter_flush (adapter, size);
                multipart->header_completed = TRUE;
            }
        }
        if ((size = multipart_find_boundary (multipart, &datalen)) < 0) {
            goto nodata;
        }

        /* Invalidate header info */
        multipart->header_completed = FALSE;
        multipart->content_length = -1;

        if (G_UNLIKELY (datalen <= 0)) {
            GST_DEBUG_OBJECT (multipart, "skipping empty content.");
            gst_adapter_flush (adapter, size - datalen);
        } else {
            srcpad =
                gst_multipart_find_pad_by_mime (multipart,
                                                multipart->mime_type, &created);
            outbuf = gst_adapter_take_buffer (adapter, datalen);
            gst_adapter_flush (adapter, size - datalen);

            gst_buffer_set_caps (outbuf, GST_PAD_CAPS (srcpad->pad));
            if (created) {
                GstTagList *tags;

                /* Push new segment, first buffer has 0 timestamp */
                gst_pad_push_event (srcpad->pad,
                                    gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0, -1, 0));

                tags =
                    gst_tag_list_new_full (GST_TAG_CONTAINER_FORMAT, "Multipart", NULL);
                gst_pad_push_event (srcpad->pad, gst_event_new_tag (tags));

                GST_BUFFER_TIMESTAMP (outbuf) = 0;
            } else {
                GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
            }
            GST_DEBUG_OBJECT (multipart,
                              "pushing buffer with timestamp %" GST_TIME_FORMAT,
                              GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)));
            GST_DEBUG_OBJECT (multipart, "buffer has caps %" GST_PTR_FORMAT,
                              GST_BUFFER_CAPS (outbuf));
            res = gst_pad_push (srcpad->pad, outbuf);
            res = gst_multipart_combine_flows (multipart, srcpad, res);
            if (res != GST_FLOW_OK)
                break;
        }
    }

nodata:
    gst_object_unref (multipart);

    if (G_UNLIKELY (size == MULTIPART_DATA_ERROR))
        return GST_FLOW_ERROR;
    if (G_UNLIKELY (size == MULTIPART_DATA_EOS))
        return GST_FLOW_UNEXPECTED;

    return res;
}