static GstFlowReturn
gst_y4m_dec_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
  GstY4mDec *y4mdec;
  int n_avail;
  GstFlowReturn flow_ret = GST_FLOW_OK;
#define MAX_HEADER_LENGTH 80
  char header[MAX_HEADER_LENGTH];
  int i;
  int len;

  y4mdec = GST_Y4M_DEC (parent);

  GST_DEBUG_OBJECT (y4mdec, "chain");

  if (GST_BUFFER_IS_DISCONT (buffer)) {
    GST_DEBUG ("got discont");
    gst_adapter_clear (y4mdec->adapter);
  }

  gst_adapter_push (y4mdec->adapter, buffer);
  n_avail = gst_adapter_available (y4mdec->adapter);

  if (!y4mdec->have_header) {
    gboolean ret;
    GstCaps *caps;
    GstQuery *query;

    if (n_avail < MAX_HEADER_LENGTH)
      return GST_FLOW_OK;

    gst_adapter_copy (y4mdec->adapter, (guint8 *) header, 0, MAX_HEADER_LENGTH);

    header[MAX_HEADER_LENGTH - 1] = 0;
    for (i = 0; i < MAX_HEADER_LENGTH; i++) {
      if (header[i] == 0x0a)
        header[i] = 0;
    }

    ret = gst_y4m_dec_parse_header (y4mdec, header);
    if (!ret) {
      GST_ELEMENT_ERROR (y4mdec, STREAM, DECODE,
          ("Failed to parse YUV4MPEG header"), (NULL));
      return GST_FLOW_ERROR;
    }

    y4mdec->header_size = strlen (header) + 1;
    gst_adapter_flush (y4mdec->adapter, y4mdec->header_size);

    caps = gst_video_info_to_caps (&y4mdec->info);
    ret = gst_pad_set_caps (y4mdec->srcpad, caps);

    query = gst_query_new_allocation (caps, FALSE);
    y4mdec->video_meta = FALSE;

    if (y4mdec->pool) {
      gst_buffer_pool_set_active (y4mdec->pool, FALSE);
      gst_object_unref (y4mdec->pool);
    }
    y4mdec->pool = NULL;

    if (gst_pad_peer_query (y4mdec->srcpad, query)) {
      y4mdec->video_meta =
          gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL);

      /* We only need a pool if we need to do stride conversion for downstream */
      if (!y4mdec->video_meta && memcmp (&y4mdec->info, &y4mdec->out_info,
              sizeof (y4mdec->info)) != 0) {
        GstBufferPool *pool = NULL;
        GstAllocator *allocator = NULL;
        GstAllocationParams params;
        GstStructure *config;
        guint size, min, max;

        if (gst_query_get_n_allocation_params (query) > 0) {
          gst_query_parse_nth_allocation_param (query, 0, &allocator, &params);
        } else {
          allocator = NULL;
          gst_allocation_params_init (&params);
        }

        if (gst_query_get_n_allocation_pools (query) > 0) {
          gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min,
              &max);
          size = MAX (size, y4mdec->out_info.size);
        } else {
          pool = NULL;
          size = y4mdec->out_info.size;
          min = max = 0;
        }

        if (pool == NULL) {
          pool = gst_video_buffer_pool_new ();
        }

        config = gst_buffer_pool_get_config (pool);
        gst_buffer_pool_config_set_params (config, caps, size, min, max);
        gst_buffer_pool_config_set_allocator (config, allocator, &params);
        gst_buffer_pool_set_config (pool, config);

        if (allocator)
          gst_object_unref (allocator);

        y4mdec->pool = pool;
      }
    } else if (memcmp (&y4mdec->info, &y4mdec->out_info,
            sizeof (y4mdec->info)) != 0) {
      GstBufferPool *pool;
      GstStructure *config;

      /* No pool, create our own if we need to do stride conversion */
      pool = gst_video_buffer_pool_new ();
      config = gst_buffer_pool_get_config (pool);
      gst_buffer_pool_config_set_params (config, caps, y4mdec->out_info.size, 0,
          0);
      gst_buffer_pool_set_config (pool, config);
      y4mdec->pool = pool;
    }
    if (y4mdec->pool) {
      gst_buffer_pool_set_active (y4mdec->pool, TRUE);
    }
    gst_query_unref (query);
    gst_caps_unref (caps);
    if (!ret) {
      GST_DEBUG_OBJECT (y4mdec, "Couldn't set caps on src pad");
      return GST_FLOW_ERROR;
    }

    y4mdec->have_header = TRUE;
  }

  if (y4mdec->have_new_segment) {
    GstEvent *event;
    GstClockTime start = gst_y4m_dec_bytes_to_timestamp (y4mdec,
        y4mdec->segment.start);
    GstClockTime stop = gst_y4m_dec_bytes_to_timestamp (y4mdec,
        y4mdec->segment.stop);
    GstClockTime time = gst_y4m_dec_bytes_to_timestamp (y4mdec,
        y4mdec->segment.time);
    GstSegment seg;

    gst_segment_init (&seg, GST_FORMAT_TIME);
    seg.start = start;
    seg.stop = stop;
    seg.time = time;
    event = gst_event_new_segment (&seg);

    gst_pad_push_event (y4mdec->srcpad, event);
    /* gst_pad_push_event() takes ownership of the event */

    y4mdec->have_new_segment = FALSE;
    y4mdec->frame_index = gst_y4m_dec_bytes_to_frames (y4mdec,
        y4mdec->segment.time);
    GST_DEBUG ("new frame_index %d", y4mdec->frame_index);

  }

  while (1) {
    n_avail = gst_adapter_available (y4mdec->adapter);
    if (n_avail < MAX_HEADER_LENGTH)
      break;

    gst_adapter_copy (y4mdec->adapter, (guint8 *) header, 0, MAX_HEADER_LENGTH);
    header[MAX_HEADER_LENGTH - 1] = 0;
    for (i = 0; i < MAX_HEADER_LENGTH; i++) {
      if (header[i] == 0x0a)
        header[i] = 0;
    }
    if (memcmp (header, "FRAME", 5) != 0) {
      GST_ELEMENT_ERROR (y4mdec, STREAM, DECODE,
          ("Failed to parse YUV4MPEG frame"), (NULL));
      flow_ret = GST_FLOW_ERROR;
      break;
    }

    len = strlen (header);
    if (n_avail < y4mdec->info.size + len + 1) {
      /* not enough data */
      GST_DEBUG ("not enough data for frame %d < %" G_GSIZE_FORMAT,
          n_avail, y4mdec->info.size + len + 1);
      break;
    }

    gst_adapter_flush (y4mdec->adapter, len + 1);

    buffer = gst_adapter_take_buffer (y4mdec->adapter, y4mdec->info.size);

    GST_BUFFER_TIMESTAMP (buffer) =
        gst_y4m_dec_frames_to_timestamp (y4mdec, y4mdec->frame_index);
    GST_BUFFER_DURATION (buffer) =
        gst_y4m_dec_frames_to_timestamp (y4mdec, y4mdec->frame_index + 1) -
        GST_BUFFER_TIMESTAMP (buffer);

    y4mdec->frame_index++;

    if (y4mdec->video_meta) {
      gst_buffer_add_video_meta_full (buffer, 0, y4mdec->info.finfo->format,
          y4mdec->info.width, y4mdec->info.height, y4mdec->info.finfo->n_planes,
          y4mdec->info.offset, y4mdec->info.stride);
    } else if (memcmp (&y4mdec->info, &y4mdec->out_info,
            sizeof (y4mdec->info)) != 0) {
      GstBuffer *outbuf;
      GstVideoFrame iframe, oframe;
      gint i, j;
      gint w, h, istride, ostride;
      guint8 *src, *dest;

      /* Allocate a new buffer and do stride conversion */
      g_assert (y4mdec->pool != NULL);

      flow_ret = gst_buffer_pool_acquire_buffer (y4mdec->pool, &outbuf, NULL);
      if (flow_ret != GST_FLOW_OK) {
        gst_buffer_unref (buffer);
        break;
      }

      gst_video_frame_map (&iframe, &y4mdec->info, buffer, GST_MAP_READ);
      gst_video_frame_map (&oframe, &y4mdec->out_info, outbuf, GST_MAP_WRITE);

      for (i = 0; i < 3; i++) {
        w = GST_VIDEO_FRAME_COMP_WIDTH (&iframe, i);
        h = GST_VIDEO_FRAME_COMP_HEIGHT (&iframe, i);
        istride = GST_VIDEO_FRAME_COMP_STRIDE (&iframe, i);
        ostride = GST_VIDEO_FRAME_COMP_STRIDE (&oframe, i);
        src = GST_VIDEO_FRAME_COMP_DATA (&iframe, i);
        dest = GST_VIDEO_FRAME_COMP_DATA (&oframe, i);

        for (j = 0; j < h; j++) {
          memcpy (dest, src, w);

          dest += ostride;
          src += istride;
        }
      }

      gst_video_frame_unmap (&iframe);
      gst_video_frame_unmap (&oframe);
      gst_buffer_copy_into (outbuf, buffer, GST_BUFFER_COPY_TIMESTAMPS, 0, -1);
      gst_buffer_unref (buffer);
      buffer = outbuf;
    }

    flow_ret = gst_pad_push (y4mdec->srcpad, buffer);
    if (flow_ret != GST_FLOW_OK)
      break;
  }

  GST_DEBUG ("returning %d", flow_ret);

  return flow_ret;
}
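
The examples on this page all revolve around the same core step: initialize a GstSegment, wrap it in a segment event and push it downstream before any timestamped data. A minimal sketch of that pattern, with a hypothetical push_initial_segment() helper (not part of the y4mdec sources), might look like this:

static void
push_initial_segment (GstPad * srcpad, GstClockTime start)
{
  GstSegment segment;

  /* a segment in TIME format starting at 'start' */
  gst_segment_init (&segment, GST_FORMAT_TIME);
  segment.start = start;
  segment.time = start;

  /* gst_pad_push_event() takes ownership of the event */
  gst_pad_push_event (srcpad, gst_event_new_segment (&segment));
}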
Example #2
static void
_create_issues (GstValidateRunner * runner)
{
  GstPad *srcpad1, *srcpad2, *sinkpad, *funnel_sink1, *funnel_sink2;
  GstElement *src1, *src2, *sink, *fakemixer;
  GstSegment segment;

  src1 = create_and_monitor_element ("fakesrc2", "fakesrc1", runner);
  src2 = create_and_monitor_element ("fakesrc2", "fakesrc2", runner);
  fakemixer = create_and_monitor_element ("fakemixer", "fakemixer", runner);
  sink = create_and_monitor_element ("fakesink", "fakesink", runner);

  srcpad1 = gst_element_get_static_pad (src1, "src");
  srcpad2 = gst_element_get_static_pad (src2, "src");
  funnel_sink1 = gst_element_get_request_pad (fakemixer, "sink_%u");
  funnel_sink2 = gst_element_get_request_pad (fakemixer, "sink_%u");
  sinkpad = gst_element_get_static_pad (sink, "sink");

  fail_unless (gst_element_link (fakemixer, sink));
  fail_unless (gst_pad_link (srcpad1, funnel_sink1) == GST_PAD_LINK_OK);
  fail_unless (gst_pad_link (srcpad2, funnel_sink2) == GST_PAD_LINK_OK);

  /* We want to handle the src behaviour ourselves */
  fail_unless (gst_pad_activate_mode (srcpad1, GST_PAD_MODE_PUSH, TRUE));
  fail_unless (gst_pad_activate_mode (srcpad2, GST_PAD_MODE_PUSH, TRUE));

  /* Setup all needed events */
  gst_segment_init (&segment, GST_FORMAT_TIME);
  segment.start = 0;
  segment.stop = GST_SECOND;

  fail_unless (gst_pad_push_event (srcpad1,
          gst_event_new_stream_start ("the-stream")));
  fail_unless (gst_pad_push_event (srcpad1, gst_event_new_segment (&segment)));

  fail_unless (gst_pad_push_event (srcpad2,
          gst_event_new_stream_start ("the-stream")));
  fail_unless (gst_pad_push_event (srcpad2, gst_event_new_segment (&segment)));

  fail_unless_equals_int (gst_element_set_state (fakemixer, GST_STATE_PLAYING),
      GST_STATE_CHANGE_SUCCESS);
  fail_unless_equals_int (gst_element_set_state (sink, GST_STATE_PLAYING),
      GST_STATE_CHANGE_ASYNC);

  /* Send an unexpected flush stop */
  _gst_check_expecting_log = TRUE;
  fail_unless (gst_pad_push_event (srcpad1, gst_event_new_flush_stop (TRUE)));

  /* Once again but on the other fakemixer sink */
  fail_unless (gst_pad_push_event (srcpad2, gst_event_new_flush_stop (TRUE)));

  /* clean up */
  fail_unless (gst_pad_activate_mode (srcpad1, GST_PAD_MODE_PUSH, FALSE));
  fail_unless (gst_pad_activate_mode (srcpad2, GST_PAD_MODE_PUSH, FALSE));
  fail_unless_equals_int (gst_element_set_state (fakemixer, GST_STATE_NULL),
      GST_STATE_CHANGE_SUCCESS);
  fail_unless_equals_int (gst_element_set_state (sink, GST_STATE_NULL),
      GST_STATE_CHANGE_SUCCESS);

  gst_object_unref (srcpad1);
  gst_object_unref (srcpad2);
  gst_object_unref (sinkpad);
  gst_object_unref (funnel_sink1);
  gst_object_unref (funnel_sink2);
  gst_check_objects_destroyed_on_unref (fakemixer, funnel_sink1, funnel_sink2,
      NULL);
  gst_check_objects_destroyed_on_unref (src1, srcpad1, NULL);
  gst_check_objects_destroyed_on_unref (src2, srcpad2, NULL);
  gst_check_objects_destroyed_on_unref (sink, sinkpad, NULL);
}
/**
 * gst_wrapper_camera_bin_src_vidsrc_probe:
 *
 * Buffer probe called before sending each buffer to the image queue.
 */
static GstPadProbeReturn
gst_wrapper_camera_bin_src_vidsrc_probe (GstPad * pad, GstPadProbeInfo * info,
    gpointer data)
{
  GstWrapperCameraBinSrc *self = GST_WRAPPER_CAMERA_BIN_SRC (data);
  GstBaseCameraSrc *camerasrc = GST_BASE_CAMERA_SRC_CAST (self);
  GstPadProbeReturn ret = GST_PAD_PROBE_DROP;
  GstBuffer *buffer = GST_BUFFER (info->data);

  GST_LOG_OBJECT (self, "Video probe, mode %d, capture status %d",
      camerasrc->mode, self->video_rec_status);

  /* TODO do we want to lock for every buffer? */
  /*
   * Note that we can use gst_pad_push_event here because we are a buffer
   * probe.
   */
  /* TODO shouldn't access this directly */
  g_mutex_lock (&camerasrc->capturing_mutex);
  if (self->video_rec_status == GST_VIDEO_RECORDING_STATUS_DONE) {
    /* NOP */
  } else if (self->video_rec_status == GST_VIDEO_RECORDING_STATUS_STARTING) {
    GstClockTime ts;
    GstSegment segment;
    GstCaps *caps;
    GstSample *sample;

    GST_DEBUG_OBJECT (self, "Starting video recording");
    self->video_rec_status = GST_VIDEO_RECORDING_STATUS_RUNNING;

    ts = GST_BUFFER_TIMESTAMP (buffer);
    if (!GST_CLOCK_TIME_IS_VALID (ts))
      ts = 0;
    gst_segment_init (&segment, GST_FORMAT_TIME);
    segment.start = ts;
    gst_pad_push_event (self->vidsrc, gst_event_new_segment (&segment));

    /* post preview */
    GST_DEBUG_OBJECT (self, "Posting preview for video");
    caps = gst_pad_get_current_caps (pad);
    sample = gst_sample_new (buffer, caps, NULL, NULL);
    gst_base_camera_src_post_preview (camerasrc, sample);
    gst_caps_unref (caps);
    gst_sample_unref (sample);

    ret = GST_PAD_PROBE_OK;
  } else if (self->video_rec_status == GST_VIDEO_RECORDING_STATUS_FINISHING) {
    GstPad *peer;

    /* send eos */
    GST_DEBUG_OBJECT (self, "Finishing video recording, pushing eos");

    peer = gst_pad_get_peer (self->vidsrc);

    if (peer) {
      /* send to the peer as we don't want our pads with eos flag */
      gst_pad_send_event (peer, gst_event_new_eos ());
      gst_object_unref (peer);
    } else {
      GST_WARNING_OBJECT (camerasrc, "No peer pad for vidsrc");
    }
    self->video_rec_status = GST_VIDEO_RECORDING_STATUS_DONE;
    gst_base_camera_src_finish_capture (camerasrc);
  } else {
    ret = GST_PAD_PROBE_OK;
  }
  g_mutex_unlock (&camerasrc->capturing_mutex);
  return ret;
}
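
For context, a pad probe like the one above is normally installed with gst_pad_add_probe(). A minimal sketch follows; the attach_video_probe() helper and the point where it would be called are assumptions, not part of the camerabin sources:

static void
attach_video_probe (GstWrapperCameraBinSrc * self, GstPad * pad)
{
  /* call the probe above for every buffer flowing through 'pad' */
  gst_pad_add_probe (pad, GST_PAD_PROBE_TYPE_BUFFER,
      gst_wrapper_camera_bin_src_vidsrc_probe, self, NULL);
}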
Example #4
static void
test_basic (const gchar * elem_name, const gchar * sink2, int count,
    check_cb cb)
{
  GstElement *rtpmux = NULL;
  GstPad *reqpad1 = NULL;
  GstPad *reqpad2 = NULL;
  GstPad *src1 = NULL;
  GstPad *src2 = NULL;
  GstPad *sink = NULL;
  GstBuffer *inbuf = NULL;
  GstCaps *src1caps = NULL;
  GstCaps *src2caps = NULL;
  GstCaps *sinkcaps = NULL;
  GstCaps *caps;
  GstSegment segment;
  int i;

  rtpmux = gst_check_setup_element (elem_name);

  reqpad1 = gst_element_get_request_pad (rtpmux, "sink_1");
  fail_unless (reqpad1 != NULL);
  reqpad2 = gst_element_get_request_pad (rtpmux, sink2);
  fail_unless (reqpad2 != NULL);
  sink = gst_check_setup_sink_pad_by_name (rtpmux, &sinktemplate, "src");

  src1 = gst_pad_new_from_static_template (&srctemplate, "src");
  src2 = gst_pad_new_from_static_template (&srctemplate, "src");
  fail_unless (gst_pad_link (src1, reqpad1) == GST_PAD_LINK_OK);
  fail_unless (gst_pad_link (src2, reqpad2) == GST_PAD_LINK_OK);
  gst_pad_set_query_function (src1, query_func);
  gst_pad_set_query_function (src2, query_func);
  gst_pad_set_query_function (sink, query_func);
  gst_pad_set_event_function (sink, event_func);
  g_object_set_data (G_OBJECT (src1), "caps", &src1caps);
  g_object_set_data (G_OBJECT (src2), "caps", &src2caps);
  g_object_set_data (G_OBJECT (sink), "caps", &sinkcaps);

  src1caps = gst_caps_new_simple ("application/x-rtp",
      "clock-rate", G_TYPE_INT, 1, "ssrc", G_TYPE_UINT, 11, NULL);
  src2caps = gst_caps_new_simple ("application/x-rtp",
      "clock-rate", G_TYPE_INT, 2, "ssrc", G_TYPE_UINT, 12, NULL);
  sinkcaps = gst_caps_new_simple ("application/x-rtp",
      "clock-rate", G_TYPE_INT, 3, "ssrc", G_TYPE_UINT, 13, NULL);

  caps = gst_pad_peer_query_caps (src1, NULL);
  fail_unless (gst_caps_is_empty (caps));
  gst_caps_unref (caps);

  gst_caps_set_simple (src2caps, "clock-rate", G_TYPE_INT, 3, NULL);
  caps = gst_pad_peer_query_caps (src1, NULL);
  gst_caps_unref (caps);

  g_object_set (rtpmux, "seqnum-offset", 100, "timestamp-offset", 1000,
      "ssrc", 55, NULL);

  fail_unless (gst_element_set_state (rtpmux,
          GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS);
  gst_pad_set_active (sink, TRUE);
  gst_pad_set_active (src1, TRUE);
  gst_pad_set_active (src2, TRUE);

  fail_unless (gst_pad_push_event (src1,
          gst_event_new_stream_start ("stream1")));
  fail_unless (gst_pad_push_event (src2,
          gst_event_new_stream_start ("stream2")));

  gst_caps_set_simple (sinkcaps,
      "payload", G_TYPE_INT, 98, "seqnum-offset", G_TYPE_UINT, 100,
      "timestamp-offset", G_TYPE_UINT, 1000, "ssrc", G_TYPE_UINT, 66, NULL);
  caps = gst_caps_new_simple ("application/x-rtp",
      "payload", G_TYPE_INT, 98, "clock-rate", G_TYPE_INT, 3,
      "seqnum-offset", G_TYPE_UINT, 56, "timestamp-offset", G_TYPE_UINT, 57,
      "ssrc", G_TYPE_UINT, 66, NULL);
  fail_unless (gst_pad_set_caps (src1, caps));
  gst_caps_unref (caps);

  caps = gst_pad_peer_query_caps (sink, NULL);
  fail_if (gst_caps_is_empty (caps));

  gst_segment_init (&segment, GST_FORMAT_TIME);
  segment.start = 100000;
  fail_unless (gst_pad_push_event (src1, gst_event_new_segment (&segment)));
  segment.start = 0;
  fail_unless (gst_pad_push_event (src2, gst_event_new_segment (&segment)));


  for (i = 0; i < count; i++) {
    GstRTPBuffer rtpbuffer = GST_RTP_BUFFER_INIT;

    inbuf = gst_rtp_buffer_new_allocate (10, 0, 0);
    GST_BUFFER_PTS (inbuf) = i * 1000 + 100000;
    GST_BUFFER_DURATION (inbuf) = 1000;

    gst_rtp_buffer_map (inbuf, GST_MAP_WRITE, &rtpbuffer);

    gst_rtp_buffer_set_version (&rtpbuffer, 2);
    gst_rtp_buffer_set_payload_type (&rtpbuffer, 98);
    gst_rtp_buffer_set_ssrc (&rtpbuffer, 44);
    gst_rtp_buffer_set_timestamp (&rtpbuffer, 200 + i);
    gst_rtp_buffer_set_seq (&rtpbuffer, 2000 + i);
    gst_rtp_buffer_unmap (&rtpbuffer);
    fail_unless (gst_pad_push (src1, inbuf) == GST_FLOW_OK);

    if (buffers)
      fail_unless (GST_BUFFER_PTS (buffers->data) == i * 1000,
          "%" G_GUINT64_FORMAT, GST_BUFFER_PTS (buffers->data));

    cb (src2, i);

    g_list_foreach (buffers, (GFunc) gst_buffer_unref, NULL);
    g_list_free (buffers);
    buffers = NULL;
  }


  gst_pad_set_active (sink, FALSE);
  gst_pad_set_active (src1, FALSE);
  gst_pad_set_active (src2, FALSE);
  fail_unless (gst_element_set_state (rtpmux,
          GST_STATE_NULL) == GST_STATE_CHANGE_SUCCESS);
  gst_check_teardown_pad_by_name (rtpmux, "src");
  gst_object_unref (reqpad1);
  gst_object_unref (reqpad2);
  gst_check_teardown_pad_by_name (rtpmux, "sink_1");
  gst_check_teardown_pad_by_name (rtpmux, sink2);
  gst_element_release_request_pad (rtpmux, reqpad1);
  gst_element_release_request_pad (rtpmux, reqpad2);

  gst_caps_unref (caps);
  gst_caps_replace (&src1caps, NULL);
  gst_caps_replace (&src2caps, NULL);
  gst_caps_replace (&sinkcaps, NULL);

  gst_check_teardown_element (rtpmux);
}
static GstFlowReturn
gst_multipart_demux_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
  GstMultipartDemux *multipart;
  GstAdapter *adapter;
  gint size = 1;
  GstFlowReturn res;

  multipart = GST_MULTIPART_DEMUX (parent);
  adapter = multipart->adapter;

  res = GST_FLOW_OK;

  if (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT)) {
    GSList *l;

    for (l = multipart->srcpads; l != NULL; l = l->next) {
      GstMultipartPad *srcpad = l->data;

      srcpad->discont = TRUE;
    }
    gst_adapter_clear (adapter);
  }
  gst_adapter_push (adapter, buf);

  while (gst_adapter_available (adapter) > 0) {
    GstMultipartPad *srcpad;
    GstBuffer *outbuf;
    gboolean created;
    gint datalen;

    if (G_UNLIKELY (!multipart->header_completed)) {
      if ((size = multipart_parse_header (multipart)) < 0) {
        goto nodata;
      } else {
        gst_adapter_flush (adapter, size);
        multipart->header_completed = TRUE;
      }
    }
    if ((size = multipart_find_boundary (multipart, &datalen)) < 0) {
      goto nodata;
    }

    /* Invalidate header info */
    multipart->header_completed = FALSE;
    multipart->content_length = -1;

    if (G_UNLIKELY (datalen <= 0)) {
      GST_DEBUG_OBJECT (multipart, "skipping empty content.");
      gst_adapter_flush (adapter, size - datalen);
    } else if (G_UNLIKELY (!multipart->mime_type)) {
      GST_DEBUG_OBJECT (multipart, "content has no MIME type.");
      gst_adapter_flush (adapter, size - datalen);
    } else {
      GstClockTime ts;

      srcpad =
          gst_multipart_find_pad_by_mime (multipart,
          multipart->mime_type, &created);

      ts = gst_adapter_prev_pts (adapter, NULL);
      outbuf = gst_adapter_take_buffer (adapter, datalen);
      gst_adapter_flush (adapter, size - datalen);

      if (created) {
        GstTagList *tags;
        GstSegment segment;

        gst_segment_init (&segment, GST_FORMAT_TIME);

        /* Push new segment, first buffer has 0 timestamp */
        gst_pad_push_event (srcpad->pad, gst_event_new_segment (&segment));

        tags = gst_tag_list_new (GST_TAG_CONTAINER_FORMAT, "Multipart", NULL);
        gst_tag_list_set_scope (tags, GST_TAG_SCOPE_GLOBAL);
        gst_pad_push_event (srcpad->pad, gst_event_new_tag (tags));
      }

      outbuf = gst_buffer_make_writable (outbuf);
      if (srcpad->last_ts == GST_CLOCK_TIME_NONE || srcpad->last_ts != ts) {
        GST_BUFFER_TIMESTAMP (outbuf) = ts;
        srcpad->last_ts = ts;
      } else {
        GST_BUFFER_TIMESTAMP (outbuf) = GST_CLOCK_TIME_NONE;
      }

      if (srcpad->discont) {
        GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
        srcpad->discont = FALSE;
      } else {
        GST_BUFFER_FLAG_UNSET (outbuf, GST_BUFFER_FLAG_DISCONT);
      }

      GST_DEBUG_OBJECT (multipart,
          "pushing buffer with timestamp %" GST_TIME_FORMAT,
          GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)));
      res = gst_pad_push (srcpad->pad, outbuf);
      res = gst_multipart_combine_flows (multipart, srcpad, res);
      if (res != GST_FLOW_OK)
        break;
    }
  }

nodata:
  if (G_UNLIKELY (size == MULTIPART_DATA_ERROR))
    return GST_FLOW_ERROR;
  if (G_UNLIKELY (size == MULTIPART_DATA_EOS))
    return GST_FLOW_EOS;

  return res;
}
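
The demuxer above follows the usual GstAdapter pattern: push incoming buffers into the adapter, then repeatedly take complete units once enough bytes have accumulated. A stripped-down sketch of that loop, assuming a hypothetical fixed frame size and no real parsing:

static GstFlowReturn
consume_fixed_frames (GstAdapter * adapter, GstPad * srcpad, gsize frame_size)
{
  GstFlowReturn ret = GST_FLOW_OK;

  /* take and push complete frames while the adapter holds enough data */
  while (ret == GST_FLOW_OK && gst_adapter_available (adapter) >= frame_size) {
    GstBuffer *frame = gst_adapter_take_buffer (adapter, frame_size);

    ret = gst_pad_push (srcpad, frame);
  }

  return ret;
}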
Example #6
static gboolean
gst_identity_sink_event (GstBaseTransform * trans, GstEvent * event)
{
  GstIdentity *identity;
  gboolean ret = TRUE;

  identity = GST_IDENTITY (trans);

  if (!identity->silent) {
    const GstStructure *s;
    const gchar *tstr;
    gchar *sstr;

    GST_OBJECT_LOCK (identity);
    g_free (identity->last_message);

    tstr = gst_event_type_get_name (GST_EVENT_TYPE (event));
    if ((s = gst_event_get_structure (event)))
      sstr = gst_structure_to_string (s);
    else
      sstr = g_strdup ("");

    identity->last_message =
        g_strdup_printf ("event   ******* (%s:%s) E (type: %s (%d), %s) %p",
        GST_DEBUG_PAD_NAME (trans->sinkpad), tstr, GST_EVENT_TYPE (event),
        sstr, event);
    g_free (sstr);
    GST_OBJECT_UNLOCK (identity);

    gst_identity_notify_last_message (identity);
  }

  if (identity->single_segment && (GST_EVENT_TYPE (event) == GST_EVENT_SEGMENT)) {
    if (!trans->have_segment) {
      GstEvent *news;
      GstSegment segment;

      gst_event_copy_segment (event, &segment);
      gst_event_copy_segment (event, &trans->segment);
      trans->have_segment = TRUE;

      /* This is the first segment, send out a (0, -1) segment */
      gst_segment_init (&segment, segment.format);
      news = gst_event_new_segment (&segment);

      gst_pad_event_default (trans->sinkpad, GST_OBJECT_CAST (trans), news);
    } else {
      /* need to track segment for proper running time */
      gst_event_copy_segment (event, &trans->segment);
    }
  }

  if (GST_EVENT_TYPE (event) == GST_EVENT_GAP &&
      trans->have_segment && trans->segment.format == GST_FORMAT_TIME) {
    GstClockTime start, dur;

    gst_event_parse_gap (event, &start, &dur);
    if (GST_CLOCK_TIME_IS_VALID (start)) {
      start = gst_segment_to_running_time (&trans->segment,
          GST_FORMAT_TIME, start);

      gst_identity_do_sync (identity, start);

      /* also transform GAP timestamp similar to buffer timestamps */
      if (identity->single_segment) {
        gst_event_unref (event);
        event = gst_event_new_gap (start, dur);
      }
    }
  }

  /* Reset previous timestamp, duration and offsets on SEGMENT
   * to prevent false warnings when checking for perfect streams */
  if (GST_EVENT_TYPE (event) == GST_EVENT_SEGMENT) {
    identity->prev_timestamp = identity->prev_duration = GST_CLOCK_TIME_NONE;
    identity->prev_offset = identity->prev_offset_end = GST_BUFFER_OFFSET_NONE;
  }

  if (identity->single_segment && GST_EVENT_TYPE (event) == GST_EVENT_SEGMENT) {
    /* eat up segments */
    gst_event_unref (event);
    ret = TRUE;
  } else {
    if (GST_EVENT_TYPE (event) == GST_EVENT_FLUSH_START) {
      GST_OBJECT_LOCK (identity);
      if (identity->clock_id) {
        GST_DEBUG_OBJECT (identity, "unlock clock wait");
        gst_clock_id_unschedule (identity->clock_id);
        gst_clock_id_unref (identity->clock_id);
        identity->clock_id = NULL;
      }
      GST_OBJECT_UNLOCK (identity);
    }

    ret = GST_BASE_TRANSFORM_CLASS (parent_class)->sink_event (trans, event);
  }

  return ret;
}
Example #7
static gboolean
gme_setup (GstGmeDec * gme)
{
  gme_info_t *info;
  gme_err_t gme_err = NULL;
  GstTagList *taglist;
  guint64 total_duration;
  guint64 fade_time;
  GstBuffer *buffer;
  GstSegment seg;
  GstMapInfo map;

  if (!gst_adapter_available (gme->adapter) || !gme_negotiate (gme)) {
    return FALSE;
  }

  buffer =
      gst_adapter_take_buffer (gme->adapter,
      gst_adapter_available (gme->adapter));

  gst_buffer_map (buffer, &map, GST_MAP_READ);
  gme_err = gme_open_data (map.data, map.size, &gme->player, 32000);
  gst_buffer_unmap (buffer, &map);
  gst_buffer_unref (buffer);

  if (gme_err || !gme->player) {
    if (gme->player) {
      gme_delete (gme->player);
      gme->player = NULL;
    }

    GST_ELEMENT_ERROR (gme, STREAM, DEMUX, (NULL), ("%s", gme_err));

    return FALSE;
  }

  gme_err = gme_track_info (gme->player, &info, 0);

  taglist = gst_tag_list_new_empty ();

  if (info->song && *info->song)
    gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE, GST_TAG_TITLE,
        info->song, NULL);
  if (info->author && *info->author)
    gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE, GST_TAG_ARTIST,
        info->author, NULL);
  /* Prefer the name of the official soundtrack over the name of the game (since this is
   * how track numbers are derived)
   */
  if (info->game && *info->game)
    gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE, GST_TAG_ALBUM, info->game,
        NULL);

  if (info->comment && *info->comment)
    gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE, GST_TAG_COMMENT,
        info->comment, NULL);
  if (info->dumper && *info->dumper)
    gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE, GST_TAG_CONTACT,
        info->dumper, NULL);
  if (info->copyright && *info->copyright)
    gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE, GST_TAG_COPYRIGHT,
        info->copyright, NULL);
  if (info->system && *info->system)
    gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE, GST_TAG_ENCODER,
        info->system, NULL);

  gme->total_duration = total_duration =
      gst_util_uint64_scale_int (info->play_length + (info->loop_length >
          0 ? 8000 : 0), GST_MSECOND, 1);
  fade_time = info->loop_length > 0 ? info->play_length : 0;

  gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE,
      GST_TAG_DURATION, total_duration, NULL);

  gst_pad_push_event (gme->srcpad, gst_event_new_tag (taglist));

  /* track info returned by gme_track_info() is freed with gme_free_info() */
  gme_free_info (info);

#ifdef HAVE_LIBGME_ACCURACY
  /* TODO: Is it worth it to make this optional? */
  gme_enable_accuracy (gme->player, 1);
#endif
  gme_start_track (gme->player, 0);
  if (fade_time)
    gme_set_fade (gme->player, fade_time);

  gst_segment_init (&seg, GST_FORMAT_TIME);
  gst_pad_push_event (gme->srcpad, gst_event_new_segment (&seg));

  gst_pad_start_task (gme->srcpad, (GstTaskFunction) gst_gme_play, gme->srcpad,
      NULL);

  gme->initialized = TRUE;
  gme->seeking = FALSE;
  gme->seekpoint = 0;
  return gme->initialized;
}
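
Besides the segment, gme_setup() also pushes a tag event before streaming starts. A minimal sketch of that step, where push_stream_tags() and its single tag are illustrative only:

static void
push_stream_tags (GstPad * srcpad, const gchar * title)
{
  GstTagList *tags;

  tags = gst_tag_list_new (GST_TAG_TITLE, title, NULL);

  /* gst_event_new_tag() takes ownership of the tag list */
  gst_pad_push_event (srcpad, gst_event_new_tag (tags));
}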
static void
gst_rnd_buffer_size_loop (GstRndBufferSize * self)
{
  GstBuffer *buf = NULL;
  GstFlowReturn ret;
  guint num_bytes, size;

  if (G_UNLIKELY (self->min > self->max))
    goto bogus_minmax;

  if (G_UNLIKELY (self->min != self->max)) {
    num_bytes = g_rand_int_range (self->rand, self->min, self->max);
  } else {
    num_bytes = self->min;
  }

  GST_LOG_OBJECT (self, "pulling %u bytes at offset %" G_GUINT64_FORMAT,
      num_bytes, self->offset);

  ret = gst_pad_pull_range (self->sinkpad, self->offset, num_bytes, &buf);

  if (ret != GST_FLOW_OK)
    goto pull_failed;

  size = gst_buffer_get_size (buf);

  if (size < num_bytes) {
    GST_WARNING_OBJECT (self, "short buffer: %u bytes", size);
  }

  if (self->need_newsegment) {
    GstSegment segment;

    gst_segment_init (&segment, GST_FORMAT_BYTES);
    segment.start = self->offset;
    gst_pad_push_event (self->srcpad, gst_event_new_segment (&segment));
    self->need_newsegment = FALSE;
  }

  self->offset += size;

  ret = gst_pad_push (self->srcpad, buf);

  if (ret != GST_FLOW_OK)
    goto push_failed;

  return;

pause_task:
  {
    GST_DEBUG_OBJECT (self, "pausing task");
    gst_pad_pause_task (self->sinkpad);
    return;
  }

pull_failed:
  {
    if (ret == GST_FLOW_EOS) {
      GST_DEBUG_OBJECT (self, "eos");
      gst_pad_push_event (self->srcpad, gst_event_new_eos ());
    } else {
      GST_WARNING_OBJECT (self, "pull_range flow: %s", gst_flow_get_name (ret));
    }
    goto pause_task;
  }

push_failed:
  {
    GST_DEBUG_OBJECT (self, "push flow: %s", gst_flow_get_name (ret));
    if (ret == GST_FLOW_EOS) {
      GST_DEBUG_OBJECT (self, "eos");
      gst_pad_push_event (self->srcpad, gst_event_new_eos ());
    } else if (ret < GST_FLOW_EOS || ret == GST_FLOW_NOT_LINKED) {
      GST_ELEMENT_ERROR (self, STREAM, FAILED,
          ("Internal data stream error."),
          ("streaming stopped, reason: %s", gst_flow_get_name (ret)));
    }
    goto pause_task;
  }

bogus_minmax:
  {
    GST_ELEMENT_ERROR (self, LIBRARY, SETTINGS,
        ("The minimum buffer size is larger than the maximum buffer size."),
        ("buffer sizes: min=%d, max=%d", self->min, self->max));
    goto pause_task;
  }
}
Example #9
static GstFlowReturn
gst_midi_parse_do_play (GstMidiParse * midiparse)
{
  GstFlowReturn res;
  GList *walk;
  guint64 pulse, next_pulse = G_MAXUINT64;
  GstClockTime position, next_position;
  guint64 tick;

  pulse = midiparse->pulse;
  position = midiparse->segment.position;

  if (midiparse->segment_pending) {
    gst_pad_push_event (midiparse->srcpad,
        gst_event_new_segment (&midiparse->segment));
    midiparse->segment_pending = FALSE;
  }

  GST_DEBUG_OBJECT (midiparse, "pulse %" G_GUINT64_FORMAT ", position %"
      GST_TIME_FORMAT, pulse, GST_TIME_ARGS (position));

  for (walk = midiparse->tracks; walk; walk = g_list_next (walk)) {
    GstMidiTrack *track = walk->data;

    while (!track->eot && track->pulse == pulse) {
      res = handle_next_event (midiparse, track, play_push_func, NULL);
      if (res != GST_FLOW_OK)
        goto error;
    }

    if (!track->eot && track->pulse < next_pulse)
      next_pulse = track->pulse;
  }

  if (next_pulse == G_MAXUINT64)
    goto eos;

  tick = position / (10 * GST_MSECOND);
  GST_DEBUG_OBJECT (midiparse, "current tick %" G_GUINT64_FORMAT, tick);

  next_position = gst_util_uint64_scale (next_pulse,
      1000 * midiparse->tempo, midiparse->division);
  GST_DEBUG_OBJECT (midiparse, "next position %" GST_TIME_FORMAT,
      GST_TIME_ARGS (next_position));

  /* send 10ms ticks to advance the downstream element */
  while (TRUE) {
    /* get position of next tick */
    position = ++tick * (10 * GST_MSECOND);
    GST_DEBUG_OBJECT (midiparse, "tick %" G_GUINT64_FORMAT
        ", position %" GST_TIME_FORMAT, tick, GST_TIME_ARGS (position));

    if (position >= next_position)
      break;

    midiparse->segment.position = position;
    res = play_push_func (midiparse, NULL, 0xf9, NULL, 0, NULL);
    if (res != GST_FLOW_OK)
      goto error;
  }

  midiparse->pulse = next_pulse;
  midiparse->segment.position = next_position;

  return GST_FLOW_OK;

  /* ERRORS */
eos:
  {
    GST_DEBUG_OBJECT (midiparse, "we are EOS");
    return GST_FLOW_EOS;
  }
error:
  {
    GST_DEBUG_OBJECT (midiparse, "have flow result %s",
        gst_flow_get_name (res));
    return res;
  }
}
Example #10
static void
src_task_loop (GstPad * pad)
{
  GstDtlsEnc *self = GST_DTLS_ENC (GST_PAD_PARENT (pad));
  GstFlowReturn ret;
  GstBuffer *buffer;
  gboolean check_connection_timeout = FALSE;

  GST_TRACE_OBJECT (self, "src loop: acquiring lock");
  g_mutex_lock (&self->queue_lock);
  GST_TRACE_OBJECT (self, "src loop: acquired lock");

  if (self->flushing) {
    GST_LOG_OBJECT (self, "src task loop entered on inactive pad");
    GST_TRACE_OBJECT (self, "src loop: releasing lock");
    g_mutex_unlock (&self->queue_lock);
    return;
  }

  while (g_queue_is_empty (&self->queue)) {
    GST_TRACE_OBJECT (self, "src loop: queue empty, waiting for add");
    g_cond_wait (&self->queue_cond_add, &self->queue_lock);
    GST_TRACE_OBJECT (self, "src loop: add signaled");

    if (self->flushing) {
      GST_LOG_OBJECT (self, "pad inactive, task returning");
      GST_TRACE_OBJECT (self, "src loop: releasing lock");
      g_mutex_unlock (&self->queue_lock);
      return;
    }
  }
  GST_TRACE_OBJECT (self, "src loop: queue has element");

  buffer = g_queue_pop_head (&self->queue);
  GST_TRACE_OBJECT (self, "src loop: releasing lock");
  g_mutex_unlock (&self->queue_lock);

  if (self->send_initial_events) {
    GstSegment segment;
    gchar s_id[32];
    GstCaps *caps;

    self->send_initial_events = FALSE;

    g_snprintf (s_id, sizeof (s_id), "dtlsenc-%08x", g_random_int ());
    gst_pad_push_event (self->src, gst_event_new_stream_start (s_id));
    caps = gst_caps_new_empty_simple ("application/x-dtls");
    gst_pad_push_event (self->src, gst_event_new_caps (caps));
    gst_caps_unref (caps);
    gst_segment_init (&segment, GST_FORMAT_BYTES);
    gst_pad_push_event (self->src, gst_event_new_segment (&segment));
    check_connection_timeout = TRUE;
  }

  ret = gst_pad_push (self->src, buffer);
  if (check_connection_timeout)
    gst_dtls_connection_check_timeout (self->connection);

  if (G_UNLIKELY (ret != GST_FLOW_OK)) {
    GST_WARNING_OBJECT (self, "failed to push buffer on src pad: %s",
        gst_flow_get_name (ret));
  }
}
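
Before its first buffer, the task above pushes the mandatory sticky events in order: stream-start, then caps, then segment. A minimal sketch of that sequence on an arbitrary src pad; the "application/x-example" media type and the helper name are placeholders:

static void
push_initial_sticky_events (GstPad * srcpad)
{
  GstSegment segment;
  GstCaps *caps;
  gchar stream_id[32];

  /* 1) stream-start with a (here randomly generated) stream id */
  g_snprintf (stream_id, sizeof (stream_id), "example-%08x", g_random_int ());
  gst_pad_push_event (srcpad, gst_event_new_stream_start (stream_id));

  /* 2) caps (placeholder media type) */
  caps = gst_caps_new_empty_simple ("application/x-example");
  gst_pad_push_event (srcpad, gst_event_new_caps (caps));
  gst_caps_unref (caps);

  /* 3) segment */
  gst_segment_init (&segment, GST_FORMAT_BYTES);
  gst_pad_push_event (srcpad, gst_event_new_segment (&segment));
}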
static gboolean
splitmux_part_pad_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
  GstSplitMuxPartPad *part_pad = SPLITMUX_PART_PAD_CAST (pad);
  GstSplitMuxPartReader *reader = part_pad->reader;
  gboolean ret = TRUE;
  SplitMuxSrcPad *target;
  GstDataQueueItem *item;

  SPLITMUX_PART_LOCK (reader);

  target = gst_object_ref (part_pad->target);

  GST_LOG_OBJECT (reader, "Pad %" GST_PTR_FORMAT " event %" GST_PTR_FORMAT, pad,
      event);

  if (part_pad->flushing && GST_EVENT_TYPE (event) != GST_EVENT_FLUSH_STOP)
    goto drop_event;

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_SEGMENT:{
      GstSegment *seg = &part_pad->segment;

      GST_LOG_OBJECT (pad, "Received segment %" GST_PTR_FORMAT, event);

      gst_event_copy_segment (event, seg);
      gst_event_copy_segment (event, &part_pad->orig_segment);

      if (seg->format != GST_FORMAT_TIME)
        goto wrong_segment;

      /* Adjust segment */
      /* Adjust start/stop so the overall file is 0 + start_offset based */
      if (seg->stop != -1) {
        seg->stop -= seg->start;
        seg->stop += seg->time + reader->start_offset;
      }
      seg->start = seg->time + reader->start_offset;
      seg->time += reader->start_offset;
      seg->position += reader->start_offset;

      GST_LOG_OBJECT (pad, "Adjusted segment now %" GST_PTR_FORMAT, event);

      /* Replace event */
      gst_event_unref (event);
      event = gst_event_new_segment (seg);

      if (reader->prep_state != PART_STATE_PREPARING_COLLECT_STREAMS
          && reader->prep_state != PART_STATE_PREPARING_MEASURE_STREAMS)
        break;                  /* Only do further stuff with segments during initial measuring */

      /* Take the first segment from the first part */
      if (target->segment.format == GST_FORMAT_UNDEFINED) {
        gst_segment_copy_into (seg, &target->segment);
        GST_DEBUG_OBJECT (reader,
            "Target pad segment now %" GST_SEGMENT_FORMAT, &target->segment);
      }

      if (seg->stop != -1 && target->segment.stop != -1) {
        GstClockTime stop = seg->base + seg->stop;
        if (stop > target->segment.stop) {
          target->segment.stop = stop;
          GST_DEBUG_OBJECT (reader,
              "Adjusting segment stop by %" GST_TIME_FORMAT
              " output now %" GST_SEGMENT_FORMAT,
              GST_TIME_ARGS (reader->start_offset), &target->segment);
        }
      }
      GST_LOG_OBJECT (pad, "Forwarding segment %" GST_PTR_FORMAT, event);
      break;
    }
    case GST_EVENT_EOS:{

      GST_DEBUG_OBJECT (part_pad,
          "State %u EOS event. MaxTS seen %" GST_TIME_FORMAT,
          reader->prep_state, GST_TIME_ARGS (part_pad->max_ts));

      if (reader->prep_state == PART_STATE_PREPARING_COLLECT_STREAMS ||
          reader->prep_state == PART_STATE_PREPARING_MEASURE_STREAMS) {
        /* Mark this pad as EOS */
        part_pad->is_eos = TRUE;
        if (splitmux_part_is_eos_locked (reader)) {
          /* Finished measuring things, set state and tell the state change func
           * so it can seek back to the start */
          GST_LOG_OBJECT (reader,
              "EOS while measuring streams. Resetting for ready");
          reader->prep_state = PART_STATE_PREPARING_RESET_FOR_READY;
          SPLITMUX_PART_BROADCAST (reader);
        }
        goto drop_event;
      }
      break;
    }
    case GST_EVENT_FLUSH_START:
      reader->flushing = TRUE;
      part_pad->flushing = TRUE;
      GST_LOG_OBJECT (reader, "Pad %" GST_PTR_FORMAT " flushing dataqueue",
          part_pad);
      gst_data_queue_set_flushing (part_pad->queue, TRUE);
      SPLITMUX_PART_BROADCAST (reader);
      break;
    case GST_EVENT_FLUSH_STOP:{
      gst_data_queue_set_flushing (part_pad->queue, FALSE);
      gst_data_queue_flush (part_pad->queue);
      part_pad->seen_buffer = FALSE;
      part_pad->flushing = FALSE;
      part_pad->is_eos = FALSE;

      reader->flushing = splitmux_is_flushing (reader);
      GST_LOG_OBJECT (reader,
          "%s pad %" GST_PTR_FORMAT " flush_stop. Overall flushing=%d",
          reader->path, pad, reader->flushing);
      SPLITMUX_PART_BROADCAST (reader);
      break;
    }
    default:
      break;
  }

  /* Don't send events downstream while preparing */
  if (reader->prep_state != PART_STATE_READY)
    goto drop_event;

  /* Don't pass flush events - those are done by the parent */
  if (GST_EVENT_TYPE (event) == GST_EVENT_FLUSH_START ||
      GST_EVENT_TYPE (event) == GST_EVENT_FLUSH_STOP)
    goto drop_event;

  if (!block_until_can_push (reader)) {
    SPLITMUX_PART_UNLOCK (reader);
    gst_object_unref (target);
    gst_event_unref (event);
    return FALSE;
  }

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_GAP:{
      /* FIXME: Drop initial gap (if any) in each segment, not all GAPs */
      goto drop_event;
    }
    default:
      break;
  }

  /* We are active, and one queue is empty, place this buffer in
   * the dataqueue */
  gst_object_ref (part_pad->queue);
  SPLITMUX_PART_UNLOCK (reader);

  GST_LOG_OBJECT (reader, "Enqueueing event %" GST_PTR_FORMAT, event);
  item = g_slice_new (GstDataQueueItem);
  item->destroy = (GDestroyNotify) splitmux_part_free_queue_item;
  item->object = GST_MINI_OBJECT (event);
  item->size = 0;
  item->duration = 0;
  item->visible = FALSE;

  if (!gst_data_queue_push (part_pad->queue, item)) {
    splitmux_part_free_queue_item (item);
    ret = FALSE;
  }

  gst_object_unref (part_pad->queue);
  gst_object_unref (target);

  return ret;
wrong_segment:
  gst_event_unref (event);
  gst_object_unref (target);
  SPLITMUX_PART_UNLOCK (reader);
  GST_ELEMENT_ERROR (reader, STREAM, FAILED, (NULL),
      ("Received non-time segment - reader %s pad %" GST_PTR_FORMAT,
          reader->path, pad));
  return FALSE;
drop_event:
  GST_LOG_OBJECT (pad, "Dropping event %" GST_PTR_FORMAT
      " from %" GST_PTR_FORMAT " on %" GST_PTR_FORMAT, event, pad, target);
  gst_event_unref (event);
  gst_object_unref (target);
  SPLITMUX_PART_UNLOCK (reader);
  return TRUE;
}
static GstFlowReturn
gst_flxdec_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
  GstByteReader reader;
  GstBuffer *input;
  GstMapInfo map_info;
  GstCaps *caps;
  guint available;
  GstFlowReturn res = GST_FLOW_OK;

  GstFlxDec *flxdec;
  FlxHeader *flxh;

  g_return_val_if_fail (buf != NULL, GST_FLOW_ERROR);
  flxdec = (GstFlxDec *) parent;
  g_return_val_if_fail (flxdec != NULL, GST_FLOW_ERROR);

  gst_adapter_push (flxdec->adapter, buf);
  available = gst_adapter_available (flxdec->adapter);
  input = gst_adapter_get_buffer (flxdec->adapter, available);
  if (!gst_buffer_map (input, &map_info, GST_MAP_READ)) {
    GST_ELEMENT_ERROR (flxdec, STREAM, DECODE,
        ("%s", "Failed to map buffer"), (NULL));
    goto error;
  }
  gst_byte_reader_init (&reader, map_info.data, map_info.size);

  if (flxdec->state == GST_FLXDEC_READ_HEADER) {
    if (available >= FlxHeaderSize) {
      GstByteReader header;
      GstCaps *templ;

      if (!gst_byte_reader_get_sub_reader (&reader, &header, FlxHeaderSize)) {
        GST_ELEMENT_ERROR (flxdec, STREAM, DECODE,
            ("%s", "Could not read header"), (NULL));
        goto unmap_input_error;
      }
      gst_adapter_flush (flxdec->adapter, FlxHeaderSize);
      available -= FlxHeaderSize;

      if (!_read_flx_header (flxdec, &header, &flxdec->hdr)) {
        GST_ELEMENT_ERROR (flxdec, STREAM, DECODE,
            ("%s", "Failed to parse header"), (NULL));
        goto unmap_input_error;
      }

      flxh = &flxdec->hdr;

      /* check header */
      if (flxh->type != FLX_MAGICHDR_FLI &&
          flxh->type != FLX_MAGICHDR_FLC && flxh->type != FLX_MAGICHDR_FLX) {
        GST_ELEMENT_ERROR (flxdec, STREAM, WRONG_TYPE, (NULL),
            ("not a flx file (type %x)", flxh->type));
        goto unmap_input_error;
      }

      GST_INFO_OBJECT (flxdec, "size      :  %d", flxh->size);
      GST_INFO_OBJECT (flxdec, "frames    :  %d", flxh->frames);
      GST_INFO_OBJECT (flxdec, "width     :  %d", flxh->width);
      GST_INFO_OBJECT (flxdec, "height    :  %d", flxh->height);
      GST_INFO_OBJECT (flxdec, "depth     :  %d", flxh->depth);
      GST_INFO_OBJECT (flxdec, "speed     :  %d", flxh->speed);

      flxdec->next_time = 0;

      if (flxh->type == FLX_MAGICHDR_FLI) {
        flxdec->frame_time = JIFFIE * flxh->speed;
      } else if (flxh->speed == 0) {
        flxdec->frame_time = GST_SECOND / 70;
      } else {
        flxdec->frame_time = flxh->speed * GST_MSECOND;
      }

      flxdec->duration = flxh->frames * flxdec->frame_time;
      GST_LOG ("duration   :  %" GST_TIME_FORMAT,
          GST_TIME_ARGS (flxdec->duration));

      templ = gst_pad_get_pad_template_caps (flxdec->srcpad);
      caps = gst_caps_copy (templ);
      gst_caps_unref (templ);
      gst_caps_set_simple (caps,
          "width", G_TYPE_INT, flxh->width,
          "height", G_TYPE_INT, flxh->height,
          "framerate", GST_TYPE_FRACTION, (gint) GST_MSECOND,
          (gint) flxdec->frame_time / 1000, NULL);

      gst_pad_set_caps (flxdec->srcpad, caps);
      gst_caps_unref (caps);

      if (flxdec->need_segment) {
        gst_pad_push_event (flxdec->srcpad,
            gst_event_new_segment (&flxdec->segment));
        flxdec->need_segment = FALSE;
      }

      /* zero means 8 */
      if (flxh->depth == 0)
        flxh->depth = 8;

      if (flxh->depth != 8) {
        GST_ELEMENT_ERROR (flxdec, STREAM, WRONG_TYPE,
            ("%s", "Don't know how to decode non 8 bit depth streams"), (NULL));
        goto unmap_input_error;
      }

      flxdec->converter =
          flx_colorspace_converter_new (flxh->width, flxh->height);

      if (flxh->type == FLX_MAGICHDR_FLC || flxh->type == FLX_MAGICHDR_FLX) {
        GST_INFO_OBJECT (flxdec, "(FLC) aspect_dx :  %d", flxh->aspect_dx);
        GST_INFO_OBJECT (flxdec, "(FLC) aspect_dy :  %d", flxh->aspect_dy);
        GST_INFO_OBJECT (flxdec, "(FLC) oframe1   :  0x%08x", flxh->oframe1);
        GST_INFO_OBJECT (flxdec, "(FLC) oframe2   :  0x%08x", flxh->oframe2);
      }

      flxdec->size = ((guint) flxh->width * (guint) flxh->height);
      if (flxdec->size >= G_MAXSIZE / 4) {
        GST_ELEMENT_ERROR (flxdec, STREAM, DECODE,
            ("%s", "Cannot allocate required memory"), (NULL));
        goto unmap_input_error;
      }

      /* create delta and output frame */
      flxdec->frame_data = g_malloc0 (flxdec->size);
      flxdec->delta_data = g_malloc0 (flxdec->size);

      flxdec->state = GST_FLXDEC_PLAYING;
    }
  } else if (flxdec->state == GST_FLXDEC_PLAYING) {
    GstBuffer *out;

    /* while we have enough data in the adapter */
    while (available >= FlxFrameChunkSize && res == GST_FLOW_OK) {
      guint32 size;
      guint16 type;

      if (!gst_byte_reader_get_uint32_le (&reader, &size))
        goto parse_error;
      if (available < size)
        goto need_more_data;

      available -= size;
      gst_adapter_flush (flxdec->adapter, size);

      if (!gst_byte_reader_get_uint16_le (&reader, &type))
        goto parse_error;

      switch (type) {
        case FLX_FRAME_TYPE:{
          GstByteReader chunks;
          GstByteWriter writer;
          guint16 n_chunks;
          GstMapInfo map;

          GST_LOG_OBJECT (flxdec, "Have frame type 0x%02x of size %d", type,
              size);

          if (!gst_byte_reader_get_sub_reader (&reader, &chunks,
                  size - FlxFrameChunkSize))
            goto parse_error;

          if (!gst_byte_reader_get_uint16_le (&chunks, &n_chunks))
            goto parse_error;
          GST_LOG_OBJECT (flxdec, "Have %d chunks", n_chunks);

          if (n_chunks == 0)
            break;
          if (!gst_byte_reader_skip (&chunks, 8))       /* reserved */
            goto parse_error;

          gst_byte_writer_init_with_data (&writer, flxdec->frame_data,
              flxdec->size, TRUE);

          /* decode chunks */
          if (!flx_decode_chunks (flxdec, n_chunks, &chunks, &writer)) {
            GST_ELEMENT_ERROR (flxdec, STREAM, DECODE,
                ("%s", "Could not decode chunk"), NULL);
            goto unmap_input_error;
          }
          gst_byte_writer_reset (&writer);

          /* save copy of the current frame for possible delta. */
          memcpy (flxdec->delta_data, flxdec->frame_data, flxdec->size);

          out = gst_buffer_new_and_alloc (flxdec->size * 4);
          if (!gst_buffer_map (out, &map, GST_MAP_WRITE)) {
            GST_ELEMENT_ERROR (flxdec, STREAM, DECODE,
                ("%s", "Could not map output buffer"), NULL);
            gst_buffer_unref (out);
            goto unmap_input_error;
          }

          /* convert current frame. */
          flx_colorspace_convert (flxdec->converter, flxdec->frame_data,
              map.data);
          gst_buffer_unmap (out, &map);

          GST_BUFFER_TIMESTAMP (out) = flxdec->next_time;
          flxdec->next_time += flxdec->frame_time;

          res = gst_pad_push (flxdec->srcpad, out);
          break;
        }
        default:
          GST_DEBUG_OBJECT (flxdec, "Unknown frame type 0x%02x, skipping %d",
              type, size);
          if (!gst_byte_reader_skip (&reader, size - FlxFrameChunkSize))
            goto parse_error;
          break;
      }
    }
  }

need_more_data:
  gst_buffer_unmap (input, &map_info);
  gst_buffer_unref (input);
  return res;

  /* ERRORS */
parse_error:
  GST_ELEMENT_ERROR (flxdec, STREAM, DECODE,
      ("%s", "Failed to parse stream"), (NULL));
unmap_input_error:
  gst_buffer_unmap (input, &map_info);
error:
  gst_buffer_unref (input);
  return GST_FLOW_ERROR;
}
Example #13
static gboolean
gst_raw_parse_handle_seek_pull (GstRawParse * rp, GstEvent * event)
{
  gdouble rate;
  GstFormat format;
  GstSeekFlags flags;
  GstSeekType start_type, stop_type;
  gint64 start, stop;
  gint64 last_stop;
  gboolean ret = FALSE;
  gboolean flush;
  GstSegment seeksegment;

  if (event) {
    gst_event_parse_seek (event, &rate, &format, &flags, &start_type, &start,
        &stop_type, &stop);

    /* convert input offsets to time */
    ret = gst_raw_parse_convert (rp, format, start, GST_FORMAT_TIME, &start);
    ret &= gst_raw_parse_convert (rp, format, stop, GST_FORMAT_TIME, &stop);
    if (!ret)
      goto convert_failed;

    GST_DEBUG_OBJECT (rp, "converted start - stop to time");

    format = GST_FORMAT_TIME;

    gst_event_unref (event);
  } else {
    format = GST_FORMAT_TIME;
    flags = 0;
  }

  flush = ((flags & GST_SEEK_FLAG_FLUSH) != 0);

  /* start flushing up and downstream so that the loop function pauses and we
   * can acquire the STREAM_LOCK. */
  if (flush) {
    GST_LOG_OBJECT (rp, "flushing");
    gst_pad_push_event (rp->sinkpad, gst_event_new_flush_start ());
    gst_pad_push_event (rp->srcpad, gst_event_new_flush_start ());
  } else {
    GST_LOG_OBJECT (rp, "pause task");
    gst_pad_pause_task (rp->sinkpad);
  }

  GST_PAD_STREAM_LOCK (rp->sinkpad);

  memcpy (&seeksegment, &rp->segment, sizeof (GstSegment));

  if (event) {
    /* configure the seek values */
    gst_segment_do_seek (&seeksegment, rate, format, flags,
        start_type, start, stop_type, stop, NULL);
  }

  /* get the desired position */
  last_stop = seeksegment.position;

  GST_LOG_OBJECT (rp, "seeking to %" GST_TIME_FORMAT,
      GST_TIME_ARGS (last_stop));

  /* convert the desired position to bytes */
  ret =
      gst_raw_parse_convert (rp, format, last_stop, GST_FORMAT_BYTES,
      &last_stop);

  /* prepare for streaming */
  if (flush) {
    GST_LOG_OBJECT (rp, "stop flush");
    gst_pad_push_event (rp->sinkpad, gst_event_new_flush_stop (TRUE));
    gst_pad_push_event (rp->srcpad, gst_event_new_flush_stop (TRUE));
  }

  if (ret) {
    /* seek done */

    /* Seek on a frame boundary */
    last_stop -= last_stop % rp->framesize;

    rp->offset = last_stop;
    rp->n_frames = last_stop / rp->framesize;

    GST_LOG_OBJECT (rp, "seeking to bytes %" G_GINT64_FORMAT, last_stop);

    memcpy (&rp->segment, &seeksegment, sizeof (GstSegment));

    if (rp->segment.flags & GST_SEEK_FLAG_SEGMENT) {
      gst_element_post_message (GST_ELEMENT_CAST (rp),
          gst_message_new_segment_start (GST_OBJECT_CAST (rp),
              rp->segment.format, rp->segment.position));
    }

    /* for deriving a stop position for the playback segment from the seek
     * segment, we must take the duration when the stop is not set */
    if ((stop = rp->segment.stop) == -1)
      stop = rp->segment.duration;

    GST_DEBUG_OBJECT (rp, "preparing newsegment from %" G_GINT64_FORMAT
        " to %" G_GINT64_FORMAT, rp->segment.start, stop);

    /* now replace the old segment so that we send it in the stream thread the
     * next time it is scheduled. */
    if (rp->start_segment)
      gst_event_unref (rp->start_segment);
    rp->start_segment = gst_event_new_segment (&rp->segment);
  }
  rp->discont = TRUE;

  GST_LOG_OBJECT (rp, "start streaming");
  gst_pad_start_task (rp->sinkpad, (GstTaskFunction) gst_raw_parse_loop, rp,
      NULL);

  GST_PAD_STREAM_UNLOCK (rp->sinkpad);

  return ret;

  /* ERRORS */
convert_failed:
  {
    GST_DEBUG_OBJECT (rp, "Seek failed: couldn't convert to byte positions");
    return FALSE;
  }
}
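
The seek handler above uses the standard flushing-seek sequence: flush in both directions so the streaming task pauses, take the sink pad's stream lock, update the segment, stop flushing and restart the task. A condensed sketch under those assumptions, with the element-specific segment bookkeeping omitted:

static gboolean
do_flushing_seek (GstPad * sinkpad, GstPad * srcpad,
    GstTaskFunction loop_func, gpointer user_data)
{
  /* start flushing so the loop function pauses and the stream lock is free */
  gst_pad_push_event (sinkpad, gst_event_new_flush_start ());
  gst_pad_push_event (srcpad, gst_event_new_flush_start ());

  GST_PAD_STREAM_LOCK (sinkpad);

  /* ... update the element's segment here, e.g. with gst_segment_do_seek() ... */

  /* stop flushing and restart the streaming task */
  gst_pad_push_event (sinkpad, gst_event_new_flush_stop (TRUE));
  gst_pad_push_event (srcpad, gst_event_new_flush_stop (TRUE));

  gst_pad_start_task (sinkpad, loop_func, user_data, NULL);

  GST_PAD_STREAM_UNLOCK (sinkpad);

  return TRUE;
}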
Example #14
static GstFlowReturn
gst_ssa_parse_chain (GstPad * sinkpad, GstObject * parent, GstBuffer * buf)
{
  GstFlowReturn ret;
  GstSsaParse *parse = GST_SSA_PARSE (parent);
  GstClockTime ts;
  gchar *txt;
  GstMapInfo map;

  if (G_UNLIKELY (!parse->framed))
    goto not_framed;

  if (G_UNLIKELY (parse->send_tags)) {
    GstTagList *tags;

    tags = gst_tag_list_new_empty ();
    gst_tag_list_add (tags, GST_TAG_MERGE_APPEND, GST_TAG_SUBTITLE_CODEC,
        "SubStation Alpha", NULL);
    gst_pad_push_event (parse->srcpad, gst_event_new_tag (tags));
    parse->send_tags = FALSE;
  }

  /* make double-sure it's 0-terminated and all */
  gst_buffer_map (buf, &map, GST_MAP_READ);
  txt = g_strndup ((gchar *) map.data, map.size);
  gst_buffer_unmap (buf, &map);

  if (txt == NULL)
    goto empty_text;

  ts = GST_BUFFER_TIMESTAMP (buf);
  ret = gst_ssa_parse_push_line (parse, txt, ts, GST_BUFFER_DURATION (buf));

  if (ret != GST_FLOW_OK && GST_CLOCK_TIME_IS_VALID (ts)) {
    GstSegment segment;

    /* just advance time without sending anything */
    gst_segment_init (&segment, GST_FORMAT_TIME);
    segment.start = ts;
    segment.time = ts;
    gst_pad_push_event (parse->srcpad, gst_event_new_segment (&segment));
    ret = GST_FLOW_OK;
  }

  gst_buffer_unref (buf);
  g_free (txt);

  return ret;

/* ERRORS */
not_framed:
  {
    GST_ELEMENT_ERROR (parse, STREAM, FORMAT, (NULL),
        ("Only SSA subtitles embedded in containers are supported"));
    gst_buffer_unref (buf);
    return GST_FLOW_NOT_NEGOTIATED;
  }
empty_text:
  {
    GST_ELEMENT_WARNING (parse, STREAM, FORMAT, (NULL),
        ("Received empty subtitle"));
    gst_buffer_unref (buf);
    return GST_FLOW_OK;
  }
}
Example #15
static gboolean
gst_video_rate_sink_event (GstBaseTransform * trans, GstEvent * event)
{
  GstVideoRate *videorate;

  videorate = GST_VIDEO_RATE (trans);

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_SEGMENT:
    {
      GstSegment segment;

      gst_event_copy_segment (event, &segment);

      if (segment.format != GST_FORMAT_TIME)
        goto format_error;

      GST_DEBUG_OBJECT (videorate, "handle NEWSEGMENT");

      /* close up the previous segment, if appropriate */
      if (videorate->prevbuf) {
        gint count = 0;
        GstFlowReturn res;

        res = GST_FLOW_OK;
        /* fill up to the end of current segment,
         * or only send out the stored buffer if there is no specific stop.
         * regardless, prevent going loopy in strange cases */
        while (res == GST_FLOW_OK && count <= MAGIC_LIMIT &&
            ((GST_CLOCK_TIME_IS_VALID (videorate->segment.stop) &&
                    videorate->next_ts - videorate->segment.base
                    < videorate->segment.stop)
                || count < 1)) {
          res = gst_video_rate_flush_prev (videorate, count > 0);
          count++;
        }
        if (count > 1) {
          videorate->dup += count - 1;
          if (!videorate->silent)
            gst_video_rate_notify_duplicate (videorate);
        } else if (count == 0) {
          videorate->drop++;
          if (!videorate->silent)
            gst_video_rate_notify_drop (videorate);
        }
        /* clean up for the new one; _chain will resume from the new start */
        gst_video_rate_swap_prev (videorate, NULL, 0);
      }

      videorate->base_ts = 0;
      videorate->out_frame_count = 0;
      videorate->next_ts = GST_CLOCK_TIME_NONE;

      /* We just want to update the accumulated stream_time  */

      segment.start = (gint64) (segment.start / videorate->rate);
      segment.position = (gint64) (segment.position / videorate->rate);
      if (GST_CLOCK_TIME_IS_VALID (segment.stop))
        segment.stop = (gint64) (segment.stop / videorate->rate);
      segment.time = (gint64) (segment.time / videorate->rate);

      gst_segment_copy_into (&segment, &videorate->segment);
      GST_DEBUG_OBJECT (videorate, "updated segment: %" GST_SEGMENT_FORMAT,
          &videorate->segment);

      gst_event_unref (event);
      event = gst_event_new_segment (&segment);

      break;
    }
    case GST_EVENT_EOS:{
      gint count = 0;
      GstFlowReturn res = GST_FLOW_OK;

      GST_DEBUG_OBJECT (videorate, "Got EOS");

      /* If the segment has a stop position, fill the segment */
      if (GST_CLOCK_TIME_IS_VALID (videorate->segment.stop)) {
        /* fill up to the end of current segment,
         * or only send out the stored buffer if there is no specific stop.
         * regardless, prevent going loopy in strange cases */
        while (res == GST_FLOW_OK && count <= MAGIC_LIMIT &&
            ((videorate->next_ts - videorate->segment.base <
                    videorate->segment.stop)
                || count < 1)) {
          res = gst_video_rate_flush_prev (videorate, count > 0);
          count++;
        }
      } else if (videorate->prevbuf) {
        /* Output at least one frame but if the buffer duration is valid, output
         * enough frames to use the complete buffer duration */
        if (GST_BUFFER_DURATION_IS_VALID (videorate->prevbuf)) {
          GstClockTime end_ts =
              videorate->next_ts + GST_BUFFER_DURATION (videorate->prevbuf);

          while (res == GST_FLOW_OK && count <= MAGIC_LIMIT &&
              ((videorate->next_ts - videorate->segment.base < end_ts)
                  || count < 1)) {
            res = gst_video_rate_flush_prev (videorate, count > 0);
            count++;
          }
        } else {
          res = gst_video_rate_flush_prev (videorate, FALSE);
          count = 1;
        }
      }

      if (count > 1) {
        videorate->dup += count - 1;
        if (!videorate->silent)
          gst_video_rate_notify_duplicate (videorate);
      } else if (count == 0) {
        videorate->drop++;
        if (!videorate->silent)
          gst_video_rate_notify_drop (videorate);
      }

      break;
    }
    case GST_EVENT_FLUSH_STOP:
      /* also resets the segment */
      GST_DEBUG_OBJECT (videorate, "Got FLUSH_STOP");
      gst_video_rate_reset (videorate);
      break;
    default:
      break;
  }

  return GST_BASE_TRANSFORM_CLASS (parent_class)->sink_event (trans, event);

  /* ERRORS */
format_error:
  {
    GST_WARNING_OBJECT (videorate,
        "Got segment but doesn't have GST_FORMAT_TIME value");
    return FALSE;
  }
}
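The dup/drop bookkeeping above ends in gst_video_rate_notify_duplicate() and
gst_video_rate_notify_drop(), which are not shown in this example. A minimal
sketch of what they might look like, assuming videorate exposes "duplicate"
and "drop" as readable statistics properties (the property names are an
assumption here, not confirmed by this snippet):

static void
gst_video_rate_notify_duplicate (GstVideoRate * videorate)
{
  /* tell listeners the duplicate counter changed */
  g_object_notify (G_OBJECT (videorate), "duplicate");
}

static void
gst_video_rate_notify_drop (GstVideoRate * videorate)
{
  /* tell listeners the drop counter changed */
  g_object_notify (G_OBJECT (videorate), "drop");
}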
static void
gst_image_freeze_src_loop (GstPad * pad)
{
    GstImageFreeze *self = GST_IMAGE_FREEZE (GST_PAD_PARENT (pad));
    GstBuffer *buffer;
    guint64 offset;
    GstClockTime timestamp, timestamp_end;
    guint64 cstart, cstop;
    gboolean in_seg, eos;
    GstFlowReturn flow_ret = GST_FLOW_OK;

    g_mutex_lock (&self->lock);
    if (!gst_pad_has_current_caps (self->srcpad)) {
        GST_ERROR_OBJECT (pad, "Not negotiated yet");
        flow_ret = GST_FLOW_NOT_NEGOTIATED;
        g_mutex_unlock (&self->lock);
        goto pause_task;
    }

    if (!self->buffer) {
        GST_ERROR_OBJECT (pad, "Have no buffer yet");
        flow_ret = GST_FLOW_ERROR;
        g_mutex_unlock (&self->lock);
        goto pause_task;
    }
    buffer = gst_buffer_ref (self->buffer);
    buffer = gst_buffer_make_writable (buffer);
    g_mutex_unlock (&self->lock);

    if (self->need_segment) {
        GstEvent *e;

        GST_DEBUG_OBJECT (pad, "Pushing SEGMENT event: %" GST_SEGMENT_FORMAT,
                          &self->segment);
        e = gst_event_new_segment (&self->segment);

        g_mutex_lock (&self->lock);
        if (self->segment.rate >= 0) {
            self->offset =
                gst_util_uint64_scale (self->segment.start, self->fps_n,
                                       self->fps_d * GST_SECOND);
        } else {
            self->offset =
                gst_util_uint64_scale (self->segment.stop, self->fps_n,
                                       self->fps_d * GST_SECOND);
        }
        g_mutex_unlock (&self->lock);

        self->need_segment = FALSE;

        gst_pad_push_event (self->srcpad, e);
    }

    g_mutex_lock (&self->lock);
    offset = self->offset;

    if (self->fps_n != 0) {
        timestamp =
            gst_util_uint64_scale (offset, self->fps_d * GST_SECOND, self->fps_n);
        timestamp_end =
            gst_util_uint64_scale (offset + 1, self->fps_d * GST_SECOND,
                                   self->fps_n);
    } else {
        timestamp = self->segment.start;
        timestamp_end = GST_CLOCK_TIME_NONE;
    }

    eos = (self->fps_n == 0 && offset > 0) ||
          (self->segment.rate >= 0 && self->segment.stop != -1
           && timestamp > self->segment.stop) ||
          (self->segment.rate < 0 && offset == 0) ||
          (self->segment.rate < 0 && self->segment.start != -1
           && timestamp_end < self->segment.start);

    if (self->fps_n == 0 && offset > 0)
        in_seg = FALSE;
    else
        in_seg =
            gst_segment_clip (&self->segment, GST_FORMAT_TIME, timestamp,
                              timestamp_end, &cstart, &cstop);

    if (in_seg) {
        self->segment.position = cstart;
        if (self->segment.rate >= 0)
            self->segment.position = cstop;
    }

    if (self->segment.rate >= 0)
        self->offset++;
    else
        self->offset--;
    g_mutex_unlock (&self->lock);

    GST_DEBUG_OBJECT (pad, "Handling buffer with timestamp %" GST_TIME_FORMAT,
                      GST_TIME_ARGS (timestamp));

    if (in_seg) {
        GST_BUFFER_DTS (buffer) = GST_CLOCK_TIME_NONE;
        GST_BUFFER_PTS (buffer) = cstart;
        GST_BUFFER_DURATION (buffer) = cstop - cstart;
        GST_BUFFER_OFFSET (buffer) = offset;
        GST_BUFFER_OFFSET_END (buffer) = offset + 1;
        flow_ret = gst_pad_push (self->srcpad, buffer);
        GST_DEBUG_OBJECT (pad, "Pushing buffer resulted in %s",
                          gst_flow_get_name (flow_ret));
        if (flow_ret != GST_FLOW_OK)
            goto pause_task;
    } else {
        gst_buffer_unref (buffer);
    }

    if (eos) {
        flow_ret = GST_FLOW_EOS;
        goto pause_task;
    }

    return;

pause_task:
    {
        const gchar *reason = gst_flow_get_name (flow_ret);

        GST_LOG_OBJECT (self, "pausing task, reason %s", reason);
        gst_pad_pause_task (pad);

        if (flow_ret == GST_FLOW_EOS) {
            if ((self->segment.flags & GST_SEEK_FLAG_SEGMENT)) {
                GstMessage *m;
                GstEvent *e;

                GST_DEBUG_OBJECT (pad, "Sending segment done at end of segment");
                if (self->segment.rate >= 0) {
                    m = gst_message_new_segment_done (GST_OBJECT_CAST (self),
                                                      GST_FORMAT_TIME, self->segment.stop);
                    e = gst_event_new_segment_done (GST_FORMAT_TIME, self->segment.stop);
                } else {
                    m = gst_message_new_segment_done (GST_OBJECT_CAST (self),
                                                      GST_FORMAT_TIME, self->segment.start);
                    e = gst_event_new_segment_done (GST_FORMAT_TIME, self->segment.start);
                }
                gst_element_post_message (GST_ELEMENT_CAST (self), m);
                gst_pad_push_event (self->srcpad, e);
            } else {
                GST_DEBUG_OBJECT (pad, "Sending EOS at end of segment");
                gst_pad_push_event (self->srcpad, gst_event_new_eos ());
            }
        } else if (flow_ret == GST_FLOW_NOT_LINKED || flow_ret < GST_FLOW_EOS) {
            GST_ELEMENT_ERROR (self, STREAM, FAILED,
                               ("Internal data stream error."),
                               ("stream stopped, reason %s", reason));
            gst_pad_push_event (self->srcpad, gst_event_new_eos ());
        }
        return;
    }
}
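gst_image_freeze_src_loop() above runs as a src pad task. A minimal sketch
(not the element's actual activation code) of how such a task is typically
started in push mode; the helper name is made up:

static gboolean
gst_image_freeze_start_src_task (GstImageFreeze * self)
{
  /* run gst_image_freeze_src_loop() repeatedly from a streaming thread */
  return gst_pad_start_task (self->srcpad,
      (GstTaskFunction) gst_image_freeze_src_loop, self->srcpad, NULL);
}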
static GstFlowReturn
gst_dvdec_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
  GstDVDec *dvdec;
  guint8 *inframe;
  guint8 *outframe_ptrs[3];
  gint outframe_pitches[3];
  GstMapInfo map;
  GstVideoFrame frame;
  GstBuffer *outbuf;
  GstFlowReturn ret = GST_FLOW_OK;
  guint length;
  guint64 cstart = GST_CLOCK_TIME_NONE, cstop = GST_CLOCK_TIME_NONE;
  gboolean PAL, wide;

  dvdec = GST_DVDEC (parent);

  gst_buffer_map (buf, &map, GST_MAP_READ);
  inframe = map.data;

  /* the buffer should be at least the size of one NTSC frame; that is
   * enough to decode the header. */
  if (G_UNLIKELY (map.size < NTSC_BUFFER))
    goto wrong_size;

  /* preliminary dropping. unref and return if outside of configured segment */
  if ((dvdec->segment.format == GST_FORMAT_TIME) &&
      (!(gst_segment_clip (&dvdec->segment, GST_FORMAT_TIME,
                  GST_BUFFER_TIMESTAMP (buf),
                  GST_BUFFER_TIMESTAMP (buf) + GST_BUFFER_DURATION (buf),
                  &cstart, &cstop))))
    goto dropping;

  if (G_UNLIKELY (dv_parse_header (dvdec->decoder, inframe) < 0))
    goto parse_header_error;

  /* get size */
  PAL = dv_system_50_fields (dvdec->decoder);
  wide = dv_format_wide (dvdec->decoder);

  /* check that the buffer has the right size now that we know whether we
   * are dealing with PAL or NTSC */
  length = (PAL ? PAL_BUFFER : NTSC_BUFFER);
  if (G_UNLIKELY (map.size < length))
    goto wrong_size;

  dv_parse_packs (dvdec->decoder, inframe);

  if (dvdec->video_offset % dvdec->drop_factor != 0)
    goto skip;

  /* renegotiate on change */
  if (PAL != dvdec->PAL || wide != dvdec->wide) {
    dvdec->src_negotiated = FALSE;
    dvdec->PAL = PAL;
    dvdec->wide = wide;
  }

  dvdec->height = (dvdec->PAL ? PAL_HEIGHT : NTSC_HEIGHT);

  dvdec->interlaced = !dv_is_progressive (dvdec->decoder);

  /* negotiate if not done yet */
  if (!dvdec->src_negotiated) {
    if (!gst_dvdec_src_negotiate (dvdec))
      goto not_negotiated;
  }

  if (gst_pad_check_reconfigure (dvdec->srcpad)) {
    GstCaps *caps;

    caps = gst_pad_get_current_caps (dvdec->srcpad);
    if (!caps)
      goto flushing;

    gst_dvdec_negotiate_pool (dvdec, caps, &dvdec->vinfo);
    gst_caps_unref (caps);
  }

  if (dvdec->need_segment) {
    gst_pad_push_event (dvdec->srcpad, gst_event_new_segment (&dvdec->segment));
    dvdec->need_segment = FALSE;
  }

  ret = gst_buffer_pool_acquire_buffer (dvdec->pool, &outbuf, NULL);
  if (G_UNLIKELY (ret != GST_FLOW_OK))
    goto no_buffer;

  gst_video_frame_map (&frame, &dvdec->vinfo, outbuf, GST_MAP_WRITE);

  outframe_ptrs[0] = GST_VIDEO_FRAME_COMP_DATA (&frame, 0);
  outframe_pitches[0] = GST_VIDEO_FRAME_COMP_STRIDE (&frame, 0);

  /* the rest only matters for YUY2 */
  if (dvdec->bpp < 3) {
    outframe_ptrs[1] = GST_VIDEO_FRAME_COMP_DATA (&frame, 1);
    outframe_ptrs[2] = GST_VIDEO_FRAME_COMP_DATA (&frame, 2);

    outframe_pitches[1] = GST_VIDEO_FRAME_COMP_STRIDE (&frame, 1);
    outframe_pitches[2] = GST_VIDEO_FRAME_COMP_STRIDE (&frame, 2);
  }

  GST_DEBUG_OBJECT (dvdec, "decoding and pushing buffer");
  dv_decode_full_frame (dvdec->decoder, inframe,
      e_dv_color_yuv, outframe_ptrs, outframe_pitches);

  gst_video_frame_unmap (&frame);

  GST_BUFFER_FLAG_UNSET (outbuf, GST_VIDEO_BUFFER_FLAG_TFF);

  GST_BUFFER_OFFSET (outbuf) = GST_BUFFER_OFFSET (buf);
  GST_BUFFER_OFFSET_END (outbuf) = GST_BUFFER_OFFSET_END (buf);

  /* FIXME : Compute values when using non-TIME segments,
   * but for the moment make sure we at least don't set bogus values
   */
  if (GST_CLOCK_TIME_IS_VALID (cstart)) {
    GST_BUFFER_TIMESTAMP (outbuf) = cstart;
    if (GST_CLOCK_TIME_IS_VALID (cstop))
      GST_BUFFER_DURATION (outbuf) = cstop - cstart;
  }

  ret = gst_pad_push (dvdec->srcpad, outbuf);

skip:
  dvdec->video_offset++;

done:
  gst_buffer_unmap (buf, &map);
  gst_buffer_unref (buf);

  return ret;

  /* ERRORS */
wrong_size:
  {
    GST_ELEMENT_ERROR (dvdec, STREAM, DECODE,
        (NULL), ("Input buffer too small"));
    ret = GST_FLOW_ERROR;
    goto done;
  }
parse_header_error:
  {
    GST_ELEMENT_ERROR (dvdec, STREAM, DECODE,
        (NULL), ("Error parsing DV header"));
    ret = GST_FLOW_ERROR;
    goto done;
  }
not_negotiated:
  {
    GST_DEBUG_OBJECT (dvdec, "could not negotiate output");
    if (GST_PAD_IS_FLUSHING (dvdec->srcpad))
      ret = GST_FLOW_FLUSHING;
    else
      ret = GST_FLOW_NOT_NEGOTIATED;
    goto done;
  }
flushing:
  {
    GST_DEBUG_OBJECT (dvdec, "have no current caps");
    ret = GST_FLOW_FLUSHING;
    goto done;
  }
no_buffer:
  {
    GST_DEBUG_OBJECT (dvdec, "could not allocate buffer");
    goto done;
  }

dropping:
  {
    GST_DEBUG_OBJECT (dvdec,
        "dropping buffer since it's out of the configured segment");
    goto done;
  }
}
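The chain function above relies on a handful of size constants that are not
shown in this example. Assuming they follow the standard DV frame layout (one
DV frame is 120000 bytes for 525-60/NTSC and 144000 bytes for 625-50/PAL),
they would look roughly like this:

/* standard DV frame parameters; the element's actual defines may differ */
#define NTSC_HEIGHT 480
#define NTSC_BUFFER 120000
#define PAL_HEIGHT  576
#define PAL_BUFFER  144000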
Example 18
static GstFlowReturn process_audio_packet(MpegTSDemuxer *demuxer, AVPacket *packet)
{
    GstFlowReturn result = GST_FLOW_OK;
    Stream *stream = &demuxer->audio;

    if (!same_stream(demuxer, stream, packet))
        return result;

    GstBuffer *buffer = NULL;
    GstEvent *newsegment_event = NULL;
    void *buffer_data = av_mallocz(packet->size);

    if (buffer_data != NULL)
    {
        memcpy(buffer_data, packet->data, packet->size);
        buffer = gst_buffer_new_wrapped_full(0, buffer_data, packet->size, 0, packet->size, buffer_data, &av_free);

        if (packet->pts != AV_NOPTS_VALUE)
        {
            if (demuxer->base_pts == GST_CLOCK_TIME_NONE)
            {
                demuxer->base_pts = PTS_TO_GSTTIME(packet->pts) + stream->offset_time;
            }

            gint64 time = PTS_TO_GSTTIME(packet->pts) + stream->offset_time - demuxer->base_pts;
            if (time < 0)
                time = 0;

            if (stream->last_time > 0 && time < (gint64) (stream->last_time - PTS_TO_GSTTIME(G_MAXUINT32)))
            {
                stream->offset_time += PTS_TO_GSTTIME(MAX_PTS + 1); // Wraparound occurred
                time = PTS_TO_GSTTIME(packet->pts) + stream->offset_time;
#ifdef VERBOSE_DEBUG_AUDIO
                g_print("[Audio wraparound] updating offset_time to %lld\n", stream->offset_time);
#endif
            }

#ifdef VERBOSE_DEBUG_AUDIO
            g_print("[Audio]: pts=%lld(%.4f) time=%lld (%.4f) offset_time=%lld last_time=%lld\n",
                    PTS_TO_GSTTIME(packet->pts), (double) PTS_TO_GSTTIME(packet->pts) / GST_SECOND,
                    time, (double) time / GST_SECOND, stream->offset_time, stream->last_time);
#endif

            stream->last_time = time;
            GST_BUFFER_TIMESTAMP(buffer) = time;
        }

        if (packet->duration != 0)
            GST_BUFFER_DURATION(buffer) = PTS_TO_GSTTIME(packet->duration);

        g_mutex_lock(&demuxer->lock);
        stream->segment.position = GST_BUFFER_TIMESTAMP(buffer);

        if (stream->discont)
        {
            GstSegment newsegment;
            gst_segment_init(&newsegment, GST_FORMAT_TIME);
            newsegment.flags = stream->segment.flags;
            newsegment.rate = stream->segment.rate;
            newsegment.start = stream->segment.time;
            newsegment.stop = stream->segment.stop;
            newsegment.time = stream->segment.time;
            newsegment.position = stream->segment.position;
            newsegment_event = gst_event_new_segment(&newsegment);

            GST_BUFFER_FLAG_SET(buffer, GST_BUFFER_FLAG_DISCONT);
            stream->discont = FALSE;

#ifdef DEBUG_OUTPUT
            g_print("MpegTS: [Audio] NEWSEGMENT: last_stop = %.4f\n", (double) stream->segment.last_stop / GST_SECOND);
#endif
        }
        g_mutex_unlock(&demuxer->lock);
    } else
        result = GST_FLOW_ERROR;

    if (newsegment_event)
        result = gst_pad_push_event(stream->sourcepad, newsegment_event) ? GST_FLOW_OK : GST_FLOW_FLUSHING;

    if (result == GST_FLOW_OK)
        result = gst_pad_push(stream->sourcepad, buffer);
    else if (buffer)
        gst_buffer_unref(buffer);

#ifdef VERBOSE_DEBUG_AUDIO
    if (result != GST_FLOW_OK)
        g_print("MpegTS: Audio push failed: %s\n", gst_flow_get_name(result));
#endif
    return result;
}
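PTS_TO_GSTTIME and MAX_PTS are not shown in this example. Plausible
definitions, assuming the standard 90 kHz, 33-bit MPEG-TS PTS, are sketched
below; adding PTS_TO_GSTTIME(MAX_PTS + 1) to offset_time is what makes the
wraparound adjustment above compensate for a PTS rollover.

/* assumed definitions, based on the 90 kHz, 33-bit MPEG-TS PTS clock */
#define MAX_PTS G_GUINT64_CONSTANT (0x1FFFFFFFF)        /* 2^33 - 1 ticks */
#define PTS_TO_GSTTIME(pts) gst_util_uint64_scale ((pts), GST_SECOND, 90000)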
Example 19
static gboolean
gst_gme_dec_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
  GstGmeDec *gme = GST_GME_DEC (parent);
  gboolean result = FALSE;

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_SEEK:
    {
      gdouble rate;
      GstFormat format;
      GstSeekFlags flags;
      GstSeekType start_type, stop_type;
      gint64 start, stop;
      gboolean flush;

      gst_event_parse_seek (event, &rate, &format, &flags, &start_type, &start,
          &stop_type, &stop);

      gst_event_unref (event);

      if (format != GST_FORMAT_TIME) {
        GST_DEBUG_OBJECT (gme, "seeking is only supported in TIME format");
        break;
      }

      if (start_type != GST_SEEK_TYPE_SET || stop_type != GST_SEEK_TYPE_NONE) {
        GST_DEBUG_OBJECT (gme, "unsupported seek type");
        break;
      }

      if (stop_type == GST_SEEK_TYPE_NONE)
        stop = GST_CLOCK_TIME_NONE;

      if (start_type == GST_SEEK_TYPE_SET) {
        GstSegment seg;
        guint64 cur = gme_tell (gme->player) * GST_MSECOND;
        guint64 dest = (guint64) start;

        if (gme->total_duration != GST_CLOCK_TIME_NONE)
          dest = CLAMP (dest, 0, gme->total_duration);
        else
          dest = MAX (0, dest);

        if (dest == cur)
          break;

        flush = (flags & GST_SEEK_FLAG_FLUSH) == GST_SEEK_FLAG_FLUSH;

        if (flush) {
          gst_pad_push_event (gme->srcpad, gst_event_new_flush_start ());
        } else {
          gst_pad_stop_task (gme->srcpad);
        }

        GST_PAD_STREAM_LOCK (gme->srcpad);

        if (flags & GST_SEEK_FLAG_SEGMENT) {
          gst_element_post_message (GST_ELEMENT (gme),
              gst_message_new_segment_start (GST_OBJECT (gme), format, cur));
        }

        if (flush) {
          gst_pad_push_event (gme->srcpad, gst_event_new_flush_stop (TRUE));
        }

        if (stop == GST_CLOCK_TIME_NONE
            && gme->total_duration != GST_CLOCK_TIME_NONE)
          stop = gme->total_duration;

        gst_segment_init (&seg, GST_FORMAT_TIME);
        seg.rate = rate;
        seg.start = dest;
        seg.stop = stop;
        seg.time = dest;
        gst_pad_push_event (gme->srcpad, gst_event_new_segment (&seg));

        gme->seekpoint = dest / GST_MSECOND;    /* nsecs to msecs */
        gme->seeking = TRUE;

        gst_pad_start_task (gme->srcpad, (GstTaskFunction) gst_gme_play,
            gme->srcpad, NULL);

        GST_PAD_STREAM_UNLOCK (gme->srcpad);
        result = TRUE;
      }
      break;
    }
    default:
      result = gst_pad_push_event (gme->sinkpad, event);
      break;
  }

  return result;
}
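The seek handler above only records seekpoint/seeking and restarts the task;
the actual repositioning happens inside the gst_gme_play task. A hypothetical
helper (the function name is made up) showing how that task could honour the
request; gme_seek() is the libgme call that repositions the emulator to a
millisecond offset:

static void
gst_gme_dec_handle_pending_seek (GstGmeDec * gme)
{
  if (!gme->seeking)
    return;

  /* seekpoint was stored in milliseconds by the SEEK handler */
  gme_seek (gme->player, gme->seekpoint);
  gme->seeking = FALSE;
}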
Example 20
static void
_audiodecoder_flush_events (gboolean send_buffers)
{
  GstSegment segment;
  GstBuffer *buffer;
  guint64 i;
  GList *events_iter;
  GstMessage *msg;

  setup_audiodecodertester ();

  gst_pad_set_active (mysrcpad, TRUE);
  gst_element_set_state (dec, GST_STATE_PLAYING);
  gst_pad_set_active (mysinkpad, TRUE);

  send_startup_events ();

  /* push a new segment */
  gst_segment_init (&segment, GST_FORMAT_TIME);
  fail_unless (gst_pad_push_event (mysrcpad, gst_event_new_segment (&segment)));

  if (send_buffers) {
    /* push buffers, the data is actually a number so we can track them */
    for (i = 0; i < NUM_BUFFERS; i++) {
      if (i % 10 == 0) {
        GstTagList *tags;

        tags = gst_tag_list_new (GST_TAG_TRACK_NUMBER, i, NULL);
        fail_unless (gst_pad_push_event (mysrcpad, gst_event_new_tag (tags)));
      } else {
        buffer = create_test_buffer (i);

        fail_unless (gst_pad_push (mysrcpad, buffer) == GST_FLOW_OK);
      }
    }
  } else {
    /* push sticky event */
    GstTagList *tags;
    tags = gst_tag_list_new (GST_TAG_TRACK_NUMBER, 0, NULL);
    fail_unless (gst_pad_push_event (mysrcpad, gst_event_new_tag (tags)));
  }

  msg =
      gst_message_new_element (GST_OBJECT (mysrcpad),
      gst_structure_new_empty ("test"));
  fail_unless (gst_pad_push_event (mysrcpad,
      gst_event_new_sink_message ("test", msg)));
  gst_message_unref (msg);

  fail_unless (gst_pad_push_event (mysrcpad, gst_event_new_eos ()));

  events_iter = events;
  /* make sure the usual events have been received */
  {
    GstEvent *sstart = events_iter->data;
    fail_unless (GST_EVENT_TYPE (sstart) == GST_EVENT_STREAM_START);
    events_iter = g_list_next (events_iter);
  }
  if (send_buffers) {
    {
      GstEvent *caps_event = events_iter->data;
      fail_unless (GST_EVENT_TYPE (caps_event) == GST_EVENT_CAPS);
      events_iter = g_list_next (events_iter);
    }
    {
      GstEvent *segment_event = events_iter->data;
      fail_unless (GST_EVENT_TYPE (segment_event) == GST_EVENT_SEGMENT);
      events_iter = g_list_next (events_iter);
    }
    for (i = 0; i < NUM_BUFFERS / 10; i++) {
      GstEvent *tag_event = events_iter->data;
      fail_unless (GST_EVENT_TYPE (tag_event) == GST_EVENT_TAG);
      events_iter = g_list_next (events_iter);
    }
  }
  {
    GstEvent *eos_event = events_iter->data;
    fail_unless (GST_EVENT_TYPE (eos_event) == GST_EVENT_EOS);
    events_iter = g_list_next (events_iter);
  }

  /* check that EOS was received */
  fail_unless (GST_PAD_IS_EOS (mysrcpad));
  fail_unless (gst_pad_push_event (mysrcpad, gst_event_new_flush_start ()));
  fail_unless (GST_PAD_IS_EOS (mysrcpad));

  /* Check that we have tags */
  {
    GstEvent *tags = gst_pad_get_sticky_event (mysrcpad, GST_EVENT_TAG, 0);

    fail_unless (tags != NULL);
    gst_event_unref (tags);
  }

  /* Check that we still have a segment set */
  {
    GstEvent *segment =
        gst_pad_get_sticky_event (mysrcpad, GST_EVENT_SEGMENT, 0);

    fail_unless (segment != NULL);
    gst_event_unref (segment);
  }

  fail_unless (gst_pad_push_event (mysrcpad, gst_event_new_flush_stop (TRUE)));
  fail_if (GST_PAD_IS_EOS (mysrcpad));

  /* Check that the segment was flushed on FLUSH_STOP */
  {
    GstEvent *segment =
        gst_pad_get_sticky_event (mysrcpad, GST_EVENT_SEGMENT, 0);

    fail_unless (segment == NULL);
  }

  /* Check the tags were not lost on FLUSH_STOP */
  {
    GstEvent *tags = gst_pad_get_sticky_event (mysrcpad, GST_EVENT_TAG, 0);

    fail_unless (tags != NULL);
    gst_event_unref (tags);

  }

  g_list_free_full (buffers, (GDestroyNotify) gst_buffer_unref);
  buffers = NULL;

  gst_element_set_state (dec, GST_STATE_NULL);
  cleanup_audiodecodertest ();
}
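create_test_buffer() is not part of this example. One plausible shape for it,
matching the comment that "the data is actually a number so we can track
them" (the timestamps chosen here are an assumption):

static GstBuffer *
create_test_buffer (guint64 num)
{
  GstBuffer *buffer;
  guint64 *data = g_malloc (sizeof (guint64));

  /* store the counter as the payload so the test can identify the buffer */
  *data = num;
  buffer = gst_buffer_new_wrapped (data, sizeof (guint64));

  GST_BUFFER_PTS (buffer) = num * 10 * GST_MSECOND;
  GST_BUFFER_DURATION (buffer) = 10 * GST_MSECOND;

  return buffer;
}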
Example 21
static GstFlowReturn
gst_mim_dec_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
  GstMimDec *mimdec = GST_MIM_DEC (parent);
  GstBuffer *out_buf;
  const guchar *header, *frame_body;
  guint32 fourcc;
  guint16 header_size;
  gint width, height;
  GstCaps *caps;
  GstFlowReturn res = GST_FLOW_OK;
  GstClockTime in_time = GST_BUFFER_TIMESTAMP (buf);
  GstEvent *event = NULL;
  gboolean result = TRUE;
  guint32 payload_size;
  guint32 current_ts;
  GstMapInfo map;

  gst_adapter_push (mimdec->adapter, buf);


  /* do we have enough bytes to read a header */
  while (gst_adapter_available (mimdec->adapter) >= 24) {
    header = gst_adapter_map (mimdec->adapter, 24);
    header_size = header[0];
    if (header_size != 24) {
      gst_adapter_unmap (mimdec->adapter);
      gst_adapter_flush (mimdec->adapter, 24);
      GST_ELEMENT_ERROR (mimdec, STREAM, DECODE, (NULL),
          ("invalid frame: header size %d incorrect", header_size));
      return GST_FLOW_ERROR;
    }

    if (header[1] == 1) {
      /* This is a paused frame, skip it */
      gst_adapter_unmap (mimdec->adapter);
      gst_adapter_flush (mimdec->adapter, 24);
      continue;
    }

    fourcc = GUINT32_FROM_LE (*((guint32 *) (header + 12)));
    if (GST_MAKE_FOURCC ('M', 'L', '2', '0') != fourcc) {
      gst_adapter_unmap (mimdec->adapter);
      gst_adapter_flush (mimdec->adapter, 24);
      GST_ELEMENT_ERROR (mimdec, STREAM, WRONG_TYPE, (NULL),
          ("invalid frame: unknown FOURCC code 0x%" G_GINT32_MODIFIER "x",
              fourcc));
      return GST_FLOW_ERROR;
    }

    payload_size = GUINT32_FROM_LE (*((guint32 *) (header + 8)));

    current_ts = GUINT32_FROM_LE (*((guint32 *) (header + 20)));

    gst_adapter_unmap (mimdec->adapter);

    GST_LOG_OBJECT (mimdec, "Got packet, payload size %d", payload_size);

    if (gst_adapter_available (mimdec->adapter) < payload_size + 24)
      return GST_FLOW_OK;

    /* We have a whole packet and have read the header, let's flush it out */
    gst_adapter_flush (mimdec->adapter, 24);

    frame_body = gst_adapter_map (mimdec->adapter, payload_size);

    if (mimdec->buffer_size < 0) {
      /* Check if it's a keyframe, otherwise skip it */
      if (GUINT32_FROM_LE (*((guint32 *) (frame_body + 12))) != 0) {
        gst_adapter_unmap (mimdec->adapter);
        gst_adapter_flush (mimdec->adapter, payload_size);
        return GST_FLOW_OK;
      }

      if (!mimic_decoder_init (mimdec->dec, frame_body)) {
        gst_adapter_unmap (mimdec->adapter);
        gst_adapter_flush (mimdec->adapter, payload_size);
        GST_ELEMENT_ERROR (mimdec, LIBRARY, INIT, (NULL),
            ("mimic_decoder_init error"));
        return GST_FLOW_ERROR;
      }

      if (!mimic_get_property (mimdec->dec, "buffer_size",
              &mimdec->buffer_size)) {
        gst_adapter_unmap (mimdec->adapter);
        gst_adapter_flush (mimdec->adapter, payload_size);
        GST_ELEMENT_ERROR (mimdec, LIBRARY, INIT, (NULL),
            ("mimic_get_property('buffer_size') error"));
        return GST_FLOW_ERROR;
      }

      mimic_get_property (mimdec->dec, "width", &width);
      mimic_get_property (mimdec->dec, "height", &height);
      GST_DEBUG_OBJECT (mimdec,
          "Initialised decoder with %d x %d payload size %d buffer_size %d",
          width, height, payload_size, mimdec->buffer_size);
      caps = gst_caps_new_simple ("video/x-raw",
          "format", G_TYPE_STRING, "RGB",
          "framerate", GST_TYPE_FRACTION, 0, 1,
          "width", G_TYPE_INT, width, "height", G_TYPE_INT, height, NULL);
      gst_pad_set_caps (mimdec->srcpad, caps);
      gst_caps_unref (caps);
    }


    if (mimdec->need_segment) {
      GstSegment segment;

      gst_segment_init (&segment, GST_FORMAT_TIME);

      if (GST_CLOCK_TIME_IS_VALID (in_time))
        segment.start = in_time;
      else
        segment.start = current_ts * GST_MSECOND;
      event = gst_event_new_segment (&segment);
    }
    mimdec->need_segment = FALSE;

    if (event)
      result = gst_pad_push_event (mimdec->srcpad, event);
    event = NULL;

    if (!result) {
      GST_WARNING_OBJECT (mimdec, "gst_pad_push_event failed");
      return GST_FLOW_ERROR;
    }


    out_buf = gst_buffer_new_allocate (NULL, mimdec->buffer_size, NULL);
    gst_buffer_map (out_buf, &map, GST_MAP_READWRITE);

    if (!mimic_decode_frame (mimdec->dec, frame_body, map.data)) {
      GST_WARNING_OBJECT (mimdec, "mimic_decode_frame error\n");

      gst_adapter_flush (mimdec->adapter, payload_size);

      gst_buffer_unmap (out_buf, &map);
      gst_buffer_unref (out_buf);
      GST_ELEMENT_ERROR (mimdec, STREAM, DECODE, (NULL),
          ("mimic_decode_frame error"));
      return GST_FLOW_ERROR;
    }
    gst_buffer_unmap (out_buf, &map);
    gst_adapter_flush (mimdec->adapter, payload_size);

    if (GST_CLOCK_TIME_IS_VALID (in_time))
      GST_BUFFER_TIMESTAMP (out_buf) = in_time;
    else
      GST_BUFFER_TIMESTAMP (out_buf) = current_ts * GST_MSECOND;

    res = gst_pad_push (mimdec->srcpad, out_buf);

    if (res != GST_FLOW_OK)
      break;
  }

  return res;
}
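The header fields above are read through raw guint32 casts, which assumes the
mapped memory is suitably aligned. An equivalent, alignment-safe variant using
GStreamer's unaligned-read macros would be (sketch only, helper name made up):

static void
read_mim_header_fields (const guint8 * header, guint32 * fourcc,
    guint32 * payload_size, guint32 * current_ts)
{
  /* same byte offsets as in the chain function, read little-endian */
  *fourcc = GST_READ_UINT32_LE (header + 12);
  *payload_size = GST_READ_UINT32_LE (header + 8);
  *current_ts = GST_READ_UINT32_LE (header + 20);
}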
/* Pop a buffer from the cache and push it downstream.
 * This function returns the result of the push. */
static GstFlowReturn
gst_ts_shifter_pop (GstTSShifter * ts)
{
  GstFlowReturn ret = GST_FLOW_OK;
  GstBuffer *buffer;

  if (!(buffer = gst_ts_cache_pop (ts->cache, ts->is_eos)))
    goto no_item;

  if (ts->srcresult == GST_FLOW_FLUSHING) {
    gst_buffer_unref (buffer);
    goto out_flushing;
  }

  if (ts->stream_start_event) {
    if (!gst_pad_push_event (ts->srcpad, ts->stream_start_event)) {
      goto stream_start_failed;
    }
    ts->stream_start_event = NULL;
  }

  if (G_UNLIKELY (ts->need_newsegment)) {
    GstEvent *newsegment;

    ts->segment.start = GST_BUFFER_OFFSET (buffer);
    ts->segment.time = 0;       /* <- Not relevant for FORMAT_BYTES */
    ts->segment.flags |= GST_SEGMENT_FLAG_RESET;

    GST_DEBUG_OBJECT (ts, "pushing segment %" GST_SEGMENT_FORMAT, &ts->segment);

    newsegment = gst_event_new_segment (&ts->segment);
    if (newsegment) {
      if (!gst_pad_push_event (ts->srcpad, newsegment)) {
        goto segment_failed;
      }
    }
    ts->need_newsegment = FALSE;
  }
  ts->cur_bytes = GST_BUFFER_OFFSET_END (buffer);
  FLOW_MUTEX_UNLOCK (ts);

  GST_CAT_LOG_OBJECT (ts_flow, ts,
      "pushing buffer %p of size %" G_GSIZE_FORMAT ", offset %"
      G_GUINT64_FORMAT, buffer, gst_buffer_get_size (buffer),
      GST_BUFFER_OFFSET (buffer));

  ret = gst_pad_push (ts->srcpad, buffer);

  /* need to check for srcresult here as well */
  FLOW_MUTEX_LOCK_CHECK (ts, ts->srcresult, out_flushing);
  if (ret == GST_FLOW_EOS) {
    GST_CAT_LOG_OBJECT (ts_flow, ts, "got GST_FLOW_EOS from downstream");
    /* stop pushing buffers, we pop all buffers until we see an item that we
     * can push again, which is EOS or NEWSEGMENT. If there is nothing in the
     * cache we can push, we set a flag to make the sinkpad refuse more
     * buffers with an EOS return value until we receive something
     * pushable again or we get flushed. */
    while ((buffer = gst_ts_cache_pop (ts->cache, ts->is_eos))) {
      GST_CAT_LOG_OBJECT (ts_flow, ts, "dropping UNEXPECTED buffer %p", buffer);
      gst_buffer_unref (buffer);
    }
    /* no more items in the cache. Set the unexpected flag so that the
     * sinkpad refuses any more buffers from upstream. Since we will still
     * accept EOS and NEWSEGMENT we return GST_FLOW_OK to the caller
     * so that the task function does not shut down. */
    ts->unexpected = TRUE;
    ret = GST_FLOW_OK;
  }

  return ret;

  /* ERRORS */
no_item:
  {
    if (ts->is_eos) {
      GST_CAT_LOG_OBJECT (ts_flow, ts, "pushing EOS");
      gst_pad_push_event (ts->srcpad, gst_event_new_eos ());
      gst_pad_pause_task (ts->srcpad);
      GST_CAT_LOG_OBJECT (ts_flow, ts, "pause task, reason: EOS");
      return GST_FLOW_OK;
    } else {
      GST_CAT_LOG_OBJECT (ts_flow, ts, "exit because we have no item");
      return GST_FLOW_ERROR;
    }
  }
out_flushing:
  {
    GST_CAT_LOG_OBJECT (ts_flow, ts, "exit because we are flushing");
    return GST_FLOW_FLUSHING;
  }
segment_failed:
  {
    GST_CAT_LOG_OBJECT (ts_flow, ts, "push of SEGMENT event failed");
    return GST_FLOW_FLUSHING;
  }
stream_start_failed:
  {
    ts->stream_start_event = NULL;
    GST_CAT_LOG_OBJECT (ts_flow, ts, "push of STREAM_START event failed");
    return GST_FLOW_FLUSHING;
  }
}
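gst_ts_shifter_pop() above is entered with the flow lock held and is meant to
be driven from a src pad task. A heavily simplified sketch of such a loop;
the FLOW_MUTEX_* helpers and the GST_TS_SHIFTER cast are assumed from the
surrounding element code and the error-path lock handling is omitted:

static void
gst_ts_shifter_loop (GstPad * pad)
{
  GstTSShifter *ts = GST_TS_SHIFTER (GST_PAD_PARENT (pad));
  GstFlowReturn ret;

  FLOW_MUTEX_LOCK (ts);
  ret = gst_ts_shifter_pop (ts);
  ts->srcresult = ret;
  FLOW_MUTEX_UNLOCK (ts);

  if (ret != GST_FLOW_OK)
    gst_pad_pause_task (pad);
}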
/* sinkpad functions */
static gboolean
gst_stream_synchronizer_sink_event (GstPad * pad, GstObject * parent,
    GstEvent * event)
{
  GstStreamSynchronizer *self = GST_STREAM_SYNCHRONIZER (parent);
  gboolean ret = FALSE;

  GST_LOG_OBJECT (pad, "Handling event %s: %" GST_PTR_FORMAT,
      GST_EVENT_TYPE_NAME (event), event);

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_STREAM_START:
    {
      GstSyncStream *stream, *ostream;
      guint32 seqnum = gst_event_get_seqnum (event);
      guint group_id;
      gboolean have_group_id;
      GList *l;
      gboolean all_wait = TRUE;
      gboolean new_stream = TRUE;

      have_group_id = gst_event_parse_group_id (event, &group_id);

      GST_STREAM_SYNCHRONIZER_LOCK (self);
      self->have_group_id &= have_group_id;
      have_group_id = self->have_group_id;

      stream = gst_pad_get_element_private (pad);

      if (!stream) {
        GST_DEBUG_OBJECT (self, "No stream or STREAM_START from same source");
        GST_STREAM_SYNCHRONIZER_UNLOCK (self);
        break;
      }

      gst_event_parse_stream_flags (event, &stream->flags);

      if ((have_group_id && stream->group_id != group_id) || (!have_group_id
              && stream->stream_start_seqnum != seqnum)) {
        stream->is_eos = FALSE;
        stream->eos_sent = FALSE;
        stream->flushing = FALSE;
        stream->stream_start_seqnum = seqnum;
        stream->group_id = group_id;

        if (!have_group_id) {
          /* Check if this belongs to a stream that is already there,
           * e.g. we got the visualizations for an audio stream */
          for (l = self->streams; l; l = l->next) {
            ostream = l->data;

            if (ostream != stream && ostream->stream_start_seqnum == seqnum
                && !ostream->wait) {
              new_stream = FALSE;
              break;
            }
          }

          if (!new_stream) {
            GST_DEBUG_OBJECT (pad,
                "Stream %d belongs to running stream %d, no waiting",
                stream->stream_number, ostream->stream_number);
            stream->wait = FALSE;

            GST_STREAM_SYNCHRONIZER_UNLOCK (self);
            break;
          }
        } else if (group_id == self->group_id) {
          GST_DEBUG_OBJECT (pad, "Stream %d belongs to running group %d, "
              "no waiting", stream->stream_number, group_id);
          GST_STREAM_SYNCHRONIZER_UNLOCK (self);
          break;
        }

        GST_DEBUG_OBJECT (pad, "Stream %d changed", stream->stream_number);

        stream->wait = TRUE;

        for (l = self->streams; l; l = l->next) {
          GstSyncStream *ostream = l->data;

          all_wait = all_wait && ((ostream->flags & GST_STREAM_FLAG_SPARSE)
              || (ostream->wait && (!have_group_id
                      || ostream->group_id == group_id)));
          if (!all_wait)
            break;
        }

        if (all_wait) {
          gint64 position = 0;

          if (have_group_id)
            GST_DEBUG_OBJECT (self,
                "All streams have changed to group id %u -- unblocking",
                group_id);
          else
            GST_DEBUG_OBJECT (self, "All streams have changed -- unblocking");

          self->group_id = group_id;

          for (l = self->streams; l; l = l->next) {
            GstSyncStream *ostream = l->data;
            gint64 stop_running_time;
            gint64 position_running_time;

            ostream->wait = FALSE;

            if (ostream->segment.format == GST_FORMAT_TIME) {
              if (ostream->segment.rate > 0)
                stop_running_time =
                    gst_segment_to_running_time (&ostream->segment,
                    GST_FORMAT_TIME, ostream->segment.stop);
              else
                stop_running_time =
                    gst_segment_to_running_time (&ostream->segment,
                    GST_FORMAT_TIME, ostream->segment.start);

              position_running_time =
                  gst_segment_to_running_time (&ostream->segment,
                  GST_FORMAT_TIME, ostream->segment.position);

              position_running_time =
                  MAX (position_running_time, stop_running_time);

              if (ostream->segment.rate > 0)
                position_running_time -=
                    gst_segment_to_running_time (&ostream->segment,
                    GST_FORMAT_TIME, ostream->segment.start);
              else
                position_running_time -=
                    gst_segment_to_running_time (&ostream->segment,
                    GST_FORMAT_TIME, ostream->segment.stop);

              position_running_time = MAX (0, position_running_time);

              position = MAX (position, position_running_time);
            }
          }

          self->group_start_time += position;

          GST_DEBUG_OBJECT (self, "New group start time: %" GST_TIME_FORMAT,
              GST_TIME_ARGS (self->group_start_time));

          for (l = self->streams; l; l = l->next) {
            GstSyncStream *ostream = l->data;
            ostream->wait = FALSE;
            g_cond_broadcast (&ostream->stream_finish_cond);
          }
        }
      }

      GST_STREAM_SYNCHRONIZER_UNLOCK (self);
      break;
    }
    case GST_EVENT_SEGMENT:{
      GstSyncStream *stream;
      GstSegment segment;

      gst_event_copy_segment (event, &segment);

      GST_STREAM_SYNCHRONIZER_LOCK (self);

      gst_stream_synchronizer_wait (self, pad);

      if (self->shutdown) {
        GST_STREAM_SYNCHRONIZER_UNLOCK (self);
        gst_event_unref (event);
        goto done;
      }

      stream = gst_pad_get_element_private (pad);
      if (stream && segment.format == GST_FORMAT_TIME) {
        GST_DEBUG_OBJECT (pad,
            "New stream, updating base from %" GST_TIME_FORMAT " to %"
            GST_TIME_FORMAT, GST_TIME_ARGS (segment.base),
            GST_TIME_ARGS (segment.base + self->group_start_time));
        segment.base += self->group_start_time;

        GST_DEBUG_OBJECT (pad, "Segment was: %" GST_SEGMENT_FORMAT,
            &stream->segment);
        gst_segment_copy_into (&segment, &stream->segment);
        GST_DEBUG_OBJECT (pad, "Segment now is: %" GST_SEGMENT_FORMAT,
            &stream->segment);
        stream->segment_seqnum = gst_event_get_seqnum (event);

        GST_DEBUG_OBJECT (pad, "Stream start running time: %" GST_TIME_FORMAT,
            GST_TIME_ARGS (stream->segment.base));
        {
          GstEvent *tmpev;

          tmpev = gst_event_new_segment (&stream->segment);
          gst_event_set_seqnum (tmpev, stream->segment_seqnum);
          gst_event_unref (event);
          event = tmpev;
        }
      } else if (stream) {
        GST_WARNING_OBJECT (pad, "Non-TIME segment: %s",
            gst_format_get_name (segment.format));
        gst_segment_init (&stream->segment, GST_FORMAT_UNDEFINED);
      }
      GST_STREAM_SYNCHRONIZER_UNLOCK (self);
      break;
    }
    case GST_EVENT_FLUSH_START:{
      GstSyncStream *stream;

      GST_STREAM_SYNCHRONIZER_LOCK (self);
      stream = gst_pad_get_element_private (pad);
      self->eos = FALSE;
      if (stream) {
        GST_DEBUG_OBJECT (pad, "Flushing streams");
        stream->flushing = TRUE;
        g_cond_broadcast (&stream->stream_finish_cond);
      }
      GST_STREAM_SYNCHRONIZER_UNLOCK (self);
      break;
    }
    case GST_EVENT_FLUSH_STOP:{
      GstSyncStream *stream;
      GList *l;
      GstClockTime new_group_start_time = 0;

      GST_STREAM_SYNCHRONIZER_LOCK (self);
      stream = gst_pad_get_element_private (pad);
      if (stream) {
        GST_DEBUG_OBJECT (pad, "Resetting segment for stream %d",
            stream->stream_number);
        gst_segment_init (&stream->segment, GST_FORMAT_UNDEFINED);

        stream->is_eos = FALSE;
        stream->eos_sent = FALSE;
        stream->flushing = FALSE;
        stream->wait = FALSE;
        g_cond_broadcast (&stream->stream_finish_cond);
      }

      for (l = self->streams; l; l = l->next) {
        GstSyncStream *ostream = l->data;
        GstClockTime start_running_time;

        if (ostream == stream || ostream->flushing)
          continue;

        if (ostream->segment.format == GST_FORMAT_TIME) {
          if (ostream->segment.rate > 0)
            start_running_time =
                gst_segment_to_running_time (&ostream->segment,
                GST_FORMAT_TIME, ostream->segment.start);
          else
            start_running_time =
                gst_segment_to_running_time (&ostream->segment,
                GST_FORMAT_TIME, ostream->segment.stop);

          new_group_start_time = MAX (new_group_start_time, start_running_time);
        }
      }

      GST_DEBUG_OBJECT (pad,
          "Updating group start time from %" GST_TIME_FORMAT " to %"
          GST_TIME_FORMAT, GST_TIME_ARGS (self->group_start_time),
          GST_TIME_ARGS (new_group_start_time));
      self->group_start_time = new_group_start_time;
      GST_STREAM_SYNCHRONIZER_UNLOCK (self);
      break;
    }
      /* unblock the EOS wait when switching tracks. */
    case GST_EVENT_CUSTOM_DOWNSTREAM_OOB:{
      if (gst_event_has_name (event, "playsink-custom-video-flush")
          || gst_event_has_name (event, "playsink-custom-audio-flush")
          || gst_event_has_name (event, "playsink-custom-subtitle-flush")) {
        GstSyncStream *stream;

        GST_STREAM_SYNCHRONIZER_LOCK (self);
        stream = gst_pad_get_element_private (pad);
        if (stream) {
          stream->is_eos = FALSE;
          stream->eos_sent = FALSE;
          stream->wait = FALSE;
          g_cond_broadcast (&stream->stream_finish_cond);
        }
        GST_STREAM_SYNCHRONIZER_UNLOCK (self);
      }
      break;
    }
    case GST_EVENT_EOS:{
      GstSyncStream *stream;
      GList *l;
      gboolean all_eos = TRUE;
      gboolean seen_data;
      GSList *pads = NULL;
      GstPad *srcpad;
      GstClockTime timestamp;

      GST_STREAM_SYNCHRONIZER_LOCK (self);
      stream = gst_pad_get_element_private (pad);
      if (!stream) {
        GST_STREAM_SYNCHRONIZER_UNLOCK (self);
        GST_WARNING_OBJECT (pad, "EOS for unknown stream");
        break;
      }

      GST_DEBUG_OBJECT (pad, "Have EOS for stream %d", stream->stream_number);
      stream->is_eos = TRUE;

      seen_data = stream->seen_data;
      srcpad = gst_object_ref (stream->srcpad);

      if (seen_data && stream->segment.position != -1)
        timestamp = stream->segment.position;
      else if (stream->segment.rate < 0.0 || stream->segment.stop == -1)
        timestamp = stream->segment.start;
      else
        timestamp = stream->segment.stop;

      stream->segment.position = timestamp;

      for (l = self->streams; l; l = l->next) {
        GstSyncStream *ostream = l->data;

        all_eos = all_eos && ostream->is_eos;
        if (!all_eos)
          break;
      }

      if (all_eos) {
        GST_DEBUG_OBJECT (self, "All streams are EOS -- forwarding");
        self->eos = TRUE;
        for (l = self->streams; l; l = l->next) {
          GstSyncStream *ostream = l->data;
          /* local snapshot of current pads */
          gst_object_ref (ostream->srcpad);
          pads = g_slist_prepend (pads, ostream->srcpad);
        }
      }
      if (pads) {
        GstPad *pad;
        GSList *epad;
        GstSyncStream *ostream;

        ret = TRUE;
        epad = pads;
        while (epad) {
          pad = epad->data;
          ostream = gst_pad_get_element_private (pad);
          if (ostream) {
            g_cond_broadcast (&ostream->stream_finish_cond);
          }

          gst_object_unref (pad);
          epad = g_slist_next (epad);
        }
        g_slist_free (pads);
      } else {
        if (seen_data) {
          self->send_gap_event = TRUE;
          stream->gap_duration = GST_CLOCK_TIME_NONE;
          stream->wait = TRUE;
          ret = gst_stream_synchronizer_wait (self, srcpad);
        }
      }

      /* Send EOS if we have not seen any data on this stream. seen_data is
       * TRUE once at least one data buffer has been received on the track;
       * in that case the sink has prerolled and a GAP event can be sent
       * instead. If the sink is not ready, send EOS directly. This covers a
       * track that has no media data at all before EOS, or a track with no
       * valid data, where the decoder never produced CAPS and the sink
       * could not preroll without them. */
      if (!seen_data || self->eos) {
        GST_DEBUG_OBJECT (pad, "send EOS event");
        /* drop lock when sending eos, which may block in e.g. preroll */
        GST_STREAM_SYNCHRONIZER_UNLOCK (self);
        ret = gst_pad_push_event (srcpad, gst_event_new_eos ());
        GST_STREAM_SYNCHRONIZER_LOCK (self);
        stream = gst_pad_get_element_private (pad);
        if (stream) {
          stream->eos_sent = TRUE;
        }
      }

      gst_object_unref (srcpad);
      gst_event_unref (event);
      GST_STREAM_SYNCHRONIZER_UNLOCK (self);
      goto done;
    }
    default:
      break;
  }

  ret = gst_pad_event_default (pad, parent, event);

done:

  return ret;
}
Example 24
static GstFlowReturn
gst_ffmpegmux_collected (GstCollectPads * pads, gpointer user_data)
{
  GstFFMpegMux *ffmpegmux = (GstFFMpegMux *) user_data;
  GSList *collected;
  GstFFMpegMuxPad *best_pad;
  GstClockTime best_time;
#if 0
  /* Re-enable once converted to new AVMetaData API
   * See #566605
   */
  const GstTagList *tags;
#endif

  /* open "file" (gstreamer protocol to next element) */
  if (!ffmpegmux->opened) {
    int open_flags = AVIO_FLAG_WRITE;

    /* we do need all streams to have started capsnego,
     * or things will go horribly wrong */
    for (collected = ffmpegmux->collect->data; collected;
        collected = g_slist_next (collected)) {
      GstFFMpegMuxPad *collect_pad = (GstFFMpegMuxPad *) collected->data;
      AVStream *st = ffmpegmux->context->streams[collect_pad->padnum];

      /* check whether the pad has successfully completed capsnego */
      if (st->codec->codec_id == AV_CODEC_ID_NONE) {
        GST_ELEMENT_ERROR (ffmpegmux, CORE, NEGOTIATION, (NULL),
            ("no caps set on stream %d (%s)", collect_pad->padnum,
                (st->codec->codec_type == AVMEDIA_TYPE_VIDEO) ?
                "video" : "audio"));
        return GST_FLOW_ERROR;
      }
      /* set framerate for audio */
      if (st->codec->codec_type == AVMEDIA_TYPE_AUDIO) {
        switch (st->codec->codec_id) {
          case AV_CODEC_ID_PCM_S16LE:
          case AV_CODEC_ID_PCM_S16BE:
          case AV_CODEC_ID_PCM_U16LE:
          case AV_CODEC_ID_PCM_U16BE:
          case AV_CODEC_ID_PCM_S8:
          case AV_CODEC_ID_PCM_U8:
            st->codec->frame_size = 1;
            break;
          default:
          {
            GstBuffer *buffer;

            /* FIXME : This doesn't work for RAW AUDIO...
             * in fact I'm wondering if it even works for any kind of audio... */
            buffer = gst_collect_pads_peek (ffmpegmux->collect,
                (GstCollectData *) collect_pad);
            if (buffer) {
              st->codec->frame_size =
                  st->codec->sample_rate *
                  GST_BUFFER_DURATION (buffer) / GST_SECOND;
              gst_buffer_unref (buffer);
            }
          }
        }
      }
    }

#if 0
    /* Re-enable once converted to new AVMetaData API
     * See #566605
     */

    /* tags */
    tags = gst_tag_setter_get_tag_list (GST_TAG_SETTER (ffmpegmux));
    if (tags) {
      gint i;
      gchar *s;

      /* get the interesting ones */
      if (gst_tag_list_get_string (tags, GST_TAG_TITLE, &s)) {
        strncpy (ffmpegmux->context->title, s,
            sizeof (ffmpegmux->context->title));
      }
      if (gst_tag_list_get_string (tags, GST_TAG_ARTIST, &s)) {
        strncpy (ffmpegmux->context->author, s,
            sizeof (ffmpegmux->context->author));
      }
      if (gst_tag_list_get_string (tags, GST_TAG_COPYRIGHT, &s)) {
        strncpy (ffmpegmux->context->copyright, s,
            sizeof (ffmpegmux->context->copyright));
      }
      if (gst_tag_list_get_string (tags, GST_TAG_COMMENT, &s)) {
        strncpy (ffmpegmux->context->comment, s,
            sizeof (ffmpegmux->context->comment));
      }
      if (gst_tag_list_get_string (tags, GST_TAG_ALBUM, &s)) {
        strncpy (ffmpegmux->context->album, s,
            sizeof (ffmpegmux->context->album));
      }
      if (gst_tag_list_get_string (tags, GST_TAG_GENRE, &s)) {
        strncpy (ffmpegmux->context->genre, s,
            sizeof (ffmpegmux->context->genre));
      }
      if (gst_tag_list_get_int (tags, GST_TAG_TRACK_NUMBER, &i)) {
        ffmpegmux->context->track = i;
      }
    }
#endif

    /* set the streamheader flag for gstffmpegprotocol if codec supports it */
    if (!strcmp (ffmpegmux->context->oformat->name, "flv")) {
      open_flags |= GST_FFMPEG_URL_STREAMHEADER;
    }

    /* some house-keeping for downstream before starting data flow */
    /* stream-start (FIXME: create id based on input ids) */
    {
      gchar s_id[32];

      g_snprintf (s_id, sizeof (s_id), "avmux-%08x", g_random_int ());
      gst_pad_push_event (ffmpegmux->srcpad, gst_event_new_stream_start (s_id));
    }
    /* segment */
    {
      GstSegment segment;

      /* let downstream know we think in BYTES and expect to do seeking later on */
      gst_segment_init (&segment, GST_FORMAT_BYTES);
      gst_pad_push_event (ffmpegmux->srcpad, gst_event_new_segment (&segment));
    }

    if (gst_ffmpegdata_open (ffmpegmux->srcpad, open_flags,
            &ffmpegmux->context->pb) < 0) {
      GST_ELEMENT_ERROR (ffmpegmux, LIBRARY, TOO_LAZY, (NULL),
          ("Failed to open stream context in avmux"));
      return GST_FLOW_ERROR;
    }

    /* now open the mux format */
    if (avformat_write_header (ffmpegmux->context, NULL) < 0) {
      GST_ELEMENT_ERROR (ffmpegmux, LIBRARY, SETTINGS, (NULL),
          ("Failed to write file header - check codec settings"));
      return GST_FLOW_ERROR;
    }

    /* we're now opened */
    ffmpegmux->opened = TRUE;

    /* flush the header so it will be used as streamheader */
    avio_flush (ffmpegmux->context->pb);
  }

  /* take the one with earliest timestamp,
   * and push it forward */
  best_pad = NULL;
  best_time = GST_CLOCK_TIME_NONE;
  for (collected = ffmpegmux->collect->data; collected;
      collected = g_slist_next (collected)) {
    GstFFMpegMuxPad *collect_pad = (GstFFMpegMuxPad *) collected->data;
    GstBuffer *buffer = gst_collect_pads_peek (ffmpegmux->collect,
        (GstCollectData *) collect_pad);

    /* if there's no buffer, just continue */
    if (buffer == NULL) {
      continue;
    }

    /* if we have no buffer yet, just use the first one */
    if (best_pad == NULL) {
      best_pad = collect_pad;
      best_time = GST_BUFFER_TIMESTAMP (buffer);
      goto next_pad;
    }

    /* if we do have one, only use this one if it's older */
    if (GST_BUFFER_TIMESTAMP (buffer) < best_time) {
      best_time = GST_BUFFER_TIMESTAMP (buffer);
      best_pad = collect_pad;
    }

  next_pad:
    gst_buffer_unref (buffer);

    /* Mux buffers with invalid timestamp first */
    if (!GST_CLOCK_TIME_IS_VALID (best_time))
      break;
  }

  /* now handle the buffer, or signal EOS if we have
   * no buffers left */
  if (best_pad != NULL) {
    GstBuffer *buf;
    AVPacket pkt;
    gboolean need_free = FALSE;
    GstMapInfo map;

    /* push out current buffer */
    buf =
        gst_collect_pads_pop (ffmpegmux->collect, (GstCollectData *) best_pad);

    ffmpegmux->context->streams[best_pad->padnum]->codec->frame_number++;

    /* set time */
    pkt.pts = gst_ffmpeg_time_gst_to_ff (GST_BUFFER_TIMESTAMP (buf),
        ffmpegmux->context->streams[best_pad->padnum]->time_base);
    pkt.dts = pkt.pts;

    if (strcmp (ffmpegmux->context->oformat->name, "gif") == 0) {
      AVStream *st = ffmpegmux->context->streams[best_pad->padnum];
      AVPicture src, dst;

      need_free = TRUE;
      pkt.size = st->codec->width * st->codec->height * 3;
      pkt.data = g_malloc (pkt.size);

      dst.data[0] = pkt.data;
      dst.data[1] = NULL;
      dst.data[2] = NULL;
      dst.linesize[0] = st->codec->width * 3;

      gst_buffer_map (buf, &map, GST_MAP_READ);
      gst_ffmpeg_avpicture_fill (&src, map.data,
          AV_PIX_FMT_RGB24, st->codec->width, st->codec->height);

      av_picture_copy (&dst, &src, AV_PIX_FMT_RGB24,
          st->codec->width, st->codec->height);
      gst_buffer_unmap (buf, &map);
    } else {
      gst_buffer_map (buf, &map, GST_MAP_READ);
      pkt.data = map.data;
      pkt.size = map.size;
    }

    pkt.stream_index = best_pad->padnum;
    pkt.flags = 0;

    if (!GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT))
      pkt.flags |= AV_PKT_FLAG_KEY;

    if (GST_BUFFER_DURATION_IS_VALID (buf))
      pkt.duration =
          gst_ffmpeg_time_gst_to_ff (GST_BUFFER_DURATION (buf),
          ffmpegmux->context->streams[best_pad->padnum]->time_base);
    else
      pkt.duration = 0;
    av_write_frame (ffmpegmux->context, &pkt);
    if (need_free) {
      g_free (pkt.data);
    } else {
      gst_buffer_unmap (buf, &map);
    }
    gst_buffer_unref (buf);
  } else {
    /* close down */
    av_write_trailer (ffmpegmux->context);
    ffmpegmux->opened = FALSE;
    avio_flush (ffmpegmux->context->pb);
    gst_ffmpegdata_close (ffmpegmux->context->pb);
    gst_pad_push_event (ffmpegmux->srcpad, gst_event_new_eos ());
    return GST_FLOW_EOS;
  }

  return GST_FLOW_OK;
}
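For intuition about the pkt.pts/pkt.duration conversion above:
gst_ffmpeg_time_gst_to_ff() rescales a GstClockTime (nanoseconds) into the
stream's time_base units. A made-up example:

/* With a stream time_base of 1/90000, a 40 ms GStreamer timestamp becomes
 * 0.040 s * 90000 ticks/s = 3600, so pkt.pts would be 3600. */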
Example 25
static GstFlowReturn
gst_interleave_collected (GstCollectPads * pads, GstInterleave * self)
{
  guint size;
  GstBuffer *outbuf = NULL;
  GstFlowReturn ret = GST_FLOW_OK;
  GSList *collected;
  guint nsamples;
  guint ncollected = 0;
  gboolean empty = TRUE;
  gint width = self->width / 8;
  GstMapInfo write_info;
  GstClockTime timestamp = -1;

  size = gst_collect_pads_available (pads);
  if (size == 0)
    goto eos;

  g_return_val_if_fail (self->func != NULL, GST_FLOW_NOT_NEGOTIATED);
  g_return_val_if_fail (self->width > 0, GST_FLOW_NOT_NEGOTIATED);
  g_return_val_if_fail (self->channels > 0, GST_FLOW_NOT_NEGOTIATED);
  g_return_val_if_fail (self->rate > 0, GST_FLOW_NOT_NEGOTIATED);

  g_return_val_if_fail (size % width == 0, GST_FLOW_ERROR);

  GST_DEBUG_OBJECT (self, "Starting to collect %u bytes from %d channels", size,
      self->channels);

  nsamples = size / width;

  outbuf = gst_buffer_new_allocate (NULL, size * self->channels, NULL);

  if (outbuf == NULL || gst_buffer_get_size (outbuf) < size * self->channels) {
    if (outbuf)
      gst_buffer_unref (outbuf);
    return GST_FLOW_NOT_NEGOTIATED;
  }

  gst_buffer_map (outbuf, &write_info, GST_MAP_WRITE);
  memset (write_info.data, 0, size * self->channels);

  for (collected = pads->data; collected != NULL; collected = collected->next) {
    GstCollectData *cdata;
    GstBuffer *inbuf;
    guint8 *outdata;
    GstMapInfo input_info;
    gint channel;

    cdata = (GstCollectData *) collected->data;

    inbuf = gst_collect_pads_take_buffer (pads, cdata, size);
    if (inbuf == NULL) {
      GST_DEBUG_OBJECT (cdata->pad, "No buffer available");
      goto next;
    }
    ncollected++;

    if (timestamp == -1)
      timestamp = GST_BUFFER_TIMESTAMP (inbuf);

    if (GST_BUFFER_FLAG_IS_SET (inbuf, GST_BUFFER_FLAG_GAP))
      goto next;

    empty = FALSE;
    channel = GST_INTERLEAVE_PAD_CAST (cdata->pad)->channel;
    if (self->channels <= 64 && self->channel_mask) {
      channel = self->default_channels_ordering_map[channel];
    }
    outdata = write_info.data + width * channel;

    gst_buffer_map (inbuf, &input_info, GST_MAP_READ);
    self->func (outdata, input_info.data, self->channels, nsamples);
    gst_buffer_unmap (inbuf, &input_info);

  next:
    if (inbuf)
      gst_buffer_unref (inbuf);
  }

  if (ncollected == 0) {
    gst_buffer_unmap (outbuf, &write_info);
    goto eos;
  }

  GST_OBJECT_LOCK (self);
  if (self->pending_segment) {
    GstEvent *event;
    GstSegment segment;

    event = self->pending_segment;
    self->pending_segment = NULL;
    GST_OBJECT_UNLOCK (self);

    /* convert the input segment to time now */
    gst_event_copy_segment (event, &segment);

    if (segment.format != GST_FORMAT_TIME) {
      gst_event_unref (event);

      /* not time, convert */
      switch (segment.format) {
        case GST_FORMAT_BYTES:
          segment.start /= width;
          if (segment.stop != -1)
            segment.stop /= width;
          if (segment.position != -1)
            segment.position /= width;
          /* fallthrough for the samples case */
        case GST_FORMAT_DEFAULT:
          segment.start =
              gst_util_uint64_scale_int (segment.start, GST_SECOND, self->rate);
          if (segment.stop != -1)
            segment.stop =
                gst_util_uint64_scale_int (segment.stop, GST_SECOND,
                self->rate);
          if (segment.position != -1)
            segment.position =
                gst_util_uint64_scale_int (segment.position, GST_SECOND,
                self->rate);
          break;
        default:
          GST_WARNING ("can't convert segment values");
          segment.start = 0;
          segment.stop = -1;
          segment.position = 0;
          break;
      }
      event = gst_event_new_segment (&segment);
    }
    gst_pad_push_event (self->src, event);

    GST_OBJECT_LOCK (self);
  }
  GST_OBJECT_UNLOCK (self);

  if (timestamp != -1) {
    self->offset = gst_util_uint64_scale_int (timestamp, self->rate,
        GST_SECOND);
    self->timestamp = timestamp;
  }

  GST_BUFFER_TIMESTAMP (outbuf) = self->timestamp;
  GST_BUFFER_OFFSET (outbuf) = self->offset;

  self->offset += nsamples;
  self->timestamp = gst_util_uint64_scale_int (self->offset,
      GST_SECOND, self->rate);

  GST_BUFFER_DURATION (outbuf) =
      self->timestamp - GST_BUFFER_TIMESTAMP (outbuf);

  if (empty)
    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_GAP);

  gst_buffer_unmap (outbuf, &write_info);

  GST_LOG_OBJECT (self, "pushing outbuf, timestamp %" GST_TIME_FORMAT,
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)));
  ret = gst_pad_push (self->src, outbuf);

  return ret;

eos:
  {
    GST_DEBUG_OBJECT (self, "no data available, must be EOS");
    if (outbuf)
      gst_buffer_unref (outbuf);
    gst_pad_push_event (self->src, gst_event_new_eos ());
    return GST_FLOW_EOS;
  }
}
Example 26
static GstEvent *
create_segment_event (GstRTPBaseDepayload * filter, guint rtptime,
    GstClockTime position)
{
  GstEvent *event;
  GstClockTime start, stop, running_time;
  GstRTPBaseDepayloadPrivate *priv;
  GstSegment segment;

  priv = filter->priv;

  /* We don't need the object lock around - the segment
   * can't change here while we're holding the STREAM_LOCK
   */

  /* determining the start of the segment */
  start = filter->segment.start;
  if (priv->clock_base != -1 && position != -1) {
    GstClockTime exttime, gap;

    exttime = priv->clock_base;
    gst_rtp_buffer_ext_timestamp (&exttime, rtptime);
    gap = gst_util_uint64_scale_int (exttime - priv->clock_base,
        GST_SECOND, filter->clock_rate);

    /* account for lost packets */
    if (position > gap) {
      GST_DEBUG_OBJECT (filter,
          "Found gap of %" GST_TIME_FORMAT ", adjusting start: %"
          GST_TIME_FORMAT " = %" GST_TIME_FORMAT " - %" GST_TIME_FORMAT,
          GST_TIME_ARGS (gap), GST_TIME_ARGS (position - gap),
          GST_TIME_ARGS (position), GST_TIME_ARGS (gap));
      start = position - gap;
    }
  }

  /* determining the stop of the segment */
  stop = filter->segment.stop;
  if (priv->npt_stop != -1)
    stop = start + (priv->npt_stop - priv->npt_start);

  if (position == -1)
    position = start;

  running_time = gst_segment_to_running_time (&filter->segment,
      GST_FORMAT_TIME, start);

  gst_segment_init (&segment, GST_FORMAT_TIME);
  segment.rate = priv->play_speed;
  segment.applied_rate = priv->play_scale;
  segment.start = start;
  segment.stop = stop;
  segment.time = priv->npt_start;
  segment.position = position;
  segment.base = running_time;

  GST_DEBUG_OBJECT (filter, "Creating segment event %" GST_SEGMENT_FORMAT,
      &segment);
  event = gst_event_new_segment (&segment);

  return event;
}
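
/* A standalone sketch of the extended-timestamp handling used in
 * create_segment_event() above: gst_rtp_buffer_ext_timestamp() unwraps the
 * 32-bit RTP timestamp into a monotonically increasing 64-bit value, which
 * is then converted to GstClockTime with the stream's clock-rate. The helper
 * name and its parameters are illustrative assumptions. */
#include <gst/gst.h>
#include <gst/rtp/gstrtpbuffer.h>

static GstClockTime
rtp_elapsed_since_base_sketch (guint64 clock_base, guint32 rtptime,
    gint clock_rate)
{
  guint64 ext = clock_base;

  /* unwrap rtptime against the last extended value (the clock-base here) */
  gst_rtp_buffer_ext_timestamp (&ext, rtptime);

  /* RTP clock units -> nanoseconds */
  return gst_util_uint64_scale_int (ext - clock_base, GST_SECOND, clock_rate);
}
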
static void src_task_loop(GstPad *pad)
{
    GstErDtlsEnc *self = GST_ER_DTLS_ENC(GST_PAD_PARENT(pad));
    GstFlowReturn ret;
    GstPad *peer;
    gboolean peer_is_active;

    if (!gst_pad_is_active(pad)) {
        GST_LOG_OBJECT(self, "src task loop entered on inactive pad");
        return;
    }

    GST_TRACE_OBJECT(self, "src loop: acquiring lock");
    g_mutex_lock(&self->queue_lock);
    GST_TRACE_OBJECT(self, "src loop: acquired lock");

    while (!self->queue->len) {
        GST_TRACE_OBJECT(self, "src loop: queue empty, waiting for add");
        g_cond_wait(&self->queue_cond_add, &self->queue_lock);
        GST_TRACE_OBJECT(self, "src loop: add signaled");

        if (!gst_pad_is_active(pad)) {
            GST_LOG_OBJECT(self, "pad inactive, task returning");
            GST_TRACE_OBJECT(self, "src loop: releasing lock");
            g_mutex_unlock(&self->queue_lock);
            return;
        }
    }
    GST_TRACE_OBJECT(self, "src loop: queue has element");

    peer = gst_pad_get_peer(pad);
    peer_is_active = gst_pad_is_active(peer);
    gst_object_unref(peer);

    if (peer_is_active) {
        GstBuffer *buffer;
        gboolean start_connection_timeout = FALSE;

        if (self->send_initial_events) {
          GstSegment segment;
          gchar s_id[32];
          GstCaps *caps;

          g_snprintf (s_id, sizeof (s_id), "erdtlsenc-%08x", g_random_int ());
          gst_pad_push_event (self->src, gst_event_new_stream_start (s_id));
          caps = gst_caps_new_empty_simple ("application/x-dtls");
          gst_pad_push_event (self->src, gst_event_new_caps (caps));
          gst_caps_unref (caps);
          gst_segment_init (&segment, GST_FORMAT_BYTES);
          gst_pad_push_event (self->src, gst_event_new_segment (&segment));
          self->send_initial_events = FALSE;
          start_connection_timeout = TRUE;
        }

        buffer = g_ptr_array_remove_index(self->queue, 0);

        GST_TRACE_OBJECT(self, "src loop: releasing lock");
        g_mutex_unlock(&self->queue_lock);

        ret = gst_pad_push(self->src, buffer);
        if (start_connection_timeout)
          er_dtls_connection_start_timeout (self->connection);

        if (G_UNLIKELY(ret != GST_FLOW_OK)) {
            GST_WARNING_OBJECT(self, "failed to push buffer on src pad: %s", gst_flow_get_name(ret));
        }
    } else {
        g_warn_if_reached();
        GST_TRACE_OBJECT(self, "src loop: releasing lock");
        g_mutex_unlock(&self->queue_lock);
    }
}
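
/* A minimal sketch of the initial-event sequence pushed above before the
 * first buffer: a src pad must see stream-start, caps and a segment event,
 * in that order, before any data flows. The pad, caps string and stream-id
 * prefix below are illustrative. */
#include <gst/gst.h>

static void
push_initial_events_sketch (GstPad * srcpad)
{
  GstSegment segment;
  GstCaps *caps;
  gchar s_id[32];

  /* stream-start with a (reasonably) unique stream id */
  g_snprintf (s_id, sizeof (s_id), "example-%08x", g_random_int ());
  gst_pad_push_event (srcpad, gst_event_new_stream_start (s_id));

  /* caps describing the data that will follow */
  caps = gst_caps_new_empty_simple ("application/x-dtls");
  gst_pad_push_event (srcpad, gst_event_new_caps (caps));
  gst_caps_unref (caps);

  /* an open-ended BYTES segment */
  gst_segment_init (&segment, GST_FORMAT_BYTES);
  gst_pad_push_event (srcpad, gst_event_new_segment (&segment));
}
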
/***********************************************************************************
 * Seek implementation
 ***********************************************************************************/
static gboolean progress_buffer_perform_push_seek(ProgressBuffer *element, GstPad *pad, GstEvent *event)
{
    GstFormat    format;
    gdouble      rate;
    GstSeekFlags flags;
    GstSeekType  start_type, stop_type;
    gint64       position;
    GstSegment   segment;

    gst_event_parse_seek(event, &rate, &format, &flags, &start_type, &position, &stop_type, NULL);

    if (format != GST_FORMAT_BYTES || start_type != GST_SEEK_TYPE_SET)
        return FALSE;

    if (stop_type != GST_SEEK_TYPE_NONE)
    {
        gst_element_message_full(GST_ELEMENT(element),
            GST_MESSAGE_WARNING,
            GST_CORE_ERROR,
            GST_CORE_ERROR_SEEK, g_strdup("stop_type != GST_SEEK_TYPE_NONE. Seeking to stop is not supported."), NULL,
            ("progressbuffer.c"), ("progress_buffer_perform_push_seek"), 0);
        return FALSE;
    }

    if (flags & GST_SEEK_FLAG_FLUSH)
        gst_pad_push_event(pad, gst_event_new_flush_start());

    // Signal the task to stop if it's waiting.
    g_mutex_lock(&element->lock);
    element->srcresult = GST_FLOW_FLUSHING;
    g_cond_signal(&element->add_cond);
    g_mutex_unlock(&element->lock);

    GST_PAD_STREAM_LOCK(pad); // Wait for task to stop

    g_mutex_lock(&element->lock);
    element->srcresult = GST_FLOW_OK;

#ifdef ENABLE_SOURCE_SEEKING
    element->instant_seek = (position >= element->sink_segment.start &&
                             (position - (gint64)element->sink_segment.position) <= element->bandwidth * element->wait_tolerance);

    if (element->instant_seek)
    {
        cache_set_read_position(element->cache, position - element->cache_read_offset);
        gst_segment_init(&segment, GST_FORMAT_BYTES);
        segment.rate = rate;
        segment.start = position;
        segment.stop = element->sink_segment.stop;
        segment.position = position;
        progress_buffer_set_pending_event(element, gst_event_new_segment(&segment));
    }
    else
    {
        // Clear any pending events, since we're doing a seek.
        reset_eos(element, TRUE);
    }
#else
    cache_set_read_position(element->cache, position - element->cache_read_offset);
    gst_segment_init(&segment, GST_FORMAT_BYTES);
    segment.rate = rate;
    segment.start = position;
    segment.stop = element->sink_segment.stop;
    segment.position = position;
    progress_buffer_set_pending_event(element, gst_event_new_segment(&segment));
#endif

    g_mutex_unlock(&element->lock);

#ifdef ENABLE_SOURCE_SEEKING
    if (!element->instant_seek)
    {
        element->is_source_seeking = TRUE;
        if (!gst_pad_push_event(element->sinkpad, gst_event_new_seek(rate, GST_FORMAT_BYTES, flags, GST_SEEK_TYPE_SET, position, GST_SEEK_TYPE_NONE, 0)))
        {
            element->instant_seek = TRUE;
            cache_set_read_position(element->cache, position - element->cache_read_offset);
            gst_segment_init(&segment, GST_FORMAT_BYTES);
            segment.rate = rate;
            segment.start = position;
            segment.stop = element->sink_segment.stop;
            segment.position = position;
            progress_buffer_set_pending_event(element, gst_event_new_segment(&segment));
        }
        element->is_source_seeking = FALSE;
    }
#endif

    if (flags & GST_SEEK_FLAG_FLUSH)
        gst_pad_push_event(pad, gst_event_new_flush_stop(TRUE));

    gst_pad_start_task(element->srcpad, progress_buffer_loop, element, NULL);
    GST_PAD_STREAM_UNLOCK(pad);

// INLINE - gst_event_unref()
    gst_event_unref(event);
    return TRUE;
}
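
/* A condensed sketch of the flushing-seek handshake used above (the element
 * only flushes when GST_SEEK_FLAG_FLUSH is set). 'loop_func' and 'element'
 * are illustrative stand-ins for the element's pad-task function and
 * instance; only the event, lock and task calls are real GStreamer API. */
#include <gst/gst.h>

static void
flushing_seek_sketch (GstPad * srcpad, GstTaskFunction loop_func,
    gpointer element)
{
  /* 1. tell downstream to drop data; pads start returning FLUSHING */
  gst_pad_push_event (srcpad, gst_event_new_flush_start ());

  /* 2. taking the stream lock waits for the streaming thread to pause */
  GST_PAD_STREAM_LOCK (srcpad);

  /* ... reposition the element's read position here ... */

  /* 3. re-open downstream and restart the pad task */
  gst_pad_push_event (srcpad, gst_event_new_flush_stop (TRUE));
  gst_pad_start_task (srcpad, loop_func, element, NULL);
  GST_PAD_STREAM_UNLOCK (srcpad);
}
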
static void
gst_type_find_element_loop (GstPad * pad)
{
  GstTypeFindElement *typefind;
  GstFlowReturn ret = GST_FLOW_OK;

  typefind = GST_TYPE_FIND_ELEMENT (GST_PAD_PARENT (pad));

  if (typefind->need_stream_start) {
    gchar *stream_id;

    stream_id = gst_pad_create_stream_id (typefind->src,
        GST_ELEMENT_CAST (typefind), NULL);

    GST_DEBUG_OBJECT (typefind, "Pushing STREAM_START");
    gst_pad_push_event (typefind->src, gst_event_new_stream_start (stream_id));

    typefind->need_stream_start = FALSE;
    g_free (stream_id);
  }

  if (typefind->mode == MODE_TYPEFIND) {
    GstPad *peer = NULL;
    GstCaps *found_caps = NULL;
    GstTypeFindProbability probability = GST_TYPE_FIND_NONE;

    GST_DEBUG_OBJECT (typefind, "find type in pull mode");

    GST_OBJECT_LOCK (typefind);
    if (typefind->force_caps) {
      found_caps = gst_caps_ref (typefind->force_caps);
      probability = GST_TYPE_FIND_MAXIMUM;
    }
    GST_OBJECT_UNLOCK (typefind);

    if (!found_caps) {
      peer = gst_pad_get_peer (pad);
      if (peer) {
        gint64 size;
        gchar *ext;

        if (!gst_pad_query_duration (peer, GST_FORMAT_BYTES, &size)) {
          GST_WARNING_OBJECT (typefind, "Could not query upstream length!");
          gst_object_unref (peer);

          ret = GST_FLOW_ERROR;
          goto pause;
        }

        /* if the size is 0, we cannot continue */
        if (size == 0) {
          /* keep message in sync with message in sink event handler */
          GST_ELEMENT_ERROR (typefind, STREAM, TYPE_NOT_FOUND,
              (_("Stream contains no data.")), ("Can't typefind empty stream"));
          gst_object_unref (peer);
          ret = GST_FLOW_ERROR;
          goto pause;
        }
        ext = gst_type_find_get_extension (typefind, pad);

        found_caps =
            gst_type_find_helper_get_range (GST_OBJECT_CAST (peer),
            GST_OBJECT_PARENT (peer),
            (GstTypeFindHelperGetRangeFunction) (GST_PAD_GETRANGEFUNC (peer)),
            (guint64) size, ext, &probability);
        g_free (ext);

        GST_DEBUG ("Found caps %" GST_PTR_FORMAT, found_caps);

        gst_object_unref (peer);
      }
    }

    if (!found_caps || probability < typefind->min_probability) {
      GST_DEBUG ("Trying to guess using extension");
      gst_caps_replace (&found_caps, NULL);
      found_caps =
          gst_type_find_guess_by_extension (typefind, pad, &probability);
    }

    if (!found_caps || probability < typefind->min_probability) {
      GST_ELEMENT_ERROR (typefind, STREAM, TYPE_NOT_FOUND, (NULL), (NULL));
      gst_caps_replace (&found_caps, NULL);
      ret = GST_FLOW_ERROR;
      goto pause;
    }

    GST_DEBUG ("Emiting found caps %" GST_PTR_FORMAT, found_caps);
    g_signal_emit (typefind, gst_type_find_element_signals[HAVE_TYPE],
        0, probability, found_caps);
    typefind->mode = MODE_NORMAL;
    gst_caps_unref (found_caps);
  } else if (typefind->mode == MODE_NORMAL) {
    GstBuffer *outbuf = NULL;

    if (typefind->need_segment) {
      typefind->need_segment = FALSE;
      gst_pad_push_event (typefind->src,
          gst_event_new_segment (&typefind->segment));
    }

    /* Pull 4k blocks and send downstream */
    ret = gst_pad_pull_range (typefind->sink, typefind->offset, 4096, &outbuf);
    if (ret != GST_FLOW_OK)
      goto pause;

    typefind->offset += 4096;

    ret = gst_pad_push (typefind->src, outbuf);
    if (ret != GST_FLOW_OK)
      goto pause;
  } else {
    /* Error out */
    ret = GST_FLOW_ERROR;
    goto pause;
  }

  return;

pause:
  {
    const gchar *reason = gst_flow_get_name (ret);
    gboolean push_eos = FALSE;

    GST_LOG_OBJECT (typefind, "pausing task, reason %s", reason);
    gst_pad_pause_task (typefind->sink);

    if (ret == GST_FLOW_EOS) {
      /* perform EOS logic */

      if (typefind->segment.flags & GST_SEGMENT_FLAG_SEGMENT) {
        gint64 stop;

        /* for segment playback we need to post when (in stream time)
         * we stopped, this is either stop (when set) or the duration. */
        if ((stop = typefind->segment.stop) == -1)
          stop = typefind->offset;

        GST_LOG_OBJECT (typefind, "Sending segment done, at end of segment");
        gst_element_post_message (GST_ELEMENT (typefind),
            gst_message_new_segment_done (GST_OBJECT (typefind),
                GST_FORMAT_BYTES, stop));
        gst_pad_push_event (typefind->src,
            gst_event_new_segment_done (GST_FORMAT_BYTES, stop));
      } else {
        push_eos = TRUE;
      }
    } else if (ret == GST_FLOW_NOT_LINKED || ret < GST_FLOW_EOS) {
      /* for fatal errors we post an error message */
      GST_ELEMENT_ERROR (typefind, STREAM, FAILED, (NULL),
          ("stream stopped, reason %s", reason));
      push_eos = TRUE;
    }
    if (push_eos) {
      /* send EOS, and prevent hanging if no streams yet */
      GST_LOG_OBJECT (typefind, "Sending EOS, at end of stream");
      gst_pad_push_event (typefind->src, gst_event_new_eos ());
    }
    return;
  }
}
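
/* A minimal sketch of the MODE_NORMAL branch above: a pad task that pulls
 * fixed-size blocks from the sink pad and pushes them downstream until a
 * non-OK flow return (EOS included). The pads and the offset bookkeeping
 * are illustrative. */
#include <gst/gst.h>

static GstFlowReturn
pull_and_push_block_sketch (GstPad * sinkpad, GstPad * srcpad, guint64 * offset)
{
  GstBuffer *buf = NULL;
  GstFlowReturn ret;

  /* pull a 4k block at the current offset */
  ret = gst_pad_pull_range (sinkpad, *offset, 4096, &buf);
  if (ret != GST_FLOW_OK)
    return ret;

  *offset += 4096;

  /* hand the block to downstream; the caller pauses on non-OK returns */
  return gst_pad_push (srcpad, buf);
}
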
static void
gst_hls_demux_stream_loop (GstHLSDemux * demux)
{
  GstFragment *fragment;
  GstBuffer *buf;
  GstFlowReturn ret;
  GstCaps *bufcaps, *srccaps = NULL;

  /* Loop for the source pad task. The task is started when we have
   * received the main playlist from the source element. It first tries to
   * cache the initial fragments and then waits until there is more data in
   * the queue. The task is woken up when we push a new fragment to the queue
   * or when we reach the end of the playlist */

  if (G_UNLIKELY (demux->need_cache)) {
    if (!gst_hls_demux_cache_fragments (demux))
      goto cache_error;

    /* we can now start the updates thread (only if in PLAYING) */
    if (GST_STATE (demux) == GST_STATE_PLAYING)
      gst_task_start (demux->updates_task);
    GST_INFO_OBJECT (demux, "First fragments cached successfully");
  }

  if (g_queue_is_empty (demux->queue)) {
    if (demux->end_of_playlist)
      goto end_of_playlist;

    goto pause_task;
  }

  fragment = g_queue_pop_head (demux->queue);
  buf = gst_fragment_get_buffer (fragment);

  /* Figure out if we need to create/switch pads */
  if (G_LIKELY (demux->srcpad))
    srccaps = gst_pad_get_current_caps (demux->srcpad);
  bufcaps = gst_fragment_get_caps (fragment);
  if (G_UNLIKELY (!srccaps || !gst_caps_is_equal_fixed (bufcaps, srccaps)
          || demux->need_segment)) {
    switch_pads (demux, bufcaps);
    demux->need_segment = TRUE;
  }
  gst_caps_unref (bufcaps);
  if (G_LIKELY (srccaps))
    gst_caps_unref (srccaps);
  g_object_unref (fragment);

  if (demux->need_segment) {
    GstSegment segment;
    GstClockTime start = GST_BUFFER_PTS (buf);

    start += demux->position_shift;
    /* And send a segment event */
    GST_DEBUG_OBJECT (demux, "Sending new-segment. segment start:%"
        GST_TIME_FORMAT, GST_TIME_ARGS (start));
    gst_segment_init (&segment, GST_FORMAT_TIME);
    segment.start = start;
    segment.time = start;
    gst_pad_push_event (demux->srcpad, gst_event_new_segment (&segment));
    demux->need_segment = FALSE;
    demux->position_shift = 0;
  }

  ret = gst_pad_push (demux->srcpad, buf);
  if (ret != GST_FLOW_OK)
    goto error_pushing;

  return;

end_of_playlist:
  {
    GST_DEBUG_OBJECT (demux, "Reached end of playlist, sending EOS");
    gst_pad_push_event (demux->srcpad, gst_event_new_eos ());
    gst_hls_demux_stop (demux);
    return;
  }

cache_error:
  {
    gst_task_pause (demux->stream_task);
    if (!demux->cancelled) {
      GST_ELEMENT_ERROR (demux, RESOURCE, NOT_FOUND,
          ("Could not cache the first fragments"), (NULL));
      gst_hls_demux_stop (demux);
    }
    return;
  }

error_pushing:
  {
    /* FIXME: handle error */
    GST_DEBUG_OBJECT (demux, "Error pushing buffer: %s... stopping task",
        gst_flow_get_name (ret));
    gst_hls_demux_stop (demux);
    return;
  }

pause_task:
  {
    gst_task_pause (demux->stream_task);
    return;
  }
}
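
/* A small sketch of the "need_segment" handling above: after (re)creating a
 * src pad, a TIME segment whose start and time fields match the next
 * buffer's PTS is pushed so downstream can position the new data. The pad
 * variable is illustrative. */
#include <gst/gst.h>

static void
push_time_segment_sketch (GstPad * srcpad, GstClockTime start)
{
  GstSegment segment;

  gst_segment_init (&segment, GST_FORMAT_TIME);
  segment.start = start;        /* first valid timestamp on the new pad */
  segment.time = start;         /* stream time corresponding to segment.start */

  gst_pad_push_event (srcpad, gst_event_new_segment (&segment));
}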