/* Chain a buffer list by pushing each buffer through gst_pad_chain() one at
 * a time, so that a forced-key-unit can be scheduled per buffer.  Falls back
 * to the default proxy-pad list chaining when no target duration is
 * configured or a forced key unit is already pending.  Consumes @list. */
static GstFlowReturn
gst_hls_sink_chain_list (GstPad * pad, GstObject * parent,
    GstBufferList * list)
{
  GstHlsSink *sink = GST_HLS_SINK_CAST (parent);
  GstFlowReturn result = GST_FLOW_OK;
  guint idx, n_buffers;

  if (sink->target_duration == 0 || sink->waiting_fku)
    return gst_proxy_pad_chain_list_default (pad, parent, list);

  GST_DEBUG_OBJECT (pad, "chaining each group in list as a merged buffer");

  n_buffers = gst_buffer_list_length (list);
  for (idx = 0; idx < n_buffers; idx++) {
    GstBuffer *buffer = gst_buffer_list_get (list, idx);

    /* skip the check once a key unit has been scheduled */
    if (!sink->waiting_fku)
      gst_hls_sink_check_schedule_next_key_unit (sink, buffer);

    /* the list keeps its reference; take an extra one for the chain */
    result = gst_pad_chain (pad, gst_buffer_ref (buffer));
    if (result != GST_FLOW_OK)
      break;
  }

  gst_buffer_list_unref (list);
  return result;
}
/* Thread trampoline: push the queued buffer on its pad and record the
 * flow return inside the ChainData, which is also the thread's result. */
static gpointer
chain_async_buffer (gpointer data)
{
  ChainData *cd = (ChainData *) data;

  cd->ret = gst_pad_chain (cd->pad, cd->buffer);
  return cd;
}
/* Allocate a single 1 KiB buffer and chain it on @sinkpad. */
static gpointer
push_buffer (GstPad * sinkpad)
{
  GstBuffer *buf = gst_buffer_new_and_alloc (1 * 1024);

  gst_pad_chain (sinkpad, buf);
  return NULL;
}
/* Thread body: push @n_vbuffers 25 ms video buffers (plus stream-start,
 * segment and EOS events) on the pad given in @data, recording each
 * buffer's expected running time in v_timestamp_q.  The file-scope flags
 * videodelay/late_video/video_gaps/video_overlaps steer the timing. */
static gpointer
push_vbuffers (gpointer data)
{
  GstPad *sinkpad = data;
  GstSegment seg;
  GstClockTime ts = 0;
  gint idx;

  if (videodelay)
    g_usleep (2000);
  if (late_video)
    ts = 50 * GST_MSECOND;

  gst_pad_send_event (sinkpad, gst_event_new_stream_start ("test"));
  gst_segment_init (&seg, GST_FORMAT_TIME);
  gst_pad_send_event (sinkpad, gst_event_new_segment (&seg));

  for (idx = 0; idx < n_vbuffers; idx++) {
    GstBuffer *buffer = gst_buffer_new_and_alloc (1000);
    GstClockTime *running_time = g_new (GstClockTime, 1);

    gst_buffer_memset (buffer, 0, idx, 1);
    GST_BUFFER_TIMESTAMP (buffer) = ts;
    ts += 25 * GST_MSECOND;
    GST_BUFFER_DURATION (buffer) = ts - GST_BUFFER_TIMESTAMP (buffer);

    /* remember the running time the test expects to see downstream */
    *running_time = gst_segment_to_running_time (&seg, GST_FORMAT_TIME, ts);
    g_queue_push_tail (&v_timestamp_q, running_time);

    /* optionally introduce a gap or overlap after the fifth buffer */
    if (idx == 4) {
      if (video_gaps)
        ts += 10 * GST_MSECOND;
      else if (video_overlaps)
        ts -= 10 * GST_MSECOND;
    }

    fail_unless (gst_pad_chain (sinkpad, buffer) == GST_FLOW_OK);
  }

  gst_pad_send_event (sinkpad, gst_event_new_eos ());
  return NULL;
}
/* Push one decoded buffer (audio or video) for the current playlist item.
 * Buffers that arrive before the item's start, or before the per-stream
 * seek has completed, are discarded.  Accepted buffers are retimestamped
 * into the output timeline before being chained on the matching sinkpad.
 * Takes ownership of @buf on every path (chained or unreffed). */
static GstFlowReturn
gst_nle_source_push_buffer (GstNleSource * nlesrc, GstBuffer * buf,
    gboolean is_audio)
{
  GstPad *sinkpad;
  gboolean push_buf;
  guint64 buf_ts, buf_rel_ts, last_ts;
  GstNleSrcItem *item;
  GstFlowReturn ret;

  /* current playlist entry; assumes nlesrc->index is always valid here —
   * NOTE(review): g_list_nth_data() returns NULL out of range, verify
   * callers guarantee this before the item->start dereference below */
  item = (GstNleSrcItem *) g_list_nth_data (nlesrc->queue, nlesrc->index);
  buf_ts = GST_BUFFER_TIMESTAMP (buf);

  /* drop buffers that predate this item's start position */
  if (buf_ts < item->start) {
    GST_LOG_OBJECT (nlesrc,
        "Discard early %s buffer with ts: %" GST_TIME_FORMAT
        " start: %" GST_TIME_FORMAT, is_audio ?
        "audio" : "video", GST_TIME_ARGS (buf_ts),
        GST_TIME_ARGS (item->start));
    gst_buffer_unref (buf);
    return GST_FLOW_OK;
  }

  /* timestamp relative to the item's start, used for retimestamping */
  buf_rel_ts = buf_ts - item->start;

  g_mutex_lock (&nlesrc->stream_lock);
  /* snapshot per-stream state under the lock: whether the seek for this
   * stream finished, the previous timestamp, and the target sinkpad */
  if (is_audio) {
    push_buf = nlesrc->audio_seek_done;
    last_ts = nlesrc->audio_ts;
    nlesrc->audio_ts = buf_ts;
    sinkpad = nlesrc->audio_sinkpad;
  } else {
    push_buf = nlesrc->video_seek_done;
    last_ts = nlesrc->video_ts;
    nlesrc->video_ts = buf_ts;
    sinkpad = nlesrc->video_sinkpad;
  }

  /* only push monotonically non-decreasing timestamps after the seek */
  if (push_buf && GST_BUFFER_TIMESTAMP (buf) >= last_ts) {
    /* Retimestamps buffer */
    /* map into the output timeline, scaling by the item's playback rate */
    guint64 new_ts = nlesrc->start_ts + buf_rel_ts / item->rate;
    GST_BUFFER_TIMESTAMP (buf) = new_ts;
    GST_LOG_OBJECT (nlesrc,
        "Pushing %s buffer with ts: %" GST_TIME_FORMAT " dur:%"
        GST_TIME_FORMAT " orig:%" GST_TIME_FORMAT, is_audio ?
        "audio" : "video", GST_TIME_ARGS (new_ts),
        GST_TIME_ARGS (GST_BUFFER_DURATION (buf)), GST_TIME_ARGS (buf_ts));
    if (GST_BUFFER_DURATION_IS_VALID (buf)) {
      new_ts += GST_BUFFER_DURATION (buf);
    }
    /* track the furthest output time reached so far */
    if (new_ts >= nlesrc->accu_time) {
      nlesrc->accu_time = new_ts;
    }
    /* one-time per-item setup, driven by the first pushed video buffer —
     * NOTE(review): GST_BUFFER_CAPS is a GStreamer 0.10-era macro; confirm
     * this file targets that API */
    if (G_UNLIKELY (!nlesrc->item_setup) && !is_audio) {
      GST_DEBUG_OBJECT (nlesrc,
          "Applying roi and title properties for this segment");
      gst_nle_source_update_videocrop (nlesrc, GST_BUFFER_CAPS (buf));
      gst_nle_source_update_overlay_title (nlesrc);
      nlesrc->item_setup = TRUE;
    }
    /* We need to unlock before pushing since push_buffer can block */
    g_mutex_unlock (&nlesrc->stream_lock);
    ret = gst_pad_chain (sinkpad, buf);
    if (ret != GST_FLOW_OK) {
      GST_WARNING_OBJECT (nlesrc, "pushing buffer returned %s",
          gst_flow_get_name (ret));
    }
    return ret;
  } else {
    /* seek not done yet, or timestamp went backwards: drop the buffer */
    GST_LOG_OBJECT (nlesrc, "Discard %s buffer with ts: %" GST_TIME_FORMAT,
        is_audio ? "audio" : "video", GST_TIME_ARGS (buf_ts));
    gst_buffer_unref (buf);
    g_mutex_unlock (&nlesrc->stream_lock);
    return GST_FLOW_OK;
  }
}
/* Thread body: negotiate S8 audio caps on the pad given in @data, then push
 * @n_abuffers one-second buffers (plus stream-start, caps, segment and EOS
 * events).  The file-scope flags audiodelay/early_video/per_channel/
 * audio_drift/audio_nondiscont steer the fill pattern and timing. */
static gpointer
push_abuffers (gpointer data)
{
  GstPad *sinkpad = data;
  GstSegment seg;
  GstAudioInfo audio_info;
  GstCaps *audio_caps;
  GstClockTime ts = 0;
  guint buf_size = 1000;
  gint i, j, k;

  if (audiodelay)
    g_usleep (2000);
  if (early_video)
    ts = 50 * GST_MSECOND;

  gst_pad_send_event (sinkpad, gst_event_new_stream_start ("test"));

  gst_audio_info_set_format (&audio_info, GST_AUDIO_FORMAT_S8, buf_size,
      channels, NULL);
  audio_caps = gst_audio_info_to_caps (&audio_info);
  gst_pad_send_event (sinkpad, gst_event_new_caps (audio_caps));
  gst_caps_unref (audio_caps);

  gst_segment_init (&seg, GST_FORMAT_TIME);
  gst_pad_send_event (sinkpad, gst_event_new_segment (&seg));

  for (i = 0; i < n_abuffers; i++) {
    GstBuffer *buffer = gst_buffer_new_and_alloc (channels * buf_size);

    if (per_channel) {
      /* interleaved fill: each channel gets its own marker value */
      GstMapInfo map;
      guint8 *samples;

      gst_buffer_map (buffer, &map, GST_MAP_WRITE);
      samples = map.data;
      for (j = 0; j < buf_size; j++) {
        for (k = 0; k < channels; k++) {
          samples[j * channels + k] = fill_value_per_channel[k];
        }
      }
      gst_buffer_unmap (buffer, &map);
    } else {
      gst_buffer_memset (buffer, 0, fill_value, channels * buf_size);
    }

    GST_BUFFER_TIMESTAMP (buffer) = ts;
    ts += 1 * GST_SECOND;
    /* optionally drift every buffer, or introduce one discontinuity */
    if (audio_drift)
      ts += 50 * GST_MSECOND;
    else if (i == 4 && audio_nondiscont)
      ts += 30 * GST_MSECOND;
    GST_BUFFER_DURATION (buffer) = ts - GST_BUFFER_TIMESTAMP (buffer);

    fail_unless (gst_pad_chain (sinkpad, buffer) == GST_FLOW_OK);
  }

  gst_pad_send_event (sinkpad, gst_event_new_eos ());
  return NULL;
}