static GstStateChangeReturn
gst_interleave_change_state (GstElement * element, GstStateChange transition)
{
  GstInterleave *self;
  GstStateChangeReturn ret;

  self = GST_INTERLEAVE (element);

  switch (transition) {
    case GST_STATE_CHANGE_NULL_TO_READY:
      break;
    case GST_STATE_CHANGE_READY_TO_PAUSED:
      self->timestamp = 0;
      self->offset = 0;
      gst_event_replace (&self->pending_segment, NULL);
      self->send_stream_start = TRUE;
      gst_collect_pads_start (self->collect);
      break;
    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
      break;
    default:
      break;
  }

  /* Stop before calling the parent's state change function as
   * GstCollectPads might take locks and we would deadlock in that
   * case */
  if (transition == GST_STATE_CHANGE_PAUSED_TO_READY)
    gst_collect_pads_stop (self->collect);

  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);

  switch (transition) {
    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
      break;
    case GST_STATE_CHANGE_PAUSED_TO_READY:
      gst_caps_replace (&self->sinkcaps, NULL);
      gst_event_replace (&self->pending_segment, NULL);
      break;
    case GST_STATE_CHANGE_READY_TO_NULL:
      break;
    default:
      break;
  }

  return ret;
}
/* handle an event sent directly to the element.
 *
 * This event can be sent either in the READY state or the
 * >READY state. The only event of interest really is the seek
 * event.
 *
 * In the READY state we can only store the event and try to
 * respect it when going to PAUSED. We assume we are in the
 * READY state when our parsing state != AIFF_PARSE_DATA.
 *
 * When we are streaming, we can simply perform the seek right
 * away.
 */
static gboolean
gst_aiff_parse_send_event (GstElement * element, GstEvent * event)
{
  GstAiffParse *aiff = GST_AIFF_PARSE (element);
  gboolean res = FALSE;
  GstEvent **event_p;

  GST_DEBUG_OBJECT (aiff, "received event %s", GST_EVENT_TYPE_NAME (event));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_SEEK:
      if (aiff->state == AIFF_PARSE_DATA) {
        /* we can handle the seek directly when streaming data */
        res = gst_aiff_parse_perform_seek (aiff, event);
      } else {
        GST_DEBUG_OBJECT (aiff, "queuing seek for later");

        event_p = &aiff->seek_event;
        gst_event_replace (event_p, event);

        /* we always return true */
        res = TRUE;
      }
      break;
    default:
      break;
  }
  gst_event_unref (event);

  return res;
}
/* sink currently only operates in push mode */
static inline gboolean
gst_ts_shifter_sink_activate (GstPad * pad, GstObject * parent,
    gboolean active)
{
  GstTSShifter *ts = GST_TS_SHIFTER (parent);

  if (active) {
    FLOW_MUTEX_LOCK (ts);
    GST_DEBUG_OBJECT (ts, "activating push mode");
    ts->srcresult = GST_FLOW_OK;
    ts->sinkresult = GST_FLOW_OK;
    ts->is_eos = FALSE;
    ts->unexpected = FALSE;
    FLOW_MUTEX_UNLOCK (ts);
  } else {
    /* unblock chain function */
    FLOW_MUTEX_LOCK (ts);
    GST_DEBUG_OBJECT (ts, "deactivating push mode");
    ts->srcresult = GST_FLOW_FLUSHING;
    ts->sinkresult = GST_FLOW_FLUSHING;
    gst_event_replace (&ts->stream_start_event, NULL);
    FLOW_MUTEX_UNLOCK (ts);
  }

  return TRUE;
}
static GstPadProbeReturn
event_probe (GstPad * pad, GstPadProbeInfo * info, gpointer udata)
{
  GstPadProbeReturn ret = GST_PAD_PROBE_OK;
  GstEvent *event = GST_PAD_PROBE_INFO_DATA (info);

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_STREAM_START:{
      guint group_id;

      g_mutex_lock (&test_mutex);
      fail_unless (gst_event_parse_group_id (event, &group_id));

      if (have_group_id) {
        if (group_id_pre != group_id) {
          event = gst_event_copy (event);
          gst_event_set_group_id (event, group_id_pre);
          gst_event_replace ((GstEvent **) & info->data, event);
          gst_event_unref (event);
        }
      } else {
        group_id_pre = group_id;
        have_group_id = TRUE;
      }
      g_mutex_unlock (&test_mutex);
      break;
    }
    default:
      break;
  }

  return ret;
}
static gboolean
gst_frei0r_mixer_sink0_event (GstPad * pad, GstEvent * event)
{
  GstFrei0rMixer *self = GST_FREI0R_MIXER (gst_pad_get_parent (pad));
  gboolean ret = FALSE;
  GstEvent **p_ev;

  GST_DEBUG ("Got %s event on pad %s:%s", GST_EVENT_TYPE_NAME (event),
      GST_DEBUG_PAD_NAME (pad));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_NEWSEGMENT:
      p_ev = &self->newseg_event;
      gst_event_replace (p_ev, event);
      break;
    default:
      break;
  }

  /* now GstCollectPads can take care of the rest, e.g. EOS */
  ret = self->collect_event (pad, event);

  gst_object_unref (self);

  return ret;
}
static gboolean
mpegtsmux_sink_event (GstPad * pad, GstEvent * event)
{
  MpegTsMux *mux = GST_MPEG_TSMUX (gst_pad_get_parent (pad));
  MpegTsPadData *ts_data;
  gboolean res = TRUE;
  gboolean forward = TRUE;

  ts_data = (MpegTsPadData *) gst_pad_get_element_private (pad);

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_CUSTOM_DOWNSTREAM:
    {
      GstClockTime timestamp, stream_time, running_time;
      gboolean all_headers;
      guint count;

      if (!gst_video_event_is_force_key_unit (event))
        goto out;

      forward = FALSE;

      gst_video_event_parse_downstream_force_key_unit (event,
          &timestamp, &stream_time, &running_time, &all_headers, &count);
      GST_INFO_OBJECT (mux, "have downstream force-key-unit event on pad %s, "
          "seqnum %d, running-time %" GST_TIME_FORMAT " count %d",
          gst_pad_get_name (pad), gst_event_get_seqnum (event),
          GST_TIME_ARGS (running_time), count);

      if (mux->force_key_unit_event != NULL) {
        GST_INFO_OBJECT (mux, "skipping downstream force key unit event "
            "as an upstream force key unit is already queued");
        goto out;
      }

      if (!all_headers)
        goto out;

      mux->pending_key_unit_ts = running_time;
      gst_event_replace (&mux->force_key_unit_event, event);
      break;
    }
    default:
      break;
  }

out:
  if (forward)
    res = ts_data->eventfunc (pad, event);

  gst_object_unref (mux);

  return res;
}
static void
gst_hls_sink_reset (GstHlsSink * sink)
{
  sink->index = 0;
  sink->count = 0;
  sink->timeout_id = 0;
  sink->last_running_time = 0;
  sink->waiting_fku = FALSE;
  gst_event_replace (&sink->force_key_unit_event, NULL);

  gst_segment_init (&sink->segment, GST_FORMAT_UNDEFINED);

  if (sink->playlist)
    gst_m3u8_playlist_free (sink->playlist);
  sink->playlist = gst_m3u8_playlist_new (6, sink->playlist_length, FALSE);
}
static GstStateChangeReturn
gst_rtp_base_depayload_change_state (GstElement * element,
    GstStateChange transition)
{
  GstRTPBaseDepayload *filter;
  GstRTPBaseDepayloadPrivate *priv;
  GstStateChangeReturn ret;

  filter = GST_RTP_BASE_DEPAYLOAD (element);
  priv = filter->priv;

  switch (transition) {
    case GST_STATE_CHANGE_NULL_TO_READY:
      break;
    case GST_STATE_CHANGE_READY_TO_PAUSED:
      filter->need_newsegment = TRUE;
      priv->npt_start = 0;
      priv->npt_stop = -1;
      priv->play_speed = 1.0;
      priv->play_scale = 1.0;
      priv->clock_base = -1;
      priv->next_seqnum = -1;
      priv->negotiated = FALSE;
      priv->discont = FALSE;
      break;
    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
      break;
    default:
      break;
  }

  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);

  switch (transition) {
    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
      break;
    case GST_STATE_CHANGE_PAUSED_TO_READY:
      gst_caps_replace (&priv->last_caps, NULL);
      gst_event_replace (&priv->segment_event, NULL);
      break;
    case GST_STATE_CHANGE_READY_TO_NULL:
      break;
    default:
      break;
  }

  return ret;
}
static GstPadProbeReturn
gst_hls_sink_ghost_event_probe (GstPad * pad, GstPadProbeInfo * info,
    gpointer data)
{
  GstHlsSink *sink = GST_HLS_SINK_CAST (data);
  GstEvent *event = gst_pad_probe_info_get_event (info);

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_SEGMENT:
    {
      gst_event_copy_segment (event, &sink->segment);
      break;
    }
    case GST_EVENT_FLUSH_STOP:
      gst_segment_init (&sink->segment, GST_FORMAT_UNDEFINED);
      break;
    case GST_EVENT_CUSTOM_DOWNSTREAM:
    {
      GstClockTime timestamp;
      GstClockTime running_time, stream_time;
      gboolean all_headers;
      guint count;

      if (!gst_video_event_is_force_key_unit (event))
        break;

      gst_event_replace (&sink->force_key_unit_event, event);

      gst_video_event_parse_downstream_force_key_unit (event, &timestamp,
          &stream_time, &running_time, &all_headers, &count);
      GST_INFO_OBJECT (sink, "setting index %d", count);
      sink->index = count;
      break;
    }
    default:
      break;
  }

  return GST_PAD_PROBE_OK;
}
static void
gst_frei0r_mixer_reset (GstFrei0rMixer * self)
{
  GstFrei0rMixerClass *klass = GST_FREI0R_MIXER_GET_CLASS (self);
  GstEvent **p_ev;

  if (self->f0r_instance) {
    klass->ftable->destruct (self->f0r_instance);
    self->f0r_instance = NULL;
  }

  if (self->property_cache)
    gst_frei0r_property_cache_free (klass->properties, self->property_cache,
        klass->n_properties);
  self->property_cache = NULL;

  gst_caps_replace (&self->caps, NULL);

  p_ev = &self->segment_event;
  gst_event_replace (p_ev, NULL);

  gst_video_info_init (&self->info);
}
static GstStateChangeReturn
gst_ts_shifter_change_state (GstElement * element, GstStateChange transition)
{
  GstTSShifter *ts;
  GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;

  ts = GST_TS_SHIFTER (element);

  switch (transition) {
    case GST_STATE_CHANGE_READY_TO_PAUSED:
      gst_ts_shifter_start (ts);
      gst_event_replace (&ts->stream_start_event, NULL);
      break;
    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
      break;
    default:
      break;
  }

  if (ret == GST_STATE_CHANGE_FAILURE)
    return ret;

  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
  if (ret == GST_STATE_CHANGE_FAILURE)
    return ret;

  switch (transition) {
    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
    case GST_STATE_CHANGE_PAUSED_TO_READY:
    case GST_STATE_CHANGE_READY_TO_NULL:
      break;
    default:
      break;
  }

  return ret;
}
static void
gst_frei0r_mixer_reset (GstFrei0rMixer * self)
{
  GstFrei0rMixerClass *klass = GST_FREI0R_MIXER_GET_CLASS (self);
  GstEvent **p_ev;

  if (self->f0r_instance) {
    klass->ftable->destruct (self->f0r_instance);
    self->f0r_instance = NULL;
  }

  if (self->property_cache)
    gst_frei0r_property_cache_free (klass->properties, self->property_cache,
        klass->n_properties);
  self->property_cache = NULL;

  gst_caps_replace (&self->caps, NULL);

  p_ev = &self->newseg_event;
  gst_event_replace (p_ev, NULL);

  self->fmt = GST_VIDEO_FORMAT_UNKNOWN;
  self->width = self->height = 0;
}
static gboolean
gst_interleave_sink_event (GstCollectPads * pads, GstCollectData * data,
    GstEvent * event, gpointer user_data)
{
  GstInterleave *self = GST_INTERLEAVE (user_data);
  gboolean ret = TRUE;

  GST_DEBUG ("Got %s event on pad %s:%s", GST_EVENT_TYPE_NAME (event),
      GST_DEBUG_PAD_NAME (data->pad));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_FLUSH_STOP:
      GST_OBJECT_LOCK (self);
      gst_event_replace (&self->pending_segment, NULL);
      GST_OBJECT_UNLOCK (self);
      break;
    case GST_EVENT_SEGMENT:
    {
      GST_OBJECT_LOCK (self);
      gst_event_replace (&self->pending_segment, event);
      GST_OBJECT_UNLOCK (self);
      break;
    }
    case GST_EVENT_CAPS:
    {
      GstCaps *caps;
      GstAudioInfo info;
      GValue *val;
      guint channel;

      gst_event_parse_caps (event, &caps);

      if (!gst_audio_info_from_caps (&info, caps)) {
        GST_WARNING_OBJECT (self, "invalid sink caps");
        gst_event_unref (event);
        event = NULL;
        ret = FALSE;
        break;
      }

      if (self->channel_positions_from_input
          && GST_AUDIO_INFO_CHANNELS (&info) == 1) {
        channel = GST_INTERLEAVE_PAD_CAST (data->pad)->channel;
        val = g_value_array_get_nth (self->input_channel_positions, channel);
        g_value_set_enum (val, GST_AUDIO_INFO_POSITION (&info, 0));
      }

      if (!gst_pad_has_current_caps (data->pad))
        g_atomic_int_add (&self->configured_sinkpads_counter, 1);

      /* Last caps that are set on a sink pad are used as output caps */
      if (g_atomic_int_get (&self->configured_sinkpads_counter) ==
          self->channels) {
        ret = gst_interleave_sink_setcaps (self, data->pad, caps, &info);
        gst_event_unref (event);
        event = NULL;
      }
      break;
    }
    case GST_EVENT_TAG:
      GST_FIXME_OBJECT (self, "FIXME: merge tags and send after stream-start");
      break;
    default:
      break;
  }

  /* now GstCollectPads can take care of the rest, e.g. EOS */
  if (event != NULL)
    return gst_collect_pads_event_default (pads, data, event, FALSE);

  return ret;
}
static GstFlowReturn
mpegtsmux_collected (GstCollectPads2 * pads, MpegTsMux * mux)
{
  GstFlowReturn ret = GST_FLOW_OK;
  MpegTsPadData *best = NULL;

  GST_DEBUG_OBJECT (mux, "Pads collected");

  if (G_UNLIKELY (mux->first)) {
    ret = mpegtsmux_create_streams (mux);
    if (G_UNLIKELY (ret != GST_FLOW_OK))
      return ret;

    mpegtsdemux_prepare_srcpad (mux);

    mux->first = FALSE;
  }

  best = mpegtsmux_choose_best_stream (mux);

  if (best != NULL) {
    TsMuxProgram *prog = best->prog;
    GstBuffer *buf = best->queued_buf;
    gint64 pts = -1;
    gboolean delta = TRUE;

    if (prog == NULL) {
      GST_ELEMENT_ERROR (mux, STREAM, MUX,
          ("Stream on pad %" GST_PTR_FORMAT
              " is not associated with any program", COLLECT_DATA_PAD (best)),
          (NULL));
      return GST_FLOW_ERROR;
    }

    if (mux->force_key_unit_event != NULL && best->stream->is_video_stream) {
      GstEvent *event;

      event = check_pending_key_unit_event (mux->force_key_unit_event,
          &best->collect.segment, GST_BUFFER_TIMESTAMP (buf),
          GST_BUFFER_FLAGS (buf), mux->pending_key_unit_ts);
      if (event) {
        GstClockTime running_time;
        guint count;
        GList *cur;

        mux->pending_key_unit_ts = GST_CLOCK_TIME_NONE;
        gst_event_replace (&mux->force_key_unit_event, NULL);

        gst_video_event_parse_downstream_force_key_unit (event,
            NULL, NULL, &running_time, NULL, &count);

        GST_INFO_OBJECT (mux, "pushing downstream force-key-unit event %d "
            "%" GST_TIME_FORMAT " count %d", gst_event_get_seqnum (event),
            GST_TIME_ARGS (running_time), count);
        gst_pad_push_event (mux->srcpad, event);

        /* output PAT */
        mux->tsmux->last_pat_ts = -1;

        /* output PMT for each program */
        for (cur = g_list_first (mux->tsmux->programs); cur != NULL;
            cur = g_list_next (cur)) {
          TsMuxProgram *program = (TsMuxProgram *) cur->data;

          program->last_pmt_ts = -1;
        }
        tsmux_program_set_pcr_stream (prog, NULL);
      }
    }

    if (G_UNLIKELY (prog->pcr_stream == NULL)) {
      /* Take the first data stream for the PCR */
      GST_DEBUG_OBJECT (COLLECT_DATA_PAD (best),
          "Use stream (pid=%d) from pad as PCR for program (prog_id = %d)",
          MPEG_TS_PAD_DATA (best)->pid, MPEG_TS_PAD_DATA (best)->prog_id);

      /* Set the chosen PCR stream */
      tsmux_program_set_pcr_stream (prog, best->stream);
    }

    g_return_val_if_fail (buf != NULL, GST_FLOW_ERROR);

    if (best->stream->is_video_stream)
      delta = GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
    GST_DEBUG_OBJECT (mux, "delta: %d", delta);

    GST_DEBUG_OBJECT (COLLECT_DATA_PAD (best),
        "Chose stream for output (PID: 0x%04x)", best->pid);

    if (GST_CLOCK_TIME_IS_VALID (best->cur_ts)) {
      pts = GSTTIME_TO_MPEGTIME (best->cur_ts);
      GST_DEBUG_OBJECT (mux, "Buffer has TS %" GST_TIME_FORMAT " pts %"
          G_GINT64_FORMAT, GST_TIME_ARGS (best->cur_ts), pts);
    }

    tsmux_stream_add_data (best->stream, GST_BUFFER_DATA (buf),
        GST_BUFFER_SIZE (buf), buf, pts, -1, !delta);
    best->queued_buf = NULL;

    mux->is_delta = delta;
    while (tsmux_stream_bytes_in_buffer (best->stream) > 0) {
      if (!tsmux_write_stream_packet (mux->tsmux, best->stream)) {
        /* Failed writing data for some reason. Set appropriate error */
        GST_DEBUG_OBJECT (mux, "Failed to write data packet");
        GST_ELEMENT_ERROR (mux, STREAM, MUX,
            ("Failed writing output data to stream %04x", best->stream->id),
            (NULL));
        goto write_fail;
      }
    }
    if (prog->pcr_stream == best->stream) {
      mux->last_ts = best->last_ts;
    }
  } else {
    /* FIXME: Drain all remaining streams */
    /* At EOS */
    gst_pad_push_event (mux->srcpad, gst_event_new_eos ());
  }

  return ret;

write_fail:
  return mux->last_flow_ret;
}
static inline gboolean
gst_ts_shifter_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
  GstTSShifter *ts = GST_TS_SHIFTER (parent);
  gboolean ret = TRUE;

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_EOS:
    {
      GST_CAT_LOG_OBJECT (ts_flow, ts, "received eos event");
      FLOW_MUTEX_LOCK (ts);
      ts->is_eos = TRUE;
      /* Ensure to unlock the pushing loop */
      FLOW_SIGNAL_ADD (ts);
      FLOW_MUTEX_UNLOCK (ts);
      gst_event_unref (event);
      break;
    }
    case GST_EVENT_SEGMENT:
    {
      GST_CAT_LOG_OBJECT (ts_flow, ts, "received newsegment event");
      FLOW_MUTEX_LOCK (ts);
      ts->unexpected = FALSE;
      FLOW_MUTEX_UNLOCK (ts);
      gst_event_unref (event);
      break;
    }
    case GST_EVENT_FLUSH_START:
    {
      GST_CAT_LOG_OBJECT (ts_flow, ts, "received flush start event");
      /* forward event */
      gst_pad_push_event (ts->srcpad, event);

      /* now unblock the chain function */
      FLOW_MUTEX_LOCK (ts);
      ts->srcresult = GST_FLOW_FLUSHING;
      ts->sinkresult = GST_FLOW_FLUSHING;
      /* unblock the loop and chain functions */
      FLOW_SIGNAL_ADD (ts);
      FLOW_MUTEX_UNLOCK (ts);

      /* make sure it pauses, this should happen since we sent
       * flush_start downstream. */
      gst_pad_pause_task (ts->srcpad);
      GST_CAT_LOG_OBJECT (ts_flow, ts, "loop stopped");
      break;
    }
    case GST_EVENT_FLUSH_STOP:
    {
      GST_CAT_LOG_OBJECT (ts_flow, ts, "received flush stop event");

      /* forward event */
      gst_pad_push_event (ts->srcpad, event);

      gst_ts_shifter_start (ts);

      FLOW_MUTEX_LOCK (ts);
      gst_event_replace (&ts->stream_start_event, NULL);
      ts->cur_bytes = 0;
      ts->srcresult = GST_FLOW_OK;
      ts->sinkresult = GST_FLOW_OK;
      ts->is_eos = FALSE;
      ts->unexpected = FALSE;
      gst_pad_start_task (ts->srcpad, (GstTaskFunction) gst_ts_shifter_loop,
          ts->srcpad, NULL);
      FLOW_MUTEX_UNLOCK (ts);
      break;
    }
    case GST_EVENT_STREAM_START:
    {
      gst_event_replace (&ts->stream_start_event, event);
      gst_event_unref (event);
      break;
    }
    default:
    {
      GST_CAT_LOG_OBJECT (ts_flow, ts, "dropped event %s",
          GST_EVENT_TYPE_NAME (event));
      gst_event_unref (event);
      ret = FALSE;
      break;
    }
  }

  return ret;
}
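/* Note (added for clarity, not from the original sources): gst_event_replace()
 * takes its own reference on the new event and drops the reference on the
 * event previously stored at the given location, so the caller keeps ownership
 * of the event it passed in.  That is why the STREAM_START branch above can
 * both store the event with gst_event_replace() and still unref its own
 * reference afterwards without leaking or double-freeing. */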
static gboolean
mpegtsmux_src_event (GstPad * pad, GstEvent * event)
{
  MpegTsMux *mux = GST_MPEG_TSMUX (gst_pad_get_parent (pad));
  gboolean res = TRUE;

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_CUSTOM_UPSTREAM:
    {
      GstIterator *iter;
      GstIteratorResult iter_ret;
      GstPad *sinkpad;
      GstClockTime running_time;
      gboolean all_headers, done;
      guint count;

      if (!gst_video_event_is_force_key_unit (event))
        break;

      gst_video_event_parse_upstream_force_key_unit (event,
          &running_time, &all_headers, &count);
      GST_INFO_OBJECT (mux, "received upstream force-key-unit event, "
          "seqnum %d running_time %" GST_TIME_FORMAT
          " all_headers %d count %d", gst_event_get_seqnum (event),
          GST_TIME_ARGS (running_time), all_headers, count);

      if (!all_headers)
        break;

      mux->pending_key_unit_ts = running_time;
      gst_event_replace (&mux->force_key_unit_event, event);

      iter = gst_element_iterate_sink_pads (GST_ELEMENT_CAST (mux));
      done = FALSE;
      while (!done) {
        gboolean res = FALSE, tmp;

        iter_ret = gst_iterator_next (iter, (gpointer *) & sinkpad);
        switch (iter_ret) {
          case GST_ITERATOR_DONE:
            done = TRUE;
            break;
          case GST_ITERATOR_OK:
            GST_INFO_OBJECT (mux, "forwarding to %s",
                gst_pad_get_name (sinkpad));
            tmp = gst_pad_push_event (sinkpad, gst_event_ref (event));
            GST_INFO_OBJECT (mux, "result %d", tmp);
            /* succeed if at least one pad succeeds */
            res |= tmp;
            gst_object_unref (sinkpad);
            break;
          case GST_ITERATOR_ERROR:
            done = TRUE;
            break;
          case GST_ITERATOR_RESYNC:
            break;
        }
      }
      gst_event_unref (event);
      break;
    }
    default:
      res = gst_pad_event_default (pad, event);
      break;
  }
  gst_object_unref (mux);

  return res;
}
static GstStateChangeReturn
gst_kate_enc_change_state (GstElement * element, GstStateChange transition)
{
  GstKateEnc *ke = GST_KATE_ENC (element);
  GstStateChangeReturn res;
  int ret;

  GST_INFO_OBJECT (ke, "gst_kate_enc_change_state");

  switch (transition) {
    case GST_STATE_CHANGE_NULL_TO_READY:
      ke->tags = gst_tag_list_new_empty ();
      break;
    case GST_STATE_CHANGE_READY_TO_PAUSED:
      GST_DEBUG_OBJECT (ke, "READY -> PAUSED, initializing kate state");
      ret = kate_info_init (&ke->ki);
      if (ret < 0) {
        GST_WARNING_OBJECT (ke, "failed to initialize kate info structure: %s",
            gst_kate_util_get_error_message (ret));
        break;
      }
      if (ke->language) {
        ret = kate_info_set_language (&ke->ki, ke->language);
        if (ret < 0) {
          GST_WARNING_OBJECT (ke, "failed to set stream language: %s",
              gst_kate_util_get_error_message (ret));
          break;
        }
      }
      if (ke->category) {
        ret = kate_info_set_category (&ke->ki, ke->category);
        if (ret < 0) {
          GST_WARNING_OBJECT (ke, "failed to set stream category: %s",
              gst_kate_util_get_error_message (ret));
          break;
        }
      }
      ret = kate_info_set_original_canvas_size (&ke->ki,
          ke->original_canvas_width, ke->original_canvas_height);
      if (ret < 0) {
        GST_WARNING_OBJECT (ke, "failed to set original canvas size: %s",
            gst_kate_util_get_error_message (ret));
        break;
      }
      ret = kate_comment_init (&ke->kc);
      if (ret < 0) {
        GST_WARNING_OBJECT (ke,
            "failed to initialize kate comment structure: %s",
            gst_kate_util_get_error_message (ret));
        break;
      }
      ret = kate_encode_init (&ke->k, &ke->ki);
      if (ret < 0) {
        GST_WARNING_OBJECT (ke, "failed to initialize kate state: %s",
            gst_kate_util_get_error_message (ret));
        break;
      }
      ke->headers_sent = FALSE;
      ke->initialized = TRUE;
      ke->last_timestamp = 0;
      ke->latest_end_time = 0;
      ke->format = GST_KATE_FORMAT_UNDEFINED;
      break;
    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
      break;
    case GST_STATE_CHANGE_READY_TO_NULL:
      gst_tag_list_unref (ke->tags);
      ke->tags = NULL;
      break;
    default:
      break;
  }

  res = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
  if (res == GST_STATE_CHANGE_FAILURE) {
    GST_WARNING_OBJECT (ke, "Parent failed to change state");
    return res;
  }

  switch (transition) {
    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
      break;
    case GST_STATE_CHANGE_PAUSED_TO_READY:
      GST_DEBUG_OBJECT (ke, "PAUSED -> READY, clearing kate state");
      if (ke->initialized) {
        kate_clear (&ke->k);
        kate_info_clear (&ke->ki);
        kate_comment_clear (&ke->kc);
        ke->initialized = FALSE;
        ke->last_timestamp = 0;
        ke->latest_end_time = 0;
      }
      gst_event_replace (&ke->pending_segment, NULL);
      break;
    case GST_STATE_CHANGE_READY_TO_NULL:
      break;
    default:
      break;
  }

  GST_DEBUG_OBJECT (ke, "State change done");

  return res;
}
static GstFlowReturn
gst_aiff_parse_stream_headers (GstAiffParse * aiff)
{
  GstFlowReturn res;
  GstBuffer *buf;
  guint32 tag, size;
  gboolean gotdata = FALSE;
  gboolean done = FALSE;
  GstEvent **event_p;
  GstFormat bformat;
  gint64 upstream_size = 0;

  bformat = GST_FORMAT_BYTES;
  gst_pad_query_peer_duration (aiff->sinkpad, &bformat, &upstream_size);
  GST_DEBUG_OBJECT (aiff, "upstream size %" G_GUINT64_FORMAT, upstream_size);

  /* loop headers until we get data */
  while (!done) {
    if (aiff->streaming) {
      if (!gst_aiff_parse_peek_chunk_info (aiff, &tag, &size))
        return GST_FLOW_OK;
    } else {
      if ((res =
              gst_pad_pull_range (aiff->sinkpad, aiff->offset, 8,
                  &buf)) != GST_FLOW_OK)
        goto header_read_error;
      tag = GST_READ_UINT32_LE (GST_BUFFER_DATA (buf));
      size = GST_READ_UINT32_BE (GST_BUFFER_DATA (buf) + 4);
    }

    GST_INFO_OBJECT (aiff,
        "Got TAG: %" GST_FOURCC_FORMAT ", offset %" G_GUINT64_FORMAT,
        GST_FOURCC_ARGS (tag), aiff->offset);

    /* We just keep reading chunks until we find the one we're interested
     * in. */
    switch (tag) {
      case GST_MAKE_FOURCC ('C', 'O', 'M', 'M'):{
        if (aiff->streaming) {
          if (!gst_aiff_parse_peek_chunk (aiff, &tag, &size))
            return GST_FLOW_OK;

          gst_adapter_flush (aiff->adapter, 8);
          aiff->offset += 8;

          buf = gst_adapter_take_buffer (aiff->adapter, size);
        } else {
          if ((res = gst_aiff_parse_read_chunk (aiff,
                      &aiff->offset, &tag, &buf)) != GST_FLOW_OK)
            return res;
        }

        if (!gst_aiff_parse_parse_comm (aiff, buf)) {
          gst_buffer_unref (buf);
          goto parse_header_error;
        }

        gst_buffer_unref (buf);

        /* do sanity checks of header fields */
        if (aiff->channels == 0)
          goto no_channels;
        if (aiff->rate == 0)
          goto no_rate;

        GST_DEBUG_OBJECT (aiff, "creating the caps");

        aiff->caps = gst_aiff_parse_create_caps (aiff);
        if (!aiff->caps)
          goto unknown_format;

        gst_pad_set_caps (aiff->srcpad, aiff->caps);

        aiff->bytes_per_sample = aiff->channels * aiff->width / 8;
        aiff->bps = aiff->bytes_per_sample * aiff->rate;

        if (aiff->bytes_per_sample <= 0)
          goto no_bytes_per_sample;

        aiff->got_comm = TRUE;
        break;
      }
      case GST_MAKE_FOURCC ('S', 'S', 'N', 'D'):{
        GstFormat fmt;
        GstBuffer *ssndbuf = NULL;
        const guint8 *ssnddata = NULL;
        guint32 datasize;

        GST_DEBUG_OBJECT (aiff, "Got 'SSND' TAG, size : %d", size);

        /* Now, read the 8-byte header in the SSND chunk */
        if (aiff->streaming) {
          if (!gst_aiff_parse_peek_data (aiff, 16, &ssnddata))
            return GST_FLOW_OK;
        } else {
          gst_buffer_unref (buf);
          if ((res =
                  gst_pad_pull_range (aiff->sinkpad, aiff->offset, 16,
                      &ssndbuf)) != GST_FLOW_OK)
            goto header_read_error;
          ssnddata = GST_BUFFER_DATA (ssndbuf);
        }

        aiff->ssnd_offset = GST_READ_UINT32_BE (ssnddata + 8);
        aiff->ssnd_blocksize = GST_READ_UINT32_BE (ssnddata + 12);

        gotdata = TRUE;
        if (aiff->streaming) {
          gst_adapter_flush (aiff->adapter, 16);
        } else {
          gst_buffer_unref (ssndbuf);
        }
        /* 8 byte chunk header, 16 byte SSND header */
        aiff->offset += 24;

        datasize = size - 16;

        aiff->datastart = aiff->offset + aiff->ssnd_offset;
        /* file might be truncated */
        fmt = GST_FORMAT_BYTES;
        if (upstream_size) {
          size = MIN (datasize, (upstream_size - aiff->datastart));
        }
        aiff->datasize = (guint64) datasize;
        aiff->dataleft = (guint64) datasize;
        aiff->end_offset = datasize + aiff->datastart;
        if (!aiff->streaming) {
          /* We will continue looking at chunks until the end - to read tags,
           * etc. */
          aiff->offset += datasize;
        }
        GST_DEBUG_OBJECT (aiff, "datasize = %d", datasize);
        if (aiff->streaming) {
          done = TRUE;
        }
        break;
      }
      default:
        gst_aiff_parse_ignore_chunk (aiff, buf, tag, size);
    }

    if (upstream_size && (aiff->offset >= upstream_size)) {
      /* Now we have gone through the whole file */
      done = TRUE;
    }
  }

  /* We read all the chunks (in pull mode) or reached the SSND chunk
   * (in push mode). We must have both COMM and SSND now; error out
   * otherwise. */
  if (!aiff->got_comm) {
    GST_WARNING_OBJECT (aiff, "Failed to find COMM chunk");
    goto no_header;
  }
  if (!gotdata) {
    GST_WARNING_OBJECT (aiff, "Failed to find SSND chunk");
    goto no_data;
  }

  GST_DEBUG_OBJECT (aiff, "Finished parsing headers");

  if (gst_aiff_parse_calculate_duration (aiff)) {
    gst_segment_init (&aiff->segment, GST_FORMAT_TIME);
    gst_segment_set_duration (&aiff->segment, GST_FORMAT_TIME,
        aiff->duration);
  } else {
    /* no bitrate, let downstream peer do the math, we'll feed it bytes. */
    gst_segment_init (&aiff->segment, GST_FORMAT_BYTES);
    gst_segment_set_duration (&aiff->segment, GST_FORMAT_BYTES,
        aiff->datasize);
  }

  /* now we have all the info to perform a pending seek if any, if no
   * event, this will still do the right thing and it will also send
   * the right newsegment event downstream. */
  gst_aiff_parse_perform_seek (aiff, aiff->seek_event);
  /* remove pending event */
  event_p = &aiff->seek_event;
  gst_event_replace (event_p, NULL);

  /* we just started, we are discont */
  aiff->discont = TRUE;

  aiff->state = AIFF_PARSE_DATA;

  return GST_FLOW_OK;

  /* ERROR */
no_header:
  {
    GST_ELEMENT_ERROR (aiff, STREAM, TYPE_NOT_FOUND, (NULL),
        ("Invalid AIFF header (no COMM found)"));
    return GST_FLOW_ERROR;
  }
no_data:
  {
    GST_ELEMENT_ERROR (aiff, STREAM, TYPE_NOT_FOUND, (NULL),
        ("Invalid AIFF: no SSND found"));
    return GST_FLOW_ERROR;
  }
parse_header_error:
  {
    GST_ELEMENT_ERROR (aiff, STREAM, DEMUX, (NULL),
        ("Couldn't parse audio header"));
    return GST_FLOW_ERROR;
  }
no_channels:
  {
    GST_ELEMENT_ERROR (aiff, STREAM, FAILED, (NULL),
        ("Stream claims to contain no channels - invalid data"));
    return GST_FLOW_ERROR;
  }
no_rate:
  {
    GST_ELEMENT_ERROR (aiff, STREAM, FAILED, (NULL),
        ("Stream with sample_rate == 0 - invalid data"));
    return GST_FLOW_ERROR;
  }
no_bytes_per_sample:
  {
    GST_ELEMENT_ERROR (aiff, STREAM, FAILED, (NULL),
        ("Could not calculate bytes per sample - invalid data"));
    return GST_FLOW_ERROR;
  }
unknown_format:
  {
    GST_ELEMENT_ERROR (aiff, STREAM, TYPE_NOT_FOUND, (NULL),
        ("No caps found for format 0x%x, %d channels, %d Hz",
            aiff->format, aiff->channels, aiff->rate));
    return GST_FLOW_ERROR;
  }
header_read_error:
  {
    GST_ELEMENT_ERROR (aiff, STREAM, DEMUX, (NULL),
        ("Couldn't read in header"));
    return GST_FLOW_ERROR;
  }
}
static gboolean
gst_rtp_base_depayload_handle_event (GstRTPBaseDepayload * filter,
    GstEvent * event)
{
  gboolean res = TRUE;
  gboolean forward = TRUE;

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_FLUSH_STOP:
      GST_OBJECT_LOCK (filter);
      gst_segment_init (&filter->segment, GST_FORMAT_UNDEFINED);
      GST_OBJECT_UNLOCK (filter);
      filter->need_newsegment = TRUE;
      filter->priv->next_seqnum = -1;
      gst_event_replace (&filter->priv->segment_event, NULL);
      break;
    case GST_EVENT_CAPS:
    {
      GstCaps *caps;

      gst_event_parse_caps (event, &caps);
      res = gst_rtp_base_depayload_setcaps (filter, caps);
      forward = FALSE;
      break;
    }
    case GST_EVENT_SEGMENT:
    {
      GstSegment segment;

      GST_OBJECT_LOCK (filter);
      gst_event_copy_segment (event, &segment);

      if (segment.format != GST_FORMAT_TIME) {
        GST_ERROR_OBJECT (filter,
            "Segment with non-TIME format not supported");
        res = FALSE;
      }
      filter->segment = segment;
      GST_OBJECT_UNLOCK (filter);

      /* don't pass the event downstream, we generate our own segment
       * including the NTP time and other things we receive in caps */
      forward = FALSE;
      break;
    }
    case GST_EVENT_CUSTOM_DOWNSTREAM:
    {
      GstRTPBaseDepayloadClass *bclass;

      bclass = GST_RTP_BASE_DEPAYLOAD_GET_CLASS (filter);

      if (gst_event_has_name (event, "GstRTPPacketLost")) {
        /* we get this event from the jitterbuffer when it considers a packet
         * as being lost. We send it to our packet_lost vmethod. The default
         * implementation will make time progress by pushing out a GAP event.
         * Subclasses can override and do one of the following:
         *  - Adjust timestamp/duration to something more accurate before
         *    calling the parent (default) packet_lost method.
         *  - do some more advanced error concealing on the already received
         *    (fragmented) packets.
         *  - ignore the packet lost.
         */
        if (bclass->packet_lost)
          res = bclass->packet_lost (filter, event);
        forward = FALSE;
      }
      break;
    }
    default:
      break;
  }

  if (forward)
    res = gst_pad_push_event (filter->srcpad, event);
  else
    gst_event_unref (event);

  return res;
}
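/* Hedged sketch, not part of the original sources: a minimal subclass
 * override of the packet_lost vmethod described in the comment above.
 * "my_depay_packet_lost" and "parent_class" are hypothetical names as they
 * would appear in a typical G_DEFINE_TYPE'd depayloader; the point is only
 * the chain-up pattern the comment mentions. */
static gboolean
my_depay_packet_lost (GstRTPBaseDepayload * depayload, GstEvent * event)
{
  /* A real implementation could first rewrite the event's fields (e.g. a
   * more accurate timestamp/duration) or conceal the loss from already
   * received packets.  Here we simply chain up so the default
   * implementation pushes a GAP event to make time progress. */
  return GST_RTP_BASE_DEPAYLOAD_CLASS (parent_class)->packet_lost (depayload,
      event);
}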