/* Test pad probe: forces every stream-start event seen on this pad to carry
 * the first group-id that was observed, so all streams appear to belong to
 * the same group.  State (have_group_id / group_id_pre) is shared across
 * probes and protected by test_mutex. */
static GstPadProbeReturn
event_probe (GstPad * pad, GstPadProbeInfo * info, gpointer udata)
{
  GstEvent *ev = GST_PAD_PROBE_INFO_DATA (info);

  if (GST_EVENT_TYPE (ev) == GST_EVENT_STREAM_START) {
    guint gid;

    g_mutex_lock (&test_mutex);
    /* Every stream-start must carry a group id, else the test fails */
    fail_unless (gst_event_parse_group_id (ev, &gid));
    if (!have_group_id) {
      /* First stream-start seen: remember its group id as the reference */
      group_id_pre = gid;
      have_group_id = TRUE;
    } else if (group_id_pre != gid) {
      /* Later stream-start with a different id: rewrite it in place */
      ev = gst_event_copy (ev);
      gst_event_set_group_id (ev, group_id_pre);
      gst_event_replace ((GstEvent **) & info->data, ev);
      /* gst_event_replace() took its own ref; drop ours */
      gst_event_unref (ev);
    }
    g_mutex_unlock (&test_mutex);
  }

  return GST_PAD_PROBE_OK;
}
/* Pushes the initial sticky events (stream-start, caps, segment) on the
 * parser's source pad the first time this is called.  Requires the
 * packetizer to have detected a packet size; returns FALSE until then so
 * the caller retries later.  Subsequent calls are no-ops returning TRUE. */
static gboolean
prepare_src_pad (MpegTSBase * base, MpegTSParse2 * parse)
{
  GstEvent *evt;
  GstCaps *outcaps;
  gchar *sid;

  if (!parse->first)
    return TRUE;

  /* Without a detected packet size we cannot negotiate caps yet */
  if (G_UNLIKELY (base->packetizer->packet_size == 0))
    return FALSE;

  sid = gst_pad_create_stream_id (parse->srcpad, GST_ELEMENT_CAST (base),
      "multi-program");

  /* Reuse the upstream group id when one is available, otherwise keep or
   * allocate our own */
  evt = gst_pad_get_sticky_event (parse->parent.sinkpad,
      GST_EVENT_STREAM_START, 0);
  if (evt) {
    parse->have_group_id = gst_event_parse_group_id (evt, &parse->group_id);
    gst_event_unref (evt);
  } else if (!parse->have_group_id) {
    parse->have_group_id = TRUE;
    parse->group_id = gst_util_group_id_next ();
  }

  evt = gst_event_new_stream_start (sid);
  if (parse->have_group_id)
    gst_event_set_group_id (evt, parse->group_id);
  gst_pad_push_event (parse->srcpad, evt);
  g_free (sid);

  outcaps = gst_caps_new_simple ("video/mpegts",
      "systemstream", G_TYPE_BOOLEAN, TRUE,
      "packetsize", G_TYPE_INT, base->packetizer->packet_size, NULL);
  gst_pad_set_caps (parse->srcpad, outcaps);
  gst_caps_unref (outcaps);

  /* If setting output timestamps, ensure that the output segment is TIME */
  if (parse->set_timestamps && base->segment.format != GST_FORMAT_TIME) {
    GstSegment seg;

    gst_segment_init (&seg, GST_FORMAT_TIME);
    GST_DEBUG_OBJECT (parse, "Generating time output segment %"
        GST_SEGMENT_FORMAT, &seg);
    gst_pad_push_event (parse->srcpad, gst_event_new_segment (&seg));
  } else {
    gst_pad_push_event (parse->srcpad, gst_event_new_segment (&base->segment));
  }

  parse->first = FALSE;

  return TRUE;
}
/* Probe on the output of a parser chain (the last src pad).
 *
 * Events: rewrites the group-id on stream-start when the input settled on a
 * different one, tracks the active GstStream (linking it to a multiqueue
 * slot when a new one appears), forwards caps to the active stream, and
 * converts EOS into either a global EOS check or a custom-eos event for the
 * multiqueue slot (the real EOS is dropped).
 * Queries: answers CAPS (pull mode) and ACCEPT_CAPS on behalf of the
 * parser chain. */
static GstPadProbeReturn
parse_chain_output_probe (GstPad * pad, GstPadProbeInfo * info,
    DecodebinInputStream * input)
{
  GstPadProbeReturn ret = GST_PAD_PROBE_OK;

  if (GST_IS_EVENT (GST_PAD_PROBE_INFO_DATA (info))) {
    GstEvent *ev = GST_PAD_PROBE_INFO_EVENT (info);

    GST_DEBUG_OBJECT (pad, "Got event %s", GST_EVENT_TYPE_NAME (ev));
    switch (GST_EVENT_TYPE (ev)) {
      case GST_EVENT_STREAM_START:
      {
        GstStream *stream = NULL;
        guint group_id = G_MAXUINT32;

        gst_event_parse_group_id (ev, &group_id);
        GST_DEBUG_OBJECT (pad, "Got stream-start, group_id:%d, input %p",
            group_id, input->input);
        /* If the input decided on a different group-id, rewrite the event
         * in place before it travels further */
        if (set_input_group_id (input->input, &group_id)) {
          ev = gst_event_make_writable (ev);
          gst_event_set_group_id (ev, group_id);
          GST_PAD_PROBE_INFO_DATA (info) = ev;
        }
        input->saw_eos = FALSE;

        gst_event_parse_stream (ev, &stream);
        /* FIXME : Would we ever end up with a stream already set on the input ?? */
        if (stream) {
          if (input->active_stream != stream) {
            MultiQueueSlot *slot;

            if (input->active_stream)
              gst_object_unref (input->active_stream);
            /* Takes ownership of the ref from gst_event_parse_stream() */
            input->active_stream = stream;
            /* We have the beginning of a stream, get a multiqueue slot and
             * link to it */
            g_mutex_lock (&input->dbin->selection_lock);
            slot = get_slot_for_input (input->dbin, input);
            link_input_to_slot (input, slot);
            g_mutex_unlock (&input->dbin->selection_lock);
          } else
            gst_object_unref (stream);
        }
      }
        break;
      case GST_EVENT_CAPS:
      {
        GstCaps *caps = NULL;

        gst_event_parse_caps (ev, &caps);
        GST_DEBUG_OBJECT (pad, "caps %" GST_PTR_FORMAT, caps);
        if (caps && input->active_stream)
          gst_stream_set_caps (input->active_stream, caps);
      }
        break;
      case GST_EVENT_EOS:
        input->saw_eos = TRUE;
        if (all_inputs_are_eos (input->dbin)) {
          GST_DEBUG_OBJECT (pad, "real input pad, marking as EOS");
          check_all_streams_for_eos (input->dbin);
        } else {
          GstPad *peer = gst_pad_get_peer (input->srcpad);

          if (peer) {
            /* Send custom-eos event to multiqueue slot */
            GstStructure *s;
            GstEvent *event;

            GST_DEBUG_OBJECT (pad,
                "Got EOS end of input stream, post custom-eos");
            s = gst_structure_new_empty ("decodebin3-custom-eos");
            event = gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM, s);
            gst_pad_send_event (peer, event);
            gst_object_unref (peer);
          } else {
            GST_FIXME_OBJECT (pad, "No peer, what should we do ?");
          }
        }
        /* Drop the real EOS: downstream is either EOSed globally above or
         * notified via the custom-eos event */
        ret = GST_PAD_PROBE_DROP;
        break;
      case GST_EVENT_FLUSH_STOP:
        GST_DEBUG_OBJECT (pad, "Clear saw_eos flag");
        input->saw_eos = FALSE;
        /* Explicit break (previously fell through into default implicitly) */
        break;
      default:
        break;
    }
  } else if (GST_IS_QUERY (GST_PAD_PROBE_INFO_DATA (info))) {
    GstQuery *q = GST_PAD_PROBE_INFO_QUERY (info);

    GST_DEBUG_OBJECT (pad, "Seeing query %s", GST_QUERY_TYPE_NAME (q));
    /* If we have a parser, we want to reply to the caps query */
    /* FIXME: Set a flag when the input stream is created for
     * streams where we shouldn't reply to these queries */
    if (GST_QUERY_TYPE (q) == GST_QUERY_CAPS
        && (info->type & GST_PAD_PROBE_TYPE_PULL)) {
      GstCaps *filter = NULL;
      GstCaps *allowed;

      gst_query_parse_caps (q, &filter);
      allowed = get_parser_caps_filter (input->dbin, filter);
      GST_DEBUG_OBJECT (pad,
          "Intercepting caps query, setting %" GST_PTR_FORMAT, allowed);
      gst_query_set_caps_result (q, allowed);
      gst_caps_unref (allowed);
      /* HANDLED: the query is answered here and not forwarded */
      ret = GST_PAD_PROBE_HANDLED;
    } else if (GST_QUERY_TYPE (q) == GST_QUERY_ACCEPT_CAPS) {
      GstCaps *prop = NULL;

      gst_query_parse_accept_caps (q, &prop);
      /* Fast check against target caps */
      if (gst_caps_can_intersect (prop, input->dbin->caps))
        gst_query_set_accept_caps_result (q, TRUE);
      else {
        /* check against caps filter */
        gboolean accepted = check_parser_caps_filter (input->dbin, prop);

        gst_query_set_accept_caps_result (q, accepted);
        GST_DEBUG_OBJECT (pad, "ACCEPT_CAPS query, returning %d", accepted);
      }
      ret = GST_PAD_PROBE_HANDLED;
    }
  }

  return ret;
}
/* sinkpad functions */

/* Sink-pad event handler of the stream synchronizer.
 *
 * - STREAM_START: detects when a stream switches to a new group (by group-id
 *   when available, by seqnum otherwise), makes the stream wait until all
 *   non-sparse streams of the group have switched, then advances
 *   group_start_time and wakes every waiter.
 * - SEGMENT: shifts the segment base by group_start_time so all streams of a
 *   group start at a common running time; replaces the event accordingly.
 * - FLUSH_START / FLUSH_STOP: unblocks waiters, resets per-stream state and
 *   recomputes group_start_time from the remaining streams.
 * - CUSTOM_DOWNSTREAM_OOB (playsink-custom-*-flush): unblocks EOS waiters
 *   on a track switch.
 * - EOS: records the stream position, and either forwards EOS (all streams
 *   EOS), blocks waiting for the others, or sends EOS directly when no data
 *   was ever seen.  This case consumes the event and returns directly.
 * Everything that touches per-stream state runs under the synchronizer lock.
 */
static gboolean
gst_stream_synchronizer_sink_event (GstPad * pad, GstObject * parent,
    GstEvent * event)
{
  GstStreamSynchronizer *self = GST_STREAM_SYNCHRONIZER (parent);
  gboolean ret = FALSE;

  GST_LOG_OBJECT (pad, "Handling event %s: %" GST_PTR_FORMAT,
      GST_EVENT_TYPE_NAME (event), event);

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_STREAM_START:
    {
      GstSyncStream *stream, *ostream;
      guint32 seqnum = gst_event_get_seqnum (event);
      guint group_id;
      gboolean have_group_id;
      GList *l;
      gboolean all_wait = TRUE;
      gboolean new_stream = TRUE;

      have_group_id = gst_event_parse_group_id (event, &group_id);

      GST_STREAM_SYNCHRONIZER_LOCK (self);
      /* Once any stream lacks a group id, stop using group ids entirely
       * and fall back to seqnum matching for all streams */
      self->have_group_id &= have_group_id;
      have_group_id = self->have_group_id;

      stream = gst_pad_get_element_private (pad);
      if (!stream) {
        GST_DEBUG_OBJECT (self, "No stream or STREAM_START from same source");
        GST_STREAM_SYNCHRONIZER_UNLOCK (self);
        break;
      }

      gst_event_parse_stream_flags (event, &stream->flags);

      /* Only react when this is genuinely a new group/stream for this pad */
      if ((have_group_id && stream->group_id != group_id) || (!have_group_id
              && stream->stream_start_seqnum != seqnum)) {
        stream->is_eos = FALSE;
        stream->eos_sent = FALSE;
        stream->flushing = FALSE;
        stream->stream_start_seqnum = seqnum;
        stream->group_id = group_id;

        if (!have_group_id) {
          /* Check if this belongs to a stream that is already there,
           * e.g. we got the visualizations for an audio stream */
          for (l = self->streams; l; l = l->next) {
            ostream = l->data;

            if (ostream != stream && ostream->stream_start_seqnum == seqnum
                && !ostream->wait) {
              new_stream = FALSE;
              break;
            }
          }

          if (!new_stream) {
            /* Same seqnum as an already-running stream: join it, no wait */
            GST_DEBUG_OBJECT (pad,
                "Stream %d belongs to running stream %d, no waiting",
                stream->stream_number, ostream->stream_number);
            stream->wait = FALSE;
            GST_STREAM_SYNCHRONIZER_UNLOCK (self);
            break;
          }
        } else if (group_id == self->group_id) {
          /* Same group as the currently running one: nothing to sync */
          GST_DEBUG_OBJECT (pad, "Stream %d belongs to running group %d, "
              "no waiting", stream->stream_number, group_id);
          GST_STREAM_SYNCHRONIZER_UNLOCK (self);
          break;
        }

        GST_DEBUG_OBJECT (pad, "Stream %d changed", stream->stream_number);

        stream->wait = TRUE;

        /* Unblock only once every non-sparse stream of the (new) group is
         * waiting too */
        for (l = self->streams; l; l = l->next) {
          GstSyncStream *ostream = l->data;

          all_wait = all_wait && ((ostream->flags & GST_STREAM_FLAG_SPARSE)
              || (ostream->wait && (!have_group_id
                      || ostream->group_id == group_id)));
          if (!all_wait)
            break;
        }

        if (all_wait) {
          gint64 position = 0;

          if (have_group_id)
            GST_DEBUG_OBJECT (self,
                "All streams have changed to group id %u -- unblocking",
                group_id);
          else
            GST_DEBUG_OBJECT (self, "All streams have changed -- unblocking");

          self->group_id = group_id;

          /* Compute how far the old group got (largest running time reached
           * by any TIME stream) and advance the group start time by that */
          for (l = self->streams; l; l = l->next) {
            GstSyncStream *ostream = l->data;
            gint64 stop_running_time;
            gint64 position_running_time;

            ostream->wait = FALSE;

            if (ostream->segment.format == GST_FORMAT_TIME) {
              stop_running_time =
                  gst_segment_to_running_time (&ostream->segment,
                  GST_FORMAT_TIME, ostream->segment.stop);
              position_running_time =
                  gst_segment_to_running_time (&ostream->segment,
                  GST_FORMAT_TIME, ostream->segment.position);
              position_running_time =
                  MAX (position_running_time, stop_running_time);
              /* Make it relative to the segment start, clamp at 0 */
              position_running_time -=
                  gst_segment_to_running_time (&ostream->segment,
                  GST_FORMAT_TIME, ostream->segment.start);
              position_running_time = MAX (0, position_running_time);
              position = MAX (position, position_running_time);
            }
          }

          self->group_start_time += position;

          GST_DEBUG_OBJECT (self, "New group start time: %" GST_TIME_FORMAT,
              GST_TIME_ARGS (self->group_start_time));

          /* Wake every stream blocked in gst_stream_synchronizer_wait() */
          for (l = self->streams; l; l = l->next) {
            GstSyncStream *ostream = l->data;

            ostream->wait = FALSE;
            g_cond_broadcast (&ostream->stream_finish_cond);
          }
        }
      }

      GST_STREAM_SYNCHRONIZER_UNLOCK (self);
      break;
    }
    case GST_EVENT_SEGMENT:{
      GstSyncStream *stream;
      GstSegment segment;

      gst_event_copy_segment (event, &segment);

      GST_STREAM_SYNCHRONIZER_LOCK (self);

      /* Block here until the whole group is ready (see STREAM_START above) */
      gst_stream_synchronizer_wait (self, pad);

      if (self->shutdown) {
        GST_STREAM_SYNCHRONIZER_UNLOCK (self);
        gst_event_unref (event);
        goto done;
      }

      stream = gst_pad_get_element_private (pad);
      if (stream && segment.format == GST_FORMAT_TIME) {
        /* Shift the base so this stream starts at the group start time */
        GST_DEBUG_OBJECT (pad,
            "New stream, updating base from %" GST_TIME_FORMAT " to %"
            GST_TIME_FORMAT, GST_TIME_ARGS (segment.base),
            GST_TIME_ARGS (segment.base + self->group_start_time));
        segment.base += self->group_start_time;

        GST_DEBUG_OBJECT (pad, "Segment was: %" GST_SEGMENT_FORMAT,
            &stream->segment);
        gst_segment_copy_into (&segment, &stream->segment);
        GST_DEBUG_OBJECT (pad, "Segment now is: %" GST_SEGMENT_FORMAT,
            &stream->segment);
        stream->segment_seqnum = gst_event_get_seqnum (event);

        GST_DEBUG_OBJECT (pad, "Stream start running time: %" GST_TIME_FORMAT,
            GST_TIME_ARGS (stream->segment.base));
        {
          /* Replace the event with one carrying the adjusted segment,
           * preserving the original seqnum */
          GstEvent *tmpev;

          tmpev = gst_event_new_segment (&stream->segment);
          gst_event_set_seqnum (tmpev, stream->segment_seqnum);
          gst_event_unref (event);
          event = tmpev;
        }
      } else if (stream) {
        GST_WARNING_OBJECT (pad, "Non-TIME segment: %s",
            gst_format_get_name (segment.format));
        gst_segment_init (&stream->segment, GST_FORMAT_UNDEFINED);
      }
      GST_STREAM_SYNCHRONIZER_UNLOCK (self);
      break;
    }
    case GST_EVENT_FLUSH_START:{
      GstSyncStream *stream;

      GST_STREAM_SYNCHRONIZER_LOCK (self);
      stream = gst_pad_get_element_private (pad);
      self->eos = FALSE;
      if (stream) {
        GST_DEBUG_OBJECT (pad, "Flushing streams");
        stream->flushing = TRUE;
        /* Wake up anything blocked on this stream's condition */
        g_cond_broadcast (&stream->stream_finish_cond);
      }
      GST_STREAM_SYNCHRONIZER_UNLOCK (self);
      break;
    }
    case GST_EVENT_FLUSH_STOP:{
      GstSyncStream *stream;
      GList *l;
      GstClockTime new_group_start_time = 0;

      GST_STREAM_SYNCHRONIZER_LOCK (self);
      stream = gst_pad_get_element_private (pad);
      if (stream) {
        GST_DEBUG_OBJECT (pad, "Resetting segment for stream %d",
            stream->stream_number);
        gst_segment_init (&stream->segment, GST_FORMAT_UNDEFINED);

        stream->is_eos = FALSE;
        stream->eos_sent = FALSE;
        stream->flushing = FALSE;
        stream->wait = FALSE;
        g_cond_broadcast (&stream->stream_finish_cond);
      }

      /* Recompute the group start time from the streams that keep running */
      for (l = self->streams; l; l = l->next) {
        GstSyncStream *ostream = l->data;
        GstClockTime start_running_time;

        if (ostream == stream || ostream->flushing)
          continue;

        if (ostream->segment.format == GST_FORMAT_TIME) {
          start_running_time =
              gst_segment_to_running_time (&ostream->segment,
              GST_FORMAT_TIME, ostream->segment.start);

          new_group_start_time = MAX (new_group_start_time, start_running_time);
        }
      }

      GST_DEBUG_OBJECT (pad, "Updating group start time from %" GST_TIME_FORMAT
          " to %" GST_TIME_FORMAT, GST_TIME_ARGS (self->group_start_time),
          GST_TIME_ARGS (new_group_start_time));
      self->group_start_time = new_group_start_time;
      GST_STREAM_SYNCHRONIZER_UNLOCK (self);
      break;
    }
      /* unblocking EOS wait when track switch. */
    case GST_EVENT_CUSTOM_DOWNSTREAM_OOB:{
      if (gst_event_has_name (event, "playsink-custom-video-flush")
          || gst_event_has_name (event, "playsink-custom-audio-flush")
          || gst_event_has_name (event, "playsink-custom-subtitle-flush")) {
        GstSyncStream *stream;

        GST_STREAM_SYNCHRONIZER_LOCK (self);
        stream = gst_pad_get_element_private (pad);
        if (stream) {
          stream->is_eos = FALSE;
          stream->eos_sent = FALSE;
          stream->wait = FALSE;
          g_cond_broadcast (&stream->stream_finish_cond);
        }
        GST_STREAM_SYNCHRONIZER_UNLOCK (self);
      }
      break;
    }
    case GST_EVENT_EOS:{
      GstSyncStream *stream;
      GList *l;
      gboolean all_eos = TRUE;
      gboolean seen_data;
      GSList *pads = NULL;
      GstPad *srcpad;
      GstClockTime timestamp;

      GST_STREAM_SYNCHRONIZER_LOCK (self);
      stream = gst_pad_get_element_private (pad);
      if (!stream) {
        GST_STREAM_SYNCHRONIZER_UNLOCK (self);
        GST_WARNING_OBJECT (pad, "EOS for unknown stream");
        break;
      }

      GST_DEBUG_OBJECT (pad, "Have EOS for stream %d", stream->stream_number);
      stream->is_eos = TRUE;

      seen_data = stream->seen_data;
      srcpad = gst_object_ref (stream->srcpad);

      /* Pick a final position: last known position if we saw data,
       * otherwise segment start/stop depending on direction */
      if (seen_data && stream->segment.position != -1)
        timestamp = stream->segment.position;
      else if (stream->segment.rate < 0.0 || stream->segment.stop == -1)
        timestamp = stream->segment.start;
      else
        timestamp = stream->segment.stop;

      stream->segment.position = timestamp;

      for (l = self->streams; l; l = l->next) {
        GstSyncStream *ostream = l->data;

        all_eos = all_eos && ostream->is_eos;
        if (!all_eos)
          break;
      }

      if (all_eos) {
        GST_DEBUG_OBJECT (self, "All streams are EOS -- forwarding");
        self->eos = TRUE;
        for (l = self->streams; l; l = l->next) {
          GstSyncStream *ostream = l->data;
          /* local snapshot of current pads */
          gst_object_ref (ostream->srcpad);
          pads = g_slist_prepend (pads, ostream->srcpad);
        }
      }

      if (pads) {
        /* All streams EOS: wake every waiter so they can push their EOS.
         * NOTE: the inner 'pad' shadows the parameter on purpose. */
        GstPad *pad;
        GSList *epad;
        GstSyncStream *ostream;

        ret = TRUE;
        epad = pads;
        while (epad) {
          pad = epad->data;
          ostream = gst_pad_get_element_private (pad);
          if (ostream) {
            g_cond_broadcast (&ostream->stream_finish_cond);
          }
          gst_object_unref (pad);
          epad = g_slist_next (epad);
        }
        g_slist_free (pads);
      } else {
        /* Not all streams are EOS yet: if we produced data, arrange for a
         * GAP event and block until the rest catch up */
        if (seen_data) {
          self->send_gap_event = TRUE;
          stream->gap_duration = GST_CLOCK_TIME_NONE;
          stream->wait = TRUE;
          ret = gst_stream_synchronizer_wait (self, srcpad);
        }
      }

      /* send eos if haven't seen data. seen_data will be true if data buffer
       * of the track have received in anytime. sink is ready if seen_data is
       * true, so can send GAP event. Will send EOS if sink isn't ready. The
       * scenario for the case is one track haven't any media data and then
       * send EOS. Or no any valid media data in one track, so decoder can't
       * get valid CAPS for the track. sink can't ready without received CAPS.*/
      if (!seen_data || self->eos) {
        GST_DEBUG_OBJECT (pad, "send EOS event");
        /* drop lock when sending eos, which may block in e.g. preroll */
        GST_STREAM_SYNCHRONIZER_UNLOCK (self);
        ret = gst_pad_push_event (srcpad, gst_event_new_eos ());
        GST_STREAM_SYNCHRONIZER_LOCK (self);
        /* Re-fetch: the stream may have been freed while unlocked */
        stream = gst_pad_get_element_private (pad);
        if (stream) {
          stream->eos_sent = TRUE;
        }
      }
      gst_object_unref (srcpad);
      /* Event fully handled here; do not run the default handler */
      gst_event_unref (event);
      GST_STREAM_SYNCHRONIZER_UNLOCK (self);
      goto done;
    }
    default:
      break;
  }

  ret = gst_pad_event_default (pad, parent, event);

done:
  return ret;
}
/* Looks up the source pad handling @mime in the demuxer's pad list.  When
 * none exists yet, a new pad is created, activated, given a sticky
 * stream-start (reusing the upstream group-id when available), capsed and
 * added to the element.  If @created is non-NULL it is set to TRUE when a
 * new pad was made, FALSE when an existing one was returned. */
static GstMultipartPad *
gst_multipart_find_pad_by_mime (GstMultipartDemux * demux, gchar * mime,
    gboolean * created)
{
  GSList *l;
  GstMultipartPad *mp;
  GstPad *srcpad;
  gchar *padname;
  const gchar *capsname;
  GstCaps *padcaps;
  gchar *sid;
  GstEvent *evt;

  /* Existing pad with this mime type? */
  for (l = demux->srcpads; l != NULL; l = l->next) {
    GstMultipartPad *cand = (GstMultipartPad *) l->data;

    if (strcmp (cand->mime, mime) == 0) {
      if (created)
        *created = FALSE;
      return cand;
    }
  }

  /* Not found: create a pad for this mime type */
  mp = g_new0 (GstMultipartPad, 1);

  GST_DEBUG_OBJECT (demux, "creating pad with mime: %s", mime);

  padname = g_strdup_printf ("src_%u", demux->numpads);
  srcpad =
      gst_pad_new_from_static_template (&multipart_demux_src_template_factory,
      padname);
  g_free (padname);

  mp->pad = srcpad;
  mp->mime = g_strdup (mime);
  mp->last_ret = GST_FLOW_OK;
  mp->last_ts = GST_CLOCK_TIME_NONE;
  mp->discont = TRUE;

  demux->srcpads = g_slist_prepend (demux->srcpads, mp);
  demux->numpads++;

  gst_pad_use_fixed_caps (srcpad);
  gst_pad_set_active (srcpad, TRUE);

  /* prepare and send stream-start */
  if (!demux->have_group_id) {
    evt = gst_pad_get_sticky_event (demux->sinkpad, GST_EVENT_STREAM_START, 0);
    if (evt) {
      demux->have_group_id = gst_event_parse_group_id (evt, &demux->group_id);
      gst_event_unref (evt);
    } else if (!demux->have_group_id) {
      demux->have_group_id = TRUE;
      demux->group_id = gst_util_group_id_next ();
    }
  }

  sid = gst_pad_create_stream_id (srcpad, GST_ELEMENT_CAST (demux),
      demux->mime_type);
  evt = gst_event_new_stream_start (sid);
  if (demux->have_group_id)
    gst_event_set_group_id (evt, demux->group_id);
  /* Store only: the event is pushed automatically once data flows */
  gst_pad_store_sticky_event (srcpad, evt);
  g_free (sid);
  gst_event_unref (evt);

  /* take the mime type, convert it to the caps name */
  capsname = gst_multipart_demux_get_gstname (demux, mime);
  padcaps = gst_caps_from_string (capsname);
  GST_DEBUG_OBJECT (demux, "caps for pad: %s", capsname);
  gst_pad_set_caps (srcpad, padcaps);
  gst_element_add_pad (GST_ELEMENT_CAST (demux), srcpad);
  gst_caps_unref (padcaps);

  if (created)
    *created = TRUE;

  if (demux->singleStream)
    gst_element_no_more_pads (GST_ELEMENT_CAST (demux));

  return mp;
}
/* Parses the RealAudio (".ra") file header waiting in the adapter,
 * configures the demuxer fields (sample rate, channels, fourcc, byte rate,
 * tags, ...), then creates the source pad, pushes stream-start, sets caps
 * and adds the pad to the element.
 *
 * Returns GST_FLOW_OK both when more data is needed and on success;
 * GST_FLOW_ERROR on a malformed or unsupported header.
 *
 * Fixes vs. previous revision: validate packet_size/sample_rate before
 * creating the DNET caps (the caps were leaked on the broken_file path),
 * unmap the adapter on both error paths (it was left mapped), and drop a
 * duplicated need_newsegment assignment. */
static GstFlowReturn
gst_real_audio_demux_parse_header (GstRealAudioDemux * demux)
{
  const guint8 *data;
  gchar *codec_name = NULL;
  GstCaps *caps = NULL;
  GstEvent *event;
  gchar *stream_id;
  guint avail;

  g_assert (demux->ra_version == 4 || demux->ra_version == 3);

  avail = gst_adapter_available (demux->adapter);
  if (avail < 16)
    return GST_FLOW_OK;         /* need more data */

  if (!gst_real_audio_demux_get_data_offset_from_header (demux))
    return GST_FLOW_ERROR;      /* shouldn't happen */

  GST_DEBUG_OBJECT (demux, "data_offset = %u", demux->data_offset);

  /* data_offset counts from the file start; the adapter content starts
   * 6 bytes in, hence the +6 / -6 adjustments below */
  if (avail + 6 < demux->data_offset) {
    GST_DEBUG_OBJECT (demux, "Need %u bytes, but only %u available now",
        demux->data_offset - 6, avail);
    return GST_FLOW_OK;
  }

  data = gst_adapter_map (demux->adapter, demux->data_offset - 6);
  g_assert (data);

  switch (demux->ra_version) {
    case 3:
      /* Version 3 has a fixed layout: 14.4 codec, 8 kHz mono */
      demux->fourcc = GST_RM_AUD_14_4;
      demux->packet_size = 20;
      demux->sample_rate = 8000;
      demux->channels = 1;
      demux->sample_width = 16;
      demux->flavour = 1;
      demux->leaf_size = 0;
      demux->height = 0;
      break;
    case 4:
      demux->flavour = GST_READ_UINT16_BE (data + 16);
      /* demux->frame_size = GST_READ_UINT32_BE (data + 36); */
      demux->leaf_size = GST_READ_UINT16_BE (data + 38);
      demux->height = GST_READ_UINT16_BE (data + 34);
      demux->packet_size = GST_READ_UINT32_BE (data + 18);
      demux->sample_rate = GST_READ_UINT16_BE (data + 42);
      demux->sample_width = GST_READ_UINT16_BE (data + 46);
      demux->channels = GST_READ_UINT16_BE (data + 48);
      demux->fourcc = GST_READ_UINT32_LE (data + 56);
      demux->pending_tags = gst_rm_utils_read_tags (data + 63,
          demux->data_offset - 63, gst_rm_utils_read_string8);
      if (demux->pending_tags)
        gst_tag_list_set_scope (demux->pending_tags, GST_TAG_SCOPE_GLOBAL);
      break;
    default:
      g_assert_not_reached ();
#if 0
    case 5:
      demux->flavour = GST_READ_UINT16_BE (data + 16);
      /* demux->frame_size = GST_READ_UINT32_BE (data + 36); */
      demux->leaf_size = GST_READ_UINT16_BE (data + 38);
      demux->height = GST_READ_UINT16_BE (data + 34);

      demux->sample_rate = GST_READ_UINT16_BE (data + 48);
      demux->sample_width = GST_READ_UINT16_BE (data + 52);
      demux->n_channels = GST_READ_UINT16_BE (data + 54);
      demux->fourcc = RMDEMUX_FOURCC_GET (data + 60);
      break;
#endif
  }

  GST_INFO_OBJECT (demux, "packet_size = %u", demux->packet_size);
  GST_INFO_OBJECT (demux, "sample_rate = %u", demux->sample_rate);
  GST_INFO_OBJECT (demux, "sample_width = %u", demux->sample_width);
  GST_INFO_OBJECT (demux, "channels = %u", demux->channels);
  GST_INFO_OBJECT (demux, "fourcc = '%" GST_FOURCC_FORMAT "' (%08X)",
      GST_FOURCC_ARGS (demux->fourcc), demux->fourcc);

  switch (demux->fourcc) {
    case GST_RM_AUD_14_4:
      caps = gst_caps_new_simple ("audio/x-pn-realaudio", "raversion",
          G_TYPE_INT, 1, NULL);
      demux->byterate_num = 1000;
      demux->byterate_denom = 1;
      break;

    case GST_RM_AUD_28_8:
      /* FIXME: needs descrambling */
      caps = gst_caps_new_simple ("audio/x-pn-realaudio", "raversion",
          G_TYPE_INT, 2, NULL);
      break;

    case GST_RM_AUD_DNET:
      /* Validate before creating the caps so the error path doesn't leak */
      if (demux->packet_size == 0 || demux->sample_rate == 0)
        goto broken_file;
      caps = gst_caps_new_simple ("audio/x-ac3", "rate", G_TYPE_INT,
          demux->sample_rate, NULL);
      demux->byterate_num = demux->packet_size * demux->sample_rate;
      demux->byterate_denom = 1536;
      break;

      /* Sipro/ACELP.NET Voice Codec (MIME unknown) */
    case GST_RM_AUD_SIPR:
      caps = gst_caps_new_empty_simple ("audio/x-sipro");
      break;

    default:
      GST_WARNING_OBJECT (demux, "unknown fourcc %08X", demux->fourcc);
      break;
  }

  if (caps == NULL)
    goto unknown_fourcc;

  gst_caps_set_simple (caps,
      "flavor", G_TYPE_INT, demux->flavour,
      "rate", G_TYPE_INT, demux->sample_rate,
      "channels", G_TYPE_INT, demux->channels,
      "width", G_TYPE_INT, demux->sample_width,
      "leaf_size", G_TYPE_INT, demux->leaf_size,
      "packet_size", G_TYPE_INT, demux->packet_size,
      "height", G_TYPE_INT, demux->height, NULL);

  GST_INFO_OBJECT (demux, "Adding source pad, caps %" GST_PTR_FORMAT, caps);
  demux->srcpad = gst_pad_new_from_static_template (&src_template, "src");
  gst_pad_set_event_function (demux->srcpad,
      GST_DEBUG_FUNCPTR (gst_real_audio_demux_src_event));
  gst_pad_set_query_function (demux->srcpad,
      GST_DEBUG_FUNCPTR (gst_real_audio_demux_src_query));
  gst_pad_set_active (demux->srcpad, TRUE);
  gst_pad_use_fixed_caps (demux->srcpad);

  stream_id =
      gst_pad_create_stream_id (demux->srcpad, GST_ELEMENT_CAST (demux), NULL);

  /* Reuse the upstream group id when available, otherwise allocate one */
  event =
      gst_pad_get_sticky_event (demux->sinkpad, GST_EVENT_STREAM_START, 0);
  if (event) {
    demux->have_group_id = gst_event_parse_group_id (event, &demux->group_id);
    gst_event_unref (event);
  } else if (!demux->have_group_id) {
    demux->have_group_id = TRUE;
    demux->group_id = gst_util_group_id_next ();
  }

  event = gst_event_new_stream_start (stream_id);
  if (demux->have_group_id)
    gst_event_set_group_id (event, demux->group_id);

  gst_pad_push_event (demux->srcpad, event);
  g_free (stream_id);

  gst_pad_set_caps (demux->srcpad, caps);
  codec_name = gst_pb_utils_get_codec_description (caps);
  gst_caps_unref (caps);

  gst_element_add_pad (GST_ELEMENT (demux), demux->srcpad);

  if (demux->byterate_num > 0 && demux->byterate_denom > 0) {
    GstFormat bformat = GST_FORMAT_BYTES;
    gint64 size_bytes = 0;

    GST_INFO_OBJECT (demux, "byte rate = %u/%u = %u bytes/sec",
        demux->byterate_num, demux->byterate_denom,
        demux->byterate_num / demux->byterate_denom);

    if (gst_pad_peer_query_duration (demux->sinkpad, bformat, &size_bytes)) {
      demux->duration =
          gst_real_demux_get_timestamp_from_offset (demux, size_bytes);
      demux->upstream_size = size_bytes;
      GST_INFO_OBJECT (demux, "upstream_size = %" G_GUINT64_FORMAT,
          demux->upstream_size);
      GST_INFO_OBJECT (demux, "duration = %" GST_TIME_FORMAT,
          GST_TIME_ARGS (demux->duration));
    }
  }

  demux->need_newsegment = TRUE;

  if (codec_name) {
    if (demux->pending_tags == NULL) {
      demux->pending_tags = gst_tag_list_new_empty ();
      gst_tag_list_set_scope (demux->pending_tags, GST_TAG_SCOPE_GLOBAL);
    }
    gst_tag_list_add (demux->pending_tags, GST_TAG_MERGE_REPLACE,
        GST_TAG_AUDIO_CODEC, codec_name, NULL);
    g_free (codec_name);
  }

  gst_adapter_unmap (demux->adapter);
  gst_adapter_flush (demux->adapter, demux->data_offset - 6);

  demux->state = REAL_AUDIO_DEMUX_STATE_DATA;

  return GST_FLOW_OK;

/* ERRORS */
unknown_fourcc:
  {
    /* release the mapping taken above before bailing out */
    gst_adapter_unmap (demux->adapter);
    GST_ELEMENT_ERROR (GST_ELEMENT (demux), STREAM, DECODE, (NULL),
        ("Unknown fourcc '%" GST_FOURCC_FORMAT "'",
            GST_FOURCC_ARGS (demux->fourcc)));
    return GST_FLOW_ERROR;
  }
broken_file:
  {
    gst_adapter_unmap (demux->adapter);
    GST_ELEMENT_ERROR (GST_ELEMENT (demux), STREAM, DECODE, (NULL),
        ("Broken file - invalid sample_rate or other header value"));
    return GST_FLOW_ERROR;
  }
}
/* Extracts the closed-caption data attached to @buf as @meta into a new
 * buffer and pushes it on the caption source pad.  The caption pad is
 * created lazily on the first meta seen; its caps are derived from the
 * meta's caption type, and sticky stream-start (with the srcpad's group-id
 * when present), segment and tag events are carried over from the video
 * srcpad.  Returns the flow-combined result of the push; a change of
 * caption type after the pad exists yields GST_FLOW_NOT_NEGOTIATED. */
static GstFlowReturn
gst_cc_extractor_handle_meta (GstCCExtractor * filter, GstBuffer * buf,
    GstVideoCaptionMeta * meta)
{
  GstBuffer *outbuf = NULL;
  GstEvent *event;
  gchar *captionid;
  GstFlowReturn flow;

  GST_DEBUG_OBJECT (filter, "Handling meta");

  /* Check if the meta type matches the configured one */
  if (filter->captionpad != NULL && meta->caption_type != filter->caption_type) {
    GST_ERROR_OBJECT (filter,
        "GstVideoCaptionMeta type changed, Not handled currently");
    flow = GST_FLOW_NOT_NEGOTIATED;
    goto out;
  }

  if (filter->captionpad == NULL) {
    GstCaps *caption_caps = NULL;
    GstEvent *stream_event;

    GST_DEBUG_OBJECT (filter, "Creating new caption pad");

    /* Caption type -> caps; unknown types leave caption_caps NULL */
    switch (meta->caption_type) {
      case GST_VIDEO_CAPTION_TYPE_CEA608_RAW:
        caption_caps =
            gst_caps_from_string ("closedcaption/x-cea-608,format=(string)raw");
        break;
      case GST_VIDEO_CAPTION_TYPE_CEA608_IN_CEA708_RAW:
        caption_caps =
            gst_caps_from_string
            ("closedcaption/x-cea-608,format=(string)cc_data");
        break;
      case GST_VIDEO_CAPTION_TYPE_CEA708_RAW:
        caption_caps =
            gst_caps_from_string
            ("closedcaption/x-cea-708,format=(string)cc_data");
        break;
      case GST_VIDEO_CAPTION_TYPE_CEA708_CDP:
        caption_caps =
            gst_caps_from_string ("closedcaption/x-cea-708,format=(string)cdp");
        break;
      default:
        break;
    }
    if (caption_caps == NULL) {
      GST_ERROR_OBJECT (filter, "Unknown/invalid caption type");
      /* NOTE: returns directly (not via 'out:') because no caption pad
       * exists yet for the flow combiner to update */
      return GST_FLOW_NOT_NEGOTIATED;
    }

    /* Create the caption pad and set the caps */
    filter->captionpad =
        gst_pad_new_from_static_template (&captiontemplate, "caption");
    gst_pad_set_iterate_internal_links_function (filter->sinkpad,
        GST_DEBUG_FUNCPTR (gst_cc_extractor_iterate_internal_links));
    gst_pad_set_active (filter->captionpad, TRUE);
    gst_element_add_pad (GST_ELEMENT (filter), filter->captionpad);
    gst_flow_combiner_add_pad (filter->combiner, filter->captionpad);

    captionid =
        gst_pad_create_stream_id (filter->captionpad, (GstElement *) filter,
        "caption");
    stream_event = gst_event_new_stream_start (captionid);
    g_free (captionid);

    /* FIXME : Create a proper stream-id */
    /* Propagate the group-id from the video srcpad's stream-start, if any */
    if ((event =
            gst_pad_get_sticky_event (filter->srcpad, GST_EVENT_STREAM_START,
                0))) {
      guint group_id;

      if (gst_event_parse_group_id (event, &group_id))
        gst_event_set_group_id (stream_event, group_id);
      gst_event_unref (event);
    }
    gst_pad_push_event (filter->captionpad, stream_event);
    gst_pad_set_caps (filter->captionpad, caption_caps);
    gst_caps_unref (caption_caps);

    /* Carry over sticky events */
    if ((event =
            gst_pad_get_sticky_event (filter->srcpad, GST_EVENT_SEGMENT, 0)))
      gst_pad_push_event (filter->captionpad, event);
    if ((event = gst_pad_get_sticky_event (filter->srcpad, GST_EVENT_TAG, 0)))
      gst_pad_push_event (filter->captionpad, event);

    filter->caption_type = meta->caption_type;
  }

  GST_DEBUG_OBJECT (filter,
      "Creating new buffer of size %" G_GSIZE_FORMAT " bytes", meta->size);
  /* Extract caption data into new buffer with identical buffer timestamps */
  outbuf = gst_buffer_new_allocate (NULL, meta->size, NULL);
  gst_buffer_fill (outbuf, 0, meta->data, meta->size);
  GST_BUFFER_PTS (outbuf) = GST_BUFFER_PTS (buf);
  GST_BUFFER_DTS (outbuf) = GST_BUFFER_DTS (buf);
  GST_BUFFER_DURATION (outbuf) = GST_BUFFER_DURATION (buf);

  /* We don't really care about the flow return */
  flow = gst_pad_push (filter->captionpad, outbuf);

out:
  /* Set flow return on pad and return combined value */
  return gst_flow_combiner_update_pad_flow (filter->combiner,
      filter->captionpad, flow);
}