/* FIXME, the duration query should reflect how long you will produce
 * data, that is the amount of stream time until you will emit EOS.
 *
 * For synchronized mixing this is always the max of all the durations
 * of upstream since we emit EOS when all of them finished.
 *
 * We don't do synchronized mixing so this really depends on where the
 * streams were punched in and what their relative offsets are against
 * each other which we can get from the first timestamps we see.
 *
 * When we add a new stream (or remove a stream) the duration might
 * also become invalid again and we need to post a new DURATION
 * message to notify this fact to the parent.
 * For now we take the max of all the upstream elements so the simple
 * cases work at least somewhat.
 */
static gboolean
gst_audio_aggregator_query_duration (GstAudioAggregator * aagg,
    GstQuery * query)
{
  gint64 max;
  gboolean res;
  GstFormat format;
  GstIterator *it;
  gboolean done;
  GValue item = { 0, };

  /* parse format */
  gst_query_parse_duration (query, &format, NULL);

  max = -1;
  res = TRUE;
  done = FALSE;

  it = gst_element_iterate_sink_pads (GST_ELEMENT_CAST (aagg));
  while (!done) {
    GstIteratorResult ires;

    ires = gst_iterator_next (it, &item);
    switch (ires) {
      case GST_ITERATOR_DONE:
        done = TRUE;
        break;
      case GST_ITERATOR_OK:
      {
        GstPad *pad = g_value_get_object (&item);
        gint64 duration;

        /* ask sink peer for duration */
        res &= gst_pad_peer_query_duration (pad, format, &duration);
        /* take max from all valid return values */
        if (res) {
          /* valid unknown length, stop searching */
          if (duration == -1) {
            max = duration;
            done = TRUE;
          }
          /* else see if bigger than current max */
          else if (duration > max)
            max = duration;
        }
        g_value_reset (&item);
        break;
      }
      case GST_ITERATOR_RESYNC:
        max = -1;
        res = TRUE;
        gst_iterator_resync (it);
        break;
      default:
        res = FALSE;
        done = TRUE;
        break;
    }
  }
  g_value_unset (&item);
  gst_iterator_free (it);

  if (res) {
    /* and store the max */
    GST_DEBUG_OBJECT (aagg, "Total duration in format %s: %" GST_TIME_FORMAT,
        gst_format_get_name (format), GST_TIME_ARGS (max));
    gst_query_set_duration (query, format, max);
  }

  return res;
}
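/* Illustrative only (not part of the element): a minimal sketch of how an
 * application could exercise the duration handling above through the public
 * API.  The element pointer and the use of GST_FORMAT_TIME are assumptions
 * made for this example. */
static void
example_print_duration (GstElement * aagg)
{
  gint64 dur = -1;

  /* for sink-pad aggregating elements this ends up in a handler like the one
   * above; -1 is reported while the duration is still unknown */
  if (gst_element_query_duration (aagg, GST_FORMAT_TIME, &dur))
    g_print ("duration: %" GST_TIME_FORMAT "\n", GST_TIME_ARGS (dur));
  else
    g_print ("duration query failed\n");
}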
static void
gst_raw_parse_loop (GstElement * element)
{
  GstRawParse *rp = GST_RAW_PARSE (element);
  GstRawParseClass *rp_class = GST_RAW_PARSE_GET_CLASS (rp);
  GstFlowReturn ret;
  GstBuffer *buffer;
  gint size;

  if (G_UNLIKELY (rp->push_stream_start)) {
    gchar *stream_id;
    GstEvent *event;

    stream_id =
        gst_pad_create_stream_id (rp->srcpad, GST_ELEMENT_CAST (rp), NULL);

    event = gst_event_new_stream_start (stream_id);
    gst_event_set_group_id (event, gst_util_group_id_next ());

    GST_DEBUG_OBJECT (rp, "Pushing STREAM_START");
    gst_pad_push_event (rp->srcpad, event);
    rp->push_stream_start = FALSE;
    g_free (stream_id);
  }

  if (!gst_raw_parse_set_src_caps (rp))
    goto no_caps;

  if (rp->start_segment) {
    GST_DEBUG_OBJECT (rp, "sending start segment");
    gst_pad_push_event (rp->srcpad, rp->start_segment);
    rp->start_segment = NULL;
  }

  if (rp_class->multiple_frames_per_buffer && rp->framesize < 4096)
    size = 4096 - (4096 % rp->framesize);
  else
    size = rp->framesize;

  if (rp->segment.rate >= 0) {
    if (rp->offset + size > rp->upstream_length) {
      GstFormat fmt = GST_FORMAT_BYTES;

      if (!gst_pad_peer_query_duration (rp->sinkpad, fmt,
              &rp->upstream_length)) {
        GST_WARNING_OBJECT (rp,
            "Could not get upstream duration, trying to pull frame by frame");
        size = rp->framesize;
      } else if (rp->upstream_length < rp->offset + rp->framesize) {
        ret = GST_FLOW_EOS;
        goto pause;
      } else if (rp->offset + size > rp->upstream_length) {
        size = rp->upstream_length - rp->offset;
        size -= size % rp->framesize;
      }
    }
  } else {
    if (rp->offset == 0) {
      ret = GST_FLOW_EOS;
      goto pause;
    } else if (rp->offset < size) {
      /* clamp to the bytes that remain before the start of the stream */
      size = rp->offset;
    }
    rp->offset -= size;
  }

  buffer = NULL;
  ret = gst_pad_pull_range (rp->sinkpad, rp->offset, size, &buffer);

  if (ret != GST_FLOW_OK) {
    GST_DEBUG_OBJECT (rp, "pull_range (%" G_GINT64_FORMAT ", %u) "
        "failed, flow: %s", rp->offset, size, gst_flow_get_name (ret));
    buffer = NULL;
    goto pause;
  }

  if (gst_buffer_get_size (buffer) < size) {
    GST_DEBUG_OBJECT (rp, "Short read at offset %" G_GINT64_FORMAT
        ", got only %" G_GSIZE_FORMAT " of %u bytes",
        rp->offset, gst_buffer_get_size (buffer), size);

    if (size > rp->framesize) {
      gst_buffer_set_size (buffer, gst_buffer_get_size (buffer) -
          gst_buffer_get_size (buffer) % rp->framesize);
    } else {
      gst_buffer_unref (buffer);
      buffer = NULL;
      ret = GST_FLOW_EOS;
      goto pause;
    }
  }

  ret = gst_raw_parse_push_buffer (rp, buffer);
  if (ret != GST_FLOW_OK)
    goto pause;

  return;

  /* ERRORS */
no_caps:
  {
    GST_ERROR_OBJECT (rp, "could not negotiate caps");
    ret = GST_FLOW_NOT_NEGOTIATED;
    goto pause;
  }
pause:
  {
    const gchar *reason = gst_flow_get_name (ret);

    GST_LOG_OBJECT (rp, "pausing task, reason %s", reason);
    gst_pad_pause_task (rp->sinkpad);
    if (ret == GST_FLOW_EOS) {
      if (rp->segment.flags & GST_SEEK_FLAG_SEGMENT) {
        GstClockTime stop;

        GST_LOG_OBJECT (rp, "Sending segment done");

        if ((stop = rp->segment.stop) == -1)
          stop = rp->segment.duration;

        gst_element_post_message (GST_ELEMENT_CAST (rp),
            gst_message_new_segment_done (GST_OBJECT_CAST (rp),
                rp->segment.format, stop));
        gst_pad_push_event (rp->srcpad,
            gst_event_new_segment_done (rp->segment.format, stop));
      } else {
        GST_LOG_OBJECT (rp, "Sending EOS, at end of stream");
        gst_pad_push_event (rp->srcpad, gst_event_new_eos ());
      }
    } else if (ret == GST_FLOW_NOT_LINKED || ret < GST_FLOW_EOS) {
      GST_ELEMENT_ERROR (rp, STREAM, FAILED,
          ("Internal data stream error."),
          ("stream stopped, reason %s", reason));
      gst_pad_push_event (rp->srcpad, gst_event_new_eos ());
    }
    return;
  }
}
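/* Illustrative only: the pull size above is rounded down to a whole number of
 * frames so pulled buffers never end mid-frame.  A standalone sketch of that
 * rounding; the values in the comment are hypothetical. */
static guint
example_round_down_to_frames (guint request, guint framesize)
{
  /* e.g. request = 4096, framesize = 1536 -> 4096 - (4096 % 1536) = 3072 */
  return request - (request % framesize);
}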
static gboolean gst_hls_demux_change_playlist (GstHLSDemux * demux, guint max_bitrate, gboolean * changed) { GList *previous_variant, *current_variant; gint old_bandwidth, new_bandwidth; GstAdaptiveDemux *adaptive_demux = GST_ADAPTIVE_DEMUX_CAST (demux); GstAdaptiveDemuxStream *stream; g_return_val_if_fail (adaptive_demux->streams != NULL, FALSE); stream = adaptive_demux->streams->data; previous_variant = demux->client->main->current_variant; current_variant = gst_m3u8_client_get_playlist_for_bitrate (demux->client, max_bitrate); GST_M3U8_CLIENT_LOCK (demux->client); retry_failover_protection: old_bandwidth = GST_M3U8 (previous_variant->data)->bandwidth; new_bandwidth = GST_M3U8 (current_variant->data)->bandwidth; /* Don't do anything else if the playlist is the same */ if (new_bandwidth == old_bandwidth) { GST_M3U8_CLIENT_UNLOCK (demux->client); return TRUE; } demux->client->main->current_variant = current_variant; GST_M3U8_CLIENT_UNLOCK (demux->client); gst_m3u8_client_set_current (demux->client, current_variant->data); GST_INFO_OBJECT (demux, "Client was on %dbps, max allowed is %dbps, switching" " to bitrate %dbps", old_bandwidth, max_bitrate, new_bandwidth); stream->discont = TRUE; if (gst_hls_demux_update_playlist (demux, FALSE, NULL)) { gchar *uri; gchar *main_uri; uri = gst_m3u8_client_get_current_uri (demux->client); main_uri = gst_m3u8_client_get_uri (demux->client); gst_element_post_message (GST_ELEMENT_CAST (demux), gst_message_new_element (GST_OBJECT_CAST (demux), gst_structure_new (GST_ADAPTIVE_DEMUX_STATISTICS_MESSAGE_NAME, "manifest-uri", G_TYPE_STRING, main_uri, "uri", G_TYPE_STRING, uri, "bitrate", G_TYPE_INT, new_bandwidth, NULL))); g_free (uri); g_free (main_uri); if (changed) *changed = TRUE; } else { GList *failover = NULL; GST_INFO_OBJECT (demux, "Unable to update playlist. Switching back"); GST_M3U8_CLIENT_LOCK (demux->client); failover = g_list_previous (current_variant); if (failover && new_bandwidth == GST_M3U8 (failover->data)->bandwidth) { current_variant = failover; goto retry_failover_protection; } demux->client->main->current_variant = previous_variant; GST_M3U8_CLIENT_UNLOCK (demux->client); gst_m3u8_client_set_current (demux->client, previous_variant->data); /* Try a lower bitrate (or stop if we just tried the lowest) */ if (GST_M3U8 (previous_variant->data)->iframe && new_bandwidth == GST_M3U8 (g_list_first (demux->client->main->iframe_lists)->data)-> bandwidth) return FALSE; else if (!GST_M3U8 (previous_variant->data)->iframe && new_bandwidth == GST_M3U8 (g_list_first (demux->client->main->lists)->data)->bandwidth) return FALSE; else return gst_hls_demux_change_playlist (demux, new_bandwidth - 1, changed); } /* Force typefinding since we might have changed media type */ demux->do_typefind = TRUE; return TRUE; }
static gboolean mpegtsmux_src_event (GstPad * pad, GstEvent * event) { MpegTsMux *mux = GST_MPEG_TSMUX (gst_pad_get_parent (pad)); gboolean res = TRUE; switch (GST_EVENT_TYPE (event)) { case GST_EVENT_CUSTOM_UPSTREAM: { GstIterator *iter; GstIteratorResult iter_ret; GstPad *sinkpad; GstClockTime running_time; gboolean all_headers, done; guint count; if (!gst_video_event_is_force_key_unit (event)) break; gst_video_event_parse_upstream_force_key_unit (event, &running_time, &all_headers, &count); GST_INFO_OBJECT (mux, "received upstream force-key-unit event, " "seqnum %d running_time %" GST_TIME_FORMAT " all_headers %d count %d", gst_event_get_seqnum (event), GST_TIME_ARGS (running_time), all_headers, count); if (!all_headers) break; mux->pending_key_unit_ts = running_time; gst_event_replace (&mux->force_key_unit_event, event); iter = gst_element_iterate_sink_pads (GST_ELEMENT_CAST (mux)); done = FALSE; while (!done) { gboolean res = FALSE, tmp; iter_ret = gst_iterator_next (iter, (gpointer *) & sinkpad); switch (iter_ret) { case GST_ITERATOR_DONE: done = TRUE; break; case GST_ITERATOR_OK: GST_INFO_OBJECT (mux, "forwarding to %s", gst_pad_get_name (sinkpad)); tmp = gst_pad_push_event (sinkpad, gst_event_ref (event)); GST_INFO_OBJECT (mux, "result %d", tmp); /* succeed if at least one pad succeeds */ res |= tmp; gst_object_unref (sinkpad); break; case GST_ITERATOR_ERROR: done = TRUE; break; case GST_ITERATOR_RESYNC: break; } } gst_event_unref (event); break; } default: res = gst_pad_event_default (pad, event); break; } gst_object_unref (mux); return res; }
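/* Illustrative only: a sketch of how an application (or downstream element)
 * could produce the upstream force-key-unit event handled above.  Requires
 * gst/video/video.h; the element pointer and the count value are
 * hypothetical. */
static gboolean
example_request_key_unit (GstElement * pipeline, GstClockTime running_time)
{
  GstEvent *event;

  /* all_headers = TRUE also asks the muxer to resend its stream headers */
  event = gst_video_event_new_upstream_force_key_unit (running_time, TRUE, 1);
  return gst_element_send_event (pipeline, event);
}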
static void paused_mode_task (gpointer data) { GstMimEnc *mimenc = GST_MIMENC (data); GstClockTime now; GstClockTimeDiff diff; GstFlowReturn ret; if (!GST_ELEMENT_CLOCK (mimenc)) { GST_ERROR_OBJECT (mimenc, "Element has no clock"); gst_pad_pause_task (mimenc->srcpad); return; } GST_OBJECT_LOCK (mimenc); if (mimenc->stop_paused_mode) { GST_OBJECT_UNLOCK (mimenc); goto stop_task; } now = gst_clock_get_time (GST_ELEMENT_CLOCK (mimenc)); diff = now - GST_ELEMENT_CAST (mimenc)->base_time - mimenc->last_buffer; if (diff < 0) diff = 0; if (diff > 3.95 * GST_SECOND) { GstBuffer *buffer = gst_mimenc_create_tcp_header (mimenc, 0, mimenc->last_buffer + 4 * GST_SECOND, FALSE, TRUE); GstEvent *event = NULL; mimenc->last_buffer += 4 * GST_SECOND; if (mimenc->need_newsegment) { event = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0, -1, 0); mimenc->need_newsegment = FALSE; } GST_OBJECT_UNLOCK (mimenc); GST_LOG_OBJECT (mimenc, "Haven't had an incoming buffer in 4 seconds," " sending out a pause frame"); if (event) { if (!gst_pad_push_event (mimenc->srcpad, event)) GST_WARNING_OBJECT (mimenc, "Failed to push NEWSEGMENT event"); } ret = gst_pad_push (mimenc->srcpad, buffer); if (ret < 0) { GST_WARNING_OBJECT (mimenc, "Error pushing paused header: %s", gst_flow_get_name (ret)); goto stop_task; } } else { GstClockTime next_stop; GstClockID id; next_stop = now + (4 * GST_SECOND - MIN (diff, 4 * GST_SECOND)); id = gst_clock_new_single_shot_id (GST_ELEMENT_CLOCK (mimenc), next_stop); if (mimenc->stop_paused_mode) { GST_OBJECT_UNLOCK (mimenc); goto stop_task; } mimenc->clock_id = id; GST_OBJECT_UNLOCK (mimenc); gst_clock_id_wait (id, NULL); GST_OBJECT_LOCK (mimenc); mimenc->clock_id = NULL; GST_OBJECT_UNLOCK (mimenc); gst_clock_id_unref (id); } return; stop_task: gst_pad_pause_task (mimenc->srcpad); }
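/* Illustrative only: the pause-frame timing used above as a standalone
 * sketch.  A pause frame is emitted roughly every 4 seconds without input;
 * the helper name and its parameters are hypothetical. */
static GstClockTime
example_next_pause_frame_time (GstClockTime now, GstClockTime base_time,
    GstClockTime last_buffer)
{
  GstClockTimeDiff diff = now - base_time - last_buffer;

  if (diff < 0)
    diff = 0;

  /* e.g. diff = 1.5 s -> wait another 2.5 s before sending the pause frame */
  return now + (4 * GST_SECOND - MIN (diff, 4 * GST_SECOND));
}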
static GstStateChangeReturn gst_mimenc_change_state (GstElement * element, GstStateChange transition) { GstMimEnc *mimenc = GST_MIMENC (element); GstStateChangeReturn ret; gboolean paused_mode; switch (transition) { case GST_STATE_CHANGE_READY_TO_NULL: GST_OBJECT_LOCK (element); if (mimenc->enc != NULL) { mimic_close (mimenc->enc); mimenc->enc = NULL; mimenc->buffer_size = -1; mimenc->frames = 0; } GST_OBJECT_UNLOCK (element); break; case GST_STATE_CHANGE_READY_TO_PAUSED: GST_OBJECT_LOCK (mimenc); gst_segment_init (&mimenc->segment, GST_FORMAT_UNDEFINED); mimenc->last_buffer = GST_CLOCK_TIME_NONE; mimenc->need_newsegment = TRUE; GST_OBJECT_UNLOCK (mimenc); break; case GST_STATE_CHANGE_PLAYING_TO_PAUSED: GST_OBJECT_LOCK (mimenc); if (mimenc->clock_id) gst_clock_id_unschedule (mimenc->clock_id); mimenc->stop_paused_mode = TRUE; GST_OBJECT_UNLOCK (mimenc); gst_pad_pause_task (mimenc->srcpad); break; default: break; } ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition); if (ret == GST_STATE_CHANGE_FAILURE) return ret; switch (transition) { case GST_STATE_CHANGE_PAUSED_TO_PLAYING: GST_OBJECT_LOCK (mimenc); mimenc->stop_paused_mode = FALSE; paused_mode = mimenc->paused_mode; if (paused_mode) { if (!GST_ELEMENT_CLOCK (mimenc)) { GST_OBJECT_UNLOCK (mimenc); GST_ELEMENT_ERROR (mimenc, RESOURCE, FAILED, ("Using paused-mode requires a clock, but no clock was provided" " to the element"), (NULL)); return GST_STATE_CHANGE_FAILURE; } if (mimenc->last_buffer == GST_CLOCK_TIME_NONE) mimenc->last_buffer = gst_clock_get_time (GST_ELEMENT_CLOCK (mimenc)) - GST_ELEMENT_CAST (mimenc)->base_time; } GST_OBJECT_UNLOCK (mimenc); if (paused_mode) { if (!gst_pad_start_task (mimenc->srcpad, paused_mode_task, mimenc)) { ret = GST_STATE_CHANGE_FAILURE; GST_ERROR_OBJECT (mimenc, "Can not start task"); } } break; default: break; } return ret; }
static GstStateChangeReturn gst_decklink_audio_src_change_state (GstElement * element, GstStateChange transition) { GstDecklinkAudioSrc *self = GST_DECKLINK_AUDIO_SRC_CAST (element); GstStateChangeReturn ret; switch (transition) { case GST_STATE_CHANGE_NULL_TO_READY: if (!gst_decklink_audio_src_open (self)) { ret = GST_STATE_CHANGE_FAILURE; goto out; } break; case GST_STATE_CHANGE_READY_TO_PAUSED:{ GstElement *videosrc = NULL; // Check if there is a video src for this input too and if it // is actually in the same pipeline g_mutex_lock (&self->input->lock); if (self->input->videosrc) videosrc = GST_ELEMENT_CAST (gst_object_ref (self->input->videosrc)); g_mutex_unlock (&self->input->lock); if (!videosrc) { GST_ELEMENT_ERROR (self, STREAM, FAILED, (NULL), ("Audio src needs a video src for its operation")); ret = GST_STATE_CHANGE_FAILURE; goto out; } // FIXME: This causes deadlocks sometimes #if 0 else if (!in_same_pipeline (GST_ELEMENT_CAST (self), videosrc)) { GST_ELEMENT_ERROR (self, STREAM, FAILED, (NULL), ("Audio src and video src need to be in the same pipeline")); ret = GST_STATE_CHANGE_FAILURE; gst_object_unref (videosrc); goto out; } #endif if (videosrc) gst_object_unref (videosrc); self->flushing = FALSE; self->next_offset = -1; break; } default: break; } ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition); if (ret == GST_STATE_CHANGE_FAILURE) return ret; switch (transition) { case GST_STATE_CHANGE_PAUSED_TO_READY: gst_decklink_audio_src_stop (self); break; case GST_STATE_CHANGE_READY_TO_NULL: gst_decklink_audio_src_close (self); break; default: break; } out: return ret; }
static GstFlowReturn gst_udpsrc_create (GstPushSrc * psrc, GstBuffer ** buf) { GstUDPSrc *udpsrc; GstBuffer *outbuf = NULL; GSocketAddress *saddr = NULL; GSocketAddress **p_saddr; gint flags = G_SOCKET_MSG_NONE; gboolean try_again; GError *err = NULL; gssize res; gsize offset; udpsrc = GST_UDPSRC_CAST (psrc); if (!gst_udpsrc_ensure_mem (udpsrc)) goto memory_alloc_error; /* Retrieve sender address unless we've been configured not to do so */ p_saddr = (udpsrc->retrieve_sender_address) ? &saddr : NULL; retry: do { gint64 timeout; try_again = FALSE; if (udpsrc->timeout) timeout = udpsrc->timeout / 1000; else timeout = -1; GST_LOG_OBJECT (udpsrc, "doing select, timeout %" G_GINT64_FORMAT, timeout); if (!g_socket_condition_timed_wait (udpsrc->used_socket, G_IO_IN | G_IO_PRI, timeout, udpsrc->cancellable, &err)) { if (g_error_matches (err, G_IO_ERROR, G_IO_ERROR_BUSY) || g_error_matches (err, G_IO_ERROR, G_IO_ERROR_CANCELLED)) { goto stopped; } else if (g_error_matches (err, G_IO_ERROR, G_IO_ERROR_TIMED_OUT)) { g_clear_error (&err); /* timeout, post element message */ gst_element_post_message (GST_ELEMENT_CAST (udpsrc), gst_message_new_element (GST_OBJECT_CAST (udpsrc), gst_structure_new ("GstUDPSrcTimeout", "timeout", G_TYPE_UINT64, udpsrc->timeout, NULL))); } else { goto select_error; } try_again = TRUE; } } while (G_UNLIKELY (try_again)); if (saddr != NULL) { g_object_unref (saddr); saddr = NULL; } res = g_socket_receive_message (udpsrc->used_socket, p_saddr, udpsrc->vec, 2, NULL, NULL, &flags, udpsrc->cancellable, &err); if (G_UNLIKELY (res < 0)) { /* G_IO_ERROR_HOST_UNREACHABLE for a UDP socket means that a packet sent * with udpsink generated a "port unreachable" ICMP response. We ignore * that and try again. * On Windows we get G_IO_ERROR_CONNECTION_CLOSED instead */ #if GLIB_CHECK_VERSION(2,44,0) if (g_error_matches (err, G_IO_ERROR, G_IO_ERROR_HOST_UNREACHABLE) || g_error_matches (err, G_IO_ERROR, G_IO_ERROR_CONNECTION_CLOSED)) { #else if (g_error_matches (err, G_IO_ERROR, G_IO_ERROR_HOST_UNREACHABLE)) { #endif g_clear_error (&err); goto retry; } goto receive_error; } /* remember maximum packet size */ if (res > udpsrc->max_size) udpsrc->max_size = res; outbuf = gst_buffer_new (); /* append first memory chunk to buffer */ gst_buffer_append_memory (outbuf, udpsrc->mem); /* if the packet didn't fit into the first chunk, add second one as well */ if (res > udpsrc->map.size) { gst_buffer_append_memory (outbuf, udpsrc->mem_max); gst_memory_unmap (udpsrc->mem_max, &udpsrc->map_max); udpsrc->vec[1].buffer = NULL; udpsrc->vec[1].size = 0; udpsrc->mem_max = NULL; } /* make sure we allocate a new chunk next time (we do this only here because * we look at map.size to see if the second memory chunk is needed above) */ gst_memory_unmap (udpsrc->mem, &udpsrc->map); udpsrc->vec[0].buffer = NULL; udpsrc->vec[0].size = 0; udpsrc->mem = NULL; offset = udpsrc->skip_first_bytes; if (G_UNLIKELY (offset > 0 && res < offset)) goto skip_error; gst_buffer_resize (outbuf, offset, res - offset); /* use buffer metadata so receivers can also track the address */ if (saddr) { gst_buffer_add_net_address_meta (outbuf, saddr); g_object_unref (saddr); saddr = NULL; } GST_LOG_OBJECT (udpsrc, "read packet of %d bytes", (int) res); *buf = GST_BUFFER_CAST (outbuf); return GST_FLOW_OK; /* ERRORS */ memory_alloc_error: { GST_ELEMENT_ERROR (udpsrc, RESOURCE, READ, (NULL), ("Failed to allocate or map memory")); return GST_FLOW_ERROR; } select_error: { GST_ELEMENT_ERROR (udpsrc, RESOURCE, READ, (NULL), ("select error: %s", 
        err->message));
    g_clear_error (&err);
    return GST_FLOW_ERROR;
  }
stopped:
  {
    GST_DEBUG ("stop called");
    g_clear_error (&err);
    return GST_FLOW_FLUSHING;
  }
receive_error:
  {
    if (g_error_matches (err, G_IO_ERROR, G_IO_ERROR_BUSY) ||
        g_error_matches (err, G_IO_ERROR, G_IO_ERROR_CANCELLED)) {
      g_clear_error (&err);
      return GST_FLOW_FLUSHING;
    } else {
      GST_ELEMENT_ERROR (udpsrc, RESOURCE, READ, (NULL),
          ("receive error %" G_GSSIZE_FORMAT ": %s", res, err->message));
      g_clear_error (&err);
      return GST_FLOW_ERROR;
    }
  }
skip_error:
  {
    gst_buffer_unref (outbuf);
    GST_ELEMENT_ERROR (udpsrc, STREAM, DECODE, (NULL),
        ("UDP buffer too small to skip header"));
    return GST_FLOW_ERROR;
  }
}

static gboolean
gst_udpsrc_set_uri (GstUDPSrc * src, const gchar * uri, GError ** error)
{
  gchar *address;
  guint16 port;

  if (!gst_udp_parse_uri (uri, &address, &port))
    goto wrong_uri;

  if (port == (guint16) - 1)
    port = UDP_DEFAULT_PORT;

  g_free (src->address);
  src->address = address;
  src->port = port;

  g_free (src->uri);
  src->uri = g_strdup (uri);

  return TRUE;

  /* ERRORS */
wrong_uri:
  {
    GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL),
        ("error parsing uri %s", uri));
    g_set_error_literal (error, GST_URI_ERROR, GST_URI_ERROR_BAD_URI,
        "Could not parse UDP URI");
    return FALSE;
  }
}
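/* Illustrative only: the timeout passed to g_socket_condition_timed_wait()
 * above is in microseconds, while the element's timeout property is stored in
 * nanoseconds; -1 waits forever.  A standalone sketch of that conversion. */
static gint64
example_socket_wait_timeout (guint64 timeout_ns)
{
  /* e.g. timeout_ns = 5 * GST_SECOND -> 5000000 microseconds */
  return timeout_ns ? (gint64) (timeout_ns / 1000) : -1;
}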
static GstFlowReturn vorbis_handle_comment_packet (GstVorbisDec * vd, ogg_packet * packet) { guint bitrate = 0; gchar *encoder = NULL; GstTagList *list, *old_list; GstBuffer *buf; GST_DEBUG_OBJECT (vd, "parsing comment packet"); buf = gst_buffer_new (); GST_BUFFER_DATA (buf) = gst_ogg_packet_data (packet); GST_BUFFER_SIZE (buf) = gst_ogg_packet_size (packet); list = gst_tag_list_from_vorbiscomment_buffer (buf, (guint8 *) "\003vorbis", 7, &encoder); old_list = vd->taglist; vd->taglist = gst_tag_list_merge (vd->taglist, list, GST_TAG_MERGE_REPLACE); if (old_list) gst_tag_list_free (old_list); gst_tag_list_free (list); gst_buffer_unref (buf); if (!vd->taglist) { GST_ERROR_OBJECT (vd, "couldn't decode comments"); vd->taglist = gst_tag_list_new (); } if (encoder) { if (encoder[0]) gst_tag_list_add (vd->taglist, GST_TAG_MERGE_REPLACE, GST_TAG_ENCODER, encoder, NULL); g_free (encoder); } gst_tag_list_add (vd->taglist, GST_TAG_MERGE_REPLACE, GST_TAG_ENCODER_VERSION, vd->vi.version, GST_TAG_AUDIO_CODEC, "Vorbis", NULL); if (vd->vi.bitrate_nominal > 0 && vd->vi.bitrate_nominal <= 0x7FFFFFFF) { gst_tag_list_add (vd->taglist, GST_TAG_MERGE_REPLACE, GST_TAG_NOMINAL_BITRATE, (guint) vd->vi.bitrate_nominal, NULL); bitrate = vd->vi.bitrate_nominal; } if (vd->vi.bitrate_upper > 0 && vd->vi.bitrate_upper <= 0x7FFFFFFF) { gst_tag_list_add (vd->taglist, GST_TAG_MERGE_REPLACE, GST_TAG_MAXIMUM_BITRATE, (guint) vd->vi.bitrate_upper, NULL); if (!bitrate) bitrate = vd->vi.bitrate_upper; } if (vd->vi.bitrate_lower > 0 && vd->vi.bitrate_lower <= 0x7FFFFFFF) { gst_tag_list_add (vd->taglist, GST_TAG_MERGE_REPLACE, GST_TAG_MINIMUM_BITRATE, (guint) vd->vi.bitrate_lower, NULL); if (!bitrate) bitrate = vd->vi.bitrate_lower; } if (bitrate) { gst_tag_list_add (vd->taglist, GST_TAG_MERGE_REPLACE, GST_TAG_BITRATE, (guint) bitrate, NULL); } if (vd->initialized) { gst_element_found_tags_for_pad (GST_ELEMENT_CAST (vd), vd->srcpad, vd->taglist); vd->taglist = NULL; } else { /* Only post them as messages for the time being. * * They will be pushed on the pad once the decoder is initialized */ gst_element_post_message (GST_ELEMENT_CAST (vd), gst_message_new_tag (GST_OBJECT (vd), gst_tag_list_copy (vd->taglist))); } return GST_FLOW_OK; }
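/* Illustrative only: a sketch (using the same 0.10-era tag API as the code
 * above) of the merge-and-replace pattern used for the decoder tag list.  The
 * helper name is hypothetical. */
static GstTagList *
example_merge_tags (GstTagList * current, GstTagList * parsed)
{
  GstTagList *merged;

  /* GST_TAG_MERGE_REPLACE lets freshly parsed values win over older ones */
  merged = gst_tag_list_merge (current, parsed, GST_TAG_MERGE_REPLACE);

  if (current)
    gst_tag_list_free (current);
  if (parsed)
    gst_tag_list_free (parsed);

  return merged;
}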
static gboolean gst_decklink_audio_src_set_caps (GstBaseSrc * bsrc, GstCaps * caps) { GstDecklinkAudioSrc *self = GST_DECKLINK_AUDIO_SRC_CAST (bsrc); BMDAudioSampleType sample_depth; GstCaps *current_caps; HRESULT ret; BMDAudioConnection conn = (BMDAudioConnection) - 1; GST_DEBUG_OBJECT (self, "Setting caps %" GST_PTR_FORMAT, caps); if ((current_caps = gst_pad_get_current_caps (GST_BASE_SRC_PAD (bsrc)))) { GstCaps *curcaps_cp; GstStructure *cur_st, *caps_st; GST_DEBUG_OBJECT (self, "Pad already has caps %" GST_PTR_FORMAT, caps); curcaps_cp = gst_caps_make_writable (current_caps); cur_st = gst_caps_get_structure (curcaps_cp, 0); caps_st = gst_caps_get_structure (caps, 0); gst_structure_remove_field (cur_st, "channel-mask"); if (!gst_structure_can_intersect (caps_st, cur_st)) { GST_ERROR_OBJECT (self, "New caps are not compatible with old caps"); gst_caps_unref (current_caps); gst_caps_unref (curcaps_cp); return FALSE; } else { gst_caps_unref (current_caps); gst_caps_unref (curcaps_cp); return TRUE; } } if (!gst_audio_info_from_caps (&self->info, caps)) return FALSE; if (self->info.finfo->format == GST_AUDIO_FORMAT_S16LE) { sample_depth = bmdAudioSampleType16bitInteger; } else { sample_depth = bmdAudioSampleType32bitInteger; } switch (self->connection) { case GST_DECKLINK_AUDIO_CONNECTION_AUTO:{ GstElement *videosrc = NULL; GstDecklinkConnectionEnum vconn; // Try to get the connection from the videosrc and try // to select a sensible audio connection based on that g_mutex_lock (&self->input->lock); if (self->input->videosrc) videosrc = GST_ELEMENT_CAST (gst_object_ref (self->input->videosrc)); g_mutex_unlock (&self->input->lock); if (videosrc) { g_object_get (videosrc, "connection", &vconn, NULL); gst_object_unref (videosrc); switch (vconn) { case GST_DECKLINK_CONNECTION_SDI: conn = bmdAudioConnectionEmbedded; break; case GST_DECKLINK_CONNECTION_HDMI: conn = bmdAudioConnectionEmbedded; break; case GST_DECKLINK_CONNECTION_OPTICAL_SDI: conn = bmdAudioConnectionEmbedded; break; case GST_DECKLINK_CONNECTION_COMPONENT: conn = bmdAudioConnectionAnalog; break; case GST_DECKLINK_CONNECTION_COMPOSITE: conn = bmdAudioConnectionAnalog; break; case GST_DECKLINK_CONNECTION_SVIDEO: conn = bmdAudioConnectionAnalog; break; default: // Use default break; } } break; } case GST_DECKLINK_AUDIO_CONNECTION_EMBEDDED: conn = bmdAudioConnectionEmbedded; break; case GST_DECKLINK_AUDIO_CONNECTION_AES_EBU: conn = bmdAudioConnectionAESEBU; break; case GST_DECKLINK_AUDIO_CONNECTION_ANALOG: conn = bmdAudioConnectionAnalog; break; case GST_DECKLINK_AUDIO_CONNECTION_ANALOG_XLR: conn = bmdAudioConnectionAnalogXLR; break; case GST_DECKLINK_AUDIO_CONNECTION_ANALOG_RCA: conn = bmdAudioConnectionAnalogRCA; break; default: g_assert_not_reached (); break; } if (conn != (BMDAudioConnection) - 1) { ret = self->input->config->SetInt (bmdDeckLinkConfigAudioInputConnection, conn); if (ret != S_OK) { GST_ERROR ("set configuration (audio input connection): 0x%08x", ret); return FALSE; } } ret = self->input->input->EnableAudioInput (bmdAudioSampleRate48kHz, sample_depth, 2); if (ret != S_OK) { GST_WARNING_OBJECT (self, "Failed to enable audio input: 0x%08x", ret); return FALSE; } g_mutex_lock (&self->input->lock); self->input->audio_enabled = TRUE; if (self->input->start_streams && self->input->videosrc) self->input->start_streams (self->input->videosrc); g_mutex_unlock (&self->input->lock); return TRUE; }
static GstFlowReturn gst_base_audio_src_create (GstBaseSrc * bsrc, guint64 offset, guint length, GstBuffer ** outbuf) { GstBaseAudioSrc *src = GST_BASE_AUDIO_SRC (bsrc); GstBuffer *buf; guchar *data; guint samples, total_samples; guint64 sample; gint bps; GstRingBuffer *ringbuffer; GstRingBufferSpec *spec; guint read; GstClockTime timestamp, duration; GstClock *clock; ringbuffer = src->ringbuffer; spec = &ringbuffer->spec; if (G_UNLIKELY (!gst_ring_buffer_is_acquired (ringbuffer))) goto wrong_state; bps = spec->bytes_per_sample; if ((length == 0 && bsrc->blocksize == 0) || length == -1) /* no length given, use the default segment size */ length = spec->segsize; else /* make sure we round down to an integral number of samples */ length -= length % bps; /* figure out the offset in the ringbuffer */ if (G_UNLIKELY (offset != -1)) { sample = offset / bps; /* if a specific offset was given it must be the next sequential * offset we expect or we fail for now. */ if (src->next_sample != -1 && sample != src->next_sample) goto wrong_offset; } else { /* calculate the sequentially next sample we need to read. This can jump and * create a DISCONT. */ sample = gst_base_audio_src_get_offset (src); } GST_DEBUG_OBJECT (src, "reading from sample %" G_GUINT64_FORMAT, sample); /* get the number of samples to read */ total_samples = samples = length / bps; /* FIXME, using a bufferpool would be nice here */ buf = gst_buffer_new_and_alloc (length); data = GST_BUFFER_DATA (buf); do { read = gst_ring_buffer_read (ringbuffer, sample, data, samples); GST_DEBUG_OBJECT (src, "read %u of %u", read, samples); /* if we read all, we're done */ if (read == samples) break; /* else something interrupted us and we wait for playing again. */ GST_DEBUG_OBJECT (src, "wait playing"); if (gst_base_src_wait_playing (bsrc) != GST_FLOW_OK) goto stopped; GST_DEBUG_OBJECT (src, "continue playing"); /* read next samples */ sample += read; samples -= read; data += read * bps; } while (TRUE); /* mark discontinuity if needed */ if (G_UNLIKELY (sample != src->next_sample) && src->next_sample != -1) { GST_WARNING_OBJECT (src, "create DISCONT of %" G_GUINT64_FORMAT " samples at sample %" G_GUINT64_FORMAT, sample - src->next_sample, sample); GST_ELEMENT_WARNING (src, CORE, CLOCK, (_("Can't record audio fast enough")), ("Dropped %" G_GUINT64_FORMAT " samples. This is most likely because " "downstream can't keep up and is consuming samples too slowly.", sample - src->next_sample)); GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT); } src->next_sample = sample + samples; /* get the normal timestamp to get the duration. */ timestamp = gst_util_uint64_scale_int (sample, GST_SECOND, spec->rate); duration = gst_util_uint64_scale_int (src->next_sample, GST_SECOND, spec->rate) - timestamp; GST_OBJECT_LOCK (src); if (!(clock = GST_ELEMENT_CLOCK (src))) goto no_sync; if (clock != src->clock) { /* we are slaved, check how to handle this */ switch (src->priv->slave_method) { case GST_BASE_AUDIO_SRC_SLAVE_RESAMPLE: /* not implemented, use skew algorithm. 
This algorithm should * work on the readout pointer and produces more or less samples based * on the clock drift */ case GST_BASE_AUDIO_SRC_SLAVE_SKEW: { GstClockTime running_time; GstClockTime base_time; GstClockTime current_time; guint64 running_time_sample; gint running_time_segment; gint last_read_segment; gint segment_skew; gint sps; gint segments_written; gint last_written_segment; /* get the amount of segments written from the device by now */ segments_written = g_atomic_int_get (&ringbuffer->segdone); /* subtract the base from segments_written to get the number of the last written segment in the ringbuffer (one segment written = segment 0) */ last_written_segment = segments_written - ringbuffer->segbase - 1; /* samples per segment */ sps = ringbuffer->samples_per_seg; /* get the current time */ current_time = gst_clock_get_time (clock); /* get the basetime */ base_time = GST_ELEMENT_CAST (src)->base_time; /* get the running_time */ running_time = current_time - base_time; /* the running_time converted to a sample (relative to the ringbuffer) */ running_time_sample = gst_util_uint64_scale_int (running_time, spec->rate, GST_SECOND); /* the segment number corresponding to running_time, round down */ running_time_segment = running_time_sample / sps; /* the segment currently read from the ringbuffer */ last_read_segment = sample / sps; /* the skew we have between running_time and the ringbuffer time (last written to) */ segment_skew = running_time_segment - last_written_segment; GST_DEBUG_OBJECT (bsrc, "\n running_time = %" GST_TIME_FORMAT "\n timestamp = %" GST_TIME_FORMAT "\n running_time_segment = %d" "\n last_written_segment = %d" "\n segment_skew (running time segment - last_written_segment) = %d" "\n last_read_segment = %d", GST_TIME_ARGS (running_time), GST_TIME_ARGS (timestamp), running_time_segment, last_written_segment, segment_skew, last_read_segment); /* Resync the ringbuffer if: * * 1. We are more than the length of the ringbuffer behind. * The length of the ringbuffer then gets to dictate * the threshold for what is considered "too late" * * 2. If this is our first buffer. * We know that we should catch up to running_time * the first time we are run. */ if ((segment_skew >= ringbuffer->spec.segtotal) || (last_read_segment == 0)) { gint new_read_segment; gint segment_diff; guint64 new_sample; /* the difference between running_time and the last written segment */ segment_diff = running_time_segment - last_written_segment; /* advance the ringbuffer */ gst_ring_buffer_advance (ringbuffer, segment_diff); /* we move the new read segment to the last known written segment */ new_read_segment = g_atomic_int_get (&ringbuffer->segdone) - ringbuffer->segbase; /* we calculate the new sample value */ new_sample = ((guint64) new_read_segment) * sps; /* and get the relative time to this -> our new timestamp */ timestamp = gst_util_uint64_scale_int (new_sample, GST_SECOND, spec->rate); /* we update the next sample accordingly */ src->next_sample = new_sample + samples; GST_DEBUG_OBJECT (bsrc, "Timeshifted the ringbuffer with %d segments: " "Updating the timestamp to %" GST_TIME_FORMAT ", " "and src->next_sample to %" G_GUINT64_FORMAT, segment_diff, GST_TIME_ARGS (timestamp), src->next_sample); } break; } case GST_BASE_AUDIO_SRC_SLAVE_RETIMESTAMP: { GstClockTime base_time, latency; /* We are slaved to another clock, take running time of the pipeline clock and * timestamp against it. Somebody else in the pipeline should figure out the * clock drift. We keep the duration we calculated above.
*/ timestamp = gst_clock_get_time (clock); base_time = GST_ELEMENT_CAST (src)->base_time; if (GST_CLOCK_DIFF (timestamp, base_time) < 0) timestamp -= base_time; else timestamp = 0; /* subtract latency */ latency = gst_util_uint64_scale_int (total_samples, GST_SECOND, spec->rate); if (timestamp > latency) timestamp -= latency; else timestamp = 0; } case GST_BASE_AUDIO_SRC_SLAVE_NONE: break; } } else { GstClockTime base_time; /* to get the timestamp against the clock we also need to add our offset */ timestamp = gst_audio_clock_adjust (clock, timestamp); /* we are not slaved, subtract base_time */ base_time = GST_ELEMENT_CAST (src)->base_time; if (GST_CLOCK_DIFF (timestamp, base_time) < 0) { timestamp -= base_time; GST_LOG_OBJECT (src, "buffer timestamp %" GST_TIME_FORMAT " (base_time %" GST_TIME_FORMAT ")", GST_TIME_ARGS (timestamp), GST_TIME_ARGS (base_time)); } else { GST_LOG_OBJECT (src, "buffer timestamp 0, ts %" GST_TIME_FORMAT " <= base_time %" GST_TIME_FORMAT, GST_TIME_ARGS (timestamp), GST_TIME_ARGS (base_time)); timestamp = 0; } } no_sync: GST_OBJECT_UNLOCK (src); GST_BUFFER_TIMESTAMP (buf) = timestamp; GST_BUFFER_DURATION (buf) = duration; GST_BUFFER_OFFSET (buf) = sample; GST_BUFFER_OFFSET_END (buf) = sample + samples; *outbuf = buf; return GST_FLOW_OK; /* ERRORS */ wrong_state: { GST_DEBUG_OBJECT (src, "ringbuffer in wrong state"); return GST_FLOW_WRONG_STATE; } wrong_offset: { GST_ELEMENT_ERROR (src, RESOURCE, SEEK, (NULL), ("resource can only be operated on sequentially but offset %" G_GUINT64_FORMAT " was given", offset)); return GST_FLOW_ERROR; } stopped: { gst_buffer_unref (buf); GST_DEBUG_OBJECT (src, "ringbuffer stopped"); return GST_FLOW_WRONG_STATE; } }
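/* Illustrative only: the skew computation from the SLAVE_SKEW branch above as
 * a standalone sketch.  The numbers in the comment assume a 48 kHz rate and
 * 480 samples per segment (10 ms segments); the helper is hypothetical. */
static gint
example_segment_skew (GstClockTime running_time, gint rate, gint sps,
    gint last_written_segment)
{
  guint64 running_time_sample =
      gst_util_uint64_scale_int (running_time, rate, GST_SECOND);
  gint running_time_segment = running_time_sample / sps;

  /* e.g. running_time = 1 s -> sample 48000 -> segment 100; if the device has
   * only written up to segment 40, the skew is 60 segments */
  return running_time_segment - last_written_segment;
}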
/* this internal thread does nothing else but write samples to the audio device.
 * It will write each segment in the ringbuffer and will update the play
 * pointer.
 * The start/stop methods control the thread.
 */
static void
audioringbuffer_thread_func (GstAudioRingBuffer * buf)
{
  GstAudioSink *sink;
  GstAudioSinkClass *csink;
  GstAudioSinkRingBuffer *abuf = GST_AUDIO_SINK_RING_BUFFER_CAST (buf);
  WriteFunc writefunc;
  GstMessage *message;
  GValue val = { 0 };

  sink = GST_AUDIO_SINK (GST_OBJECT_PARENT (buf));
  csink = GST_AUDIO_SINK_GET_CLASS (sink);

  GST_DEBUG_OBJECT (sink, "enter thread");

  GST_OBJECT_LOCK (abuf);
  GST_DEBUG_OBJECT (sink, "signal wait");
  GST_AUDIO_SINK_RING_BUFFER_SIGNAL (buf);
  GST_OBJECT_UNLOCK (abuf);

  writefunc = csink->write;
  if (writefunc == NULL)
    goto no_function;

  g_value_init (&val, G_TYPE_POINTER);
  g_value_set_pointer (&val, sink->thread);
  message = gst_message_new_stream_status (GST_OBJECT_CAST (buf),
      GST_STREAM_STATUS_TYPE_ENTER, GST_ELEMENT_CAST (sink));
  gst_message_set_stream_status_object (message, &val);
  GST_DEBUG_OBJECT (sink, "posting ENTER stream status");
  gst_element_post_message (GST_ELEMENT_CAST (sink), message);

  while (TRUE) {
    gint left, len;
    guint8 *readptr;
    gint readseg;

    /* buffer must be started */
    if (gst_audio_ring_buffer_prepare_read (buf, &readseg, &readptr, &len)) {
      gint written;

      left = len;
      do {
        written = writefunc (sink, readptr, left);
        GST_LOG_OBJECT (sink, "transferred %d bytes of %d from segment %d",
            written, left, readseg);
        if (written < 0 || written > left) {
          /* might not be critical, it e.g. happens when aborting playback */
          GST_WARNING_OBJECT (sink,
              "error writing data in %s (reason: %s), skipping segment (left: %d, written: %d)",
              GST_DEBUG_FUNCPTR_NAME (writefunc),
              (errno > 1 ? g_strerror (errno) : "unknown"), left, written);
          break;
        }
        left -= written;
        readptr += written;
      } while (left > 0);

      /* clear written samples */
      gst_audio_ring_buffer_clear (buf, readseg);

      /* we wrote one segment */
      gst_audio_ring_buffer_advance (buf, 1);
    } else {
      GST_OBJECT_LOCK (abuf);
      if (!abuf->running)
        goto stop_running;
      if (G_UNLIKELY (g_atomic_int_get (&buf->state) ==
              GST_AUDIO_RING_BUFFER_STATE_STARTED)) {
        GST_OBJECT_UNLOCK (abuf);
        continue;
      }
      GST_DEBUG_OBJECT (sink, "signal wait");
      GST_AUDIO_SINK_RING_BUFFER_SIGNAL (buf);
      GST_DEBUG_OBJECT (sink, "wait for action");
      GST_AUDIO_SINK_RING_BUFFER_WAIT (buf);
      GST_DEBUG_OBJECT (sink, "got signal");
      if (!abuf->running)
        goto stop_running;
      GST_DEBUG_OBJECT (sink, "continue running");
      GST_OBJECT_UNLOCK (abuf);
    }
  }

  /* Will never be reached */
  g_assert_not_reached ();
  return;

  /* ERROR */
no_function:
  {
    GST_DEBUG_OBJECT (sink, "no write function, exit thread");
    return;
  }
stop_running:
  {
    GST_OBJECT_UNLOCK (abuf);
    GST_DEBUG_OBJECT (sink, "stop running, exit thread");
    message = gst_message_new_stream_status (GST_OBJECT_CAST (buf),
        GST_STREAM_STATUS_TYPE_LEAVE, GST_ELEMENT_CAST (sink));
    gst_message_set_stream_status_object (message, &val);
    GST_DEBUG_OBJECT (sink, "posting LEAVE stream status");
    gst_element_post_message (GST_ELEMENT_CAST (sink), message);
    return;
  }
}
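/* Illustrative only: a sketch of the write vmethod contract the loop above
 * relies on, assuming the GstAudioSinkClass::write signature (sink, data,
 * length).  A real implementation hands the bytes to the audio API; returning
 * fewer than 'length' bytes makes the ringbuffer thread retry with the rest,
 * and a negative return skips the segment. */
static gint
example_sink_write (GstAudioSink * sink, gpointer data, guint length)
{
  /* pretend every byte was accepted */
  return (gint) length;
}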
static gboolean gst_mms_start (GstBaseSrc * bsrc) { GstMMS *mms = GST_MMS (bsrc); guint bandwidth_avail; if (!mms->uri_name || *mms->uri_name == '\0') goto no_uri; if (mms->connection_speed) bandwidth_avail = mms->connection_speed; else bandwidth_avail = G_MAXINT; /* If we already have a connection, and the uri isn't changed, reuse it, as connecting is expensive. */ if (mms->connection) { if (!strcmp (mms->uri_name, mms->current_connection_uri_name)) { GST_DEBUG_OBJECT (mms, "Reusing existing connection for %s", mms->uri_name); return TRUE; } else { mmsx_close (mms->connection); g_free (mms->current_connection_uri_name); mms->current_connection_uri_name = NULL; } } /* FIXME: pass some sane arguments here */ GST_DEBUG_OBJECT (mms, "Trying mms_connect (%s) with bandwidth constraint of %d bps", mms->uri_name, bandwidth_avail); mms->connection = mmsx_connect (NULL, NULL, mms->uri_name, bandwidth_avail); if (mms->connection) { /* Save the uri name so that it can be checked for connection reusing, see above. */ mms->current_connection_uri_name = g_strdup (mms->uri_name); GST_DEBUG_OBJECT (mms, "Connect successful"); return TRUE; } else { gchar *url, *location; GST_ERROR_OBJECT (mms, "Could not connect to this stream, redirecting to rtsp"); location = strstr (mms->uri_name, "://"); if (location == NULL || *location == '\0' || *(location + 3) == '\0') goto no_uri; url = g_strdup_printf ("rtsp://%s", location + 3); gst_element_post_message (GST_ELEMENT_CAST (mms), gst_message_new_element (GST_OBJECT_CAST (mms), gst_structure_new ("redirect", "new-location", G_TYPE_STRING, url, NULL))); /* post an error message as well, so that applications that don't handle * redirect messages get to see a proper error message */ GST_ELEMENT_ERROR (mms, RESOURCE, OPEN_READ, ("Could not connect to streaming server."), ("A redirect message was posted on the bus and should have been " "handled by the application.")); return FALSE; } no_uri: { GST_ELEMENT_ERROR (mms, RESOURCE, OPEN_READ, ("No URI to open specified"), (NULL)); return FALSE; } }
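/* Illustrative only: a sketch of how an application might react to the
 * "redirect" element message posted above.  The bus-callback wiring around
 * this function is assumed, not shown. */
static void
example_handle_redirect (GstMessage * msg)
{
  const GstStructure *s = gst_message_get_structure (msg);

  if (s != NULL && gst_structure_has_name (s, "redirect")) {
    const gchar *new_location = gst_structure_get_string (s, "new-location");

    /* e.g. tear down the mms source and retry with an RTSP source at this URI */
    g_print ("redirected to %s\n", new_location);
  }
}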
static GstFlowReturn gst_dvbsrc_read_device (GstDvbSrc * object, int size, GstBuffer ** buffer) { gint count = 0; gint ret_val = 0; GstBuffer *buf = gst_buffer_new_and_alloc (size); GstClockTime timeout = object->timeout * GST_USECOND; GstMapInfo map; g_return_val_if_fail (GST_IS_BUFFER (buf), GST_FLOW_ERROR); if (object->fd_dvr < 0) return GST_FLOW_ERROR; gst_buffer_map (buf, &map, GST_MAP_WRITE); while (count < size) { ret_val = gst_poll_wait (object->poll, timeout); GST_LOG_OBJECT (object, "select returned %d", ret_val); if (G_UNLIKELY (ret_val < 0)) { if (errno == EBUSY) goto stopped; else goto select_error; } else if (G_UNLIKELY (ret_val == 0)) { /* timeout, post element message */ gst_element_post_message (GST_ELEMENT_CAST (object), gst_message_new_element (GST_OBJECT (object), gst_structure_new_empty ("dvb-read-failure"))); } else { int nread = read (object->fd_dvr, map.data + count, size - count); if (G_UNLIKELY (nread < 0)) { GST_WARNING_OBJECT (object, "Unable to read from device: /dev/dvb/adapter%d/dvr%d (%d)", object->adapter_number, object->frontend_number, errno); gst_element_post_message (GST_ELEMENT_CAST (object), gst_message_new_element (GST_OBJECT (object), gst_structure_new_empty ("dvb-read-failure"))); } else count = count + nread; } } gst_buffer_unmap (buf, &map); gst_buffer_resize (buf, 0, count); *buffer = buf; return GST_FLOW_OK; stopped: { GST_DEBUG_OBJECT (object, "stop called"); gst_buffer_unmap (buf, &map); gst_buffer_unref (buf); return GST_FLOW_FLUSHING; } select_error: { GST_ELEMENT_ERROR (object, RESOURCE, READ, (NULL), ("select error %d: %s (%d)", ret_val, g_strerror (errno), errno)); gst_buffer_unmap (buf, &map); gst_buffer_unref (buf); return GST_FLOW_ERROR; } }
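/* Illustrative only: a sketch of how the GstPoll consulted above is typically
 * prepared for the DVR file descriptor.  Error handling is omitted and the
 * helper name is hypothetical. */
static GstPoll *
example_make_dvr_poll (int dvr_fd)
{
  GstPoll *poll_set = gst_poll_new (TRUE);
  GstPollFD pfd = GST_POLL_FD_INIT;

  pfd.fd = dvr_fd;
  gst_poll_add_fd (poll_set, &pfd);
  gst_poll_fd_ctl_read (poll_set, &pfd, TRUE);  /* wake up on readable data */

  return poll_set;
}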
static gboolean
gst_dvbsrc_open_frontend (GstDvbSrc * object, gboolean writable)
{
  struct dvb_frontend_info fe_info;
  const char *adapter_desc = NULL;
  gchar *frontend_dev;
  GstStructure *adapter_structure;
  char *adapter_name = NULL;

  frontend_dev = g_strdup_printf ("/dev/dvb/adapter%d/frontend%d",
      object->adapter_number, object->frontend_number);
  GST_INFO_OBJECT (object, "Using frontend device: %s", frontend_dev);

  /* open frontend */
  if ((object->fd_frontend =
          open (frontend_dev, writable ? O_RDWR : O_RDONLY)) < 0) {
    switch (errno) {
      case ENOENT:
        GST_ELEMENT_ERROR (object, RESOURCE, NOT_FOUND,
            (_("Device \"%s\" does not exist."), frontend_dev), (NULL));
        break;
      default:
        GST_ELEMENT_ERROR (object, RESOURCE, OPEN_READ_WRITE,
            (_("Could not open frontend device \"%s\"."), frontend_dev),
            GST_ERROR_SYSTEM);
        break;
    }
    close (object->fd_frontend);
    g_free (frontend_dev);
    return FALSE;
  }

  GST_DEBUG_OBJECT (object, "Device opened, querying information");

  if (ioctl (object->fd_frontend, FE_GET_INFO, &fe_info) < 0) {
    GST_ELEMENT_ERROR (object, RESOURCE, SETTINGS,
        (_("Could not get settings from frontend device \"%s\"."),
            frontend_dev), GST_ERROR_SYSTEM);
    close (object->fd_frontend);
    g_free (frontend_dev);
    return FALSE;
  }

  GST_DEBUG_OBJECT (object, "Got information about adapter : %s", fe_info.name);

  adapter_name = g_strdup (fe_info.name);

  object->adapter_type = fe_info.type;
  switch (object->adapter_type) {
    case FE_QPSK:
      adapter_desc = "DVB-S";
      adapter_structure = gst_structure_new ("dvb-adapter",
          "type", G_TYPE_STRING, adapter_desc,
          "name", G_TYPE_STRING, adapter_name,
          "auto-fec", G_TYPE_BOOLEAN, fe_info.caps & FE_CAN_FEC_AUTO, NULL);
      break;
    case FE_QAM:
      adapter_desc = "DVB-C";
      adapter_structure = gst_structure_new ("dvb-adapter",
          "type", G_TYPE_STRING, adapter_desc,
          "name", G_TYPE_STRING, adapter_name,
          "auto-inversion", G_TYPE_BOOLEAN,
          fe_info.caps & FE_CAN_INVERSION_AUTO,
          "auto-qam", G_TYPE_BOOLEAN, fe_info.caps & FE_CAN_QAM_AUTO,
          "auto-fec", G_TYPE_BOOLEAN, fe_info.caps & FE_CAN_FEC_AUTO, NULL);
      break;
    case FE_OFDM:
      adapter_desc = "DVB-T";
      adapter_structure = gst_structure_new ("dvb-adapter",
          "type", G_TYPE_STRING, adapter_desc,
          "name", G_TYPE_STRING, adapter_name,
          "auto-inversion", G_TYPE_BOOLEAN,
          fe_info.caps & FE_CAN_INVERSION_AUTO,
          "auto-qam", G_TYPE_BOOLEAN, fe_info.caps & FE_CAN_QAM_AUTO,
          "auto-transmission-mode", G_TYPE_BOOLEAN,
          fe_info.caps & FE_CAN_TRANSMISSION_MODE_AUTO,
          "auto-guard-interval", G_TYPE_BOOLEAN,
          fe_info.caps & FE_CAN_GUARD_INTERVAL_AUTO,
          "auto-hierarchy", G_TYPE_BOOLEAN,
          fe_info.caps & FE_CAN_HIERARCHY_AUTO,
          "auto-fec", G_TYPE_BOOLEAN, fe_info.caps & FE_CAN_FEC_AUTO, NULL);
      break;
    case FE_ATSC:
      adapter_desc = "ATSC";
      adapter_structure = gst_structure_new ("dvb-adapter",
          "type", G_TYPE_STRING, adapter_desc,
          "name", G_TYPE_STRING, adapter_name, NULL);
      break;
    default:
      g_error ("Unknown frontend type: %d", object->adapter_type);
      adapter_structure = gst_structure_new ("dvb-adapter",
          "type", G_TYPE_STRING, "unknown", NULL);
  }

  GST_INFO_OBJECT (object, "DVB card: %s ", adapter_name);
  gst_element_post_message (GST_ELEMENT_CAST (object),
      gst_message_new_element (GST_OBJECT (object), adapter_structure));

  g_free (frontend_dev);
  g_free (adapter_name);
  return TRUE;
}