/* Load the image file at overlay->location into overlay->pixels as a
 * GstBuffer whose pixels are in native-endian order for overlay composition
 * (B-G-R-A little-endian / A-R-G-B big-endian).  Ownership of the GdkPixbuf
 * is transferred to the wrapped buffer.  Returns FALSE and sets @err on
 * failure to load the file. */
static gboolean
gst_gdk_pixbuf_overlay_load_image (GstGdkPixbufOverlay * overlay, GError ** err)
{
  GstVideoMeta *video_meta;
  GdkPixbuf *pixbuf;
  guint8 *pixels, *p;
  gint width, height, stride, w, h, plane;

  pixbuf = gdk_pixbuf_new_from_file (overlay->location, err);

  if (pixbuf == NULL)
    return FALSE;

  /* Force a 4-byte-per-pixel layout so the in-place channel shuffle below
   * can always step in units of 4 */
  if (!gdk_pixbuf_get_has_alpha (pixbuf)) {
    GdkPixbuf *alpha_pixbuf;

    /* FIXME: we could do this much more efficiently ourselves below, but
     * we're lazy for now */
    /* FIXME: perhaps expose substitute_color via properties */
    alpha_pixbuf = gdk_pixbuf_add_alpha (pixbuf, FALSE, 0, 0, 0);
    g_object_unref (pixbuf);
    pixbuf = alpha_pixbuf;
  }

  width = gdk_pixbuf_get_width (pixbuf);
  height = gdk_pixbuf_get_height (pixbuf);
  stride = gdk_pixbuf_get_rowstride (pixbuf);
  pixels = gdk_pixbuf_get_pixels (pixbuf);

  /* the memory layout in GdkPixbuf is R-G-B-A, we want:
   *  - B-G-R-A on little-endian platforms
   *  - A-R-G-B on big-endian platforms */
  for (h = 0; h < height; ++h) {
    p = pixels + (h * stride);
    for (w = 0; w < width; ++w) {
      guint8 tmp;

      /* R-G-B-A ==> B-G-R-A */
      tmp = p[0];
      p[0] = p[2];
      p[2] = tmp;

      if (G_BYTE_ORDER == G_BIG_ENDIAN) {
        /* B-G-R-A ==> A-R-G-B */
        /* we can probably assume sane alignment */
        *((guint32 *) p) = GUINT32_SWAP_LE_BE (*((guint32 *) p));
      }
      p += 4;
    }
  }

  /* assume we have row padding even for the last row */
  /* NOTE(review): sizing the buffer as height * stride assumes the pixbuf's
   * last row is padded out to the full rowstride - confirm against the
   * GdkPixbuf allocation guarantees */
  /* transfer ownership of pixbuf to the buffer */
  overlay->pixels = gst_buffer_new_wrapped_full (GST_MEMORY_FLAG_READONLY,
      pixels, height * stride, 0, height * stride, pixbuf,
      (GDestroyNotify) g_object_unref);

  video_meta = gst_buffer_add_video_meta (overlay->pixels,
      GST_VIDEO_FRAME_FLAG_NONE,
      GST_VIDEO_OVERLAY_COMPOSITION_FORMAT_RGB, width, height);

  /* override the meta's default strides with the pixbuf's actual rowstride */
  for (plane = 0; plane < video_meta->n_planes; ++plane)
    video_meta->stride[plane] = stride;

  overlay->update_composition = TRUE;

  GST_INFO_OBJECT (overlay, "Loaded image, %d x %d", width, height);

  return TRUE;
}
/* Open and configure the V4L2 capture device for @v4l2src:
 * re-programs the detected video standard, resolves the frame size for the
 * configured capture mode, selects the input, sets the requested frame rate
 * and finally the capture format.
 * Returns the open file descriptor (ownership passes to the caller) or -1
 * on error (the fd is closed on all error paths). */
static gint
gst_imx_v4l2src_capture_setup(GstImxV4l2VideoSrc *v4l2src)
{
  struct v4l2_format fmt = {0};
  struct v4l2_streamparm parm = {0};
  struct v4l2_frmsizeenum fszenum = {0};
  v4l2_std_id id;
  gint input;
  gint fd_v4l;

  fd_v4l = open(v4l2src->devicename, O_RDWR, 0);
  if (fd_v4l < 0) {
    GST_ERROR_OBJECT(v4l2src, "Unable to open %s", v4l2src->devicename);
    return -1;
  }

  /* G_STD failing is only a warning (not every driver supports standards),
   * but if we got a standard then failing to set it back is fatal */
  if (ioctl (fd_v4l, VIDIOC_G_STD, &id) < 0) {
    GST_WARNING_OBJECT(v4l2src, "VIDIOC_G_STD failed: %s", strerror(errno));
  } else {
    if (ioctl (fd_v4l, VIDIOC_S_STD, &id) < 0) {
      GST_ERROR_OBJECT(v4l2src, "VIDIOC_S_STD failed");
      close(fd_v4l);
      return -1;
    }
  }

  /* Query the current format; its pixelformat seeds the frame size
   * enumeration and is kept for the S_FMT call below */
  fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  if (ioctl(fd_v4l, VIDIOC_G_FMT, &fmt) < 0) {
    GST_ERROR_OBJECT(v4l2src, "VIDIOC_G_FMT failed");
    close(fd_v4l);
    return -1;
  }
  GST_DEBUG_OBJECT(v4l2src, "pixelformat = %d  field = %d",
      fmt.fmt.pix.pixelformat, fmt.fmt.pix.field);

  /* capture_mode indexes the driver's discrete frame size list */
  fszenum.index = v4l2src->capture_mode;
  fszenum.pixel_format = fmt.fmt.pix.pixelformat;
  if (ioctl(fd_v4l, VIDIOC_ENUM_FRAMESIZES, &fszenum) < 0) {
    GST_ERROR_OBJECT(v4l2src, "VIDIOC_ENUM_FRAMESIZES failed: %s",
        strerror(errno));
    close(fd_v4l);
    return -1;
  }
  v4l2src->capture_width = fszenum.discrete.width;
  v4l2src->capture_height = fszenum.discrete.height;
  GST_INFO_OBJECT(v4l2src, "capture mode %d: %dx%d", v4l2src->capture_mode,
      v4l2src->capture_width, v4l2src->capture_height);

  input = v4l2src->input;
  if (ioctl(fd_v4l, VIDIOC_S_INPUT, &input) < 0) {
    GST_ERROR_OBJECT(v4l2src, "VIDIOC_S_INPUT failed: %s", strerror(errno));
    close(fd_v4l);
    return -1;
  }

  /* timeperframe is the inverse of the frame rate, hence
   * numerator = fps_d / denominator = fps_n */
  parm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  parm.parm.capture.timeperframe.numerator = v4l2src->fps_d;
  parm.parm.capture.timeperframe.denominator = v4l2src->fps_n;
  parm.parm.capture.capturemode = v4l2src->capture_mode;

  if (ioctl(fd_v4l, VIDIOC_S_PARM, &parm) < 0) {
    GST_ERROR_OBJECT(v4l2src, "VIDIOC_S_PARM failed: %s", strerror(errno));
    close(fd_v4l);
    return -1;
  }

  /* Get the actual frame period if possible */
  /* the driver may have adjusted timeperframe; read back the real rate */
  if (parm.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) {
    v4l2src->fps_n = parm.parm.capture.timeperframe.denominator;
    v4l2src->fps_d = parm.parm.capture.timeperframe.numerator;
  }

  /* Program the final format; zero out the size/stride fields so the
   * driver recomputes them for the new width/height */
  fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  fmt.fmt.pix.bytesperline = 0;
  fmt.fmt.pix.priv = 0;
  fmt.fmt.pix.sizeimage = 0;
  fmt.fmt.pix.width = v4l2src->capture_width;
  fmt.fmt.pix.height = v4l2src->capture_height;

  if (ioctl(fd_v4l, VIDIOC_S_FMT, &fmt) < 0) {
    GST_ERROR_OBJECT(v4l2src, "VIDIOC_S_FMT failed: %s", strerror(errno));
    close(fd_v4l);
    return -1;
  }

  return fd_v4l;
}
/* Iterate the input sink pads, and choose the blend format
 * we will generate before output conversion, which is RGBA
 * at some suitable size.
 *
 * The chosen per-view size is the maximum width/height over all negotiated
 * inputs, and the frame rate is the highest input rate.  The result is
 * stored in mix->mix_info and mix->out_caps, and copied to @best_info for
 * videoaggregator. */
static void
gst_gl_stereo_mix_find_best_format (GstVideoAggregator * vagg,
    GstCaps * downstream_caps, GstVideoInfo * best_info,
    gboolean * at_least_one_alpha)
{
  GstGLStereoMix *mix = GST_GL_STEREO_MIX (vagg);
  GList *l;
  gint best_width = -1, best_height = -1;
  gdouble best_fps = -1, cur_fps;
  gint best_fps_n = 0, best_fps_d = 1;
  GstVideoInfo *mix_info;
  GstCaps *blend_caps, *tmp_caps;

  /* We'll deal with alpha internally, so just tell aggregator to
   * be quiet */
  *at_least_one_alpha = FALSE;

  /* object lock protects the sinkpads list while we walk it */
  GST_OBJECT_LOCK (vagg);
  for (l = GST_ELEMENT (vagg)->sinkpads; l; l = l->next) {
    GstVideoAggregatorPad *pad = l->data;
    GstVideoInfo tmp = pad->info;
    gint this_width, this_height;
    gint fps_n, fps_d;

    if (!pad->info.finfo)
      continue;

    /* This can happen if we release a pad and another pad hasn't been
     * negotiated_caps yet */
    if (GST_VIDEO_INFO_FORMAT (&pad->info) == GST_VIDEO_FORMAT_UNKNOWN)
      continue;

    /* Convert to per-view width/height for unpacked forms */
    gst_video_multiview_video_info_change_mode (&tmp,
        GST_VIDEO_MULTIVIEW_MODE_SEPARATED, GST_VIDEO_MULTIVIEW_FLAGS_NONE);

    this_width = GST_VIDEO_INFO_WIDTH (&tmp);
    this_height = GST_VIDEO_INFO_HEIGHT (&tmp);
    fps_n = GST_VIDEO_INFO_FPS_N (&tmp);
    fps_d = GST_VIDEO_INFO_FPS_D (&tmp);

    GST_INFO_OBJECT (vagg, "Input pad %" GST_PTR_FORMAT
        " w %u h %u", pad, this_width, this_height);

    if (this_width == 0 || this_height == 0)
      continue;

    /* track the largest per-view dimensions seen */
    if (best_width < this_width)
      best_width = this_width;
    if (best_height < this_height)
      best_height = this_height;

    if (fps_d == 0)
      cur_fps = 0.0;
    else
      gst_util_fraction_to_double (fps_n, fps_d, &cur_fps);

    /* track the highest frame rate seen */
    if (best_fps < cur_fps) {
      best_fps = cur_fps;
      best_fps_n = fps_n;
      best_fps_d = fps_d;
    }

    /* FIXME: Preserve PAR for at least one input when different sized
     * inputs */
  }
  GST_OBJECT_UNLOCK (vagg);

  /* Blend in RGBA, two separated views at the chosen size/rate */
  mix_info = &mix->mix_info;
  gst_video_info_set_format (mix_info, GST_VIDEO_FORMAT_RGBA, best_width,
      best_height);

  GST_VIDEO_INFO_FPS_N (mix_info) = best_fps_n;
  GST_VIDEO_INFO_FPS_D (mix_info) = best_fps_d;

  GST_VIDEO_INFO_MULTIVIEW_MODE (mix_info) = GST_VIDEO_MULTIVIEW_MODE_SEPARATED;
  GST_VIDEO_INFO_VIEWS (mix_info) = 2;
  /* FIXME: If input is marked as flipped or flopped, preserve those flags */
  GST_VIDEO_INFO_MULTIVIEW_FLAGS (mix_info) = GST_VIDEO_MULTIVIEW_FLAGS_NONE;

  /* Choose our output format based on downstream preferences */
  blend_caps = gst_video_info_to_caps (mix_info);

  gst_caps_set_features (blend_caps, 0,
      gst_caps_features_from_string (GST_CAPS_FEATURE_MEMORY_GL_MEMORY));

  tmp_caps = get_converted_caps (GST_GL_STEREO_MIX (vagg), blend_caps);
  gst_caps_unref (blend_caps);

  /* replace any previously computed output caps */
  if (mix->out_caps)
    gst_caps_unref (mix->out_caps);

  mix->out_caps = gst_caps_intersect (downstream_caps, tmp_caps);

  gst_caps_unref (tmp_caps);

  GST_DEBUG_OBJECT (vagg, "Possible output caps %" GST_PTR_FORMAT,
      mix->out_caps);

  /* Tell videoaggregator our preferred size. Actual info gets
   * overridden during caps nego */
  *best_info = *mix_info;
}
/* Switch the HLS client to the variant playlist that best matches
 * @max_bitrate.  If updating the new playlist fails, first tries failover
 * variants with the same bandwidth, then restores the previous variant and
 * recursively retries with a lower bitrate cap, stopping once the lowest
 * list has been tried.  Sets *@changed to TRUE when a different playlist
 * was successfully activated.  Returns FALSE when no usable playlist could
 * be selected. */
static gboolean
gst_hls_demux_change_playlist (GstHLSDemux * demux, guint max_bitrate,
    gboolean * changed)
{
  GList *previous_variant, *current_variant;
  gint old_bandwidth, new_bandwidth;
  GstAdaptiveDemux *adaptive_demux = GST_ADAPTIVE_DEMUX_CAST (demux);
  GstAdaptiveDemuxStream *stream;

  g_return_val_if_fail (adaptive_demux->streams != NULL, FALSE);
  stream = adaptive_demux->streams->data;

  previous_variant = demux->client->main->current_variant;
  current_variant = gst_m3u8_client_get_playlist_for_bitrate (demux->client,
      max_bitrate);

  GST_M3U8_CLIENT_LOCK (demux->client);

retry_failover_protection:
  old_bandwidth = GST_M3U8 (previous_variant->data)->bandwidth;
  new_bandwidth = GST_M3U8 (current_variant->data)->bandwidth;

  /* Don't do anything else if the playlist is the same */
  if (new_bandwidth == old_bandwidth) {
    GST_M3U8_CLIENT_UNLOCK (demux->client);
    return TRUE;
  }

  demux->client->main->current_variant = current_variant;
  GST_M3U8_CLIENT_UNLOCK (demux->client);

  gst_m3u8_client_set_current (demux->client, current_variant->data);

  GST_INFO_OBJECT (demux, "Client was on %dbps, max allowed is %dbps, switching"
      " to bitrate %dbps", old_bandwidth, max_bitrate, new_bandwidth);
  /* new rendition starts at a stream discontinuity */
  stream->discont = TRUE;

  if (gst_hls_demux_update_playlist (demux, FALSE, NULL)) {
    gchar *uri;
    gchar *main_uri;

    /* post a statistics message so the app knows which rendition we use */
    uri = gst_m3u8_client_get_current_uri (demux->client);
    main_uri = gst_m3u8_client_get_uri (demux->client);
    gst_element_post_message (GST_ELEMENT_CAST (demux),
        gst_message_new_element (GST_OBJECT_CAST (demux),
            gst_structure_new (GST_ADAPTIVE_DEMUX_STATISTICS_MESSAGE_NAME,
                "manifest-uri", G_TYPE_STRING,
                main_uri, "uri", G_TYPE_STRING,
                uri, "bitrate", G_TYPE_INT, new_bandwidth, NULL)));
    g_free (uri);
    g_free (main_uri);
    if (changed)
      *changed = TRUE;
  } else {
    GList *failover = NULL;

    GST_INFO_OBJECT (demux, "Unable to update playlist. Switching back");
    GST_M3U8_CLIENT_LOCK (demux->client);

    /* another variant with the same bandwidth may still work */
    failover = g_list_previous (current_variant);
    if (failover && new_bandwidth == GST_M3U8 (failover->data)->bandwidth) {
      current_variant = failover;
      goto retry_failover_protection;
    }

    demux->client->main->current_variant = previous_variant;
    GST_M3U8_CLIENT_UNLOCK (demux->client);
    gst_m3u8_client_set_current (demux->client, previous_variant->data);
    /* Try a lower bitrate (or stop if we just tried the lowest) */
    if (GST_M3U8 (previous_variant->data)->iframe && new_bandwidth ==
        GST_M3U8 (g_list_first (demux->client->main->iframe_lists)->data)->
        bandwidth)
      return FALSE;
    else if (!GST_M3U8 (previous_variant->data)->iframe && new_bandwidth ==
        GST_M3U8 (g_list_first (demux->client->main->lists)->data)->bandwidth)
      return FALSE;
    else
      return gst_hls_demux_change_playlist (demux, new_bandwidth - 1, changed);
  }

  /* Force typefinding since we might have changed media type */
  demux->do_typefind = TRUE;

  return TRUE;
}
/* load the presets of @name for the instance @preset. Returns %FALSE if
 * something failed.
 *
 * Looks up the keyfile group named @name and, for every property the
 * element exposes through the preset interface, deserializes the stored
 * string value and applies it with g_object_set_property(). Properties
 * missing from the keyfile are skipped with a warning. */
static gboolean
gst_preset_default_load_preset (GstPreset * preset, const gchar * name)
{
  GKeyFile *presets;
  gchar **props;
  guint i;
  GObjectClass *gclass;

  /* get the presets from the type */
  /* NOTE(review): the keyfile appears to be owned/cached by the type -
   * it is deliberately not freed here; confirm against preset_get_keyfile */
  if (!(presets = preset_get_keyfile (preset)))
    goto no_presets;

  /* get the preset name */
  if (!g_key_file_has_group (presets, name))
    goto no_group;

  GST_DEBUG_OBJECT (preset, "loading preset : '%s'", name);

  /* get the properties that we can configure in this element */
  if (!(props = gst_preset_get_property_names (preset)))
    goto no_properties;

  gclass = G_OBJECT_CLASS (GST_ELEMENT_GET_CLASS (preset));

  /* for each of the property names, find the preset parameter and try to
   * configure the property with its value */
  for (i = 0; props[i]; i++) {
    gchar *str;
    GValue gvalue = { 0, };
    GParamSpec *property;

    /* check if we have a settings for this element property */
    if (!(str = g_key_file_get_value (presets, name, props[i], NULL))) {
      /* the element has a property but the parameter is not in the keyfile */
      GST_WARNING_OBJECT (preset, "parameter '%s' not in preset", props[i]);
      continue;
    }

    GST_DEBUG_OBJECT (preset, "setting value '%s' for property '%s'", str,
        props[i]);

    /* FIXME, change for childproxy to get the property and element. */
    if (!(property = g_object_class_find_property (gclass, props[i]))) {
      /* the parameter was in the keyfile, the element said it supported it
       * but then the property was not found in the element. This should not
       * happen. */
      GST_WARNING_OBJECT (preset, "property '%s' not in object", props[i]);
      g_free (str);
      continue;
    }

    /* try to deserialize the property value from the keyfile and set it as
     * the object property */
    g_value_init (&gvalue, property->value_type);
    if (gst_value_deserialize (&gvalue, str)) {
      /* FIXME, change for childproxy support */
      g_object_set_property (G_OBJECT (preset), props[i], &gvalue);
    } else {
      GST_WARNING_OBJECT (preset,
          "deserialization of value '%s' for property '%s' failed", str,
          props[i]);
    }
    g_value_unset (&gvalue);
    g_free (str);
  }
  g_strfreev (props);

  return TRUE;

  /* ERRORS */
no_presets:
  {
    GST_WARNING_OBJECT (preset, "no presets");
    return FALSE;
  }
no_group:
  {
    GST_WARNING_OBJECT (preset, "no preset named '%s'", name);
    return FALSE;
  }
no_properties:
  {
    GST_INFO_OBJECT (preset, "no properties");
    return FALSE;
  }
}
/* Pop the next captured audio packet from the queue and wrap it in a
 * GstBuffer (zero-copy; the DeckLink packet and input are kept alive via
 * AddRef and released by audio_packet_free).  Blocks until a packet arrives
 * or the source is flushing.  Timestamps are smoothed: as long as samples
 * arrive contiguously we count sample offsets, and we only resync to the
 * capture clock after a discontinuity persists longer than discont_wait
 * (logic modeled on audiobasesrc). */
static GstFlowReturn
gst_decklink_audio_src_create (GstPushSrc * bsrc, GstBuffer ** buffer)
{
  GstDecklinkAudioSrc *self = GST_DECKLINK_AUDIO_SRC_CAST (bsrc);
  GstFlowReturn flow_ret = GST_FLOW_OK;
  const guint8 *data;
  glong sample_count;
  gsize data_size;
  CapturePacket *p;
  AudioPacket *ap;
  GstClockTime timestamp, duration;
  GstClockTime start_time, end_time;
  guint64 start_offset, end_offset;
  gboolean discont = FALSE;

retry:
  /* wait under the lock until a packet is queued or we are told to flush */
  g_mutex_lock (&self->lock);
  while (g_queue_is_empty (&self->current_packets) && !self->flushing) {
    g_cond_wait (&self->cond, &self->lock);
  }

  p = (CapturePacket *) g_queue_pop_head (&self->current_packets);
  g_mutex_unlock (&self->lock);

  if (self->flushing) {
    if (p)
      capture_packet_free (p);
    GST_DEBUG_OBJECT (self, "Flushing");
    return GST_FLOW_FLUSHING;
  }

  p->packet->GetBytes ((gpointer *) & data);
  sample_count = p->packet->GetSampleFrameCount ();
  data_size = self->info.bpf * sample_count;

  /* next_offset == -1 means "no running sample count yet"; without either a
   * capture time or a running count we cannot timestamp this packet */
  if (p->capture_time == GST_CLOCK_TIME_NONE
      && self->next_offset == (guint64) - 1) {
    GST_DEBUG_OBJECT (self,
        "Got packet without timestamp before initial "
        "timestamp after discont - dropping");
    capture_packet_free (p);
    goto retry;
  }

  /* wrap the raw packet bytes; ap keeps refs on the packet and the input
   * so the memory stays valid for the buffer's lifetime */
  ap = (AudioPacket *) g_malloc0 (sizeof (AudioPacket));
  *buffer =
      gst_buffer_new_wrapped_full ((GstMemoryFlags) GST_MEMORY_FLAG_READONLY,
      (gpointer) data, data_size, 0, data_size, ap,
      (GDestroyNotify) audio_packet_free);
  ap->packet = p->packet;
  p->packet->AddRef ();
  ap->input = self->input->input;
  ap->input->AddRef ();

  timestamp = p->capture_time;
  discont = p->discont;

  // Jitter and discontinuity handling, based on audiobasesrc
  start_time = timestamp;

  // Convert to the sample numbers
  start_offset = gst_util_uint64_scale (start_time,
      self->info.rate, GST_SECOND);

  end_offset = start_offset + sample_count;
  end_time = gst_util_uint64_scale_int (end_offset, GST_SECOND,
      self->info.rate);

  duration = end_time - start_time;

  if (discont || self->next_offset == (guint64) - 1) {
    discont = TRUE;
  } else {
    guint64 diff, max_sample_diff;

    // Check discont
    /* absolute distance between where this packet starts and where we
     * expected the next sample to be */
    if (start_offset <= self->next_offset)
      diff = self->next_offset - start_offset;
    else
      diff = start_offset - self->next_offset;

    max_sample_diff =
        gst_util_uint64_scale_int (self->alignment_threshold, self->info.rate,
        GST_SECOND);

    // Discont!
    if (G_UNLIKELY (diff >= max_sample_diff)) {
      if (self->discont_wait > 0) {
        /* tolerate the drift for discont_wait before declaring a discont */
        if (self->discont_time == GST_CLOCK_TIME_NONE) {
          self->discont_time = start_time;
        } else if (start_time - self->discont_time >= self->discont_wait) {
          discont = TRUE;
          self->discont_time = GST_CLOCK_TIME_NONE;
        }
      } else {
        discont = TRUE;
      }
    } else if (G_UNLIKELY (self->discont_time != GST_CLOCK_TIME_NONE)) {
      // we have had a discont, but are now back on track!
      self->discont_time = GST_CLOCK_TIME_NONE;
    }
  }

  if (discont) {
    // Have discont, need resync and use the capture timestamps
    if (self->next_offset != (guint64) - 1)
      GST_INFO_OBJECT (self, "Have discont. Expected %"
          G_GUINT64_FORMAT ", got %" G_GUINT64_FORMAT, self->next_offset,
          start_offset);
    GST_BUFFER_FLAG_SET (*buffer, GST_BUFFER_FLAG_DISCONT);
    self->next_offset = end_offset;
    // Got a discont and adjusted, reset the discont_time marker.
    self->discont_time = GST_CLOCK_TIME_NONE;
  } else {
    // No discont, just keep counting
    /* derive a perfectly contiguous timestamp/duration from the running
     * sample counter instead of the jittery capture time */
    timestamp =
        gst_util_uint64_scale (self->next_offset, GST_SECOND, self->info.rate);
    self->next_offset += sample_count;
    duration =
        gst_util_uint64_scale (self->next_offset, GST_SECOND,
        self->info.rate) - timestamp;
  }

  GST_BUFFER_TIMESTAMP (*buffer) = timestamp;
  GST_BUFFER_DURATION (*buffer) = duration;

  GST_DEBUG_OBJECT (self,
      "Outputting buffer %p with timestamp %" GST_TIME_FORMAT " and duration %"
      GST_TIME_FORMAT, *buffer, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (*buffer)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (*buffer)));

  capture_packet_free (p);

  return flow_ret;
}
/* Return the caps supported by the currently opened ALSA device, with
 * @filter applied by intersection.  The probe result is cached in
 * sink->cached_caps and reused until invalidated elsewhere.  Returns NULL
 * while the device is not open so the base class falls back to template
 * caps.  The object lock protects handle and cached_caps; note it is held
 * across the (potentially slow) format probe. */
static GstCaps *
gst_alsasink_getcaps (GstBaseSink * bsink, GstCaps * filter)
{
  GstElementClass *element_class;
  GstPadTemplate *pad_template;
  GstAlsaSink *sink = GST_ALSA_SINK (bsink);
  GstCaps *caps, *templ_caps;

  GST_OBJECT_LOCK (sink);
  if (sink->handle == NULL) {
    GST_OBJECT_UNLOCK (sink);
    GST_DEBUG_OBJECT (sink, "device not open, using template caps");
    return NULL;                /* base class will get template caps for us */
  }

  /* fast path: serve from the cache, intersecting with the filter if any */
  if (sink->cached_caps) {
    if (filter) {
      caps = gst_caps_intersect_full (filter, sink->cached_caps,
          GST_CAPS_INTERSECT_FIRST);
      GST_OBJECT_UNLOCK (sink);
      GST_LOG_OBJECT (sink, "Returning cached caps %" GST_PTR_FORMAT " with "
          "filter %" GST_PTR_FORMAT " applied: %" GST_PTR_FORMAT,
          sink->cached_caps, filter, caps);
      return caps;
    } else {
      caps = gst_caps_ref (sink->cached_caps);
      GST_OBJECT_UNLOCK (sink);
      GST_LOG_OBJECT (sink, "Returning cached caps %" GST_PTR_FORMAT, caps);
      return caps;
    }
  }

  element_class = GST_ELEMENT_GET_CLASS (sink);
  pad_template = gst_element_class_get_pad_template (element_class, "sink");
  if (pad_template == NULL) {
    GST_OBJECT_UNLOCK (sink);
    /* an alsasink without a sink template is a programming error */
    g_assert_not_reached ();
    return NULL;
  }

  /* probe the device, constrained to what the template allows */
  templ_caps = gst_pad_template_get_caps (pad_template);
  caps = gst_alsa_probe_supported_formats (GST_OBJECT (sink), sink->device,
      sink->handle, templ_caps);
  gst_caps_unref (templ_caps);

  if (caps) {
    sink->cached_caps = gst_caps_ref (caps);
  }

  GST_OBJECT_UNLOCK (sink);

  GST_INFO_OBJECT (sink, "returning caps %" GST_PTR_FORMAT, caps);

  if (filter) {
    GstCaps *intersection;

    intersection =
        gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (caps);
    return intersection;
  } else {
    return caps;
  }
}
/* Bus watch for a stream's fragment-download pipeline.
 *
 * On EOS: bumps the per-stream fragment counter, optionally reads the
 * source's measured download rate and re-evaluates the quality level, then
 * wakes the streaming task waiting on stream->cond.
 *
 * On ERROR: for a "Precondition Failed" soup error (server has not prepared
 * the fragment URL yet) the download pipeline is torn down and rebuilt to
 * retry; any other error is posted to the application and the stream is
 * stopped.
 *
 * Returns TRUE to keep the watch installed, FALSE to remove it.
 *
 * Fixes over the previous revision:
 *  - download_rate (guint64) was printed with %d (undefined behavior) and
 *    compared with ">= 0", which is always true for an unsigned type; it is
 *    now printed with G_GUINT64_FORMAT and validated against the -1 sentinel.
 *  - error->message was dereferenced without checking error for NULL.
 *  - the strings returned by gst_element_get_name() were leaked.
 *  - error/debug were leaked on the early returns in the retry path. */
static gboolean
gst_ss_demux_download_bus_cb(GstBus *bus, GstMessage *msg, gpointer data)
{
  GstSSDemuxStream *stream = (GstSSDemuxStream *)data;
  GstSSDemux *demux = stream->parent;

  switch (GST_MESSAGE_TYPE(msg)) {
    case GST_MESSAGE_EOS: {
      guint64 download_rate = (guint64) -1;   /* sentinel: rate unknown */
      gchar *src_name;

      GST_INFO_OBJECT (stream->pad, "received EOS on download pipe..");

      // increase the fragment count on EOS
      stream->frag_cnt++;

      src_name = gst_element_get_name (stream->urisrc);
      if (g_strrstr (src_name, "http")) {
        g_object_get (stream->urisrc, "download-rate", &download_rate, NULL);
        g_print("*********** '%s' download rate = %" G_GUINT64_FORMAT " bps **************\n",
            stream->name, download_rate);
      }
      g_free (src_name);

      /* only consider a switch when we actually measured a rate */
      if ((stream->type == SS_STREAM_VIDEO) &&
          (demux->ss_mode != SS_MODE_AONLY) &&
          (download_rate != (guint64) -1)) {
        if (stream->frag_cnt >= demux->fragments_cache) {
          /* for switching, we are considering video download rate only */
          demux->ss_mode =
              gst_ssm_parse_switch_qualitylevel (demux->parser, download_rate);
        }
      } else if (stream->type == SS_STREAM_AUDIO &&
          (demux->ss_mode == SS_MODE_AONLY)) {
        /* when video is not present using audio download rate to calculate
         * switching */
        demux->ss_mode =
            gst_ssm_parse_switch_qualitylevel (demux->parser, download_rate);
        if (demux->ss_mode != SS_MODE_AONLY) {
          g_print ("\n\nMoving to AV mode by audio considering audio download rate\n\n\n\n");
        }
      }

      /* wake up the streaming task waiting for this fragment */
      g_cond_signal (stream->cond);

#ifdef SIMULATE_AUDIO_ONLY
      /* when fragment count is multiple of 4, switch to audio only case */
      if ((stream->frag_cnt % 4 == 0) && (stream->type == SS_STREAM_VIDEO) &&
          GST_SSM_PARSE_IS_LIVE_PRESENTATION(demux->parser)) {
        g_print ("\n\t ######## Forcibly switching to audio only for testing ##########\n");
        demux->ss_mode = SS_MODE_AONLY;
      }
#endif
      GST_DEBUG_OBJECT (stream->pad, "Signalling eos condition...");
      GST_DEBUG_OBJECT (demux, "number of fragments downloaded = %d",
          stream->frag_cnt);
      break;
    }
    case GST_MESSAGE_ERROR: {
      GError *error = NULL;
      gchar *debug = NULL;
      gchar *src_name;

      src_name = gst_element_get_name (GST_MESSAGE_SRC(msg));
      g_print ("Error from %s\n", src_name);
      g_free (src_name);

      gst_message_parse_error( msg, &error, &debug );
      if (error)
        GST_ERROR_OBJECT (demux, "GST_MESSAGE_ERROR: error= %s\n",
            error->message);
      GST_ERROR_OBJECT (demux, "GST_MESSAGE_ERROR: debug = %s\n", debug);

      /* handling error, when client requests url, which is yet to be
       * prepared by server (guard against a NULL error) */
      if (error &&
          (!strncmp(error->message, "Precondition Failed",
                  strlen("Precondition Failed"))) && (5 == error->code)) {
        GstStateChangeReturn ret;

        /* wait for 1sec & request the url again */
        // TODO: need to make wait time as generic or Adding loop count to
        // request again & again
        GST_INFO_OBJECT (demux,
            "ERROR : code = %d, msg = %s, NEED to request again",
            error->code, error->message);
        usleep (1000000);  // 1 sec

        /* put the current pipeline to NULL state */
        gst_element_set_state (stream->pipe, GST_STATE_NULL);
        gst_element_get_state (stream->pipe, NULL, NULL, GST_CLOCK_TIME_NONE);
        stream->pipe = stream->urisrc = stream->parser = stream->sink = NULL;

        g_print ("Going to download fragment AGAIN : %s\n", stream->uri);
        if (!gst_ss_demux_create_download_pipe (demux, stream, stream->uri,
                stream->start_ts)) {
          GST_ERROR_OBJECT (demux, "failed to create download pipeline");
          if (!gst_element_post_message (GST_ELEMENT(demux), msg)) {
            GST_ERROR_OBJECT (demux, "failed to post error");
            g_free (debug);
            g_error_free (error);
            return FALSE;
          }
        }

        ret = gst_element_set_state (stream->pipe, GST_STATE_PLAYING);
        if (ret == GST_STATE_CHANGE_FAILURE) {
          if (!gst_element_post_message (GST_ELEMENT(demux), msg)) {
            GST_ERROR_OBJECT (demux, "failed to post error");
            g_free (debug);
            g_error_free (error);
            return FALSE;
          }
        }
      } else {
        if (error)
          g_print ("GST_MESSAGE_ERROR: error= %s\n", error->message);
        g_print ("GST_MESSAGE_ERROR: debug = %s\n", debug);
        if (!gst_element_post_message (GST_ELEMENT(demux), msg)) {
          GST_ERROR_OBJECT (demux, "failed to post error");
          gst_ss_demux_stop (demux, stream);
          g_free( debug);
          debug = NULL;
          if (error)
            g_error_free( error);
          return FALSE;
        }
        gst_ss_demux_stop (demux, stream);
      }

      g_free( debug);
      debug = NULL;
      if (error)
        g_error_free( error);
      break;
    }
    case GST_MESSAGE_WARNING: {
      char* debug = NULL;
      GError* error = NULL;

      gst_message_parse_warning(msg, &error, &debug);
      if (error)
        GST_WARNING_OBJECT(demux, "warning : %s\n", error->message);
      GST_WARNING_OBJECT(demux, "debug : %s\n", debug);
      if (error)
        g_error_free( error );
      g_free( debug);
      break;
    }
    default : {
      GST_LOG_OBJECT(demux, "unhandled message : %s\n",
          gst_message_type_get_name (GST_MESSAGE_TYPE (msg)));
      break;
    }
  }

  return TRUE;
}
/* Build and start the camerabin test pipeline: create camerabin, install
 * the bus sync handler, plug the configured source/encoder/sink elements,
 * apply the requested colorspace/format filter caps, program resolution and
 * frame rate for the active mode, and bring the pipeline to PLAYING.
 * Returns FALSE (after cleanup_pipeline()) on any failure. */
static gboolean
setup_pipeline (void)
{
  GstBus *bus;
  gboolean res = TRUE;
  GstElement *vmux = NULL, *ienc = NULL, *sink = NULL, *aenc = NULL, *ipp =
      NULL;
  GstCaps *filter_caps = NULL;

  camera_bin = gst_element_factory_make ("camerabin", NULL);
  if (NULL == camera_bin) {
    g_warning ("can't create camerabin element\n");
    goto error;
  }

  g_object_set (camera_bin, "flags", flags, NULL);

  /* notification of finished image captures */
  g_signal_connect (camera_bin, "image-done", (GCallback) img_capture_done,
      NULL);

  bus = gst_pipeline_get_bus (GST_PIPELINE (camera_bin));
  /* handle messages synchronously on the streaming thread */
  gst_bus_set_sync_handler (bus, bus_callback, NULL);
  gst_object_unref (bus);

  GST_INFO_OBJECT (camera_bin, "camerabin created");

  /* configure used elements */
  res &= setup_pipeline_element ("viewfinder-sink", vfsink_name, &sink);
  res &= setup_pipeline_element ("audio-source", audiosrc_name, NULL);
  res &= setup_pipeline_element ("video-source", videosrc_name, NULL);
  res &= setup_pipeline_element ("video-source-filter", video_src_filter, NULL);
  res &= setup_pipeline_element ("viewfinder-filter", viewfinder_filter, NULL);

  if (audioenc_name) {
    aenc = create_audioencoder_bin ();
    if (aenc)
      g_object_set (camera_bin, "audio-encoder", aenc, NULL);
    else
      GST_WARNING ("Could not make audio encoder element");
  }

  res &= setup_pipeline_element ("video-encoder", videoenc_name, NULL);
  res &= setup_pipeline_element ("image-encoder", imageenc_name, &ienc);
  res &= setup_pipeline_element ("image-post-processing", imagepp_name, &ipp);
  res &= setup_pipeline_element ("video-muxer", videomux_name, &vmux);
  if (!res) {
    goto error;
  }
  GST_INFO_OBJECT (camera_bin, "elements created");

  /* set properties */
  if (src_format) {
    filter_caps = gst_caps_from_string (src_format);
  } else if (src_csp && strlen (src_csp) == 4) {
    /* Set requested colorspace format, this is needed if the default
       colorspace negotiated for viewfinder doesn't match with e.g. encoders. */
    filter_caps = gst_caps_new_simple ("video/x-raw-yuv", "format",
        GST_TYPE_FOURCC, GST_MAKE_FOURCC (src_csp[0], src_csp[1], src_csp[2],
            src_csp[3]), NULL);
  }

  if (filter_caps) {
    g_object_set (camera_bin, "filter-caps", filter_caps, NULL);
    gst_caps_unref (filter_caps);
  }

  /* NOTE(review): sink may still be NULL here if setup_pipeline_element()
   * can succeed without creating the element - confirm before relying on
   * this g_object_set */
  g_object_set (sink, "sync", TRUE, NULL);

  GST_INFO_OBJECT (camera_bin, "elements configured");

  /* configure a resolution and framerate */
  if (mode == 1) {
    g_signal_emit_by_name (camera_bin, "set-video-resolution-fps", image_width,
        image_height, view_framerate_num, view_framerate_den, NULL);
  } else {
    g_signal_emit_by_name (camera_bin, "set-image-resolution", image_width,
        image_height, NULL);
  }

  if (GST_STATE_CHANGE_FAILURE ==
      gst_element_set_state (camera_bin, GST_STATE_READY)) {
    g_warning ("can't set camerabin to ready\n");
    goto error;
  }
  GST_INFO_OBJECT (camera_bin, "camera ready");

  if (GST_STATE_CHANGE_FAILURE ==
      gst_element_set_state (camera_bin, GST_STATE_PLAYING)) {
    g_warning ("can't set camerabin to playing\n");
    goto error;
  }

  GST_INFO_OBJECT (camera_bin, "camera started");
  return TRUE;
error:
  cleanup_pipeline ();
  return FALSE;
}
/* Source pad event handler.  Only GST_EVENT_SEEK is handled here: for a
 * non-live presentation with a TIME-format seek, all stream tasks are
 * stopped, flush-start/stop is pushed downstream as requested, the manifest
 * position is updated and the tasks are restarted.  All other events go to
 * gst_pad_event_default().
 *
 * Fixes over the previous revision:
 *  - the demux reference returned by gst_pad_get_parent() was leaked on
 *    every path;
 *  - the event was leaked on all handled (TRUE/FALSE) return paths
 *    (the pad event function owns the event);
 *  - assignment-in-condition made explicit. */
static gboolean
gst_ss_demux_handle_src_event (GstPad * pad, GstEvent * event)
{
  GstSSDemux *demux = GST_SS_DEMUX (gst_pad_get_parent (pad));

  switch (event->type) {
    case GST_EVENT_SEEK:
    {
      gdouble rate;
      GstFormat format;
      GstSeekFlags flags;
      GstSeekType start_type, stop_type;
      gint64 start, stop;
      gint i = 0;
      GstSSDemuxStream *stream = NULL;

      GST_INFO_OBJECT (demux, "Received GST_EVENT_SEEK");

      // TODO: should be able to seek in DVR window
      if (GST_SSM_PARSE_IS_LIVE_PRESENTATION (demux->parser)) {
        GST_WARNING_OBJECT (demux, "Received seek event for live stream");
        goto not_handled;
      }

      gst_event_parse_seek (event, &rate, &format, &flags, &start_type,
          &start, &stop_type, &stop);

      if (format != GST_FORMAT_TIME) {
        GST_WARNING_OBJECT (demux, "Only time format is supported in seek");
        goto not_handled;
      }

      GST_DEBUG_OBJECT (demux,
          "seek event, rate: %f start: %" GST_TIME_FORMAT " stop: %"
          GST_TIME_FORMAT, rate, GST_TIME_ARGS (start), GST_TIME_ARGS (stop));

      /* wake up and stop all per-stream download tasks */
      for (i = 0; i < SS_STREAM_NUM; i++) {
        stream = demux->streams[i];
        if (stream != NULL) {
          g_cond_signal (stream->cond);
          gst_task_stop (stream->stream_task);
        }
      }

      if (flags & GST_SEEK_FLAG_FLUSH) {
        GST_INFO_OBJECT (demux, "sending flush start");
        for (i = 0; i < SS_STREAM_NUM; i++) {
          stream = demux->streams[i];
          if (stream != NULL) {
            gst_pad_push_event (stream->pad, gst_event_new_flush_start ());
          }
        }
      }

      /* reposition the manifest to the requested time */
      gst_ssm_parse_seek_manifest (demux->parser, start);

      if (flags & GST_SEEK_FLAG_FLUSH) {
        GST_INFO_OBJECT (demux, "sending flush stop");
        for (i = 0; i < SS_STREAM_NUM; i++) {
          stream = demux->streams[i];
          if (stream != NULL) {
            gst_pad_push_event (stream->pad, gst_event_new_flush_stop ());
            GST_LOG_OBJECT (stream->pad, "Starting pad TASK again...\n");
            stream->sent_ns = FALSE;
            stream->frag_cnt = 0;   /*resetting to start buffering on SEEK */
            gst_task_start (stream->stream_task);
          }
        }
      }

      /* handled: we consumed the event */
      gst_event_unref (event);
      gst_object_unref (demux);
      return TRUE;

    not_handled:
      gst_event_unref (event);
      gst_object_unref (demux);
      return FALSE;
    }
    default:
      break;
  }

  gst_object_unref (demux);
  /* default handler takes ownership of the event */
  return gst_pad_event_default (pad, event);
}
static gboolean gst_ss_demux_create_download_pipe (GstSSDemux * demux, GstSSDemuxStream *stream, const gchar * uri, guint64 start_ts) { gchar *name = NULL; GstCaps *caps = NULL; if (!gst_uri_is_valid (uri)) return FALSE; name = g_strdup_printf("%s-%s", stream->name, "downloader"); stream->pipe = gst_pipeline_new (name); if (!stream->pipe) { GST_ERROR_OBJECT (demux, "failed to create pipeline"); return FALSE; } g_free(name); name = g_strdup_printf("%s-%s", stream->name, "httpsrc"); GST_DEBUG ("Creating source element for the URI:%s", uri); stream->urisrc = gst_element_make_from_uri (GST_URI_SRC, uri, name); if (!stream->urisrc) { GST_ERROR_OBJECT (demux, "failed to create urisrc"); return FALSE; } g_free(name); if (GST_SSM_PARSE_IS_LIVE_PRESENTATION(demux->parser)) g_object_set (G_OBJECT (stream->urisrc), "is-live", TRUE, NULL); else g_object_set (G_OBJECT (stream->urisrc), "is-live", FALSE, NULL); name = g_strdup_printf("%s-%s", stream->name, "parser"); stream->parser = gst_element_factory_make ("piffdemux", name); if (!stream->parser) { GST_ERROR_OBJECT (demux, "failed to create piffdemux element"); return FALSE; } caps = ssm_parse_get_stream_caps (demux->parser, stream->type); GST_INFO_OBJECT (stream->pad, "prepare caps = %s", gst_caps_to_string(caps)); g_object_set (G_OBJECT (stream->parser), "caps", caps, NULL); g_object_set (G_OBJECT (stream->parser), "start-ts", start_ts, NULL); g_object_set (G_OBJECT (stream->parser), "duration", GST_SSM_PARSE_GET_DURATION(demux->parser), NULL); g_object_set (G_OBJECT (stream->parser), "is-live", GST_SSM_PARSE_IS_LIVE_PRESENTATION(demux->parser), NULL); g_object_set (G_OBJECT (stream->parser), "lookahead-count", GST_SSM_PARSE_LOOKAHEAD_COUNT(demux->parser), NULL); g_signal_connect (stream->parser, "live-param", G_CALLBACK (gst_ss_demux_append_live_params), stream); g_free(name); name = g_strdup_printf("%s-%s", stream->name, "sink"); stream->sink = gst_element_factory_make ("appsink", name); if (!stream->sink) { 
GST_ERROR_OBJECT (demux, "failed to create appsink element"); return FALSE; } g_object_set (G_OBJECT (stream->sink), "emit-signals", TRUE, "sync", FALSE, NULL); g_signal_connect (stream->sink, "new-buffer", G_CALLBACK (gst_ssm_demux_on_new_buffer), stream); g_free(name); gst_bin_add_many (GST_BIN (stream->pipe), stream->urisrc, stream->parser, stream->sink, NULL); if (!gst_element_link_many (stream->urisrc, stream->parser, stream->sink, NULL)) { GST_ERROR ("failed to link elements..."); return FALSE; } stream->bus = gst_pipeline_get_bus (GST_PIPELINE (stream->pipe)); gst_bus_add_watch (stream->bus, (GstBusFunc)gst_ss_demux_download_bus_cb, stream); gst_object_unref (stream->bus); return TRUE; }
/* Source pad task function.
 *
 * Started once the main playlist has been received from the source element.
 * First caches the initial fragments, then repeatedly pops one fragment off
 * demux->queue and pushes it on the source pad.  When the queue is empty the
 * task sleeps on GST_TASK_WAIT() and is woken up when a new fragment is
 * queued or when the end of the playlist is reached. */
static void
gst_hls_demux_loop (GstHLSDemux * demux)
{
  GstBuffer *buf;
  GstFlowReturn ret;

  if (G_UNLIKELY (demux->need_cache)) {
    /* first iteration: pre-buffer fragments before streaming starts */
    if (!gst_hls_demux_cache_fragments (demux))
      goto cache_error;

    /* we can start now the updates thread */
    gst_hls_demux_start_update (demux);
    GST_INFO_OBJECT (demux, "First fragments cached successfully");
  }

  if (g_queue_is_empty (demux->queue)) {
    if (demux->end_of_playlist)
      goto end_of_playlist;

    /* NOTE(review): GST_TASK_WAIT() requires the task lock to be held by the
     * task function — confirm the streaming task honours that here. */
    GST_TASK_WAIT (demux->task);
    /* If the queue is still empty check again if it's the end of the
     * playlist in case we reached it after being woken up */
    if (g_queue_is_empty (demux->queue) && demux->end_of_playlist)
      goto end_of_playlist;
  }

  /* ownership of the buffer transfers to the pad on push */
  buf = g_queue_pop_head (demux->queue);
  ret = gst_pad_push (demux->srcpad, buf);
  if (ret != GST_FLOW_OK)
    goto error;

  return;

end_of_playlist:
  {
    GST_DEBUG_OBJECT (demux, "Reached end of playlist, sending EOS");
    gst_pad_push_event (demux->srcpad, gst_event_new_eos ());
    gst_hls_demux_stop (demux);
    return;
  }

cache_error:
  {
    GST_ELEMENT_ERROR (demux, RESOURCE, NOT_FOUND,
        ("Could not cache the first fragments"), NULL);
    gst_hls_demux_stop (demux);
    return;
  }

error:
  {
    /* FIXME: handle error */
    gst_hls_demux_stop (demux);
    return;
  }
}
/* Save the current object property values as a named preset.
 *
 * Serializes every property reported by gst_preset_get_property_names()
 * into the type's keyfile under the group @name, then writes the presets
 * file to disk.  Returns TRUE if the file was stored successfully. */
static gboolean
gst_preset_default_save_preset (GstPreset * preset, const gchar * name)
{
  GKeyFile *keyfile;
  gchar **prop_names;
  guint idx;
  GObjectClass *klass;

  GST_INFO_OBJECT (preset, "saving new preset: %s", name);

  /* get the presets from the type */
  keyfile = preset_get_keyfile (preset);
  if (keyfile == NULL) {
    GST_WARNING_OBJECT (preset, "no presets");
    return FALSE;
  }

  /* take copies of current gobject properties from preset */
  prop_names = gst_preset_get_property_names (preset);
  if (prop_names == NULL) {
    GST_INFO_OBJECT (preset, "no properties");
    return FALSE;
  }

  klass = G_OBJECT_CLASS (GST_ELEMENT_GET_CLASS (preset));

  /* serialize each property value into the keyfile */
  for (idx = 0; prop_names[idx]; idx++) {
    GParamSpec *pspec;
    GValue val = { 0, };
    gchar *serialized;

    /* FIXME, change for childproxy to get the property and element. */
    pspec = g_object_class_find_property (klass, prop_names[idx]);
    if (pspec == NULL) {
      /* the element said it supported the property but then it does not have
       * that property. This should not happen. */
      GST_WARNING_OBJECT (preset, "property '%s' not in object",
          prop_names[idx]);
      continue;
    }

    g_value_init (&val, pspec->value_type);
    /* FIXME, change for childproxy */
    g_object_get_property (G_OBJECT (preset), prop_names[idx], &val);

    serialized = gst_value_serialize (&val);
    if (serialized != NULL) {
      g_key_file_set_string (keyfile, name, prop_names[idx],
          (gpointer) serialized);
      g_free (serialized);
    } else {
      GST_WARNING_OBJECT (preset, "serialization for property '%s' failed",
          prop_names[idx]);
    }
    g_value_unset (&val);
  }

  GST_INFO_OBJECT (preset, " saved");
  g_strfreev (prop_names);

  /* save updated version */
  return gst_preset_default_save_presets_file (preset);
}
/* Save the presets file to disk.  A copy of the existing presets file is
 * first stored in a .bak file; if the old backup cannot be removed the
 * rename is skipped so the current file is not clobbered.
 *
 * Returns TRUE when the keyfile contents were written successfully. */
static gboolean
gst_preset_default_save_presets_file (GstPreset * preset)
{
  GKeyFile *presets;
  const gchar *preset_path;
  GError *error = NULL;
  gchar *bak_file_name;
  gboolean backup = TRUE;
  gchar *data;
  gsize data_size;

  preset_get_paths (preset, &preset_path, NULL);

  /* get the presets from the type */
  if (!(presets = preset_get_keyfile (preset)))
    goto no_presets;

  GST_DEBUG_OBJECT (preset, "saving preset file: '%s'", preset_path);

  /* create backup if possible: remove a stale .bak first, then rename the
   * live file over it */
  bak_file_name = g_strdup_printf ("%s.bak", preset_path);
  if (g_file_test (bak_file_name, G_FILE_TEST_EXISTS)) {
    if (g_unlink (bak_file_name)) {
      backup = FALSE;
      GST_INFO_OBJECT (preset, "cannot remove old backup file : %s",
          bak_file_name);
    }
  }
  if (backup) {
    if (g_rename (preset_path, bak_file_name)) {
      GST_INFO_OBJECT (preset, "cannot backup file : %s -> %s", preset_path,
          bak_file_name);
    }
  }
  g_free (bak_file_name);

  /* update gstreamer version */
  g_key_file_set_string (presets, PRESET_HEADER, PRESET_HEADER_VERSION,
      PACKAGE_VERSION);

  /* get new contents, we need this to save it */
  if (!(data = g_key_file_to_data (presets, &data_size, &error)))
    goto convert_failed;

  /* write presets */
  if (!g_file_set_contents (preset_path, data, data_size, &error))
    goto write_failed;

  g_free (data);
  return TRUE;

  /* ERRORS */
no_presets:
  {
    GST_WARNING_OBJECT (preset,
        "no presets, trying to unlink possibly existing preset file: '%s'",
        preset_path);
    g_unlink (preset_path);
    return FALSE;
  }
convert_failed:
  {
    /* data is NULL here; g_free(NULL) is a no-op */
    GST_WARNING_OBJECT (preset, "can not get the keyfile contents: %s",
        error->message);
    g_error_free (error);
    g_free (data);
    return FALSE;
  }
write_failed:
  {
    GST_WARNING_OBJECT (preset, "Unable to store preset file %s: %s",
        preset_path, error->message);
    g_error_free (error);
    g_free (data);
    return FALSE;
  }
}
/******************************************************
 * gst_v4l2_open():
 *   open the video device (v4l2object->videodev)
 *   return value: TRUE on success, FALSE on error
 ******************************************************/
gboolean
gst_v4l2_open (GstV4l2Object * v4l2object)
{
  struct stat st;
  int libv4l2_fd;
  GstPollFD pollfd = GST_POLL_FD_INIT;

  GST_DEBUG_OBJECT (v4l2object->element, "Trying to open device %s",
      v4l2object->videodev);

  GST_V4L2_CHECK_NOT_OPEN (v4l2object);
  GST_V4L2_CHECK_NOT_ACTIVE (v4l2object);

  /* be sure we have a device */
  if (!v4l2object->videodev)
    v4l2object->videodev = g_strdup ("/dev/video");

  /* check if it is a device (must exist and be a character device) */
  if (stat (v4l2object->videodev, &st) == -1)
    goto stat_failed;

  if (!S_ISCHR (st.st_mode))
    goto no_device;

  /* open the device */
  v4l2object->video_fd =
      open (v4l2object->videodev, O_RDWR /* | O_NONBLOCK */ );

  if (!GST_V4L2_IS_OPEN (v4l2object))
    goto not_open;

  libv4l2_fd = v4l2_fd_open (v4l2object->video_fd,
      V4L2_ENABLE_ENUM_FMT_EMULATION);
  /* Note the v4l2_xxx functions are designed so that if they get passed an
     unknown fd, the will behave exactly as their regular xxx counterparts, so
     if v4l2_fd_open fails, we continue as normal (missing the libv4l2 custom
     cam format to normal formats conversion). Chances are big we will still
     fail then though, as normally v4l2_fd_open only fails if the device is not
     a v4l2 device. */
  if (libv4l2_fd != -1)
    v4l2object->video_fd = libv4l2_fd;

  v4l2object->can_poll_device = TRUE;

  /* get capabilities, error will be posted */
  if (!gst_v4l2_get_capabilities (v4l2object))
    goto error;

  /* do we need to be a capture device? */
  if (GST_IS_V4L2SRC (v4l2object->element) &&
      !(v4l2object->vcap.capabilities & V4L2_CAP_VIDEO_CAPTURE))
    goto not_capture;

  if (GST_IS_V4L2SINK (v4l2object->element) &&
      !(v4l2object->vcap.capabilities & V4L2_CAP_VIDEO_OUTPUT))
    goto not_output;

  /* create enumerations, posts errors. */
  if (!gst_v4l2_fill_lists (v4l2object))
    goto error;

  GST_INFO_OBJECT (v4l2object->element,
      "Opened device '%s' (%s) successfully",
      v4l2object->vcap.card, v4l2object->videodev);

  /* register the fd for read polling */
  pollfd.fd = v4l2object->video_fd;
  gst_poll_add_fd (v4l2object->poll, &pollfd);
  gst_poll_fd_ctl_read (v4l2object->poll, &pollfd, TRUE);

  return TRUE;

  /* ERRORS */
stat_failed:
  {
    GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, NOT_FOUND,
        (_("Cannot identify device '%s'."), v4l2object->videodev),
        GST_ERROR_SYSTEM);
    goto error;
  }
no_device:
  {
    GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, NOT_FOUND,
        (_("This isn't a device '%s'."), v4l2object->videodev),
        GST_ERROR_SYSTEM);
    goto error;
  }
not_open:
  {
    GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, OPEN_READ_WRITE,
        (_("Could not open device '%s' for reading and writing."),
            v4l2object->videodev), GST_ERROR_SYSTEM);
    goto error;
  }
not_capture:
  {
    GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, NOT_FOUND,
        (_("Device '%s' is not a capture device."),
            v4l2object->videodev),
        ("Capabilities: 0x%x", v4l2object->vcap.capabilities));
    goto error;
  }
not_output:
  {
    GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, NOT_FOUND,
        (_("Device '%s' is not a output device."),
            v4l2object->videodev),
        ("Capabilities: 0x%x", v4l2object->vcap.capabilities));
    goto error;
  }
error:
  {
    /* common cleanup: close the (possibly libv4l2-wrapped) fd and drop any
     * enumerations built so far */
    if (GST_V4L2_IS_OPEN (v4l2object)) {
      /* close device */
      v4l2_close (v4l2object->video_fd);
      v4l2object->video_fd = -1;
    }
    /* empty lists */
    gst_v4l2_empty_lists (v4l2object);

    return FALSE;
  }
}
/* Push (a writable copy of) the oldest stored buffer downstream.
 *
 * @duplicate: TRUE when this push repeats the previous frame to fill a gap;
 * the buffer is then flagged GST_BUFFER_FLAG_GAP.  Updates the output
 * frame counters and interpolates the next expected timestamp when a
 * target framerate is configured.  Returns the pad-push flow result, or
 * GST_FLOW_OK when no buffer has been received yet. */
static GstFlowReturn
gst_video_rate_flush_prev (GstVideoRate * videorate, gboolean duplicate)
{
  GstBuffer *buffer;
  GstClockTime out_ts;

  if (videorate->prevbuf == NULL) {
    GST_INFO_OBJECT (videorate, "got EOS before any buffer was received");
    return GST_FLOW_OK;
  }

  /* take a ref and make it writable so we may touch the metadata */
  buffer = gst_buffer_make_writable (gst_buffer_ref (videorate->prevbuf));

  GST_BUFFER_OFFSET (buffer) = videorate->out;
  GST_BUFFER_OFFSET_END (buffer) = videorate->out + 1;

  if (videorate->discont) {
    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);
    videorate->discont = FALSE;
  } else {
    GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DISCONT);
  }

  if (duplicate)
    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_GAP);
  else
    GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_GAP);

  /* this is the timestamp we put on the buffer */
  out_ts = videorate->next_ts;

  videorate->out++;
  videorate->out_frame_count++;

  if (videorate->to_rate_numerator) {
    /* interpolate next expected timestamp in the segment */
    videorate->next_ts = videorate->segment.base + videorate->segment.start +
        videorate->base_ts +
        gst_util_uint64_scale (videorate->out_frame_count,
        videorate->to_rate_denominator * GST_SECOND,
        videorate->to_rate_numerator);
    GST_BUFFER_DURATION (buffer) = videorate->next_ts - out_ts;
  }

  /* We do not need to update time in VFR (variable frame rate) mode */
  if (!videorate->drop_only) {
    /* adapt for looping, bring back to time in current segment. */
    GST_BUFFER_TIMESTAMP (buffer) = out_ts - videorate->segment.base;
  }

  GST_LOG_OBJECT (videorate,
      "old is best, dup, pushing buffer outgoing ts %" GST_TIME_FORMAT,
      GST_TIME_ARGS (out_ts));

  return gst_pad_push (GST_BASE_TRANSFORM_SRC_PAD (videorate), buffer);
}
/**
 * gst_vaapi_plugin_base_decide_allocation:
 * @plugin: a #GstVaapiPluginBase
 * @query: the allocation query to parse
 * @feature: the desired #GstVaapiCapsFeature, or zero to find the
 *   preferred one
 *
 * Decides allocation parameters for the downstream elements.
 *
 * Returns: %TRUE if successful, %FALSE otherwise.
 */
gboolean
gst_vaapi_plugin_base_decide_allocation (GstVaapiPluginBase * plugin,
    GstQuery * query, guint feature)
{
  GstCaps *caps = NULL;
  GstBufferPool *pool;
  GstStructure *config;
  GstVideoInfo vi;
  guint size, min, max;
  gboolean update_pool = FALSE;
  gboolean has_video_meta = FALSE;
  gboolean has_video_alignment = FALSE;
#if (USE_GLX || USE_EGL)
  gboolean has_texture_upload_meta = FALSE;
  guint idx;
#endif

  g_return_val_if_fail (plugin->display != NULL, FALSE);

  gst_query_parse_allocation (query, &caps, NULL);

  /* We don't need any GL context beyond this point if not requested
     so explicitly through GstVideoGLTextureUploadMeta */
  gst_object_replace (&plugin->gl_context, NULL);

  if (!caps)
    goto error_no_caps;

  if (!feature)
    feature =
        gst_vaapi_find_preferred_caps_feature (plugin->srcpad,
        GST_VIDEO_FORMAT_ENCODED, NULL);

  has_video_meta = gst_query_find_allocation_meta (query,
      GST_VIDEO_META_API_TYPE, NULL);

#if (USE_GLX || USE_EGL)
  /* texture upload is only honoured when downstream advertises the meta AND
   * the negotiated caps feature actually is GL texture upload */
  has_texture_upload_meta = gst_query_find_allocation_meta (query,
      GST_VIDEO_GL_TEXTURE_UPLOAD_META_API_TYPE, &idx) &&
      (feature == GST_VAAPI_CAPS_FEATURE_GL_TEXTURE_UPLOAD_META);

#if USE_GST_GL_HELPERS
  if (has_texture_upload_meta) {
    const GstStructure *params;
    GstObject *gl_context;

    /* pick up the GL context downstream attached to the meta's params */
    gst_query_parse_nth_allocation_meta (query, idx, &params);
    if (params) {
      if (gst_structure_get (params, "gst.gl.GstGLContext", GST_GL_TYPE_CONTEXT,
              &gl_context, NULL) && gl_context) {
        gst_vaapi_plugin_base_set_gl_context (plugin, gl_context);
        gst_object_unref (gl_context);
      }
    }
  }
#endif
#endif

  /* Make sure the display we pass down to the buffer pool is actually
     the expected one, especially when the downstream element requires
     a GLX or EGL display */
  if (!gst_vaapi_plugin_base_ensure_display (plugin))
    goto error_ensure_display;

  gst_video_info_init (&vi);
  gst_video_info_from_caps (&vi, caps);
  /* ENCODED has no defined size; substitute I420 to get a usable frame size */
  if (GST_VIDEO_INFO_FORMAT (&vi) == GST_VIDEO_FORMAT_ENCODED)
    gst_video_info_set_format (&vi, GST_VIDEO_FORMAT_I420,
        GST_VIDEO_INFO_WIDTH (&vi), GST_VIDEO_INFO_HEIGHT (&vi));

  if (gst_query_get_n_allocation_pools (query) > 0) {
    gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);
    update_pool = TRUE;
    size = MAX (size, vi.size);
    if (pool) {
      /* Check whether downstream element proposed a bufferpool but did
         not provide a correct propose_allocation() implementation */
      has_video_alignment = gst_buffer_pool_has_option (pool,
          GST_BUFFER_POOL_OPTION_VIDEO_ALIGNMENT);
    }
  } else {
    pool = NULL;
    size = vi.size;
    min = max = 0;
  }

  /* GstVaapiVideoMeta is mandatory, and this implies VA surface memory */
  if (!pool || !gst_buffer_pool_has_option (pool,
          GST_BUFFER_POOL_OPTION_VAAPI_VIDEO_META)) {
    GST_INFO_OBJECT (plugin, "%s. Making a new pool", pool == NULL ?
        "No pool" : "Pool hasn't GstVaapiVideoMeta");
    if (pool)
      gst_object_unref (pool);
    pool = gst_vaapi_video_buffer_pool_new (plugin->display);
    if (!pool)
      goto error_create_pool;

    config = gst_buffer_pool_get_config (pool);
    gst_buffer_pool_config_set_params (config, caps, size, min, max);
    gst_buffer_pool_config_add_option (config,
        GST_BUFFER_POOL_OPTION_VAAPI_VIDEO_META);
    if (!gst_buffer_pool_set_config (pool, config)) {
      /* the pool may have adjusted the config; retry with a validated one */
      config = gst_buffer_pool_get_config (pool);

      if (!gst_buffer_pool_config_validate_params (config, caps, size, min,
              max)) {
        gst_structure_free (config);
        goto config_failed;
      }

      if (!gst_buffer_pool_set_config (pool, config))
        goto config_failed;
    }
  }

  /* Check whether GstVideoMeta, or GstVideoAlignment, is needed (raw video) */
  if (has_video_meta) {
    if (!gst_vaapi_plugin_base_set_pool_config (pool,
            GST_BUFFER_POOL_OPTION_VIDEO_META))
      goto config_failed;
  } else if (has_video_alignment) {
    if (!gst_vaapi_plugin_base_set_pool_config (pool,
            GST_BUFFER_POOL_OPTION_VIDEO_ALIGNMENT))
      goto config_failed;
  }

  /* GstVideoGLTextureUploadMeta (OpenGL) */
#if (USE_GLX || USE_EGL)
  if (has_texture_upload_meta) {
    if (!gst_vaapi_plugin_base_set_pool_config (pool,
            GST_BUFFER_POOL_OPTION_VIDEO_GL_TEXTURE_UPLOAD_META))
      goto config_failed;
  }
#endif

  if (update_pool)
    gst_query_set_nth_allocation_pool (query, 0, pool, size, min, max);
  else
    gst_query_add_allocation_pool (query, pool, size, min, max);

  /* keep the pool; the plugin owns this reference from now on */
  g_clear_object (&plugin->srcpad_buffer_pool);
  plugin->srcpad_buffer_pool = pool;
  return TRUE;

  /* ERRORS */
error_no_caps:
  {
    GST_ERROR_OBJECT (plugin, "no caps specified");
    return FALSE;
  }
error_ensure_display:
  {
    GST_ERROR_OBJECT (plugin, "failed to ensure display of type %d",
        plugin->display_type_req);
    return FALSE;
  }
error_create_pool:
  {
    GST_ERROR_OBJECT (plugin, "failed to create buffer pool");
    return FALSE;
  }
config_failed:
  {
    if (pool)
      gst_object_unref (pool);
    GST_ELEMENT_ERROR (plugin, RESOURCE, SETTINGS,
        ("Failed to configure the buffer pool"),
        ("Configuration is most likely invalid, please report this issue."));
    return FALSE;
  }
}
/* Tune the DVB frontend according to the current object settings.
 *
 * Makes up to 5 tuning attempts; after each FE_SET_FRONTEND (or
 * DVBFE_SET_PARAMS with the multiproto driver) it polls FE_READ_STATUS
 * for up to ~5 seconds waiting for FE_HAS_LOCK.  On lock, (re)installs
 * the PES filters.  Returns TRUE when the frontend locked.
 *
 * Fix vs. previous revision: @status was read uninitialized when the very
 * first FE_READ_STATUS ioctl failed (undefined behavior); it is now
 * zero-initialized so a failed status read counts as "no lock".
 */
static gboolean
gst_dvbsrc_tune (GstDvbSrc * object)
{
#if DVB_API_VERSION == 3 && DVB_API_VERSION_MINOR == 3
  struct dvbfe_params feparams;
#else
  struct dvb_frontend_parameters feparams;
#endif
  fe_sec_voltage_t voltage;
  fe_status_t status = 0;       /* must not be read uninitialized below */
  int i;
  int j;
  unsigned int freq = object->freq;
  unsigned int sym_rate = object->sym_rate * 1000;

  /* found in mail archive on linuxtv.org
   * What works well for us is:
   * - first establish a TS feed (i.e. tune the frontend and check for success)
   * - then set filters (PES/sections)
   * - then tell the MPEG decoder to start
   * - before tuning: first stop the MPEG decoder, then stop all filters
   */
  GST_INFO_OBJECT (object, "gst_dvbsrc_tune");

  if (object->fd_frontend < 0) {
    /* frontend not opened yet, tune later */
    GST_INFO_OBJECT (object, "Frontend not open: tuning later");
    return FALSE;
  }

  gst_dvbsrc_unset_pes_filters (object);
  for (j = 0; j < 5; j++) {
    switch (object->adapter_type) {
      case FE_QPSK:
        object->tone = SEC_TONE_OFF;
        if (freq > 2200000) {
          /* this must be an absolute frequency */
          if (freq < SLOF) {
            feparams.frequency = (freq - LOF1);
          } else {
            feparams.frequency = (freq - LOF2);
            object->tone = SEC_TONE_ON;
          }
        } else {
          /* this is an L-Band frequency */
          feparams.frequency = freq;
        }
        feparams.inversion = INVERSION_AUTO;
        GST_DEBUG_OBJECT (object, "api version %d.%d", DVB_API_VERSION,
            DVB_API_VERSION_MINOR);
#if DVB_API_VERSION == 3 && DVB_API_VERSION_MINOR == 3
        GST_DEBUG_OBJECT (object, "using multiproto driver");
        feparams.delsys.dvbs.symbol_rate = sym_rate;
        feparams.delsys.dvbs.fec = object->code_rate_hp;
#else
        feparams.u.qpsk.symbol_rate = sym_rate;
        feparams.u.qpsk.fec_inner = object->code_rate_hp;
#endif
        GST_INFO_OBJECT (object,
            "tuning DVB-S to L-Band:%u, Pol:%d, srate=%u, 22kHz=%s",
            feparams.frequency, object->pol, sym_rate,
            object->tone == SEC_TONE_ON ? "on" : "off");

        if (object->pol == DVB_POL_H)
          voltage = SEC_VOLTAGE_18;
        else
          voltage = SEC_VOLTAGE_13;

        if (object->diseqc_src == -1 || object->send_diseqc == FALSE) {
          if (ioctl (object->fd_frontend, FE_SET_VOLTAGE, voltage) < 0) {
            g_warning ("Unable to set voltage on dvb frontend device");
          }
          if (ioctl (object->fd_frontend, FE_SET_TONE, object->tone) < 0) {
            g_warning ("Error setting tone: %s", strerror (errno));
          }
        } else {
          GST_DEBUG_OBJECT (object, "Sending DISEqC");
          diseqc (object->fd_frontend, object->diseqc_src, voltage,
              object->tone);
          /* Once diseqc source is set, do not set it again until
           * app decides to change it */
          /* object->send_diseqc = FALSE; */
        }
        break;
      case FE_OFDM:
        feparams.frequency = freq;
#if DVB_API_VERSION == 3 && DVB_API_VERSION_MINOR == 3
        feparams.delsys.dvbs.fec = object->code_rate_hp;
        feparams.delsys.dvbs.modulation = object->modulation;
        feparams.delsys.dvbs.symbol_rate = sym_rate;
#else
        feparams.u.ofdm.bandwidth = object->bandwidth;
        feparams.u.ofdm.code_rate_HP = object->code_rate_hp;
        feparams.u.ofdm.code_rate_LP = object->code_rate_lp;
        feparams.u.ofdm.constellation = object->modulation;
        feparams.u.ofdm.transmission_mode = object->transmission_mode;
        feparams.u.ofdm.guard_interval = object->guard_interval;
        feparams.u.ofdm.hierarchy_information = object->hierarchy_information;
#endif
        feparams.inversion = object->inversion;
        GST_INFO_OBJECT (object, "tuning DVB-T to %d Hz", freq);
        break;
      case FE_QAM:
        GST_INFO_OBJECT (object, "Tuning DVB-C to %d, srate=%d", freq,
            sym_rate);
        feparams.frequency = freq;
        feparams.inversion = object->inversion;
#if DVB_API_VERSION == 3 && DVB_API_VERSION_MINOR == 3
        feparams.delsys.dvbs.fec = object->code_rate_hp;
        feparams.delsys.dvbs.modulation = object->modulation;
        feparams.delsys.dvbs.symbol_rate = sym_rate;
#else
        feparams.u.qam.fec_inner = object->code_rate_hp;
        feparams.u.qam.modulation = object->modulation;
        feparams.u.qam.symbol_rate = sym_rate;
#endif
        break;
      case FE_ATSC:
        GST_INFO_OBJECT (object, "Tuning ATSC to %d", freq);
        feparams.frequency = freq;
#if DVB_API_VERSION == 3 && DVB_API_VERSION_MINOR == 3
        feparams.delsys.atsc.modulation = object->modulation;
#else
        feparams.u.vsb.modulation = object->modulation;
#endif
        break;
      default:
        g_error ("Unknown frontend type: %d", object->adapter_type);
    }
    usleep (100000);

    /* now tune the frontend */
#if DVB_API_VERSION == 3 && DVB_API_VERSION_MINOR == 3
    if (ioctl (object->fd_frontend, DVBFE_SET_PARAMS, &feparams) < 0) {
#else
    if (ioctl (object->fd_frontend, FE_SET_FRONTEND, &feparams) < 0) {
#endif
      g_warning ("Error tuning channel: %s", strerror (errno));
    }

    /* poll for lock: 50 x 100ms */
    for (i = 0; i < 50; i++) {
      usleep (100000);
      if (ioctl (object->fd_frontend, FE_READ_STATUS, &status) == -1) {
        perror ("FE_READ_STATUS");
        break;
      }
      GST_LOG_OBJECT (object, "status == 0x%02x", status);
      if (status & FE_HAS_LOCK)
        break;
    }
    if (status & FE_HAS_LOCK)
      break;
  }
  if (!(status & FE_HAS_LOCK))
    return FALSE;
  /* set pid filters */
  gst_dvbsrc_set_pes_filters (object);

  return TRUE;
}

/* Close every open PES filter fd and mark its slot unused (-1). */
static void
gst_dvbsrc_unset_pes_filters (GstDvbSrc * object)
{
  int i = 0;

  GST_INFO_OBJECT (object, "clearing PES filter");

  for (i = 0; i < MAX_FILTERS; i++) {
    if (object->fd_filters[i] == -1)
      continue;
    close (object->fd_filters[i]);
    object->fd_filters[i] = -1;
  }
}
/* Open (or adopt) the UDP receive socket for @src.
 *
 * Either allocates a fresh socket bound to src->address:src->port, or adopts
 * the application-provided src->socket.  Also applies the receive-buffer
 * size, enables broadcast, optionally joins a multicast group, and notifies
 * "port" if the kernel assigned a different port than requested.
 * Returns TRUE on success; on failure posts an element error (except for
 * name resolution, which is assumed to have posted already) and returns
 * FALSE. */
static gboolean
gst_udpsrc_open (GstUDPSrc * src)
{
  GInetAddress *addr, *bind_addr;
  GSocketAddress *bind_saddr;
  GError *err = NULL;

  gst_udpsrc_create_cancellable (src);

  if (src->socket == NULL) {
    /* need to allocate a socket */
    GST_DEBUG_OBJECT (src, "allocating socket for %s:%d", src->address,
        src->port);

    addr = gst_udpsrc_resolve (src, src->address);
    if (!addr)
      goto name_resolve;

    if ((src->used_socket =
            g_socket_new (g_inet_address_get_family (addr),
                G_SOCKET_TYPE_DATAGRAM, G_SOCKET_PROTOCOL_UDP, &err)) == NULL)
      goto no_socket;

    src->external_socket = FALSE;

    GST_DEBUG_OBJECT (src, "got socket %p", src->used_socket);

    if (src->addr)
      g_object_unref (src->addr);
    src->addr =
        G_INET_SOCKET_ADDRESS (g_inet_socket_address_new (addr, src->port));

    GST_DEBUG_OBJECT (src, "binding on port %d", src->port);

    /* On Windows it's not possible to bind to a multicast address
     * but the OS will make sure to filter out all packets that
     * arrive not for the multicast address the socket joined.
     *
     * On Linux and others it is necessary to bind to a multicast
     * address to let the OS filter out all packets that are received
     * on the same port but for different addresses than the multicast
     * address
     */
#ifdef G_OS_WIN32
    if (g_inet_address_get_is_multicast (addr))
      bind_addr = g_inet_address_new_any (g_inet_address_get_family (addr));
    else
#endif
      bind_addr = G_INET_ADDRESS (g_object_ref (addr));

    g_object_unref (addr);

    bind_saddr = g_inet_socket_address_new (bind_addr, src->port);
    g_object_unref (bind_addr);
    if (!g_socket_bind (src->used_socket, bind_saddr, src->reuse, &err))
      goto bind_error;

    g_object_unref (bind_saddr);
    g_socket_set_multicast_loopback (src->used_socket, src->loop);
  } else {
    GInetSocketAddress *local_addr;

    GST_DEBUG_OBJECT (src, "using provided socket %p", src->socket);
    /* we use the configured socket, try to get some info about it */
    src->used_socket = G_SOCKET (g_object_ref (src->socket));
    src->external_socket = TRUE;

    local_addr =
        G_INET_SOCKET_ADDRESS (g_socket_get_local_address (src->used_socket,
            &err));
    if (!local_addr)
      goto getsockname_error;

    /* See above for the reasons. Without this we would behave different on
     * Windows and Linux, joining multicast groups below for provided sockets
     * on Linux but not on Windows
     */
#ifdef G_OS_WIN32
    addr = gst_udpsrc_resolve (src, src->address);
    if (!addr)
      goto name_resolve;
    if (!src->auto_multicast ||
        !g_inet_address_get_is_any (g_inet_socket_address_get_address
            (local_addr)) ||
        !g_inet_address_get_is_multicast (addr)) {
      g_object_unref (addr);
#endif
      if (src->addr)
        g_object_unref (src->addr);
      src->addr = local_addr;
#ifdef G_OS_WIN32
    } else {
      g_object_unref (local_addr);
      if (src->addr)
        g_object_unref (src->addr);
      src->addr =
          G_INET_SOCKET_ADDRESS (g_inet_socket_address_new (addr, src->port));
      g_object_unref (addr);
    }
#endif
  }

  {
    gint val = 0;

    if (src->buffer_size != 0) {
      GError *opt_err = NULL;

      GST_INFO_OBJECT (src, "setting udp buffer of %d bytes", src->buffer_size);
      /* set buffer size, Note that on Linux this is typically limited to a
       * maximum of around 100K. Also a minimum of 128 bytes is required on
       * Linux. */
      if (!g_socket_set_option (src->used_socket, SOL_SOCKET, SO_RCVBUF,
              src->buffer_size, &opt_err)) {
        GST_ELEMENT_WARNING (src, RESOURCE, SETTINGS, (NULL),
            ("Could not create a buffer of requested %d bytes: %s",
                src->buffer_size, opt_err->message));
        g_error_free (opt_err);
        opt_err = NULL;
      }
    }

    /* read the value of the receive buffer. Note that on linux this returns
     * 2x the value we set because the kernel allocates extra memory for
     * metadata. The default on Linux is about 100K (which is about 50K
     * without metadata) */
    if (g_socket_get_option (src->used_socket, SOL_SOCKET, SO_RCVBUF, &val,
            NULL)) {
      GST_INFO_OBJECT (src, "have udp buffer of %d bytes", val);
    } else {
      GST_DEBUG_OBJECT (src, "could not get udp buffer size");
    }
  }

  g_socket_set_broadcast (src->used_socket, TRUE);

  if (src->auto_multicast
      &&
      g_inet_address_get_is_multicast (g_inet_socket_address_get_address
          (src->addr))) {
    GST_DEBUG_OBJECT (src, "joining multicast group %s", src->address);
    if (!g_socket_join_multicast_group (src->used_socket,
            g_inet_socket_address_get_address (src->addr),
            FALSE, src->multi_iface, &err))
      goto membership;
  }

  /* NOTE: sockaddr_in.sin_port works for ipv4 and ipv6 because sin_port
   * follows ss_family on both */
  {
    GInetSocketAddress *addr;
    guint16 port;

    addr =
        G_INET_SOCKET_ADDRESS (g_socket_get_local_address (src->used_socket,
            &err));
    if (!addr)
      goto getsockname_error;

    port = g_inet_socket_address_get_port (addr);
    GST_DEBUG_OBJECT (src, "bound, on port %d", port);
    if (port != src->port) {
      src->port = port;
      GST_DEBUG_OBJECT (src, "notifying port %d", port);
      g_object_notify (G_OBJECT (src), "port");
    }
    g_object_unref (addr);
  }

  src->allocator = NULL;
  gst_allocation_params_init (&src->params);

  src->max_size = 0;

  return TRUE;

  /* ERRORS */
name_resolve:
  {
    /* NOTE(review): presumably gst_udpsrc_resolve() already posted an
     * element error — confirm, since no message is posted here. */
    return FALSE;
  }
no_socket:
  {
    GST_ELEMENT_ERROR (src, RESOURCE, OPEN_READ, (NULL),
        ("no socket error: %s", err->message));
    g_clear_error (&err);
    g_object_unref (addr);
    return FALSE;
  }
bind_error:
  {
    GST_ELEMENT_ERROR (src, RESOURCE, SETTINGS, (NULL),
        ("bind failed: %s", err->message));
    g_clear_error (&err);
    g_object_unref (bind_saddr);
    gst_udpsrc_close (src);
    return FALSE;
  }
membership:
  {
    GST_ELEMENT_ERROR (src, RESOURCE, SETTINGS, (NULL),
        ("could add membership: %s", err->message));
    g_clear_error (&err);
    gst_udpsrc_close (src);
    return FALSE;
  }
getsockname_error:
  {
    GST_ELEMENT_ERROR (src, RESOURCE, SETTINGS, (NULL),
        ("getsockname failed: %s", err->message));
    g_clear_error (&err);
    gst_udpsrc_close (src);
    return FALSE;
  }
}
/* GObject set_property handler for GstDvbSrc.
 *
 * Stores tuning parameters on @object; for ARG_DVBSRC_PIDS re-parses the
 * colon-separated PID list and, when already PAUSED/PLAYING, reinstalls the
 * PES filters immediately.  ARG_DVBSRC_TUNE retunes at once when past READY.
 *
 * Fixes vs. previous revision: a NULL "pids" string was passed to strcmp()
 * (undefined behavior) and the string was leaked in the "8192" branch; the
 * redundant NULL-guard before g_free() is gone.
 */
static void
gst_dvbsrc_set_property (GObject * _object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstDvbSrc *object;

  g_return_if_fail (GST_IS_DVBSRC (_object));
  object = GST_DVBSRC (_object);

  switch (prop_id) {
    case ARG_DVBSRC_ADAPTER:
      object->adapter_number = g_value_get_int (value);
      break;
    case ARG_DVBSRC_FRONTEND:
      object->frontend_number = g_value_get_int (value);
      break;
    case ARG_DVBSRC_DISEQC_SRC:
      if (object->diseqc_src != g_value_get_int (value)) {
        object->diseqc_src = g_value_get_int (value);
        object->send_diseqc = TRUE;
      }
      GST_INFO_OBJECT (object, "Set Property: ARG_DVBSRC_DISEQC_ID");
      break;
    case ARG_DVBSRC_FREQUENCY:
      object->freq = g_value_get_uint (value);
      GST_INFO_OBJECT (object, "Set Property: ARG_DVBSRC_FREQUENCY (%d Hz)",
          object->freq);
      break;
    case ARG_DVBSRC_POLARITY:
    {
      const char *s = NULL;

      s = g_value_get_string (value);
      if (s != NULL) {
        /* anything starting with 'h'/'H' means horizontal */
        object->pol = (s[0] == 'h' || s[0] == 'H') ? DVB_POL_H : DVB_POL_V;
        GST_INFO_OBJECT (object, "Set Property: ARG_DVBSRC_POLARITY");
        GST_INFO_OBJECT (object, "\t%s",
            (s[0] == 'h' || s[0] == 'H') ? "DVB_POL_H" : "DVB_POL_V");
      }
      break;
    }
    case ARG_DVBSRC_PIDS:
    {
      gchar *pid_string;

      pid_string = g_value_dup_string (value);
      GST_INFO_OBJECT (object, "Set Property: ARG_DVBSRC_PIDS %s", pid_string);

      /* guard against a NULL string property: strcmp(NULL, ...) is UB */
      if (pid_string == NULL)
        break;

      if (!strcmp (pid_string, "8192")) {
        /* get the whole ts */
        int pid_count = 1;
        object->pids[0] = 8192;
        while (pid_count < MAX_FILTERS) {
          object->pids[pid_count++] = G_MAXUINT16;
        }
      } else {
        int pid = 0;
        int pid_count;
        gchar **pids;
        char **tmp;

        tmp = pids = g_strsplit (pid_string, ":", MAX_FILTERS);

        /* always add the PAT and CAT pids */
        object->pids[0] = 0;
        object->pids[1] = 1;

        pid_count = 2;
        while (*pids != NULL && pid_count < MAX_FILTERS) {
          pid = strtol (*pids, NULL, 0);
          if (pid > 1 && pid <= 8192) {
            GST_INFO_OBJECT (object, "\tParsed Pid: %d", pid);
            object->pids[pid_count] = pid;
            pid_count++;
          }
          pids++;
        }
        /* fill the remaining slots with the "unused" marker */
        while (pid_count < MAX_FILTERS) {
          object->pids[pid_count++] = G_MAXUINT16;
        }
        g_strfreev (tmp);
      }
      g_free (pid_string);      /* was leaked in the "8192" branch */

      /* if we are in playing or paused, then set filters now */
      GST_INFO_OBJECT (object, "checking if playing for setting pes filters");
      if (GST_ELEMENT (object)->current_state == GST_STATE_PLAYING ||
          GST_ELEMENT (object)->current_state == GST_STATE_PAUSED) {
        GST_INFO_OBJECT (object, "Setting pes filters now");
        gst_dvbsrc_set_pes_filters (object);
      }
    }
      break;
    case ARG_DVBSRC_SYM_RATE:
      object->sym_rate = g_value_get_uint (value);
      GST_INFO_OBJECT (object, "Set Property: ARG_DVBSRC_SYM_RATE to value %d",
          object->sym_rate);
      break;
    case ARG_DVBSRC_BANDWIDTH:
      object->bandwidth = g_value_get_enum (value);
      break;
    case ARG_DVBSRC_CODE_RATE_HP:
      object->code_rate_hp = g_value_get_enum (value);
      break;
    case ARG_DVBSRC_CODE_RATE_LP:
      object->code_rate_lp = g_value_get_enum (value);
      break;
    case ARG_DVBSRC_GUARD:
      object->guard_interval = g_value_get_enum (value);
      break;
    case ARG_DVBSRC_MODULATION:
      object->modulation = g_value_get_enum (value);
      break;
    case ARG_DVBSRC_TRANSMISSION_MODE:
      object->transmission_mode = g_value_get_enum (value);
      break;
    case ARG_DVBSRC_HIERARCHY_INF:
      object->hierarchy_information = g_value_get_enum (value);
      break;
    case ARG_DVBSRC_INVERSION:
      object->inversion = g_value_get_enum (value);
      break;
    case ARG_DVBSRC_TUNE:{
      GST_INFO_OBJECT (object, "Set Property: ARG_DVBSRC_TUNE");

      /* if we are in paused/playing state tune now, otherwise in
         ready to paused state change */
      if (GST_STATE (object) > GST_STATE_READY) {
        g_mutex_lock (&object->tune_mutex);
        gst_dvbsrc_tune (object);
        g_mutex_unlock (&object->tune_mutex);
      }
      break;
    }
    case ARG_DVBSRC_STATS_REPORTING_INTERVAL:
      object->stats_interval = g_value_get_uint (value);
      object->stats_counter = 0;
      break;
    case ARG_DVBSRC_TIMEOUT:
      object->timeout = g_value_get_uint64 (value);
      break;
    case ARG_DVBSRC_DVB_BUFFER_SIZE:
      object->dvb_buffer_size = g_value_get_uint (value);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
  }
}
/* GstPushSrc::fill implementation.
 *
 * Dequeues a frame from the v4l2 buffer pool into @buf, then computes the
 * output timestamp: pipeline running time (clock minus base time) adjusted
 * backwards by the capture delay, where the delay is estimated from the
 * driver timestamp against CLOCK_MONOTONIC (falling back to wall-clock time
 * for drivers that use the system clock, and to one frame duration when no
 * driver timestamp is available). */
static GstFlowReturn
gst_v4l2src_fill (GstPushSrc * src, GstBuffer * buf)
{
  GstV4l2Src *v4l2src = GST_V4L2SRC (src);
  GstV4l2Object *obj = v4l2src->v4l2object;
  GstFlowReturn ret;
  GstClock *clock;
  GstClockTime abs_time, base_time, timestamp, duration;
  GstClockTime delay;

  ret =
      gst_v4l2_buffer_pool_process (GST_V4L2_BUFFER_POOL_CAST (obj->pool), buf);

  if (G_UNLIKELY (ret != GST_FLOW_OK))
    goto error;

  /* driver-provided capture timestamp (may be NONE) */
  timestamp = GST_BUFFER_TIMESTAMP (buf);
  duration = obj->duration;

  /* timestamps, LOCK to get clock and base time. */
  /* FIXME: element clock and base_time is rarely changing */
  GST_OBJECT_LOCK (v4l2src);
  if ((clock = GST_ELEMENT_CLOCK (v4l2src))) {
    /* we have a clock, get base time and ref clock */
    base_time = GST_ELEMENT (v4l2src)->base_time;
    gst_object_ref (clock);
  } else {
    /* no clock, can't set timestamps */
    base_time = GST_CLOCK_TIME_NONE;
  }
  GST_OBJECT_UNLOCK (v4l2src);

  /* sample pipeline clock (outside the lock; we hold our own clock ref) */
  if (clock) {
    abs_time = gst_clock_get_time (clock);
    gst_object_unref (clock);
  } else {
    abs_time = GST_CLOCK_TIME_NONE;
  }

  if (timestamp != GST_CLOCK_TIME_NONE) {
    struct timespec now;
    GstClockTime gstnow;

    /* v4l2 specs say to use the system time although many drivers switched to
     * the more desirable monotonic time. We first try to use the monotonic
     * time and see how that goes */
    clock_gettime (CLOCK_MONOTONIC, &now);
    gstnow = GST_TIMESPEC_TO_TIME (now);

    if (gstnow < timestamp && (timestamp - gstnow) > (10 * GST_SECOND)) {
      GTimeVal now;

      /* very large diff, fall back to system time */
      g_get_current_time (&now);
      gstnow = GST_TIMEVAL_TO_TIME (now);
    }

    /* delay = how long ago the frame was actually captured */
    if (gstnow > timestamp) {
      delay = gstnow - timestamp;
    } else {
      delay = 0;
    }

    GST_DEBUG_OBJECT (v4l2src, "ts: %" GST_TIME_FORMAT " now %" GST_TIME_FORMAT
        " delay %" GST_TIME_FORMAT, GST_TIME_ARGS (timestamp),
        GST_TIME_ARGS (gstnow), GST_TIME_ARGS (delay));
  } else {
    /* we assume 1 frame latency otherwise */
    if (GST_CLOCK_TIME_IS_VALID (duration))
      delay = duration;
    else
      delay = 0;
  }

  /* set buffer metadata */
  GST_BUFFER_OFFSET (buf) = v4l2src->offset++;
  GST_BUFFER_OFFSET_END (buf) = v4l2src->offset;

  if (G_LIKELY (abs_time != GST_CLOCK_TIME_NONE)) {
    /* the time now is the time of the clock minus the base time */
    timestamp = abs_time - base_time;

    /* adjust for delay in the device */
    if (timestamp > delay)
      timestamp -= delay;
    else
      timestamp = 0;
  } else {
    timestamp = GST_CLOCK_TIME_NONE;
  }

  /* activate settings for next frame */
  if (GST_CLOCK_TIME_IS_VALID (duration)) {
    v4l2src->ctrl_time += duration;
  } else {
    /* this is not very good (as it should be the next timestamp),
     * still good enough for linear fades (as long as it is not -1) */
    v4l2src->ctrl_time = timestamp;
  }
  gst_object_sync_values (GST_OBJECT (src), v4l2src->ctrl_time);

  GST_INFO_OBJECT (src, "sync to %" GST_TIME_FORMAT " out ts %" GST_TIME_FORMAT,
      GST_TIME_ARGS (v4l2src->ctrl_time), GST_TIME_ARGS (timestamp));

  GST_BUFFER_TIMESTAMP (buf) = timestamp;
  GST_BUFFER_DURATION (buf) = duration;

  return ret;

  /* ERROR */
error:
  {
    GST_DEBUG_OBJECT (src, "error processing buffer %d (%s)", ret,
        gst_flow_get_name (ret));
    return ret;
  }
}
/* Open the DVB frontend device for @object and post a "dvb-adapter" element
 * message describing the adapter's type and auto-capabilities.
 *
 * @object:   the dvbsrc instance; on success object->fd_frontend holds the
 *            open file descriptor and object->adapter_type the frontend type.
 * @writable: open O_RDWR when TRUE, O_RDONLY otherwise.
 *
 * Returns TRUE on success; FALSE if the device could not be opened or
 * queried (a GST_ELEMENT_ERROR is posted in that case).
 */
static gboolean
gst_dvbsrc_open_frontend (GstDvbSrc * object, gboolean writable)
{
  struct dvb_frontend_info fe_info;
  const char *adapter_desc = NULL;
  gchar *frontend_dev;
  GstStructure *adapter_structure;
  char *adapter_name = NULL;

  frontend_dev = g_strdup_printf ("/dev/dvb/adapter%d/frontend%d",
      object->adapter_number, object->frontend_number);
  GST_INFO_OBJECT (object, "Using frontend device: %s", frontend_dev);

  /* open frontend */
  if ((object->fd_frontend =
          open (frontend_dev, writable ? O_RDWR : O_RDONLY)) < 0) {
    switch (errno) {
      case ENOENT:
        GST_ELEMENT_ERROR (object, RESOURCE, NOT_FOUND,
            (_("Device \"%s\" does not exist."), frontend_dev), (NULL));
        break;
      default:
        GST_ELEMENT_ERROR (object, RESOURCE, OPEN_READ_WRITE,
            (_("Could not open frontend device \"%s\"."), frontend_dev),
            GST_ERROR_SYSTEM);
        break;
    }
    /* FIX: do not close() here — open() failed, so fd_frontend is negative
     * and closing it would only raise EBADF */
    g_free (frontend_dev);
    return FALSE;
  }

  GST_DEBUG_OBJECT (object, "Device opened, querying information");

  if (ioctl (object->fd_frontend, FE_GET_INFO, &fe_info) < 0) {
    GST_ELEMENT_ERROR (object, RESOURCE, SETTINGS,
        (_("Could not get settings from frontend device \"%s\"."),
            frontend_dev), GST_ERROR_SYSTEM);
    close (object->fd_frontend);
    g_free (frontend_dev);
    return FALSE;
  }

  GST_DEBUG_OBJECT (object, "Got information about adapter : %s", fe_info.name);

  adapter_name = g_strdup (fe_info.name);
  object->adapter_type = fe_info.type;

  /* build a per-delivery-system description of which parameters the
   * hardware can auto-detect (each flag is a bit test on fe_info.caps) */
  switch (object->adapter_type) {
    case FE_QPSK:
      adapter_desc = "DVB-S";
      adapter_structure = gst_structure_new ("dvb-adapter",
          "type", G_TYPE_STRING, adapter_desc,
          "name", G_TYPE_STRING, adapter_name,
          "auto-fec", G_TYPE_BOOLEAN, fe_info.caps & FE_CAN_FEC_AUTO, NULL);
      break;
    case FE_QAM:
      adapter_desc = "DVB-C";
      adapter_structure = gst_structure_new ("dvb-adapter",
          "type", G_TYPE_STRING, adapter_desc,
          "name", G_TYPE_STRING, adapter_name,
          "auto-inversion", G_TYPE_BOOLEAN,
          fe_info.caps & FE_CAN_INVERSION_AUTO,
          "auto-qam", G_TYPE_BOOLEAN, fe_info.caps & FE_CAN_QAM_AUTO,
          "auto-fec", G_TYPE_BOOLEAN, fe_info.caps & FE_CAN_FEC_AUTO, NULL);
      break;
    case FE_OFDM:
      adapter_desc = "DVB-T";
      adapter_structure = gst_structure_new ("dvb-adapter",
          "type", G_TYPE_STRING, adapter_desc,
          "name", G_TYPE_STRING, adapter_name,
          "auto-inversion", G_TYPE_BOOLEAN,
          fe_info.caps & FE_CAN_INVERSION_AUTO,
          "auto-qam", G_TYPE_BOOLEAN, fe_info.caps & FE_CAN_QAM_AUTO,
          "auto-transmission-mode", G_TYPE_BOOLEAN,
          fe_info.caps & FE_CAN_TRANSMISSION_MODE_AUTO,
          "auto-guard-interval", G_TYPE_BOOLEAN,
          fe_info.caps & FE_CAN_GUARD_INTERVAL_AUTO,
          /* FIX: was 'caps % FE_CAN_HIERARCHY_AUTO' — modulo instead of the
           * bitwise AND used by every other capability test, yielding a
           * meaningless boolean */
          "auto-hierarchy", G_TYPE_BOOLEAN,
          fe_info.caps & FE_CAN_HIERARCHY_AUTO,
          "auto-fec", G_TYPE_BOOLEAN, fe_info.caps & FE_CAN_FEC_AUTO, NULL);
      break;
    case FE_ATSC:
      adapter_desc = "ATSC";
      adapter_structure = gst_structure_new ("dvb-adapter",
          "type", G_TYPE_STRING, adapter_desc,
          "name", G_TYPE_STRING, adapter_name, NULL);
      break;
    default:
      /* g_error() aborts; the structure below is only built if a fatal
       * handler has been replaced */
      g_error ("Unknown frontend type: %d", object->adapter_type);
      adapter_structure = gst_structure_new ("dvb-adapter",
          "type", G_TYPE_STRING, "unknown", NULL);
  }

  GST_INFO_OBJECT (object, "DVB card: %s ", adapter_name);
  gst_element_post_message (GST_ELEMENT_CAST (object),
      gst_message_new_element (GST_OBJECT (object), adapter_structure));

  g_free (frontend_dev);
  g_free (adapter_name);
  return TRUE;
}
/* Download and apply the current media playlist for @demux.
 *
 * @update: TRUE when this is a periodic refresh of an already-playing
 *          playlist, FALSE on the initial load (only then is the live
 *          seek-back / position-matching logic below applied).
 * @err:    error out-location, set on download/parse failure.
 *
 * If fetching the media playlist fails on a refresh, the function retries
 * once: it re-fetches the variant (master) playlist, updates the client
 * with it, and jumps back to 'retry' (guarded by main_checked so this
 * happens at most once).
 *
 * Ownership notes: uri/main_uri are owned strings (g_free'd on every exit
 * path); 'download' is unreffed on every path after its buffer is consumed.
 *
 * Returns TRUE if the client's playlist was updated.
 */
static gboolean
gst_hls_demux_update_playlist (GstHLSDemux * demux, gboolean update,
    GError ** err)
{
  GstAdaptiveDemux *adaptive_demux = GST_ADAPTIVE_DEMUX (demux);
  GstFragment *download;
  GstBuffer *buf;
  gchar *playlist;
  gboolean main_checked = FALSE, updated = FALSE;
  gchar *uri, *main_uri;

retry:
  uri = gst_m3u8_client_get_current_uri (demux->client);
  main_uri = gst_m3u8_client_get_uri (demux->client);
  download =
      gst_uri_downloader_fetch_uri (adaptive_demux->downloader, uri, main_uri,
      TRUE, TRUE, TRUE, err);
  g_free (main_uri);
  if (download == NULL) {
    /* media playlist fetch failed: on a refresh, try once to re-fetch the
     * variant playlist (the media playlist URI may have changed) */
    if (!adaptive_demux->cancelled && update && !main_checked
        && gst_m3u8_client_has_variant_playlist (demux->client)
        && gst_m3u8_client_has_main (demux->client)) {
      GError *err2 = NULL;
      main_uri = gst_m3u8_client_get_uri (demux->client);
      GST_INFO_OBJECT (demux,
          "Updating playlist %s failed, attempt to refresh variant playlist %s",
          uri, main_uri);
      download =
          gst_uri_downloader_fetch_uri (adaptive_demux->downloader,
          main_uri, NULL, TRUE, TRUE, TRUE, &err2);
      g_free (main_uri);
      /* the variant fetch error is secondary; the caller only sees *err */
      g_clear_error (&err2);
      if (download != NULL) {
        gchar *base_uri;

        buf = gst_fragment_get_buffer (download);
        playlist = gst_hls_src_buf_to_utf8_playlist (buf);
        gst_buffer_unref (buf);

        if (playlist == NULL) {
          GST_WARNING_OBJECT (demux,
              "Failed to validate variant playlist encoding");
          g_free (uri);
          g_object_unref (download);
          return FALSE;
        }

        g_free (uri);
        /* on a permanent redirect the redirect target becomes the playlist
         * URI itself; otherwise it only serves as the base for relative
         * URIs (uri/base_uri borrow from 'download' here — not freed) */
        if (download->redirect_permanent && download->redirect_uri) {
          uri = download->redirect_uri;
          base_uri = NULL;
        } else {
          uri = download->uri;
          base_uri = download->redirect_uri;
        }

        if (!gst_m3u8_client_update_variant_playlist (demux->client, playlist,
                uri, base_uri)) {
          GST_WARNING_OBJECT (demux, "Failed to update the variant playlist");
          g_object_unref (download);
          return FALSE;
        }

        g_object_unref (download);
        /* variant refresh succeeded: clear the original error and retry the
         * media playlist exactly once (main_checked blocks a second retry) */
        g_clear_error (err);
        main_checked = TRUE;
        goto retry;
      } else {
        g_free (uri);
        return FALSE;
      }
    } else {
      g_free (uri);
      return FALSE;
    }
  }
  g_free (uri);

  /* Set the base URI of the playlist to the redirect target if any */
  GST_M3U8_CLIENT_LOCK (demux->client);
  g_free (demux->client->current->uri);
  g_free (demux->client->current->base_uri);
  if (download->redirect_permanent && download->redirect_uri) {
    demux->client->current->uri = g_strdup (download->redirect_uri);
    demux->client->current->base_uri = NULL;
  } else {
    demux->client->current->uri = g_strdup (download->uri);
    demux->client->current->base_uri = g_strdup (download->redirect_uri);
  }
  GST_M3U8_CLIENT_UNLOCK (demux->client);

  buf = gst_fragment_get_buffer (download);
  playlist = gst_hls_src_buf_to_utf8_playlist (buf);
  gst_buffer_unref (buf);
  g_object_unref (download);

  if (playlist == NULL) {
    GST_WARNING_OBJECT (demux, "Couldn't validate playlist encoding");
    g_set_error (err, GST_STREAM_ERROR, GST_STREAM_ERROR_WRONG_TYPE,
        "Couldn't validate playlist encoding");
    return FALSE;
  }

  updated = gst_m3u8_client_update (demux->client, playlist);
  if (!updated) {
    GST_WARNING_OBJECT (demux, "Couldn't update playlist");
    g_set_error (err, GST_STREAM_ERROR, GST_STREAM_ERROR_FAILED,
        "Couldn't update playlist");
    return FALSE;
  }

  /* If it's a live source, do not let the sequence number go beyond
   * three fragments before the end of the list */
  if (update == FALSE && demux->client->current
      && gst_m3u8_client_is_live (demux->client)) {
    gint64 last_sequence;

    GST_M3U8_CLIENT_LOCK (demux->client);
    last_sequence =
        GST_M3U8_MEDIA_FILE (g_list_last (demux->client->current->
            files)->data)->sequence;

    if (demux->client->sequence >= last_sequence - 3) {
      GST_DEBUG_OBJECT (demux, "Sequence is beyond playlist. Moving back to %u",
          (guint) (last_sequence - 3));
      //demux->need_segment = TRUE;
      demux->client->sequence = last_sequence - 3;
    }
    GST_M3U8_CLIENT_UNLOCK (demux->client);
  } else if (demux->client->current
      && !gst_m3u8_client_is_live (demux->client)) {
    GstClockTime current_pos, target_pos;
    guint sequence = 0;
    GList *walk;

    /* Sequence numbers are not guaranteed to be the same in different
     * playlists, so get the correct fragment here based on the current
     * position */
    GST_M3U8_CLIENT_LOCK (demux->client);

    /* Valid because hlsdemux only has a single output */
    if (GST_ADAPTIVE_DEMUX_CAST (demux)->streams) {
      GstAdaptiveDemuxStream *stream =
          GST_ADAPTIVE_DEMUX_CAST (demux)->streams->data;
      target_pos = stream->segment.position;
    } else {
      target_pos = 0;
    }
    if (GST_CLOCK_TIME_IS_VALID (demux->client->sequence_position)) {
      target_pos = MAX (target_pos, demux->client->sequence_position);
    }

    /* walk the new playlist accumulating durations until the fragment
     * containing target_pos is found */
    current_pos = 0;
    for (walk = demux->client->current->files; walk; walk = walk->next) {
      GstM3U8MediaFile *file = walk->data;

      sequence = file->sequence;
      if (current_pos <= target_pos
          && target_pos < current_pos + file->duration) {
        break;
      }
      current_pos += file->duration;
    }
    /* End of playlist */
    if (!walk)
      sequence++;
    demux->client->sequence = sequence;
    demux->client->sequence_position = current_pos;
    GST_M3U8_CLIENT_UNLOCK (demux->client);
  }

  return updated;
}
/* (Re)build and set the src pad caps for @mpvparse from the parsed
 * sequence header / extensions, carrying over upstream caps fields where
 * sensible. Runs only when no src caps exist yet or update_caps is set.
 *
 * Fields produced: mpegversion, systemstream=FALSE, parsed=TRUE, and —
 * when known — width/height (possibly narrowed by the sequence display
 * extension), framerate (+ base-parse frame rate and latency),
 * pixel-aspect-ratio, codec_data, profile/level and interlace-mode.
 */
static void
gst_mpegv_parse_update_src_caps (GstMpegvParse * mpvparse)
{
  GstCaps *caps = NULL;
  GstStructure *s = NULL;

  /* only update if no src caps yet or explicitly triggered */
  if (G_LIKELY (gst_pad_has_current_caps (GST_BASE_PARSE_SRC_PAD (mpvparse))
          && !mpvparse->update_caps))
    return;

  /* carry over input caps as much as possible; override with our own stuff */
  caps = gst_pad_get_current_caps (GST_BASE_PARSE_SINK_PAD (mpvparse));
  if (caps) {
    caps = gst_caps_make_writable (caps);
    s = gst_caps_get_structure (caps, 0);
  } else {
    caps = gst_caps_new_empty_simple ("video/mpeg");
  }

  /* typically we don't output buffers until we have properly parsed some
   * config data, so we should at least know about version.
   * If not, it means it has been requested not to drop data, and
   * upstream and/or app must know what they are doing ... */
  gst_caps_set_simple (caps, "mpegversion", G_TYPE_INT,
      (mpvparse->config_flags & FLAG_MPEG2) ? 2 : 1, NULL);

  gst_caps_set_simple (caps, "systemstream", G_TYPE_BOOLEAN, FALSE,
      "parsed", G_TYPE_BOOLEAN, TRUE, NULL);

  if (mpvparse->sequencehdr.width > 0 && mpvparse->sequencehdr.height > 0) {
    GstMpegVideoSequenceDisplayExt *seqdispext;
    gint width, height;

    width = mpvparse->sequencehdr.width;
    height = mpvparse->sequencehdr.height;

    /* the display extension may narrow the coded size to the intended
     * display size; only accept it when it doesn't exceed the coded size */
    if (mpvparse->config_flags & FLAG_SEQUENCE_DISPLAY_EXT) {
      seqdispext = &mpvparse->sequencedispext;

      if (seqdispext->display_horizontal_size <= width
          && seqdispext->display_vertical_size <= height) {
        width = seqdispext->display_horizontal_size;
        height = seqdispext->display_vertical_size;
        GST_INFO_OBJECT (mpvparse,
            "stream has display extension: display_width=%d display_height=%d",
            width, height);
      }
    }
    gst_caps_set_simple (caps, "width", G_TYPE_INT, width,
        "height", G_TYPE_INT, height, NULL);
  }

  /* perhaps we have a framerate */
  {
    gint fps_num = mpvparse->fps_num;
    gint fps_den = mpvparse->fps_den;
    GstClockTime latency;

    /* upstream overrides */
    if (s && gst_structure_has_field (s, "framerate"))
      gst_structure_get_fraction (s, "framerate", &fps_num, &fps_den);

    if (fps_den > 0 && fps_num > 0) {
      gst_caps_set_simple (caps, "framerate",
          GST_TYPE_FRACTION, fps_num, fps_den, NULL);
      gst_base_parse_set_frame_rate (GST_BASE_PARSE (mpvparse),
          fps_num, fps_den, 0, 0);
      /* one-frame latency */
      latency = gst_util_uint64_scale (GST_SECOND, fps_den, fps_num);
      gst_base_parse_set_latency (GST_BASE_PARSE (mpvparse), latency, latency);
    }
  }

  /* or pixel-aspect-ratio */
  /* (consistency fix: compare both par_w and par_h explicitly against 0) */
  if (mpvparse->sequencehdr.par_w > 0 && mpvparse->sequencehdr.par_h > 0
      && (!s || !gst_structure_has_field (s, "pixel-aspect-ratio"))) {
    gst_caps_set_simple (caps, "pixel-aspect-ratio", GST_TYPE_FRACTION,
        mpvparse->sequencehdr.par_w, mpvparse->sequencehdr.par_h, NULL);
  }

  if (mpvparse->config != NULL) {
    gst_caps_set_simple (caps, "codec_data",
        GST_TYPE_BUFFER, mpvparse->config, NULL);
  }

  if (mpvparse->config_flags & FLAG_SEQUENCE_EXT) {
    const guint profile_c = mpvparse->sequenceext.profile;
    const guint level_c = mpvparse->sequenceext.level;
    const gchar *profile = NULL, *level = NULL;
    /*
     * Profile indication - 1 => High, 2 => Spatially Scalable,
     *                      3 => SNR Scalable, 4 => Main, 5 => Simple
     * 4:2:2 and Multi-view have profile = 0, with the escape bit set to 1
     */
    const gchar *const profiles[] =
        { "high", "spatial", "snr", "main", "simple" };
    /*
     * Level indication - 4 => High, 6 => High-1440, 8 => Main, 10 => Low,
     *                    except in the case of profile = 0
     */
    const gchar *const levels[] = { "high", "high-1440", "main", "low" };

    if (profile_c > 0 && profile_c < 6)
      profile = profiles[profile_c - 1];

    if ((level_c > 3) && (level_c < 11) && (level_c % 2 == 0))
      level = levels[(level_c >> 1) - 2];

    if (profile_c == 8) {
      /* Non-hierarchical profile */
      switch (level_c) {
        case 2:
          level = levels[0];
          /* fallthrough */
        case 5:
          /* FIX: guard so falling through from case 2 does not clobber the
           * level already chosen (previously case 5 overwrote it) */
          if (!level)
            level = levels[2];
          profile = "4:2:2";
          break;
        case 10:
          level = levels[0];
          /* fallthrough */
        case 11:
          if (!level)
            level = levels[1];
          /* fallthrough */
        case 13:
          if (!level)
            level = levels[2];
          /* fallthrough */
        case 14:
          if (!level)
            level = levels[3];
          profile = "multiview";
          break;
        default:
          break;
      }
    }

    /* FIXME does it make sense to expose profile/level in the caps ? */
    GST_DEBUG_OBJECT (mpvparse, "profile:'%s' level:'%s'", profile, level);

    if (profile)
      gst_caps_set_simple (caps, "profile", G_TYPE_STRING, profile, NULL);
    else
      GST_DEBUG_OBJECT (mpvparse, "Invalid profile - %u", profile_c);

    if (level)
      gst_caps_set_simple (caps, "level", G_TYPE_STRING, level, NULL);
    else
      GST_DEBUG_OBJECT (mpvparse, "Invalid level - %u", level_c);

    gst_caps_set_simple (caps, "interlace-mode", G_TYPE_STRING,
        (mpvparse->sequenceext.progressive ? "progressive" : "mixed"), NULL);
  }

  gst_pad_set_caps (GST_BASE_PARSE_SRC_PAD (mpvparse), caps);
  gst_caps_unref (caps);

  mpvparse->update_caps = FALSE;
}