/* gst_v4l2_h264_enc_negotiate:
 * @encoder: the video encoder
 *
 * Selects the H264 profile and level to encode at: first by intersecting
 * with what downstream allows (via negotiate_profile_and_level()), and
 * falling back to the driver's current defaults (VIDIOC_G_CTRL) for
 * whichever of the two was not constrained by caps.  The chosen values are
 * written into the output state caps before chaining up.
 *
 * Returns: %TRUE on success, %FALSE if negotiation failed.
 */
static gboolean
gst_v4l2_h264_enc_negotiate (GstVideoEncoder * encoder)
{
  GstV4l2H264Enc *self = GST_V4L2_H264_ENC (encoder);
  GstV4l2VideoEnc *venc = GST_V4L2_VIDEO_ENC (encoder);
  GstV4l2Object *v4l2object = venc->v4l2output;
  GstCaps *allowed_caps = NULL;
  struct ProfileLevelCtx ctx = { self, NULL, NULL };
  GstVideoCodecState *state;
  GstStructure *s;

  GST_DEBUG_OBJECT (self, "Negotiating H264 profile and level.");

  allowed_caps = gst_pad_get_allowed_caps (GST_VIDEO_ENCODER_SRC_PAD (encoder));

  if (allowed_caps) {
    if (gst_caps_is_empty (allowed_caps))
      goto not_negotiated;

    allowed_caps = gst_caps_make_writable (allowed_caps);

    /* negotiate_profile_and_level() will return TRUE on failure to keep
     * iterating, if gst_caps_foreach() returns TRUE it means there was no
     * compatible profile and level in any of the structures */
    if (gst_caps_foreach (allowed_caps, negotiate_profile_and_level, &ctx))
      goto no_profile_level;
  }

  /* Downstream did not constrain the profile: ask the driver for its
   * current default */
  if (!ctx.profile) {
    struct v4l2_control control = { 0, };

    control.id = V4L2_CID_MPEG_VIDEO_H264_PROFILE;

    if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_G_CTRL, &control) < 0)
      goto g_ctrl_failed;

    ctx.profile = v4l2_profile_to_string (control.value);
  }

  /* Same fallback for the level */
  if (!ctx.level) {
    struct v4l2_control control = { 0, };

    control.id = V4L2_CID_MPEG_VIDEO_H264_LEVEL;

    if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_G_CTRL, &control) < 0)
      goto g_ctrl_failed;

    ctx.level = v4l2_level_to_string (control.value);
  }

  GST_DEBUG_OBJECT (self, "Selected H264 profile %s at level %s",
      ctx.profile, ctx.level);

  state = gst_video_encoder_get_output_state (encoder);
  s = gst_caps_get_structure (state->caps, 0);
  gst_structure_set (s, "profile", G_TYPE_STRING, ctx.profile,
      "level", G_TYPE_STRING, ctx.level, NULL);
  /* get_output_state() is transfer-full; drop our reference now that the
   * caps have been updated */
  gst_video_codec_state_unref (state);

  /* gst_structure_set() copied the strings, so it is now safe to release
   * allowed_caps even though ctx.profile/ctx.level may point into one of
   * its structures.  (Previously this was leaked on the success path.) */
  if (allowed_caps)
    gst_caps_unref (allowed_caps);

  return GST_VIDEO_ENCODER_CLASS (parent_class)->negotiate (encoder);

g_ctrl_failed:
  GST_WARNING_OBJECT (self, "Failed to get H264 profile and level: '%s'",
      g_strerror (errno));
  goto not_negotiated;

no_profile_level:
  GST_WARNING_OBJECT (self, "No compatible profile and level in caps: %"
      GST_PTR_FORMAT, allowed_caps);
  goto not_negotiated;

not_negotiated:
  if (allowed_caps)
    gst_caps_unref (allowed_caps);
  return FALSE;
}
/*
 * owr_local_media_source_request_source
 *
 * The beginning of a media source chain in the pipeline looks like this:
 *
 *                                                    +------------+
 *                                                /---+ inter*sink |
 * +--------+   +--------+   +------------+   +-----+ +------------+
 * | source +---+ scale? +---+ capsfilter +---+ tee +---/
 * +--------+   +--------+   +------------+   +-----+ \
 *                                                     \ +------------+
 *                                                      \+ inter*sink |
 *                                                       +------------+
 *
 * For each newly requested pad a new inter*sink is added to the tee.
 * Note that this is a completely independent pipeline, and the complete
 * pipeline is only created once for a specific media source.
 *
 * Then for each newly requested pad another bin with a inter*src is
 * created, which is then going to be part of the transport agent
 * pipeline. The ghostpad of it is what we return here.
 *
 * +-----------+   +-------------------------------+   +----------+
 * | inter*src +---+ converters/queues/capsfilters +---+ ghostpad |
 * +-----------+   +-------------------------------+   +----------+
 *
 */
static GstElement *owr_local_media_source_request_source(OwrMediaSource *media_source, GstCaps *caps)
{
    OwrLocalMediaSource *local_source;
    OwrLocalMediaSourcePrivate *priv;
    GstElement *source_element = NULL;
    GstElement *source_pipeline;
    GHashTable *event_data;
    GValue *value;
#if defined(__linux__) && !defined(__ANDROID__)
    gchar *tmp;
#endif

    g_assert(media_source);
    local_source = OWR_LOCAL_MEDIA_SOURCE(media_source);
    priv = local_source->priv;

    /* only create the source bin for this media source once */
    if ((source_pipeline = _owr_media_source_get_source_bin(media_source)))
        GST_DEBUG_OBJECT(media_source, "Re-using existing source element/bin");
    else {
        OwrMediaType media_type = OWR_MEDIA_TYPE_UNKNOWN;
        OwrSourceType source_type = OWR_SOURCE_TYPE_UNKNOWN;
        GstElement *source, *source_process = NULL, *capsfilter = NULL, *tee;
        GstPad *sinkpad, *source_pad;
        GEnumClass *media_enum_class, *source_enum_class;
        GEnumValue *media_enum_value, *source_enum_value;
        gchar *bin_name;
        GstCaps *source_caps;
        GstBus *bus;
        GSource *bus_source;

        /* Record when source creation started; posted with the
         * LOCAL_SOURCE_STARTED event below. */
        event_data = _owr_value_table_new();
        value = _owr_value_table_add(event_data, "start_time", G_TYPE_INT64);
        g_value_set_int64(value, g_get_monotonic_time());

        g_object_get(media_source, "media-type", &media_type, "type", &source_type, NULL);

        /* Build a human-readable, unique bin name from the media/source
         * type nicks, e.g. "local-video-capture-source-bin-3". */
        media_enum_class = G_ENUM_CLASS(g_type_class_ref(OWR_TYPE_MEDIA_TYPE));
        source_enum_class = G_ENUM_CLASS(g_type_class_ref(OWR_TYPE_SOURCE_TYPE));
        media_enum_value = g_enum_get_value(media_enum_class, media_type);
        source_enum_value = g_enum_get_value(source_enum_class, source_type);

        bin_name = g_strdup_printf("local-%s-%s-source-bin-%u",
            media_enum_value ? media_enum_value->value_nick : "unknown",
            source_enum_value ? source_enum_value->value_nick : "unknown",
            g_atomic_int_add(&unique_bin_id, 1));

        g_type_class_unref(media_enum_class);
        g_type_class_unref(source_enum_class);

        /* The source lives in its own pipeline, synchronized to the shared
         * system clock / base time so it can feed other pipelines. */
        source_pipeline = gst_pipeline_new(bin_name);
        gst_pipeline_use_clock(GST_PIPELINE(source_pipeline), gst_system_clock_obtain());
        gst_element_set_base_time(source_pipeline, _owr_get_base_time());
        gst_element_set_start_time(source_pipeline, GST_CLOCK_TIME_NONE);
        g_free(bin_name);
        bin_name = NULL;

#ifdef OWR_DEBUG
        g_signal_connect(source_pipeline, "deep-notify", G_CALLBACK(_owr_deep_notify), NULL);
#endif

        /* Watch the pipeline bus on OWR's main context for errors/EOS. */
        bus = gst_pipeline_get_bus(GST_PIPELINE(source_pipeline));
        bus_source = gst_bus_create_watch(bus);
        g_source_set_callback(bus_source, (GSourceFunc) bus_call, media_source, NULL);
        g_source_attach(bus_source, _owr_get_main_context());
        g_source_unref(bus_source);

        GST_DEBUG_OBJECT(local_source, "media_type: %d, type: %d", media_type, source_type);

        if (media_type == OWR_MEDIA_TYPE_UNKNOWN || source_type == OWR_SOURCE_TYPE_UNKNOWN) {
            GST_ERROR_OBJECT(local_source,
                "Cannot connect source with unknown type or media type to other component");
            goto done;
        }

        switch (media_type) {
        case OWR_MEDIA_TYPE_AUDIO:
        {
            switch (source_type) {
            case OWR_SOURCE_TYPE_CAPTURE:
                CREATE_ELEMENT(source, AUDIO_SRC, "audio-source");
#if !defined(__APPLE__) || !TARGET_IPHONE_SIMULATOR
/* Default values for buffer-time and latency-time on android are 200ms and 20ms.
 * The minimum latency-time that can be used on Android is 20ms, and using
 * a 40ms buffer-time with a 20ms latency-time causes crackling audio.
 * So let's just stick with the defaults. */
#if !defined(__ANDROID__)
                g_object_set(source, "buffer-time", G_GINT64_CONSTANT(40000),
                    "latency-time", G_GINT64_CONSTANT(10000), NULL);
#endif
                if (priv->device_index > -1) {
                    /* Device selection property differs per platform/source
                     * element: int on macOS, string on Linux (ALSA/Pulse). */
#ifdef __APPLE__
                    g_object_set(source, "device", priv->device_index, NULL);
#elif defined(__linux__) && !defined(__ANDROID__)
                    tmp = g_strdup_printf("%d", priv->device_index);
                    g_object_set(source, "device", tmp, NULL);
                    g_free(tmp);
#endif
                }
#endif
                break;
            case OWR_SOURCE_TYPE_TEST:
                CREATE_ELEMENT(source, "audiotestsrc", "audio-source");
                g_object_set(source, "is-live", TRUE, NULL);
                break;
            case OWR_SOURCE_TYPE_UNKNOWN:
            default:
                g_assert_not_reached();
                goto done;
            }
            break;
        }
        case OWR_MEDIA_TYPE_VIDEO:
        {
            GstPad *srcpad;
            GstCaps *device_caps;

            switch (source_type) {
            case OWR_SOURCE_TYPE_CAPTURE:
                CREATE_ELEMENT(source, VIDEO_SRC, "video-source");
                if (priv->device_index > -1) {
                    /* Per-platform camera selection property. */
#if defined(__APPLE__) && !TARGET_IPHONE_SIMULATOR
                    g_object_set(source, "device-index", priv->device_index, NULL);
#elif defined(__ANDROID__)
                    g_object_set(source, "cam-index", priv->device_index, NULL);
#elif defined(__linux__)
                    tmp = g_strdup_printf("/dev/video%d", priv->device_index);
                    g_object_set(source, "device", tmp, NULL);
                    g_free(tmp);
#endif
                }
                break;
            case OWR_SOURCE_TYPE_TEST:
            {
                /* Test source: videotestsrc, with a time overlay when the
                 * timeoverlay element is available, wrapped in a bin with a
                 * single "src" ghost pad. */
                GstElement *src, *time;
                GstPad *srcpad;

                source = gst_bin_new("video-source");

                CREATE_ELEMENT(src, "videotestsrc", "videotestsrc");
                g_object_set(src, "is-live", TRUE, NULL);
                gst_bin_add(GST_BIN(source), src);

                time = gst_element_factory_make("timeoverlay", "timeoverlay");
                if (time) {
                    g_object_set(time, "font-desc", "Sans 60", NULL);
                    gst_bin_add(GST_BIN(source), time);
                    gst_element_link(src, time);
                    srcpad = gst_element_get_static_pad(time, "src");
                } else
                    srcpad = gst_element_get_static_pad(src, "src");

                gst_element_add_pad(source, gst_ghost_pad_new("src", srcpad));
                gst_object_unref(srcpad);

                break;
            }
            case OWR_SOURCE_TYPE_UNKNOWN:
            default:
                g_assert_not_reached();
                goto done;
            }

            /* First try to see if we can just get the format we want directly */

            source_caps = gst_caps_new_empty();
#if GST_CHECK_VERSION(1, 5, 0)
            gst_caps_foreach(caps, fix_video_caps_framerate, source_caps);
#else
            _owr_gst_caps_foreach(caps, fix_video_caps_framerate, source_caps);
#endif
            /* Now see what the device can really produce */
            srcpad = gst_element_get_static_pad(source, "src");
            gst_element_set_state(source, GST_STATE_READY);
            device_caps = gst_pad_query_caps(srcpad, source_caps);

            if (gst_caps_is_empty(device_caps)) {
                /* Let's see if it works when we drop format constraints (which can be dealt with downsteram) */
                GstCaps *tmp = source_caps;
                source_caps = gst_caps_new_empty();
#if GST_CHECK_VERSION(1, 5, 0)
                gst_caps_foreach(tmp, fix_video_caps_format, source_caps);
#else
                _owr_gst_caps_foreach(tmp, fix_video_caps_format, source_caps);
#endif
                gst_caps_unref(tmp);

                gst_caps_unref(device_caps);
                device_caps = gst_pad_query_caps(srcpad, source_caps);

                if (gst_caps_is_empty(device_caps)) {
                    /* Accepting any format didn't work, we're going to hope that scaling fixes it */
                    CREATE_ELEMENT(source_process, "videoscale", "video-source-scale");
                    gst_bin_add(GST_BIN(source_pipeline), source_process);
                }
            }

            gst_caps_unref(device_caps);
            gst_object_unref(srcpad);

#if defined(__APPLE__) && TARGET_OS_IPHONE && !TARGET_IPHONE_SIMULATOR
            /* Force NV12 on iOS else the source can negotiate BGRA
             * ercolorspace can do NV12 -> BGRA and NV12 -> I420 which is what
             * is needed for Bowser */
            gst_caps_set_simple(source_caps, "format", G_TYPE_STRING, "NV12", NULL);
#endif

            CREATE_ELEMENT(capsfilter, "capsfilter", "video-source-capsfilter");
            g_object_set(capsfilter, "caps", source_caps, NULL);
            gst_caps_unref(source_caps);
            gst_bin_add(GST_BIN(source_pipeline), capsfilter);

            break;
        }
        case OWR_MEDIA_TYPE_UNKNOWN:
        default:
            g_assert_not_reached();
            goto done;
        }
        g_assert(source);

        /* Re-propagate caps changes from the source to the media source. */
        source_pad = gst_element_get_static_pad(source, "src");
        g_signal_connect(source_pad, "notify::caps", G_CALLBACK(on_caps), media_source);
        gst_object_unref(source_pad);

        CREATE_ELEMENT(tee, "tee", "source-tee");
        /* allow-not-linked keeps the tee from erroring out while no
         * consumer pad is requested yet. */
        g_object_set(tee, "allow-not-linked", TRUE, NULL);
        gst_bin_add_many(GST_BIN(source_pipeline), source, tee, NULL);

        /* Many sources don't like reconfiguration and it's pointless
         * here anyway right now. No need to reconfigure whenever something
         * is added to the tee or removed.
         * We will have to implement reconfiguration differently later by
         * selecting the best caps based on all consumers.
         */
        sinkpad = gst_element_get_static_pad(tee, "sink");
        gst_pad_add_probe(sinkpad, GST_PAD_PROBE_TYPE_EVENT_UPSTREAM, drop_reconfigure_event, NULL, NULL);
        gst_object_unref(sinkpad);

        /* NOTE(review): source is asserted non-NULL above and already used;
         * this check can never trigger here. */
        if (!source)
            GST_ERROR_OBJECT(media_source, "Failed to create source element!");

        /* Link whichever of source [-> scale] [-> capsfilter] -> tee
         * elements were actually created. */
        if (capsfilter) {
            LINK_ELEMENTS(capsfilter, tee);
            if (source_process) {
                LINK_ELEMENTS(source_process, capsfilter);
                LINK_ELEMENTS(source, source_process);
            } else
                LINK_ELEMENTS(source, capsfilter);
        } else if (source_process) {
            LINK_ELEMENTS(source_process, tee);
            LINK_ELEMENTS(source, source_process);
        } else
            LINK_ELEMENTS(source, tee);

        /* Sync states downstream-first so data never reaches an element
         * that is not ready. */
        gst_element_sync_state_with_parent(tee);
        if (capsfilter)
            gst_element_sync_state_with_parent(capsfilter);
        if (source_process)
            gst_element_sync_state_with_parent(source_process);
        gst_element_sync_state_with_parent(source);

        _owr_media_source_set_source_bin(media_source, source_pipeline);
        _owr_media_source_set_source_tee(media_source, tee);
        if (gst_element_set_state(source_pipeline, GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) {
            GST_ERROR("Failed to set local source pipeline %s to playing", GST_OBJECT_NAME(source_pipeline));
            /* FIXME: We should handle this and don't expose the source */
        }

        value = _owr_value_table_add(event_data, "end_time", G_TYPE_INT64);
        g_value_set_int64(value, g_get_monotonic_time());
        OWR_POST_EVENT(media_source, LOCAL_SOURCE_STARTED, event_data);

        g_signal_connect(tee, "pad-removed", G_CALLBACK(tee_pad_removed_cb), media_source);
    }
    gst_object_unref(source_pipeline);

    /* Chain up: the parent class builds the per-consumer inter*src bin and
     * returns its ghost pad element. */
    source_element = OWR_MEDIA_SOURCE_CLASS(owr_local_media_source_parent_class)->request_source(media_source, caps);

done:
    return source_element;
}