/**
 * gst_encoding_video_profile_set_pass:
 * @prof: a #GstEncodingVideoProfile
 * @pass: the pass number for this profile
 *
 * Sets the pass number of this video profile. The first pass profile should have
 * this value set to 1. If this video profile isn't part of a multi-pass profile,
 * you may set it to 0 (the default value).
 */
void
gst_encoding_video_profile_set_pass (GstEncodingVideoProfile * prof,
    guint pass)
{
  g_return_if_fail (GST_IS_ENCODING_VIDEO_PROFILE (prof));

  prof->pass = pass;
}
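/* Usage sketch (not part of the library source): building the video profiles
 * for a two-pass encode with gst_encoding_video_profile_new() and
 * gst_encoding_video_profile_set_pass(). The H.264 caps and the absence of a
 * preset/restriction are illustrative assumptions.
 */
static void
example_build_two_pass_profiles (void)
{
  GstCaps *format = gst_caps_new_empty_simple ("video/x-h264");
  GstEncodingVideoProfile *first_pass, *second_pass;

  first_pass = gst_encoding_video_profile_new (format, NULL, NULL, 0);
  gst_encoding_video_profile_set_pass (first_pass, 1);

  second_pass = gst_encoding_video_profile_new (format, NULL, NULL, 0);
  gst_encoding_video_profile_set_pass (second_pass, 2);

  gst_caps_unref (format);

  /* ... use the profiles, then drop them ... */
  gst_encoding_profile_unref ((GstEncodingProfile *) first_pass);
  gst_encoding_profile_unref ((GstEncodingProfile *) second_pass);
}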
static gint
_compare_encoding_profiles (const GstEncodingProfile * a,
    const GstEncodingProfile * b)
{
  if ((G_TYPE_FROM_INSTANCE (a) != G_TYPE_FROM_INSTANCE (b)) ||
      !_gst_caps_is_equal_safe (a->format, b->format) ||
      (g_strcmp0 (a->preset, b->preset) != 0) ||
      (g_strcmp0 (a->name, b->name) != 0) ||
      (g_strcmp0 (a->description, b->description) != 0))
    return -1;

  if (GST_IS_ENCODING_CONTAINER_PROFILE (a))
    return
        _compare_container_encoding_profiles (GST_ENCODING_CONTAINER_PROFILE
        (a), GST_ENCODING_CONTAINER_PROFILE (b));

  if (GST_IS_ENCODING_VIDEO_PROFILE (a)) {
    GstEncodingVideoProfile *va = (GstEncodingVideoProfile *) a;
    GstEncodingVideoProfile *vb = (GstEncodingVideoProfile *) b;

    if ((va->pass != vb->pass) ||
        (va->variableframerate != vb->variableframerate))
      return -1;
  }

  return 0;
}
/**
 * gst_encoding_video_profile_get_pass:
 * @prof: a #GstEncodingVideoProfile
 *
 * Get the pass number if this is part of a multi-pass profile.
 *
 * Returns: The pass number. Starts at 1 for multi-pass. 0 if this is
 * not a multi-pass profile.
 */
guint
gst_encoding_video_profile_get_pass (GstEncodingVideoProfile * prof)
{
  g_return_val_if_fail (GST_IS_ENCODING_VIDEO_PROFILE (prof), 0);

  return prof->pass;
}
/**
 * gst_encoding_video_profile_set_variableframerate:
 * @prof: a #GstEncodingVideoProfile
 * @variableframerate: a boolean
 *
 * If set to %TRUE, then the incoming stream will be allowed to have a
 * non-constant framerate. If set to %FALSE (default value), then the incoming
 * stream will be normalized by dropping/duplicating frames in order to
 * produce a constant framerate.
 */
void
gst_encoding_video_profile_set_variableframerate (GstEncodingVideoProfile *
    prof, gboolean variableframerate)
{
  g_return_if_fail (GST_IS_ENCODING_VIDEO_PROFILE (prof));

  prof->variableframerate = variableframerate;
}
/**
 * gst_encoding_video_profile_get_variableframerate:
 * @prof: a #GstEncodingVideoProfile
 *
 * Returns: Whether non-constant video framerate is allowed for encoding.
 */
gboolean
gst_encoding_video_profile_get_variableframerate (GstEncodingVideoProfile *
    prof)
{
  g_return_val_if_fail (GST_IS_ENCODING_VIDEO_PROFILE (prof), FALSE);

  return prof->variableframerate;
}
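/* Usage sketch (not part of the library source): allowing variable framerate
 * on a video profile, e.g. for screen-capture style input, and reading the
 * flag back. The VP9 caps are an illustrative assumption.
 */
static void
example_allow_variable_framerate (void)
{
  GstCaps *format = gst_caps_new_empty_simple ("video/x-vp9");
  GstEncodingVideoProfile *vprof =
      gst_encoding_video_profile_new (format, NULL, NULL, 0);

  gst_encoding_video_profile_set_variableframerate (vprof, TRUE);
  g_assert (gst_encoding_video_profile_get_variableframerate (vprof));

  gst_caps_unref (format);
  gst_encoding_profile_unref ((GstEncodingProfile *) vprof);
}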
static void
kms_muxing_pipeline_configure (KmsMuxingPipeline * self)
{
  GstEncodingContainerProfile *cprof;
  const GList *profiles, *l;
  GstElement *appsrc;

  cprof =
      kms_recording_profile_create_profile (self->priv->profile, TRUE, TRUE);

  profiles = gst_encoding_container_profile_get_profiles (cprof);

  for (l = profiles; l != NULL; l = l->next) {
    GstEncodingProfile *prof = l->data;
    GstCaps *caps;

    if (GST_IS_ENCODING_AUDIO_PROFILE (prof)) {
      appsrc = self->priv->audiosrc;
    } else if (GST_IS_ENCODING_VIDEO_PROFILE (prof)) {
      appsrc = self->priv->videosrc;
    } else {
      continue;
    }

    caps = gst_encoding_profile_get_input_caps (prof);

    g_object_set (G_OBJECT (appsrc), "is-live", TRUE, "do-timestamp", FALSE,
        "min-latency", G_GUINT64_CONSTANT (0),
        "max-latency", G_GUINT64_CONSTANT (0),
        "format", GST_FORMAT_TIME, NULL);

    gst_caps_unref (caps);
  }

  g_object_set (G_OBJECT (self->priv->encodebin), "profile", cprof,
      "audio-jitter-tolerance", 100 * GST_MSECOND,
      "avoid-reencoding", TRUE, NULL);
  gst_encoding_profile_unref (cprof);

  if (self->priv->profile == KMS_RECORDING_PROFILE_MP4) {
    GstElement *mux =
        gst_bin_get_by_name (GST_BIN (self->priv->encodebin), "muxer");

    g_object_set (G_OBJECT (mux), "fragment-duration", 2000,
        "streamable", TRUE, NULL);

    g_object_unref (mux);
  } else if (self->priv->profile == KMS_RECORDING_PROFILE_WEBM) {
    GstElement *mux =
        gst_bin_get_by_name (GST_BIN (self->priv->encodebin), "muxer");

    g_object_set (G_OBJECT (mux), "streamable", TRUE, NULL);

    g_object_unref (mux);
  }
}
/**
 * gst_encoding_profile_get_type_nick:
 * @profile: a #GstEncodingProfile
 *
 * Returns: the human-readable name of the type of @profile.
 */
const gchar *
gst_encoding_profile_get_type_nick (GstEncodingProfile * profile)
{
  if (GST_IS_ENCODING_CONTAINER_PROFILE (profile))
    return "container";
  if (GST_IS_ENCODING_VIDEO_PROFILE (profile))
    return "video";
  if (GST_IS_ENCODING_AUDIO_PROFILE (profile))
    return "audio";
  return NULL;
}
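/* Usage sketch (not part of the library source): the type nick is handy for
 * logging or keyed serialization, as serialize_stream_profiles() below does
 * when writing the "type" key.
 */
static void
example_log_profile_type (GstEncodingProfile * profile)
{
  const gchar *name = gst_encoding_profile_get_name (profile);
  const gchar *nick = gst_encoding_profile_get_type_nick (profile);

  /* "container", "video", "audio", or NULL for an unknown subtype */
  g_print ("profile %s is of type %s\n",
      name ? name : "(unnamed)", nick ? nick : "unknown");
}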
static gboolean
is_video_profile (const GstEncodingProfile * profile)
{
  const GList *i, *profiles_list;

  if (GST_IS_ENCODING_CONTAINER_PROFILE (profile)) {
    profiles_list =
        gst_encoding_container_profile_get_profiles
        (GST_ENCODING_CONTAINER_PROFILE (profile));
    for (i = profiles_list; i; i = i->next)
      if (GST_IS_ENCODING_VIDEO_PROFILE (i->data))
        return TRUE;
  }

  return FALSE;
}
static gboolean
gst_encoding_container_profile_has_video (GstEncodingContainerProfile *
    profile)
{
  const GList *l;

  g_return_val_if_fail (GST_IS_ENCODING_CONTAINER_PROFILE (profile), FALSE);

  for (l = profile->encodingprofiles; l != NULL; l = l->next) {
    if (GST_IS_ENCODING_VIDEO_PROFILE (l->data))
      return TRUE;
    if (GST_IS_ENCODING_CONTAINER_PROFILE (l->data) &&
        gst_encoding_container_profile_has_video (l->data))
      return TRUE;
  }

  return FALSE;
}
static GstCaps *
kms_recorder_endpoint_get_caps_from_profile (KmsRecorderEndpoint * self,
    KmsElementPadType type)
{
  GstEncodingContainerProfile *cprof;
  const GList *profiles, *l;
  GstCaps *caps = NULL;

  switch (type) {
    case KMS_ELEMENT_PAD_TYPE_VIDEO:
      cprof =
          kms_recording_profile_create_profile (self->priv->profile, FALSE,
          TRUE);
      break;
    case KMS_ELEMENT_PAD_TYPE_AUDIO:
      cprof =
          kms_recording_profile_create_profile (self->priv->profile, TRUE,
          FALSE);
      break;
    default:
      return NULL;
  }

  profiles = gst_encoding_container_profile_get_profiles (cprof);

  for (l = profiles; l != NULL; l = l->next) {
    GstEncodingProfile *prof = l->data;

    if ((GST_IS_ENCODING_AUDIO_PROFILE (prof) &&
            type == KMS_ELEMENT_PAD_TYPE_AUDIO) ||
        (GST_IS_ENCODING_VIDEO_PROFILE (prof) &&
            type == KMS_ELEMENT_PAD_TYPE_VIDEO)) {
      caps = gst_encoding_profile_get_input_caps (prof);
      break;
    }
  }

  gst_encoding_profile_unref (cprof);

  return caps;
}
static gboolean
compare_encoding_profile_with_discoverer_stream (GstValidateFileChecker * fc,
    GstEncodingProfile * prof, GstDiscovererStreamInfo * stream, gchar ** msg)
{
  gboolean ret = TRUE;
  GstCaps *caps = NULL;
  const GstCaps *profile_caps;
  const GstCaps *restriction_caps;

  caps = gst_discoverer_stream_info_get_caps (stream);
  profile_caps = gst_encoding_profile_get_format (prof);
  restriction_caps = gst_encoding_profile_get_restriction (prof);

  /* TODO need to consider profile caps restrictions */
  if (!_gst_caps_can_intersect_safe (caps, profile_caps)) {
    gchar *caps_str = gst_caps_to_string (caps);
    gchar *profile_caps_str = gst_caps_to_string (profile_caps);

    SET_MESSAGE (msg,
        g_strdup_printf ("Caps '%s' didn't match profile '%s'",
            profile_caps_str, caps_str));
    g_free (caps_str);
    g_free (profile_caps_str);
    ret = FALSE;
    goto end;
  }

  if (restriction_caps) {
    GstStructure *structure;
    gint i;
    gboolean found = FALSE;

    for (i = 0; i < gst_caps_get_size (restriction_caps); i++) {
      structure = gst_caps_get_structure (restriction_caps, i);
      structure = gst_structure_copy (structure);
      gst_structure_set_name (structure,
          gst_structure_get_name (gst_caps_get_structure (caps, 0)));
      if (gst_structure_can_intersect (structure,
              gst_caps_get_structure (caps, 0))) {
        gst_structure_free (structure);
        found = TRUE;
        break;
      }
      gst_structure_free (structure);
    }

    if (!found) {
      gchar *caps_str = gst_caps_to_string (caps);
      gchar *restriction_caps_str = gst_caps_to_string (restriction_caps);

      SET_MESSAGE (msg,
          g_strdup_printf ("Caps restriction '%s' wasn't respected on file "
              "with caps '%s'", restriction_caps_str, caps_str));
      g_free (caps_str);
      g_free (restriction_caps_str);
      ret = FALSE;
      goto end;
    }
  }

  if (GST_IS_ENCODING_CONTAINER_PROFILE (prof)) {
    if (GST_IS_DISCOVERER_CONTAINER_INFO (stream)) {
      ret = ret &
          compare_container_profile_with_container_discoverer_stream (fc,
          (GstEncodingContainerProfile *) prof,
          (GstDiscovererContainerInfo *) stream, msg);
    } else {
      SET_MESSAGE (msg,
          g_strdup_printf ("Expected container profile but found stream of %s",
              gst_discoverer_stream_info_get_stream_type_nick (stream)));
      ret = FALSE;
      goto end;
    }
  } else if (GST_IS_ENCODING_VIDEO_PROFILE (prof)) {
    if (!GST_IS_DISCOVERER_VIDEO_INFO (stream)) {
      SET_MESSAGE (msg,
          g_strdup_printf ("Expected video profile but found stream of %s",
              gst_discoverer_stream_info_get_stream_type_nick (stream)));
      ret = FALSE;
      goto end;
    }
  } else if (GST_IS_ENCODING_AUDIO_PROFILE (prof)) {
    if (!GST_IS_DISCOVERER_AUDIO_INFO (stream)) {
      SET_MESSAGE (msg,
          g_strdup_printf ("Expected audio profile but found stream of %s",
              gst_discoverer_stream_info_get_stream_type_nick (stream)));
      ret = FALSE;
      goto end;
    }
  } else {
    g_assert_not_reached ();
    return FALSE;
  }

end:
  if (caps)
    gst_caps_unref (caps);

  return ret;
}
/**
 * ges_pipeline_set_render_settings:
 * @pipeline: a #GESPipeline
 * @output_uri: the URI to which the timeline will be rendered
 * @profile: the #GstEncodingProfile to use to render the timeline.
 *
 * Specify where the pipeline shall be rendered and with what settings.
 *
 * A copy of @profile and @output_uri will be done internally, the caller can
 * safely free those values afterwards.
 *
 * This method must be called before setting the pipeline mode to
 * #GES_PIPELINE_MODE_RENDER
 *
 * Returns: %TRUE if the settings were acknowledged properly, else %FALSE
 */
gboolean
ges_pipeline_set_render_settings (GESPipeline * pipeline,
    const gchar * output_uri, GstEncodingProfile * profile)
{
  GError *err = NULL;
  GstEncodingProfile *set_profile;

  g_return_val_if_fail (GES_IS_PIPELINE (pipeline), FALSE);

  /* FIXME Properly handle multi track, for now GESPipeline
   * only handles a single track per type, so we should just set the
   * presence to 1.
   */
  if (GST_IS_ENCODING_CONTAINER_PROFILE (profile)) {
    const GList *tmpprofiles =
        gst_encoding_container_profile_get_profiles
        (GST_ENCODING_CONTAINER_PROFILE (profile));
    GList *tmptrack, *tracks =
        ges_timeline_get_tracks (pipeline->priv->timeline);

    for (; tmpprofiles; tmpprofiles = tmpprofiles->next) {
      for (tmptrack = tracks; tmptrack; tmptrack = tmptrack->next) {
        if ((GST_IS_ENCODING_AUDIO_PROFILE (tmpprofiles->data) &&
                GES_IS_AUDIO_TRACK (tmptrack->data)) ||
            (GST_IS_ENCODING_VIDEO_PROFILE (tmpprofiles->data) &&
                GES_IS_VIDEO_TRACK (tmptrack->data))) {
          GST_DEBUG_OBJECT (pipeline, "Setting presence to 1!");
          gst_encoding_profile_set_presence (tmpprofiles->data, 1);
          gst_encoding_profile_set_allow_dynamic_output (tmpprofiles->data,
              FALSE);
        }
      }
    }

    g_list_free_full (tracks, gst_object_unref);
  }

  /* Clear previous URI sink if it existed */
  /* FIXME : We should figure out if it was added to the pipeline,
   * and if so, remove it. */
  if (pipeline->priv->urisink) {
    gst_object_unref (pipeline->priv->urisink);
    pipeline->priv->urisink = NULL;
  }

  pipeline->priv->urisink =
      gst_element_make_from_uri (GST_URI_SINK, output_uri, "urisink", &err);
  if (G_UNLIKELY (pipeline->priv->urisink == NULL)) {
    GST_ERROR_OBJECT (pipeline, "Couldn't create sink for URI %s: '%s'",
        output_uri,
        ((err && err->message) ? err->message : "failed to create element"));
    g_clear_error (&err);
    return FALSE;
  }

  if (pipeline->priv->profile)
    gst_encoding_profile_unref (pipeline->priv->profile);
  g_object_set (pipeline->priv->encodebin, "avoid-reencoding",
      !(!(pipeline->priv->mode & GES_PIPELINE_MODE_SMART_RENDER)), NULL);
  g_object_set (pipeline->priv->encodebin, "profile", profile, NULL);
  g_object_get (pipeline->priv->encodebin, "profile", &set_profile, NULL);

  if (set_profile == NULL) {
    GST_ERROR_OBJECT (pipeline, "Profile %" GST_PTR_FORMAT " could not be set",
        profile);
    return FALSE;
  }

  /* We got a reference when getting back the profile */
  pipeline->priv->profile = profile;

  return TRUE;
}
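/* Usage sketch (not part of the library source): rendering a timeline to an
 * Ogg/Theora/Vorbis file. The container/stream caps and the output URI are
 * illustrative assumptions.
 */
static gboolean
example_render_timeline (GESPipeline * pipeline)
{
  GstEncodingContainerProfile *container;
  GstCaps *caps;
  gboolean ok;

  caps = gst_caps_new_empty_simple ("application/ogg");
  container = gst_encoding_container_profile_new ("ogg", NULL, caps, NULL);
  gst_caps_unref (caps);

  caps = gst_caps_new_empty_simple ("video/x-theora");
  gst_encoding_container_profile_add_profile (container,
      (GstEncodingProfile *) gst_encoding_video_profile_new (caps, NULL,
          NULL, 0));
  gst_caps_unref (caps);

  caps = gst_caps_new_empty_simple ("audio/x-vorbis");
  gst_encoding_container_profile_add_profile (container,
      (GstEncodingProfile *) gst_encoding_audio_profile_new (caps, NULL,
          NULL, 0));
  gst_caps_unref (caps);

  ok = ges_pipeline_set_render_settings (pipeline, "file:///tmp/out.ogv",
      (GstEncodingProfile *) container);
  if (ok)
    ok = ges_pipeline_set_mode (pipeline, GES_PIPELINE_MODE_RENDER);

  gst_encoding_profile_unref ((GstEncodingProfile *) container);

  return ok;
}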
static gboolean
serialize_stream_profiles (GKeyFile * out, GstEncodingProfile * sprof,
    const gchar * profilename, guint id)
{
  gchar *sprofgroupname;
  gchar *tmpc;
  GstCaps *format, *restriction;
  const gchar *preset, *name, *description;

  sprofgroupname = g_strdup_printf ("streamprofile-%s-%d", profilename, id);

  /* Write the parent profile */
  g_key_file_set_value (out, sprofgroupname, "parent", profilename);

  g_key_file_set_value (out, sprofgroupname, "type",
      gst_encoding_profile_get_type_nick (sprof));

  format = gst_encoding_profile_get_format (sprof);
  if (format) {
    tmpc = gst_caps_to_string (format);
    g_key_file_set_value (out, sprofgroupname, "format", tmpc);
    g_free (tmpc);
  }

  name = gst_encoding_profile_get_name (sprof);
  if (name)
    g_key_file_set_string (out, sprofgroupname, "name", name);

  description = gst_encoding_profile_get_description (sprof);
  if (description)
    g_key_file_set_string (out, sprofgroupname, "description", description);

  preset = gst_encoding_profile_get_preset (sprof);
  if (preset)
    g_key_file_set_string (out, sprofgroupname, "preset", preset);

  restriction = gst_encoding_profile_get_restriction (sprof);
  if (restriction) {
    tmpc = gst_caps_to_string (restriction);
    g_key_file_set_value (out, sprofgroupname, "restriction", tmpc);
    g_free (tmpc);
  }

  g_key_file_set_integer (out, sprofgroupname, "presence",
      gst_encoding_profile_get_presence (sprof));

  if (GST_IS_ENCODING_VIDEO_PROFILE (sprof)) {
    GstEncodingVideoProfile *vp = (GstEncodingVideoProfile *) sprof;

    g_key_file_set_integer (out, sprofgroupname, "pass",
        gst_encoding_video_profile_get_pass (vp));
    g_key_file_set_boolean (out, sprofgroupname, "variableframerate",
        gst_encoding_video_profile_get_variableframerate (vp));
  }

  g_free (sprofgroupname);
  if (format)
    gst_caps_unref (format);
  if (restriction)
    gst_caps_unref (restriction);

  return TRUE;
}
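/* For illustration only (hypothetical output, the profile name "myprofile"
 * and the caps are assumptions): a video stream profile serialized by the
 * function above ends up as a GKeyFile group along these lines:
 *
 *   [streamprofile-myprofile-0]
 *   parent=myprofile
 *   type=video
 *   format=video/x-h264
 *   presence=0
 *   pass=0
 *   variableframerate=false
 */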