static gboolean _load (GESFormatter * self, GESTimeline * timeline, const gchar * string, GError ** error) { guint i; GList *tmp; GError *err; GESStructureParser *parser = _parse_structures (string); err = ges_structure_parser_get_error (parser); if (err) { if (error) *error = err; return FALSE; } g_object_set (timeline, "auto-transition", TRUE, NULL); if (!(ges_timeline_add_track (timeline, GES_TRACK (ges_video_track_new ())))) goto fail; if (!(ges_timeline_add_track (timeline, GES_TRACK (ges_audio_track_new ())))) goto fail; /* Here we've finished initializing our timeline, we're * ready to start using it... by solely working with the layer !*/ for (tmp = parser->structures; tmp; tmp = tmp->next) { const gchar *name = gst_structure_get_name (tmp->data); if (g_str_has_prefix (name, "set-")) { EXEC (_set_child_property, tmp->data, &err); continue; } for (i = 0; i < G_N_ELEMENTS (timeline_parsing_options); i++) { if (gst_structure_has_name (tmp->data, timeline_parsing_options[i].long_name) || (strlen (name) == 1 && *name == timeline_parsing_options[i].short_name)) { EXEC (((ActionFromStructureFunc) timeline_parsing_options[i].arg_data), tmp->data, &err); } } } gst_object_unref (parser); return TRUE; fail: gst_object_unref (parser); if (err) { if (error) *error = err; } return FALSE; }
GESTimeline * positionTestTL (void) { GESTimeline *timeline; GESTrack *trackv; GError **error = NULL; GESAsset *asset; GESClip *clip; timeline = ges_timeline_new (); trackv = GES_TRACK (ges_video_track_new ()); ges_timeline_add_track (timeline, trackv); GESLayer *layer = ges_layer_new (); ges_timeline_add_layer (timeline, layer); asset = GES_ASSET (ges_uri_clip_asset_request_sync (ges_renderer_get_absolute_path ("image/wallpaper720p.jpg"), error)); clip = ges_layer_add_asset (layer, asset, 0, 0, 2 * GST_SECOND, GES_TRACK_TYPE_VIDEO); GESTrackElement *elem = ges_clip_find_track_element (clip, trackv, G_TYPE_NONE); ges_track_element_set_child_properties (elem, "posx", 100, "width", 100, NULL); ges_timeline_commit (timeline); return timeline; }
GESTimeline * ges_timeline_new_audio_video (void) { GESTrack *tracka, *trackv; GESTimeline *timeline; /* This is our main GESTimeline */ timeline = ges_timeline_new (); tracka = GES_TRACK (ges_audio_track_new ()); trackv = GES_TRACK (ges_video_track_new ()); if (!ges_timeline_add_track (timeline, trackv) || !ges_timeline_add_track (timeline, tracka)) { gst_object_unref (timeline); timeline = NULL; } return timeline; }
/* Ensure the formatter's timeline has an audio and a video track.
 *
 * If the timeline already owns tracks, reuse them (the last audio and the
 * last non-audio track win); otherwise create and add fresh ones.
 *
 * Returns: TRUE on success, FALSE if a newly created track could not be
 * added to the timeline.
 */
static gboolean
create_tracks (GESFormatter * self)
{
  GESPitiviFormatterPrivate *priv = GES_PITIVI_FORMATTER (self)->priv;
  GList *tracks = ges_timeline_get_tracks (self->timeline);

  GST_DEBUG ("Creating tracks, current number of tracks %d",
      g_list_length (tracks));

  if (tracks) {
    GList *tmp;

    for (tmp = tracks; tmp; tmp = tmp->next) {
      GESTrack *track = tmp->data;

      if (track->type == GES_TRACK_TYPE_AUDIO)
        priv->tracka = track;
      else
        priv->trackv = track;
    }
    /* get_tracks() returned a list of new references; drop list + refs. */
    g_list_free_full (tracks, (GDestroyNotify) gst_object_unref);
    return TRUE;
  }

  priv->tracka = GES_TRACK (ges_audio_track_new ());
  priv->trackv = GES_TRACK (ges_video_track_new ());

  if (!ges_timeline_add_track (self->timeline, priv->trackv))
    return FALSE;
  if (!ges_timeline_add_track (self->timeline, priv->tracka))
    return FALSE;

  return TRUE;
}
GESTimeline * compTL (void) { GESTimeline *timeline; GESTrack *trackv; timeline = ges_timeline_new (); trackv = GES_TRACK (ges_video_track_new ()); ges_timeline_add_track (timeline, trackv); const gchar *assets[] = { "image/vieh.png", "image/PNG_transparency_demonstration_1.png", "image/Ice_Cream.png", "image/Fish.png" }; guint asset_count = 4; for (int i = 1; i <= asset_count; i++) { GESLayer *layer = ges_layer_new (); ges_timeline_add_layer (timeline, layer); g_object_set (layer, "priority", i - 1, NULL); GESClip *vieh = ges_clip_from_rel_path (assets[i - 1], layer, 0, 0, 10, GES_TRACK_TYPE_VIDEO); GESTrackElement *elem = ges_clip_find_track_element (vieh, trackv, G_TYPE_NONE); GESUriClipAsset *asset = GES_URI_CLIP_ASSET (ges_extractable_get_asset (GES_EXTRACTABLE (vieh))); guint width = ges_asset_get_width (asset); guint height = ges_asset_get_height (asset); g_print ("%s: %dx%d\n", assets[i - 1], width, height); ges_track_element_set_child_properties (elem, "posx", i * 100, "posy", i * 100, "width", i * 100 * width / height, "height", (i * 100) - 1, NULL); } GESLayer *backgroud_layer = ges_layer_new (); ges_timeline_add_layer (timeline, backgroud_layer); g_object_set (backgroud_layer, "priority", asset_count, NULL); ges_clip_from_rel_path ("image/wallpaper-2597248.jpg", backgroud_layer, 0, 0, 10, GES_TRACK_TYPE_VIDEO); ges_timeline_commit (timeline); return timeline; }
/* Build a preview pipeline whose timeline holds one layer with a source
 * clip from @path and a text overlay of the same duration.
 *
 * Returns: (transfer full): the pipeline; exits the process if the layer
 * cannot be added to the timeline.
 */
GESPipeline *
make_timeline (char *path, float duration, char *text, guint32 color,
    gdouble xpos, gdouble ypos)
{
  GESPipeline *pipeline = ges_pipeline_new ();
  ges_pipeline_set_mode (pipeline, TIMELINE_MODE_PREVIEW_VIDEO);

  GESTimeline *timeline = ges_timeline_new ();
  ges_pipeline_add_timeline (pipeline, timeline);

  ges_timeline_add_track (timeline, GES_TRACK (ges_video_track_new ()));
  ges_timeline_add_track (timeline, GES_TRACK (ges_audio_track_new ()));

  GESLayer *layer = GES_LAYER (ges_layer_new ());
  g_object_set (layer, "priority", (gint32) 0, NULL);
  if (!ges_timeline_add_layer (timeline, layer))
    exit (-1);

  /* Convert the float duration (seconds) into nanoseconds once. */
  guint64 clip_duration = (guint64) (duration * GST_SECOND);
  GESClip *source = make_source (path, 0, clip_duration, 1);
  GESClip *text_overlay =
      make_overlay (text, 0, clip_duration, 0, color, xpos, ypos);

  ges_layer_add_clip (layer, source);
  ges_layer_add_clip (layer, text_overlay);

  return pipeline;
}
static GESTimelinePipeline * create_timeline (void) { GESTimelinePipeline *pipeline; GESTimelineLayer *layer; GESTrack *tracka, *trackv; GESTimeline *timeline; GESTimelineObject *src; timeline = ges_timeline_new (); tracka = ges_track_audio_raw_new (); trackv = ges_track_video_raw_new (); layer = (GESTimelineLayer *) ges_simple_timeline_layer_new (); /* Add the tracks and the layer to the timeline */ if (!ges_timeline_add_layer (timeline, layer) || !ges_timeline_add_track (timeline, tracka) || !ges_timeline_add_track (timeline, trackv)) return NULL; /* Add the main audio/video file */ src = GES_TIMELINE_OBJECT (ges_timeline_test_source_new ()); g_object_set (src, "vpattern", GES_VIDEO_TEST_PATTERN_SNOW, "duration", 10 * GST_SECOND, NULL); ges_simple_timeline_layer_add_object ((GESSimpleTimelineLayer *) layer, GES_TIMELINE_OBJECT (src), 0); pipeline = ges_timeline_pipeline_new (); if (!ges_timeline_pipeline_add_timeline (pipeline, timeline)) return NULL; return pipeline; }
GESTimeline * volumeTestTL (void) { GESTimeline *timeline; GESTrack *tracka; timeline = ges_timeline_new (); tracka = GES_TRACK (ges_audio_track_new ()); if (!ges_timeline_add_track (timeline, tracka)) { gst_object_unref (timeline); timeline = NULL; } GESLayer *layer1 = ges_layer_new (); GESLayer *layer2 = ges_layer_new (); ges_timeline_add_layer (timeline, layer1); ges_timeline_add_layer (timeline, layer2); g_object_set (layer1, "priority", 0, NULL); g_object_set (layer2, "priority", 1, NULL); GESClip *music1 = ges_clip_from_rel_path ("audio/02_Oliver_Huntemann_-_Rikarda.flac", layer1, 0, 0, 10, GES_TRACK_TYPE_AUDIO); ges_clip_from_rel_path ("audio/prof.ogg", layer2, 0, 0, 10, GES_TRACK_TYPE_AUDIO); GESTrackElement *elem = ges_clip_find_track_element (music1, tracka, G_TYPE_NONE); ges_track_element_set_child_properties (elem, "volume", 2.1, NULL); ges_timeline_commit (timeline); return timeline; }
void ges_base_xml_formatter_add_track (GESBaseXmlFormatter * self, GESTrackType track_type, GstCaps * caps, const gchar * id, GstStructure * properties, const gchar * metadatas, GError ** error) { GESTrack *track; GESBaseXmlFormatterPrivate *priv = _GET_PRIV (self); if (priv->check_only) { if (caps) gst_caps_unref (caps); return; } track = ges_track_new (track_type, caps); ges_timeline_add_track (GES_FORMATTER (self)->timeline, track); if (properties) { gchar *restriction; GstCaps *caps; gst_structure_get (properties, "restriction-caps", G_TYPE_STRING, &restriction, NULL); gst_structure_remove_fields (properties, "restriction-caps", "caps", "message-forward", NULL); if (g_strcmp0 (restriction, "NULL")) { caps = gst_caps_from_string (restriction); ges_track_set_restriction_caps (track, caps); } gst_structure_foreach (properties, (GstStructureForeachFunc) set_property_foreach, track); } g_hash_table_insert (priv->tracks, g_strdup (id), gst_object_ref (track)); if (metadatas) ges_meta_container_add_metas_from_string (GES_META_CONTAINER (track), metadatas); }
GESTimeline * alphaTestTL (void) { GESTimeline *timeline; GESTrack *trackv; timeline = ges_timeline_new (); trackv = GES_TRACK (ges_video_track_new ()); if (!ges_timeline_add_track (timeline, trackv)) { gst_object_unref (timeline); timeline = NULL; } GESLayer *layer1 = ges_layer_new (); GESLayer *layer2 = ges_layer_new (); ges_timeline_add_layer (timeline, layer1); ges_timeline_add_layer (timeline, layer2); g_object_set (layer1, "priority", 0, NULL); g_object_set (layer2, "priority", 1, NULL); GESClip *png = ges_clip_from_rel_path ("image/Fish.png", layer1, 0, 0, 10, GES_TRACK_TYPE_VIDEO); GESTrackElement *elem = ges_clip_find_track_element (png, trackv, G_TYPE_NONE); ges_track_element_set_child_properties (elem, "alpha", 0.5, NULL); ges_clip_from_rel_path ("hd/fluidsimulation.mp4", layer2, 0, 20, 10, GES_TRACK_TYPE_VIDEO); ges_timeline_commit (timeline); return timeline; }
/* Play back a list of audio files sequentially: each file becomes a
 * one-second clip appended to a single simple layer, and the pipeline is
 * previewed under a GMainLoop.  Uses the legacy GESSimpleTimelineLayer /
 * ges_track_audio_raw_new() API.
 *
 * Usage: prog <list of audio files>; returns 0 on success, -1 on error. */
int
main (int argc, gchar ** argv)
{
  GESTimelinePipeline *pipeline;
  GESTimeline *timeline;
  GESTrack *tracka;
  GESTimelineLayer *layer;
  GMainLoop *mainloop;
  guint i;

  if (argc < 2) {
    g_print ("Usage: %s <list of audio files>\n", argv[0]);
    return -1;
  }

  /* Initialize GStreamer (this will parse environment variables and
   * commandline arguments. */
  gst_init (&argc, &argv);

  /* Initialize the GStreamer Editing Services */
  ges_init ();

  /* Setup of an audio timeline */

  /* This is our main GESTimeline */
  timeline = ges_timeline_new ();

  tracka = ges_track_audio_raw_new ();

  /* We are only going to be doing one layer of timeline objects */
  layer = (GESTimelineLayer *) ges_simple_timeline_layer_new ();

  /* Add the tracks and the layer to the timeline */
  if (!ges_timeline_add_layer (timeline, layer))
    return -1;
  if (!ges_timeline_add_track (timeline, tracka))
    return -1;

  /* Here we've finished initializing our timeline, we're
   * ready to start using it... by solely working with the layer ! */

  for (i = 1; i < argc; i++) {
    gchar *uri = gst_filename_to_uri (argv[i], NULL);
    GESTimelineFileSource *src = ges_timeline_filesource_new (uri);

    g_assert (src);
    g_free (uri);

    /* Each clip plays for exactly one second. */
    g_object_set (src, "duration", GST_SECOND, NULL);
    /* Since we're using a GESSimpleTimelineLayer, objects will be
     * automatically appended to the end of the layer */
    ges_timeline_layer_add_object (layer, (GESTimelineObject *) src);
  }

  /* In order to view our timeline, let's grab a convenience pipeline to put
   * our timeline in. */
  pipeline = ges_timeline_pipeline_new ();

  /* Add the timeline to that pipeline */
  if (!ges_timeline_pipeline_add_timeline (pipeline, timeline))
    return -1;

  /* The following is standard usage of a GStreamer pipeline (note how you
   * haven't had to care about GStreamer so far ?).
   *
   * We set the pipeline to playing ... */
  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);

  /* .. and we start a GMainLoop. GES **REQUIRES** a GMainLoop to be running
   * in order to function properly ! */
  mainloop = g_main_loop_new (NULL, FALSE);

  /* Quit after one second per input file (each clip lasts GST_SECOND). */
  g_timeout_add_seconds (argc - 1, (GSourceFunc) g_main_loop_quit, mainloop);
  g_main_loop_run (mainloop);

  return 0;
}
/* Build a timeline from command-line triplets.
 *
 * @nbargs/@argv describe clips in groups of three strings:
 *   <source> <arg0> <duration>
 * where <source> is "+pattern" (test source named by arg0), "+transition"
 * (transition type named by arg0), "+title" (text arg0), or a file path /
 * URI (arg0 is then the in-point).  @audio/@video select which raw tracks
 * are created.  Objects are appended in order to one simple layer.
 *
 * Returns: the new timeline, or NULL on any parse/build failure (the
 * partially built timeline is released). */
static GESTimeline *
create_timeline (int nbargs, gchar ** argv, gchar * audio, gchar * video)
{
  GESTimelineLayer *layer;
  GESTrack *tracka = NULL, *trackv = NULL;
  GESTimeline *timeline;
  guint i;

  timeline = ges_timeline_new ();

  if (audio)
    tracka = ges_track_audio_raw_new ();
  if (video)
    trackv = ges_track_video_raw_new ();

  /* We are only going to be doing one layer of timeline objects */
  layer = (GESTimelineLayer *) ges_simple_timeline_layer_new ();

  /* Add the tracks and the layer to the timeline; each track is only
   * required when the corresponding audio/video flag was given. */
  if (!ges_timeline_add_layer (timeline, layer) ||
      !(!audio || ges_timeline_add_track (timeline, tracka)) ||
      !(!video || ges_timeline_add_track (timeline, trackv)))
    goto build_failure;

  /* Here we've finished initializing our timeline, we're
   * ready to start using it... by solely working with the layer !*/
  for (i = 0; i < nbargs / 3; i++) {
    GESTimelineObject *obj;
    char *source = argv[i * 3];
    char *arg0 = argv[(i * 3) + 1];
    guint64 duration = str_to_time (argv[(i * 3) + 2]);

    if (!g_strcmp0 ("+pattern", source)) {
      /* Test source selected by pattern nick (e.g. "snow"). */
      obj = GES_TIMELINE_OBJECT (ges_timeline_test_source_new_for_nick
          (arg0));
      if (!obj) {
        g_error ("%s is an invalid pattern name!\n", arg0);
        goto build_failure;
      }

      g_object_set (G_OBJECT (obj), "duration", duration, NULL);

      g_printf ("Adding <pattern:%s> duration %" GST_TIME_FORMAT "\n",
          arg0, GST_TIME_ARGS (duration));
    } else if (!g_strcmp0 ("+transition", source)) {
      /* Transitions must have a strictly positive duration. */
      if (duration <= 0) {
        g_error ("durations must be greater than 0");
        goto build_failure;
      }

      obj =
          GES_TIMELINE_OBJECT (ges_timeline_standard_transition_new_for_nick
          (arg0));
      if (!obj) {
        g_error ("invalid transition type\n");
        goto build_failure;
      }

      g_object_set (G_OBJECT (obj), "duration", duration, NULL);

      g_printf ("Adding <transition:%s> duration %" GST_TIME_FORMAT "\n",
          arg0, GST_TIME_ARGS (duration));
    } else if (!g_strcmp0 ("+title", source)) {
      obj = GES_TIMELINE_OBJECT (ges_timeline_title_source_new ());

      g_object_set (obj, "duration", duration, "text", arg0, NULL);

      g_printf ("Adding <title:%s> duration %" GST_TIME_FORMAT "\n",
          arg0, GST_TIME_ARGS (duration));
    } else {
      /* Default case: treat <source> as a file; arg0 is the in-point. */
      gchar *uri;
      guint64 inpoint;

      if (!(uri = ensure_uri (source))) {
        GST_ERROR ("couldn't create uri for '%s'", source);
        goto build_failure;
      }

      inpoint = str_to_time (argv[i * 3 + 1]);
      obj = GES_TIMELINE_OBJECT (ges_timeline_filesource_new (uri));
      g_object_set (obj,
          "in-point", (guint64) inpoint, "duration", (guint64) duration,
          NULL);

      g_printf ("Adding clip %s inpoint:%" GST_TIME_FORMAT " duration:%"
          GST_TIME_FORMAT "\n", uri, GST_TIME_ARGS (inpoint),
          GST_TIME_ARGS (duration));

      g_free (uri);
    }

    /* Since we're using a GESSimpleTimelineLayer, objects will be
     * automatically appended to the end of the layer */
    ges_timeline_layer_add_object (layer, obj);
  }

  return timeline;

build_failure:
  {
    g_object_unref (timeline);
    return NULL;
  }
}
/* Play a video file with its audio original, muted, or replaced.
 *
 * Options: -i/--inpoint (s), -d/--duration (s), -m/--mute,
 * -a/--audiofile.  Builds an A/V pipeline with two layers, adds the clip
 * to the front layer, plays, and quits after duration + 1 seconds. */
int
main (int argc, gchar ** argv)
{
  GError *err = NULL;
  GOptionContext *ctx;
  GESPipeline *pipeline;
  GESTimeline *timeline;
  GESTrack *tracka, *trackv;
  GESLayer *layer1, *layer2;
  GESUriClip *src;
  GMainLoop *mainloop;
  gint inpoint = 0, duration = 10;
  gboolean mute = FALSE;
  gchar *audiofile = NULL;
  GOptionEntry options[] = {
    {"inpoint", 'i', 0, G_OPTION_ARG_INT, &inpoint,
        "in-point in the file (in seconds, default:0s)", "seconds"},
    {"duration", 'd', 0, G_OPTION_ARG_INT, &duration,
        "duration to use from the file (in seconds, default:10s)", "seconds"},
    {"mute", 'm', 0, G_OPTION_ARG_NONE, &mute,
        "Whether to mute the audio from the file",},
    {"audiofile", 'a', 0, G_OPTION_ARG_FILENAME, &audiofile,
          "Use this audiofile instead of the original audio from the file",
        "audiofile"},
    {NULL}
  };

  ctx =
      g_option_context_new
      ("- Plays an video file with sound (origin/muted/replaced)");
  g_option_context_add_main_entries (ctx, options, NULL);
  g_option_context_add_group (ctx, gst_init_get_option_group ());

  if (!g_option_context_parse (ctx, &argc, &argv, &err)) {
    g_print ("Error initializing %s\n", err->message);
    g_clear_error (&err);       /* was leaked */
    exit (1);
  }

  if (argc == 1) {
    g_print ("%s", g_option_context_get_help (ctx, TRUE, NULL));
    exit (0);
  }
  g_option_context_free (ctx);

  ges_init ();

  /* Create an Audio/Video pipeline with two layers */
  pipeline = ges_pipeline_new ();

  timeline = ges_timeline_new ();

  tracka = GES_TRACK (ges_audio_track_new ());
  trackv = GES_TRACK (ges_video_track_new ());

  layer1 = ges_layer_new ();
  layer2 = ges_layer_new ();
  g_object_set (layer2, "priority", 1, NULL);

  if (!ges_timeline_add_layer (timeline, layer1) ||
      !ges_timeline_add_layer (timeline, layer2) ||
      !ges_timeline_add_track (timeline, tracka) ||
      !ges_timeline_add_track (timeline, trackv) ||
      !ges_pipeline_set_timeline (pipeline, timeline))
    return -1;

  {
    gchar *uri = gst_filename_to_uri (argv[1], NULL);
    /* Add the main audio/video file */
    src = ges_uri_clip_new (uri);
    g_free (uri);

    /* "start", "in-point" and "duration" are 64-bit properties: every
     * varargs value must really be a guint64, otherwise g_object_set()
     * reads past the pushed int (UB on LP64). */
    g_object_set (src,
        "start", (guint64) 0,
        "in-point", (guint64) inpoint * GST_SECOND,
        "duration", (guint64) duration * GST_SECOND, "mute", mute, NULL);
    ges_layer_add_clip (layer1, GES_CLIP (src));
  }

  /* Play the pipeline */
  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);

  mainloop = g_main_loop_new (NULL, FALSE);
  g_timeout_add_seconds (duration + 1, (GSourceFunc) g_main_loop_quit,
      mainloop);
  g_main_loop_run (mainloop);

  return 0;
}
/* An image sequence test: plays a multifile:// image pattern as a single
 * video clip for --duration seconds (default 10s), then quits. */
int
main (int argc, gchar ** argv)
{
  GError *err = NULL;
  GOptionContext *ctx;
  GESPipeline *pipeline;
  GESTimeline *timeline;
  GESAsset *asset;
  GESLayer *layer;
  GMainLoop *mainloop;
  GESTrack *track;
  gint duration = 10;
  gchar *filepattern = NULL;
  GOptionEntry options[] = {
    {"duration", 'd', 0, G_OPTION_ARG_INT, &duration,
        "duration to use from the file (in seconds, default:10s)", "seconds"},
    {"pattern-url", 'u', 0, G_OPTION_ARG_FILENAME, &filepattern,
          "Pattern of the files. i.e. multifile:///foo/%04d.jpg",
        "pattern-url"},
    {NULL}
  };

  ctx = g_option_context_new ("- Plays an image sequence");
  g_option_context_add_main_entries (ctx, options, NULL);
  g_option_context_add_group (ctx, gst_init_get_option_group ());

  if (!g_option_context_parse (ctx, &argc, &argv, &err)) {
    g_print ("Error initializing %s\n", err->message);
    g_clear_error (&err);
    exit (1);
  }

  if (filepattern == NULL) {
    g_print ("%s", g_option_context_get_help (ctx, TRUE, NULL));
    exit (0);
  }
  g_option_context_free (ctx);

  gst_init (&argc, &argv);
  ges_init ();

  timeline = ges_timeline_new ();
  track = GES_TRACK (ges_video_track_new ());
  ges_timeline_add_track (timeline, track);

  layer = ges_layer_new ();
  if (!ges_timeline_add_layer (timeline, layer))
    return -1;

  asset = GES_ASSET (ges_uri_clip_asset_request_sync (filepattern, &err));
  if (err) {
    /* Previously the error was silently ignored and a NULL asset used. */
    g_print ("Could not load asset %s: %s\n", filepattern, err->message);
    g_clear_error (&err);
    return -1;
  }

  /* Honor --duration: it used to be parsed but never used (the clip length
   * was hard-coded to 5s and the timeout to 4s). */
  ges_layer_add_asset (layer, asset, 0, 0, duration * GST_SECOND,
      GES_TRACK_TYPE_VIDEO);

  pipeline = ges_pipeline_new ();

  if (!ges_pipeline_set_timeline (pipeline, timeline))
    return -1;

  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);

  mainloop = g_main_loop_new (NULL, FALSE);
  g_timeout_add_seconds (duration + 1, (GSourceFunc) g_main_loop_quit,
      mainloop);
  g_main_loop_run (mainloop);

  return 0;
}