GESTimeline * positionTestTL (void) { GESTimeline *timeline; GESTrack *trackv; GError **error = NULL; GESAsset *asset; GESClip *clip; timeline = ges_timeline_new (); trackv = GES_TRACK (ges_video_track_new ()); ges_timeline_add_track (timeline, trackv); GESLayer *layer = ges_layer_new (); ges_timeline_add_layer (timeline, layer); asset = GES_ASSET (ges_uri_clip_asset_request_sync (ges_renderer_get_absolute_path ("image/wallpaper720p.jpg"), error)); clip = ges_layer_add_asset (layer, asset, 0, 0, 2 * GST_SECOND, GES_TRACK_TYPE_VIDEO); GESTrackElement *elem = ges_clip_find_track_element (clip, trackv, G_TYPE_NONE); ges_track_element_set_child_properties (elem, "posx", 100, "width", 100, NULL); ges_timeline_commit (timeline); return timeline; }
/* Map a serialized track id to its live GESTrack and return the clip's
 * source element in that track (NULL track matches any track). */
static GESTrackElement *
_get_element_by_track_id (GESBaseXmlFormatterPrivate * priv,
    const gchar * track_id, GESClip * clip)
{
  return ges_clip_find_track_element (clip,
      g_hash_table_lookup (priv->tracks, track_id), GES_TYPE_SOURCE);
}
GESTimeline * compTL (void) { GESTimeline *timeline; GESTrack *trackv; timeline = ges_timeline_new (); trackv = GES_TRACK (ges_video_track_new ()); ges_timeline_add_track (timeline, trackv); const gchar *assets[] = { "image/vieh.png", "image/PNG_transparency_demonstration_1.png", "image/Ice_Cream.png", "image/Fish.png" }; guint asset_count = 4; for (int i = 1; i <= asset_count; i++) { GESLayer *layer = ges_layer_new (); ges_timeline_add_layer (timeline, layer); g_object_set (layer, "priority", i - 1, NULL); GESClip *vieh = ges_clip_from_rel_path (assets[i - 1], layer, 0, 0, 10, GES_TRACK_TYPE_VIDEO); GESTrackElement *elem = ges_clip_find_track_element (vieh, trackv, G_TYPE_NONE); GESUriClipAsset *asset = GES_URI_CLIP_ASSET (ges_extractable_get_asset (GES_EXTRACTABLE (vieh))); guint width = ges_asset_get_width (asset); guint height = ges_asset_get_height (asset); g_print ("%s: %dx%d\n", assets[i - 1], width, height); ges_track_element_set_child_properties (elem, "posx", i * 100, "posy", i * 100, "width", i * 100 * width / height, "height", (i * 100) - 1, NULL); } GESLayer *backgroud_layer = ges_layer_new (); ges_timeline_add_layer (timeline, backgroud_layer); g_object_set (backgroud_layer, "priority", asset_count, NULL); ges_clip_from_rel_path ("image/wallpaper-2597248.jpg", backgroud_layer, 0, 0, 10, GES_TRACK_TYPE_VIDEO); ges_timeline_commit (timeline); return timeline; }
GESTimeline * volumeTestTL (void) { GESTimeline *timeline; GESTrack *tracka; timeline = ges_timeline_new (); tracka = GES_TRACK (ges_audio_track_new ()); if (!ges_timeline_add_track (timeline, tracka)) { gst_object_unref (timeline); timeline = NULL; } GESLayer *layer1 = ges_layer_new (); GESLayer *layer2 = ges_layer_new (); ges_timeline_add_layer (timeline, layer1); ges_timeline_add_layer (timeline, layer2); g_object_set (layer1, "priority", 0, NULL); g_object_set (layer2, "priority", 1, NULL); GESClip *music1 = ges_clip_from_rel_path ("audio/02_Oliver_Huntemann_-_Rikarda.flac", layer1, 0, 0, 10, GES_TRACK_TYPE_AUDIO); ges_clip_from_rel_path ("audio/prof.ogg", layer2, 0, 0, 10, GES_TRACK_TYPE_AUDIO); GESTrackElement *elem = ges_clip_find_track_element (music1, tracka, G_TYPE_NONE); ges_track_element_set_child_properties (elem, "volume", 2.1, NULL); ges_timeline_commit (timeline); return timeline; }
GESTimeline * alphaTestTL (void) { GESTimeline *timeline; GESTrack *trackv; timeline = ges_timeline_new (); trackv = GES_TRACK (ges_video_track_new ()); if (!ges_timeline_add_track (timeline, trackv)) { gst_object_unref (timeline); timeline = NULL; } GESLayer *layer1 = ges_layer_new (); GESLayer *layer2 = ges_layer_new (); ges_timeline_add_layer (timeline, layer1); ges_timeline_add_layer (timeline, layer2); g_object_set (layer1, "priority", 0, NULL); g_object_set (layer2, "priority", 1, NULL); GESClip *png = ges_clip_from_rel_path ("image/Fish.png", layer1, 0, 0, 10, GES_TRACK_TYPE_VIDEO); GESTrackElement *elem = ges_clip_find_track_element (png, trackv, G_TYPE_NONE); ges_track_element_set_child_properties (elem, "alpha", 0.5, NULL); ges_clip_from_rel_path ("hd/fluidsimulation.mp4", layer2, 0, 20, 10, GES_TRACK_TYPE_VIDEO); ges_timeline_commit (timeline); return timeline; }
/* Read the "clips" array from @reader and materialize each entry as a clip
 * on @layer, applying the optional per-clip JSON keys: "volume" (audio),
 * "x"/"y"/"alpha"/"size" (video), and "effect" (a GESEffect bin
 * description).  @type selects the track type for single clips;
 * @absolute_paths controls whether "src" is used verbatim or resolved via
 * ges_renderer_get_absolute_path(). */
void
getClips (JsonReader * reader, GESLayer * layer, GESTrackType type,
    gboolean absolute_paths)
{
  int i;

  json_reader_read_member (reader, "clips");
  g_print ("= clips =\n");

  /* Resolve the timeline's tracks once, outside the loop (the original
   * re-fetched and leaked the list on every iteration).
   * NOTE(review): this assumes the timeline holds the video track first
   * and the audio track last -- confirm against timeline construction. */
  GESTimeline *tl = ges_layer_get_timeline (layer);
  GList *tracks = ges_timeline_get_tracks (tl);
  GESTrack *trackv = g_list_first (tracks)->data;
  GESTrack *tracka = g_list_last (tracks)->data;

  for (i = 0; i < json_reader_count_elements (reader); i++) {
    json_reader_read_element (reader, i);
    const char *src = getString (reader, "src");
    int start = getInt (reader, "start");
    int in = getInt (reader, "in");
    int dur = getInt (reader, "dur");
    g_print ("Clip: %s (start: %d, in: %d, dur: %d)\n", src, start, in, dur);

    GESClip *clip;
    if (is_in_members (reader, "multi") && getBool (reader, "multi")) {
      g_print ("multi on.\n");
      clip = ges_multi_clip_from_path (src, layer, start, in, dur,
          absolute_paths);
    } else {
      const char *path;
      if (absolute_paths == TRUE) {
        path = src;
      } else {
        path = ges_renderer_get_absolute_path (src);
      }
      clip = ges_clip_from_path (path, layer, start, in, dur, type);
    }

    if (is_in_members (reader, "volume")) {
      double volume = getDouble (reader, "volume");
      GESTrackElement *audioElement =
          ges_clip_find_track_element (clip, tracka, G_TYPE_NONE);
      if (audioElement != NULL) {
        ges_track_element_set_child_properties (audioElement,
            "volume", volume, NULL);
        gst_object_unref (audioElement);        /* transfer-full return */
      }
    }

    GESTrackElement *videoElement =
        ges_clip_find_track_element (clip, trackv, G_TYPE_NONE);
    if (videoElement != NULL) {
      if (is_in_members (reader, "x")) {
        int x = getInt (reader, "x");
        ges_track_element_set_child_properties (videoElement, "posx", x,
            NULL);
      }
      if (is_in_members (reader, "y")) {
        int y = getInt (reader, "y");
        ges_track_element_set_child_properties (videoElement, "posy", y,
            NULL);
      }
      if (is_in_members (reader, "alpha")) {
        gdouble alpha = getDouble (reader, "alpha");
        ges_track_element_set_child_properties (videoElement, "alpha", alpha,
            NULL);
      }
      if (is_in_members (reader, "size")) {
        /* Scale the clip to size * native asset dimensions; skipped when
         * the asset's dimensions are unknown (0). */
        gdouble size = getDouble (reader, "size");
        GESUriClipAsset *asset = GES_URI_CLIP_ASSET
            (ges_extractable_get_asset (GES_EXTRACTABLE (clip)));
        guint width = ges_asset_get_width (asset);
        guint height = ges_asset_get_height (asset);
        if (width != 0 && height != 0) {
          double dw = width * size;
          double dh = height * size;
          g_print ("%ux%u => * %f => %dx%d\n", width, height, size,
              (int) dw, (int) dh);
          ges_track_element_set_child_properties (videoElement,
              "width", (int) dw, "height", (int) dh, NULL);
        }
      }
      if (is_in_members (reader, "effect")) {
        const char *effect_str = getString (reader, "effect");
        if (strcmp (effect_str, "") != 0) {
          g_print ("Using effect %s\n", effect_str);
          GESEffect *effect = ges_effect_new (effect_str);
          ges_container_add (GES_CONTAINER (clip),
              GES_TIMELINE_ELEMENT (effect));
        }
      }
      gst_object_unref (videoElement);          /* transfer-full return */
    }
    json_reader_end_element (reader);
  }

  /* ges_timeline_get_tracks() is transfer-full: drop the list and refs. */
  g_list_free_full (tracks, gst_object_unref);

  json_reader_end_member (reader);
}